feat: Complete toolchain - organ, forge, store, compilation, installer, site source, architecture docs

- tools/organ.py: Pack/install/publish AI persona organs
- tools/forge.sh: Convert HuggingFace models to GGUF, quantize
- tools/store.sh: Browse/install/publish community models
- tools/compilation/: Cross-platform build scripts (Linux/macOS)
- scripts/install.sh: Universal installer (auto-detect OS/arch/GPU)
- site/saas/: SaaS frontend + backend source (v3 unified design)
- site/vitrine/: Main site source (inference-x.com)
- docs/ARCHITECTURE.md: Full system architecture

All plans now in test mode - Studio & Enterprise free to test.
Branch: master | Maintainer: Anti-Atlas craton (elmadani)
This commit is contained in:
SALKA 2026-02-24 20:39:52 +00:00
parent 6d966c85d1
commit 5b3d4e0e1d
9 changed files with 2931 additions and 0 deletions

24
docs/ARCHITECTURE.md Normal file
View File

@ -0,0 +1,24 @@
# IX System Architecture
## Infrastructure
```
inference-x.com (ARCHE · OVH)
├── nginx reverse proxy
├── Gitea (port 3000)
└── Site vitrine (HTML)

build.inference-x.com (OASIS · Hetzner)
├── ix-saas (Node.js, PM2, port 4080)
├── echo brain (port 8089)
├── invoke gateway (port 3001)
└── OneCloud VM pool (demos)
```
## Demo Flow
Visitor → POST /api/demo/start → OneCloud VM (2vCPU/2GB) → IX engine → LLaMA 3.2 1B → SSE stream → Chat → 30min → VM destroyed
## Plans (Test Mode)
- Community: Free forever, full local engine
- Studio Test: Free, 2vCPU cloud instance, store access
- Enterprise Test: Free, 8vCPU/32GB, forge, store publish
## API
Public: /api/health, /api/demo/stats, /api/demo/start, /api/demo/stream/:token, /api/community/scout
Auth: /api/auth/register, /api/auth/login, /api/builds, /api/instance/provision, /api/store

164
scripts/install.sh Normal file
View File

@ -0,0 +1,164 @@
#!/bin/bash
# IX Universal Installer — inference-x.com
# Detects OS/arch, downloads correct binary, sets up config
#
# pipefail matters here: build output below is piped through `tail`,
# which would otherwise mask git/cmake exit codes under plain `set -e`.
# `-u` is deliberately omitted: macOS ships bash 3.2, where an empty
# "$@" under `set -u` is a fatal error.
set -eo pipefail
readonly VERSION="latest"                        # release channel (not yet referenced in this portion)
readonly BASE_URL="https://inference-x.com/releases"
readonly IX_HOME="$HOME/.inference-x"            # per-user data: models, organs, configs
readonly BIN_DIR="/usr/local/bin"                # system-wide binary location (may need sudo)
# ANSI color codes for terminal output; NC resets attributes.
RED='\033[0;31m'; GREEN='\033[0;32m'; TEAL='\033[0;36m'; AMBER='\033[0;33m'; NC='\033[0m'
# Logging helpers: info / success / warning / fatal.
# warn and err write to stderr so piped stdout stays clean; err exits 1.
log() { echo -e "${TEAL}[IX]${NC} $1"; }
ok() { echo -e "${GREEN}[✓]${NC} $1"; }
warn() { echo -e "${AMBER}[!]${NC} $1" >&2; }
err() { echo -e "${RED}[✗]${NC} $1" >&2; exit 1; }
detect_platform() {
  # Map `uname` OS/arch onto a release-artifact suffix in global $PLATFORM.
  # Exits via err() for any combination we do not publish binaries for.
  local os arch
  os=$(uname -s | tr '[:upper:]' '[:lower:]')
  arch=$(uname -m)
  case "$os" in
    linux)
      case "$arch" in
        x86_64) PLATFORM="linux-x64" ;;
        aarch64) PLATFORM="linux-arm64" ;;
        armv7l) PLATFORM="linux-armv7" ;;
        *) err "Unsupported arch: $arch" ;;
      esac ;;
    darwin)
      case "$arch" in
        arm64) PLATFORM="macos-arm64" ;;
        x86_64) PLATFORM="macos-x64" ;;
        *) err "Unsupported arch: $arch" ;;
      esac ;;
    *) err "Unsupported OS: $os. Use Windows installer from inference-x.com" ;;
  esac
  # BUG FIX: original printed "$OS/$ARCH$PLATFORM" with no separator
  # between arch and platform, producing e.g. "linux/x86_64linux-x64".
  log "Detected: $os/$arch → $PLATFORM"
}
detect_backend() {
  # Choose the fastest usable compute backend, stored in global $BACKEND.
  # Probe order: NVIDIA CUDA → Apple Metal → Vulkan → plain CPU fallback.
  BACKEND="cpu"
  if command -v nvidia-smi &>/dev/null; then
    BACKEND="cuda"
    ok "NVIDIA GPU detected — CUDA backend will be used"
    return
  fi
  if [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]; then
    BACKEND="metal"
    ok "Apple Silicon detected — Metal backend will be used"
    return
  fi
  if command -v vulkaninfo &>/dev/null; then
    BACKEND="vulkan"
    ok "Vulkan detected"
    return
  fi
  log "Using CPU backend (universal)"
}
download_ix() {
  # Fetch the prebuilt engine binary for $PLATFORM into /tmp/ix-binary,
  # trying each mirror in order. Falls back to build_from_source when
  # every mirror fails or returns an empty body.
  # BUG FIX: original logged a single $BASE_URL URL before looping over
  # different mirrors (misleading), and accepted zero-byte downloads.
  local mirrors=(
    "https://inference-x.com/releases"
    "https://git.inference-x.com/elmadani/inference-x/releases/download/latest"
  )
  local mirror downloaded=false
  for mirror in "${mirrors[@]}"; do
    log "Downloading IX engine from $mirror/ix-$PLATFORM..."
    # -f: fail on HTTP errors; -s accept only a non-empty file.
    if curl -fsSL "$mirror/ix-$PLATFORM" -o /tmp/ix-binary 2>/dev/null \
        && [ -s /tmp/ix-binary ]; then
      downloaded=true
      break
    fi
  done
  if [ "$downloaded" = false ]; then
    warn "Binary download unavailable. Building from source..."
    build_from_source
    return
  fi
  chmod +x /tmp/ix-binary
  ok "Downloaded IX binary"
}
build_from_source() {
  # Fallback when no prebuilt binary is available: clone and compile the
  # engine, leaving the result at /tmp/ix-binary.
  # Requires git, cmake, make and a C/C++ toolchain on PATH.
  log "Building IX from source (requires git, cmake, make, gcc)..."
  local tmp
  tmp=$(mktemp -d) || err "mktemp failed"
  # `tail` keeps output short, but it also masks the producer's exit code
  # under plain `set -e` — so check PIPESTATUS[0] explicitly.
  git clone --depth=1 https://git.inference-x.com/elmadani/inference-x.git "$tmp/ix" 2>&1 | tail -3 \
    && [ "${PIPESTATUS[0]}" -eq 0 ] || err "git clone failed"
  local jobs
  jobs=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 2)
  # Configure and build inside a subshell so the caller's cwd is untouched
  # (the original relied on `cd -`, which breaks if an earlier step fails).
  (
    cd "$tmp/ix" || exit 1
    cmake -B build -DCMAKE_BUILD_TYPE=Release 2>&1 | tail -3 \
      && [ "${PIPESTATUS[0]}" -eq 0 ] || exit 1
    cmake --build build -j"$jobs" 2>&1 | tail -5 \
      && [ "${PIPESTATUS[0]}" -eq 0 ] || exit 1
  ) || err "cmake build failed"
  cp "$tmp/ix/build/bin/ix" /tmp/ix-binary
  chmod +x /tmp/ix-binary
  rm -rf -- "$tmp"   # clean up the source tree (original leaked it)
  ok "Built from source"
}
install_ix() {
  # Create the IX home layout, place the binary in $BIN_DIR (with sudo
  # when the directory is not writable), and write the default JSON
  # config for the backend chosen by detect_backend().
  local sub
  for sub in "" /models /organs /configs; do
    mkdir -p "${IX_HOME}${sub}"
  done
  local dest="$BIN_DIR/ix"
  if [ -w "$BIN_DIR" ]; then
    cp /tmp/ix-binary "$dest"
    ok "Installed to $BIN_DIR/ix"
  else
    # No write permission on the system bin dir — escalate for the copy only.
    sudo cp /tmp/ix-binary "$dest"
    ok "Installed to $BIN_DIR/ix (sudo)"
  fi
  # Unquoted heredoc delimiter on purpose: $BACKEND expands into the JSON.
  cat > "$IX_HOME/configs/default.json" << CONFIG
{
"version": "1.0",
"engine": "inference-x",
"hardware": { "backend": "$BACKEND" },
"model": {
"id": "llama3.2-1b",
"context_size": 4096,
"max_tokens": 512
},
"persona": {
"name": "Assistant",
"system_prompt": "You are a helpful, private AI assistant. You run locally. No data leaves this device.",
"temperature": 0.7
},
"server": { "port": 8080, "bind": "127.0.0.1" }
}
CONFIG
  ok "Default config created at $IX_HOME/configs/default.json"
}
print_success() {
  # Post-install summary: where the binary/config live, API endpoint,
  # and suggested first model downloads. Output only; no side effects.
  printf '\n'
  printf '%b\n' "${GREEN}╔══════════════════════════════════════════╗${NC}"
  printf '%b\n' "${GREEN}║ Inference-X installed successfully! ║${NC}"
  printf '%b\n' "${GREEN}╚══════════════════════════════════════════╝${NC}"
  printf '\n'
  printf '%b\n' " ${TEAL}Start IX:${NC} ix --config ~/.inference-x/configs/default.json"
  printf '%b\n' " ${TEAL}API:${NC} http://localhost:8080/v1/chat/completions"
  printf '%b\n' " ${TEAL}Models:${NC} ~/.inference-x/models/"
  printf '%b\n' " ${TEAL}Organs:${NC} ~/.inference-x/organs/"
  printf '%b\n' " ${TEAL}Builder:${NC} https://build.inference-x.com"
  printf '%b\n' " ${TEAL}Docs:${NC} https://git.inference-x.com/elmadani/ix-tools"
  printf '\n'
  printf '%b\n' " ${AMBER}First model download:${NC}"
  printf '%b\n' " ix download llama3.2-1b # Fastest (1GB)"
  printf '%b\n' " ix download mistral-7b # Best quality (4GB)"
  printf '\n'
}
main() {
  # Entry point: banner, then detect platform → pick backend →
  # fetch binary → install → print summary, in that order.
  printf '\n'
  printf '%b\n' "${TEAL}════════════════════════════════════════${NC}"
  printf '%b\n' "${TEAL} Inference-X Universal Installer ${NC}"
  printf '%b\n' "${TEAL} Built in Morocco · For Everyone ${NC}"
  printf '%b\n' "${TEAL}════════════════════════════════════════${NC}"
  printf '\n'
  detect_platform
  detect_backend
  download_ix
  install_ix
  print_success
}
main "$@"

1406
site/saas/index.html Normal file

File diff suppressed because it is too large Load Diff

984
site/vitrine/index.html Normal file
View File

@ -0,0 +1,984 @@
<!DOCTYPE html>
<html lang="en" data-theme="dark">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<title>Inference-X — Intelligence for Everyone</title>
<meta name="description" content="305KB. Runs on anything. Your AI, your device, your rules. Free forever.">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link href="https://fonts.googleapis.com/css2?family=Fraunces:ital,opsz,wght@0,9..144,300;0,9..144,700;0,9..144,900;1,9..144,600&family=JetBrains+Mono:wght@400;500&family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
<style>
:root {
--bg: #0C0A09;
--bg2: #151210;
--bg3: #1E1A17;
--card: #221E1A;
--border: #2E2825;
--copper: #C9622A;
--amber: #F0A030;
--teal: #2ECCB8;
--sand: #D9C8B0;
--text: #EDE0D0;
--muted: #8A7A6A;
--green: #4CAF6A;
--red: #E05050;
--font: 'Nunito', sans-serif;
--mono: 'JetBrains Mono', monospace;
--display: 'Fraunces', serif;
}
[data-theme="light"] {
--bg: #F8F4EF;
--bg2: #F0EBE3;
--bg3: #E8E0D5;
--card: #FFFCF8;
--border: #D5CCC0;
--text: #1A1410;
--muted: #6A5A4A;
--sand: #4A3A2A;
}
*{margin:0;padding:0;box-sizing:border-box}
html{scroll-behavior:smooth;font-size:16px}
body{background:var(--bg);color:var(--text);font-family:var(--font);line-height:1.6;overflow-x:hidden}
/* GRAIN */
body::before{content:'';position:fixed;inset:0;pointer-events:none;z-index:9999;opacity:.025;background-image:url("data:image/svg+xml,%3Csvg viewBox='0 0 512 512' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='n'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='.8' numOctaves='4'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23n)'/%3E%3C/svg%3E")}
/* NAV */
nav{position:fixed;top:0;left:0;right:0;z-index:1000;display:flex;align-items:center;justify-content:space-between;padding:.9rem 1.5rem;background:rgba(12,10,9,.88);backdrop-filter:blur(12px);border-bottom:1px solid var(--border)}
.nav-brand{font-family:var(--display);font-size:1.3rem;font-weight:700;color:var(--copper);letter-spacing:-.02em;text-decoration:none}
.nav-brand span{color:var(--amber)}
.nav-right{display:flex;align-items:center;gap:.8rem}
.nav-links{display:flex;gap:1.2rem;list-style:none}
.nav-links a{color:var(--muted);text-decoration:none;font-size:.85rem;font-weight:600;transition:color .2s}
.nav-links a:hover{color:var(--text)}
#tt{background:none;border:1px solid var(--border);color:var(--muted);cursor:pointer;padding:.35rem .6rem;border-radius:.4rem;font-size:.9rem;transition:all .2s}
#tt:hover{border-color:var(--amber);color:var(--amber)}
#lb{background:none;border:1px solid var(--border);color:var(--muted);cursor:pointer;padding:.35rem .6rem;border-radius:.4rem;font-size:.75rem;font-weight:600;font-family:var(--font);transition:all .2s}
#lb:hover{border-color:var(--teal);color:var(--teal)}
/* SECTIONS */
section{padding:5rem 0;max-width:1100px;margin:0 auto;padding-left:1.5rem;padding-right:1.5rem}
section.full{max-width:none;padding-left:0;padding-right:0}
.section-tag{font-size:.72rem;font-weight:800;letter-spacing:.15em;text-transform:uppercase;color:var(--copper);margin-bottom:.8rem}
h1{font-family:var(--display);font-size:clamp(2.5rem,6vw,5rem);font-weight:900;line-height:1.05;letter-spacing:-.03em}
h2{font-family:var(--display);font-size:clamp(1.8rem,4vw,3rem);font-weight:700;line-height:1.1;letter-spacing:-.02em}
h3{font-size:1.1rem;font-weight:700;margin-bottom:.5rem}
.sub{color:var(--muted);font-size:1rem;margin-top:.8rem;max-width:580px;line-height:1.7}
/* HERO */
#hero{min-height:100vh;display:flex;align-items:center;padding-top:5rem;position:relative;overflow:hidden}
.hero-glow{position:absolute;width:600px;height:600px;background:radial-gradient(circle,rgba(201,98,42,.15) 0%,transparent 70%);top:-100px;right:-100px;pointer-events:none}
.hero-glow2{position:absolute;width:400px;height:400px;background:radial-gradient(circle,rgba(46,204,184,.08) 0%,transparent 70%);bottom:0;left:0;pointer-events:none}
.hero-inner{position:relative;z-index:1;max-width:1100px;margin:0 auto;padding:0 1.5rem;width:100%}
.hero-eyebrow{display:inline-flex;align-items:center;gap:.5rem;background:var(--bg3);border:1px solid var(--border);border-radius:2rem;padding:.4rem 1rem;margin-bottom:2rem;font-size:.8rem;font-weight:700;letter-spacing:.1em;text-transform:uppercase;color:var(--copper)}
.hero-eyebrow::before{content:'';width:6px;height:6px;background:var(--teal);border-radius:50%;animation:pulse 2s infinite}
@keyframes pulse{0%,100%{opacity:1;transform:scale(1)}50%{opacity:.5;transform:scale(1.5)}}
.hero-title{font-family:var(--display);font-size:clamp(3rem,7vw,6rem);font-weight:900;line-height:1;letter-spacing:-.03em;margin-bottom:1.5rem}
.hero-title em{font-style:italic;color:var(--copper)}
.hero-sub{font-size:1.15rem;color:var(--muted);max-width:540px;line-height:1.7;margin-bottom:2.5rem}
.hero-stats{display:flex;flex-wrap:wrap;gap:2rem;margin-bottom:3rem}
.stat{display:flex;flex-direction:column}
.stat-n{font-family:var(--display);font-size:2.2rem;font-weight:900;color:var(--amber);line-height:1}
.stat-l{font-size:.78rem;color:var(--muted);font-weight:600;margin-top:.2rem}
.hero-ctas{display:flex;flex-wrap:wrap;gap:1rem}
.btn-primary{display:inline-flex;align-items:center;gap:.5rem;background:var(--copper);color:#fff;padding:.8rem 1.6rem;border-radius:.5rem;text-decoration:none;font-weight:800;font-size:.95rem;transition:all .2s;border:2px solid var(--copper)}
.btn-primary:hover{background:var(--amber);border-color:var(--amber)}
.btn-secondary{display:inline-flex;align-items:center;gap:.5rem;background:transparent;color:var(--text);padding:.8rem 1.6rem;border-radius:.5rem;text-decoration:none;font-weight:700;font-size:.95rem;border:2px solid var(--border);transition:all .2s}
.btn-secondary:hover{border-color:var(--copper);color:var(--copper)}
/* DISCOVER */
.disc-cards{display:grid;grid-template-columns:repeat(auto-fit,minmax(280px,1fr));gap:1.5rem;margin-top:2.5rem}
.disc-card{background:var(--card);border:1px solid var(--border);border-radius:.8rem;padding:1.8rem;transition:border-color .3s}
.disc-card:hover{border-color:var(--copper)}
.disc-icon{font-size:2.5rem;margin-bottom:1rem;display:block}
/* DEVICE GADGET */
.device-widget{background:var(--card);border:1px solid var(--border);border-radius:1rem;padding:2rem;margin-top:2rem}
.slider-wrap{margin:1.5rem 0}
.slider-labels{display:flex;justify-content:space-between;font-size:.75rem;color:var(--muted);margin-bottom:.5rem;font-family:var(--mono)}
#ramSlider{width:100%;accent-color:var(--copper);height:6px;cursor:pointer}
.ram-results{margin-top:1.5rem;display:grid;gap:.8rem}
.ram-item{display:flex;align-items:center;gap:1rem;padding:.8rem 1rem;border-radius:.5rem;border:1px solid var(--border);background:var(--bg2);font-size:.9rem;transition:all .3s}
.ram-item.active{border-color:var(--teal);background:rgba(46,204,184,.07)}
.ram-item .ri-icon{font-size:1.4rem;width:2rem;text-align:center}
.ram-item .ri-name{font-weight:700;flex:1}
.ram-item .ri-size{font-family:var(--mono);font-size:.78rem;color:var(--muted)}
.ram-item .ri-speed{font-size:.78rem;color:var(--green);font-weight:700}
/* PRIVACY */
.priv-compare{display:grid;grid-template-columns:1fr 1fr;gap:1.5rem;margin-top:2rem}
@media(max-width:600px){.priv-compare{grid-template-columns:1fr}}
.priv-card{border-radius:.8rem;padding:1.8rem;border:1px solid var(--border)}
.priv-cloud{background:linear-gradient(135deg,rgba(224,80,80,.08),transparent);border-color:rgba(224,80,80,.3)}
.priv-local{background:linear-gradient(135deg,rgba(76,175,106,.08),transparent);border-color:rgba(76,175,106,.3)}
.priv-title{font-size:1.1rem;font-weight:800;margin-bottom:.8rem;display:flex;align-items:center;gap:.5rem}
.cloud-dot{width:8px;height:8px;border-radius:50%;background:var(--red);display:inline-block}
.local-dot{width:8px;height:8px;border-radius:50%;background:var(--green);display:inline-block}
/* SIZE VIZ */
.size-viz{display:flex;align-items:flex-end;gap:.8rem;margin:2rem 0;padding:1.5rem;background:var(--card);border:1px solid var(--border);border-radius:.8rem;overflow:hidden}
.size-item{display:flex;flex-direction:column;align-items:center;gap:.5rem;font-size:.72rem;color:var(--muted);text-align:center}
.size-bar{border-radius:.3rem .3rem 0 0;min-width:40px;transition:all .5s}
.size-item.ix .size-bar{background:var(--copper)}
.size-item.ix .size-label{color:var(--copper);font-weight:800}
/* ENGINE */
.hw-grid{display:grid;grid-template-columns:repeat(auto-fill,minmax(130px,1fr));gap:.8rem;margin-top:2rem}
.hw-chip{background:var(--card);border:1px solid var(--border);border-radius:.5rem;padding:.8rem .6rem;text-align:center;font-size:.78rem;font-weight:700;font-family:var(--mono);transition:all .2s;cursor:default}
.hw-chip:hover{border-color:var(--copper);color:var(--copper);transform:translateY(-2px)}
.hw-chip .hw-cat{font-size:.62rem;color:var(--muted);font-weight:400;display:block;margin-top:.2rem}
.engine-feats{display:grid;grid-template-columns:repeat(auto-fit,minmax(200px,1fr));gap:1rem;margin-top:1.5rem}
.feat{padding:1.2rem;background:var(--card);border:1px solid var(--border);border-radius:.6rem;font-size:.88rem}
.feat-name{font-weight:800;margin-bottom:.3rem;color:var(--amber)}
/* MODELS */
.models-grid{display:grid;grid-template-columns:repeat(auto-fill,minmax(260px,1fr));gap:1rem;margin-top:2rem}
.model-card{background:var(--card);border:1px solid var(--border);border-radius:.7rem;padding:1.3rem;display:flex;flex-direction:column;gap:.5rem;transition:border-color .2s}
.model-card:hover{border-color:var(--teal)}
.model-name{font-family:var(--mono);font-size:.85rem;font-weight:700;color:var(--teal)}
.model-desc{font-size:.82rem;color:var(--muted);flex:1}
.model-meta{display:flex;flex-wrap:wrap;gap:.4rem;margin-top:.5rem}
.model-tag{font-size:.68rem;background:var(--bg3);border:1px solid var(--border);border-radius:.3rem;padding:.15rem .4rem;color:var(--sand);font-family:var(--mono)}
/* COST */
.cost-compare{display:grid;grid-template-columns:1fr 1fr;gap:1.5rem;margin-top:2rem}
@media(max-width:600px){.cost-compare{grid-template-columns:1fr}}
.cost-card{border-radius:.8rem;padding:2rem;text-align:center}
.cost-cloud-card{background:var(--bg3);border:1px solid var(--border)}
.cost-local-card{background:linear-gradient(135deg,rgba(46,204,184,.08),transparent);border:1px solid rgba(46,204,184,.3)}
.cost-amount{font-family:var(--display);font-size:3.5rem;font-weight:900;line-height:1;margin:1rem 0}
.cost-cloud-card .cost-amount{color:var(--red)}
.cost-local-card .cost-amount{color:var(--teal)}
.cost-sub{font-size:.8rem;color:var(--muted)}
/* API */
.api-box{background:var(--bg2);border:1px solid var(--border);border-radius:.7rem;padding:1.5rem;margin-top:1.5rem;font-family:var(--mono);font-size:.82rem;line-height:1.8;overflow-x:auto}
.api-box .kw{color:var(--copper)}
.api-box .str{color:var(--teal)}
.api-box .cmt{color:var(--muted)}
.api-endpoints{display:flex;flex-wrap:wrap;gap:.5rem;margin-top:1rem}
.api-ep{background:var(--card);border:1px solid var(--border);border-radius:.4rem;padding:.4rem .8rem;font-family:var(--mono);font-size:.75rem}
.ep-get{color:var(--green)}
.ep-post{color:var(--amber)}
/* QUICK START */
.qs-tabs{display:flex;gap:.5rem;margin-bottom:1.5rem;flex-wrap:wrap}
.qs-tab{background:none;border:1px solid var(--border);color:var(--muted);cursor:pointer;padding:.5rem 1rem;border-radius:.4rem;font-size:.85rem;font-weight:700;font-family:var(--font);transition:all .2s}
.qs-tab.active,.qs-tab:hover{border-color:var(--copper);color:var(--copper);background:rgba(201,98,42,.08)}
.qs-block{display:none}
.qs-block.active{display:block}
.step{display:flex;gap:1rem;margin-bottom:1.2rem;align-items:flex-start}
.step-num{flex-shrink:0;width:32px;height:32px;border-radius:50%;background:var(--copper);color:#fff;font-weight:900;font-size:.85rem;display:flex;align-items:center;justify-content:center;font-family:var(--display)}
code{background:var(--bg2);border:1px solid var(--border);border-radius:.35rem;padding:.2rem .5rem;font-family:var(--mono);font-size:.83rem;color:var(--teal)}
pre{background:var(--bg2);border:1px solid var(--border);border-radius:.5rem;padding:1rem;font-family:var(--mono);font-size:.8rem;overflow-x:auto;margin-top:.5rem;line-height:1.7}
pre .c{color:var(--muted)}
pre .v{color:var(--teal)}
pre .s{color:var(--amber)}
/* COMMUNITY TOOLS */
.tools-grid{display:grid;grid-template-columns:repeat(auto-fill,minmax(220px,1fr));gap:1.2rem;margin-top:2rem}
.tool-card{background:var(--card);border:1px solid var(--border);border-radius:.8rem;padding:1.5rem;position:relative;transition:all .25s;text-decoration:none;color:inherit;display:block}
.tool-card:hover{border-color:var(--copper);transform:translateY(-3px)}
.tool-badge{position:absolute;top:1rem;right:1rem;font-size:.62rem;font-weight:800;padding:.2rem .5rem;border-radius:2rem;font-family:var(--mono)}
.badge-live{background:rgba(76,175,106,.15);color:var(--green);border:1px solid rgba(76,175,106,.3)}
.badge-build{background:rgba(240,160,48,.15);color:var(--amber);border:1px solid rgba(240,160,48,.3)}
.badge-coming{background:var(--bg3);color:var(--muted);border:1px solid var(--border)}
.tool-icon{font-size:2rem;margin-bottom:.8rem}
.tool-name{font-weight:800;font-size:.95rem;margin-bottom:.3rem}
.tool-desc{font-size:.8rem;color:var(--muted);line-height:1.5}
/* ORGAN */
.organ-visual{display:flex;flex-wrap:wrap;gap:1rem;margin:2rem 0;align-items:center;justify-content:center}
.organ-node{background:var(--card);border:2px solid var(--border);border-radius:50%;width:80px;height:80px;display:flex;align-items:center;justify-content:center;font-size:1.8rem;cursor:pointer;transition:all .3s;position:relative}
.organ-node:hover{border-color:var(--copper);transform:scale(1.15)}
.organ-node.glow{border-color:var(--teal);box-shadow:0 0 20px rgba(46,204,184,.3);animation:glow-pulse 2s infinite}
@keyframes glow-pulse{0%,100%{box-shadow:0 0 20px rgba(46,204,184,.3)}50%{box-shadow:0 0 40px rgba(46,204,184,.5)}}
.organ-arrow{color:var(--muted);font-size:1.5rem}
.organ-desc{font-size:.88rem;color:var(--muted);max-width:600px;margin:0 auto;text-align:center;margin-top:1rem;line-height:1.7}
/* KHETTARA */
#khettara{background:linear-gradient(180deg,transparent,rgba(201,98,42,.05),transparent)}
.khettara-quote{font-family:var(--display);font-size:clamp(1.3rem,3vw,2rem);font-style:italic;color:var(--sand);line-height:1.5;max-width:700px;position:relative;padding:2rem}
.khettara-quote::before{content:'"';font-size:8rem;color:var(--copper);opacity:.15;position:absolute;top:-1rem;left:0;line-height:1;font-family:var(--display)}
/* HARDWARE SCOUT */
.scout-table{width:100%;border-collapse:collapse;margin-top:1.5rem;font-size:.85rem}
.scout-table th{text-align:left;padding:.8rem;border-bottom:2px solid var(--border);color:var(--muted);font-size:.72rem;text-transform:uppercase;letter-spacing:.08em;font-weight:700}
.scout-table td{padding:.8rem;border-bottom:1px solid var(--border)}
.scout-table tr:last-child td{border-bottom:none}
.backend-chip{font-family:var(--mono);font-size:.72rem;background:var(--bg3);border:1px solid var(--border);border-radius:.3rem;padding:.15rem .4rem}
.load-bar{background:var(--bg3);border-radius:2rem;height:6px;overflow:hidden;width:80px;display:inline-block}
.load-fill{background:linear-gradient(90deg,var(--teal),var(--amber));height:100%;border-radius:2rem;transition:width .5s}
.live-dot{width:7px;height:7px;background:var(--teal);border-radius:50%;display:inline-block;margin-right:.4rem;animation:pulse 2s infinite}
/* DONATE */
.donate-widget{background:var(--card);border:1px solid var(--border);border-radius:1rem;padding:2rem;margin-top:2rem}
.donate-amounts{display:flex;flex-wrap:wrap;gap:.8rem;margin-bottom:1.5rem}
.donate-btn{background:var(--bg3);border:2px solid var(--border);border-radius:.5rem;padding:.7rem 1.3rem;cursor:pointer;font-weight:800;font-size:.9rem;font-family:var(--font);color:var(--muted);transition:all .2s}
.donate-btn:hover,.donate-btn.active{border-color:var(--copper);color:var(--copper);background:rgba(201,98,42,.08)}
.costs-breakdown{display:grid;gap:.5rem;margin-bottom:1.5rem}
.cost-line{display:flex;justify-content:space-between;font-size:.85rem;padding:.5rem .8rem;background:var(--bg2);border-radius:.4rem}
.cost-line span:last-child{font-family:var(--mono);color:var(--amber)}
.cost-total{border-top:2px solid var(--border);margin-top:.5rem;padding-top:.5rem;font-weight:800}
/* CRATONS */
.cratons-grid{display:grid;grid-template-columns:repeat(auto-fill,minmax(200px,1fr));gap:1rem;margin-top:2rem}
.craton-card{background:var(--card);border:1px solid var(--border);border-radius:.7rem;padding:1.2rem;transition:all .2s}
.craton-card.active{border-color:var(--copper);background:rgba(201,98,42,.05)}
.craton-card:not(.active):hover{border-color:var(--border);opacity:.8}
.craton-age{font-family:var(--mono);font-size:.68rem;color:var(--muted);margin-bottom:.3rem}
.craton-name{font-weight:800;margin-bottom:.2rem}
.craton-region{font-size:.78rem;color:var(--muted)}
.craton-status{font-size:.7rem;font-weight:800;margin-top:.5rem}
.craton-card.active .craton-status{color:var(--copper)}
.craton-card:not(.active) .craton-status{color:var(--teal);cursor:pointer;text-decoration:underline}
/* PRICING */
.pricing-grid{display:grid;grid-template-columns:repeat(auto-fit,minmax(260px,1fr));gap:1.5rem;margin-top:2rem}
.pricing-card{background:var(--card);border:1px solid var(--border);border-radius:.8rem;padding:2rem;display:flex;flex-direction:column;gap:.8rem}
.pricing-card.featured{border-color:var(--copper)}
.pricing-tier{font-weight:900;font-size:1.1rem}
.pricing-who{font-size:.85rem;color:var(--muted);line-height:1.5;flex:1}
.pricing-price{font-family:var(--display);font-size:2rem;font-weight:900}
.pricing-price.free{color:var(--teal)}
.pricing-price.fair{color:var(--amber)}
.pricing-price.ind{color:var(--copper)}
/* FOOTER */
footer{background:var(--bg2);border-top:1px solid var(--border);padding:3rem 1.5rem;margin-top:4rem}
.footer-inner{max-width:1100px;margin:0 auto;display:grid;grid-template-columns:2fr 1fr 1fr 1fr;gap:3rem}
@media(max-width:700px){.footer-inner{grid-template-columns:1fr 1fr}}
.footer-brand{font-family:var(--display);font-size:1.4rem;font-weight:900;color:var(--copper);margin-bottom:.8rem}
.footer-tagline{font-size:.85rem;color:var(--muted);line-height:1.6}
.footer-col h4{font-size:.75rem;font-weight:800;letter-spacing:.1em;text-transform:uppercase;color:var(--muted);margin-bottom:.8rem}
.footer-col a{display:block;color:var(--muted);text-decoration:none;font-size:.85rem;margin-bottom:.4rem;transition:color .2s}
.footer-col a:hover{color:var(--text)}
.footer-bottom{max-width:1100px;margin:2rem auto 0;padding-top:1.5rem;border-top:1px solid var(--border);display:flex;justify-content:space-between;font-size:.78rem;color:var(--muted);flex-wrap:wrap;gap:.5rem}
/* ANIMATIONS */
.reveal{opacity:0;transform:translateY(24px);transition:opacity .6s,transform .6s}
.reveal.visible{opacity:1;transform:none}
/* RESPONSIVE */
@media(max-width:768px){
nav .nav-links{display:none}
.hero-stats{gap:1.5rem}
.stat-n{font-size:1.8rem}
.cost-compare,.priv-compare{grid-template-columns:1fr}
.footer-inner{grid-template-columns:1fr 1fr}
}
</style>
</head>
<body>
<!-- NAV -->
<nav>
<a class="nav-brand" href="#">Inference<span>-X</span></a>
<ul class="nav-links">
<li><a href="#discover">Discover</a></li>
<li><a href="#engine">Engine</a></li>
<li><a href="#community">Community</a></li>
<li><a href="#organ">Organs</a></li>
<li><a href="#join">Join</a></li>
<li><a href="https://build.inference-x.com" target="_blank">SaaS</a></li>
</ul>
<div class="nav-right">
<button id="lb" onclick="cycleLang()">🇬🇧 EN</button>
<button id="tt"></button>
</div>
</nav>
<!-- ═══ HERO ═══ -->
<div id="hero">
<div class="hero-glow"></div>
<div class="hero-glow2"></div>
<div class="hero-inner">
<div class="hero-eyebrow">🌍 Built in Morocco for the world</div>
<h1 class="hero-title" id="ht">
Intelligence,<br><em>for everyone.</em><br>No permission needed.
</h1>
<p class="hero-sub" id="hsub">305KB. Runs on your phone, your laptop, your server. Free forever. No cloud, no account, no limit. The AI belongs to whoever runs it.</p>
<div class="hero-stats">
<div class="stat"><span class="stat-n">305<small style="font-size:1.2rem">KB</small></span><span class="stat-l">Entire engine</span></div>
<div class="stat"><span class="stat-n">19</span><span class="stat-l">Hardware backends</span></div>
<div class="stat"><span class="stat-n">23</span><span class="stat-l">Model formats</span></div>
<div class="stat"><span class="stat-n">∞</span><span class="stat-l">API calls · forever free</span></div>
<div class="stat"><span class="stat-n">$0</span><span class="stat-l">Per year · your hardware</span></div>
</div>
<div class="hero-ctas">
<a href="#discover" class="btn-primary">See how it works →</a>
<a href="https://build.inference-x.com" class="btn-secondary" target="_blank">Try the SaaS</a>
<a href="#join" class="btn-secondary">Join the builders</a>
</div>
</div>
</div>
<!-- ═══ 01 DISCOVER ═══ -->
<section id="discover">
<div class="section-tag">What is this</div>
<h2 id="disc_title">Three things to know. Nothing more.</h2>
<p class="sub" id="disc_sub">No degree required. If you have a device, you have AI.</p>
<div class="disc-cards">
<div class="disc-card reveal">
<span class="disc-icon">📦</span>
<h3 id="d1t">It's a tiny file</h3>
<p id="d1p">305 kilobytes. Smaller than a photo on your phone. This file lets your computer run AI — any AI — without the internet. Download it, run it. That's it.</p>
</div>
<div class="disc-card reveal">
<span class="disc-icon">🔒</span>
<h3 id="d2t">Your words stay yours</h3>
<p id="d2p">When you use AI online, your questions travel to a distant server. Someone can read them. With Inference-X, nothing leaves your machine. Ever.</p>
</div>
<div class="disc-card reveal">
<span class="disc-icon">⚡</span>
<h3 id="d3t">It runs on anything</h3>
<p id="d3p">Old laptop, new phone, Raspberry Pi, datacenter. Same file. It detects your hardware and uses it. No configuration needed.</p>
</div>
</div>
</section>
<!-- ═══ 02 YOUR DEVICE ═══ -->
<section id="device">
<div class="section-tag">Your hardware</div>
<h2 id="dev_title">What can YOUR computer do?</h2>
<p class="sub" id="dev_sub">Move the slider to your RAM. See what's possible.</p>
<div class="device-widget reveal">
<div class="slider-wrap">
<div class="slider-labels">
<span>1 GB</span><span>4 GB</span><span>8 GB</span><span>16 GB</span><span>32 GB</span><span>64 GB</span><span>128+ GB</span>
</div>
<input type="range" id="ramSlider" min="1" max="128" value="8" step="1">
</div>
<p id="ramVal" style="font-family:var(--mono);font-size:.9rem;color:var(--amber);margin-bottom:1rem">RAM: <strong>8 GB</strong> — showing models that fit</p>
<div class="ram-results" id="ramResults"></div>
<p id="ram_hint" style="margin-top:1rem;font-size:.82rem;color:var(--muted)">Your AI runs locally. No internet. No account. Free forever.</p>
</div>
</section>
<!-- ═══ 03 PRIVACY ═══ -->
<section id="privacy">
<div class="section-tag">Privacy</div>
<h2 id="priv_title">Where do your words go?</h2>
<div class="priv-compare reveal">
<div class="priv-card priv-cloud">
<div class="priv-title"><span class="cloud-dot"></span><span id="priv_cloud_t">Cloud AI</span></div>
<p id="priv_cloud_p">Your question leaves your device, crosses the internet, reaches a server in another country, gets processed, stored, and analyzed. You pay per word.</p>
<div style="margin-top:1rem;font-size:.8rem;color:var(--red)">⚠ Your data · their server · their rules</div>
</div>
<div class="priv-card priv-local">
<div class="priv-title"><span class="local-dot"></span><span id="priv_local_t">Inference-X</span></div>
<p id="priv_local_p">Your question stays on your desk. The answer is computed by your own processor. Nothing leaves. Nothing is stored. You pay nothing.</p>
<div style="margin-top:1rem;font-size:.8rem;color:var(--green)">✓ Your data · your processor · your rules</div>
</div>
</div>
</section>
<!-- ═══ 04 SIZE ═══ -->
<section id="size">
<div class="section-tag">Footprint</div>
<h2 id="size_title">How small is 305 KB?</h2>
<p class="sub" id="size_sub">The entire AI engine — smaller than what you think.</p>
<div class="size-viz reveal">
<div class="size-item ix">
<div class="size-bar" style="height:35px;background:var(--copper)"></div>
<span class="size-label">Inference-X</span>
<span>305 KB</span>
</div>
<div class="size-item">
<div class="size-bar" style="height:65px;background:var(--bg3);border:1px solid var(--border)"></div>
<span>iPhone photo</span>
<span>~3 MB</span>
</div>
<div class="size-item">
<div class="size-bar" style="height:120px;background:var(--bg3);border:1px solid var(--border)"></div>
<span>Average app</span>
<span>~50 MB</span>
</div>
<div class="size-item">
<div class="size-bar" style="height:200px;background:rgba(224,80,80,.3);border:1px solid rgba(224,80,80,.4)"></div>
<span>Chrome</span>
<span>~200 MB</span>
</div>
</div>
<p id="size_note" style="font-size:.88rem;color:var(--muted);margin-top:1rem">All 19 hardware targets, all 23 formats — in less space than a single photo on your phone.</p>
</section>
<!-- ═══ 05 ENGINE ═══ -->
<section id="engine">
<div class="section-tag">The engine</div>
<h2>One binary to run them all.</h2>
<p class="sub">Written in C++. No dependencies. No runtime. No cloud. Any silicon, any OS, any AI model.</p>
<div class="hw-grid reveal">
<div class="hw-chip">CUDA<span class="hw-cat">NVIDIA GPU</span></div>
<div class="hw-chip">Metal<span class="hw-cat">Apple Silicon</span></div>
<div class="hw-chip">Vulkan<span class="hw-cat">Any GPU</span></div>
<div class="hw-chip">ROCm<span class="hw-cat">AMD GPU</span></div>
<div class="hw-chip">OpenCL<span class="hw-cat">Any GPU</span></div>
<div class="hw-chip">SYCL<span class="hw-cat">Intel GPU</span></div>
<div class="hw-chip">CPU x86<span class="hw-cat">Intel/AMD</span></div>
<div class="hw-chip">CPU ARM<span class="hw-cat">Mobile/Pi</span></div>
<div class="hw-chip">RISC-V<span class="hw-cat">Emerging</span></div>
<div class="hw-chip">WebGPU<span class="hw-cat">Browser</span></div>
<div class="hw-chip">TPU<span class="hw-cat">Google</span></div>
<div class="hw-chip">FPGA<span class="hw-cat">Custom HW</span></div>
<div class="hw-chip">Inferentia<span class="hw-cat">AWS</span></div>
<div class="hw-chip">Gaudi<span class="hw-cat">Intel</span></div>
<div class="hw-chip">Groq<span class="hw-cat">LPU</span></div>
<div class="hw-chip">Cerebras<span class="hw-cat">Wafer</span></div>
<div class="hw-chip">SambaNova<span class="hw-cat">RDU</span></div>
<div class="hw-chip">Graphcore<span class="hw-cat">IPU</span></div>
<div class="hw-chip">Custom<span class="hw-cat">+ your HW</span></div>
</div>
<div class="engine-feats reveal" style="margin-top:2rem">
<div class="feat"><div class="feat-name">Zero-Copy Inference</div>Dequantization and matrix multiply in one instruction loop. No intermediate buffer.</div>
<div class="feat"><div class="feat-name">Trillion-Parameter Native</div>Only active experts exist in memory. A 1T-parameter model runs on 64 GB RAM.</div>
<div class="feat"><div class="feat-name">Smart Precision</div>Simple questions get compressed layers. Complex reasoning gets full precision.</div>
<div class="feat"><div class="feat-name">Zero Telemetry</div>No network calls. No phone-home. Works on a plane, in a submarine, on the moon.</div>
<div class="feat"><div class="feat-name">Auto-Detect</div>Architecture, chat templates, EOS tokens — auto-detected from model metadata.</div>
<div class="feat"><div class="feat-name">Self-Configuring</div>The Makefile detects your hardware. You don't configure it — it configures itself.</div>
</div>
</section>
<!-- ═══ 06 MODELS ═══ -->
<section id="models">
<div class="section-tag">What runs on it</div>
<h2>Any GGUF model. Zero setup.</h2>
<p class="sub">Download a model from HuggingFace or Ollama. Drop it in. Run it. These are models we've benchmarked.</p>
<div class="models-grid reveal">
<div class="model-card">
<div class="model-name">LLaMA 3.2 · 1B</div>
<div class="model-desc">Quick answers. Tiny device. Lightning fast.</div>
<div class="model-meta"><span class="model-tag">1 GB RAM</span><span class="model-tag">mobile-ready</span><span class="model-tag">fast</span></div>
</div>
<div class="model-card">
<div class="model-name">Mistral · 7B</div>
<div class="model-desc">Smart conversations, code help, translations.</div>
<div class="model-meta"><span class="model-tag">5 GB RAM</span><span class="model-tag">multilingual</span></div>
</div>
<div class="model-card">
<div class="model-name">LLaMA 3.1 · 8B</div>
<div class="model-desc">Meta's compact model. Great reasoning at low cost.</div>
<div class="model-meta"><span class="model-tag">6 GB RAM</span><span class="model-tag">reasoning</span></div>
</div>
<div class="model-card">
<div class="model-name">Mistral · 22B</div>
<div class="model-desc">Creative writing, analysis, multilingual expert.</div>
<div class="model-meta"><span class="model-tag">16 GB RAM</span><span class="model-tag">creative</span></div>
</div>
<div class="model-card">
<div class="model-name">LLaMA 3.1 · 70B</div>
<div class="model-desc">Full-featured assistant. Code. Math. Logic.</div>
<div class="model-meta"><span class="model-tag">48 GB RAM</span><span class="model-tag">code</span><span class="model-tag">math</span></div>
</div>
<div class="model-card">
<div class="model-name">DeepSeek · 671B</div>
<div class="model-desc">Advanced reasoning. Expert-level answers. MoE architecture.</div>
<div class="model-meta"><span class="model-tag">64 GB RAM</span><span class="model-tag">expert</span><span class="model-tag">MoE</span></div>
</div>
<div class="model-card">
<div class="model-name">Phi-3 · 3.8B</div>
<div class="model-desc">Microsoft's small model. Punches far above its weight.</div>
<div class="model-meta"><span class="model-tag">3 GB RAM</span><span class="model-tag">efficient</span></div>
</div>
<div class="model-card">
<div class="model-name">Qwen 2.5 · 7B</div>
<div class="model-desc">Chinese-developed. Excellent for multilingual tasks.</div>
<div class="model-meta"><span class="model-tag">5 GB RAM</span><span class="model-tag">multilingual</span><span class="model-tag">code</span></div>
</div>
<div class="model-card" style="border-style:dashed;border-color:var(--muted)">
<div class="model-name" style="color:var(--muted)">+ any GGUF</div>
<div class="model-desc">Download from HuggingFace. Drop in folder. Done.</div>
<div class="model-meta"><span class="model-tag" style="color:var(--muted)">any size</span></div>
</div>
</div>
</section>
<!-- ═══ 07 COST ═══ -->
<section id="cost">
<div class="section-tag">The real cost</div>
<h2 id="cost_title">How much does AI cost?</h2>
<p class="sub" id="cost_sub">Using AI 1 hour per day, every day, for a year.</p>
<div class="cost-compare reveal">
<div class="cost-card cost-cloud-card">
<div class="section-tag" id="cost_cloud_l">Cloud API (GPT-4 class)</div>
<div class="cost-amount">$2,500+</div>
<div class="cost-sub">per year · and rising · your data = their product</div>
<div style="margin-top:1rem;font-size:.8rem;color:var(--muted)">API key required · Rate limited · Terms can change</div>
</div>
<div class="cost-card cost-local-card">
<div class="section-tag" id="cost_local_l">Inference-X (your hardware)</div>
<div class="cost-amount">$0</div>
<div class="cost-sub" id="cost_local_note">forever · electricity only · your data stays yours</div>
<div style="margin-top:1rem;font-size:.8rem;color:var(--teal)" id="cost_note">No API key. No subscription. No limit. Your hardware, your AI.</div>
</div>
</div>
</section>
<!-- ═══ 08 API ═══ -->
<section id="api">
<div class="section-tag">For developers</div>
<h2>OpenAI-compatible API</h2>
<p class="sub">Start with <code>--serve 8080</code>. Drop-in replacement. Any client library works.</p>
<div class="api-box reveal">
<span class="cmt"># Start the inference server</span><br>
<span class="kw">./inference-x</span> <span class="str">--model llama3.gguf --serve 8080</span><br><br>
<span class="cmt"># Works with any OpenAI SDK</span><br>
<span class="kw">curl</span> <span class="str">http://localhost:8080/v1/chat/completions</span> <span class="kw">-H</span> <span class="str">"Content-Type: application/json"</span> \<br>
&nbsp;&nbsp;<span class="kw">-d</span> '{"model":"llama3","messages":[{"role":"user","content":"Hello"}]}'
</div>
<div class="api-endpoints">
<span class="api-ep"><span class="ep-post">POST</span> /v1/chat/completions</span>
<span class="api-ep"><span class="ep-post">POST</span> /v1/completions</span>
<span class="api-ep"><span class="ep-get">GET</span> /v1/models</span>
<span class="api-ep"><span class="ep-get">GET</span> /health</span>
<span class="api-ep"><span class="ep-get">GET</span> /v1/embeddings</span>
</div>
</section>
<!-- ═══ 09 QUICK START ═══ -->
<section id="start">
<div class="section-tag">Get started</div>
<h2 id="start_title">Ready? Three steps.</h2>
<p class="sub" id="start_sub">Pick your system.</p>
<div class="qs-tabs">
<button class="qs-tab active" onclick="setQS('linux')">🐧 Linux</button>
<button class="qs-tab" onclick="setQS('mac')">🍎 macOS</button>
<button class="qs-tab" onclick="setQS('windows')">🪟 Windows</button>
<button class="qs-tab" onclick="setQS('pi')">🍓 Raspberry Pi</button>
</div>
<div class="qs-block active" id="qs-linux">
<div class="step"><div class="step-num">1</div><div><strong>Download the binary</strong><pre><span class="c"># x86_64 with CUDA/CPU</span>
<span class="v">curl</span> -LO <span class="s">https://git.inference-x.com/elmadani/inference-x/releases/download/v1.0/ix-linux-x64</span>
<span class="v">chmod</span> +x ix-linux-x64</pre></div></div>
<div class="step"><div class="step-num">2</div><div><strong>Get a model</strong><pre><span class="c"># Download any GGUF from HuggingFace</span>
<span class="v">wget</span> <span class="s">https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q4_K_M.gguf</span></pre></div></div>
<div class="step"><div class="step-num">3</div><div><strong>Run it</strong><pre><span class="v">./ix-linux-x64</span> --model Llama-3.2-1B-Instruct-Q4_K_M.gguf<br><span class="c"># or serve as API:</span><br><span class="v">./ix-linux-x64</span> --model Llama-3.2-1B-Instruct-Q4_K_M.gguf <span class="s">--serve 8080</span></pre></div></div>
</div>
<div class="qs-block" id="qs-mac">
<div class="step"><div class="step-num">1</div><div><strong>Download (Apple Silicon native)</strong><pre><span class="v">curl</span> -LO <span class="s">https://git.inference-x.com/elmadani/inference-x/releases/download/v1.0/ix-macos-arm64</span>
<span class="v">chmod</span> +x ix-macos-arm64</pre></div></div>
<div class="step"><div class="step-num">2</div><div><strong>Get a model</strong><pre><span class="c"># Metal GPU acceleration automatic on Apple Silicon</span>
<span class="v">wget</span> <span class="s">https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q4_K_M.gguf</span></pre></div></div>
<div class="step"><div class="step-num">3</div><div><strong>Run it</strong><pre><span class="v">./ix-macos-arm64</span> --model Llama-3.2-1B-Instruct-Q4_K_M.gguf</pre></div></div>
</div>
<div class="qs-block" id="qs-windows">
<div class="step"><div class="step-num">1</div><div><strong>Download</strong><pre><span class="c"># PowerShell</span>
<span class="v">Invoke-WebRequest</span> -Uri <span class="s">"https://git.inference-x.com/elmadani/inference-x/releases/download/v1.0/ix-windows-x64.exe"</span> -OutFile <span class="s">"ix.exe"</span></pre></div></div>
<div class="step"><div class="step-num">2</div><div><strong>Get a model</strong> — download any .gguf file from HuggingFace</div></div>
<div class="step"><div class="step-num">3</div><div><strong>Run it</strong><pre><span class="v">.\ix.exe</span> --model model.gguf</pre></div></div>
</div>
<div class="qs-block" id="qs-pi">
<div class="step"><div class="step-num">1</div><div><strong>ARM build for Raspberry Pi 4/5</strong><pre><span class="v">curl</span> -LO <span class="s">https://git.inference-x.com/elmadani/inference-x/releases/download/v1.0/ix-linux-arm64</span>
<span class="v">chmod</span> +x ix-linux-arm64</pre></div></div>
<div class="step"><div class="step-num">2</div><div><strong>Get a small model (fits in 1-4GB)</strong><pre><span class="v">wget</span> <span class="s">https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q4_K_M.gguf</span></pre></div></div>
<div class="step"><div class="step-num">3</div><div><strong>Run on Pi</strong><pre><span class="v">./ix-linux-arm64</span> --model Llama-3.2-1B-Instruct-Q4_K_M.gguf<br><span class="c"># Pi 4 4GB: runs 1B models at ~8 tok/s</span></pre></div></div>
</div>
</section>
<!-- ═══ 10 COMMUNITY TOOLS ═══ -->
<section id="community">
<div class="section-tag">Community</div>
<h2>The tools we built together.</h2>
<p class="sub">Inference-X is the core. Around it, the community builds the ecosystem. Here's what exists today — more is being forged every day.</p>
<div class="tools-grid reveal">
<a class="tool-card" href="https://git.inference-x.com/elmadani/inference-x" target="_blank">
<span class="tool-badge badge-live">LIVE</span>
<div class="tool-icon"></div>
<div class="tool-name">IX Engine</div>
<div class="tool-desc">The core. 305 KB C++ binary. 19 backends. Zero dependencies. The foundation everything runs on.</div>
</a>
<a class="tool-card" href="https://build.inference-x.com" target="_blank">
<span class="tool-badge badge-live">LIVE</span>
<div class="tool-icon">🛠</div>
<div class="tool-name">Community SaaS</div>
<div class="tool-desc">Cloud playground. Deploy models, test APIs, share with others. No installation. Donation-powered.</div>
</a>
<div class="tool-card">
<span class="tool-badge badge-live">LIVE</span>
<div class="tool-icon">📡</div>
<div class="tool-name">Hardware Scout</div>
<div class="tool-desc">See every IX node running globally. Real-time compute map. Who runs what, how fast.</div>
</div>
<div class="tool-card">
<span class="tool-badge badge-build">BUILDING</span>
<div class="tool-icon">🫀</div>
<div class="tool-name">Organ Store</div>
<div class="tool-desc">Extract, share and transplant AI model components. Attention heads, FFN layers, expert blocks. The future of open AI.</div>
</div>
<div class="tool-card">
<span class="tool-badge badge-build">BUILDING</span>
<div class="tool-icon">🔬</div>
<div class="tool-name">Organ Architect</div>
<div class="tool-desc">Analyze model internals. Visualize layers, heads, topology. Like an MRI for AI models.</div>
</div>
<div class="tool-card">
<span class="tool-badge badge-build">BUILDING</span>
<div class="tool-icon">🔥</div>
<div class="tool-name">The Forge</div>
<div class="tool-desc">Community fine-tuning platform. Contribute training data, improve models, share results. Collective intelligence.</div>
</div>
<div class="tool-card">
<span class="tool-badge badge-coming">COMING</span>
<div class="tool-icon">🎙</div>
<div class="tool-name">GhostVoice</div>
<div class="tool-desc">Neural voice synthesis. Clone, create, share voice models. Same philosophy: local, private, yours.</div>
</div>
<div class="tool-card">
<span class="tool-badge badge-coming">COMING</span>
<div class="tool-icon">🌐</div>
<div class="tool-name">Echo Relay</div>
<div class="tool-desc">Federated inference network. Your idle hardware earns you compute credits. The khettara for AI power.</div>
</div>
</div>
</section>
<!-- ═══ 11 ORGAN ═══ -->
<section id="organ">
<div class="section-tag">The future</div>
<h2>AI organ transplants.</h2>
<p class="sub">Neural networks have anatomy. Layers. Attention heads. Expert blocks. We built tools to extract them, study them, and transplant them between models. The community will fill the store.</p>
<div class="organ-visual reveal">
<div class="organ-node glow" title="Source model">🧠</div>
<div class="organ-arrow"></div>
<div class="organ-node" title="Extract organ" style="font-size:1.2rem">⚙️<br><small style="font-size:.5rem">extract</small></div>
<div class="organ-arrow"></div>
<div class="organ-node" style="border-color:var(--amber);font-size:1.2rem" title="Organ">🫀</div>
<div class="organ-arrow"></div>
<div class="organ-node" title="Transplant" style="font-size:1.2rem">💉<br><small style="font-size:.5rem">transplant</small></div>
<div class="organ-arrow"></div>
<div class="organ-node glow" title="Enhanced model">🧬</div>
</div>
<div class="organ-desc">
<strong>Vision:</strong> A community marketplace where builders extract specialized capabilities from models — multilingual reasoning, code completion, visual understanding — and share them as components others can transplant. The Organ Store doesn't exist yet. The community will build it.
</div>
<div style="display:flex;gap:1rem;justify-content:center;margin-top:2rem;flex-wrap:wrap">
<div style="text-align:center;padding:1.2rem;background:var(--card);border:1px solid var(--border);border-radius:.7rem;min-width:140px">
<div style="font-size:1.5rem;margin-bottom:.4rem">🔍</div>
<div style="font-weight:800;font-size:.85rem">Analyze</div>
<div style="font-size:.75rem;color:var(--muted)">Map model internals</div>
</div>
<div style="text-align:center;padding:1.2rem;background:var(--card);border:1px solid var(--border);border-radius:.7rem;min-width:140px">
<div style="font-size:1.5rem;margin-bottom:.4rem">⚗️</div>
<div style="font-weight:800;font-size:.85rem">Extract</div>
<div style="font-size:.75rem;color:var(--muted)">Isolate components</div>
</div>
<div style="text-align:center;padding:1.2rem;background:var(--card);border:1px solid var(--border);border-radius:.7rem;min-width:140px">
<div style="font-size:1.5rem;margin-bottom:.4rem">📦</div>
<div style="font-weight:800;font-size:.85rem">Publish</div>
<div style="font-size:.75rem;color:var(--muted)">Share to the store</div>
</div>
<div style="text-align:center;padding:1.2rem;background:var(--card);border:1px solid var(--border);border-radius:.7rem;min-width:140px">
<div style="font-size:1.5rem;margin-bottom:.4rem">💉</div>
<div style="font-weight:800;font-size:.85rem">Transplant</div>
<div style="font-size:.75rem;color:var(--muted)">Enhance any model</div>
</div>
</div>
</section>
<!-- ═══ 12 KHETTARA ═══ -->
<section id="khettara" class="full" style="padding:5rem 1.5rem;background:linear-gradient(180deg,transparent,rgba(201,98,42,.04),transparent)">
<div style="max-width:1100px;margin:0 auto">
<div class="section-tag">The vision</div>
<div class="khettara-quote reveal">
"In the Moroccan desert, ancient builders carved underground canals — khettaras — that deliver water to entire villages using only gravity. No pump. No electricity. No central authority. They've worked for centuries. Inference-X is a khettara for intelligence: built by many, maintained by many, flowing to anyone who needs it."
</div>
<p style="color:var(--muted);font-size:.9rem;margin-top:1.5rem;max-width:600px">Inference-X has no enemies. Every researcher, every company, every government that processes AI is playing a role. We're not competing — we're building the infrastructure that makes all of it accessible to everyone who was left out.</p>
</div>
</section>
<!-- ═══ 13 HARDWARE SCOUT ═══ -->
<section id="scout">
<div class="section-tag">Community hardware</div>
<h2>Every IX node on Earth. Live.</h2>
<p class="sub">When you run Inference-X, you can optionally report your hardware telemetry. This is the network. Anonymous. Voluntary. Real.</p>
<table class="scout-table reveal">
<thead>
<tr>
<th>Backend</th>
<th>Nodes</th>
<th>Avg tok/s</th>
<th>Avg load</th>
<th>Status</th>
</tr>
</thead>
<tbody id="scoutBody">
<tr><td colspan="5" style="color:var(--muted);text-align:center;font-size:.82rem"><span class="live-dot"></span>Loading community hardware data...</td></tr>
</tbody>
</table>
</section>
<!-- ═══ 14 PRICING ═══ -->
<section id="pricing">
<div class="section-tag">License</div>
<h2>Free for those who need it. Fair for those who profit.</h2>
<p class="sub">No tricks. No hidden limits. The engine is the same everywhere.</p>
<div class="pricing-grid reveal">
<div class="pricing-card featured">
<div class="pricing-tier">Free Forever</div>
<div class="pricing-price free">$0</div>
<div class="pricing-who">Individuals, researchers, students, open-source projects, startups under $1M revenue. No registration. No expiry. No limits. This is the default.</div>
<div style="font-size:.8rem;color:var(--teal);margin-top:.5rem">✓ Full engine · All backends · All models</div>
</div>
<div class="pricing-card">
<div class="pricing-tier">Commercial Fair</div>
<div class="pricing-price fair">20% rev</div>
<div class="pricing-who">Companies with $1M+ annual revenue using IX in production. 20% of revenue attributed to IX-powered features goes to the community fund. Transparent. Auditable.</div>
<div style="font-size:.8rem;color:var(--amber);margin-top:.5rem">80% flows to community builders</div>
</div>
<div class="pricing-card">
<div class="pricing-tier">Industrial Embed</div>
<div class="pricing-price ind">Custom</div>
<div class="pricing-who">Hardware manufacturers embedding IX in products. Custom licensing for bulk distribution, signed binaries, hardware co-optimization. Contact us.</div>
<div style="font-size:.8rem;color:var(--copper);margin-top:.5rem">Redistribute · Co-brand · Optimize</div>
</div>
</div>
</section>
<!-- ═══ 15 JOIN / CRATONS ═══ -->
<section id="join">
<div class="section-tag">Join the builders</div>
<h2>11 seats. One per craton.</h2>
<p class="sub">The governance of Inference-X is anchored in geology. 11 ancient continental cratons — the most stable structures on Earth — give their names to 11 permanent Core Team seats. One per major civilization region. Designed to last as long as the rocks.</p>
<div class="cratons-grid reveal">
<div class="craton-card active">
<div class="craton-age">2.7 Ga · Africa</div>
<div class="craton-name">🪨 Anti-Atlas</div>
<div class="craton-region">Morocco · North Africa</div>
<div class="craton-status">⚒ Founder — Elmadani Salka</div>
</div>
<div class="craton-card"><div class="craton-age">3.6 Ga · Africa</div><div class="craton-name">💎 Kaapvaal</div><div class="craton-region">South Africa, Botswana</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Kaapvaal Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">2.9 Ga · Africa</div><div class="craton-name">🌍 West African</div><div class="craton-region">Ghana, Senegal, Mali</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=West African Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">2.8 Ga · Africa</div><div class="craton-name">🌿 Congo</div><div class="craton-region">DRC, Republic of Congo</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Congo Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">3.1 Ga · Americas</div><div class="craton-name">🍁 Superior</div><div class="craton-region">Canada, North America</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Superior Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">2.5 Ga · Americas</div><div class="craton-name">🌳 Amazon</div><div class="craton-region">Brazil, South America</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Amazon Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">3.1 Ga · Europe</div><div class="craton-name">🌊 Baltica</div><div class="craton-region">Scandinavia, Eastern Europe</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Baltica Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">3.0 Ga · Asia</div><div class="craton-name">🌲 Siberian</div><div class="craton-region">Russia, Central Asia</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Siberian Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">3.8 Ga · Asia</div><div class="craton-name">🏮 North China</div><div class="craton-region">China, East Asia</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=North China Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">3.0 Ga · Asia</div><div class="craton-name">🪷 Dharwar</div><div class="craton-region">India, South Asia</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Dharwar Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
<div class="craton-card"><div class="craton-age">3.5 Ga · Oceania</div><div class="craton-name">🦘 Pilbara</div><div class="craton-region">Australia, Oceania</div><div class="craton-status"><a href="mailto:Elmadani.SALKA@proton.me?subject=Pilbara Craton" style="color:var(--teal);text-decoration:none">Apply →</a></div></div>
</div>
<div style="margin-top:2rem;padding:1.5rem;background:var(--card);border:1px solid var(--border);border-radius:.8rem;display:flex;flex-wrap:wrap;gap:1.5rem;align-items:center">
<div style="flex:1;min-width:200px">
<div style="font-weight:800;margin-bottom:.4rem">What craton leaders do</div>
<div style="font-size:.85rem;color:var(--muted)">Represent their region in project decisions. Connect local builders. Translate and adapt for local communities. No salary — compensation is access, visibility, and history.</div>
</div>
<a href="mailto:Elmadani.SALKA@proton.me?subject=Craton Application&body=Craton I want to represent: %0AWhy I fit: %0AMy background: " class="btn-primary">Apply for your craton →</a>
</div>
</section>
<!-- ═══ 16 DONATE ═══ -->
<section id="donate" style="max-width:700px;margin:0 auto;padding:5rem 1.5rem">
<div class="section-tag">Support the infrastructure</div>
<h2>Keep the khettara flowing.</h2>
<p class="sub">Inference-X costs about €53/month to keep running for the world. Everything is public. Surplus goes to community contributors.</p>
<div class="donate-widget reveal">
<div class="costs-breakdown">
<div class="cost-line"><span>Infomaniak VPS (site)</span><span>€15/mo</span></div>
<div class="cost-line"><span>Domains</span><span>€4/mo</span></div>
<div class="cost-line"><span>OneCloud Compute</span><span>€20/mo</span></div>
<div class="cost-line"><span>Hetzner Compute</span><span>€12/mo</span></div>
<div class="cost-line"><span>Backup Storage</span><span>€2/mo</span></div>
<div class="cost-line cost-total"><span>Total / month</span><span id="costsLive">€53/mo</span></div>
</div>
<div class="donate-amounts">
<button class="donate-btn" onclick="setDonation(5)">€5</button>
<button class="donate-btn active" onclick="setDonation(10)">€10</button>
<button class="donate-btn" onclick="setDonation(20)">€20</button>
<button class="donate-btn" onclick="setDonation(50)">€50</button>
<button class="donate-btn" onclick="setDonation(100)">€100</button>
</div>
<a id="donateLink" href="https://paypal.me/elmadanisalka/10" target="_blank" class="btn-primary" style="width:100%;justify-content:center;font-size:1rem">Donate €10 via PayPal →</a>
<p style="font-size:.75rem;color:var(--muted);text-align:center;margin-top:.8rem">No account needed. Quarterly transparency report published. Surplus → community contributors.</p>
</div>
</section>
<!-- FOOTER -->
<footer>
<div class="footer-inner">
<div>
<div class="footer-brand">Inference-X</div>
<p class="footer-tagline" id="footcopy">Built in Morocco for the world.<br>Intelligence flows where gravity takes it.</p>
<div style="margin-top:1rem;display:flex;gap:.8rem;flex-wrap:wrap">
<a href="https://git.inference-x.com/elmadani/inference-x" class="btn-secondary" style="font-size:.78rem;padding:.4rem .8rem">GitHub</a>
<a href="https://build.inference-x.com" class="btn-secondary" style="font-size:.78rem;padding:.4rem .8rem">SaaS</a>
<a href="https://git.inference-x.com" class="btn-secondary" style="font-size:.78rem;padding:.4rem .8rem">Gitea</a>
</div>
</div>
<div class="footer-col">
<h4>Engine</h4>
<a href="#discover">How it works</a>
<a href="#device">Your device</a>
<a href="#engine">Backends</a>
<a href="#models">Models</a>
<a href="#api">API docs</a>
</div>
<div class="footer-col">
<h4>Community</h4>
<a href="#community">Tools</a>
<a href="#organ">Organ Store</a>
<a href="#scout">Hardware Scout</a>
<a href="#join">11 Cratons</a>
<a href="#donate">Donate</a>
</div>
<div class="footer-col">
<h4>Legal</h4>
<a href="https://git.inference-x.com/elmadani/inference-x/src/branch/master/LICENSE" target="_blank">SALKA-IX License</a>
<a href="https://git.inference-x.com/elmadani/inference-x" target="_blank">Source Code</a>
<a href="mailto:Elmadani.SALKA@proton.me">Contact</a>
<a href="#pricing">Pricing</a>
</div>
</div>
<div class="footer-bottom">
<span>© 2025–2026 SALKA HOLDING SA (forming, Zug CH) · Elmadani Salka · SALKA-IX License v1.0</span>
<span>🇲🇦 Morocco → 🌍 World</span>
</div>
</footer>
<script>
// ═══ THEME ═══
// Apply a theme ('dark' | 'light'): set it on <html data-theme>, swap the
// toggle button's glyph, and persist the choice. localStorage access is
// wrapped in try/catch for browsers/privacy modes where storage throws.
function setTheme(t){
  document.documentElement.dataset.theme=t;
  document.getElementById('tt').textContent=(t==='dark')?'☀':'☾';
  try{localStorage.setItem('ix-t',t)}catch(e){}
}
// The toggle button flips between the two themes.
document.getElementById('tt').onclick=function(){
  var next=(document.documentElement.dataset.theme==='dark')?'light':'dark';
  setTheme(next);
};
// On load: a saved preference wins; otherwise honor the OS light-mode query.
try{
  var st=localStorage.getItem('ix-t');
  if(st){setTheme(st)}
  else if(window.matchMedia('(prefers-color-scheme:light)').matches){setTheme('light')}
}catch(e){}
// ═══ i18n ═══
// Supported UI language codes, in the order the language switcher cycles through.
var LANGS=['en','fr','ar','es','de','zh','hi','pt','sw','ru','tr','ja','ko','nl','it','pl'];
// Display label (ISO code + flag emoji) for each language code in LANGS.
var LNAMES={en:'EN 🇬🇧',fr:'FR 🇫🇷',ar:'AR 🇲🇦',es:'ES 🇪🇸',de:'DE 🇩🇪',zh:'ZH 🇨🇳',hi:'HI 🇮🇳',pt:'PT 🇧🇷',sw:'SW 🇰🇪',ru:'RU 🇷🇺',tr:'TR 🇹🇷',ja:'JA 🇯🇵',ko:'KO 🇰🇷',nl:'NL 🇳🇱',it:'IT 🇮🇹',pl:'PL 🇵🇱'};
// Index into LANGS of the currently active language (0 = 'en').
var curLang=0;
var TRANS={
en:{disc_title:"Three things to know. Nothing more.",disc_sub:"No degree required. If you have a device, you have AI.",d1t:"It's a tiny file",d1p:"305 kilobytes. Smaller than a photo on your phone. This file lets your computer run AI — any AI — without the internet. Download it, run it. That's it.",d2t:"Your words stay yours",d2p:"When you use AI online, your questions travel to a distant server. Someone can read them. With Inference-X, nothing leaves your machine. Ever.",d3t:"It runs on anything",d3p:"Old laptop, new phone, Raspberry Pi, datacenter. Same file. It detects your hardware and uses it. No configuration needed.",dev_title:"What can YOUR computer do?",dev_sub:"Move the slider to your RAM. See what's possible.",ram_hint:"Your AI runs locally. No internet. No account. Free forever.",priv_title:"Where do your words go?",priv_cloud_t:"Cloud AI",priv_cloud_p:"Your question leaves your device, crosses the internet, reaches a server in another country, gets processed, stored, and analyzed. You pay per word.",priv_local_t:"Inference-X",priv_local_p:"Your question stays on your desk. The answer is computed by your own processor. Nothing leaves. Nothing is stored. You pay nothing.",size_title:"How small is 305 KB?",size_sub:"The entire AI engine — smaller than what you think.",cost_title:"How much does AI cost?",cost_sub:"Using AI 1 hour per day, every day, for a year.",cost_cloud_l:"Cloud API (GPT-4 class)",cost_local_l:"Inference-X (your hardware)",cost_local_note:"forever · electricity only · your data stays yours",cost_note:"No API key. No subscription. No limit. Your hardware, your AI.",start_title:"Ready? Three steps.",start_sub:"Pick your system.",footcopy:"Built in Morocco for the world.\nIntelligence flows where gravity takes it."},
fr:{disc_title:"Trois choses à savoir. Pas plus.",disc_sub:"Pas de diplôme requis. Si tu as un appareil, tu as l'IA.",d1t:"C'est un tout petit fichier",d1p:"305 kilo-octets. Plus petit qu'une photo sur ton téléphone. Ce fichier permet à ton ordinateur de faire tourner l'IA — n'importe laquelle — sans internet.",d2t:"Tes mots restent les tiens",d2p:"Quand tu utilises l'IA en ligne, tes questions voyagent vers un serveur lointain. Quelqu'un peut les lire. Avec Inference-X, rien ne sort de ta machine. Jamais.",d3t:"Ça tourne sur tout",d3p:"Vieux portable, téléphone récent, Raspberry Pi, datacenter. Même fichier. Il détecte ton matériel et l'utilise. Aucune configuration.",dev_title:"Que peut faire TON ordinateur ?",dev_sub:"Déplace le curseur sur ta RAM. Vois ce qui est possible.",ram_hint:"Ton IA tourne en local. Pas d'internet. Pas de compte. Gratuit pour toujours.",priv_title:"Où vont tes mots ?",priv_cloud_t:"IA Cloud",priv_cloud_p:"Ta question quitte ton appareil, traverse internet, arrive sur un serveur dans un autre pays. On peut la lire. Tu paies au mot.",priv_local_t:"Inference-X",priv_local_p:"Ta question reste sur ton bureau. La réponse est calculée par ton propre processeur. Rien ne sort. Tu ne paies rien.",size_title:"305 Ko, c'est si petit ?",size_sub:"Le moteur IA entier — plus petit que tu ne crois.",cost_title:"Combien coûte l'IA ?",cost_sub:"1h par jour, tous les jours, pendant un an.",cost_cloud_l:"API Cloud (classe GPT-4)",cost_local_l:"Inference-X (ton matériel)",cost_local_note:"pour toujours · électricité seulement",cost_note:"Pas de clé API. Pas d'abonnement. Pas de limite. Ton matériel, ton IA.",start_title:"Prêt ? Trois étapes.",start_sub:"Choisis ton système.",footcopy:"Construit au Maroc pour le monde.\nL'intelligence coule là où la gravité la mène."},
ar:{disc_title:"ثلاثة أشياء. لا أكثر.",disc_sub:"لا شهادة مطلوبة. إذا لديك جهاز، لديك ذكاء اصطناعي.",d1t:"ملف صغير جداً",d1p:"305 كيلوبايت. أصغر من صورة على هاتفك. هذا الملف يجعل حاسوبك يشغّل الذكاء الاصطناعي بدون إنترنت.",d2t:"كلماتك ملكك",d2p:"عند استخدام الذكاء الاصطناعي عبر الإنترنت، أسئلتك تسافر لخادم بعيد. مع Inference-X، لا شيء يغادر جهازك.",d3t:"يعمل على أي جهاز",d3p:"حاسوب قديم، هاتف جديد، Raspberry Pi. نفس الملف. يكتشف عتادك ويستخدمه.",dev_title:"ماذا يستطيع حاسوبك؟",dev_sub:"حرّك المؤشر لذاكرتك. انظر ما المتاح.",ram_hint:"ذكاؤك يعمل محلياً. بلا إنترنت. بلا حساب. مجاني للأبد.",priv_title:"أين تذهب كلماتك؟",priv_cloud_t:"ذكاء سحابي",priv_cloud_p:"سؤالك يغادر جهازك، يعبر الإنترنت، يصل خادماً في بلد آخر. تدفع لكل كلمة.",priv_local_t:"Inference-X",priv_local_p:"سؤالك يبقى على مكتبك. الجواب يحسبه معالجك. لا شيء يخرج. لا تدفع شيئاً.",size_title:"كم صغير 305 كيلوبايت؟",size_sub:"المحرك بالكامل — أصغر مما تظن.",cost_title:"كم يكلف الذكاء الاصطناعي؟",cost_sub:"ساعة يومياً، كل يوم، لمدة سنة.",cost_cloud_l:"واجهة سحابية (فئة GPT-4)",cost_local_l:"Inference-X (عتادك)",cost_local_note:"للأبد · كهرباء فقط",cost_note:"بلا مفتاح API. بلا اشتراك. بلا حدود.",start_title:"مستعد؟ ثلاث خطوات.",start_sub:"اختر نظامك.",footcopy:"صُنع في المغرب للعالم.\nالذكاء يتدفق حيث تأخذه الجاذبية."},
es:{disc_title:"Tres cosas. Nada más.",disc_sub:"Sin título requerido. Si tienes un dispositivo, tienes IA.",d1t:"Es un archivo diminuto",d1p:"305 kilobytes. Más pequeño que una foto de tu móvil. Este archivo permite que tu ordenador ejecute IA sin internet.",d2t:"Tus palabras son tuyas",d2p:"Con la IA en la nube, tus preguntas van a un servidor lejano. Con Inference-X, nada sale de tu máquina.",d3t:"Funciona en cualquier cosa",d3p:"Portátil viejo, teléfono nuevo, Raspberry Pi, datacenter. Mismo archivo. Sin configuración.",dev_title:"¿Qué puede hacer TU ordenador?",dev_sub:"Mueve el control a tu RAM.",ram_hint:"Tu IA corre localmente. Sin internet. Sin cuenta. Gratis para siempre.",priv_title:"¿Dónde van tus palabras?",priv_cloud_t:"IA en la nube",priv_cloud_p:"Tu pregunta viaja a un servidor lejano. Alguien puede leerla. Pagas por palabra.",priv_local_t:"Inference-X",priv_local_p:"Tu pregunta se queda en tu escritorio. La respuesta la calcula tu propio procesador. No pagas nada.",size_title:"¿Cuán pequeño es 305 KB?",size_sub:"El motor completo, más pequeño de lo que crees.",cost_title:"¿Cuánto cuesta la IA?",cost_sub:"1 hora al día, todos los días, durante un año.",cost_cloud_l:"API Cloud (clase GPT-4)",cost_local_l:"Inference-X (tu hardware)",cost_local_note:"para siempre · solo electricidad",cost_note:"Sin API key. Sin suscripción. Sin límites.",start_title:"¿Listo? Tres pasos.",start_sub:"Elige tu sistema.",footcopy:"Construido en Marruecos para el mundo."},
de:{disc_title:"Drei Dinge. Nicht mehr.",disc_sub:"Kein Studium nötig. Mit einem Gerät hast du KI.",d1t:"Eine winzige Datei",d1p:"305 Kilobyte. Kleiner als ein Foto. Diese Datei lässt deinen Computer KI ausführen — ohne Internet.",d2t:"Deine Worte bleiben deine",d2p:"Online-KI sendet deine Fragen an fremde Server. Mit Inference-X verlässt nichts deinen Rechner.",d3t:"Läuft auf allem",d3p:"Alter Laptop, neues Handy, Raspberry Pi, Rechenzentrum. Gleiche Datei. Keine Konfiguration.",dev_title:"Was kann DEIN Computer?",dev_sub:"Bewege den Regler auf deinen RAM.",ram_hint:"Deine KI läuft lokal. Kein Internet. Kein Konto. Für immer kostenlos.",priv_title:"Wohin gehen deine Worte?",priv_cloud_t:"Cloud-KI",priv_cloud_p:"Deine Frage reist zu einem fernen Server. Jemand kann sie lesen. Du zahlst pro Wort.",priv_local_t:"Inference-X",priv_local_p:"Deine Frage bleibt auf deinem Schreibtisch. Die Antwort berechnet dein eigener Prozessor. Du zahlst nichts.",size_title:"Wie klein sind 305 KB?",size_sub:"Die komplette KI-Engine — kleiner als du denkst.",cost_title:"Was kostet KI wirklich?",cost_sub:"1 Stunde täglich, jeden Tag, ein Jahr lang.",cost_cloud_l:"Cloud API (GPT-4-Klasse)",cost_local_l:"Inference-X (deine Hardware)",cost_local_note:"für immer · nur Strom",cost_note:"Kein API-Key. Kein Abo. Kein Limit.",start_title:"Bereit? Drei Schritte.",start_sub:"Wähle dein System.",footcopy:"Gebaut in Marokko für die Welt."},
zh:{disc_title:"三件事。仅此而已。",disc_sub:"无需学位。有设备就有AI。",d1t:"这只是一个小文件",d1p:"305 KB。比手机上的照片还小。这个文件让你的电脑在没有网络的情况下运行AI。",d2t:"你的话语属于你",d2p:"使用在线AI时你的问题会传到遥远的服务器。有人可以读取它们。用Inference-X没有任何东西离开你的设备。",d3t:"可在任何硬件上运行",d3p:"旧电脑、新手机、树莓派、数据中心。同一个文件。无需配置。",dev_title:"你的电脑能做什么?",dev_sub:"拖动滑块到你的内存大小。",ram_hint:"你的AI在本地运行。无网络。无账号。永久免费。",priv_title:"你的话语去了哪里?",priv_cloud_t:"云AI",priv_cloud_p:"你的问题离开设备,穿越互联网,到达另一个国家的服务器,被处理、储存和分析。你按字付费。",priv_local_t:"Inference-X",priv_local_p:"你的问题留在你的桌上。答案由你自己的处理器计算。什么都没有离开。你不付任何费用。",size_title:"305 KB有多小",size_sub:"整个AI引擎比你想象的还要小。",cost_title:"AI到底花多少钱",cost_sub:"每天1小时每天一整年。",cost_cloud_l:"云APIGPT-4级别",cost_local_l:"Inference-X你的硬件",cost_local_note:"永久 · 仅电费",cost_note:"无需API密钥。无订阅。无限制。",start_title:"准备好了?三个步骤。",start_sub:"选择你的系统。",footcopy:"在摩洛哥为世界而建。"},
hi:{disc_title:"तीन बातें। बस इतना।",disc_sub:"कोई डिग्री नहीं चाहिए। अगर डिवाइस है तो AI है।",d1t:"यह एक छोटी सी फ़ाइल है",d1p:"305 किलोबाइट। आपके फ़ोन की तस्वीर से भी छोटा। यह फ़ाइल आपके कंप्यूटर को बिना इंटरनेट के AI चलाने देती है।",d2t:"आपके शब्द आपके हैं",d2p:"ऑनलाइन AI आपके सवाल दूर के सर्वर पर भेजता है। Inference-X के साथ, कुछ भी आपकी मशीन नहीं छोड़ता।",d3t:"हर डिवाइस पर चलता है",d3p:"पुराना लैपटॉप, नया फ़ोन, Raspberry Pi। एक ही फ़ाइल। कोई सेटअप नहीं।",dev_title:"आपका कंप्यूटर क्या कर सकता है?",dev_sub:"स्लाइडर को अपनी RAM पर ले जाएं।",ram_hint:"आपका AI लोकल चलता है। इंटरनेट नहीं। अकाउंट नहीं। हमेशा के लिए मुफ़्त।",priv_title:"आपके शब्द कहाँ जाते हैं?",priv_cloud_t:"क्लाउड AI",priv_cloud_p:"आपका सवाल इंटरनेट पार करके दूसरे देश के सर्वर पर जाता है। कोई पढ़ सकता है। आप प्रति शब्द भुगतान करते हैं।",priv_local_t:"Inference-X",priv_local_p:"आपका सवाल आपकी मेज पर रहता है। जवाब आपका प्रोसेसर देता है। कुछ नहीं जाता। कुछ नहीं देते।",size_title:"305 KB कितना छोटा है?",size_sub:"पूरा AI इंजन — सोच से भी छोटा।",cost_title:"AI की असली कीमत?",cost_sub:"रोज 1 घंटे, पूरे साल।",cost_cloud_l:"क्लाउड API (GPT-4 श्रेणी)",cost_local_l:"Inference-X (आपका हार्डवेयर)",cost_local_note:"हमेशा के लिए · सिर्फ बिजली",cost_note:"कोई API key नहीं। कोई सब्सक्रिप्शन नहीं।",start_title:"तैयार? तीन कदम।",start_sub:"अपना सिस्टम चुनें।",footcopy:"मोरक्को में दुनिया के लिए बनाया गया।"}
};
function applyLang(code){
  // Fill every element whose id matches a translation key; unknown codes fall
  // back to the English string table.
  var strings = TRANS[code] || TRANS['en'];
  Object.keys(strings).forEach(function (key) {
    var node = document.getElementById(key);
    if (!node) return;
    if (key === 'footcopy') {
      // footcopy embeds a single line break; only the first '\n' is converted.
      node.innerHTML = strings[key].replace('\n', '<br>');
    } else {
      node.textContent = strings[key];
    }
  });
  document.documentElement.lang = code;
  // Arabic is the only right-to-left language in LANGS.
  document.body.dir = (code === 'ar') ? 'rtl' : 'ltr';
}
function cycleLang(){
  // Step to the next language (wrapping around), then refresh both the
  // language-button label and the translated page text.
  curLang = (curLang + 1) % LANGS.length;
  var code = LANGS[curLang];
  document.getElementById('lb').textContent = LNAMES[code] || code.toUpperCase();
  applyLang(code);
}
// Auto-detect the visitor's UI language from the browser locale.
// cycleLang() pre-increments curLang, so the index is parked ONE slot before
// the detected language and cycleLang() advances onto it — this reuses its
// label-update + applyLang logic instead of duplicating it here.
// idx === 0 is English, the page's static default: nothing to do.
// idx === -1 means the locale is unsupported: also keep English.
try{
var bl=navigator.language.split('-')[0];
var idx=LANGS.indexOf(bl);
if(idx>0){curLang=idx-1;cycleLang();}
}catch(e){}
// ═══ RAM SLIDER ═══
// Model catalogue for the RAM slider. `min` is the RAM (GB) needed to run the
// model; updateRAM() also shows a model as "almost fits" from `min - 2` GB.
// NOTE(review): `max` is currently unused by updateRAM() — presumably the
// upper bound of the recommended RAM window; confirm before removing.
var MODELS=[
{min:1,max:3,icon:'⚡',name:'LLaMA 3.2 1B',size:'~1 GB',speed:'~15 tok/s',cat:'Chat · Fast'},
{min:2,max:5,icon:'🧠',name:'Phi-3 Mini 3.8B',size:'~2.5 GB',speed:'~12 tok/s',cat:'Smart · Efficient'},
{min:4,max:8,icon:'🌐',name:'Mistral 7B',size:'~5 GB',speed:'~8 tok/s',cat:'Multilingual · Code'},
{min:6,max:10,icon:'🔬',name:'LLaMA 3.1 8B',size:'~6 GB',speed:'~7 tok/s',cat:'Reasoning · General'},
{min:10,max:20,icon:'✨',name:'Qwen 2.5 14B',size:'~9 GB',speed:'~5 tok/s',cat:'Analysis · Multilingual'},
{min:14,max:24,icon:'🎯',name:'Mistral 22B',size:'~14 GB',speed:'~3 tok/s',cat:'Expert · Creative'},
{min:24,max:56,icon:'🚀',name:'LLaMA 3.1 70B',size:'~45 GB',speed:'~1.5 tok/s',cat:'Professional · Math'},
{min:56,max:200,icon:'🌟',name:'DeepSeek V3 671B',size:'~60 GB (MoE)',speed:'~0.8 tok/s',cat:'Expert · Research'}
];
function updateRAM(){
  // Re-render the model list for the currently selected RAM amount.
  var gb = parseInt(document.getElementById('ramSlider').value);
  document.getElementById('ramVal').innerHTML =
    'RAM: <strong>' + gb + ' GB</strong> — models that fit on your device';
  var rows = [];
  MODELS.forEach(function (m) {
    var fits = gb >= m.min;
    // Models within 2 GB of fitting are shown too, flagged with the RAM needed.
    if (gb < m.min - 2) return;
    var speedCell = fits
      ? '<span class="ri-speed">' + m.speed + '</span>'
      : '<span class="ri-speed" style="color:var(--red)">Need ' + m.min + 'GB</span>';
    rows.push(
      '<div class="ram-item' + (fits ? ' active' : '') + '">' +
        '<span class="ri-icon">' + m.icon + '</span>' +
        '<span class="ri-name">' + m.name + '</span>' +
        '<span class="ri-size">' + m.size + '</span>' +
        speedCell +
      '</div>'
    );
  });
  var html = rows.length
    ? rows.join('')
    : '<div style="color:var(--muted);font-size:.85rem;padding:.8rem">Move slider to see what models fit →</div>';
  document.getElementById('ramResults').innerHTML = html;
}
document.getElementById('ramSlider').oninput = updateRAM;
updateRAM(); // initial render with the slider's default value
// ═══ QUICK START TABS ═══
// Activate the quick-start tab + code block for the chosen OS.
//
// Fix: the original read the implicit global `event`, which is deprecated and
// is undefined when the handler is attached programmatically. `ev` is an
// optional second argument; inline `onclick="setQS('linux')"` call sites keep
// working via the window.event fallback. Unknown ids are guarded.
function setQS(os, ev){
  document.querySelectorAll('.qs-tab').forEach(function(t){ t.classList.remove('active'); });
  document.querySelectorAll('.qs-block').forEach(function(b){ b.classList.remove('active'); });
  var e = ev || window.event;              // backward-compatible fallback
  if (e && e.target) e.target.classList.add('active');
  var block = document.getElementById('qs-' + os);
  if (block) block.classList.add('active');
}
// ═══ DONATION ═══
// Highlight the clicked donation amount and point the PayPal link at it.
//
// Fix: same defect as setQS — the original relied on the deprecated implicit
// global `event`. `ev` is optional so existing inline onclick handlers that
// call setDonation(5) keep working through the window.event fallback.
function setDonation(amt, ev){
  document.querySelectorAll('.donate-btn').forEach(function(b){ b.classList.remove('active'); });
  var e = ev || window.event;              // backward-compatible fallback
  if (e && e.target) e.target.classList.add('active');
  document.getElementById('donateLink').href = 'https://paypal.me/elmadanisalka/' + amt;
  document.getElementById('donateLink').textContent = 'Donate €' + amt + ' via PayPal →';
}
// ═══ HARDWARE SCOUT ═══
// Poll the hardware-scout endpoint and render one table row per backend.
//
// Fix: backend names come from community-reported scout data (nodes choose
// what they report), and were interpolated into innerHTML unescaped — a
// stored-XSS vector. Untrusted text is now HTML-escaped before insertion;
// numeric cells are produced by Math.round/toFixed and are safe as-is.
function loadScout(){
  function esc(s){
    return String(s)
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;');
  }
  fetch('/api/community/scout').then(function(r){return r.json()}).then(function(d){
    var b = d.backends || {};
    var rows = '';
    if (Object.keys(b).length === 0) {
      rows = '<tr><td colspan="5" style="color:var(--muted);text-align:center;font-size:.82rem;padding:2rem">No nodes reporting yet. Run IX with <code>--scout</code> to join.</td></tr>';
    } else {
      for (var bk in b) {
        var bd = b[bk];
        var load = Math.round(bd.avg_load_pct || 0);
        rows += '<tr>';
        rows += '<td><span class="backend-chip">' + esc(bk) + '</span></td>';
        rows += '<td>' + (bd.node_count || 0) + '</td>';
        rows += '<td>' + (bd.avg_tokens_per_sec || 0).toFixed(1) + '</td>';
        rows += '<td><div class="load-bar"><div class="load-fill" style="width:' + load + '%"></div></div> ' + load + '%</td>';
        rows += '<td><span class="live-dot"></span>live</td>';
        rows += '</tr>';
      }
    }
    document.getElementById('scoutBody').innerHTML = rows;
  }).catch(function(){
    document.getElementById('scoutBody').innerHTML = '<tr><td colspan="5" style="color:var(--muted);font-size:.82rem;text-align:center">Network loading...</td></tr>';
  });
}
loadScout();                    // initial paint
setInterval(loadScout, 30000);  // refresh every 30 s
// ═══ LIVE COSTS ═══
// Fetch the live infrastructure cost figure for the transparency widget.
// On any failure the static placeholder text is silently kept (empty catch).
fetch('/api/community/costs').then(function(r){return r.json()}).then(function(d){
if(d.total_eur_month){document.getElementById('costsLive').textContent='€'+d.total_eur_month+'/mo';}
}).catch(function(){});
// ═══ SCROLL REVEAL ═══
// Scroll-reveal: tag elements with .visible once they enter the viewport;
// the CSS transition does the actual animation. Trigger slightly early via
// a negative bottom rootMargin.
var observer = new IntersectionObserver(function (entries) {
  entries.forEach(function (entry) {
    if (entry.isIntersecting) entry.target.classList.add('visible');
  });
}, { threshold: 0.1, rootMargin: '0px 0px -40px 0px' });
document.querySelectorAll('.reveal').forEach(function (el) {
  observer.observe(el);
});
// ═══ NAV ACTIVE ═══
// Highlight the nav link of the section currently under the viewport top
// (+100px offset so the highlight switches just before the section header).
//
// Perf fix: the original re-ran both querySelectorAll calls on EVERY scroll
// event. This script executes at the end of <body>, so all sections and nav
// links already exist and the node lists can be captured once.
var navSections = document.querySelectorAll('section[id]');
var navAnchors = document.querySelectorAll('.nav-links a');
window.addEventListener('scroll', function () {
  var pos = window.scrollY + 100;
  navSections.forEach(function (s) {
    if (s.offsetTop <= pos && s.offsetTop + s.offsetHeight > pos) {
      navAnchors.forEach(function (a) {
        a.style.color = a.getAttribute('href') === '#' + s.id ? 'var(--amber)' : '';
      });
    }
  });
});
</script>
</body>
</html>

View File

@ -0,0 +1,15 @@
#!/bin/bash
# Build IX engine for Linux x86_64 with CUDA/CPU/Vulkan
#
# Usage: ./build-linux.sh [cpu|cuda|vulkan]    (default: cpu)
#
# Fix: cmake output is piped through `tail`, so without pipefail a failed
# configure/build was masked by tail's exit status 0 and `set -e` never fired.
# All expansions are quoted so paths with spaces cannot word-split.
set -euo pipefail
BACKEND=${1:-cpu}
OUTPUT="ix-linux-x64-$BACKEND"
echo "[BUILD] Linux x64 | Backend: $BACKEND"
cmake -B build \
  -DCMAKE_BUILD_TYPE=Release \
  -DIX_BACKEND="${BACKEND^^}" \
  -DCMAKE_C_FLAGS="-O3 -march=native" 2>&1 | tail -5
cmake --build build --target ix -j"$(nproc)" 2>&1 | tail -5
cp build/bin/ix "$OUTPUT"
echo "[✓] Built: $OUTPUT ($(wc -c < "$OUTPUT") bytes)"

View File

@ -0,0 +1,7 @@
#!/bin/bash
# Build IX engine for macOS Apple Silicon (Metal)
#
# Fix: cmake output is piped through `tail`, so without pipefail a failed
# configure/build was masked by tail's exit status and `set -e` never fired.
set -euo pipefail
cmake -B build -DCMAKE_BUILD_TYPE=Release -DIX_BACKEND=METAL -DCMAKE_OSX_ARCHITECTURES=arm64 2>&1 | tail -3
cmake --build build --target ix -j"$(sysctl -n hw.ncpu)" 2>&1 | tail -3
cp build/bin/ix ix-macos-arm64
echo "[✓] Built: ix-macos-arm64"

160
tools/forge.sh Normal file
View File

@ -0,0 +1,160 @@
#!/bin/bash
# IX Forge — Model conversion and quantization pipeline
# Usage: ./forge.sh <command> [options]
# Commands: convert, quantize, package, benchmark
#
# Fix: pipefail is required because several steps pipe cmake/git output through
# `tail`, which would otherwise mask the real exit status under plain `set -e`.
# (-u is intentionally NOT enabled: the option parsers read "$2" which may be
# legitimately unset on a trailing flag.)
set -eo pipefail
IX_FORGE_VER="1.0.0"
# llama.cpp checkout used for conversion/quantization; override via IX_LLAMA_CPP.
LLAMA_CPP_DIR="${IX_LLAMA_CPP:-$HOME/.inference-x/llama.cpp}"
# All artifacts (gguf files, packages) land here; override via IX_OUTPUT.
OUTPUT_DIR="${IX_OUTPUT:-./forge-output}"
# Logging helpers. printf replaces `echo -e` so that backslashes inside
# messages (e.g. Windows-style paths) are printed literally, not interpreted.
# err is a fatal: prints to stderr (diagnostics belong there) and exits 1.
log() { printf '\033[0;36m[IX-FORGE]\033[0m %s\n' "$1"; }
ok()  { printf '\033[0;32m[✓]\033[0m %s\n' "$1"; }
err() { printf '\033[0;31m[✗]\033[0m %s\n' "$1" >&2; exit 1; }
# Print CLI help to stdout. The here-doc delimiter is quoted ('USAGE'), so the
# text is emitted verbatim — no variable or command expansion happens inside
# (the "USAGE:" heading line is not the terminator; only the bare final
# USAGE line is).
usage() {
cat << 'USAGE'
IX Forge v1.0 — Model conversion and quantization
USAGE:
./forge.sh convert --source <hf_model_dir> --output <name.gguf>
./forge.sh quantize --input <model.gguf> --quant Q4_K_M --output <name_q4.gguf>
./forge.sh package --model <model.gguf> --name "ModelName" --version 1.0
./forge.sh benchmark --model <model.gguf> --prompt "Hello" --runs 10
QUANTIZATION LEVELS:
Q2_K — Smallest (50% quality loss, ~1.5bit)
Q4_0 — Small (faster, less accurate)
Q4_K_M — RECOMMENDED (best size/quality balance)
Q5_K_M — High quality
Q6_K — Near-lossless
Q8_0 — Near-perfect
F16 — Full precision (2x model size)
EXAMPLES:
# Convert Mistral 7B from HuggingFace
./forge.sh convert --source ./mistral-7b-v0.1 --output mistral-7b.gguf
# Quantize to Q4_K_M
./forge.sh quantize --input mistral-7b.gguf --quant Q4_K_M --output mistral-7b-q4.gguf
# Full pipeline
./forge.sh convert --source ./mymodel && ./forge.sh quantize --input mymodel.gguf --quant Q4_K_M
USAGE
}
# Ensure a llama.cpp checkout with its conversion script and build tree exists
# at $LLAMA_CPP_DIR, cloning and building it on first use.
# Globals: LLAMA_CPP_DIR (read); uses log/ok/err helpers.
#
# Fix: the old `cd "$dir" && … ; cd -` left the caller in the wrong directory
# when any build step failed (`cd -` was never reached). The build now runs in
# a subshell, so the working directory of the caller is never modified.
check_llama_cpp() {
  if [ ! -f "$LLAMA_CPP_DIR/convert_hf_to_gguf.py" ]; then
    log "llama.cpp not found at $LLAMA_CPP_DIR"
    log "Installing..."
    mkdir -p "$LLAMA_CPP_DIR"
    git clone --depth=1 https://github.com/ggerganov/llama.cpp.git "$LLAMA_CPP_DIR" 2>&1 | tail -3
    (
      cd "$LLAMA_CPP_DIR" || err "cannot cd to $LLAMA_CPP_DIR"
      cmake -B build -DLLAMA_BUILD_SERVER=OFF
      cmake --build build -j4 2>&1 | tail -5
    )
    ok "llama.cpp installed"
  fi
}
# forge.sh convert --source <hf_model_dir> [--output <name.gguf>]
# Converts a HuggingFace checkpoint to an F16 GGUF file in $OUTPUT_DIR.
# Default output name is "<source basename>.gguf".
#
# Fix: `basename $source` and the `du` paths were unquoted and broke on
# paths containing spaces; all expansions are now quoted.
cmd_convert() {
  local source="" output=""
  while [[ $# -gt 0 ]]; do
    case $1 in
      --source) source="$2"; shift ;;
      --output) output="$2"; shift ;;
    esac
    shift
  done
  [ -z "$source" ] && err "Missing --source"
  [ -z "$output" ] && output="$(basename "$source").gguf"
  check_llama_cpp
  mkdir -p "$OUTPUT_DIR"
  log "Converting $source → $OUTPUT_DIR/$output"
  python3 "$LLAMA_CPP_DIR/convert_hf_to_gguf.py" "$source" --outtype f16 --outfile "$OUTPUT_DIR/$output"
  ok "Converted: $OUTPUT_DIR/$output ($(du -sh "$OUTPUT_DIR/$output" | cut -f1))"
}
# forge.sh quantize --input <model.gguf> [--quant LEVEL] [--output <file>]
# Quantizes a GGUF model with llama.cpp's llama-quantize (default: Q4_K_M).
# Default output name: "<input stem>_<LEVEL>.gguf".
#
# Fix: the `du $output` expansion was unquoted and broke on paths with spaces.
cmd_quantize() {
  local input="" quant="Q4_K_M" output=""
  while [[ $# -gt 0 ]]; do
    case $1 in
      --input) input="$2"; shift ;;
      --quant) quant="$2"; shift ;;
      --output) output="$2"; shift ;;
    esac
    shift
  done
  [ -z "$input" ] && err "Missing --input"
  [ -z "$output" ] && output="${input%.gguf}_${quant}.gguf"
  check_llama_cpp
  log "Quantizing $input → $output (${quant})"
  "$LLAMA_CPP_DIR/build/bin/llama-quantize" "$input" "$output" "$quant"
  ok "Quantized: $output ($(du -sh "$output" | cut -f1))"
}
# forge.sh package --model <model.gguf> [--name NAME] [--version V]
# Bundles a model plus a generated manifest.json into a .ix-package tarball
# under $OUTPUT_DIR.
#
# Fixes:
#  - the manifest's "sha256" field was truncated to 32 hex chars, which cannot
#    be checked by standard sha256 tooling; the full 64-hex digest is stored.
#  - `local x=$(cmd)` masked command-substitution failures (local always
#    returns 0); declaration and assignment are now separated.
#  - basename/cp expansions are quoted against paths with spaces.
cmd_package() {
  local model="" name="" version="1.0"
  while [[ $# -gt 0 ]]; do
    case $1 in
      --model) model="$2"; shift ;;
      --name) name="$2"; shift ;;
      --version) version="$2"; shift ;;
    esac
    shift
  done
  [ -z "$model" ] && err "Missing --model"
  [ -z "$name" ] && name="$(basename "$model" .gguf)"
  local pkg_dir="$OUTPUT_DIR/pkg-$name-$version"
  mkdir -p "$pkg_dir"
  cp -- "$model" "$pkg_dir/"
  local size sha
  size=$(wc -c < "$model")
  sha=$(sha256sum "$model" | cut -d' ' -f1)
  cat > "$pkg_dir/manifest.json" << MANIFEST
{
  "name": "$name",
  "version": "$version",
  "model_file": "$(basename "$model")",
  "size_bytes": $size,
  "sha256": "$sha",
  "format": "gguf",
  "ix_compatible": true,
  "created_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}
MANIFEST
  tar -czf "$OUTPUT_DIR/$name-$version.ix-package" -C "$OUTPUT_DIR" "pkg-$name-$version"
  rm -rf -- "$pkg_dir"
  ok "Packaged: $OUTPUT_DIR/$name-$version.ix-package"
}
# forge.sh benchmark --model <m> [--prompt <p>] [--runs N]
# Measures end-to-end request latency against a locally running IX server
# (http://localhost:8080) and reports the average over N runs.
#
# Fix: the prompt was spliced into a hand-built JSON body unescaped, so any
# quote or backslash in it produced invalid JSON. Minimal escaping is applied
# (backslash + double quote); control characters are still not handled —
# proper escaping would need jq or python.
cmd_benchmark() {
  local model="" prompt="Hello, how are you?" runs=5
  while [[ $# -gt 0 ]]; do
    case $1 in
      --model) model="$2"; shift ;;
      --prompt) prompt="$2"; shift ;;
      --runs) runs="$2"; shift ;;
    esac
    shift
  done
  [ -z "$model" ] && err "Missing --model"
  log "Benchmarking $model ($runs runs)"
  log "Prompt: $prompt"
  local esc
  esc=$(printf '%s' "$prompt" | sed -e 's/\\/\\\\/g' -e 's/"/\\"/g')
  local total=0
  local i start end ms
  for (( i = 1; i <= runs; i++ )); do
    start=$(date +%s%N)
    curl -s -X POST http://localhost:8080/v1/completions \
      -H "Content-Type: application/json" \
      -d "{\"prompt\":\"$esc\",\"max_tokens\":50}" > /dev/null
    end=$(date +%s%N)
    ms=$(( (end - start) / 1000000 ))
    log "Run $i: ${ms}ms"
    total=$((total + ms))
  done
  local avg=$((total / runs))
  ok "Average latency: ${avg}ms over $runs runs"
}
# Command dispatch: the first argument selects the sub-command, the remaining
# arguments are forwarded verbatim to its handler. A missing or unknown
# command prints the usage text.
case "${1:-help}" in
convert) shift; cmd_convert "$@" ;;
quantize) shift; cmd_quantize "$@" ;;
package) shift; cmd_package "$@" ;;
benchmark) shift; cmd_benchmark "$@" ;;
*) usage ;;
esac

135
tools/organ.py Normal file
View File

@ -0,0 +1,135 @@
#!/usr/bin/env python3
"""
IX Organ Tool Inference-X Community Toolchain
Package, publish, and install IX "organs" (AI personas)
Usage:
./organ.py pack --model model.gguf --prompt system.txt --name "ARIA" -o aria.organ
./organ.py install aria.organ
./organ.py list https://git.inference-x.com/organs
./organ.py publish aria.organ --token YOUR_GITEA_TOKEN
"""
import os, sys, json, hashlib, zipfile, argparse, urllib.request, shutil
from pathlib import Path
# Gitea API endpoint of the community organ repository (used by `list`).
ORGAN_STORE_URL = "https://git.inference-x.com/api/v1/repos/elmadani/ix-organs"
# Local IX state lives under ~/.inference-x; installed organs go in organs/.
IX_HOME = Path.home() / ".inference-x"
ORGANS_DIR = IX_HOME / "organs"
def pack(args):
    """Package a model + prompt + config into an .organ file.

    Args:
        args: argparse.Namespace carrying the `pack` sub-command options
            (model, name, prompt/prompt_text, quant, ctx, temp, max_tokens,
            tags, author, license, description, output).

    Returns:
        str: path of the written .organ archive.

    Fixes vs. previous version:
        * ``"".split(",")`` yields ``['']``; empty --tags now produces ``[]``.
        * ``datetime.utcnow()`` (deprecated, naive) replaced with a
          timezone-aware UTC timestamp.
        * a missing model file used to be skipped silently, producing a
          model-less organ; a warning is now printed to stderr.
    """
    from datetime import datetime, timezone  # local import: only used here

    tags = [t for t in (args.tags or "").split(",") if t]
    organ_meta = {
        "version": "1.0",
        "name": args.name,
        "description": args.description or "",
        "model_file": Path(args.model).name,
        "quant": args.quant or "Q4_K_M",
        "context_size": args.ctx or 4096,
        "temperature": args.temp or 0.7,
        "max_tokens": args.max_tokens or 512,
        "tags": tags,
        "author": args.author or "anonymous",
        "license": args.license or "MIT",
        "created_at": datetime.now(timezone.utc).isoformat(),
    }
    output = args.output or f"{args.name.lower().replace(' ','-')}.organ"
    with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) as zf:
        # Metadata first, so consumers can read it without scanning the model.
        zf.writestr("organ.json", json.dumps(organ_meta, indent=2))
        # Model payload
        if args.model and Path(args.model).exists():
            zf.write(args.model, f"model/{Path(args.model).name}")
        else:
            # argparse marks --model required, but the path may still be wrong.
            print(f"! warning: model file not found, packing without it: {args.model}",
                  file=sys.stderr)
        # System prompt: file wins over inline text.
        if args.prompt and Path(args.prompt).exists():
            with open(args.prompt) as f:
                zf.writestr("system_prompt.txt", f.read())
        elif args.prompt_text:
            zf.writestr("system_prompt.txt", args.prompt_text)
    size = Path(output).stat().st_size
    h = hashlib.sha256(Path(output).read_bytes()).hexdigest()[:16]
    print(f"✓ Organ packed: {output}")
    print(f"  Size: {size:,} bytes | SHA256: {h}...")
    return output
def install(args):
    """Install an organ from a local file or http(s) URL into ORGANS_DIR.

    The archive is unpacked into ``~/.inference-x/organs/<slug>`` where the
    slug is the lowercased, dash-separated organ name from organ.json.

    Security fix: organ archives can come straight off the network, so member
    names are validated before extraction to block "zip-slip" path traversal
    (entries like ``../../.bashrc`` or absolute paths).
    """
    src = args.source
    ORGANS_DIR.mkdir(parents=True, exist_ok=True)
    if src.startswith("http"):
        print(f"Downloading {src}...")
        fname, _ = urllib.request.urlretrieve(src)
    else:
        fname = src
    with zipfile.ZipFile(fname, 'r') as zf:
        meta_raw = zf.read("organ.json")
        meta = json.loads(meta_raw)
        name = meta["name"].lower().replace(" ", "-")
        dest = ORGANS_DIR / name
        dest.mkdir(exist_ok=True)
        dest_root = str(dest.resolve())
        for member in zf.namelist():
            target = str((dest / member).resolve())
            if os.path.commonpath([dest_root, target]) != dest_root:
                raise ValueError(f"unsafe path in organ archive: {member!r}")
        zf.extractall(dest)
    print(f"✓ Organ installed: {dest}")
    print(f"  Name: {meta['name']}")
    print(f"  Model: {meta.get('model_file','?')}")
    print(f"  Run with: ix --organ {name}")
def list_organs(args):
    """List organs offered by the remote store, then any installed locally.

    Remote failures are non-fatal: a short notice is printed and the local
    listing still runs.
    """
    url = f"{ORGAN_STORE_URL}/contents/organs"
    try:
        with urllib.request.urlopen(url) as resp:
            entries = json.load(resp)
            print("Available organs:")
            for entry in entries:
                print(f"  {entry['name']} → {entry['download_url']}")
    except Exception as exc:
        print(f"Store unavailable: {exc}")
    # Locally installed organs (directories containing an organ.json).
    if not ORGANS_DIR.exists():
        return
    for organ_dir in ORGANS_DIR.iterdir():
        meta_path = organ_dir / "organ.json"
        if not meta_path.exists():
            continue
        info = json.loads(meta_path.read_text())
        print(f"  LOCAL: {info['name']} ({organ_dir.name})")
def main():
    """CLI entry point: parse argv and dispatch to the sub-command handlers.

    Sub-commands: pack, install, list. With no (or an unknown) sub-command
    the help text is printed.

    NOTE(review): the module docstring advertises a ``publish`` sub-command,
    but no handler exists here — only pack/install/list are implemented.
    """
    p = argparse.ArgumentParser(description="IX Organ Tool")
    sub = p.add_subparsers(dest="cmd")
    # pack — build an .organ archive from a model + prompt + metadata
    pk = sub.add_parser("pack", help="Pack a model into an organ")
    pk.add_argument("--model", required=True, help="Path to .gguf model")
    pk.add_argument("--prompt", help="Path to system prompt file")
    pk.add_argument("--prompt-text", help="System prompt as text")
    pk.add_argument("--name", required=True, help="Organ name")
    pk.add_argument("--description", help="Description")
    pk.add_argument("--quant", default="Q4_K_M")
    pk.add_argument("--ctx", type=int, default=4096)
    pk.add_argument("--temp", type=float, default=0.7)
    pk.add_argument("--max-tokens", type=int, default=512)
    pk.add_argument("--tags", help="Comma-separated tags")
    pk.add_argument("--author")
    pk.add_argument("--license", default="MIT")
    pk.add_argument("-o", "--output", help="Output .organ file")
    # install — unpack an organ into ~/.inference-x/organs
    ins = sub.add_parser("install", help="Install an organ")
    ins.add_argument("source", help="Path or URL to .organ file")
    # list — takes no options (the previously unused `ls` binding was dropped)
    sub.add_parser("list", help="List organs")
    args = p.parse_args()
    if args.cmd == "pack":
        pack(args)
    elif args.cmd == "install":
        install(args)
    elif args.cmd == "list":
        list_organs(args)
    else:
        p.print_help()
if __name__ == "__main__":
main()

36
tools/store.sh Normal file
View File

@ -0,0 +1,36 @@
#!/bin/bash
# IX Store Client — Browse, install, publish models
# Usage: ./store.sh [browse|install <id>|publish <file.organ> --token TOKEN]
#
# Fixes:
#  - browse: the inline python program contained UNescaped double quotes, so
#    the shell word-split it and python always failed with a syntax error —
#    browse could only ever print the "Store offline" fallback. The inner
#    quotes are now escaped (\") so the program reaches python intact.
#  - install: curl now uses -f so an HTTP error page is never saved as a
#    model file; partial downloads are removed on failure.
#  - publish: the token is accepted only after an explicit --token flag
#    (previously $4 was taken blindly, silently mis-parsing anything else).
set -eo pipefail   # pipefail: browse pipes curl into python3
STORE_API="https://build.inference-x.com/api/store"
IX_HOME="$HOME/.inference-x"
case "${1:-browse}" in
  browse)
    echo "=== IX Community Model Store ==="
    # Pretty-print either {"items":[...]} or a bare JSON list.
    curl -s "$STORE_API" | python3 -c "
import sys,json
d = json.load(sys.stdin)
for item in d.get('items',d if isinstance(d,list) else []):
    print(f\" {item.get('name','?')} | {item.get('size_mb','?')}MB | ⭐ {item.get('rating','?')} | {item.get('downloads','?')} downloads\")
" 2>/dev/null || echo "Store offline - check git.inference-x.com/elmadani/ix-tools"
    ;;
  install)
    if [ -z "${2:-}" ]; then echo "Usage: store.sh install <model-id>"; exit 1; fi
    echo "Installing $2..."
    mkdir -p "$IX_HOME/models"
    if curl -sfL "$STORE_API/$2/download" -o "$IX_HOME/models/$2.gguf"; then
      echo "✓ Installed to $IX_HOME/models/$2.gguf"
    else
      rm -f -- "$IX_HOME/models/$2.gguf"
      echo "✗ Download failed for $2" >&2
      exit 1
    fi
    ;;
  publish)
    if [ -z "${2:-}" ]; then echo "Usage: store.sh publish <file.organ> --token TOKEN"; exit 1; fi
    TOKEN=""
    if [ "${3:-}" = "--token" ]; then TOKEN="${4:-}"; fi
    if [ -z "$TOKEN" ]; then read -rp "Gitea token: " TOKEN; fi
    echo "Publishing $2..."
    curl -s -X POST -H "Authorization: token $TOKEN" \
      -F "file=@$2" "$STORE_API/publish"
    ;;
  *)
    echo "Usage: store.sh [browse|install <id>|publish <file> --token TOKEN]"
    ;;
esac