# forked from elmadani/ix-tools
# - tools/organ.py:      Pack/install/publish AI persona organs
# - tools/forge.sh:      Convert HuggingFace models to GGUF, quantize
# - tools/store.sh:      Browse/install/publish community models
# - tools/compilation/:  Cross-platform build scripts (Linux/macOS)
# - scripts/install.sh:  Universal installer (auto-detect OS/arch/GPU)
# - site/saas/:          SaaS frontend + backend source (v3 unified design)
# - site/vitrine/:       Main site source (inference-x.com)
# - docs/ARCHITECTURE.md: Full system architecture
# All plans now in test mode - Studio & Enterprise free to test.
# Branch: master | Maintainer: Anti-Atlas craton (elmadani)
# (165 lines, 5.1 KiB, Bash)
#!/bin/bash
# IX Universal Installer — inference-x.com
# Detects OS/arch, downloads correct binary, sets up config

# Strict mode: abort on errors, unset variables, and mid-pipeline failures.
set -euo pipefail

readonly VERSION="latest"
readonly BASE_URL="https://inference-x.com/releases"
readonly IX_HOME="$HOME/.inference-x"
readonly BIN_DIR="/usr/local/bin"

# ANSI color palette for log output (NC resets attributes).
RED='\033[0;31m'; GREEN='\033[0;32m'; TEAL='\033[0;36m'; AMBER='\033[0;33m'; NC='\033[0m'

# Logging helpers: info, success, warning, and fatal (err exits 1).
log()  { echo -e "${TEAL}[IX]${NC} $1"; }
ok()   { echo -e "${GREEN}[✓]${NC} $1"; }
warn() { echo -e "${AMBER}[!]${NC} $1"; }
# Fatal errors go to stderr so they survive stdout redirection.
err()  { echo -e "${RED}[✗]${NC} $1" >&2; exit 1; }
#######################################
# Map uname OS/arch onto a release-artifact platform slug.
# Globals:   PLATFORM (written)
# Outputs:   a "Detected: ..." log line
# Exits:     via err on unsupported OS or architecture
#######################################
detect_platform() {
  # Split declaration from assignment so a failing uname is not masked
  # (SC2155); lower-case names for locals.
  local os arch
  os=$(uname -s | tr '[:upper:]' '[:lower:]')
  arch=$(uname -m)
  case "$os" in
    linux)
      case "$arch" in
        x86_64)  PLATFORM="linux-x64" ;;
        aarch64) PLATFORM="linux-arm64" ;;
        armv7l)  PLATFORM="linux-armv7" ;;
        *) err "Unsupported arch: $arch" ;;
      esac ;;
    darwin)
      case "$arch" in
        arm64)  PLATFORM="macos-arm64" ;;
        x86_64) PLATFORM="macos-x64" ;;
        *) err "Unsupported arch: $arch" ;;
      esac ;;
    *) err "Unsupported OS: $os. Use Windows installer from inference-x.com" ;;
  esac
  log "Detected: $os/$arch → $PLATFORM"
}
#######################################
# Pick the best available compute backend, in priority order:
# CUDA (NVIDIA driver tooling present) > Metal (Apple Silicon)
# > Vulkan (vulkaninfo present) > CPU fallback.
# Globals:   BACKEND (written)
# Outputs:   one status line describing the chosen backend
#######################################
detect_backend() {
  BACKEND="cpu"

  # Early-return ladder: first match wins.
  if command -v nvidia-smi &>/dev/null; then
    BACKEND="cuda"
    ok "NVIDIA GPU detected — CUDA backend will be used"
    return 0
  fi

  if [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]; then
    BACKEND="metal"
    ok "Apple Silicon detected — Metal backend will be used"
    return 0
  fi

  if command -v vulkaninfo &>/dev/null; then
    BACKEND="vulkan"
    ok "Vulkan detected"
    return 0
  fi

  log "Using CPU backend (universal)"
}
#######################################
# Fetch the prebuilt IX binary for $PLATFORM into /tmp/ix-binary.
# Tries each release mirror in order; falls back to a source build
# when no mirror serves the artifact.
# Globals:   PLATFORM (read)
# Side effects: writes /tmp/ix-binary (made executable)
#######################################
download_ix() {
  # Fix: the old message logged $BASE_URL, which is not necessarily the
  # mirror actually used below.
  log "Downloading IX engine (ix-$PLATFORM)..."

  # Release mirrors, tried in order.
  local mirrors=(
    "https://inference-x.com/releases"
    "https://git.inference-x.com/elmadani/inference-x/releases/download/latest"
  )

  local mirror downloaded=false
  for mirror in "${mirrors[@]}"; do
    if curl -fsSL "$mirror/ix-$PLATFORM" -o /tmp/ix-binary 2>/dev/null; then
      downloaded=true
      break
    fi
  done

  if [ "$downloaded" = false ]; then
    warn "Binary download unavailable. Building from source..."
    build_from_source
    return
  fi

  chmod +x /tmp/ix-binary
  ok "Downloaded IX binary"
}
#######################################
# Clone and compile IX from source into /tmp/ix-binary.
# Requires git, cmake, make, and a C compiler on PATH.
# Globals:   none written
# Side effects: writes /tmp/ix-binary; temp clone dir is removed on success
# Exits:     via err when any build stage fails
#######################################
build_from_source() {
  log "Building IX from source (requires git, cmake, make, gcc)..."
  local tmp
  tmp=$(mktemp -d) || err "mktemp failed"

  # Each stage pipes through tail to trim noise; tail itself always
  # succeeds, so check the stage's own status via PIPESTATUS instead of
  # relying on set -e (which would only see tail's exit code).
  git clone --depth=1 https://git.inference-x.com/elmadani/inference-x.git "$tmp/ix" 2>&1 | tail -3
  [ "${PIPESTATUS[0]}" -eq 0 ] || err "git clone failed"

  cd "$tmp/ix" || err "cannot cd to $tmp/ix"

  cmake -B build -DCMAKE_BUILD_TYPE=Release 2>&1 | tail -3
  [ "${PIPESTATUS[0]}" -eq 0 ] || err "cmake configure failed"

  cmake --build build -j"$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 2)" 2>&1 | tail -5
  [ "${PIPESTATUS[0]}" -eq 0 ] || err "build failed"

  cp build/bin/ix /tmp/ix-binary || err "build produced no binary"
  chmod +x /tmp/ix-binary
  cd - >/dev/null
  rm -rf -- "$tmp"
  ok "Built from source"
}
#######################################
# Create the IX home layout, install the binary, and write the
# default JSON config.
# Globals:   IX_HOME, BIN_DIR (read); BACKEND (read, defaults to cpu)
# Side effects: copies /tmp/ix-binary to $BIN_DIR/ix (sudo if needed),
#               writes $IX_HOME/configs/default.json
#######################################
install_ix() {
  mkdir -p "$IX_HOME" "$IX_HOME/models" "$IX_HOME/organs" "$IX_HOME/configs"

  # Escalate with sudo only when the target dir is not writable.
  if [ -w "$BIN_DIR" ]; then
    cp /tmp/ix-binary "$BIN_DIR/ix"
    ok "Installed to $BIN_DIR/ix"
  else
    sudo cp /tmp/ix-binary "$BIN_DIR/ix"
    ok "Installed to $BIN_DIR/ix (sudo)"
  fi

  # Default config. Fix: fall back to "cpu" so the JSON never contains an
  # empty backend when detect_backend was not run first.
  cat > "$IX_HOME/configs/default.json" << CONFIG
{
  "version": "1.0",
  "engine": "inference-x",
  "hardware": { "backend": "${BACKEND:-cpu}" },
  "model": {
    "id": "llama3.2-1b",
    "context_size": 4096,
    "max_tokens": 512
  },
  "persona": {
    "name": "Assistant",
    "system_prompt": "You are a helpful, private AI assistant. You run locally. No data leaves this device.",
    "temperature": 0.7
  },
  "server": { "port": 8080, "bind": "127.0.0.1" }
}
CONFIG

  ok "Default config created at $IX_HOME/configs/default.json"
}
#######################################
# Post-install banner: quick-start command, API endpoint, install
# paths, and first-model download hints.
# Globals:   GREEN, TEAL, AMBER, NC (read)
# Outputs:   the banner on stdout
#######################################
print_success() {
  # %b interprets the \033 escapes inside the color variables, matching
  # the behavior of echo -e.
  local -a banner=(
    ""
    "${GREEN}╔══════════════════════════════════════════╗${NC}"
    "${GREEN}║   Inference-X installed successfully!    ║${NC}"
    "${GREEN}╚══════════════════════════════════════════╝${NC}"
    ""
    "  ${TEAL}Start IX:${NC}   ix --config ~/.inference-x/configs/default.json"
    "  ${TEAL}API:${NC}        http://localhost:8080/v1/chat/completions"
    "  ${TEAL}Models:${NC}     ~/.inference-x/models/"
    "  ${TEAL}Organs:${NC}     ~/.inference-x/organs/"
    "  ${TEAL}Builder:${NC}    https://build.inference-x.com"
    "  ${TEAL}Docs:${NC}       https://git.inference-x.com/elmadani/ix-tools"
    ""
    "  ${AMBER}First model download:${NC}"
    "    ix download llama3.2-1b    # Fastest (1GB)"
    "    ix download mistral-7b     # Best quality (4GB)"
    ""
  )
  printf '%b\n' "${banner[@]}"
}
#######################################
# Entry point: print the installer banner, then run each install
# stage in order (platform → backend → download → install → report).
# Globals:   TEAL, NC (read)
#######################################
main() {
  local rule="${TEAL}════════════════════════════════════════${NC}"
  printf '%b\n' \
    "" \
    "$rule" \
    "${TEAL}   Inference-X Universal Installer      ${NC}" \
    "${TEAL}   Built in Morocco · For Everyone      ${NC}" \
    "$rule" \
    ""

  detect_platform
  detect_backend
  download_ix
  install_ix
  print_success
}

main "$@"