- tools/organ.py: Pack/install/publish AI persona organs
- tools/forge.sh: Convert HuggingFace models to GGUF, quantize
- tools/store.sh: Browse/install/publish community models
- tools/compilation/: Cross-platform build scripts (Linux/macOS)
- scripts/install.sh: Universal installer (auto-detects OS/arch/GPU)
- site/saas/: SaaS frontend + backend source (v3 unified design)
- site/vitrine/: Main site source (inference-x.com)
- docs/ARCHITECTURE.md: Full system architecture

All plans are currently in test mode — Studio & Enterprise are free to test.

Branch: master | Maintainer: Anti-Atlas Craton (elmadani)
37 lines
1.3 KiB
Bash
37 lines
1.3 KiB
Bash
#!/bin/bash
# IX Store Client — Browse, install, publish models
# Usage: ./store.sh [browse|install <model-id>|publish <file.organ> --token TOKEN]
#
# Env:
#   IX_HOME — local install root (default: ~/.inference-x)

set -euo pipefail

readonly STORE_API="https://build.inference-x.com/api/store"
IX_HOME="${IX_HOME:-$HOME/.inference-x}"

# die MESSAGE… — print to stderr and exit non-zero.
die() { printf '%s\n' "$*" >&2; exit 1; }

case "${1:-browse}" in
  browse)
    echo "=== IX Community Model Store ==="
    # Inner double quotes must be escaped (\") so the shell does not end the
    # -c string early. The API may return either a list or {"items": [...]};
    # only dicts have .get, so branch on the type before calling it.
    curl -fsS "$STORE_API" | python3 -c "
import sys, json
d = json.load(sys.stdin)
items = d if isinstance(d, list) else d.get('items', [])
for item in items:
    print(f\" {item.get('name','?')} | {item.get('size_mb','?')}MB | ⭐ {item.get('rating','?')} | {item.get('downloads','?')} downloads\")
" 2>/dev/null || echo "Store offline - check git.inference-x.com/elmadani/ix-tools"
    ;;

  install)
    model_id="${2:-}"
    [ -n "$model_id" ] || die "Usage: store.sh install <model-id>"
    # Reject ids that would write outside $IX_HOME/models (path traversal).
    case "$model_id" in
      */*|*..*) die "Invalid model id: $model_id" ;;
    esac
    echo "Installing $model_id..."
    mkdir -p "$IX_HOME/models"
    # -f: fail on HTTP errors so a 404 page is never saved as a .gguf file.
    curl -fsL "$STORE_API/$model_id/download" -o "$IX_HOME/models/$model_id.gguf" \
      || die "Download failed for $model_id"
    echo "✓ Installed to $IX_HOME/models/$model_id.gguf"
    ;;

  publish)
    organ_file="${2:-}"
    [ -n "$organ_file" ] || die "Usage: store.sh publish <file.organ> --token TOKEN"
    [ -f "$organ_file" ] || die "File not found: $organ_file"
    # Only honor $4 as the token when $3 is actually the --token flag.
    TOKEN=""
    [ "${3:-}" = "--token" ] && TOKEN="${4:-}"
    if [ -z "$TOKEN" ]; then
      # -s: do not echo the secret while typing.
      read -rsp "Gitea token: " TOKEN
      echo
    fi
    [ -n "$TOKEN" ] || die "A Gitea token is required to publish"
    echo "Publishing $organ_file..."
    curl -fsS -X POST -H "Authorization: token $TOKEN" \
      -F "file=@$organ_file" "$STORE_API/publish" \
      || die "Publish failed"
    ;;

  *)
    echo "Usage: store.sh [browse|install <id>|publish <file> --token TOKEN]" >&2
    exit 2
    ;;
esac