#!/usr/bin/env python3
"""
Organ Architect — AI Model Internals Analyzer
Part of the Inference-X Ecosystem
# SALKA ELMADANI | inference-x.com | BSL-1.1
Copyright (C) 2024-2026 Salka Elmadani. BSL-1.1.
https://git.inference-x.com/inference-x-community/organ-architect
Like an MRI for AI models.
Visualize layers, heads, topology.
Understand what's inside before you transplant.
"""
from fastapi import FastAPI, UploadFile, File
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, HTMLResponse
import json, os, struct, time
# FastAPI application instance; title/version appear in the auto-generated docs.
app = FastAPI(title="Organ Architect", version="1.0.0")
# Wide-open CORS: any origin, method, and header is accepted.
# NOTE(review): presumably intentional for a public demo API — tighten
# allow_origins before exposing anything sensitive.
app.add_middleware(CORSMiddleware, allow_origins=["*"],
allow_methods=["*"], allow_headers=["*"])
def parse_gguf_metadata(path: str) -> dict:
    """Parse GGUF file metadata to extract model architecture.

    Reads only the fixed-size GGUF header: the 4-byte magic "GGUF",
    then (little-endian) uint32 version, uint64 tensor_count and
    uint64 metadata_kv_count. It does not walk the KV or tensor tables.

    NOTE(review): the original body was truncated in the source (the
    struct format string was cut off mid-line); everything past the
    header read is reconstructed from the documented GGUF header layout,
    and the per-layer extraction that may have followed is lost, so
    "layers" stays empty.

    Args:
        path: Filesystem path to a candidate .gguf file.

    Returns:
        A dict with file name, format, version and counts on success,
        or a dict containing an "error" key when the file is not GGUF,
        is truncated, or cannot be read.
    """
    result = {"file": os.path.basename(path), "format": "gguf", "layers": []}
    try:
        with open(path, "rb") as f:
            # GGUF files start with the 4-byte ASCII magic "GGUF".
            magic = f.read(4)
            if magic != b"GGUF":
                return {"error": "Not a GGUF file"}
            header = f.read(20)
            if len(header) < 20:
                # File ends before the fixed header is complete.
                return {"error": "Truncated GGUF header"}
            # Header layout (little-endian): uint32 version,
            # uint64 tensor_count, uint64 metadata_kv_count.
            version, tensor_count, kv_count = struct.unpack("<IQQ", header)
            result["version"] = version
            result["tensor_count"] = tensor_count
            result["metadata_kv_count"] = kv_count
    except OSError as e:
        # Surface I/O problems (missing file, permissions) as data, not a crash.
        result["error"] = str(e)
    return result
@app.get("/", response_class=HTMLResponse)
async def root() -> HTMLResponse:
    """Landing page with a short, human-readable service description."""
    # NOTE(review): reconstructed from a garbled source line — the heading
    # character was mojibake (likely an emoji); text content preserved.
    return HTMLResponse("""
    <html><body>
    <h1>Organ Architect</h1>
    <p>Analyze AI model internals. Like an MRI for GGUF models.</p>
    <p>Part of the Inference-X ecosystem &middot; BSL-1.1</p>
    </body></html>
    """)


@app.post("/analyze")
async def analyze(model: UploadFile = File(...)):
    """Analyze a GGUF model file and return architecture information.

    The upload is spooled to a temporary .gguf file so the binary parser
    can work on a real path; the temp file is always deleted afterwards.
    """
    import tempfile
    with tempfile.NamedTemporaryFile(suffix=".gguf", delete=False) as tmp:
        content = await model.read()
        tmp.write(content)
        tmp_path = tmp.name
    try:
        result = parse_gguf_metadata(tmp_path)
        result["filename"] = model.filename
        result["size_mb"] = round(len(content) / (1024 * 1024), 2)
        result["analyzed_at"] = int(time.time())
        return result
    finally:
        # Remove the temp file even when parsing raises.
        os.unlink(tmp_path)


@app.post("/extract-spec")
async def extract_spec(request: UploadFile = File(...)):
    """Extract transplantation specification from model."""
    # Only the upload's size is used; the spec lists are static.
    content = await request.read()
    return {
        "status": "ok",
        "spec": {
            "extractable_organs": ["attention_heads", "ffn_blocks", "embeddings"],
            "recommended_tools": ["safetensors", "torch", "transformers"],
            "size_mb": round(len(content) / (1024 * 1024), 2),
        },
    }


@app.get("/health")
async def health():
    """Liveness probe for orchestrators / load balancers."""
    return {"status": "ok", "service": "Organ Architect", "author": "Salka Elmadani"}


if __name__ == "__main__":
    import uvicorn
    print("Organ Architect — AI Model Internals Analyzer")
    print("Like an MRI for AI models.")
    # PORT env var overrides the default 7940.
    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", "7940")))