#!/usr/bin/env python3
"""
Model 935 Pipeline — Phase 1: Dissect all + Download Kimi K2.5
CSCI v1.0 — Cross-Scale Coherence Index
"""
import glob
import json
import os
import subprocess
import sys
import time

# Filesystem layout: raw GGUF checkpoints live in MODELS_DIR; each dissected
# model becomes a directory of tensor "organs" under ORGANS_DIR.
MODELS_DIR = "/mnt/models"
ORGANS_DIR = "/mnt/data/organ-architecture/organs"
EXTRACT = "/mnt/data/organ-architecture/organ_extract.py"
MEASURE = "/mnt/data/organ-architecture/organ_measure.py"

os.makedirs(ORGANS_DIR, exist_ok=True)


def _dir_size_bytes(path):
    """Total size in bytes of every regular file under *path*, recursively."""
    return sum(
        os.path.getsize(os.path.join(dirpath, fname))
        for dirpath, _dirnames, fnames in os.walk(path)
        for fname in fnames
    )


# ── Map all local GGUF models ────────────────────────────────────────────
# Build {gguf filename -> clean organ-directory name}, skipping chimera
# merges and earlier "935" build artifacts.
models = {}
for gguf_file in sorted(glob.glob(os.path.join(MODELS_DIR, "*.gguf"))):
    name = os.path.basename(gguf_file)
    # Skip chimeras and old 935 attempts
    if "chimera" in name.lower() or "935" in name.lower():
        continue
    # Strip extension and quantization / variant suffixes so the same base
    # model always maps to the same organ directory name.
    clean = name.replace(".gguf", "").replace("-Q4_K_M", "").replace("-Q8_0", "")
    clean = clean.replace("-Instruct", "").replace("-it", "").replace("-v0.3", "")
    clean = clean.lower().replace(".", "")
    models[name] = clean

print(f"Found {len(models)} models to dissect")
print("=" * 60)

results = []
for gguf_name, organ_name in models.items():
    gguf_path = os.path.join(MODELS_DIR, gguf_name)
    organ_path = os.path.join(ORGANS_DIR, organ_name)
    manifest = os.path.join(organ_path, "manifest.json")

    # A manifest marks a completed dissection — skip instead of redoing work.
    if os.path.exists(manifest):
        size = _dir_size_bytes(organ_path)
        print(f"[EXISTS] {organ_name} ({size/1024/1024:.0f}MB)")
        results.append({"model": organ_name, "status": "exists", "size_mb": size/1024/1024})
        continue

    print(f"\n[DISSECT] {gguf_name} → {organ_name}")
    t0 = time.time()
    try:
        proc = subprocess.run(
            ["python3", EXTRACT, "--model", gguf_path, "--output", organ_path],
            capture_output=True, text=True, timeout=600
        )
    except subprocess.TimeoutExpired:
        # BUG FIX: an un-caught TimeoutExpired previously aborted the entire
        # pipeline on one slow model; record the failure and keep going.
        print(f" [TIMEOUT] {gguf_name} exceeded 600s")
        results.append({"model": organ_name, "status": "timeout"})
        continue
    elapsed = time.time() - t0

    if proc.returncode == 0:
        # Echo only the tail of the extractor's output (its summary lines).
        for line in proc.stdout.strip().split("\n")[-8:]:
            print(f" {line}")
        size = _dir_size_bytes(organ_path)
        results.append({"model": organ_name, "status": "ok",
                        "size_mb": size/1024/1024, "time": elapsed})
    else:
        print(f" [ERROR] {proc.stderr[-200:]}")
        results.append({"model": organ_name, "status": "error"})

# ── Phase 2: Z-measure all organs ────────────────────────────────────────
print(f"\n{'='*60}")
print(f"PHASE 2: Z-MEASURE ALL ORGANS")
print(f"{'='*60}")

# organ_measure lives outside the standard path; import must follow the insert.
sys.path.insert(0, "/mnt/data/organ-architecture")
from organ_measure import measure_directory  # noqa: E402

z_report = {}
for d in sorted(os.listdir(ORGANS_DIR)):
    organ_path = os.path.join(ORGANS_DIR, d)
    manifest = os.path.join(organ_path, "manifest.json")
    if not os.path.exists(manifest):
        continue
    print(f"\n[Z] {d}")
    measures = measure_directory(organ_path)
    if not measures:
        continue

    # Group tensor measurements by their parent directory name
    # (expected: skeleton / organs / embed / norm).
    groups = {}
    for row in measures:
        group_name = os.path.basename(os.path.dirname(row['file']))
        groups.setdefault(group_name, []).append(row)

    summary = {
        "model": d,
        "avg_theta": sum(row['theta_deg'] for row in measures) / len(measures),
        "avg_signal": sum(row['signal_ratio'] for row in measures) / len(measures),
        "total_tensors": len(measures),
        "groups": {},
    }
    for gname in ['skeleton', 'organs', 'embed', 'norm']:
        if gname not in groups:
            continue
        g = groups[gname]
        summary["groups"][gname] = {
            "count": len(g),
            "avg_theta": round(sum(row['theta_deg'] for row in g) / len(g), 1),
            "avg_signal": round(sum(row['signal_ratio'] for row in g) / len(g), 3),
        }
        print(f" {gname:12s}: {len(g):3d} tensors | θ={summary['groups'][gname]['avg_theta']:5.1f}°")
    print(f" GLOBAL: θ={summary['avg_theta']:.1f}°")
    z_report[d] = summary

# Save the complete per-model report for downstream tooling.
with open("/mnt/data/organ-architecture/z_report_complete.json", "w") as f:
    json.dump(z_report, f, indent=2)

# ── Print ranking, highest average theta first ───────────────────────────
print(f"\n{'='*60}")
print(f" Z-RANKING — ALL MODELS")
print(f"{'='*60}")
ranked = sorted(z_report.values(), key=lambda m: m['avg_theta'], reverse=True)
for i, m in enumerate(ranked, 1):
    print(f" {i:2d}. θ={m['avg_theta']:5.1f}° signal={m['avg_signal']:.3f} {m['model']}")
print(f"\n Signature: 935")
print(f"{'='*60}")

# ╔══ SALKA ELMADANI AUTHORSHIP CERTIFICATE ══╗
# © Salka Elmadani 2025-2026 — ALL RIGHTS RESERVED
# Licensed under Business Source License 1.1 — https://inference-x.com
# ─────────────────────────────────────────────────────────
# SHA256: 70a8957904cd4ee20dfd8fa42a0d8551cf8ae03eb2d0ec6fc9f4ed8f86995037
# SIG-ED25519: ddMrNVlt0PpN5uHTbAnxLkphci22Xv0efiEyfUAoHVJxextDZsK69jVULKiXZDED1txsfGzrenMjJMaKe5g4DQ==
# VERIFY: python3 verify_authorship.py pipeline_935.py