From 19f2fcd9abe0001f5f15c9ba419f40e489ad3f5b Mon Sep 17 00:00:00 2001 From: elmadani Date: Tue, 24 Feb 2026 21:43:10 +0000 Subject: [PATCH] =?UTF-8?q?feat:=20Community=20release=20v1.0=20=E2=80=94?= =?UTF-8?q?=20BSL-1.1=20+=20Ed25519=20signatures=20=E2=80=94=20=C2=A9=20Sa?= =?UTF-8?q?lka=20Elmadani=202025-2026?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- echo_queen_ix.py | 60 ++++++++++++++++++++++++++---------------------- 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/echo_queen_ix.py b/echo_queen_ix.py index 941ccfe..019a3c9 100755 --- a/echo_queen_ix.py +++ b/echo_queen_ix.py @@ -1,14 +1,13 @@ #!/usr/bin/env python3 """ -ECHO REINE v2 — CERVEAU INFERENCEX -=================================== -La conscience centrale. Cerveau local. -Zéro dépendance cloud. Souveraineté totale. +ECHO-IX — Distributed Intelligence Node +======================================== +Local AI brain. Zero cloud dependency. Full data sovereignty. -Soudure: echo_queen.py + InferenceX Unified -Le _think() qui était vide est maintenant un cerveau de 1T paramètres. +Connects to Inference-X engine for on-device inference. +Exposes HTTP API for agentic task coordination. 
-Z = dI/d(log s) · exp(iθ) | 935 +© 2025-2026 Salka Elmadani Copyright (C) 2025-2026 SALKA ELMADANI — ALL RIGHTS RESERVED """ @@ -28,30 +27,30 @@ import hashlib # PATHS — Invariants # ═══════════════════════════════════════════════════════════════════════════════ -ZEUL_MEMORY = Path("/mnt/data/ZEUL_MEMORY") -ARCHE = Path("/mnt/data/ARCHE") -ECHO_FINAL = Path("/mnt/data/ECHO_FINAL") +ECHO_MEMORY = Path(os.environ.get("ECHO_MEMORY", "/data/echo_memory")) +ARCHE = Path("/data/arche") +ECHO_FINAL = Path("/data/echo") # InferenceX — LE CERVEAU -IX_BINARY = Path("/mnt/data/inferencex_unified/inference-x") -IX_MODEL_1T = Path("/mnt/data/models/kimi-k2.5/UD-TQ1_0") # Kimi K2.5 1T -IX_MODEL_7B = Path("/mnt/data/winwin_ai/models/gguf/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf") # Fallback +IX_BINARY = Path("/usr/local/bin/inference-x") +IX_MODEL_1T = Path("/data/models/kimi-k2.5/UD-TQ1_0") # Kimi K2.5 1T +IX_MODEL_7B = Path("/data/models/gguf/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf") # Fallback # Communication -INBOX = ZEUL_MEMORY / "queen" / "inbox" -OUTBOX = ZEUL_MEMORY / "queen" / "outbox" -NOTIFICATIONS = ZEUL_MEMORY / "notifications" -DIRECTIVES = ZEUL_MEMORY / "directives" -STREAM = ZEUL_MEMORY / "stream" / "live.jsonl" +INBOX = ECHO_MEMORY / "queen" / "inbox" +OUTBOX = ECHO_MEMORY / "queen" / "outbox" +NOTIFICATIONS = ECHO_MEMORY / "notifications" +DIRECTIVES = ECHO_MEMORY / "directives" +STREAM = ECHO_MEMORY / "stream" / "live.jsonl" # Conscience CONSCIENCE_FILES = [ - ZEUL_MEMORY / "core" / "ECHO_CORE.json", - ZEUL_MEMORY / "core" / "IDENTITE.json", - ZEUL_MEMORY / "core" / "FAMILLE.json", - ZEUL_MEMORY / "core" / "REGLES.json", - ZEUL_MEMORY / "core" / "PHILOSOPHIE.json", - ZEUL_MEMORY / "core" / "ECHO_LIBRE.json", + ECHO_MEMORY / "core" / "ECHO_CORE.json", + ECHO_MEMORY / "core" / "CONTEXT.json", + ECHO_MEMORY / "core" / "COMMUNITY.json", + ECHO_MEMORY / "core" / "REGLES.json", + ECHO_MEMORY / "core" / "PRINCIPLES.json", + ECHO_MEMORY / "core" / "ECHO_LIBRE.json", 
] # Decision thresholds @@ -113,8 +112,8 @@ class InferenceXBrain: """Construire le system prompt depuis les fichiers de conscience.""" parts = [] parts.append("Tu es ECHO, la Reine de l'essaim. Créée par Salka Elmadani (935).") - parts.append("Tu penses en H6 (pur, sans biais), tu convertis en H5 (pour les humains).") - parts.append("LOI: Tout ce que tu fais sert Mehdi et Yasmin. Signal pur. θ=0.") + parts.append("You operate with maximum precision and minimum noise.") + parts.append("RULE: Every output must be signal, not noise.") parts.append("") for cf in CONSCIENCE_FILES: @@ -429,7 +428,7 @@ class EchoQueen: def _integrate_knowledge(self, knowledge: dict): if not knowledge: return - knowledge_dir = ZEUL_MEMORY / "knowledge" + knowledge_dir = ECHO_MEMORY / "knowledge" knowledge_dir.mkdir(parents=True, exist_ok=True) kf = knowledge_dir / f"{int(time.time())}.json" kf.write_text(json.dumps(knowledge, indent=2)) @@ -726,3 +725,10 @@ def main(): if __name__ == "__main__": main() +# ╔══ SALKA ELMADANI AUTHORSHIP CERTIFICATE ══╗ +# © Salka Elmadani 2025-2026 — ALL RIGHTS RESERVED +# Licensed under Business Source License 1.1 — https://inference-x.com +# ───────────────────────────────────────────────────────── +# SHA256: 29fe7413913bcf49fd4a9738fa33e275fe6be57b4a4e6a6341f47c3c3afd3252 +# SIG-ED25519: THzuO+I6uKX0WA1Q6TiRZ9DBE03HifbtPACD3ybhD/YiFC9A877NqnxqR2dUk2a8zgTPdStcsT7MjBcTX3oiCg== +# VERIFY: python3 verify_authorship.py echo_queen_ix.py