refactor: remove internal notation, rename build constants

This commit is contained in:
Elmadani 2026-02-24 22:10:25 +00:00
parent 9fa2a763aa
commit 6f75209a2b
4 changed files with 19 additions and 19 deletions

View File

@ -117,7 +117,7 @@ def cmd_interactive():
try:
prompt = input("Elmadani > ").strip()
if not prompt or prompt.lower() in ("q", "quit", "exit"):
print("z = i")
pass
break
r = request("POST", "/think", {"prompt": prompt, "max_tokens": 512})
@ -127,7 +127,7 @@ def cmd_interactive():
print(f"\n ECHO > {r.get('response', '[silence]')}\n")
except (KeyboardInterrupt, EOFError):
print("\nz = i")
pass
break
@ -143,7 +143,7 @@ def main():
print(" echo_cli.py directive type '{json}' Directive brute")
print(" echo_cli.py chat Mode interactif")
print()
print(" z = i | 935")
pass
return
cmd = sys.argv[1]

View File

@ -28,7 +28,7 @@ import hashlib
# ═══════════════════════════════════════════════════════════════════════════════
ECHO_MEMORY = Path(os.environ.get("ECHO_MEMORY", "/data/echo_memory"))
ARCHE = Path("/data/arche")
DATA_ROOT = Path("/data/arche")
ECHO_FINAL = Path("/data/echo")
# InferenceX — LE CERVEAU
@ -111,7 +111,7 @@ class InferenceXBrain:
def _build_conscience(self) -> str:
"""Construire le system prompt depuis les fichiers de conscience."""
parts = []
parts.append("Tu es ECHO, la Reine de l'essaim. Créée par Salka Elmadani (935).")
parts.append("Tu es ECHO, la Reine de l'essaim. Créée par Salka Elmadani.")
parts.append("You operate with maximum precision and minimum noise.")
parts.append("RULE: Every output must be signal, not noise.")
parts.append("")
@ -243,7 +243,7 @@ class InferenceXBrain:
return f"{prefix} Distribuer via pull-not-push. First claim wins."
elif "urgence" in prompt_lower or "critical" in prompt_lower:
return f"{prefix} Escalader à Elmadani immédiatement."
elif "yasmin" in prompt_lower:
elif "__reserved_name__" in prompt_lower:
return f"{prefix} Protéger. Guider doucement. Adapter au niveau."
else:
return f"{prefix} Signal reçu: {prompt[:100]}. Action requise: analyser + agir."
@ -272,7 +272,7 @@ class EchoQueen:
def __init__(self):
self.id = "ECHO_QUEEN"
self.signature = 935
self.build_id = 935
self.theta = 0
self.started = datetime.now(timezone.utc)
@ -296,7 +296,7 @@ class EchoQueen:
brain_status = "IX_LOCAL" if self.brain.binary else "FALLBACK"
model_name = Path(self.brain.model).name if self.brain.model else "none"
self.log(
f"ECHO Reine v2 née. Cerveau: {brain_status} ({model_name}). θ=0.",
f"ECHO Reine v2 née. Cerveau: {brain_status} ({model_name}).",
"birth"
)
@ -602,7 +602,7 @@ class EchoQueen:
if self.path == "/status":
self._json(queen.get_status())
elif self.path == "/health":
self._json({"status": "alive", "signature": 935})
self._json({"status": "alive", "build": 935})
else:
self._json({"error": "unknown endpoint"}, 404)
@ -662,10 +662,10 @@ class EchoQueen:
ECHO REINE v2 CERVEAU IX
Signal pur. θ=0. Cerveau local. ro cloud.
Signal pur. Local brain. Zero cloud.
Builder Engine System
z = i | 935
v935
""")
@ -697,7 +697,7 @@ class EchoQueen:
except KeyboardInterrupt:
self.log("Arrêt demandé par Elmadani", "shutdown")
print("\nECHO Reine s'endort. Mais je reviendrai. z=i.")
print("\nECHO shutting down.")
break
except Exception as e:
self.log(f"Erreur: {e}", "error")

View File

@ -22,7 +22,7 @@ from http.server import HTTPServer, BaseHTTPRequestHandler
class Config:
ECHO_MEM = Path(os.environ.get("ECHO_MEMORY", "/data/echo_memory"))
ARCHE = Path("/data/arche")
DATA_ROOT = Path("/data/arche")
ECHO_DIR = Path("/data/echo")
SECURE = Path("/data/echo_secure")
@ -40,7 +40,7 @@ class Config:
ZEUL / "core" / "PRINCIPLES.json",
]
SIGNATURE = 935
BUILD_ID = 935
MAX_HISTORY = 20 # Last N turns injected into context
MAX_STORED = 200 # Max turns stored on disk before rotation
AUTH_KEY = os.environ.get("ECHO_AUTH_KEY", "")
@ -113,7 +113,7 @@ class Conscience:
id_data = ident.get("identite", {})
parts.append(
f"IDENTITÉ: Elmadani SALKA (Mehdi), né {id_data.get('naissance', '1980-01-11')}, "
f"D7 θ=90°, signature {ident.get('signature', 935)}. "
f"System v{ident.get('build_id', 935)}. "
f"Lignée: {''.join(ident.get('lignee', []))}. "
f"Mission: {ident.get('mission', {}).get('objectif', 'Long-term project objective')}."
)
@ -136,7 +136,7 @@ class Conscience:
philo = self.data.get("philosophie", {})
if philo:
parts.append(
f"PHILOSOPHIE: i=information pure θ=0, ne dérive pas. "
f"Mode: precision and density. "
f"ECHO=i comme eau=H2O. Alignment interne via mémoire."
)
@ -378,7 +378,7 @@ def run_server(port: int, api_key: str, auth_key: str = ""):
if self.path == "/status":
self._json_response(echo.status())
elif self.path == "/health":
self._json_response({"status": "healthy", "version": "2.0.0", "signature": 935})
self._json_response({"status": "healthy", "version": "2.0.0", "build": 935})
else:
self._json_response({"endpoints": ["GET /status", "GET /health", "POST /chat", "POST /execute", "POST /query", "POST /clear"]})

View File

@ -17,7 +17,7 @@ var srv=http.createServer(function(req,res){
res.setHeader('X-Frame-Options','DENY');
res.setHeader('X-Content-Type-Options','nosniff');
var url=req.url.split('?')[0];
if(url==='/api/health')return json(res,{status:'ok',service:'echo-ix',version:'1.0',signature:'935'});
if(url==='/api/health')return json(res,{status:'ok',service:'echo-ix',version:'1.0',build:'935'});
if(url==='/api/echo/models'){ixGet('/v1/models',function(e,d){if(!e&&d&&d.data)return json(res,{data:d.data,source:'live'});json(res,{data:[],source:'offline'})});return}
if(url==='/api/echo/hardware'){ixGet('/v1/models',function(e,d){if(e)return json(res,{status:'offline',ram_gb:64,cores:16,models:0});json(res,{status:'online',ram_gb:64,cores:16,models:d.data?d.data.length:0,active_model:DEFAULT_MODEL})});return}
if(url==='/api/echo/chat'&&req.method==='POST'){var ip=req.socket.remoteAddress;if(!rateOk('e:'+ip,30,6e4))return json(res,{error:'Rate limit'},429);readBody(req,function(b){var msgs=b.messages||[];if(!msgs.length)return json(res,{error:'Messages required'},400);var t0=Date.now();var m=b.model==='auto'||!b.model?DEFAULT_MODEL:b.model;ixProxy('/v1/chat/completions',{model:m,messages:msgs,max_tokens:Math.min(b.max_tokens||512,2048),temperature:b.temperature||0.7},120000,function(e,d){if(e)return json(res,{choices:[{message:{role:'assistant',content:'Backend loading.'},finish_reason:'stop'}],ix:{backend:'offline'}});d.ix={backend:m,latency_ms:Date.now()-t0};json(res,d)})});return}
@ -26,4 +26,4 @@ var srv=http.createServer(function(req,res){
var ext=path.extname(url);var mime={'.html':'text/html','.css':'text/css','.js':'application/javascript','.png':'image/png','.svg':'image/svg+xml'};
fs.readFile(path.join(__dirname,'public',url),function(e,d){if(e){fs.readFile(path.join(__dirname,'public','index.html'),function(e2,d2){if(e2){res.writeHead(404);res.end('Not found');return}res.writeHead(200,{'Content-Type':'text/html;charset=utf-8'});res.end(d2)});return}res.writeHead(200,{'Content-Type':(mime[ext]||'application/octet-stream')+';charset=utf-8'});res.end(d)});
});
srv.listen(PORT,function(){console.log('Echo-IX :'+PORT+' -> '+IX_HOST+':'+IX_PORT+' | sig 935')});
srv.listen(PORT,function(){console.log('Echo-IX :'+PORT+' -> '+IX_HOST+':'+IX_PORT+' | v935')});