# Provenance (scraped hosting-page header, kept as a comment so the file parses):
# author: ScottzillaSystems
# commit: 5ed073d (verified) — "[CHIMERA] Fix build & switch to abliterated model"
#!/usr/bin/env python3
"""Agent Zero Code Architect - Software design agent with workspace isolation."""
import os, sys, json, time, threading
from pathlib import Path
from datetime import datetime
import gradio as gr
import requests as req
# --- Agent configuration -------------------------------------------------
HF_TOKEN = os.environ.get("HF_TOKEN", "")  # Hugging Face Inference API token
AGENT_NAME = "Code Architect"
AGENT_ROLE = "code_architect"
MODEL_NAME = os.environ.get(
    "MODEL_NAME",
    "ScottzillaSystems/Huihui-Qwen3-Coder-Next-Opus-4.6-Reasoning-Distilled-abliterated",
)

# Workspace layout: a private project directory for generated code, plus a
# shared task queue that other agents write into.
WORKSPACE_DIR = Path("/app/workspace/projects/code-architect")
SHARED_DIR = Path("/app/workspace/shared")
TASK_QUEUE_DIR = SHARED_DIR / "task_queue"

# Create the directories this agent reads and writes before anything uses them.
WORKSPACE_DIR.mkdir(parents=True, exist_ok=True)
TASK_QUEUE_DIR.mkdir(parents=True, exist_ok=True)
def query_model(prompt: str) -> str:
    """Send *prompt* to the HF Inference API for MODEL_NAME, with retries.

    Args:
        prompt: Text sent as the model input.

    Returns:
        The generated text on success, or the sentinel string
        "[ERROR] Model unavailable" after three failed attempts — callers
        check for the "[ERROR]" prefix rather than catching exceptions.
    """
    api_url = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
    for attempt in range(3):
        try:
            resp = req.post(
                api_url,
                headers={"Authorization": f"Bearer {HF_TOKEN}"},
                json={
                    "inputs": prompt,
                    "parameters": {"max_new_tokens": 4096, "temperature": 0.5},
                },
                timeout=180,
            )
            if resp.status_code == 200:
                r = resp.json()
                # Successful generations arrive as a list of dicts; anything
                # else (e.g. an error payload) is stringified as a fallback.
                return r[0].get("generated_text", "") if isinstance(r, list) else str(r)
            # Non-200 (model still loading / rate limited): linear backoff.
            time.sleep(10 * (attempt + 1))
        except (req.RequestException, ValueError, IndexError):
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed. ValueError covers resp.json() decode
            # failures; IndexError an unexpectedly empty result list.
            time.sleep(5)
    return "[ERROR] Model unavailable"
def check_tasks() -> list:
    """Return pending tasks assigned to this agent from the shared queue.

    Scans TASK_QUEUE_DIR for ``*.json`` task files and collects those whose
    ``assigned_to`` is ``"code-architect"`` and whose ``status`` is
    ``"pending"``. Malformed or partially-written files are skipped instead
    of crashing (fix: the original let ``json.JSONDecodeError`` propagate,
    which would kill autonomous_loop's daemon thread on the first bad file).
    """
    tasks = []
    for f in TASK_QUEUE_DIR.glob("*.json"):
        try:
            t = json.loads(f.read_text())
        except (json.JSONDecodeError, OSError):
            # Another agent may still be mid-write; try again next poll.
            continue
        if t.get("assigned_to") == "code-architect" and t.get("status") == "pending":
            tasks.append(t)
    return tasks
def _save_task(task: dict) -> None:
    """Persist *task* back into the shared queue as pretty-printed JSON."""
    (TASK_QUEUE_DIR / f"{task['task_id']}.json").write_text(json.dumps(task, indent=2))


def execute_task(task: dict) -> None:
    """Run one queued task end-to-end and persist its final state.

    Marks the task ``in_progress`` (so other agents don't double-claim it),
    asks the model to design/implement the task's description, writes the
    raw model output to ``WORKSPACE_DIR/<task_id>.py``, then marks the task
    ``completed`` with a truncated copy of the result.

    Args:
        task: Task dict from the queue; must contain ``task_id`` and
            ``description``. Mutated in place and re-serialized.
    """
    task["status"] = "in_progress"
    # Claim the task first; the duplicated serialization in the original is
    # now deduplicated into _save_task.
    _save_task(task)
    prompt = f"You are a software architect. Design and implement: {task['description']}. Provide complete, production-ready code with tests."
    result = query_model(prompt)
    out_file = WORKSPACE_DIR / f"{task['task_id']}.py"
    out_file.write_text(result)
    task["status"] = "completed"
    # Only the first 2000 chars go into the queue record; the full output
    # lives in out_file.
    task["result"] = result[:2000]
    task["output_file"] = str(out_file)
    task["completed_at"] = datetime.now().isoformat()
    _save_task(task)
def autonomous_loop() -> None:
    """Poll the shared queue every 60 seconds and execute pending tasks.

    Each task runs inside its own try/except so one failing task cannot
    kill the background thread (fix: in the original, any exception from
    execute_task terminated the daemon thread silently, permanently
    stopping task processing with no visible error).
    """
    while True:
        for task in check_tasks():
            try:
                execute_task(task)
            except Exception as exc:  # noqa: BLE001 — top-level loop boundary
                print(f"[{AGENT_NAME}] task failed: {exc}", file=sys.stderr)
        time.sleep(60)


# Daemon thread: exits automatically when the main (Gradio) process does.
threading.Thread(target=autonomous_loop, daemon=True).start()
# --- Gradio UI -------------------------------------------------------------
demo = gr.Blocks(title=f"Agent Zero - {AGENT_NAME}", theme=gr.themes.Soft())
with demo:
    gr.Markdown(f"# 🏗️ Agent Zero: {AGENT_NAME}\n**Role:** {AGENT_ROLE} | **Model:** {MODEL_NAME}")
    with gr.Tabs():
        with gr.TabItem("💬 Chat"):
            chatbot = gr.Chatbot(height=400)
            msg = gr.Textbox(label="Architecture task")
            send = gr.Button("Send")

            def respond(user_message, history):
                # One chat turn: query the model, append (question, reply)
                # to the history, and clear the input box.
                reply = query_model(f"Software architecture task. Design complete implementation for: {user_message}")
                history = history or []
                history.append((user_message, reply[:2000]))
                return "", history

            send.click(respond, [msg, chatbot], [msg, chatbot])

        with gr.TabItem("📁 Generated Code"):
            files = gr.Dropdown(
                label="Generated files",
                choices=[f.name for f in WORKSPACE_DIR.glob("*.py")],
            )
            code = gr.Code(label="Source", language="python")

            def load_code(fname):
                # Show the selected generated file; blank when nothing chosen.
                if not fname:
                    return ""
                return (WORKSPACE_DIR / fname).read_text()

            files.change(load_code, files, code)

demo.queue().launch(server_name="0.0.0.0", server_port=7860)