"""Self-correcting code agent: generate -> execute -> (retry | end) loop."""

import os
import io
import contextlib

import gradio as gr
from typing import TypedDict, List
from huggingface_hub import InferenceClient
from langgraph.graph import StateGraph, END

# 1. BRAIN CONFIGURATION
# HF_TOKEN must be set in the environment; .get() keeps import from raising,
# but calls will fail later if it is missing.
client = InferenceClient(api_key=os.environ.get("HF_TOKEN"))


# 2. STATE DEFINITION
class AgentState(TypedDict):
    task: str          # user-supplied coding task
    code: str          # most recent code produced by the LLM
    error: str         # error message from the last run ("" means success)
    logs: List[str]    # running debug monologue shown in the UI
    iterations: int    # number of generation attempts so far


def _extract_code(text: str) -> str:
    """Pull source code out of a markdown-fenced LLM response.

    Prefers a ```python fence; falls back to the first bare ``` fence
    (the original only handled ```python and returned raw text, fences
    included, otherwise); finally returns the stripped raw text.
    """
    if "```python" in text:
        return text.split("```python")[-1].split("```")[0].strip()
    if "```" in text:
        return text.split("```")[1].strip()
    return text.strip()


# 3. GENERATE NODE
def generate_node(state: AgentState):
    """Ask the LLM to solve the task, feeding back the previous error if any.

    Returns a new state dict with refreshed `code` and an incremented
    `iterations` counter. Appends progress messages to `logs` in place.
    """
    state['logs'].append(f"🧠 [Attempt {state['iterations'] + 1}] Analyzing requirements...")
    prompt = f"System: You are an expert Python engineer. Solve this task: {state['task']}."
    if state['error']:
        state['logs'].append(f"🔍 [Self-Correction] Fixing previous error: {state['error']}")
        prompt += (
            f"\n\nCRITICAL: Your last code failed with this error: {state['error']}. "
            "Fix it. Ensure you provide the full code block."
        )
    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-R1",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=2000,
    )
    full_response = response.choices[0].message.content
    code_only = _extract_code(full_response)
    return {**state, "code": code_only, "iterations": state['iterations'] + 1}


# 4. EXECUTE NODE
def execute_node(state: AgentState):
    """Run the generated code, capturing stdout; record success or the error.

    On success, `error` is reset to ""; on failure, `error` carries the
    exception text so the next generate pass can self-correct.
    """
    state['logs'].append("🧪 Running verification tests...")
    output_capture = io.StringIO()
    try:
        with contextlib.redirect_stdout(output_capture):
            # SECURITY: exec() of LLM-generated code is inherently unsafe —
            # it runs with full process privileges. Only deploy this app
            # inside a sandbox/container with no sensitive credentials.
            exec(state['code'], {"__name__": "__main__"})
        result = output_capture.getvalue()
        state['logs'].append(f"✅ Success! Output:\n{result}")
        return {**state, "error": ""}
    except Exception as e:  # deliberately broad: any failure feeds the fix loop
        error_msg = str(e)
        state['logs'].append(f"❌ Failure: {error_msg}")
        return {**state, "error": error_msg}


# 5. ROUTER
# Maximum number of generate/execute cycles before giving up.
MAX_ITERATIONS = 3


def router(state: AgentState):
    """Route to 'end' on success or retry exhaustion, else back to 'generate'."""
    if not state['error'] or state['iterations'] >= MAX_ITERATIONS:
        return "end"
    return "generate"
# 6. BUILD THE GRAPH
# Linear generate -> execute pipeline with a conditional loop back to
# generate while the router reports an unresolved error.
builder = StateGraph(AgentState)
builder.add_node("generate", generate_node)
builder.add_node("execute", execute_node)
builder.set_entry_point("generate")
builder.add_edge("generate", "execute")
builder.add_conditional_edges("execute", router, {"generate": "generate", "end": END})
agent_app = builder.compile()


# 7. UI LOGIC
def run_ui_logic(user_task):
    """Drive one full agent run; return (final code, joined log text) for the UI."""
    state = {"task": user_task, "code": "", "error": "", "logs": [], "iterations": 0}
    final_state = agent_app.invoke(state)
    return final_state['code'], "\n\n".join(final_state['logs'])


# 8. THE DASHBOARD
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Auto-Debug Agent v2.0")
    with gr.Row():
        # Left column: live agent monologue.
        with gr.Column(scale=1):
            gr.Markdown("### 📜 Debug Monologue")
            log_display = gr.Textbox(label="Agent Internal Logs", lines=22, interactive=False)
        # Right column: task input, preset buggy snippets, and final output.
        with gr.Column(scale=2):
            task_input = gr.Textbox(label="Enter Coding Task", placeholder="Ask me to write code...", lines=3)
            submit_btn = gr.Button("🚀 Start Debug Cycle", variant="primary")
            gr.Markdown("### 🧪 Chaos Laboratory (Click to test)")
            with gr.Row():
                btn_type = gr.Button("Type Mismatch")
                btn_logic = gr.Button("Logic Error")
                btn_api = gr.Button("Object Error")
            output_code = gr.Code(label="Final Corrected Code", language="python", lines=12)

    # UI Interaction Logic (Restored)
    # Preset buttons just populate the task box with known-broken snippets.
    btn_type.click(lambda: "Fix this: data = ['10', 20]; print(sum(data))", outputs=task_input)
    btn_logic.click(lambda: "Calculate 10% tax on $100, but I wrote: total = 100 - (100 * 0.1). Fix the logic.", outputs=task_input)
    btn_api.click(lambda: "Fix this: x = {'vals': [1,2]}; print(sum(x))", outputs=task_input)
    submit_btn.click(run_ui_logic, inputs=task_input, outputs=[output_code, log_display])

if __name__ == "__main__":
    demo.launch()