File size: 4,146 Bytes
5f71020
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
572b576
5f71020
572b576
 
5f71020
 
 
 
 
 
572b576
5f71020
 
572b576
5f71020
 
 
 
 
 
 
 
572b576
5f71020
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
572b576
5f71020
 
572b576
 
 
5f71020
572b576
5f71020
572b576
 
 
 
 
 
 
 
5f71020
572b576
 
 
 
 
5f71020
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
import os
import io
import contextlib
import gradio as gr
from typing import TypedDict, List
from huggingface_hub import InferenceClient
from langgraph.graph import StateGraph, END

# 1. BRAIN CONFIGURATION
# Hugging Face Inference client used by generate_node for chat completions.
# NOTE(review): if HF_TOKEN is unset, api_key is None and every request will
# fail at call time rather than at startup — confirm that is acceptable.
client = InferenceClient(api_key=os.environ.get("HF_TOKEN"))

# 2. STATE DEFINITION
class AgentState(TypedDict):
    """Shared state dict threaded through the LangGraph nodes."""
    task: str          # user-supplied coding task (prompt text)
    code: str          # latest code produced by the LLM
    error: str         # error message from the last execution; "" on success
    logs: List[str]    # human-readable progress log rendered in the UI
    iterations: int    # number of generation attempts made so far

# 3. GENERATE NODE
def _extract_code(text: str) -> str:
    """Pull Python source out of an LLM reply.

    Preference order: a ```python fenced block, then any plain ```
    fenced block, then the whole reply stripped. A leading reasoning
    preamble terminated by "</think>" (DeepSeek-R1 emits these) is
    discarded first so stray backticks inside it cannot confuse the
    fence parsing.
    """
    if "</think>" in text:
        text = text.split("</think>", 1)[-1]
    if "```python" in text:
        return text.split("```python")[-1].split("```")[0].strip()
    if "```" in text:
        # Fenced block without a language tag: take the first fenced span.
        parts = text.split("```")
        if len(parts) >= 3:
            return parts[1].strip()
    return text.strip()


def generate_node(state: "AgentState"):
    """LangGraph node: ask the LLM for code solving state['task'].

    On a retry (state['error'] non-empty) the previous failure is fed
    back into the prompt so the model can self-correct.

    Returns a new state dict with 'code' replaced by the extracted
    source and 'iterations' incremented by one.
    """
    state['logs'].append(f"🧠 [Attempt {state['iterations'] + 1}] Analyzing requirements...")
    prompt = f"System: You are an expert Python engineer. Solve this task: {state['task']}."
    
    if state['error']:
        state['logs'].append(f"πŸ” [Self-Correction] Fixing previous error: {state['error']}")
        prompt += f"\n\nCRITICAL: Your last code failed with this error: {state['error']}. Fix it. Ensure you provide the full code block."
    
    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-R1",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=2000
    )
    
    full_response = response.choices[0].message.content
    # The old inline split returned the raw reply (fences included) whenever
    # the model used ``` without a language tag; the helper handles that.
    code_only = _extract_code(full_response)
    
    return {**state, "code": code_only, "iterations": state['iterations'] + 1}

# 4. EXECUTE NODE
def execute_node(state: "AgentState"):
    """LangGraph node: execute state['code'] and record the outcome.

    Stdout is captured and appended to the log. On failure the error —
    prefixed with its exception class name, so the correction prompt
    sees e.g. "NameError: ..." rather than a bare message — is stored
    in state['error']; on success 'error' is cleared to "".

    SECURITY NOTE: exec() runs LLM-generated code with full interpreter
    privileges. Acceptable only in a sandboxed/demo environment; do not
    expose this to untrusted input on a shared host.
    """
    state['logs'].append("πŸ§ͺ Running verification tests...")
    output_capture = io.StringIO()
    try:
        with contextlib.redirect_stdout(output_capture):
            # Executing in a controlled scope
            exec(state['code'], {"__name__": "__main__"})
        result = output_capture.getvalue()
        state['logs'].append(f"βœ… Success! Output:\n{result}")
        return {**state, "error": ""}
    except Exception as e:
        # str(e) alone drops the exception type; include it so the
        # self-correction loop gets the full diagnostic.
        error_msg = f"{type(e).__name__}: {e}"
        state['logs'].append(f"❌ Failure: {error_msg}")
        return {**state, "error": error_msg}

# 5. ROUTER
def router(state: "AgentState", *, max_iterations: int = 3):
    """Decide the next graph edge after an execution attempt.

    Returns "end" when the last run succeeded (no error) or the retry
    budget is exhausted, otherwise "generate" to loop back for another
    attempt.

    max_iterations is keyword-only with a default of 3, so LangGraph's
    single-argument call sites are unaffected while the retry budget is
    no longer a magic number buried in the condition.
    """
    if not state['error'] or state['iterations'] >= max_iterations:
        return "end"
    return "generate"

# 6. BUILD THE GRAPH
# Linear generate -> execute pipeline with a conditional loop back to
# generate until the router reports "end" (success or retry budget spent).
builder = StateGraph(AgentState)
builder.add_node("generate", generate_node)   # LLM writes (or repairs) code
builder.add_node("execute", execute_node)     # run the code, capture errors
builder.set_entry_point("generate")
builder.add_edge("generate", "execute")
builder.add_conditional_edges("execute", router, {"generate": "generate", "end": END})
agent_app = builder.compile()

# 7. UI LOGIC
def run_ui_logic(user_task):
    """Run the agent graph on *user_task*.

    Returns a pair: (final generated code, the agent's log entries
    joined with blank lines) — matching the two Gradio outputs.
    """
    initial_state = {
        "task": user_task,
        "code": "",
        "error": "",
        "logs": [],
        "iterations": 0,
    }
    final_state = agent_app.invoke(initial_state)
    log_text = "\n\n".join(final_state["logs"])
    return final_state["code"], log_text

# 8. THE DASHBOARD
# Dashboard layout: agent log panel on the left; task input, demo-task
# buttons, and the final code viewer on the right.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# πŸ€– Auto-Debug Agent v2.0")
    
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### πŸ“œ Debug Monologue")
            # Read-only: filled with the agent's step-by-step log after a run.
            log_display = gr.Textbox(label="Agent Internal Logs", lines=22, interactive=False)
            
        with gr.Column(scale=2):
            task_input = gr.Textbox(label="Enter Coding Task", placeholder="Ask me to write code...", lines=3)
            submit_btn = gr.Button("πŸš€ Start Debug Cycle", variant="primary")
            
            gr.Markdown("### πŸ§ͺ Chaos Laboratory (Click to test)")
            # Canned buggy tasks that exercise the self-correction loop.
            with gr.Row():
                btn_type = gr.Button("Type Mismatch")
                btn_logic = gr.Button("Logic Error")
                btn_api = gr.Button("Object Error")
            
            output_code = gr.Code(label="Final Corrected Code", language="python", lines=12)

    # UI Interaction Logic (Restored)
    # Each demo button only pre-fills the task box; the user still presses Start.
    btn_type.click(lambda: "Fix this: data = ['10', 20]; print(sum(data))", outputs=task_input)
    btn_logic.click(lambda: "Calculate 10% tax on $100, but I wrote: total = 100 - (100 * 0.1). Fix the logic.", outputs=task_input)
    btn_api.click(lambda: "Fix this: x = {'vals': [1,2]}; print(sum(x))", outputs=task_input)
    
    submit_btn.click(run_ui_logic, inputs=task_input, outputs=[output_code, log_display])

if __name__ == "__main__":
    demo.launch()