# NOTE(review): removed web-capture artifacts (a "File size" header, commit
# SHAs, and a 1-134 line-number gutter) that preceded the actual Python source
# and made the file invalid Python.
import contextlib
import io
import os
import sys
import traceback
from typing import TypedDict, List, Optional

import streamlit as st
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, AIMessage
from langchain_core.runnables import Runnable
from langchain_together import ChatTogether
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, END, START

# Load environment variables from .env (presumably TOGETHER_API_KEY, which
# ChatTogether reads from the environment -- confirm against deployment setup).
load_dotenv()
# Single shared chat-model client used by every graph node below.
llm = ChatTogether(model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free")

# Define the graph state
class GraphState(TypedDict):
    """Shared state threaded through every node of the LangGraph pipeline.

    Keys:
        input: the user's original request text.
        messages: running chat transcript (Human/AI messages) sent to the LLM.
        code: LLM-generated Python source, set by generate_code.
        execution_result: captured stdout / error text, set by execute_code.
        explanation: natural-language description of the code, set by explain_code.
    """
    input: str
    messages: List[HumanMessage | AIMessage]
    code: Optional[str]
    execution_result: Optional[str]
    explanation: Optional[str]

# === Node 1: Generate Code ===
def generate_code(state: GraphState) -> GraphState:
    """Ask the LLM for code satisfying state['input'].

    Appends the prompt and the model's reply to the transcript and stores the
    raw reply text under 'code'. Returns a new state dict; the input state is
    not mutated.
    """
    request = f"""You are a senior Python developer.
Generate Python code for the following user request. Just return the code only, no explanation.

Request: {state['input']}
"""
    history = state["messages"] + [HumanMessage(content=request)]
    reply = llm.invoke(history)
    updated = dict(state)
    updated["messages"] = history + [AIMessage(content=reply.content)]
    updated["code"] = reply.content
    return updated

# === Node 2: Execute Code ===
def execute_code(state: GraphState) -> GraphState:
    code = state.get("code", "")
    try:
        buffer = io.StringIO()
        with io.StringIO() as buf, io.StringIO() as err_buf:
            sys.stdout = buf
            sys.stderr = err_buf
            exec(code, {})
            output = buf.getvalue()
            error = err_buf.getvalue()
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        result = output if output else "✅ Code executed without output."
        if error:
            result += "\n⚠️ Error:\n" + error
    except Exception:
        result = "❌ Exception:\n" + traceback.format_exc()
    return {
        **state,
        "execution_result": result
    }

# === Node 3: Explain Code ===
def explain_code(state: GraphState) -> GraphState:
    """Ask the LLM to explain state['code'].

    Appends the prompt and the model's reply to the transcript and stores the
    reply text under 'explanation'. Returns a new state dict; the input state
    is not mutated.
    """
    request = f"""You are a code explainer. Please explain the following Python code:
{state['code']}
"""
    history = state["messages"] + [HumanMessage(content=request)]
    reply = llm.invoke(history)
    updated = dict(state)
    updated["messages"] = history + [AIMessage(content=reply.content)]
    updated["explanation"] = reply.content
    return updated

# === Build LangGraph ===
# Linear three-stage pipeline: generate -> execute -> explain.
workflow = StateGraph(GraphState)
for node_name, node_fn in (
    ("Generate_Code", generate_code),
    ("Execute_Code", execute_code),
    ("Explain_Code", explain_code),
):
    workflow.add_node(node_name, node_fn)

# Edges from START / to END are equivalent to set_entry_point / set_finish_point.
workflow.add_edge(START, "Generate_Code")
workflow.add_edge("Generate_Code", "Execute_Code")
workflow.add_edge("Execute_Code", "Explain_Code")
workflow.add_edge("Explain_Code", END)

graph = workflow.compile()

# === Streamlit App ===
st.set_page_config(page_title="🧠 MitraVerse", page_icon="🧠")
st.title("🧠 MitraVerse - LangGraph Code Assistant")

# Seed session-state slots on first run only: the chat history is a list,
# the three "latest result" slots are empty strings.
_defaults = {
    "chat_history": [],
    "latest_code": "",
    "latest_explanation": "",
    "execution_result": "",
}
for _key, _default in _defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _default

# User input form: one submit drives the whole generate->execute->explain graph.
with st.container():
    with st.form("chat_form", clear_on_submit=True):
        user_input = st.text_input("Ask me anything", placeholder="e.g., Write a bubble sort in Python")
        submitted = st.form_submit_button("🚀 Run End-to-End")

        if submitted and user_input:
            st.session_state.chat_history.append(HumanMessage(content=user_input))
            initial_state = dict(
                messages=st.session_state.chat_history,
                input=user_input,
                code="",
                execution_result="",
                explanation="",
            )
            final_state = graph.invoke(initial_state)
            # Copy each graph output into its session-state display slot.
            for slot, result_key in (
                ("latest_code", "code"),
                ("execution_result", "execution_result"),
                ("latest_explanation", "explanation"),
            ):
                st.session_state[slot] = final_state[result_key]

# Show generated code (with a download button for the raw source)
generated = st.session_state.latest_code
if generated:
    st.subheader("🧾 Generated Code")
    st.code(generated, language="python")
    st.download_button("📥 Download Code", generated, file_name="generated_code.py")

# Show execution result
run_output = st.session_state.execution_result
if run_output:
    st.subheader("🧪 Execution Result")
    st.text(run_output)

# Show code explanation
explanation = st.session_state.latest_explanation
if explanation:
    st.subheader("💡 Code Explanation")
    st.markdown(explanation)