# MitraVerse — Streamlit + LangGraph end-to-end code assistant.
import contextlib
import io
import os
import re
import sys
import traceback
from typing import List, Optional, TypedDict

import streamlit as st
from dotenv import load_dotenv
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.runnables import Runnable
from langchain_together import ChatTogether
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
# Load environment variables (ChatTogether reads TOGETHER_API_KEY from .env).
load_dotenv()

# Single chat model shared by every graph node below.
llm = ChatTogether(model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free")
# Define the graph state
class GraphState(TypedDict):
    """State dict threaded through the Generate -> Execute -> Explain pipeline."""

    input: str  # raw user request from the Streamlit form
    messages: List[HumanMessage | AIMessage]  # running chat transcript
    code: Optional[str]  # LLM-generated Python source
    execution_result: Optional[str]  # captured stdout/stderr or traceback text
    explanation: Optional[str]  # LLM prose explanation of the code
| # === Node 1: Generate Code === | |
| def generate_code(state: GraphState) -> GraphState: | |
| prompt = f"""You are a senior Python developer. | |
| Generate Python code for the following user request. Just return the code only, no explanation. | |
| Request: {state['input']} | |
| """ | |
| messages = state["messages"] + [HumanMessage(content=prompt)] | |
| response = llm.invoke(messages) | |
| return { | |
| **state, | |
| "messages": messages + [AIMessage(content=response.content)], | |
| "code": response.content | |
| } | |
# === Node 2: Execute Code ===
def execute_code(state: "GraphState") -> "GraphState":
    """LangGraph node: run ``state['code']`` and capture its output.

    Stores a human-readable summary in ``execution_result``: captured stdout
    (or a placeholder when there is none), any stderr text, or the full
    traceback when the code raises.
    """
    code = state.get("code", "")
    out_buf, err_buf = io.StringIO(), io.StringIO()
    try:
        # redirect_stdout/redirect_stderr restore the real streams even when
        # exec() raises; the original manual sys.stdout assignment leaked the
        # redirect (and left stdout pointing at a closed buffer) on error.
        with contextlib.redirect_stdout(out_buf), contextlib.redirect_stderr(err_buf):
            # SECURITY: exec() on LLM-generated code is arbitrary code
            # execution — only run this app in a sandboxed/trusted environment.
            exec(code, {})
        result = out_buf.getvalue() or "✅ Code executed without output."
        errors = err_buf.getvalue()
        if errors:
            result += "\n⚠️ Error:\n" + errors
    except Exception:
        result = "❌ Exception:\n" + traceback.format_exc()
    return {
        **state,
        "execution_result": result,
    }
# === Node 3: Explain Code ===
def explain_code(state: GraphState) -> GraphState:
    """LangGraph node: ask the LLM for a plain-language explanation of the code."""
    request = f"""You are a code explainer. Please explain the following Python code:
{state['code']}
"""
    history = [*state["messages"], HumanMessage(content=request)]
    reply = llm.invoke(history)
    updated = dict(state)
    updated["messages"] = [*history, AIMessage(content=reply.content)]
    updated["explanation"] = reply.content
    return updated
# === Build LangGraph ===
# Linear three-node pipeline: Generate_Code -> Execute_Code -> Explain_Code.
builder = StateGraph(GraphState)
builder.add_node("Generate_Code", generate_code)
builder.add_node("Execute_Code", execute_code)
builder.add_node("Explain_Code", explain_code)
builder.set_entry_point("Generate_Code")
builder.add_edge("Generate_Code", "Execute_Code")
builder.add_edge("Execute_Code", "Explain_Code")
builder.set_finish_point("Explain_Code")
# NOTE(review): MemorySaver is imported but compile() receives no checkpointer,
# so graph state is not persisted between invocations — confirm intent.
graph = builder.compile()
# === Streamlit App ===
st.set_page_config(page_title="🧠 MitraVerse", page_icon="🧠")
st.title("🧠 MitraVerse - LangGraph Code Assistant")

# Initialize session state with per-key defaults on first run.
_SESSION_DEFAULTS = {
    "chat_history": [],
    "latest_code": "",
    "latest_explanation": "",
    "execution_result": "",
}
for state_key, default_value in _SESSION_DEFAULTS.items():
    if state_key not in st.session_state:
        st.session_state[state_key] = default_value

# User input form
with st.container():
    with st.form("chat_form", clear_on_submit=True):
        user_input = st.text_input("Ask me anything", placeholder="e.g., Write a bubble sort in Python")
        submitted = st.form_submit_button("🚀 Run End-to-End")

# On submit, record the question and run the full graph pipeline.
if submitted and user_input:
    st.session_state.chat_history.append(HumanMessage(content=user_input))
    pipeline_input = {
        "messages": st.session_state.chat_history,
        "input": user_input,
        "code": "",
        "execution_result": "",
        "explanation": "",
    }
    outcome = graph.invoke(pipeline_input)
    st.session_state.latest_code = outcome["code"]
    st.session_state.execution_result = outcome["execution_result"]
    st.session_state.latest_explanation = outcome["explanation"]

# Show generated code
if st.session_state.latest_code:
    st.subheader("🧾 Generated Code")
    st.code(st.session_state.latest_code, language="python")
    st.download_button("📥 Download Code", st.session_state.latest_code, file_name="generated_code.py")

# Show execution result
if st.session_state.execution_result:
    st.subheader("🧪 Execution Result")
    st.text(st.session_state.execution_result)

# Show code explanation
if st.session_state.latest_explanation:
    st.subheader("💡 Code Explanation")
    st.markdown(st.session_state.latest_explanation)