Anshini committed on
Commit
d4df579
Β·
verified Β·
1 Parent(s): 5901ddd

Create app1.py

Browse files
Files changed (1) hide show
  1. app1.py +214 -0
app1.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # app.py
2
+
3
+ import os
4
+ import streamlit as st
5
+ from dotenv import load_dotenv
6
+ from langchain_groq import ChatGroq
7
+ from langgraph.graph import StateGraph, END
8
+ from langgraph.checkpoint.memory import MemorySaver
9
+ from langchain_core.messages import AIMessage, HumanMessage
10
+ from typing import Annotated
11
+ from typing_extensions import TypedDict
12
+ from langchain_together import Together
13
+ from tools import execute_python_code
14
+ import io
15
+ import contextlib
16
+ import traceback
17
+ import time
18
+
19
# Load environment variables from a local .env file (no-op if the file is absent).
load_dotenv()

# Propagate the Groq key into the process environment only when it is set:
# os.environ rejects None values with a TypeError, so an unconditional
# assignment crashes the app on machines without GROQ_API_KEY configured.
_groq_api_key = os.getenv("GROQ_API_KEY")
if _groq_api_key:
    os.environ["GROQ_API_KEY"] = _groq_api_key
together_api_key = os.getenv("TOGETHER_API_KEY")
23
+
24
# LangGraph State definition
class State(TypedDict):
    # Shared state dict passed between graph nodes; each node returns a new
    # dict built from {**state, ...} rather than mutating in place.
    # NOTE(review): `Annotated[list, ...]` attaches Ellipsis as metadata, which
    # is not a reducer callable — LangGraph will overwrite, not append, this
    # field. If append semantics are intended, this should likely be
    # langgraph's `add_messages` reducer — confirm.
    messages: Annotated[list, ...]
    name: str
    birthday: str
    # Latest raw user prompt from the chat box.
    input: str
    # Python source produced by the Generate_Code node.
    code: str
    # LLM-written explanation produced by the Code_Explainer node.
    explanation: str
    # Captured stdout (or formatted traceback) from the Execute_Code node.
    execution_result: str
33
+
34
# LLM
# Single Together-hosted model instance reused by both the code-generation
# and the explanation nodes.
code_generator = Together(
    model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free",
    temperature=0.2,  # low temperature keeps generated code relatively deterministic
    max_tokens=1500,
    api_key=together_api_key,
)

# Memory
# In-process checkpointer handed to builder.compile() below so graph state
# can be persisted between invocations.
memory = MemorySaver()
44
+
45
# Define LangGraph Nodes
def generate_code(state: State):
    """Graph node: ask the LLM to generate LangGraph-based Python code.

    Reads ``state["input"]`` (the user's prompt), prepends the system prompt,
    and returns a new state dict with ``"code"`` set to the raw LLM response.

    Retries up to 3 times when the provider answers with an HTTP 503
    (service busy), backing off a little longer each attempt; any other
    error propagates immediately.

    Raises:
        RuntimeError: if all retry attempts hit a 503 (chained to the last
            underlying error).
    """
    user_prompt = state["input"]
    system_prompt = """You are an expert Python coding assistant specializing in LangGraph applications.
Generate clean, working Python code for the user's request with these requirements:
1. The code MUST use the LangGraph framework (langgraph library).
2. Implement a proper flow graph using StateGraph.
3. Include all necessary imports and make sure the code is complete.
4. Include code to visualize the flow graph.
5. Output ONLY the final Python code.

User request:"""
    full_prompt = system_prompt + user_prompt

    last_error = None
    for attempt in range(3):
        try:
            response = code_generator.invoke(full_prompt)
            return {**state, "code": str(response)}
        except Exception as e:
            if "503" in str(e):
                # Service busy: remember the error and back off before retrying.
                last_error = e
                time.sleep(2 * (attempt + 1))
            else:
                # Bare re-raise preserves the original traceback.
                raise
    raise RuntimeError("Code generation failed after retries.") from last_error
69
+
70
def execute_code(state: "State"):
    """Graph node: run the generated code and capture its output.

    Executes ``state["code"]`` in a fresh, empty global namespace and returns
    a new state dict whose ``"execution_result"`` holds the captured stdout
    and stderr, a success placeholder when there was no output, or a
    formatted traceback when execution raised.

    SECURITY: ``exec()`` on LLM-generated code is arbitrary code execution —
    this must only run in a sandboxed or fully trusted environment.
    """
    code = state.get("code", "")
    buffer = io.StringIO()
    try:
        # Capture stderr as well as stdout so warnings and error prints from
        # the executed code are not silently lost.
        with contextlib.redirect_stdout(buffer), contextlib.redirect_stderr(buffer):
            exec(code, {})
        output = buffer.getvalue() or "βœ… Code executed successfully with no output."
    except Exception:
        output = "❌ Execution Error:\n" + traceback.format_exc()
    return {**state, "execution_result": output}
80
+
81
def explain_code(state: "State"):
    """Graph node: ask the LLM to explain the previously generated code.

    Reads ``state["code"]`` and ``state["input"]``, builds an explanation
    prompt, and returns a new state dict with ``"explanation"`` set to the
    LLM response (normalized to ``str``, matching ``generate_code``).
    """
    code = state["code"]
    user_prompt = state["input"]
    # The prompt previously said "three parts" while listing four sections;
    # the count is corrected so the model is not given contradictory rubric.
    system_prompt = """You are a LangGraph expert who explains code clearly. Provide a detailed explanation of the code in four parts:
1. LANGGRAPH FLOW: Describe nodes, edges, and how the graph flows.
2. CODE FLOW: High-level architecture and logic.
3. STEP-BY-STEP: Explain each part of the code so a beginner can understand it.
4. VISUALIZATION: Instructions on how to run and see the graph output.
"""

    prompt = f"User Prompt: {user_prompt}\n\nCode:\n```python\n{code}\n```"
    full_prompt = system_prompt + prompt

    explanation = code_generator.invoke(full_prompt)
    return {**state, "explanation": str(explanation)}
96
+
97
# LangGraph setup
# Three-node graph; compiled with the in-memory checkpointer defined above.
builder = StateGraph(State)
builder.add_node("Generate_Code", generate_code)
builder.add_node("Execute_Code", execute_code)
builder.add_node("Code_Explainer", explain_code)
builder.set_entry_point("Generate_Code")
# NOTE(review): no edges are declared — Execute_Code and Code_Explainer are
# unreachable through the graph, and Generate_Code has no edge to END. The UI
# below also tries to target individual nodes via graph.invoke(..., node=...),
# which is not a supported invoke parameter; confirm the intended wiring.
graph = builder.compile(checkpointer=memory)
104
+
105
# Streamlit UI Setup
st.set_page_config(page_title="MitraVerse", layout="wide")

# Inject chat-bubble styling. The .stChatMessage/.user/.bot classes must match
# the class names interpolated into the raw HTML divs in the history loop below.
st.markdown("""
<style>
.stChatMessage {
    padding: 12px;
    margin-bottom: 12px;
    border-radius: 12px;
    max-width: 90%;
}
.user {
    background-color: #dcf8c6;
    align-self: flex-end;
}
.bot {
    background-color: #f1f0f0;
    align-self: flex-start;
}
.input-box {
    display: flex;
    align-items: center;
    gap: 0.5rem;
}
#floating-container {
    display: flex;
    align-items: center;
    justify-content: space-between;
    padding: 0.25rem 0.75rem;
    background-color: #f9f9f9;
    border-radius: 0.75rem;
    margin-top: 1rem;
    border: 1px solid #ccc;
}
.floating-popup {
    margin-top: 0.5rem;
    padding: 0.5rem;
    border-radius: 0.5rem;
    border: 1px solid #ccc;
    background-color: white;
}
</style>
""", unsafe_allow_html=True)

st.title("🧠 MitraVerse")
150
+
151
# Initialize session state
# Seed every key the UI reads, without clobbering values that already exist
# from a previous Streamlit rerun.
_session_defaults = {
    "chat_history": [],
    "latest_code": "",
    "latest_explanation": "",
    "latest_input": "",
}
for _key, _default in _session_defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _default
160
+
161
# Display chat history
# Human turns get the "user" bubble style; everything else renders as "bot".
for message in st.session_state.chat_history:
    if isinstance(message, HumanMessage):
        css_class = "user"
    else:
        css_class = "bot"
    bubble = f"<div class='stChatMessage {css_class}'>{message.content}</div>"
    st.markdown(bubble, unsafe_allow_html=True)
165
+
166
# Input form
# Form submission clears the text box and triggers a single rerun with
# `submitted` set, so the prompt is appended to history exactly once.
with st.form("chat_form", clear_on_submit=True):
    # NOTE(review): this opens and immediately closes #floating-container, so
    # the styled container wraps nothing — the input and button render outside
    # it. Confirm whether the widgets were meant to sit inside the div.
    st.markdown('<div id="floating-container">', unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)
    user_input = st.text_input("Ask me", label_visibility="collapsed", placeholder="Ask me Anything")
    submitted = st.form_submit_button(label="Send")

if submitted and user_input:
    # Record the turn and stash the prompt for the tool buttons below.
    st.session_state.chat_history.append(HumanMessage(content=user_input))
    st.session_state.latest_input = user_input
176
+
177
# Buttons to run each tool manually
# Each button feeds the stored prompt/code through one graph node and appends
# the result to the chat history.
if st.session_state.latest_input:
    if st.button("πŸ”¨ Generate Code"):
        state_input = {
            "messages": st.session_state.chat_history,
            "input": st.session_state.latest_input
        }
        # NOTE(review): compiled LangGraph apps expose invoke(input, config);
        # there is no `node=` keyword, so this call raises TypeError at
        # runtime. A checkpointer-compiled graph also requires
        # config={"configurable": {"thread_id": ...}}. Confirm the intended
        # per-node invocation strategy (same issue for the two calls below).
        result = graph.invoke(state_input, node="Generate_Code")
        st.session_state.latest_code = result["code"]
        st.session_state.chat_history.append(
            AIMessage(content="**πŸ’» Generated Code:**\n\n```python\n" + result["code"] + "\n```")
        )
        st.code(result["code"], language="python")

    # Runs the stored code through the Execute_Code node (same invoke caveat).
    if st.button("βš™οΈ Execute Code") and st.session_state.latest_code:
        state_input = {
            "code": st.session_state.latest_code,
            "input": st.session_state.latest_input
        }
        result = graph.invoke(state_input, node="Execute_Code")
        st.session_state.chat_history.append(
            AIMessage(content="**πŸ§ͺ Execution Result:**\n\n" + result["execution_result"])
        )
        st.text("πŸ§ͺ Execution Result:")
        st.text(result["execution_result"])

    # Asks the LLM to explain the stored code (same invoke caveat).
    if st.button("🧠 Explain Code") and st.session_state.latest_code:
        state_input = {
            "code": st.session_state.latest_code,
            "input": st.session_state.latest_input
        }
        result = graph.invoke(state_input, node="Code_Explainer")
        st.session_state.latest_explanation = result["explanation"]
        st.session_state.chat_history.append(
            AIMessage(content="**πŸ” Code Explanation:**\n\n" + result["explanation"])
        )
        with st.expander("πŸ” Code Explanation"):
            st.markdown(result["explanation"])