Anshini committed on
Commit
5d62236
·
verified ·
1 Parent(s): ca458f9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +86 -13
app.py CHANGED
@@ -15,6 +15,12 @@ from tools import execute_python_code, web_search, deep_think
15
  import io
16
  import contextlib
17
  import traceback
 
 
 
 
 
 
18
  # Load environment
19
  load_dotenv()
20
  # os.environ["TAVILY_API_KEY"] = os.getenv("TAVILY_API_KEY")
@@ -50,18 +56,81 @@ def ai_assistance(state: State):
50
  result =code_generator.invoke(state["messages"])
51
  return {"messages": state['messages']+[result]}
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  def agent_node(state: State):
54
- # Use your LLM here (e.g., Together, OpenAI, etc.)
55
- model = ChatGoogleGenerativeAI(model = "gemini-2.0-flash-001").bind_tools(tools)
56
- follow_up_prompt = "Break down this task into subtasks and ask follow-up questions if needed:\n\n"
57
- last_user_msg = state["messages"][-1].content
58
- full_prompt = follow_up_prompt + last_user_msg
59
 
60
- response = model.invoke(full_prompt)
 
 
61
 
62
- return {"messages": state["messages"] + [AIMessage(content=response)]}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
 
64
-
65
  import time
66
  def generate_code(state: State):
67
  user_prompt = state["input"]
@@ -203,18 +272,22 @@ def router(state):
203
  else:
204
  return "AI_Assistance"
205
  builder = StateGraph(State)
 
 
206
  builder.add_node("AI_Assistance", ai_assistance)
 
207
  builder.add_node("Generate_Code", generate_code)
208
- # builder.add_node("LLM_Agent", agent_node)
209
  builder.add_node("Code_Explainer", explain_code)
210
 
 
211
  builder.set_entry_point("AI_Assistance")
212
 
213
- builder.add_edge("AI_Assistance", "Generate_Code")
214
- # builder.add_edge("Generate_Code", "LLM_Agent")
215
- # builder.add_edge("LLM_Agent", "Code_Explainer")
216
- builder.add_edge("Generate_Code","Code_Explainer")
217
  builder.add_edge("Code_Explainer", END)
 
218
  graph = builder.compile(checkpointer=memory)
219
  # Streamlit UI setup
220
  st.set_page_config(page_title="MitraVerse", layout="wide")
 
15
  import io
16
  import contextlib
17
  import traceback
18
+ from langchain_core.messages import AIMessage
19
+ from langchain_google_genai import ChatGoogleGenerativeAI
20
+ from langchain.prompts import PromptTemplate
21
+ from langchain.chains import LLMChain
22
+ from langchain_core.messages import AIMessage
23
+ from typing import List
24
  # Load environment
25
  load_dotenv()
26
  # os.environ["TAVILY_API_KEY"] = os.getenv("TAVILY_API_KEY")
 
56
  result =code_generator.invoke(state["messages"])
57
  return {"messages": state['messages']+[result]}
58
 
59
+ # def agent_node(state: State):
60
+ # # Use your LLM here (e.g., Together, OpenAI, etc.)
61
+ # model = ChatGoogleGenerativeAI(model = "gemini-2.0-flash-001").bind_tools(tools)
62
+ # follow_up_prompt = "Break down this task into subtasks and ask follow-up questions if needed:\n\n"
63
+ # last_user_msg = state["messages"][-1].content
64
+ # full_prompt = follow_up_prompt + last_user_msg
65
+
66
+ # response = model.invoke(full_prompt)
67
+
68
+ # return {"messages": state["messages"] + [AIMessage(content=response)]}
69
+
70
# LLM used for subtask decomposition.
# NOTE(review): ChatGroq is not among the imports added in this change —
# confirm it is imported elsewhere in the file. temperature=0 presumably
# for deterministic decomposition output — confirm intent.
llm = ChatGroq(temperature=0)

# Template to extract subtasks from the user's input.
# The "---" fences and bullet format below are what
# parse_subtasks_and_questions() expects to parse.
subtask_prompt = PromptTemplate.from_template(
    """You are an expert AI agent designer.

Given the user's goal:
"{user_goal}"

1. Break this goal into a clear list of subtasks (in bullet points).
2. If any clarification is needed, ask relevant follow-up questions.

Respond in this format:
---
Subtasks:
- ...
- ...
Follow-Up Questions (if any):
- ...
---"""
)

# Chain invoked by agent_node below.
# NOTE(review): LLMChain is deprecated in recent LangChain releases in
# favor of `subtask_prompt | llm` — left as-is to preserve behavior.
subtask_chain = LLMChain(llm=llm, prompt=subtask_prompt)
93
+
94
def agent_node(state: State):
    """Decompose the latest user message into subtasks and follow-ups.

    Sends the last message's content through ``subtask_chain``, parses the
    bullet-point response with ``parse_subtasks_and_questions``, and returns
    the conversation extended with the AI reply plus the parsed lists.

    Args:
        state: Graph state; ``state["messages"]`` must be non-empty and its
            last entry must expose ``.content``.

    Returns:
        dict with keys ``messages``, ``subtasks`` and
        ``follow_up_questions``.
    """
    user_input = state["messages"][-1].content

    # Ask the LLM to break the goal down and surface any clarifications.
    result = subtask_chain.invoke({"user_goal": user_input})
    response_text = result["text"]

    # Split the formatted response into the two bullet lists.
    subtasks, questions = parse_subtasks_and_questions(response_text)

    # Build a new message list rather than appending to state["messages"]
    # in place: LangGraph nodes should return state updates, and mutating
    # shared state can double-append the reply if the node is re-run.
    return {
        "messages": state["messages"] + [AIMessage(content=response_text)],
        "subtasks": subtasks,
        "follow_up_questions": questions,
    }
113
+
114
# Helper to parse the bullet lists out of the subtask-chain response.
def parse_subtasks_and_questions(text: str):
    """Parse the LLM response produced by ``subtask_prompt`` into lists.

    Expects the prompt's format::

        ---
        Subtasks:
        - ...
        Follow-Up Questions (if any):
        - ...
        ---

    Args:
        text: Raw LLM response text.

    Returns:
        tuple[list[str], list[str]]: (subtasks, follow_up_questions);
        both empty when no matching sections are found.
    """
    subtasks = []
    questions = []

    collecting = None  # which section the current bullets belong to
    for line in text.strip().splitlines():
        line = line.strip()
        if line.lower().startswith("subtasks:"):
            collecting = "subtasks"
        elif line.lower().startswith("follow-up questions"):
            collecting = "questions"
        elif line.startswith("-"):
            # The prompt's "---" delimiter lines also start with "-";
            # previously the closing delimiter was appended as a bogus
            # final question ("--"). Skip lines made only of dashes.
            if not line.strip("-"):
                continue
            if collecting == "subtasks":
                subtasks.append(line[1:].strip())
            elif collecting == "questions":
                questions.append(line[1:].strip())

    return subtasks, questions
133
 
 
134
  import time
135
  def generate_code(state: State):
136
  user_prompt = state["input"]
 
272
  else:
273
  return "AI_Assistance"
274
# Assemble the LangGraph pipeline. State, ai_assistance, generate_code,
# explain_code, memory and st are defined elsewhere in this file.
builder = StateGraph(State)

# Define Nodes
builder.add_node("AI_Assistance", ai_assistance)
builder.add_node("LLM_Agent", agent_node)
builder.add_node("Generate_Code", generate_code)
builder.add_node("Code_Explainer", explain_code)

# Entry Point
builder.set_entry_point("AI_Assistance")

# Define Flow: linear chain
# AI_Assistance -> LLM_Agent -> Generate_Code -> Code_Explainer -> END
# NOTE(review): the router() defined above is never wired in (no
# add_conditional_edges call) — confirm the fully linear flow is intended.
builder.add_edge("AI_Assistance", "LLM_Agent")
builder.add_edge("LLM_Agent", "Generate_Code")
builder.add_edge("Generate_Code", "Code_Explainer")
builder.add_edge("Code_Explainer", END)

# Compile with a checkpointer so conversation state persists across turns.
graph = builder.compile(checkpointer=memory)
# Streamlit UI setup
st.set_page_config(page_title="MitraVerse", layout="wide")