Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -25,9 +25,12 @@ class State(TypedDict):
|
|
| 25 |
messages: Annotated[list, add_messages]
|
| 26 |
name: str
|
| 27 |
birthday: str
|
|
|
|
|
|
|
| 28 |
|
| 29 |
# LLM
|
| 30 |
llm = ChatGroq(model="deepseek-r1-distill-llama-70b")
|
|
|
|
| 31 |
|
| 32 |
# Memory
|
| 33 |
memory = MemorySaver()
|
|
@@ -36,13 +39,56 @@ memory = MemorySaver()
|
|
| 36 |
def ai_assistance(state: State):
|
| 37 |
result = llm.invoke(state["messages"])
|
| 38 |
return {"messages": [result]}
|
| 39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
# Build LangGraph
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 41 |
builder = StateGraph(State)
|
| 42 |
builder.add_node("AI_Assistance", ai_assistance)
|
| 43 |
-
builder.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
builder.add_edge("AI_Assistance", END)
|
|
|
|
|
|
|
| 45 |
graph = builder.compile(checkpointer=memory)
|
|
|
|
| 46 |
|
| 47 |
# Streamlit UI setup
|
| 48 |
st.set_page_config(page_title="LangGraph Chatbot", layout="wide")
|
|
@@ -111,9 +157,22 @@ with st.container():
|
|
| 111 |
user_input = st.text_input("Ask me", label_visibility="collapsed", placeholder="Ask me Anything")
|
| 112 |
|
| 113 |
submitted = st.form_submit_button(label = "Send")
|
|
|
|
| 114 |
if submitted and user_input:
|
| 115 |
st.session_state.chat_history.append(HumanMessage(content=user_input))
|
|
|
|
| 116 |
config = {"configurable": {"thread_id": st.session_state.thread_id}}
|
| 117 |
-
|
| 118 |
-
|
| 119 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
# Fields of the graph state (TypedDict body; class header is above this chunk).
messages: Annotated[list, add_messages]  # chat transcript; add_messages merges node returns
name: str
birthday: str
input: str  # raw user prompt forwarded to the code-generation node
# Was `code = str`: that assigns the `str` type *object* to the name, which is
# not a valid TypedDict field declaration. Declare it as a string field.
code: str  # generated Python source produced by generate_code
|
| 30 |
|
| 31 |
# LLM
# Single Groq-hosted model shared by both graph nodes (chat + code generation).
llm = ChatGroq(model="deepseek-r1-distill-llama-70b")
# Removed `code_generator =` — a dangling, incomplete assignment (a SyntaxError
# that prevented the module from loading) which nothing else referenced.

# Memory
# Checkpointer that persists graph state per thread_id across invocations.
memory = MemorySaver()
|
|
|
|
| 39 |
def ai_assistance(state: State):
    """Chat node: run the accumulated conversation through the LLM.

    Returns a partial state update; the `add_messages` reducer on
    `State.messages` appends the reply to the transcript.
    """
    reply = llm.invoke(state["messages"])
    return {"messages": [reply]}
|
| 42 |
+
|
| 43 |
+
def generate_code(state: State):
    """Code-generation node: turn the raw user request into LangGraph code.

    Reads `state["input"]` (the untouched user prompt), prepends a fixed
    system prompt, and returns the model's raw text under the `code` key.
    """
    user_prompt = state["input"]

    system_prompt = """You are an expert Python coding assistant specializing in LangGraph applications.

Generate clean, working Python code for the user's request with these requirements:
1. The code MUST use the LangGraph framework (langgraph library).
2. Implement a proper flow graph using StateGraph.
3. Include all necessary imports and make sure the code is complete.
4. Include code to visualize the flow graph (using builder.show() or similar methods).
5. Structure the code with proper node functions, state definitions, and graph compilation.

IMPORTANT: Output ONLY the final Python code.
DO NOT include any explanations, comments, or text before, inside, or after the code.
Start the output with the necessary import statements (e.g., "from langgraph import StateGraph, State, Transition").
No additional text, no markdown fences, just the pure code.

User request:"""

    # The user request is concatenated directly after the system prompt.
    response = llm.invoke(system_prompt + user_prompt)

    return {"input": user_prompt, "code": response.content}
|
| 66 |
+
|
| 67 |
+
def router(state: State):
    """Pick the next graph node from the newest message.

    Messages mentioning code generation route to "Generate_Code";
    everything else falls through to the plain chat node.
    """
    text = state["messages"][-1].content.lower()
    code_markers = ("generate", "code", "script", "langgraph", "flow")
    if any(marker in text for marker in code_markers):
        return "Generate_Code"
    return "AI_Assistance"
|
| 72 |
+
|
| 73 |
# Build LangGraph
builder = StateGraph(State)
builder.add_node("AI_Assistance", ai_assistance)
builder.add_node("Generate_Code", generate_code)

# `router` is a routing *function*, not a node. The previous wiring registered
# it with add_node and added unconditional edges to BOTH branches, which fans
# out to both nodes on every turn and fails at runtime because a node must
# return a state-update dict, not a node-name string. Route conditionally at
# the entry point instead, mapping router's return value to the target node.
builder.set_conditional_entry_point(
    router,
    {"AI_Assistance": "AI_Assistance", "Generate_Code": "Generate_Code"},
)
builder.add_edge("AI_Assistance", END)
builder.add_edge("Generate_Code", END)

# Compile with the checkpointer so each thread_id keeps its own history.
graph = builder.compile(checkpointer=memory)
|
| 91 |
+
|
| 92 |
|
| 93 |
# Streamlit UI setup
# "wide" layout gives the chat transcript the full viewport width.
st.set_page_config(page_title="LangGraph Chatbot", layout="wide")
|
|
|
|
| 157 |
# Input row of the chat form. NOTE(review): this chunk sits inside a
# `with st.container():` (and presumably a `with st.form(...):`) whose header
# is outside this view — confirm the enclosing indentation against the caller.
user_input = st.text_input("Ask me", label_visibility="collapsed", placeholder="Ask me Anything")

submitted = st.form_submit_button(label = "Send")

if submitted and user_input:
    # Record the user's turn first so the graph's LLM call sees it.
    st.session_state.chat_history.append(HumanMessage(content=user_input))

    # thread_id scopes the MemorySaver checkpoint to this browser session.
    config = {"configurable": {"thread_id": st.session_state.thread_id}}
    state_input = {
        "messages": st.session_state.chat_history,
        "input": user_input,
    }

    result = graph.invoke(state_input, config=config)

    # If it’s a code response, append it as-is
    # NOTE(review): the checkpointer persists state per thread_id, so a
    # previous turn's "code" value may still be set here and shadow a plain
    # chat reply — confirm the chat node clears it, or key off the router.
    if result.get("code"):
        st.session_state.chat_history.append(AIMessage(content=result["code"]))
    else:
        st.session_state.chat_history.append(result["messages"][-1])

    # Force a rerun so the transcript above re-renders with the new messages.
    st.rerun()