rakesh-dvg committed on
Commit abb4651 · verified · 1 Parent(s): 010ce37

Delete app.py

Files changed (1)
  1. app.py +0 -128
app.py DELETED
@@ -1,128 +0,0 @@
- """LangGraph Agent (GROQ version without Supabase)"""
- import os
- from langgraph.graph import START, StateGraph, MessagesState
- from langgraph.prebuilt import tools_condition, ToolNode
- from langchain_google_genai import ChatGoogleGenerativeAI
- from langchain_groq import ChatGroq
- from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
- from langchain_community.tools.tavily_search import TavilySearchResults
- from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
- from langchain_core.messages import SystemMessage, HumanMessage
- from langchain_core.tools import tool
-
- @tool
- def multiply(a: int, b: int) -> int:
-     """Multiply two numbers."""
-     return a * b
-
- @tool
- def add(a: int, b: int) -> int:
-     """Add two numbers."""
-     return a + b
-
- @tool
- def subtract(a: int, b: int) -> int:
-     """Subtract the second number from the first."""
-     return a - b
-
- @tool
- def divide(a: int, b: int) -> float:
-     """Divide two numbers."""
-     if b == 0:
-         raise ValueError("Cannot divide by zero.")
-     return a / b
-
- @tool
- def modulus(a: int, b: int) -> int:
-     """Get the modulus (remainder) of two numbers."""
-     return a % b
-
- @tool
- def wiki_search(query: str) -> dict:
-     """Search Wikipedia for a query and return up to 2 results."""
-     search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
-     formatted_search_docs = "\n\n---\n\n".join(
-         [
-             f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}">\n{doc.page_content}\n</Document>'
-             for doc in search_docs
-         ])
-     return {"wiki_results": formatted_search_docs}
-
- @tool
- def web_search(query: str) -> dict:
-     """Search Tavily for a query and return up to 3 results."""
-     search_docs = TavilySearchResults(max_results=3).invoke(query)
-     formatted_search_docs = "\n\n---\n\n".join(
-         [
-             f'<Document source="{doc["url"]}">\n{doc["content"]}\n</Document>'
-             for doc in search_docs
-         ])
-     return {"web_results": formatted_search_docs}
-
- @tool
- def arxiv_search(query: str) -> dict:
-     """Search Arxiv and return up to 3 truncated results."""
-     search_docs = ArxivLoader(query=query, load_max_docs=3).load()
-     formatted_search_docs = "\n\n---\n\n".join(
-         [
-             f'<Document source="{doc.metadata.get("source", "")}" page="{doc.metadata.get("page", "")}">\n{doc.page_content[:1000]}\n</Document>'
-             for doc in search_docs
-         ])
-     return {"arxiv_results": formatted_search_docs}
-
- # Load system prompt
- with open("system_prompt.txt", "r", encoding="utf-8") as f:
-     system_prompt = f.read()
-
- sys_msg = SystemMessage(content=system_prompt)
-
- tools = [
-     multiply, add, subtract, divide, modulus,
-     wiki_search, web_search, arxiv_search
- ]
-
- def build_graph(provider: str = "groq"):
-     """Build the LangGraph agent using the specified LLM provider."""
-
-     if provider == "google":
-         llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
-
-     elif provider == "groq":
-         groq_api_key = os.getenv("GROQ_API_KEY")
-         if not groq_api_key:
-             raise ValueError("GROQ_API_KEY environment variable not set.")
-         llm = ChatGroq(model="qwen-qwq-32b", temperature=0, api_key=groq_api_key)
-
-     elif provider == "huggingface":
-         llm = ChatHuggingFace(
-             llm=HuggingFaceEndpoint(
-                 endpoint_url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
-                 temperature=0,
-             )
-         )
-
-     else:
-         raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
-
-     llm_with_tools = llm.bind_tools(tools)
-
-     def assistant(state: MessagesState):
-         return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}
-
-     builder = StateGraph(MessagesState)
-     builder.add_node("assistant", assistant)
-     builder.add_node("tools", ToolNode(tools))
-     builder.add_edge(START, "assistant")
-     builder.add_conditional_edges("assistant", tools_condition)
-     builder.add_edge("tools", "assistant")
-
-     return builder.compile()
-
- # For testing purposes
- if __name__ == "__main__":
-     question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
-     graph = build_graph(provider="groq")
-     messages = [HumanMessage(content=question)]
-     result = graph.invoke({"messages": messages})
-     for msg in result["messages"]:
-         print(msg.content)
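
For context, the deleted file wired up LangGraph's standard tool-calling loop: the assistant node invokes the model, tools_condition routes any emitted tool calls to the ToolNode, and the edge from "tools" back to "assistant" closes the loop until the model replies without tool calls. A minimal self-contained sketch of that same pattern, assuming langgraph and langchain-groq are installed and GROQ_API_KEY is set in the environment (the single add tool and the toy question are placeholders, not part of this commit):

from langchain_core.messages import HumanMessage
from langchain_core.tools import tool
from langchain_groq import ChatGroq
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition

@tool
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b

# Bind the tool so the model can emit structured tool calls.
llm = ChatGroq(model="qwen-qwq-32b", temperature=0).bind_tools([add])

def assistant(state: MessagesState):
    # One model step; tools_condition then inspects the reply for tool calls.
    return {"messages": [llm.invoke(state["messages"])]}

builder = StateGraph(MessagesState)
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode([add]))
builder.add_edge(START, "assistant")
builder.add_conditional_edges("assistant", tools_condition)  # -> "tools" or END
builder.add_edge("tools", "assistant")
graph = builder.compile()

result = graph.invoke({"messages": [HumanMessage(content="What is 17 + 25?")]})
print(result["messages"][-1].content)

The conditional edge is what makes this a loop rather than a pipeline: the graph only terminates when the assistant's reply contains no tool calls.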