Bshraman committed on
Commit
5239ba0
·
verified ·
1 Parent(s): af48ad4

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +8 -10
agent.py CHANGED
@@ -4,8 +4,8 @@ from dotenv import load_dotenv
4
  from langgraph.graph import START, StateGraph, MessagesState
5
  from langgraph.prebuilt import tools_condition
6
  from langgraph.prebuilt import ToolNode
7
- #from langchain_google_genai import ChatGoogleGenerativeAI
8
- #from langchain_groq import ChatGroq
9
  from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
10
  from langchain_community.tools.tavily_search import TavilySearchResults
11
  from langchain_community.document_loaders import WikipediaLoader
@@ -154,14 +154,12 @@ tools = [
154
  def build_graph(provider: str = "groq"):
155
  """Build the graph"""
156
  # Load environment variables from .env file
157
- # if provider == "google":
158
- # # Google Gemini
159
- # llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
160
- # elif provider == "groq":
161
- # # Groq https://console.groq.com/docs/models
162
- # llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
163
- if provider == "openai":
164
- llm = ChatOpenAI(model="gpt-4",temperature=0)
165
  elif provider == "huggingface":
166
  # TODO: Add huggingface endpoint
167
  llm = ChatHuggingFace(
 
4
  from langgraph.graph import START, StateGraph, MessagesState
5
  from langgraph.prebuilt import tools_condition
6
  from langgraph.prebuilt import ToolNode
7
+ from langchain_google_genai import ChatGoogleGenerativeAI
8
+ from langchain_groq import ChatGroq
9
  from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
10
  from langchain_community.tools.tavily_search import TavilySearchResults
11
  from langchain_community.document_loaders import WikipediaLoader
 
154
  def build_graph(provider: str = "groq"):
155
  """Build the graph"""
156
  # Load environment variables from .env file
157
+ if provider == "google":
158
+ # Google Gemini
159
+ llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
160
+ elif provider == "groq":
161
+ # Groq https://console.groq.com/docs/models
162
+ llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
 
 
163
  elif provider == "huggingface":
164
  # TODO: Add huggingface endpoint
165
  llm = ChatHuggingFace(