Spaces:
Runtime error
Runtime error
Update agent.py
Browse files
agent.py
CHANGED
@@ -13,7 +13,7 @@ from langchain_community.vectorstores import SupabaseVectorStore
 from langchain_core.messages import SystemMessage, HumanMessage
 from langchain_core.tools import tool
 from langchain.tools.retriever import create_retriever_tool
-
+from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

 load_dotenv()

@@ -136,7 +136,7 @@ def build_graph(provider: str = "huggingface"):
         # Groq https://console.groq.com/docs/models
         llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional : qwen-qwq-32b gemma2-9b-it
     elif provider == "huggingface":
-        llm =
+        llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
     else:
         raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
     # Bind tools to LLM