add HF endpoint
Browse files
agent.py
CHANGED
|
@@ -163,9 +163,13 @@ def build_graph(provider: str = "huggingface"):
|
|
| 163 |
# llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
|
| 164 |
if provider == "huggingface":
|
| 165 |
# TODO: Add huggingface endpoint
|
|
|
|
|
|
|
| 166 |
llm = ChatHuggingFace(
|
| 167 |
llm=HuggingFaceEndpoint(
|
| 168 |
-
url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
|
|
|
|
|
|
|
| 169 |
temperature=0,
|
| 170 |
),
|
| 171 |
)
|
|
|
|
| 163 |
# llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
|
| 164 |
if provider == "huggingface":
|
| 165 |
# TODO: Add huggingface endpoint
|
| 166 |
+
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
|
| 167 |
+
repo_id = "Meta-DeepLearning/llama-2-7b-chat-hf"
|
| 168 |
llm = ChatHuggingFace(
|
| 169 |
llm=HuggingFaceEndpoint(
|
| 170 |
+
# url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
|
| 171 |
+
repo_id=repo_id,
|
| 172 |
+
huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
|
| 173 |
temperature=0,
|
| 174 |
),
|
| 175 |
)
|