Update app.py
Browse files
app.py
CHANGED
|
@@ -149,8 +149,8 @@ class BasicAgent:
 
         # InferenceClient diretto — usa la Serverless Inference API HF
         self.client = InferenceClient(
-            [removed line — content not captured in this extraction]
-            [removed line — content not captured in this extraction]
+            provider="sambanova", # gratuito, non usa il router a pagamento
+            api_key=os.getenv("HF_TOKEN"),
        )
 
        # Schema OpenAI dei tool per passarli al client

@@ -205,6 +205,7 @@ class BasicAgent:
        hf_messages = self._messages_to_hf_format([sys_msg] + state["messages"])
 
        response = self.client.chat_completion(
+            model="meta-llama/Llama-3.3-70B-Instruct",
            messages=hf_messages,
            tools=self.tools_schema,
            tool_choice="auto",