Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -3,7 +3,7 @@ import datetime
|
|
| 3 |
import yaml
|
| 4 |
import pytz
|
| 5 |
|
| 6 |
-
from smolagents import CodeAgent, DuckDuckGoSearchTool,
|
| 7 |
from tools.final_answer import FinalAnswerTool # keep your existing file
|
| 8 |
from Gradio_UI import GradioUI # our UI that unwraps FinalAnswerStep and shows images
|
| 9 |
|
|
@@ -39,7 +39,7 @@ final_answer = FinalAnswerTool()
|
|
| 39 |
# Model (works on Spaces)
|
| 40 |
# ---------------------------
|
| 41 |
# If the model is gated or you use a private Inference Endpoint, add a Space secret "HF_TOKEN".
|
| 42 |
-
model =
|
| 43 |
max_tokens=2096,
|
| 44 |
temperature=0.5,
|
| 45 |
model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|
|
|
|
| 3 |
import yaml
|
| 4 |
import pytz
|
| 5 |
|
| 6 |
+
from smolagents import CodeAgent, DuckDuckGoSearchTool, InferenceClientModel, load_tool, tool
|
| 7 |
from tools.final_answer import FinalAnswerTool # keep your existing file
|
| 8 |
from Gradio_UI import GradioUI # our UI that unwraps FinalAnswerStep and shows images
|
| 9 |
|
|
|
|
| 39 |
# Model (works on Spaces)
|
| 40 |
# ---------------------------
|
| 41 |
# If the model is gated or you use a private Inference Endpoint, add a Space secret "HF_TOKEN".
|
| 42 |
+
model = InferenceClientModel(
|
| 43 |
max_tokens=2096,
|
| 44 |
temperature=0.5,
|
| 45 |
model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|