Spaces:
Sleeping
Sleeping
zasharepw77
committed on
Commit
·
2dea033
1
Parent(s):
5e23b87
Локальная ollama нормально подцепила модельку
Browse files
app.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool,
|
| 2 |
import datetime
|
| 3 |
import requests
|
| 4 |
import json
|
|
@@ -103,6 +103,18 @@ model = HfApiModel(
|
|
| 103 |
token=os.getenv('hf_token'),
|
| 104 |
)
|
| 105 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 106 |
# print("Model Image")
|
| 107 |
# Import tool from Hub
|
| 108 |
# image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
|
|
|
|
| 1 |
+
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool, LiteLLMModel
|
| 2 |
import datetime
|
| 3 |
import requests
|
| 4 |
import json
|
|
|
|
| 103 |
token=os.getenv('hf_token'),
|
| 104 |
)
|
| 105 |
|
| 106 |
+
'''
|
| 107 |
+
# Локальная модель:
|
| 108 |
+
# ollama = Запустился нормально, но надо модельку нормальную под GPU
|
| 109 |
+
model = LiteLLMModel(
|
| 110 |
+
model_id="ollama/smollm:360m",
|
| 111 |
+
max_tokens=2096,
|
| 112 |
+
temperature=0.6,
|
| 113 |
+
api_base="http://localhost:11434",
|
| 114 |
+
num_ctx=8192
|
| 115 |
+
)
|
| 116 |
+
'''
|
| 117 |
+
|
| 118 |
# print("Model Image")
|
| 119 |
# Import tool from Hub
|
| 120 |
# image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
|