AlexZaShared committed on
Commit
a2b460f
·
verified ·
1 Parent(s): 494f3dc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -17
app.py CHANGED
@@ -1,4 +1,4 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
  import datetime
3
  import requests
4
  import pytz
@@ -35,6 +35,7 @@ def get_current_time_in_timezone(timezone: str) -> str:
35
 
36
  final_answer = FinalAnswerTool()
37
 
 
38
  #model = LiteLLMModel(
39
  # model_id="gemini/gemini-2.0-flash-exp",
40
  # max_tokens=2096,
@@ -42,12 +43,12 @@ final_answer = FinalAnswerTool()
42
  # api_key=os.getenv("LITELLM_API_KEY")
43
  #)
44
 
45
- model = HfApiModel(
46
- max_tokens=2096,
47
- temperature=0.5,
48
- model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
49
- custom_role_conversions=None,
50
- )
51
 
52
  #model = HfApiModel(
53
  # max_tokens=2096,
@@ -57,13 +58,14 @@ model = HfApiModel(
57
  #)
58
 
59
  # ollama
60
- # model = LiteLLMModel(
61
- # model_id="ollama_chat/deepseek-r1:7b",
62
- # max_tokens=2096,
63
- # temperature=0.6,
64
- # api_base="http://localhost:11434",
65
- # num_ctx=8192
66
- # )
 
67
 
68
  # transformer
69
  # model = TransformersModel(
@@ -74,10 +76,9 @@ model = HfApiModel(
74
  # temperature=0.6,
75
  #)
76
 
77
- print("Model Image")
78
-
79
  # Import tool from Hub
80
- image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
81
 
82
  with open("prompts.yaml", 'r') as stream:
83
  prompt_templates = yaml.safe_load(stream)
 
1
+ from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool, OpenAIServerModel
2
  import datetime
3
  import requests
4
  import pytz
 
35
 
36
  final_answer = FinalAnswerTool()
37
 
38
+ # https://docs.litellm.ai/
39
  #model = LiteLLMModel(
40
  # model_id="gemini/gemini-2.0-flash-exp",
41
  # max_tokens=2096,
 
43
  # api_key=os.getenv("LITELLM_API_KEY")
44
  #)
45
 
46
+ #model = HfApiModel(
47
+ # max_tokens=2096,
48
+ # temperature=0.5,
49
+ # model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
50
+ # custom_role_conversions=None,
51
+ #)
52
 
53
  #model = HfApiModel(
54
  # max_tokens=2096,
 
58
  #)
59
 
60
  # ollama
61
+ model = OpenAIServerModel(
62
+ model_id="gemma2:2b", #"deepseek-coder-v2:16b",
63
+ max_tokens=2096,
64
+ temperature=0.6,
65
+ api_base="http://localhost:11434",
66
+ #num_ctx=8192,
67
+ api_key="",
68
+ )
69
 
70
  # transformer
71
  # model = TransformersModel(
 
76
  # temperature=0.6,
77
  #)
78
 
79
+ # print("Model Image")
 
80
  # Import tool from Hub
81
+ # image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
82
 
83
  with open("prompts.yaml", 'r') as stream:
84
  prompt_templates = yaml.safe_load(stream)