KVNAditya committed on
Commit
c6fbf27
·
verified ·
1 Parent(s): 1e9673e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -7
app.py CHANGED
@@ -23,12 +23,20 @@ final_answer = FinalAnswerTool()
23
  # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
24
  # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
25
 
26
- model = HfApiModel(
27
- max_tokens=2006,
28
- temperature=0.8,
29
- model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',# it is possible that this model may be overloaded
30
- custom_role_conversions=None,
31
- )
 
 
 
 
 
 
 
 
32
 
33
 
34
  with open("prompts.yaml", 'r') as stream:
@@ -36,7 +44,7 @@ with open("prompts.yaml", 'r') as stream:
36
 
37
  agent = CodeAgent(
38
  model=model,
39
- tools=[func__sciencia,DuckDuckGoSearchTool, final_answer], ## add your tools here (don't remove final answer)
40
  max_steps=8,
41
  verbosity_level=2,
42
  grammar=None,
 
23
  # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
24
  # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
25
 
26
# Build the LLM backend for the agent.
# Prefer the serverless Inference API model; if constructing it fails
# (e.g. the model is overloaded or unavailable), fall back to the shared
# dedicated Hugging Face endpoint, which also serves Qwen2.5-Coder.
try:
    model = HfApiModel(
        max_tokens=2006,
        temperature=0.8,
        model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # primary choice; may be overloaded
        custom_role_conversions=None,
    )
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C (KeyboardInterrupt) and
    # SystemExit still propagate instead of being swallowed by the fallback.
    model = HfApiModel(
        max_tokens=2006,
        temperature=0.8,
        model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',  # fallback endpoint; may also be overloaded
        custom_role_conversions=None,
    )
40
 
41
 
42
  with open("prompts.yaml", 'r') as stream:
 
44
 
45
  agent = CodeAgent(
46
  model=model,
47
+ tools=[DuckDuckGoSearchTool(), final_answer], ## add your tools here (don't remove final answer)
48
  max_steps=8,
49
  verbosity_level=2,
50
  grammar=None,