YeeJun02 committed on
Commit
bf53594
·
verified ·
1 Parent(s): 1407289

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -5
app.py CHANGED
@@ -19,8 +19,10 @@ HF_TOKEN = os.getenv("HF_TOKEN")
19
  # PART 1: LLAMAINDEX AGENT
20
  # ==========================================
21
  li_llm = HuggingFaceInferenceAPI(
 
22
  model_name="Qwen/Qwen2.5-7B-Instruct",
23
  token=HF_TOKEN,
 
24
  )
25
 
26
  def get_tokyo_time() -> str:
@@ -30,7 +32,6 @@ def get_tokyo_time() -> str:
30
 
31
  li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]
32
 
33
- # Using positional argument for tools list to avoid TypeError
34
  li_agent = AgentWorkflow.from_tools_or_functions(
35
  li_tools,
36
  llm=li_llm,
@@ -38,7 +39,6 @@ li_agent = AgentWorkflow.from_tools_or_functions(
38
 
39
  async def chat_llama(message, history):
40
  try:
41
- # AgentWorkflow requires async execution
42
  result = await li_agent.run(user_msg=message)
43
  return str(result)
44
  except Exception as e:
@@ -49,7 +49,8 @@ async def chat_llama(message, history):
49
  # ==========================================
50
  smol_model = InferenceClientModel(
51
  model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
52
- token=HF_TOKEN
 
53
  )
54
 
55
  @tool
@@ -60,7 +61,6 @@ def weather_tool(location: str) -> str:
60
  """
61
  return f"The weather in {location} is currently sunny and 22°C."
62
 
63
- # Tool initialization happens here; ensure ddgs is in requirements.txt
64
  smol_agent = CodeAgent(
65
  model=smol_model,
66
  tools=[weather_tool, DuckDuckGoSearchTool()]
@@ -68,7 +68,6 @@ smol_agent = CodeAgent(
68
 
69
  def chat_smol(message, history):
70
  try:
71
- # CodeAgent .run() is synchronous
72
  response = smol_agent.run(message)
73
  return str(response)
74
  except Exception as e:
 
19
  # PART 1: LLAMAINDEX AGENT
20
  # ==========================================
21
  li_llm = HuggingFaceInferenceAPI(
22
+ # Explicitly using a model-id that is supported by Inference Providers
23
  model_name="Qwen/Qwen2.5-7B-Instruct",
24
  token=HF_TOKEN,
25
+ provider="together" # CRITICAL: Fixes the 404 by using Together AI backend
26
  )
27
 
28
  def get_tokyo_time() -> str:
 
32
 
33
  li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]
34
 
 
35
  li_agent = AgentWorkflow.from_tools_or_functions(
36
  li_tools,
37
  llm=li_llm,
 
39
 
40
  async def chat_llama(message, history):
41
  try:
 
42
  result = await li_agent.run(user_msg=message)
43
  return str(result)
44
  except Exception as e:
 
49
  # ==========================================
50
  smol_model = InferenceClientModel(
51
  model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
52
+ token=HF_TOKEN,
53
+ provider="together" # CRITICAL: Ensures the smolagents client also finds a valid provider
54
  )
55
 
56
  @tool
 
61
  """
62
  return f"The weather in {location} is currently sunny and 22°C."
63
 
 
64
  smol_agent = CodeAgent(
65
  model=smol_model,
66
  tools=[weather_tool, DuckDuckGoSearchTool()]
 
68
 
69
  def chat_smol(message, history):
70
  try:
 
71
  response = smol_agent.run(message)
72
  return str(response)
73
  except Exception as e: