cowrycode committed on
Commit
088a147
·
verified ·
1 Parent(s): 6042d13

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -1
app.py CHANGED
@@ -4,18 +4,37 @@ import requests
4
  import inspect
5
  import pandas as pd
6
 
 
 
 
 
 
 
7
  # (Keep Constants as is)
8
  # --- Constants ---
9
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
10
 
11
  # --- Basic Agent Definition ---
 
12
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
13
class BasicAgent:
    """Placeholder agent: logs the incoming question and always replies
    with the same canned string."""

    def __init__(self):
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        # Log only a short preview of the question to keep output readable.
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        answer = "This is a default answer."
        print(f"Agent returning fixed answer: {answer}")
        return answer
21
 
 
4
  import inspect
5
  import pandas as pd
6
 
7
+ from llama_index.core.workflow import Context
8
+ from llama_index.core.agent.workflow import AgentWorkflow
9
+ from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
10
+ from youtube_tool import youtube_transcript_tool, youtube_transcript_snippet_tool
11
+ from multiple_tools import round_to_two_decimals_tool, text_inverter_tool, google_web_search_tool, wikipedia_search_tool
12
+
13
  # (Keep Constants as is)
14
  # --- Constants ---
15
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
16
 
17
  # --- Basic Agent Definition ---
18
+
19
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
20
class BasicAgent:
    """Agent that answers questions via a LlamaIndex AgentWorkflow.

    The workflow wires a HuggingFace-hosted DeepSeek model to a set of
    search/utility tools; conversation state is carried in a Context that
    is shared across calls.
    """

    def __init__(self):
        print("BasicAgent initialized.")
        # NOTE(review): relies on `os` being imported at the top of the file
        # (not visible in this diff hunk) and on HF_TOKEN being set in the
        # environment — confirm both.
        self.api_key = os.getenv("HF_TOKEN")
        self.llm = HuggingFaceInferenceAPI(
            model_name="deepseek-ai/DeepSeek-R1-0528",
            token=self.api_key,
            provider="auto",
        )
        self.llamaindex_agent = AgentWorkflow.from_tools_or_functions(
            [
                wikipedia_search_tool,
                youtube_transcript_tool,
                youtube_transcript_snippet_tool,
                round_to_two_decimals_tool,
                text_inverter_tool,
                google_web_search_tool,
            ],
            llm=self.llm,
        )
        # One Context shared across __call__ invocations so the agent keeps
        # conversation state between questions.
        self.context = Context(self.llamaindex_agent)

    async def _arun(self, question: str):
        """Run the workflow inside a running event loop and await its result."""
        return await self.llamaindex_agent.run(question, ctx=self.context)

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        # Bug fixes vs. the previous revision:
        #  * `ctx=context` referenced an undefined name (NameError at call
        #    time) — the context built in __init__ is `self.context`.
        #  * AgentWorkflow.run() returns an awaitable workflow handler, not a
        #    string, so it must be awaited and the result coerced to str to
        #    honor the declared `-> str` return type.
        import asyncio  # local import keeps this fix self-contained

        fixed_answer = str(asyncio.run(self._arun(question)))
        print(f"Agent returning fixed answer: {fixed_answer}")
        return fixed_answer
40