mirjam-m committed on
Commit
4a48ae7
·
1 Parent(s): 8851a45

dump agent llamaindex

Browse files
Files changed (1) hide show
  1. app.py +19 -8
app.py CHANGED
@@ -5,6 +5,9 @@ import gradio as gr
5
  import pandas as pd
6
  import requests
7
 
 
 
 
8
 
9
  # (Keep Constants as is)
10
  # --- Constants ---
@@ -15,13 +18,19 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
15
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
16
  class BasicAgent:
17
  def __init__(self):
18
- print("BasicAgent initialized.")
 
 
 
 
 
19
 
20
  def __call__(self, question: str) -> str:
21
  print(f"Agent received question (first 50 chars): {question[:50]}...")
22
- fixed_answer = "This is a default answer."
23
- print(f"Agent returning fixed answer: {fixed_answer}")
24
- return fixed_answer
 
25
 
26
 
27
  def run_and_submit_all(profile: gr.OAuthProfile | None):
@@ -55,6 +64,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
55
 
56
  # 2. Fetch Questions
57
  print(f"Fetching questions from: {questions_url}")
 
58
  try:
59
  response = requests.get(questions_url, timeout=15)
60
  response.raise_for_status()
@@ -63,13 +73,14 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
63
  print("Fetched questions list is empty.")
64
  return "Fetched questions list is empty or invalid format.", None
65
  print(f"Fetched {len(questions_data)} questions.")
66
- except requests.exceptions.RequestException as e:
67
- print(f"Error fetching questions: {e}")
68
- return f"Error fetching questions: {e}", None
69
  except requests.exceptions.JSONDecodeError as e:
70
  print(f"Error decoding JSON response from questions endpoint: {e}")
71
- print(f"Response text: {response.text[:500]}")
 
72
  return f"Error decoding server response for questions: {e}", None
 
 
 
73
  except Exception as e:
74
  print(f"An unexpected error occurred fetching questions: {e}")
75
  return f"An unexpected error occurred fetching questions: {e}", None
 
5
  import pandas as pd
6
  import requests
7
 
8
+ from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
9
+
10
+ hf_token = os.getenv("HF_TOKEN")
11
 
12
  # (Keep Constants as is)
13
  # --- Constants ---
 
18
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
19
  class BasicAgent:
20
  def __init__(self):
21
+ self.llm = HuggingFaceInferenceAPI(
22
+ model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
23
+ temperature=0.7,
24
+ max_tokens=100,
25
+ token=hf_token,
26
+ )
27
 
28
  def __call__(self, question: str) -> str:
29
  print(f"Agent received question (first 50 chars): {question[:50]}...")
30
+ response = self.llm.complete("Hello, how are you?")
31
+
32
+ print(f"Agent returning answer: {response}")
33
+ return response.text
34
 
35
 
36
  def run_and_submit_all(profile: gr.OAuthProfile | None):
 
64
 
65
  # 2. Fetch Questions
66
  print(f"Fetching questions from: {questions_url}")
67
+ response = None
68
  try:
69
  response = requests.get(questions_url, timeout=15)
70
  response.raise_for_status()
 
73
  print("Fetched questions list is empty.")
74
  return "Fetched questions list is empty or invalid format.", None
75
  print(f"Fetched {len(questions_data)} questions.")
 
 
 
76
  except requests.exceptions.JSONDecodeError as e:
77
  print(f"Error decoding JSON response from questions endpoint: {e}")
78
+ if response:
79
+ print(f"Response text: {response.text[:500]}")
80
  return f"Error decoding server response for questions: {e}", None
81
+ except requests.exceptions.RequestException as e:
82
+ print(f"Error fetching questions: {e}")
83
+ return f"Error fetching questions: {e}", None
84
  except Exception as e:
85
  print(f"An unexpected error occurred fetching questions: {e}")
86
  return f"An unexpected error occurred fetching questions: {e}", None