Carolzinha2010 committed on
Commit
434705e
·
verified ·
1 Parent(s): eaca903

Create app.py

Browse files

miau miau miauuuuuu

Files changed (1) hide show
  1. app.py +166 -168
app.py CHANGED
@@ -1,203 +1,172 @@
1
- # Move Gradio interface definition and launch outside the function
2
- with gr.Blocks(theme=gr.themes.Soft(), title="Basic Agent Evaluation Runner") as demo:
3
- gr.Markdown(
4
- """
5
- # Basic Agent Evaluation Runner
6
- This application fetches a set of questions from a scoring API,
7
- runs your custom agent against each question, and submits the answers for scoring.
8
-
9
- **Instructions:**
10
- 1. Ensure your agent logic is defined in the `BasicAgent` class above.
11
- 2. **Get a SerpAPI key and a Google AI API key and add them as environment variables in your runtime environment (e.g., as secrets in your Hugging Face Space settings).**
12
- 3. Log in to Hugging Face using the button below.
13
- 4. Click the "Run Evaluation & Submit All Answers" button.
14
- 5. The application will fetch questions, run your agent, submit answers, and display the results below.
15
- """
16
- )
17
- login_btn = gr.LoginButton()
18
-
19
- run_button = gr.Button("Run Evaluation & Submit All Answers")
20
 
21
- status_output = gr.Textbox(label="Run Status", interactive=False, lines=5)
22
- results_output = gr.DataFrame(label="Evaluation Results")
 
23
 
24
- run_button.click(
25
- run_and_submit_all,
26
- inputs=[login_btn], # Pass the profile from the login button
27
- outputs=[status_output, results_output]
28
- )
29
 
30
- # Ensure the app launches when the script is run
31
- if __name__ == "__main__":
32
- # --- Define the default API URL ---
33
- DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space" # Corrected API URL
34
 
35
 
36
- print("Application script started.") # Debugging print statement
 
 
 
37
 
38
- import os
39
- import gradio as gr
40
- import requests
41
- import inspect
42
- import pandas as pd
43
 
44
- # Import libraries for SerpAPI and Google Generative AI
45
- from serpapi import GoogleSearch
46
- import google.generativeai as genai
47
 
48
- # --- Get API Keys from Environment Variables ---
49
- # SERPAPI_API_KEY and GOOGLE_API_KEY should be set as secrets in your Hugging Face Space
50
- SERPAPI_API_KEY = os.getenv('SERPAPI_API_KEY')
51
- print(f"SERPAPI_API_KEY (first 5 chars): {SERPAPI_API_KEY[:5] if SERPAPI_API_KEY else 'None'}...") # Debugging API key
52
 
53
- GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
54
- print(f"GOOGLE_API_KEY (first 5 chars): {GOOGLE_API_KEY[:5] if GOOGLE_API_KEY else 'None'}...") # Debugging API key
55
 
 
 
 
 
 
 
 
56
 
57
- # --- Google Generative AI LLM Initialization ---
58
- print("Attempting to initialize Google Generative AI model...") # Debugging print before loading
 
 
 
59
 
60
- gemini_model = None # Initialize to None
 
 
61
 
62
- if not GOOGLE_API_KEY:
63
- print("Warning: GOOGLE_API_KEY environment variable not set. LLM will not be available.")
64
- else:
65
- try:
66
- # Configure the generative AI library
67
- genai.configure(api_key=GOOGLE_API_KEY)
68
- print("Google Generative AI configured.")
69
 
70
- # Initialize the Generative Model
71
- # Using a fast and efficient model like gemini-1.5-flash
72
- # You can explore other models like 'gemini-1.5-pro' for potentially better results
73
- gemini_model = genai.GenerativeModel('gemini-1.5-flash')
74
- print("Gemini model initialized successfully.") # Debugging print after successful init
75
 
76
- except Exception as e:
77
- print(f"An error occurred during Google Generative AI initialization: {e}")
78
- gemini_model = None # Ensure model is None if initialization fails
79
 
80
-
81
- # --- Web Search Function (using SerpAPI) ---
82
- def web_search(query: str) -> list[dict]:
83
- """
84
- Performs a web search using SerpAPI and returns relevant information.
85
-
86
- Args:
87
- query: The search query string.
88
-
89
- Returns:
90
  A list of dictionaries, where each dictionary represents a search result
91
  with keys 'title', 'snippet', and 'url'. Returns an empty list if no
92
  results are found or an error occurs.
93
  """
94
- print(f"web_search called with query: {query[:50]}...") # Debugging web_search call
95
- if not SERPAPI_API_KEY:
96
- print("SerpAPI key not found in environment variables.")
97
- return []
98
-
99
- params = {
100
- "q": query,
101
- "api_key": SERPAPI_API_KEY,
102
- "engine": "google", # Use Google search engine
103
- "num": 5 # Number of results to fetch
104
- }
105
- results = []
106
-
107
- try:
108
- search = GoogleSearch(params)
109
- search_results_dict = search.get_dict() # Get results as a dictionary
110
- print(f"SerpAPI raw response keys: {search_results_dict.keys()}") # Debugging response keys
111
-
112
- # Extract organic results
113
- if "organic_results" in search_results_dict:
114
- print(f"Found {len(search_results_dict['organic_results'])} organic results.") # Debugging result count
115
- for result in search_results_dict["organic_results"]:
116
- item = {
117
- 'title': result.get('title'),
118
- 'url': result.get('link'),
119
- 'snippet': result.get('snippet', 'No snippet available')
120
- }
121
- results.append(item)
122
- else:
123
- print("No 'organic_results' key found in SerpAPI response.")
124
- # Print the whole response if no organic_results are found for debugging
125
- # print(f"SerpAPI response (no organic results): {search_results_dict}")
126
 
127
 
128
- except Exception as e:
129
- print(f"An error occurred during SerpAPI web search: {e}")
130
 
131
- print(f"web_search returning {len(results)} results.") # Debugging return count
132
- return results
133
 
134
 
135
- # --- Basic Agent Definition (Updated to use Google LLM) ---
136
- class BasicAgent:
137
 
138
- def __init__(self):
139
- print("BasicAgent initialized.") # Debugging print before init
140
- # Check if LLM is loaded (optional but good practice)
141
- global gemini_model # Access global variable
142
- if gemini_model is None:
143
- print("Warning: Google Generative AI model not successfully loaded before agent initialization.")
144
- # The agent can still perform search but won't use the LLM for synthesis
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
145
  else:
146
- print("Google Generative AI model found and ready.") # Debugging print after successful init
147
-
148
-
149
- def __call__(self, question: str) -> str:
150
- print(f"Agent received question (first 50 chars): {question[:50]}...")
151
-
152
- # Simple logic to determine if a web search is needed
153
- question_lower = question.lower()
154
- search_keywords = ["what is", "how to", "where is", "who is", "when did", "define", "explain", "tell me about"]
155
- needs_search = any(keyword in question_lower for keyword in search_keywords) or "?" in question
156
- print(f"Needs search: {needs_search}") # Debugging search decision
157
-
158
- # --- Analyze question and refine search query ---
159
- search_query = question # Default search query is the original question
160
- if needs_search:
161
- print("Analyzing question for keywords and refining search query...")
162
- # A more refined approach: identify potential entities or key phrases
163
- # This is a simplified example; advanced agents might use NLP libraries (spaCy, NLTK)
164
- # or even the LLM itself to extract optimal search terms.
165
-
166
- # Simple approach: split by common question words and take the rest
167
- parts = question_lower.split("what is", 1)
168
  if len(parts) > 1:
169
  search_query = parts[1].strip()
170
  else:
171
- parts = question_lower.split("how to", 1)
172
- if len(parts) > 1:
173
- search_query = parts[1].strip()
174
- else:
175
- parts = question_lower.split("where is", 1)
176
  if len(parts) > 1:
177
  search_query = parts[1].strip()
178
  else:
179
- parts = question_lower.split("who is", 1)
180
- if len(parts) > 1:
181
- search_query = parts[1].strip()
182
- else:
183
- parts = question_lower.split("when did", 1)
184
- if len(parts) > 1:
185
- search_query = parts[1].strip()
186
- else:
187
- parts = question_lower.split("define", 1)
188
- if len(parts) > 1:
189
- search_query = parts[1].strip()
190
- else:
191
- parts = question_lower.split("explain", 1)
192
- if len(parts) > 1:
193
- search_query = parts[1].strip()
194
- else:
195
- parts = question_lower.split("tell me about", 1)
196
- if len(parts) > 1:
197
- search_query = parts[1].strip()
198
- else:
199
- # If no specific question keyword found, use the whole question
200
- search_query = question_lower.strip()
201
 
202
 
203
  # Optional: Add quotation marks for multi-word phrases if identified
@@ -435,5 +404,34 @@ Answer:"""
435
  results_df = pd.DataFrame(results_log)
436
  return status_message, results_df
437
 
 
 
 
 
 
 
 
438
 
439
- demo.launch(server_name="0.0.0.0") # Ensure binding to all interfaces
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import gradio as gr
import requests
import inspect
import pandas as pd

# Import libraries for SerpAPI and Google Generative AI
from serpapi import GoogleSearch
import google.generativeai as genai

# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"  # Corrected API URL

print("Application script started.")  # Debugging print statement

# --- Get API Keys from Environment Variables ---
# SERPAPI_API_KEY and GOOGLE_API_KEY should be set as secrets in your Hugging Face Space.
# SECURITY: log only the *presence* of each key — never echo key material
# (even a 5-char prefix of a secret does not belong in application logs).
SERPAPI_API_KEY = os.getenv('SERPAPI_API_KEY')
print(f"SERPAPI_API_KEY set: {bool(SERPAPI_API_KEY)}")  # Debugging API key presence

GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
print(f"GOOGLE_API_KEY set: {bool(GOOGLE_API_KEY)}")  # Debugging API key presence

# --- Google Generative AI LLM Initialization ---
print("Attempting to initialize Google Generative AI model...")  # Debugging print before loading

# Stays None when the key is missing or initialization fails; downstream code
# checks this before attempting LLM synthesis.
gemini_model = None

if not GOOGLE_API_KEY:
    print("Warning: GOOGLE_API_KEY environment variable not set. LLM will not be available.")
else:
    try:
        # Configure the generative AI library
        genai.configure(api_key=GOOGLE_API_KEY)
        print("Google Generative AI configured.")

        # gemini-1.5-flash is fast and efficient; 'gemini-1.5-pro' may give
        # better results at higher latency/cost.
        gemini_model = genai.GenerativeModel('gemini-1.5-flash')
        print("Gemini model initialized successfully.")  # Debugging print after successful init
    except Exception as e:
        print(f"An error occurred during Google Generative AI initialization: {e}")
        gemini_model = None  # Ensure model is None if initialization fails
50
# --- Web Search Function (using SerpAPI) ---
def web_search(query: str, num_results: int = 5) -> list[dict]:
    """
    Performs a web search using SerpAPI and returns relevant information.

    Args:
        query: The search query string.
        num_results: Maximum number of organic results to request
            (default 5, matching the previous hard-coded value).

    Returns:
        A list of dictionaries, where each dictionary represents a search result
        with keys 'title', 'snippet', and 'url'. Returns an empty list if no
        results are found or an error occurs.
    """
    print(f"web_search called with query: {query[:50]}...")  # Debugging web_search call
    if not SERPAPI_API_KEY:
        print("SerpAPI key not found in environment variables.")
        return []

    params = {
        "q": query,
        "api_key": SERPAPI_API_KEY,
        "engine": "google",   # Use Google search engine
        "num": num_results,   # Number of results to fetch
    }
    results = []

    try:
        search = GoogleSearch(params)
        search_results_dict = search.get_dict()  # Get results as a dictionary
        print(f"SerpAPI raw response keys: {search_results_dict.keys()}")  # Debugging response keys

        # Extract organic results
        if "organic_results" in search_results_dict:
            print(f"Found {len(search_results_dict['organic_results'])} organic results.")
            for result in search_results_dict["organic_results"]:
                results.append({
                    'title': result.get('title'),
                    'url': result.get('link'),
                    'snippet': result.get('snippet', 'No snippet available'),
                })
        else:
            print("No 'organic_results' key found in SerpAPI response.")
            # Print the whole response if no organic_results are found for debugging
            # print(f"SerpAPI response (no organic results): {search_results_dict}")
    except Exception as e:
        # Best-effort: network/API failures degrade to an empty result list
        # rather than crashing the agent run.
        print(f"An error occurred during SerpAPI web search: {e}")

    print(f"web_search returning {len(results)} results.")  # Debugging return count
    return results
102
 
103
 
104
+ # --- Basic Agent Definition (Updated to use Google LLM) ---
105
+ class BasicAgent:
106
 
107
def __init__(self):
    """Create the agent and report whether the shared Gemini model is usable."""
    print("BasicAgent initialized.")  # Debugging print before init
    # Check if LLM is loaded (optional but good practice)
    global gemini_model  # Access global variable
    if gemini_model is not None:
        print("Google Generative AI model found and ready.")  # Debugging print after successful init
    else:
        print("Warning: Google Generative AI model not successfully loaded before agent initialization.")
        # The agent can still perform search but won't use the LLM for synthesis
118
+ def __call__(self, question: str) -> str:
119
+ print(f"Agent received question (first 50 chars): {question[:50]}...")
120
+
121
+ # Simple logic to determine if a web search is needed
122
+ question_lower = question.lower()
123
+ search_keywords = ["what is", "how to", "where is", "who is", "when did", "define", "explain", "tell me about"]
124
+ needs_search = any(keyword in question_lower for keyword in search_keywords) or "?" in question
125
+ print(f"Needs search: {needs_search}") # Debugging search decision
126
+
127
+ # --- Analyze question and refine search query ---
128
+ search_query = question # Default search query is the original question
129
+ if needs_search:
130
+ print("Analyzing question for keywords and refining search query...")
131
+ # A more refined approach: identify potential entities or key phrases
132
+ # This is a simplified example; advanced agents might use NLP libraries (spaCy, NLTK)
133
+ # or even the LLM itself to extract optimal search terms.
134
+
135
+ # Simple approach: split by common question words and take the rest
136
+ parts = question_lower.split("what is", 1)
137
+ if len(parts) > 1:
138
+ search_query = parts[1].strip()
139
  else:
140
+ parts = question_lower.split("how to", 1)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
  if len(parts) > 1:
142
  search_query = parts[1].strip()
143
  else:
144
+ parts = question_lower.split("where is", 1)
145
+ if len(parts) > 1:
146
+ search_query = parts[1].strip()
147
+ else:
148
+ parts = question_lower.split("who is", 1)
149
  if len(parts) > 1:
150
  search_query = parts[1].strip()
151
  else:
152
+ parts = question_lower.split("when did", 1)
153
+ if len(parts) > 1:
154
+ search_query = parts[1].strip()
155
+ else:
156
+ parts = question_lower.split("define", 1)
157
+ if len(parts) > 1:
158
+ search_query = parts[1].strip()
159
+ else:
160
+ parts = question_lower.split("explain", 1)
161
+ if len(parts) > 1:
162
+ search_query = parts[1].strip()
163
+ else:
164
+ parts = question_lower.split("tell me about", 1)
165
+ if len(parts) > 1:
166
+ search_query = parts[1].strip()
167
+ else:
168
+ # If no specific question keyword found, use the whole question
169
+ search_query = question_lower.strip()
 
 
 
 
170
 
171
 
172
  # Optional: Add quotation marks for multi-word phrases if identified
 
404
  results_df = pd.DataFrame(results_log)
405
  return status_message, results_df
406
 
407
# Move Gradio interface definition and launch outside the function
with gr.Blocks(theme=gr.themes.Soft(), title="Basic Agent Evaluation Runner") as demo:
    gr.Markdown(
        """
        # Basic Agent Evaluation Runner
        This application fetches a set of questions from a scoring API,
        runs your custom agent against each question, and submits the answers for scoring.

        **Instructions:**
        1. Ensure your agent logic is defined in the `BasicAgent` class above.
        2. **Get a SerpAPI key and a Google AI API key and add them as environment variables in your runtime environment (e.g., as secrets in your Hugging Face Space settings).**
        3. Log in to Hugging Face using the button below.
        4. Click the "Run Evaluation & Submit All Answers" button.
        5. The application will fetch questions, run your agent, submit answers, and display the results below.
        """
    )
    login_btn = gr.LoginButton()

    run_button = gr.Button("Run Evaluation & Submit All Answers")

    status_output = gr.Textbox(label="Run Status", interactive=False, lines=5)
    results_output = gr.DataFrame(label="Evaluation Results")

    run_button.click(
        run_and_submit_all,
        inputs=[login_btn],  # Pass the profile from the login button
        outputs=[status_output, results_output],
    )

# Ensure the app launches only when the script is run directly (the previous
# version launched unconditionally on import, despite the comment claiming a
# run-as-script guard).
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0")  # Ensure binding to all interfaces