dalinstone committed on
Commit
be9737d
·
verified ·
1 Parent(s): a55b667

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -43
app.py CHANGED
@@ -8,8 +8,8 @@ from typing import List, Dict, Optional
8
  import os
9
  from pathlib import Path
10
  import time
11
- # The docx and fitz imports are no longer needed for the core logic
12
 
 
13
  GRADING_RUBRIC = """
14
  GRADING RUBRIC (Total 100 points)
15
 
@@ -81,7 +81,6 @@ Professor's Summary:
81
 
82
  """
83
 
84
- # The prompt is now simpler, as the files are passed as arguments to the model, not as text.
85
  BASE_PROMPT_TEMPLATE = """
86
  You are about to assume a role. Carefully review the persona, context, task, and output requirements before proceeding.
87
 
@@ -131,34 +130,28 @@ class GradingResult:
131
  summary: Optional[str] = None
132
  error_message: Optional[str] = None
133
 
134
- # --- Core Logic Classes ---
135
- # EssayParser class is no longer needed and has been removed.
136
-
137
  class GeminiGrader:
138
  """Manages interaction with the Google Gemini API for grading."""
139
  def __init__(self, api_key: str):
140
- """Initializes the Gemini model."""
141
- try:
142
- genai.configure(api_key=api_key)
143
- self.model = genai.GenerativeModel(model_name="gemini-1.5-pro-latest")
144
- except Exception as e:
145
- raise ValueError(f"Failed to configure Gemini API: {e}")
146
 
147
  def grade_essay(self, student_paper_file: object, example_paper_file: Optional[object]) -> GradingResult:
148
- """
149
- Sends files to Gemini for grading and parses the JSON response.
150
- This method now takes API file objects directly.
151
- """
152
  prompt_text = BASE_PROMPT_TEMPLATE.format(GRADING_RUBRIC=GRADING_RUBRIC)
153
 
154
- # Build the content list for the API call
155
- # It includes the prompt text and the file objects
156
  content_list = [prompt_text, student_paper_file]
157
  if example_paper_file:
 
158
  content_list.append(example_paper_file)
159
 
160
  try:
161
- response = self.model.generate_content(content_list)
 
 
162
  cleaned_response = response.text.strip().replace("```json", "").replace("```", "")
163
  data = json.loads(cleaned_response)
164
 
@@ -166,6 +159,7 @@ class GeminiGrader:
166
  if not all(key in data for key in required_keys):
167
  raise KeyError("Model response missing required keys.")
168
 
 
169
  return GradingResult(
170
  file_name=student_paper_file.display_name,
171
  success=True,
@@ -174,44 +168,39 @@ class GeminiGrader:
174
  feedback=data["feedback"],
175
  summary=data["summary"]
176
  )
177
- except json.JSONDecodeError:
178
- return GradingResult(
179
- file_name=student_paper_file.display_name,
180
- success=False,
181
- error_message="Failed to parse the model's JSON response."
182
- )
183
  except Exception as e:
 
184
  return GradingResult(
185
  file_name=student_paper_file.display_name,
186
  success=False,
187
  error_message=f"An API or model error occurred: {str(e)}"
188
  )
189
 
190
- # --- Gradio Application ---
191
-
192
  def process_single_file(file_path: str, grader: GeminiGrader, example_paper_file_obj: Optional[object]) -> GradingResult:
193
- """
194
- Uploads a single student paper and calls the grader.
195
- This function is what runs in each thread of the ThreadPoolExecutor.
196
- """
197
  student_paper_file_obj = None
198
  try:
199
- # Upload the student paper file for this specific job
200
- student_paper_file_obj = genai.upload_file(path=file_path, display_name=os.path.basename(file_path))
 
201
  return grader.grade_essay(student_paper_file_obj, example_paper_file_obj)
202
  except Exception as e:
203
- return GradingResult(file_name=os.path.basename(file_path), success=False, error_message=str(e))
 
204
  finally:
205
- # Clean up the uploaded student paper file after processing
206
  if student_paper_file_obj:
 
207
  genai.delete_file(student_paper_file_obj.name)
 
208
 
209
  async def grade_papers_concurrently(
210
  files: List[gr.File], example_paper_file: gr.File, api_key: str, progress=gr.Progress(track_tqdm=True)
211
  ) -> (str, str):
212
- """
213
- The main asynchronous function that orchestrates the grading process.
214
- """
215
  start_time = time.time()
216
  if not api_key:
217
  raise gr.Error("Google API Key is required.")
@@ -222,15 +211,18 @@ async def grade_papers_concurrently(
222
  try:
223
  grader = GeminiGrader(api_key)
224
 
225
- # Upload the example paper ONCE if it exists
226
  if example_paper_file:
 
227
  progress(0, desc="Uploading example paper...")
228
  example_paper_file_obj = genai.upload_file(path=example_paper_file.name, display_name=os.path.basename(example_paper_file.name))
 
229
 
230
  file_paths = [file.name for file in files]
231
  total_files = len(file_paths)
 
232
 
233
  with ThreadPoolExecutor(max_workers=1) as executor:
 
234
  loop = asyncio.get_event_loop()
235
  tasks = [
236
  loop.run_in_executor(
@@ -244,11 +236,13 @@ async def grade_papers_concurrently(
244
  ]
245
 
246
  results = []
 
247
  for i, future in enumerate(asyncio.as_completed(tasks)):
248
- progress(i + 1, desc=f"Grading paper {i+1}/{total_files}...")
249
  result = await future
 
250
  results.append(result)
251
 
 
252
  # --- Format the final output ---
253
  successful_grades = [res for res in results if res.success]
254
  failed_grades = [res for res in results if not res.success]
@@ -275,13 +269,15 @@ async def grade_papers_concurrently(
275
  end_time = time.time()
276
  runtime = f"Total runtime: {end_time - start_time:.2f} seconds."
277
  status = f"Grading complete. {len(successful_grades)} papers graded successfully, {len(failed_grades)} failed."
 
 
278
  return output_markdown, f"{status}\n{runtime}"
279
 
280
  finally:
281
- # Clean up the uploaded example paper file at the very end
282
  if example_paper_file_obj:
 
283
  genai.delete_file(example_paper_file_obj.name)
284
-
285
 
286
  # --- Build the Gradio Interface ---
287
  with gr.Blocks(theme=gr.themes.Soft(), title="Nursing Essay Grader") as demo:
@@ -301,14 +297,14 @@ with gr.Blocks(theme=gr.themes.Soft(), title="Nursing Essay Grader") as demo:
301
  file_uploads = gr.File(
302
  label="Upload Essays to Grade",
303
  file_count="multiple",
304
- file_types=['.pdf', '.docx'], # Allow both types
305
  type="filepath",
306
  scale=2
307
  )
308
  example_paper_upload = gr.File(
309
  label="Upload Example Paper (Optional)",
310
  file_count="single",
311
- file_types=['.pdf', '.docx'], # Allow both types
312
  type="filepath",
313
  scale=1
314
  )
 
8
  import os
9
  from pathlib import Path
10
  import time
 
11
 
12
+ # NOTE: No changes were made to your prompts or rubric.
13
  GRADING_RUBRIC = """
14
  GRADING RUBRIC (Total 100 points)
15
 
 
81
 
82
  """
83
 
 
84
  BASE_PROMPT_TEMPLATE = """
85
  You are about to assume a role. Carefully review the persona, context, task, and output requirements before proceeding.
86
 
 
130
  summary: Optional[str] = None
131
  error_message: Optional[str] = None
132
 
 
 
 
133
  class GeminiGrader:
134
  """Manages interaction with the Google Gemini API for grading."""
135
  def __init__(self, api_key: str):
136
+ print("[DEBUG] Initializing GeminiGrader...")
137
+ genai.configure(api_key=api_key)
138
+ self.model = genai.GenerativeModel(model_name="gemini-1.5-pro-latest")
139
+ print("[DEBUG] GeminiGrader initialized successfully.")
 
 
140
 
141
  def grade_essay(self, student_paper_file: object, example_paper_file: Optional[object]) -> GradingResult:
142
+ """Sends files to Gemini for grading and parses the JSON response."""
143
+ print(f"[DEBUG] Inside grade_essay for student paper: {student_paper_file.display_name}")
 
 
144
  prompt_text = BASE_PROMPT_TEMPLATE.format(GRADING_RUBRIC=GRADING_RUBRIC)
145
 
 
 
146
  content_list = [prompt_text, student_paper_file]
147
  if example_paper_file:
148
+ print(f"[DEBUG] Example paper '{example_paper_file.display_name}' is being used.")
149
  content_list.append(example_paper_file)
150
 
151
  try:
152
+ print(f"[DEBUG] >>> Calling the Gemini model for grading... THIS IS THE LONG WAIT. <<<")
153
+ response = self.model.generate_content(content_list, request_options={'timeout': 600}) # 10 minute timeout
154
+ print(f"[DEBUG] <<< Model response received for {student_paper_file.display_name}. Parsing JSON.")
155
  cleaned_response = response.text.strip().replace("```json", "").replace("```", "")
156
  data = json.loads(cleaned_response)
157
 
 
159
  if not all(key in data for key in required_keys):
160
  raise KeyError("Model response missing required keys.")
161
 
162
+ print(f"[DEBUG] Successfully parsed response for {student_paper_file.display_name}.")
163
  return GradingResult(
164
  file_name=student_paper_file.display_name,
165
  success=True,
 
168
  feedback=data["feedback"],
169
  summary=data["summary"]
170
  )
 
 
 
 
 
 
171
  except Exception as e:
172
+ print(f"[DEBUG] !!! An error occurred in grade_essay: {e}")
173
  return GradingResult(
174
  file_name=student_paper_file.display_name,
175
  success=False,
176
  error_message=f"An API or model error occurred: {str(e)}"
177
  )
178
 
 
 
179
def process_single_file(file_path: str, grader: GeminiGrader, example_paper_file_obj: Optional[object]) -> GradingResult:
    """Upload one student paper to the Gemini Files API and grade it.

    Runs inside a ThreadPoolExecutor worker, so it must never leak an
    exception: any failure during upload or grading is converted into a
    failed GradingResult.

    Args:
        file_path: Local path of the student paper to grade.
        grader: Configured GeminiGrader that performs the grading call.
        example_paper_file_obj: Optional already-uploaded example paper
            (Gemini Files API object), shared across all workers; may be None.

    Returns:
        GradingResult with success=True and the model's scores on a clean
        run, or success=False plus error_message on any failure.
    """
    file_name = os.path.basename(file_path)
    print(f"[DEBUG] Starting process_single_file for: {file_name}")
    student_paper_file_obj = None
    try:
        print(f"[DEBUG] -> Uploading student paper '{file_name}' to Gemini Files API...")
        student_paper_file_obj = genai.upload_file(path=file_path, display_name=file_name)
        print(f"[DEBUG] -> Upload successful for '{file_name}'. Handing off to grade_essay.")
        return grader.grade_essay(student_paper_file_obj, example_paper_file_obj)
    except Exception as e:
        print(f"[DEBUG] !!! An error occurred in process_single_file for '{file_name}': {e}")
        return GradingResult(file_name=file_name, success=False, error_message=str(e))
    finally:
        # Best-effort cleanup of the remote file. This must be exception-safe:
        # an unguarded raise inside `finally` would discard the GradingResult
        # being returned (or mask the original exception), turning a harmless
        # cleanup hiccup into a lost grade.
        if student_paper_file_obj:
            try:
                print(f"[DEBUG] -> Cleaning up uploaded file: {student_paper_file_obj.name}")
                genai.delete_file(student_paper_file_obj.name)
                print(f"[DEBUG] -> Cleanup complete for {file_name}.")
            except Exception as cleanup_err:
                print(f"[DEBUG] !!! Cleanup failed for {file_name} (ignored): {cleanup_err}")
197
 
198
  async def grade_papers_concurrently(
199
  files: List[gr.File], example_paper_file: gr.File, api_key: str, progress=gr.Progress(track_tqdm=True)
200
  ) -> (str, str):
201
+ """The main asynchronous function that orchestrates the grading process."""
202
+ print("\n" + "="*50)
203
+ print("[DEBUG] grade_papers_concurrently initiated.")
204
  start_time = time.time()
205
  if not api_key:
206
  raise gr.Error("Google API Key is required.")
 
211
  try:
212
  grader = GeminiGrader(api_key)
213
 
 
214
  if example_paper_file:
215
+ print("[DEBUG] Example paper provided. Uploading it now...")
216
  progress(0, desc="Uploading example paper...")
217
  example_paper_file_obj = genai.upload_file(path=example_paper_file.name, display_name=os.path.basename(example_paper_file.name))
218
+ print(f"[DEBUG] Example paper uploaded successfully: {example_paper_file_obj.name}")
219
 
220
  file_paths = [file.name for file in files]
221
  total_files = len(file_paths)
222
+ print(f"[DEBUG] Preparing to grade {total_files} paper(s).")
223
 
224
  with ThreadPoolExecutor(max_workers=1) as executor:
225
+ print("[DEBUG] ThreadPoolExecutor started (max_workers=1).")
226
  loop = asyncio.get_event_loop()
227
  tasks = [
228
  loop.run_in_executor(
 
236
  ]
237
 
238
  results = []
239
+ print("[DEBUG] Waiting for grading tasks to complete...")
240
  for i, future in enumerate(asyncio.as_completed(tasks)):
 
241
  result = await future
242
+ print(f"[DEBUG] Task {i+1}/{total_files} completed for file: {result.file_name}")
243
  results.append(result)
244
 
245
+ print("[DEBUG] All grading tasks finished. Formatting final output.")
246
  # --- Format the final output ---
247
  successful_grades = [res for res in results if res.success]
248
  failed_grades = [res for res in results if not res.success]
 
269
  end_time = time.time()
270
  runtime = f"Total runtime: {end_time - start_time:.2f} seconds."
271
  status = f"Grading complete. {len(successful_grades)} papers graded successfully, {len(failed_grades)} failed."
272
+ print(f"[DEBUG] Process finished. {runtime}")
273
+ print("="*50 + "\n")
274
  return output_markdown, f"{status}\n{runtime}"
275
 
276
  finally:
 
277
  if example_paper_file_obj:
278
+ print("[DEBUG] Final cleanup: Deleting example paper from API.")
279
  genai.delete_file(example_paper_file_obj.name)
280
+ print("[DEBUG] Final cleanup complete.")
281
 
282
  # --- Build the Gradio Interface ---
283
  with gr.Blocks(theme=gr.themes.Soft(), title="Nursing Essay Grader") as demo:
 
297
  file_uploads = gr.File(
298
  label="Upload Essays to Grade",
299
  file_count="multiple",
300
+ file_types=['.pdf', '.docx'],
301
  type="filepath",
302
  scale=2
303
  )
304
  example_paper_upload = gr.File(
305
  label="Upload Example Paper (Optional)",
306
  file_count="single",
307
+ file_types=['.pdf', '.docx'],
308
  type="filepath",
309
  scale=1
310
  )