CodeCommunity committed on
Commit
649dabc
·
verified ·
1 Parent(s): 2340fec

Update app/main.py

Browse files
Files changed (1) hide show
  1. app/main.py +42 -53
app/main.py CHANGED
@@ -56,8 +56,12 @@ class ChatRequest(BaseModel):
56
  def calculate_repo_health(total_vulns: int, avg_maint: float) -> int:
57
  base_score = avg_maint * 10
58
  penalty = total_vulns * 8
59
- final_score = base_score - penalty
60
- return int(max(10, min(100, final_score)))
 
 
 
 
61
 
62
  def parse_tree_to_list(raw_tree: str):
63
  """Parses text tree into JSON objects for Compose LazyColumn"""
@@ -112,75 +116,62 @@ async def classify_file(request: FileRequest):
112
  @app.post("/review-batch-code")
113
  async def review_batch_code(request: BatchReviewRequest):
114
  try:
115
- loop = asyncio.get_event_loop()
116
 
117
- # Helper function to process one file at a time, WITH the fix to unwrap the list
118
- def process_single_file(file_req):
119
- return reviewer.service.review_batch_code([file_req])
120
-
121
- # Spin up a ThreadPoolExecutor with up to 15 concurrent workers
122
- with ThreadPoolExecutor(max_workers=15) as executor:
123
- # Create a concurrent task for every file in the request
124
- tasks = [
125
- loop.run_in_executor(executor, process_single_file, f)
126
- for f in request.files
127
- ]
128
-
129
- # asyncio.gather fires them all off at the exact same time
130
- raw_reviews = await asyncio.gather(*tasks, return_exceptions=True)
131
 
132
- # Clean up the results and handle any individual file failures gracefully
133
- valid_reviews = []
134
- for i, result in enumerate(raw_reviews):
135
  if isinstance(result, Exception):
136
- # If we hit a rate limit, bubble it up immediately
137
- if "429" in str(result):
138
- raise HTTPException(status_code=429, detail="AI Quota Exceeded")
139
-
140
- # Otherwise, log the specific file error but don't crash the whole batch
141
- logger.error(f"Failed to analyze {request.files[i].fileName}: {result}")
 
142
  else:
143
- valid_reviews.append(result)
144
 
145
- return {"results": valid_reviews}
146
 
147
  except Exception as e:
148
- if isinstance(e, HTTPException):
149
- raise e
150
- traceback.print_exc()
151
- raise HTTPException(status_code=500, detail=str(e))
152
 
153
  @app.post("/repo-dashboard-stats")
154
  async def get_dashboard_stats(request: BatchReviewRequest):
155
  try:
156
- loop = asyncio.get_event_loop()
157
 
158
- # 1. Use the same parallel helper for blazing fast stats!
159
- def process_single_file(file_req):
160
- return reviewer.service.review_batch_code([file_req])
161
-
162
- # 2. Run all files concurrently
163
- with ThreadPoolExecutor(max_workers=15) as executor:
164
- tasks = [
165
- loop.run_in_executor(executor, process_single_file, f)
166
- for f in request.files
167
- ]
168
- raw_reviews = await asyncio.gather(*tasks, return_exceptions=True)
169
 
170
  total_vulns = 0
171
  maint_scores = []
172
  found_apis = set()
173
  api_regex = re.compile(r'(?:get|post|put|delete|patch)\([\'"]\/(.*?)[\'"]', re.IGNORECASE)
174
 
175
- # 3. Process the results, ignoring any parallel tasks that failed
176
  for i, review in enumerate(raw_reviews):
177
  if isinstance(review, Exception):
178
- logger.error(f"Skipping stats for {request.files[i].fileName} due to error: {review}")
179
- continue # Skip failed files so the dashboard doesn't crash
180
 
181
- vulns = review.get("vulnerabilities", [])
 
 
 
 
182
  total_vulns += len(vulns)
183
- m_score = review.get("metrics", {}).get("maintainability", 8.0)
 
184
  maint_scores.append(m_score)
185
 
186
  content = request.files[i].content
@@ -203,10 +194,8 @@ async def get_dashboard_stats(request: BatchReviewRequest):
203
  "average_maintainability": round(avg_maint, 1)
204
  }
205
  except Exception as e:
206
- if "429" in str(e):
207
- raise HTTPException(status_code=429, detail="Quota exceeded")
208
- logger.error(f"Dashboard stats failed: {traceback.format_exc()}")
209
- raise HTTPException(status_code=500, detail="Failed to sync dashboard metrics")
210
 
211
  @app.post("/analyze-file")
212
  async def analyze_file(request: FileRequest):
 
56
def calculate_repo_health(total_vulns: int, avg_maint: float) -> int:
    """Derive a repo health score in [10, 100].

    Maintainability (0-10 scale) is scaled to a 0-100 base; each
    vulnerability subtracts a flat 8-point penalty; the result is
    clamped so the dashboard never shows below 10 or above 100.
    """
    raw = avg_maint * 10 - total_vulns * 8
    if raw < 10:
        return 10
    if raw > 100:
        return 100
    return int(raw)
60
+
61
def sync_review_worker(file_req: FileRequest):
    """Run the blocking reviewer service for a single file.

    Thin adapter used by run_in_executor: the service is batch-oriented
    and expects a list, so the single file is wrapped before the call.
    """
    single_file_batch = [file_req]
    return reviewer.service.review_batch_code(single_file_batch)
65
 
66
  def parse_tree_to_list(raw_tree: str):
67
  """Parses text tree into JSON objects for Compose LazyColumn"""
 
116
@app.post("/review-batch-code")
async def review_batch_code(request: BatchReviewRequest):
    """Review every file in the request concurrently.

    Each file is dispatched to the shared thread-pool executor so the
    blocking reviewer service runs in parallel. Per-file failures are
    converted into placeholder entries instead of failing the whole
    batch; only an unexpected top-level error yields a 500.
    """
    try:
        loop = asyncio.get_running_loop()

        # Fan out: one executor task per file, gathered concurrently.
        tasks = [
            loop.run_in_executor(executor, sync_review_worker, f)
            for f in request.files
        ]
        raw_results = await asyncio.gather(*tasks, return_exceptions=True)

        final_reviews = []
        for file_req, result in zip(request.files, raw_results):
            if isinstance(result, Exception):
                logger.error(f"Error reviewing {file_req.fileName}: {result}")
                # Placeholder entry so the UI knows this specific file failed.
                final_reviews.append({
                    "fileName": file_req.fileName,
                    "error": "Analysis failed",
                    "details": str(result)
                })
            else:
                # NOTE(review): sync_review_worker calls the batch service with a
                # one-element list; the dashboard endpoint unwraps `review[0]`
                # while this appends the result as-is — confirm the client
                # expects the un-unwrapped shape here.
                final_reviews.append(result)

        return {"results": final_reviews}

    except HTTPException:
        # Fix: don't let the broad handler below mask a deliberate HTTP error
        # (e.g. a future 429) as a generic 500 — re-raise it unchanged.
        raise
    except Exception:
        # Fix: the previous `as e` binding was never used; the traceback is
        # captured via traceback.format_exc() for the log.
        logger.error(f"Batch review critical failure: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail="Internal processing error")
 
 
148
 
149
  @app.post("/repo-dashboard-stats")
150
  async def get_dashboard_stats(request: BatchReviewRequest):
151
  try:
152
+ loop = asyncio.get_running_loop()
153
 
154
+ # Parallel execution for stats
155
+ tasks = [loop.run_in_executor(executor, sync_review_worker, f) for f in request.files]
156
+ raw_reviews = await asyncio.gather(*tasks, return_exceptions=True)
 
 
 
 
 
 
 
 
157
 
158
  total_vulns = 0
159
  maint_scores = []
160
  found_apis = set()
161
  api_regex = re.compile(r'(?:get|post|put|delete|patch)\([\'"]\/(.*?)[\'"]', re.IGNORECASE)
162
 
 
163
  for i, review in enumerate(raw_reviews):
164
  if isinstance(review, Exception):
165
+ continue
 
166
 
167
+ # Handle the structure returned by your reviewer service
168
+ # (Assuming it returns a list or a dict based on your current service)
169
+ current_review = review[0] if isinstance(review, list) else review
170
+
171
+ vulns = current_review.get("vulnerabilities", [])
172
  total_vulns += len(vulns)
173
+
174
+ m_score = current_review.get("metrics", {}).get("maintainability", 8.0)
175
  maint_scores.append(m_score)
176
 
177
  content = request.files[i].content
 
194
  "average_maintainability": round(avg_maint, 1)
195
  }
196
  except Exception as e:
197
+ logger.error(f"Stats failed: {e}")
198
+ raise HTTPException(status_code=500, detail="Failed to aggregate metrics")
 
 
199
 
200
  @app.post("/analyze-file")
201
  async def analyze_file(request: FileRequest):