Spaces:
Sleeping
Sleeping
| from fastapi import APIRouter, Request, HTTPException | |
| from .models import PullRequestPayload | |
| from .github_service import fetch_pr_files, post_pr_comment | |
| from .code_reviewer import analyze_code | |
| import asyncio | |
| import hmac | |
| import hashlib | |
| import os | |
| import logging | |
| logger = logging.getLogger(__name__) | |
| router = APIRouter() | |
def verify_signature(payload_body: bytes, signature_header: str) -> bool:
    """Validate a GitHub webhook delivery against our shared secret.

    Computes HMAC-SHA256 of ``payload_body`` keyed with the
    ``GITHUB_WEBHOOK_SECRET`` environment variable and compares the
    ``sha256=<hexdigest>`` form against ``signature_header`` in constant
    time.

    NOTE(review): this fails OPEN — when no secret is configured, every
    payload is accepted. Convenient for local development, but confirm
    the secret is always set in production.
    """
    secret = os.getenv("GITHUB_WEBHOOK_SECRET", "")
    if not secret:
        # No secret configured: skip verification entirely (fail-open).
        return True
    digest = hmac.new(
        secret.encode(), msg=payload_body, digestmod=hashlib.sha256
    ).hexdigest()
    return hmac.compare_digest(f"sha256={digest}", signature_header)
def _should_review_file(filename: str) -> bool:
    """Return True if *filename* is production code worth an AI review.

    The check is a case-insensitive substring match against the lowercased
    filename, so every pattern must be lowercase — the original mixed-case
    'LICENSE'/'README' entries could never match (fixed here).
    """
    skip_patterns = [
        'test_', '_test.', '.test.',              # Test files
        '.md', '.txt', '.json', '.yaml', '.yml',  # Docs/configs
        '.gitignore', 'license', 'readme',        # Meta files
        'package-lock.json', 'yarn.lock',         # Lock files
    ]
    lowered = filename.lower()
    return not any(pattern in lowered for pattern in skip_patterns)


async def github_webhook(request: Request):
    """Process a GitHub webhook delivery and post an AI code review on the PR.

    Flow: filter to ``pull_request`` events -> verify the HMAC signature ->
    parse/validate the payload -> fetch the PR's changed files -> run the AI
    reviewer on each reviewable file, posting per-file reviews as they
    complete -> post a final summary comment.

    Raises:
        HTTPException(403): webhook signature verification failed.
        HTTPException(500): any unexpected failure during the review.

    NOTE(review): this coroutine carries no ``@router.post(...)`` decorator
    in this file — presumably it is registered elsewhere; confirm the route
    wiring.
    """
    logger.info("=" * 60)
    logger.info("π WEBHOOK RECEIVED")
    logger.info("=" * 60)

    # Check event type first — only pull_request events are processed.
    event_type = request.headers.get("X-GitHub-Event", "")
    logger.info(f"π Event Type: {event_type}")
    if event_type != "pull_request":
        logger.info(f"βοΈ Ignoring {event_type} event (only processing pull_request events)")
        return {"message": f"Ignored {event_type} event"}

    # SECURITY FIX: verify the signature BEFORE deserializing the body, so
    # an unauthenticated payload is never parsed or model-validated (the
    # original parsed first and checked the signature afterwards).
    signature = request.headers.get("X-Hub-Signature-256", "")
    body = await request.body()
    if not verify_signature(body, signature):
        logger.error("β Invalid webhook signature!")
        raise HTTPException(status_code=403, detail="Invalid signature")
    logger.info("β Signature verified")

    # Parse the payload only after the signature checked out.
    import json  # local import preserved from the original
    payload = PullRequestPayload(**json.loads(body))
    logger.info(f"π Action: {payload.action}")

    # Only newly-opened PRs and new pushes to an open PR get reviewed.
    if payload.action not in ("opened", "synchronize"):
        logger.info(f"βοΈ Ignoring action: {payload.action}")
        return {"message": "Ignored non-PR-open events."}

    repo_info = payload.repository
    pr = payload.pull_request
    owner = repo_info["owner"]["login"]
    repo = repo_info["name"]
    pr_number = pr["number"]
    logger.info(f"π¦ Repository: {owner}/{repo}")
    logger.info(f"π’ PR Number: #{pr_number}")
    logger.info(f"π€ Author: {pr['user']['login']}")
    logger.info(f"π Title: {pr['title']}")

    try:
        # Fetch changed files from the GitHub API.
        logger.info("π₯ Fetching changed files from GitHub...")
        files = await fetch_pr_files(owner, repo, pr_number)
        logger.info(f"β Found {len(files)} changed files")

        # Local imports preserved — presumably to avoid an import cycle;
        # TODO(review): confirm they can move to module level.
        from .github_service import post_file_review
        from .code_reviewer import generate_change_summary

        # Only files that have a diff patch and pass the review filter.
        files_with_patches = [
            f for f in files
            if f.get("patch") and _should_review_file(f["filename"])
        ]

        # One-line natural-language summary of the whole change set.
        change_summary = await generate_change_summary(files)
        logger.info(f"π Change summary: {change_summary}")

        if files_with_patches:
            initial_msg = f"π€ **PRism AI is reviewing your PR right now...**\n\nAnalyzing {len(files_with_patches)} file(s). Only critical issues will be reported."
            await post_pr_comment(owner, repo, pr_number, initial_msg)
            logger.info("π’ Posted initial status comment")
        else:
            # Nothing reviewable: tell the author and stop early.
            logger.info("βοΈ No files to review (all skipped)")
            summary_msg = "β **PRism AI Review Complete!**\n\nπ No production code files to review.\n\n_Skipped: test files, configs, and documentation._"
            await post_pr_comment(owner, repo, pr_number, summary_msg)
            return {"status": "No files to review", "count": 0}

        total_comments = 0
        files_analyzed = 0
        for idx, f in enumerate(files, 1):
            if not f.get("patch"):
                logger.info(f"βοΈ Skipping {f['filename']} (no patch)")
                continue
            if not _should_review_file(f["filename"]):
                logger.info(f"βοΈ Skipping {f['filename']} (filtered out)")
                continue
            try:
                files_analyzed += 1
                logger.info(f"π [{idx}/{len(files)}] Analyzing: {f['filename']}")
                logger.info(f"   Status: {f.get('status', 'unknown')}, Changes: {f.get('changes', 0)} lines")
                ai_feedback = await analyze_code(f["filename"], f["patch"])
                logger.info(f"   β AI returned {len(ai_feedback)} comments")
                # Post the review for this file immediately (even if clean).
                await post_file_review(owner, repo, pr_number, f["filename"], ai_feedback)
                total_comments += len(ai_feedback)
                logger.info(f"   π€ Posted review for {f['filename']} ({len(ai_feedback)} issues)")
            except Exception as file_error:
                # A per-file failure is reported on the PR but does not
                # abort the remaining files.
                logger.error(f"   β Error analyzing {f['filename']}: {str(file_error)}")
                error_msg = f"β οΈ **Error analyzing `{f['filename']}`**\n\nCouldn't complete AI review for this file. Error: {str(file_error)}"
                await post_pr_comment(owner, repo, pr_number, error_msg)

        # Post the final summary only if we analyzed at least one file.
        logger.info(f"π Analysis complete: {files_analyzed} files analyzed, {total_comments} total comments")
        if files_analyzed > 0:
            summary_msg = "β **PRism AI Review Complete!**\n\n"
            summary_msg += f"π **Summary:** {change_summary}\n\n"
            summary_msg += f"π Analyzed **{files_analyzed}** file(s)\n"
            # FIX: both branches previously emitted the identical
            # "Found **N** issue(s) - please review above" text, so a clean
            # PR got a confusing "Found **0** issue(s)" line. The zero case
            # now has its own message.
            if total_comments == 0:
                summary_msg += "π¬ No issues found - great work!\n\n"
            else:
                summary_msg += f"π¬ Found **{total_comments}** issue(s) - please review above.\n\n"
            summary_msg += "---\n_Posted by PRism AI Reviewer_ π€"
            await post_pr_comment(owner, repo, pr_number, summary_msg)
            logger.info("β Posted final summary")

        logger.info("=" * 60)
        logger.info("π WEBHOOK PROCESSING COMPLETE")
        logger.info("=" * 60)
        return {"status": "AI review posted", "count": total_comments}
    except Exception as e:
        logger.error("=" * 60)
        logger.error(f"β CRITICAL ERROR: {str(e)}")
        logger.error("=" * 60)
        import traceback
        logger.error(traceback.format_exc())
        # Best-effort: surface the failure on the PR itself.
        try:
            error_msg = f"β **PRism AI encountered an error**\n\nSorry, the review process failed. Please check the logs.\n\nError: `{str(e)}`"
            await post_pr_comment(owner, repo, pr_number, error_msg)
        except Exception:  # FIX: was a bare `except:` (caught SystemExit/KeyboardInterrupt too)
            logger.error("Failed to post error comment to PR")
        raise HTTPException(status_code=500, detail=str(e))