File size: 3,228 Bytes
c6abe34
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
from fastapi import APIRouter, Form, UploadFile, File, Depends, HTTPException
from typing import Optional
from uuid import UUID
from datetime import date

from app.dependencies import require_linked_account
from app.stat_import.schemas import StatImportJobResponse, ImportReviewSchema, FinalizeImportRequest

# Shared router for all stat-import endpoints below; the URL prefix is applied
# wherever this router is included by the application (not visible in this file).
router = APIRouter()

@router.post("/upload", response_model=StatImportJobResponse, summary="Upload a stat sheet (PDF/Image)")
async def upload_stat_sheet(
    team_id: UUID = Form(...),
    game_date: Optional[date] = Form(None),
    opponent_name: Optional[str] = Form(None),
    notes: Optional[str] = Form(None),
    file: UploadFile = File(...),
    account_info: dict = Depends(require_linked_account)
):
    """
    Coach uploads a PDF/image stat sheet. 
    System stores it in Supabase Storage and creates an import job.
    """
    # TODO: Validate user is in team `team_id`
    # TODO: Push to supabase storage
    # TODO: Insert to `stat_import_jobs` table
    raise HTTPException(status_code=501, detail="Not Implemented")


@router.post("/process/{import_job_id}", summary="Process an uploaded stat sheet")
async def process_stat_sheet(import_job_id: UUID, account_info: dict = Depends(require_linked_account)):
    """
    Trigger the asynchronous or synchronous OCR parsing pipeline for this upload.
    Classification -> Extraction -> Parsing -> Matching -> Validation.
    """
    # TODO: Service layer to run the layout parser / paddleOCR logic
    raise HTTPException(status_code=501, detail="Not Implemented")


@router.get("/{import_job_id}", summary="Get import job details")
async def get_import_job(import_job_id: UUID, account_info: dict = Depends(require_linked_account)):
    """
    Fetches the result of the pipeline: parsed data, status, etc.
    """
    raise HTTPException(status_code=501, detail="Not Implemented")


@router.post("/{import_job_id}/confirm", summary="Acknowledge overrides on review flow")
async def confirm_review_edits(import_job_id: UUID, account_info: dict = Depends(require_linked_account)):
    """
    Called periodically or as checkpoints when resolving unmatched players manually.
    """
    raise HTTPException(status_code=501, detail="Not Implemented")


@router.get("/{import_job_id}/issues", response_model=ImportReviewSchema, summary="Get extraction or validation issues")
async def get_import_issues(import_job_id: UUID, account_info: dict = Depends(require_linked_account)):
    """
    Return all validation and matching errors flagged during processing.
    """
    raise HTTPException(status_code=501, detail="Not Implemented")


@router.post("/{import_job_id}/finalize", summary="Commit the checked stat sheet to the database")
async def finalize_stat_import(
    import_job_id: UUID, 
    payload: FinalizeImportRequest, 
    account_info: dict = Depends(require_linked_account)
):
    """
    Saves final JSON mapped data into games, game_player_stats, game_team_totals tables.
    Also triggers analytics event generation.
    """
    # TODO: Check if ALL blockers are resolved
    # TODO: Open transaction and dump to relational schema
    # TODO: Mark import job as finalized
    raise HTTPException(status_code=501, detail="Not Implemented")