from fastapi import FastAPI, UploadFile, File, Form, HTTPException, Depends from fastapi.responses import FileResponse, JSONResponse from fastapi.staticfiles import StaticFiles from fastapi.middleware.cors import CORSMiddleware from pathlib import Path from typing import List, Optional, Dict import os import uuid from datetime import datetime import asyncio from app.schemas import ( IntegrationResponse, AssetResponse, PostResponse, CampaignResponse, CanvaBrandTemplate, CanvaAutofillRequest, CanvaAutofillResponse, LinkedInPostRequest, AIContentRequest, AIContentResponse ) from app.services.canva_service import CanvaService from app.services.linkedin_service import LinkedInService from app.services.ai_service import AIService from app.services.asset_analyzer import AssetAnalyzer from app.services.agentic_planner import AgenticPlanner from app.database import init_db, get_db, get_direct_psycopg2_connection, ensure_default_user from sqlalchemy.orm import Session app = FastAPI(title="PostGen API", version="1.0.0") app.add_middleware( CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) # Initialize database on startup @app.on_event("startup") async def startup_event(): """Initialize database tables on startup""" # Create uploads directory if it doesn't exist upload_dir = Path("uploads") upload_dir.mkdir(exist_ok=True) print(f"✓ Uploads directory ready: {upload_dir.absolute()}") db_initialized = init_db() if db_initialized: print("✓ Database initialized successfully") # Ensure default user exists try: user_id = ensure_default_user() print(f"✓ Default user ready (id={user_id})") except Exception as e: print(f"⚠ Could not ensure default user: {e}") else: print("⚠ Database not available - using mock data") print("⚠ App will function normally with dummy content") print("⚠ To connect to database, set DATABASE_URL environment variable") # Services ai_service = AIService() asset_analyzer = AssetAnalyzer() agentic_planner = AgenticPlanner() # Upload status tracking (in-memory, could be moved to Redis in production) upload_status: Dict[str, Dict] = {} # ---- API Endpoints ---- @app.get("/api/health") def health(): return {"status": "ok", "message": "PostGen API is running"} @app.get("/api/hello") def hello(): return {"message": "Hello from PostGen API"} # ---- Canva Integration ---- @app.get("/api/canva/brand-templates", response_model=List[CanvaBrandTemplate]) async def get_canva_brand_templates(access_token: str): """Get list of Canva brand templates""" try: canva_service = CanvaService(access_token) templates = await canva_service.get_brand_templates() return templates except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.get("/api/canva/brand-templates/{template_id}/dataset") async def get_canva_template_dataset(template_id: str, access_token: str): """Get dataset for a specific brand template""" try: canva_service = CanvaService(access_token) dataset = await canva_service.get_brand_template_dataset(template_id) return dataset except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.post("/api/canva/autofill", response_model=CanvaAutofillResponse) async def create_canva_autofill(request: CanvaAutofillRequest, access_token: str): """Create an autofill job for a brand template""" try: canva_service = CanvaService(access_token) response = await canva_service.create_autofill_job(request) return response except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.get("/api/canva/autofill/{job_id}") async def get_canva_autofill_status(job_id: str, access_token: str): """Get status of an autofill job""" try: canva_service = CanvaService(access_token) status = await canva_service.get_autofill_job_status(job_id) return status except Exception as e: raise HTTPException(status_code=500, detail=str(e)) # ---- LinkedIn Integration ---- @app.post("/api/linkedin/post") async def create_linkedin_post(request: LinkedInPostRequest, access_token: str): """Create a LinkedIn post""" try: linkedin_service = LinkedInService(access_token) result = await linkedin_service.create_post( text=request.text, media_uris=request.media_uris ) return result except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.get("/api/linkedin/profile") async def get_linkedin_profile(access_token: str): """Get LinkedIn user profile""" try: linkedin_service = LinkedInService(access_token) profile = await linkedin_service.get_user_profile() return profile except Exception as e: raise HTTPException(status_code=500, detail=str(e)) # ---- AI Content Generation ---- @app.post("/api/ai/generate-content", response_model=AIContentResponse) async def generate_ai_content(request: AIContentRequest, db: Session = Depends(get_db)): """Generate LinkedIn post content using GPT with agentic asset context""" try: # Fetch assets with extracted content if provided asset_insights = None if request.assets: try: from app.models import Asset # Query assets from database db_assets = db.query(Asset).filter(Asset.id.in_(request.assets)).all() asset_insights = [] for asset in db_assets: asset_dict = { "id": str(asset.id), # Return as string to preserve precision "name": asset.name, "product_category": asset.product_category, "extracted_content": asset.extracted_content if hasattr(asset, 'extracted_content') else None } asset_insights.append(asset_dict) except Exception as db_error: # Fallback if database query fails print(f"Could not fetch assets from DB: {db_error}") asset_insights = None response = await ai_service.generate_content( request, assets_context=None, asset_insights=asset_insights ) return response except Exception as e: raise HTTPException(status_code=500, detail=f"AI generation failed: {str(e)}") # ---- Asset Management ---- @app.get("/api/assets/{asset_id}/status") async def get_asset_status(asset_id, db: Session = Depends(get_db)): """Get the analysis status of an asset""" try: # Convert asset_id to int (Python int can handle arbitrarily large integers) try: asset_id = int(asset_id) print(f"Status check for asset_id: {asset_id} (type: {type(asset_id).__name__})") except (ValueError, TypeError): raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") from app.models import Asset conn = get_direct_psycopg2_connection() if not conn: raise HTTPException(status_code=500, detail="Database connection failed") try: cursor = conn.cursor() # First check if extracted_content column exists try: cursor.execute(""" SELECT column_name FROM information_schema.columns WHERE table_name='assets' AND column_name='extracted_content' """) has_extracted_content = cursor.fetchone() is not None except Exception as col_check_error: print(f"Column check error (non-fatal): {col_check_error}") has_extracted_content = False # Build query based on column existence try: # Try querying with explicit bigint cast to handle large IDs if has_extracted_content: cursor.execute(""" SELECT id, name, analysis_status, analyzed_at, extracted_content FROM assets WHERE id = %s::bigint """, (asset_id,)) else: cursor.execute(""" SELECT id, name, analysis_status, analyzed_at FROM assets WHERE id = %s::bigint """, (asset_id,)) except Exception as query_error: print(f"Query error for asset_id {asset_id}: {query_error}") # Try without cast as fallback try: if has_extracted_content: cursor.execute(""" SELECT id, name, analysis_status, analyzed_at, extracted_content FROM assets WHERE id = %s """, (asset_id,)) else: cursor.execute(""" SELECT id, name, analysis_status, analyzed_at FROM assets WHERE id = %s """, (asset_id,)) except Exception as fallback_error: cursor.close() conn.close() raise HTTPException(status_code=500, detail=f"Query failed: {str(fallback_error)}") row = cursor.fetchone() # Debug: If not found, check if asset exists with different query if not row: try: cursor.execute("SELECT COUNT(*) FROM assets WHERE id = %s", (asset_id,)) count = cursor.fetchone()[0] print(f"Debug: Asset ID {asset_id} (type: {type(asset_id)}) - Count: {count}") # Also check recent assets to see what IDs look like cursor.execute("SELECT id, name FROM assets ORDER BY id DESC LIMIT 5") recent = cursor.fetchall() print(f"Debug: Recent asset IDs: {[r[0] for r in recent]}") except Exception as debug_error: print(f"Debug query error: {debug_error}") cursor.close() conn.close() if row: result = { "asset_id": str(row[0]), # Return as string to preserve precision for large IDs "name": row[1], "status": row[2] or "pending", "analyzed_at": row[3].isoformat() if row[3] else None, } # Add extracted_content only if column exists and value is present if has_extracted_content and len(row) > 4: result["extracted_content"] = row[4] else: result["extracted_content"] = None return result else: # Log for debugging print(f"Asset not found: id={asset_id}, type={type(asset_id)}") raise HTTPException(status_code=404, detail=f"Asset not found: {asset_id}") except HTTPException: raise except Exception as e: if conn: try: cursor.close() conn.close() except: pass print(f"Error in get_asset_status for asset_id {asset_id}: {e}") import traceback print(traceback.format_exc()) raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") except HTTPException: raise except Exception as e: print(f"Error in get_asset_status (outer) for asset_id {asset_id}: {e}") import traceback print(traceback.format_exc()) raise HTTPException(status_code=500, detail=str(e)) async def analyze_asset_background(asset_id: int, file_path: str, file_type: str): """Background task to analyze asset""" try: # Update status to processing conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() cursor.execute(""" UPDATE assets SET analysis_status = 'processing' WHERE id = %s """, (asset_id,)) conn.commit() cursor.close() conn.close() except Exception as update_error: print(f"Could not update analysis status: {update_error}") if conn: conn.close() # Analyze asset analysis_result = await asset_analyzer.analyze_document(str(file_path)) if analysis_result.get("success") and analysis_result.get("extracted_content"): # Update asset with extracted content conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() import json extracted_json = json.dumps(analysis_result["extracted_content"]) cursor.execute(""" UPDATE assets SET extracted_content = %s::jsonb, analysis_status = 'completed', analyzed_at = NOW() WHERE id = %s """, (extracted_json, asset_id)) conn.commit() cursor.close() conn.close() print(f"✓ Asset {asset_id} analyzed successfully") except Exception as update_error: print(f"Could not save extracted content: {update_error}") # Try to mark as failed try: cursor = conn.cursor() cursor.execute(""" UPDATE assets SET analysis_status = 'failed' WHERE id = %s """, (asset_id,)) conn.commit() cursor.close() except: pass if conn: conn.close() else: # Mark as failed if analysis didn't succeed conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() cursor.execute(""" UPDATE assets SET analysis_status = 'failed' WHERE id = %s """, (asset_id,)) conn.commit() cursor.close() conn.close() except: if conn: conn.close() except Exception as analysis_error: print(f"Asset analysis error: {analysis_error}") # Mark as failed conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() cursor.execute(""" UPDATE assets SET analysis_status = 'failed' WHERE id = %s """, (asset_id,)) conn.commit() cursor.close() conn.close() except: if conn: conn.close() @app.post("/api/assets/upload") async def upload_asset( file: UploadFile = File(...), product_category: str = Form(None), sub_category: Optional[str] = Form(None), db: Session = Depends(get_db) ): """Upload an asset to the repository""" try: # Create uploads directory if it doesn't exist upload_dir = Path("uploads") upload_dir.mkdir(exist_ok=True) # Read file content content = await file.read() file_size = len(content) # Determine file type file_type = "unknown" if file.content_type: if file.content_type.startswith("image/"): file_type = "image" elif file.content_type.startswith("video/"): file_type = "video" elif file.content_type.startswith("application/pdf") or "document" in file.content_type: file_type = "document" # Save file to disk (use absolute path) # Sanitize filename to prevent directory traversal and add timestamp for uniqueness safe_filename = file.filename.replace('/', '_').replace('\\', '_') # Add timestamp and UUID to prevent overwrites file_stem = Path(safe_filename).stem file_suffix = Path(safe_filename).suffix unique_filename = f"{file_stem}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}{file_suffix}" file_path = upload_dir / unique_filename # Convert to absolute path before storing file_path = file_path.resolve() with open(file_path, "wb") as buffer: buffer.write(content) # Save to database (keep dummy content as requested) try: from app.models import Asset # Ensure default user exists and get user_id user_id = ensure_default_user() db_asset = Asset( name=file.filename, # Keep original filename for display file_path=str(file_path), # Store absolute path file_type=file_type, product_category=product_category or "ocr", sub_category=sub_category if sub_category and sub_category != "none" else None, size=file_size, user_id=user_id ) db.add(db_asset) try: db.commit() try: db.refresh(db_asset) except Exception as refresh_error: # Refresh might fail due to version string, but commit succeeded # Query the asset back to get the ID if "Could not determine version" in str(refresh_error): # Use direct psycopg2 to query back the asset conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() cursor.execute(""" SELECT id, created_at FROM assets WHERE name = %s AND file_path = %s ORDER BY id DESC LIMIT 1 """, (file.filename, str(file_path.resolve()))) row = cursor.fetchone() cursor.close() conn.close() if row: # Keep ID as returned from database (CockroachDB uses bigint) db_asset.id = row[0] print(f"✓ Asset created with ID: {db_asset.id} (type: {type(db_asset.id).__name__})") if hasattr(db_asset, 'created_at') and row[1]: db_asset.created_at = row[1] except Exception as psycopg2_error: print(f"Direct psycopg2 query failed: {psycopg2_error}") if conn: conn.close() else: raise refresh_error except Exception as commit_error: # If commit fails due to version string issue, use direct psycopg2 db.rollback() error_str = str(commit_error) if "Could not determine version" in error_str: # Use direct psycopg2 connection to bypass SQLAlchemy # Ensure default user exists first user_id = ensure_default_user() conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() cursor.execute(""" INSERT INTO assets (name, file_path, file_type, product_category, sub_category, size, user_id, created_at) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW()) RETURNING id, created_at """, ( file.filename, str(file_path.resolve()), # Store absolute path file_type, product_category or "ocr", sub_category if sub_category and sub_category != "none" else None, file_size, user_id )) row = cursor.fetchone() conn.commit() if row: # Keep ID as returned from database (CockroachDB uses bigint) db_asset.id = row[0] db_asset.created_at = row[1] print(f"✓ Asset created with ID: {db_asset.id} (type: {type(db_asset.id).__name__})") cursor.close() conn.close() except Exception as psycopg2_error: print(f"Direct psycopg2 insert failed: {psycopg2_error}") if conn: conn.close() raise commit_error else: raise commit_error else: raise commit_error # Start background analysis task asset_id = db_asset.id if file_type in ["document", "image"]: # Start background task (don't await - return immediately) asyncio.create_task(analyze_asset_background(asset_id, str(file_path), file_type)) return { "id": str(db_asset.id), # Return as string to preserve precision for large IDs "name": db_asset.name, "file_type": db_asset.file_type, "product_category": db_asset.product_category, "sub_category": db_asset.sub_category, "size": db_asset.size, "analysis_status": "processing" if file_type in ["document", "image"] else "pending", "created_at": db_asset.created_at.isoformat() if hasattr(db_asset, 'created_at') else datetime.utcnow().isoformat() } except Exception as db_error: # If database save fails, still return success (file is saved) # This allows the app to work even if DB has issues print(f"Database save warning: {db_error}") return { "id": "1", # Return as string for consistency "name": file.filename, "file_type": file_type, "product_category": product_category, "sub_category": sub_category, "size": file_size, "created_at": datetime.utcnow().isoformat() } except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.get("/api/assets", response_model=List[AssetResponse]) async def get_assets( product_category: Optional[str] = None, db: Session = Depends(get_db) ): """Get list of assets""" try: from app.models import Asset from sqlalchemy import text # Try using ORM first try: query = db.query(Asset) if product_category and product_category != "all": query = query.filter(Asset.product_category == product_category) db_assets = query.order_by(Asset.created_at.desc()).all() # Convert to response format assets = [] for asset in db_assets: assets.append({ "id": str(asset.id), # Return as string to preserve precision "name": asset.name, "file_type": asset.file_type, "product_category": asset.product_category, "sub_category": asset.sub_category, "size": asset.size, "extracted_content": asset.extracted_content if hasattr(asset, 'extracted_content') else None, "analysis_status": asset.analysis_status if hasattr(asset, 'analysis_status') else None, "analyzed_at": asset.analyzed_at.isoformat() if hasattr(asset, 'analyzed_at') and asset.analyzed_at else None, "created_at": asset.created_at }) except Exception as orm_error: # If ORM fails due to version string issue, use direct psycopg2 error_str = str(orm_error) if "Could not determine version" in error_str: # Use direct psycopg2 connection to bypass SQLAlchemy conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() if product_category and product_category != "all": cursor.execute(""" SELECT id, name, file_path, file_type, product_category, sub_category, size, extracted_content, analysis_status, analyzed_at, created_at FROM assets WHERE product_category = %s ORDER BY created_at DESC """, (product_category,)) else: cursor.execute(""" SELECT id, name, file_path, file_type, product_category, sub_category, size, extracted_content, analysis_status, analyzed_at, created_at FROM assets ORDER BY created_at DESC """) rows = cursor.fetchall() cursor.close() conn.close() assets = [] for row in rows: assets.append({ "id": str(row[0]), # Return as string to preserve precision "name": row[1], "file_type": row[3], "product_category": row[4], "sub_category": row[5], "size": row[6], "extracted_content": row[7] if len(row) > 7 else None, "analysis_status": row[8] if len(row) > 8 else None, "analyzed_at": row[9].isoformat() if len(row) > 9 and row[9] else None, "created_at": row[10] if len(row) > 10 else row[6] }) except Exception as psycopg2_error: print(f"Direct psycopg2 query failed: {psycopg2_error}") if conn: conn.close() assets = [] else: assets = [] else: print(f"ORM query error: {orm_error}") assets = [] # Merge with mock data (as requested - keep dummy content) mock_assets = [ { "id": "9991", # Return as string for consistency "name": "OCR_Demo_Screenshot.png", "file_type": "image", "product_category": "ocr", "sub_category": None, "size": 2516582, "created_at": datetime(2024, 12, 20) }, { "id": "9992", # Return as string for consistency "name": "P2P_Workflow_Diagram.pdf", "file_type": "document", "product_category": "p2p", "sub_category": "Budget Approval Workflow", "size": 1024000, "created_at": datetime(2024, 12, 19) }, { "id": "9993", # Return as string for consistency "name": "O2C_Process_Video.mp4", "file_type": "video", "product_category": "o2c", "sub_category": "Sales Order Workflow", "size": 15728640, "created_at": datetime(2024, 12, 18) } ] # Combine real assets with mock assets (real assets first) return assets + mock_assets except Exception as e: # If database query fails, return mock data only print(f"Database query warning: {e}") return [ { "id": 1, "name": "OCR_Demo_Screenshot.png", "file_type": "image", "product_category": "ocr", "sub_category": None, "size": 2516582, "created_at": datetime.utcnow() } ] @app.delete("/api/assets/{asset_id}") async def delete_asset(asset_id, db: Session = Depends(get_db)): """Delete an asset from both filesystem and database""" try: from app.models import Asset # Convert asset_id to int (Python int can handle arbitrarily large integers) try: asset_id_int = int(asset_id) except (ValueError, TypeError): raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") # Get asset from database conn = get_direct_psycopg2_connection() if not conn: raise HTTPException(status_code=500, detail="Database connection failed") try: cursor = conn.cursor() cursor.execute(""" SELECT id, name, file_path FROM assets WHERE id = %s::bigint """, (asset_id_int,)) row = cursor.fetchone() if not row: cursor.close() conn.close() raise HTTPException(status_code=404, detail="Asset not found") file_path = Path(row[2]) # Delete file from filesystem if file_path.exists(): try: file_path.unlink() print(f"✓ Deleted file: {file_path}") except Exception as file_error: print(f"⚠ Could not delete file: {file_error}") # Continue with database deletion even if file deletion fails # Delete from database cursor.execute("DELETE FROM assets WHERE id = %s::bigint", (asset_id_int,)) conn.commit() cursor.close() conn.close() return { "success": True, "message": f"Asset '{row[1]}' deleted successfully", "asset_id": str(asset_id_int) # Return as string } except Exception as db_error: if conn: conn.close() raise HTTPException(status_code=500, detail=f"Delete failed: {str(db_error)}") except HTTPException: raise except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.get("/api/assets/{asset_id}/pdf-pages") async def get_pdf_pages(asset_id, db: Session = Depends(get_db)): """Convert PDF to images and return page URLs""" try: from app.models import Asset try: from pdf2image import convert_from_path except ImportError: raise HTTPException( status_code=503, detail="PDF conversion not available. Please install pdf2image and poppler-utils." ) import base64 from io import BytesIO # Convert asset_id to int (Python int can handle arbitrarily large integers) try: asset_id_int = int(asset_id) except (ValueError, TypeError): raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") # Get asset from database conn = get_direct_psycopg2_connection() if not conn: raise HTTPException(status_code=500, detail="Database connection failed") try: cursor = conn.cursor() cursor.execute(""" SELECT id, name, file_path, file_type FROM assets WHERE id = %s::bigint """, (asset_id_int,)) row = cursor.fetchone() cursor.close() conn.close() if not row: raise HTTPException(status_code=404, detail="Asset not found") file_path = Path(row[2]) if not file_path.exists(): raise HTTPException(status_code=404, detail="File not found on disk") if row[3] != "document" or not str(file_path).lower().endswith('.pdf'): raise HTTPException(status_code=400, detail="File is not a PDF") # Convert PDF pages to images try: # Convert PDF to images (one per page) images = convert_from_path(str(file_path), dpi=150) # Convert images to base64 page_images = [] for i, image in enumerate(images): buffered = BytesIO() image.save(buffered, format="PNG") img_str = base64.b64encode(buffered.getvalue()).decode() page_images.append({ "page_number": i + 1, "image_data": f"data:image/png;base64,{img_str}" }) return { "asset_id": str(asset_id_int), # Return as string "asset_name": row[1], "total_pages": len(page_images), "pages": page_images } except Exception as pdf_error: raise HTTPException(status_code=500, detail=f"PDF conversion failed: {str(pdf_error)}") except HTTPException: raise except Exception as db_error: if conn: conn.close() raise HTTPException(status_code=500, detail=str(db_error)) except HTTPException: raise except Exception as e: raise HTTPException(status_code=500, detail=str(e)) def _get_media_type(file_path: Path, file_type: str) -> str: """Determine media type from file path and type""" media_type = "application/octet-stream" suffix = file_path.suffix.lower() if file_type == "image" or suffix in [".jpg", ".jpeg", ".png", ".gif", ".webp", ".svg"]: if suffix in [".jpg", ".jpeg"]: media_type = "image/jpeg" elif suffix == ".png": media_type = "image/png" elif suffix == ".gif": media_type = "image/gif" elif suffix == ".webp": media_type = "image/webp" elif suffix == ".svg": media_type = "image/svg+xml" elif file_type == "video" or suffix in [".mp4", ".webm", ".mov", ".avi"]: if suffix == ".mp4": media_type = "video/mp4" elif suffix == ".webm": media_type = "video/webm" elif suffix == ".mov": media_type = "video/quicktime" elif file_type == "document" or suffix in [".pdf", ".doc", ".docx"]: if suffix == ".pdf": media_type = "application/pdf" elif suffix in [".doc", ".docx"]: media_type = "application/msword" return media_type def _resolve_file_path(path_str: str) -> Path: """Resolve file path (handle both absolute and relative paths)""" file_path = Path(path_str) if not file_path.is_absolute(): # If relative, assume it's relative to uploads directory upload_dir = Path("uploads") file_path = (upload_dir / file_path).resolve() else: file_path = file_path.resolve() return file_path @app.get("/api/assets/{asset_id}/download") async def download_asset(asset_id, db: Session = Depends(get_db)): """Download or preview an asset file""" try: from app.models import Asset # Convert asset_id to int (Python int can handle arbitrarily large integers) try: asset_id_int = int(asset_id) except (ValueError, TypeError): raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") # Try to get asset from database db_asset = None file_path = None file_name = None file_type = None try: db_asset = db.query(Asset).filter(Asset.id == asset_id_int).first() except Exception as orm_error: # If ORM fails, use direct psycopg2 if "Could not determine version" in str(orm_error): conn = get_direct_psycopg2_connection() if conn: try: cursor = conn.cursor() cursor.execute(""" SELECT id, name, file_path, file_type FROM assets WHERE id = %s::bigint """, (asset_id_int,)) row = cursor.fetchone() cursor.close() conn.close() if row: file_path = _resolve_file_path(row[2]) file_name = row[1] file_type = row[3] else: raise HTTPException(status_code=404, detail=f"Asset not found: {asset_id}") except HTTPException: raise except Exception as psycopg2_error: if conn: conn.close() raise HTTPException(status_code=500, detail=f"Database error: {str(psycopg2_error)}") else: raise HTTPException(status_code=500, detail=f"ORM error: {str(orm_error)}") if db_asset: file_path = _resolve_file_path(db_asset.file_path) file_name = db_asset.name file_type = db_asset.file_type if not file_path: raise HTTPException(status_code=404, detail=f"Asset not found: {asset_id}") # Check if file exists if not file_path.exists(): raise HTTPException( status_code=404, detail=f"File not found on disk. Expected path: {file_path.absolute()}" ) # Determine media type media_type = _get_media_type(file_path, file_type) return FileResponse( path=str(file_path.absolute()), filename=file_name, media_type=media_type, headers={ "Content-Disposition": f'inline; filename="{file_name}"' # Use 'inline' for preview } ) except HTTPException: raise except Exception as e: import traceback print(f"Download error for asset {asset_id}: {traceback.format_exc()}") raise HTTPException(status_code=500, detail=f"Download failed: {str(e)}") # ---- Post Management ---- @app.post("/api/posts", response_model=PostResponse) async def create_post(post_data: dict): """Create a new post""" # In a real implementation, save to database return { "id": 1, "title": post_data.get("title", "New Post"), "content": post_data.get("content", ""), "post_type": post_data.get("post_type", "content_only"), "product_category": post_data.get("product_category", "ocr"), "scheduled_date": post_data.get("scheduled_date", datetime.utcnow()), "status": "draft", "created_at": datetime.utcnow() } @app.get("/api/posts", response_model=List[PostResponse]) async def get_posts(): """Get list of posts""" # Mock data for now return [ { "id": 1, "title": "OCR Document Automation Benefits", "content": "Transform your document processing...", "post_type": "carousel", "product_category": "ocr", "scheduled_date": datetime.utcnow(), "status": "scheduled", "created_at": datetime.utcnow() } ] # ---- Campaign Management ---- @app.post("/api/campaigns/generate") async def generate_campaign(campaign_data: dict, db: Session = Depends(get_db)): """Generate a campaign schedule using agentic AI""" try: from datetime import datetime from app.models import Asset # Extract campaign parameters date_range_start = datetime.fromisoformat(campaign_data.get("date_range_start").replace("Z", "+00:00")) date_range_end = datetime.fromisoformat(campaign_data.get("date_range_end").replace("Z", "+00:00")) products = campaign_data.get("products", []) post_types = campaign_data.get("post_types", []) posts_per_week = campaign_data.get("posts_per_week", 5) # Fetch relevant assets for the selected products assets = [] try: # Query assets matching the product categories db_assets = db.query(Asset).filter(Asset.product_category.in_(products)).all() for asset in db_assets: asset_dict = { "id": asset.id, "name": asset.name, "file_type": asset.file_type, "product_category": asset.product_category, "sub_category": asset.sub_category, "extracted_content": asset.extracted_content if hasattr(asset, 'extracted_content') else None, "analysis_status": asset.analysis_status if hasattr(asset, 'analysis_status') else None } assets.append(asset_dict) except Exception as asset_error: print(f"Could not fetch assets: {asset_error}") # Continue without assets # Use agentic planner to generate campaign campaign_plan = await agentic_planner.plan_campaign( date_range_start=date_range_start, date_range_end=date_range_end, products=products, post_types=post_types, posts_per_week=posts_per_week, assets=assets ) return campaign_plan except Exception as e: import traceback print(f"Campaign generation error: {traceback.format_exc()}") raise HTTPException(status_code=500, detail=f"Campaign generation failed: {str(e)}") # ---- Frontend static serving ---- # Path calculation: /app/backend/app/main.py -> /app/frontend/dist FRONTEND_DIST = Path("/app/frontend/dist") INDEX_FILE = FRONTEND_DIST / "index.html" if FRONTEND_DIST.exists(): # Serve static assets (JS, CSS, images, etc.) from /assets assets_dir = FRONTEND_DIST / "assets" if assets_dir.exists(): app.mount("/assets", StaticFiles(directory=str(assets_dir)), name="assets") # Serve index.html for root @app.get("/") async def serve_index(): if INDEX_FILE.exists(): return FileResponse(str(INDEX_FILE)) return {"detail": "Frontend not found"} # SPA fallback: any non-/api route should return React index.html # This must be last to catch all routes not handled above @app.get("/{full_path:path}") async def spa_fallback(full_path: str): # Don't handle API routes here if full_path.startswith("api/"): return {"detail": "Not Found"} # Don't handle assets (already mounted) if full_path.startswith("assets/"): return {"detail": "Not Found"} # Serve index.html for all other routes (SPA routing) if INDEX_FILE.exists(): return FileResponse(str(INDEX_FILE)) return {"detail": "Frontend not found"}