Spaces:
Sleeping
Sleeping
| from fastapi import FastAPI, UploadFile, File, Form, HTTPException, Depends | |
| from fastapi.responses import FileResponse, JSONResponse | |
| from fastapi.staticfiles import StaticFiles | |
| from fastapi.middleware.cors import CORSMiddleware | |
| from pathlib import Path | |
| from typing import List, Optional, Dict | |
| import os | |
| import uuid | |
| from datetime import datetime | |
| import asyncio | |
| from app.schemas import ( | |
| IntegrationResponse, AssetResponse, PostResponse, CampaignResponse, | |
| CanvaBrandTemplate, CanvaAutofillRequest, CanvaAutofillResponse, | |
| LinkedInPostRequest, AIContentRequest, AIContentResponse | |
| ) | |
| from app.services.canva_service import CanvaService | |
| from app.services.linkedin_service import LinkedInService | |
| from app.services.ai_service import AIService | |
| from app.services.asset_analyzer import AssetAnalyzer | |
| from app.services.agentic_planner import AgenticPlanner | |
| from app.database import init_db, get_db, get_direct_psycopg2_connection, ensure_default_user | |
| from sqlalchemy.orm import Session | |
# Application instance and cross-origin policy.
app = FastAPI(title="PostGen API", version="1.0.0")

# Wide-open CORS: any origin/method/header may call the API (with credentials).
_cors_settings = dict(
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.add_middleware(CORSMiddleware, **_cors_settings)
# Initialize database on startup
# NOTE(review): no @app.on_event("startup") / lifespan registration is visible
# in this chunk — confirm this hook is wired up elsewhere in the file.
async def startup_event():
    """Initialize database tables on startup"""
    # Make sure the upload target exists before any request arrives.
    uploads = Path("uploads")
    uploads.mkdir(exist_ok=True)
    print(f"✓ Uploads directory ready: {uploads.absolute()}")

    if not init_db():
        # No database: the app keeps serving mock/dummy content.
        print("⚠ Database not available - using mock data")
        print("⚠ App will function normally with dummy content")
        print("⚠ To connect to database, set DATABASE_URL environment variable")
        return

    print("✓ Database initialized successfully")
    # Ensure default user exists
    try:
        user_id = ensure_default_user()
        print(f"✓ Default user ready (id={user_id})")
    except Exception as exc:
        print(f"⚠ Could not ensure default user: {exc}")
# Services (module-level singletons shared by all request handlers)
ai_service = AIService()  # GPT-backed post content generation
asset_analyzer = AssetAnalyzer()  # document/image content extraction
agentic_planner = AgenticPlanner()  # campaign planning agent
# Upload status tracking (in-memory, could be moved to Redis in production)
upload_status: Dict[str, Dict] = {}
| # ---- API Endpoints ---- | |
def health():
    """Liveness probe: report that the API process is up."""
    payload = {"status": "ok", "message": "PostGen API is running"}
    return payload
def hello():
    """Trivial greeting endpoint used for smoke-testing the API."""
    greeting = {"message": "Hello from PostGen API"}
    return greeting
| # ---- Canva Integration ---- | |
async def get_canva_brand_templates(access_token: str):
    """Return the Canva brand templates available to the given token."""
    try:
        service = CanvaService(access_token)
        return await service.get_brand_templates()
    except Exception as exc:
        # Surface any Canva API failure as a 500 with the raw message.
        raise HTTPException(status_code=500, detail=str(exc))
async def get_canva_template_dataset(template_id: str, access_token: str):
    """Fetch the autofill dataset definition for one brand template."""
    try:
        service = CanvaService(access_token)
        return await service.get_brand_template_dataset(template_id)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
async def create_canva_autofill(request: CanvaAutofillRequest, access_token: str):
    """Kick off a Canva autofill job for a brand template."""
    try:
        return await CanvaService(access_token).create_autofill_job(request)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
async def get_canva_autofill_status(job_id: str, access_token: str):
    """Poll the status of a previously created Canva autofill job."""
    try:
        return await CanvaService(access_token).get_autofill_job_status(job_id)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
| # ---- LinkedIn Integration ---- | |
| async def create_linkedin_post(request: LinkedInPostRequest, access_token: str): | |
| """Create a LinkedIn post""" | |
| try: | |
| linkedin_service = LinkedInService(access_token) | |
| result = await linkedin_service.create_post( | |
| text=request.text, | |
| media_uris=request.media_uris | |
| ) | |
| return result | |
| except Exception as e: | |
| raise HTTPException(status_code=500, detail=str(e)) | |
async def get_linkedin_profile(access_token: str):
    """Fetch the authenticated user's LinkedIn profile."""
    try:
        return await LinkedInService(access_token).get_user_profile()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
| # ---- AI Content Generation ---- | |
async def generate_ai_content(request: AIContentRequest, db: Session = Depends(get_db)):
    """Generate LinkedIn post copy via the AI service, enriched with any
    extracted content from the assets referenced by the request."""
    asset_insights = None
    try:
        if request.assets:
            # Best-effort lookup of the referenced assets; generation still
            # proceeds (without context) when the DB is unavailable.
            try:
                from app.models import Asset
                matching = db.query(Asset).filter(Asset.id.in_(request.assets)).all()
                asset_insights = [
                    {
                        "id": str(record.id),  # string keeps big IDs precise
                        "name": record.name,
                        "product_category": record.product_category,
                        "extracted_content": getattr(record, 'extracted_content', None),
                    }
                    for record in matching
                ]
            except Exception as db_error:
                print(f"Could not fetch assets from DB: {db_error}")
                asset_insights = None
        return await ai_service.generate_content(
            request,
            assets_context=None,
            asset_insights=asset_insights,
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"AI generation failed: {str(e)}")
| # ---- Asset Management ---- | |
async def get_asset_status(asset_id, db: Session = Depends(get_db)):
    """Get the analysis status of an asset.

    Lookup order:
      1. Coerce ``asset_id`` to int (IDs arrive as strings because the DB
         uses bigint values too large for JS numbers).
      2. Try the SQLAlchemy ORM; return immediately on a hit.
      3. If the ORM errors OR finds nothing, fall back to a direct
         psycopg2 query (works around CockroachDB's "Could not determine
         version" SQLAlchemy failure).

    Returns a dict with asset_id (string), name, status, analyzed_at and
    extracted_content. Raises 400 for a non-numeric ID, 404 when the
    asset does not exist, 500 on database errors.
    """
    try:
        # Convert asset_id to int (Python int can handle arbitrarily large integers)
        try:
            asset_id = int(asset_id)
            print(f"Status check for asset_id: {asset_id} (type: {type(asset_id).__name__})")
        except (ValueError, TypeError):
            raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}")
        from app.models import Asset
        # Try ORM first; note a miss here deliberately falls through to the
        # direct-connection path below instead of returning 404 right away.
        try:
            db_asset = db.query(Asset).filter(Asset.id == asset_id).first()
            if db_asset:
                result = {
                    "asset_id": str(db_asset.id),
                    "name": db_asset.name,
                    # hasattr guards cover schemas that predate these columns
                    "status": db_asset.analysis_status if hasattr(db_asset, 'analysis_status') and db_asset.analysis_status else "pending",
                    "analyzed_at": db_asset.analyzed_at.isoformat() if hasattr(db_asset, 'analyzed_at') and db_asset.analyzed_at else None,
                    "extracted_content": db_asset.extracted_content if hasattr(db_asset, 'extracted_content') else None
                }
                return result
        except Exception as orm_error:
            # If ORM fails, fallback to direct connection
            error_str = str(orm_error)
            if "Could not determine version" not in error_str:
                # Real error, not just version parsing — log it but still try
                # the fallback path.
                print(f"ORM query error: {orm_error}")
        # Fallback to direct psycopg2 connection
        conn = get_direct_psycopg2_connection()
        if not conn:
            raise HTTPException(status_code=500, detail="Database connection failed")
        try:
            cursor = conn.cursor()
            # First check if extracted_content column exists (older schemas
            # may lack it; the SELECT below is shaped accordingly)
            try:
                cursor.execute("""
                    SELECT column_name
                    FROM information_schema.columns
                    WHERE table_name='assets' AND column_name='extracted_content'
                """)
                has_extracted_content = cursor.fetchone() is not None
            except Exception as col_check_error:
                print(f"Column check error (non-fatal): {col_check_error}")
                has_extracted_content = False
            # Build query based on column existence
            try:
                # Try querying with explicit bigint cast to handle large IDs
                if has_extracted_content:
                    cursor.execute("""
                        SELECT id, name, analysis_status, analyzed_at, extracted_content
                        FROM assets
                        WHERE id = %s::bigint
                    """, (asset_id,))
                else:
                    cursor.execute("""
                        SELECT id, name, analysis_status, analyzed_at
                        FROM assets
                        WHERE id = %s::bigint
                    """, (asset_id,))
            except Exception as query_error:
                print(f"Query error for asset_id {asset_id}: {query_error}")
                # Try without cast as fallback
                try:
                    if has_extracted_content:
                        cursor.execute("""
                            SELECT id, name, analysis_status, analyzed_at, extracted_content
                            FROM assets
                            WHERE id = %s
                        """, (asset_id,))
                    else:
                        cursor.execute("""
                            SELECT id, name, analysis_status, analyzed_at
                            FROM assets
                            WHERE id = %s
                        """, (asset_id,))
                except Exception as fallback_error:
                    cursor.close()
                    conn.close()
                    raise HTTPException(status_code=500, detail=f"Query failed: {str(fallback_error)}")
            row = cursor.fetchone()
            # Debug: If not found, check if asset exists with different query
            if not row:
                try:
                    cursor.execute("SELECT COUNT(*) FROM assets WHERE id = %s", (asset_id,))
                    count = cursor.fetchone()[0]
                    print(f"Debug: Asset ID {asset_id} (type: {type(asset_id)}) - Count: {count}")
                    # Also check recent assets to see what IDs look like
                    cursor.execute("SELECT id, name FROM assets ORDER BY id DESC LIMIT 5")
                    recent = cursor.fetchall()
                    print(f"Debug: Recent asset IDs: {[r[0] for r in recent]}")
                except Exception as debug_error:
                    print(f"Debug query error: {debug_error}")
            cursor.close()
            conn.close()
            if row:
                result = {
                    "asset_id": str(row[0]),  # Return as string to preserve precision for large IDs
                    "name": row[1],
                    "status": row[2] or "pending",
                    "analyzed_at": row[3].isoformat() if row[3] else None,
                }
                # Add extracted_content only if column exists and value is present
                if has_extracted_content and len(row) > 4:
                    result["extracted_content"] = row[4]
                else:
                    result["extracted_content"] = None
                return result
            else:
                # Log for debugging
                print(f"Asset not found: id={asset_id}, type={type(asset_id)}")
                raise HTTPException(status_code=404, detail=f"Asset not found: {asset_id}")
        except HTTPException:
            # Let the 404/500 above pass through untouched
            raise
        except Exception as e:
            if conn:
                try:
                    cursor.close()
                    conn.close()
                except:
                    pass
            print(f"Error in get_asset_status for asset_id {asset_id}: {e}")
            import traceback
            print(traceback.format_exc())
            raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
    except HTTPException:
        raise
    except Exception as e:
        print(f"Error in get_asset_status (outer) for asset_id {asset_id}: {e}")
        import traceback
        print(traceback.format_exc())
        raise HTTPException(status_code=500, detail=str(e))
def _mark_asset_analysis_status(asset_id: int, status: str) -> None:
    """Best-effort update of assets.analysis_status for one asset.

    Uses a short-lived direct psycopg2 connection (bypassing SQLAlchemy,
    which fails to parse CockroachDB's version string). Failures are
    logged and swallowed: a missed status write must never crash the
    background task. The connection is always closed via ``finally``,
    fixing the leak paths the previous duplicated blocks had.
    """
    conn = get_direct_psycopg2_connection()
    if not conn:
        return
    try:
        cursor = conn.cursor()
        cursor.execute("""
            UPDATE assets
            SET analysis_status = %s
            WHERE id = %s
        """, (status, asset_id))
        conn.commit()
        cursor.close()
    except Exception as update_error:
        print(f"Could not update analysis status: {update_error}")
    finally:
        try:
            conn.close()
        except Exception:
            pass


async def analyze_asset_background(asset_id: int, file_path: str, file_type: str):
    """Background task to analyze asset.

    Marks the asset 'processing', runs the analyzer on the stored file,
    persists the extracted JSON and flips the status to 'completed'.
    Every failure path marks the asset 'failed'. Never raises: this runs
    detached via asyncio.create_task, so errors are only logged.
    """
    try:
        # Update status to processing
        _mark_asset_analysis_status(asset_id, 'processing')
        # Analyze asset (document/image extraction — may take a while)
        analysis_result = await asset_analyzer.analyze_document(str(file_path))
        if analysis_result.get("success") and analysis_result.get("extracted_content"):
            # Update asset with extracted content
            conn = get_direct_psycopg2_connection()
            saved = False
            if conn:
                try:
                    cursor = conn.cursor()
                    import json
                    extracted_json = json.dumps(analysis_result["extracted_content"])
                    cursor.execute("""
                        UPDATE assets
                        SET extracted_content = %s::jsonb,
                            analysis_status = 'completed',
                            analyzed_at = NOW()
                        WHERE id = %s
                    """, (extracted_json, asset_id))
                    conn.commit()
                    cursor.close()
                    saved = True
                    print(f"✓ Asset {asset_id} analyzed successfully")
                except Exception as update_error:
                    print(f"Could not save extracted content: {update_error}")
                finally:
                    try:
                        conn.close()
                    except Exception:
                        pass
                if not saved:
                    # Saving failed mid-way: record the failure status
                    _mark_asset_analysis_status(asset_id, 'failed')
        else:
            # Mark as failed if analysis didn't succeed
            _mark_asset_analysis_status(asset_id, 'failed')
    except Exception as analysis_error:
        print(f"Asset analysis error: {analysis_error}")
        # Mark as failed
        _mark_asset_analysis_status(asset_id, 'failed')
async def upload_asset(
    file: UploadFile = File(...),
    product_category: str = Form(None),
    sub_category: Optional[str] = Form(None),
    db: Session = Depends(get_db)
):
    """Upload an asset to the repository.

    Saves the uploaded file under ./uploads with a timestamp+UUID suffix,
    records a row in the assets table (ORM first, with direct psycopg2
    fallbacks for CockroachDB's "Could not determine version" SQLAlchemy
    failure), then schedules background content analysis for documents
    and images. The request never fails just because the DB write failed:
    the file is kept on disk and a placeholder response is returned.
    """
    try:
        # Create uploads directory if it doesn't exist
        upload_dir = Path("uploads")
        upload_dir.mkdir(exist_ok=True)
        # Read file content fully into memory (size is taken from the bytes)
        content = await file.read()
        file_size = len(content)
        # Determine file type from the client-supplied MIME type
        file_type = "unknown"
        if file.content_type:
            if file.content_type.startswith("image/"):
                file_type = "image"
            elif file.content_type.startswith("video/"):
                file_type = "video"
            elif file.content_type.startswith("application/pdf") or "document" in file.content_type:
                file_type = "document"
        # Save file to disk (use absolute path)
        # Sanitize filename to prevent directory traversal and add timestamp for uniqueness
        safe_filename = file.filename.replace('/', '_').replace('\\', '_')
        # Add timestamp and UUID to prevent overwrites
        file_stem = Path(safe_filename).stem
        file_suffix = Path(safe_filename).suffix
        unique_filename = f"{file_stem}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}{file_suffix}"
        file_path = upload_dir / unique_filename
        # Resolve to absolute path for reliable access
        file_path = file_path.resolve()
        with open(file_path, "wb") as buffer:
            buffer.write(content)
        print(f"✓ File saved to: {file_path} (absolute path)")
        # Save to database (keep dummy content as requested)
        try:
            from app.models import Asset
            # Ensure default user exists and get user_id
            user_id = ensure_default_user()
            db_asset = Asset(
                name=file.filename,  # Keep original filename for display
                file_path=str(file_path),  # Store absolute path for reliable access
                file_type=file_type,
                product_category=product_category or "ocr",
                sub_category=sub_category if sub_category and sub_category != "none" else None,
                size=file_size,
                user_id=user_id
            )
            db.add(db_asset)
            try:
                db.commit()
                try:
                    db.refresh(db_asset)
                except Exception as refresh_error:
                    # Refresh might fail due to version string, but commit succeeded
                    # Query the asset back to get the ID
                    if "Could not determine version" in str(refresh_error):
                        # Use direct psycopg2 to query back the asset
                        conn = get_direct_psycopg2_connection()
                        if conn:
                            try:
                                cursor = conn.cursor()
                                # Re-find the freshly committed row by name+path
                                cursor.execute("""
                                    SELECT id, created_at FROM assets
                                    WHERE name = %s AND file_path = %s
                                    ORDER BY id DESC LIMIT 1
                                """, (file.filename, str(file_path)))
                                row = cursor.fetchone()
                                cursor.close()
                                conn.close()
                                if row:
                                    # Keep ID as returned from database (CockroachDB uses bigint)
                                    db_asset.id = row[0]
                                    print(f"✓ Asset created with ID: {db_asset.id} (type: {type(db_asset.id).__name__})")
                                    if hasattr(db_asset, 'created_at') and row[1]:
                                        db_asset.created_at = row[1]
                            except Exception as psycopg2_error:
                                print(f"Direct psycopg2 query failed: {psycopg2_error}")
                                if conn:
                                    conn.close()
                    else:
                        raise refresh_error
            except Exception as commit_error:
                # If commit fails due to version string issue, use direct psycopg2
                db.rollback()
                error_str = str(commit_error)
                if "Could not determine version" in error_str:
                    # Use direct psycopg2 connection to bypass SQLAlchemy
                    # Ensure default user exists first
                    user_id = ensure_default_user()
                    conn = get_direct_psycopg2_connection()
                    if conn:
                        try:
                            cursor = conn.cursor()
                            cursor.execute("""
                                INSERT INTO assets (name, file_path, file_type, product_category, sub_category, size, user_id, created_at)
                                VALUES (%s, %s, %s, %s, %s, %s, %s, NOW())
                                RETURNING id, created_at
                            """, (
                                file.filename,
                                str(file_path),
                                file_type,
                                product_category or "ocr",
                                sub_category if sub_category and sub_category != "none" else None,
                                file_size,
                                user_id
                            ))
                            row = cursor.fetchone()
                            conn.commit()
                            if row:
                                # Keep ID as returned from database (CockroachDB uses bigint)
                                db_asset.id = row[0]
                                db_asset.created_at = row[1]
                                print(f"✓ Asset created with ID: {db_asset.id} (type: {type(db_asset.id).__name__})")
                                cursor.close()
                                conn.close()
                                # Continue with normal flow - asset is saved
                            else:
                                cursor.close()
                                conn.close()
                                raise Exception("Failed to get asset ID after insert")
                        except Exception as psycopg2_error:
                            print(f"Direct psycopg2 insert failed: {psycopg2_error}")
                            if conn:
                                try:
                                    cursor.close()
                                except:
                                    pass
                                conn.close()
                            # Don't raise - let it fall through to return dummy asset
                            # But log the error
                            print(f"⚠ Asset file saved but database insert failed: {psycopg2_error}")
                    else:
                        print("⚠ Direct psycopg2 connection failed - asset file saved but not in database")
                        # Don't raise - let it fall through
                else:
                    raise commit_error
            # Start background analysis task
            asset_id = db_asset.id
            if file_type in ["document", "image"]:
                # Start background task (don't await - return immediately)
                asyncio.create_task(analyze_asset_background(asset_id, str(file_path), file_type))
            return {
                "id": str(db_asset.id),  # Return as string to preserve precision for large IDs
                "name": db_asset.name,
                "file_type": db_asset.file_type,
                "product_category": db_asset.product_category,
                "sub_category": db_asset.sub_category,
                "size": db_asset.size,
                "analysis_status": "processing" if file_type in ["document", "image"] else "pending",
                # NOTE(review): hasattr only checks the attribute exists, not
                # that it is non-None — a None created_at would raise
                # AttributeError on .isoformat(); confirm the column default.
                "created_at": db_asset.created_at.isoformat() if hasattr(db_asset, 'created_at') else datetime.utcnow().isoformat()
            }
        except Exception as db_error:
            # If database save fails, still return success (file is saved)
            # This allows the app to work even if DB has issues
            print(f"Database save warning: {db_error}")
            return {
                "id": "1",  # Return as string for consistency
                "name": file.filename,
                "file_type": file_type,
                "product_category": product_category,
                "sub_category": sub_category,
                "size": file_size,
                "created_at": datetime.utcnow().isoformat()
            }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
async def get_assets(
    product_category: Optional[str] = None,
    db: Session = Depends(get_db)
):
    """Get list of assets.

    Tries the SQLAlchemy ORM first; on CockroachDB's "Could not determine
    version" parsing failure it falls back to a direct psycopg2 query.
    Real assets are returned first, followed by static mock assets (kept
    intentionally as demo content). IDs are always returned as strings so
    bigint values survive JSON/JS round-trips.
    """
    try:
        from app.models import Asset
        # Try using ORM first
        try:
            query = db.query(Asset)
            if product_category and product_category != "all":
                query = query.filter(Asset.product_category == product_category)
            db_assets = query.order_by(Asset.created_at.desc()).all()
            # Convert to response format
            assets = []
            for asset in db_assets:
                assets.append({
                    "id": str(asset.id),  # Return as string to preserve precision
                    "name": asset.name,
                    "file_type": asset.file_type,
                    "product_category": asset.product_category,
                    "sub_category": asset.sub_category,
                    "size": asset.size,
                    # hasattr guards tolerate schemas predating these columns
                    "extracted_content": asset.extracted_content if hasattr(asset, 'extracted_content') else None,
                    "analysis_status": asset.analysis_status if hasattr(asset, 'analysis_status') else None,
                    "analyzed_at": asset.analyzed_at.isoformat() if hasattr(asset, 'analyzed_at') and asset.analyzed_at else None,
                    "created_at": asset.created_at
                })
        except Exception as orm_error:
            # If ORM fails due to version string issue, use direct psycopg2
            error_str = str(orm_error)
            if "Could not determine version" in error_str:
                # Use direct psycopg2 connection to bypass SQLAlchemy
                conn = get_direct_psycopg2_connection()
                if conn:
                    try:
                        cursor = conn.cursor()
                        if product_category and product_category != "all":
                            cursor.execute("""
                                SELECT id, name, file_path, file_type, product_category, sub_category, size,
                                       extracted_content, analysis_status, analyzed_at, created_at
                                FROM assets
                                WHERE product_category = %s
                                ORDER BY created_at DESC
                            """, (product_category,))
                        else:
                            cursor.execute("""
                                SELECT id, name, file_path, file_type, product_category, sub_category, size,
                                       extracted_content, analysis_status, analyzed_at, created_at
                                FROM assets
                                ORDER BY created_at DESC
                            """)
                        rows = cursor.fetchall()
                        cursor.close()
                        conn.close()
                        assets = []
                        for row in rows:
                            # Column order from the SELECT above:
                            # 0=id 1=name 2=file_path 3=file_type 4=product_category
                            # 5=sub_category 6=size 7=extracted_content
                            # 8=analysis_status 9=analyzed_at 10=created_at
                            assets.append({
                                "id": str(row[0]),  # Return as string to preserve precision
                                "name": row[1],
                                "file_type": row[3],
                                "product_category": row[4],
                                "sub_category": row[5],
                                "size": row[6],
                                "extracted_content": row[7] if len(row) > 7 else None,
                                "analysis_status": row[8] if len(row) > 8 else None,
                                "analyzed_at": row[9].isoformat() if len(row) > 9 and row[9] else None,
                                # Fix: the previous fallback returned row[6] (the
                                # size) as created_at; None is the honest unknown.
                                "created_at": row[10] if len(row) > 10 else None
                            })
                    except Exception as psycopg2_error:
                        print(f"Direct psycopg2 query failed: {psycopg2_error}")
                        if conn:
                            conn.close()
                        assets = []
                else:
                    assets = []
            else:
                print(f"ORM query error: {orm_error}")
                assets = []
        # Merge with mock data (as requested - keep dummy content)
        mock_assets = [
            {
                "id": "9991",  # Return as string for consistency
                "name": "OCR_Demo_Screenshot.png",
                "file_type": "image",
                "product_category": "ocr",
                "sub_category": None,
                "size": 2516582,
                "created_at": datetime(2024, 12, 20)
            },
            {
                "id": "9992",  # Return as string for consistency
                "name": "P2P_Workflow_Diagram.pdf",
                "file_type": "document",
                "product_category": "p2p",
                "sub_category": "Budget Approval Workflow",
                "size": 1024000,
                "created_at": datetime(2024, 12, 19)
            },
            {
                "id": "9993",  # Return as string for consistency
                "name": "O2C_Process_Video.mp4",
                "file_type": "video",
                "product_category": "o2c",
                "sub_category": "Sales Order Workflow",
                "size": 15728640,
                "created_at": datetime(2024, 12, 18)
            }
        ]
        # Combine real assets with mock assets (real assets first)
        return assets + mock_assets
    except Exception as e:
        # If database query fails, return mock data only
        print(f"Database query warning: {e}")
        return [
            {
                # Fix: was the integer 1, breaking the string-ID convention
                # every other response in this module follows.
                "id": "1",
                "name": "OCR_Demo_Screenshot.png",
                "file_type": "image",
                "product_category": "ocr",
                "sub_category": None,
                "size": 2516582,
                "created_at": datetime.utcnow()
            }
        ]
async def delete_asset(asset_id, db: Session = Depends(get_db)):
    """Delete an asset from both filesystem and database.

    Looks the asset up via a direct psycopg2 connection, removes the file
    on disk (best-effort — a failed unlink does not block the DB delete),
    then deletes the row. Raises 400 for a non-numeric ID, 404 when the
    asset does not exist, 500 on connection/query failures.
    """
    try:
        from app.models import Asset
        # Convert asset_id to int (Python int can handle arbitrarily large integers)
        try:
            asset_id_int = int(asset_id)
        except (ValueError, TypeError):
            raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}")
        # Get asset from database
        conn = get_direct_psycopg2_connection()
        if not conn:
            raise HTTPException(status_code=500, detail="Database connection failed")
        try:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT id, name, file_path
                FROM assets
                WHERE id = %s::bigint
            """, (asset_id_int,))
            row = cursor.fetchone()
            if not row:
                cursor.close()
                conn.close()
                raise HTTPException(status_code=404, detail="Asset not found")
            file_path = Path(row[2])
            # Delete file from filesystem
            if file_path.exists():
                try:
                    file_path.unlink()
                    print(f"✓ Deleted file: {file_path}")
                except Exception as file_error:
                    print(f"⚠ Could not delete file: {file_error}")
                    # Continue with database deletion even if file deletion fails
            # Delete from database
            cursor.execute("DELETE FROM assets WHERE id = %s::bigint", (asset_id_int,))
            conn.commit()
            cursor.close()
            conn.close()
            return {
                "success": True,
                "message": f"Asset '{row[1]}' deleted successfully",
                "asset_id": str(asset_id_int)  # Return as string
            }
        except HTTPException:
            # Fix: the generic handler below used to catch the 404 raised
            # above and re-raise it as a 500 "Delete failed" error.
            raise
        except Exception as db_error:
            if conn:
                conn.close()
            raise HTTPException(status_code=500, detail=f"Delete failed: {str(db_error)}")
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
| async def get_pdf_pages(asset_id, db: Session = Depends(get_db)): | |
| """Convert PDF to images and return page URLs""" | |
| try: | |
| from app.models import Asset | |
| try: | |
| from pdf2image import convert_from_path | |
| except ImportError: | |
| raise HTTPException( | |
| status_code=503, | |
| detail="PDF conversion not available. Please install pdf2image and poppler-utils." | |
| ) | |
| import base64 | |
| from io import BytesIO | |
| # Convert asset_id to int (Python int can handle arbitrarily large integers) | |
| try: | |
| asset_id_int = int(asset_id) | |
| print(f"PDF preview request: asset_id={asset_id} -> {asset_id_int} (type: {type(asset_id_int).__name__})") | |
| except (ValueError, TypeError): | |
| raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") | |
| # Try to get asset from database using ORM first | |
| db_asset = None | |
| print(f"PDF preview: Looking for asset_id={asset_id_int} (type: {type(asset_id_int).__name__})") | |
| orm_failed = False | |
| try: | |
| db_asset = db.query(Asset).filter(Asset.id == asset_id_int).first() | |
| if db_asset: | |
| print(f"✓ Found asset via ORM: {db_asset.name} (id={db_asset.id})") | |
| else: | |
| print(f"⚠ Asset not found via ORM for id={asset_id_int}") | |
| # Try to see what assets exist | |
| try: | |
| all_assets = db.query(Asset).limit(10).all() | |
| asset_list = [(str(a.id), a.name) for a in all_assets] | |
| print(f"Debug: Assets in DB (showing IDs as strings): {asset_list}") | |
| # Also try without casting to see if ID type mismatch | |
| try: | |
| test_asset = db.query(Asset).first() | |
| if test_asset: | |
| print(f"Debug: Sample asset ID type: {type(test_asset.id).__name__}, value: {test_asset.id}") | |
| except: | |
| pass | |
| except Exception as debug_error: | |
| print(f"Debug query error: {debug_error}") | |
| orm_failed = True | |
| except Exception as orm_error: | |
| orm_failed = True | |
| # If ORM fails, try direct psycopg2 connection | |
| error_str = str(orm_error) | |
| print(f"ORM query error: {error_str}") | |
| if "Could not determine version" in error_str: | |
| # Try direct connection, but don't raise exception if not found - let it fall through to fallback | |
| conn = get_direct_psycopg2_connection() | |
| if conn: | |
| try: | |
| cursor = conn.cursor() | |
| # Try multiple query approaches | |
| cursor.execute(""" | |
| SELECT id, name, file_path, file_type | |
| FROM assets | |
| WHERE id = %s::bigint | |
| """, (asset_id_int,)) | |
| row = cursor.fetchone() | |
| # If not found, try without cast | |
| if not row: | |
| cursor.execute(""" | |
| SELECT id, name, file_path, file_type | |
| FROM assets | |
| WHERE id = %s | |
| """, (asset_id_int,)) | |
| row = cursor.fetchone() | |
| # If still not found, try as string | |
| if not row: | |
| cursor.execute(""" | |
| SELECT id, name, file_path, file_type | |
| FROM assets | |
| WHERE id::text = %s | |
| """, (str(asset_id_int),)) | |
| row = cursor.fetchone() | |
| cursor.close() | |
| conn.close() | |
| if row: | |
| print(f"✓ Found asset via direct connection (exception handler): {row[1]} (id={row[0]})") | |
| file_path_str = row[2] | |
| print(f"Debug: file_path from DB: {file_path_str}") | |
| # Resolve path - handle both relative and absolute paths | |
| file_path = Path(file_path_str) | |
| # Try multiple path resolution strategies | |
| if file_path.is_absolute(): | |
| # Already absolute, use as-is | |
| if not file_path.exists(): | |
| raise HTTPException(status_code=404, detail=f"File not found on disk: {file_path}") | |
| else: | |
| # Relative path - try multiple locations | |
| possible_paths = [ | |
| Path.cwd() / file_path_str, # From current working directory | |
| Path("/app") / file_path_str, # From /app (Docker) | |
| Path("/app/uploads") / Path(file_path_str).name, # Just filename in /app/uploads | |
| Path(file_path_str).resolve(), # Resolve relative to current dir | |
| ] | |
| file_path = None | |
| for possible_path in possible_paths: | |
| if possible_path.exists(): | |
| file_path = possible_path | |
| print(f"✓ Found file at: {file_path}") | |
| break | |
| if not file_path: | |
| # List what's actually in /app/uploads for debugging | |
| uploads_dir = Path("/app/uploads") | |
| if uploads_dir.exists(): | |
| files_in_uploads = list(uploads_dir.glob("*.pdf")) | |
| print(f"Debug: PDF files in /app/uploads: {[str(f) for f in files_in_uploads[:5]]}") | |
| raise HTTPException(status_code=404, detail=f"File not found. Tried: {[str(p) for p in possible_paths]}") | |
| print(f"Debug: Using file_path: {file_path} (exists: {file_path.exists()})") | |
| if row[3] != "document" or not str(file_path).lower().endswith('.pdf'): | |
| raise HTTPException(status_code=400, detail="File is not a PDF") | |
| # Convert PDF pages to images | |
| try: | |
| images = convert_from_path(str(file_path), dpi=150) | |
| page_images = [] | |
| for i, image in enumerate(images): | |
| buffered = BytesIO() | |
| image.save(buffered, format="PNG") | |
| img_str = base64.b64encode(buffered.getvalue()).decode() | |
| page_images.append({ | |
| "page_number": i + 1, | |
| "image_data": f"data:image/png;base64,{img_str}" | |
| }) | |
| return { | |
| "asset_id": str(asset_id_int), | |
| "asset_name": row[1], | |
| "total_pages": len(page_images), | |
| "pages": page_images | |
| } | |
| except Exception as pdf_error: | |
| error_msg = str(pdf_error) | |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): | |
| raise HTTPException( | |
| status_code=503, | |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." | |
| ) | |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") | |
| else: | |
| print(f"⚠ Asset not found in exception handler, will try fallback") | |
| # Don't raise - let it fall through to fallback code below | |
| except HTTPException: | |
| raise | |
| except Exception as db_error: | |
| print(f"Direct connection error in exception handler: {db_error}") | |
| if conn: | |
| try: | |
| conn.close() | |
| except: | |
| pass | |
| # Don't raise - let it fall through to fallback | |
| else: | |
| print("⚠ Direct connection failed in exception handler, will try fallback") | |
| # Don't raise - let it fall through to fallback | |
| else: | |
| # Real error, not version parsing | |
| raise HTTPException(status_code=500, detail=f"Database query failed: {str(orm_error)}") | |
| # If we got asset from ORM, use it | |
| if db_asset: | |
| file_path_str = db_asset.file_path | |
| print(f"Debug: file_path from ORM: {file_path_str}") | |
| # Resolve path - handle both relative and absolute paths | |
| file_path = Path(file_path_str) | |
| # Try multiple path resolution strategies | |
| if file_path.is_absolute(): | |
| # Already absolute, use as-is | |
| if not file_path.exists(): | |
| raise HTTPException(status_code=404, detail=f"File not found on disk: {file_path}") | |
| else: | |
| # Relative path - try multiple locations | |
| possible_paths = [ | |
| Path.cwd() / file_path_str, # From current working directory | |
| Path("/app") / file_path_str, # From /app (Docker) | |
| Path("/app/uploads") / Path(file_path_str).name, # Just filename in /app/uploads | |
| Path(file_path_str).resolve(), # Resolve relative to current dir | |
| ] | |
| file_path = None | |
| for possible_path in possible_paths: | |
| if possible_path.exists(): | |
| file_path = possible_path | |
| print(f"✓ Found file at: {file_path}") | |
| break | |
| if not file_path: | |
| # List what's actually in /app/uploads for debugging | |
| uploads_dir = Path("/app/uploads") | |
| if uploads_dir.exists(): | |
| files_in_uploads = list(uploads_dir.glob("*.pdf")) | |
| print(f"Debug: PDF files in /app/uploads: {[str(f) for f in files_in_uploads[:5]]}") | |
| raise HTTPException(status_code=404, detail=f"File not found. Tried: {[str(p) for p in possible_paths]}") | |
| print(f"Debug: Using file_path: {file_path} (exists: {file_path.exists()})") | |
| if db_asset.file_type != "document" or not str(file_path).lower().endswith('.pdf'): | |
| raise HTTPException(status_code=400, detail="File is not a PDF") | |
| # Convert PDF pages to images | |
| try: | |
| images = convert_from_path(str(file_path), dpi=150) | |
| page_images = [] | |
| for i, image in enumerate(images): | |
| buffered = BytesIO() | |
| image.save(buffered, format="PNG") | |
| img_str = base64.b64encode(buffered.getvalue()).decode() | |
| page_images.append({ | |
| "page_number": i + 1, | |
| "image_data": f"data:image/png;base64,{img_str}" | |
| }) | |
| return { | |
| "asset_id": str(asset_id_int), | |
| "asset_name": db_asset.name, | |
| "total_pages": len(page_images), | |
| "pages": page_images | |
| } | |
| except Exception as pdf_error: | |
| error_msg = str(pdf_error) | |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): | |
| raise HTTPException( | |
| status_code=503, | |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." | |
| ) | |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") | |
| # Asset not found via ORM - try direct connection as fallback | |
| print(f"Asset not found via ORM, trying direct connection for id={asset_id_int}") | |
| conn = get_direct_psycopg2_connection() | |
| if not conn: | |
| # Last resort: try to get asset by name from the request (if we can) | |
| raise HTTPException(status_code=404, detail="Asset not found (database connection failed)") | |
| try: | |
| cursor = conn.cursor() | |
| # Try multiple query approaches | |
| # First try with bigint cast | |
| cursor.execute(""" | |
| SELECT id, name, file_path, file_type | |
| FROM assets | |
| WHERE id = %s::bigint | |
| """, (asset_id_int,)) | |
| row = cursor.fetchone() | |
| # If not found, try without cast | |
| if not row: | |
| cursor.execute(""" | |
| SELECT id, name, file_path, file_type | |
| FROM assets | |
| WHERE id = %s | |
| """, (asset_id_int,)) | |
| row = cursor.fetchone() | |
| # If still not found, try as string | |
| if not row: | |
| cursor.execute(""" | |
| SELECT id, name, file_path, file_type | |
| FROM assets | |
| WHERE id::text = %s | |
| """, (str(asset_id_int),)) | |
| row = cursor.fetchone() | |
| # Debug: show what assets exist | |
| if not row: | |
| cursor.execute("SELECT id, name FROM assets ORDER BY id DESC LIMIT 10") | |
| recent = cursor.fetchall() | |
| print(f"Debug: Recent asset IDs in DB: {[(str(r[0]), r[1]) for r in recent]}") | |
| cursor.close() | |
| conn.close() | |
| if row: | |
| print(f"✓ Found asset via direct connection: {row[1]} (id={row[0]})") | |
| file_path_str = row[2] | |
| print(f"Debug: file_path from DB (fallback): {file_path_str}") | |
| # Resolve path - handle both relative and absolute paths | |
| file_path = Path(file_path_str) | |
| # Try multiple path resolution strategies | |
| if file_path.is_absolute(): | |
| # Already absolute, use as-is | |
| if not file_path.exists(): | |
| raise HTTPException(status_code=404, detail=f"File not found on disk: {file_path}") | |
| else: | |
| # Relative path - try multiple locations | |
| possible_paths = [ | |
| Path.cwd() / file_path_str, # From current working directory | |
| Path("/app") / file_path_str, # From /app (Docker) | |
| Path("/app/uploads") / Path(file_path_str).name, # Just filename in /app/uploads | |
| Path(file_path_str).resolve(), # Resolve relative to current dir | |
| ] | |
| file_path = None | |
| for possible_path in possible_paths: | |
| if possible_path.exists(): | |
| file_path = possible_path | |
| print(f"✓ Found file at: {file_path}") | |
| break | |
| if not file_path: | |
| # List what's actually in /app/uploads for debugging | |
| uploads_dir = Path("/app/uploads") | |
| if uploads_dir.exists(): | |
| files_in_uploads = list(uploads_dir.glob("*.pdf")) | |
| print(f"Debug: PDF files in /app/uploads: {[str(f) for f in files_in_uploads[:5]]}") | |
| raise HTTPException(status_code=404, detail=f"File not found. Tried: {[str(p) for p in possible_paths]}") | |
| print(f"Debug: Using file_path (fallback): {file_path} (exists: {file_path.exists()})") | |
| if row[3] != "document" or not str(file_path).lower().endswith('.pdf'): | |
| raise HTTPException(status_code=400, detail="File is not a PDF") | |
| # Convert PDF pages to images | |
| try: | |
| images = convert_from_path(str(file_path), dpi=150) | |
| page_images = [] | |
| for i, image in enumerate(images): | |
| buffered = BytesIO() | |
| image.save(buffered, format="PNG") | |
| img_str = base64.b64encode(buffered.getvalue()).decode() | |
| page_images.append({ | |
| "page_number": i + 1, | |
| "image_data": f"data:image/png;base64,{img_str}" | |
| }) | |
| return { | |
| "asset_id": str(asset_id_int), | |
| "asset_name": row[1], | |
| "total_pages": len(page_images), | |
| "pages": page_images | |
| } | |
| except Exception as pdf_error: | |
| error_msg = str(pdf_error) | |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): | |
| raise HTTPException( | |
| status_code=503, | |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." | |
| ) | |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") | |
| else: | |
| raise HTTPException(status_code=404, detail=f"Asset not found. Searched for id={asset_id_int}") | |
| except HTTPException: | |
| raise | |
| except Exception as direct_error: | |
| print(f"Direct connection error: {direct_error}") | |
| import traceback | |
| print(traceback.format_exc()) | |
| raise HTTPException(status_code=500, detail=f"Database error: {str(direct_error)}") | |
| file_path = Path(db_asset.file_path) | |
| if not file_path.exists(): | |
| raise HTTPException(status_code=404, detail="File not found on disk") | |
| if db_asset.file_type != "document" or not str(file_path).lower().endswith('.pdf'): | |
| raise HTTPException(status_code=400, detail="File is not a PDF") | |
| # Convert PDF pages to images | |
| try: | |
| images = convert_from_path(str(file_path), dpi=150) | |
| page_images = [] | |
| for i, image in enumerate(images): | |
| buffered = BytesIO() | |
| image.save(buffered, format="PNG") | |
| img_str = base64.b64encode(buffered.getvalue()).decode() | |
| page_images.append({ | |
| "page_number": i + 1, | |
| "image_data": f"data:image/png;base64,{img_str}" | |
| }) | |
| return { | |
| "asset_id": str(asset_id_int), | |
| "asset_name": db_asset.name, | |
| "total_pages": len(page_images), | |
| "pages": page_images | |
| } | |
| except Exception as pdf_error: | |
| error_msg = str(pdf_error) | |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): | |
| raise HTTPException( | |
| status_code=503, | |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." | |
| ) | |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") | |
| except HTTPException: | |
| raise | |
| except Exception as e: | |
| raise HTTPException(status_code=500, detail=str(e)) | |
async def download_asset(asset_id, db: Session = Depends(get_db)):
    """Download or preview an asset file.

    Looks the asset up by id via the ORM; if the ORM fails with the known
    "Could not determine version" driver error, falls back to a direct
    psycopg2 connection. Streams the file back with a media type inferred
    from the asset's file_type and file extension.

    Raises:
        HTTPException 400: asset_id is not an integer.
        HTTPException 404: asset row missing or file absent on disk.
        HTTPException 500: unexpected database or I/O failure.
    """
    try:
        from app.models import Asset
        # Validate the id up front so bad input is a clear 400, not a DB error.
        try:
            asset_id_int = int(asset_id)
        except (ValueError, TypeError):
            raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}")
        # Try to get asset from database via the ORM first.
        try:
            db_asset = db.query(Asset).filter(Asset.id == asset_id_int).first()
        except Exception as orm_error:
            # Only the known driver/version-parsing failure gets the raw
            # psycopg2 fallback; any other ORM error is a real 500.
            if "Could not determine version" not in str(orm_error):
                raise HTTPException(status_code=500, detail=str(orm_error))
            conn = get_direct_psycopg2_connection()
            if not conn:
                raise HTTPException(status_code=500, detail=str(orm_error))
            try:
                cursor = conn.cursor()
                cursor.execute("""
                    SELECT id, name, file_path, file_type
                    FROM assets
                    WHERE id = %s::bigint
                """, (asset_id_int,))
                row = cursor.fetchone()
                cursor.close()
            except Exception as psycopg2_error:
                raise HTTPException(status_code=500, detail=str(psycopg2_error))
            finally:
                # Always release the connection, even on failure.
                try:
                    conn.close()
                except Exception:
                    pass
            # Raise the not-found cases OUTSIDE the psycopg2 try block so they
            # surface as 404s instead of being rewrapped as 500s (the original
            # code caught its own HTTPExceptions in the generic handler).
            if not row:
                raise HTTPException(status_code=404, detail="Asset not found")
            file_path = Path(row[2])
            if not file_path.exists():
                raise HTTPException(status_code=404, detail="File not found on disk")
            return FileResponse(
                path=str(file_path),
                filename=row[1],
                media_type="application/octet-stream"
            )
        if not db_asset:
            raise HTTPException(status_code=404, detail="Asset not found")
        file_path = Path(db_asset.file_path)
        if not file_path.exists():
            raise HTTPException(status_code=404, detail="File not found on disk")
        # Determine media type from the stored file_type plus extension.
        media_type = _media_type_for(db_asset.file_type, file_path.suffix.lower())
        return FileResponse(
            path=str(file_path),
            filename=db_asset.name,
            media_type=media_type
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


def _media_type_for(file_type: str, suffix: str) -> str:
    """Map an asset's file_type and lowercase extension to a MIME type.

    Unknown combinations fall back to application/octet-stream.
    """
    known = {
        "image": {
            ".jpg": "image/jpeg",
            ".jpeg": "image/jpeg",
            ".png": "image/png",
            ".gif": "image/gif",
            ".webp": "image/webp",
        },
        "video": {
            ".mp4": "video/mp4",
            ".webm": "video/webm",
        },
        "document": {
            ".pdf": "application/pdf",
            ".doc": "application/msword",
            # Correct MIME for .docx; the original served it as
            # application/msword, which is the legacy .doc type.
            ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
        },
    }
    return known.get(file_type, {}).get(suffix, "application/octet-stream")
| # ---- Post Management ---- | |
async def create_post(post_data: dict):
    """Create a new post.

    Stub implementation: echoes the supplied fields back with defaults
    filled in. Nothing is persisted to the database yet.
    """
    now = datetime.utcnow()
    post = {"id": 1}
    # Copy caller-supplied fields, falling back to sensible defaults.
    for field, fallback in (
        ("title", "New Post"),
        ("content", ""),
        ("post_type", "content_only"),
        ("product_category", "ocr"),
        ("scheduled_date", now),
    ):
        post[field] = post_data.get(field, fallback)
    post["status"] = "draft"
    post["created_at"] = now
    return post
async def get_posts():
    """Return the list of posts.

    Stub implementation: returns a single hard-coded sample post until
    database-backed retrieval is wired up.
    """
    now = datetime.utcnow()
    sample_post = {
        "id": 1,
        "title": "OCR Document Automation Benefits",
        "content": "Transform your document processing...",
        "post_type": "carousel",
        "product_category": "ocr",
        "scheduled_date": now,
        "status": "scheduled",
        "created_at": now,
    }
    return [sample_post]
| # ---- Campaign Management ---- | |
async def generate_campaign(campaign_data: dict, db: Session = Depends(get_db)):
    """Generate a campaign schedule using agentic AI.

    Expects campaign_data with ISO-8601 "date_range_start" and
    "date_range_end" (a trailing "Z" is accepted), plus optional
    "products", "post_types", and "posts_per_week" (default 5). Assets
    matching the selected product categories are fetched best-effort and
    handed to the agentic planner.

    Raises:
        HTTPException 400: missing or malformed date range.
        HTTPException 500: planner or unexpected failure.
    """
    try:
        from app.models import Asset
        # Validate the date range explicitly so callers get a 400 with a
        # clear message instead of a generic 500 from the AttributeError the
        # original raised when a date field was missing.
        start_raw = campaign_data.get("date_range_start")
        end_raw = campaign_data.get("date_range_end")
        if not start_raw or not end_raw:
            raise HTTPException(
                status_code=400,
                detail="date_range_start and date_range_end are required"
            )
        try:
            date_range_start = datetime.fromisoformat(start_raw.replace("Z", "+00:00"))
            date_range_end = datetime.fromisoformat(end_raw.replace("Z", "+00:00"))
        except (ValueError, AttributeError) as parse_error:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid date range: {parse_error}"
            )
        products = campaign_data.get("products", [])
        post_types = campaign_data.get("post_types", [])
        posts_per_week = campaign_data.get("posts_per_week", 5)
        # Best-effort: fetch assets for the selected products; campaign
        # generation still proceeds without them if the query fails.
        assets = []
        try:
            db_assets = db.query(Asset).filter(Asset.product_category.in_(products)).all()
            assets = [
                {
                    "id": asset.id,
                    "name": asset.name,
                    "file_type": asset.file_type,
                    "product_category": asset.product_category,
                    "sub_category": asset.sub_category,
                    # Older schema versions may lack these columns.
                    "extracted_content": getattr(asset, "extracted_content", None),
                    "analysis_status": getattr(asset, "analysis_status", None),
                }
                for asset in db_assets
            ]
        except Exception as asset_error:
            print(f"Could not fetch assets: {asset_error}")
            # Continue without assets
        # Use agentic planner to generate campaign
        campaign_plan = await agentic_planner.plan_campaign(
            date_range_start=date_range_start,
            date_range_end=date_range_end,
            products=products,
            post_types=post_types,
            posts_per_week=posts_per_week,
            assets=assets
        )
        return campaign_plan
    except HTTPException:
        # Let deliberate 4xx responses through instead of rewrapping as 500.
        raise
    except Exception as e:
        import traceback
        print(f"Campaign generation error: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail=f"Campaign generation failed: {str(e)}")
# ---- Frontend static serving ----
# Path calculation: /app/backend/app/main.py -> /app/frontend/dist
# NOTE(review): these paths are hard-coded for the Docker image layout;
# outside the container FRONTEND_DIST won't exist and the mount is
# silently skipped — confirm that is the intended local-dev behavior.
FRONTEND_DIST = Path("/app/frontend/dist")
INDEX_FILE = FRONTEND_DIST / "index.html"
if FRONTEND_DIST.exists():
    # Serve built static assets (JS, CSS, images, etc.) from /assets
    assets_dir = FRONTEND_DIST / "assets"
    if assets_dir.exists():
        app.mount("/assets", StaticFiles(directory=str(assets_dir)), name="assets")
# Serve index.html for root
async def serve_index():
    """Serve the SPA entry point (index.html) for the site root."""
    if not INDEX_FILE.exists():
        return {"detail": "Frontend not found"}
    return FileResponse(str(INDEX_FILE))
# SPA fallback: any non-/api route should return React index.html
# This must be last to catch all routes not handled above
async def spa_fallback(full_path: str):
    """Serve the SPA entry point for client-side routes.

    API and static-asset paths are excluded; every other path gets
    index.html so the frontend router can resolve it in the browser.
    """
    # API routes and mounted static assets are handled elsewhere.
    if full_path.startswith(("api/", "assets/")):
        return {"detail": "Not Found"}
    if not INDEX_FILE.exists():
        return {"detail": "Frontend not found"}
    return FileResponse(str(INDEX_FILE))