| from fastapi import FastAPI, UploadFile, File, Form, HTTPException, Depends |
| from fastapi.responses import FileResponse, JSONResponse |
| from fastapi.staticfiles import StaticFiles |
| from fastapi.middleware.cors import CORSMiddleware |
| from pathlib import Path |
| from typing import List, Optional, Dict |
| import os |
| import uuid |
| from datetime import datetime |
| import asyncio |
|
|
| from app.schemas import ( |
| IntegrationResponse, AssetResponse, PostResponse, CampaignResponse, |
| CanvaBrandTemplate, CanvaAutofillRequest, CanvaAutofillResponse, |
| LinkedInPostRequest, AIContentRequest, AIContentResponse |
| ) |
| from app.services.canva_service import CanvaService |
| from app.services.linkedin_service import LinkedInService |
| from app.services.ai_service import AIService |
| from app.services.asset_analyzer import AssetAnalyzer |
| from app.services.agentic_planner import AgenticPlanner |
| from app.database import init_db, get_db, get_direct_psycopg2_connection, ensure_default_user |
| from sqlalchemy.orm import Session |
|
|
# FastAPI application instance; title/version appear in the OpenAPI docs.
app = FastAPI(title="PostGen API", version="1.0.0")


# NOTE(review): per the CORS spec, browsers reject credentialed requests when
# the allowed origin is the wildcard "*". With allow_credentials=True this
# combination will not work for cookie/credential-bearing requests — confirm
# whether credentials are needed and, if so, list explicit origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
| |
| @app.on_event("startup") |
| async def startup_event(): |
| """Initialize database tables on startup""" |
| |
| upload_dir = Path("uploads") |
| upload_dir.mkdir(exist_ok=True) |
| print(f"✓ Uploads directory ready: {upload_dir.absolute()}") |
| |
| db_initialized = init_db() |
| if db_initialized: |
| print("✓ Database initialized successfully") |
| |
| try: |
| user_id = ensure_default_user() |
| print(f"✓ Default user ready (id={user_id})") |
| except Exception as e: |
| print(f"⚠ Could not ensure default user: {e}") |
| else: |
| print("⚠ Database not available - using mock data") |
| print("⚠ App will function normally with dummy content") |
| print("⚠ To connect to database, set DATABASE_URL environment variable") |
|
|
| |
# Module-level service singletons shared across requests. Canva/LinkedIn
# services are *not* created here because they are constructed per-request
# with a caller-supplied access token.
ai_service = AIService()
asset_analyzer = AssetAnalyzer()
agentic_planner = AgenticPlanner()


# In-memory registry for upload progress/status, keyed by upload id.
# NOTE(review): not referenced anywhere in this chunk — confirm it is used
# elsewhere before removing.
upload_status: Dict[str, Dict] = {}
|
|
| |
|
|
| @app.get("/api/health") |
| def health(): |
| return {"status": "ok", "message": "PostGen API is running"} |
|
|
| @app.get("/api/hello") |
| def hello(): |
| return {"message": "Hello from PostGen API"} |
|
|
| |
|
|
| @app.get("/api/canva/brand-templates", response_model=List[CanvaBrandTemplate]) |
| async def get_canva_brand_templates(access_token: str): |
| """Get list of Canva brand templates""" |
| try: |
| canva_service = CanvaService(access_token) |
| templates = await canva_service.get_brand_templates() |
| return templates |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| @app.get("/api/canva/brand-templates/{template_id}/dataset") |
| async def get_canva_template_dataset(template_id: str, access_token: str): |
| """Get dataset for a specific brand template""" |
| try: |
| canva_service = CanvaService(access_token) |
| dataset = await canva_service.get_brand_template_dataset(template_id) |
| return dataset |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| @app.post("/api/canva/autofill", response_model=CanvaAutofillResponse) |
| async def create_canva_autofill(request: CanvaAutofillRequest, access_token: str): |
| """Create an autofill job for a brand template""" |
| try: |
| canva_service = CanvaService(access_token) |
| response = await canva_service.create_autofill_job(request) |
| return response |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| @app.get("/api/canva/autofill/{job_id}") |
| async def get_canva_autofill_status(job_id: str, access_token: str): |
| """Get status of an autofill job""" |
| try: |
| canva_service = CanvaService(access_token) |
| status = await canva_service.get_autofill_job_status(job_id) |
| return status |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| |
|
|
| @app.post("/api/linkedin/post") |
| async def create_linkedin_post(request: LinkedInPostRequest, access_token: str): |
| """Create a LinkedIn post""" |
| try: |
| linkedin_service = LinkedInService(access_token) |
| result = await linkedin_service.create_post( |
| text=request.text, |
| media_uris=request.media_uris |
| ) |
| return result |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| @app.get("/api/linkedin/profile") |
| async def get_linkedin_profile(access_token: str): |
| """Get LinkedIn user profile""" |
| try: |
| linkedin_service = LinkedInService(access_token) |
| profile = await linkedin_service.get_user_profile() |
| return profile |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| |
|
|
| @app.post("/api/ai/generate-content", response_model=AIContentResponse) |
| async def generate_ai_content(request: AIContentRequest, db: Session = Depends(get_db)): |
| """Generate LinkedIn post content using GPT with agentic asset context""" |
| try: |
| |
| asset_insights = None |
| if request.assets: |
| try: |
| from app.models import Asset |
| |
| db_assets = db.query(Asset).filter(Asset.id.in_(request.assets)).all() |
| asset_insights = [] |
| for asset in db_assets: |
| asset_dict = { |
| "id": str(asset.id), |
| "name": asset.name, |
| "product_category": asset.product_category, |
| "extracted_content": asset.extracted_content if hasattr(asset, 'extracted_content') else None |
| } |
| asset_insights.append(asset_dict) |
| except Exception as db_error: |
| |
| print(f"Could not fetch assets from DB: {db_error}") |
| asset_insights = None |
| |
| response = await ai_service.generate_content( |
| request, |
| assets_context=None, |
| asset_insights=asset_insights |
| ) |
| return response |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=f"AI generation failed: {str(e)}") |
|
|
| |
|
|
| @app.get("/api/assets/{asset_id}/status") |
| async def get_asset_status(asset_id, db: Session = Depends(get_db)): |
| """Get the analysis status of an asset""" |
| try: |
| |
| try: |
| asset_id = int(asset_id) |
| print(f"Status check for asset_id: {asset_id} (type: {type(asset_id).__name__})") |
| except (ValueError, TypeError): |
| raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") |
| |
| from app.models import Asset |
| |
| |
| try: |
| db_asset = db.query(Asset).filter(Asset.id == asset_id).first() |
| if db_asset: |
| result = { |
| "asset_id": str(db_asset.id), |
| "name": db_asset.name, |
| "status": db_asset.analysis_status if hasattr(db_asset, 'analysis_status') and db_asset.analysis_status else "pending", |
| "analyzed_at": db_asset.analyzed_at.isoformat() if hasattr(db_asset, 'analyzed_at') and db_asset.analyzed_at else None, |
| "extracted_content": db_asset.extracted_content if hasattr(db_asset, 'extracted_content') else None |
| } |
| return result |
| except Exception as orm_error: |
| |
| error_str = str(orm_error) |
| if "Could not determine version" not in error_str: |
| |
| print(f"ORM query error: {orm_error}") |
| |
| |
| conn = get_direct_psycopg2_connection() |
| if not conn: |
| raise HTTPException(status_code=500, detail="Database connection failed") |
| |
| try: |
| cursor = conn.cursor() |
| |
| try: |
| cursor.execute(""" |
| SELECT column_name |
| FROM information_schema.columns |
| WHERE table_name='assets' AND column_name='extracted_content' |
| """) |
| has_extracted_content = cursor.fetchone() is not None |
| except Exception as col_check_error: |
| print(f"Column check error (non-fatal): {col_check_error}") |
| has_extracted_content = False |
| |
| |
| try: |
| |
| if has_extracted_content: |
| cursor.execute(""" |
| SELECT id, name, analysis_status, analyzed_at, extracted_content |
| FROM assets |
| WHERE id = %s::bigint |
| """, (asset_id,)) |
| else: |
| cursor.execute(""" |
| SELECT id, name, analysis_status, analyzed_at |
| FROM assets |
| WHERE id = %s::bigint |
| """, (asset_id,)) |
| except Exception as query_error: |
| print(f"Query error for asset_id {asset_id}: {query_error}") |
| |
| try: |
| if has_extracted_content: |
| cursor.execute(""" |
| SELECT id, name, analysis_status, analyzed_at, extracted_content |
| FROM assets |
| WHERE id = %s |
| """, (asset_id,)) |
| else: |
| cursor.execute(""" |
| SELECT id, name, analysis_status, analyzed_at |
| FROM assets |
| WHERE id = %s |
| """, (asset_id,)) |
| except Exception as fallback_error: |
| cursor.close() |
| conn.close() |
| raise HTTPException(status_code=500, detail=f"Query failed: {str(fallback_error)}") |
| |
| row = cursor.fetchone() |
| |
| |
| if not row: |
| try: |
| cursor.execute("SELECT COUNT(*) FROM assets WHERE id = %s", (asset_id,)) |
| count = cursor.fetchone()[0] |
| print(f"Debug: Asset ID {asset_id} (type: {type(asset_id)}) - Count: {count}") |
| |
| |
| cursor.execute("SELECT id, name FROM assets ORDER BY id DESC LIMIT 5") |
| recent = cursor.fetchall() |
| print(f"Debug: Recent asset IDs: {[r[0] for r in recent]}") |
| except Exception as debug_error: |
| print(f"Debug query error: {debug_error}") |
| |
| cursor.close() |
| conn.close() |
| |
| if row: |
| result = { |
| "asset_id": str(row[0]), |
| "name": row[1], |
| "status": row[2] or "pending", |
| "analyzed_at": row[3].isoformat() if row[3] else None, |
| } |
| |
| if has_extracted_content and len(row) > 4: |
| result["extracted_content"] = row[4] |
| else: |
| result["extracted_content"] = None |
| return result |
| else: |
| |
| print(f"Asset not found: id={asset_id}, type={type(asset_id)}") |
| raise HTTPException(status_code=404, detail=f"Asset not found: {asset_id}") |
| except HTTPException: |
| raise |
| except Exception as e: |
| if conn: |
| try: |
| cursor.close() |
| conn.close() |
| except: |
| pass |
| print(f"Error in get_asset_status for asset_id {asset_id}: {e}") |
| import traceback |
| print(traceback.format_exc()) |
| raise HTTPException(status_code=500, detail=f"Database error: {str(e)}") |
| except HTTPException: |
| raise |
| except Exception as e: |
| print(f"Error in get_asset_status (outer) for asset_id {asset_id}: {e}") |
| import traceback |
| print(traceback.format_exc()) |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
def _set_asset_analysis_status(asset_id: int, status: str) -> None:
    """Best-effort update of assets.analysis_status via a direct connection.

    Opens its own psycopg2 connection, runs a single UPDATE, and swallows
    (but logs) any failure — callers must never crash because a status
    write failed. The connection is always closed.
    """
    conn = get_direct_psycopg2_connection()
    if not conn:
        return
    try:
        cursor = conn.cursor()
        cursor.execute("""
            UPDATE assets
            SET analysis_status = %s
            WHERE id = %s
        """, (status, asset_id))
        conn.commit()
        cursor.close()
    except Exception as update_error:
        print(f"Could not update analysis status: {update_error}")
    finally:
        try:
            conn.close()
        except Exception:
            pass


async def analyze_asset_background(asset_id: int, file_path: str, file_type: str):
    """Background task: analyze an uploaded asset and persist the result.

    Marks the asset 'processing', runs the analyzer on the stored file, then
    records the extracted content with status 'completed' — or 'failed' on
    any error. All database writes are best-effort; this task never raises.
    """
    try:
        _set_asset_analysis_status(asset_id, 'processing')

        analysis_result = await asset_analyzer.analyze_document(str(file_path))
        if not (analysis_result.get("success") and analysis_result.get("extracted_content")):
            # Analyzer returned no usable content.
            _set_asset_analysis_status(asset_id, 'failed')
            return

        # Persist the extracted content and completion timestamp.
        conn = get_direct_psycopg2_connection()
        if not conn:
            return
        saved = False
        try:
            cursor = conn.cursor()
            import json
            extracted_json = json.dumps(analysis_result["extracted_content"])
            cursor.execute("""
                UPDATE assets
                SET extracted_content = %s::jsonb,
                    analysis_status = 'completed',
                    analyzed_at = NOW()
                WHERE id = %s
            """, (extracted_json, asset_id))
            conn.commit()
            cursor.close()
            saved = True
            print(f"✓ Asset {asset_id} analyzed successfully")
        except Exception as update_error:
            print(f"Could not save extracted content: {update_error}")
        finally:
            try:
                conn.close()
            except Exception:
                pass
        if not saved:
            # Saving the content failed: record the failure on a fresh
            # connection (the previous one may be in an aborted state).
            _set_asset_analysis_status(asset_id, 'failed')
    except Exception as analysis_error:
        print(f"Asset analysis error: {analysis_error}")
        _set_asset_analysis_status(asset_id, 'failed')
|
|
| @app.post("/api/assets/upload") |
| async def upload_asset( |
| file: UploadFile = File(...), |
| product_category: str = Form(None), |
| sub_category: Optional[str] = Form(None), |
| db: Session = Depends(get_db) |
| ): |
| """Upload an asset to the repository""" |
| try: |
| |
| upload_dir = Path("uploads") |
| upload_dir.mkdir(exist_ok=True) |
| |
| |
| content = await file.read() |
| file_size = len(content) |
| |
| |
| file_type = "unknown" |
| if file.content_type: |
| if file.content_type.startswith("image/"): |
| file_type = "image" |
| elif file.content_type.startswith("video/"): |
| file_type = "video" |
| elif file.content_type.startswith("application/pdf") or "document" in file.content_type: |
| file_type = "document" |
| |
| |
| |
| safe_filename = file.filename.replace('/', '_').replace('\\', '_') |
| |
| file_stem = Path(safe_filename).stem |
| file_suffix = Path(safe_filename).suffix |
| unique_filename = f"{file_stem}_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}{file_suffix}" |
| file_path = upload_dir / unique_filename |
| |
| file_path = file_path.resolve() |
| with open(file_path, "wb") as buffer: |
| buffer.write(content) |
| |
| print(f"✓ File saved to: {file_path} (absolute path)") |
| |
| |
| try: |
| from app.models import Asset |
| |
| user_id = ensure_default_user() |
| |
| db_asset = Asset( |
| name=file.filename, |
| file_path=str(file_path), |
| file_type=file_type, |
| product_category=product_category or "ocr", |
| sub_category=sub_category if sub_category and sub_category != "none" else None, |
| size=file_size, |
| user_id=user_id |
| ) |
| db.add(db_asset) |
| try: |
| db.commit() |
| try: |
| db.refresh(db_asset) |
| except Exception as refresh_error: |
| |
| |
| if "Could not determine version" in str(refresh_error): |
| |
| conn = get_direct_psycopg2_connection() |
| if conn: |
| try: |
| cursor = conn.cursor() |
| cursor.execute(""" |
| SELECT id, created_at FROM assets |
| WHERE name = %s AND file_path = %s |
| ORDER BY id DESC LIMIT 1 |
| """, (file.filename, str(file_path))) |
| row = cursor.fetchone() |
| cursor.close() |
| conn.close() |
| if row: |
| |
| db_asset.id = row[0] |
| print(f"✓ Asset created with ID: {db_asset.id} (type: {type(db_asset.id).__name__})") |
| if hasattr(db_asset, 'created_at') and row[1]: |
| db_asset.created_at = row[1] |
| except Exception as psycopg2_error: |
| print(f"Direct psycopg2 query failed: {psycopg2_error}") |
| if conn: |
| conn.close() |
| else: |
| raise refresh_error |
| except Exception as commit_error: |
| |
| db.rollback() |
| error_str = str(commit_error) |
| if "Could not determine version" in error_str: |
| |
| |
| user_id = ensure_default_user() |
| conn = get_direct_psycopg2_connection() |
| if conn: |
| try: |
| cursor = conn.cursor() |
| cursor.execute(""" |
| INSERT INTO assets (name, file_path, file_type, product_category, sub_category, size, user_id, created_at) |
| VALUES (%s, %s, %s, %s, %s, %s, %s, NOW()) |
| RETURNING id, created_at |
| """, ( |
| file.filename, |
| str(file_path), |
| file_type, |
| product_category or "ocr", |
| sub_category if sub_category and sub_category != "none" else None, |
| file_size, |
| user_id |
| )) |
| row = cursor.fetchone() |
| conn.commit() |
| if row: |
| |
| db_asset.id = row[0] |
| db_asset.created_at = row[1] |
| print(f"✓ Asset created with ID: {db_asset.id} (type: {type(db_asset.id).__name__})") |
| cursor.close() |
| conn.close() |
| |
| else: |
| cursor.close() |
| conn.close() |
| raise Exception("Failed to get asset ID after insert") |
| except Exception as psycopg2_error: |
| print(f"Direct psycopg2 insert failed: {psycopg2_error}") |
| if conn: |
| try: |
| cursor.close() |
| except: |
| pass |
| conn.close() |
| |
| |
| print(f"⚠ Asset file saved but database insert failed: {psycopg2_error}") |
| else: |
| print("⚠ Direct psycopg2 connection failed - asset file saved but not in database") |
| |
| else: |
| raise commit_error |
| |
| |
| asset_id = db_asset.id |
| if file_type in ["document", "image"]: |
| |
| asyncio.create_task(analyze_asset_background(asset_id, str(file_path), file_type)) |
| |
| return { |
| "id": str(db_asset.id), |
| "name": db_asset.name, |
| "file_type": db_asset.file_type, |
| "product_category": db_asset.product_category, |
| "sub_category": db_asset.sub_category, |
| "size": db_asset.size, |
| "analysis_status": "processing" if file_type in ["document", "image"] else "pending", |
| "created_at": db_asset.created_at.isoformat() if hasattr(db_asset, 'created_at') else datetime.utcnow().isoformat() |
| } |
| except Exception as db_error: |
| |
| |
| print(f"Database save warning: {db_error}") |
| return { |
| "id": "1", |
| "name": file.filename, |
| "file_type": file_type, |
| "product_category": product_category, |
| "sub_category": sub_category, |
| "size": file_size, |
| "created_at": datetime.utcnow().isoformat() |
| } |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| @app.get("/api/assets", response_model=List[AssetResponse]) |
| async def get_assets( |
| product_category: Optional[str] = None, |
| db: Session = Depends(get_db) |
| ): |
| """Get list of assets""" |
| try: |
| from app.models import Asset |
| from sqlalchemy import text |
| |
| |
| try: |
| query = db.query(Asset) |
| |
| if product_category and product_category != "all": |
| query = query.filter(Asset.product_category == product_category) |
| |
| db_assets = query.order_by(Asset.created_at.desc()).all() |
| |
| |
| assets = [] |
| for asset in db_assets: |
| assets.append({ |
| "id": str(asset.id), |
| "name": asset.name, |
| "file_type": asset.file_type, |
| "product_category": asset.product_category, |
| "sub_category": asset.sub_category, |
| "size": asset.size, |
| "extracted_content": asset.extracted_content if hasattr(asset, 'extracted_content') else None, |
| "analysis_status": asset.analysis_status if hasattr(asset, 'analysis_status') else None, |
| "analyzed_at": asset.analyzed_at.isoformat() if hasattr(asset, 'analyzed_at') and asset.analyzed_at else None, |
| "created_at": asset.created_at |
| }) |
| except Exception as orm_error: |
| |
| error_str = str(orm_error) |
| if "Could not determine version" in error_str: |
| |
| conn = get_direct_psycopg2_connection() |
| if conn: |
| try: |
| cursor = conn.cursor() |
| if product_category and product_category != "all": |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type, product_category, sub_category, size, |
| extracted_content, analysis_status, analyzed_at, created_at |
| FROM assets |
| WHERE product_category = %s |
| ORDER BY created_at DESC |
| """, (product_category,)) |
| else: |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type, product_category, sub_category, size, |
| extracted_content, analysis_status, analyzed_at, created_at |
| FROM assets |
| ORDER BY created_at DESC |
| """) |
| rows = cursor.fetchall() |
| cursor.close() |
| conn.close() |
| |
| assets = [] |
| for row in rows: |
| assets.append({ |
| "id": str(row[0]), |
| "name": row[1], |
| "file_type": row[3], |
| "product_category": row[4], |
| "sub_category": row[5], |
| "size": row[6], |
| "extracted_content": row[7] if len(row) > 7 else None, |
| "analysis_status": row[8] if len(row) > 8 else None, |
| "analyzed_at": row[9].isoformat() if len(row) > 9 and row[9] else None, |
| "created_at": row[10] if len(row) > 10 else row[6] |
| }) |
| except Exception as psycopg2_error: |
| print(f"Direct psycopg2 query failed: {psycopg2_error}") |
| if conn: |
| conn.close() |
| assets = [] |
| else: |
| assets = [] |
| else: |
| print(f"ORM query error: {orm_error}") |
| assets = [] |
| |
| |
| mock_assets = [ |
| { |
| "id": "9991", |
| "name": "OCR_Demo_Screenshot.png", |
| "file_type": "image", |
| "product_category": "ocr", |
| "sub_category": None, |
| "size": 2516582, |
| "created_at": datetime(2024, 12, 20) |
| }, |
| { |
| "id": "9992", |
| "name": "P2P_Workflow_Diagram.pdf", |
| "file_type": "document", |
| "product_category": "p2p", |
| "sub_category": "Budget Approval Workflow", |
| "size": 1024000, |
| "created_at": datetime(2024, 12, 19) |
| }, |
| { |
| "id": "9993", |
| "name": "O2C_Process_Video.mp4", |
| "file_type": "video", |
| "product_category": "o2c", |
| "sub_category": "Sales Order Workflow", |
| "size": 15728640, |
| "created_at": datetime(2024, 12, 18) |
| } |
| ] |
| |
| |
| return assets + mock_assets |
| except Exception as e: |
| |
| print(f"Database query warning: {e}") |
| return [ |
| { |
| "id": 1, |
| "name": "OCR_Demo_Screenshot.png", |
| "file_type": "image", |
| "product_category": "ocr", |
| "sub_category": None, |
| "size": 2516582, |
| "created_at": datetime.utcnow() |
| } |
| ] |
|
|
| @app.delete("/api/assets/{asset_id}") |
| async def delete_asset(asset_id, db: Session = Depends(get_db)): |
| """Delete an asset from both filesystem and database""" |
| try: |
| from app.models import Asset |
| |
| |
| try: |
| asset_id_int = int(asset_id) |
| except (ValueError, TypeError): |
| raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") |
| |
| |
| conn = get_direct_psycopg2_connection() |
| if not conn: |
| raise HTTPException(status_code=500, detail="Database connection failed") |
| |
| try: |
| cursor = conn.cursor() |
| cursor.execute(""" |
| SELECT id, name, file_path |
| FROM assets |
| WHERE id = %s::bigint |
| """, (asset_id_int,)) |
| row = cursor.fetchone() |
| |
| if not row: |
| cursor.close() |
| conn.close() |
| raise HTTPException(status_code=404, detail="Asset not found") |
| |
| file_path = Path(row[2]) |
| |
| |
| if file_path.exists(): |
| try: |
| file_path.unlink() |
| print(f"✓ Deleted file: {file_path}") |
| except Exception as file_error: |
| print(f"⚠ Could not delete file: {file_error}") |
| |
| |
| |
| cursor.execute("DELETE FROM assets WHERE id = %s::bigint", (asset_id_int,)) |
| conn.commit() |
| cursor.close() |
| conn.close() |
| |
| return { |
| "success": True, |
| "message": f"Asset '{row[1]}' deleted successfully", |
| "asset_id": str(asset_id_int) |
| } |
| except Exception as db_error: |
| if conn: |
| conn.close() |
| raise HTTPException(status_code=500, detail=f"Delete failed: {str(db_error)}") |
| except HTTPException: |
| raise |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
| @app.get("/api/assets/{asset_id}/pdf-pages") |
| async def get_pdf_pages(asset_id, db: Session = Depends(get_db)): |
| """Convert PDF to images and return page URLs""" |
| try: |
| from app.models import Asset |
| try: |
| from pdf2image import convert_from_path |
| except ImportError: |
| raise HTTPException( |
| status_code=503, |
| detail="PDF conversion not available. Please install pdf2image and poppler-utils." |
| ) |
| import base64 |
| from io import BytesIO |
| |
| |
| try: |
| asset_id_int = int(asset_id) |
| print(f"PDF preview request: asset_id={asset_id} -> {asset_id_int} (type: {type(asset_id_int).__name__})") |
| except (ValueError, TypeError): |
| raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}") |
| |
| |
| db_asset = None |
| print(f"PDF preview: Looking for asset_id={asset_id_int} (type: {type(asset_id_int).__name__})") |
| orm_failed = False |
| try: |
| db_asset = db.query(Asset).filter(Asset.id == asset_id_int).first() |
| if db_asset: |
| print(f"✓ Found asset via ORM: {db_asset.name} (id={db_asset.id})") |
| else: |
| print(f"⚠ Asset not found via ORM for id={asset_id_int}") |
| |
| try: |
| all_assets = db.query(Asset).limit(10).all() |
| asset_list = [(str(a.id), a.name) for a in all_assets] |
| print(f"Debug: Assets in DB (showing IDs as strings): {asset_list}") |
| |
| try: |
| test_asset = db.query(Asset).first() |
| if test_asset: |
| print(f"Debug: Sample asset ID type: {type(test_asset.id).__name__}, value: {test_asset.id}") |
| except: |
| pass |
| except Exception as debug_error: |
| print(f"Debug query error: {debug_error}") |
| orm_failed = True |
| except Exception as orm_error: |
| orm_failed = True |
| |
| error_str = str(orm_error) |
| print(f"ORM query error: {error_str}") |
| if "Could not determine version" in error_str: |
| |
| conn = get_direct_psycopg2_connection() |
| if conn: |
| try: |
| cursor = conn.cursor() |
| |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type |
| FROM assets |
| WHERE id = %s::bigint |
| """, (asset_id_int,)) |
| row = cursor.fetchone() |
| |
| |
| if not row: |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type |
| FROM assets |
| WHERE id = %s |
| """, (asset_id_int,)) |
| row = cursor.fetchone() |
| |
| |
| if not row: |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type |
| FROM assets |
| WHERE id::text = %s |
| """, (str(asset_id_int),)) |
| row = cursor.fetchone() |
| |
| cursor.close() |
| conn.close() |
| |
| if row: |
| print(f"✓ Found asset via direct connection (exception handler): {row[1]} (id={row[0]})") |
| file_path_str = row[2] |
| print(f"Debug: file_path from DB: {file_path_str}") |
| |
| file_path = Path(file_path_str) |
| |
| |
| if file_path.is_absolute(): |
| |
| if not file_path.exists(): |
| raise HTTPException(status_code=404, detail=f"File not found on disk: {file_path}") |
| else: |
| |
| possible_paths = [ |
| Path.cwd() / file_path_str, |
| Path("/app") / file_path_str, |
| Path("/app/uploads") / Path(file_path_str).name, |
| Path(file_path_str).resolve(), |
| ] |
| |
| file_path = None |
| for possible_path in possible_paths: |
| if possible_path.exists(): |
| file_path = possible_path |
| print(f"✓ Found file at: {file_path}") |
| break |
| |
| if not file_path: |
| |
| uploads_dir = Path("/app/uploads") |
| if uploads_dir.exists(): |
| files_in_uploads = list(uploads_dir.glob("*.pdf")) |
| print(f"Debug: PDF files in /app/uploads: {[str(f) for f in files_in_uploads[:5]]}") |
| raise HTTPException(status_code=404, detail=f"File not found. Tried: {[str(p) for p in possible_paths]}") |
| |
| print(f"Debug: Using file_path: {file_path} (exists: {file_path.exists()})") |
| |
| if row[3] != "document" or not str(file_path).lower().endswith('.pdf'): |
| raise HTTPException(status_code=400, detail="File is not a PDF") |
| |
| |
| try: |
| images = convert_from_path(str(file_path), dpi=150) |
| |
| page_images = [] |
| for i, image in enumerate(images): |
| buffered = BytesIO() |
| image.save(buffered, format="PNG") |
| img_str = base64.b64encode(buffered.getvalue()).decode() |
| page_images.append({ |
| "page_number": i + 1, |
| "image_data": f"data:image/png;base64,{img_str}" |
| }) |
| |
| return { |
| "asset_id": str(asset_id_int), |
| "asset_name": row[1], |
| "total_pages": len(page_images), |
| "pages": page_images |
| } |
| except Exception as pdf_error: |
| error_msg = str(pdf_error) |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): |
| raise HTTPException( |
| status_code=503, |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." |
| ) |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") |
| else: |
| print(f"⚠ Asset not found in exception handler, will try fallback") |
| |
| except HTTPException: |
| raise |
| except Exception as db_error: |
| print(f"Direct connection error in exception handler: {db_error}") |
| if conn: |
| try: |
| conn.close() |
| except: |
| pass |
| |
| else: |
| print("⚠ Direct connection failed in exception handler, will try fallback") |
| |
| else: |
| |
| raise HTTPException(status_code=500, detail=f"Database query failed: {str(orm_error)}") |
| |
| |
| if db_asset: |
| file_path_str = db_asset.file_path |
| print(f"Debug: file_path from ORM: {file_path_str}") |
| |
| file_path = Path(file_path_str) |
| |
| |
| if file_path.is_absolute(): |
| |
| if not file_path.exists(): |
| raise HTTPException(status_code=404, detail=f"File not found on disk: {file_path}") |
| else: |
| |
| possible_paths = [ |
| Path.cwd() / file_path_str, |
| Path("/app") / file_path_str, |
| Path("/app/uploads") / Path(file_path_str).name, |
| Path(file_path_str).resolve(), |
| ] |
| |
| file_path = None |
| for possible_path in possible_paths: |
| if possible_path.exists(): |
| file_path = possible_path |
| print(f"✓ Found file at: {file_path}") |
| break |
| |
| if not file_path: |
| |
| uploads_dir = Path("/app/uploads") |
| if uploads_dir.exists(): |
| files_in_uploads = list(uploads_dir.glob("*.pdf")) |
| print(f"Debug: PDF files in /app/uploads: {[str(f) for f in files_in_uploads[:5]]}") |
| raise HTTPException(status_code=404, detail=f"File not found. Tried: {[str(p) for p in possible_paths]}") |
| |
| print(f"Debug: Using file_path: {file_path} (exists: {file_path.exists()})") |
| |
| if db_asset.file_type != "document" or not str(file_path).lower().endswith('.pdf'): |
| raise HTTPException(status_code=400, detail="File is not a PDF") |
| |
| |
| try: |
| images = convert_from_path(str(file_path), dpi=150) |
| |
| page_images = [] |
| for i, image in enumerate(images): |
| buffered = BytesIO() |
| image.save(buffered, format="PNG") |
| img_str = base64.b64encode(buffered.getvalue()).decode() |
| page_images.append({ |
| "page_number": i + 1, |
| "image_data": f"data:image/png;base64,{img_str}" |
| }) |
| |
| return { |
| "asset_id": str(asset_id_int), |
| "asset_name": db_asset.name, |
| "total_pages": len(page_images), |
| "pages": page_images |
| } |
| except Exception as pdf_error: |
| error_msg = str(pdf_error) |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): |
| raise HTTPException( |
| status_code=503, |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." |
| ) |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") |
| |
| |
| print(f"Asset not found via ORM, trying direct connection for id={asset_id_int}") |
| conn = get_direct_psycopg2_connection() |
| if not conn: |
| |
| raise HTTPException(status_code=404, detail="Asset not found (database connection failed)") |
| |
| try: |
| cursor = conn.cursor() |
| |
| |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type |
| FROM assets |
| WHERE id = %s::bigint |
| """, (asset_id_int,)) |
| row = cursor.fetchone() |
| |
| |
| if not row: |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type |
| FROM assets |
| WHERE id = %s |
| """, (asset_id_int,)) |
| row = cursor.fetchone() |
| |
| |
| if not row: |
| cursor.execute(""" |
| SELECT id, name, file_path, file_type |
| FROM assets |
| WHERE id::text = %s |
| """, (str(asset_id_int),)) |
| row = cursor.fetchone() |
| |
| |
| if not row: |
| cursor.execute("SELECT id, name FROM assets ORDER BY id DESC LIMIT 10") |
| recent = cursor.fetchall() |
| print(f"Debug: Recent asset IDs in DB: {[(str(r[0]), r[1]) for r in recent]}") |
| |
| cursor.close() |
| conn.close() |
| |
| if row: |
| print(f"✓ Found asset via direct connection: {row[1]} (id={row[0]})") |
| file_path_str = row[2] |
| print(f"Debug: file_path from DB (fallback): {file_path_str}") |
| |
| file_path = Path(file_path_str) |
| |
| |
| if file_path.is_absolute(): |
| |
| if not file_path.exists(): |
| raise HTTPException(status_code=404, detail=f"File not found on disk: {file_path}") |
| else: |
| |
| possible_paths = [ |
| Path.cwd() / file_path_str, |
| Path("/app") / file_path_str, |
| Path("/app/uploads") / Path(file_path_str).name, |
| Path(file_path_str).resolve(), |
| ] |
| |
| file_path = None |
| for possible_path in possible_paths: |
| if possible_path.exists(): |
| file_path = possible_path |
| print(f"✓ Found file at: {file_path}") |
| break |
| |
| if not file_path: |
| |
| uploads_dir = Path("/app/uploads") |
| if uploads_dir.exists(): |
| files_in_uploads = list(uploads_dir.glob("*.pdf")) |
| print(f"Debug: PDF files in /app/uploads: {[str(f) for f in files_in_uploads[:5]]}") |
| raise HTTPException(status_code=404, detail=f"File not found. Tried: {[str(p) for p in possible_paths]}") |
| |
| print(f"Debug: Using file_path (fallback): {file_path} (exists: {file_path.exists()})") |
| |
| if row[3] != "document" or not str(file_path).lower().endswith('.pdf'): |
| raise HTTPException(status_code=400, detail="File is not a PDF") |
| |
| |
| try: |
| images = convert_from_path(str(file_path), dpi=150) |
| |
| page_images = [] |
| for i, image in enumerate(images): |
| buffered = BytesIO() |
| image.save(buffered, format="PNG") |
| img_str = base64.b64encode(buffered.getvalue()).decode() |
| page_images.append({ |
| "page_number": i + 1, |
| "image_data": f"data:image/png;base64,{img_str}" |
| }) |
| |
| return { |
| "asset_id": str(asset_id_int), |
| "asset_name": row[1], |
| "total_pages": len(page_images), |
| "pages": page_images |
| } |
| except Exception as pdf_error: |
| error_msg = str(pdf_error) |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): |
| raise HTTPException( |
| status_code=503, |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." |
| ) |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") |
| else: |
| raise HTTPException(status_code=404, detail=f"Asset not found. Searched for id={asset_id_int}") |
| except HTTPException: |
| raise |
| except Exception as direct_error: |
| print(f"Direct connection error: {direct_error}") |
| import traceback |
| print(traceback.format_exc()) |
| raise HTTPException(status_code=500, detail=f"Database error: {str(direct_error)}") |
| |
| file_path = Path(db_asset.file_path) |
| if not file_path.exists(): |
| raise HTTPException(status_code=404, detail="File not found on disk") |
| |
| if db_asset.file_type != "document" or not str(file_path).lower().endswith('.pdf'): |
| raise HTTPException(status_code=400, detail="File is not a PDF") |
| |
| |
| try: |
| images = convert_from_path(str(file_path), dpi=150) |
| |
| page_images = [] |
| for i, image in enumerate(images): |
| buffered = BytesIO() |
| image.save(buffered, format="PNG") |
| img_str = base64.b64encode(buffered.getvalue()).decode() |
| page_images.append({ |
| "page_number": i + 1, |
| "image_data": f"data:image/png;base64,{img_str}" |
| }) |
| |
| return { |
| "asset_id": str(asset_id_int), |
| "asset_name": db_asset.name, |
| "total_pages": len(page_images), |
| "pages": page_images |
| } |
| except Exception as pdf_error: |
| error_msg = str(pdf_error) |
| if "poppler" in error_msg.lower() or "pdftoppm" in error_msg.lower(): |
| raise HTTPException( |
| status_code=503, |
| detail="PDF conversion requires poppler-utils to be installed on the server. PDFs can still be downloaded." |
| ) |
| raise HTTPException(status_code=500, detail=f"PDF conversion failed: {error_msg}") |
| except HTTPException: |
| raise |
| except Exception as e: |
| raise HTTPException(status_code=500, detail=str(e)) |
|
|
# Media types keyed by (asset file_type, lowercase suffix); anything not
# listed falls back to application/octet-stream.
_ASSET_MEDIA_TYPES = {
    ("image", ".jpg"): "image/jpeg",
    ("image", ".jpeg"): "image/jpeg",
    ("image", ".png"): "image/png",
    ("image", ".gif"): "image/gif",
    ("image", ".webp"): "image/webp",
    ("video", ".mp4"): "video/mp4",
    ("video", ".webm"): "video/webm",
    ("document", ".pdf"): "application/pdf",
    ("document", ".doc"): "application/msword",
    ("document", ".docx"): "application/msword",
}


def _guess_asset_media_type(file_type: str, suffix: str) -> str:
    """Map an asset's stored file_type + file extension to a response media type."""
    return _ASSET_MEDIA_TYPES.get((file_type, suffix), "application/octet-stream")


@app.get("/api/assets/{asset_id}/download")
async def download_asset(asset_id: str, db: Session = Depends(get_db)):
    """Download or preview an asset file.

    Looks the asset up via the ORM, falling back to a direct psycopg2
    connection when the ORM fails with the known "Could not determine
    version" error, then streams the file from disk with a best-guess
    media type.

    Raises:
        HTTPException: 400 for a non-integer id, 404 when the asset or its
            file is missing, 500 for unexpected database errors.
    """
    try:
        from app.models import Asset

        # Path parameters arrive as strings; reject non-numeric ids early.
        try:
            asset_id_int = int(asset_id)
        except (ValueError, TypeError):
            raise HTTPException(status_code=400, detail=f"Invalid asset ID: {asset_id}")

        try:
            db_asset = db.query(Asset).filter(Asset.id == asset_id_int).first()
        except Exception as orm_error:
            # Known SQLAlchemy dialect issue — retry over a raw psycopg2 connection.
            if "Could not determine version" in str(orm_error):
                conn = get_direct_psycopg2_connection()
                if conn:
                    try:
                        cursor = conn.cursor()
                        try:
                            cursor.execute("""
                                SELECT id, name, file_path, file_type
                                FROM assets
                                WHERE id = %s::bigint
                            """, (asset_id_int,))
                            row = cursor.fetchone()
                        finally:
                            # BUGFIX: previously the cursor/connection leaked when
                            # the query or the checks below raised.
                            cursor.close()
                            conn.close()
                        if row:
                            file_path = Path(row[2])
                            if file_path.exists():
                                return FileResponse(
                                    path=str(file_path),
                                    filename=row[1],
                                    media_type="application/octet-stream"
                                )
                            raise HTTPException(status_code=404, detail="File not found on disk")
                        raise HTTPException(status_code=404, detail="Asset not found")
                    except HTTPException:
                        # BUGFIX: the 404s above were previously swallowed by the
                        # generic handler below and re-raised as 500s.
                        raise
                    except Exception as psycopg2_error:
                        raise HTTPException(status_code=500, detail=str(psycopg2_error))
            raise HTTPException(status_code=500, detail=str(orm_error))

        if not db_asset:
            raise HTTPException(status_code=404, detail="Asset not found")

        file_path = Path(db_asset.file_path)
        if not file_path.exists():
            raise HTTPException(status_code=404, detail="File not found on disk")

        media_type = _guess_asset_media_type(db_asset.file_type, file_path.suffix.lower())

        return FileResponse(
            path=str(file_path),
            filename=db_asset.name,
            media_type=media_type
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
|
| |
|
|
@app.post("/api/posts", response_model=PostResponse)
async def create_post(post_data: dict):
    """Create a new post.

    Mock implementation: echoes the submitted fields back with defaults
    filled in; nothing is persisted.
    """
    def field(key, fallback):
        # Pull a value from the payload, falling back to the mock default.
        return post_data.get(key, fallback)

    timestamp = datetime.utcnow()
    return {
        "id": 1,
        "title": field("title", "New Post"),
        "content": field("content", ""),
        "post_type": field("post_type", "content_only"),
        "product_category": field("product_category", "ocr"),
        "scheduled_date": field("scheduled_date", timestamp),
        "status": "draft",
        "created_at": timestamp,
    }
|
|
@app.get("/api/posts", response_model=List[PostResponse])
async def get_posts():
    """Get list of posts.

    Mock implementation: returns a single canned scheduled post.
    """
    now = datetime.utcnow()
    sample_post = {
        "id": 1,
        "title": "OCR Document Automation Benefits",
        "content": "Transform your document processing...",
        "post_type": "carousel",
        "product_category": "ocr",
        "scheduled_date": now,
        "status": "scheduled",
        "created_at": now,
    }
    return [sample_post]
|
|
| |
|
|
@app.post("/api/campaigns/generate")
async def generate_campaign(campaign_data: dict, db: Session = Depends(get_db)):
    """Generate a campaign schedule using agentic AI.

    Expects ``date_range_start``/``date_range_end`` as ISO-8601 strings
    (a trailing ``Z`` is accepted), plus optional ``products``,
    ``post_types`` and ``posts_per_week``.

    Raises:
        HTTPException: 400 for missing or malformed date fields,
            500 when campaign planning fails.
    """
    try:
        from datetime import datetime
        from app.models import Asset

        def _parse_date(key: str) -> datetime:
            # BUGFIX: a missing/malformed date previously surfaced as an opaque
            # 500 ("'NoneType' object has no attribute 'replace'"); report 400.
            raw = campaign_data.get(key)
            if not raw:
                raise HTTPException(status_code=400, detail=f"Missing required field: {key}")
            try:
                return datetime.fromisoformat(raw.replace("Z", "+00:00"))
            except ValueError:
                raise HTTPException(status_code=400, detail=f"Invalid ISO date for {key}: {raw}")

        date_range_start = _parse_date("date_range_start")
        date_range_end = _parse_date("date_range_end")
        products = campaign_data.get("products", [])
        post_types = campaign_data.get("post_types", [])
        posts_per_week = campaign_data.get("posts_per_week", 5)

        # Collect matching assets for the planner. Best-effort: a DB failure
        # degrades to planning without asset context rather than aborting.
        assets = []
        try:
            db_assets = db.query(Asset).filter(Asset.product_category.in_(products)).all()
            for asset in db_assets:
                assets.append({
                    "id": asset.id,
                    "name": asset.name,
                    "file_type": asset.file_type,
                    "product_category": asset.product_category,
                    "sub_category": asset.sub_category,
                    # Older schemas may lack these columns.
                    "extracted_content": getattr(asset, "extracted_content", None),
                    "analysis_status": getattr(asset, "analysis_status", None),
                })
        except Exception as asset_error:
            print(f"Could not fetch assets: {asset_error}")

        campaign_plan = await agentic_planner.plan_campaign(
            date_range_start=date_range_start,
            date_range_end=date_range_end,
            products=products,
            post_types=post_types,
            posts_per_week=posts_per_week,
            assets=assets
        )

        return campaign_plan
    except HTTPException:
        # Propagate deliberate client errors (400) instead of wrapping as 500.
        raise
    except Exception as e:
        import traceback
        print(f"Campaign generation error: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail=f"Campaign generation failed: {str(e)}")
|
|
| |
| |
# Location of the compiled frontend bundle inside the container image.
FRONTEND_DIST = Path("/app/frontend/dist")
INDEX_FILE = FRONTEND_DIST / "index.html"


# Mount the frontend's static assets only when a build is present (e.g. in
# the production container); skipped during API-only development runs.
if FRONTEND_DIST.exists():

    assets_dir = FRONTEND_DIST / "assets"
    if assets_dir.exists():
        app.mount("/assets", StaticFiles(directory=str(assets_dir)), name="assets")
| |
| |
@app.get("/")
async def serve_index():
    """Serve the SPA entry point, or a JSON notice when no build exists."""
    if not INDEX_FILE.exists():
        return {"detail": "Frontend not found"}
    return FileResponse(str(INDEX_FILE))
|
|
| |
| |
@app.get("/{full_path:path}")
async def spa_fallback(full_path: str):
    """SPA history-mode fallback: serve index.html for client-side routes.

    API and static-asset paths are excluded so genuinely missing resources
    report a real 404 instead of silently returning the SPA shell.
    """
    # BUGFIX: unknown api/ and assets/ paths previously returned the
    # not-found body with HTTP 200; send an actual 404 status.
    if full_path.startswith(("api/", "assets/")):
        return JSONResponse(status_code=404, content={"detail": "Not Found"})

    if INDEX_FILE.exists():
        return FileResponse(str(INDEX_FILE))
    return {"detail": "Frontend not found"}
|
|