# NOTE(review): removed HuggingFace Spaces page residue ("Spaces: Sleeping")
# that was captured during extraction — it was not valid Python and broke
# the module at import time.
import logging
import os

from fastapi import FastAPI, HTTPException, Header, Depends, Request
from pydantic import BaseModel
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address
from transformers import pipeline
# Configure logging once at import time so all handlers share it.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Rate limiter keyed by client IP address.
limiter = Limiter(key_func=get_remote_address)

app = FastAPI(title="Panoptifi Sentiment API")
# slowapi integration: the limiter must be attached to app.state, and the
# 429 handler registered, for @limiter.limit decorators to work.
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
# API key protection. If API_KEY is unset or empty, authentication is
# effectively disabled (every request passes) — NOTE(review): confirm this
# open-by-default behavior is intended outside local development.
API_KEY = os.environ.get("API_KEY", "")


def verify_api_key(x_api_key: str = Header(None, alias="X-API-Key")) -> bool:
    """FastAPI dependency: validate the X-API-Key request header.

    Returns True when no key is configured or the header matches.

    Raises:
        HTTPException: 401 when an API key is configured and the header
            is missing or does not match.
    """
    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=401, detail="Invalid API key")
    return True
# Load the FinBERT financial-sentiment model once at import time; this
# blocks startup until the (possibly downloaded) weights are in memory.
logger.info("Loading FinBERT model...")
classifier = pipeline(
    "sentiment-analysis",
    model="ProsusAI/finbert",
    tokenizer="ProsusAI/finbert",
)
logger.info("Model loaded")
class TextInput(BaseModel):
    """Request body for single-text sentiment analysis."""

    # Raw text to classify; truncated to 2000 chars by the handler.
    text: str
class BatchInput(BaseModel):
    """Request body for batch sentiment analysis (max 50 texts)."""

    # Texts to classify; blank entries are dropped by the handler.
    texts: list[str]
class SentimentResult(BaseModel):
    """One model prediction: FinBERT label and its confidence score."""

    label: str
    score: float
# NOTE(review): the route decorator was missing in the extracted source
# (the function was never registered with the app); reconstructed here —
# confirm the intended path.
@app.get("/health")
def health(request: Request):
    """Liveness probe: report service status and the model in use."""
    return {"status": "healthy", "model": "ProsusAI/finbert"}
# Max 30 requests per minute per IP.
# NOTE(review): the route and rate-limit decorators were missing in the
# extracted source; reconstructed from the comment above and the slowapi
# convention (the `request: Request` parameter is required by
# @limiter.limit) — confirm path and limit value.
@app.post("/analyze")
@limiter.limit("30/minute")
def analyze(request: Request, input: TextInput, _: bool = Depends(verify_api_key)):
    """Classify the sentiment of a single text with FinBERT.

    Raises:
        HTTPException: 400 when the text is empty or whitespace-only.
    """
    if not input.text.strip():
        raise HTTPException(400, "Text cannot be empty")
    # Truncate to 2000 chars to bound tokenizer/model latency.
    result = classifier(input.text[:2000])[0]
    return SentimentResult(label=result["label"], score=result["score"])
# Batch is heavier, so it gets a stricter rate limit.
# NOTE(review): the route and rate-limit decorators were missing in the
# extracted source; reconstructed from the comment above — confirm path
# and the exact limit value ("limit more" than 30/minute).
@app.post("/analyze/batch")
@limiter.limit("10/minute")
def analyze_batch(request: Request, input: BatchInput, _: bool = Depends(verify_api_key)):
    """Classify sentiment for up to 50 texts in one request.

    Blank texts are dropped; each remaining text is truncated to 2000
    chars before inference. Results are returned in input order for the
    surviving texts.

    Raises:
        HTTPException: 400 when more than 50 texts are sent, or when no
            text is non-blank.
    """
    if len(input.texts) > 50:
        raise HTTPException(400, "Max 50 texts per batch")
    texts = [t[:2000] for t in input.texts if t.strip()]
    # Bug fix: the original passed an empty list to the pipeline when the
    # batch was empty or all-blank; reject that case explicitly instead.
    if not texts:
        raise HTTPException(400, "No non-empty texts in batch")
    results = classifier(texts)
    return [SentimentResult(label=r["label"], score=r["score"]) for r in results]