import time

from fastapi import Depends, FastAPI, HTTPException
from pydantic import BaseModel, Field
from sqlalchemy.orm import Session
from transformers import pipeline

from . import cache
from .database import init_db, get_db, SentimentAnalysis
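# Note: the companion modules are assumed to expose the interfaces used below
# (inferred from the calls in this file, not shown here):
#   .database -> init_db(), the get_db() dependency, and the SentimentAnalysis ORM model
#   .cache    -> get_cached_result(), cache_result(), get_cache_stats(), clear_cache()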
app = FastAPI(
    title="Sentiment Analysis API",
    description="Analyze text sentiment using transformers",
    version="1.0.0",
)


# Initialize database on startup
@app.on_event("startup")
def startup_event():
    """Create database tables if they don't exist."""
    print("Initializing database...")
    init_db()
    print("Database ready!")


# Load model once at startup
print("Loading sentiment analysis model...")
sentiment_analyzer = pipeline(
    "sentiment-analysis",
    model="distilbert-base-uncased-finetuned-sst-2-english",
)
print("Model loaded!")
class TextRequest(BaseModel):
    text: str = Field(..., min_length=1, max_length=512,
                      example="I love this product!")


class SentimentResponse(BaseModel):
    text: str
    sentiment: str
    confidence: float
    processing_time_ms: int
    cached: bool = False


class HistoryItem(BaseModel):
    id: int
    text: str
    sentiment: str
    confidence: float
    processing_time_ms: int
    created_at: str


class HistoryResponse(BaseModel):
    total: int
    analyses: list[HistoryItem]
@app.get("/")
def root():
"""Health check endpoint"""
return {
"status": "healthy",
"service": "sentiment-api",
"version": "1.0.0"
}
@app.post("/analyze", response_model=SentimentResponse)
def analyze_sentiment(
request: TextRequest,
db: Session = Depends(get_db)
):
"""
Analyze sentiment of input text with caching.
Returns sentiment (POSITIVE/NEGATIVE) with confidence score.
Stores result in PostgreSQL database and Redis cache.
"""
start_time = time.time()
try:
cached_result = cache.get_cached_result(request.text)
if cached_result:
# Cache HIT - return cached result
print(f"Cache HIT for: {request.text[:50]}")
# Add cache indicator
cached_result["cached"] = True
cached_result["processing_time_ms"] = int((time.time() - start_time) * 1000)
return SentimentResponse(**cached_result)
# Cache MISS - run ML model
print(f"Cache MISS for: {request.text[:50]}")
result = sentiment_analyzer(request.text)[0]
processing_time = int((time.time() - start_time) * 1000)
# Create response
response_data = {
"text": request.text,
"sentiment": result['label'],
"confidence": round(result['score'], 4),
"processing_time_ms": processing_time,
"cached": False # NEW: indicate this wasn't cached
}
# Store in database
db_analysis = SentimentAnalysis(
text=request.text,
sentiment=result['label'],
confidence=round(result['score'], 4),
processing_time_ms=processing_time
)
db.add(db_analysis)
db.commit()
db.refresh(db_analysis)
# ===== NEW: Store in cache =====
cache.cache_result(request.text, response_data)
# ===============================
return SentimentResponse(**response_data)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
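# Example request (illustrative, assuming the API is served locally on port 8000):
#   curl -X POST http://localhost:8000/analyze \
#        -H "Content-Type: application/json" \
#        -d '{"text": "I love this product!"}'
# Repeating the same text should return the cached result with "cached": true.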
@app.get("/health")
def health():
"""Kubernetes-style health check"""
return {"status": "ok"}
@app.get("/history", response_model=HistoryResponse)
def get_history(
limit: int = 10,
db: Session = Depends(get_db)
):
"""
Get sentiment analysis history from database
Returns recent analyses ordered by creation date (newest first)
"""
try:
# Get total count
total = db.query(SentimentAnalysis).count()
# Get recent analyses
analyses = db.query(SentimentAnalysis)\
.order_by(SentimentAnalysis.created_at.desc())\
.limit(limit)\
.all()
# Convert to response format
history_items = [
HistoryItem(
id=a.id,
text=a.text,
sentiment=a.sentiment,
confidence=a.confidence,
processing_time_ms=a.processing_time_ms,
created_at=a.created_at.isoformat()
)
for a in analyses
]
return HistoryResponse(total=total, analyses=history_items)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
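# Example (illustrative, same local-server assumption as above):
#   curl "http://localhost:8000/history?limit=5"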
@app.get("/cache/stats")
def get_cache_statistics():
"""
Get Redis cache statistics
Shows cache hit rate, memory usage, and key counts
"""
return cache.get_cache_stats()
@app.delete("/cache/clear")
def clear_cache_endpoint():
"""
Clear all cached sentiment results
Use this to force fresh analysis for all requests
"""
success = cache.clear_cache()
if success:
return {"message": "Cache cleared successfully"}
else:
return {"message": "Failed to clear cache"} |