FK U SQL
Browse files- pyproject.toml +1 -0
- requirements.txt +1 -0
- src/database.py +243 -0
- src/main.py +24 -235
- src/rag.py +1 -1
- uv.lock +62 -0
pyproject.toml
CHANGED
|
@@ -14,6 +14,7 @@ dependencies = [
|
|
| 14 |
"openai>=2.14.0",
|
| 15 |
"psutil>=7.2.1",
|
| 16 |
"pydantic>=2.12.5",
|
|
|
|
| 17 |
"pypdf>=6.5.0",
|
| 18 |
"python-multipart>=0.0.21",
|
| 19 |
"qdrant-client>=1.16.2",
|
|
|
|
| 14 |
"openai>=2.14.0",
|
| 15 |
"psutil>=7.2.1",
|
| 16 |
"pydantic>=2.12.5",
|
| 17 |
+
"pymongo>=4.16.0",
|
| 18 |
"pypdf>=6.5.0",
|
| 19 |
"python-multipart>=0.0.21",
|
| 20 |
"qdrant-client>=1.16.2",
|
requirements.txt
CHANGED
|
@@ -8,6 +8,7 @@ langchain-google-genai>=4.1.2
|
|
| 8 |
langchain-huggingface>=1.2.0
|
| 9 |
langchain-qdrant>=1.1.0
|
| 10 |
psutil>=7.2.1
|
|
|
|
| 11 |
pydantic>=2.12.5
|
| 12 |
pypdf>=6.5.0
|
| 13 |
python-dotenv>=1.0.0
|
|
|
|
| 8 |
langchain-huggingface>=1.2.0
|
| 9 |
langchain-qdrant>=1.1.0
|
| 10 |
psutil>=7.2.1
|
| 11 |
+
pymongo[srv]>=4.6.0
|
| 12 |
pydantic>=2.12.5
|
| 13 |
pypdf>=6.5.0
|
| 14 |
python-dotenv>=1.0.0
|
src/database.py
ADDED
|
@@ -0,0 +1,243 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import time
|
| 3 |
+
import logging
|
| 4 |
+
from datetime import datetime, timezone
|
| 5 |
+
from typing import Optional, List
|
| 6 |
+
|
| 7 |
+
from pymongo import MongoClient, DESCENDING
|
| 8 |
+
from pymongo.errors import ConnectionFailure, ServerSelectionTimeoutError
|
| 9 |
+
|
| 10 |
+
logger = logging.getLogger("rag-api")
|
| 11 |
+
|
| 12 |
+
MONGODB_URI = os.getenv("MONGODB_URI")
|
| 13 |
+
_client: Optional[MongoClient] = None
|
| 14 |
+
_db = None
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def get_database():
    """Return the shared MongoDB database handle, connecting lazily on first use.

    The client and database are cached in module globals so subsequent calls
    are cheap.  Connection options:

    - ``serverSelectionTimeoutMS=5000``: fail fast when the server is down.
    - ``tz_aware=True``: return timezone-aware (UTC) datetimes so the
      ``.timestamp()`` conversions elsewhere in this module are correct
      regardless of the host's local timezone.

    Raises:
        ConnectionFailure, ServerSelectionTimeoutError: if the server cannot
            be reached or the ping fails.
    """
    global _client, _db

    if _db is not None:
        return _db

    try:
        _client = MongoClient(
            MONGODB_URI,
            serverSelectionTimeoutMS=5000,
            tls=True,
            # SECURITY: disables server certificate verification (MITM risk).
            # Kept for backward compatibility with the existing deployment;
            # prefer installing a proper CA bundle and removing this flag.
            tlsAllowInvalidCertificates=True,
            tz_aware=True,
        )
        _client.admin.command('ping')
        _db = _client.askbookie
        logger.info("MongoDB connected successfully")
        _create_indexes(_db)
        return _db
    except (ConnectionFailure, ServerSelectionTimeoutError) as e:
        # Don't leave a half-initialized client cached: close it and reset the
        # global so a later call can retry the connection cleanly.
        if _client is not None:
            _client.close()
            _client = None
        logger.error(f"MongoDB connection failed: {e}")
        raise
| 40 |
+
def _create_indexes(db):
    """Ensure the TTL and query indexes this module relies on exist.

    ``create_index`` is idempotent, so running this on every startup is safe.
    Any server-side complaint is logged as a warning rather than treated as
    fatal, matching the original best-effort behavior.
    """
    try:
        rate = db.rate_limits
        rate.create_index("timestamp", expireAfterSeconds=60)
        rate.create_index([("key_id", 1), ("endpoint", 1), ("timestamp", 1)])

        auth = db.failed_auth
        auth.create_index("timestamp", expireAfterSeconds=300)
        auth.create_index([("ip", 1), ("timestamp", 1)])

        jobs = db.jobs
        jobs.create_index("created_at", expireAfterSeconds=86400)
        jobs.create_index([("key_id", 1), ("created_at", -1)])
        jobs.create_index("job_id", unique=True)

        db.metrics.create_index([("key_id", 1), ("endpoint", 1), ("timestamp", -1)])
        db.query_history.create_index([("timestamp", -1)])
        logger.info("MongoDB indexes created")
    except Exception as e:
        logger.warning(f"Index creation warning: {e}")
| 56 |
+
def check_rate_limit(key_id: str, endpoint: str, limit: int) -> bool:
    """Sliding-window rate limiter: allow at most ``limit`` calls per
    key/endpoint in any 60-second window.

    Returns True (and records the call) when under the limit, False otherwise.

    NOTE(review): the count + insert below is not atomic, so concurrent
    requests can slightly exceed ``limit``.  Acceptable for soft rate
    limiting; a findAndModify-based counter would be needed for a hard cap.
    """
    db = get_database()
    now = datetime.now(timezone.utc)
    # Derive the window from the same clock reading used for the insert, so
    # the two cannot disagree (the original mixed time.time() with
    # datetime.now(), taking two separate clock readings).
    window_start = datetime.fromtimestamp(now.timestamp() - 60, tz=timezone.utc)

    count = db.rate_limits.count_documents({
        "key_id": key_id,
        "endpoint": endpoint,
        "timestamp": {"$gte": window_start}
    })

    if count >= limit:
        return False

    db.rate_limits.insert_one({
        "key_id": key_id,
        "endpoint": endpoint,
        "timestamp": now
    })
    return True
| 78 |
+
def record_failed_auth(ip: str):
    """Record one failed authentication attempt for ``ip``, stamped now (UTC)."""
    attempt = {"ip": ip, "timestamp": datetime.now(timezone.utc)}
    get_database().failed_auth.insert_one(attempt)
| 86 |
+
def check_auth_lockout(ip: str, limit: int = 5) -> bool:
    """Return True when ``ip`` has accumulated ``limit`` or more auth
    failures within the last 5 minutes (300s window)."""
    cutoff = datetime.fromtimestamp(time.time() - 300, tz=timezone.utc)
    recent_failures = get_database().failed_auth.count_documents(
        {"ip": ip, "timestamp": {"$gte": cutoff}}
    )
    return recent_failures >= limit
| 96 |
+
def create_job(job_id: str, key_id: str, filename: str, subject: str, size: int) -> dict:
    """Insert a new upload job in the ``queued`` state and return the stored
    document (including the ``_id`` pymongo adds on insert)."""
    record = dict(
        job_id=job_id,
        key_id=key_id,
        filename=filename,
        subject=subject,
        size=size,
        status="queued",
        error=None,
        created_at=datetime.now(timezone.utc),
        updated_at=None,
    )
    get_database().jobs.insert_one(record)
    return record
| 113 |
+
def update_job_status(job_id: str, status: str, error: Optional[str] = None):
    """Set a job's ``status`` (and optional ``error`` message), stamping
    ``updated_at`` with the current UTC time.

    Silently a no-op when ``job_id`` does not match any document.
    """
    db = get_database()
    db.jobs.update_one(
        {"job_id": job_id},
        {"$set": {
            "status": status,
            "error": error,
            "updated_at": datetime.now(timezone.utc)
        }}
    )
| 125 |
+
def get_job(job_id: str, key_id: str) -> Optional[dict]:
    """Fetch one job owned by ``key_id``, with datetime fields flattened to
    epoch seconds and ``_id`` stringified for JSON serialization.

    Returns None when the job does not exist or belongs to a different key.
    """
    db = get_database()
    job = db.jobs.find_one({"job_id": job_id, "key_id": key_id})
    if job:
        job["_id"] = str(job["_id"])
        for field in ("created_at", "updated_at"):
            value = job.get(field)
            if value:
                # PyMongo returns naive UTC datetimes unless the client is
                # tz_aware; calling .timestamp() on a naive datetime would
                # interpret it in *local* time, skewing the epoch value on
                # non-UTC hosts.  Pin UTC before converting.
                if value.tzinfo is None:
                    value = value.replace(tzinfo=timezone.utc)
                job[field] = value.timestamp()
            else:
                job[field] = None
    return job
| 135 |
+
def list_user_jobs(key_id: str) -> List[dict]:
    """Return all jobs for ``key_id``, newest first, with ``created_at``
    converted to epoch seconds (the ``_id`` field is projected away)."""
    db = get_database()
    jobs = list(db.jobs.find(
        {"key_id": key_id},
        {"_id": 0, "job_id": 1, "filename": 1, "subject": 1, "size": 1, "status": 1, "error": 1, "created_at": 1}
    ).sort("created_at", DESCENDING))

    for job in jobs:
        created = job.get("created_at")
        if created:
            # Naive datetimes from PyMongo are UTC; pin the timezone before
            # .timestamp() so the epoch value is correct on non-UTC hosts.
            if created.tzinfo is None:
                created = created.replace(tzinfo=timezone.utc)
            job["created_at"] = created.timestamp()
    return jobs
| 148 |
+
def count_active_uploads(key_id: str) -> int:
    """Number of this key's jobs still pending (status queued or processing)."""
    pending_filter = {
        "key_id": key_id,
        "status": {"$in": ["queued", "processing"]}
    }
    return get_database().jobs.count_documents(pending_filter)
| 156 |
+
def record_metric(key_id: str, endpoint: str, success: bool, latency_ms: float):
    """Append one per-request metric sample, stamped now (UTC)."""
    sample = {
        "key_id": key_id,
        "endpoint": endpoint,
        "success": success,
        "latency_ms": latency_ms,
        "timestamp": datetime.now(timezone.utc),
    }
    get_database().metrics.insert_one(sample)
| 167 |
+
def get_metrics_summary() -> dict:
    """Aggregate usage metrics across all keys plus process memory usage.

    Returns a dict with global counters (total_api_calls, total_questions,
    active_jobs, memory_mb) and a ``per_user`` mapping of key_id -> stats
    (call counts, success rate %, average latency in seconds, failure counts).
    """
    # Local import keeps psutil off the module import path for callers that
    # never hit this admin endpoint.
    import psutil

    db = get_database()

    total_calls = db.metrics.count_documents({})
    total_questions = db.metrics.count_documents({"endpoint": "/ask"})
    active_jobs = db.jobs.count_documents({"status": {"$in": ["queued", "processing"]}})

    # Single $group pass over metrics: per key, count calls by endpoint,
    # successes, summed latency, and per-endpoint failure counts.
    # NOTE(review): aggregation $not is documented as {"$not": [<expr>]}
    # (array form); the bare-expression form used here appears to be accepted
    # by the server — confirm against the deployed MongoDB version.
    pipeline = [
        {"$group": {
            "_id": "$key_id",
            "api_calls": {"$sum": 1},
            "questions_asked": {"$sum": {"$cond": [{"$eq": ["$endpoint", "/ask"]}, 1, 0]}},
            "uploads_attempted": {"$sum": {"$cond": [{"$eq": ["$endpoint", "/upload"]}, 1, 0]}},
            "success_count": {"$sum": {"$cond": ["$success", 1, 0]}},
            "total_latency": {"$sum": "$latency_ms"},
            "ask_fails": {"$sum": {"$cond": [{"$and": [{"$eq": ["$endpoint", "/ask"]}, {"$not": "$success"}]}, 1, 0]}},
            "upload_fails": {"$sum": {"$cond": [{"$and": [{"$eq": ["$endpoint", "/upload"]}, {"$not": "$success"}]}, 1, 0]}}
        }}
    ]

    per_user = {}
    for doc in db.metrics.aggregate(pipeline):
        kid = doc["_id"]
        total = doc["api_calls"]
        per_user[kid] = {
            "api_calls": total,
            "questions_asked": doc["questions_asked"],
            "uploads_attempted": doc["uploads_attempted"],
            # Guard total > 0 to avoid ZeroDivisionError; an empty group
            # defaults to 100% success / 0s latency.
            "success_rate": round((doc["success_count"] / total * 100) if total > 0 else 100, 1),
            "average_latency_seconds": round((doc["total_latency"] / total / 1000) if total > 0 else 0, 2),
            "ask_fails": doc["ask_fails"],
            "upload_fails": doc["upload_fails"],
        }

    # Resident set size of the current process, reported in MiB.
    process = psutil.Process()
    return {
        "total_api_calls": total_calls,
        "total_questions": total_questions,
        "active_jobs": active_jobs,
        "memory_mb": round(process.memory_info().rss / (1024 * 1024), 1),
        "per_user": per_user,
    }
| 213 |
+
def store_query_history(key_id: str, subject: str, query: str, answer: str,
                        sources: list, request_id: str, latency_ms: float):
    """Persist one answered /ask exchange for the admin history endpoint."""
    entry = {
        "key_id": key_id,
        "subject": subject,
        "query": query,
        "answer": answer,
        "sources": sources,
        "request_id": request_id,
        "latency_ms": latency_ms,
        "timestamp": datetime.now(timezone.utc),
    }
    get_database().query_history.insert_one(entry)
| 228 |
+
def get_query_history(limit: int = 100, offset: int = 0) -> tuple[List[dict], int]:
    """Page through stored /ask history, newest first.

    Returns ``(items, total)`` where each item carries a 1-based ``id``
    relative to the requested offset and ``timestamp`` flattened to epoch
    seconds; ``total`` is the full collection count for pagination.
    """
    db = get_database()

    total = db.query_history.count_documents({})

    history = list(db.query_history.find(
        {},
        {"_id": 0}
    ).sort("timestamp", DESCENDING).skip(offset).limit(limit))

    for i, item in enumerate(history):
        item["id"] = offset + i + 1
        ts = item.get("timestamp")
        if ts:
            # PyMongo hands back naive UTC datetimes by default; pin UTC
            # before .timestamp() so the epoch value is not skewed by the
            # host's local timezone.
            if ts.tzinfo is None:
                ts = ts.replace(tzinfo=timezone.utc)
            item["timestamp"] = ts.timestamp()

    return history, total
src/main.py
CHANGED
|
@@ -5,14 +5,12 @@ import secrets
|
|
| 5 |
import hmac
|
| 6 |
import hashlib
|
| 7 |
import logging
|
| 8 |
-
import sqlite3
|
| 9 |
import tempfile
|
| 10 |
import atexit
|
| 11 |
import re
|
| 12 |
import json
|
| 13 |
from datetime import datetime, timedelta, timezone
|
| 14 |
-
from
|
| 15 |
-
from contextlib import contextmanager, asynccontextmanager
|
| 16 |
from typing import Optional
|
| 17 |
from pathlib import Path
|
| 18 |
|
|
@@ -25,6 +23,7 @@ from dotenv import load_dotenv
|
|
| 25 |
load_dotenv()
|
| 26 |
|
| 27 |
from rag import RAGService, process_pdf, model_manager, MODEL_OPTIONS
|
|
|
|
| 28 |
|
| 29 |
logging.basicConfig(
|
| 30 |
level=logging.INFO,
|
|
@@ -33,7 +32,6 @@ logging.basicConfig(
|
|
| 33 |
)
|
| 34 |
logger = logging.getLogger("rag-api")
|
| 35 |
|
| 36 |
-
DB_PATH = os.getenv("DB_PATH", "rag_security.db")
|
| 37 |
MAX_FILE_SIZE = 10 * 1024 * 1024
|
| 38 |
ALLOWED_MIME_TYPES = {"application/pdf"}
|
| 39 |
UPLOAD_DIR = Path(tempfile.mkdtemp(prefix="rag_uploads_"))
|
|
@@ -41,90 +39,12 @@ MAX_JSON_BODY = 16 * 1024
|
|
| 41 |
MAX_DEFAULT_BODY = 32 * 1024
|
| 42 |
RATE_LIMITS = {"/ask": 30, "/upload": 2, "default": 50}
|
| 43 |
FAILED_AUTH_LIMIT = 5
|
| 44 |
-
FAILED_AUTH_WINDOW = 300
|
| 45 |
-
JOB_TTL_SECONDS = 24 * 3600
|
| 46 |
KEY_EXPIRY_DAYS = 90
|
| 47 |
HMAC_TIME_WINDOW = 300
|
| 48 |
MAX_CONCURRENT_UPLOADS = 3
|
| 49 |
-
PDF_PROCESS_TIMEOUT = 300
|
| 50 |
SUBJECT_PATTERN = re.compile(r'^[a-zA-Z0-9_-]+$')
|
| 51 |
STARTUP_TIME = time.time()
|
| 52 |
|
| 53 |
-
_db_lock = Lock()
|
| 54 |
-
|
| 55 |
-
def get_db_connection() -> sqlite3.Connection:
|
| 56 |
-
conn = sqlite3.connect(DB_PATH, timeout=10)
|
| 57 |
-
conn.row_factory = sqlite3.Row
|
| 58 |
-
return conn
|
| 59 |
-
|
| 60 |
-
@contextmanager
|
| 61 |
-
def db_transaction():
|
| 62 |
-
with _db_lock:
|
| 63 |
-
conn = get_db_connection()
|
| 64 |
-
try:
|
| 65 |
-
yield conn
|
| 66 |
-
conn.commit()
|
| 67 |
-
except Exception:
|
| 68 |
-
conn.rollback()
|
| 69 |
-
raise
|
| 70 |
-
finally:
|
| 71 |
-
conn.close()
|
| 72 |
-
|
| 73 |
-
def init_database():
|
| 74 |
-
with db_transaction() as conn:
|
| 75 |
-
conn.execute("PRAGMA journal_mode=WAL;")
|
| 76 |
-
conn.execute("PRAGMA busy_timeout=5000;")
|
| 77 |
-
conn.executescript("""
|
| 78 |
-
CREATE TABLE IF NOT EXISTS rate_limits (
|
| 79 |
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 80 |
-
key_id TEXT NOT NULL,
|
| 81 |
-
endpoint TEXT NOT NULL,
|
| 82 |
-
timestamp REAL NOT NULL
|
| 83 |
-
);
|
| 84 |
-
CREATE INDEX IF NOT EXISTS idx_rate_key_endpoint ON rate_limits(key_id, endpoint, timestamp);
|
| 85 |
-
CREATE TABLE IF NOT EXISTS failed_auth (
|
| 86 |
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 87 |
-
ip TEXT NOT NULL,
|
| 88 |
-
timestamp REAL NOT NULL
|
| 89 |
-
);
|
| 90 |
-
CREATE INDEX IF NOT EXISTS idx_failed_ip ON failed_auth(ip, timestamp);
|
| 91 |
-
CREATE TABLE IF NOT EXISTS jobs (
|
| 92 |
-
job_id TEXT PRIMARY KEY,
|
| 93 |
-
key_id TEXT NOT NULL,
|
| 94 |
-
filename TEXT,
|
| 95 |
-
subject TEXT,
|
| 96 |
-
size INTEGER,
|
| 97 |
-
status TEXT NOT NULL,
|
| 98 |
-
error TEXT,
|
| 99 |
-
created_at REAL NOT NULL,
|
| 100 |
-
updated_at REAL
|
| 101 |
-
);
|
| 102 |
-
CREATE INDEX IF NOT EXISTS idx_jobs_key ON jobs(key_id, created_at);
|
| 103 |
-
CREATE TABLE IF NOT EXISTS metrics (
|
| 104 |
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 105 |
-
key_id TEXT NOT NULL,
|
| 106 |
-
endpoint TEXT NOT NULL,
|
| 107 |
-
success INTEGER NOT NULL,
|
| 108 |
-
latency_ms REAL NOT NULL,
|
| 109 |
-
timestamp REAL NOT NULL
|
| 110 |
-
);
|
| 111 |
-
CREATE INDEX IF NOT EXISTS idx_metrics_key ON metrics(key_id, endpoint, timestamp);
|
| 112 |
-
CREATE TABLE IF NOT EXISTS query_history (
|
| 113 |
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 114 |
-
key_id TEXT NOT NULL,
|
| 115 |
-
subject TEXT NOT NULL,
|
| 116 |
-
query TEXT NOT NULL,
|
| 117 |
-
answer TEXT NOT NULL,
|
| 118 |
-
sources TEXT,
|
| 119 |
-
request_id TEXT,
|
| 120 |
-
latency_ms REAL,
|
| 121 |
-
timestamp REAL NOT NULL
|
| 122 |
-
);
|
| 123 |
-
CREATE INDEX IF NOT EXISTS idx_history_timestamp ON query_history(timestamp);
|
| 124 |
-
""")
|
| 125 |
-
|
| 126 |
-
init_database()
|
| 127 |
-
|
| 128 |
_api_key_configs = [
|
| 129 |
("askbookie-pesu", "SERVICE_A_SECRET", "user"),
|
| 130 |
("ask-bookie", "SERVICE_B_SECRET", "user"),
|
|
@@ -155,17 +75,6 @@ def get_client_ip(request: Request) -> str:
|
|
| 155 |
return forwarded.split(",")[-1].strip()
|
| 156 |
return request.client.host if request.client else "unknown"
|
| 157 |
|
| 158 |
-
def record_failed_auth(ip: str):
|
| 159 |
-
with db_transaction() as conn:
|
| 160 |
-
conn.execute("INSERT INTO failed_auth (ip, timestamp) VALUES (?, ?)", (ip, time.time()))
|
| 161 |
-
|
| 162 |
-
def check_auth_lockout(ip: str) -> bool:
|
| 163 |
-
cutoff = time.time() - FAILED_AUTH_WINDOW
|
| 164 |
-
with db_transaction() as conn:
|
| 165 |
-
conn.execute("DELETE FROM failed_auth WHERE timestamp < ?", (cutoff,))
|
| 166 |
-
cursor = conn.execute("SELECT COUNT(*) FROM failed_auth WHERE ip = ? AND timestamp > ?", (ip, cutoff))
|
| 167 |
-
return cursor.fetchone()[0] >= FAILED_AUTH_LIMIT
|
| 168 |
-
|
| 169 |
def verify_hmac_signature(request: Request) -> Optional[str]:
|
| 170 |
key_id = request.headers.get("X-API-Key-Id")
|
| 171 |
sig = request.headers.get("X-API-Signature")
|
|
@@ -194,143 +103,51 @@ def verify_hmac_signature(request: Request) -> Optional[str]:
|
|
| 194 |
|
| 195 |
async def verify_api_key(request: Request) -> str:
|
| 196 |
ip = get_client_ip(request)
|
| 197 |
-
if check_auth_lockout(ip):
|
| 198 |
raise HTTPException(status_code=429, detail="Too many failed attempts")
|
| 199 |
key_id = verify_hmac_signature(request)
|
| 200 |
if key_id:
|
| 201 |
return key_id
|
| 202 |
-
record_failed_auth(ip)
|
| 203 |
await asyncio.sleep(0.1)
|
| 204 |
raise HTTPException(status_code=401, detail="Unauthorized")
|
| 205 |
|
| 206 |
-
def check_rate_limit(key_id: str, endpoint: str) -> bool:
|
| 207 |
-
limit = RATE_LIMITS.get(endpoint, RATE_LIMITS["default"])
|
| 208 |
-
now = time.time()
|
| 209 |
-
window_start = now - 60
|
| 210 |
-
with db_transaction() as conn:
|
| 211 |
-
conn.execute("DELETE FROM rate_limits WHERE timestamp < ?", (window_start,))
|
| 212 |
-
cursor = conn.execute(
|
| 213 |
-
"SELECT COUNT(*) FROM rate_limits WHERE key_id = ? AND endpoint = ? AND timestamp > ?",
|
| 214 |
-
(key_id, endpoint, window_start)
|
| 215 |
-
)
|
| 216 |
-
if cursor.fetchone()[0] >= limit:
|
| 217 |
-
return False
|
| 218 |
-
conn.execute("INSERT INTO rate_limits (key_id, endpoint, timestamp) VALUES (?, ?, ?)", (key_id, endpoint, now))
|
| 219 |
-
return True
|
| 220 |
-
|
| 221 |
def rate_limited(endpoint: str):
|
| 222 |
async def dependency(request: Request):
|
| 223 |
key_id = await verify_api_key(request)
|
| 224 |
-
|
|
|
|
| 225 |
raise HTTPException(status_code=429, detail="Rate limit exceeded", headers={"Retry-After": "60"})
|
| 226 |
request.state.key_id = key_id
|
| 227 |
return key_id
|
| 228 |
return dependency
|
| 229 |
|
| 230 |
-
def update_job_status(job_id: str, status: str, error: str = None):
|
| 231 |
-
with db_transaction() as conn:
|
| 232 |
-
conn.execute("UPDATE jobs SET status = ?, error = ?, updated_at = ? WHERE job_id = ?",
|
| 233 |
-
(status, error, time.time(), job_id))
|
| 234 |
-
|
| 235 |
-
def get_job(job_id: str, key_id: str) -> Optional[dict]:
|
| 236 |
-
with db_transaction() as conn:
|
| 237 |
-
cursor = conn.execute("SELECT * FROM jobs WHERE job_id = ? AND key_id = ?", (job_id, key_id))
|
| 238 |
-
row = cursor.fetchone()
|
| 239 |
-
return dict(row) if row else None
|
| 240 |
-
|
| 241 |
-
def list_user_jobs(key_id: str) -> list:
|
| 242 |
-
cutoff = time.time() - JOB_TTL_SECONDS
|
| 243 |
-
with db_transaction() as conn:
|
| 244 |
-
conn.execute("DELETE FROM jobs WHERE created_at < ?", (cutoff,))
|
| 245 |
-
cursor = conn.execute(
|
| 246 |
-
"SELECT job_id, filename, subject, size, status, error, created_at FROM jobs WHERE key_id = ? ORDER BY created_at DESC",
|
| 247 |
-
(key_id,)
|
| 248 |
-
)
|
| 249 |
-
return [dict(row) for row in cursor.fetchall()]
|
| 250 |
-
|
| 251 |
-
def count_active_uploads(key_id: str) -> int:
|
| 252 |
-
with db_transaction() as conn:
|
| 253 |
-
cursor = conn.execute(
|
| 254 |
-
"SELECT COUNT(*) FROM jobs WHERE key_id = ? AND status IN ('queued', 'processing')",
|
| 255 |
-
(key_id,)
|
| 256 |
-
)
|
| 257 |
-
return cursor.fetchone()[0]
|
| 258 |
-
|
| 259 |
def sanitize_subject(subject: str) -> str:
|
| 260 |
clean = subject.strip().lower()
|
| 261 |
if not SUBJECT_PATTERN.match(clean):
|
| 262 |
clean = re.sub(r'[^a-zA-Z0-9_-]', '', clean)
|
| 263 |
return clean[:50] if clean else "default"
|
| 264 |
|
| 265 |
-
def record_metric(key_id: str, endpoint: str, success: bool, latency_ms: float):
|
| 266 |
-
with db_transaction() as conn:
|
| 267 |
-
conn.execute(
|
| 268 |
-
"INSERT INTO metrics (key_id, endpoint, success, latency_ms, timestamp) VALUES (?, ?, ?, ?, ?)",
|
| 269 |
-
(key_id, endpoint, 1 if success else 0, latency_ms, time.time())
|
| 270 |
-
)
|
| 271 |
-
|
| 272 |
-
def store_query_history(key_id: str, subject: str, query: str, answer: str,
|
| 273 |
-
sources: list, request_id: str, latency_ms: float):
|
| 274 |
-
with db_transaction() as conn:
|
| 275 |
-
conn.execute(
|
| 276 |
-
"""INSERT INTO query_history
|
| 277 |
-
(key_id, subject, query, answer, sources, request_id, latency_ms, timestamp)
|
| 278 |
-
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
|
| 279 |
-
(key_id, subject, query, answer, json.dumps(sources), request_id, latency_ms, time.time())
|
| 280 |
-
)
|
| 281 |
-
|
| 282 |
def get_metrics_summary() -> dict:
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
per_user = {}
|
| 289 |
-
cursor = conn.execute("""
|
| 290 |
-
SELECT key_id, COUNT(*) as api_calls,
|
| 291 |
-
SUM(CASE WHEN endpoint = '/ask' THEN 1 ELSE 0 END) as questions_asked,
|
| 292 |
-
SUM(CASE WHEN endpoint = '/upload' THEN 1 ELSE 0 END) as uploads_attempted,
|
| 293 |
-
AVG(CASE WHEN success = 1 THEN 100.0 ELSE 0.0 END) as success_rate,
|
| 294 |
-
AVG(latency_ms) / 1000.0 as average_latency_seconds,
|
| 295 |
-
SUM(CASE WHEN endpoint = '/ask' AND success = 0 THEN 1 ELSE 0 END) as ask_fails,
|
| 296 |
-
SUM(CASE WHEN endpoint = '/upload' AND success = 0 THEN 1 ELSE 0 END) as upload_fails
|
| 297 |
-
FROM metrics GROUP BY key_id
|
| 298 |
-
""")
|
| 299 |
-
for row in cursor.fetchall():
|
| 300 |
-
kid = row[0]
|
| 301 |
-
per_user[kid] = {
|
| 302 |
-
"api_calls": row[1],
|
| 303 |
-
"questions_asked": row[2],
|
| 304 |
-
"uploads_attempted": row[3],
|
| 305 |
-
"success_rate": round(row[4], 1) if row[4] else 100,
|
| 306 |
-
"average_latency_seconds": round(row[5], 2) if row[5] else 0,
|
| 307 |
-
"ask_fails": row[6] or 0,
|
| 308 |
-
"upload_fails": row[7] or 0,
|
| 309 |
-
"role": API_KEYS.get(kid, {}).get("role", "user"),
|
| 310 |
-
}
|
| 311 |
-
process = psutil.Process()
|
| 312 |
-
return {
|
| 313 |
-
"uptime_hours": round((time.time() - STARTUP_TIME) / 3600, 2),
|
| 314 |
-
"total_api_calls": total_calls,
|
| 315 |
-
"total_questions": total_questions,
|
| 316 |
-
"active_jobs": active_jobs,
|
| 317 |
-
"memory_mb": round(process.memory_info().rss / (1024 * 1024), 1),
|
| 318 |
-
"per_user": per_user,
|
| 319 |
-
}
|
| 320 |
|
| 321 |
async def process_pdf_job(job_id: str, file_path: Path, original_filename: str, subject: str):
|
| 322 |
def status_callback(status: str):
|
| 323 |
-
update_job_status(job_id, status)
|
| 324 |
logger.info(f"Job {job_id}: {status}")
|
| 325 |
try:
|
| 326 |
-
update_job_status(job_id, "processing")
|
| 327 |
logger.info(f"Job {job_id}: Starting RAG processing")
|
| 328 |
process_pdf(str(file_path), original_filename, subject, status_callback)
|
| 329 |
-
update_job_status(job_id, "done")
|
| 330 |
logger.info(f"Job {job_id}: Complete")
|
| 331 |
except Exception as e:
|
| 332 |
logger.exception(f"Job {job_id} failed")
|
| 333 |
-
update_job_status(job_id, "failed", str(e)[:200])
|
| 334 |
finally:
|
| 335 |
try:
|
| 336 |
if file_path.exists():
|
|
@@ -443,7 +260,7 @@ async def ask(request: Request, body: AskRequest, key_id: str = Depends(rate_lim
|
|
| 443 |
result = rag_service.ask(body.query, subject)
|
| 444 |
success = True
|
| 445 |
latency_ms = (time.time() - start_time) * 1000
|
| 446 |
-
store_query_history(
|
| 447 |
key_id=key_id,
|
| 448 |
subject=subject,
|
| 449 |
query=body.query,
|
|
@@ -457,7 +274,7 @@ async def ask(request: Request, body: AskRequest, key_id: str = Depends(rate_lim
|
|
| 457 |
logger.exception(f"RAG query failed: {e}")
|
| 458 |
raise HTTPException(status_code=500, detail="Query failed")
|
| 459 |
finally:
|
| 460 |
-
record_metric(key_id, "/ask", success, (time.time() - start_time) * 1000)
|
| 461 |
|
| 462 |
@app.post("/upload")
|
| 463 |
async def upload(
|
|
@@ -472,7 +289,7 @@ async def upload(
|
|
| 472 |
subject = sanitize_subject(subject)
|
| 473 |
if not subject:
|
| 474 |
raise HTTPException(status_code=400, detail="Invalid subject")
|
| 475 |
-
if count_active_uploads(key_id) >= MAX_CONCURRENT_UPLOADS:
|
| 476 |
raise HTTPException(status_code=429, detail="Too many concurrent uploads")
|
| 477 |
if not file.filename or not file.filename.lower().endswith(".pdf"):
|
| 478 |
raise HTTPException(status_code=400, detail="Only PDF files allowed")
|
|
@@ -495,11 +312,7 @@ async def upload(
|
|
| 495 |
f.write(chunk)
|
| 496 |
if size == 0:
|
| 497 |
raise HTTPException(status_code=400, detail="Empty file")
|
| 498 |
-
|
| 499 |
-
conn.execute(
|
| 500 |
-
"INSERT INTO jobs (job_id, key_id, filename, subject, size, status, created_at) VALUES (?, ?, ?, ?, ?, ?, ?)",
|
| 501 |
-
(job_id, key_id, file.filename, subject, size, "queued", time.time())
|
| 502 |
-
)
|
| 503 |
background_tasks.add_task(process_pdf_job, job_id, temp_path, file.filename, subject)
|
| 504 |
logger.info(f"Upload queued: job={job_id}")
|
| 505 |
success = True
|
|
@@ -514,18 +327,18 @@ async def upload(
|
|
| 514 |
logger.exception("Upload failed")
|
| 515 |
raise HTTPException(status_code=500, detail="Upload failed")
|
| 516 |
finally:
|
| 517 |
-
record_metric(key_id, "/upload", success, (time.time() - start_time) * 1000)
|
| 518 |
|
| 519 |
@app.get("/jobs/{job_id}")
|
| 520 |
async def get_job_status(request: Request, job_id: str, key_id: str = Depends(rate_limited("default"))):
|
| 521 |
-
job = get_job(job_id, key_id)
|
| 522 |
if not job:
|
| 523 |
raise HTTPException(status_code=404, detail="Job not found")
|
| 524 |
return job
|
| 525 |
|
| 526 |
@app.get("/jobs")
|
| 527 |
async def list_jobs(request: Request, key_id: str = Depends(rate_limited("default"))):
|
| 528 |
-
jobs = list_user_jobs(key_id)
|
| 529 |
return {"jobs": jobs, "total": len(jobs)}
|
| 530 |
|
| 531 |
@app.get("/health")
|
|
@@ -592,35 +405,11 @@ async def get_query_history(
|
|
| 592 |
if API_KEYS.get(key_id, {}).get("role") != "admin":
|
| 593 |
raise HTTPException(status_code=403, detail="Forbidden")
|
| 594 |
|
| 595 |
-
|
| 596 |
-
cursor = conn.execute(
|
| 597 |
-
"""SELECT id, key_id, subject, query, answer, sources, request_id,
|
| 598 |
-
latency_ms, timestamp
|
| 599 |
-
FROM query_history
|
| 600 |
-
ORDER BY timestamp DESC
|
| 601 |
-
LIMIT ? OFFSET ?""",
|
| 602 |
-
(limit, offset)
|
| 603 |
-
)
|
| 604 |
-
history = []
|
| 605 |
-
for row in cursor.fetchall():
|
| 606 |
-
history.append({
|
| 607 |
-
"id": row[0],
|
| 608 |
-
"key_id": row[1],
|
| 609 |
-
"subject": row[2],
|
| 610 |
-
"query": row[3],
|
| 611 |
-
"answer": row[4],
|
| 612 |
-
"sources": json.loads(row[5]) if row[5] else [],
|
| 613 |
-
"request_id": row[6],
|
| 614 |
-
"latency_ms": row[7],
|
| 615 |
-
"timestamp": row[8],
|
| 616 |
-
})
|
| 617 |
-
|
| 618 |
-
total = conn.execute("SELECT COUNT(*) FROM query_history").fetchone()[0]
|
| 619 |
-
|
| 620 |
return {"history": history, "total": total, "limit": limit, "offset": offset}
|
| 621 |
|
| 622 |
class ModelSwitchRequest(BaseModel):
|
| 623 |
-
model_id: int = Field(..., ge=1, le=5
|
| 624 |
|
| 625 |
@app.get("/admin/models/current")
|
| 626 |
async def get_current_model(request: Request, key_id: str = Depends(rate_limited("default"))):
|
|
|
|
| 5 |
import hmac
|
| 6 |
import hashlib
|
| 7 |
import logging
|
|
|
|
| 8 |
import tempfile
|
| 9 |
import atexit
|
| 10 |
import re
|
| 11 |
import json
|
| 12 |
from datetime import datetime, timedelta, timezone
|
| 13 |
+
from contextlib import asynccontextmanager
|
|
|
|
| 14 |
from typing import Optional
|
| 15 |
from pathlib import Path
|
| 16 |
|
|
|
|
| 23 |
load_dotenv()
|
| 24 |
|
| 25 |
from rag import RAGService, process_pdf, model_manager, MODEL_OPTIONS
|
| 26 |
+
import database as db
|
| 27 |
|
| 28 |
logging.basicConfig(
|
| 29 |
level=logging.INFO,
|
|
|
|
| 32 |
)
|
| 33 |
logger = logging.getLogger("rag-api")
|
| 34 |
|
|
|
|
| 35 |
MAX_FILE_SIZE = 10 * 1024 * 1024
|
| 36 |
ALLOWED_MIME_TYPES = {"application/pdf"}
|
| 37 |
UPLOAD_DIR = Path(tempfile.mkdtemp(prefix="rag_uploads_"))
|
|
|
|
| 39 |
MAX_DEFAULT_BODY = 32 * 1024
|
| 40 |
RATE_LIMITS = {"/ask": 30, "/upload": 2, "default": 50}
|
| 41 |
FAILED_AUTH_LIMIT = 5
|
|
|
|
|
|
|
| 42 |
KEY_EXPIRY_DAYS = 90
|
| 43 |
HMAC_TIME_WINDOW = 300
|
| 44 |
MAX_CONCURRENT_UPLOADS = 3
|
|
|
|
| 45 |
SUBJECT_PATTERN = re.compile(r'^[a-zA-Z0-9_-]+$')
|
| 46 |
STARTUP_TIME = time.time()
|
| 47 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
_api_key_configs = [
|
| 49 |
("askbookie-pesu", "SERVICE_A_SECRET", "user"),
|
| 50 |
("ask-bookie", "SERVICE_B_SECRET", "user"),
|
|
|
|
| 75 |
return forwarded.split(",")[-1].strip()
|
| 76 |
return request.client.host if request.client else "unknown"
|
| 77 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 78 |
def verify_hmac_signature(request: Request) -> Optional[str]:
|
| 79 |
key_id = request.headers.get("X-API-Key-Id")
|
| 80 |
sig = request.headers.get("X-API-Signature")
|
|
|
|
| 103 |
|
| 104 |
async def verify_api_key(request: Request) -> str:
    """Authenticate a request via its HMAC headers, enforcing a per-IP lockout.

    Returns the authenticated key id.  Raises 429 when the client IP is
    locked out for repeated failures, or 401 when the signature is missing
    or invalid (recording the failure and sleeping briefly to slow
    brute-force probing).
    """
    client_ip = get_client_ip(request)
    if db.check_auth_lockout(client_ip, FAILED_AUTH_LIMIT):
        raise HTTPException(status_code=429, detail="Too many failed attempts")

    authenticated_key = verify_hmac_signature(request)
    if not authenticated_key:
        db.record_failed_auth(client_ip)
        await asyncio.sleep(0.1)  # throttle brute-force attempts
        raise HTTPException(status_code=401, detail="Unauthorized")
    return authenticated_key
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 115 |
def rate_limited(endpoint: str):
    """FastAPI dependency factory: authenticate, then rate-limit per key/endpoint.

    The returned dependency yields the key id and stashes it on
    ``request.state.key_id`` for downstream handlers.
    """
    async def dependency(request: Request):
        key_id = await verify_api_key(request)
        allowed = db.check_rate_limit(
            key_id, endpoint, RATE_LIMITS.get(endpoint, RATE_LIMITS["default"])
        )
        if not allowed:
            raise HTTPException(status_code=429, detail="Rate limit exceeded", headers={"Retry-After": "60"})
        request.state.key_id = key_id
        return key_id
    return dependency
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 125 |
def sanitize_subject(subject: str) -> str:
    """Normalize a subject name: lowercase, restricted to [a-zA-Z0-9_-],
    truncated to 50 characters.  Falls back to "default" when nothing
    survives sanitization."""
    cleaned = subject.strip().lower()
    if not SUBJECT_PATTERN.match(cleaned):
        cleaned = re.sub(r'[^a-zA-Z0-9_-]', '', cleaned)
    if not cleaned:
        return "default"
    return cleaned[:50]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 131 |
def get_metrics_summary() -> dict:
    """Return DB-backed usage metrics enriched with process uptime and
    the configured role of each API key."""
    summary = db.get_metrics_summary()
    summary["uptime_hours"] = round((time.time() - STARTUP_TIME) / 3600, 2)

    # Annotate each per-user entry with its key's role; keys missing from
    # API_KEYS are reported as plain "user".
    per_user = summary.get("per_user", {})
    for key_id in per_user:
        per_user[key_id]["role"] = API_KEYS.get(key_id, {}).get("role", "user")

    return summary
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 137 |
|
| 138 |
async def process_pdf_job(job_id: str, file_path: Path, original_filename: str, subject: str):
|
| 139 |
def status_callback(status: str):
|
| 140 |
+
db.update_job_status(job_id, status)
|
| 141 |
logger.info(f"Job {job_id}: {status}")
|
| 142 |
try:
|
| 143 |
+
db.update_job_status(job_id, "processing")
|
| 144 |
logger.info(f"Job {job_id}: Starting RAG processing")
|
| 145 |
process_pdf(str(file_path), original_filename, subject, status_callback)
|
| 146 |
+
db.update_job_status(job_id, "done")
|
| 147 |
logger.info(f"Job {job_id}: Complete")
|
| 148 |
except Exception as e:
|
| 149 |
logger.exception(f"Job {job_id} failed")
|
| 150 |
+
db.update_job_status(job_id, "failed", str(e)[:200])
|
| 151 |
finally:
|
| 152 |
try:
|
| 153 |
if file_path.exists():
|
|
|
|
| 260 |
result = rag_service.ask(body.query, subject)
|
| 261 |
success = True
|
| 262 |
latency_ms = (time.time() - start_time) * 1000
|
| 263 |
+
db.store_query_history(
|
| 264 |
key_id=key_id,
|
| 265 |
subject=subject,
|
| 266 |
query=body.query,
|
|
|
|
| 274 |
logger.exception(f"RAG query failed: {e}")
|
| 275 |
raise HTTPException(status_code=500, detail="Query failed")
|
| 276 |
finally:
|
| 277 |
+
db.record_metric(key_id, "/ask", success, (time.time() - start_time) * 1000)
|
| 278 |
|
| 279 |
@app.post("/upload")
|
| 280 |
async def upload(
|
|
|
|
| 289 |
subject = sanitize_subject(subject)
|
| 290 |
if not subject:
|
| 291 |
raise HTTPException(status_code=400, detail="Invalid subject")
|
| 292 |
+
if db.count_active_uploads(key_id) >= MAX_CONCURRENT_UPLOADS:
|
| 293 |
raise HTTPException(status_code=429, detail="Too many concurrent uploads")
|
| 294 |
if not file.filename or not file.filename.lower().endswith(".pdf"):
|
| 295 |
raise HTTPException(status_code=400, detail="Only PDF files allowed")
|
|
|
|
| 312 |
f.write(chunk)
|
| 313 |
if size == 0:
|
| 314 |
raise HTTPException(status_code=400, detail="Empty file")
|
| 315 |
+
db.create_job(job_id, key_id, file.filename, subject, size)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 316 |
background_tasks.add_task(process_pdf_job, job_id, temp_path, file.filename, subject)
|
| 317 |
logger.info(f"Upload queued: job={job_id}")
|
| 318 |
success = True
|
|
|
|
| 327 |
logger.exception("Upload failed")
|
| 328 |
raise HTTPException(status_code=500, detail="Upload failed")
|
| 329 |
finally:
|
| 330 |
+
db.record_metric(key_id, "/upload", success, (time.time() - start_time) * 1000)
|
| 331 |
|
| 332 |
@app.get("/jobs/{job_id}")
|
| 333 |
async def get_job_status(request: Request, job_id: str, key_id: str = Depends(rate_limited("default"))):
|
| 334 |
+
job = db.get_job(job_id, key_id)
|
| 335 |
if not job:
|
| 336 |
raise HTTPException(status_code=404, detail="Job not found")
|
| 337 |
return job
|
| 338 |
|
| 339 |
@app.get("/jobs")
|
| 340 |
async def list_jobs(request: Request, key_id: str = Depends(rate_limited("default"))):
|
| 341 |
+
jobs = db.list_user_jobs(key_id)
|
| 342 |
return {"jobs": jobs, "total": len(jobs)}
|
| 343 |
|
| 344 |
@app.get("/health")
|
|
|
|
| 405 |
if API_KEYS.get(key_id, {}).get("role") != "admin":
|
| 406 |
raise HTTPException(status_code=403, detail="Forbidden")
|
| 407 |
|
| 408 |
+
history, total = db.get_query_history(limit, offset)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 409 |
return {"history": history, "total": total, "limit": limit, "offset": offset}
|
| 410 |
|
| 411 |
class ModelSwitchRequest(BaseModel):
    """Request body for the admin model-switch endpoint."""

    # Pydantic v2 reserves the "model_" attribute prefix; without clearing
    # the protected namespace, declaring a field named `model_id` emits a
    # field-name-conflict warning at import time. Clearing it changes no
    # validation behavior.
    model_config = {"protected_namespaces": ()}

    # Valid model slots are 1..5 inclusive.
    model_id: int = Field(..., ge=1, le=5)
|
| 413 |
|
| 414 |
@app.get("/admin/models/current")
|
| 415 |
async def get_current_model(request: Request, key_id: str = Depends(rate_limited("default"))):
|
src/rag.py
CHANGED
|
@@ -197,7 +197,7 @@ class RAGService:
|
|
| 197 |
continue
|
| 198 |
raise last_error
|
| 199 |
|
| 200 |
-
top_results = results[:
|
| 201 |
context_text = "\n\n---\n\n".join([doc.page_content for doc, _ in top_results])
|
| 202 |
|
| 203 |
full_prompt = PROMPT_TEMPLATE.format(context=context_text, question=query_text)
|
|
|
|
| 197 |
continue
|
| 198 |
raise last_error
|
| 199 |
|
| 200 |
+
top_results = results[:5]
|
| 201 |
context_text = "\n\n---\n\n".join([doc.page_content for doc, _ in top_results])
|
| 202 |
|
| 203 |
full_prompt = PROMPT_TEMPLATE.format(context=context_text, question=query_text)
|
uv.lock
CHANGED
|
@@ -162,6 +162,7 @@ dependencies = [
|
|
| 162 |
{ name = "openai" },
|
| 163 |
{ name = "psutil" },
|
| 164 |
{ name = "pydantic" },
|
|
|
|
| 165 |
{ name = "pypdf" },
|
| 166 |
{ name = "python-multipart" },
|
| 167 |
{ name = "qdrant-client" },
|
|
@@ -181,6 +182,7 @@ requires-dist = [
|
|
| 181 |
{ name = "openai", specifier = ">=2.14.0" },
|
| 182 |
{ name = "psutil", specifier = ">=7.2.1" },
|
| 183 |
{ name = "pydantic", specifier = ">=2.12.5" },
|
|
|
|
| 184 |
{ name = "pypdf", specifier = ">=6.5.0" },
|
| 185 |
{ name = "python-multipart", specifier = ">=0.0.21" },
|
| 186 |
{ name = "qdrant-client", specifier = ">=1.16.2" },
|
|
@@ -389,6 +391,15 @@ wheels = [
|
|
| 389 |
{ url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" },
|
| 390 |
]
|
| 391 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 392 |
[[package]]
|
| 393 |
name = "exceptiongroup"
|
| 394 |
version = "1.3.1"
|
|
@@ -1955,6 +1966,57 @@ wheels = [
|
|
| 1955 |
{ url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
|
| 1956 |
]
|
| 1957 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1958 |
[[package]]
|
| 1959 |
name = "pypdf"
|
| 1960 |
version = "6.5.0"
|
|
|
|
| 162 |
{ name = "openai" },
|
| 163 |
{ name = "psutil" },
|
| 164 |
{ name = "pydantic" },
|
| 165 |
+
{ name = "pymongo" },
|
| 166 |
{ name = "pypdf" },
|
| 167 |
{ name = "python-multipart" },
|
| 168 |
{ name = "qdrant-client" },
|
|
|
|
| 182 |
{ name = "openai", specifier = ">=2.14.0" },
|
| 183 |
{ name = "psutil", specifier = ">=7.2.1" },
|
| 184 |
{ name = "pydantic", specifier = ">=2.12.5" },
|
| 185 |
+
{ name = "pymongo", specifier = ">=4.16.0" },
|
| 186 |
{ name = "pypdf", specifier = ">=6.5.0" },
|
| 187 |
{ name = "python-multipart", specifier = ">=0.0.21" },
|
| 188 |
{ name = "qdrant-client", specifier = ">=1.16.2" },
|
|
|
|
| 391 |
{ url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" },
|
| 392 |
]
|
| 393 |
|
| 394 |
+
[[package]]
|
| 395 |
+
name = "dnspython"
|
| 396 |
+
version = "2.8.0"
|
| 397 |
+
source = { registry = "https://pypi.org/simple" }
|
| 398 |
+
sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" }
|
| 399 |
+
wheels = [
|
| 400 |
+
{ url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" },
|
| 401 |
+
]
|
| 402 |
+
|
| 403 |
[[package]]
|
| 404 |
name = "exceptiongroup"
|
| 405 |
version = "1.3.1"
|
|
|
|
| 1966 |
{ url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
|
| 1967 |
]
|
| 1968 |
|
| 1969 |
+
[[package]]
|
| 1970 |
+
name = "pymongo"
|
| 1971 |
+
version = "4.16.0"
|
| 1972 |
+
source = { registry = "https://pypi.org/simple" }
|
| 1973 |
+
dependencies = [
|
| 1974 |
+
{ name = "dnspython" },
|
| 1975 |
+
]
|
| 1976 |
+
sdist = { url = "https://files.pythonhosted.org/packages/65/9c/a4895c4b785fc9865a84a56e14b5bd21ca75aadc3dab79c14187cdca189b/pymongo-4.16.0.tar.gz", hash = "sha256:8ba8405065f6e258a6f872fe62d797a28f383a12178c7153c01ed04e845c600c", size = 2495323, upload-time = "2026-01-07T18:05:48.107Z" }
|
| 1977 |
+
wheels = [
|
| 1978 |
+
{ url = "https://files.pythonhosted.org/packages/4d/93/c36c0998dd91ad8b5031d2e77a903d5cd705b5ba05ca92bcc8731a2c3a8d/pymongo-4.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ed162b2227f98d5b270ecbe1d53be56c8c81db08a1a8f5f02d89c7bb4d19591d", size = 807993, upload-time = "2026-01-07T18:03:40.302Z" },
|
| 1979 |
+
{ url = "https://files.pythonhosted.org/packages/f3/96/d2117d792fa9fedb2f6ccf0608db31f851e8382706d7c3c88c6ac92cc958/pymongo-4.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a9390dce61d705a88218f0d7b54d7e1fa1b421da8129fc7c009e029a9a6b81e", size = 808355, upload-time = "2026-01-07T18:03:42.13Z" },
|
| 1980 |
+
{ url = "https://files.pythonhosted.org/packages/ae/2e/e79b7b86c0dd6323d0985c201583c7921d67b842b502aae3f3327cbe3935/pymongo-4.16.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:92a232af9927710de08a6c16a9710cc1b175fb9179c0d946cd4e213b92b2a69a", size = 1182337, upload-time = "2026-01-07T18:03:44.126Z" },
|
| 1981 |
+
{ url = "https://files.pythonhosted.org/packages/7b/82/07ec9966381c57d941fddc52637e9c9653e63773be410bd8605f74683084/pymongo-4.16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4d79aa147ce86aef03079096d83239580006ffb684eead593917186aee407767", size = 1200928, upload-time = "2026-01-07T18:03:45.52Z" },
|
| 1982 |
+
{ url = "https://files.pythonhosted.org/packages/44/15/9d45e3cc6fa428b0a3600b0c1c86b310f28c91251c41493460695ab40b6b/pymongo-4.16.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:19a1c96e7f39c7a59a9cfd4d17920cf9382f6f684faeff4649bf587dc59f8edc", size = 1239418, upload-time = "2026-01-07T18:03:47.03Z" },
|
| 1983 |
+
{ url = "https://files.pythonhosted.org/packages/c8/b3/f35ee51e2a3f05f673ad4f5e803ae1284c42f4413e8d121c4958f1af4eb9/pymongo-4.16.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efe020c46ce3c3a89af6baec6569635812129df6fb6cf76d4943af3ba6ee2069", size = 1229045, upload-time = "2026-01-07T18:03:48.377Z" },
|
| 1984 |
+
{ url = "https://files.pythonhosted.org/packages/18/2d/1688b88d7c0a5c01da8c703dea831419435d9ce67c6ddbb0ac629c9c72d2/pymongo-4.16.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9dc2c00bed568732b89e211b6adca389053d5e6d2d5a8979e80b813c3ec4d1f9", size = 1196517, upload-time = "2026-01-07T18:03:50.205Z" },
|
| 1985 |
+
{ url = "https://files.pythonhosted.org/packages/e6/c6/e89db0f23bd20757b627a5d8c73a609ffd6741887b9004ab229208a79764/pymongo-4.16.0-cp310-cp310-win32.whl", hash = "sha256:5b9c6d689bbe5beb156374508133218610e14f8c81e35bc17d7a14e30ab593e6", size = 794911, upload-time = "2026-01-07T18:03:52.701Z" },
|
| 1986 |
+
{ url = "https://files.pythonhosted.org/packages/37/54/e00a5e517153f310a33132375159e42dceb12bee45b51b35aa0df14f1866/pymongo-4.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:2290909275c9b8f637b0a92eb9b89281e18a72922749ebb903403ab6cc7da914", size = 804801, upload-time = "2026-01-07T18:03:57.671Z" },
|
| 1987 |
+
{ url = "https://files.pythonhosted.org/packages/e5/0a/2572faf89195a944c99c6d756227019c8c5f4b5658ecc261c303645dfe69/pymongo-4.16.0-cp310-cp310-win_arm64.whl", hash = "sha256:6af1aaa26f0835175d2200e62205b78e7ec3ffa430682e322cc91aaa1a0dbf28", size = 797579, upload-time = "2026-01-07T18:03:59.1Z" },
|
| 1988 |
+
{ url = "https://files.pythonhosted.org/packages/e6/3a/907414a763c4270b581ad6d960d0c6221b74a70eda216a1fdd8fa82ba89f/pymongo-4.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f2077ec24e2f1248f9cac7b9a2dfb894e50cc7939fcebfb1759f99304caabef", size = 862561, upload-time = "2026-01-07T18:04:00.628Z" },
|
| 1989 |
+
{ url = "https://files.pythonhosted.org/packages/8c/58/787d8225dd65cb2383c447346ea5e200ecfde89962d531111521e3b53018/pymongo-4.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d4f7ba040f72a9f43a44059872af5a8c8c660aa5d7f90d5344f2ed1c3c02721", size = 862923, upload-time = "2026-01-07T18:04:02.213Z" },
|
| 1990 |
+
{ url = "https://files.pythonhosted.org/packages/5d/a7/cc2865aae32bc77ade7b35f957a58df52680d7f8506f93c6edbf458e5738/pymongo-4.16.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8a0f73af1ea56c422b2dcfc0437459148a799ef4231c6aee189d2d4c59d6728f", size = 1426779, upload-time = "2026-01-07T18:04:03.942Z" },
|
| 1991 |
+
{ url = "https://files.pythonhosted.org/packages/81/25/3e96eb7998eec05382174da2fefc58d28613f46bbdf821045539d0ed60ab/pymongo-4.16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa30cd16ddd2f216d07ba01d9635c873e97ddb041c61cf0847254edc37d1c60e", size = 1454207, upload-time = "2026-01-07T18:04:05.387Z" },
|
| 1992 |
+
{ url = "https://files.pythonhosted.org/packages/86/7b/8e817a7df8c5d565d39dd4ca417a5e0ef46cc5cc19aea9405f403fec6449/pymongo-4.16.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d638b0b1b294d95d0fdc73688a3b61e05cc4188872818cd240d51460ccabcb5", size = 1511654, upload-time = "2026-01-07T18:04:08.458Z" },
|
| 1993 |
+
{ url = "https://files.pythonhosted.org/packages/39/7a/50c4d075ccefcd281cdcfccc5494caa5665b096b85e65a5d6afabb80e09e/pymongo-4.16.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:21d02cc10a158daa20cb040985e280e7e439832fc6b7857bff3d53ef6914ad50", size = 1496794, upload-time = "2026-01-07T18:04:10.355Z" },
|
| 1994 |
+
{ url = "https://files.pythonhosted.org/packages/0f/cd/ebdc1aaca5deeaf47310c369ef4083e8550e04e7bf7e3752cfb7d95fcdb8/pymongo-4.16.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fbb8d3552c2ad99d9e236003c0b5f96d5f05e29386ba7abae73949bfebc13dd", size = 1448371, upload-time = "2026-01-07T18:04:11.76Z" },
|
| 1995 |
+
{ url = "https://files.pythonhosted.org/packages/3d/c9/50fdd78c37f68ea49d590c027c96919fbccfd98f3a4cb39f84f79970bd37/pymongo-4.16.0-cp311-cp311-win32.whl", hash = "sha256:be1099a8295b1a722d03fb7b48be895d30f4301419a583dcf50e9045968a041c", size = 841024, upload-time = "2026-01-07T18:04:13.522Z" },
|
| 1996 |
+
{ url = "https://files.pythonhosted.org/packages/4a/dd/a3aa1ade0cf9980744db703570afac70a62c85b432c391dea0577f6da7bb/pymongo-4.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:61567f712bda04c7545a037e3284b4367cad8d29b3dec84b4bf3b2147020a75b", size = 855838, upload-time = "2026-01-07T18:04:14.923Z" },
|
| 1997 |
+
{ url = "https://files.pythonhosted.org/packages/bf/10/9ad82593ccb895e8722e4884bad4c5ce5e8ff6683b740d7823a6c2bcfacf/pymongo-4.16.0-cp311-cp311-win_arm64.whl", hash = "sha256:c53338613043038005bf2e41a2fafa08d29cdbc0ce80891b5366c819456c1ae9", size = 845007, upload-time = "2026-01-07T18:04:17.099Z" },
|
| 1998 |
+
{ url = "https://files.pythonhosted.org/packages/6a/03/6dd7c53cbde98de469a3e6fb893af896dca644c476beb0f0c6342bcc368b/pymongo-4.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bd4911c40a43a821dfd93038ac824b756b6e703e26e951718522d29f6eb166a8", size = 917619, upload-time = "2026-01-07T18:04:19.173Z" },
|
| 1999 |
+
{ url = "https://files.pythonhosted.org/packages/73/e1/328915f2734ea1f355dc9b0e98505ff670f5fab8be5e951d6ed70971c6aa/pymongo-4.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25a6b03a68f9907ea6ec8bc7cf4c58a1b51a18e23394f962a6402f8e46d41211", size = 917364, upload-time = "2026-01-07T18:04:20.861Z" },
|
| 2000 |
+
{ url = "https://files.pythonhosted.org/packages/41/fe/4769874dd9812a1bc2880a9785e61eba5340da966af888dd430392790ae0/pymongo-4.16.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:91ac0cb0fe2bf17616c2039dac88d7c9a5088f5cb5829b27c9d250e053664d31", size = 1686901, upload-time = "2026-01-07T18:04:22.219Z" },
|
| 2001 |
+
{ url = "https://files.pythonhosted.org/packages/fa/8d/15707b9669fdc517bbc552ac60da7124dafe7ac1552819b51e97ed4038b4/pymongo-4.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf0ec79e8ca7077f455d14d915d629385153b6a11abc0b93283ed73a8013e376", size = 1723034, upload-time = "2026-01-07T18:04:24.055Z" },
|
| 2002 |
+
{ url = "https://files.pythonhosted.org/packages/5b/af/3d5d16ff11d447d40c1472da1b366a31c7380d7ea2922a449c7f7f495567/pymongo-4.16.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2d0082631a7510318befc2b4fdab140481eb4b9dd62d9245e042157085da2a70", size = 1797161, upload-time = "2026-01-07T18:04:25.964Z" },
|
| 2003 |
+
{ url = "https://files.pythonhosted.org/packages/fb/04/725ab8664eeec73ec125b5a873448d80f5d8cf2750aaaf804cbc538a50a5/pymongo-4.16.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85dc2f3444c346ea019a371e321ac868a4fab513b7a55fe368f0cc78de8177cc", size = 1780938, upload-time = "2026-01-07T18:04:28.745Z" },
|
| 2004 |
+
{ url = "https://files.pythonhosted.org/packages/22/50/dd7e9095e1ca35f93c3c844c92eb6eb0bc491caeb2c9bff3b32fe3c9b18f/pymongo-4.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dabbf3c14de75a20cc3c30bf0c6527157224a93dfb605838eabb1a2ee3be008d", size = 1714342, upload-time = "2026-01-07T18:04:30.331Z" },
|
| 2005 |
+
{ url = "https://files.pythonhosted.org/packages/03/c9/542776987d5c31ae8e93e92680ea2b6e5a2295f398b25756234cabf38a39/pymongo-4.16.0-cp312-cp312-win32.whl", hash = "sha256:60307bb91e0ab44e560fe3a211087748b2b5f3e31f403baf41f5b7b0a70bd104", size = 887868, upload-time = "2026-01-07T18:04:32.124Z" },
|
| 2006 |
+
{ url = "https://files.pythonhosted.org/packages/2e/d4/b4045a7ccc5680fb496d01edf749c7a9367cc8762fbdf7516cf807ef679b/pymongo-4.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:f513b2c6c0d5c491f478422f6b5b5c27ac1af06a54c93ef8631806f7231bd92e", size = 907554, upload-time = "2026-01-07T18:04:33.685Z" },
|
| 2007 |
+
{ url = "https://files.pythonhosted.org/packages/60/4c/33f75713d50d5247f2258405142c0318ff32c6f8976171c4fcae87a9dbdf/pymongo-4.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:dfc320f08ea9a7ec5b2403dc4e8150636f0d6150f4b9792faaae539c88e7db3b", size = 892971, upload-time = "2026-01-07T18:04:35.594Z" },
|
| 2008 |
+
{ url = "https://files.pythonhosted.org/packages/47/84/148d8b5da8260f4679d6665196ae04ab14ffdf06f5fe670b0ab11942951f/pymongo-4.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d15f060bc6d0964a8bb70aba8f0cb6d11ae99715438f640cff11bbcf172eb0e8", size = 972009, upload-time = "2026-01-07T18:04:38.303Z" },
|
| 2009 |
+
{ url = "https://files.pythonhosted.org/packages/1e/5e/9f3a8daf583d0adaaa033a3e3e58194d2282737dc164014ff33c7a081103/pymongo-4.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a19ea46a0fe71248965305a020bc076a163311aefbaa1d83e47d06fa30ac747", size = 971784, upload-time = "2026-01-07T18:04:39.669Z" },
|
| 2010 |
+
{ url = "https://files.pythonhosted.org/packages/ad/f2/b6c24361fcde24946198573c0176406bfd5f7b8538335f3d939487055322/pymongo-4.16.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:311d4549d6bf1f8c61d025965aebb5ba29d1481dc6471693ab91610aaffbc0eb", size = 1947174, upload-time = "2026-01-07T18:04:41.368Z" },
|
| 2011 |
+
{ url = "https://files.pythonhosted.org/packages/47/1a/8634192f98cf740b3d174e1018dd0350018607d5bd8ac35a666dc49c732b/pymongo-4.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46ffb728d92dd5b09fc034ed91acf5595657c7ca17d4cf3751322cd554153c17", size = 1991727, upload-time = "2026-01-07T18:04:42.965Z" },
|
| 2012 |
+
{ url = "https://files.pythonhosted.org/packages/5a/2f/0c47ac84572b28e23028a23a3798a1f725e1c23b0cf1c1424678d16aff42/pymongo-4.16.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:acda193f440dd88c2023cb00aa8bd7b93a9df59978306d14d87a8b12fe426b05", size = 2082497, upload-time = "2026-01-07T18:04:44.652Z" },
|
| 2013 |
+
{ url = "https://files.pythonhosted.org/packages/ba/57/9f46ef9c862b2f0cf5ce798f3541c201c574128d31ded407ba4b3918d7b6/pymongo-4.16.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d9fdb386cf958e6ef6ff537d6149be7edb76c3268cd6833e6c36aa447e4443f", size = 2064947, upload-time = "2026-01-07T18:04:46.228Z" },
|
| 2014 |
+
{ url = "https://files.pythonhosted.org/packages/b8/56/5421c0998f38e32288100a07f6cb2f5f9f352522157c901910cb2927e211/pymongo-4.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91899dd7fb9a8c50f09c3c1cf0cb73bfbe2737f511f641f19b9650deb61c00ca", size = 1980478, upload-time = "2026-01-07T18:04:48.017Z" },
|
| 2015 |
+
{ url = "https://files.pythonhosted.org/packages/92/93/bfc448d025e12313a937d6e1e0101b50cc9751636b4b170e600fe3203063/pymongo-4.16.0-cp313-cp313-win32.whl", hash = "sha256:2cd60cd1e05de7f01927f8e25ca26b3ea2c09de8723241e5d3bcfdc70eaff76b", size = 934672, upload-time = "2026-01-07T18:04:49.538Z" },
|
| 2016 |
+
{ url = "https://files.pythonhosted.org/packages/96/10/12710a5e01218d50c3dd165fd72c5ed2699285f77348a3b1a119a191d826/pymongo-4.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3ead8a0050c53eaa55935895d6919d393d0328ec24b2b9115bdbe881aa222673", size = 959237, upload-time = "2026-01-07T18:04:51.382Z" },
|
| 2017 |
+
{ url = "https://files.pythonhosted.org/packages/0c/56/d288bcd1d05bc17ec69df1d0b1d67bc710c7c5dbef86033a5a4d2e2b08e6/pymongo-4.16.0-cp313-cp313-win_arm64.whl", hash = "sha256:dbbc5b254c36c37d10abb50e899bc3939bbb7ab1e7c659614409af99bd3e7675", size = 940909, upload-time = "2026-01-07T18:04:52.904Z" },
|
| 2018 |
+
]
|
| 2019 |
+
|
| 2020 |
[[package]]
|
| 2021 |
name = "pypdf"
|
| 2022 |
version = "6.5.0"
|