Spaces:
Sleeping
Sleeping
Create main.py
Browse files
main.py
ADDED
|
@@ -0,0 +1,715 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# app.py — Hidden Stroke (AI Noir Investigation) with Internet Archive ingestion
# Flask + Firebase Realtime DB + Firebase Storage + Gemini (exact model names)
# Runs on Hugging Face like your reference app (same envs & init flow)

import os, io, uuid, json, time, hmac, hashlib, random, traceback, requests
from datetime import datetime, timedelta, timezone
from typing import Dict, Any, Tuple, List, Optional

from flask import Flask, request, jsonify
from flask_cors import CORS
from PIL import Image

# ---------------- Firebase Admin (Realtime DB + Storage) ----------------
import firebase_admin
from firebase_admin import credentials, db, storage

# ---------------- Gemini (exact client & model names) -------------------
from google import genai
from google.genai import types

# -----------------------------------------------------------------------------
# 1) CONFIG & INIT (env names EXACTLY as your reference code)
# -----------------------------------------------------------------------------
app = Flask(__name__)
CORS(app)

# --- Firebase ---
# FIREBASE carries the whole service-account JSON as a string; Firebase_DB and
# Firebase_Storage name the Realtime DB URL and the Storage bucket.
# Init failure is deliberately fatal: nothing in this app works without them.
try:
    credentials_json_string = os.environ.get("FIREBASE")
    if not credentials_json_string:
        raise ValueError("The FIREBASE environment variable is not set.")

    credentials_json = json.loads(credentials_json_string)
    firebase_db_url = os.environ.get("Firebase_DB")
    firebase_storage_bucket = os.environ.get("Firebase_Storage")
    if not firebase_db_url or not firebase_storage_bucket:
        raise ValueError("Firebase_DB and Firebase_Storage environment variables must be set.")

    cred = credentials.Certificate(credentials_json)
    firebase_admin.initialize_app(cred, {
        'databaseURL': firebase_db_url,
        'storageBucket': firebase_storage_bucket
    })
    bucket = storage.bucket()      # default Storage bucket handle (module-wide)
    db_root = db.reference("/")    # root of the Realtime DB tree (module-wide)
    print("Firebase Realtime DB + Storage initialized.")
except Exception as e:
    print(f"FATAL: Firebase init failed: {e}")
    raise

# --- Gemini ---
# The 'Gemini' env var holds the API key; a missing key is fatal as well.
try:
    GEMINI_API_KEY = os.environ.get("Gemini")
    if not GEMINI_API_KEY:
        raise ValueError("The 'Gemini' environment variable is not set.")
    client = genai.Client(api_key=GEMINI_API_KEY)
    print("Gemini client initialized.")
except Exception as e:
    print(f"FATAL: Gemini init failed: {e}")
    raise

# --- Models (exact names) ---
CATEGORY_MODEL = "gemini-2.5-flash"  # text/JSON case generation
GENERATION_MODEL = "gemini-2.0-flash-exp-image-generation"  # image variants

# --- Game constants ---
TIMER_SECONDS = 90  # per-session countdown length
INITIAL_IP = 8  # Investigation Points granted when a session starts
TOOL_COSTS = {"signature": 1, "metadata": 1, "financial": 2}  # IP cost per tool use
LEADERBOARD_TOP_N = 50  # size of the cached per-case leaderboard

# --- Misc config ---
GAME_SALT = os.environ.get("GAME_SALT", "dev-salt") # for deterministic seeds / HMAC
ADMIN_KEY = os.environ.get("ADMIN_KEY") # optional for admin endpoints
IA_USER_AGENT = os.environ.get("IA_USER_AGENT", "HiddenStrokeBot/1.0 (+https://reddit.com)") # polite UA
| 77 |
+
# -----------------------------------------------------------------------------
|
| 78 |
+
# 2) UTILS
|
| 79 |
+
# -----------------------------------------------------------------------------
|
| 80 |
+
def utc_today_str() -> str:
    """Today's date in UTC as YYYYMMDD — doubles as the daily case id."""
    now = datetime.now(timezone.utc)
    return now.strftime("%Y%m%d")

def case_ref(case_id: str):
    """DB node for one case (public payload plus hidden solution)."""
    return db_root.child(f"cases/{case_id}")

def plays_ref(case_id: str):
    """DB node recording every user's play for a case."""
    return db_root.child(f"plays/{case_id}")

def leaderboard_ref(case_id: str):
    """DB node holding the cached top-N leaderboard for a case."""
    return db_root.child(f"leaderboards/{case_id}/top")

def sessions_ref():
    """DB node holding all play sessions, keyed by session id."""
    return db_root.child("sessions")

def ia_pool_ref():
    """DB node holding ingested Internet Archive image records."""
    return db_root.child("ia_pool")

def hmac_hex(s: str) -> str:
    """Hex HMAC-SHA256 of *s* keyed with GAME_SALT (stable within a deploy)."""
    mac = hmac.new(GAME_SALT.encode(), s.encode(), hashlib.sha256)
    return mac.hexdigest()
|
| 100 |
+
|
| 101 |
+
def upload_bytes_to_storage(data: bytes, path: str, content_type: str) -> str:
    """Upload raw bytes to Firebase Storage at *path*, make the object public,
    and return its public URL."""
    target = bucket.blob(path)
    target.upload_from_string(data, content_type=content_type)
    target.make_public()
    return target.public_url

def pil_from_inline_image_part(part) -> Image.Image:
    """Decode a Gemini inline-image response part into an RGB PIL image."""
    raw = part.inline_data.data
    return Image.open(io.BytesIO(raw)).convert("RGB")

def save_image_return_url(img: Image.Image, path: str, quality=92) -> str:
    """JPEG-encode *img* and upload it to Storage; returns the public URL."""
    buf = io.BytesIO()
    img.save(buf, format="JPEG", quality=quality, optimize=True)
    return upload_bytes_to_storage(buf.getvalue(), path, "image/jpeg")
|
| 115 |
+
|
| 116 |
+
def extract_user_from_headers(req) -> Tuple[str, str]:
    """Resolve (user_id, username) from the Reddit proxy headers.

    Falls back to "anon" when no username header is present, and reuses the
    username as the id when no id header is present.
    """
    username = (req.headers.get("X-Reddit-User") or "").strip() or "anon"
    user_id = (req.headers.get("X-Reddit-Id") or "").strip() or username
    return user_id, username
|
| 124 |
+
|
| 125 |
+
def seed_for_date(case_id: str) -> int:
    """Deterministic per-case integer seed from the HMAC of the case id."""
    digest = hmac_hex(f"seed::{case_id}")
    return int(digest[:12], 16)

def fifty_fifty_mode(case_seed: int) -> str:
    """Map a case seed onto one of the two game modes (even seed → knowledge)."""
    if case_seed % 2 == 0:
        return "knowledge"
    return "observation"
|
| 130 |
+
|
| 131 |
+
def http_get_json(url: str, params: dict = None) -> dict:
    """GET *url* with the polite User-Agent and parse the JSON body.

    Raises requests.HTTPError on non-2xx responses; 30s timeout.
    """
    resp = requests.get(url, params=params, headers={"User-Agent": IA_USER_AGENT}, timeout=30)
    resp.raise_for_status()
    return resp.json()

def http_get_bytes(url: str) -> bytes:
    """GET *url* with the polite User-Agent and return the raw body bytes.

    Raises requests.HTTPError on non-2xx responses; 60s timeout (downloads).
    """
    resp = requests.get(url, headers={"User-Agent": IA_USER_AGENT}, timeout=60)
    resp.raise_for_status()
    return resp.content
|
| 142 |
+
|
| 143 |
+
def ia_advanced_search(query: str, rows: int, page: int) -> List[dict]:
    """Query the Internet Archive Advanced Search API (no key required).

    Endpoint: /advancedsearch.php?q=...&rows=...&page=...&output=json
    Returns the list of result docs (possibly empty).
    """
    payload = http_get_json(
        "https://archive.org/advancedsearch.php",
        params={"q": query, "rows": rows, "page": page, "output": "json"},
    )
    return payload.get("response", {}).get("docs", [])

def ia_metadata(identifier: str) -> dict:
    """Fetch the full archive.org /metadata document for *identifier*."""
    return http_get_json(f"https://archive.org/metadata/{identifier}")
|
| 154 |
+
|
| 155 |
+
def ia_best_image_from_metadata(meta: dict) -> Optional[dict]:
    """Pick the largest suitable image file from an archive.org /metadata result.

    Files are ranked by pixel count (width*height) when both dimensions are
    reported, otherwise by byte size. Returns the winning file dict, or None
    when the item has no image files.
    """
    def _as_int(value) -> int:
        # IA metadata reports width/height/size as strings and occasionally as
        # junk ("", "N/A"); treat anything unparsable as 0 instead of raising.
        try:
            return int(value)
        except (TypeError, ValueError):
            return 0

    best = None
    best_pixels = -1
    for f in meta.get("files", []) or []:
        fmt = (f.get("format") or "").lower()
        if not any(tag in fmt for tag in ("jpeg", "jpg", "png", "tiff", "image")):
            continue
        w = _as_int(f.get("width"))
        h = _as_int(f.get("height"))
        # Fall back to ranking by file size when dimensions are absent.
        px = w * h if (w and h) else _as_int(f.get("size"))
        if px > best_pixels:
            best_pixels = px
            best = f
    return best
|
| 175 |
+
|
| 176 |
+
def ingest_ia_doc(doc: dict) -> Optional[dict]:
    """Given a doc from advancedsearch, fetch /metadata and store its best
    image into ia_pool.

    Returns the stored record, or None when the doc has no identifier or no
    usable image file.
    """
    from urllib.parse import quote  # local import keeps this fix self-contained

    identifier = doc.get("identifier")
    if not identifier:
        return None
    meta = ia_metadata(identifier)
    best = ia_best_image_from_metadata(meta)
    if not best:
        return None

    md = meta.get("metadata", {}) or {}

    def _field(key: str) -> str:
        # Prefer the /metadata value, fall back to the search doc. IA sometimes
        # returns lists for these fields, so normalize to one string.
        val = md.get(key, "") or doc.get(key, "")
        if isinstance(val, list):
            val = val[0] if val else ""
        return val

    # File names may contain spaces or other URL-unsafe characters —
    # percent-encode the path segment so the download URL stays valid.
    download_url = f"https://archive.org/download/{identifier}/{quote(best['name'])}"
    record = {
        "identifier": identifier,
        "title": _field("title"),
        "date": str(_field("date")),
        "creator": _field("creator"),
        "rights": _field("rights"),
        "licenseurl": _field("licenseurl"),
        "download_url": download_url,
        "file_name": best["name"],
        "format": best.get("format"),
        "width": best.get("width"),
        "height": best.get("height"),
        "size": best.get("size"),
        "source": "internet_archive"
    }
    # Firebase RTDB keys may not contain '.', '#', '$', '[', ']' or '/', but
    # IA identifiers can contain dots — sanitize the key; the record keeps the
    # true identifier for downstream use.
    safe_key = "".join("_" if c in ".#$[]/" else c for c in identifier)
    ia_pool_ref().child(safe_key).set(record)
    return record
|
| 212 |
+
|
| 213 |
+
def choose_ia_item_for_case(case_id: str) -> Optional[dict]:
    """Deterministically pick one ingested IA record for *case_id*.

    Sorting the pool keys makes the pick stable across processes; the case
    seed then indexes into that order. Returns None when the pool is empty.
    """
    pool = ia_pool_ref().get() or {}
    if not pool:
        return None
    ordered = sorted(pool.keys())
    pick = ordered[seed_for_date(case_id) % len(ordered)]
    return pool[pick]
|
| 222 |
+
|
| 223 |
+
def download_image_to_pil(url: str) -> Image.Image:
    """Download *url* and decode the payload into an RGB PIL image."""
    payload = http_get_bytes(url)
    return Image.open(io.BytesIO(payload)).convert("RGB")

def crop_signature_macro(img: Image.Image, size: int = 512) -> Image.Image:
    """Return a lower-right macro crop of at most *size*×*size* pixels.

    Signatures conventionally sit bottom-right; the crop is clamped when the
    image is smaller than the requested size.
    """
    w, h = img.size
    crop_w, crop_h = min(size, w), min(size, h)
    left, top = max(0, w - crop_w), max(0, h - crop_h)
    return img.crop((left, top, left + crop_w, top + crop_h))
|
| 236 |
+
|
| 237 |
+
# -----------------------------------------------------------------------------
|
| 238 |
+
# 3) CASE GENERATION (now uses IA for the authentic image)
|
| 239 |
+
# -----------------------------------------------------------------------------
|
| 240 |
+
def ensure_case_generated(case_id: str) -> Dict[str, Any]:
    """Return the public payload for *case_id*, generating the case on first call.

    Pipeline: pick an authentic Internet Archive image deterministically, build
    three image cards (identical visuals in "knowledge" mode, two Gemini-made
    subtle forgeries in "observation" mode), then ask Gemini for the case brief,
    three metadata bundles, a ledger summary and the hidden solution. Stores
    /cases/{case_id}/public (safe for clients) and /solution (server-only).
    """
    # Idempotent: a previously generated case is returned as-is.
    existing_public = case_ref(case_id).child("public").get()
    if existing_public:
        return existing_public

    # Ensure we have at least some IA records; if not, auto-ingest a default set (one page)
    pool = ia_pool_ref().get() or {}
    if not pool:
        try:
            # Default query targets well-known museum collections with images
            default_query = '(collection:(metropolitanmuseum OR smithsonian OR getty OR artic) AND mediatype:image)'
            docs = ia_advanced_search(default_query, rows=100, page=1)
            for d in docs:
                try:
                    ingest_ia_doc(d)
                except Exception:
                    # Best-effort: a single bad doc must not stop the bootstrap.
                    continue
        except Exception as e:
            print("WARNING: IA default ingest failed:", e)

    # Pick authentic from ia_pool deterministically
    ia_item = choose_ia_item_for_case(case_id)
    if not ia_item:
        # absolute fallback (rare)
        raise RuntimeError("No IA items available. Ingest needed.")

    # Deterministic mode
    case_seed = seed_for_date(case_id)
    rng = random.Random(case_seed)  # NOTE: currently unused; kept for determinism hooks
    mode = "knowledge" if (case_seed % 2 == 0) else "observation"

    # Style label (flavor text only)
    style_period = "sourced from Internet Archive; museum catalog reproduction"

    # Download authentic image
    auth_img = download_image_to_pil(ia_item["download_url"])

    images_urls: List[str] = []
    signature_crops: List[str] = []

    # Save authentic as image #1
    images_urls.append(
        save_image_return_url(auth_img, f"hidden_stroke/{case_id}/images/img_1.jpg")
    )
    # Macro crop for signature area
    crop1 = crop_signature_macro(auth_img, 512)
    signature_crops.append(
        save_image_return_url(crop1, f"hidden_stroke/{case_id}/signature_crops/crop_1.jpg", quality=88)
    )

    if mode == "knowledge":
        # Use the same authentic visual for all three; differences come from metadata only
        for idx in [2, 3]:
            images_urls.append(images_urls[0])  # same URL OK (client treats as separate cards)
            signature_crops.append(signature_crops[0])
    else:
        # observation: generate 2 subtle variants (near-identical; tweak signature micro-geometry)
        for i in range(2):
            forg_prompt = """
Create a near-identical variant of the provided painting.
Keep composition, palette, and lighting the same.
Only introduce a subtle change in signature micro-geometry (baseline alignment, stroke overlap order, or curve spacing).
No annotations. Differences must be visible only at macro zoom.
"""
            resp = client.models.generate_content(
                model=GENERATION_MODEL,
                contents=[forg_prompt, auth_img],
                config=types.GenerateContentConfig(response_modalities=["IMAGE"])
            )
            # Find the first inline-image part of the response, if any.
            f_img = None
            for p in resp.candidates[0].content.parts:
                if getattr(p, "inline_data", None):
                    f_img = pil_from_inline_image_part(p)
                    break
            if f_img is None:
                # Generation returned no image — fall back to a copy of the
                # authentic (the case stays playable via metadata tells).
                f_img = auth_img.copy()

            url = save_image_return_url(f_img, f"hidden_stroke/{case_id}/images/img_{i+2}.jpg")
            images_urls.append(url)
            crop = crop_signature_macro(f_img, 512)
            c_url = save_image_return_url(crop, f"hidden_stroke/{case_id}/signature_crops/crop_{i+2}.jpg", quality=88)
            signature_crops.append(c_url)

    # === Gemini: Case brief + 3 metadata bundles + ledger + solution ===
    # Feed IA title/creator/year so the authentic bundle aligns with reality.
    title = ia_item.get("title") or "Untitled"
    creator = ia_item.get("creator") or ""
    date = ia_item.get("date") or ""
    rights = ia_item.get("rights") or ""
    licenseurl = ia_item.get("licenseurl") or ""

    meta_prompt = f"""
You are generating a daily case for a noir art investigation game.

MODE: {"KNOWLEDGE" if mode=="knowledge" else "OBSERVATION"}

AUTHENTIC CONTEXT (from Internet Archive):
- title: {title}
- creator: {creator}
- date: {date}
- rights: {rights}
- licenseurl: {licenseurl}

TASK:
1) Create a short, punchy "case_brief" (2–4 sentences) explaining why the artifact matters and why fraud is suspected — NO SPOILERS.
2) Prepare THREE metadata bundles for images A,B,C with NEARLY IDENTICAL fields.
Ensure exactly ONE bundle is AUTHENTIC and that it corresponds to the above authentic context.
The other two are FORGERIES that are almost correct but contain subtle, reality-checkable anomalies.
3) Provide a concise "ledger_summary" describing a believable ownership/payment trail.
4) Provide the solution with: "answer_index" (0 for A, 1 for B, 2 for C) and detailed flags for signature/metadata/financial, plus an "explanation".

OUTPUT STRICT JSON with this schema:
{{
"case_brief": "...",
"metadata": [
{{"title":"...", "year": "...", "medium": "...", "ink_or_pigment": "...", "catalog_ref": "...", "ownership_chain": ["...","..."], "notes":"..."}},
{{"title":"...", "year": "...", "medium": "...", "ink_or_pigment": "...", "catalog_ref": "...", "ownership_chain": ["...","..."], "notes":"..."}},
{{"title":"...", "year": "...", "medium": "...", "ink_or_pigment": "...", "catalog_ref": "...", "ownership_chain": ["...","..."], "notes":"..."}}
],
"ledger_summary": "short paragraph",
"solution": {{
"answer_index": 0,
"flags_signature": [ "..." ],
"flags_metadata": [ "..." ],
"flags_financial": [ "..." ],
"explanation": "A few sentences that justify the authentic pick without listing spoilers."
}}
}}

CONSTRAINTS:
- Keep all three bundles plausible and near-identical at a glance.
- Anomalies must be subtle and testable (chemistry/ink era, currency introductions, institution timelines, accession formats, etc.).
- If MODE=KNOWLEDGE, the tells should be discoverable via metadata/ledger alone.
- If MODE=OBSERVATION, include at least one signature micro-geometry flag in "flags_signature".
- The authentic bundle should be consistent with the AUTHENTIC CONTEXT.
"""
    meta_resp = client.models.generate_content(
        model=CATEGORY_MODEL,
        contents=[meta_prompt]
    )
    raw_text = meta_resp.text.strip()
    try:
        meta_json = json.loads(raw_text)
    except Exception:
        # Model sometimes wraps JSON in a ``` fence (optionally tagged "json");
        # strip the fence and retry. A second failure propagates.
        cleaned = raw_text
        if "```" in raw_text:
            cleaned = raw_text.split("```")[1]
            if cleaned.lower().startswith("json"):
                cleaned = cleaned.split("\n", 1)[1]
        meta_json = json.loads(cleaned)

    # Pull fields with safe defaults so a partially-formed response still yields
    # a playable case (except for the metadata-count invariant checked below).
    case_brief = meta_json.get("case_brief", "A resurfaced portrait raises questions—its paper trail glitters a little too perfectly.")
    metadata = meta_json.get("metadata", [])
    ledger_summary = meta_json.get("ledger_summary", "")
    solution = meta_json.get("solution", {})
    answer_index = int(solution.get("answer_index", 0))
    flags_signature = solution.get("flags_signature", [])
    flags_metadata = solution.get("flags_metadata", [])
    flags_financial = solution.get("flags_financial", [])
    explanation = solution.get("explanation", "The authentic work aligns with period-accurate details; the others contain subtle contradictions.")

    if len(metadata) != 3:
        raise RuntimeError("Expected exactly 3 metadata bundles.")

    # Client-visible document: contains NO answer or flags.
    public = {
        "case_id": case_id,
        "mode": mode,
        "brief": case_brief,
        "style_period": style_period,
        "images": images_urls,
        "signature_crops": signature_crops,
        "metadata": metadata, # sanitized (no answer)
        "ledger_summary": ledger_summary,
        "timer_seconds": TIMER_SECONDS,
        "initial_ip": INITIAL_IP,
        "tool_costs": TOOL_COSTS,
        "credits": {
            "source": "Internet Archive",
            "identifier": ia_item.get("identifier"),
            "title": title,
            "creator": creator,
            "rights": rights,
            "licenseurl": licenseurl
        }
    }
    # Server-only document: the answer and all tell flags.
    solution_doc = {
        "answer_index": answer_index,
        "flags_signature": flags_signature,
        "flags_metadata": flags_metadata,
        "flags_financial": flags_financial,
        "explanation": explanation
    }

    cref = case_ref(case_id)
    cref.child("public").set(public)
    cref.child("solution").set(solution_doc)
    return public
|
| 437 |
+
|
| 438 |
+
# -----------------------------------------------------------------------------
|
| 439 |
+
# 4) SESSIONS, TOOLS, GUESS, LEADERBOARD (same behavior as before)
|
| 440 |
+
# -----------------------------------------------------------------------------
|
| 441 |
+
def create_session(user_id: str, username: str, case_id: str) -> Dict[str, Any]:
    """Create and persist a fresh play session with a full IP budget and timer."""
    session_id = str(uuid.uuid4())
    expires_at = (datetime.now(timezone.utc) + timedelta(seconds=TIMER_SECONDS)).isoformat()
    doc = {
        "session_id": session_id,
        "user_id": user_id,
        "username": username,
        "case_id": case_id,
        "ip_remaining": INITIAL_IP,
        "started_at": datetime.now(timezone.utc).isoformat(),
        "expires_at": expires_at,
        "actions": [],
        "status": "active"
    }
    sessions_ref().child(session_id).set(doc)
    return doc

def get_session(session_id: str) -> Dict[str, Any]:
    """Load a session document by id; empty dict when it does not exist."""
    return sessions_ref().child(session_id).get() or {}
|
| 460 |
+
|
| 461 |
+
def require_active_session(req) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """Validate the X-Session-Id header; returns (session, {}) on success or
    ({}, error_dict) on failure.

    An active-but-timed-out session is marked "expired" in the database as a
    side effect before being rejected.
    """
    session_id = req.headers.get("X-Session-Id", "")
    if not session_id:
        return {}, {"error": "Missing X-Session-Id header."}
    sess = get_session(session_id)
    if not sess or sess.get("status") != "active":
        return {}, {"error": "Invalid or inactive session."}
    # Tolerate both "+00:00" and trailing-"Z" ISO timestamps.
    expires = datetime.fromisoformat(sess["expires_at"].replace("Z", "+00:00"))
    if datetime.now(timezone.utc) > expires:
        sess["status"] = "expired"
        sessions_ref().child(session_id).child("status").set("expired")
        return {}, {"error": "Session expired."}
    return sess, {}
|
| 475 |
+
|
| 476 |
+
def spend_ip(session: Dict[str, Any], cost: int, action: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """Deduct *cost* IP from the session and append *action* to its log.

    Returns (updated_session, {}) on success or (session, error_dict) when the
    player cannot afford the tool. Mutates *session* and *action* in place.
    """
    remaining = session["ip_remaining"]
    if remaining < cost:
        return session, {"error": "Not enough Investigation Points."}
    remaining -= cost
    session["ip_remaining"] = remaining
    action["ts"] = datetime.now(timezone.utc).isoformat()
    node = sessions_ref().child(session["session_id"])
    node.child("ip_remaining").set(remaining)
    node.child("actions").push(action)
    return session, {}
|
| 485 |
+
|
| 486 |
+
def score_result(correct: bool, session: Dict[str, Any]) -> Dict[str, Any]:
    """Compute the final score for a finished session.

    Score = base (100 if correct) + time bonus (1 pt per started 10s left)
    + IP bonus (2 pts per leftover point) - flat 40-pt penalty when wrong,
    floored at zero.
    """
    expires = datetime.fromisoformat(session["expires_at"].replace("Z", "+00:00"))
    seconds_left = max(0, int((expires - datetime.now(timezone.utc)).total_seconds()))
    ip_left = session["ip_remaining"]
    total = (100 if correct else 0) + (seconds_left + 9) // 10 + ip_left * 2
    if not correct:
        total -= 40
    return {"score": max(0, total), "seconds_left": seconds_left, "ip_left": ip_left}
|
| 496 |
+
|
| 497 |
+
def upsert_leaderboard(case_id: str, user_id: str, username: str, score: int):
    """Record the user's play for the case and rebuild the cached top-N list.

    One play per user per case (later plays overwrite). The leaderboard is
    recomputed from all plays, sorted by score descending.
    """
    plays_ref(case_id).child(user_id).set({
        "user_id": user_id,
        "username": username,
        "score": score,
        "ts": datetime.now(timezone.utc).isoformat()
    })
    all_plays = plays_ref(case_id).get() or {}
    ranked = sorted(all_plays.values(), key=lambda p: p.get("score", 0), reverse=True)
    leaderboard_ref(case_id).set(ranked[:LEADERBOARD_TOP_N])
|
| 507 |
+
|
| 508 |
+
# -----------------------------------------------------------------------------
|
| 509 |
+
# 5) ROUTES
|
| 510 |
+
# -----------------------------------------------------------------------------
|
| 511 |
+
@app.route("/health", methods=["GET"])
def health():
    """Liveness probe: reports ok plus the current UTC timestamp."""
    now = datetime.now(timezone.utc).isoformat()
    return jsonify({"ok": True, "time": now})
|
| 514 |
+
|
| 515 |
+
# --- Admin: Internet Archive ingestion ---
|
| 516 |
+
# --- Admin: Internet Archive ingestion ---
@app.route("/admin/ingest-ia", methods=["POST"])
def admin_ingest_ia():
    """Admin-only: ingest Internet Archive search results into ia_pool.

    JSON body (all optional): "query" (IA advanced-search query), "pages"
    (default 2), "rows" per page (default 100). Returns counts of newly
    ingested records, per-item errors, and the resulting pool size.
    """
    if not ADMIN_KEY or request.headers.get("X-Admin-Key") != ADMIN_KEY:
        return jsonify({"error": "Forbidden"}), 403

    body = request.get_json() or {}
    # Example default: a few reputable museum collections with images
    query = body.get("query") or '(collection:(metropolitanmuseum OR smithsonian OR getty OR artic) AND mediatype:image)'
    pages = int(body.get("pages") or 2)
    rows = int(body.get("rows") or 100)
    ingested = 0
    errors = 0

    for page in range(1, pages + 1):
        try:
            docs = ia_advanced_search(query, rows=rows, page=page)
        except Exception:
            errors += 1
            continue
        for d in docs:
            ident = d.get("identifier")
            if not ident:
                continue
            try:
                # Dedup check lives inside the try: identifiers containing
                # characters that are illegal in Firebase keys (e.g. '.')
                # raise here and must not abort the whole ingest run.
                if ia_pool_ref().child(ident).get():
                    continue
                if ingest_ia_doc(d):
                    ingested += 1
            except Exception:
                errors += 1
                continue

    pool_size = len(ia_pool_ref().get() or {})
    return jsonify({"ok": True, "ingested": ingested, "errors": errors, "pool_size": pool_size})
|
| 552 |
+
|
| 553 |
+
@app.route("/admin/ia-pool/stats", methods=["GET"])
def ia_pool_stats():
    """Admin-only: report how many IA records are currently in the pool."""
    if not ADMIN_KEY or request.headers.get("X-Admin-Key") != ADMIN_KEY:
        return jsonify({"error": "Forbidden"}), 403
    return jsonify({"pool_size": len(ia_pool_ref().get() or {})})
|
| 559 |
+
|
| 560 |
+
# --- Admin: pre-generate today's case (optional) ---
|
| 561 |
+
# --- Admin: pre-generate today's case (optional) ---
@app.route("/admin/generate-today", methods=["POST"])
def admin_generate_today():
    """Admin-only: pre-generate today's case so the first player avoids the
    generation latency."""
    if not ADMIN_KEY or request.headers.get("X-Admin-Key") != ADMIN_KEY:
        return jsonify({"error": "Forbidden"}), 403
    today = utc_today_str()
    public = ensure_case_generated(today)
    return jsonify({"generated": True, "case_id": today, "mode": public.get("mode")})
|
| 568 |
+
|
| 569 |
+
# --- Player flow ---
|
| 570 |
+
# --- Player flow ---
@app.route("/cases/today/start", methods=["POST"])
def start_case():
    """Start (or resume) today's case for the calling user.

    Generates the case on first access, then reuses the user's existing
    active, unexpired session for this case or creates a fresh one. Returns
    {"session_id": ..., "case": <public payload>}.
    """
    user_id, username = extract_user_from_headers(request)
    case_id = utc_today_str()
    public = ensure_case_generated(case_id)

    # Create/reuse an active session for this user+case.
    existing = sessions_ref().order_by_child("user_id").equal_to(user_id).get()
    sess = None
    if existing:
        now = datetime.now(timezone.utc)
        for sid, sdoc in existing.items():
            if sdoc.get("case_id") != case_id or sdoc.get("status") != "active":
                continue
            # Fix: a session may still be marked "active" after its timer ran
            # out (require_active_session only flips the status lazily).
            # Handing back a timed-out session would make every subsequent
            # tool call fail with "Session expired" — skip it instead.
            try:
                exp = datetime.fromisoformat(sdoc["expires_at"].replace("Z", "+00:00"))
            except (KeyError, ValueError):
                continue
            if now <= exp:
                sess = sdoc
                break
    if not sess:
        sess = create_session(user_id, username, case_id)

    payload = {
        "session_id": sess["session_id"],
        "case": public
    }
    return jsonify(payload)
|
| 592 |
+
|
| 593 |
+
@app.route("/cases/<case_id>/tool/signature", methods=["POST"])
|
| 594 |
+
def tool_signature(case_id):
|
| 595 |
+
session, err = require_active_session(request)
|
| 596 |
+
if err: return jsonify(err), 400
|
| 597 |
+
if session["case_id"] != case_id:
|
| 598 |
+
return jsonify({"error": "Session/case mismatch."}), 400
|
| 599 |
+
|
| 600 |
+
body = request.get_json() or {}
|
| 601 |
+
img_index = int(body.get("image_index", 0))
|
| 602 |
+
if img_index not in [0,1,2]:
|
| 603 |
+
return jsonify({"error": "image_index must be 0,1,2"}), 400
|
| 604 |
+
|
| 605 |
+
session, err = spend_ip(session, TOOL_COSTS["signature"], {"type": "tool_signature", "image_index": img_index})
|
| 606 |
+
if err: return jsonify(err), 400
|
| 607 |
+
|
| 608 |
+
public = case_ref(case_id).child("public").get() or {}
|
| 609 |
+
crops = public.get("signature_crops", [])
|
| 610 |
+
crop_url = crops[img_index] if img_index < len(crops) else ""
|
| 611 |
+
hint = "Examine baseline alignment and stroke overlap." if public.get("mode") == "observation" else ""
|
| 612 |
+
return jsonify({"crop_url": crop_url, "hint": hint, "ip_remaining": session["ip_remaining"]})
|
| 613 |
+
|
| 614 |
+
@app.route("/cases/<case_id>/tool/metadata", methods=["POST"])
|
| 615 |
+
def tool_metadata(case_id):
|
| 616 |
+
session, err = require_active_session(request)
|
| 617 |
+
if err: return jsonify(err), 400
|
| 618 |
+
if session["case_id"] != case_id:
|
| 619 |
+
return jsonify({"error": "Session/case mismatch."}), 400
|
| 620 |
+
|
| 621 |
+
body = request.get_json() or {}
|
| 622 |
+
img_index = int(body.get("image_index", 0))
|
| 623 |
+
if img_index not in [0,1,2]:
|
| 624 |
+
return jsonify({"error": "image_index must be 0,1,2"}), 400
|
| 625 |
+
|
| 626 |
+
session, err = spend_ip(session, TOOL_COSTS["metadata"], {"type": "tool_metadata", "image_index": img_index})
|
| 627 |
+
if err: return jsonify(err), 400
|
| 628 |
+
|
| 629 |
+
solution = case_ref(case_id).child("solution").get() or {}
|
| 630 |
+
flags_metadata: List[str] = solution.get("flags_metadata", [])
|
| 631 |
+
hint = flags_metadata[0] if flags_metadata else "Check chronology, chemistry, and institutional formats."
|
| 632 |
+
return jsonify({"flags": [hint], "ip_remaining": session["ip_remaining"]})
|
| 633 |
+
|
| 634 |
+
@app.route("/cases/<case_id>/tool/financial", methods=["POST"])
|
| 635 |
+
def tool_financial(case_id):
|
| 636 |
+
session, err = require_active_session(request)
|
| 637 |
+
if err: return jsonify(err), 400
|
| 638 |
+
if session["case_id"] != case_id:
|
| 639 |
+
return jsonify({"error": "Session/case mismatch."}), 400
|
| 640 |
+
|
| 641 |
+
session, err = spend_ip(session, TOOL_COSTS["financial"], {"type": "tool_financial"})
|
| 642 |
+
if err: return jsonify(err), 400
|
| 643 |
+
|
| 644 |
+
solution = case_ref(case_id).child("solution").get() or {}
|
| 645 |
+
flags_financial: List[str] = solution.get("flags_financial", [])
|
| 646 |
+
hint = flags_financial[0] if flags_financial else "Follow currency, jurisdiction, and payment method timelines."
|
| 647 |
+
return jsonify({"flags": [hint], "ip_remaining": session["ip_remaining"]})
|
| 648 |
+
|
| 649 |
+
@app.route("/cases/<case_id>/guess", methods=["POST"])
|
| 650 |
+
def submit_guess(case_id):
|
| 651 |
+
session, err = require_active_session(request)
|
| 652 |
+
if err: return jsonify(err), 400
|
| 653 |
+
if session["case_id"] != case_id:
|
| 654 |
+
return jsonify({"error": "Session/case mismatch."}), 400
|
| 655 |
+
|
| 656 |
+
body = request.get_json() or {}
|
| 657 |
+
guess_index = int(body.get("image_index", -1))
|
| 658 |
+
rationale = (body.get("rationale") or "").strip()
|
| 659 |
+
if guess_index not in [0,1,2]:
|
| 660 |
+
return jsonify({"error": "image_index must be 0,1,2"}), 400
|
| 661 |
+
|
| 662 |
+
sessions_ref().child(session["session_id"]).child("status").set("finished")
|
| 663 |
+
session["status"] = "finished"
|
| 664 |
+
|
| 665 |
+
solution = case_ref(case_id).child("solution").get() or {}
|
| 666 |
+
answer_index = int(solution.get("answer_index", 0))
|
| 667 |
+
correct = (guess_index == answer_index)
|
| 668 |
+
|
| 669 |
+
summary = score_result(correct, session)
|
| 670 |
+
upsert_leaderboard(case_id, session["user_id"], session["username"], summary["score"])
|
| 671 |
+
|
| 672 |
+
reveal = {
|
| 673 |
+
"authentic_index": answer_index,
|
| 674 |
+
"explanation": solution.get("explanation", ""),
|
| 675 |
+
"flags_signature": solution.get("flags_signature", []),
|
| 676 |
+
"flags_metadata": solution.get("flags_metadata", []),
|
| 677 |
+
"flags_financial": solution.get("flags_financial", [])
|
| 678 |
+
}
|
| 679 |
+
|
| 680 |
+
plays_ref(case_id).child(session["user_id"]).update({
|
| 681 |
+
"rationale": rationale,
|
| 682 |
+
"correct": correct,
|
| 683 |
+
"score": summary["score"],
|
| 684 |
+
"seconds_left": summary["seconds_left"],
|
| 685 |
+
"ip_left": summary["ip_left"],
|
| 686 |
+
"finished_at": datetime.now(timezone.utc).isoformat()
|
| 687 |
+
})
|
| 688 |
+
|
| 689 |
+
return jsonify({
|
| 690 |
+
"correct": correct,
|
| 691 |
+
"score": summary["score"],
|
| 692 |
+
"timeLeft": summary["seconds_left"],
|
| 693 |
+
"ipLeft": summary["ip_left"],
|
| 694 |
+
"reveal": reveal
|
| 695 |
+
})
|
| 696 |
+
|
| 697 |
+
@app.route("/leaderboard/daily", methods=["GET"])
|
| 698 |
+
def leaderboard_daily():
|
| 699 |
+
case_id = utc_today_str()
|
| 700 |
+
top = leaderboard_ref(case_id).get() or []
|
| 701 |
+
user_id, username = extract_user_from_headers(request)
|
| 702 |
+
me = plays_ref(case_id).child(user_id).get() or {}
|
| 703 |
+
rank = None
|
| 704 |
+
if top:
|
| 705 |
+
for i, row in enumerate(top):
|
| 706 |
+
if row.get("user_id") == user_id:
|
| 707 |
+
rank = i + 1
|
| 708 |
+
break
|
| 709 |
+
return jsonify({"case_id": case_id, "top": top, "me": {"score": me.get("score"), "rank": rank}})
|
| 710 |
+
|
| 711 |
+
# -----------------------------------------------------------------------------
# 6) MAIN
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    # Security fix: debug=True was hard-coded while binding 0.0.0.0. The
    # Werkzeug interactive debugger allows remote code execution, so debug
    # mode must be opted into explicitly via the environment.
    debug = os.environ.get("FLASK_DEBUG", "").lower() in ("1", "true", "yes")
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", "7860")), debug=debug)
|