Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -6,7 +6,7 @@ import base64
|
|
| 6 |
import logging
|
| 7 |
import uuid
|
| 8 |
import time
|
| 9 |
-
from typing import List, Dict, Any, Tuple
|
| 10 |
|
| 11 |
from flask import Flask, request, jsonify
|
| 12 |
from flask_cors import CORS
|
|
@@ -36,7 +36,7 @@ GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", "")
|
|
| 36 |
if not GEMINI_API_KEY:
|
| 37 |
log.warning("GEMINI_API_KEY not set — gemini calls will fail (but fallback still works).")
|
| 38 |
|
| 39 |
-
client = genai.Client(api_key=GEMINI_API_KEY)
|
| 40 |
|
| 41 |
# Firebase config (read service account JSON from env)
|
| 42 |
FIREBASE_ADMIN_JSON = os.getenv("FIREBASE_ADMIN_JSON", "").strip()
|
|
@@ -60,18 +60,14 @@ def init_firebase_admin_if_needed():
|
|
| 60 |
return None
|
| 61 |
if not FIREBASE_ADMIN_AVAILABLE:
|
| 62 |
raise RuntimeError("firebase-admin not installed (pip install firebase-admin)")
|
| 63 |
-
|
| 64 |
try:
|
| 65 |
sa_obj = json.loads(FIREBASE_ADMIN_JSON)
|
| 66 |
except Exception as e:
|
| 67 |
log.exception("Failed parsing FIREBASE_ADMIN_JSON: %s", e)
|
| 68 |
raise
|
| 69 |
-
|
| 70 |
-
# determine bucket
|
| 71 |
bucket_name = FIREBASE_STORAGE_BUCKET or (sa_obj.get("project_id") and f"{sa_obj.get('project_id')}.appspot.com")
|
| 72 |
if not bucket_name:
|
| 73 |
raise RuntimeError("Could not determine storage bucket. Set FIREBASE_STORAGE_BUCKET or include project_id in service account JSON.")
|
| 74 |
-
|
| 75 |
try:
|
| 76 |
cred = fb_credentials.Certificate(sa_obj)
|
| 77 |
_firebase_app = firebase_admin.initialize_app(cred, {"storageBucket": bucket_name})
|
|
@@ -83,7 +79,7 @@ def init_firebase_admin_if_needed():
|
|
| 83 |
|
| 84 |
def upload_b64_to_firebase(base64_str: str, path: str, content_type="image/jpeg", metadata: dict = None) -> str:
|
| 85 |
"""
|
| 86 |
-
Upload base64 string to Firebase Storage at `path`
|
| 87 |
Optionally attach metadata dict (custom metadata).
|
| 88 |
Returns a public URL when possible, otherwise returns gs://<bucket>/<path>.
|
| 89 |
"""
|
|
@@ -103,11 +99,10 @@ def upload_b64_to_firebase(base64_str: str, path: str, content_type="image/jpeg"
|
|
| 103 |
bucket = fb_storage.bucket()
|
| 104 |
blob = bucket.blob(path)
|
| 105 |
blob.upload_from_string(data, content_type=content_type)
|
| 106 |
-
# attach metadata if provided
|
| 107 |
if metadata:
|
| 108 |
try:
|
| 109 |
-
|
| 110 |
-
blob.metadata = metadata
|
| 111 |
blob.patch()
|
| 112 |
except Exception as me:
|
| 113 |
log.warning("Failed to patch metadata for %s: %s", path, me)
|
|
@@ -132,7 +127,6 @@ def read_image_bytes(file_storage) -> Tuple[np.ndarray, int, int, bytes]:
|
|
| 132 |
try:
|
| 133 |
img = ImageOps.exif_transpose(img)
|
| 134 |
except Exception:
|
| 135 |
-
# ignore if EXIF not present or transpose fails
|
| 136 |
pass
|
| 137 |
img = img.convert("RGB")
|
| 138 |
w, h = img.size
|
|
@@ -146,7 +140,6 @@ def crop_and_b64(bgr_img: np.ndarray, x: int, y: int, w: int, h: int, max_side=5
|
|
| 146 |
crop = bgr_img[y:y2, x:x2]
|
| 147 |
if crop.size == 0:
|
| 148 |
return ""
|
| 149 |
-
# resize if too large
|
| 150 |
max_dim = max(crop.shape[0], crop.shape[1])
|
| 151 |
if max_dim > max_side:
|
| 152 |
scale = max_side / max_dim
|
|
@@ -185,7 +178,6 @@ def fallback_contour_crops(bgr_img, max_items=8) -> List[Dict[str, Any]]:
|
|
| 185 |
"thumbnail_b64": b64,
|
| 186 |
"source": "fallback"
|
| 187 |
})
|
| 188 |
-
# if still none, split into grid
|
| 189 |
if not items:
|
| 190 |
h_half, w_half = h_img//2, w_img//2
|
| 191 |
rects = [
|
|
@@ -205,6 +197,75 @@ def fallback_contour_crops(bgr_img, max_items=8) -> List[Dict[str, Any]]:
|
|
| 205 |
})
|
| 206 |
return items
|
| 207 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 208 |
# ---------- Main / processing ----------
|
| 209 |
@app.route("/process", methods=["POST"])
|
| 210 |
def process_image():
|
|
@@ -212,7 +273,6 @@ def process_image():
|
|
| 212 |
return jsonify({"error": "missing photo"}), 400
|
| 213 |
file = request.files["photo"]
|
| 214 |
|
| 215 |
-
# optional uid from form fields (client can supply for grouping)
|
| 216 |
uid = (request.form.get("uid") or request.args.get("uid") or "anon").strip() or "anon"
|
| 217 |
|
| 218 |
try:
|
|
@@ -221,10 +281,9 @@ def process_image():
|
|
| 221 |
log.error("invalid image: %s", e)
|
| 222 |
return jsonify({"error": "invalid image"}), 400
|
| 223 |
|
| 224 |
-
# generate a per-request session id used to mark temporary uploads
|
| 225 |
session_id = str(uuid.uuid4())
|
| 226 |
|
| 227 |
-
#
|
| 228 |
user_prompt = (
|
| 229 |
"You are an assistant that extracts clothing detections from a single image. "
|
| 230 |
"Return a JSON object with a single key 'items' which is an array. Each item must have: "
|
|
@@ -271,13 +330,13 @@ def process_image():
|
|
| 271 |
cfg = types.GenerateContentConfig(response_mime_type="application/json", response_schema=schema)
|
| 272 |
|
| 273 |
log.info("Calling Gemini model for detection (gemini-2.5-flash-lite)...")
|
| 274 |
-
model_resp = client.models.generate_content(model="gemini-2.5-flash-lite", contents=contents, config=cfg)
|
| 275 |
-
raw_text = model_resp.text or ""
|
| 276 |
log.info("Gemini raw response length: %d", len(raw_text))
|
| 277 |
|
| 278 |
parsed = None
|
| 279 |
try:
|
| 280 |
-
parsed = json.loads(raw_text)
|
| 281 |
except Exception as e:
|
| 282 |
log.warning("Could not parse Gemini JSON: %s", e)
|
| 283 |
parsed = None
|
|
@@ -315,39 +374,59 @@ def process_image():
|
|
| 315 |
log.info("Gemini returned no items or parse failed — using fallback contour crops.")
|
| 316 |
items_out = fallback_contour_crops(bgr_img, max_items=8)
|
| 317 |
|
| 318 |
-
#
|
| 319 |
if FIREBASE_ADMIN_JSON and FIREBASE_ADMIN_AVAILABLE:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 320 |
safe_uid = "".join(ch for ch in uid if ch.isalnum() or ch in ("-", "_")) or "anon"
|
| 321 |
for itm in items_out:
|
| 322 |
b64 = itm.get("thumbnail_b64")
|
| 323 |
if not b64:
|
| 324 |
continue
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 325 |
item_id = itm.get("id") or str(uuid.uuid4())
|
| 326 |
path = f"detected/{safe_uid}/{item_id}.jpg"
|
| 327 |
try:
|
| 328 |
-
|
| 329 |
"tmp": "true",
|
| 330 |
"session_id": session_id,
|
| 331 |
"uploaded_by": safe_uid,
|
| 332 |
-
"uploaded_at": str(int(time.time()))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 333 |
}
|
| 334 |
-
url = upload_b64_to_firebase(b64, path, content_type="image/jpeg", metadata=
|
| 335 |
itm["thumbnail_url"] = url
|
| 336 |
-
|
| 337 |
itm.pop("thumbnail_b64", None)
|
| 338 |
-
# add session marker to response item
|
| 339 |
itm["_session_id"] = session_id
|
| 340 |
log.debug("Auto-uploaded thumbnail for %s -> %s (session=%s)", item_id, url, session_id)
|
| 341 |
except Exception as up_e:
|
| 342 |
log.warning("Auto-upload failed for %s: %s", item_id, up_e)
|
| 343 |
-
#
|
| 344 |
else:
|
| 345 |
if not FIREBASE_ADMIN_JSON:
|
| 346 |
log.info("FIREBASE_ADMIN_JSON not set; skipping server-side thumbnail upload.")
|
| 347 |
else:
|
| 348 |
log.info("Firebase admin SDK not available; skipping server-side thumbnail upload.")
|
| 349 |
|
| 350 |
-
return jsonify({"ok": True, "items": items_out, "session_id": session_id, "debug": {"raw_model_text": raw_text[:1600]}}), 200
|
| 351 |
|
| 352 |
except Exception as ex:
|
| 353 |
log.exception("Processing error: %s", ex)
|
|
@@ -406,18 +485,15 @@ def finalize_detections():
|
|
| 406 |
blobs = list(bucket.list_blobs(prefix=prefix))
|
| 407 |
for blob in blobs:
|
| 408 |
try:
|
| 409 |
-
name = blob.name
|
| 410 |
fname = name.split("/")[-1]
|
| 411 |
-
# only accept files of form <id>.<ext>
|
| 412 |
if "." not in fname:
|
| 413 |
-
# skip unexpected filenames
|
| 414 |
continue
|
| 415 |
item_id = fname.rsplit(".", 1)[0]
|
| 416 |
|
| 417 |
md = blob.metadata or {}
|
| 418 |
-
# only consider temporary files
|
| 419 |
if str(md.get("session_id", "")) != session_id or str(md.get("tmp", "")).lower() not in ("true", "1", "yes"):
|
| 420 |
-
# skip (not part of this session / not temporary)
|
| 421 |
continue
|
| 422 |
|
| 423 |
if item_id in keep_ids:
|
|
@@ -427,9 +503,28 @@ def finalize_detections():
|
|
| 427 |
url = blob.public_url
|
| 428 |
except Exception:
|
| 429 |
url = f"gs://{bucket.name}/{name}"
|
| 430 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 431 |
else:
|
| 432 |
-
# delete unkept (safe: only temporary and session-matching files)
|
| 433 |
try:
|
| 434 |
blob.delete()
|
| 435 |
deleted.append(item_id)
|
|
@@ -442,6 +537,57 @@ def finalize_detections():
|
|
| 442 |
log.exception("finalize_detections error: %s", e)
|
| 443 |
return jsonify({"error": "internal", "detail": str(e)}), 500
|
| 444 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 445 |
if __name__ == "__main__":
|
| 446 |
port = int(os.getenv("PORT", 7860))
|
| 447 |
log.info("Starting server on 0.0.0.0:%d", port)
|
|
|
|
| 6 |
import logging
|
| 7 |
import uuid
|
| 8 |
import time
|
| 9 |
+
from typing import List, Dict, Any, Tuple, Optional
|
| 10 |
|
| 11 |
from flask import Flask, request, jsonify
|
| 12 |
from flask_cors import CORS
|
|
|
|
| 36 |
if not GEMINI_API_KEY:
|
| 37 |
log.warning("GEMINI_API_KEY not set — gemini calls will fail (but fallback still works).")
|
| 38 |
|
| 39 |
+
client = genai.Client(api_key=GEMINI_API_KEY) if GEMINI_API_KEY else None
|
| 40 |
|
| 41 |
# Firebase config (read service account JSON from env)
|
| 42 |
FIREBASE_ADMIN_JSON = os.getenv("FIREBASE_ADMIN_JSON", "").strip()
|
|
|
|
| 60 |
return None
|
| 61 |
if not FIREBASE_ADMIN_AVAILABLE:
|
| 62 |
raise RuntimeError("firebase-admin not installed (pip install firebase-admin)")
|
|
|
|
| 63 |
try:
|
| 64 |
sa_obj = json.loads(FIREBASE_ADMIN_JSON)
|
| 65 |
except Exception as e:
|
| 66 |
log.exception("Failed parsing FIREBASE_ADMIN_JSON: %s", e)
|
| 67 |
raise
|
|
|
|
|
|
|
| 68 |
bucket_name = FIREBASE_STORAGE_BUCKET or (sa_obj.get("project_id") and f"{sa_obj.get('project_id')}.appspot.com")
|
| 69 |
if not bucket_name:
|
| 70 |
raise RuntimeError("Could not determine storage bucket. Set FIREBASE_STORAGE_BUCKET or include project_id in service account JSON.")
|
|
|
|
| 71 |
try:
|
| 72 |
cred = fb_credentials.Certificate(sa_obj)
|
| 73 |
_firebase_app = firebase_admin.initialize_app(cred, {"storageBucket": bucket_name})
|
|
|
|
| 79 |
|
| 80 |
def upload_b64_to_firebase(base64_str: str, path: str, content_type="image/jpeg", metadata: dict = None) -> str:
|
| 81 |
"""
|
| 82 |
+
Upload base64 string to Firebase Storage at `path`.
|
| 83 |
Optionally attach metadata dict (custom metadata).
|
| 84 |
Returns a public URL when possible, otherwise returns gs://<bucket>/<path>.
|
| 85 |
"""
|
|
|
|
| 99 |
bucket = fb_storage.bucket()
|
| 100 |
blob = bucket.blob(path)
|
| 101 |
blob.upload_from_string(data, content_type=content_type)
|
| 102 |
+
# attach metadata if provided (values must be strings)
|
| 103 |
if metadata:
|
| 104 |
try:
|
| 105 |
+
blob.metadata = {k: (json.dumps(v) if not isinstance(v, str) else v) for k, v in metadata.items()}
|
|
|
|
| 106 |
blob.patch()
|
| 107 |
except Exception as me:
|
| 108 |
log.warning("Failed to patch metadata for %s: %s", path, me)
|
|
|
|
| 127 |
try:
|
| 128 |
img = ImageOps.exif_transpose(img)
|
| 129 |
except Exception:
|
|
|
|
| 130 |
pass
|
| 131 |
img = img.convert("RGB")
|
| 132 |
w, h = img.size
|
|
|
|
| 140 |
crop = bgr_img[y:y2, x:x2]
|
| 141 |
if crop.size == 0:
|
| 142 |
return ""
|
|
|
|
| 143 |
max_dim = max(crop.shape[0], crop.shape[1])
|
| 144 |
if max_dim > max_side:
|
| 145 |
scale = max_side / max_dim
|
|
|
|
| 178 |
"thumbnail_b64": b64,
|
| 179 |
"source": "fallback"
|
| 180 |
})
|
|
|
|
| 181 |
if not items:
|
| 182 |
h_half, w_half = h_img//2, w_img//2
|
| 183 |
rects = [
|
|
|
|
| 197 |
})
|
| 198 |
return items
|
| 199 |
|
| 200 |
+
# ---------- AI analysis helper ----------
|
| 201 |
+
def analyze_crop_with_gemini(jpeg_b64: str) -> Dict[str, Any]:
    """
    Run Gemini on a cropped clothing image (base64-encoded JPEG) and extract:
      type    -- one-word category like 'shoe', 'jacket', 'dress'
      summary -- single-line description
      brand   -- brand name if visible, else empty string
      tags    -- list of short descriptors

    Returns a dict with exactly those four keys. Falls back to
    empty/default values when the API key/client is missing, the model
    call fails, or the response cannot be parsed.
    """
    # Single source of truth for the fallback shape (was duplicated
    # four times in the original).
    default = {"type": "unknown", "summary": "", "brand": "", "tags": []}

    if not client:
        # No GEMINI_API_KEY configured — skip the call entirely.
        return dict(default)
    try:
        # prepare prompt
        prompt = (
            "You are an assistant that identifies clothing item characteristics from an image. "
            "Return only a JSON object with keys: type (single word like 'shoe','top','jacket'), "
            "summary (a single short sentence, one line), brand (brand name if visible else empty string), "
            "tags (an array of short single-word tags describing visible attributes, e.g. ['striped','leather','white']). "
            "Keep values short and concise."
        )

        contents = [
            types.Content(role="user", parts=[types.Part.from_text(text=prompt)])
        ]

        # attach the image bytes
        image_bytes = base64.b64decode(jpeg_b64)
        contents.append(types.Content(role="user", parts=[types.Part.from_bytes(data=image_bytes, mime_type="image/jpeg")]))

        schema = {
            "type": "object",
            "properties": {
                "type": {"type": "string"},
                "summary": {"type": "string"},
                "brand": {"type": "string"},
                "tags": {"type": "array", "items": {"type": "string"}}
            },
            "required": ["type", "summary"]
        }
        cfg = types.GenerateContentConfig(response_mime_type="application/json", response_schema=schema)

        # call model (use the same model family used for detection)
        resp = client.models.generate_content(model="gemini-2.5-flash-lite", contents=contents, config=cfg)
        text = resp.text or ""
        parsed = {}
        try:
            parsed = json.loads(text)
            # The model may return valid JSON that is not an object
            # (e.g. a bare array); treat that as a parse failure rather
            # than letting the coercion below raise AttributeError.
            if not isinstance(parsed, dict):
                raise ValueError(f"expected JSON object, got {type(parsed).__name__}")
            # coerce expected shapes
            parsed["type"] = str(parsed.get("type", "")).strip()
            parsed["summary"] = str(parsed.get("summary", "")).strip()
            parsed["brand"] = str(parsed.get("brand", "")).strip()
            tags = parsed.get("tags", [])
            if not isinstance(tags, list):
                tags = []
            parsed["tags"] = [str(t).strip() for t in tags if str(t).strip()]
        except Exception as e:
            log.warning("Failed parsing Gemini analysis JSON: %s — raw: %s", e, (text[:300] if text else ""))
            parsed = dict(default)
        return {
            "type": parsed.get("type", "unknown") or "unknown",
            "summary": parsed.get("summary", "") or "",
            "brand": parsed.get("brand", "") or "",
            "tags": parsed.get("tags", []) or []
        }
    except Exception as e:
        # Network/SDK errors: log with traceback and degrade gracefully.
        log.exception("analyze_crop_with_gemini failure: %s", e)
        return dict(default)
|
| 268 |
+
|
| 269 |
# ---------- Main / processing ----------
|
| 270 |
@app.route("/process", methods=["POST"])
|
| 271 |
def process_image():
|
|
|
|
| 273 |
return jsonify({"error": "missing photo"}), 400
|
| 274 |
file = request.files["photo"]
|
| 275 |
|
|
|
|
| 276 |
uid = (request.form.get("uid") or request.args.get("uid") or "anon").strip() or "anon"
|
| 277 |
|
| 278 |
try:
|
|
|
|
| 281 |
log.error("invalid image: %s", e)
|
| 282 |
return jsonify({"error": "invalid image"}), 400
|
| 283 |
|
|
|
|
| 284 |
session_id = str(uuid.uuid4())
|
| 285 |
|
| 286 |
+
# Detection prompt (same as before)
|
| 287 |
user_prompt = (
|
| 288 |
"You are an assistant that extracts clothing detections from a single image. "
|
| 289 |
"Return a JSON object with a single key 'items' which is an array. Each item must have: "
|
|
|
|
| 330 |
cfg = types.GenerateContentConfig(response_mime_type="application/json", response_schema=schema)
|
| 331 |
|
| 332 |
log.info("Calling Gemini model for detection (gemini-2.5-flash-lite)...")
|
| 333 |
+
model_resp = client.models.generate_content(model="gemini-2.5-flash-lite", contents=contents, config=cfg) if client else None
|
| 334 |
+
raw_text = (model_resp.text or "") if model_resp else ""
|
| 335 |
log.info("Gemini raw response length: %d", len(raw_text))
|
| 336 |
|
| 337 |
parsed = None
|
| 338 |
try:
|
| 339 |
+
parsed = json.loads(raw_text) if raw_text else None
|
| 340 |
except Exception as e:
|
| 341 |
log.warning("Could not parse Gemini JSON: %s", e)
|
| 342 |
parsed = None
|
|
|
|
| 374 |
log.info("Gemini returned no items or parse failed — using fallback contour crops.")
|
| 375 |
items_out = fallback_contour_crops(bgr_img, max_items=8)
|
| 376 |
|
| 377 |
+
# Perform AI analysis per crop (if possible) and auto-upload to firebase with metadata (tmp + session)
|
| 378 |
if FIREBASE_ADMIN_JSON and FIREBASE_ADMIN_AVAILABLE:
|
| 379 |
+
try:
|
| 380 |
+
init_firebase_admin_if_needed()
|
| 381 |
+
bucket = fb_storage.bucket()
|
| 382 |
+
except Exception as e:
|
| 383 |
+
log.exception("Firebase admin init for upload failed: %s", e)
|
| 384 |
+
bucket = None
|
| 385 |
+
|
| 386 |
safe_uid = "".join(ch for ch in uid if ch.isalnum() or ch in ("-", "_")) or "anon"
|
| 387 |
for itm in items_out:
|
| 388 |
b64 = itm.get("thumbnail_b64")
|
| 389 |
if not b64:
|
| 390 |
continue
|
| 391 |
+
# analyze
|
| 392 |
+
try:
|
| 393 |
+
analysis = analyze_crop_with_gemini(b64) if client else {"type":"unknown","summary":"","brand":"","tags":[]}
|
| 394 |
+
except Exception as ae:
|
| 395 |
+
log.warning("analysis failed: %s", ae)
|
| 396 |
+
analysis = {"type":"unknown","summary":"","brand":"","tags":[]}
|
| 397 |
+
|
| 398 |
+
itm["analysis"] = analysis
|
| 399 |
+
|
| 400 |
item_id = itm.get("id") or str(uuid.uuid4())
|
| 401 |
path = f"detected/{safe_uid}/{item_id}.jpg"
|
| 402 |
try:
|
| 403 |
+
metadata = {
|
| 404 |
"tmp": "true",
|
| 405 |
"session_id": session_id,
|
| 406 |
"uploaded_by": safe_uid,
|
| 407 |
+
"uploaded_at": str(int(time.time())),
|
| 408 |
+
# store AI fields as JSON strings for later inspection
|
| 409 |
+
"ai_type": analysis.get("type",""),
|
| 410 |
+
"ai_brand": analysis.get("brand",""),
|
| 411 |
+
"ai_summary": analysis.get("summary",""),
|
| 412 |
+
"ai_tags": json.dumps(analysis.get("tags", [])),
|
| 413 |
}
|
| 414 |
+
url = upload_b64_to_firebase(b64, path, content_type="image/jpeg", metadata=metadata)
|
| 415 |
itm["thumbnail_url"] = url
|
| 416 |
+
itm["thumbnail_path"] = path
|
| 417 |
itm.pop("thumbnail_b64", None)
|
|
|
|
| 418 |
itm["_session_id"] = session_id
|
| 419 |
log.debug("Auto-uploaded thumbnail for %s -> %s (session=%s)", item_id, url, session_id)
|
| 420 |
except Exception as up_e:
|
| 421 |
log.warning("Auto-upload failed for %s: %s", item_id, up_e)
|
| 422 |
+
# keep thumbnail_b64 and analysis for client fallback
|
| 423 |
else:
|
| 424 |
if not FIREBASE_ADMIN_JSON:
|
| 425 |
log.info("FIREBASE_ADMIN_JSON not set; skipping server-side thumbnail upload.")
|
| 426 |
else:
|
| 427 |
log.info("Firebase admin SDK not available; skipping server-side thumbnail upload.")
|
| 428 |
|
| 429 |
+
return jsonify({"ok": True, "items": items_out, "session_id": session_id, "debug": {"raw_model_text": (raw_text or "")[:1600]}}), 200
|
| 430 |
|
| 431 |
except Exception as ex:
|
| 432 |
log.exception("Processing error: %s", ex)
|
|
|
|
| 485 |
blobs = list(bucket.list_blobs(prefix=prefix))
|
| 486 |
for blob in blobs:
|
| 487 |
try:
|
| 488 |
+
name = blob.name
|
| 489 |
fname = name.split("/")[-1]
|
|
|
|
| 490 |
if "." not in fname:
|
|
|
|
| 491 |
continue
|
| 492 |
item_id = fname.rsplit(".", 1)[0]
|
| 493 |
|
| 494 |
md = blob.metadata or {}
|
| 495 |
+
# only consider temporary files matching this session id
|
| 496 |
if str(md.get("session_id", "")) != session_id or str(md.get("tmp", "")).lower() not in ("true", "1", "yes"):
|
|
|
|
| 497 |
continue
|
| 498 |
|
| 499 |
if item_id in keep_ids:
|
|
|
|
| 503 |
url = blob.public_url
|
| 504 |
except Exception:
|
| 505 |
url = f"gs://{bucket.name}/{name}"
|
| 506 |
+
|
| 507 |
+
# extract AI metadata (if present)
|
| 508 |
+
ai_type = md.get("ai_type") or ""
|
| 509 |
+
ai_brand = md.get("ai_brand") or ""
|
| 510 |
+
ai_summary = md.get("ai_summary") or ""
|
| 511 |
+
ai_tags_raw = md.get("ai_tags") or "[]"
|
| 512 |
+
try:
|
| 513 |
+
ai_tags = json.loads(ai_tags_raw) if isinstance(ai_tags_raw, str) else ai_tags_raw
|
| 514 |
+
except Exception:
|
| 515 |
+
ai_tags = []
|
| 516 |
+
kept.append({
|
| 517 |
+
"id": item_id,
|
| 518 |
+
"thumbnail_url": url,
|
| 519 |
+
"thumbnail_path": name,
|
| 520 |
+
"analysis": {
|
| 521 |
+
"type": ai_type,
|
| 522 |
+
"brand": ai_brand,
|
| 523 |
+
"summary": ai_summary,
|
| 524 |
+
"tags": ai_tags
|
| 525 |
+
}
|
| 526 |
+
})
|
| 527 |
else:
|
|
|
|
| 528 |
try:
|
| 529 |
blob.delete()
|
| 530 |
deleted.append(item_id)
|
|
|
|
| 537 |
log.exception("finalize_detections error: %s", e)
|
| 538 |
return jsonify({"error": "internal", "detail": str(e)}), 500
|
| 539 |
|
| 540 |
+
# ---------- Clear session: delete all temporary files for a session ----------
|
| 541 |
+
@app.route("/clear_session", methods=["POST"])
def clear_session():
    """
    Delete all temporary uploads for one detection session.

    Body JSON: { "session_id": "<id>", "uid": "<optional uid>" }
    Deletes every detected/<uid>/* blob whose metadata has
    session_id == <id> and tmp in ("true", "1", "yes").

    Returns JSON {ok, deleted, errors} on success; 400 on bad input,
    500 when firebase-admin is unavailable or listing fails.
    """
    try:
        body = request.get_json(force=True)
    except Exception:
        return jsonify({"error": "invalid json"}), 400
    # get_json can succeed yet return a non-dict (JSON null / array /
    # string); coerce so body.get below cannot raise — the query-arg
    # fallbacks still apply in that case.
    if not isinstance(body, dict):
        body = {}

    session_id = (body.get("session_id") or request.args.get("session_id") or "").strip()
    uid = (body.get("uid") or request.args.get("uid") or "anon").strip() or "anon"

    if not session_id:
        return jsonify({"error": "session_id required"}), 400

    if not FIREBASE_ADMIN_JSON or not FIREBASE_ADMIN_AVAILABLE:
        return jsonify({"error": "firebase admin not configured"}), 500

    try:
        init_firebase_admin_if_needed()
        bucket = fb_storage.bucket()
    except Exception as e:
        log.exception("Firebase init error in clear_session: %s", e)
        return jsonify({"error": "firebase admin init failed", "detail": str(e)}), 500

    # sanitize uid the same way uploads do, so the prefix matches
    safe_uid = "".join(ch for ch in uid if ch.isalnum() or ch in ("-", "_")) or "anon"
    prefix = f"detected/{safe_uid}/"

    deleted = []
    errors = []
    try:
        blobs = list(bucket.list_blobs(prefix=prefix))
        for blob in blobs:
            try:
                md = blob.metadata or {}
                # only delete blobs explicitly marked temporary AND
                # belonging to this session — never touch finalized files
                if str(md.get("session_id", "")) == session_id and str(md.get("tmp", "")).lower() in ("true", "1", "yes"):
                    try:
                        blob.delete()
                        # report the bare item id (filename without extension)
                        deleted.append(blob.name.split("/")[-1].rsplit(".", 1)[0])
                    except Exception as de:
                        errors.append({"blob": blob.name, "error": str(de)})
            except Exception as e:
                errors.append({"blob": getattr(blob, "name", None), "error": str(e)})
        return jsonify({"ok": True, "deleted": deleted, "errors": errors}), 200
    except Exception as e:
        log.exception("clear_session error: %s", e)
        return jsonify({"error": "internal", "detail": str(e)}), 500
|
| 590 |
+
|
| 591 |
if __name__ == "__main__":
|
| 592 |
port = int(os.getenv("PORT", 7860))
|
| 593 |
log.info("Starting server on 0.0.0.0:%d", port)
|