Spaces:
Running
Running
Update main.py
Browse files
main.py
CHANGED
|
@@ -1,1563 +1,1027 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import os
|
| 2 |
-
import io
|
| 3 |
import re
|
| 4 |
import json
|
| 5 |
-
import uuid
|
| 6 |
import time
|
| 7 |
-
import
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
from flask import Flask, request, jsonify, Response
|
| 11 |
-
from flask_cors import CORS
|
| 12 |
-
|
| 13 |
-
import firebase_admin
|
| 14 |
-
from firebase_admin import credentials, db, storage, auth
|
| 15 |
-
|
| 16 |
-
from PIL import Image
|
| 17 |
-
import requests
|
| 18 |
-
|
| 19 |
-
# Google GenAI (Gemini)
|
| 20 |
-
from google import genai
|
| 21 |
-
from google.genai import types
|
| 22 |
-
|
| 23 |
import logging
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
# -----------------------------------------------------------------------------
|
| 28 |
-
# 1. CONFIGURATION & INITIALIZATION
|
| 29 |
-
# -----------------------------------------------------------------------------
|
| 30 |
-
|
| 31 |
-
app = Flask(__name__)
|
| 32 |
-
CORS(app)
|
| 33 |
-
|
| 34 |
-
# --- Firebase Initialization ---
|
| 35 |
-
try:
|
| 36 |
-
credentials_json_string = os.environ.get("FIREBASE")
|
| 37 |
-
if not credentials_json_string:
|
| 38 |
-
raise ValueError("The FIREBASE environment variable is not set.")
|
| 39 |
-
|
| 40 |
-
credentials_json = json.loads(credentials_json_string)
|
| 41 |
-
firebase_db_url = os.environ.get("Firebase_DB")
|
| 42 |
-
firebase_storage_bucket = os.environ.get("Firebase_Storage")
|
| 43 |
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
firebase_admin.initialize_app(cred, {
|
| 49 |
-
"databaseURL": firebase_db_url,
|
| 50 |
-
"storageBucket": firebase_storage_bucket
|
| 51 |
-
})
|
| 52 |
-
|
| 53 |
-
logger.info("Firebase Admin SDK initialized successfully.")
|
| 54 |
-
except Exception as e:
|
| 55 |
-
logger.error(f"FATAL: Error initializing Firebase: {e}")
|
| 56 |
-
raise
|
| 57 |
|
| 58 |
-
|
| 59 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
|
| 61 |
-
#
|
|
|
|
| 62 |
try:
|
| 63 |
-
|
| 64 |
-
if not api_key:
|
| 65 |
-
raise ValueError("The 'Gemini' environment variable is not set.")
|
| 66 |
-
client = genai.Client(api_key=api_key)
|
| 67 |
-
logger.info("Google GenAI Client initialized successfully.")
|
| 68 |
except Exception as e:
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
# --- Model Constants ---
|
| 73 |
-
VISION_MODEL = "gemini-2.5-flash" # Vision + text
|
| 74 |
-
TEXT_MODEL = "gemini-2.5-flash" # text-only tasks
|
| 75 |
-
|
| 76 |
-
# -----------------------------------------------------------------------------
|
| 77 |
-
# 2. HELPER FUNCTIONS
|
| 78 |
-
# -----------------------------------------------------------------------------
|
| 79 |
|
| 80 |
-
|
| 81 |
-
|
| 82 |
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
if not auth_header or not auth_header.startswith("Bearer "):
|
| 86 |
-
return None
|
| 87 |
-
token = auth_header.split("Bearer ")[1]
|
| 88 |
try:
|
| 89 |
-
|
| 90 |
-
|
| 91 |
except Exception as e:
|
| 92 |
-
logger.
|
| 93 |
-
return None
|
| 94 |
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
raise PermissionError("Invalid or missing user token")
|
| 100 |
-
|
| 101 |
-
user = db_ref.child(f"users/{uid}").get() or {}
|
| 102 |
-
if not user.get("is_admin", False):
|
| 103 |
-
raise PermissionError("Admin access required")
|
| 104 |
-
return uid
|
| 105 |
-
|
| 106 |
-
def upload_to_storage(data_bytes, destination_blob_name, content_type):
|
| 107 |
-
"""Uploads bytes to Firebase Storage and returns its public URL."""
|
| 108 |
-
blob = bucket.blob(destination_blob_name)
|
| 109 |
-
blob.upload_from_string(data_bytes, content_type=content_type)
|
| 110 |
-
blob.make_public()
|
| 111 |
-
return blob.public_url
|
| 112 |
-
|
| 113 |
-
def safe_float(x, default=None):
|
| 114 |
-
try:
|
| 115 |
-
return float(x)
|
| 116 |
-
except Exception:
|
| 117 |
-
return default
|
| 118 |
|
| 119 |
-
|
| 120 |
-
try:
|
| 121 |
-
return int(x)
|
| 122 |
-
except Exception:
|
| 123 |
-
return default
|
| 124 |
|
| 125 |
-
def
|
| 126 |
-
|
|
|
|
| 127 |
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
Helper: if image is provided, send [prompt, image].
|
| 131 |
-
If no image, send prompt only.
|
| 132 |
-
Returns response text or None.
|
| 133 |
-
"""
|
| 134 |
-
try:
|
| 135 |
-
chat = client.chats.create(model=model_name)
|
| 136 |
-
if image is None:
|
| 137 |
-
resp = chat.send_message([prompt])
|
| 138 |
-
else:
|
| 139 |
-
resp = chat.send_message([prompt, image])
|
| 140 |
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
return text_out.strip() if text_out else None
|
| 146 |
-
except Exception as e:
|
| 147 |
-
logger.error(f"Error with model {model_name}: {e}")
|
| 148 |
-
return None
|
| 149 |
|
| 150 |
-
|
| 151 |
-
"""
|
| 152 |
-
Robust-ish JSON extraction:
|
| 153 |
-
- Try direct JSON parse
|
| 154 |
-
- Else find first {...} block and parse that
|
| 155 |
-
"""
|
| 156 |
-
if not text:
|
| 157 |
-
return None
|
| 158 |
-
text = text.strip()
|
| 159 |
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
except Exception:
|
| 164 |
-
pass
|
| 165 |
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
| 169 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 170 |
try:
|
| 171 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 172 |
except Exception:
|
| 173 |
return None
|
| 174 |
|
| 175 |
-
def
|
| 176 |
-
""
|
| 177 |
-
|
| 178 |
-
|
| 179 |
-
|
| 180 |
-
user_data = db_ref.child(f"users/{uid}").get()
|
| 181 |
-
if not user_data:
|
| 182 |
-
raise PermissionError(
|
| 183 |
-
f"User profile missing in RTDB at /users/{uid}. "
|
| 184 |
-
f"Call /api/auth/social-signin (or /api/auth/signup) once after login to bootstrap the profile."
|
| 185 |
-
)
|
| 186 |
-
|
| 187 |
-
role = (user_data.get("role") or "").lower().strip()
|
| 188 |
-
if role not in allowed_roles:
|
| 189 |
-
raise PermissionError(f"Role '{role}' not allowed. Allowed roles: {allowed_roles}")
|
| 190 |
-
|
| 191 |
-
return user_data
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
def get_or_create_profile(uid: str) -> dict:
|
| 195 |
-
"""
|
| 196 |
-
Ensures /users/{uid} exists in RTDB for any authenticated user.
|
| 197 |
-
- If missing, bootstraps from Firebase Auth and defaults role to 'customer'.
|
| 198 |
-
"""
|
| 199 |
-
ref = db_ref.child(f"users/{uid}")
|
| 200 |
-
user_data = ref.get()
|
| 201 |
-
if user_data:
|
| 202 |
-
return user_data
|
| 203 |
-
|
| 204 |
-
fb_user = auth.get_user(uid)
|
| 205 |
-
new_user_data = {
|
| 206 |
-
"email": fb_user.email or "",
|
| 207 |
-
"displayName": fb_user.display_name or "",
|
| 208 |
-
"phone": "",
|
| 209 |
-
"city": "",
|
| 210 |
-
"role": "customer", # safe default so task posting works out-of-the-box
|
| 211 |
-
"is_admin": False,
|
| 212 |
-
"createdAt": now_iso()
|
| 213 |
-
}
|
| 214 |
-
ref.set(new_user_data)
|
| 215 |
-
return new_user_data
|
| 216 |
-
|
| 217 |
-
|
| 218 |
-
def push_notification(to_uid: str, notif_type: str, title: str, body: str, meta: dict | None = None):
|
| 219 |
-
"""
|
| 220 |
-
In-app notification stored in RTDB:
|
| 221 |
-
/notifications/{uid}/{notifId}
|
| 222 |
-
"""
|
| 223 |
-
notif_id = str(uuid.uuid4())
|
| 224 |
-
payload = {
|
| 225 |
-
"notifId": notif_id,
|
| 226 |
-
"type": notif_type,
|
| 227 |
-
"title": title,
|
| 228 |
-
"body": body,
|
| 229 |
-
"meta": meta or {},
|
| 230 |
-
"createdAt": now_iso(),
|
| 231 |
-
"read": False
|
| 232 |
-
}
|
| 233 |
-
db_ref.child(f"notifications/{to_uid}/{notif_id}").set(payload)
|
| 234 |
-
return payload
|
| 235 |
-
|
| 236 |
-
def task_access_check(uid: str, task: dict, user_role: str):
|
| 237 |
-
"""
|
| 238 |
-
Role-aware access:
|
| 239 |
-
- customer can access own tasks
|
| 240 |
-
- tasker can access open tasks + tasks they are assigned to + tasks they bid on
|
| 241 |
-
- admin can access all
|
| 242 |
-
"""
|
| 243 |
-
if user_role == "admin":
|
| 244 |
-
return True
|
| 245 |
-
|
| 246 |
-
owner = task.get("createdBy")
|
| 247 |
-
assigned = task.get("assignedTaskerId")
|
| 248 |
-
if user_role == "customer":
|
| 249 |
-
return owner == uid
|
| 250 |
-
|
| 251 |
-
if user_role == "tasker":
|
| 252 |
-
if task.get("status") in ["open", "bidding"] and owner != uid:
|
| 253 |
-
return True
|
| 254 |
-
if assigned == uid:
|
| 255 |
-
return True
|
| 256 |
-
# bid check
|
| 257 |
-
bids = db_ref.child(f"bids/{task.get('taskId')}").get() or {}
|
| 258 |
-
for b in bids.values():
|
| 259 |
-
if b.get("taskerId") == uid:
|
| 260 |
-
return True
|
| 261 |
-
return False
|
| 262 |
-
|
| 263 |
-
return False
|
| 264 |
-
|
| 265 |
-
# -----------------------------------------------------------------------------
|
| 266 |
-
# 3. BASIC HEALTH
|
| 267 |
-
# -----------------------------------------------------------------------------
|
| 268 |
-
|
| 269 |
-
@app.route("/api/health", methods=["GET"])
|
| 270 |
-
def health():
|
| 271 |
-
return jsonify({"ok": True, "service": "oneplus-server", "time": now_iso()}), 200
|
| 272 |
-
|
| 273 |
-
# -----------------------------------------------------------------------------
|
| 274 |
-
# 4. AUTH & USER PROFILES (MVP)
|
| 275 |
-
# -----------------------------------------------------------------------------
|
| 276 |
-
|
| 277 |
-
@app.route("/api/auth/signup", methods=["POST"])
|
| 278 |
-
def signup():
|
| 279 |
-
"""
|
| 280 |
-
Email + password signup.
|
| 281 |
-
Creates Firebase Auth user + RTDB user profile.
|
| 282 |
-
"""
|
| 283 |
-
try:
|
| 284 |
-
data = request.get_json() or {}
|
| 285 |
-
email = data.get("email")
|
| 286 |
-
password = data.get("password")
|
| 287 |
-
display_name = data.get("displayName")
|
| 288 |
-
phone = data.get("phone")
|
| 289 |
-
city = data.get("city")
|
| 290 |
-
role = (data.get("role") or "customer").lower().strip() # customer|tasker|admin(not allowed here)
|
| 291 |
-
|
| 292 |
-
if role not in ["customer", "tasker"]:
|
| 293 |
-
return jsonify({"error": "Invalid role. Use customer or tasker."}), 400
|
| 294 |
-
|
| 295 |
-
if not email or not password:
|
| 296 |
-
return jsonify({"error": "Email and password are required"}), 400
|
| 297 |
-
|
| 298 |
-
user = auth.create_user(email=email, password=password, display_name=display_name)
|
| 299 |
-
|
| 300 |
-
user_data = {
|
| 301 |
-
"email": email,
|
| 302 |
-
"displayName": display_name,
|
| 303 |
-
"phone": phone,
|
| 304 |
-
"city": city,
|
| 305 |
-
"role": role,
|
| 306 |
-
"is_admin": False,
|
| 307 |
-
"createdAt": now_iso()
|
| 308 |
-
}
|
| 309 |
-
db_ref.child(f"users/{user.uid}").set(user_data)
|
| 310 |
-
return jsonify({"success": True, "uid": user.uid, **user_data}), 201
|
| 311 |
-
|
| 312 |
-
except Exception as e:
|
| 313 |
-
logger.error(f"Signup failed: {e}")
|
| 314 |
-
if "EMAIL_EXISTS" in str(e):
|
| 315 |
-
return jsonify({"error": "An account with this email already exists."}), 409
|
| 316 |
-
return jsonify({"error": str(e)}), 400
|
| 317 |
-
|
| 318 |
-
@app.route("/api/auth/social-signin", methods=["POST"])
|
| 319 |
-
def social_signin():
|
| 320 |
-
"""
|
| 321 |
-
Ensures RTDB user record exists. Social login happens on client,
|
| 322 |
-
we just bootstrap profile.
|
| 323 |
-
"""
|
| 324 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 325 |
-
if not uid:
|
| 326 |
-
return jsonify({"error": "Invalid or expired token"}), 401
|
| 327 |
-
|
| 328 |
-
user_ref = db_ref.child(f"users/{uid}")
|
| 329 |
-
user_data = user_ref.get()
|
| 330 |
|
|
|
|
| 331 |
try:
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
|
| 337 |
-
|
| 338 |
-
|
| 339 |
-
|
| 340 |
-
|
| 341 |
-
|
| 342 |
-
|
| 343 |
-
|
| 344 |
-
|
| 345 |
-
|
| 346 |
-
|
| 347 |
-
|
| 348 |
-
|
| 349 |
-
|
| 350 |
-
|
| 351 |
-
|
| 352 |
-
|
| 353 |
-
|
| 354 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 355 |
}
|
| 356 |
-
|
| 357 |
-
|
| 358 |
-
|
| 359 |
-
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
| 363 |
-
|
| 364 |
-
|
| 365 |
-
|
| 366 |
-
|
| 367 |
-
|
| 368 |
-
|
| 369 |
-
|
| 370 |
-
- role missing/empty -> set role (customer|tasker)
|
| 371 |
-
- customer -> tasker (one-way upgrade ONCE)
|
| 372 |
-
- tasker -> customer (BLOCK)
|
| 373 |
-
- same role -> idempotent 200
|
| 374 |
-
|
| 375 |
-
Responses:
|
| 376 |
-
- 401 invalid token
|
| 377 |
-
- 400 invalid role
|
| 378 |
-
- 409 blocked role change
|
| 379 |
-
- 200 success (profile returned)
|
| 380 |
-
"""
|
| 381 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 382 |
-
if not uid:
|
| 383 |
-
return jsonify({"error": "Invalid or expired token"}), 401
|
| 384 |
-
|
| 385 |
-
data = request.get_json() or {}
|
| 386 |
-
requested_role = (data.get("role") or "").lower().strip()
|
| 387 |
-
|
| 388 |
-
if requested_role not in ["customer", "tasker"]:
|
| 389 |
-
return jsonify({"error": "Invalid role. Use customer or tasker."}), 400
|
| 390 |
-
|
| 391 |
-
try:
|
| 392 |
-
user_ref = db_ref.child(f"users/{uid}")
|
| 393 |
-
user_data = user_ref.get()
|
| 394 |
-
|
| 395 |
-
# Bootstrap if missing
|
| 396 |
-
if not user_data:
|
| 397 |
-
fb_user = auth.get_user(uid)
|
| 398 |
-
user_data = {
|
| 399 |
-
"email": fb_user.email or "",
|
| 400 |
-
"displayName": fb_user.display_name or "",
|
| 401 |
-
"phone": "",
|
| 402 |
-
"city": "",
|
| 403 |
-
"role": "", # intentionally empty so user must choose
|
| 404 |
-
"is_admin": False,
|
| 405 |
-
"createdAt": now_iso(),
|
| 406 |
-
}
|
| 407 |
-
user_ref.set(user_data)
|
| 408 |
-
|
| 409 |
-
current_role = (user_data.get("role") or "").lower().strip()
|
| 410 |
-
|
| 411 |
-
# Idempotent: already same role
|
| 412 |
-
if current_role and current_role == requested_role:
|
| 413 |
-
updated = user_ref.get() or {}
|
| 414 |
-
return jsonify({"success": True, "uid": uid, "profile": updated, "note": "role unchanged"}), 200
|
| 415 |
-
|
| 416 |
-
# If role is empty/missing -> allow setting
|
| 417 |
-
if not current_role:
|
| 418 |
-
patch = {
|
| 419 |
-
"role": requested_role,
|
| 420 |
-
"roleSetAt": now_iso(),
|
| 421 |
-
"updatedAt": now_iso(),
|
| 422 |
-
}
|
| 423 |
-
user_ref.update(patch)
|
| 424 |
-
updated = user_ref.get() or {}
|
| 425 |
-
return jsonify({"success": True, "uid": uid, "profile": updated}), 200
|
| 426 |
-
|
| 427 |
-
# One-way upgrade: customer -> tasker (allow ONCE)
|
| 428 |
-
if current_role == "customer" and requested_role == "tasker":
|
| 429 |
-
# If you want to allow this only once, the existence of roleUpgradedAt is enough
|
| 430 |
-
if (user_data.get("roleUpgradedAt") or "").strip():
|
| 431 |
-
return jsonify({
|
| 432 |
-
"error": "Role change blocked",
|
| 433 |
-
"reason": "Customer -> Tasker upgrade already used. Role flipping is not allowed.",
|
| 434 |
-
"currentRole": current_role,
|
| 435 |
-
"requestedRole": requested_role
|
| 436 |
-
}), 409
|
| 437 |
-
|
| 438 |
-
patch = {
|
| 439 |
-
"role": "tasker",
|
| 440 |
-
"roleUpgradedAt": now_iso(),
|
| 441 |
-
"updatedAt": now_iso(),
|
| 442 |
-
}
|
| 443 |
-
user_ref.update(patch)
|
| 444 |
-
updated = user_ref.get() or {}
|
| 445 |
-
return jsonify({"success": True, "uid": uid, "profile": updated, "note": "upgraded customer -> tasker"}), 200
|
| 446 |
-
|
| 447 |
-
# Block any other change (tasker->customer or any flip)
|
| 448 |
-
return jsonify({
|
| 449 |
-
"error": "Role change blocked",
|
| 450 |
-
"reason": "Role flipping is not allowed.",
|
| 451 |
-
"currentRole": current_role,
|
| 452 |
-
"requestedRole": requested_role
|
| 453 |
-
}), 409
|
| 454 |
-
|
| 455 |
-
except Exception as e:
|
| 456 |
-
logger.error(f"[SET ROLE] failed: {e}")
|
| 457 |
-
logger.error(traceback.format_exc())
|
| 458 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 459 |
-
|
| 460 |
-
@app.route("/api/user/profile", methods=["GET"])
|
| 461 |
-
def get_user_profile():
|
| 462 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 463 |
-
if not uid:
|
| 464 |
-
return jsonify({"error": "Invalid or expired token"}), 401
|
| 465 |
-
|
| 466 |
-
# ---- NEW: auto-bootstrap profile so profile fetch never mysteriously 404s
|
| 467 |
-
try:
|
| 468 |
-
user_data = get_or_create_profile(uid)
|
| 469 |
-
return jsonify({"uid": uid, **user_data}), 200
|
| 470 |
-
except Exception as e:
|
| 471 |
-
logger.error(f"get_user_profile failed: {e}")
|
| 472 |
-
return jsonify({"error": "Failed to load profile"}), 500
|
| 473 |
-
|
| 474 |
-
|
| 475 |
-
@app.route("/api/user/profile", methods=["PUT"])
|
| 476 |
-
def update_user_profile():
|
| 477 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 478 |
-
if not uid:
|
| 479 |
-
return jsonify({"error": "Invalid or expired token"}), 401
|
| 480 |
-
|
| 481 |
-
# ---- NEW: ensure profile exists before update
|
| 482 |
-
try:
|
| 483 |
-
_ = get_or_create_profile(uid)
|
| 484 |
-
except Exception as e:
|
| 485 |
-
logger.error(f"update_user_profile bootstrap failed: {e}")
|
| 486 |
-
return jsonify({"error": "Failed to bootstrap profile"}), 500
|
| 487 |
-
|
| 488 |
-
data = request.get_json() or {}
|
| 489 |
-
allowed = {}
|
| 490 |
-
|
| 491 |
-
# Common fields
|
| 492 |
-
for key in ["displayName", "phone", "city"]:
|
| 493 |
-
if key in data:
|
| 494 |
-
allowed[key] = data.get(key)
|
| 495 |
-
|
| 496 |
-
# Role-specific (tasker)
|
| 497 |
-
for key in ["skills", "categories", "bio", "serviceRadiusKm", "baseRate", "profilePhotoUrl", "availability"]:
|
| 498 |
-
if key in data:
|
| 499 |
-
allowed[key] = data.get(key)
|
| 500 |
-
|
| 501 |
-
# ---- NEW: allow role updates ONLY if explicitly permitted (optional safeguard)
|
| 502 |
-
# If you don't want clients to ever change role, leave this out entirely.
|
| 503 |
-
# if "role" in data:
|
| 504 |
-
# return jsonify({"error": "Role cannot be updated from client"}), 400
|
| 505 |
-
|
| 506 |
-
if not allowed:
|
| 507 |
-
return jsonify({"error": "No valid fields provided"}), 400
|
| 508 |
-
|
| 509 |
-
try:
|
| 510 |
-
# If displayName changes, also update Auth profile
|
| 511 |
-
if "displayName" in allowed and allowed["displayName"]:
|
| 512 |
-
auth.update_user(uid, display_name=str(allowed["displayName"]))
|
| 513 |
-
|
| 514 |
-
db_ref.child(f"users/{uid}").update(allowed)
|
| 515 |
-
return jsonify({"success": True, "updated": allowed}), 200
|
| 516 |
-
except Exception as e:
|
| 517 |
-
logger.error(f"update_user_profile failed: {e}")
|
| 518 |
-
return jsonify({"error": f"Failed to update profile: {str(e)}"}), 500
|
| 519 |
-
|
| 520 |
-
# -----------------------------------------------------------------------------
|
| 521 |
-
# 5. AI (CUSTOMER) — SMART CAPTURE (MVP Critical)
|
| 522 |
-
# -----------------------------------------------------------------------------
|
| 523 |
-
|
| 524 |
-
@app.route("/api/ai/smart-capture", methods=["POST"])
|
| 525 |
-
def smart_capture():
|
| 526 |
-
"""
|
| 527 |
-
Customer uploads image (or video thumbnail) + optional context text.
|
| 528 |
-
Server sends to Gemini Vision and returns structured output for prefill:
|
| 529 |
-
- category
|
| 530 |
-
- problemSummary
|
| 531 |
-
- difficulty
|
| 532 |
-
- timeEstimate
|
| 533 |
-
- priceBand
|
| 534 |
-
- suggestedMaterials[]
|
| 535 |
-
- suggestedTitle
|
| 536 |
-
- suggestedDescription
|
| 537 |
-
- suggestedBudgetRange
|
| 538 |
-
"""
|
| 539 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 540 |
-
if not uid:
|
| 541 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 542 |
-
|
| 543 |
-
try:
|
| 544 |
-
# Accept multipart like SozoFix
|
| 545 |
-
if "image" not in request.files:
|
| 546 |
-
return jsonify({"error": "Image file is required (field name: image)"}), 400
|
| 547 |
-
|
| 548 |
-
image_file = request.files["image"]
|
| 549 |
-
context_text = request.form.get("contextText", "") or ""
|
| 550 |
-
|
| 551 |
-
image_bytes = image_file.read()
|
| 552 |
-
pil_image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
|
| 553 |
-
|
| 554 |
-
prompt = f"""
|
| 555 |
-
You are One Plus Smart Capture (task diagnosis).
|
| 556 |
-
Analyze the image + optional context and return ONLY valid JSON.
|
| 557 |
-
|
| 558 |
-
Context (may be empty): "{context_text}"
|
| 559 |
-
|
| 560 |
-
Return this schema:
|
| 561 |
-
{{
|
| 562 |
-
"category": "plumbing|electrical|cleaning|moving|handyman|painting|gardening|appliance_repair|other",
|
| 563 |
-
"problemSummary": "plain language summary (1-2 sentences)",
|
| 564 |
-
"difficulty": "easy|moderate|complex",
|
| 565 |
-
"timeEstimate": "e.g. 30-60 minutes, 1-2 hours, 1 day",
|
| 566 |
-
"priceBand": "low|medium|high",
|
| 567 |
-
"suggestedBudgetRange": "e.g. $20-$40 (rough)",
|
| 568 |
-
"suggestedMaterials": ["..."],
|
| 569 |
-
"suggestedTitle": "short task title",
|
| 570 |
-
"suggestedDescription": "professional task description, include key constraints + what to check"
|
| 571 |
-
}}
|
| 572 |
-
|
| 573 |
-
Rules:
|
| 574 |
-
- Be realistic and safe.
|
| 575 |
-
- If unsure, pick "other" category and state uncertainty in problemSummary.
|
| 576 |
-
- Output must be JSON only (no markdown).
|
| 577 |
-
"""
|
| 578 |
-
raw = send_text_request(VISION_MODEL, prompt, pil_image)
|
| 579 |
-
result = extract_json_from_text(raw)
|
| 580 |
-
|
| 581 |
-
if not result:
|
| 582 |
-
logger.error(f"[SMART CAPTURE] Could not parse JSON. Raw: {raw}")
|
| 583 |
-
return jsonify({"error": "AI response format error"}), 500
|
| 584 |
-
|
| 585 |
-
# minimal cleanup defaults
|
| 586 |
-
result["category"] = (result.get("category") or "other").strip()
|
| 587 |
-
result["difficulty"] = (result.get("difficulty") or "moderate").strip()
|
| 588 |
-
result["priceBand"] = (result.get("priceBand") or "medium").strip()
|
| 589 |
-
if not isinstance(result.get("suggestedMaterials"), list):
|
| 590 |
-
result["suggestedMaterials"] = []
|
| 591 |
-
|
| 592 |
-
# Store last smart capture on user (handy for UI)
|
| 593 |
-
db_ref.child(f"users/{uid}/lastSmartCapture").set({
|
| 594 |
-
"createdAt": now_iso(),
|
| 595 |
-
"contextText": context_text,
|
| 596 |
-
"result": result
|
| 597 |
-
})
|
| 598 |
-
|
| 599 |
-
return jsonify({"success": True, "smartCapture": result}), 200
|
| 600 |
|
| 601 |
-
|
| 602 |
-
|
| 603 |
-
|
| 604 |
-
|
|
|
|
|
|
|
|
|
|
| 605 |
|
| 606 |
-
|
| 607 |
-
|
|
|
|
|
|
|
| 608 |
"""
|
| 609 |
-
|
| 610 |
-
|
|
|
|
|
|
|
|
|
|
| 611 |
"""
|
| 612 |
-
|
| 613 |
-
|
| 614 |
-
|
| 615 |
-
|
| 616 |
-
|
| 617 |
-
|
| 618 |
-
|
| 619 |
-
|
| 620 |
-
|
| 621 |
-
|
| 622 |
-
|
| 623 |
-
|
| 624 |
-
|
| 625 |
-
|
| 626 |
-
|
| 627 |
-
|
| 628 |
-
"{short_desc}"
|
| 629 |
-
|
| 630 |
-
Return ONLY JSON:
|
| 631 |
-
{{
|
| 632 |
-
"suggestedTitle": "...",
|
| 633 |
-
"suggestedDescription": "...",
|
| 634 |
-
"questionsForTasker": ["...", "...", "..."]
|
| 635 |
-
}}
|
| 636 |
-
JSON only, no markdown.
|
| 637 |
-
"""
|
| 638 |
-
raw = send_text_request(TEXT_MODEL, prompt, None)
|
| 639 |
-
result = extract_json_from_text(raw)
|
| 640 |
-
if not result:
|
| 641 |
-
return jsonify({"error": "AI response format error"}), 500
|
| 642 |
-
if not isinstance(result.get("questionsForTasker"), list):
|
| 643 |
-
result["questionsForTasker"] = []
|
| 644 |
-
return jsonify({"success": True, "result": result}), 200
|
| 645 |
-
|
| 646 |
-
# -----------------------------------------------------------------------------
|
| 647 |
-
# 6. TASKS (CUSTOMER POSTS, TASKER BROWSES) + MEDIA UPLOAD (MVP)
|
| 648 |
-
# -----------------------------------------------------------------------------
|
| 649 |
-
|
| 650 |
-
@app.route("/api/tasks", methods=["POST"])
|
| 651 |
-
def create_task():
|
| 652 |
-
"""
|
| 653 |
-
Customer creates a task.
|
| 654 |
-
Upload media like SozoFix:
|
| 655 |
-
- multipart/form-data
|
| 656 |
-
- fields: category, title, description, city, address(optional), budget, scheduleAt(optional ISO), contextText(optional)
|
| 657 |
-
- file fields: media (can send multiple) OR image (single)
|
| 658 |
-
"""
|
| 659 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 660 |
-
if not uid:
|
| 661 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 662 |
-
|
| 663 |
-
try:
|
| 664 |
-
# ---- NEW: Always ensure RTDB profile exists (prevents 403 due to missing /users/{uid})
|
| 665 |
-
profile = get_or_create_profile(uid)
|
| 666 |
-
|
| 667 |
-
# ---- NEW: role gate with explicit, debuggable response
|
| 668 |
-
role = (profile.get("role") or "").lower().strip()
|
| 669 |
-
if role not in ["customer", "admin"]:
|
| 670 |
-
return jsonify({
|
| 671 |
-
"error": "Forbidden",
|
| 672 |
-
"reason": f"Role '{role}' not allowed to create tasks. Must be customer (or admin).",
|
| 673 |
-
"uid": uid
|
| 674 |
-
}), 403
|
| 675 |
-
|
| 676 |
-
# ---- NEW: helpful logs for production debugging
|
| 677 |
-
logger.info(f"[CREATE TASK] uid={uid} role={role} email={profile.get('email')}")
|
| 678 |
-
|
| 679 |
-
# multipart
|
| 680 |
-
category = request.form.get("category", "").strip()
|
| 681 |
-
title = request.form.get("title", "").strip()
|
| 682 |
-
description = request.form.get("description", "").strip()
|
| 683 |
-
city = request.form.get("city", "").strip()
|
| 684 |
-
address = request.form.get("address", "").strip()
|
| 685 |
-
budget = request.form.get("budget", "").strip()
|
| 686 |
-
schedule_at = request.form.get("scheduleAt", "").strip() # ISO string from UI
|
| 687 |
-
smart_capture_json = request.form.get("smartCapture", "").strip() # optional JSON string from UI
|
| 688 |
-
|
| 689 |
-
if not category or not city or not description:
|
| 690 |
-
return jsonify({"error": "category, city, and description are required"}), 400
|
| 691 |
-
|
| 692 |
-
task_id = str(uuid.uuid4())
|
| 693 |
-
created_at = now_iso()
|
| 694 |
-
|
| 695 |
-
# Upload media (media[] or image)
|
| 696 |
-
media_urls = []
|
| 697 |
-
files = []
|
| 698 |
-
|
| 699 |
-
if "media" in request.files:
|
| 700 |
-
files = request.files.getlist("media")
|
| 701 |
-
elif "image" in request.files:
|
| 702 |
-
files = [request.files["image"]]
|
| 703 |
-
|
| 704 |
-
for i, f in enumerate(files):
|
| 705 |
-
data_bytes = f.read()
|
| 706 |
-
if not data_bytes:
|
| 707 |
-
continue
|
| 708 |
-
ext = (f.mimetype or "application/octet-stream").split("/")[-1]
|
| 709 |
-
path = f"tasks/{task_id}/media/{i+1}_{int(time.time())}.{ext}"
|
| 710 |
-
url = upload_to_storage(data_bytes, path, f.mimetype or "application/octet-stream")
|
| 711 |
-
media_urls.append(url)
|
| 712 |
-
|
| 713 |
-
# optional smartCapture object
|
| 714 |
-
smart_capture = None
|
| 715 |
-
if smart_capture_json:
|
| 716 |
try:
|
| 717 |
-
|
| 718 |
except Exception:
|
| 719 |
-
|
| 720 |
-
|
| 721 |
-
|
| 722 |
-
|
| 723 |
-
|
| 724 |
-
"
|
| 725 |
-
|
| 726 |
-
|
| 727 |
-
|
| 728 |
-
|
| 729 |
-
|
| 730 |
-
|
| 731 |
-
|
| 732 |
-
|
| 733 |
-
|
| 734 |
-
|
| 735 |
-
|
| 736 |
-
|
| 737 |
-
|
| 738 |
-
|
| 739 |
-
|
| 740 |
-
|
| 741 |
-
"selectedBidId": "",
|
| 742 |
-
"completedAt": "",
|
| 743 |
-
"cancelledAt": ""
|
| 744 |
-
}
|
| 745 |
-
|
| 746 |
-
db_ref.child(f"tasks/{task_id}").set(task_payload)
|
| 747 |
-
|
| 748 |
-
# Notify taskers (basic broadcast by category + city)
|
| 749 |
-
notify_taskers_for_new_task(task_payload)
|
| 750 |
-
|
| 751 |
-
return jsonify({"success": True, "task": task_payload}), 201
|
| 752 |
-
|
| 753 |
-
except PermissionError as e:
|
| 754 |
-
# Keep PermissionError mapping, but now it will be far more informative when it happens
|
| 755 |
-
return jsonify({"error": "Forbidden", "reason": str(e)}), 403
|
| 756 |
-
except Exception as e:
|
| 757 |
-
logger.error(f"[CREATE TASK] Error: {e}")
|
| 758 |
-
logger.error(traceback.format_exc())
|
| 759 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 760 |
-
|
| 761 |
-
def notify_taskers_for_new_task(task: dict):
|
| 762 |
"""
|
| 763 |
-
|
| 764 |
-
|
| 765 |
-
- match category overlap + city match (or empty city)
|
| 766 |
-
- push in-app notification
|
| 767 |
"""
|
| 768 |
-
|
| 769 |
-
|
| 770 |
-
|
| 771 |
-
|
| 772 |
-
|
| 773 |
-
|
| 774 |
-
|
| 775 |
-
|
| 776 |
-
|
| 777 |
-
|
| 778 |
-
|
| 779 |
-
|
| 780 |
-
|
| 781 |
-
|
| 782 |
-
# Category match: if tasker categories list exists, try overlap; else allow.
|
| 783 |
-
cats = u.get("categories") or []
|
| 784 |
-
if isinstance(cats, str):
|
| 785 |
-
cats = [c.strip() for c in cats.split(",") if c.strip()]
|
| 786 |
-
|
| 787 |
-
if cats:
|
| 788 |
-
ok = any(normalize_text(c) == tcat for c in cats)
|
| 789 |
-
if not ok:
|
| 790 |
-
continue
|
| 791 |
-
|
| 792 |
-
push_notification(
|
| 793 |
-
to_uid=tasker_id,
|
| 794 |
-
notif_type="new_task",
|
| 795 |
-
title="New task in your area",
|
| 796 |
-
body=f"{task.get('category')} • {task.get('city')}",
|
| 797 |
-
meta={"taskId": task.get("taskId")}
|
| 798 |
-
)
|
| 799 |
-
except Exception as e:
|
| 800 |
-
logger.warning(f"[NOTIFY TASKERS] Failed: {e}")
|
| 801 |
-
|
| 802 |
-
@app.route("/api/tasks", methods=["GET"])
|
| 803 |
-
def list_tasks():
|
| 804 |
"""
|
| 805 |
-
|
| 806 |
-
|
| 807 |
-
- tasker: list open/bidding tasks (with filters)
|
| 808 |
-
- admin: list all tasks (optional filters)
|
| 809 |
-
Query params:
|
| 810 |
-
status, category, city, mine=true
|
| 811 |
"""
|
| 812 |
-
|
| 813 |
-
|
| 814 |
-
|
| 815 |
-
|
| 816 |
-
|
| 817 |
-
|
| 818 |
-
|
| 819 |
-
|
| 820 |
-
|
| 821 |
-
|
| 822 |
-
|
| 823 |
-
|
| 824 |
-
|
| 825 |
-
|
| 826 |
-
|
| 827 |
-
|
| 828 |
-
|
| 829 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 830 |
continue
|
| 831 |
|
| 832 |
-
|
| 833 |
-
|
| 834 |
-
if t.get("createdBy") != uid:
|
| 835 |
continue
|
| 836 |
-
|
| 837 |
-
|
| 838 |
-
|
| 839 |
-
|
| 840 |
-
|
| 841 |
-
|
| 842 |
-
|
| 843 |
-
|
| 844 |
-
|
| 845 |
-
|
| 846 |
-
|
| 847 |
-
|
| 848 |
-
|
| 849 |
-
|
| 850 |
-
|
| 851 |
-
|
| 852 |
-
|
| 853 |
-
|
| 854 |
-
|
| 855 |
-
|
| 856 |
-
|
| 857 |
-
|
| 858 |
-
|
| 859 |
-
|
| 860 |
-
|
| 861 |
-
|
| 862 |
-
|
| 863 |
-
|
| 864 |
-
|
| 865 |
-
|
| 866 |
-
|
| 867 |
-
|
| 868 |
-
|
| 869 |
-
|
| 870 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 871 |
|
| 872 |
try:
|
| 873 |
-
|
| 874 |
-
|
| 875 |
-
|
| 876 |
-
|
| 877 |
-
|
| 878 |
-
|
| 879 |
-
|
| 880 |
-
if not task_access_check(uid, task, role):
|
| 881 |
-
return jsonify({"error": "Access denied"}), 403
|
| 882 |
-
|
| 883 |
-
# attach bids count (cheap)
|
| 884 |
-
bids = db_ref.child(f"bids/{task_id}").get() or {}
|
| 885 |
-
task["bidsCount"] = len(bids)
|
| 886 |
-
|
| 887 |
-
return jsonify(task), 200
|
| 888 |
-
|
| 889 |
-
except Exception as e:
|
| 890 |
-
logger.error(f"[GET TASK] Error: {e}")
|
| 891 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 892 |
-
|
| 893 |
-
@app.route("/api/tasks/<string:task_id>", methods=["PUT"])
|
| 894 |
-
def update_task(task_id):
|
| 895 |
-
"""
|
| 896 |
-
Customer can edit task only if not assigned/in_progress/completed.
|
| 897 |
-
"""
|
| 898 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 899 |
-
if not uid:
|
| 900 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 901 |
-
|
| 902 |
-
try:
|
| 903 |
-
user = require_role(uid, ["customer", "admin"])
|
| 904 |
-
task_ref = db_ref.child(f"tasks/{task_id}")
|
| 905 |
-
task = task_ref.get()
|
| 906 |
-
if not task:
|
| 907 |
-
return jsonify({"error": "Task not found"}), 404
|
| 908 |
-
|
| 909 |
-
if user.get("role") != "admin" and task.get("createdBy") != uid:
|
| 910 |
-
return jsonify({"error": "Access denied"}), 403
|
| 911 |
-
|
| 912 |
-
if task.get("status") not in ["open", "bidding"]:
|
| 913 |
-
return jsonify({"error": "Task cannot be edited at this stage"}), 400
|
| 914 |
-
|
| 915 |
-
data = request.get_json() or {}
|
| 916 |
-
allowed = {}
|
| 917 |
-
for key in ["category", "title", "description", "city", "address", "budget", "scheduleAt"]:
|
| 918 |
-
if key in data:
|
| 919 |
-
allowed[key] = data.get(key)
|
| 920 |
-
|
| 921 |
-
if not allowed:
|
| 922 |
-
return jsonify({"error": "No valid fields provided"}), 400
|
| 923 |
-
|
| 924 |
-
allowed["updatedAt"] = now_iso()
|
| 925 |
-
task_ref.update(allowed)
|
| 926 |
-
|
| 927 |
-
return jsonify({"success": True, "updated": allowed, "task": task_ref.get()}), 200
|
| 928 |
-
|
| 929 |
-
except PermissionError as e:
|
| 930 |
-
return jsonify({"error": str(e)}), 403
|
| 931 |
except Exception as e:
|
| 932 |
-
logger.error(
|
| 933 |
-
return
|
| 934 |
-
|
| 935 |
-
|
| 936 |
-
|
| 937 |
-
|
| 938 |
-
|
| 939 |
-
|
| 940 |
-
|
| 941 |
-
|
| 942 |
-
|
| 943 |
-
|
| 944 |
-
if not uid:
|
| 945 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 946 |
-
|
| 947 |
try:
|
| 948 |
-
|
| 949 |
-
|
| 950 |
-
|
| 951 |
-
|
| 952 |
-
|
| 953 |
-
|
| 954 |
-
|
| 955 |
-
|
| 956 |
-
|
| 957 |
-
|
| 958 |
-
|
| 959 |
-
if not new_status:
|
| 960 |
-
return jsonify({"error": "status is required"}), 400
|
| 961 |
-
|
| 962 |
-
# Admin override
|
| 963 |
-
if role == "admin":
|
| 964 |
-
task_ref.update({"status": new_status, "updatedAt": now_iso()})
|
| 965 |
-
return jsonify({"success": True, "task": task_ref.get()}), 200
|
| 966 |
-
|
| 967 |
-
# Customer rules
|
| 968 |
-
if role == "customer":
|
| 969 |
-
if task.get("createdBy") != uid:
|
| 970 |
-
return jsonify({"error": "Access denied"}), 403
|
| 971 |
-
|
| 972 |
-
if new_status == "cancelled":
|
| 973 |
-
if task.get("status") in ["completed", "cancelled"]:
|
| 974 |
-
return jsonify({"error": "Task already closed"}), 400
|
| 975 |
-
task_ref.update({"status": "cancelled", "cancelledAt": now_iso()})
|
| 976 |
-
# notify assigned tasker (if any)
|
| 977 |
-
if task.get("assignedTaskerId"):
|
| 978 |
-
push_notification(task["assignedTaskerId"], "task_cancelled", "Task cancelled", "Customer cancelled the task.", {"taskId": task_id})
|
| 979 |
-
return jsonify({"success": True, "task": task_ref.get()}), 200
|
| 980 |
-
|
| 981 |
-
if new_status == "completed":
|
| 982 |
-
# allow completion only if was assigned/in_progress
|
| 983 |
-
if task.get("status") not in ["assigned", "in_progress"]:
|
| 984 |
-
return jsonify({"error": "Task not in a completable state"}), 400
|
| 985 |
-
task_ref.update({"status": "completed", "completedAt": now_iso()})
|
| 986 |
-
if task.get("assignedTaskerId"):
|
| 987 |
-
push_notification(task["assignedTaskerId"], "task_completed", "Task marked complete", "Customer marked the task completed.", {"taskId": task_id})
|
| 988 |
-
return jsonify({"success": True, "task": task_ref.get()}), 200
|
| 989 |
-
|
| 990 |
-
return jsonify({"error": "Invalid customer status update"}), 400
|
| 991 |
-
|
| 992 |
-
# Tasker rules
|
| 993 |
-
if role == "tasker":
|
| 994 |
-
if task.get("assignedTaskerId") != uid:
|
| 995 |
-
return jsonify({"error": "Only assigned tasker can update status"}), 403
|
| 996 |
-
|
| 997 |
-
if new_status not in ["on_the_way", "in_progress", "completed"]:
|
| 998 |
-
return jsonify({"error": "Invalid tasker status update"}), 400
|
| 999 |
-
|
| 1000 |
-
# map on_the_way as in_progress-ish, but keep it if you want
|
| 1001 |
-
task_ref.update({"status": new_status, "updatedAt": now_iso()})
|
| 1002 |
-
|
| 1003 |
-
# notify customer
|
| 1004 |
-
push_notification(task["createdBy"], "task_update", "Task update", f"Task status: {new_status}", {"taskId": task_id})
|
| 1005 |
-
return jsonify({"success": True, "task": task_ref.get()}), 200
|
| 1006 |
-
|
| 1007 |
-
return jsonify({"error": "Role not supported"}), 400
|
| 1008 |
-
|
| 1009 |
except Exception as e:
|
| 1010 |
-
logger.error(
|
| 1011 |
-
return
|
| 1012 |
|
| 1013 |
-
|
| 1014 |
-
def delete_task(task_id):
|
| 1015 |
"""
|
| 1016 |
-
|
| 1017 |
-
|
|
|
|
|
|
|
| 1018 |
"""
|
| 1019 |
-
|
| 1020 |
-
|
| 1021 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1022 |
|
| 1023 |
try:
|
| 1024 |
-
|
| 1025 |
-
|
| 1026 |
-
|
| 1027 |
-
|
| 1028 |
-
|
| 1029 |
-
|
| 1030 |
-
|
| 1031 |
-
|
| 1032 |
-
|
| 1033 |
-
return jsonify({"error": "Access denied"}), 403
|
| 1034 |
-
|
| 1035 |
-
if role != "admin" and task.get("status") not in ["open", "bidding"]:
|
| 1036 |
-
return jsonify({"error": "Task cannot be deleted at this stage"}), 400
|
| 1037 |
-
|
| 1038 |
-
# delete RTDB nodes
|
| 1039 |
-
task_ref.delete()
|
| 1040 |
-
db_ref.child(f"bids/{task_id}").delete()
|
| 1041 |
-
db_ref.child(f"chats/{task_id}").delete()
|
| 1042 |
-
db_ref.child(f"reviews/{task_id}").delete()
|
| 1043 |
-
|
| 1044 |
-
# delete storage media
|
| 1045 |
-
for blob in bucket.list_blobs(prefix=f"tasks/{task_id}/"):
|
| 1046 |
-
try:
|
| 1047 |
-
blob.delete()
|
| 1048 |
-
except Exception:
|
| 1049 |
-
pass
|
| 1050 |
-
|
| 1051 |
-
return jsonify({"success": True, "message": f"Task {task_id} deleted"}), 200
|
| 1052 |
-
|
| 1053 |
-
except PermissionError as e:
|
| 1054 |
-
return jsonify({"error": str(e)}), 403
|
| 1055 |
except Exception as e:
|
| 1056 |
-
logger.error(
|
| 1057 |
-
return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1058 |
|
| 1059 |
-
|
| 1060 |
-
|
| 1061 |
-
|
|
|
|
|
|
|
|
|
|
| 1062 |
|
| 1063 |
-
|
| 1064 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1065 |
"""
|
| 1066 |
-
|
| 1067 |
-
Stored under /bids/{taskId}/{bidId}
|
| 1068 |
"""
|
| 1069 |
-
|
| 1070 |
-
|
| 1071 |
-
|
| 1072 |
-
|
| 1073 |
-
|
| 1074 |
-
|
| 1075 |
-
|
| 1076 |
-
|
| 1077 |
-
|
| 1078 |
-
|
| 1079 |
-
|
| 1080 |
-
|
| 1081 |
-
|
| 1082 |
-
|
| 1083 |
-
|
| 1084 |
-
|
| 1085 |
-
|
| 1086 |
-
|
| 1087 |
-
|
| 1088 |
-
|
| 1089 |
-
|
| 1090 |
-
|
| 1091 |
-
|
| 1092 |
-
|
| 1093 |
-
"bidId": bid_id,
|
| 1094 |
-
"taskId": task_id,
|
| 1095 |
-
"taskerId": uid,
|
| 1096 |
-
"price": price,
|
| 1097 |
-
"timeline": timeline,
|
| 1098 |
-
"message": message,
|
| 1099 |
-
"status": "submitted",
|
| 1100 |
-
"createdAt": now_iso()
|
| 1101 |
-
}
|
| 1102 |
-
db_ref.child(f"bids/{task_id}/{bid_id}").set(bid)
|
| 1103 |
-
|
| 1104 |
-
# flip task to bidding
|
| 1105 |
-
if task.get("status") == "open":
|
| 1106 |
-
db_ref.child(f"tasks/{task_id}").update({"status": "bidding", "updatedAt": now_iso()})
|
| 1107 |
-
|
| 1108 |
-
# notify customer
|
| 1109 |
-
push_notification(
|
| 1110 |
-
to_uid=task["createdBy"],
|
| 1111 |
-
notif_type="new_bid",
|
| 1112 |
-
title="New bid received",
|
| 1113 |
-
body=f"A tasker submitted a bid for {task.get('category')}",
|
| 1114 |
-
meta={"taskId": task_id, "bidId": bid_id}
|
| 1115 |
-
)
|
| 1116 |
-
|
| 1117 |
-
return jsonify({"success": True, "bid": bid}), 201
|
| 1118 |
-
|
| 1119 |
-
except PermissionError as e:
|
| 1120 |
-
return jsonify({"error": str(e)}), 403
|
| 1121 |
-
except Exception as e:
|
| 1122 |
-
logger.error(f"[SUBMIT BID] Error: {e}")
|
| 1123 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1124 |
-
|
| 1125 |
-
@app.route("/api/tasks/<string:task_id>/bids", methods=["GET"])
|
| 1126 |
-
def list_bids(task_id):
|
| 1127 |
"""
|
| 1128 |
-
|
| 1129 |
-
|
| 1130 |
-
|
|
|
|
|
|
|
| 1131 |
"""
|
| 1132 |
-
|
| 1133 |
-
|
| 1134 |
-
|
| 1135 |
-
|
| 1136 |
-
|
| 1137 |
-
|
| 1138 |
-
|
| 1139 |
-
|
| 1140 |
-
|
| 1141 |
-
|
| 1142 |
-
|
| 1143 |
-
|
| 1144 |
-
|
| 1145 |
-
|
| 1146 |
-
|
| 1147 |
-
|
| 1148 |
-
|
| 1149 |
-
|
| 1150 |
-
|
| 1151 |
-
|
| 1152 |
-
|
| 1153 |
-
|
| 1154 |
-
|
| 1155 |
-
|
| 1156 |
-
|
| 1157 |
-
|
| 1158 |
-
|
| 1159 |
-
|
| 1160 |
-
|
| 1161 |
-
|
| 1162 |
-
|
| 1163 |
-
|
| 1164 |
-
|
| 1165 |
|
| 1166 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1167 |
|
| 1168 |
-
|
| 1169 |
-
|
| 1170 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1171 |
|
| 1172 |
-
|
| 1173 |
-
def select_bid(task_id):
|
| 1174 |
"""
|
| 1175 |
-
|
| 1176 |
-
-
|
| 1177 |
-
|
| 1178 |
-
- set task.status = assigned
|
| 1179 |
-
- notify tasker + customer
|
| 1180 |
"""
|
| 1181 |
-
|
| 1182 |
-
|
| 1183 |
-
|
| 1184 |
-
|
| 1185 |
-
|
| 1186 |
-
|
| 1187 |
-
|
| 1188 |
-
|
| 1189 |
-
|
| 1190 |
-
|
| 1191 |
-
|
| 1192 |
-
|
| 1193 |
-
|
| 1194 |
-
|
| 1195 |
-
|
| 1196 |
-
|
| 1197 |
-
|
| 1198 |
-
|
| 1199 |
-
|
| 1200 |
-
|
| 1201 |
-
|
| 1202 |
-
|
| 1203 |
-
|
| 1204 |
-
|
| 1205 |
-
|
| 1206 |
-
task_ref.update({
|
| 1207 |
-
"assignedTaskerId": tasker_id,
|
| 1208 |
-
"selectedBidId": bid_id,
|
| 1209 |
-
"status": "assigned",
|
| 1210 |
-
"updatedAt": now_iso()
|
| 1211 |
})
|
| 1212 |
|
| 1213 |
-
|
| 1214 |
-
|
| 1215 |
-
|
| 1216 |
-
|
| 1217 |
-
|
| 1218 |
-
|
| 1219 |
-
|
| 1220 |
-
|
| 1221 |
-
|
| 1222 |
-
|
| 1223 |
-
|
| 1224 |
-
|
| 1225 |
-
|
| 1226 |
-
|
| 1227 |
-
|
| 1228 |
-
|
| 1229 |
-
|
| 1230 |
-
|
| 1231 |
-
|
| 1232 |
-
|
| 1233 |
-
|
| 1234 |
-
|
| 1235 |
-
|
| 1236 |
-
|
| 1237 |
-
return jsonify({"success": True, "task": task_ref.get()}), 200
|
| 1238 |
-
|
| 1239 |
-
except PermissionError as e:
|
| 1240 |
-
return jsonify({"error": str(e)}), 403
|
| 1241 |
-
except Exception as e:
|
| 1242 |
-
logger.error(f"[SELECT BID] Error: {e}")
|
| 1243 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1244 |
-
|
| 1245 |
-
# -----------------------------------------------------------------------------
|
| 1246 |
-
# 8. CHAT (REAL-TIME DB STORED) (MVP)
|
| 1247 |
-
# -----------------------------------------------------------------------------
|
| 1248 |
-
|
| 1249 |
-
@app.route("/api/chats/<string:task_id>/messages", methods=["GET"])
|
| 1250 |
-
def list_messages(task_id):
|
| 1251 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 1252 |
-
if not uid:
|
| 1253 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 1254 |
-
|
| 1255 |
-
try:
|
| 1256 |
-
user = db_ref.child(f"users/{uid}").get() or {}
|
| 1257 |
-
role = (user.get("role") or "customer").lower().strip()
|
| 1258 |
-
|
| 1259 |
-
task = db_ref.child(f"tasks/{task_id}").get()
|
| 1260 |
-
if not task:
|
| 1261 |
-
return jsonify({"error": "Task not found"}), 404
|
| 1262 |
-
|
| 1263 |
-
if not task_access_check(uid, task, role):
|
| 1264 |
-
return jsonify({"error": "Access denied"}), 403
|
| 1265 |
-
|
| 1266 |
-
msgs = db_ref.child(f"chats/{task_id}").get() or {}
|
| 1267 |
-
out = list(msgs.values())
|
| 1268 |
-
out.sort(key=lambda x: x.get("createdAt") or "", reverse=False)
|
| 1269 |
-
return jsonify(out), 200
|
| 1270 |
-
|
| 1271 |
-
except Exception as e:
|
| 1272 |
-
logger.error(f"[LIST MSGS] Error: {e}")
|
| 1273 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1274 |
|
| 1275 |
-
|
| 1276 |
-
|
|
|
|
|
|
|
| 1277 |
"""
|
| 1278 |
-
|
| 1279 |
-
- text in form field "text"
|
| 1280 |
-
- file in field "file"
|
| 1281 |
-
Or JSON body: {"text": "..."} for text-only
|
| 1282 |
"""
|
| 1283 |
-
|
| 1284 |
-
|
| 1285 |
-
|
| 1286 |
-
|
| 1287 |
-
|
| 1288 |
-
user = db_ref.child(f"users/{uid}").get() or {}
|
| 1289 |
-
role = (user.get("role") or "customer").lower().strip()
|
| 1290 |
-
|
| 1291 |
-
task = db_ref.child(f"tasks/{task_id}").get()
|
| 1292 |
-
if not task:
|
| 1293 |
-
return jsonify({"error": "Task not found"}), 404
|
| 1294 |
-
|
| 1295 |
-
if not task_access_check(uid, task, role):
|
| 1296 |
-
return jsonify({"error": "Access denied"}), 403
|
| 1297 |
-
|
| 1298 |
-
text = ""
|
| 1299 |
-
attachment_url = ""
|
| 1300 |
-
attachment_type = ""
|
| 1301 |
-
|
| 1302 |
-
if request.content_type and "multipart/form-data" in request.content_type:
|
| 1303 |
-
text = (request.form.get("text") or "").strip()
|
| 1304 |
-
if "file" in request.files:
|
| 1305 |
-
f = request.files["file"]
|
| 1306 |
-
b = f.read()
|
| 1307 |
-
if b:
|
| 1308 |
-
ext = (f.mimetype or "application/octet-stream").split("/")[-1]
|
| 1309 |
-
path = f"tasks/{task_id}/chat/{uid}_{int(time.time())}.{ext}"
|
| 1310 |
-
attachment_url = upload_to_storage(b, path, f.mimetype or "application/octet-stream")
|
| 1311 |
-
attachment_type = f.mimetype or ""
|
| 1312 |
-
else:
|
| 1313 |
-
data = request.get_json() or {}
|
| 1314 |
-
text = (data.get("text") or "").strip()
|
| 1315 |
-
|
| 1316 |
-
if not text and not attachment_url:
|
| 1317 |
-
return jsonify({"error": "Message text or file is required"}), 400
|
| 1318 |
-
|
| 1319 |
-
msg_id = str(uuid.uuid4())
|
| 1320 |
-
msg = {
|
| 1321 |
-
"messageId": msg_id,
|
| 1322 |
-
"taskId": task_id,
|
| 1323 |
-
"senderId": uid,
|
| 1324 |
-
"senderRole": role,
|
| 1325 |
-
"text": text,
|
| 1326 |
-
"attachmentUrl": attachment_url,
|
| 1327 |
-
"attachmentType": attachment_type,
|
| 1328 |
-
"createdAt": now_iso()
|
| 1329 |
}
|
| 1330 |
-
db_ref.child(f"chats/{task_id}/{msg_id}").set(msg)
|
| 1331 |
-
|
| 1332 |
-
# notify the other party
|
| 1333 |
-
other_uid = None
|
| 1334 |
-
if uid == task.get("createdBy"):
|
| 1335 |
-
other_uid = task.get("assignedTaskerId") or None
|
| 1336 |
-
else:
|
| 1337 |
-
other_uid = task.get("createdBy")
|
| 1338 |
-
|
| 1339 |
-
if other_uid:
|
| 1340 |
-
push_notification(
|
| 1341 |
-
to_uid=other_uid,
|
| 1342 |
-
notif_type="chat_message",
|
| 1343 |
-
title="New message",
|
| 1344 |
-
body="You have a new message on a task.",
|
| 1345 |
-
meta={"taskId": task_id}
|
| 1346 |
-
)
|
| 1347 |
-
|
| 1348 |
-
return jsonify({"success": True, "message": msg}), 201
|
| 1349 |
-
|
| 1350 |
-
except Exception as e:
|
| 1351 |
-
logger.error(f"[SEND MSG] Error: {e}")
|
| 1352 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1353 |
-
|
| 1354 |
-
# -----------------------------------------------------------------------------
|
| 1355 |
-
# 9. NOTIFICATIONS (IN-APP) (MVP)
|
| 1356 |
-
# -----------------------------------------------------------------------------
|
| 1357 |
-
|
| 1358 |
-
@app.route("/api/notifications", methods=["GET"])
|
| 1359 |
-
def list_notifications():
|
| 1360 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 1361 |
-
if not uid:
|
| 1362 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 1363 |
-
|
| 1364 |
-
try:
|
| 1365 |
-
notifs = db_ref.child(f"notifications/{uid}").get() or {}
|
| 1366 |
-
out = list(notifs.values())
|
| 1367 |
-
out.sort(key=lambda x: x.get("createdAt") or "", reverse=True)
|
| 1368 |
-
return jsonify(out), 200
|
| 1369 |
-
except Exception as e:
|
| 1370 |
-
logger.error(f"[LIST NOTIFS] Error: {e}")
|
| 1371 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1372 |
-
|
| 1373 |
-
@app.route("/api/notifications/<string:notif_id>/read", methods=["PUT"])
|
| 1374 |
-
def mark_notification_read(notif_id):
|
| 1375 |
-
uid = verify_token(request.headers.get("Authorization"))
|
| 1376 |
-
if not uid:
|
| 1377 |
-
return jsonify({"error": "Unauthorized"}), 401
|
| 1378 |
-
|
| 1379 |
-
try:
|
| 1380 |
-
ref = db_ref.child(f"notifications/{uid}/{notif_id}")
|
| 1381 |
-
n = ref.get()
|
| 1382 |
-
if not n:
|
| 1383 |
-
return jsonify({"error": "Notification not found"}), 404
|
| 1384 |
-
ref.update({"read": True, "readAt": now_iso()})
|
| 1385 |
-
return jsonify({"success": True}), 200
|
| 1386 |
-
except Exception as e:
|
| 1387 |
-
logger.error(f"[READ NOTIF] Error: {e}")
|
| 1388 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1389 |
|
| 1390 |
-
|
| 1391 |
-
|
| 1392 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1393 |
|
| 1394 |
-
|
| 1395 |
-
|
| 1396 |
-
|
| 1397 |
-
|
| 1398 |
-
|
|
|
|
|
|
|
| 1399 |
|
| 1400 |
-
|
| 1401 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1402 |
|
| 1403 |
-
|
| 1404 |
-
|
| 1405 |
-
|
|
|
|
|
|
|
|
|
|
| 1406 |
|
| 1407 |
-
|
| 1408 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1409 |
|
| 1410 |
-
|
| 1411 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1412 |
|
| 1413 |
-
|
| 1414 |
-
|
| 1415 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1416 |
|
| 1417 |
-
|
| 1418 |
-
|
| 1419 |
-
|
|
|
|
|
|
|
| 1420 |
|
| 1421 |
-
|
| 1422 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1423 |
|
| 1424 |
-
|
| 1425 |
-
|
| 1426 |
-
|
| 1427 |
-
|
| 1428 |
-
|
| 1429 |
-
|
| 1430 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1431 |
}
|
| 1432 |
-
|
| 1433 |
-
|
| 1434 |
-
|
| 1435 |
-
return jsonify({"
|
| 1436 |
-
|
| 1437 |
-
|
| 1438 |
-
|
| 1439 |
-
|
| 1440 |
-
|
| 1441 |
-
|
| 1442 |
-
|
| 1443 |
-
#
|
| 1444 |
-
|
| 1445 |
-
|
| 1446 |
-
|
| 1447 |
-
|
| 1448 |
-
|
| 1449 |
-
|
| 1450 |
-
|
| 1451 |
-
|
| 1452 |
-
|
| 1453 |
-
|
| 1454 |
-
|
| 1455 |
-
|
| 1456 |
-
|
| 1457 |
-
|
| 1458 |
-
|
| 1459 |
-
|
| 1460 |
-
|
| 1461 |
-
|
| 1462 |
-
|
| 1463 |
-
|
| 1464 |
-
|
| 1465 |
-
|
| 1466 |
-
|
| 1467 |
-
|
| 1468 |
-
|
| 1469 |
-
|
| 1470 |
-
|
| 1471 |
-
|
| 1472 |
-
|
| 1473 |
-
|
| 1474 |
-
|
| 1475 |
-
|
| 1476 |
-
|
| 1477 |
-
|
| 1478 |
-
|
| 1479 |
-
|
| 1480 |
-
|
| 1481 |
-
|
| 1482 |
-
|
| 1483 |
-
|
| 1484 |
-
|
| 1485 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1486 |
}
|
| 1487 |
-
}), 200
|
| 1488 |
|
| 1489 |
-
|
| 1490 |
-
|
| 1491 |
-
|
| 1492 |
-
|
| 1493 |
-
|
|
|
|
|
|
|
| 1494 |
|
| 1495 |
-
|
| 1496 |
-
|
| 1497 |
-
|
| 1498 |
-
|
| 1499 |
-
|
| 1500 |
-
|
| 1501 |
-
|
| 1502 |
-
|
| 1503 |
-
|
| 1504 |
-
|
| 1505 |
-
|
| 1506 |
-
|
| 1507 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1508 |
|
| 1509 |
-
|
| 1510 |
-
|
| 1511 |
-
|
| 1512 |
-
|
| 1513 |
-
|
| 1514 |
-
|
| 1515 |
-
out.sort(key=lambda x: x.get("createdAt") or "", reverse=True)
|
| 1516 |
-
return jsonify(out), 200
|
| 1517 |
-
except PermissionError as e:
|
| 1518 |
-
return jsonify({"error": str(e)}), 403
|
| 1519 |
-
except Exception as e:
|
| 1520 |
-
logger.error(f"[ADMIN TASKS] Error: {e}")
|
| 1521 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1522 |
|
| 1523 |
-
@app.
|
| 1524 |
-
def
|
| 1525 |
-
|
| 1526 |
-
|
| 1527 |
-
|
| 1528 |
-
|
| 1529 |
-
|
| 1530 |
-
|
| 1531 |
-
|
| 1532 |
-
|
| 1533 |
-
|
| 1534 |
-
|
| 1535 |
-
|
| 1536 |
-
|
| 1537 |
-
|
| 1538 |
-
|
| 1539 |
-
|
| 1540 |
-
|
| 1541 |
-
|
| 1542 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1543 |
|
| 1544 |
-
|
| 1545 |
-
|
| 1546 |
-
|
| 1547 |
-
verify_admin(request.headers.get("Authorization"))
|
| 1548 |
-
msgs = db_ref.child(f"chats/{task_id}").get() or {}
|
| 1549 |
-
out = list(msgs.values())
|
| 1550 |
-
out.sort(key=lambda x: x.get("createdAt") or "", reverse=False)
|
| 1551 |
-
return jsonify(out), 200
|
| 1552 |
-
except PermissionError as e:
|
| 1553 |
-
return jsonify({"error": str(e)}), 403
|
| 1554 |
-
except Exception as e:
|
| 1555 |
-
logger.error(f"[ADMIN VIEW CHAT] Error: {e}")
|
| 1556 |
-
return jsonify({"error": "Internal server error"}), 500
|
| 1557 |
|
| 1558 |
-
|
| 1559 |
-
|
| 1560 |
-
|
|
|
|
|
|
|
| 1561 |
|
|
|
|
|
|
|
|
|
|
| 1562 |
if __name__ == "__main__":
|
| 1563 |
-
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
main.py — Pricelyst Shopping Advisor (single-file server)
|
| 3 |
+
|
| 4 |
+
✅ Flask API
|
| 5 |
+
✅ Firebase Admin persistence (service account JSON via env var)
|
| 6 |
+
✅ Gemini via NEW google-genai SDK (text + multimodal)
|
| 7 |
+
✅ Product intelligence from Pricelyst API (/api/v1/products is open)
|
| 8 |
+
✅ Graceful conversational handling (don’t “force” shopping intent)
|
| 9 |
+
✅ Call briefing + call logging + optional actionable post-call report
|
| 10 |
+
|
| 11 |
+
ENV VARS YOU NEED
|
| 12 |
+
- GOOGLE_API_KEY=...
|
| 13 |
+
- FIREBASE='{"type":"service_account", ...}' # full JSON string
|
| 14 |
+
- PRICE_API_BASE=https://api.pricelyst.co.zw # optional
|
| 15 |
+
- GEMINI_MODEL=gemini-2.0-flash # optional
|
| 16 |
+
- PORT=5000 # optional
|
| 17 |
+
|
| 18 |
+
REQUEST SHAPES
|
| 19 |
+
1) POST /chat
|
| 20 |
+
{
|
| 21 |
+
"profile_id": "demo123",
|
| 22 |
+
"username": "Tinashe", # optional
|
| 23 |
+
"message": "Where is cooking oil cheapest?",
|
| 24 |
+
"images": ["data:image/png;base64,...", "https://..."], # optional
|
| 25 |
+
"context": { "budget": 20, "location": "Harare" } # optional
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
2) POST /api/call-briefing
|
| 29 |
+
{
|
| 30 |
+
"profile_id": "demo123",
|
| 31 |
+
"username": "Tinashe"
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
3) POST /api/log-call-usage
|
| 35 |
+
{
|
| 36 |
+
"profile_id": "demo123",
|
| 37 |
+
"transcript": ".... full transcript ...",
|
| 38 |
+
"call_id": "optional-client-id",
|
| 39 |
+
"started_at": "2026-01-23T12:00:00Z",
|
| 40 |
+
"ended_at": "2026-01-23T12:08:05Z",
|
| 41 |
+
"stats": { "duration_sec": 485, "agent": "elevenlabs" }
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
NOTES
|
| 45 |
+
- We DO NOT depend on upstream auth (you said products are open).
|
| 46 |
+
- We keep our own "profile_id" for personalization; when integrated, the host app supplies real profile_id.
|
| 47 |
+
"""
|
| 48 |
+
|
| 49 |
import os
|
|
|
|
| 50 |
import re
|
| 51 |
import json
|
|
|
|
| 52 |
import time
|
| 53 |
+
import math
|
| 54 |
+
import uuid
|
| 55 |
+
import base64
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 56 |
import logging
|
| 57 |
+
from datetime import datetime, timezone
|
| 58 |
+
from typing import Any, Dict, List, Optional, Tuple
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 59 |
|
| 60 |
+
import requests
|
| 61 |
+
import pandas as pd
|
| 62 |
+
from flask import Flask, request, jsonify
|
| 63 |
+
from flask_cors import CORS
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 64 |
|
| 65 |
+
# ---------- Logging ----------
|
| 66 |
+
logging.basicConfig(
|
| 67 |
+
level=logging.INFO,
|
| 68 |
+
format="%(asctime)s | %(levelname)s | %(message)s"
|
| 69 |
+
)
|
| 70 |
+
logger = logging.getLogger("pricelyst-advisor")
|
| 71 |
|
| 72 |
+
# ---------- Gemini (NEW SDK) ----------
|
| 73 |
+
# pip install google-genai
|
| 74 |
try:
|
| 75 |
+
from google import genai
|
|
|
|
|
|
|
|
|
|
|
|
|
| 76 |
except Exception as e:
|
| 77 |
+
genai = None
|
| 78 |
+
logger.error("google-genai not installed. pip install google-genai. Error=%s", e)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 79 |
|
| 80 |
+
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "")
|
| 81 |
+
GEMINI_MODEL = os.environ.get("GEMINI_MODEL", "gemini-2.0-flash")
|
| 82 |
|
| 83 |
+
_gemini_client = None
|
| 84 |
+
if genai and GOOGLE_API_KEY:
|
|
|
|
|
|
|
|
|
|
| 85 |
try:
|
| 86 |
+
_gemini_client = genai.Client(api_key=GOOGLE_API_KEY)
|
| 87 |
+
logger.info("Gemini client ready (model=%s).", GEMINI_MODEL)
|
| 88 |
except Exception as e:
|
| 89 |
+
logger.error("Failed to init Gemini client: %s", e)
|
|
|
|
| 90 |
|
| 91 |
+
# ---------- Firebase Admin ----------
|
| 92 |
+
# pip install firebase-admin
|
| 93 |
+
import firebase_admin
|
| 94 |
+
from firebase_admin import credentials, firestore
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 95 |
|
| 96 |
+
FIREBASE_ENV = os.environ.get("FIREBASE", "")
|
|
|
|
|
|
|
|
|
|
|
|
|
| 97 |
|
| 98 |
+
def init_firestore_from_env() -> firestore.Client:
    """
    Initialize firebase_admin from the FIREBASE env var and return a Firestore client.

    Idempotent: if a default app already exists, a client is returned directly.
    The FIREBASE value must be the full service-account JSON as one string.

    Raises:
        RuntimeError: if FIREBASE is unset.
    """
    # NOTE(review): firebase_admin._apps is a private SDK attribute; it works
    # as an "already initialized" check today but could break across versions.
    if firebase_admin._apps:
        return firestore.client()

    if not FIREBASE_ENV:
        raise RuntimeError("FIREBASE env var missing. Provide full service account JSON string.")

    sa_info = json.loads(FIREBASE_ENV)
    cred = credentials.Certificate(sa_info)
    firebase_admin.initialize_app(cred)
    return firestore.client()
|
|
|
|
|
|
|
|
|
|
|
|
|
| 109 |
|
| 110 |
+
db = init_firestore_from_env()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 111 |
|
| 112 |
+
# ---------- External API (Pricelyst) ----------
|
| 113 |
+
PRICE_API_BASE = os.environ.get("PRICE_API_BASE", "https://api.pricelyst.co.zw").rstrip("/")
|
| 114 |
+
HTTP_TIMEOUT = 20
|
|
|
|
|
|
|
| 115 |
|
| 116 |
+
# ---------- Flask ----------
|
| 117 |
+
app = Flask(__name__)
|
| 118 |
+
CORS(app)
|
| 119 |
+
|
| 120 |
+
# ---------- In-memory product cache ----------
|
| 121 |
+
PRODUCT_CACHE_TTL_SEC = 60 * 10 # 10 minutes
|
| 122 |
+
_product_cache: Dict[str, Any] = {
|
| 123 |
+
"ts": 0,
|
| 124 |
+
"df_offers": pd.DataFrame(),
|
| 125 |
+
"raw_count": 0,
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
# =========================
|
| 129 |
+
# Helpers: time / strings
|
| 130 |
+
# =========================
|
| 131 |
+
def now_utc_iso() -> str:
    """Current UTC time as an ISO-8601 timestamp string."""
    current = datetime.now(tz=timezone.utc)
    return current.isoformat()
|
| 133 |
+
|
| 134 |
+
def _coerce_float(v: Any) -> Optional[float]:
|
| 135 |
try:
|
| 136 |
+
if v is None:
|
| 137 |
+
return None
|
| 138 |
+
if isinstance(v, (int, float)):
|
| 139 |
+
return float(v)
|
| 140 |
+
s = str(v).strip()
|
| 141 |
+
if not s:
|
| 142 |
+
return None
|
| 143 |
+
return float(s)
|
| 144 |
except Exception:
|
| 145 |
return None
|
| 146 |
|
| 147 |
+
def _norm_str(s: Any) -> str:
|
| 148 |
+
s = "" if s is None else str(s)
|
| 149 |
+
s = s.strip().lower()
|
| 150 |
+
s = re.sub(r"\s+", " ", s)
|
| 151 |
+
return s
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 152 |
|
| 153 |
+
def _safe_json_loads(s: str, fallback: Any):
|
| 154 |
try:
|
| 155 |
+
return json.loads(s)
|
| 156 |
+
except Exception:
|
| 157 |
+
return fallback
|
| 158 |
+
|
| 159 |
+
# =========================
|
| 160 |
+
# Firestore profile storage
|
| 161 |
+
# =========================
|
| 162 |
+
def profile_ref(profile_id: str):
    # Firestore document handle for one advisor profile; "pricelyst_profiles"
    # is the single root collection for all per-user state in this service.
    return db.collection("pricelyst_profiles").document(profile_id)
|
| 164 |
+
|
| 165 |
+
def get_profile(profile_id: str) -> Dict[str, Any]:
    """
    Load the stored profile for *profile_id*, creating a default one on first use.
    """
    ref = profile_ref(profile_id)
    snapshot = ref.get()
    if snapshot.exists:
        return snapshot.to_dict() or {}

    # First contact: persist and return a fresh default profile document.
    fresh = {
        "profile_id": profile_id,
        "created_at": now_utc_iso(),
        "updated_at": now_utc_iso(),
        "username": None,
        "memory_summary": "",
        "preferences": {},
        "last_actions": [],
        "counters": {
            "chats": 0,
            "calls": 0,
        },
    }
    ref.set(fresh)
    return fresh
|
| 186 |
+
|
| 187 |
+
def update_profile(profile_id: str, patch: Dict[str, Any]) -> None:
    """Merge *patch* into the stored profile and refresh its updated_at stamp."""
    merged = dict(patch or {})
    merged["updated_at"] = now_utc_iso()
    profile_ref(profile_id).set(merged, merge=True)
|
| 191 |
+
|
| 192 |
+
def log_chat(profile_id: str, payload: Dict[str, Any]) -> None:
    """Append one chat interaction to the profile's chat_logs subcollection."""
    entry = {**payload, "ts": now_utc_iso()}
    db.collection("pricelyst_profiles").document(profile_id).collection("chat_logs").add(entry)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 197 |
|
| 198 |
+
def log_call(profile_id: str, payload: Dict[str, Any]) -> str:
    """Persist one call record under call_logs and return the new document id."""
    record = {**payload, "ts": now_utc_iso()}
    new_doc = db.collection("pricelyst_profiles").document(profile_id).collection("call_logs").document()
    new_doc.set(record)
    return new_doc.id
|
| 205 |
|
| 206 |
+
# =========================
|
| 207 |
+
# Multimodal image handling
|
| 208 |
+
# =========================
|
| 209 |
+
# Pre-compiled pattern for base64 data URLs (e.g. "data:image/png;base64,....").
_DATA_URL_RE = re.compile(r"^data:(image/[a-zA-Z0-9.+-]+);base64,(.+)$", re.DOTALL)

def parse_images(images: List[str]) -> List[Dict[str, Any]]:
    """
    Normalize client-supplied image references.

    Accepts:
    - data URLs: data:image/png;base64,....
    - raw base64 strings
    - http(s) URLs
    Returns: list of { "mime": "...", "bytes": b"..." } or { "url": "..." }.

    Entries that are empty or cannot be decoded are silently skipped.
    """
    out: List[Dict[str, Any]] = []
    for item in images or []:
        if not item:
            continue
        item = item.strip()

        # Remote URL: pass through untouched for the caller to handle.
        if item.startswith(("http://", "https://")):
            out.append({"url": item})
            continue

        # Data URL: split declared mime type from the base64 payload.
        m = _DATA_URL_RE.match(item)
        if m:
            mime, b64 = m.group(1), m.group(2)
            try:
                out.append({"mime": mime, "bytes": base64.b64decode(b64)})
            except Exception:
                pass
            continue

        # Raw base64 with no header: assume PNG. Strip whitespace first, then
        # decode with validate=True so arbitrary text is rejected instead of
        # silently "decoding" into junk bytes.
        try:
            compact = re.sub(r"\s+", "", item)
            out.append({"mime": "image/png", "bytes": base64.b64decode(compact, validate=True)})
        except Exception:
            continue

    return out
|
| 246 |
+
|
| 247 |
+
# =========================
|
| 248 |
+
# Product fetching + offers DF
|
| 249 |
+
# =========================
|
| 250 |
+
def fetch_products_page(page: int, per_page: int = 50) -> Dict[str, Any]:
    """Fetch one page of the products listing from the pricing API (raises on HTTP error)."""
    resp = requests.get(
        f"{PRICE_API_BASE}/api/v1/products",
        params={"page": page, "perPage": per_page},
        timeout=HTTP_TIMEOUT,
    )
    resp.raise_for_status()
    return resp.json()
|
| 256 |
+
|
| 257 |
+
def fetch_products(max_pages: int = 6, per_page: int = 50) -> List[Dict[str, Any]]:
    """
    Pull a reasonable slice of the catalog (raise max_pages later if needed).

    Expected API shape: {status, message, data, totalItemCount, currentPage, totalPages}.
    Stops early once the reported totalPages is reached or a page comes back empty.
    """
    collected: List[Dict[str, Any]] = []
    page = 1
    while page <= max_pages:
        payload = fetch_products_page(page, per_page=per_page)
        batch = payload.get("data") or []
        if isinstance(batch, list):
            collected.extend(batch)
        reported_total = payload.get("totalPages")
        if isinstance(reported_total, int) and page >= reported_total:
            break
        if not batch:
            break
        page += 1
    return collected
|
| 274 |
+
|
| 275 |
+
def products_to_offers_df(products: List[Dict[str, Any]]) -> pd.DataFrame:
    """
    Each row = one product + one retailer offer.
    Your product object can include `prices[]` with nested `retailer`.

    Products with no offers still produce one row (retailer fields = None) so
    they remain searchable; any product that raises while being flattened is
    skipped entirely (best-effort ingestion).
    """
    rows = []
    for p in products or []:
        try:
            product_id = p.get("id")
            name = p.get("name") or ""
            clean_name = _norm_str(name)

            # brand may be a nested dict or missing/None entirely
            brand_name = ((p.get("brand") or {}).get("brand_name")) if isinstance(p.get("brand"), dict) else None
            categories = p.get("categories") or []
            cat_names = []
            for c in categories:
                if isinstance(c, dict) and c.get("name"):
                    cat_names.append(c.get("name"))
            # first listed category is treated as the primary one
            primary_category = cat_names[0] if cat_names else None

            stock_status = p.get("stock_status")
            on_promo = bool(p.get("on_promotion"))
            promo_badge = p.get("promo_badge")
            promo_name = p.get("promo_name")
            promo_price = _coerce_float(p.get("promo_price"))
            original_price = _coerce_float(p.get("original_price"))

            recommended_price = _coerce_float(p.get("recommended_price"))
            base_price = _coerce_float(p.get("price"))
            bulk_price = _coerce_float(p.get("bulk_price"))
            bulk_unit = p.get("bulk_unit")

            image = p.get("image")
            thumb = p.get("thumbnail")

            offers = p.get("prices") or []
            if not offers:
                # Keep the product visible even without retailer offers.
                rows.append({
                    "product_id": product_id,
                    "product_name": name,
                    "clean_name": clean_name,
                    "brand_name": brand_name,
                    "primary_category": primary_category,
                    "categories": cat_names,
                    "stock_status": stock_status,
                    "on_promotion": on_promo,
                    "promo_badge": promo_badge,
                    "promo_name": promo_name,
                    "promo_price": promo_price,
                    "original_price": original_price,
                    "recommended_price": recommended_price,
                    "base_price": base_price,
                    "bulk_price": bulk_price,
                    "bulk_unit": bulk_unit,
                    "image": image,
                    "thumbnail": thumb,
                    "retailer_id": None,
                    "retailer_name": None,
                    "retailer_type": None,
                    "retailer_logo": None,
                    "offer_price": None,
                })
                continue

            # One output row per retailer offer.
            for offer in offers:
                if not isinstance(offer, dict):
                    continue
                retailer = offer.get("retailer") or {}
                rows.append({
                    "product_id": product_id,
                    "product_name": name,
                    "clean_name": clean_name,
                    "brand_name": brand_name,
                    "primary_category": primary_category,
                    "categories": cat_names,
                    "stock_status": stock_status,
                    "on_promotion": on_promo,
                    "promo_badge": promo_badge,
                    "promo_name": promo_name,
                    "promo_price": promo_price,
                    "original_price": original_price,
                    "recommended_price": recommended_price,
                    "base_price": base_price,
                    "bulk_price": bulk_price,
                    "bulk_unit": bulk_unit,
                    "image": image,
                    "thumbnail": thumb,
                    # prefer the flat retailer_id, fall back to the nested object's id
                    "retailer_id": offer.get("retailer_id") or retailer.get("id"),
                    "retailer_name": (retailer.get("name") if isinstance(retailer, dict) else None),
                    "retailer_type": (retailer.get("type") if isinstance(retailer, dict) else None),
                    "retailer_logo": (retailer.get("logo") if isinstance(retailer, dict) else None),
                    "offer_price": _coerce_float(offer.get("price")),
                })
        except Exception:
            continue

    df = pd.DataFrame(rows)
    if df.empty:
        return df

    # Normalize dtypes the search/summarize helpers rely on.
    df["offer_price"] = df["offer_price"].apply(_coerce_float)
    df["clean_name"] = df["clean_name"].fillna("").astype(str)
    df["product_name"] = df["product_name"].fillna("").astype(str)
    df["retailer_name"] = df["retailer_name"].fillna("").astype(str)
    return df
|
| 380 |
+
|
| 381 |
+
def get_offers_df(force_refresh: bool = False) -> pd.DataFrame:
    """Return the cached offers DataFrame, refreshing from the API when stale."""
    cached = _product_cache["df_offers"]
    fresh = (time.time() - _product_cache["ts"]) < PRODUCT_CACHE_TTL_SEC
    if not force_refresh and fresh and isinstance(cached, pd.DataFrame) and not cached.empty:
        return cached

    try:
        products = fetch_products(max_pages=8, per_page=50)
        offers = products_to_offers_df(products)
        _product_cache["ts"] = time.time()
        _product_cache["df_offers"] = offers
        _product_cache["raw_count"] = len(products)
        logger.info("Loaded offers DF: products=%s offers_rows=%s", len(products), len(offers))
        return offers
    except Exception as e:
        logger.error("Failed to refresh product cache: %s", e)
        # Serve whatever we had before — even a stale frame beats an error.
        if isinstance(_product_cache["df_offers"], pd.DataFrame):
            return _product_cache["df_offers"]
        return pd.DataFrame()
|
| 400 |
+
|
| 401 |
+
# =========================
|
| 402 |
+
# Gemini wrappers
|
| 403 |
+
# =========================
|
| 404 |
+
def gemini_generate_text(system: str, user: str, temperature: float = 0.4) -> str:
    """Single-turn Gemini text call; returns "" when the client is missing or on error."""
    if not _gemini_client:
        return ""
    prompt = system.strip() + "\n\n" + user.strip()
    try:
        result = _gemini_client.models.generate_content(
            model=GEMINI_MODEL,
            contents=[{"role": "user", "parts": [{"text": prompt}]}],
            config={"temperature": temperature, "max_output_tokens": 900},
        )
        return (result.text or "").strip()
    except Exception as e:
        logger.error("Gemini text error: %s", e)
        return ""
|
| 422 |
|
| 423 |
+
def gemini_generate_multimodal(system: str, user: str, images: List[Dict[str, Any]]) -> str:
    """
    Uses Gemini multimodal:
    - if we have bytes -> inline_data
    - if we have url -> just paste the URL (server-side fetch is unreliable w/o whitelisting),
      so we prefer bytes from the client.

    Returns "" when the client is unavailable or the call fails.
    """
    if not _gemini_client:
        return ""

    content_parts: List[Dict[str, Any]] = [{"text": system.strip() + "\n\n" + user.strip()}]

    for img in images or []:
        if "bytes" in img and img.get("mime"):
            encoded = base64.b64encode(img["bytes"]).decode("utf-8")
            content_parts.append({"inline_data": {"mime_type": img["mime"], "data": encoded}})
        elif img.get("url"):
            # last resort: hand the model the URL as plain text
            content_parts.append({"text": f"[IMAGE_URL]\n{img['url']}"})

    try:
        result = _gemini_client.models.generate_content(
            model=GEMINI_MODEL,
            contents=[{"role": "user", "parts": content_parts}],
            config={"temperature": 0.2, "max_output_tokens": 900},
        )
        return (result.text or "").strip()
    except Exception as e:
        logger.error("Gemini multimodal error: %s", e)
        return ""
|
| 461 |
+
|
| 462 |
+
# =========================
|
| 463 |
+
# Intent + actionability
|
| 464 |
+
# =========================
|
| 465 |
+
INTENT_SYSTEM = """
|
| 466 |
+
You are Pricelyst AI. Your job: understand whether the user is asking for actionable shopping help.
|
| 467 |
+
Return STRICT JSON only.
|
| 468 |
+
|
| 469 |
+
Output schema:
|
| 470 |
+
{
|
| 471 |
+
"actionable": true|false,
|
| 472 |
+
"intent": one of [
|
| 473 |
+
"store_recommendation",
|
| 474 |
+
"price_lookup",
|
| 475 |
+
"price_compare",
|
| 476 |
+
"basket_optimize",
|
| 477 |
+
"basket_build",
|
| 478 |
+
"product_discovery",
|
| 479 |
+
"trust_check",
|
| 480 |
+
"chit_chat",
|
| 481 |
+
"other"
|
| 482 |
+
],
|
| 483 |
+
"items": [{"name": "...", "quantity": 1}],
|
| 484 |
+
"constraints": {"budget": number|null, "location": "... "|null, "time_context": "mid-month|month-end|weekend|today|unknown"},
|
| 485 |
+
"notes": "short reasoning"
|
| 486 |
+
}
|
| 487 |
|
| 488 |
+
Rules:
|
| 489 |
+
- If user is chatting/social (hi, jokes, thanks, how are you, etc) => actionable=false, intent="chit_chat".
|
| 490 |
+
- If user asks about prices/stores/basket/what to buy => actionable=true.
|
| 491 |
+
- If user provided a list, extract items + quantities if obvious.
|
| 492 |
+
- Keep it conservative: if unclear, actionable=false.
|
| 493 |
+
"""
|
| 494 |
|
| 495 |
+
def detect_intent(message: str, images_present: bool, context: Dict[str, Any]) -> Dict[str, Any]:
    """Classify the user's message via Gemini; always returns a normalized intent dict."""
    serialized_ctx = json.dumps(context or {}, ensure_ascii=False)
    prompt = f"Message: {message}\nImagesPresent: {images_present}\nContext: {serialized_ctx}"
    raw = gemini_generate_text(INTENT_SYSTEM, prompt, temperature=0.1)
    parsed = _safe_json_loads(raw, fallback={})
    if not isinstance(parsed, dict):
        return {"actionable": False, "intent": "other", "items": [], "constraints": {}, "notes": "bad_json"}
    # Guarantee the keys downstream code relies on.
    for key, default in (("actionable", False), ("intent", "other"), ("items", []), ("constraints", {})):
        parsed.setdefault(key, default)
    return parsed
|
| 508 |
+
|
| 509 |
+
# =========================
|
| 510 |
+
# Matching + analytics
|
| 511 |
+
# =========================
|
| 512 |
+
def search_products(df: pd.DataFrame, query: str, limit: int = 10) -> pd.DataFrame:
    """
    Simple search over the offers frame: substring match on clean_name first,
    then a cheap token-overlap score as a fallback when there are few hits.
    """
    if df.empty:
        return df

    normalized = _norm_str(query)
    if not normalized:
        return df.head(0)

    # Pass 1: direct substring containment.
    direct = df[df["clean_name"].str.contains(re.escape(normalized), na=False)]
    if len(direct) >= limit:
        return direct.head(limit)

    # Pass 2: score rows by how many query tokens they share.
    wanted = set(normalized.split())
    if not wanted:
        return direct.head(limit)

    scored = df.copy()
    scored["score"] = scored["clean_name"].apply(lambda s: len(wanted.intersection(set(str(s).split()))))
    scored = scored[scored["score"] > 0].sort_values(["score"], ascending=False)
    merged = pd.concat([direct, scored], axis=0).drop_duplicates(subset=["product_id", "retailer_id"])
    return merged.head(limit)
|
| 538 |
+
|
| 539 |
+
def summarize_offers(df_hits: pd.DataFrame) -> Dict[str, Any]:
    """
    Collapse search hits into a single product summary.

    A product appears once per retailer offer; we pick the product with the
    widest offer coverage and report its cheapest offer, price range, and up
    to 5 top offers. Returns {} when there are no hits at all.
    """
    if df_hits.empty:
        return {}

    # Product with the most offer rows wins (best retailer coverage).
    coverage = df_hits.groupby("product_id").size().sort_values(ascending=False)
    best_pid = int(coverage.index[0])
    rows = df_hits[df_hits["product_id"] == best_pid].copy()

    first = rows.iloc[0]
    base = {
        "product_id": best_pid,
        "name": first["product_name"],
        "brand": first["brand_name"],
        "category": first["primary_category"],
        "stock_status": first["stock_status"],
        "on_promotion": bool(first["on_promotion"]),
        "promo_badge": first["promo_badge"],
        # prefer the thumbnail, fall back to the full image
        "image": first["thumbnail"] or first["image"],
    }

    priced = rows[rows["offer_price"].notna()].copy()
    priced = priced.sort_values("offer_price", ascending=True)

    if priced.empty:
        return {**base, "offers": [], "cheapest": None, "price_range": None}

    offers_out = [
        {"retailer": r["retailer_name"], "price": float(r["offer_price"]), "retailer_logo": r["retailer_logo"]}
        for _, r in priced.head(5).iterrows()
    ]
    lowest = float(priced["offer_price"].min())
    highest = float(priced["offer_price"].max())
    return {
        **base,
        "offers": offers_out,
        "cheapest": {
            "retailer": priced.iloc[0]["retailer_name"],
            "price": float(priced.iloc[0]["offer_price"] or 0),
            "retailer_logo": priced.iloc[0]["retailer_logo"],
        },
        "price_range": {"min": lowest, "max": highest, "spread": (highest - lowest)},
    }
|
| 610 |
|
| 611 |
+
def basket_store_choice(df: pd.DataFrame, items: List[Dict[str, Any]]) -> Dict[str, Any]:
    """
    Pick the single best store for a basket: the one covering the most items,
    ties broken by the lowest total.

    Very pragmatic MVP: each requested item is matched to its best product and
    priced at that product's cheapest offer.
    """
    if df.empty or not items:
        return {"items": [], "best_store": None, "missing": []}

    matched = []
    unmatched = []

    for entry in items:
        requested = entry.get("name") or ""
        quantity = int(entry.get("quantity") or 1)
        summary = summarize_offers(search_products(df, requested, limit=50))
        if not summary or not summary.get("cheapest"):
            unmatched.append(requested)
            continue
        cheapest = summary["cheapest"]
        matched.append({
            "requested": requested,
            "matched_product": summary["name"],
            "brand": summary.get("brand"),
            "qty": quantity,
            "cheapest_retailer": cheapest["retailer"],
            "unit_price": cheapest["price"],
            "line_total": cheapest["price"] * quantity,
            "offers": summary.get("offers", []),
            "image": summary.get("image"),
        })

    if not matched:
        return {"items": [], "best_store": None, "missing": unmatched}

    # Aggregate cheapest-per-item totals and coverage counts per retailer.
    totals: Dict[str, float] = {}
    counts: Dict[str, int] = {}
    for line in matched:
        store = line["cheapest_retailer"]
        totals[store] = totals.get(store, 0.0) + float(line["line_total"])
        counts[store] = counts.get(store, 0) + 1

    # Rank: most items covered first, then cheapest total.
    best = min(totals.keys(), key=lambda s: (-counts.get(s, 0), totals.get(s, 0.0)))
    return {
        "items": matched,
        "best_store": {
            "name": best,
            "covered_items": counts.get(best, 0),
            "total_for_covered_items": round(totals.get(best, 0.0), 2),
            "total_items_requested": len(items),
        },
        "missing": unmatched
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 667 |
|
| 668 |
+
# =========================
|
| 669 |
+
# Response rendering (informative)
|
| 670 |
+
# =========================
|
| 671 |
+
def render_price_answer(summary: Dict[str, Any]) -> Dict[str, Any]:
    """Shape a product summary into a structured payload the frontend can render."""
    if not summary:
        return {
            "type": "not_found",
            "title": "I couldn't find that product.",
            "message": "Try a different wording (brand + size helps), or upload an image/receipt.",
        }

    cheapest = summary.get("cheapest")
    price_range = summary.get("price_range")

    highlights = []
    if cheapest:
        highlights.append(f"Cheapest right now: {cheapest['retailer']} — ${cheapest['price']:.2f}")
    has_spread = (
        price_range
        and price_range.get("min") is not None
        and price_range.get("max") is not None
        and price_range["max"] != price_range["min"]
    )
    if has_spread:
        highlights.append(
            f"Price range: ${price_range['min']:.2f} → ${price_range['max']:.2f} (spread ${price_range['spread']:.2f})"
        )
    if summary.get("on_promotion"):
        highlights.append(f"Promo: {summary.get('promo_badge') or 'On promotion'}")

    subtitle_parts = [summary.get("brand"), summary.get("category"), summary.get("stock_status")]
    return {
        "type": "product_price",
        "title": summary.get("name"),
        "subtitle": " | ".join([part for part in subtitle_parts if part]),
        "image": summary.get("image"),
        "highlights": highlights,
        "offers": summary.get("offers", []),
        "raw": summary,
    }
|
| 709 |
|
| 710 |
+
def render_basket_answer(basket: Dict[str, Any]) -> Dict[str, Any]:
    """Shape a basket_store_choice() result into a frontend-renderable payload."""
    items = basket.get("items")
    if not items:
        return {
            "type": "basket_empty",
            "title": "I couldn't build a basket from that.",
            "message": "Send a clearer list (e.g., '2 bread, 1 cooking oil 2L') or upload a list/receipt photo."
        }

    return {
        "type": "basket_plan",
        "title": "Basket plan",
        "best_store": basket.get("best_store"),
        "items": items,
        "missing": basket.get("missing") or [],
        "notes": "If you want, tell me your budget and I’ll suggest cheaper substitutes.",
    }
|
| 728 |
|
| 729 |
+
# =========================
|
| 730 |
+
# Multimodal extraction (lists / receipts)
|
| 731 |
+
# =========================
|
| 732 |
+
VISION_SYSTEM = """
|
| 733 |
+
You are an expert shopping assistant. Extract actionable items and quantities from the user's image(s).
|
| 734 |
+
Return STRICT JSON only.
|
| 735 |
|
| 736 |
+
Output schema:
|
| 737 |
+
{
|
| 738 |
+
"actionable": true|false,
|
| 739 |
+
"items": [{"name":"...", "quantity": 1}],
|
| 740 |
+
"notes": "short"
|
| 741 |
+
}
|
| 742 |
|
| 743 |
+
Rules:
|
| 744 |
+
- If it looks like a handwritten shopping list, extract items.
|
| 745 |
+
- If it looks like a receipt, extract the purchased items (best-effort).
|
| 746 |
+
- If it’s random (selfie, meme, etc), actionable=false and items=[].
|
| 747 |
+
- Keep it conservative: only include items you’re confident about.
|
| 748 |
+
"""
|
| 749 |
|
| 750 |
+
def extract_items_from_images(images: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Run Gemini vision over parsed images; always returns a normalized items dict."""
    if not images:
        return {"actionable": False, "items": [], "notes": "no_images"}
    raw = gemini_generate_multimodal(VISION_SYSTEM, "Extract items from the images.", images)
    parsed = _safe_json_loads(raw, fallback={})
    if not isinstance(parsed, dict):
        return {"actionable": False, "items": [], "notes": "bad_json"}
    parsed.setdefault("actionable", False)
    parsed.setdefault("items", [])
    return parsed
|
| 761 |
+
|
| 762 |
+
# =========================
|
| 763 |
+
# Post-call report synthesis (only if actionable)
|
| 764 |
+
# =========================
|
| 765 |
+
CALL_REPORT_SYSTEM = """
|
| 766 |
+
You are Pricelyst AI. You will receive a full call transcript.
|
| 767 |
+
Decide whether there is an actionable request (party planning, shopping needs, budgeting, groceries, etc).
|
| 768 |
+
If actionable, produce a concise MARKDOWN report that the client can turn into a PDF.
|
| 769 |
+
If NOT actionable (just chatting), return an empty string.
|
| 770 |
|
| 771 |
+
Rules:
|
| 772 |
+
- Be practical and Zimbabwe-oriented.
|
| 773 |
+
- If planning an event: include (1) Assumptions, (2) Shopping list with quantities, (3) Budget ranges, (4) Simple menu/recipe ideas, (5) Optional restaurant/catering suggestions (generic; do NOT invent addresses).
|
| 774 |
+
- Only output Markdown or empty string. No code blocks.
|
| 775 |
+
"""
|
| 776 |
|
| 777 |
+
def build_call_report_markdown(transcript: str) -> str:
    """Synthesize a Markdown call report, or "" when the call wasn't actionable."""
    # Too short to contain anything actionable.
    if not transcript or len(transcript.strip()) < 40:
        return ""
    report = (gemini_generate_text(CALL_REPORT_SYSTEM, transcript, temperature=0.3) or "").strip()
    # Guardrail: JSON-looking output means the model ignored instructions — drop it.
    if report.startswith("{") or report.startswith("["):
        return ""
    # Conservative: must contain at least one heading or list bullet to be report-like.
    if ("#" not in report) and ("- " not in report) and ("* " not in report):
        return ""
    return report
|
| 789 |
+
|
| 790 |
+
# =========================
|
| 791 |
+
# Routes
|
| 792 |
+
# =========================
|
| 793 |
+
@app.get("/health")
def health():
    """Liveness probe: reports Gemini availability and product-cache stats."""
    cached = _product_cache["df_offers"]
    cached_rows = int(len(cached)) if isinstance(cached, pd.DataFrame) else 0
    return jsonify({
        "ok": True,
        "ts": now_utc_iso(),
        "gemini": bool(_gemini_client),
        "products_cached_rows": cached_rows,
        "products_raw_count": int(_product_cache.get("raw_count", 0)),
    })
|
| 802 |
|
| 803 |
+
@app.post("/chat")
def chat():
    """
    Main conversational endpoint.

    Flow: parse any attached images -> detect intent -> either answer
    conversationally (non-actionable) or run the matching shopping action
    (price lookup, basket build, comparison) -> log and update counters.
    """
    body = request.get_json(silent=True) or {}
    profile_id = (body.get("profile_id") or "").strip()
    if not profile_id:
        return jsonify({"ok": False, "error": "profile_id is required"}), 400

    message = (body.get("message") or "").strip()
    username = body.get("username")
    context = body.get("context") or {}
    images_raw = body.get("images") or []
    images = parse_images(images_raw)

    prof = get_profile(profile_id)
    # Only set the username once; don't overwrite an existing one.
    if username and not prof.get("username"):
        update_profile(profile_id, {"username": username})

    # 1) If images: try extract items (shopping list / receipt)
    extracted = {"actionable": False, "items": [], "notes": "skipped"}
    if images:
        extracted = extract_items_from_images(images)

    # 2) Detect intent from message (+ image presence)
    intent = detect_intent(message, images_present=bool(images), context=context)

    # If image extraction got items, treat as actionable unless the message is clearly chit-chat
    image_items = extracted.get("items") if isinstance(extracted, dict) else []
    if image_items and isinstance(image_items, list) and intent.get("intent") != "chit_chat":
        intent["actionable"] = True
        # multiple extracted items imply a basket; a single item is a price lookup
        intent["intent"] = "basket_build" if len(image_items) > 1 else "price_lookup"
        intent["items"] = image_items

    # 3) Graceful conversational fallback
    if not intent.get("actionable"):
        reply = {
            "type": "chat",
            "message": (
                f"Hey{(' ' + (username or prof.get('username') or '')).strip()} 👋\n"
                "If you want shopping help, ask me something like:\n"
                "• “Where is cooking oil cheapest?”\n"
                "• “Which store is best for my basket: rice, chicken, oil?”\n"
                "• “Build me a budget basket under $20.”"
            )
        }
        # log + counters
        log_chat(profile_id, {"message": message, "intent": intent, "response_type": "chit_chat"})
        update_profile(profile_id, {"counters": {"chats": int((prof.get("counters") or {}).get("chats", 0)) + 1}})
        return jsonify({"ok": True, "intent": intent, "data": reply})

    # 4) Actionable: execute against the (possibly cached) offers frame
    df = get_offers_df(force_refresh=False)

    response_payload: Dict[str, Any] = {"type": "unknown", "message": "No result."}

    if intent["intent"] in ("price_lookup", "trust_check", "product_discovery"):
        # pick first item or treat message as query
        query = ""
        if intent.get("items"):
            query = intent["items"][0].get("name") or ""
        if not query:
            query = message
        hits = search_products(df, query, limit=80)
        summary = summarize_offers(hits)
        response_payload = render_price_answer(summary)

    elif intent["intent"] in ("basket_build", "basket_optimize", "store_recommendation"):
        items = intent.get("items") or []
        # if user didn't provide items but asked store choice, we can try to extract nouns—too risky; keep conservative
        if not items:
            response_payload = {
                "type": "need_list",
                "title": "Send your list",
                "message": "I can recommend the best store once you send your basket (even 3–5 items)."
            }
        else:
            basket = basket_store_choice(df, items)
            response_payload = render_basket_answer(basket)

    elif intent["intent"] == "price_compare":
        items = intent.get("items") or []
        if len(items) < 2:
            response_payload = {
                "type": "need_two_items",
                "title": "Need two items",
                "message": "Tell me two items to compare, e.g., “Coke 2L vs Pepsi 2L”."
            }
        else:
            # summarize up to three items, then rank by their cheapest price
            comparisons = []
            for it in items[:3]:
                hits = search_products(df, it.get("name") or "", limit=60)
                summary = summarize_offers(hits)
                comparisons.append(summary)

            # compute cheapest for each
            rows = []
            for s in comparisons:
                if not s or not s.get("cheapest"):
                    continue
                rows.append({
                    "name": s.get("name"),
                    "cheapest_retailer": s["cheapest"]["retailer"],
                    "price": s["cheapest"]["price"]
                })
            rows = sorted(rows, key=lambda x: x["price"])
            response_payload = {
                "type": "comparison",
                "title": "Comparison",
                "items": rows,
                "winner": rows[0] if rows else None
            }

    # 5) Persist + counters + light memory updates
    log_chat(profile_id, {
        "message": message,
        "intent": intent,
        "response_type": response_payload.get("type"),
        "images_present": bool(images),
    })

    counters = prof.get("counters") or {}
    update_profile(profile_id, {"counters": {"chats": int(counters.get("chats", 0)) + 1}})

    # minimal preference inference: remember the last recommended store
    if response_payload.get("type") == "basket_plan" and response_payload.get("best_store"):
        update_profile(profile_id, {"preferences": {"last_best_store": response_payload["best_store"]["name"]}})

    return jsonify({"ok": True, "intent": intent, "data": response_payload})
|
| 930 |
+
|
| 931 |
+
@app.post("/api/call-briefing")
|
| 932 |
+
def call_briefing():
|
| 933 |
+
body = request.get_json(silent=True) or {}
|
| 934 |
+
profile_id = (body.get("profile_id") or "").strip()
|
| 935 |
+
if not profile_id:
|
| 936 |
+
return jsonify({"ok": False, "error": "profile_id is required"}), 400
|
| 937 |
+
|
| 938 |
+
username = body.get("username")
|
| 939 |
+
prof = get_profile(profile_id)
|
| 940 |
+
|
| 941 |
+
if username and not prof.get("username"):
|
| 942 |
+
update_profile(profile_id, {"username": username})
|
| 943 |
+
prof["username"] = username
|
| 944 |
+
|
| 945 |
+
# Build lightweight "shopping intelligence" variables for ElevenLabs agent
|
| 946 |
+
prefs = prof.get("preferences") or {}
|
| 947 |
+
last_store = (prefs.get("last_best_store") or "").strip() or None
|
| 948 |
+
|
| 949 |
+
# quick stats from recent chats (last 25)
|
| 950 |
+
logs = db.collection("pricelyst_profiles").document(profile_id).collection("chat_logs") \
|
| 951 |
+
.order_by("ts", direction=firestore.Query.DESCENDING).limit(25).stream()
|
| 952 |
+
|
| 953 |
+
intents = []
|
| 954 |
+
for d in logs:
|
| 955 |
+
dd = d.to_dict() or {}
|
| 956 |
+
ii = (dd.get("intent") or {}).get("intent")
|
| 957 |
+
if ii:
|
| 958 |
+
intents.append(ii)
|
| 959 |
+
|
| 960 |
+
intent_counts: Dict[str, int] = {}
|
| 961 |
+
for ii in intents:
|
| 962 |
+
intent_counts[ii] = intent_counts.get(ii, 0) + 1
|
| 963 |
+
|
| 964 |
+
shopping_intelligence = {
|
| 965 |
+
"username": prof.get("username") or "there",
|
| 966 |
+
"last_best_store": last_store,
|
| 967 |
+
"top_intents_last_25": sorted(intent_counts.items(), key=lambda x: x[1], reverse=True)[:5],
|
| 968 |
+
"tone": "practical_zimbabwe",
|
| 969 |
+
}
|
| 970 |
|
| 971 |
+
return jsonify({
|
| 972 |
+
"ok": True,
|
| 973 |
+
"profile_id": profile_id,
|
| 974 |
+
"memory_summary": prof.get("memory_summary", ""),
|
| 975 |
+
"shopping_intelligence": shopping_intelligence
|
| 976 |
+
})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 977 |
|
| 978 |
+
@app.post("/api/log-call-usage")
|
| 979 |
+
def log_call_usage():
|
| 980 |
+
body = request.get_json(silent=True) or {}
|
| 981 |
+
profile_id = (body.get("profile_id") or "").strip()
|
| 982 |
+
if not profile_id:
|
| 983 |
+
return jsonify({"ok": False, "error": "profile_id is required"}), 400
|
| 984 |
+
|
| 985 |
+
transcript = (body.get("transcript") or "").strip()
|
| 986 |
+
call_id = body.get("call_id") or None
|
| 987 |
+
started_at = body.get("started_at") or None
|
| 988 |
+
ended_at = body.get("ended_at") or None
|
| 989 |
+
stats = body.get("stats") or {}
|
| 990 |
+
|
| 991 |
+
prof = get_profile(profile_id)
|
| 992 |
+
|
| 993 |
+
# Conservative “actionable report” generation:
|
| 994 |
+
# - only generate if transcript has planning keywords
|
| 995 |
+
# - and Gemini returns report-ish markdown
|
| 996 |
+
planning_keywords = ["party", "birthday", "wedding", "braai", "grocer", "basket", "shopping", "budget", "ingredients", "recipe", "cook", "drinks", "snacks"]
|
| 997 |
+
looks_planning = any(k in transcript.lower() for k in planning_keywords)
|
| 998 |
+
|
| 999 |
+
report_md = ""
|
| 1000 |
+
if looks_planning and _gemini_client:
|
| 1001 |
+
report_md = build_call_report_markdown(transcript)
|
| 1002 |
+
|
| 1003 |
+
doc_id = log_call(profile_id, {
|
| 1004 |
+
"call_id": call_id,
|
| 1005 |
+
"started_at": started_at,
|
| 1006 |
+
"ended_at": ended_at,
|
| 1007 |
+
"stats": stats,
|
| 1008 |
+
"transcript": transcript,
|
| 1009 |
+
"report_markdown": report_md,
|
| 1010 |
+
})
|
| 1011 |
|
| 1012 |
+
# update counters
|
| 1013 |
+
counters = prof.get("counters") or {}
|
| 1014 |
+
update_profile(profile_id, {"counters": {"calls": int(counters.get("calls", 0)) + 1}})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1015 |
|
| 1016 |
+
return jsonify({
|
| 1017 |
+
"ok": True,
|
| 1018 |
+
"logged_call_doc_id": doc_id,
|
| 1019 |
+
"report_markdown": report_md # client turns this into PDF; empty if non-actionable
|
| 1020 |
+
})
|
| 1021 |
|
| 1022 |
+
# =========================
# Run
# =========================
if __name__ == "__main__":
    # Bind on all interfaces; the listening port comes from the PORT env var
    # (default 5000), which is how hosting platforms inject it.
    # NOTE(review): debug=True enables the Werkzeug reloader/debugger, which
    # is unsafe on a publicly reachable deployment — confirm this is only
    # intended for local development.
    port = int(os.environ.get("PORT", "5000"))
    app.run(host="0.0.0.0", port=port, debug=True)