Update main.py

main.py CHANGED
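Summary of the change set below: the two Google Sheets spreadsheet IDs and the cache-update interval move from hardcoded constants to environment variables (with the old values kept as defaults); two Discord webhook URLs (SCAMMER_WEBHOOK_URL, VALUE_WEBHOOK_URL) and a send_webhook_notification() helper are introduced so that value changes and newly added scammer/DWC rows are announced; init_google_sheets() gains an explicit fallback from the CREDENTIALS_JSON environment variable to a local credentials.json file; process_sheet_data() now skips header-like rows and rows without a valid name; and the status, readiness, and error-handling paths become more defensive.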
@@ -1,6 +1,3 @@
-# --- START OF FILE main.py ---
-
-# main.py
 from fastapi import FastAPI, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel
@@ -37,9 +34,9 @@ app.add_middleware(
 
 # Google Sheets Config
 # Spreadsheet containing Scammer and DWC info
-SCAMMER_DWC_SPREADSHEET_ID = '1sgkhBNGw_r6tBIxvdeXaI0bVmWBeACN4jiw_oDEeXLw'
+SCAMMER_DWC_SPREADSHEET_ID = os.getenv('SCAMMER_DWC_SPREADSHEET_ID', '1sgkhBNGw_r6tBIxvdeXaI0bVmWBeACN4jiw_oDEeXLw')
 # Spreadsheet containing Value lists and Dupe list
-VALUES_DUPE_SPREADSHEET_ID = '1Toe07o3P517q8sm9Qb1e5xyFWCuwgskj71IKJwJNfNU'
+VALUES_DUPE_SPREADSHEET_ID = os.getenv('VALUES_DUPE_SPREADSHEET_ID', '1Toe07o3P517q8sm9Qb1e5xyFWCuwgskj71IKJwJNfNU')
 
 SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
 
@@ -62,12 +59,17 @@ CATEGORIES = [
 VALUES_RANGE = 'B6:P' # Range within each category sheet
 
 # Cache Update Interval
-CACHE_UPDATE_INTERVAL_SECONDS = 60 * 5 # 5 minutes
+CACHE_UPDATE_INTERVAL_SECONDS = int(os.getenv('CACHE_UPDATE_INTERVAL_SECONDS', 60 * 5)) # 5 minutes default
+
+# Webhook URLs
+SCAMMER_WEBHOOK_URL = os.getenv("SCAMMER_WEBHOOK_URL")
+VALUE_WEBHOOK_URL = os.getenv("VALUE_WEBHOOK_URL")
+
 
 # --- Global Cache ---
 cache = {
     "values": {}, # Dict mapping category name to list of items
-    "value_changes": {}, # Dict mapping category name to list of changes
+    "value_changes": {}, # Dict mapping category name to list of recent changes (for API endpoint)
     "user_scammers": [],
     "server_scammers": [],
     "dwc": [],
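All of the tunables above are now environment-driven. A minimal sketch of configuring them locally; the variable names come from this diff, the values are illustrative (the spreadsheet IDs shown are the defaults baked into main.py):

```python
# Illustrative configuration only; adjust values for your deployment.
import os

os.environ["SCAMMER_DWC_SPREADSHEET_ID"] = "1sgkhBNGw_r6tBIxvdeXaI0bVmWBeACN4jiw_oDEeXLw"
os.environ["VALUES_DUPE_SPREADSHEET_ID"] = "1Toe07o3P517q8sm9Qb1e5xyFWCuwgskj71IKJwJNfNU"
os.environ["CACHE_UPDATE_INTERVAL_SECONDS"] = "120"  # override the 5-minute default
os.environ["SCAMMER_WEBHOOK_URL"] = "https://discord.com/api/webhooks/..."  # optional
os.environ["VALUE_WEBHOOK_URL"] = "https://discord.com/api/webhooks/..."    # optional
```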
@@ -91,35 +93,44 @@ def quote_sheet_name(name: str) -> str:
     return name
 
 def init_google_sheets(scopes=SCOPES):
-    """Initialize Google Sheets credentials from environment variable"""
+    """Initialize Google Sheets credentials from environment variable or file"""
     global sheets_service, cache
     try:
         creds_json_str = os.getenv('CREDENTIALS_JSON')
-        if
-        logger.
-
-
-
-            creds_json,
-            scopes=scopes
-        )
-        sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False) # Disable discovery cache
-        logger.info("Google Sheets service initialized successfully from ENV VAR.")
-        cache["service_available"] = True
-        return sheets_service
-    except Exception as e:
-        logger.error(f"Error initializing Google Sheets from ENV VAR: {e}")
-        # Fallback attempt
-        try:
-            logger.info("Falling back to loading credentials from file 'credentials.json'")
-            creds = ServiceAccountCredentials.from_service_account_file(
-                'credentials.json',
+        if creds_json_str:
+            logger.info("Attempting to load credentials from CREDENTIALS_JSON environment variable.")
+            creds_json = json.loads(creds_json_str)
+            creds = ServiceAccountCredentials.from_service_account_info(
+                creds_json,
                 scopes=scopes
             )
+            sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False) # Disable discovery cache
+            logger.info("Google Sheets service initialized successfully from ENV VAR.")
             cache["service_available"] = True
             return sheets_service
+        else:
+            logger.info("CREDENTIALS_JSON environment variable not found. Falling back to file.")
+            raise ValueError("CREDENTIALS_JSON not set") # Trigger fallback explicitly
+
+    except Exception as e:
+        logger.warning(f"Error initializing Google Sheets from ENV VAR: {e}. Trying file...")
+        try:
+            # Fallback to loading credentials from file 'credentials.json'
+            creds_file = 'credentials.json'
+            if os.path.exists(creds_file):
+                logger.info(f"Attempting to load credentials from file '{creds_file}'")
+                creds = ServiceAccountCredentials.from_service_account_file(
+                    creds_file,
+                    scopes=scopes
+                )
+                sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False)
+                logger.info("Google Sheets service initialized successfully from file.")
+                cache["service_available"] = True
+                return sheets_service
+            else:
+                logger.error(f"Credentials file '{creds_file}' not found.")
+                raise FileNotFoundError(f"'{creds_file}' not found")
+
         except Exception as file_e:
             logger.error(f"Error loading credentials from file: {file_e}")
             logger.critical("Google Sheets service could not be initialized. API will be limited.")
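Since credentials are now read from CREDENTIALS_JSON before falling back to credentials.json, one way to produce that variable is to serialize an existing service-account key file. A hedged one-off sketch (the file name mirrors the fallback path in this diff):

```python
# Hypothetical helper: pack a service-account key file into the
# CREDENTIALS_JSON environment variable that init_google_sheets() reads first.
import json
import os

with open("credentials.json") as f:  # assumed standard service-account key file
    os.environ["CREDENTIALS_JSON"] = json.dumps(json.load(f))
```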
@@ -154,8 +165,6 @@ def extract_image_url(formula, drive_url=None):
     if formula.startswith('=IMAGE('):
         match = re.search(r'=IMAGE\("([^"]+)"', formula)
         if match: return match.group(1)
-    # If it wasn't a formula or direct URL, and no drive_url, return empty or original?
-    # Let's assume if it's not a recognizable URL/formula, it's not an image source.
     return '' # Return empty string if no valid URL found
 
 def format_currency(value: Any) -> Optional[str]:
@@ -207,6 +216,8 @@ async def get_roblox_user_id(session: aiohttp.ClientSession, username: str):
                 data = await response.json()
                 if data and data.get("data") and len(data["data"]) > 0:
                     return data["data"][0].get("id")
+            else:
+                logger.warning(f"Roblox API returned status {response.status} for username '{username}'")
             return None
     except asyncio.TimeoutError:
         logger.warning(f"Timeout fetching Roblox User ID for {username}")
@@ -215,7 +226,7 @@ async def get_roblox_user_id(session: aiohttp.ClientSession, username: str):
         logger.warning(f"Network error fetching Roblox User ID for {username}: {e}")
         return None
     except Exception as e:
-        logger.error(f"Unexpected exception fetching Roblox User ID for {username}: {e}")
+        logger.error(f"Unexpected exception fetching Roblox User ID for {username}: {e}", exc_info=True)
         return None
 
 async def get_roblox_avatar_url(session: aiohttp.ClientSession, user_id: int):
@@ -227,6 +238,8 @@ async def get_roblox_avatar_url(session: aiohttp.ClientSession, user_id: int):
                 data = await response.json()
                 if data and data.get("data") and len(data["data"]) > 0:
                     return data["data"][0].get("imageUrl")
+            else:
+                logger.warning(f"Roblox Thumbnail API returned status {response.status} for user ID {user_id}")
             return None
     except asyncio.TimeoutError:
         logger.warning(f"Timeout fetching Roblox avatar for User ID {user_id}")
@@ -235,22 +248,23 @@ async def get_roblox_avatar_url(session: aiohttp.ClientSession, user_id: int):
         logger.warning(f"Network error fetching Roblox avatar for User ID {user_id}: {e}")
         return None
     except Exception as e:
-        logger.error(f"Unexpected exception fetching Roblox avatar for User ID {user_id}: {e}")
+        logger.error(f"Unexpected exception fetching Roblox avatar for User ID {user_id}: {e}", exc_info=True)
         return None
 
 
 # --- Data Processing Functions ---
 # These functions take raw rows from the sheet and process them.
-# They are now independent of *which* sheet they came from, as long as the structure matches.
 
 def process_sheet_data(values): # For Value Categories
     if not values: return []
     processed_data = []
     for row in values: # Expected range like B6:P
         if not row or not any(str(cell).strip() for cell in row if cell is not None): continue
+        # Skip header-like rows (e.g., "LEVEL 1 | HYPERCHROMES" in column F/index 4)
+        if len(row) > 4 and isinstance(row[4], str) and re.search(r'LEVEL \d+ \|', row[4]):
+            continue
 
         # Indices based on B6:P (0-indexed from B)
-        # B=0, C=1, D=2, E=3, F=4, G=5, H=6, I=7, J=8, K=9, L=10, M=11, N=12, O=13, P=14
         icon_formula = row[0] if len(row) > 0 else ''
         name = row[2] if len(row) > 2 else 'N/A'
         value_raw = row[4] if len(row) > 4 else 'N/A'
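The new header guard skips sheet rows whose value column carries a section banner rather than data. A quick standalone check of what the regex accepts:

```python
import re

pattern = re.compile(r'LEVEL \d+ \|')
print(bool(pattern.search('LEVEL 1 | HYPERCHROMES')))  # True  -> row is skipped as a header
print(bool(pattern.search('150,000')))                 # False -> row is processed normally
```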
@@ -260,15 +274,13 @@ def process_sheet_data(values): # For Value Categories
         notes = row[12] if len(row) > 12 else ''
         drive_url = row[14] if len(row) > 14 else None # Column P
 
-
-        if
-            continue
-        if clean_string(name) == 'N/A':
+        cleaned_name = clean_string(name)
+        if cleaned_name == 'N/A': # Skip rows without a valid name
             continue
 
         processed_item = {
             'icon': extract_image_url(icon_formula, drive_url),
-            'name':
+            'name': cleaned_name,
             'value': format_currency(value_raw),
             'dupedValue': format_currency(duped_value_raw),
             'marketValue': format_currency(market_value_raw),
@@ -284,7 +296,6 @@ def process_user_scammer_data(values): # For User Scammer Sheet
     for row in values: # Expected range like B6:G
         if not row or len(row) < 2: continue
         # Indices based on B6:G (0-indexed from B)
-        # B=0, C=1, D=2, E=3, F=4, G=5
         discord_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
         roblox_username = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
         if not discord_id and not roblox_username: continue
@@ -295,7 +306,7 @@ def process_user_scammer_data(values): # For User Scammer Sheet
             'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
             'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
             'alt_accounts': parse_alt_accounts(row[5]) if len(row) > 5 else [], # Col G
-            'roblox_avatar_url': None
+            'roblox_avatar_url': None # Will be filled later
         }
         processed_data.append(processed_item)
     return processed_data
@@ -306,7 +317,6 @@ def process_server_scammer_data(values): # For Server Scammer Sheet
     for row in values: # Expected range like B6:F
         if not row or len(row) < 2: continue
         # Indices based on B6:F (0-indexed from B)
-        # B=0, C=1, D=2, E=3, F=4
         server_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
         server_name = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
         if not server_id and not server_name: continue
@@ -326,7 +336,6 @@ def process_dwc_data(values): # For DWC Sheet
     for row in values: # Expected range like B6:G
         if not row or len(row) < 3: continue
         # Indices based on B6:G (0-indexed from B)
-        # B=0, C=1, D=2, E=3, F=4, G=5
         user_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
         server_id = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
         roblox_user = clean_string_optional(row[2]) if len(row) > 2 else None # Col D
@@ -339,7 +348,7 @@ def process_dwc_data(values): # For DWC Sheet
             'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
             'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
             'alt_accounts': parse_alt_accounts(row[5]) if len(row) > 5 else [], # Col G
-            'roblox_avatar_url': None
+            'roblox_avatar_url': None # Will be filled later
         }
         processed_data.append(processed_item)
     return processed_data
@@ -383,30 +392,62 @@ async def fetch_batch_ranges_async(spreadsheet_id: str, ranges: List[str], value
             status = error_details.get('status')
             message = error_details.get('message')
             logger.error(f"Google API HTTP Error during batch fetch for {spreadsheet_id}: Status={status}, Message={message}")
-
+            # Handle potential API key/permission issues explicitly
+            if status == 'PERMISSION_DENIED' or status == 'UNAUTHENTICATED':
+                logger.critical(f"Authentication/Permission Error accessing {spreadsheet_id}. Please check credentials/API access.")
+                cache["service_available"] = False # Mark service as down
+                sheets_service = None # Reset service
+            elif status == 'NOT_FOUND':
+                logger.error(f"Spreadsheet or Range not found error for {spreadsheet_id}. Ranges: {ranges}. Check IDs and Sheet Names.")
+            raise e # Re-raise after logging
     except Exception as e:
-        logger.error(f"Error during batch fetching from {spreadsheet_id} for ranges {ranges}: {e}")
+        logger.error(f"Error during batch fetching from {spreadsheet_id} for ranges {ranges}: {e}", exc_info=True)
         raise e
 
-# ---
+# --- Webhook Sending ---
+async def send_webhook_notification(session: aiohttp.ClientSession, webhook_url: str, embed: Dict):
+    """Sends a Discord webhook notification with the provided embed."""
+    if not webhook_url:
+        # logger.debug("Webhook URL not configured. Skipping notification.") # Optional: Log less verbosely
+        return
+    if not embed:
+        logger.warning("Attempted to send webhook with empty embed.")
+        return
+
+    webhook_data = {"embeds": [embed]}
+    try:
+        async with session.post(webhook_url, json=webhook_data, timeout=aiohttp.ClientTimeout(total=10)) as response:
+            if response.status not in [200, 204]:
+                logger.warning(f"Failed to send webhook (Status: {response.status}): {await response.text()}")
+            # else: # Optional: Log success, can be verbose
+            #     logger.info(f"Webhook notification sent successfully to {webhook_url[:30]}...")
+    except asyncio.TimeoutError:
+        logger.warning(f"Timeout sending webhook to {webhook_url[:30]}...")
+    except aiohttp.ClientError as e:
+        logger.error(f"Network error sending webhook to {webhook_url[:30]}...: {e}")
+    except Exception as e:
+        logger.error(f"Unexpected error sending webhook: {e}", exc_info=True)
+
+# --- Background Cache Update Task (Refactored for Batching & Webhooks) ---
 
 async def update_cache_periodically():
-    """Fetches data
+    """Fetches data, processes, detects changes/new entries, sends webhooks, and updates cache."""
     global cache
-    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=
+    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30)) as session: # Overall session timeout
         while True:
             if not cache["service_available"]:
                 logger.info("Attempting to re-initialize Google Sheets service...")
                 init_google_sheets()
                 if not cache["service_available"]:
                     logger.warning("Google Sheets service still unavailable, skipping cache update cycle.")
-                    await asyncio.sleep(CACHE_UPDATE_INTERVAL_SECONDS * 2)
+                    await asyncio.sleep(CACHE_UPDATE_INTERVAL_SECONDS * 2) # Wait longer if service is down
                    continue
                 else:
                     logger.info("Google Sheets service re-initialized. Proceeding with cache update.")
 
             logger.info("Starting cache update cycle...")
             start_time = datetime.now(timezone.utc)
+            webhook_tasks = [] # Store webhook sending tasks
 
             # Prepare temporary storage for fetched data
             fetched_values_categories = {} # { "CategoryName": [items...] }
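A hedged usage sketch for the send_webhook_notification() helper added above; the webhook URL is a placeholder and the embed mirrors the shape built later in this file:

```python
import asyncio
from datetime import datetime, timezone

import aiohttp

from main import send_webhook_notification  # helper introduced in this commit

async def demo():
    embed = {
        "title": "Value Update: Example Item (Example Category)",
        "color": 3447003,  # same blue used for value-change embeds below
        "timestamp": datetime.now(timezone.utc).isoformat(),
    }
    async with aiohttp.ClientSession() as session:
        # Placeholder URL; use a real Discord webhook endpoint.
        await send_webhook_notification(session, "https://discord.com/api/webhooks/...", embed)

asyncio.run(demo())
```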
@@ -420,7 +461,6 @@ async def update_cache_periodically():
 
             try:
                 # --- Define Ranges and Processors ---
-                # Scammer/DWC Spreadsheet
                 scammer_dwc_ranges = [
                     f"{quote_sheet_name(USER_SCAMMER_SHEET)}!{USER_SCAMMER_RANGE}",
                     f"{quote_sheet_name(SERVER_SCAMMER_SHEET)}!{SERVER_SCAMMER_RANGE}",
@@ -431,13 +471,12 @@ async def update_cache_periodically():
                     SERVER_SCAMMER_SHEET: process_server_scammer_data,
                     DWC_SHEET: process_dwc_data,
                 }
-                scammer_dwc_target_key_map = {
+                scammer_dwc_target_key_map = {
                     USER_SCAMMER_SHEET: "user_scammers",
                     SERVER_SCAMMER_SHEET: "server_scammers",
                     DWC_SHEET: "dwc",
                 }
 
-                # Values/Dupes Spreadsheet
                 values_dupes_ranges = [f"{quote_sheet_name(DUPE_LIST_SHEET)}!{DUPE_LIST_RANGE}"]
                 values_dupes_ranges.extend([f"{quote_sheet_name(cat)}!{VALUES_RANGE}" for cat in CATEGORIES])
@@ -446,12 +485,12 @@ async def update_cache_periodically():
                     "scammer_dwc_batch": fetch_batch_ranges_async(
                         SCAMMER_DWC_SPREADSHEET_ID,
                         scammer_dwc_ranges,
-                        value_render_option='FORMATTED_VALUE'
+                        value_render_option='FORMATTED_VALUE'
                     ),
                     "values_dupes_batch": fetch_batch_ranges_async(
                         VALUES_DUPE_SPREADSHEET_ID,
                         values_dupes_ranges,
-                        value_render_option='FORMATTED_VALUE'
+                        value_render_option='FORMATTED_VALUE'
                     )
                 }
@@ -469,7 +508,6 @@ async def update_cache_periodically():
                         logger.error(f"Failed to fetch batch data for {key}: {result}")
                         current_errors[key] = str(result)
                     else:
-                        # Store the raw valueRanges list
                         if key == "scammer_dwc_batch":
                             raw_scammer_dwc_results = result
                         elif key == "values_dupes_batch":
@@ -480,12 +518,11 @@ async def update_cache_periodically():
                     logger.info(f"Processing {len(raw_scammer_dwc_results)} valueRanges from Scammer/DWC sheet...")
                     for vr in raw_scammer_dwc_results:
                         range_str = vr.get('range', '')
-                        # Extract sheet name (handle quotes)
                         match = re.match(r"^'?([^'!]+)'?!", range_str)
                         if not match:
                             logger.warning(f"Could not extract sheet name from range '{range_str}' in Scammer/DWC response.")
                             continue
-                        sheet_name = match.group(1).replace("''", "'")
+                        sheet_name = match.group(1).replace("''", "'")
 
                         if sheet_name in scammer_dwc_processor_map:
                             processor = scammer_dwc_processor_map[sheet_name]
@@ -493,7 +530,7 @@ async def update_cache_periodically():
                             values = vr.get('values', [])
                             try:
                                 processed_data = processor(values)
-                                new_cache_data[target_key] = processed_data
+                                new_cache_data[target_key] = processed_data # Store fetched data temporarily
                                 logger.info(f"Processed {len(processed_data)} items for {sheet_name} -> {target_key}")
                             except Exception as e:
                                 logger.error(f"Error processing data for {sheet_name} using {processor.__name__}: {e}", exc_info=True)
@@ -516,11 +553,11 @@ async def update_cache_periodically():
                         try:
                             if sheet_name == DUPE_LIST_SHEET:
                                 processed_data = process_dupe_list_data(values)
-                                new_cache_data["dupes"] = processed_data
+                                new_cache_data["dupes"] = processed_data # Store fetched data temporarily
                                 logger.info(f"Processed {len(processed_data)} items for {DUPE_LIST_SHEET} -> dupes")
                             elif sheet_name in CATEGORIES:
                                 processed_data = process_sheet_data(values)
-                                fetched_values_categories[sheet_name] = processed_data
+                                fetched_values_categories[sheet_name] = processed_data # Store fetched data temporarily
                                 logger.info(f"Processed {len(processed_data)} items for Category: {sheet_name}")
                             else:
                                 logger.warning(f"Unrecognized sheet name '{sheet_name}' derived from range '{range_str}' in Values/Dupes sheet.")
@@ -529,45 +566,10 @@ async def update_cache_periodically():
                             logger.error(f"Error processing data for {sheet_name}: {e}", exc_info=True)
                             current_errors[f"process_{target_key}"] = str(e)
 
-            # ---
-            logger.info("
-            current_time = datetime.now(timezone.utc)
-            detected_value_changes = {}
-            fields_to_compare = ['value', 'dupedValue', 'marketValue']
-
-            if "values" not in cache: cache["values"] = {} # Ensure exists
-
-            for category, new_items in fetched_values_categories.items():
-                old_items_dict = {item['name']: item for item in cache["values"].get(category, [])}
-                category_changes = []
-
-                for new_item in new_items:
-                    item_name = new_item.get('name')
-                    if not item_name or item_name == 'N/A': continue
-
-                    old_item = old_items_dict.get(item_name)
-                    if old_item: # Check existing item for changes
-                        for field in fields_to_compare:
-                            old_val_str = old_item.get(field, 'N/A')
-                            new_val_str = new_item.get(field, 'N/A')
-                            old_norm = parse_cached_currency(old_val_str) if parse_cached_currency(old_val_str) is not None else old_val_str
-                            new_norm = parse_cached_currency(new_val_str) if parse_cached_currency(new_val_str) is not None else new_val_str
-
-                            if old_norm != new_norm:
-                                logger.info(f"Change detected in {category}: {item_name} - {field}: '{old_val_str}' -> '{new_val_str}'")
-                                category_changes.append({
-                                    "item_name": item_name, "field": field,
-                                    "old_value": old_val_str if old_val_str is not None else "N/A",
-                                    "new_value": new_val_str if new_val_str is not None else "N/A",
-                                    "timestamp": current_time.isoformat()
-                                })
-                if category_changes:
-                    detected_value_changes[category] = category_changes
-
-            # --- Fetch Roblox Avatars ---
-            logger.info("Fetching Roblox avatars...")
+            # --- Fetch Roblox Avatars (for new data before comparison/webhook) ---
+            logger.info("Fetching Roblox avatars for newly processed data...")
             avatar_tasks = []
-            # Combine lists needing avatars
+            # Combine lists needing avatars from the *newly fetched* data
             entries_needing_avatars = new_cache_data.get("user_scammers", []) + new_cache_data.get("dwc", [])
             for entry in entries_needing_avatars:
                 if entry.get('roblox_username'):
@@ -575,7 +577,156 @@ async def update_cache_periodically():
                     avatar_tasks.append(fetch_avatar_for_entry_update(session, entry))
             if avatar_tasks:
                 await asyncio.gather(*avatar_tasks) # Exceptions logged within helper
-                logger.info(f"Finished fetching avatars for {len(avatar_tasks)} potential entries.")
+                logger.info(f"Finished fetching avatars for {len(avatar_tasks)} potential new entries.")
+
+            # --- Change Detection & Webhook Preparation (BEFORE Cache Update) ---
+            current_time = datetime.now(timezone.utc)
+            timestamp_iso = current_time.isoformat()
+
+            # 1. Value Changes (Existing Logic + Webhook Prep)
+            detected_value_changes_for_api = {} # For the /api/value-changes endpoint
+            if "values" not in cache: cache["values"] = {} # Ensure exists for comparison
+            if VALUE_WEBHOOK_URL and not any(k.startswith("process_values_") for k in current_errors) and "values_dupes_batch" not in current_errors:
+                logger.info("Detecting value changes for webhooks...")
+                fields_to_compare = ['value', 'dupedValue', 'marketValue']
+                for category, new_items in fetched_values_categories.items():
+                    old_items_dict = {item['name']: item for item in cache["values"].get(category, [])}
+                    category_changes_for_api = []
+
+                    for new_item in new_items:
+                        item_name = new_item.get('name')
+                        if not item_name or item_name == 'N/A': continue
+
+                        old_item = old_items_dict.get(item_name)
+                        if old_item: # Check existing item for changes
+                            for field in fields_to_compare:
+                                old_val_str = old_item.get(field, 'N/A')
+                                new_val_str = new_item.get(field, 'N/A')
+                                # Simple string comparison is sufficient here as they are formatted consistently
+                                if old_val_str != new_val_str:
+                                    logger.info(f"Change detected in {category}: {item_name} - {field}: '{old_val_str}' -> '{new_val_str}'")
+                                    change_info = {
+                                        "item_name": item_name, "field": field,
+                                        "old_value": old_val_str if old_val_str is not None else "N/A",
+                                        "new_value": new_val_str if new_val_str is not None else "N/A",
+                                        "timestamp": timestamp_iso
+                                    }
+                                    category_changes_for_api.append(change_info)
+
+                                    # Prepare webhook embed
+                                    embed = {
+                                        "title": f"Value Update: {item_name} ({category})",
+                                        "color": 3447003, # Blue
+                                        "fields": [
+                                            {"name": "Field Changed", "value": field, "inline": True},
+                                            {"name": "Old Value", "value": f"`{change_info['old_value']}`", "inline": True},
+                                            {"name": "New Value", "value": f"`{change_info['new_value']}`", "inline": True},
+                                            {"name": "Item Notes", "value": new_item.get('notes', 'N/A')[:1020] or 'N/A', "inline": False}, # Limit notes length
+                                        ],
+                                        "timestamp": timestamp_iso
+                                    }
+                                    if new_item.get('icon'):
+                                        embed["thumbnail"] = {"url": new_item['icon']}
+
+                                    webhook_tasks.append(send_webhook_notification(session, VALUE_WEBHOOK_URL, embed))
+                    if category_changes_for_api:
+                        detected_value_changes_for_api[category] = category_changes_for_api
+                logger.info(f"Prepared {len(webhook_tasks)} value change webhooks.")
+            elif not VALUE_WEBHOOK_URL:
+                logger.info("VALUE_WEBHOOK_URL not set, skipping value change webhook detection.")
+            else:
+                logger.warning("Skipping value change webhook detection due to fetch/processing errors.")
+
+
+            # 2. New Scammers / DWC (New Logic + Webhook Prep)
+            if SCAMMER_WEBHOOK_URL and "scammer_dwc_batch" not in current_errors and not any(k.startswith("process_") and k in ["process_user_scammers", "process_server_scammers", "process_dwc"] for k in current_errors):
+                logger.info("Detecting new scammer/DWC entries for webhooks...")
+                initial_webhook_task_count = len(webhook_tasks)
+
+                # User Scammers
+                old_user_keys = set((item.get('discord_id'), item.get('roblox_username')) for item in cache.get("user_scammers", []))
+                for item in new_cache_data.get("user_scammers", []):
+                    key = (item.get('discord_id'), item.get('roblox_username'))
+                    if key not in old_user_keys:
+                        logger.info(f"New User Scammer detected: Discord={item.get('discord_id')}, Roblox={item.get('roblox_username')}")
+                        embed = {
+                            "title": "🚨 New User Scammer Added",
+                            "color": 15158332, # Red
+                            "fields": [
+                                {"name": "Discord ID", "value": f"`{item.get('discord_id', 'N/A')}`", "inline": True},
+                                {"name": "Roblox User", "value": f"`{item.get('roblox_username', 'N/A')}`", "inline": True},
+                                {"name": "Scam Type", "value": item.get('scam_type', 'N/A'), "inline": False},
+                                {"name": "Explanation", "value": item.get('explanation', 'N/A')[:1020] or 'N/A', "inline": False},
+                            ],
+                            "timestamp": timestamp_iso
+                        }
+                        if item.get('evidence_link'):
+                            embed["fields"].append({"name": "Evidence", "value": item['evidence_link'], "inline": False})
+                        if item.get('alt_accounts'):
+                            embed["fields"].append({"name": "Alt Accounts", "value": ", ".join([f"`{a}`" for a in item['alt_accounts']]), "inline": False})
+                        if item.get('roblox_avatar_url'):
+                            embed["thumbnail"] = {"url": item['roblox_avatar_url']}
+                        webhook_tasks.append(send_webhook_notification(session, SCAMMER_WEBHOOK_URL, embed))
+
+                # Server Scammers
+                old_server_keys = set((item.get('server_id'), item.get('server_name')) for item in cache.get("server_scammers", []))
+                for item in new_cache_data.get("server_scammers", []):
+                    key = (item.get('server_id'), item.get('server_name'))
+                    if key not in old_server_keys:
+                        logger.info(f"New Server Scammer detected: ID={item.get('server_id')}, Name={item.get('server_name')}")
+                        embed = {
+                            "title": "🚨 New Server Scammer Added",
+                            "color": 15158332, # Red
+                            "fields": [
+                                {"name": "Server ID", "value": f"`{item.get('server_id', 'N/A')}`", "inline": True},
+                                {"name": "Server Name", "value": f"`{item.get('server_name', 'N/A')}`", "inline": True},
+                                {"name": "Scam Type", "value": item.get('scam_type', 'N/A'), "inline": False},
+                                {"name": "Explanation", "value": item.get('explanation', 'N/A')[:1020] or 'N/A', "inline": False},
+                            ],
+                            "timestamp": timestamp_iso
+                        }
+                        if item.get('evidence_link'):
+                            embed["fields"].append({"name": "Evidence", "value": item['evidence_link'], "inline": False})
+                        webhook_tasks.append(send_webhook_notification(session, SCAMMER_WEBHOOK_URL, embed))
+
+                # DWC Entries
+                old_dwc_keys = set((item.get('discord_user_id'), item.get('discord_server_id'), item.get('roblox_username')) for item in cache.get("dwc", []))
+                for item in new_cache_data.get("dwc", []):
+                    key = (item.get('discord_user_id'), item.get('discord_server_id'), item.get('roblox_username'))
+                    if key not in old_dwc_keys:
+                        logger.info(f"New DWC Entry detected: User={item.get('discord_user_id')}, Server={item.get('discord_server_id')}, Roblox={item.get('roblox_username')}")
+                        embed = {
+                            "title": "⚠️ New DWC Entry Added",
+                            "color": 15105570, # Orange/Dark Yellow
+                            "fields": [
+                                {"name": "Discord User ID", "value": f"`{item.get('discord_user_id', 'N/A')}`", "inline": True},
+                                {"name": "Discord Server ID", "value": f"`{item.get('discord_server_id', 'N/A')}`", "inline": True},
+                                {"name": "Roblox User", "value": f"`{item.get('roblox_username', 'N/A')}`", "inline": True},
+                                {"name": "Explanation", "value": item.get('explanation', 'N/A')[:1020] or 'N/A', "inline": False},
+                            ],
+                            "timestamp": timestamp_iso
+                        }
+                        if item.get('evidence_link'):
+                            embed["fields"].append({"name": "Evidence", "value": item['evidence_link'], "inline": False})
+                        if item.get('alt_accounts'):
+                            embed["fields"].append({"name": "Alt Accounts", "value": ", ".join([f"`{a}`" for a in item['alt_accounts']]), "inline": False})
+                        if item.get('roblox_avatar_url'):
+                            embed["thumbnail"] = {"url": item['roblox_avatar_url']}
+                        webhook_tasks.append(send_webhook_notification(session, SCAMMER_WEBHOOK_URL, embed))
+
+                logger.info(f"Prepared {len(webhook_tasks) - initial_webhook_task_count} new scammer/DWC webhooks.")
+            elif not SCAMMER_WEBHOOK_URL:
+                logger.info("SCAMMER_WEBHOOK_URL not set, skipping new scammer webhook detection.")
+            else:
+                logger.warning("Skipping new scammer webhook detection due to fetch/processing errors.")
+
+            # --- Send Webhooks Concurrently ---
+            if webhook_tasks:
+                logger.info(f"Sending {len(webhook_tasks)} webhook notifications...")
+                await asyncio.gather(*webhook_tasks)
+                logger.info("Finished sending webhook notifications.")
+            else:
+                logger.info("No webhooks to send for this cycle.")
 
 
             # --- Final Cache Update ---
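New scammer/DWC rows are detected purely by set difference on identity tuples against the previous cache snapshot, as the block above shows. A minimal standalone sketch with invented sample data:

```python
# Invented sample data; mirrors the keying used for user scammers above.
old_user_keys = {("123", "alice"), ("456", "bob")}
new_rows = [
    {"discord_id": "123", "roblox_username": "alice"},  # already known -> no webhook
    {"discord_id": "789", "roblox_username": "carol"},  # unseen key -> webhook fires
]
for item in new_rows:
    key = (item.get("discord_id"), item.get("roblox_username"))
    if key not in old_user_keys:
        print("new scammer entry:", key)  # -> ('789', 'carol')
```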
@@ -587,9 +738,10 @@ async def update_cache_periodically():
                 cache["server_scammers"] = new_cache_data["server_scammers"]
                 cache["dwc"] = new_cache_data["dwc"]
                 cache["dupes"] = new_cache_data["dupes"]
-                cache["value_changes"] =
+                cache["value_changes"] = detected_value_changes_for_api # Store the detected changes
                 cache["last_updated"] = current_time
                 cache["is_ready"] = True
+                cache["service_available"] = True # Mark as available on success
                 update_occurred = True
                 logger.info(f"Cache update cycle completed successfully.")
             else: # Errors occurred, attempt partial update
@@ -598,46 +750,60 @@ async def update_cache_periodically():
 
                 # Update values only if the values/dupes batch succeeded AND processing succeeded
                 if "values_dupes_batch" not in current_errors and not any(k.startswith("process_values_") for k in current_errors):
-                    if cache
-
-
-
-
+                    # Check if fetched data is different from cache before updating
+                    if cache.get("values") != fetched_values_categories:
+                        cache["values"] = fetched_values_categories
+                        cache["value_changes"] = detected_value_changes_for_api # Update changes along with values
+                        partial_update_details.append("values")
+                        update_occurred = True
+                else:
+                    logger.warning("Skipping update for 'values' due to errors.")
+
 
                 # Update dupes only if the values/dupes batch succeeded AND processing succeeded
                 if "values_dupes_batch" not in current_errors and "process_dupes" not in current_errors:
-                    if cache
+                    if cache.get("dupes") != new_cache_data["dupes"]:
                         cache["dupes"] = new_cache_data["dupes"]
                         partial_update_details.append("dupes")
                         update_occurred = True
+                else:
+                    logger.warning("Skipping update for 'dupes' due to errors.")
 
                 # Update scammer/DWC sections if their batch succeeded AND processing succeeded
                 if "scammer_dwc_batch" not in current_errors:
                     for key in ["user_scammers", "server_scammers", "dwc"]:
                         process_error_key = f"process_{key}"
                         if process_error_key not in current_errors:
-                            if cache
+                            if cache.get(key) != new_cache_data[key]:
                                 cache[key] = new_cache_data[key]
                                 partial_update_details.append(key)
                                 update_occurred = True
+                        else:
+                            logger.warning(f"Skipping update for '{key}' due to processing error.")
+                else:
+                    logger.warning("Skipping update for 'user_scammers', 'server_scammers', 'dwc' due to batch fetch error.")
+
 
                 if update_occurred:
                     cache["last_updated"] = current_time # Mark partial update time
                     cache["is_ready"] = True # Allow access even if partial
+                    # Keep service_available as potentially false if there were fetch errors
                     logger.info(f"Partially updated cache sections: {', '.join(partial_update_details)}")
                 else:
                     logger.error(f"Cache update cycle failed, and no parts could be updated based on errors. Errors: {current_errors}")
-                    # Keep cache["is_ready"] as it was.
+                    # Keep cache["is_ready"] as it was. Don't update timestamp.
 
         except Exception as e:
             logger.exception(f"Critical error during cache update cycle: {e}")
+            # If a critical error happens (e.g., network error during fetch), mark service potentially unavailable
             if isinstance(e, (aiohttp.ClientError, HttpError, asyncio.TimeoutError)):
                 logger.warning("Communication error detected, will re-check service availability next cycle.")
+                # service_available might have already been set to False by fetch_batch_ranges_async
 
         # --- Wait for the next cycle ---
         end_time = datetime.now(timezone.utc)
         duration = (end_time - start_time).total_seconds()
-        wait_time = max(10, CACHE_UPDATE_INTERVAL_SECONDS - duration)
+        wait_time = max(10, CACHE_UPDATE_INTERVAL_SECONDS - duration) # Ensure at least 10s wait
         logger.info(f"Cache update cycle duration: {duration:.2f}s. Waiting {wait_time:.2f}s for next cycle.")
         await asyncio.sleep(wait_time)
 
@@ -647,9 +813,7 @@ async def fetch_avatar_for_entry_update(session: aiohttp.ClientSession, entry: d
     roblox_username = entry.get('roblox_username')
     if not roblox_username: return
 
-    current_avatar = entry.get('roblox_avatar_url')
     new_avatar = None # Default to None
-
     try:
         user_id = await get_roblox_user_id(session, roblox_username)
         if user_id:
@@ -661,9 +825,8 @@ async def fetch_avatar_for_entry_update(session: aiohttp.ClientSession, entry: d
         # Keep new_avatar as None on error
 
     finally:
-        # Update the dict
-
-        entry['roblox_avatar_url'] = new_avatar
+        # Update the entry dict directly (no need to check if changed, just set it)
+        entry['roblox_avatar_url'] = new_avatar
 
 
 # --- FastAPI Startup Event ---
@@ -673,15 +836,27 @@ async def startup_event():
     if not cache["service_available"]:
         logger.warning("Google Sheets service not available at startup. Will attempt re-init in background task.")
     logger.info("Starting background cache update task...")
+    # Check for webhook URLs at startup
+    if not SCAMMER_WEBHOOK_URL:
+        logger.warning("SCAMMER_WEBHOOK_URL environment variable not set. New scammer notifications disabled.")
+    if not VALUE_WEBHOOK_URL:
+        logger.warning("VALUE_WEBHOOK_URL environment variable not set. Value change notifications disabled.")
     asyncio.create_task(update_cache_periodically())
 
 
-# --- API Endpoints
+# --- API Endpoints ---
 
 def check_cache_readiness():
     """Reusable check for API endpoints - Checks cache readiness"""
     if not cache["is_ready"]:
-
+        # Be more specific if service is known to be down
+        if not cache["service_available"]:
+            raise HTTPException(status_code=503, detail="Service temporarily unavailable due to backend connection issues. Please try again later.")
+        else:
+            raise HTTPException(status_code=503, detail="Cache is initializing or data is currently unavailable. Please try again shortly.")
+        # Optional: Add check for staleness?
+        # if cache["last_updated"] and (datetime.now(timezone.utc) - cache["last_updated"]).total_seconds() > CACHE_UPDATE_INTERVAL_SECONDS * 3:
+        #     raise HTTPException(status_code=503, detail="Data may be stale. Update in progress or backend issue.")
 
 @app.get("/")
 async def root():
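check_cache_readiness() now distinguishes a backend outage from a cold cache. A hedged sketch of exercising both 503 paths with FastAPI's test client (assumes main.py is importable and that importing it is safe in your environment):

```python
from fastapi.testclient import TestClient

from main import app, cache  # objects defined in main.py

client = TestClient(app)
cache["is_ready"] = False

cache["service_available"] = False
print(client.get("/api/values").status_code)  # 503 with the "backend connection issues" detail

cache["service_available"] = True
print(client.get("/api/values").status_code)  # 503 with the "cache is initializing" detail
```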
@@ -690,47 +865,59 @@ async def root():
 @app.get("/api/status")
 async def get_status():
     """Returns the current status of the cache and service availability"""
+    last_updated_iso = cache["last_updated"].isoformat() if cache["last_updated"] else None
     return {
         "cache_ready": cache["is_ready"],
         "sheets_service_available": cache["service_available"],
-        "last_updated":
+        "last_updated": last_updated_iso,
         "cached_items": {
-            "value_categories": len(cache
-            "user_scammers": len(cache
-            "server_scammers": len(cache
-            "dwc_entries": len(cache
-            "duped_usernames": len(cache
+            "value_categories": len(cache.get("values", {})),
+            "user_scammers": len(cache.get("user_scammers", [])),
+            "server_scammers": len(cache.get("server_scammers", [])),
+            "dwc_entries": len(cache.get("dwc", [])),
+            "duped_usernames": len(cache.get("dupes", [])),
         },
-        "
+        "value_change_categories_in_last_cycle": len(cache.get("value_changes", {}))
     }
 
 @app.get("/api/values")
 async def get_values():
     """Get all values data from cache"""
     check_cache_readiness()
-    return cache
 
 @app.get("/api/values/{category}")
 async def get_category_values(category: str):
     """Get values data for a specific category from cache"""
     check_cache_readiness()
-
     if not matched_category:
-
-
 
 @app.get("/api/value-changes/{category}")
 async def get_category_value_changes(category: str):
-    """Get detected value changes for a specific category."""
     check_cache_readiness()
-
     if not matched_category:
-
     return {matched_category: cache.get("value_changes", {}).get(matched_category, [])}
 
 @app.get("/api/value-changes")
 async def get_all_value_changes():
-    """Get all detected value changes from the last cycle."""
     check_cache_readiness()
     return cache.get("value_changes", {})
 
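A hedged sketch of reading the enriched /api/status payload; the field names come from this hunk, while host and port are assumptions:

```python
import requests  # assumption: any HTTP client would do

resp = requests.get("http://localhost:8000/api/status")
status = resp.json()
print(status["cache_ready"], status["sheets_service_available"], status["last_updated"])
print(status["cached_items"])  # counts per cached section, e.g. {"value_categories": ..., ...}
```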
@@ -739,17 +926,16 @@ async def get_scammers():
     """Get all scammer and DWC data (users, servers, dwc) from cache"""
     check_cache_readiness()
     return {
-        "users": cache
-        "servers": cache
-        "dwc": cache
     }
 
 @app.get("/api/dupes")
 async def get_dupes():
     """Get all duped usernames from cache"""
     check_cache_readiness()
-
-    return {"usernames": cache.get("dupes") or []}
 
 
 class UsernameCheck(BaseModel):
@@ -761,32 +947,33 @@ async def check_username(data: UsernameCheck):
     check_cache_readiness() # Use the standard readiness check
 
     username_to_check = data.username.strip().lower()
-
 
-    # Webhook notification (runs in background)
     if not is_duped:
-        webhook_url = os.getenv("WEBHOOK_URL")
         if webhook_url:
-
-
-
-
-
-
-                    "
-                    "description": f"Username `{data.username}` was checked but not found in the dupe database.",
                     "color": 16776960, # Yellow
                     "timestamp": datetime.now(timezone.utc).isoformat()
-                }]
             }
-
-
-
-
-
-            asyncio.create_task(send_webhook_notification())
         else:
-            logger.info("
 
     return {"username": data.username, "is_duped": is_duped}
 
@@ -794,12 +981,34 @@ async def check_username(data: UsernameCheck):
 @app.get("/health")
 def health_check():
     """Provides a health status of the API and its cache."""
     if not cache["is_ready"]:
-
-
-        return
-    if cache["last_updated"] and (datetime.now(timezone.utc) - cache["last_updated"]).total_seconds() > CACHE_UPDATE_INTERVAL_SECONDS * 3:
-        return {"status": "degraded", "reason": "Cache potentially stale (last update > 3 intervals ago)"}
-    return {"status": "ok"}
 
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
from fastapi import FastAPI, HTTPException
|
| 2 |
from fastapi.middleware.cors import CORSMiddleware
|
| 3 |
from pydantic import BaseModel
|
|
|
|
| 34 |
|
| 35 |
# Google Sheets Config
|
| 36 |
# Spreadsheet containing Scammer and DWC info
|
| 37 |
+
SCAMMER_DWC_SPREADSHEET_ID = os.getenv('SCAMMER_DWC_SPREADSHEET_ID', '1sgkhBNGw_r6tBIxvdeXaI0bVmWBeACN4jiw_oDEeXLw')
|
| 38 |
# Spreadsheet containing Value lists and Dupe list
|
| 39 |
+
VALUES_DUPE_SPREADSHEET_ID = os.getenv('VALUES_DUPE_SPREADSHEET_ID', '1Toe07o3P517q8sm9Qb1e5xyFWCuwgskj71IKJwJNfNU')
|
| 40 |
|
| 41 |
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
|
| 42 |
|
|
|
|
| 59 |
VALUES_RANGE = 'B6:P' # Range within each category sheet
|
| 60 |
|
| 61 |
# Cache Update Interval
|
| 62 |
+
CACHE_UPDATE_INTERVAL_SECONDS = int(os.getenv('CACHE_UPDATE_INTERVAL_SECONDS', 60 * 5)) # 5 minutes default
|
| 63 |
+
|
| 64 |
+
# Webhook URLs
|
| 65 |
+
SCAMMER_WEBHOOK_URL = os.getenv("SCAMMER_WEBHOOK_URL")
|
| 66 |
+
VALUE_WEBHOOK_URL = os.getenv("VALUE_WEBHOOK_URL")
|
| 67 |
+
|
| 68 |
|
| 69 |
# --- Global Cache ---
|
| 70 |
cache = {
|
| 71 |
"values": {}, # Dict mapping category name to list of items
|
| 72 |
+
"value_changes": {}, # Dict mapping category name to list of recent changes (for API endpoint)
|
| 73 |
"user_scammers": [],
|
| 74 |
"server_scammers": [],
|
| 75 |
"dwc": [],
|
|
|
|
| 93 |
return name
|
| 94 |
|
| 95 |
def init_google_sheets(scopes=SCOPES):
|
| 96 |
+
"""Initialize Google Sheets credentials from environment variable or file"""
|
| 97 |
global sheets_service, cache
|
| 98 |
try:
|
| 99 |
creds_json_str = os.getenv('CREDENTIALS_JSON')
|
| 100 |
+
if creds_json_str:
|
| 101 |
+
logger.info("Attempting to load credentials from CREDENTIALS_JSON environment variable.")
|
| 102 |
+
creds_json = json.loads(creds_json_str)
|
| 103 |
+
creds = ServiceAccountCredentials.from_service_account_info(
|
| 104 |
+
creds_json,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 105 |
scopes=scopes
|
| 106 |
)
|
| 107 |
+
sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False) # Disable discovery cache
|
| 108 |
+
logger.info("Google Sheets service initialized successfully from ENV VAR.")
|
| 109 |
cache["service_available"] = True
|
| 110 |
return sheets_service
|
| 111 |
+
else:
|
| 112 |
+
logger.info("CREDENTIALS_JSON environment variable not found. Falling back to file.")
|
| 113 |
+
raise ValueError("CREDENTIALS_JSON not set") # Trigger fallback explicitly
|
| 114 |
+
|
| 115 |
+
except Exception as e:
|
| 116 |
+
logger.warning(f"Error initializing Google Sheets from ENV VAR: {e}. Trying file...")
|
| 117 |
+
try:
|
| 118 |
+
# Fallback to loading credentials from file 'credentials.json'
|
| 119 |
+
creds_file = 'credentials.json'
|
| 120 |
+
if os.path.exists(creds_file):
|
| 121 |
+
logger.info(f"Attempting to load credentials from file '{creds_file}'")
|
| 122 |
+
creds = ServiceAccountCredentials.from_service_account_file(
|
| 123 |
+
creds_file,
|
| 124 |
+
scopes=scopes
|
| 125 |
+
)
|
| 126 |
+
sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False)
|
| 127 |
+
logger.info("Google Sheets service initialized successfully from file.")
|
| 128 |
+
cache["service_available"] = True
|
| 129 |
+
return sheets_service
|
| 130 |
+
else:
|
| 131 |
+
logger.error(f"Credentials file '{creds_file}' not found.")
|
| 132 |
+
raise FileNotFoundError(f"'{creds_file}' not found")
|
| 133 |
+
|
| 134 |
except Exception as file_e:
|
| 135 |
logger.error(f"Error loading credentials from file: {file_e}")
|
| 136 |
logger.critical("Google Sheets service could not be initialized. API will be limited.")
|
|
|
|
| 165 |
if formula.startswith('=IMAGE('):
|
| 166 |
match = re.search(r'=IMAGE\("([^"]+)"', formula)
|
| 167 |
if match: return match.group(1)
|
|
|
|
|
|
|
| 168 |
return '' # Return empty string if no valid URL found
|
| 169 |
|
| 170 |
def format_currency(value: Any) -> Optional[str]:
|
|
|
|
| 216 |
data = await response.json()
|
| 217 |
if data and data.get("data") and len(data["data"]) > 0:
|
| 218 |
return data["data"][0].get("id")
|
| 219 |
+
else:
|
| 220 |
+
logger.warning(f"Roblox API returned status {response.status} for username '{username}'")
|
| 221 |
return None
|
| 222 |
except asyncio.TimeoutError:
|
| 223 |
logger.warning(f"Timeout fetching Roblox User ID for {username}")
|
|
|
|
| 226 |
logger.warning(f"Network error fetching Roblox User ID for {username}: {e}")
|
| 227 |
return None
|
| 228 |
except Exception as e:
|
| 229 |
+
logger.error(f"Unexpected exception fetching Roblox User ID for {username}: {e}", exc_info=True)
|
| 230 |
return None
|
| 231 |
|
| 232 |
async def get_roblox_avatar_url(session: aiohttp.ClientSession, user_id: int):
|
|
|
|
| 238 |
data = await response.json()
|
| 239 |
if data and data.get("data") and len(data["data"]) > 0:
|
| 240 |
return data["data"][0].get("imageUrl")
|
| 241 |
+
else:
|
| 242 |
+
logger.warning(f"Roblox Thumbnail API returned status {response.status} for user ID {user_id}")
|
| 243 |
return None
|
| 244 |
except asyncio.TimeoutError:
|
| 245 |
logger.warning(f"Timeout fetching Roblox avatar for User ID {user_id}")
|
|
|
|
| 248 |
logger.warning(f"Network error fetching Roblox avatar for User ID {user_id}: {e}")
|
| 249 |
return None
|
| 250 |
except Exception as e:
|
| 251 |
+
logger.error(f"Unexpected exception fetching Roblox avatar for User ID {user_id}: {e}", exc_info=True)
|
| 252 |
return None
|
| 253 |
|
| 254 |
|
| 255 |
# --- Data Processing Functions ---
|
| 256 |
# These functions take raw rows from the sheet and process them.
|
|
|
|
| 257 |
|
| 258 |
def process_sheet_data(values): # For Value Categories
|
| 259 |
if not values: return []
|
| 260 |
processed_data = []
|
| 261 |
for row in values: # Expected range like B6:P
|
| 262 |
if not row or not any(str(cell).strip() for cell in row if cell is not None): continue
|
| 263 |
+
# Skip header-like rows (e.g., "LEVEL 1 | HYPERCHROMES" in column F/index 4)
|
| 264 |
+
if len(row) > 4 and isinstance(row[4], str) and re.search(r'LEVEL \d+ \|', row[4]):
|
| 265 |
+
continue
|
| 266 |
|
| 267 |
# Indices based on B6:P (0-indexed from B)
|
|
|
|
| 268 |
icon_formula = row[0] if len(row) > 0 else ''
|
| 269 |
name = row[2] if len(row) > 2 else 'N/A'
|
| 270 |
value_raw = row[4] if len(row) > 4 else 'N/A'
|
|
|
|
| 274 |
notes = row[12] if len(row) > 12 else ''
|
| 275 |
drive_url = row[14] if len(row) > 14 else None # Column P
|
| 276 |
|
| 277 |
+
cleaned_name = clean_string(name)
|
| 278 |
+
if cleaned_name == 'N/A': # Skip rows without a valid name
|
|
|
|
|
|
|
| 279 |
continue
|
| 280 |
|
| 281 |
processed_item = {
|
| 282 |
'icon': extract_image_url(icon_formula, drive_url),
|
| 283 |
+
'name': cleaned_name,
|
| 284 |
'value': format_currency(value_raw),
|
| 285 |
'dupedValue': format_currency(duped_value_raw),
|
| 286 |
'marketValue': format_currency(market_value_raw),
|
|
|
|
| 296 |
for row in values: # Expected range like B6:G
|
| 297 |
if not row or len(row) < 2: continue
|
| 298 |
# Indices based on B6:G (0-indexed from B)
|
|
|
|
| 299 |
discord_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
|
| 300 |
roblox_username = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
|
| 301 |
if not discord_id and not roblox_username: continue
|
|
|
|
| 306 |
'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
|
| 307 |
'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
|
| 308 |
'alt_accounts': parse_alt_accounts(row[5]) if len(row) > 5 else [], # Col G
|
| 309 |
+
'roblox_avatar_url': None # Will be filled later
|
| 310 |
}
|
| 311 |
processed_data.append(processed_item)
|
| 312 |
return processed_data
|
|
|
|
| 317 |
for row in values: # Expected range like B6:F
|
| 318 |
if not row or len(row) < 2: continue
|
| 319 |
# Indices based on B6:F (0-indexed from B)
|
|
|
|
| 320 |
server_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
|
| 321 |
server_name = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
|
| 322 |
if not server_id and not server_name: continue
|
|
|
|
| 336 |
for row in values: # Expected range like B6:G
|
| 337 |
if not row or len(row) < 3: continue
|
| 338 |
# Indices based on B6:G (0-indexed from B)
|
|
|
|
| 339 |
user_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
|
| 340 |
server_id = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
|
| 341 |
roblox_user = clean_string_optional(row[2]) if len(row) > 2 else None # Col D
|
|
|
|
| 348 |
'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
|
| 349 |
'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
|
| 350 |
'alt_accounts': parse_alt_accounts(row[5]) if len(row) > 5 else [], # Col G
|
| 351 |
+
'roblox_avatar_url': None # Will be filled later
|
| 352 |
}
|
| 353 |
processed_data.append(processed_item)
|
| 354 |
return processed_data
|
|
|
|
| 392 |
status = error_details.get('status')
|
| 393 |
message = error_details.get('message')
|
| 394 |
logger.error(f"Google API HTTP Error during batch fetch for {spreadsheet_id}: Status={status}, Message={message}")
|
| 395 |
+
# Handle potential API key/permission issues explicitly
|
| 396 |
+
if status == 'PERMISSION_DENIED' or status == 'UNAUTHENTICATED':
|
| 397 |
+
logger.critical(f"Authentication/Permission Error accessing {spreadsheet_id}. Please check credentials/API access.")
|
| 398 |
+
cache["service_available"] = False # Mark service as down
|
| 399 |
+
sheets_service = None # Reset service
|
| 400 |
+
elif status == 'NOT_FOUND':
|
| 401 |
+
logger.error(f"Spreadsheet or Range not found error for {spreadsheet_id}. Ranges: {ranges}. Check IDs and Sheet Names.")
|
| 402 |
+
raise e # Re-raise after logging
|
| 403 |
except Exception as e:
|
| 404 |
+
logger.error(f"Error during batch fetching from {spreadsheet_id} for ranges {ranges}: {e}", exc_info=True)
|
| 405 |
raise e
|
| 406 |
|
| 407 |
+
# --- Webhook Sending ---
|
| 408 |
+
async def send_webhook_notification(session: aiohttp.ClientSession, webhook_url: str, embed: Dict):
|
| 409 |
+
"""Sends a Discord webhook notification with the provided embed."""
|
| 410 |
+
if not webhook_url:
|
| 411 |
+
# logger.debug("Webhook URL not configured. Skipping notification.") # Optional: Log less verbosely
|
| 412 |
+
return
|
| 413 |
+
if not embed:
|
| 414 |
+
logger.warning("Attempted to send webhook with empty embed.")
|
| 415 |
+
return
|
| 416 |
+
|
| 417 |
+
webhook_data = {"embeds": [embed]}
|
| 418 |
+
try:
|
| 419 |
+
async with session.post(webhook_url, json=webhook_data, timeout=aiohttp.ClientTimeout(total=10)) as response:
|
| 420 |
+
if response.status not in [200, 204]:
|
| 421 |
+
logger.warning(f"Failed to send webhook (Status: {response.status}): {await response.text()}")
|
| 422 |
+
# else: # Optional: Log success, can be verbose
|
| 423 |
+
# logger.info(f"Webhook notification sent successfully to {webhook_url[:30]}...")
|
| 424 |
+
except asyncio.TimeoutError:
|
| 425 |
+
logger.warning(f"Timeout sending webhook to {webhook_url[:30]}...")
|
| 426 |
+
except aiohttp.ClientError as e:
|
| 427 |
+
logger.error(f"Network error sending webhook to {webhook_url[:30]}...: {e}")
|
| 428 |
+
except Exception as e:
|
| 429 |
+
logger.error(f"Unexpected error sending webhook: {e}", exc_info=True)
|
| 430 |
+
|
| 431 |
+
# --- Background Cache Update Task (Refactored for Batching & Webhooks) ---
|
| 432 |
|
| 433 |
async def update_cache_periodically():
|
| 434 |
+
"""Fetches data, processes, detects changes/new entries, sends webhooks, and updates cache."""
|
| 435 |
global cache
|
| 436 |
+
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30)) as session: # Overall session timeout
|
| 437 |
while True:
|
| 438 |
if not cache["service_available"]:
|
| 439 |
logger.info("Attempting to re-initialize Google Sheets service...")
|
| 440 |
init_google_sheets()
|
| 441 |
if not cache["service_available"]:
|
| 442 |
logger.warning("Google Sheets service still unavailable, skipping cache update cycle.")
|
| 443 |
+
await asyncio.sleep(CACHE_UPDATE_INTERVAL_SECONDS * 2) # Wait longer if service is down
|
| 444 |
continue
|
| 445 |
else:
|
| 446 |
logger.info("Google Sheets service re-initialized. Proceeding with cache update.")
|
| 447 |
|
| 448 |
logger.info("Starting cache update cycle...")
|
| 449 |
start_time = datetime.now(timezone.utc)
|
| 450 |
+
webhook_tasks = [] # Store webhook sending tasks
|
| 451 |
|
| 452 |
# Prepare temporary storage for fetched data
|
| 453 |
fetched_values_categories = {} # { "CategoryName": [items...] }
|
|
|
|
| 461 |
|
| 462 |
try:
|
| 463 |
# --- Define Ranges and Processors ---
|
|
|
|
| 464 |
scammer_dwc_ranges = [
|
| 465 |
f"{quote_sheet_name(USER_SCAMMER_SHEET)}!{USER_SCAMMER_RANGE}",
|
| 466 |
f"{quote_sheet_name(SERVER_SCAMMER_SHEET)}!{SERVER_SCAMMER_RANGE}",
|
|
|
|
| 471 |
SERVER_SCAMMER_SHEET: process_server_scammer_data,
|
| 472 |
DWC_SHEET: process_dwc_data,
|
| 473 |
}
|
| 474 |
+
scammer_dwc_target_key_map = {
|
| 475 |
USER_SCAMMER_SHEET: "user_scammers",
|
| 476 |
SERVER_SCAMMER_SHEET: "server_scammers",
|
| 477 |
DWC_SHEET: "dwc",
|
| 478 |
}
|
| 479 |
|
|
|
|
| 480 |
values_dupes_ranges = [f"{quote_sheet_name(DUPE_LIST_SHEET)}!{DUPE_LIST_RANGE}"]
|
| 481 |
values_dupes_ranges.extend([f"{quote_sheet_name(cat)}!{VALUES_RANGE}" for cat in CATEGORIES])
|
| 482 |
|
|
|
|
| 485 |
"scammer_dwc_batch": fetch_batch_ranges_async(
|
| 486 |
SCAMMER_DWC_SPREADSHEET_ID,
|
| 487 |
scammer_dwc_ranges,
|
| 488 |
+
value_render_option='FORMATTED_VALUE'
|
| 489 |
),
|
| 490 |
"values_dupes_batch": fetch_batch_ranges_async(
|
| 491 |
VALUES_DUPE_SPREADSHEET_ID,
|
| 492 |
values_dupes_ranges,
|
| 493 |
+
value_render_option='FORMATTED_VALUE'
|
| 494 |
)
|
| 495 |
}
|
| 496 |
|
|
|
|
| 508 |
logger.error(f"Failed to fetch batch data for {key}: {result}")
|
| 509 |
current_errors[key] = str(result)
|
| 510 |
else:
|
|
|
|
| 511 |
if key == "scammer_dwc_batch":
|
| 512 |
raw_scammer_dwc_results = result
|
| 513 |
elif key == "values_dupes_batch":
|
|
|
|
| 518 |
logger.info(f"Processing {len(raw_scammer_dwc_results)} valueRanges from Scammer/DWC sheet...")
|
| 519 |
for vr in raw_scammer_dwc_results:
|
| 520 |
range_str = vr.get('range', '')
|
|
|
|
| 521 |
match = re.match(r"^'?([^'!]+)'?!", range_str)
|
| 522 |
if not match:
|
| 523 |
logger.warning(f"Could not extract sheet name from range '{range_str}' in Scammer/DWC response.")
|
| 524 |
continue
|
| 525 |
+
sheet_name = match.group(1).replace("''", "'")
|
| 526 |
|
| 527 |
if sheet_name in scammer_dwc_processor_map:
|
| 528 |
processor = scammer_dwc_processor_map[sheet_name]
|
|
|
|
| 530 |
values = vr.get('values', [])
|
| 531 |
try:
|
| 532 |
processed_data = processor(values)
|
| 533 |
+
new_cache_data[target_key] = processed_data # Store fetched data temporarily
|
| 534 |
logger.info(f"Processed {len(processed_data)} items for {sheet_name} -> {target_key}")
|
| 535 |
except Exception as e:
|
| 536 |
logger.error(f"Error processing data for {sheet_name} using {processor.__name__}: {e}", exc_info=True)
|
|
|
|
| 553 |
try:
|
| 554 |
if sheet_name == DUPE_LIST_SHEET:
|
| 555 |
processed_data = process_dupe_list_data(values)
|
| 556 |
+
new_cache_data["dupes"] = processed_data # Store fetched data temporarily
|
| 557 |
logger.info(f"Processed {len(processed_data)} items for {DUPE_LIST_SHEET} -> dupes")
|
| 558 |
elif sheet_name in CATEGORIES:
|
| 559 |
processed_data = process_sheet_data(values)
|
| 560 |
+
fetched_values_categories[sheet_name] = processed_data # Store fetched data temporarily
|
| 561 |
logger.info(f"Processed {len(processed_data)} items for Category: {sheet_name}")
|
| 562 |
else:
|
| 563 |
logger.warning(f"Unrecognized sheet name '{sheet_name}' derived from range '{range_str}' in Values/Dupes sheet.")
|
|
|
|
| 566 |
logger.error(f"Error processing data for {sheet_name}: {e}", exc_info=True)
|
| 567 |
current_errors[f"process_{target_key}"] = str(e)
|
| 568 |
|
| 569 |
+
# --- Fetch Roblox Avatars (for new data before comparison/webhook) ---
|
| 570 |
+
logger.info("Fetching Roblox avatars for newly processed data...")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 571 |
avatar_tasks = []
|
| 572 |
+
# Combine lists needing avatars from the *newly fetched* data
|
| 573 |
entries_needing_avatars = new_cache_data.get("user_scammers", []) + new_cache_data.get("dwc", [])
|
| 574 |
for entry in entries_needing_avatars:
|
| 575 |
if entry.get('roblox_username'):
|
|
|
|
| 577 |
avatar_tasks.append(fetch_avatar_for_entry_update(session, entry))
|
| 578 |
if avatar_tasks:
|
| 579 |
await asyncio.gather(*avatar_tasks) # Exceptions logged within helper
|
| 580 |
+
logger.info(f"Finished fetching avatars for {len(avatar_tasks)} potential new entries.")
|
| 581 |
+
|
| 582 |
+
# --- Change Detection & Webhook Preparation (BEFORE Cache Update) ---
|
| 583 |
+
current_time = datetime.now(timezone.utc)
|
| 584 |
+
timestamp_iso = current_time.isoformat()
|
| 585 |
+
|
| 586 |
+
# 1. Value Changes (Existing Logic + Webhook Prep)
|
| 587 |
+
detected_value_changes_for_api = {} # For the /api/value-changes endpoint
|
| 588 |
+
if "values" not in cache: cache["values"] = {} # Ensure exists for comparison
|
| 589 |
+
if VALUE_WEBHOOK_URL and not any(k.startswith("process_values_") for k in current_errors) and "values_dupes_batch" not in current_errors:
|
| 590 |
+
logger.info("Detecting value changes for webhooks...")
|
| 591 |
+
fields_to_compare = ['value', 'dupedValue', 'marketValue']
|
| 592 |
+
for category, new_items in fetched_values_categories.items():
|
| 593 |
+
old_items_dict = {item['name']: item for item in cache["values"].get(category, [])}
|
| 594 |
+
category_changes_for_api = []
|
| 595 |
+
|
| 596 |
+
for new_item in new_items:
|
| 597 |
+
item_name = new_item.get('name')
|
| 598 |
+
if not item_name or item_name == 'N/A': continue
|
| 599 |
+
|
| 600 |
+
old_item = old_items_dict.get(item_name)
|
| 601 |
+
if old_item: # Check existing item for changes
|
| 602 |
+
for field in fields_to_compare:
|
| 603 |
+
old_val_str = old_item.get(field, 'N/A')
|
| 604 |
+
new_val_str = new_item.get(field, 'N/A')
|
| 605 |
+
# Simple string comparison is sufficient here as they are formatted consistently
|
| 606 |
+
if old_val_str != new_val_str:
|
| 607 |
+
logger.info(f"Change detected in {category}: {item_name} - {field}: '{old_val_str}' -> '{new_val_str}'")
|
| 608 |
+
change_info = {
|
| 609 |
+
"item_name": item_name, "field": field,
|
| 610 |
+
"old_value": old_val_str if old_val_str is not None else "N/A",
|
| 611 |
+
"new_value": new_val_str if new_val_str is not None else "N/A",
|
| 612 |
+
"timestamp": timestamp_iso
|
| 613 |
+
}
|
| 614 |
+
category_changes_for_api.append(change_info)
|
| 615 |
+
|
| 616 |
+
# Prepare webhook embed
|
| 617 |
+
embed = {
|
| 618 |
+
"title": f"Value Update: {item_name} ({category})",
|
| 619 |
+
"color": 3447003, # Blue
|
| 620 |
+
"fields": [
|
| 621 |
+
{"name": "Field Changed", "value": field, "inline": True},
|
| 622 |
+
{"name": "Old Value", "value": f"`{change_info['old_value']}`", "inline": True},
|
| 623 |
+
{"name": "New Value", "value": f"`{change_info['new_value']}`", "inline": True},
|
| 624 |
+
{"name": "Item Notes", "value": new_item.get('notes', 'N/A')[:1020] or 'N/A', "inline": False}, # Limit notes length
|
| 625 |
+
],
|
| 626 |
+
"timestamp": timestamp_iso
|
| 627 |
+
}
|
| 628 |
+
if new_item.get('icon'):
|
| 629 |
+
embed["thumbnail"] = {"url": new_item['icon']}
|
| 630 |
+
|
| 631 |
+
webhook_tasks.append(send_webhook_notification(session, VALUE_WEBHOOK_URL, embed))
|
| 632 |
+
if category_changes_for_api:
|
| 633 |
+
detected_value_changes_for_api[category] = category_changes_for_api
|
| 634 |
+
logger.info(f"Prepared {len(webhook_tasks)} value change webhooks.")
|
| 635 |
+
elif not VALUE_WEBHOOK_URL:
|
| 636 |
+
logger.info("VALUE_WEBHOOK_URL not set, skipping value change webhook detection.")
|
| 637 |
+
else:
|
| 638 |
+
logger.warning("Skipping value change webhook detection due to fetch/processing errors.")
|
| 639 |
+
|
| 640 |
+
|
| 641 |
+
# 2. New Scammers / DWC (New Logic + Webhook Prep)
|
| 642 |
+
if SCAMMER_WEBHOOK_URL and "scammer_dwc_batch" not in current_errors and not any(k.startswith("process_") and k in ["process_user_scammers", "process_server_scammers", "process_dwc"] for k in current_errors):
|
| 643 |
+
logger.info("Detecting new scammer/DWC entries for webhooks...")
|
| 644 |
+
initial_webhook_task_count = len(webhook_tasks)
|
| 645 |
+
|
| 646 |
+
# User Scammers
|
| 647 |
+
old_user_keys = set((item.get('discord_id'), item.get('roblox_username')) for item in cache.get("user_scammers", []))
|
| 648 |
+
for item in new_cache_data.get("user_scammers", []):
|
| 649 |
+
key = (item.get('discord_id'), item.get('roblox_username'))
|
| 650 |
+
if key not in old_user_keys:
|
| 651 |
+
logger.info(f"New User Scammer detected: Discord={item.get('discord_id')}, Roblox={item.get('roblox_username')}")
|
| 652 |
+
embed = {
|
| 653 |
+
"title": "🚨 New User Scammer Added",
|
| 654 |
+
"color": 15158332, # Red
|
| 655 |
+
"fields": [
|
| 656 |
+
{"name": "Discord ID", "value": f"`{item.get('discord_id', 'N/A')}`", "inline": True},
|
| 657 |
+
{"name": "Roblox User", "value": f"`{item.get('roblox_username', 'N/A')}`", "inline": True},
|
| 658 |
+
{"name": "Scam Type", "value": item.get('scam_type', 'N/A'), "inline": False},
|
| 659 |
+
{"name": "Explanation", "value": item.get('explanation', 'N/A')[:1020] or 'N/A', "inline": False},
|
| 660 |
+
],
|
| 661 |
+
"timestamp": timestamp_iso
|
| 662 |
+
}
|
| 663 |
+
if item.get('evidence_link'):
|
| 664 |
+
embed["fields"].append({"name": "Evidence", "value": item['evidence_link'], "inline": False})
|
| 665 |
+
if item.get('alt_accounts'):
|
| 666 |
+
embed["fields"].append({"name": "Alt Accounts", "value": ", ".join([f"`{a}`" for a in item['alt_accounts']]), "inline": False})
|
| 667 |
+
if item.get('roblox_avatar_url'):
|
| 668 |
+
embed["thumbnail"] = {"url": item['roblox_avatar_url']}
|
| 669 |
+
webhook_tasks.append(send_webhook_notification(session, SCAMMER_WEBHOOK_URL, embed))
|
| 670 |
+
|
| 671 |
+
# Server Scammers
|
| 672 |
+
old_server_keys = set((item.get('server_id'), item.get('server_name')) for item in cache.get("server_scammers", []))
|
| 673 |
+
for item in new_cache_data.get("server_scammers", []):
|
| 674 |
+
key = (item.get('server_id'), item.get('server_name'))
|
| 675 |
+
if key not in old_server_keys:
|
| 676 |
+
logger.info(f"New Server Scammer detected: ID={item.get('server_id')}, Name={item.get('server_name')}")
|
| 677 |
+
embed = {
|
| 678 |
+
"title": "🚨 New Server Scammer Added",
|
| 679 |
+
"color": 15158332, # Red
|
| 680 |
+
"fields": [
|
| 681 |
+
{"name": "Server ID", "value": f"`{item.get('server_id', 'N/A')}`", "inline": True},
|
| 682 |
+
{"name": "Server Name", "value": f"`{item.get('server_name', 'N/A')}`", "inline": True},
|
| 683 |
+
{"name": "Scam Type", "value": item.get('scam_type', 'N/A'), "inline": False},
|
| 684 |
+
{"name": "Explanation", "value": item.get('explanation', 'N/A')[:1020] or 'N/A', "inline": False},
|
| 685 |
+
],
|
| 686 |
+
"timestamp": timestamp_iso
|
| 687 |
+
}
|
| 688 |
+
if item.get('evidence_link'):
|
| 689 |
+
embed["fields"].append({"name": "Evidence", "value": item['evidence_link'], "inline": False})
|
| 690 |
+
webhook_tasks.append(send_webhook_notification(session, SCAMMER_WEBHOOK_URL, embed))
|
| 691 |
+
|
| 692 |
+
# DWC Entries
|
| 693 |
+
old_dwc_keys = set((item.get('discord_user_id'), item.get('discord_server_id'), item.get('roblox_username')) for item in cache.get("dwc", []))
|
| 694 |
+
for item in new_cache_data.get("dwc", []):
|
| 695 |
+
key = (item.get('discord_user_id'), item.get('discord_server_id'), item.get('roblox_username'))
|
| 696 |
+
if key not in old_dwc_keys:
|
| 697 |
+
logger.info(f"New DWC Entry detected: User={item.get('discord_user_id')}, Server={item.get('discord_server_id')}, Roblox={item.get('roblox_username')}")
|
| 698 |
+
embed = {
|
| 699 |
+
"title": "⚠️ New DWC Entry Added",
|
| 700 |
+
"color": 15105570, # Orange/Dark Yellow
|
| 701 |
+
"fields": [
|
| 702 |
+
{"name": "Discord User ID", "value": f"`{item.get('discord_user_id', 'N/A')}`", "inline": True},
|
| 703 |
+
{"name": "Discord Server ID", "value": f"`{item.get('discord_server_id', 'N/A')}`", "inline": True},
|
| 704 |
+
{"name": "Roblox User", "value": f"`{item.get('roblox_username', 'N/A')}`", "inline": True},
|
| 705 |
+
{"name": "Explanation", "value": item.get('explanation', 'N/A')[:1020] or 'N/A', "inline": False},
|
| 706 |
+
],
|
| 707 |
+
"timestamp": timestamp_iso
|
| 708 |
+
}
|
| 709 |
+
if item.get('evidence_link'):
|
| 710 |
+
embed["fields"].append({"name": "Evidence", "value": item['evidence_link'], "inline": False})
|
| 711 |
+
if item.get('alt_accounts'):
|
| 712 |
+
embed["fields"].append({"name": "Alt Accounts", "value": ", ".join([f"`{a}`" for a in item['alt_accounts']]), "inline": False})
|
| 713 |
+
if item.get('roblox_avatar_url'):
|
| 714 |
+
embed["thumbnail"] = {"url": item['roblox_avatar_url']}
|
| 715 |
+
webhook_tasks.append(send_webhook_notification(session, SCAMMER_WEBHOOK_URL, embed))
|
| 716 |
+
|
| 717 |
+
logger.info(f"Prepared {len(webhook_tasks) - initial_webhook_task_count} new scammer/DWC webhooks.")
|
| 718 |
+
elif not SCAMMER_WEBHOOK_URL:
|
| 719 |
+
logger.info("SCAMMER_WEBHOOK_URL not set, skipping new scammer webhook detection.")
|
| 720 |
+
else:
|
| 721 |
+
logger.warning("Skipping new scammer webhook detection due to fetch/processing errors.")
|
| 722 |
+
|
| 723 |
+
# --- Send Webhooks Concurrently ---
|
| 724 |
+
if webhook_tasks:
|
| 725 |
+
logger.info(f"Sending {len(webhook_tasks)} webhook notifications...")
|
| 726 |
+
await asyncio.gather(*webhook_tasks)
|
| 727 |
+
logger.info("Finished sending webhook notifications.")
|
| 728 |
+
else:
|
| 729 |
+
logger.info("No webhooks to send for this cycle.")
|
| 730 |
|
| 731 |
|
| 732 |
# --- Final Cache Update ---
|
|
|
|
| 738 |
cache["server_scammers"] = new_cache_data["server_scammers"]
|
| 739 |
cache["dwc"] = new_cache_data["dwc"]
|
| 740 |
cache["dupes"] = new_cache_data["dupes"]
|
| 741 |
+
cache["value_changes"] = detected_value_changes_for_api # Store the detected changes
|
| 742 |
cache["last_updated"] = current_time
|
| 743 |
cache["is_ready"] = True
|
| 744 |
+
cache["service_available"] = True # Mark as available on success
|
| 745 |
update_occurred = True
|
| 746 |
logger.info(f"Cache update cycle completed successfully.")
|
| 747 |
else: # Errors occurred, attempt partial update
|
|
|
|
| 750 |
|
| 751 |
# Update values only if the values/dupes batch succeeded AND processing succeeded
|
| 752 |
if "values_dupes_batch" not in current_errors and not any(k.startswith("process_values_") for k in current_errors):
|
| 753 |
+
# Check if fetched data is different from cache before updating
|
| 754 |
+
if cache.get("values") != fetched_values_categories:
|
| 755 |
+
cache["values"] = fetched_values_categories
|
| 756 |
+
cache["value_changes"] = detected_value_changes_for_api # Update changes along with values
|
| 757 |
+
partial_update_details.append("values")
|
| 758 |
+
update_occurred = True
|
| 759 |
+
else:
|
| 760 |
+
logger.warning("Skipping update for 'values' due to errors.")
|
| 761 |
+
|
| 762 |
|
| 763 |
# Update dupes only if the values/dupes batch succeeded AND processing succeeded
|
| 764 |
if "values_dupes_batch" not in current_errors and "process_dupes" not in current_errors:
|
| 765 |
+
if cache.get("dupes") != new_cache_data["dupes"]:
|
| 766 |
cache["dupes"] = new_cache_data["dupes"]
|
| 767 |
partial_update_details.append("dupes")
|
| 768 |
update_occurred = True
|
| 769 |
+
else:
|
| 770 |
+
logger.warning("Skipping update for 'dupes' due to errors.")
|
| 771 |
|
| 772 |
# Update scammer/DWC sections if their batch succeeded AND processing succeeded
|
| 773 |
if "scammer_dwc_batch" not in current_errors:
|
| 774 |
for key in ["user_scammers", "server_scammers", "dwc"]:
|
| 775 |
process_error_key = f"process_{key}"
|
| 776 |
if process_error_key not in current_errors:
|
| 777 |
+
if cache.get(key) != new_cache_data[key]:
|
| 778 |
cache[key] = new_cache_data[key]
|
| 779 |
partial_update_details.append(key)
|
| 780 |
update_occurred = True
|
| 781 |
+
else:
|
| 782 |
+
logger.warning(f"Skipping update for '{key}' due to processing error.")
|
| 783 |
+
else:
|
| 784 |
+
logger.warning("Skipping update for 'user_scammers', 'server_scammers', 'dwc' due to batch fetch error.")
|
| 785 |
+
|
| 786 |
|
| 787 |
if update_occurred:
|
| 788 |
cache["last_updated"] = current_time # Mark partial update time
|
| 789 |
cache["is_ready"] = True # Allow access even if partial
|
| 790 |
+
# Keep service_available as potentially false if there were fetch errors
|
| 791 |
logger.info(f"Partially updated cache sections: {', '.join(partial_update_details)}")
|
| 792 |
else:
|
| 793 |
logger.error(f"Cache update cycle failed, and no parts could be updated based on errors. Errors: {current_errors}")
|
| 794 |
+
# Keep cache["is_ready"] as it was. Don't update timestamp.
|
| 795 |
|
| 796 |
        except Exception as e:
            logger.exception(f"Critical error during cache update cycle: {e}")
+            # If a critical error happens (e.g. a network error during fetch), mark the service as potentially unavailable
            if isinstance(e, (aiohttp.ClientError, HttpError, asyncio.TimeoutError)):
                logger.warning("Communication error detected; will re-check service availability next cycle.")
+                # service_available might already have been set to False by fetch_batch_ranges_async

        # --- Wait for the next cycle ---
        end_time = datetime.now(timezone.utc)
        duration = (end_time - start_time).total_seconds()
+        wait_time = max(10, CACHE_UPDATE_INTERVAL_SECONDS - duration) # Enforce at least a 10-second wait
        logger.info(f"Cache update cycle duration: {duration:.2f}s. Waiting {wait_time:.2f}s for next cycle.")
        await asyncio.sleep(wait_time)

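# --- Illustrative aside (not part of the diff above): how the wait-time clamp
# behaves. A minimal sketch assuming CACHE_UPDATE_INTERVAL_SECONDS = 300, the
# 5-minute interval configured earlier in this file.
for duration in (20.0, 295.0, 400.0):
    wait_time = max(10, 300 - duration)  # Same expression as in the loop above
    print(f"cycle took {duration:5.1f}s -> sleep {wait_time:5.1f}s")
# A fast cycle waits out the remainder of the interval; a cycle that overruns
# the interval is clamped to the 10-second floor, so the updater never busy-loops.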

    roblox_username = entry.get('roblox_username')
    if not roblox_username: return

    new_avatar = None # Default to None
    try:
        user_id = await get_roblox_user_id(session, roblox_username)
        if user_id:

        # Keep new_avatar as None on error

    finally:
+        # Update the entry dict directly; no need to check whether it changed, just set it
+        entry['roblox_avatar_url'] = new_avatar

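# --- Illustrative aside (not part of the diff above): one plausible shape for the
# get_roblox_user_id helper called in the try block. This is a hypothetical sketch
# assuming the public Roblox Users API (POST https://users.roblox.com/v1/usernames/users);
# the real helper is defined elsewhere in main.py and may differ.
import aiohttp
from typing import Optional

async def get_roblox_user_id_sketch(session: aiohttp.ClientSession, username: str) -> Optional[int]:
    payload = {"usernames": [username], "excludeBannedUsers": False}
    async with session.post("https://users.roblox.com/v1/usernames/users", json=payload) as resp:
        if resp.status != 200:
            return None  # Treat any non-200 as "not found"; the caller keeps new_avatar = None
        data = await resp.json()
        matches = data.get("data", [])
        return matches[0].get("id") if matches else None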
# --- FastAPI Startup Event ---

    if not cache["service_available"]:
        logger.warning("Google Sheets service not available at startup. Will attempt re-init in background task.")
    logger.info("Starting background cache update task...")
+    # Check for webhook URLs at startup
+    if not SCAMMER_WEBHOOK_URL:
+        logger.warning("SCAMMER_WEBHOOK_URL environment variable not set. New scammer notifications disabled.")
+    if not VALUE_WEBHOOK_URL:
+        logger.warning("VALUE_WEBHOOK_URL environment variable not set. Value change notifications disabled.")
    asyncio.create_task(update_cache_periodically())

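# --- Illustrative aside (not part of the diff above): recent FastAPI releases
# deprecate @app.on_event("startup") in favour of a lifespan context manager.
# A minimal sketch under that assumption, reusing update_cache_periodically from above.
import asyncio
from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    task = asyncio.create_task(update_cache_periodically())  # Start the background refresh
    yield  # Application serves requests here
    task.cancel()  # Stop the loop cleanly on shutdown

# app = FastAPI(lifespan=lifespan)  # Would replace the on_event hook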
+# --- API Endpoints ---

def check_cache_readiness():
    """Reusable readiness check for API endpoints."""
    if not cache["is_ready"]:
+        # Be more specific when the service is known to be down
+        if not cache["service_available"]:
+            raise HTTPException(status_code=503, detail="Service temporarily unavailable due to backend connection issues. Please try again later.")
+        else:
+            raise HTTPException(status_code=503, detail="Cache is initializing or data is currently unavailable. Please try again shortly.")
+    # Optional: also flag staleness?
+    # if cache["last_updated"] and (datetime.now(timezone.utc) - cache["last_updated"]).total_seconds() > CACHE_UPDATE_INTERVAL_SECONDS * 3:
+    #     raise HTTPException(status_code=503, detail="Data may be stale. Update in progress or backend issue.")

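# --- Illustrative aside (not part of the diff above): because check_cache_readiness
# only raises HTTPException, it can also be attached as a FastAPI dependency, which
# removes the explicit call from each handler body. A sketch, not how main.py
# actually wires it; the route name here is hypothetical.
from fastapi import Depends

@app.get("/api/values-alt", dependencies=[Depends(check_cache_readiness)])
async def get_values_alt():
    return cache.get("values", {})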
@app.get("/")
async def root():

@app.get("/api/status")
async def get_status():
    """Returns the current status of the cache and service availability"""
+    last_updated_iso = cache["last_updated"].isoformat() if cache["last_updated"] else None
    return {
        "cache_ready": cache["is_ready"],
        "sheets_service_available": cache["service_available"],
+        "last_updated": last_updated_iso,
        "cached_items": {
+            "value_categories": len(cache.get("values", {})),
+            "user_scammers": len(cache.get("user_scammers", [])),
+            "server_scammers": len(cache.get("server_scammers", [])),
+            "dwc_entries": len(cache.get("dwc", [])),
+            "duped_usernames": len(cache.get("dupes", [])),
        },
+        "value_change_categories_in_last_cycle": len(cache.get("value_changes", {}))
    }

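# --- Illustrative aside (not part of the diff above): polling /api/status from a
# client. The base URL is an assumption (7860 is the usual Hugging Face Space port);
# the keys match the handler above.
import requests

status = requests.get("http://localhost:7860/api/status", timeout=10).json()
print(status["cache_ready"], status["last_updated"])
print(status["cached_items"])  # Per-section item counts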
@app.get("/api/values")
async def get_values():
    """Get all values data from cache"""
    check_cache_readiness()
+    return cache.get("values", {})

@app.get("/api/values/{category}")
async def get_category_values(category: str):
    """Get values data for a specific category from cache"""
    check_cache_readiness()
+    # Case-insensitive matching for the category name
+    matched_category = next((c for c in cache.get("values", {}).keys() if c.lower() == category.lower()), None)
    if not matched_category:
+        # Check whether the category *exists* conceptually, even if it is empty
+        if category.lower() in [c.lower() for c in CATEGORIES]:
+            return {category: []} # Valid category that currently has no items
+        else:
+            raise HTTPException(status_code=404, detail=f"Category '{category}' not found.")
+    return {matched_category: cache.get("values", {}).get(matched_category, [])}
+

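# --- Illustrative aside (not part of the diff above): the case-insensitive category
# lookup above in miniature. The category names and item are invented for the demo.
values = {"Vehicles": [{"name": "Example Item"}], "Hyperchromes": []}
requested = "vehicles"
matched = next((c for c in values if c.lower() == requested.lower()), None)
print(matched)                     # -> "Vehicles"
print({matched: values[matched]})  # Shape of the endpoint's response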
@app.get("/api/value-changes/{category}")
async def get_category_value_changes(category: str):
+    """Get detected value changes for a specific category from the last cache update cycle."""
    check_cache_readiness()
+    # Case-insensitive matching for the category name
+    matched_category = next((c for c in cache.get("value_changes", {}).keys() if c.lower() == category.lower()), None)
    if not matched_category:
+        # Check whether the category *exists* conceptually, even if it had no changes
+        if category.lower() in [c.lower() for c in CATEGORIES]:
+            return {category: []} # Valid category with no changes in the last cycle
+        else:
+            raise HTTPException(status_code=404, detail=f"Category '{category}' not found.")
    return {matched_category: cache.get("value_changes", {}).get(matched_category, [])}

@app.get("/api/value-changes")
async def get_all_value_changes():
+    """Get all detected value changes from the last cache update cycle."""
    check_cache_readiness()
    return cache.get("value_changes", {})


    """Get all scammer and DWC data (users, servers, dwc) from cache"""
    check_cache_readiness()
    return {
+        "users": cache.get("user_scammers", []),
+        "servers": cache.get("server_scammers", []),
+        "dwc": cache.get("dwc", [])
    }

@app.get("/api/dupes")
async def get_dupes():
    """Get all duped usernames from cache"""
    check_cache_readiness()
+    return {"usernames": cache.get("dupes", [])} # Empty list when nothing is cached

class UsernameCheck(BaseModel):

    check_cache_readiness() # Use the standard readiness check

    username_to_check = data.username.strip().lower()
+    # The dupes list may be empty if nothing has been cached yet
+    dupes_list = cache.get("dupes", [])
+    is_duped = username_to_check in dupes_list

+    # Webhook notification for checks resulting in "Not Found" (runs in the background)
+    # Note: this uses the standalone WEBHOOK_URL env var from the original code,
+    # not SCAMMER_WEBHOOK_URL or VALUE_WEBHOOK_URL; adjust the name if it should go elsewhere.
    if not is_duped:
+        webhook_url = os.getenv("WEBHOOK_URL") # Keep the original env var name for this specific check
        if webhook_url:
+            # Use a new session for this one-off task, or pass the main one if that is safe
+            async def send_check_webhook():
+                try:
+                    async with aiohttp.ClientSession() as session: # Create a short-lived session
+                        embed = {
+                            "title": "User Dupe Check - Not Found",
+                            "description": f"Username `{data.username}` was checked against the dupe list but was **not** found.",
                            "color": 16776960, # Yellow
                            "timestamp": datetime.now(timezone.utc).isoformat()
                        }
+                        await send_webhook_notification(session, webhook_url, embed)
+                except Exception as e:
+                    logger.error(f"Error sending dupe check webhook: {e}") # Log errors from the task
+
+            asyncio.create_task(send_check_webhook()) # Fire and forget
        else:
+            logger.info("WEBHOOK_URL (for dupe checks) not configured. Skipping notification.")

    return {"username": data.username, "is_duped": is_duped}

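# --- Illustrative aside (not part of the diff above): exercising the dupe check from
# a client. Both the base URL and the /api/check path are assumptions; the route
# decorator for this handler sits outside the hunk shown above.
import requests

resp = requests.post("http://localhost:7860/api/check",
                     json={"username": "SomeUser"}, timeout=10)
print(resp.json())  # e.g. {"username": "SomeUser", "is_duped": False}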
@app.get("/health")
def health_check():
    """Provides a health status of the API and its cache."""
+    now = datetime.now(timezone.utc)
+    status_detail = {"status": "ok", "last_updated": None, "time_since_update_seconds": None}
+
+    if cache["last_updated"]:
+        status_detail["last_updated"] = cache["last_updated"].isoformat()
+        time_since_update = (now - cache["last_updated"]).total_seconds()
+        status_detail["time_since_update_seconds"] = round(time_since_update)
+
+    if not cache["is_ready"]:
+        status_detail["status"] = "initializing"
+        status_detail["reason"] = "Cache has not been populated yet."
+        return status_detail
+
+    if not cache["service_available"]:
+        status_detail["status"] = "degraded"
+        status_detail["reason"] = "Google Sheets service connection issue detected on last attempt."
+        return status_detail
+
+    # Check for staleness only if the cache is ready and the service *was* available on the last check.
+    # Allow a grace period of three update intervals.
+    if cache["last_updated"] and time_since_update > CACHE_UPDATE_INTERVAL_SECONDS * 3:
+        status_detail["status"] = "degraded"
+        status_detail["reason"] = f"Cache potentially stale (last update > {CACHE_UPDATE_INTERVAL_SECONDS * 3} seconds ago)"
+        return status_detail
+
+    # If we reach here, the status is 'ok'
+    return status_detail
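# --- Illustrative aside (not part of the diff above): a tiny watchdog built on the
# /health endpoint. The base URL is an assumption; "initializing" and "degraded"
# are the soft-failure states the handler above can report.
import requests

def probe(base_url: str = "http://localhost:7860") -> str:
    try:
        return requests.get(f"{base_url}/health", timeout=5).json().get("status", "unknown")
    except requests.RequestException:
        return "unreachable"

print(probe())  # "ok", "initializing", "degraded", or "unreachable"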