asvs committed on
Commit
778b25b
Β·
1 Parent(s): 7d95f28

update app to use supabase

Browse files
Files changed (8) hide show
  1. .claude/settings.local.json +25 -0
  2. .env.example +4 -3
  3. .gitignore +4 -1
  4. CLAUDE.md +7 -0
  5. app.py +105 -253
  6. pyproject.toml +1 -0
  7. supabase-db-migration-rpi.md +228 -0
  8. uv.lock +0 -0
.claude/settings.local.json ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "permissions": {
3
+ "allow": [
4
+ "WebSearch",
5
+ "Bash(cat:*)",
6
+ "mcp__context7__resolve-library-id",
7
+ "mcp__context7__query-docs",
8
+ "mcp__supabase__get_project_url",
9
+ "mcp__supabase__list_tables",
10
+ "mcp__supabase__list_extensions",
11
+ "mcp__supabase__list_migrations",
12
+ "mcp__supabase__get_publishable_keys",
13
+ "mcp__supabase__get_advisors",
14
+ "mcp__supabase__search_docs",
15
+ "mcp__supabase__apply_migration",
16
+ "Bash(uv add:*)",
17
+ "mcp__supabase__execute_sql",
18
+ "Bash(SUPABASE_KEY=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImljaGhzdGh4YWVnZXhlb2dvbHp6Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3Njc0NjIwNDcsImV4cCI6MjA4MzAzODA0N30.yskUms5aG5J2hc8IpHngItCERvWBd4F6y2Uy0vqSbhM\" uv run python -c \"from app import load_matches; matches = load_matches\\(\\); print\\(f''Loaded {len\\(matches\\)} matches''\\)\")"
19
+ ]
20
+ },
21
+ "enableAllProjectMcpServers": true,
22
+ "enabledMcpjsonServers": [
23
+ "supabase"
24
+ ]
25
+ }
.env.example CHANGED
@@ -1,3 +1,4 @@
1
- # HuggingFace Token for dataset storage
2
- # Get your token from: https://huggingface.co/settings/tokens
3
- HF_TOKEN=hf_your_token_here
 
 
1
+ # Supabase credentials for database storage
2
+ # Get your credentials from: https://supabase.com/dashboard/project/_/settings/api
3
+ SUPABASE_URL=https://your-project.supabase.co
4
+ SUPABASE_KEY=your-anon-key-here
.gitignore CHANGED
@@ -1 +1,4 @@
1
- .env
 
 
 
 
1
+ .env
2
+ .env.local
3
+ .env.*
4
+ .mcp.json
CLAUDE.md ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ use uv for package management and running code.
2
+ ```
3
+ uv add <package>
4
+ uv run <script>
5
+ ```
6
+ Use the context7 MCP server to get the latest documentation of any library; this is a mandatory step before writing any code.
7
+ Don't run anything with sleep or timeout. If a task is running for a long time, move it to a background process and monitor it periodically until it completes, pausing between checks. Meanwhile, work on any other tasks; if there are none, keep waiting and rechecking until it finishes.
app.py CHANGED
@@ -1,13 +1,10 @@
1
  """League Table Manager - Interactive Gradio Interface"""
2
- import json
3
- import threading
4
  import logging
5
  from datetime import datetime, timedelta, timezone
6
- from pathlib import Path
7
- from uuid import uuid4
8
  import pandas as pd
9
  import gradio as gr
10
- from huggingface_hub import HfApi
11
 
12
  # Configure logging
13
  logging.basicConfig(
@@ -22,143 +19,48 @@ TEAMS = ["Seelam", "Akhil", "Kartheek", "Shiva"]
22
  # IST timezone (UTC+5:30)
23
  IST = timezone(timedelta(hours=5, minutes=30))
24
 
25
- # Dataset storage setup
26
- DATASET_DIR = Path("league_data")
27
- DATASET_DIR.mkdir(parents=True, exist_ok=True)
28
 
29
- # Session-specific file names (generated once per app startup)
30
- SESSION_ID = str(uuid4())
31
- MATCHES_FILE = DATASET_DIR / f"matches-{SESSION_ID}.jsonl"
32
- DELETION_LOG_FILE = DATASET_DIR / f"deletions-{SESSION_ID}.jsonl"
33
 
34
- # Initialize HfApi for immediate uploads
35
- api = HfApi()
36
- REPO_ID = "asvs/league-table-data"
37
- REPO_TYPE = "dataset"
38
- PATH_IN_REPO = "data"
39
-
40
- # Thread lock for safe concurrent writes
41
- file_lock = threading.Lock()
42
-
43
- # Global matches storage
44
  matches = []
45
 
46
 
47
- def ensure_repo_exists():
48
- """Create the HuggingFace dataset repository if it doesn't exist."""
49
- try:
50
- # Check if repo exists by trying to get repo info
51
- api.repo_info(repo_id=REPO_ID, repo_type=REPO_TYPE)
52
- logger.info(f"βœ“ Dataset repository '{REPO_ID}' exists")
53
- except Exception:
54
- # Repository doesn't exist, create it
55
- try:
56
- api.create_repo(
57
- repo_id=REPO_ID,
58
- repo_type=REPO_TYPE,
59
- private=True,
60
- exist_ok=True
61
- )
62
- logger.info(f"βœ“ Created new dataset repository: '{REPO_ID}'")
63
- except Exception as e:
64
- logger.error(f"βœ— Error creating repository: {e}")
65
- logger.error("Please create the repository manually or check your HF_TOKEN permissions")
66
-
67
  def load_matches():
68
- """Load matches from HuggingFace dataset repository."""
69
  global matches
70
  matches = []
71
 
72
  logger.info("=" * 60)
73
- logger.info("LOADING MATCHES FROM DATASET")
74
  logger.info("=" * 60)
75
 
76
  try:
77
- # List all files in the dataset repository
78
- logger.info(f"β†’ Connecting to dataset: {REPO_ID}")
79
- repo_files = api.list_repo_files(repo_id=REPO_ID, repo_type=REPO_TYPE)
80
- logger.info(f"βœ“ Connected to dataset repository")
81
-
82
- # Filter for files in the data directory
83
- data_files = [f for f in repo_files if f.startswith(f"{PATH_IN_REPO}/")]
84
- logger.info(f"β†’ Found {len(data_files)} files in {PATH_IN_REPO}/ directory")
85
-
86
- # Load deleted match IDs from dataset
87
- deleted_ids = set()
88
- deletion_files = [f for f in data_files if "deletions-" in f and f.endswith(".jsonl")]
89
- logger.info(f"β†’ Found {len(deletion_files)} deletion log files")
90
-
91
- for deletion_file_path in deletion_files:
92
- try:
93
- logger.info(f" β†’ Downloading: {deletion_file_path}")
94
- content = api.hf_hub_download(
95
- repo_id=REPO_ID,
96
- repo_type=REPO_TYPE,
97
- filename=deletion_file_path
98
- )
99
-
100
- deletion_count = 0
101
- with open(content, "r") as f:
102
- for line in f:
103
- if line.strip():
104
- record = json.loads(line)
105
- deleted_ids.add(record["match_id"])
106
- deletion_count += 1
107
-
108
- logger.info(f" βœ“ Loaded {deletion_count} deletions from {deletion_file_path}")
109
- except Exception as e:
110
- logger.error(f" βœ— Error loading deletion file {deletion_file_path}: {e}")
111
- continue
112
-
113
- logger.info(f"βœ“ Total deleted matches: {len(deleted_ids)}")
114
-
115
- # Load all match records from dataset
116
- # Use a dictionary to track the latest version of each match (for updates)
117
- match_files = [f for f in data_files if "matches-" in f and f.endswith(".jsonl")]
118
- logger.info(f"β†’ Found {len(match_files)} match files")
119
-
120
- match_dict = {} # match_id -> match_data
121
-
122
- for match_file_path in match_files:
123
- try:
124
- logger.info(f" β†’ Downloading: {match_file_path}")
125
- content = api.hf_hub_download(
126
- repo_id=REPO_ID,
127
- repo_type=REPO_TYPE,
128
- filename=match_file_path
129
- )
130
 
131
- match_count = 0
132
- with open(content, "r") as f:
133
- for line in f:
134
- if line.strip():
135
- record = json.loads(line)
136
- match_id = record["id"]
137
- # Skip deleted matches
138
- if match_id not in deleted_ids:
139
- # Store/update match - later updates will overwrite earlier versions
140
- match_dict[match_id] = [
141
- match_id,
142
- record["home"],
143
- record["away"],
144
- record["home_goals"],
145
- record["away_goals"],
146
- record.get("datetime", datetime.now(IST).isoformat()) # Default to now if missing
147
- ]
148
- match_count += 1
149
-
150
- logger.info(f" βœ“ Processed {match_count} match records from {match_file_path}")
151
- except Exception as e:
152
- logger.error(f" βœ— Error loading match file {match_file_path}: {e}")
153
- continue
154
-
155
- # Convert dictionary to list
156
- matches = list(match_dict.values())
157
- logger.info(f"βœ“ Successfully loaded {len(matches)} unique active matches from dataset")
158
  logger.info("=" * 60)
159
 
160
  except Exception as e:
161
- logger.error(f"βœ— Error accessing dataset repository: {e}")
162
 
163
  return matches
164
 
@@ -346,51 +248,38 @@ def add_match(home, away, home_goals, away_goals):
346
  "Error: Goals must be non-negative!"
347
  )
348
 
349
- # Add match with file lock for thread-safe writes
350
- match_id = str(uuid4())
351
- match_datetime = datetime.now(IST).isoformat()
352
- match_data = [match_id, home, away, int(home_goals), int(away_goals), match_datetime]
353
- matches.append(match_data)
354
-
355
  logger.info(f"β†’ Adding match: {home} {int(home_goals)} - {int(away_goals)} {away}")
356
- logger.info(f" Match ID: {match_id}")
357
- logger.info(f" Timestamp: {match_datetime}")
358
-
359
- # Persist to dataset
360
- with file_lock:
361
- with MATCHES_FILE.open("a") as f:
362
- record = {
363
- "id": match_id,
364
- "home": home,
365
- "away": away,
366
- "home_goals": int(home_goals),
367
- "away_goals": int(away_goals),
368
- "datetime": match_datetime
369
- }
370
- json.dump(record, f)
371
- f.write("\n")
372
-
373
- logger.info(f" βœ“ Written to local file: {MATCHES_FILE.name}")
374
-
375
- # Upload file immediately to HuggingFace
376
  try:
377
- logger.info(f" β†’ Uploading to dataset: {REPO_ID}")
378
- api.upload_file(
379
- path_or_fileobj=str(MATCHES_FILE),
380
- path_in_repo=f"{PATH_IN_REPO}/{MATCHES_FILE.name}",
381
- repo_id=REPO_ID,
382
- repo_type=REPO_TYPE,
383
- )
384
- logger.info(f" βœ“ Successfully uploaded to HuggingFace dataset")
385
- upload_status = " (uploaded to HF)"
 
 
 
 
 
 
 
 
 
 
 
 
386
  except Exception as e:
387
- logger.error(f" βœ— Error uploading match: {e}")
388
- upload_status = " (upload failed)"
389
 
390
  # Return updated tables and status
391
  league_table = calculate_table(matches)
392
  matches_table = get_matches_dataframe(matches)
393
- status = f"Match added: {home} {int(home_goals)} - {int(away_goals)} {away}{upload_status}"
394
 
395
  return league_table, matches_table, status
396
 
@@ -404,63 +293,45 @@ def delete_match(row_number):
404
  "Error: Please enter a valid row number!"
405
  )
406
 
407
- # Convert to 0-based index
 
408
  row_idx = int(row_number) - 1
409
 
410
- if row_idx >= len(matches) or row_idx < 0:
411
  return (
412
  calculate_table(matches),
413
  get_matches_dataframe(matches),
414
- f"Error: Row {int(row_number)} does not exist! Valid rows: 1-{len(matches)}"
415
  )
416
 
417
- # Get match details for logging
418
- match = matches[row_idx]
419
- match_id, h, a, gh, ga = match[0], match[1], match[2], match[3], match[4]
420
 
421
  logger.info(f"β†’ Deleting match row #{int(row_number)}: {h} {gh} - {ga} {a}")
422
  logger.info(f" Match ID: {match_id}")
423
 
424
- # Log deletion with file lock
425
- deletion_datetime = datetime.now(IST).isoformat()
426
- with file_lock:
427
- with DELETION_LOG_FILE.open("a") as f:
428
- deletion_record = {
429
- "match_id": match_id,
430
- "home": h,
431
- "away": a,
432
- "home_goals": gh,
433
- "away_goals": ga,
434
- "datetime": deletion_datetime
435
- }
436
- json.dump(deletion_record, f)
437
- f.write("\n")
438
-
439
- logger.info(f" βœ“ Written to deletion log: {DELETION_LOG_FILE.name}")
440
-
441
- # Upload deletion log immediately to HuggingFace
442
  try:
443
- logger.info(f" β†’ Uploading deletion log to dataset: {REPO_ID}")
444
- api.upload_file(
445
- path_or_fileobj=str(DELETION_LOG_FILE),
446
- path_in_repo=f"{PATH_IN_REPO}/{DELETION_LOG_FILE.name}",
447
- repo_id=REPO_ID,
448
- repo_type=REPO_TYPE,
449
- )
450
- logger.info(f" βœ“ Successfully uploaded deletion log to HuggingFace dataset")
451
- upload_status = " (uploaded to HF)"
452
- except Exception as e:
453
- logger.error(f" βœ— Error uploading deletion log: {e}")
454
- upload_status = " (upload failed)"
455
 
456
- # Remove match from in-memory list
457
- matches.pop(row_idx)
458
- logger.info(f" βœ“ Match removed from in-memory storage")
 
 
 
459
 
460
  # Return updated tables and status
461
  league_table = calculate_table(matches)
462
  matches_table = get_matches_dataframe(matches)
463
- status = f"Deleted row {int(row_number)}: {h} vs {a} ({gh}-{ga}){upload_status}"
464
 
465
  return league_table, matches_table, status
466
 
@@ -474,7 +345,7 @@ def update_match(row_number, new_home, new_away, new_home_goals, new_away_goals)
474
  "Error: Please enter a valid row number!"
475
  )
476
 
477
- # Convert to 0-based index (note: we need to work with sorted matches)
478
  sorted_matches = sorted(matches, key=lambda x: x[5], reverse=True)
479
  row_idx = int(row_number) - 1
480
 
@@ -521,70 +392,51 @@ def update_match(row_number, new_home, new_away, new_home_goals, new_away_goals)
521
  logger.info(f" Old: {old_home} {old_home_goals} - {old_away_goals} {old_away}")
522
  logger.info(f" New: {new_home} {int(new_home_goals)} - {int(new_away_goals)} {new_away}")
523
 
524
- # Find and update the match in the main matches list
525
- for i, match in enumerate(matches):
526
- if match[0] == match_id:
527
- # Update the match in-place
528
- matches[i][1] = new_home
529
- matches[i][2] = new_away
530
- matches[i][3] = int(new_home_goals)
531
- matches[i][4] = int(new_away_goals)
532
- # Keep the original timestamp but add update timestamp to the record
533
- break
534
-
535
- # Create new JSONL file for the update
536
- update_file = DATASET_DIR / f"matches-{uuid4()}.jsonl"
537
- update_datetime = datetime.now(IST).isoformat()
538
-
539
- with file_lock:
540
- with update_file.open("a") as f:
541
- record = {
542
- "id": match_id, # Keep the same ID
543
- "home": new_home,
544
- "away": new_away,
545
- "home_goals": int(new_home_goals),
546
- "away_goals": int(new_away_goals),
547
- "datetime": matches[i][5], # Keep original datetime
548
- "updated_at": update_datetime # Add update timestamp
549
- }
550
- json.dump(record, f)
551
- f.write("\n")
552
-
553
- logger.info(f" βœ“ Written update to local file: {update_file.name}")
554
-
555
- # Upload update file immediately to HuggingFace
556
  try:
557
- logger.info(f" β†’ Uploading update to dataset: {REPO_ID}")
558
- api.upload_file(
559
- path_or_fileobj=str(update_file),
560
- path_in_repo=f"{PATH_IN_REPO}/{update_file.name}",
561
- repo_id=REPO_ID,
562
- repo_type=REPO_TYPE,
563
- )
564
- logger.info(f" βœ“ Successfully uploaded update to HuggingFace dataset")
565
- upload_status = " (uploaded to HF)"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
566
  except Exception as e:
567
- logger.error(f" βœ— Error uploading update: {e}")
568
- upload_status = " (upload failed)"
569
 
570
  # Return updated tables and status
571
  league_table = calculate_table(matches)
572
  matches_table = get_matches_dataframe(matches)
573
- status = f"Updated row {int(row_number)}: {new_home} {int(new_home_goals)} - {int(new_away_goals)} {new_away}{upload_status}"
574
 
575
  return league_table, matches_table, status
576
 
577
 
578
  def build_interface():
579
  """Build and return the Gradio interface."""
580
- # Ensure HuggingFace repository exists
581
- ensure_repo_exists()
582
-
583
- # Load initial data
584
  load_matches()
585
 
586
  def refresh_data():
587
- """Reload matches from HuggingFace and return updated tables."""
588
  load_matches()
589
  return (
590
  calculate_table(matches),
 
1
  """League Table Manager - Interactive Gradio Interface"""
2
+ import os
 
3
  import logging
4
  from datetime import datetime, timedelta, timezone
 
 
5
  import pandas as pd
6
  import gradio as gr
7
+ from supabase import create_client, Client
8
 
9
  # Configure logging
10
  logging.basicConfig(
 
19
  # IST timezone (UTC+5:30)
20
  IST = timezone(timedelta(hours=5, minutes=30))
21
 
22
+ # Supabase configuration
23
+ SUPABASE_URL = os.environ.get("SUPABASE_URL", "https://ichhsthxaegexeogolzz.supabase.co")
24
+ SUPABASE_KEY = os.environ.get("SUPABASE_KEY", "")
25
 
26
+ # Initialize Supabase client
27
+ supabase: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
 
 
28
 
29
+ # Global matches storage (in-memory cache)
 
 
 
 
 
 
 
 
 
30
  matches = []
31
 
32
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  def load_matches():
34
+ """Load matches from Supabase database."""
35
  global matches
36
  matches = []
37
 
38
  logger.info("=" * 60)
39
+ logger.info("LOADING MATCHES FROM SUPABASE")
40
  logger.info("=" * 60)
41
 
42
  try:
43
+ logger.info(f"β†’ Connecting to Supabase: {SUPABASE_URL}")
44
+ response = supabase.table("matches").select("*").order("datetime", desc=True).execute()
45
+
46
+ if response.data:
47
+ for record in response.data:
48
+ matches.append([
49
+ str(record["id"]),
50
+ record["home"],
51
+ record["away"],
52
+ record["home_goals"],
53
+ record["away_goals"],
54
+ record["datetime"]
55
+ ])
56
+ logger.info(f"βœ“ Successfully loaded {len(matches)} matches from Supabase")
57
+ else:
58
+ logger.info("βœ“ No matches found in database")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
59
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
60
  logger.info("=" * 60)
61
 
62
  except Exception as e:
63
+ logger.error(f"βœ— Error accessing Supabase: {e}")
64
 
65
  return matches
66
 
 
248
  "Error: Goals must be non-negative!"
249
  )
250
 
 
 
 
 
 
 
251
  logger.info(f"β†’ Adding match: {home} {int(home_goals)} - {int(away_goals)} {away}")
252
+
253
+ # Insert into Supabase
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
254
  try:
255
+ match_datetime = datetime.now(IST).isoformat()
256
+ response = supabase.table("matches").insert({
257
+ "home": home,
258
+ "away": away,
259
+ "home_goals": int(home_goals),
260
+ "away_goals": int(away_goals),
261
+ "datetime": match_datetime
262
+ }).execute()
263
+
264
+ if response.data:
265
+ record = response.data[0]
266
+ match_id = str(record["id"])
267
+ match_data = [match_id, home, away, int(home_goals), int(away_goals), record["datetime"]]
268
+ matches.append(match_data)
269
+ logger.info(f" βœ“ Match ID: {match_id}")
270
+ logger.info(f" βœ“ Successfully saved to Supabase")
271
+ status = f"Match added: {home} {int(home_goals)} - {int(away_goals)} {away}"
272
+ else:
273
+ logger.error(" βœ— No data returned from insert")
274
+ status = f"Match added locally but Supabase returned no data"
275
+
276
  except Exception as e:
277
+ logger.error(f" βœ— Error saving to Supabase: {e}")
278
+ status = f"Error: Failed to save match - {e}"
279
 
280
  # Return updated tables and status
281
  league_table = calculate_table(matches)
282
  matches_table = get_matches_dataframe(matches)
 
283
 
284
  return league_table, matches_table, status
285
 
 
293
  "Error: Please enter a valid row number!"
294
  )
295
 
296
+ # Sort matches by datetime (most recent first) to match displayed order
297
+ sorted_matches = sorted(matches, key=lambda x: x[5], reverse=True)
298
  row_idx = int(row_number) - 1
299
 
300
+ if row_idx >= len(sorted_matches) or row_idx < 0:
301
  return (
302
  calculate_table(matches),
303
  get_matches_dataframe(matches),
304
+ f"Error: Row {int(row_number)} does not exist! Valid rows: 1-{len(sorted_matches)}"
305
  )
306
 
307
+ # Get match details
308
+ sorted_match = sorted_matches[row_idx]
309
+ match_id, h, a, gh, ga = sorted_match[0], sorted_match[1], sorted_match[2], sorted_match[3], sorted_match[4]
310
 
311
  logger.info(f"β†’ Deleting match row #{int(row_number)}: {h} {gh} - {ga} {a}")
312
  logger.info(f" Match ID: {match_id}")
313
 
314
+ # Delete from Supabase
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
315
  try:
316
+ response = supabase.table("matches").delete().eq("id", match_id).execute()
317
+ logger.info(f" βœ“ Successfully deleted from Supabase")
318
+
319
+ # Remove from in-memory list
320
+ for i, match in enumerate(matches):
321
+ if match[0] == match_id:
322
+ matches.pop(i)
323
+ break
 
 
 
 
324
 
325
+ logger.info(f" βœ“ Match removed from in-memory storage")
326
+ status = f"Deleted row {int(row_number)}: {h} vs {a} ({gh}-{ga})"
327
+
328
+ except Exception as e:
329
+ logger.error(f" βœ— Error deleting from Supabase: {e}")
330
+ status = f"Error: Failed to delete match - {e}"
331
 
332
  # Return updated tables and status
333
  league_table = calculate_table(matches)
334
  matches_table = get_matches_dataframe(matches)
 
335
 
336
  return league_table, matches_table, status
337
 
 
345
  "Error: Please enter a valid row number!"
346
  )
347
 
348
+ # Sort matches by datetime (most recent first) to match displayed order
349
  sorted_matches = sorted(matches, key=lambda x: x[5], reverse=True)
350
  row_idx = int(row_number) - 1
351
 
 
392
  logger.info(f" Old: {old_home} {old_home_goals} - {old_away_goals} {old_away}")
393
  logger.info(f" New: {new_home} {int(new_home_goals)} - {int(new_away_goals)} {new_away}")
394
 
395
+ # Update in Supabase
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
396
  try:
397
+ update_datetime = datetime.now(IST).isoformat()
398
+ response = supabase.table("matches").update({
399
+ "home": new_home,
400
+ "away": new_away,
401
+ "home_goals": int(new_home_goals),
402
+ "away_goals": int(new_away_goals),
403
+ "updated_at": update_datetime
404
+ }).eq("id", match_id).execute()
405
+
406
+ if response.data:
407
+ # Update in-memory cache
408
+ for i, match in enumerate(matches):
409
+ if match[0] == match_id:
410
+ matches[i][1] = new_home
411
+ matches[i][2] = new_away
412
+ matches[i][3] = int(new_home_goals)
413
+ matches[i][4] = int(new_away_goals)
414
+ break
415
+
416
+ logger.info(f" βœ“ Successfully updated in Supabase")
417
+ status = f"Updated row {int(row_number)}: {new_home} {int(new_home_goals)} - {int(new_away_goals)} {new_away}"
418
+ else:
419
+ logger.error(" βœ— No data returned from update")
420
+ status = f"Error: Update returned no data"
421
+
422
  except Exception as e:
423
+ logger.error(f" βœ— Error updating in Supabase: {e}")
424
+ status = f"Error: Failed to update match - {e}"
425
 
426
  # Return updated tables and status
427
  league_table = calculate_table(matches)
428
  matches_table = get_matches_dataframe(matches)
 
429
 
430
  return league_table, matches_table, status
431
 
432
 
433
  def build_interface():
434
  """Build and return the Gradio interface."""
435
+ # Load initial data from Supabase
 
 
 
436
  load_matches()
437
 
438
  def refresh_data():
439
+ """Reload matches from Supabase and return updated tables."""
440
  load_matches()
441
  return (
442
  calculate_table(matches),
pyproject.toml CHANGED
@@ -7,4 +7,5 @@ requires-python = ">=3.12"
7
  dependencies = [
8
  "gradio>=6.2.0",
9
  "huggingface-hub>=1.2.3",
 
10
  ]
 
7
  dependencies = [
8
  "gradio>=6.2.0",
9
  "huggingface-hub>=1.2.3",
10
+ "supabase>=2.27.0",
11
  ]
supabase-db-migration-rpi.md ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Research: Supabase for League Table Manager Data Storage
2
+
3
+ ## Current Storage Architecture
4
+
5
+ ### Implementation Details
6
+
7
+ The application currently uses a **hybrid storage strategy**:
8
+
9
+ - **Local Storage**: JSONL (JSON Lines) files in `/league_data/` directory
10
+ - **Cloud Sync**: HuggingFace Dataset Repository (`asvs/league-table-data`)
11
+ - **Upload Pattern**: Immediate synchronous uploads after every CRUD operation
12
+ - **In-Memory Cache**: Python list maintains current state for calculations
13
+
14
+ ### Data Models
15
+
16
+ **Match Records** (JSONL format):
17
+ ```json
18
+ {
19
+ "id": "uuid-string",
20
+ "home": "team-name",
21
+ "away": "team-name",
22
+ "home_goals": 0-9,
23
+ "away_goals": 0-9,
24
+ "datetime": "ISO-8601-timestamp",
25
+ "updated_at": "ISO-8601-timestamp (optional)"
26
+ }
27
+ ```
28
+
29
+ **Deletion Logs** (JSONL format):
30
+ ```json
31
+ {
32
+ "match_id": "uuid-string",
33
+ "home": "team-name",
34
+ "away": "team-name",
35
+ "home_goals": 0-9,
36
+ "away_goals": 0-9,
37
+ "datetime": "ISO-8601-timestamp"
38
+ }
39
+ ```
40
+
41
+ ### Current Architecture Flow
42
+
43
+ ```
44
+ User Input β†’ In-Memory List β†’ Local JSONL File β†’ HuggingFace Upload
45
+ ↓
46
+ Calculations (League Table, H2H Stats)
47
+ ```
48
+
49
+ ---
50
+
51
+ ## Supabase Python Integration
52
+
53
+ **Project Details:**
54
+ - Project Name: fc25
55
+ - Project URL: `https://ichhsthxaegexeogolzz.supabase.co`
56
+ - Current State: Empty (no tables)
57
+ - Installed Extensions: `uuid-ossp`, `pgcrypto`, `pg_graphql`
58
+
59
+ ---
60
+
61
+ ## Implementation Steps
62
+
63
+ ### 1. Create Database Schema in Supabase
64
+
65
+ Create `matches` table with the following structure:
66
+ ```sql
67
+ CREATE TABLE matches (
68
+ id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
69
+ home TEXT NOT NULL,
70
+ away TEXT NOT NULL,
71
+ home_goals INTEGER NOT NULL CHECK (home_goals >= 0 AND home_goals <= 9),
72
+ away_goals INTEGER NOT NULL CHECK (away_goals >= 0 AND away_goals <= 9),
73
+ datetime TIMESTAMPTZ NOT NULL DEFAULT NOW(),
74
+ updated_at TIMESTAMPTZ,
75
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
76
+ );
77
+
78
+ -- Add constraint to prevent same team playing itself
79
+ ALTER TABLE matches ADD CONSTRAINT different_teams CHECK (home <> away);
80
+
81
+ -- Create index for faster queries on datetime
82
+ CREATE INDEX idx_matches_datetime ON matches(datetime DESC);
83
+ ```
84
+
85
+ ### 2. Enable Row Level Security (RLS)
86
+
87
+ ```sql
88
+ -- Enable RLS on matches table
89
+ ALTER TABLE matches ENABLE ROW LEVEL SECURITY;
90
+
91
+ -- Create policy for public read access (adjust as needed)
92
+ CREATE POLICY "Allow public read access" ON matches FOR SELECT USING (true);
93
+
94
+ -- Policy for public insert/update/delete: anyone may perform these actions. A record of updates and deletions must be maintained — at minimum a timestamped activity log.
95
+ ```
96
+
97
+ ### 3. Add supabase-py Package
98
+
99
+ ```bash
100
+ uv add supabase
101
+ ```
102
+
103
+ ### 4. Create Supabase Client Configuration
104
+
105
+ Create environment variables for Supabase credentials:
106
+ - `SUPABASE_URL`: https://ichhsthxaegexeogolzz.supabase.co
107
+ - `SUPABASE_KEY`: (anon/public key from Supabase dashboard)
108
+
109
+ Initialize client:
110
+ ```python
111
+ from supabase import create_client, Client
112
+ import os
113
+
114
+ url = os.environ.get("SUPABASE_URL")
115
+ key = os.environ.get("SUPABASE_KEY")
116
+ supabase: Client = create_client(url, key)
117
+ ```
118
+
119
+ ### 5. Replace CRUD Operations in app.py
120
+
121
+ **Insert Match (replace `add_match` logic):**
122
+ ```python
123
+ response = supabase.table("matches").insert({
124
+ "home": home,
125
+ "away": away,
126
+ "home_goals": int(home_goals),
127
+ "away_goals": int(away_goals)
128
+ }).execute()
129
+ ```
130
+
131
+ **Select All Matches (replace `load_matches` logic):**
132
+ ```python
133
+ response = supabase.table("matches").select("*").order("datetime", desc=True).execute()
134
+ matches = response.data
135
+ ```
136
+
137
+ **Update Match (replace `update_match` logic):**
138
+ ```python
139
+ response = supabase.table("matches").update({
140
+ "home": new_home,
141
+ "away": new_away,
142
+ "home_goals": int(new_home_goals),
143
+ "away_goals": int(new_away_goals),
144
+ "updated_at": datetime.now(IST).isoformat()
145
+ }).eq("id", match_id).execute()
146
+ ```
147
+
148
+ **Delete Match (replace `delete_match` logic - actual delete, no soft delete needed):**
149
+ ```python
150
+ response = supabase.table("matches").delete().eq("id", match_id).execute()
151
+ ```
152
+
153
+ ### 6. Migrate Existing Data
154
+ Current matches:
155
+ Seelam Akhil 5 3
156
+ Seelam Kartheek 4 4
157
+ Shiva Akhil 1 6
158
+ Shiva Kartheek 8 3
159
+ Shiva Kartheek 4 1
160
+ Seelam Kartheek 5 1
161
+ Seelam Kartheek 1 6
162
+ Akhil Kartheek 1 5
163
+ Shiva Akhil 3 1
164
+ Shiva Akhil 3 3
165
+ Seelam Kartheek 1 3
166
+ Seelam Akhil 2 4
167
+ Seelam Kartheek 2 1
168
+ Ashwik Shiva 2 1
169
+ Kartheek Seelam 4 5
170
+ Kartheek Akhil 3 1
171
+ Kartheek Seelam 5 1
172
+ Kartheek Seelam 2 4
173
+ Kartheek Seelam 4 5
174
+ Kartheek Seelam 2 4
175
+ Seelam Kartheek 3 7
176
+ Seelam Kartheek 7 4
177
+ Seelam Kartheek 3 3
178
+ Akhil Kartheek 0 6
179
+ Seelam Kartheek 2 2
180
+ Kartheek Seelam 8 5
181
+ Akhil Kane 10 2
182
+ Seelam Kane 5 6
183
+ Kartheek Seelam 0 9
184
+ Kane Kartheek 5 1
185
+ Kane Akhil 8 2
186
+ Ashwik Akhil 4 3
187
+ Seelam Kartheek 1 5
188
+
189
+
190
+ ### 7. Remove HuggingFace Storage Code
191
+
192
+ Remove the following from `app.py`:
193
+ - `HfApi` import and initialization
194
+ - `REPO_ID`, `REPO_TYPE`, `PATH_IN_REPO` constants
195
+ - `MATCHES_FILE`, `DELETION_LOG_FILE` paths
196
+ - `ensure_repo_exists()` function
197
+ - `file_lock` threading lock
198
+ - All `api.upload_file()` calls
199
+ - All local JSONL file read/write operations
200
+
201
+ ### 8. Simplify In-Memory Cache
202
+
203
+ The in-memory `matches` list can be replaced with direct Supabase queries, or kept as a cache that syncs on:
204
+ - App startup (load from Supabase)
205
+ - After each CRUD operation (refresh from Supabase or update locally)
206
+
207
+ ### 9. Update Error Handling
208
+
209
+ Replace HuggingFace error handling with Supabase-specific handling:
210
+ ```python
211
+ try:
212
+ response = supabase.table("matches").insert({...}).execute()
213
+ if response.data:
214
+ # Success
215
+ except Exception as e:
216
+ logger.error(f"Supabase error: {e}")
217
+ ```
218
+
219
+ ### 10. Test All CRUD Operations
220
+
221
+ Verify:
222
+ - Add match β†’ appears in Supabase table and UI
223
+ - Update match β†’ changes reflected in both
224
+ - Delete match β†’ removed from Supabase and UI
225
+ - Load matches β†’ correctly fetches all records on app startup
226
+ - Head-to-head stats β†’ work with Supabase data
227
+
228
+ ---
uv.lock CHANGED
The diff for this file is too large to render. See raw diff