Spaces:
Sleeping
Sleeping
Commit
·
1dba95a
1
Parent(s):
a8e6a5d
new changes
Browse files
app.py
CHANGED
|
@@ -5,6 +5,7 @@ import uuid
|
|
| 5 |
from datetime import datetime
|
| 6 |
from pathlib import Path
|
| 7 |
import fcntl
|
|
|
|
| 8 |
import pandas as pd
|
| 9 |
import pytz
|
| 10 |
import streamlit as st
|
|
@@ -59,6 +60,61 @@ scheduler = CommitScheduler(
|
|
| 59 |
)
|
| 60 |
|
| 61 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
def load_data(file_path):
|
| 63 |
"""
|
| 64 |
Load data from a JSON or CSV file with better error handling.
|
|
@@ -225,28 +281,25 @@ def calculate_max_bid_points(user_name):
|
|
| 225 |
|
| 226 |
|
| 227 |
def load_users(users_json_path):
|
| 228 |
-
"""Load users data with
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 229 |
try:
|
| 230 |
-
|
| 231 |
-
|
|
|
|
|
|
|
| 232 |
|
| 233 |
-
|
| 234 |
-
|
| 235 |
-
|
| 236 |
-
|
| 237 |
-
data = json.load(f)
|
| 238 |
-
# Validate data structure
|
| 239 |
-
if not all(isinstance(v, dict) and 'points' in v for v in data.values()):
|
| 240 |
-
st.error("Invalid users data structure")
|
| 241 |
-
return {}
|
| 242 |
-
return data
|
| 243 |
-
except json.JSONDecodeError:
|
| 244 |
-
st.error("Invalid JSON in users file")
|
| 245 |
-
return {}
|
| 246 |
-
finally:
|
| 247 |
-
fcntl.flock(f, fcntl.LOCK_UN)
|
| 248 |
except Exception as e:
|
| 249 |
-
st.error(f"Error loading
|
| 250 |
return {}
|
| 251 |
|
| 252 |
|
|
@@ -490,16 +543,12 @@ def load_and_process_users():
|
|
| 490 |
|
| 491 |
def update_leaderboard_and_outcomes(match_id, winning_team, man_of_the_match, outcome_only=False):
|
| 492 |
try:
|
| 493 |
-
#
|
| 494 |
-
users_dict =
|
|
|
|
|
|
|
|
|
|
| 495 |
|
| 496 |
-
# Rest of your existing code for updating outcomes...
|
| 497 |
-
features = Features({
|
| 498 |
-
'match_id': Value('string'),
|
| 499 |
-
'man_of_the_match': Value('string'),
|
| 500 |
-
'winning_team': Value('string'),
|
| 501 |
-
})
|
| 502 |
-
|
| 503 |
# Load existing match outcomes
|
| 504 |
outcomes = load_dataset("Jay-Rajput/DIS_IPL_Outcomes", split="train")
|
| 505 |
outcomes_df = pd.DataFrame(outcomes)
|
|
@@ -552,18 +601,22 @@ def update_leaderboard_and_outcomes(match_id, winning_team, man_of_the_match, ou
|
|
| 552 |
user_data["last_5_results"].insert(0, result_indicator)
|
| 553 |
user_data["last_5_results"] = user_data["last_5_results"][:5]
|
| 554 |
|
| 555 |
-
# Save updated users data
|
| 556 |
-
|
| 557 |
-
|
|
|
|
| 558 |
|
| 559 |
-
#
|
| 560 |
-
|
| 561 |
-
|
|
|
|
|
|
|
|
|
|
| 562 |
|
| 563 |
# Save outcomes
|
| 564 |
outcomes.to_json(OUTCOMES)
|
| 565 |
outcomes.push_to_hub("Jay-Rajput/DIS_IPL_Outcomes", split="train")
|
| 566 |
-
|
| 567 |
return True
|
| 568 |
except Exception as e:
|
| 569 |
st.error(f"Error updating leaderboard: {e}")
|
|
@@ -619,15 +672,19 @@ with st.sidebar:
|
|
| 619 |
outcome_only = expander.checkbox("Submit Outcome Only", key="outcome_only_checkbox")
|
| 620 |
|
| 621 |
if expander.button("Submit Match Outcome", key="submit_outcome"):
|
| 622 |
-
|
| 623 |
-
|
| 624 |
-
|
| 625 |
-
|
| 626 |
-
|
| 627 |
-
|
| 628 |
-
|
| 629 |
-
|
| 630 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 631 |
else:
|
| 632 |
expander.write("No matches available for the selected date.")
|
| 633 |
else:
|
|
|
|
| 5 |
from datetime import datetime
|
| 6 |
from pathlib import Path
|
| 7 |
import fcntl
|
| 8 |
+
import tempfile
|
| 9 |
import pandas as pd
|
| 10 |
import pytz
|
| 11 |
import streamlit as st
|
|
|
|
| 60 |
)
|
| 61 |
|
| 62 |
|
| 63 |
+
def safe_write_json(data, file_path):
    """Safely write JSON data to *file_path* using an atomic write pattern.

    The payload is serialized to a temporary file in the destination
    directory, flushed and fsync'ed, then moved into place with
    ``os.replace`` so concurrent readers never see a half-written file.

    Args:
        data: JSON-serializable object (in this app, a dict of user records).
        file_path: Destination path (str or Path); parent dirs are created.

    Returns:
        True on success, False on any failure (the error is also surfaced
        via ``st.error``).
    """
    tmp_path = None  # track the temp file so cleanup is safe even if we fail early
    try:
        file_path = Path(file_path)
        file_path.parent.mkdir(parents=True, exist_ok=True)

        # Write to a temporary file in the SAME directory so os.replace
        # stays on one filesystem and remains atomic.
        with tempfile.NamedTemporaryFile(
            mode='w',
            encoding='utf-8',  # explicit: ensure_ascii=False emits non-ASCII text
            dir=file_path.parent,
            prefix=file_path.stem,
            suffix='.tmp',
            delete=False
        ) as tmp_file:
            tmp_path = tmp_file.name
            json.dump(data, tmp_file, ensure_ascii=False, indent=4)
            tmp_file.flush()
            os.fsync(tmp_file.fileno())  # force bytes to disk before the rename

        # Atomic rename over the destination.
        os.replace(tmp_path, file_path)
        return True
    except Exception as e:
        st.error(f"Error writing JSON file: {e}")
        # Best-effort cleanup of the leftover temp file, only if one was
        # actually created (avoids NameError when the failure happened
        # before NamedTemporaryFile ran).
        if tmp_path is not None:
            try:
                os.unlink(tmp_path)
            except OSError:
                pass
        return False
|
| 91 |
+
|
| 92 |
+
def safe_load_json(file_path):
    """Read a JSON dictionary from *file_path* under a shared file lock.

    Returns the parsed dict, or {} when the file is missing, contains
    invalid JSON, is not a dict at the top level, or cannot be read
    (errors are reported through ``st.error``).
    """
    try:
        path = Path(file_path)
        if not path.exists():
            return {}

        with open(path, 'r', encoding='utf-8') as handle:
            # Shared lock: allow concurrent readers while blocking writers.
            fcntl.flock(handle, fcntl.LOCK_SH)
            try:
                payload = json.load(handle)
            except json.JSONDecodeError:
                st.error("Invalid JSON file - contains syntax errors")
                return {}
            finally:
                fcntl.flock(handle, fcntl.LOCK_UN)

        # Basic shape validation: callers expect a mapping.
        if isinstance(payload, dict):
            return payload
        st.error("Invalid JSON structure - expected dictionary")
        return {}
    except Exception as e:
        st.error(f"Error loading JSON file: {e}")
        return {}
|
| 116 |
+
|
| 117 |
+
|
| 118 |
def load_data(file_path):
|
| 119 |
"""
|
| 120 |
Load data from a JSON or CSV file with better error handling.
|
|
|
|
| 281 |
|
| 282 |
|
| 283 |
def load_users(users_json_path):
    """Load users data with automatic fallback to Hugging Face.

    Prefers the local JSON file; when that is empty or unreadable, pulls
    the "Jay-Rajput/DIS_IPL_Leads" dataset from the Hub, caches it back to
    disk for the next call, and returns it. Returns {} if both fail.
    """
    # Local cache first — cheapest and works offline.
    cached = safe_load_json(users_json_path)
    if cached:
        return cached

    # Local file unavailable: fall back to the Hugging Face dataset.
    try:
        dataset = load_dataset("Jay-Rajput/DIS_IPL_Leads", split="train")
        # Each column value is a single-row list; unwrap it per user.
        users_dict = {name: rows[0] for name, rows in dataset.to_dict().items()}

        # Persist for next time; only trust the data if the save succeeds.
        if users_dict and safe_write_json(users_dict, users_json_path):
            return users_dict
        return {}
    except Exception as e:
        st.error(f"Error loading from Hugging Face: {e}")
        return {}
|
| 304 |
|
| 305 |
|
|
|
|
| 543 |
|
| 544 |
def update_leaderboard_and_outcomes(match_id, winning_team, man_of_the_match, outcome_only=False):
|
| 545 |
try:
|
| 546 |
+
# Load current data
|
| 547 |
+
users_dict = load_users(USERS_JSON)
|
| 548 |
+
if not users_dict:
|
| 549 |
+
st.error("Failed to load users data")
|
| 550 |
+
return False
|
| 551 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 552 |
# Load existing match outcomes
|
| 553 |
outcomes = load_dataset("Jay-Rajput/DIS_IPL_Outcomes", split="train")
|
| 554 |
outcomes_df = pd.DataFrame(outcomes)
|
|
|
|
| 601 |
user_data["last_5_results"].insert(0, result_indicator)
|
| 602 |
user_data["last_5_results"] = user_data["last_5_results"][:5]
|
| 603 |
|
| 604 |
+
# Save updated users data
|
| 605 |
+
if not safe_write_json(users_dict, USERS_JSON):
|
| 606 |
+
st.error("Failed to save updated users data")
|
| 607 |
+
return False
|
| 608 |
|
| 609 |
+
# Push to Hugging Face
|
| 610 |
+
try:
|
| 611 |
+
users_dataset = Dataset.from_dict({k: [v] for k, v in users_dict.items()})
|
| 612 |
+
users_dataset.push_to_hub("Jay-Rajput/DIS_IPL_Leads")
|
| 613 |
+
except Exception as e:
|
| 614 |
+
st.error(f"Failed to update Hugging Face: {e}")
|
| 615 |
|
| 616 |
# Save outcomes
|
| 617 |
outcomes.to_json(OUTCOMES)
|
| 618 |
outcomes.push_to_hub("Jay-Rajput/DIS_IPL_Outcomes", split="train")
|
| 619 |
+
st.success("Match outcome submitted and leaderboard updated!")
|
| 620 |
return True
|
| 621 |
except Exception as e:
|
| 622 |
st.error(f"Error updating leaderboard: {e}")
|
|
|
|
| 672 |
outcome_only = expander.checkbox("Submit Outcome Only", key="outcome_only_checkbox")
|
| 673 |
|
| 674 |
if expander.button("Submit Match Outcome", key="submit_outcome"):
|
| 675 |
+
with st.spinner("Processing match outcome..."):
|
| 676 |
+
if update_leaderboard_and_outcomes(
|
| 677 |
+
selected_match_id,
|
| 678 |
+
winning_team,
|
| 679 |
+
man_of_the_match,
|
| 680 |
+
outcome_only
|
| 681 |
+
):
|
| 682 |
+
# Clear any previous errors
|
| 683 |
+
st.success("Successfully updated match results and leaderboard!")
|
| 684 |
+
# Force refresh the UI
|
| 685 |
+
st.experimental_rerun()
|
| 686 |
+
else:
|
| 687 |
+
st.error("Failed to update match results")
|
| 688 |
else:
|
| 689 |
expander.write("No matches available for the selected date.")
|
| 690 |
else:
|