Spaces:
Build error
Build error
James McCool committed on
Commit ·
212b005
1
Parent(s): 9ef68b1
Instituting a cache for the contest IDs
Browse files- app.py +102 -31
- global_func/contest_lobby_cache.py +114 -0
app.py
CHANGED
|
@@ -16,6 +16,12 @@ from global_func.predict_dupes import predict_dupes
|
|
| 16 |
from global_func.highlight_rows import highlight_changes, highlight_changes_winners, highlight_changes_losers
|
| 17 |
from global_func.load_csv import load_csv
|
| 18 |
from global_func.contest_pricing_api import fetch_contests_for_selection, fetch_pricing_for_contest
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
from global_func.find_csv_mismatches import find_csv_mismatches
|
| 20 |
from global_func.trim_portfolio import trim_portfolio
|
| 21 |
from global_func.get_portfolio_names import get_portfolio_names
|
|
@@ -1477,18 +1483,64 @@ if selected_tab == 'Data Load':
|
|
| 1477 |
|
| 1478 |
upload_csv_col, csv_template_col = st.columns([3, 1])
|
| 1479 |
if pricing_source == 'Paydirt DB':
|
| 1480 |
-
contest_state_key = f"{sport_var}|{type_var}"
|
| 1481 |
if st.session_state.get('pricing_contest_key') != contest_state_key:
|
| 1482 |
-
st.session_state['pricing_contests'] = []
|
| 1483 |
-
st.session_state['selected_pricing_contest'] = None
|
| 1484 |
st.session_state['pricing_contest_key'] = contest_state_key
|
| 1485 |
-
|
| 1486 |
-
|
| 1487 |
-
|
| 1488 |
-
|
| 1489 |
-
except Exception as e:
|
| 1490 |
-
st.error(f"Could not fetch contests: {e}")
|
| 1491 |
st.session_state['pricing_contests'] = []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1492 |
|
| 1493 |
contest_options = st.session_state.get('pricing_contests', [])
|
| 1494 |
if contest_options:
|
|
@@ -1497,33 +1549,32 @@ if selected_tab == 'Data Load':
|
|
| 1497 |
f"{c.get('game_type', '')} | ${int(c.get('prize_pool', 0)):,.0f}"
|
| 1498 |
for c in contest_options
|
| 1499 |
]
|
| 1500 |
-
|
|
|
|
| 1501 |
selected_label = st.selectbox(
|
| 1502 |
"Select Contest (searchable)",
|
| 1503 |
-
options=
|
| 1504 |
key="selected_pricing_contest",
|
| 1505 |
-
help="Type to search contest names."
|
| 1506 |
)
|
| 1507 |
-
|
| 1508 |
-
|
| 1509 |
-
|
| 1510 |
-
|
| 1511 |
-
|
| 1512 |
-
|
| 1513 |
-
|
| 1514 |
-
|
| 1515 |
-
|
| 1516 |
-
|
| 1517 |
-
|
| 1518 |
-
|
| 1519 |
-
|
| 1520 |
-
pricing_df = fetch_pricing_for_contest(selected_contest['contest_id'])
|
| 1521 |
-
st.session_state['csv_file'] = load_csv(pricing_df)
|
| 1522 |
-
st.session_state['pricing_loaded'] = True
|
| 1523 |
-
except Exception as e:
|
| 1524 |
-
st.error(f"Could not load contest pricing: {e}")
|
| 1525 |
else:
|
| 1526 |
-
|
|
|
|
|
|
|
|
|
|
| 1527 |
|
| 1528 |
try:
|
| 1529 |
st.session_state['csv_file']['Salary'] = st.session_state['csv_file']['Salary'].astype(str).str.replace(',', '').astype(int)
|
|
@@ -1553,6 +1604,26 @@ if selected_tab == 'Data Load':
|
|
| 1553 |
st.session_state['csv_file']['Salary'] = st.session_state['csv_file']['Salary'].astype(str).str.replace(',', '').astype(int)
|
| 1554 |
except:
|
| 1555 |
pass
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1556 |
|
| 1557 |
if st.session_state['pricing_loaded']:
|
| 1558 |
if type_var == 'Showdown':
|
|
|
|
| 16 |
from global_func.highlight_rows import highlight_changes, highlight_changes_winners, highlight_changes_losers
|
| 17 |
from global_func.load_csv import load_csv
|
| 18 |
from global_func.contest_pricing_api import fetch_contests_for_selection, fetch_pricing_for_contest
|
| 19 |
+
from global_func.contest_lobby_cache import (
|
| 20 |
+
cache_has_contests,
|
| 21 |
+
get_cached_contests,
|
| 22 |
+
parse_fd_contest_id_csv,
|
| 23 |
+
save_contests_cache,
|
| 24 |
+
)
|
| 25 |
from global_func.find_csv_mismatches import find_csv_mismatches
|
| 26 |
from global_func.trim_portfolio import trim_portfolio
|
| 27 |
from global_func.get_portfolio_names import get_portfolio_names
|
|
|
|
| 1483 |
|
| 1484 |
upload_csv_col, csv_template_col = st.columns([3, 1])
|
| 1485 |
if pricing_source == 'Paydirt DB':
|
| 1486 |
+
contest_state_key = f"{site_var}|{sport_var}|{type_var}"
|
| 1487 |
if st.session_state.get('pricing_contest_key') != contest_state_key:
|
|
|
|
|
|
|
| 1488 |
st.session_state['pricing_contest_key'] = contest_state_key
|
| 1489 |
+
cached = get_cached_contests(salaries_db, site_var, sport_var, type_var)
|
| 1490 |
+
if cached is not None:
|
| 1491 |
+
st.session_state['pricing_contests'] = cached
|
| 1492 |
+
else:
|
|
|
|
|
|
|
| 1493 |
st.session_state['pricing_contests'] = []
|
| 1494 |
+
st.session_state['selected_pricing_contest'] = None
|
| 1495 |
+
|
| 1496 |
+
if site_var == 'Draftkings':
|
| 1497 |
+
force_dk_api = st.checkbox(
|
| 1498 |
+
"Fetch new contests from DraftKings (overwrite cache)",
|
| 1499 |
+
key="force_dk_contest_api",
|
| 1500 |
+
help="If unchecked, contests load from Mongo after the first successful fetch for this site/sport/type.",
|
| 1501 |
+
)
|
| 1502 |
+
if st.button("Refresh Contests", key="refresh_pricing_contests"):
|
| 1503 |
+
try:
|
| 1504 |
+
if force_dk_api or not cache_has_contests(salaries_db, site_var, sport_var, type_var):
|
| 1505 |
+
contests = fetch_contests_for_selection(sport_var, type_var)
|
| 1506 |
+
save_contests_cache(salaries_db, site_var, sport_var, type_var, contests)
|
| 1507 |
+
st.session_state['pricing_contests'] = contests
|
| 1508 |
+
else:
|
| 1509 |
+
st.session_state['pricing_contests'] = (
|
| 1510 |
+
get_cached_contests(salaries_db, site_var, sport_var, type_var) or []
|
| 1511 |
+
)
|
| 1512 |
+
except Exception as e:
|
| 1513 |
+
st.error(f"Could not load contests: {e}")
|
| 1514 |
+
st.session_state['pricing_contests'] = []
|
| 1515 |
+
else:
|
| 1516 |
+
st.caption("FanDuel contests are stored in Mongo. Upload a contest-ID CSV below, then refresh.")
|
| 1517 |
+
fd_contest_csv = st.file_uploader(
|
| 1518 |
+
"FanDuel contest list CSV (contest_id column required; contest_name optional)",
|
| 1519 |
+
type=['csv'],
|
| 1520 |
+
key="fd_paydirt_contest_csv",
|
| 1521 |
+
)
|
| 1522 |
+
if st.button("Save FanDuel contests to database", key="save_fd_contests_paydirt"):
|
| 1523 |
+
if fd_contest_csv is None:
|
| 1524 |
+
st.warning("Choose a CSV file first.")
|
| 1525 |
+
else:
|
| 1526 |
+
try:
|
| 1527 |
+
fd_df = pd.read_csv(fd_contest_csv)
|
| 1528 |
+
fd_list = parse_fd_contest_id_csv(fd_df)
|
| 1529 |
+
if not fd_list:
|
| 1530 |
+
st.warning("No contest IDs found in the CSV.")
|
| 1531 |
+
else:
|
| 1532 |
+
save_contests_cache(salaries_db, site_var, sport_var, type_var, fd_list)
|
| 1533 |
+
st.session_state['pricing_contests'] = fd_list
|
| 1534 |
+
st.success(f"Saved {len(fd_list)} contest(s) for {sport_var} / {type_var}.")
|
| 1535 |
+
except Exception as e:
|
| 1536 |
+
st.error(f"Could not parse or save contests: {e}")
|
| 1537 |
+
if st.button("Refresh Contests", key="refresh_pricing_contests_fd"):
|
| 1538 |
+
cached = get_cached_contests(salaries_db, site_var, sport_var, type_var)
|
| 1539 |
+
if cached is None:
|
| 1540 |
+
st.session_state['pricing_contests'] = []
|
| 1541 |
+
st.info("No contests in database yet for this sport/type. Upload a CSV and save.")
|
| 1542 |
+
else:
|
| 1543 |
+
st.session_state['pricing_contests'] = cached
|
| 1544 |
|
| 1545 |
contest_options = st.session_state.get('pricing_contests', [])
|
| 1546 |
if contest_options:
|
|
|
|
| 1549 |
f"{c.get('game_type', '')} | ${int(c.get('prize_pool', 0)):,.0f}"
|
| 1550 |
for c in contest_options
|
| 1551 |
]
|
| 1552 |
+
if st.session_state.get("selected_pricing_contest") not in contest_labels:
|
| 1553 |
+
st.session_state["selected_pricing_contest"] = contest_labels[0]
|
| 1554 |
selected_label = st.selectbox(
|
| 1555 |
"Select Contest (searchable)",
|
| 1556 |
+
options=contest_labels,
|
| 1557 |
key="selected_pricing_contest",
|
| 1558 |
+
help="Type to search contest names.",
|
| 1559 |
)
|
| 1560 |
+
selected_idx = contest_labels.index(selected_label)
|
| 1561 |
+
selected_contest = contest_options[selected_idx]
|
| 1562 |
+
|
| 1563 |
+
if site_var == 'Draftkings':
|
| 1564 |
+
if st.button("Load Selected Contest Pricing", key="load_selected_contest_pricing"):
|
| 1565 |
+
try:
|
| 1566 |
+
pricing_df = fetch_pricing_for_contest(selected_contest['contest_id'])
|
| 1567 |
+
st.session_state['csv_file'] = load_csv(pricing_df)
|
| 1568 |
+
st.session_state['pricing_loaded'] = True
|
| 1569 |
+
except Exception as e:
|
| 1570 |
+
st.error(f"Could not load contest pricing: {e}")
|
| 1571 |
+
else:
|
| 1572 |
+
st.info("FanDuel salaries: use **User Upload** with the site pricing CSV. Contest list above is stored for reference / future use.")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1573 |
else:
|
| 1574 |
+
if site_var == 'Draftkings':
|
| 1575 |
+
st.info("Click **Refresh Contests** to load from DraftKings (first time) or from the database.")
|
| 1576 |
+
else:
|
| 1577 |
+
st.info("Upload a FanDuel contest CSV and click **Save** to populate the list, then **Refresh Contests**.")
|
| 1578 |
|
| 1579 |
try:
|
| 1580 |
st.session_state['csv_file']['Salary'] = st.session_state['csv_file']['Salary'].astype(str).str.replace(',', '').astype(int)
|
|
|
|
| 1604 |
st.session_state['csv_file']['Salary'] = st.session_state['csv_file']['Salary'].astype(str).str.replace(',', '').astype(int)
|
| 1605 |
except:
|
| 1606 |
pass
|
| 1607 |
+
if site_var == 'Fanduel':
|
| 1608 |
+
fd_contest_csv_uu = st.file_uploader(
|
| 1609 |
+
"Optional: FanDuel contest IDs CSV (saved to Mongo for this sport/type)",
|
| 1610 |
+
type=['csv'],
|
| 1611 |
+
key='fd_user_upload_contest_csv',
|
| 1612 |
+
)
|
| 1613 |
+
if st.button('Save contest IDs to database', key='save_fd_contests_user_upload'):
|
| 1614 |
+
if fd_contest_csv_uu is None:
|
| 1615 |
+
st.warning('Choose a contest CSV first.')
|
| 1616 |
+
else:
|
| 1617 |
+
try:
|
| 1618 |
+
fd_df_uu = pd.read_csv(fd_contest_csv_uu)
|
| 1619 |
+
fd_list_uu = parse_fd_contest_id_csv(fd_df_uu)
|
| 1620 |
+
if not fd_list_uu:
|
| 1621 |
+
st.warning('No contest IDs found in the CSV.')
|
| 1622 |
+
else:
|
| 1623 |
+
save_contests_cache(salaries_db, site_var, sport_var, type_var, fd_list_uu)
|
| 1624 |
+
st.success(f'Saved {len(fd_list_uu)} contest(s) for {sport_var} / {type_var}.')
|
| 1625 |
+
except Exception as e:
|
| 1626 |
+
st.error(f'Could not parse or save contests: {e}')
|
| 1627 |
|
| 1628 |
if st.session_state['pricing_loaded']:
|
| 1629 |
if type_var == 'Showdown':
|
global_func/contest_lobby_cache.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Persist pricing-step contest lists in Mongo (Contest_Information DB)."""
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
from typing import List, Optional
|
| 4 |
+
|
| 5 |
+
import pandas as pd
|
| 6 |
+
|
| 7 |
+
COLLECTION_NAME = "DFS_PM_pricing_contest_cache"
|
| 8 |
+
_INDEX_ENSURED = False
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def _collection(db):
|
| 12 |
+
return db[COLLECTION_NAME]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def ensure_contest_cache_indexes(db) -> None:
    """Create the unique (site, sport, type) index once per process.

    Index creation failures are ignored on purpose: the cache still works
    without the index, just without the uniqueness guarantee.
    """
    global _INDEX_ENSURED
    if not _INDEX_ENSURED:
        key_spec = [("site", 1), ("sport", 1), ("type", 1)]
        try:
            _collection(db).create_index(
                key_spec,
                unique=True,
                name="site_sport_type_unique",
            )
        except Exception:
            # Best-effort: a missing index only weakens dedup, not reads.
            pass
        _INDEX_ENSURED = True
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def cache_has_contests(db, site: str, sport: str, game_type: str) -> bool:
    """Report whether a cached contest document exists for this selection."""
    ensure_contest_cache_indexes(db)
    query = {"site": site, "sport": sport, "type": game_type}
    # Project only _id: existence is all we need, not the contest payload.
    doc = _collection(db).find_one(query, {"_id": 1})
    return doc is not None
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def get_cached_contests(db, site: str, sport: str, game_type: str) -> Optional[List[dict]]:
    """Return cached contest list, or None if no document exists."""
    ensure_contest_cache_indexes(db)
    selector = {"site": site, "sport": sport, "type": game_type}
    doc = _collection(db).find_one(selector)
    if doc is None:
        return None
    # Distinguish "document without a contests field" ([]) from "no document" (None).
    stored = doc.get("contests")
    return [] if stored is None else list(stored)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def save_contests_cache(db, site: str, sport: str, game_type: str, contests: List[dict]) -> None:
    """Upsert the contest list for a (site, sport, type) selection.

    Overwrites any previously cached list and stamps the document with a
    timezone-aware UTC timestamp so staleness can be checked later.
    """
    # Local import: the module header only imports `datetime` itself.
    from datetime import timezone

    ensure_contest_cache_indexes(db)
    _collection(db).update_one(
        {"site": site, "sport": sport, "type": game_type},
        {
            "$set": {
                "contests": contests,
                # datetime.utcnow() is deprecated (3.12+) and naive;
                # use an aware UTC timestamp instead.
                "updated_at": datetime.now(timezone.utc),
            }
        },
        upsert=True,
    )
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def parse_fd_contest_id_csv(df: pd.DataFrame) -> List[dict]:
    """Build contest dicts for the pricing dropdown from a FanDuel-oriented upload."""
    if df is None or df.empty:
        return []

    # Map stripped header text back to the actual column label.
    normalized = {str(col).strip(): col for col in df.columns}

    id_col = next(
        (
            normalized[label]
            for label in ("contest_id", "Contest ID", "contestId", "ID", "Id", "id")
            if label in normalized
        ),
        None,
    )
    if id_col is None:
        # No recognized header: fall back to the first column.
        id_col = list(df.columns)[0]

    name_col = next(
        (
            normalized[label]
            for label in ("contest_name", "Contest Name", "Contest", "Name", "n", "nickname")
            if label in normalized
        ),
        None,
    )

    results: List[dict] = []
    for _, record in df.iterrows():
        raw = record.get(id_col)
        if pd.isna(raw):
            continue
        cleaned = str(raw).strip().replace(",", "")
        if not cleaned:
            continue
        try:
            # "123" or "123.0" both become int 123; anything else stays a string.
            contest_id = int(float(cleaned))
        except (ValueError, TypeError):
            contest_id = cleaned

        label = ""
        if name_col is not None:
            candidate = record.get(name_col)
            if pd.notna(candidate):
                label = str(candidate).strip()
        if not label:
            label = f"Contest {contest_id}"

        results.append(
            {
                "contest_name": label,
                "contest_id": contest_id,
                "contest_date": "",
                "contest_date_display": "",
                "game_type": "",
                "prize_pool": 0,
            }
        )
    return results
|