import streamlit as st
st.set_page_config(layout="wide")
import pandas as pd
import pytz
import io
import re
from collections import Counter
from rapidfuzz import process
## import global functions
from global_func.clean_player_name import clean_player_name
from global_func.load_file import load_file
from global_func.load_ss_file import load_ss_file
from global_func.load_dk_fd_file import load_dk_fd_file
from global_func.find_name_mismatches import find_name_mismatches
from global_func.predict_dupes import predict_dupes
from global_func.highlight_rows import highlight_changes, highlight_changes_winners, highlight_changes_losers
from global_func.load_csv import load_csv
from global_func.find_csv_mismatches import find_csv_mismatches
from global_func.trim_portfolio import trim_portfolio
from global_func.get_portfolio_names import get_portfolio_names
from global_func.small_field_preset import small_field_preset
from global_func.large_field_preset import large_field_preset
from global_func.hedging_preset import hedging_preset
from global_func.volatility_preset import volatility_preset
from global_func.reduce_volatility_preset import reduce_volatility_preset
from global_func.analyze_player_combos import analyze_player_combos
from global_func.stratification_function import stratification_function
from global_func.exposure_spread import exposure_spread
from global_func.reassess_edge import reassess_edge
from global_func.recalc_diversity import recalc_diversity
from global_func.optimize_lineup import optimize_lineup
from database_queries import *
from database import *
# datetime (the class) is used by the salary-grab helpers below; bind it
# explicitly after the star imports so the name is guaranteed in scope
from datetime import datetime
pos_parse_mapping = {
    'Projection': 'proj_map',
    'Ownership': 'own_map',
    'Salary': 'salary_map',
    'Position': 'pos_map',
    'Team': 'team_map'
}
pos_parse_options = list(pos_parse_mapping.keys())
showdown_selections = [f'Showdown #{i}' for i in range(1, 16)]
dk_db_nfl_showdown_selections = [f'DK_NFL_SD_seed_frame_{s}' for s in showdown_selections]
fd_db_nfl_showdown_selections = [f'FD_NFL_SD_seed_frame_{s}' for s in showdown_selections]
dk_db_nba_showdown_selections = [f'DK_NBA_SD_seed_frame_{s}' for s in showdown_selections]
fd_db_nba_showdown_selections = [f'FD_NBA_SD_seed_frame_{s}' for s in showdown_selections]
dk_db_nhl_showdown_selections = [f'DK_NHL_SD_seed_frame_{s}' for s in showdown_selections]
fd_db_nhl_showdown_selections = [f'FD_NHL_SD_seed_frame_{s}' for s in showdown_selections]
dk_db_mma_showdown_selections = [f'DK_MMA_SD_seed_frame_{s}' for s in showdown_selections]
fd_db_mma_showdown_selections = [f'FD_MMA_SD_seed_frame_{s}' for s in showdown_selections]
dk_db_pga_showdown_selections = [f'DK_PGA_SD_seed_frame_{s}' for s in showdown_selections]
fd_db_pga_showdown_selections = [f'FD_PGA_SD_seed_frame_{s}' for s in showdown_selections]
dk_nfl_showdown_db_translation = dict(zip(showdown_selections, dk_db_nfl_showdown_selections))
fd_nfl_showdown_db_translation = dict(zip(showdown_selections, fd_db_nfl_showdown_selections))
dk_nba_showdown_db_translation = dict(zip(showdown_selections, dk_db_nba_showdown_selections))
fd_nba_showdown_db_translation = dict(zip(showdown_selections, fd_db_nba_showdown_selections))
dk_nhl_showdown_db_translation = dict(zip(showdown_selections, dk_db_nhl_showdown_selections))
fd_nhl_showdown_db_translation = dict(zip(showdown_selections, fd_db_nhl_showdown_selections))
dk_mma_showdown_db_translation = dict(zip(showdown_selections, dk_db_mma_showdown_selections))
fd_mma_showdown_db_translation = dict(zip(showdown_selections, fd_db_mma_showdown_selections))
dk_pga_showdown_db_translation = dict(zip(showdown_selections, dk_db_pga_showdown_selections))
fd_pga_showdown_db_translation = dict(zip(showdown_selections, fd_db_pga_showdown_selections))
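# Each translation dict maps a UI slate label to its seed-frame key in the database,
# e.g. dk_nfl_showdown_db_translation['Showdown #3'] -> 'DK_NFL_SD_seed_frame_Showdown #3'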
freq_format = {'Finish_percentile': '{:.2%}', 'Lineup Edge': '{:.2%}', 'Lineup Edge_Raw': '{:.2%}', 'Win%': '{:.2%}'}
stacking_sports = ['MLB', 'NHL', 'NFL', 'LOL', 'NCAAF']
stack_column_dict = {
    'Draftkings': {
        'Classic': {
            'MLB': ['C', '1B', '2B', '3B', 'SS', 'OF1', 'OF2', 'OF3'],
            'NHL': ['C1', 'C2', 'W1', 'W2', 'W3', 'D1', 'D2', 'G', 'UTIL'],
            'NFL': ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX'],
            'LOL': ['TOP', 'JNG', 'MID', 'ADC', 'SUP', 'TEAM'],
            'NCAAF': ['QB', 'WR1', 'WR2', 'WR3', 'FLEX', 'SFLEX'],
            'MMA': ['FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'FLEX6'],
        },
        'Showdown': {
            'MLB': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'NHL': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'NFL': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'LOL': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'NCAAF': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
        },
    },
    'Fanduel': {
        'Classic': {
            'MLB': ['C/1B', '2B', '3B', 'SS', 'OF1', 'OF2', 'OF3', 'UTIL'],
            'NHL': ['C1', 'C2', 'W1', 'W2', 'D1', 'D2', 'UTIL1', 'UTIL2', 'G'],
            'NFL': ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'WR3', 'TE', 'FLEX'],
            'LOL': ['TOP', 'JNG', 'MID', 'ADC', 'SUP', 'TEAM'],
            'NCAAF': ['QB', 'WR1', 'WR2', 'WR3', 'SFLEX'],
            'MMA': ['FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'FLEX6'],
        },
        'Showdown': {
            'MLB': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'NHL': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'NFL': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'LOL': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
            'NCAAF': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'],
        },
    },
}
sport_position_lists = {
    'Draftkings': {
        'MLB': ['P', 'C', '1B', '2B', '3B', 'SS', 'OF'],
        'NHL': ['C', 'W', 'D', 'G'],
        'NFL': ['QB', 'RB', 'WR', 'TE'],
        'LOL': ['TOP', 'JNG', 'MID', 'ADC', 'SUP', 'TEAM'],
        'COD': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'TEAM'],
        'NCAAF': ['QB', 'WR', 'RB'],
        'MMA': ['FLEX'],
        'GOLF': ['FLEX'],
        'TENNIS': ['FLEX'],
        'WNBA': ['G', 'F'],
        'NBA': ['PG', 'SG', 'SF', 'PF', 'C'],
        'NASCAR': ['FLEX'],
        'F1': ['DRIVER', 'CONST'],
        'SOC': ['F', 'M', 'D', 'GK'],
    },
    'Fanduel': {
        'MLB': ['P', 'C', '1B', '2B', '3B', 'SS', 'OF'],
        'NHL': ['C', 'W', 'D', 'G'],
        'NFL': ['QB', 'RB', 'WR', 'TE'],
        'LOL': ['TOP', 'JNG', 'MID', 'ADC', 'SUP', 'TEAM'],
        'COD': ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'TEAM'],
        'NCAAF': ['QB', 'WR', 'RB'],
        'MMA': ['FLEX'],
        'GOLF': ['FLEX'],
        'TENNIS': ['FLEX'],
        'WNBA': ['G', 'F'],
        'NBA': ['PG', 'SG', 'SF', 'PF', 'C'],
        'NASCAR': ['FLEX'],
        'F1': ['DRIVER', 'CONST'],
        'SOC': ['F', 'M', 'D', 'GK'],
    },
}
player_wrong_names_mlb = ['Enrique Hernandez', 'Joseph Cantillo', 'Mike Soroka', 'Jakob Bauers', 'Temi Fágbénlé']
player_right_names_mlb = ['Kike Hernandez', 'Joey Cantillo', 'Michael Soroka', 'Jake Bauers', 'Temi Fagbenle']
st.markdown("""
    <style>
    /* Tab styling */
    .stElementContainer [data-baseweb="button-group"] {
        gap: 2.000rem;
        padding: 4px;
    }
    .stElementContainer [kind="segmented_control"] {
        height: 2.000rem;
        white-space: pre-wrap;
        background-color: #DAA520;
        color: white;
        border-radius: 20px;
        gap: 1px;
        padding: 10px 20px;
        font-weight: bold;
        transition: all 0.3s ease;
    }
    .stElementContainer [kind="segmented_controlActive"] {
        height: 3.000rem;
        background-color: #DAA520;
        border: 3px solid #FFD700;
        border-radius: 10px;
        color: black;
    }
    .stElementContainer [kind="segmented_control"]:hover {
        background-color: #FFD700;
        cursor: pointer;
    }
    div[data-baseweb="select"] > div {
        background-color: #DAA520;
        color: white;
    }
    </style>""", unsafe_allow_html=True)
def _grab_salaries(collection_name, slate_var=None):
    """Pull upcoming-contest salary rows from a salaries_db collection.

    For regular slates, duplicate names are resolved by slate: 'Main' keeps the
    lowest draftableId, 'Secondary' the middle occurrence, 'Auxiliary' the highest.
    """
    collection = salaries_db[collection_name]
    # Current date in Eastern Time (handles EST/EDT automatically)
    eastern = pytz.timezone('US/Eastern')
    today_str = datetime.now(eastern).strftime("%Y%m%d")
    records = pd.DataFrame(list(collection.find({'Contest Date': {'$gte': today_str}})))
    records = records[['Display Name', 'draftableId', 'Position', 'Salary']]
    records = records.rename(columns={'Display Name': 'Name', 'draftableId': 'ID', 'Position': 'Roster Position'})
    if slate_var == 'Main':
        records = records.sort_values(by='ID', ascending=True)
        records = records.drop_duplicates(subset=['Name'], keep='first')
    elif slate_var == 'Secondary':
        records = records.sort_values(by='ID', ascending=True)
        # Keep the middle occurrence of each name: the only row if one,
        # the last row if two, otherwise the row at index len(group) // 2
        middle_records = []
        for name, group in records.groupby('Name'):
            if len(group) == 1:
                middle_records.append(group)
            elif len(group) == 2:
                middle_records.append(group.iloc[1:2])
            else:
                middle_idx = len(group) // 2
                middle_records.append(group.iloc[middle_idx:middle_idx + 1])
        records = pd.concat(middle_records, ignore_index=True)
    elif slate_var == 'Auxiliary':
        records = records.sort_values(by='ID', ascending=True)
        records = records.drop_duplicates(subset=['Name'], keep='last')
    return records

# Thin per-sport wrappers keep the original call sites working
def grab_nfl_reg_salaries(slate_var: str):
    return _grab_salaries("NFL_reg_player_info", slate_var)

def grab_nfl_showdown_salaries():
    return _grab_salaries("NFL_showdown_player_info")

def grab_nba_reg_salaries(slate_var: str):
    return _grab_salaries("NBA_reg_player_info", slate_var)

def grab_nba_showdown_salaries():
    return _grab_salaries("NBA_showdown_player_info")

def grab_mlb_reg_salaries(slate_var: str):
    return _grab_salaries("MLB_reg_player_info", slate_var)

def grab_mlb_showdown_salaries():
    return _grab_salaries("MLB_showdown_player_info")

def grab_nhl_reg_salaries(slate_var: str):
    return _grab_salaries("NHL_reg_player_info", slate_var)

def grab_nhl_showdown_salaries():
    return _grab_salaries("NHL_showdown_player_info")

def grab_mma_reg_salaries(slate_var: str):
    return _grab_salaries("MMA_reg_player_info", slate_var)

def grab_mma_showdown_salaries():
    return _grab_salaries("MMA_showdown_player_info")

def grab_pga_reg_salaries(slate_var: str):
    return _grab_salaries("PGA_reg_player_info", slate_var)

def grab_pga_showdown_salaries():
    return _grab_salaries("PGA_showdown_player_info")
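# Illustrative usage (column names per the rename above):
#   mlb_main = grab_mlb_reg_salaries('Main')   # one row per Name, lowest draftableId
#   nfl_sd = grab_nfl_showdown_salaries()      # all rows, duplicates preserved
# Both return columns: Name, ID, Roster Position, Salary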
def _define_showdown_slates(db, collection_name, slate_col='slate', site_col=None, site=None):
    """Build 'Team vs. Opp' display names and a name -> slate-id lookup from a showdown ROO collection."""
    raw_display = pd.DataFrame(list(db[collection_name].find()))
    if site_col is not None:
        raw_display = raw_display[raw_display[site_col] == site]
    unique_slates = raw_display[slate_col].unique()
    slate_names = []
    for slate in unique_slates:
        slate_data = raw_display[raw_display[slate_col] == slate]
        slate_names.append(slate_data.iloc[0]['Team'] + ' vs. ' + slate_data.iloc[0]['Opp'])
    return slate_names, dict(zip(slate_names, unique_slates))

def define_dk_nfl_showdown_slates():
    return _define_showdown_slates(nfl_db, "DK_SD_NFL_ROO")

def define_fd_nfl_showdown_slates():
    return _define_showdown_slates(nfl_db, "FD_SD_NFL_ROO")

def define_dk_nba_showdown_slates():
    return _define_showdown_slates(nba_db, "Player_SD_Range_Of_Outcomes", site_col='site', site='Draftkings')

def define_fd_nba_showdown_slates():
    return _define_showdown_slates(nba_db, "Player_SD_Range_Of_Outcomes", site_col='site', site='Fanduel')

def define_dk_nhl_showdown_slates():
    return _define_showdown_slates(nhl_db, "Player_Level_SD_ROO", slate_col='Slate', site_col='Site', site='Draftkings')

def define_fd_nhl_showdown_slates():
    return _define_showdown_slates(nhl_db, "Player_Level_SD_ROO", slate_col='Slate', site_col='Site', site='Fanduel')

# Slate lookups fail harmlessly (empty options) when no showdown data is loaded
try:
    nfl_slate_names_dk, nfl_slate_name_lookup_dk = define_dk_nfl_showdown_slates()
except Exception:
    nfl_slate_names_dk, nfl_slate_name_lookup_dk = [], {}
try:
    nfl_slate_names_fd, nfl_slate_name_lookup_fd = define_fd_nfl_showdown_slates()
except Exception:
    nfl_slate_names_fd, nfl_slate_name_lookup_fd = [], {}
try:
    nba_slate_names_dk, nba_slate_name_lookup_dk = define_dk_nba_showdown_slates()
except Exception:
    nba_slate_names_dk, nba_slate_name_lookup_dk = [], {}
try:
    nba_slate_names_fd, nba_slate_name_lookup_fd = define_fd_nba_showdown_slates()
except Exception:
    nba_slate_names_fd, nba_slate_name_lookup_fd = [], {}
try:
    nhl_slate_names_dk, nhl_slate_name_lookup_dk = define_dk_nhl_showdown_slates()
except Exception:
    nhl_slate_names_dk, nhl_slate_name_lookup_dk = [], {}
try:
    nhl_slate_names_fd, nhl_slate_name_lookup_fd = define_fd_nhl_showdown_slates()
except Exception:
    nhl_slate_names_fd, nhl_slate_name_lookup_fd = [], {}
# Memory optimization helper functions
def chunk_name_matching(portfolio_names, csv_names, chunk_size=1000):
    """Process name matching in chunks to reduce memory usage"""
    portfolio_match_dict = {}
    unmatched_names = []
    for i in range(0, len(portfolio_names), chunk_size):
        chunk = portfolio_names[i:i+chunk_size]
        for portfolio_name in chunk:
            match = process.extractOne(
                portfolio_name,
                csv_names,
                score_cutoff=90
            )
            if match:
                portfolio_match_dict[portfolio_name] = match[0]
                if match[1] < 100:
                    st.write(f"{portfolio_name} matched from portfolio to site csv {match[0]} with a score of {match[1]}%")
            else:
                portfolio_match_dict[portfolio_name] = portfolio_name
                unmatched_names.append(portfolio_name)
    return portfolio_match_dict, unmatched_names
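# Illustrative usage: fuzzy-match portfolio names against site pricing names at a
# 90-score cutoff. Imperfect matches (<100) are echoed to the app for review; names
# with no match above the cutoff pass through unchanged and land in unmatched_names.
#   match_dict, unmatched = chunk_name_matching(get_portfolio_names(portfolio_df), csv_names)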
def optimize_dataframe_dtypes(df):
    """Optimize DataFrame data types for memory efficiency"""
    # Categorical conversion is disabled entirely for now to avoid issues with
    # exposure_spread and other mutating operations; only numeric columns are
    # downcast to more efficient dtypes.
    # Future enhancement: smarter categorical handling that preserves mutability.
    for col in df.columns:
        if df[col].dtype == 'float64':
            # Convert float64 to float32 if the values fit without significant precision loss
            try:
                if df[col].max() < 3.4e+38 and df[col].min() > -3.4e+38:  # float32 range
                    df[col] = df[col].astype('float32')
            except Exception:
                pass
        elif df[col].dtype == 'int64':
            # Convert int64 to the smallest int type that holds the values
            try:
                if df[col].max() <= 32767 and df[col].min() >= -32768:
                    df[col] = df[col].astype('int16')
                elif df[col].max() <= 2147483647 and df[col].min() >= -2147483648:
                    df[col] = df[col].astype('int32')
            except Exception:
                pass
    return df
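# Downcasting float64 -> float32 and int64 -> int16/int32 halves (or quarters)
# per-column memory on large lineup frames; values are range-checked first so
# nothing overflows. Example: working_frame = optimize_dataframe_dtypes(working_frame)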
def load_base_frame(base_name):
    """Load a base frame from compressed storage"""
    if base_name in st.session_state['base_frame_names']:
        base_bytes = st.session_state['base_frame_names'][base_name]
        return pd.read_parquet(io.BytesIO(base_bytes))
    else:
        raise KeyError(f"Base frame '{base_name}' not found")

def save_base_frame(base_name, dataframe):
    """Save a base frame to compressed storage"""
    buffer = io.BytesIO()
    dataframe.to_parquet(buffer, compression='gzip')
    st.session_state['base_frame_names'][base_name] = buffer.getvalue()
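# Round trip: frames are held in session state as gzip-compressed parquet bytes,
# e.g. save_base_frame('Showdown #1', working_frame) followed later by
# working_frame = load_base_frame('Showdown #1')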
def create_memory_efficient_mappings(projections_df, site_var, type_var, sport_var):
    """Create mappings with optimized data types"""
    # Optimize projections data types first
    projections_df = projections_df.copy()
    for col, dtype in [('position', 'category'), ('team', 'category'), ('salary', 'int32'),
                       ('median', 'float32'), ('ownership', 'float32'), ('captain ownership', 'float32')]:
        if col in projections_df.columns:
            projections_df[col] = projections_df[col].astype(dtype)
    # Create base mappings
    names = projections_df['player_names']
    base_mappings = {
        'pos_map': dict(zip(names, projections_df['position'])),
        'team_map': dict(zip(names, projections_df['team'])),
        'salary_map': dict(zip(names, projections_df['salary'])),
        'proj_map': dict(zip(names, projections_df['median'])),
        'own_map': dict(zip(names, projections_df['ownership'])),
        'own_percent_rank': dict(zip(names, projections_df['ownership'].rank(pct=True).astype('float32'))),
    }

    def cpt_maps(salary_mult, proj_mult, own_col):
        # Captain variants of the base maps; a multiplier of 1 keeps the original values
        salary = projections_df['salary'] * salary_mult if salary_mult != 1 else projections_df['salary']
        proj = projections_df['median'] * proj_mult if proj_mult != 1 else projections_df['median']
        return {
            'cpt_salary_map': dict(zip(names, salary)),
            'cpt_proj_map': dict(zip(names, proj)),
            'cpt_own_map': dict(zip(names, projections_df[own_col])),
        }

    # Captain (CPT) maps vary by site, game type, and sport
    if site_var == 'Draftkings':
        if type_var == 'Classic':
            if sport_var in ('CS2', 'LOL'):
                base_mappings.update(cpt_maps(1.5, 1.5, 'captain ownership'))
            else:
                base_mappings.update(cpt_maps(1, 1.5, 'captain ownership'))
        elif type_var == 'Showdown':
            if sport_var == 'GOLF':
                base_mappings.update(cpt_maps(1, 1, 'ownership'))
            else:
                base_mappings.update(cpt_maps(1.5, 1.5, 'captain ownership'))
    elif site_var == 'Fanduel':
        base_mappings.update(cpt_maps(1.5, 1.5, 'captain ownership'))
    return base_mappings
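# The returned map_dict keys ('pos_map', 'team_map', 'salary_map', 'proj_map',
# 'own_map', 'own_percent_rank', plus the 'cpt_*' variants) are exactly what the
# vectorized lineup-metric helpers further below index into.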
def create_comprehensive_mappings(projections_df, portfolio_df, csv_file, site_var, type_var, sport_var):
    """Create mappings that cover all portfolio players, using projections first and csv_file as a fallback"""
    # Get all unique players from the portfolio
    portfolio_players = get_portfolio_names(portfolio_df)
    # Optimize projections data types first (same logic as create_memory_efficient_mappings)
    projections_df = projections_df.copy()
    for col, dtype in [('position', 'category'), ('team', 'category'), ('salary', 'int32'),
                       ('median', 'float32'), ('ownership', 'float32'), ('captain ownership', 'float32')]:
        if col in projections_df.columns:
            projections_df[col] = projections_df[col].astype(dtype)
    # Use sets for efficient lookup of portfolio players with no projection row
    projection_players = set(projections_df['player_names'].tolist())
    missing_players = set(portfolio_players) - projection_players
    if not missing_players:
        # No missing players, so the projections-only logic suffices
        return create_memory_efficient_mappings(projections_df, site_var, type_var, sport_var)
    # Build fallback lookups from csv_file for the missing players
    try:
        csv_name_col = 'Name' if 'Name' in csv_file.columns else 'Nickname'
        csv_salary_col = 'Salary'
        csv_position_col = 'Position' if 'Position' in csv_file.columns else 'Roster Position'
        csv_team_col = 'Team' if 'Team' in csv_file.columns else None
        csv_salary_map = dict(zip(csv_file[csv_name_col], csv_file[csv_salary_col]))
        csv_position_map = dict(zip(csv_file[csv_name_col], csv_file[csv_position_col]))
        csv_team_map = dict(zip(csv_file[csv_name_col], csv_file[csv_team_col])) if csv_team_col else {}
    except Exception as e:
        st.warning(f"Could not create csv fallback mappings: {e}")
        # Fall back to the projections-only function if the csv_file structure is unexpected
        return create_memory_efficient_mappings(projections_df, site_var, type_var, sport_var)
    # Start with projections-based mappings
    names = projections_df['player_names']
    base_mappings = {
        'pos_map': dict(zip(names, projections_df['position'])),
        'team_map': dict(zip(names, projections_df['team'])),
        'salary_map': dict(zip(names, projections_df['salary'])),
        'proj_map': dict(zip(names, projections_df['median'])),
        'own_map': dict(zip(names, projections_df['ownership'])),
        'own_percent_rank': dict(zip(names, projections_df['ownership'].rank(pct=True).astype('float32'))),
    }
    # Add missing players with csv_file data and zeroed projection/ownership
    for player in missing_players:
        if player in csv_salary_map:
            base_mappings['pos_map'][player] = csv_position_map.get(player, 'FLEX')
            base_mappings['team_map'][player] = csv_team_map.get(player, 'Unknown') if csv_team_map else 'Unknown'
            base_mappings['salary_map'][player] = csv_salary_map[player]
            base_mappings['proj_map'][player] = 0.0  # No projection available
            base_mappings['own_map'][player] = 0.0  # No ownership available
            base_mappings['own_percent_rank'][player] = 0.0  # Lowest rank for missing players
        else:
            st.warning(f"Player '{player}' not found in projections or csv_file")
            # Add hard defaults to prevent KeyErrors downstream
            base_mappings['pos_map'][player] = 'FLEX'
            base_mappings['team_map'][player] = 'Unknown'
            base_mappings['salary_map'][player] = 0
            base_mappings['proj_map'][player] = 0.0
            base_mappings['own_map'][player] = 0.0
            base_mappings['own_percent_rank'][player] = 0.0

    def cpt_maps(salary_mult, proj_mult, own_col):
        # Captain variants of the base maps; missing players get csv salary
        # (scaled where applicable) with zeroed captain projection/ownership
        salary = projections_df['salary'] * salary_mult if salary_mult != 1 else projections_df['salary']
        proj = projections_df['median'] * proj_mult if proj_mult != 1 else projections_df['median']
        cpt_salary_map = dict(zip(names, salary))
        cpt_proj_map = dict(zip(names, proj))
        cpt_own_map = dict(zip(names, projections_df[own_col]))
        for player in missing_players:
            if player in csv_salary_map:
                cpt_salary_map[player] = csv_salary_map[player] * salary_mult if salary_mult != 1 else csv_salary_map[player]
                cpt_proj_map[player] = 0.0
                cpt_own_map[player] = 0.0
        return {'cpt_salary_map': cpt_salary_map, 'cpt_proj_map': cpt_proj_map, 'cpt_own_map': cpt_own_map}

    # Site/type specific captain mappings (same rules as create_memory_efficient_mappings)
    if site_var == 'Draftkings':
        if type_var == 'Classic':
            if sport_var in ('CS2', 'LOL'):
                base_mappings.update(cpt_maps(1.5, 1.5, 'captain ownership'))
            else:
                base_mappings.update(cpt_maps(1, 1.5, 'captain ownership'))
        elif type_var == 'Showdown':
            if sport_var == 'GOLF':
                base_mappings.update(cpt_maps(1, 1, 'ownership'))
            else:
                base_mappings.update(cpt_maps(1.5, 1.5, 'captain ownership'))
    elif site_var == 'Fanduel':
        base_mappings.update(cpt_maps(1.5, 1.5, 'captain ownership'))
    return base_mappings
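# Fallback order for portfolio players: projections row -> csv_file row (salary,
# position, team with zeroed projection/ownership) -> hard defaults
# ('FLEX'/'Unknown'/0), so no portfolio player can raise a KeyError downstream.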
def _safe_map_and_fill(series, mapping, fill_value=0):
    """Safely map values and fill NaN, handling categorical columns"""
    mapped = series.map(mapping)
    if hasattr(series, 'cat'):
        # Categorical results can't accept unseen fill values; convert to object first
        mapped = mapped.astype('object')
    return mapped.fillna(fill_value)

def _calculate_mapped_total(df, player_columns, map_dict, type_var, sport_var, cpt_key, flex_key):
    """Sum a mapped value across lineup slots, using the captain map for slot 0 where the format has one."""
    has_captain_slot = (type_var == 'Classic' and sport_var in ('CS2', 'LOL')) or \
                       (type_var == 'Showdown' and sport_var != 'GOLF')
    if has_captain_slot:
        # Captain + flex calculation: the first column is the captain slot
        cpt_total = _safe_map_and_fill(df.iloc[:, 0], map_dict[cpt_key])
        flex_total = sum(_safe_map_and_fill(df.iloc[:, i], map_dict[flex_key]) for i in range(1, len(player_columns)))
        return cpt_total + flex_total
    # Classic non-CS2/LOL (and GOLF showdown): every slot uses the flex map
    return sum(_safe_map_and_fill(df[col], map_dict[flex_key]) for col in player_columns)

def calculate_salary_vectorized(df, player_columns, map_dict, type_var, sport_var):
    """Vectorized salary calculation to replace expensive apply operations"""
    return _calculate_mapped_total(df, player_columns, map_dict, type_var, sport_var, 'cpt_salary_map', 'salary_map')

def calculate_median_vectorized(df, player_columns, map_dict, type_var, sport_var):
    """Vectorized median-projection calculation to replace expensive apply operations"""
    return _calculate_mapped_total(df, player_columns, map_dict, type_var, sport_var, 'cpt_proj_map', 'proj_map')

def calculate_ownership_vectorized(df, player_columns, map_dict, type_var, sport_var):
    """Vectorized ownership calculation to replace expensive apply operations"""
    return _calculate_mapped_total(df, player_columns, map_dict, type_var, sport_var, 'cpt_own_map', 'own_map')
def calculate_lineup_metrics(df, player_columns, map_dict, type_var, sport_var, projections_df=None):
    """Centralized function to calculate salary, median, and ownership efficiently"""
    df = df.copy()  # Work on a copy to avoid modifying the original
    # Ensure player columns are object type to avoid categorical issues with exposure_spread
    for col in player_columns:
        if df[col].dtype.name == 'category':
            df[col] = df[col].astype('object')
    # Vectorized calculations
    df['salary'] = calculate_salary_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
    df['median'] = calculate_median_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
    df['Own'] = calculate_ownership_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
    return df
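# Illustrative usage: adds 'salary', 'median', and 'Own' columns in one pass, e.g.
#   working_frame = calculate_lineup_metrics(working_frame, player_cols, map_dict, type_var, sport_var)
# where player_cols (a hypothetical name here) is the list of lineup slot columns.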
def create_team_filter_mask(df, player_columns, team_map, teams_to_filter, focus_type='Overall', type_var='Classic'):
    """Create a boolean mask for team filtering without creating intermediate DataFrames"""
    mask = pd.Series(False, index=df.index)
    if type_var == 'Showdown' and focus_type != 'Overall':
        if focus_type == 'CPT':
            focus_columns = [player_columns[0]]  # Captain slot only
        elif focus_type == 'FLEX':
            focus_columns = player_columns[1:]  # All slots except the captain
        else:
            focus_columns = player_columns
    else:
        # For Classic or Overall focus, use the appropriate columns
        if type_var == 'Classic':
            focus_columns = [col for col in player_columns if col not in ['SP1', 'SP2']]  # Exclude pitchers
        else:
            focus_columns = player_columns
    for team in teams_to_filter:
        for col in focus_columns:
            team_mask = df[col].map(team_map) == team
            mask |= team_mask
    return mask
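# Illustrative usage (mask is True where any focused slot holds a listed team;
# 'KC' and 'BUF' here are example team codes, not values from this app):
#   mask = create_team_filter_mask(working_frame, player_cols, map_dict['team_map'], ['KC', 'BUF'], focus_type='CPT', type_var='Showdown')
#   working_frame = working_frame[mask]   # or working_frame[~mask] to exclude those teams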
def prepare_dataframe_for_exposure_spread(df, player_columns):
    """Ensure a DataFrame is ready for exposure_spread by converting player columns to object type"""
    df_prepared = df.copy()
    # Convert any categorical player columns back to object type
    for col in player_columns:
        if col in df_prepared.columns and df_prepared[col].dtype.name == 'category':
            df_prepared[col] = df_prepared[col].astype('object')
    return df_prepared
def create_position_export_dict(column_name, csv_file, site_var, type_var, sport_var):
    try:
        # Strip the trailing slot number from the column name to get the position (e.g. 'WR1' -> 'WR')
        position_filter = re.sub(r'\d+$', '', column_name)
        # Filter the CSV file by position
        if 'Position' in csv_file.columns:
            if type_var == 'Showdown':
                filtered_df = csv_file.copy()
            else:
                if position_filter == 'SP':
                    filtered_df = csv_file[csv_file['Roster Position'] == 'P']
                elif position_filter == 'CPT':
                    filtered_df = csv_file.copy()
                elif position_filter == 'FLEX' or position_filter == 'UTIL':
                    if sport_var == 'NFL':
                        filtered_df = csv_file[csv_file['Position'].isin(['RB', 'WR', 'TE'])]
                    elif sport_var == 'SOC':
                        filtered_df = csv_file[csv_file['Position'].str.contains('D|M|F', na=False, regex=True)]
                    elif sport_var == 'NCAAF':
                        filtered_df = csv_file[csv_file['Position'].str.contains('RB|WR', na=False, regex=True)]
                    elif sport_var == 'NHL':
                        filtered_df = csv_file[csv_file['Position'].str.contains('C|W|D', na=False, regex=True)]
                    else:
                        filtered_df = csv_file.copy()
                elif position_filter == 'SFLEX':
                    filtered_df = csv_file.copy()
                elif position_filter == 'C/1B':
                    # str.contains takes a pattern, not a list; match either eligible position
                    filtered_df = csv_file[csv_file['Position'].str.contains('C|1B', na=False, regex=True)]
                else:
                    filtered_df = csv_file[csv_file['Position'].str.contains(position_filter, na=False, regex=False)]
        else:
            # Fall back to all players if no position column is found
            filtered_df = csv_file
        # Create the export dictionary for this position
        if site_var == 'Draftkings':
            try:
                filtered_df = filtered_df.sort_values(by='Salary', ascending=False).drop_duplicates(subset=['Name'])
                return dict(zip(filtered_df['Name'], filtered_df['Name + ID']))
            except Exception:
                filtered_df = filtered_df.sort_values(by='Salary', ascending=False).drop_duplicates(subset=['Nickname'])
                return dict(zip(filtered_df['Nickname'], filtered_df['Name + ID']))
        else:
            try:
                filtered_df = filtered_df.sort_values(by='Salary', ascending=False).drop_duplicates(subset=['Nickname'])
                return dict(zip(filtered_df['Nickname'], filtered_df['Id']))
            except Exception:
                filtered_df = filtered_df.sort_values(by='Salary', ascending=False).drop_duplicates(subset=['Name'])
                return dict(zip(filtered_df['Name'], filtered_df['Id']))
    except Exception as e:
        st.error(f"Error creating position export dict for {column_name}: {str(e)}")
        return {}
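# Illustrative usage: returns a {display name -> site export id} dict for one lineup
# slot, e.g. create_position_export_dict('WR1', csv_file, site_var, type_var, sport_var)
# strips the trailing digit and restricts pricing rows to WR-eligible players.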
def parse_portfolio_on_mapped(portfolio, map_dict, map_key, filter_keys_pos, filter_keys_team, low_threshold, high_threshold, column_choices):
    mapping_port = portfolio[column_choices]
    mapping_port = mapping_port.map(map_dict[map_key])
    if column_choices == 'CPT':
        mapping_port = mapping_port * 1.5
    if map_key not in ['team_map', 'pos_map']:
        # Numerical mapping - filter by thresholds
        low_mask = mapping_port > low_threshold
        high_mask = mapping_port < high_threshold
        mask = low_mask & high_mask
    else:
        # Start from all-False Series so the mask stays valid when a filter list is empty
        pos_mask = pd.Series(False, index=portfolio.index)
        team_mask = pd.Series(False, index=portfolio.index)
        if filter_keys_pos:
            # Create a regex pattern that matches any of the selected positions
            pos_pattern = '|'.join([f'\\b{pos}\\b' for pos in filter_keys_pos])
            pos_mask = mapping_port.str.contains(pos_pattern, case=False, na=False, regex=True)
        if filter_keys_team:
            team_mask = mapping_port.isin(filter_keys_team)
        mask = pos_mask | team_mask
    return portfolio[mask]
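# Illustrative usage: keep lineups whose CPT projection falls inside a band, e.g.
#   parse_portfolio_on_mapped(portfolio, map_dict, 'proj_map', [], [], 10, 40, 'CPT')
# CPT values are scaled 1.5x before the threshold check, matching captain scoring.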
def recalc_stacks_sizes(df, player_columns, map_dict):
    team_map = map_dict['team_map']  # player_name -> team
    # Relies on the module-level site_var/type_var/sport_var selections made below
    lineup_cols = stack_column_dict[site_var][type_var][sport_var]

    def top_stack(row):
        # Most common team among the lineup's players, ignoring unmapped names
        counts = Counter(team_map.get(player, '') for player in row[lineup_cols] if team_map.get(player, ''))
        return counts.most_common(1)[0] if counts else ('', 0)

    # Compute the (team, count) pair once per row instead of once per output column
    stacks = df.apply(top_stack, axis=1)
    df['Stack'] = stacks.str[0]
    df['Size'] = stacks.str[1]
    return df
| with st.container(): | |
| col1, col2, col3, col4 = st.columns([1, 4, 4, 4]) | |
| with col1: | |
| if st.button('Clear data', key='reset3'): | |
| st.session_state.clear() | |
| st.session_state['pricing_loaded'] = False | |
| st.session_state['projections_loaded'] = False | |
| st.session_state['portfolio_loaded'] = False | |
| with col2: | |
| site_var = st.selectbox("Select Site", ['Draftkings', 'Fanduel']) | |
| with col3: | |
| sport_var = st.selectbox("Select Sport", ['NFL', 'MLB', 'NBA', 'NHL', 'NCAAF', 'MMA', 'CS2', 'LOL', 'COD', 'TENNIS', 'NASCAR', 'GOLF', 'WNBA', 'F1'], key='sport_var') | |
| with col4: | |
| type_var = st.selectbox("Select Game Type", ['Classic', 'Showdown']) | |
| if sport_var == 'GOLF': | |
| position_var = 'G' | |
| team_var = 'GOLF' | |
| elif sport_var == 'TENNIS': | |
| position_var = 'T' | |
| team_var = 'TENNIS' | |
| elif sport_var == 'MMA': | |
| position_var = 'F' | |
| team_var = 'MMA' | |
| elif sport_var == 'NASCAR': | |
| position_var = 'D' | |
| team_var = 'NASCAR' | |
| elif sport_var == 'F1': | |
| position_var = 'D' | |
| team_var = 'F1' | |
| else: | |
| position_var = None | |
| team_var = None | |
| if site_var == 'Draftkings': | |
| salary_max = 50000 | |
| elif site_var == 'Fanduel': | |
| if type_var == 'Classic': | |
| if sport_var in ('MLB', 'WNBA'): | |
| salary_max = 40000 | |
| elif sport_var == 'MMA': | |
| salary_max = 100 | |
| elif sport_var == 'NASCAR': | |
| salary_max = 50000 | |
| else: | |
| salary_max = 60000 | |
| elif type_var == 'Showdown': | |
| salary_max = 60000 | |
| with st.expander("Info and Filters"): | |
| prio_col, optimals_site_col, optimals_salary_col, optimals_stacks_col = st.columns(4) | |
| with prio_col: | |
| prio_var = st.radio("Which priority variable do you want to use?", ('proj', 'Own', 'Mix'), key='prio_var_radio') | |
| prio_mix = st.number_input("If Mix, what split of Projection/Ownership to dedicate to Projection?", min_value=0, max_value=100, value=50, step=1) | |
| lineup_num_var = st.number_input("How many lineups do you want to work with?", min_value=100, max_value=10000, value=1000, step=100, key='lineup_download_var_input') | |
| with optimals_site_col: | |
| # Showdown slates have site/sport-specific names; everything else uses the standard Classic slates | |
| default_slates = ['Main', 'Secondary', 'Auxiliary'] | |
| showdown_slate_options = { | |
| 'Draftkings': {'NBA': nba_slate_names_dk, 'NFL': nfl_slate_names_dk, 'NHL': nhl_slate_names_dk}, | |
| 'Fanduel': {'NBA': nba_slate_names_fd, 'NFL': nfl_slate_names_fd, 'NHL': nhl_slate_names_fd} | |
| } | |
| if type_var == 'Showdown': | |
| slate_options = showdown_slate_options.get(site_var, {}).get(sport_var, default_slates) | |
| else: | |
| slate_options = default_slates | |
| slate_var3 = st.radio("Which slate data are you loading?", slate_options, key='slate_var3_radio') | |
| with optimals_salary_col: | |
| if site_var == 'Draftkings': | |
| salary_min_var = st.number_input("Minimum salary used", min_value = 0, max_value = 50000, value = 49000, step = 100, key = 'salary_min_var_dk') | |
| salary_max_var = st.number_input("Maximum salary used", min_value = 0, max_value = 50000, value = 50000, step = 100, key = 'salary_max_var_dk') | |
| elif site_var == 'Fanduel': | |
| if sport_var == 'NHL': | |
| salary_min_var = st.number_input("Minimum salary used", min_value = 0, max_value = 55000, value = 54000, step = 100, key = 'salary_min_var_fd') | |
| salary_max_var = st.number_input("Maximum salary used", min_value = 0, max_value = 55000, value = 55000, step = 100, key = 'salary_max_var_fd') | |
| else: | |
| salary_min_var = st.number_input("Minimum salary used", min_value = 0, max_value = 60000, value = 59000, step = 100, key = 'salary_min_var_fd') | |
| salary_max_var = st.number_input("Maximum salary used", min_value = 0, max_value = 60000, value = 60000, step = 100, key = 'salary_max_var_fd') | |
| with optimals_stacks_col: | |
| if site_var == 'Draftkings': | |
| min_stacks_var = st.number_input("Minimum stacks used", min_value = 0, max_value = 5, value = 1, step = 1, key = 'min_stacks_var_dk') | |
| max_stacks_var = st.number_input("Maximum stacks used", min_value = 0, max_value = 5, value = 5, step = 1, key = 'max_stacks_var_dk') | |
| elif site_var == 'Fanduel': | |
| min_stacks_var = st.number_input("Minimum stacks used", min_value = 0, max_value = 4, value = 1, step = 1, key = 'min_stacks_var_fd') | |
| max_stacks_var = st.number_input("Maximum stacks used", min_value = 0, max_value = 4, value = 4, step = 1, key = 'max_stacks_var_fd') | |
| def safe_grab(grab_func, *args): | |
| # Guarded DB pull: a slate or table may not exist yet, so fall back to None instead of erroring | |
| try: | |
| return grab_func(*args) | |
| except Exception: | |
| return None | |
| nfl_reg_salaries = safe_grab(grab_nfl_reg_salaries, slate_var3) | |
| nfl_showdown_salaries = safe_grab(grab_nfl_showdown_salaries) | |
| nba_reg_salaries = safe_grab(grab_nba_reg_salaries, slate_var3) | |
| nba_showdown_salaries = safe_grab(grab_nba_showdown_salaries) | |
| nhl_reg_salaries = safe_grab(grab_nhl_reg_salaries, slate_var3) | |
| nhl_showdown_salaries = safe_grab(grab_nhl_showdown_salaries) | |
| mma_reg_salaries = safe_grab(grab_mma_reg_salaries, slate_var3) | |
| mma_showdown_salaries = safe_grab(grab_mma_showdown_salaries) | |
| pga_reg_salaries = safe_grab(grab_pga_reg_salaries, slate_var3) | |
| pga_showdown_salaries = safe_grab(grab_pga_showdown_salaries) | |
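| # st.segmented_control's width argument is not accepted on older Streamlit builds, so retry without it | |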
| try: | |
| selected_tab = st.segmented_control( | |
| "Select Tab", | |
| options=["Data Load", "Projections Management", "Manage Portfolio"], | |
| selection_mode='single', | |
| default='Data Load', | |
| label_visibility='collapsed', | |
| width='stretch', | |
| key='tab_selector' | |
| ) | |
| except TypeError: | |
| selected_tab = st.segmented_control( | |
| "Select Tab", | |
| options=["Data Load", "Projections Management", "Manage Portfolio"], | |
| selection_mode='single', | |
| default='Data Load', | |
| label_visibility='collapsed', | |
| key='tab_selector' | |
| ) | |
| if selected_tab == 'Data Load': | |
| col1, col2, col3 = st.columns(3) | |
| with col1: | |
| st.subheader("Draftkings/Fanduel CSV") | |
| with st.expander('Upload Info'): | |
| st.info("Upload the player pricing CSV from the site you are playing on") | |
| st.warning("Database load is active and in testing for Draftkings, not for Fanduel") | |
| pricing_source = st.selectbox("Select a pricing source", options=['Paydirt DB', 'User Upload']) | |
| if 'csv_file' not in st.session_state: | |
| st.session_state['pricing_loaded'] = False | |
| upload_csv_col, csv_template_col = st.columns([3, 1]) | |
| if pricing_source == 'Paydirt DB': | |
| if st.button("Load from Paydirt DB"): | |
| if 'csv_file' in st.session_state: | |
| del st.session_state['csv_file'] | |
| db_salary_sources = { | |
| 'NBA': (nba_reg_salaries, nba_showdown_salaries), | |
| 'NFL': (nfl_reg_salaries, nfl_showdown_salaries), | |
| 'NHL': (nhl_reg_salaries, nhl_showdown_salaries), | |
| 'MMA': (mma_reg_salaries, mma_showdown_salaries), | |
| 'GOLF': (pga_reg_salaries, pga_showdown_salaries) | |
| } | |
| if sport_var in db_salary_sources: | |
| reg_source, showdown_source = db_salary_sources[sport_var] | |
| st.session_state['csv_file'] = load_csv(reg_source if type_var == 'Classic' else showdown_source) | |
| st.session_state['pricing_loaded'] = True | |
| try: | |
| st.session_state['csv_file']['Salary'] = st.session_state['csv_file']['Salary'].astype(str).str.replace(',', '').astype(int) | |
| except (KeyError, ValueError): | |
| pass | |
| else: | |
| with upload_csv_col: | |
| csv_file = st.file_uploader("Upload CSV File", type=['csv']) | |
| if 'csv_file' in st.session_state: | |
| del st.session_state['csv_file'] | |
| with csv_template_col: | |
| if site_var == 'Draftkings': | |
| csv_template_df = pd.DataFrame(columns=['Name', 'ID', 'Roster Position', 'Salary']) | |
| else: | |
| csv_template_df = pd.DataFrame(columns=['Nickname', 'Id', 'Roster Position', 'Salary']) | |
| st.download_button( | |
| label="CSV Template", | |
| data=csv_template_df.to_csv(index=False), | |
| file_name="csv_template.csv", | |
| mime="text/csv" | |
| ) | |
| st.session_state['csv_file'] = load_csv(csv_file) | |
| if csv_file is not None: | |
| st.session_state['pricing_loaded'] = True | |
| try: | |
| st.session_state['csv_file']['Salary'] = st.session_state['csv_file']['Salary'].astype(str).str.replace(',', '').astype(int) | |
| except (KeyError, ValueError): | |
| pass | |
| if st.session_state['pricing_loaded']: | |
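| # Normalize positions: showdown slates are all-FLEX, and non-team sports also get a synthetic Team label | |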
| if type_var == 'Showdown': | |
| st.session_state['csv_file']['Position'] = 'FLEX' | |
| else: | |
| if sport_var == 'GOLF': | |
| st.session_state['csv_file']['Position'] = 'FLEX' | |
| st.session_state['csv_file']['Team'] = 'GOLF' | |
| elif sport_var == 'TENNIS': | |
| st.session_state['csv_file']['Position'] = 'FLEX' | |
| st.session_state['csv_file']['Team'] = 'TENNIS' | |
| elif sport_var == 'MMA': | |
| st.session_state['csv_file']['Position'] = 'FLEX' | |
| st.session_state['csv_file']['Team'] = 'MMA' | |
| elif sport_var == 'NASCAR': | |
| st.session_state['csv_file']['Position'] = 'FLEX' | |
| st.session_state['csv_file']['Team'] = 'NASCAR' | |
| if site_var == 'Fanduel': | |
| try: | |
| st.session_state['csv_file']['Position'] = st.session_state['csv_file']['Position'].replace('D', 'DST', regex=False) | |
| except KeyError: | |
| pass | |
| if st.session_state['csv_file'] is not None: | |
| st.success('Pricing file loaded successfully!') | |
| st.dataframe(st.session_state['csv_file'].head(10)) | |
| with col2: | |
| st.subheader("Portfolio File") | |
| with st.expander('Upload Info'): | |
| st.info("Go ahead and upload a portfolio file here. Only include player columns.") | |
| st.warning("Database load is active and in testing for NBA, NFL, NHL, MMA, and PGA, both Classic and Regular") | |
| upload_toggle = st.selectbox("What source are you uploading from?", options=['Paydirt DB', 'SaberSim (Just IDs)', 'Draftkings/Fanduel (Names + IDs)', 'Other (Just Names)']) | |
| if 'portfolio' not in st.session_state: | |
| st.session_state['portfolio_loaded'] = False | |
| if upload_toggle == 'Paydirt DB': | |
| if st.button("Load from Database after inserting site CSV"): | |
| if site_var == 'Draftkings': | |
| if type_var != 'Showdown': | |
| if sport_var == 'NBA': | |
| portfolio_load = init_DK_NBA_lineups(type_var, slate_var3, prio_var, 50, dk_nba_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NFL': | |
| portfolio_load = init_DK_NFL_lineups(type_var, slate_var3, prio_var, 50, dk_nfl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NHL': | |
| portfolio_load = init_DK_NHL_lineups(type_var, slate_var3, prio_var, 50, dk_nhl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'MMA': | |
| portfolio_load = init_DK_MMA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'GOLF': | |
| portfolio_load = init_DK_PGA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| else: | |
| if sport_var == 'NBA': | |
| portfolio_load = init_DK_NBA_lineups(type_var, nba_slate_name_lookup_dk[slate_var3], prio_var, 50, dk_nba_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NFL': | |
| portfolio_load = init_DK_NFL_lineups(type_var, nfl_slate_name_lookup_dk[slate_var3], prio_var, 50, dk_nfl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NHL': | |
| portfolio_load = init_DK_NHL_lineups(type_var, nhl_slate_name_lookup_dk[slate_var3], prio_var, 50, dk_nhl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'MMA': | |
| portfolio_load = init_DK_MMA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'GOLF': | |
| portfolio_load = init_DK_PGA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| st.session_state['db_portfolio_file'] = pd.DataFrame(portfolio_load) | |
| st.session_state['portfolio_loaded'] = True | |
| if 'portfolio' in st.session_state: | |
| del st.session_state['portfolio'] | |
| if 'export_portfolio' in st.session_state: | |
| del st.session_state['export_portfolio'] | |
| else: | |
| if type_var != 'Showdown': | |
| if sport_var == 'NBA': | |
| portfolio_load = init_FD_NBA_lineups(type_var, slate_var3, prio_var, 50, fd_nba_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NFL': | |
| portfolio_load = init_FD_NFL_lineups(type_var, slate_var3, prio_var, 50, fd_nfl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NHL': | |
| portfolio_load = init_FD_NHL_lineups(type_var, slate_var3, prio_var, 50, fd_nhl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'MMA': | |
| portfolio_load = init_FD_MMA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'GOLF': | |
| portfolio_load = init_FD_PGA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| else: | |
| if sport_var == 'NBA': | |
| portfolio_load = init_FD_NBA_lineups(type_var, nba_slate_name_lookup_fd[slate_var3], prio_var, 50, fd_nba_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NFL': | |
| portfolio_load = init_FD_NFL_lineups(type_var, nfl_slate_name_lookup_fd[slate_var3], prio_var, 50, fd_nfl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'NHL': | |
| portfolio_load = init_FD_NHL_lineups(type_var, nhl_slate_name_lookup_fd[slate_var3], prio_var, 50, fd_nhl_showdown_db_translation, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'MMA': | |
| portfolio_load = init_FD_MMA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| elif sport_var == 'GOLF': | |
| portfolio_load = init_FD_PGA_lineups(type_var, slate_var3, prio_var, 50, lineup_num_var, salary_min_var, salary_max_var, []) | |
| st.session_state['db_portfolio_file'] = pd.DataFrame(portfolio_load) | |
| st.session_state['portfolio_loaded'] = True | |
| if 'portfolio' in st.session_state: | |
| del st.session_state['portfolio'] | |
| if 'export_portfolio' in st.session_state: | |
| del st.session_state['export_portfolio'] | |
| elif 'db_portfolio_file' in st.session_state: | |
| st.session_state['portfolio_loaded'] = True | |
| elif upload_toggle == 'SaberSim (Just IDs)' or upload_toggle == 'Draftkings/Fanduel (Names + IDs)': | |
| portfolio_file = st.file_uploader("Upload Portfolio File (CSV or Excel)", type=['csv', 'xlsx', 'xls']) | |
| st.session_state['portfolio_loaded'] = True | |
| if 'portfolio' in st.session_state: | |
| del st.session_state['portfolio'] | |
| if 'export_portfolio' in st.session_state: | |
| del st.session_state['export_portfolio'] | |
| else: | |
| portfolio_file = st.file_uploader("Upload Portfolio File (CSV or Excel)", type=['csv', 'xlsx', 'xls']) | |
| st.session_state['portfolio_loaded'] = True | |
| if 'portfolio' in st.session_state: | |
| del st.session_state['portfolio'] | |
| if 'export_portfolio' in st.session_state: | |
| del st.session_state['export_portfolio'] | |
| if 'portfolio' not in st.session_state: | |
| if st.session_state['portfolio_loaded']: | |
| if upload_toggle == 'Paydirt DB': | |
| portfolio_file = st.session_state['db_portfolio_file'] | |
| st.session_state['export_portfolio'], st.session_state['portfolio'] = load_file(portfolio_file, site_var, type_var, sport_var, 'portfolio') | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].dropna(how='all') | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].reset_index(drop=True) | |
| st.session_state['portfolio'] = st.session_state['portfolio'].dropna(how='all') | |
| st.session_state['portfolio'] = st.session_state['portfolio'].reset_index(drop=True) | |
| elif upload_toggle == 'SaberSim (Just IDs)': | |
| if portfolio_file is not None: | |
| st.session_state['export_portfolio'], st.session_state['portfolio'] = load_ss_file(portfolio_file, st.session_state['csv_file'], site_var, type_var, sport_var) | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].dropna(how='all') | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].reset_index(drop=True) | |
| st.session_state['portfolio'] = st.session_state['portfolio'].dropna(how='all') | |
| st.session_state['portfolio'] = st.session_state['portfolio'].reset_index(drop=True) | |
| elif upload_toggle == 'Draftkings/Fanduel (Names + IDs)': | |
| if portfolio_file is not None: | |
| st.session_state['export_portfolio'], st.session_state['portfolio'] = load_dk_fd_file(portfolio_file, st.session_state['csv_file'], site_var, type_var, sport_var) | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].dropna(how='all') | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].reset_index(drop=True) | |
| st.session_state['portfolio'] = st.session_state['portfolio'].dropna(how='all') | |
| st.session_state['portfolio'] = st.session_state['portfolio'].reset_index(drop=True) | |
| else: | |
| if portfolio_file is not None: | |
| st.session_state['export_portfolio'], st.session_state['portfolio'] = load_file(portfolio_file, site_var, type_var, sport_var, 'portfolio') | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].dropna(how='all') | |
| st.session_state['export_portfolio'] = st.session_state['export_portfolio'].reset_index(drop=True) | |
| st.session_state['portfolio'] = st.session_state['portfolio'].dropna(how='all') | |
| st.session_state['portfolio'] = st.session_state['portfolio'].reset_index(drop=True) | |
| if st.session_state['portfolio'] is not None: | |
| # Optimize data types early for memory efficiency | |
| st.session_state['portfolio'] = optimize_dataframe_dtypes(st.session_state['portfolio']) | |
| st.success('Portfolio file loaded successfully!') | |
| for col in st.session_state['portfolio'].select_dtypes(include=['object', 'category']).columns: | |
| if st.session_state['portfolio'][col].dtype == 'category': | |
| # Handle categorical columns | |
| st.session_state['portfolio'][col] = st.session_state['portfolio'][col].cat.rename_categories( | |
| lambda x: player_wrong_names_mlb.get(x, x) | |
| ) | |
| else: | |
| # Handle object columns | |
| st.session_state['portfolio'][col] = st.session_state['portfolio'][col].replace(player_wrong_names_mlb) | |
| st.dataframe(st.session_state['portfolio'].head(10)) | |
| with col3: | |
| st.subheader("Projections File") | |
| with st.expander('Upload Info'): | |
| st.info("upload a projections file that has 'player_names', 'salary', 'median', 'ownership', and 'captain ownership' columns. Note that the salary for showdown needs to be the FLEX salary, not the captain salary.") | |
| st.warning("Database load is active and in testing for NBA, NFL, NHL, MMA, and PGA, both Classic and Regular") | |
| proj_options = st.selectbox("Select a projections source", options=['Paydirt DB', 'User Upload']) | |
| upload_col, template_col = st.columns([3, 1]) | |
| with upload_col: | |
| if 'portfolio' not in st.session_state: | |
| st.session_state['projections_loaded'] = False | |
| if proj_options == 'User Upload': | |
| projections_file = st.file_uploader("Upload Projections File (CSV or Excel)", type=['csv', 'xlsx', 'xls']) | |
| st.session_state['db_projections_file'] = projections_file | |
| st.session_state['projections_loaded'] = True | |
| elif proj_options == 'Paydirt DB': | |
| if st.button("Load from Database"): | |
| # Each init_*_baselines call returns projections as (DK Classic, FD Classic, DK Showdown, FD Showdown) | |
| baseline_loaders = { | |
| 'NBA': init_nba_baselines, | |
| 'NFL': init_nfl_baselines, | |
| 'NHL': init_nhl_baselines, | |
| 'MMA': init_mma_baselines, | |
| 'GOLF': init_pga_baselines | |
| } | |
| baseline_index = {('Draftkings', 'Classic'): 0, ('Fanduel', 'Classic'): 1, ('Draftkings', 'Showdown'): 2, ('Fanduel', 'Showdown'): 3} | |
| if sport_var in baseline_loaders: | |
| projections_file = baseline_loaders[sport_var](type_var, site_var, slate_var3)[baseline_index[(site_var, type_var)]] | |
| st.session_state['db_projections_file'] = projections_file | |
| st.session_state['projections_loaded'] = True | |
| if 'projections_df' in st.session_state: | |
| del st.session_state['projections_df'] | |
| with template_col: | |
| if proj_options == 'User Upload': | |
| template_df = pd.DataFrame(columns=['player_names', 'position', 'team', 'salary', 'median', 'ownership', 'captain ownership']) | |
| st.download_button( | |
| label="Template", | |
| data=template_df.to_csv(index=False), | |
| file_name="projections_template.csv", | |
| mime="text/csv" | |
| ) | |
| if st.session_state['projections_loaded']: | |
| export_projections, projections = load_file(st.session_state['db_projections_file'], site_var, type_var, sport_var, 'projections') | |
| if projections is not None: | |
| st.success('Projections file loaded successfully!') | |
| try: | |
| projections['salary'] = projections['salary'].str.replace(',', '', regex=False).str.replace('$', '', regex=False).str.replace(' ', '', regex=False) | |
| st.write('replaced salary symbols') | |
| except (AttributeError, KeyError): | |
| pass | |
| try: | |
| projections['ownership'] = projections['ownership'].str.replace('%', '', regex=False).str.replace(' ', '', regex=False) | |
| st.write('replaced ownership symbols') | |
| except (AttributeError, KeyError): | |
| pass | |
| projections['salary'] = projections['salary'].dropna().astype('int32') | |
| projections['ownership'] = projections['ownership'].astype('float32') | |
| if projections['captain ownership'].isna().all(): | |
| # No captain ownership supplied: derive it as (own/2) * (own/100), then normalize so the column sums to 100 | |
| projections['CPT_Own_raw'] = (projections['ownership'] / 2) * (projections['ownership'] / 100) | |
| cpt_own_var = 100 / projections['CPT_Own_raw'].sum() | |
| projections['captain ownership'] = projections['CPT_Own_raw'] * cpt_own_var | |
| projections = projections.drop(columns='CPT_Own_raw') | |
| projections['captain ownership'] = projections['captain ownership'].astype('float32') | |
| projections['median'] = projections['median'].astype('float32') | |
| for col in projections.select_dtypes(include=['object']).columns: | |
| projections[col] = projections[col].replace(player_wrong_names_mlb) | |
| if position_var is not None: | |
| projections['position'] = position_var | |
| if team_var is not None: | |
| projections['team'] = team_var | |
| st.dataframe(projections.head(10)) | |
| if st.session_state['portfolio_loaded'] and st.session_state['projections_loaded']: | |
| if st.session_state['portfolio'] is not None and projections is not None: | |
| st.subheader("Name Matching Analysis") | |
| portfolio_names = get_portfolio_names(st.session_state['portfolio']) | |
| try: | |
| csv_names = st.session_state['csv_file']['Name'].tolist() | |
| except KeyError: | |
| csv_names = st.session_state['csv_file']['Nickname'].tolist() | |
| projection_names = projections['player_names'].tolist() | |
| portfolio_match_dict, unmatched_names = chunk_name_matching(portfolio_names, csv_names) | |
| player_columns = [col for col in st.session_state['portfolio'].columns | |
| if col not in ['salary', 'median', 'Own']] | |
| for col in player_columns: | |
| st.session_state['portfolio'][col] = st.session_state['portfolio'][col].map(lambda x: portfolio_match_dict.get(x, x)) | |
| projections_match_dict, unmatched_proj_names = chunk_name_matching(projection_names, csv_names) | |
| projections['player_names'] = projections['player_names'].map(lambda x: projections_match_dict.get(x, x)) | |
| st.session_state['projections_df'] = projections | |
| projections_names = st.session_state['projections_df']['player_names'].tolist() | |
| portfolio_names = get_portfolio_names(st.session_state['portfolio']) | |
| # Match the already-updated projection names against the portfolio names | |
| projections_match_dict2, unmatched_proj_names2 = chunk_name_matching(projections_names, portfolio_names) | |
| projections['player_names'] = projections['player_names'].map(lambda x: projections_match_dict2.get(x, x)) | |
| st.session_state['projections_df'] = projections | |
| if sport_var in stacking_sports: | |
| team_dict = dict(zip(st.session_state['projections_df']['player_names'], st.session_state['projections_df']['team'])) | |
| lineup_cols = stack_column_dict[site_var][type_var][sport_var] | |
| def initial_stack_and_size(row): | |
| teams = [team_dict.get(player, '') for player in row[lineup_cols] if team_dict.get(player, '') != ''] | |
| if not teams: | |
| return pd.Series(['', 0]) | |
| team, size = Counter(teams).most_common(1)[0] | |
| return pd.Series([team, size]) | |
| st.session_state['portfolio'][['Stack', 'Size']] = st.session_state['portfolio'].apply(initial_stack_and_size, axis=1) | |
| st.session_state['stack_dict'] = dict(zip(st.session_state['portfolio'].index, st.session_state['portfolio']['Stack'])) | |
| st.session_state['size_dict'] = dict(zip(st.session_state['portfolio'].index, st.session_state['portfolio']['Size'])) | |
| try: | |
| st.session_state['export_dict'] = dict(zip(st.session_state['csv_file']['Name'], st.session_state['csv_file']['Name + ID'])) | |
| except KeyError: | |
| st.session_state['export_dict'] = dict(zip(st.session_state['csv_file']['Nickname'], st.session_state['csv_file']['Id'])) | |
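| # Build the attribute lookup maps (projection, ownership, salary, position, team) used throughout the portfolio tools | |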
| if 'map_dict' not in st.session_state: | |
| st.session_state['map_dict'] = create_comprehensive_mappings( | |
| projections, | |
| st.session_state['portfolio'], | |
| st.session_state['csv_file'], | |
| site_var, | |
| type_var, | |
| sport_var | |
| ) | |
| st.session_state['portfolio'] = st.session_state['portfolio'].astype(str) | |
| st.session_state['portfolio'] = st.session_state['portfolio'][~st.session_state['portfolio'].isin(['', 'nan', 'None', 'NaN']).any(axis=1)].reset_index(drop=True) | |
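| # Persist the cleaned portfolio as a compressed parquet buffer; it holds far less session-state memory than a live DataFrame | |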
| buffer = io.BytesIO() | |
| st.session_state['portfolio'].to_parquet(buffer, compression='snappy') | |
| st.session_state['origin_portfolio'] = buffer.getvalue() | |
| portfolio_inc_proj = pd.DataFrame() | |
| portfolio_inc_proj['player_names'] = get_portfolio_names(st.session_state['portfolio']) | |
| portfolio_inc_proj['position'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['pos_map'].get(x, 'FLEX')) | |
| portfolio_inc_proj['team'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['team_map'].get(x, 'Unknown')) | |
| portfolio_inc_proj['salary'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['salary_map'].get(x, 0)) | |
| portfolio_inc_proj['median'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['proj_map'].get(x, 0.0)) | |
| portfolio_inc_proj['ownership'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['own_map'].get(x, 0.0)) | |
| # Use the dedicated captain ownership map when one exists; otherwise fall back to flex ownership | |
| portfolio_inc_proj['captain ownership'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict'].get('cpt_own_map', st.session_state['map_dict']['own_map']).get(x, 0.0)) | |
| st.session_state['portfolio_inc_proj'] = portfolio_inc_proj.reset_index(drop=True) | |
| del st.session_state['portfolio'], st.session_state['export_portfolio'] | |
| if selected_tab == 'Projections Management': | |
| if 'projections_df' in st.session_state and st.session_state['projections_df'] is not None: | |
| st.subheader("Edit Player Projections") | |
| st.caption("Modify median, ownership, or captain ownership values directly in the table below. Changes will update both the projections and all related mappings.") | |
| projections_editor_df = st.session_state['projections_df'].copy() | |
| if 'origin_portfolio' in st.session_state and 'map_dict' in st.session_state: | |
| portfolio_df = pd.read_parquet(io.BytesIO(st.session_state['origin_portfolio'])) | |
| portfolio_players = set(get_portfolio_names(portfolio_df)) | |
| projection_players = set(projections_editor_df['player_names'].tolist()) | |
| # Find missing players | |
| missing_players = portfolio_players - projection_players | |
| if missing_players: | |
| # Create rows for missing players using map_dict data | |
| missing_rows = [] | |
| for player in missing_players: | |
| missing_rows.append({ | |
| 'player_names': player, | |
| 'position': st.session_state['map_dict']['pos_map'].get(player, 'FLEX'), | |
| 'team': st.session_state['map_dict']['team_map'].get(player, 'Unknown'), | |
| 'salary': st.session_state['map_dict']['salary_map'].get(player, 0), | |
| 'median': st.session_state['map_dict']['proj_map'].get(player, 0.0), | |
| 'ownership': st.session_state['map_dict']['own_map'].get(player, 0.0), | |
| 'captain ownership': st.session_state['map_dict'].get('cpt_own_map', {}).get(player, 0.0) | |
| }) | |
| # Add missing players to the editor dataframe | |
| missing_df = pd.DataFrame(missing_rows) | |
| projections_editor_df = pd.concat([projections_editor_df, missing_df], ignore_index=True) | |
| st.info(f"📌 Found {len(missing_players)} player(s) in portfolio not in projections. They have been added below with values of 0 for median, ownership, and captain ownership.") | |
| # Define column configuration for the data editor | |
| column_config = { | |
| 'player_names': st.column_config.TextColumn( | |
| 'Player', | |
| width='medium' | |
| ), | |
| 'position': st.column_config.TextColumn( | |
| 'Position', | |
| width='small' | |
| ), | |
| 'team': st.column_config.TextColumn( | |
| 'Team', | |
| width='small' | |
| ), | |
| 'salary': st.column_config.NumberColumn( | |
| 'Salary', | |
| width='small', | |
| format='$%d' | |
| ), | |
| 'median': st.column_config.NumberColumn( | |
| 'Median', | |
| min_value=0.0, | |
| max_value=100.0, | |
| step=0.1, | |
| format='%.2f', | |
| width='small' | |
| ), | |
| 'ownership': st.column_config.NumberColumn( | |
| 'Ownership %', | |
| min_value=0.0, | |
| max_value=100.0, | |
| step=0.1, | |
| format='%.2f', | |
| width='small' | |
| ), | |
| 'captain ownership': st.column_config.NumberColumn( | |
| 'Captain Own %', | |
| min_value=0.0, | |
| max_value=100.0, | |
| step=0.1, | |
| format='%.2f', | |
| width='small' | |
| ) | |
| } | |
| # Search/filter functionality | |
| search_col, team_filter_col, position_filter_col = st.columns([2, 1, 1]) | |
| with search_col: | |
| player_search = st.text_input("🔍 Search players", placeholder="Type player name...", key='proj_player_search') | |
| with team_filter_col: | |
| team_options = ['All Teams'] + sorted(projections_editor_df['team'].unique().tolist()) | |
| team_filter = st.selectbox("Filter by Team", options=team_options, key='proj_team_filter') | |
| with position_filter_col: | |
| position_options = ['All Positions'] + sorted(projections_editor_df['position'].unique().tolist()) | |
| position_filter = st.selectbox("Filter by Position", options=position_options, key='proj_position_filter') | |
| # Apply filters | |
| filtered_df = projections_editor_df.copy() | |
| if player_search: | |
| filtered_df = filtered_df[filtered_df['player_names'].str.contains(player_search, case=False, na=False)] | |
| if team_filter != 'All Teams': | |
| filtered_df = filtered_df[filtered_df['team'] == team_filter] | |
| if position_filter != 'All Positions': | |
| filtered_df = filtered_df[filtered_df['position'] == position_filter] | |
| # Display the editable dataframe | |
| edited_df = st.data_editor( | |
| filtered_df, | |
| column_config=column_config, | |
| use_container_width=True, | |
| hide_index=True, | |
| num_rows='fixed', | |
| key='projections_editor' | |
| ) | |
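| # Detect edits by comparing the edited frame to the filtered original row by row | |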
| if not edited_df.equals(filtered_df): | |
| changed_mask = ~(edited_df[['player_names', 'position', 'team', 'salary', 'median', 'ownership', 'captain ownership']] == filtered_df[['player_names', 'position', 'team', 'salary', 'median', 'ownership', 'captain ownership']]).all(axis=1) | |
| changed_rows = edited_df[changed_mask] | |
| if len(changed_rows) > 0: | |
| # Update the projections_df in session state | |
| for idx, row in changed_rows.iterrows(): | |
| player_name = row['player_names'] | |
| # Find and update the original projections_df | |
| orig_idx = st.session_state['projections_df'][st.session_state['projections_df']['player_names'] == player_name].index | |
| if len(orig_idx) > 0: | |
| # Player exists in projections_df - update existing row | |
| update_cols = ['player_names', 'position', 'team', 'salary', 'median', 'ownership', 'captain ownership'] | |
| st.session_state['projections_df'].loc[orig_idx[0], update_cols] = [row[c] for c in update_cols] | |
| else: | |
| # Player is new (from portfolio but not in projections) - add new row | |
| new_row = pd.DataFrame([{ | |
| 'player_names': player_name, | |
| 'position': row['position'], | |
| 'team': row['team'], | |
| 'salary': row['salary'], | |
| 'median': row['median'], | |
| 'ownership': row['ownership'], | |
| 'captain ownership': row['captain ownership'] | |
| }]) | |
| st.session_state['projections_df'] = pd.concat([st.session_state['projections_df'], new_row], ignore_index=True) | |
| # Update map_dict entries | |
| if 'map_dict' in st.session_state: | |
| st.session_state['map_dict']['team_map'][player_name] = str(row['team']) | |
| st.session_state['map_dict']['pos_map'][player_name] = str(row['position']) | |
| st.session_state['map_dict']['salary_map'][player_name] = int(row['salary']) | |
| st.session_state['map_dict']['proj_map'][player_name] = float(row['median']) | |
| st.session_state['map_dict']['own_map'][player_name] = float(row['ownership']) | |
| # Update ownership percent rank | |
| ownership_series = pd.Series(st.session_state['map_dict']['own_map']) | |
| st.session_state['map_dict']['own_percent_rank'] = dict(ownership_series.rank(pct=True).astype('float32')) | |
| # Update captain mappings based on site/type/sport configuration | |
| if 'cpt_proj_map' in st.session_state['map_dict']: | |
| # CPT projection is 1.5x the flex median everywhere except Draftkings GOLF showdown | |
| if site_var == 'Draftkings' and type_var == 'Showdown' and sport_var == 'GOLF': | |
| st.session_state['map_dict']['cpt_proj_map'][player_name] = float(row['median']) | |
| else: | |
| st.session_state['map_dict']['cpt_proj_map'][player_name] = float(row['median']) * 1.5 | |
| if 'cpt_own_map' in st.session_state['map_dict']: | |
| # Captain ownership uses the captain ownership column directly | |
| if type_var == 'Showdown' and sport_var == 'GOLF': | |
| st.session_state['map_dict']['cpt_own_map'][player_name] = float(row['ownership']) | |
| else: | |
| st.session_state['map_dict']['cpt_own_map'][player_name] = float(row['captain ownership']) | |
| # Clear working_frame to force recalculation with new projections | |
| if 'working_frame' in st.session_state: | |
| del st.session_state['working_frame'] | |
| st.success(f"✅ Updated {len(changed_rows)} player(s). Portfolio metrics will recalculate on next view.") | |
| st.rerun() | |
| else: | |
| st.info("📋 No projections file loaded yet. Please upload projections in the Data Load tab first.") | |
| if selected_tab == 'Manage Portfolio': | |
| if 'base_frame_names' not in st.session_state: | |
| st.session_state['base_frame_names'] = {} | |
| if 'origin_portfolio' in st.session_state and 'projections_df' in st.session_state: | |
| with st.container(): | |
| reset_port_col, recalc_stacks_col, recalc_div_col, set_base_col, blank_reset_col, contest_size_col = st.columns([.15, .10, .10, .10, .30, .25]) | |
| with reset_port_col: | |
| with st.popover("Reset Portfolio"): | |
| st.markdown("choose a base to reset to:") | |
| if st.session_state['base_frame_names']: | |
| base_choice = st.selectbox("Base Choice", options=list(st.session_state['base_frame_names'].keys()), index=0) | |
| if st.button("Load Selected Base"): | |
| st.session_state['working_frame'] = load_base_frame(base_choice) | |
| st.rerun() | |
| else: | |
| st.info("No saved base frames available") | |
| with recalc_stacks_col: | |
| if st.button("Recalculate Stacks"): | |
| st.session_state['working_frame'] = recalc_stacks_sizes(st.session_state['working_frame'], st.session_state['player_columns'], st.session_state['map_dict']) | |
| st.rerun() | |
| with recalc_div_col: | |
| if st.button("Recalculate Diversity"): | |
| st.session_state['working_frame']['Diversity'] = recalc_diversity(st.session_state['working_frame'], st.session_state['player_columns']) | |
| st.rerun() | |
| with set_base_col: | |
| with st.popover("New Base Setting"): | |
| st.markdown("Name of new base:") | |
| new_base_name = st.text_input("New Base Name", value='New Base') | |
| if st.button("Save Current as Base"): | |
| if new_base_name and new_base_name not in st.session_state['base_frame_names']: | |
| save_base_frame(new_base_name, st.session_state['working_frame']) | |
| st.success(f"Base '{new_base_name}' saved successfully!") | |
| elif new_base_name in st.session_state['base_frame_names']: | |
| st.error("Base name already exists") | |
| else: | |
| st.error("Please enter a base name") | |
| with contest_size_col: | |
| with st.form(key='contest_size_form'): | |
| size_col, strength_col, submit_col = st.columns(3) | |
| with size_col: | |
| Contest_Size = st.number_input("Enter Contest Size", value=25000, min_value=1, step=1) | |
| with strength_col: | |
| strength_var = st.selectbox("Select field strength", ['Average', 'Sharp', 'Weak']) | |
| with submit_col: | |
| submitted = st.form_submit_button("Submit Size/Strength") | |
| if submitted: | |
| if 'working_frame' in st.session_state: | |
| del st.session_state['working_frame'] | |
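| # Metric columns to ignore when identifying the player-name columns of a lineup frame | |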
| excluded_cols = ['salary', 'median', 'Own', 'Finish_percentile', 'Dupes', 'Stack', 'Size', 'Win%', 'Lineup Edge', 'Lineup Edge_Raw', 'Weighted Own', 'Geomean', 'Diversity', 'SE Score'] | |
| if 'working_frame' not in st.session_state: | |
| st.session_state['settings_base'] = True | |
| # Load and process the origin portfolio | |
| initial_frame = pd.read_parquet(io.BytesIO(st.session_state['origin_portfolio'])) | |
| st.session_state['player_columns'] = [col for col in initial_frame.columns if col not in excluded_cols] | |
| # Use vectorized calculation function | |
| processed_frame = calculate_lineup_metrics( | |
| initial_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['projections_df'] if 'stack_dict' in st.session_state else None | |
| ) | |
| processed_frame = processed_frame[processed_frame['salary'] <= salary_max] | |
| if 'stack_dict' in st.session_state: | |
| processed_frame['Stack'] = processed_frame.index.map(st.session_state['stack_dict']) | |
| processed_frame['Size'] = processed_frame.index.map(st.session_state['size_dict']) | |
| # Create the final base frame with dupe predictions | |
| final_base_frame = predict_dupes(processed_frame, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max) | |
| # Set up the Default base and working frame using memory-efficient storage | |
| save_base_frame('Default', final_base_frame) | |
| st.session_state['working_frame'] = final_base_frame.copy() | |
| #set some maxes for trimming variables | |
| if 'trimming_dict_maxes' not in st.session_state: | |
| st.session_state['trimming_dict_maxes'] = { | |
| 'Own': st.session_state['working_frame']['Own'].max(), | |
| 'Geomean': st.session_state['working_frame']['Geomean'].max(), | |
| 'Weighted Own': st.session_state['working_frame']['Weighted Own'].max(), | |
| 'median': st.session_state['working_frame']['median'].max(), | |
| 'Finish_percentile': st.session_state['working_frame']['Finish_percentile'].max(), | |
| 'Diversity': st.session_state['working_frame']['Diversity'].max() | |
| } | |
| with st.sidebar: | |
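| # Fallback trimming maxes for when no working frame has been built yet | |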
| if 'trimming_dict_maxes' not in st.session_state: | |
| st.session_state['trimming_dict_maxes'] = { | |
| 'Own': 500.0, | |
| 'Geomean': 500.0, | |
| 'Weighted Own': 500.0, | |
| 'median': 1500.0, | |
| 'Finish_percentile': 1.0, | |
| 'Diversity': 1.0 | |
| } | |
| with st.expander('Macro Filter Options'): | |
| # These macro filters apply to Showdown portfolios as well as Classic | |
| with st.form(key='macro_filter_form'): | |
| macro_min_col, macro_max_col = st.columns(2) | |
| with macro_min_col: | |
| min_salary = st.number_input("Min acceptable salary?", value=0, min_value=0, max_value=salary_max, step=100) | |
| min_proj = st.number_input("Min acceptable projection?", value=0.0, min_value=0.0, max_value=1500.0, step=1.0) | |
| min_own = st.number_input("Min acceptable ownership?", value=0.0, min_value=0.0, max_value=500.0, step=1.0) | |
| min_dupes = st.number_input("Min acceptable dupes?", value=0, min_value=0, max_value=1000, step=1) | |
| min_finish_percentile = st.number_input("Min acceptable finish percentile?", value=0.00, min_value=0.00, max_value=1.00, step=.001) | |
| min_lineup_edge = st.number_input("Min acceptable Lineup Edge?", value=-1.00, min_value=-1.00, max_value=1.00, step=.001) | |
| min_weighted_own = st.number_input("Min acceptable Weighted Own?", value=0.0, min_value=0.0, max_value=500.0, step=1.0) | |
| with macro_max_col: | |
| max_salary = st.number_input("Max acceptable salary?", value=salary_max, min_value=0, max_value=salary_max, step=100) | |
| max_proj = st.number_input("Max acceptable projection?", value=1500.0, min_value=0.0, max_value=1500.0, step=1.0) | |
| max_own = st.number_input("Max acceptable ownership?", value=500.0, min_value=0.0, max_value=500.0, step=1.0) | |
| max_dupes = st.number_input("Max acceptable dupes?", value=1000, min_value=1, max_value=1000, step=1) | |
| max_finish_percentile = st.number_input("Max acceptable finish percentile?", value=1.00, min_value=0.00, max_value=1.00, step=.001) | |
| max_lineup_edge = st.number_input("Max acceptable Lineup Edge?", value=1.00, min_value=-1.00, max_value=1.00, step=.001) | |
| max_weighted_own = st.number_input("Max acceptable Weighted Own?", value=500.0, min_value=0.0, max_value=500.0, step=1.0) | |
| if sport_var in stacking_sports: | |
| stack_include_toggle = st.selectbox("Include specific stacks?", options=['All Stacks', 'Specific Stacks'], index=0) | |
| stack_selections = st.multiselect("If Specific Stacks, Which to include?", options=sorted(list(set(st.session_state['stack_dict'].values()))), default=[]) | |
| stack_remove_toggle = st.selectbox("Remove specific stacks?", options=['No', 'Yes'], index=0) | |
| stack_remove = st.multiselect("If Specific Stacks, Which to remove?", options=sorted(list(set(st.session_state['stack_dict'].values()))), default=[]) | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| # Use index-based filtering instead of copying DataFrame | |
| filter_mask = ( | |
| (st.session_state['working_frame']['salary'] >= min_salary) & | |
| (st.session_state['working_frame']['salary'] <= max_salary) & | |
| (st.session_state['working_frame']['median'] >= min_proj) & | |
| (st.session_state['working_frame']['median'] <= max_proj) & | |
| (st.session_state['working_frame']['Own'] >= min_own) & | |
| (st.session_state['working_frame']['Own'] <= max_own) & | |
| (st.session_state['working_frame']['Dupes'] >= min_dupes) & | |
| (st.session_state['working_frame']['Dupes'] <= max_dupes) & | |
| (st.session_state['working_frame']['Finish_percentile'] >= min_finish_percentile) & | |
| (st.session_state['working_frame']['Finish_percentile'] <= max_finish_percentile) & | |
| (st.session_state['working_frame']['Lineup Edge'] >= min_lineup_edge) & | |
| (st.session_state['working_frame']['Lineup Edge'] <= max_lineup_edge) & | |
| (st.session_state['working_frame']['Weighted Own'] >= min_weighted_own) & | |
| (st.session_state['working_frame']['Weighted Own'] <= max_weighted_own) | |
| ) | |
| # Handle stack filtering | |
| if 'Stack' in st.session_state['working_frame'].columns: | |
| if stack_include_toggle != 'All Stacks': | |
| filter_mask &= st.session_state['working_frame']['Stack'].isin(stack_selections) | |
| if stack_remove_toggle == 'Yes': | |
| filter_mask &= ~st.session_state['working_frame']['Stack'].isin(stack_remove) | |
| # Apply all filters at once | |
| st.session_state['working_frame'] = st.session_state['working_frame'][filter_mask].sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| if exp_submitted: | |
| st.session_state['settings_base'] = False | |
| # Use index-based filtering for export_base | |
| export_filter_mask = ( | |
| (st.session_state['export_base']['salary'] >= min_salary) & | |
| (st.session_state['export_base']['salary'] <= max_salary) & | |
| (st.session_state['export_base']['median'] >= min_proj) & | |
| (st.session_state['export_base']['median'] <= max_proj) & | |
| (st.session_state['export_base']['Own'] >= min_own) & | |
| (st.session_state['export_base']['Own'] <= max_own) & | |
| (st.session_state['export_base']['Dupes'] >= min_dupes) & | |
| (st.session_state['export_base']['Dupes'] <= max_dupes) & | |
| (st.session_state['export_base']['Finish_percentile'] >= min_finish_percentile) & | |
| (st.session_state['export_base']['Finish_percentile'] <= max_finish_percentile) & | |
| (st.session_state['export_base']['Lineup Edge'] >= min_lineup_edge) & | |
| (st.session_state['export_base']['Lineup Edge'] <= max_lineup_edge) & | |
| (st.session_state['export_base']['Weighted Own'] >= min_weighted_own) & | |
| (st.session_state['export_base']['Weighted Own'] <= max_weighted_own) | |
| ) | |
| if 'Stack' in st.session_state['export_base'].columns: | |
| if stack_include_toggle != 'All Stacks': | |
| export_filter_mask &= st.session_state['export_base']['Stack'].isin(stack_selections) | |
| if stack_remove_toggle == 'Yes': | |
| export_filter_mask &= ~st.session_state['export_base']['Stack'].isin(stack_remove) | |
| st.session_state['export_base'] = st.session_state['export_base'][export_filter_mask].sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Micro Filter Options'): | |
| with st.form(key='micro_filter_form'): | |
| player_names = set() | |
| for col in st.session_state['working_frame'].columns: | |
| if col not in excluded_cols: | |
| player_names.update(st.session_state['working_frame'][col].unique()) | |
| if type_var == 'Showdown': | |
| cpt_flex_focus = st.selectbox("Focus on Overall, CPT, or FLEX?", options=['Overall', 'CPT', 'FLEX'], index=0) | |
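| # Showdown convention: column 0 is the CPT slot and columns 1+ are FLEX; the iloc slices below rely on this ordering | |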
| player_lock = st.multiselect("Lock players?", options=sorted(list(player_names)), default=[]) | |
| player_remove = st.multiselect("Remove players?", options=sorted(list(player_names)), default=[]) | |
| team_include = st.multiselect("Include teams?", options=sorted(list(set(st.session_state['projections_df']['team'].unique()))), default=[]) | |
| team_remove = st.multiselect("Remove teams?", options=sorted(list(set(st.session_state['projections_df']['team'].unique()))), default=[]) | |
| if sport_var in stacking_sports: | |
| size_include = st.multiselect("Include sizes?", options=sorted(list(set(st.session_state['working_frame']['Size'].unique()))), default=[]) | |
| else: | |
| size_include = [] | |
| if sport_var == 'NFL': | |
| qb_force = st.selectbox("Force QB Stacks?", options=['No', 'Yes'], index=0) | |
| else: | |
| qb_force = 'No' | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['working_frame'].copy() | |
| if player_remove: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| remove_mask = parsed_frame.iloc[:, 0].apply( | |
| lambda player: not any(remove_player in str(player) for remove_player in player_remove) | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| remove_mask = parsed_frame.iloc[:, 1:].apply( | |
| lambda row: not any(player in list(row) for player in player_remove), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| remove_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: not any(player in list(row) for player in player_remove), axis=1 | |
| ) | |
| else: | |
| # Create mask for lineups that contain any of the removed players | |
| remove_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: not any(player in list(row) for player in player_remove), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[remove_mask] | |
| if player_lock: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| lock_mask = parsed_frame.iloc[:, 0].apply( | |
| lambda player: any(lock_player in str(player) for lock_player in player_lock) | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| lock_mask = parsed_frame.iloc[:, 1:].apply( | |
| lambda row: all(player in list(row) for player in player_lock), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| lock_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: all(player in list(row) for player in player_lock), axis=1 | |
| ) | |
| else: | |
| lock_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: all(player in list(row) for player in player_lock), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[lock_mask] | |
| if team_include: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| # The CPT slot is a single column, so map it straight to teams and test membership | |
| team_frame = parsed_frame.iloc[:, 0].map(st.session_state['map_dict']['team_map']) | |
| include_mask = team_frame.isin(team_include) | |
| elif cpt_flex_focus == 'FLEX': | |
| team_frame = parsed_frame.iloc[:, 1:].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| team_frame = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| else: | |
| # Create a copy of the frame with player names replaced by teams, excluding SP1 and SP2 | |
| filtered_player_columns = [col for col in st.session_state['player_columns'] if col not in ['SP1', 'SP2']] | |
| team_frame = parsed_frame[filtered_player_columns].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| # Create mask for lineups that contain any of the included teams | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[include_mask] | |
| if team_remove: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| team_frame = parsed_frame.iloc[:, 0].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| team_frame = parsed_frame.iloc[:, 1:].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| team_frame = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| else: | |
| # Create a copy of the frame with player names replaced by teams, excluding SP1 and SP2 | |
| filtered_player_columns = [col for col in st.session_state['player_columns'] if col not in ['SP1', 'SP2']] | |
| team_frame = parsed_frame[filtered_player_columns].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| # Create mask for lineups that don't contain any of the removed teams | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[remove_mask] | |
| if size_include: | |
| parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)] | |
| if qb_force == 'Yes': | |
| if type_var == 'Classic': | |
| # Get team for the first player column for each lineup | |
| team_frame = parsed_frame.iloc[:, 0].map(st.session_state['map_dict']['team_map']) | |
| # Create mask where the first player's team matches the Stack column | |
| include_mask = team_frame == parsed_frame['Stack'] | |
| parsed_frame = parsed_frame[include_mask] | |
| st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['export_base'].copy() | |
| if player_remove: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| remove_mask = parsed_frame.iloc[:, 0].apply( | |
| lambda player: not any(remove_player in str(player) for remove_player in player_remove) | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| remove_mask = parsed_frame.iloc[:, 1:].apply( | |
| lambda row: not any(player in list(row) for player in player_remove), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| remove_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: not any(player in list(row) for player in player_remove), axis=1 | |
| ) | |
| else: | |
| remove_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: not any(player in list(row) for player in player_remove), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[remove_mask] | |
| if player_lock: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| lock_mask = parsed_frame.iloc[:, 0].apply( | |
| lambda player: any(lock_player in str(player) for lock_player in player_lock) | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| lock_mask = parsed_frame.iloc[:, 1:].apply( | |
| lambda row: all(player in list(row) for player in player_lock), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| lock_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: all(player in list(row) for player in player_lock), axis=1 | |
| ) | |
| else: | |
| lock_mask = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda row: all(player in list(row) for player in player_lock), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[lock_mask] | |
| if team_include: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| team_frame = parsed_frame.iloc[:, 0].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| team_frame = parsed_frame.iloc[:, 1:].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| team_frame = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| else: | |
| # Create a copy of the frame with player names replaced by teams, excluding SP1 and SP2 | |
| filtered_player_columns = [col for col in st.session_state['player_columns'] if col not in ['SP1', 'SP2']] | |
| team_frame = parsed_frame[filtered_player_columns].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| # Create mask for lineups that contain any of the included teams | |
| include_mask = team_frame.apply( | |
| lambda row: any(team in list(row) for team in team_include), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[include_mask] | |
| if team_remove: | |
| if type_var == 'Showdown': | |
| if cpt_flex_focus == 'CPT': | |
| team_frame = parsed_frame.iloc[:, 0].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'FLEX': | |
| team_frame = parsed_frame.iloc[:, 1:].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| elif cpt_flex_focus == 'Overall': | |
| team_frame = parsed_frame[st.session_state['player_columns']].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| else: | |
| # Create a copy of the frame with player names replaced by teams, excluding SP1 and SP2 | |
| filtered_player_columns = [col for col in st.session_state['player_columns'] if col not in ['SP1', 'SP2']] | |
| team_frame = parsed_frame[filtered_player_columns].apply( | |
| lambda x: x.map(st.session_state['map_dict']['team_map']) | |
| ) | |
| # Create mask for lineups that don't contain any of the removed teams | |
| remove_mask = team_frame.apply( | |
| lambda row: not any(team in list(row) for team in team_remove), axis=1 | |
| ) | |
| parsed_frame = parsed_frame[remove_mask] | |
| if size_include: | |
| parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)] | |
| st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Position Filtering'): | |
| with st.form(key='position_filtering_form'): | |
| position_choice = st.selectbox("Position to filter", options=[col for col in st.session_state['working_frame'].columns if col not in excluded_cols], index=0) | |
| position_filter = st.selectbox("Filter on:", options=pos_parse_options) | |
| position_low_threshold = st.number_input("if filtering on Projection/Ownership/Salary, Low Threshold", value=0.0, min_value=0.0, step=1.0) | |
| position_high_threshold = st.number_input("if filtering on Projection/Ownership/Salary, High Threshold", value=20000.0, min_value=0.0, step=1.0) | |
| filter_keys_pos = st.multiselect("if filtering on Position, Position(s) to keep", options=sport_position_lists[site_var][sport_var], default=[]) | |
| filter_keys_team = st.multiselect("if filtering on Team, Team(s) to keep", options=st.session_state['portfolio_inc_proj']['team'].unique(), default=[]) | |
| submitted_col, export_col = st.columns(2) | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['working_frame'].copy() | |
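| # parse_portfolio_on_mapped (defined elsewhere) is assumed to filter lineups on the mapped | |
| # value of position_choice: numeric maps (proj/own/salary) are bounded by the low/high | |
| # thresholds, while the pos/team maps are matched against the keep lists. | |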
| parsed_frame = parse_portfolio_on_mapped(parsed_frame, st.session_state['map_dict'], pos_parse_mapping[position_filter], filter_keys_pos, filter_keys_team, position_low_threshold, position_high_threshold, position_choice) | |
| st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['export_base'].copy() | |
| parsed_frame = parse_portfolio_on_mapped(parsed_frame, st.session_state['map_dict'], pos_parse_mapping[position_filter], filter_keys_pos, filter_keys_team, position_low_threshold, position_high_threshold, position_choice) | |
| st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Trimming Options'): | |
| with st.form(key='trim_form'): | |
| st.write("Sorting and trimming variables:") | |
| perf_var, own_var = st.columns(2) | |
| with perf_var: | |
| performance_type = st.selectbox("Sorting variable", ['median', 'Own', 'Weighted Own'], key='sort_var') | |
| with own_var: | |
| own_type = st.selectbox("Trimming variable", ['Own', 'Geomean', 'Weighted Own', 'Diversity'], key='trim_var') | |
| trim_slack_var = st.number_input("Trim slack (percentile addition to trimming variable ceiling)", value=0.0, min_value=0.0, max_value=1.0, step=0.1, key='trim_slack') | |
| st.write("Sorting threshold range:") | |
| min_sort, max_sort = st.columns(2) | |
| with min_sort: | |
| performance_threshold_low = st.number_input("Min", value=0.0, min_value=0.0, step=1.0, key='min_sort') | |
| with max_sort: | |
| performance_threshold_high = st.number_input("Max", value=float(st.session_state['trimming_dict_maxes'][performance_type]), min_value=0.0, step=1.0, key='max_sort') | |
| st.write("Trimming threshold range:") | |
| min_trim, max_trim = st.columns(2) | |
| with min_trim: | |
| own_threshold_low = st.number_input("Min", value=0.0, min_value=0.0, step=1.0, key='min_trim') | |
| with max_trim: | |
| own_threshold_high = st.number_input("Max", value=float(st.session_state['trimming_dict_maxes'][own_type]), min_value=0.0, step=1.0, key='max_trim') | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['working_frame'].copy() | |
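| # trim_portfolio (defined elsewhere) is assumed to walk lineups in sorted order of the | |
| # sorting variable and drop those whose trimming variable exceeds a running ceiling | |
| # (plus trim_slack_var of percentile slack) within the configured threshold ranges. | |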
| parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low) | |
| st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['export_base'].copy() | |
| parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low) | |
| st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Presets'): | |
| st.info("Still heavily in testing here, I'll announce when they are ready for use.") | |
| with st.form(key='Small Field Preset'): | |
| preset_choice = st.selectbox("Preset", options=['Small Field (Heavy Own)', 'Large Field (Manage Diversity)', 'Hedge Chalk (Manage Leverage)', 'Volatility (Heavy Lineup Edge)'], index=0) | |
| lineup_target = st.number_input("Lineups to produce", value=150, min_value=1, step=1) | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| if preset_choice == 'Small Field (Heavy Own)': | |
| parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var) | |
| elif preset_choice == 'Large Field (Manage Diversity)': | |
| parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var) | |
| elif preset_choice == 'Volatility (Heavy Lineup Edge)': | |
| parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var) | |
| elif preset_choice == 'Hedge Chalk (Manage Leverage)': | |
| parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['portfolio_inc_proj'], sport_var) | |
| elif preset_choice == 'Reduce Volatility (Manage Own)': | |
| parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var) | |
| st.session_state['working_frame'] = parsed_frame.reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['export_base'].copy() | |
| if preset_choice == 'Small Field (Heavy Own)': | |
| parsed_frame = small_field_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var) | |
| elif preset_choice == 'Large Field (Manage Diversity)': | |
| parsed_frame = large_field_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var) | |
| elif preset_choice == 'Volatility (Heavy Lineup Edge)': | |
| parsed_frame = volatility_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var) | |
| elif preset_choice == 'Hedge Chalk (Manage Leverage)': | |
| parsed_frame = hedging_preset(st.session_state['export_base'], lineup_target, st.session_state['portfolio_inc_proj'], sport_var) | |
| elif preset_choice == 'Reduce Volatility (Manage Own)': | |
| parsed_frame = reduce_volatility_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var) | |
| st.session_state['export_base'] = parsed_frame.reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Stratify'): | |
| with st.form(key='Stratification'): | |
| sorting_choice = st.selectbox("Stat Choice", options=['median', 'Own', 'Weighted Own', 'Geomean', 'Lineup Edge', 'Finish_percentile', 'SE Score', 'Diversity'], index=0) | |
| lineup_target = st.number_input("Lineups to produce", value=150, min_value=1, step=1) | |
| strat_sample = st.slider("Sample range", value=[0.0, 100.0], min_value=0.0, max_value=100.0, step=1.0) | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
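| # stratification_function (defined elsewhere) is assumed to rank lineups by sorting_choice, | |
| # keep those inside the sample percentile range, and pull lineup_target evenly spaced rows | |
| # so the result spans the chosen stat instead of clustering at the top. | |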
| parsed_frame = stratification_function(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var, sorting_choice, strat_sample[0], strat_sample[1]) | |
| st.session_state['working_frame'] = parsed_frame.reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = stratification_function(st.session_state['export_base'], lineup_target, excluded_cols, sport_var, sorting_choice, strat_sample[0], strat_sample[1]) | |
| st.session_state['export_base'] = parsed_frame.reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Conditionals Manager (players)'): | |
| # a set of functions for removing lineups that contain a conditional between players and stacks | |
| with st.form(key='conditional_players_form'): | |
| player_names = set() | |
| for col in st.session_state['working_frame'].columns: | |
| if col not in excluded_cols: | |
| player_names.update(st.session_state['working_frame'][col].unique()) | |
| keep_remove_var = st.selectbox("Conditional:", options=['Keep', 'Remove'], index=0) | |
| conditional_side_alpha = st.multiselect("Lineups containing:", options=sorted(list(player_names)), default=[]) | |
| cpt_flex_alpha = st.selectbox("in slot:", options=['Overall', 'CPT', 'FLEX'], index=0, key='cpt_flex_alpha') | |
| conditional_var = st.selectbox("where they also contain:", options=['Any', 'All', 'None'], index=0) | |
| conditional_side_beta = st.multiselect("of the following player(s):", options=sorted(list(player_names)), default=[]) | |
| cpt_flex_beta = st.selectbox("in slot:", options=['Overall', 'CPT', 'FLEX'], index=0, key='cpt_flex_beta') | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['working_frame'].copy() | |
| # Check if we have players selected for both alpha and beta sides | |
| if conditional_side_alpha and conditional_side_beta: | |
| alpha_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_alpha: | |
| if type_var == 'Showdown': | |
| if cpt_flex_alpha == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_alpha == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_alpha == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| alpha_mask = alpha_mask & player_present | |
| # Only apply beta logic to rows that match alpha condition | |
| rows_to_process = alpha_mask | |
| # For rows that match alpha condition, check beta condition | |
| if conditional_var == 'Any': | |
| # Check if row contains ANY of the beta players | |
| beta_mask = pd.Series([False] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_beta: | |
| if type_var == 'Showdown': | |
| if cpt_flex_beta == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_beta == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_beta == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| beta_mask = beta_mask | player_present | |
| elif conditional_var == 'All': | |
| # Check if row contains ALL of the beta players | |
| beta_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_beta: | |
| if type_var == 'Showdown': | |
| if cpt_flex_beta == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_beta == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_beta == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| beta_mask = beta_mask & player_present | |
| elif conditional_var == 'None': | |
| # Check if row contains NONE of the beta players | |
| beta_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_beta: | |
| if type_var == 'Showdown': | |
| if cpt_flex_beta == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_beta == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_beta == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| beta_mask = beta_mask & (~player_present) | |
| # Combine conditions: alpha_mask AND beta_mask | |
| final_condition = rows_to_process & beta_mask | |
| # Apply keep or remove logic | |
| if keep_remove_var == 'Keep': | |
| parsed_frame = parsed_frame[~rows_to_process | final_condition] | |
| else: # Remove | |
| parsed_frame = parsed_frame[~final_condition] | |
| elif conditional_side_alpha: | |
| # Only alpha side specified - filter based on presence of alpha players | |
| alpha_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_alpha: | |
| if type_var == 'Showdown': | |
| if cpt_flex_alpha == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_alpha == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_alpha == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| alpha_mask = alpha_mask & player_present | |
| if keep_remove_var == 'Keep': | |
| parsed_frame = parsed_frame[alpha_mask] | |
| else: # Remove | |
| parsed_frame = parsed_frame[~alpha_mask] | |
| st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['export_base'].copy() | |
| # Check if we have players selected for both alpha and beta sides | |
| if conditional_side_alpha and conditional_side_beta: | |
| # Create boolean mask for rows containing ALL players from alpha side | |
| alpha_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_alpha: | |
| if type_var == 'Showdown': | |
| if cpt_flex_alpha == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_alpha == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_alpha == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| alpha_mask = alpha_mask & player_present | |
| # Only apply beta logic to rows that match alpha condition | |
| rows_to_process = alpha_mask | |
| # For rows that match alpha condition, check beta condition | |
| if conditional_var == 'Any': | |
| # Check if row contains ANY of the beta players | |
| beta_mask = pd.Series([False] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_beta: | |
| if type_var == 'Showdown': | |
| if cpt_flex_beta == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_beta == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_beta == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| beta_mask = beta_mask | player_present | |
| elif conditional_var == 'All': | |
| # Check if row contains ALL of the beta players | |
| beta_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_beta: | |
| if type_var == 'Showdown': | |
| if cpt_flex_beta == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_beta == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_beta == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| beta_mask = beta_mask & player_present | |
| elif conditional_var == 'None': | |
| # Check if row contains NONE of the beta players | |
| beta_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_beta: | |
| if type_var == 'Showdown': | |
| if cpt_flex_beta == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_beta == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_beta == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| beta_mask = beta_mask & (~player_present) | |
| # Combine conditions: alpha_mask AND beta_mask | |
| final_condition = rows_to_process & beta_mask | |
| # Apply keep or remove logic | |
| if keep_remove_var == 'Keep': | |
| parsed_frame = parsed_frame[~rows_to_process | final_condition] | |
| else: # Remove | |
| parsed_frame = parsed_frame[~final_condition] | |
| elif conditional_side_alpha: | |
| # Only alpha side specified - filter based on presence of alpha players | |
| alpha_mask = pd.Series([True] * len(parsed_frame), index=parsed_frame.index) | |
| for player in conditional_side_alpha: | |
| if type_var == 'Showdown': | |
| if cpt_flex_alpha == 'Overall': | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| elif cpt_flex_alpha == 'CPT': | |
| player_present = parsed_frame.iloc[:, 0].apply(lambda row: player in row) | |
| elif cpt_flex_alpha == 'FLEX': | |
| player_present = parsed_frame.iloc[:, 1:].apply(lambda row: player in row.values, axis=1) | |
| else: | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| player_present = parsed_frame.apply(lambda row: player in row.values, axis=1) | |
| alpha_mask = alpha_mask & player_present | |
| if keep_remove_var == 'Keep': | |
| parsed_frame = parsed_frame[alpha_mask] | |
| else: # Remove | |
| parsed_frame = parsed_frame[~alpha_mask] | |
| st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Conditionals Manager (combos)'): | |
| # a set of functions for replacing players in lineups containing specific other players | |
| with st.form(key='conditional_combos_form'): | |
| player_names = set() | |
| for col in st.session_state['working_frame'].columns: | |
| if col not in excluded_cols: | |
| player_names.update(st.session_state['working_frame'][col].unique()) | |
| replace_player = st.selectbox("Replace player:", options=sorted(list(player_names)), key='replace_player') | |
| replace_slot = st.selectbox("In slot:", options=['Overall', 'CPT', 'FLEX'], index=0, key='replace_slot') | |
| containing_player = st.selectbox("In Lineups containing:", options=sorted(list(player_names)), key='containing_player') | |
| containing_slot = st.selectbox("In slot:", options=['Overall', 'CPT', 'FLEX'], index=0, key='containing_slot') | |
| specific_replacements_combo = st.multiselect("Specific replacements?", options=sorted(list(player_names)), default=[], key='specific_replacements_combo') | |
| specific_exclusions_combo = st.multiselect("Specific exclusions?", options=sorted(list(player_names)), default=[], key='specific_exclusions_combo') | |
| comp_salary_below_combo = st.number_input("Comp Salary Below", value=-5000, min_value=-5000, max_value=0, step=100, key='comp_salary_below_combo') | |
| comp_salary_above_combo = st.number_input("Comp Salary Above", value=5000, min_value=0, max_value=5000, step=100, key='comp_salary_above_combo') | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['working_frame'].copy() | |
| # Check if we have both replace and containing players selected | |
| if replace_player and containing_player and replace_player != containing_player: | |
| # Find rows that contain the containing_player in the specified slot | |
| containing_mask = pd.Series([False] * len(parsed_frame), index=parsed_frame.index) | |
| if type_var == 'Showdown': | |
| if containing_slot == 'Overall': | |
| containing_mask = parsed_frame.apply(lambda row: containing_player in row.values, axis=1) | |
| elif containing_slot == 'CPT': | |
| containing_mask = parsed_frame.iloc[:, 0].apply(lambda row: containing_player in row) | |
| elif containing_slot == 'FLEX': | |
| containing_mask = parsed_frame.iloc[:, 1:].apply(lambda row: containing_player in row.values, axis=1) | |
| else: | |
| containing_mask = parsed_frame.apply(lambda row: containing_player in row.values, axis=1) | |
| # Filter to only rows containing the target player | |
| target_rows = parsed_frame[containing_mask] | |
| if not target_rows.empty: | |
| # Reset index to avoid index mismatch issues | |
| target_rows_reset = target_rows.reset_index(drop=True) | |
| # Prepare DataFrame for exposure_spread to avoid categorical issues | |
| target_rows_prepared = prepare_dataframe_for_exposure_spread(target_rows_reset, st.session_state['player_columns']) | |
| # Use exposure_spread logic to replace the player in these specific rows | |
| # Set exposure_target to 0 to remove all instances of replace_player | |
| modified_rows = exposure_spread( | |
| target_rows_prepared, | |
| replace_player, | |
| 0, # exposure_target = 0 means remove all instances | |
| comp_salary_below_combo, | |
| comp_salary_above_combo, | |
| [], # ignore_stacks | |
| [], # remove_teams_exposure | |
| specific_replacements_combo, | |
| specific_exclusions_combo, | |
| st.session_state['player_columns'] if replace_slot == 'Overall' else | |
| ([st.session_state['player_columns'][0]] if replace_slot == 'CPT' else st.session_state['player_columns'][1:]), | |
| st.session_state['portfolio_inc_proj'], | |
| sport_var, | |
| type_var, | |
| salary_max, | |
| stacking_sports | |
| ) | |
| # Update the original dataframe with the modified rows | |
| parsed_frame.loc[containing_mask] = modified_rows.values | |
| # Use consolidated calculation function | |
| parsed_frame = calculate_lineup_metrics( | |
| parsed_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['portfolio_inc_proj'] | |
| ) | |
| st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| parsed_frame = st.session_state['export_base'].copy() | |
| # Check if we have both replace and containing players selected | |
| if replace_player and containing_player and replace_player != containing_player: | |
| # Find rows that contain the containing_player in the specified slot | |
| containing_mask = pd.Series([False] * len(parsed_frame), index=parsed_frame.index) | |
| if type_var == 'Showdown': | |
| if containing_slot == 'Overall': | |
| containing_mask = parsed_frame.apply(lambda row: containing_player in row.values, axis=1) | |
| elif containing_slot == 'CPT': | |
| containing_mask = parsed_frame.iloc[:, 0].apply(lambda row: containing_player in row) | |
| elif containing_slot == 'FLEX': | |
| containing_mask = parsed_frame.iloc[:, 1:].apply(lambda row: containing_player in row.values, axis=1) | |
| else: | |
| containing_mask = parsed_frame.apply(lambda row: containing_player in row.values, axis=1) | |
| # Filter to only rows containing the target player | |
| target_rows = parsed_frame[containing_mask] | |
| if not target_rows.empty: | |
| # Reset index to avoid index mismatch issues | |
| target_rows_reset = target_rows.reset_index(drop=True) | |
| # Prepare DataFrame for exposure_spread to avoid categorical issues | |
| target_rows_prepared = prepare_dataframe_for_exposure_spread(target_rows_reset, st.session_state['player_columns']) | |
| # Use exposure_spread logic to replace the player in these specific rows | |
| # Set exposure_target to 0 to remove all instances of replace_player | |
| modified_rows = exposure_spread( | |
| target_rows_prepared, | |
| replace_player, | |
| 0, # exposure_target = 0 means remove all instances | |
| comp_salary_below_combo, | |
| comp_salary_above_combo, | |
| [], # ignore_stacks | |
| [], # remove_teams_exposure | |
| specific_replacements_combo, | |
| specific_exclusions_combo, | |
| st.session_state['player_columns'] if replace_slot == 'Overall' else | |
| ([st.session_state['player_columns'][0]] if replace_slot == 'CPT' else st.session_state['player_columns'][1:]), | |
| st.session_state['portfolio_inc_proj'], | |
| sport_var, | |
| type_var, | |
| salary_max, | |
| stacking_sports | |
| ) | |
| # Update the original dataframe with the modified rows | |
| parsed_frame.loc[containing_mask] = modified_rows.values | |
| # Use consolidated calculation function for export | |
| parsed_frame = calculate_lineup_metrics( | |
| parsed_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['portfolio_inc_proj'] | |
| ) | |
| st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Exposure Management'): | |
| with st.form(key='Exposures'): | |
| exposure_player = st.selectbox("Player", options=sorted(list(set(get_portfolio_names(st.session_state['working_frame'])))), key='exposure_player') | |
| exposure_target = st.number_input("Target Exposure", value=.50, min_value=0.0, max_value=1.0, step=0.01) | |
| comp_salary_below = st.number_input("Comp Salary Below", value=-5000, min_value=-5000, max_value=0, step=100) | |
| comp_salary_above = st.number_input("Comp Salary Above", value=5000, min_value=0, max_value=5000, step=100) | |
| if 'Stack' in st.session_state['working_frame'].columns: | |
| ignore_stacks = st.multiselect("Ignore Specific Stacks?", options=sorted(list(set(st.session_state['projections_df']['team'].unique()))), default=[]) | |
| else: | |
| ignore_stacks = [] | |
| remove_teams_exposure = st.multiselect("Removed/Locked teams?", options=sorted(list(set(st.session_state['projections_df']['team'].unique()))), default=[]) | |
| specific_replacements = st.multiselect("Specific Replacements?", options=sorted(list(set(get_portfolio_names(st.session_state['working_frame'])))), default=[]) | |
| specific_exclusions = st.multiselect("Specific exclusions?", options=sorted(list(set(get_portfolio_names(st.session_state['working_frame'])))), default=[]) | |
| specific_columns = st.multiselect("Specific Positions?", options=sorted(list(st.session_state['player_columns'])), default=[]) | |
| submitted_col, export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with submitted_col: | |
| reg_submitted = st.form_submit_button("Portfolio") | |
| with export_col: | |
| exp_submitted = st.form_submit_button("Export") | |
| if reg_submitted: | |
| st.session_state['settings_base'] = False | |
| # Prepare DataFrame for exposure_spread to avoid categorical issues | |
| working_frame_prepared = prepare_dataframe_for_exposure_spread(st.session_state['working_frame'], st.session_state['player_columns']) | |
| parsed_frame = exposure_spread(working_frame_prepared, st.session_state['exposure_player'], exposure_target, comp_salary_below, comp_salary_above, ignore_stacks, remove_teams_exposure, specific_replacements, specific_exclusions, specific_columns, st.session_state['portfolio_inc_proj'], sport_var, type_var, salary_max, stacking_sports) | |
| # Use consolidated calculation function | |
| parsed_frame = calculate_lineup_metrics( | |
| parsed_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['portfolio_inc_proj'] | |
| ) | |
| st.session_state['working_frame'] = parsed_frame.reset_index(drop=True) | |
| # st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var) | |
| # Load Default base from compressed storage for reassess_edge | |
| default_base = load_base_frame('Default') | |
| st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], default_base, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max) | |
| team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team'])) | |
| if 'Stack' in st.session_state['working_frame'].columns: | |
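| # Stack = the most common team among the stack-eligible slots; Size = how many of | |
| # those slots that team fills. Counter(...).most_common(1) returns [(team, count)], | |
| # so [0][0] is the team and [0][1] is the count. | |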
| st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else '', | |
| axis=1 | |
| ) | |
| st.session_state['working_frame']['Size'] = st.session_state['working_frame'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else 0, | |
| axis=1 | |
| ) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif exp_submitted: | |
| st.session_state['settings_base'] = False | |
| # Prepare DataFrame for exposure_spread to avoid categorical issues | |
| export_base_prepared = prepare_dataframe_for_exposure_spread(st.session_state['export_base'], st.session_state['player_columns']) | |
| parsed_frame = exposure_spread(export_base_prepared, st.session_state['exposure_player'], exposure_target, comp_salary_below, comp_salary_above, ignore_stacks, remove_teams_exposure, specific_replacements, specific_exclusions, specific_columns, st.session_state['portfolio_inc_proj'], sport_var, type_var, salary_max, stacking_sports) | |
| # Use consolidated calculation function for export | |
| parsed_frame = calculate_lineup_metrics( | |
| parsed_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['portfolio_inc_proj'] | |
| ) | |
| st.session_state['export_base'] = parsed_frame.reset_index(drop=True) | |
| # st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var) | |
| # Load Default base from compressed storage for reassess_edge | |
| default_base = load_base_frame('Default') | |
| st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], default_base, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max) | |
| team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team'])) | |
| if 'Stack' in st.session_state['export_base'].columns: | |
| st.session_state['export_base']['Stack'] = st.session_state['export_base'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else '', | |
| axis=1 | |
| ) | |
| st.session_state['export_base']['Size'] = st.session_state['export_base'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else 0, | |
| axis=1 | |
| ) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| with st.expander('Lineup Reoptimization'): | |
| with st.form(key='Reoptimize'): | |
| optimize_by = st.selectbox("Optimize By", options=['median', 'ownership'], key='optimize_by') | |
| lock_teams_optimize = st.multiselect( | |
| "Locked Teams", | |
| options=sorted(list(set(st.session_state['projections_df']['team'].unique()))), | |
| default=[], | |
| key='lock_teams_optimize' | |
| ) | |
| opt_submitted_col, opt_export_col = st.columns(2) | |
| st.info("Portfolio Button applies to your overall Portfolio, Export button applies to your Custom Export") | |
| with opt_submitted_col: | |
| opt_reg_submitted = st.form_submit_button("Portfolio") | |
| with opt_export_col: | |
| opt_exp_submitted = st.form_submit_button("Export") | |
| if opt_reg_submitted: | |
| st.session_state['settings_base'] = False | |
| # Store original for comparison (player columns only) | |
| original_frame = st.session_state['working_frame'][st.session_state['player_columns']].copy() | |
| # Run optimization on working_frame | |
| optimized_frame = optimize_lineup( | |
| working_frame=st.session_state['working_frame'], | |
| projections_df=st.session_state['portfolio_inc_proj'], | |
| player_columns=st.session_state['player_columns'], | |
| map_dict=st.session_state['map_dict'], | |
| lock_teams=lock_teams_optimize, | |
| site_var=site_var, | |
| type_var=type_var, | |
| sport_var=sport_var, | |
| salary_max=salary_max, | |
| optimize_by=optimize_by | |
| ) | |
| # Store changes mask in session state for highlighting (no columns added) | |
| st.session_state['optimization_changes_mask'] = ( | |
| original_frame != optimized_frame[st.session_state['player_columns']] | |
| ) | |
| # Recalculate lineup metrics | |
| optimized_frame = calculate_lineup_metrics( | |
| optimized_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['portfolio_inc_proj'] | |
| ) | |
| st.session_state['working_frame'] = optimized_frame.reset_index(drop=True) | |
| # Load Default base from compressed storage for reassess_edge | |
| default_base = load_base_frame('Default') | |
| st.session_state['working_frame'] = reassess_edge( | |
| st.session_state['working_frame'], | |
| default_base, | |
| st.session_state['map_dict'], | |
| site_var, | |
| type_var, | |
| Contest_Size, | |
| strength_var, | |
| sport_var, | |
| salary_max | |
| ) | |
| # Update Stack/Size columns if applicable | |
| team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team'])) | |
| if 'Stack' in st.session_state['working_frame'].columns: | |
| st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else '', | |
| axis=1 | |
| ) | |
| st.session_state['working_frame']['Size'] = st.session_state['working_frame'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else 0, | |
| axis=1 | |
| ) | |
| st.session_state['export_merge'] = st.session_state['working_frame'].copy() | |
| elif opt_exp_submitted: | |
| st.session_state['settings_base'] = False | |
| # Store original for comparison (player columns only) | |
| original_frame = st.session_state['export_base'][st.session_state['player_columns']].copy() | |
| # Run optimization on export_base | |
| optimized_frame = optimize_lineup( | |
| working_frame=st.session_state['export_base'], | |
| projections_df=st.session_state['portfolio_inc_proj'], | |
| player_columns=st.session_state['player_columns'], | |
| map_dict=st.session_state['map_dict'], | |
| lock_teams=lock_teams_optimize, | |
| site_var=site_var, | |
| type_var=type_var, | |
| sport_var=sport_var, | |
| salary_max=salary_max, | |
| optimize_by=optimize_by | |
| ) | |
| # Store changes mask in session state for highlighting (no columns added) | |
| st.session_state['optimization_changes_mask'] = ( | |
| original_frame != optimized_frame[st.session_state['player_columns']] | |
| ) | |
| # Recalculate lineup metrics for export | |
| optimized_frame = calculate_lineup_metrics( | |
| optimized_frame, | |
| st.session_state['player_columns'], | |
| st.session_state['map_dict'], | |
| type_var, | |
| sport_var, | |
| st.session_state['portfolio_inc_proj'] | |
| ) | |
| st.session_state['export_base'] = optimized_frame.reset_index(drop=True) | |
| # Load Default base from compressed storage for reassess_edge | |
| default_base = load_base_frame('Default') | |
| st.session_state['export_base'] = reassess_edge( | |
| st.session_state['export_base'], | |
| default_base, | |
| st.session_state['map_dict'], | |
| site_var, | |
| type_var, | |
| Contest_Size, | |
| strength_var, | |
| sport_var, | |
| salary_max | |
| ) | |
| # Update Stack/Size columns if applicable | |
| team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team'])) | |
| if 'Stack' in st.session_state['export_base'].columns: | |
| st.session_state['export_base']['Stack'] = st.session_state['export_base'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else '', | |
| axis=1 | |
| ) | |
| st.session_state['export_base']['Size'] = st.session_state['export_base'].apply( | |
| lambda row: Counter( | |
| team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]] | |
| if team_dict.get(player, '') != '' | |
| ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else 0, | |
| axis=1 | |
| ) | |
| st.session_state['export_merge'] = st.session_state['export_base'].copy() | |
| # Clear highlighting button (outside the form) | |
| if st.button("Clear Optimization Highlighting", key='clear_opt_highlight'): | |
| if 'optimization_changes_mask' in st.session_state: | |
| del st.session_state['optimization_changes_mask'] | |
| with st.container(): | |
| if 'export_base' not in st.session_state: | |
| st.session_state['export_base'] = pd.DataFrame(columns=st.session_state['working_frame'].columns) | |
| display_frame_source = st.selectbox("Display:", options=['Portfolio', 'Export Base'], key='display_frame_source') | |
| if display_frame_source == 'Portfolio': | |
| st.session_state['display_frame'] = st.session_state['working_frame'] | |
| st.session_state['export_file'] = st.session_state['display_frame'].copy() | |
| for col in st.session_state['export_file'].columns: | |
| if col not in excluded_cols: | |
| st.session_state['export_file'][col] = st.session_state['export_file'][col].map(st.session_state['export_dict']) | |
| elif display_frame_source == 'Export Base': | |
| st.session_state['display_frame'] = st.session_state['export_base'] | |
| st.session_state['export_file'] = st.session_state['display_frame'].copy() | |
| for col in st.session_state['export_file'].columns: | |
| if col not in excluded_cols: | |
| # Create position-specific export dictionary on the fly | |
| position_dict = create_position_export_dict(col, st.session_state['csv_file'], site_var, type_var, sport_var) | |
| st.session_state['export_file'][col] = st.session_state['export_file'][col].map(position_dict) | |
| if 'export_file' in st.session_state: | |
| download_port, merge_port, clear_export, add_rows_col, remove_rows_col, blank_export_col = st.columns([1, 1, 1, 2, 2, 6]) | |
| with download_port: | |
| st.download_button(label="Download Portfolio", data=st.session_state['export_file'].to_csv(index=False), file_name="portfolio.csv", mime="text/csv") | |
| with merge_port: | |
| if st.button("Add all to Custom Export"): | |
| st.session_state['export_base'] = pd.concat([st.session_state['export_base'], st.session_state['export_merge']]) | |
| st.session_state['export_base'] = st.session_state['export_base'].drop_duplicates() | |
| st.session_state['export_base'] = st.session_state['export_base'].reset_index(drop=True) | |
| with clear_export: | |
| if st.button("Clear Custom Export"): | |
| st.session_state['export_base'] = pd.DataFrame(columns=st.session_state['working_frame'].columns) | |
| if display_frame_source == 'Portfolio': | |
| st.session_state['display_frame'] = st.session_state['working_frame'] | |
| elif display_frame_source == 'Export Base': | |
| st.session_state['display_frame'] = st.session_state['export_base'] | |
| with add_rows_col: | |
| select_custom_index = st.multiselect("Select rows to add (based on first column):", options=st.session_state['display_frame'].index, default=[]) | |
| if st.button("Add selected to Custom Export"): | |
| st.session_state['export_base'] = pd.concat([st.session_state['export_base'], st.session_state['display_frame'].loc[select_custom_index]]) | |
| st.session_state['export_base'] = st.session_state['export_base'].drop_duplicates() | |
| st.session_state['export_base'] = st.session_state['export_base'].reset_index(drop=True) | |
| with remove_rows_col: | |
| remove_custom_index = st.multiselect("Remove rows (based on first column):", options=st.session_state['display_frame'].index, default=[]) | |
| if st.button("Remove selected from Display"): | |
| st.session_state['display_frame'] = st.session_state['display_frame'].drop(remove_custom_index) | |
| st.session_state['display_frame'] = st.session_state['display_frame'].drop_duplicates() | |
| st.session_state['display_frame'] = st.session_state['display_frame'].reset_index(drop=True) | |
| total_rows = len(st.session_state['display_frame']) | |
| rows_per_page = 500 | |
| total_pages = (total_rows + rows_per_page - 1) // rows_per_page # Ceiling division | |
| # Initialize page number in session state if not exists | |
| if 'current_page' not in st.session_state: | |
| st.session_state.current_page = 1 | |
| # Display current page range info and pagination control in a single line | |
| st.write( | |
| f"Showing rows {(st.session_state.current_page - 1) * rows_per_page + 1} " | |
| f"to {min(st.session_state.current_page * rows_per_page, total_rows)} of {total_rows}" | |
| ) | |
| # Add page number input | |
| st.session_state.current_page = st.number_input( | |
| f"Page (1-{total_pages})", | |
| min_value=1, | |
| max_value=total_pages, | |
| value=st.session_state.current_page | |
| ) | |
| # Calculate start and end indices for current page | |
| start_idx = (st.session_state.current_page - 1) * rows_per_page | |
| end_idx = min(start_idx + rows_per_page, total_rows) | |
| # Get the subset of data for the current page | |
| current_page_data = st.session_state['display_frame'].iloc[start_idx:end_idx] | |
| # Define highlight function for optimization changes | |
| def highlight_optimization_changes(df): | |
| styles = pd.DataFrame('', index=df.index, columns=df.columns) | |
| if 'optimization_changes_mask' in st.session_state: | |
| mask = st.session_state['optimization_changes_mask'] | |
| for col in mask.columns: | |
| if col in styles.columns: | |
| common_idx = mask.index.intersection(df.index) | |
| for idx in common_idx: | |
| if mask.loc[idx, col]: | |
| styles.loc[idx, col] = 'background-color: #DAA520; color: black' | |
| return styles | |
| # Display the paginated dataframe first | |
| st.dataframe( | |
| current_page_data.style | |
| .apply(highlight_optimization_changes, axis=None) | |
| .background_gradient(cmap='RdYlGn') | |
| .background_gradient(cmap='RdYlGn_r', subset=['Finish_percentile', 'Own', 'Dupes']) | |
| .format(freq_format, precision=2), | |
| column_config={ | |
| "Finish_percentile": st.column_config.NumberColumn( | |
| "Finish%", | |
| help="Projected finishing percentile", | |
| width="small", | |
| min_value=0.0, | |
| max_value=1.0 | |
| ), | |
| "Lineup Edge": st.column_config.NumberColumn( | |
| "Edge", | |
| help="Projected lineup edge", | |
| width="small", | |
| min_value=-1.0, | |
| max_value=1.0 | |
| ), | |
| "Diversity": st.column_config.NumberColumn( | |
| "Diversity", | |
| help="Projected lineup diversity", | |
| width="small", | |
| min_value=0.0, | |
| max_value=1.0 | |
| ), | |
| }, | |
| height=499, | |
| use_container_width=True | |
| ) | |
| player_stats_col, stack_stats_col, combos_col = st.tabs(['Player Stats', 'Stack Stats', 'Combos']) | |
with player_stats_col:
    if st.button("Analyze Players", key='analyze_players'):
        player_stats = []
        if st.session_state['settings_base'] and 'origin_player_exposures' in st.session_state and display_frame_source == 'Portfolio':
            # Unmodified portfolio: reuse the exposures cached on first analysis
            st.session_state['player_summary'] = st.session_state['origin_player_exposures']
        else:
            def build_player_row(label, player, mask):
                # One summary row per player (or player/roster-slot pair), averaging
                # lineup metrics over the lineups selected by the boolean mask
                frame = st.session_state['display_frame']
                subset = frame[mask]
                return {
                    'Player': label,
                    'Position': st.session_state['map_dict']['pos_map'][player],
                    'Team': st.session_state['map_dict']['team_map'][player],
                    'ProjOwn': st.session_state['map_dict']['own_map'][player] / 100.0,
                    'Exposure': mask.sum() / len(frame),
                    'Avg Median': subset['median'].mean(),
                    'Avg Own': subset['Own'].mean(),
                    'Avg Dupes': subset['Dupes'].mean(),
                    'Avg Finish %': subset['Finish_percentile'].mean(),
                    'Avg Lineup Edge': subset['Lineup Edge'].mean(),
                    'Avg Diversity': subset['Diversity'].mean(),
                }

            # CPT/FLEX splits apply to non-GOLF Showdown slates and to CS2/LOL
            # Classic slates; everywhere else a player simply is or isn't rostered
            if type_var == 'Showdown':
                split_cpt_flex = sport_var != 'GOLF'
            else:
                split_cpt_flex = sport_var in ('CS2', 'LOL')

            for player in player_names:
                if split_cpt_flex:
                    # Captain is always the first roster column
                    cpt_mask = st.session_state['display_frame'][st.session_state['player_columns'][0]] == player
                    if cpt_mask.any():
                        player_stats.append(build_player_row(f"{player} (CPT)", player, cpt_mask))
                    # FLEX is any of the remaining roster columns
                    flex_mask = st.session_state['display_frame'][st.session_state['player_columns'][1:]].apply(
                        lambda row: player in list(row), axis=1
                    )
                    if flex_mask.any():
                        player_stats.append(build_player_row(f"{player} (FLEX)", player, flex_mask))
                else:
                    player_mask = st.session_state['display_frame'][st.session_state['player_columns']].apply(
                        lambda row: player in list(row), axis=1
                    )
                    if player_mask.any():
                        player_stats.append(build_player_row(player, player, player_mask))

            player_summary = pd.DataFrame(player_stats)
            player_summary = player_summary.sort_values('Exposure', ascending=False)
            st.session_state['player_summary'] = player_summary.copy()
            if 'origin_player_exposures' not in st.session_state:
                st.session_state['origin_player_exposures'] = player_summary.copy()

        st.subheader("Player Summary")
        st.dataframe(
            st.session_state['player_summary'].style
            .background_gradient(axis=0)
            .background_gradient(cmap='RdYlGn')
            .background_gradient(cmap='RdYlGn_r', subset=['Avg Finish %', 'Avg Own', 'Avg Dupes'])
            .format({
                'ProjOwn': '{:.2%}',
                'Avg Median': '{:.2f}',
                'Avg Own': '{:.2f}',
                'Avg Dupes': '{:.2f}',
                'Avg Finish %': '{:.2%}',
                'Avg Lineup Edge': '{:.2%}',
                'Exposure': '{:.2%}',
                'Avg Diversity': '{:.2%}'
            }),
            height=400,
            use_container_width=True
        )
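# The Stack Stats tab mirrors the Player Stats flow: the button either reuses
# the cached baseline exposures or recomputes them, then renders a styled frame.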
with stack_stats_col:
    if 'Stack' in st.session_state['display_frame'].columns:
        if st.button("Analyze Stacks", key='analyze_stacks'):
            stack_stats = []
            if st.session_state['settings_base'] and 'origin_stack_exposures' in st.session_state and display_frame_source == 'Portfolio':
                # Unmodified portfolio: reuse the cached stack exposures
                st.session_state['stack_summary'] = st.session_state['origin_stack_exposures']
            else:
                # stack_dict values can repeat, so duplicates are dropped after the loop
                for stack in st.session_state['stack_dict'].values():
                    stack_mask = st.session_state['display_frame']['Stack'] == stack
                    if stack_mask.any():
                        stack_stats.append({
                            'Stack': stack,
                            'Lineup Count': stack_mask.sum(),
                            'Exposure': stack_mask.sum() / len(st.session_state['display_frame']),
                            'Avg Median': st.session_state['display_frame'][stack_mask]['median'].mean(),
                            'Avg Own': st.session_state['display_frame'][stack_mask]['Own'].mean(),
                            'Avg Dupes': st.session_state['display_frame'][stack_mask]['Dupes'].mean(),
                            'Avg Finish %': st.session_state['display_frame'][stack_mask]['Finish_percentile'].mean(),
                            'Avg Lineup Edge': st.session_state['display_frame'][stack_mask]['Lineup Edge'].mean(),
                            'Avg Diversity': st.session_state['display_frame'][stack_mask]['Diversity'].mean(),
                        })
                stack_summary = pd.DataFrame(stack_stats)
                stack_summary = stack_summary.sort_values('Lineup Count', ascending=False).drop_duplicates()
                st.session_state['stack_summary'] = stack_summary.copy()
                if 'origin_stack_exposures' not in st.session_state:
                    st.session_state['origin_stack_exposures'] = stack_summary.copy()
            st.subheader("Stack Summary")
            st.dataframe(
                st.session_state['stack_summary'].style
                .background_gradient(axis=0)
                .background_gradient(cmap='RdYlGn')
                .background_gradient(cmap='RdYlGn_r', subset=['Avg Finish %', 'Avg Own', 'Avg Dupes'])
                .format({
                    'Avg Median': '{:.2f}',
                    'Avg Own': '{:.2f}',
                    'Avg Dupes': '{:.2f}',
                    'Avg Finish %': '{:.2%}',
                    'Avg Lineup Edge': '{:.2%}',
                    'Exposure': '{:.2%}',
                    'Avg Diversity': '{:.2%}'
                }),
                height=400,
                use_container_width=True
            )
    else:
        # No stack data in this portfolio; fall back to an empty summary frame
        stack_summary = pd.DataFrame(columns=['Stack', 'Lineup Count', 'Exposure', 'Avg Median', 'Avg Own', 'Avg Dupes', 'Avg Finish %', 'Avg Lineup Edge', 'Avg Diversity'])
with combos_col:
    st.subheader("Player Combinations")
    # Controls for combo analysis
    with st.form("combo_analysis_form"):
        combo_size_col, columns_excluded_col, combo_analyze_col = st.columns(3)
        with combo_size_col:
            combo_size = st.selectbox("Combo Size", [2, 3], key='combo_size')
        with columns_excluded_col:
            try:
                excluded_cols_extended = st.multiselect("Exclude Columns?", st.session_state['display_frame'].drop(columns=excluded_cols).columns, key='excluded_cols_extended')
            except Exception:
                # excluded_cols may reference columns missing from this frame;
                # fall back to offering every column
                excluded_cols_extended = st.multiselect("Exclude Columns?", st.session_state['display_frame'].columns, key='excluded_cols_extended')
        with combo_analyze_col:
            submitted = st.form_submit_button("Analyze Combos")
        if submitted:
            st.session_state['combo_analysis'] = analyze_player_combos(
                st.session_state['display_frame'], excluded_cols + excluded_cols_extended, combo_size
            )
    # Display results (combo_analysis is expected to carry the Avg/Exposure
    # columns referenced by the formatter below)
    if 'combo_analysis' in st.session_state:
        st.dataframe(
            st.session_state['combo_analysis'].style
            .background_gradient(axis=0)
            .background_gradient(cmap='RdYlGn')
            .background_gradient(cmap='RdYlGn_r', subset=['Avg Finish %', 'Avg Own', 'Avg Dupes'])
            .format({
                'Avg Median': '{:.2f}',
                'Avg Own': '{:.2f}',
                'Avg Dupes': '{:.2f}',
                'Avg Finish %': '{:.2%}',
                'Avg Lineup Edge': '{:.2%}',
                'Exposure': '{:.2%}',
                'Avg Diversity': '{:.2%}'
            }),
            height=400,
            use_container_width=True
        )
    else:
        st.info("Click 'Analyze Combos' to see the most common player combinations.")