James McCool
committed on
Commit
·
b6cb507
1
Parent(s):
7fd11fc
Aiming to remedy the missing players from the projections in exposure management functions
Browse files
app.py
CHANGED
|
@@ -1655,6 +1655,16 @@ if selected_tab == 'Data Load':
|
|
| 1655 |
buffer = io.BytesIO()
|
| 1656 |
st.session_state['portfolio'].to_parquet(buffer, compression='snappy')
|
| 1657 |
st.session_state['origin_portfolio'] = buffer.getvalue()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1658 |
|
| 1659 |
del st.session_state['portfolio'], st.session_state['export_portfolio']
|
| 1660 |
|
|
@@ -2165,7 +2175,7 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2165 |
elif preset_choice == 'Volatility (Heavy Lineup Edge)':
|
| 2166 |
parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
|
| 2167 |
elif preset_choice == 'Hedge Chalk (Manage Leverage)':
|
| 2168 |
-
parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['
|
| 2169 |
elif preset_choice == 'Reduce Volatility (Manage Own)':
|
| 2170 |
parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
|
| 2171 |
|
|
@@ -2181,7 +2191,7 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2181 |
elif preset_choice == 'Volatility (Heavy Lineup Edge)':
|
| 2182 |
parsed_frame = volatility_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var)
|
| 2183 |
elif preset_choice == 'Hedge Chalk (Manage Leverage)':
|
| 2184 |
-
parsed_frame = hedging_preset(st.session_state['export_base'], lineup_target, st.session_state['
|
| 2185 |
elif preset_choice == 'Reduce Volatility (Manage Own)':
|
| 2186 |
parsed_frame = reduce_volatility_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var)
|
| 2187 |
|
|
@@ -2453,7 +2463,7 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2453 |
st.session_state['settings_base'] = False
|
| 2454 |
# Prepare DataFrame for exposure_spread to avoid categorical issues
|
| 2455 |
working_frame_prepared = prepare_dataframe_for_exposure_spread(st.session_state['working_frame'], st.session_state['player_columns'])
|
| 2456 |
-
parsed_frame = exposure_spread(working_frame_prepared, st.session_state['exposure_player'], exposure_target, comp_salary_below, comp_salary_above, ignore_stacks, remove_teams_exposure, specific_replacements, specific_columns, st.session_state['
|
| 2457 |
|
| 2458 |
# Use consolidated calculation function
|
| 2459 |
parsed_frame = calculate_lineup_metrics(
|
|
@@ -2462,14 +2472,14 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2462 |
st.session_state['map_dict'],
|
| 2463 |
type_var,
|
| 2464 |
sport_var,
|
| 2465 |
-
st.session_state['
|
| 2466 |
)
|
| 2467 |
|
| 2468 |
st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
|
| 2469 |
|
| 2470 |
# st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
|
| 2471 |
st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
|
| 2472 |
-
team_dict = dict(zip(st.session_state['
|
| 2473 |
if 'Stack' in st.session_state['working_frame'].columns:
|
| 2474 |
st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply(
|
| 2475 |
lambda row: Counter(
|
|
@@ -2490,7 +2500,7 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2490 |
st.session_state['settings_base'] = False
|
| 2491 |
# Prepare DataFrame for exposure_spread to avoid categorical issues
|
| 2492 |
export_base_prepared = prepare_dataframe_for_exposure_spread(st.session_state['export_base'], st.session_state['player_columns'])
|
| 2493 |
-
parsed_frame = exposure_spread(export_base_prepared, st.session_state['exposure_player'], exposure_target, comp_salary_below, comp_salary_above, ignore_stacks, remove_teams_exposure, specific_replacements, specific_columns, st.session_state['
|
| 2494 |
|
| 2495 |
# Use consolidated calculation function for export
|
| 2496 |
parsed_frame = calculate_lineup_metrics(
|
|
@@ -2499,7 +2509,7 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2499 |
st.session_state['map_dict'],
|
| 2500 |
type_var,
|
| 2501 |
sport_var,
|
| 2502 |
-
st.session_state['
|
| 2503 |
)
|
| 2504 |
|
| 2505 |
|
|
@@ -2507,7 +2517,7 @@ if selected_tab == 'Manage Portfolio':
|
|
| 2507 |
|
| 2508 |
# st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
|
| 2509 |
st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
|
| 2510 |
-
team_dict = dict(zip(st.session_state['
|
| 2511 |
if 'Stack' in st.session_state['export_base'].columns:
|
| 2512 |
st.session_state['export_base']['Stack'] = st.session_state['export_base'].apply(
|
| 2513 |
lambda row: Counter(
|
|
|
|
| 1655 |
buffer = io.BytesIO()
|
| 1656 |
st.session_state['portfolio'].to_parquet(buffer, compression='snappy')
|
| 1657 |
st.session_state['origin_portfolio'] = buffer.getvalue()
|
| 1658 |
+
|
| 1659 |
+
portfolio_inc_proj = pd.DataFrame()
|
| 1660 |
+
portfolio_inc_proj['player_names'] = get_portfolio_names(st.session_state['portfolio'])
|
| 1661 |
+
portfolio_inc_proj['position'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['pos_map'].get(x, 'FLEX'))
|
| 1662 |
+
portfolio_inc_proj['team'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['team_map'].get(x, 'Unknown'))
|
| 1663 |
+
portfolio_inc_proj['salary'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['salary_map'].get(x, 0))
|
| 1664 |
+
portfolio_inc_proj['median'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['proj_map'].get(x, 0.0))
|
| 1665 |
+
portfolio_inc_proj['ownership'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['own_map'].get(x, 0.0))
|
| 1666 |
+
portfolio_inc_proj['captain ownership'] = portfolio_inc_proj['player_names'].map(lambda x: st.session_state['map_dict']['own_map'].get(x, 0.0))
|
| 1667 |
+
st.session_state['portfolio_inc_proj'] = portfolio_inc_proj.reset_index(drop=True)
|
| 1668 |
|
| 1669 |
del st.session_state['portfolio'], st.session_state['export_portfolio']
|
| 1670 |
|
|
|
|
| 2175 |
elif preset_choice == 'Volatility (Heavy Lineup Edge)':
|
| 2176 |
parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
|
| 2177 |
elif preset_choice == 'Hedge Chalk (Manage Leverage)':
|
| 2178 |
+
parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['portfolio_inc_proj'], sport_var)
|
| 2179 |
elif preset_choice == 'Reduce Volatility (Manage Own)':
|
| 2180 |
parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
|
| 2181 |
|
|
|
|
| 2191 |
elif preset_choice == 'Volatility (Heavy Lineup Edge)':
|
| 2192 |
parsed_frame = volatility_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var)
|
| 2193 |
elif preset_choice == 'Hedge Chalk (Manage Leverage)':
|
| 2194 |
+
parsed_frame = hedging_preset(st.session_state['export_base'], lineup_target, st.session_state['portfolio_inc_proj'], sport_var)
|
| 2195 |
elif preset_choice == 'Reduce Volatility (Manage Own)':
|
| 2196 |
parsed_frame = reduce_volatility_preset(st.session_state['export_base'], lineup_target, excluded_cols, sport_var)
|
| 2197 |
|
|
|
|
| 2463 |
st.session_state['settings_base'] = False
|
| 2464 |
# Prepare DataFrame for exposure_spread to avoid categorical issues
|
| 2465 |
working_frame_prepared = prepare_dataframe_for_exposure_spread(st.session_state['working_frame'], st.session_state['player_columns'])
|
| 2466 |
+
parsed_frame = exposure_spread(working_frame_prepared, st.session_state['exposure_player'], exposure_target, comp_salary_below, comp_salary_above, ignore_stacks, remove_teams_exposure, specific_replacements, specific_columns, st.session_state['portfolio_inc_proj'], sport_var, type_var, salary_max, stacking_sports)
|
| 2467 |
|
| 2468 |
# Use consolidated calculation function
|
| 2469 |
parsed_frame = calculate_lineup_metrics(
|
|
|
|
| 2472 |
st.session_state['map_dict'],
|
| 2473 |
type_var,
|
| 2474 |
sport_var,
|
| 2475 |
+
st.session_state['portfolio_inc_proj']
|
| 2476 |
)
|
| 2477 |
|
| 2478 |
st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
|
| 2479 |
|
| 2480 |
# st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
|
| 2481 |
st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
|
| 2482 |
+
team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team']))
|
| 2483 |
if 'Stack' in st.session_state['working_frame'].columns:
|
| 2484 |
st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply(
|
| 2485 |
lambda row: Counter(
|
|
|
|
| 2500 |
st.session_state['settings_base'] = False
|
| 2501 |
# Prepare DataFrame for exposure_spread to avoid categorical issues
|
| 2502 |
export_base_prepared = prepare_dataframe_for_exposure_spread(st.session_state['export_base'], st.session_state['player_columns'])
|
| 2503 |
+
parsed_frame = exposure_spread(export_base_prepared, st.session_state['exposure_player'], exposure_target, comp_salary_below, comp_salary_above, ignore_stacks, remove_teams_exposure, specific_replacements, specific_columns, st.session_state['portfolio_inc_proj'], sport_var, type_var, salary_max, stacking_sports)
|
| 2504 |
|
| 2505 |
# Use consolidated calculation function for export
|
| 2506 |
parsed_frame = calculate_lineup_metrics(
|
|
|
|
| 2509 |
st.session_state['map_dict'],
|
| 2510 |
type_var,
|
| 2511 |
sport_var,
|
| 2512 |
+
st.session_state['portfolio_inc_proj']
|
| 2513 |
)
|
| 2514 |
|
| 2515 |
|
|
|
|
| 2517 |
|
| 2518 |
# st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
|
| 2519 |
st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
|
| 2520 |
+
team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team']))
|
| 2521 |
if 'Stack' in st.session_state['export_base'].columns:
|
| 2522 |
st.session_state['export_base']['Stack'] = st.session_state['export_base'].apply(
|
| 2523 |
lambda row: Counter(
|