James McCool committed on
Commit
26d7344
·
1 Parent(s): c038eeb

Add functions to load and save base frames using compressed storage in session state. Update Manage Portfolio tab to utilize these functions for managing working frames and default base frame.

Browse files
Files changed (1) hide show
  1. app.py +26 -6
app.py CHANGED
@@ -645,6 +645,20 @@ def optimize_dataframe_dtypes(df):
645
 
646
  return df
647
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
648
  def create_memory_efficient_mappings(projections_df, site_var, type_var, sport_var):
649
  """Create mappings with optimized data types"""
650
  # Optimize projections data types first
@@ -1752,7 +1766,8 @@ if selected_tab == 'Manage Portfolio':
1752
  if st.session_state['base_frame_names']:
1753
  base_choice = st.selectbox("Base Choice", options=list(st.session_state['base_frame_names'].keys()), index=0)
1754
  if st.button("Load Selected Base"):
1755
- st.session_state['working_frame'] = st.session_state['base_frame_names'][base_choice].copy()
 
1756
  st.rerun()
1757
  else:
1758
  st.info("No saved base frames available")
@@ -1765,7 +1780,8 @@ if selected_tab == 'Manage Portfolio':
1765
  new_base_name = st.text_input("New Base Name", value='New Base')
1766
  if st.button("Save Current as Base"):
1767
  if new_base_name and new_base_name not in st.session_state['base_frame_names']:
1768
- st.session_state['base_frame_names'][new_base_name] = st.session_state['working_frame'].copy()
 
1769
  st.success(f"Base '{new_base_name}' saved successfully!")
1770
  elif new_base_name in st.session_state['base_frame_names']:
1771
  st.error("Base name already exists")
@@ -1814,8 +1830,8 @@ if selected_tab == 'Manage Portfolio':
1814
  # Create the final base frame with dupe predictions
1815
  final_base_frame = predict_dupes(processed_frame, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
1816
 
1817
- # Set up the Default base and working frame
1818
- st.session_state['base_frame_names']['Default'] = final_base_frame.copy()
1819
  st.session_state['working_frame'] = final_base_frame.copy()
1820
 
1821
  # st.session_state['highest_owned_teams'] = st.session_state['projections_df'][~st.session_state['projections_df']['position'].isin(['P', 'SP'])].groupby('team')['ownership'].sum().sort_values(ascending=False).head(3).index.tolist()
@@ -2762,7 +2778,9 @@ if selected_tab == 'Manage Portfolio':
2762
  st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
2763
 
2764
  # st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2765
- st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], st.session_state['base_frame_names']['Default'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
 
 
2766
  team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team']))
2767
  if 'Stack' in st.session_state['working_frame'].columns:
2768
  st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply(
@@ -2800,7 +2818,9 @@ if selected_tab == 'Manage Portfolio':
2800
  st.session_state['export_base'] = parsed_frame.reset_index(drop=True)
2801
 
2802
  # st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2803
- st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], st.session_state['base_frame_names']['Default'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
 
 
2804
  team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team']))
2805
  if 'Stack' in st.session_state['export_base'].columns:
2806
  st.session_state['export_base']['Stack'] = st.session_state['export_base'].apply(
 
645
 
646
  return df
647
 
648
def load_base_frame(base_name):
    """Load a saved base frame from compressed session-state storage.

    Frames are stored in ``st.session_state['base_frame_names']`` as
    gzip-compressed parquet bytes (see ``save_base_frame``).

    Parameters
    ----------
    base_name : str
        Key under which the frame was stored.

    Returns
    -------
    pandas.DataFrame
        The decompressed base frame.

    Raises
    ------
    KeyError
        If no base frame has been saved under ``base_name``.
    """
    try:
        # EAFP: a single dict lookup instead of a membership test followed
        # by a second lookup of the same key.
        base_bytes = st.session_state['base_frame_names'][base_name]
    except KeyError:
        raise KeyError(f"Base frame '{base_name}' not found") from None
    return pd.read_parquet(io.BytesIO(base_bytes))
655
+
656
def save_base_frame(base_name, dataframe):
    """Persist *dataframe* in session state as gzip-compressed parquet bytes.

    The serialized bytes are stored under ``base_name`` in
    ``st.session_state['base_frame_names']`` and can be restored with
    ``load_base_frame``.
    """
    # When no path is supplied, to_parquet returns the serialized bytes
    # directly, so no intermediate BytesIO buffer is needed.
    payload = dataframe.to_parquet(compression='gzip')
    st.session_state['base_frame_names'][base_name] = payload
661
+
662
  def create_memory_efficient_mappings(projections_df, site_var, type_var, sport_var):
663
  """Create mappings with optimized data types"""
664
  # Optimize projections data types first
 
1766
  if st.session_state['base_frame_names']:
1767
  base_choice = st.selectbox("Base Choice", options=list(st.session_state['base_frame_names'].keys()), index=0)
1768
  if st.button("Load Selected Base"):
1769
+ # Load from compressed storage
1770
+ st.session_state['working_frame'] = load_base_frame(base_choice)
1771
  st.rerun()
1772
  else:
1773
  st.info("No saved base frames available")
 
1780
  new_base_name = st.text_input("New Base Name", value='New Base')
1781
  if st.button("Save Current as Base"):
1782
  if new_base_name and new_base_name not in st.session_state['base_frame_names']:
1783
+ # Store using compressed storage
1784
+ save_base_frame(new_base_name, st.session_state['working_frame'])
1785
  st.success(f"Base '{new_base_name}' saved successfully!")
1786
  elif new_base_name in st.session_state['base_frame_names']:
1787
  st.error("Base name already exists")
 
1830
  # Create the final base frame with dupe predictions
1831
  final_base_frame = predict_dupes(processed_frame, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
1832
 
1833
+ # Set up the Default base and working frame using memory-efficient storage
1834
+ save_base_frame('Default', final_base_frame)
1835
  st.session_state['working_frame'] = final_base_frame.copy()
1836
 
1837
  # st.session_state['highest_owned_teams'] = st.session_state['projections_df'][~st.session_state['projections_df']['position'].isin(['P', 'SP'])].groupby('team')['ownership'].sum().sort_values(ascending=False).head(3).index.tolist()
 
2778
  st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
2779
 
2780
  # st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2781
+ # Load Default base from compressed storage for reassess_edge
2782
+ default_base = load_base_frame('Default')
2783
+ st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], default_base, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
2784
  team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team']))
2785
  if 'Stack' in st.session_state['working_frame'].columns:
2786
  st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply(
 
2818
  st.session_state['export_base'] = parsed_frame.reset_index(drop=True)
2819
 
2820
  # st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2821
+ # Load Default base from compressed storage for reassess_edge
2822
+ default_base = load_base_frame('Default')
2823
+ st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], default_base, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
2824
  team_dict = dict(zip(st.session_state['portfolio_inc_proj']['player_names'], st.session_state['portfolio_inc_proj']['team']))
2825
  if 'Stack' in st.session_state['export_base'].columns:
2826
  st.session_state['export_base']['Stack'] = st.session_state['export_base'].apply(