James McCool committed on
Commit
ea74249
·
1 Parent(s): 24de772

init commit

Browse files
.streamlit/secrets.toml ADDED
@@ -0,0 +1 @@
 
 
1
+ mongo_uri = "mongodb+srv://multichem:Xr1q5wZdXPbxdUmJ@testcluster.lgwtp5i.mongodb.net/?retryWrites=true&w=majority&appName=TestCluster"
Dockerfile CHANGED
# Post-change state of the Dockerfile hunk (reconstructed from the diff view).

RUN apt-get update && apt-get install -y \
    build-essential \
    curl \
    software-properties-common \
    git \
    && rm -rf /var/lib/apt/lists/*

COPY requirements.txt ./
COPY src/ ./src/
COPY .streamlit/ ./.streamlit/

# SECURITY: a live MongoDB credential is baked into the image layer here (and
# the same credential is committed in .streamlit/secrets.toml). Rotate this
# credential and inject it at runtime (e.g. `docker run -e MONGO_URI=...`)
# instead of hardcoding it in the image.
ENV MONGO_URI="mongodb+srv://multichem:Xr1q5wZdXPbxdUmJ@testcluster.lgwtp5i.mongodb.net/?retryWrites=true&w=majority&appName=TestCluster"

# Run as an unprivileged user (uid 1000) from here on.
RUN useradd -m -u 1000 user
USER user

# One ENV instruction setting both variables; the explicit continuation with a
# space replaces the original fragile `HOME=/home/user\` line break.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app
RUN pip install --no-cache-dir --upgrade pip

# Copy the full build context into the user's app dir, owned by that user so
# the later pip install can write .local without root.
COPY --chown=user . $HOME/app

RUN pip3 install -r requirements.txt
26
 
requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
- altair
2
- pandas
3
- streamlit
 
 
 
 
 
 
1
+ streamlit
2
+ openpyxl
3
+ matplotlib
4
+ pulp
5
+ docker
6
+ plotly
7
+ scipy
8
+ pymongo
src/database.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import pymongo
import streamlit as st


@st.cache_resource
def init_conn():
    """Open a cached MongoDB client and return the NHL database handle.

    The connection string is taken from the MONGO_URI environment variable
    when set (non-empty), otherwise from the Streamlit secrets entry
    'mongo_uri'.
    """
    connection_uri = os.getenv('MONGO_URI') or st.secrets['mongo_uri']
    mongo_client = pymongo.MongoClient(
        connection_uri,
        retryWrites=True,
        serverSelectionTimeoutMS=500000,
    )
    return mongo_client["NHL_Database"]


# Initialize the database connection once at import time; other modules do
# `from database import db`.
db = init_conn()
src/streamlit_app.py CHANGED
@@ -1,40 +1,774 @@
1
- import altair as alt
 
2
  import numpy as np
3
  import pandas as pd
4
- import streamlit as st
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
 
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))
 
1
import streamlit as st
# set_page_config must be the first Streamlit command, hence before the
# remaining imports run any st.* calls.
st.set_page_config(layout="wide")
import numpy as np
import pandas as pd
import pymongo

from database import db

# Styler number formats used when rendering exposure tables.
percentages_format = {'Exposure': '{:.2%}'}
freq_format = {'Exposure': '{:.2%}', 'Proj Own': '{:.2%}', 'Edge': '{:.2%}'}

# Column order for each site's lineup frames (DK has one FLEX, FD has two).
dk_columns = ['C1', 'C2', 'W1', 'W2', 'W3', 'D1', 'D2', 'G', 'FLEX', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']
fd_columns = ['C1', 'C2', 'W1', 'W2', 'D1', 'D2', 'FLEX1', 'FLEX2', 'G', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']

# Gold-on-darker-gold styling for the st.tabs widgets used below.
st.markdown("""
<style>
/* Tab styling */
.stTabs [data-baseweb="tab-list"] {
gap: 8px;
padding: 4px;
}

.stTabs [data-baseweb="tab"] {
height: 50px;
white-space: pre-wrap;
background-color: #FFD700;
color: white;
border-radius: 10px;
gap: 1px;
padding: 10px 20px;
font-weight: bold;
transition: all 0.3s ease;
}

.stTabs [aria-selected="true"] {
background-color: #DAA520;
color: white;
}

.stTabs [data-baseweb="tab"]:hover {
background-color: #DAA520;
cursor: pointer;
}
</style>""", unsafe_allow_html=True)
44
+
45
@st.cache_data(ttl = 600)
def init_DK_seed_frames(sharp_split, slate_var):
    """Load the top `sharp_split` DraftKings seed lineups for a slate.

    Args:
        sharp_split: maximum number of seed lineups to pull (field sharpness).
        slate_var: one of 'Main Slate', 'Secondary Slate', 'Auxiliary Slate'.

    Returns:
        numpy array of lineups with roster-slot player keys translated to
        display names via the slate's name map.

    Raises:
        ValueError: for an unrecognized `slate_var` (the original if/elif
        chain fell through and crashed later with a NameError).
    """
    # UI slate label -> (name-map collection, seed-frame collection).
    slate_collections = {
        'Main Slate': ('DK_NHL_name_map', 'DK_NHL_seed_frame_Main Slate'),
        'Secondary Slate': ('DK_NHL_Secondary_name_map', 'DK_NHL_seed_frame_Secondary Slate'),
        'Auxiliary Slate': ('DK_NHL_Late_name_map', 'DK_NHL_seed_frame_Late Slate'),
    }
    try:
        name_map_coll, seed_coll = slate_collections[slate_var]
    except KeyError:
        raise ValueError(f"Unknown slate: {slate_var!r}") from None

    # Build the key -> display-name translation table for this slate.
    raw_data = pd.DataFrame(list(db[name_map_coll].find()))
    names_dict = dict(zip(raw_data['key'], raw_data['value']))

    cursor = db[seed_coll].find().limit(sharp_split)
    raw_display = pd.DataFrame(list(cursor))
    raw_display = raw_display[['C1', 'C2', 'W1', 'W2', 'W3', 'D1', 'D2', 'G', 'FLEX', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']]
    # Translate player keys to display names in the roster-slot columns only.
    for col in ['C1', 'C2', 'W1', 'W2', 'W3', 'D1', 'D2', 'G', 'FLEX']:
        raw_display[col] = raw_display[col].map(names_dict)
    return raw_display.to_numpy()
77
+
78
@st.cache_data(ttl = 599)
def init_FD_seed_frames(sharp_split, slate_var):
    """Load the top `sharp_split` FanDuel seed lineups for a slate.

    Args:
        sharp_split: maximum number of seed lineups to pull (field sharpness).
        slate_var: one of 'Main Slate', 'Secondary Slate', 'Auxiliary Slate'.

    Returns:
        numpy array of lineups with roster-slot player keys translated to
        display names via the slate's name map.

    Raises:
        ValueError: for an unrecognized `slate_var` (the original if/elif
        chain fell through and crashed later with a NameError). The original
        also issued a redundant `find().limit()` query inside each branch
        whose result was discarded and re-issued afterwards; the query is now
        made exactly once.
    """
    # UI slate label -> (name-map collection, seed-frame collection).
    slate_collections = {
        'Main Slate': ('FD_NHL_name_map', 'FD_NHL_seed_frame_Main Slate'),
        'Secondary Slate': ('FD_NHL_Secondary_name_map', 'FD_NHL_seed_frame_Secondary Slate'),
        'Auxiliary Slate': ('FD_NHL_Late_name_map', 'FD_NHL_seed_frame_Late Slate'),
    }
    try:
        name_map_coll, seed_coll = slate_collections[slate_var]
    except KeyError:
        raise ValueError(f"Unknown slate: {slate_var!r}") from None

    # Build the key -> display-name translation table for this slate.
    raw_data = pd.DataFrame(list(db[name_map_coll].find()))
    names_dict = dict(zip(raw_data['key'], raw_data['value']))

    cursor = db[seed_coll].find().limit(sharp_split)
    raw_display = pd.DataFrame(list(cursor))
    raw_display = raw_display[['C1', 'C2', 'W1', 'W2', 'D1', 'D2', 'FLEX1', 'FLEX2', 'G', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']]
    # Translate player keys to display names in the roster-slot columns only.
    for col in ['C1', 'C2', 'W1', 'W2', 'D1', 'D2', 'FLEX1', 'FLEX2', 'G']:
        raw_display[col] = raw_display[col].map(names_dict)
    return raw_display.to_numpy()
113
+
114
@st.cache_data(ttl = 599)
def init_baselines():
    """Load player-level ROO projections and split into DK and FD baselines.

    Returns:
        (dk_raw, fd_raw, teams_playing_count) — one de-duplicated,
        Median-bearing frame per site, plus the number of distinct DK teams.

    The original assigned into DataFrame slices ('STDev', 'Team') without
    `.copy()`, triggering pandas SettingWithCopy hazards; copies are now
    explicit, and the duplicated DK/FD pipeline is factored into a helper.
    """
    cursor = db["Player_Level_ROO"].find()
    raw_display = pd.DataFrame(list(cursor))
    load_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%', 'Own',
                                'Small Field Own%', 'Large Field Own%', 'Cash Own%', 'CPT_Own', 'Site', 'Type', 'Slate', 'player_id', 'timestamp']].copy()
    # Simple volatility proxy: one third of the median projection.
    load_display['STDev'] = load_display['Median'] / 3

    def _site_frame(site_name):
        # One row per player for the given site, dropping players w/o a Median.
        frame = load_display[load_display['Site'] == site_name]
        frame = frame.drop_duplicates(subset=['Player'], keep='first')
        frame = frame.dropna(subset=['Median']).copy()
        # Normalize team abbreviations to the three-letter forms used elsewhere.
        frame['Team'] = frame['Team'].replace(['TB', 'SJ', 'LA'], ['TBL', 'SJS', 'LAK'])
        return frame

    dk_raw = _site_frame('Draftkings')
    fd_raw = _site_frame('Fanduel')

    teams_playing_count = len(dk_raw.Team.unique())

    return dk_raw, fd_raw, teams_playing_count
138
+
139
@st.cache_data
def convert_df(array, columns=None):
    """Serialize a lineup array to UTF-8 CSV bytes for st.download_button.

    Args:
        array: 2-D array-like of lineup rows.
        columns: header names; defaults to the module-level `column_names`,
            which is only bound after a site is selected in the UI — pass it
            explicitly to avoid that ordering dependency (calling this before
            a site is chosen raised NameError in the original).

    Returns:
        CSV bytes (UTF-8), index column included, matching pandas defaults.
    """
    frame = pd.DataFrame(array, columns=column_names if columns is None else columns)
    return frame.to_csv().encode('utf-8')
143
+
144
@st.cache_data
def calculate_DK_value_frequencies(np_array):
    """Per-player exposure across the first nine (roster-slot) columns.

    Returns a two-column array pairing each unique player with its occurrence
    count divided by the number of lineup rows.
    """
    players, occurrence_counts = np.unique(np_array[:, :9], return_counts=True)
    exposure = occurrence_counts / len(np_array)  # normalize by lineup count
    return np.column_stack((players, exposure))
150
+
151
@st.cache_data
def calculate_FD_value_frequencies(np_array):
    """Per-player exposure across the first nine (roster-slot) columns.

    Returns a two-column array pairing each unique player with its occurrence
    count divided by the number of lineup rows.
    """
    players, occurrence_counts = np.unique(np_array[:, :9], return_counts=True)
    exposure = occurrence_counts / len(np_array)  # normalize by lineup count
    return np.column_stack((players, exposure))
157
+
158
@st.cache_data
def sim_contest(Sim_size, seed_frame, maps_dict, Contest_Size, teams_playing_count):
    """Simulate `Sim_size` contests and return each contest's winning lineup.

    Each iteration samples `Contest_Size` lineups (with replacement) from
    `seed_frame`, draws a normally-distributed fantasy score per lineup —
    boosted for 4+ player team stacks — and records the single best lineup.
    """
    winners = []
    lineup_pool = seed_frame.copy()
    # Bind dict lookups as vectorized callables once, outside the sim loop.
    project = np.vectorize(maps_dict['Projection_map'].__getitem__)
    deviation = np.vectorize(maps_dict['STDev_map'].__getitem__)

    st.write('Simulating contest on frames')

    for _ in range(Sim_size):
        entries = lineup_pool[np.random.choice(lineup_pool.shape[0], Contest_Size)]

        # Stack bonus: column 12 is Team_count (primary-team stack size) in
        # both the DK and FD column layouts.
        bonus = np.ones(entries.shape[0])
        bonus += np.minimum(0.10, np.where(entries[:, 12] == 4, 0.025 * (teams_playing_count - 8), 0))
        bonus += np.minimum(0.15, np.where(entries[:, 12] >= 5, 0.025 * (teams_playing_count - 12), 0))

        # Draw a per-player score and sum per lineup; entries[:, :-7] keeps the
        # nine roster-slot columns. The bonus scales both mean and spread.
        scores = np.sum(
            np.random.normal(
                loc=project(entries[:, :-7]) * bonus[:, np.newaxis],
                scale=deviation(entries[:, :-7]) * bonus[:, np.newaxis],
            ),
            axis=1,
        )

        scored = np.c_[entries, scores]
        # Pre-sort by projection (column 10) so the stable sort on the simulated
        # score breaks ties in favor of the higher-projected lineup.
        by_projection = scored[scored[:, 10].argsort()[::-1]]
        winners.append(by_projection[by_projection[:, -1].argsort(kind='stable')[::-1][:1]])

    return winners
193
+
194
+ dk_raw, fd_raw, teams_playing_count = init_baselines()
195
+ dk_id_dict = dict(zip(dk_raw.Player, dk_raw.player_id))
196
+ fd_id_dict = dict(zip(fd_raw.Player, fd_raw.player_id))
197
+
198
+ tab1, tab2 = st.tabs(['Contest Sims', 'Data Export'])
199
+
200
+ with tab1:
201
+ with st.expander("Info and Filters"):
202
+ if st.button("Load/Reset Data", key='reset2'):
203
+ st.cache_data.clear()
204
+ for key in st.session_state.keys():
205
+ del st.session_state[key]
206
+ DK_seed = init_DK_seed_frames(10000, 'Main Slate')
207
+ FD_seed = init_FD_seed_frames(10000, 'Main Slate')
208
+ dk_raw, fd_raw, teams_playing_count = init_baselines()
209
+ dk_id_dict = dict(zip(dk_raw.Player, dk_raw.player_id))
210
+ fd_id_dict = dict(zip(fd_raw.Player, fd_raw.player_id))
211
+
212
+ sim_slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Auxiliary Slate'), key='sim_slate_var1')
213
+ sim_site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'), key='sim_site_var1')
214
+
215
+ contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large', 'Custom'))
216
+ if contest_var1 == 'Small':
217
+ Contest_Size = 1000
218
+ elif contest_var1 == 'Medium':
219
+ Contest_Size = 5000
220
+ elif contest_var1 == 'Large':
221
+ Contest_Size = 10000
222
+ elif contest_var1 == 'Custom':
223
+ Contest_Size = st.number_input("Insert contest size", value=100, placeholder="Type a number under 10,000...")
224
+ strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Very', 'Above Average', 'Average', 'Below Average', 'Not Very'))
225
+ if strength_var1 == 'Not Very':
226
+ sharp_split = 500000
227
+ elif strength_var1 == 'Below Average':
228
+ sharp_split = 250000
229
+ elif strength_var1 == 'Average':
230
+ sharp_split = 100000
231
+ elif strength_var1 == 'Above Average':
232
+ sharp_split = 50000
233
+ elif strength_var1 == 'Very':
234
+ sharp_split = 10000
235
+
236
+ if st.button("Run Contest Sim"):
237
+ if 'working_seed' in st.session_state:
238
+ st.session_state.maps_dict = {
239
+ 'Projection_map':dict(zip(raw_baselines.Player,raw_baselines.Median)),
240
+ 'Salary_map':dict(zip(raw_baselines.Player,raw_baselines.Salary)),
241
+ 'Pos_map':dict(zip(raw_baselines.Player,raw_baselines.Position)),
242
+ 'Own_map':dict(zip(raw_baselines.Player,raw_baselines['Own'])),
243
+ 'Team_map':dict(zip(raw_baselines.Player,raw_baselines.Team)),
244
+ 'STDev_map':dict(zip(raw_baselines.Player,raw_baselines.STDev))
245
+ }
246
+ Sim_Winners = sim_contest(1000, st.session_state.working_seed, st.session_state.maps_dict, Contest_Size, teams_playing_count)
247
+ Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners))
248
+
249
+ #st.table(Sim_Winner_Frame)
250
+
251
+ # Initial setup
252
+ Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
253
+ Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
254
+ Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
255
+ Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))
256
+
257
+ # Type Casting
258
+ type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32, 'Own': np.float32}
259
+ Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
260
+
261
+ # Sorting
262
+ st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending= [False, False]).copy().drop_duplicates(subset='unique_id').head(100)
263
+ st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)
264
+
265
+ # Data Copying
266
+ st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
267
+ for col in st.session_state.Sim_Winner_Export.iloc[:, 0:9].columns:
268
+ st.session_state.Sim_Winner_Export[col] = st.session_state.Sim_Winner_Export[col].map(dk_id_dict)
269
+ st.session_state.Sim_Winner_Export = st.session_state.Sim_Winner_Export.drop_duplicates(subset=['Team', 'Secondary', 'salary', 'unique_id'])
270
+
271
+ # Data Copying
272
+ st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
273
+
274
+ else:
275
+ if sim_site_var1 == 'Draftkings':
276
+ st.session_state.working_seed = init_DK_seed_frames(sharp_split, sim_slate_var1)
277
+ dk_id_dict = dict(zip(dk_raw.Player, dk_raw.player_id))
278
+ raw_baselines = dk_raw
279
+ column_names = dk_columns
280
+ elif sim_site_var1 == 'Fanduel':
281
+ st.session_state.working_seed = init_FD_seed_frames(sharp_split, sim_slate_var1)
282
+ fd_id_dict = dict(zip(fd_raw.Player, fd_raw.player_id))
283
+ raw_baselines = fd_raw
284
+ column_names = fd_columns
285
+
286
+ st.session_state.maps_dict = {
287
+ 'Projection_map':dict(zip(raw_baselines.Player,raw_baselines.Median)),
288
+ 'Salary_map':dict(zip(raw_baselines.Player,raw_baselines.Salary)),
289
+ 'Pos_map':dict(zip(raw_baselines.Player,raw_baselines.Position)),
290
+ 'Own_map':dict(zip(raw_baselines.Player,raw_baselines['Own'])),
291
+ 'Team_map':dict(zip(raw_baselines.Player,raw_baselines.Team)),
292
+ 'STDev_map':dict(zip(raw_baselines.Player,raw_baselines.STDev))
293
+ }
294
+ Sim_Winners = sim_contest(1000, st.session_state.working_seed, st.session_state.maps_dict, Contest_Size, teams_playing_count)
295
+ Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners))
296
+
297
+ #st.table(Sim_Winner_Frame)
298
+
299
+ # Initial setup
300
+ Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
301
+ Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
302
+ Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
303
+ Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))
304
+
305
+ # Type Casting
306
+ type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32, 'Own': np.float32}
307
+ Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
308
+
309
+ # Sorting
310
+ st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending= [False, False]).copy().drop_duplicates(subset='unique_id').head(100)
311
+ st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)
312
+
313
+ # Data Copying
314
+ st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
315
+ for col in st.session_state.Sim_Winner_Export.iloc[:, 0:9].columns:
316
+ st.session_state.Sim_Winner_Export[col] = st.session_state.Sim_Winner_Export[col].map(dk_id_dict)
317
+ st.session_state.Sim_Winner_Export = st.session_state.Sim_Winner_Export.drop_duplicates(subset=['Team', 'Secondary', 'salary', 'unique_id'])
318
+
319
+ # Data Copying
320
+ st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
321
+ st.session_state.freq_copy = st.session_state.Sim_Winner_Display
322
+
323
+ if sim_site_var1 == 'Draftkings':
324
+ freq_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:9].values, return_counts=True)),
325
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
326
+ elif sim_site_var1 == 'Fanduel':
327
+ freq_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:9].values, return_counts=True)),
328
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
329
+ freq_working['Freq'] = freq_working['Freq'].astype(int)
330
+ freq_working['Position'] = freq_working['Player'].map(st.session_state.maps_dict['Pos_map'])
331
+ freq_working['Salary'] = freq_working['Player'].map(st.session_state.maps_dict['Salary_map'])
332
+ freq_working['Proj Own'] = freq_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
333
+ freq_working['Exposure'] = freq_working['Freq']/(1000)
334
+ freq_working['Edge'] = freq_working['Exposure'] - freq_working['Proj Own']
335
+ freq_working['Team'] = freq_working['Player'].map(st.session_state.maps_dict['Team_map'])
336
+ st.session_state.player_freq = freq_working.copy()
337
+
338
+ if sim_site_var1 == 'Draftkings':
339
+ center_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:2].values, return_counts=True)),
340
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
341
+ elif sim_site_var1 == 'Fanduel':
342
+ center_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:2].values, return_counts=True)),
343
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
344
+ center_working['Freq'] = center_working['Freq'].astype(int)
345
+ center_working['Position'] = center_working['Player'].map(st.session_state.maps_dict['Pos_map'])
346
+ center_working['Salary'] = center_working['Player'].map(st.session_state.maps_dict['Salary_map'])
347
+ center_working['Proj Own'] = center_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
348
+ center_working['Exposure'] = center_working['Freq']/(1000)
349
+ center_working['Edge'] = center_working['Exposure'] - center_working['Proj Own']
350
+ center_working['Team'] = center_working['Player'].map(st.session_state.maps_dict['Team_map'])
351
+ st.session_state.center_freq = center_working.copy()
352
+
353
+ if sim_site_var1 == 'Draftkings':
354
+ wing_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,2:5].values, return_counts=True)),
355
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
356
+ elif sim_site_var1 == 'Fanduel':
357
+ wing_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,2:4].values, return_counts=True)),
358
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
359
+ wing_working['Freq'] = wing_working['Freq'].astype(int)
360
+ wing_working['Position'] = wing_working['Player'].map(st.session_state.maps_dict['Pos_map'])
361
+ wing_working['Salary'] = wing_working['Player'].map(st.session_state.maps_dict['Salary_map'])
362
+ wing_working['Proj Own'] = wing_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
363
+ wing_working['Exposure'] = wing_working['Freq']/(1000)
364
+ wing_working['Edge'] = wing_working['Exposure'] - wing_working['Proj Own']
365
+ wing_working['Team'] = wing_working['Player'].map(st.session_state.maps_dict['Team_map'])
366
+ st.session_state.wing_freq = wing_working.copy()
367
+
368
+ if sim_site_var1 == 'Draftkings':
369
+ dmen_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,5:7].values, return_counts=True)),
370
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
371
+ elif sim_site_var1 == 'Fanduel':
372
+ dmen_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,4:6].values, return_counts=True)),
373
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
374
+ dmen_working['Freq'] = dmen_working['Freq'].astype(int)
375
+ dmen_working['Position'] = dmen_working['Player'].map(st.session_state.maps_dict['Pos_map'])
376
+ dmen_working['Salary'] = dmen_working['Player'].map(st.session_state.maps_dict['Salary_map'])
377
+ dmen_working['Proj Own'] = dmen_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
378
+ dmen_working['Exposure'] = dmen_working['Freq']/(1000)
379
+ dmen_working['Edge'] = dmen_working['Exposure'] - dmen_working['Proj Own']
380
+ dmen_working['Team'] = dmen_working['Player'].map(st.session_state.maps_dict['Team_map'])
381
+ st.session_state.dmen_freq = dmen_working.copy()
382
+
383
+ if sim_site_var1 == 'Draftkings':
384
+ flex_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,8:9].values, return_counts=True)),
385
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
386
+ elif sim_site_var1 == 'Fanduel':
387
+ flex_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,6:8].values, return_counts=True)),
388
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
389
+ flex_working['Freq'] = flex_working['Freq'].astype(int)
390
+ flex_working['Position'] = flex_working['Player'].map(st.session_state.maps_dict['Pos_map'])
391
+ flex_working['Salary'] = flex_working['Player'].map(st.session_state.maps_dict['Salary_map'])
392
+ flex_working['Proj Own'] = flex_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
393
+ flex_working['Exposure'] = flex_working['Freq']/(1000)
394
+ flex_working['Edge'] = flex_working['Exposure'] - flex_working['Proj Own']
395
+ flex_working['Team'] = flex_working['Player'].map(st.session_state.maps_dict['Team_map'])
396
+ st.session_state.flex_freq = flex_working.copy()
397
+
398
+ if sim_site_var1 == 'Draftkings':
399
+ goalie_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,7:8].values, return_counts=True)),
400
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
401
+ elif sim_site_var1 == 'Fanduel':
402
+ goalie_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,8:9].values, return_counts=True)),
403
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
404
+ goalie_working['Freq'] = goalie_working['Freq'].astype(int)
405
+ goalie_working['Position'] = goalie_working['Player'].map(st.session_state.maps_dict['Pos_map'])
406
+ goalie_working['Salary'] = goalie_working['Player'].map(st.session_state.maps_dict['Salary_map'])
407
+ goalie_working['Proj Own'] = goalie_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
408
+ goalie_working['Exposure'] = goalie_working['Freq']/(1000)
409
+ goalie_working['Edge'] = goalie_working['Exposure'] - goalie_working['Proj Own']
410
+ goalie_working['Team'] = goalie_working['Player'].map(st.session_state.maps_dict['Team_map'])
411
+ st.session_state.goalie_freq = goalie_working.copy()
412
+
413
+ if sim_site_var1 == 'Draftkings':
414
+ team_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,11:12].values, return_counts=True)),
415
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
416
+ elif sim_site_var1 == 'Fanduel':
417
+ team_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,11:12].values, return_counts=True)),
418
+ columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
419
+ team_working['Freq'] = team_working['Freq'].astype(int)
420
+ team_working['Exposure'] = team_working['Freq']/(1000)
421
+ st.session_state.team_freq = team_working.copy()
422
+
423
+ with st.container():
424
+ if st.button("Reset Sim", key='reset_sim'):
425
+ for key in st.session_state.keys():
426
+ del st.session_state[key]
427
+ if 'player_freq' in st.session_state:
428
+ player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
429
+ if player_split_var2 == 'Specific Players':
430
+ find_var2 = st.multiselect('Which players must be included in the lineups?', options = st.session_state.player_freq['Player'].unique())
431
+ elif player_split_var2 == 'Full Players':
432
+ find_var2 = st.session_state.player_freq.Player.values.tolist()
433
+
434
+ if player_split_var2 == 'Specific Players':
435
+ st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(), find_var2).any(axis=1).all(axis=1)]
436
+ if player_split_var2 == 'Full Players':
437
+ st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
438
+ if 'Sim_Winner_Display' in st.session_state:
439
+ st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
440
+ if 'Sim_Winner_Export' in st.session_state:
441
+ st.download_button(
442
+
443
+ label="Export Full Frame",
444
+ data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
445
+ file_name='MLB_consim_export.csv',
446
+ mime='text/csv',
447
+ )
448
+ tab1, tab2, tab3 = st.tabs(['Winning Frame Statistics', 'Flex Exposure Statistics', 'Stack Type Statistics'])
449
+
450
+ with tab1:
451
+ if 'Sim_Winner_Display' in st.session_state:
452
+ # Create a new dataframe with summary statistics
453
+ summary_df = pd.DataFrame({
454
+ 'Metric': ['Min', 'Average', 'Max', 'STDdev'],
455
+ 'Salary': [
456
+ st.session_state.Sim_Winner_Display['salary'].min(),
457
+ st.session_state.Sim_Winner_Display['salary'].mean(),
458
+ st.session_state.Sim_Winner_Display['salary'].max(),
459
+ st.session_state.Sim_Winner_Display['salary'].std()
460
+ ],
461
+ 'Proj': [
462
+ st.session_state.Sim_Winner_Display['proj'].min(),
463
+ st.session_state.Sim_Winner_Display['proj'].mean(),
464
+ st.session_state.Sim_Winner_Display['proj'].max(),
465
+ st.session_state.Sim_Winner_Display['proj'].std()
466
+ ],
467
+ 'Own': [
468
+ st.session_state.Sim_Winner_Display['Own'].min(),
469
+ st.session_state.Sim_Winner_Display['Own'].mean(),
470
+ st.session_state.Sim_Winner_Display['Own'].max(),
471
+ st.session_state.Sim_Winner_Display['Own'].std()
472
+ ],
473
+ 'Fantasy': [
474
+ st.session_state.Sim_Winner_Display['Fantasy'].min(),
475
+ st.session_state.Sim_Winner_Display['Fantasy'].mean(),
476
+ st.session_state.Sim_Winner_Display['Fantasy'].max(),
477
+ st.session_state.Sim_Winner_Display['Fantasy'].std()
478
+ ],
479
+ 'GPP_Proj': [
480
+ st.session_state.Sim_Winner_Display['GPP_Proj'].min(),
481
+ st.session_state.Sim_Winner_Display['GPP_Proj'].mean(),
482
+ st.session_state.Sim_Winner_Display['GPP_Proj'].max(),
483
+ st.session_state.Sim_Winner_Display['GPP_Proj'].std()
484
+ ]
485
+ })
486
+
487
+ # Set the index of the summary dataframe as the "Metric" column
488
+ summary_df = summary_df.set_index('Metric')
489
+
490
+ # Display the summary dataframe
491
+ st.subheader("Winning Frame Statistics")
492
+ st.dataframe(summary_df.style.format({
493
+ 'Salary': '{:.2f}',
494
+ 'Proj': '{:.2f}',
495
+ 'Own': '{:.2f}',
496
+ 'Fantasy': '{:.2f}',
497
+ 'GPP_Proj': '{:.2f}'
498
+ }).background_gradient(cmap='RdYlGn', axis=0, subset=['Salary', 'Proj', 'Own', 'Fantasy', 'GPP_Proj']), use_container_width=True)
499
+
500
+ with tab2:
501
+ if 'Sim_Winner_Display' in st.session_state:
502
+ # Apply position mapping to FLEX column
503
+ if sim_site_var1 == 'Draftkings':
504
+ flex_positions = st.session_state.freq_copy['FLEX'].map(st.session_state.maps_dict['Pos_map'])
505
+ elif sim_site_var1 == 'Fanduel':
506
+ flex1_positions = st.session_state.freq_copy['FLEX1'].map(st.session_state.maps_dict['Pos_map'])
507
+ flex2_positions = st.session_state.freq_copy['FLEX2'].map(st.session_state.maps_dict['Pos_map'])
508
+ flex_positions = pd.concat([flex1_positions, flex2_positions])
509
+ flex_positions = flex_positions.reset_index(drop=True)
510
+
511
+ # Count occurrences of each position in FLEX
512
+ flex_counts = flex_positions.value_counts()
513
+
514
+ # Calculate average statistics for each FLEX position
515
+ flex_stats = st.session_state.freq_copy.groupby(flex_positions).agg({
516
+ 'proj': 'mean',
517
+ 'Own': 'mean',
518
+ 'Fantasy': 'mean',
519
+ 'GPP_Proj': 'mean'
520
+ })
521
+
522
+ # Combine counts and average statistics
523
+ flex_summary = pd.concat([flex_counts, flex_stats], axis=1)
524
+ flex_summary.columns = ['Count', 'Avg Proj', 'Avg Own', 'Avg Fantasy', 'Avg GPP_Proj']
525
+ flex_summary = flex_summary.reset_index()
526
+ flex_summary.columns = ['Position', 'Count', 'Avg Proj', 'Avg Own', 'Avg Fantasy', 'Avg GPP_Proj']
527
+
528
+ # Display the summary dataframe
529
+ st.subheader("FLEX Position Statistics")
530
+ st.dataframe(flex_summary.style.format({
531
+ 'Count': '{:.0f}',
532
+ 'Avg Proj': '{:.2f}',
533
+ 'Avg Own': '{:.2f}',
534
+ 'Avg Fantasy': '{:.2f}',
535
+ 'Avg GPP_Proj': '{:.2f}'
536
+ }).background_gradient(cmap='RdYlGn', axis=0, subset=['Count', 'Avg Proj', 'Avg Own', 'Avg Fantasy', 'Avg GPP_Proj']), use_container_width=True)
537
+ else:
538
+ st.write("Simulation data or position mapping not available.")
539
+
540
+ with tab3:
541
+ if 'Sim_Winner_Display' in st.session_state:
542
+ # Apply position mapping to FLEX column
543
+ stack_counts = st.session_state.freq_copy['Team_count'].value_counts()
544
+
545
+ # Calculate average statistics for each stack size
546
+ stack_stats = st.session_state.freq_copy.groupby('Team_count').agg({
547
+ 'proj': 'mean',
548
+ 'Own': 'mean',
549
+ 'Fantasy': 'mean',
550
+ 'GPP_Proj': 'mean'
551
+ })
552
+
553
+ # Combine counts and average statistics
554
+ stack_summary = pd.concat([stack_counts, stack_stats], axis=1)
555
+ stack_summary.columns = ['Count', 'Avg Proj', 'Avg Own', 'Avg Fantasy', 'Avg GPP_Proj']
556
+ stack_summary = stack_summary.reset_index()
557
+ stack_summary.columns = ['Stack Size', 'Count', 'Avg Proj', 'Avg Own', 'Avg Fantasy', 'Avg GPP_Proj']
558
+ stack_summary = stack_summary.sort_values(by='Stack Size', ascending=True)
559
+ stack_summary = stack_summary.set_index('Stack Size')
560
+
561
+ # Display the summary dataframe
562
+ st.subheader("Stack Type Statistics")
563
+ st.dataframe(stack_summary.style.format({
564
+ 'Count': '{:.0f}',
565
+ 'Avg Proj': '{:.2f}',
566
+ 'Avg Own': '{:.2f}',
567
+ 'Avg Fantasy': '{:.2f}',
568
+ 'Avg GPP_Proj': '{:.2f}'
569
+ }).background_gradient(cmap='RdYlGn', axis=0, subset=['Count', 'Avg Proj', 'Avg Own', 'Avg Fantasy', 'Avg GPP_Proj']), use_container_width=True)
570
+ else:
571
+ st.write("Simulation data or position mapping not available.")
572
+
573
+
574
+ with st.container():
575
+ tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Overall Exposures', 'Center Exposures', 'Wing Exposures', 'Defense Exposures', 'Flex Exposures', 'Goalie Exposures', 'Team Exposures'])
576
+ with tab1:
577
+ if 'player_freq' in st.session_state:
578
+
579
+ st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
580
+ st.download_button(
581
+ label="Export Exposures",
582
+ data=st.session_state.player_freq.to_csv().encode('utf-8'),
583
+ file_name='player_freq_export.csv',
584
+ mime='text/csv',
585
+ key='overall'
586
+ )
587
+ with tab2:
588
+ if 'center_freq' in st.session_state:
589
+
590
+ st.dataframe(st.session_state.center_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
591
+ st.download_button(
592
+ label="Export Exposures",
593
+ data=st.session_state.center_freq.to_csv().encode('utf-8'),
594
+ file_name='center_freq.csv',
595
+ mime='text/csv',
596
+ key='center'
597
+ )
598
+ with tab3:
599
+ if 'wing_freq' in st.session_state:
600
+
601
+ st.dataframe(st.session_state.wing_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
602
+ st.download_button(
603
+ label="Export Exposures",
604
+ data=st.session_state.wing_freq.to_csv().encode('utf-8'),
605
+ file_name='wing_freq.csv',
606
+ mime='text/csv',
607
+ key='wing'
608
+ )
609
+ with tab4:
610
+ if 'dmen_freq' in st.session_state:
611
+
612
+ st.dataframe(st.session_state.dmen_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
613
+ st.download_button(
614
+ label="Export Exposures",
615
+ data=st.session_state.dmen_freq.to_csv().encode('utf-8'),
616
+ file_name='dmen_freq.csv',
617
+ mime='text/csv',
618
+ key='dmen'
619
+ )
620
+ with tab5:
621
+ if 'flex_freq' in st.session_state:
622
+
623
+ st.dataframe(st.session_state.flex_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
624
+ st.download_button(
625
+ label="Export Exposures",
626
+ data=st.session_state.flex_freq.to_csv().encode('utf-8'),
627
+ file_name='flex_freq.csv',
628
+ mime='text/csv',
629
+ key='flex'
630
+ )
631
+ with tab6:
632
+ if 'goalie_freq' in st.session_state:
633
+
634
+ st.dataframe(st.session_state.goalie_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
635
+ st.download_button(
636
+ label="Export Exposures",
637
+ data=st.session_state.goalie_freq.to_csv().encode('utf-8'),
638
+ file_name='goalie_freq.csv',
639
+ mime='text/csv',
640
+ key='goalie'
641
+ )
642
+ with tab7:
643
+ if 'team_freq' in st.session_state:
644
+
645
+ st.dataframe(st.session_state.team_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(percentages_format, precision=2), use_container_width = True)
646
+ st.download_button(
647
+ label="Export Exposures",
648
+ data=st.session_state.team_freq.to_csv().encode('utf-8'),
649
+ file_name='team_freq.csv',
650
+ mime='text/csv',
651
+ key='team'
652
+ )
653
+
654
+ with tab2:
655
+ with st.expander("Info and Filters"):
656
+ if st.button("Load/Reset Data", key='reset1'):
657
+ st.cache_data.clear()
658
+ for key in st.session_state.keys():
659
+ del st.session_state[key]
660
+ DK_seed = init_DK_seed_frames(10000)
661
+ FD_seed = init_FD_seed_frames(10000)
662
+ dk_raw, fd_raw, teams_playing_count = init_baselines()
663
+ dk_id_dict = dict(zip(dk_raw.Player, dk_raw.player_id))
664
+ fd_id_dict = dict(zip(fd_raw.Player, fd_raw.player_id))
665
+
666
+ slate_var2 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Auxiliary Slate'))
667
+ site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
668
+ sharp_split_var = st.number_input("How many lineups do you want?", value=10000, max_value=500000, min_value=10000, step=10000)
669
+ lineup_num_var = st.number_input("How many lineups do you want to display?", min_value=1, max_value=500, value=10, step=1)
670
+
671
+ if site_var1 == 'Draftkings':
672
+
673
+ team_var1 = st.radio("Do you want a frame with specific teams?", ('Full Slate', 'Specific Teams'), key='team_var1')
674
+ if team_var1 == 'Specific Teams':
675
+ team_var2 = st.multiselect('Which teams do you want?', options = dk_raw['Team'].unique())
676
+ elif team_var1 == 'Full Slate':
677
+ team_var2 = dk_raw.Team.values.tolist()
678
+
679
+ stack_var1 = st.radio("Do you want a frame with specific stack sizes?", ('Full Slate', 'Specific Stack Sizes'), key='stack_var1')
680
+ if stack_var1 == 'Specific Stack Sizes':
681
+ stack_var2 = st.multiselect('Which stack sizes do you want?', options = [5, 4, 3, 2, 1, 0])
682
+ elif stack_var1 == 'Full Slate':
683
+ stack_var2 = [5, 4, 3, 2, 1, 0]
684
+
685
+ raw_baselines = dk_raw
686
+ column_names = dk_columns
687
+
688
+ elif site_var1 == 'Fanduel':
689
+
690
+ team_var1 = st.radio("Do you want a frame with specific teams?", ('Full Slate', 'Specific Teams'), key='team_var1')
691
+ if team_var1 == 'Specific Teams':
692
+ team_var2 = st.multiselect('Which teams do you want?', options = fd_raw['Team'].unique())
693
+ elif team_var1 == 'Full Slate':
694
+ team_var2 = fd_raw.Team.values.tolist()
695
+
696
+ stack_var1 = st.radio("Do you want a frame with specific stack sizes?", ('Full Slate', 'Specific Stack Sizes'), key='stack_var1')
697
+ if stack_var1 == 'Specific Stack Sizes':
698
+ stack_var2 = st.multiselect('Which stack sizes do you want?', options = [5, 4, 3, 2, 1, 0])
699
+ elif stack_var1 == 'Full Slate':
700
+ stack_var2 = [5, 4, 3, 2, 1, 0]
701
+
702
+ raw_baselines = fd_raw
703
+ column_names = fd_columns
704
+
705
+
706
+ if st.button("Prepare data export", key='data_export'):
707
+ if 'working_seed' in st.session_state:
708
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
709
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
710
+ st.session_state.data_export_display = st.session_state.working_seed[0:lineup_num_var]
711
+ elif 'working_seed' not in st.session_state:
712
+ if site_var1 == 'Draftkings':
713
+ st.session_state.working_seed = init_DK_seed_frames(sharp_split_var, slate_var2)
714
+
715
+ dk_id_dict = dict(zip(dk_raw.Player, dk_raw.player_id))
716
+ raw_baselines = dk_raw
717
+ column_names = dk_columns
718
+
719
+ elif site_var1 == 'Fanduel':
720
+ st.session_state.working_seed = init_FD_seed_frames(sharp_split_var, slate_var2)
721
+
722
+ fd_id_dict = dict(zip(fd_raw.Player, fd_raw.player_id))
723
+ raw_baselines = fd_raw
724
+ column_names = fd_columns
725
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
726
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
727
+ st.session_state.data_export_display = st.session_state.working_seed[0:lineup_num_var]
728
+ data_export = st.session_state.working_seed.copy()
729
+ st.download_button(
730
+ label="Export optimals set",
731
+ data=convert_df(data_export),
732
+ file_name='NHL_optimals_export.csv',
733
+ mime='text/csv',
734
+ )
735
+ for key in st.session_state.keys():
736
+ del st.session_state[key]
737
+
738
+ if st.button("Load Data", key='load_data'):
739
+ if site_var1 == 'Draftkings':
740
+ if 'working_seed' in st.session_state:
741
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
742
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
743
+ st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
744
+ elif 'working_seed' not in st.session_state:
745
+ if slate_var2 == 'Main Slate':
746
+ st.session_state.working_seed = init_DK_seed_frames(sharp_split_var)
747
+ dk_id_dict = dict(zip(dk_raw.Player, dk_raw.player_id))
748
 
749
+ raw_baselines = dk_raw
750
+ column_names = dk_columns
751
+
752
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
753
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
754
+ st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
755
+
756
+ elif site_var1 == 'Fanduel':
757
+ if 'working_seed' in st.session_state:
758
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
759
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
760
+ st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
761
+ elif 'working_seed' not in st.session_state:
762
+ if slate_var2 == 'Main Slate':
763
+ st.session_state.working_seed = init_FD_seed_frames(sharp_split_var)
764
+ fd_id_dict = dict(zip(fd_raw.Player, fd_raw.player_id))
765
+
766
+ raw_baselines = fd_raw
767
+ column_names = fd_columns
768
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
769
+ st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
770
+ st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
771
+
772
+ with st.container():
773
+ if 'data_export_display' in st.session_state:
774
+ st.dataframe(st.session_state.data_export_display.style.format(freq_format, precision=2), use_container_width = True)