James McCool committed on
Commit
38b9c72
·
1 Parent(s): b0e8c62

Initial commit and dockerization

Browse files
.streamlit/secrets.toml ADDED
@@ -0,0 +1 @@
 
 
1
+ # SECURITY: real database credentials were committed on this line — rotate the password immediately and keep .streamlit/secrets.toml out of version control (add it to .gitignore / .dockerignore)
+ mongo_uri = "mongodb+srv://<username>:<password>@testcluster.lgwtp5i.mongodb.net/?retryWrites=true&w=majority&appName=TestCluster"
Dockerfile CHANGED
@@ -1,3 +1,4 @@
 
1
  FROM python:3.13.5-slim
2
 
3
  WORKDIR /app
@@ -5,11 +6,24 @@ WORKDIR /app
5
  RUN apt-get update && apt-get install -y \
6
  build-essential \
7
  curl \
 
8
  git \
9
  && rm -rf /var/lib/apt/lists/*
10
 
11
  COPY requirements.txt ./
12
  COPY src/ ./src/
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
  RUN pip3 install -r requirements.txt
15
 
 
1
+ # Pin a specific slim Python base image so builds are reproducible
2
  FROM python:3.13.5-slim
3
 
4
  WORKDIR /app
 
6
  RUN apt-get update && apt-get install -y \
7
  build-essential \
8
  curl \
9
+ software-properties-common \
10
  git \
11
  && rm -rf /var/lib/apt/lists/*
12
 
13
  COPY requirements.txt ./
14
  COPY src/ ./src/
15
+ COPY .streamlit/ ./.streamlit/
16
+
17
+
18
+
19
+ # SECURITY: hardcoding credentials in ENV bakes them into every image layer and `docker history`.
+ # Rotate the leaked password and inject the URI at runtime instead (e.g. `docker run -e MONGO_URI=...`) or use a build/runtime secret mechanism.
+ ENV MONGO_URI="mongodb+srv://<username>:<password>@testcluster.lgwtp5i.mongodb.net/?retryWrites=true&w=majority&appName=TestCluster"
20
+ RUN useradd -m -u 1000 user
21
+ USER user
22
+ ENV HOME=/home/user\
23
+ PATH=/home/user/.local/bin:$PATH
24
+ WORKDIR $HOME/app
25
+ RUN pip install --no-cache-dir --upgrade pip
26
+ COPY --chown=user . $HOME/app
27
 
28
  RUN pip3 install -r requirements.txt
29
 
requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
- altair
2
- pandas
3
- streamlit
 
 
 
 
 
 
1
+ streamlit
2
+ openpyxl
3
+ matplotlib
4
+ pulp
5
+ docker
6
+ plotly
7
+ scipy
8
+ pymongo
src/database.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import pymongo
3
+ import os
4
+
5
+ @st.cache_resource
6
+ def init_conn():
7
+ uri = os.getenv('MONGO_URI')
8
+ if not uri:
9
+ uri = st.secrets['mongo_uri']
10
+ client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
11
+ db = client["NFL_Database"]
12
+
13
+ return db
14
+
15
+ db = init_conn()
src/streamlit_app.py CHANGED
@@ -1,40 +1,445 @@
1
- import altair as alt
2
  import numpy as np
3
  import pandas as pd
4
- import streamlit as st
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
 
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))
 
1
+ import streamlit as st
2
  import numpy as np
3
  import pandas as pd
4
+ from database import db
5
+ from itertools import combinations
6
+
7
+ st.set_page_config(layout="wide")
8
+
9
+ game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
10
+ 'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
11
+
12
+ team_roo_format = {'Top Score%': '{:.2%}','0 Runs': '{:.2%}', '1 Run': '{:.2%}', '2 Runs': '{:.2%}', '3 Runs': '{:.2%}', '4 Runs': '{:.2%}',
13
+ '5 Runs': '{:.2%}','6 Runs': '{:.2%}', '7 Runs': '{:.2%}', '8 Runs': '{:.2%}', '9 Runs': '{:.2%}', '10 Runs': '{:.2%}'}
14
+
15
+ wrong_acro = ['WSH', 'AZ', 'CHW']
16
+ right_acro = ['WAS', 'ARI', 'CWS']
17
+
18
+ st.markdown("""
19
+ <style>
20
+ /* Tab styling */
21
+ .stElementContainer [data-baseweb="button-group"] {
22
+ gap: 2.000rem;
23
+ padding: 4px;
24
+ }
25
+ .stElementContainer [kind="segmented_control"] {
26
+ height: 2.000rem;
27
+ white-space: pre-wrap;
28
+ background-color: #68B1E7;
29
+ color: white;
30
+ border-radius: 20px;
31
+ gap: 1px;
32
+ padding: 10px 20px;
33
+ font-weight: bold;
34
+ transition: all 0.3s ease;
35
+ }
36
+ .stElementContainer [kind="segmented_controlActive"] {
37
+ height: 3.000rem;
38
+ background-color: #68B1E7;
39
+ border: 3px solid #4FB286;
40
+ border-radius: 10px;
41
+ color: black;
42
+ }
43
+ .stElementContainer [kind="segmented_control"]:hover {
44
+ background-color: #4FB286;
45
+ cursor: pointer;
46
+ }
47
+
48
+ div[data-baseweb="select"] > div {
49
+ background-color: #68B1E7;
50
+ color: white;
51
+ }
52
+
53
+ </style>""", unsafe_allow_html=True)
54
+
55
+ @st.cache_resource(ttl=600)
56
+ def init_baselines():
57
+
58
+ collection = db["Player_Baselines"]
59
+ cursor = collection.find()
60
+
61
+ raw_display = pd.DataFrame(list(cursor))
62
+ raw_display = raw_display[['name', 'Team', 'Opp', 'Position', 'Salary', 'team_plays', 'team_pass', 'team_rush', 'team_tds', 'team_pass_tds', 'team_rush_tds', 'dropbacks', 'pass_yards', 'pass_tds',
63
+ 'rush_att', 'rush_yards', 'rush_tds', 'targets', 'rec', 'rec_yards', 'rec_tds', 'PPR', 'Half_PPR', 'Own']]
64
+ player_stats = raw_display[raw_display['Position'] != 'K']
65
+
66
+ collection = db["DK_NFL_ROO"]
67
+ cursor = collection.find()
68
+
69
+ raw_display = pd.DataFrame(list(cursor))
70
+ raw_display = raw_display.rename(columns={'player_ID': 'player_id'})
71
+ raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
72
+ 'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
73
+ load_display = raw_display[raw_display['Position'] != 'K']
74
+ dk_roo_raw = load_display.dropna(subset=['Median'])
75
+
76
+ collection = db["FD_NFL_ROO"]
77
+ cursor = collection.find()
78
+
79
+ raw_display = pd.DataFrame(list(cursor))
80
+ raw_display = raw_display.rename(columns={'player_ID': 'player_id'})
81
+ raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
82
+ 'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
83
+ load_display = raw_display[raw_display['Position'] != 'K']
84
+ fd_roo_raw = load_display.dropna(subset=['Median'])
85
+
86
+ collection = db["DK_DFS_Stacks"]
87
+ cursor = collection.find()
88
+
89
+ raw_display = pd.DataFrame(list(cursor))
90
+ raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
91
+ dk_stacks_raw = raw_display.copy()
92
+
93
+ collection = db["FD_DFS_Stacks"]
94
+ cursor = collection.find()
95
+
96
+ raw_display = pd.DataFrame(list(cursor))
97
+ raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
98
+ fd_stacks_raw = raw_display.copy()
99
+
100
+ return player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
101
+
102
+ @st.cache_data
103
+ def convert_df_to_csv(df):
104
+ return df.to_csv().encode('utf-8')
105
+
106
+ player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
107
+
108
+ app_load_reset_column, app_view_site_column = st.columns([1, 9])
109
+ with app_load_reset_column:
110
+ if st.button("Load/Reset Data", key='reset_data_button'):
111
+ st.cache_data.clear()
112
+ player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
113
+ for key in st.session_state.keys():
114
+ del st.session_state[key]
115
+ with app_view_site_column:
116
+ with st.container():
117
+ app_view_column, app_site_column = st.columns([3, 3])
118
+ with app_view_column:
119
+ view_var = st.selectbox("Select view", ["Simple", "Advanced"], key='view_selectbox')
120
+ with app_site_column:
121
+ site_var = st.selectbox("What site do you want to view?", ('Draftkings', 'Fanduel'), key='site_selectbox')
122
+
123
+ selected_tab = st.segmented_control(
124
+ "Select Tab",
125
+ options=["Stack Finder", "User Upload"],
126
+ selection_mode='single',
127
+ default='Stack Finder',
128
+ width='stretch',
129
+ label_visibility='collapsed',
130
+ key='tab_selector'
131
+ )
132
+
133
+ if selected_tab == 'Stack Finder':
134
+ with st.expander("Info and Filters"):
135
+ app_info_column, slate_choice_column, filtering_column, stack_info_column = st.columns(4)
136
+ with app_info_column:
137
+ if st.button("Load/Reset Data", key='reset1'):
138
+ st.cache_data.clear()
139
+ player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
140
+ for key in st.session_state.keys():
141
+ del st.session_state[key]
142
+ st.info(f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST")
143
+ with slate_choice_column:
144
+ slate_var1 = st.radio("What slate are you working with?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate', 'User Upload'), key='slate_var1')
145
+ if slate_var1 == 'User Upload':
146
+ slate_var1 = st.session_state['proj_dataframe']
147
+ else:
148
+ if site_var == 'Draftkings':
149
+ raw_baselines = dk_roo_raw
150
+ if slate_var1 == 'Main Slate':
151
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Main Slate']
152
+ elif slate_var1 == 'Secondary Slate':
153
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Secondary Slate']
154
+ elif slate_var1 == 'Late Slate':
155
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Late Slate']
156
+ elif slate_var1 == 'Thurs-Mon Slate':
157
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Thurs-Mon Slate']
158
+ raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
159
+ qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
160
+ elif site_var == 'Fanduel':
161
+ raw_baselines = fd_roo_raw
162
+ if slate_var1 == 'Main Slate':
163
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Main Slate']
164
+ elif slate_var1 == 'Secondary Slate':
165
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Secondary Slate']
166
+ elif slate_var1 == 'Late Slate':
167
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Late Slate']
168
+ elif slate_var1 == 'Thurs-Mon Slate':
169
+ raw_baselines = raw_baselines[raw_baselines['slate'] == 'Thurs-Mon Slate']
170
+ raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
171
+ qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
172
+ with filtering_column:
173
+ split_var2 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('Full Slate Run', 'Specific Teams'), key='split_var2')
174
+ if split_var2 == 'Specific Teams':
175
+ team_var2 = st.multiselect('Which teams would you like to include in the analysis?', options = raw_baselines['Team'].unique(), key='team_var2')
176
+ elif split_var2 == 'Full Slate Run':
177
+ team_var2 = raw_baselines.Team.unique().tolist()
178
+ pos_var2 = st.multiselect('What Positions would you like to view?', options = ['WR', 'TE', 'RB'], default = ['WR', 'TE', 'RB'], key='pos_var2')
179
+ with stack_info_column:
180
+ if site_var == 'Draftkings':
181
+ max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 50000, value = 35000, step = 100, key='max_sal2')
182
+ elif site_var == 'Fanduel':
183
+ max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 35000, value = 25000, step = 100, key='max_sal2')
184
+ size_var2 = st.selectbox('What size of stacks are you analyzing?', options = ['QB+1', 'QB+2', 'QB+3'])
185
+ if size_var2 == 'QB+1':
186
+ stack_size = 2
187
+ if size_var2 == 'QB+2':
188
+ stack_size = 3
189
+ if size_var2 == 'QB+3':
190
+ stack_size = 4
191
+
192
+ team_dict = dict(zip(raw_baselines.Player, raw_baselines.Team))
193
+ proj_dict = dict(zip(raw_baselines.Player, raw_baselines.Median))
194
+ own_dict = dict(zip(raw_baselines.Player, raw_baselines.Own))
195
+ cost_dict = dict(zip(raw_baselines.Player, raw_baselines.Salary))
196
+ qb_dict = dict(zip(qb_lookup.Team, qb_lookup.Player))
197
+ if st.button("Run Stack Analysis", key='run_stack_analysis'):
198
+ if site_var == 'Draftkings':
199
+ position_limits = {
200
+ 'QB': 1,
201
+ 'RB': 2,
202
+ 'WR': 3,
203
+ 'TE': 1,
204
+ 'UTIL': 1,
205
+ 'DST': 1,
206
+ }
207
+ max_salary = max_sal2
208
+ max_players = 9
209
+ else:
210
+ position_limits = {
211
+ 'QB': 1,
212
+ 'RB': 2,
213
+ 'WR': 3,
214
+ 'TE': 1,
215
+ 'UTIL': 1,
216
+ 'DST': 1,
217
+ }
218
+ max_salary = max_sal2
219
+ max_players = 9
220
+
221
+ stack_hold_container = st.empty()
222
+ comb_list = []
223
+ raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var2 + ['QB']))]
224
+
225
+ # Create a position dictionary mapping players to their eligible positions
226
+ pos_dict = dict(zip(raw_baselines.Player, raw_baselines.Position))
227
+
228
+ def is_valid_combination(combo):
229
+ # Count positions in this combination
230
+ position_counts = {pos: 0 for pos in position_limits.keys()}
231
+
232
+ # For each player in the combination
233
+ for player in combo:
234
+ # Get their eligible positions
235
+ player_positions = pos_dict[player].split('/')
236
+
237
+ for pos in player_positions:
238
+ position_counts[pos] += 1
239
+
240
+ # Check if any position exceeds its limit
241
+ for pos, limit in position_limits.items():
242
+ if position_counts[pos] > limit:
243
+ return False
244
+
245
+ return True
246
+
247
+ # Modify the combination generation code
+ # NOTE(review): BUG below — `comb = combinations(...)` yields a one-shot iterator, and the
+ # first `for i in list(comb)` exhausts it, so the second loop (the `is_valid_combination`
+ # position-limit filter) iterates an empty sequence and never runs. Materialize once
+ # (`combos = list(comb)`) and apply both the QB check and the validity check in a single pass.
248
+ comb_list = []
249
+ for cur_team in team_var2:
250
+ working_baselines = raw_baselines
251
+ working_baselines = working_baselines[working_baselines['Team'] == cur_team]
252
+ working_baselines = working_baselines[working_baselines['Position'] != 'DST']
253
+ working_baselines = working_baselines[working_baselines['Position'] != 'K']
254
+ qb_var = qb_dict[cur_team]
255
+ order_list = working_baselines['Player'].unique()
256
+
257
+ comb = combinations(order_list, stack_size)
258
+
259
+ for i in list(comb):
260
+ if qb_var in i:
261
+ comb_list.append(i)
262
+
263
+ for i in list(comb):
264
+ if is_valid_combination(i):
265
+ comb_list.append(i)
266
+
267
+ comb_DF = pd.DataFrame(comb_list)
268
+
269
+ print(comb_DF.head(10))
270
+
271
+ if stack_size == 2:
272
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
273
+
274
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
275
+ comb_DF[1].map(proj_dict)])
276
+
277
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
278
+ comb_DF[1].map(cost_dict)])
279
+
280
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
281
+ comb_DF[1].map(own_dict)])
282
+ elif stack_size == 3:
283
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
284
+
285
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
286
+ comb_DF[1].map(proj_dict),
287
+ comb_DF[2].map(proj_dict)])
288
+
289
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
290
+ comb_DF[1].map(cost_dict),
291
+ comb_DF[2].map(cost_dict)])
292
+
293
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
294
+ comb_DF[1].map(own_dict),
295
+ comb_DF[2].map(own_dict)])
296
+ elif stack_size == 4:
297
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
298
+
299
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
300
+ comb_DF[1].map(proj_dict),
301
+ comb_DF[2].map(proj_dict),
302
+ comb_DF[3].map(proj_dict)])
303
+
304
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
305
+ comb_DF[1].map(cost_dict),
306
+ comb_DF[2].map(cost_dict),
307
+ comb_DF[3].map(cost_dict)])
308
+
309
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
310
+ comb_DF[1].map(own_dict),
311
+ comb_DF[2].map(own_dict),
312
+ comb_DF[3].map(own_dict)])
313
+ elif stack_size == 5:
314
+ comb_DF['Team'] = comb_DF[0].map(team_dict)
315
+
316
+ comb_DF['Proj'] = sum([comb_DF[0].map(proj_dict),
317
+ comb_DF[1].map(proj_dict),
318
+ comb_DF[2].map(proj_dict),
319
+ comb_DF[3].map(proj_dict),
320
+ comb_DF[4].map(proj_dict)])
321
+
322
+ comb_DF['Salary'] = sum([comb_DF[0].map(cost_dict),
323
+ comb_DF[1].map(cost_dict),
324
+ comb_DF[2].map(cost_dict),
325
+ comb_DF[3].map(cost_dict),
326
+ comb_DF[4].map(cost_dict)])
327
+
328
+ comb_DF['Own%'] = sum([comb_DF[0].map(own_dict),
329
+ comb_DF[1].map(own_dict),
330
+ comb_DF[2].map(own_dict),
331
+ comb_DF[3].map(own_dict),
332
+ comb_DF[4].map(own_dict)])
333
+
334
+ comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
335
+ comb_DF = comb_DF.loc[comb_DF['Salary'] <= max_sal2]
336
+
337
+ cut_var = 0
338
+
339
+ if stack_size == 2:
340
+ while cut_var <= int(len(comb_DF)):
341
+ try:
342
+ if int(cut_var) == 0:
343
+ cur_proj = float(comb_DF.iat[cut_var, 3])
344
+ cur_own = float(comb_DF.iat[cut_var, 5])
345
+ elif int(cut_var) >= 1:
346
+ check_own = float(comb_DF.iat[cut_var, 5])
347
+ if check_own > cur_own:
348
+ comb_DF = comb_DF.drop([cut_var])
349
+ cur_own = cur_own
350
+ cut_var = cut_var - 1
351
+ comb_DF = comb_DF.reset_index()
352
+ comb_DF = comb_DF.drop(['index'], axis=1)
353
+ elif check_own <= cur_own:
354
+ cur_own = float(comb_DF.iat[cut_var, 5])
355
+ cut_var = cut_var
356
+ cut_var += 1
357
+ except:
358
+ cut_var += 1
359
+ elif stack_size == 3:
360
+ while cut_var <= int(len(comb_DF)):
361
+ try:
362
+ if int(cut_var) == 0:
363
+ cur_proj = float(comb_DF.iat[cut_var,4])
364
+ cur_own = float(comb_DF.iat[cut_var,6])
365
+ elif int(cut_var) >= 1:
366
+ check_own = float(comb_DF.iat[cut_var,6])
367
+ if check_own > cur_own:
368
+ comb_DF = comb_DF.drop([cut_var])
369
+ cur_own = cur_own
370
+ cut_var = cut_var - 1
371
+ comb_DF = comb_DF.reset_index()
372
+ comb_DF = comb_DF.drop(['index'], axis=1)
373
+ elif check_own <= cur_own:
374
+ cur_own = float(comb_DF.iat[cut_var,6])
375
+ cut_var = cut_var
376
+ cut_var += 1
377
+ except:
378
+ cut_var += 1
379
+ elif stack_size == 4:
380
+ while cut_var <= int(len(comb_DF)):
381
+ try:
382
+ if int(cut_var) == 0:
383
+ cur_proj = float(comb_DF.iat[cut_var,5])
384
+ cur_own = float(comb_DF.iat[cut_var,7])
385
+ elif int(cut_var) >= 1:
386
+ check_own = float(comb_DF.iat[cut_var,7])
387
+ if check_own > cur_own:
388
+ comb_DF = comb_DF.drop([cut_var])
389
+ cur_own = cur_own
390
+ cut_var = cut_var - 1
391
+ comb_DF = comb_DF.reset_index()
392
+ comb_DF = comb_DF.drop(['index'], axis=1)
393
+ elif check_own <= cur_own:
394
+ cur_own = float(comb_DF.iat[cut_var,7])
395
+ cut_var = cut_var
396
+ cut_var += 1
397
+ except:
398
+ cut_var += 1
399
+ elif stack_size == 5:
400
+ while cut_var <= int(len(comb_DF)):
401
+ try:
402
+ if int(cut_var) == 0:
403
+ cur_proj = float(comb_DF.iat[cut_var,6])
404
+ cur_own = float(comb_DF.iat[cut_var,8])
405
+ elif int(cut_var) >= 1:
406
+ check_own = float(comb_DF.iat[cut_var,8])
407
+ if check_own > cur_own:
408
+ comb_DF = comb_DF.drop([cut_var])
409
+ cur_own = cur_own
410
+ cut_var = cut_var - 1
411
+ comb_DF = comb_DF.reset_index()
412
+ comb_DF = comb_DF.drop(['index'], axis=1)
413
+ elif check_own <= cur_own:
414
+ cur_own = float(comb_DF.iat[cut_var,8])
415
+ cut_var = cut_var
416
+ cut_var += 1
417
+ except:
418
+ cut_var += 1
419
+ st.session_state['display_frame'] = comb_DF
420
+ if 'display_frame' in st.session_state:
421
+ st.dataframe(st.session_state['display_frame'].style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), hide_index=True, use_container_width = True)
422
+ st.download_button(
423
+ label="Export Tables",
424
+ data=convert_df_to_csv(st.session_state['display_frame']),
425
+ file_name='NFL_Stack_Options_export.csv',
426
+ mime='text/csv',
427
+ )
428
+ else:
429
+ st.info("When you run the stack analysis, the results will be displayed here. Open up the 'Info and Filters' tab to check the settings.")
430
+
431
+ if selected_tab == 'User Upload':
432
+ st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'.")
433
+ col1, col2 = st.columns([1, 5])
434
 
435
+ with col1:
436
+ proj_file = st.file_uploader("Upload Projections", key = 'proj_uploader')
437
+
438
+ if proj_file is not None:
439
+ try:
440
+ st.session_state['proj_dataframe'] = pd.read_csv(proj_file)
441
+ except:
442
+ st.session_state['proj_dataframe'] = pd.read_excel(proj_file)
443
+ with col2:
444
+ if proj_file is not None:
445
+ st.dataframe(st.session_state['proj_dataframe'].style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)