Spaces:
Sleeping
Sleeping
James McCool
commited on
Commit
·
69f19a9
1
Parent(s):
a3bc4f1
Lots of work to set up transfer from gspread to mongo, added some sidebar action to stacks page for testing
Browse files
app.py
CHANGED
|
@@ -10,30 +10,51 @@ import numpy as np
|
|
| 10 |
import pandas as pd
|
| 11 |
import streamlit as st
|
| 12 |
import gspread
|
|
|
|
| 13 |
from itertools import combinations
|
| 14 |
|
| 15 |
@st.cache_resource
|
| 16 |
def init_conn():
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 37 |
|
| 38 |
game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
|
| 39 |
'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
|
|
@@ -45,52 +66,64 @@ all_dk_player_projections = 'https://docs.google.com/spreadsheets/d/1I_1Ve3F4tft
|
|
| 45 |
|
| 46 |
@st.cache_resource(ttl=600)
|
| 47 |
def player_stat_table():
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
dk_roo_raw = load_display.dropna(subset=['Median'])
|
| 61 |
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
|
|
|
|
|
|
|
|
|
| 66 |
fd_roo_raw = load_display.dropna(subset=['Median'])
|
| 67 |
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
dk_stacks_raw = load_display
|
| 71 |
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
|
| 76 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
|
| 78 |
@st.cache_data
|
| 79 |
def convert_df_to_csv(df):
|
| 80 |
return df.to_csv().encode('utf-8')
|
| 81 |
|
| 82 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 83 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 84 |
|
| 85 |
tab1, tab2, tab3, tab4, tab5, tab6 = st.tabs(["Team Stacks Range of Outcomes", "Overall Range of Outcomes", "QB Range of Outcomes", "RB Range of Outcomes", "WR Range of Outcomes", "TE Range of Outcomes"])
|
| 86 |
|
| 87 |
with tab1:
|
| 88 |
-
|
| 89 |
-
with col1:
|
| 90 |
st.info(t_stamp)
|
| 91 |
if st.button("Load/Reset Data", key='reset1'):
|
| 92 |
st.cache_data.clear()
|
| 93 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 94 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 95 |
slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var1')
|
| 96 |
site_var1 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var1')
|
|
@@ -125,185 +158,185 @@ with tab1:
|
|
| 125 |
team_var1 = raw_baselines.Team.values.tolist()
|
| 126 |
|
| 127 |
|
| 128 |
-
with col2:
|
| 129 |
-
if custom_var1 == 'No':
|
| 130 |
-
final_stacks = raw_baselines[raw_baselines['Team'].isin(team_var1)]
|
| 131 |
-
if view_var1 == 'Simple':
|
| 132 |
-
final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Salary', 'Median', '60+%', '4x%']]
|
| 133 |
-
elif view_var1 == 'Advanced':
|
| 134 |
-
final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish',
|
| 135 |
-
'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX']]
|
| 136 |
-
st.dataframe(final_stacks.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(player_roo_format, precision=2), use_container_width = True)
|
| 137 |
-
st.download_button(
|
| 138 |
-
label="Export Tables",
|
| 139 |
-
data=convert_df_to_csv(final_stacks),
|
| 140 |
-
file_name='NFL_stacks_export.csv',
|
| 141 |
-
mime='text/csv',
|
| 142 |
-
)
|
| 143 |
-
elif custom_var1 == 'Yes':
|
| 144 |
-
hold_container = st.empty()
|
| 145 |
-
if st.button('Create Range of Outcomes for Slate'):
|
| 146 |
-
with hold_container:
|
| 147 |
-
if site_var1 == 'Draftkings':
|
| 148 |
-
working_roo = player_stats
|
| 149 |
-
working_roo.rename(columns={"name": "Player", "rush_yards": "Rush Yards", "rec": "Receptions", "PPR": "Fantasy"}, inplace = True)
|
| 150 |
-
working_roo.replace('', 0, inplace=True)
|
| 151 |
-
if site_var1 == 'Fanduel':
|
| 152 |
-
working_roo = player_stats
|
| 153 |
-
working_roo.rename(columns={"name": "Player", "rush_yards": "Rush Yards", "rec": "Receptions", "Half_PPR": "Fantasy"}, inplace = True)
|
| 154 |
-
working_roo.replace('', 0, inplace=True)
|
| 155 |
-
working_roo = working_roo[working_roo['Team'].isin(team_var1)]
|
| 156 |
-
|
| 157 |
-
total_sims = 1000
|
| 158 |
-
|
| 159 |
-
salary_dict = dict(zip(working_roo.name, working_roo.Salary))
|
| 160 |
-
own_dict = dict(zip(working_roo.name, working_roo.Own))
|
| 161 |
-
fantasy_dict = dict(zip(working_roo.name, working_roo.Fantasy))
|
| 162 |
-
|
| 163 |
-
QB_group = working_roo.loc[working_roo['Position'] == 'QB']
|
| 164 |
-
stacks_df = pd.DataFrame(columns=['Team','QB', 'WR1', 'WR2_TE'])
|
| 165 |
-
|
| 166 |
-
for stack in range(0,len(QB_group)):
|
| 167 |
-
team_var = QB_group.iat[stack,1]
|
| 168 |
-
WR_group_1 = working_roo.loc[working_roo['Position'] == 'WR']
|
| 169 |
-
WR_group_2 = WR_group_1.loc[working_roo['Team'] == team_var]
|
| 170 |
-
TE_group_1 = working_roo.loc[working_roo['Position'] == 'TE']
|
| 171 |
-
TE_group_2 = TE_group_1.loc[working_roo['Team'] == team_var]
|
| 172 |
-
cur_list = []
|
| 173 |
-
qb_piece = QB_group.iat[stack,0]
|
| 174 |
-
wr_piece = WR_group_2.iat[0,0]
|
| 175 |
-
te_piece = TE_group_2.iat[0,0]
|
| 176 |
-
cur_list.append(team_var)
|
| 177 |
-
cur_list.append(qb_piece)
|
| 178 |
-
cur_list.append(wr_piece)
|
| 179 |
-
cur_list.append(te_piece)
|
| 180 |
-
stacks_df.loc[len(stacks_df)] = cur_list
|
| 181 |
-
cur_list = []
|
| 182 |
-
qb_piece = QB_group.iat[stack,0]
|
| 183 |
-
wr_piece = WR_group_2.iat[1,0]
|
| 184 |
-
te_piece = TE_group_2.iat[0,0]
|
| 185 |
-
cur_list.append(team_var)
|
| 186 |
-
cur_list.append(qb_piece)
|
| 187 |
-
cur_list.append(wr_piece)
|
| 188 |
-
cur_list.append(te_piece)
|
| 189 |
-
stacks_df.loc[len(stacks_df)] = cur_list
|
| 190 |
-
cur_list = []
|
| 191 |
-
qb_piece = QB_group.iat[stack,0]
|
| 192 |
-
wr_piece = WR_group_2.iat[0,0]
|
| 193 |
-
te_piece = WR_group_2.iat[1,0]
|
| 194 |
-
cur_list.append(team_var)
|
| 195 |
-
cur_list.append(qb_piece)
|
| 196 |
-
cur_list.append(wr_piece)
|
| 197 |
-
cur_list.append(te_piece)
|
| 198 |
-
stacks_df.loc[len(stacks_df)] = cur_list
|
| 199 |
-
|
| 200 |
-
stacks_df['Salary'] = sum([stacks_df['QB'].map(salary_dict),
|
| 201 |
-
stacks_df['WR1'].map(salary_dict),
|
| 202 |
-
stacks_df['WR2_TE'].map(salary_dict)])
|
| 203 |
-
|
| 204 |
-
stacks_df['Fantasy'] = sum([stacks_df['QB'].map(fantasy_dict),
|
| 205 |
-
stacks_df['WR1'].map(fantasy_dict),
|
| 206 |
-
stacks_df['WR2_TE'].map(fantasy_dict)])
|
| 207 |
-
|
| 208 |
-
stacks_df['Own'] = sum([stacks_df['QB'].map(own_dict),
|
| 209 |
-
stacks_df['WR1'].map(own_dict),
|
| 210 |
-
stacks_df['WR2_TE'].map(own_dict)])
|
| 211 |
-
|
| 212 |
-
stacks_df['team_combo'] = stacks_df['Team'] + " " + stacks_df['QB'] + " " + stacks_df['WR1'] + " " + stacks_df['WR2_TE']
|
| 213 |
-
|
| 214 |
-
own_dict = dict(zip(stacks_df.team_combo, stacks_df.Own))
|
| 215 |
-
qb_dict = dict(zip(stacks_df.team_combo, stacks_df.QB))
|
| 216 |
-
wr1_dict = dict(zip(stacks_df.team_combo, stacks_df.WR1))
|
| 217 |
-
wr2_dict = dict(zip(stacks_df.team_combo, stacks_df.WR2_TE))
|
| 218 |
-
team_dict = dict(zip(stacks_df.team_combo, stacks_df.Team))
|
| 219 |
-
|
| 220 |
-
flex_file = stacks_df[['team_combo', 'Salary', 'Fantasy']]
|
| 221 |
-
flex_file.rename(columns={"Fantasy": "Median"}, inplace = True)
|
| 222 |
-
flex_file['Floor'] = flex_file['Median']*.25
|
| 223 |
-
flex_file['Ceiling'] = flex_file['Median'] + flex_file['Floor']
|
| 224 |
-
flex_file['STD'] = flex_file['Median']/4
|
| 225 |
-
flex_file = flex_file[['team_combo', 'Salary', 'Floor', 'Median', 'Ceiling', 'STD']]
|
| 226 |
-
hold_file = flex_file
|
| 227 |
-
overall_file = flex_file
|
| 228 |
-
salary_file = flex_file
|
| 229 |
-
|
| 230 |
-
overall_players = overall_file[['team_combo']]
|
| 231 |
-
|
| 232 |
-
for x in range(0,total_sims):
|
| 233 |
-
salary_file[x] = salary_file['Salary']
|
| 234 |
-
|
| 235 |
-
salary_file=salary_file.drop(['team_combo', 'Floor', 'Median', 'Ceiling', 'STD'], axis=1)
|
| 236 |
-
salary_file.astype('int').dtypes
|
| 237 |
-
|
| 238 |
-
salary_file = salary_file.div(1000)
|
| 239 |
-
|
| 240 |
-
for x in range(0,total_sims):
|
| 241 |
-
overall_file[x] = np.random.normal(overall_file['Median'],overall_file['STD'])
|
| 242 |
-
|
| 243 |
-
overall_file=overall_file.drop(['team_combo', 'Floor', 'Median', 'Ceiling', 'STD'], axis=1)
|
| 244 |
-
overall_file.astype('int').dtypes
|
| 245 |
|
| 246 |
-
|
| 247 |
-
|
| 248 |
-
|
| 249 |
-
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
-
|
| 256 |
-
|
| 257 |
-
|
| 258 |
-
|
| 259 |
-
|
| 260 |
-
|
| 261 |
-
|
| 262 |
-
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
|
| 266 |
-
|
| 267 |
-
|
| 268 |
-
|
| 269 |
-
|
| 270 |
-
|
| 271 |
-
|
| 272 |
-
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
|
| 276 |
-
|
| 277 |
-
|
| 278 |
-
|
| 279 |
-
|
| 280 |
-
|
| 281 |
-
|
| 282 |
-
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
|
| 289 |
-
|
| 290 |
-
|
| 291 |
-
|
| 292 |
-
|
| 293 |
-
|
| 294 |
-
|
| 295 |
-
|
| 296 |
-
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 300 |
|
| 301 |
-
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
| 307 |
|
| 308 |
with tab2:
|
| 309 |
col1, col2 = st.columns([1, 5])
|
|
@@ -311,7 +344,7 @@ with tab2:
|
|
| 311 |
st.info(t_stamp)
|
| 312 |
if st.button("Load/Reset Data", key='reset2'):
|
| 313 |
st.cache_data.clear()
|
| 314 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 315 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 316 |
slate_var2 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var2')
|
| 317 |
site_var2 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var2')
|
|
@@ -498,7 +531,7 @@ with tab3:
|
|
| 498 |
st.info(t_stamp)
|
| 499 |
if st.button("Load/Reset Data", key='reset3'):
|
| 500 |
st.cache_data.clear()
|
| 501 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 502 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 503 |
slate_var3 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var3')
|
| 504 |
site_var3 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var3')
|
|
@@ -689,7 +722,7 @@ with tab4:
|
|
| 689 |
st.info(t_stamp)
|
| 690 |
if st.button("Load/Reset Data", key='reset4'):
|
| 691 |
st.cache_data.clear()
|
| 692 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 693 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 694 |
slate_var4 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var4')
|
| 695 |
site_var4 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var4')
|
|
@@ -879,7 +912,7 @@ with tab5:
|
|
| 879 |
st.info(t_stamp)
|
| 880 |
if st.button("Load/Reset Data", key='reset5'):
|
| 881 |
st.cache_data.clear()
|
| 882 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 883 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 884 |
slate_var5 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var5')
|
| 885 |
site_var5 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var5')
|
|
@@ -1069,7 +1102,7 @@ with tab6:
|
|
| 1069 |
st.info(t_stamp)
|
| 1070 |
if st.button("Load/Reset Data", key='reset6'):
|
| 1071 |
st.cache_data.clear()
|
| 1072 |
-
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 1073 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 1074 |
slate_var6 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var6')
|
| 1075 |
site_var6 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var6')
|
|
|
|
| 10 |
import pandas as pd
|
| 11 |
import streamlit as st
|
| 12 |
import gspread
|
| 13 |
+
import pymongo
|
| 14 |
from itertools import combinations
|
| 15 |
|
| 16 |
@st.cache_resource
|
| 17 |
def init_conn():
|
| 18 |
+
scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
|
| 19 |
+
|
| 20 |
+
credentials = {
|
| 21 |
+
"type": "service_account",
|
| 22 |
+
"project_id": "model-sheets-connect",
|
| 23 |
+
"private_key_id": st.secrets['model_sheets_connect_pk'],
|
| 24 |
+
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDiu1v/e6KBKOcK\ncx0KQ23nZK3ZVvADYy8u/RUn/EDI82QKxTd/DizRLIV81JiNQxDJXSzgkbwKYEDm\n48E8zGvupU8+Nk76xNPakrQKy2Y8+VJlq5psBtGchJTuUSHcXU5Mg2JhQsB376PJ\nsCw552K6Pw8fpeMDJDZuxpKSkaJR6k9G5Dhf5q8HDXnC5Rh/PRFuKJ2GGRpX7n+2\nhT/sCax0J8jfdTy/MDGiDfJqfQrOPrMKELtsGHR9Iv6F4vKiDqXpKfqH+02E9ptz\nBk+MNcbZ3m90M8ShfRu28ebebsASfarNMzc3dk7tb3utHOGXKCf4tF8yYKo7x8BZ\noO9X4gSfAgMBAAECggEAU8ByyMpSKlTCF32TJhXnVJi/kS+IhC/Qn5JUDMuk4LXr\naAEWsWO6kV/ZRVXArjmuSzuUVrXumISapM9Ps5Ytbl95CJmGDiLDwRL815nvv6k3\nUyAS8EGKjz74RpoIoH6E7EWCAzxlnUgTn+5oP9Flije97epYk3H+e2f1f5e1Nn1d\nYNe8U+1HqJgILcxA1TAUsARBfoD7+K3z/8DVPHI8IpzAh6kTHqhqC23Rram4XoQ6\nzj/ZdVBjvnKuazETfsD+Vl3jGLQA8cKQVV70xdz3xwLcNeHsbPbpGBpZUoF73c65\nkAXOrjYl0JD5yAk+hmYhXr6H9c6z5AieuZGDrhmlFQKBgQDzV6LRXmjn4854DP/J\nI82oX2GcI4eioDZPRukhiQLzYerMQBmyqZIRC+/LTCAhYQSjNgMa+ZKyvLqv48M0\n/x398op/+n3xTs+8L49SPI48/iV+mnH7k0WI/ycd4OOKh8rrmhl/0EWb9iitwJYe\nMjTV/QxNEpPBEXfR1/mvrN/lVQKBgQDuhomOxUhWVRVH6x03slmyRBn0Oiw4MW+r\nrt1hlNgtVmTc5Mu+4G0USMZwYuOB7F8xG4Foc7rIlwS7Ic83jMJxemtqAelwOLdV\nXRLrLWJfX8+O1z/UE15l2q3SUEnQ4esPHbQnZowHLm0mdL14qSVMl1mu1XfsoZ3z\nJZTQb48CIwKBgEWbzQRtKD8lKDupJEYqSrseRbK/ax43DDITS77/DWwHl33D3FYC\nMblUm8ygwxQpR4VUfwDpYXBlklWcJovzamXpSnsfcYVkkQH47NuOXPXPkXQsw+w+\nDYcJzeu7F/vZqk9I7oBkWHUrrik9zPNoUzrfPvSRGtkAoTDSwibhoc5dAoGBAMHE\nK0T/ANeZQLNuzQps6S7G4eqjwz5W8qeeYxsdZkvWThOgDd/ewt3ijMnJm5X05hOn\ni4XF1euTuvUl7wbqYx76Wv3/1ZojiNNgy7ie4rYlyB/6vlBS97F4ZxJdxMlabbCW\n6b3EMWa4EVVXKoA1sCY7IVDE+yoQ1JYsZmq45YzPAoGBANWWHuVueFGZRDZlkNlK\nh5OmySmA0NdNug3G1upaTthyaTZ+CxGliwBqMHAwpkIRPwxUJpUwBTSEGztGTAxs\nWsUOVWlD2/1JaKSmHE8JbNg6sxLilcG6WEDzxjC5dLL1OrGOXj9WhC9KX3sq6qb6\nF/j9eUXfXjAlb042MphoF3ZC\n-----END PRIVATE KEY-----\n",
|
| 25 |
+
"client_email": "gspread-connection@model-sheets-connect.iam.gserviceaccount.com",
|
| 26 |
+
"client_id": "100369174533302798535",
|
| 27 |
+
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
| 28 |
+
"token_uri": "https://oauth2.googleapis.com/token",
|
| 29 |
+
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
| 30 |
+
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40model-sheets-connect.iam.gserviceaccount.com"
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
credentials2 = {
|
| 34 |
+
"type": "service_account",
|
| 35 |
+
"project_id": "sheets-api-connect-378620",
|
| 36 |
+
"private_key_id": st.secrets['sheets_api_connect_pk'],
|
| 37 |
+
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
|
| 38 |
+
"client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
|
| 39 |
+
"client_id": "106625872877651920064",
|
| 40 |
+
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
| 41 |
+
"token_uri": "https://oauth2.googleapis.com/token",
|
| 42 |
+
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
| 43 |
+
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
uri = st.secrets['mongo_uri']
|
| 47 |
+
client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
|
| 48 |
+
db = client["NFL_Database"]
|
| 49 |
+
|
| 50 |
+
NFL_Data = st.secrets['NFL_Data']
|
| 51 |
+
|
| 52 |
+
gc = gspread.service_account_from_dict(credentials)
|
| 53 |
+
gc2 = gspread.service_account_from_dict(credentials2)
|
| 54 |
+
|
| 55 |
+
return gc, gc2, db, NFL_Data
|
| 56 |
+
|
| 57 |
+
gcservice_account, gcservice_account2, db, NFL_Data = init_conn()
|
| 58 |
|
| 59 |
game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
|
| 60 |
'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
|
|
|
|
| 66 |
|
| 67 |
@st.cache_resource(ttl=600)
|
| 68 |
def player_stat_table():
|
| 69 |
+
|
| 70 |
+
collection = db["Player_Stats"]
|
| 71 |
+
cursor = collection.find()
|
| 72 |
+
|
| 73 |
+
raw_display = pd.DataFrame(list(cursor))
|
| 74 |
+
raw_display = raw_display[['name', 'Team', 'Opp', 'Position', 'Salary', 'team_plays', 'team_pass', 'team_rush', 'team_tds', 'team_pass_tds', 'team_rush_tds', 'dropbacks', 'pass_yards', 'pass_tds',
|
| 75 |
+
'rush_att', 'rush_yards', 'rush_tds', 'targets', 'rec', 'rec_yards', 'rec_tds', 'PPR', 'Half_PPR', 'Own']]
|
| 76 |
+
player_stats = raw_display[raw_display['Position'] != 'K']
|
| 77 |
+
|
| 78 |
+
collection = db["DK_NFL_ROO"]
|
| 79 |
+
cursor = collection.find()
|
| 80 |
+
|
| 81 |
+
raw_display = pd.DataFrame(list(cursor))
|
| 82 |
+
raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
|
| 83 |
+
'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
|
| 84 |
+
load_display = raw_display[raw_display['Position'] != 'K']
|
| 85 |
dk_roo_raw = load_display.dropna(subset=['Median'])
|
| 86 |
|
| 87 |
+
collection = db["FD_NFL_ROO"]
|
| 88 |
+
cursor = collection.find()
|
| 89 |
+
|
| 90 |
+
raw_display = pd.DataFrame(list(cursor))
|
| 91 |
+
raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
|
| 92 |
+
'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
|
| 93 |
+
load_display = raw_display[raw_display['Position'] != 'K']
|
| 94 |
fd_roo_raw = load_display.dropna(subset=['Median'])
|
| 95 |
|
| 96 |
+
collection = db["DK_DFS_Stacks"]
|
| 97 |
+
cursor = collection.find()
|
|
|
|
| 98 |
|
| 99 |
+
raw_display = pd.DataFrame(list(cursor))
|
| 100 |
+
raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
|
| 101 |
+
dk_stacks_raw = raw_display.copy()
|
| 102 |
|
| 103 |
+
collection = db["FD_DFS_Stacks"]
|
| 104 |
+
cursor = collection.find()
|
| 105 |
+
|
| 106 |
+
raw_display = pd.DataFrame(list(cursor))
|
| 107 |
+
raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
|
| 108 |
+
fd_stacks_raw = raw_display.copy()
|
| 109 |
+
|
| 110 |
+
return player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
|
| 111 |
|
| 112 |
@st.cache_data
|
| 113 |
def convert_df_to_csv(df):
|
| 114 |
return df.to_csv().encode('utf-8')
|
| 115 |
|
| 116 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 117 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 118 |
|
| 119 |
tab1, tab2, tab3, tab4, tab5, tab6 = st.tabs(["Team Stacks Range of Outcomes", "Overall Range of Outcomes", "QB Range of Outcomes", "RB Range of Outcomes", "WR Range of Outcomes", "TE Range of Outcomes"])
|
| 120 |
|
| 121 |
with tab1:
|
| 122 |
+
with st.sidebar:
|
|
|
|
| 123 |
st.info(t_stamp)
|
| 124 |
if st.button("Load/Reset Data", key='reset1'):
|
| 125 |
st.cache_data.clear()
|
| 126 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 127 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 128 |
slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var1')
|
| 129 |
site_var1 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var1')
|
|
|
|
| 158 |
team_var1 = raw_baselines.Team.values.tolist()
|
| 159 |
|
| 160 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 161 |
|
| 162 |
+
if custom_var1 == 'No':
|
| 163 |
+
final_stacks = raw_baselines[raw_baselines['Team'].isin(team_var1)]
|
| 164 |
+
if view_var1 == 'Simple':
|
| 165 |
+
final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Salary', 'Median', '60+%', '4x%']]
|
| 166 |
+
elif view_var1 == 'Advanced':
|
| 167 |
+
final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish',
|
| 168 |
+
'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX']]
|
| 169 |
+
st.dataframe(final_stacks.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(player_roo_format, precision=2), use_container_width = True)
|
| 170 |
+
st.download_button(
|
| 171 |
+
label="Export Tables",
|
| 172 |
+
data=convert_df_to_csv(final_stacks),
|
| 173 |
+
file_name='NFL_stacks_export.csv',
|
| 174 |
+
mime='text/csv',
|
| 175 |
+
)
|
| 176 |
+
elif custom_var1 == 'Yes':
|
| 177 |
+
hold_container = st.empty()
|
| 178 |
+
if st.button('Create Range of Outcomes for Slate'):
|
| 179 |
+
with hold_container:
|
| 180 |
+
if site_var1 == 'Draftkings':
|
| 181 |
+
working_roo = player_stats
|
| 182 |
+
working_roo.rename(columns={"name": "Player", "rush_yards": "Rush Yards", "rec": "Receptions", "PPR": "Fantasy"}, inplace = True)
|
| 183 |
+
working_roo.replace('', 0, inplace=True)
|
| 184 |
+
if site_var1 == 'Fanduel':
|
| 185 |
+
working_roo = player_stats
|
| 186 |
+
working_roo.rename(columns={"name": "Player", "rush_yards": "Rush Yards", "rec": "Receptions", "Half_PPR": "Fantasy"}, inplace = True)
|
| 187 |
+
working_roo.replace('', 0, inplace=True)
|
| 188 |
+
working_roo = working_roo[working_roo['Team'].isin(team_var1)]
|
| 189 |
+
|
| 190 |
+
total_sims = 1000
|
| 191 |
+
|
| 192 |
+
salary_dict = dict(zip(working_roo.name, working_roo.Salary))
|
| 193 |
+
own_dict = dict(zip(working_roo.name, working_roo.Own))
|
| 194 |
+
fantasy_dict = dict(zip(working_roo.name, working_roo.Fantasy))
|
| 195 |
+
|
| 196 |
+
QB_group = working_roo.loc[working_roo['Position'] == 'QB']
|
| 197 |
+
stacks_df = pd.DataFrame(columns=['Team','QB', 'WR1', 'WR2_TE'])
|
| 198 |
+
|
| 199 |
+
for stack in range(0,len(QB_group)):
|
| 200 |
+
team_var = QB_group.iat[stack,1]
|
| 201 |
+
WR_group_1 = working_roo.loc[working_roo['Position'] == 'WR']
|
| 202 |
+
WR_group_2 = WR_group_1.loc[working_roo['Team'] == team_var]
|
| 203 |
+
TE_group_1 = working_roo.loc[working_roo['Position'] == 'TE']
|
| 204 |
+
TE_group_2 = TE_group_1.loc[working_roo['Team'] == team_var]
|
| 205 |
+
cur_list = []
|
| 206 |
+
qb_piece = QB_group.iat[stack,0]
|
| 207 |
+
wr_piece = WR_group_2.iat[0,0]
|
| 208 |
+
te_piece = TE_group_2.iat[0,0]
|
| 209 |
+
cur_list.append(team_var)
|
| 210 |
+
cur_list.append(qb_piece)
|
| 211 |
+
cur_list.append(wr_piece)
|
| 212 |
+
cur_list.append(te_piece)
|
| 213 |
+
stacks_df.loc[len(stacks_df)] = cur_list
|
| 214 |
+
cur_list = []
|
| 215 |
+
qb_piece = QB_group.iat[stack,0]
|
| 216 |
+
wr_piece = WR_group_2.iat[1,0]
|
| 217 |
+
te_piece = TE_group_2.iat[0,0]
|
| 218 |
+
cur_list.append(team_var)
|
| 219 |
+
cur_list.append(qb_piece)
|
| 220 |
+
cur_list.append(wr_piece)
|
| 221 |
+
cur_list.append(te_piece)
|
| 222 |
+
stacks_df.loc[len(stacks_df)] = cur_list
|
| 223 |
+
cur_list = []
|
| 224 |
+
qb_piece = QB_group.iat[stack,0]
|
| 225 |
+
wr_piece = WR_group_2.iat[0,0]
|
| 226 |
+
te_piece = WR_group_2.iat[1,0]
|
| 227 |
+
cur_list.append(team_var)
|
| 228 |
+
cur_list.append(qb_piece)
|
| 229 |
+
cur_list.append(wr_piece)
|
| 230 |
+
cur_list.append(te_piece)
|
| 231 |
+
stacks_df.loc[len(stacks_df)] = cur_list
|
| 232 |
+
|
| 233 |
+
stacks_df['Salary'] = sum([stacks_df['QB'].map(salary_dict),
|
| 234 |
+
stacks_df['WR1'].map(salary_dict),
|
| 235 |
+
stacks_df['WR2_TE'].map(salary_dict)])
|
| 236 |
+
|
| 237 |
+
stacks_df['Fantasy'] = sum([stacks_df['QB'].map(fantasy_dict),
|
| 238 |
+
stacks_df['WR1'].map(fantasy_dict),
|
| 239 |
+
stacks_df['WR2_TE'].map(fantasy_dict)])
|
| 240 |
+
|
| 241 |
+
stacks_df['Own'] = sum([stacks_df['QB'].map(own_dict),
|
| 242 |
+
stacks_df['WR1'].map(own_dict),
|
| 243 |
+
stacks_df['WR2_TE'].map(own_dict)])
|
| 244 |
+
|
| 245 |
+
stacks_df['team_combo'] = stacks_df['Team'] + " " + stacks_df['QB'] + " " + stacks_df['WR1'] + " " + stacks_df['WR2_TE']
|
| 246 |
+
|
| 247 |
+
own_dict = dict(zip(stacks_df.team_combo, stacks_df.Own))
|
| 248 |
+
qb_dict = dict(zip(stacks_df.team_combo, stacks_df.QB))
|
| 249 |
+
wr1_dict = dict(zip(stacks_df.team_combo, stacks_df.WR1))
|
| 250 |
+
wr2_dict = dict(zip(stacks_df.team_combo, stacks_df.WR2_TE))
|
| 251 |
+
team_dict = dict(zip(stacks_df.team_combo, stacks_df.Team))
|
| 252 |
+
|
| 253 |
+
flex_file = stacks_df[['team_combo', 'Salary', 'Fantasy']]
|
| 254 |
+
flex_file.rename(columns={"Fantasy": "Median"}, inplace = True)
|
| 255 |
+
flex_file['Floor'] = flex_file['Median']*.25
|
| 256 |
+
flex_file['Ceiling'] = flex_file['Median'] + flex_file['Floor']
|
| 257 |
+
flex_file['STD'] = flex_file['Median']/4
|
| 258 |
+
flex_file = flex_file[['team_combo', 'Salary', 'Floor', 'Median', 'Ceiling', 'STD']]
|
| 259 |
+
hold_file = flex_file
|
| 260 |
+
overall_file = flex_file
|
| 261 |
+
salary_file = flex_file
|
| 262 |
+
|
| 263 |
+
overall_players = overall_file[['team_combo']]
|
| 264 |
+
|
| 265 |
+
for x in range(0,total_sims):
|
| 266 |
+
salary_file[x] = salary_file['Salary']
|
| 267 |
+
|
| 268 |
+
salary_file=salary_file.drop(['team_combo', 'Floor', 'Median', 'Ceiling', 'STD'], axis=1)
|
| 269 |
+
salary_file.astype('int').dtypes
|
| 270 |
+
|
| 271 |
+
salary_file = salary_file.div(1000)
|
| 272 |
+
|
| 273 |
+
for x in range(0,total_sims):
|
| 274 |
+
overall_file[x] = np.random.normal(overall_file['Median'],overall_file['STD'])
|
| 275 |
+
|
| 276 |
+
overall_file=overall_file.drop(['team_combo', 'Floor', 'Median', 'Ceiling', 'STD'], axis=1)
|
| 277 |
+
overall_file.astype('int').dtypes
|
| 278 |
+
|
| 279 |
+
players_only = hold_file[['team_combo']]
|
| 280 |
+
raw_lineups_file = players_only
|
| 281 |
+
|
| 282 |
+
for x in range(0,total_sims):
|
| 283 |
+
maps_dict = {'proj_map':dict(zip(hold_file.team_combo,hold_file[x]))}
|
| 284 |
+
raw_lineups_file[x] = sum([raw_lineups_file['team_combo'].map(maps_dict['proj_map'])])
|
| 285 |
+
players_only[x] = raw_lineups_file[x].rank(ascending=False)
|
| 286 |
+
|
| 287 |
+
players_only=players_only.drop(['team_combo'], axis=1)
|
| 288 |
+
players_only.astype('int').dtypes
|
| 289 |
+
|
| 290 |
+
salary_2x_check = (overall_file - (salary_file*2))
|
| 291 |
+
salary_3x_check = (overall_file - (salary_file*3))
|
| 292 |
+
salary_4x_check = (overall_file - (salary_file*4))
|
| 293 |
+
|
| 294 |
+
players_only['Average_Rank'] = players_only.mean(axis=1)
|
| 295 |
+
players_only['Top_finish'] = players_only[players_only == 1].count(axis=1)/total_sims
|
| 296 |
+
players_only['Top_5_finish'] = players_only[players_only <= 5].count(axis=1)/total_sims
|
| 297 |
+
players_only['Top_10_finish'] = players_only[players_only <= 10].count(axis=1)/total_sims
|
| 298 |
+
players_only['60+%'] = overall_file[overall_file >= 60].count(axis=1)/float(total_sims)
|
| 299 |
+
players_only['2x%'] = salary_2x_check[salary_2x_check >= 1].count(axis=1)/float(total_sims)
|
| 300 |
+
players_only['3x%'] = salary_3x_check[salary_3x_check >= 1].count(axis=1)/float(total_sims)
|
| 301 |
+
players_only['4x%'] = salary_4x_check[salary_4x_check >= 1].count(axis=1)/float(total_sims)
|
| 302 |
+
|
| 303 |
+
# Re-attach the stack identifier so outcome rates can be merged back onto
# the projection rows (Series assignment; the original assigned a one-column
# DataFrame, which aligns to the same result but is non-idiomatic).
players_only['team_combo'] = hold_file['team_combo']

final_outcomes = players_only[['team_combo', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%']]

# Join simulated finish rates onto the projections, attach ownership, then
# select the working columns once (the original selected, added Own, and
# re-selected the identical list plus Own — collapsed here).
final_stacks = pd.merge(hold_file, final_outcomes, on="team_combo")
final_stacks['Own'] = final_stacks['team_combo'].map(own_dict)
final_stacks = final_stacks[['team_combo', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own']]

# LevX: projection percentile minus ownership percentile
# (positive = projects better than the field owns it).
final_stacks['Projection Rank'] = final_stacks.Median.rank(pct=True)
final_stacks['Own Rank'] = final_stacks.Own.rank(pct=True)
final_stacks['LevX'] = final_stacks['Projection Rank'] - final_stacks['Own Rank']

# Expand the combo key into its display components.
final_stacks['Team'] = final_stacks['team_combo'].map(team_dict)
final_stacks['QB'] = final_stacks['team_combo'].map(qb_dict)
final_stacks['WR1_TE'] = final_stacks['team_combo'].map(wr1_dict)
final_stacks['WR2_TE'] = final_stacks['team_combo'].map(wr2_dict)

final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish',
                             'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX']]

final_stacks = final_stacks.sort_values(by='Median', ascending=False)
|
| 323 |
+
|
| 324 |
+
# Render the stacks table (Simple or Advanced column set) and offer a CSV export.
with hold_container:
    hold_container = st.empty()
    if view_var1 == 'Simple':
        final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Salary', 'Median', '60+%', '4x%']]
    elif view_var1 == 'Advanced':
        # Fix: the column selection just above this block drops 'Total', so
        # including it here raised a KeyError; the Advanced view now shows
        # every surviving column.
        final_stacks = final_stacks[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish',
                                     'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX']]
    st.dataframe(final_stacks.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(player_roo_format, precision=2), use_container_width=True)

    # Exports whichever view (Simple/Advanced) is currently selected.
    st.download_button(
        label="Export Tables",
        data=convert_df_to_csv(final_stacks),
        file_name='Custom_NFL_stacks_export.csv',
        mime='text/csv',
    )
|
| 340 |
|
| 341 |
with tab2:
|
| 342 |
col1, col2 = st.columns([1, 5])
|
|
|
|
| 344 |
st.info(t_stamp)
|
| 345 |
if st.button("Load/Reset Data", key='reset2'):
|
| 346 |
st.cache_data.clear()
|
| 347 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 348 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 349 |
slate_var2 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var2')
|
| 350 |
site_var2 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var2')
|
|
|
|
| 531 |
st.info(t_stamp)
|
| 532 |
if st.button("Load/Reset Data", key='reset3'):
|
| 533 |
st.cache_data.clear()
|
| 534 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 535 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 536 |
slate_var3 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var3')
|
| 537 |
site_var3 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var3')
|
|
|
|
| 722 |
st.info(t_stamp)
|
| 723 |
if st.button("Load/Reset Data", key='reset4'):
|
| 724 |
st.cache_data.clear()
|
| 725 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 726 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 727 |
slate_var4 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var4')
|
| 728 |
site_var4 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var4')
|
|
|
|
| 912 |
st.info(t_stamp)
|
| 913 |
if st.button("Load/Reset Data", key='reset5'):
|
| 914 |
st.cache_data.clear()
|
| 915 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 916 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 917 |
slate_var5 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var5')
|
| 918 |
site_var5 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var5')
|
|
|
|
| 1102 |
st.info(t_stamp)
|
| 1103 |
if st.button("Load/Reset Data", key='reset6'):
|
| 1104 |
st.cache_data.clear()
|
| 1105 |
+
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = player_stat_table()
|
| 1106 |
t_stamp = f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST"
|
| 1107 |
slate_var6 = st.radio("Which data are you loading?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate'), key='slate_var6')
|
| 1108 |
site_var6 = st.radio("What table would you like to display?", ('Draftkings', 'Fanduel'), key='site_var6')
|