Spaces:
Sleeping
Sleeping
Update src/streamlit_app.py
Browse files- src/streamlit_app.py +248 -116
src/streamlit_app.py
CHANGED
|
@@ -45,6 +45,15 @@ SCRAPE_HEADERS = {
|
|
| 45 |
}
|
| 46 |
|
| 47 |
PERPLEXITY_API_URL = 'https://api.perplexity.ai/chat/completions'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
|
| 49 |
# Initialize session state for storing data
|
| 50 |
if 'player_stats_data' not in st.session_state:
|
|
@@ -55,12 +64,23 @@ if 'fixtures_data' not in st.session_state:
|
|
| 55 |
st.session_state.fixtures_data = {}
|
| 56 |
if 'perplexity_api_key' not in st.session_state:
|
| 57 |
st.session_state.perplexity_api_key = ""
|
|
|
|
|
|
|
| 58 |
|
| 59 |
|
| 60 |
# ---------- Helper Functions (from Flask app) ----------
|
| 61 |
def clean_fbref_df_columns(df):
|
| 62 |
if isinstance(df.columns, pd.MultiIndex):
|
| 63 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 64 |
df.columns = ["".join(c if c.isalnum() or c == '%' else "_" for c in str(col)) for col in df.columns]
|
| 65 |
df.columns = [col.replace('%', 'Pct') for col in df.columns]
|
| 66 |
df = df.rename(columns=lambda x: re.sub(r'_+', '_', x))
|
|
@@ -70,174 +90,231 @@ def clean_fbref_df_columns(df):
|
|
| 70 |
# ---------- Scraping Functions (modified for Streamlit) ----------
|
| 71 |
def scrape_player_stats_st(league_keys_to_scrape):
|
| 72 |
st.write("### Scraping Player Stats...")
|
| 73 |
-
|
| 74 |
total_leagues = len(league_keys_to_scrape)
|
| 75 |
|
| 76 |
for i, key in enumerate(league_keys_to_scrape):
|
| 77 |
url = LEAGUES[key]['player_stats_url']
|
| 78 |
-
st.
|
| 79 |
try:
|
| 80 |
-
r = requests.get(url, headers=SCRAPE_HEADERS, timeout=
|
| 81 |
r.raise_for_status()
|
| 82 |
soup = BeautifulSoup(r.text, 'html.parser')
|
|
|
|
|
|
|
| 83 |
table_player_standard = soup.find('table', {'id': 'stats_standard'})
|
| 84 |
|
| 85 |
if table_player_standard:
|
| 86 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
df = clean_fbref_df_columns(df)
|
| 88 |
-
df = df[df['Player'].notna() & (df['Player'] != 'Player')]
|
| 89 |
-
df = df[df['Rk'].notna() & (df['Rk'] != 'Rk')]
|
| 90 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 91 |
for col in df.columns:
|
| 92 |
-
if col not in ['
|
| 93 |
try:
|
| 94 |
df[col] = pd.to_numeric(df[col], errors='coerce')
|
| 95 |
except Exception:
|
| 96 |
-
pass
|
| 97 |
-
df = df.fillna(0)
|
| 98 |
|
| 99 |
st.session_state.player_stats_data[key] = df
|
| 100 |
-
st.success(f"Successfully scraped player stats for {LEAGUES[key]['name']}.")
|
|
|
|
| 101 |
else:
|
| 102 |
-
st.error(f"Could not find player stats table for {LEAGUES[key]['name']}
|
| 103 |
-
time.sleep(
|
| 104 |
except Exception as e:
|
| 105 |
st.error(f"Error scraping player stats for {LEAGUES[key]['name']}: {e}")
|
| 106 |
-
|
| 107 |
st.write("Player stats scraping complete.")
|
| 108 |
|
|
|
|
| 109 |
def scrape_squad_stats_st(league_keys_to_scrape):
|
| 110 |
st.write("### Scraping Squad Stats (League Tables)...")
|
| 111 |
-
|
| 112 |
total_leagues = len(league_keys_to_scrape)
|
| 113 |
|
| 114 |
for i, key in enumerate(league_keys_to_scrape):
|
| 115 |
url = LEAGUES[key]['squad_stats_url']
|
| 116 |
-
st.
|
| 117 |
try:
|
| 118 |
r = requests.get(url, headers=SCRAPE_HEADERS, timeout=30)
|
| 119 |
r.raise_for_status()
|
| 120 |
soup = BeautifulSoup(r.text, 'html.parser')
|
| 121 |
|
| 122 |
league_table = None
|
| 123 |
-
|
| 124 |
-
for
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
if isinstance(temp_cols, pd.MultiIndex): temp_cols = temp_cols.droplevel(0)
|
| 130 |
-
if all(col in temp_cols for col in ['Squad', 'MP', 'W', 'D', 'L', 'Pts']):
|
| 131 |
-
league_table = parent_table
|
| 132 |
-
break
|
| 133 |
-
|
| 134 |
if not league_table:
|
| 135 |
-
|
| 136 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 137 |
|
| 138 |
-
if not league_table:
|
| 139 |
table_squad_standard = soup.find('table', {'id': 'stats_standard'})
|
| 140 |
if table_squad_standard:
|
| 141 |
-
temp_df_check = pd.read_html(str(table_squad_standard))[0]
|
| 142 |
temp_cols = temp_df_check.columns
|
| 143 |
if isinstance(temp_cols, pd.MultiIndex): temp_cols = temp_cols.droplevel(0)
|
| 144 |
if all(col in temp_cols for col in ['Squad', 'MP', 'W', 'D', 'L', 'Pts']):
|
| 145 |
league_table = table_squad_standard
|
| 146 |
|
| 147 |
if league_table:
|
| 148 |
-
df = pd.read_html(str(league_table))[0]
|
| 149 |
df = clean_fbref_df_columns(df)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 150 |
df = df[df['Squad'].notna() & (df['Squad'] != 'Squad')]
|
| 151 |
df = df[df['Rk'].notna() & (df['Rk'] != 'Rk')]
|
| 152 |
|
| 153 |
-
numeric_cols = ['MP', 'W', 'D', 'L', 'GF', 'GA', 'GD', 'Pts', 'xG', 'xGA', 'xGD']
|
| 154 |
for col in df.columns:
|
| 155 |
-
if col in numeric_cols:
|
| 156 |
df[col] = pd.to_numeric(df[col], errors='coerce')
|
| 157 |
df = df.fillna(0)
|
| 158 |
|
| 159 |
st.session_state.squad_stats_data[key] = df
|
| 160 |
st.success(f"Successfully scraped squad stats for {LEAGUES[key]['name']}.")
|
| 161 |
else:
|
| 162 |
-
st.error(f"Could not find squad stats table for {LEAGUES[key]['name']}
|
| 163 |
time.sleep(3)
|
| 164 |
except Exception as e:
|
| 165 |
st.error(f"Error scraping squad stats for {LEAGUES[key]['name']}: {e}")
|
| 166 |
-
|
| 167 |
st.write("Squad stats scraping complete.")
|
| 168 |
|
| 169 |
def scrape_fixtures_st(league_keys_to_scrape):
|
| 170 |
st.write("### Scraping Fixtures...")
|
| 171 |
-
|
| 172 |
total_leagues = len(league_keys_to_scrape)
|
| 173 |
|
| 174 |
for i, key in enumerate(league_keys_to_scrape):
|
| 175 |
url = LEAGUES[key]['fixtures_url']
|
| 176 |
-
st.
|
| 177 |
try:
|
| 178 |
r = requests.get(url, headers=SCRAPE_HEADERS, timeout=30)
|
| 179 |
r.raise_for_status()
|
| 180 |
soup = BeautifulSoup(r.text, 'html.parser')
|
| 181 |
|
| 182 |
fixture_table = None
|
|
|
|
| 183 |
all_captions = soup.find_all('caption')
|
| 184 |
for caption_tag in all_captions:
|
| 185 |
if "scores and fixtures" in caption_tag.get_text().lower():
|
| 186 |
fixture_table = caption_tag.find_parent('table')
|
| 187 |
if fixture_table: break
|
| 188 |
|
| 189 |
-
if not fixture_table:
|
| 190 |
-
potential_tables = soup.find_all('table', class_="stats_table")
|
| 191 |
-
if
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 192 |
|
| 193 |
if fixture_table:
|
| 194 |
-
df = pd.read_html(str(fixture_table))[0]
|
| 195 |
df = clean_fbref_df_columns(df)
|
| 196 |
-
|
| 197 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 198 |
|
| 199 |
if 'Score' in df.columns:
|
| 200 |
-
score_split = df['Score'].astype(str).str.split('–', expand=True)
|
| 201 |
if score_split.shape[1] == 2:
|
| 202 |
df['HomeGoals'] = pd.to_numeric(score_split[0], errors='coerce')
|
| 203 |
df['AwayGoals'] = pd.to_numeric(score_split[1], errors='coerce')
|
| 204 |
-
else:
|
| 205 |
-
df['HomeGoals'] =
|
| 206 |
-
df['AwayGoals'] =
|
| 207 |
-
|
|
|
|
|
|
|
|
|
|
| 208 |
if 'Date' in df.columns:
|
| 209 |
-
|
|
|
|
|
|
|
|
|
|
| 210 |
|
| 211 |
st.session_state.fixtures_data[key] = df
|
| 212 |
st.success(f"Successfully scraped fixtures for {LEAGUES[key]['name']}.")
|
| 213 |
else:
|
| 214 |
-
st.error(f"Could not find fixtures table for {LEAGUES[key]['name']}
|
| 215 |
time.sleep(3)
|
| 216 |
except Exception as e:
|
| 217 |
st.error(f"Error scraping fixtures for {LEAGUES[key]['name']}: {e}")
|
| 218 |
-
|
| 219 |
st.write("Fixtures scraping complete.")
|
| 220 |
|
| 221 |
# ---------- Perplexity API Functions ----------
|
| 222 |
-
def get_perplexity_response(api_key, prompt, system_message="You are a helpful football analyst AI."):
|
| 223 |
if not api_key:
|
| 224 |
st.error("Perplexity API Key is not set. Please enter it in the sidebar.")
|
| 225 |
return None
|
| 226 |
|
| 227 |
headers = {
|
| 228 |
'Authorization': f'Bearer {api_key}',
|
| 229 |
-
'Content-Type': 'application/json'
|
|
|
|
| 230 |
}
|
| 231 |
payload = {
|
| 232 |
-
'model':
|
| 233 |
'messages': [
|
| 234 |
{'role': 'system', 'content': system_message},
|
| 235 |
{'role': 'user', 'content': prompt}
|
| 236 |
]
|
| 237 |
}
|
| 238 |
try:
|
| 239 |
-
with st.spinner("Querying Perplexity AI..."):
|
| 240 |
-
response = requests.post(PERPLEXITY_API_URL, headers=headers, json=payload, timeout=
|
| 241 |
response.raise_for_status()
|
| 242 |
data = response.json()
|
| 243 |
return data.get('choices', [{}])[0].get('message', {}).get('content', '')
|
|
@@ -246,9 +323,9 @@ def get_perplexity_response(api_key, prompt, system_message="You are a helpful f
|
|
| 246 |
if e.response is not None:
|
| 247 |
try:
|
| 248 |
error_detail = e.response.json().get("error", {}).get("message", e.response.text)
|
| 249 |
-
error_message = f"Perplexity API error: {error_detail}"
|
| 250 |
-
except ValueError:
|
| 251 |
-
error_message = f"Perplexity API error: {e.response.status_code} - {e.response.reason}"
|
| 252 |
st.error(error_message)
|
| 253 |
return None
|
| 254 |
except Exception as e:
|
|
@@ -257,18 +334,24 @@ def get_perplexity_response(api_key, prompt, system_message="You are a helpful f
|
|
| 257 |
|
| 258 |
# ---------- Streamlit UI ----------
|
| 259 |
st.set_page_config(layout="wide")
|
| 260 |
-
st.title("⚽ Football Data Scraper & Perplexity Tester")
|
| 261 |
st.markdown("Test data retrieval from FBRef and Perplexity API integration. No Firebase calls.")
|
| 262 |
|
| 263 |
# --- Sidebar ---
|
| 264 |
-
st.sidebar.header("API
|
| 265 |
st.session_state.perplexity_api_key = st.sidebar.text_input(
|
| 266 |
"Perplexity API Key:",
|
| 267 |
type="password",
|
| 268 |
value=st.session_state.perplexity_api_key,
|
| 269 |
-
help="Your Perplexity AI API key.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 270 |
)
|
| 271 |
|
|
|
|
| 272 |
st.sidebar.markdown("---")
|
| 273 |
st.sidebar.header("Scraping Controls")
|
| 274 |
selected_league_keys = st.sidebar.multiselect(
|
|
@@ -278,17 +361,17 @@ selected_league_keys = st.sidebar.multiselect(
|
|
| 278 |
default=[]
|
| 279 |
)
|
| 280 |
|
| 281 |
-
if st.sidebar.button("Scrape Player Stats"):
|
| 282 |
if selected_league_keys: scrape_player_stats_st(selected_league_keys)
|
| 283 |
-
else: st.sidebar.warning("Select leagues.")
|
| 284 |
|
| 285 |
-
if st.sidebar.button("Scrape Squad Stats"):
|
| 286 |
if selected_league_keys: scrape_squad_stats_st(selected_league_keys)
|
| 287 |
-
else: st.sidebar.warning("Select leagues.")
|
| 288 |
|
| 289 |
-
if st.sidebar.button("Scrape Fixtures"):
|
| 290 |
if selected_league_keys: scrape_fixtures_st(selected_league_keys)
|
| 291 |
-
else: st.sidebar.warning("Select leagues.")
|
| 292 |
|
| 293 |
st.sidebar.markdown("---")
|
| 294 |
st.sidebar.header("View Scraped Data")
|
|
@@ -300,21 +383,20 @@ display_league_key = st.sidebar.selectbox(
|
|
| 300 |
|
| 301 |
# --- Main Content Area ---
|
| 302 |
if display_league_key:
|
| 303 |
-
tab1, tab2, tab3 = st.tabs(["Player Stats
|
|
|
|
|
|
|
| 304 |
with tab1:
|
| 305 |
-
st.subheader(f"Player Stats for {LEAGUES[display_league_key]['name']}")
|
| 306 |
if display_league_key in st.session_state.player_stats_data:
|
| 307 |
st.dataframe(st.session_state.player_stats_data[display_league_key])
|
| 308 |
else:
|
| 309 |
st.info("No player stats data loaded. Scrape first.")
|
| 310 |
with tab2:
|
| 311 |
-
st.subheader(f"Squad Stats for {LEAGUES[display_league_key]['name']}")
|
| 312 |
if display_league_key in st.session_state.squad_stats_data:
|
| 313 |
st.dataframe(st.session_state.squad_stats_data[display_league_key])
|
| 314 |
else:
|
| 315 |
st.info("No squad stats data loaded. Scrape first.")
|
| 316 |
with tab3:
|
| 317 |
-
st.subheader(f"Fixtures for {LEAGUES[display_league_key]['name']}")
|
| 318 |
if display_league_key in st.session_state.fixtures_data:
|
| 319 |
st.dataframe(st.session_state.fixtures_data[display_league_key])
|
| 320 |
else:
|
|
@@ -327,18 +409,29 @@ st.header("FBRef Data Feature Testing (Local)")
|
|
| 327 |
|
| 328 |
# --- 1. Player Comparison Tool ---
|
| 329 |
st.subheader("1. Player Comparison (Local Data)")
|
| 330 |
-
col1_pc, col2_pc, col3_pc = st.columns(
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 334 |
|
| 335 |
if st.button("Compare Players (Local)", key="compare_local_btn"):
|
| 336 |
-
# ... (Player comparison logic remains the same as before) ...
|
| 337 |
if pc_league and pc_player1_name and pc_player2_name:
|
| 338 |
if pc_league in st.session_state.player_stats_data:
|
| 339 |
all_players_df = st.session_state.player_stats_data[pc_league]
|
| 340 |
-
|
| 341 |
-
|
|
|
|
| 342 |
|
| 343 |
if not player1_data.empty:
|
| 344 |
st.write(f"**Stats for {pc_player1_name}:**")
|
|
@@ -352,78 +445,108 @@ if st.button("Compare Players (Local)", key="compare_local_btn"):
|
|
| 352 |
else:
|
| 353 |
st.warning(f"Could not find data for player: {pc_player2_name} in {LEAGUES[pc_league]['name']}")
|
| 354 |
else:
|
| 355 |
-
st.error(f"Player stats data for {LEAGUES[pc_league]['name']} not loaded. Please scrape first.")
|
| 356 |
else:
|
| 357 |
-
st.warning("Please select a league and
|
| 358 |
|
| 359 |
|
| 360 |
# --- 2. Fixture Analysis (Local Data) ---
|
| 361 |
st.subheader("2. Fixture Analysis (Local Data)")
|
| 362 |
-
|
| 363 |
-
|
| 364 |
-
fa_league = col1_fa.selectbox("League
|
| 365 |
-
|
| 366 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 367 |
|
| 368 |
if st.button("Analyze Fixture (Local)", key="analyze_local_btn"):
|
| 369 |
if fa_league and fa_home_team and fa_away_team:
|
| 370 |
if fa_league in st.session_state.fixtures_data:
|
| 371 |
-
all_fixtures_df = st.session_state.fixtures_data[fa_league]
|
| 372 |
home_team_norm = fa_home_team.strip().lower()
|
| 373 |
away_team_norm = fa_away_team.strip().lower()
|
| 374 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 375 |
h2h_matches = all_fixtures_df[
|
| 376 |
(all_fixtures_df['Home'].str.lower() == home_team_norm) & (all_fixtures_df['Away'].str.lower() == away_team_norm) |
|
| 377 |
(all_fixtures_df['Home'].str.lower() == away_team_norm) & (all_fixtures_df['Away'].str.lower() == home_team_norm)
|
| 378 |
]
|
| 379 |
st.write(f"**Head-to-Head between {fa_home_team} and {fa_away_team}:**")
|
| 380 |
if not h2h_matches.empty:
|
| 381 |
-
st.dataframe(h2h_matches
|
| 382 |
else:
|
| 383 |
st.info("No H2H matches found in the scraped data.")
|
| 384 |
|
| 385 |
-
def get_form_df(
|
| 386 |
-
team_matches =
|
| 387 |
-
(
|
|
|
|
| 388 |
]
|
| 389 |
-
|
|
|
|
| 390 |
return played_matches.head(num_matches)
|
| 391 |
|
| 392 |
st.write(f"**Recent Form for {fa_home_team} (last 5 played):**")
|
| 393 |
-
home_form_df = get_form_df(
|
| 394 |
if not home_form_df.empty: st.dataframe(home_form_df)
|
| 395 |
else: st.info(f"No recent played matches found for {fa_home_team}.")
|
| 396 |
|
| 397 |
st.write(f"**Recent Form for {fa_away_team} (last 5 played):**")
|
| 398 |
-
away_form_df = get_form_df(
|
| 399 |
if not away_form_df.empty: st.dataframe(away_form_df)
|
| 400 |
else: st.info(f"No recent played matches found for {fa_away_team}.")
|
| 401 |
else:
|
| 402 |
-
st.error(f"Fixtures data for {LEAGUES[fa_league]['name']} not loaded. Please scrape first.")
|
| 403 |
else:
|
| 404 |
-
st.warning("Please select a league and enter home
|
| 405 |
|
| 406 |
# --- 3. Visualization Data (Local Data) ---
|
| 407 |
st.subheader("3. Visualization Data (Example: Top Scorers - Local Data)")
|
| 408 |
-
# ... (Visualization logic remains the same as before) ...
|
| 409 |
col1_vd, col2_vd = st.columns(2)
|
| 410 |
-
|
|
|
|
| 411 |
|
| 412 |
if st.button("Show Top Scorers (Local)", key="top_scorers_local_btn"):
|
| 413 |
if vd_league:
|
| 414 |
if vd_league in st.session_state.player_stats_data:
|
| 415 |
player_df = st.session_state.player_stats_data[vd_league].copy()
|
| 416 |
-
|
| 417 |
-
|
| 418 |
-
|
| 419 |
-
|
| 420 |
-
|
| 421 |
-
|
| 422 |
-
|
| 423 |
-
|
| 424 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 425 |
else:
|
| 426 |
-
st.error(f"Player stats data for {LEAGUES[vd_league]['name']} not loaded. Please scrape first.")
|
| 427 |
else:
|
| 428 |
st.warning("Please select a league for visualization data.")
|
| 429 |
|
|
@@ -432,9 +555,9 @@ st.header("Perplexity API Testing")
|
|
| 432 |
|
| 433 |
# --- 4. Fixture Report via Perplexity ---
|
| 434 |
st.subheader("4. Fixture Report (via Perplexity AI)")
|
| 435 |
-
fr_home_team = st.text_input("Home Team (for Perplexity Report):", key="
|
| 436 |
-
fr_away_team = st.text_input("Away Team (for Perplexity Report):", key="
|
| 437 |
-
fr_match_date = st.text_input("Match Date (e.g., YYYY-MM-DD) (for Perplexity Report):", key="
|
| 438 |
|
| 439 |
if st.button("Get Fixture Report from Perplexity", key="fr_perplexity_btn"):
|
| 440 |
if fr_home_team and fr_away_team and fr_match_date:
|
|
@@ -450,7 +573,12 @@ if st.button("Get Fixture Report from Perplexity", key="fr_perplexity_btn"):
|
|
| 450 |
"4. Brief Tactical Outlook or Prediction (optional, if confident).\n"
|
| 451 |
"Prioritize information from reputable football sources. Be objective."
|
| 452 |
)
|
| 453 |
-
report = get_perplexity_response(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 454 |
if report:
|
| 455 |
st.markdown("**Perplexity AI Fixture Report:**")
|
| 456 |
st.markdown(report)
|
|
@@ -459,14 +587,18 @@ if st.button("Get Fixture Report from Perplexity", key="fr_perplexity_btn"):
|
|
| 459 |
|
| 460 |
# --- 5. Custom Query via Perplexity ---
|
| 461 |
st.subheader("5. Custom Query (via Perplexity AI)")
|
| 462 |
-
custom_query_text = st.text_area("Enter your football-related question:", height=100, key="
|
| 463 |
|
| 464 |
if st.button("Ask Perplexity AI", key="custom_q_btn"):
|
| 465 |
if custom_query_text:
|
| 466 |
if not st.session_state.perplexity_api_key:
|
| 467 |
st.error("Perplexity API Key is not set in the sidebar.")
|
| 468 |
else:
|
| 469 |
-
answer = get_perplexity_response(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 470 |
if answer:
|
| 471 |
st.markdown("**Perplexity AI Answer:**")
|
| 472 |
st.markdown(answer)
|
|
@@ -475,4 +607,4 @@ if st.button("Ask Perplexity AI", key="custom_q_btn"):
|
|
| 475 |
|
| 476 |
|
| 477 |
st.markdown("---")
|
| 478 |
-
st.caption("Streamlit test app
|
|
|
|
| 45 |
}
|
| 46 |
|
| 47 |
PERPLEXITY_API_URL = 'https://api.perplexity.ai/chat/completions'
|
| 48 |
+
PERPLEXITY_MODELS = [
|
| 49 |
+
"sonar-deep-research",
|
| 50 |
+
"sonar-reasoning-pro",
|
| 51 |
+
"sonar-reasoning",
|
| 52 |
+
"sonar-pro",
|
| 53 |
+
"sonar", # Defaulting to this if sonar-medium-online is not listed
|
| 54 |
+
"r1-1776"
|
| 55 |
+
]
|
| 56 |
+
|
| 57 |
|
| 58 |
# Initialize session state for storing data
|
| 59 |
if 'player_stats_data' not in st.session_state:
|
|
|
|
| 64 |
st.session_state.fixtures_data = {}
|
| 65 |
if 'perplexity_api_key' not in st.session_state:
|
| 66 |
st.session_state.perplexity_api_key = ""
|
| 67 |
+
if 'selected_perplexity_model' not in st.session_state:
|
| 68 |
+
st.session_state.selected_perplexity_model = "sonar" # Default model
|
| 69 |
|
| 70 |
|
| 71 |
# ---------- Helper Functions (from Flask app) ----------
|
| 72 |
def clean_fbref_df_columns(df):
|
| 73 |
if isinstance(df.columns, pd.MultiIndex):
|
| 74 |
+
# Attempt to drop the top level if it's generic or a category header
|
| 75 |
+
# This is common for FBRef player stats tables
|
| 76 |
+
try:
|
| 77 |
+
df.columns = df.columns.droplevel(0)
|
| 78 |
+
except Exception as e:
|
| 79 |
+
st.warning(f"Could not droplevel(0) from columns: {e}. Columns: {df.columns}")
|
| 80 |
+
# If droplevel fails, try to flatten in a different way or use as is
|
| 81 |
+
# For now, we'll proceed, but this might need adjustment based on specific table structures
|
| 82 |
+
|
| 83 |
+
# Further cleaning
|
| 84 |
df.columns = ["".join(c if c.isalnum() or c == '%' else "_" for c in str(col)) for col in df.columns]
|
| 85 |
df.columns = [col.replace('%', 'Pct') for col in df.columns]
|
| 86 |
df = df.rename(columns=lambda x: re.sub(r'_+', '_', x))
|
|
|
|
| 90 |
# ---------- Scraping Functions (modified for Streamlit) ----------
|
| 91 |
def scrape_player_stats_st(league_keys_to_scrape):
|
| 92 |
st.write("### Scraping Player Stats...")
|
| 93 |
+
my_bar = st.progress(0)
|
| 94 |
total_leagues = len(league_keys_to_scrape)
|
| 95 |
|
| 96 |
for i, key in enumerate(league_keys_to_scrape):
|
| 97 |
url = LEAGUES[key]['player_stats_url']
|
| 98 |
+
st.info(f"Fetching player stats for: {LEAGUES[key]['name']} from {url}")
|
| 99 |
try:
|
| 100 |
+
r = requests.get(url, headers=SCRAPE_HEADERS, timeout=45) # Increased timeout
|
| 101 |
r.raise_for_status()
|
| 102 |
soup = BeautifulSoup(r.text, 'html.parser')
|
| 103 |
+
|
| 104 |
+
# Player standard stats table usually has id="stats_standard" on these specific stats pages
|
| 105 |
table_player_standard = soup.find('table', {'id': 'stats_standard'})
|
| 106 |
|
| 107 |
if table_player_standard:
|
| 108 |
+
df_list = pd.read_html(str(table_player_standard), flavor='lxml') # Use lxml
|
| 109 |
+
if not df_list:
|
| 110 |
+
st.error(f"Pandas could not read any table from the HTML for player stats: {LEAGUES[key]['name']}.")
|
| 111 |
+
continue
|
| 112 |
+
df = df_list[0] # Usually the first table
|
| 113 |
+
|
| 114 |
df = clean_fbref_df_columns(df)
|
|
|
|
|
|
|
| 115 |
|
| 116 |
+
# Ensure 'Player' and 'Rk' columns exist after cleaning for filtering
|
| 117 |
+
if 'Player' not in df.columns:
|
| 118 |
+
st.error(f"'Player' column not found after cleaning for {LEAGUES[key]['name']}. Columns: {df.columns}")
|
| 119 |
+
st.dataframe(df.head()) # Show what columns are there
|
| 120 |
+
continue
|
| 121 |
+
if 'Rk' not in df.columns:
|
| 122 |
+
st.warning(f"'Rk' column not found after cleaning for {LEAGUES[key]['name']}. Filtering might be less effective. Columns: {df.columns}")
|
| 123 |
+
# Proceed without Rk filtering if not present
|
| 124 |
+
df = df[df['Player'].notna() & (df['Player'] != 'Player')]
|
| 125 |
+
else:
|
| 126 |
+
df = df[df['Player'].notna() & (df['Player'] != 'Player')]
|
| 127 |
+
df = df[df['Rk'].notna() & (df['Rk'] != 'Rk')] # Filter out non-player rows if 'Rk' (Rank) column exists
|
| 128 |
+
|
| 129 |
+
# Convert potential numeric columns
|
| 130 |
for col in df.columns:
|
| 131 |
+
if col.lower() not in ['player', 'nation', 'pos', 'squad', 'comp', 'matches', 'match_report']: # Non-numeric columns
|
| 132 |
try:
|
| 133 |
df[col] = pd.to_numeric(df[col], errors='coerce')
|
| 134 |
except Exception:
|
| 135 |
+
pass # Keep as is if conversion fails
|
| 136 |
+
df = df.fillna(0) # Or use more sophisticated NaN handling for specific columns
|
| 137 |
|
| 138 |
st.session_state.player_stats_data[key] = df
|
| 139 |
+
st.success(f"Successfully scraped and processed player stats for {LEAGUES[key]['name']}.")
|
| 140 |
+
st.dataframe(df.head()) # Show a preview
|
| 141 |
else:
|
| 142 |
+
st.error(f"Could not find player standard stats table (id='stats_standard') for {LEAGUES[key]['name']} at {url}")
|
| 143 |
+
time.sleep(5) # Be polite
|
| 144 |
except Exception as e:
|
| 145 |
st.error(f"Error scraping player stats for {LEAGUES[key]['name']}: {e}")
|
| 146 |
+
my_bar.progress((i + 1) / total_leagues)
|
| 147 |
st.write("Player stats scraping complete.")
|
| 148 |
|
| 149 |
+
|
| 150 |
def scrape_squad_stats_st(league_keys_to_scrape):
|
| 151 |
st.write("### Scraping Squad Stats (League Tables)...")
|
| 152 |
+
my_bar = st.progress(0)
|
| 153 |
total_leagues = len(league_keys_to_scrape)
|
| 154 |
|
| 155 |
for i, key in enumerate(league_keys_to_scrape):
|
| 156 |
url = LEAGUES[key]['squad_stats_url']
|
| 157 |
+
st.info(f"Fetching squad stats for: {LEAGUES[key]['name']} from {url}")
|
| 158 |
try:
|
| 159 |
r = requests.get(url, headers=SCRAPE_HEADERS, timeout=30)
|
| 160 |
r.raise_for_status()
|
| 161 |
soup = BeautifulSoup(r.text, 'html.parser')
|
| 162 |
|
| 163 |
league_table = None
|
| 164 |
+
# Try finding the main league table first (often has "overall" in id or a specific caption)
|
| 165 |
+
possible_ids = [id_val for id_val in soup.find_all(id=True) if "overall" in str(id_val.get('id','')).lower() and "results" in str(id_val.get('id','')).lower()]
|
| 166 |
+
if possible_ids:
|
| 167 |
+
table_tag = soup.find('table', id=possible_ids[0].get('id'))
|
| 168 |
+
if table_tag: league_table = table_tag
|
| 169 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 170 |
if not league_table:
|
| 171 |
+
all_captions = soup.find_all('caption')
|
| 172 |
+
for caption_tag in all_captions:
|
| 173 |
+
caption_text = caption_tag.get_text().lower()
|
| 174 |
+
if ("league table" in caption_text or "regular season" in caption_text or "overall" in caption_text) and "squad" not in caption_text :
|
| 175 |
+
parent_table = caption_tag.find_parent('table')
|
| 176 |
+
if parent_table:
|
| 177 |
+
temp_df_check = pd.read_html(str(parent_table), flavor='lxml')[0]
|
| 178 |
+
temp_cols = temp_df_check.columns
|
| 179 |
+
if isinstance(temp_cols, pd.MultiIndex): temp_cols = temp_cols.droplevel(0)
|
| 180 |
+
if all(col in temp_cols for col in ['Squad', 'MP', 'W', 'D', 'L', 'Pts']):
|
| 181 |
+
league_table = parent_table
|
| 182 |
+
break
|
| 183 |
|
| 184 |
+
if not league_table: # Fallback to first 'stats_standard' if it's a squad table
|
| 185 |
table_squad_standard = soup.find('table', {'id': 'stats_standard'})
|
| 186 |
if table_squad_standard:
|
| 187 |
+
temp_df_check = pd.read_html(str(table_squad_standard), flavor='lxml')[0]
|
| 188 |
temp_cols = temp_df_check.columns
|
| 189 |
if isinstance(temp_cols, pd.MultiIndex): temp_cols = temp_cols.droplevel(0)
|
| 190 |
if all(col in temp_cols for col in ['Squad', 'MP', 'W', 'D', 'L', 'Pts']):
|
| 191 |
league_table = table_squad_standard
|
| 192 |
|
| 193 |
if league_table:
|
| 194 |
+
df = pd.read_html(str(league_table), flavor='lxml')[0]
|
| 195 |
df = clean_fbref_df_columns(df)
|
| 196 |
+
|
| 197 |
+
if 'Squad' not in df.columns or 'Rk' not in df.columns:
|
| 198 |
+
st.error(f"Squad or Rk column missing after cleaning for squad stats {LEAGUES[key]['name']}. Columns: {df.columns}")
|
| 199 |
+
st.dataframe(df.head())
|
| 200 |
+
continue
|
| 201 |
+
|
| 202 |
df = df[df['Squad'].notna() & (df['Squad'] != 'Squad')]
|
| 203 |
df = df[df['Rk'].notna() & (df['Rk'] != 'Rk')]
|
| 204 |
|
| 205 |
+
numeric_cols = ['MP', 'W', 'D', 'L', 'GF', 'GA', 'GD', 'Pts', 'xG', 'xGA', 'xGD', 'Attendance']
|
| 206 |
for col in df.columns:
|
| 207 |
+
if col in numeric_cols: # Check if column exists before trying to convert
|
| 208 |
df[col] = pd.to_numeric(df[col], errors='coerce')
|
| 209 |
df = df.fillna(0)
|
| 210 |
|
| 211 |
st.session_state.squad_stats_data[key] = df
|
| 212 |
st.success(f"Successfully scraped squad stats for {LEAGUES[key]['name']}.")
|
| 213 |
else:
|
| 214 |
+
st.error(f"Could not find a suitable squad stats/league table for {LEAGUES[key]['name']} at {url}")
|
| 215 |
time.sleep(3)
|
| 216 |
except Exception as e:
|
| 217 |
st.error(f"Error scraping squad stats for {LEAGUES[key]['name']}: {e}")
|
| 218 |
+
my_bar.progress((i + 1) / total_leagues)
|
| 219 |
st.write("Squad stats scraping complete.")
|
| 220 |
|
| 221 |
def scrape_fixtures_st(league_keys_to_scrape):
|
| 222 |
st.write("### Scraping Fixtures...")
|
| 223 |
+
my_bar = st.progress(0)
|
| 224 |
total_leagues = len(league_keys_to_scrape)
|
| 225 |
|
| 226 |
for i, key in enumerate(league_keys_to_scrape):
|
| 227 |
url = LEAGUES[key]['fixtures_url']
|
| 228 |
+
st.info(f"Fetching fixtures for: {LEAGUES[key]['name']} from {url}")
|
| 229 |
try:
|
| 230 |
r = requests.get(url, headers=SCRAPE_HEADERS, timeout=30)
|
| 231 |
r.raise_for_status()
|
| 232 |
soup = BeautifulSoup(r.text, 'html.parser')
|
| 233 |
|
| 234 |
fixture_table = None
|
| 235 |
+
# Fixture tables often have a caption containing "Scores and Fixtures"
|
| 236 |
all_captions = soup.find_all('caption')
|
| 237 |
for caption_tag in all_captions:
|
| 238 |
if "scores and fixtures" in caption_tag.get_text().lower():
|
| 239 |
fixture_table = caption_tag.find_parent('table')
|
| 240 |
if fixture_table: break
|
| 241 |
|
| 242 |
+
if not fixture_table: # Fallback if caption not found
|
| 243 |
+
potential_tables = soup.find_all('table', class_=lambda x: x and "stats_table" in x and "sched" in x) # More specific class
|
| 244 |
+
if not potential_tables:
|
| 245 |
+
potential_tables = soup.find_all('table', class_="stats_table") # Generic fallback
|
| 246 |
+
if potential_tables:
|
| 247 |
+
# Iterate to find one with typical fixture columns
|
| 248 |
+
for pt in potential_tables:
|
| 249 |
+
temp_df_check = pd.read_html(str(pt), flavor='lxml')[0]
|
| 250 |
+
temp_cols = temp_df_check.columns
|
| 251 |
+
if isinstance(temp_cols, pd.MultiIndex): temp_cols = temp_cols.droplevel(0)
|
| 252 |
+
if all(c in temp_cols for c in ['Wk', 'Date', 'Home', 'Away']):
|
| 253 |
+
fixture_table = pt
|
| 254 |
+
break
|
| 255 |
+
|
| 256 |
|
| 257 |
if fixture_table:
|
| 258 |
+
df = pd.read_html(str(fixture_table), flavor='lxml')[0]
|
| 259 |
df = clean_fbref_df_columns(df)
|
| 260 |
+
|
| 261 |
+
if 'Wk' not in df.columns or 'Home' not in df.columns:
|
| 262 |
+
st.error(f"Wk or Home column missing after cleaning for fixtures {LEAGUES[key]['name']}. Columns: {df.columns}")
|
| 263 |
+
st.dataframe(df.head())
|
| 264 |
+
continue
|
| 265 |
+
|
| 266 |
+
df = df[df['Wk'].notna()] # Week column usually present for fixtures
|
| 267 |
+
df = df[df['Home'].notna() & (df['Home'] != 'Home')] # Ensure Home team is present and not a header
|
| 268 |
|
| 269 |
if 'Score' in df.columns:
|
| 270 |
+
score_split = df['Score'].astype(str).str.split('–', expand=True) # Use en-dash
|
| 271 |
if score_split.shape[1] == 2:
|
| 272 |
df['HomeGoals'] = pd.to_numeric(score_split[0], errors='coerce')
|
| 273 |
df['AwayGoals'] = pd.to_numeric(score_split[1], errors='coerce')
|
| 274 |
+
else:
|
| 275 |
+
df['HomeGoals'] = pd.NA # Use pandas NA for missing numeric
|
| 276 |
+
df['AwayGoals'] = pd.NA
|
| 277 |
+
else:
|
| 278 |
+
df['HomeGoals'] = pd.NA
|
| 279 |
+
df['AwayGoals'] = pd.NA
|
| 280 |
+
|
| 281 |
if 'Date' in df.columns:
|
| 282 |
+
# Attempt to parse date, handling potential errors
|
| 283 |
+
df['Date_parsed'] = pd.to_datetime(df['Date'], errors='coerce')
|
| 284 |
+
df['Date'] = df['Date_parsed'].dt.strftime('%Y-%m-%d')
|
| 285 |
+
# df = df.drop(columns=['Date_parsed']) # Optional: drop the intermediate column
|
| 286 |
|
| 287 |
st.session_state.fixtures_data[key] = df
|
| 288 |
st.success(f"Successfully scraped fixtures for {LEAGUES[key]['name']}.")
|
| 289 |
else:
|
| 290 |
+
st.error(f"Could not find a suitable fixtures table for {LEAGUES[key]['name']} at {url}")
|
| 291 |
time.sleep(3)
|
| 292 |
except Exception as e:
|
| 293 |
st.error(f"Error scraping fixtures for {LEAGUES[key]['name']}: {e}")
|
| 294 |
+
my_bar.progress((i + 1) / total_leagues)
|
| 295 |
st.write("Fixtures scraping complete.")
|
| 296 |
|
| 297 |
# ---------- Perplexity API Functions ----------
|
| 298 |
+
def get_perplexity_response(api_key, model_name, prompt, system_message="You are a helpful football analyst AI."):
|
| 299 |
if not api_key:
|
| 300 |
st.error("Perplexity API Key is not set. Please enter it in the sidebar.")
|
| 301 |
return None
|
| 302 |
|
| 303 |
headers = {
|
| 304 |
'Authorization': f'Bearer {api_key}',
|
| 305 |
+
'Content-Type': 'application/json',
|
| 306 |
+
'Accept': 'application/json',
|
| 307 |
}
|
| 308 |
payload = {
|
| 309 |
+
'model': model_name,
|
| 310 |
'messages': [
|
| 311 |
{'role': 'system', 'content': system_message},
|
| 312 |
{'role': 'user', 'content': prompt}
|
| 313 |
]
|
| 314 |
}
|
| 315 |
try:
|
| 316 |
+
with st.spinner(f"Querying Perplexity AI with model: {model_name}..."):
|
| 317 |
+
response = requests.post(PERPLEXITY_API_URL, headers=headers, json=payload, timeout=60) # Increased timeout
|
| 318 |
response.raise_for_status()
|
| 319 |
data = response.json()
|
| 320 |
return data.get('choices', [{}])[0].get('message', {}).get('content', '')
|
|
|
|
| 323 |
if e.response is not None:
|
| 324 |
try:
|
| 325 |
error_detail = e.response.json().get("error", {}).get("message", e.response.text)
|
| 326 |
+
error_message = f"Perplexity API error ({e.response.status_code}): {error_detail}"
|
| 327 |
+
except ValueError: # Not JSON
|
| 328 |
+
error_message = f"Perplexity API error: {e.response.status_code} - {e.response.reason}. Response: {e.response.text[:200]}"
|
| 329 |
st.error(error_message)
|
| 330 |
return None
|
| 331 |
except Exception as e:
|
|
|
|
| 334 |
|
| 335 |
# ---------- Streamlit UI ----------
# Page chrome: wide layout suits the dataframes rendered below.
st.set_page_config(layout="wide")
st.title("⚽ Football Data Scraper & Perplexity Tester v2")
st.markdown("Test data retrieval from FBRef and Perplexity API integration. No Firebase calls.")

# --- Sidebar ---
# API configuration; the key is held only in st.session_state for this session.
st.sidebar.header("API Configuration")
st.session_state.perplexity_api_key = st.sidebar.text_input(
    "Perplexity API Key:",
    type="password",
    value=st.session_state.perplexity_api_key,
    help="Your Perplexity AI API key."
)
# Model picker; when the stored selection is not in PERPLEXITY_MODELS,
# fall back to index 4.
st.session_state.selected_perplexity_model = st.sidebar.selectbox(
    "Select Perplexity Model:",
    options=PERPLEXITY_MODELS,
    index=PERPLEXITY_MODELS.index(st.session_state.selected_perplexity_model) if st.session_state.selected_perplexity_model in PERPLEXITY_MODELS else 4 # Default to 'sonar'
)
|
| 353 |
|
| 354 |
+
|
| 355 |
st.sidebar.markdown("---")
|
| 356 |
st.sidebar.header("Scraping Controls")
|
| 357 |
selected_league_keys = st.sidebar.multiselect(
|
|
|
|
| 361 |
default=[]
|
| 362 |
)
|
| 363 |
|
| 364 |
+
if st.sidebar.button("Scrape Player Stats", key="scrape_player_btn"):
|
| 365 |
if selected_league_keys: scrape_player_stats_st(selected_league_keys)
|
| 366 |
+
else: st.sidebar.warning("Select leagues for player stats.")
|
| 367 |
|
| 368 |
+
if st.sidebar.button("Scrape Squad Stats", key="scrape_squad_btn"):
|
| 369 |
if selected_league_keys: scrape_squad_stats_st(selected_league_keys)
|
| 370 |
+
else: st.sidebar.warning("Select leagues for squad stats.")
|
| 371 |
|
| 372 |
+
if st.sidebar.button("Scrape Fixtures", key="scrape_fixture_btn"):
|
| 373 |
if selected_league_keys: scrape_fixtures_st(selected_league_keys)
|
| 374 |
+
else: st.sidebar.warning("Select leagues for fixtures.")
|
| 375 |
|
| 376 |
st.sidebar.markdown("---")
|
| 377 |
st.sidebar.header("View Scraped Data")
|
|
|
|
| 383 |
|
| 384 |
# --- Main Content Area ---
|
| 385 |
if display_league_key:
|
| 386 |
+
tab1, tab2, tab3 = st.tabs([f"Player Stats ({LEAGUES[display_league_key]['name']})",
|
| 387 |
+
f"Squad Stats ({LEAGUES[display_league_key]['name']})",
|
| 388 |
+
f"Fixtures ({LEAGUES[display_league_key]['name']})"])
|
| 389 |
with tab1:
|
|
|
|
| 390 |
if display_league_key in st.session_state.player_stats_data:
|
| 391 |
st.dataframe(st.session_state.player_stats_data[display_league_key])
|
| 392 |
else:
|
| 393 |
st.info("No player stats data loaded. Scrape first.")
|
| 394 |
with tab2:
|
|
|
|
| 395 |
if display_league_key in st.session_state.squad_stats_data:
|
| 396 |
st.dataframe(st.session_state.squad_stats_data[display_league_key])
|
| 397 |
else:
|
| 398 |
st.info("No squad stats data loaded. Scrape first.")
|
| 399 |
with tab3:
|
|
|
|
| 400 |
if display_league_key in st.session_state.fixtures_data:
|
| 401 |
st.dataframe(st.session_state.fixtures_data[display_league_key])
|
| 402 |
else:
|
|
|
|
| 409 |
|
| 410 |
# --- 1. Player Comparison Tool ---
st.subheader("1. Player Comparison (Local Data)")
col1_pc, col2_pc, col3_pc = st.columns([1,2,2])
# Only offer leagues whose player-stats scrape produced a non-empty frame.
pc_league_options = [""] + [k for k in st.session_state.player_stats_data.keys() if not st.session_state.player_stats_data[k].empty]
pc_league = col1_pc.selectbox("League:", options=pc_league_options, format_func=lambda k: LEAGUES[k]['name'] if k else "Select", key="pc_league_select")

pc_player1_name = ""
pc_player2_name = ""

# With scraped data available, offer exact player names in a selectbox;
# otherwise fall back to free-text entry.
if pc_league and pc_league in st.session_state.player_stats_data:
    player_list = sorted(st.session_state.player_stats_data[pc_league]['Player'].unique())
    pc_player1_name = col2_pc.selectbox("Player 1 Name:", options=[""] + player_list, key="pc_p1_select")
    pc_player2_name = col3_pc.selectbox("Player 2 Name:", options=[""] + player_list, key="pc_p2_select")
else:
    pc_player1_name = col2_pc.text_input("Player 1 Name (Type if no league selected):", key="pc_p1_text")
    pc_player2_name = col3_pc.text_input("Player 2 Name (Type if no league selected):", key="pc_p2_text")
|
| 426 |
+
|
| 427 |
|
| 428 |
if st.button("Compare Players (Local)", key="compare_local_btn"):
|
|
|
|
| 429 |
if pc_league and pc_player1_name and pc_player2_name:
|
| 430 |
if pc_league in st.session_state.player_stats_data:
|
| 431 |
all_players_df = st.session_state.player_stats_data[pc_league]
|
| 432 |
+
# Exact match from selectbox, or contains if text input was used (though selectbox is preferred now)
|
| 433 |
+
player1_data = all_players_df[all_players_df['Player'] == pc_player1_name]
|
| 434 |
+
player2_data = all_players_df[all_players_df['Player'] == pc_player2_name]
|
| 435 |
|
| 436 |
if not player1_data.empty:
|
| 437 |
st.write(f"**Stats for {pc_player1_name}:**")
|
|
|
|
| 445 |
else:
|
| 446 |
st.warning(f"Could not find data for player: {pc_player2_name} in {LEAGUES[pc_league]['name']}")
|
| 447 |
else:
|
| 448 |
+
st.error(f"Player stats data for {LEAGUES[pc_league]['name']} not loaded or is empty. Please scrape first.")
|
| 449 |
else:
|
| 450 |
+
st.warning("Please select a league and two player names for comparison.")
|
| 451 |
|
| 452 |
|
| 453 |
# --- 2. Fixture Analysis (Local Data) ---
st.subheader("2. Fixture Analysis (Local Data)")
col1_fa, col2_fa, col3_fa = st.columns([1,2,2])
# Only offer leagues whose fixtures scrape produced a non-empty frame.
fa_league_options = [""] + [k for k in st.session_state.fixtures_data.keys() if not st.session_state.fixtures_data[k].empty]
fa_league = col1_fa.selectbox("League:", options=fa_league_options, format_func=lambda k: LEAGUES[k]['name'] if k else "Select", key="fa_league_select")

fa_home_team = ""
fa_away_team = ""

if fa_league and fa_league in st.session_state.fixtures_data:
    # Get unique team names from both Home and Away columns
    home_teams = st.session_state.fixtures_data[fa_league]['Home'].unique()
    away_teams = st.session_state.fixtures_data[fa_league]['Away'].unique()
    all_teams = sorted(list(set(list(home_teams) + list(away_teams))))
    fa_home_team = col2_fa.selectbox("Home Team:", options=[""] + all_teams, key="fa_home_select")
    fa_away_team = col3_fa.selectbox("Away Team:", options=[""] + all_teams, key="fa_away_select")
else:
    fa_home_team = col2_fa.text_input("Home Team (Type if no league selected):", key="fa_home_text")
    fa_away_team = col3_fa.text_input("Away Team (Type if no league selected):", key="fa_away_text")


if st.button("Analyze Fixture (Local)", key="analyze_local_btn"):
    if fa_league and fa_home_team and fa_away_team:
        if fa_league in st.session_state.fixtures_data:
            all_fixtures_df = st.session_state.fixtures_data[fa_league].copy() # Use a copy
            # Normalise once for the case-insensitive team matching below.
            home_team_norm = fa_home_team.strip().lower()
            away_team_norm = fa_away_team.strip().lower()

            # Ensure 'Date' column is suitable for sorting (already converted to YYYY-MM-DD string)
            # If 'Date_parsed' exists and is datetime, use it for sorting then drop
            if 'Date_parsed' in all_fixtures_df.columns:
                all_fixtures_df = all_fixtures_df.sort_values(by='Date_parsed', ascending=False)
            elif 'Date' in all_fixtures_df.columns:
                all_fixtures_df = all_fixtures_df.sort_values(by='Date', ascending=False)


            # Head-to-head in either venue arrangement; '&' binds tighter than
            # '|', so this reads (home==A & away==B) | (home==B & away==A).
            h2h_matches = all_fixtures_df[
                (all_fixtures_df['Home'].str.lower() == home_team_norm) & (all_fixtures_df['Away'].str.lower() == away_team_norm) |
                (all_fixtures_df['Home'].str.lower() == away_team_norm) & (all_fixtures_df['Away'].str.lower() == home_team_norm)
            ]
            st.write(f"**Head-to-Head between {fa_home_team} and {fa_away_team}:**")
            if not h2h_matches.empty:
                st.dataframe(h2h_matches) # Already sorted by date
            else:
                st.info("No H2H matches found in the scraped data.")
|
| 498 |
|
| 499 |
+
def get_form_df(team_name_norm, all_fixtures_sorted_df, num_matches=5):
    """Return the team's most recent *num_matches* completed fixtures.

    *all_fixtures_sorted_df* is expected to already be sorted newest-first
    and *team_name_norm* to be lower-cased. A fixture counts as played when
    its 'HomeGoals' value is present.
    """
    home_lower = all_fixtures_sorted_df['Home'].str.lower()
    away_lower = all_fixtures_sorted_df['Away'].str.lower()
    involves_team = (home_lower == team_name_norm) | (away_lower == team_name_norm)
    team_matches = all_fixtures_sorted_df[involves_team]
    # Drop fixtures without a recorded result (missing 'HomeGoals').
    has_result = team_matches['HomeGoals'].notna()
    return team_matches[has_result].head(num_matches)
|
| 507 |
|
| 508 |
            # Recent form for each side: last five completed fixtures taken
            # from the newest-first sorted frame via get_form_df.
            st.write(f"**Recent Form for {fa_home_team} (last 5 played):**")
            home_form_df = get_form_df(home_team_norm, all_fixtures_df)
            if not home_form_df.empty: st.dataframe(home_form_df)
            else: st.info(f"No recent played matches found for {fa_home_team}.")

            st.write(f"**Recent Form for {fa_away_team} (last 5 played):**")
            away_form_df = get_form_df(away_team_norm, all_fixtures_df)
            if not away_form_df.empty: st.dataframe(away_form_df)
            else: st.info(f"No recent played matches found for {fa_away_team}.")
        else:
            st.error(f"Fixtures data for {LEAGUES[fa_league]['name']} not loaded or is empty. Please scrape first.")
    else:
        st.warning("Please select a league and enter/select home & away team names for analysis.")
|
| 521 |
|
| 522 |
# --- 3. Visualization Data (Local Data) ---
st.subheader("3. Visualization Data (Example: Top Scorers - Local Data)")
col1_vd, col2_vd = st.columns(2)
# Only offer leagues whose player-stats scrape produced a non-empty frame.
vd_league_options = [""] + [k for k in st.session_state.player_stats_data.keys() if not st.session_state.player_stats_data[k].empty]
vd_league = col1_vd.selectbox("League:", options=vd_league_options, format_func=lambda k: LEAGUES[k]['name'] if k else "Select", key="vd_league_select")

if st.button("Show Top Scorers (Local)", key="top_scorers_local_btn"):
    if vd_league:
        if vd_league in st.session_state.player_stats_data:
            player_df = st.session_state.player_stats_data[vd_league].copy()

            # Ensure 'Gls' and 'Ast' columns exist and are numeric
            if 'Gls' not in player_df.columns or 'Ast' not in player_df.columns:
                st.error(f"Required columns 'Gls' or 'Ast' not found in player stats for {LEAGUES[vd_league]['name']}.")
            else:
                # Coerce to numeric; non-numeric cells become 0 so sorting works.
                player_df['Gls'] = pd.to_numeric(player_df['Gls'], errors='coerce').fillna(0)
                player_df['Ast'] = pd.to_numeric(player_df['Ast'], errors='coerce').fillna(0)

                # Rank by goals, breaking ties on assists; keep the top ten.
                top_scorers = player_df.sort_values(by=['Gls', 'Ast'], ascending=[False, False]).head(10)

                st.write(f"**Top 10 Scorers Data for {LEAGUES[vd_league]['name']}:**")
                st.dataframe(top_scorers[['Player', 'Squad', 'Gls', 'Ast']])
                if not top_scorers.empty and 'Player' in top_scorers.columns:
                    st.write("**Chart: Goals & Assists by Top Scorers**")
                    chart_data = top_scorers.set_index('Player')[['Gls', 'Ast']]
                    st.bar_chart(chart_data)
        else:
            st.error(f"Player stats data for {LEAGUES[vd_league]['name']} not loaded or is empty. Please scrape first.")
    else:
        st.warning("Please select a league for visualization data.")
|
| 552 |
|
|
|
|
| 555 |
|
| 556 |
# --- 4. Fixture Report via Perplexity ---
|
| 557 |
st.subheader("4. Fixture Report (via Perplexity AI)")
|
| 558 |
+
fr_home_team = st.text_input("Home Team (for Perplexity Report):", key="fr_home_pplx")
|
| 559 |
+
fr_away_team = st.text_input("Away Team (for Perplexity Report):", key="fr_away_pplx")
|
| 560 |
+
fr_match_date = st.text_input("Match Date (e.g., YYYY-MM-DD) (for Perplexity Report):", key="fr_date_pplx", placeholder="YYYY-MM-DD")
|
| 561 |
|
| 562 |
if st.button("Get Fixture Report from Perplexity", key="fr_perplexity_btn"):
|
| 563 |
if fr_home_team and fr_away_team and fr_match_date:
|
|
|
|
| 573 |
"4. Brief Tactical Outlook or Prediction (optional, if confident).\n"
|
| 574 |
"Prioritize information from reputable football sources. Be objective."
|
| 575 |
)
|
| 576 |
+
report = get_perplexity_response(
|
| 577 |
+
st.session_state.perplexity_api_key,
|
| 578 |
+
st.session_state.selected_perplexity_model,
|
| 579 |
+
prompt,
|
| 580 |
+
"You are a football analyst providing pre-match reports."
|
| 581 |
+
)
|
| 582 |
if report:
|
| 583 |
st.markdown("**Perplexity AI Fixture Report:**")
|
| 584 |
st.markdown(report)
|
|
|
|
| 587 |
|
| 588 |
# --- 5. Custom Query via Perplexity ---
st.subheader("5. Custom Query (via Perplexity AI)")
custom_query_text = st.text_area("Enter your football-related question:", height=100, key="custom_q_pplx")

if st.button("Ask Perplexity AI", key="custom_q_btn"):
    if custom_query_text:
        if not st.session_state.perplexity_api_key:
            st.error("Perplexity API Key is not set in the sidebar.")
        else:
            # Uses get_perplexity_response's default system message.
            answer = get_perplexity_response(
                st.session_state.perplexity_api_key,
                st.session_state.selected_perplexity_model,
                custom_query_text
            )
            # get_perplexity_response returns None on failure (and reports
            # the error itself), so only render a real answer.
            if answer:
                st.markdown("**Perplexity AI Answer:**")
                st.markdown(answer)
|
|
|
|
| 607 |
|
| 608 |
|
| 609 |
# Footer: visual divider plus a reminder that the API key is session-only.
st.markdown("---")
st.caption("Streamlit test app. API keys are not stored after session.")
|