nakas committed on
Commit
8dc02da
·
verified ·
1 Parent(s): 29413a1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +82 -13
app.py CHANGED
@@ -7,6 +7,7 @@ from datetime import datetime, timedelta
7
  import matplotlib.pyplot as plt
8
  from matplotlib.gridspec import GridSpec
9
  import json
 
10
 
11
  def get_raw_data(station_id):
12
  """
@@ -61,6 +62,60 @@ def get_raw_data(station_id):
61
  traceback.print_exc()
62
  return None
63
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
  def parse_raw_data(data):
65
  """
66
  Parse the raw JSON data into a DataFrame
@@ -115,11 +170,13 @@ def process_weather_data(df):
115
  df['timestamp'] = pd.to_datetime(df['timestamp'])
116
  df['date'] = df['timestamp'].dt.date
117
 
118
- # Convert temperature from C to F
119
- df['temperature'] = (df['temperature'] * 9/5) + 32
 
120
 
121
- # Convert wind speed from m/s to mph
122
- df['wind_speed'] = df['wind_speed'] * 2.237
 
123
 
124
  return df
125
 
@@ -153,7 +210,8 @@ def create_wind_rose(ax, data, title):
153
 
154
  plot_data.loc[:, 'direction_bin'] = pd.cut(plot_data['wind_direction'],
155
  bins=direction_bins,
156
- labels=directions)
 
157
 
158
  wind_stats = plot_data.groupby('direction_bin', observed=True)['wind_speed'].mean()
159
 
@@ -200,10 +258,15 @@ def create_visualizations(df):
200
  ax2.set_xlabel('')
201
  ax2.grid(True)
202
 
203
- # Set y-axis range for snow depth
204
  if 'snowDepth' in df.columns and not df['snowDepth'].isna().all():
205
  ax3.plot(df['timestamp'], df['snowDepth'], linewidth=2)
206
  ax3.set_ylim(0, 80) # Fixed y-axis limit to 80 inches
 
 
 
 
 
207
  ax3.set_title('Snow Depth')
208
  ax3.set_ylabel('Snow Depth (inches)')
209
  ax3.set_xlabel('')
@@ -219,7 +282,7 @@ def create_visualizations(df):
219
  if i < 2:
220
  ax = fig.add_subplot(gs[4, i], projection='polar')
221
  day_data = df[df['date'] == date].copy()
222
- create_wind_rose(ax, day_data, date.strftime('%Y-%m-%d'))
223
  wind_axes.append(ax)
224
 
225
  plt.tight_layout()
@@ -231,7 +294,7 @@ def get_weather_data(station_id, hours):
231
  Main function to get and process weather data
232
  """
233
  try:
234
- # Get raw data
235
  raw_data = get_raw_data(station_id)
236
  if raw_data is None:
237
  return None, "Failed to fetch data"
@@ -241,11 +304,19 @@ def get_weather_data(station_id, hours):
241
  if df is None:
242
  return None, "Failed to parse data"
243
 
244
- # Process data
245
  df = process_weather_data(df)
246
  if df is None:
247
  return None, "Failed to process data"
248
-
 
 
 
 
 
 
 
 
249
  print("\nProcessed data sample:")
250
  print(df.head())
251
 
@@ -291,6 +362,4 @@ with gr.Blocks() as demo:
291
  )
292
 
293
  # Launch the app
294
- demo.launch()
295
-
296
- # requirements.txt
 
7
  import matplotlib.pyplot as plt
8
  from matplotlib.gridspec import GridSpec
9
  import json
10
+ from bs4 import BeautifulSoup
11
 
12
  def get_raw_data(station_id):
13
  """
 
62
  traceback.print_exc()
63
  return None
64
 
65
def scrape_snow_depth():
    """
    Scrape snow depth observations from the NWS timeseries HTML page.

    Returns:
        pd.DataFrame: columns ['timestamp', 'snowDepth']. An empty DataFrame
        is returned on any failure (network error, missing table, missing
        columns) so callers can treat the scrape as best-effort.

    Note: the structure of the HTML page may change over time.
    """
    url = "https://www.weather.gov/wrh/timeseries?site=YCTIM&hours=720&units=english&chart=on&headers=on&obs=tabular&hourly=false&pview=standard&font=12&plot="
    try:
        # Timeout so a hung server cannot block the app indefinitely
        # (requests waits forever by default).
        response = requests.get(url, timeout=30)
        if response.status_code != 200:
            print(f"Failed to fetch HTML page: {response.status_code}")
            return pd.DataFrame()
        soup = BeautifulSoup(response.text, 'html.parser')
        table = soup.find('table')
        if table is None:
            print("No table found on the page.")
            return pd.DataFrame()

        # Extract column headers from the first table row
        header_row = table.find('tr')
        headers = [th.get_text().strip() for th in header_row.find_all('th')]

        # Both columns are required; bail out (best-effort) if either is absent
        if "Time" not in headers or "Snow Depth" not in headers:
            print("Required columns 'Time' or 'Snow Depth' not found in table headers.")
            return pd.DataFrame()

        time_idx = headers.index("Time")
        snow_idx = headers.index("Snow Depth")

        data = []
        rows = table.find_all('tr')[1:]  # Skip header row
        for row in rows:
            cells = row.find_all('td')
            # Skip malformed/short rows instead of raising IndexError
            if len(cells) < max(time_idx, snow_idx) + 1:
                continue
            time_str = cells[time_idx].get_text().strip()
            snow_str = cells[snow_idx].get_text().strip()
            data.append((time_str, snow_str))

        df = pd.DataFrame(data, columns=["Time", "snowDepth"])
        # Convert "Time" column to datetime. Adjust the format if necessary.
        try:
            df["timestamp"] = pd.to_datetime(df["Time"])
        except Exception as e:
            print("Error parsing timestamp from scraped data:", e)
            df["timestamp"] = pd.NaT
        # Non-numeric readings (e.g. a missing-data marker) become NaN
        df["snowDepth"] = pd.to_numeric(df["snowDepth"], errors='coerce')
        print("Scraped snow depth data:")
        print(df.head())
        return df[["timestamp", "snowDepth"]]
    except Exception as e:
        # Best-effort scrape: log and return an empty frame so the caller
        # can continue without snow depth data.
        print(f"Error scraping snow depth: {e}")
        return pd.DataFrame()
118
+
119
  def parse_raw_data(data):
120
  """
121
  Parse the raw JSON data into a DataFrame
 
170
  df['timestamp'] = pd.to_datetime(df['timestamp'])
171
  df['date'] = df['timestamp'].dt.date
172
 
173
+ # Convert temperature from C to F if not null
174
+ if df['temperature'].notna().all():
175
+ df['temperature'] = (df['temperature'] * 9/5) + 32
176
 
177
+ # Convert wind speed from km/h to mph if not null (original unit km/h)
178
+ if df['wind_speed'].notna().all():
179
+ df['wind_speed'] = df['wind_speed'] * 0.621371 # km/h to mph
180
 
181
  return df
182
 
 
210
 
211
  plot_data.loc[:, 'direction_bin'] = pd.cut(plot_data['wind_direction'],
212
  bins=direction_bins,
213
+ labels=directions,
214
+ include_lowest=True)
215
 
216
  wind_stats = plot_data.groupby('direction_bin', observed=True)['wind_speed'].mean()
217
 
 
258
  ax2.set_xlabel('')
259
  ax2.grid(True)
260
 
261
+ # Plot snow depth if available
262
  if 'snowDepth' in df.columns and not df['snowDepth'].isna().all():
263
  ax3.plot(df['timestamp'], df['snowDepth'], linewidth=2)
264
  ax3.set_ylim(0, 80) # Fixed y-axis limit to 80 inches
265
+ else:
266
+ ax3.text(0.5, 0.5, 'No snow depth data available',
267
+ horizontalalignment='center',
268
+ verticalalignment='center',
269
+ transform=ax3.transAxes)
270
  ax3.set_title('Snow Depth')
271
  ax3.set_ylabel('Snow Depth (inches)')
272
  ax3.set_xlabel('')
 
282
  if i < 2:
283
  ax = fig.add_subplot(gs[4, i], projection='polar')
284
  day_data = df[df['date'] == date].copy()
285
+ create_wind_rose(ax, day_data, pd.to_datetime(date).strftime('%Y-%m-%d'))
286
  wind_axes.append(ax)
287
 
288
  plt.tight_layout()
 
294
  Main function to get and process weather data
295
  """
296
  try:
297
+ # Get raw data from API
298
  raw_data = get_raw_data(station_id)
299
  if raw_data is None:
300
  return None, "Failed to fetch data"
 
304
  if df is None:
305
  return None, "Failed to parse data"
306
 
307
+ # Process API data
308
  df = process_weather_data(df)
309
  if df is None:
310
  return None, "Failed to process data"
311
+
312
+ # Scrape snow depth data and merge with API data
313
+ snow_df = scrape_snow_depth()
314
+ if not snow_df.empty:
315
+ df = df.sort_values('timestamp')
316
+ snow_df = snow_df.sort_values('timestamp')
317
+ # Merge using nearest timestamp within a 30-minute tolerance
318
+ df = pd.merge_asof(df, snow_df, on='timestamp', tolerance=pd.Timedelta('30min'), direction='nearest')
319
+
320
  print("\nProcessed data sample:")
321
  print(df.head())
322
 
 
362
  )
363
 
364
  # Launch the app
365
+ demo.launch()