Alaiy committed on
Commit
9bfa2d4
·
verified ·
1 Parent(s): b991336

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -7
app.py CHANGED
@@ -1145,6 +1145,7 @@ def fetch_and_store_data_market():
1145
  latest_doc = collection.find_one(sort=[("Reported Date", -1)])
1146
  if latest_doc and "Reported Date" in latest_doc:
1147
  latest_date = latest_doc["Reported Date"]
 
1148
  else:
1149
  latest_date = None
1150
 
@@ -1157,11 +1158,14 @@ def fetch_and_store_data_market():
1157
  to_date = (datetime.now() - timedelta(days=1)).strftime('%d %b %Y')
1158
  from_date_obj = datetime.strptime(from_date, '%d %b %Y')
1159
  to_date_obj = datetime.strptime(to_date, '%d %b %Y')
1160
- if to_date_obj < from_date_obj:
1161
- print("Data already scraped")
 
 
 
1162
  return None
1163
- # Build the URL to be requested
1164
  base_url = "https://agmarknet.gov.in/SearchCmmMkt.aspx"
 
1165
  params = {
1166
  "Tx_Commodity": "11",
1167
  "Tx_State": "0",
@@ -1187,13 +1191,16 @@ def fetch_and_store_data_market():
1187
  }
1188
 
1189
  response = requests.get(api_url, params=scraperapi_params)
1190
-
1191
  if response.status_code == 200:
1192
  soup = BeautifulSoup(response.content, 'html.parser')
 
 
 
1193
  table = soup.find("table", {"class": "tableagmark_new"})
1194
-
1195
  if table:
1196
  headers = [th.get_text(strip=True) for th in table.find_all("th")]
 
1197
  rows = []
1198
 
1199
  for row in table.find_all("tr")[1:]:
@@ -1216,9 +1223,11 @@ def fetch_and_store_data_market():
1216
  else:
1217
  print("No new records to insert.")
1218
  return df
 
 
1219
 
1220
  else:
1221
- print(f"Failed to fetch data with status code: {response.status_code}")
1222
  return None
1223
 
1224
 
@@ -1316,8 +1325,8 @@ if st.session_state.authenticated:
1316
  st.title("🌾 AgriPredict Dashboard")
1317
  if st.button("Get Live Data Feed"):
1318
  fetch_and_store_data()
 
1319
  fetch_and_store_data_market()
1320
- # Top-level radio buttons for switching views
1321
  view_mode = st.radio("", ["Statistics", "Plots", "Predictions", "Exim"], horizontal=True)
1322
 
1323
  if view_mode == "Plots":
 
1145
  latest_doc = collection.find_one(sort=[("Reported Date", -1)])
1146
  if latest_doc and "Reported Date" in latest_doc:
1147
  latest_date = latest_doc["Reported Date"]
1148
+ st.write("Latest Date: ", latest_date)
1149
  else:
1150
  latest_date = None
1151
 
 
1158
  to_date = (datetime.now() - timedelta(days=1)).strftime('%d %b %Y')
1159
  from_date_obj = datetime.strptime(from_date, '%d %b %Y')
1160
  to_date_obj = datetime.strptime(to_date, '%d %b %Y')
1161
+ st.write(to_date_obj)
1162
+ st.write(from_date_obj)
1163
+
1164
+ if to_date_obj <= from_date_obj:
1165
+ st.write("Data already scraped")
1166
  return None
 
1167
  base_url = "https://agmarknet.gov.in/SearchCmmMkt.aspx"
1168
+ st.write(base_url)
1169
  params = {
1170
  "Tx_Commodity": "11",
1171
  "Tx_State": "0",
 
1191
  }
1192
 
1193
  response = requests.get(api_url, params=scraperapi_params)
1194
+ st.write("Response: ", response.status_code)
1195
  if response.status_code == 200:
1196
  soup = BeautifulSoup(response.content, 'html.parser')
1197
+ if soup:
1198
+ st.write("Got html content")
1199
+ st.write(soup.contents)
1200
  table = soup.find("table", {"class": "tableagmark_new"})
 
1201
  if table:
1202
  headers = [th.get_text(strip=True) for th in table.find_all("th")]
1203
+ st.write("Got all headers")
1204
  rows = []
1205
 
1206
  for row in table.find_all("tr")[1:]:
 
1223
  else:
1224
  print("No new records to insert.")
1225
  return df
1226
+ else:
1227
+ st.write("No table found")
1228
 
1229
  else:
1230
+ st.write(f"Failed to fetch data with status code: {response.status_code}")
1231
  return None
1232
 
1233
 
 
1325
  st.title("🌾 AgriPredict Dashboard")
1326
  if st.button("Get Live Data Feed"):
1327
  fetch_and_store_data()
1328
+ st.write("Now executing Market scraper: ")
1329
  fetch_and_store_data_market()
 
1330
  view_mode = st.radio("", ["Statistics", "Plots", "Predictions", "Exim"], horizontal=True)
1331
 
1332
  if view_mode == "Plots":