Diego Marroquin committed on
Commit
60a59c1
·
1 Parent(s): fc8b48e

Debugging

Browse files
Files changed (1) hide show
  1. main.py +2 -77
main.py CHANGED
@@ -18,7 +18,6 @@ eventually allow connection between the database and the frontend
18
  # Connect to MongoDB
19
  # For some reason none of this works when im connected to VPN
20
 
21
-
22
  app = Flask(__name__)
23
  api = Api(app, version='1.0',
24
  title='Haya Energy NucPy API',
@@ -86,8 +85,6 @@ def convert_to_json(item):
86
  return item
87
  # --------------------------------------------------------------------------------------- #
88
 
89
- # The idea of this function is to sum the total availability for each day of interest
90
- # This is already done in the Excel so it might be useful to check
91
  # Function gives the total of the data. When printed as dataframe/excel,
92
  # Will give a final row with the total for each plant and the total overall
93
  def add_total(data):
@@ -136,14 +133,6 @@ def get_oauth():
136
  # the argument past_photo is a boolean (True, False) that indicates if we want to make a photo from the past or not
137
  # However, the past_photo part and past_date is not yet implemented.
138
  def get_unavailabilities(usr_start_date, usr_end_date):
139
- # This should be changed in the case of getting a past_photo because many of the rows that are relevant for that
140
- # past photo will not be ACTIVE anymore.
141
- # unav_status = ['ACTIVE', 'INACTIVE']
142
- # This could also be changed. Currently it means that if we call the API with start_date=01/01/2023 and end_date=01/02/2023,
143
- # it will return all the records of unavailabilities that have been updated between the two dates.
144
- # date_type = 'UPDATED_DATE'
145
- # date_type APPLICATION_DATE gets all unavailabilities with predictions in the defined dates, so that
146
- # we can get an unavailability that has updated_date outside the defined dates for start_date and end_date
147
  oauth = get_oauth()
148
  print("Get Oauth done")
149
  date_type = 'APPLICATION_DATE'
@@ -224,22 +213,9 @@ def get_unavailabilities(usr_start_date, usr_end_date):
224
  # --------------------------------------------------------------------------------------- #
225
 
226
 
227
- # this function does the proper analysis of the data
228
- # It takes the user, password, host, to connect to the mongodb database and get
229
- # the data to clean from the database from database and collection
230
- # Create a condition that makes it so it only takes the ACTIVE when nucmonitor, and
231
- # all (INACTIVE, ACTIVE) when photo_date
232
- # nuc_monitor will always take the photo_date and past_date as inputs, even when photo_date == False. In case False, past_date == 0 or None
233
  def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_date, past_date):
234
  # # Slightly changed metadata to fit the data from the RTE API: ST-LAURENT B 2 --> ST LAURENT 2, ....
235
 
236
- # --------------------------------------------- #
237
- # photo_date = False
238
-
239
- # file_path = "/Users/diegomarroquin/HayaEnergy/data/plants_metadata.json"
240
-
241
- # with open(file_path, "r") as file:
242
- # plants_metadata = json.load(file)
243
  plants_metadata = {"BELLEVILLE 1": 1310.0, "BELLEVILLE 2": 1310.0, "BLAYAIS 1": 910.0, "BLAYAIS 2": 910.0,
244
  "BLAYAIS 3": 910.0, "BLAYAIS 4": 910.0, "BUGEY 2": 910.0, "BUGEY 3": 910.0, "BUGEY 4": 880.0,
245
  "BUGEY 5": 880.0, "CATTENOM 1": 1300.0, "CATTENOM 2": 1300.0, "CATTENOM 3": 1300.0,
@@ -255,11 +231,6 @@ def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_d
255
  "ST LAURENT 2": 915.0, "TRICASTIN 1": 915.0, "TRICASTIN 2": 915.0, "TRICASTIN 3": 915.0,
256
  "TRICASTIN 4": 915.0, "FESSENHEIM 1": 880.0, "FESSENHEIM 2": 880.0}
257
 
258
-
259
- # Get raw data from database and the RTE
260
- # oauth = get_oauth()
261
-
262
-
263
  # --------------------- INITIAL DATA CLEANING FOR RTE DATA ------------------------ #
264
  unav_API = rte_data.json()
265
  print(unav_API)
@@ -298,18 +269,6 @@ def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_d
298
 
299
  # --------------------- INITIAL DATA CLEANING FOR MONGO DATA ------------------------ #
300
 
301
-
302
-
303
- # mongo_data = mongo_json_data.json()
304
-
305
- # # Specify the file path
306
- # file_path = "/Users/diegomarroquin/HayaEnergy/Nucmonitor_MVP/NucPy_v0.2/testing/test_data3.txt"
307
-
308
- # Open the file in write mode
309
- with open(file_path, 'w') as file:
310
- for item in mongo_data:
311
- file.write("%s" % item)
312
-
313
  # # Create a DataFrame
314
  mongo_df = pd.DataFrame(mongo_data)
315
 
@@ -322,22 +281,7 @@ def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_d
322
  # Drop the original column
323
  mongo_df_result.drop(columns=['generation_unavailabilities'], inplace=True)
324
  mongo_df_columns = mongo_df_result.columns
325
- # print(mongo_df_columns)
326
- # print(mongo_df_result)
327
- # print(mongo_df_result["values"])
328
- # # Unpack values column
329
- # # mongo_df2 = mongo_df_result.copy().apply(unpack_values, axis=1)
330
- # mongo_df_values_unpacked = pd.json_normalize(mongo_df_result['values'])
331
- # mongo_df2 = pd.concat([mongo_df_result, mongo_df_values_unpacked], axis=1)
332
- # print(mongo_df2.columns)
333
- # print(mongo_df2)
334
- # # mongo_df2 = pd.concat([mongo_df_result, pd.json_normalize(mongo_df_result['values'])], axis=1)
335
- # # mongo_df2 = pd.concat([mongo_df2, pd.json_normalize(mongo_df2['unit'])], axis=1)
336
- # # mongo_df2 = mongo_df.copy().apply(unpack_values, axis=1)
337
- # # mongo_df2 = mongo_df_result.copy()
338
- # mongo_df2.drop(columns=["values"], inplace=True)
339
- # mongo_df2.drop(0, axis=1, inplace=True)
340
- # Unpack values using apply() and lambda functions
341
  mongo_df_result['start_date'] = mongo_df_result['values'].apply(lambda x: x[0]['start_date'])
342
  mongo_df_result['end_date'] = mongo_df_result['values'].apply(lambda x: x[0]['end_date'])
343
  mongo_df_result['available_capacity'] = mongo_df_result['values'].apply(lambda x: x[0]['available_capacity'])
@@ -394,7 +338,6 @@ def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_d
394
  filtered_id_df.drop_duplicates(subset='identifier', keep='last', inplace=True)
395
  filtered_id_df = filtered_id_df.copy().reset_index(drop=True)
396
 
397
-
398
  # This filter should take all the dates with unavs that include days with unavs in the range of the start and end date
399
 
400
  filtered_df = filtered_id_df.copy()[(filtered_id_df.copy()['start_date'] <= end_date_str) & (filtered_id_df.copy()['end_date'] >= start_date_str)]
@@ -417,12 +360,6 @@ def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_d
417
  # Turn df into dict for json processing
418
  filtered_unavs = filtered_df3.copy().to_dict(orient='records')
419
 
420
- # file_path = "/Users/diegomarroquin/HayaEnergy/Nucmonitor_MVP/NucPy_v0.2/testing/test_data4.txt"
421
-
422
- # # Open the file in write mode
423
- # with open(file_path, 'w') as file:
424
- # for item in filtered_unavs:
425
- # file.write("%s" % item)
426
  results = {}
427
 
428
  for unav in filtered_unavs:
@@ -560,24 +497,12 @@ def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_d
560
  # print(output_results)
561
  # -------------------------------------------------
562
  if photo_date == False:
563
- # Store the results_plants in MongoDB
564
- database_name = "data" # Specify your database name
565
- collection_name = "filtered" # Specify your collection name
566
- # mongo_store_data(output_results, database_name, collection_name)
567
- # mongo_replace_data(results_plants_total, database_name, "filtered_excel")
568
- # print("Data stored in database")
569
- # mongo_append_data(results_plants, database_name, collection_name)
570
-
571
- # json_data = json.dumps(convert_to_json(output_results))
572
  json_data = json.dumps(output_results)
573
  # print(json_data)
574
  return json_data
575
  else:
576
- database_name = "data" # Specify your database name
577
- collection_name = "photo_date" # Specify your collection name
578
- # mongo_store_data(output_results, database_name, collection_name)
579
 
580
- # json_data = json.dumps(convert_to_json(output_results))
581
  json_data = json.dumps(output_results)
582
  # print(json_data)
583
  return json_data
 
18
  # Connect to MongoDB
19
  # For some reason none of this works when im connected to VPN
20
 
 
21
  app = Flask(__name__)
22
  api = Api(app, version='1.0',
23
  title='Haya Energy NucPy API',
 
85
  return item
86
  # --------------------------------------------------------------------------------------- #
87
 
 
 
88
  # Function gives the total of the data. When printed as dataframe/excel,
89
  # Will give a final row with the total for each plant and the total overall
90
  def add_total(data):
 
133
  # the argument past_photo is a boolean (True, False) that indicates if we want to make a photo from the past or not
134
  # However, the past_photo part and past_date is not yet implemented.
135
  def get_unavailabilities(usr_start_date, usr_end_date):
 
 
 
 
 
 
 
 
136
  oauth = get_oauth()
137
  print("Get Oauth done")
138
  date_type = 'APPLICATION_DATE'
 
213
  # --------------------------------------------------------------------------------------- #
214
 
215
 
 
 
 
 
 
 
216
  def nuc_monitor(rte_data, mongo_json_data, usr_start_date, usr_end_date, photo_date, past_date):
217
  # # Slightly changed metadata to fit the data from the RTE API: ST-LAURENT B 2 --> ST LAURENT 2, ....
218
 
 
 
 
 
 
 
 
219
  plants_metadata = {"BELLEVILLE 1": 1310.0, "BELLEVILLE 2": 1310.0, "BLAYAIS 1": 910.0, "BLAYAIS 2": 910.0,
220
  "BLAYAIS 3": 910.0, "BLAYAIS 4": 910.0, "BUGEY 2": 910.0, "BUGEY 3": 910.0, "BUGEY 4": 880.0,
221
  "BUGEY 5": 880.0, "CATTENOM 1": 1300.0, "CATTENOM 2": 1300.0, "CATTENOM 3": 1300.0,
 
231
  "ST LAURENT 2": 915.0, "TRICASTIN 1": 915.0, "TRICASTIN 2": 915.0, "TRICASTIN 3": 915.0,
232
  "TRICASTIN 4": 915.0, "FESSENHEIM 1": 880.0, "FESSENHEIM 2": 880.0}
233
 
 
 
 
 
 
234
  # --------------------- INITIAL DATA CLEANING FOR RTE DATA ------------------------ #
235
  unav_API = rte_data.json()
236
  print(unav_API)
 
269
 
270
  # --------------------- INITIAL DATA CLEANING FOR MONGO DATA ------------------------ #
271
 
 
 
 
 
 
 
 
 
 
 
 
 
272
  # # Create a DataFrame
273
  mongo_df = pd.DataFrame(mongo_data)
274
 
 
281
  # Drop the original column
282
  mongo_df_result.drop(columns=['generation_unavailabilities'], inplace=True)
283
  mongo_df_columns = mongo_df_result.columns
284
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
285
  mongo_df_result['start_date'] = mongo_df_result['values'].apply(lambda x: x[0]['start_date'])
286
  mongo_df_result['end_date'] = mongo_df_result['values'].apply(lambda x: x[0]['end_date'])
287
  mongo_df_result['available_capacity'] = mongo_df_result['values'].apply(lambda x: x[0]['available_capacity'])
 
338
  filtered_id_df.drop_duplicates(subset='identifier', keep='last', inplace=True)
339
  filtered_id_df = filtered_id_df.copy().reset_index(drop=True)
340
 
 
341
  # This filter should take all the dates with unavs that include days with unavs in the range of the start and end date
342
 
343
  filtered_df = filtered_id_df.copy()[(filtered_id_df.copy()['start_date'] <= end_date_str) & (filtered_id_df.copy()['end_date'] >= start_date_str)]
 
360
  # Turn df into dict for json processing
361
  filtered_unavs = filtered_df3.copy().to_dict(orient='records')
362
 
 
 
 
 
 
 
363
  results = {}
364
 
365
  for unav in filtered_unavs:
 
497
  # print(output_results)
498
  # -------------------------------------------------
499
  if photo_date == False:
500
+
 
 
 
 
 
 
 
 
501
  json_data = json.dumps(output_results)
502
  # print(json_data)
503
  return json_data
504
  else:
 
 
 
505
 
 
506
  json_data = json.dumps(output_results)
507
  # print(json_data)
508
  return json_data