import pandas as pd
import json
import os
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor
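
# Clean and impute per-sensor 5-minute time series: round timestamps onto the
# grid, resolve duplicates, linearly interpolate short gaps, zero-fill long
# outages, and record the detected sensor downtime for every ID.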
pd.options.mode.chained_assignment = None  # silence pandas SettingWithCopy noise from the edits below

with open('expanded_id_info.json') as f:
    id_info = json.load(f)
id_list = list(id_info.keys())

print('Reading data...')
dfs = {}
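
# Parquet loading is I/O-bound, so a wide thread pool reads all IDs concurrently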
def read_parquet(id):
    return id, pd.read_parquet(f'./slim_data/id_{id}.parquet')

with ThreadPoolExecutor(max_workers=100) as executor:
    futures = [executor.submit(read_parquet, id) for id in id_list]
    for future in tqdm(futures):
        id, df = future.result()
        dfs[id] = df

os.makedirs('./expanded_impute_data', exist_ok=True)  # output dir must exist before to_parquet below

sensor_downtimes = {}
for id in tqdm(id_list):
    tqdm.write(f"Preparing ID: {id}")

    df = dfs[str(id)].copy()
    df = df.reset_index(drop=True)  # the positional logic below assumes a clean RangeIndex
    df['DATA_AS_OF'] = pd.to_datetime(df['DATA_AS_OF'])
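
    # Snap every timestamp onto the 5-minute grid so gaps are exact multiples of 5 minutes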
    df['DATA_AS_OF'] = df['DATA_AS_OF'].dt.round('5min')
    tqdm.write(f"De-duplicating ID: {id}")
    # A zero diff from the previous row marks a duplicate timestamp created by rounding
    missing_gap0 = df['DATA_AS_OF'].diff().dt.total_seconds() == 0
    missing_gap0 = missing_gap0[missing_gap0].index
    to_remove = []
    for ind in missing_gap0:
        if ind + 1 == len(df):
            # Duplicate at the very end of the frame: nowhere to shift it, so drop it
            to_remove.append(ind)
        elif df['DATA_AS_OF'].iloc[ind] + pd.Timedelta('10min') == df['DATA_AS_OF'].iloc[ind + 1]:
            # The following 5-minute slot is free: shift the duplicate forward into it
            # (.loc instead of chained .iloc assignment, which may silently fail to write through)
            df.loc[ind, 'DATA_AS_OF'] = df['DATA_AS_OF'].iloc[ind] + pd.Timedelta('5min')
        else:
            # No free slot next to it: drop the duplicate
            to_remove.append(ind)
    df = df.drop(to_remove)
    df = df.reset_index(drop=True)

    missing_gap0 = df['DATA_AS_OF'].diff().dt.total_seconds() == 0
    missing_gap0 = missing_gap0[missing_gap0].index
    assert len(missing_gap0) == 0, 'There are still duplicate timestamps'
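
    # Gaps longer than 5 minutes but at most threshold_time minutes are treated
    # as transient dropouts and filled by linear interpolation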
    tqdm.write(f"Small gaps ID: {id}")
    threshold_time = 120  # minutes; the boundary between "small" and "large" gaps

    gap_le_threshold = df['DATA_AS_OF'].diff().dt.total_seconds() <= (60 * threshold_time)
    gap_gt_5min = df['DATA_AS_OF'].diff().dt.total_seconds() > (60 * 5)
    missing_gap = gap_le_threshold & gap_gt_5min

    missing_gap = missing_gap[missing_gap].index
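
    # Emit one synthetic row per missing 5-minute slot, with SPEED and TRAVEL_TIME
    # interpolated linearly between the readings at the two ends of the gap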
    def linear_impute(start_idx, end_idx):
        start_time = df['DATA_AS_OF'][start_idx]
        end_time = df['DATA_AS_OF'][end_idx]
        start_speed = df['SPEED'][start_idx]
        end_speed = df['SPEED'][end_idx]
        start_travel_time = df['TRAVEL_TIME'][start_idx]
        end_travel_time = df['TRAVEL_TIME'][end_idx]
        gap = (end_time - start_time).total_seconds()
        steps = int(gap // 300)  # number of 5-minute slots spanned by the gap
        new_rows = []
        for j in range(1, steps):
            new_rows.append({
                'DATA_AS_OF': start_time + pd.Timedelta(f'{j*5}min'),
                'SPEED': start_speed + (end_speed - start_speed) * j / steps,
                'TRAVEL_TIME': start_travel_time + (end_travel_time - start_travel_time) * j / steps
            })
        return new_rows
    with ThreadPoolExecutor(max_workers=100) as executor:
        # Each gap sits between rows i-1 and i
        futures = [executor.submit(linear_impute, i - 1, i) for i in missing_gap]
        results = [future.result() for future in tqdm(futures)]

    # Flatten the per-gap lists and append the synthetic rows
    new_rows = [item for sublist in results for item in sublist]

    new_df = pd.DataFrame(new_rows)
    if len(new_df):  # nothing to append when there were no small gaps
        df = pd.concat([df, new_df], ignore_index=True)

    df = df.sort_values('DATA_AS_OF')
    df = df.reset_index(drop=True)

    # Re-check: no small gaps should remain after interpolation
    gap_le_threshold = df['DATA_AS_OF'].diff().dt.total_seconds() <= (60 * threshold_time)
    gap_gt_5min = df['DATA_AS_OF'].diff().dt.total_seconds() > (60 * 5)
    missing_gap = gap_le_threshold & gap_gt_5min

    missing_gap = missing_gap[missing_gap].index
    assert len(missing_gap) == 0, 'There are still missing gaps'
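
    # Gaps longer than threshold_time are assumed to be sensor outages and are
    # zero-filled, so the downtime scan below can find them as runs of zero SPEED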
    tqdm.write(f"Large gaps ID: {id}")
    missing_gaplarge = df['DATA_AS_OF'].diff().dt.total_seconds() > (60 * threshold_time)

    missing_gaplarge = missing_gaplarge[missing_gaplarge].index

    def zero_impute(start_idx, end_idx):
        start_time = df['DATA_AS_OF'][start_idx]
        end_time = df['DATA_AS_OF'][end_idx]
        gap = (end_time - start_time).total_seconds()
        new_rows = []
        for j in range(1, int(gap // 300)):
            new_rows.append({
                'DATA_AS_OF': start_time + pd.Timedelta(f'{j*5}min'),
                'SPEED': 0,
                'TRAVEL_TIME': 0
            })
        return new_rows

    with ThreadPoolExecutor(max_workers=100) as executor:
        futures = [executor.submit(zero_impute, i - 1, i) for i in missing_gaplarge]
        results = [future.result() for future in tqdm(futures)]

    new_rows = [item for sublist in results for item in sublist]

    new_df = pd.DataFrame(new_rows)
    if len(new_df):  # nothing to append when there were no large gaps
        df = pd.concat([df, new_df], ignore_index=True)

    df = df.sort_values('DATA_AS_OF')
    df = df.reset_index(drop=True)

    # Final check: the series is now one contiguous 5-minute grid
    missing_anygap = df['DATA_AS_OF'].diff().dt.total_seconds() > (60 * 5)
    missing_anygap = missing_anygap[missing_anygap].index
    assert len(missing_anygap) == 0, 'There are still missing gaps'
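
    # A downtime candidate is a run of zero SPEED: it starts where the speed drops
    # to zero and ends where it rises above zero again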
    tqdm.write(f"Sensor downtime ID: {id}")
    zero_speed = df['SPEED'] == 0
    speed_goes_down = df['SPEED'].diff() < 0   # previous reading was higher
    speed_goes_up = df['SPEED'].diff(-1) < 0   # next reading is higher

    speed_goto_zero = zero_speed & speed_goes_down
    speed_goto_zero = speed_goto_zero[speed_goto_zero].index

    speed_gofrom_zero = zero_speed & speed_goes_up
    speed_gofrom_zero = speed_gofrom_zero[speed_gofrom_zero].index
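
    # Pair run starts with run ends; zip() assumes they alternate one-for-one,
    # which holds as long as the series neither begins nor ends inside a zero run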
    threshold_step = threshold_time // 5  # the downtime threshold expressed in 5-minute steps
    sensor_downtime = {}
    i = 0
    for start, end in zip(speed_goto_zero, speed_gofrom_zero):
        if end - start > threshold_step:
            # int() casts keep the indices JSON-serializable (numpy integers are not)
            sensor_downtime[i] = {'time': (df['DATA_AS_OF'][start], df['DATA_AS_OF'][end]),
                                  'index': (int(start), int(end))}
            i += 1
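
    # Ignore runs that start and end inside the overnight window (22:00-06:00);
    # only daytime zero runs are recorded as sensor downtime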
    def check_22_6(time):
        # True if the timestamp falls in the overnight window
        return time.hour >= 22 or time.hour < 6

    for key in list(sensor_downtime.keys()):
        if check_22_6(sensor_downtime[key]['time'][0]) and check_22_6(sensor_downtime[key]['time'][1]):
            del sensor_downtime[key]
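
    # Stringify the Timestamps so the downtime records can be dumped to JSON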
    for key in sensor_downtime.keys():
        sensor_downtime[key]['time'] = (str(sensor_downtime[key]['time'][0]), str(sensor_downtime[key]['time'][1]))

    sensor_downtime = dict(enumerate(sensor_downtime.values()))  # renumber keys 0..n-1 after deletions
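
    # Persist the imputed series and record this ID's downtime metadata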
    df.to_parquet(f'./expanded_impute_data/id_{id}.parquet')
    sensor_downtimes[id] = sensor_downtime
    id_info[str(id)]['sensor_downtime'] = sensor_downtime
    id_info[str(id)]['len'] = len(df)
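
# Write the updated per-ID metadata and the full downtime map next to the imputed data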
with open('./expanded_impute_data/id_info_imputed.json', 'w') as f:
    json.dump(id_info, f, indent=4)

with open('./expanded_impute_data/sensor_downtimes.json', 'w') as f:
    json.dump(sensor_downtimes, f, indent=4)

print('Done!')