"""Clean and impute 5-minute traffic-sensor time series.

For every sensor id listed in ``id_info.json`` the script:

1. rounds ``DATA_AS_OF`` timestamps to 5-minute marks and resolves duplicates,
2. linearly interpolates ``SPEED``/``TRAVEL_TIME`` across gaps of 5 min .. 2 h,
3. zero-fills gaps longer than 2 h (treated as sensor downtime),
4. detects downtime windows (long runs of ``SPEED == 0``) that are not fully
   contained in the 22:00-06:00 night window,

and writes the imputed frames plus downtime metadata to ``./impute_data/``.
"""

import json
import os  # noqa: F401  (kept from original file)

import numpy as np  # noqa: F401  (kept from original file)
import pandas as pd
from matplotlib import pyplot as plt  # noqa: F401  (kept from original file)
from tqdm import tqdm

# Silence pandas' chained-assignment warning; all writes below go through
# df.loc so the warning should no longer fire, but keep the original setting.
pd.options.mode.chained_assignment = None

# Gaps longer than this many minutes are treated as sensor downtime rather
# than recoverable data loss.
THRESHOLD_MIN = 120
# Native sampling period: one row every 5 minutes.
STEP_SEC = 5 * 60


def _gap_index(df, min_sec, max_sec=None):
    """Return the index labels of rows whose gap to the previous row is in
    (min_sec, max_sec] seconds (or simply > min_sec when max_sec is None)."""
    gaps = df['DATA_AS_OF'].diff().dt.total_seconds()
    mask = gaps > min_sec
    if max_sec is not None:
        mask &= gaps <= max_sec
    return mask[mask].index


def _dedupe_timestamps(df):
    """Resolve rows that share the same rounded 5-minute timestamp.

    If a duplicate is followed by a 10-minute hole, the duplicate is shifted
    forward 5 minutes (it was presumably rounded the wrong way); otherwise the
    duplicated row is dropped.  A duplicate in the very last row is dropped.

    Returns a frame with a fresh RangeIndex and strictly increasing
    DATA_AS_OF; raises ValueError if duplicates survive.
    """
    # Positional and label indexing must agree for the loop below.
    df = df.reset_index(drop=True)
    dup_mask = df['DATA_AS_OF'].diff().dt.total_seconds() == 0
    to_remove = []
    for ind in dup_mask[dup_mask].index:
        if ind + 1 == len(df):
            # Last row: no following slot to shift into, just drop it.
            to_remove.append(ind)
        elif df['DATA_AS_OF'].iloc[ind] + pd.Timedelta('10min') == df['DATA_AS_OF'].iloc[ind + 1]:
            # A 10-minute hole follows: slide this duplicate into the empty
            # slot.  df.loc (not chained .iloc assignment) guarantees the
            # write actually lands in the frame.
            df.loc[ind, 'DATA_AS_OF'] = df['DATA_AS_OF'].iloc[ind] + pd.Timedelta('5min')
        else:
            to_remove.append(ind)
    df = df.drop(to_remove).reset_index(drop=True)
    if (df['DATA_AS_OF'].diff().dt.total_seconds() == 0).any():
        raise ValueError('There are still duplicate timestamps')
    return df


def _linear_impute(df, start_idx, end_idx):
    """Build rows linearly interpolating SPEED and TRAVEL_TIME on the
    5-minute grid strictly between rows start_idx and end_idx."""
    start_time = df['DATA_AS_OF'][start_idx]
    end_time = df['DATA_AS_OF'][end_idx]
    start_speed, end_speed = df['SPEED'][start_idx], df['SPEED'][end_idx]
    start_tt, end_tt = df['TRAVEL_TIME'][start_idx], df['TRAVEL_TIME'][end_idx]
    n_steps = (end_time - start_time).total_seconds() // STEP_SEC
    return [
        {
            'DATA_AS_OF': start_time + pd.Timedelta(minutes=5 * j),
            'SPEED': start_speed + (end_speed - start_speed) * j / n_steps,
            'TRAVEL_TIME': start_tt + (end_tt - start_tt) * j / n_steps,
        }
        for j in range(1, int(n_steps))
    ]


def _zero_impute(df, start_idx, end_idx):
    """Build zero-valued filler rows on the 5-minute grid strictly between
    rows start_idx and end_idx (used for gaps too long to interpolate)."""
    start_time = df['DATA_AS_OF'][start_idx]
    n_steps = int((df['DATA_AS_OF'][end_idx] - start_time).total_seconds() // STEP_SEC)
    return [
        {'DATA_AS_OF': start_time + pd.Timedelta(minutes=5 * j), 'SPEED': 0, 'TRAVEL_TIME': 0}
        for j in range(1, n_steps)
    ]


def _fill_gaps(df, gap_index, impute_fn):
    """Insert impute_fn-generated rows ahead of every index in gap_index and
    return the frame re-sorted on DATA_AS_OF with a fresh RangeIndex.

    Note: the original code fanned this out over a 100-thread pool, but the
    work is pure-Python/pandas and CPU-bound, so a plain loop is equivalent
    (and produces the same rows in the same order).
    """
    new_rows = []
    for i in gap_index:
        new_rows.extend(impute_fn(df, i - 1, i))
    if new_rows:
        df = pd.concat([df, pd.DataFrame(new_rows)], ignore_index=True)
    return df.sort_values('DATA_AS_OF').reset_index(drop=True)


def _is_night(ts):
    """True when ts falls in the 22:00-06:00 overnight window."""
    return ts.hour >= 22 or ts.hour < 6


def _find_sensor_downtime(df, threshold_steps):
    """Detect long SPEED==0 runs and return them as a JSON-ready dict.

    A downtime window starts where SPEED drops to zero and ends at the last
    zero before it rises again; windows shorter than threshold_steps rows or
    lying entirely inside the night window are discarded.

    NOTE(review): pairing drop/rise indices with zip assumes the series does
    not *begin* at zero speed (otherwise the first rise has no matching drop
    and all pairs shift) — same assumption as the original code; confirm
    against the data.
    """
    zero = df['SPEED'] == 0
    drop_to_zero = zero & (df['SPEED'].diff() < 0)       # first zero of a run
    rise_from_zero = zero & (df['SPEED'].diff(-1) < 0)   # last zero of a run
    windows = []
    for start, end in zip(drop_to_zero[drop_to_zero].index,
                          rise_from_zero[rise_from_zero].index):
        if end - start > threshold_steps:
            windows.append({
                'time': (df['DATA_AS_OF'][start], df['DATA_AS_OF'][end]),
                'index': (start, end),
            })
    # Windows fully inside 22:00-06:00 are routine overnight shutdowns, not
    # genuine downtime.
    windows = [
        w for w in windows
        if not (_is_night(w['time'][0]) and _is_night(w['time'][1]))
    ]
    # Stringify timestamps for JSON serialisation and renumber keys 0..n-1.
    for w in windows:
        w['time'] = (str(w['time'][0]), str(w['time'][1]))
    return dict(enumerate(windows))


def _process_sensor(df):
    """Run the full clean/impute pipeline on one sensor's raw frame.

    Returns (imputed_frame, downtime_dict); raises ValueError if any gap
    survives a fill pass.
    """
    df = df.copy()
    df['DATA_AS_OF'] = pd.to_datetime(df['DATA_AS_OF']).dt.round('5min')
    df = _dedupe_timestamps(df)

    threshold_sec = 60 * THRESHOLD_MIN
    # Short gaps (5 min .. 2 h): linear interpolation between the endpoints.
    df = _fill_gaps(df, _gap_index(df, STEP_SEC, threshold_sec), _linear_impute)
    if len(_gap_index(df, STEP_SEC, threshold_sec)):
        raise ValueError('There are still missing gaps')

    # Long gaps (> 2 h): assume the sensor was down and fill with zeros.
    df = _fill_gaps(df, _gap_index(df, threshold_sec), _zero_impute)
    if len(_gap_index(df, STEP_SEC)):
        raise ValueError('There are still missing gaps')

    downtime = _find_sensor_downtime(df, THRESHOLD_MIN // 5)
    return df, downtime


def main():
    """Load every sensor frame, impute it, and write results to disk."""
    with open('id_info.json') as f:
        id_info = json.load(f)
    id_list = list(id_info.keys())

    print('Reading data...')
    dfs = {
        sensor_id: pd.read_parquet(f'./slim_data/id_{sensor_id}.parquet')
        for sensor_id in id_list
    }

    sensor_downtimes = {}
    for sensor_id in tqdm(id_list):
        tqdm.write(f"Preparing ID: {sensor_id}")
        df, downtime = _process_sensor(dfs[sensor_id])

        df.to_parquet(f'./impute_data/id_{sensor_id}.parquet')
        sensor_downtimes[sensor_id] = downtime
        id_info[sensor_id]['sensor_downtime'] = downtime
        id_info[sensor_id]['len'] = len(df)

    with open('./impute_data/id_info_imputed.json', 'w') as f:
        json.dump(id_info, f, indent=4)
    with open('./impute_data/sensor_downtimes.json', 'w') as f:
        json.dump(sensor_downtimes, f, indent=4)
    print('Done!')


if __name__ == '__main__':
    main()