# Gaming blackout detection for PhoneDashboard data: scans raw pings for runtime
# decreases, diagnoses shutdown/blackout events, and reshapes them for analysis.
import os
import pandas as pd
from datetime import timedelta,datetime
from lib.utilities import codebook
from lib.experiment_specs import study_config
from lib.data_helpers.builder_utils import BuilderUtils
from lib.data_helpers import test
from lib.utilities import serialize
class Gaming:
    """Detect, diagnose, and summarize 'gaming' blackout events.

    A blackout is a period where a user's phone appears to have stopped
    tracking (e.g. the phone was shut off), inferred from decreases in the
    app/device runtime counters between consecutive pings.
    """
    gaming_dir = os.path.join("data", "external", "intermediate", "PhoneDashboard", "Gaming")
    events_file = os.path.join(gaming_dir, "Events")
    first_last_file = os.path.join(gaming_dir, "FirstLast")
    diagnosed_file = os.path.join(gaming_dir, "Diagnosed")
    # test artifacts are written to a parallel "intermediate_test" tree
    diagnosed_test_file = diagnosed_file.replace("intermediate", "intermediate_test")
    # diagnoses treated as genuine blackout events downstream (anything else is an error)
    good_diag = ["Phone never shut off",
                 "Phone shut off",
                 "Phone shut off, even if d1<d0",
                 ]
    game_cols = ["AppCode", "AppRuntime", "ForegroundApp", "BatteryLevel", "DeviceRuntime",
                 "CreatedDatetime", "CreatedEasternDatetime", "ScreenActive",
                 "CreatedDate", "Sequence", "Zipfile", "TimeZone"]

    @staticmethod
    def scan(df, file, first_last_bool=False):
        """Detect suspicious pings in a granular dataframe from one zipfile.

        A ping is suspicious when the previous app runtime exceeds the current
        one for the same app. With ``first_last_bool=True``, scan the
        aggregated first/last dataframe for the same condition across zipfile
        boundaries instead. Flagged rows are pickled to Granular/events{file}.
        """
        for col in Gaming.game_cols:
            if col in df.columns:
                df[f"Prev{col}"] = df[col].shift(1)
        keep_cols = [prefix + col for prefix in ["", "Prev"] for col in Gaming.game_cols]
        if not first_last_bool:
            # within one zipfile: previous runtime greater than current runtime
            ev = df.loc[(df["PrevAppRuntime"] > df["AppRuntime"])
                        & (df["PrevAppCode"] == df["AppCode"]), keep_cols]
        else:
            # across zipfiles: the last reading of the previous zipfile has an
            # app runtime greater than the first reading of the current zipfile
            ev = df.loc[(df["PrevAppRuntime"] > df["AppRuntime"])
                        & (df["PrevAppCode"] == df["AppCode"])
                        & (df["Sequence"] == "First")
                        & (df["PrevSequence"] == "Last")
                        & (df["PrevZipfile"] != df["Zipfile"])
                        , keep_cols]
        serialize.save_pickle(ev, os.path.join(Gaming.gaming_dir, "Granular", f"events{file}"))

    @staticmethod
    def get_first_last(df, file):
        """Record the first and last observation per AppCode from one raw zipfile."""
        first = df.groupby("AppCode").first()
        first["Sequence"] = "First"
        last = df.groupby("AppCode").last()
        last["Sequence"] = "Last"
        # DataFrame.append was removed in pandas 2.0; pd.concat is the supported equivalent
        first_last_df = pd.concat([first, last]).reset_index()
        first_last_df = first_last_df[Gaming.game_cols]
        serialize.save_pickle(first_last_df, os.path.join(Gaming.first_last_file, f"first_last_{file}"))

    @staticmethod
    def process_gaming(error_margin, hour_use, raw_user_df):
        """Assemble the events file, diagnose gaming events, and summarize
        blackouts on the user level by phase.

        Returns the user-level blackout dataframe (one row per AppCode with
        per-phase blackout-hour columns).
        """
        # don't run the raw gaming-detection pipeline during test;
        # debug over notebooks if needed
        config_user_dict = serialize.open_yaml("config_user.yaml")
        if config_user_dict['local']['test']:
            diag_df = serialize.open_pickle(Gaming.diagnosed_file)
        else:
            Gaming._add_first_last_events()
            ev_df = Gaming._aggregate_events()
            diag_df = Gaming._diagnose_events(ev_df, error_margin, hour_use)
        # reshape all blackout events for the main analysis
        game_user_df = Gaming._reshape_events(diag_df, raw_user_df)
        # reshape screen-active blackout events for the side analysis
        Gaming._reshape_events(diag_df.loc[diag_df["PrevScreenActive"] == 1],
                               raw_user_df, "ActiveBlackoutsOverPhase")
        # expand to the app-hour level (pickled as a side effect outside test mode)
        Gaming._expand_gaming_df(diag_df, "GameHourDf")
        return game_user_df

    @staticmethod
    def _add_first_last_events():
        """Aggregate the per-zipfile first/last observations, then scan them.

        We are scanning for cases where the last reading of the previous
        zipfile has an app runtime greater than the first reading of the next
        zipfile.
        """
        fl_dir = Gaming.first_last_file
        df = pd.concat([serialize.soft_df_open(os.path.join(fl_dir, x)) for x in os.listdir(fl_dir)])
        df = df.sort_values(by=["AppCode", "CreatedEasternDatetime"]).reset_index(drop=True)
        if datetime.now() > study_config.surveys["Baseline"]["Start"]:
            df = df.loc[df['CreatedDatetime'] > study_config.surveys["Baseline"]["Start"]]
        df["PrevSequence"] = df["Sequence"].shift(1)
        Gaming.scan(df, "fl", first_last_bool=True)

    @staticmethod
    def _aggregate_events():
        """Aggregate all individual event files in the Granular directory,
        dropping duplicate (AppCode, CreatedEasternDatetime) pings."""
        ev_dir = os.path.join(Gaming.gaming_dir, "Granular")
        ev_df = pd.concat([serialize.soft_df_open(os.path.join(ev_dir, x)) for x in os.listdir(ev_dir)])
        ev_df = ev_df.drop_duplicates(subset=["AppCode", "CreatedEasternDatetime"], keep='last').reset_index(drop=True)
        serialize.save_pickle(ev_df, Gaming.events_file)
        return ev_df

    @staticmethod
    def _diagnose_events(ev_df, error_margin, clean_hour_use):
        """Estimate how long the phone was untracked for each suspicious event.

        Variable glossary (all values in hours):
          - d0: device runtime right before tracking stopped
          - d1: device runtime when tracking resumed
          - dd: difference in phone runtime (d1 - d0)
          - td: difference in the timestamps associated with d0 and d1
          - error_margin: hours that CreatedDatetime or runtime stamps can
            deviate before an error is flagged
        """
        df = ev_df.sort_values(by=['AppCode', 'CreatedEasternDatetime'])
        df = df.loc[df["PrevCreatedEasternDatetime"] > study_config.first_pull]
        if datetime.now() > study_config.surveys["Baseline"]["Start"]:
            df = df.loc[df['PrevCreatedDatetime'] > study_config.surveys["Baseline"]["Start"]]
        df["CreatedEasternDatetimeDiffHours"] = (df["CreatedEasternDatetime"] - df["PrevCreatedEasternDatetime"]).apply(
            lambda x: round(x.days * 24 + x.seconds / (60 * 60), 2))
        # runtimes arrive in milliseconds; convert to hours
        for col in ["DeviceRuntime", "AppRuntime", "PrevDeviceRuntime", "PrevAppRuntime"]:
            df[f"{col}Hours"] = (df[f"{col}"] / (1000 * 60 * 60)).round(decimals=2)
        for col in ["DeviceRuntimeHours", "AppRuntimeHours"]:
            df[col + "Diff"] = df[col] - df[f"Prev{col}"]
        ne_dict = df.to_dict(orient='index')
        # total use minutes per (AppCode, CreatedDate); inner dicts are keyed by date
        day = clean_hour_use.groupby(["AppCode", "CreatedDate"])["UseMinutes"].sum()
        day_dic = {k: day[k].to_dict() for k, v in day.groupby(level=0)}
        for key, val in ne_dict.items():
            d0 = val["PrevDeviceRuntimeHours"]
            d1 = val["DeviceRuntimeHours"]
            td = val["CreatedEasternDatetimeDiffHours"]
            date0 = val["PrevCreatedDatetime"]
            date1 = val["CreatedDatetime"]
            # {} when this appcode has no recorded use
            app_dic = day_dic.get(val["AppCode"], {})
            # Remove false positives due to data-export lag: if there is recorded
            # use on any full day strictly between the two pings, the phone was on.
            # BUG FIX: the original compared a datetime against a date (TypeError)
            # and looked up datetimes in a date-keyed dict; it also let the
            # diagnosis chain below overwrite the FalsePositive label. Compare as
            # dates and skip the rest of the diagnosis once the event is flagged.
            false_positive = False
            if (date0 + timedelta(days=1)).date() < date1.date():
                next_day = (date0 + timedelta(days=1)).date()
                while next_day < date1.date():
                    if next_day in app_dic:
                        ne_dict[key]['Diagnosis'] = "ERROR: FalsePositive"
                        false_positive = True
                        break
                    next_day = next_day + timedelta(days=1)
            if false_positive:
                continue
            if date1 < date0:
                ne_dict[key]['Diagnosis'] = "ERROR: Date1<Date0"
            elif d1 - d0 < 0:
                # runtime decreased: the phone must have shut down
                ne_dict[key]['Diagnosis'] = "Phone shut off"
                ne_dict[key]['BlackoutHoursLB'] = d1
                ne_dict[key]['BlackoutHoursUB'] = td
                if td + error_margin < d1:
                    # impossible: phone cannot have run longer than the gap
                    ne_dict[key]['Diagnosis'] = "ERROR: td <d1 | d1-d0 <= 0 "
            else:
                if td >= d1:
                    # new runtime fits inside the gap: phone had to have shut
                    # off, even though the new runtime exceeds the old one
                    ne_dict[key]['Diagnosis'] = "Phone shut off, even if d1<d0"
                    ne_dict[key]['BlackoutHoursLB'] = d1
                    ne_dict[key]['BlackoutHoursUB'] = td
                else:
                    # phone never shut down, since d1>=d0 & d1>td
                    ne_dict[key]['Diagnosis'] = "Phone never shut off"
                    ne_dict[key]['BlackoutHoursLB'] = td
                    ne_dict[key]['BlackoutHoursUB'] = td
                    if td + error_margin < d1 - d0:
                        # impossible: runtime grew by more than the elapsed time
                        ne_dict[key]['Diagnosis'] = "ERROR: if phone never shutoff, no way for td < d1-d0"
        df = pd.DataFrame.from_dict(ne_dict, orient='index')
        # point estimate: midpoint of the lower/upper bounds
        df["BlackoutHours"] = (df["BlackoutHoursLB"] + df["BlackoutHoursUB"]) / 2
        df = Gaming._diagnose_dups(df)
        serialize.save_pickle(df, Gaming.diagnosed_file)
        test.save_test_df(df, Gaming.diagnosed_test_file)
        return df

    @staticmethod
    def _diagnose_dups(df):
        """Flag overlapping events for the same AppCode.

        If the next event starts before this one ends, this event is marked as
        an error; the next event is also marked unless it is fully embedded in
        this one.
        """
        df = df.sort_values(by=["AppCode", "PrevCreatedEasternDatetime"]).reset_index(drop=True)
        d_dict = df.to_dict(orient='index')
        for key, val in d_dict.items():
            if key + 1 not in d_dict:
                continue
            if d_dict[key]["AppCode"] != d_dict[key + 1]["AppCode"]:
                continue
            if d_dict[key]["CreatedEasternDatetime"] > d_dict[key + 1]["PrevCreatedEasternDatetime"]:
                d_dict[key]["Diagnosis"] = "Error: Another event starts before this event ends"
                # put an error on the other event only if it is NOT embedded in
                # the original event
                if d_dict[key]["CreatedEasternDatetime"] < d_dict[key + 1]["CreatedEasternDatetime"]:
                    d_dict[key + 1]["Diagnosis"] = "Error: Another event ends after this event starts"
        df = pd.DataFrame.from_dict(d_dict, orient='index')
        return df

    @staticmethod
    def _reshape_events(diag_df, raw_user, file_name=None):
        """Reshape the diagnosed event-level dataframe to the user level.

        Returns one row per AppCode with total blackout hours per phase
        (columns named <PhaseCode>_BlackoutHours).
        """
        # .copy() avoids SettingWithCopyWarning on the column assignments below
        df = diag_df.loc[diag_df["Diagnosis"].isin(Gaming.good_diag)].copy()
        df["CreatedDate"] = df["CreatedDatetime"].apply(lambda x: x.date())
        df = BuilderUtils.add_phase_label(df,
                                          raw_df_date="CreatedDate",
                                          start_buffer=0,
                                          end_buffer=-1,)
        # replace phase labels with the phase's start-survey code
        codes = [study_config.phases[x]["StartSurvey"]["Code"] for x in list(study_config.phases.keys())]
        rename_dic = dict(zip(list(study_config.phases.keys()), codes))
        df["Phase"] = df["Phase"].apply(lambda x: rename_dic[x] if x in rename_dic else x)
        df_s = df.groupby(["AppCode", "Phase"])["BlackoutHours"].sum().reset_index()
        df_p = df_s.pivot_table(index=["AppCode"],
                                values=["BlackoutHours"],
                                columns=["Phase"],
                                aggfunc='first')
        # flatten column names (and rearrange in correct order)
        df_p.columns = ['_'.join(col[::-1]).strip() for col in df_p.columns.values]
        df_p = df_p.reset_index()
        # NOTE: blackout hours per day are not computed here because DaySet is
        # used as the denominator downstream
        return df_p

    @staticmethod
    def _expand_gaming_df(diag, file_name):
        """Spread each blackout event evenly over the clock hours it spans and
        compress to the AppCode-hour level; pickled unless running in test mode."""
        # .copy() avoids SettingWithCopyWarning on the column assignments below
        ex = diag.loc[diag["Diagnosis"].isin(Gaming.good_diag)].copy()
        # list of DatetimeHours that are in the blackout period
        ex["DatetimeHour"] = ex.apply(lambda x: Gaming.get_time_attributes(x, "Hour"), axis=1)
        # expand: one row per blackout hour
        ex = ex.explode("DatetimeHour")
        ex["DatetimeHour"] = ex["DatetimeHour"].apply(lambda x: x.replace(minute=0, second=0, microsecond=0))
        ex["HourCount"] = ex.groupby(["AppCode", "CreatedDatetime"])["DatetimeHour"].transform('count')
        # evenly divide the blackout period among the hours it occupied
        ex["BlackoutHours"] = ex["BlackoutHours"] / ex["HourCount"]
        # compress onto the App-Hour level (this merges multiple blackout
        # events that occurred on the same datetime hour)
        ex = ex.groupby(["AppCode", "DatetimeHour"])["BlackoutHours"].sum().reset_index()
        config_user_dict = serialize.open_yaml("config_user.yaml")
        if not config_user_dict['local']['test']:
            serialize.save_pickle(ex, os.path.join(Gaming.gaming_dir, file_name))
        return ex

    @staticmethod
    def get_time_attributes(df, kind):
        """Return the hourly timestamps ("Hour") or per-day weekday numbers
        (any other kind) spanning PrevCreatedDatetime..CreatedDatetime of one
        event row."""
        start = df["PrevCreatedDatetime"]
        end = df["CreatedDatetime"]
        if kind == "Hour":
            # "h" replaces the "H" alias deprecated in pandas 2.2
            return list(pd.date_range(start, end, freq="h"))
        return [x.weekday() for x in pd.date_range(start, end, freq="D")]