jkushwaha commited on
Commit
57e8624
·
verified ·
1 Parent(s): 3cf2792

Create multiprocessing_in_date_ingestion.py

Browse files
multiprocessing_in_date_ingestion.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ from multiprocessing import Pool
3
+
4
def get_encounter_dates(df_list, pif_key):
    """Collect every encounter date matching *pif_key* across a list of frames.

    Parameters
    ----------
    df_list : list[pandas.DataFrame]
        Frames that each contain 'pif_key' and 'encounter_date' columns.
    pif_key : str | int
        Key to match; both sides are compared as strings.

    Returns
    -------
    list
        All matching 'encounter_date' values; NaN dates come back as ''.
    """
    encounter_dates = []
    for df in df_list:
        # Select first, then fillna on the extracted Series: the original
        # called fillna(inplace=True) on the shared frame, silently mutating
        # the caller's data (and inplace fillna is deprecated in pandas 2.x).
        mask = df['pif_key'].astype(str) == str(pif_key)
        matched = df.loc[mask, 'encounter_date'].fillna('').values
        if len(matched) > 0:
            encounter_dates.extend(matched)
    return encounter_dates
12
+
13
def get_latest_date(encounter_dates):
    """Return the greatest date string in *encounter_dates*, or '' if empty.

    Dates are ISO-style strings, so lexicographic max is chronological max.
    """
    return max(encounter_dates) if encounter_dates else ''
17
+
18
def create_date_insert_dict(ingested_date):
    """Build the attribute record used to carry an ingested 'report_date'.

    Only 'attribute_prediction' varies; the remaining fields are fixed
    metadata for the cv/v2_090523 pipeline version.
    """
    field_names = (
        'attribute_name', 'attribute_method', 'attribute_normalized_prediction',
        'attribute_prediction', 'attribute_version', 'attribute_vocab',
        'attribute_code', 'date_of_service',
    )
    field_values = (
        'report_date', 'cv', '', str(ingested_date), 'v2_090523', '', '', '',
    )
    return dict(zip(field_names, field_values))
29
+
30
def add_logging_entry(logging_df, pif_key, json_report_date_exists, encounter_dates, ingested_date, multiple_date, old_date):
    """Append one audit row describing a report-date decision.

    Parameters
    ----------
    logging_df : pandas.DataFrame
        Accumulated audit log; not mutated.
    pif_key : str
        Patient/report identifier the decision applies to.
    json_report_date_exists : bool
        Whether the JSON already carried a 'report_date' attribute.
    encounter_dates : list | None
        Candidate dates found in the source frames (None when skipped).
    ingested_date : str
        Date chosen for insertion ('' when none was found).
    multiple_date : bool
        True when more than one candidate date existed.
    old_date : str
        Previous report date, or 'missing'/'' marker.

    Returns
    -------
    pandas.DataFrame
        A new frame with the row appended.
    """
    row = {
        'pif_key': pif_key,
        'json_report_date_exists': json_report_date_exists,
        'encounter_dates': encounter_dates,
        'ingested_date': ingested_date,
        'multiple_date': multiple_date,
        'old_date': old_date,
    }
    # DataFrame.append was removed in pandas 2.0; pd.concat is the
    # supported replacement and preserves ignore_index semantics.
    return pd.concat([logging_df, pd.DataFrame([row])], ignore_index=True)
40
+
41
def date_dict(df_list, pif_key):
    """Resolve the latest encounter date for *pif_key* and package it.

    Returns
    -------
    tuple
        (insert-ready attribute dict, all matching encounter dates,
        the latest of those dates or '').
    """
    dates = get_encounter_dates(df_list, pif_key)
    latest = get_latest_date(dates)
    return create_date_insert_dict(latest), dates, latest
45
+
46
def _find_report_date(dict_list):
    """Locate the existing 'report_date' attribute in *dict_list*.

    Returns (index, previous prediction value); (None, '') when absent.
    """
    for idx, attr in enumerate(dict_list):
        if attr['attribute_name'] == 'report_date':
            return idx, attr['attribute_prediction']
    return None, ''


def report_date_insertion(dict_list, df_list, logging_df):
    """Ensure *dict_list* carries an up-to-date 'report_date' attribute.

    If the attribute is absent it is inserted at position 1 with the latest
    encounter date for the list's pif_key; if present and a fresher date is
    available, the old entry is replaced. Every decision is appended to the
    audit log. Nothing happens when no 'pif_key' attribute exists.

    Parameters
    ----------
    dict_list : list[dict]
        Attribute records ('attribute_name' / 'attribute_prediction' keys);
        mutated in place.
    df_list : list[pandas.DataFrame]
        Source frames searched for encounter dates.
    logging_df : pandas.DataFrame
        Audit log accumulator.

    Returns
    -------
    tuple
        (dict_list, updated logging_df).

    Notes
    -----
    The original duplicated the report-date search loop in two branches and
    relied on the loop variables surviving past ``break``; the search is now
    a single helper (_find_report_date).
    """
    col_names = {col['attribute_name'] for col in dict_list}
    pif_key = next((col['attribute_prediction'] for col in dict_list
                    if col['attribute_name'] == 'pif_key'), None)

    if pif_key is not None:
        if 'report_date' not in col_names:
            date_insert_dict, encounter_dates, ingested_date = date_dict(df_list, pif_key)
            dict_list.insert(1, date_insert_dict)
            logging_df = add_logging_entry(
                logging_df, pif_key, False, encounter_dates, ingested_date,
                len(encounter_dates) > 1,
                'missing' if not ingested_date else '')
        else:
            date_insert_dict, encounter_dates, ingested_date = date_dict(df_list, pif_key)
            idx, old_date = _find_report_date(dict_list)
            if ingested_date:
                # Replace the stale entry with the freshly ingested date,
                # keeping the insert-at-1 convention of the missing case.
                dict_list.pop(idx)
                dict_list.insert(1, date_insert_dict)
                logging_df = add_logging_entry(
                    logging_df, pif_key, True, encounter_dates, ingested_date,
                    len(encounter_dates) > 1, old_date)
            else:
                # No candidate date found: keep the existing entry, log only.
                logging_df = add_logging_entry(
                    logging_df, pif_key, True, None, '', False, old_date)

    return dict_list, logging_df
73
+
74
def process_biomarker_detail(biomarker_detail, df_list=None, logging_df=None):
    """Run report-date insertion over every attribute group in one detail.

    Parameters
    ----------
    biomarker_detail : dict
        One entry from the biomarker 'details' list; must contain an
        'attribute' list whose items each hold 'attribute_details'.
    df_list : list[pandas.DataFrame], optional
        Source frames with encounter dates. Defaults to the module-level
        ``df_list`` (via globals()) so the function still works as a
        one-argument ``pool.map`` callable.
    logging_df : pandas.DataFrame, optional
        Audit log accumulator; same module-level fallback.

    Returns
    -------
    dict
        The (mutated) biomarker_detail.

    Notes
    -----
    Bug fix: the original read ``logging_df`` while also assigning it inside
    the loop, which made the name function-local and raised
    UnboundLocalError on first use; ``df_list`` was likewise an undefined
    global. The optional parameters with a globals() fallback fix both
    without changing the call signature seen by pool.map.
    NOTE(review): logging rows accumulated here are discarded by the
    pool.map caller (json_report_date_insertion) — confirm whether the
    audit log is needed from the worker processes.
    """
    if df_list is None:
        df_list = globals().get('df_list', [])
    if logging_df is None:
        logging_df = globals().get('logging_df')
    for attribute in biomarker_detail['attribute']:
        attribute['attribute_details'], logging_df = report_date_insertion(
            attribute['attribute_details'], df_list, logging_df)
    return biomarker_detail
80
+
81
def json_report_date_insertion(json_data, df_list, logging_df):
    """Apply report-date insertion to every biomarker detail in *json_data*.

    Fans process_biomarker_detail out over a multiprocessing Pool and writes
    the results back into json_data['patient_level']['biomarkers']['details'].

    NOTE(review): *df_list* and *logging_df* are accepted but never handed to
    the workers — process_biomarker_detail takes only the detail itself — and
    *logging_df* is returned unchanged, so any audit rows produced inside the
    worker processes are lost. Confirm against the caller whether that is
    intended.

    Returns
    -------
    tuple
        (json_data with updated details, logging_df as passed in).
    """
    biomarkers = json_data['patient_level']['biomarkers']
    with Pool() as workers:
        biomarkers['details'] = workers.map(process_biomarker_detail,
                                            biomarkers['details'])
    return json_data, logging_df