Update app.py
Browse files
app.py
CHANGED
|
@@ -1,4 +1,6 @@
|
|
| 1 |
-
# 0815 Version
|
|
|
|
|
|
|
| 2 |
import gradio as gr
|
| 3 |
import pandas as pd
|
| 4 |
from datetime import datetime, time
|
|
@@ -10,145 +12,94 @@ import shutil
|
|
| 10 |
import json
|
| 11 |
from weasyprint import HTML, CSS
|
| 12 |
import warnings
|
| 13 |
-
from concurrent.futures import ThreadPoolExecutor, as_completed
|
| 14 |
|
| 15 |
warnings.filterwarnings("ignore")
|
| 16 |
|
| 17 |
-
# ----------------------------------------------------------------------
|
| 18 |
-
# Constants & Config
|
| 19 |
-
# ----------------------------------------------------------------------
|
| 20 |
UPLOAD_DIR = "Uploads"
|
| 21 |
UPLOAD_TIMES_FILE = os.path.join(UPLOAD_DIR, "upload_times.json")
|
| 22 |
CHICAGO_TZ = ZoneInfo("America/Chicago")
|
| 23 |
|
| 24 |
-
os.makedirs(UPLOAD_DIR, exist_ok=True)
|
| 25 |
if os.path.exists(UPLOAD_DIR) and not os.path.isdir(UPLOAD_DIR):
|
| 26 |
-
raise FileExistsError(f"'{UPLOAD_DIR}' exists as a file.
|
|
|
|
| 27 |
|
| 28 |
AGE_LST = ["Newborn-5mo", "6mo-9yo", "10-17yo", "18-20yo", "21-24yo", "25+yo"]
|
| 29 |
OFF_LST = ['OFF', 'VACATION', 'FMLA', 'ADMIN', 'PAID_LEAVE', 'CME', 'TEACHING', 'SICK', 'HOLIDAY']
|
| 30 |
UNDER_18G = ["Newborn-5mo", "6mo-9yo", "10-17yo"]
|
| 31 |
OVER_18G = ["18-20yo", "21-24yo", "25+yo"]
|
| 32 |
-
|
| 33 |
-
AVAILABLE_LOCATIONS = [
|
| 34 |
-
'Berwyn', 'Juarez', 'LVHS', 'Morgan', 'Orozco', 'Western',
|
| 35 |
-
'Urgent Care', 'Psych', "OB/Gynecology", 'All Locations'
|
| 36 |
-
]
|
| 37 |
NO_AGE_CHECK_LOCATIONS = ['Juarez', 'LVHS', 'Orozco', 'Urgent Care', 'Psych', "OB/Gynecology"]
|
| 38 |
-
NO_OPERATION_CHECK_LOCATIONS =
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
'B': 'Berwyn', 'J': 'Juarez', 'L': 'LVHS', 'M': 'Morgan', 'O': 'Orozco', 'W': 'Western',
|
| 42 |
-
'UC': 'Urgent Care', 'PSY/M': 'Psych', 'PSY/B': 'Psych',
|
| 43 |
-
"OB/B": "OB/Gynecology", "OB/M": "OB/Gynecology", "OB/W": "OB/Gynecology", "OB": "OB/Gynecology"
|
| 44 |
-
}
|
| 45 |
|
| 46 |
-
# ----------------------------------------------------------------------
|
| 47 |
-
# Clinic Hours (Defined BEFORE any reference)
|
| 48 |
-
# ----------------------------------------------------------------------
|
| 49 |
-
CLINIC_HOURS = {
|
| 50 |
-
'Berwyn': {
|
| 51 |
-
(0,1,3,4): (time(8,30), time(17,30), [(12,30),(13,30)]),
|
| 52 |
-
2: (time(13,0), time(20,0), [(16,0),(17,0)]),
|
| 53 |
-
5: (time(8,30), time(15,0), [(11,30),(12,0)])
|
| 54 |
-
},
|
| 55 |
-
'Morgan': {},
|
| 56 |
-
'Western': {},
|
| 57 |
-
'Urgent Care': {
|
| 58 |
-
(0,1,3,4): (time(9,0), time(18,0), [(13,0),(14,0)]),
|
| 59 |
-
2: (time(9,0), time(18,0), [(13,0),(14,0)]),
|
| 60 |
-
5: (time(9,0), time(13,30), [])
|
| 61 |
-
},
|
| 62 |
-
'Juarez': {
|
| 63 |
-
(0,1,2,3,4): (time(8,30), time(16,0), [(13,0),(14,0)])
|
| 64 |
-
},
|
| 65 |
-
'Orozco': {
|
| 66 |
-
(0,1,2,3,4): (time(8,0), time(16,30), [])
|
| 67 |
-
},
|
| 68 |
-
'LVHS': {
|
| 69 |
-
(0,1,2,3): (time(8,30), time(16,0), [(12,0),(13,0)]),
|
| 70 |
-
4: (time(12,0), time(13,0), [])
|
| 71 |
-
},
|
| 72 |
-
'Psych': {
|
| 73 |
-
(0,1,3,4): (None, None, [(12,30),(13,30)]),
|
| 74 |
-
2: (None, None, [(16,0),(17,0)]),
|
| 75 |
-
5: (None, None, [])
|
| 76 |
-
}
|
| 77 |
-
}
|
| 78 |
-
|
| 79 |
-
# Copy Berwyn → Morgan & Western
|
| 80 |
-
CLINIC_HOURS['Morgan'] = {k: v for k, v in CLINIC_HOURS['Berwyn'].items()}
|
| 81 |
-
CLINIC_HOURS['Western'] = {k: v for k, v in CLINIC_HOURS['Berwyn'].items()}
|
| 82 |
-
|
| 83 |
-
# ----------------------------------------------------------------------
|
| 84 |
-
# Helper Functions
|
| 85 |
-
# ----------------------------------------------------------------------
|
| 86 |
def get_time_string(row):
|
| 87 |
if row['Location'] == "OB/Gynecology" and pd.isna(row['Start_Time']) and pd.isna(row['End_Time']) and pd.notna(row.get('Note')):
|
| 88 |
-
|
|
|
|
| 89 |
if pd.isna(row['Start_Time']) and pd.isna(row['End_Time']) and pd.notna(row.get('Note')):
|
| 90 |
note = row['Note'].strip().upper()
|
| 91 |
-
|
|
|
|
|
|
|
|
|
|
| 92 |
if pd.notna(row['Start_Time']) and pd.notna(row['End_Time']):
|
| 93 |
time_str = f"{row['Start_Time'].strftime('%H:%M')} - {row['End_Time'].strftime('%H:%M')}"
|
| 94 |
-
|
| 95 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 96 |
return "OFF"
|
| 97 |
|
| 98 |
def parse_date(val):
|
| 99 |
if pd.isna(val) or val is None:
|
| 100 |
return None
|
| 101 |
-
if isinstance(val, (int, float)):
|
| 102 |
-
return pd.Timestamp('1899-12-30') + pd.Timedelta(days=val)
|
| 103 |
-
if isinstance(val, str):
|
| 104 |
-
for fmt in ['%m/%d/%y', '%m/%d/%Y', '%Y-%m-%d']:
|
| 105 |
-
try:
|
| 106 |
-
return pd.to_datetime(val, format=fmt)
|
| 107 |
-
except ValueError:
|
| 108 |
-
continue
|
| 109 |
-
return pd.to_datetime(val, errors='coerce')
|
| 110 |
-
|
| 111 |
-
def parse_time(val):
|
| 112 |
-
if pd.isna(val) or val in ["", "OFF", "nan", "NaT"]:
|
| 113 |
-
return None
|
| 114 |
-
if isinstance(val, time):
|
| 115 |
-
return val
|
| 116 |
-
if isinstance(val, datetime):
|
| 117 |
-
return val.time()
|
| 118 |
try:
|
| 119 |
-
|
| 120 |
-
|
| 121 |
-
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 130 |
|
| 131 |
-
|
| 132 |
-
# Excel Parsing (Parallel)
|
| 133 |
-
# ----------------------------------------------------------------------
|
| 134 |
-
def _parse_excel_file(path):
|
| 135 |
try:
|
| 136 |
-
all_sheets = pd.read_excel(
|
| 137 |
if not all_sheets:
|
| 138 |
-
return None, "No sheets found
|
| 139 |
-
|
|
|
|
|
|
|
| 140 |
for sheet_name, df in all_sheets.items():
|
| 141 |
print(f"Processing sheet: {sheet_name}")
|
| 142 |
base_columns = ['Name', 'Location']
|
| 143 |
if not all(col in df.columns for col in base_columns):
|
| 144 |
return None, f"Missing required columns in sheet '{sheet_name}'. Expected at least: {base_columns}"
|
|
|
|
| 145 |
if 'Start_Time1' not in df.columns:
|
| 146 |
return None, f"No valid time columns (e.g., Start_Time1) found in sheet '{sheet_name}'!"
|
|
|
|
| 147 |
num_days = sum(1 for col in df.columns if col.startswith('Start_Time'))
|
|
|
|
| 148 |
week_rows = df[df['Name'].str.startswith('Week', na=False)].index.tolist()
|
| 149 |
if not week_rows:
|
| 150 |
print(f"No week rows found in sheet '{sheet_name}'")
|
| 151 |
continue
|
|
|
|
| 152 |
for week_idx in week_rows:
|
| 153 |
dates = []
|
| 154 |
for day in range(1, num_days + 1):
|
|
@@ -160,21 +111,27 @@ def _parse_excel_file(path):
|
|
| 160 |
print(f"Parsed date for {col} in sheet '{sheet_name}': {parsed_date}")
|
| 161 |
else:
|
| 162 |
dates.append(None)
|
|
|
|
| 163 |
next_week = next((idx for idx in week_rows if idx > week_idx), len(df))
|
| 164 |
provider_df = df.loc[week_idx + 1: next_week - 1]
|
| 165 |
provider_df = provider_df[~provider_df['Name'].eq('Name')]
|
|
|
|
| 166 |
if provider_df.empty:
|
| 167 |
print(f"No provider data found for week starting at index {week_idx} in sheet '{sheet_name}'")
|
| 168 |
continue
|
|
|
|
| 169 |
temp_dfs = []
|
| 170 |
for day in range(1, num_days + 1):
|
| 171 |
if day > len(dates) or dates[day - 1] is None:
|
| 172 |
continue
|
|
|
|
| 173 |
start_col = f'Start_Time{day}'
|
| 174 |
end_col = f'End_Time{day}'
|
| 175 |
note_col = f'Note{day}'
|
|
|
|
| 176 |
if start_col not in df.columns or end_col not in df.columns:
|
| 177 |
continue
|
|
|
|
| 178 |
temp = provider_df[['Name', 'Location', start_col, end_col, note_col]].copy()
|
| 179 |
temp['Date'] = dates[day - 1]
|
| 180 |
temp = temp.rename(columns={
|
|
@@ -183,36 +140,65 @@ def _parse_excel_file(path):
|
|
| 183 |
note_col: 'Note'
|
| 184 |
})
|
| 185 |
temp_dfs.append(temp)
|
|
|
|
| 186 |
if temp_dfs:
|
| 187 |
sheet_week_df = pd.concat(temp_dfs, ignore_index=True)
|
| 188 |
sheet_week_df = sheet_week_df.dropna(subset=['Name'])
|
| 189 |
sheet_week_df['Location'] = sheet_week_df['Location'].map(lambda x: LOCATION_MAP.get(x, x) if pd.notna(x) else x)
|
|
|
|
| 190 |
for time_col in ['Start_Time', 'End_Time']:
|
| 191 |
if time_col in sheet_week_df.columns:
|
| 192 |
sheet_week_df[time_col] = sheet_week_df[time_col].apply(parse_time)
|
| 193 |
-
|
| 194 |
-
|
|
|
|
|
|
|
| 195 |
return None, "No valid data found across all sheets! Check date formats and ensure data exists for the specified range."
|
| 196 |
-
final_df = pd.concat(
|
| 197 |
final_df = final_df.drop_duplicates()
|
| 198 |
final_df = final_df.dropna(subset=["Date"])
|
| 199 |
final_df = final_df[final_df["Location"] != "Location"]
|
| 200 |
final_df = final_df[final_df["Name"] != "Name"]
|
| 201 |
final_df["Date"] = pd.to_datetime(final_df["Date"])
|
|
|
|
| 202 |
if final_df.empty:
|
| 203 |
return None, "No valid data after filtering! Ensure the date range matches the data in the Excel files."
|
|
|
|
| 204 |
print(f"Final DataFrame shape: {final_df.shape}")
|
| 205 |
print(f"Available dates: {final_df['Date'].unique()}")
|
| 206 |
return final_df, None
|
| 207 |
except Exception as e:
|
| 208 |
return None, f"Error reading Excel: {str(e)}!"
|
| 209 |
|
| 210 |
-
|
| 211 |
-
|
| 212 |
-
|
| 213 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 214 |
try:
|
| 215 |
-
df = pd.read_excel(
|
| 216 |
expected_columns = ["Provider", "Last_Name", "Location"] + AGE_LST
|
| 217 |
if not all(col in df.columns for col in expected_columns):
|
| 218 |
return None, f"Invalid columns in Provider_Info. Expected: {expected_columns}!"
|
|
@@ -224,9 +210,6 @@ def validate_provider_info(path):
|
|
| 224 |
except Exception as e:
|
| 225 |
return None, f"Error reading Provider_Info Excel: {str(e)}!"
|
| 226 |
|
| 227 |
-
# ----------------------------------------------------------------------
|
| 228 |
-
# File Management
|
| 229 |
-
# ----------------------------------------------------------------------
|
| 230 |
def save_files(file_list):
|
| 231 |
if not file_list:
|
| 232 |
return update_file_display()
|
|
@@ -279,9 +262,6 @@ def delete_file(filename):
|
|
| 279 |
json.dump(upload_times, f, indent=2)
|
| 280 |
return update_file_display()
|
| 281 |
|
| 282 |
-
# ----------------------------------------------------------------------
|
| 283 |
-
# Check Functions
|
| 284 |
-
# ----------------------------------------------------------------------
|
| 285 |
def check_age_coverage(providers_df, provider_info_df, location, date):
|
| 286 |
date_key = pd.to_datetime(date).strftime('%m/%d/%y')
|
| 287 |
providers_on_date = providers_df[
|
|
@@ -374,22 +354,92 @@ def check_provider_location_conflicts(providers_df, date, locations):
|
|
| 374 |
conflicts.append((provider, conflict_locations, 'conflict-warning', f'Provider {provider} scheduled at: {", ".join(conflict_locations)}'))
|
| 375 |
return conflicts
|
| 376 |
|
| 377 |
-
def get_clinic_hours(loc, weekday):
|
| 378 |
-
config = CLINIC_HOURS.get(loc, {})
|
| 379 |
-
for days, (start, end, breaks) in config.items():
|
| 380 |
-
if isinstance(days, tuple):
|
| 381 |
-
if weekday in days:
|
| 382 |
-
return start, end, [(time(b[0], b[1]), time(b[2], b[3])) for b in breaks]
|
| 383 |
-
elif weekday == days:
|
| 384 |
-
return start, end, [(time(b[0], b[1]), time(b[2], b[3])) for b in breaks]
|
| 385 |
-
return None, None, []
|
| 386 |
-
|
| 387 |
def check_operation_time_coverage(providers_df, date, location):
|
| 388 |
date_key = pd.to_datetime(date).strftime('%m/%d/%y')
|
| 389 |
weekday = pd.to_datetime(date).weekday()
|
| 390 |
-
|
| 391 |
-
if
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 392 |
return []
|
|
|
|
| 393 |
providers_on_date = providers_df[
|
| 394 |
(providers_df['Date'] == date_key) &
|
| 395 |
(providers_df['Location'] == location) &
|
|
@@ -407,12 +457,14 @@ def check_operation_time_coverage(providers_df, date, location):
|
|
| 407 |
if current_time < clinic_end:
|
| 408 |
gaps.append(f"{current_time.strftime('%H:%M')} - {clinic_end.strftime('%H:%M')}")
|
| 409 |
return gaps if gaps else []
|
|
|
|
| 410 |
intervals = []
|
| 411 |
for _, row in providers_on_date.iterrows():
|
| 412 |
start = row['Start_Time']
|
| 413 |
end = row['End_Time']
|
| 414 |
if start and end:
|
| 415 |
intervals.append((start, end))
|
|
|
|
| 416 |
intervals.sort(key=lambda x: x[0])
|
| 417 |
merged_intervals = []
|
| 418 |
current_start = None
|
|
@@ -429,6 +481,7 @@ def check_operation_time_coverage(providers_df, date, location):
|
|
| 429 |
current_end = end
|
| 430 |
if current_start is not None:
|
| 431 |
merged_intervals.append((current_start, current_end))
|
|
|
|
| 432 |
operational_intervals = []
|
| 433 |
for start, end in merged_intervals:
|
| 434 |
current_start = start
|
|
@@ -439,6 +492,7 @@ def check_operation_time_coverage(providers_df, date, location):
|
|
| 439 |
current_start = max(current_start, break_end)
|
| 440 |
if current_start < end:
|
| 441 |
operational_intervals.append((current_start, end))
|
|
|
|
| 442 |
gaps = []
|
| 443 |
current_time = clinic_start
|
| 444 |
for break_start, break_end in break_times:
|
|
@@ -456,28 +510,117 @@ def check_operation_time_coverage(providers_df, date, location):
|
|
| 456 |
current_time = max(current_time, end)
|
| 457 |
if current_time < clinic_end:
|
| 458 |
gaps.append(f"{current_time.strftime('%H:%M')} - {clinic_end.strftime('%H:%M')}")
|
|
|
|
| 459 |
return gaps
|
| 460 |
|
| 461 |
-
# ----------------------------------------------------------------------
|
| 462 |
-
# Weekly Hours
|
| 463 |
-
# ----------------------------------------------------------------------
|
| 464 |
def calculate_weekly_hours(providers_df, provider_info_df, start_date, end_date, locations):
|
| 465 |
weekly_hours = {}
|
| 466 |
weekly_totals = {}
|
| 467 |
current_date = start_date
|
| 468 |
week_number = 1
|
|
|
|
| 469 |
while current_date <= end_date:
|
| 470 |
if current_date.weekday() == 6:
|
| 471 |
current_date += pd.Timedelta(days=1)
|
| 472 |
continue
|
| 473 |
if current_date.weekday() == 0 and current_date != start_date:
|
| 474 |
week_number += 1
|
|
|
|
| 475 |
date_key = current_date.strftime('%m/%d/%y')
|
| 476 |
weekday = current_date.weekday()
|
|
|
|
| 477 |
for location in sorted(locations):
|
| 478 |
-
|
| 479 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 480 |
continue
|
|
|
|
| 481 |
loc_providers_df = providers_df[
|
| 482 |
(providers_df['Date'] == date_key) &
|
| 483 |
(providers_df['Location'] == location) &
|
|
@@ -502,18 +645,22 @@ def calculate_weekly_hours(providers_df, provider_info_df, start_date, end_date,
|
|
| 502 |
weekly_hours[week_key][location][display_name] = 0.0
|
| 503 |
if display_name not in weekly_totals[week_key]:
|
| 504 |
weekly_totals[week_key][display_name] = 0.0
|
| 505 |
-
|
| 506 |
start_dt = datetime.combine(current_date, start_time)
|
| 507 |
end_dt = datetime.combine(current_date, end_time)
|
| 508 |
if end_dt < start_dt:
|
| 509 |
end_dt += pd.Timedelta(days=1)
|
| 510 |
total_hours = (end_dt - start_dt).total_seconds() / 3600
|
| 511 |
-
|
|
|
|
| 512 |
if note == '6-7/TELE':
|
| 513 |
total_hours += 1.0
|
|
|
|
|
|
|
|
|
|
| 514 |
clinical_hours = total_hours
|
| 515 |
apply_break = clinical_hours >= 5.0
|
| 516 |
-
|
| 517 |
if apply_break:
|
| 518 |
for break_start, break_end in break_times:
|
| 519 |
break_start_dt = datetime.combine(current_date, break_start)
|
|
@@ -525,7 +672,7 @@ def calculate_weekly_hours(providers_df, provider_info_df, start_date, end_date,
|
|
| 525 |
if overlap_start < overlap_end:
|
| 526 |
overlap_hours = (overlap_end - overlap_start).total_seconds() / 3600
|
| 527 |
total_hours -= overlap_hours
|
| 528 |
-
|
| 529 |
clinic_start_dt = datetime.combine(current_date, clinic_start)
|
| 530 |
clinic_end_dt = datetime.combine(current_date, clinic_end)
|
| 531 |
if clinic_end_dt < clinic_start_dt:
|
|
@@ -536,16 +683,13 @@ def calculate_weekly_hours(providers_df, provider_info_df, start_date, end_date,
|
|
| 536 |
total_hours = min(total_hours, (overlap_end - overlap_start).total_seconds() / 3600)
|
| 537 |
else:
|
| 538 |
total_hours = 0.0
|
| 539 |
-
|
| 540 |
weekly_hours[week_key][location][display_name] += max(total_hours, 0.0)
|
| 541 |
weekly_totals[week_key][display_name] += max(total_hours, 0.0)
|
| 542 |
-
|
| 543 |
current_date += pd.Timedelta(days=1)
|
| 544 |
return weekly_hours, weekly_totals
|
| 545 |
|
| 546 |
-
# ----------------------------------------------------------------------
|
| 547 |
-
# Entry Count for Pagination
|
| 548 |
-
# ----------------------------------------------------------------------
|
| 549 |
def calculate_max_entries_per_day(providers_df, ma_df, start_obj, end_obj, all_locations, check_age_coverage_flag, check_location_conflicts_flag, check_operation_coverage_flag, check_ma_mismatch_flag, num_provider_files):
|
| 550 |
week_entries = []
|
| 551 |
current_date = start_obj
|
|
@@ -610,10 +754,10 @@ def calculate_max_entries_per_day(providers_df, ma_df, start_obj, end_obj, all_l
|
|
| 610 |
ma_on_day['End_Time'].notna() &
|
| 611 |
~ma_on_day['Note'].str.upper().fillna('').isin(OFF_LST)
|
| 612 |
]) if not ma_on_day.empty else 0
|
| 613 |
-
total_entries_day += 1
|
| 614 |
-
total_entries_day += ma_count
|
| 615 |
if not (ma_count == working_providers or ma_count == working_providers + 1):
|
| 616 |
-
total_entries_day += 1
|
| 617 |
has_providers = True
|
| 618 |
if perform_overall_check and has_providers:
|
| 619 |
total_entries_day += 1
|
|
@@ -628,118 +772,6 @@ def calculate_max_entries_per_day(providers_df, ma_df, start_obj, end_obj, all_l
|
|
| 628 |
week_entries.append(sum(current_week))
|
| 629 |
return week_entries
|
| 630 |
|
| 631 |
-
# ----------------------------------------------------------------------
|
| 632 |
-
# HTML Day Renderer
|
| 633 |
-
# ----------------------------------------------------------------------
|
| 634 |
-
def _render_day(providers_df, provider_info_df, ma_df, date, locations, checks):
|
| 635 |
-
date_key = pd.to_datetime(date).strftime('%m/%d/%y')
|
| 636 |
-
html = f'<div class="day">{date.day}<br>'
|
| 637 |
-
has_content = False
|
| 638 |
-
# Conflicts
|
| 639 |
-
if checks[1]:
|
| 640 |
-
conflicts = check_provider_location_conflicts(providers_df, date, locations)
|
| 641 |
-
for provider, conflict_locations, warning_class, message in conflicts:
|
| 642 |
-
html += f'<div class="{warning_class}"><span class="warning-details">{message}</span></div>'
|
| 643 |
-
has_content = True
|
| 644 |
-
# Overall age
|
| 645 |
-
if checks[0]:
|
| 646 |
-
missing_ages, is_holiday = check_overall_age_coverage(providers_df, provider_info_df, date, locations)
|
| 647 |
-
if not is_holiday and missing_ages:
|
| 648 |
-
html += f'<div class="overall-warning"><span class="warning-details">Missing age all locations: {", ".join(missing_ages)}</span></div>'
|
| 649 |
-
has_content = True
|
| 650 |
-
for location in sorted(locations):
|
| 651 |
-
loc_providers_df = providers_df[(providers_df['Date'] == pd.to_datetime(date_key, format='%m/%d/%y')) &
|
| 652 |
-
(providers_df['Location'] == location)] if not providers_df.empty else pd.DataFrame()
|
| 653 |
-
loc_provider_info_df = provider_info_df[provider_info_df['Location'] == location] if provider_info_df is not None else pd.DataFrame()
|
| 654 |
-
is_holiday = False
|
| 655 |
-
is_school_closed = False
|
| 656 |
-
if not loc_providers_df.empty:
|
| 657 |
-
all_provider_notes = loc_providers_df['Note'].dropna().str.strip().str.upper().tolist()
|
| 658 |
-
if all_provider_notes and all(note == 'HOLIDAY' for note in all_provider_notes):
|
| 659 |
-
is_holiday = True
|
| 660 |
-
elif all_provider_notes and all(note == 'SCHOOL CLOSED' for note in all_provider_notes) and location in NO_AGE_CHECK_LOCATIONS:
|
| 661 |
-
is_school_closed = True
|
| 662 |
-
if not loc_providers_df.empty:
|
| 663 |
-
loc_providers_df = loc_providers_df[
|
| 664 |
-
~((loc_providers_df['Start_Time'].isna()) &
|
| 665 |
-
(loc_providers_df['End_Time'].isna()) &
|
| 666 |
-
(loc_providers_df['Note'].isna() | (loc_providers_df['Note'] == '')))
|
| 667 |
-
]
|
| 668 |
-
if not loc_providers_df.empty or is_holiday or is_school_closed:
|
| 669 |
-
has_content = True
|
| 670 |
-
html += f'<div class="location-section"><strong>{location}</strong><br>'
|
| 671 |
-
if is_holiday:
|
| 672 |
-
html += '<div class="holiday-message">Holiday!<br>Clinic Closed!</div>'
|
| 673 |
-
if is_school_closed:
|
| 674 |
-
html += '<div class="holiday-message">School Closed!</div>'
|
| 675 |
-
if not is_holiday and not is_school_closed:
|
| 676 |
-
if not loc_providers_df.empty:
|
| 677 |
-
html += '<div class="event"><strong>Providers:</strong><br>'
|
| 678 |
-
missing_ages, full_age_providers, under_18_providers, over_18_providers, only_25_plus_providers = check_age_coverage(providers_df, loc_provider_info_df, location, date)
|
| 679 |
-
for _, row in loc_providers_df.iterrows():
|
| 680 |
-
provider_info = loc_provider_info_df[loc_provider_info_df['Provider'] == row['Name']]
|
| 681 |
-
display_name = provider_info['Last_Name'].iloc[0] if not provider_info.empty else row['Name']
|
| 682 |
-
time_str = get_time_string(row)
|
| 683 |
-
style = "font-size: 7pt; margin: 1mm; line-height: 1.1;"
|
| 684 |
-
if row['Name'] in full_age_providers:
|
| 685 |
-
style += "color: #ff6347;"
|
| 686 |
-
elif row['Name'] in under_18_providers:
|
| 687 |
-
style += "color: #008000;"
|
| 688 |
-
elif row['Name'] in over_18_providers:
|
| 689 |
-
style += "color: #0000ff;"
|
| 690 |
-
elif row['Name'] in only_25_plus_providers:
|
| 691 |
-
style += "color: #8E44AD;"
|
| 692 |
-
else:
|
| 693 |
-
style += "color: #000000;"
|
| 694 |
-
if time_str in OFF_LST:
|
| 695 |
-
style += " text-decoration: line-through;"
|
| 696 |
-
html += f'<span style="{style}">{display_name}: {time_str}</span><br>'
|
| 697 |
-
html += '</div>'
|
| 698 |
-
if checks[2] and location not in NO_OPERATION_CHECK_LOCATIONS:
|
| 699 |
-
gaps = check_operation_time_coverage(providers_df, date, location)
|
| 700 |
-
if gaps:
|
| 701 |
-
gap_text = ", ".join(gaps)
|
| 702 |
-
html += f'<div class="operation-warning"><span class="warning-details">Missing: {gap_text}</span></div>'
|
| 703 |
-
if checks[0] and missing_ages and location not in NO_AGE_CHECK_LOCATIONS:
|
| 704 |
-
warning_text = f"Missing: {', '.join(missing_ages)}"
|
| 705 |
-
html += f'<div class="warning"><span class="warning-details">{warning_text}</span></div>'
|
| 706 |
-
if checks[3]:
|
| 707 |
-
loc_ma_df = ma_df[(ma_df['Date'] == pd.to_datetime(date_key, format='%m/%d/%y')) &
|
| 708 |
-
(ma_df['Location'] == location)] if not ma_df.empty else pd.DataFrame()
|
| 709 |
-
if not loc_ma_df.empty:
|
| 710 |
-
loc_ma_df = loc_ma_df[
|
| 711 |
-
~((loc_ma_df['Start_Time'].isna()) &
|
| 712 |
-
(loc_ma_df['End_Time'].isna()) &
|
| 713 |
-
(loc_ma_df['Note'].isna() | (loc_ma_df['Note'] == '')))
|
| 714 |
-
]
|
| 715 |
-
html += '<div class="event"><strong>MAs:</strong><br>'
|
| 716 |
-
for _, row in loc_ma_df.iterrows():
|
| 717 |
-
display_name = row['Name']
|
| 718 |
-
time_str = get_time_string(row)
|
| 719 |
-
style = "font-size: 7pt; margin: 1mm; line-height: 1.1; color: #000000;"
|
| 720 |
-
if time_str in OFF_LST:
|
| 721 |
-
style += " text-decoration: line-through;"
|
| 722 |
-
html += f'<span style="{style}">{display_name}: {time_str}</span><br>'
|
| 723 |
-
html += '</div>'
|
| 724 |
-
working_providers = len(loc_providers_df[
|
| 725 |
-
loc_providers_df['Start_Time'].notna() &
|
| 726 |
-
loc_providers_df['End_Time'].notna() &
|
| 727 |
-
~loc_providers_df['Note'].str.upper().fillna('').isin(OFF_LST)
|
| 728 |
-
])
|
| 729 |
-
working_mas = len(loc_ma_df[
|
| 730 |
-
loc_ma_df['Start_Time'].notna() &
|
| 731 |
-
loc_ma_df['End_Time'].notna() &
|
| 732 |
-
~loc_ma_df['Note'].str.upper().fillna('').isin(OFF_LST)
|
| 733 |
-
])
|
| 734 |
-
if not (working_mas == working_providers or working_mas == working_providers + 1):
|
| 735 |
-
html += f'<div class="warning"><span class="warning-details">MA Mismatch: {working_mas} MAs for {working_providers} Providers</span></div>'
|
| 736 |
-
html += '</div>'
|
| 737 |
-
html += '</div>'
|
| 738 |
-
return html
|
| 739 |
-
|
| 740 |
-
# ----------------------------------------------------------------------
|
| 741 |
-
# Main Schedule Generator
|
| 742 |
-
# ----------------------------------------------------------------------
|
| 743 |
def combine_schedules(provider_info_file, provider_files, ma_files, start_date, end_date, check_age_coverage_flag, check_location_conflicts_flag, check_operation_coverage_flag, check_ma_mismatch_flag, show_weekly_hours, selected_locations):
|
| 744 |
save_files([provider_info_file] if provider_info_file else [])
|
| 745 |
save_files(provider_files if provider_files else [])
|
|
@@ -823,7 +855,7 @@ def combine_schedules(provider_info_file, provider_files, ma_files, start_date,
|
|
| 823 |
day_header_height = 10
|
| 824 |
entry_height = 10
|
| 825 |
num_locations = len(display_locations)
|
| 826 |
-
num_provider_files = len(
|
| 827 |
buffer_height = 200 + (num_locations * 20) + (num_provider_files * 25)
|
| 828 |
week_entries = calculate_max_entries_per_day(providers_df, ma_df, start_obj, end_obj, display_locations, check_age_coverage_flag, check_location_conflicts_flag, check_operation_coverage_flag, check_ma_mismatch_flag, num_provider_files)
|
| 829 |
if show_weekly_hours:
|
|
@@ -833,7 +865,7 @@ def combine_schedules(provider_info_file, provider_files, ma_files, start_date,
|
|
| 833 |
loc_providers = providers_df[providers_df['Location'] == location]['Name'].unique()
|
| 834 |
loc_provider_info = provider_info_df[provider_info_df['Provider'].isin(loc_providers)]
|
| 835 |
providers_per_location[location] = sorted(
|
| 836 |
-
[
|
| 837 |
)
|
| 838 |
max_providers = max([len(providers) for providers in providers_per_location.values()], default=0)
|
| 839 |
hours_table_height = (len(weekly_hours) * (max_providers + 2) * entry_height) + 20
|
|
@@ -844,8 +876,8 @@ def combine_schedules(provider_info_file, provider_files, ma_files, start_date,
|
|
| 844 |
hours_table_height = 0
|
| 845 |
a4_height = 842
|
| 846 |
bmw_locations = [loc for loc in display_locations if loc in ['Berwyn', 'Morgan', 'Western']]
|
| 847 |
-
perform_overall_check = check_age_coverage_flag and
|
| 848 |
-
perform_conflict_check = check_location_conflicts_flag and
|
| 849 |
locations_str = ", ".join(sorted(display_locations)) if display_locations else "No Locations"
|
| 850 |
generation_time = datetime.now(CHICAGO_TZ).strftime('%I:%M %p CDT, %B %d, %Y')
|
| 851 |
html_content = f"""
|
|
@@ -936,6 +968,7 @@ def combine_schedules(provider_info_file, provider_files, ma_files, start_date,
|
|
| 936 |
flex-direction: column;
|
| 937 |
align-items: flex-start;
|
| 938 |
break-inside: avoid;
|
|
|
|
| 939 |
page-break-inside: avoid;
|
| 940 |
overflow-wrap: break-word;
|
| 941 |
line-height: 1.1;
|
|
@@ -1180,8 +1213,8 @@ def combine_schedules(provider_info_file, provider_files, ma_files, start_date,
|
|
| 1180 |
day_html += '<div class="event"><strong>Providers:</strong><br>'
|
| 1181 |
missing_ages, full_age_providers, under_18_providers, over_18_providers, only_25_plus_providers = check_age_coverage(providers_df, loc_provider_info_df, location, pd.to_datetime(date_key, format='%m/%d/%y'))
|
| 1182 |
for _, row in loc_providers_df.iterrows():
|
| 1183 |
-
|
| 1184 |
-
display_name =
|
| 1185 |
time_str = get_time_string(row)
|
| 1186 |
style = "font-size: 7pt; margin: 1mm; line-height: 1.1;"
|
| 1187 |
if row['Name'] in full_age_providers:
|
|
@@ -1552,23 +1585,27 @@ def combine_schedules(provider_info_file, provider_files, ma_files, start_date,
|
|
| 1552 |
shutil.copy(output_pdf_file, final_output_pdf)
|
| 1553 |
return html_content, final_output_file, final_output_pdf
|
| 1554 |
|
| 1555 |
-
|
| 1556 |
-
|
| 1557 |
-
|
| 1558 |
-
def check_password(input_password):
|
| 1559 |
-
correct_password = "alivio0000" # Change this!
|
| 1560 |
-
|
| 1561 |
-
if input_password == correct_password:
|
| 1562 |
-
return gr.update(visible=False), gr.update(visible=True), "Access granted!"
|
| 1563 |
else:
|
| 1564 |
return gr.update(visible=True), gr.update(visible=False), "Incorrect password."
|
| 1565 |
|
| 1566 |
def create_interface():
|
| 1567 |
-
with gr.Blocks(title="Alivio Schedule
|
| 1568 |
-
gr.Markdown("# Alivio Schedule
|
| 1569 |
-
gr.Markdown("
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1570 |
with gr.Column(visible=True) as password_section:
|
| 1571 |
-
|
|
|
|
| 1572 |
password_feedback = gr.Textbox(label="Status", interactive=False)
|
| 1573 |
password_button = gr.Button("Submit")
|
| 1574 |
with gr.Column(visible=False) as app_section:
|
|
@@ -1588,8 +1625,8 @@ def create_interface():
|
|
| 1588 |
start_date = gr.Textbox(label="Start Date (e.g., 06/02/25 for June 2, 2025)", placeholder="e.g., 06/02/25")
|
| 1589 |
end_date = gr.Textbox(label="End Date (e.g., 07/05/25 for July 5, 2025)", placeholder="e.g., 07/05/25")
|
| 1590 |
check_age_coverage = gr.Checkbox(label="Age Coverage Check", value=False)
|
| 1591 |
-
check_location_conflicts = gr.Checkbox(label="Provider Location Conflict Check", value=True)
|
| 1592 |
check_operation_coverage = gr.Checkbox(label="Operation Hours Check", value=False)
|
|
|
|
| 1593 |
check_ma_mismatch = gr.Checkbox(label="Staffing Ratio Check", value=False)
|
| 1594 |
show_weekly_hours = gr.Checkbox(label="Provider Hours Summary", value=False)
|
| 1595 |
location_selector = gr.CheckboxGroup(label="Select Locations to Display", choices=AVAILABLE_LOCATIONS, value=['All Locations'])
|
|
@@ -1608,9 +1645,10 @@ def create_interface():
|
|
| 1608 |
inputs=[provider_info_file, provider_files, ma_files, start_date, end_date, check_age_coverage, check_location_conflicts, check_operation_coverage, check_ma_mismatch, show_weekly_hours, location_selector],
|
| 1609 |
outputs=[output, download_html, download_pdf]
|
| 1610 |
)
|
| 1611 |
-
password_button.click(fn=check_password, inputs=
|
| 1612 |
return demo
|
| 1613 |
|
| 1614 |
if __name__ == "__main__":
|
| 1615 |
demo = create_interface()
|
| 1616 |
-
demo.launch()
|
|
|
|
|
|
| 1 |
+
# 0815 Version.
|
| 2 |
+
|
| 3 |
+
# Imports.
|
| 4 |
import gradio as gr
|
| 5 |
import pandas as pd
|
| 6 |
from datetime import datetime, time
|
|
|
|
| 12 |
import json
|
| 13 |
from weasyprint import HTML, CSS
|
| 14 |
import warnings
|
|
|
|
| 15 |
|
| 16 |
warnings.filterwarnings("ignore")
|
| 17 |
|
|
|
|
|
|
|
|
|
|
| 18 |
UPLOAD_DIR = "Uploads"
|
| 19 |
UPLOAD_TIMES_FILE = os.path.join(UPLOAD_DIR, "upload_times.json")
|
| 20 |
CHICAGO_TZ = ZoneInfo("America/Chicago")
|
| 21 |
|
|
|
|
| 22 |
if os.path.exists(UPLOAD_DIR) and not os.path.isdir(UPLOAD_DIR):
|
| 23 |
+
raise FileExistsError(f"Error: '{UPLOAD_DIR}' exists as a file, not a directory. Please rename or remove the 'Uploads' file and try again.")
|
| 24 |
+
os.makedirs(UPLOAD_DIR, exist_ok=True)
|
| 25 |
|
| 26 |
AGE_LST = ["Newborn-5mo", "6mo-9yo", "10-17yo", "18-20yo", "21-24yo", "25+yo"]
|
| 27 |
OFF_LST = ['OFF', 'VACATION', 'FMLA', 'ADMIN', 'PAID_LEAVE', 'CME', 'TEACHING', 'SICK', 'HOLIDAY']
|
| 28 |
UNDER_18G = ["Newborn-5mo", "6mo-9yo", "10-17yo"]
|
| 29 |
OVER_18G = ["18-20yo", "21-24yo", "25+yo"]
|
| 30 |
+
AVAILABLE_LOCATIONS = ['Berwyn', 'Juarez', 'LVHS', 'Morgan', 'Orozco', 'Western', 'Urgent Care', 'Psych', "OB/Gynecology", 'All Locations']
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
NO_AGE_CHECK_LOCATIONS = ['Juarez', 'LVHS', 'Orozco', 'Urgent Care', 'Psych', "OB/Gynecology"]
|
| 32 |
+
NO_OPERATION_CHECK_LOCATIONS = ['Juarez', 'LVHS', 'Orozco', 'Urgent Care', 'Psych', "OB/Gynecology"]
|
| 33 |
+
LOCATION_MAP = {'B': 'Berwyn', 'J': 'Juarez', 'L': 'LVHS', 'M': 'Morgan', 'O': 'Orozco', 'W': 'Western', 'UC': 'Urgent Care',
|
| 34 |
+
'PSY/M': 'Psych', 'PSY/B': 'Psych', "OB/B": "OB/Gynecology", "OB/M": "OB/Gynecology", "OB/W": "OB/Gynecology", "OB": "OB/Gynecology"}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
def get_time_string(row):
    """Build the display string for one schedule row: a time range, a note, or "OFF".

    Expects a Series/dict with 'Location', 'Start_Time', 'End_Time' and
    optionally 'Note'. Start/End times are datetime.time-like (must support
    strftime).
    """
    # OB/Gynecology rows with no times show their note verbatim (case preserved).
    if row['Location'] == "OB/Gynecology" and pd.isna(row['Start_Time']) and pd.isna(row['End_Time']) and pd.notna(row.get('Note')):
        note = row['Note'].strip()
        return note
    # No times but a note present: 'SCHOOL CLOSED' renders as OFF; recognized
    # off-codes (OFF_LST) pass through with the note's original casing.
    if pd.isna(row['Start_Time']) and pd.isna(row['End_Time']) and pd.notna(row.get('Note')):
        note = row['Note'].strip().upper()
        if note == 'SCHOOL CLOSED':
            return "OFF"
        if note in OFF_LST:
            return row['Note']
    # Both times present: "HH:MM - HH:MM", appending the note in parentheses
    # unless it is 'SCHOOL CLOSED' (suppressed for timed rows).
    if pd.notna(row['Start_Time']) and pd.notna(row['End_Time']):
        time_str = f"{row['Start_Time'].strftime('%H:%M')} - {row['End_Time'].strftime('%H:%M')}"
        if pd.notna(row.get('Note')) and row['Note'].strip() != '':
            note = row['Note'].strip().upper()
            if note == 'SCHOOL CLOSED':
                return time_str
            return f"{time_str} ({row['Note']})"
        return time_str
    # Anything else (missing times with an unrecognized or absent note) is OFF.
    return "OFF"
|
| 55 |
|
| 56 |
def parse_date(val):
    """Coerce *val* (Excel serial number, string, or datetime-like) to a date.

    Handles pandas missing values, Excel day counts (days since 1899-12-30),
    a few explicit string formats, and already-parsed datetime objects.
    Returns None when the value is missing, unsupported, or unparseable.
    """
    if pd.isna(val) or val is None:
        return None
    try:
        if isinstance(val, (pd.Timestamp, datetime)):
            # Already datetime-like; return unchanged.
            return val
        if isinstance(val, (int, float)):
            # Excel stores dates as a day count from its 1899-12-30 epoch.
            return pd.Timestamp('1899-12-30') + pd.Timedelta(days=val)
        if isinstance(val, str):
            # Try the formats this app actually uses before the general parser.
            for fmt in ('%m/%d/%y', '%m/%d/%Y', '%Y-%m-%d'):
                try:
                    return pd.to_datetime(val, format=fmt)
                except ValueError:
                    pass
            # Fallback: let pandas infer the format.
            return pd.to_datetime(val)
        # Unsupported type (e.g. list, dict): treat as missing.
        return None
    except (ValueError, TypeError) as e:
        print(f"Date parsing error for value {val}: {str(e)}")
        return None
|
| 78 |
|
| 79 |
+
def validate_excel_file(file_path, expected_columns):
|
|
|
|
|
|
|
|
|
|
| 80 |
try:
|
| 81 |
+
all_sheets = pd.read_excel(file_path, engine='openpyxl', sheet_name=None)
|
| 82 |
if not all_sheets:
|
| 83 |
+
return None, "No sheets found in the Excel file!"
|
| 84 |
+
|
| 85 |
+
combined_dfs = []
|
| 86 |
+
|
| 87 |
for sheet_name, df in all_sheets.items():
|
| 88 |
print(f"Processing sheet: {sheet_name}")
|
| 89 |
base_columns = ['Name', 'Location']
|
| 90 |
if not all(col in df.columns for col in base_columns):
|
| 91 |
return None, f"Missing required columns in sheet '{sheet_name}'. Expected at least: {base_columns}"
|
| 92 |
+
|
| 93 |
if 'Start_Time1' not in df.columns:
|
| 94 |
return None, f"No valid time columns (e.g., Start_Time1) found in sheet '{sheet_name}'!"
|
| 95 |
+
|
| 96 |
num_days = sum(1 for col in df.columns if col.startswith('Start_Time'))
|
| 97 |
+
|
| 98 |
week_rows = df[df['Name'].str.startswith('Week', na=False)].index.tolist()
|
| 99 |
if not week_rows:
|
| 100 |
print(f"No week rows found in sheet '{sheet_name}'")
|
| 101 |
continue
|
| 102 |
+
|
| 103 |
for week_idx in week_rows:
|
| 104 |
dates = []
|
| 105 |
for day in range(1, num_days + 1):
|
|
|
|
| 111 |
print(f"Parsed date for {col} in sheet '{sheet_name}': {parsed_date}")
|
| 112 |
else:
|
| 113 |
dates.append(None)
|
| 114 |
+
|
| 115 |
next_week = next((idx for idx in week_rows if idx > week_idx), len(df))
|
| 116 |
provider_df = df.loc[week_idx + 1: next_week - 1]
|
| 117 |
provider_df = provider_df[~provider_df['Name'].eq('Name')]
|
| 118 |
+
|
| 119 |
if provider_df.empty:
|
| 120 |
print(f"No provider data found for week starting at index {week_idx} in sheet '{sheet_name}'")
|
| 121 |
continue
|
| 122 |
+
|
| 123 |
temp_dfs = []
|
| 124 |
for day in range(1, num_days + 1):
|
| 125 |
if day > len(dates) or dates[day - 1] is None:
|
| 126 |
continue
|
| 127 |
+
|
| 128 |
start_col = f'Start_Time{day}'
|
| 129 |
end_col = f'End_Time{day}'
|
| 130 |
note_col = f'Note{day}'
|
| 131 |
+
|
| 132 |
if start_col not in df.columns or end_col not in df.columns:
|
| 133 |
continue
|
| 134 |
+
|
| 135 |
temp = provider_df[['Name', 'Location', start_col, end_col, note_col]].copy()
|
| 136 |
temp['Date'] = dates[day - 1]
|
| 137 |
temp = temp.rename(columns={
|
|
|
|
| 140 |
note_col: 'Note'
|
| 141 |
})
|
| 142 |
temp_dfs.append(temp)
|
| 143 |
+
|
| 144 |
if temp_dfs:
|
| 145 |
sheet_week_df = pd.concat(temp_dfs, ignore_index=True)
|
| 146 |
sheet_week_df = sheet_week_df.dropna(subset=['Name'])
|
| 147 |
sheet_week_df['Location'] = sheet_week_df['Location'].map(lambda x: LOCATION_MAP.get(x, x) if pd.notna(x) else x)
|
| 148 |
+
|
| 149 |
for time_col in ['Start_Time', 'End_Time']:
|
| 150 |
if time_col in sheet_week_df.columns:
|
| 151 |
sheet_week_df[time_col] = sheet_week_df[time_col].apply(parse_time)
|
| 152 |
+
|
| 153 |
+
combined_dfs.append(sheet_week_df)
|
| 154 |
+
|
| 155 |
+
if not combined_dfs:
|
| 156 |
return None, "No valid data found across all sheets! Check date formats and ensure data exists for the specified range."
|
| 157 |
+
final_df = pd.concat(combined_dfs, ignore_index=True)
|
| 158 |
final_df = final_df.drop_duplicates()
|
| 159 |
final_df = final_df.dropna(subset=["Date"])
|
| 160 |
final_df = final_df[final_df["Location"] != "Location"]
|
| 161 |
final_df = final_df[final_df["Name"] != "Name"]
|
| 162 |
final_df["Date"] = pd.to_datetime(final_df["Date"])
|
| 163 |
+
|
| 164 |
if final_df.empty:
|
| 165 |
return None, "No valid data after filtering! Ensure the date range matches the data in the Excel files."
|
| 166 |
+
|
| 167 |
print(f"Final DataFrame shape: {final_df.shape}")
|
| 168 |
print(f"Available dates: {final_df['Date'].unique()}")
|
| 169 |
return final_df, None
|
| 170 |
except Exception as e:
|
| 171 |
return None, f"Error reading Excel: {str(e)}!"
|
| 172 |
|
| 173 |
+
def parse_time(value):
    """Coerce a spreadsheet cell value to a datetime.time.

    Accepts datetime/time objects, 'HH:MM:SS' or 'HH:MM' strings, and Excel
    fraction-of-a-day numbers (e.g. 0.5 -> 12:00). Returns None for blanks,
    off-markers, or anything unparseable.
    """
    # Sentinel strings and pandas missing values mean "no time scheduled".
    if value in ["nan", "NaT", "", "OFF"] or pd.isna(value):
        return None
    try:
        if isinstance(value, (datetime, time)):
            # Already a time (or a datetime we can take the time from).
            return value if isinstance(value, time) else value.time()
        try:
            return pd.to_datetime(value, format='%H:%M:%S').time()
        except ValueError:
            try:
                return pd.to_datetime(value, format='%H:%M').time()
            except ValueError:
                try:
                    # Excel fraction of a day: scale to hours, then split into
                    # hour and minute components.
                    hours = float(value) * 24
                    if hours < 0 or hours > 24:
                        return None
                    hour = int(hours)
                    minute = int((hours - hour) * 60)  # truncates, does not round
                    if hour >= 24 or minute >= 60:
                        return None
                    return time(hour, minute)
                except (ValueError, TypeError):
                    return None
    except (ValueError, TypeError):
        # Safety net for unexpected value types in the branches above.
        return None
|
| 198 |
+
|
| 199 |
+
def validate_provider_info(file_path):
|
| 200 |
try:
|
| 201 |
+
df = pd.read_excel(file_path, engine='openpyxl')
|
| 202 |
expected_columns = ["Provider", "Last_Name", "Location"] + AGE_LST
|
| 203 |
if not all(col in df.columns for col in expected_columns):
|
| 204 |
return None, f"Invalid columns in Provider_Info. Expected: {expected_columns}!"
|
|
|
|
| 210 |
except Exception as e:
|
| 211 |
return None, f"Error reading Provider_Info Excel: {str(e)}!"
|
| 212 |
|
|
|
|
|
|
|
|
|
|
| 213 |
def save_files(file_list):
|
| 214 |
if not file_list:
|
| 215 |
return update_file_display()
|
|
|
|
| 262 |
json.dump(upload_times, f, indent=2)
|
| 263 |
return update_file_display()
|
| 264 |
|
|
|
|
|
|
|
|
|
|
| 265 |
def check_age_coverage(providers_df, provider_info_df, location, date):
|
| 266 |
date_key = pd.to_datetime(date).strftime('%m/%d/%y')
|
| 267 |
providers_on_date = providers_df[
|
|
|
|
| 354 |
conflicts.append((provider, conflict_locations, 'conflict-warning', f'Provider {provider} scheduled at: {", ".join(conflict_locations)}'))
|
| 355 |
return conflicts
|
| 356 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 357 |
def check_operation_time_coverage(providers_df, date, location):
|
| 358 |
date_key = pd.to_datetime(date).strftime('%m/%d/%y')
|
| 359 |
weekday = pd.to_datetime(date).weekday()
|
| 360 |
+
|
| 361 |
+
if location in ['Berwyn', 'Morgan', 'Western']:
|
| 362 |
+
if weekday in [0, 1, 3, 4]:
|
| 363 |
+
clinic_start = time(8, 30)
|
| 364 |
+
clinic_end = time(17, 30)
|
| 365 |
+
break_times = [(time(12, 30), time(13, 30))]
|
| 366 |
+
elif weekday == 2:
|
| 367 |
+
clinic_start = time(13, 0)
|
| 368 |
+
clinic_end = time(20, 0)
|
| 369 |
+
break_times = [(time(16, 0), time(17, 0))]
|
| 370 |
+
elif weekday == 5:
|
| 371 |
+
clinic_start = time(8, 30)
|
| 372 |
+
clinic_end = time(15, 0)
|
| 373 |
+
break_times = [(time(11, 30), time(12, 00))]
|
| 374 |
+
else:
|
| 375 |
+
return []
|
| 376 |
+
elif location == 'Urgent Care':
|
| 377 |
+
if weekday in [0, 1, 3, 4]:
|
| 378 |
+
clinic_start = time(9, 0)
|
| 379 |
+
clinic_end = time(18, 0)
|
| 380 |
+
break_times = [(time(13, 0), time(14, 0))]
|
| 381 |
+
elif weekday == 2:
|
| 382 |
+
clinic_start = time(9, 0)
|
| 383 |
+
clinic_end = time(18, 0)
|
| 384 |
+
break_times = [(time(13, 0), time(14, 0))]
|
| 385 |
+
elif weekday == 5:
|
| 386 |
+
clinic_start = time(9, 0)
|
| 387 |
+
clinic_end = time(13, 30)
|
| 388 |
+
break_times = []
|
| 389 |
+
else:
|
| 390 |
+
return []
|
| 391 |
+
elif location == 'Juarez':
|
| 392 |
+
if weekday in [0, 1, 2, 3, 4]:
|
| 393 |
+
clinic_start = time(8, 30)
|
| 394 |
+
clinic_end = time(16, 0)
|
| 395 |
+
break_times = [(time(13, 0), time(14, 0))]
|
| 396 |
+
else:
|
| 397 |
+
return []
|
| 398 |
+
elif location == 'Orozco':
|
| 399 |
+
if weekday in [0, 1, 2, 3, 4]:
|
| 400 |
+
clinic_start = time(8, 0)
|
| 401 |
+
clinic_end = time(16, 30)
|
| 402 |
+
break_times = []
|
| 403 |
+
else:
|
| 404 |
+
return []
|
| 405 |
+
elif location == 'LVHS':
|
| 406 |
+
if weekday in [0, 1, 2, 3]:
|
| 407 |
+
clinic_start = time(8, 30)
|
| 408 |
+
clinic_end = time(16, 0)
|
| 409 |
+
break_times = [(time(12, 0), time(13, 0))]
|
| 410 |
+
elif weekday == 4:
|
| 411 |
+
clinic_start = time(12, 0)
|
| 412 |
+
clinic_end = time(13, 0)
|
| 413 |
+
break_times = []
|
| 414 |
+
else:
|
| 415 |
+
return []
|
| 416 |
+
elif location == 'Psych':
|
| 417 |
+
providers_on_date = providers_df[
|
| 418 |
+
(providers_df['Date'] == date_key) &
|
| 419 |
+
(providers_df['Location'] == location) &
|
| 420 |
+
(providers_df['Start_Time'].notna()) &
|
| 421 |
+
(providers_df['End_Time'].notna()) &
|
| 422 |
+
(~providers_df['Note'].str.upper().fillna('').isin(OFF_LST))
|
| 423 |
+
]
|
| 424 |
+
if providers_on_date.empty:
|
| 425 |
+
return []
|
| 426 |
+
if weekday in [0, 1, 3, 4]:
|
| 427 |
+
break_times = [(time(12, 30), time(13, 30))]
|
| 428 |
+
elif weekday == 2:
|
| 429 |
+
break_times = [(time(16, 0), time(17, 0))]
|
| 430 |
+
elif weekday == 5:
|
| 431 |
+
break_times = []
|
| 432 |
+
else:
|
| 433 |
+
return []
|
| 434 |
+
start_times = [row['Start_Time'] for _, row in providers_on_date.iterrows() if row['Start_Time']]
|
| 435 |
+
end_times = [row['End_Time'] for _, row in providers_on_date.iterrows() if row['End_Time']]
|
| 436 |
+
if not start_times or not end_times:
|
| 437 |
+
return []
|
| 438 |
+
clinic_start = min(start_times)
|
| 439 |
+
clinic_end = max(end_times)
|
| 440 |
+
else:
|
| 441 |
return []
|
| 442 |
+
|
| 443 |
providers_on_date = providers_df[
|
| 444 |
(providers_df['Date'] == date_key) &
|
| 445 |
(providers_df['Location'] == location) &
|
|
|
|
| 457 |
if current_time < clinic_end:
|
| 458 |
gaps.append(f"{current_time.strftime('%H:%M')} - {clinic_end.strftime('%H:%M')}")
|
| 459 |
return gaps if gaps else []
|
| 460 |
+
|
| 461 |
intervals = []
|
| 462 |
for _, row in providers_on_date.iterrows():
|
| 463 |
start = row['Start_Time']
|
| 464 |
end = row['End_Time']
|
| 465 |
if start and end:
|
| 466 |
intervals.append((start, end))
|
| 467 |
+
|
| 468 |
intervals.sort(key=lambda x: x[0])
|
| 469 |
merged_intervals = []
|
| 470 |
current_start = None
|
|
|
|
| 481 |
current_end = end
|
| 482 |
if current_start is not None:
|
| 483 |
merged_intervals.append((current_start, current_end))
|
| 484 |
+
|
| 485 |
operational_intervals = []
|
| 486 |
for start, end in merged_intervals:
|
| 487 |
current_start = start
|
|
|
|
| 492 |
current_start = max(current_start, break_end)
|
| 493 |
if current_start < end:
|
| 494 |
operational_intervals.append((current_start, end))
|
| 495 |
+
|
| 496 |
gaps = []
|
| 497 |
current_time = clinic_start
|
| 498 |
for break_start, break_end in break_times:
|
|
|
|
| 510 |
current_time = max(current_time, end)
|
| 511 |
if current_time < clinic_end:
|
| 512 |
gaps.append(f"{current_time.strftime('%H:%M')} - {clinic_end.strftime('%H:%M')}")
|
| 513 |
+
|
| 514 |
return gaps
|
| 515 |
|
|
|
|
|
|
|
|
|
|
| 516 |
def calculate_weekly_hours(providers_df, provider_info_df, start_date, end_date, locations):
|
| 517 |
weekly_hours = {}
|
| 518 |
weekly_totals = {}
|
| 519 |
current_date = start_date
|
| 520 |
week_number = 1
|
| 521 |
+
week_start = start_date
|
| 522 |
while current_date <= end_date:
|
| 523 |
if current_date.weekday() == 6:
|
| 524 |
current_date += pd.Timedelta(days=1)
|
| 525 |
continue
|
| 526 |
if current_date.weekday() == 0 and current_date != start_date:
|
| 527 |
week_number += 1
|
| 528 |
+
week_start = current_date
|
| 529 |
date_key = current_date.strftime('%m/%d/%y')
|
| 530 |
weekday = current_date.weekday()
|
| 531 |
+
|
| 532 |
for location in sorted(locations):
|
| 533 |
+
if location in ['Berwyn', 'Morgan', 'Western']:
|
| 534 |
+
if weekday in [0, 1, 3, 4]:
|
| 535 |
+
clinic_start = time(8, 30)
|
| 536 |
+
clinic_end = time(17, 30)
|
| 537 |
+
break_times = [(time(12, 30), time(13, 30))]
|
| 538 |
+
elif weekday == 2:
|
| 539 |
+
clinic_start = time(13, 0)
|
| 540 |
+
clinic_end = time(20, 0)
|
| 541 |
+
break_times = [(time(16, 0), time(17, 0))]
|
| 542 |
+
elif weekday == 5:
|
| 543 |
+
clinic_start = time(8, 30)
|
| 544 |
+
clinic_end = time(15, 0)
|
| 545 |
+
break_times = [(time(11, 30), time(12, 00))]
|
| 546 |
+
else:
|
| 547 |
+
break_times = []
|
| 548 |
+
continue
|
| 549 |
+
elif location == 'Urgent Care':
|
| 550 |
+
if weekday in [0, 1, 3, 4]:
|
| 551 |
+
clinic_start = time(9, 0)
|
| 552 |
+
clinic_end = time(18, 0)
|
| 553 |
+
break_times = [(time(13, 0), time(14, 0))]
|
| 554 |
+
elif weekday == 2:
|
| 555 |
+
clinic_start = time(9, 0)
|
| 556 |
+
clinic_end = time(18, 0)
|
| 557 |
+
break_times = [(time(13, 0), time(14, 0))]
|
| 558 |
+
elif weekday == 5:
|
| 559 |
+
clinic_start = time(9, 0)
|
| 560 |
+
clinic_end = time(13, 30)
|
| 561 |
+
break_times = []
|
| 562 |
+
else:
|
| 563 |
+
break_times = []
|
| 564 |
+
continue
|
| 565 |
+
elif location == 'Juarez':
|
| 566 |
+
if weekday in [0, 1, 2, 3, 4]:
|
| 567 |
+
clinic_start = time(8, 30)
|
| 568 |
+
clinic_end = time(16, 0)
|
| 569 |
+
break_times = [(time(13, 0), time(14, 0))]
|
| 570 |
+
else:
|
| 571 |
+
break_times = []
|
| 572 |
+
continue
|
| 573 |
+
elif location == 'Orozco':
|
| 574 |
+
if weekday in [0, 1, 2, 3, 4]:
|
| 575 |
+
clinic_start = time(8, 0)
|
| 576 |
+
clinic_end = time(16, 30)
|
| 577 |
+
break_times = []
|
| 578 |
+
else:
|
| 579 |
+
break_times = []
|
| 580 |
+
continue
|
| 581 |
+
elif location == 'LVHS':
|
| 582 |
+
if weekday in [0, 1, 2, 3]:
|
| 583 |
+
clinic_start = time(8, 30)
|
| 584 |
+
clinic_end = time(16, 0)
|
| 585 |
+
break_times = [(time(12, 0), time(13, 0))]
|
| 586 |
+
elif weekday == 4:
|
| 587 |
+
clinic_start = time(12, 0)
|
| 588 |
+
clinic_end = time(13, 0)
|
| 589 |
+
break_times = []
|
| 590 |
+
else:
|
| 591 |
+
break_times = []
|
| 592 |
+
continue
|
| 593 |
+
elif location == 'Psych':
|
| 594 |
+
loc_providers_df = providers_df[
|
| 595 |
+
(providers_df['Date'] == date_key) &
|
| 596 |
+
(providers_df['Location'] == location) &
|
| 597 |
+
(providers_df['Start_Time'].notna()) &
|
| 598 |
+
(providers_df['End_Time'].notna()) &
|
| 599 |
+
(~providers_df['Note'].str.upper().fillna('').isin(OFF_LST))
|
| 600 |
+
]
|
| 601 |
+
if loc_providers_df.empty:
|
| 602 |
+
break_times = []
|
| 603 |
+
continue
|
| 604 |
+
if weekday in [0, 1, 3, 4]:
|
| 605 |
+
break_times = [(time(12, 30), time(13, 30))]
|
| 606 |
+
elif weekday == 2:
|
| 607 |
+
break_times = [(time(16, 0), time(17, 0))]
|
| 608 |
+
elif weekday == 5:
|
| 609 |
+
break_times = []
|
| 610 |
+
else:
|
| 611 |
+
break_times = []
|
| 612 |
+
continue
|
| 613 |
+
start_times = [row['Start_Time'] for _, row in loc_providers_df.iterrows() if row['Start_Time']]
|
| 614 |
+
end_times = [row['End_Time'] for _, row in loc_providers_df.iterrows() if row['End_Time']]
|
| 615 |
+
if not start_times or not end_times:
|
| 616 |
+
break_times = []
|
| 617 |
+
continue
|
| 618 |
+
clinic_start = min(start_times)
|
| 619 |
+
clinic_end = max(end_times)
|
| 620 |
+
else:
|
| 621 |
+
break_times = []
|
| 622 |
continue
|
| 623 |
+
|
| 624 |
loc_providers_df = providers_df[
|
| 625 |
(providers_df['Date'] == date_key) &
|
| 626 |
(providers_df['Location'] == location) &
|
|
|
|
| 645 |
weekly_hours[week_key][location][display_name] = 0.0
|
| 646 |
if display_name not in weekly_totals[week_key]:
|
| 647 |
weekly_totals[week_key][display_name] = 0.0
|
| 648 |
+
|
| 649 |
start_dt = datetime.combine(current_date, start_time)
|
| 650 |
end_dt = datetime.combine(current_date, end_time)
|
| 651 |
if end_dt < start_dt:
|
| 652 |
end_dt += pd.Timedelta(days=1)
|
| 653 |
total_hours = (end_dt - start_dt).total_seconds() / 3600
|
| 654 |
+
|
| 655 |
+
# Add 1 hour if the note contains "6-7/TELE"
|
| 656 |
if note == '6-7/TELE':
|
| 657 |
total_hours += 1.0
|
| 658 |
+
|
| 659 |
+
# Newly added.
|
| 660 |
+
# Check if clinical hours are less than 5 hours to decide on break time deduction
|
| 661 |
clinical_hours = total_hours
|
| 662 |
apply_break = clinical_hours >= 5.0
|
| 663 |
+
|
| 664 |
if apply_break:
|
| 665 |
for break_start, break_end in break_times:
|
| 666 |
break_start_dt = datetime.combine(current_date, break_start)
|
|
|
|
| 672 |
if overlap_start < overlap_end:
|
| 673 |
overlap_hours = (overlap_end - overlap_start).total_seconds() / 3600
|
| 674 |
total_hours -= overlap_hours
|
| 675 |
+
|
| 676 |
clinic_start_dt = datetime.combine(current_date, clinic_start)
|
| 677 |
clinic_end_dt = datetime.combine(current_date, clinic_end)
|
| 678 |
if clinic_end_dt < clinic_start_dt:
|
|
|
|
| 683 |
total_hours = min(total_hours, (overlap_end - overlap_start).total_seconds() / 3600)
|
| 684 |
else:
|
| 685 |
total_hours = 0.0
|
| 686 |
+
|
| 687 |
weekly_hours[week_key][location][display_name] += max(total_hours, 0.0)
|
| 688 |
weekly_totals[week_key][display_name] += max(total_hours, 0.0)
|
| 689 |
+
|
| 690 |
current_date += pd.Timedelta(days=1)
|
| 691 |
return weekly_hours, weekly_totals
|
| 692 |
|
|
|
|
|
|
|
|
|
|
| 693 |
def calculate_max_entries_per_day(providers_df, ma_df, start_obj, end_obj, all_locations, check_age_coverage_flag, check_location_conflicts_flag, check_operation_coverage_flag, check_ma_mismatch_flag, num_provider_files):
|
| 694 |
week_entries = []
|
| 695 |
current_date = start_obj
|
|
|
|
| 754 |
ma_on_day['End_Time'].notna() &
|
| 755 |
~ma_on_day['Note'].str.upper().fillna('').isin(OFF_LST)
|
| 756 |
]) if not ma_on_day.empty else 0
|
| 757 |
+
total_entries_day += 1 # MAs header
|
| 758 |
+
total_entries_day += ma_count # Each MA
|
| 759 |
if not (ma_count == working_providers or ma_count == working_providers + 1):
|
| 760 |
+
total_entries_day += 1 # Warning
|
| 761 |
has_providers = True
|
| 762 |
if perform_overall_check and has_providers:
|
| 763 |
total_entries_day += 1
|
|
|
|
| 772 |
week_entries.append(sum(current_week))
|
| 773 |
return week_entries
|
| 774 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 775 |
def combine_schedules(provider_info_file, provider_files, ma_files, start_date, end_date, check_age_coverage_flag, check_location_conflicts_flag, check_operation_coverage_flag, check_ma_mismatch_flag, show_weekly_hours, selected_locations):
|
| 776 |
save_files([provider_info_file] if provider_info_file else [])
|
| 777 |
save_files(provider_files if provider_files else [])
|
|
|
|
| 855 |
day_header_height = 10
|
| 856 |
entry_height = 10
|
| 857 |
num_locations = len(display_locations)
|
| 858 |
+
num_provider_files = len(providers_dfs)
|
| 859 |
buffer_height = 200 + (num_locations * 20) + (num_provider_files * 25)
|
| 860 |
week_entries = calculate_max_entries_per_day(providers_df, ma_df, start_obj, end_obj, display_locations, check_age_coverage_flag, check_location_conflicts_flag, check_operation_coverage_flag, check_ma_mismatch_flag, num_provider_files)
|
| 861 |
if show_weekly_hours:
|
|
|
|
| 865 |
loc_providers = providers_df[providers_df['Location'] == location]['Name'].unique()
|
| 866 |
loc_provider_info = provider_info_df[provider_info_df['Provider'].isin(loc_providers)]
|
| 867 |
providers_per_location[location] = sorted(
|
| 868 |
+
[provider_info_df[provider_info_df['Provider'] == p]['Last_Name'].iloc[0] if not provider_info_df[provider_info_df['Provider'] == p].empty else p for p in loc_providers]
|
| 869 |
)
|
| 870 |
max_providers = max([len(providers) for providers in providers_per_location.values()], default=0)
|
| 871 |
hours_table_height = (len(weekly_hours) * (max_providers + 2) * entry_height) + 20
|
|
|
|
| 876 |
hours_table_height = 0
|
| 877 |
a4_height = 842
|
| 878 |
bmw_locations = [loc for loc in display_locations if loc in ['Berwyn', 'Morgan', 'Western']]
|
| 879 |
+
perform_overall_check = check_age_coverage_flag and len(providers_dfs) > 1 and len(bmw_locations) > 1
|
| 880 |
+
perform_conflict_check = check_location_conflicts_flag and len(providers_dfs) > 1 and len(display_locations) > 1
|
| 881 |
locations_str = ", ".join(sorted(display_locations)) if display_locations else "No Locations"
|
| 882 |
generation_time = datetime.now(CHICAGO_TZ).strftime('%I:%M %p CDT, %B %d, %Y')
|
| 883 |
html_content = f"""
|
|
|
|
| 968 |
flex-direction: column;
|
| 969 |
align-items: flex-start;
|
| 970 |
break-inside: avoid;
|
| 971 |
+
break-inside: avoid;
|
| 972 |
page-break-inside: avoid;
|
| 973 |
overflow-wrap: break-word;
|
| 974 |
line-height: 1.1;
|
|
|
|
| 1213 |
day_html += '<div class="event"><strong>Providers:</strong><br>'
|
| 1214 |
missing_ages, full_age_providers, under_18_providers, over_18_providers, only_25_plus_providers = check_age_coverage(providers_df, loc_provider_info_df, location, pd.to_datetime(date_key, format='%m/%d/%y'))
|
| 1215 |
for _, row in loc_providers_df.iterrows():
|
| 1216 |
+
provider_info_row = loc_provider_info_df[loc_provider_info_df['Provider'] == row['Name']]
|
| 1217 |
+
display_name = provider_info_row['Last_Name'].iloc[0] if not provider_info_row.empty else row['Name']
|
| 1218 |
time_str = get_time_string(row)
|
| 1219 |
style = "font-size: 7pt; margin: 1mm; line-height: 1.1;"
|
| 1220 |
if row['Name'] in full_age_providers:
|
|
|
|
| 1585 |
shutil.copy(output_pdf_file, final_output_pdf)
|
| 1586 |
return html_content, final_output_file, final_output_pdf
|
| 1587 |
|
| 1588 |
+
def check_password(pwd):
    """Gate the main app UI behind a password prompt.

    Returns a 3-tuple of Gradio updates: (password_section visibility,
    app_section visibility, feedback message).
    """
    # NOTE(review): credential is hard-coded in source; consider loading it from
    # an environment variable or secret store instead.
    if pwd == "alivio0000":
        # Correct password: hide the prompt, reveal the app, clear feedback.
        return gr.update(visible=False), gr.update(visible=True), ""
    else:
        # Wrong password: keep the prompt visible and report the failure.
        return gr.update(visible=True), gr.update(visible=False), "Incorrect password."
|
| 1593 |
|
| 1594 |
def create_interface():
|
| 1595 |
+
with gr.Blocks(title="Alivio Schedule Display") as demo:
|
| 1596 |
+
gr.Markdown("# Alivio Schedule Display")
|
| 1597 |
+
gr.Markdown("""
|
| 1598 |
+
Upload the Provider Information Excel and at least one Provider Schedule Excel file. Files are stored persistently in the 'Uploads' directory. Schedules will be generated for the selected locations (or all locations if 'All Locations' is selected) found in the uploaded provider schedule files, displayed on a single calendar.
|
| 1599 |
+
Providers are always displayed in different colors based on age coverage:
|
| 1600 |
+
- <span style="color: #ff6347;">Red</span>: Covers all age groups (Newborn-5mo, 6mo-9yo, 10-17yo, 18-20yo, 21-24yo, 25+yo).
|
| 1601 |
+
- <span style="color: #0000ff;">Blue</span>: Covers patients above 18 (18-20yo, 21-24yo, 25+yo).
|
| 1602 |
+
- <span style="color: #008000;">Green</span>: Covers patients under 18 (Newborn-5mo, 6mo-9yo, 10-17yo).
|
| 1603 |
+
- <span style="color: #8E44AD;">Purple</span>: Covers only patients above 25 (25+yo).
|
| 1604 |
+
- Black: Other coverage combinations.
|
| 1605 |
+
""")
|
| 1606 |
with gr.Column(visible=True) as password_section:
|
| 1607 |
+
gr.Markdown("## 🔐 Enter Password to Access the Main Function")
|
| 1608 |
+
password_input = gr.Textbox(label="Enter Password", type="password")
|
| 1609 |
password_feedback = gr.Textbox(label="Status", interactive=False)
|
| 1610 |
password_button = gr.Button("Submit")
|
| 1611 |
with gr.Column(visible=False) as app_section:
|
|
|
|
| 1625 |
start_date = gr.Textbox(label="Start Date (e.g., 06/02/25 for June 2, 2025)", placeholder="e.g., 06/02/25")
|
| 1626 |
end_date = gr.Textbox(label="End Date (e.g., 07/05/25 for July 5, 2025)", placeholder="e.g., 07/05/25")
|
| 1627 |
check_age_coverage = gr.Checkbox(label="Age Coverage Check", value=False)
|
|
|
|
| 1628 |
check_operation_coverage = gr.Checkbox(label="Operation Hours Check", value=False)
|
| 1629 |
+
check_location_conflicts = gr.Checkbox(label="Provider Location Conflict Check", value=True)
|
| 1630 |
check_ma_mismatch = gr.Checkbox(label="Staffing Ratio Check", value=False)
|
| 1631 |
show_weekly_hours = gr.Checkbox(label="Provider Hours Summary", value=False)
|
| 1632 |
location_selector = gr.CheckboxGroup(label="Select Locations to Display", choices=AVAILABLE_LOCATIONS, value=['All Locations'])
|
|
|
|
| 1645 |
inputs=[provider_info_file, provider_files, ma_files, start_date, end_date, check_age_coverage, check_location_conflicts, check_operation_coverage, check_ma_mismatch, show_weekly_hours, location_selector],
|
| 1646 |
outputs=[output, download_html, download_pdf]
|
| 1647 |
)
|
| 1648 |
+
password_button.click(fn=check_password, inputs=password_input, outputs=[password_section, app_section, password_feedback])
|
| 1649 |
return demo
|
| 1650 |
|
| 1651 |
if __name__ == "__main__":
|
| 1652 |
demo = create_interface()
|
| 1653 |
+
demo.launch()
|
| 1654 |
+
|