Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -15,6 +15,8 @@ from io import BytesIO
|
|
| 15 |
import base64
|
| 16 |
from reportlab.platypus import Image
|
| 17 |
import plotly.io as pio
|
|
|
|
|
|
|
| 18 |
|
| 19 |
# Load environment variables from .env file
|
| 20 |
load_dotenv()
|
|
@@ -37,6 +39,9 @@ except Exception as e:
|
|
| 37 |
logger.error(f"❌ Salesforce connection failed: {e}")
|
| 38 |
sf = None
|
| 39 |
|
|
|
|
|
|
|
|
|
|
| 40 |
def prepare_prophet_data(usage_series):
|
| 41 |
end_date = datetime.now()
|
| 42 |
start_date = end_date - timedelta(days=len(usage_series) - 1)
|
|
@@ -292,33 +297,76 @@ def upload_pdf_to_salesforce(pdf_file: BytesIO, consumable_type: str, record_id:
|
|
| 292 |
logger.error(f"Error uploading PDF to Salesforce: {str(e)}")
|
| 293 |
return None
|
| 294 |
|
| 295 |
-
def
|
| 296 |
-
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
| 301 |
-
|
| 302 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 303 |
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 307 |
st.error(error)
|
| 308 |
-
|
| 309 |
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
|
|
|
|
|
|
|
| 313 |
st.error(f"Error training model: {str(e)}")
|
| 314 |
-
|
| 315 |
|
| 316 |
-
|
| 317 |
-
|
| 318 |
-
|
| 319 |
-
|
| 320 |
-
|
| 321 |
|
|
|
|
| 322 |
st.header("Forecast Results")
|
| 323 |
col1, col2, col3 = st.columns(3)
|
| 324 |
col1.metric("7-Day Forecast", f"{forecast_7} units")
|
|
@@ -400,56 +448,164 @@ def main():
|
|
| 400 |
template='plotly_white'
|
| 401 |
)
|
| 402 |
st.plotly_chart(fig_alerts)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 403 |
|
| 404 |
-
|
| 405 |
-
|
| 406 |
-
|
| 407 |
-
|
| 408 |
-
|
| 409 |
-
|
| 410 |
-
|
| 411 |
-
|
| 412 |
-
|
| 413 |
-
|
| 414 |
-
|
| 415 |
-
|
| 416 |
-
|
| 417 |
-
|
| 418 |
-
|
| 419 |
-
|
| 420 |
-
|
| 421 |
-
|
| 422 |
-
|
| 423 |
-
|
| 424 |
-
|
| 425 |
-
|
| 426 |
-
|
| 427 |
-
|
| 428 |
-
|
| 429 |
-
|
| 430 |
-
|
| 431 |
-
|
| 432 |
-
|
| 433 |
-
|
| 434 |
-
|
| 435 |
-
|
| 436 |
-
|
| 437 |
-
|
| 438 |
-
|
| 439 |
-
|
| 440 |
-
|
| 441 |
-
|
| 442 |
-
|
| 443 |
-
else:
|
| 444 |
-
logger.error("Failed to upload PDF to Salesforce")
|
| 445 |
-
st.error("Failed to upload PDF to Salesforce")
|
| 446 |
else:
|
| 447 |
-
logger.error("Failed to
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 448 |
st.error("Failed to generate PDF")
|
| 449 |
-
|
| 450 |
-
|
|
|
|
| 451 |
st.error(f"Error saving to Salesforce: {str(e)}")
|
| 452 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 453 |
if __name__ == "__main__":
|
| 454 |
main()
|
| 455 |
sf = None
|
|
|
|
| 15 |
import base64
|
| 16 |
from reportlab.platypus import Image
|
| 17 |
import plotly.io as pio
|
| 18 |
+
import sys
|
| 19 |
+
import argparse
|
| 20 |
|
| 21 |
# Load environment variables from .env file
|
| 22 |
load_dotenv()
|
|
|
|
| 39 |
logger.error(f"❌ Salesforce connection failed: {e}")
|
| 40 |
sf = None
|
| 41 |
|
| 42 |
+
# File to store forecast data
|
| 43 |
+
DATA_FILE = "forecast_data.csv"
|
| 44 |
+
|
| 45 |
def prepare_prophet_data(usage_series):
|
| 46 |
end_date = datetime.now()
|
| 47 |
start_date = end_date - timedelta(days=len(usage_series) - 1)
|
|
|
|
| 297 |
logger.error(f"Error uploading PDF to Salesforce: {str(e)}")
|
| 298 |
return None
|
| 299 |
|
| 300 |
+
def save_forecast_data(consumable_type, usage_series, current_stock, daily_forecasts):
    """Save usage series, current stock, and daily forecasts to CSV.

    Persists one row per consumable type in DATA_FILE; any previously
    stored row for the same type is replaced by the new one.
    """
    try:
        new_row = pd.DataFrame({
            'consumable_type': [consumable_type],
            # Usage history is flattened into a single comma-separated string.
            'usage_series': [','.join(str(v) for v in usage_series)],
            'current_stock': [current_stock],
            # Forecast columns hold stringified lists (one list per row).
            'forecast_date': [daily_forecasts['ds'].astype(str).tolist()],
            'forecast_yhat': [daily_forecasts['yhat'].tolist()],
        })
        if os.path.exists(DATA_FILE):
            previous = pd.read_csv(DATA_FILE)
            # Drop the stale row for this type before appending the new one.
            kept = previous[previous['consumable_type'] != consumable_type]
            new_row = pd.concat([kept, new_row], ignore_index=True)
        new_row.to_csv(DATA_FILE, index=False)
        logger.info(f"Saved forecast data for {consumable_type} to {DATA_FILE}")
    except Exception as e:
        logger.error(f"Error saving forecast data: {str(e)}")
|
| 323 |
|
| 324 |
+
def load_forecast_data(consumable_type):
    """Load previous forecast data for a consumable type.

    Returns:
        tuple: (usage_series, current_stock, daily_forecasts) on success,
        where usage_series is a list of floats, current_stock a float, and
        daily_forecasts a DataFrame with 'ds' (datetime) and 'yhat' columns.
        (None, None, None) when the data file or the row is missing, or
        when parsing fails.
    """
    import ast  # stdlib; local import keeps the fix self-contained

    try:
        if not os.path.exists(DATA_FILE):
            logger.warning(f"No forecast data file found at {DATA_FILE}")
            return None, None, None
        df = pd.read_csv(DATA_FILE)
        row = df[df['consumable_type'] == consumable_type]
        if row.empty:
            logger.warning(f"No data found for {consumable_type} in {DATA_FILE}")
            return None, None, None
        usage_series = [float(x) for x in row['usage_series'].iloc[0].split(',')]
        current_stock = float(row['current_stock'].iloc[0])
        # The forecast columns are stored as stringified Python lists.
        # ast.literal_eval parses literals only; the previous eval() would
        # execute arbitrary code if the CSV were ever tampered with.
        forecast_dates = ast.literal_eval(row['forecast_date'].iloc[0])
        forecast_yhat = ast.literal_eval(row['forecast_yhat'].iloc[0])
        daily_forecasts = pd.DataFrame({'ds': pd.to_datetime(forecast_dates), 'yhat': forecast_yhat})
        return usage_series, current_stock, daily_forecasts
    except Exception as e:
        logger.error(f"Error loading forecast data: {str(e)}")
        return None, None, None
|
| 345 |
+
|
| 346 |
+
def process_forecast(consumable_type, usage_series, current_stock, is_automated=False):
|
| 347 |
+
"""Process forecast for a given consumable type."""
|
| 348 |
+
usage_list, error = validate_usage_series(','.join(map(str, usage_series)))
|
| 349 |
+
if error:
|
| 350 |
+
logger.error(error)
|
| 351 |
+
if not is_automated:
|
| 352 |
st.error(error)
|
| 353 |
+
return None
|
| 354 |
|
| 355 |
+
try:
|
| 356 |
+
model = train_model_with_usage(usage_list)
|
| 357 |
+
except Exception as e:
|
| 358 |
+
logger.error(f"Error training model: {str(e)}")
|
| 359 |
+
if not is_automated:
|
| 360 |
st.error(f"Error training model: {str(e)}")
|
| 361 |
+
return None
|
| 362 |
|
| 363 |
+
forecast_7 = make_forecast(model, 7)
|
| 364 |
+
forecast_14 = make_forecast(model, 14)
|
| 365 |
+
forecast_30 = make_forecast(model, 30)
|
| 366 |
+
daily_forecasts = get_daily_forecasts(model, 30)
|
| 367 |
+
reorder_date = calculate_reorder_date(model, current_stock)
|
| 368 |
|
| 369 |
+
if not is_automated:
|
| 370 |
st.header("Forecast Results")
|
| 371 |
col1, col2, col3 = st.columns(3)
|
| 372 |
col1.metric("7-Day Forecast", f"{forecast_7} units")
|
|
|
|
| 448 |
template='plotly_white'
|
| 449 |
)
|
| 450 |
st.plotly_chart(fig_alerts)
|
| 451 |
+
else:
|
| 452 |
+
alert_status = [current_stock < forecast for forecast in [forecast_7, forecast_14, forecast_30]]
|
| 453 |
+
fig_daily = go.Figure()
|
| 454 |
+
fig_daily.add_trace(go.Scatter(
|
| 455 |
+
x=daily_forecasts['ds'],
|
| 456 |
+
y=daily_forecasts['yhat'],
|
| 457 |
+
mode='lines+markers',
|
| 458 |
+
name='Daily Forecast',
|
| 459 |
+
line=dict(color='royalblue', width=2),
|
| 460 |
+
marker=dict(size=8, color='darkorange', line=dict(width=2, color='black')),
|
| 461 |
+
fill='tozeroy',
|
| 462 |
+
fillcolor='rgba(0, 176, 246, 0.2)'
|
| 463 |
+
))
|
| 464 |
+
y_values = daily_forecasts['yhat'].tolist()
|
| 465 |
+
fig_daily.update_layout(
|
| 466 |
+
title='Daily Consumable Usage Forecast (30 Days)',
|
| 467 |
+
xaxis_title='Date',
|
| 468 |
+
yaxis_title='Units',
|
| 469 |
+
template='plotly_white',
|
| 470 |
+
xaxis=dict(tickformat="%Y-%m-%d", tickangle=45, tickmode='auto', nticks=10),
|
| 471 |
+
yaxis=dict(range=[max(0, min(y_values) - 5), max(y_values) + 5], tickmode='linear', dtick=2),
|
| 472 |
+
showlegend=True,
|
| 473 |
+
legend=dict(x=0.01, y=0.99),
|
| 474 |
+
hovermode='x unified',
|
| 475 |
+
plot_bgcolor='rgba(0,0,0,0)',
|
| 476 |
+
paper_bgcolor='rgba(0,0,0,0)',
|
| 477 |
+
margin=dict(l=50, r=50, t=50, b=100)
|
| 478 |
+
)
|
| 479 |
+
alert_data = pd.DataFrame({
|
| 480 |
+
'Category': ['Current Stock', '7-Day Forecast', '14-Day Forecast', '30-Day Forecast'],
|
| 481 |
+
'Units': [current_stock, forecast_7, forecast_14, forecast_30],
|
| 482 |
+
'Alert': [False] + alert_status
|
| 483 |
+
})
|
| 484 |
+
fig_alerts = go.Figure()
|
| 485 |
+
fig_alerts.add_trace(go.Bar(
|
| 486 |
+
x=alert_data['Category'],
|
| 487 |
+
y=alert_data['Units'],
|
| 488 |
+
marker_color=['green'] + ['red' if alert else 'blue' for alert in alert_data['Alert'][1:]],
|
| 489 |
+
text=[f"🚩" if alert else "" for alert in alert_data['Alert']],
|
| 490 |
+
textposition='auto'
|
| 491 |
+
))
|
| 492 |
+
fig_alerts.update_layout(
|
| 493 |
+
title='Stock vs Forecast with Alerts (🚩 indicates low stock)',
|
| 494 |
+
xaxis_title='Category',
|
| 495 |
+
yaxis_title='Units',
|
| 496 |
+
template='plotly_white'
|
| 497 |
+
)
|
| 498 |
|
| 499 |
+
# Salesforce record creation with PDF upload
|
| 500 |
+
if sf is not None:
|
| 501 |
+
try:
|
| 502 |
+
order_suggestions_text = f"7 Days: {max(0, forecast_7 - current_stock)} units, 14 Days: {max(0, forecast_14 - current_stock)} units, 30 Days: {max(0, forecast_30 - current_stock)} units"
|
| 503 |
+
forecast_data = {
|
| 504 |
+
"Consumable Type": consumable_type,
|
| 505 |
+
"Current Stock": current_stock,
|
| 506 |
+
"7-Day Forecast": f"{forecast_7} units",
|
| 507 |
+
"14-Day Forecast": f"{forecast_14} units",
|
| 508 |
+
"30-Day Forecast": f"{forecast_30} units",
|
| 509 |
+
"Order Suggestions": order_suggestions_text,
|
| 510 |
+
"Reorder Recommendation": "Yes" if any(alert_status) else "No",
|
| 511 |
+
"Reorder Date": reorder_date if reorder_date else "Not within 30 days"
|
| 512 |
+
}
|
| 513 |
+
pdf_file = generate_forecast_pdf(forecast_data, daily_forecasts, alert_status, current_stock, forecast_7, forecast_14, forecast_30, fig_daily, fig_alerts, ','.join(map(str, usage_series)))
|
| 514 |
+
sf_data = {
|
| 515 |
+
'Consumable_Type__c': consumable_type,
|
| 516 |
+
'Forecast_Period__c': '7days',
|
| 517 |
+
'ForeCasted_Quantity__c': float(forecast_7),
|
| 518 |
+
'ForeCasted_Quantity_14days__c': float(forecast_14),
|
| 519 |
+
'ForeCasted_Quantity_30days__c': float(forecast_30),
|
| 520 |
+
'Current_Stock__c': float(current_stock),
|
| 521 |
+
'Order_Suggestions__c': order_suggestions_text,
|
| 522 |
+
'Reorder_Recommendation__c': any(alert_status),
|
| 523 |
+
'Reorder_Date__c': reorder_date,
|
| 524 |
+
'Pdf_report__c': ''
|
| 525 |
+
}
|
| 526 |
+
result = sf.Consumables_Forecaste__c.create(sf_data)
|
| 527 |
+
logger.info(f"Salesforce record created: {result}")
|
| 528 |
+
|
| 529 |
+
if pdf_file:
|
| 530 |
+
pdf_url = upload_pdf_to_salesforce(pdf_file, consumable_type, result['id'])
|
| 531 |
+
if pdf_url:
|
| 532 |
+
sf.Consumables_Forecaste__c.update(
|
| 533 |
+
result['id'],
|
| 534 |
+
{"Pdf_report__c": pdf_url}
|
| 535 |
+
)
|
| 536 |
+
logger.info(f"PDF uploaded to Salesforce: {pdf_url}")
|
| 537 |
+
logger.info(f"PDF Report generated and uploaded to Salesforce: {pdf_url}")
|
|
|
|
|
|
|
|
|
|
| 538 |
else:
|
| 539 |
+
logger.error("Failed to upload PDF to Salesforce")
|
| 540 |
+
if not is_automated:
|
| 541 |
+
st.error("Failed to upload PDF to Salesforce")
|
| 542 |
+
else:
|
| 543 |
+
logger.error("Failed to generate PDF")
|
| 544 |
+
if not is_automated:
|
| 545 |
st.error("Failed to generate PDF")
|
| 546 |
+
except Exception as e:
|
| 547 |
+
logger.error(f"Error creating Salesforce record or uploading PDF: {e}", exc_info=True)
|
| 548 |
+
if not is_automated:
|
| 549 |
st.error(f"Error saving to Salesforce: {str(e)}")
|
| 550 |
|
| 551 |
+
return daily_forecasts
|
| 552 |
+
|
| 553 |
+
def automate_daily_forecast():
    """Run daily forecast automation for all consumable types.

    For each type: load the previously saved state, roll the 60-day usage
    window forward by one day using the first day of the previous 30-day
    forecast as the new observation, decrement stock by that same forecasted
    usage, re-run the forecast, and persist the new state.
    """
    consumable_types = ['Filters', 'Reagents', 'Vials']
    for consumable_type in consumable_types:
        logger.info(f"Processing automated forecast for {consumable_type}")
        # Load previous data
        usage_series, current_stock, prev_daily_forecasts = load_forecast_data(consumable_type)

        if usage_series is None or current_stock is None or prev_daily_forecasts is None:
            logger.warning(f"No previous data for {consumable_type}. Skipping automation.")
            continue

        # First day of the previous 30-day forecast doubles as both the
        # newest usage observation and the stock draw-down for the elapsed
        # day. (The original code read iloc[0] twice under two names —
        # 'next_day_usage' and 'yesterday_usage' — but they were always the
        # same value; one read makes that explicit.)
        next_day_usage = prev_daily_forecasts['yhat'].iloc[0]
        # Slide the usage window: drop the oldest day, append the new one.
        usage_series = usage_series[1:] + [next_day_usage]
        # Stock cannot go negative.
        current_stock = max(0, current_stock - next_day_usage)

        # Process forecast
        daily_forecasts = process_forecast(consumable_type, usage_series, current_stock, is_automated=True)
        if daily_forecasts is not None:
            # Save new data
            save_forecast_data(consumable_type, usage_series, current_stock, daily_forecasts)
            logger.info(f"Completed automated forecast for {consumable_type}")
        else:
            logger.error(f"Failed to process forecast for {consumable_type}")
|
| 581 |
+
|
| 582 |
+
def main():
    """Entry point: run the automated batch mode or the interactive Streamlit UI.

    With --automated, runs the headless daily forecast for all consumable
    types and exits; otherwise renders the Streamlit input form and runs a
    forecast on demand.
    """
    parser = argparse.ArgumentParser(description="SmartLab Consumables Forecast")
    parser.add_argument('--automated', action='store_true', help="Run in automated mode")
    # parse_known_args (not parse_args) so that extra argv entries injected
    # by the Streamlit launcher don't make argparse error out and SystemExit.
    args, _ = parser.parse_known_args()

    if args.automated:
        automate_daily_forecast()
        return

    st.title("SmartLab Consumables Forecast")
    st.header("Input Parameters")

    consumable_type = st.selectbox("Consumable Type", ['Filters', 'Reagents', 'Vials'])
    usage_series = st.text_input("Last 60 Days Usage (comma-separated)", "")
    current_stock = st.number_input("Current Stock", min_value=0, value=0)

    if st.button("Generate Forecast"):
        usage_list, error = validate_usage_series(usage_series)
        if error:
            st.error(error)
            return

        daily_forecasts = process_forecast(consumable_type, usage_list, current_stock, is_automated=False)
        if daily_forecasts is not None:
            save_forecast_data(consumable_type, usage_list, current_stock, daily_forecasts)
|
| 608 |
+
|
| 609 |
if __name__ == "__main__":
|
| 610 |
main()
|
| 611 |
sf = None
|