Update app.py
app.py CHANGED
@@ -17,12 +17,11 @@ import matplotlib.pyplot as plt
 import seaborn as sns
 import numpy as np

-#
+# Hugging Face transformers pipeline import
 from transformers import pipeline

 weather_map = {"Cloudy": 0, "Rainy": 1, "Sunny": 2}

-# Load and preprocess data
 print("Loading and preprocessing data...")
 try:
     if not os.path.exists("new_delay_data.csv"):
@@ -38,19 +37,18 @@ try:
     else:
         df = pd.read_csv("new_delay_data.csv")

-    df = pd.get_dummies(df, columns=["Phase"], drop_first=True)
+    df = pd.get_dummies(df, columns=["Phase"], drop_first=True)
     df["Weather"] = df["Weather"].map(weather_map)
     df.dropna(subset=["Weather", "Absentee", "DelayLog", "Delay%"], inplace=True)

     for col in ["Phase_Framing", "Phase_Foundation"]:
         if col not in df.columns:
             df[col] = 0
-    print("Data loaded and preprocessed
+    print("Data loaded and preprocessed. Columns:", df.columns.tolist())
 except Exception as e:
     print(f"Error loading data: {e}")
     raise

-# Split features and target
 try:
     X = df[["Phase_Framing", "Phase_Foundation", "Weather", "Absentee", "DelayLog"]]
     y = df["Delay%"]
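Reviewer note: the backfill loop above exists because pd.get_dummies(..., drop_first=True) emits one column fewer than there are categories, so a CSV that happens to cover only some phases yields fewer dummy columns than the model expects. A minimal sketch of the failure mode it prevents (the two-row frame is illustrative, not from the commit):

import pandas as pd

df = pd.DataFrame({"Phase": ["Finishing", "Finishing"]})
df = pd.get_dummies(df, columns=["Phase"], drop_first=True)
print(df.columns.tolist())  # [] -- the single category was dropped entirely
for col in ["Phase_Framing", "Phase_Foundation"]:
    if col not in df.columns:
        df[col] = 0  # restore the feature columns the model was trained on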
@@ -58,7 +56,6 @@ except Exception as e:
     print(f"Error preparing features: {e}")
     raise

-# Train Linear Regression model
 print("Training model...")
 try:
     model = LinearRegression()
@@ -68,37 +65,34 @@ except Exception as e:
     print(f"Error training model: {e}")
     raise

-
-print("Loading AI text generation model...")
+print("Loading AI text generation model (EleutherAI/gpt-neo-125M)...")
 try:
-    text_generator = pipeline("text-generation", model="
+    text_generator = pipeline("text-generation", model="EleutherAI/gpt-neo-125M")
     print("Text generation model loaded successfully.")
 except Exception as e:
     print(f"Failed to load text generation model: {e}")
     raise

-# Generate AI insight and migration plan dynamically
 def generate_ai_insight(phase, weather, absentee_pct, delay_log, prediction):
     prompt = (
-        f"
-        f"
-        f"
-        "
-        "
+        f"You are an expert construction project manager.\n"
+        f"Project phase: {phase}\n"
+        f"Weather: {weather}\n"
+        f"Absenteeism rate: {absentee_pct}%\n"
+        f"Previous delay log: {delay_log}\n"
+        f"Predicted delay percentage: {prediction}%\n"
+        f"Provide a brief insight on the delay risks and a practical migration plan to mitigate these delays."
     )
     try:
-        result = text_generator(prompt, max_length=
+        result = text_generator(prompt, max_length=120, num_return_sequences=1, temperature=0.7, top_p=0.9)
         generated_text = result[0]['generated_text']
-        # Strip out prompt to get only generated part
         insight = generated_text[len(prompt):].strip()
         return insight
     except Exception as e:
         print(f"AI insight generation failed: {e}")
         return "AI insight generation failed. Please check logs."

-# Heatmap generation function (unchanged)
 def generate_heatmap(phase, weather, model):
-    print("Generating heatmap...")
     try:
         absentee_range = np.linspace(0, 100, 20)
         delay_log_range = np.linspace(0, 20, 20)
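Reviewer note on the new generation call: with GPT-Neo's default generation config, sampling is off, so the temperature and top_p arguments are inert unless do_sample=True is also passed; and max_length counts the prompt tokens plus the completion, so the long prompt built above can leave almost no room for the generated insight. A minimal sketch of the safer variant (the prompt text here is illustrative, not from the commit):

from transformers import pipeline

text_generator = pipeline("text-generation", model="EleutherAI/gpt-neo-125M")

prompt = "Project phase: Framing\nWeather: Rainy\n"  # illustrative only
result = text_generator(
    prompt,
    max_new_tokens=80,       # bounds the completion alone, unlike max_length
    do_sample=True,          # required for temperature/top_p to take effect
    temperature=0.7,
    top_p=0.9,
    num_return_sequences=1,
)
insight = result[0]["generated_text"][len(prompt):].strip()  # drop echoed prompt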
@@ -125,15 +119,12 @@ def generate_heatmap(phase, weather, model):
         heatmap_path = os.path.join(output_dir, f"heatmap_{timestamp}.png")
         plt.savefig(heatmap_path, bbox_inches='tight')
         plt.close()
-        print(f"Heatmap saved at: {heatmap_path}")
         return heatmap_path
     except Exception as e:
         print(f"Heatmap generation failed: {e}")
         return None

-# PDF generation function (unchanged except uses AI-generated insight)
 def generate_pdf_report(phase, weather, absentee_pct, delay_log, prediction, risk, insight):
-    print("Generating PDF report...")
     try:
         buffer = BytesIO()
         c = canvas.Canvas(buffer, pagesize=letter)
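Context for the PDF function: it renders into a BytesIO buffer rather than straight to disk, which is what makes the base64 return value possible alongside the saved file. A minimal sketch of that round-trip, assuming the same reportlab canvas setup as the diff (the header text is illustrative):

import base64
from io import BytesIO
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas

buffer = BytesIO()
c = canvas.Canvas(buffer, pagesize=letter)
c.drawString(72, 720, "Delay report")  # illustrative content
c.save()                               # finalizes the PDF into the buffer
pdf_data = buffer.getvalue()
pdf_base64 = base64.b64encode(pdf_data).decode("utf-8")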
@@ -193,16 +184,13 @@ def generate_pdf_report(phase, weather, absentee_pct, delay_log, prediction, risk, insight):
         output_path = os.path.join(output_dir, f"delay_report_{timestamp}.pdf")
         with open(output_path, "wb") as f:
             f.write(pdf_data)
-        print(f"PDF saved locally at: {output_path}")

         return pdf_base64, output_path, heatmap_path
     except Exception as e:
         print(f"PDF generation failed: {e}")
         return None, None, None

-# Main prediction logic
 def predict_delay(phase, weather, absentee_pct, delay_log):
-    print(f"Predicting delay for Phase: {phase}, Weather: {weather}, Absentee: {absentee_pct}, Delay Log: {delay_log}")
     try:
         valid_phases = ["Framing", "Foundation", "Finishing"]
         valid_weather = ["Sunny", "Rainy", "Cloudy"]
@@ -235,12 +223,10 @@ def predict_delay(phase, weather, absentee_pct, delay_log):
         print(f"Prediction error: {e}")
         return None, None, f"Error: {e}", None, None, None

-# FastAPI app
 api_app = FastAPI()

 @api_app.post("/predict")
 async def predict_from_salesforce(request: Request):
-    print("Received API request...")
     try:
         data = await request.json()
         phase = data.get("phase", "Framing")
@@ -263,12 +249,9 @@ async def predict_from_salesforce(request: Request):
             "status": "success"
         })
     except Exception as e:
-        print(f"API error: {e}")
         return JSONResponse(status_code=500, content={"status": "error", "message": str(e)})

-# Gradio UI
 try:
-    print("Setting up Gradio UI...")
     with gr.Blocks() as demo:
         gr.Markdown("## 🏗️ Delay Predictor with AI Insights")
         with gr.Row():
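The /predict route accepts a JSON body; only the "phase" key (with its "Framing" default) is visible in this diff, so the remaining field names in the smoke test below are assumptions:

import requests

resp = requests.post(
    "http://localhost:7860/predict",
    json={"phase": "Framing", "weather": "Rainy", "absentee": 10, "delay_log": 3},
)
print(resp.status_code, resp.json())  # expects {"status": "success", ...} on 200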
@@ -281,7 +264,6 @@ try:
         submit = gr.Button("Predict")

         def predict_and_format(phase, weather, absentee, delay_log):
-            print("Gradio predict button clicked.")
             prediction, risk, insight, pdf_base64, pdf_path, heatmap_path = predict_delay(phase, weather, absentee, delay_log)
             if prediction is None:
                 return f"Error: {insight}"
@@ -298,24 +280,15 @@ try:
             inputs=[phase_input, weather_input, absentee_input, delay_input],
             outputs=output
         )
-    print("Gradio UI setup complete.")
 except Exception as e:
     print(f"Error setting up Gradio UI: {e}")
     raise

-# Mount Gradio app inside FastAPI
 try:
-    print("Mounting Gradio app...")
     app = gr.mount_gradio_app(api_app, demo, path="/")
-    print("Gradio app mounted successfully.")
 except Exception as e:
     print(f"Error mounting Gradio app: {e}")
     raise

 if __name__ == "__main__":
-
-    try:
-        uvicorn.run(app, host="0.0.0.0", port=7860)
-    except Exception as e:
-        print(f"Server failed to start: {e}")
-        raise
+    uvicorn.run(app, host="0.0.0.0", port=7860)
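After gr.mount_gradio_app, a single Uvicorn process serves both the Gradio UI at / and the JSON API at /predict. A quick sketch for checking the mounted app without starting the server, assuming FastAPI's TestClient (which needs httpx) and noting that importing app.py runs the data loading and model setup at import time:

from fastapi.testclient import TestClient

from app import app  # the mounted FastAPI object from this commit

client = TestClient(app)
print(client.get("/").status_code)  # Gradio UI page, expect 200
print(client.post("/predict", json={"phase": "Framing"}).status_code)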