| | """ |
| | Fixed: write_file() now checks for empty directory path. |
| | """ |
| | import os, subprocess, sys, textwrap |
| |
|
def sh(cmd, check=True):
    """Run *cmd* through the shell and return the completed process.

    Parameters
    ----------
    cmd : str
        Shell command line to execute (runs with ``shell=True``).
    check : bool, optional
        When True (default), raise ``RuntimeError`` if the command
        exits with a non-zero status.

    Returns
    -------
    subprocess.CompletedProcess
        Captured result; ``.stdout`` / ``.stderr`` are text.

    Raises
    ------
    RuntimeError
        If *check* is True and the command fails.
    """
    # NOTE(review): cmd may embed secrets (e.g. a token in a clone URL);
    # echoing it here leaks them into logs — consider masking upstream.
    print(f"\nRUN: {cmd}\n")
    result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    print(result.stdout)
    if check and result.returncode != 0:
        # Surface stderr and include the exit status in the exception so
        # failures are diagnosable from the traceback alone.
        print(result.stderr)
        raise RuntimeError(
            f"Command failed: {cmd} (exit code {result.returncode})"
        )
    return result
| |
|
def write_file(path, content):
    """Dedent *content* and write it to *path*, creating parent directories.

    Handles bare filenames too: when *path* has no directory component,
    no directory creation is attempted.
    """
    parent = os.path.dirname(path)
    if parent:
        # exist_ok keeps repeated runs idempotent.
        os.makedirs(parent, exist_ok=True)
    text = textwrap.dedent(content)
    with open(path, "w") as handle:
        handle.write(text)
    print(f"WROTE: {path}")
| |
|
def main():
    """Scaffold and push a Streamlit deployment for the engine-condition model.

    Reads GITHUB_REPO / GITHUB_TOKEN (required) and HF_TOKEN / HF_SPACE_REPO
    (optional) from the environment, clones the target GitHub repository,
    writes the Dockerfile, requirements, Streamlit app, CI workflow, and the
    Hugging Face upload script into it, then commits and pushes to ``main``.
    """
    print("🚀 Starting automated deployment setup...")

    # Configuration comes entirely from environment variables.
    github_repo = os.getenv("GITHUB_REPO")
    github_token = os.getenv("GITHUB_TOKEN")
    hf_token = os.getenv("HF_TOKEN")  # NOTE(review): read but never used in this function
    hf_space = os.getenv("HF_SPACE_REPO")  # NOTE(review): read but never used in this function

    # Only the GitHub credentials are hard requirements for this script;
    # the HF variables are consumed later by the CI pipeline instead.
    if not github_repo or not github_token:
        print("ERROR: Please set GITHUB_REPO and GITHUB_TOKEN environment variables.")
        sys.exit(1)

    # Clone once; re-runs reuse the existing checkout.
    # NOTE(review): the token is embedded in the clone URL and sh() echoes the
    # full command, so the token leaks into the logs — consider masking.
    if not os.path.exists("engine-condition-predictor"):
        sh(f"git clone https://{github_token}@github.com/{github_repo}.git engine-condition-predictor")
    else:
        print("✅ Repo already exists locally, skipping clone.")

    # All subsequent files and git commands operate inside the checkout.
    os.chdir("engine-condition-predictor")

    # Container image for the Hugging Face Space (Streamlit on port 7860).
    write_file("Dockerfile", """
FROM python:3.10-slim
WORKDIR /app
RUN apt-get update && apt-get install -y --no-install-recommends build-essential git libgomp1 && rm -rf /var/lib/apt/lists/*
COPY requirements.txt .
RUN pip install --upgrade pip && pip install -r requirements.txt
COPY . .
EXPOSE 7860
CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
""")

    # Runtime dependencies for the Streamlit app (unpinned versions).
    write_file("requirements.txt", """
streamlit
pandas
numpy
scikit-learn
xgboost
joblib
huggingface-hub
""")

    # The Streamlit UI: downloads model + scaler from the HF Hub, then
    # predicts from manual inputs or an uploaded CSV.
    write_file("app.py", """
import streamlit as st
import pandas as pd
import numpy as np
import joblib
from huggingface_hub import hf_hub_download
import os

st.title("⚙️ Predictive Maintenance: Engine Condition Predictor")

st.write("Upload data or input manually to predict engine condition using XGBoost model.")

# Constants for model/scaler paths on Hugging Face Hub
HF_TOKEN = os.getenv("HF_TOKEN") # Get token from environment variables (Colab secrets or GitHub secrets)
MODEL_REPO_ID = "sriharimudakavi/engine-condition-xgboost-tuned"
MODEL_FILENAME = "xgboost_tuned_model.joblib"
SCALER_REPO_ID = "sriharimudakavi/engine-data" # Assuming scaler is in the dataset repo
SCALER_FILENAME = "scaler.joblib"

# Download model and scaler
try:
    model_path = hf_hub_download(repo_id=MODEL_REPO_ID, filename=MODEL_FILENAME, repo_type="model", token=HF_TOKEN)
    scaler_path = hf_hub_download(repo_id=SCALER_REPO_ID, filename=SCALER_FILENAME, repo_type="dataset", token=HF_TOKEN)
except Exception as e:
    st.error(f"Error downloading model or scaler: {e}")
    st.stop()

# Load model and scaler
model = joblib.load(model_path)
scaler = joblib.load(scaler_path)

option = st.sidebar.selectbox("Input Method", ["Manual Entry", "Upload CSV"])

if option == "Manual Entry":
    rpm = st.number_input("Engine RPM", 0, 3000, 800)
    oil_p = st.number_input("Lube Oil Pressure", 0.0, 10.0, 3.0)
    fuel_p = st.number_input("Fuel Pressure", 0.0, 25.0, 6.0)
    cool_p = st.number_input("Coolant Pressure", 0.0, 10.0, 2.0)
    oil_t = st.number_input("Lube Oil Temp (°C)", 60.0, 120.0, 80.0)
    cool_t = st.number_input("Coolant Temp (°C)", 60.0, 200.0, 90.0)
    input_df = pd.DataFrame([[rpm, oil_p, fuel_p, cool_p, oil_t, cool_t]],
                            columns=["Engine rpm", "Lub oil pressure", "Fuel pressure", "Coolant pressure", "lub oil temp", "Coolant temp"])
    st.write(input_df)
    if st.button("🔍 Predict Engine Condition"):
        # Scale the input data
        scaled_input_df = scaler.transform(input_df)
        pred = model.predict(scaled_input_df)[0]
        st.success(f"Predicted Condition: {'Normal (0)' if pred==0 else 'Faulty (1)'}")
else:
    file = st.file_uploader("Upload CSV file", type=["csv"])
    if file:
        input_df = pd.read_csv(file)
        st.write("Uploaded Data:")
        st.dataframe(input_df)
        if st.button("🔍 Predict Engine Condition from CSV"):
            # Ensure the columns match the training data
            if not all(col in input_df.columns for col in ["Engine rpm", "Lub oil pressure", "Fuel pressure", "Coolant pressure", "lub oil temp", "Coolant temp"]):
                st.error("CSV file must contain 'Engine rpm', 'Lub oil pressure', 'Fuel pressure', 'Coolant pressure', 'lub oil temp', 'Coolant temp' columns.")
            else:
                # Scale the input data
                scaled_input_df = scaler.transform(input_df[["Engine rpm", "Lub oil pressure", "Fuel pressure", "Coolant pressure", "lub oil temp", "Coolant temp"]])
                preds = model.predict(scaled_input_df)
                input_df["Predicted Condition"] = np.where(preds==0, "Normal (0)", "Faulty (1)")
                st.write("Predictions:")
                st.dataframe(input_df)
""")

    # CI pipeline: on every push to main, install deps and re-upload the
    # repo to the Hugging Face Space via host_to_hf.py.
    write_file(".github/workflows/pipeline.yml", """
name: ML Deployment Pipeline
on:
  push:
    branches: [ main ]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install Dependencies
        run: |
          pip install -r requirements.txt
      - name: Deploy to Hugging Face
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        run: |
          python host_to_hf.py
""")

    # Helper executed by CI: pushes the whole working tree to the HF Space.
    write_file("host_to_hf.py", """
from huggingface_hub import HfApi
import os
HF_TOKEN = os.getenv("HF_TOKEN")
REPO_ID = os.getenv("HF_SPACE_REPO", "sriharimudakavi/engine-condition-predictor") # Use env var or default
api = HfApi()
api.upload_folder(folder_path=".", repo_id=REPO_ID, repo_type="space", token=HF_TOKEN)
print("✅ Uploaded to Hugging Face Space successfully.")
""")

    # Commit and push; the `|| echo` keeps re-runs from failing when the
    # working tree is unchanged.
    sh("git add .")
    sh('git config user.name "sriharimudakavi5"')
    sh('git config user.email "sriharimudakavi5@gmail.com"')
    sh('git commit -m "Fix app.py: direct model/scaler loading and remove self-update logic" || echo "No changes to commit"')
    sh("git push origin main")

    print("✅ Deployment files pushed to GitHub successfully.")
| |
|
if __name__ == "__main__":
    # Run the deployment setup only when executed as a script, not on import.
    main()
| |
|