GitHub Actions Bot commited on
Commit
41d029b
·
1 Parent(s): da8007b

feat: Deploy latest version of Gradio app

Browse files
.devcontainer/devcontainer.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "Predictive Maintenance Environment",
3
+ "image": "mcr.microsoft.com/devcontainers/python:3.9",
4
+ "postCreateCommand": "pip install pandas scikit-learn gradio joblib huggingface_hub datasets",
5
+ "customizations": {
6
+ "vscode": {
7
+ "extensions": [
8
+ "ms-python.python",
9
+ "ms-toolsai.jupyter"
10
+ ]
11
+ }
12
+ }
13
+ }
.gitattributes DELETED
@@ -1,35 +0,0 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
.github/workflows/main.yaml ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This workflow trains a model AND deploys the full Gradio app with that model.
2
+ name: Train and Deploy Gradio App to Spaces
3
+
4
+ on:
5
+ push:
6
+ branches:
7
+ - main
8
+ workflow_dispatch:
9
+
10
+ jobs:
11
+ train-and-deploy:
12
+ runs-on: ubuntu-latest
13
+
14
+ steps:
15
+ # Step 1: Checks out your GitHub repository code
16
+ - name: Checkout Repository
17
+ uses: actions/checkout@v4
18
+ with:
19
+ # Fetch all history for all branches and tags
20
+ fetch-depth: 0
21
+
22
+ # Step 2: Set up the Python environment
23
+ - name: Set up Python 3.9
24
+ uses: actions/setup-python@v4
25
+ with:
26
+ python-version: '3.9'
27
+
28
+ # Step 3: Install all dependencies from your requirements.txt file
29
+ - name: Install Dependencies
30
+ run: pip install -r requirements.txt
31
+
32
+ # Step 4: Prepare the data for training
33
+ - name: Prepare Data
34
+ run: python prepare_data.py
35
+
36
+ # Step 5: Train the model to create the model.joblib file
37
+ - name: Train Baseline Linear Model
38
+ run: python train.py
39
+
40
+ # Step 6: Push the entire project to your Hugging Face Space
41
+ - name: Push to HF Space
42
+ env:
43
+ # Your HF token, stored as a GitHub secret
44
+ HF_TOKEN: ${{ secrets.HUGGING_FACE_TOKEN }}
45
+ run: |
46
+ # Clones your Hugging Face Space repository into a new folder named 'space'
47
+ git clone https://ashandilgith:$HF_TOKEN@huggingface.co/spaces/ashandilgith/predictive_maintenance space
48
+
49
+ # Use rsync to copy all files from the current directory to the 'space' directory,
50
+ # excluding the 'space' directory itself to prevent the error.
51
+ rsync -av --delete --exclude='/space' --exclude='.git' ./ ./space/
52
+
53
+ # Go into the 'space' folder to perform git operations
54
+ cd ./space
55
+
56
+ # Configure git user for the commit
57
+ git config --global user.email "actions@github.com"
58
+ git config --global user.name "GitHub Actions Bot"
59
+
60
+ # Add all the new and updated files to be committed
61
+ git add .
62
+
63
+ # Commit the changes with a descriptive message
64
+ # The '--allow-empty' flag prevents errors if there are no changes to commit.
65
+ git commit -m "feat: Deploy latest version of Gradio app" --allow-empty
66
+
67
+ # Push the changes to the Hugging Face Space repository
68
+ git push
LICENSE ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Creative Commons Legal Code
2
+
3
+ CC0 1.0 Universal
4
+
5
+ CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
6
+ LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
7
+ ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
8
+ INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
9
+ REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
10
+ PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
11
+ THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
12
+ HEREUNDER.
13
+
14
+ Statement of Purpose
15
+
16
+ The laws of most jurisdictions throughout the world automatically confer
17
+ exclusive Copyright and Related Rights (defined below) upon the creator
18
+ and subsequent owner(s) (each and all, an "owner") of an original work of
19
+ authorship and/or a database (each, a "Work").
20
+
21
+ Certain owners wish to permanently relinquish those rights to a Work for
22
+ the purpose of contributing to a commons of creative, cultural and
23
+ scientific works ("Commons") that the public can reliably and without fear
24
+ of later claims of infringement build upon, modify, incorporate in other
25
+ works, reuse and redistribute as freely as possible in any form whatsoever
26
+ and for any purposes, including without limitation commercial purposes.
27
+ These owners may contribute to the Commons to promote the ideal of a free
28
+ culture and the further production of creative, cultural and scientific
29
+ works, or to gain reputation or greater distribution for their Work in
30
+ part through the use and efforts of others.
31
+
32
+ For these and/or other purposes and motivations, and without any
33
+ expectation of additional consideration or compensation, the person
34
+ associating CC0 with a Work (the "Affirmer"), to the extent that he or she
35
+ is an owner of Copyright and Related Rights in the Work, voluntarily
36
+ elects to apply CC0 to the Work and publicly distribute the Work under its
37
+ terms, with knowledge of his or her Copyright and Related Rights in the
38
+ Work and the meaning and intended legal effect of CC0 on those rights.
39
+
40
+ 1. Copyright and Related Rights. A Work made available under CC0 may be
41
+ protected by copyright and related or neighboring rights ("Copyright and
42
+ Related Rights"). Copyright and Related Rights include, but are not
43
+ limited to, the following:
44
+
45
+ i. the right to reproduce, adapt, distribute, perform, display,
46
+ communicate, and translate a Work;
47
+ ii. moral rights retained by the original author(s) and/or performer(s);
48
+ iii. publicity and privacy rights pertaining to a person's image or
49
+ likeness depicted in a Work;
50
+ iv. rights protecting against unfair competition in regards to a Work,
51
+ subject to the limitations in paragraph 4(a), below;
52
+ v. rights protecting the extraction, dissemination, use and reuse of data
53
+ in a Work;
54
+ vi. database rights (such as those arising under Directive 96/9/EC of the
55
+ European Parliament and of the Council of 11 March 1996 on the legal
56
+ protection of databases, and under any national implementation
57
+ thereof, including any amended or successor version of such
58
+ directive); and
59
+ vii. other similar, equivalent or corresponding rights throughout the
60
+ world based on applicable law or treaty, and any national
61
+ implementations thereof.
62
+
63
+ 2. Waiver. To the greatest extent permitted by, but not in contravention
64
+ of, applicable law, Affirmer hereby overtly, fully, permanently,
65
+ irrevocably and unconditionally waives, abandons, and surrenders all of
66
+ Affirmer's Copyright and Related Rights and associated claims and causes
67
+ of action, whether now known or unknown (including existing as well as
68
+ future claims and causes of action), in the Work (i) in all territories
69
+ worldwide, (ii) for the maximum duration provided by applicable law or
70
+ treaty (including future time extensions), (iii) in any current or future
71
+ medium and for any number of copies, and (iv) for any purpose whatsoever,
72
+ including without limitation commercial, advertising or promotional
73
+ purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
74
+ member of the public at large and to the detriment of Affirmer's heirs and
75
+ successors, fully intending that such Waiver shall not be subject to
76
+ revocation, rescission, cancellation, termination, or any other legal or
77
+ equitable action to disrupt the quiet enjoyment of the Work by the public
78
+ as contemplated by Affirmer's express Statement of Purpose.
79
+
80
+ 3. Public License Fallback. Should any part of the Waiver for any reason
81
+ be judged legally invalid or ineffective under applicable law, then the
82
+ Waiver shall be preserved to the maximum extent permitted taking into
83
+ account Affirmer's express Statement of Purpose. In addition, to the
84
+ extent the Waiver is so judged Affirmer hereby grants to each affected
85
+ person a royalty-free, non transferable, non sublicensable, non exclusive,
86
+ irrevocable and unconditional license to exercise Affirmer's Copyright and
87
+ Related Rights in the Work (i) in all territories worldwide, (ii) for the
88
+ maximum duration provided by applicable law or treaty (including future
89
+ time extensions), (iii) in any current or future medium and for any number
90
+ of copies, and (iv) for any purpose whatsoever, including without
91
+ limitation commercial, advertising or promotional purposes (the
92
+ "License"). The License shall be deemed effective as of the date CC0 was
93
+ applied by Affirmer to the Work. Should any part of the License for any
94
+ reason be judged legally invalid or ineffective under applicable law, such
95
+ partial invalidity or ineffectiveness shall not invalidate the remainder
96
+ of the License, and in such case Affirmer hereby affirms that he or she
97
+ will not (i) exercise any of his or her remaining Copyright and Related
98
+ Rights in the Work or (ii) assert any associated claims and causes of
99
+ action with respect to the Work, in either case contrary to Affirmer's
100
+ express Statement of Purpose.
101
+
102
+ 4. Limitations and Disclaimers.
103
+
104
+ a. No trademark or patent rights held by Affirmer are waived, abandoned,
105
+ surrendered, licensed or otherwise affected by this document.
106
+ b. Affirmer offers the Work as-is and makes no representations or
107
+ warranties of any kind concerning the Work, express, implied,
108
+ statutory or otherwise, including without limitation warranties of
109
+ title, merchantability, fitness for a particular purpose, non
110
+ infringement, or the absence of latent or other defects, accuracy, or
111
+ the present or absence of errors, whether or not discoverable, all to
112
+ the greatest extent permissible under applicable law.
113
+ c. Affirmer disclaims responsibility for clearing rights of other persons
114
+ that may apply to the Work or any use thereof, including without
115
+ limitation any person's Copyright and Related Rights in the Work.
116
+ Further, Affirmer disclaims responsibility for obtaining any necessary
117
+ consents, permissions or other rights required for any use of the
118
+ Work.
119
+ d. Affirmer understands and acknowledges that Creative Commons is not a
120
+ party to this document and has no duty or obligation with respect to
121
+ this CC0 or use of the Work.
README.md CHANGED
@@ -1,14 +1,85 @@
1
- ---
2
- title: Predictive Maintenance
3
- emoji: 🌍
4
- colorFrom: yellow
5
- colorTo: pink
6
- sdk: gradio
7
- sdk_version: 5.34.1
8
- app_file: app.py
9
- pinned: false
10
- license: cc0-1.0
11
- short_description: 'MVP for predictive maintenance '
12
- ---
13
-
14
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
+ title: Predictive Maintenance for Turbofan Engines
+ emoji: ✈️
+ colorFrom: blue
+ colorTo: indigo
+ sdk: gradio
+ sdk_version: 4.20.0
+ app_file: app.py
+ pinned: false
+ ---
2
+
3
+ Predictive Maintenance for Turbofan Engines
4
+ A complete MLOps project demonstrating an end-to-end workflow for a predictive maintenance solution. This application uses a machine learning model to predict the Remaining Useful Life (RUL) of a turbofan engine based on operational settings and sensor data.
5
+
6
+ The project is developed within a containerized GitHub Codespaces environment and features a CI/CD pipeline that automatically trains the model and deploys the application to a Hugging Face Space.
7
+
8
+ Features
9
+ Interactive Demo: A user-friendly Gradio web interface to get real-time RUL predictions.
10
+
11
+ Automated CI/CD: The model is automatically retrained and the application is redeployed on every push to the main branch using GitHub Actions.
12
+
13
+ Reproducible Environment: A defined development environment using Codespaces ensures that the project can be run consistently by anyone.
14
+
15
+ Extensible Framework: While this demo uses a turbofan engine dataset, the principles can be customized for any machinery that relies on sensor data to predict performance or potential faults.
16
+
17
+ 🛠️ Technology Stack
18
+ Backend: Python
19
+
20
+ ML Model: Scikit-learn (Linear Regression)
21
+
22
+ Web App: Gradio
23
+
24
+ Dev Environment: GitHub Codespaces (Docker)
25
+
26
+ CI/CD & Hosting: GitHub Actions, Hugging Face Spaces
27
+
28
+ 🚀 How to Run Locally
29
+ To run this project on your own machine or Codespace, follow these steps.
30
+
31
+ Prerequisites
32
+ Python 3.9 or higher
33
+
34
+ Git
35
+
36
+ 1. Clone the Repository
37
+ git clone [https://github.com/ashandilgith/predictivemaintenance-.git](https://github.com/ashandilgith/predictivemaintenance-.git)
38
+ cd predictivemaintenance-
39
+
40
+ 2. Set Up a Virtual Environment (Recommended)
41
+ # Create a virtual environment
42
+ python3 -m venv venv
43
+
44
+ # Activate it
45
+ source venv/bin/activate
46
+
47
+ 3. Install Dependencies
48
+ Install all the required Python libraries from the requirements.txt file.
49
+
50
+ pip install -r requirements.txt
51
+
52
+ 4. Prepare the Data
53
+ Run the script to process the raw dataset. This will create processed_train_data.csv in the data/ directory.
54
+
55
+ python prepare_data.py
56
+
57
+ 5. Train the Model
58
+ Run the training script to create the model.joblib file.
59
+
60
+ python train.py
61
+
62
+ 6. Launch the Gradio App
63
+ Run the application file. The app will be available at a local URL shown in your terminal.
64
+
65
+ python app.py
66
+
67
+ ⚙️ CI/CD Pipeline
68
+ This project uses a GitHub Actions workflow defined in .github/workflows/main.yaml. The pipeline automates the following steps on every push to the main branch:
69
+
70
+ Checkout Code: Clones the repository onto a fresh virtual machine.
71
+
72
+ Install Dependencies: Installs all necessary libraries.
73
+
74
+ Prepare Data: Runs the data preparation script.
75
+
76
+ Train Model: Trains the linear model and creates the model.joblib artifact.
77
+
78
+ Deploy to Space: Pushes the entire application, including the newly trained model, to the designated Hugging Face Space, making the updated app live.
79
+
80
+ 📊 Dataset
81
+ This project uses the Turbofan Engine Degradation Simulation Data Set provided by NASA.
82
+
83
+ Source: NASA Prognostics Data Repository
84
+
85
+ Subset Used: FD001
app.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import joblib
import numpy as np

# --- 1. Load the Pre-Trained Model ---
# This script assumes 'model.joblib' exists because you've run train.py.
# (BUGFIX: removed unused 'import os'.)
MODEL_PATH = "model.joblib"
model = None

try:
    model = joblib.load(MODEL_PATH)
    print("Model 'model.joblib' loaded successfully.")
except FileNotFoundError:
    # Keep the app importable even without a model; predict_rul reports
    # the problem to the user instead of crashing at startup.
    print(f"ERROR: Model file not found at '{MODEL_PATH}'.")
    print("Please run 'python train.py' in your terminal first to create the model file.")


# --- 2. Define Feature Names ---
# This list must be in the exact same order as the data your model was
# trained on: cycle counter, two operational settings, then 21 sensors.
feature_names = [
    'time_in_cycles', 'setting_1', 'setting_2', 's_1', 's_2', 's_3', 's_4', 's_5',
    's_6', 's_7', 's_8', 's_9', 's_10', 's_11', 's_12', 's_13', 's_14', 's_15',
    's_16', 's_17', 's_18', 's_19', 's_20', 's_21'
]


# --- 3. Create the Prediction Function ---
def predict_rul(*args):
    """
    Take all 24 number inputs as positional arguments, arrange them into
    the single-row matrix the model expects, and return the model's RUL
    prediction as a human-readable string.

    Returns an explanatory message instead of a prediction when the model
    file was not found at startup.
    """
    if model is None:
        return "Model not loaded. Please run 'python train.py' and restart the app."

    # The model expects a 2-D array: 1 row x 24 features.
    input_data = np.array(args).reshape(1, -1)
    prediction = model.predict(input_data)
    # predict() returns an array; the single row yields a single value.
    final_prediction = prediction[0]

    return f"{round(final_prediction, 2)} cycles remaining"


# --- 4. Build the Gradio Interface ---
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Turbofan Engine Predictive Maintenance")
    gr.Markdown(
        """
        This is a demo of a predictive maintenance model for a turbofan engine.
        However, the underlying principles can be customized for any form of machinery that uses sensor data
        to predict its Remaining Useful Life (RUL), performance, or time before a potential fault.
        """
    )

    gr.Markdown("### Engine Parameters & Sensor Readings")

    # Collect every input component so they can be wired to the button.
    inputs = []
    # Arrange the number inputs into a 3-column grid for a compact layout.
    num_columns = 3
    for i in range(0, len(feature_names), num_columns):
        with gr.Row():
            for j in range(num_columns):
                # The last row may be partially filled.
                if i + j < len(feature_names):
                    name = feature_names[i + j]
                    component = gr.Number(label=name, value=0.0)
                    inputs.append(component)

    # Place the prediction button below the inputs.
    predict_btn = gr.Button("Predict RUL", variant="primary")

    gr.Markdown("### Prediction Result")
    # Output textbox below the button.
    outputs = gr.Textbox(label="Predicted Remaining Useful Life (RUL)")

    # Connect the button's "click" event to the prediction function.
    predict_btn.click(
        fn=predict_rul,
        inputs=inputs,
        outputs=outputs
    )

# --- 5. Launch the App ---
if __name__ == "__main__":
    demo.launch()
app_old.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import joblib
import numpy as np
import os
from PIL import Image

# --- 1. Load the Pre-Trained Model ---
# 'model.joblib' must already exist (produced by running train.py).
MODEL_PATH = "model.joblib"
IMAGE_PATH = "engine.jpg"
model = None

try:
    model = joblib.load(MODEL_PATH)
    print("Model 'model.joblib' loaded successfully.")
except FileNotFoundError:
    print(f"ERROR: Model file not found at '{MODEL_PATH}'.")
    print("Please run 'python train.py' in your terminal first to create the model file.")

# --- 2. Define Feature Names ---
# Order must match the training data exactly: cycle counter, two
# operational settings, then the 21 sensor channels.
feature_names = (
    ['time_in_cycles', 'setting_1', 'setting_2']
    + [f's_{i}' for i in range(1, 22)]
)

# --- 3. Create the Prediction Function ---
def predict_rul(*args):
    """
    Collect the 24 slider values, feed them to the model as a single-row
    feature matrix, and report the predicted RUL as a readable string.
    """
    # Bail out gracefully when the model file was missing at startup.
    if model is None:
        return "Model not loaded. Please run 'python train.py' and restart the app."

    # Shape the inputs into the 1 x 24 matrix the estimator expects.
    features = np.array(args).reshape(1, -1)

    # predict() yields an array; a single row gives a single value.
    rul_value = model.predict(features)[0]

    return f"{round(rul_value, 2)} cycles remaining"

# --- 4. Build the Gradio Interface ---
# A "Blocks" layout gives explicit control over the two-column UI.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Turbofan Engine Predictive Maintenance")
    gr.Markdown("Enter the engine's current sensor readings to predict its Remaining Useful Life (RUL). This demo uses a trained Linear Regression model.")

    with gr.Row():
        # Left column: one slider per feature for easy interaction.
        with gr.Column(scale=1):
            gr.Markdown("### Engine Parameters & Sensor Readings")
            inputs = [
                gr.Slider(minimum=0, maximum=1000, label=name, value=50)
                for name in feature_names
            ]

        # Right column: illustration, result box, and trigger button.
        with gr.Column(scale=1):
            # Reliable local placeholder image.
            gr.Image(
                "engine.jpg",
                label="Turbofan Engine"
            )

            gr.Markdown("### Prediction Result")
            outputs = gr.Textbox(label="Predicted Remaining Useful Life (RUL)")

            predict_btn = gr.Button("Predict RUL", variant="primary")

            # Wire the button's "click" event to the prediction function.
            predict_btn.click(
                fn=predict_rul,
                inputs=inputs,
                outputs=outputs
            )

# --- 5. Launch the App ---
if __name__ == "__main__":
    demo.launch()
data/processed_data.csv ADDED
The diff for this file is too large to render. See raw diff
 
data/test_FD001.txt ADDED
The diff for this file is too large to render. See raw diff
 
data/train_FD001.txt ADDED
The diff for this file is too large to render. See raw diff
 
engine.jpg ADDED
model.joblib ADDED
Binary file (1.35 kB). View file
 
prepare_data.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pandas as pd

# Column layout of the NASA C-MAPSS FD001 files: unit id, cycle counter,
# three operational settings, then 21 sensor channels (26 columns total).
COLUMN_NAMES = (
    ['unit_number', 'time_in_cycles', 'setting_1', 'setting_2', 'setting_3']
    + [f's_{i}' for i in range(1, 22)]
)


def prepare_data(input_path='data/train_FD001.txt',
                 output_path='data/processed_data.csv'):
    """
    Load a raw C-MAPSS data file, label its columns, compute the
    Remaining Useful Life (RUL) target for every row, and save the
    result as a CSV.

    Parameters
    ----------
    input_path : str
        Space-separated raw data file (default: the FD001 training set).
    output_path : str
        Destination path for the processed CSV.
    """
    # Load the raw, headerless, space-separated data.
    train_df = pd.read_csv(input_path, sep=' ', header=None)

    # The original file has trailing spaces, which pandas reads as empty
    # columns (numbered 26 and 27). Drop them before anything else;
    # errors='ignore' keeps this safe for files without the extra columns.
    train_df.drop(columns=[26, 27], inplace=True, errors='ignore')

    # BUGFIX: the fifth column is the third operational setting; it was
    # previously mislabeled 'setting' instead of 'setting_3'.
    train_df.columns = COLUMN_NAMES

    # RUL for each row = (last observed cycle for its unit) - (current cycle):
    # 1. find the final cycle per unit_number,
    # 2. merge it back onto every record,
    # 3. subtract the current cycle.
    max_cycles = train_df.groupby('unit_number')['time_in_cycles'].max()
    train_df = train_df.merge(max_cycles.to_frame(name='max_cycle'),
                              left_on='unit_number', right_index=True)
    train_df['RUL'] = train_df['max_cycle'] - train_df['time_in_cycles']

    # 'max_cycle' was only needed to derive RUL.
    train_df.drop(columns=['max_cycle'], inplace=True)

    train_df.to_csv(output_path, index=False)


if __name__ == "__main__":
    prepare_data()
push_to_hub.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from huggingface_hub import HfApi
import os
import sys

def push_model_to_hub():
    """
    Authenticate with the Hugging Face Hub using the HF_TOKEN environment
    variable and upload the trained model file ('model.joblib').

    Exits with status 1 if the token, repository information, or model
    file is missing, or if the upload fails.
    """
    print("--- Starting Model Upload Process ---")

    # The secret token is passed in from the workflow YAML as HF_TOKEN.
    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        print("Fatal: HF_TOKEN secret not found. Make sure HF_TOKEN secret is set.")
        sys.exit(1)  # Exit with an error code

    # BUGFIX: GITHUB_REPOSITORY_NAME is not a standard GitHub Actions
    # variable, so the old lookup always failed. GitHub Actions provides
    # GITHUB_REPOSITORY as "owner/name"; derive both values from it,
    # keeping the old variables as an override for compatibility.
    full_repo = os.environ.get("GITHUB_REPOSITORY", "")
    repo_owner = os.environ.get("GITHUB_REPOSITORY_OWNER")
    repo_name_base = os.environ.get("GITHUB_REPOSITORY_NAME")
    if "/" in full_repo:
        owner_part, name_part = full_repo.split("/", 1)
        repo_owner = repo_owner or owner_part
        repo_name_base = repo_name_base or name_part

    if not repo_owner or not repo_name_base:
        print("Fatal: Could not determine repository owner or name from GitHub environment variables.")
        sys.exit(1)

    # Clean up the repo name by removing any trailing hyphens before we
    # add our "-model" suffix (avoids names like "repo--model").
    repo_name_base = repo_name_base.rstrip("-")

    # The Hugging Face repo is named "owner/repo-name-model".
    repo_id = f"{repo_owner}/{repo_name_base}-model"

    print(f"Target Hugging Face repository: {repo_id}")

    # Check that the model artifact exists before touching the network.
    if not os.path.exists("model.joblib"):
        print("Fatal: Model file 'model.joblib' not found. Please ensure train.py runs successfully first.")
        sys.exit(1)

    try:
        api = HfApi()

        # `exist_ok=True` makes this idempotent across workflow runs.
        print(f"Ensuring repository '{repo_id}' exists on the Hub...")
        api.create_repo(
            repo_id=repo_id,
            token=hf_token,
            exist_ok=True,
        )

        print("Uploading 'model.joblib'...")
        api.upload_file(
            path_or_fileobj="model.joblib",
            path_in_repo="model.joblib",  # The name of the file on the Hub
            repo_id=repo_id,
            token=hf_token,
            commit_message="Pushed baseline model from GitHub Actions"
        )
        print(f"✅ Model successfully pushed to https://huggingface.co/{repo_id}")

    except Exception as e:
        # Surface any Hub/network failure and fail the CI step.
        print(f"An error occurred during upload: {e}")
        sys.exit(1)

if __name__ == "__main__":
    push_model_to_hub()
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ gradio
2
+ scikit-learn
3
+ pandas
4
+ numpy
train.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Import the libraries.
# BUGFIX: removed the unused huggingface_hub import (not listed in
# requirements.txt, so it crashed the CI run) and the unused
# mean_squared_error import; merged the duplicate sklearn.metrics import.
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
import joblib


def train_model(data_path='data/processed_data.csv', model_path='model.joblib'):
    """
    Train a baseline Linear Regression model that predicts Remaining
    Useful Life (RUL) from the processed C-MAPSS data.

    Parameters
    ----------
    data_path : str
        CSV produced by prepare_data.py.
    model_path : str
        Where to save the fitted model with joblib.

    Returns
    -------
    float
        R-squared score on the held-out 20% test split.
    """
    # Load the processed data.
    df = pd.read_csv(data_path)

    # Features: cycle counter, two operational settings, and whichever of
    # the 21 sensor columns are present in the file.
    features = ['time_in_cycles', 'setting_1', 'setting_2'] + \
        [f's_{i}' for i in range(1, 22) if f's_{i}' in df.columns]
    target = 'RUL'

    X = df[features]
    y = df[target]

    # Hold out 20% for evaluation; fixed seed keeps runs reproducible.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42)

    # Baseline model: plain linear regression.
    model = LinearRegression()
    model.fit(X_train, y_train)

    # Evaluate on the held-out split.
    y_pred = model.predict(X_test)
    r2 = r2_score(y_test, y_pred)
    print(f'R_squared score is {r2}')

    # Persist the fitted model for app.py / the deployment pipeline.
    joblib.dump(model, model_path)

    return r2


# Ensure the function is called when run as a script.
if __name__ == '__main__':
    train_model()