Quantum9999 committed on
Commit
3a4beb6
·
verified ·
1 Parent(s): 2e6aa70

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. Dockerfile +26 -10
  2. app.py +99 -23
Dockerfile CHANGED
@@ -1,20 +1,36 @@
 
1
  FROM python:3.10-slim
2
 
 
3
  WORKDIR /app
4
 
5
- # Copy requirements and install dependencies
6
  COPY requirements.txt .
7
  RUN pip install --no-cache-dir -r requirements.txt
8
 
9
- # Copy application file
10
- COPY app.py .
11
 
12
- # Expose Streamlit default port
13
- EXPOSE 7860
 
 
 
 
 
 
 
14
 
15
- # Set environment variables for Streamlit
16
- ENV STREAMLIT_SERVER_PORT=7860
17
- ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
 
 
18
 
19
- # Run the application
20
- CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0"]
 
 
 
 
 
 
1
+ # Use Python 3.10 slim for smaller image size
2
  FROM python:3.10-slim
3
 
4
+ # Set working directory
5
  WORKDIR /app
6
 
7
+ # Copy requirements and install dependencies first (better caching)
8
  COPY requirements.txt .
9
  RUN pip install --no-cache-dir -r requirements.txt
10
 
11
+ # Create non-root user for security (HF Spaces best practice)
12
+ RUN useradd -m -u 1000 user
13
 
14
+ # Switch to non-root user
15
+ USER user
16
+
17
+ # Set user environment variables
18
+ ENV HOME=/home/user \
19
+ PATH=/home/user/.local/bin:$PATH
20
+
21
+ # Set working directory to user's app directory
22
+ WORKDIR $HOME/app
23
 
24
+ # Copy application files with correct ownership
25
+ COPY --chown=user . $HOME/app
26
+
27
+ # Expose Hugging Face Spaces standard port
28
+ EXPOSE 7860
29
 
30
+ # Run Streamlit on port 7860 with production settings
31
+ CMD ["streamlit", "run", "app.py", \
32
+ "--server.port=7860", \
33
+ "--server.address=0.0.0.0", \
34
+ "--server.headless=true", \
35
+ "--server.fileWatcherType=none", \
36
+ "--browser.gatherUsageStats=false"]
app.py CHANGED
@@ -1,15 +1,19 @@
1
  """
2
  Streamlit Application for Engine Predictive Maintenance
3
- Production-ready deployment with proper error handling
4
  """
5
 
6
  import streamlit as st
7
  import pandas as pd
8
- from huggingface_hub import hf_hub_download, login
9
- import joblib
10
  import os
 
11
 
12
- # Page Configuration
 
 
 
 
 
13
  st.set_page_config(
14
  page_title="Engine Predictive Maintenance",
15
  page_icon="πŸ”§",
@@ -17,6 +21,18 @@ st.set_page_config(
17
  initial_sidebar_state="expanded"
18
  )
19
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  # Custom CSS
21
  st.markdown("""
22
  <style>
@@ -63,29 +79,58 @@ st.markdown("""
63
 
64
  @st.cache_resource
65
  def load_model():
66
- """Load model from Hugging Face with authentication"""
 
 
 
 
 
67
  try:
68
- # Authenticate
69
  hf_token = os.environ.get("HF_TOKEN")
 
 
70
  if hf_token:
 
71
  login(token=hf_token)
 
 
 
72
 
73
  # Download model
 
 
 
 
74
  model_path = hf_hub_download(
75
  repo_id="Quantum9999/xgb-predictive-maintenance",
76
  filename="xgb_tuned_model.joblib",
77
  token=hf_token
78
  )
 
79
 
80
  # Load model
 
81
  model = joblib.load(model_path)
 
 
 
82
  return model, None
83
 
84
  except Exception as e:
85
- return None, str(e)
 
 
 
 
 
86
 
87
 
88
  def main():
 
 
 
 
89
  # Header
90
  st.markdown(
91
  '<div class="main-header">πŸ”§ Engine Predictive Maintenance System</div>',
@@ -100,9 +145,24 @@ def main():
100
  model, error = load_model()
101
 
102
  if model is None:
103
- st.error(f"❌ Failed to load prediction model: {error}")
104
- st.info("Please check Hugging Face configuration and ensure HF_TOKEN is set correctly.")
105
- return
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
106
 
107
  # Sidebar
108
  with st.sidebar:
@@ -120,14 +180,13 @@ def main():
120
  - 0: Normal Operation
121
  - 1: Maintenance Required
122
  - **Training Data**: 19,535 records
123
- - **Test Accuracy**: ~92%
124
  """)
125
 
126
  st.header("🎯 How to Use")
127
  st.markdown("""
128
- 1. Enter current sensor readings in the input fields
129
- 2. Click **'Predict Engine Condition'**
130
- 3. Review prediction and confidence scores
131
  4. Take action based on results
132
  """)
133
 
@@ -146,7 +205,7 @@ def main():
146
  st.header("πŸ“ Enter Engine Sensor Readings")
147
  st.markdown("---")
148
 
149
- # Create two columns for input
150
  col1, col2 = st.columns(2)
151
 
152
  with col1:
@@ -215,18 +274,22 @@ def main():
215
  if st.button("πŸ” Predict Engine Condition", use_container_width=True, type="primary"):
216
  # Prepare input data
217
  input_df = pd.DataFrame([{
218
- "Engine RPM": engine_rpm,
219
- "Lub Oil Pressure": lub_oil_pressure,
220
- "Fuel Pressure": fuel_pressure,
221
- "Coolant Pressure": coolant_pressure,
222
- "Lub Oil Temperature": lub_oil_temp,
223
- "Coolant Temperature": coolant_temp
224
  }])
225
 
226
  try:
 
 
227
  # Make prediction
228
  prediction = model.predict(input_df)[0]
229
  proba = model.predict_proba(input_df)[0]
 
 
230
 
231
  # Display results
232
  st.markdown("---")
@@ -297,7 +360,12 @@ def main():
297
  """)
298
 
299
  except Exception as e:
300
- st.error(f"❌ Prediction error: {e}")
 
 
 
 
 
301
  st.info("Please verify all sensor values are within valid ranges and try again.")
302
 
303
  # Footer
@@ -312,4 +380,12 @@ def main():
312
 
313
 
314
  if __name__ == "__main__":
315
- main()
 
 
 
 
 
 
 
 
 
1
  """
2
  Streamlit Application for Engine Predictive Maintenance
3
+ With detailed logging for debugging
4
  """
5
 
6
  import streamlit as st
7
  import pandas as pd
 
 
8
  import os
9
+ import sys
10
 
11
+ # Print to console (will show in HF Space logs)
12
+ print("=" * 70, file=sys.stderr)
13
+ print("APP STARTING - INITIALIZATION", file=sys.stderr)
14
+ print("=" * 70, file=sys.stderr)
15
+
16
+ # Page Configuration MUST be first
17
  st.set_page_config(
18
  page_title="Engine Predictive Maintenance",
19
  page_icon="πŸ”§",
 
21
  initial_sidebar_state="expanded"
22
  )
23
 
24
+ # Import after page config
25
+ try:
26
+ print("Importing huggingface_hub...", file=sys.stderr)
27
+ from huggingface_hub import hf_hub_download, login
28
+ print("Importing joblib...", file=sys.stderr)
29
+ import joblib
30
+ print("βœ“ All imports successful", file=sys.stderr)
31
+ except Exception as e:
32
+ print(f"βœ— Import error: {e}", file=sys.stderr)
33
+ st.error(f"Import failed: {e}")
34
+ st.stop()
35
+
36
  # Custom CSS
37
  st.markdown("""
38
  <style>
 
79
 
80
  @st.cache_resource
81
  def load_model():
82
+ """Load model from Hugging Face with detailed logging"""
83
+
84
+ print("\n" + "=" * 70, file=sys.stderr)
85
+ print("LOADING MODEL FROM HUGGING FACE", file=sys.stderr)
86
+ print("=" * 70, file=sys.stderr)
87
+
88
  try:
89
+ # Check for token
90
  hf_token = os.environ.get("HF_TOKEN")
91
+ print(f"HF_TOKEN found: {hf_token is not None}", file=sys.stderr)
92
+
93
  if hf_token:
94
+ print("Authenticating with Hugging Face...", file=sys.stderr)
95
  login(token=hf_token)
96
+ print("βœ“ Authentication successful", file=sys.stderr)
97
+ else:
98
+ print("⚠ No HF_TOKEN - attempting public access", file=sys.stderr)
99
 
100
  # Download model
101
+ print("\nDownloading model...", file=sys.stderr)
102
+ print(" Repo: Quantum9999/xgb-predictive-maintenance", file=sys.stderr)
103
+ print(" File: xgb_tuned_model.joblib", file=sys.stderr)
104
+
105
  model_path = hf_hub_download(
106
  repo_id="Quantum9999/xgb-predictive-maintenance",
107
  filename="xgb_tuned_model.joblib",
108
  token=hf_token
109
  )
110
+ print(f"βœ“ Model downloaded: {model_path}", file=sys.stderr)
111
 
112
  # Load model
113
+ print("Loading model into memory...", file=sys.stderr)
114
  model = joblib.load(model_path)
115
+ print("βœ“ Model loaded successfully", file=sys.stderr)
116
+ print("=" * 70 + "\n", file=sys.stderr)
117
+
118
  return model, None
119
 
120
  except Exception as e:
121
+ error_msg = f"Model loading failed: {str(e)}"
122
+ print(f"βœ— {error_msg}", file=sys.stderr)
123
+ import traceback
124
+ print(f"Traceback:\n{traceback.format_exc()}", file=sys.stderr)
125
+ print("=" * 70 + "\n", file=sys.stderr)
126
+ return None, error_msg
127
 
128
 
129
  def main():
130
+ """Main application"""
131
+
132
+ print("Starting main application...", file=sys.stderr)
133
+
134
  # Header
135
  st.markdown(
136
  '<div class="main-header">πŸ”§ Engine Predictive Maintenance System</div>',
 
145
  model, error = load_model()
146
 
147
  if model is None:
148
+ st.error(f"❌ Failed to load prediction model")
149
+ st.code(error)
150
+
151
+ with st.expander("πŸ” Troubleshooting"):
152
+ st.write("**Possible Issues:**")
153
+ st.write("1. HF_TOKEN not set in Space secrets")
154
+ st.write("2. Model repository is private")
155
+ st.write("3. Model filename is incorrect")
156
+ st.write("4. Network connectivity issue")
157
+
158
+ st.write("\n**Current Configuration:**")
159
+ st.write(f"- HF_TOKEN set: {os.environ.get('HF_TOKEN') is not None}")
160
+ st.write("- Expected repo: Quantum9999/xgb-predictive-maintenance")
161
+ st.write("- Expected file: xgb_tuned_model.joblib")
162
+
163
+ st.stop()
164
+
165
+ st.success("βœ“ Model loaded successfully!")
166
 
167
  # Sidebar
168
  with st.sidebar:
 
180
  - 0: Normal Operation
181
  - 1: Maintenance Required
182
  - **Training Data**: 19,535 records
 
183
  """)
184
 
185
  st.header("🎯 How to Use")
186
  st.markdown("""
187
+ 1. Enter current sensor readings
188
+ 2. Click 'Predict Engine Condition'
189
+ 3. Review prediction and confidence
190
  4. Take action based on results
191
  """)
192
 
 
205
  st.header("πŸ“ Enter Engine Sensor Readings")
206
  st.markdown("---")
207
 
208
+ # Input columns
209
  col1, col2 = st.columns(2)
210
 
211
  with col1:
 
274
  if st.button("πŸ” Predict Engine Condition", use_container_width=True, type="primary"):
275
  # Prepare input data
276
  input_df = pd.DataFrame([{
277
+ "Engine rpm": engine_rpm,
278
+ "Lub oil pressure": lub_oil_pressure,
279
+ "Fuel pressure": fuel_pressure,
280
+ "Coolant pressure": coolant_pressure,
281
+ "lub oil temp": lub_oil_temp,
282
+ "Coolant temp": coolant_temp
283
  }])
284
 
285
  try:
286
+ print(f"Making prediction with input: {input_df.to_dict()}", file=sys.stderr)
287
+
288
  # Make prediction
289
  prediction = model.predict(input_df)[0]
290
  proba = model.predict_proba(input_df)[0]
291
+
292
+ print(f"Prediction: {prediction}, Probabilities: {proba}", file=sys.stderr)
293
 
294
  # Display results
295
  st.markdown("---")
 
360
  """)
361
 
362
  except Exception as e:
363
+ error_msg = f"Prediction error: {e}"
364
+ print(f"βœ— {error_msg}", file=sys.stderr)
365
+ import traceback
366
+ print(f"Traceback:\n{traceback.format_exc()}", file=sys.stderr)
367
+
368
+ st.error(f"❌ {error_msg}")
369
  st.info("Please verify all sensor values are within valid ranges and try again.")
370
 
371
  # Footer
 
380
 
381
 
382
  if __name__ == "__main__":
383
+ print("Entering main()...", file=sys.stderr)
384
+ try:
385
+ main()
386
+ print("βœ“ Main completed successfully", file=sys.stderr)
387
+ except Exception as e:
388
+ print(f"βœ— FATAL ERROR: {e}", file=sys.stderr)
389
+ import traceback
390
+ print(f"Traceback:\n{traceback.format_exc()}", file=sys.stderr)
391
+ st.error(f"Application error: {e}")