Spaces:
Sleeping
Sleeping
File size: 2,911 Bytes
3815023 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 |
from flask import Flask, render_template, request, jsonify
import numpy as np
import tensorflow as tf
import joblib
# Single WSGI application instance; the routes below register against it.
app = Flask(__name__)
# --- MODEL LOADING ---
# Load the model and scaler once at import time so every request reuses them.
# By default, this loads the FIRST model you trained.
#
# Both names are pre-initialized to None so they always exist, and they are
# reset to None ONLY on the failure path.  (Putting `model = None` after the
# try/except at top level would unconditionally overwrite a successfully
# loaded model, and /predict would then always answer 500.)
model = None
scaler = None
try:
    model = tf.keras.models.load_model('water_quality_model.h5')
    scaler = joblib.load('scaler.joblib')
    print("✅ Model 'water_quality_model.h5' and 'scaler.joblib' loaded successfully.")
except Exception as e:
    print(f"❌ Error loading 'water_quality_model.h5': {e}")
    print("---")
    # --- IF YOU WANT TO USE THE ALTERNATIVE MODEL, UNCOMMENT THE LINES BELOW ---
    # try:
    #     model = tf.keras.models.load_model('water_quality_model_ALT.h5')
    #     scaler = joblib.load('scaler_ALT.joblib')
    #     print("✅ Alternative Model 'water_quality_model_ALT.h5' loaded.")
    # except Exception as e2:
    #     print(f"❌ Error loading all models: {e2}")
    # Reset BOTH: a half-loaded pair (model without its scaler, or vice
    # versa) would produce garbage predictions, so treat it as not loaded.
    model = None
    scaler = None
# ---------------------
@app.route('/')
def home():
    """Serve the single-page UI (templates/index.html)."""
    page = render_template('index.html')
    return page
@app.route('/predict', methods=['POST'])
def predict():
    """Receive water-quality readings as JSON and return a potability prediction.

    Expects a JSON body containing the nine feature keys (ph, Hardness,
    Solids, Chloramines, Sulfate, Conductivity, Organic_carbon,
    Trihalomethanes, Turbidity), each convertible to float.

    Returns:
        200 -> {'potability': 0|1, 'probability': float}
        400 -> {'error': ...} when the payload is missing/malformed
        500 -> {'error': ...} when the model/scaler failed to load
    """
    # Identity check, not truthiness: a Keras Model's boolean value is not a
    # reliable "was loading skipped?" signal, whereas `is None` is exact.
    if model is None or scaler is None:
        return jsonify({'error': 'Model is not loaded properly. Check server logs.'}), 500
    try:
        # Data posted by the page's JavaScript as JSON.
        data = request.get_json()
        # Feature order MUST match the column order used at training time —
        # the scaler and model were fitted on exactly this ordering.
        feature_names = (
            'ph', 'Hardness', 'Solids', 'Chloramines', 'Sulfate',
            'Conductivity', 'Organic_carbon', 'Trihalomethanes', 'Turbidity',
        )
        features = [float(data[name]) for name in feature_names]
        # Model expects a batch: shape (1, 9).
        input_data = np.array([features])
        # Scale with the *loaded* scaler so inputs match the training scale.
        input_scaled = scaler.transform(input_data)
        # Sigmoid output: a single probability in [0, 1].
        prediction_prob = model.predict(input_scaled)[0][0]
        return jsonify({
            'potability': int(prediction_prob > 0.5),  # 1 if > 0.5, else 0
            'probability': float(prediction_prob),     # raw probability
        })
    except Exception as e:
        # Missing keys, non-numeric values, or a non-JSON body (data is None)
        # all land here -> client error.
        return jsonify({'error': str(e)}), 400
if __name__ == '__main__':
    # Bind on all interfaces; 7860 is the conventional Hugging Face Spaces port.
    app.run(host="0.0.0.0", port=7860)