# Hugging Face Space: TFLite flood-classification demo (Gradio UI).
# (Removed non-Python page residue — "Spaces: Sleeping" — from a web scrape.)
import functools

import gradio as gr
import numpy as np
import tensorflow as tf
from PIL import Image
# Load the TFLite model.
# Cached so the expensive flatbuffer parse + tensor allocation happens once
# per model path instead of on every prediction request (predict() calls
# this per upload).
@functools.lru_cache(maxsize=None)
def load_tflite_model(model_path):
    """Build and return a ready-to-run TFLite interpreter.

    Args:
        model_path: Filesystem path to a .tflite flatbuffer.

    Returns:
        A tf.lite.Interpreter with tensors already allocated.
    """
    interpreter = tf.lite.Interpreter(model_path=model_path)
    interpreter.allocate_tensors()
    return interpreter
# Preprocess an uploaded image for the TFLite model (224x224 RGB input).
def preprocess_image(image):
    """Convert an uploaded image into the batched int8 tensor the model expects.

    Args:
        image: A PIL image of any size/mode.

    Returns:
        np.ndarray of shape (1, 224, 224, 3) and dtype int8.
    """
    # Force 3-channel RGB so RGBA (PNG with alpha) or grayscale uploads
    # don't produce a tensor with the wrong channel count.
    image = image.convert("RGB")
    image = image.resize((224, 224))  # Match the model's input resolution
    # NOTE(review): a plain int8 cast wraps pixel values above 127; a fully
    # int8-quantized model usually wants (pixel - zero_point) * scale from
    # the interpreter's input quantization params — confirm against the model.
    image = np.array(image).astype(np.int8)
    image = np.expand_dims(image, axis=0)  # Add batch dimension
    return image
# Run one forward pass through the TFLite interpreter.
def run_inference(interpreter, image):
    """Feed *image* through the interpreter and return the raw output tensor.

    Args:
        interpreter: An allocated tf.lite.Interpreter.
        image: Batched input tensor matching the model's input spec.

    Returns:
        The model's first output tensor as a numpy array.
    """
    input_index = interpreter.get_input_details()[0]['index']
    output_index = interpreter.get_output_details()[0]['index']
    interpreter.set_tensor(input_index, image)
    interpreter.invoke()
    return interpreter.get_tensor(output_index)
# Gradio handler: classify one uploaded image.
def predict(image):
    """Return "Flood" or "No Flood" for an uploaded PIL image."""
    interpreter = load_tflite_model("MNv2Flood_cat_Mar2025.tflite")  # Model shipped alongside this script
    batch = preprocess_image(image)
    scores = run_inference(interpreter, batch)[0]
    # Two-value output: index 0 is the flood score, index 1 the no-flood score.
    return "Flood" if scores[0] > scores[1] else "No Flood"
# Wire the handler into a Gradio UI: image upload in, text label out.
interface = gr.Interface(
    predict,               # handler that runs the TFLite model
    gr.Image(type="pil"),  # uploads arrive as PIL images
    "text",                # plain-text "Flood" / "No Flood" output
)
# Start the web app.
interface.launch()