# NOTE: web-scrape residue (Spaces status text, file size, commit hashes,
# and a run of gutter line numbers) removed so the file parses as Python.
import gradio as gr
import tensorflow as tf
import numpy as np
from PIL import Image
# Load a TFLite model from disk and get it ready for inference.
def load_tflite_model(model_path):
    """Return a tensor-allocated ``tf.lite.Interpreter`` for *model_path*."""
    tflite_interpreter = tf.lite.Interpreter(model_path=model_path)
    tflite_interpreter.allocate_tensors()
    return tflite_interpreter
# Preprocess an image for the TFLite model (224x224, 3-channel, INT8, batched)
def preprocess_image(image):
    """Resize *image* to the model's 224x224 input and add a batch axis.

    Args:
        image: a PIL image in any mode; it is converted to RGB so the
            resulting array is always (224, 224, 3) regardless of whether
            the upload was RGBA, grayscale, or palette-based.

    Returns:
        np.ndarray of dtype int8 and shape (1, 224, 224, 3).
    """
    # Fix: gr.Image(type="pil") can deliver RGBA/L/P images; force 3 channels.
    image = image.convert("RGB")
    image = image.resize((224, 224))  # Resize to model input size
    arr = np.array(image).astype(np.int8)
    # NOTE(review): uint8 pixel values > 127 wrap to negative under this cast;
    # assumes the int8-quantized model was exported with a matching
    # zero_point/scale — confirm against the model's input quantization params.
    return np.expand_dims(arr, axis=0)  # Add batch dimension
# Run one forward pass through the TFLite interpreter.
def run_inference(interpreter, image):
    """Feed *image* into *interpreter*, invoke it, and return the raw output tensor."""
    in_index = interpreter.get_input_details()[0]['index']
    out_index = interpreter.get_output_details()[0]['index']
    # Bind the input, execute the graph, then read the result back out.
    interpreter.set_tensor(in_index, image)
    interpreter.invoke()
    return interpreter.get_tensor(out_index)
# Gradio interface function
def predict(image):
    """Classify *image* as "Flood" or "No Flood" with the bundled TFLite model.

    Args:
        image: PIL image supplied by the Gradio UI.

    Returns:
        "Flood" or "No Flood" as a plain string.
    """
    # Fix: load and tensor-allocate the interpreter once and reuse it; the
    # original reloaded the model from disk on every single request.
    if not hasattr(predict, "_interpreter"):
        predict._interpreter = load_tflite_model("MNv2Flood_cat_Mar2025.tflite")  # Replace with your TFLite model path
    preprocessed_image = preprocess_image(image)
    prediction = run_inference(predict._interpreter, preprocessed_image)
    # Index 0 vs index 1 of the output row decides the class — assumed
    # (from the original comparison) that 0 = flood score; confirm against
    # the model's label order.
    return "Flood" if prediction[0][0] > prediction[0][1] else "No Flood"
# Wire the classifier into a minimal Gradio UI: upload an image, read a label.
image_input = gr.Image(type="pil")  # deliver uploads to predict() as PIL images
interface = gr.Interface(
    fn=predict,
    inputs=image_input,
    outputs="text",  # Output is text indicating Flood or NoFlood
)

# Start the web server.
interface.launch()
|