Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| import base64 | |
| from io import BytesIO | |
| from PIL import Image | |
| import tensorflow as tf | |
| import numpy as np | |
# Load the TensorFlow Lite model and keep the interpreter ready for inference.
interpreter = tf.lite.Interpreter(model_path="model_unquant.tflite")
interpreter.allocate_tensors()

# Cache the tensor metadata once at startup; reused on every prediction.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Class labels, in the same order as the model's output vector.
classes = ['Bastonete', 'Basófilo']
def predict_base64_image(base64_image):
    """Classify a base64-encoded image with the TFLite model.

    Args:
        base64_image: Base64 string of an image, optionally prefixed with a
            data-URI header (e.g. "data:image/png;base64,...").

    Returns:
        dict: {"class": <label>, "confidence": "NN.NN%"} on success, or
        {"error": <message>} if decoding or inference fails.
    """
    try:
        # BUG FIX: browsers typically send "data:image/...;base64,<payload>";
        # strip the prefix so raw data URIs decode correctly.
        if base64_image.lstrip().startswith("data:") and "," in base64_image:
            base64_image = base64_image.split(",", 1)[1]

        # Decode base64 into an RGB PIL image.
        image_data = base64.b64decode(base64_image)
        image = Image.open(BytesIO(image_data)).convert("RGB")

        # BUG FIX: a TFLite interpreter requires the exact input shape;
        # the original code crashed on any image whose size differed from
        # the model's expected (height, width). Shape is NHWC.
        _, height, width, _ = input_details[0]['shape']
        image = image.resize((width, height))

        image_array = np.array(image).astype(np.float32) / 255.0
        image_array = np.expand_dims(image_array, axis=0)

        # Run inference.
        interpreter.set_tensor(input_details[0]['index'], image_array)
        interpreter.invoke()
        output_data = interpreter.get_tensor(output_details[0]['index'])

        # Pick the highest-scoring class; int() avoids a numpy scalar index.
        predicted_class_index = int(np.argmax(output_data))
        predicted_class_name = classes[predicted_class_index]
        predicted_confidence = output_data[0][predicted_class_index] * 100

        return {"class": predicted_class_name,
                "confidence": f"{predicted_confidence:.2f}%"}
    except Exception as e:
        # Surface the failure to the API caller instead of crashing the app.
        return {"error": str(e)}
# Gradio configuration: base64 text in, JSON verdict out, exposed at /predict.
interface = gr.Interface(
    fn=predict_base64_image,
    inputs="text",   # the base64 payload arrives as plain text
    outputs="json",  # returns a JSON object with class and confidence
    api_name="/predict"
)

# Start the web server.
interface.launch()