FastAPI image-classification service: serves a quantized MobileNet v1 (224x224) TensorFlow Lite model behind an image-upload endpoint.
import io

from fastapi import FastAPI, File, UploadFile
from fastapi.responses import JSONResponse
import numpy as np
from PIL import Image
import tensorflow as tf
import uvicorn

# Load the class labels and the quantized MobileNet v1 model once at import
# time, so every request reuses the same interpreter.
# encoding is explicit so label decoding does not depend on the platform's
# default locale.
with open("labels_mobilenet_quant_v1_224.txt", "r", encoding="utf-8") as f:
    labels = f.read().splitlines()

interpreter = tf.lite.Interpreter(model_path="mobilenet_v1_1.0_224_quant.tflite")
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# Classification
def classify_image(image: Image.Image):
    """Classify a PIL image with the module-level TFLite interpreter.

    The image is converted to RGB, resized to the model's 224x224 input,
    and fed to the quantized interpreter as a uint8 batch of one.
    Returns a ``(label, confidence)`` tuple where confidence is a float
    in [0, 1].
    """
    rgb = image.convert("RGB").resize((224, 224))
    batch = np.expand_dims(np.asarray(rgb, dtype=np.uint8), axis=0)

    interpreter.set_tensor(input_details[0]["index"], batch)
    interpreter.invoke()
    raw = interpreter.get_tensor(output_details[0]["index"])

    # Dequantize the uint8 output: real = scale * (quantized - zero_point).
    scale, zero_point = output_details[0]["quantization"]
    scores = scale * (raw.astype(np.float32) - zero_point)
    scores = scores[0]

    best = int(np.argmax(scores))
    label = labels[best] if best < len(labels) else "Etiqueta no encontrada"

    # Numerically stable softmax over the dequantized scores.
    # NOTE(review): the quantized MobileNet graph may already end in a
    # softmax layer; if so, this second softmax only rescales the values
    # (argmax is unaffected) — confirm against the model before relying
    # on the absolute confidence numbers.
    shifted = np.exp(scores - np.max(scores))
    probabilities = shifted / np.sum(shifted)
    return label, float(probabilities[best])
# Create the API
app = FastAPI()


# BUG FIX: the original `predict` had no route decorator, so the function
# was never registered with the app and the service exposed no endpoint.
@app.post("/predict")
async def predict(file: UploadFile = File(...)):
    """Classify an uploaded image file.

    Reads the upload into memory, decodes it with PIL, runs the TFLite
    classifier, and returns ``{"label": ..., "confidence": "NN.NN%"}``.
    Responds with HTTP 400 if the payload is not a decodable image.
    """
    contents = await file.read()
    try:
        # PIL raises UnidentifiedImageError (an OSError subclass) for
        # payloads it cannot decode.
        image = Image.open(io.BytesIO(contents))
    except OSError:
        return JSONResponse({"error": "invalid image"}, status_code=400)
    label, conf = classify_image(image)
    return JSONResponse({"label": label, "confidence": f"{conf*100:.2f}%"})


# Run locally: python app.py (Spaces-style port 7860)
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)