# Dogs-Breed-Identification / dogs_breed_identification_deploy.py
# ChenShterental's picture
# Upload folder using huggingface_hub
# 0fdb979 verified
# -*- coding: utf-8 -*-
"""dogs-breed-identification-deploy.ipynb
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1b32D4koxhMHOY2yZO0RwegCtGwG_A47T
"""
import gradio as gr
import numpy as np
import pickle
from PIL import Image
import torch
import torchvision.transforms as transforms
from torchvision import models
from torchvision.models import inception_v3
# Load the fitted sklearn LabelEncoder that maps breed-name strings to the
# integer class indices the model was trained on.
# NOTE(review): pickle.load executes arbitrary code from the file — safe only
# because label_encoder.pkl ships with this app; never load untrusted pickles.
with open("label_encoder.pkl", "rb") as f:
    label_encoder = pickle.load(f)
def reverse_encode(label_encoder, int_enc_label):
    """Map a single integer-encoded class index back to its original string label."""
    decoded = label_encoder.inverse_transform([int_enc_label])
    return decoded[0]
"""# **Deploy**"""
# Load model: InceptionV3 backbone with its classifier head swapped for a
# 120-way linear layer (120 dog breeds), then restore the trained weights.
inception = models.inception_v3() # define architecture
num_ftrs = inception.fc.in_features
# Replace the final fully-connected layer to output 120 breed logits.
inception.fc = torch.nn.Linear(num_ftrs, 120)
# map_location="cpu" lets the checkpoint load on machines without a GPU.
inception.load_state_dict(torch.load("model.pt", map_location=torch.device("cpu")))
# Inference mode: disables dropout and uses running batch-norm statistics.
inception.eval()
# Standard ImageNet channel statistics (the backbone was pretrained on ImageNet).
_IMAGENET_MEAN = [0.485, 0.456, 0.406]
_IMAGENET_STD = [0.229, 0.224, 0.225]

normalize = transforms.Normalize(mean=_IMAGENET_MEAN, std=_IMAGENET_STD)

# Inference-time preprocessing pipeline.
test_trans = transforms.Compose([
    transforms.Resize(342),      # scale the short side to 342 px
    transforms.CenterCrop(299),  # InceptionV3 expects 299x299 inputs
    transforms.ToTensor(),       # HWC uint8 -> CHW float in [0, 1]
    normalize,
])
# Inference function (Top-k)
def predict(image, k=5):
    """Classify a dog photo and report the top-k most likely breeds.

    Args:
        image: PIL image as delivered by the Gradio image widget.
        k: number of top predictions to include (default 5).

    Returns:
        A multi-line string, one "breed (probability%)" entry per line.
    """
    # Uploads may be RGBA, palette or grayscale; the model and the 3-channel
    # Normalize both require RGB, so convert defensively.
    image = image.convert("RGB")
    batch = test_trans(image).unsqueeze(0)  # add batch dimension
    with torch.no_grad():
        logits = inception(batch)
        # Softmax once over the whole output, then take top-k of the
        # probabilities directly. (The original recomputed the full softmax on
        # every loop iteration and discarded the values returned by topk.)
        probs = torch.softmax(logits, dim=1)
        topk_probs, topk_indices = torch.topk(probs, k, dim=1)
    # Decode predictions
    predictions = []
    for score, idx in zip(topk_probs[0].tolist(), topk_indices[0].tolist()):
        str_pred = reverse_encode(label_encoder, idx)
        # Labels look like "n02085620-Chihuahua"; keep only the breed name.
        clean_pred = str_pred.split('-', 1)[1] if '-' in str_pred else str_pred
        predictions.append(f"{clean_pred} ({score:.2%})")
    return "Top-k Predicted Dog Breeds:\n" + "\n".join(predictions)
# Create Gradio interface: takes a PIL image from the user, returns the
# formatted top-k prediction text from predict().
demo = gr.Interface(fn=predict, inputs=gr.Image(type="pil"), outputs="text")
# debug=True surfaces tracebacks in the console while the app is running.
demo.launch(debug=True)