# Hugging Face Space "Flower Classifier" by grkw
# (commit dc82a3f — "Add ipynb and more info on the app")
import json
import os

import gradio as gr
from fastai.vision.all import *
# Load the category-to-name mapping (class id -> human-readable flower name).
# NOTE(review): assumes the JSON keys are the string form of the vocab labels
# — verify against the exported learner's dls.vocab.
with open('cat_to_name.json', 'r') as f:
    cat_to_name = json.load(f)
# Restore the trained fastai learner exported during training.
learn = load_learner('flower_classifier.pkl')
# Ordered class labels; index i aligns with probs[i] from learn.predict.
labels = learn.dls.vocab
def predict(img):
    """Classify a flower image and return human-readable class probabilities.

    Args:
        img: an input image in any form ``PILImage.create`` accepts
            (PIL image, filepath, numpy array, ...).

    Returns:
        dict mapping human-readable flower names to float probabilities,
        suitable for a ``gr.Label`` output.
    """
    img = PILImage.create(img)
    # fastai's predict returns (decoded, index, probs); only probs is needed.
    _, _, probs = learn.predict(img)
    # Map each vocab label to its display name in a single pass over the
    # aligned (label, probability) pairs. Fall back to the raw label if it is
    # missing from cat_to_name instead of raising KeyError mid-request.
    return {
        cat_to_name.get(str(label), str(label)): float(prob)
        for label, prob in zip(labels, probs)
    }
# Page header and long-form project description, rendered via gr.HTML below.
title = "<h1>Flower Classifier</h1>"
description = "<p>An introductory project using fastai for transfer learning using an image classification model, Gradio to demo it on a web app, and HuggingFace Spaces for deployment. I used the ResNet34 architecture on the Oxford Flowers 102 dataset, with a random 80%/20% train/test split, input resizing to 224x224x3, batch data augmentation, a learning rate found by `lr_find()`, only 2 training epochs, and the rest of the hyperparameters as fastai defaults. As someone who's learned neural networks from the bottom up with a strong theoretical foundation, it was fun to see how \"easy\" ML can be for simpler tasks, as the model achieves 91% test accuracy (while a random guess would yield 1% accuracy)!</p><p>Feel free to browse the example images below (10 are from the test set, and 2 are my own out-of-distribution images) or upload your own image of a flower. The model may have overfit to the training distribution, as it doesn't generalize well to images with cluttered backgrounds (see my dahlia photo and my tulip photo) and has 100% certainty of correct guesses for some examples in the test set.</p><p>The Oxford Flowers 102 dataset, created by the University of Oxford’s Visual Geometry Group, consists of 8,189 images spanning 102 flower species, designed to challenge fine-grained image classification models. With varying lighting, backgrounds, and an uneven class distribution, it serves as a benchmark for testing model robustness and optimizing classification accuracy, making it popular for transfer learning experiments with models like VGG16, ResNet, and EfficientNet."
# Static HTML table of all 102 trained classes (17 rows x 6 columns),
# displayed under the interface. Fixed the unclosed paragraph: the original
# intro line ended with a second `<p>` instead of `</p>`.
labels_table = """<p>Classes included in training:</p>
<table>
<tr>
<td>alpine sea holly</td>
<td>anthurium</td>
<td>artichoke</td>
<td>azalea</td>
<td>ball moss</td>
<td>balloon flower</td>
</tr>
<tr>
<td>barbeton daisy</td>
<td>bearded iris</td>
<td>bee balm</td>
<td>bird of paradise</td>
<td>bishop of llandaff</td>
<td>black-eyed susan</td>
</tr>
<tr>
<td>blackberry lily</td>
<td>blanket flower</td>
<td>bolero deep blue</td>
<td>bougainvillea</td>
<td>bromelia</td>
<td>buttercup</td>
</tr>
<tr>
<td>californian poppy</td>
<td>camellia</td>
<td>canna lily</td>
<td>canterbury bells</td>
<td>cape flower</td>
<td>carnation</td>
</tr>
<tr>
<td>cautleya spicata</td>
<td>clematis</td>
<td>columbine</td>
<td>colt's foot</td>
<td>common dandelion</td>
<td>corn poppy</td>
</tr>
<tr>
<td>cyclamen</td>
<td>daffodil</td>
<td>desert-rose</td>
<td>english marigold</td>
<td>fire lily</td>
<td>foxglove</td>
</tr>
<tr>
<td>frangipani</td>
<td>fritillary</td>
<td>garden phlox</td>
<td>gaura</td>
<td>gazania</td>
<td>geranium</td>
</tr>
<tr>
<td>giant white arum lily</td>
<td>globe thistle</td>
<td>globe-flower</td>
<td>grape hyacinth</td>
<td>great masterwort</td>
<td>hard-leaved pocket orchid</td>
</tr>
<tr>
<td>hibiscus</td>
<td>hippeastrum</td>
<td>japanese anemone</td>
<td>king protea</td>
<td>lenten rose</td>
<td>lotus</td>
</tr>
<tr>
<td>love in the mist</td>
<td>magnolia</td>
<td>mallow</td>
<td>marigold</td>
<td>mexican aster</td>
<td>mexican petunia</td>
</tr>
<tr>
<td>monkshood</td>
<td>moon orchid</td>
<td>morning glory</td>
<td>orange dahlia</td>
<td>osteospermum</td>
<td>oxeye daisy</td>
</tr>
<tr>
<td>passion flower</td>
<td>pelargonium</td>
<td>peruvian lily</td>
<td>petunia</td>
<td>pincushion flower</td>
<td>pink primrose</td>
</tr>
<tr>
<td>pink-yellow dahlia</td>
<td>poinsettia</td>
<td>primula</td>
<td>prince of wales feathers</td>
<td>purple coneflower</td>
<td>red ginger</td>
</tr>
<tr>
<td>rose</td>
<td>ruby-lipped cattleya</td>
<td>siam tulip</td>
<td>silverbush</td>
<td>snapdragon</td>
<td>spear thistle</td>
</tr>
<tr>
<td>spring crocus</td>
<td>stemless gentian</td>
<td>sunflower</td>
<td>sweet pea</td>
<td>sweet william</td>
<td>sword lily</td>
</tr>
<tr>
<td>thorn apple</td>
<td>tiger lily</td>
<td>toad lily</td>
<td>tree mallow</td>
<td>tree poppy</td>
<td>trumpet creeper</td>
</tr>
<tr>
<td>wallflower</td>
<td>water lily</td>
<td>watercress</td>
<td>wild pansy</td>
<td>windflower</td>
<td>yellow iris</td>
</tr>
</table>
"""
# Collect the example images shipped with the app. Sorting makes the examples
# gallery order deterministic (os.listdir order is arbitrary), and filtering
# by extension keeps stray files (e.g. .DS_Store) out of the examples panel.
_IMAGE_EXTS = (".jpg", ".jpeg", ".png", ".webp", ".bmp", ".gif")
examples = sorted(
    "examples/" + filename
    for filename in os.listdir("examples")
    if filename.lower().endswith(_IMAGE_EXTS)
)
# Assemble the page: title, description, the classifier interface itself,
# and the table of supported classes underneath.
with gr.Blocks() as demo:
    gr.HTML(title)
    gr.HTML(description)
    # gr.Interface created inside a Blocks context renders as an embedded
    # component; examples populate the clickable gallery under the input.
    gr.Interface(fn=predict,
                 inputs=gr.Image(type="pil"),
                 outputs=gr.Label(num_top_classes=3),
                 examples=examples)
    gr.HTML(labels_table)

# Standard script entry point: start the Gradio server when run directly.
if __name__ == "__main__":
    demo.launch()