Spaces:
Sleeping
Sleeping
File size: 5,871 Bytes
5e6cb45 e5ee328 5e6cb45 e5ee328 5e6cb45 e5ee328 5e6cb45 e5ee328 5e6cb45 e5ee328 5e6cb45 e5ee328 b96a216 e5ee328 dc82a3f e5ee328 c4528a6 e5ee328 c4528a6 e5ee328 5e6cb45 e5ee328 c4528a6 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 |
import json
import os

import gradio as gr
from fastai.vision.all import *
# Load the category-to-name mapping: Oxford Flowers 102 numeric class
# labels (stored as strings, e.g. "21") -> human-readable flower names.
with open('cat_to_name.json', 'r') as f:
    cat_to_name = json.load(f)

# Restore the exported fastai learner and grab its class vocabulary.
# NOTE(review): assumes flower_classifier.pkl sits next to this script —
# both files must ship with the Space for startup to succeed.
learn = load_learner('flower_classifier.pkl')
labels = learn.dls.vocab
def predict(img):
    """Classify a flower image and return human-readable class probabilities.

    Args:
        img: An input image in any form ``PILImage.create`` accepts
            (PIL image, filepath, bytes, or array).

    Returns:
        dict mapping human-readable flower names to ``float`` probabilities,
        in the shape ``gr.Label`` expects.
    """
    img = PILImage.create(img)
    # learn.predict returns (decoded, decoded_idx, probabilities);
    # only the per-class probability tensor is needed here.
    _, _, probs = learn.predict(img)
    # Build the output in a single pass instead of constructing an
    # intermediate label->prob dict and then remapping its keys.
    # Fall back to the raw label if a key is missing from cat_to_name,
    # so one gap in the JSON can't crash every prediction.
    return {
        cat_to_name.get(str(label), str(label)): float(probs[i])
        for i, label in enumerate(labels)
    }
# Page header and intro copy, rendered as raw HTML by gr.HTML() below.
title = "<h1>Flower Classifier</h1>"
description = "<p>An introductory project using fastai for transfer learning using an image classification model, Gradio to demo it on a web app, and HuggingFace Spaces for deployment. I used the ResNet34 architecture on the Oxford Flowers 102 dataset, with a random 80%/20% train/test split, input resizing to 224x224x3, batch data augmentation, a learning rate found by `lr_find()`, only 2 training epochs, and the rest of the hyperparameters as fastai defaults. As someone who's learned neural networks from the bottom up with a strong theoretical foundation, it was fun to see how \"easy\" ML can be for simpler tasks, as the model achieves 91% test accuracy (while a random guess would yield 1% accuracy)!</p><p>Feel free to browse the example images below (10 are from the test set, and 2 are my own out-of-distribution images) or upload your own image of a flower. The model may have overfit to the training distribution, as it doesn't generalize well to images with cluttered backgrounds (see my dahlia photo and my tulip photo) and has 100% certainty of correct guesses for some examples in the test set.</p><p>The Oxford Flowers 102 dataset, created by the University of Oxford’s Visual Geometry Group, consists of 8,189 images spanning 102 flower species, designed to challenge fine-grained image classification models. With varying lighting, backgrounds, and an uneven class distribution, it serves as a benchmark for testing model robustness and optimizing classification accuracy, making it popular for transfer learning experiments with models like VGG16, ResNet, and EfficientNet."
# HTML table of all 102 flower classes the model was trained on
# (17 rows x 6 columns), rendered via gr.HTML() at the bottom of the page.
# Fix: the intro line closed with a second opening "<p>" instead of "</p>".
labels_table = """<p>Classes included in training:</p>
<table>
<tr>
<td>alpine sea holly</td>
<td>anthurium</td>
<td>artichoke</td>
<td>azalea</td>
<td>ball moss</td>
<td>balloon flower</td>
</tr>
<tr>
<td>barbeton daisy</td>
<td>bearded iris</td>
<td>bee balm</td>
<td>bird of paradise</td>
<td>bishop of llandaff</td>
<td>black-eyed susan</td>
</tr>
<tr>
<td>blackberry lily</td>
<td>blanket flower</td>
<td>bolero deep blue</td>
<td>bougainvillea</td>
<td>bromelia</td>
<td>buttercup</td>
</tr>
<tr>
<td>californian poppy</td>
<td>camellia</td>
<td>canna lily</td>
<td>canterbury bells</td>
<td>cape flower</td>
<td>carnation</td>
</tr>
<tr>
<td>cautleya spicata</td>
<td>clematis</td>
<td>columbine</td>
<td>colt's foot</td>
<td>common dandelion</td>
<td>corn poppy</td>
</tr>
<tr>
<td>cyclamen</td>
<td>daffodil</td>
<td>desert-rose</td>
<td>english marigold</td>
<td>fire lily</td>
<td>foxglove</td>
</tr>
<tr>
<td>frangipani</td>
<td>fritillary</td>
<td>garden phlox</td>
<td>gaura</td>
<td>gazania</td>
<td>geranium</td>
</tr>
<tr>
<td>giant white arum lily</td>
<td>globe thistle</td>
<td>globe-flower</td>
<td>grape hyacinth</td>
<td>great masterwort</td>
<td>hard-leaved pocket orchid</td>
</tr>
<tr>
<td>hibiscus</td>
<td>hippeastrum</td>
<td>japanese anemone</td>
<td>king protea</td>
<td>lenten rose</td>
<td>lotus</td>
</tr>
<tr>
<td>love in the mist</td>
<td>magnolia</td>
<td>mallow</td>
<td>marigold</td>
<td>mexican aster</td>
<td>mexican petunia</td>
</tr>
<tr>
<td>monkshood</td>
<td>moon orchid</td>
<td>morning glory</td>
<td>orange dahlia</td>
<td>osteospermum</td>
<td>oxeye daisy</td>
</tr>
<tr>
<td>passion flower</td>
<td>pelargonium</td>
<td>peruvian lily</td>
<td>petunia</td>
<td>pincushion flower</td>
<td>pink primrose</td>
</tr>
<tr>
<td>pink-yellow dahlia</td>
<td>poinsettia</td>
<td>primula</td>
<td>prince of wales feathers</td>
<td>purple coneflower</td>
<td>red ginger</td>
</tr>
<tr>
<td>rose</td>
<td>ruby-lipped cattleya</td>
<td>siam tulip</td>
<td>silverbush</td>
<td>snapdragon</td>
<td>spear thistle</td>
</tr>
<tr>
<td>spring crocus</td>
<td>stemless gentian</td>
<td>sunflower</td>
<td>sweet pea</td>
<td>sweet william</td>
<td>sword lily</td>
</tr>
<tr>
<td>thorn apple</td>
<td>tiger lily</td>
<td>toad lily</td>
<td>tree mallow</td>
<td>tree poppy</td>
<td>trumpet creeper</td>
</tr>
<tr>
<td>wallflower</td>
<td>water lily</td>
<td>watercress</td>
<td>wild pansy</td>
<td>windflower</td>
<td>yellow iris</td>
</tr>
</table>
"""
# Make examples a list of all image filenames in the examples folder,
# fed to gr.Interface as clickable example inputs.
# NOTE(review): assumes examples/ exists and holds only image files — verify.
examples = ["examples/" + filename for filename in os.listdir("examples")]
# Assemble the page top to bottom: title, intro, the classifier interface
# (image in, top-3 labels out, with example thumbnails), then the class table.
with gr.Blocks() as demo:
    gr.HTML(title)
    gr.HTML(description)
    gr.Interface(fn=predict,
                 inputs=gr.Image(type="pil"),
                 outputs=gr.Label(num_top_classes=3),
                 examples=examples)
    gr.HTML(labels_table)

if __name__ == "__main__":
    demo.launch()