scottymcgee commited on
Commit
52a3219
Β·
verified Β·
1 Parent(s): 14464c3

Create app.py

Browse files

This is a simple app that demonstrates how to use an AutoGluon multimodal predictor in a Gradio Space to predict whether a picture contains a stop sign. To use it, just upload a photo; the result — stop sign or no stop sign — will be generated automatically.

Files changed (1) hide show
  1. app.py +124 -0
app.py ADDED
@@ -0,0 +1,124 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os # For reading environment variables
2
+ import shutil # For directory cleanup
3
+ import zipfile # For extracting model archives
4
+ import pathlib # For path manipulations
5
+ import tempfile # For creating temporary files/directories
6
+
7
+ import gradio # For interactive UI
8
+ import pandas # For tabular data handling
9
+ import PIL.Image # For image I/O
10
+
11
+ import huggingface_hub # For downloading model assets
12
+ import autogluon.multimodal # For loading AutoGluon image classifier
13
+
14
# Hugging Face Hub location of the model (native AutoGluon predictor packed as a zip).
MODEL_REPO_ID = "yusenthebot/sign-identification-autogluon"
ZIP_FILENAME = "autogluon_sign_predictor_dir.zip"
# Optional access token; unset means anonymous download.
HF_TOKEN = os.environ.get("HF_TOKEN")

# Local directories for downloaded and extracted model assets.
CACHE_DIR = pathlib.Path("hf_assets")
EXTRACT_DIR = CACHE_DIR / "predictor_native"
22
+
23
# Fetch the zipped native predictor from the Hub and unpack it locally.
def _prepare_predictor_dir() -> str:
    """Download the model zip, extract it, and return the predictor directory.

    If the archive contains exactly one top-level folder, that folder is the
    predictor root; otherwise the extraction directory itself is returned.
    """
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    zip_path = huggingface_hub.hf_hub_download(
        repo_id=MODEL_REPO_ID,
        filename=ZIP_FILENAME,
        repo_type="model",
        token=HF_TOKEN,
        local_dir=str(CACHE_DIR),
        # NOTE(review): deprecated/ignored in recent huggingface_hub releases;
        # kept for compatibility with older versions — confirm pinned version.
        local_dir_use_symlinks=False,
    )

    # Start from a clean extraction directory on every run.
    if EXTRACT_DIR.exists():
        shutil.rmtree(EXTRACT_DIR)
    EXTRACT_DIR.mkdir(parents=True, exist_ok=True)

    with zipfile.ZipFile(zip_path, "r") as archive:
        archive.extractall(str(EXTRACT_DIR))

    entries = list(EXTRACT_DIR.iterdir())
    if len(entries) == 1 and entries[0].is_dir():
        return str(entries[0])
    return str(EXTRACT_DIR)
42
+
43
# Module-level side effect: download/extract the model once at import time,
# then load the native AutoGluon predictor from the extracted directory.
# The Space blocks on this while starting up.
PREDICTOR_DIR = _prepare_predictor_dir()
PREDICTOR = autogluon.multimodal.MultiModalPredictor.load(PREDICTOR_DIR)
45
+
46
+ # Explicit class labels (edit copy as desired)
47
+ CLASS_LABELS = {0: "β›” No Stop Sign", 1: "πŸ›‘ Stop Sign"} # Updated class labels
48
+
49
+ # Helper to map model class -> human label
50
+ def _human_label(c):
51
+ try:
52
+ ci = int(c)
53
+ return CLASS_LABELS.get(ci, str(c))
54
+ except Exception:
55
+ return CLASS_LABELS.get(c, str(c))
56
+
57
# Run the classifier on one uploaded image and return per-class confidences.
def do_predict(pil_img: "PIL.Image.Image"):
    """Predict whether *pil_img* contains a stop sign.

    Parameters
    ----------
    pil_img : PIL.Image.Image or None
        The uploaded/captured image, or None when the input is cleared.

    Returns
    -------
    dict
        ``{label: probability}`` suitable for ``gr.Label``. An empty dict
        when no image is provided.

    Notes
    -----
    Bug fixes vs. the original:
    - The ``None`` branch returned a 3-tuple ``(str, dict, DataFrame)`` while
      the event wiring has a single output component and the success path
      returns one dict; it now returns ``{}`` so Gradio gets a consistent,
      single value.
    - ``tempfile.mkdtemp()`` leaked one directory per prediction; the temp
      directory is now cleaned up via a context manager.
    """
    if pil_img is None:
        return {}

    # AutoGluon expects a DataFrame of image *file paths*, so persist the
    # upload to a temporary file for the duration of the prediction.
    with tempfile.TemporaryDirectory() as tmpdir:
        img_path = pathlib.Path(tmpdir) / "input.png"
        pil_img.save(img_path)

        df = pandas.DataFrame({"image": [str(img_path)]})  # AutoGluon input format

        # Class probabilities for the single row.
        proba_df = PREDICTOR.predict_proba(df)

    # User-friendly column names.
    proba_df = proba_df.rename(columns={0: "β›” No Stop Sign (0)", 1: "πŸ›‘ Stop Sign (1)"})
    row = proba_df.iloc[0]

    # Ranked dict expected by gr.Label; missing columns default to 0.0.
    return {
        "β›” No Stop Sign": float(row.get("β›” No Stop Sign (0)", 0.0)),
        "πŸ›‘ Stop Sign": float(row.get("πŸ›‘ Stop Sign (1)", 0.0)),
    }
84
+
85
# Representative example images (remote URLs).
# NOTE(review): these are *signed* dataset-server URLs carrying an `Expires=`
# parameter, so they will stop resolving once the signature expires — consider
# replacing them with stable unauthenticated URLs or files bundled in the repo.
EXAMPLES = [
    ["https://datasets-server.huggingface.co/assets/ecopus/sign_identification/--/a1506696eb48233ed9cd1afa1bea8e7002a7ad85/--/default/original/2/image/image.jpg?Expires=1758737216&Signature=uWOZzcVh97AFIfr9g6DQpm3YDAJUQKn5etPOan9JsZAKH5p0PGd1lX3gD6BfoN~MxhSru7DSYqX25ohageHm0dpAAnP0MiOnbirCRjRIKubBuRUI6NH5KdZ5uIAyA7Qdxd4f7~jo1YDS4QfqWPv0WMhJnKXCWqYStPFENK8HkVN7lYBk4q1lC8iEtjav5WSib~n2Wk8QN9BnhXCvczCpWol7aCCXaZG2QdHjJ3FqwoEW1p2kQC6luJoXXdvHMrRnSvDSR0i7kr6XS2cbaPZc6kJN8jKa06JUYIdgoVZM3RZfrY8DaxR3o9XA7lbM~W8-wrGULCOjLzsgSkN4fuzDuw__&Key-Pair-Id=K3EI6M078Z3AC3"],  # example image 2
    ["https://datasets-server.huggingface.co/assets/ecopus/sign_identification/--/a1506696eb48233ed9cd1afa1bea8e7002a7ad85/--/default/original/6/image/image.jpg?Expires=1758737216&Signature=AxmjPAovyejutgVUsczDxtIIvpqHCYxyE-J1NO2EAvwoasguueWPpFFi3Buv2--LdyEnkuZsbO8tEFttbwinKPirc002NZHcNvM~BMJQIvgAxaKc7hNbQBcMhmFUI27opJU-5fX5QGxVULy5UkTnIPtHtQhdcbMFCa8PVP~AQ5RREBdTbwAzXp-KVVExqH~MUHqt9~HMUoPbNUs41dytIjgsoBF4~PNsDu6~dosLtLjJGQtMFZtRBYUH2gzHNgNHLLikiFrwWp1SKMg1LnkqBLbjq3Qsr7Bw3DmyzrGenIw9mOh9aQ9jFW2Pw32LgC54yxzWHluZ0t2xSP9-Vh0MWw__&Key-Pair-Id=K3EI6M078Z3AC3"],  # example image 6
    ["https://datasets-server.huggingface.co/assets/ecopus/sign_identification/--/a1506696eb48233ed9cd1afa1bea8e7002a7ad85/--/default/original/16/image/image.jpg?Expires=1758737216&Signature=xR-QqmSpXqj9Z6YB-p8URFQ~Og90zuqSnvyHulcwrl42XMeU1bnd~Z6WjIE1T80MPQpas22gM--ou5pw-vzJctNhjz7kH~ZCdvWPL9QIQQ7NxRc5uoEikZTv2SiFBF5X6PpARjddFZmBKfD2~Rq-kUU2-FYvIRwUmodWB5wRblmrovfFB1aRBY4-oLN~ghl2xpb-RgCZWVMbyI5FGMddGhGFygJICcTe2qOmtXLZcyyrGd~j3XwntVE6cp1rlfTGRcXl1H~ykq-G4OBZkOAYCa~u9yLeUcaoaRHGvg6Sla3-efvQUMQ2kENDrqgljAxx-zFLHi4E1ezfFH4SvZx5AA__&Key-Pair-Id=K3EI6M078Z3AC3"],  # example image 16
    ["https://datasets-server.huggingface.co/assets/ecopus/sign_identification/--/a1506696eb48233ed9cd1afa1bea8e7002a7ad85/--/default/original/10/image/image.jpg?Expires=1758737216&Signature=GfHEIohqv359Wkp6WO1U8qyGKzDT5ilUqy4LOSzQZngNdZgiFgg7r902ZWDNR3EhKvTlog3oDsyGlVOWgtEqkR5QR2FUw-kp--Hf6TNeya3D6yvWcoSTaQL3aRgHCm1lFzYJR20HHbJ3M7f-mf4cNwe1m2nzSxTkMNDfUQNdyl6w6l6mujUSFRzSzYd6LSxVYCdLVeCYrnlj3kkpB~DqCz0TrhHbrtj0kxoNuGYSgG6NIdq67sE9m9KWoIX5HpVOfSe-AYiDh8lqO2kQJwQcmVFRiIJqH~q7DjpCnz0SmH1iWdQ-4TZlTL-5PPWGEMHOn1V~L2dFEw4enNd-zNK~tg__&Key-Pair-Id=K3EI6M078Z3AC3"]  # example image 10
]
93
+
94
# Gradio UI — layout and event wiring for the Space.
with gradio.Blocks() as demo:

    # Intro text shown at the top of the page.
    gradio.Markdown("# Stop Sign Detection?")  # App title
    gradio.Markdown("""
    This is a simple app that demonstrates how to use an autogluon multimodal
    predictor in a gradio space to predict if a picture contains a stop sign. To use,
    just upload a photo. The result will be generated automatically if there is or is not a stop sign.
    """)  # App description

    # Image input: file uploads or webcam captures, delivered as a PIL image.
    image_in = gradio.Image(type="pil", label="Input image", sources=["upload", "webcam"])

    # Output: ranked class-probability display (expects a {label: prob} dict).
    proba_pretty = gradio.Label(num_top_classes=2, label="Class probabilities")

    # Re-run the prediction whenever the input image changes.
    image_in.change(fn=do_predict, inputs=[image_in], outputs=[proba_pretty])

    # Clickable example images that populate the input component.
    gradio.Examples(
        examples=EXAMPLES,
        inputs=[image_in],
        label="Representative examples",
        examples_per_page=8,
        cache_examples=False,  # examples are remote URLs; don't pre-compute predictions
    )

# Launch the app when executed directly (Hugging Face Spaces runs app.py as __main__).
if __name__ == "__main__":
    demo.launch()