DimasMP3 commited on
Commit
1af914e
·
1 Parent(s): f15012e

Re-upload model with LFS fixed

Browse files
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ *.keras filter=lfs diff=lfs merge=lfs -text
37
+ models/model_cnn.keras filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,6 +1,6 @@
1
  ---
2
  title: Fruit Classification
3
- emoji: 🏆
4
  colorFrom: blue
5
  colorTo: yellow
6
  sdk: gradio
@@ -9,4 +9,18 @@ app_file: app.py
9
  pinned: false
10
  ---
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
  title: Fruit Classification
3
+ emoji: 🏆
4
  colorFrom: blue
5
  colorTo: yellow
6
  sdk: gradio
 
9
  pinned: false
10
  ---
11
 
12
+ # Fruit Classification Space
13
+
14
+ Space ini memuat demo inference untuk model klasifikasi sayur/buah berbasis TensorFlow yang dilatih pada notebook `code/1_Training_Model.ipynb`.
15
+
16
+ ## Struktur penting
17
+ - `app.py` & `inference.py`: skrip utama Space mirip dengan contoh `hf-model-classification-face`, sudah menambahkan endpoint batch (`predict_batch`).
18
+ - `models/model_cnn.keras`: bobot model siap pakai.
19
+ - `models/class_names.json`: daftar label yang otomatis dibaca saat Space di-load.
20
+ - `code/1_Training_Model.ipynb`: notebook asli proses training untuk referensi/penyesuaian ulang.
21
+
22
+ ## Menjalankan lokal
23
+ 1. `pip install -r requirements.txt`
24
+ 2. `python app.py`
25
+
26
+ Config `config.json` akan dibuat otomatis saat inference pertama agar sesuai standar Hugging Face.
app.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Gradio entry point for the fruit/vegetable classification Space.
import gradio as gr

from inference import predict, predict_batch

APP_TITLE = "# Fruit & Vegetable Classification"
APP_DESC = """
Model CNN berbasis TensorFlow untuk 15 kelas sayur/buah dari dataset Fresh & Rotten.

- Input : Foto RGB tunggal, otomatis di-resize ke ukuran input model.
- Output : Probabilitas per kelas (Top-N dari gr.Label).
- Catatan: Gunakan gambar close-up dengan satu objek utama untuk hasil terbaik.
"""

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown(APP_TITLE)
    gr.Markdown(APP_DESC)

    # Single-image flow: one upload in, top-5 class probabilities out.
    with gr.Row():
        image_in = gr.Image(type="pil", label="Upload image (fruit/vegetable)")
        label_out = gr.Label(num_top_classes=5, label="Predictions")

    with gr.Row():
        predict_btn = gr.Button("Predict", variant="primary")
        gr.ClearButton([image_in, label_out])

    predict_btn.click(predict, inputs=image_in, outputs=label_out, api_name="predict")

    # Optional batch flow: a gallery of images mapped to a JSON list of results.
    with gr.Tab("Batch (optional)"):
        gallery_in = gr.Gallery(label="Images", columns=4, height="auto")
        batch_out = gr.JSON(label="Batch outputs")
        batch_btn = gr.Button("Run batch")
        batch_btn.click(predict_batch, inputs=gallery_in, outputs=batch_out, api_name="predict_batch")

if __name__ == "__main__":
    demo.launch()
code/1_Training_Model.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
inference.py ADDED
@@ -0,0 +1,213 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # inference.py
2
+ """TensorFlow helpers for the fruit classification Hugging Face Space."""
3
+
4
+ from __future__ import annotations
5
+
6
+ import json
7
+ import os
8
+ import time
9
+ from typing import Any, Dict, Iterable, List, Optional
10
+
11
+ import numpy as np
12
+ from PIL import Image, ImageFile
13
+
14
+ ImageFile.LOAD_TRUNCATED_IMAGES = True
15
+
16
+ import tensorflow as tf
17
+
18
+ # ------------------- Label utilities -------------------
19
+ _LABEL_FILES = [
20
+ os.path.join("models", "class_names.json"),
21
+ os.path.join("models", "class_indices.json"),
22
+ os.path.join("models", "idx2class.json"),
23
+ ]
24
+
25
+ _DEFAULT_LABELS = [
26
+ "Bean",
27
+ "Bitter_Gourd",
28
+ "Bottle_Gourd",
29
+ "Brinjal",
30
+ "Broccoli",
31
+ "Cabbage",
32
+ "Capsicum",
33
+ "Carrot",
34
+ "Cauliflower",
35
+ "Cucumber",
36
+ "Papaya",
37
+ "Potato",
38
+ "Pumpkin",
39
+ "Radish",
40
+ "Tomato",
41
+ ]
42
+
43
+
44
+ def _normalize_labels(seq: Iterable[Any]) -> List[str]:
45
+ cleaned: List[str] = []
46
+ seen = set()
47
+ for label in seq:
48
+ if not isinstance(label, str):
49
+ continue
50
+ label = label.strip()
51
+ if not label or label.startswith("."):
52
+ continue
53
+ if label in seen:
54
+ continue
55
+ cleaned.append(label)
56
+ seen.add(label)
57
+ return cleaned
58
+
59
+
60
def _load_labels() -> List[str]:
    """Return class labels from the first readable file in ``_LABEL_FILES``.

    Three JSON layouts are accepted: a plain list of labels, a
    ``{label: index}`` mapping, or an ``{index: label}`` mapping.
    Falls back to a copy of ``_DEFAULT_LABELS`` when nothing usable exists.
    """

    def _is_digits(x: Any) -> bool:
        # int() accepts ints and digit strings; anything else is not an index.
        try:
            int(x)
            return True
        except (TypeError, ValueError):
            return False

    for path in _LABEL_FILES:
        if not os.path.exists(path):
            continue
        try:
            with open(path, "r", encoding="utf-8") as f:
                data = json.load(f)
        except Exception as exc:
            print(f"[LABEL] failed to load {path}: {exc}")
            continue

        if isinstance(data, list):
            labels = _normalize_labels(data)
            if labels:
                print(f"[LABEL] from {os.path.basename(path)} -> {labels}")
                return labels

        if isinstance(data, dict) and data:
            # case A: {label: idx} -- every value parses as an integer index.
            if all(_is_digits(v) for v in data.values()):
                by_index = sorted(data, key=lambda lbl: int(data[lbl]))
                labels = _normalize_labels(by_index)
                if labels:
                    print(f"[LABEL] from {os.path.basename(path)} (label->idx) -> {labels}")
                    return labels

            # case B: {idx: label} -- every key parses as an integer index.
            if all(_is_digits(k) for k in data.keys()):
                size = len(data)
                ordered = [data.get(str(i), data.get(i)) for i in range(size)]
                labels = _normalize_labels(ordered)
                if labels:
                    print(f"[LABEL] from {os.path.basename(path)} (idx->label) -> {labels}")
                    return labels

    print("[LABEL] fallback default ->", _DEFAULT_LABELS)
    return list(_DEFAULT_LABELS)
107
+
108
+
109
+ def _generate_config_if_missing(model: tf.keras.Model, labels: List[str], path: str = "config.json") -> None:
110
+ if os.path.exists(path):
111
+ return
112
+ ishape = model.input_shape
113
+ try:
114
+ img_size = int(ishape[1])
115
+ except Exception as exc: # pragma: no cover - defensive only
116
+ raise AssertionError(f"Invalid input shape for config: {ishape}") from exc
117
+
118
+ cfg = {
119
+ "architectures": ["FruitCNN"],
120
+ "image_size": img_size,
121
+ "num_labels": len(labels),
122
+ "id2label": {str(i): lbl for i, lbl in enumerate(labels)},
123
+ "label2id": {lbl: i for i, lbl in enumerate(labels)},
124
+ }
125
+ with open(path, "w", encoding="utf-8") as f:
126
+ json.dump(cfg, f, indent=2)
127
+ print(f"[CFG] wrote {path} (image_size={img_size}, num_labels={len(labels)})")
128
+
129
+
130
# ------------------- Model wrapper -------------------
class FruitClassifier:
    """Loads the Keras classifier once and exposes dict-style predictions.

    Attributes:
        labels: Ordered class names, kept in sync with the model output size.
        model: The loaded ``tf.keras`` model (loaded with ``compile=False``).
        img_size: Input resolution taken from ``model.input_shape[1]``
            (assumes square NHWC input -- TODO confirm).
        external_rescale: True when pixels must be divided by 255 in
            ``_preprocess`` because no internal preprocessing layer was found.
    """

    def __init__(self, model_path: str = "models/model_cnn.keras") -> None:
        self.labels = _load_labels()
        full_path = os.path.join(os.getcwd(), model_path)
        print(f"[LOAD] {full_path}")
        # compile=False: inference only, no optimizer/loss state is needed.
        self.model: tf.keras.Model = tf.keras.models.load_model(full_path, compile=False)

        ishape = self.model.input_shape
        self.img_size = int(ishape[1])
        print(f"[MODEL] input size = {self.img_size}")

        # Heuristic: scan only the first 12 layers for a Rescaling/Normalization
        # layer; if one exists, skip the external /255 step in _preprocess.
        names_lower = [layer.name.lower() for layer in self.model.layers[:12]]
        has_internal_pp = any("rescaling" in n or "normalization" in n for n in names_lower)
        self.external_rescale = not has_internal_pp
        print(f"[MODEL] internal_preproc={has_internal_pp} -> external_rescale={self.external_rescale}")

        # Keep labels and model outputs the same length: truncate extras or
        # pad with generated "class_<i>" placeholder names.
        num_outputs = int(self.model.output_shape[-1])
        if num_outputs != len(self.labels):
            print(f"[WARN] labels({len(self.labels)}) != outputs({num_outputs}) -> syncing")
            if len(self.labels) >= num_outputs:
                self.labels = self.labels[:num_outputs]
            else:
                for idx in range(len(self.labels), num_outputs):
                    self.labels.append(f"class_{idx}")

        _generate_config_if_missing(self.model, self.labels)

        # Warm-up forward pass so the first user request does not pay the
        # graph-tracing cost; failures are logged but non-fatal.
        try:
            _ = self.model(tf.zeros((1, self.img_size, self.img_size, 3), dtype=tf.float32))
        except Exception as exc:
            print("[WARN] warmup failed:", exc)

    @staticmethod
    def _to_rgb(img: Image.Image) -> Image.Image:
        """Return *img* unchanged if already RGB, otherwise convert it."""
        return img if img.mode == "RGB" else img.convert("RGB")

    def _preprocess(self, img: Image.Image) -> np.ndarray:
        """Resize/convert *img* into a (1, H, W, 3) float32 batch array."""
        img = self._to_rgb(img).resize((self.img_size, self.img_size))
        arr = np.asarray(img, dtype=np.float32)
        if self.external_rescale:
            # Model has no internal rescaling layer: normalize to [0, 1] here.
            arr = arr / 255.0
        return np.expand_dims(arr, 0)

    def predict_dict(self, img: Image.Image) -> Dict[str, float]:
        """Run inference on one image and return ``{label: probability}``."""
        t0 = time.perf_counter()
        probs = self.model.predict(self._preprocess(img), verbose=0)[0]
        result = {label: float(prob) for label, prob in zip(self.labels, probs)}
        dt = (time.perf_counter() - t0) * 1000.0
        print(f"[INF] {len(self.labels)} classes in {dt:.1f} ms")
        return result
181
+
182
+
183
# Module-level singleton: the model is loaded once at import time so both
# Gradio endpoints share it.
_MODEL = FruitClassifier()
184
+
185
+
186
# ------------------- Public API -------------------
def predict(image: Optional[Image.Image]) -> Dict[str, float]:
    """Classify one PIL image into ``{label: probability}``.

    A ``None`` input (Predict pressed with no upload) yields the sentinel
    mapping ``{"Error": 1.0}`` instead of raising.
    """
    return {"Error": 1.0} if image is None else _MODEL.predict_dict(image)
191
+
192
+
193
def predict_batch(images: Iterable[Any]) -> List[Dict[str, float]]:
    """Classify a batch of gallery items; one result dict per item.

    Gradio's ``gr.Gallery`` may deliver each item as a bare image/path or as
    an ``(image, caption)`` pair -- the previous version passed the pair
    straight to ``Image.open`` and always failed, so tuples/lists are now
    unwrapped to their first element first. Items that still cannot be
    coerced to an image map to the sentinel ``{"Error": 1.0}``.
    ``None`` or empty *images* yields an empty list.
    """

    def _as_pil(obj: Any) -> Optional[Image.Image]:
        # Gallery entries can be (image, caption) tuples; keep the image part.
        if isinstance(obj, (tuple, list)):
            obj = obj[0] if obj else None
        if obj is None:
            return None
        if isinstance(obj, Image.Image):
            return obj
        try:
            # Paths / file-like objects; convert for a consistent color mode.
            return Image.open(obj).convert("RGB")
        except Exception:
            return None

    outputs: List[Dict[str, float]] = []
    for item in images or []:
        pil_img = _as_pil(item)
        outputs.append({"Error": 1.0} if pil_img is None else _MODEL.predict_dict(pil_img))
    return outputs
211
+
212
+
213
+ __all__ = ["predict", "predict_batch"]
models/class_names.json ADDED
@@ -0,0 +1 @@
 
 
1
+ [".ipynb_checkpoints", "Bean", "Bitter_Gourd", "Bottle_Gourd", "Brinjal", "Broccoli", "Cabbage", "Capsicum", "Carrot", "Cauliflower", "Cucumber", "Papaya", "Potato", "Pumpkin", "Radish", "Tomato"]
models/model_cnn.keras ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1153a74cb2d242e5dc56222442739299621d967f50381070c10ebe8bb1ad2e88
3
+ size 51538309
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ tensorflow==2.16.1
2
+ gradio==4.12.0
3
+ numpy==1.26.4
4
+ Pillow==10.1.0