# NOTE(review): the three lines below this file's code ("Spaces: / Sleeping /
# Sleeping") were Hugging Face page-status residue from a web scrape, not code;
# preserved here as a comment so the module stays importable.
import gradio as gr
from transformers import AutoImageProcessor, AutoModelForImageClassification
from PIL import Image
import torch
import torch.nn.functional as F

# Load the fine-tuned image-classification checkpoint shipped alongside this
# script ("./" — presumably the root of the Hugging Face Space repo, where
# config.json / model weights live; verify against the Space file layout).
# Both the preprocessing pipeline and the model come from the same directory
# so their label mapping and input normalization stay in sync.
model_path = "./"
processor = AutoImageProcessor.from_pretrained(model_path)
model = AutoModelForImageClassification.from_pretrained(model_path)
def classify_image(image):
    """Classify one image and return per-class probabilities.

    Parameters
    ----------
    image : PIL.Image.Image or None
        Image supplied by the Gradio input; Gradio passes ``None`` when
        the input is cleared.

    Returns
    -------
    dict[str, float] or None
        Mapping of label name -> softmax probability, or ``None`` when
        no image was provided.
    """
    if image is None:
        return None

    # Preprocess to model tensors; inference only, so gradients are disabled.
    model_inputs = processor(images=image, return_tensors="pt")
    with torch.no_grad():
        logits = model(**model_inputs).logits

    # Single image in the batch -> take row 0 of the softmax distribution.
    class_probs = F.softmax(logits, dim=1)[0]
    return {
        model.config.id2label[idx]: float(score)
        for idx, score in enumerate(class_probs)
    }
# User-facing Markdown shown above the demo (rendered via Interface
# `description=`). NOTE(review): some Korean syllables look mojibake'd by the
# page scrape (e.g. "ํ ์คํธ", "ํ๋ !"); kept byte-for-byte since this is a
# runtime string — confirm against the original source before "fixing".
guide_text = """
### F1-Score 0.92 ์ฑ๋ฅ ํ์ธํ๊ธฐ (ํ๋ !)
์ด ๋ชจ๋ธ์ ์ฐ์ ์ฉ ์ฃ์ง ๋๋ฐ์ด์ค(๋จ์ ๋ฐฐ๊ฒฝ) ํ๊ฒฝ์ ๊ฐ์ ํ์ฌ ๋ง๋ ๊ฒฝ๋ํ ๋ชจ๋ธ์ ๋๋ค.
๊ฒ์ฆ๋ ์ฑ๋ฅ์ ํ์ธํ์๋ ค๋ฉด, ์ ๊ฐ ํ์ต์ ์ค์ ์ฌ์ฉํ ์ด๋ฏธ์ง๋ฅผ ๋ฃ์ด๋ณด์ธ์.
<a href="https://huggingface.co/datasets/mayonaise1979/datasets/resolve/main/0126.zip" target="_blank">ํ ์คํธ์ฉ ์ํ ์ด๋ฏธ์ง ๋ค์ด๋ก๋ (ํด๋ฆญ)</a>
"""
# Development-log Markdown rendered below the demo (Interface `article=`).
# NOTE(review): table pipes reconstructed from a scraper-wrapped source
# (`| … | |` residue stripped per line) — verify rendering against the
# original Space; Korean text kept byte-for-byte, including apparently
# garbled syllables ("๋ ธ์ด์ฆ", "โ"), since this is a runtime string.
dev_summary = """
---
### ๊ฐ๋ฐ ๋ก๊ทธ: f1: (0.68 โ 0.96)
๋จ์ํ ํ๋์ด ์๋, **๋ฐ์ดํฐ ํ์ง ๊ฐ์ **์ ํตํด ๋ฌธ์ ๋ฅผ ํด๊ฒฐํ 10๋จ๊ณ์ ์คํ ๊ณผ์ ์ ๋๋ค.
| ๋จ๊ณ | ์ฃผ์ ์๋ (Experiment) | F1-Score | ๋ถ์ ๋ฐ ๊ฒฐ๊ณผ (Key Insight) |
| :---: | :--- | :---: | :--- |
| 1 | Baseline (MobileViT) | 0.68 | ๋ฎ์ ์ฑ๋ฅ, ํด๋์ค ๋ถ๊ท ํ ํ์ธ |
| 2~3 | ์ฆ๊ฐ(Augmentation) ์ฌ๊ฒ์ฆ | 0.67 | ํ์ต๋ฅ /์ฆ๊ฐ ์กฐ์ ํ์ผ๋ ์ฑ๋ฅ ์ ์ฒด (ํจ๊ณผ ๋ฏธ๋ฏธ) |
| 4~5 | Class Weight ์ ์ฉ | 0.65 | ๋ ธ์ด์ฆ ๋ฐ์ดํฐ์ ๊ณผ์ ํฉ๋์ด ์ฑ๋ฅ ์คํ๋ ค ํ๋ฝ |
| 6 | ํ๋ผ๋ฏธํฐ ์ฌ์กฐ์ | 0.73 | ์ ์ฒ๋ฆฌ ๋ณ๊ฒฝ ์์ด๋ ํ๊ณ์์ ํ์ธ |
| 7 | ๋ฐ์ดํฐ 2์ฐจ ์ ์ฒ๋ฆฌ (Cleaning) | 0.82 | ๋ถ๋ ๋ฐ์ดํฐ 50 ์ญ์ โ ์ฑ๋ฅ ๋น์ฝ์ ์์น |
| 8 | ๋ชจ๋ธ ๋ณ๊ฒฝ (EfficientFormer) | 0.92 | ์ ์ ๋ ๋ฐ์ดํฐ์ ์ต์ ๊ฒฝ๋ ๋ชจ๋ธ ๋์ |
| 9~10 | ํด์๋/์ ๊ทํ ์ถ๊ฐ ์คํ | 0.92 | ์ฑ๋ฅ ์๋ ด (์ถ๊ฐ ๊ฐ์ ํญ ๋ฏธ๋ฏธ) |
| 11 | 3์ฐจ ์ ์ฒ๋ฆฌ, ๋ชจ๋ธ๋ณ๊ฒฝ: google/vit-base-patch16-224 | 0.96 | ์์ค 0.6->0.03์ผ๋ก ๊ฐ์ |
> ๊ฒฐ๊ณผ์ ์ผ๋ก ํธ๋์คํฌ๋จธ ๊ณ ์ฑ๋ฅ ๋ชจ๋ธ๋ณด๋ค ๋ฐ์ดํฐ ํ์ง์ด ์ค์ํจ. ํ๋ ์ค์ผ๋ฐ์ดํฐ ์์ ํ ์ ๊ฑฐํ์ง ์์์ ์์ฒด์ ์ธ ์ฆ๊ฐํจ๊ณผ๋ฅผ ์ํด์
### Classification Report
| Class | Precision | Recall | F1-score | Support |
|----------|----------:|-------:|---------:|--------:|
| PET | 0.96 | 0.94 | 0.95 | 218 |
| Can | 0.99 | 0.97 | 0.98 | 283 |
| Glass | 0.96 | 0.97 | 0.97 | 221 |
| Paper | 0.98 | 0.98 | 0.98 | 315 |
| Plastic | 0.95 | 0.95 | 0.95 | 308 |
| Vinyl | 0.95 | 0.97 | 0.96 | 282 |
| **Accuracy** | | | **0.96** | |
| **Macro Avg** | 0.96 | 0.96 | 0.96 | |
| **Weighted Avg** | 0.97 | 0.96 | 0.96 | |
์ฌ์ฉ ๋ฐ์ดํฐ : jms0923/tod: Trash_Object_Detection_Dataset_v1.0(zenodo)
"""
# Assemble the drag-and-drop demo UI: one image in, top-3 labels out, with
# the usage guide above and the development log below the component area.
drop_zone = gr.Image(type="pil", label="์ฌ๊ธฐ์ ์ด๋ฏธ์ง๋ฅผ ๋๋๊ทธํ์ธ์")
result_panel = gr.Label(num_top_classes=3, label="๋ถ๋ฅ ๊ฒฐ๊ณผ")

interface = gr.Interface(
    fn=classify_image,
    inputs=drop_zone,
    outputs=result_panel,
    title=" ๊ฒฝ๋ํ ์ฌํ์ฉํ ๋ถ๋ฅ๊ธฐ",
    description=guide_text,
    article=dev_summary,
)
interface.launch()