# Hugging Face Space app.py — commit afc4fa4 (verified) by mayonaise1979.
import gradio as gr
from transformers import AutoImageProcessor, AutoModelForImageClassification
from PIL import Image
import torch
import torch.nn.functional as F
# Load the image processor and the fine-tuned classification model from the
# Space's root directory (config/weights are expected alongside app.py).
model_path = "./"
processor = AutoImageProcessor.from_pretrained(model_path)
model = AutoModelForImageClassification.from_pretrained(model_path)
def classify_image(image):
    """Classify a trash image and return per-class probabilities.

    Args:
        image: A ``PIL.Image`` supplied by the Gradio image widget, or
            ``None`` when the input is cleared.

    Returns:
        Dict mapping label name to probability (``float`` in [0, 1]),
        suitable for ``gr.Label``; ``None`` when no image was provided.
    """
    if image is None:
        return None
    inputs = processor(images=image, return_tensors="pt")
    # Inference only: disable autograd bookkeeping to save memory/compute.
    with torch.no_grad():
        outputs = model(**inputs)
    # Softmax over the class dimension; [0] drops the batch dimension.
    probabilities = F.softmax(outputs.logits, dim=1)[0]
    # Map each class index to its human-readable label from the model config.
    return {
        model.config.id2label[i]: float(prob)
        for i, prob in enumerate(probabilities)
    }
guide_text = """
### F1-Score 0.92 ์„ฑ๋Šฅ ํ™•์ธํ•˜๊ธฐ (ํ•„๋…!)
์ด ๋ชจ๋ธ์€ ์‚ฐ์—…์šฉ ์—ฃ์ง€ ๋””๋ฐ”์ด์Šค(๋‹จ์ˆœ ๋ฐฐ๊ฒฝ) ํ™˜๊ฒฝ์„ ๊ฐ€์ •ํ•˜์—ฌ ๋งŒ๋“  ๊ฒฝ๋Ÿ‰ํ™” ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค.
๊ฒ€์ฆ๋œ ์„ฑ๋Šฅ์„ ํ™•์ธํ•˜์‹œ๋ ค๋ฉด, ์ œ๊ฐ€ ํ•™์Šต์— ์‹ค์ œ ์‚ฌ์šฉํ•œ ์ด๋ฏธ์ง€๋ฅผ ๋„ฃ์–ด๋ณด์„ธ์š”.
<a href="https://huggingface.co/datasets/mayonaise1979/datasets/resolve/main/0126.zip" target="_blank">ํ…Œ์ŠคํŠธ์šฉ ์ƒ˜ํ”Œ ์ด๋ฏธ์ง€ ๋‹ค์šด๋กœ๋“œ (ํด๋ฆญ)</a>
"""
dev_summary = """
---
### ๊ฐœ๋ฐœ ๋กœ๊ทธ: f1: (0.68 โ†’ 0.96)
๋‹จ์ˆœํ•œ ํŠœ๋‹์ด ์•„๋‹Œ, **๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„ **์„ ํ†ตํ•ด ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•œ 10๋‹จ๊ณ„์˜ ์‹คํ—˜ ๊ณผ์ •์ž…๋‹ˆ๋‹ค.
| ๋‹จ๊ณ„ | ์ฃผ์š” ์‹œ๋„ (Experiment) | F1-Score | ๋ถ„์„ ๋ฐ ๊ฒฐ๊ณผ (Key Insight) |
| :---: | :--- | :---: | :--- |
| 1 | Baseline (MobileViT) | 0.68 | ๋‚ฎ์€ ์„ฑ๋Šฅ, ํด๋ž˜์Šค ๋ถˆ๊ท ํ˜• ํ™•์ธ |
| 2~3 | ์ฆ๊ฐ•(Augmentation) ์žฌ๊ฒ€์ฆ | 0.67 | ํ•™์Šต๋ฅ /์ฆ๊ฐ• ์กฐ์ ˆํ–ˆ์œผ๋‚˜ ์„ฑ๋Šฅ ์ •์ฒด (ํšจ๊ณผ ๋ฏธ๋ฏธ) |
| 4~5 | Class Weight ์ ์šฉ | 0.65 | ๋…ธ์ด์ฆˆ ๋ฐ์ดํ„ฐ์— ๊ณผ์ ํ•ฉ๋˜์–ด ์„ฑ๋Šฅ ์˜คํžˆ๋ ค ํ•˜๋ฝ |
| 6 | ํŒŒ๋ผ๋ฏธํ„ฐ ์žฌ์กฐ์ • | 0.73 | ์ „์ฒ˜๋ฆฌ ๋ณ€๊ฒฝ ์—†์ด๋Š” ํ•œ๊ณ„์ž„์„ ํ™•์ธ |
| 7 | ๋ฐ์ดํ„ฐ 2์ฐจ ์ „์ฒ˜๋ฆฌ (Cleaning) | 0.82 | ๋ถˆ๋Ÿ‰ ๋ฐ์ดํ„ฐ 50 ์‚ญ์ œ โ†’ ์„ฑ๋Šฅ ๋น„์•ฝ์  ์ƒ์Šน |
| 8 | ๋ชจ๋ธ ๋ณ€๊ฒฝ (EfficientFormer) | 0.92 | ์ •์ œ๋œ ๋ฐ์ดํ„ฐ์— ์ตœ์‹  ๊ฒฝ๋Ÿ‰ ๋ชจ๋ธ ๋„์ž… |
| 9~10 | ํ•ด์ƒ๋„/์ •๊ทœํ™” ์ถ”๊ฐ€ ์‹คํ—˜ | 0.92 | ์„ฑ๋Šฅ ์ˆ˜๋ ด (์ถ”๊ฐ€ ๊ฐœ์„ ํญ ๋ฏธ๋ฏธ) |
| 11 | 3์ฐจ ์ „์ฒ˜๋ฆฌ, ๋ชจ๋ธ๋ณ€๊ฒฝ: google/vit-base-patch16-224 | 0.96 | ์†์‹ค 0.6->0.03์œผ๋กœ ๊ฐ์†Œ |
> ๊ฒฐ๊ณผ์ ์œผ๋กœ ํŠธ๋žœ์Šคํฌ๋จธ ๊ณ ์„ฑ๋Šฅ ๋ชจ๋ธ๋ณด๋‹ค ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ์ด ์ค‘์š”ํ•จ. ํ—ˆ๋‚˜ ์˜ค์—ผ๋ฐ์ดํ„ฐ ์™„์ „ํžˆ ์ œ๊ฑฐํ•˜์ง„ ์•Š์•˜์Œ ์ž์ฒด์ ์ธ ์ฆ๊ฐ•ํšจ๊ณผ๋ฅผ ์œ„ํ•ด์„œ
### Classification Report
| Class | Precision | Recall | F1-score | Support |
|----------|----------:|-------:|---------:|--------:|
| PET | 0.96 | 0.94 | 0.95 | 218 |
| Can | 0.99 | 0.97 | 0.98 | 283 |
| Glass | 0.96 | 0.97 | 0.97 | 221 |
| Paper | 0.98 | 0.98 | 0.98 | 315 |
| Plastic | 0.95 | 0.95 | 0.95 | 308 |
| Vinyl | 0.95 | 0.97 | 0.96 | 282 |
| **Accuracy** | | | **0.96** | |
| **Macro Avg** | 0.96 | 0.96 | 0.96 | |
| **Weighted Avg** | 0.97 | 0.96 | 0.96 | |
์‚ฌ์šฉ ๋ฐ์ดํ„ฐ : jms0923/tod: Trash_Object_Detection_Dataset_v1.0(zenodo)
"""
# Assemble the Gradio demo: an image-in / label-out interface wired to the
# classifier, with the usage guide as the description and the dev log as the
# article footer.
_ui_config = dict(
    fn=classify_image,
    inputs=gr.Image(type="pil", label="여기에 이미지를 드래그하세요"),
    outputs=gr.Label(num_top_classes=3, label="분류 결과"),
    title=" 경량화 재활용품 분류기",
    description=guide_text,
    article=dev_summary,
)
interface = gr.Interface(**_ui_config)
interface.launch()