File size: 1,070 Bytes
20595ec
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import gradio as gr
from transformers import pipeline

# Load the fine-tuned ViT bean-disease model (downloads from the HF Hub on first run).
vit_classifier = pipeline("image-classification", model="LindiSimon/vit-beans-model")
# CLIP used for zero-shot comparison against the fine-tuned model.
clip_detector = pipeline(model="openai/clip-vit-large-patch14", task="zero-shot-image-classification")

# Candidate labels for CLIP zero-shot classification; these mirror the
# three classes of the beans dataset the ViT model was trained on.
labels_beans = ["angular_leaf_spot", "bean_rust", "healthy"]

def classify_bean(image):
    """Classify a bean-leaf image with both models and return their scores.

    Runs the fine-tuned ViT classifier and the CLIP zero-shot detector on
    *image* (a file path) and returns one dict holding both results, each
    as a mapping of label -> confidence score.
    """
    def to_scores(predictions):
        # Flatten a pipeline result list of {'label', 'score'} dicts
        # into a single label -> score mapping.
        return {entry["label"]: entry["score"] for entry in predictions}

    return {
        "ViT Classification": to_scores(vit_classifier(image)),
        "CLIP Zero-Shot Classification": to_scores(
            clip_detector(image, candidate_labels=labels_beans)
        ),
    }

# Sample input shown in the UI; the file is expected to ship with the app.
examples = [["example_input.png"]]

# Wire the comparison function into a simple image-in / JSON-out Gradio UI.
_interface_config = dict(
    fn=classify_bean,
    inputs=gr.Image(type="filepath"),
    outputs=gr.JSON(),
    title="Bean Disease Classification",
    description="Vergleich eines trainierten ViT-Modells mit CLIP für Bean-Disease-Klassifikation.",
    examples=examples,
)
iface = gr.Interface(**_interface_config)

iface.launch()