File size: 835 Bytes
2d665b2
49fd203
8f20b61
49fd203
8f20b61
49fd203
2d665b2
8f20b61
49fd203
 
 
 
8f20b61
 
49fd203
2d665b2
49fd203
8f20b61
 
2d665b2
49fd203
 
 
 
8f20b61
 
49fd203
2d665b2
49fd203
8f20b61
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
import gradio as gr
from transformers import pipeline
import torch

# Model identifier on the Hugging Face Hub — replace with your own repo id.
REPO_ID = "alramil/Practica9"

# Build the text-classification pipeline, loading both model and tokenizer
# directly from the Hub.
# NOTE: `return_all_scores=True` is deprecated in recent transformers
# releases; `top_k=None` is the supported way to request scores for every
# label, and it returns a flat list of {label, score} dicts per input.
classifier = pipeline(
    "text-classification",
    model=REPO_ID,
    tokenizer=REPO_ID,
    top_k=None,  # return scores for all labels, not just the argmax
    device=0 if torch.cuda.is_available() else -1,  # GPU 0 if present, else CPU
)

def classify(text: str) -> dict[str, float]:
    """Classify *text* and return a mapping of label -> confidence score.

    Handles both output shapes a transformers text-classification pipeline
    can produce: a flat list of score dicts, or a nested list (one inner
    list per input) as emitted under the legacy `return_all_scores=True`
    flag.
    """
    results = classifier(text)
    # BUG FIX: with `return_all_scores=True` the pipeline wraps the scores
    # for a single input in an extra list: [[{label, score}, ...]].
    # Iterating that directly hands the dict comprehension the inner *list*,
    # and d["label"] raises TypeError. Unwrap the nesting when present.
    if results and isinstance(results[0], list):
        results = results[0]
    return {entry["label"]: float(entry["score"]) for entry in results}

# UI wiring: a multi-line textbox feeds the classifier; the Label component
# displays the three highest-scoring classes.
text_input = gr.Textbox(lines=5, placeholder="Escribe tu texto aquí…")
score_output = gr.Label(num_top_classes=3)

iface = gr.Interface(
    fn=classify,
    inputs=text_input,
    outputs=score_output,
    title="🧠 Clasificador Practica9",
    description=f"Modelo cargado desde Hugging Face Hub: `{REPO_ID}`",
)

if __name__ == "__main__":
    # Bind to all interfaces on port 7860 — the conventional setup for
    # containerized deployments such as Hugging Face Spaces.
    iface.launch(server_name="0.0.0.0", server_port=7860)