import gradio as gr
import torch
from transformers import AutoTokenizer
from modeling_multitask import MultiTaskModel
# Load the tokenizer and the multi-task model from the Hugging Face Hub.
# NOTE(review): MultiTaskModel comes from the local modeling_multitask module;
# trust_remote_code=True lets from_pretrained use that custom architecture.
tokenizer = AutoTokenizer.from_pretrained("patrickott1/model-ticket-ti")
model = MultiTaskModel.from_pretrained(
"patrickott1/model-ticket-ti",
trust_remote_code=True
)
# Inference only: disable dropout/batch-norm training behavior.
model.eval()
# Label sets for the two classification heads (French ticket categories).
# Index order must match the model's output logits — presumably fixed at
# training time; verify against the training config.
type_labels = ["RÉSEAU", "MATÉRIEL", "LOGICIEL", "COMPTE/MOT DE PASSE", "AUTRE"]
priorite_labels = ["BASSE", "MOYENNE", "ÉLEVÉ"]
def predict(text):
    """Classify an IT ticket description.

    Runs the multi-task model on *text* and maps each head's argmax
    to its human-readable label.

    Args:
        text: Raw ticket text entered by the user.

    Returns:
        A (problem_type, priority) pair of label strings.
    """
    encoded = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    # No gradients needed for inference.
    with torch.no_grad():
        result = model(**encoded)
    # Each head emits a (1, num_classes) logits tensor; pick the top class.
    type_idx = int(result.logits_type.argmax(dim=1))
    priority_idx = int(result.logits_priorite.argmax(dim=1))
    return type_labels[type_idx], priorite_labels[priority_idx]
# Build the UI components first, then wire them into the interface.
ticket_input = gr.Textbox(lines=2, placeholder="Entrez un ticket IT...")
type_output = gr.Textbox(label="Type de problème")
priority_output = gr.Textbox(label="Priorité")

# Gradio app: one text input, two text outputs fed by predict().
iface = gr.Interface(
    fn=predict,
    inputs=ticket_input,
    outputs=[type_output, priority_output],
    title="Classificateur de Ticket TI",
    description="Classification des tickets TI en type de problèmes et priorité",
)
iface.launch()