"""Gradio chatbot that classifies a job description as TIK (ICT) or Non-TIK.

The app wraps a Hugging Face text-classification pipeline and maps the raw
model labels (e.g. "LABEL_0"/"LABEL_1") to human-readable Indonesian category
names before replying in the chat UI.
"""

import gradio as gr
from transformers import pipeline

# Replace with your own model on Hugging Face if needed.
pipe = pipeline("text-classification", model="Ranti0603/job_classifier_model_v2")

# Used when the model config exposes no usable id2label mapping.
FALLBACK_MAP = {
    "LABEL_0": "Non-TIK (0)",
    "LABEL_1": "TIK (1)",
}

# Canonical display names keyed by class index.
INDEX_MAP = {
    0: "Non-TIK (0)",
    1: "TIK (1)",
}


def map_label_from_pipeline(clf, raw_label):
    """Translate a raw pipeline label into a human-readable category name.

    Args:
        clf: the text-classification pipeline whose model config may carry
            an ``id2label`` mapping (index -> raw label string).
        raw_label: the label string returned by the pipeline, e.g. "LABEL_1".

    Returns:
        A display string from INDEX_MAP when the label can be resolved to a
        class index, otherwise a FALLBACK_MAP entry, otherwise the raw label
        unchanged.
    """
    id2label = getattr(clf.model.config, "id2label", None)
    if isinstance(id2label, dict):
        # Invert index->label so we can look up the class index by its label.
        # Keys may be str or int depending on how the config was serialized.
        inv = {v: int(k) for k, v in id2label.items()}
        if raw_label in inv:
            idx = inv[raw_label]
            return INDEX_MAP.get(idx, f"{raw_label} ({idx})")
    return FALLBACK_MAP.get(raw_label, raw_label)


def respond(message, history):
    """Classify *message* and return a formatted chat reply.

    Args:
        message: the user's job description text.
        history: prior chat turns (required by gr.ChatInterface; unused).

    Returns:
        A Markdown string with the predicted category and confidence.
    """
    # Guard against empty/whitespace input so we don't classify nothing.
    if not message or not message.strip():
        return "Masukkan deskripsi pekerjaan terlebih dahulu."
    result = pipe(message, truncation=True)[0]
    label = result.get("label", "")
    score = round(result.get("score", 0.0) * 100, 2)
    human_label = map_label_from_pipeline(pipe, label)
    return (
        f"Pekerjaan ini dikategorikan sebagai **{human_label}** "
        f"dengan confidence {score}%"
    )


# === UI ===
with gr.Blocks() as demo:
    gr.Markdown("# Job Classification Chatbot")
    gr.Markdown(
        "Masukkan deskripsi pekerjaan, sistem akan mengklasifikasikan apakah "
        "pekerjaan tersebut termasuk **Non-TIK (0)** atau **TIK (1)**."
    )
    with gr.Row():
        with gr.Column(scale=5):
            gr.ChatInterface(
                respond,
                type="messages",
                chatbot=gr.Chatbot(height=400),
            )

if __name__ == "__main__":
    demo.launch()