File size: 1,584 Bytes
c26f6ef
 
b6226c3
c26f6ef
f37b117
3788013
c26f6ef
76937b4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7942b6f
3788013
76937b4
 
 
 
 
 
3788013
c26f6ef
f37b117
c26f6ef
f37b117
 
 
d3f5dfc
3788013
f37b117
 
 
 
 
7942b6f
c26f6ef
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import gradio as gr
from huggingface_hub import InferenceClient
from transformers import pipeline

# Load the text-classification model once at import time.
# Replace with your own model on Hugging Face.
# NOTE(review): InferenceClient is imported but never used in this file — confirm before removing.
pipe = pipeline("text-classification", model="Ranti0603/job_classifier_model_v2")

# Raw pipeline label -> human-readable category, used when the model config
# provides no usable id2label mapping.
FALLBACK_MAP = {"LABEL_0": "Non-TIK (0)", "LABEL_1": "TIK (1)"}

# Class index -> human-readable category.
INDEX_MAP = {0: "Non-TIK (0)", 1: "TIK (1)"}

def map_label_from_pipeline(pipe, raw_label):
    """Translate a raw pipeline label (e.g. "LABEL_1") into a readable category.

    Prefers the index derived from the model config's ``id2label`` mapping;
    falls back to FALLBACK_MAP (or the raw label itself) when that is absent.
    """
    id2label = getattr(pipe.model.config, "id2label", None)
    if isinstance(id2label, dict):
        # Invert id2label; int() tolerates string keys from a raw JSON config.
        label_to_idx = {name: int(idx) for idx, name in id2label.items()}
        idx = label_to_idx.get(raw_label)
        if idx is not None:
            return INDEX_MAP.get(idx, f"{raw_label} ({idx})")
    return FALLBACK_MAP.get(raw_label, raw_label)

def respond(message, history):
    """Classify a job description and format the prediction for the chat UI.

    ``history`` is required by gr.ChatInterface's callback signature but is
    not used here — each message is classified independently.
    """
    top = pipe(message, truncation=True)[0]
    raw_label = top.get("label", "")
    confidence = round(top.get("score", 0.0) * 100, 2)

    category = map_label_from_pipeline(pipe, raw_label)

    return (
        f"Pekerjaan ini dikategorikan sebagai **{category}** "
        f"dengan confidence {confidence}%"
    )

# === UI ===
# Fix: the original nested the Column at a 9-space indent and the ChatInterface
# at 12 (a 5-space step) — normalized to the file's 4-space convention.
# The unused `chatbot = ...` local binding was also dropped; gr.ChatInterface
# registers itself with the enclosing Blocks context as a side effect.
with gr.Blocks() as demo:
    gr.Markdown("# Job Classification Chatbot")
    gr.Markdown("Masukkan deskripsi pekerjaan, sistem akan mengklasifikasikan apakah pekerjaan tersebut termasuk **Non-TIK (0)** atau **TIK (1)**.")

    with gr.Row():
        with gr.Column(scale=5):
            # Wires the textbox, message history, and `respond` together.
            gr.ChatInterface(
                respond,
                type="messages",
                chatbot=gr.Chatbot(height=400),
            )

if __name__ == "__main__":
    demo.launch()