Famazo committed on
Commit
17d08c9
·
1 Parent(s): 96813b8

Update backend/api.py

Browse files
Files changed (1) hide show
  1. backend/api.py +37 -26
backend/api.py CHANGED
@@ -1,45 +1,42 @@
1
  from fastapi import FastAPI
2
  from fastapi.middleware.cors import CORSMiddleware
3
  from pydantic import BaseModel
4
- from transformers import AutoTokenizer, AutoModelForSequenceClassification
 
5
  import torch
6
  import pandas as pd
7
  from pathlib import Path
8
 
9
  app = FastAPI()
10
 
11
- # === CORS agar bisa diakses frontend Vercel ===
12
  app.add_middleware(
13
  CORSMiddleware,
14
- allow_origins=["*"], # bisa diganti domain spesifik
15
  allow_credentials=True,
16
  allow_methods=["*"],
17
  allow_headers=["*"],
18
  )
19
 
20
- # === Global Setup ===
21
  BASE_DIR = Path(__file__).resolve().parent
22
- MODEL_DIR = BASE_DIR / "bert_chatbot_model"
 
23
  DATASET_PATH = BASE_DIR / "dataset_chatbot_template.xlsx"
24
 
25
- print("πŸš€ Loading model...")
26
- try:
27
- tokenizer = AutoTokenizer.from_pretrained(str(MODEL_DIR))
28
- model = AutoModelForSequenceClassification.from_pretrained(str(MODEL_DIR))
29
- device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
30
- model.to(device)
31
- model.eval()
32
- print("βœ… Model loaded successfully!")
33
- except Exception as e:
34
- print(f"❌ Model load error: {e}")
35
 
36
- # === Load Dataset (optional untuk custom response) ===
37
  try:
38
  df_jawaban = pd.read_excel(DATASET_PATH)
39
  except Exception:
40
  df_jawaban = pd.DataFrame(columns=["Intent", "Jawaban_ID"])
41
 
42
- # === Default Responses ===
43
  responses = {
44
  "about_me": "I am a passionate developer specializing in AI and web development.",
45
  "skills": "My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, TensorFlow, and PyTorch.",
@@ -50,31 +47,45 @@ responses = {
50
  "fallback": "I'm sorry, I don't understand. Please try another question."
51
  }
52
 
 
53
  class ChatRequest(BaseModel):
54
  text: str
55
 
56
  @app.get("/")
57
  async def root():
58
- return {"message": "πŸš€ Chatbot API running on Hugging Face"}
59
 
60
  @app.post("/chatbot")
61
- async def chat(req: ChatRequest):
62
  try:
63
- inputs = tokenizer(req.text, return_tensors="pt", padding=True, truncation=True, max_length=128).to(device)
64
- with torch.no_grad():
65
- outputs = model(**inputs)
66
- pred_id = torch.argmax(outputs.logits, dim=1).item()
 
 
 
 
67
 
68
- intent = model.config.id2label.get(pred_id, "fallback")
 
 
 
 
 
 
 
 
 
69
 
 
70
  if not df_jawaban.empty and intent in df_jawaban["Intent"].values:
71
  reply = df_jawaban.loc[df_jawaban["Intent"] == intent, "Jawaban_ID"].iloc[0]
72
  else:
73
  reply = responses.get(intent, responses["fallback"])
74
 
75
- # βœ… agar cocok dengan frontend
76
  return {"reply": reply, "intent": intent}
77
 
78
  except Exception as e:
79
- print(f"❌ Runtime Error: {e}")
80
  return {"reply": "⚠️ Internal server error.", "intent": "error"}
 
1
  from fastapi import FastAPI
2
  from fastapi.middleware.cors import CORSMiddleware
3
  from pydantic import BaseModel
4
+ from transformers import AutoTokenizer
5
+ import onnxruntime as ort
6
  import torch
7
  import pandas as pd
8
  from pathlib import Path
9
 
10
  app = FastAPI()
11
 
12
+ # === CORS untuk frontend di Vercel ===
13
  app.add_middleware(
14
  CORSMiddleware,
15
+ allow_origins=["*"],
16
  allow_credentials=True,
17
  allow_methods=["*"],
18
  allow_headers=["*"],
19
  )
20
 
21
+ # === Path setup ===
22
  BASE_DIR = Path(__file__).resolve().parent
23
+ MODEL_PATH = BASE_DIR / "bert_chatbot_model.onnx"
24
+ TOKENIZER_PATH = BASE_DIR / "bert_chatbot_tokenizer"
25
  DATASET_PATH = BASE_DIR / "dataset_chatbot_template.xlsx"
26
 
27
+ # === Load tokenizer dan model ===
28
+ print("πŸš€ Loading ONNX model...")
29
+ tokenizer = AutoTokenizer.from_pretrained(str(TOKENIZER_PATH))
30
+ session = ort.InferenceSession(str(MODEL_PATH), providers=["CPUExecutionProvider"])
31
+ print("βœ… ONNX model loaded!")
 
 
 
 
 
32
 
33
+ # === Load dataset (optional) ===
34
  try:
35
  df_jawaban = pd.read_excel(DATASET_PATH)
36
  except Exception:
37
  df_jawaban = pd.DataFrame(columns=["Intent", "Jawaban_ID"])
38
 
39
+ # === Default responses ===
40
  responses = {
41
  "about_me": "I am a passionate developer specializing in AI and web development.",
42
  "skills": "My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, TensorFlow, and PyTorch.",
 
47
  "fallback": "I'm sorry, I don't understand. Please try another question."
48
  }
49
 
50
+ # === Request schema ===
51
  class ChatRequest(BaseModel):
52
  text: str
53
 
54
  @app.get("/")
55
  async def root():
56
+ return {"message": "πŸš€ ONNX Chatbot API running on Hugging Face"}
57
 
58
  @app.post("/chatbot")
59
+ async def chatbot(req: ChatRequest):
60
  try:
61
+ # Tokenize input
62
+ inputs = tokenizer(req.text, return_tensors="pt", padding=True, truncation=True, max_length=128)
63
+
64
+ # Convert to numpy for ONNX
65
+ ort_inputs = {k: v.cpu().numpy() for k, v in inputs.items()}
66
+ ort_outputs = session.run(None, ort_inputs)
67
+ logits = torch.tensor(ort_outputs[0])
68
+ pred_id = torch.argmax(logits, dim=1).item()
69
 
70
+ # === Mapping ID ke label ===
71
+ id2label = {
72
+ 0: "about_me",
73
+ 1: "skills",
74
+ 2: "projects",
75
+ 3: "experience",
76
+ 4: "career_goal",
77
+ 5: "greeting",
78
+ }
79
+ intent = id2label.get(pred_id, "fallback")
80
 
81
+ # === Ambil jawaban ===
82
  if not df_jawaban.empty and intent in df_jawaban["Intent"].values:
83
  reply = df_jawaban.loc[df_jawaban["Intent"] == intent, "Jawaban_ID"].iloc[0]
84
  else:
85
  reply = responses.get(intent, responses["fallback"])
86
 
 
87
  return {"reply": reply, "intent": intent}
88
 
89
  except Exception as e:
90
+ print(f"❌ Runtime error: {e}")
91
  return {"reply": "⚠️ Internal server error.", "intent": "error"}