amaresh8053 committed on
Commit
12fcc4e
·
1 Parent(s): ac6e07e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -28,7 +28,7 @@ def reverse(sentence: str) -> str:
28
  return " ".join(sentence.split()[::-1])
29
 
30
 
31
- # ------------- Vocab class (must match training) -------------
32
  class Vocab:
33
  def __init__(self):
34
  self.word2idx = {'<PAD>': 0, '<SOS>': 1, '<EOS>': 2, '<UNK>': 3}
@@ -77,7 +77,7 @@ EOS_IDX = vocab.word2idx["<EOS>"]
77
  UNK_IDX = vocab.word2idx["<UNK>"]
78
 
79
 
80
- # ------------- model definitions (EXACTLY as in notebook) -------------
81
  class Encoder(nn.Module):
82
  def __init__(self):
83
  super().__init__()
@@ -178,7 +178,7 @@ ckpt = torch.load(MODEL_FILE, map_location="cpu")
178
  model.load_state_dict(ckpt["model"])
179
  model.eval()
180
 
181
- print("Model and vocab loaded. Chatbot ready to serve 🚀")
182
 
183
 
184
  # ------------- beam search (beam_generate_v2 from notebook) -------------
@@ -266,7 +266,7 @@ def generate_reply(user_text: str) -> str:
266
  src = torch.tensor([ids], dtype=torch.long, device=DEVICE)
267
  reply = beam_generate_v2(src, beam=5, max_len=50)
268
  if not reply.strip():
269
- return "I don't know."
270
  return reply
271
 
272
 
@@ -277,7 +277,7 @@ def respond(message, history):
277
 
278
  demo = gr.ChatInterface(
279
  fn=respond,
280
- title="Ubuntu Chatbot (Seq2Seq + GRU + Attention)",
281
  description="A generative chatbot trained on Ubuntu dialogue pairs (seq2seq with attention)."
282
  )
283
 
 
28
  return " ".join(sentence.split()[::-1])
29
 
30
 
31
+ # ------------- Vocab class (same as training) -------------
32
  class Vocab:
33
  def __init__(self):
34
  self.word2idx = {'<PAD>': 0, '<SOS>': 1, '<EOS>': 2, '<UNK>': 3}
 
77
  UNK_IDX = vocab.word2idx["<UNK>"]
78
 
79
 
80
+ # ------------- model definitions (same as notebook) -------------
81
  class Encoder(nn.Module):
82
  def __init__(self):
83
  super().__init__()
 
178
  model.load_state_dict(ckpt["model"])
179
  model.eval()
180
 
181
+ print("Model and vocab loaded. Chatbot ready to serve ")
182
 
183
 
184
  # ------------- beam search (beam_generate_v2 from notebook) -------------
 
266
  src = torch.tensor([ids], dtype=torch.long, device=DEVICE)
267
  reply = beam_generate_v2(src, beam=5, max_len=50)
268
  if not reply.strip():
269
+ return "I'm a chatbot trained on Ubuntu Linux support conversations, so I may not understand this question."
270
  return reply
271
 
272
 
 
277
 
278
  demo = gr.ChatInterface(
279
  fn=respond,
280
+ title="Ubuntu Chatbot (Seq2Seq + GRU + Attention) Developed by Group E",
281
  description="A generative chatbot trained on Ubuntu dialogue pairs (seq2seq with attention)."
282
  )
283