hamza2923 committed on
Commit
1e8d370
·
verified ·
1 Parent(s): c9686ad

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -14
app.py CHANGED
@@ -4,11 +4,11 @@ import torch
4
  import re
5
 
6
  # Load model and tokenizer
7
- model_name = "microsoft/DialoGPT-medium"
8
  tokenizer = AutoTokenizer.from_pretrained(model_name)
9
  tokenizer.pad_token = tokenizer.eos_token
10
  model = AutoModelForCausalLM.from_pretrained(model_name)
11
- model.config.pad_token_id = tokenizer.eos_token_id # Ensure no pad token issues
12
 
13
  # System prompt for company context
14
  SYSTEM_PROMPT = """You are MediaDecode's AI assistant, a professional IT services and software development company.
@@ -35,23 +35,36 @@ DEFAULT_RESPONSE = """🤖 I couldn't find a specific answer. For detailed inqui
35
  - Email: info@mediadecode.com
36
  - Visit: [Support Page](https://mediadecode.com/contact)"""
37
 
38
- # Response generator
39
  def respond(message, chat_history):
 
 
 
 
 
 
 
 
 
 
 
40
  # Check for predefined FAQs
41
  for pattern, response in FAQ_RESPONSES.items():
42
  if re.search(pattern, message):
43
- return f"{response}\n\nHow else can I help?"
44
 
45
  # Generate AI response
46
  prompt = f"{SYSTEM_PROMPT}\nUser: {message}\nAI:"
47
- inputs = tokenizer(prompt, return_tensors="pt")
 
48
  with torch.no_grad():
49
  outputs = model.generate(
50
  inputs.input_ids,
51
  max_length=200,
52
  pad_token_id=tokenizer.eos_token_id,
53
- temperature=0.7
 
54
  )
 
55
  response = tokenizer.decode(outputs[0], skip_special_tokens=True)
56
  response = response.split("AI:")[-1].strip()
57
 
@@ -69,7 +82,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
69
  *Powered by AI + Company Knowledge Base*
70
  """)
71
 
72
- chatbot = gr.Chatbot(height=400)
73
  msg = gr.Textbox(label="Your Query", placeholder="e.g., What's your pricing for Java development?")
74
  clear = gr.Button("Clear Chat")
75
 
@@ -77,16 +90,15 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
77
  return "", history + [[user_message, None]]
78
 
79
  def bot(history):
80
- if not history or not history[-1][0]:
81
- return history
82
- bot_reply = respond(history[-1][0], history[:-1])
83
- history[-1][1] = bot_reply
84
  return history
85
 
86
- msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
87
  bot, chatbot, chatbot
88
  )
89
 
90
- clear.click(lambda: [], None, chatbot, queue=False)
91
 
92
- demo.launch()
 
4
  import re
5
 
6
  # Load model and tokenizer
7
# Load the conversational model and its tokenizer once at import time.
model_name = "microsoft/DialoGPT-small"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# GPT-2-family checkpoints ship without a dedicated pad token; reuse the
# EOS token for padding on both the tokenizer and the model config so
# generate() does not warn about a missing pad_token_id.
tokenizer.pad_token = tokenizer.eos_token
model.config.pad_token_id = tokenizer.eos_token_id
12
 
13
  # System prompt for company context
14
  SYSTEM_PROMPT = """You are MediaDecode's AI assistant, a professional IT services and software development company.
 
35
  - Email: info@mediadecode.com
36
  - Visit: [Support Page](https://mediadecode.com/contact)"""
37
 
 
38
  def respond(message, chat_history):
39
+ # Clean the input message
40
+ message = message.strip().lower()
41
+
42
+ # Check for empty message
43
+ if not message:
44
+ return "Please enter a valid question or query."
45
+
46
+ # Check for greeting
47
+ if any(greeting in message for greeting in ["hi", "hello", "hey"]):
48
+ return "Hello! Welcome to MediaDecode support. How can I help you today?"
49
+
50
  # Check for predefined FAQs
51
  for pattern, response in FAQ_RESPONSES.items():
52
  if re.search(pattern, message):
53
+ return response
54
 
55
  # Generate AI response
56
  prompt = f"{SYSTEM_PROMPT}\nUser: {message}\nAI:"
57
+ inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
58
+
59
  with torch.no_grad():
60
  outputs = model.generate(
61
  inputs.input_ids,
62
  max_length=200,
63
  pad_token_id=tokenizer.eos_token_id,
64
+ temperature=0.7,
65
+ do_sample=True
66
  )
67
+
68
  response = tokenizer.decode(outputs[0], skip_special_tokens=True)
69
  response = response.split("AI:")[-1].strip()
70
 
 
82
  *Powered by AI + Company Knowledge Base*
83
  """)
84
 
85
+ chatbot = gr.Chatbot(height=400, label="Chat History")
86
  msg = gr.Textbox(label="Your Query", placeholder="e.g., What's your pricing for Java development?")
87
  clear = gr.Button("Clear Chat")
88
 
 
90
  return "", history + [[user_message, None]]
91
 
92
def bot(history):
    """Fill in the assistant reply for the newest chat turn.

    Args:
        history: list of ``[user_message, bot_message]`` pairs; the last
            pair is expected to have ``None`` as its bot half.

    Returns:
        The same history list with the last turn's reply populated, or
        the history unchanged when there is nothing to answer.
    """
    # Guard restored from the previous revision: without it an empty
    # history raises IndexError, and an empty/None user message would be
    # pointlessly sent to the model.
    if not history or not history[-1][0]:
        return history
    user_message = history[-1][0]
    # Prior turns (everything except the pending one) give respond() its
    # conversational context.
    bot_response = respond(user_message, history[:-1])
    history[-1][1] = bot_response
    return history
97
 
98
# Wire the UI events: submitting the textbox first records the user turn,
# then chains into the bot response once the turn is stored.
submit_event = msg.submit(user, [msg, chatbot], [msg, chatbot])
submit_event.then(bot, chatbot, chatbot)

# Clearing resets the chatbot component; queue=False keeps it instant.
clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()