rbbist commited on
Commit
78eab1a
·
verified ·
1 Parent(s): ea55c7a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +158 -143
app.py CHANGED
@@ -1,133 +1,140 @@
1
  import gradio as gr
2
- import torch
3
- from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
4
- import warnings
5
- warnings.filterwarnings("ignore")
 
6
 
7
- # Initialize models with better error handling
8
- def load_models():
9
- """Load models with proper error handling"""
 
 
 
 
 
 
10
  try:
11
- # Load conversational model
12
- model_name = "microsoft/DialoGPT-medium"
13
- tokenizer = AutoTokenizer.from_pretrained(model_name)
14
- model = AutoModelForCausalLM.from_pretrained(model_name)
 
 
 
15
 
16
- # Add padding token if not present
17
- if tokenizer.pad_token is None:
18
- tokenizer.pad_token = tokenizer.eos_token
19
-
20
- print("✅ Conversational model loaded successfully")
 
 
 
 
 
21
 
22
- # Load translation models
23
- try:
24
- en_to_ne = pipeline("translation", model="Helsinki-NLP/opus-mt-en-ne", device=-1)
25
- ne_to_en = pipeline("translation", model="Helsinki-NLP/opus-mt-ne-en", device=-1)
26
- print("✅ Translation models loaded successfully")
27
- return tokenizer, model, en_to_ne, ne_to_en, True
28
- except Exception as e:
29
- print(f"⚠️ Translation models failed to load: {e}")
30
- print("📝 Continuing with English-only mode")
31
- return tokenizer, model, None, None, False
32
-
33
- except Exception as e:
34
- print(f"❌ Failed to load models: {e}")
35
- return None, None, None, None, False
36
-
37
- # Load models
38
- tokenizer, model, en_to_ne, ne_to_en, translation_available = load_models()
39
-
40
- def translate_to_english(text):
41
- """Translate Nepali text to English"""
42
- if not translation_available or ne_to_en is None:
43
  return text
44
- try:
45
- result = ne_to_en(text, max_length=512)
46
- return result[0]['translation_text']
47
  except Exception as e:
48
- print(f"Translation error (NE->EN): {e}")
49
  return text
50
 
51
- def translate_to_nepali(text):
52
- """Translate English text to Nepali"""
53
- if not translation_available or en_to_ne is None:
54
- return text
55
  try:
56
- result = en_to_ne(text, max_length=512)
57
- return result[0]['translation_text']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  except Exception as e:
59
- print(f"Translation error (EN->NE): {e}")
60
- return text
61
 
62
- def is_nepali_text(text):
63
- """Simple check to see if text contains Devanagari script (Nepali)"""
64
- nepali_chars = any('\u0900' <= char <= '\u097F' for char in text)
65
- return nepali_chars
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
 
67
- def generate_response(message, history):
68
- """Generate chatbot response"""
69
- if tokenizer is None or model is None:
70
- return "Sorry, the chatbot models failed to load. Please try refreshing the page."
 
 
 
71
 
72
  try:
73
- # Check if input is in Nepali and translate to English if needed
74
- if is_nepali_text(message):
75
- english_message = translate_to_english(message)
76
- print(f"Translated input: {message} -> {english_message}")
 
 
 
 
 
 
 
 
 
77
  else:
78
- english_message = message
79
-
80
- # Prepare conversation history
81
- bot_input_ids = tokenizer.encode(english_message + tokenizer.eos_token, return_tensors='pt')
82
 
83
- # Generate response with more conservative settings for stability
84
- with torch.no_grad():
85
- chat_history_ids = model.generate(
86
- bot_input_ids,
87
- max_length=min(1000, bot_input_ids.shape[-1] + 100),
88
- num_beams=3,
89
- no_repeat_ngram_size=2,
90
- temperature=0.8,
91
- do_sample=True,
92
- pad_token_id=tokenizer.eos_token_id,
93
- early_stopping=True,
94
- max_new_tokens=100
95
- )
96
-
97
- # Decode the response
98
- response = tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
99
-
100
- # Clean up response
101
- response = response.strip()
102
- if not response:
103
- response = "I understand. Could you tell me more?"
104
 
105
- # If original input was in Nepali, translate response back to Nepali
106
- if is_nepali_text(message) and translation_available:
107
- nepali_response = translate_to_nepali(response)
108
- print(f"Translated output: {response} -> {nepali_response}")
109
- return nepali_response
110
- else:
111
- return response
112
-
113
  except Exception as e:
114
- print(f"Error generating response: {e}")
115
- error_msg = "Sorry, I encountered an error. Please try again."
116
- if is_nepali_text(message):
117
- return "माफ गर्नुहोस्, मलाई समस्या भयो। कृपया फेरि प्रयास गर्नुहोस्।"
118
- return error_msg
119
-
120
- def chat_interface(message, history):
121
- """Gradio chat interface function"""
122
- if not message.strip():
123
- return history, ""
124
-
125
- # Generate bot response
126
- bot_response = generate_response(message, history)
127
 
128
  # Add to history
129
  history.append([message, bot_response])
130
-
131
  return history, ""
132
 
133
  # Custom CSS for better appearance
@@ -142,18 +149,22 @@ css = """
142
  .message.bot {
143
  background-color: #f5f5f5 !important;
144
  }
 
 
 
 
145
  """
146
 
147
  # Create Gradio interface
148
- with gr.Blocks(css=css, title="Nepali Chatbot") as demo:
149
  gr.Markdown("""
150
- # नेपाली च्याटबोट (Nepali Chatbot) 🇳🇵
151
 
152
- A simple chatbot that can understand and respond in Nepali language.
153
 
154
- **नेपालीमा कुराकानी गर्नुहोस् वा अंग्रेजीमा!**
155
 
156
- *You can chat in Nepali or English - the bot will respond in the same language you use.*
157
  """)
158
 
159
  chatbot = gr.Chatbot(
@@ -161,54 +172,58 @@ with gr.Blocks(css=css, title="Nepali Chatbot") as demo:
161
  height=400,
162
  show_label=False,
163
  container=True,
164
- bubble_full_width=False
 
165
  )
166
 
167
  with gr.Row():
168
  msg = gr.Textbox(
169
- placeholder="तपईंको सन्देश यहाँ लेख्नुहोस्... (Type your message here...)",
170
  show_label=False,
171
  scale=4,
172
- container=False
 
173
  )
174
- submit_btn = gr.Button("Send", scale=1, variant="primary")
175
- clear_btn = gr.Button("Clear", scale=1)
176
 
177
- # Event handlers
178
- msg.submit(
179
- chat_interface,
180
- inputs=[msg, chatbot],
181
- outputs=[chatbot, msg],
182
- queue=True
 
 
 
 
 
 
183
  )
184
 
185
- submit_btn.click(
186
- chat_interface,
187
- inputs=[msg, chatbot],
188
- outputs=[chatbot, msg],
189
- queue=True
190
- )
191
-
192
- clear_btn.click(
193
- lambda: ([], ""),
194
- outputs=[chatbot, msg],
195
- queue=False
196
- )
197
 
198
  gr.Markdown("""
199
  ---
200
- **Note:** This chatbot uses translation models to handle Nepali language.
201
- The responses might not be perfect but should be understandable.
202
 
203
- **टिप्पणी:** यो च्याटबोटले नेपली भाषा ह्यान्डलर्न अनुवाद डेहरू प्रयोग गर्छ।
 
 
204
  """)
205
 
206
- # Launch the app
207
  if __name__ == "__main__":
208
- demo.queue(concurrency_count=3)
 
 
 
209
  demo.launch(
210
- share=False,
211
  server_name="0.0.0.0",
212
  server_port=7860,
213
- show_error=True
 
214
  )
 
1
import gradio as gr
import os
import requests
import json
from typing import List, Tuple
import time

# Simple chatbot using Hugging Face Inference API (free tier)
# NOTE(review): `json` and `time` appear unused in this view of the file —
# confirm before removing.
# Token is optional; without it the public (rate-limited) API tier is used.
HF_TOKEN = os.getenv("HUGGINGFACE_HUB_TOKEN", "") # Optional: set in Space secrets
10
+
11
def is_nepali_text(text: str) -> bool:
    """Return True if *text* contains at least one Devanagari character.

    The Devanagari Unicode block (U+0900–U+097F) covers Nepali script,
    so a single character in that range is treated as Nepali input.
    """
    for ch in text:
        if '\u0900' <= ch <= '\u097F':
            return True
    return False
14
+
15
def translate_text(text: str, source_lang: str = "ne", target_lang: str = "en") -> str:
    """Translate *text* between Nepali and English via the HF Inference API.

    Only the ne->en and en->ne directions are supported; any other language
    pair, a non-200 response, an unexpected payload shape, or an exception
    all return the input text unchanged (best-effort translation).
    """
    # Map the supported direction to its Helsinki-NLP model.
    direction_models = {
        ("ne", "en"): "Helsinki-NLP/opus-mt-ne-en",
        ("en", "ne"): "Helsinki-NLP/opus-mt-en-ne",
    }
    try:
        model = direction_models.get((source_lang, target_lang))
        if model is None:
            return text

        api_url = f"https://api-inference.huggingface.co/models/{model}"
        headers = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}

        response = requests.post(api_url, headers=headers, json={"inputs": text}, timeout=30)
        if response.status_code == 200:
            payload = response.json()
            if isinstance(payload, list) and len(payload) > 0:
                return payload[0].get("translation_text", text)

        # Non-200 or unexpected shape: fall back to the original text.
        return text
    except Exception as e:
        print(f"Translation error: {e}")
        return text
41
 
42
def generate_response_api(message: str) -> str:
    """Generate a conversational reply via the HF Inference API.

    Queries microsoft/DialoGPT-medium with mild sampling parameters.
    Returns a canned English sentence when the API yields nothing usable,
    and a connection-trouble message on any exception.
    """
    try:
        model_id = "microsoft/DialoGPT-medium"
        endpoint = f"https://api-inference.huggingface.co/models/{model_id}"
        auth = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}
        request_body = {
            "inputs": message,
            "parameters": {
                "max_length": 100,
                "temperature": 0.7,
                "do_sample": True,
                "top_p": 0.9,
                "repetition_penalty": 1.1,
            },
        }

        reply = requests.post(endpoint, headers=auth, json=request_body, timeout=30)
        if reply.status_code == 200:
            data = reply.json()
            if isinstance(data, list) and len(data) > 0:
                generated = data[0].get("generated_text", "")
                # DialoGPT often echoes the prompt; keep only the continuation.
                if generated.startswith(message):
                    continuation = generated[len(message):].strip()
                else:
                    continuation = generated.strip()
                if continuation:
                    return continuation
                return "I understand. Could you tell me more?"

        # Non-200 status or unexpected payload shape: canned fallback.
        return "I'm here to help! Could you tell me more about what you'd like to discuss?"
    except Exception as e:
        print(f"API error: {e}")
        return "I'm having trouble connecting right now. Please try again in a moment."
81
 
82
def simple_fallback_response(message: str) -> str:
    """Rule-based fallback reply used when the inference API fails.

    Checks a few English keyword groups (greeting, small talk, identity,
    farewell) on whole words, then answers Devanagari input in Nepali,
    and finally returns a generic English prompt.
    """
    # Tokenize into whole words (punctuation stripped from the edges).
    # The previous substring test (`word in message_lower`) misfired badly:
    # "hi" matched inside "this"/"which", and "you" matched inside "your".
    words = {w.strip(".,!?;:\"()") for w in message.lower().split()}

    if words & {"hello", "hi", "hey"}:
        return "Hello! How can I help you today?"
    elif words & {"how", "are", "you"}:
        return "I'm doing well, thank you! How about you?"
    elif words & {"name", "who"}:
        return "I'm a Nepali chatbot here to help you!"
    elif words & {"bye", "goodbye"}:
        return "Goodbye! Have a great day!"

    # Devanagari input gets a Nepali acknowledgement ("Thank you! How can I help?").
    if is_nepali_text(message):
        return "धन्यवाद! म तपाईंलाई कसरी मद्दत गर्न सक्छु?"

    return "That's interesting! Tell me more about it."
101
 
102
def chat_function(message: str, history: List[List[str]]) -> Tuple[List[List[str]], str]:
    """Handle one chat turn and append (message, reply) to *history*.

    Devanagari input is routed through ne->en translation, answered in
    English, then translated back to Nepali; English input is answered
    directly. Returns the updated history plus "" to clear the input box.
    """
    if not message.strip():
        # Ignore empty submissions without touching the history.
        return history, ""

    try:
        if is_nepali_text(message):
            # Nepali round-trip: translate in, respond in English, translate out.
            english_message = translate_text(message, "ne", "en")
            print(f"Translated NE->EN: {message} -> {english_message}")

            english_response = generate_response_api(english_message)

            bot_response = translate_text(english_response, "en", "ne")
            print(f"Translated EN->NE: {english_response} -> {bot_response}")
        else:
            bot_response = generate_response_api(message)

        # Guard against empty or trivially short API replies.
        if not bot_response or len(bot_response.strip()) < 3:
            bot_response = simple_fallback_response(message)
    except Exception as e:
        print(f"Chat error: {e}")
        bot_response = simple_fallback_response(message)

    history.append([message, bot_response])
    return history, ""
139
 
140
  # Custom CSS for better appearance
 
149
  .message.bot {
150
  background-color: #f5f5f5 !important;
151
  }
152
+ .chat-message {
153
+ padding: 10px !important;
154
+ border-radius: 8px !important;
155
+ }
156
  """
157
 
158
# Create Gradio interface
with gr.Blocks(css=css, title="Nepali Chatbot", theme=gr.themes.Soft()) as demo:
    # Bilingual (Nepali/English) welcome banner shown above the chat widget.
    gr.Markdown("""
    # 🇳🇵 नेपाली च्याटबोट (Nepali Chatbot)

    **तपाईंलाई स्वागत छ! Welcome!**

    This chatbot can understand and respond in both Nepali and English.

    **नेपालीमा वा अंग्रेजीमा कुराकानी सुरु गर्नुहोस्!**
    """)
169
 
170
  chatbot = gr.Chatbot(
 
172
  height=400,
173
  show_label=False,
174
  container=True,
175
+ bubble_full_width=False,
176
+ show_copy_button=True
177
  )
178
 
179
  with gr.Row():
180
  msg = gr.Textbox(
181
+ placeholder="यहँ आफ्नो सन्देश लेख्नुहोस्... / Type your message here...",
182
  show_label=False,
183
  scale=4,
184
+ container=False,
185
+ lines=1
186
  )
187
+ submit_btn = gr.Button("📤 Send", scale=1, variant="primary")
188
+ clear_btn = gr.Button("🗑️ Clear", scale=1)
189
 
190
    # Examples for users to try
    # Clicking an example fills the textbox; three Nepali / three English
    # starter prompts demonstrate the bilingual support.
    gr.Examples(
        examples=[
            ["नमस्ते! तपाईं कस्तो हुनुहुन्छ?"],
            ["Hello! How are you?"],
            ["तपाईंको नाम के हो?"],
            ["What's your name?"],
            ["मलाई नेपालको बारेमा भन्नुहोस्"],
            ["Tell me about Nepal"]
        ],
        inputs=msg,
        label="Try these examples / यी उदाहरणहरू प्रयास गर्नुहोस्:"
    )
203
 
204
+ # Event handlers
205
+ msg.submit(chat_function, inputs=[msg, chatbot], outputs=[chatbot, msg], queue=True)
206
+ submit_btn.click(chat_function, inputs=[msg, chatbot], outputs=[chatbot, msg], queue=True)
207
+ clear_btn.click(lambda: ([], ""), outputs=[chatbot, msg], queue=False)
 
 
 
 
 
 
 
 
208
 
209
  gr.Markdown("""
210
  ---
211
+ **📝 Note:** This chatbot uses Hugging Face's free inference API for translation and conversation.
 
212
 
213
+ **टिप्पणी:** यो च्याटबोटले ुव कुरकाीका लाि Hugging Face क नि:शु्क API प्रयोग गर्छ।
214
+
215
+ *Response time may vary depending on API availability.*
216
  """)
217
 
218
# Launch configuration
if __name__ == "__main__":
    # Queue throttles simultaneous requests so the free-tier CPU isn't swamped.
    # NOTE(review): `concurrency_count` was removed in Gradio 4.x (replaced by
    # `default_concurrency_limit`) — confirm the Space's pinned Gradio version
    # still accepts this argument.
    demo.queue(
        concurrency_count=1, # Lower concurrency for free tier
        max_size=10
    )
    demo.launch(
        server_name="0.0.0.0",  # listen on all interfaces (required in Spaces)
        server_port=7860,       # standard Spaces port
        show_error=True,        # surface errors in the UI for debugging
        share=False
    )