zaid002 committed on
Commit
9022362
·
verified ·
1 Parent(s): 723e03c

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +46 -91
src/streamlit_app.py CHANGED
@@ -1,103 +1,58 @@
1
  import streamlit as st
2
- from transformers import GPT2LMHeadModel, GPT2Tokenizer, pipeline
3
- import torch
4
- import re
5
 
6
- # ==========================================================
7
- # 1️⃣ LOAD MODEL
8
- # ==========================================================
9
@st.cache_resource
def load_model():
    """Load the fine-tuned GPT-2 checkpoint and wrap it in a pipeline.

    Cached by Streamlit so the weights are fetched and loaded only once
    per server process.

    Returns:
        tuple: (tokenizer, text-generation pipeline).
    """
    repo_id = "zaid002/finetunedmodel"  # Your Hugging Face repo path
    gpt2_tokenizer = GPT2Tokenizer.from_pretrained(repo_id)
    gpt2_model = GPT2LMHeadModel.from_pretrained(repo_id)
    # pipeline() takes a device index: GPU 0 when available, -1 for CPU.
    target_device = 0 if torch.cuda.is_available() else -1
    generator = pipeline(
        "text-generation",
        model=gpt2_model,
        tokenizer=gpt2_tokenizer,
        device=target_device,
    )
    return gpt2_tokenizer, generator
 
 
 
 
 
 
 
 
 
 
17
 
18
- tokenizer, chatbot = load_model()
19
-
20
- # ==========================================================
21
- # 2️⃣ TRANSLATION DICTIONARY
22
- # ==========================================================
23
def translate_response(text, lang):
    """Prefix *text* with a canned per-language lead-in.

    Not a real translation: the answer body stays in English; only a
    language-specific banner is prepended. "en" and any unknown code
    return the text untouched.
    """
    prefixes = {
        "hi": "यह रहा आपका उत्तर हिंदी में: ",
        "ta": "உங்கள் பதில் தமிழில்: ",
        "te": "మీ సమాధానం తెలుగులో: ",
        "fr": "Votre réponse en français : ",
        "es": "Su respuesta en español: ",
    }
    if lang in prefixes:
        return prefixes[lang] + text
    return text
33
-
34
- # ==========================================================
35
- # 3️⃣ CHAT FUNCTION
36
- # ==========================================================
37
def multilingual_chat(user_input, lang="en"):
    """Answer GUVI-related questions with the fine-tuned model.

    Only prompts containing the standalone word "guvi" (case-insensitive)
    reach the model; anything else gets a fixed refusal. Either reply is
    passed through translate_response() for the selected language.
    """
    # Guard clause: refuse off-topic questions without touching the model.
    if not re.search(r"\bguvi\b", user_input.lower()):
        return translate_response("⚠️ I can only answer questions related to GUVI.", lang)

    prompt = f"Question: {user_input}\nAnswer:"
    generated = chatbot(
        prompt,
        max_length=150,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
        do_sample=True,
        top_k=50,
        top_p=0.9,
        temperature=0.7
    )
    # text-generation echoes the prompt; strip it to keep only the answer.
    answer = generated[0]["generated_text"].replace(prompt, "").strip()
    # Hard cap so one reply can't flood the chat bubble.
    if len(answer) > 400:
        answer = answer[:400]
    return translate_response(answer, lang)
60
 
61
# ==========================================================
# 4️⃣ STREAMLIT UI
# ==========================================================
st.set_page_config(page_title="GUVI-only Chatbot", page_icon="🤖", layout="centered")

st.markdown(
    """
    <h1 style='text-align:center; font-family:Calibri;'>🤖 GUVI-only LLM Chatbot</h1>
    <p style='text-align:center; font-size:18px;'>Ask me anything about GUVI (courses, fees, placements, etc.)</p>
    """,
    unsafe_allow_html=True
)

# Conversation survives Streamlit reruns via session_state.
if "history" not in st.session_state:
    st.session_state.history = []

# User input and language selection
col1, col2 = st.columns([3, 1])
with col1:
    user_input = st.text_input("💬 Ask about GUVI...")
with col2:
    lang = st.selectbox("🌐 Language", ["en", "hi", "ta", "te", "fr", "es"], index=0)

# Button row
colA, colB = st.columns([1, 1])
with colA:
    send = st.button("🚀 Send")
with colB:
    clear = st.button("🗑️ Clear Chat")

# Ignore sends whose input is empty or whitespace-only.
if send and user_input.strip():
    reply = multilingual_chat(user_input, lang)
    st.session_state.history.append((user_input, reply))

if clear:
    st.session_state.history = []

# Display conversation (fix: the enumerate index was never used).
for u, r in st.session_state.history:
    st.markdown(f"**🧑 You:** {u}")
    st.markdown(f"**🤖 Bot:** {r}")
    st.markdown("---")
 
1
  import streamlit as st
2
+ from deep_translator import GoogleTranslator
3
+ from transformers import pipeline
 
4
 
 
 
 
5
@st.cache_resource
def load_translator():
    """Build and cache the two translation helpers.

    Returns:
        tuple: (translate_to_english, translate_from_english). Both are
        best-effort: on failure they return an error string rather than
        raising, so the UI never crashes on a translation hiccup.
    """

    def translate_to_english(text, src_lang='auto'):
        """Translate any input text to English."""
        try:
            return GoogleTranslator(source=src_lang, target='en').translate(text)
        except Exception as e:
            return f"Translation error: {e}"

    def translate_from_english(text, target_lang):
        """Translate English response back to user language."""
        try:
            return GoogleTranslator(source='en', target=target_lang).translate(text)
        except Exception as e:
            return f"Translation error: {e}"

    return translate_to_english, translate_from_english
23
 
24
@st.cache_resource
def load_chatbot_model():
    """Load the BlenderBot chat model and return a response function.

    Bug fix: facebook/blenderbot-400M-distill is a sequence-to-sequence
    (encoder-decoder) model, so it must be served through the
    "text2text-generation" pipeline; the "text-generation" task only
    accepts causal LMs and raises when loading this checkpoint.

    Returns:
        callable: get_chatbot_response(prompt) -> str reply.
    """
    chatbot = pipeline("text2text-generation", model="facebook/blenderbot-400M-distill")

    def get_chatbot_response(prompt):
        """Generate chatbot reply using a transformer model."""
        # max_length bounds the generated reply; text2text pipelines do
        # not echo the prompt back in 'generated_text'.
        result = chatbot(prompt, max_length=120, num_return_sequences=1)
        return result[0]['generated_text']

    return get_chatbot_response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
 
34
# Resolve the cached helpers; @st.cache_resource makes these cheap on rerun.
translate_to_english, translate_from_english = load_translator()
get_chatbot_response = load_chatbot_model()

st.set_page_config(page_title="GUVI Multilingual Chatbot", layout="centered")
st.title("🤖 GUVI Multilingual Chatbot")
st.write("Chat with AI in your own language!")

# User selects language
language = st.selectbox(
    "Select your language:",
    ["en", "ta", "hi", "ml", "te", "bn", "fr", "es"],
)

user_input = st.text_input("You:", "")

# Per message: user language -> English -> model -> back to user language.
if st.button("Send") and user_input:
    english_text = translate_to_english(user_input, src_lang=language)
    english_reply = get_chatbot_response(english_text)
    final_reply = translate_from_english(english_reply, target_lang=language)
    st.markdown(f"**Bot:** {final_reply}")