Stanley03 committed on
Commit
09d8208
·
verified ·
1 Parent(s): 789806b

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +320 -0
app.py ADDED
@@ -0,0 +1,320 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # =====================
2
+ # 🦁 SIMBA AI - First African LLM (FAST VERSION)
3
+ # =====================
4
+ # Optimized for free Hugging Face hosting
5
+ # Uses DialoGPT-large - Fast & Reliable
6
+ # =====================
7
+
8
+ import gradio as gr
9
+ import torch
10
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
11
+ from sentence_transformers import SentenceTransformer
12
+ import faiss
13
+ import numpy as np
14
+ import time
15
+
16
print("🚀 Initializing Simba AI - Fast African LLM...")

# =====================
# LOAD AI MODEL - OPTIMIZED
# =====================

# Using DialoGPT-large - Fast, reliable, works on free tier
model_name = "microsoft/DialoGPT-large"

try:
    print("📥 Loading optimized AI model...")
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    # DialoGPT ships without a pad token; reuse EOS so batched
    # encoding/generation has a valid padding id.
    tokenizer.pad_token = tokenizer.eos_token

    # half precision + device_map="auto" to fit the free-tier hardware.
    # NOTE(review): device_map="auto" requires the `accelerate` package,
    # and float16 on a CPU-only Space may be slow or unsupported — confirm
    # against the actual hosting hardware.
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,
        device_map="auto",
    )
    print("✅ Simba AI Model Loaded Successfully! (Fast Version)")

except Exception as e:
    # Fallback path: if the large model cannot be loaded (OOM, missing
    # deps), retry with the medium checkpoint in default precision.
    print(f"❌ Model loading error: {e}")
    print("🔄 Using even lighter model...")
    model_name = "microsoft/DialoGPT-medium"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    tokenizer.pad_token = tokenizer.eos_token
    model = AutoModelForCausalLM.from_pretrained(model_name)
    print("✅ Light model loaded!")
45
+
46
# =====================
# AFRICAN KNOWLEDGE BASE
# =====================

# Static retrieval corpus: each entry is a {"question", "answer"} pair.
# `simba_search` embeds the "question" strings and returns the closest
# entries for a user query, which are then folded into the LLM prompt.
simba_knowledge_base = [
    # CODING
    {"question": "Python add function", "answer": "def add(a, b): return a + b"},
    {"question": "Factorial function", "answer": "def factorial(n): return 1 if n == 0 else n * factorial(n-1)"},
    {"question": "Reverse string function", "answer": "def reverse_string(s): return s[::-1]"},
    {"question": "Check even number", "answer": "def is_even(n): return n % 2 == 0"},
    {"question": "Multiply function", "answer": "def multiply(x, y): return x * y"},
    {"question": "Yoruba greeting function", "answer": "def yoruba_greeting(): return 'Báwo ni'"},

    # MATH
    {"question": "15 + 27", "answer": "42"},
    {"question": "8 × 7", "answer": "56"},
    {"question": "100 - 45", "answer": "55"},
    {"question": "12 × 12", "answer": "144"},
    {"question": "25% of 200", "answer": "50"},
    {"question": "9 × 6", "answer": "54"},

    # YORUBA
    {"question": "Hello in Yoruba", "answer": "Báwo ni"},
    {"question": "Thank you in Yoruba", "answer": "Ẹ sé"},
    {"question": "How are you in Yoruba", "answer": "Ṣe daadaa ni"},
    {"question": "Good morning in Yoruba", "answer": "Ẹ káàrọ̀"},
    {"question": "Good night in Yoruba", "answer": "O dàárọ̀"},
    {"question": "Please in Yoruba", "answer": "Jọ̀wọ́"},
    {"question": "Welcome in Yoruba", "answer": "Káàbọ̀"},

    # SWAHILI
    {"question": "Hello in Swahili", "answer": "Hujambo"},
    {"question": "Thank you in Swahili", "answer": "Asante"},
    {"question": "How are you in Swahili", "answer": "Habari yako"},
    {"question": "Good morning in Swahili", "answer": "Habari za asubuhi"},

    # IGBO
    {"question": "Hello in Igbo", "answer": "Nnọọ"},
    {"question": "Thank you in Igbo", "answer": "Daalụ"},
    {"question": "How are you in Igbo", "answer": "Kedu ka ị mere?"},

    # HAUSA
    {"question": "Hello in Hausa", "answer": "Sannu"},
    {"question": "Thank you in Hausa", "answer": "Na gode"},
    {"question": "How are you in Hausa", "answer": "Yaya lafiya?"},

    # AFRICAN INNOVATION
    {"question": "M-Pesa", "answer": "Mobile money service launched in Kenya in 2007, revolutionized banking"},
    {"question": "Andela", "answer": "Trains African software developers for global tech companies"},
    {"question": "Flutterwave", "answer": "Nigerian fintech company providing payment infrastructure across Africa"},
    {"question": "Paystack", "answer": "Nigerian payment processing company acquired by Stripe for $200M"},

    # SIMBA AI IDENTITY
    {"question": "What is Simba AI", "answer": "Simba AI is the first African Large Language Model, specializing in African languages, coding, and mathematics"},
    {"question": "Simba AI purpose", "answer": "To advance African AI innovation and make AI accessible for African languages and contexts"},
]

print(f"✅ African Knowledge Base: {len(simba_knowledge_base)} expert entries")
104
+
105
# =====================
# FAST SEARCH SYSTEM
# =====================

try:
    print("🔍 Loading fast search system...")
    embedder = SentenceTransformer('all-MiniLM-L6-v2')

    # Build search index
    questions = [item["question"] for item in simba_knowledge_base]
    question_embeddings = embedder.encode(questions)

    # Inner-product index over L2-normalized embeddings, i.e. the
    # search scores are cosine similarities.
    dimension = question_embeddings.shape[1]
    index = faiss.IndexFlatIP(dimension)
    faiss.normalize_L2(question_embeddings)
    index.add(question_embeddings)

    print("✅ Fast Search System Ready!")
except Exception as e:
    # `index = None` is the sentinel the search function checks to fall
    # back to returning raw knowledge-base entries.
    print(f"❌ Search system error: {e}")
    index = None
126
+
127
def simba_search(query, top_k=3):
    """Return the top_k knowledge-base entries most similar to *query*.

    Searches the FAISS inner-product index built over L2-normalized
    sentence embeddings (cosine similarity). If the index failed to
    build (``index is None``) or the search raises, falls back to the
    first ``top_k`` knowledge-base entries.

    Args:
        query: Free-text user question.
        top_k: Maximum number of entries to return.

    Returns:
        List of dicts with "question", "answer" and "score" keys.
        Fallback entries carry score 0.0 so the result shape is
        uniform across both paths.
    """
    # Build the fallback with the same keys as the success path so
    # callers never have to special-case a missing "score".
    fallback = [
        {"question": item["question"], "answer": item["answer"], "score": 0.0}
        for item in simba_knowledge_base[:top_k]
    ]
    if index is None:
        return fallback

    try:
        query_embedding = embedder.encode([query])
        faiss.normalize_L2(query_embedding)

        scores, indices = index.search(query_embedding, top_k)

        results = []
        for i, idx in enumerate(indices[0]):
            # FAISS pads missing results with -1; the original
            # `idx < len(...)` check let -1 wrap around to the LAST
            # entry via negative indexing, so guard the lower bound.
            if 0 <= idx < len(simba_knowledge_base):
                results.append({
                    "question": simba_knowledge_base[idx]["question"],
                    "answer": simba_knowledge_base[idx]["answer"],
                    "score": scores[0][i],
                })

        return results
    except Exception as e:
        # Log instead of silently swallowing, then degrade gracefully.
        print(f"❌ Knowledge search failed, using fallback: {e}")
        return fallback
150
+
151
# =====================
# FAST SIMBA AI CHAT FUNCTION
# =====================

def simba_ai_chat(message, history):
    """Answer one chat message, grounding generation in the knowledge base.

    Common greetings get an instant canned reply with no model call.
    Otherwise the two closest knowledge-base entries are retrieved,
    folded into a prompt, and a completion is sampled from the causal
    LM.  Any failure is converted into a friendly retry message rather
    than propagating.  *history* is accepted for the Gradio callback
    signature but is not consulted.
    """
    started = time.time()

    # Instant replies for frequent greetings — skips retrieval and the model.
    canned = {
        "hello": "🦁 Báwo ni! Hello! I'm Simba AI, the first African LLM. I specialize in African languages, coding, and mathematics. How can I assist you today?",
        "hi": "🦁 Báwo ni! Welcome to Simba AI - Africa's first Large Language Model!",
        "hey": "🦁 Hello! I'm Simba AI. Ask me about African languages, Python coding, or math problems!",
        "hola": "🦁 Báwo ni! I see you speak Spanish. I specialize in African languages like Yoruba, Swahili, Igbo, and Hausa!",
    }
    normalized = message.lower().strip()
    if normalized in canned:
        return canned[normalized]

    try:
        # Ground the prompt in the two most relevant KB entries.
        hits = simba_search(message, top_k=2)
        context = "📚 African Knowledge:\n" + "".join(
            f"{n}. {hit['question']}: {hit['answer']}\n"
            for n, hit in enumerate(hits, 1)
        )
        prompt = f"User: {message}\nAfrican Knowledge: {context}\nSimba AI:"

        # Encode (truncated to the context budget) and sample a reply.
        input_ids = tokenizer.encode(
            prompt, return_tensors="pt", max_length=512, truncation=True
        )
        with torch.no_grad():
            generated = model.generate(
                input_ids,
                max_new_tokens=100,
                temperature=0.7,
                do_sample=True,
                pad_token_id=tokenizer.eos_token_id,
                repetition_penalty=1.1,
                num_return_sequences=1,
            )
        decoded = tokenizer.decode(generated[0], skip_special_tokens=True)

        # Keep only the model's turn after the "Simba AI:" marker.
        # NOTE(review): the slice fallback assumes the decoded text starts
        # with the exact prompt string, which tokenize/decode round-trips
        # do not guarantee — verify if the marker branch ever misses.
        marker = "Simba AI:"
        if marker in decoded:
            reply = decoded.split(marker)[-1].strip()
        else:
            reply = decoded[len(prompt):].strip()

        # Brand every answer with the lion prefix.
        if not reply.startswith("🦁"):
            reply = f"🦁 {reply}"

        print(f"✅ Response generated in {time.time() - started:.2f} seconds")
        return reply

    except Exception as e:
        error_msg = f"🦁 Simba AI is thinking... Please try again! (Error: {str(e)})"
        print(f"❌ Generation error: {e}")
        return error_msg
219
+
220
# =====================
# FAST GRADIO INTERFACE
# =====================

# African theme CSS injected into the Gradio app.
# Fixed: the last gradient stop was `#ff7e5f 75')` — a stray quote and a
# missing `%` that broke the whole `.african-pattern` background rule.
css = """
.gradio-container {
    font-family: 'Arial', sans-serif;
}
.header {
    text-align: center;
    padding: 25px;
    background: linear-gradient(135deg, #ff7e5f, #feb47b);
    color: white;
    border-radius: 15px;
    margin-bottom: 20px;
    border: 2px solid #e65c50;
}
.african-pattern {
    background: linear-gradient(45deg, #ff7e5f 25%, transparent 25%),
                linear-gradient(-45deg, #ff7e5f 25%, transparent 25%),
                linear-gradient(45deg, transparent 75%, #ff7e5f 75%),
                linear-gradient(-45deg, transparent 75%, #ff7e5f 75%);
    background-size: 20px 20px;
    background-position: 0 0, 0 10px, 10px -10px, -10px 0px;
    opacity: 0.1;
}
"""
248
+
249
# Create fast interface
# Layout follows declaration order inside the Blocks context:
# header HTML, chatbot, input row, action row, examples.
with gr.Blocks(css=css, theme=gr.themes.Soft()) as demo:

    gr.HTML("""
    <div class="header">
        <h1>🦁 Simba AI - First African LLM</h1>
        <h3>Fast & Optimized - African Languages, Coding & Mathematics</h3>
        <p>Powered by DialoGPT-large • Responses in 5-15 seconds</p>
    </div>
    """)

    chatbot = gr.Chatbot(
        label="🦁 Chat with Simba AI",
        height=500,
        show_copy_button=True,
        placeholder="Ask about African languages, Python coding, or math..."
    )

    with gr.Row():
        msg = gr.Textbox(
            label="Your message",
            placeholder="Type your question here...",
            lines=2,
            scale=4
        )
        send_btn = gr.Button("🚀 Ask Simba AI", variant="primary", scale=1)

    with gr.Row():
        clear_btn = gr.Button("🧹 Clear Chat")
        info_btn = gr.Button("ℹ️ About Simba AI")

    # Quick action examples
    gr.Examples(
        examples=[
            "Hello",
            "How do you say thank you in Yoruba?",
            "Write Python function to add numbers",
            "What is 15 + 27?",
            "Tell me about M-Pesa",
            "How are you in Swahili?",
            "Create factorial function",
            "Calculate 8 × 7"
        ],
        inputs=msg,
        label="💡 Quick Actions:"
    )

    # Event handlers
    def respond(message, chat_history):
        """Generate a reply, append the (user, bot) pair to the chat
        history, and clear the textbox (first return value)."""
        bot_message = simba_ai_chat(message, chat_history)
        chat_history.append((message, bot_message))
        return "", chat_history

    def show_info():
        """Replace the chat contents with a static About card.

        NOTE(review): returns the legacy (user, bot) tuple format; newer
        Gradio versions prefer message dicts — confirm against the pinned
        gradio version.
        """
        return [("", "🦁 **Simba AI - First African LLM**\n\n**Capabilities:**\n• African Languages: Yoruba, Swahili, Igbo, Hausa\n• Python Coding & Programming\n• Mathematics & Problem Solving\n• African Tech Innovation\n\n**Powered by:** DialoGPT-large + African Knowledge Base\n**Response Time:** 5-15 seconds\n**Mission:** Advance African AI Innovation")]

    # Enter key and the send button trigger the same handler.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    send_btn.click(respond, [msg, chatbot], [msg, chatbot])
    # Clearing bypasses the queue so it feels instant.
    clear_btn.click(lambda: None, None, chatbot, queue=False)
    info_btn.click(show_info, None, chatbot)
309
+
310
# =====================
# LAUNCH
# =====================

if __name__ == "__main__":
    print("🎯 Simba AI is ready! Launching interface...")
    # NOTE(review): share=True is redundant on Hugging Face Spaces (the
    # Space URL is already public and Spaces ignores share links) — it
    # only matters when running this file locally.
    demo.launch(
        debug=True,
        share=True,
        show_error=True
    )