Stanley03 committed on
Commit
60f5f42
·
verified ·
1 Parent(s): e0898b3

Delete app.py

Browse files

# =====================
# 🦁 SIMBA AI - BACKEND API ONLY
# =====================
# Provides REST API for your custom frontend
# No Gradio interface - Pure backend
# =====================

from flask import Flask, request, jsonify, send_from_directory
from flask_cors import CORS
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
import time
import os

print("🚀 Initializing Simba AI Backend API...")

app = Flask(__name__)
# Allow cross-origin requests so a separately-hosted frontend can call this API.
CORS(app)

# =====================
# LOAD AI MODEL
# =====================

model_name = "microsoft/DialoGPT-large"

try:
    print("📥 Loading AI model...")
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    # DialoGPT defines no dedicated pad token; reuse EOS for padding.
    tokenizer.pad_token = tokenizer.eos_token
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,
        device_map="auto",
    )
    print("✅ Simba AI Model Loaded Successfully!")
except Exception as e:
    # Fall back to the smaller checkpoint (full precision, default device).
    print(f"❌ Model loading error: {e}")
    model_name = "microsoft/DialoGPT-medium"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    tokenizer.pad_token = tokenizer.eos_token
    model = AutoModelForCausalLM.from_pretrained(model_name)
    print("✅ Light model loaded!")

# =====================
# AFRICAN KNOWLEDGE BASE
# =====================

def _kb(pairs):
    """Expand (question, answer) tuples into knowledge-base entry dicts."""
    return [{"question": q, "answer": a} for q, a in pairs]

# Concatenation order matters: entries are grouped by category.
simba_knowledge_base = (
    # CODING
    _kb([
        ("Python add function", "def add(a, b): return a + b"),
        ("Factorial function", "def factorial(n): return 1 if n == 0 else n * factorial(n-1)"),
        ("Reverse string function", "def reverse_string(s): return s[::-1]"),
        ("Check even number", "def is_even(n): return n % 2 == 0"),
        ("Multiply function", "def multiply(x, y): return x * y"),
        ("Yoruba greeting function", "def yoruba_greeting(): return 'Báwo ni'"),
    ])
    # MATH
    + _kb([
        ("15 + 27", "42"),
        ("8 × 7", "56"),
        ("100 - 45", "55"),
        ("12 × 12", "144"),
        ("25% of 200", "50"),
    ])
    # YORUBA
    + _kb([
        ("Hello in Yoruba", "Báwo ni"),
        ("Thank you in Yoruba", "Ẹ sé"),
        ("How are you in Yoruba", "Ṣe daadaa ni"),
        ("Good morning in Yoruba", "Ẹ káàrọ̀"),
        ("Good night in Yoruba", "O dàárọ̀"),
        ("Please in Yoruba", "Jọ̀wọ́"),
    ])
    # SWAHILI
    + _kb([
        ("Hello in Swahili", "Hujambo"),
        ("Thank you in Swahili", "Asante"),
    ])
    # IGBO
    + _kb([
        ("Hello in Igbo", "Nnọọ"),
        ("Thank you in Igbo", "Daalụ"),
    ])
    # HAUSA
    + _kb([
        ("Hello in Hausa", "Sannu"),
        ("Thank you in Hausa", "Na gode"),
    ])
    # AFRICAN INNOVATION
    + _kb([
        ("M-Pesa", "Mobile money service launched in Kenya in 2007"),
        ("Andela", "Trains African software developers for global companies"),
    ])
)

print(f"✅ African Knowledge Base: {len(simba_knowledge_base)} entries")

# =====================
# SEARCH SYSTEM
# =====================

try:
    embedder = SentenceTransformer('all-MiniLM-L6-v2')
    question_texts = [entry["question"] for entry in simba_knowledge_base]
    question_embeddings = embedder.encode(question_texts)

    # L2-normalize the vectors so inner product == cosine similarity.
    faiss.normalize_L2(question_embeddings)
    index = faiss.IndexFlatIP(question_embeddings.shape[1])
    index.add(question_embeddings)
    print("✅ Search System Ready!")
except Exception as e:
    print(f"❌ Search system error: {e}")
    index = None  # simba_search falls back to the raw list when None

def simba_search(query, top_k=2):
    """Search the African knowledge base for entries similar to *query*.

    Args:
        query: Free-text user question.
        top_k: Maximum number of entries to return.

    Returns:
        A list of dicts with "question", "answer" and (when the vector
        index is available) a similarity "score". Falls back to the first
        ``top_k`` raw knowledge-base entries when the index is missing or
        the search fails, so callers never see an exception.
    """
    if index is None:
        return simba_knowledge_base[:top_k]

    try:
        query_embedding = embedder.encode([query])
        faiss.normalize_L2(query_embedding)
        scores, indices = index.search(query_embedding, top_k)

        results = []
        for i, idx in enumerate(indices[0]):
            # faiss pads missing neighbors with -1; without the lower bound
            # that -1 would silently index the *last* knowledge-base entry.
            if 0 <= idx < len(simba_knowledge_base):
                results.append({
                    "question": simba_knowledge_base[idx]["question"],
                    "answer": simba_knowledge_base[idx]["answer"],
                    "score": scores[0][i],
                })
        return results
    except Exception:
        # Narrowed from a bare `except:`; a search failure must never
        # break the chat flow, so degrade to the unranked fallback.
        return simba_knowledge_base[:top_k]

# =====================
# SIMBA AI CORE FUNCTION
# =====================

# Canned instant replies for common greetings — hoisted to module level so
# the dict is built once instead of on every call.
_QUICK_RESPONSES = {
    "hello": "🦁 Báwo ni! Hello! I'm Simba AI, the first African LLM.",
    "hi": "🦁 Báwo ni! Welcome to Simba AI!",
    "hey": "🦁 Hello! I'm Simba AI, specializing in African languages and coding.",
}


def generate_simba_response(message):
    """Generate a Simba AI reply for *message*.

    Args:
        message: Raw user text.

    Returns:
        A dict with keys:
            response: reply text, always prefixed with "🦁",
            response_time: wall-clock seconds spent, rounded to 2 decimals,
            knowledge_used: knowledge-base questions used as context
                (["quick_response"] for canned greetings, [] on error),
            model: model name (generation path only),
            error: exception text (error path only).
    """
    start_time = time.time()

    # Instant path: skip retrieval and model inference for plain greetings.
    lower_message = message.lower().strip()
    if lower_message in _QUICK_RESPONSES:
        return {
            "response": _QUICK_RESPONSES[lower_message],
            "response_time": round(time.time() - start_time, 2),
            "knowledge_used": ["quick_response"],
        }

    try:
        # Retrieve the two most relevant knowledge-base entries.
        search_results = simba_search(message, top_k=2)

        context = "African Knowledge Reference:\n"
        for i, result in enumerate(search_results, 1):
            context += f"{i}. {result['question']}: {result['answer']}\n"

        prompt = f"User: {message}\nAfrican Knowledge: {context}\nSimba AI:"

        inputs = tokenizer.encode(prompt, return_tensors="pt", max_length=512, truncation=True)

        with torch.no_grad():
            outputs = model.generate(
                inputs,
                max_new_tokens=100,
                temperature=0.7,
                do_sample=True,
                pad_token_id=tokenizer.eos_token_id,
            )

        full_response = tokenizer.decode(outputs[0], skip_special_tokens=True)

        # Keep only the text after the last "Simba AI:" marker; fall back
        # to stripping the prompt prefix if the marker was not echoed.
        if "Simba AI:" in full_response:
            response = full_response.split("Simba AI:")[-1].strip()
        else:
            response = full_response[len(prompt):].strip()

        if not response.startswith("🦁"):
            response = f"🦁 {response}"

        return {
            "response": response,
            "response_time": round(time.time() - start_time, 2),
            "knowledge_used": [r["question"] for r in search_results],
            "model": model_name,
        }

    except Exception as e:
        return {
            "response": "🦁 Simba AI is currently learning. Please try again!",
            "response_time": round(time.time() - start_time, 2),
            # Always present so callers can read it without .get().
            "knowledge_used": [],
            "error": str(e),
        }

# =====================
# API ROUTES
# =====================

@app.route('/')
def home():
    """Root endpoint: describe the API and list its available routes."""
    payload = {
        "message": "🦁 Simba AI Backend API - First African LLM",
        "status": "running",
        "endpoints": {
            "/api/chat": "POST - Chat with Simba AI",
            "/api/health": "GET - Health check",
            "/api/info": "GET - API information",
        },
    }
    return jsonify(payload)

@app.route('/api/health')
def health_check():
    """Liveness probe: report status, the active model, and server time."""
    status = {
        "status": "healthy",
        "model": model_name,
        "timestamp": time.time(),
    }
    return jsonify(status)

@app.route('/api/info')
def api_info():
    """Describe the API: name, version, model, and capability list."""
    capabilities = [
        "African Languages: Yoruba, Swahili, Igbo, Hausa",
        "Python Coding & Programming",
        "Mathematics & Problem Solving",
        "African Innovation Knowledge",
    ]
    return jsonify({
        "name": "Simba AI - First African LLM",
        "version": "1.0",
        "model": model_name,
        "capabilities": capabilities,
        "knowledge_base_size": len(simba_knowledge_base),
    })

@app.route('/api/chat', methods=['POST'])
def chat():
    """POST /api/chat — expects a JSON body of ``{"message": "<text>"}``.

    Returns the Simba AI reply plus timing/metadata on success, a 400 for
    a missing, non-string, or blank message, and a 500 on unexpected
    failures.
    """
    try:
        data = request.get_json()

        if not data or 'message' not in data:
            return jsonify({
                "error": "Missing 'message' in request body"
            }), 400

        user_message = data['message']

        # A non-string payload (e.g. a number) previously raised
        # AttributeError on .strip() and surfaced as a 500; reject it
        # explicitly as a client error instead.
        if not isinstance(user_message, str):
            return jsonify({
                "error": "'message' must be a string"
            }), 400

        if not user_message.strip():
            return jsonify({
                "error": "Message cannot be empty"
            }), 400

        # Generate response
        result = generate_simba_response(user_message)

        return jsonify({
            "success": True,
            "user_message": user_message,
            "simba_response": result["response"],
            "response_time": result["response_time"],
            "knowledge_used": result.get("knowledge_used", []),
            "model": result.get("model", model_name),
            "timestamp": time.time()
        })

    except Exception as e:
        return jsonify({
            "success": False,
            "error": str(e),
            "timestamp": time.time()
        }), 500

# =====================
# LAUNCH
# =====================

if __name__ == '__main__':
    # Startup banner listing the routes this server exposes.
    banner = [
        "🎯 Simba AI Backend API Ready!",
        "🌐 Endpoints:",
        " GET /api/health - Health check",
        " GET /api/info - API information",
        " POST /api/chat - Chat with Simba AI",
        "\n🚀 Starting server...",
    ]
    for line in banner:
        print(line)
    # Bind on all interfaces; 7860 is the Hugging Face Spaces default port.
    app.run(host='0.0.0.0', port=7860, debug=False)

Files changed (1) hide show
  1. app.py +0 -320
app.py DELETED
@@ -1,320 +0,0 @@
1
- # =====================
2
- # 🦁 SIMBA AI - First African LLM (FAST VERSION)
3
- # =====================
4
- # Optimized for free Hugging Face hosting
5
- # Uses DialoGPT-large - Fast & Reliable
6
- # =====================
7
-
8
- import gradio as gr
9
- import torch
10
- from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
11
- from sentence_transformers import SentenceTransformer
12
- import faiss
13
- import numpy as np
14
- import time
15
-
16
- print("🚀 Initializing Simba AI - Fast African LLM...")
17
-
18
- # =====================
19
- # LOAD AI MODEL - OPTIMIZED
20
- # =====================
21
-
22
- # Using DialoGPT-large - Fast, reliable, works on free tier
23
- model_name = "microsoft/DialoGPT-large"
24
-
25
- try:
26
- print("📥 Loading optimized AI model...")
27
- tokenizer = AutoTokenizer.from_pretrained(model_name)
28
- tokenizer.pad_token = tokenizer.eos_token
29
-
30
- model = AutoModelForCausalLM.from_pretrained(
31
- model_name,
32
- torch_dtype=torch.float16,
33
- device_map="auto",
34
- )
35
- print("✅ Simba AI Model Loaded Successfully! (Fast Version)")
36
-
37
- except Exception as e:
38
- print(f"❌ Model loading error: {e}")
39
- print("🔄 Using even lighter model...")
40
- model_name = "microsoft/DialoGPT-medium"
41
- tokenizer = AutoTokenizer.from_pretrained(model_name)
42
- tokenizer.pad_token = tokenizer.eos_token
43
- model = AutoModelForCausalLM.from_pretrained(model_name)
44
- print("✅ Light model loaded!")
45
-
46
- # =====================
47
- # AFRICAN KNOWLEDGE BASE
48
- # =====================
49
-
50
- simba_knowledge_base = [
51
- # CODING
52
- {"question": "Python add function", "answer": "def add(a, b): return a + b"},
53
- {"question": "Factorial function", "answer": "def factorial(n): return 1 if n == 0 else n * factorial(n-1)"},
54
- {"question": "Reverse string function", "answer": "def reverse_string(s): return s[::-1]"},
55
- {"question": "Check even number", "answer": "def is_even(n): return n % 2 == 0"},
56
- {"question": "Multiply function", "answer": "def multiply(x, y): return x * y"},
57
- {"question": "Yoruba greeting function", "answer": "def yoruba_greeting(): return 'Báwo ni'"},
58
-
59
- # MATH
60
- {"question": "15 + 27", "answer": "42"},
61
- {"question": "8 × 7", "answer": "56"},
62
- {"question": "100 - 45", "answer": "55"},
63
- {"question": "12 × 12", "answer": "144"},
64
- {"question": "25% of 200", "answer": "50"},
65
- {"question": "9 × 6", "answer": "54"},
66
-
67
- # YORUBA
68
- {"question": "Hello in Yoruba", "answer": "Báwo ni"},
69
- {"question": "Thank you in Yoruba", "answer": "Ẹ sé"},
70
- {"question": "How are you in Yoruba", "answer": "Ṣe daadaa ni"},
71
- {"question": "Good morning in Yoruba", "answer": "Ẹ káàrọ̀"},
72
- {"question": "Good night in Yoruba", "answer": "O dàárọ̀"},
73
- {"question": "Please in Yoruba", "answer": "Jọ̀wọ́"},
74
- {"question": "Welcome in Yoruba", "answer": "Káàbọ̀"},
75
-
76
- # SWAHILI
77
- {"question": "Hello in Swahili", "answer": "Hujambo"},
78
- {"question": "Thank you in Swahili", "answer": "Asante"},
79
- {"question": "How are you in Swahili", "answer": "Habari yako"},
80
- {"question": "Good morning in Swahili", "answer": "Habari za asubuhi"},
81
-
82
- # IGBO
83
- {"question": "Hello in Igbo", "answer": "Nnọọ"},
84
- {"question": "Thank you in Igbo", "answer": "Daalụ"},
85
- {"question": "How are you in Igbo", "answer": "Kedu ka ị mere?"},
86
-
87
- # HAUSA
88
- {"question": "Hello in Hausa", "answer": "Sannu"},
89
- {"question": "Thank you in Hausa", "answer": "Na gode"},
90
- {"question": "How are you in Hausa", "answer": "Yaya lafiya?"},
91
-
92
- # AFRICAN INNOVATION
93
- {"question": "M-Pesa", "answer": "Mobile money service launched in Kenya in 2007, revolutionized banking"},
94
- {"question": "Andela", "answer": "Trains African software developers for global tech companies"},
95
- {"question": "Flutterwave", "answer": "Nigerian fintech company providing payment infrastructure across Africa"},
96
- {"question": "Paystack", "answer": "Nigerian payment processing company acquired by Stripe for $200M"},
97
-
98
- # SIMBA AI IDENTITY
99
- {"question": "What is Simba AI", "answer": "Simba AI is the first African Large Language Model, specializing in African languages, coding, and mathematics"},
100
- {"question": "Simba AI purpose", "answer": "To advance African AI innovation and make AI accessible for African languages and contexts"},
101
- ]
102
-
103
- print(f"✅ African Knowledge Base: {len(simba_knowledge_base)} expert entries")
104
-
105
- # =====================
106
- # FAST SEARCH SYSTEM
107
- # =====================
108
-
109
- try:
110
- print("🔍 Loading fast search system...")
111
- embedder = SentenceTransformer('all-MiniLM-L6-v2')
112
-
113
- # Build search index
114
- questions = [item["question"] for item in simba_knowledge_base]
115
- question_embeddings = embedder.encode(questions)
116
-
117
- dimension = question_embeddings.shape[1]
118
- index = faiss.IndexFlatIP(dimension)
119
- faiss.normalize_L2(question_embeddings)
120
- index.add(question_embeddings)
121
-
122
- print("✅ Fast Search System Ready!")
123
- except Exception as e:
124
- print(f"❌ Search system error: {e}")
125
- index = None
126
-
127
- def simba_search(query, top_k=3):
128
- """Fast search African knowledge base"""
129
- if index is None:
130
- return simba_knowledge_base[:top_k]
131
-
132
- try:
133
- query_embedding = embedder.encode([query])
134
- faiss.normalize_L2(query_embedding)
135
-
136
- scores, indices = index.search(query_embedding, top_k)
137
-
138
- results = []
139
- for i, idx in enumerate(indices[0]):
140
- if idx < len(simba_knowledge_base):
141
- results.append({
142
- "question": simba_knowledge_base[idx]["question"],
143
- "answer": simba_knowledge_base[idx]["answer"],
144
- "score": scores[0][i]
145
- })
146
-
147
- return results
148
- except Exception as e:
149
- return simba_knowledge_base[:top_k]
150
-
151
- # =====================
152
- # FAST SIMBA AI CHAT FUNCTION
153
- # =====================
154
-
155
- def simba_ai_chat(message, history):
156
- """Fast chat function with timeout protection"""
157
-
158
- start_time = time.time()
159
-
160
- # Quick responses for common greetings (instant)
161
- quick_responses = {
162
- "hello": "🦁 Báwo ni! Hello! I'm Simba AI, the first African LLM. I specialize in African languages, coding, and mathematics. How can I assist you today?",
163
- "hi": "🦁 Báwo ni! Welcome to Simba AI - Africa's first Large Language Model!",
164
- "hey": "🦁 Hello! I'm Simba AI. Ask me about African languages, Python coding, or math problems!",
165
- "hola": "🦁 Báwo ni! I see you speak Spanish. I specialize in African languages like Yoruba, Swahili, Igbo, and Hausa!",
166
- }
167
-
168
- lower_message = message.lower().strip()
169
- if lower_message in quick_responses:
170
- return quick_responses[lower_message]
171
-
172
- try:
173
- # Search for relevant knowledge
174
- search_results = simba_search(message, top_k=2)
175
-
176
- # Build smart context
177
- context = "📚 African Knowledge:\n"
178
- for i, result in enumerate(search_results, 1):
179
- context += f"{i}. {result['question']}: {result['answer']}\n"
180
-
181
- # Optimized prompt for DialoGPT
182
- prompt = f"User: {message}\nAfrican Knowledge: {context}\nSimba AI:"
183
-
184
- # Fast generation
185
- inputs = tokenizer.encode(prompt, return_tensors="pt", max_length=512, truncation=True)
186
-
187
- with torch.no_grad():
188
- outputs = model.generate(
189
- inputs,
190
- max_new_tokens=100,
191
- temperature=0.7,
192
- do_sample=True,
193
- pad_token_id=tokenizer.eos_token_id,
194
- repetition_penalty=1.1,
195
- num_return_sequences=1,
196
- )
197
-
198
- response = tokenizer.decode(outputs[0], skip_special_tokens=True)
199
-
200
- # Extract just the Simba AI response
201
- if "Simba AI:" in response:
202
- final_response = response.split("Simba AI:")[-1].strip()
203
- else:
204
- final_response = response[len(prompt):].strip()
205
-
206
- # Add African flair
207
- if not final_response.startswith("🦁"):
208
- final_response = f"🦁 {final_response}"
209
-
210
- response_time = time.time() - start_time
211
- print(f"✅ Response generated in {response_time:.2f} seconds")
212
-
213
- return final_response
214
-
215
- except Exception as e:
216
- error_msg = f"🦁 Simba AI is thinking... Please try again! (Error: {str(e)})"
217
- print(f"❌ Generation error: {e}")
218
- return error_msg
219
-
220
- # =====================
221
- # FAST GRADIO INTERFACE
222
- # =====================
223
-
224
- # African theme CSS
225
- css = """
226
- .gradio-container {
227
- font-family: 'Arial', sans-serif;
228
- }
229
- .header {
230
- text-align: center;
231
- padding: 25px;
232
- background: linear-gradient(135deg, #ff7e5f, #feb47b);
233
- color: white;
234
- border-radius: 15px;
235
- margin-bottom: 20px;
236
- border: 2px solid #e65c50;
237
- }
238
- .african-pattern {
239
- background: linear-gradient(45deg, #ff7e5f 25%, transparent 25%),
240
- linear-gradient(-45deg, #ff7e5f 25%, transparent 25%),
241
- linear-gradient(45deg, transparent 75%, #ff7e5f 75%),
242
- linear-gradient(-45deg, transparent 75%, #ff7e5f 75');
243
- background-size: 20px 20px;
244
- background-position: 0 0, 0 10px, 10px -10px, -10px 0px;
245
- opacity: 0.1;
246
- }
247
- """
248
-
249
- # Create fast interface
250
- with gr.Blocks(css=css, theme=gr.themes.Soft()) as demo:
251
-
252
- gr.HTML("""
253
- <div class="header">
254
- <h1>🦁 Simba AI - First African LLM</h1>
255
- <h3>Fast & Optimized - African Languages, Coding & Mathematics</h3>
256
- <p>Powered by DialoGPT-large • Responses in 5-15 seconds</p>
257
- </div>
258
- """)
259
-
260
- chatbot = gr.Chatbot(
261
- label="🦁 Chat with Simba AI",
262
- height=500,
263
- show_copy_button=True,
264
- placeholder="Ask about African languages, Python coding, or math..."
265
- )
266
-
267
- with gr.Row():
268
- msg = gr.Textbox(
269
- label="Your message",
270
- placeholder="Type your question here...",
271
- lines=2,
272
- scale=4
273
- )
274
- send_btn = gr.Button("🚀 Ask Simba AI", variant="primary", scale=1)
275
-
276
- with gr.Row():
277
- clear_btn = gr.Button("🧹 Clear Chat")
278
- info_btn = gr.Button("ℹ️ About Simba AI")
279
-
280
- # Quick action examples
281
- gr.Examples(
282
- examples=[
283
- "Hello",
284
- "How do you say thank you in Yoruba?",
285
- "Write Python function to add numbers",
286
- "What is 15 + 27?",
287
- "Tell me about M-Pesa",
288
- "How are you in Swahili?",
289
- "Create factorial function",
290
- "Calculate 8 × 7"
291
- ],
292
- inputs=msg,
293
- label="💡 Quick Actions:"
294
- )
295
-
296
- # Event handlers
297
- def respond(message, chat_history):
298
- bot_message = simba_ai_chat(message, chat_history)
299
- chat_history.append((message, bot_message))
300
- return "", chat_history
301
-
302
- def show_info():
303
- return [("", "🦁 **Simba AI - First African LLM**\n\n**Capabilities:**\n• African Languages: Yoruba, Swahili, Igbo, Hausa\n• Python Coding & Programming\n• Mathematics & Problem Solving\n• African Tech Innovation\n\n**Powered by:** DialoGPT-large + African Knowledge Base\n**Response Time:** 5-15 seconds\n**Mission:** Advance African AI Innovation")]
304
-
305
- msg.submit(respond, [msg, chatbot], [msg, chatbot])
306
- send_btn.click(respond, [msg, chatbot], [msg, chatbot])
307
- clear_btn.click(lambda: None, None, chatbot, queue=False)
308
- info_btn.click(show_info, None, chatbot)
309
-
310
- # =====================
311
- # LAUNCH
312
- # =====================
313
-
314
- if __name__ == "__main__":
315
- print("🎯 Simba AI is ready! Launching interface...")
316
- demo.launch(
317
- debug=True,
318
- share=True,
319
- show_error=True
320
- )