Stanley03 commited on
Commit
c2c78c0
·
verified ·
1 Parent(s): fef4f77

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +297 -0
app.py ADDED
@@ -0,0 +1,297 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # =====================
2
+ # 🦁 SIMBA AI - BACKEND API ONLY
3
+ # =====================
4
+ # Provides REST API for your custom frontend
5
+ # No Gradio interface - Pure backend
6
+ # =====================
7
+
8
+ from flask import Flask, request, jsonify, send_from_directory
9
+ from flask_cors import CORS
10
+ import torch
11
+ from transformers import AutoModelForCausalLM, AutoTokenizer
12
+ from sentence_transformers import SentenceTransformer
13
+ import faiss
14
+ import numpy as np
15
+ import time
16
+ import os
17
+
18
print("🚀 Initializing Simba AI Backend API...")

# Flask app with CORS enabled so a browser frontend served from a
# different origin can call the JSON endpoints defined below.
app = Flask(__name__)
CORS(app)  # Enable CORS for your frontend

# =====================
# LOAD AI MODEL
# =====================

# Primary model; on any load failure we fall back to the smaller
# "medium" checkpoint (e.g. when the host lacks RAM/VRAM for "large").
model_name = "microsoft/DialoGPT-large"

try:
    print("📥 Loading AI model...")
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    # DialoGPT defines no dedicated pad token; reuse EOS for padding.
    tokenizer.pad_token = tokenizer.eos_token

    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,  # half precision — presumably targets a GPU; TODO confirm on CPU-only hosts
        device_map="auto",
    )
    print("✅ Simba AI Model Loaded Successfully!")
except Exception as e:
    # Fallback: smaller model, default dtype/device.
    # NOTE(review): if this load also fails, `model`/`tokenizer` remain
    # undefined and every request will raise NameError — consider exiting.
    print(f"❌ Model loading error: {e}")
    model_name = "microsoft/DialoGPT-medium"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    tokenizer.pad_token = tokenizer.eos_token
    model = AutoModelForCausalLM.from_pretrained(model_name)
    print("✅ Light model loaded!")
48
# =====================
# AFRICAN KNOWLEDGE BASE
# =====================

# Hand-curated Q/A pairs used for retrieval-augmented prompting.
# Each entry is {"question": ..., "answer": ...}; the questions are
# embedded and indexed by the search system built below, and matched
# answers are injected into the model prompt as context.
simba_knowledge_base = [
    # CODING
    {"question": "Python add function", "answer": "def add(a, b): return a + b"},
    {"question": "Factorial function", "answer": "def factorial(n): return 1 if n == 0 else n * factorial(n-1)"},
    {"question": "Reverse string function", "answer": "def reverse_string(s): return s[::-1]"},
    {"question": "Check even number", "answer": "def is_even(n): return n % 2 == 0"},
    {"question": "Multiply function", "answer": "def multiply(x, y): return x * y"},
    {"question": "Yoruba greeting function", "answer": "def yoruba_greeting(): return 'Báwo ni'"},

    # MATH
    {"question": "15 + 27", "answer": "42"},
    {"question": "8 × 7", "answer": "56"},
    {"question": "100 - 45", "answer": "55"},
    {"question": "12 × 12", "answer": "144"},
    {"question": "25% of 200", "answer": "50"},

    # YORUBA
    {"question": "Hello in Yoruba", "answer": "Báwo ni"},
    {"question": "Thank you in Yoruba", "answer": "Ẹ sé"},
    {"question": "How are you in Yoruba", "answer": "Ṣe daadaa ni"},
    {"question": "Good morning in Yoruba", "answer": "Ẹ káàrọ̀"},
    {"question": "Good night in Yoruba", "answer": "O dàárọ̀"},
    {"question": "Please in Yoruba", "answer": "Jọ̀wọ́"},

    # SWAHILI
    {"question": "Hello in Swahili", "answer": "Hujambo"},
    {"question": "Thank you in Swahili", "answer": "Asante"},

    # IGBO
    {"question": "Hello in Igbo", "answer": "Nnọọ"},
    {"question": "Thank you in Igbo", "answer": "Daalụ"},

    # HAUSA
    {"question": "Hello in Hausa", "answer": "Sannu"},
    {"question": "Thank you in Hausa", "answer": "Na gode"},

    # AFRICAN INNOVATION
    {"question": "M-Pesa", "answer": "Mobile money service launched in Kenya in 2007"},
    {"question": "Andela", "answer": "Trains African software developers for global companies"},
]

print(f"✅ African Knowledge Base: {len(simba_knowledge_base)} entries")
94
+
95
# =====================
# SEARCH SYSTEM
# =====================

# Build an in-memory FAISS index over the knowledge-base questions.
# The embeddings are L2-normalised, so inner-product search
# (IndexFlatIP) is equivalent to cosine similarity.
try:
    embedder = SentenceTransformer('all-MiniLM-L6-v2')
    questions = [item["question"] for item in simba_knowledge_base]
    question_embeddings = embedder.encode(questions)

    dimension = question_embeddings.shape[1]
    index = faiss.IndexFlatIP(dimension)
    faiss.normalize_L2(question_embeddings)  # normalises in place
    index.add(question_embeddings)
    print("✅ Search System Ready!")
except Exception as e:
    # Degrade gracefully: simba_search() checks `index is None` and then
    # falls back to returning the first knowledge-base entries.
    print(f"❌ Search system error: {e}")
    index = None
112
+
113
def simba_search(query, top_k=2):
    """Search the African knowledge base for entries most similar to *query*.

    Args:
        query: Free-text user message to look up.
        top_k: Maximum number of matches to return.

    Returns:
        A list of dicts with "question", "answer" and (when the FAISS
        index is available) a float "score".  If search is unavailable
        or fails, falls back to the first ``top_k`` knowledge-base entries.
    """
    # Search system failed to initialise at import time — best-effort fallback.
    if index is None:
        return simba_knowledge_base[:top_k]

    try:
        query_embedding = embedder.encode([query])
        faiss.normalize_L2(query_embedding)
        scores, indices = index.search(query_embedding, top_k)

        results = []
        for i, idx in enumerate(indices[0]):
            # BUGFIX: FAISS pads missing results with -1; the original
            # only checked the upper bound, so a -1 would silently return
            # the LAST knowledge-base entry.  Guard both bounds.
            if 0 <= idx < len(simba_knowledge_base):
                results.append({
                    "question": simba_knowledge_base[idx]["question"],
                    "answer": simba_knowledge_base[idx]["answer"],
                    # Cast numpy float32 -> Python float so results stay
                    # JSON-serialisable if a caller ever returns them.
                    "score": float(scores[0][i]),
                })
        return results
    except Exception:
        # Was a bare `except:` — keep the best-effort fallback but stop
        # swallowing KeyboardInterrupt/SystemExit.
        return simba_knowledge_base[:top_k]
134
+
135
+ # =====================
136
+ # SIMBA AI CORE FUNCTION
137
+ # =====================
138
+
139
def generate_simba_response(message):
    """Generate a Simba AI reply for *message*.

    Returns:
        A dict with at least "response" (str) and "response_time"
        (seconds, rounded to 2 dp).  Successful generations also carry
        "knowledge_used" (matched KB questions) and "model"; failures
        carry "error" with the exception text instead.
    """
    start_time = time.time()

    # Canned replies for common greetings — avoids the (slow) model call.
    quick_responses = {
        "hello": "🦁 Báwo ni! Hello! I'm Simba AI, the first African LLM.",
        "hi": "🦁 Báwo ni! Welcome to Simba AI!",
        "hey": "🦁 Hello! I'm Simba AI, specializing in African languages and coding.",
    }

    lower_message = message.lower().strip()
    if lower_message in quick_responses:
        return {
            "response": quick_responses[lower_message],
            "response_time": round(time.time() - start_time, 2),
            "knowledge_used": ["quick_response"]
        }

    try:
        # Retrieve the closest knowledge-base entries for grounding.
        search_results = simba_search(message, top_k=2)

        # Build the reference context injected into the prompt.
        context = "African Knowledge Reference:\n"
        for i, result in enumerate(search_results, 1):
            context += f"{i}. {result['question']}: {result['answer']}\n"

        prompt = f"User: {message}\nAfrican Knowledge: {context}\nSimba AI:"

        # Truncate long prompts so they fit the model's context window.
        inputs = tokenizer.encode(prompt, return_tensors="pt", max_length=512, truncation=True)

        with torch.no_grad():
            outputs = model.generate(
                inputs,
                max_new_tokens=100,
                temperature=0.7,
                do_sample=True,
                pad_token_id=tokenizer.eos_token_id,
            )

        full_response = tokenizer.decode(outputs[0], skip_special_tokens=True)

        # Keep only the text after the last "Simba AI:" marker; otherwise
        # strip the echoed prompt prefix.
        if "Simba AI:" in full_response:
            response = full_response.split("Simba AI:")[-1].strip()
        else:
            response = full_response[len(prompt):].strip()

        # BUGFIX: the model can produce an empty continuation, which
        # previously yielded a bare "🦁" reply — substitute a useful
        # fallback message instead.
        if not response:
            response = "I'm not sure about that yet — try asking about African languages, coding, or math!"

        if not response.startswith("🦁"):
            response = f"🦁 {response}"

        response_time = round(time.time() - start_time, 2)

        return {
            "response": response,
            "response_time": response_time,
            "knowledge_used": [r["question"] for r in search_results],
            "model": model_name
        }

    except Exception as e:
        # Boundary handler: never let generation errors escape to the route.
        # (Was an f-string with no placeholders — now a plain literal.)
        return {
            "response": "🦁 Simba AI is currently learning. Please try again!",
            "response_time": round(time.time() - start_time, 2),
            "error": str(e)
        }
209
+
210
+ # =====================
211
+ # API ROUTES
212
+ # =====================
213
+
214
@app.route('/')
def home():
    """Root endpoint: describe the service and list its API routes."""
    endpoints = {
        "/api/chat": "POST - Chat with Simba AI",
        "/api/health": "GET - Health check",
        "/api/info": "GET - API information",
    }
    payload = {
        "message": "🦁 Simba AI Backend API - First African LLM",
        "status": "running",
        "endpoints": endpoints,
    }
    return jsonify(payload)
225
+
226
@app.route('/api/health')
def health_check():
    """Liveness probe: report the model in use and the server time."""
    status = {
        "status": "healthy",
        "model": model_name,
        "timestamp": time.time(),
    }
    return jsonify(status)
233
+
234
@app.route('/api/info')
def api_info():
    """Static metadata describing the Simba AI service."""
    capabilities = [
        "African Languages: Yoruba, Swahili, Igbo, Hausa",
        "Python Coding & Programming",
        "Mathematics & Problem Solving",
        "African Innovation Knowledge",
    ]
    info = {
        "name": "Simba AI - First African LLM",
        "version": "1.0",
        "model": model_name,
        "capabilities": capabilities,
        "knowledge_base_size": len(simba_knowledge_base),
    }
    return jsonify(info)
248
+
249
@app.route('/api/chat', methods=['POST'])
def chat():
    """POST /api/chat — body ``{"message": str}``; returns the AI reply as JSON.

    Responds 400 for missing/empty/non-string messages, 500 (JSON body)
    for unexpected server errors.
    """
    try:
        # BUGFIX: silent=True makes malformed/missing JSON yield None
        # instead of raising (which surfaced as an HTML error page),
        # so the client always receives a JSON error body.
        data = request.get_json(silent=True)

        if not data or 'message' not in data:
            return jsonify({
                "error": "Missing 'message' in request body"
            }), 400

        user_message = data['message']

        # BUGFIX: a non-string message (e.g. a number) previously crashed
        # on .strip() and surfaced as a 500 — reject it as a 400 instead.
        if not isinstance(user_message, str) or not user_message.strip():
            return jsonify({
                "error": "Message cannot be empty"
            }), 400

        # Generate response
        result = generate_simba_response(user_message)

        return jsonify({
            "success": True,
            "user_message": user_message,
            "simba_response": result["response"],
            "response_time": result["response_time"],
            "knowledge_used": result.get("knowledge_used", []),
            "model": result.get("model", model_name),
            "timestamp": time.time()
        })

    except Exception as e:
        # Last-resort boundary: clients always get JSON, never an HTML 500.
        return jsonify({
            "success": False,
            "error": str(e),
            "timestamp": time.time()
        }), 500
285
+
286
# =====================
# LAUNCH
# =====================

if __name__ == '__main__':
    print("🎯 Simba AI Backend API Ready!")
    print("🌐 Endpoints:")
    print("   GET  /api/health - Health check")
    print("   GET  /api/info - API information")
    print("   POST /api/chat - Chat with Simba AI")
    print("\n🚀 Starting server...")
    # IMPROVEMENT: honour a platform-provided PORT (container hosts set
    # this) while keeping 7860 — the original hard-coded value — as the
    # backward-compatible default.
    port = int(os.environ.get("PORT", 7860))
    app.run(host='0.0.0.0', port=port, debug=False)