File size: 5,526 Bytes
c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 6a23cf2 16967c2 6a23cf2 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 c2c78c0 16967c2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 |
# =====================
# 🦁 SIMBA AI - HUGGING FACE COMPATIBLE
# =====================
# Optimized for Hugging Face Spaces
# Uses their built-in components
# =====================
# Standard-library imports, grouped and ordered alphabetically.
import json
import os
import time

print("🚀 Initializing Simba AI on Hugging Face...")
# The heavy ML stack is optional: when torch/transformers cannot be
# imported (e.g. a minimal CPU Space), fall back to the rule-based
# responder and record that fact in TORCH_AVAILABLE.
TORCH_AVAILABLE = False
try:
    import torch
    from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
except ImportError as err:
    print(f"❌ Import error: {err}")
else:
    TORCH_AVAILABLE = True
    print("✅ PyTorch and Transformers loaded successfully!")
# =====================
# SIMPLE FALLBACK AI
# =====================
# Canned reply table used by simple_simba_response(). Keys are compared
# against the lowercased, stripped user message — first as an exact
# match, then as a partial match in insertion order, so the ORDER of
# entries below is behaviorally significant: do not reorder.
simba_knowledge_base = {
    # Greetings
    "hello": "🦁 Báwo ni! Hello! I'm Simba AI, the first African LLM.",
    "hi": "🦁 Báwo ni! Welcome to Simba AI!",
    "hey": "🦁 Hello! I'm Simba AI, specializing in African languages and coding.",
    # Coding examples
    "python add function": "🦁 Here's a Python function to add two numbers:\n```python\ndef add(a, b):\n return a + b\n```",
    # African-language translations
    "yoruba hello": "🦁 Hello in Yoruba is: Báwo ni",
    "swahili hello": "🦁 Hello in Swahili is: Hujambo",
    "igbo hello": "🦁 Hello in Igbo is: Nnọọ",
    "hausa hello": "🦁 Hello in Hausa is: Sannu",
    # Arithmetic demos (note "×" is the Unicode multiplication sign)
    "15 + 27": "🦁 15 + 27 = 42",
    "8 × 7": "🦁 8 × 7 = 56",
    # About / African tech innovation
    "what is simba ai": "🦁 Simba AI is the first African Large Language Model, specializing in African languages, coding, and mathematics.",
    "mpesa": "🦁 M-Pesa is a mobile money service launched in Kenya in 2007 that revolutionized banking in Africa.",
    "andela": "🦁 Andela trains African software developers and connects them with global tech companies.",
}
def simple_simba_response(message):
    """Return a canned Simba AI reply for *message* without PyTorch.

    Lookup order:
      1. exact (case-insensitive) match against the knowledge base,
      2. whole-word match of a knowledge-base key inside the message,
      3. keyword-triggered topic hints,
      4. a generic capabilities greeting.

    Args:
        message: raw user text.

    Returns:
        A reply string (always non-empty).
    """
    import re  # local import: keeps this block self-contained

    lower_msg = message.lower().strip()

    # 1. Exact match — cheapest and most specific.
    if lower_msg in simba_knowledge_base:
        return simba_knowledge_base[lower_msg]

    # 2. Partial match, first key wins (dict order matters).
    # BUG FIX: a plain `key in lower_msg` substring test let short keys
    # like "hi" fire inside unrelated words ("this", "chips"). Require
    # the key to appear as a whole word/phrase instead.
    for key, response in simba_knowledge_base.items():
        if re.search(r'(?<!\w)' + re.escape(key) + r'(?!\w)', lower_msg):
            return response

    # 3. Topic hints keyed on loose keyword groups.
    if any(word in lower_msg for word in ('python', 'code', 'programming', 'function')):
        return "🦁 I can help with Python programming! Try asking: 'Python add function' or 'Create factorial function'"
    if any(word in lower_msg for word in ('yoruba', 'swahili', 'igbo', 'hausa', 'language')):
        return "🦁 I specialize in African languages! Try: 'Yoruba hello', 'Swahili thank you', 'Igbo hello', or 'Hausa hello'"
    if any(word in lower_msg for word in ('math', 'calculate', 'add', 'multiply', 'times')):
        return "🦁 I can help with mathematics! Try: '15 + 27', '8 × 7', or '25% of 200'"
    if any(word in lower_msg for word in ('africa', 'innovation', 'mpesa', 'andela')):
        return "🦁 Ask me about African innovation! Try: 'What is M-Pesa?' or 'Tell me about Andela'"

    # 4. Fallback: describe what Simba AI can do.
    return "🦁 Báwo ni! I'm Simba AI, the first African LLM. I specialize in:\n• African languages (Yoruba, Swahili, Igbo, Hausa)\n• Python programming and coding\n• Mathematics and calculations\n• African tech innovation\n\nTry asking me about any of these topics!"
# =====================
# GRADIO INTERFACE (Hugging Face Native)
# =====================
# Use Gradio which is pre-installed on Hugging Face
import gradio as gr
def chat_interface(message, history):
    """Gradio chat callback: route *message* to the model or the fallback.

    Args:
        message: user text from the textbox.
        history: chat history supplied by Gradio (currently unused;
            kept so the signature matches Gradio's chat-fn contract).

    Returns:
        The reply string to display.
    """
    if TORCH_AVAILABLE:
        try:
            # Placeholder for a real model call — in a full
            # implementation the transformers pipeline would run here.
            response = f"🦁 (AI Model) Response to: {message}"
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt. Any model failure falls
            # back to the rule-based responder.
            response = simple_simba_response(message)
    else:
        response = simple_simba_response(message)
    return response
# Create a simple chat interface
# Top-level UI wiring: builds the Blocks layout and registers event
# handlers at import time, so `demo` exists for the launch call below.
with gr.Blocks(theme=gr.themes.Soft(), title="🦁 Simba AI - First African LLM") as demo:
    gr.Markdown("""
    # 🦁 Simba AI - First African LLM
    ### Specializing in African Languages, Coding & Mathematics
    """)
    # Chat history widget, pre-seeded with a greeting from the bot
    # ([user, bot] pair format; empty user side for the opener).
    chatbot = gr.Chatbot(
        label="Chat with Simba AI",
        value=[
            ["", "🦁 Báwo ni! Hello! I'm Simba AI, the first African LLM. How can I help you today?"]
        ]
    )
    with gr.Row():
        msg = gr.Textbox(
            label="Your message",
            placeholder="Ask about African languages, coding, or mathematics...",
            scale=4
        )
        btn = gr.Button("🚀 Send", scale=1)
    clear = gr.Button("🧹 Clear Chat")
    # Clickable example prompts that populate the textbox.
    gr.Examples(
        examples=[
            "Hello",
            "Python add function",
            "Yoruba hello",
            "Swahili thank you",
            "15 + 27",
            "8 × 7",
            "What is M-Pesa?",
            "Tell me about Andela"
        ],
        inputs=msg
    )
    def respond(message, chat_history):
        # Event handler shared by Enter-submit and the Send button.
        # NOTE(review): calls the rule-based responder directly rather
        # than chat_interface, so the torch path above is never used
        # by the UI — presumably intentional for the fallback build.
        bot_message = simple_simba_response(message)
        chat_history.append((message, bot_message))
        # Return "" to clear the textbox, plus the updated history.
        return "", chat_history
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    btn.click(respond, [msg, chatbot], [msg, chatbot])
    # Clear button resets the chatbot widget to empty.
    clear.click(lambda: None, None, chatbot, queue=False)
# =====================
# LAUNCH
# =====================
if __name__ == "__main__":
    # Entry point for local runs and Hugging Face Spaces.
    print("✅ Simba AI is ready!\n🌍 Capabilities: African Languages, Coding, Mathematics")
    demo.launch(share=True, debug=True)