# NOTE: the three lines above this file's imports ("Spaces:" /
# "Configuration error" x2) are residue from the hosting page the source
# was extracted from; they are not part of the program.
| import gradio as gr | |
| from huggingface_hub import HfApi, InferenceClient, list_models | |
| import os | |
| from datetime import datetime | |
| import json | |
| import sqlite3 | |
| from pathlib import Path | |
| import hashlib | |
| import secrets | |
| from typing import Optional, List, Dict, Any | |
| import requests | |
| from collections import defaultdict | |
# Initialize HF API
# Token is read from the environment; it may be None, in which case the
# Hub clients fall back to anonymous (rate-limited) access.
hf_token = os.getenv("HF_TOKEN")
api = HfApi(token=hf_token)
client = InferenceClient(token=hf_token)
# Database setup
# Relative path: the SQLite file is created in the process's working
# directory (ephemeral on most hosted Spaces — data is lost on restart).
DB_PATH = "chatbot_users.db"
def init_database(db_path: str = None):
    """Initialize the SQLite database for user management.

    Creates the users, chat_history, sessions and user_preferences tables
    if they do not already exist. Safe to call repeatedly.

    Args:
        db_path: Optional path to the database file. Defaults to the
            module-level DB_PATH (kept as a runtime fallback so existing
            zero-argument callers are unaffected).
    """
    conn = sqlite3.connect(db_path or DB_PATH)
    try:
        c = conn.cursor()
        # Users table: credentials plus per-account flags and preferences.
        c.execute('''CREATE TABLE IF NOT EXISTS users
                     (id INTEGER PRIMARY KEY AUTOINCREMENT,
                      username TEXT UNIQUE NOT NULL,
                      password_hash TEXT NOT NULL,
                      email TEXT UNIQUE,
                      created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                      last_login TIMESTAMP,
                      is_premium BOOLEAN DEFAULT 0,
                      theme_preference TEXT DEFAULT 'light',
                      favorite_models TEXT DEFAULT '[]')''')
        # Chat history table: one row per (message, response) exchange.
        c.execute('''CREATE TABLE IF NOT EXISTS chat_history
                     (id INTEGER PRIMARY KEY AUTOINCREMENT,
                      user_id INTEGER,
                      model_name TEXT,
                      message TEXT,
                      response TEXT,
                      timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                      FOREIGN KEY (user_id) REFERENCES users(id))''')
        # Sessions table. NOTE(review): created but never read or written
        # in the visible code — confirm whether it is used elsewhere.
        c.execute('''CREATE TABLE IF NOT EXISTS sessions
                     (id INTEGER PRIMARY KEY AUTOINCREMENT,
                      user_id INTEGER,
                      session_token TEXT UNIQUE,
                      created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                      expires_at TIMESTAMP,
                      FOREIGN KEY (user_id) REFERENCES users(id))''')
        # Per-user generation defaults (mirrors the Chat tab sliders).
        c.execute('''CREATE TABLE IF NOT EXISTS user_preferences
                     (user_id INTEGER PRIMARY KEY,
                      max_tokens INTEGER DEFAULT 512,
                      temperature REAL DEFAULT 0.7,
                      top_p REAL DEFAULT 0.9,
                      repetition_penalty REAL DEFAULT 1.0,
                      default_model TEXT,
                      FOREIGN KEY (user_id) REFERENCES users(id))''')
        conn.commit()
    finally:
        # Fix: the original left the connection open if any statement raised.
        conn.close()
# Create the tables at import time so a fresh deployment works immediately.
init_database()
# User authentication functions
def hash_password(password: str) -> str:
    """Return the hexadecimal SHA-256 digest of *password*.

    NOTE(review): a single unsalted SHA-256 pass is weak for password
    storage (vulnerable to precomputed-table attacks). hashlib.pbkdf2_hmac
    or a dedicated KDF would be safer, but switching would invalidate all
    hashes already stored in the users table.
    """
    digest = hashlib.sha256(password.encode())
    return digest.hexdigest()
def create_user(username: str, password: str, email: str = None) -> tuple[bool, str]:
    """Create a new user account plus its default preferences row.

    Args:
        username: Unique login name.
        password: Plain-text password; stored hashed via hash_password().
        email: Optional unique e-mail address.

    Returns:
        (success, human-readable status message). Never raises.
    """
    conn = None
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        password_hash = hash_password(password)
        c.execute("INSERT INTO users (username, password_hash, email) VALUES (?, ?, ?)",
                  (username, password_hash, email))
        user_id = c.lastrowid
        # Every account gets a preferences row holding the table defaults.
        c.execute("INSERT INTO user_preferences (user_id) VALUES (?)", (user_id,))
        conn.commit()
        return True, "Account created successfully!"
    except sqlite3.IntegrityError:
        # UNIQUE constraint on username or email fired.
        return False, "Username or email already exists!"
    except Exception as e:
        return False, f"Error creating account: {str(e)}"
    finally:
        # Fix: the original leaked the connection on both error paths.
        if conn is not None:
            conn.close()
def authenticate_user(username: str, password: str) -> tuple[bool, Optional[int], str]:
    """Validate credentials against the users table.

    Args:
        username: Login name to look up.
        password: Plain-text password; compared via its SHA-256 hash.

    Returns:
        (success, user_id or None, status message). On success the user's
        last_login timestamp is also updated.
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        c = conn.cursor()
        password_hash = hash_password(password)
        c.execute("SELECT id FROM users WHERE username = ? AND password_hash = ?",
                  (username, password_hash))
        result = c.fetchone()
        if result:
            user_id = result[0]
            c.execute("UPDATE users SET last_login = ? WHERE id = ?",
                      (datetime.now(), user_id))
            conn.commit()
            return True, user_id, "Login successful!"
        return False, None, "Invalid username or password!"
    finally:
        # Fix: the original leaked the connection if a query raised.
        conn.close()
def get_user_info(user_id: int) -> Dict[str, Any]:
    """Fetch profile and preference fields for one user.

    Returns a flat dict combining users-table columns with the joined
    user_preferences row, or None when the user id does not exist.
    Preference fields fall back to the application defaults when the
    joined columns are absent.
    """
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    c.execute("""SELECT u.username, u.email, u.created_at, u.is_premium,
                 u.theme_preference, u.favorite_models, p.*
                 FROM users u
                 LEFT JOIN user_preferences p ON u.id = p.user_id
                 WHERE u.id = ?""", (user_id,))
    row = c.fetchone()
    conn.close()
    if row is None:
        return None

    def _col(idx, fallback):
        # Columns 6+ come from the LEFT JOIN (index 6 is p.user_id, which
        # is skipped); guard against a short row just as the original did.
        return row[idx] if len(row) > idx else fallback

    return {
        "username": row[0],
        "email": row[1],
        "created_at": row[2],
        "is_premium": row[3],
        "theme_preference": row[4],
        "favorite_models": json.loads(row[5]) if row[5] else [],
        "max_tokens": _col(7, 512),
        "temperature": _col(8, 0.7),
        "top_p": _col(9, 0.9),
        "repetition_penalty": _col(10, 1.0),
        "default_model": _col(11, None),
    }
# Model management functions
def get_text_models(limit: int = 1000, search_query: str = "") -> List[Dict[str, Any]]:
    """Query the Hugging Face Hub for text-generation models.

    Results are sorted by download count (descending) and reduced to
    lightweight dicts. Any Hub/API error yields an empty list.
    """
    try:
        hub_models = list_models(
            task="text-generation",
            limit=limit,
            sort="downloads",
            direction=-1,
            search=search_query
        )
        return [
            {
                "id": m.id,
                # getattr with a default mirrors the original hasattr checks.
                "downloads": getattr(m, "downloads", 0),
                "likes": getattr(m, "likes", 0),
                "tags": getattr(m, "tags", []),
            }
            for m in hub_models
        ]
    except Exception as e:
        print(f"Error fetching models: {e}")
        return []
# Cache for models
# Populated lazily by load_models(); a module-level list shared by all users.
MODELS_CACHE = []
# Curated fallback list shown before the full Hub catalogue is fetched
# (also the "Popular" category in the Chat tab).
POPULAR_MODELS = [
    "meta-llama/Llama-3.2-3B-Instruct",
    "microsoft/Phi-3.5-mini-instruct",
    "mistralai/Mistral-7B-Instruct-v0.3",
    "google/gemma-2-2b-it",
    "Qwen/Qwen2.5-3B-Instruct",
    "HuggingFaceH4/zephyr-7b-beta",
    "tiiuae/falcon-7b-instruct",
    "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
    "openchat/openchat-3.5-0106",
    "teknium/OpenHermes-2.5-Mistral-7B"
]
def load_models():
    """Return the module-level model cache, populating it on first use."""
    global MODELS_CACHE
    if MODELS_CACHE:
        return MODELS_CACHE
    # First call (or a previous failed fetch left the cache empty).
    MODELS_CACHE = get_text_models(limit=5000)
    return MODELS_CACHE
def search_models(query: str, category: str = "all") -> List[str]:
    """Return up to 100 model ids whose id contains *query* (case-insensitive).

    Falls back to the first 100 cached models when the query is empty or
    matches nothing. *category* is accepted but not used here.
    """
    models = load_models()
    default_ids = [m["id"] for m in models[:100]]
    if not query:
        return default_ids
    needle = query.lower()
    matches = []
    for entry in models:
        if needle in entry["id"].lower():
            matches.append(entry["id"])
            if len(matches) >= 100:
                break
    return matches or default_ids
# Chat function
def chat_with_model(message: str, history: List, model_name: str, user_id: int,
                    max_tokens: int, temperature: float, top_p: float,
                    repetition_penalty: float, system_prompt: str) -> tuple:
    """Send *message* to *model_name* and append the exchange to *history*.

    Tries the streamed chat-completion endpoint first and falls back to
    plain text generation when it fails. Errors are surfaced as assistant
    messages instead of being raised. Returns (history, "") — the empty
    string clears the input textbox.
    """
    if not message.strip():
        return history, ""

    if not model_name:
        history.extend([
            {"role": "user", "content": message},
            {"role": "assistant", "content": "⚠️ Please select a model first!"},
        ])
        return history, ""

    try:
        history.append({"role": "user", "content": message})

        # Build the API payload: optional system prompt, then the full
        # conversation (which now includes the new user turn).
        payload = []
        if system_prompt.strip():
            payload.append({"role": "system", "content": system_prompt})
        payload.extend({"role": m["role"], "content": m["content"]} for m in history)

        reply = ""
        try:
            # Preferred path: streamed chat completion.
            stream = client.chat_completion(
                model=model_name,
                messages=payload,
                max_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stream=True
            )
            for chunk in stream:
                piece = chunk.choices[0].delta.content
                if piece:
                    reply += piece
        except Exception as e:
            reply = f"⚠️ Error with model {model_name}: {str(e)}\n\nTrying alternative inference method..."
            # Fallback: flatten the conversation into a single prompt and
            # use the raw text-generation endpoint.
            try:
                flat_prompt = "\n".join(f"{m['role']}: {m['content']}" for m in payload)
                reply = client.text_generation(
                    flat_prompt,
                    model=model_name,
                    max_new_tokens=max_tokens,
                    temperature=temperature,
                    top_p=top_p,
                    repetition_penalty=repetition_penalty
                )
            except Exception as e2:
                reply = f"❌ Model unavailable: {str(e2)}"

        history.append({"role": "assistant", "content": reply})

        # Persist the exchange for logged-in users only.
        if user_id:
            save_chat_history(user_id, model_name, message, reply)
        return history, ""
    except Exception as e:
        history.append({"role": "assistant", "content": f"❌ Error: {str(e)}"})
        return history, ""
def save_chat_history(user_id: int, model_name: str, message: str, response: str):
    """Persist one chat exchange to the chat_history table.

    Best-effort: failures are printed, never raised, so a broken database
    cannot take down the chat flow.
    """
    conn = None
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute("""INSERT INTO chat_history (user_id, model_name, message, response)
                     VALUES (?, ?, ?, ?)""",
                  (user_id, model_name, message, response))
        conn.commit()
    except Exception as e:
        print(f"Error saving chat history: {e}")
    finally:
        # Fix: the original leaked the connection when the insert raised.
        if conn is not None:
            conn.close()
def load_chat_history(user_id: int, limit: int = 50) -> List[Dict[str, str]]:
    """Return up to *limit* of the user's most recent chats, newest first."""
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    c.execute("""SELECT model_name, message, response, timestamp
                 FROM chat_history
                 WHERE user_id = ?
                 ORDER BY timestamp DESC
                 LIMIT ?""", (user_id, limit))
    rows = c.fetchall()
    conn.close()
    # Column order matches the SELECT above.
    keys = ("model", "message", "response", "timestamp")
    return [dict(zip(keys, row)) for row in rows]
def update_user_preferences(user_id: int, **kwargs):
    """Persist preference changes for a user.

    Recognised keys: "favorite_models" (stored as JSON on the users row)
    and the whitelisted user_preferences columns below. Unknown keys are
    silently ignored.
    """
    pref_columns = ("max_tokens", "temperature", "top_p",
                    "repetition_penalty", "default_model")
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    for key, value in kwargs.items():
        if key == "favorite_models":
            c.execute("UPDATE users SET favorite_models = ? WHERE id = ?",
                      (json.dumps(value), user_id))
        elif key in pref_columns:
            # f-string interpolation is safe here: *key* is restricted to
            # the fixed whitelist above, never user input.
            c.execute(f"UPDATE user_preferences SET {key} = ? WHERE user_id = ?",
                      (value, user_id))
    conn.commit()
    conn.close()
# UI Theme
# Custom CSS injected into the Gradio app: gradient header card, model
# cards, stat/feature badges, footer links, fixed-height chat container
# and the premium-account badge.
custom_css = """
.main-header {
    text-align: center;
    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
    padding: 2rem;
    border-radius: 10px;
    margin-bottom: 2rem;
    color: white;
}
.model-card {
    border: 1px solid #e0e0e0;
    border-radius: 8px;
    padding: 1rem;
    margin: 0.5rem 0;
    background: #f9f9f9;
}
.stat-box {
    display: inline-block;
    background: #667eea;
    color: white;
    padding: 0.5rem 1rem;
    border-radius: 5px;
    margin: 0.25rem;
}
.feature-badge {
    background: #10b981;
    color: white;
    padding: 0.25rem 0.75rem;
    border-radius: 15px;
    font-size: 0.875rem;
    display: inline-block;
    margin: 0.25rem;
}
.footer-link {
    text-align: center;
    padding: 1rem;
    font-size: 0.9rem;
    color: #666;
}
.footer-link a {
    color: #667eea;
    text-decoration: none;
    font-weight: bold;
}
.footer-link a:hover {
    text-decoration: underline;
}
#chatbot-container {
    height: 600px;
}
.premium-badge {
    background: gold;
    color: black;
    padding: 0.25rem 0.5rem;
    border-radius: 5px;
    font-weight: bold;
}
"""
# Build Gradio Interface
def build_ui():
    """Assemble and return the full Gradio Blocks interface.

    Layout: header HTML, six tabs (login/signup, chat, history, favorites,
    settings, about) and a footer, followed by the event-handler wiring.

    NOTE(review): several controls are created but have no handler attached
    in the visible code (retry_btn, stop_btn, clear_history_btn,
    load_favorite_btn, remove_favorite_btn, save_settings_btn,
    history_search, history_model_filter, favorites_list,
    default_model_setting, theme_setting) — confirm whether wiring exists
    elsewhere or is still TODO.
    """
    with gr.Blocks(css=custom_css, theme=gr.themes.Soft(), title="AI Chatbot Hub - 100k+ Models") as demo:
        # Session state: the logged-in user's id and name; None for guests.
        session_user_id = gr.State(None)
        session_username = gr.State(None)

        # Header
        gr.HTML("""
        <div class="main-header">
            <h1>🤖 AI Chatbot Hub</h1>
            <p style="font-size: 1.2rem; margin-top: 0.5rem;">Chat with 100,000+ AI Models - All Free!</p>
            <div style="margin-top: 1rem;">
                <span class="feature-badge">✨ Free Forever</span>
                <span class="feature-badge">🚀 100k+ Models</span>
                <span class="feature-badge">💬 Unlimited Chats</span>
                <span class="feature-badge">📱 Mobile Ready</span>
                <span class="feature-badge">🔐 Secure Auth</span>
                <span class="feature-badge">💾 Chat History</span>
                <span class="feature-badge">⚙️ Full Customization</span>
            </div>
        </div>
        """)

        with gr.Tabs() as main_tabs:
            # Login/Signup Tab
            with gr.Tab("🔐 Login / Sign Up", id=0):
                with gr.Row():
                    with gr.Column(scale=1):
                        gr.Markdown("### 🔑 Login to Your Account")
                        login_username = gr.Textbox(label="Username", placeholder="Enter your username")
                        login_password = gr.Textbox(label="Password", type="password", placeholder="Enter your password")
                        login_btn = gr.Button("🚀 Login", variant="primary", size="lg")
                        login_status = gr.Textbox(label="Status", interactive=False)
                    with gr.Column(scale=1):
                        gr.Markdown("### ✨ Create New Account")
                        signup_username = gr.Textbox(label="Username", placeholder="Choose a username")
                        signup_email = gr.Textbox(label="Email (Optional)", placeholder="your@email.com")
                        signup_password = gr.Textbox(label="Password", type="password", placeholder="Create a password")
                        signup_confirm = gr.Textbox(label="Confirm Password", type="password", placeholder="Confirm your password")
                        signup_btn = gr.Button("📝 Sign Up", variant="primary", size="lg")
                        signup_status = gr.Textbox(label="Status", interactive=False)
                gr.Markdown("""
                ### ✨ Features You'll Get:
                - 🆓 **100% Free** - No hidden costs, no credit card required
                - 🤖 **100,000+ AI Models** - Access to all Hugging Face text generation models
                - 💬 **Unlimited Conversations** - Chat as much as you want
                - 💾 **Chat History** - All your conversations saved automatically
                - ⭐ **Favorite Models** - Save your preferred models for quick access
                - ⚙️ **Advanced Settings** - Customize temperature, tokens, and more
                - 📱 **Mobile Optimized** - Works perfectly on all devices
                - 🔒 **Private & Secure** - Your data is encrypted and safe
                """)

            # Chat Tab
            with gr.Tab("💬 Chat", id=1):
                with gr.Row():
                    with gr.Column(scale=3):
                        user_display = gr.Markdown("### 👤 Guest User (Please login)")
                        chatbot = gr.Chatbot(
                            type="messages",
                            height=600,
                            label="Chat Window",
                            show_copy_button=True,
                            # NOTE(review): avatar_images normally expects image
                            # paths/URLs; an emoji string may not render — verify.
                            avatar_images=(None, "🤖"),
                            bubble_full_width=False
                        )
                        with gr.Row():
                            msg = gr.Textbox(
                                placeholder="Type your message here... (Press Enter to send)",
                                show_label=False,
                                scale=4,
                                container=False
                            )
                            send_btn = gr.Button("📤 Send", variant="primary", scale=1)
                        with gr.Row():
                            clear_btn = gr.Button("🗑️ Clear Chat", size="sm")
                            retry_btn = gr.Button("🔄 Retry", size="sm")  # no handler wired below
                            stop_btn = gr.Button("⏹️ Stop", size="sm")  # no handler wired below
                    with gr.Column(scale=1):
                        gr.Markdown("### 🎯 Model Selection")
                        model_search = gr.Textbox(
                            label="🔍 Search Models",
                            placeholder="Search by name, organization...",
                            interactive=True
                        )
                        model_category = gr.Dropdown(
                            choices=["All Models", "Popular", "Llama", "Mistral", "Phi", "Gemma", "Qwen", "Falcon"],
                            value="Popular",
                            label="Category",
                            interactive=True
                        )
                        selected_model = gr.Dropdown(
                            choices=POPULAR_MODELS,
                            value=POPULAR_MODELS[0],
                            label="🤖 Select AI Model",
                            interactive=True,
                            filterable=True
                        )
                        add_favorite = gr.Button("⭐ Add to Favorites", size="sm")
                        gr.Markdown("### ⚙️ Generation Settings")
                        system_prompt = gr.Textbox(
                            label="System Prompt",
                            placeholder="You are a helpful AI assistant...",
                            lines=3,
                            value="You are a helpful, respectful and honest AI assistant."
                        )
                        max_tokens = gr.Slider(
                            minimum=50,
                            maximum=2048,
                            value=512,
                            step=50,
                            label="Max Tokens",
                            info="Maximum length of response"
                        )
                        temperature = gr.Slider(
                            minimum=0.1,
                            maximum=2.0,
                            value=0.7,
                            step=0.1,
                            label="Temperature",
                            info="Creativity level (higher = more creative)"
                        )
                        top_p = gr.Slider(
                            minimum=0.1,
                            maximum=1.0,
                            value=0.9,
                            step=0.05,
                            label="Top P",
                            info="Nucleus sampling threshold"
                        )
                        repetition_penalty = gr.Slider(
                            minimum=1.0,
                            maximum=2.0,
                            value=1.0,
                            step=0.1,
                            label="Repetition Penalty",
                            info="Reduce repetitive text"
                        )
                        with gr.Accordion("📊 Model Info", open=False):
                            model_info = gr.Markdown("Select a model to see details")
                        logout_btn = gr.Button("🚪 Logout", variant="stop", size="sm")

            # History Tab
            with gr.Tab("📜 Chat History", id=2):
                gr.Markdown("### 💾 Your Conversation History")
                history_search = gr.Textbox(
                    label="🔍 Search History",
                    placeholder="Search in your chat history..."
                )
                with gr.Row():
                    history_model_filter = gr.Dropdown(
                        choices=["All Models"],
                        value="All Models",
                        label="Filter by Model",
                        interactive=True
                    )
                    history_limit = gr.Slider(
                        minimum=10,
                        maximum=100,
                        value=50,
                        step=10,
                        label="Number of Messages",
                        interactive=True
                    )
                load_history_btn = gr.Button("📥 Load History", variant="primary")
                history_display = gr.JSON(label="Chat History")
                clear_history_btn = gr.Button("🗑️ Clear All History", variant="stop")  # no handler wired below

            # Favorites Tab
            with gr.Tab("⭐ Favorite Models", id=3):
                gr.Markdown("### ⭐ Your Favorite AI Models")
                favorites_list = gr.Dropdown(
                    choices=[],
                    label="Saved Favorites",
                    interactive=True,
                    multiselect=False
                )
                with gr.Row():
                    load_favorite_btn = gr.Button("📂 Load Model", variant="primary")
                    remove_favorite_btn = gr.Button("❌ Remove", variant="stop")
                favorites_display = gr.Markdown("*No favorites yet. Add some from the Chat tab!*")

            # Settings Tab
            with gr.Tab("⚙️ Settings", id=4):
                gr.Markdown("### ⚙️ User Settings & Preferences")
                with gr.Row():
                    with gr.Column():
                        gr.Markdown("#### 👤 Account Information")
                        settings_username = gr.Textbox(label="Username", interactive=False)
                        settings_email = gr.Textbox(label="Email", interactive=False)
                        settings_created = gr.Textbox(label="Account Created", interactive=False)
                        settings_premium = gr.Textbox(label="Account Type", interactive=False)
                    with gr.Column():
                        gr.Markdown("#### 🎨 Preferences")
                        default_model_setting = gr.Dropdown(
                            choices=POPULAR_MODELS,
                            label="Default Model",
                            interactive=True
                        )
                        theme_setting = gr.Radio(
                            choices=["Light", "Dark", "Auto"],
                            value="Light",
                            label="Theme Preference",
                            interactive=True
                        )
                save_settings_btn = gr.Button("💾 Save Settings", variant="primary")  # no handler wired below
                settings_status = gr.Textbox(label="Status", interactive=False)
                with gr.Accordion("📊 Usage Statistics", open=False):
                    stats_display = gr.Markdown("*Login to see your statistics*")
                with gr.Accordion("❓ Help & FAQ", open=False):
                    gr.Markdown("""
                    ### Frequently Asked Questions
                    **Q: Is this really free?**
                    A: Yes! 100% free forever. No credit card, no hidden fees.
                    **Q: How many models can I use?**
                    A: You have access to 100,000+ text generation models from Hugging Face.
                    **Q: Are my chats saved?**
                    A: Yes, all your conversations are saved in your account.
                    **Q: Can I use this on mobile?**
                    A: Absolutely! This app is fully responsive and works on all devices.
                    **Q: What models are available?**
                    A: All Hugging Face text-generation models including Llama, Mistral, Phi, Gemma, Qwen, and thousands more!
                    **Q: How do I change model settings?**
                    A: Use the sliders in the Chat tab to adjust temperature, tokens, and other parameters.
                    """)

            # About Tab
            with gr.Tab("ℹ️ About", id=5):
                gr.Markdown("""
                # 🤖 AI Chatbot Hub
                ## Welcome to the Ultimate AI Chat Platform!
                ### 🌟 What is this?
                AI Chatbot Hub is a comprehensive platform that gives you **FREE** access to over **100,000 AI language models** from Hugging Face. Chat with the latest and greatest AI models, all in one place!
                ### ✨ Key Features:
                #### 🆓 Completely Free
                - No credit card required
                - No hidden costs
                - Unlimited conversations
                - Access to all models
                #### 🤖 Massive Model Library
                - **100,000+** text generation models
                - Popular models: Llama, Mistral, Phi, Gemma, Qwen
                - Constantly updated with new models
                - Easy search and filtering
                #### 💬 Advanced Chat Features
                - Real-time streaming responses
                - Multi-turn conversations
                - Context awareness
                - Custom system prompts
                #### ⚙️ Full Customization
                - Adjust temperature and creativity
                - Control response length
                - Fine-tune generation parameters
                - Save your preferences
                #### 💾 Smart Management
                - Automatic chat history
                - Favorite models
                - Search past conversations
                - Export chat data
                #### 📱 Mobile Optimized
                - Responsive design
                - Touch-friendly interface
                - Works on all devices
                - Progressive Web App ready
                #### 🔐 Secure & Private
                - Encrypted passwords
                - Secure authentication
                - Private chat history
                - Your data stays yours
                ### 🚀 Getting Started:
                1. **Create an Account** - Quick and easy signup
                2. **Choose a Model** - Browse or search 100k+ models
                3. **Start Chatting** - Type your message and get instant responses
                4. **Customize** - Adjust settings to your preference
                5. **Save Favorites** - Bookmark your favorite models
                ### 📊 Supported Model Types:
                - 🦙 **Llama** - Meta's powerful language models
                - 🌟 **Mistral** - Efficient and capable models
                - 💎 **Phi** - Microsoft's small but mighty models
                - 💠 **Gemma** - Google's open models
                - 🚀 **Qwen** - Alibaba's multilingual models
                - 🦅 **Falcon** - TII's open-source models
                - 🔥 **Mixtral** - Mixture of Experts models
                - ⚡ **And thousands more!**
                ### 🛠️ Technical Details:
                - Built with Gradio & Hugging Face
                - SQLite database for user management
                - Real-time inference via HF API
                - Responsive Material Design UI
                - Client-side and server-side validation
                ### 📝 Version: 1.0.0
                ### 👨💻 Built with: Gradio, Hugging Face, Python
                ### 📅 Last Updated: 2024
                ### 🙏 Credits:
                - Hugging Face for model hosting and API
                - Gradio for the amazing UI framework
                - The open-source AI community
                ---
                **Enjoy unlimited AI conversations! 🎉**
                """)

        # Footer
        gr.HTML("""
        <div class="footer-link">
            <p>Built with ❤️ using Gradio | <a href="https://huggingface.co/spaces/akhaliq/anycoder" target="_blank">Built with anycoder</a></p>
            <p style="margin-top: 0.5rem; font-size: 0.8rem;">
                🤖 Powered by Hugging Face | 100,000+ AI Models | Free Forever
            </p>
        </div>
        """)

        # Event Handlers
        # Login
        def handle_login(username, password):
            """Authenticate; on success store session state, jump to the
            Chat tab and personalise the greeting."""
            success, user_id, message = authenticate_user(username, password)
            if success:
                user_info = get_user_info(user_id)  # NOTE(review): fetched but unused
                return (
                    gr.update(value=message),
                    user_id,
                    username,
                    gr.update(selected=1),  # Switch to chat tab
                    gr.update(value=f"### 👤 Welcome, {username}!")
                )
            # Failed login: show the message, leave session/tab/greeting as-is.
            return gr.update(value=message), None, None, gr.update(), gr.update()

        login_btn.click(
            handle_login,
            inputs=[login_username, login_password],
            outputs=[login_status, session_user_id, session_username, main_tabs, user_display]
        )

        # Signup
        def handle_signup(username, email, password, confirm):
            """Validate the signup form, then create the account."""
            if not username or not password:
                return "Please fill in all required fields!"
            if password != confirm:
                return "Passwords do not match!"
            if len(password) < 6:
                return "Password must be at least 6 characters!"
            success, message = create_user(username, password, email)
            return message

        signup_btn.click(
            handle_signup,
            inputs=[signup_username, signup_email, signup_password, signup_confirm],
            outputs=signup_status
        )

        # Logout
        def handle_logout():
            """Clear session state and return to the login tab."""
            return (
                None,
                None,
                gr.update(selected=0),
                gr.update(value="### 👤 Guest User (Please login)")
            )

        logout_btn.click(
            handle_logout,
            outputs=[session_user_id, session_username, main_tabs, user_display]
        )

        # Chat
        def chat_response(message, history, model, user_id, max_tok, temp, top, rep, sys_prompt):
            """Thin passthrough so the UI signature maps onto chat_with_model."""
            return chat_with_model(message, history, model, user_id, max_tok, temp, top, rep, sys_prompt)

        msg.submit(
            chat_response,
            inputs=[msg, chatbot, selected_model, session_user_id, max_tokens, temperature, top_p, repetition_penalty, system_prompt],
            outputs=[chatbot, msg]
        )
        send_btn.click(
            chat_response,
            inputs=[msg, chatbot, selected_model, session_user_id, max_tokens, temperature, top_p, repetition_penalty, system_prompt],
            outputs=[chatbot, msg]
        )
        clear_btn.click(lambda: [], outputs=chatbot)

        # Model search
        def search_and_update(query, category):
            """Refresh the model dropdown from the search box / category."""
            if category == "Popular":
                return gr.update(choices=POPULAR_MODELS, value=POPULAR_MODELS[0])
            elif category == "All Models":
                models = search_models(query)
                return gr.update(choices=models, value=models[0] if models else None)
            else:
                # Category names double as search terms (e.g. "Llama").
                models = search_models(category.lower())
                return gr.update(choices=models, value=models[0] if models else None)

        model_search.change(
            search_and_update,
            inputs=[model_search, model_category],
            outputs=selected_model
        )
        model_category.change(
            search_and_update,
            inputs=[model_search, model_category],
            outputs=selected_model
        )

        # Add to favorites
        def add_to_favorites(user_id, model):
            """Append *model* to the user's stored favourites list."""
            if not user_id:
                return "Please login first!"
            user_info = get_user_info(user_id)
            favorites = user_info.get("favorite_models", [])
            if model not in favorites:
                favorites.append(model)
                update_user_preferences(user_id, favorite_models=favorites)
                return f"✅ {model} added to favorites!"
            return "ℹ️ Already in favorites!"

        add_favorite.click(
            add_to_favorites,
            inputs=[session_user_id, selected_model],
            # NOTE(review): this creates a brand-new hidden Textbox as the
            # output target, so the status message is never visible to the
            # user — probably meant to reuse an existing status component.
            outputs=gr.Textbox(label="Status", visible=False)
        )

        # Load history
        def display_history(user_id, limit):
            """Fetch the stored chat history for the JSON viewer."""
            if not user_id:
                return {"message": "Please login to view history"}
            history = load_chat_history(user_id, limit)
            return history

        load_history_btn.click(
            display_history,
            inputs=[session_user_id, history_limit],
            outputs=history_display
        )

        # Load settings
        def load_settings(user_id):
            """Populate the Settings tab from the user's stored profile."""
            if not user_id:
                return (
                    "Guest",
                    "N/A",
                    "N/A",
                    "Free",
                    gr.update(value="Please login first!")
                )
            user_info = get_user_info(user_id)
            return (
                user_info["username"],
                user_info["email"] or "Not provided",
                user_info["created_at"],
                "Premium ⭐" if user_info["is_premium"] else "Free",
                gr.update(value="")
            )

        # NOTE(review): demo.load fires once at page load, when
        # session_user_id is still None — the Settings tab therefore shows
        # guest values until this is re-triggered after login. Confirm
        # whether a post-login refresh was intended.
        demo.load(
            load_settings,
            inputs=session_user_id,
            outputs=[settings_username, settings_email, settings_created, settings_premium, settings_status]
        )
    return demo
# Launch app
if __name__ == "__main__":
    demo = build_ui()
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces (needed in containers/Spaces)
        server_port=7860,       # Hugging Face Spaces' expected port
        share=False,
        show_api=False,
        # NOTE(review): enable_monitoring is not accepted by every Gradio
        # release — confirm against the pinned gradio version.
        enable_monitoring=False
    )