# NOTE(review): the three lines below were Hugging Face Spaces page-header
# residue ("Spaces:" / "Configuration error" x2) captured when this source was
# scraped; they are preserved here as comments so the file remains valid Python.
import ast
import json
import re
import threading
import time
import warnings
from typing import Any, Dict, List

import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

warnings.filterwarnings("ignore")
# --- F-1 model configuration -------------------------------------------------
# Primary fine-tuned checkpoint, plus a small public model used as a fallback
# when the primary cannot be downloaded or loaded.
MODEL_NAME = "Sheikh-F1/F1"
FALLBACK_MODEL = "microsoft/DialoGPT-medium"
class F1MultilingualCoder:
    """F-1 multilingual coding assistant.

    Generates Python code from prompts written in English, Bengali script, or
    Banglish (Romanized Bengali). Uses a causal LM (with a fallback model) and
    serves canned templates for a few common beginner requests.
    """

    # Maps both UI dropdown labels and internal codes to internal codes, so
    # callers may pass either form ("English" or "en"); lookups are lowercase.
    _LANG_ALIASES = {
        "english": "en",
        "bengali": "bn",
        "banglish": "banglish",
        "en": "en",
        "bn": "bn",
    }

    def __init__(self):
        self.model = None
        self.tokenizer = None
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.model_loaded = False
        # Canned answers for common beginner prompts. The code body is the
        # same for every language because only the prompt language differs.
        self.code_templates = {
            "factorial": {
                "en": "def factorial(n):\n    if n <= 1:\n        return 1\n    return n * factorial(n-1)",
                "bn": "def factorial(n):\n    if n <= 1:\n        return 1\n    return n * factorial(n-1)",
                "banglish": "def factorial(n):\n    if n <= 1:\n        return 1\n    return n * factorial(n-1)"
            },
            "fibonacci": {
                "en": "def fibonacci(n):\n    if n <= 1:\n        return n\n    return fibonacci(n-1) + fibonacci(n-2)",
                "bn": "def fibonacci(n):\n    if n <= 1:\n        return n\n    return fibonacci(n-1) + fibonacci(n-2)",
                "banglish": "def fibonacci(n):\n    if n <= 1:\n        return n\n    return fibonacci(n-1) + fibonacci(n-2)"
            },
            "binary_search": {
                "en": "def binary_search(arr, target):\n    left, right = 0, len(arr) - 1\n    while left <= right:\n        mid = (left + right) // 2\n        if arr[mid] == target:\n            return mid\n        elif arr[mid] < target:\n            left = mid + 1\n        else:\n            right = mid - 1\n    return -1",
                "bn": "def binary_search(arr, target):\n    left, right = 0, len(arr) - 1\n    while left <= right:\n        mid = (left + right) // 2\n        if arr[mid] == target:\n            return mid\n        elif arr[mid] < target:\n            left = mid + 1\n        else:\n            right = mid - 1\n    return -1",
                "banglish": "def binary_search(arr, target):\n    left, right = 0, len(arr) - 1\n    while left <= right:\n        mid = (left + right) // 2\n        if arr[mid] == target:\n            return mid\n        elif arr[mid] < target:\n            left = mid + 1\n        else:\n            right = mid - 1\n    return -1"
            }
        }

    def _try_load(self, model_name: str) -> None:
        """Load one tokenizer/model pair onto ``self.device`` (raises on failure)."""
        self.tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left")
        self.model = AutoModelForCausalLM.from_pretrained(model_name).to(self.device)
        if self.tokenizer.pad_token is None:
            # Decoder-only tokenizers often ship without a pad token.
            self.tokenizer.pad_token = self.tokenizer.eos_token
        self.model_loaded = True

    def load_model(self) -> bool:
        """Load the F-1 model, falling back to FALLBACK_MODEL on failure.

        Returns:
            True when either model loaded, False when both failed.
        """
        try:
            print("Loading F-1 model...")
            self._try_load(MODEL_NAME)
            print("F-1 model loaded successfully!")
            return True
        except Exception as e:  # download/auth/OOM — any failure triggers fallback
            print(f"F-1 model failed to load: {e}")
            print("Loading fallback model...")
            try:
                self._try_load(FALLBACK_MODEL)
                print("Fallback model loaded successfully!")
                return True
            except Exception as e2:
                print(f"Fallback model also failed: {e2}")
                return False

    def detect_language(self, text: str) -> str:
        """Classify *text* as ``"bn"``, ``"banglish"``, or ``"en"``.

        Bug fix: the original built ``set('ঀ-')`` — literally the two
        characters {'ঀ', '-'} — so any hyphen flagged text as Bengali and real
        Bengali letters were mostly missed. We now range-check the Bengali
        Unicode block U+0980..U+09FF. 'function' was also dropped from the
        Banglish markers: it made every English prompt mentioning "function"
        classify as Banglish.
        """
        if any('\u0980' <= ch <= '\u09ff' for ch in text):
            return "bn"
        banglish_markers = ('ekta', 'jeta', 'kore', 'banaben', 'likho')
        if any(word in text.lower() for word in banglish_markers):
            return "banglish"
        return "en"

    def generate_code(self, prompt: str, language: str = "auto", max_length: int = 200, temperature: float = 0.7) -> str:
        """Generate code for *prompt* using the loaded model.

        Args:
            prompt: the user's coding request in any supported language.
            language: "auto" to detect, or a language label/code
                ("English"/"en", "Bengali"/"bn", "Banglish"/"banglish").
            max_length: maximum number of new tokens to generate.
            temperature: sampling temperature.

        Returns:
            The generated code, a canned template for simple prompts, or an
            error message string.
        """
        if not self.model_loaded:
            return "Model not loaded. Please wait for initialization."
        if language == "auto":
            detected_lang = self.detect_language(prompt)
        else:
            # Accept UI labels as well as internal codes (the original passed
            # labels straight through and silently fell back to English).
            detected_lang = self._LANG_ALIASES.get(language.lower(), "en")
        # Serve canned templates BEFORE paying for model generation — the
        # original generated first and then threw the result away.
        if self._is_simple_prompt(prompt):
            template_response = self._get_template_response(prompt, detected_lang)
            if template_response:
                return template_response
        lang_prefixes = {
            "en": "English Python code: ",
            "bn": "বাংলা স্ক্রিপ্ট Python কোড: ",
            "banglish": "Banglish Python কোড: "
        }
        full_prompt = lang_prefixes.get(detected_lang, "") + prompt
        try:
            inputs = self.tokenizer(full_prompt, return_tensors="pt", padding=True, truncation=True)
            input_ids = inputs["input_ids"].to(self.device)
            attention_mask = inputs["attention_mask"].to(self.device)
            with torch.no_grad():
                outputs = self.model.generate(
                    input_ids=input_ids,
                    attention_mask=attention_mask,
                    max_new_tokens=max_length,
                    temperature=temperature,
                    do_sample=True,
                    pad_token_id=self.tokenizer.eos_token_id,
                    eos_token_id=self.tokenizer.eos_token_id,
                    repetition_penalty=1.1,
                    top_p=0.9,
                    top_k=50
                )
            response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
            # Strip the echoed prompt when the model repeats it.
            if response.startswith(full_prompt):
                response = response[len(full_prompt):].strip()
            return response
        except Exception as e:
            return f"Error generating response: {str(e)}"

    def _is_simple_prompt(self, prompt: str) -> bool:
        """Return True when *prompt* mentions a pattern we have a template for."""
        simple_patterns = ('factorial', 'fibonacci', 'binary search', 'sort', 'search')
        return any(pattern in prompt.lower() for pattern in simple_patterns)

    def _get_template_response(self, prompt: str, lang: str) -> str:
        """Return a canned template matching *prompt*, or None.

        Falls back to the English template when *lang* has no entry.
        """
        template_map = {
            'factorial': 'factorial',
            'fibonacci': 'fibonacci',
            'binary search': 'binary_search'
        }
        lowered = prompt.lower()
        for pattern, template_key in template_map.items():
            if pattern in lowered and template_key in self.code_templates:
                return self.code_templates[template_key].get(lang, self.code_templates[template_key]['en'])
        return None

    def get_examples(self, language: str) -> List[str]:
        """Return example prompts for *language* ("en" fallback for unknowns)."""
        examples = {
            "en": [
                "Write a Python function to calculate factorial",
                "Create a binary search algorithm in Python",
                "Write a function to reverse a string",
                "Create a simple calculator in Python",
                "Write a function to check if a number is prime"
            ],
            "bn": [
                "একটি Python ফাংশন লিখুন যা factorial গণনা করে",
                "Python এ একটি binary search algorithm তৈরি করুন",
                "একটি স্ট্রিং রিভার্স করার ফাংশন লিখুন",
                "Python এ একটি সাধারণ ক্যালকুলেটর তৈরি করুন",
                "একটি সংখ্যা prime কিনা চেক করার ফাংশন লিখুন"
            ],
            "banglish": [
                "ekta Python function likho jeta factorial calculate kore",
                "Python e ekta binary search algorithm banaben",
                "ekta string reverse korar function likho",
                "Python e ekta simple calculator banaben",
                "ekta number prime kina check korar function likho"
            ]
        }
        return examples.get(language, examples["en"])
# Single shared assistant instance used by all Gradio callbacks.
f1_coder = F1MultilingualCoder()
# Legacy alias kept for backward compatibility: the original used a chained
# assignment ``f1_coder = F1Coder = F1MultilingualCoder()`` whose second name
# is referenced nowhere in this file.
F1Coder = f1_coder


def load_model_async():
    """Load the model off the main thread so the UI can come up immediately."""
    time.sleep(2)  # small delay so the interface renders before the download starts
    f1_coder.load_model()


# daemon=True: a stuck model download must not block interpreter shutdown.
model_thread = threading.Thread(target=load_model_async, daemon=True)
model_thread.start()
def generate_response(prompt_text, lang_choice, max_len, temp):
    """Gradio callback: validate input and delegate to the F-1 coder.

    Bug fix: the dropdown supplies labels ("auto"/"English"/"Bengali"/
    "Banglish") while the model layer expects codes ("auto"/"en"/"bn"/
    "banglish"); the original passed labels straight through, so a manual
    language selection silently fell back to the English behavior. The map
    below is a no-op for "auto" and already-coded values.
    """
    if not prompt_text.strip():
        return "Please enter a coding request!"
    if not f1_coder.model_loaded:
        return "Loading F-1 model... Please wait a moment and try again."
    lang_codes = {"English": "en", "Bengali": "bn", "Banglish": "banglish"}
    lang = lang_codes.get(lang_choice, lang_choice)
    return f1_coder.generate_code(prompt_text, lang, max_len, temp)
def update_examples(lang):
    """Return the example prompts for *lang* as a bulleted text block."""
    bullets = (f"• {example}" for example in f1_coder.get_examples(lang))
    return "\n".join(bullets)
# ---------------------------------------------------------------------------
# Gradio interface
# ---------------------------------------------------------------------------
with gr.Blocks(title="F-1: Multilingual Agentic Coding Assistant", theme=gr.themes.Soft()) as demo:
    gr.Markdown("""
    # 🤖 F-1: Multilingual Agentic Coding Assistant
    ## মাল্টিলিঙ্গুয়াল এজেন্টিক কোডিং অ্যাসিস্ট্যান্ট
    **A revolutionary coding assistant that works in English, Bengali script, and Banglish (Romanized Bengali)**
    Created by **Likhon Sheikh** for the Bangladeshi developer community 🇧🇩
    """)
    # Bug fix: the original rendered a static ``<span id="model-status">`` in
    # the Markdown above and later called ``demo.load(..., outputs=[gr.HTML(...)])``
    # with a component created inline — a component that was never placed in
    # the layout, so the status could never render or update. Use a real
    # component in the layout instead.
    model_status = gr.Markdown("**Model Status**: ⏳ Loading F-1 Model...")

    with gr.Row():
        with gr.Column(scale=1):
            language = gr.Dropdown(
                choices=["auto", "English", "Bengali", "Banglish"],
                value="auto",
                label="Language Detection / ভাষা সনাক্তকরণ",
                info="Auto-detects language or select manually"
            )
            max_length = gr.Slider(
                minimum=50,
                maximum=500,
                value=200,
                step=50,
                label="Response Length / উত্তরের দৈর্ঘ্য"
            )
            temperature = gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.7,
                step=0.1,
                label="Creativity (Temperature) / সৃজনশীলতা"
            )
            examples_btn = gr.Button("💡 Show Examples / উদাহরণ দেখান", variant="secondary")
        with gr.Column(scale=2):
            prompt = gr.Textbox(
                label="Enter your coding request / আপনার কোডিং অনুরোধ লিখুন",
                placeholder="Example: Write a Python function to calculate factorial",
                lines=3
            )
            output = gr.Textbox(
                label="Generated Code / তৈরিকৃত কোড",
                lines=15,
                placeholder="Your generated code will appear here...",
                info="Copy and paste into your Python environment"
            )
            generate_btn = gr.Button("🚀 Generate Code / কোড তৈরি করুন", variant="primary")

    # Example section
    with gr.Row():
        with gr.Column():
            gr.Markdown("### 💡 Example Prompts / উদাহরণ অনুরোধ")
            example_display = gr.Textbox(label="", value="", lines=8)

    # Wire up the interface.
    language.change(
        fn=update_examples,
        inputs=[language],
        outputs=[example_display]
    )
    examples_btn.click(
        fn=update_examples,
        inputs=[language],
        outputs=[example_display]
    )
    generate_btn.click(
        fn=generate_response,
        inputs=[prompt, language, max_length, temperature],
        outputs=[output]
    )

    # Populate the example list and model status when the page loads.
    demo.load(
        fn=update_examples,
        inputs=[language],
        outputs=[example_display]
    )
    demo.load(
        fn=lambda: ("**Model Status**: ✅ F-1 Model Ready!"
                    if f1_coder.model_loaded
                    else "**Model Status**: ⏳ Loading F-1 Model..."),
        outputs=[model_status]
    )

    gr.Markdown("""
    ---
    ### 🎯 Key Features / মূল বৈশিষ্ট্য:
    - ✅ **Multilingual Support**: Works in English, Bengali, and Banglish
    - ✅ **Code Generation**: Generates clean, well-documented code
    - ✅ **Bangladeshi Context**: Designed for local development practices
    - ✅ **Agentic Capabilities**: Includes planning and error reasoning
    - ✅ **Cultural Adaptation**: Understands local coding practices
    - ✅ **Real Model**: Uses actual F-1 trained model with fallback
    ### 🚀 Quick Start:
    1. Select language detection (Auto recommended)
    2. Enter your coding request
    3. Adjust settings if needed
    4. Click "Generate Code"
    5. Copy the generated code to your Python environment
    ### 💻 Model Information:
    - **Base Model**: microsoft/DialoGPT-medium
    - **Training**: QLoRA fine-tuned on multilingual coding data
    - **Languages**: English, Bengali Script, Banglish
    - **Parameters**: 355M
    - **Author**: Likhon Sheikh
    **Made with ❤️ by Likhon Sheikh for the Bangladeshi Developer Community**
    """)
def update_status():
    """Block until the model finishes loading, then return a ready message.

    NOTE(review): a Thread target's return value is discarded, so the string
    returned here is never displayed anywhere — the thread merely waits until
    ``model_loaded`` flips. Kept for compatibility with the original script.
    (The original's ``while True / if loaded: break / sleep`` is equivalent to
    the condition below; the duplicate mid-file ``import time`` was removed in
    favor of the top-of-file import.)
    """
    while not f1_coder.model_loaded:
        time.sleep(1)
    return "✅ F-1 Model Ready!"


# Daemon thread: must not keep the process alive after the UI exits.
threading.Thread(target=update_status, daemon=True).start()
if __name__ == "__main__":
    # ``concurrency_count`` is the Gradio 3.x kwarg; Gradio 4 removed it in
    # favor of ``default_concurrency_limit``, which raises TypeError here.
    try:
        demo.queue(concurrency_count=3)
    except TypeError:
        demo.queue(default_concurrency_limit=3)
    # 0.0.0.0 so the app is reachable inside the Spaces container.
    demo.launch(server_name="0.0.0.0", server_port=7860, show_error=True)