# AI Code Copilot — Hugging Face Spaces app
# (header reconstructed; original lines were page-scrape residue: "Spaces: / Sleeping / Sleeping")
import gradio as gr
import os
import requests
from dotenv import load_dotenv
import nltk
from nltk.tokenize import sent_tokenize
import pandas as pd

# Fetch the NLTK sentence-tokenizer data once at startup (quiet: no console noise).
nltk.download('punkt', quiet=True)

# Load environment variables from a local .env file, then read the API key.
# NOTE: os.getenv() is an alias of os.environ.get(), so the original fallback
# lookup (`if not BLACKBOX_API_KEY: ... os.environ.get(...)`) could never
# produce a different value — it was dead code and has been removed.
load_dotenv()
BLACKBOX_API_KEY = os.getenv("BLACKBOX_API_KEY")
class CodeCopilot:
    """Conversational code assistant backed by the Blackbox chat API.

    Keeps a short rolling chat history that is folded into each prompt as
    context, and runs a lightweight keyword scan over the user's input to
    surface refactoring suggestions alongside the model's response.
    """

    def __init__(self):
        # (user_input, ai_response) pairs, oldest first.
        self.chat_history = []
        # Number of most-recent exchanges included as prompt context.
        self.context_window = 3

    def get_blackbox_response(self, prompt, max_tokens=300, temperature=0.7):
        """Send *prompt* to the Blackbox chat-completion endpoint.

        Returns the assistant message text on success, or a human-readable
        error string on failure (callers display the result either way, so
        errors are reported in-band rather than raised).
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {BLACKBOX_API_KEY}",
        }
        try:
            response = requests.post(
                "https://developer.blackbox.ai/api/v1/chat/completions",
                headers=headers,
                json={
                    "messages": [{"role": "user", "content": prompt}],
                    "max_tokens": max_tokens,
                    "temperature": temperature,
                    "model": "blackbox-code",
                },
                timeout=30,
            )
            response.raise_for_status()
            return response.json()["choices"][0]["message"]["content"]
        except requests.exceptions.RequestException as e:
            return f"API Error: {str(e)}"
        except Exception as e:
            # Covers malformed JSON or an unexpected response shape.
            return f"Processing Error: {str(e)}"

    def analyze_code_patterns(self, text):
        """Count rough code-construct markers in *text*.

        Tokenization is sentence-based (NLTK), so counts are heuristic when
        the input is raw source code rather than prose.
        """
        sentences = sent_tokenize(text)
        patterns = {
            'function_def': sum(1 for s in sentences if 'def ' in s),
            'class_def': sum(1 for s in sentences if 'class ' in s),
            'loop': sum(
                1 for s in sentences
                if any(word in s for word in ('for ', 'while ', 'loop'))
            ),
            # BUG FIX: the original tested 'else ' (trailing space), which never
            # matches the common "else:" spelling; match both forms now.
            'conditional': sum(
                1 for s in sentences
                if any(word in s for word in ('if ', 'else:', 'else ', 'elif '))
            ),
        }
        return patterns

    def generate_suggestions(self, patterns):
        """Map pattern counts from analyze_code_patterns() to refactoring tips.

        Returns a newline-joined string of tips, or a fixed "no suggestions"
        message when no threshold is exceeded.
        """
        # NOTE(review): the original tip prefixes were mojibake ("π", "β");
        # restored to plausible emoji — confirm against the intended originals.
        suggestions = []
        if patterns['function_def'] > 3:
            suggestions.append("🔄 Consider breaking down into smaller functions or using a class structure.")
        if patterns['loop'] > 2:
            suggestions.append("🔄 You might benefit from list comprehensions or map/filter functions.")
        if patterns['conditional'] > 3:
            suggestions.append("⚡ Complex conditionals might be simplified using polymorphism or strategy pattern.")
        return "\n".join(suggestions) if suggestions else "No specific suggestions at this time."

    def process_input(self, user_input):
        """Analyze *user_input*, query the model, and update chat history.

        Returns a (response_text, pattern_counts, suggestions_text) tuple.
        """
        patterns = self.analyze_code_patterns(user_input)

        # BUG FIX: only build the context section when history exists; the
        # original always sent an empty "Previous conversation:" header on
        # the first turn.
        recent = self.chat_history[-self.context_window:]
        if recent:
            context = "\nPrevious conversation:\n" + "\n".join(
                f"User: {u}\nAI: {a}" for u, a in recent
            )
        else:
            context = ""

        prompt = f"""You are an expert coding assistant. Analyze this code and provide helpful suggestions:
{context}
New input:
{user_input}
"""
        response = self.get_blackbox_response(prompt)
        suggestions = self.generate_suggestions(patterns)

        self.chat_history.append((user_input, response))
        return response, patterns, suggestions
# Single shared copilot instance backing the UI.
copilot = CodeCopilot()

# --- Gradio interface ----------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft(), title="AI Code Copilot") as demo:
    # NOTE(review): the original heading contained mojibake ("π€"); restored
    # to a robot emoji — confirm against the intended branding.
    gr.Markdown("""<h1 style="text-align: center">🤖 AI Code Copilot</h1>""")

    with gr.Row():
        with gr.Column(scale=3):
            input_text = gr.Textbox(
                label="Your Code or Question",
                placeholder="Paste your code or ask a question...",
                lines=7,
            )
            submit_btn = gr.Button("Generate", variant="primary")
        with gr.Column(scale=7):
            with gr.Tab("Assistant Response"):
                output_text = gr.Markdown()
            with gr.Tab("Suggestions"):
                suggestions = gr.Markdown()
            with gr.Tab("Pattern Analysis"):
                pattern_display = gr.Dataframe(
                    headers=["Pattern", "Count"],
                    datatype=["str", "number"],
                    interactive=False,
                )

    def process_input(user_input):
        """UI adapter: run the copilot and shape the pattern dict for display."""
        response, patterns, sugg = copilot.process_input(user_input)
        pattern_df = pd.DataFrame({
            "Pattern": list(patterns.keys()),
            "Count": list(patterns.values()),
        })
        return response, sugg, pattern_df

    # Wire both the button click and textbox Enter-submit to the same handler
    # (deduplicated from two identical event registrations).
    handler_kwargs = dict(
        fn=process_input,
        inputs=input_text,
        outputs=[output_text, suggestions, pattern_display],
    )
    submit_btn.click(**handler_kwargs)
    input_text.submit(**handler_kwargs)

if __name__ == "__main__":
    demo.launch()