# AI Code Copilot — Gradio app (Hugging Face Space).
import gradio as gr
import os
import requests
from dotenv import load_dotenv
import nltk
from nltk.tokenize import sent_tokenize
import pandas as pd
# Ensure the NLTK sentence tokenizer model is present (no-op when cached).
nltk.download('punkt', quiet=True)

# Load variables from a local .env file (if any) into the process
# environment, then read the API key.  os.getenv already consults
# os.environ, so a second os.environ.get fallback is redundant.
load_dotenv()
BLACKBOX_API_KEY = os.getenv("BLACKBOX_API_KEY")
class CodeCopilot:
    """Chat-style coding assistant backed by the Blackbox completion API.

    Keeps a rolling (user, assistant) history and augments each model
    response with lightweight heuristic analysis of the submitted text.
    """

    def __init__(self):
        # (user_input, ai_response) pairs, newest last.  Grows unbounded,
        # but only the last `context_window` entries enter each prompt.
        self.chat_history = []
        self.context_window = 3

    def get_blackbox_response(self, prompt, max_tokens=300, temperature=0.7):
        """Send *prompt* to the Blackbox chat-completions endpoint.

        Returns the model's reply text, or a human-readable error string on
        failure — callers render the result either way, so this never raises.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {BLACKBOX_API_KEY}",
        }
        try:
            response = requests.post(
                "https://developer.blackbox.ai/api/v1/chat/completions",
                headers=headers,
                json={
                    "messages": [{"role": "user", "content": prompt}],
                    "max_tokens": max_tokens,
                    "temperature": temperature,
                    "model": "blackbox-code",
                },
                timeout=30,
            )
            response.raise_for_status()
            return response.json()["choices"][0]["message"]["content"]
        except requests.exceptions.RequestException as e:
            return f"API Error: {str(e)}"
        except Exception as e:
            # Malformed JSON or an unexpected response shape.
            return f"Processing Error: {str(e)}"

    def analyze_code_patterns(self, text):
        """Count rough code-construct markers per sentence of *text*.

        Returns a dict with keys 'function_def', 'class_def', 'loop' and
        'conditional'.  Substring matching is intentionally loose — this is
        a heuristic over free-form input, not a parser.
        """
        sentences = sent_tokenize(text)
        patterns = {
            'function_def': sum(1 for s in sentences if 'def ' in s),
            'class_def': sum(1 for s in sentences if 'class ' in s),
            'loop': sum(1 for s in sentences
                        if any(word in s for word in ['for ', 'while ', 'loop'])),
            'conditional': sum(1 for s in sentences
                               if any(word in s for word in ['if ', 'else ', 'elif '])),
        }
        return patterns

    def generate_suggestions(self, patterns):
        """Turn a pattern-count dict into a newline-joined advice string.

        Thresholds are heuristic.  NOTE: the bullet characters in the
        original source were mojibake ("π"/"β"); replaced with emoji.
        """
        suggestions = []
        if patterns['function_def'] > 3:
            suggestions.append("💡 Consider breaking down into smaller functions or using a class structure.")
        if patterns['loop'] > 2:
            suggestions.append("🔁 You might benefit from list comprehensions or map/filter functions.")
        if patterns['conditional'] > 3:
            suggestions.append("⚠️ Complex conditionals might be simplified using polymorphism or strategy pattern.")
        return "\n".join(suggestions) if suggestions else "No specific suggestions at this time."

    def process_input(self, user_input):
        """Analyze *user_input*, query the model, and record the exchange.

        Returns (model_response, pattern_counts, suggestions_text).
        """
        patterns = self.analyze_code_patterns(user_input)

        # Only include the conversation header when there is history to show;
        # the original emitted a dangling "Previous conversation:" on the
        # very first turn.
        if self.chat_history:
            recent = self.chat_history[-self.context_window:]
            context = "\nPrevious conversation:\n" + "\n".join(
                f"User: {u}\nAI: {a}" for u, a in recent)
        else:
            context = ""

        prompt = f"""You are an expert coding assistant. Analyze this code and provide helpful suggestions:
{context}
New input:
{user_input}
"""
        response = self.get_blackbox_response(prompt)
        suggestions = self.generate_suggestions(patterns)

        self.chat_history.append((user_input, response))
        return response, patterns, suggestions
# Single shared assistant instance reused across all requests.
copilot = CodeCopilot()

# --- Gradio interface ---
with gr.Blocks(theme=gr.themes.Soft(), title="AI Code Copilot") as demo:
    # Header text in the original source was mojibake ("π€"); restored.
    gr.Markdown("""<h1 style="text-align: center">🤖 AI Code Copilot</h1>""")

    with gr.Row():
        with gr.Column(scale=3):
            input_text = gr.Textbox(
                label="Your Code or Question",
                placeholder="Paste your code or ask a question...",
                lines=7,
            )
            submit_btn = gr.Button("Generate", variant="primary")
        with gr.Column(scale=7):
            with gr.Tab("Assistant Response"):
                output_text = gr.Markdown()
            with gr.Tab("Suggestions"):
                suggestions = gr.Markdown()
            with gr.Tab("Pattern Analysis"):
                pattern_display = gr.Dataframe(
                    headers=["Pattern", "Count"],
                    datatype=["str", "number"],
                    interactive=False,
                )

    def handle_submit(user_input):
        """Run the copilot and shape its output for the three panels."""
        response, patterns, sugg = copilot.process_input(user_input)
        pattern_df = pd.DataFrame({
            "Pattern": list(patterns.keys()),
            "Count": list(patterns.values()),
        })
        return response, sugg, pattern_df

    # Button click and textbox Enter share one handler; wire both events
    # from a single configuration instead of duplicating it.
    for register in (submit_btn.click, input_text.submit):
        register(
            fn=handle_submit,
            inputs=input_text,
            outputs=[output_text, suggestions, pattern_display],
        )

if __name__ == "__main__":
    demo.launch()