Spaces:
Sleeping
Sleeping
File size: 5,634 Bytes
dfdbcb5 f29dc1a 1f0a7ed f29dc1a 1f0a7ed bea0568 1f0a7ed dfdbcb5 f29dc1a 1f0a7ed bea0568 dfdbcb5 1f0a7ed bea0568 1f0a7ed bea0568 1f0a7ed bea0568 1f0a7ed 4ae9573 1f0a7ed bea0568 737136e bea0568 1f0a7ed bea0568 1f0a7ed bea0568 f947381 1f0a7ed bea0568 1f0a7ed f947381 1f0a7ed bea0568 1f0a7ed bea0568 1f0a7ed bea0568 1f0a7ed f29dc1a bea0568 1f0a7ed f947381 bea0568 1f0a7ed f947381 bea0568 1f0a7ed bea0568 1f0a7ed bea0568 f947381 1f0a7ed dfdbcb5 1f0a7ed 9715ab4 f947381 9715ab4 1f0a7ed 9715ab4 1f0a7ed 9715ab4 bea0568 1f0a7ed bea0568 f947381 1f0a7ed bea0568 1f0a7ed bea0568 f947381 dfdbcb5 1f0a7ed |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 |
import gradio as gr
import os
import requests
from dotenv import load_dotenv
import nltk
from nltk.tokenize import sent_tokenize
import pandas as pd
# Initialize NLTK: fetch the Punkt sentence tokenizer once at startup
# (no-op if already cached locally).
nltk.download('punkt', quiet=True)

# Load environment variables from a local .env file (if present), then
# read the API key. os.getenv already consults os.environ, so the old
# second lookup via os.environ.get was redundant and has been removed.
load_dotenv()
BLACKBOX_API_KEY = os.getenv("BLACKBOX_API_KEY")
class CodeCopilot:
    """Stateful code-assistant backend.

    Combines a remote Blackbox chat-completions call with lightweight
    local heuristics over the user's input, and keeps a running chat
    history so recent exchanges can be replayed as prompt context.
    """

    def __init__(self):
        # (user_input, ai_response) pairs, oldest first.
        self.chat_history = []
        # How many most-recent exchanges to include as prompt context.
        self.context_window = 3

    def get_blackbox_response(self, prompt, max_tokens=300, temperature=0.7):
        """Send *prompt* to the Blackbox chat-completions API.

        Returns the assistant's reply text on success, or a
        human-readable error string on failure (the UI renders the
        return value directly, so errors are reported in-band rather
        than raised).
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {BLACKBOX_API_KEY}"
        }
        try:
            response = requests.post(
                "https://api.blackbox.ai/chat/completions",
                headers=headers,
                json={
                    "messages": [{"role": "user", "content": prompt}],
                    "max_tokens": max_tokens,
                    "temperature": temperature,
                    "model": "blackboxai/openai/gpt-4"
                },
                timeout=30
            )
            response.raise_for_status()
            return response.json()["choices"][0]["message"]["content"]
        except requests.exceptions.RequestException as e:
            return f"API Error: {str(e)}"
        except Exception as e:
            # Covers malformed JSON or an unexpected response shape
            # (KeyError/IndexError on the choices lookup above).
            return f"Processing Error: {str(e)}"

    def analyze_code_patterns(self, text):
        """Count rough code-construct markers in *text*.

        Splits *text* with NLTK sentence tokenization and applies
        substring checks per sentence, so counts are heuristic (e.g.
        'class ' also matches inside ordinary prose).

        Returns a dict mapping pattern name -> occurrence count.
        """
        sentences = sent_tokenize(text)
        patterns = {
            'function_def': sum(1 for s in sentences if 'def ' in s),
            'class_def': sum(1 for s in sentences if 'class ' in s),
            'loop': sum(1 for s in sentences if any(word in s for word in ['for ', 'while ', 'loop'])),
            'conditional': sum(1 for s in sentences if any(word in s for word in ['if ', 'else ', 'elif ']))
        }
        return patterns

    def generate_suggestions(self, patterns):
        """Map pattern counts to canned refactoring suggestions.

        NOTE(review): the leading 'π'/'β' characters in the suggestion
        strings look like emoji mojibake from the original source —
        confirm the intended glyphs; preserved as-is here.
        """
        suggestions = []
        if patterns['function_def'] > 3:
            suggestions.append("π Consider breaking down into smaller functions or using a class structure.")
        if patterns['loop'] > 2:
            suggestions.append("π You might benefit from list comprehensions or map/filter functions.")
        if patterns['conditional'] > 3:
            suggestions.append("β Complex conditionals might be simplified using polymorphism or strategy pattern.")
        return "\n".join(suggestions) if suggestions else "No specific suggestions at this time."

    def process_input(self, user_input):
        """Analyze *user_input*, query the API with recent context, and
        record the exchange in chat_history.

        Returns a (api_response, patterns_dict, suggestions_text) tuple.
        """
        patterns = self.analyze_code_patterns(user_input)
        # Fix: only emit the "Previous conversation" section when there
        # actually is history, so the very first prompt isn't prefixed
        # with a dangling empty header.
        recent = self.chat_history[-self.context_window:]
        context = ""
        if recent:
            context = "\nPrevious conversation:\n" + "\n".join(
                f"User: {u}\nAI: {a}" for u, a in recent)
        prompt = f"""You are an expert coding assistant. Analyze this code and provide helpful suggestions:
{context}
New input:
{user_input}
"""
        response = self.get_blackbox_response(prompt)
        suggestions = self.generate_suggestions(patterns)
        self.chat_history.append((user_input, response))
        return response, patterns, suggestions
# Single shared assistant instance; the Gradio callback closes over it,
# so chat history persists across interactions within this process.
copilot = CodeCopilot()

# Gradio interface.
# NOTE(review): 'π€' / 'π' below look like emoji mojibake from the
# original source — confirm intended glyphs; preserved byte-for-byte.
with gr.Blocks(theme=gr.themes.Soft(), title="π€ AI Code Copilot") as demo:
    # Header
    gr.Markdown(
        """
<div style='text-align: center; margin-bottom: 1rem;'>
<h1>π€ AI Code Copilot</h1>
<p>Your interactive assistant for code suggestions and analysis</p>
</div>
"""
    )
    # Main layout: input on the left, outputs on the right.
    with gr.Row():
        with gr.Column(scale=3, min_width=300):
            input_text = gr.Textbox(
                label="Enter your code or question",
                placeholder="Paste code snippets or ask a coding question...",
                lines=10,
                interactive=True
            )
            submit_btn = gr.Button("π Generate", variant="primary")
        with gr.Column(scale=5, min_width=500):
            # Assistant reply, heuristic suggestions, and pattern counts.
            gr.Markdown("**Assistant Response:**")
            output_text = gr.Markdown()
            gr.Markdown("**Suggestions:**")
            suggestions = gr.Markdown()
            gr.Markdown("**Pattern Analysis:**")
            pattern_display = gr.Dataframe(
                headers=["Pattern", "Count"],
                datatype=["str", "number"],
                interactive=False,
                label="Detected code patterns"
            )

    # Shared handler for both the button click and textbox Enter.
    def process_input(user_input):
        """Run the copilot and shape its result for the three widgets."""
        # Fix: skip the slow (and potentially billable) remote API call
        # when the input is empty or whitespace-only.
        if not user_input or not user_input.strip():
            empty_df = pd.DataFrame({"Pattern": [], "Count": []})
            return "Please enter some code or a question first.", "", empty_df
        response, patterns, sugg = copilot.process_input(user_input)
        pattern_df = pd.DataFrame({
            "Pattern": list(patterns.keys()),
            "Count": list(patterns.values())
        })
        return response, sugg, pattern_df

    submit_btn.click(
        fn=process_input,
        inputs=input_text,
        outputs=[output_text, suggestions, pattern_display]
    )
    input_text.submit(
        fn=process_input,
        inputs=input_text,
        outputs=[output_text, suggestions, pattern_display]
    )

if __name__ == "__main__":
    demo.launch()
|