# NOTE(review): the lines below are scrape residue from the hosting page
# (Spaces status header, git blame hashes, and a line-number gutter), not
# part of the program. Commented out so the file parses as Python.
# Spaces: Sleeping / Sleeping — File size: 5,751 Bytes
# dfdbcb5 f29dc1a 1f0a7ed ... (commit-hash blame column from the scraped page)
# 1 2 3 4 ... 164 (line-number gutter from the scraped page)
import gradio as gr
import os
import requests
from dotenv import load_dotenv
import nltk
from nltk.tokenize import sent_tokenize
import pandas as pd
import ast
# Initialize NLTK: fetch the 'punkt' sentence-tokenizer data quietly
# (needed by nltk.tokenize.sent_tokenize imported above).
nltk.download('punkt', quiet=True)

# Load environment variables from a local .env file, then read the API key.
# os.getenv() already reads os.environ, so the original's second
# os.environ.get('BLACKBOX_API_KEY') fallback was redundant and is removed.
load_dotenv()
BLACKBOX_API_KEY = os.getenv("BLACKBOX_API_KEY")
class CodeAnalyzer(ast.NodeVisitor):
    """Collect simple structural metrics from Python source via its AST.

    Counts function definitions, loops and conditionals, and tracks the
    maximum nesting depth of loop/if blocks.
    """

    def __init__(self):
        self.func_count = 0     # number of def / async def statements
        self.loop_count = 0     # for / async for / while loops
        self.cond_count = 0     # `if` statements (each `elif` is its own If node)
        self.max_depth = 0      # deepest loop/conditional nesting observed
        self.current_depth = 0  # running depth while walking the tree

    def visit_FunctionDef(self, node):
        self.func_count += 1
        self.generic_visit(node)

    # FIX: also count `async def` — the original silently ignored async code.
    def visit_AsyncFunctionDef(self, node):
        self.func_count += 1
        self.generic_visit(node)

    def visit_For(self, node):
        self.loop_count += 1
        self._enter_block(node)

    # FIX: also count `async for` loops.
    def visit_AsyncFor(self, node):
        self.loop_count += 1
        self._enter_block(node)

    def visit_While(self, node):
        self.loop_count += 1
        self._enter_block(node)

    def visit_If(self, node):
        self.cond_count += 1
        self._enter_block(node)

    def _enter_block(self, node):
        # Depth is tracked only across loop/conditional blocks; function
        # bodies do not add a level (matches original behavior).
        self.current_depth += 1
        self.max_depth = max(self.max_depth, self.current_depth)
        self.generic_visit(node)
        self.current_depth -= 1

    def analyze(self, code_str):
        """Parse `code_str` and return a metrics dict.

        Unparseable input yields all-zero metrics: the SyntaxError is
        swallowed on purpose because callers pass free-form questions
        as well as code. FIX: counters are reset on every call so a
        reused instance no longer accumulates stale counts.
        """
        self.func_count = self.loop_count = self.cond_count = 0
        self.max_depth = self.current_depth = 0
        try:
            self.visit(ast.parse(code_str))
        except SyntaxError:
            pass
        return {
            'function_def': self.func_count,
            'loop': self.loop_count,
            'conditional': self.cond_count,
            'max_depth': self.max_depth,
        }
class CodeCopilot:
    """Chat-style code assistant backed by the Blackbox completions API.

    Keeps a rolling chat history; the last `context_window` exchanges are
    replayed into each prompt to give the model conversational context.
    """

    def __init__(self):
        self.chat_history = []   # list of (user_input, ai_response) tuples
        self.context_window = 3  # number of past exchanges replayed per prompt

    def get_blackbox_response(self, prompt, max_tokens=300, temperature=0.7):
        """Send `prompt` to the Blackbox chat-completions endpoint.

        Returns the assistant's reply text, or a human-readable
        "API Error: ..." string on any failure (network, HTTP status,
        unexpected response shape) — best-effort so the UI never crashes.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {BLACKBOX_API_KEY}",
        }
        try:
            resp = requests.post(
                "https://api.blackbox.ai/chat/completions",
                headers=headers,
                json={
                    "messages": [{"role": "user", "content": prompt}],
                    "max_tokens": max_tokens,
                    "temperature": temperature,
                    "model": "blackboxai/openai/gpt-4",
                },
                timeout=30,
            )
            resp.raise_for_status()
            return resp.json()["choices"][0]["message"]["content"]
        except Exception as e:
            return f"API Error: {e}"

    def generate_suggestions(self, analysis):
        """Turn the metrics dict from CodeAnalyzer into newline-joined tips."""
        suggestions = []
        # Functions
        if analysis['function_def'] == 0:
            suggestions.append("π Consider defining functions to organize your code and improve reuse.")
        elif analysis['function_def'] > 3:
            suggestions.append(f"π Detected {analysis['function_def']} functions β consider grouping related functions into classes or modules.")
        # Loops
        if analysis['loop'] >= 1:
            suggestions.append(f"π {analysis['loop']} loop(s) found β check if list comprehensions or vectorized operations can simplify them.")
        # Conditionals
        if analysis['conditional'] >= 2:
            suggestions.append(f"β {analysis['conditional']} conditional statements β consider simplifying nested logic or using lookup tables.")
        # Nesting depth
        if analysis['max_depth'] > 2:
            suggestions.append(f"π¦ Maximum nesting depth of {analysis['max_depth']} detected β flatten nested blocks for readability.")
        # Default.
        # BUGFIX: in the source this literal was split across two physical
        # lines (a SyntaxError as written); rejoined into a single literal.
        if not suggestions:
            suggestions.append("β Code structure looks clean based on basic analysis.")
        return "\n".join(suggestions)

    def process_input(self, user_input):
        """Run AST analysis + AI call; return (ai_response, metrics, tips)."""
        analyzer = CodeAnalyzer()
        analysis = analyzer.analyze(user_input)
        # FIX: only prepend the history section when there is history —
        # the original always emitted an empty "Previous conversation:" header.
        recent = self.chat_history[-self.context_window:]
        context = ""
        if recent:
            context = "\nPrevious conversation:\n" + "\n".join(
                f"User: {u}\nAI: {a}" for u, a in recent
            )
        prompt = (
            "You are an expert coding assistant. Analyze this code and provide improvements."
            f"\n{context}\nNew input:\n{user_input}"
        )
        ai_resp = self.get_blackbox_response(prompt)
        sugg = self.generate_suggestions(analysis)
        self.chat_history.append((user_input, ai_resp))
        return ai_resp, analysis, sugg
# Initialize copilot — single module-level instance shared by all UI callbacks
# (chat history therefore persists across requests within one process).
copilot = CodeCopilot()
# Build Gradio UI. NOTE: Blocks is order-sensitive — components are laid out
# in the order they are created inside the context managers.
with gr.Blocks(theme=gr.themes.Soft(), title="π€ AI Code Copilot") as demo:
    # Page header: raw HTML inside Markdown for centered title/subtitle.
    gr.Markdown("""
<div style='text-align: center; margin-bottom: 1rem;'>
<h1>π€ AI Code Copilot</h1>
<p>Paste code or ask a question below to get instant analysis.</p>
</div>
"""
    )
    with gr.Row():
        # Left column: code/question input plus trigger button.
        with gr.Column(scale=3, min_width=300):
            inp = gr.Textbox(label="Your Code / Question", lines=10, placeholder="Enter code here...")
            btn = gr.Button("π Generate")
        # Right column: AI answer, AST metric table, heuristic suggestions.
        with gr.Column(scale=6, min_width=500):
            gr.Markdown("**Assistant Response**")
            out = gr.Markdown()
            gr.Markdown("**Pattern Analysis**")
            df = gr.Dataframe(headers=["Metric","Count"], datatype=["str","number"], interactive=False)
            gr.Markdown("**Suggestions**")
            sug = gr.Markdown()

    def run_all(text):
        # Bridge callback: unpack copilot results into the three output
        # components; the metrics dict becomes a two-column DataFrame.
        ai_text, analysis, suggestions = copilot.process_input(text)
        df_data = {"Metric": list(analysis.keys()), "Count": list(analysis.values())}
        return ai_text, pd.DataFrame(df_data), suggestions

    # Both clicking the button and pressing Enter in the textbox run the pipeline.
    btn.click(fn=run_all, inputs=inp, outputs=[out, df, sug])
    inp.submit(fn=run_all, inputs=inp, outputs=[out, df, sug])
if __name__ == "__main__":
    demo.launch()
# (stray table-delimiter character from the page scrape — commented out)