# app.py — AI Code Copilot (Hugging Face Space by MalikShehram, commit 1f0a7ed, 5.15 kB)
import gradio as gr
import os
import requests
from dotenv import load_dotenv
import nltk
from nltk.tokenize import sent_tokenize
import numpy as np
import pandas as pd
from tqdm import tqdm
# Initialize NLTK (download punkt if needed)
# punkt is the sentence-tokenizer model required by sent_tokenize below.
nltk.download('punkt', quiet=True)
# Load environment variables
# NOTE: load_dotenv() must run before os.getenv so values from a local
# .env file are visible; if the key is unset, BLACKBOX_API_KEY is None
# and requests would send "Bearer None".
load_dotenv()
BLACKBOX_API_KEY = os.getenv("BLACKBOX_API_KEY")
class CodeCopilot:
    """Chat copilot that queries the Blackbox AI API and augments each reply
    with simple heuristic refactoring suggestions derived from the input."""

    def __init__(self):
        # Accumulated (user_input, response) pairs across calls.
        self.chat_history = []
        # Number of most-recent exchanges folded into each prompt as context.
        self.context_window = 5

    def get_blackbox_response(self, prompt, max_tokens=300, temperature=0.7):
        """Send ``prompt`` to the Blackbox AI API and return the reply text.

        Returns the API's ``text`` field, a fallback message when that field
        is absent, or an ``"Error: ..."`` string on any failure — errors are
        surfaced as displayable chat text by design, never raised to callers.
        """
        headers = {
            "Authorization": f"Bearer {BLACKBOX_API_KEY}",
            "Content-Type": "application/json",
        }
        payload = {
            "prompt": prompt,
            "max_tokens": max_tokens,
            "temperature": temperature,
        }
        try:
            response = requests.post(
                "https://api.blackbox.ai/generate",
                headers=headers,
                json=payload,
                timeout=30,
            )
            response.raise_for_status()
            return response.json().get("text", "No response text found.")
        except Exception as e:
            # Top-level boundary: network, HTTP, and JSON-decode failures all
            # become a user-visible error string instead of crashing the UI.
            return f"Error: {str(e)}"

    def analyze_code_patterns(self, text):
        """Count rough code-construct markers in ``text``.

        Splits ``text`` into sentences with NLTK punkt, then counts sentences
        containing function/class definitions, loops, or conditionals
        (substring matching, so prose mentioning these words also counts).

        Returns a dict mapping pattern name -> count.
        """
        sentences = sent_tokenize(text)
        # Simple pattern detection (can be expanded)
        patterns = {
            'function_def': sum(1 for s in sentences if 'def ' in s),
            'class_def': sum(1 for s in sentences if 'class ' in s),
            'loop': sum(1 for s in sentences if any(
                word in s for word in ['for ', 'while ', 'loop'])),
            'conditional': sum(1 for s in sentences if any(
                word in s for word in ['if ', 'else ', 'elif '])),
        }
        return patterns

    def generate_suggestions(self, patterns):
        """Map pattern counts to refactoring advice.

        ``patterns`` is the dict produced by :meth:`analyze_code_patterns`.
        Returns suggestions joined by newlines, or a default message when
        no threshold is exceeded.
        """
        suggestions = []
        if patterns['function_def'] > 3:
            suggestions.append("Consider breaking down into smaller functions or using a class structure.")
        if patterns['loop'] > 2:
            suggestions.append("You might benefit from using list comprehensions or map/filter functions.")
        if patterns['conditional'] > 3:
            suggestions.append("Complex conditionals might be simplified using polymorphism or strategy pattern.")
        return "\n".join(suggestions) if suggestions else "No specific suggestions at this time."

    def process_input(self, user_input, history=None):
        """Build a context-aware prompt, query the API, and record the exchange.

        ``history`` is accepted but unused — presumably kept for Gradio
        callback compatibility (TODO confirm). Returns the API response with
        a suggestions section appended.
        """
        # Heuristic analysis of the raw input.
        patterns = self.analyze_code_patterns(user_input)
        suggestions = self.generate_suggestions(patterns)
        # Fold the last `context_window` exchanges into the prompt.
        context = "\n".join(
            f"User: {h[0]}\nAI: {h[1]}"
            for h in self.chat_history[-self.context_window:]
        )
        prompt = f"""
Context:
{context}
User Input:
{user_input}
Respond as a helpful coding assistant that also provides suggestions for improvement.
Suggestions to consider:
{suggestions}
"""
        # Get response from Blackbox AI
        response = self.get_blackbox_response(prompt)
        # Update chat history
        self.chat_history.append((user_input, response))
        # Format full response with suggestions
        full_response = f"{response}\n\n=== Suggestions ===\n{suggestions}"
        return full_response
# Initialize copilot — one shared instance (and thus one shared chat history)
# for every visitor of the app.
copilot = CodeCopilot()

# Gradio interface
with gr.Blocks(title="AI Code Copilot") as demo:
    gr.Markdown("""
# AI Code Copilot
Your intelligent assistant for coding tasks that learns patterns and provides proactive suggestions.
""")
    with gr.Row():
        with gr.Column():
            input_text = gr.Textbox(
                label="Your code or question",
                placeholder="Paste your code or ask a question...",
                lines=5,
            )
            submit_btn = gr.Button("Generate")
        with gr.Column():
            output_text = gr.Textbox(
                label="Copilot Response",
                lines=10,
                interactive=False,
            )
    with gr.Accordion("Pattern Analysis", open=False):
        pattern_display = gr.Dataframe(
            headers=["Pattern", "Count"],
            datatype=["str", "number"],
            interactive=False,
        )

    def process_and_analyze(input_text):
        """Run the copilot on the input and return (response, pattern table)."""
        response = copilot.process_input(input_text)
        patterns = copilot.analyze_code_patterns(input_text)
        pattern_df = pd.DataFrame({
            "Pattern": list(patterns.keys()),
            "Count": list(patterns.values()),
        })
        return response, pattern_df

    submit_btn.click(
        fn=process_and_analyze,
        inputs=input_text,
        outputs=[output_text, pattern_display],
    )

# Fix: the original called demo.launch() unconditionally AND again under the
# __main__ guard, launching the app twice. Launch once, guarded.
if __name__ == "__main__":
    demo.launch()