# app.py — Gradio Space: chat UI for a 5B-parameter AI coding assistant.
import gradio as gr
import spaces
from models import CodeModel
from utils import format_code_response, parse_model_output
import torch
import os
from typing import List, Dict, Any
# Initialize the code model once at import time so the (expensive) model
# load happens during Space startup rather than on the first request.
# NOTE(review): CodeModel comes from the local `models` module — its load
# behavior (eager vs. lazy) is not visible here; confirm it does not block
# import for too long on CPU-only hosts.
code_model = CodeModel()
def chat_with_coder(message: str, history: List[Dict[str, str]], language: str = "python", temperature: float = 0.7) -> Dict[str, Any]:
    """Handle one coding query against the 5B-parameter model.

    Args:
        message: The user's latest prompt.
        history: Prior turns as OpenAI-style ``{"role", "content"}`` dicts.
        language: Target programming language injected into the system prompt.
        temperature: Sampling temperature forwarded to the model.

    Returns:
        A dict with an OpenAI-like ``"choices"`` entry and the updated
        ``"history"`` list. On failure, both the user's message and the error
        text are appended to the history so the UI still shows the exchange.
    """
    try:
        # Steer the model toward the user-selected language.
        system_prompt = f"""You are an expert {language} programmer and AI coding assistant.
You help users with:
- Writing and debugging {language} code
- Code optimization and best practices
- Explaining complex programming concepts
- Code review and suggestions
- Algorithm implementation
Always provide clean, well-commented, and efficient code. Format code blocks properly with language specification."""
        # Build the full conversation: system prompt, prior turns, new message.
        messages = [{"role": "system", "content": system_prompt}]
        messages.extend(history)
        messages.append({"role": "user", "content": message})
        response = code_model.generate(
            messages=messages,
            temperature=temperature,
            max_new_tokens=2048,
            language=language
        )
        formatted_response = format_code_response(response)
        # Copy before appending so the caller's history list is never mutated.
        new_history = history.copy()
        new_history.append({"role": "user", "content": message})
        new_history.append({"role": "assistant", "content": formatted_response})
        return {"choices": [{"message": {"content": formatted_response}}], "history": new_history}
    except Exception as e:
        error_msg = f"I apologize, but I encountered an error: {str(e)}. Please try again or rephrase your question."
        # Bug fix: the previous version returned the *input* history here, so
        # the user's message vanished from the chat and the error text was
        # never displayed (the UI only reads result["history"]). Record both.
        new_history = history.copy()
        new_history.append({"role": "user", "content": message})
        new_history.append({"role": "assistant", "content": error_msg})
        return {"choices": [{"message": {"content": error_msg}}], "history": new_history}
def clear_chat():
    """Reset the conversation to the initial assistant greeting.

    Returns:
        The chatbot's initial state: a one-element list in the "messages"
        format that ``gr.Chatbot(type="messages")`` renders.

    Bug fix: the previous version returned an OpenAI-style
    ``{"choices": ..., "history": ...}`` dict, but it is wired directly to
    ``outputs=[chatbot]`` — the Chatbot component cannot render that dict,
    so clearing the chat broke the UI.
    """
    greeting = (
        "Hello! I'm your AI coding assistant powered by a 5B parameter "
        "language model. I can help you with Python, JavaScript, Java, C++, "
        "and many other programming languages. What would you like to code today?"
    )
    return [{"role": "assistant", "content": greeting}]
def create_demo():
    """Create the Gradio demo interface.

    Returns:
        The assembled ``gr.Blocks`` app, ready to ``launch()``.

    Bug fixes vs. the previous version:
    - ``gr.Blocks`` has no ``description`` keyword; passing one raises
      ``TypeError`` at startup. The descriptive text already appears in the
      header HTML, so the kwarg is simply dropped.
    - Assigning ``chatbot.value = [...]`` after the Blocks context does not
      seed the rendered component; the greeting is now passed as
      ``gr.Chatbot(value=...)`` instead.
    - Mojibake emoji in labels/markdown repaired (🤖, 🛠️, 🎯, 🔧, ✅, ⏳).
    """
    initial_messages = [{
        "role": "assistant",
        "content": (
            "Hello! I'm your AI coding assistant powered by a 5B parameter "
            "language model. I can help you with Python, JavaScript, Java, C++, "
            "and many other programming languages. What would you like to code today?"
        ),
    }]
    with gr.Blocks(
        title="AI Coder - 5B Parameter Chatbot",
        theme=gr.themes.Soft(),
        css="""
        .container {max-width: 1200px !important;}
        .header {text-align: center; padding: 20px;}
        .header h1 {color: #2d3748; margin-bottom: 10px;}
        .header a {color: #3182ce; text-decoration: none; font-weight: bold;}
        .header a:hover {text-decoration: underline;}
        .coding-section {background: #f7fafc; border-radius: 8px; padding: 15px; margin: 10px 0;}
        """
    ) as demo:
        # Header
        gr.HTML("""
        <div class="header">
            <h1>🤖 AI Coder - Powered by 5B Parameter Model</h1>
            <p>Advanced AI chatbot with comprehensive coding features using a 5B parameter language model</p>
            <p>Built with <a href="https://huggingface.co/spaces/akhaliq/anycoder" target="_blank">anycoder</a></p>
        </div>
        """)
        # Main chat interface
        with gr.Row():
            # Left column - Chat
            with gr.Column(scale=3):
                chatbot = gr.Chatbot(
                    label="AI Coding Assistant",
                    height=600,
                    type="messages",
                    value=initial_messages,
                    # NOTE(review): avatar_images normally expects a file path
                    # or URL — confirm the emoji string renders as intended.
                    avatar_images=(None, "🤖"),
                    show_copy_button=True
                )
                with gr.Row():
                    msg = gr.Textbox(
                        placeholder="Ask me to code something, debug code, or explain programming concepts...",
                        lines=3,
                        scale=4
                    )
                    send_btn = gr.Button("Send", variant="primary", scale=1)
                with gr.Row():
                    clear_btn = gr.Button("Clear Chat", variant="secondary")
            # Right column - Controls
            with gr.Column(scale=1):
                gr.Markdown("### 🛠️ Coding Settings")
                language = gr.Dropdown(
                    choices=[
                        "python", "javascript", "java", "cpp", "c", "go",
                        "rust", "typescript", "php", "ruby", "swift", "kotlin",
                        "sql", "html", "css", "bash", "powershell"
                    ],
                    value="python",
                    label="Programming Language",
                    info="Target language for code generation"
                )
                temperature = gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.7,
                    step=0.1,
                    label="Creativity (Temperature)",
                    info="Lower for precise code, higher for creative solutions"
                )
                with gr.Accordion("🎯 Quick Coding Prompts", open=False):
                    gr.Examples(
                        examples=[
                            "Write a Python function to reverse a linked list",
                            "Create a React component for a login form",
                            "Debug this JavaScript code: [paste code]",
                            "Explain Big O notation with code examples",
                            "Write SQL queries for a user management system",
                            "Create a binary search algorithm in C++"
                        ],
                        inputs=msg,
                        examples_per_page=3
                    )
                with gr.Accordion("🔧 Model Info", open=False):
                    gr.Markdown(f"""
                    **Model:** {code_model.model_name}
                    **Parameters:** {code_model.parameter_count}
                    **Max Context:** {code_model.max_length:,} tokens
                    **Device:** {'CUDA' if torch.cuda.is_available() else 'CPU'}
                    **Status:** {'✅ Ready' if code_model.is_loaded else '⏳ Loading...'}
                    """)
        # Event handlers
        def user(user_message, history):
            # Clear the textbox and echo the user's turn into the chat.
            return "", history + [{"role": "user", "content": user_message}]
        def bot(history, selected_language, temp):
            if not history:
                return history
            # The last entry is the user's new message; everything before it
            # is the prior conversation passed as context.
            last_message = history[-1]["content"]
            result = chat_with_coder(last_message, history[:-1], selected_language, temp)
            return result["history"]
        # Wire up events: textbox submit and Send button share the same chain.
        msg.submit(
            user,
            [msg, chatbot],
            [msg, chatbot],
            queue=False
        ).then(
            bot,
            [chatbot, language, temperature],
            chatbot
        )
        send_btn.click(
            user,
            [msg, chatbot],
            [msg, chatbot],
            queue=False
        ).then(
            bot,
            [chatbot, language, temperature],
            chatbot
        )
        clear_btn.click(
            clear_chat,
            outputs=[chatbot]
        )
    return demo
if __name__ == "__main__":
    # Build the UI and serve it.
    app = create_demo()
    app.launch(
        server_name="0.0.0.0",  # bind all interfaces (required on HF Spaces)
        server_port=7860,       # the port Spaces exposes
        show_error=True,
        share=False,
        debug=True,
    )