Dmang69 committed on
Commit
eb8f44f
·
verified ·
1 Parent(s): 726975b

Spaces Application File

Browse files

# app.py - Main Application for Hugging Face Spaces
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import re
import time

# Initialize model (using a smaller, faster model for Spaces)
MODEL_NAME = "codellama/CodeLlama-7b-hf"

def load_model():
    """Load the CodeLlama model and wrap it in a text-generation pipeline.

    Returns:
        A configured ``transformers`` text-generation pipeline, or ``None``
        when loading fails (e.g. insufficient memory on the Space) so that
        callers can degrade gracefully instead of crashing at import time.
    """
    try:
        tok = AutoTokenizer.from_pretrained(MODEL_NAME)
        lm = AutoModelForCausalLM.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch.float16,
            low_cpu_mem_usage=True,
            load_in_8bit=True,  # 8-bit quantization (needs bitsandbytes) to fit Spaces memory
            device_map="auto",
        )
        return pipeline(
            "text-generation",
            model=lm,
            tokenizer=tok,
            max_new_tokens=512,
            temperature=0.2,
            do_sample=True,
            top_p=0.95,
            repetition_penalty=1.15,
        )
    except Exception as e:
        # Best-effort: report and return None; the UI handlers check for this.
        print(f"Error loading model: {e}")
        return None

# Global model loading — runs once at import time (can take minutes on Spaces);
# `pipe` is None when loading failed, and every handler below checks for that.
pipe = load_model()

def extract_code(text):
    """Return the body of the first fenced ``` code block in *text*.

    The optional language tag after the opening fence (e.g. ```python) is
    ignored.  When no fenced block is present, the whole text is returned
    stripped of surrounding whitespace.
    """
    blocks = re.findall(r"```(?:\w+)?\s*(.*?)```", text, re.DOTALL)
    if blocks:
        return blocks[0]
    return text.strip()

def generate_code(task, language="python", temperature=0.2):
    """Generate *language* source code for the described *task*.

    Returns the extracted code string, or an error message when the model
    is unavailable or generation fails.
    """
    if not pipe:
        return "Error: Model not loaded"

    prompt = f"Generate {language} code for: {task}"
    try:
        output = pipe(prompt, max_new_tokens=512, temperature=temperature)
        # The pipeline echoes the prompt; keep only the completion.
        completion = output[0]['generated_text'][len(prompt):]
        return extract_code(completion)
    except Exception as e:
        return f"Error generating code: {str(e)}"

def explain_code(code):
    """Produce a concise natural-language explanation of *code*.

    Returns the model's explanation text, or an error message when the
    model is unavailable or inference fails.
    """
    if not pipe:
        return "Error: Model not loaded"

    # FIX: the previous prompt spliced the code's *first token* in as a
    # language name (e.g. "Explain this def code concisely"), which produced
    # nonsense prompts; just ask for an explanation of the code itself.
    prompt = f"Explain this code concisely:\n{code}\n\nExplanation:"
    try:
        result = pipe(prompt, max_new_tokens=300, temperature=0.3)
        # The pipeline echoes the prompt; keep only the completion.
        return result[0]['generated_text'][len(prompt):]
    except Exception as e:
        return f"Error explaining code: {str(e)}"

def fix_code(code, error=""):
    """Repair *code* (guided by *error* when provided) or generally improve it.

    Returns the extracted fixed/improved code, or an error message when the
    model is unavailable or inference fails.
    """
    if not pipe:
        return "Error: Model not loaded"

    # With an error message we ask for a targeted fix; otherwise a general clean-up.
    prompt = (
        f"Fix this code. Error: {error}\nCode:\n{code}\n\nFixed code:"
        if error
        else f"Improve this code for better efficiency and readability:\n{code}\n\nImproved code:"
    )

    try:
        generated = pipe(prompt, max_new_tokens=512, temperature=0.2)
        return extract_code(generated[0]['generated_text'][len(prompt):])
    except Exception as e:
        return f"Error fixing code: {str(e)}"

def chat_with_ai(message, history):
    """Handle one chat turn for the Chat tab.

    FIX: this function is wired below as
    ``msg.submit(chat_with_ai, [msg, chatbot], [msg, chatbot])`` — two
    outputs — but previously returned a single string, so every submit
    raised a Gradio output-count error.  It now returns ``("", history)``:
    an empty string to clear the textbox plus the updated chat history.
    """
    history = history or []
    if not pipe:
        return "", history + [(message, "Error: Model not loaded")]

    # Re-serialize prior turns into a plain-text transcript for the model.
    conversation = ""
    for user_msg, assistant_msg in history:
        conversation += f"User: {user_msg}\nAssistant: {assistant_msg}\n"
    conversation += f"User: {message}\nAssistant:"

    try:
        result = pipe(conversation, max_new_tokens=300, temperature=0.7)
        response = result[0]['generated_text'][len(conversation):]
        # Keep only the assistant's first reply; the model may continue
        # hallucinating further "User:" turns.
        response = response.split("User:")[0].strip()
    except Exception as e:
        response = f"Error in chat: {str(e)}"

    return "", history + [(message, response)]

# Gradio Interface — four tabs (generate / explain / fix / chat) sharing the
# single module-level pipeline.  NOTE(review): indentation below is restored;
# the scraped source had all leading whitespace stripped.
with gr.Blocks(title="SynapseAI Programming Assistant") as demo:
    gr.Markdown("""
    # ⚡ SynapseAI Programming Assistant
    Your expert AI pair programmer for code generation, explanation, and debugging.
    """)

    with gr.Tab("Generate Code"):
        with gr.Row():
            with gr.Column():
                task_input = gr.Textbox(
                    label="What do you want to build?",
                    placeholder="e.g., a function to reverse a string"
                )
                lang_input = gr.Dropdown(
                    ["python", "javascript", "java", "cpp", "html", "css", "sql"],
                    value="python",
                    label="Programming Language"
                )
                # Maps directly onto the sampling temperature of the pipeline call.
                temp_slider = gr.Slider(0.1, 1.0, value=0.2, step=0.1, label="Creativity (Temperature)")
                gen_btn = gr.Button("Generate Code")
            with gr.Column():
                # NOTE(review): syntax highlighting is fixed to python even when
                # another language is selected — confirm whether this is intended.
                code_output = gr.Code(label="Generated Code", language="python")

        gen_btn.click(
            generate_code,
            inputs=[task_input, lang_input, temp_slider],
            outputs=code_output
        )

    with gr.Tab("Explain Code"):
        with gr.Row():
            with gr.Column():
                code_input = gr.Textbox(
                    label="Paste code to explain",
                    lines=10,
                    placeholder="Paste your code here..."
                )
                explain_btn = gr.Button("Explain Code")
            with gr.Column():
                explain_output = gr.Textbox(label="Explanation", lines=10)

        explain_btn.click(
            explain_code,
            inputs=[code_input],
            outputs=explain_output
        )

    with gr.Tab("Fix/Improve Code"):
        with gr.Row():
            with gr.Column():
                broken_code = gr.Textbox(
                    label="Paste code to fix/improve",
                    lines=10,
                    placeholder="Paste your code here..."
                )
                # Optional: when empty, fix_code falls back to a general improvement prompt.
                error_msg = gr.Textbox(
                    label="Error message (optional)",
                    placeholder="e.g., SyntaxError: invalid syntax"
                )
                fix_btn = gr.Button("Fix/Improve Code")
            with gr.Column():
                fixed_output = gr.Code(label="Fixed/Improved Code")

        fix_btn.click(
            fix_code,
            inputs=[broken_code, error_msg],
            outputs=fixed_output
        )

    with gr.Tab("Chat"):
        chatbot = gr.Chatbot(label="Programming Assistant")
        msg = gr.Textbox(label="Ask anything about programming...")
        clear = gr.Button("Clear Chat")

        # Two outputs: the textbox (cleared) and the updated chat history.
        msg.submit(chat_with_ai, [msg, chatbot], [msg, chatbot])
        clear.click(lambda: None, None, chatbot, queue=False)

# For Hugging Face Spaces
if __name__ == "__main__":
    demo.launch()

Hugging Face Spaces Application File ADDED
@@ -0,0 +1,10 @@
+ # app.yaml - Hugging Face Spaces Configuration
+ title: SynapseAI Programming Assistant
+ emoji: ⚡
+ colorFrom: blue
+ colorTo: purple
+ sdk: gradio
+ sdk_version: 4.19.2
+ app_file: app.py
+ pinned: false
+ license: apache-2.0