# CodeMagic / app.py
# Author: chmawia — commit 19c7c87 (verified)
import subprocess
import sys

import gradio as gr
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
# Load a code-generation model with support for multiple languages
# CodeT5-small: a small seq2seq (encoder-decoder) model for code tasks.
MODEL_NAME = "Salesforce/codet5-small"
# Loaded once at import time so every Gradio request reuses the same weights.
# NOTE(review): first run downloads the checkpoint from the HF Hub — needs network.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
def generate_code(description, language):
    """Turn a natural-language task description into source code.

    Builds a short instruction prompt, runs it through the seq2seq model,
    and returns the decoded generation stripped of surrounding whitespace.
    """
    prompt = f"Generate {language} code: {description}"
    encoded = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
    generated_ids = model.generate(**encoded, max_length=400)
    decoded = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
    return decoded.strip()
def execute_code(code, language):
    """Run *code* and return its textual output.

    Only Python is executed: the snippet runs in a fresh interpreter process
    with a 5-second timeout. Returns stdout if the program printed anything,
    otherwise stderr (e.g. a traceback); on timeout or spawn failure, returns
    the exception text. For any other language an explanatory string is
    returned instead of running anything.

    NOTE(review): this executes model-generated code on the host. Acceptable
    for a sandboxed demo Space, but must not be exposed unsandboxed to
    untrusted users.
    """
    # Guard clause keeps the happy path flat.
    if language != "Python":
        return "Code execution only supported for Python."
    try:
        # sys.executable instead of a hard-coded 'python3': works on Windows
        # and inside virtualenvs where 'python3' may not be on PATH.
        result = subprocess.run(
            [sys.executable, "-c", code],
            capture_output=True,
            text=True,
            timeout=5,
        )
    except (subprocess.TimeoutExpired, OSError) as e:
        # Narrowed from a bare `except Exception`: only the failures this
        # call can realistically raise, reported the same way as before.
        return str(e)
    return result.stdout if result.stdout else result.stderr
def generate_and_execute(description, language):
    """Gradio callback: generate code for *description*, then try to run it.

    Returns a ``(code, output)`` pair. Execution is only attempted for
    Python; other languages get a fixed explanatory message.
    """
    code = generate_code(description, language)
    if language == "Python":
        output = execute_code(code, language)
    else:
        output = "Execution not supported for this language."
    return code, output
# Gradio UI: a task description plus a target language go in; the generated
# code and (for Python) its execution output come back.
# NOTE(review): live=True re-runs generation on every keystroke, which is
# expensive with a transformer model — confirm this is intentional.
iface = gr.Interface(
    fn=generate_and_execute,
    inputs=[
        gr.Textbox(lines=5, placeholder="Describe your coding task..."),
        gr.Dropdown(choices=["Python", "JavaScript", "Java"], label="Programming Language"),
    ],
    outputs=[
        gr.Code(label="Generated Code"),
        gr.Textbox(label="Execution Output"),
    ],
    title="Multi-Language Text-to-Code AI",
    description="Convert natural language descriptions into code in different programming languages! Run Python code directly in the app.",
    theme="default",
    allow_flagging="never",
    live=True,
)
# Launch the app only when run as a script (share=True requests a public
# gradio.live tunnel URL in addition to the local server).
if __name__ == "__main__":
    iface.launch(share=True)