Not-Grim-Refer's picture
Update app.py
52c1104
raw
history blame contribute delete
939 Bytes
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the CodeGPT-small-py checkpoint (GPT-2-style causal LM fine-tuned on
# Python) once at import time so every request reuses the same weights.
tokenizer = AutoTokenizer.from_pretrained("microsoft/CodeGPT-small-py")
model = AutoModelForCausalLM.from_pretrained("microsoft/CodeGPT-small-py")
def generate_code(description, temperature, top_k):
    """Generate Python code from a natural-language description.

    Args:
        description: Natural-language prompt describing the desired code.
        temperature: Sampling temperature in (0, 1]; higher is more random.
        top_k: Number of highest-probability tokens sampled from at each step
            (Gradio sliders deliver floats, so it is coerced to int here).

    Returns:
        The decoded model output as a string (includes the prompt prefix,
        since causal LMs continue the input).
    """
    input_ids = tokenizer.encode(description, return_tensors="pt")
    # do_sample=True is required for temperature/top_k to have any effect;
    # without it generate() falls back to greedy decoding and silently
    # ignores both parameters.
    output_ids = model.generate(
        input_ids,
        max_length=100,
        do_sample=True,
        temperature=temperature,
        top_k=int(top_k),
        # GPT-2-family tokenizers define no pad token; reuse EOS to
        # suppress the "Setting pad_token_id" warning on every call.
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
# Build the web UI. Note: gr.inputs.* / gr.outputs.* were removed in Gradio 4
# (gr.outputs.CopyButton never existed in any release), and server_port is a
# launch() keyword, not an Interface constructor argument. The function
# returns a single string, so exactly one output component is declared;
# show_copy_button provides the intended copy-to-clipboard affordance.
iface = gr.Interface(
    fn=generate_code,
    inputs=[
        gr.Textbox(label="Description"),
        gr.Slider(0.1, 1.0, value=0.7, label="Temperature"),
        gr.Slider(1, 50, step=1, value=50, label="Top K"),
    ],
    outputs=gr.Textbox(label="Generated code", show_copy_button=True),
)
iface.launch(server_port=8000)