# NOTE(review): the following lines are Hugging Face web-page chrome that was
# pasted into the source file; commented out so the module parses.
# raj-vir-singh's picture
# Upload 3 files
# 3d69e25
# raw | history | blame
# 1.24 kB
import gradio as gr
import os
import torch
from transformers import RobertaTokenizer, T5ForConditionalGeneration
# Hub auth token from the environment; falls back to True, which tells
# huggingface_hub to use the locally cached login credentials.
access_token = os.environ.get("access_token") or True
model_name = "ThoughtFocusAI/CodeGeneration-CodeT5-small"
# Prefer GPU when available; the model and input tensors must live on the
# same device (see generate_code below).
device = "cuda" if torch.cuda.is_available() else "cpu"
# NOTE(review): use_auth_token is deprecated in recent transformers releases
# in favor of token= — confirm against the pinned transformers version.
model = T5ForConditionalGeneration.from_pretrained(
model_name, use_auth_token=access_token).to(device)
tokenizer = RobertaTokenizer.from_pretrained(
model_name, use_auth_token=access_token)
def generate_code(user_input):
    """Generate Python source code for a natural-language request.

    Prepends the task prefix expected by the CodeT5 checkpoint, tokenizes to
    a fixed 512-token window, runs greedy generation, and decodes the result.

    Parameters
    ----------
    user_input : str
        Natural-language description of the code to generate.

    Returns
    -------
    str
        The generated code with special tokens stripped.
    """
    query = "Generate Python: " + user_input
    encoded_text = tokenizer(query, return_tensors='pt', padding='max_length',
                             truncation=True, max_length=512).input_ids.to(device)
    # Inference only — skip autograd graph construction.
    with torch.no_grad():
        generated_ids = model.generate(encoded_text, max_length=512)
    # Bug fix: the original called generated_ids.numpy()[0], which raises
    # TypeError when the model runs on CUDA (L13 picks "cuda" if available),
    # because .numpy() requires a CPU tensor. tokenizer.decode accepts the
    # tensor row directly, so index the batch dimension instead.
    decoded_code = tokenizer.decode(
        generated_ids[0], skip_special_tokens=True)
    return decoded_code
# Build and launch the Gradio UI: one multiline text input -> generated code.
# Bug fix: gr.inputs.Textbox / gr.outputs.Textbox are the deprecated Gradio
# 1.x/2.x namespaces and were removed in Gradio 3.x+; the top-level gr.Textbox
# component is the supported spelling and takes the same arguments.
interface = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(
        lines=3, label="Enter Text", placeholder="Ex-Add two numbers"),
    outputs=gr.Textbox(label="Generated Code"),
)
interface.launch()