Spaces:
Sleeping
from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr

# Load the CodeGen checkpoint once at startup; both objects are shared by
# get_code() for every request.
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-mono")
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-mono")

# Prompts offered as clickable examples in the UI.
example_text = [
    "create a function to calculate the n!",
    "create a function receive two arguments that return sum as a result",
]
def get_code(prompt, max_length=128):
    """Generate Python code from a natural-language prompt.

    Args:
        prompt: Natural-language description of the code to generate.
        max_length: Maximum total token length of the generated sequence
            (prompt tokens included). Defaults to 128, matching the limit
            advertised in the UI.

    Returns:
        The decoded model output as a string (the prompt echoed back
        followed by the generated code, special tokens stripped).
    """
    # Tokenize into a PyTorch tensor batch of size 1.
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    # Greedy generation up to max_length tokens (model.generate default strategy).
    generated_ids = model.generate(input_ids, max_length=max_length)
    # Decode the single returned sequence, dropping pad/eos tokens.
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)
# Build the Gradio UI: a prompt box and an output box side by side, plus a
# button wired to get_code. `with gr.Blocks() as demo:` is the idiomatic form
# of creating a Blocks context and binding it in one step.
with gr.Blocks() as demo:
    gr.Markdown(
        "## This Demo will generate python code only up to 128 tokens "
    )
    with gr.Row():
        inputs = gr.Textbox(label='Prompt for generating code', lines=5)
        outputs = gr.Textbox(label='Python Code', lines=10)
    b1 = gr.Button('Generate Code')
    # Clickable example prompts that populate the input textbox.
    gr.Examples(examples=example_text, inputs=inputs, outputs=outputs)
    b1.click(fn=get_code, inputs=inputs, outputs=outputs)

demo.launch()