Spaces:
Running
Running
import gradio as gr
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Base checkpoint shared by the model and its tokenizer.
BASE_MODEL = "microsoft/CodeGPT-small-py"
ADAPTER = "Pradnya27/codegpt-lora-code-generation"

# Load the frozen base model, then attach the LoRA adapter weights on top.
base_model = AutoModelForCausalLM.from_pretrained(BASE_MODEL)
model = PeftModel.from_pretrained(base_model, ADAPTER)

tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)
# GPT-style tokenizers ship without a pad token; reuse EOS for padding.
tokenizer.pad_token = tokenizer.eos_token

# Inference only — disable dropout etc.
model.eval()
def generate_code(question):
    """Generate Python code for a natural-language coding question.

    Args:
        question: The user's request in plain English (e.g. "Write a
            function to reverse a string").

    Returns:
        The model's generated code as a string, with the instruction
        prompt removed.
    """
    prompt = "Generate code: " + question
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        outputs = model.generate(
            inputs["input_ids"],
            # Pass the attention mask explicitly: with pad_token == eos_token,
            # generate() cannot infer it and warns / may mask incorrectly.
            attention_mask=inputs["attention_mask"],
            max_new_tokens=200,
            temperature=0.7,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,
        )
    # Strip the prompt by token position, not by character count:
    # decode() is not guaranteed to reproduce the prompt byte-for-byte
    # (tokenizer normalization), so slicing the decoded string with
    # len(prompt) can cut in the wrong place.
    prompt_len = inputs["input_ids"].shape[1]
    return tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
# --- Gradio UI -------------------------------------------------------------
question_box = gr.Textbox(
    label="Your coding question",
    placeholder="e.g. Write a function to check if a number is prime",
    lines=3,
)
code_output = gr.Code(
    label="Generated Code",
    language="python",
)

demo = gr.Interface(
    fn=generate_code,
    inputs=question_box,
    outputs=code_output,
    title="⚡ CodeGPT LoRA — AI Code Generator",
    description=(
        "Fine-tuned CodeGPT with LoRA by Pradnya27. 275x smaller than full "
        "fine-tuning! Ask any coding question and get Python code."
    ),
    examples=[
        ["Write a function to reverse a string"],
        ["Write a function to find the largest number in a list"],
        ["Write a function to check if a string is a palindrome"],
    ],
)

demo.launch()