# app.py — Streamlit UI for a fine-tuned CodeLlama text-to-code model
# (originally published on Hugging Face Spaces, revision 445b95f)
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Load the model and tokenizer once and keep them for the app's lifetime.
# NOTE: st.cache(allow_output_mutation=True) is deprecated (and removed in
# recent Streamlit releases); st.cache_resource is the supported API for
# caching unserializable resources such as models and pipelines.
@st.cache_resource
def load_model():
    """Build and cache a text-generation pipeline for the fine-tuned model.

    Returns:
        transformers.Pipeline: a 'text-generation' pipeline wrapping the
        CodeLlama checkpoint and its tokenizer.
    """
    model_name = "abhishekyo/codellama2-finetuned-codex-fin7"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    # Original code hard-coded device=0, which raises on CPU-only hosts;
    # fall back to CPU (-1) when no CUDA device is available.
    device = 0 if torch.cuda.is_available() else -1
    gen_pipeline = pipeline('text-generation', model=model, tokenizer=tokenizer, device=device)
    return gen_pipeline
# Initialise the (cached) generation pipeline once at startup.
gen_pipeline = load_model()

st.title('Text-to-Code Generator')

# Free-form prompt box for the user's natural-language description.
user_input = st.text_area("Enter your text here:", height=200)

if st.button("Generate Code"):
    if not user_input:
        # Nothing to generate from — prompt the user instead of calling the model.
        st.warning("Please enter some text to generate code.")
    else:
        with st.spinner("Generating code..."):
            outputs = gen_pipeline(user_input, max_length=512, num_return_sequences=1)
            st.text_area("Generated Code:", value=outputs[0]['generated_text'], height=200)