BlogPost / app.py
sabssag's picture
Update app.py
85b498f verified
raw
history blame
1.1 kB
import streamlit as st
from transformers import GPT2Tokenizer, GPT2LMHeadModel
# Initialize the tokenizer and model.
# st.cache_resource keeps the (multi-GB) gpt2-large weights in memory across
# Streamlit reruns; without it, every widget interaction reloads the model.
@st.cache_resource
def _load_generator(name: str):
    """Return (tokenizer, model) for the given GPT-2 checkpoint name."""
    return GPT2Tokenizer.from_pretrained(name), GPT2LMHeadModel.from_pretrained(name)

model_name = 'gpt2-large'
tokenizer, model = _load_generator(model_name)
# Set the title for the Streamlit app
st.title("GPT-2 Blog Post Generator")

# Text input for the user
text = st.text_area("Enter your Topic: ")

if text:
    try:
        # Tokenize the topic; keep the attention mask so generate() can
        # tell real tokens from padding.
        encoded_input = tokenizer(text, return_tensors='pt')

        # Generate a continuation of the prompt.
        output = model.generate(
            input_ids=encoded_input['input_ids'],
            attention_mask=encoded_input['attention_mask'],
            max_new_tokens=200,  # length of generated text, excluding the prompt
            num_return_sequences=1,
            do_sample=True,      # required: top_p/top_k are ignored under greedy decoding
            no_repeat_ngram_size=2,
            top_p=0.95,
            top_k=50,
            pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; use EOS
        )

        # Decode generated text
        generated_text = tokenizer.decode(output[0], skip_special_tokens=True)

        # Display the generated text
        st.subheader("Generated Blog Post")
        st.write(generated_text)
    except Exception as e:
        # Surface model/tokenizer failures in the UI instead of crashing the app.
        st.error(f"An error occurred: {e}")