Muh113 committed on
Commit
4ea832f
·
verified ·
1 Parent(s): b4f4a2d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -6
app.py CHANGED
@@ -1,9 +1,36 @@
 
 
1
  import streamlit as st
2
- from transformers import pipeline
3
 
4
- pipe = pipeline('sentiment-analysis')
5
- text = st.text_area('Enter some text!')
 
6
 
7
- if text:
8
- out = pipe(text)
9
- st.json(out)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import streamlit as st
 
4
 
5
# Single source of truth for the checkpoint name, so the tokenizer and
# the weights can never drift out of sync.
MODEL_NAME = 'gpt2-large'

# Load the tokenizer and model via the Auto* factory classes.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
8
 
9
def generate_blog_post(topic):
    """Generate a blog post about *topic* with the GPT-2 Large model.

    Parameters
    ----------
    topic : str
        Subject to write about; interpolated into the prompt.

    Returns
    -------
    str
        The decoded generated text (the prompt itself is included,
        since GPT-2 continues the input sequence).
    """
    prompt = f"Write a blog post about {topic}."
    inputs = tokenizer.encode(prompt, return_tensors='pt')

    # Inference only: no_grad() skips building the autograd graph,
    # which reduces memory use and speeds up generation.
    with torch.no_grad():
        outputs = model.generate(
            inputs,
            max_length=500,
            num_return_sequences=1,
            do_sample=True,
            top_p=0.95,
            top_k=60,
            # GPT-2 defines no pad token; without this, generate() logs a
            # warning and falls back to eos anyway — make it explicit.
            pad_token_id=tokenizer.eos_token_id,
        )

    # Decode the generated token ids back into text.
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return text
19
+
20
# --- Streamlit interface -------------------------------------------------
st.title("Blog Post Generator")
st.write("Generate a blog post for a given topic using GPT-2 Large.")

topic = st.text_input("Enter the topic for the blog post:")

if st.button("Generate Blog Post"):
    # Guard clause: bail out early when no topic was supplied.
    if not topic:
        st.warning("Please enter a topic.")
    else:
        with st.spinner("Generating..."):
            try:
                blog_post = generate_blog_post(topic)
                st.subheader("Generated Blog Post")
                st.write(blog_post)
            except Exception as e:
                # Surface any generation failure to the user instead of
                # letting the app crash.
                st.error(f"Error generating blog post: {e}")