Alpha108 committed on
Commit
53f4745
·
verified ·
1 Parent(s): 2094c2c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -14
app.py CHANGED
@@ -13,13 +13,12 @@ def load_style_samples():
13
 
14
  @st.cache_resource(show_spinner=False)
15
  def load_pipeline():
16
- # Replace model ID with a more powerful public LLM if you want
17
- model_id = "google/flan-t5-base" # For English text generation
 
18
  gen_pipe = pipeline(
19
- "text2text-generation",
20
- model=model_id,
21
- device_map="auto",
22
- trust_remote_code=True
23
  )
24
  return gen_pipe
25
 
@@ -28,7 +27,7 @@ style_samples = load_style_samples()
28
 
29
  st.set_page_config(page_title="LinkedIn Post Generator", layout="centered")
30
  st.title("🔗 LinkedIn Post Generator (Hugging Face)")
31
- st.write("Generate LinkedIn posts in your writing style.")
32
 
33
  with st.form("gen_form"):
34
  topic = st.text_input("Post Topic", "Generative AI for Business")
@@ -36,14 +35,16 @@ with st.form("gen_form"):
36
  audience = st.text_input("Audience", "Startup founders")
37
  length = st.slider("Length (approx words)", 30, 400, 120, 10)
38
  use_sample = st.selectbox(
39
- "Style Sample (optional)", ["None"] + [f"Sample {i+1}" for i in range(len(style_samples))])
 
 
40
  custom_style = st.text_area("Or paste your own style sample (optional)")
41
  submitted = st.form_submit_button("Generate Post")
42
 
43
  prompt_style = ""
44
  if use_sample != "None":
45
- sample_idx = int(use_sample.split()[1]) - 1
46
- prompt_style += f"Sample style: {style_samples[sample_idx]}\n"
47
  if custom_style.strip():
48
  prompt_style += f"User style: {custom_style}\n"
49
 
@@ -56,11 +57,19 @@ prompt = (
56
 
57
  if submitted:
58
  if not topic.strip():
59
- st.warning("Please enter a topic before generating.")
60
  else:
61
  with st.spinner("Generating..."):
62
  try:
63
- result = pipe(prompt, max_new_tokens=length + 32)[0]["generated_text"].strip()
 
 
 
 
 
 
 
 
64
  st.success("Here's your LinkedIn post:")
65
  st.write(result)
66
  st.download_button("Download post as .txt", result, file_name="linkedin_post.txt")
@@ -68,13 +77,15 @@ if submitted:
68
  st.error(f"Error generating post: {e}")
69
 
70
  st.markdown("---")
71
- st.write("Upload a new list of style samples (JSON array of strings, replaces existing):")
72
  file = st.file_uploader("Upload style_samples.json", type=["json"])
73
  if file:
74
  try:
75
  data = json.load(file)
 
 
76
  with open(STYLE_SAMPLES_FILE, "w") as f:
77
  json.dump(data, f)
78
- st.success(f"Uploaded and saved {len(data)} samples. Reload Space to use.")
79
  except Exception as e:
80
  st.error(f"Upload failed: {e}")
 
13
 
14
@st.cache_resource(show_spinner=False)
def load_pipeline():
    """Create and cache the Hugging Face text2text-generation pipeline.

    Returns:
        A transformers ``pipeline`` object for text2text generation.

    NOTE: ``device_map`` is deliberately not passed, so the app does not
    require the ``accelerate`` package (keeps the Space CPU-friendly).
    """
    # Compact instruction-tuned model that runs acceptably on CPU.
    model_id = "google/flan-t5-base"
    return pipeline(task="text2text-generation", model=model_id)
24
 
 
27
 
28
  st.set_page_config(page_title="LinkedIn Post Generator", layout="centered")
29
  st.title("🔗 LinkedIn Post Generator (Hugging Face)")
30
+ st.write("Generate LinkedIn posts in your style using a compact open model.")
31
 
32
  with st.form("gen_form"):
33
  topic = st.text_input("Post Topic", "Generative AI for Business")
 
35
  audience = st.text_input("Audience", "Startup founders")
36
  length = st.slider("Length (approx words)", 30, 400, 120, 10)
37
  use_sample = st.selectbox(
38
+ "Style Sample (optional)",
39
+ ["None"] + [f"Sample {i+1}" for i in range(len(style_samples))]
40
+ )
41
  custom_style = st.text_area("Or paste your own style sample (optional)")
42
  submitted = st.form_submit_button("Generate Post")
43
 
44
# Build the optional style-conditioning prefix for the generation prompt.
prompt_style = ""
if use_sample != "None":
    # The selectbox label is "Sample N"; convert to a zero-based index.
    idx = int(use_sample.split()[1]) - 1
    prompt_style = f"Sample style: {style_samples[idx]}\n"
if custom_style.strip():
    prompt_style = prompt_style + f"User style: {custom_style}\n"
50
 
 
57
 
58
  if submitted:
59
  if not topic.strip():
60
+ st.warning("Please enter a topic.")
61
  else:
62
  with st.spinner("Generating..."):
63
  try:
64
+ # flan-t5 uses text2text; pipeline returns list of dicts
65
+ outputs = pipe(prompt, max_new_tokens=length + 48)
66
+ # Support both list and dict return shapes defensively
67
+ if isinstance(outputs, list) and len(outputs) and "generated_text" in outputs[0]:
68
+ result = outputs[0]["generated_text"].strip()
69
+ elif isinstance(outputs, dict) and "generated_text" in outputs:
70
+ result = outputs["generated_text"].strip()
71
+ else:
72
+ result = str(outputs)
73
  st.success("Here's your LinkedIn post:")
74
  st.write(result)
75
  st.download_button("Download post as .txt", result, file_name="linkedin_post.txt")
 
77
  st.error(f"Error generating post: {e}")
78
 
79
  st.markdown("---")
80
+ st.write("Upload a JSON array of style samples (strings). This overwrites existing samples.")
81
  file = st.file_uploader("Upload style_samples.json", type=["json"])
82
  if file:
83
  try:
84
  data = json.load(file)
85
+ if not isinstance(data, list) or not all(isinstance(x, str) for x in data):
86
+ raise ValueError("JSON must be a list of strings.")
87
  with open(STYLE_SAMPLES_FILE, "w") as f:
88
  json.dump(data, f)
89
+ st.success(f"Saved {len(data)} samples. Reload the app to use them.")
90
  except Exception as e:
91
  st.error(f"Upload failed: {e}")