Sakil committed on
Commit
f91d645
·
1 Parent(s): 720b01b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -19,7 +19,7 @@ def load_llm(max_tokens, prompt_template, temperature):
19
  temperature=temperature,
20
  )
21
 
22
- llm_chain = LLMChain(llm=llm, prompt=prompt_template)
23
  return llm_chain
24
 
25
  def get_src_original_url(query):
@@ -71,26 +71,26 @@ st.set_page_config(layout="wide")
71
  def main():
72
  st.title("GeniusWords: Unleash Your Imagination")
73
 
74
- example_prompt_template = st.text_area("Please enter the example prompt template:")
 
75
  max_tokens = st.number_input("Please enter the maximum tokens:", min_value=1, value=800)
76
  temperature = st.slider("Select the temperature:", min_value=0.1, max_value=1.0, value=0.7, step=0.1)
77
  user_input = st.text_input("Please enter the idea/topic for the article you want to generate!")
78
  image_input = st.text_input("Please enter the topic for the image you want to fetch!")
79
 
80
  if st.button("Generate Article"):
81
- if len(user_input) > 0 and len(image_input) > 0 and len(example_prompt_template) > 0:
82
  col1, col2, col3 = st.columns([1, 2, 1])
83
  with col1:
84
  st.subheader("Generated Content")
85
  st.write("Topic of the article is: " + user_input)
86
  st.write("Image of the article is: " + image_input)
87
 
88
- prompt_template = example_prompt_template.format(user_input=user_input)
89
  llm_call = load_llm(max_tokens=max_tokens, prompt_template=prompt_template, temperature=temperature)
90
  result = llm_call(user_input)
91
  if len(result) > 0:
92
  st.info("Your article has been generated successfully!")
93
- st.write(result['text'])
94
  else:
95
  st.error("Your article couldn't be generated!")
96
 
 
19
  temperature=temperature,
20
  )
21
 
22
+ llm_chain = LLMChain(llm=llm, prompt=PromptTemplate.from_template(prompt_template))
23
  return llm_chain
24
 
25
  def get_src_original_url(query):
 
71
  def main():
72
  st.title("GeniusWords: Unleash Your Imagination")
73
 
74
+ example_prompt_template = 'You are a digital marketing and SEO expert and your task is to write an article on the given topic: {user_input}. The article must be under 800 words. The article should be lengthy.'
75
+ prompt_template = st.text_area("Please enter the prompt template:", value=example_prompt_template)
76
  max_tokens = st.number_input("Please enter the maximum tokens:", min_value=1, value=800)
77
  temperature = st.slider("Select the temperature:", min_value=0.1, max_value=1.0, value=0.7, step=0.1)
78
  user_input = st.text_input("Please enter the idea/topic for the article you want to generate!")
79
  image_input = st.text_input("Please enter the topic for the image you want to fetch!")
80
 
81
  if st.button("Generate Article"):
82
+ if len(user_input) > 0 and len(image_input) > 0 and len(prompt_template) > 0:
83
  col1, col2, col3 = st.columns([1, 2, 1])
84
  with col1:
85
  st.subheader("Generated Content")
86
  st.write("Topic of the article is: " + user_input)
87
  st.write("Image of the article is: " + image_input)
88
 
 
89
  llm_call = load_llm(max_tokens=max_tokens, prompt_template=prompt_template, temperature=temperature)
90
  result = llm_call(user_input)
91
  if len(result) > 0:
92
  st.info("Your article has been generated successfully!")
93
+ st.write(result)
94
  else:
95
  st.error("Your article couldn't be generated!")
96