Sakil committed on
Commit
720b01b
·
1 Parent(s): aa1c125

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -19,7 +19,7 @@ def load_llm(max_tokens, prompt_template, temperature):
19
  temperature=temperature,
20
  )
21
 
22
- llm_chain = LLMChain(llm=llm, prompt=PromptTemplate.from_template(prompt_template))
23
  return llm_chain
24
 
25
  def get_src_original_url(query):
@@ -86,9 +86,8 @@ def main():
86
  st.write("Image of the article is: " + image_input)
87
 
88
  prompt_template = example_prompt_template.format(user_input=user_input)
89
- prompt_dict = {'text': prompt_template} # Convert the prompt to a dictionary
90
- llm_call = load_llm(max_tokens=max_tokens, prompt_template=prompt_dict, temperature=temperature)
91
- result = llm_call({'text': user_input}) # Pass user input as a dictionary
92
  if len(result) > 0:
93
  st.info("Your article has been generated successfully!")
94
  st.write(result['text'])
 
19
  temperature=temperature,
20
  )
21
 
22
+ llm_chain = LLMChain(llm=llm, prompt=prompt_template)
23
  return llm_chain
24
 
25
  def get_src_original_url(query):
 
86
  st.write("Image of the article is: " + image_input)
87
 
88
  prompt_template = example_prompt_template.format(user_input=user_input)
89
+ llm_call = load_llm(max_tokens=max_tokens, prompt_template=prompt_template, temperature=temperature)
90
+ result = llm_call(user_input)
 
91
  if len(result) > 0:
92
  st.info("Your article has been generated successfully!")
93
  st.write(result['text'])