Nullzero committed on
Commit
1c92ebb
·
1 Parent(s): f2b2988

updated app

Browse files
Files changed (1) hide show
  1. app.py +7 -4
app.py CHANGED
@@ -1,9 +1,9 @@
1
  """Python file to serve as the frontend"""
2
- #sk-[REDACTED — hard-coded OpenAI API key removed; the exposed key should be revoked/rotated]
3
  import streamlit as st
4
  from streamlit_chat import message
5
  import os
6
  import openai
 
7
 
8
  #Import LLM wrapper
9
  from langchain.llms import OpenAI
@@ -30,7 +30,7 @@ def load_chain(api_key):
30
 
31
  second_prompt = PromptTemplate(
32
  input_variables=["program"],
33
- template= '''Produce the python3 code for each step of the {program} described. Use python3 style. Be concise in the code and opinionated about framework choice.'''
34
  )
35
 
36
  chain_two = LLMChain(llm=llm, prompt=second_prompt)
@@ -67,8 +67,11 @@ user_input = get_text()
67
 
68
  if user_input:
69
  chain = load_chain(openai_api_key)
70
- output = chain.run(input=user_input)
71
-
 
 
 
72
  st.session_state.past.append(user_input)
73
  st.session_state.generated.append(output)
74
 
 
1
  """Python file to serve as the frontend"""
 
2
  import streamlit as st
3
  from streamlit_chat import message
4
  import os
5
  import openai
6
+ import time
7
 
8
  #Import LLM wrapper
9
  from langchain.llms import OpenAI
 
30
 
31
  second_prompt = PromptTemplate(
32
  input_variables=["program"],
33
+ template= '''Write the python3 code for each step of the {program} described. Use python3 style. Be concise in the code and opinionated about framework choice.'''
34
  )
35
 
36
  chain_two = LLMChain(llm=llm, prompt=second_prompt)
 
67
 
68
  if user_input:
69
  chain = load_chain(openai_api_key)
70
+
71
+ with st.spinner('Wait for it...'):
72
+ output = chain.run(input=user_input)
73
+ time.sleep(5)
74
+ st.success('Done!')
75
  st.session_state.past.append(user_input)
76
  st.session_state.generated.append(output)
77