# Streamlit chat UI backed by a LangChain ChatOpenAI model.
# NOTE(review): in the pasted source `import streamlit as st` was fused into a
# comment run, leaving `st` undefined — the import is restored here.
import streamlit as st

from langchain_openai import ChatOpenAI
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage,
)

# --- Streamlit UI setup ---
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
st.header("Hey, I'm your Chat GPT")

# Seed the conversation once; st.session_state persists it across
# Streamlit's script reruns.
if "sessionMessages" not in st.session_state:
    st.session_state.sessionMessages = [
        SystemMessage(content="You are a helpful assistant.")
    ]

# Chat model, constructed once per rerun (temperature=0 for
# deterministic answers).
chat = ChatOpenAI(temperature=0)


def load_answer(question):
    """Send *question* to the model with full conversation context.

    Side effect: appends the HumanMessage/AIMessage pair to
    ``st.session_state.sessionMessages`` so context accumulates
    across reruns.

    Args:
        question: The user's question text.

    Returns:
        The assistant's reply as a plain string.
    """
    st.session_state.sessionMessages.append(HumanMessage(content=question))
    assistant_answer = chat.invoke(st.session_state.sessionMessages)
    st.session_state.sessionMessages.append(
        AIMessage(content=assistant_answer.content)
    )
    return assistant_answer.content


def get_text():
    """Return the current contents of the user's text-input widget."""
    return st.text_input("You: ")


user_input = get_text()
submit = st.button('Generate')

# Guard against empty input: the original fired a model call even when
# the text box was blank.
if submit and user_input:
    response = load_answer(user_input)
    st.subheader("Answer:")
    st.write(response)