|
|
|
|
|
import os
|
|
|
import streamlit as st
|
|
|
|
|
|
from dotenv import load_dotenv
|
|
|
from langchain_community.chat_models import ChatOpenAI
|
|
|
from langchain.schema import HumanMessage, SystemMessage, AIMessage
|
|
|
|
|
|
# Load environment variables (expects OPENAI_API_KEY in a local .env file).
load_dotenv()

# Basic Streamlit page chrome.
st.set_page_config(page_title="Conversational Q&A Chatbot", layout="centered")

st.header("Hey! Let's Chat...")

# Shared chat model instance; temperature 0.5 trades determinism for variety.
# NOTE(review): reads OPENAI_API_KEY from the environment loaded above.
chat = ChatOpenAI(temperature=0.5)

# Conversation history lives in Streamlit session state so it survives
# reruns; seeded once with the system prompt that sets the assistant role.
if 'flow_messages' not in st.session_state:
    st.session_state['flow_messages']=[
        SystemMessage(content="You are a helpful Coding AI assistant.")
    ]
|
|
|
|
|
|
|
|
|
def gen_openai_response(question):
    """Send *question* to the chat model with the full conversation history.

    Appends the user's question and the model's reply to the
    ``flow_messages`` list kept in Streamlit session state, so every call
    gives the model the whole conversation so far.

    Parameters
    ----------
    question : str
        The user's question. Blank/whitespace-only input is rejected
        without touching the API or the history.

    Returns
    -------
    str
        The assistant's reply text, or ``""`` for blank input.
    """
    # Guard: the original code hit the API even for an empty question
    # (which happens on the first Streamlit rerun), appending empty
    # HumanMessages to the history and wasting an API call.
    if not question or not question.strip():
        return ""

    st.session_state["flow_messages"].append(HumanMessage(content=question))
    # .invoke() is the supported Runnable entry point; calling the model
    # directly as chat(messages) is deprecated in current LangChain.
    answer = chat.invoke(st.session_state["flow_messages"])
    st.session_state["flow_messages"].append(AIMessage(content=answer.content))
    return answer.content
|
|
|
|
|
|
# Text input value is re-read from session state (key="input") on each rerun.
user_input = st.text_input("Input: ", key="input")

submit = st.button("Ask the question")

if __name__ == "__main__":
    # BUGFIX: the response was previously computed unconditionally before
    # the button check, so the OpenAI API was called on EVERY Streamlit
    # rerun — including the very first render with an empty input box.
    # Only hit the API when the user actually submits a question.
    if submit and user_input:
        response = gen_openai_response(user_input)
        st.subheader("The Response is")
        st.write(response)