basantcuraj commited on
Commit
3ddd24e
·
verified ·
1 Parent(s): 88f3592

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -22
app.py CHANGED
@@ -1,35 +1,29 @@
1
- ## Conversational Q&A Chatbot
2
- import streamlit as st
3
-
4
- from langchain.schema import HumanMessage,SystemMessage,AIMessage
5
- from langchain.chat_models import ChatOpenAI
6
-
7
- ## Streamlit UI
8
- st.set_page_config(page_title="Conversational Q&A Chatbot")
9
- st.header("Hey, Let's Chat")
10
 
11
  #from dotenv import load_dotenv
12
- #load_dotenv()
13
- #import os
14
 
15
- chat=ChatOpenAI(temperature=0.5)
 
 
 
16
 
17
- if 'flowmessages' not in st.session_state:
18
- st.session_state['flowmessages']=[
19
- SystemMessage(content="Yor are a comedian AI assitant")
20
- ]
21
 
22
  ## Function to load the OpenAI model and get responses
23
 
24
def get_chatmodel_response(question):
    """Send *question* to the chat model and return its reply text.

    Appends the user's message to the running conversation kept in
    ``st.session_state['flowmessages']``, invokes the chat model on the
    full message history, records the AI reply in the same history, and
    returns the reply's text content.
    """
    # Bind the shared history list once; appends below mutate it in place,
    # so session state stays up to date exactly as before.
    history = st.session_state['flowmessages']
    history.append(HumanMessage(content=question))
    reply = chat(history)
    history.append(AIMessage(content=reply.content))
    return reply.content
30
 
31
  input=st.text_input("Input: ",key="input")
32
- response=get_chatmodel_response(input)
33
 
34
  submit=st.button("Ask the question")
35
 
 
1
+ # Q&A Chatbot
2
+ from langchain.llms import OpenAI
 
 
 
 
 
 
 
3
 
4
  #from dotenv import load_dotenv
 
 
5
 
6
+ #load_dotenv() # take environment variables from .env.
7
+
8
+ import streamlit as st
9
+ import os
10
 
 
 
 
 
11
 
12
  ## Function to load the OpenAI model and get responses
13
 
14
def get_openai_response(question):
    """Return the LLM's completion for *question* as a string.

    Fix: this function is called unconditionally at script top level with
    the raw ``st.text_input`` value, which is ``""`` on the very first
    Streamlit rerun (before the user types anything). Guard against that
    so an empty prompt never triggers a pointless — and billable — API
    call. Non-empty input behaves exactly as before.
    """
    # st.text_input yields "" until the user has entered something.
    if not question:
        return ""
    # temperature=0.5: moderately creative but mostly stable answers.
    # NOTE(review): "text-davinci-003" is a legacy completions model —
    # confirm it is still served by the configured API endpoint.
    llm = OpenAI(model_name="text-davinci-003", temperature=0.5)
    return llm(question)
18
+
19
+ ## Initialize our Streamlit app
20
+
21
+ st.set_page_config(page_title="Q&A Demo")
22
 
23
+ st.header("Langchain Application")
 
 
 
24
 
25
  input=st.text_input("Input: ",key="input")
26
+ response=get_openai_response(input)
27
 
28
  submit=st.button("Ask the question")
29