owl123
committed on
Commit
·
47351aa
1
Parent(s):
5621d9a
Added temperature
Browse files
app.py
CHANGED
|
@@ -26,6 +26,8 @@ pinecone_index = "group-1"
|
|
| 26 |
|
| 27 |
if 'exchanges' not in st.session_state:
|
| 28 |
st.session_state.exchanges = []
|
|
|
|
|
|
|
| 29 |
|
| 30 |
# -------------------- Functions -----------------------
|
| 31 |
def console_log(msg):
|
|
@@ -56,12 +58,12 @@ def load_pdf(url):
|
|
| 56 |
console_log(f'After splitting, you have {len(texts)} documents')
|
| 57 |
load_vector_database()
|
| 58 |
|
| 59 |
-
def chat(query):
|
| 60 |
|
| 61 |
from langchain.llms import OpenAI
|
| 62 |
from langchain.chains.question_answering import load_qa_chain
|
| 63 |
|
| 64 |
-
llm = OpenAI(temperature=
|
| 65 |
chain = load_qa_chain(llm, chain_type="stuff")
|
| 66 |
|
| 67 |
embeddings = OpenAIEmbeddings(openai_api_key=st.secrets["OPENAI_API_KEY"])
|
|
@@ -109,12 +111,13 @@ with st.sidebar:
|
|
| 109 |
st.button('Click to start loading PDF', key="load_pdf", on_click=load_pdf, args=[pdf_dict[option]])
|
| 110 |
|
| 111 |
# ------------------------ Chatbot ------------------------
|
|
|
|
| 112 |
st.text_input("Prompt", placeholder="Ask me anything", key="prompt")
|
| 113 |
|
| 114 |
if st.session_state.prompt:
|
| 115 |
st.session_state.exchanges.append({"role": "user", "content": st.session_state.prompt})
|
| 116 |
try:
|
| 117 |
-
response = chat(format_prompt(st.session_state.exchanges))
|
| 118 |
except Exception as e:
|
| 119 |
st.error(e)
|
| 120 |
st.stop()
|
|
|
|
| 26 |
|
| 27 |
if 'exchanges' not in st.session_state:
|
| 28 |
st.session_state.exchanges = []
|
| 29 |
+
if 'temperature' not in st.session_state:
|
| 30 |
+
st.session_state.temperature = 0.5
|
| 31 |
|
| 32 |
# -------------------- Functions -----------------------
|
| 33 |
def console_log(msg):
|
|
|
|
| 58 |
console_log(f'After splitting, you have {len(texts)} documents')
|
| 59 |
load_vector_database()
|
| 60 |
|
| 61 |
+
def chat(query, temperature):
|
| 62 |
|
| 63 |
from langchain.llms import OpenAI
|
| 64 |
from langchain.chains.question_answering import load_qa_chain
|
| 65 |
|
| 66 |
+
llm = OpenAI(temperature=temperature, openai_api_key=st.secrets["OPENAI_API_KEY"])
|
| 67 |
chain = load_qa_chain(llm, chain_type="stuff")
|
| 68 |
|
| 69 |
embeddings = OpenAIEmbeddings(openai_api_key=st.secrets["OPENAI_API_KEY"])
|
|
|
|
| 111 |
st.button('Click to start loading PDF', key="load_pdf", on_click=load_pdf, args=[pdf_dict[option]])
|
| 112 |
|
| 113 |
# ------------------------ Chatbot ------------------------
|
| 114 |
+
st.slider("Temperature (0 = Most Deterministic)", min_value=0.0, max_value=1.0, step=0.1, key="temperature")
|
| 115 |
st.text_input("Prompt", placeholder="Ask me anything", key="prompt")
|
| 116 |
|
| 117 |
if st.session_state.prompt:
|
| 118 |
st.session_state.exchanges.append({"role": "user", "content": st.session_state.prompt})
|
| 119 |
try:
|
| 120 |
+
response = chat(format_prompt(st.session_state.exchanges), st.session_state.temperature)
|
| 121 |
except Exception as e:
|
| 122 |
st.error(e)
|
| 123 |
st.stop()
|