Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -10,41 +10,41 @@ import fitz
|
|
| 10 |
from PIL import Image
|
| 11 |
import streamlit as st
|
| 12 |
|
| 13 |
-
#
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
#
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
#
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
#
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
#
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
|
| 49 |
# # Function to generate a response based on the chat history and query
|
| 50 |
def generate_response(history, query, btn):
|
|
|
|
| 10 |
from PIL import Image
|
| 11 |
import streamlit as st
|
| 12 |
|
| 13 |
+
# Global variables
COUNT, N = 0, 0  # NOTE(review): counters — presumably page/request counts; confirm against generate_response usage
chat_history = []  # shared chat transcript — TODO confirm it mirrors the (text, '') pairs built by add_text
chain = None  # Initialize chain as None; a ConversationalRetrievalChain is assigned later by process_file()
|
| 17 |
+
|
| 18 |
+
# Function to set the OpenAI API key
def set_apikey(api_key):
    """Store the user-supplied OpenAI key in the process environment.

    Returns the ``disable_box`` component update (defined elsewhere in this
    file) — presumably to lock the key input after submission; confirm
    against the UI wiring.
    """
    os.environ.update({'OPENAI_API_KEY': api_key})
    return disable_box  # Update the disable_box
|
| 22 |
+
|
| 23 |
+
# Function to enable the API key input box
def enable_api_box():
    """Return the ``enable_box`` component update (defined elsewhere in
    this file) so the API-key input becomes editable again."""
    return enable_box  # Update the enable_box
|
| 26 |
+
|
| 27 |
+
# Function to add text to the chat history
def add_text(history, text):
    """Append the user's message to the chat history.

    Raises ``gr.Error`` when *text* is empty.  Otherwise returns a NEW list
    (the input is not mutated) with ``(text, '')`` appended — the empty
    string is the placeholder slot for the assistant's reply.
    """
    if not text:
        raise gr.Error('Enter text')
    return history + [(text, '')]
|
| 33 |
+
|
| 34 |
+
# Function to process the PDF file and create a conversation chain
def process_file(file):
    """Build a retrieval-QA chain over the uploaded PDF.

    Requires OPENAI_API_KEY to already be present in the environment
    (set via set_apikey); raises ``gr.Error`` otherwise.  Loads the PDF,
    embeds its pages into a Chroma vector store, and stores the resulting
    ConversationalRetrievalChain in the module-level ``chain`` global.

    Returns the chain (also kept in the global for other callbacks).
    """
    global chain  # Access the global 'chain' variable
    if 'OPENAI_API_KEY' not in os.environ:
        raise gr.Error('Upload your OpenAI API key')

    # Load page documents from the uploaded file and index them.
    documents = PyPDFLoader(file.name).load()
    pdfsearch = Chroma.from_documents(documents, OpenAIEmbeddings())

    # k=1: retrieve only the single most relevant page per query.
    chain = ConversationalRetrievalChain.from_llm(
        ChatOpenAI(temperature=0.3),
        retriever=pdfsearch.as_retriever(search_kwargs={"k": 1}),
        return_source_documents=True,
    )
    return chain
|
| 48 |
|
| 49 |
# # Function to generate a response based on the chat history and query
|
| 50 |
def generate_response(history, query, btn):
|