Spaces:
Sleeping
Sleeping
Explicitly pass the Google API key to the LLM and embedding model to resolve the 503 "Illegal metadata" and timeout errors
Browse files
app.py
CHANGED
|
@@ -11,6 +11,11 @@ from langchain_core.runnables import RunnablePassthrough
|
|
| 11 |
from langchain_core.output_parsers import StrOutputParser
|
| 12 |
import tempfile
|
| 13 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
|
| 15 |
# Constants
|
| 16 |
LLM_MODEL = "gemini-1.5-flash"
|
|
@@ -50,7 +55,7 @@ class PDFChatbot:
|
|
| 50 |
docs = text_splitter.create_documents([text])
|
| 51 |
print("Text split into chunks successfully.")
|
| 52 |
|
| 53 |
-
embeddings = GoogleGenerativeAIEmbeddings(model=EMBEDDING_MODEL)
|
| 54 |
self.state.db = await Chroma.afrom_documents(
|
| 55 |
documents=docs,
|
| 56 |
embedding=embeddings,
|
|
@@ -74,8 +79,8 @@ class PDFChatbot:
|
|
| 74 |
return
|
| 75 |
|
| 76 |
print("Database is ready. Retrieving relevant documents...")
|
| 77 |
-
retriever = self.state.db.as_retriever()
|
| 78 |
-
llm = ChatGoogleGenerativeAI(model=LLM_MODEL, temperature=0.7)
|
| 79 |
|
| 80 |
prompt_template = PromptTemplate(
|
| 81 |
template="""
|
|
@@ -110,10 +115,6 @@ class SessionState:
|
|
| 110 |
def is_db_ready(self):
|
| 111 |
return self.db is not None
|
| 112 |
|
| 113 |
-
# Set the Google API key from environment variables
|
| 114 |
-
if "GOOGLE_API_KEY" not in os.environ:
|
| 115 |
-
raise Exception("Please set the GOOGLE_API_KEY environment variable.")
|
| 116 |
-
|
| 117 |
with gr.Blocks(title="PDF Chatbot") as demo:
|
| 118 |
chatbot = PDFChatbot()
|
| 119 |
|
|
|
|
| 11 |
from langchain_core.output_parsers import StrOutputParser
|
| 12 |
import tempfile
|
| 13 |
|
| 14 |
+
# Set the Google API key from environment variables
|
| 15 |
+
if "GOOGLE_API_KEY" not in os.environ:
|
| 16 |
+
raise Exception("Please set the GOOGLE_API_KEY environment variable.")
|
| 17 |
+
|
| 18 |
+
google_api_key = os.environ.get("GOOGLE_API_KEY")
|
| 19 |
|
| 20 |
# Constants
|
| 21 |
LLM_MODEL = "gemini-1.5-flash"
|
|
|
|
| 55 |
docs = text_splitter.create_documents([text])
|
| 56 |
print("Text split into chunks successfully.")
|
| 57 |
|
| 58 |
+
embeddings = GoogleGenerativeAIEmbeddings(model=EMBEDDING_MODEL, google_api_key=google_api_key)
|
| 59 |
self.state.db = await Chroma.afrom_documents(
|
| 60 |
documents=docs,
|
| 61 |
embedding=embeddings,
|
|
|
|
| 79 |
return
|
| 80 |
|
| 81 |
print("Database is ready. Retrieving relevant documents...")
|
| 82 |
+
retriever = self.state.db.as_retriever()
|
| 83 |
+
llm = ChatGoogleGenerativeAI(model=LLM_MODEL, temperature=0.7, google_api_key=google_api_key)
|
| 84 |
|
| 85 |
prompt_template = PromptTemplate(
|
| 86 |
template="""
|
|
|
|
| 115 |
def is_db_ready(self):
|
| 116 |
return self.db is not None
|
| 117 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 118 |
with gr.Blocks(title="PDF Chatbot") as demo:
|
| 119 |
chatbot = PDFChatbot()
|
| 120 |
|