Mpavan45 commited on
Commit
4c1f03d
·
verified ·
1 Parent(s): cd90eb7

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +10 -13
src/streamlit_app.py CHANGED
@@ -1,26 +1,21 @@
1
  import streamlit as st
2
- import os
3
- import zipfile
4
- from langchain_chroma import Chroma # βœ… Updated import
5
  from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
6
  from langchain.prompts import ChatPromptTemplate
7
  from langchain.schema.output_parser import StrOutputParser
8
  from langchain.schema.runnable import RunnableLambda
9
- import tempfile
10
 
11
  # === Page Setup ===
12
  st.set_page_config(page_title="Financial QA - ITC Ltd.", layout="wide")
13
  st.title("πŸ“Š Financial Q&A Chatbot (ITC Ltd.)")
14
 
15
# === Step 1: Extract Chroma DB from zip ===
@st.cache_resource
def load_chroma_db():
    """Extract the zipped Chroma database and return a Chroma vector store.

    Unpacks ``chroma_db1.zip`` into a temporary directory and opens a
    Chroma store on it, using Google's ``embedding-001`` model to embed
    queries.

    Cached with ``st.cache_resource`` so extraction happens once per
    server process: Streamlit re-runs this script on every user
    interaction, and the original uncached version re-extracted the
    archive into a brand-new temp dir each time (slow, and leaked disk
    space since the dirs were never removed).
    """
    temp_dir = tempfile.mkdtemp()
    with zipfile.ZipFile("chroma_db1.zip", 'r') as zip_ref:
        zip_ref.extractall(temp_dir)
    embedding = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    return Chroma(persist_directory=temp_dir, embedding_function=embedding)


vectorstore = load_chroma_db()
 
 
 
24
 
25
  # === Step 2: MMR Retriever ===
26
  retriever = vectorstore.as_retriever(
@@ -48,11 +43,13 @@ prompt = ChatPromptTemplate.from_messages([
48
 
49
  # === Step 4: LLM Setup ===
50
  GOOGLE_API_KEY = st.secrets["GOOGLE_API_KEY"]
 
51
  llm = ChatGoogleGenerativeAI(
52
  api_key=GOOGLE_API_KEY,
53
  model="gemini-2.0-flash",
54
  temperature=1
55
  )
 
56
  parser = StrOutputParser()
57
 
58
  # === Step 5: Helper Functions ===
@@ -60,7 +57,7 @@ def format_docs(docs):
60
  return "\n\n".join(doc.page_content for doc in docs)
61
 
62
  def retrieve_and_answer(question):
63
- docs = retriever.invoke(question) # βœ… Updated to new `invoke()` method
64
  context = format_docs(docs)
65
  final_input = {"question": question, "context": context}
66
  result = (prompt | llm | parser).invoke(final_input)
 
1
  import streamlit as st
2
+ from langchain_chroma import Chroma
 
 
3
  from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
4
  from langchain.prompts import ChatPromptTemplate
5
  from langchain.schema.output_parser import StrOutputParser
6
  from langchain.schema.runnable import RunnableLambda
 
7
 
8
# === Page Setup ===
# Configure the browser tab title and use the full-width layout.
st.set_page_config(page_title="Financial QA - ITC Ltd.", layout="wide")
# Title emoji restored: the source had mojibake ("πŸ“Š", UTF-8 bytes
# mis-decoded as Windows-1252) where the bar-chart emoji was intended.
st.title("📊 Financial Q&A Chatbot (ITC Ltd.)")
11
 
12
# === Step 1: Load Chroma DB ===
# Query embeddings are produced by Google's embedding-001 model.
embedding = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

# Open the pre-extracted Chroma database directory that ships with the
# app (no zip extraction needed at runtime).
vectorstore = Chroma(persist_directory="src/chroma_db1", embedding_function=embedding)
19
 
20
  # === Step 2: MMR Retriever ===
21
  retriever = vectorstore.as_retriever(
 
43
 
44
# === Step 4: LLM Setup ===
# The Gemini API key is supplied through Streamlit's secrets management.
GOOGLE_API_KEY = st.secrets["GOOGLE_API_KEY"]

# Chat model that answers the retrieved-context questions.
llm = ChatGoogleGenerativeAI(
    model="gemini-2.0-flash",
    api_key=GOOGLE_API_KEY,
    temperature=1,
)

# Converts the model's chat-message output into a plain string.
parser = StrOutputParser()
54
 
55
  # === Step 5: Helper Functions ===
 
57
  return "\n\n".join(doc.page_content for doc in docs)
58
 
59
  def retrieve_and_answer(question):
60
+ docs = retriever.invoke(question)
61
  context = format_docs(docs)
62
  final_input = {"question": question, "context": context}
63
  result = (prompt | llm | parser).invoke(final_input)