Nigz committed on
Commit
24a6253
·
verified ·
1 Parent(s): 849d13d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -12
app.py CHANGED
@@ -1,19 +1,22 @@
1
- import os
2
- import pickle
3
- import time
4
- import gradio as gr
5
- from dotenv import load_dotenv
6
- from langchain_google_genai import ChatGoogleGenerativeAI
7
- from langchain.chains import RetrievalQAWithSourcesChain
8
  from langchain.text_splitter import RecursiveCharacterTextSplitter
9
- from langchain.document_loaders import UnstructuredURLLoader, PyPDFLoader, TextLoader, Docx2txtLoader, UnstructuredHTMLLoader
 
10
  from langchain_google_genai import GoogleGenerativeAIEmbeddings
11
- from langchain.vectorstores import FAISS
 
 
 
 
 
12
 
13
  load_dotenv()
14
 
15
 
16
- llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", google_api_key = api_key, temperature=0.5)
17
  file_path = "vector_db.pkl"
18
 
19
  def load_any_file(file_path):
@@ -49,7 +52,7 @@ def process_inputs(url, file):
49
  )
50
  docs = text_splitter.split_documents(data)
51
 
52
- embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001",google_api_key = api_key)
53
  vectorstore_openai = FAISS.from_documents(docs, embeddings)
54
 
55
  with open(file_path, "wb") as f:
@@ -90,4 +93,4 @@ with gr.Blocks(title="RockyBot: News Research Tool") as demo:
90
  process_btn.click(fn=process_inputs, inputs=[url_input, file_input], outputs=[process_status, answer_output])
91
  query_input.submit(fn=answer_question, inputs=query_input, outputs=[answer_output, sources_output])
92
 
93
- demo.launch()
 
1
+ from langchain.document_loaders import UnstructuredURLLoader
2
+ from langchain.document_loaders import (
3
+ PyPDFLoader, TextLoader, Docx2txtLoader, UnstructuredHTMLLoader
4
+ )
 
 
 
5
  from langchain.text_splitter import RecursiveCharacterTextSplitter
6
+ from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain
7
+ from langchain_google_genai import ChatGoogleGenerativeAI
8
  from langchain_google_genai import GoogleGenerativeAIEmbeddings
9
+ from langchain_community.docstore.in_memory import InMemoryDocstore
10
+ from langchain_community.vectorstores import FAISS
11
+ from sentence_transformers import SentenceTransformer
12
+ from dotenv import load_dotenv
13
+ import pickle
14
+ import os
15
 
16
  load_dotenv()
17
 
18
 
19
+ llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001", temperature=0.5)
20
  file_path = "vector_db.pkl"
21
 
22
  def load_any_file(file_path):
 
52
  )
53
  docs = text_splitter.split_documents(data)
54
 
55
+ embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
56
  vectorstore_openai = FAISS.from_documents(docs, embeddings)
57
 
58
  with open(file_path, "wb") as f:
 
93
  process_btn.click(fn=process_inputs, inputs=[url_input, file_input], outputs=[process_status, answer_output])
94
  query_input.submit(fn=answer_question, inputs=query_input, outputs=[answer_output, sources_output])
95
 
96
+ demo.launch()