Spaces:
Sleeping
Sleeping
Commit ·
2b32d34
1
Parent(s): 6983abc
added qdrant
Browse files
- app.py +25 -4
- requirements.txt +2 -1
app.py
CHANGED
|
@@ -12,6 +12,8 @@ from aimakerspace.vectordatabase import VectorDatabase
|
|
| 12 |
from aimakerspace.openai_utils.chatmodel import ChatOpenAI
|
| 13 |
import chainlit as cl
|
| 14 |
import fitz # PyMuPDF for PDF reading
|
|
|
|
|
|
|
| 15 |
|
| 16 |
system_template = """\
|
| 17 |
Use the following context to answer a user's question. If you cannot find the answer in the context, say you don't know the answer."""
|
|
@@ -27,7 +29,7 @@ Question:
|
|
| 27 |
user_role_prompt = UserRolePrompt(user_prompt_template)
|
| 28 |
|
| 29 |
class RetrievalAugmentedQAPipeline:
|
| 30 |
-
def __init__(self, llm: ChatOpenAI(), vector_db_retriever):
|
| 31 |
self.llm = llm
|
| 32 |
self.vector_db_retriever = vector_db_retriever
|
| 33 |
|
|
@@ -85,9 +87,29 @@ def process_text_file(file: AskFileResponse):
|
|
| 85 |
texts = text_splitter.split_texts(documents)
|
| 86 |
return texts
|
| 87 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 88 |
|
| 89 |
@cl.on_chat_start
|
| 90 |
async def on_chat_start():
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 91 |
files = None
|
| 92 |
|
| 93 |
# Wait for the user to upload a file
|
|
@@ -111,9 +133,8 @@ async def on_chat_start():
|
|
| 111 |
|
| 112 |
print(f"Processing {len(texts)} text chunks")
|
| 113 |
|
| 114 |
-
#
|
| 115 |
-
vector_db = VectorDatabase()
|
| 116 |
-
vector_db = await vector_db.abuild_from_list(texts)
|
| 117 |
|
| 118 |
chat_openai = ChatOpenAI()
|
| 119 |
|
|
|
|
| 12 |
from aimakerspace.openai_utils.chatmodel import ChatOpenAI
|
| 13 |
import chainlit as cl
|
| 14 |
import fitz # PyMuPDF for PDF reading
|
| 15 |
+
from qdrant_client import QdrantClient
|
| 16 |
+
from qdrant_client.http.models import PointStruct, VectorParams, Distance
|
| 17 |
|
| 18 |
system_template = """\
|
| 19 |
Use the following context to answer a user's question. If you cannot find the answer in the context, say you don't know the answer."""
|
|
|
|
| 29 |
user_role_prompt = UserRolePrompt(user_prompt_template)
|
| 30 |
|
| 31 |
class RetrievalAugmentedQAPipeline:
|
| 32 |
+
def __init__(self, llm: ChatOpenAI, vector_db_retriever) -> None:
    """Store the collaborators used by the RAG pipeline.

    Args:
        llm: Chat model used to generate answers.
        vector_db_retriever: Vector store used to retrieve context chunks.
    """
    # NOTE: the annotation must be the class `ChatOpenAI`, not the call
    # `ChatOpenAI()` — annotations are evaluated at definition time, so the
    # call would instantiate a throwaway model and is not a valid type.
    self.llm = llm
    self.vector_db_retriever = vector_db_retriever
|
| 35 |
|
|
|
|
| 87 |
texts = text_splitter.split_texts(documents)
|
| 88 |
return texts
|
| 89 |
|
| 90 |
+
async def initialize_vector_db(choice, texts):
    """Build and populate the vector store the user selected.

    Args:
        choice: Backend name — "current" (in-process VectorDatabase) or
            "qdrant" (in-memory Qdrant instance).
        texts: List of text chunks to index.

    Returns:
        A populated ``VectorDatabase`` for "current", or a ``QdrantClient``
        for "qdrant".

    Raises:
        ValueError: If ``choice`` names an unsupported backend.
    """
    if choice == "current":
        vector_db = VectorDatabase()
        return await vector_db.abuild_from_list(texts)

    if choice == "qdrant":
        # In-memory Qdrant instance — for demonstration only; data is lost
        # when the process exits.
        client = QdrantClient(":memory:")
        client.recreate_collection(
            collection_name="my_collection",
            vectors_config=VectorParams(size=768, distance=Distance.COSINE),
        )
        # FIXME(review): every point gets the same all-zero vector, so
        # similarity search over this collection is meaningless until real
        # embeddings are computed for each chunk.
        points = [
            PointStruct(id=i, vector=[0.0] * 768, payload={"text": text})
            for i, text in enumerate(texts)
        ]
        client.upsert(collection_name="my_collection", points=points)
        return client

    # Fail loudly instead of silently returning None on a typo or an
    # unexpected selection from the UI.
    raise ValueError(f"Unknown vector database choice: {choice!r}")
|
| 104 |
|
| 105 |
@cl.on_chat_start
|
| 106 |
async def on_chat_start():
|
| 107 |
+
# Prompt the user to select the vector database
|
| 108 |
+
user_choice = await cl.AskSelectMessage(
|
| 109 |
+
content="Which vector database would you like to use?",
|
| 110 |
+
options=["current", "qdrant"],
|
| 111 |
+
).send()
|
| 112 |
+
|
| 113 |
files = None
|
| 114 |
|
| 115 |
# Wait for the user to upload a file
|
|
|
|
| 133 |
|
| 134 |
print(f"Processing {len(texts)} text chunks")
|
| 135 |
|
| 136 |
+
# Initialize the selected vector database
|
| 137 |
+
vector_db = await initialize_vector_db(user_choice, texts)
|
|
|
|
| 138 |
|
| 139 |
chat_openai = ChatOpenAI()
|
| 140 |
|
requirements.txt
CHANGED
|
@@ -1,4 +1,5 @@
|
|
| 1 |
numpy
|
| 2 |
chainlit==0.7.700
|
| 3 |
openai
|
| 4 |
-
pymupdf
|
|
|
|
|
|
| 1 |
numpy
|
| 2 |
chainlit==0.7.700
|
| 3 |
openai
|
| 4 |
+
pymupdf
|
| 5 |
+
qdrant-client
|