Spaces:
Runtime error
Runtime error
added async functionality to chain execution
Browse files
- app.py +3 -2
- utils/store.py +2 -2
app.py
CHANGED
|
@@ -23,6 +23,7 @@ import langchain
|
|
| 23 |
from langchain.cache import InMemoryCache
|
| 24 |
from langchain_core.messages.human import HumanMessage
|
| 25 |
from langchain.memory import ConversationBufferMemory
|
|
|
|
| 26 |
|
| 27 |
load_dotenv()
|
| 28 |
YOUR_API_KEY = os.environ["PINECONE_API_KEY"]
|
|
@@ -119,7 +120,7 @@ async def main(message: cl.Message):
|
|
| 119 |
memory: ConversationBufferMemory = tools['memory']
|
| 120 |
|
| 121 |
# using query search for ArXiv documents and index files (on message)
|
| 122 |
-
await cl.make_async(search_and_index)(message=message, embedder=embedder, index=index)
|
| 123 |
|
| 124 |
text_field = "source_document"
|
| 125 |
index = pinecone.Index(INDEX_NAME)
|
|
@@ -139,7 +140,7 @@ async def main(message: cl.Message):
|
|
| 139 |
await sys_message.update()
|
| 140 |
|
| 141 |
# run
|
| 142 |
-
for chunk in retrieval_augmented_qa_chain.
|
| 143 |
if res:= chunk.get('response'):
|
| 144 |
await sys_message.stream_token(res.content)
|
| 145 |
await sys_message.send()
|
|
|
|
| 23 |
from langchain.cache import InMemoryCache
|
| 24 |
from langchain_core.messages.human import HumanMessage
|
| 25 |
from langchain.memory import ConversationBufferMemory
|
| 26 |
+
from chainlit import make_async
|
| 27 |
|
| 28 |
load_dotenv()
|
| 29 |
YOUR_API_KEY = os.environ["PINECONE_API_KEY"]
|
|
|
|
| 120 |
memory: ConversationBufferMemory = tools['memory']
|
| 121 |
|
| 122 |
# using query search for ArXiv documents and index files (on message)
|
| 123 |
+
await cl.make_async(search_and_index)(message=message, quantity=1, embedder=embedder, index=index)
|
| 124 |
|
| 125 |
text_field = "source_document"
|
| 126 |
index = pinecone.Index(INDEX_NAME)
|
|
|
|
| 140 |
await sys_message.update()
|
| 141 |
|
| 142 |
# run
|
| 143 |
+
async for chunk in retrieval_augmented_qa_chain.astream({"question": f"{message.content}", "chat_history": memory.buffer_as_messages}):
|
| 144 |
if res:= chunk.get('response'):
|
| 145 |
await sys_message.stream_token(res.content)
|
| 146 |
await sys_message.send()
|
utils/store.py
CHANGED
|
@@ -48,13 +48,13 @@ def index_documents(
|
|
| 48 |
index.upsert(vectors=zip(ids, embeds, metadatas))
|
| 49 |
|
| 50 |
|
| 51 |
-
def search_and_index(message: cl.Message, embedder: CacheBackedEmbeddings, index: GRPCIndex) -> None:
|
| 52 |
|
| 53 |
arxiv_client = arxiv.Client()
|
| 54 |
|
| 55 |
search = arxiv.Search(
|
| 56 |
query = message.content,
|
| 57 |
-
max_results =
|
| 58 |
sort_by = arxiv.SortCriterion.Relevance
|
| 59 |
)
|
| 60 |
paper_urls = []
|
|
|
|
| 48 |
index.upsert(vectors=zip(ids, embeds, metadatas))
|
| 49 |
|
| 50 |
|
| 51 |
+
def search_and_index(message: cl.Message, quantity: int, embedder: CacheBackedEmbeddings, index: GRPCIndex) -> None:
|
| 52 |
|
| 53 |
arxiv_client = arxiv.Client()
|
| 54 |
|
| 55 |
search = arxiv.Search(
|
| 56 |
query = message.content,
|
| 57 |
+
max_results = quantity,
|
| 58 |
sort_by = arxiv.SortCriterion.Relevance
|
| 59 |
)
|
| 60 |
paper_urls = []
|