Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -48,34 +48,6 @@ embedding = HuggingFaceBgeEmbeddings(
|
|
| 48 |
show_progress=True,
|
| 49 |
)
|
| 50 |
|
| 51 |
-
|
| 52 |
-
async def echo(websocket):
|
| 53 |
-
global retriever, conversational_rag_chain
|
| 54 |
-
async for message in websocket:
|
| 55 |
-
data = json.loads(message)
|
| 56 |
-
if not "message" in message:
|
| 57 |
-
return
|
| 58 |
-
if not "token" in message:
|
| 59 |
-
return
|
| 60 |
-
m = data["message"]
|
| 61 |
-
token = data["token"]
|
| 62 |
-
docs = retriever.get_relevant_documents(m)
|
| 63 |
-
response = conversational_rag_chain.invoke(
|
| 64 |
-
{"input": m},
|
| 65 |
-
config={
|
| 66 |
-
"configurable": {"session_id": token}
|
| 67 |
-
},
|
| 68 |
-
)["answer"]
|
| 69 |
-
await websocket.send(json.dumps({"response": response}))
|
| 70 |
-
|
| 71 |
-
async def main():
|
| 72 |
-
async with serve(echo, "0.0.0.0", 7860):
|
| 73 |
-
await asyncio.Future()
|
| 74 |
-
|
| 75 |
-
def f():
|
| 76 |
-
asyncio.run(main())
|
| 77 |
-
Process(target=f).start()
|
| 78 |
-
|
| 79 |
vectorstore = Chroma.from_documents(documents=splits, embedding=embedding)
|
| 80 |
|
| 81 |
def format_docs(docs):
|
|
@@ -145,10 +117,28 @@ conversational_rag_chain = RunnableWithMessageHistory(
|
|
| 145 |
history_messages_key="chat_history",
|
| 146 |
output_messages_key="answer",
|
| 147 |
)
|
| 148 |
-
|
| 149 |
-
websocket
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
| 154 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
show_progress=True,
|
| 49 |
)
|
| 50 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 51 |
vectorstore = Chroma.from_documents(documents=splits, embedding=embedding)
|
| 52 |
|
| 53 |
def format_docs(docs):
|
|
|
|
| 117 |
history_messages_key="chat_history",
|
| 118 |
output_messages_key="answer",
|
| 119 |
)
|
| 120 |
+
|
| 121 |
+
async def echo(websocket):
    """Serve one websocket client.

    Each incoming frame must be a JSON object with a "message" (user text)
    and a "token" (per-user session id used as the chat-history key).
    The answer from the conversational RAG chain is sent back as
    {"response": ...}. Malformed or incomplete frames are skipped.

    Relies on module-level globals `retriever` and
    `conversational_rag_chain` being initialised before the server starts.
    """
    global retriever, conversational_rag_chain
    async for message in websocket:
        try:
            data = json.loads(message)
        except json.JSONDecodeError:
            continue  # ignore malformed frames instead of crashing the handler
        # Bug fix: validate keys on the parsed dict, not on the raw JSON
        # string — a substring test matches keys/values anywhere in the
        # payload. Also use `continue` so one bad message does not close
        # the whole connection (the old `return` ended the handler).
        if "message" not in data or "token" not in data:
            continue
        m = data["message"]
        token = data["token"]
        # The chain performs its own retrieval; the previous unused
        # `retriever.get_relevant_documents(m)` call was dead work and
        # has been removed.
        response = conversational_rag_chain.invoke(
            {"input": m},
            config={"configurable": {"session_id": token}},
        )["answer"]
        await websocket.send(json.dumps({"response": response}))
|
| 139 |
+
|
| 140 |
+
async def main():
    """Start the websocket server on all interfaces, port 7860, and block forever."""
    server_ctx = serve(echo, "0.0.0.0", 7860)
    async with server_ctx:
        # Await a future that never resolves so the server keeps running
        # until the surrounding task/event loop is cancelled.
        await asyncio.Future()
|
| 143 |
+
|
| 144 |
+
# Blocks here: runs the websocket server in the foreground until the
# process is killed. NOTE(review): the previous revision ran this in a
# separate Process; confirm nothing after this line still needs to execute.
asyncio.run(main())
|