Spaces:
Running
Running
Update main.py
Browse files
main.py
CHANGED
|
@@ -148,7 +148,7 @@ async def Search(input, categorie):
|
|
| 148 |
@cl.step(type="llm")
|
| 149 |
async def setup_conversationalChain():
|
| 150 |
model = await LLModel()
|
| 151 |
-
retriever = await Retriever(
|
| 152 |
qa = ConversationalRetrievalChain.from_llm(
|
| 153 |
model,
|
| 154 |
memory=memory,
|
|
@@ -160,6 +160,23 @@ async def setup_conversationalChain():
|
|
| 160 |
cl.user_session.set("runnable", qa)
|
| 161 |
cl.user_session.set("memory", memory)
|
| 162 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 163 |
@cl.on_chat_start
|
| 164 |
async def on_chat_start():
|
| 165 |
await cl.Message(f"> REVIEWSTREAM").send()
|
|
@@ -211,6 +228,11 @@ async def on_chat_resume(thread: ThreadDict):
|
|
| 211 |
memory = ConversationBufferMemory(return_messages=True)
|
| 212 |
root_messages = [m for m in thread["steps"] if m["parentId"] == None]
|
| 213 |
for message in root_messages:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 214 |
if message["type"] == "user_message":
|
| 215 |
memory.chat_memory.add_user_message(message["output"])
|
| 216 |
else:
|
|
|
|
| 148 |
@cl.step(type="llm")
|
| 149 |
async def setup_conversationalChain():
|
| 150 |
model = await LLModel()
|
| 151 |
+
retriever = await Retriever(cl.user_session.get("selectRequest"))
|
| 152 |
qa = ConversationalRetrievalChain.from_llm(
|
| 153 |
model,
|
| 154 |
memory=memory,
|
|
|
|
| 160 |
cl.user_session.set("runnable", qa)
|
| 161 |
cl.user_session.set("memory", memory)
|
| 162 |
|
| 163 |
+
@cl.step(type="tool")
async def switch(value):
    """Map a thematic menu label to its retriever collection name.

    Args:
        value: Human-readable theme label selected by the user.

    Returns:
        The collection/index identifier string for the theme, or ``None``
        when *value* matches no known label (preserving the original
        elif-chain's implicit fall-through).
    """
    # Dispatch table replaces the seven-branch if/elif chain.
    # NOTE: the first two labels deliberately share the same collection.
    label_to_collection = {
        "Pédagogie durable": "bibliographie-OPP-DGDIN",
        "Lieux d'apprentissage": "bibliographie-OPP-DGDIN",
        "Journée de La Pédagogie": "year",
        "Compétences du CFA Descartes": "skills",
        "Formations Gustave Eiffel": "OF",
        "Vidéos paroles de confiné.es": "videosTC",
        "Offres d'emploi France Travail": "offreST",
    }
    return label_to_collection.get(value)
|
| 179 |
+
|
| 180 |
@cl.on_chat_start
|
| 181 |
async def on_chat_start():
|
| 182 |
await cl.Message(f"> REVIEWSTREAM").send()
|
|
|
|
| 228 |
memory = ConversationBufferMemory(return_messages=True)
|
| 229 |
root_messages = [m for m in thread["steps"] if m["parentId"] == None]
|
| 230 |
for message in root_messages:
|
| 231 |
+
if message["type"] == "assistant_message" and message["output"][29:38] == "thématique":
|
| 232 |
+
resName = await switch(message["output"][42:])
|
| 233 |
+
cl.user_session.set("selectRequest", resName)
|
| 234 |
+
print(message["output"])
|
| 235 |
+
|
| 236 |
if message["type"] == "user_message":
|
| 237 |
memory.chat_memory.add_user_message(message["output"])
|
| 238 |
else:
|