Commit 6faecbe
1 Parent(s): 96b05a8
Update main.py
main.py CHANGED
@@ -154,41 +154,26 @@ async def start():
         CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

         ########## Chain with streaming ##########
-
-
-
-
-
-
-
-        #llm = ChatAnthropic(temperature=0)
-
+        message_history = ChatMessageHistory()
+        memory = ConversationBufferMemory(
+            memory_key="chat_history",
+            output_key="answer",
+            chat_memory=message_history,
+            return_messages=True,
+        )
         streaming_llm = ChatAnthropic(
             streaming=True,
             temperature=1,
             max_tokens=4000
         )
-        #question_generator = LLMChain(llm=llm, prompt=CONDENSE_QUESTION_PROMPT)
-        #doc_chain = load_qa_chain(streaming_llm, chain_type="stuff")
-        #relevant=retriever_to_cache()
-        memory = ConversationSummaryBufferMemory(
-            llm=streaming_llm,
-            output_key='answer',
-            memory_key='chat_history',
-            return_messages=True
-        )
         qa = ConversationalRetrievalChain.from_llm(
             streaming_llm,
             chain_type="stuff",
             retriever=retriever_to_cache(),
-            #combine_docs_chain=doc_chain,
-            #question_generator=question_generator,
             memory=memory,
             return_source_documents=True,
-            get_chat_history=lambda h : h,
             verbose=False
         )
-
         cl.user_session.set("conversation_chain", qa)
     elif chatProfile[0] == 'Emplois':
         poleemploi = cl.TaskList()
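For context, a minimal standalone sketch of the memory setup this diff switches to, assuming the classic langchain ConversationBufferMemory and ChatMessageHistory APIs that the new code references; the save_context call and the sample question/answer strings below are illustrative only and do not appear in the repo:

from langchain.memory import ConversationBufferMemory, ChatMessageHistory

# Explicit history object; it can be inspected or persisted independently
# of the memory wrapper that the chain sees.
message_history = ChatMessageHistory()

memory = ConversationBufferMemory(
    memory_key="chat_history",   # key the chain reads past turns from
    output_key="answer",         # which chain output to store; relevant because
                                 # return_source_documents=True adds extra output keys
    chat_memory=message_history,
    return_messages=True,        # hand back Message objects, not one flat string
)

# One simulated turn: both sides of the exchange are recorded.
memory.save_context({"question": "Bonjour"}, {"answer": "Bonjour !"})
print(memory.load_memory_variables({})["chat_history"])

Unlike the removed ConversationSummaryBufferMemory, this buffer keeps the raw messages and never calls the LLM to summarize them (hence no llm= argument), which is presumably also why the get_chat_history=lambda h: h shim could be dropped from ConversationalRetrievalChain.from_llm.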