updates
Browse files
app.py
CHANGED
|
@@ -6,6 +6,11 @@ IMPORTS HERE
|
|
| 6 |
import chainlit as cl
|
| 7 |
from langchain.chat_models import ChatOpenAI
|
| 8 |
from langchain.chains import ConversationChain
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
|
| 10 |
### Global Section ###
|
| 11 |
"""
|
|
@@ -24,6 +29,7 @@ conversation_chain = ConversationChain(llm=llm)
|
|
| 24 |
async def on_chat_start():
    """Initialize a new chat session by greeting the user.

    Runs once per session (registered via Chainlit's on_chat_start hook,
    visible earlier in the file); session-specific setup belongs here.
    """
    greeting = cl.Message(content="Welcome! How can I assist you today?")
    await greeting.send()
|
|
|
|
| 27 |
### Rename Chains ###
|
| 28 |
@cl.author_rename
|
| 29 |
def rename(orig_author: str):
|
|
@@ -35,12 +41,15 @@ def rename(orig_author: str):
|
|
| 35 |
|
| 36 |
### On Message Section ###
|
| 37 |
@cl.on_message
|
| 38 |
-
async def
|
| 39 |
-
|
| 40 |
-
|
|
|
|
| 41 |
|
| 42 |
-
|
| 43 |
-
|
|
|
|
|
|
|
|
|
|
| 44 |
|
| 45 |
-
|
| 46 |
-
await cl.Message(content=response).send()
|
|
|
|
| 6 |
import chainlit as cl
|
| 7 |
from langchain.chat_models import ChatOpenAI
|
| 8 |
from langchain.chains import ConversationChain
|
| 9 |
+
from langchain.prompts import ChatPromptTemplate
|
| 10 |
+
from langchain.schema import StrOutputParser
|
| 11 |
+
from langchain.schema.runnable import Runnable
|
| 12 |
+
from langchain.schema.runnable.config import RunnableConfig
|
| 13 |
+
from typing import cast
|
| 14 |
|
| 15 |
### Global Section ###
|
| 16 |
"""
|
|
|
|
async def on_chat_start():
    """Initialize a new chat session by greeting the user.

    Runs once per session (registered via Chainlit's on_chat_start hook,
    visible earlier in the file); session-specific setup belongs here.
    """
    greeting = cl.Message(content="Welcome! How can I assist you today?")
    await greeting.send()
|
| 32 |
+
|
| 33 |
### Rename Chains ###
|
| 34 |
@cl.author_rename
|
| 35 |
def rename(orig_author: str):
|
|
|
|
| 41 |
|
| 42 |
### On Message Section ###
|
| 43 |
@cl.on_message
async def on_message(message: cl.Message):
    """Answer an incoming user message by streaming the session's runnable.

    Retrieves the LCEL runnable stored in the user session (presumably set
    during on_chat_start — confirm against that handler), streams its output
    token-by-token into a Chainlit message, then finalizes the message.

    Args:
        message: The incoming Chainlit user message.
    """
    # cast() only narrows the type for static checkers; at runtime this is
    # whatever was stored under "runnable" (None if it was never set).
    chain = cast(Runnable, cl.user_session.get("runnable"))

    # Empty message that will be filled incrementally as tokens arrive.
    reply = cl.Message(content="")

    # The LangchainCallbackHandler surfaces intermediate chain steps
    # in the Chainlit UI while streaming.
    token_stream = chain.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    )
    async for token in token_stream:
        await reply.stream_token(token)

    await reply.send()
|
|
|