# groq/ask.py — Chainlit chat app backed by Groq-hosted LLMs via LangChain.
import os
from typing import Optional

import chainlit as cl
from chainlit.input_widget import Select
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
@cl.author_rename
def rename(orig_author: str):
    """Map internal author identifiers to user-facing display names.

    Unknown authors are passed through unchanged.
    """
    display_names = {
        "LLMMathChain": "Albert Einstein",
        "Chatbot": "Assistant",
    }
    return display_names.get(orig_author, orig_author)
@cl.on_chat_start
async def on_chat_start():
    """Initialize a new chat session.

    Presents a model-selection widget, greets the user, then builds a
    ``prompt | model | parser`` runnable and stores it in the user session
    for ``on_message`` to stream from.
    """
    settings = await cl.ChatSettings(
        [
            Select(
                id="Model",
                # These are Groq-hosted models, not OpenAI models.
                label="Groq - Model",
                values=["mixtral-8x7b-32768", "llama2-70b-4096"],
                initial_index=0,
            )
        ]
    ).send()
    model_name = settings["Model"]
    await cl.Message(content="Hello there, I am Groq. How can I help you ?").send()
    # SECURITY: never hard-code API keys in source — the previous revision
    # embedded a live key. Read it from the environment instead.
    model = ChatGroq(
        temperature=0,
        model_name=model_name,
        api_key=os.environ.get("GROQ_API_KEY"),
    )
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "You're a helpful assistant"),
            ("human", "{question}"),
        ]
    )
    runnable = prompt | model | StrOutputParser()
    cl.user_session.set("runnable", runnable)
@cl.on_message
async def on_message(message: cl.Message):
    """Stream the LLM's answer to the incoming user message, token by token."""
    chain: Runnable = cl.user_session.get("runnable")
    reply = cl.Message(content="")
    token_stream = chain.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    )
    async for token in token_stream:
        await reply.stream_token(token)
    await reply.send()