import secrets

import chainlit as cl
from dotenv import load_dotenv
from langchain_core.messages import AIMessageChunk, HumanMessage, SystemMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables.config import RunnableConfig
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, MessagesState, StateGraph
from langsmith import traceable

load_dotenv()

# Conversation graph over the standard MessagesState (list of chat messages).
workflow = StateGraph(state_schema=MessagesState)

model = ChatGoogleGenerativeAI(model="gemini-2.5-pro", temperature=0.5)

# Load the system prompt once at startup; UTF-8 so non-ASCII prompt text survives.
# Fix: the original also built a ChatPromptTemplate here and discarded the
# result — a dead no-op, removed. The prompt text itself is kept in sys_prompt.
with open("sys_prompt.txt", "r", encoding="utf-8") as f:
    sys_prompt = f.read()

def call_model(state: MessagesState):
    """Invoke the chat model on the conversation history and return the reply.

    Fix: sys_prompt was read from sys_prompt.txt at startup but never applied
    to any model call (the prompt template built from it was discarded), so the
    assistant ignored its system instructions. Prepend it on every invocation.
    """
    # SystemMessage first, then the accumulated history for this thread.
    messages = [SystemMessage(content=sys_prompt), *state["messages"]]
    response = model.invoke(messages)
    # Returning under "messages" lets MessagesState append the reply to history.
    return {"messages": response}


# Register the single model node, then wire it as the graph's entry point.
workflow.add_node("model", call_model)
workflow.add_edge(START, "model")

# In-memory checkpointer: per-thread_id conversation history survives across
# turns within the process (lost on restart).
memory = MemorySaver()
app = workflow.compile(checkpointer=memory)


@cl.password_auth_callback
def auth_callback(username: str, password: str):
    """Authenticate a Chainlit user; return a User on success, None otherwise.

    Uses secrets.compare_digest so the comparison is constant-time and does not
    leak credential length/prefix via timing.

    NOTE(review): credentials are hardcoded ("admin"/"admin") — development
    only. Replace with a real user store and hashed-password verification
    before any production use.
    """
    valid = secrets.compare_digest(username, "admin") and secrets.compare_digest(
        password, "admin"
    )
    if valid:
        return cl.User(
            identifier="admin", metadata={"role": "admin", "provider": "credentials"}
        )
    return None


# @cl.on_chat_resume
# async def on_chat_resume(thread):
#     pass


@cl.on_message
async def main(message: cl.Message):
    """Handle one user message: validate attachments, then stream the reply."""
    # Reject unsupported attachments up front.
    # NOTE(review): "document/pgf" is not a standard MIME type — likely meant
    # "application/pdf"; confirm intent before changing the allow-list.
    if message.elements:
        for file in message.elements:
            if file.mime not in ["image/png", "image/jpeg" , "document/pgf"]:
                await cl.ErrorMessage(content="Unsupported file type").send()
                # Fix: previously fell through and answered anyway after
                # reporting the error; stop handling this message instead.
                return

    # Placeholder message we progressively fill as tokens stream in.
    answer = cl.Message(content="")
    await answer.send()

    # Route through the per-session thread so MemorySaver restores history.
    config: RunnableConfig = {
        "configurable": {"thread_id": cl.context.session.thread_id}
    }

    # stream_mode="messages" yields (message_chunk, metadata) pairs.
    for msg, _ in app.stream(
        {"messages": [HumanMessage(content=message.content)]},
        config,
        stream_mode="messages",
    ):
        if isinstance(msg, AIMessageChunk):
            # assumes chunk content is str; may be a list of parts for
            # multimodal output — TODO confirm.
            answer.content += msg.content  # type: ignore
            await answer.update()

@cl.on_audio_chunk
async def on_audio_chunk(chunk: cl.InputAudioChunk):
    """Pass each incoming audio chunk straight through, keyed as "audio"."""
    payload = {"audio": chunk}
    return payload