import os
from typing import List
from dotenv import load_dotenv
import gradio as gr
from langchain_openai import ChatOpenAI
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableMap, RunnableSequence
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import BaseMessage
from pydantic import BaseModel, Field
from config import SYSTEM_INSTRUCTION, TEMPERATURE, MAX_TOKENS

# Pull configuration from a local .env file into the process environment.
load_dotenv()

# OPENAI_API_KEY is bound here but never passed explicitly below;
# presumably ChatOpenAI picks it up from the environment — confirm.
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')

# Credentials for the (currently disabled) Gradio auth gate.
# NOTE(review): on Windows, the OS sets USERNAME to the login user,
# which would shadow a missing .env entry — verify deployment target.
USERNAME = os.getenv('USERNAME')
PASSWORD = os.getenv('PASSWORD')

# In-memory implementation of chat message history
# In-memory implementation of chat message history
class InMemoryHistory(BaseChatMessageHistory, BaseModel):
    """Chat message history held entirely in process memory."""

    # Accumulated conversation turns for a single session.
    messages: List[BaseMessage] = Field(default_factory=list)

    def add_messages(self, messages: List[BaseMessage]) -> None:
        """Append every incoming message to the stored conversation."""
        for message in messages:
            self.messages.append(message)

    def clear(self) -> None:
        """Discard all stored messages for this session."""
        self.messages = []

# Registry mapping session id -> InMemoryHistory, shared for the process lifetime.
store = {}

def get_session_history(session_id: str) -> InMemoryHistory:
    """Return the history for *session_id*, creating an empty one on first use."""
    history = store.get(session_id)
    if history is None:
        history = InMemoryHistory()
        store[session_id] = history
    return history

# Prompt layout: system instruction first, then the accumulated history,
# then the newest user turn. The doubled braces survive the f-string as
# single-brace slots that PromptTemplate fills at invoke time.
template = f"""{SYSTEM_INSTRUCTION}
{{chat_history}}
User: {{user_message}}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"],
    template=template
)

# Chat model configuration; TEMPERATURE and MAX_TOKENS come from config.py.
# API key is not passed explicitly — presumably read from OPENAI_API_KEY
# in the environment (loaded above via .env) — confirm.
llm = ChatOpenAI(
    temperature=TEMPERATURE,
    max_tokens=MAX_TOKENS,
    model_name="gpt-4.1"
)

# Compose prompt -> model with the LCEL pipe operator.
chain = prompt | llm

# Attach per-session memory: prior turns are injected into the prompt's
# {chat_history} slot, and the new user message (keyed "user_message")
# plus the model reply are appended to that session's store afterwards.
with_history = RunnableWithMessageHistory(
    chain,
    get_session_history=get_session_history,
    input_messages_key="user_message",
    history_messages_key="chat_history"
)

def get_text_response(user_message, history):
    """Run one chat turn and return the model's reply text.

    The *history* argument Gradio supplies is ignored on purpose:
    RunnableWithMessageHistory keeps its own per-session transcript.
    NOTE(review): the session id is a fixed constant, so every visitor
    shares one conversation — confirm this is intended for shared links.
    """
    run_config = {"configurable": {"session_id": "unique_session_id"}}
    ai_message = with_history.invoke({"user_message": user_message}, config=run_config)
    # invoke returns an AIMessage; hand Gradio its text content.
    return str(ai_message.content)

# (Superseded inline launch code removed; see create_demo and the
# __main__ guard below.)
def create_demo():
    """Build and return the Gradio chat UI wired to get_text_response."""
    chat_ui = gr.ChatInterface(
        fn=get_text_response,
        type='messages',
        theme="allenai/gradio-theme",
    )
    return chat_ui

if __name__ == "__main__":
    app = create_demo()
    # Enable request queueing, then serve with a public share link.
    app.queue().launch(
        debug=True,
        share=True,
        # auth=(USERNAME, PASSWORD),
        # auth_message="Enter the username and password provided by your instructor."
    )