# app.py — Gradio + LangChain chatbot (Hugging Face Space, 3.3 kB).
import os
from typing import List
from dotenv import load_dotenv
import gradio as gr
from langchain_openai import ChatOpenAI
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableMap, RunnableSequence
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import BaseMessage
from pydantic import BaseModel, Field
from config import SYSTEM_INSTRUCTION, TEMPERATURE, MAX_TOKENS
# Load environment variables from .env file (no-op if the file is absent).
load_dotenv()
# NOTE(review): OPENAI_API_KEY is read here but never passed on explicitly;
# langchain_openai picks the key up from the environment variable itself,
# so this assignment is presumably unused — confirm before removing.
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
# Get the username and password from environment variables
# (used only by the commented-out `auth=` option of demo.launch below).
USERNAME = os.getenv('USERNAME')
PASSWORD = os.getenv('PASSWORD')
# In-memory implementation of chat message history
class InMemoryHistory(BaseChatMessageHistory, BaseModel):
    """Chat message history held entirely in process memory.

    Satisfies the ``BaseChatMessageHistory`` contract so it can back
    ``RunnableWithMessageHistory``; state is lost when the process exits.
    """

    # Backing store; pydantic's default_factory gives every instance its
    # own fresh list rather than a shared mutable default.
    messages: List[BaseMessage] = Field(default_factory=list)

    def add_messages(self, messages: List[BaseMessage]) -> None:
        """Append each message in *messages* to the stored history."""
        for message in messages:
            self.messages.append(message)

    def clear(self) -> None:
        """Discard all stored messages by rebinding to an empty list."""
        self.messages = []
# Per-session chat histories, keyed by session id.
store = {}


def get_session_history(session_id: str) -> InMemoryHistory:
    """Return the history for *session_id*, creating it on first use."""
    if session_id in store:
        return store[session_id]
    history = InMemoryHistory()
    store[session_id] = history
    return history
# Prompt layout: system instruction first, then the accumulated history,
# then the latest user turn.  {chat_history} and {user_message} are left
# as literal placeholders for PromptTemplate to fill in.
template = (
    f"{SYSTEM_INSTRUCTION}\n"
    "{chat_history}\n"
    "User: {user_message}\n"
    "Chatbot:"
)
prompt = PromptTemplate(
    template=template,
    input_variables=["chat_history", "user_message"],
)
# Initialize the chat model.  The API key is not passed explicitly;
# langchain_openai reads it from the OPENAI_API_KEY environment variable
# (loaded above via load_dotenv).
llm = ChatOpenAI(
    temperature=TEMPERATURE,
    max_tokens=MAX_TOKENS,
    model_name="gpt-4.1"
)
# LCEL pipe: the rendered prompt is fed as input to the model.
chain = prompt | llm
# Wrap the chain so message history is injected/recorded automatically.
# On each invoke: history for the configured session_id is fetched via
# get_session_history, exposed to the prompt as {chat_history}, and the
# new user/AI messages are appended afterwards.
with_history = RunnableWithMessageHistory(
    chain,
    get_session_history=get_session_history,
    # Key in the invoke() input dict holding the new user message.
    input_messages_key="user_message",
    # output_messages_key left at its default: the chain's output (an
    # AIMessage from ChatOpenAI) is stored as-is.
    #output_messages_key="response",
    # Prompt variable that receives the prior messages.
    history_messages_key="chat_history"
)
def get_text_response(user_message, history):
    """Gradio ChatInterface callback: return the model's reply as a string.

    *history* is supplied by gr.ChatInterface but intentionally unused here;
    conversation state is kept server-side by RunnableWithMessageHistory.
    """
    # NOTE(review): the session id is a hard-coded constant, so ALL
    # connected users share one conversation history.  Fine for a
    # single-user demo; for multi-user use, derive a per-session id
    # (e.g. from a gr.Request) instead — confirm intent.
    response = with_history.invoke(
        {"user_message": user_message},
        config={"configurable": {"session_id": "unique_session_id"}}
    )
    # `response` is an AIMessage; return its text content for Gradio.
    return str(response.content)
# # Create the Gradio interface
# demo = gr.ChatInterface(fn=get_text_response, type='messages', theme="allenai/gradio-theme")
# if __name__ == "__main__":
# demo.launch(
# debug=True,
# share=True,
# #auth=(USERNAME, PASSWORD),
# #auth_message="Enter the username and password provided by your instructor."
# )
def create_demo():
    """Build and return the Gradio chat UI wired to get_text_response."""
    return gr.ChatInterface(
        fn=get_text_response,
        type='messages',
        theme="allenai/gradio-theme",
    )
if __name__ == "__main__":
    # Build the UI, enable request queuing, and launch with a public
    # share link and debug output.
    app = create_demo()
    app.queue().launch(
        share=True,
        debug=True,
        # Re-enable to require instructor-provided credentials:
        #auth=(USERNAME, PASSWORD),
        #auth_message="Enter the username and password provided by your instructor."
    )