# Streamlit chatbot backed by Gemini (gemini-1.5-pro) with windowed conversation memory.
| import os | |
| from langchain_core.prompts import ChatPromptTemplate | |
| from langchain_google_genai import ChatGoogleGenerativeAI | |
| from langchain_core.prompts import MessagesPlaceholder | |
| from langchain.memory import ConversationBufferWindowMemory | |
| from operator import itemgetter | |
| from langchain_core.runnables import RunnableLambda,RunnablePassthrough | |
| import streamlit as st | |
# Read the Google Generative AI key from the environment and fail fast with a
# clear message instead of letting the client raise an opaque auth error later.
genai_key = os.getenv("gen_key")
if not genai_key:
    st.error("Environment variable 'gen_key' is not set; cannot reach Gemini.")
    st.stop()

# Deterministic (temperature=0), short responses (max 150 tokens); system
# messages are folded into the human turn for Gemini compatibility.
model = ChatGoogleGenerativeAI(
    temperature=0,
    model="gemini-1.5-pro",
    max_output_tokens=150,
    convert_system_message_to_human=True,
    google_api_key=genai_key,
)
# Prompt layout: fixed system instruction, prior turns injected under the
# "history" variable, then the current user message as "{input}".
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "you are a good assistant that give information about mentioned topic."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ]
)
# One windowed conversation memory (last k=10 exchanges) per browser session,
# kept in Streamlit session state so it survives reruns.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=10, return_messages=True)

# Pipeline: pull stored messages out of memory, expose them to the prompt as
# "history", then run the prompt through the model.
load_history = RunnableLambda(st.session_state.memory.load_memory_variables)
chain = (
    RunnablePassthrough.assign(history=load_history | itemgetter("history"))
    | prompt
    | model
)
# Streamlit UI
st.title("Interactive Chatbot")

# Seed the session-state default used by the input box.
if "user_input" not in st.session_state:
    st.session_state.user_input = ""

# Input from user
user_input = st.text_area("User: ", st.session_state.user_input, height=100)

if st.button("Submit"):
    if user_input.strip():  # guard: don't invoke the LLM on an empty prompt
        response = chain.invoke({"input": user_input})
        st.write(f"Assistant: {response.content}")
        # Persist this turn so the windowed memory feeds future invocations.
        st.session_state.memory.save_context(
            {"input": user_input}, {"output": response.content}
        )
        # Resets the stored default for the next rerun. NOTE(review): the
        # widget is unkeyed, so the visible box may keep its text until rerun.
        st.session_state.user_input = ""
    else:
        st.warning("Please enter a message before submitting.")

# Optional debug view of the raw memory contents.
if st.checkbox("Show Chat History"):
    chat_history = st.session_state.memory.load_memory_variables({})
    st.write(chat_history)