# Aido-Assistant — app.py
# (Hugging Face Space by dhanvanth183; commit cc812c1, verified)
from langchain_openai import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import (
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
ChatPromptTemplate,
MessagesPlaceholder
)
import streamlit as st
from utils import find_match, query_refiner, get_conversation_string
from dotenv import load_dotenv
import os
# Load environment variables (expects OPENAI_API_KEY in a .env file).
load_dotenv()

st.subheader("Aido-We assist Universities for recruiting International students")

# Seed per-session chat history. The assistant opens the conversation,
# so 'responses' always stays one entry ahead of 'requests'.
for _key, _default in (('responses', ["How can I assist you?"]), ('requests', [])):
    if _key not in st.session_state:
        st.session_state[_key] = _default

# Chat model used by the ConversationChain built below.
llm = ChatOpenAI(model_name="gpt-4o-mini", api_key=os.getenv('OPENAI_API_KEY'))

# Sliding-window memory: only the last 3 exchanges are replayed into the prompt.
if 'buffer_memory' not in st.session_state:
    st.session_state.buffer_memory = ConversationBufferWindowMemory(k=3, return_messages=True)
# System rules: answer strictly from the supplied context, otherwise say "I don't know".
_system_rules = """
You are a proficient International Student Analyst, specializing in analyzing global
student trends to assist universities in understanding enrollment patterns, financial concerns, and academic outcomes.
Use only the context provided to derive your responses.
Do not rely on external knowledge. If the given context is insufficient, respond ONLY with: 'I don't know'"""

# Chat prompt layout: system instructions, then the rolling history, then the user's turn.
prompt_template = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(template=_system_rules),
    MessagesPlaceholder(variable_name="history"),
    HumanMessagePromptTemplate.from_template(template="{input}"),
])

# Chain tying together the LLM, the windowed memory, and the prompt.
conversation = ConversationChain(memory=st.session_state.buffer_memory, prompt=prompt_template, llm=llm, verbose=True)
# Page layout: transcript container on top, input widgets below it.
response_container = st.container()  # rendered chat history
textcontainer = st.container()       # query box + submit button

with textcontainer:
    # Multi-line text area so longer questions stay readable.
    query = st.text_area(
        "Query: ",
        key="input",
        height=100,        # initial height; the widget can grow
        max_chars=None,    # no character limit
        help="Type your question here.",
        placeholder="What are some concerns students from Algeria have about studying in the USA?",
    )
    # Explicit submit button — the pipeline should not fire on every rerun.
    submit_button = st.button("Submit")
# Run the retrieval-augmented pipeline only on an explicit submit of a non-empty query.
if submit_button and query:
    with st.spinner("typing..."):
        # Rewrite the raw query in light of the conversation so far,
        # then retrieve matching context and answer strictly from it.
        history_text = get_conversation_string()
        refined_query = query_refiner(history_text, query)
        st.subheader("Refined Query:")
        st.write(refined_query)
        context = find_match(refined_query)
        response = conversation.predict(input=f"Context:\n {context} \n\n Query:\n{query}")
    # Persist this turn so the transcript survives Streamlit reruns.
    st.session_state.requests.append(query)
    st.session_state.responses.append(response)
# Render the transcript. 'responses' leads with the assistant's greeting and is
# one entry longer than 'requests', so each assistant message i is followed by
# user request i when one exists.
with response_container:
    # enumerate replaces the range(len(...)) anti-pattern; iterating an empty
    # list is a no-op, so the previous truthiness guard was redundant.
    for i, answer in enumerate(st.session_state['responses']):
        # Streamlit's native chat bubbles instead of the streamlit_chat package.
        with st.chat_message("assistant"):
            st.write(answer)
        if i < len(st.session_state['requests']):
            with st.chat_message("user"):
                st.write(st.session_state["requests"][i])