|
|
import ast
import json
import os
import re

import pandas as pd
import streamlit as st
from dotenv import load_dotenv
from langchain_core.messages import AIMessage, HumanMessage

import actions
import chatFromLangchain
from chatFromLangchain import LLm
|
|
|
|
|
|
|
|
# Load environment variables from a local .env file (expects AZURE_OPENAI_KEY).
load_dotenv()

# API key for the LLM backend; may be overridden by the Dev Mode form below.
token = os.getenv('AZURE_OPENAI_KEY')

# Defaults shown in the Dev Mode settings form.
# NOTE(review): llm_model and temperature are never passed to LLm() below —
# confirm whether chatFromLangchain picks them up some other way.
llm_model = 'gpt-3.5-turbo-1106'

temperature = 1.0
|
|
|
|
|
|
|
|
# Page header: logo ("logo.png" must sit next to this script) and app title.
st.image("logo.png")

st.title("Metaminds")
|
|
|
|
|
|
|
|
# Inline CSS for the custom chat transcript: human bubbles align left (green),
# AI bubbles align right (grey). Injected once per run via unsafe_allow_html.
st.markdown("""
<style>
.chat-container {
    display: flex;
    flex-direction: column;
    gap: 10px; /* Space between messages */
    padding: 10px;
}
.chat-bubble {
    max-width: 70%;
    padding: 10px;
    border-radius: 15px;
    margin: 5px 0; /* Space around each bubble */
    line-height: 1.5;
    word-wrap: break-word; /* Ensure long words wrap */
}
.human-message-container {
    display: flex;
    justify-content: flex-start; /* Align human messages to the left */
}
.ai-message-container {
    display: flex;
    justify-content: flex-end; /* Align AI messages to the right */
}
.human-message {
    background-color: #DCF8C6;
    color: black;
}
.ai-message {
    background-color: #E3E3E3;
    color: black;
}
</style>
""", unsafe_allow_html=True)
|
|
|
|
|
|
|
|
# Developer-only controls: reconfigure the LLM and inspect the raw message log.
dev_mode = st.checkbox("Dev Mode")

if dev_mode:
    with st.expander("Setting the LLM"):
        st.markdown("This page is used to have a chat with the uploaded documents")
        with st.form("setting"):
            row_1 = st.columns(3)
            with row_1[0]:
                # Fall back to the .env token when the field is left blank.
                # Previously the empty text-input value clobbered the env
                # token, so merely enabling Dev Mode disabled the chatbot.
                token = st.text_input("Open AI Key", type="password") or token
            with row_1[1]:
                llm_model = st.text_input("LLM model", value="openai-gpt-4o")
            with row_1[2]:
                temperature = st.number_input("Temperature", value=1.0, step=0.1)
            # NOTE(review): the submit value is never checked, and llm_model /
            # temperature are never forwarded to LLm() — confirm intent.
            create_chatbot = st.form_submit_button("Create chatbot")

    with st.expander("Chat History LLm"):
        # Placeholder refreshed with the raw message list further down.
        chathistplace = st.empty()
|
|
|
|
|
|
|
|
# Initialise per-session state on first run.
if "conversation" not in st.session_state:
    st.session_state.conversation = None

# Create the chatbot once per session instead of on every rerun: Streamlit
# re-executes the whole script per interaction, so rebuilding LLm() each time
# would discard any state the wrapper keeps between turns.
if token and st.session_state.conversation is None:
    st.session_state.conversation = LLm()

# Rendered chat transcript: HumanMessage / AIMessage entries, plus raw
# DataFrames for tool results (see the render loop below).
if "history" not in st.session_state:
    st.session_state.history = []
|
|
|
|
|
|
|
|
# OpenAI-style message log shown in the Dev Mode inspector and appended to by
# the tool-action branch below. The system prompt teaches a ReAct-like
# protocol: the model requests a tool call by emitting a fenced
# {'action': ..., 'input': ...} dict, which getAction() extracts.
# NOTE(review): prompt typos ("Ragrding", "Reshift", "capabilites", "as:")
# are preserved verbatim — this text is runtime behaviour sent to the model.
# NOTE(review): it is unclear whether LLm() actually consumes this list;
# confirm against chatFromLangchain.
if "messages" not in st.session_state:
    st.session_state.messages = [{'role': 'system', 'content': '''You are a Helpful chatbot also a truthful one who will be chatting with user Ragrding some connections to the database .
You have access to these tools
tableFindinRedshift, this tools find if the table is present in the Redshift database, this tools takes only one input 'tableName': str, and returns either None or a list representing multiple tables present in table or a list of tuples where each tuple representing column name and the column type
tableFindinImpala, this tools find if the table is present in the Impala database, this tools takes only one input 'tableName': str, and returns either None or a list representing multiple tables present in table or a list of tuples where each tuple representing column name and the column type

if a user have not mentioned the Database connection in his connection confirm it from the user.

Here is as example
user: Hi
as: Hi I am an assistant who can help you with details of any table in Datalake.
user: Ok tell about the Mara table in Redshift
assistant: ```{'action': 'tableFindinRedshift', 'input': 'Mara'}```
function: ['Mara1', 'Mara2', 'Maraxyz']
assistant: There Are multiple table present in the Reshift similar to Mara please Provide which one to choose
user: Mara1
assistant: ```{'action': 'tableFindinRedshift', 'input': 'Mara1'}```
function: [('client_id', int), ('purchase', int)]
assistant: Mara1 has 2 columns client_id: int, purchase: int
user: Thank you now write a Query which will give me all the clients which has purchase value more than 500
assistant: Certainly ```SELECT client_id FROM Mara1 WHERE purchase > 500```

Refuse to answer any query from user where it deviates from the above mentioned capabilites
Example:
user: Tell me about the weather.
assistant: Hi I am still MetaMinds 1.0 maybe I can answer your questions better when I am MetaMinds 2.0 I can only answer related to Datalakes.
'''}]
|
|
|
|
|
|
|
|
def getAction(text):
    """Extract a tool-call dict from an LLM reply.

    The system prompt asks the model to emit tool calls as a Python-literal
    dict wrapped in triple backticks, e.g.
    ```{'action': 'tableFindinRedshift', 'input': 'Mara'}```.

    Args:
        text: Raw LLM reply text.

    Returns:
        The parsed dict when the first fenced span is a valid Python literal,
        otherwise None (no fenced span, or the span holds something else —
        the prompt also uses fences for SQL snippets).
    """
    # re.DOTALL lets the fence contents span multiple lines.
    match = re.search(r"```(.*?)```", text, re.DOTALL)
    if not match:
        return None
    try:
        # ast.literal_eval replaces eval(): it only accepts literals, so
        # model output can never execute arbitrary code, and fenced SQL
        # (e.g. ```SELECT ...```) no longer crashes the app — it simply
        # fails to parse and is treated as "no action".
        return ast.literal_eval(match.group(1))
    except (ValueError, SyntaxError):
        return None
|
|
|
|
|
|
|
|
# Re-render the whole transcript on every rerun (Streamlit re-executes the
# script per interaction).
st.markdown('<div class="chat-container">', unsafe_allow_html=True)
for message in st.session_state.history:
    # Tool results are stored as raw DataFrames and get a native table widget.
    if isinstance(message, pd.DataFrame):
        st.dataframe(message)
        continue
    # LangChain messages serialise to JSON. json.loads replaces the previous
    # eval(..., {'false': False, 'true': True}) hack, which executed data as
    # code and raised NameError on any payload containing `null`.
    message = json.loads(message.json())
    role = message["type"]
    content = message["content"]
    if role == "human":
        st.markdown(f'<div class="human-message-container"><div class="chat-bubble human-message">{content}</div></div>', unsafe_allow_html=True)
    else:
        st.markdown(f'<div class="ai-message-container"><div class="chat-bubble ai-message">{content}</div></div>', unsafe_allow_html=True)
st.markdown('</div>', unsafe_allow_html=True)
|
|
|
|
|
# Show the raw OpenAI-style message log in the Dev Mode expander placeholder.
if dev_mode:
    chathistplace.json(st.session_state.messages)
|
|
|
|
|
|
|
|
|
|
|
# Main chat turn: read a question, echo it, ask the LLM, optionally run a
# requested tool, then render and persist the answer.
if question := st.chat_input("Ask a question"):
    st.session_state.history.append(HumanMessage(content=question))
    st.markdown(f'<div class="human-message-container"><div class="chat-bubble human-message">{question}</div></div>', unsafe_allow_html=True)

    # NOTE(review): if no token was provided, conversation is still None and
    # this raises AttributeError — consider a user-facing guard.
    answer = st.session_state.conversation.generate_answer(question)

    # Parse the reply once for a fenced tool call (was evaluated twice).
    action = getAction(answer)
    if action is not None:
        print('\n\n\n\n------------------------ Performing Action -----------------------')
        print(action, '\n\n\n')

        if action['action'] == 'tableFindinRedshift':
            # Fixed typo: "Searcing" -> "Searching".
            action_statement = f"Searching the Table {action['input'].lower()} in Redshift Hang on a sec "
            st.markdown(f'<div class="ai-message-container"><div class="chat-bubble ai-message">{action_statement}</div></div>', unsafe_allow_html=True)
            st.session_state.history.append(AIMessage(content=action_statement))
            # NOTE(review): by the handling below, this appears to return a
            # DataFrame (with a 'schema' column for multiple matches, or a
            # 'datatype' column for one table's columns) or a str — confirm
            # against actions.tableFindinRedshift.
            answer = actions.tableFindinRedshift(action['input'].lower())

        # Multiple matching tables: feed the candidates back into the message
        # log and ask the user to pick one.
        if isinstance(answer, pd.DataFrame) and 'schema' in answer.columns:
            table_list = [f'{row["schema"]}.{row["table_name"]}' for _, row in answer.iterrows()]
            st.session_state.messages.append(
                {'role': 'assistant', 'content': str(table_list)}
            )
            st.session_state.messages.append(
                {'role': 'assistant', 'content': f'These are all the Tables I could find in Redshift that has name {action["input"].lower()} in it please select among these'}
            )
            st.markdown(f'''<div class="ai-message-container"><div class="chat-bubble ai-message">These are all the Tables I could find in Redshift that has name {action["input"].lower()} in it please select among these</div></div>''', unsafe_allow_html=True)
            st.session_state.history.append(
                AIMessage(content=f'These are all the Tables I could find in Redshift that has name {action["input"].lower()} in it please select among these')
            )

        # Exact table match: report its column names and types.
        if isinstance(answer, pd.DataFrame) and 'datatype' in answer.columns:
            column_detail = [[row['columnname'], row['datatype']] for _, row in answer.iterrows()]
            st.session_state.messages.append(
                {'role': 'assistant', 'content': str(column_detail)}
            )
            st.session_state.messages.append(
                {'role': 'assistant', 'content': f'Here is the column details of Table {action["input"].lower()} from Redshift'}
            )
            st.markdown(f'''<div class="ai-message-container"><div class="chat-bubble ai-message">Here is the column details of Table {action["input"].lower()} from Redshift</div></div>''', unsafe_allow_html=True)
            st.session_state.history.append(
                AIMessage(content=f'Here is the column details of Table {action["input"].lower()} from Redshift')
            )

        # Render the tool result itself: text as a bubble, tables natively.
        if isinstance(answer, str):
            st.markdown(f'<div class="ai-message-container"><div class="chat-bubble ai-message">{answer}</div></div>', unsafe_allow_html=True)
        if isinstance(answer, pd.DataFrame):
            st.dataframe(answer)

    else:
        # No tool requested: show the model's reply directly.
        st.markdown(f'<div class="ai-message-container"><div class="chat-bubble ai-message">{answer}</div></div>', unsafe_allow_html=True)

    # Persist the final answer; DataFrames are stored as-is so the transcript
    # loop above can render them with st.dataframe on the next rerun.
    if isinstance(answer, pd.DataFrame):
        st.session_state.history.append(answer)
    else:
        st.session_state.history.append(AIMessage(content=answer))

    # Refresh the Dev Mode raw-history view with this turn's additions.
    if dev_mode:
        chathistplace.json(st.session_state.messages)