# NOTE: non-code residue from a web file viewer (Space status, file size,
# git blob hashes, and a line-number gutter) was removed here so that this
# module parses as Python.
import streamlit as st
import pandas as pd
from langchain_core.messages import AIMessage, HumanMessage
from azure_openai import qt, get_response
from retriver import search_and_reconstruct
def read_file(file):
    """
    Read a markdown asset from the assets/ directory.

    :param file: Base name of the markdown file (without the ``.md`` extension).
    :return: The file's content as a string, or None if the file is missing
        or unreadable (an error message is printed in that case).
    """
    fp = f"assets/{file}.md"
    try:
        # Use a distinct handle name so the `file` parameter is not shadowed.
        with open(fp, 'r', encoding='utf-8') as fh:
            return fh.read()
    except FileNotFoundError:
        print(f"The file at {fp} was not found.")
    except IOError:
        print(f"An error occurred while reading the file at {fp}.")
    # Explicit: any failure above yields None rather than falling off the end.
    return None
# Prompt templates loaded from assets/*.md (None if a file is missing).
QTESystemMessage = read_file("QTESystemMessage")  # system prompt for query transformation
RAGSystemMessage = read_file("RAGSystemMessage")  # system prompt for the RAG answer step
RAGUserMessage = read_file("RAGUserMessage")      # user-message template for the RAG answer step
# Retrieval settings.
k = 5               # number of nearest-neighbour chunks to search for
pagesReturned = 3   # number of reconstructed pages passed to the model
# LLM sampling settings: (temp1, tokens1) for query transformation,
# (temp2, tokens2) for the final RAG response.
temp1 = 0.5
tokens1 = 200
temp2 = 0.6
tokens2 = 2000
asset = "GSKGlossary"  # knowledge-base asset name forwarded to the model calls
# app config — set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="Medical Sales Toolbox", page_icon="🤖")
st.title("Medical Sales Toolbox :toolbox:")
# session state — seed the conversation once; Streamlit reruns the whole
# script on every interaction, so guard against re-initialisation.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = [
        AIMessage(content="Hello, I am the Medical Sales Assistant. How can I help you?"),
    ]
# Replay the stored conversation so the transcript persists across reruns.
for msg in st.session_state.chat_history:
    if isinstance(msg, AIMessage):
        role = "AI"
    elif isinstance(msg, HumanMessage):
        role = "Human"
    else:
        # Unknown message types are skipped, matching the original behaviour.
        continue
    with st.chat_message(role):
        st.write(msg.content)
# user input — runs once per rerun when the user submits a message.
user_query = st.chat_input("Type your message here...")
if user_query:  # chat_input returns None (no submit) or the typed string
    st.session_state.chat_history.append(HumanMessage(content=user_query))
    with st.chat_message("Human"):
        st.markdown(user_query)
    # Step 1: rewrite the raw query into a standalone, contextualised query.
    qte = qt(QTESystemMessage, st.session_state.chat_history, temp1, tokens1, asset)
    st.text("Contextualised Query")
    st.caption(qte)
    # Step 2: retrieve grounding knowledge for the contextualised query.
    knowledge = search_and_reconstruct(qte, k, pagesReturned)
    if knowledge:
        # Prepare the data for the results table.
        table_data = {
            "Title": [entry['Title'] for entry in knowledge],
            # Convert similarity to a whole-number percentage string.
            "Score (%)": [f"{int(entry.get('Score', 0) * 100)}%" for entry in knowledge],
            "Page": [entry['PageNumber'] for entry in knowledge],
        }
        df = pd.DataFrame(table_data)
        # Mean raw similarity across all retrieved entries.
        mean_score = sum(entry.get('Score', 0) for entry in knowledge) / len(knowledge)
        st.text("Knowledge Base Results")
        st.text(f"Average Accuracy Score: {mean_score * 100:.2f}%")
        st.dataframe(df)
    else:
        st.write("No relevant knowledge base results found.")
    # Step 3: stream the grounded answer and persist it in the history.
    with st.chat_message("AI"):
        response = st.write_stream(get_response(st.session_state.chat_history, qte, knowledge, temp2, tokens2, RAGSystemMessage, RAGUserMessage, asset))
    st.session_state.chat_history.append(AIMessage(content=response))