Ilyas KHIAT
committed on
Commit
·
2eff77d
1
Parent(s):
28d07c2
execute prompts
Browse files- app.py +1 -1
- chat_te.py +1 -1
- chat_with_pps.py +24 -4
- prompt.py +12 -4
app.py
CHANGED
|
@@ -20,7 +20,7 @@ from collaborons import display_company_selection_for_materiality,display_materi
|
|
| 20 |
from documentations import display_documentation
|
| 21 |
from RAG_PDF_WEB import rag_pdf_web
|
| 22 |
from prompt import get_prompts_list,prompt_execution,execute_prompt
|
| 23 |
-
from chat_with_pps import display_chat
|
| 24 |
from high_chart import test_chart
|
| 25 |
from chat_te import display_chat_te
|
| 26 |
|
|
|
|
| 20 |
from documentations import display_documentation
|
| 21 |
from RAG_PDF_WEB import rag_pdf_web
|
| 22 |
from prompt import get_prompts_list,prompt_execution,execute_prompt
|
| 23 |
+
from chat_with_pps import display_chat,display_container_chat
|
| 24 |
from high_chart import test_chart
|
| 25 |
from chat_te import display_chat_te
|
| 26 |
|
chat_te.py
CHANGED
|
@@ -112,7 +112,7 @@ def display_chat_te():
|
|
| 112 |
# if col1.button("chatbot"):
|
| 113 |
# vote("chatbot")
|
| 114 |
# with col2:
|
| 115 |
-
user_query = st.chat_input("
|
| 116 |
if user_query is not None and user_query != "":
|
| 117 |
st.session_state.chat_history_te.append(HumanMessage(content=user_query))
|
| 118 |
|
|
|
|
| 112 |
# if col1.button("chatbot"):
|
| 113 |
# vote("chatbot")
|
| 114 |
# with col2:
|
| 115 |
+
user_query = st.chat_input(placeholder="c'est quoi la transition écologique ?")
|
| 116 |
if user_query is not None and user_query != "":
|
| 117 |
st.session_state.chat_history_te.append(HumanMessage(content=user_query))
|
| 118 |
|
chat_with_pps.py
CHANGED
|
@@ -58,6 +58,12 @@ def display_chart():
|
|
| 58 |
plot = construct_plot()
|
| 59 |
st.plotly_chart(plot)
|
| 60 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 61 |
|
| 62 |
def display_chat():
|
| 63 |
# app config
|
|
@@ -68,7 +74,8 @@ def display_chat():
|
|
| 68 |
st.session_state.chat_history = [
|
| 69 |
AIMessage(content="Salut, voici votre cartographie des parties prenantes. Que puis-je faire pour vous?"),
|
| 70 |
]
|
| 71 |
-
|
|
|
|
| 72 |
|
| 73 |
|
| 74 |
# conversation
|
|
@@ -81,6 +88,12 @@ def display_chat():
|
|
| 81 |
elif isinstance(message, HumanMessage):
|
| 82 |
with st.chat_message("Moi"):
|
| 83 |
st.write(message.content)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
|
| 85 |
if "pp_grouped" not in st.session_state or st.session_state['pp_grouped'] is None or len(st.session_state['pp_grouped']) == 0:
|
| 86 |
return None
|
|
@@ -88,14 +101,21 @@ def display_chat():
|
|
| 88 |
user_query = st.chat_input("Par ici...")
|
| 89 |
if user_query is not None and user_query != "":
|
| 90 |
st.session_state.chat_history.append(HumanMessage(content=user_query))
|
| 91 |
-
|
| 92 |
with st.chat_message("Moi"):
|
| 93 |
st.markdown(user_query)
|
| 94 |
-
|
| 95 |
with st.chat_message("AI"):
|
| 96 |
-
|
| 97 |
response = st.write_stream(get_response(user_query, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque'])))
|
| 98 |
if "cartographie des parties prenantes" in message.content:
|
| 99 |
display_chart()
|
| 100 |
|
| 101 |
st.session_state.chat_history.append(AIMessage(content=response))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
plot = construct_plot()
|
| 59 |
st.plotly_chart(plot)
|
| 60 |
|
| 61 |
+
@st.experimental_dialog("Cast your vote")
def vote(item):
    """Open a modal dialog asking why *item* is the user's favorite.

    The dialog shows a free-text input and a "Submit" button; clicking
    "Submit" triggers ``st.rerun()``, which closes the dialog.

    NOTE(review): ``st.experimental_dialog`` is deprecated in recent
    Streamlit releases in favor of ``st.dialog`` — confirm the pinned
    Streamlit version before migrating.
    """
    st.write(f"Why is {item} your favorite?")
    # The widget call must remain so the input box renders; the original
    # bound its value to an unused local (`reason`) that was never read,
    # so the binding is dropped.
    st.text_input("Because...")
    if st.button("Submit"):
        st.rerun()
|
| 67 |
|
| 68 |
def display_chat():
|
| 69 |
# app config
|
|
|
|
| 74 |
st.session_state.chat_history = [
|
| 75 |
AIMessage(content="Salut, voici votre cartographie des parties prenantes. Que puis-je faire pour vous?"),
|
| 76 |
]
|
| 77 |
+
if "user_query" not in st.session_state:
|
| 78 |
+
st.session_state.user_query = "test"
|
| 79 |
|
| 80 |
|
| 81 |
# conversation
|
|
|
|
| 88 |
elif isinstance(message, HumanMessage):
|
| 89 |
with st.chat_message("Moi"):
|
| 90 |
st.write(message.content)
|
| 91 |
+
|
| 92 |
+
#check if the last message is from the user , that means execute button has been clicked
|
| 93 |
+
last_message = st.session_state.chat_history[-1]
|
| 94 |
+
if isinstance(last_message, HumanMessage):
|
| 95 |
+
response = st.write_stream(get_response(last_message.content, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque'])))
|
| 96 |
+
st.session_state.chat_history.append(AIMessage(content=response))
|
| 97 |
|
| 98 |
if "pp_grouped" not in st.session_state or st.session_state['pp_grouped'] is None or len(st.session_state['pp_grouped']) == 0:
|
| 99 |
return None
|
|
|
|
| 101 |
user_query = st.chat_input("Par ici...")
|
| 102 |
if user_query is not None and user_query != "":
|
| 103 |
st.session_state.chat_history.append(HumanMessage(content=user_query))
|
|
|
|
| 104 |
with st.chat_message("Moi"):
|
| 105 |
st.markdown(user_query)
|
|
|
|
| 106 |
with st.chat_message("AI"):
|
|
|
|
| 107 |
response = st.write_stream(get_response(user_query, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque'])))
|
| 108 |
if "cartographie des parties prenantes" in message.content:
|
| 109 |
display_chart()
|
| 110 |
|
| 111 |
st.session_state.chat_history.append(AIMessage(content=response))
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def display_container_chat():
    """Lay out the chat page: a narrow column with the prompts button
    next to a wide column holding the main chat conversation."""
    st.markdown(":point_left: Cliquez pour discuter avec le Chatbot", unsafe_allow_html=True)
    # 1:10 split — small button gutter on the left, chat on the right.
    button_col, chat_col = st.columns([1, 10])
    with button_col:
        if st.button("display_prompts"):
            vote("D")
    with chat_col:
        display_chat()
|
prompt.py
CHANGED
|
@@ -1,6 +1,7 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
import pandas as pd
|
| 3 |
from data_manager_bziiit import get_prompts
|
|
|
|
| 4 |
from session import get_rag
|
| 5 |
|
| 6 |
prompts = []
|
|
@@ -31,14 +32,19 @@ def get_prompts_list():
|
|
| 31 |
for name, group in grouped:
|
| 32 |
st.subheader(name) # Display the context name as a subheader
|
| 33 |
for i, row in group.iterrows():
|
| 34 |
-
col1, col2, col3, col4 = st.columns((1, 2, 2,
|
| 35 |
col1.write(num) # index
|
| 36 |
col2.write(row['name']) # name
|
| 37 |
col3.write(row['text']) # text
|
| 38 |
num += 1
|
| 39 |
|
| 40 |
button_phold = col4.empty() # create a placeholder
|
| 41 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 42 |
if do_action:
|
| 43 |
prompt_html = prompts[i]['text'].replace('\n', '<br>')
|
| 44 |
prompt_metadata = extract_metadata(prompts[i])
|
|
@@ -46,8 +52,10 @@ def get_prompts_list():
|
|
| 46 |
for text in prompt_metadata:
|
| 47 |
prompt_html = prompt_html.replace(f"{text}", f"<span style='font-weight:bold'>{text}</span>")
|
| 48 |
|
| 49 |
-
st.html(prompt_html)
|
| 50 |
-
|
|
|
|
|
|
|
| 51 |
else:
|
| 52 |
st.write("Data does not contain 'name', 'context', and 'text' fields.")
|
| 53 |
else:
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
import pandas as pd
|
| 3 |
from data_manager_bziiit import get_prompts
|
| 4 |
+
from langchain_core.messages import AIMessage, HumanMessage
|
| 5 |
from session import get_rag
|
| 6 |
|
| 7 |
prompts = []
|
|
|
|
| 32 |
for name, group in grouped:
|
| 33 |
st.subheader(name) # Display the context name as a subheader
|
| 34 |
for i, row in group.iterrows():
|
| 35 |
+
col1, col2, col3, col4 = st.columns((1, 2, 2, 2))
|
| 36 |
col1.write(num) # index
|
| 37 |
col2.write(row['name']) # name
|
| 38 |
col3.write(row['text']) # text
|
| 39 |
num += 1
|
| 40 |
|
| 41 |
button_phold = col4.empty() # create a placeholder
|
| 42 |
+
but1, but2 = button_phold.columns(2)
|
| 43 |
+
|
| 44 |
+
do_action = but1.button('Voir plus', key=i)
|
| 45 |
+
execute = but2.button('Executer', key=f"e{i}")
|
| 46 |
+
if execute:
|
| 47 |
+
st.session_state.chat_history.append(HumanMessage(content=prompts[i]['text']))
|
| 48 |
if do_action:
|
| 49 |
prompt_html = prompts[i]['text'].replace('\n', '<br>')
|
| 50 |
prompt_metadata = extract_metadata(prompts[i])
|
|
|
|
| 52 |
for text in prompt_metadata:
|
| 53 |
prompt_html = prompt_html.replace(f"{text}", f"<span style='font-weight:bold'>{text}</span>")
|
| 54 |
|
| 55 |
+
st.html(prompt_html)
|
| 56 |
+
i
|
| 57 |
+
# Display the full text
|
| 58 |
+
# remove button
|
| 59 |
else:
|
| 60 |
st.write("Data does not contain 'name', 'context', and 'text' fields.")
|
| 61 |
else:
|