Update app.py
app.py
CHANGED
@@ -1,28 +1,45 @@
-import
-import streamlit as st
+import streamlit as st
 import json
+import requests
 from streamlit_option_menu import option_menu
 from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce)
 from PIL import Image
 
-#
+# Page configuration
 st.set_page_config(
     page_title="GnosticDev AI",
     page_icon="馃",
-    layout="
+    layout="wide",  # make sure the layout is 'wide'
     initial_sidebar_state="expanded",
 )
 
+# Options menu in the left sidebar
+selected = option_menu(
+    menu_title="Menú",
+    options=["System Prompt", "Chatbot", "Image Captioning"],
+    icons=["gear", "chat", "camera"],
+    default_index=0,
+    orientation="vertical"  # make sure it stays vertical
+)
+
+# Initialize session state
+if 'cookie_chat_history' not in st.session_state:
+    st.session_state.cookie_chat_history = json.dumps([])
+
+if 'cookie_urls' not in st.session_state:
+    st.session_state.cookie_urls = []
+
+if 'system_prompt' not in st.session_state:
+    st.session_state.system_prompt = ""
+
 # Function to save the chat history in cookies
 def save_chat_history(history):
-    # Convert the history to a serializable format
     serializable_history = []
     for message in history:
         serializable_history.append({
             "role": message.role,
             "text": message.parts[0].text
         })
-    # Save to cookie
     st.session_state.cookie_chat_history = json.dumps(serializable_history)
 
 # Function to load the history from cookies
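A note on this hunk: the option menu moves out of the sidebar, and three session keys are seeded before any page logic runs. Despite the cookie_ prefix, these values live in st.session_state, which is per-session server state rather than a browser cookie, so they are lost on a full page reload. The "not in st.session_state" guards are what keep the values alive across Streamlit's reruns; a minimal sketch of that pattern (the counter key is hypothetical, not from this commit):

import streamlit as st

# Streamlit re-executes the whole script on every widget interaction, so
# an unguarded assignment would reset the value on each rerun.
if "counter" not in st.session_state:
    st.session_state.counter = 0  # runs only on the first execution

if st.button("Increment"):
    st.session_state.counter += 1  # survives reruns within the session

st.write(f"Clicks this session: {st.session_state.counter}")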
@@ -32,7 +49,6 @@ def load_chat_history():
         history = json.loads(st.session_state.cookie_chat_history)
         model = load_gemini_pro()
         chat = model.start_chat(history=[])
-        # Rebuild the history
         if st.session_state.system_prompt:
             chat.send_message(st.session_state.system_prompt)
         for message in history:
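The hunk ends before the replay loop, but the surrounding code suggests each stored message is re-sent through chat.send_message, which costs one API round trip per saved turn on every restore. Assuming load_gemini_pro returns a google.generativeai GenerativeModel (its source is not part of this diff), the serialized dicts could instead be handed straight to start_chat, which rebuilds the context without network calls. A sketch under that assumption:

import json
import streamlit as st
from gemini_utility import load_gemini_pro  # assumed to return a GenerativeModel

def restore_chat_locally():
    # google-generativeai accepts plain {"role", "parts"} dicts as history.
    saved = json.loads(st.session_state.get("cookie_chat_history", "[]"))
    history = [{"role": m["role"], "parts": [m["text"]]} for m in saved]
    model = load_gemini_pro()
    return model.start_chat(history=history)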
@@ -50,32 +66,15 @@ def download_chat_history(history):
         chat_text += f"{message.role}: {message.parts[0].text}\n"
     return chat_text
 
-#
-
-
-
-
-
-
-
-        icons=['gear', 'chat-dots-fill', 'image-fill'],
-        default_index=0
-    )
-
-    # Button to clear the history
-    if st.button("Borrar Historial"):
-        if 'cookie_chat_history' in st.session_state:
-            del st.session_state.cookie_chat_history
-        if 'chat_session' in st.session_state:
-            del st.session_state.chat_session
-        st.success("Historial borrado!")
-
-def translate_role_to_streamlit(user_role):
-    if user_role == "model":
-        return "assistant"
-    else:
-        return user_role
+# Function to fetch content from URLs
+def fetch_url_content(url):
+    try:
+        response = requests.get(url)
+        response.raise_for_status()  # raise if the response is an error code
+        return response.text  # return the page content
+    except requests.RequestException as e:
+        st.error(f"Error al acceder a {url}: {e}")
+        return None
 
 if selected == "System Prompt":
     st.title("Configuración del System Prompt")
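Two review notes on this hunk. First, it deletes translate_role_to_streamlit, yet the Chatbot section later in the file still calls it, so the function presumably remains defined in an unshown part of the new file; worth verifying before merging. Second, fetch_url_content calls requests.get with no timeout, so one unresponsive URL can hang a chat turn indefinitely. A hardened sketch (the timeout and size cap are illustrative assumptions, not values from the commit):

import requests

def fetch_url_content_safe(url, timeout=10, max_chars=20_000):
    try:
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()  # surface HTTP 4xx/5xx as exceptions
        return response.text[:max_chars]  # cap huge pages
    except requests.RequestException:
        return None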
@@ -87,16 +86,28 @@ if selected == "System Prompt":
         help="Escribe aquí las instrucciones que definirán el comportamiento del AI"
     )
 
-
+    # Field to enter URLs
+    urls_input = st.text_area(
+        "Ingresa URLs de información y documentos (separadas por comas)",
+        value=", ".join(st.session_state.cookie_urls),
+        height=100,
+        help="Escribe aquí las URLs que el AI puede usar como referencia, separadas por comas."
+    )
+
+    if st.button("Guardar System Prompt y URLs"):
         st.session_state.system_prompt = new_system_prompt
-        st.session_state.
+        st.session_state.cookie_urls = [url.strip() for url in urls_input.split(",") if url.strip()]  # save the URLs as a list
         if "chat_session" in st.session_state:
             del st.session_state.chat_session
-        st.success("System Prompt
+        st.success("System Prompt y URLs actualizados con éxito!")
 
     if st.session_state.system_prompt:
         st.markdown("### System Prompt Actual:")
         st.info(st.session_state.system_prompt)
+
+    if st.session_state.cookie_urls:
+        st.markdown("### URLs Guardadas:")
+        st.info(", ".join(st.session_state.cookie_urls))
 
 elif selected == "Chatbot":
     model = load_gemini_pro()
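The comprehension this hunk adds drops empty fragments from stray commas, so the round trip through value=", ".join(...) is stable. A self-contained check of that parsing (the sample URLs are hypothetical):

# Same comprehension as in the diff, exercised on a made-up input.
urls_input = "https://example.com/a, , https://example.com/b,"
urls = [url.strip() for url in urls_input.split(",") if url.strip()]
assert urls == ["https://example.com/a", "https://example.com/b"]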
@@ -122,10 +133,28 @@ elif selected == "Chatbot":
         with st.chat_message(translate_role_to_streamlit(message.role)):
             st.markdown(message.parts[0].text)
 
-
+    # Input field
     user_prompt = st.chat_input("Preguntame algo...")
     if user_prompt:
         st.chat_message("user").markdown(user_prompt)
+
+        # Get the saved URLs
+        urls = st.session_state.get('cookie_urls', [])
+        fetched_contents = []
+
+        if urls:
+            # Fetch each URL to gather extra information
+            for url in urls:
+                content = fetch_url_content(url)
+                if content:
+                    fetched_contents.append(content)
+
+            # Here you can process the content fetched from the URLs,
+            # for example summarize it or extract the relevant parts
+            combined_content = "\n\n".join(fetched_contents)
+            user_prompt += f"\n\nInformación adicional de URLs:\n{combined_content}"
+
+        # Send the user's message to the model
         gemini_response = st.session_state.chat_session.send_message(user_prompt)
         with st.chat_message("assistant"):
             st.markdown(gemini_response.text)
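Note that this hunk re-fetches every saved URL on every chat turn and appends the raw page text (HTML tags included) to the user's message, so a few long pages can dwarf the actual question and eventually overflow the model's context window. One cheap mitigation is a character budget before the injection; a sketch with an assumed limit:

# max_chars is an assumed budget, not a value from the commit.
def build_prompt(user_prompt, fetched_contents, max_chars=12_000):
    combined = "\n\n".join(fetched_contents)[:max_chars]
    if combined:
        return f"{user_prompt}\n\nInformación adicional de URLs:\n{combined}"
    return user_prompt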
@@ -155,5 +184,4 @@ elif selected == "Image Captioning":
         default_prompt = "Write a caption for this image"
         caption = gemini_pro_vision_responce(default_prompt, image)
         with col2:
-        st.info(caption)
-
+            st.info(caption)