| import os |
| import streamlit as st |
| import json |
| import requests |
| import re |
| from bs4 import BeautifulSoup |
| from streamlit_option_menu import option_menu |
| from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce) |
| from PIL import Image |
|
|
| |
# Streamlit page setup — must be the first st.* call in the script.
# NOTE(review): page_icon looks mojibake-encoded (likely an emoji that was
# mis-decoded); confirm the intended character.
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="馃",
    layout="centered",
    initial_sidebar_state="expanded",
)


# Path of the JSON file where the system prompt is persisted between runs.
CONFIG_FILE = "config.json"
|
|
| |
def load_config():
    """Read the persisted settings from CONFIG_FILE.

    Returns the parsed dict; returns {} when the file is missing or when
    reading/parsing fails (the failure is surfaced to the UI via st.error).
    """
    if not os.path.exists(CONFIG_FILE):
        return {}
    try:
        with open(CONFIG_FILE, "r") as fh:
            return json.load(fh)
    except Exception as e:
        st.error(f"Error cargando configuraciones: {e}")
    return {}
|
|
| |
def save_config(config):
    """Persist *config* to CONFIG_FILE as pretty-printed JSON.

    Failures are reported through st.error rather than raised, so a broken
    disk write never crashes the app.
    """
    try:
        with open(CONFIG_FILE, "w") as fh:
            json.dump(config, fh, indent=4)
    except Exception as e:
        st.error(f"Error guardando configuraciones: {e}")
|
|
| |
# Load persisted settings once at startup and guarantee the system-prompt
# key exists both in the on-disk config and in this browser session.
config = load_config()
config.setdefault("system_prompt", "")

if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = config["system_prompt"]
|
|
def save_chat_history(history):
    """Serialize the Gemini chat *history* and stash it in session state.

    Each history entry exposes .role and .parts[0].text; only those two
    fields are kept, as a JSON string under 'cookie_chat_history'.
    """
    serializable = [
        {"role": msg.role, "text": msg.parts[0].text}
        for msg in history
    ]
    st.session_state.cookie_chat_history = json.dumps(serializable)
|
|
def load_chat_history():
    """Rebuild a Gemini chat session from the JSON history in session state.

    Starts a fresh chat, re-sends the system prompt (if any), then replays
    the saved messages, skipping model replies that merely echo the system
    prompt. Returns the restored chat object, or None when no history is
    stored or restoring it fails.
    """
    if 'cookie_chat_history' not in st.session_state:
        return None
    try:
        history = json.loads(st.session_state.cookie_chat_history)
        model = load_gemini_pro()
        chat = model.start_chat(history=[])
        system_prompt = st.session_state.system_prompt
        if system_prompt:
            chat.send_message(system_prompt)
        for message in history:
            # Bug fix: the previous check called startswith(system_prompt)
            # unconditionally, and startswith("") is always True — so with an
            # empty system prompt every model message was dropped. Only treat
            # a message as a prompt echo when the prompt is non-empty.
            is_prompt_echo = (
                message["role"] == "model"
                and bool(system_prompt)
                and message["text"].startswith(system_prompt)
            )
            if not is_prompt_echo:
                chat.send_message(message["text"])
        return chat
    except Exception as e:
        st.error(f"Error cargando el historial: {e}")
    return None
|
|
with st.sidebar:
    # Main navigation; `selected` decides which page renders below.
    selected = option_menu(
        "GD AI",
        ["System Prompt", "Chatbot", "Image Captioning"],
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        menu_icon="robot",
        default_index=0,
    )

    # Wipe both the serialized history and the live chat object so the
    # Chatbot page starts from a clean slate on its next render.
    if st.button("Borrar Historial"):
        for key in ('cookie_chat_history', 'chat_session'):
            st.session_state.pop(key, None)
        st.success("Historial borrado!")
|
|
def translate_role_to_streamlit(user_role):
    """Map a Gemini role name to the matching Streamlit chat role.

    Gemini labels assistant turns "model" while Streamlit expects
    "assistant"; every other role (e.g. "user") passes through unchanged.
    """
    return "assistant" if user_role == "model" else user_role
|
|
def extract_urls(text):
    """Return every http(s) URL found in *text*, in order of appearance.

    A URL is any "http://" or "https://" run of non-whitespace characters.
    """
    return re.findall(r"(https?://\S+)", text)
|
|
def fetch_url_content(url):
    """Download *url* and return the response body as text.

    On any request failure (connection error, timeout, non-2xx status) a
    Spanish error string describing the problem is returned instead of
    raising, so callers can substitute it inline.
    """
    try:
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        return f"Error al acceder a la URL '{url}': {e}"
    return resp.text
|
|
def process_url_content(content):
    """Strip HTML markup from *content* and return the visible plain text.

    Tags are removed with BeautifulSoup, joining text nodes with single
    spaces; on parse failure a Spanish error string is returned instead.
    """
    try:
        return BeautifulSoup(content, "html.parser").get_text(" ", strip=True)
    except Exception as e:
        return f"Error al procesar el contenido HTML: {e}"
|
|
def process_urls_in_prompt(prompt):
    """Replace every URL inside *prompt* with that page's plain text.

    Each URL is fetched; on success its HTML is reduced to text, on failure
    the fetch error message is substituted instead. The rest of the prompt
    is left untouched.
    """
    result = prompt
    for url in extract_urls(prompt):
        fetched = fetch_url_content(url)
        # fetch_url_content signals failure with an "Error..." string —
        # splice that in as-is; otherwise strip the HTML first.
        replacement = fetched if fetched.startswith("Error") else process_url_content(fetched)
        result = result.replace(url, replacement)
    return result
|
|
if selected == "System Prompt":
    # --- System Prompt page: edit and persist the AI's base instructions ---
    st.title("Configuraci贸n del System Prompt")
    prompt_draft = st.text_area(
        "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
        value=st.session_state.system_prompt,
        height=300,
        help="Escribe aqu铆 las instrucciones que definir谩n el comportamiento del AI. Puedes incluir URLs."
    )

    if st.button("Guardar System Prompt"):
        # Expand any embedded URLs into page text before persisting.
        expanded = process_urls_in_prompt(prompt_draft)
        st.session_state.system_prompt = expanded
        config["system_prompt"] = expanded
        save_config(config)
        # Drop the live chat so the next session picks up the new prompt.
        st.session_state.pop("chat_session", None)
        st.success("System Prompt actualizado con 茅xito!")
|
|
| |
|
|
| elif selected == "Chatbot": |
| model = load_gemini_pro() |
| if "chat_session" not in st.session_state: |
| loaded_chat = load_chat_history() |
| if loaded_chat: |
| st.session_state.chat_session = loaded_chat |
| else: |
| |
| st.session_state.chat_session = model.start_chat(history=[]) |
|
|
| st.title("Gnosticdev Chatbot") |
|
|
| |
| for message in st.session_state.chat_session.history: |
| with st.chat_message(translate_role_to_streamlit(message.role)): |
| st.markdown(message.parts[0].text) |
|
|
| |
| user_prompt = st.chat_input("Preg煤ntame algo...") |
| if user_prompt: |
| processed_user_prompt = process_urls_in_prompt(user_prompt) |
| st.chat_message("user").markdown(processed_user_prompt) |
| gemini_response = st.session_state.chat_session.send_message(processed_user_prompt) |
| with st.chat_message("assistant"): |
| st.markdown(gemini_response.text) |
| save_chat_history(st.session_state.chat_session.history) |
|
|
|
|
| elif selected == "Image Captioning": |
| st.title("Image Caption Generation馃摳") |
| upload_image = st.file_uploader("Sube una imagen...", type=["jpg", "jpeg", "png"]) |
| |
| if upload_image and st.button("Generar"): |
| try: |
| |
| image = Image.open(upload_image) |
| col1, col2 = st.columns(2) |
| with col1: |
| st.image(image, caption="Imagen subida", use_column_width=True) |
|
|
| |
| default_prompt = "Escribe un subt铆tulo para esta imagen" |
| caption = gemini_pro_vision_responce(default_prompt, image) |
| |
| |
| with col2: |
| st.info(caption) |
| except Exception as e: |
| st.error(f"Error procesando la imagen: {e}") |
|
|
| |
else:
    # Fallback for an unrecognized menu selection. In practice this branch
    # is unreachable, since option_menu always returns one of the three
    # options handled above.
    st.write("Selecciona una opci贸n en el men煤 para comenzar.")
|
|
|
|
|
|