| | import os |
| | import streamlit as st |
| | import json |
| | import requests |
| | import re |
| | from bs4 import BeautifulSoup |
| | from streamlit_option_menu import option_menu |
| | from gemini_utility import (load_gemini_pro, gemini_pro_vision_responce) |
| | from PIL import Image |
| |
|
| | |
# Path (relative to the working directory) where the system prompt is persisted.
SYSTEM_PROMPT_FILE = "system_prompt.json"
| |
|
| | |
def load_system_prompt(path=None):
    """Load the persisted system prompt from disk.

    Args:
        path: Optional override for the JSON file location; defaults to
            SYSTEM_PROMPT_FILE.

    Returns:
        The stored "system_prompt" string, or "" when the file is missing,
        unreadable, or not valid JSON.
    """
    if path is None:
        path = SYSTEM_PROMPT_FILE
    if os.path.exists(path):
        try:
            with open(path, 'r', encoding='utf-8') as f:
                return json.load(f).get("system_prompt", "")
        except (OSError, json.JSONDecodeError):
            # A corrupt or unreadable prompt file must not crash app startup.
            return ""
    return ""
| |
|
| | |
def save_system_prompt(system_prompt, path=None):
    """Persist the system prompt to disk as JSON.

    Args:
        system_prompt: The prompt text to store.
        path: Optional override for the JSON file location; defaults to
            SYSTEM_PROMPT_FILE.
    """
    if path is None:
        path = SYSTEM_PROMPT_FILE
    # utf-8 + ensure_ascii=False keeps accented/Spanish text readable on disk
    # instead of \uXXXX escapes.
    with open(path, 'w', encoding='utf-8') as f:
        json.dump({"system_prompt": system_prompt}, f, ensure_ascii=False)
| |
|
| | |
# Streamlit page chrome; set_page_config must run before any other st.* call.
st.set_page_config(
    page_title="GnosticDev AI",
    page_icon="馃",  # NOTE(review): looks like a mojibake'd emoji — confirm intended glyph
    layout="centered",
    initial_sidebar_state="expanded",
)

# Seed the session with the persisted system prompt on first load.
if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = load_system_prompt()
| |
|
| | |
def save_chat_history(history):
    """Serialize the Gemini chat transcript into session state as JSON.

    Each entry keeps only the role and the text of the first message part,
    which is exactly what the loader needs to rebuild the conversation.
    """
    snapshot = [
        {"role": msg.role, "text": msg.parts[0].text}
        for msg in history
    ]
    st.session_state.cookie_chat_history = json.dumps(snapshot)
| |
|
| | |
def load_chat_history():
    """Rebuild a Gemini chat session from the serialized session-state history.

    Returns:
        A chat session primed with the stored messages, or None when no
        history exists or it cannot be restored.
    """
    if 'cookie_chat_history' not in st.session_state:
        return None
    try:
        stored = json.loads(st.session_state.cookie_chat_history)
        # Reconstruct the transcript directly instead of re-sending each user
        # message: send_message triggers a live model call per stored turn
        # (slow and billed) and regenerates *different* assistant replies
        # instead of restoring the saved ones.
        history = [
            {"role": message["role"], "parts": [message["text"]]}
            for message in stored
        ]
        model = load_gemini_pro()
        return model.start_chat(history=history)
    except Exception as e:
        st.error(f"Error cargando el historial: {e}")
    return None
| |
|
with st.sidebar:
    # Main navigation between the app's three views.
    selected = option_menu(
        "GD AI",
        ["System Prompt", "Chatbot", "Image Captioning"],
        menu_icon="robot",
        icons=['gear', 'chat-dots-fill', 'image-fill'],
        default_index=0
    )

    # NOTE(review): source indentation was lost; this button is assumed to
    # live inside the sidebar — confirm against the running app.
    # Wipe the stored conversation and the live chat session.
    if st.button("Borrar Historial"):
        if 'cookie_chat_history' in st.session_state:
            del st.session_state.cookie_chat_history
        if 'chat_session' in st.session_state:
            del st.session_state.chat_session
        st.success("Historial borrado!")
| |
|
def translate_role_to_streamlit(user_role):
    """Map a Gemini chat role to the role name st.chat_message expects.

    Gemini labels its own turns "model", which Streamlit renders as
    "assistant"; any other role (e.g. "user") passes through unchanged.
    """
    return "assistant" if user_role == "model" else user_role
| |
|
def extract_urls(text):
    """Return all http(s) URLs found in *text*.

    Trailing sentence punctuation is stripped (e.g. "https://x.com." or
    "(https://x.com)") so surrounding prose does not leak into the URL —
    the previous \\S+ pattern captured it and broke the fetch.

    Args:
        text: Arbitrary prompt text.

    Returns:
        List of URL strings in order of appearance (possibly empty).
    """
    raw = re.findall(r"https?://\S+", text)
    return [url.rstrip(".,;:!?)]}\"'") for url in raw]
| |
|
def fetch_url_content(url):
    """Download the raw body of *url*.

    Returns the response text on success; on any request failure returns a
    Spanish error string (callers detect this by its "Error" prefix).
    """
    try:
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        return f"Error al acceder a la URL '{url}': {e}"
    return resp.text
| |
|
def process_url_content(content):
    """Reduce an HTML document to its visible text.

    Returns the space-joined, stripped text on success, or a Spanish error
    string if parsing fails.
    """
    try:
        return BeautifulSoup(content, "html.parser").get_text(" ", strip=True)
    except Exception as e:
        return f"Error al procesar el contenido HTML: {e}"
| |
|
def process_urls_in_prompt(prompt):
    """Inline the text content of every URL mentioned in *prompt*.

    Each URL is replaced with the fetched page's extracted text, or — when
    the fetch failed — with the fetcher's error message (detected via its
    "Error" prefix) so the model still sees what went wrong.
    """
    result = prompt
    for url in extract_urls(prompt):
        fetched = fetch_url_content(url)
        if fetched.startswith("Error"):
            replacement = fetched
        else:
            replacement = process_url_content(fetched)
        result = result.replace(url, replacement)
    return result
| |
|
# --- View: edit and persist the system prompt --------------------------------
if selected == "System Prompt":
    st.title("Configuraci贸n del System Prompt")
    new_system_prompt = st.text_area(
        "Ingresa las instrucciones para el AI (System Prompt), incluyendo URLs",
        value=st.session_state.system_prompt,
        height=300,
        help="Escribe aqu铆 las instrucciones que definir谩n el comportamiento del AI. Puedes incluir URLs."
    )

    if st.button("Guardar System Prompt"):
        # URLs in the prompt are expanded to their page text before saving.
        processed_prompt = process_urls_in_prompt(new_system_prompt)
        st.session_state.system_prompt = processed_prompt
        save_system_prompt(processed_prompt)
        # Drop the live chat so the next session is built with the new prompt.
        if "chat_session" in st.session_state:
            del st.session_state.chat_session
        st.success("System Prompt actualizado con 茅xito!")

    if st.session_state.system_prompt:
        st.markdown("### System Prompt Actual:")
        st.info(st.session_state.system_prompt)
| |
|
# --- View: chatbot -----------------------------------------------------------
elif selected == "Chatbot":
    model = load_gemini_pro()
    if "chat_session" not in st.session_state:
        # Prefer restoring a previously saved conversation.
        loaded_chat = load_chat_history()
        if loaded_chat:
            st.session_state.chat_session = loaded_chat
        else:
            st.session_state.chat_session = model.start_chat(history=[])
            st.chat_message("assistant").markdown("隆Hola! 驴En qu茅 puedo ayudarte hoy?")

    st.title("Gnosticdev Chatbot")
    user_prompt = st.chat_input("Preguntame algo...")
    if user_prompt:
        # Expand any URLs in the user's message before sending it to Gemini.
        processed_user_prompt = process_urls_in_prompt(user_prompt)
        st.chat_message("user").markdown(processed_user_prompt)
        gemini_response = st.session_state.chat_session.send_message(processed_user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)
        # Persist the updated transcript for the next rerun/session.
        save_chat_history(st.session_state.chat_session.history)
| |
|
# --- View: image captioning --------------------------------------------------
elif selected == "Image Captioning":
    st.title("Image Caption Generation馃摳")
    upload_image = st.file_uploader("Upload an image...", type=["jpg", "jpeg", "png"])
    if upload_image and st.button("Generate"):
        image = Image.open(upload_image)
        col1, col2 = st.columns(2)
        with col1:
            # NOTE(review): use_column_width is deprecated in newer Streamlit
            # in favor of use_container_width — confirm the pinned version
            # before changing.
            st.image(image, caption="Uploaded Image", use_column_width=True)
        # Fixed instruction sent together with the image to the vision model.
        default_prompt = "Write a caption for this image"
        caption = gemini_pro_vision_responce(default_prompt, image)
        with col2:
            st.info(caption)
| |
|