Spaces:
Running
Running
| import gradio as gr | |
| import google.generativeai as genai | |
| import os | |
| import time | |
| import json | |
| from datetime import datetime | |
| from typing import List, Tuple, Optional, Dict, Any | |
| import asyncio | |
| import httpx | |
# --- 1. API and model configuration ---
# The API key must come from the environment (e.g. a Hugging Face Spaces
# secret); the app refuses to start without it.
GOOGLE_API_KEY = os.environ.get('GOOGLE_API_KEY')
if not GOOGLE_API_KEY:
    raise gr.Error("Переменная окружения GOOGLE_API_KEY не установлена. Пожалуйста, добавьте её как секрет в Hugging Face Spaces.")
try:
    genai.configure(api_key=GOOGLE_API_KEY)
except Exception as e:
    # Surface SDK configuration failures as a Gradio error so they are visible in the UI.
    raise gr.Error(f"Ошибка при настройке Google Gemini API или перечислении моделей: {e}. Проверьте ваш GOOGLE_API_KEY.")
def get_available_models() -> List[str]:
    """Return a sorted, de-duplicated list of text-only model names.

    Keeps models that support 'generateContent' and whose names do not
    suggest vision/TTS/audio capabilities. On any API failure a warning is
    printed and whatever was collected so far is returned.
    """
    names: List[str] = []
    try:
        for model in genai.list_models():
            lowered_name = model.name.lower()
            supports_text = 'generateContent' in model.supported_generation_methods
            is_multimodal = (
                'vision' in lowered_name
                or 'tts' in lowered_name
                or 'audio' in lowered_name
            )
            if supports_text and not is_multimodal:
                names.append(model.name)
    except Exception as e:
        print(f"Предупреждение: Не удалось получить список моделей: {e}. Попытка продолжить без полного списка.")
    return sorted(set(names))
# Resolve the model list once at startup; abort if nothing usable was found.
AVAILABLE_MODELS = get_available_models()
if not AVAILABLE_MODELS:
    raise gr.Error("Не найдено моделей, совместимых с 'generateContent'. Проверьте ваш ключ API и доступность моделей.")
# --- 2. Chat history management ---
# Saved chats are appended as JSON lines to a single file inside CHATS_DIR.
CHATS_DIR = "saved_chats"
CHATS_FILE_PATH = os.path.join(CHATS_DIR, "chats.jsonl")
os.makedirs(CHATS_DIR, exist_ok=True)
def save_chat_history(current_chat_history: List[List[Optional[str]]]) -> str:
    """Append the finished turns of the current chat to the JSONL store.

    Only complete turns (both user and bot messages present, bot message a
    real string) are persisted. Returns a human-readable status string.
    """
    if not current_chat_history:
        return "Нет истории чата для сохранения."
    completed_turns = [
        [user, bot]
        for user, bot in current_chat_history
        if user is not None and bot is not None and isinstance(bot, str)
    ]
    if not completed_turns:
        return "Чат пуст. Сохранение не выполнено."
    now = datetime.now()
    record = {
        "id": now.strftime("%Y%m%d_%H%M%S"),
        "timestamp": now.isoformat(),
        "history": completed_turns,
    }
    try:
        with open(CHATS_FILE_PATH, "a", encoding="utf-8") as store:
            store.write(json.dumps(record, ensure_ascii=False) + "\n")
        return f"Чат '{record['id']}' успешно сохранен."
    except Exception as e:
        return f"Ошибка при сохранении чата: {e}"
def load_all_chat_summaries() -> List[Tuple[str, str]]:
    """Scan the JSONL chat store and build (label, chat_id) dropdown choices.

    Each label is "<YYYY-MM-DD HH:MM> - <first 40 chars of first user msg>...".
    Malformed lines and malformed records are skipped silently. Results are
    sorted by label descending, so newest chats come first.

    Returns:
        List of (summary_label, chat_id) tuples; empty if the store is missing.
    """
    summaries: List[Tuple[str, str]] = []
    if not os.path.exists(CHATS_FILE_PATH):
        return summaries
    try:
        with open(CHATS_FILE_PATH, "r", encoding="utf-8") as f:
            for line in f:
                try:
                    chat_data = json.loads(line)
                    chat_id = chat_data.get("id", "ID_Неизвестен")
                    timestamp_str = chat_data.get("timestamp", "")
                    try:
                        dt_obj = datetime.fromisoformat(timestamp_str)
                        formatted_time = dt_obj.strftime('%Y-%m-%d %H:%M')
                    except (ValueError, TypeError):
                        # FIX: was a bare `except:`, which swallowed even
                        # KeyboardInterrupt/SystemExit; catch only parse errors.
                        formatted_time = "Дата_Неизвестна"
                    first_message = chat_data.get("history", [["(пустой чат)"]])[0][0]
                    summary = f"{formatted_time} - {first_message[:40]}..."
                    summaries.append((summary, chat_id))
                except (json.JSONDecodeError, IndexError, TypeError):
                    # FIX: TypeError added — a None first message previously
                    # escaped this handler and crashed the whole scan.
                    continue
    except FileNotFoundError:
        pass
    return sorted(summaries, key=lambda x: x[0], reverse=True)
def load_specific_chat(chat_id_from_dropdown: str) -> List[List[Optional[str]]]:
    """Return the stored history for the given chat id, or [] if unavailable.

    Scans the JSONL store line by line; unparsable lines are ignored. A
    missing store file, an empty id, or an unknown id all yield [].
    """
    if not chat_id_from_dropdown:
        return []
    try:
        with open(CHATS_FILE_PATH, "r", encoding="utf-8") as store:
            for raw_line in store:
                try:
                    record = json.loads(raw_line)
                except json.JSONDecodeError:
                    continue
                if record.get("id") == chat_id_from_dropdown:
                    return record.get("history", [])
    except FileNotFoundError:
        pass
    return []
# --- 3. Generation logic ---
# FIX: this function was accidentally deleted and has now been restored.
# It is also improved so that timer messages are not sent to the API.
def format_history_for_gemini(history: List[List[Optional[str]]]) -> List[Dict[str, Any]]:
    """Convert Gradio chatbot history into Gemini API message dicts.

    Malformed turns (not a 2+ element list/tuple) are skipped. A bot entry is
    forwarded only when it is a real textual reply — pending None values and
    the transient countdown text (prefixed with "⏳") are dropped.
    """
    messages: List[Dict[str, Any]] = []
    for turn in history:
        if not (isinstance(turn, (list, tuple)) and len(turn) >= 2):
            continue
        user_text, bot_text = turn[0], turn[1]
        if user_text:
            messages.append({'role': 'user', 'parts': [{'text': str(user_text)}]})
        # Only genuine model replies go back into the API history.
        is_real_reply = bool(bot_text) and isinstance(bot_text, str) and not bot_text.startswith("⏳")
        if is_real_reply:
            messages.append({'role': 'model', 'parts': [{'text': str(bot_text)}]})
    return messages
def format_variants_html(variants: List[str]) -> str:
    """Render generated answer variants as a block of HTML cards.

    Each normal variant gets a container with an optional "Вариант N" header
    (shown only when there is more than one variant) and a copy-to-clipboard
    button. Variants carrying an error marker are rendered as error cards
    without a copy button.

    Args:
        variants: Plain-text (or error-HTML) answer variants.

    Returns:
        Concatenated HTML for all variants, or "" for an empty list.
    """
    if not variants:
        return ""
    html_outputs = []
    for i, variant_text in enumerate(variants):
        # FIX: escape backslashes FIRST (a literal `\` previously corrupted the
        # later escapes) and neutralize `${` so the text cannot terminate or
        # inject into the JS template literal inside the onclick handler.
        js_safe_text = (
            variant_text
            .replace('\\', '\\\\')
            .replace('`', '\\`')
            .replace('${', '\\${')
            .replace('\n', '\\n')
            .replace("'", "\\'")
        )
        copy_button_html = f"""<button onclick="navigator.clipboard.writeText(`{js_safe_text}`)" class="copy-button" title="Копировать текст"><svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><rect x="9" y="9" width="13" height="13" rx="2" ry="2"></rect><path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"></path></svg></button>"""
        if "<strong>Ошибка" in variant_text or "<strong>Время ожидания истекло" in variant_text:
            html_outputs.append(f'<div class="variant-container error-message">{variant_text}</div>')
        else:
            header = f"<strong>Вариант {i + 1}</strong>" if len(variants) > 1 else ""
            html_outputs.append(f'<div class="variant-container"><div class="variant-header">{header}{copy_button_html}</div><div class="variant-text">{variant_text}</div></div>')
    return "".join(html_outputs)
async def generate_single_variant_async(history: List, model_name: str, temperature: float):
    """Generate one answer variant for the final user message in `history`.

    The chat session is seeded with everything except the last turn, then the
    last user message is sent with the requested temperature. Any failure is
    returned as an HTML error string instead of raising.
    """
    try:
        gen_model = genai.GenerativeModel(model_name=model_name)
        # Seed the session with all turns except the final user message.
        session = gen_model.start_chat(history=history[:-1])
        last_user_text = history[-1]['parts'][0]['text']
        config = genai.types.GenerationConfig(temperature=temperature)
        response = await session.send_message_async(last_user_text, generation_config=config)
        return response.text.strip()
    except Exception as e:
        return f"<strong>Ошибка генерации:</strong><br>{e}"
# FIX: fully rewritten `respond` (the countdown-timer version was replaced).
async def respond(history, model_name, temperature, num_variants):
    """Generate the bot reply (or replies) for the last user message.

    Fires `num_variants` generations concurrently, renders them as HTML, and
    yields the updated history exactly once — no intermediate placeholder or
    timer messages are emitted.
    """
    # Guard: nothing to answer when history is empty or the last user
    # message is missing. Only a bare `return` here — no intermediate yield.
    if not history or not history[-1][0]:
        return
    api_history = format_history_for_gemini(history)
    try:
        variant_count = int(num_variants)
        tasks = [
            generate_single_variant_async(api_history, model_name, temperature)
            for _ in range(variant_count)
        ]
        outcomes = await asyncio.gather(*tasks, return_exceptions=True)
        rendered = [
            f"<strong>Ошибка генерации варианта:</strong><br>{item}"
            if isinstance(item, Exception) else item
            for item in outcomes
        ]
        history[-1][1] = format_variants_html(rendered)
    except Exception as e:
        history[-1][1] = f"<strong>Произошла непредвиденная ошибка:</strong><br>{e}"
    # Push only the final result to the UI.
    yield history
# --- 4. UI helper functions ---
def add_user_message_to_history(message: str, history: List[List[Optional[str]]]):
    """Append the user's message as a new pending turn and clear the input box.

    Whitespace-only input leaves the history untouched. Returns the new
    textbox value ("") and the updated history (a new list, not mutated).
    """
    if not message.strip():
        return "", history
    new_history = history + [[message, None]]
    return "", new_history
def regenerate_last_response(history: List[List[Optional[str]]]):
    """Drop the last bot reply (in place) so `respond` will regenerate it."""
    if history:
        last_turn = history[-1]
        if last_turn[1] is not None:
            last_turn[1] = None
    return history
# --- 5. CSS ---
# Dark-theme overrides for the Gradio UI. The .variant-container /
# .copy-button / .error-message rules style the HTML cards produced by
# format_variants_html; #chatbot_window keeps the chat pane scrollable.
custom_css = """
:root { --primary-color: #3B82F6; --primary-color-hover: #60A5FA; --secondary-color: #9CA3AF; --secondary-color-hover: #E5E7EB; --danger-color: #F87171; --danger-color-hover: #EF4444; --app-bg-color: #111827; --input-bg-color: #1F2937; --border-color: #4B5563; --text-color-primary: #F3F4F6; --text-color-secondary: #9CA3AF; --label-color: #E5E7EB; }
.gradio-container { background-color: var(--app-bg-color) !important; color: var(--text-color-primary) !important; }
h1, .gr-markdown p { color: var(--text-color-primary) !important; text-align: center; }
.custom-button button { border-radius: 8px !important; font-weight: 600 !important; transition: all 0.2s ease-in-out !important; padding: 10px !important; }
.submit-button button { background: var(--primary-color) !important; color: white !important; border: 1px solid var(--primary-color) !important; }
.submit-button button:hover { background: var(--primary-color-hover) !important; border-color: var(--primary-color-hover) !important; }
.secondary-button button, .danger-button button { background: transparent !important; border: 1px solid var(--border-color) !important; }
.secondary-button button { color: var(--secondary-color) !important; }
.danger-button button { color: var(--danger-color) !important; }
.secondary-button button:hover { background: var(--secondary-color) !important; border-color: var(--secondary-color) !important; color: var(--app-bg-color) !important; }
.danger-button button:hover { background: var(--danger-color) !important; border-color: var(--danger-color) !important; color: white !important; }
.input-container, .input-container .wrap { background-color: var(--input-bg-color) !important; border: 1px solid var(--border-color) !important; border-radius: 8px !important; }
.input-container:focus-within, .input-container .wrap:focus-within { border-color: var(--primary-color) !important; box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.2) !important; }
.gradio-container .input-container .options { background-color: var(--input-bg-color) !important; border: 1px solid var(--border-color) !important; }
.gradio-container .input-container .option-item:hover, .gradio-container .input-container .option-item.selected { background-color: var(--primary-color) !important; color: white !important; }
.gradio-container .gradio-slider > input[type=range] { background-color: var(--primary-color) !important; }
.gradio-container label, .gradio-container .gr-info { font-weight: 600 !important; color: var(--label-color) !important; }
.variant-container { background: var(--input-bg-color); border: 1px solid var(--border-color); border-radius: 8px; margin-bottom: 10px; padding: 15px; }
.variant-header { display: flex; justify-content: space-between; align-items: center; margin-bottom: 10px; color: var(--label-color); }
.copy-button { background: transparent; border: 1px solid var(--border-color); border-radius: 5px; cursor: pointer; padding: 5px; }
.copy-button:hover { background: #374151; }
.copy-button svg { stroke: var(--secondary-color); }
.error-message { background-color: #450A0A; color: #F87171; border: 1px solid #7F1D1D; }
#chatbot_window {
    overflow-y: auto !important;
}
"""
# --- 6. Building the Gradio interface ---
with gr.Blocks(theme=gr.themes.Soft(), css=custom_css) as demo:
    gr.Markdown("# Чат-Бот Gemini/Gemma AI")
    with gr.Row():
        # --- Left column: chat window and input box ---
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(height=500, label="Чат", elem_classes="input-container", elem_id="chatbot_window")
            msg = gr.Textbox(
                label="Введи свой запрос:",
                placeholder="Который нынче час?",
                elem_classes="input-container",
                lines=2,  # initial height: 2 lines
                max_lines=10  # maximum height: 10 lines
            )
        # --- Right column: settings and controls ---
        with gr.Column(scale=1, min_width=250):
            model_selector = gr.Dropdown(choices=AVAILABLE_MODELS, value=AVAILABLE_MODELS[0] if AVAILABLE_MODELS else "", label="Выбирай Модель", interactive=True, elem_classes="input-container")
            temperature_slider = gr.Slider(minimum=0.0, maximum=1.0, step=0.1, value=0.9, label="Температура")
            num_variants_slider = gr.Slider(minimum=1, maximum=5, step=1, value=1, label="Количество Вариантов Ответа")
            gr.Markdown("---")  # visual separator
            send_btn = gr.Button("Спросить (Shift+Enter)", variant="primary", elem_classes=["custom-button", "submit-button"])
            regenerate_btn = gr.Button("🔄 Переспросить", variant="secondary", elem_classes=["custom-button", "secondary-button"])
            clear_current_chat_btn = gr.Button("🗑️ Сбросить", variant="stop", elem_classes=["custom-button", "danger-button"])
            with gr.Accordion("Сохран и Загруз Чатов", open=False):
                save_chat_btn = gr.Button("Сохранить Текущий", elem_classes=["custom-button", "secondary-button"])
                new_chat_btn = gr.Button("Новый Чат (сохранить и сбросить)", elem_classes=["custom-button", "secondary-button"])
                saved_chat_selector = gr.Dropdown(label="Загрузить Сохранённый", interactive=True, elem_classes="input-container", choices=load_all_chat_summaries())
                with gr.Row():
                    load_chat_btn = gr.Button("Загрузить", elem_classes=["custom-button", "secondary-button"])
                    refresh_chats_btn = gr.Button("🔄", elem_classes=["custom-button", "secondary-button"], scale=0)
            system_message_display = gr.Textbox(interactive=False, visible=True, label="Статус", placeholder="Сообщения о сохране...")
    # --- 7. Event handler wiring ---
    generation_inputs = [chatbot, model_selector, temperature_slider, num_variants_slider]
    # Submitting the textbox or clicking "send" first records the user message,
    # then runs the async `respond` generator to fill in the bot reply.
    msg.submit(add_user_message_to_history, [msg, chatbot], [msg, chatbot]).then(respond, generation_inputs, chatbot)
    send_btn.click(add_user_message_to_history, [msg, chatbot], [msg, chatbot]).then(respond, generation_inputs, chatbot)
    regenerate_btn.click(regenerate_last_response, [chatbot], [chatbot]).then(respond, generation_inputs, chatbot)
    clear_current_chat_btn.click(lambda: ["", []], outputs=[msg, chatbot], queue=False)
    def save_and_refresh(chat_history):
        # Persist the chat, then refresh the saved-chat dropdown choices.
        msg = save_chat_history(chat_history)
        new_choices = load_all_chat_summaries()
        return msg, gr.update(choices=new_choices)
    def save_and_clear_and_refresh(chat_history):
        # Persist the chat, clear input and chat window, refresh the dropdown.
        msg = save_chat_history(chat_history)
        new_choices = load_all_chat_summaries()
        return msg, "", [], gr.update(choices=new_choices)
    save_chat_btn.click(save_and_refresh, [chatbot], [system_message_display, saved_chat_selector])
    new_chat_btn.click(save_and_clear_and_refresh, [chatbot], [system_message_display, msg, chatbot, saved_chat_selector])
    load_chat_btn.click(load_specific_chat, [saved_chat_selector], [chatbot]).then(lambda: ["", "Чат загружен."], outputs=[msg, system_message_display])
    refresh_chats_btn.click(lambda: gr.update(choices=load_all_chat_summaries()), outputs=[saved_chat_selector])
    # Refresh the saved-chat choices when the page loads.
    demo.load(lambda: gr.update(choices=load_all_chat_summaries()), outputs=[saved_chat_selector], queue=False)
demo.queue().launch()