Spaces:
Running
Running
| import gradio as gr | |
| from llama_index.core import Settings | |
| from documents_prep import load_all_documents | |
| from index_retriever import create_vector_index, create_query_engine | |
| from utils import get_llm_model, get_embedding_model, get_reranker_model, answer_question | |
| from my_logging import log_message | |
| from config import * | |
# Global state: populated once by initialize_system(), read by ask_question().
query_engine = None  # llama_index query engine built over the vector index
reranker = None      # reranker model passed to answer_question()
def initialize_system():
    """Bootstrap the RAG pipeline: models, documents, vector index, query engine.

    Populates the module-level ``query_engine`` and ``reranker`` globals.

    Returns:
        str: a human-readable status message for the UI.
    """
    global query_engine, reranker

    banner = "=" * 60
    log_message(banner)
    log_message("INITIALIZING SYSTEM")
    log_message(banner)

    # Wire the LLM and embedding model into llama_index's global Settings;
    # the reranker is kept as a module global instead.
    Settings.llm = get_llm_model(GOOGLE_API_KEY)
    Settings.embed_model = get_embedding_model()
    reranker = get_reranker_model()
    log_message("✓ Models loaded")

    # Pull every document source (JSON, tables, images) from the HF dataset repo.
    documents = load_all_documents(
        repo_id=HF_REPO_ID,
        hf_token=HF_TOKEN,
        json_dir=JSON_FILES_DIR,
        table_dir=TABLE_DATA_DIR,
        image_dir=IMAGE_DATA_DIR,
    )

    # Build the vector index and expose it through a query engine.
    index = create_vector_index(documents)
    query_engine = create_query_engine(index)

    log_message(banner)
    log_message("SYSTEM READY")
    log_message(banner)
    return "✅ System initialized"
def ask_question(question):
    """Handle a question submitted from the Gradio UI.

    Args:
        question: raw textbox content; may be None or whitespace-only.

    Returns:
        tuple[str, str]: (answer text, sources text) for the two output boxes.
    """
    # Gradio may deliver None for an untouched textbox; the original
    # `question.strip()` would raise AttributeError in that case.
    if not question or not question.strip():
        return "Пожалуйста, введите вопрос", ""
    # initialize_system() has not run (or failed) — nothing to query against.
    if query_engine is None:
        return "❌ Система не инициализирована", ""
    answer, sources = answer_question(question, query_engine, reranker)
    return answer, sources
def create_interface():
    """Build and return the Gradio Blocks UI.

    Initializes the RAG system eagerly so the interface is usable
    as soon as it is served.
    """
    # Eager bootstrap: models, documents, and index are ready before the UI loads.
    initialize_system()

    with gr.Blocks(title="AIEXP - RAG System", theme=gr.themes.Soft()) as ui:
        gr.Markdown("""
    # AIEXP - AI Expert для нормативной документации
    ## Упрощенная версия RAG системы
    """)
        gr.Markdown("### Задайте вопрос")

        # --- Input area ---
        with gr.Row():
            question_box = gr.Textbox(
                label="Ваш вопрос",
                placeholder="Введите вопрос...",
                lines=3,
            )
        submit_btn = gr.Button("Найти ответ", variant="primary")

        # Clickable sample questions that fill the input box.
        gr.Examples(
            examples=[
                "О чем таблица А.12 в ГОСТ Р 59023.4-2020?",
                "Какая температура подогрева для стали 20 толщиной до 100 мм?",
                "Что показано на рисунке Л.2 в ГОСТ Р 50.04.07-2022?",
            ],
            inputs=question_box,
        )

        # --- Output area ---
        with gr.Row():
            answer_box = gr.Textbox(label="Ответ", lines=10)
            sources_box = gr.Textbox(label="Источники", lines=10)

        # Both the button click and Enter in the textbox trigger the same handler.
        submit_btn.click(
            fn=ask_question,
            inputs=question_box,
            outputs=[answer_box, sources_box],
        )
        question_box.submit(
            fn=ask_question,
            inputs=question_box,
            outputs=[answer_box, sources_box],
        )

    return ui
# Script entry point: build the UI (which also initializes the RAG system)
# and serve it.
if __name__ == "__main__":
    demo = create_interface()
    demo.launch(
        server_name="0.0.0.0",  # listen on all interfaces (container/Spaces deployment)
        server_port=7860,       # Gradio's conventional default port
        share=True              # also request a public gradio.live tunnel
                                # NOTE(review): share=True is redundant/ignored on HF Spaces — confirm target host
    )