File size: 2,695 Bytes
0e2081a
fa696d9
 
0e2081a
fa696d9
0e2081a
 
 
fa696d9
0e2081a
 
 
 
 
fa696d9
0e2081a
 
fa696d9
 
 
 
 
 
6e0ff93
 
0e2081a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fa696d9
 
 
 
 
0e2081a
 
fa696d9
0e2081a
6e0ff93
fa696d9
 
 
 
 
 
 
 
 
0e2081a
 
 
 
fa696d9
6e0ff93
0e2081a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
import sys
import uuid

import gradio as gr

from database_setup import initialize_database
from chat_utils import create_rag_chain, format_sources, create_session_history_manager
from langchain_core.runnables.history import RunnableWithMessageHistory

# Bootstrap: build the vector store, then wrap the RAG chain with
# per-session message history so follow-up questions keep their context.
print("Inicjalizacja bazy danych...")
baza = initialize_database()
if baza is None:
    # Without a vector store the app cannot answer anything — exit early.
    print("Nie udało się zainicjalizować bazy danych. Zakończenie pracy.")
    sys.exit(1)

_base_chain = create_rag_chain(baza)
_history_lookup = create_session_history_manager()
# History-aware chain: `sess_id` (via config) selects which conversation
# buffer is injected under "chat_history" on every invoke.
conversational_rag_chain = RunnableWithMessageHistory(
    _base_chain,
    _history_lookup,
    input_messages_key="input",
    output_messages_key="answer",
    history_messages_key="chat_history",
)

def respond(message, chat_history, sess_id):
    """Handle one user message and return the updated chat history.

    Invokes the history-aware RAG chain for session ``sess_id``, appends the
    (question, answer-with-sources) tuple to ``chat_history``, and returns it
    so Gradio re-renders the Chatbot component.

    Args:
        message: The user's question as plain text.
        chat_history: List of (user, bot) message tuples shown in the UI;
            mutated in place.
        sess_id: Session identifier keying the per-session history buffer.

    Returns:
        The updated ``chat_history`` list.
    """
    try:
        response = conversational_rag_chain.invoke(
            {"input": message},
            config={"configurable": {"session_id": sess_id}},
        )
    except Exception as e:
        # Surface the failure inside the chat instead of crashing the UI.
        chat_history.append((message, f"Błąd podczas przetwarzania: {e}"))
        return chat_history

    context_docs = response.get("context", [])
    # Debug aid: print similarity scores for the retrieved chunks.
    # Fix: skip entirely when nothing was retrieved — the original issued a
    # k=0 query, which vector stores typically reject (the error was then
    # silently swallowed). Still best-effort: a failing debug lookup must
    # never break the user-facing answer.
    if context_docs:
        try:
            debug_scores = baza.similarity_search_with_score(message, k=len(context_docs))
            for i, (doc, score) in enumerate(debug_scores):
                print(f"Chunk {i+1}: similarity_score={score}, title={doc.metadata.get('title')}")
        except Exception:
            pass

    sources_md = format_sources(context_docs)
    answer = response.get("answer") or ""
    answer_with_sources = f"{answer}\n\n**Źródła:**\n{sources_md}"
    chat_history.append((message, answer_with_sources))
    return chat_history

# --- Gradio UI -------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue"), title="Szuflada Chatbot") as demo:
    # One fresh UUID per browser session, so each visitor gets an
    # isolated conversation history.
    session_state = gr.State(lambda: str(uuid.uuid4()))

    gr.Markdown(
        "# Czat z Moją Szufladą\n"
        "### Zadaj pytanie na temat treści ze strony [mojaszuflada.pl](https://mojaszuflada.pl)"
    )
    conversation = gr.Chatbot(label="Rozmowa", height=500)

    with gr.Row():
        question_box = gr.Textbox(
            show_label=False,
            placeholder="Wpisz swoje pytanie...",
            container=False,
            scale=7,
        )
        send_button = gr.Button("Wyślij", variant="primary", scale=1)

    def _clear_input():
        # Empty the textbox once the message has been handed to `respond`.
        return gr.update(value="")

    # Both clicking the button and pressing Enter submit the question,
    # then clear the input box (clearing bypasses the queue).
    for trigger in (send_button.click, question_box.submit):
        trigger(respond, [question_box, conversation, session_state], [conversation]) \
            .then(_clear_input, None, [question_box], queue=False)

if __name__ == "__main__":
    # inbrowser=True opens the app in the default browser on launch.
    demo.launch(inbrowser=True)