File size: 3,234 Bytes
a0d7d94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
642ca5c
 
12d6bc9
a0d7d94
642ca5c
a0d7d94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
642ca5c
a0d7d94
642ca5c
a0d7d94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
import gradio as gr
from generate_service import OpenAIService


# Single shared OpenAI service instance; all chat turns reuse its
# conversation thread until clear_chat() resets it.
openai_service = OpenAIService()


def respond(message, history):
    """
    Process a user message and stream the response from the OpenAI Assistant.

    Args:
        message: User's input message.
        history: Chat history as a list of (user, assistant) tuples; mutated
            in place as chunks arrive.

    Yields:
        Tuples of (updated history, "") — the empty string clears the input
        Textbox while the Chatbot updates.
    """
    # Ignore empty / whitespace-only submissions.
    if not message.strip():
        return

    # Animated loading indicator shown until the first chunk arrives.
    # NOTE(review): raw HTML — relies on sanitize_html=False on the Chatbot
    # and the @keyframes blink rule defined in the Blocks CSS.
    loading_html = (
        '<span class="loading-dots">🤔 Thinking'
        '<span style="animation: blink 1.4s infinite; animation-delay: 0s;">.</span>'
        '<span style="animation: blink 1.4s infinite; animation-delay: 0.2s;">.</span>'
        '<span style="animation: blink 1.4s infinite; animation-delay: 0.4s;">.</span>'
        '</span>'
    )
    history.append((message, loading_html))
    yield history, ""

    # Stream the assistant's response, replacing the loading indicator with
    # the accumulated text as each chunk arrives.
    response_text = ""
    for chunk in openai_service.generate_stream(message):
        response_text += chunk
        history[-1] = (message, response_text)
        yield history, ""

    # If the stream produced no chunks at all, don't leave the "Thinking..."
    # indicator stuck in the chat — replace it with an empty reply.
    if not response_text:
        history[-1] = (message, "")
        yield history, ""


def clear_chat():
    """Reset the conversation.

    Returns:
        An empty list, which wipes the Chatbot component's displayed history.
    """
    # Start a fresh assistant thread so prior context is discarded server-side.
    openai_service.clear_thread()
    return []


# UI layout: a full-height chat area with a sticky input row at the bottom
# and a Clear button pinned top-right. The CSS also defines the blink/pulse
# keyframes used by the streaming "Thinking..." loading indicator.
with gr.Blocks(css="""
    #component-0 {
        height: calc(100vh - 200px) !important;
    }
    #chatbot-container {
        height: 100% !important;
        min-height: 600px;
    }
    #input-row {
        position: sticky;
        bottom: 0;
        background: white;
        padding: 10px 0;
    }
    .contain {
        max-width: 100% !important;
        padding: 0 !important;
    }
    #clear-btn {
        position: absolute;
        right: 10px;
        top: 10px;
        z-index: 100;
    }
    @keyframes blink {
        0%, 100% { opacity: 1; }
        50% { opacity: 0.3; }
    }
    .message.bot:has(.loading-dots) {
        animation: pulse 1.5s ease-in-out infinite;
    }
    @keyframes pulse {
        0%, 100% { opacity: 1; }
        50% { opacity: 0.6; }
    }
""") as demo:
    
    # Header row: page title plus the Clear button (positioned via #clear-btn CSS).
    with gr.Row():
        gr.Markdown("# Chat Interface")
        clear_button = gr.Button("Clear", elem_id="clear-btn", size="sm")
    
    # Main chat display. sanitize_html=False is required so the animated
    # HTML loading indicator renders.
    # NOTE(review): with sanitize_html=False, user-typed text is also rendered
    # as raw HTML — potential XSS/markup-injection; consider escaping user
    # messages before display. Verify against the project's threat model.
    chatbot = gr.Chatbot(
        value=[],
        elem_id="chatbot-container",
        height="calc(100vh - 200px)",
        show_label=False,
        sanitize_html=False,
    )
    
    # Sticky input row: wide textbox (scale=9) next to a narrow Send button.
    with gr.Row(elem_id="input-row"):
        with gr.Column(scale=9):
            msg_input = gr.Textbox(
                placeholder="Type your message here...",
                show_label=False,
                container=False
            )
        with gr.Column(scale=1, min_width=100):
            send_button = gr.Button("Send", variant="primary")
    
    # Event handlers: Enter-to-submit and the Send button both stream via
    # respond(), which yields (history, "") to clear the textbox as it goes.
    msg_input.submit(
        respond,
        inputs=[msg_input, chatbot],
        outputs=[chatbot, msg_input]
    )
    
    send_button.click(
        respond,
        inputs=[msg_input, chatbot],
        outputs=[chatbot, msg_input]
    )
    
    # Clear wipes the displayed history and resets the assistant thread.
    clear_button.click(
        clear_chat,
        inputs=None,
        outputs=chatbot
    )


if __name__ == "__main__":
    # inbrowser=True opens the app in the default browser on launch.
    demo.launch(inbrowser=True)