File size: 1,392 Bytes
5bae77a
d400edb
5bae77a
 
d400edb
8f2ed12
5bae77a
d400edb
5bae77a
8f2ed12
d400edb
5bae77a
 
 
 
d400edb
8f2ed12
5bae77a
 
d400edb
5bae77a
d400edb
8f2ed12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5bae77a
 
d400edb
8f2ed12
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
# app.py
import gradio as gr
from model_loader import MODELS, load_models
from chat_utils import chat, download_chat

# Load models once at startup so every request reuses the same in-memory
# models instead of reloading them per call.
loaded_models = load_models(MODELS)

with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.Markdown("<h1 style='text-align:center;'>💻 Multi-Model Coding Assistant</h1>")

    with gr.Row():
        model_selector = gr.Dropdown(choices=list(MODELS.keys()), value=list(MODELS.keys())[0], label="Choose Model")
        clear_button = gr.Button("🧹 Clear Chat")
        download_btn = gr.Button("⬇️ Download Chat")

    chatbot = gr.Chatbot(label="Conversation", show_copy_button=True, type="messages")
    prompt_box = gr.Textbox(placeholder="Ask a coding question...", label="Your Prompt")
    submit_btn = gr.Button("🚀 Send")

    # Receives the transcript file produced by download_chat so the browser
    # can offer it for download.
    download_file = gr.File(label="Chat Transcript")

    # Per-session chat history (message dicts, matching type="messages").
    chat_state = gr.State([])

    def wrapped_chat(prompt, history, model_name):
        """Bind the preloaded model registry into the shared chat handler."""
        return chat(prompt, history, model_name, loaded_models)

    submit_btn.click(
        wrapped_chat,
        inputs=[prompt_box, chat_state, model_selector],
        outputs=[chatbot, chat_state]
    )

    prompt_box.submit(
        wrapped_chat,
        inputs=[prompt_box, chat_state, model_selector],
        outputs=[chatbot, chat_state]
    )

    # Reset both the visible chat log and the session history.
    clear_button.click(lambda: ([], []), None, [chatbot, chat_state])

    # BUG FIX: Button.click() has no `file_name` keyword — the original call
    # raised TypeError at startup and gave the transcript no destination.
    # Route download_chat's result into the gr.File component instead.
    # NOTE(review): assumes download_chat returns a file path suitable for
    # gr.File — confirm against chat_utils.
    download_btn.click(download_chat, inputs=[chat_state], outputs=[download_file])

demo.launch()