|
|
|
|
|
import gradio as gr |
|
|
from model_loader import MODELS, load_models |
|
|
from chat_utils import chat, download_chat |
|
|
|
|
|
|
|
|
# Load every configured model once at startup so all chat handlers share
# the same registry instead of re-loading per request.
loaded_models = load_models(MODELS)


with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    # NOTE: original labels were mojibake (mis-decoded UTF-8 emoji);
    # restored to the intended glyphs.
    gr.Markdown("<h1 style='text-align:center;'>💻 Multi-Model Coding Assistant</h1>")

    with gr.Row():
        model_selector = gr.Dropdown(
            choices=list(MODELS.keys()),
            value=list(MODELS.keys())[0],
            label="Choose Model",
        )
        clear_button = gr.Button("🧹 Clear Chat")
        download_btn = gr.Button("⬇️ Download Chat")

    chatbot = gr.Chatbot(label="Conversation", show_copy_button=True, type="messages")
    prompt_box = gr.Textbox(placeholder="Ask a coding question...", label="Your Prompt")
    submit_btn = gr.Button("🚀 Send")

    # Target component for the transcript produced by download_chat
    # (assumed to return a file path — TODO confirm against chat_utils).
    download_file = gr.File(label="Chat Transcript")

    # Conversation history in messages format; mirrored into `chatbot`.
    chat_state = gr.State([])

    def wrapped_chat(prompt, history, model_name):
        """Bind the shared `loaded_models` registry into the chat handler.

        Returns whatever `chat` returns: the updated chatbot messages and
        the updated history state.
        """
        return chat(prompt, history, model_name, loaded_models)

    # Wire both the Send button and Enter-in-the-textbox to the same
    # handler so the two paths cannot drift apart.
    for trigger in (submit_btn.click, prompt_box.submit):
        trigger(
            wrapped_chat,
            inputs=[prompt_box, chat_state, model_selector],
            outputs=[chatbot, chat_state],
        )

    # Reset both the visible conversation and the stored history.
    clear_button.click(lambda: ([], []), None, [chatbot, chat_state])

    # BUG FIX: event listeners accept no `file_name` kwarg — the original
    # call raised TypeError at startup. Route download_chat's return value
    # into the File component so the browser offers it for download.
    download_btn.click(download_chat, inputs=[chat_state], outputs=download_file)


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for tests or `gradio` reload
    # mode) does not immediately start a server.
    demo.launch()
|
|
|