# app-otqmig-46 / app.py
# Uploaded by AiCoderv2 — "Update Gradio app with multiple files" (commit 3720b00, verified)
import gradio as gr
import spaces
from models import load_model, chat_with_model
from config import AVAILABLE_MODELS, DEFAULT_MODEL
from utils import format_conversation, save_chat_history, load_chat_history
# Global variable for current model.
# None until a model is first loaded; afterwards holds the object returned by
# models.load_model() (generate_response reads a 'name' key from it).
current_model = None
def switch_model(model_name):
    """Load *model_name* and make it the active module-level model.

    Returns a short status string for display in the UI.
    """
    global current_model
    loaded = load_model(model_name)
    current_model = loaded
    return f"Switched to {model_name}"
@spaces.GPU(duration=120)  # GPU acceleration for inference
def generate_response(message, history, model_name, streaming=False):
    """Run one chat turn against *model_name*, loading it on demand.

    The active model is cached in the module-level ``current_model`` and is
    reloaded only when no model is loaded yet or a different model is
    requested (compared via the cached dict's 'name' entry).
    """
    global current_model
    needs_reload = current_model is None or current_model['name'] != model_name
    if needs_reload:
        current_model = load_model(model_name)
    conversation = format_conversation(message, history)
    return chat_with_model(current_model, conversation, streaming=streaming)
# UI layout and event wiring for the chatbot demo.
with gr.Blocks(title="Advanced LLM Chatbot", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Advanced LLM Chatbot")
    gr.Markdown("*Powered by Hugging Face models with model switching and streaming*")
    gr.Markdown("[Built with anycoder](https://huggingface.co/spaces/akhaliq/anycoder)")
    # Model selection row: dropdown + explicit switch button + status readout.
    with gr.Row():
        model_dropdown = gr.Dropdown(
            choices=list(AVAILABLE_MODELS.keys()),
            value=DEFAULT_MODEL,
            label="Select Model",
            info="Choose a chat model to use"
        )
        switch_btn = gr.Button("Switch Model")
        status = gr.Textbox(label="Status", interactive=False, value=f"Current: {DEFAULT_MODEL}")
    # Options row: streaming toggle plus save/load of the chat history.
    with gr.Row():
        streaming_toggle = gr.Checkbox(label="Enable Streaming", value=False)
        save_btn = gr.Button("Save Chat")
        load_btn = gr.Button("Load Chat")
    # type="messages" means history is a list of {"role": ..., "content": ...} dicts.
    chatbot = gr.Chatbot(type="messages", height=400, show_copy_button=True)
    msg = gr.Textbox(placeholder="Type your message here...", label="Message")
    with gr.Row():
        submit_btn = gr.Button("Send", variant="primary")
        clear_btn = gr.ClearButton([msg, chatbot])
        undo_btn = gr.Button("Undo Last")

    def respond(message, chat_history, model_name, streaming):
        """Generate a reply and append the user/assistant turn to the history.

        Returns ("", updated_history) so the input textbox is cleared.
        Blank or whitespace-only messages are ignored.
        """
        if not message.strip():
            return "", chat_history
        # NOTE(review): the new user message is passed separately and only
        # appended to the history afterwards — format_conversation presumably
        # merges them into one prompt; confirm against utils.format_conversation.
        response = generate_response(message, chat_history, model_name, streaming)
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": response})
        return "", chat_history

    def undo_last(history):
        """Drop the most recent user+assistant pair, if one exists."""
        if len(history) >= 2:
            return history[:-2]  # Remove last user and assistant message
        return history

    def save_history(history):
        """Persist the chat via utils.save_chat_history; report to the status box."""
        save_chat_history(history)
        return "Chat saved!"

    def load_history():
        """Return a previously saved history for display in the chatbot."""
        return load_chat_history()

    # Model switching fires on both dropdown change and the explicit button.
    model_dropdown.change(switch_model, model_dropdown, status)
    switch_btn.click(switch_model, model_dropdown, status)
    # Enter key and Send button share the same handler, inputs, and outputs.
    msg.submit(respond, [msg, chatbot, model_dropdown, streaming_toggle], [msg, chatbot])
    submit_btn.click(respond, [msg, chatbot, model_dropdown, streaming_toggle], [msg, chatbot])
    undo_btn.click(undo_last, chatbot, chatbot)
    save_btn.click(save_history, chatbot, status)
    load_btn.click(load_history, None, chatbot)
# Launch the app when executed directly (the Spaces entry point).
if __name__ == "__main__":
    demo.launch()