|
|
"""Single Chat ν UI""" |
|
|
|
|
|
import gradio as gr |
|
|
from typing import Any |
|
|
|
|
|
from models.model_registry import get_all_models, get_model_info |
|
|
from characters import get_character_loader, build_system_prompt |
|
|
from utils import parse_thinking_response, format_thinking_for_display |
|
|
|
|
|
|
|
|
def create_chat_tab(
    model_manager: Any = None,
    use_mock: bool = False,
) -> None:
    """Build the single-model chat tab.

    Creates the Gradio components for a one-on-one chat with a selected
    model/character pair and wires up the send, submit, and clear events.
    Components attach to the ambient Gradio context, so this is expected to
    be called inside a ``gr.Blocks`` / ``gr.Tab`` context manager.

    Args:
        model_manager: Backend exposing
            ``generate_response(model_id, messages, system_prompt)``.
            When ``None``, canned mock responses are used instead.
        use_mock: Force mock responses even when a model manager is given.

    NOTE(review): the Korean UI strings below appear mojibake-garbled in
    this view of the file — confirm the file's encoding (UTF-8) before
    touching any literal; they are preserved verbatim here.
    """
    char_loader = get_character_loader()

    # Dropdown entries show the human-readable description but yield the
    # raw model id as the selected value.
    all_models = get_all_models()
    model_choices = [(f"{get_model_info(m).get('description', m)}", m) for m in all_models]

    characters = char_loader.get_character_names()

    gr.Markdown("## λ¨μΌ λͺ¨λΈ μ±ν")
    gr.Markdown("μ νν λͺ¨λΈκ³Ό μΊλ¦ν°λ‘ λνλ₯Ό λλ 보μΈμ.")

    with gr.Row():
        with gr.Column(scale=1):
            model_dropdown = gr.Dropdown(
                choices=model_choices,
                value=all_models[0] if all_models else None,
                label="λͺ¨λΈ μ ν",
            )
        with gr.Column(scale=1):
            character_dropdown = gr.Dropdown(
                choices=characters,
                value=characters[0] if characters else None,
                label="μΊλ¦ν° μ ν",
            )

    # Transcript stored as OpenAI-style {"role": ..., "content": ...} dicts
    # (type="messages"), which `respond` relies on when rebuilding context.
    chatbot = gr.Chatbot(
        label="λν",
        height=400,
        type="messages",
    )

    # Collapsed panel showing the <think> section of the latest reply only.
    with gr.Accordion("Thinking Process (λ§μ§λ§ μλ΅)", open=False):
        thinking_display = gr.Markdown("*(μλ΅ μμ± ν νμλ©λλ€)*")

    with gr.Row():
        user_input = gr.Textbox(
            label="λ©μμ§ μλ ₯",
            placeholder="λ©μμ§λ₯Ό μλ ₯νμΈμ...",
            lines=2,
            scale=4,
        )
        send_btn = gr.Button("μ μ‘", variant="primary", scale=1)

    with gr.Row():
        clear_btn = gr.Button("λν μ΄κΈ°ν")

    # Latency / token count line for the most recent response.
    metadata_display = gr.Markdown("")

    def respond(
        model_id: str,
        character: str,
        message: str,
        history: list,
    ) -> tuple:
        """Generate one assistant reply and return the updated UI state.

        Returns a 4-tuple matching the event outputs:
        (chat history, cleared input text, thinking markdown, metadata line).
        """
        # Ignore empty / whitespace-only submissions; history is untouched.
        if not message.strip():
            return history, "", "*(λ©μμ§λ₯Ό μλ ₯ν΄μ£ΌμΈμ)*", ""

        # Rebuild the model-facing message list from the UI history,
        # stripping <think> sections from earlier assistant turns so the
        # raw thinking text is never fed back to the model.
        messages = []
        for msg in history:
            if msg["role"] == "user":
                messages.append({"role": "user", "content": msg["content"]})
            elif msg["role"] == "assistant":
                _, clean = parse_thinking_response(msg["content"])
                messages.append({"role": "assistant", "content": clean})

        messages.append({"role": "user", "content": message})

        system_prompt = build_system_prompt(character)

        # Mock path: used when explicitly requested or when no backend is
        # wired up, so the UI remains demoable without models.
        if use_mock or model_manager is None:
            response_full = f"<think>\n{character}λ‘μ μκ°ν΄λ³΄λ©΄...\n</think>\n\nμλ~ λ°κ°μ! (Mock Response)"
            meta = {"latency_s": 0.5, "output_tokens": 30}
        else:
            try:
                response_full, meta = model_manager.generate_response(
                    model_id, messages, system_prompt
                )
            # UI boundary: surface backend failures inline in the chat
            # rather than crashing the Gradio event handler.
            except Exception as e:
                response_full = f"*Error: {str(e)}*"
                meta = {"latency_s": 0, "output_tokens": 0}

        # clean_response is currently unused here: the raw response
        # (including <think>) is stored and cleaned again on later turns.
        thinking, clean_response = parse_thinking_response(response_full)

        history.append({"role": "user", "content": message})
        history.append({"role": "assistant", "content": response_full})

        meta_str = f"β±οΈ {meta.get('latency_s', 0):.2f}s | {meta.get('output_tokens', 0)} tokens"

        return (
            history,
            "",
            format_thinking_for_display(thinking) if thinking else "*No thinking*",
            meta_str,
        )

    def clear_chat() -> tuple:
        """Reset the conversation and all auxiliary displays."""
        return [], "", "*(μλ΅ μμ± ν νμλ©λλ€)*", ""

    # Event wiring: both the send button and Enter in the textbox trigger
    # `respond`; the clear button resets everything.
    send_btn.click(
        fn=respond,
        inputs=[model_dropdown, character_dropdown, user_input, chatbot],
        outputs=[chatbot, user_input, thinking_display, metadata_display],
    )

    user_input.submit(
        fn=respond,
        inputs=[model_dropdown, character_dropdown, user_input, chatbot],
        outputs=[chatbot, user_input, thinking_display, metadata_display],
    )

    clear_btn.click(
        fn=clear_chat,
        outputs=[chatbot, user_input, thinking_display, metadata_display],
    )
|
|
|