# Scrape artifact: "Spaces: / Paused" is the HuggingFace Spaces page status
# header captured during extraction — it is not part of the application source.
| import os | |
| import uuid | |
| import gradio as gr | |
| import requests | |
| from datetime import datetime | |
| # Cấu hình | |
| LLAMA_SERVER_URL = "http://localhost:8000/completion" # llama-server chạy trên cổng 8000 nội bộ | |
def call_llama_server(messages, max_length=50):
    """Send the latest user message to llama-server and return its completion.

    Parameters
    ----------
    messages : list[dict]
        Chat history as ``{"role": ..., "content": ...}`` dicts. Only the last
        entry's ``content`` is sent as the prompt — llama.cpp's /completion
        endpoint takes raw text, not a message list.
    max_length : int
        Maximum number of tokens to generate (``n_predict``).

    Returns
    -------
    str
        The generated text, or an ``"Error: ..."`` string on failure. Errors
        are reported in-band (not raised) because callers display the return
        value directly in the chat window.
    """
    # Guard: an empty history would otherwise raise IndexError on messages[-1].
    if not messages:
        return "Error: empty message history"
    payload = {
        "prompt": messages[-1]["content"],  # send only the final prompt
        "n_predict": max_length,
    }
    try:
        response = requests.post(LLAMA_SERVER_URL, json=payload, timeout=10)
        response.raise_for_status()
        return response.json().get("content", "No response")
    except requests.RequestException as e:
        return f"Error: {str(e)}"
def format_history(history):
    """Convert stored history into the message list the Chatbot widget expects.

    Entries with role "placeholder" are dropped. Every remaining entry is
    emitted as a user message when its role is "user" and as an assistant
    message otherwise; extra bookkeeping keys (e.g. "key") are stripped.
    """
    formatted = []
    for entry in history:
        role = entry["role"]
        if role == "placeholder":
            continue
        display_role = "user" if role == "user" else "assistant"
        formatted.append({"role": display_role, "content": entry["content"]})
    return formatted
| # Giao diện chính | |
| with gr.Blocks( | |
| title="LLaMA Chat", | |
| theme=gr.themes.Soft(primary_hue="purple", secondary_hue="gray"), | |
| css=""" | |
| .chatbot { height: calc(100vh - 100px); overflow-y: auto; } | |
| .message-user { background-color: #e6f3ff; padding: 10px; border-radius: 5px; margin: 5px 0; } | |
| .message-assistant { background-color: #f0f0f0; padding: 10px; border-radius: 5px; margin: 5px 0; } | |
| """ | |
| ) as demo: | |
| # Trạng thái lưu lịch sử | |
| state = gr.State({ | |
| "conversations_history": {}, | |
| "conversations": [], | |
| "conversation_id": "" | |
| }) | |
| gr.Markdown("# LLaMA Chat\nChat với mô hình Qwen2.5-0.5B - Powered by llama.cpp") | |
| with gr.Row(): | |
| # Cột trái: Danh sách hội thoại | |
| with gr.Column(scale=1, min_width=200): | |
| gr.Markdown("### Hội thoại") | |
| conversations = gr.Dropdown(label="Chọn hội thoại", choices=[], value=None, allow_custom_value=True) | |
| new_chat_btn = gr.Button("Tạo hội thoại mới", variant="primary") | |
| clear_btn = gr.Button("Xóa lịch sử", variant="secondary") | |
| # Cột phải: Chatbot | |
| with gr.Column(scale=3): | |
| chatbot = gr.Chatbot(label="Cuộc trò chuyện", type="messages", elem_classes="chatbot") | |
| with gr.Row(): | |
| prompt_input = gr.Textbox( | |
| label="Nhập tin nhắn", | |
| placeholder="Gõ tin nhắn hoặc '/' để xem gợi ý...", | |
| show_label=False, | |
| container=False | |
| ) | |
| submit_btn = gr.Button("Gửi", variant="primary") | |
# Event handlers
def submit_prompt(prompt, state):
    """Handle a chat submission: store the prompt, query the model, update the UI.

    Parameters: prompt (str) from the textbox; state (dict) session state.
    Returns (state, chatbot_messages, textbox_value) matching the outputs
    wired to submit_btn.click.
    """
    if not prompt.strip():
        # BUG FIX: the original returned [] here, blanking the visible chat
        # even when a conversation was already on screen. Re-render the
        # current history instead (still nothing to render if none exists).
        current = state["conversations_history"].get(state["conversation_id"], [])
        return state, (format_history(current) if current else []), ""
    # Lazily create a conversation on the first message.
    if not state["conversation_id"]:
        convo_id = str(uuid.uuid4())
        state["conversation_id"] = convo_id
        state["conversations_history"][convo_id] = []
        # A prompt prefix doubles as the dropdown label.
        state["conversations"].append({"label": prompt[:20] + "...", "value": convo_id})
    history = state["conversations_history"][state["conversation_id"]]
    history.append({"role": "user", "content": prompt, "key": str(uuid.uuid4())})
    # Query llama-server (only the final prompt is actually sent).
    response = call_llama_server(format_history(history))
    history.append({"role": "assistant", "content": response, "key": str(uuid.uuid4())})
    return (
        state,
        format_history(history),
        ""
    )
def new_chat(state):
    """Deactivate the current conversation and refresh the dropdown choices."""
    state["conversation_id"] = ""
    # Rebuild (label, value) pairs so the dropdown lists every saved chat.
    choices = [(entry["label"], entry["value"]) for entry in state["conversations"]]
    return state, [], gr.update(choices=choices)
def select_conversation(state, convo_id):
    """Switch to convo_id and re-render its history; no-op for unknown ids."""
    known = state["conversations_history"]
    if not convo_id or convo_id not in known:
        return state, []
    state["conversation_id"] = convo_id
    return state, format_history(known[convo_id])
def clear_history(state):
    """Wipe the active conversation's messages (the conversation entry remains)."""
    active = state["conversation_id"]
    if active:
        state["conversations_history"][active] = []
    return state, []
| # Sự kiện | |
| submit_btn.click( | |
| fn=submit_prompt, | |
| inputs=[prompt_input, state], | |
| outputs=[state, chatbot, prompt_input] | |
| ) | |
| new_chat_btn.click( | |
| fn=new_chat, | |
| inputs=[state], | |
| outputs=[state, chatbot, conversations] | |
| ) | |
| conversations.change( | |
| fn=select_conversation, | |
| inputs=[state, conversations], | |
| outputs=[state, chatbot] | |
| ) | |
| clear_btn.click( | |
| fn=clear_history, | |
| inputs=[state], | |
| outputs=[state, chatbot] | |
| ) | |
| demo.launch(server_name="0.0.0.0", server_port=3000) |