Spaces:
Running
Running
File size: 4,379 Bytes
0ea40d5 5d2e348 0ea40d5 b5a0b96 0ea40d5 b5a0b96 0ea40d5 b5a0b96 642547d b5a0b96 0ea40d5 642547d 0ea40d5 b5a0b96 0ea40d5 642547d 0ea40d5 b5a0b96 642547d 0ea40d5 642547d 0ea40d5 fdc8455 b5a0b96 0ea40d5 642547d fdc8455 b5a0b96 fdc8455 b5a0b96 0ea40d5 bd7c5b9 642547d bd7c5b9 642547d b5a0b96 0ea40d5 642547d b5a0b96 bd7c5b9 fdc8455 | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 |
# ./app.py
"""
The Interface Skeleton - The code sets up the navigation panel and the multimodal chat interface
The .then() chain: Previously, the save happened "in the background." Now, handle_save explicitly returns the new load_history() results to the history_list component, causing it to "re-render" with the new chat visible.
The chat_id_state: By passing this back and forth, the app knows if it should update an existing file in the HF Dataset or create a new one.
history_list.click: This is the bridge that makes the sidebar interactive. Without this event, clicking an entry under "Recent Conversations" would do nothing.
"""
import gradio as gr
from core_logic import chat_function
from storage import save_chat, load_history, get_chat_content
# theme/css moved here: gr.Blocks() accepts them, demo.launch() does not
# (unknown launch() kwargs raise TypeError in current Gradio).
with gr.Blocks(theme=gr.themes.Soft(), css="styles.css") as demo:
    # Tracks the storage filename of the current session so handle_save
    # knows whether to update an existing chat or create a new one.
    chat_id_state = gr.State("")

    with gr.Row():
        # --- Sidebar ---
        # NOTE(review): gr.Column has no "secondary" variant (valid values:
        # "default", "panel", "compact"); "panel" gives the boxed sidebar
        # look that was presumably intended — confirm visually.
        with gr.Column(scale=1, variant="panel"):
            gr.Markdown("### 🛠️ Silicon Architect")
            new_btn = gr.Button("➕ New Chat", variant="primary")

            # Sidebar list of saved conversations; each sample is one
            # [chat_id] row backed by the hidden Textbox component.
            history_list = gr.Dataset(
                components=[gr.Textbox(visible=False)],
                label="Recent Conversations",
                samples=load_history(),
                type="values",        # click events deliver the row values
                samples_per_page=20,  # don't cut the list off early
            )

        # --- Main Chat ---
        with gr.Column(scale=4):
            # bot_response streams openai-style {"role", "content"} dicts,
            # which requires the "messages" history format.
            chatbot = gr.Chatbot(
                show_label=False,
                height=700,
                type="messages",
            )
            chat_input = gr.MultimodalTextbox(
                interactive=True,
                placeholder="Discuss architecture or upload code...",
                show_label=False,
            )
# --- LOGIC FUNCTIONS ---
def bot_response(message, history, chat_id):
    """Stream the assistant's reply for one user turn.

    Appends the user's text plus an empty assistant placeholder to the
    chat history, then yields the history after every streamed chunk so
    the Chatbot re-renders progressively.

    Args:
        message: MultimodalTextbox payload; the "text" key holds the prompt.
        history: list of {"role", "content"} messages (mutated in place).
        chat_id: current session's storage id (unused here; present so the
            event signature lines up with the save step that follows).

    Yields:
        The growing history list after each partial response.
    """
    history.append({"role": "user", "content": message["text"]})
    placeholder = {"role": "assistant", "content": ""}
    history.append(placeholder)
    # The model sees everything up to — but not including — the placeholder.
    for chunk in chat_function(message, history[:-1]):
        placeholder["content"] = chunk
        yield history
"""
def handle_save(history, chat_id):
# Saves the chat and returns the updated list for the sidebar
new_id = save_chat(chat_id, history)
return new_id, load_history()
"""
def handle_save(history, chat_id):
# 1. Save the actual data
new_id = save_chat(chat_id, history)
# 2. Get the latest from hub
current_list = load_history()
# 3. Ensure the current one is definitely at the top
if [new_id] not in current_list:
current_list.insert(0, [new_id])
return new_id, gr.update(samples=current_list)
def load_past_chat(selected_list):
    """Open a previously saved conversation picked from the sidebar.

    Args:
        selected_list: the clicked Dataset row, shaped like ['chat_id'].

    Returns:
        (content, chat_id): the stored message list for the Chatbot, and
        the id so later saves update this chat rather than create a new one.
    """
    selected_id = selected_list[0]
    return get_chat_content(selected_id), selected_id
# --- EVENT HANDLERS ---
# Re-enter the Blocks context so the listeners attach to `demo` even though
# they are wired after the layout section.
with demo:
    # 1. Submit Chat -> Stream Response -> Save -> Refresh Sidebar.
    #    handle_save runs only after streaming finishes; returning the new
    #    load_history() results to history_list makes it re-render with the
    #    saved chat visible.
    chat_input.submit(
        bot_response,
        [chat_input, chatbot, chat_id_state],
        [chatbot],
    ).then(
        handle_save,
        [chatbot, chat_id_state],
        [chat_id_state, history_list],
    )

    # 2. Click Sidebar Item -> Load Content. This is the bridge that makes
    #    the sidebar interactive.
    history_list.click(
        load_past_chat,
        [history_list],
        [chatbot, chat_id_state],
    )

    # 3. New Chat Button: clear the chat, reset the session id, refresh the
    #    sidebar. gr.update(samples=...) matches how handle_save updates the
    #    Dataset, instead of handing it a raw list.
    new_btn.click(
        lambda: ([], "", gr.update(samples=load_history())),
        None,
        [chatbot, chat_id_state, history_list],
    )

# NOTE(review): the original "Force Refresh" section re-created history_list
# here, AFTER the events above were wired. That produced a second, orphaned
# Dataset that no event ever updated, while the wired component kept its old
# samples — removed. Dynamic refresh is achieved by returning
# gr.update(samples=...) from handle_save / new_btn instead; the intended
# samples_per_page tweak belongs on the original Dataset definition.

# launch() accepts neither theme nor css (those are gr.Blocks() arguments);
# passing them here raises TypeError.
demo.launch()