AI-Talent-Force Claude Sonnet 4.5 committed on
Commit Β·
ecb4524
1
Parent(s): 9078a1e
Switch to gr.ChatInterface for proper Gradio 6.5 compatibility
Browse files
- Replaced manual Blocks interface with ChatInterface component
- ChatInterface automatically handles message formatting
- Function now returns just response string (not full history)
- Simplified event handling - ChatInterface manages everything
- Added retry, undo, and clear buttons automatically
- Fixes 'Data incompatible with messages format' error
Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
app.py
CHANGED
|
@@ -53,15 +53,16 @@ def chat_with_ceo(message, history):
|
|
| 53 |
Chat function that responds like the CEO
|
| 54 |
Args:
|
| 55 |
message: User's current message
|
| 56 |
-
history: List of previous
|
| 57 |
"""
|
| 58 |
# Build conversation context (limit history to last 5 exchanges for speed)
|
| 59 |
conversation = []
|
| 60 |
-
recent_history = history[-5:] if len(history) > 5 else history
|
| 61 |
-
for user_msg, bot_msg in recent_history:
|
| 62 |
-
conversation.append({"role": "user", "content": user_msg})
|
| 63 |
-
conversation.append({"role": "assistant", "content": bot_msg})
|
| 64 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 65 |
conversation.append({"role": "user", "content": message})
|
| 66 |
|
| 67 |
# Apply chat template
|
|
@@ -92,76 +93,38 @@ def chat_with_ceo(message, history):
|
|
| 92 |
# Decode response
|
| 93 |
response = tokenizer.decode(outputs[0][inputs['input_ids'].shape[1]:], skip_special_tokens=True)
|
| 94 |
|
| 95 |
-
# Return
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
""
|
| 114 |
-
|
| 115 |
-
|
| 116 |
-
|
|
|
|
| 117 |
height=500,
|
| 118 |
-
label="Chat with CEO AI",
|
| 119 |
-
show_label=True,
|
| 120 |
avatar_images=(None, "🎯")
|
| 121 |
-
)
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
scale=4
|
| 129 |
-
)
|
| 130 |
-
submit = gr.Button("Send", variant="primary", scale=1)
|
| 131 |
-
|
| 132 |
-
with gr.Row():
|
| 133 |
-
clear = gr.Button("Clear Chat")
|
| 134 |
-
|
| 135 |
-
gr.Examples(
|
| 136 |
-
examples=[
|
| 137 |
-
"What's your vision for the company?",
|
| 138 |
-
"How do you approach leadership?",
|
| 139 |
-
"What are your thoughts on innovation?",
|
| 140 |
-
"Can you share your perspective on team building?",
|
| 141 |
-
"What drives your business strategy?"
|
| 142 |
-
],
|
| 143 |
-
inputs=msg,
|
| 144 |
-
label="Example Questions"
|
| 145 |
-
)
|
| 146 |
-
|
| 147 |
-
gr.Markdown(
|
| 148 |
-
"""
|
| 149 |
-
---
|
| 150 |
-
### About This AI
|
| 151 |
-
This chatbot uses a fine-tuned Qwen3-30B language model trained on the CEO's blog posts and writings.
|
| 152 |
-
It attempts to replicate their writing style, thinking patterns, and perspectives on various topics.
|
| 153 |
-
"""
|
| 154 |
-
)
|
| 155 |
-
|
| 156 |
-
# Event handlers
|
| 157 |
-
msg.submit(chat_with_ceo, inputs=[msg, chatbot], outputs=[chatbot]).then(
|
| 158 |
-
lambda: "", None, msg
|
| 159 |
-
)
|
| 160 |
-
submit.click(chat_with_ceo, inputs=[msg, chatbot], outputs=[chatbot]).then(
|
| 161 |
-
lambda: "", None, msg
|
| 162 |
-
)
|
| 163 |
-
clear.click(lambda: [], None, chatbot, queue=False)
|
| 164 |
|
| 165 |
if __name__ == "__main__":
|
| 166 |
-
demo.
|
| 167 |
-
demo.launch(share=False, ssr_mode=False)
|
|
|
|
| 53 |
Chat function that responds like the CEO
|
| 54 |
Args:
|
| 55 |
message: User's current message
|
| 56 |
+
history: List of previous message dictionaries
|
| 57 |
"""
|
| 58 |
# Build conversation context (limit history to last 5 exchanges for speed)
|
| 59 |
conversation = []
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
|
| 61 |
+
# Process history - in Gradio ChatInterface, history is a list of dicts
|
| 62 |
+
recent_history = history[-10:] if len(history) > 10 else history # Last 5 exchanges (10 messages)
|
| 63 |
+
conversation.extend(recent_history)
|
| 64 |
+
|
| 65 |
+
# Add current message
|
| 66 |
conversation.append({"role": "user", "content": message})
|
| 67 |
|
| 68 |
# Apply chat template
|
|
|
|
| 93 |
# Decode response
|
| 94 |
response = tokenizer.decode(outputs[0][inputs['input_ids'].shape[1]:], skip_special_tokens=True)
|
| 95 |
|
| 96 |
+
# Return just the response string - ChatInterface handles the history
|
| 97 |
+
return response
|
| 98 |
+
|
| 99 |
+
# Create Gradio ChatInterface
|
| 100 |
+
demo = gr.ChatInterface(
|
| 101 |
+
fn=chat_with_ceo,
|
| 102 |
+
title="🎯 CEO AI Executive",
|
| 103 |
+
description="""
|
| 104 |
+
Chat with an AI trained on your CEO's writing style and thoughts.
|
| 105 |
+
Ask questions about business strategy, leadership, technology, or any topic your CEO writes about.
|
| 106 |
+
|
| 107 |
+
**Note:** This AI responds based on patterns learned from the CEO's blog posts and writings.
|
| 108 |
+
|
| 109 |
+
✅ **Model Status:** Loaded and ready! The model is kept in memory for fast responses.
|
| 110 |
+
""",
|
| 111 |
+
examples=[
|
| 112 |
+
"What's your vision for the company?",
|
| 113 |
+
"How do you approach leadership?",
|
| 114 |
+
"What are your thoughts on innovation?",
|
| 115 |
+
"Can you share your perspective on team building?",
|
| 116 |
+
"What drives your business strategy?"
|
| 117 |
+
],
|
| 118 |
+
chatbot=gr.Chatbot(
|
| 119 |
height=500,
|
|
|
|
|
|
|
| 120 |
avatar_images=(None, "🎯")
|
| 121 |
+
),
|
| 122 |
+
textbox=gr.Textbox(placeholder="Ask me anything...", container=False, scale=7),
|
| 123 |
+
submit_btn="Send",
|
| 124 |
+
retry_btn="🔄 Retry",
|
| 125 |
+
undo_btn="↩️ Undo",
|
| 126 |
+
clear_btn="🗑️ Clear",
|
| 127 |
+
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 128 |
|
| 129 |
if __name__ == "__main__":
|
| 130 |
+
demo.launch()
|
|
|