# GLM-4.6V-Flash / app.py
# Source: Hugging Face Space by akhaliq (uploaded via huggingface_hub, commit f5258bc)
# Image Chatbot Application
import gradio as gr
import os
from openai import OpenAI

# Initialize OpenAI client pointed at the Hugging Face Inference Router,
# which exposes an OpenAI-compatible chat-completions API.
# NOTE(review): os.environ["HF_TOKEN"] raises KeyError at import time if the
# secret is not set — intentional fail-fast on Spaces, but confirm.
client = OpenAI(
    base_url="https://router.huggingface.co/v1",
    api_key=os.environ["HF_TOKEN"],
    default_headers={
        # Bill inference usage to the huggingface org rather than the caller.
        "X-HF-Bill-To": "huggingface"
    }
)
def process_message(message, history, image):
    """
    Stream a chat completion for the user's message and optional image.

    Args:
        message: Text the user typed (may be empty when only an image is sent).
        history: List of ``(user_msg, assistant_msg)`` tuples from the Chatbot.
        image: Local filepath of an uploaded image (``gr.Image(type="filepath")``),
            or ``None``.

    Yields:
        ``(partial_response, updated_history)`` tuples so the UI refreshes as
        tokens stream in. On API failure, yields the error text instead.
    """
    # Local imports keep the module's top-level import block untouched.
    import base64
    import mimetypes

    messages = []
    # Replay prior turns so the model has conversational context.
    # NOTE: history images are not replayed — only their text — matching the
    # original behavior.
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    if not message and not image:
        # Nothing to send: avoid a degenerate API call with no new user turn.
        yield "", history
        return

    # Build the current user turn as multimodal content parts.
    content = []
    if message:
        content.append({"type": "text", "text": message})
    if image:
        # BUG FIX: the original sent the bare local filepath as the image
        # "url"; a remote API cannot read the caller's filesystem. Inline
        # the file as a base64 data URI instead.
        mime = mimetypes.guess_type(image)[0] or "image/png"
        with open(image, "rb") as f:
            encoded = base64.b64encode(f.read()).decode("ascii")
        content.append({
            "type": "image_url",
            "image_url": {"url": f"data:{mime};base64,{encoded}"},
        })
    messages.append({"role": "user", "content": content})

    # Stream the completion, accumulating the reply so each yield carries
    # the full response-so-far.
    response = ""
    try:
        stream = client.chat.completions.create(
            model="zai-org/GLM-4.6V-Flash:zai-org",
            messages=messages,
            stream=True,
        )
        for chunk in stream:
            if chunk.choices[0].delta.content:
                response += chunk.choices[0].delta.content
                yield response, history + [(message, response)]
    except Exception as e:
        # Surface API errors in the chat rather than crashing the UI.
        error_msg = f"Error: {str(e)}"
        yield error_msg, history + [(message, error_msg)]
# Create Gradio interface
# Create Gradio interface: chat window, text box, image uploader, and
# send/clear controls, all wired to process_message.
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
    gr.Markdown("# 🤖 Image Chatbot")
    gr.Markdown("Chat with an AI that can understand images! Upload an image and ask questions about it.")
    # Add the "Built with anycoder" link
    gr.Markdown(
        '<a href="https://huggingface.co/spaces/akhaliq/anycoder" target="_blank" style="text-decoration: none;">'
        '<span style="color: #4F46E5; font-weight: bold;">Built with anycoder</span>'
        '</a>'
    )
    chatbot = gr.Chatbot(
        label="Conversation",
        height=400,
        # NOTE(review): bubble_full_width was deprecated/removed in newer
        # Gradio releases — confirm the pinned gradio version accepts it.
        bubble_full_width=False,
        avatar_images=(
            "https://cdn-icons-png.flaticon.com/512/147/147144.png",
            "https://cdn-icons-png.flaticon.com/512/4712/4712025.png"
        )
    )
    with gr.Row():
        with gr.Column(scale=3):
            msg = gr.Textbox(
                label="Your message",
                placeholder="Type your message here...",
                lines=2
            )
        with gr.Column(scale=1):
            # type="filepath" means process_message receives a local path string.
            img = gr.Image(
                label="Upload image",
                type="filepath",
                height=50
            )
    with gr.Row():
        submit_btn = gr.Button("Send", variant="primary")
        # ClearButton resets the listed components without a custom callback.
        clear_btn = gr.ClearButton(components=[msg, img, chatbot], value="Clear Chat")
    # Set up the chat interface.
    # NOTE(review): outputs lists the same Chatbot twice while process_message
    # yields (partial_text, updated_history) — the first slot feeds a plain
    # string to a Chatbot component; verify this renders as intended.
    # NOTE(review): api_visibility is only available on recent Gradio event
    # listeners — confirm against the pinned version.
    msg.submit(
        fn=process_message,
        inputs=[msg, chatbot, img],
        outputs=[chatbot, chatbot],
        api_visibility="public"
    )
    submit_btn.click(
        fn=process_message,
        inputs=[msg, chatbot, img],
        outputs=[chatbot, chatbot],
        api_visibility="public"
    )
# Launch the app
# Launch the app only when executed as a script (standard Gradio entry point).
# BUG FIX: removed a stray "<|end_of_box|>" extraction artifact that trailed
# the closing parenthesis and made the file a syntax error.
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces (required on HF Spaces / Docker)
        server_port=7860,       # default Hugging Face Spaces port
        share=False,
        # NOTE(review): footer_links is only supported on recent Gradio
        # versions — confirm the pinned gradio version accepts this kwarg.
        footer_links=[
            {
                "label": "Built with anycoder",
                "url": "https://huggingface.co/spaces/akhaliq/anycoder"
            }
        ]
    )