File size: 3,852 Bytes
f5258bc
 
 
 
 
 
 
 
 
 
 
 
 
e650e3a
f5258bc
 
 
 
 
 
2fe8f38
f5258bc
 
 
 
 
 
 
2fe8f38
f5258bc
 
2fe8f38
f5258bc
 
2fe8f38
f5258bc
 
 
 
2fe8f38
f5258bc
 
 
 
 
 
 
 
 
 
 
 
 
2fe8f38
 
 
f5258bc
 
 
2fe8f38
 
f5258bc
 
ee175f9
f5258bc
 
 
 
 
 
 
 
 
 
2fe8f38
f5258bc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73eca71
f5258bc
 
 
 
 
 
73eca71
f5258bc
 
 
 
 
 
 
 
 
 
ee175f9
f5258bc
 
 
 
 
 
ee175f9
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
import base64
import mimetypes
import os

import gradio as gr
from openai import OpenAI

# Initialize OpenAI client
client = OpenAI(
    base_url="https://router.huggingface.co/v1",
    api_key=os.environ["HF_TOKEN"],
    default_headers={
        "X-HF-Bill-To": "huggingface"
    }
)

def process_message(message, history, image):
    """
    Process user message and image, send to the model, and return the response.
    """
    # Prepare messages for the API
    messages = []
    
    # Add chat history - convert tuple format to messages format
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    
    # Add current message and image
    if message or image:
        current_message = {"role": "user", "content": []}
        
        if message:
            current_message["content"].append({"type": "text", "text": message})
        
        if image:
            current_message["content"].append({
                "type": "image_url",
                "image_url": {"url": image}
            })
        
        messages.append(current_message)
    
    # Get response from the model
    response = ""
    try:
        stream = client.chat.completions.create(
            model="zai-org/GLM-4.6V-Flash:zai-org",
            messages=messages,
            stream=True,
        )
        
        for chunk in stream:
            if chunk.choices[0].delta.content:
                response += chunk.choices[0].delta.content
                # Update history with new messages in the correct format
                new_history = history + [(message, response)]
                yield new_history, new_history
    
    except Exception as e:
        error_msg = f"Error: {str(e)}"
        error_history = history + [(message, error_msg)]
        yield error_history, error_history

# Create Gradio interface
with gr.Blocks() as demo:
    # Add the "Built with anycoder" link
    gr.Markdown(
        '<a href="https://huggingface.co/spaces/akhaliq/anycoder" target="_blank" style="text-decoration: none;">'
        '<span style="color: #4F46E5; font-weight: bold;">Built with anycoder</span>'
        '</a>'
    )
    
    chatbot = gr.Chatbot(
        label="Conversation",
        height=400,
        type="messages",  # Set to messages format
        avatar_images=(
            "https://cdn-icons-png.flaticon.com/512/147/147144.png",
            "https://cdn-icons-png.flaticon.com/512/4712/4712025.png"
        )
    )
    
    with gr.Row():
        with gr.Column(scale=3):
            msg = gr.Textbox(
                label="Your message",
                placeholder="Type your message here...",
                lines=2
            )
        with gr.Column(scale=1):
            img = gr.Image(
                label="Upload image",
                type="filepath",
                height=50
            )
    
    with gr.Row():
        submit_btn = gr.Button("Send", variant="primary")
        clear_btn = gr.ClearButton(components=[msg, img, chatbot], value="Clear Chat")
    
    # Set up the chat interface
    msg.submit(
        fn=process_message,
        inputs=[msg, chatbot, img],
        outputs=[chatbot, chatbot],
        api_visibility="public"
    )
    
    submit_btn.click(
        fn=process_message,
        inputs=[msg, chatbot, img],
        outputs=[chatbot, chatbot],
        api_visibility="public"
    )

# Launch the app
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        theme=gr.themes.Soft(primary_hue="blue"),
        footer_links=[
            {
                "label": "Built with anycoder",
                "url": "https://huggingface.co/spaces/akhaliq/anycoder"
            }
        ]
    )