import gradio as gr
import os
import tempfile
import shutil
import re
import json
import datetime
import subprocess
from huggingface_hub import HfApi, hf_hub_download
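
# Assumed runtime dependencies (package names are assumptions, not pinned versions):
#   pip install gradio huggingface_hub gguf-connector
# gguf-connector provides the 'ggc' console script used below for the
# safetensors -> GGUF conversion step.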

# --- Conversion Function: Safetensors (UNet) → GGUF ---
def convert_unet_to_gguf(safetensors_path, output_dir, progress=gr.Progress()):
    """
    Converts a UNet safetensors file to GGUF using gguf-connector's CLI (t2 or t).
    Assumes the file is named 'unet.safetensors'.
    """
    progress(0.1, desc="Starting UNet to GGUF conversion...")

    try:
        # Ensure gguf-connector is available
        import gguf_connector  # noqa

        # Copy input to working dir because ggc expects files in current dir
        work_dir = tempfile.mkdtemp()
        input_path = os.path.join(work_dir, "unet.safetensors")
        shutil.copy(safetensors_path, input_path)

        # GGUF output will be named automatically like unet.safetensors -> unet.gguf
        gguf_output_path = os.path.join(work_dir, "unet.gguf")

        progress(0.3, desc="Running gguf-connector (t2: safetensors → GGUF)...")

        # 'ggc t2' is an interactive CLI: it lists the safetensors files in the
        # current directory and asks which one to convert. There is no robust
        # non-interactive mode, so we run it from the temp dir (which contains
        # only our file) and pipe "1\n" to select it. This is best-effort.

        # Change working dir so ggc sees the file
        original_cwd = os.getcwd()
        os.chdir(work_dir)

        try:
            # Auto-select the first (and only) listed file via stdin.
            result = subprocess.run(
                ["ggc", "t2"],
                input="1\n",  # select first model
                text=True,
                capture_output=True,
                timeout=300
            )
            if result.returncode != 0:
                raise RuntimeError(f"ggc t2 failed: {result.stderr}")
        finally:
            os.chdir(original_cwd)

        if not os.path.exists(gguf_output_path):
            # Try alternative naming
            candidates = [f for f in os.listdir(work_dir) if f.endswith(".gguf")]
            if not candidates:
                raise FileNotFoundError("No GGUF file generated by ggc t2")
            gguf_output_path = os.path.join(work_dir, candidates[0])

        # Move to output dir
        final_gguf_path = os.path.join(output_dir, "unet.gguf")
        shutil.move(gguf_output_path, final_gguf_path)

        # Also save minimal config
        config_path = os.path.join(output_dir, "config.json")
        with open(config_path, "w") as f:
            json.dump({
                "model_type": "unet",
                "format": "gguf",
                "source": "converted from safetensors"
            }, f)

        progress(1.0, desc="Conversion to GGUF complete!")
        return True, "UNet converted to GGUF successfully."

    except Exception as e:
        return False, str(e)
    finally:
        if 'work_dir' in locals():
            shutil.rmtree(work_dir, ignore_errors=True)
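
# Standalone usage sketch (hypothetical paths, outside the Gradio event flow):
#   ok, msg = convert_unet_to_gguf("/path/to/unet.safetensors", "/tmp/gguf_out")
#   print(ok, msg)  # expect (True, "UNet converted to GGUF successfully.") on success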

# --- Main Processing Function ---
def process_and_upload_unet_to_gguf(repo_url, hf_token, new_repo_id, private_repo, progress=gr.Progress()):
    if not all([repo_url, hf_token, new_repo_id]):
        return None, "❌ Error: Please fill in all fields.", ""

    if not re.match(r"^[a-zA-Z0-9._-]+/[a-zA-Z0-9._-]+$", new_repo_id):
        return None, "❌ Error: Invalid repository ID format. Use 'username/model-name'.", ""

    temp_dir = tempfile.mkdtemp()
    output_dir = tempfile.mkdtemp()

    try:
        # Authenticate
        progress(0.05, desc="Logging into Hugging Face...")
        api = HfApi(token=hf_token)
        user_info = api.whoami()
        user_name = user_info['name']
        progress(0.1, desc=f"Logged in as {user_name}.")

        # Parse source repo
        clean_url = repo_url.strip().rstrip("/")
        if "huggingface.co" not in clean_url:
            return None, "❌ Source must be a Hugging Face model repo.", ""
        src_repo_id = clean_url.replace("https://huggingface.co/", "")

        # Download only unet.safetensors
        progress(0.15, desc="Downloading unet.safetensors...")
        safetensors_path = hf_hub_download(
            repo_id=src_repo_id,
            filename="unet.safetensors",
            cache_dir=temp_dir,
            token=hf_token
        )
        progress(0.3, desc="Download complete.")

        # Convert
        success, msg = convert_unet_to_gguf(safetensors_path, output_dir, progress)
        if not success:
            return None, f"❌ Conversion failed: {msg}", ""

        # Create new repo
        progress(0.8, desc="Creating new repository...")
        api.create_repo(
            repo_id=new_repo_id,
            private=private_repo,
            repo_type="model",
            exist_ok=True
        )

        # Generate README
        readme = f"""---
library_name: diffusers
tags:
- gguf
- unet
- diffusion
- converted-by-gradio
---

# GGUF UNet Model

Converted from: [`{src_repo_id}`](https://huggingface.co/{src_repo_id})  
File: `unet.safetensors` → `unet.gguf`

Converted by: {user_name}  
Date: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}

> ⚠️ This UNet has been converted to GGUF for compact storage. Use it with a GGUF-aware inference engine.
"""
        with open(os.path.join(output_dir, "README.md"), "w") as f:
            f.write(readme)

        # Upload
        progress(0.9, desc="Uploading to Hugging Face Hub...")
        api.upload_folder(
            repo_id=new_repo_id,
            folder_path=output_dir,
            repo_type="model",
            token=hf_token,
            commit_message="Upload UNet GGUF conversion"
        )

        progress(1.0, desc="✅ Done!")
        result_html = f"""
βœ… Success!  
Your GGUF UNet is uploaded to: [{new_repo_id}](https://huggingface.co/{new_repo_id})  
Visibility: {'Private' if private_repo else 'Public'}
"""
        return gr.HTML(result_html), "βœ… Conversion and upload successful!", ""

    except Exception as e:
        return None, f"❌ Error: {str(e)}", ""
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)
        shutil.rmtree(output_dir, ignore_errors=True)

# --- Gradio UI ---
with gr.Blocks(title="UNet Safetensors β†’ GGUF Converter") as demo:
    gr.Markdown("# πŸ”„ UNet (Safetensors) to GGUF Converter")
    gr.Markdown("Converts `unet.safetensors` from a Hugging Face model repo to GGUF format for compact storage.")

    with gr.Row():
        with gr.Column():
            repo_url = gr.Textbox(
                label="Source Model Repository URL",
                placeholder="https://huggingface.co/Yabo/FramePainter",
                info="Must contain 'unet.safetensors'"
            )
            hf_token = gr.Textbox(
                label="Hugging Face Token",
                type="password",
                info="Write-access token from https://huggingface.co/settings/tokens"
            )
        with gr.Column():
            new_repo_id = gr.Textbox(
                label="New Repository ID",
                placeholder="your-username/framepainter-unet-gguf",
                info="Format: username/model-name"
            )
            private_repo = gr.Checkbox(label="Make Private", value=False)

    convert_btn = gr.Button("🚀 Convert & Upload", variant="primary")

    with gr.Row():
        status_output = gr.Markdown()
        repo_link_output = gr.HTML()

    convert_btn.click(
        fn=process_and_upload_unet_to_gguf,
        inputs=[repo_url, hf_token, new_repo_id, private_repo],
        outputs=[repo_link_output, status_output],
        show_progress=True
    )

    gr.Examples(
        examples=[
            ["https://huggingface.co/Yabo/FramePainter"]
        ],
        inputs=[repo_url]
    )

demo.launch()
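
# To run locally (assumed entry point): `python app.py`, then open the URL Gradio prints.
# On Hugging Face Spaces, a file named app.py is launched automatically.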