import gradio as gr
import os
import tempfile
import shutil
import re
import json
import datetime
import subprocess
from huggingface_hub import HfApi, hf_hub_download
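
# Runtime dependencies (assumed package names): gradio, huggingface_hub, and
# gguf-connector, which provides the `ggc` CLI used below for the actual
# safetensors → GGUF conversion.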

# --- Conversion Function: Safetensors (UNet) → GGUF ---
def convert_unet_to_gguf(safetensors_path, output_dir, progress=gr.Progress()):
    """
    Converts a UNet safetensors file to GGUF using gguf-connector's CLI (`ggc t2`).
    Assumes the input file is named 'unet.safetensors'.
    """
    progress(0.1, desc="Starting UNet to GGUF conversion...")
    try:
        # Ensure gguf-connector is installed before shelling out to its CLI
        import gguf_connector  # noqa: F401
        # Copy the input into a scratch dir, because ggc scans the current
        # working directory for candidate files
        work_dir = tempfile.mkdtemp()
        input_path = os.path.join(work_dir, "unet.safetensors")
        shutil.copy(safetensors_path, input_path)
        # ggc names its output after the input: unet.safetensors -> unet.gguf
        gguf_output_path = os.path.join(work_dir, "unet.gguf")
        progress(0.3, desc="Running gguf-connector (t2: safetensors → GGUF)...")
        # 'ggc t2' is interactive: it lists the files it finds in the current
        # directory and asks which one to convert. It has no robust
        # non-interactive mode, so we run it from the scratch dir (where
        # unet.safetensors is the only candidate) and answer the prompt via
        # stdin. This is fragile but best-effort.
        original_cwd = os.getcwd()
        os.chdir(work_dir)
        try:
            result = subprocess.run(
                ["ggc", "t2"],
                input="1\n",  # select the first (and only) listed model
                text=True,
                capture_output=True,
                timeout=300
            )
            if result.returncode != 0:
                raise RuntimeError(f"ggc t2 failed: {result.stderr}")
        finally:
            os.chdir(original_cwd)
        if not os.path.exists(gguf_output_path):
            # Try alternative naming
            candidates = [f for f in os.listdir(work_dir) if f.endswith(".gguf")]
            if not candidates:
                raise FileNotFoundError("No GGUF file generated by ggc t2")
            gguf_output_path = os.path.join(work_dir, candidates[0])
        # Move to output dir
        final_gguf_path = os.path.join(output_dir, "unet.gguf")
        shutil.move(gguf_output_path, final_gguf_path)
        # Also save minimal config
        config_path = os.path.join(output_dir, "config.json")
        with open(config_path, "w") as f:
            json.dump({
                "model_type": "unet",
                "format": "gguf",
                "source": "converted from safetensors"
            }, f)
        progress(1.0, desc="Conversion to GGUF complete!")
        return True, "UNet converted to GGUF successfully."
    except Exception as e:
        return False, str(e)
    finally:
        if 'work_dir' in locals():
            shutil.rmtree(work_dir, ignore_errors=True)
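
# Example (local smoke test; paths are hypothetical):
#   ok, msg = convert_unet_to_gguf("/tmp/unet.safetensors", "/tmp/gguf-out")
#   print(ok, msg)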

# --- Main Processing Function ---
def process_and_upload_unet_to_gguf(repo_url, hf_token, new_repo_id, private_repo, progress=gr.Progress()):
    if not all([repo_url, hf_token, new_repo_id]):
        return None, "❌ Error: Please fill in all fields."
    if not re.match(r"^[a-zA-Z0-9._-]+/[a-zA-Z0-9._-]+$", new_repo_id):
        return None, "❌ Error: Invalid repository ID format. Use 'username/model-name'."
    temp_dir = tempfile.mkdtemp()
    output_dir = tempfile.mkdtemp()
    try:
        # Authenticate
        progress(0.05, desc="Logging into Hugging Face...")
        api = HfApi(token=hf_token)
        user_info = api.whoami()
        user_name = user_info['name']
        progress(0.1, desc=f"Logged in as {user_name}.")
        # Parse the source repo ID out of the URL,
        # e.g. 'https://huggingface.co/Yabo/FramePainter' -> 'Yabo/FramePainter'
        clean_url = repo_url.strip().rstrip("/")
        if "huggingface.co" not in clean_url:
            return None, "❌ Error: Source must be a Hugging Face model repo URL."
        src_repo_id = clean_url.replace("https://huggingface.co/", "")
        # Download only unet.safetensors
        progress(0.15, desc="Downloading unet.safetensors...")
        safetensors_path = hf_hub_download(
            repo_id=src_repo_id,
            filename="unet.safetensors",
            cache_dir=temp_dir,
            token=hf_token
        )
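        # hf_hub_download returns the local path of the cached file and
        # raises if the repo has no file named unet.safetensors.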
        progress(0.3, desc="Download complete.")
        # Convert
        success, msg = convert_unet_to_gguf(safetensors_path, output_dir, progress)
        if not success:
            return None, f"❌ Conversion failed: {msg}"
        # Create new repo
        progress(0.8, desc="Creating new repository...")
        api.create_repo(
            repo_id=new_repo_id,
            private=private_repo,
            repo_type="model",
            exist_ok=True
        )
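        # exist_ok=True makes repo creation idempotent: re-running the
        # conversion simply pushes a new commit to the same repo.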
        # Generate README
        readme = f"""---
library_name: diffusers
tags:
- gguf
- unet
- diffusion
- converted-by-gradio
---

# GGUF UNet Model

Converted from: [`{src_repo_id}`](https://huggingface.co/{src_repo_id})
File: `unet.safetensors` → `unet.gguf`
Converted by: {user_name}
Date: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}

> ⚠️ This is a GGUF-quantized UNet for storage efficiency. Use with compatible GGUF-aware inference engines.
"""
        with open(os.path.join(output_dir, "README.md"), "w") as f:
            f.write(readme)
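        # The YAML front matter above (library_name, tags) is parsed by the
        # Hub and rendered as model card metadata.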
        # Upload
        progress(0.9, desc="Uploading to Hugging Face Hub...")
        api.upload_folder(
            repo_id=new_repo_id,
            folder_path=output_dir,
            repo_type="model",
            token=hf_token,
            commit_message="Upload UNet GGUF conversion"
        )
        progress(1.0, desc="✅ Done!")
        result_html = f"""
<p>✅ <b>Success!</b> Your GGUF UNet is uploaded to:
<a href="https://huggingface.co/{new_repo_id}" target="_blank">{new_repo_id}</a></p>
<p>Visibility: {'Private' if private_repo else 'Public'}</p>
"""
        return result_html, "✅ Conversion and upload successful!"
    except Exception as e:
        return None, f"❌ Error: {str(e)}"
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)
        shutil.rmtree(output_dir, ignore_errors=True)
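
# The processing function returns (result_html, status_message); these map
# one-to-one onto the two output components wired up below.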

# --- Gradio UI ---
with gr.Blocks(title="UNet Safetensors → GGUF Converter") as demo:
    gr.Markdown("# 🔄 UNet (Safetensors) to GGUF Converter")
    gr.Markdown("Converts `unet.safetensors` from a Hugging Face model repo to GGUF format for compact storage.")
    with gr.Row():
        with gr.Column():
            repo_url = gr.Textbox(
                label="Source Model Repository URL",
                placeholder="https://huggingface.co/Yabo/FramePainter",
                info="Must contain 'unet.safetensors'"
            )
            hf_token = gr.Textbox(
                label="Hugging Face Token",
                type="password",
                info="Write-access token from https://huggingface.co/settings/tokens"
            )
        with gr.Column():
            new_repo_id = gr.Textbox(
                label="New Repository ID",
                placeholder="your-username/framepainter-unet-gguf",
                info="Format: username/model-name"
            )
            private_repo = gr.Checkbox(label="Make Private", value=False)
    convert_btn = gr.Button("🚀 Convert & Upload", variant="primary")
    with gr.Row():
        status_output = gr.Markdown()
        repo_link_output = gr.HTML()
    convert_btn.click(
        fn=process_and_upload_unet_to_gguf,
        inputs=[repo_url, hf_token, new_repo_id, private_repo],
        outputs=[repo_link_output, status_output],
        show_progress=True
    )
    gr.Examples(
        examples=[
            ["https://huggingface.co/Yabo/FramePainter"]
        ],
        inputs=[repo_url]
    )
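
# When running locally (outside a Space), demo.launch(share=True) would also
# expose a temporary public URL; inside a Space, launch() is sufficient.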
demo.launch()