|
|
import spaces
|
|
|
|
|
|
@spaces.GPU
def dummy_gpu():
    """No-op GPU entry point.

    Does nothing at runtime; presumably exists only so a ``@spaces.GPU``
    function is registered (e.g. for ZeroGPU scheduling) — confirm against
    the Space's hardware configuration.
    """
    return None
|
|
|
|
|
|
import gradio as gr
|
|
|
import torch
|
|
|
from pathlib import Path
|
|
|
from diffusers import FluxPipeline, FluxTransformer2DModel
|
|
|
from huggingface_hub import hf_hub_download, HfApi
|
|
|
|
|
|
# When True, the Hyper-SD 8-step speedup LoRA is fused into the transformer
# weights at startup (see the IS_TURBO branch below).
IS_TURBO = False

# Scratch directory where the pipeline is serialized before being uploaded.
TEMP_DIR = "./temp"

# Base FLUX.1-dev repo: supplies the pipeline layout / config and every
# component except the transformer, which is swapped in below.
repo_id = "camenduru/FLUX.1-dev-diffusers"

# Load everything in bfloat16 to keep the memory footprint manageable.
dtype = torch.bfloat16
|
|
|
|
|
|
# Fetch a community single-file FLUX checkpoint (stored in a dataset repo).
cp = hf_hub_download("John6666/flux1-backup-202502", "jibMixFlux_v8AccentueightNSFW.safetensors", repo_type="dataset")

# Build the transformer from the single-file checkpoint, borrowing its
# config from the base FLUX.1-dev repo.
transformer = FluxTransformer2DModel.from_single_file(cp, subfolder="transformer", torch_dtype=dtype, config=repo_id)

# Assemble the full pipeline around the swapped-in transformer; all other
# components (VAE, text encoders, scheduler) come from the base repo.
pipe = FluxPipeline.from_pretrained(repo_id, transformer=transformer, torch_dtype=dtype)

if IS_TURBO:
    # Permanently bake the Hyper-SD 8-step LoRA into the weights:
    # move to GPU -> load adapter -> scale to 0.125 -> fuse -> drop the
    # LoRA bookkeeping -> return to CPU. Order matters here; the fused
    # weights are what upload_model() later serializes.
    pipe.to("cuda")
    pipe.load_lora_weights(hf_hub_download("ByteDance/Hyper-SD", "Hyper-FLUX.1-dev-8steps-lora.safetensors"), adapter_name="hyper-sd")
    pipe.set_adapters(["hyper-sd"], adapter_weights=[0.125])
    pipe.fuse_lora()
    pipe.unload_lora_weights()
    pipe.to("cpu")
|
|
|
|
|
|
def upload_model(repo_id: str = "", token: str = "", progress=gr.Progress(track_tqdm=True)):
    """Serialize the in-memory FLUX pipeline and upload it to a private Hub repo.

    Args:
        repo_id: Destination model repository id (e.g. "user/name").
        token: Hugging Face write token; required.
        progress: Gradio progress tracker mirroring the underlying tqdm bars.

    Returns:
        A short status string displayed in the UI.
    """
    # Guard clauses: both values come straight from free-form textboxes.
    if not token:
        return "Token not found."
    if not repo_id:
        return "Repo ID not found."
    # Write the full pipeline (transformer + aux components) to scratch space.
    pipe.save_pretrained(TEMP_DIR)
    # `token` is guaranteed truthy past the guard, so pass it directly
    # (the former `token if token else False` branch was dead code).
    api = HfApi(token=token)
    api.create_repo(repo_id=repo_id, token=token, private=True, exist_ok=True)
    api.upload_folder(repo_id=repo_id, repo_type="model", folder_path=TEMP_DIR, path_in_repo=".")
    # Also keep the original single-file checkpoint next to the diffusers layout.
    api.upload_file(repo_id=repo_id, repo_type="model", path_or_fileobj=cp, path_in_repo=Path(cp).name)
    return "Converted."
|
|
|
|
|
|
# Minimal UI: repo id + write token -> upload the assembled pipeline.
with gr.Blocks() as demo:
    # NOTE(review): this rebinds the module-level `repo_id` string defined
    # above to a Textbox component; nothing visible reads the old value after
    # this point, but verify if more code exists beyond this chunk.
    repo_id = gr.Textbox(label="Repo ID", value="")
    hf_token = gr.Textbox(label="Your HF write token", value="")
    run_button = gr.Button("Submit", variant="primary")
    # Placeholder status area, replaced by upload_model()'s return string.
    info_md = gr.Markdown("<br><br><br>")

    run_button.click(upload_model, [repo_id, hf_token], [info_md])

demo.launch()
|
|
|
|