File size: 1,184 Bytes
5c0c0f7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
import gc
import torch
from safetensors.torch import save_file
from diffusers import LTXVideoTransformer3DModel

# Export an LTX-Video transformer checkpoint to fp32/fp16/bf16 safetensors files.
#
# Disable TF32 so any fp32 math before saving is done at full precision.
torch.backends.cudnn.allow_tf32 = False
torch.backends.cuda.matmul.allow_tf32 = False

MODEL_PATH = r"G:/ltx/ltxdiffusers-0.9"

with torch.no_grad():
    # Load the transformer submodule in fp32 on CPU; weights are only
    # read and re-serialized, never trained.
    transformer = LTXVideoTransformer3DModel.from_pretrained(
        MODEL_PATH,
        subfolder="transformer",
        torch_dtype=torch.float32,
    ).to("cpu")

try:
    # Grab the state dict once and reuse it for every precision variant.
    state_dict = transformer.state_dict()

    # fp32: save the weights exactly as loaded.
    save_file(state_dict, "transformer-fp32.safetensors")

    # fp16 / bf16: cast per-tensor copies on CPU. Casting fp32 -> bf16 on
    # CPU gives the same rounding as on GPU, so there is no need to move
    # the whole model to CUDA (which would fail on CUDA-less machines).
    save_file(
        {k: v.to(torch.float16) for k, v in state_dict.items()},
        "transformer-fp16.safetensors",
    )
    save_file(
        {k: v.to(torch.bfloat16) for k, v in state_dict.items()},
        "transformer-bf16.safetensors",
    )
    print("transformer saved")
except Exception as exc:
    # Report the actual failure instead of silently swallowing it
    # (the original bare `except:` hid the error and caught even
    # KeyboardInterrupt). Typo "saveing" fixed.
    print(f"error saving transformer: {exc}")

# Free the model before exiting; only touch the CUDA allocator if CUDA exists.
del transformer
gc.collect()
if torch.cuda.is_available():
    torch.cuda.empty_cache()