Suzu008 committed on
Commit
93345e0
·
verified ·
1 Parent(s): fb2b61e

Upload folder using huggingface_hub

Browse files
convert.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import os
3
+ import sys
4
+ from safetensors.torch import load_file, save_file
5
+
6
class C:
    """ANSI escape codes used to colorize terminal output.

    Pure constant namespace — never instantiated. RESET must be emitted
    after any colored span to restore default styling.
    """

    # Reset / emphasis
    RESET = '\033[0m'
    BOLD = '\033[1m'

    # Foreground colors
    HEADER = '\033[95m'   # bright magenta, used for section banners
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
    CYAN = '\033[96m'
    GREY = '\033[90m'
16
+
17
def convert_key(old_key):
    """Map a training-time LoRA key to the transformer/PEFT naming scheme.

    Expects keys shaped like
    ``<prefix>.attn.processor.<type>_loras.<idx>.<down|up>.weight`` and
    rewrites them to
    ``transformer.<prefix>.<to_q|to_k|to_v|to_out.0>.<lora_A|lora_B>.weight``
    (the trailing ``processor`` segment is dropped from the prefix).

    Args:
        old_key: Dotted tensor key from the source checkpoint.

    Returns:
        The converted key as a string, or ``None`` when the key does not
        match the expected attention-processor LoRA layout.
    """
    parts = old_key.split('.')

    # Only attention-processor LoRA weight tensors are convertible.
    if "attn" not in parts or "processor" not in parts or "weight" not in parts:
        return None

    # Locate the "<type>_loras" marker segment (e.g. "q_loras").
    lora_type_idx = next(
        (i for i, part in enumerate(parts) if part.endswith('_loras')), -1
    )

    # Bug fix: the original only rejected -1 (not found). When the marker sat
    # at index 0, ``parts[:lora_type_idx - 1]`` silently became ``parts[:-1]``
    # and produced a garbage prefix. A valid key always has "attn.processor"
    # before the marker, so anything below index 1 is malformed — reject it.
    if lora_type_idx < 1:
        return None

    raw_type = parts[lora_type_idx].replace('_loras', '')

    # Layout after the marker is "<idx>.<down|up>", so the direction sits two
    # segments past it; shorter keys are malformed.
    try:
        direction = parts[lora_type_idx + 2]
    except IndexError:
        return None

    type_map = {
        "q": "to_q",
        "k": "to_k",
        "v": "to_v",
        "proj": "to_out.0",
    }
    lora_map = {
        "down": "lora_A",  # down-projection -> lora_A
        "up": "lora_B",    # up-projection   -> lora_B
    }

    if raw_type not in type_map or direction not in lora_map:
        return None

    # Keep everything before the "processor" segment as the module prefix.
    prefix_str = ".".join(parts[:lora_type_idx - 1])

    return f"transformer.{prefix_str}.{type_map[raw_type]}.{lora_map[direction]}.weight"
63
+
64
def main():
    """CLI entry point: convert LoRA keys in a safetensors checkpoint.

    Usage: ``python convert.py INPUT OUTPUT``. Loads INPUT, rewrites every
    key that convert_key() recognizes, saves the converted tensors to
    OUTPUT, and prints a colorized report of converted and skipped keys.
    Unconvertible keys are reported but NOT written to the output file.
    """
    parser = argparse.ArgumentParser(description="Colorful LoRA Key Converter")
    parser.add_argument("input_file", type=str, help="Input safetensors file")
    parser.add_argument("output_file", type=str, help="Output safetensors file")
    args = parser.parse_args()

    if not os.path.exists(args.input_file):
        print(f"{C.RED}Error: Input file '{args.input_file}' not found.{C.RESET}")
        return

    print(f"{C.HEADER}Loading weights from: {C.RESET}{C.BOLD}{args.input_file}{C.RESET}")
    try:
        tensors = load_file(args.input_file)
    except Exception as e:
        print(f"{C.RED}Failed to load file: {e}{C.RESET}")
        return

    new_tensors = {}
    skipped_keys = []

    print(f"{C.HEADER}{'-'*60}{C.RESET}")
    print(f"{C.BOLD}Processing Keys:{C.RESET}")

    for key, tensor in tensors.items():
        new_key = convert_key(key)

        if new_key:
            # Bug fix: two source keys can map to the same converted key;
            # the original silently overwrote the earlier tensor. Warn so
            # the data loss is at least visible.
            if new_key in new_tensors:
                print(f"{C.YELLOW}Warning: duplicate converted key '{new_key}' — overwriting previous tensor.{C.RESET}")
            print(f"{C.RED}{key}{C.RESET}")
            print(f"  ↓ {C.GREY}converted to{C.RESET}")
            print(f"{C.GREEN}{new_key}{C.RESET}")
            print(f"{C.GREY}-{C.RESET}" * 20)

            new_tensors[new_key] = tensor
        else:
            skipped_keys.append(key)

    print(f"\n{C.HEADER}Saving...{C.RESET}")
    # Consistency fix: saving was unguarded while loading was wrapped —
    # report I/O failures (bad path, permissions) the same way.
    try:
        save_file(new_tensors, args.output_file)
    except Exception as e:
        print(f"{C.RED}Failed to save file: {e}{C.RESET}")
        return
    print(f"{C.BOLD}Saved converted model to:{C.RESET} {C.CYAN}{args.output_file}{C.RESET}")

    print(f"\n{C.HEADER}{'-'*60}{C.RESET}")
    print(f"{C.YELLOW}Skipped Keys (Not Converted): {len(skipped_keys)}{C.RESET}")
    if skipped_keys:
        for sk in skipped_keys:
            print(f"  {C.GREY}• {sk}{C.RESET}")
    else:
        print(f"  {C.GREEN}(None - All keys were converted){C.RESET}")

    print(f"{C.HEADER}{'-'*60}{C.RESET}")
    print(f"Total Original Keys: {len(tensors)}")
    print(f"Converted Keys: {C.GREEN}{len(new_tensors)}{C.RESET}")
    print(f"Skipped Keys: {C.YELLOW}{len(skipped_keys)}{C.RESET}")

if __name__ == "__main__":
    main()
detail_encoder.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f49a640546c7826f7c81503b88029cee3f7a170b923ea07d78ea0b33277397bb
3
+ size 761688592
image_critic_lora.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bdf8b25a140c75b6a9608e09f11ac25ecbcb2fc03f0ea582908b1f3b018c449e
3
+ size 298896032