kgrabko committed on
Commit
c9eedbd
·
verified ·
1 Parent(s): 9f6fe04

Update copy_weights.py

Browse files
Files changed (1) hide show
  1. copy_weights.py +18 -11
copy_weights.py CHANGED
# =============================================================================
#
# =============================================================================

"""Seed a freshly built JiRack model with the embedding / lm_head weights of
an existing checkpoint, then pair it with the new code tokenizer.

Rows [0, 128256) are copied verbatim from the old ``pytorch_model.bin``;
rows above that (the new special tokens) are filled with the mean of the old
embedding matrix.  Weights and tokenizer files are written to ``save_path``.
"""
import os

import torch
from JiRackTernary_new import JiRackConfig, JiRackTernary1B
from transformers import AutoTokenizer

print("🚀 Copying embeddings and lm_head...")

old_model_path = "."                              # current model checkpoint
tok_dir = "./jirack_code_tokenizer"               # new JiRack tokenizer
save_path = "./JiRack_init_model_with_new_vocab"  # destination for the seeded model

os.makedirs(save_path, exist_ok=True)

tokenizer = AutoTokenizer.from_pretrained(tok_dir)
new_vocab_size = len(tokenizer)

# Fresh model instance; its vocab dimension comes from JiRackConfig.
cfg = JiRackConfig()
model = JiRackTernary1B(cfg)

# Old weights, mapped onto CPU so no GPU is needed for the copy.
state = torch.load(f"{old_model_path}/pytorch_model.bin", map_location="cpu")

old_vocab_size = 128256  # vocab size of the source checkpoint

with torch.no_grad():
    # Reuse the overlapping rows unchanged...
    model.token_emb.weight[:old_vocab_size] = state['token_emb.weight'][:old_vocab_size]
    model.lm_head.weight[:old_vocab_size] = state['lm_head.weight'][:old_vocab_size]

    # ...and start every new token (the FIM markers) at the mean embedding.
    mean_row = state['token_emb.weight'].mean(dim=0)
    model.token_emb.weight[old_vocab_size:] = mean_row
    model.lm_head.weight[old_vocab_size:] = mean_row

print(f"✅ Copied {old_vocab_size} tokens")
print(f"✅ Initialized {new_vocab_size - old_vocab_size} new tokens")

torch.save(model.state_dict(), f"{save_path}/pytorch_model.bin")
tokenizer.save_pretrained(save_path)

print(f"\n🎉 Done! New model saved to: {save_path}")
 
# =============================================================================
#
# =============================================================================

# =============================================================================
# COPYRIGHT © 2025 Konstantin Vladimirovich Grabko. ALL RIGHTS RESERVED.
# =============================================================================

"""Copy the embedding and lm_head rows of the existing SafeTensors checkpoint
into a freshly constructed JiRack model whose vocabulary was extended by the
new code tokenizer, then save the result (weights + tokenizer) in SafeTensors
format under ``save_path``.
"""
import os

import torch
from JiRackTernary_new import JiRackConfig, JiRackTernary1B
from safetensors.torch import load_file, save_file
from transformers import AutoTokenizer

print("🚀 Copying embeddings and lm_head...")

old_model_path = "."                              # current model checkpoint
new_tokenizer_path = "./jirack_code_tokenizer"    # new JiRack tokenizer
save_path = "./JiRack_init_model_with_new_vocab"  # destination for the seeded model

os.makedirs(save_path, exist_ok=True)

tokenizer = AutoTokenizer.from_pretrained(new_tokenizer_path)
new_vocab_size = len(tokenizer)

print(f"New vocab size: {new_vocab_size}")

# Create new model; its vocab dimension comes from JiRackConfig, NOT from
# the tokenizer.
config = JiRackConfig()
model = JiRackTernary1B(config)

# BUGFIX: new_vocab_size was computed and printed but never checked against
# the model. If JiRackConfig's vocab dimension drifts out of sync with the
# tokenizer, the row slices below silently initialize the wrong number of
# rows. Fail loudly instead of producing a broken checkpoint.
model_vocab = model.token_emb.weight.shape[0]
if model_vocab != new_vocab_size:
    raise ValueError(
        f"Model vocab ({model_vocab}) != tokenizer vocab ({new_vocab_size}); "
        "update JiRackConfig before copying weights."
    )

# Load old model (SafeTensors); load_file returns plain CPU tensors.
old_state = load_file(f"{old_model_path}/model.safetensors")

old_vocab_size = 128256  # vocab size of the source checkpoint (rows to reuse)

with torch.no_grad():
    # Copy old weights row-for-row; .clone() detaches from the loaded buffers.
    model.token_emb.weight[:old_vocab_size] = old_state['token_emb.weight'][:old_vocab_size].clone()
    model.lm_head.weight[:old_vocab_size] = old_state['lm_head.weight'][:old_vocab_size].clone()

    # Initialize new 3 tokens (FIM) with the mean of the old embedding matrix
    # so the fresh rows start in-distribution.
    mean_emb = old_state['token_emb.weight'].mean(dim=0)
    model.token_emb.weight[old_vocab_size:] = mean_emb
    model.lm_head.weight[old_vocab_size:] = mean_emb

print(f"✅ Copied {old_vocab_size} tokens")
print(f"✅ Initialized {new_vocab_size - old_vocab_size} new tokens")

# Save in SafeTensors.
# NOTE(review): safetensors.save_file rejects tensors that share storage; if
# token_emb and lm_head are tied in JiRackTernary1B this call will raise —
# confirm the two matrices are independent parameters.
save_file(model.state_dict(), f"{save_path}/model.safetensors")
tokenizer.save_pretrained(save_path)

print(f"\n🎉 Done! New model saved to: {save_path}")