Initial upload of JiRackTernary 1B converted weights
Browse files- JiRackTernary1b.pt +1 -1
- chat_jirack_ternary1b_safetensors.py +0 -5
- convert_to_safetensors.py +34 -0
- model.safetensors +1 -1
JiRackTernary1b.pt
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 5995037658
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:31608edd98d754aef7e3228ad625fe372b7fc7937f41eea5aed931fb911d339c
|
| 3 |
size 5995037658
|
chat_jirack_ternary1b_safetensors.py
CHANGED
|
@@ -1,8 +1,3 @@
|
|
| 1 |
-
# ==============================================================================
|
| 2 |
-
# COPYRIGHT (C) 2026 KONSTANTIN VLADIMIROVICH GRABKO. ALL RIGHTS RESERVED.
|
| 3 |
-
# PATENT PENDING | CMS MANHATTAN JIRACK TECHNOLOGY
|
| 4 |
-
# ==============================================================================
|
| 5 |
-
|
| 6 |
import torch
|
| 7 |
import torch.nn.functional as F
|
| 8 |
from transformers import AutoTokenizer
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import torch
|
| 2 |
import torch.nn.functional as F
|
| 3 |
from transformers import AutoTokenizer
|
convert_to_safetensors.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import os
|
| 3 |
+
from safetensors.torch import save_file
|
| 4 |
+
|
| 5 |
+
# Source PyTorch checkpoint to convert (stored via git-lfs; the pointer
# file in this commit reports ~6.0 GB, i.e. fp32 weights).
CHECKPOINT_PATH = "JiRackTernary1b.pt"
# Destination safetensors file written by convert() (fp16, ~half the size).
OUTPUT_PATH = "model.safetensors"
| 7 |
+
|
| 8 |
+
def convert(checkpoint_path: str = CHECKPOINT_PATH, output_path: str = OUTPUT_PATH) -> None:
    """Convert a PyTorch ``.pt`` checkpoint into a clean float16 safetensors file.

    Loads the checkpoint on CPU, unwraps a ``"model_state_dict"`` entry if
    present, keeps only tensor values (dropping metadata such as step
    counters or config), strips ``_orig_mod.`` (torch.compile) and
    ``module.`` (DataParallel/DDP) wrapper prefixes from keys, casts every
    weight to float16, and writes the result with ``save_file``.

    Args:
        checkpoint_path: Path of the ``.pt`` checkpoint to read.
            Defaults to the module-level ``CHECKPOINT_PATH``.
        output_path: Path of the ``.safetensors`` file to write.
            Defaults to the module-level ``OUTPUT_PATH``.

    Raises:
        ValueError: If prefix stripping makes two keys collide, which would
            otherwise silently drop a weight tensor.
    """
    print(f"📦 Loading tensors from: {checkpoint_path}")
    # SECURITY NOTE(review): torch.load unpickles the file and can execute
    # arbitrary code — only run this on checkpoints you trust (or pass
    # weights_only=True on torch >= 1.13 if the checkpoint holds no custom
    # Python objects).
    checkpoint = torch.load(checkpoint_path, map_location="cpu")

    # 1. Extract only the model weights dictionary (training checkpoints
    #    often nest the weights under "model_state_dict").
    if "model_state_dict" in checkpoint:
        state_dict = checkpoint["model_state_dict"]
    else:
        state_dict = checkpoint

    # 2. Keep ONLY tensors (weights); drop any non-tensor metadata entries.
    clean_sd = {}
    for key, value in state_dict.items():
        if not isinstance(value, torch.Tensor):
            continue
        # Strip wrapper prefixes with removeprefix, NOT str.replace:
        # replace() would also delete accidental mid-key occurrences of
        # "_orig_mod." / "module." and corrupt the key.
        new_key = key.removeprefix("_orig_mod.").removeprefix("module.")
        if new_key in clean_sd:
            # Two distinct source keys mapping to one output key would
            # silently overwrite a weight — fail loudly instead.
            raise ValueError(f"Duplicate key after prefix stripping: {new_key!r}")
        # Force float16 to halve the file size; contiguous() because
        # safetensors requires densely-stored tensors.
        clean_sd[new_key] = value.contiguous().to(torch.float16)

    print(" Saving clean weights to Safetensors...")
    save_file(clean_sd, output_path)

    new_size = os.path.getsize(output_path) / (1024**3)
    print(f"✅ Success! New size: {new_size:.2f} GB")
| 32 |
+
|
| 33 |
+
# Script entry point: run the conversion only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    convert()
model.safetensors
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
size 2997242000
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d912f780749e51e1ccaca27c53fa4a53396447f8d478a7f675c0de4492e49c1a
|
| 3 |
size 2997242000
|