# unary-quantization-research / run_convert.py
# Uploaded by OpenTransformer using the upload-large-folder tool (commit 19ed98b, verified).
import os, json, numpy as np, time, sys
from pathlib import Path
from safetensors import safe_open
import torch  # required by safe_open(framework="pt"); not used directly
# Make the project-local `convert` module importable from its checkout path.
sys.path.insert(0, "/root/ternary_engine")
from convert import quantize_weight_matrix
# Source HF checkpoint directory and destination for the converted weights.
model_dir = "/root/ternary_engine/deepseek-r1-1.5b-hf"
output_dir = "/root/ternary_engine/deepseek-r1-1.5b-ternary"
alpha = 0.7  # quantization parameter forwarded to quantize_weight_matrix -- NOTE(review): semantics defined in convert.py, confirm there
os.makedirs(output_dir, exist_ok=True)
# Read every safetensors shard in the checkpoint and materialize each tensor
# as a float32 numpy array, keyed by its original tensor name.
tensors = {}
for shard in sorted(Path(model_dir).glob("*.safetensors")):
    print("Loading " + shard.name)
    with safe_open(str(shard), framework="pt") as handle:
        for name in handle.keys():
            tensors[name] = handle.get_tensor(name).float().numpy()
print("Loaded " + str(len(tensors)) + " tensors")
# Model hyperparameters persisted next to the converted weights (config.json).
config = {
    "hidden_size": 1536,
    "intermediate_size": 8960,
    "num_attention_heads": 12,
    "num_key_value_heads": 2,
    "num_hidden_layers": 28,
    "vocab_size": 151936,
    "head_dim": 128,
    "rope_theta": 1000000.0,
    "rms_norm_eps": 1e-6,
    "alpha": alpha,
}
# Tensor-name -> shape maps, split by storage format, written to manifest.json.
ternary_manifest = {}
fp16_manifest = {}
# Only these projection weights are quantized; everything else is kept FP16.
linear_suffixes = [
    proj + ".weight"
    for proj in ("q_proj", "k_proj", "v_proj", "o_proj",
                 "gate_proj", "up_proj", "down_proj")
]
total_tb = 0  # bytes emitted in ternary format
total_ob = 0  # original FP32 bytes of the tensors that were quantized
# Convert each tensor: 2-D linear-projection weights are ternary-quantized;
# every other tensor is written as raw FP16. (Fixes: restored the stripped
# loop indentation; dropped the unused `out_dim, in_dim` unpack.)
for key, w in tensors.items():
    # One output-file prefix per tensor; dots are replaced for the filesystem.
    prefix = os.path.join(output_dir, key.replace(".", "_"))
    is_linear = any(key.endswith(s) for s in linear_suffixes)
    if is_linear and w.ndim == 2:
        total_ob += w.nbytes
        t0 = time.time()
        # Project-local quantizer; returns positive/negative component arrays,
        # per-matrix scales, and a sparsity fraction -- see convert.py.
        pos, neg, scales, sparsity = quantize_weight_matrix(w, alpha)
        dt = time.time() - t0
        pos.tofile(prefix + ".pos")
        neg.tofile(prefix + ".neg")
        scales.tofile(prefix + ".scales")
        tb = pos.nbytes + neg.nbytes + scales.nbytes
        total_tb += tb
        ratio = w.nbytes / tb  # compression vs the FP32 original
        ternary_manifest[key] = list(w.shape)
        print(" T %s: %s -> %dKB (%.1fx, %.0f%% sparse, %.1fs)" % (
            key, str(w.shape), tb // 1024, ratio, sparsity * 100, dt))
    else:
        # Non-linear (or non-2D) tensors: store verbatim at half precision.
        w16 = w.astype(np.float16)
        w16.tofile(prefix + ".fp16")
        fp16_manifest[key] = list(w.shape)
        print(" F %s: %s -> %dKB" % (key, str(w.shape), w16.nbytes // 1024))
# Persist the run configuration and the per-format tensor manifests as JSON,
# then print a compression summary.
config_path = os.path.join(output_dir, "config.json")
with open(config_path, "w") as fh:
    json.dump(config, fh, indent=2)
manifest_path = os.path.join(output_dir, "manifest.json")
with open(manifest_path, "w") as fh:
    json.dump({"ternary": ternary_manifest, "fp16": fp16_manifest}, fh, indent=2)
print("")
print("Ternary: %.1fMB (from %.1fMB FP32)" % (total_tb / 1024 / 1024, total_ob / 1024 / 1024))
print("DONE")