throbbey commited on
Commit
ffc32d3
·
verified ·
1 Parent(s): 3351b92

Upload CRATE checkpoint at step 20000

Browse files
Files changed (6) hide show
  1. README.md +45 -0
  2. config.json +9 -0
  3. meta.json +57 -0
  4. model.safetensors +3 -0
  5. token_bytes.pt +3 -0
  6. tokenizer.pkl +3 -0
README.md ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - nanochat
4
+ - crate
5
+ license: mit
6
+ ---
7
+
8
+ # crate-d12-base
9
+
10
 + A CRATE (Coding RAte reduction TransformEr) language model
11
+ trained with [nanochat](https://github.com/karpathy/nanochat).
12
+
13
+ ## Model Details
14
+
15
+ | Parameter | Value |
16
+ |-----------|-------|
17
+ | Architecture | CRATE |
18
+ | Layers | 12 |
19
+ | Hidden dim | 768 |
20
+ | Attention heads | 6 |
21
+ | Vocab size | 50304 |
22
+ | Max sequence length | 1024 |
23
+ | Window pattern | SSSL |
24
+ | Training step | 20,000 |
25
+ | Validation BPB | 1.1131 |
26
+ | Smooth train loss | 3.7495 |
27
+ | Training time | 3.4 hours |
28
+ | Run name | 4090-crate-a |
29
+ | Batch size (tokens) | 65536 |
30
+
31
+ ## Files
32
+
33
+ - `model.safetensors` -- model weights in safetensors format
34
+ - `config.json` -- model architecture config (reconstruct with `CRATEConfig(**config)`)
35
+ - `tokenizer.pkl` -- BPE tokenizer (pickle of tiktoken Encoding)
36
+ - `token_bytes.pt` -- token byte mappings
37
+ - `meta.json` -- full training metadata from the checkpoint
38
+
39
+ ## Usage
40
+
41
+ ```python
42
 + import torch
 + from nanochat.checkpoint_manager import build_model
43
+
44
+ model, tokenizer, meta = build_model("path/to/downloaded/dir", step=20000, device=torch.device("cuda"), phase="eval")
45
+ ```
config.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "sequence_len": 1024,
3
+ "vocab_size": 50304,
4
+ "n_layer": 12,
5
+ "n_head": 6,
6
+ "n_kv_head": 6,
7
+ "n_embd": 768,
8
+ "window_pattern": "SSSL"
9
+ }
meta.json ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "step": 20000,
3
+ "val_bpb": 1.1130690245666328,
4
+ "model_config": {
5
+ "sequence_len": 1024,
6
+ "vocab_size": 50304,
7
+ "n_layer": 12,
8
+ "n_head": 6,
9
+ "n_kv_head": 6,
10
+ "n_embd": 768,
11
+ "window_pattern": "SSSL"
12
+ },
13
+ "user_config": {
14
+ "run": "4090-crate-a",
15
+ "device_type": "",
16
+ "depth": 12,
17
+ "aspect_ratio": 64,
18
+ "head_dim": 128,
19
+ "max_seq_len": 1024,
20
+ "window_pattern": "SSSL",
21
+ "num_iterations": 50000,
22
+ "target_flops": -1.0,
23
+ "target_param_data_ratio": 8,
24
+ "device_batch_size": 16,
25
+ "total_batch_size": 65536,
26
+ "embedding_lr": 0.3,
27
+ "unembedding_lr": 0.004,
28
+ "weight_decay": 0.2,
29
+ "matrix_lr": 0.02,
30
+ "scalar_lr": 0.5,
31
+ "adam_beta1": 0.8,
32
+ "adam_beta2": 0.95,
33
+ "warmup_ratio": 0.0,
34
+ "warmdown_ratio": 0.4,
35
+ "final_lr_frac": 0.0,
36
+ "resume_from_step": -1,
37
+ "eval_every": 20000,
38
+ "eval_tokens": 10485760,
39
+ "core_metric_every": 2000,
40
+ "core_metric_max_per_task": 500,
41
+ "sample_every": 2000,
42
+ "save_every": 5000,
43
+ "model_tag": null
44
+ },
45
+ "device_batch_size": 16,
46
+ "max_seq_len": 1024,
47
+ "dataloader_state_dict": {
48
+ "pq_idx": 8,
49
+ "rg_idx": 18,
50
+ "epoch": 2
51
+ },
52
+ "loop_state": {
53
+ "min_val_bpb": 1.1130690245666328,
54
+ "smooth_train_loss": 3.7495008182618137,
55
+ "total_training_time": 12204.943566560745
56
+ }
57
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:72bf013a21a11c7bdf5eb92b983e75426f4ea31daf13fc11d412e9a1b0ad57aa
3
+ size 515070592
token_bytes.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d03fb2a61108a67da7bfc068acb7df60418a9810d2396cae9ba431edb48ebe2f
3
+ size 202793
tokenizer.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6329ca24d1862360651c17b750fc49b01564c871f9047717dc63c7726891ac22
3
+ size 644366