ASzecsenyi committed on
Commit
75a7148
·
verified ·
1 Parent(s): f702557

Upload gpt-ts-d2-uk0gnfxw-stage0/meta_005000.json with huggingface_hub

Browse files
gpt-ts-d2-uk0gnfxw-stage0/meta_005000.json ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "stage": 0,
  "step": 5000,
  "val_bpb": 0.958053951979268,
  "model_config": {
    "sequence_len": 2048,
    "vocab_size": 265,
    "n_layer": 2,
    "n_head": 16,
    "n_kv_head": 16,
    "n_embd": 768,
    "adaptive_decay": false,
    "rope": true,
    "repetition_count": 1,
    "mamba_layers": [],
    "recurrent_vocab_layers": [
      0
    ],
    "gauss_layers": [],
    "attention_time_decay": false,
    "rva_viz": false
  },
  "user_config": {
    "run": "",
    "device_type": "",
    "depth": 2,
    "repetition_count": 1,
    "attention_time_decay": false,
    "adaptive_decay": false,
    "rope": true,
    "init_from_tag": "",
    "resume_from_tag": "",
    "resume_from_step": -1,
    "resume_from_hub": true,
    "resume_hub_repo_id": "ASzecsenyi/VQLM",
    "resume_hub_subdir": "",
    "resume_hub_repo_type": "model",
    "rva_viz": false,
    "num_iterations": -1,
    "target_flops": -1.0,
    "target_param_data_ratio": 20,
    "num_epochs": -1,
    "data": "tinystories_data",
    "vocab_size": 265,
    "max_seq_len": 2048,
    "device_batch_size": 4,
    "total_batch_size": -1,
    "embedding_lr": 0.2,
    "unembedding_lr": 0.004,
    "weight_decay": 0.0,
    "matrix_lr": 0.02,
    "base_lr": 1.0,
    "grad_clip": 1.0,
    "warmup_ratio": 0.05,
    "warmdown_ratio": 0.2,
    "final_lr_frac": 0.0,
    "eval_every": 2500,
    "eval_tokens": 10485760,
    "core_metric_every": 2000,
    "core_metric_max_per_task": 500,
    "sample_every": 2500,
    "checkpoint_every": -1,
    "max_checkpoints": 3,
    "push_checkpoints_to_hub": true,
    "hf_repo_id": "ASzecsenyi/VQLM",
    "hf_repo_type": "model",
    "model_tag": "",
    "layerwise_num_iterations": -1,
    "layerwise_start_stage": 0
  },
  "device_batch_size": 4,
  "max_seq_len": 2048,
  "num_iterations": 32724,
  "warmdown_ratio": 0.2,
  "max_checkpoints": 3,
  "push_checkpoints_to_hub": true,
  "hf_repo_id": "ASzecsenyi/VQLM",
  "hf_repo_type": "model"
}