ASzecsenyi committed on
Commit
037c1f9
·
verified ·
1 Parent(s): 6ca2698

Upload mamba_12x928_vckm3utj/meta_007000.json with huggingface_hub

Browse files
mamba_12x928_vckm3utj/meta_007000.json ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "step": 7000,
3
+ "val_bpb": 0.9417039394973545,
4
+ "model_config": {
5
+ "run": "mamba_12x928",
6
+ "device_type": "cuda",
7
+ "config": "mamba85m",
8
+ "depth": 12,
9
+ "aspect_ratio": 64,
10
+ "model_dim": 928,
11
+ "head_dim": 128,
12
+ "sequence_len": 1024,
13
+ "vocab_size": 65535,
14
+ "rva_blocks": [],
15
+ "gauss_blocks": [],
16
+ "mamba_blocks": [
17
+ -1
18
+ ],
19
+ "gdn_blocks": [],
20
+ "d_state": 32,
21
+ "mamba_params": true,
22
+ "kla_kernel": true,
23
+ "mimo_rank": 1,
24
+ "skip_around_kla": true,
25
+ "decoder_mlp": false,
26
+ "num_iterations": -1,
27
+ "target_flops": -1.0,
28
+ "target_param_data_ratio": 20,
29
+ "data_dir": "base_data",
30
+ "device_batch_size": 32,
31
+ "total_batch_size": 524288,
32
+ "use_muon": true,
33
+ "embedding_lr": 0.3,
34
+ "unembedding_lr": 0.004,
35
+ "grad_clip": 1.0,
36
+ "weight_decay": 0.0,
37
+ "matrix_lr": 0.02,
38
+ "adam_beta1": 0.8,
39
+ "adam_beta2": 0.95,
40
+ "warmup_ratio": 0.0,
41
+ "warmdown_ratio": 0.4,
42
+ "final_lr_frac": 0.0,
43
+ "resume_from_step": -1,
44
+ "eval_every": 250,
45
+ "eval_tokens": 10485760,
46
+ "core_metric_every": -1,
47
+ "core_metric_max_per_task": 500,
48
+ "sample_every": 250,
49
+ "save_every": 1000,
50
+ "push_checkpoints_to_hub": true,
51
+ "use_profiler": false,
52
+ "profile_step": 2,
53
+ "profile_micro_step": 0,
54
+ "memory_history_max_entries": 10000,
55
+ "model_tag": "mamba_12x928",
56
+ "n_layer": 12,
57
+ "n_head": 8,
58
+ "n_kv_head": 8,
59
+ "n_embd": 928
60
+ },
61
+ "user_config": {
62
+ "run": "mamba_12x928",
63
+ "device_type": "",
64
+ "config": "mamba85m",
65
+ "depth": 12,
66
+ "aspect_ratio": 64,
67
+ "model_dim": 928,
68
+ "head_dim": 128,
69
+ "sequence_len": 1024,
70
+ "vocab_size": 65535,
71
+ "rva_blocks": [],
72
+ "gauss_blocks": [],
73
+ "mamba_blocks": [
74
+ -1
75
+ ],
76
+ "gdn_blocks": [],
77
+ "d_state": 32,
78
+ "mamba_params": true,
79
+ "kla_kernel": true,
80
+ "mimo_rank": 1,
81
+ "skip_around_kla": true,
82
+ "decoder_mlp": false,
83
+ "num_iterations": -1,
84
+ "target_flops": -1.0,
85
+ "target_param_data_ratio": 20,
86
+ "data_dir": "base_data",
87
+ "device_batch_size": 32,
88
+ "total_batch_size": 524288,
89
+ "use_muon": true,
90
+ "embedding_lr": 0.3,
91
+ "unembedding_lr": 0.004,
92
+ "grad_clip": 1.0,
93
+ "weight_decay": 0.0,
94
+ "matrix_lr": 0.02,
95
+ "adam_beta1": 0.8,
96
+ "adam_beta2": 0.95,
97
+ "warmup_ratio": 0.0,
98
+ "warmdown_ratio": 0.4,
99
+ "final_lr_frac": 0.0,
100
+ "resume_from_step": -1,
101
+ "eval_every": 250,
102
+ "eval_tokens": 10485760,
103
+ "core_metric_every": -1,
104
+ "core_metric_max_per_task": 500,
105
+ "sample_every": 250,
106
+ "save_every": 1000,
107
+ "push_checkpoints_to_hub": true,
108
+ "use_profiler": false,
109
+ "profile_step": 2,
110
+ "profile_micro_step": 0,
111
+ "memory_history_max_entries": 10000,
112
+ "model_tag": "mamba_12x928",
113
+ "pod_name": "s2027538-infk8s-job-k9dsn-ffk57"
114
+ },
115
+ "device_batch_size": 32,
116
+ "sequence_len": 1024,
117
+ "dataloader_state_dict": {
118
+ "pq_idx": 69,
119
+ "rg_idx": 38
120
+ },
121
+ "loop_state": {
122
+ "min_val_bpb": 0.9417039394973545,
123
+ "smooth_train_loss": 3.158316580366325,
124
+ "total_training_time": 27945.642812728882
125
+ }
126
+ }