ASzecsenyi committed on
Commit
b5f2597
·
verified ·
1 Parent(s): da6de92

Upload gpt-ts-d1/meta_002500.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. gpt-ts-d1/meta_002500.json +77 -0
gpt-ts-d1/meta_002500.json ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "step": 2500,
3
+ "val_bpb": 1.3736497678831758,
4
+ "model_config": {
5
+ "sequence_len": 2048,
6
+ "vocab_size": 265,
7
+ "n_layer": 1,
8
+ "n_head": 16,
9
+ "n_kv_head": 16,
10
+ "n_embd": 1024,
11
+ "adaptive_decay": false,
12
+ "rope": true,
13
+ "repetition_count": 1,
14
+ "mamba_layers": [],
15
+ "recurrent_vocab_layers": [],
16
+ "gauss_layers": [],
17
+ "attention_time_decay": false,
18
+ "rva_viz": false
19
+ },
20
+ "user_config": {
21
+ "run": "dummy",
22
+ "device_type": "",
23
+ "depth": 1,
24
+ "repetition_count": 1,
25
+ "recurrent_vocab_layers": [],
26
+ "mamba_layers": [],
27
+ "gauss_layers": [],
28
+ "attention_time_decay": false,
29
+ "adaptive_decay": false,
30
+ "rope": true,
31
+ "init_from_tag": "",
32
+ "rva_viz": false,
33
+ "resume_from_tag": "",
34
+ "resume_from_step": -1,
35
+ "resume_from_hub": true,
36
+ "resume_hub_repo_id": "ASzecsenyi/VQLM",
37
+ "resume_hub_subdir": "",
38
+ "resume_hub_repo_type": "model",
39
+ "num_iterations": -1,
40
+ "target_flops": -1.0,
41
+ "target_param_data_ratio": 20,
42
+ "num_epochs": -1,
43
+ "data": "tinystories_data",
44
+ "vocab_size": 265,
45
+ "max_seq_len": 2048,
46
+ "device_batch_size": 4,
47
+ "total_batch_size": -1,
48
+ "embedding_lr": 0.2,
49
+ "unembedding_lr": 0.004,
50
+ "weight_decay": 0.0,
51
+ "matrix_lr": 0.02,
52
+ "base_lr": 1.0,
53
+ "grad_clip": 1.0,
54
+ "warmup_ratio": 0.05,
55
+ "warmdown_ratio": 0.2,
56
+ "final_lr_frac": 0.0,
57
+ "eval_every": 2500,
58
+ "eval_tokens": 10485760,
59
+ "core_metric_every": 2000,
60
+ "core_metric_max_per_task": 500,
61
+ "sample_every": 2500,
62
+ "checkpoint_every": -1,
63
+ "max_checkpoints": 3,
64
+ "push_checkpoints_to_hub": true,
65
+ "hf_repo_id": "ASzecsenyi/VQLM",
66
+ "hf_repo_type": "model",
67
+ "model_tag": "gpt-ts-d1"
68
+ },
69
+ "device_batch_size": 4,
70
+ "max_seq_len": 2048,
71
+ "num_iterations": 32045,
72
+ "warmdown_ratio": 0.2,
73
+ "max_checkpoints": 3,
74
+ "push_checkpoints_to_hub": true,
75
+ "hf_repo_id": "ASzecsenyi/VQLM",
76
+ "hf_repo_type": "model"
77
+ }