VQLM / mamba-d1-4j2vbaoi / meta_002500.json
Upload mamba-d1-4j2vbaoi/meta_002500.json with huggingface_hub
68de1c9 verified
{
  "step": 2500,
  "val_bpb": 2.3049861890100987,
  "model_config": {
    "sequence_len": 2048,
    "vocab_size": 265,
    "n_layer": 1,
    "n_head": 16,
    "n_kv_head": 16,
    "n_embd": 1536,
    "adaptive_decay": false,
    "rope": true,
    "repetition_count": 1,
    "mamba_layers": [
      0
    ],
    "recurrent_vocab_layers": [],
    "attention_time_decay": false
  },
  "user_config": {
    "run": "mamba-d1",
    "device_type": "cuda",
    "depth": 1,
    "repetition_count": 1,
    "attention_time_decay": false,
    "adaptive_decay": false,
    "rope": true,
    "init_from_tag": "",
    "resume_from_tag": "",
    "resume_from_step": -1,
    "resume_from_hub": true,
    "resume_hub_repo_id": "ASzecsenyi/VQLM",
    "resume_hub_subdir": "",
    "resume_hub_repo_type": "model",
    "num_iterations": -1,
    "target_flops": -1.0,
    "target_param_data_ratio": 20,
    "num_epochs": -1,
    "data": "base_data",
    "max_seq_len": 2048,
    "device_batch_size": 4,
    "total_batch_size": -1,
    "embedding_lr": 0.2,
    "unembedding_lr": 0.004,
    "weight_decay": 0.0,
    "matrix_lr": 0.02,
    "base_lr": 1.0,
    "grad_clip": 1.0,
    "warmup_ratio": 0.05,
    "warmdown_ratio": 0.2,
    "final_lr_frac": 0.0,
    "eval_every": 2500,
    "eval_tokens": 10485760,
    "core_metric_every": 2000,
    "core_metric_max_per_task": 500,
    "sample_every": 2500,
    "checkpoint_every": -1,
    "max_checkpoints": 3,
    "push_checkpoints_to_hub": true,
    "hf_repo_id": "ASzecsenyi/VQLM",
    "hf_repo_type": "model",
    "model_tag": "mamba-d1",
    "ddp": true,
    "ddp_rank": 0,
    "ddp_local_rank": 0,
    "ddp_world_size": 1
  },
  "device_batch_size": 4,
  "max_seq_len": 2048,
  "num_iterations": 36885,
  "warmdown_ratio": 0.2,
  "max_checkpoints": 3,
  "push_checkpoints_to_hub": true,
  "hf_repo_id": "ASzecsenyi/VQLM",
  "hf_repo_type": "model"
}
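
A minimal sketch of reading this metadata file back from the Hub and working out the training-token budget it implies. Two assumptions are baked in: the file path (taken from the commit message above) and the token accounting, where a total_batch_size of -1 is read as "one optimizer step = device_batch_size * max_seq_len * ddp_world_size tokens" with no gradient accumulation; neither is stated in the file itself.

    # Sketch only: the filename and the token-accounting formula are assumptions,
    # not something this meta file confirms.
    import json
    from huggingface_hub import hf_hub_download

    path = hf_hub_download(
        repo_id="ASzecsenyi/VQLM",
        filename="mamba-d1-4j2vbaoi/meta_002500.json",  # path assumed from the commit message
        repo_type="model",
    )
    with open(path) as f:
        meta = json.load(f)

    print(meta["step"], meta["val_bpb"])  # 2500 2.3049861890100987

    cfg = meta["user_config"]
    # Assumed accounting: with total_batch_size == -1, one optimizer step consumes
    # device_batch_size * max_seq_len * ddp_world_size tokens.
    tokens_per_step = cfg["device_batch_size"] * cfg["max_seq_len"] * cfg["ddp_world_size"]
    total_tokens = tokens_per_step * meta["num_iterations"]
    print(tokens_per_step)  # 8192
    print(total_tokens)     # 302161920 (~302M) tokens over the full 36885-step run

Under those assumptions, the 36885-step schedule works out to roughly 302M training tokens, which is consistent with the target_param_data_ratio of 20 only for a model of around 15M parameters; the actual parameter count is not recorded here, so treat that as a cross-check rather than a derivation.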