{
"model_type": "casm_skip_policy",
"hidden_dim": 4096,
"n_layers": 32,
"n_skip": 8,
"policy_dim": 128,
"n_heads": 4,
"n_encoder_layers": 2,
"keep_prefix": 0,
"keep_suffix": 0,
"context_tokens": 1,
"draft_len_choices": [4, 8, 12, 16, 24, 32, 48, 64],
"base_model": "meta-llama/Meta-Llama-3-8B",
"training_steps": 10000,
"eval_reward": 99.773,
"test_reward": 100.429
}