{
"base_model_name": "LogicLMv3",
"base_model_class": "LlamaForCausalLM",
"base_loaded_in_4bit": true,
"base_loaded_in_8bit": false,
"projections": "q, v",
"loss": 1.1674,
"learning_rate": 2.359882005899705e-07,
"epoch": 3.99,
"current_steps": 21979,
"train_runtime": 258780.2694,
"train_samples_per_second": 1.023,
"train_steps_per_second": 0.001,
"total_flos": 2.6619410217015706e+18,
"train_loss": 1.2536086592563362
}