{
"base_model_name": "Llama-2-7b-chat-hf",
"base_model_class": "LlamaForCausalLM",
"base_loaded_in_4bit": true,
"base_loaded_in_8bit": false,
"projections": "q, v",
"loss": 0.8314,
"learning_rate": 7.264545643486997e-07,
"epoch": 2.76,
"current_steps": 3387,
"train_runtime": 13542.7743,
"train_samples_per_second": 0.277,
"train_steps_per_second": 0.002,
"total_flos": 1.4593684047239578e+17,
"train_loss": 0.9451417039941858
}