{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 145,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 5.1068618007455715,
      "learning_rate": 3.125e-05,
      "loss": 1.7913,
      "step": 5
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 2.957239486428569,
      "learning_rate": 4.997634107543713e-05,
      "loss": 1.6657,
      "step": 10
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 2.032152712698823,
      "learning_rate": 4.971074924974395e-05,
      "loss": 1.6124,
      "step": 15
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 2.2172317636469665,
      "learning_rate": 4.915349129238729e-05,
      "loss": 1.5928,
      "step": 20
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 2.0749093256584987,
      "learning_rate": 4.8311884978689945e-05,
      "loss": 1.5655,
      "step": 25
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 1.7269474661231707,
      "learning_rate": 4.719698207710602e-05,
      "loss": 1.5663,
      "step": 30
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 1.7864302551019986,
      "learning_rate": 4.582342322009812e-05,
      "loss": 1.5432,
      "step": 35
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 1.6921601464819647,
      "learning_rate": 4.4209245646929606e-05,
      "loss": 1.5527,
      "step": 40
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 1.6339377257117198,
      "learning_rate": 4.2375646343046135e-05,
      "loss": 1.529,
      "step": 45
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 1.6399246016241635,
      "learning_rate": 4.034670368644256e-05,
      "loss": 1.5468,
      "step": 50
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 1.542839474519924,
      "learning_rate": 3.8149061256287007e-05,
      "loss": 1.5498,
      "step": 55
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 1.581675624331528,
      "learning_rate": 3.581157795594989e-05,
      "loss": 1.4887,
      "step": 60
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 1.573021517165374,
      "learning_rate": 3.3364949044936924e-05,
      "loss": 1.539,
      "step": 65
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 1.4415911705288675,
      "learning_rate": 3.084130305624209e-05,
      "loss": 1.5109,
      "step": 70
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 1.6114133285748098,
      "learning_rate": 2.8273779892303337e-05,
      "loss": 1.4433,
      "step": 75
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 1.5263739278184745,
      "learning_rate": 2.5696095639901996e-05,
      "loss": 1.5055,
      "step": 80
    },
    {
      "epoch": 0.5862068965517241,
      "grad_norm": 1.5621008510154932,
      "learning_rate": 2.314209981875008e-05,
      "loss": 1.5184,
      "step": 85
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 1.4227359935094617,
      "learning_rate": 2.0645330877869e-05,
      "loss": 1.4824,
      "step": 90
    },
    {
      "epoch": 0.6551724137931034,
      "grad_norm": 1.3684824038108097,
      "learning_rate": 1.8238575776872595e-05,
      "loss": 1.4529,
      "step": 95
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 1.5013494268652265,
      "learning_rate": 1.5953439435625384e-05,
      "loss": 1.476,
      "step": 100
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 1.3815244281031855,
      "learning_rate": 1.3819929706157959e-05,
      "loss": 1.4915,
      "step": 105
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 1.4148361912594616,
      "learning_rate": 1.1866063316886964e-05,
      "loss": 1.4761,
      "step": 110
    },
    {
      "epoch": 0.7931034482758621,
      "grad_norm": 1.3631432518006836,
      "learning_rate": 1.0117497963783762e-05,
      "loss": 1.4597,
      "step": 115
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 1.297582991316736,
      "learning_rate": 8.597195379780726e-06,
      "loss": 1.4837,
      "step": 120
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 1.2752607882500357,
      "learning_rate": 7.3251198069053465e-06,
      "loss": 1.4326,
      "step": 125
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 1.3253453113805727,
      "learning_rate": 6.317975830732497e-06,
      "loss": 1.4415,
      "step": 130
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 1.258090770041199,
      "learning_rate": 5.588989019848609e-06,
      "loss": 1.4284,
      "step": 135
    },
    {
      "epoch": 0.9655172413793104,
      "grad_norm": 1.2835407696331522,
      "learning_rate": 5.147732250916841e-06,
      "loss": 1.462,
      "step": 140
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.2564755440061064,
      "learning_rate": 5e-06,
      "loss": 1.4216,
      "step": 145
    },
    {
      "epoch": 1.0,
      "step": 145,
      "total_flos": 4079631728640.0,
      "train_loss": 1.518260485550453,
      "train_runtime": 123.6348,
      "train_samples_per_second": 74.825,
      "train_steps_per_second": 1.173
    }
  ],
  "logging_steps": 5,
  "max_steps": 145,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4079631728640.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}