{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 145,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 5.118941856424631,
      "learning_rate": 3.125e-05,
      "loss": 1.7914,
      "step": 5
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 2.9573423689762866,
      "learning_rate": 4.997634107543713e-05,
      "loss": 1.666,
      "step": 10
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 2.039420630897441,
      "learning_rate": 4.971074924974395e-05,
      "loss": 1.6124,
      "step": 15
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 2.1953763522509946,
      "learning_rate": 4.915349129238729e-05,
      "loss": 1.5929,
      "step": 20
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 2.049453928196352,
      "learning_rate": 4.8311884978689945e-05,
      "loss": 1.5655,
      "step": 25
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 1.7738134431283565,
      "learning_rate": 4.719698207710602e-05,
      "loss": 1.5665,
      "step": 30
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 1.7877365606373776,
      "learning_rate": 4.582342322009812e-05,
      "loss": 1.5434,
      "step": 35
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 1.6827790459654124,
      "learning_rate": 4.4209245646929606e-05,
      "loss": 1.5528,
      "step": 40
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 1.6193600867831015,
      "learning_rate": 4.2375646343046135e-05,
      "loss": 1.5286,
      "step": 45
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 1.6304628801910557,
      "learning_rate": 4.034670368644256e-05,
      "loss": 1.5466,
      "step": 50
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 1.5317716414980957,
      "learning_rate": 3.8149061256287007e-05,
      "loss": 1.5496,
      "step": 55
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 1.5527683273021031,
      "learning_rate": 3.581157795594989e-05,
      "loss": 1.4883,
      "step": 60
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 1.593847047843435,
      "learning_rate": 3.3364949044936924e-05,
      "loss": 1.5387,
      "step": 65
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 1.4396115722574465,
      "learning_rate": 3.084130305624209e-05,
      "loss": 1.511,
      "step": 70
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 1.6057921876869088,
      "learning_rate": 2.8273779892303337e-05,
      "loss": 1.4434,
      "step": 75
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 1.5280825241631029,
      "learning_rate": 2.5696095639901996e-05,
      "loss": 1.5058,
      "step": 80
    },
    {
      "epoch": 0.5862068965517241,
      "grad_norm": 1.5666909004892384,
      "learning_rate": 2.314209981875008e-05,
      "loss": 1.5186,
      "step": 85
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 1.4281308841157434,
      "learning_rate": 2.0645330877869e-05,
      "loss": 1.4822,
      "step": 90
    },
    {
      "epoch": 0.6551724137931034,
      "grad_norm": 1.3745413443273744,
      "learning_rate": 1.8238575776872595e-05,
      "loss": 1.4529,
      "step": 95
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 1.5081595406938955,
      "learning_rate": 1.5953439435625384e-05,
      "loss": 1.4762,
      "step": 100
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 1.3768197030526457,
      "learning_rate": 1.3819929706157959e-05,
      "loss": 1.4915,
      "step": 105
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 1.4228532169763397,
      "learning_rate": 1.1866063316886964e-05,
      "loss": 1.4761,
      "step": 110
    },
    {
      "epoch": 0.7931034482758621,
      "grad_norm": 1.3531199760661987,
      "learning_rate": 1.0117497963783762e-05,
      "loss": 1.4597,
      "step": 115
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 1.297275568713758,
      "learning_rate": 8.597195379780726e-06,
      "loss": 1.4838,
      "step": 120
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 1.2780573701142792,
      "learning_rate": 7.3251198069053465e-06,
      "loss": 1.4329,
      "step": 125
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 1.3276253224901293,
      "learning_rate": 6.317975830732497e-06,
      "loss": 1.4417,
      "step": 130
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 1.2548059361443487,
      "learning_rate": 5.588989019848609e-06,
      "loss": 1.4287,
      "step": 135
    },
    {
      "epoch": 0.9655172413793104,
      "grad_norm": 1.2798168590694643,
      "learning_rate": 5.147732250916841e-06,
      "loss": 1.462,
      "step": 140
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.2571681575699087,
      "learning_rate": 5e-06,
      "loss": 1.4216,
      "step": 145
    },
    {
      "epoch": 1.0,
      "step": 145,
      "total_flos": 4079631728640.0,
      "train_loss": 1.5182941075029044,
      "train_runtime": 127.2004,
      "train_samples_per_second": 72.728,
      "train_steps_per_second": 1.14
    }
  ],
  "logging_steps": 5,
  "max_steps": 145,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4079631728640.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}