{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.16842105263157894,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016842105263157896,
      "grad_norm": 0.7416742444038391,
      "learning_rate": 2.525252525252526e-07,
      "loss": 1.6163,
      "step": 10
    },
    {
      "epoch": 0.003368421052631579,
      "grad_norm": 0.8192729353904724,
      "learning_rate": 5.331088664421998e-07,
      "loss": 1.5854,
      "step": 20
    },
    {
      "epoch": 0.0050526315789473685,
      "grad_norm": 0.7132650017738342,
      "learning_rate": 8.13692480359147e-07,
      "loss": 1.5937,
      "step": 30
    },
    {
      "epoch": 0.006736842105263158,
      "grad_norm": 0.6016515493392944,
      "learning_rate": 1.0942760942760944e-06,
      "loss": 1.5379,
      "step": 40
    },
    {
      "epoch": 0.008421052631578947,
      "grad_norm": 0.8635797500610352,
      "learning_rate": 1.3748597081930417e-06,
      "loss": 1.6066,
      "step": 50
    },
    {
      "epoch": 0.010105263157894737,
      "grad_norm": 0.7259553670883179,
      "learning_rate": 1.655443322109989e-06,
      "loss": 1.5905,
      "step": 60
    },
    {
      "epoch": 0.011789473684210527,
      "grad_norm": 0.630763828754425,
      "learning_rate": 1.936026936026936e-06,
      "loss": 1.4836,
      "step": 70
    },
    {
      "epoch": 0.013473684210526317,
      "grad_norm": 0.8008495569229126,
      "learning_rate": 2.2166105499438833e-06,
      "loss": 1.5698,
      "step": 80
    },
    {
      "epoch": 0.015157894736842105,
      "grad_norm": 0.719158947467804,
      "learning_rate": 2.497194163860831e-06,
      "loss": 1.4721,
      "step": 90
    },
    {
      "epoch": 0.016842105263157894,
      "grad_norm": 0.7813356518745422,
      "learning_rate": 2.777777777777778e-06,
      "loss": 1.5929,
      "step": 100
    },
    {
      "epoch": 0.018526315789473686,
      "grad_norm": 0.7189694046974182,
      "learning_rate": 3.058361391694725e-06,
      "loss": 1.6497,
      "step": 110
    },
    {
      "epoch": 0.020210526315789474,
      "grad_norm": 0.673009991645813,
      "learning_rate": 3.3389450056116726e-06,
      "loss": 1.4904,
      "step": 120
    },
    {
      "epoch": 0.021894736842105262,
      "grad_norm": 0.6332181096076965,
      "learning_rate": 3.6195286195286197e-06,
      "loss": 1.5269,
      "step": 130
    },
    {
      "epoch": 0.023578947368421053,
      "grad_norm": 0.6532586812973022,
      "learning_rate": 3.900112233445567e-06,
      "loss": 1.4615,
      "step": 140
    },
    {
      "epoch": 0.02526315789473684,
      "grad_norm": 0.5978661179542542,
      "learning_rate": 4.180695847362514e-06,
      "loss": 1.5024,
      "step": 150
    },
    {
      "epoch": 0.026947368421052633,
      "grad_norm": 0.5529018044471741,
      "learning_rate": 4.4612794612794615e-06,
      "loss": 1.4819,
      "step": 160
    },
    {
      "epoch": 0.02863157894736842,
      "grad_norm": 0.5229254961013794,
      "learning_rate": 4.741863075196409e-06,
      "loss": 1.5354,
      "step": 170
    },
    {
      "epoch": 0.03031578947368421,
      "grad_norm": 0.4654325246810913,
      "learning_rate": 5.022446689113356e-06,
      "loss": 1.472,
      "step": 180
    },
    {
      "epoch": 0.032,
      "grad_norm": 0.44726964831352234,
      "learning_rate": 5.303030303030304e-06,
      "loss": 1.4595,
      "step": 190
    },
    {
      "epoch": 0.03368421052631579,
      "grad_norm": 0.4176335334777832,
      "learning_rate": 5.583613916947251e-06,
      "loss": 1.5168,
      "step": 200
    },
    {
      "epoch": 0.03536842105263158,
      "grad_norm": 0.3661172688007355,
      "learning_rate": 5.864197530864198e-06,
      "loss": 1.452,
      "step": 210
    },
    {
      "epoch": 0.03705263157894737,
      "grad_norm": 0.38835084438323975,
      "learning_rate": 6.144781144781145e-06,
      "loss": 1.4537,
      "step": 220
    },
    {
      "epoch": 0.03873684210526316,
      "grad_norm": 0.3330041468143463,
      "learning_rate": 6.425364758698092e-06,
      "loss": 1.4693,
      "step": 230
    },
    {
      "epoch": 0.04042105263157895,
      "grad_norm": 0.33307236433029175,
      "learning_rate": 6.705948372615039e-06,
      "loss": 1.4899,
      "step": 240
    },
    {
      "epoch": 0.042105263157894736,
      "grad_norm": 0.30282339453697205,
      "learning_rate": 6.986531986531987e-06,
      "loss": 1.5017,
      "step": 250
    },
    {
      "epoch": 0.043789473684210524,
      "grad_norm": 0.302558034658432,
      "learning_rate": 7.267115600448934e-06,
      "loss": 1.4894,
      "step": 260
    },
    {
      "epoch": 0.04547368421052632,
      "grad_norm": 0.32156404852867126,
      "learning_rate": 7.5476992143658815e-06,
      "loss": 1.5006,
      "step": 270
    },
    {
      "epoch": 0.04715789473684211,
      "grad_norm": 0.28996139764785767,
      "learning_rate": 7.82828282828283e-06,
      "loss": 1.447,
      "step": 280
    },
    {
      "epoch": 0.048842105263157895,
      "grad_norm": 0.31116044521331787,
      "learning_rate": 8.108866442199776e-06,
      "loss": 1.5103,
      "step": 290
    },
    {
      "epoch": 0.05052631578947368,
      "grad_norm": 0.3400988280773163,
      "learning_rate": 8.389450056116724e-06,
      "loss": 1.517,
      "step": 300
    },
    {
      "epoch": 0.05221052631578947,
      "grad_norm": 0.3122132420539856,
      "learning_rate": 8.67003367003367e-06,
      "loss": 1.4381,
      "step": 310
    },
    {
      "epoch": 0.053894736842105266,
      "grad_norm": 0.31050199270248413,
      "learning_rate": 8.950617283950618e-06,
      "loss": 1.4649,
      "step": 320
    },
    {
      "epoch": 0.055578947368421054,
      "grad_norm": 0.29493045806884766,
      "learning_rate": 9.231200897867564e-06,
      "loss": 1.4224,
      "step": 330
    },
    {
      "epoch": 0.05726315789473684,
      "grad_norm": 0.3371477425098419,
      "learning_rate": 9.511784511784512e-06,
      "loss": 1.4904,
      "step": 340
    },
    {
      "epoch": 0.05894736842105263,
      "grad_norm": 0.3025756776332855,
      "learning_rate": 9.79236812570146e-06,
      "loss": 1.4743,
      "step": 350
    },
    {
      "epoch": 0.06063157894736842,
      "grad_norm": 0.3008587658405304,
      "learning_rate": 1.0072951739618406e-05,
      "loss": 1.5047,
      "step": 360
    },
    {
      "epoch": 0.06231578947368421,
      "grad_norm": 0.2756430506706238,
      "learning_rate": 1.0353535353535354e-05,
      "loss": 1.4286,
      "step": 370
    },
    {
      "epoch": 0.064,
      "grad_norm": 0.3119344413280487,
      "learning_rate": 1.0634118967452302e-05,
      "loss": 1.4723,
      "step": 380
    },
    {
      "epoch": 0.06568421052631579,
      "grad_norm": 0.3306136131286621,
      "learning_rate": 1.0914702581369249e-05,
      "loss": 1.4769,
      "step": 390
    },
    {
      "epoch": 0.06736842105263158,
      "grad_norm": 0.28518420457839966,
      "learning_rate": 1.1195286195286197e-05,
      "loss": 1.4551,
      "step": 400
    },
    {
      "epoch": 0.06905263157894737,
      "grad_norm": 0.28847867250442505,
      "learning_rate": 1.1475869809203143e-05,
      "loss": 1.4071,
      "step": 410
    },
    {
      "epoch": 0.07073684210526315,
      "grad_norm": 0.2927733361721039,
      "learning_rate": 1.175645342312009e-05,
      "loss": 1.4057,
      "step": 420
    },
    {
      "epoch": 0.07242105263157894,
      "grad_norm": 0.3126303553581238,
      "learning_rate": 1.2037037037037037e-05,
      "loss": 1.4561,
      "step": 430
    },
    {
      "epoch": 0.07410526315789474,
      "grad_norm": 0.3449290990829468,
      "learning_rate": 1.2317620650953985e-05,
      "loss": 1.4998,
      "step": 440
    },
    {
      "epoch": 0.07578947368421053,
      "grad_norm": 0.2801138758659363,
      "learning_rate": 1.2598204264870933e-05,
      "loss": 1.4468,
      "step": 450
    },
    {
      "epoch": 0.07747368421052632,
      "grad_norm": 0.29857516288757324,
      "learning_rate": 1.287878787878788e-05,
      "loss": 1.5324,
      "step": 460
    },
    {
      "epoch": 0.07915789473684211,
      "grad_norm": 0.30076226592063904,
      "learning_rate": 1.3159371492704827e-05,
      "loss": 1.4199,
      "step": 470
    },
    {
      "epoch": 0.0808421052631579,
      "grad_norm": 0.3206464946269989,
      "learning_rate": 1.3439955106621773e-05,
      "loss": 1.4405,
      "step": 480
    },
    {
      "epoch": 0.08252631578947368,
      "grad_norm": 0.3138670027256012,
      "learning_rate": 1.3720538720538723e-05,
      "loss": 1.4967,
      "step": 490
    },
    {
      "epoch": 0.08421052631578947,
      "grad_norm": 0.3104718327522278,
      "learning_rate": 1.4001122334455668e-05,
      "loss": 1.5703,
      "step": 500
    },
    {
      "epoch": 0.08421052631578947,
      "eval_loss": 1.4425467252731323,
      "eval_runtime": 627.4744,
      "eval_samples_per_second": 7.968,
      "eval_steps_per_second": 3.984,
      "step": 500
    },
    {
      "epoch": 0.08589473684210526,
      "grad_norm": 0.3332298994064331,
      "learning_rate": 1.4281705948372614e-05,
      "loss": 1.4636,
      "step": 510
    },
    {
      "epoch": 0.08757894736842105,
      "grad_norm": 0.29114049673080444,
      "learning_rate": 1.4562289562289564e-05,
      "loss": 1.3763,
      "step": 520
    },
    {
      "epoch": 0.08926315789473684,
      "grad_norm": 0.2750283479690552,
      "learning_rate": 1.484287317620651e-05,
      "loss": 1.4726,
      "step": 530
    },
    {
      "epoch": 0.09094736842105264,
      "grad_norm": 0.29571524262428284,
      "learning_rate": 1.5123456790123458e-05,
      "loss": 1.4944,
      "step": 540
    },
    {
      "epoch": 0.09263157894736843,
      "grad_norm": 0.30884265899658203,
      "learning_rate": 1.5404040404040404e-05,
      "loss": 1.4321,
      "step": 550
    },
    {
      "epoch": 0.09431578947368421,
      "grad_norm": 0.2976180911064148,
      "learning_rate": 1.5684624017957354e-05,
      "loss": 1.4563,
      "step": 560
    },
    {
      "epoch": 0.096,
      "grad_norm": 0.3064989745616913,
      "learning_rate": 1.59652076318743e-05,
      "loss": 1.4104,
      "step": 570
    },
    {
      "epoch": 0.09768421052631579,
      "grad_norm": 0.3264683187007904,
      "learning_rate": 1.6245791245791246e-05,
      "loss": 1.4738,
      "step": 580
    },
    {
      "epoch": 0.09936842105263158,
      "grad_norm": 0.2948909103870392,
      "learning_rate": 1.6526374859708193e-05,
      "loss": 1.4363,
      "step": 590
    },
    {
      "epoch": 0.10105263157894737,
      "grad_norm": 0.32367390394210815,
      "learning_rate": 1.6806958473625142e-05,
      "loss": 1.4438,
      "step": 600
    },
    {
      "epoch": 0.10273684210526315,
      "grad_norm": 0.29063737392425537,
      "learning_rate": 1.708754208754209e-05,
      "loss": 1.4437,
      "step": 610
    },
    {
      "epoch": 0.10442105263157894,
      "grad_norm": 0.3163065016269684,
      "learning_rate": 1.7368125701459035e-05,
      "loss": 1.4542,
      "step": 620
    },
    {
      "epoch": 0.10610526315789473,
      "grad_norm": 0.29881158471107483,
      "learning_rate": 1.7648709315375984e-05,
      "loss": 1.4029,
      "step": 630
    },
    {
      "epoch": 0.10778947368421053,
      "grad_norm": 0.3249943256378174,
      "learning_rate": 1.7929292929292927e-05,
      "loss": 1.4454,
      "step": 640
    },
    {
      "epoch": 0.10947368421052632,
      "grad_norm": 0.31320974230766296,
      "learning_rate": 1.8209876543209877e-05,
      "loss": 1.5437,
      "step": 650
    },
    {
      "epoch": 0.11115789473684211,
      "grad_norm": 0.30014970898628235,
      "learning_rate": 1.8490460157126823e-05,
      "loss": 1.3781,
      "step": 660
    },
    {
      "epoch": 0.1128421052631579,
      "grad_norm": 0.3208349943161011,
      "learning_rate": 1.8771043771043773e-05,
      "loss": 1.4615,
      "step": 670
    },
    {
      "epoch": 0.11452631578947368,
      "grad_norm": 0.33636245131492615,
      "learning_rate": 1.905162738496072e-05,
      "loss": 1.3844,
      "step": 680
    },
    {
      "epoch": 0.11621052631578947,
      "grad_norm": 0.2895752489566803,
      "learning_rate": 1.933221099887767e-05,
      "loss": 1.5021,
      "step": 690
    },
    {
      "epoch": 0.11789473684210526,
      "grad_norm": 0.33455783128738403,
      "learning_rate": 1.9612794612794612e-05,
      "loss": 1.3939,
      "step": 700
    },
    {
      "epoch": 0.11957894736842105,
      "grad_norm": 0.36592987179756165,
      "learning_rate": 1.989337822671156e-05,
      "loss": 1.4311,
      "step": 710
    },
    {
      "epoch": 0.12126315789473684,
      "grad_norm": 0.317028671503067,
      "learning_rate": 2.0173961840628508e-05,
      "loss": 1.4778,
      "step": 720
    },
    {
      "epoch": 0.12294736842105262,
      "grad_norm": 0.3277951180934906,
      "learning_rate": 2.0454545454545457e-05,
      "loss": 1.3975,
      "step": 730
    },
    {
      "epoch": 0.12463157894736843,
      "grad_norm": 0.33263206481933594,
      "learning_rate": 2.0735129068462404e-05,
      "loss": 1.4842,
      "step": 740
    },
    {
      "epoch": 0.12631578947368421,
      "grad_norm": 0.34084832668304443,
      "learning_rate": 2.101571268237935e-05,
      "loss": 1.4702,
      "step": 750
    },
    {
      "epoch": 0.128,
      "grad_norm": 0.3358267843723297,
      "learning_rate": 2.1296296296296296e-05,
      "loss": 1.4088,
      "step": 760
    },
    {
      "epoch": 0.1296842105263158,
      "grad_norm": 0.3366195559501648,
      "learning_rate": 2.1576879910213242e-05,
      "loss": 1.4043,
      "step": 770
    },
    {
      "epoch": 0.13136842105263158,
      "grad_norm": 0.3151577115058899,
      "learning_rate": 2.1857463524130192e-05,
      "loss": 1.4305,
      "step": 780
    },
    {
      "epoch": 0.13305263157894737,
      "grad_norm": 0.3637940287590027,
      "learning_rate": 2.213804713804714e-05,
      "loss": 1.4381,
      "step": 790
    },
    {
      "epoch": 0.13473684210526315,
      "grad_norm": 0.30998823046684265,
      "learning_rate": 2.2418630751964088e-05,
      "loss": 1.4181,
      "step": 800
    },
    {
      "epoch": 0.13642105263157894,
      "grad_norm": 0.29245883226394653,
      "learning_rate": 2.2699214365881034e-05,
      "loss": 1.4515,
      "step": 810
    },
    {
      "epoch": 0.13810526315789473,
      "grad_norm": 0.32378411293029785,
      "learning_rate": 2.297979797979798e-05,
      "loss": 1.4873,
      "step": 820
    },
    {
      "epoch": 0.13978947368421052,
      "grad_norm": 0.32840538024902344,
      "learning_rate": 2.3260381593714927e-05,
      "loss": 1.4798,
      "step": 830
    },
    {
      "epoch": 0.1414736842105263,
      "grad_norm": 0.32433003187179565,
      "learning_rate": 2.3540965207631877e-05,
      "loss": 1.4148,
      "step": 840
    },
    {
      "epoch": 0.1431578947368421,
      "grad_norm": 0.3382358253002167,
      "learning_rate": 2.3821548821548823e-05,
      "loss": 1.4499,
      "step": 850
    },
    {
      "epoch": 0.14484210526315788,
      "grad_norm": 0.30143994092941284,
      "learning_rate": 2.4102132435465772e-05,
      "loss": 1.3917,
      "step": 860
    },
    {
      "epoch": 0.14652631578947367,
      "grad_norm": 0.3304138481616974,
      "learning_rate": 2.438271604938272e-05,
      "loss": 1.3791,
      "step": 870
    },
    {
      "epoch": 0.1482105263157895,
      "grad_norm": 0.353997141122818,
      "learning_rate": 2.466329966329966e-05,
      "loss": 1.4707,
      "step": 880
    },
    {
      "epoch": 0.14989473684210528,
      "grad_norm": 0.3323941230773926,
      "learning_rate": 2.494388327721661e-05,
      "loss": 1.4232,
      "step": 890
    },
    {
      "epoch": 0.15157894736842106,
      "grad_norm": 0.3112866282463074,
      "learning_rate": 2.5224466891133558e-05,
      "loss": 1.4291,
      "step": 900
    },
    {
      "epoch": 0.15326315789473685,
      "grad_norm": 0.3290766477584839,
      "learning_rate": 2.5505050505050504e-05,
      "loss": 1.4411,
      "step": 910
    },
    {
      "epoch": 0.15494736842105264,
      "grad_norm": 0.3461703360080719,
      "learning_rate": 2.5785634118967457e-05,
      "loss": 1.4012,
      "step": 920
    },
    {
      "epoch": 0.15663157894736843,
      "grad_norm": 0.359667032957077,
      "learning_rate": 2.6066217732884403e-05,
      "loss": 1.4327,
      "step": 930
    },
    {
      "epoch": 0.15831578947368422,
      "grad_norm": 0.3078553378582001,
      "learning_rate": 2.6346801346801346e-05,
      "loss": 1.445,
      "step": 940
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.31644898653030396,
      "learning_rate": 2.6627384960718292e-05,
      "loss": 1.4784,
      "step": 950
    },
    {
      "epoch": 0.1616842105263158,
      "grad_norm": 0.3724164366722107,
      "learning_rate": 2.6907968574635245e-05,
      "loss": 1.4873,
      "step": 960
    },
    {
      "epoch": 0.16336842105263158,
      "grad_norm": 0.32616913318634033,
      "learning_rate": 2.718855218855219e-05,
      "loss": 1.4213,
      "step": 970
    },
    {
      "epoch": 0.16505263157894737,
      "grad_norm": 0.3016490936279297,
      "learning_rate": 2.7469135802469138e-05,
      "loss": 1.4085,
      "step": 980
    },
    {
      "epoch": 0.16673684210526316,
      "grad_norm": 0.33823099732398987,
      "learning_rate": 2.7749719416386084e-05,
      "loss": 1.4121,
      "step": 990
    },
    {
      "epoch": 0.16842105263157894,
      "grad_norm": 0.40880540013313293,
      "learning_rate": 2.803030303030303e-05,
      "loss": 1.4945,
      "step": 1000
    },
    {
      "epoch": 0.16842105263157894,
      "eval_loss": 1.4282265901565552,
      "eval_runtime": 627.3446,
      "eval_samples_per_second": 7.97,
      "eval_steps_per_second": 3.985,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 17814,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.625805619481805e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}