{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9906700593723494,
  "eval_steps": 500,
  "global_step": 882,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.033927056827820185,
      "grad_norm": 0.5479967594146729,
      "learning_rate": 0.0001977324263038549,
      "loss": 1.8868,
      "step": 10
    },
    {
      "epoch": 0.06785411365564037,
      "grad_norm": 0.224782794713974,
      "learning_rate": 0.00019546485260770976,
      "loss": 1.2591,
      "step": 20
    },
    {
      "epoch": 0.10178117048346055,
      "grad_norm": 0.24415747821331024,
      "learning_rate": 0.00019319727891156462,
      "loss": 1.0807,
      "step": 30
    },
    {
      "epoch": 0.13570822731128074,
      "grad_norm": 0.22518040239810944,
      "learning_rate": 0.0001909297052154195,
      "loss": 1.0525,
      "step": 40
    },
    {
      "epoch": 0.16963528413910092,
      "grad_norm": 0.2071247398853302,
      "learning_rate": 0.0001886621315192744,
      "loss": 1.0399,
      "step": 50
    },
    {
      "epoch": 0.2035623409669211,
      "grad_norm": 0.21717332303524017,
      "learning_rate": 0.00018639455782312926,
      "loss": 0.9676,
      "step": 60
    },
    {
      "epoch": 0.23748939779474132,
      "grad_norm": 0.23291213810443878,
      "learning_rate": 0.00018412698412698412,
      "loss": 0.9593,
      "step": 70
    },
    {
      "epoch": 0.2714164546225615,
      "grad_norm": 0.24413667619228363,
      "learning_rate": 0.000181859410430839,
      "loss": 0.9712,
      "step": 80
    },
    {
      "epoch": 0.3053435114503817,
      "grad_norm": 0.24343140423297882,
      "learning_rate": 0.0001795918367346939,
      "loss": 0.9398,
      "step": 90
    },
    {
      "epoch": 0.33927056827820185,
      "grad_norm": 0.2566365897655487,
      "learning_rate": 0.00017732426303854876,
      "loss": 0.9026,
      "step": 100
    },
    {
      "epoch": 0.37319762510602206,
      "grad_norm": 0.2497139573097229,
      "learning_rate": 0.00017505668934240365,
      "loss": 0.8716,
      "step": 110
    },
    {
      "epoch": 0.4071246819338422,
      "grad_norm": 0.2669306993484497,
      "learning_rate": 0.0001727891156462585,
      "loss": 0.8351,
      "step": 120
    },
    {
      "epoch": 0.4410517387616624,
      "grad_norm": 0.27545222640037537,
      "learning_rate": 0.0001705215419501134,
      "loss": 0.8784,
      "step": 130
    },
    {
      "epoch": 0.47497879558948264,
      "grad_norm": 0.2573504149913788,
      "learning_rate": 0.00016825396825396826,
      "loss": 0.8346,
      "step": 140
    },
    {
      "epoch": 0.5089058524173028,
      "grad_norm": 0.2542634904384613,
      "learning_rate": 0.00016598639455782315,
      "loss": 0.8113,
      "step": 150
    },
    {
      "epoch": 0.542832909245123,
      "grad_norm": 0.314864844083786,
      "learning_rate": 0.000163718820861678,
      "loss": 0.8093,
      "step": 160
    },
    {
      "epoch": 0.5767599660729432,
      "grad_norm": 0.2655967175960541,
      "learning_rate": 0.00016145124716553287,
      "loss": 0.8426,
      "step": 170
    },
    {
      "epoch": 0.6106870229007634,
      "grad_norm": 0.27555230259895325,
      "learning_rate": 0.00015918367346938776,
      "loss": 0.8129,
      "step": 180
    },
    {
      "epoch": 0.6446140797285835,
      "grad_norm": 0.28799182176589966,
      "learning_rate": 0.00015691609977324265,
      "loss": 0.7833,
      "step": 190
    },
    {
      "epoch": 0.6785411365564037,
      "grad_norm": 0.27187833189964294,
      "learning_rate": 0.00015464852607709753,
      "loss": 0.7795,
      "step": 200
    },
    {
      "epoch": 0.712468193384224,
      "grad_norm": 0.28134599328041077,
      "learning_rate": 0.00015238095238095237,
      "loss": 0.8008,
      "step": 210
    },
    {
      "epoch": 0.7463952502120441,
      "grad_norm": 0.29679593443870544,
      "learning_rate": 0.00015011337868480726,
      "loss": 0.7823,
      "step": 220
    },
    {
      "epoch": 0.7803223070398643,
      "grad_norm": 0.30081549286842346,
      "learning_rate": 0.00014784580498866215,
      "loss": 0.7772,
      "step": 230
    },
    {
      "epoch": 0.8142493638676844,
      "grad_norm": 0.28066059947013855,
      "learning_rate": 0.000145578231292517,
      "loss": 0.7776,
      "step": 240
    },
    {
      "epoch": 0.8481764206955047,
      "grad_norm": 0.2732291519641876,
      "learning_rate": 0.0001433106575963719,
      "loss": 0.7719,
      "step": 250
    },
    {
      "epoch": 0.8821034775233249,
      "grad_norm": 0.2929159104824066,
      "learning_rate": 0.00014104308390022676,
      "loss": 0.778,
      "step": 260
    },
    {
      "epoch": 0.916030534351145,
      "grad_norm": 0.29763197898864746,
      "learning_rate": 0.00013877551020408165,
      "loss": 0.8044,
      "step": 270
    },
    {
      "epoch": 0.9499575911789653,
      "grad_norm": 0.28522127866744995,
      "learning_rate": 0.0001365079365079365,
      "loss": 0.7877,
      "step": 280
    },
    {
      "epoch": 0.9838846480067854,
      "grad_norm": 0.3052780032157898,
      "learning_rate": 0.0001342403628117914,
      "loss": 0.7463,
      "step": 290
    },
    {
      "epoch": 1.01696352841391,
      "grad_norm": 0.323665976524353,
      "learning_rate": 0.00013197278911564626,
      "loss": 0.7519,
      "step": 300
    },
    {
      "epoch": 1.0508905852417303,
      "grad_norm": 0.3049149513244629,
      "learning_rate": 0.00012970521541950114,
      "loss": 0.7254,
      "step": 310
    },
    {
      "epoch": 1.0848176420695506,
      "grad_norm": 0.36289119720458984,
      "learning_rate": 0.000127437641723356,
      "loss": 0.7228,
      "step": 320
    },
    {
      "epoch": 1.1187446988973706,
      "grad_norm": 0.32274919748306274,
      "learning_rate": 0.0001251700680272109,
      "loss": 0.7348,
      "step": 330
    },
    {
      "epoch": 1.1526717557251909,
      "grad_norm": 0.3286229372024536,
      "learning_rate": 0.00012290249433106578,
      "loss": 0.736,
      "step": 340
    },
    {
      "epoch": 1.1865988125530111,
      "grad_norm": 0.3304899036884308,
      "learning_rate": 0.00012063492063492063,
      "loss": 0.7248,
      "step": 350
    },
    {
      "epoch": 1.2205258693808312,
      "grad_norm": 0.30971524119377136,
      "learning_rate": 0.00011836734693877552,
      "loss": 0.726,
      "step": 360
    },
    {
      "epoch": 1.2544529262086515,
      "grad_norm": 0.3118181526660919,
      "learning_rate": 0.0001160997732426304,
      "loss": 0.715,
      "step": 370
    },
    {
      "epoch": 1.2883799830364717,
      "grad_norm": 0.35270431637763977,
      "learning_rate": 0.00011383219954648527,
      "loss": 0.7117,
      "step": 380
    },
    {
      "epoch": 1.3223070398642918,
      "grad_norm": 0.3350945711135864,
      "learning_rate": 0.00011156462585034013,
      "loss": 0.7161,
      "step": 390
    },
    {
      "epoch": 1.356234096692112,
      "grad_norm": 0.3284754455089569,
      "learning_rate": 0.000109297052154195,
      "loss": 0.7131,
      "step": 400
    },
    {
      "epoch": 1.390161153519932,
      "grad_norm": 0.31873300671577454,
      "learning_rate": 0.0001070294784580499,
      "loss": 0.6879,
      "step": 410
    },
    {
      "epoch": 1.4240882103477523,
      "grad_norm": 0.32634538412094116,
      "learning_rate": 0.00010476190476190477,
      "loss": 0.7141,
      "step": 420
    },
    {
      "epoch": 1.4580152671755724,
      "grad_norm": 0.3236243724822998,
      "learning_rate": 0.00010249433106575966,
      "loss": 0.7128,
      "step": 430
    },
    {
      "epoch": 1.4919423240033927,
      "grad_norm": 0.34859582781791687,
      "learning_rate": 0.0001002267573696145,
      "loss": 0.7329,
      "step": 440
    },
    {
      "epoch": 1.525869380831213,
      "grad_norm": 0.3041052222251892,
      "learning_rate": 9.79591836734694e-05,
      "loss": 0.7001,
      "step": 450
    },
    {
      "epoch": 1.559796437659033,
      "grad_norm": 0.33919453620910645,
      "learning_rate": 9.569160997732427e-05,
      "loss": 0.6973,
      "step": 460
    },
    {
      "epoch": 1.5937234944868532,
      "grad_norm": 0.333812952041626,
      "learning_rate": 9.342403628117914e-05,
      "loss": 0.7114,
      "step": 470
    },
    {
      "epoch": 1.6276505513146735,
      "grad_norm": 0.32383838295936584,
      "learning_rate": 9.115646258503402e-05,
      "loss": 0.712,
      "step": 480
    },
    {
      "epoch": 1.6615776081424936,
      "grad_norm": 0.29588553309440613,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.6894,
      "step": 490
    },
    {
      "epoch": 1.6955046649703138,
      "grad_norm": 0.3401544392108917,
      "learning_rate": 8.662131519274377e-05,
      "loss": 0.7074,
      "step": 500
    },
    {
      "epoch": 1.729431721798134,
      "grad_norm": 0.3124183714389801,
      "learning_rate": 8.435374149659864e-05,
      "loss": 0.7054,
      "step": 510
    },
    {
      "epoch": 1.7633587786259541,
      "grad_norm": 0.31472259759902954,
      "learning_rate": 8.208616780045352e-05,
      "loss": 0.6761,
      "step": 520
    },
    {
      "epoch": 1.7972858354537744,
      "grad_norm": 0.31297537684440613,
      "learning_rate": 7.981859410430839e-05,
      "loss": 0.6715,
      "step": 530
    },
    {
      "epoch": 1.8312128922815947,
      "grad_norm": 0.33949580788612366,
      "learning_rate": 7.755102040816327e-05,
      "loss": 0.6883,
      "step": 540
    },
    {
      "epoch": 1.8651399491094147,
      "grad_norm": 0.34998491406440735,
      "learning_rate": 7.528344671201814e-05,
      "loss": 0.6878,
      "step": 550
    },
    {
      "epoch": 1.899067005937235,
      "grad_norm": 0.3444700241088867,
      "learning_rate": 7.301587301587302e-05,
      "loss": 0.7044,
      "step": 560
    },
    {
      "epoch": 1.9329940627650553,
      "grad_norm": 0.34009718894958496,
      "learning_rate": 7.074829931972789e-05,
      "loss": 0.6753,
      "step": 570
    },
    {
      "epoch": 1.9669211195928753,
      "grad_norm": 0.2923238277435303,
      "learning_rate": 6.848072562358277e-05,
      "loss": 0.6843,
      "step": 580
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.43473541736602783,
      "learning_rate": 6.621315192743764e-05,
      "loss": 0.6944,
      "step": 590
    },
    {
      "epoch": 2.03392705682782,
      "grad_norm": 0.34993427991867065,
      "learning_rate": 6.394557823129253e-05,
      "loss": 0.6565,
      "step": 600
    },
    {
      "epoch": 2.0678541136556405,
      "grad_norm": 0.3352773189544678,
      "learning_rate": 6.167800453514739e-05,
      "loss": 0.6447,
      "step": 610
    },
    {
      "epoch": 2.1017811704834606,
      "grad_norm": 0.31774693727493286,
      "learning_rate": 5.9410430839002274e-05,
      "loss": 0.6706,
      "step": 620
    },
    {
      "epoch": 2.1357082273112806,
      "grad_norm": 0.36518582701683044,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.6746,
      "step": 630
    },
    {
      "epoch": 2.169635284139101,
      "grad_norm": 0.40079668164253235,
      "learning_rate": 5.4875283446712023e-05,
      "loss": 0.6582,
      "step": 640
    },
    {
      "epoch": 2.203562340966921,
      "grad_norm": 0.3342309594154358,
      "learning_rate": 5.260770975056689e-05,
      "loss": 0.6381,
      "step": 650
    },
    {
      "epoch": 2.237489397794741,
      "grad_norm": 0.32690057158470154,
      "learning_rate": 5.034013605442177e-05,
      "loss": 0.6649,
      "step": 660
    },
    {
      "epoch": 2.2714164546225613,
      "grad_norm": 0.33868837356567383,
      "learning_rate": 4.807256235827665e-05,
      "loss": 0.6649,
      "step": 670
    },
    {
      "epoch": 2.3053435114503817,
      "grad_norm": 0.3330642282962799,
      "learning_rate": 4.580498866213152e-05,
      "loss": 0.6581,
      "step": 680
    },
    {
      "epoch": 2.339270568278202,
      "grad_norm": 0.34615200757980347,
      "learning_rate": 4.35374149659864e-05,
      "loss": 0.6505,
      "step": 690
    },
    {
      "epoch": 2.3731976251060223,
      "grad_norm": 0.4173542261123657,
      "learning_rate": 4.126984126984127e-05,
      "loss": 0.6407,
      "step": 700
    },
    {
      "epoch": 2.4071246819338423,
      "grad_norm": 0.36637556552886963,
      "learning_rate": 3.900226757369615e-05,
      "loss": 0.6741,
      "step": 710
    },
    {
      "epoch": 2.4410517387616624,
      "grad_norm": 0.327965646982193,
      "learning_rate": 3.673469387755102e-05,
      "loss": 0.6536,
      "step": 720
    },
    {
      "epoch": 2.4749787955894824,
      "grad_norm": 0.3590456247329712,
      "learning_rate": 3.44671201814059e-05,
      "loss": 0.666,
      "step": 730
    },
    {
      "epoch": 2.508905852417303,
      "grad_norm": 0.35121339559555054,
      "learning_rate": 3.219954648526077e-05,
      "loss": 0.6629,
      "step": 740
    },
    {
      "epoch": 2.542832909245123,
      "grad_norm": 0.37019094824790955,
      "learning_rate": 2.9931972789115647e-05,
      "loss": 0.6241,
      "step": 750
    },
    {
      "epoch": 2.5767599660729434,
      "grad_norm": 0.34458139538764954,
      "learning_rate": 2.7664399092970522e-05,
      "loss": 0.6324,
      "step": 760
    },
    {
      "epoch": 2.6106870229007635,
      "grad_norm": 0.3818744421005249,
      "learning_rate": 2.5396825396825397e-05,
      "loss": 0.6517,
      "step": 770
    },
    {
      "epoch": 2.6446140797285835,
      "grad_norm": 0.34370139241218567,
      "learning_rate": 2.3129251700680275e-05,
      "loss": 0.6594,
      "step": 780
    },
    {
      "epoch": 2.6785411365564036,
      "grad_norm": 0.3298929035663605,
      "learning_rate": 2.0861678004535147e-05,
      "loss": 0.6506,
      "step": 790
    },
    {
      "epoch": 2.712468193384224,
      "grad_norm": 0.3151053190231323,
      "learning_rate": 1.8594104308390022e-05,
      "loss": 0.6406,
      "step": 800
    },
    {
      "epoch": 2.746395250212044,
      "grad_norm": 0.38624706864356995,
      "learning_rate": 1.6326530612244897e-05,
      "loss": 0.6761,
      "step": 810
    },
    {
      "epoch": 2.780322307039864,
      "grad_norm": 0.3403633236885071,
      "learning_rate": 1.4058956916099775e-05,
      "loss": 0.6628,
      "step": 820
    },
    {
      "epoch": 2.8142493638676847,
      "grad_norm": 0.3730277419090271,
      "learning_rate": 1.179138321995465e-05,
      "loss": 0.6537,
      "step": 830
    },
    {
      "epoch": 2.8481764206955047,
      "grad_norm": 0.35656866431236267,
      "learning_rate": 9.523809523809523e-06,
      "loss": 0.6456,
      "step": 840
    },
    {
      "epoch": 2.8821034775233247,
      "grad_norm": 0.32987338304519653,
      "learning_rate": 7.2562358276644e-06,
      "loss": 0.6542,
      "step": 850
    },
    {
      "epoch": 2.916030534351145,
      "grad_norm": 0.3333512246608734,
      "learning_rate": 4.9886621315192745e-06,
      "loss": 0.6498,
      "step": 860
    },
    {
      "epoch": 2.9499575911789653,
      "grad_norm": 0.35678303241729736,
      "learning_rate": 2.72108843537415e-06,
      "loss": 0.652,
      "step": 870
    },
    {
      "epoch": 2.9838846480067853,
      "grad_norm": 0.4047712981700897,
      "learning_rate": 4.53514739229025e-07,
      "loss": 0.6347,
      "step": 880
    }
  ],
  "logging_steps": 10,
  "max_steps": 882,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3050328576425984e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}