{
  "best_metric": 0.26930707693099976,
  "best_model_checkpoint": "./convnext-tiny-1e-4/checkpoint-5500",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 5500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.18,
      "grad_norm": 17.60952377319336,
      "learning_rate": 9.991845519630678e-05,
      "loss": 1.9015,
      "step": 100
    },
    {
      "epoch": 0.36,
      "grad_norm": 19.555858612060547,
      "learning_rate": 9.967408676742751e-05,
      "loss": 0.9352,
      "step": 200
    },
    {
      "epoch": 0.55,
      "grad_norm": 14.498743057250977,
      "learning_rate": 9.926769179238466e-05,
      "loss": 0.7326,
      "step": 300
    },
    {
      "epoch": 0.73,
      "grad_norm": 14.032439231872559,
      "learning_rate": 9.870059584711668e-05,
      "loss": 0.6518,
      "step": 400
    },
    {
      "epoch": 0.91,
      "grad_norm": 13.6815767288208,
      "learning_rate": 9.797464868072488e-05,
      "loss": 0.5828,
      "step": 500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8874751491053678,
      "eval_loss": 0.4074288010597229,
      "eval_runtime": 70.8056,
      "eval_samples_per_second": 35.52,
      "eval_steps_per_second": 1.116,
      "step": 550
    },
    {
      "epoch": 1.09,
      "grad_norm": 23.21503448486328,
      "learning_rate": 9.709221818197624e-05,
      "loss": 0.5615,
      "step": 600
    },
    {
      "epoch": 1.27,
      "grad_norm": 9.002695083618164,
      "learning_rate": 9.60561826557425e-05,
      "loss": 0.4534,
      "step": 700
    },
    {
      "epoch": 1.45,
      "grad_norm": 20.327482223510742,
      "learning_rate": 9.486992143456792e-05,
      "loss": 0.4444,
      "step": 800
    },
    {
      "epoch": 1.64,
      "grad_norm": 9.196972846984863,
      "learning_rate": 9.353730385598887e-05,
      "loss": 0.4863,
      "step": 900
    },
    {
      "epoch": 1.82,
      "grad_norm": 7.924983501434326,
      "learning_rate": 9.206267664155907e-05,
      "loss": 0.4324,
      "step": 1000
    },
    {
      "epoch": 2.0,
      "grad_norm": 12.762895584106445,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.4404,
      "step": 1100
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8811133200795228,
      "eval_loss": 0.40930676460266113,
      "eval_runtime": 68.8533,
      "eval_samples_per_second": 36.527,
      "eval_steps_per_second": 1.147,
      "step": 1100
    },
    {
      "epoch": 2.18,
      "grad_norm": 14.092179298400879,
      "learning_rate": 8.870708053195413e-05,
      "loss": 0.3572,
      "step": 1200
    },
    {
      "epoch": 2.36,
      "grad_norm": 9.523673057556152,
      "learning_rate": 8.683705689382024e-05,
      "loss": 0.3574,
      "step": 1300
    },
    {
      "epoch": 2.55,
      "grad_norm": 5.849709987640381,
      "learning_rate": 8.484687843276469e-05,
      "loss": 0.3547,
      "step": 1400
    },
    {
      "epoch": 2.73,
      "grad_norm": 8.085291862487793,
      "learning_rate": 8.274303669726426e-05,
      "loss": 0.3713,
      "step": 1500
    },
    {
      "epoch": 2.91,
      "grad_norm": 7.404701232910156,
      "learning_rate": 8.053239398177191e-05,
      "loss": 0.3503,
      "step": 1600
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9017892644135189,
      "eval_loss": 0.33908799290657043,
      "eval_runtime": 67.9125,
      "eval_samples_per_second": 37.033,
      "eval_steps_per_second": 1.163,
      "step": 1650
    },
    {
      "epoch": 3.09,
      "grad_norm": 13.007326126098633,
      "learning_rate": 7.822216094333847e-05,
      "loss": 0.3168,
      "step": 1700
    },
    {
      "epoch": 3.27,
      "grad_norm": 10.012882232666016,
      "learning_rate": 7.58198730819481e-05,
      "loss": 0.2862,
      "step": 1800
    },
    {
      "epoch": 3.45,
      "grad_norm": 10.694050788879395,
      "learning_rate": 7.333336616128369e-05,
      "loss": 0.2627,
      "step": 1900
    },
    {
      "epoch": 3.64,
      "grad_norm": 9.87967586517334,
      "learning_rate": 7.077075065009433e-05,
      "loss": 0.3063,
      "step": 2000
    },
    {
      "epoch": 3.82,
      "grad_norm": 10.754569053649902,
      "learning_rate": 6.814038526753205e-05,
      "loss": 0.2825,
      "step": 2100
    },
    {
      "epoch": 4.0,
      "grad_norm": 9.205183029174805,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.2636,
      "step": 2200
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.91610337972167,
      "eval_loss": 0.307881236076355,
      "eval_runtime": 67.843,
      "eval_samples_per_second": 37.071,
      "eval_steps_per_second": 1.164,
      "step": 2200
    },
    {
      "epoch": 4.18,
      "grad_norm": 13.164912223815918,
      "learning_rate": 6.271091670967436e-05,
      "loss": 0.2436,
      "step": 2300
    },
    {
      "epoch": 4.36,
      "grad_norm": 6.218183994293213,
      "learning_rate": 5.992952333228728e-05,
      "loss": 0.2253,
      "step": 2400
    },
    {
      "epoch": 4.55,
      "grad_norm": 13.5186128616333,
      "learning_rate": 5.7115741913664264e-05,
      "loss": 0.2223,
      "step": 2500
    },
    {
      "epoch": 4.73,
      "grad_norm": 15.335129737854004,
      "learning_rate": 5.427875042394199e-05,
      "loss": 0.2409,
      "step": 2600
    },
    {
      "epoch": 4.91,
      "grad_norm": 4.0428643226623535,
      "learning_rate": 5.142780253968481e-05,
      "loss": 0.2217,
      "step": 2700
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9168986083499006,
      "eval_loss": 0.30684027075767517,
      "eval_runtime": 71.9472,
      "eval_samples_per_second": 34.956,
      "eval_steps_per_second": 1.098,
      "step": 2750
    },
    {
      "epoch": 5.09,
      "grad_norm": 8.145292282104492,
      "learning_rate": 4.85721974603152e-05,
      "loss": 0.2155,
      "step": 2800
    },
    {
      "epoch": 5.27,
      "grad_norm": 17.054922103881836,
      "learning_rate": 4.5721249576058027e-05,
      "loss": 0.193,
      "step": 2900
    },
    {
      "epoch": 5.45,
      "grad_norm": 5.592764377593994,
      "learning_rate": 4.288425808633575e-05,
      "loss": 0.1827,
      "step": 3000
    },
    {
      "epoch": 5.64,
      "grad_norm": 3.9595816135406494,
      "learning_rate": 4.007047666771274e-05,
      "loss": 0.1662,
      "step": 3100
    },
    {
      "epoch": 5.82,
      "grad_norm": 7.078103542327881,
      "learning_rate": 3.728908329032567e-05,
      "loss": 0.1985,
      "step": 3200
    },
    {
      "epoch": 6.0,
      "grad_norm": 20.20966339111328,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.2024,
      "step": 3300
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.9284294234592445,
      "eval_loss": 0.283940851688385,
      "eval_runtime": 69.4245,
      "eval_samples_per_second": 36.226,
      "eval_steps_per_second": 1.138,
      "step": 3300
    },
    {
      "epoch": 6.18,
      "grad_norm": 6.057071208953857,
      "learning_rate": 3.1859614732467954e-05,
      "loss": 0.1643,
      "step": 3400
    },
    {
      "epoch": 6.36,
      "grad_norm": 10.104562759399414,
      "learning_rate": 2.9229249349905684e-05,
      "loss": 0.1492,
      "step": 3500
    },
    {
      "epoch": 6.55,
      "grad_norm": 4.825140476226807,
      "learning_rate": 2.6666633838716314e-05,
      "loss": 0.1509,
      "step": 3600
    },
    {
      "epoch": 6.73,
      "grad_norm": 5.883378505706787,
      "learning_rate": 2.418012691805191e-05,
      "loss": 0.1371,
      "step": 3700
    },
    {
      "epoch": 6.91,
      "grad_norm": 11.501364707946777,
      "learning_rate": 2.1777839056661554e-05,
      "loss": 0.1565,
      "step": 3800
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.9324055666003976,
      "eval_loss": 0.2781141400337219,
      "eval_runtime": 70.6162,
      "eval_samples_per_second": 35.615,
      "eval_steps_per_second": 1.119,
      "step": 3850
    },
    {
      "epoch": 7.09,
      "grad_norm": 7.272291660308838,
      "learning_rate": 1.946760601822809e-05,
      "loss": 0.1158,
      "step": 3900
    },
    {
      "epoch": 7.27,
      "grad_norm": 10.176920890808105,
      "learning_rate": 1.725696330273575e-05,
      "loss": 0.1342,
      "step": 4000
    },
    {
      "epoch": 7.45,
      "grad_norm": 10.327248573303223,
      "learning_rate": 1.5153121567235335e-05,
      "loss": 0.1261,
      "step": 4100
    },
    {
      "epoch": 7.64,
      "grad_norm": 10.916594505310059,
      "learning_rate": 1.3162943106179749e-05,
      "loss": 0.1336,
      "step": 4200
    },
    {
      "epoch": 7.82,
      "grad_norm": 15.206429481506348,
      "learning_rate": 1.1292919468045877e-05,
      "loss": 0.1363,
      "step": 4300
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.1916830986738205,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.1203,
      "step": 4400
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.9387673956262426,
      "eval_loss": 0.27075594663619995,
      "eval_runtime": 70.5293,
      "eval_samples_per_second": 35.659,
      "eval_steps_per_second": 1.12,
      "step": 4400
    },
    {
      "epoch": 8.18,
      "grad_norm": 10.416373252868652,
      "learning_rate": 7.937323358440935e-06,
      "loss": 0.1245,
      "step": 4500
    },
    {
      "epoch": 8.36,
      "grad_norm": 9.158769607543945,
      "learning_rate": 6.462696144011149e-06,
      "loss": 0.1151,
      "step": 4600
    },
    {
      "epoch": 8.55,
      "grad_norm": 9.09100341796875,
      "learning_rate": 5.13007856543209e-06,
      "loss": 0.1037,
      "step": 4700
    },
    {
      "epoch": 8.73,
      "grad_norm": 2.6982064247131348,
      "learning_rate": 3.9438173442575e-06,
      "loss": 0.1193,
      "step": 4800
    },
    {
      "epoch": 8.91,
      "grad_norm": 7.2986931800842285,
      "learning_rate": 2.9077818180237693e-06,
      "loss": 0.1281,
      "step": 4900
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.9363817097415507,
      "eval_loss": 0.2707272469997406,
      "eval_runtime": 69.913,
      "eval_samples_per_second": 35.973,
      "eval_steps_per_second": 1.13,
      "step": 4950
    },
    {
      "epoch": 9.09,
      "grad_norm": 4.659209251403809,
      "learning_rate": 2.0253513192751373e-06,
      "loss": 0.1022,
      "step": 5000
    },
    {
      "epoch": 9.27,
      "grad_norm": 9.605278015136719,
      "learning_rate": 1.2994041528833266e-06,
      "loss": 0.1073,
      "step": 5100
    },
    {
      "epoch": 9.45,
      "grad_norm": 9.004630088806152,
      "learning_rate": 7.323082076153509e-07,
      "loss": 0.0928,
      "step": 5200
    },
    {
      "epoch": 9.64,
      "grad_norm": 9.619388580322266,
      "learning_rate": 3.2591323257248893e-07,
      "loss": 0.1171,
      "step": 5300
    },
    {
      "epoch": 9.82,
      "grad_norm": 11.248806953430176,
      "learning_rate": 8.15448036932176e-08,
      "loss": 0.1334,
      "step": 5400
    },
    {
      "epoch": 10.0,
      "grad_norm": 18.464763641357422,
      "learning_rate": 0.0,
      "loss": 0.1014,
      "step": 5500
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.9379721669980119,
      "eval_loss": 0.26930707693099976,
      "eval_runtime": 69.9231,
      "eval_samples_per_second": 35.968,
      "eval_steps_per_second": 1.13,
      "step": 5500
    },
    {
      "epoch": 10.0,
      "step": 5500,
      "total_flos": 1.301428412334932e+19,
      "train_loss": 0.2947656165036288,
      "train_runtime": 8332.2574,
      "train_samples_per_second": 21.1,
      "train_steps_per_second": 0.66
    }
  ],
  "logging_steps": 100,
  "max_steps": 5500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 1.301428412334932e+19,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}