Training in progress, step 11000, checkpoint
last-checkpoint/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:62f77201047c0ff7c5527ffc5ccf11b4138f77fae747adff964ee88ae1f98afc
 size 328277848
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ace327ac217bad5e9c3541a67c8adbffd0c6930f7ad271ab5e15f9a6306ce52e
 size 318646859
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ca9715fac08ad0b70edb3a378bc21ad649dabc882b316cdb77b215f678babe3b
 size 14645
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d77946d2c30708215d82675369c6b0f4ea0ac50e0bfa8851a58c893e34baac40
 size 1465
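The four files above are Git LFS pointers: the repository itself stores only the spec version, the sha256 oid, and the byte size, while the actual checkpoint blobs live in LFS storage. A minimal sketch in Python, assuming the files have been pulled locally; the verify_lfs_pointer helper name and the local path are illustrative, not part of this repo:

import hashlib
import os

def verify_lfs_pointer(path, expected_oid, expected_size):
    # Compare the local file's size and sha256 digest against the LFS pointer fields.
    if os.path.getsize(path) != expected_size:
        return False
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            sha.update(chunk)
    return sha.hexdigest() == expected_oid

# Values taken from the new last-checkpoint/model.safetensors pointer above.
print(verify_lfs_pointer(
    "last-checkpoint/model.safetensors",
    "62f77201047c0ff7c5527ffc5ccf11b4138f77fae747adff964ee88ae1f98afc",
    328277848,
))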
last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
 "best_global_step": null,
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 1.
+"epoch": 1.8584220307484371,
 "eval_steps": 500,
-"global_step":
+"global_step": 11000,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -7533,6 +7533,364 @@
 "eval_samples_per_second": 246.863,
 "eval_steps_per_second": 5.184,
 "step": 10500
+},
+{
+"epoch": 1.7756377766514615,
+"grad_norm": 0.45778968930244446,
+"learning_rate": 1.330666277084756e-05,
+"loss": 4.324074172973633,
+"step": 10510
+},
+{
+"epoch": 1.777327251224869,
+"grad_norm": 0.44530189037323,
+"learning_rate": 1.3110123947820345e-05,
+"loss": 4.296671295166016,
+"step": 10520
+},
+{
+"epoch": 1.7790167257982767,
+"grad_norm": 0.4516686797142029,
+"learning_rate": 1.2914981033673616e-05,
+"loss": 4.3019359588623045,
+"step": 10530
+},
+{
+"epoch": 1.7807062003716845,
+"grad_norm": 0.4497534930706024,
+"learning_rate": 1.2721236018340675e-05,
+"loss": 4.252984237670899,
+"step": 10540
+},
+{
+"epoch": 1.782395674945092,
+"grad_norm": 0.4479978680610657,
+"learning_rate": 1.2528890877500025e-05,
+"loss": 4.305055618286133,
+"step": 10550
+},
+{
+"epoch": 1.7840851495184997,
+"grad_norm": 0.462827205657959,
+"learning_rate": 1.2337947572555257e-05,
+"loss": 4.314754486083984,
+"step": 10560
+},
+{
+"epoch": 1.7857746240919075,
+"grad_norm": 0.4561219811439514,
+"learning_rate": 1.2148408050614961e-05,
+"loss": 4.2755790710449215,
+"step": 10570
+},
+{
+"epoch": 1.787464098665315,
+"grad_norm": 0.4636087119579315,
+"learning_rate": 1.1960274244472928e-05,
+"loss": 4.280724716186524,
+"step": 10580
+},
+{
+"epoch": 1.7891535732387227,
+"grad_norm": 0.4560607373714447,
+"learning_rate": 1.1773548072588352e-05,
+"loss": 4.296182632446289,
+"step": 10590
+},
+{
+"epoch": 1.7908430478121304,
+"grad_norm": 0.46516045928001404,
+"learning_rate": 1.158823143906652e-05,
+"loss": 4.301852416992188,
+"step": 10600
+},
+{
+"epoch": 1.792532522385538,
+"grad_norm": 0.4671533703804016,
+"learning_rate": 1.1404326233639056e-05,
+"loss": 4.321551132202148,
+"step": 10610
+},
+{
+"epoch": 1.794221996958946,
+"grad_norm": 0.46711355447769165,
+"learning_rate": 1.1221834331644857e-05,
+"loss": 4.292984390258789,
+"step": 10620
+},
+{
+"epoch": 1.7959114715323534,
+"grad_norm": 0.46830058097839355,
+"learning_rate": 1.1040757594010908e-05,
+"loss": 4.294471740722656,
+"step": 10630
+},
+{
+"epoch": 1.7976009461057612,
+"grad_norm": 0.45422518253326416,
+"learning_rate": 1.0861097867233375e-05,
+"loss": 4.302399444580078,
+"step": 10640
+},
+{
+"epoch": 1.799290420679169,
+"grad_norm": 0.46243947744369507,
+"learning_rate": 1.0682856983358645e-05,
+"loss": 4.300415420532227,
+"step": 10650
+},
+{
+"epoch": 1.8009798952525764,
+"grad_norm": 0.45327311754226685,
+"learning_rate": 1.050603675996477e-05,
+"loss": 4.294659042358399,
+"step": 10660
+},
+{
+"epoch": 1.8026693698259841,
+"grad_norm": 0.4481427073478699,
+"learning_rate": 1.0330639000142877e-05,
+"loss": 4.29761962890625,
+"step": 10670
+},
+{
+"epoch": 1.8043588443993919,
+"grad_norm": 0.45235884189605713,
+"learning_rate": 1.0156665492478794e-05,
+"loss": 4.2950092315673825,
+"step": 10680
+},
+{
+"epoch": 1.8060483189727994,
+"grad_norm": 0.4574648439884186,
+"learning_rate": 9.984118011034787e-06,
+"loss": 4.296451187133789,
+"step": 10690
+},
+{
+"epoch": 1.8077377935462071,
+"grad_norm": 0.44989126920700073,
+"learning_rate": 9.812998315331449e-06,
+"loss": 4.295338821411133,
+"step": 10700
+},
+{
+"epoch": 1.8094272681196149,
+"grad_norm": 0.44825267791748047,
+"learning_rate": 9.64330815032991e-06,
+"loss": 4.29632682800293,
+"step": 10710
+},
+{
+"epoch": 1.8111167426930224,
+"grad_norm": 0.44391629099845886,
+"learning_rate": 9.475049246413801e-06,
+"loss": 4.282930374145508,
+"step": 10720
+},
+{
+"epoch": 1.81280621726643,
+"grad_norm": 0.4501837491989136,
+"learning_rate": 9.308223319371789e-06,
+"loss": 4.3113666534423825,
+"step": 10730
+},
+{
+"epoch": 1.8144956918398378,
+"grad_norm": 0.45159661769866943,
+"learning_rate": 9.142832070380051e-06,
+"loss": 4.275300979614258,
+"step": 10740
+},
+{
+"epoch": 1.8161851664132453,
+"grad_norm": 0.4615607261657715,
+"learning_rate": 8.978877185984895e-06,
+"loss": 4.27879753112793,
+"step": 10750
+},
+{
+"epoch": 1.8178746409866533,
+"grad_norm": 0.4554959535598755,
+"learning_rate": 8.816360338085537e-06,
+"loss": 4.320524597167969,
+"step": 10760
+},
+{
+"epoch": 1.8195641155600608,
+"grad_norm": 0.4588150978088379,
+"learning_rate": 8.655283183917094e-06,
+"loss": 4.305972671508789,
+"step": 10770
+},
+{
+"epoch": 1.8212535901334683,
+"grad_norm": 0.4642908275127411,
+"learning_rate": 8.495647366033708e-06,
+"loss": 4.303414154052734,
+"step": 10780
+},
+{
+"epoch": 1.8229430647068763,
+"grad_norm": 0.44532260298728943,
+"learning_rate": 8.33745451229173e-06,
+"loss": 4.2890056610107425,
+"step": 10790
+},
+{
+"epoch": 1.8246325392802838,
+"grad_norm": 0.44421857595443726,
+"learning_rate": 8.180706235833162e-06,
+"loss": 4.27965087890625,
+"step": 10800
+},
+{
+"epoch": 1.8263220138536915,
+"grad_norm": 0.4534235894680023,
+"learning_rate": 8.025404135069207e-06,
+"loss": 4.3062583923339846,
+"step": 10810
+},
+{
+"epoch": 1.8280114884270993,
+"grad_norm": 0.44868797063827515,
+"learning_rate": 7.871549793663985e-06,
+"loss": 4.286159896850586,
+"step": 10820
+},
+{
+"epoch": 1.8297009630005068,
+"grad_norm": 0.4559250771999359,
+"learning_rate": 7.719144780518315e-06,
+"loss": 4.280204391479492,
+"step": 10830
+},
+{
+"epoch": 1.8313904375739145,
+"grad_norm": 0.4582137167453766,
+"learning_rate": 7.568190649753753e-06,
+"loss": 4.293819427490234,
+"step": 10840
+},
+{
+"epoch": 1.8330799121473222,
+"grad_norm": 0.44784441590309143,
+"learning_rate": 7.418688940696843e-06,
+"loss": 4.301911163330078,
+"step": 10850
+},
+{
+"epoch": 1.8347693867207298,
+"grad_norm": 0.4547264575958252,
+"learning_rate": 7.270641177863251e-06,
+"loss": 4.318780136108399,
+"step": 10860
+},
+{
+"epoch": 1.8364588612941375,
+"grad_norm": 0.44876977801322937,
+"learning_rate": 7.124048870942301e-06,
+"loss": 4.305691528320312,
+"step": 10870
+},
+{
+"epoch": 1.8381483358675452,
+"grad_norm": 0.4435437321662903,
+"learning_rate": 6.97891351478157e-06,
+"loss": 4.285098648071289,
+"step": 10880
+},
+{
+"epoch": 1.8398378104409527,
+"grad_norm": 0.4529848098754883,
+"learning_rate": 6.83523658937174e-06,
+"loss": 4.30163345336914,
+"step": 10890
+},
+{
+"epoch": 1.8415272850143607,
+"grad_norm": 0.44488754868507385,
+"learning_rate": 6.693019559831319e-06,
+"loss": 4.272104644775391,
+"step": 10900
+},
+{
+"epoch": 1.8432167595877682,
+"grad_norm": 0.44506925344467163,
+"learning_rate": 6.552263876391878e-06,
+"loss": 4.296164703369141,
+"step": 10910
+},
+{
+"epoch": 1.8449062341611757,
+"grad_norm": 0.453124076128006,
+"learning_rate": 6.412970974383069e-06,
+"loss": 4.268503189086914,
+"step": 10920
+},
+{
+"epoch": 1.8465957087345837,
+"grad_norm": 0.45630943775177,
+"learning_rate": 6.275142274218264e-06,
+"loss": 4.276957702636719,
+"step": 10930
+},
+{
+"epoch": 1.8482851833079912,
+"grad_norm": 0.4438062012195587,
+"learning_rate": 6.138779181379777e-06,
+"loss": 4.31237564086914,
+"step": 10940
+},
+{
+"epoch": 1.849974657881399,
+"grad_norm": 0.4586540460586548,
+"learning_rate": 6.003883086404709e-06,
+"loss": 4.296250915527343,
+"step": 10950
+},
+{
+"epoch": 1.8516641324548067,
+"grad_norm": 0.4555058777332306,
+"learning_rate": 5.870455364870747e-06,
+"loss": 4.289797973632813,
+"step": 10960
+},
+{
+"epoch": 1.8533536070282142,
+"grad_norm": 0.4580257833003998,
+"learning_rate": 5.738497377382117e-06,
+"loss": 4.288161849975586,
+"step": 10970
+},
+{
+"epoch": 1.855043081601622,
+"grad_norm": 0.44520384073257446,
+"learning_rate": 5.608010469555674e-06,
+"loss": 4.309579467773437,
+"step": 10980
+},
+{
+"epoch": 1.8567325561750296,
+"grad_norm": 0.4458165168762207,
+"learning_rate": 5.4789959720071995e-06,
+"loss": 4.300251007080078,
+"step": 10990
+},
+{
+"epoch": 1.8584220307484371,
+"grad_norm": 0.4537349343299866,
+"learning_rate": 5.3514552003379395e-06,
+"loss": 4.293206024169922,
+"step": 11000
+},
+{
+"epoch": 1.8584220307484371,
+"eval_loss": 4.268224716186523,
+"eval_runtime": 3.8046,
+"eval_samples_per_second": 262.842,
+"eval_steps_per_second": 5.52,
+"step": 11000
 }
 ],
 "logging_steps": 10,
@@ -7552,7 +7910,7 @@
 "attributes": {}
 }
 },
-"total_flos": 3.
+"total_flos": 3.679004125896376e+17,
 "train_batch_size": 48,
 "trial_name": null,
 "trial_params": null
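The trainer_state.json change above appends the step 10510–11000 training logs and the step 11000 evaluation to log_history, and advances epoch, global_step, and total_flos accordingly. A minimal sketch in Python, assuming the checkpoint directory is available locally (the path is illustrative), for reading this state back and pulling out the latest metrics:

import json

# Load the trainer state written with this checkpoint.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"])  # 11000
print(state["epoch"])        # 1.8584220307484371

# log_history holds one dict per logging step (every 10 steps here) plus eval entries.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]
print(train_logs[-1]["step"], train_logs[-1]["loss"], train_logs[-1]["learning_rate"])
print(eval_logs[-1]["step"], eval_logs[-1]["eval_loss"])

Training can be resumed from this directory with trainer.train(resume_from_checkpoint="last-checkpoint"), which is what restores the optimizer, scheduler, and RNG state files updated in this commit.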