| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.20051413881748073, | |
| "eval_steps": 26, | |
| "global_step": 104, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.005784061696658098, | |
| "grad_norm": null, | |
| "learning_rate": 0.0, | |
| "loss": 7.8503, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.011568123393316195, | |
| "grad_norm": 127.71849060058594, | |
| "learning_rate": 5.791505791505792e-08, | |
| "loss": 8.4022, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.017352185089974295, | |
| "grad_norm": 244.984619140625, | |
| "learning_rate": 1.7374517374517373e-07, | |
| "loss": 11.1776, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.02313624678663239, | |
| "grad_norm": 425.6427917480469, | |
| "learning_rate": 3.4749034749034746e-07, | |
| "loss": 9.7845, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.02892030848329049, | |
| "grad_norm": 238.8634490966797, | |
| "learning_rate": 5.212355212355212e-07, | |
| "loss": 8.9224, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.03470437017994859, | |
| "grad_norm": 173.4210968017578, | |
| "learning_rate": 6.949806949806949e-07, | |
| "loss": 11.1202, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.04048843187660668, | |
| "grad_norm": 77.89308166503906, | |
| "learning_rate": 8.687258687258688e-07, | |
| "loss": 7.413, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.04627249357326478, | |
| "grad_norm": null, | |
| "learning_rate": 9.845559845559844e-07, | |
| "loss": 7.7803, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_Qnli-dev_cosine_accuracy": 0.609375, | |
| "eval_Qnli-dev_cosine_accuracy_threshold": 0.9602330923080444, | |
| "eval_Qnli-dev_cosine_ap": 0.587737015984062, | |
| "eval_Qnli-dev_cosine_f1": 0.6358695652173914, | |
| "eval_Qnli-dev_cosine_f1_threshold": 0.7741116285324097, | |
| "eval_Qnli-dev_cosine_precision": 0.468, | |
| "eval_Qnli-dev_cosine_recall": 0.9915254237288136, | |
| "eval_Qnli-dev_dot_accuracy": 0.56640625, | |
| "eval_Qnli-dev_dot_accuracy_threshold": 750.123779296875, | |
| "eval_Qnli-dev_dot_ap": 0.5198069471819926, | |
| "eval_Qnli-dev_dot_f1": 0.6318607764390897, | |
| "eval_Qnli-dev_dot_f1_threshold": 387.37371826171875, | |
| "eval_Qnli-dev_dot_precision": 0.461839530332681, | |
| "eval_Qnli-dev_dot_recall": 1.0, | |
| "eval_Qnli-dev_euclidean_accuracy": 0.6171875, | |
| "eval_Qnli-dev_euclidean_accuracy_threshold": 7.253280162811279, | |
| "eval_Qnli-dev_euclidean_ap": 0.5993562103857255, | |
| "eval_Qnli-dev_euclidean_f1": 0.6376021798365122, | |
| "eval_Qnli-dev_euclidean_f1_threshold": 17.42431640625, | |
| "eval_Qnli-dev_euclidean_precision": 0.46987951807228917, | |
| "eval_Qnli-dev_euclidean_recall": 0.9915254237288136, | |
| "eval_Qnli-dev_manhattan_accuracy": 0.625, | |
| "eval_Qnli-dev_manhattan_accuracy_threshold": 212.0604705810547, | |
| "eval_Qnli-dev_manhattan_ap": 0.609409682371795, | |
| "eval_Qnli-dev_manhattan_f1": 0.6358695652173914, | |
| "eval_Qnli-dev_manhattan_f1_threshold": 461.66192626953125, | |
| "eval_Qnli-dev_manhattan_precision": 0.468, | |
| "eval_Qnli-dev_manhattan_recall": 0.9915254237288136, | |
| "eval_Qnli-dev_max_accuracy": 0.625, | |
| "eval_Qnli-dev_max_accuracy_threshold": 750.123779296875, | |
| "eval_Qnli-dev_max_ap": 0.609409682371795, | |
| "eval_Qnli-dev_max_f1": 0.6376021798365122, | |
| "eval_Qnli-dev_max_f1_threshold": 461.66192626953125, | |
| "eval_Qnli-dev_max_precision": 0.46987951807228917, | |
| "eval_Qnli-dev_max_recall": 1.0, | |
| "eval_allNLI-dev_cosine_accuracy": 0.666015625, | |
| "eval_allNLI-dev_cosine_accuracy_threshold": 0.9931652545928955, | |
| "eval_allNLI-dev_cosine_ap": 0.3330277035108763, | |
| "eval_allNLI-dev_cosine_f1": 0.5088235294117648, | |
| "eval_allNLI-dev_cosine_f1_threshold": 0.40660202503204346, | |
| "eval_allNLI-dev_cosine_precision": 0.34122287968441817, | |
| "eval_allNLI-dev_cosine_recall": 1.0, | |
| "eval_allNLI-dev_dot_accuracy": 0.662109375, | |
| "eval_allNLI-dev_dot_accuracy_threshold": 810.7718505859375, | |
| "eval_allNLI-dev_dot_ap": 0.2980492563498839, | |
| "eval_allNLI-dev_dot_f1": 0.5088235294117648, | |
| "eval_allNLI-dev_dot_f1_threshold": 294.67071533203125, | |
| "eval_allNLI-dev_dot_precision": 0.34122287968441817, | |
| "eval_allNLI-dev_dot_recall": 1.0, | |
| "eval_allNLI-dev_euclidean_accuracy": 0.666015625, | |
| "eval_allNLI-dev_euclidean_accuracy_threshold": 3.3851852416992188, | |
| "eval_allNLI-dev_euclidean_ap": 0.33790858355605125, | |
| "eval_allNLI-dev_euclidean_f1": 0.5095729013254787, | |
| "eval_allNLI-dev_euclidean_f1_threshold": 28.78042984008789, | |
| "eval_allNLI-dev_euclidean_precision": 0.34189723320158105, | |
| "eval_allNLI-dev_euclidean_recall": 1.0, | |
| "eval_allNLI-dev_manhattan_accuracy": 0.666015625, | |
| "eval_allNLI-dev_manhattan_accuracy_threshold": 98.2369613647461, | |
| "eval_allNLI-dev_manhattan_ap": 0.3452833632811151, | |
| "eval_allNLI-dev_manhattan_f1": 0.5091463414634146, | |
| "eval_allNLI-dev_manhattan_f1_threshold": 626.5145874023438, | |
| "eval_allNLI-dev_manhattan_precision": 0.34575569358178054, | |
| "eval_allNLI-dev_manhattan_recall": 0.9653179190751445, | |
| "eval_allNLI-dev_max_accuracy": 0.666015625, | |
| "eval_allNLI-dev_max_accuracy_threshold": 810.7718505859375, | |
| "eval_allNLI-dev_max_ap": 0.3452833632811151, | |
| "eval_allNLI-dev_max_f1": 0.5095729013254787, | |
| "eval_allNLI-dev_max_f1_threshold": 626.5145874023438, | |
| "eval_allNLI-dev_max_precision": 0.34575569358178054, | |
| "eval_allNLI-dev_max_recall": 1.0, | |
| "eval_sequential_score": 0.609409682371795, | |
| "eval_sts-test_pearson_cosine": 0.24255956652335486, | |
| "eval_sts-test_pearson_dot": 0.18022025813169615, | |
| "eval_sts-test_pearson_euclidean": 0.28657386934462287, | |
| "eval_sts-test_pearson_manhattan": 0.32587411772454034, | |
| "eval_sts-test_pearson_max": 0.32587411772454034, | |
| "eval_sts-test_spearman_cosine": 0.32225123770630715, | |
| "eval_sts-test_spearman_dot": 0.17184726031108927, | |
| "eval_sts-test_spearman_euclidean": 0.3279675400801376, | |
| "eval_sts-test_spearman_manhattan": 0.3635547276663291, | |
| "eval_sts-test_spearman_max": 0.3635547276663291, | |
| "eval_vitaminc-pairs_loss": 4.122440338134766, | |
| "eval_vitaminc-pairs_runtime": 4.7337, | |
| "eval_vitaminc-pairs_samples_per_second": 27.04, | |
| "eval_vitaminc-pairs_steps_per_second": 0.211, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_negation-triplets_loss": 3.917590379714966, | |
| "eval_negation-triplets_runtime": 3.3679, | |
| "eval_negation-triplets_samples_per_second": 38.006, | |
| "eval_negation-triplets_steps_per_second": 0.297, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_scitail-pairs-pos_loss": 2.5062460899353027, | |
| "eval_scitail-pairs-pos_runtime": 2.7274, | |
| "eval_scitail-pairs-pos_samples_per_second": 46.931, | |
| "eval_scitail-pairs-pos_steps_per_second": 0.367, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_scitail-pairs-qa_loss": 2.978612184524536, | |
| "eval_scitail-pairs-qa_runtime": 2.2452, | |
| "eval_scitail-pairs-qa_samples_per_second": 57.011, | |
| "eval_scitail-pairs-qa_steps_per_second": 0.445, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_xsum-pairs_loss": 5.394700050354004, | |
| "eval_xsum-pairs_runtime": 3.0886, | |
| "eval_xsum-pairs_samples_per_second": 41.442, | |
| "eval_xsum-pairs_steps_per_second": 0.324, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_sciq_pairs_loss": 0.8977569937705994, | |
| "eval_sciq_pairs_runtime": 4.7516, | |
| "eval_sciq_pairs_samples_per_second": 26.938, | |
| "eval_sciq_pairs_steps_per_second": 0.21, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_qasc_pairs_loss": 6.6808977127075195, | |
| "eval_qasc_pairs_runtime": 2.1087, | |
| "eval_qasc_pairs_samples_per_second": 60.701, | |
| "eval_qasc_pairs_steps_per_second": 0.474, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_openbookqa_pairs_loss": 6.402599334716797, | |
| "eval_openbookqa_pairs_runtime": 2.2222, | |
| "eval_openbookqa_pairs_samples_per_second": 57.601, | |
| "eval_openbookqa_pairs_steps_per_second": 0.45, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_msmarco_pairs_loss": 15.782674789428711, | |
| "eval_msmarco_pairs_runtime": 2.307, | |
| "eval_msmarco_pairs_samples_per_second": 55.483, | |
| "eval_msmarco_pairs_steps_per_second": 0.433, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_nq_pairs_loss": 10.447425842285156, | |
| "eval_nq_pairs_runtime": 3.6239, | |
| "eval_nq_pairs_samples_per_second": 35.321, | |
| "eval_nq_pairs_steps_per_second": 0.276, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_trivia_pairs_loss": 6.107333183288574, | |
| "eval_trivia_pairs_runtime": 3.1299, | |
| "eval_trivia_pairs_samples_per_second": 40.895, | |
| "eval_trivia_pairs_steps_per_second": 0.319, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_gooaq_pairs_loss": 8.095273971557617, | |
| "eval_gooaq_pairs_runtime": 2.1098, | |
| "eval_gooaq_pairs_samples_per_second": 60.669, | |
| "eval_gooaq_pairs_steps_per_second": 0.474, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_paws-pos_loss": 1.6077361106872559, | |
| "eval_paws-pos_runtime": 2.2412, | |
| "eval_paws-pos_samples_per_second": 57.112, | |
| "eval_paws-pos_steps_per_second": 0.446, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05012853470437018, | |
| "eval_global_dataset_loss": 6.348417282104492, | |
| "eval_global_dataset_runtime": 8.3683, | |
| "eval_global_dataset_samples_per_second": 38.837, | |
| "eval_global_dataset_steps_per_second": 0.358, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05205655526992288, | |
| "grad_norm": 172.1571807861328, | |
| "learning_rate": 1.1583011583011583e-06, | |
| "loss": 7.9729, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.05784061696658098, | |
| "grad_norm": 106.302001953125, | |
| "learning_rate": 1.3320463320463321e-06, | |
| "loss": 6.0587, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.06362467866323908, | |
| "grad_norm": 72.31057739257812, | |
| "learning_rate": 1.5057915057915057e-06, | |
| "loss": 5.6742, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.06940874035989718, | |
| "grad_norm": 258.2577209472656, | |
| "learning_rate": 1.6795366795366796e-06, | |
| "loss": 6.5406, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.07519280205655526, | |
| "grad_norm": 156.0019073486328, | |
| "learning_rate": 1.7953667953667953e-06, | |
| "loss": 5.4429, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.08097686375321336, | |
| "grad_norm": 227.6062469482422, | |
| "learning_rate": 1.969111969111969e-06, | |
| "loss": 6.7855, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.08676092544987146, | |
| "grad_norm": 221.12347412109375, | |
| "learning_rate": 2.1428571428571427e-06, | |
| "loss": 5.3403, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.09254498714652956, | |
| "grad_norm": 363.6319274902344, | |
| "learning_rate": 2.3166023166023166e-06, | |
| "loss": 4.2282, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.09832904884318766, | |
| "grad_norm": 119.22310638427734, | |
| "learning_rate": 2.4903474903474904e-06, | |
| "loss": 4.7411, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_Qnli-dev_cosine_accuracy": 0.58984375, | |
| "eval_Qnli-dev_cosine_accuracy_threshold": 0.9407284259796143, | |
| "eval_Qnli-dev_cosine_ap": 0.5957496162735323, | |
| "eval_Qnli-dev_cosine_f1": 0.6314363143631436, | |
| "eval_Qnli-dev_cosine_f1_threshold": 0.7475543022155762, | |
| "eval_Qnli-dev_cosine_precision": 0.4641434262948207, | |
| "eval_Qnli-dev_cosine_recall": 0.9872881355932204, | |
| "eval_Qnli-dev_dot_accuracy": 0.591796875, | |
| "eval_Qnli-dev_dot_accuracy_threshold": 787.171630859375, | |
| "eval_Qnli-dev_dot_ap": 0.556540262374452, | |
| "eval_Qnli-dev_dot_f1": 0.6328767123287672, | |
| "eval_Qnli-dev_dot_f1_threshold": 576.0565795898438, | |
| "eval_Qnli-dev_dot_precision": 0.4676113360323887, | |
| "eval_Qnli-dev_dot_recall": 0.9788135593220338, | |
| "eval_Qnli-dev_euclidean_accuracy": 0.587890625, | |
| "eval_Qnli-dev_euclidean_accuracy_threshold": 9.92269515991211, | |
| "eval_Qnli-dev_euclidean_ap": 0.5913529071853763, | |
| "eval_Qnli-dev_euclidean_f1": 0.6322930800542741, | |
| "eval_Qnli-dev_euclidean_f1_threshold": 19.546676635742188, | |
| "eval_Qnli-dev_euclidean_precision": 0.46506986027944114, | |
| "eval_Qnli-dev_euclidean_recall": 0.9872881355932204, | |
| "eval_Qnli-dev_manhattan_accuracy": 0.591796875, | |
| "eval_Qnli-dev_manhattan_accuracy_threshold": 314.9248352050781, | |
| "eval_Qnli-dev_manhattan_ap": 0.5944730102616272, | |
| "eval_Qnli-dev_manhattan_f1": 0.632952691680261, | |
| "eval_Qnli-dev_manhattan_f1_threshold": 424.9676818847656, | |
| "eval_Qnli-dev_manhattan_precision": 0.5145888594164456, | |
| "eval_Qnli-dev_manhattan_recall": 0.8220338983050848, | |
| "eval_Qnli-dev_max_accuracy": 0.591796875, | |
| "eval_Qnli-dev_max_accuracy_threshold": 787.171630859375, | |
| "eval_Qnli-dev_max_ap": 0.5957496162735323, | |
| "eval_Qnli-dev_max_f1": 0.632952691680261, | |
| "eval_Qnli-dev_max_f1_threshold": 576.0565795898438, | |
| "eval_Qnli-dev_max_precision": 0.5145888594164456, | |
| "eval_Qnli-dev_max_recall": 0.9872881355932204, | |
| "eval_allNLI-dev_cosine_accuracy": 0.666015625, | |
| "eval_allNLI-dev_cosine_accuracy_threshold": 0.9878466129302979, | |
| "eval_allNLI-dev_cosine_ap": 0.3898337405414008, | |
| "eval_allNLI-dev_cosine_f1": 0.5065885797950219, | |
| "eval_allNLI-dev_cosine_f1_threshold": 0.6534674167633057, | |
| "eval_allNLI-dev_cosine_precision": 0.3392156862745098, | |
| "eval_allNLI-dev_cosine_recall": 1.0, | |
| "eval_allNLI-dev_dot_accuracy": 0.666015625, | |
| "eval_allNLI-dev_dot_accuracy_threshold": 925.5989990234375, | |
| "eval_allNLI-dev_dot_ap": 0.3391870470132937, | |
| "eval_allNLI-dev_dot_f1": 0.5046153846153847, | |
| "eval_allNLI-dev_dot_f1_threshold": 608.7012939453125, | |
| "eval_allNLI-dev_dot_precision": 0.3438155136268344, | |
| "eval_allNLI-dev_dot_recall": 0.9479768786127167, | |
| "eval_allNLI-dev_euclidean_accuracy": 0.666015625, | |
| "eval_allNLI-dev_euclidean_accuracy_threshold": 4.708977699279785, | |
| "eval_allNLI-dev_euclidean_ap": 0.39556533875401867, | |
| "eval_allNLI-dev_euclidean_f1": 0.5080763582966226, | |
| "eval_allNLI-dev_euclidean_f1_threshold": 23.160057067871094, | |
| "eval_allNLI-dev_euclidean_precision": 0.3405511811023622, | |
| "eval_allNLI-dev_euclidean_recall": 1.0, | |
| "eval_allNLI-dev_manhattan_accuracy": 0.666015625, | |
| "eval_allNLI-dev_manhattan_accuracy_threshold": 142.7260284423828, | |
| "eval_allNLI-dev_manhattan_ap": 0.4050661514301679, | |
| "eval_allNLI-dev_manhattan_f1": 0.5118483412322274, | |
| "eval_allNLI-dev_manhattan_f1_threshold": 504.7154846191406, | |
| "eval_allNLI-dev_manhattan_precision": 0.3521739130434783, | |
| "eval_allNLI-dev_manhattan_recall": 0.9364161849710982, | |
| "eval_allNLI-dev_max_accuracy": 0.666015625, | |
| "eval_allNLI-dev_max_accuracy_threshold": 925.5989990234375, | |
| "eval_allNLI-dev_max_ap": 0.4050661514301679, | |
| "eval_allNLI-dev_max_f1": 0.5118483412322274, | |
| "eval_allNLI-dev_max_f1_threshold": 608.7012939453125, | |
| "eval_allNLI-dev_max_precision": 0.3521739130434783, | |
| "eval_allNLI-dev_max_recall": 1.0, | |
| "eval_sequential_score": 0.5957496162735323, | |
| "eval_sts-test_pearson_cosine": 0.5067228548513025, | |
| "eval_sts-test_pearson_dot": 0.28867418819353924, | |
| "eval_sts-test_pearson_euclidean": 0.5315956249457565, | |
| "eval_sts-test_pearson_manhattan": 0.5620907203006935, | |
| "eval_sts-test_pearson_max": 0.5620907203006935, | |
| "eval_sts-test_spearman_cosine": 0.5257409961657202, | |
| "eval_sts-test_spearman_dot": 0.280153994622986, | |
| "eval_sts-test_spearman_euclidean": 0.5306475244646732, | |
| "eval_sts-test_spearman_manhattan": 0.5605804796765843, | |
| "eval_sts-test_spearman_max": 0.5605804796765843, | |
| "eval_vitaminc-pairs_loss": 3.929797887802124, | |
| "eval_vitaminc-pairs_runtime": 4.6018, | |
| "eval_vitaminc-pairs_samples_per_second": 27.815, | |
| "eval_vitaminc-pairs_steps_per_second": 0.217, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_negation-triplets_loss": 3.267047166824341, | |
| "eval_negation-triplets_runtime": 3.2813, | |
| "eval_negation-triplets_samples_per_second": 39.009, | |
| "eval_negation-triplets_steps_per_second": 0.305, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_scitail-pairs-pos_loss": 1.134178876876831, | |
| "eval_scitail-pairs-pos_runtime": 2.6261, | |
| "eval_scitail-pairs-pos_samples_per_second": 48.742, | |
| "eval_scitail-pairs-pos_steps_per_second": 0.381, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_scitail-pairs-qa_loss": 2.0085601806640625, | |
| "eval_scitail-pairs-qa_runtime": 2.2161, | |
| "eval_scitail-pairs-qa_samples_per_second": 57.759, | |
| "eval_scitail-pairs-qa_steps_per_second": 0.451, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_xsum-pairs_loss": 4.462350845336914, | |
| "eval_xsum-pairs_runtime": 3.1126, | |
| "eval_xsum-pairs_samples_per_second": 41.124, | |
| "eval_xsum-pairs_steps_per_second": 0.321, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_sciq_pairs_loss": 0.71834796667099, | |
| "eval_sciq_pairs_runtime": 4.7505, | |
| "eval_sciq_pairs_samples_per_second": 26.944, | |
| "eval_sciq_pairs_steps_per_second": 0.211, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_qasc_pairs_loss": 4.889453887939453, | |
| "eval_qasc_pairs_runtime": 2.1101, | |
| "eval_qasc_pairs_samples_per_second": 60.662, | |
| "eval_qasc_pairs_steps_per_second": 0.474, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_openbookqa_pairs_loss": 3.4273369312286377, | |
| "eval_openbookqa_pairs_runtime": 2.2311, | |
| "eval_openbookqa_pairs_samples_per_second": 57.371, | |
| "eval_openbookqa_pairs_steps_per_second": 0.448, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_msmarco_pairs_loss": 7.532540321350098, | |
| "eval_msmarco_pairs_runtime": 2.2959, | |
| "eval_msmarco_pairs_samples_per_second": 55.751, | |
| "eval_msmarco_pairs_steps_per_second": 0.436, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_nq_pairs_loss": 6.29541015625, | |
| "eval_nq_pairs_runtime": 3.661, | |
| "eval_nq_pairs_samples_per_second": 34.963, | |
| "eval_nq_pairs_steps_per_second": 0.273, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_trivia_pairs_loss": 5.789977073669434, | |
| "eval_trivia_pairs_runtime": 3.0826, | |
| "eval_trivia_pairs_samples_per_second": 41.523, | |
| "eval_trivia_pairs_steps_per_second": 0.324, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_gooaq_pairs_loss": 5.785163879394531, | |
| "eval_gooaq_pairs_runtime": 2.1382, | |
| "eval_gooaq_pairs_samples_per_second": 59.863, | |
| "eval_gooaq_pairs_steps_per_second": 0.468, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_paws-pos_loss": 0.47830095887184143, | |
| "eval_paws-pos_runtime": 2.3113, | |
| "eval_paws-pos_samples_per_second": 55.379, | |
| "eval_paws-pos_steps_per_second": 0.433, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10025706940874037, | |
| "eval_global_dataset_loss": 3.8913991451263428, | |
| "eval_global_dataset_runtime": 8.3623, | |
| "eval_global_dataset_samples_per_second": 38.865, | |
| "eval_global_dataset_steps_per_second": 0.359, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.10411311053984576, | |
| "grad_norm": 55.83478546142578, | |
| "learning_rate": 2.6640926640926642e-06, | |
| "loss": 3.9082, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.10989717223650386, | |
| "grad_norm": 57.4188117980957, | |
| "learning_rate": 2.7799227799227797e-06, | |
| "loss": 4.3922, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.11568123393316196, | |
| "grad_norm": 116.89701843261719, | |
| "learning_rate": 2.9536679536679535e-06, | |
| "loss": 3.2655, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.12146529562982006, | |
| "grad_norm": 53.49580001831055, | |
| "learning_rate": 3.1274131274131274e-06, | |
| "loss": 3.1043, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.12724935732647816, | |
| "grad_norm": 51.044410705566406, | |
| "learning_rate": 3.3011583011583012e-06, | |
| "loss": 2.2074, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.13303341902313626, | |
| "grad_norm": 139.9288787841797, | |
| "learning_rate": 3.474903474903475e-06, | |
| "loss": 1.4414, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.13881748071979436, | |
| "grad_norm": 37.221168518066406, | |
| "learning_rate": 3.5907335907335905e-06, | |
| "loss": 1.5937, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.14460154241645246, | |
| "grad_norm": 25.855661392211914, | |
| "learning_rate": 3.7644787644787644e-06, | |
| "loss": 1.0306, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "grad_norm": 29.79730796813965, | |
| "learning_rate": 3.938223938223938e-06, | |
| "loss": 1.0784, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_Qnli-dev_cosine_accuracy": 0.638671875, | |
| "eval_Qnli-dev_cosine_accuracy_threshold": 0.856202244758606, | |
| "eval_Qnli-dev_cosine_ap": 0.6292803059732273, | |
| "eval_Qnli-dev_cosine_f1": 0.6408839779005525, | |
| "eval_Qnli-dev_cosine_f1_threshold": 0.6206527352333069, | |
| "eval_Qnli-dev_cosine_precision": 0.47540983606557374, | |
| "eval_Qnli-dev_cosine_recall": 0.9830508474576272, | |
| "eval_Qnli-dev_dot_accuracy": 0.626953125, | |
| "eval_Qnli-dev_dot_accuracy_threshold": 887.830810546875, | |
| "eval_Qnli-dev_dot_ap": 0.592075047542057, | |
| "eval_Qnli-dev_dot_f1": 0.640990371389271, | |
| "eval_Qnli-dev_dot_f1_threshold": 601.355224609375, | |
| "eval_Qnli-dev_dot_precision": 0.4745417515274949, | |
| "eval_Qnli-dev_dot_recall": 0.9872881355932204, | |
| "eval_Qnli-dev_euclidean_accuracy": 0.638671875, | |
| "eval_Qnli-dev_euclidean_accuracy_threshold": 17.094371795654297, | |
| "eval_Qnli-dev_euclidean_ap": 0.6289192660680781, | |
| "eval_Qnli-dev_euclidean_f1": 0.6423357664233577, | |
| "eval_Qnli-dev_euclidean_f1_threshold": 24.894683837890625, | |
| "eval_Qnli-dev_euclidean_precision": 0.48997772828507796, | |
| "eval_Qnli-dev_euclidean_recall": 0.9322033898305084, | |
| "eval_Qnli-dev_manhattan_accuracy": 0.640625, | |
| "eval_Qnli-dev_manhattan_accuracy_threshold": 526.0111083984375, | |
| "eval_Qnli-dev_manhattan_ap": 0.6256143574081243, | |
| "eval_Qnli-dev_manhattan_f1": 0.6417704011065007, | |
| "eval_Qnli-dev_manhattan_f1_threshold": 853.242431640625, | |
| "eval_Qnli-dev_manhattan_precision": 0.47638603696098564, | |
| "eval_Qnli-dev_manhattan_recall": 0.9830508474576272, | |
| "eval_Qnli-dev_max_accuracy": 0.640625, | |
| "eval_Qnli-dev_max_accuracy_threshold": 887.830810546875, | |
| "eval_Qnli-dev_max_ap": 0.6292803059732273, | |
| "eval_Qnli-dev_max_f1": 0.6423357664233577, | |
| "eval_Qnli-dev_max_f1_threshold": 853.242431640625, | |
| "eval_Qnli-dev_max_precision": 0.48997772828507796, | |
| "eval_Qnli-dev_max_recall": 0.9872881355932204, | |
| "eval_allNLI-dev_cosine_accuracy": 0.6796875, | |
| "eval_allNLI-dev_cosine_accuracy_threshold": 0.9426460862159729, | |
| "eval_allNLI-dev_cosine_ap": 0.473508692504868, | |
| "eval_allNLI-dev_cosine_f1": 0.5328947368421053, | |
| "eval_allNLI-dev_cosine_f1_threshold": 0.6884603500366211, | |
| "eval_allNLI-dev_cosine_precision": 0.3724137931034483, | |
| "eval_allNLI-dev_cosine_recall": 0.9364161849710982, | |
| "eval_allNLI-dev_dot_accuracy": 0.673828125, | |
| "eval_allNLI-dev_dot_accuracy_threshold": 980.9593505859375, | |
| "eval_allNLI-dev_dot_ap": 0.44188020452374555, | |
| "eval_allNLI-dev_dot_f1": 0.53156146179402, | |
| "eval_allNLI-dev_dot_f1_threshold": 683.7774658203125, | |
| "eval_allNLI-dev_dot_precision": 0.372960372960373, | |
| "eval_allNLI-dev_dot_recall": 0.9248554913294798, | |
| "eval_allNLI-dev_euclidean_accuracy": 0.6796875, | |
| "eval_allNLI-dev_euclidean_accuracy_threshold": 10.647405624389648, | |
| "eval_allNLI-dev_euclidean_ap": 0.47440611686548606, | |
| "eval_allNLI-dev_euclidean_f1": 0.5292207792207793, | |
| "eval_allNLI-dev_euclidean_f1_threshold": 25.08370590209961, | |
| "eval_allNLI-dev_euclidean_precision": 0.36794582392776526, | |
| "eval_allNLI-dev_euclidean_recall": 0.9421965317919075, | |
| "eval_allNLI-dev_manhattan_accuracy": 0.6796875, | |
| "eval_allNLI-dev_manhattan_accuracy_threshold": 318.408203125, | |
| "eval_allNLI-dev_manhattan_ap": 0.4747137405292771, | |
| "eval_allNLI-dev_manhattan_f1": 0.5305343511450381, | |
| "eval_allNLI-dev_manhattan_f1_threshold": 697.8466796875, | |
| "eval_allNLI-dev_manhattan_precision": 0.396011396011396, | |
| "eval_allNLI-dev_manhattan_recall": 0.8034682080924855, | |
| "eval_allNLI-dev_max_accuracy": 0.6796875, | |
| "eval_allNLI-dev_max_accuracy_threshold": 980.9593505859375, | |
| "eval_allNLI-dev_max_ap": 0.4747137405292771, | |
| "eval_allNLI-dev_max_f1": 0.5328947368421053, | |
| "eval_allNLI-dev_max_f1_threshold": 697.8466796875, | |
| "eval_allNLI-dev_max_precision": 0.396011396011396, | |
| "eval_allNLI-dev_max_recall": 0.9421965317919075, | |
| "eval_sequential_score": 0.6292803059732273, | |
| "eval_sts-test_pearson_cosine": 0.8216305666941104, | |
| "eval_sts-test_pearson_dot": 0.7893058557928705, | |
| "eval_sts-test_pearson_euclidean": 0.841701309677291, | |
| "eval_sts-test_pearson_manhattan": 0.8438559225680149, | |
| "eval_sts-test_pearson_max": 0.8438559225680149, | |
| "eval_sts-test_spearman_cosine": 0.8291168678373173, | |
| "eval_sts-test_spearman_dot": 0.7782024179758329, | |
| "eval_sts-test_spearman_euclidean": 0.829029110613729, | |
| "eval_sts-test_spearman_manhattan": 0.8308224279114665, | |
| "eval_sts-test_spearman_max": 0.8308224279114665, | |
| "eval_vitaminc-pairs_loss": 3.763850212097168, | |
| "eval_vitaminc-pairs_runtime": 4.6142, | |
| "eval_vitaminc-pairs_samples_per_second": 27.741, | |
| "eval_vitaminc-pairs_steps_per_second": 0.217, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_negation-triplets_loss": 1.4772971868515015, | |
| "eval_negation-triplets_runtime": 3.4145, | |
| "eval_negation-triplets_samples_per_second": 37.488, | |
| "eval_negation-triplets_steps_per_second": 0.293, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_scitail-pairs-pos_loss": 0.11086193472146988, | |
| "eval_scitail-pairs-pos_runtime": 2.8091, | |
| "eval_scitail-pairs-pos_samples_per_second": 45.565, | |
| "eval_scitail-pairs-pos_steps_per_second": 0.356, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_scitail-pairs-qa_loss": 0.12822097539901733, | |
| "eval_scitail-pairs-qa_runtime": 2.312, | |
| "eval_scitail-pairs-qa_samples_per_second": 55.364, | |
| "eval_scitail-pairs-qa_steps_per_second": 0.433, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_xsum-pairs_loss": 0.43598121404647827, | |
| "eval_xsum-pairs_runtime": 3.1471, | |
| "eval_xsum-pairs_samples_per_second": 40.673, | |
| "eval_xsum-pairs_steps_per_second": 0.318, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_sciq_pairs_loss": 0.20338551700115204, | |
| "eval_sciq_pairs_runtime": 4.8592, | |
| "eval_sciq_pairs_samples_per_second": 26.342, | |
| "eval_sciq_pairs_steps_per_second": 0.206, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_qasc_pairs_loss": 1.3059492111206055, | |
| "eval_qasc_pairs_runtime": 2.1191, | |
| "eval_qasc_pairs_samples_per_second": 60.403, | |
| "eval_qasc_pairs_steps_per_second": 0.472, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_openbookqa_pairs_loss": 1.1711227893829346, | |
| "eval_openbookqa_pairs_runtime": 2.2105, | |
| "eval_openbookqa_pairs_samples_per_second": 57.907, | |
| "eval_openbookqa_pairs_steps_per_second": 0.452, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_msmarco_pairs_loss": 1.3856205940246582, | |
| "eval_msmarco_pairs_runtime": 2.3065, | |
| "eval_msmarco_pairs_samples_per_second": 55.494, | |
| "eval_msmarco_pairs_steps_per_second": 0.434, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_nq_pairs_loss": 1.2713654041290283, | |
| "eval_nq_pairs_runtime": 3.6729, | |
| "eval_nq_pairs_samples_per_second": 34.85, | |
| "eval_nq_pairs_steps_per_second": 0.272, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_trivia_pairs_loss": 1.082784652709961, | |
| "eval_trivia_pairs_runtime": 3.145, | |
| "eval_trivia_pairs_samples_per_second": 40.7, | |
| "eval_trivia_pairs_steps_per_second": 0.318, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_gooaq_pairs_loss": 0.703183650970459, | |
| "eval_gooaq_pairs_runtime": 2.1975, | |
| "eval_gooaq_pairs_samples_per_second": 58.248, | |
| "eval_gooaq_pairs_steps_per_second": 0.455, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_paws-pos_loss": 0.04489962384104729, | |
| "eval_paws-pos_runtime": 2.3636, | |
| "eval_paws-pos_samples_per_second": 54.155, | |
| "eval_paws-pos_steps_per_second": 0.423, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15038560411311053, | |
| "eval_global_dataset_loss": 0.6713590621948242, | |
| "eval_global_dataset_runtime": 8.4648, | |
| "eval_global_dataset_samples_per_second": 38.394, | |
| "eval_global_dataset_steps_per_second": 0.354, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.15616966580976863, | |
| "grad_norm": 22.163389205932617, | |
| "learning_rate": 4.1119691119691125e-06, | |
| "loss": 0.9674, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.16195372750642673, | |
| "grad_norm": 25.121131896972656, | |
| "learning_rate": 4.2857142857142855e-06, | |
| "loss": 0.9335, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.16773778920308482, | |
| "grad_norm": 44.308528900146484, | |
| "learning_rate": 4.45945945945946e-06, | |
| "loss": 0.8806, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.17352185089974292, | |
| "grad_norm": 9.972362518310547, | |
| "learning_rate": 4.633204633204633e-06, | |
| "loss": 0.631, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.17930591259640102, | |
| "grad_norm": 13.609597206115723, | |
| "learning_rate": 4.806949806949808e-06, | |
| "loss": 0.3384, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.18508997429305912, | |
| "grad_norm": 6.892751693725586, | |
| "learning_rate": 4.980694980694981e-06, | |
| "loss": 0.404, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.19087403598971722, | |
| "grad_norm": 7.414526462554932, | |
| "learning_rate": 5.154440154440155e-06, | |
| "loss": 0.6488, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.19665809768637532, | |
| "grad_norm": 23.75099754333496, | |
| "learning_rate": 5.3281853281853285e-06, | |
| "loss": 0.4728, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_Qnli-dev_cosine_accuracy": 0.693359375, | |
| "eval_Qnli-dev_cosine_accuracy_threshold": 0.8276629447937012, | |
| "eval_Qnli-dev_cosine_ap": 0.7127503402574797, | |
| "eval_Qnli-dev_cosine_f1": 0.6730038022813688, | |
| "eval_Qnli-dev_cosine_f1_threshold": 0.7761298418045044, | |
| "eval_Qnli-dev_cosine_precision": 0.6103448275862069, | |
| "eval_Qnli-dev_cosine_recall": 0.75, | |
| "eval_Qnli-dev_dot_accuracy": 0.681640625, | |
| "eval_Qnli-dev_dot_accuracy_threshold": 857.5540771484375, | |
| "eval_Qnli-dev_dot_ap": 0.6617256136301952, | |
| "eval_Qnli-dev_dot_f1": 0.6678141135972461, | |
| "eval_Qnli-dev_dot_f1_threshold": 756.1309204101562, | |
| "eval_Qnli-dev_dot_precision": 0.5623188405797102, | |
| "eval_Qnli-dev_dot_recall": 0.8220338983050848, | |
| "eval_Qnli-dev_euclidean_accuracy": 0.69140625, | |
| "eval_Qnli-dev_euclidean_accuracy_threshold": 18.45541763305664, | |
| "eval_Qnli-dev_euclidean_ap": 0.7154480594709788, | |
| "eval_Qnli-dev_euclidean_f1": 0.6725043782837128, | |
| "eval_Qnli-dev_euclidean_f1_threshold": 22.723068237304688, | |
| "eval_Qnli-dev_euclidean_precision": 0.573134328358209, | |
| "eval_Qnli-dev_euclidean_recall": 0.8135593220338984, | |
| "eval_Qnli-dev_manhattan_accuracy": 0.69140625, | |
| "eval_Qnli-dev_manhattan_accuracy_threshold": 585.3442993164062, | |
| "eval_Qnli-dev_manhattan_ap": 0.713289230749026, | |
| "eval_Qnli-dev_manhattan_f1": 0.6733333333333333, | |
| "eval_Qnli-dev_manhattan_f1_threshold": 733.2077026367188, | |
| "eval_Qnli-dev_manhattan_precision": 0.554945054945055, | |
| "eval_Qnli-dev_manhattan_recall": 0.8559322033898306, | |
| "eval_Qnli-dev_max_accuracy": 0.693359375, | |
| "eval_Qnli-dev_max_accuracy_threshold": 857.5540771484375, | |
| "eval_Qnli-dev_max_ap": 0.7154480594709788, | |
| "eval_Qnli-dev_max_f1": 0.6733333333333333, | |
| "eval_Qnli-dev_max_f1_threshold": 756.1309204101562, | |
| "eval_Qnli-dev_max_precision": 0.6103448275862069, | |
| "eval_Qnli-dev_max_recall": 0.8559322033898306, | |
| "eval_allNLI-dev_cosine_accuracy": 0.693359375, | |
| "eval_allNLI-dev_cosine_accuracy_threshold": 0.8820043802261353, | |
| "eval_allNLI-dev_cosine_ap": 0.5380653098565689, | |
| "eval_allNLI-dev_cosine_f1": 0.5689655172413792, | |
| "eval_allNLI-dev_cosine_f1_threshold": 0.7308224439620972, | |
| "eval_allNLI-dev_cosine_precision": 0.4536082474226804, | |
| "eval_allNLI-dev_cosine_recall": 0.7630057803468208, | |
| "eval_allNLI-dev_dot_accuracy": 0.6875, | |
| "eval_allNLI-dev_dot_accuracy_threshold": 904.0134887695312, | |
| "eval_allNLI-dev_dot_ap": 0.5175334662627952, | |
| "eval_allNLI-dev_dot_f1": 0.5675675675675675, | |
| "eval_allNLI-dev_dot_f1_threshold": 762.652587890625, | |
| "eval_allNLI-dev_dot_precision": 0.46494464944649444, | |
| "eval_allNLI-dev_dot_recall": 0.7283236994219653, | |
| "eval_allNLI-dev_euclidean_accuracy": 0.6953125, | |
| "eval_allNLI-dev_euclidean_accuracy_threshold": 15.50227165222168, | |
| "eval_allNLI-dev_euclidean_ap": 0.5410854453600531, | |
| "eval_allNLI-dev_euclidean_f1": 0.5645514223194749, | |
| "eval_allNLI-dev_euclidean_f1_threshold": 23.451454162597656, | |
| "eval_allNLI-dev_euclidean_precision": 0.45422535211267606, | |
| "eval_allNLI-dev_euclidean_recall": 0.7456647398843931, | |
| "eval_allNLI-dev_manhattan_accuracy": 0.6953125, | |
| "eval_allNLI-dev_manhattan_accuracy_threshold": 518.75146484375, | |
| "eval_allNLI-dev_manhattan_ap": 0.5430149701961973, | |
| "eval_allNLI-dev_manhattan_f1": 0.5639913232104121, | |
| "eval_allNLI-dev_manhattan_f1_threshold": 733.8897705078125, | |
| "eval_allNLI-dev_manhattan_precision": 0.4513888888888889, | |
| "eval_allNLI-dev_manhattan_recall": 0.7514450867052023, | |
| "eval_allNLI-dev_max_accuracy": 0.6953125, | |
| "eval_allNLI-dev_max_accuracy_threshold": 904.0134887695312, | |
| "eval_allNLI-dev_max_ap": 0.5430149701961973, | |
| "eval_allNLI-dev_max_f1": 0.5689655172413792, | |
| "eval_allNLI-dev_max_f1_threshold": 762.652587890625, | |
| "eval_allNLI-dev_max_precision": 0.46494464944649444, | |
| "eval_allNLI-dev_max_recall": 0.7630057803468208, | |
| "eval_sequential_score": 0.7154480594709788, | |
| "eval_sts-test_pearson_cosine": 0.8841456597076958, | |
| "eval_sts-test_pearson_dot": 0.8597263949983345, | |
| "eval_sts-test_pearson_euclidean": 0.8977156894521804, | |
| "eval_sts-test_pearson_manhattan": 0.8973099980705126, | |
| "eval_sts-test_pearson_max": 0.8977156894521804, | |
| "eval_sts-test_spearman_cosine": 0.8862653474156692, | |
| "eval_sts-test_spearman_dot": 0.8451249310521576, | |
| "eval_sts-test_spearman_euclidean": 0.8866065310325173, | |
| "eval_sts-test_spearman_manhattan": 0.8860841943095571, | |
| "eval_sts-test_spearman_max": 0.8866065310325173, | |
| "eval_vitaminc-pairs_loss": 3.314167022705078, | |
| "eval_vitaminc-pairs_runtime": 4.4886, | |
| "eval_vitaminc-pairs_samples_per_second": 28.517, | |
| "eval_vitaminc-pairs_steps_per_second": 0.223, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_negation-triplets_loss": 1.3400739431381226, | |
| "eval_negation-triplets_runtime": 3.3068, | |
| "eval_negation-triplets_samples_per_second": 38.708, | |
| "eval_negation-triplets_steps_per_second": 0.302, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_scitail-pairs-pos_loss": 0.07263079285621643, | |
| "eval_scitail-pairs-pos_runtime": 2.6213, | |
| "eval_scitail-pairs-pos_samples_per_second": 48.83, | |
| "eval_scitail-pairs-pos_steps_per_second": 0.381, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_scitail-pairs-qa_loss": 0.048478204756975174, | |
| "eval_scitail-pairs-qa_runtime": 2.202, | |
| "eval_scitail-pairs-qa_samples_per_second": 58.13, | |
| "eval_scitail-pairs-qa_steps_per_second": 0.454, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_xsum-pairs_loss": 0.14091075956821442, | |
| "eval_xsum-pairs_runtime": 3.0891, | |
| "eval_xsum-pairs_samples_per_second": 41.436, | |
| "eval_xsum-pairs_steps_per_second": 0.324, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_sciq_pairs_loss": 0.1505264937877655, | |
| "eval_sciq_pairs_runtime": 4.762, | |
| "eval_sciq_pairs_samples_per_second": 26.879, | |
| "eval_sciq_pairs_steps_per_second": 0.21, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_qasc_pairs_loss": 0.4244489073753357, | |
| "eval_qasc_pairs_runtime": 2.0783, | |
| "eval_qasc_pairs_samples_per_second": 61.589, | |
| "eval_qasc_pairs_steps_per_second": 0.481, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_openbookqa_pairs_loss": 0.5923758745193481, | |
| "eval_openbookqa_pairs_runtime": 2.1938, | |
| "eval_openbookqa_pairs_samples_per_second": 58.345, | |
| "eval_openbookqa_pairs_steps_per_second": 0.456, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_msmarco_pairs_loss": 0.5882836580276489, | |
| "eval_msmarco_pairs_runtime": 2.2483, | |
| "eval_msmarco_pairs_samples_per_second": 56.933, | |
| "eval_msmarco_pairs_steps_per_second": 0.445, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_nq_pairs_loss": 0.5424538254737854, | |
| "eval_nq_pairs_runtime": 3.6424, | |
| "eval_nq_pairs_samples_per_second": 35.142, | |
| "eval_nq_pairs_steps_per_second": 0.275, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_trivia_pairs_loss": 0.6437886953353882, | |
| "eval_trivia_pairs_runtime": 3.0809, | |
| "eval_trivia_pairs_samples_per_second": 41.546, | |
| "eval_trivia_pairs_steps_per_second": 0.325, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_gooaq_pairs_loss": 0.22378715872764587, | |
| "eval_gooaq_pairs_runtime": 2.101, | |
| "eval_gooaq_pairs_samples_per_second": 60.924, | |
| "eval_gooaq_pairs_steps_per_second": 0.476, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_paws-pos_loss": 0.03581737354397774, | |
| "eval_paws-pos_runtime": 2.2447, | |
| "eval_paws-pos_samples_per_second": 57.024, | |
| "eval_paws-pos_steps_per_second": 0.445, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "eval_global_dataset_loss": 0.3989756405353546, | |
| "eval_global_dataset_runtime": 8.402, | |
| "eval_global_dataset_samples_per_second": 38.681, | |
| "eval_global_dataset_steps_per_second": 0.357, | |
| "step": 104 | |
| } | |
| ], | |
| "logging_steps": 3, | |
| "max_steps": 1036, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 52, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 64, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |