|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.1415929203539823, |
|
"eval_steps": 40, |
|
"global_step": 160, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0008849557522123894, |
|
      "grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 5.8564, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0017699115044247787, |
|
      "grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 7.1716, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.002654867256637168, |
|
      "grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 5.9095, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0035398230088495575, |
|
"grad_norm": 21.95326805114746, |
|
"learning_rate": 3.5377358490566036e-09, |
|
"loss": 5.0841, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.004424778761061947, |
|
"grad_norm": 16.607179641723633, |
|
"learning_rate": 7.075471698113207e-09, |
|
"loss": 4.0184, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005309734513274336, |
|
"grad_norm": 33.789615631103516, |
|
"learning_rate": 1.0613207547169811e-08, |
|
"loss": 6.2191, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.006194690265486726, |
|
"grad_norm": 28.073551177978516, |
|
"learning_rate": 1.4150943396226414e-08, |
|
"loss": 5.6124, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.007079646017699115, |
|
"grad_norm": 17.365602493286133, |
|
"learning_rate": 1.768867924528302e-08, |
|
"loss": 3.9544, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.007964601769911504, |
|
"grad_norm": 19.384475708007812, |
|
"learning_rate": 2.1226415094339622e-08, |
|
"loss": 4.7149, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.008849557522123894, |
|
"grad_norm": 19.67770004272461, |
|
"learning_rate": 2.4764150943396227e-08, |
|
"loss": 4.9616, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009734513274336283, |
|
"grad_norm": 24.233421325683594, |
|
"learning_rate": 2.830188679245283e-08, |
|
"loss": 5.2794, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.010619469026548672, |
|
      "grad_norm": null,
|
"learning_rate": 2.830188679245283e-08, |
|
"loss": 8.8704, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.011504424778761062, |
|
"grad_norm": 34.37785720825195, |
|
"learning_rate": 3.183962264150943e-08, |
|
"loss": 6.0707, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.012389380530973451, |
|
"grad_norm": 25.11741065979004, |
|
"learning_rate": 3.537735849056604e-08, |
|
"loss": 5.4071, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01327433628318584, |
|
"grad_norm": 53.84364700317383, |
|
"learning_rate": 3.891509433962264e-08, |
|
"loss": 6.9104, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01415929203539823, |
|
"grad_norm": 32.0903434753418, |
|
"learning_rate": 4.2452830188679244e-08, |
|
"loss": 6.0276, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01504424778761062, |
|
"grad_norm": 39.742130279541016, |
|
"learning_rate": 4.599056603773585e-08, |
|
"loss": 6.737, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01592920353982301, |
|
"grad_norm": 45.267417907714844, |
|
"learning_rate": 4.9528301886792454e-08, |
|
"loss": 6.5354, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.016814159292035398, |
|
"grad_norm": 22.39731788635254, |
|
"learning_rate": 5.3066037735849055e-08, |
|
"loss": 5.206, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.017699115044247787, |
|
"grad_norm": 20.858232498168945, |
|
"learning_rate": 5.660377358490566e-08, |
|
"loss": 5.2469, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.018584070796460177, |
|
"grad_norm": 23.96446990966797, |
|
"learning_rate": 6.014150943396226e-08, |
|
"loss": 5.3771, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.019469026548672566, |
|
"grad_norm": 22.945741653442383, |
|
"learning_rate": 6.367924528301887e-08, |
|
"loss": 4.979, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.020353982300884955, |
|
"grad_norm": 15.497300148010254, |
|
"learning_rate": 6.721698113207547e-08, |
|
"loss": 4.7909, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.021238938053097345, |
|
"grad_norm": 20.039024353027344, |
|
"learning_rate": 7.075471698113208e-08, |
|
"loss": 4.9086, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.022123893805309734, |
|
"grad_norm": 21.30576515197754, |
|
"learning_rate": 7.429245283018869e-08, |
|
"loss": 4.8826, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.023008849557522124, |
|
"grad_norm": 64.5285873413086, |
|
"learning_rate": 7.783018867924529e-08, |
|
"loss": 8.2266, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.023893805309734513, |
|
"grad_norm": 59.894893646240234, |
|
"learning_rate": 8.13679245283019e-08, |
|
"loss": 8.3024, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.024778761061946902, |
|
"grad_norm": 25.504356384277344, |
|
"learning_rate": 8.490566037735849e-08, |
|
"loss": 5.8745, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02566371681415929, |
|
"grad_norm": 15.169568061828613, |
|
"learning_rate": 8.84433962264151e-08, |
|
"loss": 4.7298, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02654867256637168, |
|
"grad_norm": 24.09995460510254, |
|
"learning_rate": 9.19811320754717e-08, |
|
"loss": 5.4614, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02743362831858407, |
|
"grad_norm": 28.669275283813477, |
|
"learning_rate": 9.55188679245283e-08, |
|
"loss": 5.8594, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02831858407079646, |
|
"grad_norm": 23.37987518310547, |
|
"learning_rate": 9.905660377358491e-08, |
|
"loss": 5.2401, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02920353982300885, |
|
"grad_norm": 22.815292358398438, |
|
"learning_rate": 1.0259433962264152e-07, |
|
"loss": 5.1579, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03008849557522124, |
|
"grad_norm": 13.775344848632812, |
|
"learning_rate": 1.0613207547169811e-07, |
|
"loss": 5.2181, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.030973451327433628, |
|
"grad_norm": 18.642087936401367, |
|
"learning_rate": 1.0966981132075472e-07, |
|
"loss": 4.6328, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03185840707964602, |
|
"grad_norm": 18.041406631469727, |
|
"learning_rate": 1.1320754716981131e-07, |
|
"loss": 2.121, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03274336283185841, |
|
"grad_norm": 23.423933029174805, |
|
"learning_rate": 1.1674528301886792e-07, |
|
"loss": 5.9026, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.033628318584070796, |
|
"grad_norm": 46.25591278076172, |
|
"learning_rate": 1.2028301886792452e-07, |
|
"loss": 7.3796, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.034513274336283185, |
|
"grad_norm": 20.376422882080078, |
|
"learning_rate": 1.2382075471698114e-07, |
|
"loss": 5.5361, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"grad_norm": 12.82562255859375, |
|
"learning_rate": 1.2735849056603773e-07, |
|
"loss": 4.0243, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_Qnli-dev_cosine_accuracy": 0.5859375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9302856922149658, |
|
"eval_Qnli-dev_cosine_ap": 0.5480269179285036, |
|
"eval_Qnli-dev_cosine_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7634451389312744, |
|
"eval_Qnli-dev_cosine_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_cosine_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_dot_accuracy": 0.5859375, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 714.4895629882812, |
|
"eval_Qnli-dev_dot_ap": 0.548060663242546, |
|
"eval_Qnli-dev_dot_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_dot_f1_threshold": 586.342529296875, |
|
"eval_Qnli-dev_dot_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_dot_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.5859375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.348224639892578, |
|
"eval_Qnli-dev_euclidean_ap": 0.5480269179285036, |
|
"eval_Qnli-dev_euclidean_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.05518341064453, |
|
"eval_Qnli-dev_euclidean_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_euclidean_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.59765625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 175.22628784179688, |
|
"eval_Qnli-dev_manhattan_ap": 0.5780924813828909, |
|
"eval_Qnli-dev_manhattan_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 334.39178466796875, |
|
"eval_Qnli-dev_manhattan_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_manhattan_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_max_accuracy": 0.59765625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 714.4895629882812, |
|
"eval_Qnli-dev_max_ap": 0.5780924813828909, |
|
"eval_Qnli-dev_max_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_max_f1_threshold": 586.342529296875, |
|
"eval_Qnli-dev_max_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6640625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9888672828674316, |
|
"eval_allNLI-dev_cosine_ap": 0.32886365768247516, |
|
"eval_allNLI-dev_cosine_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7477295398712158, |
|
"eval_allNLI-dev_cosine_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.6640625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 759.483154296875, |
|
"eval_allNLI-dev_dot_ap": 0.3288581611938815, |
|
"eval_allNLI-dev_dot_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_dot_f1_threshold": 574.2760620117188, |
|
"eval_allNLI-dev_dot_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.6640625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 3.8085508346557617, |
|
"eval_allNLI-dev_euclidean_ap": 0.32886365768247516, |
|
"eval_allNLI-dev_euclidean_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 19.684810638427734, |
|
"eval_allNLI-dev_euclidean_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_euclidean_recall": 1.0, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 65.93238830566406, |
|
"eval_allNLI-dev_manhattan_ap": 0.33852594919898543, |
|
"eval_allNLI-dev_manhattan_f1": 0.5058479532163743, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 335.4263916015625, |
|
"eval_allNLI-dev_manhattan_precision": 0.3385518590998043, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.6640625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 759.483154296875, |
|
"eval_allNLI-dev_max_ap": 0.33852594919898543, |
|
"eval_allNLI-dev_max_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_max_f1_threshold": 574.2760620117188, |
|
"eval_allNLI-dev_max_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5780924813828909, |
|
"eval_sts-test_pearson_cosine": 0.1533465318414369, |
|
"eval_sts-test_pearson_dot": 0.15333057450060855, |
|
"eval_sts-test_pearson_euclidean": 0.1664717893342273, |
|
"eval_sts-test_pearson_manhattan": 0.20717970064899288, |
|
"eval_sts-test_pearson_max": 0.20717970064899288, |
|
"eval_sts-test_spearman_cosine": 0.18786210334203038, |
|
"eval_sts-test_spearman_dot": 0.1878347337472397, |
|
"eval_sts-test_spearman_euclidean": 0.18786046572196458, |
|
"eval_sts-test_spearman_manhattan": 0.22429466463153608, |
|
"eval_sts-test_spearman_max": 0.22429466463153608, |
|
"eval_vitaminc-pairs_loss": 2.901831865310669, |
|
"eval_vitaminc-pairs_runtime": 4.078, |
|
"eval_vitaminc-pairs_samples_per_second": 31.388, |
|
"eval_vitaminc-pairs_steps_per_second": 0.245, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_negation-triplets_loss": 5.690315246582031, |
|
"eval_negation-triplets_runtime": 0.7141, |
|
"eval_negation-triplets_samples_per_second": 179.254, |
|
"eval_negation-triplets_steps_per_second": 1.4, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_scitail-pairs-pos_loss": 2.1135852336883545, |
|
"eval_scitail-pairs-pos_runtime": 0.8282, |
|
"eval_scitail-pairs-pos_samples_per_second": 154.543, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.207, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_scitail-pairs-qa_loss": 2.8052029609680176, |
|
"eval_scitail-pairs-qa_runtime": 0.5471, |
|
"eval_scitail-pairs-qa_samples_per_second": 233.943, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.828, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_xsum-pairs_loss": 6.583061695098877, |
|
"eval_xsum-pairs_runtime": 2.8921, |
|
"eval_xsum-pairs_samples_per_second": 44.259, |
|
"eval_xsum-pairs_steps_per_second": 0.346, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_sciq_pairs_loss": 0.8882207870483398, |
|
"eval_sciq_pairs_runtime": 3.7993, |
|
"eval_sciq_pairs_samples_per_second": 33.69, |
|
"eval_sciq_pairs_steps_per_second": 0.263, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_qasc_pairs_loss": 4.1147541999816895, |
|
"eval_qasc_pairs_runtime": 0.6768, |
|
"eval_qasc_pairs_samples_per_second": 189.125, |
|
"eval_qasc_pairs_steps_per_second": 1.478, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_openbookqa_pairs_loss": 5.096628665924072, |
|
"eval_openbookqa_pairs_runtime": 0.5776, |
|
"eval_openbookqa_pairs_samples_per_second": 221.615, |
|
"eval_openbookqa_pairs_steps_per_second": 1.731, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_msmarco_pairs_loss": 10.391141891479492, |
|
"eval_msmarco_pairs_runtime": 1.2577, |
|
"eval_msmarco_pairs_samples_per_second": 101.77, |
|
"eval_msmarco_pairs_steps_per_second": 0.795, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_nq_pairs_loss": 10.903197288513184, |
|
"eval_nq_pairs_runtime": 2.5051, |
|
"eval_nq_pairs_samples_per_second": 51.095, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_trivia_pairs_loss": 7.190384387969971, |
|
"eval_trivia_pairs_runtime": 3.6482, |
|
"eval_trivia_pairs_samples_per_second": 35.085, |
|
"eval_trivia_pairs_steps_per_second": 0.274, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_gooaq_pairs_loss": 8.193528175354004, |
|
"eval_gooaq_pairs_runtime": 0.9648, |
|
"eval_gooaq_pairs_samples_per_second": 132.67, |
|
"eval_gooaq_pairs_steps_per_second": 1.036, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_paws-pos_loss": 1.3942564725875854, |
|
"eval_paws-pos_runtime": 0.6718, |
|
"eval_paws-pos_samples_per_second": 190.538, |
|
"eval_paws-pos_steps_per_second": 1.489, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_global_dataset_loss": 5.671571731567383, |
|
"eval_global_dataset_runtime": 23.0452, |
|
"eval_global_dataset_samples_per_second": 28.77, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.036283185840707964, |
|
"grad_norm": 18.026830673217773, |
|
"learning_rate": 1.3089622641509433e-07, |
|
"loss": 4.9072, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03716814159292035, |
|
"grad_norm": 15.423810958862305, |
|
"learning_rate": 1.3443396226415095e-07, |
|
"loss": 3.4439, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03805309734513274, |
|
"grad_norm": 16.31403160095215, |
|
"learning_rate": 1.3797169811320754e-07, |
|
"loss": 4.9787, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03893805309734513, |
|
"grad_norm": 21.37955093383789, |
|
"learning_rate": 1.4150943396226417e-07, |
|
"loss": 5.8318, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03982300884955752, |
|
"grad_norm": 18.23583984375, |
|
"learning_rate": 1.4504716981132076e-07, |
|
"loss": 5.3226, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04070796460176991, |
|
"grad_norm": 20.878713607788086, |
|
"learning_rate": 1.4858490566037738e-07, |
|
"loss": 5.1181, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0415929203539823, |
|
"grad_norm": 18.71149444580078, |
|
"learning_rate": 1.5212264150943398e-07, |
|
"loss": 4.7834, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04247787610619469, |
|
"grad_norm": 38.85902786254883, |
|
"learning_rate": 1.5566037735849057e-07, |
|
"loss": 6.6303, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04336283185840708, |
|
"grad_norm": 37.41562271118164, |
|
"learning_rate": 1.591981132075472e-07, |
|
"loss": 5.8171, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04424778761061947, |
|
"grad_norm": 17.541080474853516, |
|
"learning_rate": 1.627358490566038e-07, |
|
"loss": 5.1962, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04513274336283186, |
|
"grad_norm": 16.145116806030273, |
|
"learning_rate": 1.6627358490566038e-07, |
|
"loss": 5.2096, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04601769911504425, |
|
"grad_norm": 20.175189971923828, |
|
"learning_rate": 1.6981132075471698e-07, |
|
"loss": 5.0943, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.046902654867256637, |
|
"grad_norm": 13.441214561462402, |
|
"learning_rate": 1.733490566037736e-07, |
|
"loss": 4.9038, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.047787610619469026, |
|
"grad_norm": 13.396607398986816, |
|
"learning_rate": 1.768867924528302e-07, |
|
"loss": 4.6479, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.048672566371681415, |
|
"grad_norm": 13.68046760559082, |
|
"learning_rate": 1.804245283018868e-07, |
|
"loss": 5.5098, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.049557522123893805, |
|
"grad_norm": 13.278443336486816, |
|
"learning_rate": 1.839622641509434e-07, |
|
"loss": 4.6979, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.050442477876106194, |
|
"grad_norm": 15.295453071594238, |
|
"learning_rate": 1.875e-07, |
|
"loss": 3.1969, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05132743362831858, |
|
"grad_norm": 12.185781478881836, |
|
"learning_rate": 1.910377358490566e-07, |
|
"loss": 4.4127, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05221238938053097, |
|
"grad_norm": 10.874494552612305, |
|
"learning_rate": 1.9457547169811322e-07, |
|
"loss": 3.7746, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05309734513274336, |
|
"grad_norm": 9.654823303222656, |
|
"learning_rate": 1.9811320754716982e-07, |
|
"loss": 4.5378, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05398230088495575, |
|
"grad_norm": 21.123645782470703, |
|
"learning_rate": 2.016509433962264e-07, |
|
"loss": 5.0209, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05486725663716814, |
|
"grad_norm": 33.47934341430664, |
|
"learning_rate": 2.0518867924528303e-07, |
|
"loss": 6.5936, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05575221238938053, |
|
"grad_norm": 10.2566556930542, |
|
"learning_rate": 2.0872641509433963e-07, |
|
"loss": 4.2315, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05663716814159292, |
|
"grad_norm": 28.198625564575195, |
|
"learning_rate": 2.1226415094339622e-07, |
|
"loss": 6.4269, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05752212389380531, |
|
"grad_norm": 9.386558532714844, |
|
"learning_rate": 2.1580188679245282e-07, |
|
"loss": 4.2644, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0584070796460177, |
|
"grad_norm": 12.687555313110352, |
|
"learning_rate": 2.1933962264150944e-07, |
|
"loss": 5.1388, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05929203539823009, |
|
"grad_norm": 14.834878921508789, |
|
"learning_rate": 2.2287735849056603e-07, |
|
"loss": 5.1852, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06017699115044248, |
|
"grad_norm": 10.888677597045898, |
|
"learning_rate": 2.2641509433962263e-07, |
|
"loss": 4.8057, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.061061946902654866, |
|
"grad_norm": 13.97256851196289, |
|
"learning_rate": 2.2995283018867925e-07, |
|
"loss": 3.1725, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.061946902654867256, |
|
"grad_norm": 11.82534122467041, |
|
"learning_rate": 2.3349056603773584e-07, |
|
"loss": 3.3322, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06283185840707965, |
|
"grad_norm": 16.99266242980957, |
|
"learning_rate": 2.3702830188679244e-07, |
|
"loss": 5.139, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06371681415929203, |
|
"grad_norm": 8.74513053894043, |
|
"learning_rate": 2.4056603773584903e-07, |
|
"loss": 4.307, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06460176991150443, |
|
"grad_norm": 11.715869903564453, |
|
"learning_rate": 2.4410377358490563e-07, |
|
"loss": 5.0133, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06548672566371681, |
|
"grad_norm": 9.844196319580078, |
|
"learning_rate": 2.476415094339623e-07, |
|
"loss": 4.0507, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06637168141592921, |
|
"grad_norm": 12.447444915771484, |
|
"learning_rate": 2.5117924528301887e-07, |
|
"loss": 3.3895, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06725663716814159, |
|
"grad_norm": 23.91596794128418, |
|
"learning_rate": 2.5471698113207547e-07, |
|
"loss": 5.6736, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06814159292035399, |
|
"grad_norm": 9.635603904724121, |
|
"learning_rate": 2.5825471698113206e-07, |
|
"loss": 4.2572, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.06902654867256637, |
|
"grad_norm": 14.971665382385254, |
|
"learning_rate": 2.6179245283018866e-07, |
|
"loss": 3.0796, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06991150442477877, |
|
"grad_norm": 11.226128578186035, |
|
"learning_rate": 2.6533018867924525e-07, |
|
"loss": 5.0199, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"grad_norm": 11.01388931274414, |
|
"learning_rate": 2.688679245283019e-07, |
|
"loss": 4.1414, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_Qnli-dev_cosine_accuracy": 0.591796875, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9258557558059692, |
|
"eval_Qnli-dev_cosine_ap": 0.5585355274462735, |
|
"eval_Qnli-dev_cosine_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.750666618347168, |
|
"eval_Qnli-dev_cosine_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.591796875, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 711.18359375, |
|
"eval_Qnli-dev_dot_ap": 0.5585297234749824, |
|
"eval_Qnli-dev_dot_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_dot_f1_threshold": 576.5970458984375, |
|
"eval_Qnli-dev_dot_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.591796875, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.672666549682617, |
|
"eval_Qnli-dev_euclidean_ap": 0.5585355274462735, |
|
"eval_Qnli-dev_euclidean_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.553747177124023, |
|
"eval_Qnli-dev_euclidean_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.619140625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 188.09068298339844, |
|
"eval_Qnli-dev_manhattan_ap": 0.5898283705050701, |
|
"eval_Qnli-dev_manhattan_f1": 0.6301775147928994, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 237.80462646484375, |
|
"eval_Qnli-dev_manhattan_precision": 0.48409090909090907, |
|
"eval_Qnli-dev_manhattan_recall": 0.902542372881356, |
|
"eval_Qnli-dev_max_accuracy": 0.619140625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 711.18359375, |
|
"eval_Qnli-dev_max_ap": 0.5898283705050701, |
|
"eval_Qnli-dev_max_f1": 0.6301775147928994, |
|
"eval_Qnli-dev_max_f1_threshold": 576.5970458984375, |
|
"eval_Qnli-dev_max_precision": 0.48409090909090907, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.983686089515686, |
|
"eval_allNLI-dev_cosine_ap": 0.34411819659341086, |
|
"eval_allNLI-dev_cosine_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7642872333526611, |
|
"eval_allNLI-dev_cosine_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.666015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 755.60302734375, |
|
"eval_allNLI-dev_dot_ap": 0.344109544232086, |
|
"eval_allNLI-dev_dot_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_dot_f1_threshold": 587.0625, |
|
"eval_allNLI-dev_dot_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.00581693649292, |
|
"eval_allNLI-dev_euclidean_ap": 0.3441246898925644, |
|
"eval_allNLI-dev_euclidean_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 19.022436141967773, |
|
"eval_allNLI-dev_euclidean_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_euclidean_recall": 1.0, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 62.69102096557617, |
|
"eval_allNLI-dev_manhattan_ap": 0.35131239981425566, |
|
"eval_allNLI-dev_manhattan_f1": 0.5058479532163743, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 337.6861877441406, |
|
"eval_allNLI-dev_manhattan_precision": 0.3385518590998043, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 755.60302734375, |
|
"eval_allNLI-dev_max_ap": 0.35131239981425566, |
|
"eval_allNLI-dev_max_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_max_f1_threshold": 587.0625, |
|
"eval_allNLI-dev_max_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5898283705050701, |
|
"eval_sts-test_pearson_cosine": 0.22248205020578934, |
|
"eval_sts-test_pearson_dot": 0.22239084967931927, |
|
"eval_sts-test_pearson_euclidean": 0.2323160413842197, |
|
"eval_sts-test_pearson_manhattan": 0.26632593273308647, |
|
"eval_sts-test_pearson_max": 0.26632593273308647, |
|
"eval_sts-test_spearman_cosine": 0.24802235964390085, |
|
"eval_sts-test_spearman_dot": 0.24791612015173234, |
|
"eval_sts-test_spearman_euclidean": 0.24799036249272113, |
|
"eval_sts-test_spearman_manhattan": 0.2843623073856928, |
|
"eval_sts-test_spearman_max": 0.2843623073856928, |
|
"eval_vitaminc-pairs_loss": 2.7793872356414795, |
|
"eval_vitaminc-pairs_runtime": 3.7649, |
|
"eval_vitaminc-pairs_samples_per_second": 33.998, |
|
"eval_vitaminc-pairs_steps_per_second": 0.266, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_negation-triplets_loss": 4.888970851898193, |
|
"eval_negation-triplets_runtime": 0.7134, |
|
"eval_negation-triplets_samples_per_second": 179.432, |
|
"eval_negation-triplets_steps_per_second": 1.402, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_scitail-pairs-pos_loss": 1.8996644020080566, |
|
"eval_scitail-pairs-pos_runtime": 0.8506, |
|
"eval_scitail-pairs-pos_samples_per_second": 150.477, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.176, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_scitail-pairs-qa_loss": 2.6760551929473877, |
|
"eval_scitail-pairs-qa_runtime": 0.5685, |
|
"eval_scitail-pairs-qa_samples_per_second": 225.171, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.759, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_xsum-pairs_loss": 6.209648609161377, |
|
"eval_xsum-pairs_runtime": 2.9221, |
|
"eval_xsum-pairs_samples_per_second": 43.804, |
|
"eval_xsum-pairs_steps_per_second": 0.342, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_sciq_pairs_loss": 0.7622462511062622, |
|
"eval_sciq_pairs_runtime": 3.7816, |
|
"eval_sciq_pairs_samples_per_second": 33.848, |
|
"eval_sciq_pairs_steps_per_second": 0.264, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_qasc_pairs_loss": 3.3129472732543945, |
|
"eval_qasc_pairs_runtime": 0.6761, |
|
"eval_qasc_pairs_samples_per_second": 189.334, |
|
"eval_qasc_pairs_steps_per_second": 1.479, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_openbookqa_pairs_loss": 4.549765586853027, |
|
"eval_openbookqa_pairs_runtime": 0.5767, |
|
"eval_openbookqa_pairs_samples_per_second": 221.954, |
|
"eval_openbookqa_pairs_steps_per_second": 1.734, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_msmarco_pairs_loss": 7.205582141876221, |
|
"eval_msmarco_pairs_runtime": 1.2621, |
|
"eval_msmarco_pairs_samples_per_second": 101.416, |
|
"eval_msmarco_pairs_steps_per_second": 0.792, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_nq_pairs_loss": 7.680945873260498, |
|
"eval_nq_pairs_runtime": 2.5052, |
|
"eval_nq_pairs_samples_per_second": 51.095, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_trivia_pairs_loss": 6.37924861907959, |
|
"eval_trivia_pairs_runtime": 3.6293, |
|
"eval_trivia_pairs_samples_per_second": 35.268, |
|
"eval_trivia_pairs_steps_per_second": 0.276, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_gooaq_pairs_loss": 6.656675338745117, |
|
"eval_gooaq_pairs_runtime": 0.9698, |
|
"eval_gooaq_pairs_samples_per_second": 131.988, |
|
"eval_gooaq_pairs_steps_per_second": 1.031, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_paws-pos_loss": 1.3848179578781128, |
|
"eval_paws-pos_runtime": 0.6727, |
|
"eval_paws-pos_samples_per_second": 190.278, |
|
"eval_paws-pos_steps_per_second": 1.487, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_global_dataset_loss": 5.002967834472656, |
|
"eval_global_dataset_runtime": 23.048, |
|
"eval_global_dataset_samples_per_second": 28.766, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07168141592920355, |
|
"grad_norm": 18.9890193939209, |
|
"learning_rate": 2.724056603773585e-07, |
|
"loss": 5.8604, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07256637168141593, |
|
"grad_norm": 8.206193923950195, |
|
"learning_rate": 2.759433962264151e-07, |
|
"loss": 4.3003, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07345132743362832, |
|
"grad_norm": 10.03178882598877, |
|
"learning_rate": 2.794811320754717e-07, |
|
"loss": 4.4568, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.0743362831858407, |
|
"grad_norm": 14.74673080444336, |
|
"learning_rate": 2.8301886792452833e-07, |
|
"loss": 4.2747, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.0752212389380531, |
|
"grad_norm": 19.097232818603516, |
|
"learning_rate": 2.865566037735849e-07, |
|
"loss": 5.52, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07610619469026549, |
|
"grad_norm": 14.828218460083008, |
|
"learning_rate": 2.900943396226415e-07, |
|
"loss": 2.7767, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07699115044247788, |
|
"grad_norm": 9.30789566040039, |
|
"learning_rate": 2.936320754716981e-07, |
|
"loss": 4.397, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07787610619469026, |
|
"grad_norm": 15.119461059570312, |
|
"learning_rate": 2.9716981132075476e-07, |
|
"loss": 5.4449, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.07876106194690266, |
|
"grad_norm": 8.459301948547363, |
|
"learning_rate": 3.0070754716981136e-07, |
|
"loss": 4.2706, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.07964601769911504, |
|
"grad_norm": 23.59125518798828, |
|
"learning_rate": 3.0424528301886795e-07, |
|
"loss": 6.4759, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08053097345132744, |
|
"grad_norm": 8.729449272155762, |
|
"learning_rate": 3.0778301886792455e-07, |
|
"loss": 4.1951, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.08141592920353982, |
|
"grad_norm": 8.37271785736084, |
|
"learning_rate": 3.1132075471698114e-07, |
|
"loss": 4.6328, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.08230088495575222, |
|
"grad_norm": 10.029474258422852, |
|
"learning_rate": 3.1485849056603774e-07, |
|
"loss": 4.1278, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0831858407079646, |
|
"grad_norm": 8.706567764282227, |
|
"learning_rate": 3.183962264150944e-07, |
|
"loss": 4.1787, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.084070796460177, |
|
"grad_norm": 13.88837718963623, |
|
"learning_rate": 3.21933962264151e-07, |
|
"loss": 5.2156, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08495575221238938, |
|
"grad_norm": 12.01068115234375, |
|
"learning_rate": 3.254716981132076e-07, |
|
"loss": 3.1403, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08584070796460178, |
|
"grad_norm": 8.432968139648438, |
|
"learning_rate": 3.2900943396226417e-07, |
|
"loss": 4.0273, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08672566371681416, |
|
"grad_norm": 12.645098686218262, |
|
"learning_rate": 3.3254716981132077e-07, |
|
"loss": 3.0624, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08761061946902655, |
|
"grad_norm": 11.483688354492188, |
|
"learning_rate": 3.3608490566037736e-07, |
|
"loss": 4.6786, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.08849557522123894, |
|
"grad_norm": 8.645537376403809, |
|
"learning_rate": 3.3962264150943395e-07, |
|
"loss": 4.1505, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08938053097345133, |
|
"grad_norm": 13.053335189819336, |
|
"learning_rate": 3.431603773584906e-07, |
|
"loss": 2.9529, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.09026548672566372, |
|
"grad_norm": 14.494400978088379, |
|
"learning_rate": 3.466981132075472e-07, |
|
"loss": 4.7048, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.09115044247787611, |
|
"grad_norm": 9.513616561889648, |
|
"learning_rate": 3.502358490566038e-07, |
|
"loss": 4.7388, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.0920353982300885, |
|
"grad_norm": 9.751347541809082, |
|
"learning_rate": 3.537735849056604e-07, |
|
"loss": 3.7879, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.09292035398230089, |
|
"grad_norm": 9.06558895111084, |
|
"learning_rate": 3.57311320754717e-07, |
|
"loss": 4.0311, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.09380530973451327, |
|
"grad_norm": 9.53257942199707, |
|
"learning_rate": 3.608490566037736e-07, |
|
"loss": 4.1314, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09469026548672567, |
|
"grad_norm": 11.554676055908203, |
|
"learning_rate": 3.643867924528302e-07, |
|
"loss": 4.9411, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.09557522123893805, |
|
"grad_norm": 8.559597969055176, |
|
"learning_rate": 3.679245283018868e-07, |
|
"loss": 4.1118, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09646017699115045, |
|
"grad_norm": 10.008039474487305, |
|
"learning_rate": 3.714622641509434e-07, |
|
"loss": 3.6971, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09734513274336283, |
|
"grad_norm": 16.543254852294922, |
|
"learning_rate": 3.75e-07, |
|
"loss": 5.605, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09823008849557523, |
|
"grad_norm": 11.816540718078613, |
|
"learning_rate": 3.7853773584905666e-07, |
|
"loss": 3.4563, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09911504424778761, |
|
"grad_norm": 10.638028144836426, |
|
"learning_rate": 3.820754716981132e-07, |
|
"loss": 3.7422, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 8.5276460647583, |
|
"learning_rate": 3.8561320754716985e-07, |
|
"loss": 3.8055, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.10088495575221239, |
|
"grad_norm": 13.437420845031738, |
|
"learning_rate": 3.8915094339622644e-07, |
|
"loss": 5.2369, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.10176991150442478, |
|
"grad_norm": 21.039424896240234, |
|
"learning_rate": 3.926886792452831e-07, |
|
"loss": 5.6518, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.10265486725663717, |
|
"grad_norm": 13.487382888793945, |
|
"learning_rate": 3.9622641509433963e-07, |
|
"loss": 3.2906, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.10353982300884956, |
|
"grad_norm": 11.895822525024414, |
|
"learning_rate": 3.997641509433963e-07, |
|
"loss": 3.4996, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.10442477876106195, |
|
"grad_norm": 10.83902359008789, |
|
"learning_rate": 4.033018867924528e-07, |
|
"loss": 3.6283, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.10530973451327434, |
|
"grad_norm": 10.552660942077637, |
|
"learning_rate": 4.0683962264150947e-07, |
|
"loss": 4.1487, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"grad_norm": 9.924088478088379, |
|
"learning_rate": 4.1037735849056606e-07, |
|
"loss": 4.3996, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_Qnli-dev_cosine_accuracy": 0.595703125, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9275249242782593, |
|
"eval_Qnli-dev_cosine_ap": 0.5645920090286662, |
|
"eval_Qnli-dev_cosine_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7267085313796997, |
|
"eval_Qnli-dev_cosine_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_cosine_recall": 1.0, |
|
"eval_Qnli-dev_dot_accuracy": 0.595703125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 712.4608154296875, |
|
"eval_Qnli-dev_dot_ap": 0.5646837736357366, |
|
"eval_Qnli-dev_dot_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_dot_f1_threshold": 558.2177734375, |
|
"eval_Qnli-dev_dot_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_dot_recall": 1.0, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.595703125, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.551876068115234, |
|
"eval_Qnli-dev_euclidean_ap": 0.5645997569733668, |
|
"eval_Qnli-dev_euclidean_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 20.490163803100586, |
|
"eval_Qnli-dev_euclidean_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_euclidean_recall": 1.0, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.626953125, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 195.12744140625, |
|
"eval_Qnli-dev_manhattan_ap": 0.5975206086733145, |
|
"eval_Qnli-dev_manhattan_f1": 0.6322008862629247, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 256.6172180175781, |
|
"eval_Qnli-dev_manhattan_precision": 0.4852607709750567, |
|
"eval_Qnli-dev_manhattan_recall": 0.9067796610169492, |
|
"eval_Qnli-dev_max_accuracy": 0.626953125, |
|
"eval_Qnli-dev_max_accuracy_threshold": 712.4608154296875, |
|
"eval_Qnli-dev_max_ap": 0.5975206086733145, |
|
"eval_Qnli-dev_max_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_max_f1_threshold": 558.2177734375, |
|
"eval_Qnli-dev_max_precision": 0.4852607709750567, |
|
"eval_Qnli-dev_max_recall": 1.0, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.983871340751648, |
|
"eval_allNLI-dev_cosine_ap": 0.36035507065342104, |
|
"eval_allNLI-dev_cosine_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7787582874298096, |
|
"eval_allNLI-dev_cosine_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_cosine_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_dot_accuracy": 0.666015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 755.7670288085938, |
|
"eval_allNLI-dev_dot_ap": 0.36031241443166284, |
|
"eval_allNLI-dev_dot_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_dot_f1_threshold": 598.2041625976562, |
|
"eval_allNLI-dev_dot_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_dot_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 4.964720249176025, |
|
"eval_allNLI-dev_euclidean_ap": 0.36035507065342104, |
|
"eval_allNLI-dev_euclidean_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 18.434789657592773, |
|
"eval_allNLI-dev_euclidean_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_euclidean_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 66.59053039550781, |
|
"eval_allNLI-dev_manhattan_ap": 0.3692975841596879, |
|
"eval_allNLI-dev_manhattan_f1": 0.5029239766081871, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 380.123779296875, |
|
"eval_allNLI-dev_manhattan_precision": 0.33659491193737767, |
|
"eval_allNLI-dev_manhattan_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 755.7670288085938, |
|
"eval_allNLI-dev_max_ap": 0.3692975841596879, |
|
"eval_allNLI-dev_max_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_max_f1_threshold": 598.2041625976562, |
|
"eval_allNLI-dev_max_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_max_recall": 0.9942196531791907, |
|
"eval_sequential_score": 0.5975206086733145, |
|
"eval_sts-test_pearson_cosine": 0.2980667522290251, |
|
"eval_sts-test_pearson_dot": 0.29795063801865274, |
|
"eval_sts-test_pearson_euclidean": 0.30279956330153407, |
|
"eval_sts-test_pearson_manhattan": 0.32939035635624725, |
|
"eval_sts-test_pearson_max": 0.32939035635624725, |
|
"eval_sts-test_spearman_cosine": 0.3148821747085771, |
|
"eval_sts-test_spearman_dot": 0.3149517475826025, |
|
"eval_sts-test_spearman_euclidean": 0.31489636085812106, |
|
"eval_sts-test_spearman_manhattan": 0.34558301612848313, |
|
"eval_sts-test_spearman_max": 0.34558301612848313, |
|
"eval_vitaminc-pairs_loss": 2.727938652038574, |
|
"eval_vitaminc-pairs_runtime": 3.7459, |
|
"eval_vitaminc-pairs_samples_per_second": 34.17, |
|
"eval_vitaminc-pairs_steps_per_second": 0.267, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_negation-triplets_loss": 4.394620418548584, |
|
"eval_negation-triplets_runtime": 0.7078, |
|
"eval_negation-triplets_samples_per_second": 180.852, |
|
"eval_negation-triplets_steps_per_second": 1.413, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_scitail-pairs-pos_loss": 1.4130322933197021, |
|
"eval_scitail-pairs-pos_runtime": 0.8587, |
|
"eval_scitail-pairs-pos_samples_per_second": 149.07, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.165, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_scitail-pairs-qa_loss": 2.1150403022766113, |
|
"eval_scitail-pairs-qa_runtime": 0.549, |
|
"eval_scitail-pairs-qa_samples_per_second": 233.163, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.822, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_xsum-pairs_loss": 6.048598289489746, |
|
"eval_xsum-pairs_runtime": 2.9142, |
|
"eval_xsum-pairs_samples_per_second": 43.923, |
|
"eval_xsum-pairs_steps_per_second": 0.343, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_sciq_pairs_loss": 0.7171850800514221, |
|
"eval_sciq_pairs_runtime": 3.7786, |
|
"eval_sciq_pairs_samples_per_second": 33.875, |
|
"eval_sciq_pairs_steps_per_second": 0.265, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_qasc_pairs_loss": 2.96693754196167, |
|
"eval_qasc_pairs_runtime": 0.6718, |
|
"eval_qasc_pairs_samples_per_second": 190.538, |
|
"eval_qasc_pairs_steps_per_second": 1.489, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_openbookqa_pairs_loss": 4.418018341064453, |
|
"eval_openbookqa_pairs_runtime": 0.577, |
|
"eval_openbookqa_pairs_samples_per_second": 221.852, |
|
"eval_openbookqa_pairs_steps_per_second": 1.733, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_msmarco_pairs_loss": 6.302182197570801, |
|
"eval_msmarco_pairs_runtime": 1.2547, |
|
"eval_msmarco_pairs_samples_per_second": 102.016, |
|
"eval_msmarco_pairs_steps_per_second": 0.797, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_nq_pairs_loss": 6.841231822967529, |
|
"eval_nq_pairs_runtime": 2.5052, |
|
"eval_nq_pairs_samples_per_second": 51.094, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_trivia_pairs_loss": 6.201311111450195, |
|
"eval_trivia_pairs_runtime": 3.6311, |
|
"eval_trivia_pairs_samples_per_second": 35.251, |
|
"eval_trivia_pairs_steps_per_second": 0.275, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_gooaq_pairs_loss": 6.098212718963623, |
|
"eval_gooaq_pairs_runtime": 0.9643, |
|
"eval_gooaq_pairs_samples_per_second": 132.741, |
|
"eval_gooaq_pairs_steps_per_second": 1.037, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_paws-pos_loss": 0.9473956823348999, |
|
"eval_paws-pos_runtime": 0.6684, |
|
"eval_paws-pos_samples_per_second": 191.51, |
|
"eval_paws-pos_steps_per_second": 1.496, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_global_dataset_loss": 4.385201454162598, |
|
"eval_global_dataset_runtime": 23.0455, |
|
"eval_global_dataset_samples_per_second": 28.769, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10707964601769912, |
|
"grad_norm": 12.284002304077148, |
|
"learning_rate": 4.1391509433962266e-07, |
|
"loss": 3.5291, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.1079646017699115, |
|
"grad_norm": 10.567977905273438, |
|
"learning_rate": 4.1745283018867925e-07, |
|
"loss": 3.8232, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1088495575221239, |
|
"grad_norm": 11.508279800415039, |
|
"learning_rate": 4.209905660377359e-07, |
|
"loss": 4.6035, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.10973451327433628, |
|
"grad_norm": 10.180809020996094, |
|
"learning_rate": 4.2452830188679244e-07, |
|
"loss": 3.7607, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.11061946902654868, |
|
"grad_norm": 9.519749641418457, |
|
"learning_rate": 4.280660377358491e-07, |
|
"loss": 3.8461, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11150442477876106, |
|
"grad_norm": 11.971588134765625, |
|
"learning_rate": 4.3160377358490563e-07, |
|
"loss": 3.3413, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.11238938053097346, |
|
"grad_norm": 9.211153984069824, |
|
"learning_rate": 4.351415094339623e-07, |
|
"loss": 4.2777, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.11327433628318584, |
|
"grad_norm": 12.393014907836914, |
|
"learning_rate": 4.386792452830189e-07, |
|
"loss": 4.3597, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11415929203539824, |
|
"grad_norm": 14.332024574279785, |
|
"learning_rate": 4.422169811320755e-07, |
|
"loss": 3.9046, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11504424778761062, |
|
"grad_norm": 10.091246604919434, |
|
"learning_rate": 4.4575471698113207e-07, |
|
"loss": 4.0527, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11592920353982301, |
|
"grad_norm": 15.043377876281738, |
|
"learning_rate": 4.492924528301887e-07, |
|
"loss": 5.0883, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1168141592920354, |
|
"grad_norm": 12.942100524902344, |
|
"learning_rate": 4.5283018867924526e-07, |
|
"loss": 3.8308, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11769911504424779, |
|
"grad_norm": 11.961737632751465, |
|
"learning_rate": 4.563679245283019e-07, |
|
"loss": 3.572, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11858407079646018, |
|
"grad_norm": 12.325026512145996, |
|
"learning_rate": 4.599056603773585e-07, |
|
"loss": 3.4299, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.11946902654867257, |
|
"grad_norm": 12.118773460388184, |
|
"learning_rate": 4.6344339622641515e-07, |
|
"loss": 4.1541, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.12035398230088495, |
|
"grad_norm": 11.99026107788086, |
|
"learning_rate": 4.669811320754717e-07, |
|
"loss": 3.584, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.12123893805309735, |
|
"grad_norm": 15.083515167236328, |
|
"learning_rate": 4.7051886792452834e-07, |
|
"loss": 5.0977, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.12212389380530973, |
|
"grad_norm": 15.059394836425781, |
|
"learning_rate": 4.740566037735849e-07, |
|
"loss": 4.6769, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.12300884955752213, |
|
"grad_norm": 8.864882469177246, |
|
"learning_rate": 4.775943396226415e-07, |
|
"loss": 3.8396, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.12389380530973451, |
|
"grad_norm": 12.116555213928223, |
|
"learning_rate": 4.811320754716981e-07, |
|
"loss": 3.2875, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12477876106194691, |
|
"grad_norm": 14.214646339416504, |
|
"learning_rate": 4.846698113207547e-07, |
|
"loss": 4.1946, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1256637168141593, |
|
"grad_norm": 16.207908630371094, |
|
"learning_rate": 4.882075471698113e-07, |
|
"loss": 4.9602, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.12654867256637167, |
|
"grad_norm": 11.662668228149414, |
|
"learning_rate": 4.917452830188679e-07, |
|
"loss": 4.1531, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.12743362831858407, |
|
"grad_norm": 12.429448127746582, |
|
"learning_rate": 4.952830188679246e-07, |
|
"loss": 3.8351, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12831858407079647, |
|
"grad_norm": 11.522616386413574, |
|
"learning_rate": 4.988207547169812e-07, |
|
"loss": 3.112, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12920353982300886, |
|
"grad_norm": 14.556803703308105, |
|
"learning_rate": 5.023584905660377e-07, |
|
"loss": 2.3145, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.13008849557522123, |
|
"grad_norm": 12.348714828491211, |
|
"learning_rate": 5.058962264150944e-07, |
|
"loss": 4.0989, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.13097345132743363, |
|
"grad_norm": 13.150403022766113, |
|
"learning_rate": 5.094339622641509e-07, |
|
"loss": 3.2173, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.13185840707964602, |
|
"grad_norm": 12.066205978393555, |
|
"learning_rate": 5.129716981132076e-07, |
|
"loss": 2.7913, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.13274336283185842, |
|
"grad_norm": 11.519116401672363, |
|
"learning_rate": 5.165094339622641e-07, |
|
"loss": 3.7627, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1336283185840708, |
|
"grad_norm": 12.59196662902832, |
|
"learning_rate": 5.200471698113208e-07, |
|
"loss": 3.3669, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.13451327433628318, |
|
"grad_norm": 13.791536331176758, |
|
"learning_rate": 5.235849056603773e-07, |
|
"loss": 2.6775, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.13539823008849558, |
|
"grad_norm": 11.906597137451172, |
|
"learning_rate": 5.27122641509434e-07, |
|
"loss": 3.2804, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.13628318584070798, |
|
"grad_norm": 11.267363548278809, |
|
"learning_rate": 5.306603773584905e-07, |
|
"loss": 3.0676, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.13716814159292035, |
|
"grad_norm": 12.373686790466309, |
|
"learning_rate": 5.341981132075471e-07, |
|
"loss": 3.1559, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13805309734513274, |
|
"grad_norm": 13.258451461791992, |
|
"learning_rate": 5.377358490566038e-07, |
|
"loss": 2.6638, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.13893805309734514, |
|
"grad_norm": 12.79727554321289, |
|
"learning_rate": 5.412735849056604e-07, |
|
"loss": 2.8045, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13982300884955753, |
|
"grad_norm": 13.88683032989502, |
|
"learning_rate": 5.44811320754717e-07, |
|
"loss": 4.0568, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.1407079646017699, |
|
"grad_norm": 12.57358169555664, |
|
"learning_rate": 5.483490566037736e-07, |
|
"loss": 2.7554, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"grad_norm": 14.520818710327148, |
|
"learning_rate": 5.518867924528302e-07, |
|
"loss": 3.7407, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_Qnli-dev_cosine_accuracy": 0.62890625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9045097827911377, |
|
"eval_Qnli-dev_cosine_ap": 0.6193527955003784, |
|
"eval_Qnli-dev_cosine_f1": 0.6397415185783522, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.8351442813873291, |
|
"eval_Qnli-dev_cosine_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_cosine_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_dot_accuracy": 0.62890625, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 694.7778930664062, |
|
"eval_Qnli-dev_dot_ap": 0.6194150916988216, |
|
"eval_Qnli-dev_dot_f1": 0.6397415185783522, |
|
"eval_Qnli-dev_dot_f1_threshold": 641.4969482421875, |
|
"eval_Qnli-dev_dot_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_dot_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.62890625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 12.111844062805176, |
|
"eval_Qnli-dev_euclidean_ap": 0.6193576186776235, |
|
"eval_Qnli-dev_euclidean_f1": 0.6397415185783522, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 15.914146423339844, |
|
"eval_Qnli-dev_euclidean_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_euclidean_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.646484375, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 245.2164306640625, |
|
"eval_Qnli-dev_manhattan_ap": 0.6417015148414534, |
|
"eval_Qnli-dev_manhattan_f1": 0.6521060842433698, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 303.317626953125, |
|
"eval_Qnli-dev_manhattan_precision": 0.5160493827160494, |
|
"eval_Qnli-dev_manhattan_recall": 0.885593220338983, |
|
"eval_Qnli-dev_max_accuracy": 0.646484375, |
|
"eval_Qnli-dev_max_accuracy_threshold": 694.7778930664062, |
|
"eval_Qnli-dev_max_ap": 0.6417015148414534, |
|
"eval_Qnli-dev_max_f1": 0.6521060842433698, |
|
"eval_Qnli-dev_max_f1_threshold": 641.4969482421875, |
|
"eval_Qnli-dev_max_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_max_recall": 0.885593220338983, |
|
"eval_allNLI-dev_cosine_accuracy": 0.66796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9767438173294067, |
|
"eval_allNLI-dev_cosine_ap": 0.38624833037583434, |
|
"eval_allNLI-dev_cosine_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8540960550308228, |
|
"eval_allNLI-dev_cosine_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_cosine_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_dot_accuracy": 0.66796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 750.345458984375, |
|
"eval_allNLI-dev_dot_ap": 0.3862261253421553, |
|
"eval_allNLI-dev_dot_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_dot_f1_threshold": 656.0940551757812, |
|
"eval_allNLI-dev_dot_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_dot_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.66796875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.977196216583252, |
|
"eval_allNLI-dev_euclidean_ap": 0.38624380046547035, |
|
"eval_allNLI-dev_euclidean_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.971920013427734, |
|
"eval_allNLI-dev_euclidean_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_euclidean_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 78.52637481689453, |
|
"eval_allNLI-dev_manhattan_ap": 0.3898187083180651, |
|
"eval_allNLI-dev_manhattan_f1": 0.5062388591800357, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 285.7745361328125, |
|
"eval_allNLI-dev_manhattan_precision": 0.36597938144329895, |
|
"eval_allNLI-dev_manhattan_recall": 0.8208092485549133, |
|
"eval_allNLI-dev_max_accuracy": 0.66796875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 750.345458984375, |
|
"eval_allNLI-dev_max_ap": 0.3898187083180651, |
|
"eval_allNLI-dev_max_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_max_f1_threshold": 656.0940551757812, |
|
"eval_allNLI-dev_max_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_max_recall": 0.8208092485549133, |
|
"eval_sequential_score": 0.6417015148414534, |
|
"eval_sts-test_pearson_cosine": 0.2853943019391156, |
|
"eval_sts-test_pearson_dot": 0.28526334639473966, |
|
"eval_sts-test_pearson_euclidean": 0.29405773952219494, |
|
"eval_sts-test_pearson_manhattan": 0.3110310476615048, |
|
"eval_sts-test_pearson_max": 0.3110310476615048, |
|
"eval_sts-test_spearman_cosine": 0.31414239162305135, |
|
"eval_sts-test_spearman_dot": 0.31380407209449446, |
|
"eval_sts-test_spearman_euclidean": 0.3141516551339523, |
|
"eval_sts-test_spearman_manhattan": 0.3366243060620438, |
|
"eval_sts-test_spearman_max": 0.3366243060620438, |
|
"eval_vitaminc-pairs_loss": 2.7439002990722656, |
|
"eval_vitaminc-pairs_runtime": 3.7639, |
|
"eval_vitaminc-pairs_samples_per_second": 34.007, |
|
"eval_vitaminc-pairs_steps_per_second": 0.266, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_negation-triplets_loss": 4.63640022277832, |
|
"eval_negation-triplets_runtime": 0.7072, |
|
"eval_negation-triplets_samples_per_second": 180.999, |
|
"eval_negation-triplets_steps_per_second": 1.414, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_scitail-pairs-pos_loss": 1.0088545083999634, |
|
"eval_scitail-pairs-pos_runtime": 0.8123, |
|
"eval_scitail-pairs-pos_samples_per_second": 157.577, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.231, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_scitail-pairs-qa_loss": 1.1228678226470947, |
|
"eval_scitail-pairs-qa_runtime": 0.5444, |
|
"eval_scitail-pairs-qa_samples_per_second": 235.115, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.837, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_xsum-pairs_loss": 5.4869818687438965, |
|
"eval_xsum-pairs_runtime": 2.8888, |
|
"eval_xsum-pairs_samples_per_second": 44.308, |
|
"eval_xsum-pairs_steps_per_second": 0.346, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_sciq_pairs_loss": 0.628353476524353, |
|
"eval_sciq_pairs_runtime": 3.8061, |
|
"eval_sciq_pairs_samples_per_second": 33.631, |
|
"eval_sciq_pairs_steps_per_second": 0.263, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_qasc_pairs_loss": 2.593322277069092, |
|
"eval_qasc_pairs_runtime": 0.6728, |
|
"eval_qasc_pairs_samples_per_second": 190.241, |
|
"eval_qasc_pairs_steps_per_second": 1.486, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_openbookqa_pairs_loss": 4.394308090209961, |
|
"eval_openbookqa_pairs_runtime": 0.5852, |
|
"eval_openbookqa_pairs_samples_per_second": 218.729, |
|
"eval_openbookqa_pairs_steps_per_second": 1.709, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_msmarco_pairs_loss": 5.656517505645752, |
|
"eval_msmarco_pairs_runtime": 1.2571, |
|
"eval_msmarco_pairs_samples_per_second": 101.822, |
|
"eval_msmarco_pairs_steps_per_second": 0.795, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_nq_pairs_loss": 5.986983776092529, |
|
"eval_nq_pairs_runtime": 2.5075, |
|
"eval_nq_pairs_samples_per_second": 51.047, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_trivia_pairs_loss": 5.694415092468262, |
|
"eval_trivia_pairs_runtime": 3.6302, |
|
"eval_trivia_pairs_samples_per_second": 35.26, |
|
"eval_trivia_pairs_steps_per_second": 0.275, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_gooaq_pairs_loss": 5.3856658935546875, |
|
"eval_gooaq_pairs_runtime": 0.9618, |
|
"eval_gooaq_pairs_samples_per_second": 133.082, |
|
"eval_gooaq_pairs_steps_per_second": 1.04, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_paws-pos_loss": 0.3622308671474457, |
|
"eval_paws-pos_runtime": 0.6678, |
|
"eval_paws-pos_samples_per_second": 191.674, |
|
"eval_paws-pos_steps_per_second": 1.497, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_global_dataset_loss": 3.401135206222534, |
|
"eval_global_dataset_runtime": 23.0422, |
|
"eval_global_dataset_samples_per_second": 28.773, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 160 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3390, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 80, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 42, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|