|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.07079646017699115, |
|
"eval_steps": 40, |
|
"global_step": 80, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0008849557522123894, |
|
"grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 5.8564, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0017699115044247787, |
|
"grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 7.1716, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.002654867256637168, |
|
"grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 5.9095, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0035398230088495575, |
|
"grad_norm": 21.95326805114746, |
|
"learning_rate": 3.5377358490566036e-09, |
|
"loss": 5.0841, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.004424778761061947, |
|
"grad_norm": 16.607179641723633, |
|
"learning_rate": 7.075471698113207e-09, |
|
"loss": 4.0184, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005309734513274336, |
|
"grad_norm": 33.789615631103516, |
|
"learning_rate": 1.0613207547169811e-08, |
|
"loss": 6.2191, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.006194690265486726, |
|
"grad_norm": 28.073551177978516, |
|
"learning_rate": 1.4150943396226414e-08, |
|
"loss": 5.6124, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.007079646017699115, |
|
"grad_norm": 17.365602493286133, |
|
"learning_rate": 1.768867924528302e-08, |
|
"loss": 3.9544, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.007964601769911504, |
|
"grad_norm": 19.384475708007812, |
|
"learning_rate": 2.1226415094339622e-08, |
|
"loss": 4.7149, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.008849557522123894, |
|
"grad_norm": 19.67770004272461, |
|
"learning_rate": 2.4764150943396227e-08, |
|
"loss": 4.9616, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009734513274336283, |
|
"grad_norm": 24.233421325683594, |
|
"learning_rate": 2.830188679245283e-08, |
|
"loss": 5.2794, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.010619469026548672, |
|
"grad_norm": null,
|
"learning_rate": 2.830188679245283e-08, |
|
"loss": 8.8704, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.011504424778761062, |
|
"grad_norm": 34.37785720825195, |
|
"learning_rate": 3.183962264150943e-08, |
|
"loss": 6.0707, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.012389380530973451, |
|
"grad_norm": 25.11741065979004, |
|
"learning_rate": 3.537735849056604e-08, |
|
"loss": 5.4071, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01327433628318584, |
|
"grad_norm": 53.84364700317383, |
|
"learning_rate": 3.891509433962264e-08, |
|
"loss": 6.9104, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01415929203539823, |
|
"grad_norm": 32.0903434753418, |
|
"learning_rate": 4.2452830188679244e-08, |
|
"loss": 6.0276, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01504424778761062, |
|
"grad_norm": 39.742130279541016, |
|
"learning_rate": 4.599056603773585e-08, |
|
"loss": 6.737, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01592920353982301, |
|
"grad_norm": 45.267417907714844, |
|
"learning_rate": 4.9528301886792454e-08, |
|
"loss": 6.5354, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.016814159292035398, |
|
"grad_norm": 22.39731788635254, |
|
"learning_rate": 5.3066037735849055e-08, |
|
"loss": 5.206, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.017699115044247787, |
|
"grad_norm": 20.858232498168945, |
|
"learning_rate": 5.660377358490566e-08, |
|
"loss": 5.2469, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.018584070796460177, |
|
"grad_norm": 23.96446990966797, |
|
"learning_rate": 6.014150943396226e-08, |
|
"loss": 5.3771, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.019469026548672566, |
|
"grad_norm": 22.945741653442383, |
|
"learning_rate": 6.367924528301887e-08, |
|
"loss": 4.979, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.020353982300884955, |
|
"grad_norm": 15.497300148010254, |
|
"learning_rate": 6.721698113207547e-08, |
|
"loss": 4.7909, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.021238938053097345, |
|
"grad_norm": 20.039024353027344, |
|
"learning_rate": 7.075471698113208e-08, |
|
"loss": 4.9086, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.022123893805309734, |
|
"grad_norm": 21.30576515197754, |
|
"learning_rate": 7.429245283018869e-08, |
|
"loss": 4.8826, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.023008849557522124, |
|
"grad_norm": 64.5285873413086, |
|
"learning_rate": 7.783018867924529e-08, |
|
"loss": 8.2266, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.023893805309734513, |
|
"grad_norm": 59.894893646240234, |
|
"learning_rate": 8.13679245283019e-08, |
|
"loss": 8.3024, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.024778761061946902, |
|
"grad_norm": 25.504356384277344, |
|
"learning_rate": 8.490566037735849e-08, |
|
"loss": 5.8745, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02566371681415929, |
|
"grad_norm": 15.169568061828613, |
|
"learning_rate": 8.84433962264151e-08, |
|
"loss": 4.7298, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02654867256637168, |
|
"grad_norm": 24.09995460510254, |
|
"learning_rate": 9.19811320754717e-08, |
|
"loss": 5.4614, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02743362831858407, |
|
"grad_norm": 28.669275283813477, |
|
"learning_rate": 9.55188679245283e-08, |
|
"loss": 5.8594, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02831858407079646, |
|
"grad_norm": 23.37987518310547, |
|
"learning_rate": 9.905660377358491e-08, |
|
"loss": 5.2401, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02920353982300885, |
|
"grad_norm": 22.815292358398438, |
|
"learning_rate": 1.0259433962264152e-07, |
|
"loss": 5.1579, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03008849557522124, |
|
"grad_norm": 13.775344848632812, |
|
"learning_rate": 1.0613207547169811e-07, |
|
"loss": 5.2181, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.030973451327433628, |
|
"grad_norm": 18.642087936401367, |
|
"learning_rate": 1.0966981132075472e-07, |
|
"loss": 4.6328, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03185840707964602, |
|
"grad_norm": 18.041406631469727, |
|
"learning_rate": 1.1320754716981131e-07, |
|
"loss": 2.121, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03274336283185841, |
|
"grad_norm": 23.423933029174805, |
|
"learning_rate": 1.1674528301886792e-07, |
|
"loss": 5.9026, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.033628318584070796, |
|
"grad_norm": 46.25591278076172, |
|
"learning_rate": 1.2028301886792452e-07, |
|
"loss": 7.3796, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.034513274336283185, |
|
"grad_norm": 20.376422882080078, |
|
"learning_rate": 1.2382075471698114e-07, |
|
"loss": 5.5361, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"grad_norm": 12.82562255859375, |
|
"learning_rate": 1.2735849056603773e-07, |
|
"loss": 4.0243, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_Qnli-dev_cosine_accuracy": 0.5859375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9302856922149658, |
|
"eval_Qnli-dev_cosine_ap": 0.5480269179285036, |
|
"eval_Qnli-dev_cosine_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7634451389312744, |
|
"eval_Qnli-dev_cosine_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_cosine_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_dot_accuracy": 0.5859375, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 714.4895629882812, |
|
"eval_Qnli-dev_dot_ap": 0.548060663242546, |
|
"eval_Qnli-dev_dot_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_dot_f1_threshold": 586.342529296875, |
|
"eval_Qnli-dev_dot_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_dot_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.5859375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.348224639892578, |
|
"eval_Qnli-dev_euclidean_ap": 0.5480269179285036, |
|
"eval_Qnli-dev_euclidean_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.05518341064453, |
|
"eval_Qnli-dev_euclidean_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_euclidean_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.59765625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 175.22628784179688, |
|
"eval_Qnli-dev_manhattan_ap": 0.5780924813828909, |
|
"eval_Qnli-dev_manhattan_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 334.39178466796875, |
|
"eval_Qnli-dev_manhattan_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_manhattan_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_max_accuracy": 0.59765625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 714.4895629882812, |
|
"eval_Qnli-dev_max_ap": 0.5780924813828909, |
|
"eval_Qnli-dev_max_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_max_f1_threshold": 586.342529296875, |
|
"eval_Qnli-dev_max_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6640625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9888672828674316, |
|
"eval_allNLI-dev_cosine_ap": 0.32886365768247516, |
|
"eval_allNLI-dev_cosine_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7477295398712158, |
|
"eval_allNLI-dev_cosine_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.6640625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 759.483154296875, |
|
"eval_allNLI-dev_dot_ap": 0.3288581611938815, |
|
"eval_allNLI-dev_dot_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_dot_f1_threshold": 574.2760620117188, |
|
"eval_allNLI-dev_dot_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.6640625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 3.8085508346557617, |
|
"eval_allNLI-dev_euclidean_ap": 0.32886365768247516, |
|
"eval_allNLI-dev_euclidean_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 19.684810638427734, |
|
"eval_allNLI-dev_euclidean_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_euclidean_recall": 1.0, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 65.93238830566406, |
|
"eval_allNLI-dev_manhattan_ap": 0.33852594919898543, |
|
"eval_allNLI-dev_manhattan_f1": 0.5058479532163743, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 335.4263916015625, |
|
"eval_allNLI-dev_manhattan_precision": 0.3385518590998043, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.6640625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 759.483154296875, |
|
"eval_allNLI-dev_max_ap": 0.33852594919898543, |
|
"eval_allNLI-dev_max_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_max_f1_threshold": 574.2760620117188, |
|
"eval_allNLI-dev_max_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5780924813828909, |
|
"eval_sts-test_pearson_cosine": 0.1533465318414369, |
|
"eval_sts-test_pearson_dot": 0.15333057450060855, |
|
"eval_sts-test_pearson_euclidean": 0.1664717893342273, |
|
"eval_sts-test_pearson_manhattan": 0.20717970064899288, |
|
"eval_sts-test_pearson_max": 0.20717970064899288, |
|
"eval_sts-test_spearman_cosine": 0.18786210334203038, |
|
"eval_sts-test_spearman_dot": 0.1878347337472397, |
|
"eval_sts-test_spearman_euclidean": 0.18786046572196458, |
|
"eval_sts-test_spearman_manhattan": 0.22429466463153608, |
|
"eval_sts-test_spearman_max": 0.22429466463153608, |
|
"eval_vitaminc-pairs_loss": 2.901831865310669, |
|
"eval_vitaminc-pairs_runtime": 4.078, |
|
"eval_vitaminc-pairs_samples_per_second": 31.388, |
|
"eval_vitaminc-pairs_steps_per_second": 0.245, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_negation-triplets_loss": 5.690315246582031, |
|
"eval_negation-triplets_runtime": 0.7141, |
|
"eval_negation-triplets_samples_per_second": 179.254, |
|
"eval_negation-triplets_steps_per_second": 1.4, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_scitail-pairs-pos_loss": 2.1135852336883545, |
|
"eval_scitail-pairs-pos_runtime": 0.8282, |
|
"eval_scitail-pairs-pos_samples_per_second": 154.543, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.207, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_scitail-pairs-qa_loss": 2.8052029609680176, |
|
"eval_scitail-pairs-qa_runtime": 0.5471, |
|
"eval_scitail-pairs-qa_samples_per_second": 233.943, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.828, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_xsum-pairs_loss": 6.583061695098877, |
|
"eval_xsum-pairs_runtime": 2.8921, |
|
"eval_xsum-pairs_samples_per_second": 44.259, |
|
"eval_xsum-pairs_steps_per_second": 0.346, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_sciq_pairs_loss": 0.8882207870483398, |
|
"eval_sciq_pairs_runtime": 3.7993, |
|
"eval_sciq_pairs_samples_per_second": 33.69, |
|
"eval_sciq_pairs_steps_per_second": 0.263, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_qasc_pairs_loss": 4.1147541999816895, |
|
"eval_qasc_pairs_runtime": 0.6768, |
|
"eval_qasc_pairs_samples_per_second": 189.125, |
|
"eval_qasc_pairs_steps_per_second": 1.478, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_openbookqa_pairs_loss": 5.096628665924072, |
|
"eval_openbookqa_pairs_runtime": 0.5776, |
|
"eval_openbookqa_pairs_samples_per_second": 221.615, |
|
"eval_openbookqa_pairs_steps_per_second": 1.731, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_msmarco_pairs_loss": 10.391141891479492, |
|
"eval_msmarco_pairs_runtime": 1.2577, |
|
"eval_msmarco_pairs_samples_per_second": 101.77, |
|
"eval_msmarco_pairs_steps_per_second": 0.795, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_nq_pairs_loss": 10.903197288513184, |
|
"eval_nq_pairs_runtime": 2.5051, |
|
"eval_nq_pairs_samples_per_second": 51.095, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_trivia_pairs_loss": 7.190384387969971, |
|
"eval_trivia_pairs_runtime": 3.6482, |
|
"eval_trivia_pairs_samples_per_second": 35.085, |
|
"eval_trivia_pairs_steps_per_second": 0.274, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_gooaq_pairs_loss": 8.193528175354004, |
|
"eval_gooaq_pairs_runtime": 0.9648, |
|
"eval_gooaq_pairs_samples_per_second": 132.67, |
|
"eval_gooaq_pairs_steps_per_second": 1.036, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_paws-pos_loss": 1.3942564725875854, |
|
"eval_paws-pos_runtime": 0.6718, |
|
"eval_paws-pos_samples_per_second": 190.538, |
|
"eval_paws-pos_steps_per_second": 1.489, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_global_dataset_loss": 5.671571731567383, |
|
"eval_global_dataset_runtime": 23.0452, |
|
"eval_global_dataset_samples_per_second": 28.77, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.036283185840707964, |
|
"grad_norm": 18.026830673217773, |
|
"learning_rate": 1.3089622641509433e-07, |
|
"loss": 4.9072, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03716814159292035, |
|
"grad_norm": 15.423810958862305, |
|
"learning_rate": 1.3443396226415095e-07, |
|
"loss": 3.4439, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03805309734513274, |
|
"grad_norm": 16.31403160095215, |
|
"learning_rate": 1.3797169811320754e-07, |
|
"loss": 4.9787, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03893805309734513, |
|
"grad_norm": 21.37955093383789, |
|
"learning_rate": 1.4150943396226417e-07, |
|
"loss": 5.8318, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03982300884955752, |
|
"grad_norm": 18.23583984375, |
|
"learning_rate": 1.4504716981132076e-07, |
|
"loss": 5.3226, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04070796460176991, |
|
"grad_norm": 20.878713607788086, |
|
"learning_rate": 1.4858490566037738e-07, |
|
"loss": 5.1181, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0415929203539823, |
|
"grad_norm": 18.71149444580078, |
|
"learning_rate": 1.5212264150943398e-07, |
|
"loss": 4.7834, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04247787610619469, |
|
"grad_norm": 38.85902786254883, |
|
"learning_rate": 1.5566037735849057e-07, |
|
"loss": 6.6303, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04336283185840708, |
|
"grad_norm": 37.41562271118164, |
|
"learning_rate": 1.591981132075472e-07, |
|
"loss": 5.8171, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04424778761061947, |
|
"grad_norm": 17.541080474853516, |
|
"learning_rate": 1.627358490566038e-07, |
|
"loss": 5.1962, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04513274336283186, |
|
"grad_norm": 16.145116806030273, |
|
"learning_rate": 1.6627358490566038e-07, |
|
"loss": 5.2096, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04601769911504425, |
|
"grad_norm": 20.175189971923828, |
|
"learning_rate": 1.6981132075471698e-07, |
|
"loss": 5.0943, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.046902654867256637, |
|
"grad_norm": 13.441214561462402, |
|
"learning_rate": 1.733490566037736e-07, |
|
"loss": 4.9038, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.047787610619469026, |
|
"grad_norm": 13.396607398986816, |
|
"learning_rate": 1.768867924528302e-07, |
|
"loss": 4.6479, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.048672566371681415, |
|
"grad_norm": 13.68046760559082, |
|
"learning_rate": 1.804245283018868e-07, |
|
"loss": 5.5098, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.049557522123893805, |
|
"grad_norm": 13.278443336486816, |
|
"learning_rate": 1.839622641509434e-07, |
|
"loss": 4.6979, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.050442477876106194, |
|
"grad_norm": 15.295453071594238, |
|
"learning_rate": 1.875e-07, |
|
"loss": 3.1969, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05132743362831858, |
|
"grad_norm": 12.185781478881836, |
|
"learning_rate": 1.910377358490566e-07, |
|
"loss": 4.4127, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05221238938053097, |
|
"grad_norm": 10.874494552612305, |
|
"learning_rate": 1.9457547169811322e-07, |
|
"loss": 3.7746, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05309734513274336, |
|
"grad_norm": 9.654823303222656, |
|
"learning_rate": 1.9811320754716982e-07, |
|
"loss": 4.5378, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05398230088495575, |
|
"grad_norm": 21.123645782470703, |
|
"learning_rate": 2.016509433962264e-07, |
|
"loss": 5.0209, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05486725663716814, |
|
"grad_norm": 33.47934341430664, |
|
"learning_rate": 2.0518867924528303e-07, |
|
"loss": 6.5936, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05575221238938053, |
|
"grad_norm": 10.2566556930542, |
|
"learning_rate": 2.0872641509433963e-07, |
|
"loss": 4.2315, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05663716814159292, |
|
"grad_norm": 28.198625564575195, |
|
"learning_rate": 2.1226415094339622e-07, |
|
"loss": 6.4269, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05752212389380531, |
|
"grad_norm": 9.386558532714844, |
|
"learning_rate": 2.1580188679245282e-07, |
|
"loss": 4.2644, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0584070796460177, |
|
"grad_norm": 12.687555313110352, |
|
"learning_rate": 2.1933962264150944e-07, |
|
"loss": 5.1388, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05929203539823009, |
|
"grad_norm": 14.834878921508789, |
|
"learning_rate": 2.2287735849056603e-07, |
|
"loss": 5.1852, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06017699115044248, |
|
"grad_norm": 10.888677597045898, |
|
"learning_rate": 2.2641509433962263e-07, |
|
"loss": 4.8057, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.061061946902654866, |
|
"grad_norm": 13.97256851196289, |
|
"learning_rate": 2.2995283018867925e-07, |
|
"loss": 3.1725, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.061946902654867256, |
|
"grad_norm": 11.82534122467041, |
|
"learning_rate": 2.3349056603773584e-07, |
|
"loss": 3.3322, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06283185840707965, |
|
"grad_norm": 16.99266242980957, |
|
"learning_rate": 2.3702830188679244e-07, |
|
"loss": 5.139, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06371681415929203, |
|
"grad_norm": 8.74513053894043, |
|
"learning_rate": 2.4056603773584903e-07, |
|
"loss": 4.307, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06460176991150443, |
|
"grad_norm": 11.715869903564453, |
|
"learning_rate": 2.4410377358490563e-07, |
|
"loss": 5.0133, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06548672566371681, |
|
"grad_norm": 9.844196319580078, |
|
"learning_rate": 2.476415094339623e-07, |
|
"loss": 4.0507, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06637168141592921, |
|
"grad_norm": 12.447444915771484, |
|
"learning_rate": 2.5117924528301887e-07, |
|
"loss": 3.3895, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06725663716814159, |
|
"grad_norm": 23.91596794128418, |
|
"learning_rate": 2.5471698113207547e-07, |
|
"loss": 5.6736, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06814159292035399, |
|
"grad_norm": 9.635603904724121, |
|
"learning_rate": 2.5825471698113206e-07, |
|
"loss": 4.2572, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.06902654867256637, |
|
"grad_norm": 14.971665382385254, |
|
"learning_rate": 2.6179245283018866e-07, |
|
"loss": 3.0796, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06991150442477877, |
|
"grad_norm": 11.226128578186035, |
|
"learning_rate": 2.6533018867924525e-07, |
|
"loss": 5.0199, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"grad_norm": 11.01388931274414, |
|
"learning_rate": 2.688679245283019e-07, |
|
"loss": 4.1414, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_Qnli-dev_cosine_accuracy": 0.591796875, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9258557558059692, |
|
"eval_Qnli-dev_cosine_ap": 0.5585355274462735, |
|
"eval_Qnli-dev_cosine_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.750666618347168, |
|
"eval_Qnli-dev_cosine_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.591796875, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 711.18359375, |
|
"eval_Qnli-dev_dot_ap": 0.5585297234749824, |
|
"eval_Qnli-dev_dot_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_dot_f1_threshold": 576.5970458984375, |
|
"eval_Qnli-dev_dot_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.591796875, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.672666549682617, |
|
"eval_Qnli-dev_euclidean_ap": 0.5585355274462735, |
|
"eval_Qnli-dev_euclidean_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.553747177124023, |
|
"eval_Qnli-dev_euclidean_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.619140625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 188.09068298339844, |
|
"eval_Qnli-dev_manhattan_ap": 0.5898283705050701, |
|
"eval_Qnli-dev_manhattan_f1": 0.6301775147928994, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 237.80462646484375, |
|
"eval_Qnli-dev_manhattan_precision": 0.48409090909090907, |
|
"eval_Qnli-dev_manhattan_recall": 0.902542372881356, |
|
"eval_Qnli-dev_max_accuracy": 0.619140625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 711.18359375, |
|
"eval_Qnli-dev_max_ap": 0.5898283705050701, |
|
"eval_Qnli-dev_max_f1": 0.6301775147928994, |
|
"eval_Qnli-dev_max_f1_threshold": 576.5970458984375, |
|
"eval_Qnli-dev_max_precision": 0.48409090909090907, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.983686089515686, |
|
"eval_allNLI-dev_cosine_ap": 0.34411819659341086, |
|
"eval_allNLI-dev_cosine_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7642872333526611, |
|
"eval_allNLI-dev_cosine_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.666015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 755.60302734375, |
|
"eval_allNLI-dev_dot_ap": 0.344109544232086, |
|
"eval_allNLI-dev_dot_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_dot_f1_threshold": 587.0625, |
|
"eval_allNLI-dev_dot_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.00581693649292, |
|
"eval_allNLI-dev_euclidean_ap": 0.3441246898925644, |
|
"eval_allNLI-dev_euclidean_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 19.022436141967773, |
|
"eval_allNLI-dev_euclidean_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_euclidean_recall": 1.0, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 62.69102096557617, |
|
"eval_allNLI-dev_manhattan_ap": 0.35131239981425566, |
|
"eval_allNLI-dev_manhattan_f1": 0.5058479532163743, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 337.6861877441406, |
|
"eval_allNLI-dev_manhattan_precision": 0.3385518590998043, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 755.60302734375, |
|
"eval_allNLI-dev_max_ap": 0.35131239981425566, |
|
"eval_allNLI-dev_max_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_max_f1_threshold": 587.0625, |
|
"eval_allNLI-dev_max_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5898283705050701, |
|
"eval_sts-test_pearson_cosine": 0.22248205020578934, |
|
"eval_sts-test_pearson_dot": 0.22239084967931927, |
|
"eval_sts-test_pearson_euclidean": 0.2323160413842197, |
|
"eval_sts-test_pearson_manhattan": 0.26632593273308647, |
|
"eval_sts-test_pearson_max": 0.26632593273308647, |
|
"eval_sts-test_spearman_cosine": 0.24802235964390085, |
|
"eval_sts-test_spearman_dot": 0.24791612015173234, |
|
"eval_sts-test_spearman_euclidean": 0.24799036249272113, |
|
"eval_sts-test_spearman_manhattan": 0.2843623073856928, |
|
"eval_sts-test_spearman_max": 0.2843623073856928, |
|
"eval_vitaminc-pairs_loss": 2.7793872356414795, |
|
"eval_vitaminc-pairs_runtime": 3.7649, |
|
"eval_vitaminc-pairs_samples_per_second": 33.998, |
|
"eval_vitaminc-pairs_steps_per_second": 0.266, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_negation-triplets_loss": 4.888970851898193, |
|
"eval_negation-triplets_runtime": 0.7134, |
|
"eval_negation-triplets_samples_per_second": 179.432, |
|
"eval_negation-triplets_steps_per_second": 1.402, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_scitail-pairs-pos_loss": 1.8996644020080566, |
|
"eval_scitail-pairs-pos_runtime": 0.8506, |
|
"eval_scitail-pairs-pos_samples_per_second": 150.477, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.176, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_scitail-pairs-qa_loss": 2.6760551929473877, |
|
"eval_scitail-pairs-qa_runtime": 0.5685, |
|
"eval_scitail-pairs-qa_samples_per_second": 225.171, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.759, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_xsum-pairs_loss": 6.209648609161377, |
|
"eval_xsum-pairs_runtime": 2.9221, |
|
"eval_xsum-pairs_samples_per_second": 43.804, |
|
"eval_xsum-pairs_steps_per_second": 0.342, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_sciq_pairs_loss": 0.7622462511062622, |
|
"eval_sciq_pairs_runtime": 3.7816, |
|
"eval_sciq_pairs_samples_per_second": 33.848, |
|
"eval_sciq_pairs_steps_per_second": 0.264, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_qasc_pairs_loss": 3.3129472732543945, |
|
"eval_qasc_pairs_runtime": 0.6761, |
|
"eval_qasc_pairs_samples_per_second": 189.334, |
|
"eval_qasc_pairs_steps_per_second": 1.479, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_openbookqa_pairs_loss": 4.549765586853027, |
|
"eval_openbookqa_pairs_runtime": 0.5767, |
|
"eval_openbookqa_pairs_samples_per_second": 221.954, |
|
"eval_openbookqa_pairs_steps_per_second": 1.734, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_msmarco_pairs_loss": 7.205582141876221, |
|
"eval_msmarco_pairs_runtime": 1.2621, |
|
"eval_msmarco_pairs_samples_per_second": 101.416, |
|
"eval_msmarco_pairs_steps_per_second": 0.792, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_nq_pairs_loss": 7.680945873260498, |
|
"eval_nq_pairs_runtime": 2.5052, |
|
"eval_nq_pairs_samples_per_second": 51.095, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_trivia_pairs_loss": 6.37924861907959, |
|
"eval_trivia_pairs_runtime": 3.6293, |
|
"eval_trivia_pairs_samples_per_second": 35.268, |
|
"eval_trivia_pairs_steps_per_second": 0.276, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_gooaq_pairs_loss": 6.656675338745117, |
|
"eval_gooaq_pairs_runtime": 0.9698, |
|
"eval_gooaq_pairs_samples_per_second": 131.988, |
|
"eval_gooaq_pairs_steps_per_second": 1.031, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_paws-pos_loss": 1.3848179578781128, |
|
"eval_paws-pos_runtime": 0.6727, |
|
"eval_paws-pos_samples_per_second": 190.278, |
|
"eval_paws-pos_steps_per_second": 1.487, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_global_dataset_loss": 5.002967834472656, |
|
"eval_global_dataset_runtime": 23.048, |
|
"eval_global_dataset_samples_per_second": 28.766, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 80 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3390, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 80, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 42, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|