|
{
  "best_metric": 1.180140495300293,
  "best_model_checkpoint": "classical_composer_classification/checkpoint-1005",
  "epoch": 15.0,
  "global_step": 1005,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0000000000000001e-07, |
|
"loss": 1.6203, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 1.611, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.6066, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.6033, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 1.6006, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.6104, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.000000000000001e-07, |
|
"loss": 1.6163, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.6205, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.000000000000001e-07, |
|
"loss": 1.6081, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.6039, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.1e-06, |
|
"loss": 1.6157, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.6045, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.3e-06, |
|
"loss": 1.6093, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 1.6137, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.6035, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.6207, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7000000000000002e-06, |
|
"loss": 1.6017, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 1.6471, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9000000000000002e-06, |
|
"loss": 1.6058, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.6073, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 1.6206, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.2e-06, |
|
"loss": 1.6173, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.3000000000000004e-06, |
|
"loss": 1.6106, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.606, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.6096, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.6e-06, |
|
"loss": 1.6042, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.7000000000000004e-06, |
|
"loss": 1.6105, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.6128, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.9e-06, |
|
"loss": 1.6039, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3e-06, |
|
"loss": 1.6105, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 3.1000000000000004e-06, |
|
"loss": 1.5976, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.5883, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 3.3000000000000006e-06, |
|
"loss": 1.6045, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.5963, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.5e-06, |
|
"loss": 1.6007, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.6223, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.7e-06, |
|
"loss": 1.5988, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 1.6182, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.900000000000001e-06, |
|
"loss": 1.6118, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.6125, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.1e-06, |
|
"loss": 1.587, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.6084, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.3e-06, |
|
"loss": 1.6098, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.6195, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.6018, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 1.6226, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.7e-06, |
|
"loss": 1.6124, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.6075, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.9000000000000005e-06, |
|
"loss": 1.598, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 5e-06, |
|
"loss": 1.5968, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.99476439790576e-06, |
|
"loss": 1.6453, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.989528795811518e-06, |
|
"loss": 1.5927, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.984293193717278e-06, |
|
"loss": 1.6027, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.979057591623037e-06, |
|
"loss": 1.6153, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.9738219895287965e-06, |
|
"loss": 1.6048, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.968586387434555e-06, |
|
"loss": 1.5814, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.963350785340314e-06, |
|
"loss": 1.5994, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.958115183246074e-06, |
|
"loss": 1.5917, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.952879581151832e-06, |
|
"loss": 1.619, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.947643979057592e-06, |
|
"loss": 1.6093, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.942408376963351e-06, |
|
"loss": 1.5972, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.9371727748691105e-06, |
|
"loss": 1.6112, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.931937172774869e-06, |
|
"loss": 1.6086, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.926701570680628e-06, |
|
"loss": 1.6005, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.921465968586388e-06, |
|
"loss": 1.6032, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.916230366492147e-06, |
|
"loss": 1.6082, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.910994764397906e-06, |
|
"loss": 1.6167, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.2835820895522388, |
|
"eval_loss": 1.6057485342025757, |
|
"eval_roc_auc": 0.7361035629382203, |
|
"eval_runtime": 114.8198, |
|
"eval_samples_per_second": 0.584, |
|
"eval_steps_per_second": 0.584, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.905759162303665e-06, |
|
"loss": 1.5778, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.9005235602094245e-06, |
|
"loss": 1.6078, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.895287958115184e-06, |
|
"loss": 1.6155, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.890052356020943e-06, |
|
"loss": 1.6081, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.884816753926702e-06, |
|
"loss": 1.615, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.879581151832461e-06, |
|
"loss": 1.6057, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.8743455497382206e-06, |
|
"loss": 1.6165, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.86910994764398e-06, |
|
"loss": 1.5844, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.8638743455497385e-06, |
|
"loss": 1.5988, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.858638743455498e-06, |
|
"loss": 1.5929, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.853403141361257e-06, |
|
"loss": 1.6609, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.848167539267017e-06, |
|
"loss": 1.5917, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.842931937172775e-06, |
|
"loss": 1.6136, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.837696335078535e-06, |
|
"loss": 1.6112, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.832460732984294e-06, |
|
"loss": 1.612, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.8272251308900525e-06, |
|
"loss": 1.6076, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.821989528795812e-06, |
|
"loss": 1.6057, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.816753926701571e-06, |
|
"loss": 1.5769, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.811518324607331e-06, |
|
"loss": 1.6045, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.806282722513089e-06, |
|
"loss": 1.602, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.801047120418849e-06, |
|
"loss": 1.5942, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.795811518324608e-06, |
|
"loss": 1.6149, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.7905759162303665e-06, |
|
"loss": 1.5933, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.785340314136126e-06, |
|
"loss": 1.6048, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.780104712041885e-06, |
|
"loss": 1.6154, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.774869109947645e-06, |
|
"loss": 1.5998, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.769633507853403e-06, |
|
"loss": 1.6342, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.764397905759163e-06, |
|
"loss": 1.6019, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.759162303664922e-06, |
|
"loss": 1.6392, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.7539267015706805e-06, |
|
"loss": 1.623, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.74869109947644e-06, |
|
"loss": 1.6016, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.743455497382199e-06, |
|
"loss": 1.613, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.738219895287958e-06, |
|
"loss": 1.6087, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.732984293193717e-06, |
|
"loss": 1.6022, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.727748691099477e-06, |
|
"loss": 1.5927, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.722513089005236e-06, |
|
"loss": 1.5968, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.717277486910995e-06, |
|
"loss": 1.6018, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.712041884816754e-06, |
|
"loss": 1.59, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.706806282722513e-06, |
|
"loss": 1.6068, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.701570680628273e-06, |
|
"loss": 1.6148, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.696335078534032e-06, |
|
"loss": 1.6022, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.6910994764397914e-06, |
|
"loss": 1.6399, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.68586387434555e-06, |
|
"loss": 1.5989, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.680628272251309e-06, |
|
"loss": 1.5946, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.675392670157069e-06, |
|
"loss": 1.5955, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.670157068062828e-06, |
|
"loss": 1.596, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.664921465968587e-06, |
|
"loss": 1.6021, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.659685863874346e-06, |
|
"loss": 1.6031, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.6544502617801054e-06, |
|
"loss": 1.596, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.649214659685865e-06, |
|
"loss": 1.5977, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.643979057591623e-06, |
|
"loss": 1.5955, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.638743455497383e-06, |
|
"loss": 1.5858, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.633507853403142e-06, |
|
"loss": 1.6205, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.628272251308901e-06, |
|
"loss": 1.61, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.62303664921466e-06, |
|
"loss": 1.6035, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.6178010471204194e-06, |
|
"loss": 1.6081, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.612565445026179e-06, |
|
"loss": 1.6066, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.607329842931937e-06, |
|
"loss": 1.5975, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.602094240837697e-06, |
|
"loss": 1.6156, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.596858638743456e-06, |
|
"loss": 1.6041, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.591623036649215e-06, |
|
"loss": 1.5978, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.586387434554974e-06, |
|
"loss": 1.5896, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.5811518324607335e-06, |
|
"loss": 1.5947, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.575916230366493e-06, |
|
"loss": 1.5916, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.570680628272251e-06, |
|
"loss": 1.6138, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.565445026178011e-06, |
|
"loss": 1.6157, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.56020942408377e-06, |
|
"loss": 1.6005, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.2537313432835821, |
|
"eval_loss": 1.60354745388031, |
|
"eval_roc_auc": 0.6429349531633545, |
|
"eval_runtime": 115.014, |
|
"eval_samples_per_second": 0.583, |
|
"eval_steps_per_second": 0.583, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.554973821989529e-06, |
|
"loss": 1.6075, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.549738219895288e-06, |
|
"loss": 1.5978, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.5445026178010475e-06, |
|
"loss": 1.6135, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.539267015706806e-06, |
|
"loss": 1.5899, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.534031413612565e-06, |
|
"loss": 1.5957, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.528795811518325e-06, |
|
"loss": 1.6034, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.523560209424084e-06, |
|
"loss": 1.5991, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.5183246073298435e-06, |
|
"loss": 1.5832, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.513089005235602e-06, |
|
"loss": 1.6053, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.5078534031413615e-06, |
|
"loss": 1.5825, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.502617801047121e-06, |
|
"loss": 1.5835, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.49738219895288e-06, |
|
"loss": 1.6049, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.492146596858639e-06, |
|
"loss": 1.6015, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.486910994764398e-06, |
|
"loss": 1.5913, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.4816753926701575e-06, |
|
"loss": 1.5841, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.476439790575917e-06, |
|
"loss": 1.6218, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 4.471204188481676e-06, |
|
"loss": 1.5744, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.465968586387435e-06, |
|
"loss": 1.5613, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 4.460732984293194e-06, |
|
"loss": 1.6108, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.455497382198954e-06, |
|
"loss": 1.5852, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.450261780104713e-06, |
|
"loss": 1.6145, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.4450261780104715e-06, |
|
"loss": 1.5974, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.439790575916231e-06, |
|
"loss": 1.6123, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.43455497382199e-06, |
|
"loss": 1.5875, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.429319371727749e-06, |
|
"loss": 1.5957, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.424083769633508e-06, |
|
"loss": 1.5734, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.418848167539268e-06, |
|
"loss": 1.6153, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.413612565445027e-06, |
|
"loss": 1.6366, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.4083769633507855e-06, |
|
"loss": 1.6076, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.403141361256545e-06, |
|
"loss": 1.5851, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.397905759162304e-06, |
|
"loss": 1.5644, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.392670157068063e-06, |
|
"loss": 1.592, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.387434554973822e-06, |
|
"loss": 1.5731, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.382198952879582e-06, |
|
"loss": 1.5703, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.37696335078534e-06, |
|
"loss": 1.6144, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 4.3717277486910996e-06, |
|
"loss": 1.6309, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.366492146596859e-06, |
|
"loss": 1.6017, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.361256544502618e-06, |
|
"loss": 1.5652, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.356020942408377e-06, |
|
"loss": 1.5775, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.350785340314136e-06, |
|
"loss": 1.5907, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.345549738219896e-06, |
|
"loss": 1.6096, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.340314136125654e-06, |
|
"loss": 1.5706, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.3350785340314136e-06, |
|
"loss": 1.5711, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.329842931937173e-06, |
|
"loss": 1.6023, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.324607329842932e-06, |
|
"loss": 1.5766, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.319371727748692e-06, |
|
"loss": 1.5795, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.31413612565445e-06, |
|
"loss": 1.5655, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.30890052356021e-06, |
|
"loss": 1.5366, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.303664921465969e-06, |
|
"loss": 1.5702, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.298429319371728e-06, |
|
"loss": 1.5976, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.293193717277487e-06, |
|
"loss": 1.5405, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.287958115183246e-06, |
|
"loss": 1.6132, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.282722513089006e-06, |
|
"loss": 1.5873, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.277486910994765e-06, |
|
"loss": 1.5689, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.2722513089005245e-06, |
|
"loss": 1.5774, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.267015706806283e-06, |
|
"loss": 1.567, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.261780104712042e-06, |
|
"loss": 1.587, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.256544502617802e-06, |
|
"loss": 1.564, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.251308900523561e-06, |
|
"loss": 1.5441, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.24607329842932e-06, |
|
"loss": 1.6087, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.240837696335079e-06, |
|
"loss": 1.5691, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.2356020942408385e-06, |
|
"loss": 1.5733, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.230366492146597e-06, |
|
"loss": 1.5608, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.225130890052356e-06, |
|
"loss": 1.5822, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.219895287958116e-06, |
|
"loss": 1.58, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.214659685863874e-06, |
|
"loss": 1.6026, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.209424083769634e-06, |
|
"loss": 1.5882, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.3283582089552239, |
|
"eval_loss": 1.5650840997695923, |
|
"eval_roc_auc": 0.8251884750469657, |
|
"eval_runtime": 114.4983, |
|
"eval_samples_per_second": 0.585, |
|
"eval_steps_per_second": 0.585, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.204188481675393e-06, |
|
"loss": 1.6095, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.1989528795811525e-06, |
|
"loss": 1.5525, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.193717277486911e-06, |
|
"loss": 1.526, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.18848167539267e-06, |
|
"loss": 1.543, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.18324607329843e-06, |
|
"loss": 1.5385, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.178010471204188e-06, |
|
"loss": 1.5581, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.172774869109948e-06, |
|
"loss": 1.5184, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.167539267015707e-06, |
|
"loss": 1.5782, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.1623036649214665e-06, |
|
"loss": 1.5476, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.157068062827225e-06, |
|
"loss": 1.5345, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.151832460732984e-06, |
|
"loss": 1.5511, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.146596858638744e-06, |
|
"loss": 1.5452, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.141361256544502e-06, |
|
"loss": 1.6125, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.136125654450262e-06, |
|
"loss": 1.5821, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.130890052356021e-06, |
|
"loss": 1.5133, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.1256544502617805e-06, |
|
"loss": 1.5998, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.120418848167539e-06, |
|
"loss": 1.5498, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.115183246073298e-06, |
|
"loss": 1.5217, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.109947643979058e-06, |
|
"loss": 1.6151, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.104712041884817e-06, |
|
"loss": 1.6195, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.099476439790577e-06, |
|
"loss": 1.5148, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.094240837696335e-06, |
|
"loss": 1.5387, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.0890052356020945e-06, |
|
"loss": 1.5773, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.083769633507854e-06, |
|
"loss": 1.6052, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.078534031413613e-06, |
|
"loss": 1.5402, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.073298429319373e-06, |
|
"loss": 1.6079, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.068062827225131e-06, |
|
"loss": 1.6924, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.062827225130891e-06, |
|
"loss": 1.5469, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.05759162303665e-06, |
|
"loss": 1.5873, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.052356020942409e-06, |
|
"loss": 1.6393, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.047120418848168e-06, |
|
"loss": 1.5403, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.041884816753927e-06, |
|
"loss": 1.5488, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.036649214659687e-06, |
|
"loss": 1.5781, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.031413612565445e-06, |
|
"loss": 1.5889, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.026178010471205e-06, |
|
"loss": 1.5712, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.020942408376964e-06, |
|
"loss": 1.4809, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.0157068062827225e-06, |
|
"loss": 1.5818, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.010471204188482e-06, |
|
"loss": 1.6553, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.005235602094241e-06, |
|
"loss": 1.5496, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.5869, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.994764397905759e-06, |
|
"loss": 1.6217, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.989528795811519e-06, |
|
"loss": 1.5883, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.984293193717278e-06, |
|
"loss": 1.5391, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.9790575916230365e-06, |
|
"loss": 1.5869, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.973821989528796e-06, |
|
"loss": 1.5623, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.968586387434555e-06, |
|
"loss": 1.5421, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.963350785340315e-06, |
|
"loss": 1.5695, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.958115183246073e-06, |
|
"loss": 1.5639, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.952879581151833e-06, |
|
"loss": 1.5488, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.947643979057592e-06, |
|
"loss": 1.5882, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.9424083769633505e-06, |
|
"loss": 1.5899, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.93717277486911e-06, |
|
"loss": 1.6539, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.931937172774869e-06, |
|
"loss": 1.5572, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.926701570680629e-06, |
|
"loss": 1.5665, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.921465968586387e-06, |
|
"loss": 1.6125, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.916230366492147e-06, |
|
"loss": 1.5109, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.910994764397906e-06, |
|
"loss": 1.6052, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.905759162303665e-06, |
|
"loss": 1.5552, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.900523560209425e-06, |
|
"loss": 1.5781, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.895287958115183e-06, |
|
"loss": 1.5518, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.890052356020943e-06, |
|
"loss": 1.5583, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.884816753926702e-06, |
|
"loss": 1.5543, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.8795811518324614e-06, |
|
"loss": 1.5628, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 3.874345549738221e-06, |
|
"loss": 1.5232, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.869109947643979e-06, |
|
"loss": 1.5801, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.863874345549739e-06, |
|
"loss": 1.4316, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.858638743455498e-06, |
|
"loss": 1.5457, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.3880597014925373, |
|
"eval_loss": 1.524383783340454, |
|
"eval_roc_auc": 0.8612657716331995, |
|
"eval_runtime": 114.7192, |
|
"eval_samples_per_second": 0.584, |
|
"eval_steps_per_second": 0.584, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 3.853403141361257e-06, |
|
"loss": 1.463, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.848167539267016e-06, |
|
"loss": 1.5483, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 3.8429319371727755e-06, |
|
"loss": 1.536, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.837696335078535e-06, |
|
"loss": 1.5668, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.832460732984293e-06, |
|
"loss": 1.5547, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.827225130890053e-06, |
|
"loss": 1.5112, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 3.821989528795812e-06, |
|
"loss": 1.611, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.816753926701571e-06, |
|
"loss": 1.5539, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.81151832460733e-06, |
|
"loss": 1.5626, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.8062827225130895e-06, |
|
"loss": 1.5878, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.801047120418849e-06, |
|
"loss": 1.4933, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.7958115183246074e-06, |
|
"loss": 1.5695, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.7905759162303668e-06, |
|
"loss": 1.5517, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.785340314136126e-06, |
|
"loss": 1.5824, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.7801047120418847e-06, |
|
"loss": 1.5898, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.774869109947644e-06, |
|
"loss": 1.5718, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.7696335078534035e-06, |
|
"loss": 1.5329, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.764397905759163e-06, |
|
"loss": 1.5609, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 3.759162303664922e-06, |
|
"loss": 1.5517, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 3.7539267015706808e-06, |
|
"loss": 1.4804, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 3.74869109947644e-06, |
|
"loss": 1.572, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 3.743455497382199e-06, |
|
"loss": 1.4667, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.7382198952879585e-06, |
|
"loss": 1.4649, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.732984293193718e-06, |
|
"loss": 1.4884, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.727748691099477e-06, |
|
"loss": 1.481, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.722513089005236e-06, |
|
"loss": 1.6238, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.717277486910995e-06, |
|
"loss": 1.4681, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.7120418848167546e-06, |
|
"loss": 1.5198, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.706806282722513e-06, |
|
"loss": 1.5526, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.7015706806282725e-06, |
|
"loss": 1.4698, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.696335078534032e-06, |
|
"loss": 1.524, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.6910994764397904e-06, |
|
"loss": 1.519, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.68586387434555e-06, |
|
"loss": 1.4419, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 3.680628272251309e-06, |
|
"loss": 1.5393, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.6753926701570686e-06, |
|
"loss": 1.6101, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 3.6701570680628275e-06, |
|
"loss": 1.5209, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.6649214659685865e-06, |
|
"loss": 1.4508, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.659685863874346e-06, |
|
"loss": 1.5547, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.654450261780105e-06, |
|
"loss": 1.5269, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.6492146596858642e-06, |
|
"loss": 1.5252, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.643979057591623e-06, |
|
"loss": 1.5595, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.6387434554973826e-06, |
|
"loss": 1.4009, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 3.6335078534031415e-06, |
|
"loss": 1.4614, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.628272251308901e-06, |
|
"loss": 1.5848, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 3.6230366492146603e-06, |
|
"loss": 1.418, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 3.617801047120419e-06, |
|
"loss": 1.6182, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.6125654450261782e-06, |
|
"loss": 1.4291, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 3.6073298429319376e-06, |
|
"loss": 1.5016, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.602094240837697e-06, |
|
"loss": 1.5445, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 3.5968586387434556e-06, |
|
"loss": 1.4518, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 3.591623036649215e-06, |
|
"loss": 1.5133, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 3.5863874345549743e-06, |
|
"loss": 1.5552, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 3.581151832460733e-06, |
|
"loss": 1.4487, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 3.5759162303664922e-06, |
|
"loss": 1.5563, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.5706806282722516e-06, |
|
"loss": 1.4824, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.565445026178011e-06, |
|
"loss": 1.5034, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 3.56020942408377e-06, |
|
"loss": 1.429, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.554973821989529e-06, |
|
"loss": 1.5454, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.5497382198952883e-06, |
|
"loss": 1.6144, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 3.5445026178010473e-06, |
|
"loss": 1.5299, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 3.5392670157068067e-06, |
|
"loss": 1.5063, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 3.534031413612566e-06, |
|
"loss": 1.4603, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 3.5287958115183246e-06, |
|
"loss": 1.5949, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 3.523560209424084e-06, |
|
"loss": 1.5152, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 3.5183246073298434e-06, |
|
"loss": 1.5446, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 3.5130890052356028e-06, |
|
"loss": 1.6619, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 3.5078534031413613e-06, |
|
"loss": 1.5892, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.373134328358209, |
|
"eval_loss": 1.4515025615692139, |
|
"eval_roc_auc": 0.881142317629408, |
|
"eval_runtime": 115.9119, |
|
"eval_samples_per_second": 0.578, |
|
"eval_steps_per_second": 0.578, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 3.5026178010471207e-06, |
|
"loss": 1.5314, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 3.49738219895288e-06, |
|
"loss": 1.4861, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 3.4921465968586386e-06, |
|
"loss": 1.5109, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 3.486910994764398e-06, |
|
"loss": 1.5558, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 3.4816753926701574e-06, |
|
"loss": 1.4709, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 3.4764397905759168e-06, |
|
"loss": 1.4823, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 3.4712041884816753e-06, |
|
"loss": 1.503, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 3.4659685863874347e-06, |
|
"loss": 1.4892, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 3.460732984293194e-06, |
|
"loss": 1.4838, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 3.455497382198953e-06, |
|
"loss": 1.4468, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 3.4502617801047124e-06, |
|
"loss": 1.4746, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 3.4450261780104714e-06, |
|
"loss": 1.5053, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 3.4397905759162308e-06, |
|
"loss": 1.5735, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 3.4345549738219897e-06, |
|
"loss": 1.5715, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 3.429319371727749e-06, |
|
"loss": 1.4935, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 3.4240837696335085e-06, |
|
"loss": 1.5139, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 3.418848167539267e-06, |
|
"loss": 1.5141, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 3.4136125654450264e-06, |
|
"loss": 1.4008, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 3.408376963350786e-06, |
|
"loss": 1.4642, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 3.403141361256545e-06, |
|
"loss": 1.5497, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 3.3979057591623037e-06, |
|
"loss": 1.612, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 3.392670157068063e-06, |
|
"loss": 1.5713, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 3.3874345549738225e-06, |
|
"loss": 1.4733, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 3.382198952879581e-06, |
|
"loss": 1.444, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 3.3769633507853404e-06, |
|
"loss": 1.4756, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 3.3717277486911e-06, |
|
"loss": 1.547, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 3.366492146596859e-06, |
|
"loss": 1.558, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 3.361256544502618e-06, |
|
"loss": 1.4456, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 3.356020942408377e-06, |
|
"loss": 1.5993, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 3.3507853403141365e-06, |
|
"loss": 1.4614, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 3.3455497382198955e-06, |
|
"loss": 1.5135, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 3.340314136125655e-06, |
|
"loss": 1.4615, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 3.335078534031414e-06, |
|
"loss": 1.5132, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 3.3298429319371728e-06, |
|
"loss": 1.5745, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 3.324607329842932e-06, |
|
"loss": 1.5302, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 3.3193717277486915e-06, |
|
"loss": 1.5434, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 3.314136125654451e-06, |
|
"loss": 1.5764, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 3.3089005235602095e-06, |
|
"loss": 1.552, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 3.303664921465969e-06, |
|
"loss": 1.4927, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 3.2984293193717282e-06, |
|
"loss": 1.533, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 3.2931937172774868e-06, |
|
"loss": 1.5709, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 3.287958115183246e-06, |
|
"loss": 1.4734, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 3.2827225130890055e-06, |
|
"loss": 1.4005, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 3.277486910994765e-06, |
|
"loss": 1.5306, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 3.2722513089005235e-06, |
|
"loss": 1.4783, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 3.267015706806283e-06, |
|
"loss": 1.4767, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 3.2617801047120422e-06, |
|
"loss": 1.458, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 3.256544502617801e-06, |
|
"loss": 1.4254, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 3.2513089005235606e-06, |
|
"loss": 1.4295, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 3.2460732984293196e-06, |
|
"loss": 1.4778, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 3.240837696335079e-06, |
|
"loss": 1.4342, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 3.235602094240838e-06, |
|
"loss": 1.3781, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 3.2303664921465973e-06, |
|
"loss": 1.4398, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 3.2251308900523567e-06, |
|
"loss": 1.4211, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 3.219895287958115e-06, |
|
"loss": 1.614, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 3.2146596858638746e-06, |
|
"loss": 1.5052, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 3.209424083769634e-06, |
|
"loss": 1.4412, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 3.2041884816753934e-06, |
|
"loss": 1.3745, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 3.198952879581152e-06, |
|
"loss": 1.3644, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 3.1937172774869113e-06, |
|
"loss": 1.3902, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 3.1884816753926707e-06, |
|
"loss": 1.5347, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 3.183246073298429e-06, |
|
"loss": 1.3773, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 3.1780104712041886e-06, |
|
"loss": 1.4015, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 3.172774869109948e-06, |
|
"loss": 1.4122, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 3.167539267015707e-06, |
|
"loss": 1.5502, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 3.1623036649214663e-06, |
|
"loss": 1.5631, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3.1570680628272253e-06, |
|
"loss": 1.5762, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.5074626865671642, |
|
"eval_loss": 1.42205810546875, |
|
"eval_roc_auc": 0.8996636638766727, |
|
"eval_runtime": 114.7962, |
|
"eval_samples_per_second": 0.584, |
|
"eval_steps_per_second": 0.584, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 3.1518324607329847e-06, |
|
"loss": 1.5411, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 3.1465968586387436e-06, |
|
"loss": 1.4211, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 3.141361256544503e-06, |
|
"loss": 1.4769, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 3.136125654450262e-06, |
|
"loss": 1.452, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 3.130890052356021e-06, |
|
"loss": 1.5483, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 3.1256544502617803e-06, |
|
"loss": 1.4164, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 3.1204188481675397e-06, |
|
"loss": 1.4244, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 3.115183246073299e-06, |
|
"loss": 1.4406, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 3.1099476439790576e-06, |
|
"loss": 1.469, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 3.104712041884817e-06, |
|
"loss": 1.541, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.0994764397905764e-06, |
|
"loss": 1.587, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 3.094240837696335e-06, |
|
"loss": 1.425, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 3.0890052356020943e-06, |
|
"loss": 1.471, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.0837696335078537e-06, |
|
"loss": 1.3477, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 3.078534031413613e-06, |
|
"loss": 1.4807, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 3.0732984293193716e-06, |
|
"loss": 1.3969, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 3.068062827225131e-06, |
|
"loss": 1.5848, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 3.0628272251308904e-06, |
|
"loss": 1.462, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 3.0575916230366494e-06, |
|
"loss": 1.4761, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 3.0523560209424088e-06, |
|
"loss": 1.5367, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 3.0471204188481677e-06, |
|
"loss": 1.6053, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 3.041884816753927e-06, |
|
"loss": 1.3879, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 3.036649214659686e-06, |
|
"loss": 1.4343, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 3.0314136125654455e-06, |
|
"loss": 1.5917, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 3.0261780104712044e-06, |
|
"loss": 1.5117, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 3.0209424083769634e-06, |
|
"loss": 1.4879, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 3.0157068062827228e-06, |
|
"loss": 1.4032, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 3.010471204188482e-06, |
|
"loss": 1.579, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 3.0052356020942407e-06, |
|
"loss": 1.5148, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 3e-06, |
|
"loss": 1.319, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 2.9947643979057595e-06, |
|
"loss": 1.473, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 2.989528795811519e-06, |
|
"loss": 1.4904, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 2.9842931937172774e-06, |
|
"loss": 1.4602, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 2.9790575916230368e-06, |
|
"loss": 1.4704, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 2.973821989528796e-06, |
|
"loss": 1.4505, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 2.968586387434555e-06, |
|
"loss": 1.6099, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 2.963350785340314e-06, |
|
"loss": 1.4174, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 2.9581151832460735e-06, |
|
"loss": 1.427, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 2.952879581151833e-06, |
|
"loss": 1.3987, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 2.947643979057592e-06, |
|
"loss": 1.5228, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 2.942408376963351e-06, |
|
"loss": 1.5097, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 2.93717277486911e-06, |
|
"loss": 1.3947, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 2.931937172774869e-06, |
|
"loss": 1.4502, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 2.9267015706806285e-06, |
|
"loss": 1.524, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 2.921465968586388e-06, |
|
"loss": 1.3004, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 2.9162303664921473e-06, |
|
"loss": 1.4197, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 2.910994764397906e-06, |
|
"loss": 1.4906, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 2.905759162303665e-06, |
|
"loss": 1.5555, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 2.9005235602094246e-06, |
|
"loss": 1.3979, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 2.895287958115183e-06, |
|
"loss": 1.4629, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 2.8900523560209425e-06, |
|
"loss": 1.4555, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 2.884816753926702e-06, |
|
"loss": 1.3624, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 2.8795811518324613e-06, |
|
"loss": 1.3357, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 2.87434554973822e-06, |
|
"loss": 1.3743, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 2.869109947643979e-06, |
|
"loss": 1.5461, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 2.8638743455497386e-06, |
|
"loss": 1.4473, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 2.8586387434554976e-06, |
|
"loss": 1.4294, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 2.853403141361257e-06, |
|
"loss": 1.4106, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 2.848167539267016e-06, |
|
"loss": 1.5031, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 2.842931937172775e-06, |
|
"loss": 1.3915, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 2.8376963350785342e-06, |
|
"loss": 1.4706, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 2.8324607329842936e-06, |
|
"loss": 1.5077, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 2.8272251308900526e-06, |
|
"loss": 1.5472, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 2.8219895287958116e-06, |
|
"loss": 1.4057, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 2.816753926701571e-06, |
|
"loss": 1.3993, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 2.8115183246073303e-06, |
|
"loss": 1.3983, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 2.806282722513089e-06, |
|
"loss": 1.4085, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.582089552238806, |
|
"eval_loss": 1.3650407791137695, |
|
"eval_roc_auc": 0.8868159463839701, |
|
"eval_runtime": 115.2498, |
|
"eval_samples_per_second": 0.581, |
|
"eval_steps_per_second": 0.581, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 2.8010471204188483e-06, |
|
"loss": 1.3493, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 2.7958115183246076e-06, |
|
"loss": 1.4412, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 2.790575916230367e-06, |
|
"loss": 1.4768, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 2.7853403141361256e-06, |
|
"loss": 1.2879, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 2.780104712041885e-06, |
|
"loss": 1.3029, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 2.7748691099476443e-06, |
|
"loss": 1.4652, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 2.7696335078534033e-06, |
|
"loss": 1.3336, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 2.7643979057591623e-06, |
|
"loss": 1.4184, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 2.7591623036649216e-06, |
|
"loss": 1.5299, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 2.753926701570681e-06, |
|
"loss": 1.3983, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 2.74869109947644e-06, |
|
"loss": 1.4203, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 2.7434554973821994e-06, |
|
"loss": 1.2893, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 2.7382198952879583e-06, |
|
"loss": 1.2741, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 2.7329842931937173e-06, |
|
"loss": 1.3778, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 2.7277486910994767e-06, |
|
"loss": 1.3175, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 2.722513089005236e-06, |
|
"loss": 1.4461, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 2.7172774869109955e-06, |
|
"loss": 1.4851, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 2.712041884816754e-06, |
|
"loss": 1.3368, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 2.7068062827225134e-06, |
|
"loss": 1.4123, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 2.7015706806282728e-06, |
|
"loss": 1.4103, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 2.6963350785340313e-06, |
|
"loss": 1.3252, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 2.6910994764397907e-06, |
|
"loss": 1.4189, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 2.68586387434555e-06, |
|
"loss": 1.3527, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 2.6806282722513095e-06, |
|
"loss": 1.3924, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 2.675392670157068e-06, |
|
"loss": 1.5393, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 2.6701570680628274e-06, |
|
"loss": 1.4622, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 2.6649214659685868e-06, |
|
"loss": 1.3947, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 2.6596858638743457e-06, |
|
"loss": 1.3809, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 2.6544502617801047e-06, |
|
"loss": 1.3288, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 2.649214659685864e-06, |
|
"loss": 1.4655, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 2.643979057591623e-06, |
|
"loss": 1.4743, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 2.6387434554973824e-06, |
|
"loss": 1.4624, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 2.633507853403142e-06, |
|
"loss": 1.3989, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 2.6282722513089008e-06, |
|
"loss": 1.4216, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 2.6230366492146597e-06, |
|
"loss": 1.303, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 2.617801047120419e-06, |
|
"loss": 1.5343, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 2.6125654450261785e-06, |
|
"loss": 1.2747, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 2.607329842931937e-06, |
|
"loss": 1.4191, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 2.6020942408376964e-06, |
|
"loss": 1.3868, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 2.596858638743456e-06, |
|
"loss": 1.3678, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 2.591623036649215e-06, |
|
"loss": 1.4754, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 2.5863874345549737e-06, |
|
"loss": 1.4908, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 2.581151832460733e-06, |
|
"loss": 1.4364, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 2.5759162303664925e-06, |
|
"loss": 1.3136, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 2.5706806282722515e-06, |
|
"loss": 1.3757, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 2.5654450261780104e-06, |
|
"loss": 1.4906, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 2.56020942408377e-06, |
|
"loss": 1.484, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 2.554973821989529e-06, |
|
"loss": 1.4083, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 2.549738219895288e-06, |
|
"loss": 1.4492, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 2.5445026178010475e-06, |
|
"loss": 1.4013, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 2.5392670157068065e-06, |
|
"loss": 1.3524, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 2.5340314136125655e-06, |
|
"loss": 1.4877, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.528795811518325e-06, |
|
"loss": 1.4133, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 2.5235602094240842e-06, |
|
"loss": 1.4107, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 2.518324607329843e-06, |
|
"loss": 1.4637, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 2.513089005235602e-06, |
|
"loss": 1.3032, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 2.5078534031413615e-06, |
|
"loss": 1.5095, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 2.502617801047121e-06, |
|
"loss": 1.3128, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 2.49738219895288e-06, |
|
"loss": 1.3374, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 2.492146596858639e-06, |
|
"loss": 1.3599, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 2.4869109947643982e-06, |
|
"loss": 1.5406, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 2.481675392670157e-06, |
|
"loss": 1.4901, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 2.476439790575916e-06, |
|
"loss": 1.3618, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 2.4712041884816756e-06, |
|
"loss": 1.4245, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.4659685863874345e-06, |
|
"loss": 1.4703, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 2.460732984293194e-06, |
|
"loss": 1.4407, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 2.455497382198953e-06, |
|
"loss": 1.3602, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.5970149253731343, |
|
"eval_loss": 1.3232402801513672, |
|
"eval_roc_auc": 0.8994015054789634, |
|
"eval_runtime": 114.1138, |
|
"eval_samples_per_second": 0.587, |
|
"eval_steps_per_second": 0.587, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 2.4502617801047122e-06, |
|
"loss": 1.3341, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 2.4450261780104716e-06, |
|
"loss": 1.3392, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 2.4397905759162306e-06, |
|
"loss": 1.3314, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 2.43455497382199e-06, |
|
"loss": 1.4066, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 2.429319371727749e-06, |
|
"loss": 1.493, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 2.4240837696335083e-06, |
|
"loss": 1.2613, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 2.4188481675392673e-06, |
|
"loss": 1.3318, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 2.4136125654450263e-06, |
|
"loss": 1.3639, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 2.4083769633507856e-06, |
|
"loss": 1.3511, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 2.4031413612565446e-06, |
|
"loss": 1.4716, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 2.397905759162304e-06, |
|
"loss": 1.4163, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 2.392670157068063e-06, |
|
"loss": 1.4113, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 2.3874345549738223e-06, |
|
"loss": 1.7564, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 2.3821989528795813e-06, |
|
"loss": 1.3595, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 2.3769633507853403e-06, |
|
"loss": 1.3879, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 2.3717277486910996e-06, |
|
"loss": 1.3285, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 2.3664921465968586e-06, |
|
"loss": 1.3988, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 2.361256544502618e-06, |
|
"loss": 1.2299, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 2.356020942408377e-06, |
|
"loss": 1.401, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 2.3507853403141363e-06, |
|
"loss": 1.4867, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 2.3455497382198957e-06, |
|
"loss": 1.5886, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 2.3403141361256547e-06, |
|
"loss": 1.3351, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 2.335078534031414e-06, |
|
"loss": 1.281, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 2.329842931937173e-06, |
|
"loss": 1.3566, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 2.3246073298429324e-06, |
|
"loss": 1.2541, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 2.3193717277486914e-06, |
|
"loss": 1.4523, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 2.3141361256544503e-06, |
|
"loss": 1.3455, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 2.3089005235602097e-06, |
|
"loss": 1.4539, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 2.3036649214659687e-06, |
|
"loss": 1.4109, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 2.298429319371728e-06, |
|
"loss": 1.3696, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 2.293193717277487e-06, |
|
"loss": 1.4263, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 2.2879581151832464e-06, |
|
"loss": 1.4543, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 2.2827225130890054e-06, |
|
"loss": 1.4815, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 2.2774869109947643e-06, |
|
"loss": 1.4155, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 2.2722513089005237e-06, |
|
"loss": 1.4503, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 2.2670157068062827e-06, |
|
"loss": 1.3865, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 2.261780104712042e-06, |
|
"loss": 1.4291, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 2.256544502617801e-06, |
|
"loss": 1.2442, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 2.2513089005235604e-06, |
|
"loss": 1.2507, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 2.2460732984293194e-06, |
|
"loss": 1.4016, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 2.2408376963350788e-06, |
|
"loss": 1.3312, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 2.235602094240838e-06, |
|
"loss": 1.4893, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 2.230366492146597e-06, |
|
"loss": 1.4599, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 2.2251308900523565e-06, |
|
"loss": 1.245, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 2.2198952879581155e-06, |
|
"loss": 1.3642, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 2.2146596858638744e-06, |
|
"loss": 1.2514, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 2.209424083769634e-06, |
|
"loss": 1.298, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 2.2041884816753928e-06, |
|
"loss": 1.3461, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 2.198952879581152e-06, |
|
"loss": 1.2911, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 2.193717277486911e-06, |
|
"loss": 1.4528, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 2.18848167539267e-06, |
|
"loss": 1.3732, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 2.1832460732984295e-06, |
|
"loss": 1.3621, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 2.1780104712041884e-06, |
|
"loss": 1.2996, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 2.172774869109948e-06, |
|
"loss": 1.2922, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 2.1675392670157068e-06, |
|
"loss": 1.3483, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 2.162303664921466e-06, |
|
"loss": 1.3111, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 2.157068062827225e-06, |
|
"loss": 1.4849, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 2.1518324607329845e-06, |
|
"loss": 1.3469, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 2.1465968586387435e-06, |
|
"loss": 1.3223, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 2.141361256544503e-06, |
|
"loss": 1.4483, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 2.1361256544502622e-06, |
|
"loss": 1.5507, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 2.130890052356021e-06, |
|
"loss": 1.3229, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 2.1256544502617806e-06, |
|
"loss": 1.3307, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 2.1204188481675396e-06, |
|
"loss": 1.2268, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 2.1151832460732985e-06, |
|
"loss": 1.3881, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 2.109947643979058e-06, |
|
"loss": 1.3719, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 2.104712041884817e-06, |
|
"loss": 1.4634, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.4925373134328358, |
|
"eval_loss": 1.334736943244934, |
|
"eval_roc_auc": 0.8865149903059535, |
|
"eval_runtime": 114.7502, |
|
"eval_samples_per_second": 0.584, |
|
"eval_steps_per_second": 0.584, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 2.0994764397905762e-06, |
|
"loss": 1.2737, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 2.094240837696335e-06, |
|
"loss": 1.3233, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 2.089005235602094e-06, |
|
"loss": 1.1945, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 2.0837696335078536e-06, |
|
"loss": 1.238, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 2.0785340314136125e-06, |
|
"loss": 1.419, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 2.073298429319372e-06, |
|
"loss": 1.2901, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 2.068062827225131e-06, |
|
"loss": 1.3354, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 2.0628272251308902e-06, |
|
"loss": 1.3721, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 2.057591623036649e-06, |
|
"loss": 1.361, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 2.0523560209424086e-06, |
|
"loss": 1.4684, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 2.0471204188481676e-06, |
|
"loss": 1.2416, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 2.041884816753927e-06, |
|
"loss": 1.3574, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 2.0366492146596863e-06, |
|
"loss": 1.4478, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 2.0314136125654453e-06, |
|
"loss": 1.3652, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 2.0261780104712047e-06, |
|
"loss": 1.2838, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 2.0209424083769636e-06, |
|
"loss": 1.4077, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 2.0157068062827226e-06, |
|
"loss": 1.4663, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 2.010471204188482e-06, |
|
"loss": 1.3451, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 2.005235602094241e-06, |
|
"loss": 1.2607, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.1843, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 1.9947643979057593e-06, |
|
"loss": 1.3023, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 1.9895287958115183e-06, |
|
"loss": 1.2757, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 1.9842931937172776e-06, |
|
"loss": 1.3397, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 1.9790575916230366e-06, |
|
"loss": 1.3043, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 1.973821989528796e-06, |
|
"loss": 1.2754, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 1.968586387434555e-06, |
|
"loss": 1.478, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 1.9633507853403143e-06, |
|
"loss": 1.4255, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 1.9581151832460733e-06, |
|
"loss": 1.291, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 1.9528795811518327e-06, |
|
"loss": 1.2604, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 1.9476439790575916e-06, |
|
"loss": 1.4036, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 1.942408376963351e-06, |
|
"loss": 1.3565, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 1.9371727748691104e-06, |
|
"loss": 1.4375, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 1.9319371727748694e-06, |
|
"loss": 1.5023, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 1.9267015706806283e-06, |
|
"loss": 1.3839, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 1.9214659685863877e-06, |
|
"loss": 1.1494, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 1.9162303664921467e-06, |
|
"loss": 1.3491, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 1.910994764397906e-06, |
|
"loss": 1.3917, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 1.905759162303665e-06, |
|
"loss": 1.4043, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 1.9005235602094244e-06, |
|
"loss": 1.2889, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 1.8952879581151834e-06, |
|
"loss": 1.4794, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 1.8900523560209423e-06, |
|
"loss": 1.3874, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 1.8848167539267017e-06, |
|
"loss": 1.2018, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 1.879581151832461e-06, |
|
"loss": 1.3614, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 1.87434554973822e-06, |
|
"loss": 1.3398, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 1.8691099476439793e-06, |
|
"loss": 1.3111, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 1.8638743455497384e-06, |
|
"loss": 1.4621, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 1.8586387434554976e-06, |
|
"loss": 1.4852, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 1.8534031413612566e-06, |
|
"loss": 1.4208, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 1.848167539267016e-06, |
|
"loss": 1.2835, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 1.842931937172775e-06, |
|
"loss": 1.3615, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 1.8376963350785343e-06, |
|
"loss": 1.3334, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 1.8324607329842933e-06, |
|
"loss": 1.2462, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 1.8272251308900524e-06, |
|
"loss": 1.4433, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 1.8219895287958116e-06, |
|
"loss": 1.2258, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 1.8167539267015708e-06, |
|
"loss": 1.3308, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 1.8115183246073302e-06, |
|
"loss": 1.4901, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 1.8062827225130891e-06, |
|
"loss": 1.2137, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 1.8010471204188485e-06, |
|
"loss": 1.2923, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 1.7958115183246075e-06, |
|
"loss": 1.345, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 1.7905759162303664e-06, |
|
"loss": 1.2361, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 1.7853403141361258e-06, |
|
"loss": 1.5686, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 1.780104712041885e-06, |
|
"loss": 1.3806, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 1.7748691099476442e-06, |
|
"loss": 1.3243, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 1.7696335078534033e-06, |
|
"loss": 1.2921, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 1.7643979057591623e-06, |
|
"loss": 1.3123, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 1.7591623036649217e-06, |
|
"loss": 1.353, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 1.7539267015706806e-06, |
|
"loss": 1.301, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy": 0.7164179104477612, |
|
"eval_loss": 1.2525287866592407, |
|
"eval_roc_auc": 0.9251711504318256, |
|
"eval_runtime": 113.7082, |
|
"eval_samples_per_second": 0.589, |
|
"eval_steps_per_second": 0.589, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 1.74869109947644e-06, |
|
"loss": 1.4082, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 1.743455497382199e-06, |
|
"loss": 1.2513, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 1.7382198952879584e-06, |
|
"loss": 1.2786, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 1.7329842931937173e-06, |
|
"loss": 1.3847, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 1.7277486910994765e-06, |
|
"loss": 1.4659, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 1.7225130890052357e-06, |
|
"loss": 1.3991, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 1.7172774869109949e-06, |
|
"loss": 1.3691, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"learning_rate": 1.7120418848167542e-06, |
|
"loss": 1.3431, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 1.7068062827225132e-06, |
|
"loss": 1.1488, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"learning_rate": 1.7015706806282726e-06, |
|
"loss": 1.2661, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 1.6963350785340316e-06, |
|
"loss": 1.4463, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 1.6910994764397905e-06, |
|
"loss": 1.2989, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 1.68586387434555e-06, |
|
"loss": 1.2203, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 1.680628272251309e-06, |
|
"loss": 1.2228, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"learning_rate": 1.6753926701570683e-06, |
|
"loss": 1.2175, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"learning_rate": 1.6701570680628274e-06, |
|
"loss": 1.3362, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 1.6649214659685864e-06, |
|
"loss": 1.3643, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 1.6596858638743458e-06, |
|
"loss": 1.2681, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 1.6544502617801047e-06, |
|
"loss": 1.2416, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 1.6492146596858641e-06, |
|
"loss": 1.2755, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 1.643979057591623e-06, |
|
"loss": 1.3008, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 1.6387434554973825e-06, |
|
"loss": 1.3513, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 1.6335078534031414e-06, |
|
"loss": 1.3498, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 1.6282722513089006e-06, |
|
"loss": 1.3968, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 1.6230366492146598e-06, |
|
"loss": 1.3529, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 1.617801047120419e-06, |
|
"loss": 1.4367, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 1.6125654450261783e-06, |
|
"loss": 1.3278, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 1.6073298429319373e-06, |
|
"loss": 1.2611, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 1.6020942408376967e-06, |
|
"loss": 1.2905, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 1.5968586387434556e-06, |
|
"loss": 1.5078, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 1.5916230366492146e-06, |
|
"loss": 1.327, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 1.586387434554974e-06, |
|
"loss": 1.3342, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 1.5811518324607332e-06, |
|
"loss": 1.3534, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 1.5759162303664923e-06, |
|
"loss": 1.4769, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 1.5706806282722515e-06, |
|
"loss": 1.2854, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"learning_rate": 1.5654450261780105e-06, |
|
"loss": 1.3121, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 1.5602094240837699e-06, |
|
"loss": 1.3069, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 1.5549738219895288e-06, |
|
"loss": 1.3382, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 1.5497382198952882e-06, |
|
"loss": 1.2994, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 1.5445026178010472e-06, |
|
"loss": 1.3024, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 1.5392670157068066e-06, |
|
"loss": 1.3612, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 1.5340314136125655e-06, |
|
"loss": 1.2902, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 1.5287958115183247e-06, |
|
"loss": 1.2869, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 1.5235602094240839e-06, |
|
"loss": 1.2834, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 1.518324607329843e-06, |
|
"loss": 1.2747, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 1.5130890052356022e-06, |
|
"loss": 1.3888, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 1.5078534031413614e-06, |
|
"loss": 1.4652, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 10.72, |
|
"learning_rate": 1.5026178010471203e-06, |
|
"loss": 1.3828, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 1.4973821989528797e-06, |
|
"loss": 1.3641, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 1.4921465968586387e-06, |
|
"loss": 1.4433, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 1.486910994764398e-06, |
|
"loss": 1.3673, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 1.481675392670157e-06, |
|
"loss": 1.2609, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 1.4764397905759164e-06, |
|
"loss": 1.3625, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"learning_rate": 1.4712041884816756e-06, |
|
"loss": 1.3957, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 1.4659685863874346e-06, |
|
"loss": 1.1985, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 1.460732984293194e-06, |
|
"loss": 1.2971, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 1.455497382198953e-06, |
|
"loss": 1.3019, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 1.4502617801047123e-06, |
|
"loss": 1.2214, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 1.4450261780104713e-06, |
|
"loss": 1.2505, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 1.4397905759162306e-06, |
|
"loss": 1.365, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 1.4345549738219896e-06, |
|
"loss": 1.2999, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 1.4293193717277488e-06, |
|
"loss": 1.4429, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 1.424083769633508e-06, |
|
"loss": 1.1743, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 1.4188481675392671e-06, |
|
"loss": 1.2749, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 1.4136125654450263e-06, |
|
"loss": 1.269, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 1.4083769633507855e-06, |
|
"loss": 1.4313, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 1.4031413612565444e-06, |
|
"loss": 1.2545, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_accuracy": 0.7164179104477612, |
|
"eval_loss": 1.2266749143600464, |
|
"eval_roc_auc": 0.9242875950596904, |
|
"eval_runtime": 114.8426, |
|
"eval_samples_per_second": 0.583, |
|
"eval_steps_per_second": 0.583, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 1.3979057591623038e-06, |
|
"loss": 1.3544, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 1.3926701570680628e-06, |
|
"loss": 1.2447, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 1.3874345549738222e-06, |
|
"loss": 1.1688, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 1.3821989528795811e-06, |
|
"loss": 1.222, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 1.3769633507853405e-06, |
|
"loss": 1.1891, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 1.3717277486910997e-06, |
|
"loss": 1.3033, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 1.3664921465968586e-06, |
|
"loss": 1.239, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 11.12, |
|
"learning_rate": 1.361256544502618e-06, |
|
"loss": 1.4937, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 1.356020942408377e-06, |
|
"loss": 1.2524, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"learning_rate": 1.3507853403141364e-06, |
|
"loss": 1.3377, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 1.3455497382198953e-06, |
|
"loss": 1.2181, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"learning_rate": 1.3403141361256547e-06, |
|
"loss": 1.4058, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 1.3350785340314137e-06, |
|
"loss": 1.3541, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 1.3298429319371729e-06, |
|
"loss": 1.2097, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"learning_rate": 1.324607329842932e-06, |
|
"loss": 1.1825, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 1.3193717277486912e-06, |
|
"loss": 1.1941, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 1.3141361256544504e-06, |
|
"loss": 1.368, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 1.3089005235602096e-06, |
|
"loss": 1.5042, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 1.3036649214659685e-06, |
|
"loss": 1.3033, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 1.298429319371728e-06, |
|
"loss": 1.236, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 1.2931937172774869e-06, |
|
"loss": 1.3492, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 1.2879581151832463e-06, |
|
"loss": 1.3996, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"learning_rate": 1.2827225130890052e-06, |
|
"loss": 1.3104, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 1.2774869109947646e-06, |
|
"loss": 1.5012, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 1.2722513089005238e-06, |
|
"loss": 1.5104, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 1.2670157068062827e-06, |
|
"loss": 1.3532, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 1.2617801047120421e-06, |
|
"loss": 1.1661, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 1.256544502617801e-06, |
|
"loss": 1.3541, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 1.2513089005235605e-06, |
|
"loss": 1.3815, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 1.2460732984293194e-06, |
|
"loss": 1.1549, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 1.2408376963350786e-06, |
|
"loss": 1.5656, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 1.2356020942408378e-06, |
|
"loss": 1.1574, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 1.230366492146597e-06, |
|
"loss": 1.2198, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 1.2251308900523561e-06, |
|
"loss": 1.2281, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"learning_rate": 1.2198952879581153e-06, |
|
"loss": 1.3165, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 11.54, |
|
"learning_rate": 1.2146596858638745e-06, |
|
"loss": 1.3714, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 1.2094240837696336e-06, |
|
"loss": 1.2312, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 11.57, |
|
"learning_rate": 1.2041884816753928e-06, |
|
"loss": 1.335, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 1.198952879581152e-06, |
|
"loss": 1.1492, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"learning_rate": 1.1937172774869112e-06, |
|
"loss": 1.2127, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 1.1884816753926701e-06, |
|
"loss": 1.2743, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 1.1832460732984293e-06, |
|
"loss": 1.454, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 1.1780104712041885e-06, |
|
"loss": 1.4488, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"learning_rate": 1.1727748691099479e-06, |
|
"loss": 1.1444, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 1.167539267015707e-06, |
|
"loss": 1.3731, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 1.1623036649214662e-06, |
|
"loss": 1.2858, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 1.1570680628272252e-06, |
|
"loss": 1.5179, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 11.72, |
|
"learning_rate": 1.1518324607329843e-06, |
|
"loss": 1.2574, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 1.1465968586387435e-06, |
|
"loss": 1.184, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 11.75, |
|
"learning_rate": 1.1413612565445027e-06, |
|
"loss": 1.2148, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 1.1361256544502619e-06, |
|
"loss": 1.5347, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 11.78, |
|
"learning_rate": 1.130890052356021e-06, |
|
"loss": 1.4017, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 1.1256544502617802e-06, |
|
"loss": 1.3218, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 1.1204188481675394e-06, |
|
"loss": 1.3168, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"learning_rate": 1.1151832460732986e-06, |
|
"loss": 1.3802, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 1.1099476439790577e-06, |
|
"loss": 1.2083, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"learning_rate": 1.104712041884817e-06, |
|
"loss": 1.3997, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 1.099476439790576e-06, |
|
"loss": 1.2884, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 1.094240837696335e-06, |
|
"loss": 1.3002, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 1.0890052356020942e-06, |
|
"loss": 1.2945, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 1.0837696335078534e-06, |
|
"loss": 1.2357, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 1.0785340314136126e-06, |
|
"loss": 1.1826, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"learning_rate": 1.0732984293193717e-06, |
|
"loss": 1.4052, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 1.0680628272251311e-06, |
|
"loss": 1.3942, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 1.0628272251308903e-06, |
|
"loss": 1.2643, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 1.0575916230366493e-06, |
|
"loss": 1.2621, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 1.0523560209424084e-06, |
|
"loss": 1.209, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.7014925373134329, |
|
"eval_loss": 1.199011206626892, |
|
"eval_roc_auc": 0.9410022590782272, |
|
"eval_runtime": 115.3131, |
|
"eval_samples_per_second": 0.581, |
|
"eval_steps_per_second": 0.581, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 1.0471204188481676e-06, |
|
"loss": 1.2375, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 1.0418848167539268e-06, |
|
"loss": 1.2546, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"learning_rate": 1.036649214659686e-06, |
|
"loss": 1.3091, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 1.0314136125654451e-06, |
|
"loss": 1.1969, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 1.0261780104712043e-06, |
|
"loss": 1.1491, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 1.0209424083769635e-06, |
|
"loss": 1.42, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 1.0157068062827226e-06, |
|
"loss": 1.2053, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"learning_rate": 1.0104712041884818e-06, |
|
"loss": 1.2398, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 1.005235602094241e-06, |
|
"loss": 1.179, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.2398, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 9.947643979057591e-07, |
|
"loss": 1.3218, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"learning_rate": 9.895287958115183e-07, |
|
"loss": 1.2632, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 9.842931937172775e-07, |
|
"loss": 1.2483, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 9.790575916230366e-07, |
|
"loss": 1.2987, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"learning_rate": 9.738219895287958e-07, |
|
"loss": 1.5495, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 9.685863874345552e-07, |
|
"loss": 1.1955, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"learning_rate": 9.633507853403142e-07, |
|
"loss": 1.1993, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 9.581151832460733e-07, |
|
"loss": 1.3928, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 9.528795811518325e-07, |
|
"loss": 1.3699, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 9.476439790575917e-07, |
|
"loss": 1.2026, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 9.424083769633509e-07, |
|
"loss": 1.4298, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 9.3717277486911e-07, |
|
"loss": 1.1997, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 9.319371727748692e-07, |
|
"loss": 1.2377, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 9.267015706806283e-07, |
|
"loss": 1.3125, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 9.214659685863875e-07, |
|
"loss": 1.373, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 9.162303664921466e-07, |
|
"loss": 1.169, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 9.109947643979058e-07, |
|
"loss": 1.4811, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 9.057591623036651e-07, |
|
"loss": 1.1686, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"learning_rate": 9.005235602094243e-07, |
|
"loss": 1.2359, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 8.952879581151832e-07, |
|
"loss": 1.1424, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 8.900523560209425e-07, |
|
"loss": 1.4827, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 12.48, |
|
"learning_rate": 8.848167539267017e-07, |
|
"loss": 1.3714, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 8.795811518324608e-07, |
|
"loss": 1.3198, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 8.7434554973822e-07, |
|
"loss": 1.2483, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"learning_rate": 8.691099476439792e-07, |
|
"loss": 1.3707, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 8.638743455497383e-07, |
|
"loss": 1.2405, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 8.586387434554974e-07, |
|
"loss": 1.1803, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 12.57, |
|
"learning_rate": 8.534031413612566e-07, |
|
"loss": 1.205, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 8.481675392670158e-07, |
|
"loss": 1.2485, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 8.42931937172775e-07, |
|
"loss": 1.3933, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 8.376963350785341e-07, |
|
"loss": 1.235, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 8.324607329842932e-07, |
|
"loss": 1.6294, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 8.272251308900524e-07, |
|
"loss": 1.2848, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 8.219895287958115e-07, |
|
"loss": 1.3612, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 8.167539267015707e-07, |
|
"loss": 1.2749, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 8.115183246073299e-07, |
|
"loss": 1.4122, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 8.062827225130892e-07, |
|
"loss": 1.62, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 8.010471204188483e-07, |
|
"loss": 1.2063, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"learning_rate": 7.958115183246073e-07, |
|
"loss": 1.1882, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 7.905759162303666e-07, |
|
"loss": 1.3236, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 7.853403141361258e-07, |
|
"loss": 1.3277, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"learning_rate": 7.801047120418849e-07, |
|
"loss": 1.2155, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 7.748691099476441e-07, |
|
"loss": 1.3083, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 7.696335078534033e-07, |
|
"loss": 1.2612, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"learning_rate": 7.643979057591623e-07, |
|
"loss": 1.3424, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 7.591623036649215e-07, |
|
"loss": 1.4033, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"learning_rate": 7.539267015706807e-07, |
|
"loss": 1.0953, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 7.486910994764399e-07, |
|
"loss": 1.1239, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"learning_rate": 7.43455497382199e-07, |
|
"loss": 1.1509, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 7.382198952879582e-07, |
|
"loss": 1.286, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 7.329842931937173e-07, |
|
"loss": 1.255, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 7.277486910994765e-07, |
|
"loss": 1.2595, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 7.225130890052356e-07, |
|
"loss": 1.3704, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 7.172774869109948e-07, |
|
"loss": 1.2514, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 7.12041884816754e-07, |
|
"loss": 1.3482, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 7.068062827225131e-07, |
|
"loss": 1.3112, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 7.015706806282722e-07, |
|
"loss": 1.2827, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.6865671641791045, |
|
"eval_loss": 1.2039672136306763, |
|
"eval_roc_auc": 0.9386177600949199, |
|
"eval_runtime": 114.8773, |
|
"eval_samples_per_second": 0.583, |
|
"eval_steps_per_second": 0.583, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"learning_rate": 6.963350785340314e-07, |
|
"loss": 1.3363, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"learning_rate": 6.910994764397906e-07, |
|
"loss": 1.3538, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"learning_rate": 6.858638743455498e-07, |
|
"loss": 1.1828, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"learning_rate": 6.80628272251309e-07, |
|
"loss": 1.269, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"learning_rate": 6.753926701570682e-07, |
|
"loss": 1.3859, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"learning_rate": 6.701570680628274e-07, |
|
"loss": 1.3295, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 6.649214659685864e-07, |
|
"loss": 1.2554, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 6.596858638743456e-07, |
|
"loss": 1.3383, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"learning_rate": 6.544502617801048e-07, |
|
"loss": 1.2203, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"learning_rate": 6.49214659685864e-07, |
|
"loss": 1.1111, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"learning_rate": 6.439790575916231e-07, |
|
"loss": 1.3303, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 6.387434554973823e-07, |
|
"loss": 1.2962, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 6.335078534031414e-07, |
|
"loss": 1.1112, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 6.282722513089005e-07, |
|
"loss": 1.2808, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"learning_rate": 6.230366492146597e-07, |
|
"loss": 1.3277, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 6.178010471204189e-07, |
|
"loss": 1.2698, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 13.25, |
|
"learning_rate": 6.125654450261781e-07, |
|
"loss": 1.3752, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 6.073298429319372e-07, |
|
"loss": 1.3124, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 13.28, |
|
"learning_rate": 6.020942408376964e-07, |
|
"loss": 1.381, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 5.968586387434556e-07, |
|
"loss": 1.1912, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 13.31, |
|
"learning_rate": 5.916230366492147e-07, |
|
"loss": 1.1615, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 5.863874345549739e-07, |
|
"loss": 1.2712, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 5.811518324607331e-07, |
|
"loss": 1.2132, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"learning_rate": 5.759162303664922e-07, |
|
"loss": 1.1343, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 5.706806282722513e-07, |
|
"loss": 1.1905, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 5.654450261780105e-07, |
|
"loss": 1.1824, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 5.602094240837697e-07, |
|
"loss": 1.2526, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 5.549738219895289e-07, |
|
"loss": 1.2113, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"learning_rate": 5.49738219895288e-07, |
|
"loss": 1.2196, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 13.45, |
|
"learning_rate": 5.445026178010471e-07, |
|
"loss": 1.3181, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 13.46, |
|
"learning_rate": 5.392670157068063e-07, |
|
"loss": 1.4086, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"learning_rate": 5.340314136125656e-07, |
|
"loss": 1.2798, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 5.287958115183246e-07, |
|
"loss": 1.2169, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"learning_rate": 5.235602094240838e-07, |
|
"loss": 1.3561, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 5.18324607329843e-07, |
|
"loss": 1.3644, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 5.130890052356021e-07, |
|
"loss": 1.2416, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 13.55, |
|
"learning_rate": 5.078534031413613e-07, |
|
"loss": 1.231, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"learning_rate": 5.026178010471205e-07, |
|
"loss": 1.2572, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 4.973821989528796e-07, |
|
"loss": 1.4077, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 4.921465968586387e-07, |
|
"loss": 1.2956, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 4.869109947643979e-07, |
|
"loss": 1.2224, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 4.816753926701571e-07, |
|
"loss": 1.5222, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"learning_rate": 4.7643979057591626e-07, |
|
"loss": 1.4524, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 4.7120418848167543e-07, |
|
"loss": 1.4276, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 4.659685863874346e-07, |
|
"loss": 1.2812, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 4.6073298429319373e-07, |
|
"loss": 1.2272, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 4.554973821989529e-07, |
|
"loss": 1.3103, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 4.5026178010471213e-07, |
|
"loss": 1.2374, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 13.73, |
|
"learning_rate": 4.4502617801047125e-07, |
|
"loss": 1.1095, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 4.397905759162304e-07, |
|
"loss": 1.2857, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 4.345549738219896e-07, |
|
"loss": 1.1178, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"learning_rate": 4.293193717277487e-07, |
|
"loss": 1.1583, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 4.240837696335079e-07, |
|
"loss": 1.3165, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"learning_rate": 4.1884816753926706e-07, |
|
"loss": 1.2868, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 4.136125654450262e-07, |
|
"loss": 1.307, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 4.0837696335078536e-07, |
|
"loss": 1.2476, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"learning_rate": 4.031413612565446e-07, |
|
"loss": 1.2712, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"learning_rate": 3.9790575916230365e-07, |
|
"loss": 1.3732, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 13.88, |
|
"learning_rate": 3.926701570680629e-07, |
|
"loss": 1.2432, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 3.8743455497382205e-07, |
|
"loss": 1.2804, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 3.8219895287958117e-07, |
|
"loss": 1.2338, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 3.7696335078534035e-07, |
|
"loss": 1.2115, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"learning_rate": 3.717277486910995e-07, |
|
"loss": 1.3014, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 3.6649214659685864e-07, |
|
"loss": 1.2881, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 3.612565445026178e-07, |
|
"loss": 1.1988, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"learning_rate": 3.56020942408377e-07, |
|
"loss": 1.2895, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 3.507853403141361e-07, |
|
"loss": 1.148, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.746268656716418, |
|
"eval_loss": 1.180509090423584, |
|
"eval_roc_auc": 0.9466529853794006, |
|
"eval_runtime": 114.9939, |
|
"eval_samples_per_second": 0.583, |
|
"eval_steps_per_second": 0.583, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 14.01, |
|
"learning_rate": 3.455497382198953e-07, |
|
"loss": 1.2813, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 3.403141361256545e-07, |
|
"loss": 1.2313, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 14.04, |
|
"learning_rate": 3.350785340314137e-07, |
|
"loss": 1.3903, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"learning_rate": 3.298429319371728e-07, |
|
"loss": 1.1912, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 14.07, |
|
"learning_rate": 3.24607329842932e-07, |
|
"loss": 1.3479, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 14.09, |
|
"learning_rate": 3.1937172774869115e-07, |
|
"loss": 1.1825, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"learning_rate": 3.1413612565445027e-07, |
|
"loss": 1.2482, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 14.12, |
|
"learning_rate": 3.0890052356020944e-07, |
|
"loss": 1.3458, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 14.13, |
|
"learning_rate": 3.036649214659686e-07, |
|
"loss": 1.2576, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"learning_rate": 2.984293193717278e-07, |
|
"loss": 1.2473, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 14.16, |
|
"learning_rate": 2.9319371727748697e-07, |
|
"loss": 1.312, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 2.879581151832461e-07, |
|
"loss": 1.2814, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"learning_rate": 2.8272251308900526e-07, |
|
"loss": 1.2448, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 2.7748691099476443e-07, |
|
"loss": 1.3163, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 14.22, |
|
"learning_rate": 2.7225130890052355e-07, |
|
"loss": 1.1912, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"learning_rate": 2.670157068062828e-07, |
|
"loss": 1.1767, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 14.25, |
|
"learning_rate": 2.617801047120419e-07, |
|
"loss": 1.2902, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"learning_rate": 2.565445026178011e-07, |
|
"loss": 1.5993, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 14.28, |
|
"learning_rate": 2.5130890052356025e-07, |
|
"loss": 1.3344, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 2.4607329842931937e-07, |
|
"loss": 1.2891, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"learning_rate": 2.4083769633507854e-07, |
|
"loss": 1.2419, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 2.3560209424083772e-07, |
|
"loss": 1.1958, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 14.34, |
|
"learning_rate": 2.3036649214659686e-07, |
|
"loss": 1.3736, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 14.36, |
|
"learning_rate": 2.2513089005235606e-07, |
|
"loss": 1.1602, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 14.37, |
|
"learning_rate": 2.198952879581152e-07, |
|
"loss": 1.2981, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"learning_rate": 2.1465968586387436e-07, |
|
"loss": 1.2263, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 2.0942408376963353e-07, |
|
"loss": 1.3522, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 14.42, |
|
"learning_rate": 2.0418848167539268e-07, |
|
"loss": 1.2269, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 14.43, |
|
"learning_rate": 1.9895287958115183e-07, |
|
"loss": 1.3164, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"learning_rate": 1.9371727748691103e-07, |
|
"loss": 1.2863, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 14.46, |
|
"learning_rate": 1.8848167539267017e-07, |
|
"loss": 1.1581, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"learning_rate": 1.8324607329842932e-07, |
|
"loss": 1.2892, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 14.49, |
|
"learning_rate": 1.780104712041885e-07, |
|
"loss": 1.2545, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 14.51, |
|
"learning_rate": 1.7277486910994764e-07, |
|
"loss": 1.2766, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"learning_rate": 1.6753926701570684e-07, |
|
"loss": 1.2359, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 1.62303664921466e-07, |
|
"loss": 1.1912, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 14.55, |
|
"learning_rate": 1.5706806282722514e-07, |
|
"loss": 1.2958, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"learning_rate": 1.518324607329843e-07, |
|
"loss": 1.1492, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 14.58, |
|
"learning_rate": 1.4659685863874348e-07, |
|
"loss": 1.3414, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"learning_rate": 1.4136125654450263e-07, |
|
"loss": 1.4046, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 1.3612565445026178e-07, |
|
"loss": 1.1398, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 1.3089005235602095e-07, |
|
"loss": 1.115, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 14.64, |
|
"learning_rate": 1.2565445026178012e-07, |
|
"loss": 1.3315, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 1.2041884816753927e-07, |
|
"loss": 1.419, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 14.67, |
|
"learning_rate": 1.1518324607329843e-07, |
|
"loss": 1.2696, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 1.099476439790576e-07, |
|
"loss": 1.3029, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 14.7, |
|
"learning_rate": 1.0471204188481677e-07, |
|
"loss": 1.387, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 14.72, |
|
"learning_rate": 9.947643979057591e-08, |
|
"loss": 1.2613, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 14.73, |
|
"learning_rate": 9.424083769633509e-08, |
|
"loss": 1.1824, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"learning_rate": 8.900523560209425e-08, |
|
"loss": 1.25, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 14.76, |
|
"learning_rate": 8.376963350785342e-08, |
|
"loss": 1.2513, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"learning_rate": 7.853403141361257e-08, |
|
"loss": 1.3408, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 14.79, |
|
"learning_rate": 7.329842931937174e-08, |
|
"loss": 1.261, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"learning_rate": 6.806282722513089e-08, |
|
"loss": 1.2401, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"learning_rate": 6.282722513089006e-08, |
|
"loss": 1.1627, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 14.84, |
|
"learning_rate": 5.7591623036649216e-08, |
|
"loss": 1.1001, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 14.85, |
|
"learning_rate": 5.235602094240838e-08, |
|
"loss": 1.4284, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 14.87, |
|
"learning_rate": 4.712041884816754e-08, |
|
"loss": 1.2977, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 14.88, |
|
"learning_rate": 4.188481675392671e-08, |
|
"loss": 1.4529, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 3.664921465968587e-08, |
|
"loss": 1.3078, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 14.91, |
|
"learning_rate": 3.141361256544503e-08, |
|
"loss": 1.365, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 14.93, |
|
"learning_rate": 2.617801047120419e-08, |
|
"loss": 1.2315, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"learning_rate": 2.0942408376963355e-08, |
|
"loss": 1.1403, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"learning_rate": 1.5706806282722516e-08, |
|
"loss": 1.2835, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 14.97, |
|
"learning_rate": 1.0471204188481678e-08, |
|
"loss": 1.2234, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"learning_rate": 5.235602094240839e-09, |
|
"loss": 1.4951, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.3951, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.7313432835820896, |
|
"eval_loss": 1.180140495300293, |
|
"eval_roc_auc": 0.9414392219829161, |
|
"eval_runtime": 115.5119, |
|
"eval_samples_per_second": 0.58, |
|
"eval_steps_per_second": 0.58, |
|
"step": 1005 |
|
} |
|
], |
|
"max_steps": 1005, |
|
"num_train_epochs": 15, |
|
"total_flos": 3.6216920886809103e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|