|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0145278450363193,
  "eval_steps": 500,
  "global_step": 210,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.6073,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.701,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.4e-05,
      "loss": 1.6232,
      "step": 3
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.705,
      "step": 4
    },
    {
      "epoch": 0.05,
      "learning_rate": 4e-05,
      "loss": 1.6552,
      "step": 5
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.8e-05,
      "loss": 1.7611,
      "step": 6
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.6e-05,
      "loss": 1.6008,
      "step": 7
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.6468,
      "step": 8
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.2e-05,
      "loss": 1.6148,
      "step": 9
    },
    {
      "epoch": 0.1,
      "learning_rate": 8e-05,
      "loss": 1.6125,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 7.999779207981935e-05,
      "loss": 1.6532,
      "step": 11
    },
    {
      "epoch": 0.12,
      "learning_rate": 7.999116856302298e-05,
      "loss": 1.528,
      "step": 12
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.998013018082072e-05,
      "loss": 1.5758,
      "step": 13
    },
    {
      "epoch": 0.14,
      "learning_rate": 7.996467815180588e-05,
      "loss": 1.6549,
      "step": 14
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.994481418182082e-05,
      "loss": 1.3725,
      "step": 15
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.992054046376854e-05,
      "loss": 1.6324,
      "step": 16
    },
    {
      "epoch": 0.16,
      "learning_rate": 7.989185967737066e-05,
      "loss": 1.5984,
      "step": 17
    },
    {
      "epoch": 0.17,
      "learning_rate": 7.985877498887149e-05,
      "loss": 1.7095,
      "step": 18
    },
    {
      "epoch": 0.18,
      "learning_rate": 7.982129005068865e-05,
      "loss": 1.4843,
      "step": 19
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.977940900100967e-05,
      "loss": 1.6701,
      "step": 20
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.973313646333532e-05,
      "loss": 1.6216,
      "step": 21
    },
    {
      "epoch": 0.21,
      "learning_rate": 7.968247754596908e-05,
      "loss": 1.6616,
      "step": 22
    },
    {
      "epoch": 0.22,
      "learning_rate": 7.962743784145323e-05,
      "loss": 1.5737,
      "step": 23
    },
    {
      "epoch": 0.23,
      "learning_rate": 7.956802342595152e-05,
      "loss": 1.5716,
      "step": 24
    },
    {
      "epoch": 0.24,
      "learning_rate": 7.950424085857827e-05,
      "loss": 1.4727,
      "step": 25
    },
    {
      "epoch": 0.25,
      "learning_rate": 7.943609718067437e-05,
      "loss": 1.676,
      "step": 26
    },
    {
      "epoch": 0.26,
      "learning_rate": 7.936359991502993e-05,
      "loss": 1.5953,
      "step": 27
    },
    {
      "epoch": 0.27,
      "learning_rate": 7.92867570650537e-05,
      "loss": 1.5233,
      "step": 28
    },
    {
      "epoch": 0.28,
      "learning_rate": 7.920557711388967e-05,
      "loss": 1.7087,
      "step": 29
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.912006902348045e-05,
      "loss": 1.6314,
      "step": 30
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.903024223357797e-05,
      "loss": 1.5763,
      "step": 31
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.893610666070134e-05,
      "loss": 1.7077,
      "step": 32
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.883767269704209e-05,
      "loss": 1.5335,
      "step": 33
    },
    {
      "epoch": 0.33,
      "learning_rate": 7.873495120931697e-05,
      "loss": 1.6053,
      "step": 34
    },
    {
      "epoch": 0.34,
      "learning_rate": 7.86279535375683e-05,
      "loss": 1.3602,
      "step": 35
    },
    {
      "epoch": 0.35,
      "learning_rate": 7.851669149391198e-05,
      "loss": 1.3493,
      "step": 36
    },
    {
      "epoch": 0.36,
      "learning_rate": 7.84011773612336e-05,
      "loss": 1.5459,
      "step": 37
    },
    {
      "epoch": 0.37,
      "learning_rate": 7.828142389183239e-05,
      "loss": 1.5918,
      "step": 38
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.815744430601344e-05,
      "loss": 1.4649,
      "step": 39
    },
    {
      "epoch": 0.39,
      "learning_rate": 7.802925229062823e-05,
      "loss": 1.5555,
      "step": 40
    },
    {
      "epoch": 0.4,
      "learning_rate": 7.789686199756365e-05,
      "loss": 1.6162,
      "step": 41
    },
    {
      "epoch": 0.41,
      "learning_rate": 7.776028804217968e-05,
      "loss": 1.5508,
      "step": 42
    },
    {
      "epoch": 0.42,
      "learning_rate": 7.761954550169593e-05,
      "loss": 1.7089,
      "step": 43
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.74746499135272e-05,
      "loss": 1.6471,
      "step": 44
    },
    {
      "epoch": 0.44,
      "learning_rate": 7.732561727356811e-05,
      "loss": 1.5361,
      "step": 45
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.717246403442735e-05,
      "loss": 1.4333,
      "step": 46
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.701520710361129e-05,
      "loss": 1.5406,
      "step": 47
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.685386384165748e-05,
      "loss": 1.6056,
      "step": 48
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.668845206021812e-05,
      "loss": 1.5945,
      "step": 49
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.651899002009375e-05,
      "loss": 1.6727,
      "step": 50
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.634549642921725e-05,
      "loss": 1.6352,
      "step": 51
    },
    {
      "epoch": 0.5,
      "learning_rate": 7.616799044058867e-05,
      "loss": 1.5127,
      "step": 52
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.598649165016073e-05,
      "loss": 1.5696,
      "step": 53
    },
    {
      "epoch": 0.52,
      "learning_rate": 7.58010200946755e-05,
      "loss": 1.5987,
      "step": 54
    },
    {
      "epoch": 0.53,
      "learning_rate": 7.561159624945257e-05,
      "loss": 1.5007,
      "step": 55
    },
    {
      "epoch": 0.54,
      "learning_rate": 7.541824102612839e-05,
      "loss": 1.697,
      "step": 56
    },
    {
      "epoch": 0.55,
      "learning_rate": 7.5220975770348e-05,
      "loss": 1.5062,
      "step": 57
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.501982225940833e-05,
      "loss": 1.6713,
      "step": 58
    },
    {
      "epoch": 0.57,
      "learning_rate": 7.48148026998542e-05,
      "loss": 1.5913,
      "step": 59
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.460593972502674e-05,
      "loss": 1.5841,
      "step": 60
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.439325639256483e-05,
      "loss": 1.5459,
      "step": 61
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.417677618185955e-05,
      "loss": 1.5911,
      "step": 62
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.39565229914622e-05,
      "loss": 1.5862,
      "step": 63
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.373252113644596e-05,
      "loss": 1.6381,
      "step": 64
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.350479534572166e-05,
      "loss": 1.5476,
      "step": 65
    },
    {
      "epoch": 0.64,
      "learning_rate": 7.327337075930775e-05,
      "loss": 1.579,
      "step": 66
    },
    {
      "epoch": 0.65,
      "learning_rate": 7.303827292555495e-05,
      "loss": 1.6105,
      "step": 67
    },
    {
      "epoch": 0.66,
      "learning_rate": 7.279952779832584e-05,
      "loss": 1.5728,
      "step": 68
    },
    {
      "epoch": 0.67,
      "learning_rate": 7.255716173412966e-05,
      "loss": 1.6509,
      "step": 69
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.23112014892126e-05,
      "loss": 1.5586,
      "step": 70
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.20616742166041e-05,
      "loss": 1.3865,
      "step": 71
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.180860746311917e-05,
      "loss": 1.5214,
      "step": 72
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.155202916631743e-05,
      "loss": 1.5699,
      "step": 73
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.129196765141886e-05,
      "loss": 1.388,
      "step": 74
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.10284516281768e-05,
      "loss": 1.5735,
      "step": 75
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.076151018770854e-05,
      "loss": 1.6245,
      "step": 76
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.049117279928374e-05,
      "loss": 1.6489,
      "step": 77
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.021746930707117e-05,
      "loss": 1.5712,
      "step": 78
    },
    {
      "epoch": 0.77,
      "learning_rate": 6.994042992684406e-05,
      "loss": 1.6359,
      "step": 79
    },
    {
      "epoch": 0.77,
      "learning_rate": 6.966008524264429e-05,
      "loss": 1.6452,
      "step": 80
    },
    {
      "epoch": 0.78,
      "learning_rate": 6.937646620340618e-05,
      "loss": 1.5655,
      "step": 81
    },
    {
      "epoch": 0.79,
      "learning_rate": 6.908960411953973e-05,
      "loss": 1.5791,
      "step": 82
    },
    {
      "epoch": 0.8,
      "learning_rate": 6.879953065947416e-05,
      "loss": 1.685,
      "step": 83
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.850627784616178e-05,
      "loss": 1.5755,
      "step": 84
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.82098780535428e-05,
      "loss": 1.5878,
      "step": 85
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.791036400297142e-05,
      "loss": 1.6273,
      "step": 86
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.760776875960347e-05,
      "loss": 1.625,
      "step": 87
    },
    {
      "epoch": 0.85,
      "learning_rate": 6.730212572874618e-05,
      "loss": 1.6891,
      "step": 88
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.699346865217031e-05,
      "loss": 1.5593,
      "step": 89
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.668183160438531e-05,
      "loss": 1.6518,
      "step": 90
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.636724898887751e-05,
      "loss": 1.6314,
      "step": 91
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.604975553431219e-05,
      "loss": 1.6267,
      "step": 92
    },
    {
      "epoch": 0.9,
      "learning_rate": 6.572938629069959e-05,
      "loss": 1.5465,
      "step": 93
    },
    {
      "epoch": 0.91,
      "learning_rate": 6.540617662552565e-05,
      "loss": 1.6302,
      "step": 94
    },
    {
      "epoch": 0.92,
      "learning_rate": 6.508016221984747e-05,
      "loss": 1.5039,
      "step": 95
    },
    {
      "epoch": 0.93,
      "learning_rate": 6.475137906435435e-05,
      "loss": 1.6879,
      "step": 96
    },
    {
      "epoch": 0.94,
      "learning_rate": 6.441986345539446e-05,
      "loss": 1.5822,
      "step": 97
    },
    {
      "epoch": 0.95,
      "learning_rate": 6.408565199096798e-05,
      "loss": 1.5612,
      "step": 98
    },
    {
      "epoch": 0.96,
      "learning_rate": 6.374878156668676e-05,
      "loss": 1.6022,
      "step": 99
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.340928937170118e-05,
      "loss": 1.6292,
      "step": 100
    },
    {
      "epoch": 0.98,
      "learning_rate": 6.30672128845947e-05,
      "loss": 1.6783,
      "step": 101
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.272258986924624e-05,
      "loss": 1.5995,
      "step": 102
    },
    {
      "epoch": 1.0,
      "learning_rate": 6.237545837066133e-05,
      "loss": 1.5811,
      "step": 103
    },
    {
      "epoch": 1.01,
      "learning_rate": 6.202585671077204e-05,
      "loss": 1.5029,
      "step": 104
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.167382348420637e-05,
      "loss": 1.5067,
      "step": 105
    },
    {
      "epoch": 1.01,
      "learning_rate": 6.131939755402755e-05,
      "loss": 1.4932,
      "step": 106
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.09626180474438e-05,
      "loss": 1.6286,
      "step": 107
    },
    {
      "epoch": 1.03,
      "learning_rate": 6.060352435148874e-05,
      "loss": 1.5934,
      "step": 108
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.024215610867327e-05,
      "loss": 1.5562,
      "step": 109
    },
    {
      "epoch": 1.05,
      "learning_rate": 5.9878553212609184e-05,
      "loss": 1.5332,
      "step": 110
    },
    {
      "epoch": 1.06,
      "learning_rate": 5.95127558036051e-05,
      "loss": 1.604,
      "step": 111
    },
    {
      "epoch": 1.07,
      "learning_rate": 5.9144804264235066e-05,
      "loss": 1.4745,
      "step": 112
    },
    {
      "epoch": 1.08,
      "learning_rate": 5.8774739214880554e-05,
      "loss": 1.4254,
      "step": 113
    },
    {
      "epoch": 1.08,
      "learning_rate": 5.840260150924609e-05,
      "loss": 1.6039,
      "step": 114
    },
    {
      "epoch": 1.09,
      "learning_rate": 5.802843222984919e-05,
      "loss": 1.6403,
      "step": 115
    },
    {
      "epoch": 1.1,
      "learning_rate": 5.765227268348501e-05,
      "loss": 1.6617,
      "step": 116
    },
    {
      "epoch": 1.11,
      "learning_rate": 5.727416439666622e-05,
      "loss": 1.5826,
      "step": 117
    },
    {
      "epoch": 1.12,
      "learning_rate": 5.689414911103867e-05,
      "loss": 1.4698,
      "step": 118
    },
    {
      "epoch": 1.13,
      "learning_rate": 5.651226877877326e-05,
      "loss": 1.3276,
      "step": 119
    },
    {
      "epoch": 1.14,
      "learning_rate": 5.612856555793459e-05,
      "loss": 1.4114,
      "step": 120
    },
    {
      "epoch": 1.15,
      "learning_rate": 5.574308180782693e-05,
      "loss": 1.583,
      "step": 121
    },
    {
      "epoch": 1.16,
      "learning_rate": 5.5355860084317787e-05,
      "loss": 1.684,
      "step": 122
    },
    {
      "epoch": 1.17,
      "learning_rate": 5.496694313514009e-05,
      "loss": 1.6231,
      "step": 123
    },
    {
      "epoch": 1.18,
      "learning_rate": 5.457637389517285e-05,
      "loss": 1.5874,
      "step": 124
    },
    {
      "epoch": 1.19,
      "learning_rate": 5.4184195481701425e-05,
      "loss": 1.4062,
      "step": 125
    },
    {
      "epoch": 1.2,
      "learning_rate": 5.3790451189657486e-05,
      "loss": 1.3867,
      "step": 126
    },
    {
      "epoch": 1.21,
      "learning_rate": 5.339518448683945e-05,
      "loss": 1.5874,
      "step": 127
    },
    {
      "epoch": 1.22,
      "learning_rate": 5.2998439009113814e-05,
      "loss": 1.6856,
      "step": 128
    },
    {
      "epoch": 1.23,
      "learning_rate": 5.260025855559792e-05,
      "loss": 1.5057,
      "step": 129
    },
    {
      "epoch": 1.24,
      "learning_rate": 5.2200687083824706e-05,
      "loss": 1.5815,
      "step": 130
    },
    {
      "epoch": 1.25,
      "learning_rate": 5.179976870488999e-05,
      "loss": 1.4807,
      "step": 131
    },
    {
      "epoch": 1.26,
      "learning_rate": 5.1397547678582745e-05,
      "loss": 1.542,
      "step": 132
    },
    {
      "epoch": 1.27,
      "learning_rate": 5.099406840849902e-05,
      "loss": 1.5297,
      "step": 133
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.058937543713999e-05,
      "loss": 1.461,
      "step": 134
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.018351344099453e-05,
      "loss": 1.5576,
      "step": 135
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.9776527225607274e-05,
      "loss": 1.4301,
      "step": 136
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.93684617206321e-05,
      "loss": 1.6082,
      "step": 137
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.89593619748722e-05,
      "loss": 1.6474,
      "step": 138
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.8549273151306795e-05,
      "loss": 1.5037,
      "step": 139
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.8138240522105365e-05,
      "loss": 1.5476,
      "step": 140
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.7726309463629733e-05,
      "loss": 1.5466,
      "step": 141
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.731352545142478e-05,
      "loss": 1.6127,
      "step": 142
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.689993405519802e-05,
      "loss": 1.6105,
      "step": 143
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.648558093378899e-05,
      "loss": 1.492,
      "step": 144
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.607051183012862e-05,
      "loss": 1.5909,
      "step": 145
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.5654772566189415e-05,
      "loss": 1.5474,
      "step": 146
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.5238409037926905e-05,
      "loss": 1.6371,
      "step": 147
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.4821467210212924e-05,
      "loss": 1.4939,
      "step": 148
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.4403993111761265e-05,
      "loss": 1.5418,
      "step": 149
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.398603283004626e-05,
      "loss": 1.5505,
      "step": 150
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.356763250621496e-05,
      "loss": 1.4928,
      "step": 151
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.314883832999326e-05,
      "loss": 1.4589,
      "step": 152
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.272969653458685e-05,
      "loss": 1.5522,
      "step": 153
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.231025339157714e-05,
      "loss": 1.5325,
      "step": 154
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.189055520581315e-05,
      "loss": 1.4912,
      "step": 155
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.147064831029959e-05,
      "loss": 1.5292,
      "step": 156
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.105057906108189e-05,
      "loss": 1.4766,
      "step": 157
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.063039383212866e-05,
      "loss": 1.6332,
      "step": 158
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.021013901021225e-05,
      "loss": 1.5232,
      "step": 159
    },
    {
      "epoch": 1.53,
      "learning_rate": 3.978986098978777e-05,
      "loss": 1.5639,
      "step": 160
    },
    {
      "epoch": 1.54,
      "learning_rate": 3.936960616787135e-05,
      "loss": 1.6731,
      "step": 161
    },
    {
      "epoch": 1.55,
      "learning_rate": 3.8949420938918124e-05,
      "loss": 1.5748,
      "step": 162
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.852935168970042e-05,
      "loss": 1.4529,
      "step": 163
    },
    {
      "epoch": 1.57,
      "learning_rate": 3.810944479418686e-05,
      "loss": 1.5499,
      "step": 164
    },
    {
      "epoch": 1.58,
      "learning_rate": 3.768974660842287e-05,
      "loss": 1.4416,
      "step": 165
    },
    {
      "epoch": 1.59,
      "learning_rate": 3.727030346541317e-05,
      "loss": 1.4857,
      "step": 166
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.685116167000675e-05,
      "loss": 1.6612,
      "step": 167
    },
    {
      "epoch": 1.61,
      "learning_rate": 3.6432367493785056e-05,
      "loss": 1.6123,
      "step": 168
    },
    {
      "epoch": 1.62,
      "learning_rate": 3.601396716995375e-05,
      "loss": 1.4952,
      "step": 169
    },
    {
      "epoch": 1.63,
      "learning_rate": 3.559600688823875e-05,
      "loss": 1.5899,
      "step": 170
    },
    {
      "epoch": 1.64,
      "learning_rate": 3.517853278978708e-05,
      "loss": 1.5784,
      "step": 171
    },
    {
      "epoch": 1.65,
      "learning_rate": 3.4761590962073115e-05,
      "loss": 1.6251,
      "step": 172
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.434522743381061e-05,
      "loss": 1.6478,
      "step": 173
    },
    {
      "epoch": 1.67,
      "learning_rate": 3.39294881698714e-05,
      "loss": 1.5043,
      "step": 174
    },
    {
      "epoch": 1.68,
      "learning_rate": 3.3514419066211025e-05,
      "loss": 1.6968,
      "step": 175
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.310006594480199e-05,
      "loss": 1.395,
      "step": 176
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.268647454857524e-05,
      "loss": 1.6285,
      "step": 177
    },
    {
      "epoch": 1.7,
      "learning_rate": 3.227369053637028e-05,
      "loss": 1.4827,
      "step": 178
    },
    {
      "epoch": 1.71,
      "learning_rate": 3.1861759477894656e-05,
      "loss": 1.3457,
      "step": 179
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.145072684869322e-05,
      "loss": 1.4998,
      "step": 180
    },
    {
      "epoch": 1.73,
      "learning_rate": 3.104063802512782e-05,
      "loss": 1.536,
      "step": 181
    },
    {
      "epoch": 1.74,
      "learning_rate": 3.063153827936792e-05,
      "loss": 1.5633,
      "step": 182
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.0223472774392753e-05,
      "loss": 1.7605,
      "step": 183
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.9816486559005482e-05,
      "loss": 1.5604,
      "step": 184
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.9410624562860026e-05,
      "loss": 1.5909,
      "step": 185
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.9005931591500974e-05,
      "loss": 1.4833,
      "step": 186
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.860245232141726e-05,
      "loss": 1.551,
      "step": 187
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.8200231295110012e-05,
      "loss": 1.5279,
      "step": 188
    },
    {
      "epoch": 1.81,
      "learning_rate": 2.7799312916175294e-05,
      "loss": 1.5564,
      "step": 189
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.7399741444402087e-05,
      "loss": 1.4859,
      "step": 190
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.7001560990886196e-05,
      "loss": 1.5372,
      "step": 191
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.6604815513160556e-05,
      "loss": 1.4932,
      "step": 192
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.6209548810342517e-05,
      "loss": 1.5412,
      "step": 193
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.5815804518298575e-05,
      "loss": 1.4758,
      "step": 194
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.542362610482715e-05,
      "loss": 1.715,
      "step": 195
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.503305686485991e-05,
      "loss": 1.5454,
      "step": 196
    },
    {
      "epoch": 1.89,
      "learning_rate": 2.464413991568222e-05,
      "loss": 1.43,
      "step": 197
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.4256918192173088e-05,
      "loss": 1.5798,
      "step": 198
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.3871434442065414e-05,
      "loss": 1.4315,
      "step": 199
    },
    {
      "epoch": 1.92,
      "learning_rate": 2.3487731221226754e-05,
      "loss": 1.6566,
      "step": 200
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.3105850888961348e-05,
      "loss": 1.5944,
      "step": 201
    },
    {
      "epoch": 1.94,
      "learning_rate": 2.272583560333379e-05,
      "loss": 1.5169,
      "step": 202
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.2347727316515e-05,
      "loss": 1.6553,
      "step": 203
    },
    {
      "epoch": 1.96,
      "learning_rate": 2.1971567770150814e-05,
      "loss": 1.6193,
      "step": 204
    },
    {
      "epoch": 1.97,
      "learning_rate": 2.1597398490753917e-05,
      "loss": 1.6137,
      "step": 205
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.1225260785119456e-05,
      "loss": 1.459,
      "step": 206
    },
    {
      "epoch": 1.99,
      "learning_rate": 2.0855195735764947e-05,
      "loss": 1.6145,
      "step": 207
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.0487244196394912e-05,
      "loss": 1.607,
      "step": 208
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.0121446787390822e-05,
      "loss": 1.5978,
      "step": 209
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.9757843891326736e-05,
      "loss": 1.4527,
      "step": 210
    }
  ],
  "logging_steps": 1,
  "max_steps": 309,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.4076450105049743e+18,
  "trial_name": null,
  "trial_params": null
}