|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9972316210396803,
  "global_step": 1218,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 5.405405405405406e-07, "loss": 1.9685, "step": 1},
    {"epoch": 0.0, "learning_rate": 1.0810810810810812e-06, "loss": 2.0077, "step": 2},
    {"epoch": 0.01, "learning_rate": 1.6216216216216219e-06, "loss": 1.7185, "step": 3},
    {"epoch": 0.01, "learning_rate": 2.1621621621621623e-06, "loss": 1.5678, "step": 4},
    {"epoch": 0.01, "learning_rate": 2.702702702702703e-06, "loss": 1.6993, "step": 5},
    {"epoch": 0.01, "learning_rate": 3.2432432432432437e-06, "loss": 1.7094, "step": 6},
    {"epoch": 0.02, "learning_rate": 3.7837837837837844e-06, "loss": 1.6745, "step": 7},
    {"epoch": 0.02, "learning_rate": 4.324324324324325e-06, "loss": 1.7161, "step": 8},
    {"epoch": 0.02, "learning_rate": 4.864864864864866e-06, "loss": 1.5671, "step": 9},
    {"epoch": 0.02, "learning_rate": 5.405405405405406e-06, "loss": 1.5681, "step": 10},
    {"epoch": 0.03, "learning_rate": 5.945945945945947e-06, "loss": 1.6126, "step": 11},
    {"epoch": 0.03, "learning_rate": 6.486486486486487e-06, "loss": 1.7084, "step": 12},
    {"epoch": 0.03, "learning_rate": 7.027027027027028e-06, "loss": 1.5979, "step": 13},
    {"epoch": 0.03, "learning_rate": 7.567567567567569e-06, "loss": 1.5169, "step": 14},
    {"epoch": 0.04, "learning_rate": 8.108108108108109e-06, "loss": 1.529, "step": 15},
    {"epoch": 0.04, "learning_rate": 8.64864864864865e-06, "loss": 1.5377, "step": 16},
    {"epoch": 0.04, "learning_rate": 9.189189189189191e-06, "loss": 1.6252, "step": 17},
    {"epoch": 0.04, "learning_rate": 9.729729729729732e-06, "loss": 1.5163, "step": 18},
    {"epoch": 0.05, "learning_rate": 1.027027027027027e-05, "loss": 1.535, "step": 19},
    {"epoch": 0.05, "learning_rate": 1.0810810810810812e-05, "loss": 1.5056, "step": 20},
    {"epoch": 0.05, "learning_rate": 1.1351351351351352e-05, "loss": 1.5673, "step": 21},
    {"epoch": 0.05, "learning_rate": 1.1891891891891894e-05, "loss": 1.5766, "step": 22},
    {"epoch": 0.06, "learning_rate": 1.2432432432432433e-05, "loss": 1.5272, "step": 23},
    {"epoch": 0.06, "learning_rate": 1.2972972972972975e-05, "loss": 1.551, "step": 24},
    {"epoch": 0.06, "learning_rate": 1.3513513513513515e-05, "loss": 1.6771, "step": 25},
    {"epoch": 0.06, "learning_rate": 1.4054054054054055e-05, "loss": 1.5944, "step": 26},
    {"epoch": 0.07, "learning_rate": 1.4594594594594596e-05, "loss": 1.6029, "step": 27},
    {"epoch": 0.07, "learning_rate": 1.5135135135135138e-05, "loss": 1.5741, "step": 28},
    {"epoch": 0.07, "learning_rate": 1.5675675675675676e-05, "loss": 1.5831, "step": 29},
    {"epoch": 0.07, "learning_rate": 1.6216216216216218e-05, "loss": 1.5362, "step": 30},
    {"epoch": 0.08, "learning_rate": 1.6756756756756757e-05, "loss": 1.5485, "step": 31},
    {"epoch": 0.08, "learning_rate": 1.72972972972973e-05, "loss": 1.6209, "step": 32},
    {"epoch": 0.08, "learning_rate": 1.783783783783784e-05, "loss": 1.505, "step": 33},
    {"epoch": 0.08, "learning_rate": 1.8378378378378383e-05, "loss": 1.5399, "step": 34},
    {"epoch": 0.09, "learning_rate": 1.891891891891892e-05, "loss": 1.497, "step": 35},
    {"epoch": 0.09, "learning_rate": 1.9459459459459463e-05, "loss": 1.6058, "step": 36},
    {"epoch": 0.09, "learning_rate": 2e-05, "loss": 1.5012, "step": 37},
    {"epoch": 0.09, "learning_rate": 1.999996461903301e-05, "loss": 1.5813, "step": 38},
    {"epoch": 0.1, "learning_rate": 1.9999858476382388e-05, "loss": 1.5735, "step": 39},
    {"epoch": 0.1, "learning_rate": 1.9999681572799226e-05, "loss": 1.4149, "step": 40},
    {"epoch": 0.1, "learning_rate": 1.9999433909535333e-05, "loss": 1.542, "step": 41},
    {"epoch": 0.1, "learning_rate": 1.9999115488343213e-05, "loss": 1.5271, "step": 42},
    {"epoch": 0.11, "learning_rate": 1.999872631147608e-05, "loss": 1.4501, "step": 43},
    {"epoch": 0.11, "learning_rate": 1.999826638168783e-05, "loss": 1.4914, "step": 44},
    {"epoch": 0.11, "learning_rate": 1.9997735702233006e-05, "loss": 1.4565, "step": 45},
    {"epoch": 0.11, "learning_rate": 1.99971342768668e-05, "loss": 1.4538, "step": 46},
    {"epoch": 0.12, "learning_rate": 1.999646210984502e-05, "loss": 1.552, "step": 47},
    {"epoch": 0.12, "learning_rate": 1.999571920592405e-05, "loss": 1.5556, "step": 48},
    {"epoch": 0.12, "learning_rate": 1.9994905570360817e-05, "loss": 1.5317, "step": 49},
    {"epoch": 0.12, "learning_rate": 1.999402120891276e-05, "loss": 1.4997, "step": 50},
    {"epoch": 0.13, "learning_rate": 1.99930661278378e-05, "loss": 1.4592, "step": 51},
    {"epoch": 0.13, "learning_rate": 1.9992040333894273e-05, "loss": 1.4862, "step": 52},
    {"epoch": 0.13, "learning_rate": 1.9990943834340893e-05, "loss": 1.6162, "step": 53},
    {"epoch": 0.13, "learning_rate": 1.9989776636936705e-05, "loss": 1.5054, "step": 54},
    {"epoch": 0.14, "learning_rate": 1.9988538749941024e-05, "loss": 1.5535, "step": 55},
    {"epoch": 0.14, "learning_rate": 1.9987230182113374e-05, "loss": 1.5145, "step": 56},
    {"epoch": 0.14, "learning_rate": 1.998585094271344e-05, "loss": 1.4675, "step": 57},
    {"epoch": 0.14, "learning_rate": 1.998440104150098e-05, "loss": 1.5726, "step": 58},
    {"epoch": 0.15, "learning_rate": 1.998288048873578e-05, "loss": 1.5489, "step": 59},
    {"epoch": 0.15, "learning_rate": 1.9981289295177566e-05, "loss": 1.5856, "step": 60},
    {"epoch": 0.15, "learning_rate": 1.9979627472085927e-05, "loss": 1.4966, "step": 61},
    {"epoch": 0.15, "learning_rate": 1.997789503122025e-05, "loss": 1.5644, "step": 62},
    {"epoch": 0.16, "learning_rate": 1.9976091984839616e-05, "loss": 1.49, "step": 63},
    {"epoch": 0.16, "learning_rate": 1.9974218345702733e-05, "loss": 1.561, "step": 64},
    {"epoch": 0.16, "learning_rate": 1.9972274127067838e-05, "loss": 1.5704, "step": 65},
    {"epoch": 0.16, "learning_rate": 1.997025934269259e-05, "loss": 1.5434, "step": 66},
    {"epoch": 0.16, "learning_rate": 1.9968174006833996e-05, "loss": 1.488, "step": 67},
    {"epoch": 0.17, "learning_rate": 1.9966018134248296e-05, "loss": 1.5842, "step": 68},
    {"epoch": 0.17, "learning_rate": 1.9963791740190863e-05, "loss": 1.5373, "step": 69},
    {"epoch": 0.17, "learning_rate": 1.996149484041609e-05, "loss": 1.436, "step": 70},
    {"epoch": 0.17, "learning_rate": 1.9959127451177287e-05, "loss": 1.5963, "step": 71},
    {"epoch": 0.18, "learning_rate": 1.9956689589226555e-05, "loss": 1.5238, "step": 72},
    {"epoch": 0.18, "learning_rate": 1.9954181271814673e-05, "loss": 1.4124, "step": 73},
    {"epoch": 0.18, "learning_rate": 1.9951602516690988e-05, "loss": 1.4398, "step": 74},
    {"epoch": 0.18, "learning_rate": 1.9948953342103268e-05, "loss": 1.4836, "step": 75},
    {"epoch": 0.19, "learning_rate": 1.994623376679758e-05, "loss": 1.5311, "step": 76},
    {"epoch": 0.19, "learning_rate": 1.9943443810018174e-05, "loss": 1.4614, "step": 77},
    {"epoch": 0.19, "learning_rate": 1.9940583491507314e-05, "loss": 1.5746, "step": 78},
    {"epoch": 0.19, "learning_rate": 1.993765283150517e-05, "loss": 1.4914, "step": 79},
    {"epoch": 0.2, "learning_rate": 1.9934651850749663e-05, "loss": 1.5658, "step": 80},
    {"epoch": 0.2, "learning_rate": 1.9931580570476306e-05, "loss": 1.5893, "step": 81},
    {"epoch": 0.2, "learning_rate": 1.9928439012418076e-05, "loss": 1.5762, "step": 82},
    {"epoch": 0.2, "learning_rate": 1.9925227198805247e-05, "loss": 1.4619, "step": 83},
    {"epoch": 0.21, "learning_rate": 1.9921945152365235e-05, "loss": 1.6507, "step": 84},
    {"epoch": 0.21, "learning_rate": 1.9918592896322432e-05, "loss": 1.5032, "step": 85},
    {"epoch": 0.21, "learning_rate": 1.9915170454398045e-05, "loss": 1.5206, "step": 86},
    {"epoch": 0.21, "learning_rate": 1.9911677850809943e-05, "loss": 1.5568, "step": 87},
    {"epoch": 0.22, "learning_rate": 1.9908115110272463e-05, "loss": 1.5528, "step": 88},
    {"epoch": 0.22, "learning_rate": 1.9904482257996244e-05, "loss": 1.5943, "step": 89},
    {"epoch": 0.22, "learning_rate": 1.990077931968805e-05, "loss": 1.5335, "step": 90},
    {"epoch": 0.22, "learning_rate": 1.9897006321550592e-05, "loss": 1.5123, "step": 91},
    {"epoch": 0.23, "learning_rate": 1.9893163290282335e-05, "loss": 1.442, "step": 92},
    {"epoch": 0.23, "learning_rate": 1.9889250253077306e-05, "loss": 1.5288, "step": 93},
    {"epoch": 0.23, "learning_rate": 1.9885267237624923e-05, "loss": 1.5926, "step": 94},
    {"epoch": 0.23, "learning_rate": 1.988121427210976e-05, "loss": 1.6138, "step": 95},
    {"epoch": 0.24, "learning_rate": 1.98770913852114e-05, "loss": 1.4996, "step": 96},
    {"epoch": 0.24, "learning_rate": 1.9872898606104175e-05, "loss": 1.5081, "step": 97},
    {"epoch": 0.24, "learning_rate": 1.9868635964457007e-05, "loss": 1.5397, "step": 98},
    {"epoch": 0.24, "learning_rate": 1.986430349043317e-05, "loss": 1.5946, "step": 99},
    {"epoch": 0.25, "learning_rate": 1.9859901214690094e-05, "loss": 1.5384, "step": 100},
    {"epoch": 0.25, "learning_rate": 1.9855429168379127e-05, "loss": 1.5845, "step": 101},
    {"epoch": 0.25, "learning_rate": 1.985088738314534e-05, "loss": 1.4658, "step": 102},
    {"epoch": 0.25, "learning_rate": 1.9846275891127275e-05, "loss": 1.5455, "step": 103},
    {"epoch": 0.26, "learning_rate": 1.9841594724956746e-05, "loss": 1.5214, "step": 104},
    {"epoch": 0.26, "learning_rate": 1.9836843917758593e-05, "loss": 1.4254, "step": 105},
    {"epoch": 0.26, "learning_rate": 1.983202350315044e-05, "loss": 1.5212, "step": 106},
    {"epoch": 0.26, "learning_rate": 1.982713351524248e-05, "loss": 1.5146, "step": 107},
    {"epoch": 0.27, "learning_rate": 1.982217398863721e-05, "loss": 1.4277, "step": 108},
    {"epoch": 0.27, "learning_rate": 1.98171449584292e-05, "loss": 1.425, "step": 109},
    {"epoch": 0.27, "learning_rate": 1.9812046460204837e-05, "loss": 1.5144, "step": 110},
    {"epoch": 0.27, "learning_rate": 1.9806878530042083e-05, "loss": 1.6131, "step": 111},
    {"epoch": 0.28, "learning_rate": 1.9801641204510216e-05, "loss": 1.5409, "step": 112},
    {"epoch": 0.28, "learning_rate": 1.9796334520669555e-05, "loss": 1.4707, "step": 113},
    {"epoch": 0.28, "learning_rate": 1.9790958516071228e-05, "loss": 1.537, "step": 114},
    {"epoch": 0.28, "learning_rate": 1.978551322875688e-05, "loss": 1.4324, "step": 115},
    {"epoch": 0.29, "learning_rate": 1.977999869725842e-05, "loss": 1.4989, "step": 116},
    {"epoch": 0.29, "learning_rate": 1.977441496059774e-05, "loss": 1.4105, "step": 117},
    {"epoch": 0.29, "learning_rate": 1.9768762058286433e-05, "loss": 1.4307, "step": 118},
    {"epoch": 0.29, "learning_rate": 1.976304003032554e-05, "loss": 1.4543, "step": 119},
    {"epoch": 0.3, "learning_rate": 1.9757248917205228e-05, "loss": 1.4607, "step": 120},
    {"epoch": 0.3, "learning_rate": 1.975138875990454e-05, "loss": 1.6315, "step": 121},
    {"epoch": 0.3, "learning_rate": 1.974545959989108e-05, "loss": 1.4523, "step": 122},
    {"epoch": 0.3, "learning_rate": 1.9739461479120727e-05, "loss": 1.543, "step": 123},
    {"epoch": 0.31, "learning_rate": 1.973339444003735e-05, "loss": 1.4478, "step": 124},
    {"epoch": 0.31, "learning_rate": 1.9727258525572487e-05, "loss": 1.4982, "step": 125},
    {"epoch": 0.31, "learning_rate": 1.9721053779145057e-05, "loss": 1.487, "step": 126},
    {"epoch": 0.31, "learning_rate": 1.9714780244661044e-05, "loss": 1.5652, "step": 127},
    {"epoch": 0.31, "learning_rate": 1.9708437966513196e-05, "loss": 1.4799, "step": 128},
    {"epoch": 0.32, "learning_rate": 1.9702026989580694e-05, "loss": 1.5135, "step": 129},
    {"epoch": 0.32, "learning_rate": 1.969554735922885e-05, "loss": 1.4226, "step": 130},
    {"epoch": 0.32, "learning_rate": 1.968899912130879e-05, "loss": 1.5302, "step": 131},
    {"epoch": 0.32, "learning_rate": 1.9682382322157103e-05, "loss": 1.4938, "step": 132},
    {"epoch": 0.33, "learning_rate": 1.9675697008595545e-05, "loss": 1.3944, "step": 133},
    {"epoch": 0.33, "learning_rate": 1.9668943227930686e-05, "loss": 1.5336, "step": 134},
    {"epoch": 0.33, "learning_rate": 1.966212102795358e-05, "loss": 1.4832, "step": 135},
    {"epoch": 0.33, "learning_rate": 1.965523045693944e-05, "loss": 1.4443, "step": 136},
    {"epoch": 0.34, "learning_rate": 1.964827156364728e-05, "loss": 1.5658, "step": 137},
    {"epoch": 0.34, "learning_rate": 1.964124439731957e-05, "loss": 1.4998, "step": 138},
    {"epoch": 0.34, "learning_rate": 1.9634149007681894e-05, "loss": 1.5682, "step": 139},
    {"epoch": 0.34, "learning_rate": 1.962698544494261e-05, "loss": 1.4023, "step": 140},
    {"epoch": 0.35, "learning_rate": 1.9619753759792466e-05, "loss": 1.4763, "step": 141},
    {"epoch": 0.35, "learning_rate": 1.961245400340427e-05, "loss": 1.4863, "step": 142},
    {"epoch": 0.35, "learning_rate": 1.9605086227432512e-05, "loss": 1.5116, "step": 143},
    {"epoch": 0.35, "learning_rate": 1.9597650484012997e-05, "loss": 1.5353, "step": 144},
    {"epoch": 0.36, "learning_rate": 1.9590146825762476e-05, "loss": 1.4939, "step": 145},
    {"epoch": 0.36, "learning_rate": 1.9582575305778297e-05, "loss": 1.4486, "step": 146},
    {"epoch": 0.36, "learning_rate": 1.9574935977637994e-05, "loss": 1.454, "step": 147},
    {"epoch": 0.36, "learning_rate": 1.9567228895398936e-05, "loss": 1.5772, "step": 148},
    {"epoch": 0.37, "learning_rate": 1.955945411359792e-05, "loss": 1.5268, "step": 149},
    {"epoch": 0.37, "learning_rate": 1.9551611687250808e-05, "loss": 1.4294, "step": 150},
    {"epoch": 0.37, "learning_rate": 1.9543701671852127e-05, "loss": 1.4364, "step": 151},
    {"epoch": 0.37, "learning_rate": 1.9535724123374674e-05, "loss": 1.4264, "step": 152},
    {"epoch": 0.38, "learning_rate": 1.952767909826913e-05, "loss": 1.5548, "step": 153},
    {"epoch": 0.38, "learning_rate": 1.951956665346364e-05, "loss": 1.4417, "step": 154},
    {"epoch": 0.38, "learning_rate": 1.951138684636344e-05, "loss": 1.4987, "step": 155},
    {"epoch": 0.38, "learning_rate": 1.9503139734850426e-05, "loss": 1.5214, "step": 156},
    {"epoch": 0.39, "learning_rate": 1.9494825377282746e-05, "loss": 1.4836, "step": 157},
    {"epoch": 0.39, "learning_rate": 1.9486443832494414e-05, "loss": 1.4791, "step": 158},
    {"epoch": 0.39, "learning_rate": 1.9477995159794854e-05, "loss": 1.5139, "step": 159},
    {"epoch": 0.39, "learning_rate": 1.9469479418968506e-05, "loss": 1.4858, "step": 160},
    {"epoch": 0.4, "learning_rate": 1.9460896670274408e-05, "loss": 1.3027, "step": 161},
    {"epoch": 0.4, "learning_rate": 1.9452246974445743e-05, "loss": 1.5751, "step": 162},
    {"epoch": 0.4, "learning_rate": 1.9443530392689434e-05, "loss": 1.4963, "step": 163},
    {"epoch": 0.4, "learning_rate": 1.94347469866857e-05, "loss": 1.5052, "step": 164},
    {"epoch": 0.41, "learning_rate": 1.9425896818587615e-05, "loss": 1.4721, "step": 165},
    {"epoch": 0.41, "learning_rate": 1.941697995102069e-05, "loss": 1.5447, "step": 166},
    {"epoch": 0.41, "learning_rate": 1.9407996447082394e-05, "loss": 1.5826, "step": 167},
    {"epoch": 0.41, "learning_rate": 1.939894637034174e-05, "loss": 1.538, "step": 168},
    {"epoch": 0.42, "learning_rate": 1.9389829784838833e-05, "loss": 1.4421, "step": 169},
    {"epoch": 0.42, "learning_rate": 1.938064675508438e-05, "loss": 1.3979, "step": 170},
    {"epoch": 0.42, "learning_rate": 1.9371397346059286e-05, "loss": 1.5234, "step": 171},
    {"epoch": 0.42, "learning_rate": 1.936208162321415e-05, "loss": 1.4172, "step": 172},
    {"epoch": 0.43, "learning_rate": 1.9352699652468835e-05, "loss": 1.464, "step": 173},
    {"epoch": 0.43, "learning_rate": 1.9343251500211977e-05, "loss": 1.4958, "step": 174},
    {"epoch": 0.43, "learning_rate": 1.933373723330053e-05, "loss": 1.4263, "step": 175},
    {"epoch": 0.43, "learning_rate": 1.9324156919059286e-05, "loss": 1.5034, "step": 176},
    {"epoch": 0.44, "learning_rate": 1.93145106252804e-05, "loss": 1.5243, "step": 177},
    {"epoch": 0.44, "learning_rate": 1.9304798420222918e-05, "loss": 1.4999, "step": 178},
    {"epoch": 0.44, "learning_rate": 1.9295020372612276e-05, "loss": 1.536, "step": 179},
    {"epoch": 0.44, "learning_rate": 1.9285176551639826e-05, "loss": 1.5425, "step": 180},
    {"epoch": 0.45, "learning_rate": 1.9275267026962358e-05, "loss": 1.6061, "step": 181},
    {"epoch": 0.45, "learning_rate": 1.9265291868701584e-05, "loss": 1.4502, "step": 182},
    {"epoch": 0.45, "learning_rate": 1.9255251147443646e-05, "loss": 1.567, "step": 183},
    {"epoch": 0.45, "learning_rate": 1.924514493423864e-05, "loss": 1.4235, "step": 184},
    {"epoch": 0.46, "learning_rate": 1.9234973300600074e-05, "loss": 1.4931, "step": 185},
    {"epoch": 0.46, "learning_rate": 1.92247363185044e-05, "loss": 1.503, "step": 186},
    {"epoch": 0.46, "learning_rate": 1.9214434060390484e-05, "loss": 1.5545, "step": 187},
    {"epoch": 0.46, "learning_rate": 1.9204066599159094e-05, "loss": 1.4933, "step": 188},
    {"epoch": 0.47, "learning_rate": 1.9193634008172396e-05, "loss": 1.5112, "step": 189},
    {"epoch": 0.47, "learning_rate": 1.9183136361253417e-05, "loss": 1.4536, "step": 190},
    {"epoch": 0.47, "learning_rate": 1.917257373268554e-05, "loss": 1.4638, "step": 191},
    {"epoch": 0.47, "learning_rate": 1.916194619721196e-05, "loss": 1.4806, "step": 192},
    {"epoch": 0.47, "learning_rate": 1.915125383003518e-05, "loss": 1.4791, "step": 193},
    {"epoch": 0.48, "learning_rate": 1.914049670681646e-05, "loss": 1.5478, "step": 194},
    {"epoch": 0.48, "learning_rate": 1.912967490367528e-05, "loss": 1.4017, "step": 195},
    {"epoch": 0.48, "learning_rate": 1.9118788497188815e-05, "loss": 1.3816, "step": 196},
    {"epoch": 0.48, "learning_rate": 1.9107837564391376e-05, "loss": 1.4484, "step": 197},
    {"epoch": 0.49, "learning_rate": 1.9096822182773887e-05, "loss": 1.3673, "step": 198},
    {"epoch": 0.49, "learning_rate": 1.9085742430283322e-05, "loss": 1.4965, "step": 199},
    {"epoch": 0.49, "learning_rate": 1.907459838532215e-05, "loss": 1.5041, "step": 200},
    {"epoch": 0.49, "learning_rate": 1.9063390126747778e-05, "loss": 1.5221, "step": 201},
    {"epoch": 0.5, "learning_rate": 1.9052117733872025e-05, "loss": 1.4985, "step": 202},
    {"epoch": 0.5, "learning_rate": 1.904078128646052e-05, "loss": 1.5436, "step": 203},
    {"epoch": 0.5, "learning_rate": 1.902938086473215e-05, "loss": 1.5542, "step": 204},
    {"epoch": 0.5, "learning_rate": 1.901791654935852e-05, "loss": 1.4878, "step": 205},
    {"epoch": 0.51, "learning_rate": 1.9006388421463322e-05, "loss": 1.4366, "step": 206},
    {"epoch": 0.51, "learning_rate": 1.899479656262183e-05, "loss": 1.5619, "step": 207},
    {"epoch": 0.51, "learning_rate": 1.898314105486028e-05, "loss": 1.4125, "step": 208},
    {"epoch": 0.51, "learning_rate": 1.8971421980655295e-05, "loss": 1.4888, "step": 209},
    {"epoch": 0.52, "learning_rate": 1.8959639422933316e-05, "loss": 1.4248, "step": 210},
    {"epoch": 0.52, "learning_rate": 1.894779346506999e-05, "loss": 1.4671, "step": 211},
    {"epoch": 0.52, "learning_rate": 1.893588419088962e-05, "loss": 1.4576, "step": 212},
    {"epoch": 0.52, "learning_rate": 1.892391168466452e-05, "loss": 1.5892, "step": 213},
    {"epoch": 0.53, "learning_rate": 1.891187603111447e-05, "loss": 1.6097, "step": 214},
    {"epoch": 0.53, "learning_rate": 1.8899777315406073e-05, "loss": 1.428, "step": 215},
    {"epoch": 0.53, "learning_rate": 1.8887615623152188e-05, "loss": 1.4905, "step": 216},
    {"epoch": 0.53, "learning_rate": 1.88753910404113e-05, "loss": 1.5599, "step": 217},
    {"epoch": 0.54, "learning_rate": 1.8863103653686917e-05, "loss": 1.3781, "step": 218},
    {"epoch": 0.54, "learning_rate": 1.8850753549926967e-05, "loss": 1.427, "step": 219},
    {"epoch": 0.54, "learning_rate": 1.8838340816523175e-05, "loss": 1.5079, "step": 220},
    {"epoch": 0.54, "learning_rate": 1.8825865541310438e-05, "loss": 1.5375, "step": 221},
    {"epoch": 0.55, "learning_rate": 1.8813327812566217e-05, "loss": 1.5392, "step": 222},
    {"epoch": 0.55, "learning_rate": 1.880072771900991e-05, "loss": 1.4198, "step": 223},
    {"epoch": 0.55, "learning_rate": 1.878806534980221e-05, "loss": 1.5668, "step": 224},
    {"epoch": 0.55, "learning_rate": 1.8775340794544497e-05, "loss": 1.4315, "step": 225},
    {"epoch": 0.56, "learning_rate": 1.876255414327818e-05, "loss": 1.5012, "step": 226},
    {"epoch": 0.56, "learning_rate": 1.8749705486484074e-05, "loss": 1.5996, "step": 227},
    {"epoch": 0.56, "learning_rate": 1.8736794915081765e-05, "loss": 1.4609, "step": 228},
    {"epoch": 0.56, "learning_rate": 1.8723822520428954e-05, "loss": 1.4306, "step": 229},
    {"epoch": 0.57, "learning_rate": 1.8710788394320807e-05, "loss": 1.3959, "step": 230},
    {"epoch": 0.57, "learning_rate": 1.8697692628989327e-05, "loss": 1.4551, "step": 231},
    {"epoch": 0.57, "learning_rate": 1.868453531710268e-05, "loss": 1.4901, "step": 232},
    {"epoch": 0.57, "learning_rate": 1.8671316551764552e-05, "loss": 1.4911, "step": 233},
    {"epoch": 0.58, "learning_rate": 1.865803642651348e-05, "loss": 1.509, "step": 234},
    {"epoch": 0.58, "learning_rate": 1.8644695035322203e-05, "loss": 1.5201, "step": 235},
    {"epoch": 0.58, "learning_rate": 1.8631292472596978e-05, "loss": 1.5627, "step": 236},
    {"epoch": 0.58, "learning_rate": 1.8617828833176935e-05, "loss": 1.4702, "step": 237},
    {"epoch": 0.59, "learning_rate": 1.860430421233339e-05, "loss": 1.4672, "step": 238},
    {"epoch": 0.59, "learning_rate": 1.859071870576918e-05, "loss": 1.4023, "step": 239},
    {"epoch": 0.59, "learning_rate": 1.857707240961797e-05, "loss": 1.4423, "step": 240},
    {"epoch": 0.59, "learning_rate": 1.8563365420443594e-05, "loss": 1.4925, "step": 241},
    {"epoch": 0.6, "learning_rate": 1.854959783523936e-05, "loss": 1.4703, "step": 242},
    {"epoch": 0.6, "learning_rate": 1.853576975142736e-05, "loss": 1.499, "step": 243},
    {"epoch": 0.6, "learning_rate": 1.852188126685779e-05, "loss": 1.4785, "step": 244},
    {"epoch": 0.6, "learning_rate": 1.8507932479808254e-05, "loss": 1.3313, "step": 245},
    {"epoch": 0.61, "learning_rate": 1.8493923488983066e-05, "loss": 1.4063, "step": 246},
    {"epoch": 0.61, "learning_rate": 1.847985439351256e-05, "loss": 1.5201, "step": 247},
    {"epoch": 0.61, "learning_rate": 1.846572529295237e-05, "loss": 1.5639, "step": 248},
    {"epoch": 0.61, "learning_rate": 1.845153628728274e-05, "loss": 1.556, "step": 249},
    {"epoch": 0.62, "learning_rate": 1.8437287476907828e-05, "loss": 1.5443, "step": 250},
    {"epoch": 0.62, "learning_rate": 1.842297896265497e-05, "loss": 1.394, "step": 251},
    {"epoch": 0.62, "learning_rate": 1.8408610845773974e-05, "loss": 1.5062, "step": 252},
    {"epoch": 0.62, "learning_rate": 1.8394183227936418e-05, "loss": 1.4301, "step": 253},
    {"epoch": 0.63, "learning_rate": 1.8379696211234918e-05, "loss": 1.5224, "step": 254},
    {"epoch": 0.63, "learning_rate": 1.8365149898182403e-05, "loss": 1.4614, "step": 255},
    {"epoch": 0.63, "learning_rate": 1.8350544391711396e-05, "loss": 1.4176, "step": 256},
    {"epoch": 0.63, "learning_rate": 1.833587979517329e-05, "loss": 1.4349, "step": 257},
    {"epoch": 0.63, "learning_rate": 1.8321156212337604e-05, "loss": 1.438, "step": 258},
    {"epoch": 0.64, "learning_rate": 1.830637374739126e-05, "loss": 1.5304, "step": 259},
    {"epoch": 0.64, "learning_rate": 1.829153250493783e-05, "loss": 1.3957, "step": 260},
    {"epoch": 0.64, "learning_rate": 1.827663258999683e-05, "loss": 1.4895, "step": 261},
    {"epoch": 0.64, "learning_rate": 1.8261674108002925e-05, "loss": 1.4291, "step": 262},
    {"epoch": 0.65, "learning_rate": 1.824665716480524e-05, "loss": 1.4376, "step": 263},
    {"epoch": 0.65, "learning_rate": 1.823158186666656e-05, "loss": 1.4583, "step": 264},
    {"epoch": 0.65, "learning_rate": 1.821644832026261e-05, "loss": 1.5522, "step": 265},
    {"epoch": 0.65, "learning_rate": 1.82012566326813e-05, "loss": 1.5009, "step": 266},
    {"epoch": 0.66, "learning_rate": 1.8186006911421937e-05, "loss": 1.5268, "step": 267},
    {"epoch": 0.66, "learning_rate": 1.817069926439451e-05, "loss": 1.5403, "step": 268},
    {"epoch": 0.66, "learning_rate": 1.8155333799918883e-05, "loss": 1.5086, "step": 269},
    {"epoch": 0.66, "learning_rate": 1.8139910626724058e-05, "loss": 1.4217, "step": 270},
    {"epoch": 0.67, "learning_rate": 1.8124429853947387e-05, "loss": 1.5121, "step": 271},
    {"epoch": 0.67, "learning_rate": 1.8108891591133812e-05, "loss": 1.4745, "step": 272},
    {"epoch": 0.67, "learning_rate": 1.809329594823509e-05, "loss": 1.4478, "step": 273},
    {"epoch": 0.67, "learning_rate": 1.8077643035609006e-05, "loss": 1.5936, "step": 274},
    {"epoch": 0.68, "learning_rate": 1.806193296401859e-05, "loss": 1.434, "step": 275},
    {"epoch": 0.68, "learning_rate": 1.804616584463136e-05, "loss": 1.5192, "step": 276},
    {"epoch": 0.68, "learning_rate": 1.803034178901849e-05, "loss": 1.4611, "step": 277},
    {"epoch": 0.68, "learning_rate": 1.8014460909154058e-05, "loss": 1.4463, "step": 278},
    {"epoch": 0.69, "learning_rate": 1.799852331741425e-05, "loss": 1.5376, "step": 279},
    {"epoch": 0.69, "learning_rate": 1.7982529126576543e-05, "loss": 1.5143, "step": 280},
    {"epoch": 0.69, "learning_rate": 1.7966478449818925e-05, "loss": 1.5489, "step": 281},
    {"epoch": 0.69, "learning_rate": 1.7950371400719087e-05, "loss": 1.5324, "step": 282},
    {"epoch": 0.7, "learning_rate": 1.7934208093253625e-05, "loss": 1.4892, "step": 283},
    {"epoch": 0.7, "learning_rate": 1.7917988641797227e-05, "loss": 1.5164, "step": 284},
    {"epoch": 0.7, "learning_rate": 1.7901713161121873e-05, "loss": 1.4834, "step": 285},
    {"epoch": 0.7, "learning_rate": 1.7885381766396008e-05, "loss": 1.418, "step": 286},
    {"epoch": 0.71, "learning_rate": 1.786899457318374e-05, "loss": 1.4374, "step": 287},
    {"epoch": 0.71, "learning_rate": 1.7852551697444017e-05, "loss": 1.4375, "step": 288},
    {"epoch": 0.71, "learning_rate": 1.783605325552981e-05, "loss": 1.4484, "step": 289},
    {"epoch": 0.71, "learning_rate": 1.7819499364187282e-05, "loss": 1.507, "step": 290},
    {"epoch": 0.72, "learning_rate": 1.780289014055497e-05, "loss": 1.3947, "step": 291},
    {"epoch": 0.72, "learning_rate": 1.7786225702162955e-05, "loss": 1.4528, "step": 292},
    {"epoch": 0.72, "learning_rate": 1.7769506166932026e-05, "loss": 1.5722, "step": 293},
    {"epoch": 0.72, "learning_rate": 1.7752731653172847e-05, "loss": 1.4561, "step": 294},
    {"epoch": 0.73, "learning_rate": 1.7735902279585118e-05, "loss": 1.4411, "step": 295},
    {"epoch": 0.73, "learning_rate": 1.7719018165256745e-05, "loss": 1.4508, "step": 296},
    {"epoch": 0.73, "learning_rate": 1.7702079429662986e-05, "loss": 1.5283, "step": 297},
    {"epoch": 0.73, "learning_rate": 1.7685086192665605e-05, "loss": 1.2922, "step": 298},
    {"epoch": 0.74, "learning_rate": 1.7668038574512045e-05, "loss": 1.4048, "step": 299},
    {"epoch": 0.74, "learning_rate": 1.7650936695834536e-05, "loss": 1.4329, "step": 300},
    {"epoch": 0.74, "learning_rate": 1.763378067764929e-05, "loss": 1.5082, "step": 301},
    {"epoch": 0.74, "learning_rate": 1.7616570641355602e-05, "loss": 1.3541, "step": 302},
    {"epoch": 0.75, "learning_rate": 1.759930670873502e-05, "loss": 1.4544, "step": 303},
    {"epoch": 0.75, "learning_rate": 1.758198900195047e-05, "loss": 1.557, "step": 304},
    {"epoch": 0.75, "learning_rate": 1.7564617643545395e-05, "loss": 1.4158, "step": 305},
    {"epoch": 0.75, "learning_rate": 1.7547192756442887e-05, "loss": 1.4912, "step": 306},
    {"epoch": 0.76, "learning_rate": 1.7529714463944815e-05, "loss": 1.4887, "step": 307},
    {"epoch": 0.76, "learning_rate": 1.751218288973096e-05, "loss": 1.5082, "step": 308},
    {"epoch": 0.76, "learning_rate": 1.7494598157858127e-05, "loss": 1.4374, "step": 309},
    {"epoch": 0.76, "learning_rate": 1.7476960392759284e-05, "loss": 1.4352, "step": 310},
    {"epoch": 0.77, "learning_rate": 1.7459269719242665e-05, "loss": 1.3782, "step": 311},
    {"epoch": 0.77, "learning_rate": 1.74415262624909e-05, "loss": 1.5327, "step": 312},
    {"epoch": 0.77, "learning_rate": 1.742373014806012e-05, "loss": 1.5196, "step": 313},
    {"epoch": 0.77, "learning_rate": 1.740588150187907e-05, "loss": 1.5719, "step": 314},
    {"epoch": 0.78, "learning_rate": 1.7387980450248222e-05, "loss": 1.5836, "step": 315},
    {"epoch": 0.78, "learning_rate": 1.7370027119838884e-05, "loss": 1.4612, "step": 316},
    {"epoch": 0.78, "learning_rate": 1.735202163769229e-05, "loss": 1.5138, "step": 317},
    {"epoch": 0.78, "learning_rate": 1.7333964131218714e-05, "loss": 1.46, "step": 318},
    {"epoch": 0.78, "learning_rate": 1.7315854728196568e-05, "loss": 1.5212, "step": 319},
    {"epoch": 0.79, "learning_rate": 1.729769355677149e-05, "loss": 1.4436, "step": 320},
    {"epoch": 0.79, "learning_rate": 1.7279480745455433e-05, "loss": 1.446, "step": 321},
    {"epoch": 0.79, "learning_rate": 1.7261216423125782e-05, "loss": 1.4478, "step": 322},
    {"epoch": 0.79, "learning_rate": 1.724290071902441e-05, "loss": 1.5164, "step": 323},
    {"epoch": 0.8, "learning_rate": 1.7224533762756775e-05, "loss": 1.5921, "step": 324},
    {"epoch": 0.8, "learning_rate": 1.720611568429103e-05, "loss": 1.3207, "step": 325},
    {"epoch": 0.8, "learning_rate": 1.718764661395704e-05, "loss": 1.4776, "step": 326},
    {"epoch": 0.8, "learning_rate": 1.716912668244553e-05, "loss": 1.4853, "step": 327},
    {"epoch": 0.81, "learning_rate": 1.715055602080711e-05, "loss": 1.4779, "step": 328},
    {"epoch": 0.81, "learning_rate": 1.7131934760451385e-05, "loss": 1.3848, "step": 329},
    {"epoch": 0.81, "learning_rate": 1.7113263033145985e-05, "loss": 1.5338, "step": 330},
    {"epoch": 0.81, "learning_rate": 1.7094540971015663e-05, "loss": 1.335, "step": 331},
    {"epoch": 0.82, "learning_rate": 1.7075768706541355e-05, "loss": 1.4512, "step": 332},
    {"epoch": 0.82, "learning_rate": 1.7056946372559234e-05, "loss": 1.3966, "step": 333},
    {"epoch": 0.82, "learning_rate": 1.7038074102259775e-05, "loss": 1.5724, "step": 334},
    {"epoch": 0.82, "learning_rate": 1.7019152029186817e-05, "loss": 1.3596, "step": 335},
    {"epoch": 0.83, "learning_rate": 1.70001802872366e-05, "loss": 1.4671, "step": 336},
    {"epoch": 0.83, "learning_rate": 1.6981159010656847e-05, "loss": 1.484, "step": 337},
    {"epoch": 0.83, "learning_rate": 1.6962088334045785e-05, "loss": 1.5372, "step": 338},
    {"epoch": 0.83, "learning_rate": 1.694296839235121e-05, "loss": 1.4365, "step": 339},
    {"epoch": 0.84, "learning_rate": 1.692379932086953e-05, "loss": 1.4995, "step": 340},
    {"epoch": 0.84, "learning_rate": 1.6904581255244802e-05, "loss": 1.4586, "step": 341},
    {"epoch": 0.84, "learning_rate": 1.688531433146777e-05, "loss": 1.4517, "step": 342},
    {"epoch": 0.84, "learning_rate": 1.6865998685874923e-05, "loss": 1.513, "step": 343},
    {"epoch": 0.85, "learning_rate": 1.6846634455147498e-05, "loss": 1.4483, "step": 344},
    {"epoch": 0.85, "learning_rate": 1.6827221776310532e-05, "loss": 1.3291, "step": 345},
    {"epoch": 0.85, "learning_rate": 1.6807760786731905e-05, "loss": 1.3948, "step": 346},
    {"epoch": 0.85, "learning_rate": 1.6788251624121335e-05, "loss": 1.536, "step": 347},
    {"epoch": 0.86, "learning_rate": 1.6768694426529432e-05, "loss": 1.4636, "step": 348},
    {"epoch": 0.86, "learning_rate": 1.6749089332346714e-05, "loss": 1.4605, "step": 349},
    {"epoch": 0.86, "learning_rate": 1.672943648030261e-05, "loss": 1.4914, "step": 350},
    {"epoch": 0.86, "learning_rate": 1.6709736009464504e-05, "loss": 1.4608, "step": 351},
    {"epoch": 0.87, "learning_rate": 1.668998805923675e-05, "loss": 1.5315, "step": 352},
    {"epoch": 0.87, "learning_rate": 1.6670192769359643e-05, "loss": 1.4783, "step": 353},
    {"epoch": 0.87, "learning_rate": 1.6650350279908497e-05, "loss": 1.5233, "step": 354},
    {"epoch": 0.87, "learning_rate": 1.6630460731292597e-05, "loss": 1.4558, "step": 355},
    {"epoch": 0.88, "learning_rate": 1.661052426425424e-05, "loss": 1.4945, "step": 356},
    {"epoch": 0.88, "learning_rate": 1.6590541019867722e-05, "loss": 1.5253, "step": 357},
    {"epoch": 0.88, "learning_rate": 1.6570511139538348e-05, "loss": 1.3644, "step": 358},
    {"epoch": 0.88, "learning_rate": 1.655043476500142e-05, "loss": 1.3728, "step": 359},
    {"epoch": 0.89, "learning_rate": 1.6530312038321247e-05, "loss": 1.5508, "step": 360},
    {"epoch": 0.89, "learning_rate": 1.6510143101890136e-05, "loss": 1.3814, "step": 361},
    {"epoch": 0.89, "learning_rate": 1.6489928098427383e-05, "loss": 1.51, "step": 362},
    {"epoch": 0.89, "learning_rate": 1.6469667170978258e-05, "loss": 1.3647, "step": 363},
    {"epoch": 0.9, "learning_rate": 1.6449360462913005e-05, "loss": 1.4148, "step": 364},
    {"epoch": 0.9, "learning_rate": 1.642900811792582e-05, "loss": 1.5278, "step": 365},
    {"epoch": 0.9, "learning_rate": 1.640861028003383e-05, "loss": 1.4379, "step": 366},
    {"epoch": 0.9, "learning_rate": 1.6388167093576083e-05, "loss": 1.4898, "step": 367},
    {"epoch": 0.91, "learning_rate": 1.6367678703212515e-05, "loss": 1.4376, "step": 368},
    {"epoch": 0.91, "learning_rate": 1.6347145253922942e-05, "loss": 1.5371, "step": 369},
    {"epoch": 0.91, "learning_rate": 1.632656689100602e-05, "loss": 1.4784, "step": 370},
    {"epoch": 0.91, "learning_rate": 1.6305943760078226e-05, "loss": 1.4778, "step": 371},
    {"epoch": 0.92, "learning_rate": 1.628527600707283e-05, "loss": 1.4496, "step": 372},
    {"epoch": 0.92, "learning_rate": 1.6264563778238834e-05, "loss": 1.4265, "step": 373},
    {"epoch": 0.92, "learning_rate": 1.6243807220139988e-05, "loss": 1.4144, "step": 374},
    {"epoch": 0.92, "learning_rate": 1.6223006479653708e-05, "loss": 1.3847, "step": 375},
    {"epoch": 0.93, "learning_rate": 1.6202161703970057e-05, "loss": 1.4024, "step": 376},
    {"epoch": 0.93, "learning_rate": 1.6181273040590696e-05, "loss": 1.4366, "step": 377},
    {"epoch": 0.93, "learning_rate": 1.616034063732785e-05, "loss": 1.3984, "step": 378},
    {"epoch": 0.93, "learning_rate": 1.613936464230325e-05, "loss": 1.4135, "step": 379},
    {"epoch": 0.94, "learning_rate": 1.6118345203947093e-05, "loss": 1.5571, "step": 380},
    {"epoch": 0.94, "learning_rate": 1.6097282470996997e-05, "loss": 1.4876, "step": 381},
    {"epoch": 0.94, "learning_rate": 1.6076176592496926e-05, "loss": 1.3847, "step": 382},
    {"epoch": 0.94, "learning_rate": 1.605502771779616e-05, "loss": 1.4314, "step": 383},
    {"epoch": 0.94, "learning_rate": 1.603383599654823e-05, "loss": 1.4177, "step": 384},
    {"epoch": 0.95, "learning_rate": 1.601260157870985e-05, "loss": 1.4774, "step": 385},
    {"epoch": 0.95, "learning_rate": 1.599132461453987e-05, "loss": 1.4068, "step": 386},
    {"epoch": 0.95, "learning_rate": 1.5970005254598204e-05, "loss": 1.4833, "step": 387},
    {"epoch": 0.95, "learning_rate": 1.594864364974476e-05, "loss": 1.4635, "step": 388},
    {"epoch": 0.96, "learning_rate": 1.592723995113839e-05, "loss": 1.4766, "step": 389},
    {"epoch": 0.96, "learning_rate": 1.5905794310235808e-05, "loss": 1.4363, "step": 390},
    {"epoch": 0.96, "learning_rate": 1.5884306878790512e-05, "loss": 1.4122, "step": 391},
    {"epoch": 0.96, "learning_rate": 1.586277780885172e-05, "loss": 1.4629, "step": 392},
    {"epoch": 0.97, "learning_rate": 1.58412072527633e-05, "loss": 1.5554, "step": 393},
    {"epoch": 0.97, "learning_rate": 1.5819595363162682e-05, "loss": 1.4584, "step": 394},
    {"epoch": 0.97, "learning_rate": 1.5797942292979767e-05, "loss": 1.4806, "step": 395},
    {"epoch": 0.97, "learning_rate": 1.577624819543587e-05, "loss": 1.4799, "step": 396},
    {"epoch": 0.98, "learning_rate": 1.5754513224042625e-05, "loss": 1.5323, "step": 397},
    {"epoch": 0.98, "learning_rate": 1.573273753260089e-05, "loss": 1.5143, "step": 398},
    {"epoch": 0.98, "learning_rate": 1.571092127519967e-05, "loss": 1.3724, "step": 399},
    {"epoch": 0.98, "learning_rate": 1.568906460621502e-05, "loss": 1.4376, "step": 400},
    {"epoch": 0.99, "learning_rate": 1.566716768030896e-05, "loss": 1.516, "step": 401},
    {"epoch": 0.99, "learning_rate": 1.5645230652428367e-05, "loss": 1.3851, "step": 402},
    {"epoch": 0.99, "learning_rate": 1.5623253677803897e-05, "loss": 1.4896, "step": 403},
    {"epoch": 0.99, "learning_rate": 1.5601236911948876e-05, "loss": 1.4985, "step": 404},
    {"epoch": 1.0, "learning_rate": 1.5579180510658187e-05, "loss": 1.4713, "step": 405},
    {"epoch": 1.0, "learning_rate": 1.5557084630007206e-05, "loss": 1.5704, "step": 406},
    {"epoch": 1.0, "learning_rate": 1.5534949426350642e-05, "loss": 1.1073, "step": 407},
    {"epoch": 1.0, "learning_rate": 1.551277505632149e-05, "loss": 1.0673, "step": 408},
    {"epoch": 1.01, "learning_rate": 1.549056167682987e-05, "loss": 1.1634, "step": 409},
    {"epoch": 1.01, "learning_rate": 1.546830944506196e-05, "loss": 1.1615, "step": 410},
    {"epoch": 1.01, "learning_rate": 1.544601851847885e-05, "loss": 1.0975, "step": 411},
    {"epoch": 1.01, "learning_rate": 1.542368905481545e-05, "loss": 1.1318, "step": 412},
    {"epoch": 1.02, "learning_rate": 1.5401321212079366e-05, "loss": 0.9377, "step": 413},
    {"epoch": 1.02, "learning_rate": 1.5378915148549772e-05, "loss": 1.0874, "step": 414},
    {"epoch": 1.02, "learning_rate": 1.5356471022776315e-05, "loss": 1.0396, "step": 415},
    {"epoch": 1.02, "learning_rate": 1.5333988993577958e-05, "loss": 1.0781, "step": 416},
    {"epoch": 1.03, "learning_rate": 1.5311469220041903e-05, "loss": 1.0133, "step": 417},
    {"epoch": 1.03, "learning_rate": 1.5288911861522413e-05, "loss": 1.0539, "step": 418},
    {"epoch": 1.03, "learning_rate": 1.526631707763972e-05, "loss": 1.2074, "step": 419},
    {"epoch": 1.03, "learning_rate": 1.5243685028278888e-05, "loss": 1.1373, "step": 420},
    {"epoch": 1.04, "learning_rate": 1.5221015873588672e-05, "loss": 0.9648, "step": 421},
    {"epoch": 1.04, "learning_rate": 1.5198309773980397e-05, "loss": 1.0255, "step": 422},
    {"epoch": 1.04, "learning_rate": 1.5175566890126812e-05, "loss": 1.0889, "step": 423},
    {"epoch": 1.04, "learning_rate": 1.5152787382960968e-05, "loss": 1.0913, "step": 424},
    {"epoch": 1.05, "learning_rate": 1.5129971413675055e-05, "loss": 1.0329, "step": 425},
    {"epoch": 1.05, "learning_rate": 1.510711914371929e-05, "loss": 1.0569, "step": 426},
    {"epoch": 1.05, "learning_rate": 1.5084230734800754e-05, "loss": 1.0238, "step": 427},
    {"epoch": 1.05, "learning_rate": 1.5061306348882252e-05, "loss": 1.0904, "step": 428},
    {"epoch": 1.06, "learning_rate": 1.5038346148181178e-05, "loss": 1.1452, "step": 429},
    {"epoch": 1.06, "learning_rate": 1.5015350295168344e-05, "loss": 1.0408, "step": 430},
    {"epoch": 1.06, "learning_rate": 1.4992318952566862e-05, "loss": 1.1252, "step": 431},
    {"epoch": 1.06, "learning_rate": 1.4969252283350964e-05, "loss": 0.9937, "step": 432},
    {"epoch": 1.07, "learning_rate": 1.4946150450744859e-05, "loss": 1.0751, "step": 433},
    {"epoch": 1.07, "learning_rate": 1.4923013618221584e-05, "loss": 1.1261, "step": 434},
    {"epoch": 1.07, "learning_rate": 1.4899841949501845e-05, "loss": 1.1045, "step": 435},
    {"epoch": 1.07, "learning_rate": 1.4876635608552845e-05, "loss": 1.029, "step": 436},
    {"epoch": 1.08, "learning_rate": 1.4853394759587146e-05, "loss": 1.1211, "step": 437},
    {"epoch": 1.08, "learning_rate": 1.4830119567061484e-05, "loss": 1.0406, "step": 438},
    {"epoch": 1.08, "learning_rate": 1.4806810195675627e-05, "loss": 1.0517, "step": 439},
    {"epoch": 1.08, "learning_rate": 1.4783466810371195e-05, "loss": 1.0026, "step": 440},
    {"epoch": 1.09, "learning_rate": 1.4760089576330493e-05, "loss": 1.0275, "step": 441},
    {"epoch": 1.09, "learning_rate": 1.4736678658975357e-05, "loss": 1.0359, "step": 442},
    {"epoch": 1.09, "learning_rate": 1.471323422396596e-05, "loss": 1.164, "step": 443},
    {"epoch": 1.09, "learning_rate": 1.4689756437199658e-05, "loss": 1.1032, "step": 444},
    {"epoch": 1.1, "learning_rate": 1.4666245464809818e-05, "loss": 1.0966, "step": 445},
    {"epoch": 1.1, "learning_rate": 1.4642701473164618e-05, "loss": 1.1152, "step": 446},
    {"epoch": 1.1, "learning_rate": 1.4619124628865904e-05, "loss": 1.0731, "step": 447},
    {"epoch": 1.1, "learning_rate": 1.459551509874798e-05, "loss": 1.0371, "step": 448},
    {"epoch": 1.1, "learning_rate": 1.4571873049876452e-05, "loss": 1.0888, "step": 449},
    {"epoch": 1.11, "learning_rate": 1.454819864954703e-05, "loss": 1.0688, "step": 450},
    {"epoch": 1.11, "learning_rate": 1.4524492065284344e-05, "loss": 1.0156, "step": 451},
    {"epoch": 1.11, "learning_rate": 1.4500753464840775e-05, "loss": 1.1587, "step": 452},
    {"epoch": 1.11, "learning_rate": 1.4476983016195245e-05, "loss": 1.0391, "step": 453},
    {"epoch": 1.12, "learning_rate": 1.4453180887552052e-05, "loss": 1.0478, "step": 454},
    {"epoch": 1.12, "learning_rate": 1.4429347247339656e-05, "loss": 1.1598, "step": 455},
    {"epoch": 1.12, "learning_rate": 1.4405482264209512e-05, "loss": 0.9857, "step": 456},
    {"epoch": 1.12, "learning_rate": 1.4381586107034849e-05, "loss": 1.1735, "step": 457},
    {"epoch": 1.13, "learning_rate": 1.4357658944909496e-05, "loss": 1.0613, "step": 458},
    {"epoch": 1.13, "learning_rate": 1.4333700947146686e-05, "loss": 1.1052, "step": 459},
    {"epoch": 1.13, "learning_rate": 1.4309712283277839e-05, "loss": 1.1275, "step": 460},
    {"epoch": 1.13, "learning_rate": 1.4285693123051385e-05, "loss": 1.0158, "step": 461},
    {"epoch": 1.14, "learning_rate": 1.4261643636431539e-05, "loss": 1.1073, "step": 462},
    {"epoch": 1.14, "learning_rate": 1.4237563993597133e-05, "loss": 1.0065, "step": 463},
    {"epoch": 1.14, "learning_rate": 1.4213454364940362e-05, "loss": 1.0783, "step": 464},
    {"epoch": 1.14, "learning_rate": 1.4189314921065629e-05, "loss": 1.1141, "step": 465},
    {"epoch": 1.15, "learning_rate": 1.4165145832788305e-05, "loss": 1.0855, "step": 466},
    {"epoch": 1.15, "learning_rate": 1.4140947271133536e-05, "loss": 1.0268, "step": 467},
    {"epoch": 1.15, "learning_rate": 1.4116719407335022e-05, "loss": 1.1203, "step": 468},
    {"epoch": 1.15, "learning_rate": 1.4092462412833811e-05, "loss": 1.0887, "step": 469},
    {"epoch": 1.16, "learning_rate": 1.406817645927709e-05, "loss": 0.9545, "step": 470},
    {"epoch": 1.16, "learning_rate": 1.4043861718516964e-05, "loss": 1.0331, "step": 471},
    {"epoch": 1.16, "learning_rate": 1.4019518362609239e-05, "loss": 1.0864, "step": 472},
    {"epoch": 1.16, "learning_rate": 1.399514656381221e-05, "loss": 1.0886, "step": 473},
    {"epoch": 1.17, "learning_rate": 1.3970746494585439e-05, "loss": 1.1292, "step": 474},
    {"epoch": 1.17, "learning_rate": 1.3946318327588534e-05, "loss": 1.0675, "step": 475},
    {"epoch": 1.17, "learning_rate": 1.3921862235679929e-05, "loss": 1.1474, "step": 476},
    {"epoch": 1.17, "learning_rate": 1.389737839191566e-05, "loss": 1.039, "step": 477},
    {"epoch": 1.18, "learning_rate": 1.3872866969548143e-05, "loss": 1.073, "step": 478},
    {"epoch": 1.18, "learning_rate": 1.384832814202494e-05, "loss": 1.1149, "step": 479},
    {"epoch": 1.18, "learning_rate": 1.3823762082987544e-05, "loss": 1.1452, "step": 480},
    {"epoch": 1.18, "learning_rate": 1.3799168966270139e-05, "loss": 1.0943, "step": 481},
    {"epoch": 1.19, "learning_rate": 1.3774548965898371e-05, "loss": 1.0701, "step": 482},
    {"epoch": 1.19, "learning_rate": 1.3749902256088125e-05, "loss": 1.0005, "step": 483},
    {"epoch": 1.19, "learning_rate": 1.3725229011244294e-05, "loss": 1.1638, "step": 484},
    {"epoch": 1.19, "learning_rate": 1.3700529405959517e-05, "loss": 1.0659, "step": 485},
    {"epoch": 1.2, "learning_rate": 1.3675803615012993e-05, "loss": 1.0407, "step": 486},
    {"epoch": 1.2, "learning_rate": 1.3651051813369188e-05, "loss": 1.0907, "step": 487},
    {"epoch": 1.2, "learning_rate": 1.3626274176176645e-05, "loss": 1.1026, "step": 488},
    {"epoch": 1.2, "learning_rate": 1.3601470878766714e-05, "loss": 1.1216, "step": 489},
    {"epoch": 1.21, "learning_rate": 1.3576642096652322e-05, "loss": 1.0523, "step": 490},
    {"epoch": 1.21, "learning_rate": 1.3551788005526738e-05, "loss": 1.0775, "step": 491},
    {"epoch": 1.21, "learning_rate": 1.3526908781262314e-05, "loss": 1.1233, "step": 492},
    {"epoch": 1.21, "learning_rate": 1.3502004599909255e-05, "loss": 1.1803, "step": 493},
    {"epoch": 1.22, "learning_rate": 1.3477075637694362e-05, "loss": 1.1263, "step": 494},
    {"epoch": 1.22, "learning_rate": 1.3452122071019797e-05, "loss": 1.0823, "step": 495},
    {"epoch": 1.22, "learning_rate": 1.3427144076461818e-05, "loss": 1.1011, "step": 496},
    {"epoch": 1.22, "learning_rate": 1.3402141830769551e-05, "loss": 0.9963, "step": 497},
    {"epoch": 1.23, "learning_rate": 1.3377115510863716e-05, "loss": 1.0976, "step": 498},
    {"epoch": 1.23, "learning_rate": 1.3352065293835399e-05, "loss": 1.0262, "step": 499},
    {"epoch": 1.23, "learning_rate": 1.3326991356944776e-05, "loss": 1.0757, "step": 500},
    {"epoch": 1.23, "learning_rate": 1.3301893877619874e-05, "loss": 1.0232, "step": 501},
    {"epoch": 1.24, "learning_rate": 1.3276773033455312e-05, "loss": 1.0653, "step": 502},
    {"epoch": 1.24, "learning_rate": 1.3251629002211042e-05, "loss": 1.0742, "step": 503},
    {"epoch": 1.24, "learning_rate": 1.322646196181109e-05, "loss": 1.103, "step": 504},
    {"epoch": 1.24, "learning_rate": 1.3201272090342303e-05, "loss": 1.1171, "step": 505},
    {"epoch": 1.25, "learning_rate": 1.3176059566053083e-05, "loss": 1.0204, "step": 506},
    {"epoch": 1.25, "learning_rate": 1.3150824567352128e-05, "loss": 1.0055, "step": 507},
    {"epoch": 1.25, "learning_rate": 1.3125567272807167e-05, "loss": 1.0891, "step": 508},
    {"epoch": 1.25, "learning_rate": 1.3100287861143703e-05, "loss": 1.0075, "step": 509},
    {"epoch": 1.25, "learning_rate": 1.3074986511243741e-05, "loss": 1.0382, "step": 510},
    {"epoch": 1.26, "learning_rate": 1.3049663402144528e-05, "loss": 1.0055, "step": 511},
    {"epoch": 1.26, "learning_rate": 1.302431871303728e-05, "loss": 1.0861, "step": 512},
    {"epoch": 1.26, "learning_rate": 1.2998952623265917e-05, "loss": 1.0948, "step": 513},
    {"epoch": 1.26, "learning_rate": 1.2973565312325798e-05, "loss": 1.0362, "step": 514},
    {"epoch": 1.27, "learning_rate": 1.2948156959862446e-05, "loss": 1.1207, "step": 515},
    {"epoch": 1.27, "learning_rate": 1.2922727745670276e-05, "loss": 1.0854, "step": 516},
    {"epoch": 1.27, "learning_rate": 1.2897277849691326e-05, "loss": 1.0418, "step": 517},
    {"epoch": 1.27, "learning_rate": 1.2871807452013977e-05, "loss": 1.0269, "step": 518},
    {"epoch": 1.28, "learning_rate": 1.28463167328717e-05, "loss": 1.0621, "step": 519},
    {"epoch": 1.28, "learning_rate": 1.2820805872641745e-05, "loss": 1.0765, "step": 520},
    {"epoch": 1.28, "learning_rate": 1.2795275051843893e-05, "loss": 1.0733, "step": 521},
    {"epoch": 1.28, "learning_rate": 1.276972445113917e-05, "loss": 0.9789, "step": 522},
    {"epoch": 1.29, "learning_rate": 1.2744154251328573e-05, "loss": 1.0535, "step": 523},
    {"epoch": 1.29, "learning_rate": 1.2718564633351773e-05, "loss": 1.0474, "step": 524},
    {"epoch": 1.29, "learning_rate": 1.2692955778285865e-05, "loss": 1.0164, "step": 525},
    {"epoch": 1.29, "learning_rate": 1.266732786734405e-05, "loss": 1.0503, "step": 526},
    {"epoch": 1.3, "learning_rate": 1.2641681081874394e-05, "loss": 1.0523, "step": 527},
    {"epoch": 1.3, "learning_rate": 1.2616015603358497e-05, "loss": 1.0932, "step": 528},
    {"epoch": 1.3, "learning_rate": 1.2590331613410261e-05, "loss": 0.9713, "step": 529},
    {"epoch": 1.3, "learning_rate": 1.2564629293774561e-05, "loss": 1.0768, "step": 530},
    {"epoch": 1.31, "learning_rate": 1.253890882632598e-05, "loss": 1.1055, "step": 531},
    {"epoch": 1.31, "learning_rate": 1.2513170393067527e-05, "loss": 1.075, "step": 532},
    {"epoch": 1.31, "learning_rate": 1.2487414176129322e-05, "loss": 1.1094, "step": 533},
    {"epoch": 1.31, "learning_rate": 1.246164035776735e-05, "loss": 1.1134, "step": 534},
    {"epoch": 1.32, "learning_rate": 1.2435849120362123e-05, "loss": 0.9916, "step": 535},
    {"epoch": 1.32, "learning_rate": 1.2410040646417431e-05, "loss": 1.0974, "step": 536},
    {"epoch": 1.32, "learning_rate": 1.2384215118559027e-05,
|
"loss": 0.9745, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.235837271953334e-05, |
|
"loss": 1.0322, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2332513632206183e-05, |
|
"loss": 1.1612, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2306638039561455e-05, |
|
"loss": 1.0081, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2280746124699864e-05, |
|
"loss": 1.1934, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2254838070837596e-05, |
|
"loss": 0.9869, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2228914061305059e-05, |
|
"loss": 1.0878, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2202974279545554e-05, |
|
"loss": 1.0396, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2177018909113994e-05, |
|
"loss": 0.9445, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.21510481336756e-05, |
|
"loss": 1.0257, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2125062137004602e-05, |
|
"loss": 1.1219, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2099061102982939e-05, |
|
"loss": 0.9798, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2073045215598953e-05, |
|
"loss": 1.0811, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.20470146589461e-05, |
|
"loss": 1.1189, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2020969617221627e-05, |
|
"loss": 1.0373, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.199491027472529e-05, |
|
"loss": 1.0027, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.1968836815858038e-05, |
|
"loss": 1.099, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.1942749425120704e-05, |
|
"loss": 1.0822, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1916648287112714e-05, |
|
"loss": 1.0227, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1890533586530766e-05, |
|
"loss": 1.0001, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1864405508167532e-05, |
|
"loss": 1.116, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1838264236910348e-05, |
|
"loss": 1.0277, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1812109957739907e-05, |
|
"loss": 1.0403, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1785942855728945e-05, |
|
"loss": 1.0276, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1759763116040936e-05, |
|
"loss": 0.9857, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1733570923928785e-05, |
|
"loss": 1.095, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1707366464733501e-05, |
|
"loss": 1.1235, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1681149923882913e-05, |
|
"loss": 0.9838, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1654921486890327e-05, |
|
"loss": 1.0423, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1628681339353244e-05, |
|
"loss": 0.955, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1602429666952015e-05, |
|
"loss": 1.0012, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1576166655448558e-05, |
|
"loss": 1.0756, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1549892490685018e-05, |
|
"loss": 1.0529, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1523607358582462e-05, |
|
"loss": 1.1125, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.149731144513958e-05, |
|
"loss": 1.0864, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1471004936431327e-05, |
|
"loss": 1.0284, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.144468801860766e-05, |
|
"loss": 1.0784, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1418360877892165e-05, |
|
"loss": 1.1312, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1392023700580796e-05, |
|
"loss": 1.0445, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1365676673040502e-05, |
|
"loss": 1.0802, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.133931998170795e-05, |
|
"loss": 1.1193, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1312953813088183e-05, |
|
"loss": 1.0525, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1286578353753313e-05, |
|
"loss": 1.0297, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1260193790341186e-05, |
|
"loss": 0.9857, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1233800309554083e-05, |
|
"loss": 1.0877, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1207398098157371e-05, |
|
"loss": 1.0841, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1180987342978209e-05, |
|
"loss": 1.0807, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1154568230904204e-05, |
|
"loss": 0.9939, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1128140948882107e-05, |
|
"loss": 1.0699, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1101705683916473e-05, |
|
"loss": 1.1226, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1075262623068352e-05, |
|
"loss": 1.0694, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1048811953453955e-05, |
|
"loss": 1.1485, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1022353862243338e-05, |
|
"loss": 1.1421, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.0995888536659067e-05, |
|
"loss": 0.981, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.096941616397491e-05, |
|
"loss": 1.099, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0942936931514492e-05, |
|
"loss": 0.9755, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0916451026649981e-05, |
|
"loss": 1.1162, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.088995863680077e-05, |
|
"loss": 0.9486, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0863459949432122e-05, |
|
"loss": 1.0728, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0836955152053883e-05, |
|
"loss": 1.022, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.081044443221912e-05, |
|
"loss": 1.0214, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0783927977522819e-05, |
|
"loss": 1.0447, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0757405975600534e-05, |
|
"loss": 1.1324, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0730878614127087e-05, |
|
"loss": 1.0334, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0704346080815218e-05, |
|
"loss": 0.9593, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0677808563414256e-05, |
|
"loss": 1.0855, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0651266249708816e-05, |
|
"loss": 0.9969, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0624719327517434e-05, |
|
"loss": 1.0853, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0598167984691276e-05, |
|
"loss": 1.0692, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.057161240911277e-05, |
|
"loss": 1.1015, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0545052788694312e-05, |
|
"loss": 1.0251, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0518489311376905e-05, |
|
"loss": 1.0395, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0491922165128853e-05, |
|
"loss": 1.1046, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0465351537944429e-05, |
|
"loss": 1.0587, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.043877761784252e-05, |
|
"loss": 1.0852, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0412200592865331e-05, |
|
"loss": 1.0562, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0385620651077024e-05, |
|
"loss": 1.1277, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0359037980562416e-05, |
|
"loss": 1.1861, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0332452769425619e-05, |
|
"loss": 1.0468, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0305865205788728e-05, |
|
"loss": 1.1171, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0279275477790487e-05, |
|
"loss": 1.0895, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0252683773584953e-05, |
|
"loss": 1.1341, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0226090281340168e-05, |
|
"loss": 0.9769, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0199495189236828e-05, |
|
"loss": 0.9259, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0172898685466947e-05, |
|
"loss": 1.1301, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0146300958232528e-05, |
|
"loss": 1.0171, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0119702195744236e-05, |
|
"loss": 1.062, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0093102586220056e-05, |
|
"loss": 0.9874, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0066502317883969e-05, |
|
"loss": 1.0152, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0039901578964619e-05, |
|
"loss": 1.0578, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0013300557693981e-05, |
|
"loss": 1.0976, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.986699442306025e-06, |
|
"loss": 1.0745, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.960098421035383e-06, |
|
"loss": 1.0151, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.933497682116035e-06, |
|
"loss": 1.0811, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.906897413779949e-06, |
|
"loss": 1.1467, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.88029780425577e-06, |
|
"loss": 1.1107, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.853699041767473e-06, |
|
"loss": 1.0815, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.827101314533056e-06, |
|
"loss": 1.0757, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.800504810763176e-06, |
|
"loss": 1.0236, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.773909718659831e-06, |
|
"loss": 1.0345, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.747316226415052e-06, |
|
"loss": 1.0998, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.720724522209518e-06, |
|
"loss": 1.0581, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.694134794211277e-06, |
|
"loss": 1.0571, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.667547230574386e-06, |
|
"loss": 1.0675, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.64096201943759e-06, |
|
"loss": 1.0064, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.61437934892298e-06, |
|
"loss": 1.0512, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.587799407134672e-06, |
|
"loss": 1.0125, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.56122238215748e-06, |
|
"loss": 1.1364, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.534648462055576e-06, |
|
"loss": 0.9677, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.50807783487115e-06, |
|
"loss": 0.9366, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.481510688623098e-06, |
|
"loss": 1.099, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.454947211305691e-06, |
|
"loss": 1.1557, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.42838759088723e-06, |
|
"loss": 1.1092, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.401832015308728e-06, |
|
"loss": 1.0027, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.375280672482567e-06, |
|
"loss": 0.9507, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.348733750291186e-06, |
|
"loss": 1.0384, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.322191436585745e-06, |
|
"loss": 1.1106, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.295653919184787e-06, |
|
"loss": 0.9639, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.269121385872915e-06, |
|
"loss": 1.1055, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.242594024399467e-06, |
|
"loss": 1.0294, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.216072022477183e-06, |
|
"loss": 1.0803, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.189555567780882e-06, |
|
"loss": 1.0642, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.16304484794612e-06, |
|
"loss": 1.0686, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.13654005056788e-06, |
|
"loss": 1.0217, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.110041363199233e-06, |
|
"loss": 1.0821, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.083548973350019e-06, |
|
"loss": 1.01, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.057063068485513e-06, |
|
"loss": 1.0153, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.030583836025093e-06, |
|
"loss": 1.0438, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.004111463340935e-06, |
|
"loss": 1.008, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.977646137756662e-06, |
|
"loss": 0.9625, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.951188046546048e-06, |
|
"loss": 0.9189, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.924737376931651e-06, |
|
"loss": 1.0021, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.898294316083529e-06, |
|
"loss": 1.0809, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.871859051117896e-06, |
|
"loss": 1.117, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.8454317690958e-06, |
|
"loss": 1.1136, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.819012657021794e-06, |
|
"loss": 1.0478, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.79260190184263e-06, |
|
"loss": 1.035, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.76619969044592e-06, |
|
"loss": 1.086, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.739806209658812e-06, |
|
"loss": 1.135, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.713421646246692e-06, |
|
"loss": 0.9785, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.687046186911819e-06, |
|
"loss": 0.9707, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.660680018292053e-06, |
|
"loss": 1.0241, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.634323326959501e-06, |
|
"loss": 1.0647, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.60797629941921e-06, |
|
"loss": 1.1372, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.581639122107837e-06, |
|
"loss": 1.0297, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.555311981392342e-06, |
|
"loss": 0.9526, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.528995063568673e-06, |
|
"loss": 0.9586, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.502688554860426e-06, |
|
"loss": 1.092, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.47639264141754e-06, |
|
"loss": 1.0226, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.450107509314983e-06, |
|
"loss": 1.085, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.423833344551443e-06, |
|
"loss": 1.1103, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.397570333047985e-06, |
|
"loss": 0.986, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.37131866064676e-06, |
|
"loss": 1.0804, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.345078513109677e-06, |
|
"loss": 1.0916, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.31885007611709e-06, |
|
"loss": 1.0922, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.2926335352665e-06, |
|
"loss": 1.0426, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.266429076071221e-06, |
|
"loss": 1.0726, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.240236883959067e-06, |
|
"loss": 1.0394, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.214057144271058e-06, |
|
"loss": 1.0831, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.187890042260094e-06, |
|
"loss": 1.0468, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.161735763089654e-06, |
|
"loss": 1.0715, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.13559449183247e-06, |
|
"loss": 1.0629, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.109466413469238e-06, |
|
"loss": 1.0247, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.083351712887288e-06, |
|
"loss": 1.0005, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.057250574879296e-06, |
|
"loss": 1.2048, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.031163184141965e-06, |
|
"loss": 1.0668, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.005089725274711e-06, |
|
"loss": 1.1425, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 7.979030382778376e-06, |
|
"loss": 0.9857, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 7.952985341053902e-06, |
|
"loss": 1.0851, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.92695478440105e-06, |
|
"loss": 1.068, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.900938897017064e-06, |
|
"loss": 0.962, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.874937862995401e-06, |
|
"loss": 1.0436, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.848951866324402e-06, |
|
"loss": 1.0016, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.822981090886011e-06, |
|
"loss": 1.0617, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.79702572045445e-06, |
|
"loss": 1.0548, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.771085938694943e-06, |
|
"loss": 1.0146, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.745161929162405e-06, |
|
"loss": 1.124, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.719253875300138e-06, |
|
"loss": 1.0498, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.693361960438548e-06, |
|
"loss": 1.1152, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.667486367793822e-06, |
|
"loss": 1.0943, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.641627280466663e-06, |
|
"loss": 1.0807, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.615784881440975e-06, |
|
"loss": 1.0324, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.589959353582574e-06, |
|
"loss": 1.0629, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.564150879637882e-06, |
|
"loss": 1.0459, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.538359642232654e-06, |
|
"loss": 1.0255, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.5125858238706785e-06, |
|
"loss": 1.0333, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.486829606932478e-06, |
|
"loss": 1.0706, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.461091173674022e-06, |
|
"loss": 1.0349, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.43537070622544e-06, |
|
"loss": 1.0736, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.40966838658974e-06, |
|
"loss": 0.9974, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.383984396641506e-06, |
|
"loss": 1.0728, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.358318918125613e-06, |
|
"loss": 1.0178, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.332672132655953e-06, |
|
"loss": 1.0454, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.307044221714139e-06, |
|
"loss": 1.0209, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.2814353666482276e-06, |
|
"loss": 1.1334, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.2558457486714316e-06, |
|
"loss": 0.9714, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.230275548860833e-06, |
|
"loss": 1.0516, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.2047249481561125e-06, |
|
"loss": 1.1283, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.179194127358258e-06, |
|
"loss": 1.0491, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.153683267128304e-06, |
|
"loss": 1.0826, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.128192547986023e-06, |
|
"loss": 1.1392, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.102722150308678e-06, |
|
"loss": 1.0416, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.077272254329726e-06, |
|
"loss": 1.0953, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.051843040137558e-06, |
|
"loss": 1.0443, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.026434687674204e-06, |
|
"loss": 1.0803, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.001047376734087e-06, |
|
"loss": 1.0346, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.975681286962724e-06, |
|
"loss": 1.0129, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.9503365978554735e-06, |
|
"loss": 1.0135, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.925013488756264e-06, |
|
"loss": 1.0746, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.8997121388563e-06, |
|
"loss": 1.0111, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.874432727192837e-06, |
|
"loss": 0.9851, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.849175432647875e-06, |
|
"loss": 0.9814, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.823940433946921e-06, |
|
"loss": 0.9906, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.798727909657698e-06, |
|
"loss": 1.0407, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.773538038188912e-06, |
|
"loss": 1.051, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.74837099778896e-06, |
|
"loss": 1.1052, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.723226966544691e-06, |
|
"loss": 0.9945, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.69810612238013e-06, |
|
"loss": 1.0537, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.673008643055228e-06, |
|
"loss": 1.015, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.6479347061646046e-06, |
|
"loss": 1.1698, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.622884489136286e-06, |
|
"loss": 1.058, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.597858169230454e-06, |
|
"loss": 1.0748, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.572855923538186e-06, |
|
"loss": 1.0525, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.547877928980206e-06, |
|
"loss": 1.0324, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.522924362305639e-06, |
|
"loss": 1.0841, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.497995400090748e-06, |
|
"loss": 1.0314, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.4730912187376895e-06, |
|
"loss": 0.93, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.448211994473263e-06, |
|
"loss": 1.0023, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.42335790334768e-06, |
|
"loss": 1.1017, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.398529121233291e-06, |
|
"loss": 0.9936, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.373725823823359e-06, |
|
"loss": 1.0008, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.348948186630815e-06, |
|
"loss": 0.9642, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.324196384987009e-06, |
|
"loss": 1.0331, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.2994705940404825e-06, |
|
"loss": 1.1336, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.274770988755712e-06, |
|
"loss": 1.0058, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.250097743911877e-06, |
|
"loss": 1.0476, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.225451034101631e-06, |
|
"loss": 0.9599, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.200831033729864e-06, |
|
"loss": 1.0348, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.176237917012459e-06, |
|
"loss": 1.0001, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.151671857975061e-06, |
|
"loss": 1.0962, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.12713303045186e-06, |
|
"loss": 1.0898, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.10262160808434e-06, |
|
"loss": 1.0083, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.0781377643200765e-06, |
|
"loss": 1.0719, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.053681672411471e-06, |
|
"loss": 0.9877, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.029253505414565e-06, |
|
"loss": 1.1058, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.004853436187794e-06, |
|
"loss": 1.0424, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.9804816373907625e-06, |
|
"loss": 1.0619, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.956138281483039e-06, |
|
"loss": 1.1046, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.931823540722912e-06, |
|
"loss": 1.0912, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.907537587166191e-06, |
|
"loss": 1.0446, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.883280592664979e-06, |
|
"loss": 1.0525, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.859052728866468e-06, |
|
"loss": 0.9887, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.834854167211699e-06, |
|
"loss": 1.0369, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.810685078934375e-06, |
|
"loss": 1.0696, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.78654563505964e-06, |
|
"loss": 1.0302, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.762436006402874e-06, |
|
"loss": 1.059, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.738356363568463e-06, |
|
"loss": 1.1394, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.714306876948621e-06, |
|
"loss": 1.0213, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.69028771672216e-06, |
|
"loss": 1.0083, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.666299052853314e-06, |
|
"loss": 1.0249, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.642341055090508e-06, |
|
"loss": 1.1016, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.618413892965158e-06, |
|
"loss": 1.0996, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.5945177357904935e-06, |
|
"loss": 1.0578, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.570652752660343e-06, |
|
"loss": 1.0476, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.546819112447952e-06, |
|
"loss": 1.0377, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.523016983804759e-06, |
|
"loss": 1.0016, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.499246535159231e-06, |
|
"loss": 1.0697, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.47550793471566e-06, |
|
"loss": 1.1112, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.451801350452975e-06, |
|
"loss": 1.0682, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.428126950123551e-06, |
|
"loss": 1.1169, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.404484901252023e-06, |
|
"loss": 0.9758, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.3808753711341e-06, |
|
"loss": 1.0212, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.357298526835381e-06, |
|
"loss": 1.0372, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.333754535190186e-06, |
|
"loss": 1.1271, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.3102435628003435e-06, |
|
"loss": 1.063, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.286765776034044e-06, |
|
"loss": 1.1389, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.263321341024646e-06, |
|
"loss": 0.9841, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.239910423669509e-06, |
|
"loss": 0.7626, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.216533189628808e-06, |
|
"loss": 0.7865, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.193189804324376e-06, |
|
"loss": 0.7634, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.169880432938519e-06, |
|
"loss": 0.8215, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.146605240412859e-06, |
|
"loss": 0.7284, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.123364391447156e-06, |
|
"loss": 0.7751, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.100158050498159e-06, |
|
"loss": 0.8276, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.076986381778417e-06, |
|
"loss": 0.8012, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.053849549255143e-06, |
|
"loss": 0.755, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.03074771664904e-06, |
|
"loss": 0.7815, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.0076810474331395e-06, |
|
"loss": 0.7611, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.984649704831658e-06, |
|
"loss": 0.8147, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.961653851818827e-06, |
|
"loss": 0.8307, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.938693651117751e-06, |
|
"loss": 0.7899, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.9157692651992495e-06, |
|
"loss": 0.819, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.892880856280713e-06, |
|
"loss": 0.7603, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.870028586324947e-06, |
|
"loss": 0.8007, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.847212617039037e-06, |
|
"loss": 0.8197, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.82443310987319e-06, |
|
"loss": 0.8041, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.801690226019606e-06, |
|
"loss": 0.7963, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.77898412641133e-06, |
|
"loss": 0.7348, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.756314971721115e-06, |
|
"loss": 0.6906, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.733682922360282e-06, |
|
"loss": 0.7446, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.71108813847759e-06, |
|
"loss": 0.8379, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.688530779958099e-06, |
|
"loss": 0.7473, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.666011006422041e-06, |
|
"loss": 0.7785, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.643528977223689e-06, |
|
"loss": 0.8199, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.621084851450229e-06, |
|
"loss": 0.7547, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.5986787879206375e-06, |
|
"loss": 0.7269, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.5763109451845515e-06, |
|
"loss": 0.7745, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.553981481521156e-06, |
|
"loss": 0.7706, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.531690554938043e-06, |
|
"loss": 0.7923, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.509438323170131e-06, |
|
"loss": 0.7361, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.487224943678513e-06, |
|
"loss": 0.7218, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.465050573649359e-06, |
|
"loss": 0.6608, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.442915369992802e-06, |
|
"loss": 0.8203, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.4208194893418125e-06, |
|
"loss": 0.7754, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.398763088051127e-06, |
|
"loss": 0.6917, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3767463221961034e-06, |
|
"loss": 0.7366, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.354769347571638e-06, |
|
"loss": 0.7909, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.332832319691044e-06, |
|
"loss": 0.7557, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3109353937849815e-06, |
|
"loss": 0.7832, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.289078724800331e-06, |
|
"loss": 0.7443, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.267262467399114e-06, |
|
"loss": 0.6829, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.24548677595738e-06, |
|
"loss": 0.7148, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.22375180456413e-06, |
|
"loss": 0.6658, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.202057707020235e-06, |
|
"loss": 0.6883, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.180404636837321e-06, |
|
"loss": 0.6841, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.158792747236702e-06, |
|
"loss": 0.7621, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.137222191148282e-06, |
|
"loss": 0.7775, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.11569312120949e-06, |
|
"loss": 0.7944, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.0942056897641934e-06, |
|
"loss": 0.8151, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.072760048861614e-06, |
|
"loss": 0.6909, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.051356350255246e-06, |
|
"loss": 0.7683, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.0299947454018e-06, |
|
"loss": 0.7324, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.008675385460131e-06, |
|
"loss": 0.8605, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.987398421290155e-06, |
|
"loss": 0.6989, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.966164003451775e-06, |
|
"loss": 0.7251, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.944972282203844e-06, |
|
"loss": 0.805, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.923823407503076e-06, |
|
"loss": 0.7412, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.902717529003005e-06, |
|
"loss": 0.8276, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.88165479605291e-06, |
|
"loss": 0.744, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.860635357696756e-06, |
|
"loss": 0.7458, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.839659362672156e-06, |
|
"loss": 0.7778, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.818726959409305e-06, |
|
"loss": 0.7298, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.7978382960299476e-06, |
|
"loss": 0.8165, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.776993520346295e-06, |
|
"loss": 0.8294, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.756192779860014e-06, |
|
"loss": 0.715, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.7354362217611652e-06, |
|
"loss": 0.7522, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.714723992927177e-06, |
|
"loss": 0.7537, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.694056239921776e-06, |
|
"loss": 0.8263, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6734331089939835e-06, |
|
"loss": 0.8068, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6528547460770636e-06, |
|
"loss": 0.7623, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6323212967874866e-06, |
|
"loss": 0.6378, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6118329064239222e-06, |
|
"loss": 0.7163, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5913897199661716e-06, |
|
"loss": 0.715, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5709918820741816e-06, |
|
"loss": 0.7586, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5506395370869963e-06, |
|
"loss": 0.7395, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5303328290217453e-06, |
|
"loss": 0.7425, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5100719015726228e-06, |
|
"loss": 0.7336, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.4898568981098678e-06, |
|
"loss": 0.7556, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.469687961678757e-06, |
|
"loss": 0.8236, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.4495652349985844e-06, |
|
"loss": 0.8095, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.429488860461655e-06, |
|
"loss": 0.7911, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.4094589801322773e-06, |
|
"loss": 0.7544, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.389475735745761e-06, |
|
"loss": 0.7854, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.3695392687074045e-06, |
|
"loss": 0.6854, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3496497200915067e-06, |
|
"loss": 0.7875, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3298072306403595e-06, |
|
"loss": 0.8737, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3100119407632556e-06, |
|
"loss": 0.7294, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.2902639905354948e-06, |
|
"loss": 0.742, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2705635196973927e-06, |
|
"loss": 0.7496, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2509106676532897e-06, |
|
"loss": 0.8544, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.231305573470569e-06, |
|
"loss": 0.7902, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2117483758786683e-06, |
|
"loss": 0.7222, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.192239213268099e-06, |
|
"loss": 0.7472, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.17277822368947e-06, |
|
"loss": 0.7353, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1533655448525057e-06, |
|
"loss": 0.824, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.134001314125079e-06, |
|
"loss": 0.7616, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.114685668532229e-06, |
|
"loss": 0.694, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.0954187447551996e-06, |
|
"loss": 0.6772, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.076200679130471e-06, |
|
"loss": 0.7149, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.0570316076487918e-06, |
|
"loss": 0.6822, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0379116659542186e-06, |
|
"loss": 0.7332, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0188409893431556e-06, |
|
"loss": 0.7217, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.999819712763402e-06, |
|
"loss": 0.7803, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.9808479708131864e-06, |
|
"loss": 0.7129, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9619258977402253e-06, |
|
"loss": 0.7679, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.943053627440771e-06, |
|
"loss": 0.7511, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.924231293458647e-06, |
|
"loss": 0.7012, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.90545902898434e-06, |
|
"loss": 0.7584, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.886736966854019e-06, |
|
"loss": 0.7744, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8680652395486198e-06, |
|
"loss": 0.797, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.849443979192892e-06, |
|
"loss": 0.7356, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8308733175544724e-06, |
|
"loss": 0.7267, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.812353386042962e-06, |
|
"loss": 0.7347, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7938843157089734e-06, |
|
"loss": 0.677, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.775466237243226e-06, |
|
"loss": 0.7115, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7570992809755937e-06, |
|
"loss": 0.8489, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.73878357687422e-06, |
|
"loss": 0.7027, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.720519254544568e-06, |
|
"loss": 0.7935, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.702306443228516e-06, |
|
"loss": 0.748, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.6841452718034343e-06, |
|
"loss": 0.8216, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.666035868781285e-06, |
|
"loss": 0.8043, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6479783623077105e-06, |
|
"loss": 0.7297, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.62997288016112e-06, |
|
"loss": 0.676, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6120195497517818e-06, |
|
"loss": 0.7872, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5941184981209354e-06, |
|
"loss": 0.7718, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5762698519398832e-06, |
|
"loss": 0.7205, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5584737375091016e-06, |
|
"loss": 0.6992, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5407302807573387e-06, |
|
"loss": 0.7619, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5230396072407204e-06, |
|
"loss": 0.7293, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5054018421418737e-06, |
|
"loss": 0.7979, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.487817110269042e-06, |
|
"loss": 0.725, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.470285536055188e-06, |
|
"loss": 0.7741, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4528072435571158e-06, |
|
"loss": 0.7211, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4353823564546064e-06, |
|
"loss": 0.693, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4180109980495293e-06, |
|
"loss": 0.8077, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4006932912649816e-06, |
|
"loss": 0.7579, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3834293586444e-06, |
|
"loss": 0.7526, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3662193223507135e-06, |
|
"loss": 0.699, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.349063304165462e-06, |
|
"loss": 0.7606, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.331961425487956e-06, |
|
"loss": 0.853, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3149138073343958e-06, |
|
"loss": 0.773, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.297920570337019e-06, |
|
"loss": 0.7557, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.2809818347432598e-06, |
|
"loss": 0.7767, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.2640977204148838e-06, |
|
"loss": 0.8156, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.2472683468271584e-06, |
|
"loss": 0.7607, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.230493833067977e-06, |
|
"loss": 0.8509, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.213774297837047e-06, |
|
"loss": 0.7933, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.1971098594450315e-06, |
|
"loss": 0.7459, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.1805006358127213e-06, |
|
"loss": 0.7633, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1639467444701934e-06, |
|
"loss": 0.76, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1474483025559857e-06, |
|
"loss": 0.8054, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1310054268162628e-06, |
|
"loss": 0.7315, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.114618233603992e-06, |
|
"loss": 0.8457, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0982868388781286e-06, |
|
"loss": 0.7856, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0820113582027734e-06, |
|
"loss": 0.7577, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0657919067463773e-06, |
|
"loss": 0.8005, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0496285992809163e-06, |
|
"loss": 0.7323, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.033521550181078e-06, |
|
"loss": 0.737, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0174708734234596e-06, |
|
"loss": 0.7039, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0014766825857514e-06, |
|
"loss": 0.8001, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.985539090845943e-06, |
|
"loss": 0.8213, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9696582109815145e-06, |
|
"loss": 0.7024, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9538341553686446e-06, |
|
"loss": 0.7473, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.93806703598141e-06, |
|
"loss": 0.8634, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9223569643909978e-06, |
|
"loss": 0.7337, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9067040517649115e-06, |
|
"loss": 0.675, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8911084088661903e-06, |
|
"loss": 0.7118, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8755701460526166e-06, |
|
"loss": 0.786, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.860089373275945e-06, |
|
"loss": 0.8187, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8446662000811177e-06, |
|
"loss": 0.7859, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8293007356054903e-06, |
|
"loss": 0.8582, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8139930885780621e-06, |
|
"loss": 0.7031, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.7987433673187026e-06, |
|
"loss": 0.8058, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7835516797373908e-06, |
|
"loss": 0.7231, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7684181333334437e-06, |
|
"loss": 0.7504, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7533428351947634e-06, |
|
"loss": 0.7031, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7383258919970746e-06, |
|
"loss": 0.7784, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7233674100031728e-06, |
|
"loss": 0.7856, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7084674950621694e-06, |
|
"loss": 0.7928, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6936262526087432e-06, |
|
"loss": 0.8125, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6788437876623963e-06, |
|
"loss": 0.7592, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6641202048267102e-06, |
|
"loss": 0.755, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6494556082886038e-06, |
|
"loss": 0.7177, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.634850101817601e-06, |
|
"loss": 0.7806, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6203037887650842e-06, |
|
"loss": 0.7891, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.6058167720635832e-06, |
|
"loss": 0.7573, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5913891542260284e-06, |
|
"loss": 0.7019, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5770210373450356e-06, |
|
"loss": 0.8272, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5627125230921725e-06, |
|
"loss": 0.781, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5484637127172609e-06, |
|
"loss": 0.792, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5342747070476339e-06, |
|
"loss": 0.8272, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.520145606487442e-06, |
|
"loss": 0.7784, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.506076511016935e-06, |
|
"loss": 0.7424, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4920675201917467e-06, |
|
"loss": 0.7235, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4781187331422109e-06, |
|
"loss": 0.7974, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4642302485726423e-06, |
|
"loss": 0.7787, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4504021647606448e-06, |
|
"loss": 0.7455, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4366345795564084e-06, |
|
"loss": 0.7663, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4229275903820306e-06, |
|
"loss": 0.7163, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.409281294230821e-06, |
|
"loss": 0.7627, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.395695787666611e-06, |
|
"loss": 0.7525, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3821711668230675e-06, |
|
"loss": 0.7615, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3687075274030238e-06, |
|
"loss": 0.7913, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3553049646777993e-06, |
|
"loss": 0.7802, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.34196357348652e-06, |
|
"loss": 0.7872, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3286834482354506e-06, |
|
"loss": 0.7357, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3154646828973217e-06, |
|
"loss": 0.6946, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3023073710106726e-06, |
|
"loss": 0.7104, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.2892116056791927e-06, |
|
"loss": 0.7292, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.2761774795710502e-06, |
|
"loss": 0.7176, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2632050849182365e-06, |
|
"loss": 0.6822, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2502945135159272e-06, |
|
"loss": 0.7543, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2374458567218217e-06, |
|
"loss": 0.7733, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2246592054555062e-06, |
|
"loss": 0.8225, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2119346501977914e-06, |
|
"loss": 0.8064, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.1992722809900925e-06, |
|
"loss": 0.7189, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.1866721874337827e-06, |
|
"loss": 0.7857, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.1741344586895642e-06, |
|
"loss": 0.7682, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1616591834768299e-06, |
|
"loss": 0.6905, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.149246450073036e-06, |
|
"loss": 0.7895, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1368963463130866e-06, |
|
"loss": 0.7626, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1246089595887023e-06, |
|
"loss": 0.715, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1123843768478148e-06, |
|
"loss": 0.7468, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.100222684593929e-06, |
|
"loss": 0.6507, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.088123968885534e-06, |
|
"loss": 0.7542, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0760883153354818e-06, |
|
"loss": 0.8011, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0641158091103832e-06, |
|
"loss": 0.7302, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0522065349300103e-06, |
|
"loss": 0.7747, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.040360577066688e-06, |
|
"loss": 0.7688, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.028578019344706e-06, |
|
"loss": 0.7384, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0168589451397204e-06, |
|
"loss": 0.7658, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0052034373781716e-06, |
|
"loss": 0.7751, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.936115785366817e-07, |
|
"loss": 0.8098, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.820834506414866e-07, |
|
"loss": 0.7774, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.706191352678495e-07, |
|
"loss": 0.7921, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.592187135394826e-07, |
|
"loss": 0.7449, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.478822661279763e-07, |
|
"loss": 0.7424, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.366098732522233e-07, |
|
"loss": 0.8077, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.254016146778555e-07, |
|
"loss": 0.8037, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.1425756971668e-07, |
|
"loss": 0.741, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.03177817226113e-07, |
|
"loss": 0.764, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 8.921624356086256e-07, |
|
"loss": 0.7839, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.81211502811189e-07, |
|
"loss": 0.7792, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.703250963247223e-07, |
|
"loss": 0.8151, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.595032931835423e-07, |
|
"loss": 0.7714, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.487461699648203e-07, |
|
"loss": 0.8234, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.380538027880425e-07, |
|
"loss": 0.7568, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.274262673144651e-07, |
|
"loss": 0.7692, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.168636387465856e-07, |
|
"loss": 0.784, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.063659918276056e-07, |
|
"loss": 0.7713, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.95933400840907e-07, |
|
"loss": 0.7208, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.855659396095183e-07, |
|
"loss": 0.6972, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.752636814956027e-07, |
|
"loss": 0.7664, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.6502669939993e-07, |
|
"loss": 0.7246, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.548550657613651e-07, |
|
"loss": 0.7973, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.447488525563551e-07, |
|
"loss": 0.8285, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.347081312984194e-07, |
|
"loss": 0.7857, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.247329730376429e-07, |
|
"loss": 0.7565, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.148234483601746e-07, |
|
"loss": 0.6821, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.049796273877297e-07, |
|
"loss": 0.8031, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.952015797770862e-07, |
|
"loss": 0.7517, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.854893747196034e-07, |
|
"loss": 0.7194, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.758430809407169e-07, |
|
"loss": 0.7053, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.662627666994725e-07, |
|
"loss": 0.7842, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.567484997880247e-07, |
|
"loss": 0.7345, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.47300347531169e-07, |
|
"loss": 0.73, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.37918376785851e-07, |
|
"loss": 0.8712, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.286026539407164e-07, |
|
"loss": 0.8146, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.193532449156203e-07, |
|
"loss": 0.7262, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.101702151611688e-07, |
|
"loss": 0.7452, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.010536296582592e-07, |
|
"loss": 0.7688, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.920035529176082e-07, |
|
"loss": 0.7653, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.830200489793136e-07, |
|
"loss": 0.7568, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.741031814123843e-07, |
|
"loss": 0.711, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.652530133143042e-07, |
|
"loss": 0.7736, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.564696073105669e-07, |
|
"loss": 0.7911, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.477530255542573e-07, |
|
"loss": 0.7315, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.391033297255932e-07, |
|
"loss": 0.7599, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.305205810314951e-07, |
|
"loss": 0.7397, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.220048402051503e-07, |
|
"loss": 0.754, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.135561675055889e-07, |
|
"loss": 0.8184, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.051746227172538e-07, |
|
"loss": 0.7996, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.96860265149578e-07, |
|
"loss": 0.715, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.886131536365623e-07, |
|
"loss": 0.8323, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.804333465363609e-07, |
|
"loss": 0.6795, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.723209017308727e-07, |
|
"loss": 0.7394, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.6427587662532636e-07, |
|
"loss": 0.8024, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.562983281478761e-07, |
|
"loss": 0.7783, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.4838831274919505e-07, |
|
"loss": 0.726, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.4054588640208285e-07, |
|
"loss": 0.8476, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.327711046010663e-07, |
|
"loss": 0.7458, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.2506402236200616e-07, |
|
"loss": 0.8423, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.1742469422170417e-07, |
|
"loss": 0.7711, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.0985317423752557e-07, |
|
"loss": 0.857, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.0234951598700725e-07, |
|
"loss": 0.7365, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.94913772567489e-07, |
|
"loss": 0.6751, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.875459965957307e-07, |
|
"loss": 0.7548, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.802462402075358e-07, |
|
"loss": 0.8037, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.7301455505739494e-07, |
|
"loss": 0.7338, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.6585099231810863e-07, |
|
"loss": 0.754, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.587556026804362e-07, |
|
"loss": 0.8021, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.5172843635272403e-07, |
|
"loss": 0.738, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.4476954306056023e-07, |
|
"loss": 0.7903, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.378789720464193e-07, |
|
"loss": 0.7734, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.31056772069317e-07, |
|
"loss": 0.8347, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.2430299140445597e-07, |
|
"loss": 0.725, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.176176778428974e-07, |
|
"loss": 0.6322, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.110008786912122e-07, |
|
"loss": 0.7819, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.044526407711501e-07, |
|
"loss": 0.7837, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.97973010419309e-07, |
|
"loss": 0.76, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.915620334868074e-07, |
|
"loss": 0.7325, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.852197553389568e-07, |
|
"loss": 0.7902, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.789462208549454e-07, |
|
"loss": 0.7256, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.727414744275147e-07, |
|
"loss": 0.728, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.666055599626527e-07, |
|
"loss": 0.7852, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.6053852087927436e-07, |
|
"loss": 0.7888, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.5454040010892354e-07, |
|
"loss": 0.7621, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.486112400954621e-07, |
|
"loss": 0.8157, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.427510827947721e-07, |
|
"loss": 0.7354, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.3695996967446178e-07, |
|
"loss": 0.8115, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.3123794171356683e-07, |
|
"loss": 0.7528, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.2558503940226296e-07, |
|
"loss": 0.7649, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.2000130274158039e-07, |
|
"loss": 0.7848, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.144867712431198e-07, |
|
"loss": 0.7815, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.0904148392877354e-07, |
|
"loss": 0.7311, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.0366547933044712e-07, |
|
"loss": 0.7462, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.983587954897881e-07, |
|
"loss": 0.7176, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.931214699579176e-07, |
|
"loss": 0.6902, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8795353979516596e-07, |
|
"loss": 0.7834, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8285504157080414e-07, |
|
"loss": 0.7117, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.7782601136279277e-07, |
|
"loss": 0.7548, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.7286648475752122e-07, |
|
"loss": 0.7306, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.6797649684956118e-07, |
|
"loss": 0.7424, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.6315608224141023e-07, |
|
"loss": 0.8624, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.5840527504325632e-07, |
|
"loss": 0.7367, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.5372410887272814e-07, |
|
"loss": 0.7027, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4911261685466416e-07, |
|
"loss": 0.7757, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4457083162087383e-07, |
|
"loss": 0.7566, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4009878530990784e-07, |
|
"loss": 0.7135, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.3569650956682944e-07, |
|
"loss": 0.7213, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.313640355429946e-07, |
|
"loss": 0.6434, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2710139389582654e-07, |
|
"loss": 0.7905, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2290861478860272e-07, |
|
"loss": 0.847, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.1878572789023935e-07, |
|
"loss": 0.7078, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1473276237507935e-07, |
|
"loss": 0.6616, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1074974692269258e-07, |
|
"loss": 0.8124, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.068367097176659e-07, |
|
"loss": 0.7143, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.02993678449409e-07, |
|
"loss": 0.6879, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.92206803119511e-08, |
|
"loss": 0.7451, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.551774200375896e-08, |
|
"loss": 0.7455, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.188488972753928e-08, |
|
"loss": 0.8372, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.832214919005877e-08, |
|
"loss": 0.8751, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.482954560195655e-08, |
|
"loss": 0.7331, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.14071036775721e-08, |
|
"loss": 0.7416, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 7.805484763476756e-08, |
|
"loss": 0.7256, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 7.477280119475239e-08, |
|
"loss": 0.8067, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.156098758192453e-08, |
|
"loss": 0.7268, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.841942952369618e-08, |
|
"loss": 0.7826, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.53481492503405e-08, |
|
"loss": 0.7759, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.23471684948318e-08, |
|
"loss": 0.716, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.9416508492688986e-08, |
|
"loss": 0.7764, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.655618998182899e-08, |
|
"loss": 0.7926, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.376623320241914e-08, |
|
"loss": 0.8057, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.10466578967328e-08, |
|
"loss": 0.721, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.8397483309011726e-08, |
|
"loss": 0.8369, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.581872818532729e-08, |
|
"loss": 0.8173, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.331041077344944e-08, |
|
"loss": 0.712, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.087254882271574e-08, |
|
"loss": 0.7406, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.8505159583911434e-08, |
|
"loss": 0.7365, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.6208259809139554e-08, |
|
"loss": 0.6632, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.3981865751705477e-08, |
|
"loss": 0.7471, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.182599316600699e-08, |
|
"loss": 0.7481, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.974065730741327e-08, |
|
"loss": 0.745, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.772587293216611e-08, |
|
"loss": 0.7976, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.5781654297267756e-08, |
|
"loss": 0.7092, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.3908015160385433e-08, |
|
"loss": 0.7884, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.2104968779752546e-08, |
|
"loss": 0.7642, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.037252791407318e-08, |
|
"loss": 0.7772, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.8710704822435534e-08, |
|
"loss": 0.772, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.7119511264220844e-08, |
|
"loss": 0.7676, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.5598958499021265e-08, |
|
"loss": 0.7742, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.4149057286562128e-08, |
|
"loss": 0.8167, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.2769817886626456e-08, |
|
"loss": 0.7761, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.1461250058977248e-08, |
|
"loss": 0.7587, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.02233630632953e-08, |
|
"loss": 0.7846, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.056165659107053e-09, |
|
"loss": 0.6886, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.959666105727959e-09, |
|
"loss": 0.7579, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.933872162199207e-09, |
|
"loss": 0.7947, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.978791087239977e-09, |
|
"loss": 0.757, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.0944296391863825e-09, |
|
"loss": 0.8102, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.2807940759515e-09, |
|
"loss": 0.8435, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.53789015497763e-09, |
|
"loss": 0.8808, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.8657231331985503e-09, |
|
"loss": 0.6947, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.264297766995105e-09, |
|
"loss": 0.7229, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.7336183121730022e-09, |
|
"loss": 0.8263, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.273688523919514e-09, |
|
"loss": 0.7912, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 8.845116567879342e-10, |
|
"loss": 0.811, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.660904646698217e-10, |
|
"loss": 0.7072, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.1842720077390754e-10, |
|
"loss": 0.756, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.4152361761388122e-10, |
|
"loss": 0.7184, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.5380966993958655e-11, |
|
"loss": 0.7508, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.738, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1218, |
|
"total_flos": 2.7901147998899405e+17, |
|
"train_loss": 1.106034378749005, |
|
"train_runtime": 3656.3925, |
|
"train_samples_per_second": 42.667, |
|
"train_steps_per_second": 0.333 |
|
} |
|
], |
|
"max_steps": 1218, |
|
"num_train_epochs": 3, |
|
"total_flos": 2.7901147998899405e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|