{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9843125192248539,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5.405405405405406e-07,
      "loss": 1.5039,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 1.5785,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.6216216216216219e-06,
      "loss": 1.2981,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.1621621621621623e-06,
      "loss": 1.1631,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.702702702702703e-06,
      "loss": 1.2493,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.2432432432432437e-06,
      "loss": 1.2413,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.7837837837837844e-06,
      "loss": 1.2368,
      "step": 7
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.324324324324325e-06,
      "loss": 1.2152,
      "step": 8
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.864864864864866e-06,
      "loss": 1.1487,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.405405405405406e-06,
      "loss": 1.116,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.945945945945947e-06,
      "loss": 1.179,
      "step": 11
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.486486486486487e-06,
      "loss": 1.2134,
      "step": 12
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.027027027027028e-06,
      "loss": 1.1641,
      "step": 13
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.567567567567569e-06,
      "loss": 1.0623,
      "step": 14
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.108108108108109e-06,
      "loss": 1.0922,
      "step": 15
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.64864864864865e-06,
      "loss": 1.0979,
      "step": 16
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.189189189189191e-06,
      "loss": 1.1572,
      "step": 17
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.729729729729732e-06,
      "loss": 1.0788,
      "step": 18
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.027027027027027e-05,
      "loss": 1.1467,
      "step": 19
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0810810810810812e-05,
      "loss": 1.0852,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1351351351351352e-05,
      "loss": 1.1176,
      "step": 21
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1891891891891894e-05,
      "loss": 1.1049,
      "step": 22
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2432432432432433e-05,
      "loss": 1.0972,
      "step": 23
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2972972972972975e-05,
      "loss": 1.1404,
      "step": 24
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 1.1416,
      "step": 25
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.4054054054054055e-05,
      "loss": 1.1458,
      "step": 26
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4594594594594596e-05,
      "loss": 1.1497,
      "step": 27
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5135135135135138e-05,
      "loss": 1.14,
      "step": 28
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5675675675675676e-05,
      "loss": 1.0808,
      "step": 29
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.6216216216216218e-05,
      "loss": 1.0996,
      "step": 30
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6756756756756757e-05,
      "loss": 1.1064,
      "step": 31
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.72972972972973e-05,
      "loss": 1.1823,
      "step": 32
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.783783783783784e-05,
      "loss": 1.0483,
      "step": 33
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8378378378378383e-05,
      "loss": 1.1019,
      "step": 34
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.891891891891892e-05,
      "loss": 1.0325,
      "step": 35
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9459459459459463e-05,
      "loss": 1.1006,
      "step": 36
    },
    {
      "epoch": 0.09,
      "learning_rate": 2e-05,
      "loss": 1.0715,
      "step": 37
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.999996461903301e-05,
      "loss": 1.1542,
      "step": 38
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999858476382388e-05,
      "loss": 1.1402,
      "step": 39
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999681572799226e-05,
      "loss": 1.0163,
      "step": 40
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999433909535333e-05,
      "loss": 1.0851,
      "step": 41
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999115488343213e-05,
      "loss": 1.1119,
      "step": 42
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999872631147608e-05,
      "loss": 1.0726,
      "step": 43
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999826638168783e-05,
      "loss": 1.0927,
      "step": 44
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997735702233006e-05,
      "loss": 1.0697,
      "step": 45
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.99971342768668e-05,
      "loss": 1.0363,
      "step": 46
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999646210984502e-05,
      "loss": 1.1251,
      "step": 47
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999571920592405e-05,
      "loss": 1.127,
      "step": 48
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9994905570360817e-05,
      "loss": 1.0994,
      "step": 49
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999402120891276e-05,
      "loss": 1.0688,
      "step": 50
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.99930661278378e-05,
      "loss": 1.0544,
      "step": 51
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9992040333894273e-05,
      "loss": 1.0908,
      "step": 52
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9990943834340893e-05,
      "loss": 1.1394,
      "step": 53
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9989776636936705e-05,
      "loss": 1.1065,
      "step": 54
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9988538749941024e-05,
      "loss": 1.0752,
      "step": 55
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9987230182113374e-05,
      "loss": 1.076,
      "step": 56
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998585094271344e-05,
      "loss": 1.0504,
      "step": 57
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998440104150098e-05,
      "loss": 1.0934,
      "step": 58
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998288048873578e-05,
      "loss": 1.1355,
      "step": 59
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9981289295177566e-05,
      "loss": 1.1693,
      "step": 60
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9979627472085927e-05,
      "loss": 1.0514,
      "step": 61
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997789503122025e-05,
      "loss": 1.119,
      "step": 62
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9976091984839616e-05,
      "loss": 1.0635,
      "step": 63
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9974218345702733e-05,
      "loss": 1.1111,
      "step": 64
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9972274127067838e-05,
      "loss": 1.1732,
      "step": 65
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.997025934269259e-05,
      "loss": 1.149,
      "step": 66
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9968174006833996e-05,
      "loss": 1.1011,
      "step": 67
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9966018134248296e-05,
      "loss": 1.1367,
      "step": 68
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9963791740190863e-05,
      "loss": 1.1354,
      "step": 69
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996149484041609e-05,
      "loss": 1.0343,
      "step": 70
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9959127451177287e-05,
      "loss": 1.1997,
      "step": 71
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9956689589226555e-05,
      "loss": 1.1131,
      "step": 72
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9954181271814673e-05,
      "loss": 1.0335,
      "step": 73
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9951602516690988e-05,
      "loss": 1.0786,
      "step": 74
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9948953342103268e-05,
      "loss": 1.0926,
      "step": 75
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.994623376679758e-05,
      "loss": 1.1353,
      "step": 76
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9943443810018174e-05,
      "loss": 1.0605,
      "step": 77
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9940583491507314e-05,
      "loss": 1.1255,
      "step": 78
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.993765283150517e-05,
      "loss": 1.0541,
      "step": 79
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9934651850749663e-05,
      "loss": 1.1197,
      "step": 80
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9931580570476306e-05,
      "loss": 1.1297,
      "step": 81
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9928439012418076e-05,
      "loss": 1.1324,
      "step": 82
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9925227198805247e-05,
      "loss": 1.0846,
      "step": 83
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9921945152365235e-05,
      "loss": 1.1831,
      "step": 84
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9918592896322432e-05,
      "loss": 1.0921,
      "step": 85
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9915170454398045e-05,
      "loss": 1.1037,
      "step": 86
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9911677850809943e-05,
      "loss": 1.125,
      "step": 87
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9908115110272463e-05,
      "loss": 1.1279,
      "step": 88
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9904482257996244e-05,
      "loss": 1.1427,
      "step": 89
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990077931968805e-05,
      "loss": 1.1475,
      "step": 90
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9897006321550592e-05,
      "loss": 1.136,
      "step": 91
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9893163290282335e-05,
      "loss": 1.0469,
      "step": 92
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9889250253077306e-05,
      "loss": 1.1028,
      "step": 93
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9885267237624923e-05,
      "loss": 1.1775,
      "step": 94
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.988121427210976e-05,
      "loss": 1.1744,
      "step": 95
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.98770913852114e-05,
      "loss": 1.1049,
      "step": 96
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9872898606104175e-05,
      "loss": 1.1085,
      "step": 97
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9868635964457007e-05,
      "loss": 1.1153,
      "step": 98
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.986430349043317e-05,
      "loss": 1.1637,
      "step": 99
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9859901214690094e-05,
      "loss": 1.0971,
      "step": 100
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9855429168379127e-05,
      "loss": 1.1537,
      "step": 101
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985088738314534e-05,
      "loss": 1.0882,
      "step": 102
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9846275891127275e-05,
      "loss": 1.128,
      "step": 103
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9841594724956746e-05,
      "loss": 1.14,
      "step": 104
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9836843917758593e-05,
      "loss": 1.0273,
      "step": 105
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.983202350315044e-05,
      "loss": 1.0891,
      "step": 106
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982713351524248e-05,
      "loss": 1.106,
      "step": 107
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.982217398863721e-05,
      "loss": 1.0599,
      "step": 108
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.98171449584292e-05,
      "loss": 1.0396,
      "step": 109
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9812046460204837e-05,
      "loss": 1.0767,
      "step": 110
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9806878530042083e-05,
      "loss": 1.1568,
      "step": 111
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9801641204510216e-05,
      "loss": 1.1534,
      "step": 112
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9796334520669555e-05,
      "loss": 1.0332,
      "step": 113
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9790958516071228e-05,
      "loss": 1.1018,
      "step": 114
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.978551322875688e-05,
      "loss": 1.0634,
      "step": 115
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.977999869725842e-05,
      "loss": 1.0766,
      "step": 116
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.977441496059774e-05,
      "loss": 1.0456,
      "step": 117
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9768762058286433e-05,
      "loss": 1.0047,
      "step": 118
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976304003032554e-05,
      "loss": 1.0798,
      "step": 119
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9757248917205228e-05,
      "loss": 1.1055,
      "step": 120
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.975138875990454e-05,
      "loss": 1.1921,
      "step": 121
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.974545959989108e-05,
      "loss": 1.0465,
      "step": 122
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9739461479120727e-05,
      "loss": 1.129,
      "step": 123
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.973339444003735e-05,
      "loss": 1.0773,
      "step": 124
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9727258525572487e-05,
      "loss": 1.0955,
      "step": 125
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9721053779145057e-05,
      "loss": 1.0941,
      "step": 126
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9714780244661044e-05,
      "loss": 1.1672,
      "step": 127
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9708437966513196e-05,
      "loss": 1.1014,
      "step": 128
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9702026989580694e-05,
      "loss": 1.1272,
      "step": 129
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.969554735922885e-05,
      "loss": 1.0398,
      "step": 130
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.968899912130879e-05,
      "loss": 1.106,
      "step": 131
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9682382322157103e-05,
      "loss": 1.1033,
      "step": 132
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9675697008595545e-05,
      "loss": 1.0154,
      "step": 133
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9668943227930686e-05,
      "loss": 1.1219,
      "step": 134
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.966212102795358e-05,
      "loss": 1.0824,
      "step": 135
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.965523045693944e-05,
      "loss": 1.0581,
      "step": 136
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.964827156364728e-05,
      "loss": 1.1327,
      "step": 137
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.964124439731957e-05,
      "loss": 1.1144,
      "step": 138
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9634149007681894e-05,
      "loss": 1.1436,
      "step": 139
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.962698544494261e-05,
      "loss": 1.0457,
      "step": 140
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9619753759792466e-05,
      "loss": 1.1053,
      "step": 141
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.961245400340427e-05,
      "loss": 1.0881,
      "step": 142
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9605086227432512e-05,
      "loss": 1.1058,
      "step": 143
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9597650484012997e-05,
      "loss": 1.1418,
      "step": 144
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9590146825762476e-05,
      "loss": 1.0623,
      "step": 145
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9582575305778297e-05,
      "loss": 1.0987,
      "step": 146
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9574935977637994e-05,
      "loss": 1.0764,
      "step": 147
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9567228895398936e-05,
      "loss": 1.153,
      "step": 148
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.955945411359792e-05,
      "loss": 1.1233,
      "step": 149
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9551611687250808e-05,
      "loss": 1.0827,
      "step": 150
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9543701671852127e-05,
      "loss": 1.0487,
      "step": 151
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9535724123374674e-05,
      "loss": 1.0795,
      "step": 152
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.952767909826913e-05,
      "loss": 1.15,
      "step": 153
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.951956665346364e-05,
      "loss": 1.0645,
      "step": 154
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.951138684636344e-05,
      "loss": 1.0764,
      "step": 155
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9503139734850426e-05,
      "loss": 1.1185,
      "step": 156
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9494825377282746e-05,
      "loss": 1.0879,
      "step": 157
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9486443832494414e-05,
      "loss": 1.0558,
      "step": 158
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9477995159794854e-05,
      "loss": 1.1042,
      "step": 159
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9469479418968506e-05,
      "loss": 1.1029,
      "step": 160
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9460896670274408e-05,
      "loss": 0.9836,
      "step": 161
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9452246974445743e-05,
      "loss": 1.1325,
      "step": 162
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9443530392689434e-05,
      "loss": 1.1018,
      "step": 163
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.94347469866857e-05,
      "loss": 1.1525,
      "step": 164
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9425896818587615e-05,
      "loss": 1.0719,
      "step": 165
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.941697995102069e-05,
      "loss": 1.1595,
      "step": 166
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9407996447082394e-05,
      "loss": 1.1698,
      "step": 167
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.939894637034174e-05,
      "loss": 1.131,
      "step": 168
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9389829784838833e-05,
      "loss": 1.0604,
      "step": 169
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.938064675508438e-05,
      "loss": 1.0422,
      "step": 170
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9371397346059286e-05,
      "loss": 1.1098,
      "step": 171
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.936208162321415e-05,
      "loss": 1.0682,
      "step": 172
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9352699652468835e-05,
      "loss": 1.0763,
      "step": 173
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9343251500211977e-05,
      "loss": 1.1038,
      "step": 174
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.933373723330053e-05,
      "loss": 1.0613,
      "step": 175
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9324156919059286e-05,
      "loss": 1.1157,
      "step": 176
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.93145106252804e-05,
      "loss": 1.1264,
      "step": 177
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9304798420222918e-05,
      "loss": 1.0787,
      "step": 178
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9295020372612276e-05,
      "loss": 1.1298,
      "step": 179
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9285176551639826e-05,
      "loss": 1.1175,
      "step": 180
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9275267026962358e-05,
      "loss": 1.1686,
      "step": 181
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9265291868701584e-05,
      "loss": 1.0939,
      "step": 182
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9255251147443646e-05,
      "loss": 1.1674,
      "step": 183
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.924514493423864e-05,
      "loss": 1.0747,
      "step": 184
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9234973300600074e-05,
      "loss": 1.1085,
      "step": 185
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.92247363185044e-05,
      "loss": 1.1157,
      "step": 186
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9214434060390484e-05,
      "loss": 1.1827,
      "step": 187
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9204066599159094e-05,
      "loss": 1.1126,
      "step": 188
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9193634008172396e-05,
      "loss": 1.1158,
      "step": 189
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9183136361253417e-05,
      "loss": 1.0609,
      "step": 190
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.917257373268554e-05,
      "loss": 1.0617,
      "step": 191
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.916194619721196e-05,
      "loss": 1.0724,
      "step": 192
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.915125383003518e-05,
      "loss": 1.0784,
      "step": 193
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.914049670681646e-05,
      "loss": 1.1141,
      "step": 194
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.912967490367528e-05,
      "loss": 1.0305,
      "step": 195
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9118788497188815e-05,
      "loss": 1.026,
      "step": 196
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9107837564391376e-05,
      "loss": 1.0663,
      "step": 197
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9096822182773887e-05,
      "loss": 1.0368,
      "step": 198
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9085742430283322e-05,
      "loss": 1.1052,
      "step": 199
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.907459838532215e-05,
      "loss": 1.0834,
      "step": 200
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9063390126747778e-05,
      "loss": 1.1579,
      "step": 201
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9052117733872025e-05,
      "loss": 1.1034,
      "step": 202
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.904078128646052e-05,
      "loss": 1.1173,
      "step": 203
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.902938086473215e-05,
      "loss": 1.1546,
      "step": 204
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.901791654935852e-05,
      "loss": 1.1274,
      "step": 205
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9006388421463322e-05,
      "loss": 1.0896,
      "step": 206
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.899479656262183e-05,
      "loss": 1.1937,
      "step": 207
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.898314105486028e-05,
      "loss": 1.0613,
      "step": 208
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8971421980655295e-05,
      "loss": 1.0896,
      "step": 209
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8959639422933316e-05,
      "loss": 1.0578,
      "step": 210
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.894779346506999e-05,
      "loss": 1.0935,
      "step": 211
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.893588419088962e-05,
      "loss": 1.0655,
      "step": 212
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.892391168466452e-05,
      "loss": 1.1562,
      "step": 213
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.891187603111447e-05,
      "loss": 1.1434,
      "step": 214
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8899777315406073e-05,
      "loss": 1.0462,
      "step": 215
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8887615623152188e-05,
      "loss": 1.1151,
      "step": 216
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.88753910404113e-05,
      "loss": 1.1457,
      "step": 217
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8863103653686917e-05,
      "loss": 1.0401,
      "step": 218
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8850753549926967e-05,
      "loss": 1.074,
      "step": 219
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8838340816523175e-05,
      "loss": 1.14,
      "step": 220
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8825865541310438e-05,
      "loss": 1.1483,
      "step": 221
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8813327812566217e-05,
      "loss": 1.156,
      "step": 222
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.880072771900991e-05,
      "loss": 1.0334,
      "step": 223
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.878806534980221e-05,
      "loss": 1.1854,
      "step": 224
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8775340794544497e-05,
      "loss": 1.0593,
      "step": 225
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.876255414327818e-05,
      "loss": 1.0863,
      "step": 226
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8749705486484074e-05,
      "loss": 1.2038,
      "step": 227
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8736794915081765e-05,
      "loss": 1.0991,
      "step": 228
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8723822520428954e-05,
      "loss": 1.0572,
      "step": 229
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8710788394320807e-05,
      "loss": 1.0798,
      "step": 230
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8697692628989327e-05,
      "loss": 1.082,
      "step": 231
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.868453531710268e-05,
      "loss": 1.1121,
      "step": 232
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8671316551764552e-05,
      "loss": 1.089,
      "step": 233
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.865803642651348e-05,
      "loss": 1.1132,
      "step": 234
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8644695035322203e-05,
      "loss": 1.1323,
      "step": 235
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8631292472596978e-05,
      "loss": 1.1352,
      "step": 236
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8617828833176935e-05,
      "loss": 1.0801,
      "step": 237
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.860430421233339e-05,
      "loss": 1.1049,
      "step": 238
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.859071870576918e-05,
      "loss": 1.0518,
      "step": 239
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.857707240961797e-05,
      "loss": 1.0691,
      "step": 240
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8563365420443594e-05,
      "loss": 1.0941,
      "step": 241
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.854959783523936e-05,
      "loss": 1.1016,
      "step": 242
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.853576975142736e-05,
      "loss": 1.0896,
      "step": 243
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.852188126685779e-05,
      "loss": 1.0822,
      "step": 244
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8507932479808254e-05,
      "loss": 0.9657,
      "step": 245
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8493923488983066e-05,
      "loss": 1.0548,
      "step": 246
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.847985439351256e-05,
      "loss": 1.0908,
      "step": 247
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.846572529295237e-05,
      "loss": 1.1709,
      "step": 248
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.845153628728274e-05,
      "loss": 1.1686,
      "step": 249
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8437287476907828e-05,
      "loss": 1.1704,
      "step": 250
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.842297896265497e-05,
      "loss": 1.054,
      "step": 251
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8408610845773974e-05,
      "loss": 1.1077,
      "step": 252
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8394183227936418e-05,
      "loss": 1.0797,
      "step": 253
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8379696211234918e-05,
      "loss": 1.1079,
      "step": 254
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8365149898182403e-05,
      "loss": 1.11,
      "step": 255
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8350544391711396e-05,
      "loss": 1.0777,
      "step": 256
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.833587979517329e-05,
      "loss": 1.0761,
      "step": 257
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8321156212337604e-05,
      "loss": 1.0941,
      "step": 258
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.830637374739126e-05,
      "loss": 1.128,
      "step": 259
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.829153250493783e-05,
      "loss": 1.0112,
      "step": 260
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.827663258999683e-05,
      "loss": 1.0975,
      "step": 261
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8261674108002925e-05,
      "loss": 1.0442,
      "step": 262
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.824665716480524e-05,
      "loss": 1.0653,
      "step": 263
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.823158186666656e-05,
      "loss": 1.0561,
      "step": 264
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.821644832026261e-05,
      "loss": 1.1615,
      "step": 265
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.82012566326813e-05,
      "loss": 1.1045,
      "step": 266
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8186006911421937e-05,
      "loss": 1.1486,
      "step": 267
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.817069926439451e-05,
      "loss": 1.1622,
      "step": 268
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8155333799918883e-05,
      "loss": 1.0986,
      "step": 269
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8139910626724058e-05,
      "loss": 1.0416,
      "step": 270
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8124429853947387e-05,
      "loss": 1.1328,
      "step": 271
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8108891591133812e-05,
      "loss": 1.0921,
      "step": 272
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.809329594823509e-05,
      "loss": 1.0795,
      "step": 273
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8077643035609006e-05,
      "loss": 1.1597,
      "step": 274
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.806193296401859e-05,
      "loss": 1.0555,
      "step": 275
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.804616584463136e-05,
      "loss": 1.1308,
      "step": 276
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.803034178901849e-05,
      "loss": 1.0728,
      "step": 277
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8014460909154058e-05,
      "loss": 1.0919,
      "step": 278
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.799852331741425e-05,
      "loss": 1.1573,
      "step": 279
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7982529126576543e-05,
      "loss": 1.0903,
      "step": 280
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7966478449818925e-05,
      "loss": 1.1511,
      "step": 281
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7950371400719087e-05,
      "loss": 1.1447,
      "step": 282
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7934208093253625e-05,
      "loss": 1.1149,
      "step": 283
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7917988641797227e-05,
      "loss": 1.1297,
      "step": 284
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7901713161121873e-05,
      "loss": 1.0903,
      "step": 285
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7885381766396008e-05,
      "loss": 1.0677,
      "step": 286
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.786899457318374e-05,
      "loss": 1.0687,
      "step": 287
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7852551697444017e-05,
      "loss": 1.0536,
      "step": 288
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.783605325552981e-05,
      "loss": 1.0364,
      "step": 289
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7819499364187282e-05,
      "loss": 1.1249,
      "step": 290
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.780289014055497e-05,
      "loss": 1.0534,
      "step": 291
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7786225702162955e-05,
      "loss": 1.0512,
      "step": 292
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7769506166932026e-05,
      "loss": 1.1828,
      "step": 293
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7752731653172847e-05,
      "loss": 1.064,
      "step": 294
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7735902279585118e-05,
      "loss": 1.0973,
      "step": 295
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7719018165256745e-05,
      "loss": 1.0576,
      "step": 296
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7702079429662986e-05,
      "loss": 1.1355,
      "step": 297
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7685086192665605e-05,
      "loss": 0.9462,
      "step": 298
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7668038574512045e-05,
      "loss": 1.0778,
      "step": 299
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7650936695834536e-05,
      "loss": 1.1013,
      "step": 300
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.763378067764929e-05,
      "loss": 1.0889,
      "step": 301
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7616570641355602e-05,
      "loss": 1.013,
      "step": 302
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.759930670873502e-05,
      "loss": 1.0847,
      "step": 303
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.758198900195047e-05,
      "loss": 1.1345,
      "step": 304
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7564617643545395e-05,
      "loss": 1.0481,
      "step": 305
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7547192756442887e-05,
      "loss": 1.0952,
      "step": 306
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7529714463944815e-05,
      "loss": 1.0869,
      "step": 307
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.751218288973096e-05,
      "loss": 1.1197,
      "step": 308
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7494598157858127e-05,
      "loss": 1.1011,
      "step": 309
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7476960392759284e-05,
      "loss": 1.079,
      "step": 310
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7459269719242665e-05,
      "loss": 1.0569,
      "step": 311
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.74415262624909e-05,
      "loss": 1.1322,
      "step": 312
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.742373014806012e-05,
      "loss": 1.121,
      "step": 313
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.740588150187907e-05,
      "loss": 1.1544,
      "step": 314
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7387980450248222e-05,
      "loss": 1.1591,
      "step": 315
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7370027119838884e-05,
      "loss": 1.0956,
      "step": 316
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.735202163769229e-05,
      "loss": 1.1096,
      "step": 317
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7333964131218714e-05,
      "loss": 1.0962,
      "step": 318
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7315854728196568e-05,
      "loss": 1.1088,
      "step": 319
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.729769355677149e-05,
      "loss": 1.0693,
      "step": 320
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7279480745455433e-05,
      "loss": 1.0909,
      "step": 321
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7261216423125782e-05,
      "loss": 1.0636,
      "step": 322
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.724290071902441e-05,
      "loss": 1.131,
      "step": 323
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7224533762756775e-05,
      "loss": 1.1824,
      "step": 324
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.720611568429103e-05,
      "loss": 1.002,
      "step": 325
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.718764661395704e-05,
      "loss": 1.0908,
      "step": 326
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.716912668244553e-05,
      "loss": 1.1213,
      "step": 327
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.715055602080711e-05,
      "loss": 1.1056,
      "step": 328
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7131934760451385e-05,
      "loss": 1.073,
      "step": 329
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7113263033145985e-05,
      "loss": 1.1817,
      "step": 330
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7094540971015663e-05,
      "loss": 1.0157,
      "step": 331
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7075768706541355e-05,
      "loss": 1.0835,
      "step": 332
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7056946372559234e-05,
      "loss": 0.9943,
      "step": 333
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7038074102259775e-05,
      "loss": 1.1713,
      "step": 334
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7019152029186817e-05,
      "loss": 1.0421,
      "step": 335
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.70001802872366e-05,
      "loss": 1.1,
      "step": 336
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6981159010656847e-05,
      "loss": 1.0711,
      "step": 337
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6962088334045785e-05,
      "loss": 1.1572,
      "step": 338
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.694296839235121e-05,
      "loss": 1.0448,
      "step": 339
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.692379932086953e-05,
      "loss": 1.0735,
      "step": 340
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6904581255244802e-05,
      "loss": 1.0977,
      "step": 341
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.688531433146777e-05,
      "loss": 1.0777,
      "step": 342
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6865998685874923e-05,
      "loss": 1.077,
      "step": 343
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6846634455147498e-05,
      "loss": 1.0797,
      "step": 344
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6827221776310532e-05,
      "loss": 1.0048,
      "step": 345
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6807760786731905e-05,
      "loss": 1.0392,
      "step": 346
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6788251624121335e-05,
      "loss": 1.1693,
      "step": 347
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6768694426529432e-05,
      "loss": 1.1036,
      "step": 348
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6749089332346714e-05,
      "loss": 1.0874,
      "step": 349
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.672943648030261e-05,
      "loss": 1.1224,
      "step": 350
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6709736009464504e-05,
      "loss": 1.0791,
      "step": 351
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.668998805923675e-05,
      "loss": 1.0822,
      "step": 352
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6670192769359643e-05,
      "loss": 1.0818,
      "step": 353
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6650350279908497e-05,
      "loss": 1.1335,
      "step": 354
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6630460731292597e-05,
      "loss": 1.0699,
      "step": 355
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.661052426425424e-05,
      "loss": 1.1318,
      "step": 356
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6590541019867722e-05,
      "loss": 1.1362,
      "step": 357
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6570511139538348e-05,
      "loss": 1.0223,
      "step": 358
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.655043476500142e-05,
      "loss": 1.0471,
      "step": 359
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6530312038321247e-05,
      "loss": 1.1665,
      "step": 360
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6510143101890136e-05,
      "loss": 1.0409,
      "step": 361
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6489928098427383e-05,
      "loss": 1.1231,
      "step": 362
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6469667170978258e-05,
      "loss": 1.0062,
      "step": 363
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6449360462913005e-05,
      "loss": 1.0764,
      "step": 364
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.642900811792582e-05,
      "loss": 1.1421,
      "step": 365
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.640861028003383e-05,
      "loss": 1.0809,
      "step": 366
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6388167093576083e-05,
      "loss": 1.1295,
      "step": 367
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6367678703212515e-05,
      "loss": 1.0813,
      "step": 368
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6347145253922942e-05,
      "loss": 1.1542,
      "step": 369
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.632656689100602e-05,
      "loss": 1.0917,
      "step": 370
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6305943760078226e-05,
      "loss": 1.1101,
      "step": 371
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.628527600707283e-05,
      "loss": 1.0929,
      "step": 372
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6264563778238834e-05,
      "loss": 1.074,
      "step": 373
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6243807220139988e-05,
      "loss": 1.0869,
      "step": 374
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6223006479653708e-05,
      "loss": 1.0546,
      "step": 375
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6202161703970057e-05,
      "loss": 1.0218,
      "step": 376
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6181273040590696e-05,
      "loss": 1.054,
      "step": 377
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.616034063732785e-05,
      "loss": 0.9993,
      "step": 378
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.613936464230325e-05,
      "loss": 1.0481,
      "step": 379
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6118345203947093e-05,
      "loss": 1.1722,
      "step": 380
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6097282470996997e-05,
      "loss": 1.1137,
      "step": 381
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6076176592496926e-05,
      "loss": 1.0729,
      "step": 382
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.605502771779616e-05,
      "loss": 1.0825,
      "step": 383
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.603383599654823e-05,
      "loss": 1.0767,
      "step": 384
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.601260157870985e-05,
      "loss": 1.1263,
      "step": 385
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.599132461453987e-05,
      "loss": 1.0745,
      "step": 386
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5970005254598204e-05,
      "loss": 1.1024,
      "step": 387
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.594864364974476e-05,
      "loss": 1.1268,
      "step": 388
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.592723995113839e-05,
      "loss": 1.0998,
      "step": 389
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5905794310235808e-05,
      "loss": 1.0569,
      "step": 390
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5884306878790512e-05,
      "loss": 1.0528,
      "step": 391
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.586277780885172e-05,
      "loss": 1.0959,
      "step": 392
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.58412072527633e-05,
      "loss": 1.167,
      "step": 393
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5819595363162682e-05,
      "loss": 1.11,
      "step": 394
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5797942292979767e-05,
      "loss": 1.1125,
      "step": 395
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.577624819543587e-05,
      "loss": 1.1226,
      "step": 396
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5754513224042625e-05,
      "loss": 1.134,
      "step": 397
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.573273753260089e-05,
      "loss": 1.1455,
      "step": 398
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.571092127519967e-05,
      "loss": 1.0762,
      "step": 399
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.568906460621502e-05,
      "loss": 1.0632,
      "step": 400
    }
  ],
  "max_steps": 1218,
  "num_train_epochs": 3,
  "total_flos": 9.670402784414925e+16,
  "trial_name": null,
  "trial_params": null
}