{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4921562596124269,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5.405405405405406e-07, "loss": 1.5039, "step": 1 },
    { "epoch": 0.0, "learning_rate": 1.0810810810810812e-06, "loss": 1.5785, "step": 2 },
    { "epoch": 0.01, "learning_rate": 1.6216216216216219e-06, "loss": 1.2981, "step": 3 },
    { "epoch": 0.01, "learning_rate": 2.1621621621621623e-06, "loss": 1.1631, "step": 4 },
    { "epoch": 0.01, "learning_rate": 2.702702702702703e-06, "loss": 1.2493, "step": 5 },
    { "epoch": 0.01, "learning_rate": 3.2432432432432437e-06, "loss": 1.2413, "step": 6 },
    { "epoch": 0.02, "learning_rate": 3.7837837837837844e-06, "loss": 1.2368, "step": 7 },
    { "epoch": 0.02, "learning_rate": 4.324324324324325e-06, "loss": 1.2152, "step": 8 },
    { "epoch": 0.02, "learning_rate": 4.864864864864866e-06, "loss": 1.1487, "step": 9 },
    { "epoch": 0.02, "learning_rate": 5.405405405405406e-06, "loss": 1.116, "step": 10 },
    { "epoch": 0.03, "learning_rate": 5.945945945945947e-06, "loss": 1.179, "step": 11 },
    { "epoch": 0.03, "learning_rate": 6.486486486486487e-06, "loss": 1.2134, "step": 12 },
    { "epoch": 0.03, "learning_rate": 7.027027027027028e-06, "loss": 1.1641, "step": 13 },
    { "epoch": 0.03, "learning_rate": 7.567567567567569e-06, "loss": 1.0623, "step": 14 },
    { "epoch": 0.04, "learning_rate": 8.108108108108109e-06, "loss": 1.0922, "step": 15 },
    { "epoch": 0.04, "learning_rate": 8.64864864864865e-06, "loss": 1.0979, "step": 16 },
    { "epoch": 0.04, "learning_rate": 9.189189189189191e-06, "loss": 1.1572, "step": 17 },
    { "epoch": 0.04, "learning_rate": 9.729729729729732e-06, "loss": 1.0788, "step": 18 },
    { "epoch": 0.05, "learning_rate": 1.027027027027027e-05, "loss": 1.1467, "step": 19 },
    { "epoch": 0.05, "learning_rate": 1.0810810810810812e-05, "loss": 1.0852, "step": 20 },
    { "epoch": 0.05, "learning_rate": 1.1351351351351352e-05, "loss": 1.1176, "step": 21 },
    { "epoch": 0.05, "learning_rate": 1.1891891891891894e-05, "loss": 1.1049, "step": 22 },
    { "epoch": 0.06, "learning_rate": 1.2432432432432433e-05, "loss": 1.0972, "step": 23 },
    { "epoch": 0.06, "learning_rate": 1.2972972972972975e-05, "loss": 1.1404, "step": 24 },
    { "epoch": 0.06, "learning_rate": 1.3513513513513515e-05, "loss": 1.1416, "step": 25 },
    { "epoch": 0.06, "learning_rate": 1.4054054054054055e-05, "loss": 1.1458, "step": 26 },
    { "epoch": 0.07, "learning_rate": 1.4594594594594596e-05, "loss": 1.1497, "step": 27 },
    { "epoch": 0.07, "learning_rate": 1.5135135135135138e-05, "loss": 1.14, "step": 28 },
    { "epoch": 0.07, "learning_rate": 1.5675675675675676e-05, "loss": 1.0808, "step": 29 },
    { "epoch": 0.07, "learning_rate": 1.6216216216216218e-05, "loss": 1.0996, "step": 30 },
    { "epoch": 0.08, "learning_rate": 1.6756756756756757e-05, "loss": 1.1064, "step": 31 },
    { "epoch": 0.08, "learning_rate": 1.72972972972973e-05, "loss": 1.1823, "step": 32 },
    { "epoch": 0.08, "learning_rate": 1.783783783783784e-05, "loss": 1.0483, "step": 33 },
    { "epoch": 0.08, "learning_rate": 1.8378378378378383e-05, "loss": 1.1019, "step": 34 },
    { "epoch": 0.09, "learning_rate": 1.891891891891892e-05, "loss": 1.0325, "step": 35 },
    { "epoch": 0.09, "learning_rate": 1.9459459459459463e-05, "loss": 1.1006, "step": 36 },
    { "epoch": 0.09, "learning_rate": 2e-05, "loss": 1.0715, "step": 37 },
    { "epoch": 0.09, "learning_rate": 1.999996461903301e-05, "loss": 1.1542, "step": 38 },
    { "epoch": 0.1, "learning_rate": 1.9999858476382388e-05, "loss": 1.1402, "step": 39 },
    { "epoch": 0.1, "learning_rate": 1.9999681572799226e-05, "loss": 1.0163, "step": 40 },
    { "epoch": 0.1, "learning_rate": 1.9999433909535333e-05, "loss": 1.0851, "step": 41 },
    { "epoch": 0.1, "learning_rate": 1.9999115488343213e-05, "loss": 1.1119, "step": 42 },
    { "epoch": 0.11, "learning_rate": 1.999872631147608e-05, "loss": 1.0726, "step": 43 },
    { "epoch": 0.11, "learning_rate": 1.999826638168783e-05, "loss": 1.0927, "step": 44 },
    { "epoch": 0.11, "learning_rate": 1.9997735702233006e-05, "loss": 1.0697, "step": 45 },
    { "epoch": 0.11, "learning_rate": 1.99971342768668e-05, "loss": 1.0363, "step": 46 },
    { "epoch": 0.12, "learning_rate": 1.999646210984502e-05, "loss": 1.1251, "step": 47 },
    { "epoch": 0.12, "learning_rate": 1.999571920592405e-05, "loss": 1.127, "step": 48 },
    { "epoch": 0.12, "learning_rate": 1.9994905570360817e-05, "loss": 1.0994, "step": 49 },
    { "epoch": 0.12, "learning_rate": 1.999402120891276e-05, "loss": 1.0688, "step": 50 },
    { "epoch": 0.13, "learning_rate": 1.99930661278378e-05, "loss": 1.0544, "step": 51 },
    { "epoch": 0.13, "learning_rate": 1.9992040333894273e-05, "loss": 1.0908, "step": 52 },
    { "epoch": 0.13, "learning_rate": 1.9990943834340893e-05, "loss": 1.1394, "step": 53 },
    { "epoch": 0.13, "learning_rate": 1.9989776636936705e-05, "loss": 1.1065, "step": 54 },
    { "epoch": 0.14, "learning_rate": 1.9988538749941024e-05, "loss": 1.0752, "step": 55 },
    { "epoch": 0.14, "learning_rate": 1.9987230182113374e-05, "loss": 1.076, "step": 56 },
    { "epoch": 0.14, "learning_rate": 1.998585094271344e-05, "loss": 1.0504, "step": 57 },
    { "epoch": 0.14, "learning_rate": 1.998440104150098e-05, "loss": 1.0934, "step": 58 },
    { "epoch": 0.15, "learning_rate": 1.998288048873578e-05, "loss": 1.1355, "step": 59 },
    { "epoch": 0.15, "learning_rate": 1.9981289295177566e-05, "loss": 1.1693, "step": 60 },
    { "epoch": 0.15, "learning_rate": 1.9979627472085927e-05, "loss": 1.0514, "step": 61 },
    { "epoch": 0.15, "learning_rate": 1.997789503122025e-05, "loss": 1.119, "step": 62 },
    { "epoch": 0.16, "learning_rate": 1.9976091984839616e-05, "loss": 1.0635, "step": 63 },
    { "epoch": 0.16, "learning_rate": 1.9974218345702733e-05, "loss": 1.1111, "step": 64 },
    { "epoch": 0.16, "learning_rate": 1.9972274127067838e-05, "loss": 1.1732, "step": 65 },
    { "epoch": 0.16, "learning_rate": 1.997025934269259e-05, "loss": 1.149, "step": 66 },
    { "epoch": 0.16, "learning_rate": 1.9968174006833996e-05, "loss": 1.1011, "step": 67 },
    { "epoch": 0.17, "learning_rate": 1.9966018134248296e-05, "loss": 1.1367, "step": 68 },
    { "epoch": 0.17, "learning_rate": 1.9963791740190863e-05, "loss": 1.1354, "step": 69 },
    { "epoch": 0.17, "learning_rate": 1.996149484041609e-05, "loss": 1.0343, "step": 70 },
    { "epoch": 0.17, "learning_rate": 1.9959127451177287e-05, "loss": 1.1997, "step": 71 },
    { "epoch": 0.18, "learning_rate": 1.9956689589226555e-05, "loss": 1.1131, "step": 72 },
    { "epoch": 0.18, "learning_rate": 1.9954181271814673e-05, "loss": 1.0335, "step": 73 },
    { "epoch": 0.18, "learning_rate": 1.9951602516690988e-05, "loss": 1.0786, "step": 74 },
    { "epoch": 0.18, "learning_rate": 1.9948953342103268e-05, "loss": 1.0926, "step": 75 },
    { "epoch": 0.19, "learning_rate": 1.994623376679758e-05, "loss": 1.1353, "step": 76 },
    { "epoch": 0.19, "learning_rate": 1.9943443810018174e-05, "loss": 1.0605, "step": 77 },
    { "epoch": 0.19, "learning_rate": 1.9940583491507314e-05, "loss": 1.1255, "step": 78 },
    { "epoch": 0.19, "learning_rate": 1.993765283150517e-05, "loss": 1.0541, "step": 79 },
    { "epoch": 0.2, "learning_rate": 1.9934651850749663e-05, "loss": 1.1197, "step": 80 },
    { "epoch": 0.2, "learning_rate": 1.9931580570476306e-05, "loss": 1.1297, "step": 81 },
    { "epoch": 0.2, "learning_rate": 1.9928439012418076e-05, "loss": 1.1324, "step": 82 },
    { "epoch": 0.2, "learning_rate": 1.9925227198805247e-05, "loss": 1.0846, "step": 83 },
    { "epoch": 0.21, "learning_rate": 1.9921945152365235e-05, "loss": 1.1831, "step": 84 },
    { "epoch": 0.21, "learning_rate": 1.9918592896322432e-05, "loss": 1.0921, "step": 85 },
    { "epoch": 0.21, "learning_rate": 1.9915170454398045e-05, "loss": 1.1037, "step": 86 },
    { "epoch": 0.21, "learning_rate": 1.9911677850809943e-05, "loss": 1.125, "step": 87 },
    { "epoch": 0.22, "learning_rate": 1.9908115110272463e-05, "loss": 1.1279, "step": 88 },
    { "epoch": 0.22, "learning_rate": 1.9904482257996244e-05, "loss": 1.1427, "step": 89 },
    { "epoch": 0.22, "learning_rate": 1.990077931968805e-05, "loss": 1.1475, "step": 90 },
    { "epoch": 0.22, "learning_rate": 1.9897006321550592e-05, "loss": 1.136, "step": 91 },
    { "epoch": 0.23, "learning_rate": 1.9893163290282335e-05, "loss": 1.0469, "step": 92 },
    { "epoch": 0.23, "learning_rate": 1.9889250253077306e-05, "loss": 1.1028, "step": 93 },
    { "epoch": 0.23, "learning_rate": 1.9885267237624923e-05, "loss": 1.1775, "step": 94 },
    { "epoch": 0.23, "learning_rate": 1.988121427210976e-05, "loss": 1.1744, "step": 95 },
    { "epoch": 0.24, "learning_rate": 1.98770913852114e-05, "loss": 1.1049, "step": 96 },
    { "epoch": 0.24, "learning_rate": 1.9872898606104175e-05, "loss": 1.1085, "step": 97 },
    { "epoch": 0.24, "learning_rate": 1.9868635964457007e-05, "loss": 1.1153, "step": 98 },
    { "epoch": 0.24, "learning_rate": 1.986430349043317e-05, "loss": 1.1637, "step": 99 },
    { "epoch": 0.25, "learning_rate": 1.9859901214690094e-05, "loss": 1.0971, "step": 100 },
    { "epoch": 0.25, "learning_rate": 1.9855429168379127e-05, "loss": 1.1537, "step": 101 },
    { "epoch": 0.25, "learning_rate": 1.985088738314534e-05, "loss": 1.0882, "step": 102 },
    { "epoch": 0.25, "learning_rate": 1.9846275891127275e-05, "loss": 1.128, "step": 103 },
    { "epoch": 0.26, "learning_rate": 1.9841594724956746e-05, "loss": 1.14, "step": 104 },
    { "epoch": 0.26, "learning_rate": 1.9836843917758593e-05, "loss": 1.0273, "step": 105 },
    { "epoch": 0.26, "learning_rate": 1.983202350315044e-05, "loss": 1.0891, "step": 106 },
    { "epoch": 0.26, "learning_rate": 1.982713351524248e-05, "loss": 1.106, "step": 107 },
    { "epoch": 0.27, "learning_rate": 1.982217398863721e-05, "loss": 1.0599, "step": 108 },
    { "epoch": 0.27, "learning_rate": 1.98171449584292e-05, "loss": 1.0396, "step": 109 },
    { "epoch": 0.27, "learning_rate": 1.9812046460204837e-05, "loss": 1.0767, "step": 110 },
    { "epoch": 0.27, "learning_rate": 1.9806878530042083e-05, "loss": 1.1568, "step": 111 },
    { "epoch": 0.28, "learning_rate": 1.9801641204510216e-05, "loss": 1.1534, "step": 112 },
    { "epoch": 0.28, "learning_rate": 1.9796334520669555e-05, "loss": 1.0332, "step": 113 },
    { "epoch": 0.28, "learning_rate": 1.9790958516071228e-05, "loss": 1.1018, "step": 114 },
    { "epoch": 0.28, "learning_rate": 1.978551322875688e-05, "loss": 1.0634, "step": 115 },
    { "epoch": 0.29, "learning_rate": 1.977999869725842e-05, "loss": 1.0766, "step": 116 },
    { "epoch": 0.29, "learning_rate": 1.977441496059774e-05, "loss": 1.0456, "step": 117 },
    { "epoch": 0.29, "learning_rate": 1.9768762058286433e-05, "loss": 1.0047, "step": 118 },
    { "epoch": 0.29, "learning_rate": 1.976304003032554e-05, "loss": 1.0798, "step": 119 },
    { "epoch": 0.3, "learning_rate": 1.9757248917205228e-05, "loss": 1.1055, "step": 120 },
    { "epoch": 0.3, "learning_rate": 1.975138875990454e-05, "loss": 1.1921, "step": 121 },
    { "epoch": 0.3, "learning_rate": 1.974545959989108e-05, "loss": 1.0465, "step": 122 },
    { "epoch": 0.3, "learning_rate": 1.9739461479120727e-05, "loss": 1.129, "step": 123 },
    { "epoch": 0.31, "learning_rate": 1.973339444003735e-05, "loss": 1.0773, "step": 124 },
    { "epoch": 0.31, "learning_rate": 1.9727258525572487e-05, "loss": 1.0955, "step": 125 },
    { "epoch": 0.31, "learning_rate": 1.9721053779145057e-05, "loss": 1.0941, "step": 126 },
    { "epoch": 0.31, "learning_rate": 1.9714780244661044e-05, "loss": 1.1672, "step": 127 },
    { "epoch": 0.31, "learning_rate": 1.9708437966513196e-05, "loss": 1.1014, "step": 128 },
    { "epoch": 0.32, "learning_rate": 1.9702026989580694e-05, "loss": 1.1272, "step": 129 },
    { "epoch": 0.32, "learning_rate": 1.969554735922885e-05, "loss": 1.0398, "step": 130 },
    { "epoch": 0.32, "learning_rate": 1.968899912130879e-05, "loss": 1.106, "step": 131 },
    { "epoch": 0.32, "learning_rate": 1.9682382322157103e-05, "loss": 1.1033, "step": 132 },
    { "epoch": 0.33, "learning_rate": 1.9675697008595545e-05, "loss": 1.0154, "step": 133 },
    { "epoch": 0.33, "learning_rate": 1.9668943227930686e-05, "loss": 1.1219, "step": 134 },
    { "epoch": 0.33, "learning_rate": 1.966212102795358e-05, "loss": 1.0824, "step": 135 },
    { "epoch": 0.33, "learning_rate": 1.965523045693944e-05, "loss": 1.0581, "step": 136 },
    { "epoch": 0.34, "learning_rate": 1.964827156364728e-05, "loss": 1.1327, "step": 137 },
    { "epoch": 0.34, "learning_rate": 1.964124439731957e-05, "loss": 1.1144, "step": 138 },
    { "epoch": 0.34, "learning_rate": 1.9634149007681894e-05, "loss": 1.1436, "step": 139 },
    { "epoch": 0.34, "learning_rate": 1.962698544494261e-05, "loss": 1.0457, "step": 140 },
    { "epoch": 0.35, "learning_rate": 1.9619753759792466e-05, "loss": 1.1053, "step": 141 },
    { "epoch": 0.35, "learning_rate": 1.961245400340427e-05, "loss": 1.0881, "step": 142 },
    { "epoch": 0.35, "learning_rate": 1.9605086227432512e-05, "loss": 1.1058, "step": 143 },
    { "epoch": 0.35, "learning_rate": 1.9597650484012997e-05, "loss": 1.1418, "step": 144 },
    { "epoch": 0.36, "learning_rate": 1.9590146825762476e-05, "loss": 1.0623, "step": 145 },
    { "epoch": 0.36, "learning_rate": 1.9582575305778297e-05, "loss": 1.0987, "step": 146 },
    { "epoch": 0.36, "learning_rate": 1.9574935977637994e-05, "loss": 1.0764, "step": 147 },
    { "epoch": 0.36, "learning_rate": 1.9567228895398936e-05, "loss": 1.153, "step": 148 },
    { "epoch": 0.37, "learning_rate": 1.955945411359792e-05, "loss": 1.1233, "step": 149 },
    { "epoch": 0.37, "learning_rate": 1.9551611687250808e-05, "loss": 1.0827, "step": 150 },
    { "epoch": 0.37, "learning_rate": 1.9543701671852127e-05, "loss": 1.0487, "step": 151 },
    { "epoch": 0.37, "learning_rate": 1.9535724123374674e-05, "loss": 1.0795, "step": 152 },
    { "epoch": 0.38, "learning_rate": 1.952767909826913e-05, "loss": 1.15, "step": 153 },
    { "epoch": 0.38, "learning_rate": 1.951956665346364e-05, "loss": 1.0645, "step": 154 },
    { "epoch": 0.38, "learning_rate": 1.951138684636344e-05, "loss": 1.0764, "step": 155 },
    { "epoch": 0.38, "learning_rate": 1.9503139734850426e-05, "loss": 1.1185, "step": 156 },
    { "epoch": 0.39, "learning_rate": 1.9494825377282746e-05, "loss": 1.0879, "step": 157 },
    { "epoch": 0.39, "learning_rate": 1.9486443832494414e-05, "loss": 1.0558, "step": 158 },
    { "epoch": 0.39, "learning_rate": 1.9477995159794854e-05, "loss": 1.1042, "step": 159 },
    { "epoch": 0.39, "learning_rate": 1.9469479418968506e-05, "loss": 1.1029, "step": 160 },
    { "epoch": 0.4, "learning_rate": 1.9460896670274408e-05, "loss": 0.9836, "step": 161 },
    { "epoch": 0.4, "learning_rate": 1.9452246974445743e-05, "loss": 1.1325, "step": 162 },
    { "epoch": 0.4, "learning_rate": 1.9443530392689434e-05, "loss": 1.1018, "step": 163 },
    { "epoch": 0.4, "learning_rate": 1.94347469866857e-05, "loss": 1.1525, "step": 164 },
    { "epoch": 0.41, "learning_rate": 1.9425896818587615e-05, "loss": 1.0719, "step": 165 },
    { "epoch": 0.41, "learning_rate": 1.941697995102069e-05, "loss": 1.1595, "step": 166 },
    { "epoch": 0.41, "learning_rate": 1.9407996447082394e-05, "loss": 1.1698, "step": 167 },
    { "epoch": 0.41, "learning_rate": 1.939894637034174e-05, "loss": 1.131, "step": 168 },
    { "epoch": 0.42, "learning_rate": 1.9389829784838833e-05, "loss": 1.0604, "step": 169 },
    { "epoch": 0.42, "learning_rate": 1.938064675508438e-05, "loss": 1.0422, "step": 170 },
    { "epoch": 0.42, "learning_rate": 1.9371397346059286e-05, "loss": 1.1098, "step": 171 },
    { "epoch": 0.42, "learning_rate": 1.936208162321415e-05, "loss": 1.0682, "step": 172 },
    { "epoch": 0.43, "learning_rate": 1.9352699652468835e-05, "loss": 1.0763, "step": 173 },
    { "epoch": 0.43, "learning_rate": 1.9343251500211977e-05, "loss": 1.1038, "step": 174 },
    { "epoch": 0.43, "learning_rate": 1.933373723330053e-05, "loss": 1.0613, "step": 175 },
    { "epoch": 0.43, "learning_rate": 1.9324156919059286e-05, "loss": 1.1157, "step": 176 },
    { "epoch": 0.44, "learning_rate": 1.93145106252804e-05, "loss": 1.1264, "step": 177 },
    { "epoch": 0.44, "learning_rate": 1.9304798420222918e-05, "loss": 1.0787, "step": 178 },
    { "epoch": 0.44, "learning_rate": 1.9295020372612276e-05, "loss": 1.1298, "step": 179 },
    { "epoch": 0.44, "learning_rate": 1.9285176551639826e-05, "loss": 1.1175, "step": 180 },
    { "epoch": 0.45, "learning_rate": 1.9275267026962358e-05, "loss": 1.1686, "step": 181 },
    { "epoch": 0.45, "learning_rate": 1.9265291868701584e-05, "loss": 1.0939, "step": 182 },
    { "epoch": 0.45, "learning_rate": 1.9255251147443646e-05, "loss": 1.1674, "step": 183 },
    { "epoch": 0.45, "learning_rate": 1.924514493423864e-05, "loss": 1.0747, "step": 184 },
    { "epoch": 0.46, "learning_rate": 1.9234973300600074e-05, "loss": 1.1085, "step": 185 },
    { "epoch": 0.46, "learning_rate": 1.92247363185044e-05, "loss": 1.1157, "step": 186 },
    { "epoch": 0.46, "learning_rate": 1.9214434060390484e-05, "loss": 1.1827, "step": 187 },
    { "epoch": 0.46, "learning_rate": 1.9204066599159094e-05, "loss": 1.1126, "step": 188 },
    { "epoch": 0.47, "learning_rate": 1.9193634008172396e-05, "loss": 1.1158, "step": 189 },
    { "epoch": 0.47, "learning_rate": 1.9183136361253417e-05, "loss": 1.0609, "step": 190 },
    { "epoch": 0.47, "learning_rate": 1.917257373268554e-05, "loss": 1.0617, "step": 191 },
    { "epoch": 0.47, "learning_rate": 1.916194619721196e-05, "loss": 1.0724, "step": 192 },
    { "epoch": 0.47, "learning_rate": 1.915125383003518e-05, "loss": 1.0784, "step": 193 },
    { "epoch": 0.48, "learning_rate": 1.914049670681646e-05, "loss": 1.1141, "step": 194 },
    { "epoch": 0.48, "learning_rate": 1.912967490367528e-05, "loss": 1.0305, "step": 195 },
    { "epoch": 0.48, "learning_rate": 1.9118788497188815e-05, "loss": 1.026, "step": 196 },
    { "epoch": 0.48, "learning_rate": 1.9107837564391376e-05, "loss": 1.0663, "step": 197 },
    { "epoch": 0.49, "learning_rate": 1.9096822182773887e-05, "loss": 1.0368, "step": 198 },
    { "epoch": 0.49, "learning_rate": 1.9085742430283322e-05, "loss": 1.1052, "step": 199 },
    { "epoch": 0.49, "learning_rate": 1.907459838532215e-05, "loss": 1.0834, "step": 200 }
  ],
  "max_steps": 1218,
  "num_train_epochs": 3,
  "total_flos": 4.842042281898803e+16,
  "trial_name": null,
  "trial_params": null
}