{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9988925802879294,
  "global_step": 1353,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.992609016999261e-05, |
|
"loss": 1.0451, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.985218033998522e-05, |
|
"loss": 3.8431, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.977827050997783e-05, |
|
"loss": 4.1686, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.970436067997045e-05, |
|
"loss": 4.1879, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.963045084996306e-05, |
|
"loss": 4.633, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.955654101995566e-05, |
|
"loss": 3.4155, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.948263118994827e-05, |
|
"loss": 3.3784, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.940872135994088e-05, |
|
"loss": 3.3627, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.933481152993348e-05, |
|
"loss": 3.2365, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.926090169992609e-05, |
|
"loss": 3.5945, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.91869918699187e-05, |
|
"loss": 3.4429, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.911308203991131e-05, |
|
"loss": 3.1824, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.903917220990391e-05, |
|
"loss": 3.2093, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.896526237989652e-05, |
|
"loss": 3.1471, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.889135254988914e-05, |
|
"loss": 3.0478, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.881744271988175e-05, |
|
"loss": 2.9833, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.874353288987436e-05, |
|
"loss": 2.9323, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.866962305986696e-05, |
|
"loss": 2.9048, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.859571322985957e-05, |
|
"loss": 2.8425, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.852180339985218e-05, |
|
"loss": 2.7644, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.84478935698448e-05, |
|
"loss": 2.7183, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.837398373983741e-05, |
|
"loss": 2.636, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.830007390983002e-05, |
|
"loss": 2.5524, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.822616407982262e-05, |
|
"loss": 2.4676, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.815225424981523e-05, |
|
"loss": 2.3894, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.807834441980784e-05, |
|
"loss": 2.3208, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.800443458980046e-05, |
|
"loss": 2.2373, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.793052475979305e-05, |
|
"loss": 2.1539, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.785661492978566e-05, |
|
"loss": 2.068, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.778270509977827e-05, |
|
"loss": 2.0042, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.770879526977087e-05, |
|
"loss": 1.9648, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.76348854397635e-05, |
|
"loss": 1.8853, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.75609756097561e-05, |
|
"loss": 1.8292, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.748706577974871e-05, |
|
"loss": 1.7822, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.741315594974132e-05, |
|
"loss": 1.7378, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.733924611973393e-05, |
|
"loss": 1.6707, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.726533628972653e-05, |
|
"loss": 1.6189, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.719142645971915e-05, |
|
"loss": 1.5756, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.711751662971176e-05, |
|
"loss": 1.5198, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.704360679970437e-05, |
|
"loss": 1.4909, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.696969696969698e-05, |
|
"loss": 1.4441, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.689578713968958e-05, |
|
"loss": 1.4154, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.682187730968219e-05, |
|
"loss": 1.3827, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.674796747967481e-05, |
|
"loss": 1.3536, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.667405764966742e-05, |
|
"loss": 1.3125, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.660014781966001e-05, |
|
"loss": 1.2792, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.652623798965262e-05, |
|
"loss": 1.264, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.645232815964523e-05, |
|
"loss": 1.2366, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.637841832963785e-05, |
|
"loss": 1.2136, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.630450849963046e-05, |
|
"loss": 1.2248, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.623059866962306e-05, |
|
"loss": 1.2063, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.615668883961567e-05, |
|
"loss": 1.2254, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.608277900960828e-05, |
|
"loss": 1.1853, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.600886917960089e-05, |
|
"loss": 1.1918, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.59349593495935e-05, |
|
"loss": 1.1578, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.586104951958611e-05, |
|
"loss": 1.1771, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.578713968957872e-05, |
|
"loss": 1.1823, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.571322985957133e-05, |
|
"loss": 1.1658, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.563932002956394e-05, |
|
"loss": 1.1454, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.556541019955654e-05, |
|
"loss": 1.1488, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.549150036954916e-05, |
|
"loss": 1.1033, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.541759053954177e-05, |
|
"loss": 1.1537, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.534368070953438e-05, |
|
"loss": 1.1373, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.526977087952697e-05, |
|
"loss": 1.1266, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.519586104951958e-05, |
|
"loss": 1.1476, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.51219512195122e-05, |
|
"loss": 1.124, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.504804138950481e-05, |
|
"loss": 1.1024, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.497413155949742e-05, |
|
"loss": 1.1007, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.490022172949002e-05, |
|
"loss": 1.1147, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.482631189948263e-05, |
|
"loss": 1.122, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.475240206947524e-05, |
|
"loss": 1.1051, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.467849223946786e-05, |
|
"loss": 1.1096, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.460458240946047e-05, |
|
"loss": 1.1002, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.453067257945307e-05, |
|
"loss": 1.0841, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.445676274944568e-05, |
|
"loss": 1.0895, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.438285291943829e-05, |
|
"loss": 1.0664, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.43089430894309e-05, |
|
"loss": 1.1017, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.423503325942352e-05, |
|
"loss": 1.085, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.416112342941612e-05, |
|
"loss": 1.0883, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.408721359940873e-05, |
|
"loss": 1.0971, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.401330376940134e-05, |
|
"loss": 1.0812, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.393939393939395e-05, |
|
"loss": 1.0899, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.386548410938655e-05, |
|
"loss": 1.0572, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.379157427937916e-05, |
|
"loss": 1.0804, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.371766444937177e-05, |
|
"loss": 1.0625, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.364375461936438e-05, |
|
"loss": 1.0475, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.356984478935698e-05, |
|
"loss": 1.0685, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.349593495934959e-05, |
|
"loss": 1.0713, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.342202512934221e-05, |
|
"loss": 1.0734, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.334811529933482e-05, |
|
"loss": 1.0612, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.327420546932743e-05, |
|
"loss": 1.0677, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.320029563932003e-05, |
|
"loss": 1.045, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.312638580931264e-05, |
|
"loss": 1.0542, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.305247597930525e-05, |
|
"loss": 1.0559, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.297856614929787e-05, |
|
"loss": 1.0632, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.290465631929048e-05, |
|
"loss": 1.0478, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.283074648928308e-05, |
|
"loss": 1.0505, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.275683665927569e-05, |
|
"loss": 1.0342, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.26829268292683e-05, |
|
"loss": 1.045, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.26090169992609e-05, |
|
"loss": 1.0467, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.253510716925351e-05, |
|
"loss": 1.0632, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.246119733924612e-05, |
|
"loss": 1.065, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.238728750923873e-05, |
|
"loss": 1.0673, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.231337767923134e-05, |
|
"loss": 1.0433, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.223946784922394e-05, |
|
"loss": 1.0589, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.216555801921656e-05, |
|
"loss": 1.0636, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.209164818920917e-05, |
|
"loss": 1.0308, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.201773835920178e-05, |
|
"loss": 1.0593, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.194382852919439e-05, |
|
"loss": 1.0585, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.1869918699187e-05, |
|
"loss": 1.0257, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.17960088691796e-05, |
|
"loss": 1.0379, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.172209903917222e-05, |
|
"loss": 1.0441, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.164818920916483e-05, |
|
"loss": 1.0517, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.157427937915744e-05, |
|
"loss": 1.0289, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.150036954915004e-05, |
|
"loss": 1.0441, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.142645971914265e-05, |
|
"loss": 1.0235, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.135254988913526e-05, |
|
"loss": 1.0419, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.127864005912788e-05, |
|
"loss": 1.049, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.120473022912047e-05, |
|
"loss": 1.0248, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.113082039911308e-05, |
|
"loss": 1.0314, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.105691056910569e-05, |
|
"loss": 1.0316, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.09830007390983e-05, |
|
"loss": 1.038, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.090909090909092e-05, |
|
"loss": 1.0373, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.083518107908352e-05, |
|
"loss": 1.0313, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.076127124907613e-05, |
|
"loss": 1.0317, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.068736141906874e-05, |
|
"loss": 1.0202, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.061345158906135e-05, |
|
"loss": 1.0621, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.053954175905395e-05, |
|
"loss": 1.0052, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.046563192904656e-05, |
|
"loss": 1.023, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.039172209903918e-05, |
|
"loss": 1.0052, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.031781226903179e-05, |
|
"loss": 1.0358, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.02439024390244e-05, |
|
"loss": 1.0257, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.0169992609017e-05, |
|
"loss": 1.0233, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.009608277900961e-05, |
|
"loss": 1.0122, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.002217294900222e-05, |
|
"loss": 0.9906, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.994826311899484e-05, |
|
"loss": 1.0247, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.987435328898745e-05, |
|
"loss": 1.0287, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.980044345898004e-05, |
|
"loss": 1.0192, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.972653362897265e-05, |
|
"loss": 1.007, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.965262379896526e-05, |
|
"loss": 1.0074, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.957871396895788e-05, |
|
"loss": 1.0187, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.950480413895048e-05, |
|
"loss": 1.0192, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 8.943089430894309e-05, |
|
"loss": 1.0086, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 8.93569844789357e-05, |
|
"loss": 0.9988, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 8.928307464892831e-05, |
|
"loss": 1.0108, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 8.920916481892091e-05, |
|
"loss": 1.0013, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 8.913525498891354e-05, |
|
"loss": 1.0042, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 8.906134515890614e-05, |
|
"loss": 0.993, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 8.898743532889875e-05, |
|
"loss": 1.0033, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 8.891352549889136e-05, |
|
"loss": 1.0007, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 8.883961566888396e-05, |
|
"loss": 0.9968, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.876570583887657e-05, |
|
"loss": 1.0201, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.869179600886919e-05, |
|
"loss": 1.0137, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.86178861788618e-05, |
|
"loss": 1.0233, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.854397634885441e-05, |
|
"loss": 0.9938, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.8470066518847e-05, |
|
"loss": 0.988, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.839615668883961e-05, |
|
"loss": 0.9947, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.832224685883223e-05, |
|
"loss": 0.9819, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.824833702882484e-05, |
|
"loss": 0.9967, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.817442719881744e-05, |
|
"loss": 1.0239, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.810051736881005e-05, |
|
"loss": 0.9696, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.802660753880266e-05, |
|
"loss": 0.9997, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.795269770879527e-05, |
|
"loss": 0.9927, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.787878787878789e-05, |
|
"loss": 0.9806, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.78048780487805e-05, |
|
"loss": 0.9724, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.77309682187731e-05, |
|
"loss": 1.0076, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.765705838876571e-05, |
|
"loss": 0.9766, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.758314855875832e-05, |
|
"loss": 1.027, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.750923872875092e-05, |
|
"loss": 0.9988, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.743532889874355e-05, |
|
"loss": 0.9907, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.736141906873615e-05, |
|
"loss": 0.9686, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.728750923872876e-05, |
|
"loss": 0.9985, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.721359940872137e-05, |
|
"loss": 0.9995, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.713968957871396e-05, |
|
"loss": 0.9907, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.706577974870658e-05, |
|
"loss": 1.0031, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.699186991869919e-05, |
|
"loss": 0.9838, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.69179600886918e-05, |
|
"loss": 1.0116, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.68440502586844e-05, |
|
"loss": 0.9775, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.677014042867701e-05, |
|
"loss": 0.9743, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.669623059866962e-05, |
|
"loss": 0.9861, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.662232076866224e-05, |
|
"loss": 0.9761, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.654841093865485e-05, |
|
"loss": 0.9742, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.647450110864746e-05, |
|
"loss": 0.9878, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.640059127864006e-05, |
|
"loss": 0.9682, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.632668144863267e-05, |
|
"loss": 0.9822, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.625277161862528e-05, |
|
"loss": 0.9825, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.61788617886179e-05, |
|
"loss": 0.9977, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.61049519586105e-05, |
|
"loss": 0.9965, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.603104212860311e-05, |
|
"loss": 0.9943, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.595713229859572e-05, |
|
"loss": 0.9714, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.588322246858833e-05, |
|
"loss": 0.9891, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.580931263858094e-05, |
|
"loss": 0.9951, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.573540280857354e-05, |
|
"loss": 1.0029, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.566149297856615e-05, |
|
"loss": 0.9935, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.558758314855876e-05, |
|
"loss": 0.9812, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.551367331855137e-05, |
|
"loss": 0.979, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 8.543976348854397e-05, |
|
"loss": 0.971, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 8.53658536585366e-05, |
|
"loss": 0.9928, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 8.52919438285292e-05, |
|
"loss": 0.9819, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 8.521803399852181e-05, |
|
"loss": 0.9603, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.514412416851442e-05, |
|
"loss": 0.9798, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.507021433850702e-05, |
|
"loss": 0.9626, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.499630450849963e-05, |
|
"loss": 0.9701, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.492239467849225e-05, |
|
"loss": 0.9601, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.484848484848486e-05, |
|
"loss": 0.9801, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.477457501847747e-05, |
|
"loss": 0.9757, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.470066518847007e-05, |
|
"loss": 0.9777, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.462675535846268e-05, |
|
"loss": 0.9564, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.455284552845529e-05, |
|
"loss": 0.9692, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.447893569844791e-05, |
|
"loss": 0.9658, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.44050258684405e-05, |
|
"loss": 0.9849, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.433111603843311e-05, |
|
"loss": 0.9817, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.425720620842572e-05, |
|
"loss": 0.9945, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.418329637841833e-05, |
|
"loss": 0.9493, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.410938654841095e-05, |
|
"loss": 0.9654, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.403547671840355e-05, |
|
"loss": 0.973, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.396156688839616e-05, |
|
"loss": 0.9676, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.388765705838877e-05, |
|
"loss": 0.9553, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.381374722838138e-05, |
|
"loss": 0.9632, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.373983739837398e-05, |
|
"loss": 0.9514, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.36659275683666e-05, |
|
"loss": 0.9498, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.359201773835921e-05, |
|
"loss": 0.9451, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.351810790835182e-05, |
|
"loss": 0.9893, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.344419807834443e-05, |
|
"loss": 0.9861, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.337028824833703e-05, |
|
"loss": 0.94, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.329637841832964e-05, |
|
"loss": 0.9659, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.322246858832226e-05, |
|
"loss": 0.9403, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.314855875831487e-05, |
|
"loss": 0.9683, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 8.307464892830746e-05, |
|
"loss": 0.9597, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 8.300073909830007e-05, |
|
"loss": 0.926, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 8.292682926829268e-05, |
|
"loss": 0.9518, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 8.28529194382853e-05, |
|
"loss": 0.9683, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 8.27790096082779e-05, |
|
"loss": 0.9516, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 8.270509977827051e-05, |
|
"loss": 0.9467, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 8.263118994826312e-05, |
|
"loss": 0.9752, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 8.255728011825573e-05, |
|
"loss": 0.9506, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 8.248337028824834e-05, |
|
"loss": 0.9516, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 8.240946045824096e-05, |
|
"loss": 0.9787, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 8.233555062823356e-05, |
|
"loss": 0.9477, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 8.226164079822617e-05, |
|
"loss": 0.9686, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 8.218773096821878e-05, |
|
"loss": 0.9642, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 8.211382113821139e-05, |
|
"loss": 0.9792, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 8.2039911308204e-05, |
|
"loss": 0.9794, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 8.19660014781966e-05, |
|
"loss": 0.9446, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 8.189209164818922e-05, |
|
"loss": 0.9402, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 8.181818181818183e-05, |
|
"loss": 0.9475, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.174427198817442e-05, |
|
"loss": 0.9269, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.167036215816703e-05, |
|
"loss": 0.9432, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.159645232815965e-05, |
|
"loss": 0.9557, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.152254249815226e-05, |
|
"loss": 0.9549, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.144863266814487e-05, |
|
"loss": 0.9571, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.137472283813747e-05, |
|
"loss": 0.9229, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.130081300813008e-05, |
|
"loss": 0.9485, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.122690317812269e-05, |
|
"loss": 0.936, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.11529933481153e-05, |
|
"loss": 0.9451, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.107908351810792e-05, |
|
"loss": 0.9275, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.100517368810052e-05, |
|
"loss": 0.9603, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.093126385809313e-05, |
|
"loss": 0.9744, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.085735402808574e-05, |
|
"loss": 0.9491, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.078344419807835e-05, |
|
"loss": 0.9197, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.070953436807095e-05, |
|
"loss": 0.9557, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.063562453806357e-05, |
|
"loss": 0.957, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.056171470805618e-05, |
|
"loss": 0.9301, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.048780487804879e-05, |
|
"loss": 0.9392, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 8.04138950480414e-05, |
|
"loss": 0.9258, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 8.033998521803399e-05, |
|
"loss": 0.934, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 8.026607538802661e-05, |
|
"loss": 0.9387, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 8.019216555801922e-05, |
|
"loss": 0.9501, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 8.011825572801183e-05, |
|
"loss": 0.9392, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 8.004434589800443e-05, |
|
"loss": 0.9384, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.997043606799704e-05, |
|
"loss": 0.9287, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.989652623798965e-05, |
|
"loss": 0.9518, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.982261640798227e-05, |
|
"loss": 0.9368, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.974870657797488e-05, |
|
"loss": 0.9392, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.967479674796748e-05, |
|
"loss": 0.948, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.960088691796009e-05, |
|
"loss": 0.9303, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.95269770879527e-05, |
|
"loss": 0.8942, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.94530672579453e-05, |
|
"loss": 0.9253, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.937915742793793e-05, |
|
"loss": 0.9061, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.930524759793053e-05, |
|
"loss": 0.9449, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.923133776792314e-05, |
|
"loss": 0.9148, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.915742793791575e-05, |
|
"loss": 0.9449, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.908351810790836e-05, |
|
"loss": 0.9259, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.900960827790096e-05, |
|
"loss": 0.9166, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.893569844789357e-05, |
|
"loss": 0.9077, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.886178861788618e-05, |
|
"loss": 0.9183, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.878787878787879e-05, |
|
"loss": 0.9027, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.87139689578714e-05, |
|
"loss": 0.9168, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.8640059127864e-05, |
|
"loss": 0.9315, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.856614929785662e-05, |
|
"loss": 0.9241, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.849223946784923e-05, |
|
"loss": 0.9114, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.841832963784184e-05, |
|
"loss": 0.9422, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.834441980783444e-05, |
|
"loss": 0.9066, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.827050997782705e-05, |
|
"loss": 0.9089, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.819660014781966e-05, |
|
"loss": 0.9189, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.812269031781228e-05, |
|
"loss": 0.8879, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.804878048780489e-05, |
|
"loss": 0.8975, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.79748706577975e-05, |
|
"loss": 0.9212, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.79009608277901e-05, |
|
"loss": 0.8963, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.782705099778271e-05, |
|
"loss": 0.9264, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.775314116777532e-05, |
|
"loss": 0.9348, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.767923133776792e-05, |
|
"loss": 0.9171, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.760532150776053e-05, |
|
"loss": 0.9223, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.753141167775314e-05, |
|
"loss": 0.902, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 7.745750184774575e-05, |
|
"loss": 0.9099, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 7.738359201773835e-05, |
|
"loss": 0.9211, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 7.730968218773098e-05, |
|
"loss": 0.9131, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 7.723577235772358e-05, |
|
"loss": 0.9035, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 7.716186252771619e-05, |
|
"loss": 0.9464, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 7.70879526977088e-05, |
|
"loss": 0.925, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 7.70140428677014e-05, |
|
"loss": 0.9286, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 7.694013303769401e-05, |
|
"loss": 0.9381, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 7.686622320768663e-05, |
|
"loss": 0.9158, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.679231337767924e-05, |
|
"loss": 0.9184, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.671840354767185e-05, |
|
"loss": 0.9114, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.664449371766446e-05, |
|
"loss": 0.8987, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.657058388765706e-05, |
|
"loss": 0.9153, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.649667405764967e-05, |
|
"loss": 0.9236, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 7.642276422764229e-05, |
|
"loss": 0.9105, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 7.634885439763488e-05, |
|
"loss": 0.9024, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 7.627494456762749e-05, |
|
"loss": 0.9225, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 7.62010347376201e-05, |
|
"loss": 0.8984, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.612712490761271e-05, |
|
"loss": 0.9116, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.605321507760533e-05, |
|
"loss": 0.8936, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.597930524759794e-05, |
|
"loss": 0.9155, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.590539541759054e-05, |
|
"loss": 0.9079, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.583148558758315e-05, |
|
"loss": 0.8987, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.575757575757576e-05, |
|
"loss": 0.9167, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.568366592756836e-05, |
|
"loss": 0.897, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.560975609756099e-05, |
|
"loss": 0.8923, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.55358462675536e-05, |
|
"loss": 0.8807, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.54619364375462e-05, |
|
"loss": 0.8959, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.538802660753881e-05, |
|
"loss": 0.8835, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.531411677753142e-05, |
|
"loss": 0.9158, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.524020694752402e-05, |
|
"loss": 0.9144, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.516629711751664e-05, |
|
"loss": 0.8882, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.509238728750925e-05, |
|
"loss": 0.9033, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.501847745750186e-05, |
|
"loss": 0.9014, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.494456762749445e-05, |
|
"loss": 0.8739, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.487065779748706e-05, |
|
"loss": 0.8935, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.479674796747968e-05, |
|
"loss": 0.8744, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.472283813747229e-05, |
|
"loss": 0.8975, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.46489283074649e-05, |
|
"loss": 0.8932, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.45750184774575e-05, |
|
"loss": 0.9007, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.450110864745011e-05, |
|
"loss": 0.8956, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.442719881744272e-05, |
|
"loss": 0.9183, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.435328898743534e-05, |
|
"loss": 0.9186, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.427937915742795e-05, |
|
"loss": 0.9008, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.420546932742055e-05, |
|
"loss": 0.9091, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 7.413155949741316e-05, |
|
"loss": 0.8968, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 7.405764966740577e-05, |
|
"loss": 0.8865, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 7.398373983739838e-05, |
|
"loss": 0.8842, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 7.3909830007391e-05, |
|
"loss": 0.8815, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 7.38359201773836e-05, |
|
"loss": 0.8858, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 7.376201034737621e-05, |
|
"loss": 0.8848, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 7.368810051736882e-05, |
|
"loss": 0.8863, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 7.361419068736141e-05, |
|
"loss": 0.8966, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 7.354028085735403e-05, |
|
"loss": 0.8848, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.346637102734664e-05, |
|
"loss": 0.8607, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.339246119733925e-05, |
|
"loss": 0.8998, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.331855136733186e-05, |
|
"loss": 0.8809, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.324464153732446e-05, |
|
"loss": 0.8668, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.317073170731707e-05, |
|
"loss": 0.8717, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.309682187730969e-05, |
|
"loss": 0.8684, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.30229120473023e-05, |
|
"loss": 0.896, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.29490022172949e-05, |
|
"loss": 0.8838, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.287509238728751e-05, |
|
"loss": 0.8756, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.280118255728012e-05, |
|
"loss": 0.8886, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.272727272727273e-05, |
|
"loss": 0.9048, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.265336289726534e-05, |
|
"loss": 0.899, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.257945306725796e-05, |
|
"loss": 0.8528, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.250554323725056e-05, |
|
"loss": 0.8946, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.243163340724317e-05, |
|
"loss": 0.8688, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.235772357723578e-05, |
|
"loss": 0.8938, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.228381374722837e-05, |
|
"loss": 0.881, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.2209903917221e-05, |
|
"loss": 0.888, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.21359940872136e-05, |
|
"loss": 0.8929, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.206208425720621e-05, |
|
"loss": 0.8655, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.198817442719882e-05, |
|
"loss": 0.8491, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.191426459719142e-05, |
|
"loss": 0.8847, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.184035476718403e-05, |
|
"loss": 0.8766, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.176644493717665e-05, |
|
"loss": 0.8869, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.169253510716926e-05, |
|
"loss": 0.8822, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.161862527716187e-05, |
|
"loss": 0.8764, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.154471544715447e-05, |
|
"loss": 0.8828, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.147080561714708e-05, |
|
"loss": 0.8862, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.139689578713969e-05, |
|
"loss": 0.8785, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.132298595713231e-05, |
|
"loss": 0.8772, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.124907612712492e-05, |
|
"loss": 0.8598, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.117516629711752e-05, |
|
"loss": 0.8865, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.110125646711013e-05, |
|
"loss": 0.8658, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.102734663710274e-05, |
|
"loss": 0.8777, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.095343680709535e-05, |
|
"loss": 0.8555, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.087952697708795e-05, |
|
"loss": 0.8594, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 7.080561714708056e-05, |
|
"loss": 0.8785, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.073170731707317e-05, |
|
"loss": 0.8549, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.065779748706578e-05, |
|
"loss": 0.8745, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.058388765705838e-05, |
|
"loss": 0.8816, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.0509977827051e-05, |
|
"loss": 0.8734, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.043606799704361e-05, |
|
"loss": 0.859, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.036215816703622e-05, |
|
"loss": 0.8587, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.028824833702883e-05, |
|
"loss": 0.8539, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.021433850702143e-05, |
|
"loss": 0.8734, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.014042867701404e-05, |
|
"loss": 0.8576, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 7.006651884700666e-05, |
|
"loss": 0.8494, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.999260901699927e-05, |
|
"loss": 0.887, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.991869918699188e-05, |
|
"loss": 0.8724, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.984478935698448e-05, |
|
"loss": 0.8618, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.977087952697709e-05, |
|
"loss": 0.8801, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.96969696969697e-05, |
|
"loss": 0.858, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.962305986696232e-05, |
|
"loss": 0.8715, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.954915003695491e-05, |
|
"loss": 0.8698, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.947524020694752e-05, |
|
"loss": 0.8644, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.940133037694013e-05, |
|
"loss": 0.8688, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.932742054693274e-05, |
|
"loss": 0.8771, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.925351071692536e-05, |
|
"loss": 0.8753, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 6.917960088691796e-05, |
|
"loss": 0.8978, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.910569105691057e-05, |
|
"loss": 0.8655, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.903178122690318e-05, |
|
"loss": 0.8651, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.895787139689579e-05, |
|
"loss": 0.8668, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.88839615668884e-05, |
|
"loss": 0.8648, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.881005173688101e-05, |
|
"loss": 0.843, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.873614190687362e-05, |
|
"loss": 0.8458, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.866223207686623e-05, |
|
"loss": 0.8617, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.858832224685884e-05, |
|
"loss": 0.8539, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.851441241685144e-05, |
|
"loss": 0.8422, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.844050258684405e-05, |
|
"loss": 0.8718, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.836659275683667e-05, |
|
"loss": 0.8288, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.829268292682928e-05, |
|
"loss": 0.8255, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.821877309682187e-05, |
|
"loss": 0.8324, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.814486326681448e-05, |
|
"loss": 0.844, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.807095343680709e-05, |
|
"loss": 0.8382, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.799704360679971e-05, |
|
"loss": 0.8692, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.792313377679232e-05, |
|
"loss": 0.8357, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.784922394678492e-05, |
|
"loss": 0.8666, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.777531411677753e-05, |
|
"loss": 0.8681, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.770140428677014e-05, |
|
"loss": 0.8415, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.762749445676275e-05, |
|
"loss": 0.8398, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.755358462675537e-05, |
|
"loss": 0.8264, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.747967479674798e-05, |
|
"loss": 0.8649, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 6.740576496674058e-05, |
|
"loss": 0.8357, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 6.733185513673319e-05, |
|
"loss": 0.8502, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 6.72579453067258e-05, |
|
"loss": 0.8603, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 6.71840354767184e-05, |
|
"loss": 0.8323, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.711012564671103e-05, |
|
"loss": 0.8238, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.703621581670363e-05, |
|
"loss": 0.8522, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.696230598669624e-05, |
|
"loss": 0.8351, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.688839615668883e-05, |
|
"loss": 0.832, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.681448632668144e-05, |
|
"loss": 0.8478, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.674057649667406e-05, |
|
"loss": 0.8497, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 0.8427, |
|
"step": 451 |
|
}, |
|
    {
      "epoch": 1.0,
      "eval_loss": 1.0878102779388428,
      "eval_runtime": 495.5202,
      "eval_samples_per_second": 32.168,
      "eval_steps_per_second": 0.505,
      "eval_wer": 0.8091472356907791,
      "step": 451
    },
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.659275683665928e-05, |
|
"loss": 1.2494, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.651884700665188e-05, |
|
"loss": 0.8478, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 6.644493717664449e-05, |
|
"loss": 0.8247, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 6.63710273466371e-05, |
|
"loss": 0.8202, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 6.629711751662972e-05, |
|
"loss": 0.8152, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 6.622320768662233e-05, |
|
"loss": 0.8248, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 6.614929785661494e-05, |
|
"loss": 0.8315, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 6.607538802660754e-05, |
|
"loss": 0.8001, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 6.600147819660015e-05, |
|
"loss": 0.841, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 6.592756836659276e-05, |
|
"loss": 0.8395, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 6.585365853658538e-05, |
|
"loss": 0.8209, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 6.577974870657799e-05, |
|
"loss": 0.8413, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 6.570583887657059e-05, |
|
"loss": 0.8145, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 6.56319290465632e-05, |
|
"loss": 0.8217, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 6.555801921655581e-05, |
|
"loss": 0.8535, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 6.548410938654842e-05, |
|
"loss": 0.8173, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 6.541019955654102e-05, |
|
"loss": 0.8303, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 6.533628972653363e-05, |
|
"loss": 0.8416, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 6.526237989652624e-05, |
|
"loss": 0.8218, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 6.518847006651884e-05, |
|
"loss": 0.8257, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 6.511456023651145e-05, |
|
"loss": 0.8319, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 6.504065040650407e-05, |
|
"loss": 0.8535, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 6.496674057649668e-05, |
|
"loss": 0.8369, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 6.489283074648929e-05, |
|
"loss": 0.8115, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 6.48189209164819e-05, |
|
"loss": 0.8091, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 6.47450110864745e-05, |
|
"loss": 0.8345, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 6.467110125646711e-05, |
|
"loss": 0.8542, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 6.459719142645973e-05, |
|
"loss": 0.8158, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 6.452328159645234e-05, |
|
"loss": 0.829, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 6.444937176644495e-05, |
|
"loss": 0.8146, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 6.437546193643755e-05, |
|
"loss": 0.8232, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 6.430155210643016e-05, |
|
"loss": 0.8075, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 6.422764227642277e-05, |
|
"loss": 0.8333, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 6.415373244641538e-05, |
|
"loss": 0.8112, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 6.407982261640798e-05, |
|
"loss": 0.809, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 6.400591278640059e-05, |
|
"loss": 0.7983, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 6.39320029563932e-05, |
|
"loss": 0.7997, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 6.38580931263858e-05, |
|
"loss": 0.8087, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 6.378418329637843e-05, |
|
"loss": 0.8165, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 6.371027346637103e-05, |
|
"loss": 0.795, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 6.363636363636364e-05, |
|
"loss": 0.8302, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 6.356245380635625e-05, |
|
"loss": 0.7928, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 6.348854397634886e-05, |
|
"loss": 0.822, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 6.341463414634146e-05, |
|
"loss": 0.8243, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 6.334072431633407e-05, |
|
"loss": 0.8252, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 6.326681448632669e-05, |
|
"loss": 0.8079, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 6.31929046563193e-05, |
|
"loss": 0.8181, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 6.31189948263119e-05, |
|
"loss": 0.8216, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 6.304508499630451e-05, |
|
"loss": 0.8127, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 6.297117516629712e-05, |
|
"loss": 0.7971, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 6.289726533628973e-05, |
|
"loss": 0.8005, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 6.282335550628234e-05, |
|
"loss": 0.8065, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 6.274944567627494e-05, |
|
"loss": 0.7944, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 6.267553584626755e-05, |
|
"loss": 0.8219, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 6.260162601626016e-05, |
|
"loss": 0.8399, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 6.252771618625277e-05, |
|
"loss": 0.7951, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 6.245380635624539e-05, |
|
"loss": 0.8164, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 6.2379896526238e-05, |
|
"loss": 0.8104, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 6.23059866962306e-05, |
|
"loss": 0.8094, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 6.223207686622321e-05, |
|
"loss": 0.8018, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 6.215816703621582e-05, |
|
"loss": 0.7701, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 6.208425720620842e-05, |
|
"loss": 0.83, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 6.201034737620104e-05, |
|
"loss": 0.7991, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 6.193643754619365e-05, |
|
"loss": 0.8105, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 6.186252771618626e-05, |
|
"loss": 0.8116, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 6.178861788617887e-05, |
|
"loss": 0.8091, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 6.171470805617147e-05, |
|
"loss": 0.8257, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 6.164079822616408e-05, |
|
"loss": 0.804, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 6.15668883961567e-05, |
|
"loss": 0.8073, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 6.149297856614931e-05, |
|
"loss": 0.8012, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 6.14190687361419e-05, |
|
"loss": 0.7969, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 6.134515890613451e-05, |
|
"loss": 0.8234, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 6.127124907612712e-05, |
|
"loss": 0.8076, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 6.119733924611974e-05, |
|
"loss": 0.7882, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 6.112342941611235e-05, |
|
"loss": 0.7898, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 6.104951958610495e-05, |
|
"loss": 0.8098, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 6.097560975609756e-05, |
|
"loss": 0.8245, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 6.0901699926090175e-05, |
|
"loss": 0.8084, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 6.082779009608278e-05, |
|
"loss": 0.7964, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 6.075388026607539e-05, |
|
"loss": 0.7949, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 6.0679970436068004e-05, |
|
"loss": 0.8092, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 6.060606060606061e-05, |
|
"loss": 0.8034, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 6.053215077605322e-05, |
|
"loss": 0.8043, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 6.045824094604583e-05, |
|
"loss": 0.8164, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 6.038433111603844e-05, |
|
"loss": 0.8083, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 6.031042128603105e-05, |
|
"loss": 0.7934, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 6.023651145602366e-05, |
|
"loss": 0.7929, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 6.016260162601627e-05, |
|
"loss": 0.8063, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.008869179600887e-05, |
|
"loss": 0.7816, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.001478196600148e-05, |
|
"loss": 0.7975, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 5.9940872135994085e-05, |
|
"loss": 0.788, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 5.98669623059867e-05, |
|
"loss": 0.776, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 5.9793052475979306e-05, |
|
"loss": 0.7878, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 5.9719142645971914e-05, |
|
"loss": 0.8016, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 5.964523281596453e-05, |
|
"loss": 0.8124, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 5.9571322985957135e-05, |
|
"loss": 0.7717, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 5.949741315594974e-05, |
|
"loss": 0.8093, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 5.942350332594236e-05, |
|
"loss": 0.8056, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 5.9349593495934964e-05, |
|
"loss": 0.8196, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 5.927568366592757e-05, |
|
"loss": 0.8077, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 5.920177383592018e-05, |
|
"loss": 0.7827, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 5.912786400591279e-05, |
|
"loss": 0.8, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 5.90539541759054e-05, |
|
"loss": 0.7841, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 5.898004434589801e-05, |
|
"loss": 0.7795, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 5.890613451589062e-05, |
|
"loss": 0.7767, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 5.883222468588323e-05, |
|
"loss": 0.8054, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 5.875831485587583e-05, |
|
"loss": 0.7823, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 5.868440502586844e-05, |
|
"loss": 0.8048, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 5.8610495195861045e-05, |
|
"loss": 0.7906, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 5.853658536585366e-05, |
|
"loss": 0.7659, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 5.8462675535846266e-05, |
|
"loss": 0.7845, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 5.8388765705838874e-05, |
|
"loss": 0.7897, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 5.831485587583149e-05, |
|
"loss": 0.7927, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 5.8240946045824095e-05, |
|
"loss": 0.7902, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 5.81670362158167e-05, |
|
"loss": 0.7968, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.809312638580932e-05, |
|
"loss": 0.7839, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.8019216555801924e-05, |
|
"loss": 0.8069, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.794530672579453e-05, |
|
"loss": 0.8002, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.7871396895787146e-05, |
|
"loss": 0.7777, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.779748706577975e-05, |
|
"loss": 0.7824, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.772357723577236e-05, |
|
"loss": 0.7936, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.7649667405764975e-05, |
|
"loss": 0.7865, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.757575757575758e-05, |
|
"loss": 0.7916, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.750184774575019e-05, |
|
"loss": 0.7948, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.742793791574279e-05, |
|
"loss": 0.7801, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.73540280857354e-05, |
|
"loss": 0.816, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.728011825572801e-05, |
|
"loss": 0.7687, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.720620842572062e-05, |
|
"loss": 0.7912, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.7132298595713227e-05, |
|
"loss": 0.7725, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.705838876570584e-05, |
|
"loss": 0.7681, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.698447893569845e-05, |
|
"loss": 0.7738, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.6910569105691056e-05, |
|
"loss": 0.7931, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.683665927568367e-05, |
|
"loss": 0.7802, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.676274944567628e-05, |
|
"loss": 0.7733, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.6688839615668884e-05, |
|
"loss": 0.8044, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.66149297856615e-05, |
|
"loss": 0.7887, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.6541019955654106e-05, |
|
"loss": 0.7773, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.6467110125646713e-05, |
|
"loss": 0.7839, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.639320029563933e-05, |
|
"loss": 0.7642, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.6319290465631935e-05, |
|
"loss": 0.7783, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.624538063562454e-05, |
|
"loss": 0.7861, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.6171470805617157e-05, |
|
"loss": 0.7671, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.6097560975609764e-05, |
|
"loss": 0.7749, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.6023651145602365e-05, |
|
"loss": 0.7714, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.594974131559497e-05, |
|
"loss": 0.7685, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.587583148558758e-05, |
|
"loss": 0.7936, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.5801921655580193e-05, |
|
"loss": 0.7746, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.57280118255728e-05, |
|
"loss": 0.7715, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.565410199556541e-05, |
|
"loss": 0.7538, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.558019216555802e-05, |
|
"loss": 0.7734, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.550628233555063e-05, |
|
"loss": 0.7725, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.543237250554324e-05, |
|
"loss": 0.7751, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.535846267553585e-05, |
|
"loss": 0.7659, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.528455284552846e-05, |
|
"loss": 0.7691, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.5210643015521066e-05, |
|
"loss": 0.7951, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.513673318551368e-05, |
|
"loss": 0.765, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.506282335550629e-05, |
|
"loss": 0.8091, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.4988913525498895e-05, |
|
"loss": 0.7571, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.491500369549151e-05, |
|
"loss": 0.7798, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.484109386548412e-05, |
|
"loss": 0.7893, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 5.4767184035476724e-05, |
|
"loss": 0.7676, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 5.4693274205469325e-05, |
|
"loss": 0.7782, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 5.461936437546193e-05, |
|
"loss": 0.8033, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 5.4545454545454546e-05, |
|
"loss": 0.7911, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 5.4471544715447154e-05, |
|
"loss": 0.7983, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 5.439763488543976e-05, |
|
"loss": 0.7728, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 5.4323725055432375e-05, |
|
"loss": 0.7699, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 5.424981522542498e-05, |
|
"loss": 0.781, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 5.417590539541759e-05, |
|
"loss": 0.7795, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 5.4101995565410204e-05, |
|
"loss": 0.7683, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 5.402808573540281e-05, |
|
"loss": 0.7791, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 5.395417590539542e-05, |
|
"loss": 0.7517, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 5.388026607538803e-05, |
|
"loss": 0.7678, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 5.380635624538064e-05, |
|
"loss": 0.7713, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 5.373244641537325e-05, |
|
"loss": 0.7628, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 5.365853658536586e-05, |
|
"loss": 0.7781, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 5.358462675535847e-05, |
|
"loss": 0.7694, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 5.351071692535108e-05, |
|
"loss": 0.7674, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 5.343680709534369e-05, |
|
"loss": 0.7817, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 5.3362897265336285e-05, |
|
"loss": 0.7805, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 5.32889874353289e-05, |
|
"loss": 0.8098, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 5.3215077605321506e-05, |
|
"loss": 0.7793, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 5.3141167775314114e-05, |
|
"loss": 0.7673, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 5.306725794530673e-05, |
|
"loss": 0.7774, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 5.2993348115299335e-05, |
|
"loss": 0.7772, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 5.291943828529194e-05, |
|
"loss": 0.7819, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 5.284552845528456e-05, |
|
"loss": 0.7578, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.2771618625277164e-05, |
|
"loss": 0.7764, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.269770879526977e-05, |
|
"loss": 0.7864, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.2623798965262386e-05, |
|
"loss": 0.7743, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.254988913525499e-05, |
|
"loss": 0.7832, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.24759793052476e-05, |
|
"loss": 0.798, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.2402069475240215e-05, |
|
"loss": 0.7582, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.232815964523282e-05, |
|
"loss": 0.7563, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.225424981522543e-05, |
|
"loss": 0.7762, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.2180339985218044e-05, |
|
"loss": 0.7704, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.210643015521065e-05, |
|
"loss": 0.7543, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.203252032520326e-05, |
|
"loss": 0.7913, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.195861049519586e-05, |
|
"loss": 0.7785, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.1884700665188466e-05, |
|
"loss": 0.7571, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.181079083518108e-05, |
|
"loss": 0.7639, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.173688100517369e-05, |
|
"loss": 0.7585, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.1662971175166295e-05, |
|
"loss": 0.7729, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.158906134515891e-05, |
|
"loss": 0.7575, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.151515151515152e-05, |
|
"loss": 0.7638, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.1441241685144124e-05, |
|
"loss": 0.7424, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.136733185513674e-05, |
|
"loss": 0.7697, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.1293422025129346e-05, |
|
"loss": 0.7543, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.121951219512195e-05, |
|
"loss": 0.7695, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.114560236511457e-05, |
|
"loss": 0.7577, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.1071692535107175e-05, |
|
"loss": 0.7481, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.099778270509978e-05, |
|
"loss": 0.7727, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.0923872875092396e-05, |
|
"loss": 0.7656, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.0849963045085004e-05, |
|
"loss": 0.7512, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.077605321507761e-05, |
|
"loss": 0.7747, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.0702143385070225e-05, |
|
"loss": 0.7426, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.062823355506282e-05, |
|
"loss": 0.7453, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.0554323725055433e-05, |
|
"loss": 0.768, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.048041389504804e-05, |
|
"loss": 0.7675, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.040650406504065e-05, |
|
"loss": 0.744, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.033259423503326e-05, |
|
"loss": 0.7919, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.025868440502587e-05, |
|
"loss": 0.775, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.018477457501848e-05, |
|
"loss": 0.7365, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.011086474501109e-05, |
|
"loss": 0.7503, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.00369549150037e-05, |
|
"loss": 0.7614, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.9963045084996306e-05, |
|
"loss": 0.7528, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.9889135254988913e-05, |
|
"loss": 0.7555, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.981522542498153e-05, |
|
"loss": 0.7599, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.9741315594974135e-05, |
|
"loss": 0.7662, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.966740576496674e-05, |
|
"loss": 0.7538, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.959349593495935e-05, |
|
"loss": 0.7613, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.951958610495196e-05, |
|
"loss": 0.7519, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.944567627494457e-05, |
|
"loss": 0.7604, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.937176644493718e-05, |
|
"loss": 0.7641, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.9297856614929786e-05, |
|
"loss": 0.7631, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.92239467849224e-05, |
|
"loss": 0.7621, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.915003695491501e-05, |
|
"loss": 0.7388, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.9076127124907615e-05, |
|
"loss": 0.7679, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.900221729490023e-05, |
|
"loss": 0.767, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.892830746489283e-05, |
|
"loss": 0.7847, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.885439763488544e-05, |
|
"loss": 0.764, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.878048780487805e-05, |
|
"loss": 0.7496, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.870657797487066e-05, |
|
"loss": 0.7661, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.8632668144863266e-05, |
|
"loss": 0.7493, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.855875831485588e-05, |
|
"loss": 0.7536, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.848484848484849e-05, |
|
"loss": 0.766, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.8410938654841095e-05, |
|
"loss": 0.7424, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.833702882483371e-05, |
|
"loss": 0.7463, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.826311899482631e-05, |
|
"loss": 0.7571, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.8189209164818924e-05, |
|
"loss": 0.766, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.811529933481153e-05, |
|
"loss": 0.7507, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.804138950480414e-05, |
|
"loss": 0.7552, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.796747967479675e-05, |
|
"loss": 0.7316, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.789356984478936e-05, |
|
"loss": 0.7738, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.781966001478197e-05, |
|
"loss": 0.7618, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.774575018477458e-05, |
|
"loss": 0.767, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.767184035476719e-05, |
|
"loss": 0.7355, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.759793052475979e-05, |
|
"loss": 0.7526, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.7524020694752404e-05, |
|
"loss": 0.764, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.745011086474501e-05, |
|
"loss": 0.7423, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.737620103473762e-05, |
|
"loss": 0.7532, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.730229120473023e-05, |
|
"loss": 0.7528, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.722838137472284e-05, |
|
"loss": 0.7486, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.715447154471545e-05, |
|
"loss": 0.7676, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.708056171470806e-05, |
|
"loss": 0.7437, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.700665188470067e-05, |
|
"loss": 0.7566, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.693274205469328e-05, |
|
"loss": 0.7216, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.6858832224685884e-05, |
|
"loss": 0.7254, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.678492239467849e-05, |
|
"loss": 0.738, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.6711012564671106e-05, |
|
"loss": 0.7499, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.663710273466371e-05, |
|
"loss": 0.7488, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.656319290465632e-05, |
|
"loss": 0.7425, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.6489283074648935e-05, |
|
"loss": 0.7574, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.641537324464154e-05, |
|
"loss": 0.7394, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.634146341463415e-05, |
|
"loss": 0.742, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.626755358462676e-05, |
|
"loss": 0.7651, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.6193643754619364e-05, |
|
"loss": 0.7553, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.611973392461197e-05, |
|
"loss": 0.7625, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.6045824094604586e-05, |
|
"loss": 0.7604, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.597191426459719e-05, |
|
"loss": 0.7457, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.58980044345898e-05, |
|
"loss": 0.7268, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.5824094604582415e-05, |
|
"loss": 0.7469, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.575018477457502e-05, |
|
"loss": 0.7533, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.567627494456763e-05, |
|
"loss": 0.7617, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.560236511456024e-05, |
|
"loss": 0.7178, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.5528455284552844e-05, |
|
"loss": 0.7467, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 0.7467, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.5380635624538066e-05, |
|
"loss": 0.7378, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.530672579453067e-05, |
|
"loss": 0.7443, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.523281596452328e-05, |
|
"loss": 0.7225, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.5158906134515895e-05, |
|
"loss": 0.7489, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.50849963045085e-05, |
|
"loss": 0.7266, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.501108647450111e-05, |
|
"loss": 0.7338, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.4937176644493724e-05, |
|
"loss": 0.7421, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.4863266814486324e-05, |
|
"loss": 0.7834, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.478935698447894e-05, |
|
"loss": 0.7429, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.4715447154471546e-05, |
|
"loss": 0.7463, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.4641537324464153e-05, |
|
"loss": 0.756, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.456762749445677e-05, |
|
"loss": 0.7413, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.4493717664449375e-05, |
|
"loss": 0.741, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.441980783444198e-05, |
|
"loss": 0.732, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.4345898004434597e-05, |
|
"loss": 0.7492, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.4271988174427204e-05, |
|
"loss": 0.732, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.4198078344419805e-05, |
|
"loss": 0.7372, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.412416851441242e-05, |
|
"loss": 0.7509, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.4050258684405026e-05, |
|
"loss": 0.7464, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.3976348854397633e-05, |
|
"loss": 0.7616, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.390243902439025e-05, |
|
"loss": 0.7608, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.3828529194382855e-05, |
|
"loss": 0.7267, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.375461936437546e-05, |
|
"loss": 0.7436, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.3680709534368077e-05, |
|
"loss": 0.7399, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.3606799704360684e-05, |
|
"loss": 0.7488, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.353288987435329e-05, |
|
"loss": 0.7485, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.34589800443459e-05, |
|
"loss": 0.7381, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.3385070214338506e-05, |
|
"loss": 0.7301, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.331116038433112e-05, |
|
"loss": 0.7208, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.323725055432373e-05, |
|
"loss": 0.7487, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.3163340724316335e-05, |
|
"loss": 0.7508, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.308943089430895e-05, |
|
"loss": 0.7316, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.301552106430156e-05, |
|
"loss": 0.7223, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.2941611234294164e-05, |
|
"loss": 0.7169, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.286770140428677e-05, |
|
"loss": 0.7277, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.279379157427938e-05, |
|
"loss": 0.7399, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.2719881744271986e-05, |
|
"loss": 0.7319, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.26459719142646e-05, |
|
"loss": 0.7416, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.257206208425721e-05, |
|
"loss": 0.7469, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.2498152254249815e-05, |
|
"loss": 0.7492, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.242424242424243e-05, |
|
"loss": 0.7424, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.235033259423504e-05, |
|
"loss": 0.7508, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.2276422764227644e-05, |
|
"loss": 0.7464, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.220251293422025e-05, |
|
"loss": 0.7392, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.212860310421286e-05, |
|
"loss": 0.728, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.205469327420547e-05, |
|
"loss": 0.7462, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.198078344419808e-05, |
|
"loss": 0.7317, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.190687361419069e-05, |
|
"loss": 0.7147, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.18329637841833e-05, |
|
"loss": 0.7281, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.175905395417591e-05, |
|
"loss": 0.7549, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.168514412416852e-05, |
|
"loss": 0.7376, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.161123429416113e-05, |
|
"loss": 0.732, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.153732446415373e-05, |
|
"loss": 0.7325, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.146341463414634e-05, |
|
"loss": 0.7463, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.138950480413895e-05, |
|
"loss": 0.7079, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.131559497413156e-05, |
|
"loss": 0.7501, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.124168514412417e-05, |
|
"loss": 0.7268, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.116777531411678e-05, |
|
"loss": 0.7256, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.109386548410939e-05, |
|
"loss": 0.7266, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.1019955654102e-05, |
|
"loss": 0.7419, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.094604582409461e-05, |
|
"loss": 0.7396, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.087213599408721e-05, |
|
"loss": 0.7292, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.0798226164079826e-05, |
|
"loss": 0.7492, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.072431633407243e-05, |
|
"loss": 0.763, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.065040650406504e-05, |
|
"loss": 0.7275, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.057649667405765e-05, |
|
"loss": 0.7356, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.050258684405026e-05, |
|
"loss": 0.7444, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.042867701404287e-05, |
|
"loss": 0.7407, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.035476718403548e-05, |
|
"loss": 0.7187, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.028085735402809e-05, |
|
"loss": 0.733, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.02069475240207e-05, |
|
"loss": 0.748, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.0133037694013306e-05, |
|
"loss": 0.7165, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.005912786400591e-05, |
|
"loss": 0.7189, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.998521803399852e-05, |
|
"loss": 0.7429, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.9911308203991135e-05, |
|
"loss": 0.7397, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.983739837398374e-05, |
|
"loss": 0.73, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.976348854397635e-05, |
|
"loss": 0.7173, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.9689578713968964e-05, |
|
"loss": 0.7395, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.961566888396157e-05, |
|
"loss": 0.7353, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.954175905395418e-05, |
|
"loss": 0.7266, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.9467849223946786e-05, |
|
"loss": 0.7284, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.939393939393939e-05, |
|
"loss": 0.7332, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.9320029563932e-05, |
|
"loss": 0.7368, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.9246119733924615e-05, |
|
"loss": 0.7192, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.917220990391722e-05, |
|
"loss": 0.7302, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.909830007390983e-05, |
|
"loss": 0.7396, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.9024390243902444e-05, |
|
"loss": 0.7018, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.895048041389505e-05, |
|
"loss": 0.7351, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.887657058388766e-05, |
|
"loss": 0.7156, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.8802660753880266e-05, |
|
"loss": 0.7216, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.8728750923872873e-05, |
|
"loss": 0.7245, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.865484109386549e-05, |
|
"loss": 0.7329, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.8580931263858095e-05, |
|
"loss": 0.749, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.85070214338507e-05, |
|
"loss": 0.728, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.8433111603843317e-05, |
|
"loss": 0.7243, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.8359201773835924e-05, |
|
"loss": 0.7189, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.828529194382853e-05, |
|
"loss": 0.7448, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.8211382113821145e-05, |
|
"loss": 0.7482, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.8137472283813746e-05, |
|
"loss": 0.7251, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.8063562453806354e-05, |
|
"loss": 0.725, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.798965262379897e-05, |
|
"loss": 0.7204, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.7915742793791575e-05, |
|
"loss": 0.7288, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.784183296378418e-05, |
|
"loss": 0.7148, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.77679231337768e-05, |
|
"loss": 0.712, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.7694013303769404e-05, |
|
"loss": 0.7438, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.762010347376201e-05, |
|
"loss": 0.7395, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.7546193643754626e-05, |
|
"loss": 0.724, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.7472283813747226e-05, |
|
"loss": 0.7187, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.739837398373984e-05, |
|
"loss": 0.7038, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.732446415373245e-05, |
|
"loss": 0.7227, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.7250554323725055e-05, |
|
"loss": 0.7374, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.717664449371767e-05, |
|
"loss": 0.7137, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.710273466371028e-05, |
|
"loss": 0.7398, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.7028824833702884e-05, |
|
"loss": 0.7397, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.69549150036955e-05, |
|
"loss": 0.7227, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.6881005173688106e-05, |
|
"loss": 0.7066, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.6807095343680706e-05, |
|
"loss": 0.716, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.673318551367332e-05, |
|
"loss": 0.7131, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.665927568366593e-05, |
|
"loss": 0.7509, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.6585365853658535e-05, |
|
"loss": 0.7182, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.651145602365115e-05, |
|
"loss": 0.718, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.643754619364376e-05, |
|
"loss": 0.7071, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.6363636363636364e-05, |
|
"loss": 0.7157, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.628972653362898e-05, |
|
"loss": 0.7303, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.6215816703621586e-05, |
|
"loss": 0.7242, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.6141906873614186e-05, |
|
"loss": 0.7404, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.60679970436068e-05, |
|
"loss": 0.7306, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.599408721359941e-05, |
|
"loss": 0.712, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.5920177383592015e-05, |
|
"loss": 0.7281, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.584626755358463e-05, |
|
"loss": 0.7122, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.577235772357724e-05, |
|
"loss": 0.7151, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.5698447893569844e-05, |
|
"loss": 0.7353, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.562453806356246e-05, |
|
"loss": 0.7313, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.5550628233555066e-05, |
|
"loss": 0.7078, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.547671840354767e-05, |
|
"loss": 0.7207, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.540280857354028e-05, |
|
"loss": 0.7238, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.532889874353289e-05, |
|
"loss": 0.7269, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.52549889135255e-05, |
|
"loss": 0.6943, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.518107908351811e-05, |
|
"loss": 0.6894, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.510716925351072e-05, |
|
"loss": 0.7088, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.503325942350333e-05, |
|
"loss": 0.7372, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.495934959349594e-05, |
|
"loss": 0.7253, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.4885439763488546e-05, |
|
"loss": 0.7108, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.481152993348116e-05, |
|
"loss": 0.7275, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.473762010347376e-05, |
|
"loss": 0.7266, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.466371027346637e-05, |
|
"loss": 0.7009, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.458980044345898e-05, |
|
"loss": 0.7173, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.451589061345159e-05, |
|
"loss": 0.724, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.44419807834442e-05, |
|
"loss": 0.7069, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.436807095343681e-05, |
|
"loss": 0.7026, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.429416112342942e-05, |
|
"loss": 0.7246, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.4220251293422026e-05, |
|
"loss": 0.7173, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.414634146341464e-05, |
|
"loss": 0.7221, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.407243163340724e-05, |
|
"loss": 0.7281, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.3998521803399855e-05, |
|
"loss": 0.7093, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.392461197339246e-05, |
|
"loss": 0.7146, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.385070214338507e-05, |
|
"loss": 0.7276, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.3776792313377684e-05, |
|
"loss": 0.7234, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.370288248337029e-05, |
|
"loss": 0.7139, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.36289726533629e-05, |
|
"loss": 0.7339, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.355506282335551e-05, |
|
"loss": 0.7225, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.348115299334812e-05, |
|
"loss": 0.7219, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.340724316334072e-05, |
|
"loss": 0.7195, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.722, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 0.9731828570365906, |
|
"eval_runtime": 496.0356, |
|
"eval_samples_per_second": 32.135, |
|
"eval_steps_per_second": 0.504, |
|
"eval_wer": 0.759285117848898, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.325942350332594e-05, |
|
"loss": 1.0519, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.318551367331855e-05, |
|
"loss": 0.7017, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.3111603843311164e-05, |
|
"loss": 0.6736, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.303769401330377e-05, |
|
"loss": 0.7095, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.296378418329638e-05, |
|
"loss": 0.708, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.288987435328899e-05, |
|
"loss": 0.6951, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.28159645232816e-05, |
|
"loss": 0.6904, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.274205469327421e-05, |
|
"loss": 0.7247, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.2668144863266815e-05, |
|
"loss": 0.6902, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.259423503325942e-05, |
|
"loss": 0.6871, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.2520325203252037e-05, |
|
"loss": 0.6836, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.2446415373244644e-05, |
|
"loss": 0.7123, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.237250554323725e-05, |
|
"loss": 0.6875, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.2298595713229866e-05, |
|
"loss": 0.6848, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.222468588322247e-05, |
|
"loss": 0.7001, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.215077605321508e-05, |
|
"loss": 0.7013, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.207686622320769e-05, |
|
"loss": 0.687, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.2002956393200295e-05, |
|
"loss": 0.7157, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.19290465631929e-05, |
|
"loss": 0.7039, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.185513673318552e-05, |
|
"loss": 0.6905, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.1781226903178124e-05, |
|
"loss": 0.7005, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.170731707317073e-05, |
|
"loss": 0.6968, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.1633407243163346e-05, |
|
"loss": 0.7053, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.155949741315595e-05, |
|
"loss": 0.7216, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.148558758314856e-05, |
|
"loss": 0.7071, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.141167775314117e-05, |
|
"loss": 0.6974, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.1337767923133775e-05, |
|
"loss": 0.7182, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.126385809312638e-05, |
|
"loss": 0.697, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.1189948263119e-05, |
|
"loss": 0.6993, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.1116038433111604e-05, |
|
"loss": 0.7236, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.104212860310421e-05, |
|
"loss": 0.7066, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.0968218773096826e-05, |
|
"loss": 0.6842, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.089430894308943e-05, |
|
"loss": 0.7084, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.082039911308204e-05, |
|
"loss": 0.6936, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.0746489283074655e-05, |
|
"loss": 0.6903, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.0672579453067255e-05, |
|
"loss": 0.6785, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.059866962305987e-05, |
|
"loss": 0.6824, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.052475979305248e-05, |
|
"loss": 0.703, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.0450849963045088e-05, |
|
"loss": 0.716, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.0376940133037695e-05, |
|
"loss": 0.7118, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.0303030303030306e-05, |
|
"loss": 0.6825, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.0229120473022916e-05, |
|
"loss": 0.679, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.0155210643015524e-05, |
|
"loss": 0.6908, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.0081300813008135e-05, |
|
"loss": 0.6697, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.000739098300074e-05, |
|
"loss": 0.6667, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.993348115299335e-05, |
|
"loss": 0.6857, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.9859571322985957e-05, |
|
"loss": 0.6816, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.9785661492978568e-05, |
|
"loss": 0.7112, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.971175166297118e-05, |
|
"loss": 0.6764, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.9637841832963786e-05, |
|
"loss": 0.6962, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.9563932002956397e-05, |
|
"loss": 0.6609, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.9490022172949004e-05, |
|
"loss": 0.6981, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.9416112342941615e-05, |
|
"loss": 0.6815, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.934220251293422e-05, |
|
"loss": 0.7157, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.926829268292683e-05, |
|
"loss": 0.6793, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.9194382852919437e-05, |
|
"loss": 0.7055, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.9120473022912048e-05, |
|
"loss": 0.7082, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.904656319290466e-05, |
|
"loss": 0.7065, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.8972653362897266e-05, |
|
"loss": 0.6811, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.8898743532889877e-05, |
|
"loss": 0.7102, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.8824833702882487e-05, |
|
"loss": 0.702, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.8750923872875095e-05, |
|
"loss": 0.6983, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.86770140428677e-05, |
|
"loss": 0.7045, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.860310421286031e-05, |
|
"loss": 0.6894, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.852919438285292e-05, |
|
"loss": 0.6839, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.8455284552845528e-05, |
|
"loss": 0.6913, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.838137472283814e-05, |
|
"loss": 0.6855, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.830746489283075e-05, |
|
"loss": 0.6843, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.8233555062823357e-05, |
|
"loss": 0.7058, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.8159645232815967e-05, |
|
"loss": 0.6956, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.8085735402808578e-05, |
|
"loss": 0.6816, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.8011825572801182e-05, |
|
"loss": 0.6936, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.793791574279379e-05, |
|
"loss": 0.6835, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.78640059127864e-05, |
|
"loss": 0.6878, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.779009608277901e-05, |
|
"loss": 0.6746, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.771618625277162e-05, |
|
"loss": 0.6847, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.764227642276423e-05, |
|
"loss": 0.6737, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.756836659275684e-05, |
|
"loss": 0.671, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.7494456762749448e-05, |
|
"loss": 0.6814, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.742054693274206e-05, |
|
"loss": 0.6643, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.7346637102734662e-05, |
|
"loss": 0.6995, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 0.6728, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.719881744271988e-05, |
|
"loss": 0.6869, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.712490761271249e-05, |
|
"loss": 0.6819, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.7050997782705102e-05, |
|
"loss": 0.7077, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.697708795269771e-05, |
|
"loss": 0.6763, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.690317812269032e-05, |
|
"loss": 0.6853, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.682926829268293e-05, |
|
"loss": 0.6945, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.675535846267554e-05, |
|
"loss": 0.6916, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.6681448632668142e-05, |
|
"loss": 0.6738, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.6607538802660753e-05, |
|
"loss": 0.6954, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.6533628972653364e-05, |
|
"loss": 0.6868, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.645971914264597e-05, |
|
"loss": 0.7037, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.6385809312638582e-05, |
|
"loss": 0.7072, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.6311899482631193e-05, |
|
"loss": 0.6908, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.62379896526238e-05, |
|
"loss": 0.696, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.616407982261641e-05, |
|
"loss": 0.7197, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.6090169992609022e-05, |
|
"loss": 0.6981, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.601626016260163e-05, |
|
"loss": 0.694, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.5942350332594233e-05, |
|
"loss": 0.683, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.5868440502586844e-05, |
|
"loss": 0.6763, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.5794530672579455e-05, |
|
"loss": 0.6886, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.5720620842572062e-05, |
|
"loss": 0.6846, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.5646711012564673e-05, |
|
"loss": 0.6944, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.5572801182557284e-05, |
|
"loss": 0.6795, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.549889135254989e-05, |
|
"loss": 0.6966, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.5424981522542502e-05, |
|
"loss": 0.6799, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.5351071692535113e-05, |
|
"loss": 0.691, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.5277161862527717e-05, |
|
"loss": 0.6688, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.5203252032520324e-05, |
|
"loss": 0.678, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.5129342202512935e-05, |
|
"loss": 0.6805, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.5055432372505546e-05, |
|
"loss": 0.7044, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.4981522542498153e-05, |
|
"loss": 0.6733, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.4907612712490764e-05, |
|
"loss": 0.6841, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.483370288248337e-05, |
|
"loss": 0.6786, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.475979305247598e-05, |
|
"loss": 0.708, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.468588322246859e-05, |
|
"loss": 0.6829, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.46119733924612e-05, |
|
"loss": 0.6921, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.4538063562453808e-05, |
|
"loss": 0.6985, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.4464153732446415e-05, |
|
"loss": 0.6591, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.4390243902439026e-05, |
|
"loss": 0.6993, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.4316334072431633e-05, |
|
"loss": 0.6744, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.4242424242424244e-05, |
|
"loss": 0.6983, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.4168514412416855e-05, |
|
"loss": 0.6673, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.4094604582409462e-05, |
|
"loss": 0.7011, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.402069475240207e-05, |
|
"loss": 0.664, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.394678492239468e-05, |
|
"loss": 0.7047, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.387287509238729e-05, |
|
"loss": 0.6832, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.3798965262379895e-05, |
|
"loss": 0.6799, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.3725055432372506e-05, |
|
"loss": 0.682, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.3651145602365117e-05, |
|
"loss": 0.6876, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.3577235772357724e-05, |
|
"loss": 0.693, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.3503325942350335e-05, |
|
"loss": 0.6904, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.3429416112342942e-05, |
|
"loss": 0.6727, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.3355506282335553e-05, |
|
"loss": 0.6891, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.328159645232816e-05, |
|
"loss": 0.6876, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.320768662232077e-05, |
|
"loss": 0.6763, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.313377679231338e-05, |
|
"loss": 0.6906, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.3059866962305986e-05, |
|
"loss": 0.6795, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.2985957132298597e-05, |
|
"loss": 0.6989, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.2912047302291207e-05, |
|
"loss": 0.6771, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.2838137472283815e-05, |
|
"loss": 0.6918, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.2764227642276422e-05, |
|
"loss": 0.7107, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.2690317812269033e-05, |
|
"loss": 0.6802, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.261640798226164e-05, |
|
"loss": 0.6642, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.254249815225425e-05, |
|
"loss": 0.7026, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.2468588322246862e-05, |
|
"loss": 0.6709, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.239467849223947e-05, |
|
"loss": 0.682, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.2320768662232077e-05, |
|
"loss": 0.6742, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.2246858832224687e-05, |
|
"loss": 0.6839, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.2172949002217298e-05, |
|
"loss": 0.6488, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.2099039172209902e-05, |
|
"loss": 0.6798, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.2025129342202513e-05, |
|
"loss": 0.6986, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.1951219512195124e-05, |
|
"loss": 0.6986, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.187730968218773e-05, |
|
"loss": 0.6855, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.1803399852180342e-05, |
|
"loss": 0.6851, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.172949002217295e-05, |
|
"loss": 0.6652, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.165558019216556e-05, |
|
"loss": 0.7077, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.1581670362158168e-05, |
|
"loss": 0.6852, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.150776053215078e-05, |
|
"loss": 0.6733, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.1433850702143386e-05, |
|
"loss": 0.6794, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.1359940872135993e-05, |
|
"loss": 0.6964, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.1286031042128604e-05, |
|
"loss": 0.6756, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.1212121212121215e-05, |
|
"loss": 0.6911, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.1138211382113822e-05, |
|
"loss": 0.6606, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.106430155210643e-05, |
|
"loss": 0.697, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.099039172209904e-05, |
|
"loss": 0.6772, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.091648189209165e-05, |
|
"loss": 0.6729, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.084257206208426e-05, |
|
"loss": 0.6738, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.0768662232076866e-05, |
|
"loss": 0.6862, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.0694752402069477e-05, |
|
"loss": 0.6868, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.0620842572062084e-05, |
|
"loss": 0.6785, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.0546932742054695e-05, |
|
"loss": 0.6605, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0473022912047306e-05, |
|
"loss": 0.6947, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0399113082039913e-05, |
|
"loss": 0.6816, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.032520325203252e-05, |
|
"loss": 0.6899, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.025129342202513e-05, |
|
"loss": 0.7032, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.017738359201774e-05, |
|
"loss": 0.6956, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.010347376201035e-05, |
|
"loss": 0.6768, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0029563932002957e-05, |
|
"loss": 0.668, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.9955654101995567e-05, |
|
"loss": 0.6736, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.9881744271988175e-05, |
|
"loss": 0.6682, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9807834441980786e-05, |
|
"loss": 0.6657, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9733924611973393e-05, |
|
"loss": 0.6675, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9660014781966e-05, |
|
"loss": 0.6985, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.958610495195861e-05, |
|
"loss": 0.7021, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9512195121951222e-05, |
|
"loss": 0.6778, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.943828529194383e-05, |
|
"loss": 0.6738, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9364375461936437e-05, |
|
"loss": 0.6746, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9290465631929047e-05, |
|
"loss": 0.6873, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9216555801921658e-05, |
|
"loss": 0.6645, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.9142645971914266e-05, |
|
"loss": 0.6711, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.9068736141906873e-05, |
|
"loss": 0.6691, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8994826311899484e-05, |
|
"loss": 0.6787, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.892091648189209e-05, |
|
"loss": 0.6857, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8847006651884702e-05, |
|
"loss": 0.6755, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8773096821877313e-05, |
|
"loss": 0.663, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.869918699186992e-05, |
|
"loss": 0.6645, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8625277161862528e-05, |
|
"loss": 0.6679, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.855136733185514e-05, |
|
"loss": 0.6962, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.847745750184775e-05, |
|
"loss": 0.6929, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8403547671840353e-05, |
|
"loss": 0.6926, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8329637841832964e-05, |
|
"loss": 0.6826, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8255728011825575e-05, |
|
"loss": 0.705, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 0.6744, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8107908351810793e-05, |
|
"loss": 0.6711, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.80339985218034e-05, |
|
"loss": 0.6847, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.7960088691796008e-05, |
|
"loss": 0.6809, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.788617886178862e-05, |
|
"loss": 0.6777, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.781226903178123e-05, |
|
"loss": 0.6767, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.7738359201773837e-05, |
|
"loss": 0.6831, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.7664449371766444e-05, |
|
"loss": 0.6916, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.7590539541759055e-05, |
|
"loss": 0.6577, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.7516629711751666e-05, |
|
"loss": 0.6701, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7442719881744273e-05, |
|
"loss": 0.6852, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.736881005173688e-05, |
|
"loss": 0.6649, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.729490022172949e-05, |
|
"loss": 0.6762, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.72209903917221e-05, |
|
"loss": 0.6953, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.714708056171471e-05, |
|
"loss": 0.6744, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.707317073170732e-05, |
|
"loss": 0.676, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6999260901699927e-05, |
|
"loss": 0.6721, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6925351071692535e-05, |
|
"loss": 0.685, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6851441241685146e-05, |
|
"loss": 0.667, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6777531411677756e-05, |
|
"loss": 0.6826, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.670362158167036e-05, |
|
"loss": 0.6829, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.662971175166297e-05, |
|
"loss": 0.6797, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6555801921655582e-05, |
|
"loss": 0.6575, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.648189209164819e-05, |
|
"loss": 0.6719, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.64079822616408e-05, |
|
"loss": 0.6692, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.6334072431633408e-05, |
|
"loss": 0.6697, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.6260162601626018e-05, |
|
"loss": 0.6964, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.6186252771618626e-05, |
|
"loss": 0.6702, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.6112342941611236e-05, |
|
"loss": 0.6784, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.6038433111603844e-05, |
|
"loss": 0.6816, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.596452328159645e-05, |
|
"loss": 0.6664, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5890613451589062e-05, |
|
"loss": 0.6691, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5816703621581673e-05, |
|
"loss": 0.6831, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.574279379157428e-05, |
|
"loss": 0.6566, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.5668883961566888e-05, |
|
"loss": 0.6607, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.55949741315595e-05, |
|
"loss": 0.6579, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.5521064301552106e-05, |
|
"loss": 0.6688, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.5447154471544717e-05, |
|
"loss": 0.6875, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.5373244641537327e-05, |
|
"loss": 0.6742, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.5299334811529935e-05, |
|
"loss": 0.6736, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.5225424981522544e-05, |
|
"loss": 0.6678, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.5151515151515153e-05, |
|
"loss": 0.6667, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.5077605321507762e-05, |
|
"loss": 0.6491, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.500369549150037e-05, |
|
"loss": 0.6505, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.4929785661492978e-05, |
|
"loss": 0.6771, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.485587583148559e-05, |
|
"loss": 0.6634, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4781966001478198e-05, |
|
"loss": 0.6712, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4708056171470807e-05, |
|
"loss": 0.6646, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4634146341463415e-05, |
|
"loss": 0.6992, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4560236511456024e-05, |
|
"loss": 0.6795, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4486326681448633e-05, |
|
"loss": 0.6809, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.4412416851441244e-05, |
|
"loss": 0.6687, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.433850702143385e-05, |
|
"loss": 0.6707, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.426459719142646e-05, |
|
"loss": 0.6802, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.419068736141907e-05, |
|
"loss": 0.6624, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.4116777531411678e-05, |
|
"loss": 0.6985, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.4042867701404289e-05, |
|
"loss": 0.6808, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.3968957871396895e-05, |
|
"loss": 0.6802, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.3895048041389506e-05, |
|
"loss": 0.6685, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.3821138211382115e-05, |
|
"loss": 0.6702, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.3747228381374724e-05, |
|
"loss": 0.6664, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.3673318551367331e-05, |
|
"loss": 0.6597, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.359940872135994e-05, |
|
"loss": 0.6818, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.3525498891352551e-05, |
|
"loss": 0.6685, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.345158906134516e-05, |
|
"loss": 0.666, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.337767923133777e-05, |
|
"loss": 0.6882, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.3303769401330377e-05, |
|
"loss": 0.6702, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.3229859571322986e-05, |
|
"loss": 0.6771, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.3155949741315596e-05, |
|
"loss": 0.6715, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.3082039911308206e-05, |
|
"loss": 0.6538, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.3008130081300815e-05, |
|
"loss": 0.6619, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.2934220251293422e-05, |
|
"loss": 0.6735, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.2860310421286031e-05, |
|
"loss": 0.6693, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.2786400591278642e-05, |
|
"loss": 0.6474, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.2712490761271251e-05, |
|
"loss": 0.677, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.2638580931263858e-05, |
|
"loss": 0.6817, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.2564671101256467e-05, |
|
"loss": 0.6704, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.2490761271249077e-05, |
|
"loss": 0.6464, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.2416851441241686e-05, |
|
"loss": 0.6844, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.2342941611234295e-05, |
|
"loss": 0.681, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.2269031781226904e-05, |
|
"loss": 0.6782, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.2195121951219513e-05, |
|
"loss": 0.6781, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.2121212121212122e-05, |
|
"loss": 0.6817, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.2047302291204731e-05, |
|
"loss": 0.6717, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.197339246119734e-05, |
|
"loss": 0.6704, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.1899482631189948e-05, |
|
"loss": 0.6675, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.1825572801182558e-05, |
|
"loss": 0.6586, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.1751662971175167e-05, |
|
"loss": 0.6592, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.1677753141167776e-05, |
|
"loss": 0.6789, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.1603843311160386e-05, |
|
"loss": 0.6721, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.1529933481152993e-05, |
|
"loss": 0.6675, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.1456023651145604e-05, |
|
"loss": 0.681, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.1382113821138211e-05, |
|
"loss": 0.6711, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.130820399113082e-05, |
|
"loss": 0.6575, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.1234294161123431e-05, |
|
"loss": 0.6553, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.1160384331116038e-05, |
|
"loss": 0.6562, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.1086474501108649e-05, |
|
"loss": 0.6706, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.1012564671101257e-05, |
|
"loss": 0.6772, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.0938654841093866e-05, |
|
"loss": 0.6671, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.0864745011086475e-05, |
|
"loss": 0.6989, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.0790835181079084e-05, |
|
"loss": 0.6861, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.0716925351071693e-05, |
|
"loss": 0.6777, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.0643015521064302e-05, |
|
"loss": 0.6616, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.0569105691056911e-05, |
|
"loss": 0.6805, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.049519586104952e-05, |
|
"loss": 0.6628, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.042128603104213e-05, |
|
"loss": 0.6901, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.0347376201034738e-05, |
|
"loss": 0.672, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.0273466371027347e-05, |
|
"loss": 0.6681, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.0199556541019956e-05, |
|
"loss": 0.6732, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.0125646711012566e-05, |
|
"loss": 0.6462, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.0051736881005175e-05, |
|
"loss": 0.676, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.977827050997784e-06, |
|
"loss": 0.6782, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.903917220990393e-06, |
|
"loss": 0.662, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.830007390983e-06, |
|
"loss": 0.6705, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.756097560975611e-06, |
|
"loss": 0.6712, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.682187730968218e-06, |
|
"loss": 0.6842, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.608277900960829e-06, |
|
"loss": 0.6746, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.534368070953437e-06, |
|
"loss": 0.6728, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.460458240946046e-06, |
|
"loss": 0.6767, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.386548410938656e-06, |
|
"loss": 0.6552, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.312638580931264e-06, |
|
"loss": 0.6695, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.238728750923875e-06, |
|
"loss": 0.6678, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.164818920916482e-06, |
|
"loss": 0.6719, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.6748, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.0169992609017e-06, |
|
"loss": 0.6622, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 8.94308943089431e-06, |
|
"loss": 0.6618, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 8.869179600886918e-06, |
|
"loss": 0.6543, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.795269770879527e-06, |
|
"loss": 0.6822, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.721359940872136e-06, |
|
"loss": 0.6577, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.647450110864746e-06, |
|
"loss": 0.6678, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.573540280857355e-06, |
|
"loss": 0.6461, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.499630450849964e-06, |
|
"loss": 0.66, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.425720620842573e-06, |
|
"loss": 0.6593, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.35181079083518e-06, |
|
"loss": 0.6586, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.277900960827791e-06, |
|
"loss": 0.6745, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.2039911308204e-06, |
|
"loss": 0.6506, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.130081300813009e-06, |
|
"loss": 0.6529, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.056171470805618e-06, |
|
"loss": 0.6779, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.982261640798226e-06, |
|
"loss": 0.64, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.908351810790836e-06, |
|
"loss": 0.6603, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.834441980783444e-06, |
|
"loss": 0.6954, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.760532150776053e-06, |
|
"loss": 0.6671, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.686622320768664e-06, |
|
"loss": 0.6412, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.612712490761272e-06, |
|
"loss": 0.6763, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.538802660753881e-06, |
|
"loss": 0.6633, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.464892830746489e-06, |
|
"loss": 0.6496, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.390983000739099e-06, |
|
"loss": 0.6481, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.317073170731707e-06, |
|
"loss": 0.6718, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.2431633407243165e-06, |
|
"loss": 0.6738, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.169253510716925e-06, |
|
"loss": 0.6692, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 7.095343680709535e-06, |
|
"loss": 0.6662, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 7.0214338507021446e-06, |
|
"loss": 0.657, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.947524020694753e-06, |
|
"loss": 0.646, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.873614190687362e-06, |
|
"loss": 0.6506, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.79970436067997e-06, |
|
"loss": 0.6588, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.72579453067258e-06, |
|
"loss": 0.6906, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.651884700665188e-06, |
|
"loss": 0.6626, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.577974870657798e-06, |
|
"loss": 0.6795, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.504065040650407e-06, |
|
"loss": 0.6471, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.4301552106430155e-06, |
|
"loss": 0.6757, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.3562453806356255e-06, |
|
"loss": 0.6755, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.282335550628234e-06, |
|
"loss": 0.6802, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.208425720620843e-06, |
|
"loss": 0.6751, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.134515890613452e-06, |
|
"loss": 0.6675, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 0.6663, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.98669623059867e-06, |
|
"loss": 0.6613, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.912786400591279e-06, |
|
"loss": 0.6713, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.838876570583888e-06, |
|
"loss": 0.6596, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.7649667405764965e-06, |
|
"loss": 0.6543, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.6910569105691056e-06, |
|
"loss": 0.6693, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.6171470805617155e-06, |
|
"loss": 0.6677, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.5432372505543246e-06, |
|
"loss": 0.6807, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.469327420546933e-06, |
|
"loss": 0.669, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.395417590539542e-06, |
|
"loss": 0.6806, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.321507760532151e-06, |
|
"loss": 0.6572, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.24759793052476e-06, |
|
"loss": 0.659, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.173688100517369e-06, |
|
"loss": 0.6614, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.099778270509978e-06, |
|
"loss": 0.6713, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.025868440502587e-06, |
|
"loss": 0.6608, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.951958610495196e-06, |
|
"loss": 0.6597, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.8780487804878055e-06, |
|
"loss": 0.661, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.8041389504804146e-06, |
|
"loss": 0.6631, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.730229120473023e-06, |
|
"loss": 0.6812, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.656319290465632e-06, |
|
"loss": 0.6611, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.582409460458241e-06, |
|
"loss": 0.6804, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.50849963045085e-06, |
|
"loss": 0.6655, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.434589800443459e-06, |
|
"loss": 0.6634, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.360679970436068e-06, |
|
"loss": 0.6642, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.286770140428677e-06, |
|
"loss": 0.6826, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.212860310421286e-06, |
|
"loss": 0.6986, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.1389504804138955e-06, |
|
"loss": 0.6538, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.0650406504065046e-06, |
|
"loss": 0.6831, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.991130820399113e-06, |
|
"loss": 0.6597, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.917220990391722e-06, |
|
"loss": 0.6864, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.843311160384332e-06, |
|
"loss": 0.6499, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.7694013303769405e-06, |
|
"loss": 0.6661, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.6954915003695496e-06, |
|
"loss": 0.6762, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.6215816703621582e-06, |
|
"loss": 0.6672, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.5476718403547673e-06, |
|
"loss": 0.6606, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.4737620103473764e-06, |
|
"loss": 0.6557, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.399852180339985e-06, |
|
"loss": 0.6763, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.325942350332594e-06, |
|
"loss": 0.6542, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.2520325203252037e-06, |
|
"loss": 0.6592, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.1781226903178127e-06, |
|
"loss": 0.6636, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.1042128603104214e-06, |
|
"loss": 0.6487, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.0303030303030305e-06, |
|
"loss": 0.6763, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.9563932002956396e-06, |
|
"loss": 0.6617, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.8824833702882482e-06, |
|
"loss": 0.6683, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.8085735402808577e-06, |
|
"loss": 0.6658, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.7346637102734664e-06, |
|
"loss": 0.6639, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.6607538802660755e-06, |
|
"loss": 0.6671, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.5868440502586846e-06, |
|
"loss": 0.67, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.5129342202512937e-06, |
|
"loss": 0.6904, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.4390243902439027e-06, |
|
"loss": 0.6444, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.3651145602365114e-06, |
|
"loss": 0.6626, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.2912047302291205e-06, |
|
"loss": 0.6861, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.2172949002217296e-06, |
|
"loss": 0.6626, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.1433850702143387e-06, |
|
"loss": 0.6687, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.0694752402069477e-06, |
|
"loss": 0.6723, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.9955654101995564e-06, |
|
"loss": 0.6842, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.921655580192166e-06, |
|
"loss": 0.6391, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.8477457501847748e-06, |
|
"loss": 0.6612, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.7738359201773837e-06, |
|
"loss": 0.6542, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.6999260901699925e-06, |
|
"loss": 0.671, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.6260162601626018e-06, |
|
"loss": 0.6669, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.5521064301552107e-06, |
|
"loss": 0.6671, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.4781966001478198e-06, |
|
"loss": 0.6375, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.4042867701404289e-06, |
|
"loss": 0.6593, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.3303769401330377e-06, |
|
"loss": 0.6719, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.2564671101256468e-06, |
|
"loss": 0.6673, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.1825572801182557e-06, |
|
"loss": 0.6624, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.1086474501108648e-06, |
|
"loss": 0.6479, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.0347376201034739e-06, |
|
"loss": 0.668, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 9.60827790096083e-07, |
|
"loss": 0.6592, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 8.869179600886918e-07, |
|
"loss": 0.658, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 8.130081300813009e-07, |
|
"loss": 0.6538, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.390983000739099e-07, |
|
"loss": 0.6621, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 6.651884700665189e-07, |
|
"loss": 0.6663, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.912786400591279e-07, |
|
"loss": 0.6573, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.173688100517369e-07, |
|
"loss": 0.6785, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.434589800443459e-07, |
|
"loss": 0.6408, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.6954915003695495e-07, |
|
"loss": 0.663, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.956393200295639e-07, |
|
"loss": 0.6562, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.2172949002217296e-07, |
|
"loss": 0.6537, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.4781966001478196e-07, |
|
"loss": 0.664, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 7.390983000739098e-08, |
|
"loss": 0.6657, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.6589, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 0.9302364587783813, |
|
"eval_runtime": 496.1152, |
|
"eval_samples_per_second": 32.13, |
|
"eval_steps_per_second": 0.504, |
|
"eval_wer": 0.7427813602969554, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1353, |
|
"total_flos": 6.225660003734846e+19, |
|
"train_loss": 0.8526812995792933, |
|
"train_runtime": 42752.8496, |
|
"train_samples_per_second": 32.442, |
|
"train_steps_per_second": 0.032 |
|
} |
|
], |
|
"max_steps": 1353, |
|
"num_train_epochs": 3, |
|
"total_flos": 6.225660003734846e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|