{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 640, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.4907612239995434, |
|
"learning_rate": 1.09375e-07, |
|
"loss": 0.7509, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.1504421825230433, |
|
"learning_rate": 2.1875e-07, |
|
"loss": 0.6507, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.105418020487931, |
|
"learning_rate": 3.28125e-07, |
|
"loss": 0.6342, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.254821921473971, |
|
"learning_rate": 4.375e-07, |
|
"loss": 0.6813, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.3389206590297786, |
|
"learning_rate": 5.46875e-07, |
|
"loss": 0.7041, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.103212376255131, |
|
"learning_rate": 6.5625e-07, |
|
"loss": 0.6574, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.4026075935403863, |
|
"learning_rate": 7.65625e-07, |
|
"loss": 0.7184, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.144883715920057, |
|
"learning_rate": 8.75e-07, |
|
"loss": 0.6639, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.058002010109806, |
|
"learning_rate": 9.84375e-07, |
|
"loss": 0.6932, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.0365307674019246, |
|
"learning_rate": 1.09375e-06, |
|
"loss": 0.6667, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.6573450994955512, |
|
"learning_rate": 1.203125e-06, |
|
"loss": 0.6364, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.734125128574585, |
|
"learning_rate": 1.3125e-06, |
|
"loss": 0.7094, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.5814881821639086, |
|
"learning_rate": 1.421875e-06, |
|
"loss": 0.6469, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.143717763435021, |
|
"learning_rate": 1.53125e-06, |
|
"loss": 0.6392, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.113314084373293, |
|
"learning_rate": 1.640625e-06, |
|
"loss": 0.6419, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1248682912950092, |
|
"learning_rate": 1.75e-06, |
|
"loss": 0.6257, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.0523745421583506, |
|
"learning_rate": 1.8593749999999999e-06, |
|
"loss": 0.6387, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.9540773623803008, |
|
"learning_rate": 1.96875e-06, |
|
"loss": 0.5753, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.0055838205407808, |
|
"learning_rate": 2.078125e-06, |
|
"loss": 0.6348, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2432091605190205, |
|
"learning_rate": 2.1875e-06, |
|
"loss": 0.6318, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3869607173028649, |
|
"learning_rate": 2.296875e-06, |
|
"loss": 0.6012, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.240637379277943, |
|
"learning_rate": 2.40625e-06, |
|
"loss": 0.5629, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1314993062351444, |
|
"learning_rate": 2.515625e-06, |
|
"loss": 0.5683, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.8815352840655305, |
|
"learning_rate": 2.625e-06, |
|
"loss": 0.5399, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.8120210446427198, |
|
"learning_rate": 2.734375e-06, |
|
"loss": 0.5661, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.7406111049433269, |
|
"learning_rate": 2.84375e-06, |
|
"loss": 0.5374, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.9308313980688961, |
|
"learning_rate": 2.9531249999999998e-06, |
|
"loss": 0.5139, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1735905075611526, |
|
"learning_rate": 3.0625e-06, |
|
"loss": 0.549, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.993673114117623, |
|
"learning_rate": 3.171875e-06, |
|
"loss": 0.5097, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.912014516728298, |
|
"learning_rate": 3.28125e-06, |
|
"loss": 0.5496, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7795311370852275, |
|
"learning_rate": 3.390625e-06, |
|
"loss": 0.5672, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.7360032804580571, |
|
"learning_rate": 3.5e-06, |
|
"loss": 0.5452, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.9290691764870666, |
|
"learning_rate": 3.609375e-06, |
|
"loss": 0.5574, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.8323834195359898, |
|
"learning_rate": 3.7187499999999998e-06, |
|
"loss": 0.5093, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.7793193471539842, |
|
"learning_rate": 3.828125e-06, |
|
"loss": 0.5053, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.694220694288892, |
|
"learning_rate": 3.9375e-06, |
|
"loss": 0.5364, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5692085599440716, |
|
"learning_rate": 4.046875e-06, |
|
"loss": 0.5141, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.5870629876760964, |
|
"learning_rate": 4.15625e-06, |
|
"loss": 0.5303, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.5804059716624939, |
|
"learning_rate": 4.265625e-06, |
|
"loss": 0.4715, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.6036445201285046, |
|
"learning_rate": 4.375e-06, |
|
"loss": 0.4963, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.4904238208746072, |
|
"learning_rate": 4.484375e-06, |
|
"loss": 0.4614, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.46399381294856473, |
|
"learning_rate": 4.59375e-06, |
|
"loss": 0.4743, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4606123524196029, |
|
"learning_rate": 4.703125e-06, |
|
"loss": 0.4905, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.49558583641444864, |
|
"learning_rate": 4.8125e-06, |
|
"loss": 0.5502, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4954617188222101, |
|
"learning_rate": 4.921875e-06, |
|
"loss": 0.4903, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.49117419459298145, |
|
"learning_rate": 5.03125e-06, |
|
"loss": 0.4955, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.47373321434706367, |
|
"learning_rate": 5.140625e-06, |
|
"loss": 0.5362, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.42677909808697095, |
|
"learning_rate": 5.25e-06, |
|
"loss": 0.5186, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.8847880576479226, |
|
"learning_rate": 5.359375e-06, |
|
"loss": 0.465, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.38006660937319003, |
|
"learning_rate": 5.46875e-06, |
|
"loss": 0.4458, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.42406636746037957, |
|
"learning_rate": 5.578125e-06, |
|
"loss": 0.4738, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4497405637038939, |
|
"learning_rate": 5.6875e-06, |
|
"loss": 0.4717, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.42588364899074693, |
|
"learning_rate": 5.796875e-06, |
|
"loss": 0.4839, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.40971064168289695, |
|
"learning_rate": 5.9062499999999996e-06, |
|
"loss": 0.5087, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3832469222413672, |
|
"learning_rate": 6.015625e-06, |
|
"loss": 0.4917, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.36300109542286413, |
|
"learning_rate": 6.125e-06, |
|
"loss": 0.454, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.3693983257133135, |
|
"learning_rate": 6.234375e-06, |
|
"loss": 0.4612, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.34955603337351576, |
|
"learning_rate": 6.34375e-06, |
|
"loss": 0.4519, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.366486649899712, |
|
"learning_rate": 6.453125e-06, |
|
"loss": 0.4801, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.3851964587046632, |
|
"learning_rate": 6.5625e-06, |
|
"loss": 0.4746, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3429078789254216, |
|
"learning_rate": 6.6718749999999995e-06, |
|
"loss": 0.4544, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3679977159831168, |
|
"learning_rate": 6.78125e-06, |
|
"loss": 0.4803, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.3730323682839911, |
|
"learning_rate": 6.890625e-06, |
|
"loss": 0.4986, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.3445733696341208, |
|
"learning_rate": 7e-06, |
|
"loss": 0.4693, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.35641969322397765, |
|
"learning_rate": 6.9999479414879755e-06, |
|
"loss": 0.4668, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.33068121294839087, |
|
"learning_rate": 6.999791767500524e-06, |
|
"loss": 0.4505, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.33728314549188104, |
|
"learning_rate": 6.999531482683467e-06, |
|
"loss": 0.4393, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.33775640987020233, |
|
"learning_rate": 6.999167094779681e-06, |
|
"loss": 0.4611, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.343304798662571, |
|
"learning_rate": 6.998698614628881e-06, |
|
"loss": 0.4691, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.3553019759244247, |
|
"learning_rate": 6.99812605616728e-06, |
|
"loss": 0.4754, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.3151337286948089, |
|
"learning_rate": 6.997449436427188e-06, |
|
"loss": 0.4655, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.3154508848797514, |
|
"learning_rate": 6.996668775536502e-06, |
|
"loss": 0.4551, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.30673808027443833, |
|
"learning_rate": 6.9957840967181034e-06, |
|
"loss": 0.4335, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.3684762500038093, |
|
"learning_rate": 6.994795426289171e-06, |
|
"loss": 0.447, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.3245066578888837, |
|
"learning_rate": 6.993702793660396e-06, |
|
"loss": 0.4354, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.33960008906806616, |
|
"learning_rate": 6.992506231335112e-06, |
|
"loss": 0.4411, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.3393061927704471, |
|
"learning_rate": 6.991205774908319e-06, |
|
"loss": 0.4449, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.3611077743448448, |
|
"learning_rate": 6.9898014630656335e-06, |
|
"loss": 0.446, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.3727255174354603, |
|
"learning_rate": 6.988293337582131e-06, |
|
"loss": 0.4401, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.3626035130302128, |
|
"learning_rate": 6.9866814433211094e-06, |
|
"loss": 0.4821, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.3376405406112302, |
|
"learning_rate": 6.984965828232749e-06, |
|
"loss": 0.4625, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2976230880823188, |
|
"learning_rate": 6.983146543352689e-06, |
|
"loss": 0.4362, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.3055669447324052, |
|
"learning_rate": 6.981223642800509e-06, |
|
"loss": 0.4321, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.33059939115881387, |
|
"learning_rate": 6.979197183778118e-06, |
|
"loss": 0.4667, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.3233396642976923, |
|
"learning_rate": 6.977067226568055e-06, |
|
"loss": 0.4615, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.3320915668626651, |
|
"learning_rate": 6.974833834531692e-06, |
|
"loss": 0.4447, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.35242585608089094, |
|
"learning_rate": 6.972497074107354e-06, |
|
"loss": 0.4717, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.335342569378105, |
|
"learning_rate": 6.970057014808337e-06, |
|
"loss": 0.4457, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.33281453048842513, |
|
"learning_rate": 6.967513729220844e-06, |
|
"loss": 0.4485, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.3170073581449777, |
|
"learning_rate": 6.964867293001827e-06, |
|
"loss": 0.4521, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.34489609925729525, |
|
"learning_rate": 6.962117784876734e-06, |
|
"loss": 0.4592, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.3133849074705517, |
|
"learning_rate": 6.959265286637163e-06, |
|
"loss": 0.4209, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.35497309527917403, |
|
"learning_rate": 6.956309883138437e-06, |
|
"loss": 0.492, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.3437625485559197, |
|
"learning_rate": 6.953251662297077e-06, |
|
"loss": 0.438, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.3384164661888183, |
|
"learning_rate": 6.950090715088181e-06, |
|
"loss": 0.4447, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.39820938339107037, |
|
"learning_rate": 6.946827135542729e-06, |
|
"loss": 0.4686, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.31137355275022705, |
|
"learning_rate": 6.94346102074477e-06, |
|
"loss": 0.4116, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.3374951130929495, |
|
"learning_rate": 6.939992470828554e-06, |
|
"loss": 0.4267, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.385791851799853, |
|
"learning_rate": 6.936421588975533e-06, |
|
"loss": 0.4749, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.4217868488199523, |
|
"learning_rate": 6.932748481411306e-06, |
|
"loss": 0.4819, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.32286969859849285, |
|
"learning_rate": 6.928973257402453e-06, |
|
"loss": 0.4355, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.3283858360190172, |
|
"learning_rate": 6.925096029253284e-06, |
|
"loss": 0.4585, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.33207664810442833, |
|
"learning_rate": 6.921116912302502e-06, |
|
"loss": 0.4423, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.36777168702028123, |
|
"learning_rate": 6.917036024919767e-06, |
|
"loss": 0.4466, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.34291748925162174, |
|
"learning_rate": 6.912853488502181e-06, |
|
"loss": 0.4659, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.3414087931045369, |
|
"learning_rate": 6.908569427470668e-06, |
|
"loss": 0.4718, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.3184110518552108, |
|
"learning_rate": 6.904183969266283e-06, |
|
"loss": 0.4395, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.33317082829069683, |
|
"learning_rate": 6.899697244346414e-06, |
|
"loss": 0.4554, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.37433602938769583, |
|
"learning_rate": 6.8951093861809044e-06, |
|
"loss": 0.4714, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.3599952403662053, |
|
"learning_rate": 6.890420531248076e-06, |
|
"loss": 0.4563, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.3083422618801623, |
|
"learning_rate": 6.885630819030679e-06, |
|
"loss": 0.412, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.3386336304018581, |
|
"learning_rate": 6.880740392011738e-06, |
|
"loss": 0.4465, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.3475827421782862, |
|
"learning_rate": 6.875749395670313e-06, |
|
"loss": 0.4577, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.3966450413893103, |
|
"learning_rate": 6.870657978477169e-06, |
|
"loss": 0.454, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.3406821834010313, |
|
"learning_rate": 6.865466291890367e-06, |
|
"loss": 0.4379, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.3587077375756719, |
|
"learning_rate": 6.860174490350751e-06, |
|
"loss": 0.4513, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.33640117371342315, |
|
"learning_rate": 6.854782731277357e-06, |
|
"loss": 0.4202, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.3428259358656843, |
|
"learning_rate": 6.849291175062731e-06, |
|
"loss": 0.4435, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.42577201899051426, |
|
"learning_rate": 6.843699985068156e-06, |
|
"loss": 0.4824, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.3316256364391201, |
|
"learning_rate": 6.838009327618794e-06, |
|
"loss": 0.4165, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.39342611861287163, |
|
"learning_rate": 6.8322193719987345e-06, |
|
"loss": 0.4658, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.3619051246676873, |
|
"learning_rate": 6.8263302904459634e-06, |
|
"loss": 0.4214, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.3421694783963853, |
|
"learning_rate": 6.820342258147237e-06, |
|
"loss": 0.4205, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.3520272042873268, |
|
"learning_rate": 6.81425545323287e-06, |
|
"loss": 0.4326, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.37261582297065365, |
|
"learning_rate": 6.808070056771437e-06, |
|
"loss": 0.436, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.3190843731641372, |
|
"learning_rate": 6.801786252764388e-06, |
|
"loss": 0.408, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.45801591328905905, |
|
"learning_rate": 6.795404228140573e-06, |
|
"loss": 0.4262, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.3375844074069623, |
|
"learning_rate": 6.788924172750679e-06, |
|
"loss": 0.454, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.37385172623439816, |
|
"learning_rate": 6.782346279361589e-06, |
|
"loss": 0.4898, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.3181793747206892, |
|
"learning_rate": 6.77567074365064e-06, |
|
"loss": 0.4232, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.3422195996524943, |
|
"learning_rate": 6.768897764199808e-06, |
|
"loss": 0.4019, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.36276549218726206, |
|
"learning_rate": 6.762027542489795e-06, |
|
"loss": 0.436, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.38061371338027, |
|
"learning_rate": 6.755060282894042e-06, |
|
"loss": 0.4579, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.32719635914528883, |
|
"learning_rate": 6.747996192672646e-06, |
|
"loss": 0.4233, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.3810736630935037, |
|
"learning_rate": 6.740835481966191e-06, |
|
"loss": 0.4567, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.37868571648829663, |
|
"learning_rate": 6.733578363789503e-06, |
|
"loss": 0.4231, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.36004410394238534, |
|
"learning_rate": 6.726225054025311e-06, |
|
"loss": 0.451, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.3908878371379872, |
|
"learning_rate": 6.718775771417823e-06, |
|
"loss": 0.4303, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.3459717178074042, |
|
"learning_rate": 6.711230737566219e-06, |
|
"loss": 0.4196, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.3476179794053542, |
|
"learning_rate": 6.7035901769180656e-06, |
|
"loss": 0.4715, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.3586451898247301, |
|
"learning_rate": 6.6958543167626265e-06, |
|
"loss": 0.4315, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.3361222789247428, |
|
"learning_rate": 6.688023387224115e-06, |
|
"loss": 0.44, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.3408702408553507, |
|
"learning_rate": 6.6800976212548396e-06, |
|
"loss": 0.4427, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.3412669995759075, |
|
"learning_rate": 6.672077254628275e-06, |
|
"loss": 0.4327, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.35466921769883636, |
|
"learning_rate": 6.663962525932052e-06, |
|
"loss": 0.4439, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.3169267316634476, |
|
"learning_rate": 6.655753676560856e-06, |
|
"loss": 0.3991, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.33593441440927124, |
|
"learning_rate": 6.647450950709251e-06, |
|
"loss": 0.4577, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.33950045221890385, |
|
"learning_rate": 6.639054595364409e-06, |
|
"loss": 0.4591, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.3615774798815532, |
|
"learning_rate": 6.630564860298768e-06, |
|
"loss": 0.4344, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.34631817223512895, |
|
"learning_rate": 6.6219819980625995e-06, |
|
"loss": 0.4119, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.3332561539660665, |
|
"learning_rate": 6.613306263976496e-06, |
|
"loss": 0.4147, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.34402131787716334, |
|
"learning_rate": 6.604537916123775e-06, |
|
"loss": 0.3931, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.3690152266668397, |
|
"learning_rate": 6.595677215342806e-06, |
|
"loss": 0.4215, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.322342729858813, |
|
"learning_rate": 6.5867244252192426e-06, |
|
"loss": 0.4017, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.31344067021908767, |
|
"learning_rate": 6.577679812078189e-06, |
|
"loss": 0.3929, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.3568397607324737, |
|
"learning_rate": 6.568543644976277e-06, |
|
"loss": 0.4195, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.33749366055496693, |
|
"learning_rate": 6.559316195693656e-06, |
|
"loss": 0.3897, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.3455835380149386, |
|
"learning_rate": 6.549997738725915e-06, |
|
"loss": 0.4264, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.37228654836514496, |
|
"learning_rate": 6.540588551275913e-06, |
|
"loss": 0.4392, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.33154975556916305, |
|
"learning_rate": 6.531088913245536e-06, |
|
"loss": 0.427, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.3458031035321322, |
|
"learning_rate": 6.5214991072273635e-06, |
|
"loss": 0.4375, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.31276873609868006, |
|
"learning_rate": 6.511819418496276e-06, |
|
"loss": 0.4031, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.34409165822067467, |
|
"learning_rate": 6.502050135000952e-06, |
|
"loss": 0.4011, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.36279847439403795, |
|
"learning_rate": 6.492191547355313e-06, |
|
"loss": 0.4237, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.34564833445832377, |
|
"learning_rate": 6.482243948829876e-06, |
|
"loss": 0.4055, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.38077908458682896, |
|
"learning_rate": 6.472207635343026e-06, |
|
"loss": 0.4209, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.329913816797509, |
|
"learning_rate": 6.46208290545222e-06, |
|
"loss": 0.3818, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.3111225383454026, |
|
"learning_rate": 6.4518700603451e-06, |
|
"loss": 0.397, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.3432239018285694, |
|
"learning_rate": 6.441569403830533e-06, |
|
"loss": 0.3698, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.35339874192273096, |
|
"learning_rate": 6.431181242329578e-06, |
|
"loss": 0.3878, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.3323530722856522, |
|
"learning_rate": 6.420705884866365e-06, |
|
"loss": 0.4014, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.3521691617675391, |
|
"learning_rate": 6.4101436430589085e-06, |
|
"loss": 0.4458, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.3538003813466273, |
|
"learning_rate": 6.399494831109832e-06, |
|
"loss": 0.4014, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.33082565177535356, |
|
"learning_rate": 6.3887597657970235e-06, |
|
"loss": 0.4061, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.3585534863365245, |
|
"learning_rate": 6.377938766464212e-06, |
|
"loss": 0.4353, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.3817049240831521, |
|
"learning_rate": 6.367032155011471e-06, |
|
"loss": 0.4272, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.3688197288995215, |
|
"learning_rate": 6.3560402558856354e-06, |
|
"loss": 0.3783, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.3291013719807746, |
|
"learning_rate": 6.3449633960706536e-06, |
|
"loss": 0.3683, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.34668256480725407, |
|
"learning_rate": 6.333801905077864e-06, |
|
"loss": 0.3886, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.3139656780215767, |
|
"learning_rate": 6.322556114936189e-06, |
|
"loss": 0.3864, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.3479964266741095, |
|
"learning_rate": 6.311226360182257e-06, |
|
"loss": 0.4026, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.3365002537739493, |
|
"learning_rate": 6.2998129778504535e-06, |
|
"loss": 0.4205, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.3395485075789255, |
|
"learning_rate": 6.288316307462895e-06, |
|
"loss": 0.406, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.31182460780702925, |
|
"learning_rate": 6.276736691019323e-06, |
|
"loss": 0.377, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.31900712972961165, |
|
"learning_rate": 6.265074472986942e-06, |
|
"loss": 0.3876, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.3234199942872983, |
|
"learning_rate": 6.253330000290159e-06, |
|
"loss": 0.3754, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.34875728639110426, |
|
"learning_rate": 6.241503622300277e-06, |
|
"loss": 0.402, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.3406636880839683, |
|
"learning_rate": 6.229595690825086e-06, |
|
"loss": 0.3955, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.38204417456095835, |
|
"learning_rate": 6.217606560098415e-06, |
|
"loss": 0.3795, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.40729559753298156, |
|
"learning_rate": 6.205536586769579e-06, |
|
"loss": 0.4006, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.35939142710468447, |
|
"learning_rate": 6.193386129892782e-06, |
|
"loss": 0.4145, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.3990064878239788, |
|
"learning_rate": 6.181155550916423e-06, |
|
"loss": 0.3964, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.37789265888304424, |
|
"learning_rate": 6.168845213672358e-06, |
|
"loss": 0.395, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.334681217049792, |
|
"learning_rate": 6.156455484365066e-06, |
|
"loss": 0.3825, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.31563685389160046, |
|
"learning_rate": 6.143986731560761e-06, |
|
"loss": 0.3668, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.3532696477038572, |
|
"learning_rate": 6.131439326176421e-06, |
|
"loss": 0.3881, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.3591118501043564, |
|
"learning_rate": 6.118813641468765e-06, |
|
"loss": 0.3941, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.3233571068183556, |
|
"learning_rate": 6.1061100530231424e-06, |
|
"loss": 0.4001, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.33223865496179206, |
|
"learning_rate": 6.093328938742357e-06, |
|
"loss": 0.3981, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.3012035216026795, |
|
"learning_rate": 6.080470678835434e-06, |
|
"loss": 0.3896, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.30212477918803177, |
|
"learning_rate": 6.067535655806304e-06, |
|
"loss": 0.3725, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.3432977226199173, |
|
"learning_rate": 6.054524254442424e-06, |
|
"loss": 0.3756, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.3196624845591416, |
|
"learning_rate": 6.0414368618033354e-06, |
|
"loss": 0.3676, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.3327501182697744, |
|
"learning_rate": 6.028273867209144e-06, |
|
"loss": 0.3722, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.31984015375544, |
|
"learning_rate": 6.015035662228943e-06, |
|
"loss": 0.3765, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.35134173352065806, |
|
"learning_rate": 6.001722640669162e-06, |
|
"loss": 0.3762, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.3440468808364095, |
|
"learning_rate": 5.988335198561855e-06, |
|
"loss": 0.3794, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.34844527663899655, |
|
"learning_rate": 5.974873734152916e-06, |
|
"loss": 0.41, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.34304312268288334, |
|
"learning_rate": 5.961338647890235e-06, |
|
"loss": 0.3936, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.30945801228280895, |
|
"learning_rate": 5.947730342411785e-06, |
|
"loss": 0.3766, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.3216707019915462, |
|
"learning_rate": 5.93404922253364e-06, |
|
"loss": 0.3731, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.3427933027019099, |
|
"learning_rate": 5.9202956952379435e-06, |
|
"loss": 0.4022, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.3322640935210238, |
|
"learning_rate": 5.9064701696607854e-06, |
|
"loss": 0.3974, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.351184635298803, |
|
"learning_rate": 5.892573057080049e-06, |
|
"loss": 0.3829, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.357169284418552, |
|
"learning_rate": 5.878604770903163e-06, |
|
"loss": 0.4057, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.3485603587904994, |
|
"learning_rate": 5.864565726654811e-06, |
|
"loss": 0.3837, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.3299707884038329, |
|
"learning_rate": 5.850456341964565e-06, |
|
"loss": 0.3866, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.3301963858451784, |
|
"learning_rate": 5.836277036554466e-06, |
|
"loss": 0.3883, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.35937103442524587, |
|
"learning_rate": 5.822028232226539e-06, |
|
"loss": 0.3935, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.3143923577131876, |
|
"learning_rate": 5.807710352850241e-06, |
|
"loss": 0.3672, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.34204735597602554, |
|
"learning_rate": 5.793323824349856e-06, |
|
"loss": 0.4238, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.350414261224835, |
|
"learning_rate": 5.778869074691822e-06, |
|
"loss": 0.3761, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.3366168053878449, |
|
"learning_rate": 5.764346533872001e-06, |
|
"loss": 0.3868, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.3765806993799636, |
|
"learning_rate": 5.749756633902887e-06, |
|
"loss": 0.4002, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.30582158798586045, |
|
"learning_rate": 5.735099808800758e-06, |
|
"loss": 0.3551, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.3333315124044687, |
|
"learning_rate": 5.720376494572759e-06, |
|
"loss": 0.3674, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.37190863363237675, |
|
"learning_rate": 5.705587129203936e-06, |
|
"loss": 0.4117, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.4030535384641014, |
|
"learning_rate": 5.690732152644207e-06, |
|
"loss": 0.4165, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.31140781430679837, |
|
"learning_rate": 5.675812006795271e-06, |
|
"loss": 0.3765, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.3334852992049489, |
|
"learning_rate": 5.6608271354974675e-06, |
|
"loss": 0.4011, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.3333953874618333, |
|
"learning_rate": 5.645777984516568e-06, |
|
"loss": 0.3842, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.3484910738096921, |
|
"learning_rate": 5.630665001530522e-06, |
|
"loss": 0.3883, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.33587659933991937, |
|
"learning_rate": 5.615488636116131e-06, |
|
"loss": 0.408, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.3481828026794521, |
|
"learning_rate": 5.600249339735683e-06, |
|
"loss": 0.412, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.32627533208737675, |
|
"learning_rate": 5.584947565723517e-06, |
|
"loss": 0.3815, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.32794121285389627, |
|
"learning_rate": 5.569583769272539e-06, |
|
"loss": 0.3944, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.3610564505429431, |
|
"learning_rate": 5.554158407420681e-06, |
|
"loss": 0.4111, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.3485408935089911, |
|
"learning_rate": 5.5386719390373075e-06, |
|
"loss": 0.3984, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.3111131027556224, |
|
"learning_rate": 5.523124824809562e-06, |
|
"loss": 0.3611, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.3212167065940285, |
|
"learning_rate": 5.507517527228661e-06, |
|
"loss": 0.3902, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.3429494707502764, |
|
"learning_rate": 5.4918505105761435e-06, |
|
"loss": 0.3988, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.37745564096378414, |
|
"learning_rate": 5.476124240910052e-06, |
|
"loss": 0.391, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.3370908454585386, |
|
"learning_rate": 5.460339186051069e-06, |
|
"loss": 0.3832, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.35699424319652345, |
|
"learning_rate": 5.444495815568607e-06, |
|
"loss": 0.3931, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.32181963953057935, |
|
"learning_rate": 5.428594600766834e-06, |
|
"loss": 0.366, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.33474503624294333, |
|
"learning_rate": 5.412636014670652e-06, |
|
"loss": 0.3873, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.3868760538229008, |
|
"learning_rate": 5.396620532011631e-06, |
|
"loss": 0.4187, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.3265049011735541, |
|
"learning_rate": 5.380548629213884e-06, |
|
"loss": 0.3632, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.36248572247880945, |
|
"learning_rate": 5.364420784379892e-06, |
|
"loss": 0.4009, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.34416933574029995, |
|
"learning_rate": 5.348237477276288e-06, |
|
"loss": 0.3711, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.33413802546232907, |
|
"learning_rate": 5.331999189319578e-06, |
|
"loss": 0.3664, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.3379009176453982, |
|
"learning_rate": 5.315706403561825e-06, |
|
"loss": 0.3791, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.3400903622872459, |
|
"learning_rate": 5.299359604676275e-06, |
|
"loss": 0.383, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.3201446015275036, |
|
"learning_rate": 5.282959278942947e-06, |
|
"loss": 0.358, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.31420468207920393, |
|
"learning_rate": 5.266505914234152e-06, |
|
"loss": 0.3759, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.33144165880196313, |
|
"learning_rate": 5.25e-06, |
|
"loss": 0.3987, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.35730368664695833, |
|
"learning_rate": 5.233442027253823e-06, |
|
"loss": 0.4236, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.3204129143253973, |
|
"learning_rate": 5.216832488557577e-06, |
|
"loss": 0.3715, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.3470930852931772, |
|
"learning_rate": 5.20017187800719e-06, |
|
"loss": 0.3494, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.35698216535228194, |
|
"learning_rate": 5.183460691217857e-06, |
|
"loss": 0.3806, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.36940897857930255, |
|
"learning_rate": 5.166699425309303e-06, |
|
"loss": 0.3972, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.3248249397484033, |
|
"learning_rate": 5.1498885788909926e-06, |
|
"loss": 0.367, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.37853950376269563, |
|
"learning_rate": 5.133028652047296e-06, |
|
"loss": 0.3954, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.3903953620153008, |
|
"learning_rate": 5.11612014632262e-06, |
|
"loss": 0.3683, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.3655129284014845, |
|
"learning_rate": 5.099163564706473e-06, |
|
"loss": 0.3932, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.38542545846265136, |
|
"learning_rate": 5.082159411618519e-06, |
|
"loss": 0.3716, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.3563013091684777, |
|
"learning_rate": 5.065108192893563e-06, |
|
"loss": 0.3689, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.34414564307032347, |
|
"learning_rate": 5.048010415766505e-06, |
|
"loss": 0.414, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.34099408232504325, |
|
"learning_rate": 5.030866588857251e-06, |
|
"loss": 0.3825, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.32991209225304574, |
|
"learning_rate": 5.013677222155581e-06, |
|
"loss": 0.3894, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.3481866752789252, |
|
"learning_rate": 4.996442827005987e-06, |
|
"loss": 0.3921, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.3408069698194307, |
|
"learning_rate": 4.979163916092448e-06, |
|
"loss": 0.3842, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.3521646614948515, |
|
"learning_rate": 4.961841003423187e-06, |
|
"loss": 0.391, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.3241237346030365, |
|
"learning_rate": 4.944474604315381e-06, |
|
"loss": 0.3552, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.3369201964822902, |
|
"learning_rate": 4.927065235379828e-06, |
|
"loss": 0.4064, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.33897966935178175, |
|
"learning_rate": 4.9096134145055806e-06, |
|
"loss": 0.4034, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.34618663822712353, |
|
"learning_rate": 4.892119660844538e-06, |
|
"loss": 0.3824, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.3447506295061284, |
|
"learning_rate": 4.87458449479601e-06, |
|
"loss": 0.3632, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.33961753482937246, |
|
"learning_rate": 4.857008437991222e-06, |
|
"loss": 0.3628, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.3346934313378748, |
|
"learning_rate": 4.8393920132778144e-06, |
|
"loss": 0.3386, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.3742410827458272, |
|
"learning_rate": 4.821735744704276e-06, |
|
"loss": 0.3719, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.3467492717364462, |
|
"learning_rate": 4.804040157504361e-06, |
|
"loss": 0.3519, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.31898977746556895, |
|
"learning_rate": 4.786305778081462e-06, |
|
"loss": 0.3499, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.3725664879305011, |
|
"learning_rate": 4.7685331339929555e-06, |
|
"loss": 0.3703, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.35775844350818625, |
|
"learning_rate": 4.750722753934501e-06, |
|
"loss": 0.3452, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.33251154393279203, |
|
"learning_rate": 4.732875167724318e-06, |
|
"loss": 0.3786, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.3652428072641884, |
|
"learning_rate": 4.714990906287423e-06, |
|
"loss": 0.3886, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.34153035774512613, |
|
"learning_rate": 4.697070501639841e-06, |
|
"loss": 0.3803, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.33024253081562993, |
|
"learning_rate": 4.67911448687277e-06, |
|
"loss": 0.3841, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.31352646672532924, |
|
"learning_rate": 4.661123396136733e-06, |
|
"loss": 0.3575, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.3591115191111562, |
|
"learning_rate": 4.643097764625678e-06, |
|
"loss": 0.3569, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.3727369743742984, |
|
"learning_rate": 4.625038128561065e-06, |
|
"loss": 0.3735, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.35276922439582065, |
|
"learning_rate": 4.606945025175914e-06, |
|
"loss": 0.3558, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.3769733317611369, |
|
"learning_rate": 4.588818992698818e-06, |
|
"loss": 0.3707, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.33808066630991396, |
|
"learning_rate": 4.570660570337937e-06, |
|
"loss": 0.3436, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.324817784328749, |
|
"learning_rate": 4.552470298264956e-06, |
|
"loss": 0.3479, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.32662676785729616, |
|
"learning_rate": 4.534248717599016e-06, |
|
"loss": 0.3255, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.326421759505535, |
|
"learning_rate": 4.5159963703906175e-06, |
|
"loss": 0.3471, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.34011208528881004, |
|
"learning_rate": 4.497713799605498e-06, |
|
"loss": 0.356, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.3794880482586398, |
|
"learning_rate": 4.479401549108473e-06, |
|
"loss": 0.3915, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.3210491323588062, |
|
"learning_rate": 4.4610601636472636e-06, |
|
"loss": 0.3576, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.3419966824370152, |
|
"learning_rate": 4.442690188836292e-06, |
|
"loss": 0.3596, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.3689177192376415, |
|
"learning_rate": 4.424292171140445e-06, |
|
"loss": 0.3794, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.37058353484814044, |
|
"learning_rate": 4.405866657858823e-06, |
|
"loss": 0.377, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.6109797175104753, |
|
"learning_rate": 4.387414197108459e-06, |
|
"loss": 0.3364, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.31458714468904536, |
|
"learning_rate": 4.368935337808006e-06, |
|
"loss": 0.327, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.36105992330152553, |
|
"learning_rate": 4.350430629661424e-06, |
|
"loss": 0.3412, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.3406357854301439, |
|
"learning_rate": 4.3319006231416055e-06, |
|
"loss": 0.3384, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.3737736032761594, |
|
"learning_rate": 4.313345869474022e-06, |
|
"loss": 0.3584, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.35537041074074627, |
|
"learning_rate": 4.294766920620306e-06, |
|
"loss": 0.3719, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.3409493794346439, |
|
"learning_rate": 4.276164329261853e-06, |
|
"loss": 0.3573, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.34029967350445056, |
|
"learning_rate": 4.25753864878336e-06, |
|
"loss": 0.3334, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.3513848739355917, |
|
"learning_rate": 4.238890433256378e-06, |
|
"loss": 0.3486, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.3336942534916315, |
|
"learning_rate": 4.220220237422822e-06, |
|
"loss": 0.3313, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.33387764080245036, |
|
"learning_rate": 4.201528616678472e-06, |
|
"loss": 0.3557, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.3881832942635055, |
|
"learning_rate": 4.182816127056449e-06, |
|
"loss": 0.3486, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.32396387681049105, |
|
"learning_rate": 4.1640833252106775e-06, |
|
"loss": 0.3358, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.3498684089335312, |
|
"learning_rate": 4.1453307683993216e-06, |
|
"loss": 0.3513, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.3806936187217104, |
|
"learning_rate": 4.1265590144682155e-06, |
|
"loss": 0.3617, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.33925242861989363, |
|
"learning_rate": 4.107768621834257e-06, |
|
"loss": 0.3534, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.3300810784465674, |
|
"learning_rate": 4.088960149468808e-06, |
|
"loss": 0.352, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.3404645723726401, |
|
"learning_rate": 4.070134156881061e-06, |
|
"loss": 0.3409, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.3655966353045549, |
|
"learning_rate": 4.051291204101393e-06, |
|
"loss": 0.3196, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.37730833103284855, |
|
"learning_rate": 4.032431851664708e-06, |
|
"loss": 0.3416, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.3302002834245634, |
|
"learning_rate": 4.013556660593766e-06, |
|
"loss": 0.3456, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.35855386728187316, |
|
"learning_rate": 3.9946661923824864e-06, |
|
"loss": 0.3523, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.36564829262836096, |
|
"learning_rate": 3.97576100897925e-06, |
|
"loss": 0.3556, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.3668383201799081, |
|
"learning_rate": 3.956841672770181e-06, |
|
"loss": 0.349, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.3138509419345777, |
|
"learning_rate": 3.937908746562417e-06, |
|
"loss": 0.3345, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.3887214100972992, |
|
"learning_rate": 3.918962793567368e-06, |
|
"loss": 0.3284, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.3614318215285855, |
|
"learning_rate": 3.900004377383963e-06, |
|
"loss": 0.3223, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.3357424147189796, |
|
"learning_rate": 3.881034061981876e-06, |
|
"loss": 0.3262, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.3454698621783415, |
|
"learning_rate": 3.862052411684763e-06, |
|
"loss": 0.3313, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.35334068407857966, |
|
"learning_rate": 3.843059991153463e-06, |
|
"loss": 0.3277, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.3918670742282634, |
|
"learning_rate": 3.824057365369205e-06, |
|
"loss": 0.3399, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.3739422718773499, |
|
"learning_rate": 3.805045099616804e-06, |
|
"loss": 0.3595, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.34257555009987084, |
|
"learning_rate": 3.786023759467839e-06, |
|
"loss": 0.3465, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.32802479405729623, |
|
"learning_rate": 3.766993910763834e-06, |
|
"loss": 0.3369, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.3203688904452787, |
|
"learning_rate": 3.7479561195994195e-06, |
|
"loss": 0.3347, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.3410536222408465, |
|
"learning_rate": 3.728910952305501e-06, |
|
"loss": 0.3582, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.31478038592605895, |
|
"learning_rate": 3.7098589754324037e-06, |
|
"loss": 0.3532, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.32415333055330847, |
|
"learning_rate": 3.6908007557330225e-06, |
|
"loss": 0.3401, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.35602242055976896, |
|
"learning_rate": 3.6717368601459635e-06, |
|
"loss": 0.3601, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.3497515154481127, |
|
"learning_rate": 3.6526678557786763e-06, |
|
"loss": 0.3413, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.34412660600849004, |
|
"learning_rate": 3.633594309890586e-06, |
|
"loss": 0.3452, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.33351558637954704, |
|
"learning_rate": 3.6145167898762167e-06, |
|
"loss": 0.3444, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.3419766355994506, |
|
"learning_rate": 3.595435863248315e-06, |
|
"loss": 0.3471, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.3251581136971839, |
|
"learning_rate": 3.576352097620964e-06, |
|
"loss": 0.3308, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.381853253533654, |
|
"learning_rate": 3.557266060692704e-06, |
|
"loss": 0.3757, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.34450659180881255, |
|
"learning_rate": 3.5381783202296382e-06, |
|
"loss": 0.3323, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.3400017029410289, |
|
"learning_rate": 3.5190894440485483e-06, |
|
"loss": 0.3461, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.36714525068941245, |
|
"learning_rate": 3.5e-06, |
|
"loss": 0.3507, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.3113553611505031, |
|
"learning_rate": 3.480910555951451e-06, |
|
"loss": 0.3146, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.3570421820064217, |
|
"learning_rate": 3.461821679770362e-06, |
|
"loss": 0.3264, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.38077478251107827, |
|
"learning_rate": 3.442733939307296e-06, |
|
"loss": 0.3667, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.3743349045152784, |
|
"learning_rate": 3.4236479023790363e-06, |
|
"loss": 0.3694, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.3230112425756345, |
|
"learning_rate": 3.4045641367516857e-06, |
|
"loss": 0.3344, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.3423897120742137, |
|
"learning_rate": 3.3854832101237836e-06, |
|
"loss": 0.3606, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.3461281931644015, |
|
"learning_rate": 3.366405690109414e-06, |
|
"loss": 0.3425, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.3432644044960223, |
|
"learning_rate": 3.3473321442213245e-06, |
|
"loss": 0.3466, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.34805031939451947, |
|
"learning_rate": 3.328263139854037e-06, |
|
"loss": 0.3668, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.34793326646142414, |
|
"learning_rate": 3.3091992442669774e-06, |
|
"loss": 0.3699, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.35783347717306163, |
|
"learning_rate": 3.290141024567597e-06, |
|
"loss": 0.3401, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.33765692223581617, |
|
"learning_rate": 3.271089047694499e-06, |
|
"loss": 0.3502, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.358125416429268, |
|
"learning_rate": 3.252043880400581e-06, |
|
"loss": 0.3677, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.3645605208958676, |
|
"learning_rate": 3.2330060892361665e-06, |
|
"loss": 0.3565, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.35182249395160453, |
|
"learning_rate": 3.2139762405321623e-06, |
|
"loss": 0.3256, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.36526279600710565, |
|
"learning_rate": 3.1949549003831962e-06, |
|
"loss": 0.3496, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.3417604543854069, |
|
"learning_rate": 3.1759426346307963e-06, |
|
"loss": 0.3556, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.3538576376640646, |
|
"learning_rate": 3.1569400088465375e-06, |
|
"loss": 0.3447, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.34693771863093653, |
|
"learning_rate": 3.137947588315237e-06, |
|
"loss": 0.3426, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.38481964319346323, |
|
"learning_rate": 3.118965938018125e-06, |
|
"loss": 0.3498, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.34675521547578386, |
|
"learning_rate": 3.099995622616037e-06, |
|
"loss": 0.3268, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.3372531716548693, |
|
"learning_rate": 3.0810372064326317e-06, |
|
"loss": 0.3461, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.38104519894600813, |
|
"learning_rate": 3.0620912534375834e-06, |
|
"loss": 0.3728, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.3466001656961185, |
|
"learning_rate": 3.0431583272298204e-06, |
|
"loss": 0.3244, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.37591315716887963, |
|
"learning_rate": 3.0242389910207505e-06, |
|
"loss": 0.3521, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.3265662120545945, |
|
"learning_rate": 3.0053338076175147e-06, |
|
"loss": 0.334, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.3457254552647638, |
|
"learning_rate": 2.986443339406234e-06, |
|
"loss": 0.3264, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.37047097912111926, |
|
"learning_rate": 2.9675681483352915e-06, |
|
"loss": 0.3401, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.34065288744920735, |
|
"learning_rate": 2.948708795898608e-06, |
|
"loss": 0.3444, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.33705540164600195, |
|
"learning_rate": 2.9298658431189395e-06, |
|
"loss": 0.3219, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.33349856248341736, |
|
"learning_rate": 2.9110398505311927e-06, |
|
"loss": 0.3414, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.35835514865052853, |
|
"learning_rate": 2.8922313781657437e-06, |
|
"loss": 0.3586, |
|
"step": 384 |
|
}, |
|
    {
      "epoch": 3.01,
      "grad_norm": 0.4112703391832232,
      "learning_rate": 2.873440985531786e-06,
      "loss": 0.3742,
      "step": 385
    },
    {
      "epoch": 3.02,
      "grad_norm": 0.35218060775962634,
      "learning_rate": 2.8546692316006783e-06,
      "loss": 0.333,
      "step": 386
    },
    {
      "epoch": 3.02,
      "grad_norm": 0.3272084779142217,
      "learning_rate": 2.8359166747893237e-06,
      "loss": 0.3106,
      "step": 387
    },
    {
      "epoch": 3.03,
      "grad_norm": 0.35734611520518955,
      "learning_rate": 2.8171838729435513e-06,
      "loss": 0.3393,
      "step": 388
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.38149060855665573,
      "learning_rate": 2.7984713833215282e-06,
      "loss": 0.3512,
      "step": 389
    },
    {
      "epoch": 3.05,
      "grad_norm": 0.37164588957773215,
      "learning_rate": 2.779779762577178e-06,
      "loss": 0.3247,
      "step": 390
    },
    {
      "epoch": 3.05,
      "grad_norm": 0.3722544608089407,
      "learning_rate": 2.761109566743622e-06,
      "loss": 0.3492,
      "step": 391
    },
    {
      "epoch": 3.06,
      "grad_norm": 0.3934397097005587,
      "learning_rate": 2.7424613512166398e-06,
      "loss": 0.327,
      "step": 392
    },
    {
      "epoch": 3.07,
      "grad_norm": 0.3654030239595223,
      "learning_rate": 2.7238356707381474e-06,
      "loss": 0.3488,
      "step": 393
    },
    {
      "epoch": 3.08,
      "grad_norm": 0.3503984335464161,
      "learning_rate": 2.705233079379694e-06,
      "loss": 0.3266,
      "step": 394
    },
    {
      "epoch": 3.09,
      "grad_norm": 0.32818020407257076,
      "learning_rate": 2.6866541305259794e-06,
      "loss": 0.3309,
      "step": 395
    },
    {
      "epoch": 3.09,
      "grad_norm": 0.377998199299507,
      "learning_rate": 2.6680993768583944e-06,
      "loss": 0.371,
      "step": 396
    },
    {
      "epoch": 3.1,
      "grad_norm": 0.34713137652697956,
      "learning_rate": 2.6495693703385765e-06,
      "loss": 0.3464,
      "step": 397
    },
    {
      "epoch": 3.11,
      "grad_norm": 0.3620196231959417,
      "learning_rate": 2.6310646621919923e-06,
      "loss": 0.3519,
      "step": 398
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.3652450097403223,
      "learning_rate": 2.612585802891542e-06,
      "loss": 0.355,
      "step": 399
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.3561392599782829,
      "learning_rate": 2.594133342141177e-06,
      "loss": 0.3489,
      "step": 400
    },
    {
      "epoch": 3.13,
      "grad_norm": 0.3615565981799769,
      "learning_rate": 2.575707828859556e-06,
      "loss": 0.3523,
      "step": 401
    },
    {
      "epoch": 3.14,
      "grad_norm": 0.3152156285999487,
      "learning_rate": 2.5573098111637088e-06,
      "loss": 0.3232,
      "step": 402
    },
    {
      "epoch": 3.15,
      "grad_norm": 0.35211559538338943,
      "learning_rate": 2.5389398363527376e-06,
      "loss": 0.3689,
      "step": 403
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.3709163488846079,
      "learning_rate": 2.5205984508915277e-06,
      "loss": 0.3624,
      "step": 404
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.34807484716577025,
      "learning_rate": 2.502286200394503e-06,
      "loss": 0.3441,
      "step": 405
    },
    {
      "epoch": 3.17,
      "grad_norm": 0.32986214092483723,
      "learning_rate": 2.4840036296093824e-06,
      "loss": 0.327,
      "step": 406
    },
    {
      "epoch": 3.18,
      "grad_norm": 0.34365285959643166,
      "learning_rate": 2.465751282400983e-06,
      "loss": 0.3234,
      "step": 407
    },
    {
      "epoch": 3.19,
      "grad_norm": 0.33477375466750364,
      "learning_rate": 2.4475297017350445e-06,
      "loss": 0.2972,
      "step": 408
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.3530313958823634,
      "learning_rate": 2.4293394296620625e-06,
      "loss": 0.3343,
      "step": 409
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.3390812879356889,
      "learning_rate": 2.411181007301182e-06,
      "loss": 0.3138,
      "step": 410
    },
    {
      "epoch": 3.21,
      "grad_norm": 0.33512846317555633,
      "learning_rate": 2.393054974824086e-06,
      "loss": 0.3177,
      "step": 411
    },
    {
      "epoch": 3.22,
      "grad_norm": 0.3580805399639594,
      "learning_rate": 2.374961871438935e-06,
      "loss": 0.3327,
      "step": 412
    },
    {
      "epoch": 3.23,
      "grad_norm": 0.3372867599039067,
      "learning_rate": 2.356902235374323e-06,
      "loss": 0.3127,
      "step": 413
    },
    {
      "epoch": 3.23,
      "grad_norm": 0.33276453513059345,
      "learning_rate": 2.3388766038632686e-06,
      "loss": 0.3436,
      "step": 414
    },
    {
      "epoch": 3.24,
      "grad_norm": 0.3576415009985178,
      "learning_rate": 2.32088551312723e-06,
      "loss": 0.3515,
      "step": 415
    },
    {
      "epoch": 3.25,
      "grad_norm": 0.3294166745203506,
      "learning_rate": 2.3029294983601598e-06,
      "loss": 0.3458,
      "step": 416
    },
    {
      "epoch": 3.26,
      "grad_norm": 0.3419414807485379,
      "learning_rate": 2.2850090937125775e-06,
      "loss": 0.3443,
      "step": 417
    },
    {
      "epoch": 3.27,
      "grad_norm": 0.32368745015623135,
      "learning_rate": 2.2671248322756827e-06,
      "loss": 0.3235,
      "step": 418
    },
    {
      "epoch": 3.27,
      "grad_norm": 0.34897803123816196,
      "learning_rate": 2.2492772460655e-06,
      "loss": 0.3241,
      "step": 419
    },
    {
      "epoch": 3.28,
      "grad_norm": 0.34347437595206803,
      "learning_rate": 2.2314668660070444e-06,
      "loss": 0.3363,
      "step": 420
    },
    {
      "epoch": 3.29,
      "grad_norm": 0.3594982446723844,
      "learning_rate": 2.213694221918538e-06,
      "loss": 0.3187,
      "step": 421
    },
    {
      "epoch": 3.3,
      "grad_norm": 0.3447190484440951,
      "learning_rate": 2.1959598424956397e-06,
      "loss": 0.3334,
      "step": 422
    },
    {
      "epoch": 3.3,
      "grad_norm": 0.32223135788338164,
      "learning_rate": 2.178264255295725e-06,
      "loss": 0.3158,
      "step": 423
    },
    {
      "epoch": 3.31,
      "grad_norm": 0.33566912281178546,
      "learning_rate": 2.160607986722186e-06,
      "loss": 0.3113,
      "step": 424
    },
    {
      "epoch": 3.32,
      "grad_norm": 0.33595879664180733,
      "learning_rate": 2.1429915620087775e-06,
      "loss": 0.2925,
      "step": 425
    },
    {
      "epoch": 3.33,
      "grad_norm": 0.313823398660823,
      "learning_rate": 2.125415505203991e-06,
      "loss": 0.3169,
      "step": 426
    },
    {
      "epoch": 3.34,
      "grad_norm": 0.3353191261291372,
      "learning_rate": 2.1078803391554616e-06,
      "loss": 0.3228,
      "step": 427
    },
    {
      "epoch": 3.34,
      "grad_norm": 0.3616603942250458,
      "learning_rate": 2.09038658549442e-06,
      "loss": 0.3513,
      "step": 428
    },
    {
      "epoch": 3.35,
      "grad_norm": 0.31987262953616186,
      "learning_rate": 2.0729347646201717e-06,
      "loss": 0.3256,
      "step": 429
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.34523684765216606,
      "learning_rate": 2.0555253956846186e-06,
      "loss": 0.3251,
      "step": 430
    },
    {
      "epoch": 3.37,
      "grad_norm": 0.3721898929848971,
      "learning_rate": 2.0381589965768126e-06,
      "loss": 0.3374,
      "step": 431
    },
    {
      "epoch": 3.38,
      "grad_norm": 0.34400955797489746,
      "learning_rate": 2.0208360839075525e-06,
      "loss": 0.3399,
      "step": 432
    },
    {
      "epoch": 3.38,
      "grad_norm": 0.4080412320465832,
      "learning_rate": 2.0035571729940133e-06,
      "loss": 0.3009,
      "step": 433
    },
    {
      "epoch": 3.39,
      "grad_norm": 0.32577460503727845,
      "learning_rate": 1.9863227778444176e-06,
      "loss": 0.2963,
      "step": 434
    },
|
    {
      "epoch": 3.4,
      "grad_norm": 0.33702369973839313,
      "learning_rate": 1.9691334111427506e-06,
      "loss": 0.3057,
      "step": 435
    },
    {
      "epoch": 3.41,
      "grad_norm": 0.3327729761550235,
      "learning_rate": 1.951989584233496e-06,
      "loss": 0.3026,
      "step": 436
    },
    {
      "epoch": 3.41,
      "grad_norm": 0.33776747658890194,
      "learning_rate": 1.934891807106438e-06,
      "loss": 0.3255,
      "step": 437
    },
    {
      "epoch": 3.42,
      "grad_norm": 0.3376479216926523,
      "learning_rate": 1.917840588381481e-06,
      "loss": 0.3361,
      "step": 438
    },
    {
      "epoch": 3.43,
      "grad_norm": 0.33956065490995724,
      "learning_rate": 1.9008364352935276e-06,
      "loss": 0.3213,
      "step": 439
    },
    {
      "epoch": 3.44,
      "grad_norm": 0.3202103047195963,
      "learning_rate": 1.8838798536773821e-06,
      "loss": 0.3014,
      "step": 440
    },
    {
      "epoch": 3.45,
      "grad_norm": 0.32958945998206785,
      "learning_rate": 1.8669713479527048e-06,
      "loss": 0.3201,
      "step": 441
    },
    {
      "epoch": 3.45,
      "grad_norm": 0.3266771071234881,
      "learning_rate": 1.850111421109008e-06,
      "loss": 0.2984,
      "step": 442
    },
    {
      "epoch": 3.46,
      "grad_norm": 0.33932931342987876,
      "learning_rate": 1.8333005746906976e-06,
      "loss": 0.3213,
      "step": 443
    },
    {
      "epoch": 3.47,
      "grad_norm": 0.35764343040036173,
      "learning_rate": 1.8165393087821438e-06,
      "loss": 0.3142,
      "step": 444
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.3317934468561935,
      "learning_rate": 1.7998281219928094e-06,
      "loss": 0.304,
      "step": 445
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.32267754599110376,
      "learning_rate": 1.783167511442422e-06,
      "loss": 0.3147,
      "step": 446
    },
    {
      "epoch": 3.49,
      "grad_norm": 0.3593732066521806,
      "learning_rate": 1.7665579727461771e-06,
      "loss": 0.3226,
      "step": 447
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.3399224799991492,
      "learning_rate": 1.7500000000000008e-06,
      "loss": 0.3217,
      "step": 448
    },
    {
      "epoch": 3.51,
      "grad_norm": 0.33121444418447826,
      "learning_rate": 1.7334940857658472e-06,
      "loss": 0.3205,
      "step": 449
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.32955427160524653,
      "learning_rate": 1.7170407210570539e-06,
      "loss": 0.3102,
      "step": 450
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.3362729432822011,
      "learning_rate": 1.700640395323724e-06,
      "loss": 0.2848,
      "step": 451
    },
    {
      "epoch": 3.53,
      "grad_norm": 0.3452937005495939,
      "learning_rate": 1.6842935964381741e-06,
      "loss": 0.3076,
      "step": 452
    },
    {
      "epoch": 3.54,
      "grad_norm": 0.3259636359618341,
      "learning_rate": 1.6680008106804213e-06,
      "loss": 0.3099,
      "step": 453
    },
    {
      "epoch": 3.55,
      "grad_norm": 0.3410314706263269,
      "learning_rate": 1.651762522723712e-06,
      "loss": 0.3175,
      "step": 454
    },
    {
      "epoch": 3.55,
      "grad_norm": 0.3289205759105589,
      "learning_rate": 1.6355792156201085e-06,
      "loss": 0.3245,
      "step": 455
    },
    {
      "epoch": 3.56,
      "grad_norm": 0.3494368432511646,
      "learning_rate": 1.619451370786116e-06,
      "loss": 0.3194,
      "step": 456
    },
    {
      "epoch": 3.57,
      "grad_norm": 0.3373773656095024,
      "learning_rate": 1.60337946798837e-06,
      "loss": 0.307,
      "step": 457
    },
    {
      "epoch": 3.58,
      "grad_norm": 0.34836673049287364,
      "learning_rate": 1.5873639853293484e-06,
      "loss": 0.2942,
      "step": 458
    },
    {
      "epoch": 3.59,
      "grad_norm": 0.336243339029711,
      "learning_rate": 1.5714053992331667e-06,
      "loss": 0.2902,
      "step": 459
    },
    {
      "epoch": 3.59,
      "grad_norm": 0.3336044821385557,
      "learning_rate": 1.5555041844313931e-06,
      "loss": 0.2935,
      "step": 460
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.3855406448411789,
      "learning_rate": 1.5396608139489307e-06,
      "loss": 0.2987,
      "step": 461
    },
    {
      "epoch": 3.61,
      "grad_norm": 0.3786473105435691,
      "learning_rate": 1.5238757590899485e-06,
      "loss": 0.2932,
      "step": 462
    },
    {
      "epoch": 3.62,
      "grad_norm": 0.3439557208763382,
      "learning_rate": 1.5081494894238554e-06,
      "loss": 0.3114,
      "step": 463
    },
    {
      "epoch": 3.62,
      "grad_norm": 0.3610410823922369,
      "learning_rate": 1.4924824727713396e-06,
      "loss": 0.3222,
      "step": 464
    },
    {
      "epoch": 3.63,
      "grad_norm": 0.3407026252586559,
      "learning_rate": 1.4768751751904387e-06,
      "loss": 0.3128,
      "step": 465
    },
    {
      "epoch": 3.64,
      "grad_norm": 0.33809041981905197,
      "learning_rate": 1.4613280609626928e-06,
      "loss": 0.3087,
      "step": 466
    },
    {
      "epoch": 3.65,
      "grad_norm": 0.35834168771366504,
      "learning_rate": 1.4458415925793196e-06,
      "loss": 0.3077,
      "step": 467
    },
    {
      "epoch": 3.66,
      "grad_norm": 0.38871459853044665,
      "learning_rate": 1.4304162307274625e-06,
      "loss": 0.3272,
      "step": 468
    },
    {
      "epoch": 3.66,
      "grad_norm": 0.3463091561520958,
      "learning_rate": 1.4150524342764833e-06,
      "loss": 0.3226,
      "step": 469
    },
    {
      "epoch": 3.67,
      "grad_norm": 0.33988619370787143,
      "learning_rate": 1.399750660264317e-06,
      "loss": 0.3104,
      "step": 470
    },
    {
      "epoch": 3.68,
      "grad_norm": 0.34938108700342674,
      "learning_rate": 1.384511363883869e-06,
      "loss": 0.3281,
      "step": 471
    },
    {
      "epoch": 3.69,
      "grad_norm": 0.33376707095971697,
      "learning_rate": 1.3693349984694776e-06,
      "loss": 0.3116,
      "step": 472
    },
    {
      "epoch": 3.7,
      "grad_norm": 0.3316263604682683,
      "learning_rate": 1.3542220154834316e-06,
      "loss": 0.316,
      "step": 473
    },
    {
      "epoch": 3.7,
      "grad_norm": 0.3526534924986156,
      "learning_rate": 1.339172864502533e-06,
      "loss": 0.3136,
      "step": 474
    },
    {
      "epoch": 3.71,
      "grad_norm": 0.34150892420283485,
      "learning_rate": 1.32418799320473e-06,
      "loss": 0.3143,
      "step": 475
    },
    {
      "epoch": 3.72,
      "grad_norm": 0.3184132035524764,
      "learning_rate": 1.3092678473557933e-06,
      "loss": 0.3056,
      "step": 476
    },
    {
      "epoch": 3.73,
      "grad_norm": 0.3507359963811024,
      "learning_rate": 1.294412870796064e-06,
      "loss": 0.3424,
      "step": 477
    },
    {
      "epoch": 3.73,
      "grad_norm": 0.32578196789023506,
      "learning_rate": 1.2796235054272411e-06,
      "loss": 0.3022,
      "step": 478
    },
    {
      "epoch": 3.74,
      "grad_norm": 0.32010441528418093,
      "learning_rate": 1.2649001911992413e-06,
      "loss": 0.3183,
      "step": 479
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.3708179399239553,
      "learning_rate": 1.2502433660971122e-06,
      "loss": 0.3167,
      "step": 480
    },
    {
      "epoch": 3.76,
      "grad_norm": 0.3127471229910051,
      "learning_rate": 1.2356534661279994e-06,
      "loss": 0.2867,
      "step": 481
    },
    {
      "epoch": 3.77,
      "grad_norm": 0.34260266839337344,
      "learning_rate": 1.2211309253081786e-06,
      "loss": 0.2982,
      "step": 482
    },
    {
      "epoch": 3.77,
      "grad_norm": 0.3324510300552272,
      "learning_rate": 1.2066761756501436e-06,
      "loss": 0.3354,
      "step": 483
    },
    {
      "epoch": 3.78,
      "grad_norm": 0.3596583818265582,
      "learning_rate": 1.192289647149759e-06,
      "loss": 0.337,
      "step": 484
    },
|
    {
      "epoch": 3.79,
      "grad_norm": 0.3199547049033629,
      "learning_rate": 1.1779717677734615e-06,
      "loss": 0.3055,
      "step": 485
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.3263505155905279,
      "learning_rate": 1.1637229634455348e-06,
      "loss": 0.3327,
      "step": 486
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.340329924281869,
      "learning_rate": 1.1495436580354353e-06,
      "loss": 0.3142,
      "step": 487
    },
    {
      "epoch": 3.81,
      "grad_norm": 0.3259441664576382,
      "learning_rate": 1.135434273345189e-06,
      "loss": 0.3181,
      "step": 488
    },
    {
      "epoch": 3.82,
      "grad_norm": 0.3430436145582583,
      "learning_rate": 1.1213952290968368e-06,
      "loss": 0.339,
      "step": 489
    },
    {
      "epoch": 3.83,
      "grad_norm": 0.3542173756029115,
      "learning_rate": 1.1074269429199503e-06,
      "loss": 0.3419,
      "step": 490
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.33473973725214246,
      "learning_rate": 1.093529830339214e-06,
      "loss": 0.3128,
      "step": 491
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.331525816801152,
      "learning_rate": 1.0797043047620575e-06,
      "loss": 0.3209,
      "step": 492
    },
    {
      "epoch": 3.85,
      "grad_norm": 0.35046397496028414,
      "learning_rate": 1.0659507774663595e-06,
      "loss": 0.3387,
      "step": 493
    },
    {
      "epoch": 3.86,
      "grad_norm": 0.34176605906283997,
      "learning_rate": 1.0522696575882148e-06,
      "loss": 0.328,
      "step": 494
    },
    {
      "epoch": 3.87,
      "grad_norm": 0.31267583422913253,
      "learning_rate": 1.0386613521097656e-06,
      "loss": 0.3017,
      "step": 495
    },
    {
      "epoch": 3.88,
      "grad_norm": 0.34018126548941485,
      "learning_rate": 1.0251262658470838e-06,
      "loss": 0.322,
      "step": 496
    },
    {
      "epoch": 3.88,
      "grad_norm": 0.33129322774090264,
      "learning_rate": 1.0116648014381442e-06,
      "loss": 0.3271,
      "step": 497
    },
    {
      "epoch": 3.89,
      "grad_norm": 0.36466718788465824,
      "learning_rate": 9.982773593308383e-07,
      "loss": 0.3145,
      "step": 498
    },
    {
      "epoch": 3.9,
      "grad_norm": 0.3347289464190966,
      "learning_rate": 9.849643377710566e-07,
      "loss": 0.3156,
      "step": 499
    },
    {
      "epoch": 3.91,
      "grad_norm": 0.35870097599007505,
      "learning_rate": 9.717261327908557e-07,
      "loss": 0.3212,
      "step": 500
    },
    {
      "epoch": 3.91,
      "grad_norm": 0.3404942487890118,
      "learning_rate": 9.585631381966645e-07,
      "loss": 0.3006,
      "step": 501
    },
    {
      "epoch": 3.92,
      "grad_norm": 0.3450349827459326,
      "learning_rate": 9.454757455575762e-07,
      "loss": 0.3188,
      "step": 502
    },
    {
      "epoch": 3.93,
      "grad_norm": 0.3631808968816027,
      "learning_rate": 9.324643441936959e-07,
      "loss": 0.3424,
      "step": 503
    },
    {
      "epoch": 3.94,
      "grad_norm": 0.32472675900072473,
      "learning_rate": 9.195293211645661e-07,
      "loss": 0.2989,
      "step": 504
    },
    {
      "epoch": 3.95,
      "grad_norm": 0.3497374588970107,
      "learning_rate": 9.066710612576439e-07,
      "loss": 0.3203,
      "step": 505
    },
    {
      "epoch": 3.95,
      "grad_norm": 0.32654566686001346,
      "learning_rate": 8.938899469768581e-07,
      "loss": 0.3103,
      "step": 506
    },
    {
      "epoch": 3.96,
      "grad_norm": 0.3473781125149438,
      "learning_rate": 8.811863585312348e-07,
      "loss": 0.3002,
      "step": 507
    },
    {
      "epoch": 3.97,
      "grad_norm": 0.3395838807842396,
      "learning_rate": 8.685606738235796e-07,
      "loss": 0.3152,
      "step": 508
    },
    {
      "epoch": 3.98,
      "grad_norm": 0.3578724503519501,
      "learning_rate": 8.560132684392404e-07,
      "loss": 0.32,
      "step": 509
    },
    {
      "epoch": 3.98,
      "grad_norm": 0.3177446511801698,
      "learning_rate": 8.435445156349334e-07,
      "loss": 0.2991,
      "step": 510
    },
    {
      "epoch": 3.99,
      "grad_norm": 0.34017313516990605,
      "learning_rate": 8.311547863276417e-07,
      "loss": 0.3195,
      "step": 511
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.3411873808454031,
      "learning_rate": 8.188444490835774e-07,
      "loss": 0.333,
      "step": 512
    },
    {
      "epoch": 4.01,
      "grad_norm": 0.39461744589677294,
      "learning_rate": 8.066138701072195e-07,
      "loss": 0.343,
      "step": 513
    },
    {
      "epoch": 4.02,
      "grad_norm": 0.3310920364932005,
      "learning_rate": 7.944634132304205e-07,
      "loss": 0.3086,
      "step": 514
    },
    {
      "epoch": 4.02,
      "grad_norm": 0.3230577112664011,
      "learning_rate": 7.823934399015856e-07,
      "loss": 0.2861,
      "step": 515
    },
    {
      "epoch": 4.03,
      "grad_norm": 0.33762915771622776,
      "learning_rate": 7.704043091749143e-07,
      "loss": 0.3134,
      "step": 516
    },
    {
      "epoch": 4.04,
      "grad_norm": 0.3577729507142605,
      "learning_rate": 7.584963776997237e-07,
      "loss": 0.3221,
      "step": 517
    },
    {
      "epoch": 4.05,
      "grad_norm": 0.33785584510404026,
      "learning_rate": 7.466699997098405e-07,
      "loss": 0.2977,
      "step": 518
    },
    {
      "epoch": 4.05,
      "grad_norm": 0.3686247902420513,
      "learning_rate": 7.349255270130589e-07,
      "loss": 0.3205,
      "step": 519
    },
    {
      "epoch": 4.06,
      "grad_norm": 0.3704478433650854,
      "learning_rate": 7.232633089806773e-07,
      "loss": 0.3012,
      "step": 520
    },
    {
      "epoch": 4.07,
      "grad_norm": 0.3567295734464394,
      "learning_rate": 7.116836925371055e-07,
      "loss": 0.3219,
      "step": 521
    },
    {
      "epoch": 4.08,
      "grad_norm": 0.3641001366574356,
      "learning_rate": 7.001870221495463e-07,
      "loss": 0.2991,
      "step": 522
    },
    {
      "epoch": 4.09,
      "grad_norm": 0.3504305641012147,
      "learning_rate": 6.88773639817743e-07,
      "loss": 0.3082,
      "step": 523
    },
    {
      "epoch": 4.09,
      "grad_norm": 0.3795835236074235,
      "learning_rate": 6.774438850638107e-07,
      "loss": 0.345,
      "step": 524
    },
    {
      "epoch": 4.1,
      "grad_norm": 0.3466954727567642,
      "learning_rate": 6.661980949221356e-07,
      "loss": 0.3249,
      "step": 525
    },
    {
      "epoch": 4.11,
      "grad_norm": 0.35305077141361857,
      "learning_rate": 6.550366039293471e-07,
      "loss": 0.329,
      "step": 526
    },
    {
      "epoch": 4.12,
      "grad_norm": 0.3629881477132717,
      "learning_rate": 6.439597441143655e-07,
      "loss": 0.3319,
      "step": 527
    },
    {
      "epoch": 4.12,
      "grad_norm": 0.34628496427804784,
      "learning_rate": 6.329678449885283e-07,
      "loss": 0.3271,
      "step": 528
    },
    {
      "epoch": 4.13,
      "grad_norm": 0.3458963677412187,
      "learning_rate": 6.22061233535788e-07,
      "loss": 0.3287,
      "step": 529
    },
    {
      "epoch": 4.14,
      "grad_norm": 0.3258984143176478,
      "learning_rate": 6.112402342029767e-07,
      "loss": 0.3045,
      "step": 530
    },
    {
      "epoch": 4.15,
      "grad_norm": 0.35759231804803177,
      "learning_rate": 6.005051688901686e-07,
      "loss": 0.3466,
      "step": 531
    },
    {
      "epoch": 4.16,
      "grad_norm": 0.3808260746862728,
      "learning_rate": 5.898563569410913e-07,
      "loss": 0.3381,
      "step": 532
    },
    {
      "epoch": 4.16,
      "grad_norm": 0.3680045197463959,
      "learning_rate": 5.79294115133635e-07,
      "loss": 0.3224,
      "step": 533
    },
    {
      "epoch": 4.17,
      "grad_norm": 0.33757707750018473,
      "learning_rate": 5.688187576704227e-07,
      "loss": 0.3063,
      "step": 534
    },
|
    {
      "epoch": 4.18,
      "grad_norm": 0.34941820581500316,
      "learning_rate": 5.584305961694664e-07,
      "loss": 0.3008,
      "step": 535
    },
    {
      "epoch": 4.19,
      "grad_norm": 0.36034623026164275,
      "learning_rate": 5.481299396549007e-07,
      "loss": 0.2738,
      "step": 536
    },
    {
      "epoch": 4.2,
      "grad_norm": 0.3455392371123542,
      "learning_rate": 5.379170945477797e-07,
      "loss": 0.3125,
      "step": 537
    },
    {
      "epoch": 4.2,
      "grad_norm": 0.3308793612340533,
      "learning_rate": 5.277923646569743e-07,
      "loss": 0.2914,
      "step": 538
    },
    {
      "epoch": 4.21,
      "grad_norm": 0.31467610286094216,
      "learning_rate": 5.177560511701249e-07,
      "loss": 0.2987,
      "step": 539
    },
    {
      "epoch": 4.22,
      "grad_norm": 0.34939004004336754,
      "learning_rate": 5.078084526446877e-07,
      "loss": 0.3101,
      "step": 540
    },
    {
      "epoch": 4.23,
      "grad_norm": 0.3239983541697057,
      "learning_rate": 4.97949864999048e-07,
      "loss": 0.2932,
      "step": 541
    },
    {
      "epoch": 4.23,
      "grad_norm": 0.3457088720493278,
      "learning_rate": 4.881805815037239e-07,
      "loss": 0.3231,
      "step": 542
    },
    {
      "epoch": 4.24,
      "grad_norm": 0.3441443295170719,
      "learning_rate": 4.785008927726359e-07,
      "loss": 0.3302,
      "step": 543
    },
    {
      "epoch": 4.25,
      "grad_norm": 0.3324111381679956,
      "learning_rate": 4.6891108675446453e-07,
      "loss": 0.3265,
      "step": 544
    },
    {
      "epoch": 4.26,
      "grad_norm": 0.33951739514591006,
      "learning_rate": 4.59411448724087e-07,
      "loss": 0.3223,
      "step": 545
    },
    {
      "epoch": 4.27,
      "grad_norm": 0.32817326444431716,
      "learning_rate": 4.500022612740856e-07,
      "loss": 0.3045,
      "step": 546
    },
    {
      "epoch": 4.27,
      "grad_norm": 0.3250627297920548,
      "learning_rate": 4.406838043063446e-07,
      "loss": 0.3059,
      "step": 547
    },
    {
      "epoch": 4.28,
      "grad_norm": 0.3378123787646438,
      "learning_rate": 4.314563550237231e-07,
      "loss": 0.3157,
      "step": 548
    },
    {
      "epoch": 4.29,
      "grad_norm": 0.33855002776019455,
      "learning_rate": 4.2232018792181037e-07,
      "loss": 0.2979,
      "step": 549
    },
    {
      "epoch": 4.3,
      "grad_norm": 0.3388034403657763,
      "learning_rate": 4.132755747807577e-07,
      "loss": 0.3126,
      "step": 550
    },
    {
      "epoch": 4.3,
      "grad_norm": 0.31032554781257515,
      "learning_rate": 4.0432278465719386e-07,
      "loss": 0.3,
      "step": 551
    },
    {
      "epoch": 4.31,
      "grad_norm": 0.31720176424364194,
      "learning_rate": 3.95462083876224e-07,
      "loss": 0.2909,
      "step": 552
    },
    {
      "epoch": 4.32,
      "grad_norm": 0.3088378496487171,
      "learning_rate": 3.8669373602350414e-07,
      "loss": 0.2743,
      "step": 553
    },
    {
      "epoch": 4.33,
      "grad_norm": 0.307490909821479,
      "learning_rate": 3.7801800193740066e-07,
      "loss": 0.3003,
      "step": 554
    },
    {
      "epoch": 4.34,
      "grad_norm": 0.3123422646558358,
      "learning_rate": 3.6943513970123184e-07,
      "loss": 0.3044,
      "step": 555
    },
    {
      "epoch": 4.34,
      "grad_norm": 0.3560275027298391,
      "learning_rate": 3.609454046355911e-07,
      "loss": 0.3296,
      "step": 556
    },
    {
      "epoch": 4.35,
      "grad_norm": 0.30395635084459943,
      "learning_rate": 3.525490492907494e-07,
      "loss": 0.308,
      "step": 557
    },
    {
      "epoch": 4.36,
      "grad_norm": 0.31537748262010745,
      "learning_rate": 3.442463234391441e-07,
      "loss": 0.3065,
      "step": 558
    },
    {
      "epoch": 4.37,
      "grad_norm": 0.3569650951838116,
      "learning_rate": 3.3603747406794833e-07,
      "loss": 0.315,
      "step": 559
    },
    {
      "epoch": 4.38,
      "grad_norm": 0.3396558121035842,
      "learning_rate": 3.279227453717252e-07,
      "loss": 0.3199,
      "step": 560
    },
    {
      "epoch": 4.38,
      "grad_norm": 0.3270067914121413,
      "learning_rate": 3.1990237874516066e-07,
      "loss": 0.2812,
      "step": 561
    },
    {
      "epoch": 4.39,
      "grad_norm": 0.29552163074015286,
      "learning_rate": 3.1197661277588436e-07,
      "loss": 0.2799,
      "step": 562
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.3243293757851992,
      "learning_rate": 3.0414568323737346e-07,
      "loss": 0.2873,
      "step": 563
    },
    {
      "epoch": 4.41,
      "grad_norm": 0.3131415442010758,
      "learning_rate": 2.964098230819351e-07,
      "loss": 0.2843,
      "step": 564
    },
    {
      "epoch": 4.41,
      "grad_norm": 0.30987006697829866,
      "learning_rate": 2.887692624337806e-07,
      "loss": 0.3082,
      "step": 565
    },
    {
      "epoch": 4.42,
      "grad_norm": 0.32359530816615634,
      "learning_rate": 2.812242285821771e-07,
      "loss": 0.3175,
      "step": 566
    },
    {
      "epoch": 4.43,
      "grad_norm": 0.3285661078243146,
      "learning_rate": 2.7377494597468916e-07,
      "loss": 0.3028,
      "step": 567
    },
    {
      "epoch": 4.44,
      "grad_norm": 0.3120391175324319,
      "learning_rate": 2.664216362104964e-07,
      "loss": 0.2848,
      "step": 568
    },
    {
      "epoch": 4.45,
      "grad_norm": 0.3254624468839883,
      "learning_rate": 2.591645180338085e-07,
      "loss": 0.3057,
      "step": 569
    },
    {
      "epoch": 4.45,
      "grad_norm": 0.3066889634179814,
      "learning_rate": 2.5200380732735444e-07,
      "loss": 0.2815,
      "step": 570
    },
    {
      "epoch": 4.46,
      "grad_norm": 0.3219858581758564,
      "learning_rate": 2.4493971710595773e-07,
      "loss": 0.3037,
      "step": 571
    },
    {
      "epoch": 4.47,
      "grad_norm": 0.3405241698528944,
      "learning_rate": 2.3797245751020545e-07,
      "loss": 0.2966,
      "step": 572
    },
    {
      "epoch": 4.48,
      "grad_norm": 0.313000854277894,
      "learning_rate": 2.3110223580019317e-07,
      "loss": 0.288,
      "step": 573
    },
    {
      "epoch": 4.48,
      "grad_norm": 0.3277910051867206,
      "learning_rate": 2.2432925634936062e-07,
      "loss": 0.2967,
      "step": 574
    },
    {
      "epoch": 4.49,
      "grad_norm": 0.3602978682407753,
      "learning_rate": 2.176537206384112e-07,
      "loss": 0.3031,
      "step": 575
    },
    {
      "epoch": 4.5,
      "grad_norm": 0.3237851867770472,
      "learning_rate": 2.1107582724932088e-07,
      "loss": 0.3059,
      "step": 576
    },
    {
      "epoch": 4.51,
      "grad_norm": 0.32037287639379797,
      "learning_rate": 2.0459577185942756e-07,
      "loss": 0.3049,
      "step": 577
    },
    {
      "epoch": 4.52,
      "grad_norm": 0.313744879178858,
      "learning_rate": 1.9821374723561168e-07,
      "loss": 0.2948,
      "step": 578
    },
    {
      "epoch": 4.52,
      "grad_norm": 0.3170159860467313,
      "learning_rate": 1.9192994322856282e-07,
      "loss": 0.2679,
      "step": 579
    },
    {
      "epoch": 4.53,
      "grad_norm": 0.32920768720751875,
      "learning_rate": 1.8574454676713047e-07,
      "loss": 0.291,
      "step": 580
    },
    {
      "epoch": 4.54,
      "grad_norm": 0.32724821046512204,
      "learning_rate": 1.7965774185276317e-07,
      "loss": 0.2929,
      "step": 581
    },
    {
      "epoch": 4.55,
      "grad_norm": 0.33727509928787114,
      "learning_rate": 1.736697095540361e-07,
      "loss": 0.3008,
      "step": 582
    },
    {
      "epoch": 4.55,
      "grad_norm": 0.32258880398881606,
      "learning_rate": 1.6778062800126503e-07,
      "loss": 0.3101,
      "step": 583
    },
    {
      "epoch": 4.56,
      "grad_norm": 0.31631420707873364,
      "learning_rate": 1.6199067238120613e-07,
      "loss": 0.3056,
      "step": 584
    },
|
    {
      "epoch": 4.57,
      "grad_norm": 0.3103455602146561,
      "learning_rate": 1.563000149318439e-07,
      "loss": 0.2941,
      "step": 585
    },
    {
      "epoch": 4.58,
      "grad_norm": 0.3278465787187694,
      "learning_rate": 1.5070882493726911e-07,
      "loss": 0.2778,
      "step": 586
    },
    {
      "epoch": 4.59,
      "grad_norm": 0.3161655577398492,
      "learning_rate": 1.4521726872264334e-07,
      "loss": 0.2749,
      "step": 587
    },
    {
      "epoch": 4.59,
      "grad_norm": 0.3201062365976047,
      "learning_rate": 1.398255096492499e-07,
      "loss": 0.2784,
      "step": 588
    },
    {
      "epoch": 4.6,
      "grad_norm": 0.32972141755861767,
      "learning_rate": 1.3453370810963294e-07,
      "loss": 0.2836,
      "step": 589
    },
    {
      "epoch": 4.61,
      "grad_norm": 0.3328324371601979,
      "learning_rate": 1.2934202152283052e-07,
      "loss": 0.2772,
      "step": 590
    },
    {
      "epoch": 4.62,
      "grad_norm": 0.3189014403287748,
      "learning_rate": 1.242506043296871e-07,
      "loss": 0.2982,
      "step": 591
    },
    {
      "epoch": 4.62,
      "grad_norm": 0.34902552630822714,
      "learning_rate": 1.192596079882613e-07,
      "loss": 0.3056,
      "step": 592
    },
    {
      "epoch": 4.63,
      "grad_norm": 0.33213859814343094,
      "learning_rate": 1.1436918096932042e-07,
      "loss": 0.2979,
      "step": 593
    },
    {
      "epoch": 4.64,
      "grad_norm": 0.3041910425282244,
      "learning_rate": 1.095794687519242e-07,
      "loss": 0.2958,
      "step": 594
    },
    {
      "epoch": 4.65,
      "grad_norm": 0.3083108489167821,
      "learning_rate": 1.0489061381909609e-07,
      "loss": 0.2949,
      "step": 595
    },
    {
      "epoch": 4.66,
      "grad_norm": 0.329428102687414,
      "learning_rate": 1.0030275565358499e-07,
      "loss": 0.3127,
      "step": 596
    },
    {
      "epoch": 4.66,
      "grad_norm": 0.32701334264131404,
      "learning_rate": 9.581603073371642e-08,
      "loss": 0.3087,
      "step": 597
    },
    {
      "epoch": 4.67,
      "grad_norm": 0.327187678866886,
      "learning_rate": 9.143057252933229e-08,
      "loss": 0.297,
      "step": 598
    },
    {
      "epoch": 4.68,
      "grad_norm": 0.3324055738492342,
      "learning_rate": 8.714651149782038e-08,
      "loss": 0.3142,
      "step": 599
    },
    {
      "epoch": 4.69,
      "grad_norm": 0.3146633682745703,
      "learning_rate": 8.296397508023323e-08,
      "loss": 0.2989,
      "step": 600
    },
    {
      "epoch": 4.7,
      "grad_norm": 0.3201792191905935,
      "learning_rate": 7.888308769749875e-08,
      "loss": 0.3037,
      "step": 601
    },
    {
      "epoch": 4.7,
      "grad_norm": 0.3306081284329322,
      "learning_rate": 7.490397074671583e-08,
      "loss": 0.3011,
      "step": 602
    },
    {
      "epoch": 4.71,
      "grad_norm": 0.3296222930326982,
      "learning_rate": 7.102674259754693e-08,
      "loss": 0.3008,
      "step": 603
    },
    {
      "epoch": 4.72,
      "grad_norm": 0.31859276472772396,
      "learning_rate": 6.72515185886935e-08,
      "loss": 0.2949,
      "step": 604
    },
    {
      "epoch": 4.73,
      "grad_norm": 0.3473157998924945,
      "learning_rate": 6.357841102446649e-08,
      "loss": 0.3284,
      "step": 605
    },
    {
      "epoch": 4.73,
      "grad_norm": 0.325767117495,
      "learning_rate": 6.000752917144614e-08,
      "loss": 0.2895,
      "step": 606
    },
    {
      "epoch": 4.74,
      "grad_norm": 0.317107385679555,
      "learning_rate": 5.653897925522877e-08,
      "loss": 0.3068,
      "step": 607
    },
    {
      "epoch": 4.75,
      "grad_norm": 0.34501499728066537,
      "learning_rate": 5.3172864457271926e-08,
      "loss": 0.3025,
      "step": 608
    },
    {
      "epoch": 4.76,
      "grad_norm": 0.30943095831999046,
      "learning_rate": 4.990928491181839e-08,
      "loss": 0.2754,
      "step": 609
    },
    {
      "epoch": 4.77,
      "grad_norm": 0.3316466343580564,
      "learning_rate": 4.674833770292358e-08,
      "loss": 0.2867,
      "step": 610
    },
    {
      "epoch": 4.77,
      "grad_norm": 0.32304558047721826,
      "learning_rate": 4.369011686156293e-08,
      "loss": 0.3232,
      "step": 611
    },
    {
      "epoch": 4.78,
      "grad_norm": 0.3387512123056888,
      "learning_rate": 4.073471336283768e-08,
      "loss": 0.3242,
      "step": 612
    },
    {
      "epoch": 4.79,
      "grad_norm": 0.32043866040079066,
      "learning_rate": 3.788221512326645e-08,
      "loss": 0.2945,
      "step": 613
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.3293121499354337,
      "learning_rate": 3.5132706998172444e-08,
      "loss": 0.3219,
      "step": 614
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.3264621427885196,
      "learning_rate": 3.248627077915578e-08,
      "loss": 0.3031,
      "step": 615
    },
    {
      "epoch": 4.81,
      "grad_norm": 0.3293128611402775,
      "learning_rate": 2.994298519166366e-08,
      "loss": 0.3072,
      "step": 616
    },
    {
      "epoch": 4.82,
      "grad_norm": 0.34414365396850366,
      "learning_rate": 2.7502925892646135e-08,
      "loss": 0.3281,
      "step": 617
    },
    {
      "epoch": 4.83,
      "grad_norm": 0.33142162835066347,
      "learning_rate": 2.5166165468307356e-08,
      "loss": 0.331,
      "step": 618
    },
    {
      "epoch": 4.84,
      "grad_norm": 0.3277738639629037,
      "learning_rate": 2.293277343194472e-08,
      "loss": 0.3027,
      "step": 619
    },
    {
      "epoch": 4.84,
      "grad_norm": 0.3307126527318936,
      "learning_rate": 2.0802816221881235e-08,
      "loss": 0.31,
      "step": 620
    },
    {
      "epoch": 4.85,
      "grad_norm": 0.3334818389052575,
      "learning_rate": 1.8776357199490778e-08,
      "loss": 0.328,
      "step": 621
    },
    {
      "epoch": 4.86,
      "grad_norm": 0.33980982527208864,
      "learning_rate": 1.6853456647311137e-08,
      "loss": 0.3174,
      "step": 622
    },
    {
      "epoch": 4.87,
      "grad_norm": 0.31490770170987176,
      "learning_rate": 1.5034171767251135e-08,
      "loss": 0.2928,
      "step": 623
    },
    {
      "epoch": 4.88,
      "grad_norm": 0.3317129206040837,
      "learning_rate": 1.3318556678890592e-08,
      "loss": 0.3118,
      "step": 624
    },
    {
      "epoch": 4.88,
      "grad_norm": 0.32525833347477956,
      "learning_rate": 1.1706662417868885e-08,
      "loss": 0.3169,
      "step": 625
    },
    {
      "epoch": 4.89,
      "grad_norm": 0.34817461775119474,
      "learning_rate": 1.0198536934366786e-08,
      "loss": 0.3034,
      "step": 626
    },
    {
      "epoch": 4.9,
      "grad_norm": 0.32179354557665907,
      "learning_rate": 8.794225091680763e-09,
      "loss": 0.3057,
      "step": 627
    },
    {
      "epoch": 4.91,
      "grad_norm": 0.34168991698735435,
      "learning_rate": 7.493768664887822e-09,
      "loss": 0.3106,
      "step": 628
    },
    {
      "epoch": 4.91,
      "grad_norm": 0.3143573304513848,
      "learning_rate": 6.2972063396032336e-09,
      "loss": 0.291,
      "step": 629
    },
    {
      "epoch": 4.92,
      "grad_norm": 0.3282355451726847,
      "learning_rate": 5.204573710829163e-09,
      "loss": 0.3088,
      "step": 630
    },
    {
      "epoch": 4.93,
      "grad_norm": 0.3518094962496798,
      "learning_rate": 4.2159032818965825e-09,
      "loss": 0.3318,
      "step": 631
    },
    {
      "epoch": 4.94,
      "grad_norm": 0.3140683067025284,
      "learning_rate": 3.331224463497706e-09,
      "loss": 0.29,
      "step": 632
    },
    {
      "epoch": 4.95,
      "grad_norm": 0.358966960549393,
      "learning_rate": 2.5505635728116927e-09,
      "loss": 0.3095,
      "step": 633
    },
    {
      "epoch": 4.95,
      "grad_norm": 0.31663660996902965,
      "learning_rate": 1.873943832720104e-09,
      "loss": 0.3019,
      "step": 634
    },
|
    {
      "epoch": 4.96,
      "grad_norm": 0.3270301154008955,
      "learning_rate": 1.3013853711191237e-09,
      "loss": 0.291,
      "step": 635
    },
    {
      "epoch": 4.97,
      "grad_norm": 0.32925831746753514,
      "learning_rate": 8.329052203180364e-10,
      "loss": 0.3063,
      "step": 636
    },
    {
      "epoch": 4.98,
      "grad_norm": 0.333316577240123,
      "learning_rate": 4.685173165336897e-10,
      "loss": 0.3115,
      "step": 637
    },
    {
      "epoch": 4.98,
      "grad_norm": 0.3146179236474361,
      "learning_rate": 2.0823249947587997e-10,
      "loss": 0.2914,
      "step": 638
    },
    {
      "epoch": 4.99,
      "grad_norm": 0.3399539528453878,
      "learning_rate": 5.205851202444434e-11,
      "loss": 0.3122,
      "step": 639
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.34346878676292253,
      "learning_rate": 0.0,
      "loss": 0.3243,
      "step": 640
    },
    {
      "epoch": 5.0,
      "step": 640,
      "total_flos": 0.0,
      "train_loss": 0.3771400361787528,
      "train_runtime": 5460.2717,
      "train_samples_per_second": 15.018,
      "train_steps_per_second": 0.117
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 640,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}