{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9995724668661823,
  "eval_steps": 500,
  "global_step": 877,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.003420265070542967, "grad_norm": 10.156329759741077, "learning_rate": 0.0, "loss": 2.1575, "step": 1},
    {"epoch": 0.006840530141085934, "grad_norm": 184.3295302770049, "learning_rate": 1.41e-05, "loss": 4.88, "step": 2},
    {"epoch": 0.010260795211628902, "grad_norm": 182.65124398096702, "learning_rate": 1.41e-05, "loss": 4.8994, "step": 3},
    {"epoch": 0.013681060282171868, "grad_norm": 30.32918049437796, "learning_rate": 1.41e-05, "loss": 3.5419, "step": 4},
    {"epoch": 0.017101325352714837, "grad_norm": 16.444831424361816, "learning_rate": 1.41e-05, "loss": 2.7311, "step": 5},
    {"epoch": 0.020521590423257803, "grad_norm": 6.633419850116959, "learning_rate": 1.41e-05, "loss": 2.2268, "step": 6},
    {"epoch": 0.02394185549380077, "grad_norm": 9.401679436345786, "learning_rate": 1.41e-05, "loss": 2.1353, "step": 7},
    {"epoch": 0.027362120564343735, "grad_norm": 7.95097945213086, "learning_rate": 1.41e-05, "loss": 2.0867, "step": 8},
    {"epoch": 0.030782385634886705, "grad_norm": 3.9617869212350962, "learning_rate": 1.41e-05, "loss": 1.9823, "step": 9},
    {"epoch": 0.034202650705429674, "grad_norm": 9.402637091305385, "learning_rate": 1.41e-05, "loss": 1.9361, "step": 10},
    {"epoch": 0.03762291577597264, "grad_norm": 3.7983366071849862, "learning_rate": 1.41e-05, "loss": 1.9295, "step": 11},
    {"epoch": 0.041043180846515606, "grad_norm": 4.558710902554969, "learning_rate": 1.41e-05, "loss": 1.8787, "step": 12},
    {"epoch": 0.04446344591705857, "grad_norm": 3.9168701968301134, "learning_rate": 1.41e-05, "loss": 1.7936, "step": 13},
    {"epoch": 0.04788371098760154, "grad_norm": 3.763989925966716, "learning_rate": 1.41e-05, "loss": 1.7617, "step": 14},
    {"epoch": 0.051303976058144504, "grad_norm": 3.4821708929604007, "learning_rate": 1.41e-05, "loss": 1.6881, "step": 15},
    {"epoch": 0.05472424112868747, "grad_norm": 2.1426267148325997, "learning_rate": 1.41e-05, "loss": 1.6905, "step": 16},
    {"epoch": 0.05814450619923044, "grad_norm": 3.6919608929938255, "learning_rate": 1.41e-05, "loss": 1.6473, "step": 17},
    {"epoch": 0.06156477126977341, "grad_norm": 2.725651684433171, "learning_rate": 1.41e-05, "loss": 1.7064, "step": 18},
    {"epoch": 0.06498503634031637, "grad_norm": 2.9127756737882824, "learning_rate": 1.41e-05, "loss": 1.5901, "step": 19},
    {"epoch": 0.06840530141085935, "grad_norm": 2.664503176902388, "learning_rate": 1.41e-05, "loss": 1.6196, "step": 20},
    {"epoch": 0.07182556648140231, "grad_norm": 2.5256162285977077, "learning_rate": 1.41e-05, "loss": 1.4734, "step": 21},
    {"epoch": 0.07524583155194528, "grad_norm": 2.232926984374301, "learning_rate": 1.41e-05, "loss": 1.4862, "step": 22},
    {"epoch": 0.07866609662248825, "grad_norm": 1.8467296080556872, "learning_rate": 1.41e-05, "loss": 1.4919, "step": 23},
    {"epoch": 0.08208636169303121, "grad_norm": 2.133405884576976, "learning_rate": 1.41e-05, "loss": 1.4962, "step": 24},
    {"epoch": 0.08550662676357418, "grad_norm": 2.0723150961914256, "learning_rate": 1.41e-05, "loss": 1.3745, "step": 25},
    {"epoch": 0.08892689183411714, "grad_norm": 2.2473775199610166, "learning_rate": 1.41e-05, "loss": 1.4678, "step": 26},
    {"epoch": 0.09234715690466011, "grad_norm": 3.1366913970429366, "learning_rate": 1.41e-05, "loss": 1.3231, "step": 27},
    {"epoch": 0.09576742197520308, "grad_norm": 2.2703890784758585, "learning_rate": 1.41e-05, "loss": 1.3328, "step": 28},
    {"epoch": 0.09918768704574604, "grad_norm": 2.7735799657439943, "learning_rate": 1.41e-05, "loss": 1.1703, "step": 29},
    {"epoch": 0.10260795211628901, "grad_norm": 2.5181744230002203, "learning_rate": 1.41e-05, "loss": 1.257, "step": 30},
    {"epoch": 0.10602821718683197, "grad_norm": 2.5187681530121546, "learning_rate": 1.41e-05, "loss": 1.1321, "step": 31},
    {"epoch": 0.10944848225737494, "grad_norm": 2.5085106975094678, "learning_rate": 1.41e-05, "loss": 1.0929, "step": 32},
    {"epoch": 0.11286874732791792, "grad_norm": 2.1171154989402887, "learning_rate": 1.41e-05, "loss": 1.1053, "step": 33},
    {"epoch": 0.11628901239846089, "grad_norm": 2.4016503905187725, "learning_rate": 1.41e-05, "loss": 0.9514, "step": 34},
    {"epoch": 0.11970927746900385, "grad_norm": 4.505185827517653, "learning_rate": 1.41e-05, "loss": 1.1443, "step": 35},
    {"epoch": 0.12312954253954682, "grad_norm": 2.283031446044169, "learning_rate": 1.41e-05, "loss": 1.1263, "step": 36},
    {"epoch": 0.12654980761008977, "grad_norm": 2.474396323802316, "learning_rate": 1.41e-05, "loss": 0.9587, "step": 37},
    {"epoch": 0.12997007268063274, "grad_norm": 3.3515325695241667, "learning_rate": 1.41e-05, "loss": 0.8727, "step": 38},
    {"epoch": 0.1333903377511757, "grad_norm": 2.3607499413214694, "learning_rate": 1.41e-05, "loss": 0.929, "step": 39},
    {"epoch": 0.1368106028217187, "grad_norm": 2.4723425187830537, "learning_rate": 1.41e-05, "loss": 1.0473, "step": 40},
    {"epoch": 0.14023086789226166, "grad_norm": 2.183114877668465, "learning_rate": 1.41e-05, "loss": 0.854, "step": 41},
    {"epoch": 0.14365113296280463, "grad_norm": 2.3660790712775555, "learning_rate": 1.41e-05, "loss": 0.8244, "step": 42},
    {"epoch": 0.1470713980333476, "grad_norm": 1.9551361934244411, "learning_rate": 1.41e-05, "loss": 0.8276, "step": 43},
    {"epoch": 0.15049166310389056, "grad_norm": 3.163303383692252, "learning_rate": 1.41e-05, "loss": 0.8468, "step": 44},
    {"epoch": 0.15391192817443353, "grad_norm": 3.163426556149129, "learning_rate": 1.41e-05, "loss": 0.7213, "step": 45},
    {"epoch": 0.1573321932449765, "grad_norm": 2.7934537634938783, "learning_rate": 1.41e-05, "loss": 0.7035, "step": 46},
    {"epoch": 0.16075245831551946, "grad_norm": 1.8747838269439079, "learning_rate": 1.41e-05, "loss": 0.7371, "step": 47},
    {"epoch": 0.16417272338606242, "grad_norm": 2.7812346413247484, "learning_rate": 1.41e-05, "loss": 0.7602, "step": 48},
    {"epoch": 0.1675929884566054, "grad_norm": 2.378687443172595, "learning_rate": 1.41e-05, "loss": 0.8561, "step": 49},
    {"epoch": 0.17101325352714836, "grad_norm": 2.3489013022006056, "learning_rate": 1.41e-05, "loss": 0.5867, "step": 50},
    {"epoch": 0.17443351859769132, "grad_norm": 1.7892416152344746, "learning_rate": 1.41e-05, "loss": 0.4853, "step": 51},
    {"epoch": 0.1778537836682343, "grad_norm": 2.077063902741533, "learning_rate": 1.41e-05, "loss": 0.6375, "step": 52},
    {"epoch": 0.18127404873877725, "grad_norm": 2.2312628788967412, "learning_rate": 1.41e-05, "loss": 0.6439, "step": 53},
    {"epoch": 0.18469431380932022, "grad_norm": 2.996527586876452, "learning_rate": 1.41e-05, "loss": 0.5925, "step": 54},
    {"epoch": 0.1881145788798632, "grad_norm": 1.8130852934136357, "learning_rate": 1.41e-05, "loss": 0.5575, "step": 55},
    {"epoch": 0.19153484395040615, "grad_norm": 1.5587429488330231, "learning_rate": 1.41e-05, "loss": 0.5954, "step": 56},
    {"epoch": 0.19495510902094912, "grad_norm": 1.7839250019187394, "learning_rate": 1.41e-05, "loss": 0.6196, "step": 57},
    {"epoch": 0.19837537409149208, "grad_norm": 1.6933241820211493, "learning_rate": 1.41e-05, "loss": 0.4969, "step": 58},
    {"epoch": 0.20179563916203505, "grad_norm": 1.731887729628178, "learning_rate": 1.41e-05, "loss": 0.6548, "step": 59},
    {"epoch": 0.20521590423257802, "grad_norm": 2.3987513930499995, "learning_rate": 1.41e-05, "loss": 0.4595, "step": 60},
    {"epoch": 0.20863616930312098, "grad_norm": 2.639090838839165, "learning_rate": 1.41e-05, "loss": 0.5341, "step": 61},
    {"epoch": 0.21205643437366395, "grad_norm": 1.7474798765435617, "learning_rate": 1.41e-05, "loss": 0.6015, "step": 62},
    {"epoch": 0.21547669944420692, "grad_norm": 3.571763528033457, "learning_rate": 1.41e-05, "loss": 0.4851, "step": 63},
    {"epoch": 0.21889696451474988, "grad_norm": 1.9011170860485487, "learning_rate": 1.41e-05, "loss": 0.4911, "step": 64},
    {"epoch": 0.22231722958529285, "grad_norm": 2.625704311611419, "learning_rate": 1.41e-05, "loss": 0.5247, "step": 65},
    {"epoch": 0.22573749465583584, "grad_norm": 1.8272075489091404, "learning_rate": 1.41e-05, "loss": 0.4123, "step": 66},
    {"epoch": 0.2291577597263788, "grad_norm": 2.005801690308512, "learning_rate": 1.41e-05, "loss": 0.437, "step": 67},
    {"epoch": 0.23257802479692177, "grad_norm": 2.009191411617363, "learning_rate": 1.41e-05, "loss": 0.4553, "step": 68},
    {"epoch": 0.23599828986746474, "grad_norm": 1.9856248091054856, "learning_rate": 1.41e-05, "loss": 0.4911, "step": 69},
    {"epoch": 0.2394185549380077, "grad_norm": 1.9582222549850024, "learning_rate": 1.41e-05, "loss": 0.4345, "step": 70},
    {"epoch": 0.24283882000855067, "grad_norm": 1.6336088110301379, "learning_rate": 1.41e-05, "loss": 0.4649, "step": 71},
    {"epoch": 0.24625908507909364, "grad_norm": 1.946765368736228, "learning_rate": 1.41e-05, "loss": 0.3968, "step": 72},
    {"epoch": 0.2496793501496366, "grad_norm": 1.9515164464490413, "learning_rate": 1.41e-05, "loss": 0.405, "step": 73},
    {"epoch": 0.25309961522017954, "grad_norm": 1.5507200892542412, "learning_rate": 1.41e-05, "loss": 0.2883, "step": 74},
    {"epoch": 0.25651988029072254, "grad_norm": 1.8953946078798778, "learning_rate": 1.41e-05, "loss": 0.3697, "step": 75},
    {"epoch": 0.2599401453612655, "grad_norm": 1.609567252811076, "learning_rate": 1.41e-05, "loss": 0.3912, "step": 76},
    {"epoch": 0.26336041043180847, "grad_norm": 1.841628814862342, "learning_rate": 1.41e-05, "loss": 0.3753, "step": 77},
    {"epoch": 0.2667806755023514, "grad_norm": 1.3866520663429722, "learning_rate": 1.41e-05, "loss": 0.2978, "step": 78},
    {"epoch": 0.2702009405728944, "grad_norm": 1.8662166292908873, "learning_rate": 1.41e-05, "loss": 0.325, "step": 79},
    {"epoch": 0.2736212056434374, "grad_norm": 1.86990469712952, "learning_rate": 1.41e-05, "loss": 0.3608, "step": 80},
    {"epoch": 0.27704147071398033, "grad_norm": 1.7776490861424512, "learning_rate": 1.41e-05, "loss": 0.3164, "step": 81},
    {"epoch": 0.2804617357845233, "grad_norm": 1.5257018693275182, "learning_rate": 1.41e-05, "loss": 0.2736, "step": 82},
    {"epoch": 0.28388200085506626, "grad_norm": 1.2107786177311144, "learning_rate": 1.41e-05, "loss": 0.2293, "step": 83},
    {"epoch": 0.28730226592560926, "grad_norm": 1.7075082762966491, "learning_rate": 1.41e-05, "loss": 0.3312, "step": 84},
    {"epoch": 0.2907225309961522, "grad_norm": 1.4271608707593932, "learning_rate": 1.41e-05, "loss": 0.2731, "step": 85},
    {"epoch": 0.2941427960666952, "grad_norm": 2.0611639591955466, "learning_rate": 1.41e-05, "loss": 0.2708, "step": 86},
    {"epoch": 0.2975630611372381, "grad_norm": 1.2754227728205574, "learning_rate": 1.41e-05, "loss": 0.2301, "step": 87},
    {"epoch": 0.3009833262077811, "grad_norm": 1.312428216280633, "learning_rate": 1.41e-05, "loss": 0.2219, "step": 88},
    {"epoch": 0.30440359127832406, "grad_norm": 1.36584246397569, "learning_rate": 1.41e-05, "loss": 0.2761, "step": 89},
    {"epoch": 0.30782385634886705, "grad_norm": 1.1768002468324985, "learning_rate": 1.41e-05, "loss": 0.1884, "step": 90},
    {"epoch": 0.31124412141941, "grad_norm": 1.402779753210287, "learning_rate": 1.41e-05, "loss": 0.24, "step": 91},
    {"epoch": 0.314664386489953, "grad_norm": 1.2917943074724656, "learning_rate": 1.41e-05, "loss": 0.2487, "step": 92},
    {"epoch": 0.3180846515604959, "grad_norm": 1.4168579440960474, "learning_rate": 1.41e-05, "loss": 0.2429, "step": 93},
    {"epoch": 0.3215049166310389, "grad_norm": 1.1040710078361045, "learning_rate": 1.41e-05, "loss": 0.1676, "step": 94},
    {"epoch": 0.32492518170158186, "grad_norm": 1.6692824066389471, "learning_rate": 1.41e-05, "loss": 0.2456, "step": 95},
    {"epoch": 0.32834544677212485, "grad_norm": 1.4794418835245668, "learning_rate": 1.41e-05, "loss": 0.2346, "step": 96},
    {"epoch": 0.3317657118426678, "grad_norm": 1.2426804107195764, "learning_rate": 1.41e-05, "loss": 0.2286, "step": 97},
    {"epoch": 0.3351859769132108, "grad_norm": 1.5976518274152767, "learning_rate": 1.41e-05, "loss": 0.2356, "step": 98},
    {"epoch": 0.3386062419837537, "grad_norm": 1.3380817380576575, "learning_rate": 1.41e-05, "loss": 0.2361, "step": 99},
    {"epoch": 0.3420265070542967, "grad_norm": 1.8499907335313168, "learning_rate": 1.41e-05, "loss": 0.2144, "step": 100},
    {"epoch": 0.34544677212483965, "grad_norm": 1.5519650604852626, "learning_rate": 1.41e-05, "loss": 0.2211, "step": 101},
    {"epoch": 0.34886703719538265, "grad_norm": 1.4110088914262258, "learning_rate": 1.41e-05, "loss": 0.2422, "step": 102},
    {"epoch": 0.3522873022659256, "grad_norm": 1.4052513404508866, "learning_rate": 1.41e-05, "loss": 0.1939, "step": 103},
    {"epoch": 0.3557075673364686, "grad_norm": 1.377026410169766, "learning_rate": 1.41e-05, "loss": 0.193, "step": 104},
    {"epoch": 0.3591278324070115, "grad_norm": 1.0901968649772857, "learning_rate": 1.41e-05, "loss": 0.1705, "step": 105},
    {"epoch": 0.3625480974775545, "grad_norm": 1.2716891700474584, "learning_rate": 1.41e-05, "loss": 0.1998, "step": 106},
    {"epoch": 0.3659683625480975, "grad_norm": 1.269092339983086, "learning_rate": 1.41e-05, "loss": 0.1845, "step": 107},
    {"epoch": 0.36938862761864044, "grad_norm": 1.0134148691503204, "learning_rate": 1.41e-05, "loss": 0.1457, "step": 108},
    {"epoch": 0.37280889268918344, "grad_norm": 1.0973618617348355, "learning_rate": 1.41e-05, "loss": 0.1829, "step": 109},
    {"epoch": 0.3762291577597264, "grad_norm": 1.3476960032405298, "learning_rate": 1.41e-05, "loss": 0.1936, "step": 110},
    {"epoch": 0.37964942283026937, "grad_norm": 1.1243588113049168, "learning_rate": 1.41e-05, "loss": 0.1763, "step": 111},
    {"epoch": 0.3830696879008123, "grad_norm": 1.756776902549336, "learning_rate": 1.41e-05, "loss": 0.1772, "step": 112},
    {"epoch": 0.3864899529713553, "grad_norm": 1.0653616123456815, "learning_rate": 1.41e-05, "loss": 0.1485, "step": 113},
    {"epoch": 0.38991021804189824, "grad_norm": 1.322506116172751, "learning_rate": 1.41e-05, "loss": 0.1511, "step": 114},
    {"epoch": 0.39333048311244123, "grad_norm": 1.134939568700316, "learning_rate": 1.41e-05, "loss": 0.1595, "step": 115},
    {"epoch": 0.39675074818298417, "grad_norm": 1.2351108355732012, "learning_rate": 1.41e-05, "loss": 0.1653, "step": 116},
    {"epoch": 0.40017101325352716, "grad_norm": 1.291174811726909, "learning_rate": 1.41e-05, "loss": 0.1414, "step": 117},
    {"epoch": 0.4035912783240701, "grad_norm": 1.3110441911359814, "learning_rate": 1.41e-05, "loss": 0.1602, "step": 118},
    {"epoch": 0.4070115433946131, "grad_norm": 1.0324677945393839, "learning_rate": 1.41e-05, "loss": 0.1589, "step": 119},
    {"epoch": 0.41043180846515603, "grad_norm": 1.1528407686685203, "learning_rate": 1.41e-05, "loss": 0.142, "step": 120},
    {"epoch": 0.413852073535699, "grad_norm": 1.259359755456077, "learning_rate": 1.41e-05, "loss": 0.1671, "step": 121},
    {"epoch": 0.41727233860624197, "grad_norm": 0.9342347863650876, "learning_rate": 1.41e-05, "loss": 0.123, "step": 122},
    {"epoch": 0.42069260367678496, "grad_norm": 1.0262297732751853, "learning_rate": 1.41e-05, "loss": 0.1379, "step": 123},
    {"epoch": 0.4241128687473279, "grad_norm": 0.8528122922890459, "learning_rate": 1.41e-05, "loss": 0.1115, "step": 124},
    {"epoch": 0.4275331338178709, "grad_norm": 1.0773688633850005, "learning_rate": 1.41e-05, "loss": 0.1368, "step": 125},
    {"epoch": 0.43095339888841383, "grad_norm": 1.02543648603311, "learning_rate": 1.41e-05, "loss": 0.1116, "step": 126},
    {"epoch": 0.4343736639589568, "grad_norm": 0.8824964139082571, "learning_rate": 1.41e-05, "loss": 0.1149, "step": 127},
    {"epoch": 0.43779392902949976, "grad_norm": 1.1649358884512249, "learning_rate": 1.41e-05, "loss": 0.127, "step": 128},
    {"epoch": 0.44121419410004276, "grad_norm": 1.0614948335318424, "learning_rate": 1.41e-05, "loss": 0.1248, "step": 129},
    {"epoch": 0.4446344591705857, "grad_norm": 0.7776250289623791, "learning_rate": 1.41e-05, "loss": 0.1145, "step": 130},
    {"epoch": 0.4480547242411287, "grad_norm": 0.8384984456348001, "learning_rate": 1.41e-05, "loss": 0.0921, "step": 131},
    {"epoch": 0.4514749893116717, "grad_norm": 1.0151085870429353, "learning_rate": 1.41e-05, "loss": 0.1226, "step": 132},
    {"epoch": 0.4548952543822146, "grad_norm": 0.7615407647681977, "learning_rate": 1.41e-05, "loss": 0.0951, "step": 133},
    {"epoch": 0.4583155194527576, "grad_norm": 0.9555465270405811, "learning_rate": 1.41e-05, "loss": 0.1186, "step": 134},
    {"epoch": 0.46173578452330055, "grad_norm": 0.9186643738206431, "learning_rate": 1.41e-05, "loss": 0.1207, "step": 135},
    {"epoch": 0.46515604959384355, "grad_norm": 0.8860210174403064, "learning_rate": 1.41e-05, "loss": 0.1195, "step": 136},
    {"epoch": 0.4685763146643865, "grad_norm": 0.8817179419098865, "learning_rate": 1.41e-05, "loss": 0.0897, "step": 137},
    {"epoch": 0.4719965797349295, "grad_norm": 0.8289163487116727, "learning_rate": 1.41e-05, "loss": 0.1003, "step": 138},
    {"epoch": 0.4754168448054724, "grad_norm": 0.904736501387183, "learning_rate": 1.41e-05, "loss": 0.0924, "step": 139},
    {"epoch": 0.4788371098760154, "grad_norm": 0.943604807746642, "learning_rate": 1.41e-05, "loss": 0.1109, "step": 140},
    {"epoch": 0.48225737494655835, "grad_norm": 0.9402997510757618, "learning_rate": 1.41e-05, "loss": 0.1145, "step": 141},
    {"epoch": 0.48567764001710134, "grad_norm": 0.7366551780058475, "learning_rate": 1.41e-05, "loss": 0.0899, "step": 142},
    {"epoch": 0.4890979050876443, "grad_norm": 0.9208800933128714, "learning_rate": 1.41e-05, "loss": 0.1167, "step": 143},
    {"epoch": 0.4925181701581873, "grad_norm": 0.750448679907497, "learning_rate": 1.41e-05, "loss": 0.0809, "step": 144},
    {"epoch": 0.4959384352287302, "grad_norm": 0.8092636444718945, "learning_rate": 1.41e-05, "loss": 0.0908, "step": 145},
    {"epoch": 0.4993587002992732, "grad_norm": 0.8625942305572842, "learning_rate": 1.41e-05, "loss": 0.1031, "step": 146},
    {"epoch": 0.5027789653698161, "grad_norm": 0.9198105373299891, "learning_rate": 1.41e-05, "loss": 0.1113, "step": 147},
    {"epoch": 0.5061992304403591, "grad_norm": 0.7848012242788545, "learning_rate": 1.41e-05, "loss": 0.0865, "step": 148},
    {"epoch": 0.5096194955109021, "grad_norm": 0.9324979893254519, "learning_rate": 1.41e-05, "loss": 0.0971, "step": 149},
    {"epoch": 0.5130397605814451, "grad_norm": 0.7143510040718761, "learning_rate": 1.41e-05, "loss": 0.0795, "step": 150},
    {"epoch": 0.516460025651988, "grad_norm": 1.0676584465757966, "learning_rate": 1.41e-05, "loss": 0.1135, "step": 151},
    {"epoch": 0.519880290722531, "grad_norm": 0.6822736052229258, "learning_rate": 1.41e-05, "loss": 0.0733, "step": 152},
    {"epoch": 0.523300555793074, "grad_norm": 0.8374258007686984, "learning_rate": 1.41e-05, "loss": 0.0907, "step": 153},
    {"epoch": 0.5267208208636169, "grad_norm": 0.8341896688419776, "learning_rate": 1.41e-05, "loss": 0.0939, "step": 154},
    {"epoch": 0.5301410859341599, "grad_norm": 0.7611257220630606, "learning_rate": 1.41e-05, "loss": 0.0869, "step": 155},
    {"epoch": 0.5335613510047028, "grad_norm": 0.740325417856638, "learning_rate": 1.41e-05, "loss": 0.0832, "step": 156},
    {"epoch": 0.5369816160752459, "grad_norm": 0.7042263268553958, "learning_rate": 1.41e-05, "loss": 0.0716, "step": 157},
    {"epoch": 0.5404018811457888, "grad_norm": 0.7335797206103793, "learning_rate": 1.41e-05, "loss": 0.0807, "step": 158},
    {"epoch": 0.5438221462163317, "grad_norm": 0.8019722685081757, "learning_rate": 1.41e-05, "loss": 0.0826, "step": 159},
    {"epoch": 0.5472424112868748, "grad_norm": 0.6929468279193534, "learning_rate": 1.41e-05, "loss": 0.0821, "step": 160},
    {"epoch": 0.5506626763574177, "grad_norm": 0.6854252563729888, "learning_rate": 1.41e-05, "loss": 0.0675, "step": 161},
    {"epoch": 0.5540829414279607, "grad_norm": 0.6741146081895844, "learning_rate": 1.41e-05, "loss": 0.0749, "step": 162},
    {"epoch": 0.5575032064985036, "grad_norm": 0.6902694268516201, "learning_rate": 1.41e-05, "loss": 0.0707, "step": 163},
    {"epoch": 0.5609234715690466, "grad_norm": 0.6948144741970704, "learning_rate": 1.41e-05, "loss": 0.0795, "step": 164},
    {"epoch": 0.5643437366395896, "grad_norm": 0.7169974641783955, "learning_rate": 1.41e-05, "loss": 0.0818, "step": 165},
    {"epoch": 0.5677640017101325, "grad_norm": 0.6384211122986987, "learning_rate": 1.41e-05, "loss": 0.0735, "step": 166},
    {"epoch": 0.5711842667806755, "grad_norm": 0.7417356609403256, "learning_rate": 1.41e-05, "loss": 0.0842, "step": 167},
    {"epoch": 0.5746045318512185, "grad_norm": 0.66647458408933, "learning_rate": 1.41e-05, "loss": 0.0814, "step": 168},
    {"epoch": 0.5780247969217615, "grad_norm": 0.7378707726234803, "learning_rate": 1.41e-05, "loss": 0.0746, "step": 169},
    {"epoch": 0.5814450619923044, "grad_norm": 0.6812399388437269, "learning_rate": 1.41e-05, "loss": 0.0828, "step": 170},
    {"epoch": 0.5848653270628473, "grad_norm": 0.6793042032294047, "learning_rate": 1.41e-05, "loss": 0.0724, "step": 171},
    {"epoch": 0.5882855921333904, "grad_norm": 0.6062435025746024, "learning_rate": 1.41e-05, "loss": 0.0649, "step": 172},
    {"epoch": 0.5917058572039333, "grad_norm": 0.6813501327595134, "learning_rate": 1.41e-05, "loss": 0.0711, "step": 173},
    {"epoch": 0.5951261222744763, "grad_norm": 0.672907340612349, "learning_rate": 1.41e-05, "loss": 0.0769, "step": 174},
    {"epoch": 0.5985463873450192, "grad_norm": 0.5410302293555107, "learning_rate": 1.41e-05, "loss": 0.0588, "step": 175},
    {"epoch": 0.6019666524155622, "grad_norm": 0.6636988853462809, "learning_rate": 1.41e-05, "loss": 0.0702, "step": 176},
    {"epoch": 0.6053869174861052, "grad_norm": 0.5918082791413835, "learning_rate": 1.41e-05, "loss": 0.0626, "step": 177},
    {"epoch": 0.6088071825566481, "grad_norm": 0.5816411742116699, "learning_rate": 1.41e-05, "loss": 0.059, "step": 178},
    {"epoch": 0.6122274476271911, "grad_norm": 0.6819230663398479, "learning_rate": 1.41e-05, "loss": 0.074, "step": 179},
    {"epoch": 0.6156477126977341, "grad_norm": 0.6727834641069249, "learning_rate": 1.41e-05, "loss": 0.0739, "step": 180},
    {"epoch": 0.619067977768277, "grad_norm": 0.5791689945437091, "learning_rate": 1.41e-05, "loss": 0.0553, "step": 181},
    {"epoch": 0.62248824283882, "grad_norm": 0.6060693467771211, "learning_rate": 1.41e-05, "loss": 0.0572, "step": 182},
    {"epoch": 0.6259085079093629, "grad_norm": 0.6611378853926027, "learning_rate": 1.41e-05, "loss": 0.0697, "step": 183},
    {"epoch": 0.629328772979906, "grad_norm": 0.5887572335679387, "learning_rate": 1.41e-05, "loss": 0.0603, "step": 184},
    {"epoch": 0.6327490380504489, "grad_norm": 0.538930391171475, "learning_rate": 1.41e-05, "loss": 0.0462, "step": 185},
    {"epoch": 0.6361693031209918, "grad_norm": 0.5617056384496442, "learning_rate": 1.41e-05, "loss": 0.0549, "step": 186},
    {"epoch": 0.6395895681915349, "grad_norm": 0.5912693387471951, "learning_rate": 1.41e-05, "loss": 0.0574, "step": 187},
    {"epoch": 0.6430098332620778, "grad_norm": 0.5373216387973052, "learning_rate": 1.41e-05, "loss": 0.0574, "step": 188},
    {"epoch": 0.6464300983326208, "grad_norm": 0.6151671164129469, "learning_rate": 1.41e-05, "loss": 0.0533, "step": 189},
    {"epoch": 0.6498503634031637, "grad_norm": 0.5394742707247884, "learning_rate": 1.41e-05, "loss": 0.0575, "step": 190},
    {"epoch": 0.6532706284737068, "grad_norm": 0.5752514447611141, "learning_rate": 1.41e-05, "loss": 0.0574, "step": 191},
    {"epoch": 0.6566908935442497, "grad_norm": 0.5136422669182581, "learning_rate": 1.41e-05, "loss": 0.054, "step": 192},
    {"epoch": 0.6601111586147926, "grad_norm": 0.6261951776293332, "learning_rate": 1.41e-05, "loss": 0.0612, "step": 193},
    {"epoch": 0.6635314236853356, "grad_norm": 0.5067466055288193, "learning_rate": 1.41e-05, "loss": 0.054, "step": 194},
    {"epoch": 0.6669516887558786, "grad_norm": 0.5892942817895197, "learning_rate": 1.41e-05, "loss": 0.0572, "step": 195},
    {"epoch": 0.6703719538264216, "grad_norm": 0.5652390088377635, "learning_rate": 1.41e-05, "loss": 0.0655, "step": 196},
    {"epoch": 0.6737922188969645, "grad_norm": 0.5285822929717092, "learning_rate": 1.41e-05, "loss": 0.0494, "step": 197},
    {"epoch": 0.6772124839675074, "grad_norm": 0.5246191872665474, "learning_rate": 1.41e-05, "loss": 0.0614, "step": 198},
    {"epoch": 0.6806327490380505, "grad_norm": 0.5387443642901717, "learning_rate": 1.41e-05, "loss": 0.0531, "step": 199},
    {"epoch": 0.6840530141085934, "grad_norm": 0.5238421425157995, "learning_rate": 1.41e-05, "loss": 0.0512, "step": 200},
    {"epoch": 0.6874732791791364, "grad_norm": 0.4882221825014573, "learning_rate": 1.41e-05, "loss": 0.0489, "step": 201},
    {"epoch": 0.6908935442496793, "grad_norm": 0.5646516308611281, "learning_rate": 1.41e-05, "loss": 0.0512, "step": 202},
    {"epoch": 0.6943138093202224, "grad_norm": 0.5778234440206057, "learning_rate": 1.41e-05, "loss": 0.0634, "step": 203},
    {"epoch": 0.6977340743907653, "grad_norm": 0.5293741220834561, "learning_rate": 1.41e-05, "loss": 0.056, "step": 204},
    {"epoch": 0.7011543394613082, "grad_norm": 0.527641578215631, "learning_rate": 1.41e-05, "loss": 0.0561, "step": 205},
    {"epoch": 0.7045746045318512, "grad_norm": 0.4000174523279179, "learning_rate": 1.41e-05, "loss": 0.0401, "step": 206},
    {"epoch": 0.7079948696023942, "grad_norm": 0.5298298097123045, "learning_rate": 1.41e-05, "loss": 0.0609, "step": 207},
    {"epoch": 0.7114151346729372, "grad_norm": 0.5349126180633413, "learning_rate": 1.41e-05, "loss": 0.052, "step": 208},
    {"epoch": 0.7148353997434801, "grad_norm": 0.5748237411918808, "learning_rate": 1.41e-05, "loss": 0.0651, "step": 209},
    {"epoch": 0.718255664814023, "grad_norm": 0.4989630175919984, "learning_rate": 1.41e-05, "loss": 0.0502, "step": 210},
    {"epoch": 0.7216759298845661, "grad_norm": 0.5604803895822472, "learning_rate": 1.41e-05, "loss": 0.054, "step": 211},
    {"epoch": 0.725096194955109, "grad_norm": 0.6062121706000653, "learning_rate": 1.41e-05, "loss": 0.0644, "step": 212},
    {"epoch": 0.728516460025652, "grad_norm": 0.49966896896386376, "learning_rate": 1.41e-05, "loss": 0.0469, "step": 213},
    {"epoch": 0.731936725096195, "grad_norm": 0.44454274508391683, "learning_rate": 1.41e-05, "loss": 0.0428, "step": 214},
    {"epoch": 0.735356990166738, "grad_norm": 0.563352438281472, "learning_rate": 1.41e-05, "loss": 0.055, "step": 215},
    {"epoch": 0.7387772552372809, "grad_norm": 0.6191641486840816, "learning_rate": 1.41e-05, "loss": 0.063, "step": 216},
    {"epoch": 0.7421975203078238, "grad_norm": 0.4827104401750106, "learning_rate": 1.41e-05, "loss": 0.0496, "step": 217},
    {"epoch": 0.7456177853783669, "grad_norm": 0.4656663916077845, "learning_rate": 1.41e-05, "loss": 0.0528, "step": 218},
    {"epoch": 0.7490380504489098, "grad_norm": 0.4785719764753492, "learning_rate": 1.41e-05, "loss": 0.0438, "step": 219},
    {"epoch": 0.7524583155194527, "grad_norm": 0.5448096052982832, "learning_rate": 1.41e-05, "loss": 0.0422, "step": 220},
    {"epoch": 0.7558785805899957, "grad_norm": 0.5065090698982245, "learning_rate": 1.41e-05, "loss": 0.0489, "step": 221},
    {"epoch": 0.7592988456605387, "grad_norm": 0.5059637979656728, "learning_rate": 1.41e-05, "loss": 0.0485, "step": 222},
    {"epoch": 0.7627191107310817, "grad_norm": 0.49583806553345944, "learning_rate": 1.41e-05, "loss": 0.0525, "step": 223},
    {"epoch": 0.7661393758016246, "grad_norm": 0.4012288566084756, "learning_rate": 1.41e-05, "loss": 0.0384, "step": 224},
    {"epoch": 0.7695596408721675, "grad_norm": 0.49449469823574593, "learning_rate": 1.41e-05, "loss": 0.05, "step": 225},
    {"epoch": 0.7729799059427106, "grad_norm": 0.45672465215152086, "learning_rate": 1.41e-05, "loss": 0.0479, "step": 226},
    {"epoch": 0.7764001710132535, "grad_norm": 0.4909318818146214, "learning_rate": 1.41e-05, "loss": 0.0499, "step": 227},
    {"epoch": 0.7798204360837965, "grad_norm": 0.40392311014383353, "learning_rate": 1.41e-05, "loss": 0.0406, "step": 228},
    {"epoch": 0.7832407011543394, "grad_norm": 0.4131849222227493, "learning_rate": 1.41e-05, "loss": 0.0388, "step": 229},
    {"epoch": 0.7866609662248825, "grad_norm": 0.5611423817088044, "learning_rate": 1.41e-05, "loss": 0.0594, "step": 230},
    {"epoch": 0.7900812312954254, "grad_norm": 0.47364645404517464, "learning_rate": 1.41e-05, "loss": 0.0522, "step": 231},
    {"epoch": 0.7935014963659683, "grad_norm": 0.4545208834696141, "learning_rate": 1.41e-05, "loss": 0.048, "step": 232},
    {"epoch": 0.7969217614365113, "grad_norm": 0.4113439560012879, "learning_rate": 1.41e-05, "loss": 0.0393, "step": 233},
    {"epoch": 0.8003420265070543, "grad_norm": 0.42334212760511825, "learning_rate": 1.41e-05, "loss": 0.0404, "step": 234},
    {"epoch": 0.8037622915775973, "grad_norm": 0.4768224768125407, "learning_rate": 1.41e-05, "loss": 0.0479, "step": 235},
    {"epoch": 0.8071825566481402, "grad_norm": 0.43678389875922824, "learning_rate": 1.41e-05, "loss": 0.0442, "step": 236},
    {"epoch": 0.8106028217186833, "grad_norm": 0.43277663891476426, "learning_rate": 1.41e-05, "loss": 0.0409, "step": 237},
    {"epoch": 0.8140230867892262, "grad_norm": 0.4443462520817696, "learning_rate": 1.41e-05, "loss": 0.0465, "step": 238},
    {"epoch": 0.8174433518597691, "grad_norm": 0.43839684525150946, "learning_rate": 1.41e-05, "loss": 0.0415, "step": 239},
    {"epoch": 0.8208636169303121, "grad_norm": 0.43297499486580887, "learning_rate": 1.41e-05, "loss": 0.0445, "step": 240},
    {"epoch": 0.8242838820008551, "grad_norm": 0.38283138235459124, "learning_rate": 1.41e-05, "loss": 0.0404, "step": 241},
    {"epoch": 0.827704147071398, "grad_norm": 0.40333504070789256, "learning_rate": 1.41e-05, "loss": 0.0373, "step": 242},
    {"epoch": 0.831124412141941, "grad_norm": 0.40950403615291003, "learning_rate": 1.41e-05, "loss": 0.0423, "step": 243},
    {"epoch": 0.8345446772124839, "grad_norm": 0.4314317792707956, "learning_rate": 1.41e-05, "loss": 0.0424, "step": 244},
    {"epoch": 0.837964942283027, "grad_norm": 0.4189847423801003, "learning_rate": 1.41e-05, "loss": 0.0434, "step": 245},
    {"epoch": 0.8413852073535699, "grad_norm": 0.4593355477826361, "learning_rate": 1.41e-05, "loss": 0.0456, "step": 246},
    {"epoch": 0.8448054724241129, "grad_norm": 0.43148788855113257, "learning_rate": 1.41e-05, "loss": 0.0429, "step": 247},
    {"epoch": 0.8482257374946558, "grad_norm": 0.41015663281431336, "learning_rate": 1.41e-05, "loss": 0.0411, "step": 248},
    {"epoch": 0.8516460025651988, "grad_norm": 0.39479744422344626, "learning_rate": 1.41e-05, "loss": 0.0408, "step": 249},
    {"epoch": 0.8550662676357418, "grad_norm": 0.49034951176740266, "learning_rate": 1.41e-05, "loss": 0.056, "step": 250},
    {"epoch": 0.8584865327062847, "grad_norm": 0.4241786968876316, "learning_rate": 1.41e-05, "loss": 0.0414, "step": 251},
    {"epoch": 0.8619067977768277, "grad_norm": 0.40856481199229994, "learning_rate": 1.41e-05, "loss": 0.0384, "step": 252},
    {"epoch": 0.8653270628473707, "grad_norm": 0.44863803613683295, "learning_rate": 1.41e-05, "loss": 0.0454, "step": 253},
    {"epoch": 0.8687473279179136, "grad_norm": 0.4315245075802853, "learning_rate": 1.41e-05, "loss": 0.0425, "step": 254},
    {"epoch": 0.8721675929884566, "grad_norm": 0.4300931739828693, "learning_rate": 1.41e-05, "loss": 0.0386, "step": 255},
    {"epoch": 0.8755878580589995, "grad_norm": 0.41516523943726863, "learning_rate": 1.41e-05, "loss": 0.044, "step": 256},
    {"epoch": 0.8790081231295426, "grad_norm": 0.41978138385172276, "learning_rate": 1.41e-05, "loss": 0.0376, "step": 257},
    {"epoch": 0.8824283882000855, "grad_norm": 0.38286432398443565, "learning_rate": 1.41e-05, "loss": 0.0357, "step": 258},
    {"epoch": 0.8858486532706284, "grad_norm": 1.0190380922512265, "learning_rate": 1.41e-05, "loss": 0.0413, "step": 259},
    {"epoch": 0.8892689183411714, "grad_norm": 0.46857802366186146, "learning_rate": 1.41e-05, "loss": 0.0468, "step": 260},
    {"epoch": 0.8926891834117144, "grad_norm": 0.38236119116632356, "learning_rate": 1.41e-05, "loss": 0.0383, "step": 261},
    {"epoch": 0.8961094484822574, "grad_norm": 0.4551658885616909, "learning_rate": 1.41e-05, "loss": 0.0453, "step": 262},
    {"epoch": 0.8995297135528003, "grad_norm": 0.4286695716016865, "learning_rate": 1.41e-05, "loss": 0.0408, "step": 263},
    {"epoch": 0.9029499786233434, "grad_norm": 0.43951997404486826, "learning_rate": 1.41e-05, "loss": 0.0395, "step": 264},
    {"epoch": 0.9063702436938863, "grad_norm": 0.4395704115846384, "learning_rate": 1.41e-05, "loss": 0.0419, "step": 265},
    {"epoch": 0.9097905087644292, "grad_norm": 0.3839941648623069, "learning_rate": 1.41e-05, "loss": 0.0374, "step": 266},
    {"epoch": 0.9132107738349722, "grad_norm": 0.922460042226267, "learning_rate": 1.41e-05, "loss": 0.0372, "step": 267},
    {"epoch": 0.9166310389055152, "grad_norm": 0.44811463225682846, "learning_rate": 1.41e-05, "loss": 0.043, "step": 268},
    {"epoch": 0.9200513039760582, "grad_norm": 0.4172986063615246, "learning_rate": 1.41e-05, "loss": 0.0365, "step": 269},
    {"epoch": 0.9234715690466011, "grad_norm": 0.37219225103541853, "learning_rate": 1.41e-05, "loss": 0.0377, "step": 270},
    {"epoch": 0.926891834117144, "grad_norm": 0.3447899677572012, "learning_rate": 1.41e-05, "loss": 0.0317, "step": 271},
    {"epoch": 0.9303120991876871, "grad_norm": 0.3964175357323583, "learning_rate": 1.41e-05, "loss": 0.034, "step": 272},
    {"epoch": 0.93373236425823, "grad_norm": 0.36028858741706155, "learning_rate": 1.41e-05, "loss": 0.0351, "step": 273},
    {"epoch": 0.937152629328773, "grad_norm": 0.46256283545817356, "learning_rate": 1.41e-05, "loss": 0.041, "step": 274},
    {"epoch": 0.9405728943993159, "grad_norm": 0.3781578237183448, "learning_rate": 1.41e-05, "loss": 0.0366, "step": 275},
    {"epoch": 0.943993159469859, "grad_norm": 0.3818392999731623, "learning_rate": 1.41e-05, "loss": 0.0321, "step": 276},
    {"epoch": 0.9474134245404019, "grad_norm": 0.38568204387768645, "learning_rate": 1.41e-05, "loss": 0.0336, "step": 277},
    {"epoch": 0.9508336896109448, "grad_norm": 0.3905057560704898, "learning_rate": 1.41e-05, "loss": 0.0349, "step": 278},
    {"epoch": 0.9542539546814878, "grad_norm": 0.36210340527903007, "learning_rate": 1.41e-05, "loss": 0.0351, "step": 279},
    {"epoch": 0.9576742197520308, "grad_norm": 0.3784578196301756, "learning_rate": 1.41e-05, "loss": 0.0382, "step": 280},
    {"epoch": 0.9610944848225738, "grad_norm": 0.4415630974468222, "learning_rate": 1.41e-05, "loss": 0.0417, "step": 281},
    {"epoch": 0.9645147498931167, "grad_norm": 0.36220744727441767, "learning_rate": 1.41e-05, "loss": 0.0336, "step": 282},
    {"epoch": 0.9679350149636596, "grad_norm": 0.4266084587936558, "learning_rate": 1.41e-05, "loss": 0.0384, "step": 283},
    {"epoch": 0.9713552800342027, "grad_norm": 0.3599843427563046, "learning_rate": 1.41e-05, "loss": 0.035, "step": 284},
    {"epoch": 0.9747755451047456, "grad_norm": 0.38722401771389997, "learning_rate": 1.41e-05, "loss": 0.0348, "step": 285},
    {"epoch": 0.9781958101752886, "grad_norm": 0.40946001056055625, "learning_rate": 1.41e-05, "loss": 0.0373, "step": 286},
    {"epoch": 0.9816160752458315, "grad_norm": 0.41550323303820474, "learning_rate": 1.41e-05, "loss": 0.04, "step": 287},
    {"epoch": 0.9850363403163745, "grad_norm": 0.37048272944847027, "learning_rate": 1.41e-05, "loss": 0.0309, "step": 288},
    {"epoch": 0.9884566053869175, "grad_norm": 0.3620011070515116, "learning_rate": 1.41e-05, "loss": 0.0341, "step": 289},
    {"epoch": 0.9918768704574604, "grad_norm": 0.4132102658215653, "learning_rate": 1.41e-05, "loss": 0.0383, "step": 290},
    {"epoch": 0.9952971355280035, "grad_norm": 0.3852351778306183, "learning_rate": 1.41e-05, "loss": 0.0357, "step": 291},
    {"epoch": 0.9987174005985464, "grad_norm": 0.37414254856973184, "learning_rate": 1.41e-05, "loss": 0.0348, "step": 292},
    {"epoch": 1.0021376656690895, "grad_norm": 0.31841258787737364, "learning_rate": 1.41e-05, "loss": 0.0308, "step": 293},
    {"epoch": 1.0055579307396323, "grad_norm": 0.30923263652050564, "learning_rate": 1.41e-05, "loss": 0.0263, "step": 294},
    {"epoch": 1.0089781958101753, "grad_norm": 0.3118052788978316, "learning_rate": 1.41e-05, "loss": 0.0243, "step": 295},
    {"epoch": 1.0123984608807182, "grad_norm": 0.2908942931886208, "learning_rate": 1.41e-05, "loss": 0.0239, "step": 296},
    {"epoch": 1.0158187259512612, "grad_norm": 0.2904620406307295, "learning_rate": 1.41e-05, "loss": 0.0222, "step": 297},
    {"epoch": 1.0192389910218043, "grad_norm": 0.28692807204183246, "learning_rate": 1.41e-05, "loss": 0.0225, "step": 298},
    {"epoch": 1.022659256092347, "grad_norm": 0.32077856448530445, "learning_rate": 1.41e-05, "loss": 0.025, "step": 299},
    {"epoch": 1.0260795211628901, "grad_norm": 0.28877834524497115, "learning_rate": 1.41e-05, "loss": 0.0214, "step": 300},
    {"epoch": 1.0294997862334332, "grad_norm": 0.27974144729840145, "learning_rate": 1.41e-05, "loss": 0.0226, "step": 301},
    {"epoch": 1.032920051303976, "grad_norm": 0.2976755981973845, "learning_rate": 1.41e-05, "loss": 0.025, "step": 302},
    {"epoch": 1.036340316374519, "grad_norm": 0.2909288296995041, "learning_rate": 1.41e-05, "loss": 0.0241, "step": 303},
    {"epoch": 1.039760581445062, "grad_norm": 0.26580362507255084, "learning_rate": 1.41e-05, "loss": 0.0227, "step": 304},
    {"epoch": 1.043180846515605, "grad_norm": 0.3375634201266733, "learning_rate": 1.41e-05, "loss": 0.0261, "step": 305},
    {"epoch": 1.046601111586148, "grad_norm": 0.2797681439324777, "learning_rate": 1.41e-05, "loss": 0.023, "step": 306},
    {"epoch": 1.0500213766566908, "grad_norm": 0.3197672217160271, "learning_rate": 1.41e-05, "loss": 0.0258, "step": 307},
    {"epoch": 1.0534416417272339, "grad_norm": 0.2834421342617412, "learning_rate": 1.41e-05, "loss": 0.0231, "step": 308},
    {"epoch": 1.056861906797777, "grad_norm": 0.30638464271397325, "learning_rate": 1.41e-05, "loss": 0.0237, "step": 309},
    {"epoch": 1.0602821718683197, "grad_norm": 0.2772969145815675, "learning_rate": 1.41e-05, "loss": 0.0227, "step": 310},
    {"epoch": 1.0637024369388628, "grad_norm": 0.27940000335528603, "learning_rate": 1.41e-05, "loss": 0.0232, "step": 311},
    {"epoch": 1.0671227020094056, "grad_norm": 0.2793001905583242, "learning_rate": 1.41e-05, "loss": 0.024, "step": 312},
    {"epoch": 1.0705429670799487, "grad_norm": 0.2615333153159237, "learning_rate": 1.41e-05, "loss": 0.0219, "step": 313},
    {"epoch": 1.0739632321504917, "grad_norm": 0.2876480036224866, "learning_rate": 1.41e-05, "loss": 0.0253, "step": 314},
    {"epoch": 1.0773834972210345, "grad_norm": 0.30478384773242695, "learning_rate": 1.41e-05, "loss": 0.0252, "step": 315},
    {"epoch": 1.0808037622915776, "grad_norm": 0.2528846190302502, "learning_rate": 1.41e-05, "loss": 0.0206, "step": 316},
    {"epoch": 1.0842240273621206, "grad_norm": 0.2435449168695854, "learning_rate": 1.41e-05, "loss": 0.0224, "step": 317},
    {"epoch": 1.0876442924326635, "grad_norm": 0.3079236522189953, "learning_rate": 1.41e-05, "loss": 0.0253, "step": 318},
    {"epoch": 1.0910645575032065, "grad_norm": 0.2675432502865711, "learning_rate": 1.41e-05, "loss": 0.0239, "step": 319},
    {"epoch": 1.0944848225737496, "grad_norm": 0.3004587131742902, "learning_rate": 1.41e-05, "loss": 0.0239, "step": 320},
    {"epoch": 1.0979050876442924, "grad_norm": 0.29332378399603554, "learning_rate": 1.41e-05, "loss": 0.0247, "step": 321},
    {"epoch": 1.1013253527148354, "grad_norm": 0.23762802516720216, "learning_rate": 1.41e-05, "loss": 0.0242, "step": 322},
    {"epoch": 1.1047456177853783, "grad_norm": 0.26342688417785715, "learning_rate": 1.41e-05, "loss": 0.0244, "step": 323},
    {"epoch": 1.1081658828559213, "grad_norm": 0.3055498089807232, "learning_rate": 1.41e-05, "loss": 0.0265, "step": 324},
    {"epoch": 1.1115861479264644, "grad_norm": 0.23787182811525248, "learning_rate": 1.41e-05, "loss": 0.0226, "step": 325},
    {"epoch": 1.1150064129970072, "grad_norm": 0.2647940637873088, "learning_rate": 1.41e-05, "loss": 0.0242, "step": 326},
    {"epoch": 1.1184266780675503, "grad_norm": 0.2913294201237873, "learning_rate": 1.41e-05, "loss": 0.0261, "step": 327},
    {"epoch": 1.1218469431380933, "grad_norm": 0.2581915579915153, "learning_rate": 1.41e-05, "loss": 0.0228, "step": 328},
    {"epoch": 1.1252672082086361, "grad_norm": 0.2500171384316944, "learning_rate": 1.41e-05, "loss": 0.0206, "step": 329},
    {"epoch": 1.1286874732791792, "grad_norm": 0.2689440388389032, "learning_rate": 1.41e-05, "loss": 0.0237, "step": 330},
    {"epoch": 1.1321077383497222, "grad_norm": 0.2551497009606492, "learning_rate": 1.41e-05, "loss": 0.0216, "step": 331},
    {"epoch": 1.135528003420265, "grad_norm": 0.25757413049163996, "learning_rate": 1.41e-05, "loss": 0.0224, "step": 332},
    {"epoch": 1.138948268490808, "grad_norm": 0.2699506485343775, "learning_rate": 1.41e-05, "loss": 0.0218, "step": 333},
    {"epoch": 1.142368533561351, "grad_norm": 0.25767905743807257, "learning_rate": 1.41e-05, "loss": 0.0227, "step": 334},
    {"epoch": 1.145788798631894, "grad_norm": 0.2273460686067317, "learning_rate": 1.41e-05, "loss": 0.0226, "step": 335},
    {"epoch": 1.149209063702437, "grad_norm": 0.31200680170359746, "learning_rate": 1.41e-05, "loss": 0.0196, "step": 336},
    {"epoch": 1.1526293287729799, "grad_norm": 0.23994204361556526, "learning_rate": 1.41e-05, "loss": 0.022, "step": 337},
    {"epoch": 1.156049593843523, "grad_norm": 0.2867059420441838, "learning_rate": 1.41e-05, "loss": 0.0256, "step": 338},
    {"epoch": 1.1594698589140657, "grad_norm": 0.2523475692407159, "learning_rate": 1.41e-05, "loss": 0.0212, "step": 339},
    {"epoch": 1.1628901239846088, "grad_norm": 0.24499307458764866, "learning_rate": 1.41e-05, "loss": 0.0198, "step": 340},
    {"epoch": 1.1663103890551518, "grad_norm": 0.2436283905820348, "learning_rate": 1.41e-05, "loss": 0.0216, "step": 341},
    {"epoch": 1.1697306541256947, "grad_norm": 0.25602523324809817, "learning_rate": 1.41e-05, "loss": 0.0216, "step": 342},
    {"epoch": 1.1731509191962377, "grad_norm": 0.25021177697750296, "learning_rate": 1.41e-05, "loss": 0.0233, "step": 343},
    {"epoch": 1.1765711842667808, "grad_norm": 0.28379855080073285, "learning_rate": 1.41e-05, "loss": 0.0259, "step": 344},
    {"epoch": 1.1799914493373236, "grad_norm": 0.25642024168179856, "learning_rate": 1.41e-05, "loss": 0.0208, "step": 345},
    {"epoch": 1.1834117144078666, "grad_norm": 0.24053576849839317, "learning_rate": 1.41e-05, "loss": 0.0206, "step": 346},
    {"epoch": 1.1868319794784097, "grad_norm": 0.26458302891901536, "learning_rate": 1.41e-05, "loss": 0.025, "step": 347},
    {"epoch": 1.1902522445489525, "grad_norm": 0.25340954518832737, "learning_rate": 1.41e-05, "loss": 0.0232, "step": 348},
    {"epoch": 1.1936725096194956, "grad_norm": 0.2822910057628071, "learning_rate": 1.41e-05, "loss": 0.0258, "step": 349},
    {"epoch": 1.1970927746900384, "grad_norm": 0.24401825117116366, "learning_rate": 1.41e-05, "loss": 0.0207, "step": 350},
    {"epoch": 1.2005130397605814, "grad_norm": 0.25351547147013626, "learning_rate": 1.41e-05, "loss": 0.0232, "step": 351},
    {"epoch": 1.2039333048311245, "grad_norm": 0.2514277079230116, "learning_rate": 1.41e-05, "loss": 0.0236, "step": 352},
    {"epoch": 1.2073535699016673, "grad_norm": 0.2499624840027022, "learning_rate": 1.41e-05, "loss": 0.0218, "step": 353},
    {"epoch": 1.2107738349722104, "grad_norm": 0.25541034452698597, "learning_rate": 1.41e-05, "loss": 0.0226, "step": 354},
    {"epoch": 1.2141941000427534, "grad_norm": 0.26116617349172355, "learning_rate": 1.41e-05, "loss": 0.0233, "step": 355},
    {"epoch": 1.2176143651132962, "grad_norm": 0.3144220810007225, "learning_rate": 1.41e-05, "loss": 0.0238, "step": 356},
    {"epoch": 1.2210346301838393, "grad_norm": 0.2397648420075889, "learning_rate": 1.41e-05, "loss": 0.0218, "step": 357},
    {"epoch": 1.2244548952543823, "grad_norm": 0.23056631938102404, "learning_rate": 1.41e-05, "loss": 0.0211, "step": 358},
    {"epoch": 1.2278751603249252, "grad_norm": 0.6359133071495683, "learning_rate": 1.41e-05, "loss": 0.0221, "step": 359},
    {"epoch": 1.2312954253954682, "grad_norm": 0.2494306454740628, "learning_rate": 1.41e-05, "loss": 0.0212, "step": 360},
    {"epoch": 1.234715690466011, "grad_norm": 0.2661512294789303, "learning_rate": 1.41e-05, "loss": 0.0254, "step": 361},
    {"epoch": 1.238135955536554, "grad_norm": 0.7710920857502603, "learning_rate": 1.41e-05, "loss": 0.025, "step": 362},
    {"epoch": 1.2415562206070971, "grad_norm": 0.3942637887185519, "learning_rate": 1.41e-05, "loss": 0.0236, "step": 363},
    {"epoch": 1.24497648567764, "grad_norm": 0.27087010860427124, "learning_rate": 1.41e-05, "loss": 0.0252, "step": 364},
    {"epoch": 1.248396750748183, "grad_norm": 0.24919842057781497, "learning_rate": 1.41e-05, "loss": 0.022, "step": 365},
    {"epoch": 1.2518170158187258, "grad_norm": 0.247824008548709, "learning_rate": 1.41e-05, "loss": 0.0235, "step": 366},
    {"epoch": 1.255237280889269, "grad_norm": 0.2113113650016047, "learning_rate": 1.41e-05, "loss": 0.0202, "step": 367},
    {"epoch": 1.258657545959812, "grad_norm": 0.2259169472785117, "learning_rate": 1.41e-05, "loss": 0.0192, "step": 368},
    {"epoch": 1.262077811030355, "grad_norm": 0.2752050707801685, "learning_rate": 1.41e-05, "loss": 0.0254, "step": 369},
    {"epoch": 1.2654980761008978, "grad_norm": 0.23830392409535447, "learning_rate": 1.41e-05, "loss": 0.0229, "step": 370},
    {"epoch": 1.2689183411714409, "grad_norm": 0.24302497993054667, "learning_rate": 1.41e-05, "loss": 0.0219, "step": 371},
    {"epoch": 1.2723386062419837, "grad_norm": 0.24504347526605771, "learning_rate": 1.41e-05, "loss": 0.0224, "step": 372},
    {"epoch": 1.2757588713125267, "grad_norm": 0.2346626865644856, "learning_rate": 1.41e-05, "loss": 0.0209, "step": 373},
    {"epoch": 1.2791791363830698, "grad_norm": 0.24245589828220748, "learning_rate": 1.41e-05, "loss": 0.0218, "step": 374},
    {"epoch": 1.2825994014536126, "grad_norm": 0.282140360657227, "learning_rate": 1.41e-05, "loss": 0.0234, "step": 375},
    {"epoch": 1.2860196665241557, "grad_norm": 0.26692130820889887, "learning_rate": 1.41e-05, "loss": 0.02, "step": 376},
    {"epoch": 1.2894399315946985, "grad_norm": 0.2691231899902035, "learning_rate": 1.41e-05, "loss": 0.0221, "step": 377},
    {"epoch": 1.2928601966652415, "grad_norm": 0.2281687474950334, "learning_rate": 1.41e-05, "loss": 0.0222, "step": 378},
    {"epoch": 1.2962804617357846, "grad_norm": 0.373488505328009, "learning_rate": 1.41e-05, "loss": 0.0217, "step": 379},
    {"epoch": 1.2997007268063274, "grad_norm": 0.2345889821941022, "learning_rate": 1.41e-05, "loss": 0.021, "step": 380},
    {"epoch": 1.3031209918768705, "grad_norm": 0.2105687923684266, "learning_rate": 1.41e-05, "loss": 0.0204, "step": 381},
    {"epoch": 1.3065412569474133, "grad_norm": 0.23464935916200427, "learning_rate": 1.41e-05, "loss": 0.0213, "step": 382},
    {"epoch": 1.3099615220179563, "grad_norm": 0.23646699592209017, "learning_rate": 1.41e-05, "loss": 0.0211, "step": 383},
    {"epoch": 1.3133817870884994, "grad_norm": 0.2631733185003215, "learning_rate": 1.41e-05, "loss": 0.0219, "step": 384},
    {"epoch": 1.3168020521590424, "grad_norm": 0.25931322588284866, "learning_rate": 1.41e-05, "loss": 0.0229, "step": 385},
    {"epoch": 1.3202223172295853, "grad_norm": 0.28688530125524797, "learning_rate": 1.41e-05, "loss": 0.0226, "step": 386},
    {"epoch": 1.3236425823001283, "grad_norm": 0.25729658498109925, "learning_rate": 1.41e-05, "loss": 0.0229, "step": 387},
    {"epoch": 1.3270628473706712, "grad_norm": 0.25760420815172724, "learning_rate": 1.41e-05, "loss": 0.0207, "step": 388},
    {"epoch": 1.3304831124412142, "grad_norm": 0.32657940169239236, "learning_rate": 1.41e-05, "loss": 0.0217, "step": 389},
    {"epoch": 1.3339033775117572, "grad_norm": 0.22557643602886415, "learning_rate": 1.41e-05, "loss": 0.0204, "step": 390},
    {"epoch": 1.3373236425823, "grad_norm": 0.2661843657709089, "learning_rate": 1.41e-05, "loss": 0.0243, "step": 391},
    {"epoch": 1.3407439076528431, "grad_norm": 0.2974504589558172, "learning_rate": 1.41e-05, "loss": 0.0241, "step": 392},
    {"epoch": 1.344164172723386, "grad_norm": 0.26843823846639436, "learning_rate": 1.41e-05, "loss": 0.0234, "step": 393},
    {"epoch": 1.347584437793929, "grad_norm": 0.3218845205293079, "learning_rate": 1.41e-05, "loss": 0.0221, "step": 394},
    {"epoch": 1.351004702864472, "grad_norm": 0.2428094674581459, "learning_rate": 1.41e-05, "loss": 0.0222, "step": 395},
    {"epoch": 1.354424967935015, "grad_norm": 0.2455927243682846, "learning_rate": 1.41e-05, "loss": 0.0211, "step": 396},
    {"epoch": 1.357845233005558, "grad_norm": 0.2733123552071639, "learning_rate": 1.41e-05, "loss": 0.0212, "step": 397},
    {"epoch": 1.361265498076101, "grad_norm": 0.2659612332359629, "learning_rate": 1.41e-05, "loss": 0.0203, "step": 398},
    {"epoch": 1.3646857631466438, "grad_norm": 0.25254417828478626, "learning_rate": 1.41e-05, "loss": 0.0201, "step": 399},
    {"epoch": 1.3681060282171869, "grad_norm": 0.2598602588125554, "learning_rate": 1.41e-05, "loss": 0.0217, "step": 400},
    {"epoch": 1.37152629328773, "grad_norm": 0.25849620548333013, "learning_rate": 1.41e-05, "loss": 0.0209, "step": 401},
{ |
|
"epoch": 1.3749465583582727, |
|
"grad_norm": 0.3137023430125288, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0211, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.3783668234288158, |
|
"grad_norm": 0.2113162287336589, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0195, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.3817870884993586, |
|
"grad_norm": 0.2503171178420045, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0234, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.3852073535699017, |
|
"grad_norm": 0.23119898543579737, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0209, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.3886276186404447, |
|
"grad_norm": 0.26039590115583117, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0244, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.3920478837109875, |
|
"grad_norm": 0.23886269607593336, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0223, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.3954681487815306, |
|
"grad_norm": 0.2714710828662534, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0247, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.3988884138520734, |
|
"grad_norm": 0.27319555229782644, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0244, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.4023086789226165, |
|
"grad_norm": 0.24019773624911636, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0218, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.4057289439931595, |
|
"grad_norm": 0.2708671308101268, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0219, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.4091492090637026, |
|
"grad_norm": 0.22702757974948617, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0198, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.4125694741342454, |
|
"grad_norm": 0.25555770375998416, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0224, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.4159897392047884, |
|
"grad_norm": 0.23780595691689027, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0213, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.4194100042753313, |
|
"grad_norm": 0.26451555701259444, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0211, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.4228302693458743, |
|
"grad_norm": 0.24436230077463678, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0226, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.4262505344164174, |
|
"grad_norm": 0.2216762882841438, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.021, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.4296707994869602, |
|
"grad_norm": 0.26405628674467824, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0219, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.4330910645575032, |
|
"grad_norm": 0.23921814427898216, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.021, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.436511329628046, |
|
"grad_norm": 0.265453432302215, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0226, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.4399315946985891, |
|
"grad_norm": 0.24492268359315303, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0227, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.4433518597691322, |
|
"grad_norm": 0.25191935351384814, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.4467721248396752, |
|
"grad_norm": 0.27197993846345997, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0229, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.450192389910218, |
|
"grad_norm": 0.2075718815542033, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0183, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.453612654980761, |
|
"grad_norm": 0.26376818444878, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0221, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.457032920051304, |
|
"grad_norm": 0.26080564628665626, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0229, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.460453185121847, |
|
"grad_norm": 0.2434588148667514, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.46387345019239, |
|
"grad_norm": 0.20943782903685337, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0185, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.4672937152629328, |
|
"grad_norm": 0.24301387125104526, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.470713980333476, |
|
"grad_norm": 0.25766203422027834, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0237, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.4741342454040187, |
|
"grad_norm": 0.21556702000374744, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0213, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.4775545104745618, |
|
"grad_norm": 0.23784909822124217, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0207, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.4809747755451048, |
|
"grad_norm": 0.22414128247781562, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0198, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.4843950406156476, |
|
"grad_norm": 0.23386987857579358, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0201, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.4878153056861907, |
|
"grad_norm": 0.2853961947266083, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.023, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.4912355707567335, |
|
"grad_norm": 0.24974092416116495, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0209, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.4946558358272766, |
|
"grad_norm": 0.23615654630506394, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.4980761008978196, |
|
"grad_norm": 0.2284456001474283, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0195, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.5014963659683627, |
|
"grad_norm": 0.26063959990204033, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0258, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.5049166310389055, |
|
"grad_norm": 0.24423575575966894, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.5083368961094483, |
|
"grad_norm": 0.24270403980054853, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0198, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.5117571611799914, |
|
"grad_norm": 0.24660594248828135, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.5151774262505344, |
|
"grad_norm": 0.23764056229245814, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0185, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.5185976913210775, |
|
"grad_norm": 0.2327212499624357, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0205, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.5220179563916203, |
|
"grad_norm": 0.2644477025039793, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0222, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.5254382214621633, |
|
"grad_norm": 0.2385202884230959, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.023, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.5288584865327062, |
|
"grad_norm": 0.24308327876991292, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0198, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.5322787516032492, |
|
"grad_norm": 0.30634205660699, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.026, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.5356990166737923, |
|
"grad_norm": 0.21217473130594544, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0199, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.5391192817443353, |
|
"grad_norm": 0.2658497297922587, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0221, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.5425395468148781, |
|
"grad_norm": 0.21481338352800414, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0197, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.545959811885421, |
|
"grad_norm": 0.27821761611964035, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0246, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.549380076955964, |
|
"grad_norm": 0.2416104328737188, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0216, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.552800342026507, |
|
"grad_norm": 0.2322000952546936, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0219, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.5562206070970501, |
|
"grad_norm": 0.23752597721176905, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0218, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.559640872167593, |
|
"grad_norm": 0.24308738119455534, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0205, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.563061137238136, |
|
"grad_norm": 0.26073824102297316, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0247, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.5664814023086788, |
|
"grad_norm": 0.24050011455971732, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0223, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.5699016673792219, |
|
"grad_norm": 0.21610207139710053, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0192, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.573321932449765, |
|
"grad_norm": 0.25801793315676924, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0236, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.576742197520308, |
|
"grad_norm": 0.21228131497792052, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0191, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.5801624625908508, |
|
"grad_norm": 0.25197778439538576, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.5835827276613936, |
|
"grad_norm": 0.2565258961786049, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0219, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.5870029927319367, |
|
"grad_norm": 0.2559923791329211, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0217, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.5904232578024797, |
|
"grad_norm": 0.2535527705987336, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0221, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.5938435228730228, |
|
"grad_norm": 0.28495146888910583, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0272, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.5972637879435656, |
|
"grad_norm": 0.26151357261060504, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.6006840530141084, |
|
"grad_norm": 0.24889528453574525, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0218, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.6041043180846515, |
|
"grad_norm": 0.21219419933729458, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0176, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.6075245831551945, |
|
"grad_norm": 0.24995940249087767, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0207, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.6109448482257376, |
|
"grad_norm": 0.23890689663178338, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0204, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.6143651132962804, |
|
"grad_norm": 0.23090495982296502, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0209, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.6177853783668235, |
|
"grad_norm": 0.24984846656221682, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0191, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.6212056434373663, |
|
"grad_norm": 0.2864149291763989, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0243, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.6246259085079093, |
|
"grad_norm": 0.23371681508272213, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0197, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.6280461735784524, |
|
"grad_norm": 0.21807560914421245, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.019, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.6314664386489954, |
|
"grad_norm": 0.24325192491721911, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0212, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.6348867037195383, |
|
"grad_norm": 0.21802834032775958, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0183, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.638306968790081, |
|
"grad_norm": 0.25015961868430003, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0206, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.6417272338606241, |
|
"grad_norm": 0.2047993695964016, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0166, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.6451474989311672, |
|
"grad_norm": 0.24407449809464615, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0209, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.6485677640017102, |
|
"grad_norm": 0.22905836500660803, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0197, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.651988029072253, |
|
"grad_norm": 0.2302659533712275, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0184, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.655408294142796, |
|
"grad_norm": 0.22469778783055896, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.658828559213339, |
|
"grad_norm": 0.2403880470710616, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0184, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.662248824283882, |
|
"grad_norm": 0.21080166415704205, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0194, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.665669089354425, |
|
"grad_norm": 0.24366180394977457, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0212, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.669089354424968, |
|
"grad_norm": 0.2445725370169137, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0201, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.672509619495511, |
|
"grad_norm": 0.2400419596996237, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0217, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.6759298845660537, |
|
"grad_norm": 0.22845732992118456, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0193, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.6793501496365968, |
|
"grad_norm": 0.21963101467880122, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0205, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.6827704147071398, |
|
"grad_norm": 0.23832272298065416, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.6861906797776829, |
|
"grad_norm": 0.2701770424720032, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0245, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.6896109448482257, |
|
"grad_norm": 0.21999352850800136, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0191, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.6930312099187685, |
|
"grad_norm": 0.2303202011396172, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0206, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.6964514749893116, |
|
"grad_norm": 0.23207638496444852, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0196, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.6998717400598546, |
|
"grad_norm": 0.27476502715987833, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0243, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.7032920051303977, |
|
"grad_norm": 0.25943127784425596, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0209, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.7067122702009405, |
|
"grad_norm": 0.2582673258172837, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0213, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.7101325352714836, |
|
"grad_norm": 0.2270336558731971, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0207, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.7135528003420264, |
|
"grad_norm": 0.21716332073489333, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.017, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.7169730654125694, |
|
"grad_norm": 0.22361076006822656, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0198, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.7203933304831125, |
|
"grad_norm": 0.25453906115757163, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.7238135955536555, |
|
"grad_norm": 0.2639274732447762, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0211, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.7272338606241984, |
|
"grad_norm": 0.25322752853555974, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0228, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.7306541256947412, |
|
"grad_norm": 0.23285923741403275, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0205, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.7340743907652842, |
|
"grad_norm": 0.2323064412599781, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0195, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.7374946558358273, |
|
"grad_norm": 0.23653121619208753, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0211, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.7409149209063703, |
|
"grad_norm": 0.2576532470063006, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0216, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.7443351859769132, |
|
"grad_norm": 0.3031982152602418, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0212, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.7477554510474562, |
|
"grad_norm": 0.21353603367638405, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0187, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.751175716117999, |
|
"grad_norm": 0.23897983103928835, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0211, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.754595981188542, |
|
"grad_norm": 0.26759279857241974, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0225, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.7580162462590851, |
|
"grad_norm": 0.21204297476027956, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0182, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.7614365113296282, |
|
"grad_norm": 0.2522156014013123, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0224, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.764856776400171, |
|
"grad_norm": 0.2483681502861828, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0225, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.7682770414707139, |
|
"grad_norm": 0.2757743080951287, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0252, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.771697306541257, |
|
"grad_norm": 0.20783407363518988, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0189, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.7751175716118, |
|
"grad_norm": 0.25743244499425244, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.778537836682343, |
|
"grad_norm": 0.23017428673046214, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0195, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.7819581017528858, |
|
"grad_norm": 0.2315895066049246, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0178, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.7853783668234287, |
|
"grad_norm": 0.22782043412403868, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.7887986318939717, |
|
"grad_norm": 0.2724892881429743, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0224, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.7922188969645148, |
|
"grad_norm": 0.28330092514107247, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.02, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.7956391620350578, |
|
"grad_norm": 0.23561517254595046, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0199, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.7990594271056009, |
|
"grad_norm": 0.27080028676550905, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0213, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.8024796921761437, |
|
"grad_norm": 0.20908394348220563, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0181, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.8058999572466865, |
|
"grad_norm": 0.24764828335378633, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.8093202223172296, |
|
"grad_norm": 0.29733366903532993, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0218, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.8127404873877726, |
|
"grad_norm": 0.2708827452564214, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0226, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.8161607524583157, |
|
"grad_norm": 0.2887461777468973, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0226, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.8195810175288585, |
|
"grad_norm": 0.2774042332926017, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0246, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.8230012825994013, |
|
"grad_norm": 0.23877829686783267, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0202, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.8264215476699444, |
|
"grad_norm": 0.2544233995482896, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0204, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.8298418127404874, |
|
"grad_norm": 0.26365092898440745, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0223, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.8332620778110305, |
|
"grad_norm": 0.21178009134118125, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.018, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.8366823428815733, |
|
"grad_norm": 0.27115664081831004, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0211, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.8401026079521163, |
|
"grad_norm": 0.2653048117342068, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0212, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.8435228730226592, |
|
"grad_norm": 0.2757728122775812, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0204, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.8469431380932022, |
|
"grad_norm": 0.2664986190346045, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0207, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.8503634031637453, |
|
"grad_norm": 0.265518302380727, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0179, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.8537836682342883, |
|
"grad_norm": 0.2895623799588168, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0228, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.8572039333048311, |
|
"grad_norm": 0.23284728474850006, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0192, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.860624198375374, |
|
"grad_norm": 0.21864913411739378, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0178, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.864044463445917, |
|
"grad_norm": 0.25369925309969443, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0204, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.86746472851646, |
|
"grad_norm": 0.22162789215208448, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0214, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.870884993587003, |
|
"grad_norm": 0.228575157771765, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.874305258657546, |
|
"grad_norm": 0.29177202889082, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0231, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.8777255237280888, |
|
"grad_norm": 0.2526426700464145, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.022, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.8811457887986318, |
|
"grad_norm": 0.25824463173696005, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0238, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.8845660538691749, |
|
"grad_norm": 0.22931141277969364, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0195, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.887986318939718, |
|
"grad_norm": 0.20128540781283585, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.018, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.891406584010261, |
|
"grad_norm": 0.2726453682164885, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.8948268490808038, |
|
"grad_norm": 0.2379175413754306, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.02, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.8982471141513466, |
|
"grad_norm": 0.2657340317097815, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0215, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.9016673792218897, |
|
"grad_norm": 0.2469655254797632, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0218, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.9050876442924327, |
|
"grad_norm": 0.2306898046638619, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0206, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.9085079093629758, |
|
"grad_norm": 0.2501966986368517, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.9119281744335186, |
|
"grad_norm": 0.26337448659750273, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0224, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.9153484395040614, |
|
"grad_norm": 0.23788126235745702, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0207, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.9187687045746045, |
|
"grad_norm": 0.24400460810027635, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0199, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.9221889696451475, |
|
"grad_norm": 0.2524032823359455, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.02, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.9256092347156906, |
|
"grad_norm": 0.2288025108079512, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0201, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.9290294997862334, |
|
"grad_norm": 0.269452001004884, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0214, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.9324497648567764, |
|
"grad_norm": 0.2316242696085615, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0196, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.9358700299273193, |
|
"grad_norm": 0.221180291707861, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0177, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.9392902949978623, |
|
"grad_norm": 0.2607363855888593, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0192, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.9427105600684054, |
|
"grad_norm": 0.24514765767262625, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0182, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.9461308251389484, |
|
"grad_norm": 0.28916534222959817, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0207, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.9495510902094912, |
|
"grad_norm": 0.26414654167917073, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0204, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.952971355280034, |
|
"grad_norm": 0.2482098982197082, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0206, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.9563916203505771, |
|
"grad_norm": 0.2608562249261069, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.9598118854211202, |
|
"grad_norm": 0.25525358687825855, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0213, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.9632321504916632, |
|
"grad_norm": 0.23125518036524792, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0212, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.966652415562206, |
|
"grad_norm": 0.2432676298521834, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0187, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.9700726806327489, |
|
"grad_norm": 0.24502134522738972, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0194, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.973492945703292, |
|
"grad_norm": 0.26553777081307495, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0212, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.976913210773835, |
|
"grad_norm": 0.24919535132544937, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0208, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.980333475844378, |
|
"grad_norm": 0.2828767110434045, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0238, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.983753740914921, |
|
"grad_norm": 0.26780635861278174, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0225, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.987174005985464, |
|
"grad_norm": 0.26755832407143076, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0197, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.9905942710560067, |
|
"grad_norm": 0.23150201894075045, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0225, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.9940145361265498, |
|
"grad_norm": 0.2556938828383787, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0217, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.9974348011970928, |
|
"grad_norm": 0.24779362245147543, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0209, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.000855066267636, |
|
"grad_norm": 0.23220294742842815, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0205, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.004275331338179, |
|
"grad_norm": 0.19611669457431613, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.0076955964087215, |
|
"grad_norm": 0.18201675420540842, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.0111158614792646, |
|
"grad_norm": 0.2111608412027396, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0165, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.0145361265498076, |
|
"grad_norm": 0.19725473414903413, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.0179563916203507, |
|
"grad_norm": 0.21354984112177863, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0161, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.0213766566908937, |
|
"grad_norm": 0.22940862450456587, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0157, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.0247969217614363, |
|
"grad_norm": 0.18767782352642373, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.0282171868319794, |
|
"grad_norm": 0.20643159409018694, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.0316374519025224, |
|
"grad_norm": 0.2037046870595368, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0138, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.0350577169730655, |
|
"grad_norm": 0.19705696567608205, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0162, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.0384779820436085, |
|
"grad_norm": 0.21000255959562664, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.041898247114151, |
|
"grad_norm": 0.1909421240987611, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.045318512184694, |
|
"grad_norm": 0.22254593762119865, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.0487387772552372, |
|
"grad_norm": 0.18039596470753322, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.0521590423257803, |
|
"grad_norm": 0.1836910759989102, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.0555793073963233, |
|
"grad_norm": 0.2007119695404956, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.0589995724668664, |
|
"grad_norm": 0.17881220811912174, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.062419837537409, |
|
"grad_norm": 0.18131752461639056, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.065840102607952, |
|
"grad_norm": 0.2091101968035948, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.069260367678495, |
|
"grad_norm": 0.1785580403127973, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.072680632749038, |
|
"grad_norm": 0.1952402053161801, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.076100897819581, |
|
"grad_norm": 0.16849209697986273, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.079521162890124, |
|
"grad_norm": 0.18960749528358953, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.082941427960667, |
|
"grad_norm": 0.1822523628091137, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0157, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.08636169303121, |
|
"grad_norm": 0.22244344533378702, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.089781958101753, |
|
"grad_norm": 0.16677137506686548, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.093202223172296, |
|
"grad_norm": 0.19008011213315038, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.096622488242839, |
|
"grad_norm": 0.1965146825157404, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.1000427533133816, |
|
"grad_norm": 0.17095730271612053, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.1034630183839247, |
|
"grad_norm": 0.18376098918489842, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0133, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.1068832834544677, |
|
"grad_norm": 0.18859892344363186, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.110303548525011, |
|
"grad_norm": 0.17678403587042746, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.013, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.113723813595554, |
|
"grad_norm": 0.18933984169673357, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.1171440786660964, |
|
"grad_norm": 0.20060664740874196, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0157, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.1205643437366395, |
|
"grad_norm": 0.1803038264349401, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.1239846088071825, |
|
"grad_norm": 0.1910210164605926, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.1274048738777256, |
|
"grad_norm": 0.1794365441256405, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0137, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.1308251389482686, |
|
"grad_norm": 0.15893205685478506, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.1342454040188112, |
|
"grad_norm": 0.19724991519919266, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.1376656690893543, |
|
"grad_norm": 0.17458605756275755, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.1410859341598973, |
|
"grad_norm": 0.172521593573168, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.1445061992304404, |
|
"grad_norm": 0.14557636073307398, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.1479264643009834, |
|
"grad_norm": 0.22843890765431576, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.1513467293715265, |
|
"grad_norm": 0.18196169286713992, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.154766994442069, |
|
"grad_norm": 0.17272140560874683, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.158187259512612, |
|
"grad_norm": 0.15595539359263064, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.161607524583155, |
|
"grad_norm": 0.16465865001486402, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.1650277896536982, |
|
"grad_norm": 0.17986632980817158, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.1684480547242413, |
|
"grad_norm": 0.18929877257592365, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0163, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.171868319794784, |
|
"grad_norm": 0.19542505698465773, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0159, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.175288584865327, |
|
"grad_norm": 0.17114565781513597, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.17870884993587, |
|
"grad_norm": 0.18331913158118285, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.182129115006413, |
|
"grad_norm": 0.16055196305467795, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.185549380076956, |
|
"grad_norm": 0.15985440322290387, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.188969645147499, |
|
"grad_norm": 0.16417247457666104, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.1923899102180417, |
|
"grad_norm": 0.16210688538374043, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.195810175288585, |
|
"grad_norm": 0.16634144533077172, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.199230440359128, |
|
"grad_norm": 0.18582606043218244, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.202650705429671, |
|
"grad_norm": 0.1747184961523454, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0164, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.206070970500214, |
|
"grad_norm": 0.15610336933105856, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.013, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.2094912355707566, |
|
"grad_norm": 0.16920938302300378, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.2129115006412996, |
|
"grad_norm": 0.17648344233280136, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.2163317657118426, |
|
"grad_norm": 0.17020885614859071, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.2197520307823857, |
|
"grad_norm": 0.17716196526554184, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.2231722958529287, |
|
"grad_norm": 0.17375550168851311, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.2265925609234714, |
|
"grad_norm": 0.15660012931864775, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0138, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.2300128259940144, |
|
"grad_norm": 0.192388171113886, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0151, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.2334330910645575, |
|
"grad_norm": 0.17112223174304642, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0137, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.2368533561351005, |
|
"grad_norm": 0.17046414663473125, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0163, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.2402736212056436, |
|
"grad_norm": 0.14723103129157328, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.2436938862761866, |
|
"grad_norm": 0.16260020026720062, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.247114151346729, |
|
"grad_norm": 0.1605529186115587, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.2505344164172723, |
|
"grad_norm": 0.1649326783596916, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.2539546814878153, |
|
"grad_norm": 0.1664480794095166, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.2573749465583584, |
|
"grad_norm": 0.16946179512795181, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.2607952116289014, |
|
"grad_norm": 0.166434202624617, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.2642154766994445, |
|
"grad_norm": 0.1739998754952991, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.267635741769987, |
|
"grad_norm": 0.1809093145862042, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.27105600684053, |
|
"grad_norm": 0.15727492334875873, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0137, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.274476271911073, |
|
"grad_norm": 0.18448939109027693, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0157, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.277896536981616, |
|
"grad_norm": 0.18236721526184443, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0166, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.2813168020521593, |
|
"grad_norm": 0.1695210468590141, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.284737067122702, |
|
"grad_norm": 0.17157602170689792, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.288157332193245, |
|
"grad_norm": 0.16583571772207567, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0133, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 2.291577597263788, |
|
"grad_norm": 0.15309231682278962, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.294997862334331, |
|
"grad_norm": 0.18010491103515566, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 2.298418127404874, |
|
"grad_norm": 0.165226793210629, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.3018383924754167, |
|
"grad_norm": 0.19017257841407276, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 2.3052586575459597, |
|
"grad_norm": 0.16105989698952458, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.3086789226165028, |
|
"grad_norm": 0.17435397438208278, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.312099187687046, |
|
"grad_norm": 0.15489352805926157, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 2.315519452757589, |
|
"grad_norm": 0.15749574161575872, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 2.3189397178281315, |
|
"grad_norm": 0.17861788971942064, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 2.3223599828986745, |
|
"grad_norm": 0.16852344878390402, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 2.3257802479692176, |
|
"grad_norm": 0.1903544466095535, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.3292005130397606, |
|
"grad_norm": 0.1938655036641079, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 2.3326207781103037, |
|
"grad_norm": 0.20203642790445486, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 2.3360410431808463, |
|
"grad_norm": 0.16328748761388734, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 2.3394613082513893, |
|
"grad_norm": 0.17437303421082254, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 2.3428815733219324, |
|
"grad_norm": 0.17762818156916235, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 2.3463018383924754, |
|
"grad_norm": 0.18594818733099802, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0161, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 2.3497221034630185, |
|
"grad_norm": 0.16139308140544603, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 2.3531423685335615, |
|
"grad_norm": 0.16108681383332696, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 2.3565626336041046, |
|
"grad_norm": 0.15992689822195585, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 2.359982898674647, |
|
"grad_norm": 0.19759610045563245, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.36340316374519, |
|
"grad_norm": 0.16149200857684542, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 2.3668234288157333, |
|
"grad_norm": 0.15072989325955685, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 2.3702436938862763, |
|
"grad_norm": 0.14913939759012662, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 2.3736639589568194, |
|
"grad_norm": 0.15636976839856181, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 2.377084224027362, |
|
"grad_norm": 0.18599274943363073, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.380504489097905, |
|
"grad_norm": 0.16568136382204926, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.013, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 2.383924754168448, |
|
"grad_norm": 0.17332302727716312, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0128, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 2.387345019238991, |
|
"grad_norm": 0.1879178833471297, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 2.390765284309534, |
|
"grad_norm": 0.1726315547096704, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 2.3941855493800768, |
|
"grad_norm": 0.1720038600446686, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.39760581445062, |
|
"grad_norm": 0.17262771034044544, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 2.401026079521163, |
|
"grad_norm": 0.18381965008540402, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0151, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 2.404446344591706, |
|
"grad_norm": 0.1943786246553059, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 2.407866609662249, |
|
"grad_norm": 0.15285642974808178, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0133, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 2.4112868747327916, |
|
"grad_norm": 0.19330131469876405, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.4147071398033346, |
|
"grad_norm": 0.16935279234922626, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 2.4181274048738777, |
|
"grad_norm": 0.17130297774748693, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 2.4215476699444207, |
|
"grad_norm": 0.16649757139294383, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 2.4249679350149638, |
|
"grad_norm": 0.1509762981692361, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0131, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 2.428388200085507, |
|
"grad_norm": 0.16903612165580836, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.4318084651560494, |
|
"grad_norm": 0.16965158831335697, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.4352287302265925, |
|
"grad_norm": 0.17489335625033847, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0127, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.4386489952971355, |
|
"grad_norm": 0.17619733289193135, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 2.4420692603676786, |
|
"grad_norm": 0.17617881507074173, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.4454895254382216, |
|
"grad_norm": 0.18048518098716906, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.4489097905087647, |
|
"grad_norm": 0.2051952297924243, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.4523300555793073, |
|
"grad_norm": 0.15664970563386238, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.4557503206498503, |
|
"grad_norm": 0.16743723595706328, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.4591705857203934, |
|
"grad_norm": 0.17613442783841668, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0162, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.4625908507909364, |
|
"grad_norm": 0.1818706957568455, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.4660111158614795, |
|
"grad_norm": 0.17047836187664528, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.469431380932022, |
|
"grad_norm": 0.17741467330052185, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.472851646002565, |
|
"grad_norm": 0.1800167905144302, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.476271911073108, |
|
"grad_norm": 0.1520569062216661, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.4796921761436512, |
|
"grad_norm": 0.19504143457216305, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.4831124412141943, |
|
"grad_norm": 0.16406995475864966, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.486532706284737, |
|
"grad_norm": 0.1640747075139151, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0138, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.48995297135528, |
|
"grad_norm": 0.17515606314180554, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.493373236425823, |
|
"grad_norm": 0.17908072157399882, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.496793501496366, |
|
"grad_norm": 0.15664812551985538, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.500213766566909, |
|
"grad_norm": 0.165379248202876, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0151, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.5036340316374517, |
|
"grad_norm": 0.15423115964706519, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.5070542967079947, |
|
"grad_norm": 0.16485219288254147, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.510474561778538, |
|
"grad_norm": 0.176637422447442, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.513894826849081, |
|
"grad_norm": 0.17814754105281233, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.517315091919624, |
|
"grad_norm": 0.17118656720070607, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.5207353569901665, |
|
"grad_norm": 0.18260703508475823, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.52415562206071, |
|
"grad_norm": 0.17080402514684412, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0136, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.5275758871312526, |
|
"grad_norm": 0.15709135265175292, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.5309961522017956, |
|
"grad_norm": 0.18246981305252055, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.5344164172723387, |
|
"grad_norm": 0.1808585737198185, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.5378366823428817, |
|
"grad_norm": 0.16454795740570335, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.541256947413425, |
|
"grad_norm": 0.21032421935647805, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0165, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.5446772124839674, |
|
"grad_norm": 0.15249763976570654, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.5480974775545104, |
|
"grad_norm": 0.1613635941460105, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0131, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.5515177426250535, |
|
"grad_norm": 0.1700060737892137, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.5549380076955965, |
|
"grad_norm": 0.20547449901345968, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0168, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.5583582727661396, |
|
"grad_norm": 0.16484966537277382, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.561778537836682, |
|
"grad_norm": 0.17889219393433903, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.5651988029072252, |
|
"grad_norm": 0.17679721987917787, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0138, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.5686190679777683, |
|
"grad_norm": 0.16210612559541202, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.5720393330483113, |
|
"grad_norm": 0.17192503019428163, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.5754595981188544, |
|
"grad_norm": 0.16114230533462182, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0151, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.578879863189397, |
|
"grad_norm": 0.16683326324078276, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.58230012825994, |
|
"grad_norm": 0.15929030309974343, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.585720393330483, |
|
"grad_norm": 0.17728404681160545, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.589140658401026, |
|
"grad_norm": 0.16727441441267749, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.592560923471569, |
|
"grad_norm": 0.21841845784314023, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.595981188542112, |
|
"grad_norm": 0.1770465088709539, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.599401453612655, |
|
"grad_norm": 0.1737371304814951, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.602821718683198, |
|
"grad_norm": 0.17069864594223258, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.606241983753741, |
|
"grad_norm": 0.16609359338004503, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.609662248824284, |
|
"grad_norm": 0.17378348121220705, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.6130825138948266, |
|
"grad_norm": 0.17355912661147865, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.61650277896537, |
|
"grad_norm": 0.20077889987954006, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.6199230440359127, |
|
"grad_norm": 0.16419074893195182, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.6233433091064557, |
|
"grad_norm": 0.19476203022751112, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.626763574176999, |
|
"grad_norm": 0.16568838103054007, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.630183839247542, |
|
"grad_norm": 0.21099152906068663, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.017, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.633604104318085, |
|
"grad_norm": 0.17959731227982292, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.6370243693886275, |
|
"grad_norm": 0.16972780919186436, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.6404446344591705, |
|
"grad_norm": 0.1842651161264449, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0157, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.6438648995297136, |
|
"grad_norm": 0.17834439152890905, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.6472851646002566, |
|
"grad_norm": 0.18047152749420897, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.6507054296707997, |
|
"grad_norm": 0.17154124492966955, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.6541256947413423, |
|
"grad_norm": 0.17549885521671557, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0161, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.6575459598118853, |
|
"grad_norm": 0.17861344190439962, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0151, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.6609662248824284, |
|
"grad_norm": 0.16730938712809765, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.6643864899529714, |
|
"grad_norm": 0.1775818865748014, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0159, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.6678067550235145, |
|
"grad_norm": 0.191333871334205, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.013, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.671227020094057, |
|
"grad_norm": 0.16907634204579375, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.6746472851646, |
|
"grad_norm": 0.18178434604030544, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.678067550235143, |
|
"grad_norm": 0.20503354023831713, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.6814878153056863, |
|
"grad_norm": 0.16391814776977276, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.6849080803762293, |
|
"grad_norm": 0.1839989787303562, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.688328345446772, |
|
"grad_norm": 0.19236560848567033, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.691748610517315, |
|
"grad_norm": 0.16811298067940522, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.695168875587858, |
|
"grad_norm": 0.19975209346986805, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0159, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.698589140658401, |
|
"grad_norm": 0.16338526349657181, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0133, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.702009405728944, |
|
"grad_norm": 0.17351362318047445, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0159, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.7054296707994867, |
|
"grad_norm": 0.17661074926464723, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.70884993587003, |
|
"grad_norm": 0.1762002497021315, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.712270200940573, |
|
"grad_norm": 0.19710675588819915, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0161, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.715690466011116, |
|
"grad_norm": 0.1692117872805568, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.719110731081659, |
|
"grad_norm": 0.20731978857327257, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.722530996152202, |
|
"grad_norm": 0.17806201725432017, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.725951261222745, |
|
"grad_norm": 0.16617443428776074, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.7293715262932876, |
|
"grad_norm": 0.20059647446620907, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.7327917913638307, |
|
"grad_norm": 0.1773142221832559, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0171, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.7362120564343737, |
|
"grad_norm": 0.1979846553461278, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.7396323215049168, |
|
"grad_norm": 0.18511763263524342, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.74305258657546, |
|
"grad_norm": 0.17699099585761474, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0118, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.7464728516460024, |
|
"grad_norm": 0.19352657405112964, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0163, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.7498931167165455, |
|
"grad_norm": 0.19135035210758095, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.7533133817870885, |
|
"grad_norm": 0.1841148474869883, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.7567336468576316, |
|
"grad_norm": 0.19008317628622337, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0152, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.7601539119281746, |
|
"grad_norm": 0.1949140281276627, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.763574176998717, |
|
"grad_norm": 0.1944281661049677, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.7669944420692603, |
|
"grad_norm": 0.1674859606182937, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0162, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.7704147071398033, |
|
"grad_norm": 0.18850185666818006, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.7738349722103464, |
|
"grad_norm": 0.18475272615726215, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0156, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.7772552372808894, |
|
"grad_norm": 0.15365265630124134, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0134, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.780675502351432, |
|
"grad_norm": 0.16024686744719197, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.784095767421975, |
|
"grad_norm": 0.18228181650312342, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.787516032492518, |
|
"grad_norm": 0.15232575479299343, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.790936297563061, |
|
"grad_norm": 0.19473439330433306, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.794356562633604, |
|
"grad_norm": 0.20524717935488615, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.017, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.797776827704147, |
|
"grad_norm": 0.1908817371704934, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.8011970927746903, |
|
"grad_norm": 0.1817449147111606, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.804617357845233, |
|
"grad_norm": 0.20849626412897376, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.808037622915776, |
|
"grad_norm": 0.17439115442992892, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0165, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.811457887986319, |
|
"grad_norm": 0.184212269622675, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.814878153056862, |
|
"grad_norm": 0.1742874984626096, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.818298418127405, |
|
"grad_norm": 0.16854467961459083, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0145, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.8217186831979477, |
|
"grad_norm": 0.17468169666793745, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.8251389482684908, |
|
"grad_norm": 0.16936997795252084, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.828559213339034, |
|
"grad_norm": 0.16572236927644424, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.831979478409577, |
|
"grad_norm": 0.1651962024013233, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0127, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.83539974348012, |
|
"grad_norm": 0.1657376085150308, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0139, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.8388200085506625, |
|
"grad_norm": 0.17582133731054114, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0131, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.8422402736212056, |
|
"grad_norm": 0.20723879555852162, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0162, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.8456605386917486, |
|
"grad_norm": 0.6025905984134068, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.8490808037622917, |
|
"grad_norm": 0.1678623970594971, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0143, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.8525010688328347, |
|
"grad_norm": 0.21086844483510658, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0158, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.8559213339033773, |
|
"grad_norm": 0.18493480224421863, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0126, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.8593415989739204, |
|
"grad_norm": 0.1904246277238966, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.8627618640444634, |
|
"grad_norm": 0.18774356330645445, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0174, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.8661821291150065, |
|
"grad_norm": 0.20481371594917794, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0132, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.8696023941855495, |
|
"grad_norm": 0.19259669092688833, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.873022659256092, |
|
"grad_norm": 0.20161241494039436, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0159, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.876442924326635, |
|
"grad_norm": 0.19885243582338097, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.8798631893971782, |
|
"grad_norm": 0.18089297372993837, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.015, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.8832834544677213, |
|
"grad_norm": 0.19064879861955514, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0132, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.8867037195382643, |
|
"grad_norm": 0.18907692558323141, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0146, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.890123984608807, |
|
"grad_norm": 0.19585899032623924, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.8935442496793504, |
|
"grad_norm": 0.22806745672139891, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0169, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.896964514749893, |
|
"grad_norm": 0.19336051551290398, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.900384779820436, |
|
"grad_norm": 0.17663309403974298, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.013, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.903805044890979, |
|
"grad_norm": 0.1884422733573296, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.907225309961522, |
|
"grad_norm": 0.19574362737358483, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.9106455750320652, |
|
"grad_norm": 0.16218777133767662, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0135, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.914065840102608, |
|
"grad_norm": 0.1844053227394077, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.917486105173151, |
|
"grad_norm": 0.19292851510616071, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.920906370243694, |
|
"grad_norm": 0.2089035835617463, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.017, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.924326635314237, |
|
"grad_norm": 0.7055863227979324, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.016, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.92774690038478, |
|
"grad_norm": 0.18518793131577801, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.9311671654553226, |
|
"grad_norm": 0.17688744692401304, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.9345874305258657, |
|
"grad_norm": 0.20202970506605689, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0144, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.9380076955964087, |
|
"grad_norm": 0.19190788298961772, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0154, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.941427960666952, |
|
"grad_norm": 0.19843345575867563, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0167, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.944848225737495, |
|
"grad_norm": 0.18043514187233164, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0124, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.9482684908080374, |
|
"grad_norm": 0.1852741160641503, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0153, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.9516887558785805, |
|
"grad_norm": 0.19552493492009015, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0161, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.9551090209491235, |
|
"grad_norm": 0.1972068517674688, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0151, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.9585292860196666, |
|
"grad_norm": 0.2016593632101422, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0161, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.9619495510902096, |
|
"grad_norm": 0.18720575761526204, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.9653698161607522, |
|
"grad_norm": 0.1948235931544798, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0142, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.9687900812312953, |
|
"grad_norm": 0.18580729685744493, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.9722103463018383, |
|
"grad_norm": 0.1747215712068137, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0148, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.9756306113723814, |
|
"grad_norm": 0.21255162912353637, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0149, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.9790508764429244, |
|
"grad_norm": 0.18848472150934822, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0141, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.982471141513467, |
|
"grad_norm": 0.18016163548131123, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0147, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.9858914065840105, |
|
"grad_norm": 0.1795705522164676, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.014, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.989311671654553, |
|
"grad_norm": 0.17401025878895968, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0138, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.992731936725096, |
|
"grad_norm": 0.1783730795841426, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0132, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.9961522017956392, |
|
"grad_norm": 0.20051631261532152, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0166, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.9995724668661823, |
|
"grad_norm": 0.2056542584608787, |
|
"learning_rate": 1.41e-05, |
|
"loss": 0.0155, |
|
"step": 877 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1460, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 367251916062720.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
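The JSON above is the tail of a Hugging Face `transformers` Trainer state file (`trainer_state.json`): the last stretch of `log_history` (steps 734-877, ending near epoch 3 of a 5-epoch, 1460-step run) followed by the run-level fields. Below is a minimal sketch of how such a file might be loaded and summarized; the field names (`log_history`, `loss`, `grad_norm`, `step`, `max_steps`) come from the file itself, while the file path and the spike threshold are illustrative assumptions.

```python
import json
import statistics

# Load the Trainer state (the path is an assumption; point it at the
# trainer_state.json inside your checkpoint directory).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step training records (each carries loss and grad_norm,
# mirroring the entries above).
recs = [r for r in state["log_history"] if "loss" in r and "grad_norm" in r]

steps = [r["step"] for r in recs]
losses = [r["loss"] for r in recs]
grad_norms = [r["grad_norm"] for r in recs]

print(f"logged {len(recs)} steps, last step {steps[-1]} of {state['max_steps']}")
print(f"final loss {losses[-1]:.4f}, final grad_norm {grad_norms[-1]:.4f}")

# Flag gradient-norm outliers, such as the isolated jumps at steps 832
# (~0.60) and 855 (~0.71) above; the 3x-median threshold is an arbitrary
# illustrative choice, not part of the Trainer itself.
threshold = 3 * statistics.median(grad_norms)
spikes = [(s, round(g, 3)) for s, g in zip(steps, grad_norms) if g > threshold]
print("grad_norm spikes:", spikes)
```

For reading the run-level fields: `logging_steps: 1.0` means every optimizer step is logged, `save_steps: 500` places checkpoints at steps 500, 1000, and so on, and the unchanging `learning_rate` of 1.41e-05 across all entries is consistent with a constant learning-rate schedule.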