{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.5002086521073863, |
|
"eval_steps": 899, |
|
"global_step": 899, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0005564056196967589, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 2e-05, |
|
"loss": 1.7806, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0005564056196967589, |
|
"eval_loss": 1.7791756391525269, |
|
"eval_runtime": 1750.0366, |
|
"eval_samples_per_second": 2.681, |
|
"eval_steps_per_second": 1.341, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0011128112393935178, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 4e-05, |
|
"loss": 1.7467, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0016692168590902768, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 6e-05, |
|
"loss": 1.6933, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0022256224787870356, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 8e-05, |
|
"loss": 1.6916, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0027820280984837947, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 0.0001, |
|
"loss": 1.6973, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0033384337181805537, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 0.00012, |
|
"loss": 1.6518, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0038948393378773127, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 0.00014, |
|
"loss": 1.6383, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.004451244957574071, |
|
"grad_norm": 0.75, |
|
"learning_rate": 0.00016, |
|
"loss": 1.5714, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.005007650577270831, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 0.00018, |
|
"loss": 1.553, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.005564056196967589, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.0002, |
|
"loss": 1.5321, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006120461816664348, |
|
"grad_norm": 0.84765625, |
|
"learning_rate": 0.0001999999829570899, |
|
"loss": 1.5589, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.006676867436361107, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.0001999999318283654, |
|
"loss": 1.4519, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.007233273056057866, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.0001999998466138439, |
|
"loss": 1.4035, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.007789678675754625, |
|
"grad_norm": 0.68359375, |
|
"learning_rate": 0.0001999997273135545, |
|
"loss": 1.4071, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.008346084295451384, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 0.0001999995739275378, |
|
"loss": 1.2862, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.008902489915148143, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00019999938645584616, |
|
"loss": 1.2801, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.009458895534844901, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.0001999991648985434, |
|
"loss": 1.2358, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.010015301154541661, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.0001999989092557051, |
|
"loss": 1.2122, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01057170677423842, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00019999861952741839, |
|
"loss": 1.2294, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.011128112393935179, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019999829571378197, |
|
"loss": 1.2641, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.011684518013631937, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001999979378149063, |
|
"loss": 1.2373, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.012240923633328696, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.0001999975458309133, |
|
"loss": 1.2389, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.012797329253025456, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00019999711976193663, |
|
"loss": 1.2716, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.013353734872722215, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.0001999966596081215, |
|
"loss": 1.2038, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.013910140492418973, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00019999616536962473, |
|
"loss": 1.2588, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.014466546112115732, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00019999563704661483, |
|
"loss": 1.198, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.01502295173181249, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00019999507463927188, |
|
"loss": 1.2047, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.01557935735150925, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00019999447814778755, |
|
"loss": 1.1884, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.01613576297120601, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0001999938475723652, |
|
"loss": 1.2156, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.016692168590902768, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00019999318291321968, |
|
"loss": 1.1636, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.017248574210599527, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00019999248417057767, |
|
"loss": 1.1857, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.017804979830296285, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00019999175134467726, |
|
"loss": 1.1862, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.018361385449993044, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00019999098443576827, |
|
"loss": 1.2155, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.018917791069689802, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00019999018344411207, |
|
"loss": 1.1009, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.019474196689386564, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00019998934836998173, |
|
"loss": 1.1714, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.020030602309083323, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00019998847921366187, |
|
"loss": 1.0824, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02058700792878008, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00019998757597544875, |
|
"loss": 1.0703, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02114341354847684, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019998663865565023, |
|
"loss": 1.1183, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0216998191681736, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00019998566725458586, |
|
"loss": 1.1328, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.022256224787870357, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0001999846617725867, |
|
"loss": 1.1076, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.022812630407567116, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00019998362220999548, |
|
"loss": 1.1143, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.023369036027263874, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00019998254856716657, |
|
"loss": 1.1768, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.023925441646960633, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001999814408444659, |
|
"loss": 1.0872, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.02448184726665739, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00019998029904227103, |
|
"loss": 1.122, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.025038252886354154, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0001999791231609712, |
|
"loss": 1.0025, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.025594658506050912, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00019997791320096718, |
|
"loss": 1.1672, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.02615106412574767, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00019997666916267144, |
|
"loss": 1.1096, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.02670746974544443, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.000199975391046508, |
|
"loss": 1.1446, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.027263875365141188, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001999740788529125, |
|
"loss": 1.1254, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.027820280984837947, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00019997273258233223, |
|
"loss": 1.1143, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.028376686604534705, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00019997135223522605, |
|
"loss": 1.1123, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.028933092224231464, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019996993781206448, |
|
"loss": 1.0469, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.029489497843928222, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019996848931332965, |
|
"loss": 1.1145, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.03004590346362498, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0001999670067395153, |
|
"loss": 1.0489, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.030602309083321743, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00019996549009112675, |
|
"loss": 1.0671, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.0311587147030185, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019996393936868098, |
|
"loss": 1.0341, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.03171512032271526, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019996235457270654, |
|
"loss": 1.0725, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.03227152594241202, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019996073570374367, |
|
"loss": 1.1031, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.032827931562108774, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001999590827623441, |
|
"loss": 0.9974, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.033384337181805536, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019995739574907133, |
|
"loss": 1.1398, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0339407428015023, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00019995567466450035, |
|
"loss": 1.0728, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.03449714842119905, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019995391950921782, |
|
"loss": 0.9649, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.035053554040895815, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.000199952130283822, |
|
"loss": 1.1536, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.03560995966059257, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001999503069889227, |
|
"loss": 1.0135, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.03616636528028933, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0001999484496251415, |
|
"loss": 1.0599, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03672277089998609, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019994655819311144, |
|
"loss": 1.0521, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.03727917651968285, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019994463269347725, |
|
"loss": 1.0737, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.037835582139379605, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019994267312689525, |
|
"loss": 1.0856, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.03839198775907637, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019994067949403337, |
|
"loss": 0.9674, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.03894839337877313, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00019993865179557117, |
|
"loss": 1.1069, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.039504798998469884, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019993659003219978, |
|
"loss": 1.0198, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.040061204618166646, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019993449420462197, |
|
"loss": 1.0864, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.0406176102378634, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019993236431355217, |
|
"loss": 1.037, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.04117401585756016, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00019993020035971633, |
|
"loss": 1.0724, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.04173042147725692, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019992800234385201, |
|
"loss": 0.9854, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04228682709695368, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019992577026670854, |
|
"loss": 1.0655, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.042843232716650435, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019992350412904663, |
|
"loss": 1.0487, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.0433996383363472, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019992120393163876, |
|
"loss": 0.9593, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.04395604395604396, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00019991886967526897, |
|
"loss": 1.1132, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.044512449575740715, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019991650136073287, |
|
"loss": 0.9343, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04506885519543748, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0001999140989888378, |
|
"loss": 1.0228, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.04562526081513423, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019991166256040256, |
|
"loss": 1.0239, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.046181666434830994, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019990919207625764, |
|
"loss": 1.0706, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.04673807205452775, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019990668753724515, |
|
"loss": 1.0069, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.04729447767422451, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019990414894421876, |
|
"loss": 1.0511, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.047850883293921266, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001999015762980438, |
|
"loss": 0.9398, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.04840728891361803, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00019989896959959712, |
|
"loss": 1.1496, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.04896369453331478, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019989632884976727, |
|
"loss": 1.0488, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.049520100153011545, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00019989365404945436, |
|
"loss": 1.0843, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.05007650577270831, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019989094519957015, |
|
"loss": 1.0822, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05063291139240506, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019988820230103796, |
|
"loss": 1.0301, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.051189317012101825, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001998854253547927, |
|
"loss": 1.0098, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.05174572263179858, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019988261436178093, |
|
"loss": 0.9847, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.05230212825149534, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0001998797693229608, |
|
"loss": 1.0233, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.0528585338711921, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0001998768902393021, |
|
"loss": 1.0747, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05341493949088886, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019987397711178614, |
|
"loss": 0.9823, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.053971345110585614, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0001998710299414059, |
|
"loss": 1.0238, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.054527750730282376, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019986804872916593, |
|
"loss": 0.9832, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.05508415634997914, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019986503347608245, |
|
"loss": 0.9833, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.05564056196967589, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.0001998619841831832, |
|
"loss": 1.0971, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.056196967589372655, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001998589008515076, |
|
"loss": 1.0077, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.05675337320906941, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0001998557834821065, |
|
"loss": 1.0066, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.05730977882876617, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019985263207604264, |
|
"loss": 1.0267, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.05786618444846293, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001998494466343901, |
|
"loss": 0.975, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.05842259006815969, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019984622715823474, |
|
"loss": 1.0134, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.058978995687856445, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019984297364867385, |
|
"loss": 1.1247, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.05953540130755321, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019983968610681648, |
|
"loss": 0.974, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.06009180692724996, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001998363645337832, |
|
"loss": 1.0099, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.060648212546946724, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019983300893070623, |
|
"loss": 0.8479, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.061204618166643486, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019982961929872927, |
|
"loss": 1.0034, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06176102378634024, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019982619563900782, |
|
"loss": 1.0165, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.062317429406037, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00019982273795270876, |
|
"loss": 1.0338, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.06287383502573377, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019981924624101071, |
|
"loss": 1.0465, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.06343024064543051, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019981572050510387, |
|
"loss": 0.9284, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.06398664626512728, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00019981216074619, |
|
"loss": 1.074, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06454305188482404, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019980856696548242, |
|
"loss": 0.8981, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.0650994575045208, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019980493916420616, |
|
"loss": 1.0605, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.06565586312421755, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019980127734359782, |
|
"loss": 1.0433, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.06621226874391431, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019979758150490546, |
|
"loss": 1.0483, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.06676867436361107, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.0001997938516493889, |
|
"loss": 1.0543, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06732507998330783, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001997900877783195, |
|
"loss": 1.0162, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.0678814856030046, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0001997862898929802, |
|
"loss": 0.8468, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.06843789122270134, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019978245799466554, |
|
"loss": 1.0417, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.0689942968423981, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019977859208468161, |
|
"loss": 0.9901, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.06955070246209487, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00019977469216434619, |
|
"loss": 1.0117, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07010710808179163, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001997707582349886, |
|
"loss": 0.9719, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.07066351370148838, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019976679029794968, |
|
"loss": 0.9176, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.07121991932118514, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00019976278835458203, |
|
"loss": 1.0633, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.0717763249408819, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019975875240624968, |
|
"loss": 0.9845, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.07233273056057866, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019975468245432833, |
|
"loss": 0.9352, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07288913618027543, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0001997505785002053, |
|
"loss": 1.0966, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.07344554179997217, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001997464405452794, |
|
"loss": 0.9278, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.07400194741966894, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001997422685909611, |
|
"loss": 0.9244, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.0745583530393657, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019973806263867247, |
|
"loss": 0.8941, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.07511475865906246, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019973382268984713, |
|
"loss": 0.9702, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07567116427875921, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019972954874593026, |
|
"loss": 0.9809, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.07622756989845597, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019972524080837873, |
|
"loss": 1.0604, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.07678397551815273, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019972089887866092, |
|
"loss": 0.9702, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.0773403811378495, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019971652295825681, |
|
"loss": 0.9903, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.07789678675754626, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019971211304865795, |
|
"loss": 0.979, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.078453192377243, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019970766915136746, |
|
"loss": 1.1067, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.07900959799693977, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001997031912679002, |
|
"loss": 0.966, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.07956600361663653, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019969867939978236, |
|
"loss": 1.0207, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.08012240923633329, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019969413354855195, |
|
"loss": 0.9127, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.08067881485603004, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001996895537157584, |
|
"loss": 1.0944, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0812352204757268, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019968493990296277, |
|
"loss": 0.948, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.08179162609542356, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019968029211173778, |
|
"loss": 0.9824, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.08234803171512033, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019967561034366763, |
|
"loss": 0.9328, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.08290443733481709, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019967089460034814, |
|
"loss": 1.0162, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.08346084295451384, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001996661448833867, |
|
"loss": 0.7696, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0840172485742106, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019966136119440233, |
|
"loss": 0.9133, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.08457365419390736, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019965654353502554, |
|
"loss": 1.0071, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.08513005981360412, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.0001996516919068985, |
|
"loss": 0.9797, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.08568646543330087, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019964680631167492, |
|
"loss": 0.9521, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.08624287105299763, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.0001996418867510201, |
|
"loss": 1.034, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0867992766726944, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019963693322661087, |
|
"loss": 0.9971, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.08735568229239116, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019963194574013573, |
|
"loss": 1.0262, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.08791208791208792, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001996269242932947, |
|
"loss": 0.9913, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.08846849353178467, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00019962186888779936, |
|
"loss": 1.0822, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.08902489915148143, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019961677952537292, |
|
"loss": 0.8287, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08958130477117819, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001996116562077501, |
|
"loss": 0.9663, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.09013771039087495, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019960649893667722, |
|
"loss": 0.7657, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.0906941160105717, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001996013077139122, |
|
"loss": 0.8837, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.09125052163026846, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001995960825412245, |
|
"loss": 0.9727, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.09180692724996523, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001995908234203952, |
|
"loss": 0.9438, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.09236333286966199, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019958553035321686, |
|
"loss": 0.9364, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.09291973848935874, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019958020334149366, |
|
"loss": 0.9435, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.0934761441090555, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019957484238704139, |
|
"loss": 1.034, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.09403254972875226, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019956944749168737, |
|
"loss": 0.9662, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.09458895534844902, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019956401865727053, |
|
"loss": 1.0057, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09514536096814578, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019955855588564125, |
|
"loss": 0.9569, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.09570176658784253, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019955305917866164, |
|
"loss": 0.9651, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.0962581722075393, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019954752853820523, |
|
"loss": 0.8911, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.09681457782723606, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019954196396615721, |
|
"loss": 0.9656, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.09737098344693282, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019953636546441431, |
|
"loss": 0.8851, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09792738906662957, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019953073303488486, |
|
"loss": 0.91, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.09848379468632633, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019952506667948868, |
|
"loss": 1.0532, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.09904020030602309, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019951936640015723, |
|
"loss": 1.052, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.09959660592571985, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019951363219883344, |
|
"loss": 0.9436, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.10015301154541661, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0001995078640774719, |
|
"loss": 1.0067, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.10070941716511336, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019950206203803874, |
|
"loss": 0.8923, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.10126582278481013, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019949622608251156, |
|
"loss": 0.9681, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.10182222840450689, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019949035621287967, |
|
"loss": 0.9994, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.10237863402420365, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019948445243114384, |
|
"loss": 1.0139, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1029350396439004, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001994785147393164, |
|
"loss": 0.9698, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.10349144526359716, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001994725431394213, |
|
"loss": 0.9727, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.10404785088329392, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019946653763349394, |
|
"loss": 0.9109, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.10460425650299068, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001994604982235814, |
|
"loss": 0.9881, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.10516066212268745, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019945442491174227, |
|
"loss": 1.0062, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.1057170677423842, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019944831770004665, |
|
"loss": 0.9374, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.10627347336208096, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019944217659057626, |
|
"loss": 0.907, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.10682987898177772, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019943600158542436, |
|
"loss": 0.9287, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.10738628460147448, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019942979268669573, |
|
"loss": 0.9948, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.10794269022117123, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001994235498965067, |
|
"loss": 0.9651, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.10849909584086799, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019941727321698521, |
|
"loss": 0.9267, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.10905550146056475, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019941096265027074, |
|
"loss": 0.8551, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.10961190708026151, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019940461819851425, |
|
"loss": 0.9683, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.11016831269995828, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019939823986387834, |
|
"loss": 0.9127, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.11072471831965502, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019939182764853707, |
|
"loss": 1.0557, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.11128112393935179, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019938538155467615, |
|
"loss": 0.9967, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11183752955904855, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019937890158449272, |
|
"loss": 0.9391, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.11239393517874531, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 0.00019937238774019557, |
|
"loss": 1.9551, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.11295034079844206, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.000199365840024005, |
|
"loss": 0.9682, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.11350674641813882, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019935925843815285, |
|
"loss": 0.961, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.11406315203783558, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0001993526429848825, |
|
"loss": 1.0718, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11461955765753234, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001993459936664489, |
|
"loss": 0.9232, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.1151759632772291, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0001993393104851185, |
|
"loss": 1.0022, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.11573236889692586, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019933259344316934, |
|
"loss": 0.9549, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.11628877451662262, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019932584254289096, |
|
"loss": 0.9472, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.11684518013631938, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 0.00019931905778658443, |
|
"loss": 1.9077, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11740158575601614, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019931223917656247, |
|
"loss": 1.0227, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.11795799137571289, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00019930538671514923, |
|
"loss": 1.0234, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.11851439699540965, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019929850040468035, |
|
"loss": 0.9573, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.11907080261510641, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001992915802475032, |
|
"loss": 0.9576, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.11962720823480318, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001992846262459765, |
|
"loss": 0.9715, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.12018361385449992, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001992776384024706, |
|
"loss": 0.8743, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.12074001947419669, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019927061671936737, |
|
"loss": 0.8957, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.12129642509389345, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001992635611990602, |
|
"loss": 0.9718, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.12185283071359021, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019925647184395402, |
|
"loss": 0.8916, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.12240923633328697, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001992493486564653, |
|
"loss": 0.9546, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12296564195298372, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019924219163902204, |
|
"loss": 0.9285, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.12352204757268048, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019923500079406373, |
|
"loss": 0.9095, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.12407845319237724, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00019922777612404148, |
|
"loss": 0.9921, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.124634858812074, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019922051763141788, |
|
"loss": 0.9756, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.12519126443177075, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019921322531866702, |
|
"loss": 0.832, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12574767005146753, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00019920589918827455, |
|
"loss": 0.741, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.12630407567116428, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001991985392427376, |
|
"loss": 0.8839, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.12686048129086103, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019919114548456497, |
|
"loss": 1.0329, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.1274168869105578, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0001991837179162768, |
|
"loss": 0.9777, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.12797329253025455, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019917625654040485, |
|
"loss": 0.9311, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12852969814995133, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0001991687613594924, |
|
"loss": 0.9973, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.12908610376964808, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019916123237609428, |
|
"loss": 1.054, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.12964250938934482, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019915366959277674, |
|
"loss": 0.886, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.1301989150090416, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019914607301211765, |
|
"loss": 0.9879, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.13075532062873835, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019913844263670638, |
|
"loss": 0.8272, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.1313117262484351, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019913077846914377, |
|
"loss": 0.8881, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.13186813186813187, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019912308051204226, |
|
"loss": 0.9737, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.13242453748782862, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00019911534876802572, |
|
"loss": 1.1547, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.1329809431075254, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019910758323972961, |
|
"loss": 0.9975, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.13353734872722214, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019909978392980087, |
|
"loss": 0.9454, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.1340937543469189, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019909195084089792, |
|
"loss": 0.969, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.13465015996661567, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019908408397569075, |
|
"loss": 1.0012, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.13520656558631242, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001990761833368609, |
|
"loss": 0.9292, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.1357629712060092, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001990682489271013, |
|
"loss": 0.8401, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.13631937682570594, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019906028074911645, |
|
"loss": 0.9614, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1368757824454027, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019905227880562246, |
|
"loss": 0.9454, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.13743218806509946, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001990442430993468, |
|
"loss": 0.8745, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.1379885936847962, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019903617363302846, |
|
"loss": 0.9185, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.138544999304493, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019902807040941806, |
|
"loss": 0.9842, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.13910140492418974, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019901993343127763, |
|
"loss": 1.0057, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13965781054388648, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001990117627013807, |
|
"loss": 0.8891, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.14021421616358326, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019900355822251233, |
|
"loss": 0.9715, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.14077062178328, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001989953199974691, |
|
"loss": 0.8475, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.14132702740297676, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001989870480290591, |
|
"loss": 0.9162, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.14188343302267353, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019897874232010186, |
|
"loss": 0.8788, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.14243983864237028, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 0.0001989704028734285, |
|
"loss": 1.8645, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.14299624426206706, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019896202969188148, |
|
"loss": 1.0641, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.1435526498817638, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.000198953622778315, |
|
"loss": 1.0014, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.14410905550146055, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00019894518213559457, |
|
"loss": 1.0273, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.14466546112115733, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001989367077665972, |
|
"loss": 0.9224, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.14522186674085408, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019892819967421154, |
|
"loss": 0.9585, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.14577827236055085, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0001989196578613376, |
|
"loss": 0.9612, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.1463346779802476, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001989110823308869, |
|
"loss": 0.8635, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.14689108359994435, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001989024730857825, |
|
"loss": 0.9478, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.14744748921964113, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019889383012895896, |
|
"loss": 1.0103, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.14800389483933787, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019888515346336226, |
|
"loss": 0.8712, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.14856030045903465, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019887644309195, |
|
"loss": 0.9338, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.1491167060787314, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019886769901769104, |
|
"loss": 0.9435, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.14967311169842815, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019885892124356597, |
|
"loss": 1.0054, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.15022951731812492, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019885010977256675, |
|
"loss": 0.9563, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.15078592293782167, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001988412646076968, |
|
"loss": 0.889, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.15134232855751842, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019883238575197113, |
|
"loss": 0.8797, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.1518987341772152, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019882347320841615, |
|
"loss": 0.9045, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.15245513979691194, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019881452698006973, |
|
"loss": 0.8376, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.15301154541660872, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001988055470699813, |
|
"loss": 0.9139, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.15356795103630547, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019879653348121176, |
|
"loss": 0.9675, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.15412435665600221, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019878748621683344, |
|
"loss": 0.8359, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.154680762275699, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019877840527993018, |
|
"loss": 0.9296, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.15523716789539574, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019876929067359726, |
|
"loss": 0.9716, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.15579357351509251, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019876014240094154, |
|
"loss": 0.9313, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.15634997913478926, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001987509604650812, |
|
"loss": 0.9064, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.156906384754486, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019874174486914602, |
|
"loss": 1.0199, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.1574627903741828, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019873249561627723, |
|
"loss": 0.9184, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.15801919599387954, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.0001987232127096275, |
|
"loss": 0.8171, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.15857560161357628, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019871389615236094, |
|
"loss": 0.9977, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.15913200723327306, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019870454594765323, |
|
"loss": 0.9063, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.1596884128529698, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019869516209869144, |
|
"loss": 1.0768, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.16024481847266658, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019868574460867416, |
|
"loss": 0.9082, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.16080122409236333, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019867629348081138, |
|
"loss": 0.9355, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.16135762971206008, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001986668087183246, |
|
"loss": 0.8966, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.16191403533175686, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001986572903244468, |
|
"loss": 0.8964, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.1624704409514536, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019864773830242244, |
|
"loss": 0.9721, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.16302684657115038, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001986381526555073, |
|
"loss": 0.9809, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.16358325219084713, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001986285333869688, |
|
"loss": 0.9851, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.16413965781054388, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019861888050008575, |
|
"loss": 0.8559, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.16469606343024065, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019860919399814837, |
|
"loss": 0.9342, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.1652524690499374, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001985994738844584, |
|
"loss": 0.9079, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.16580887466963418, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019858972016232907, |
|
"loss": 0.8145, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.16636528028933092, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019857993283508497, |
|
"loss": 0.9047, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.16692168590902767, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019857011190606215, |
|
"loss": 0.9684, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.16747809152872445, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019856025737860828, |
|
"loss": 1.0272, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.1680344971484212, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001985503692560822, |
|
"loss": 0.9, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.16859090276811795, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001985404475418545, |
|
"loss": 1.0117, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.16914730838781472, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019853049223930697, |
|
"loss": 0.8626, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.16970371400751147, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019852050335183305, |
|
"loss": 0.9383, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.17026011962720825, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019851048088283742, |
|
"loss": 0.9119, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.170816525246905, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019850042483573644, |
|
"loss": 0.9732, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.17137293086660174, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019849033521395773, |
|
"loss": 0.9586, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.17192933648629852, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019848021202094042, |
|
"loss": 0.943, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.17248574210599527, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001984700552601351, |
|
"loss": 0.9244, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.17304214772569204, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019845986493500378, |
|
"loss": 1.0124, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.1735985533453888, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019844964104901992, |
|
"loss": 1.0024, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.17415495896508554, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019843938360566843, |
|
"loss": 0.856, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.1747113645847823, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0001984290926084456, |
|
"loss": 0.9451, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.17526777020447906, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019841876806085923, |
|
"loss": 0.9995, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.17582417582417584, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019840840996642852, |
|
"loss": 0.9539, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.17638058144387259, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019839801832868413, |
|
"loss": 0.9055, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.17693698706356933, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001983875931511681, |
|
"loss": 0.8727, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.1774933926832661, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019837713443743397, |
|
"loss": 0.899, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.17804979830296286, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019836664219104666, |
|
"loss": 0.8607, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.1786062039226596, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019835611641558253, |
|
"loss": 1.0124, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.17916260954235638, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0001983455571146294, |
|
"loss": 1.0013, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.17971901516205313, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019833496429178652, |
|
"loss": 0.8477, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.1802754207817499, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019832433795066446, |
|
"loss": 0.889, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.18083182640144665, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019831367809488537, |
|
"loss": 0.789, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.1813882320211434, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001983029847280827, |
|
"loss": 0.8853, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.18194463764084018, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001982922578539014, |
|
"loss": 0.9427, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.18250104326053693, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001982814974759978, |
|
"loss": 0.8221, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.1830574488802337, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.00019827070359803968, |
|
"loss": 0.9266, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.18361385449993045, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0001982598762237062, |
|
"loss": 1.0117, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.1841702601196272, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00019824901535668796, |
|
"loss": 0.9188, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.18472666573932398, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.000198238121000687, |
|
"loss": 0.952, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.18528307135902072, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019822719315941672, |
|
"loss": 0.9938, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.18583947697871747, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.000198216231836602, |
|
"loss": 0.8886, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.18639588259841425, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019820523703597908, |
|
"loss": 0.8996, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.186952288218111, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001981942087612956, |
|
"loss": 0.8635, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.18750869383780777, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001981831470163107, |
|
"loss": 1.029, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.18806509945750452, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001981720518047948, |
|
"loss": 0.9771, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.18862150507720127, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001981609231305298, |
|
"loss": 0.919, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.18917791069689804, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019814976099730904, |
|
"loss": 0.9555, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.1897343163165948, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019813856540893722, |
|
"loss": 1.0713, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.19029072193629157, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019812733636923044, |
|
"loss": 0.9679, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.19084712755598832, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001981160738820162, |
|
"loss": 0.9542, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.19140353317568506, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001981047779511334, |
|
"loss": 0.991, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.19195993879538184, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0001980934485804324, |
|
"loss": 0.9589, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.1925163444150786, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019808208577377486, |
|
"loss": 0.9242, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.19307275003477536, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001980706895350339, |
|
"loss": 0.9109, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.1936291556544721, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019805925986809403, |
|
"loss": 1.0326, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.19418556127416886, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019804779677685112, |
|
"loss": 0.9841, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.19474196689386564, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001980363002652125, |
|
"loss": 0.9203, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.19529837251356238, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001980247703370968, |
|
"loss": 0.8824, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.19585477813325913, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019801320699643416, |
|
"loss": 0.7956, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.1964111837529559, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019800161024716598, |
|
"loss": 0.922, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.19696758937265266, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019798998009324512, |
|
"loss": 0.9232, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.19752399499234943, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019797831653863582, |
|
"loss": 0.8868, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.19808040061204618, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001979666195873137, |
|
"loss": 0.9103, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.19863680623174293, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019795488924326574, |
|
"loss": 0.8998, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.1991932118514397, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019794312551049039, |
|
"loss": 0.8973, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.19974961747113645, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019793132839299732, |
|
"loss": 0.9458, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.20030602309083323, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019791949789480775, |
|
"loss": 1.0083, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.20086242871052998, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019790763401995415, |
|
"loss": 0.9661, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.20141883433022673, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019789573677248047, |
|
"loss": 0.8475, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.2019752399499235, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019788380615644196, |
|
"loss": 0.8632, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.20253164556962025, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019787184217590523, |
|
"loss": 0.9522, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.20308805118931703, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019785984483494838, |
|
"loss": 0.8749, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.20364445680901377, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019784781413766073, |
|
"loss": 0.8714, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.20420086242871052, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001978357500881431, |
|
"loss": 0.9581, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.2047572680484073, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019782365269050756, |
|
"loss": 1.0496, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.20531367366810405, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019781152194887764, |
|
"loss": 0.8804, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.2058700792878008, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019779935786738822, |
|
"loss": 0.8474, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.20642648490749757, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001977871604501855, |
|
"loss": 0.8413, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.20698289052719432, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019777492970142707, |
|
"loss": 0.8636, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.2075392961468911, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019776266562528192, |
|
"loss": 0.8351, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.20809570176658784, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001977503682259303, |
|
"loss": 0.8751, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.2086521073862846, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019773803750756393, |
|
"loss": 0.9113, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.20920851300598137, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001977256734743858, |
|
"loss": 0.9433, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.20976491862567812, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001977132761306103, |
|
"loss": 0.8863, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.2103213242453749, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019770084548046318, |
|
"loss": 0.8073, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.21087772986507164, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019768838152818153, |
|
"loss": 0.9198, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.2114341354847684, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001976758842780138, |
|
"loss": 0.9204, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.21199054110446516, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019766335373421973, |
|
"loss": 0.9714, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.2125469467241619, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019765078990107048, |
|
"loss": 1.0368, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.21310335234385866, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001976381927828486, |
|
"loss": 0.8881, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.21365975796355544, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019762556238384784, |
|
"loss": 1.0236, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.21421616358325218, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001976128987083734, |
|
"loss": 0.9628, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.21477256920294896, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001976002017607418, |
|
"loss": 0.8861, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.2153289748226457, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019758747154528092, |
|
"loss": 1.0946, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.21588538044234246, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019757470806632994, |
|
"loss": 1.0047, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.21644178606203923, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019756191132823937, |
|
"loss": 0.8831, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.21699819168173598, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019754908133537113, |
|
"loss": 0.8999, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.21755459730143276, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019753621809209842, |
|
"loss": 0.8368, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.2181110029211295, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019752332160280576, |
|
"loss": 0.9262, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.21866740854082625, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.000197510391871889, |
|
"loss": 0.9496, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.21922381416052303, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001974974289037554, |
|
"loss": 0.9303, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.21978021978021978, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001974844327028235, |
|
"loss": 0.924, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.22033662539991655, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019747140327352306, |
|
"loss": 0.9763, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.2208930310196133, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001974583406202954, |
|
"loss": 0.7589, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.22144943663931005, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019744524474759296, |
|
"loss": 0.9391, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.22200584225900682, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001974321156598796, |
|
"loss": 0.9961, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.22256224787870357, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019741895336163046, |
|
"loss": 0.9214, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.22311865349840032, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.000197405757857332, |
|
"loss": 0.8915, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.2236750591180971, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019739252915148208, |
|
"loss": 0.8334, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.22423146473779385, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019737926724858976, |
|
"loss": 0.9007, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.22478787035749062, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001973659721531755, |
|
"loss": 0.8694, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.22534427597718737, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019735264386977098, |
|
"loss": 0.9033, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.22590068159688412, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019733928240291932, |
|
"loss": 0.8603, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.2264570872165809, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019732588775717486, |
|
"loss": 1.024, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.22701349283627764, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019731245993710327, |
|
"loss": 0.8826, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.22756989845597442, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019729899894728158, |
|
"loss": 0.8364, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.22812630407567117, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.000197285504792298, |
|
"loss": 1.0239, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.22868270969536791, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019727197747675218, |
|
"loss": 0.9286, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.2292391153150647, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.000197258417005255, |
|
"loss": 0.8623, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.22979552093476144, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019724482338242866, |
|
"loss": 0.8571, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.2303519265544582, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019723119661290664, |
|
"loss": 1.0023, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.23090833217415496, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019721753670133376, |
|
"loss": 0.8866, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.2314647377938517, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001972038436523661, |
|
"loss": 0.7956, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.2320211434135485, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019719011747067108, |
|
"loss": 0.9645, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.23257754903324523, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019717635816092732, |
|
"loss": 0.9324, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.23313395465294198, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019716256572782482, |
|
"loss": 0.9626, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.23369036027263876, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001971487401760649, |
|
"loss": 0.8419, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2342467658923355, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019713488151036003, |
|
"loss": 0.8471, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.23480317151203228, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019712098973543408, |
|
"loss": 1.0177, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.23535957713172903, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001971070648560222, |
|
"loss": 0.944, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.23591598275142578, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001970931068768708, |
|
"loss": 0.8007, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.23647238837112255, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001970791158027375, |
|
"loss": 0.9658, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2370287939908193, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019706509163839136, |
|
"loss": 0.8966, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.23758519961051608, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001970510343886126, |
|
"loss": 1.0448, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.23814160523021283, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001970369440581927, |
|
"loss": 0.937, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.23869801084990958, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019702282065193455, |
|
"loss": 0.9181, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.23925441646960635, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019700866417465216, |
|
"loss": 0.9101, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2398108220893031, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019699447463117093, |
|
"loss": 0.9438, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.24036722770899985, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019698025202632743, |
|
"loss": 0.8315, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.24092363332869662, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019696599636496962, |
|
"loss": 0.8385, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.24148003894839337, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001969517076519566, |
|
"loss": 0.8667, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.24203644456809015, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019693738589215883, |
|
"loss": 0.881, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.2425928501877869, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019692303109045795, |
|
"loss": 0.9509, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.24314925580748364, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019690864325174699, |
|
"loss": 0.9065, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.24370566142718042, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001968942223809301, |
|
"loss": 1.072, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.24426206704687717, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001968797684829228, |
|
"loss": 0.9068, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.24481847266657394, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019686528156265175, |
|
"loss": 0.8168, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2453748782862707, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.000196850761625055, |
|
"loss": 0.8748, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.24593128390596744, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019683620867508176, |
|
"loss": 0.7686, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.24648768952566422, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019682162271769256, |
|
"loss": 0.9036, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.24704409514536096, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001968070037578591, |
|
"loss": 0.9472, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.24760050076505774, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019679235180056437, |
|
"loss": 0.8882, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.2481569063847545, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019677766685080264, |
|
"loss": 0.9693, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.24871331200445124, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019676294891357937, |
|
"loss": 0.8764, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.249269717624148, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019674819799391129, |
|
"loss": 0.9671, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.24982612324384476, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001967334140968264, |
|
"loss": 0.9398, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.2503825288635415, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019671859722736389, |
|
"loss": 0.9572, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2509389344832383, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001967037473905742, |
|
"loss": 0.958, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.25149534010293506, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019668886459151908, |
|
"loss": 0.8959, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.2520517457226318, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019667394883527139, |
|
"loss": 0.9529, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.25260815134232856, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019665900012691529, |
|
"loss": 0.9213, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.25316455696202533, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001966440184715462, |
|
"loss": 0.952, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.25372096258172205, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019662900387427075, |
|
"loss": 0.9548, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.25427736820141883, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019661395634020673, |
|
"loss": 0.8359, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.2548337738211156, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019659887587448327, |
|
"loss": 0.9204, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.2553901794408123, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019658376248224063, |
|
"loss": 0.9216, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.2559465850605091, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019656861616863038, |
|
"loss": 0.9383, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.2565029906802059, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019655343693881526, |
|
"loss": 0.9428, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.25705939629990265, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001965382247979692, |
|
"loss": 0.9357, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.2576158019195994, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019652297975127737, |
|
"loss": 0.967, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.25817220753929615, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001965077018039362, |
|
"loss": 0.8993, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.2587286131589929, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019649239096115336, |
|
"loss": 1.0113, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.25928501877868965, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019647704722814755, |
|
"loss": 0.9117, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.2598414243983864, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019646167061014892, |
|
"loss": 0.9318, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.2603978300180832, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019644626111239863, |
|
"loss": 0.9237, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.2609542356377799, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001964308187401492, |
|
"loss": 0.9926, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.2615106412574767, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.0001964153434986643, |
|
"loss": 0.8647, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.26206704687717347, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001963998353932187, |
|
"loss": 0.7973, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.2626234524968702, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019638429442909853, |
|
"loss": 0.8922, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.26317985811656697, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019636872061160109, |
|
"loss": 0.9267, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.26373626373626374, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019635311394603476, |
|
"loss": 0.8743, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.2642926693559605, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019633747443771924, |
|
"loss": 0.7997, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.26484907497565724, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019632180209198542, |
|
"loss": 0.9695, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.265405480595354, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019630609691417527, |
|
"loss": 0.7992, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.2659618862150508, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00019629035890964208, |
|
"loss": 0.7188, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.2665182918347475, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0001962745880837503, |
|
"loss": 0.93, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.2670746974544443, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019625878444187551, |
|
"loss": 0.8121, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.26763110307414106, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019624294798940451, |
|
"loss": 0.9366, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.2681875086938378, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001962270787317353, |
|
"loss": 0.94, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.26874391431353456, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.000196211176674277, |
|
"loss": 0.9317, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.26930031993323134, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019619524182245, |
|
"loss": 0.8751, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.2698567255529281, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019617927418168587, |
|
"loss": 0.9093, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.27041313117262483, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019616327375742722, |
|
"loss": 0.8399, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.2709695367923216, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019614724055512799, |
|
"loss": 0.9025, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.2715259424120184, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001961311745802532, |
|
"loss": 0.8661, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.2720823480317151, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019611507583827903, |
|
"loss": 0.8429, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.2726387536514119, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019609894433469295, |
|
"loss": 0.9272, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.27319515927110866, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001960827800749935, |
|
"loss": 0.87, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.2737515648908054, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019606658306469034, |
|
"loss": 0.865, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.27430797051050215, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019605035330930442, |
|
"loss": 0.8939, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.27486437613019893, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019603409081436773, |
|
"loss": 0.9009, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.27542078174989565, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001960177955854235, |
|
"loss": 0.8828, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2759771873695924, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019600146762802613, |
|
"loss": 0.9509, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.2765335929892892, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019598510694774108, |
|
"loss": 0.7998, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.277089998608986, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019596871355014506, |
|
"loss": 0.8909, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.2776464042286827, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019595228744082586, |
|
"loss": 0.9643, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.2782028098483795, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019593582862538248, |
|
"loss": 0.9311, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.27875921546807625, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019591933710942503, |
|
"loss": 0.8344, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.27931562108777297, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019590281289857478, |
|
"loss": 0.9109, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.27987202670746975, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019588625599846417, |
|
"loss": 0.994, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.2804284323271665, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019586966641473673, |
|
"loss": 0.8917, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.28098483794686324, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019585304415304711, |
|
"loss": 0.8417, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.28154124356656, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019583638921906124, |
|
"loss": 0.9425, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.2820976491862568, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019581970161845603, |
|
"loss": 0.8792, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.2826540548059535, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001958029813569196, |
|
"loss": 0.9791, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.2832104604256503, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019578622844015117, |
|
"loss": 0.8546, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.28376686604534707, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019576944287386113, |
|
"loss": 0.8836, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.28432327166504384, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.000195752624663771, |
|
"loss": 0.8764, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.28487967728474056, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019573577381561336, |
|
"loss": 0.9411, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.28543608290443734, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019571889033513198, |
|
"loss": 0.9222, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.2859924885241341, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019570197422808173, |
|
"loss": 0.9914, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.28654889414383083, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019568502550022858, |
|
"loss": 0.8788, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.2871052997635276, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001956680441573497, |
|
"loss": 0.811, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.2876617053832244, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019565103020523328, |
|
"loss": 0.9191, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.2882181110029211, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019563398364967868, |
|
"loss": 0.9129, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.2887745166226179, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019561690449649636, |
|
"loss": 0.9591, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.28933092224231466, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019559979275150783, |
|
"loss": 0.8129, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.2898873278620114, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019558264842054585, |
|
"loss": 0.8881, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.29044373348170816, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019556547150945418, |
|
"loss": 0.8941, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.29100013910140493, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001955482620240877, |
|
"loss": 0.8872, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.2915565447211017, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001955310199703124, |
|
"loss": 0.7887, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.2921129503407984, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019551374535400543, |
|
"loss": 0.8684, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2926693559604952, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.0001954964381810549, |
|
"loss": 0.7739, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.293225761580192, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019547909845736013, |
|
"loss": 0.8568, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.2937821671998887, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019546172618883155, |
|
"loss": 0.9188, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.2943385728195855, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019544432138139062, |
|
"loss": 0.8119, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.29489497843928225, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019542688404096986, |
|
"loss": 0.9192, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.29545138405897897, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.000195409414173513, |
|
"loss": 0.8616, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.29600778967867575, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019539191178497472, |
|
"loss": 0.9191, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.2965641952983725, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00019537437688132094, |
|
"loss": 0.7765, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.2971206009180693, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019535680946852852, |
|
"loss": 0.9185, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.297677006537766, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019533920955258546, |
|
"loss": 0.8514, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.2982334121574628, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 0.00019532157713949083, |
|
"loss": 1.8071, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.29878981777715957, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019530391223525478, |
|
"loss": 0.9006, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.2993462233968563, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019528621484589859, |
|
"loss": 0.9186, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.29990262901655307, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019526848497745448, |
|
"loss": 0.8292, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.30045903463624984, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019525072263596585, |
|
"loss": 0.7303, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.30101544025594656, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001952329278274872, |
|
"loss": 0.9214, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.30157184587564334, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019521510055808396, |
|
"loss": 0.9173, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.3021282514953401, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019519724083383273, |
|
"loss": 0.8349, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.30268465711503684, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019517934866082116, |
|
"loss": 0.9247, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.3032410627347336, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001951614240451479, |
|
"loss": 0.8788, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.3037974683544304, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019514346699292274, |
|
"loss": 0.8521, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.30435387397412716, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019512547751026648, |
|
"loss": 0.8488, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.3049102795938239, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019510745560331097, |
|
"loss": 0.9655, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.30546668521352066, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019508940127819913, |
|
"loss": 1.0282, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.30602309083321744, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019507131454108497, |
|
"loss": 1.0152, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.30657949645291416, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001950531953981334, |
|
"loss": 0.8472, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.30713590207261093, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001950350438555206, |
|
"loss": 0.8322, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.3076923076923077, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019501685991943357, |
|
"loss": 0.9437, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.30824871331200443, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001949986435960705, |
|
"loss": 0.817, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.3088051189317012, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019498039489164058, |
|
"loss": 0.9954, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.309361524551398, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019496211381236401, |
|
"loss": 0.8284, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.3099179301710947, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001949438003644721, |
|
"loss": 0.8699, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.3104743357907915, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019492545455420703, |
|
"loss": 0.8738, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.31103074141048825, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001949070763878222, |
|
"loss": 0.894, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.31158714703018503, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019488866587158196, |
|
"loss": 0.8258, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.31214355264988175, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019487022301176168, |
|
"loss": 0.9347, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.3126999582695785, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019485174781464773, |
|
"loss": 0.9083, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.3132563638892753, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019483324028653754, |
|
"loss": 0.784, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.313812769508972, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001948147004337396, |
|
"loss": 0.8954, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.3143691751286688, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001947961282625733, |
|
"loss": 0.8873, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.3149255807483656, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019477752377936917, |
|
"loss": 0.9064, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.3154819863680623, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019475888699046866, |
|
"loss": 0.8424, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.31603839198775907, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0001947402179022243, |
|
"loss": 0.9908, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.31659479760745585, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019472151652099963, |
|
"loss": 0.9116, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.31715120322715257, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001947027828531691, |
|
"loss": 0.9074, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.31770760884684934, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019468401690511832, |
|
"loss": 0.9643, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.3182640144665461, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019466521868324375, |
|
"loss": 0.9002, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.3188204200862429, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001946463881939529, |
|
"loss": 0.9181, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.3193768257059396, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019462752544366436, |
|
"loss": 0.9275, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.3199332313256364, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0001946086304388076, |
|
"loss": 0.8565, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.32048963694533317, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001945897031858232, |
|
"loss": 0.9334, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.3210460425650299, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001945707436911626, |
|
"loss": 0.9068, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.32160244818472666, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019455175196128838, |
|
"loss": 0.8463, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.32215885380442344, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019453272800267393, |
|
"loss": 0.8119, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.32271525942412016, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019451367182180378, |
|
"loss": 0.9386, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.32327166504381694, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019449458342517338, |
|
"loss": 0.9527, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.3238280706635137, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019447546281928916, |
|
"loss": 0.9448, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.3243844762832105, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019445631001066853, |
|
"loss": 0.8528, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.3249408819029072, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019443712500583993, |
|
"loss": 0.9443, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.325497287522604, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019441790781134265, |
|
"loss": 0.9369, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.32605369314230076, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001943986584337271, |
|
"loss": 0.8631, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.3266100987619975, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001943793768795545, |
|
"loss": 0.921, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.32716650438169426, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001943600631553972, |
|
"loss": 0.8827, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.32772291000139103, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019434071726783843, |
|
"loss": 0.9897, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.32827931562108775, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001943213392234724, |
|
"loss": 0.7868, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.32883572124078453, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019430192902890423, |
|
"loss": 0.8787, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.3293921268604813, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0001942824866907501, |
|
"loss": 0.861, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.329948532480178, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019426301221563702, |
|
"loss": 0.8415, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.3305049380998748, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019424350561020312, |
|
"loss": 0.9665, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.3310613437195716, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001942239668810973, |
|
"loss": 0.934, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.33161774933926835, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019420439603497956, |
|
"loss": 0.9518, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.3321741549589651, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019418479307852074, |
|
"loss": 0.8112, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.33273056057866185, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001941651580184027, |
|
"loss": 0.7649, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.3332869661983586, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019414549086131815, |
|
"loss": 0.9321, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.33384337181805535, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001941257916139709, |
|
"loss": 0.9537, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.3343997774377521, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001941060602830755, |
|
"loss": 0.961, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.3349561830574489, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019408629687535763, |
|
"loss": 0.9001, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.3355125886771456, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019406650139755375, |
|
"loss": 0.8772, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.3360689942968424, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019404667385641128, |
|
"loss": 0.9444, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.33662539991653917, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001940268142586887, |
|
"loss": 0.8729, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.3371818055362359, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001940069226111552, |
|
"loss": 0.8825, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.33773821115593267, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019398699892059112, |
|
"loss": 0.9166, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.33829461677562944, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019396704319378754, |
|
"loss": 0.9447, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.3388510223953262, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001939470554375466, |
|
"loss": 0.932, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.33940742801502294, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001939270356586812, |
|
"loss": 0.8981, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3399638336347197, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019390698386401533, |
|
"loss": 0.8561, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.3405202392544165, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019388690006038375, |
|
"loss": 0.8992, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.3410766448741132, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019386678425463221, |
|
"loss": 0.952, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.34163305049381, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019384663645361736, |
|
"loss": 0.8244, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.34218945611350676, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019382645666420674, |
|
"loss": 0.9347, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.3427458617332035, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019380624489327876, |
|
"loss": 0.9098, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.34330226735290026, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 0.00019378600114772283, |
|
"loss": 1.9243, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.34385867297259703, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019376572543443915, |
|
"loss": 0.8078, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.34441507859229376, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019374541776033888, |
|
"loss": 0.8494, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.34497148421199053, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019372507813234405, |
|
"loss": 0.8841, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.3455278898316873, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001937047065573876, |
|
"loss": 0.8424, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.3460842954513841, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001936843030424133, |
|
"loss": 0.8028, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.3466407010710808, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019366386759437596, |
|
"loss": 0.8883, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.3471971066907776, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.0001936434002202411, |
|
"loss": 0.7923, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.34775351231047436, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019362290092698515, |
|
"loss": 0.8398, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.3483099179301711, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001936023697215956, |
|
"loss": 0.866, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.34886632354986785, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019358180661107051, |
|
"loss": 0.8925, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.3494227291695646, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001935612116024191, |
|
"loss": 0.8689, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.34997913478926135, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001935405847026613, |
|
"loss": 0.8397, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.3505355404089581, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019351992591882798, |
|
"loss": 0.8053, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.3510919460286549, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019349923525796084, |
|
"loss": 0.8547, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.3516483516483517, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019347851272711247, |
|
"loss": 0.944, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.3522047572680484, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019345775833334633, |
|
"loss": 0.8962, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.35276116288774517, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001934369720837367, |
|
"loss": 0.8379, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.35331756850744195, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019341615398536877, |
|
"loss": 0.9129, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.35387397412713867, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00019339530404533854, |
|
"loss": 0.8244, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.35443037974683544, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019337442227075285, |
|
"loss": 0.8405, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.3549867853665322, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001933535086687295, |
|
"loss": 0.906, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.35554319098622894, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.000193332563246397, |
|
"loss": 0.9176, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.3560995966059257, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001933115860108948, |
|
"loss": 0.8164, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3566560022256225, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019329057696937317, |
|
"loss": 0.8414, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.3572124078453192, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019326953612899318, |
|
"loss": 0.9027, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.357768813465016, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001932484634969268, |
|
"loss": 0.8986, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.35832521908471276, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019322735908035678, |
|
"loss": 0.891, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.35888162470440954, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001932062228864768, |
|
"loss": 0.9787, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.35943803032410626, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001931850549224912, |
|
"loss": 0.8145, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.35999443594380304, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019316385519561538, |
|
"loss": 0.8913, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.3605508415634998, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019314262371307532, |
|
"loss": 0.7981, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.36110724718319653, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019312136048210801, |
|
"loss": 0.8772, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.3616636528028933, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.0001931000655099612, |
|
"loss": 0.8837, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.3622200584225901, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019307873880389345, |
|
"loss": 0.923, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.3627764640422868, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019305738037117412, |
|
"loss": 0.8609, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.3633328696619836, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019303599021908342, |
|
"loss": 0.8668, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.36388927528168036, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019301456835491237, |
|
"loss": 0.9394, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.3644456809013771, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.0001929931147859628, |
|
"loss": 0.7789, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.36500208652107385, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019297162951954728, |
|
"loss": 0.9009, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.36555849214077063, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019295011256298929, |
|
"loss": 0.7917, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.3661148977604674, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019292856392362303, |
|
"loss": 0.9266, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.3666713033801641, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019290698360879355, |
|
"loss": 0.849, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.3672277089998609, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001928853716258567, |
|
"loss": 0.8509, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.3677841146195577, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019286372798217905, |
|
"loss": 0.8732, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.3683405202392544, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019284205268513803, |
|
"loss": 0.8759, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.3688969258589512, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 0.00019282034574212185, |
|
"loss": 1.8908, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.36945333147864795, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019279860716052947, |
|
"loss": 0.8435, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.37000973709834467, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019277683694777074, |
|
"loss": 0.8151, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.37056614271804145, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019275503511126618, |
|
"loss": 0.9638, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.3711225483377382, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019273320165844707, |
|
"loss": 0.9098, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.37167895395743494, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019271133659675563, |
|
"loss": 0.9032, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.3722353595771317, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019268943993364462, |
|
"loss": 0.7979, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.3727917651968285, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019266751167657778, |
|
"loss": 0.9499, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.37334817081652527, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001926455518330295, |
|
"loss": 0.8741, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.373904576436222, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.000192623560410485, |
|
"loss": 0.9162, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.37446098205591877, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019260153741644021, |
|
"loss": 0.8135, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.37501738767561554, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00019257948285840188, |
|
"loss": 0.6664, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.37557379329531226, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019255739674388746, |
|
"loss": 0.9409, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.37613019891500904, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019253527908042522, |
|
"loss": 0.9325, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.3766866045347058, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019251312987555408, |
|
"loss": 0.8681, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.37724301015440254, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019249094913682382, |
|
"loss": 0.9045, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.3777994157740993, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019246873687179495, |
|
"loss": 0.8807, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.3783558213937961, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019244649308803867, |
|
"loss": 0.9102, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.37891222701349286, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019242421779313692, |
|
"loss": 0.8124, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.3794686326331896, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019240191099468253, |
|
"loss": 0.9337, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.38002503825288636, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 0.00019237957270027884, |
|
"loss": 1.8244, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.38058144387258314, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001923572029175401, |
|
"loss": 0.8975, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.38113784949227986, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001923348016540912, |
|
"loss": 0.8755, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.38169425511197663, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019231236891756787, |
|
"loss": 0.8125, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.3822506607316734, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019228990471561636, |
|
"loss": 0.8673, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.38280706635137013, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001922674090558939, |
|
"loss": 0.8462, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.3833634719710669, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019224488194606826, |
|
"loss": 1.0229, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.3839198775907637, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00019222232339381802, |
|
"loss": 0.8654, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.3844762832104604, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00019219973340683244, |
|
"loss": 0.877, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.3850326888301572, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019217711199281148, |
|
"loss": 0.9084, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.38558909444985395, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019215445915946584, |
|
"loss": 0.9667, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.38614550006955073, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00019213177491451692, |
|
"loss": 0.8251, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.38670190568924745, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019210905926569688, |
|
"loss": 0.8764, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.3872583113089442, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001920863122207485, |
|
"loss": 0.8484, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.387814716928641, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019206353378742528, |
|
"loss": 1.0047, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.3883711225483377, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019204072397349144, |
|
"loss": 0.8996, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.3889275281680345, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019201788278672193, |
|
"loss": 0.9355, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.3894839337877313, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019199501023490232, |
|
"loss": 0.8971, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.390040339407428, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001919721063258289, |
|
"loss": 0.8437, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.39059674502712477, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001919491710673087, |
|
"loss": 0.9162, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.39115315064682155, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019192620446715933, |
|
"loss": 0.9021, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.39170955626651827, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019190320653320918, |
|
"loss": 0.8099, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.39226596188621504, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001918801772732973, |
|
"loss": 0.9569, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.3928223675059118, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001918571166952734, |
|
"loss": 0.9673, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.3933787731256086, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.0001918340248069978, |
|
"loss": 0.8443, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.3939351787453053, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019181090161634167, |
|
"loss": 0.9339, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.3944915843650021, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019178774713118663, |
|
"loss": 0.834, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.39504798998469887, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019176456135942515, |
|
"loss": 0.9933, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.3956043956043956, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019174134430896027, |
|
"loss": 0.924, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.39616080122409236, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0001917180959877057, |
|
"loss": 0.8976, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.39671720684378914, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019169481640358583, |
|
"loss": 0.8488, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.39727361246348586, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019167150556453569, |
|
"loss": 0.8426, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.39783001808318263, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019164816347850096, |
|
"loss": 1.0289, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.3983864237028794, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019162479015343803, |
|
"loss": 0.8953, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.39894282932257613, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019160138559731386, |
|
"loss": 0.9065, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.3994992349422729, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019157794981810607, |
|
"loss": 0.908, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.4000556405619697, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019155448282380294, |
|
"loss": 0.796, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.40061204618166646, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019153098462240342, |
|
"loss": 0.9603, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4011684518013632, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019150745522191701, |
|
"loss": 0.9096, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.40172485742105996, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019148389463036395, |
|
"loss": 1.0184, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.40228126304075673, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019146030285577506, |
|
"loss": 0.8274, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.40283766866045345, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019143667990619177, |
|
"loss": 0.9454, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.4033940742801502, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019141302578966614, |
|
"loss": 0.8516, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.403950479899847, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019138934051426092, |
|
"loss": 0.9234, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.4045068855195437, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00019136562408804934, |
|
"loss": 0.8836, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.4050632911392405, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019134187651911546, |
|
"loss": 0.9283, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.4056196967589373, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019131809781555373, |
|
"loss": 0.9731, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.40617610237863405, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019129428798546938, |
|
"loss": 0.7996, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.4067325079983308, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019127044703697816, |
|
"loss": 0.8759, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.40728891361802755, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019124657497820644, |
|
"loss": 0.9137, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.4078453192377243, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019122267181729123, |
|
"loss": 0.8381, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.40840172485742104, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001911987375623801, |
|
"loss": 0.9209, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.4089581304771178, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019117477222163127, |
|
"loss": 0.8937, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.4095145360968146, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001911507758032135, |
|
"loss": 0.9494, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.4100709417165113, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019112674831530616, |
|
"loss": 0.9375, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.4106273473362081, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019110268976609923, |
|
"loss": 0.8973, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.41118375295590487, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019107860016379324, |
|
"loss": 0.9579, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.4117401585756016, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019105447951659935, |
|
"loss": 0.7406, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.41229656419529837, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001910303278327393, |
|
"loss": 0.9315, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.41285296981499514, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019100614512044536, |
|
"loss": 0.8731, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.4134093754346919, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001909819313879604, |
|
"loss": 0.8679, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.41396578105438864, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019095768664353785, |
|
"loss": 0.8881, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.4145221866740854, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001909334108954418, |
|
"loss": 0.8616, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4150785922937822, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019090910415194682, |
|
"loss": 0.8466, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.4156349979134789, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.000190884766421338, |
|
"loss": 0.963, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.4161914035331757, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019086039771191112, |
|
"loss": 0.8879, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.41674780915287246, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001908359980319724, |
|
"loss": 0.9345, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.4173042147725692, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019081156738983874, |
|
"loss": 0.9356, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.41786062039226596, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019078710579383746, |
|
"loss": 0.8295, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.41841702601196273, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019076261325230655, |
|
"loss": 0.9092, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.41897343163165945, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00019073808977359446, |
|
"loss": 0.83, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.41952983725135623, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001907135353660602, |
|
"loss": 0.7945, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.420086242871053, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019068895003807339, |
|
"loss": 0.959, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.4206426484907498, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001906643337980141, |
|
"loss": 0.8404, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.4211990541104465, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.000190639686654273, |
|
"loss": 0.811, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.4217554597301433, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019061500861525127, |
|
"loss": 0.9065, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.42231186534984005, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00019059029968936062, |
|
"loss": 0.8662, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.4228682709695368, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001905655598850233, |
|
"loss": 0.7609, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.42342467658923355, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019054078921067205, |
|
"loss": 0.8826, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.4239810822089303, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019051598767475016, |
|
"loss": 0.8253, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.42453748782862705, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00019049115528571147, |
|
"loss": 0.7537, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.4250938934483238, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019046629205202027, |
|
"loss": 0.9363, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.4256502990680206, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019044139798215143, |
|
"loss": 0.8248, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4262067046877173, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00019041647308459022, |
|
"loss": 0.7364, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.4267631103074141, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019039151736783262, |
|
"loss": 0.9075, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.42731951592711087, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019036653084038486, |
|
"loss": 0.966, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.42787592154680765, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019034151351076387, |
|
"loss": 0.8645, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.42843232716650437, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019031646538749707, |
|
"loss": 0.824, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.42898873278620114, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019029138647912218, |
|
"loss": 0.8858, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.4295451384058979, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019026627679418767, |
|
"loss": 0.8066, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.43010154402559464, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00019024113634125234, |
|
"loss": 0.8256, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.4306579496452914, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001902159651288855, |
|
"loss": 0.8785, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.4312143552649882, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.000190190763165667, |
|
"loss": 0.8643, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4317707608846849, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019016553046018712, |
|
"loss": 0.9032, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.4323271665043817, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019014026702104662, |
|
"loss": 0.8785, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.43288357212407846, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019011497285685678, |
|
"loss": 0.8607, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.4334399777437752, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0001900896479762393, |
|
"loss": 0.8355, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.43399638336347196, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019006429238782637, |
|
"loss": 0.9727, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.43455278898316874, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019003890610026067, |
|
"loss": 0.8807, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.4351091946028655, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019001348912219533, |
|
"loss": 0.9325, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.43566560022256223, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018998804146229387, |
|
"loss": 0.8559, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.436222005842259, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00018996256312923043, |
|
"loss": 0.8304, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.4367784114619558, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00018993705413168944, |
|
"loss": 0.9767, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.4373348170816525, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001899115144783659, |
|
"loss": 0.8711, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.4378912227013493, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018988594417796516, |
|
"loss": 0.8525, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.43844762832104606, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018986034323920312, |
|
"loss": 0.83, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.4390040339407428, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.000189834711670806, |
|
"loss": 0.8088, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.43956043956043955, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00018980904948151062, |
|
"loss": 0.784, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.44011684518013633, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018978335668006407, |
|
"loss": 0.9129, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.4406732507998331, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.000189757633275224, |
|
"loss": 1.0234, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.4412296564195298, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001897318792757584, |
|
"loss": 0.8711, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.4417860620392266, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018970609469044577, |
|
"loss": 0.9124, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.4423424676589234, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.00018968027952807494, |
|
"loss": 0.7081, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.4428988732786201, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00018965443379744535, |
|
"loss": 0.7776, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.4434552788983169, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00018962855750736656, |
|
"loss": 0.8216, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.44401168451801365, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018960265066665882, |
|
"loss": 0.8406, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.44456809013771037, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001895767132841527, |
|
"loss": 0.9074, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.44512449575740715, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00018955074536868913, |
|
"loss": 0.88, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4456809013771039, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001895247469291195, |
|
"loss": 0.9454, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.44623730699680064, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018949871797430557, |
|
"loss": 0.7475, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.4467937126164974, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00018947265851311954, |
|
"loss": 0.9466, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.4473501182361942, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.000189446568554444, |
|
"loss": 0.791, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.44790652385589097, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018942044810717188, |
|
"loss": 0.898, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.4484629294755877, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00018939429718020661, |
|
"loss": 0.7666, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.44901933509528447, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001893681157824619, |
|
"loss": 0.9343, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.44957574071498124, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00018934190392286198, |
|
"loss": 0.8074, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.45013214633467796, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00018931566161034126, |
|
"loss": 0.8394, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.45068855195437474, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018928938885384472, |
|
"loss": 0.8655, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.4512449575740715, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00018926308566232763, |
|
"loss": 0.792, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.45180136319376824, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00018923675204475564, |
|
"loss": 0.8638, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.452357768813465, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018921038801010477, |
|
"loss": 0.8151, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.4529141744331618, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018918399356736146, |
|
"loss": 0.8198, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.4534705800528585, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00018915756872552242, |
|
"loss": 0.6904, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.4540269856725553, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001891311134935948, |
|
"loss": 0.8762, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.45458339129225206, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.0001891046278805961, |
|
"loss": 0.7527, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.45513979691194884, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018907811189555412, |
|
"loss": 0.7777, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.45569620253164556, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018905156554750708, |
|
"loss": 0.8556, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.45625260815134233, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.0001890249888455035, |
|
"loss": 0.8273, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.4568090137710391, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001889983817986023, |
|
"loss": 0.8405, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.45736541939073583, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018897174441587266, |
|
"loss": 0.9006, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.4579218250104326, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00018894507670639418, |
|
"loss": 0.9191, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.4584782306301294, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00018891837867925678, |
|
"loss": 0.9777, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.4590346362498261, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018889165034356072, |
|
"loss": 0.892, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.4595910418695229, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00018886489170841648, |
|
"loss": 0.8615, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.46014744748921965, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018883810278294503, |
|
"loss": 0.8765, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.4607038531089164, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018881128357627763, |
|
"loss": 0.8803, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.46126025872861315, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00018878443409755575, |
|
"loss": 0.9017, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.4618166643483099, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018875755435593129, |
|
"loss": 0.8139, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4623730699680067, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001887306443605664, |
|
"loss": 0.9611, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.4629294755877034, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00018870370412063365, |
|
"loss": 0.9311, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.4634858812074002, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00018867673364531578, |
|
"loss": 0.9108, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.464042286827097, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00018864973294380587, |
|
"loss": 1.0142, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.4645986924467937, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00018862270202530738, |
|
"loss": 0.744, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.46515509806649047, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018859564089903404, |
|
"loss": 0.8419, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.46571150368618724, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00018856854957420983, |
|
"loss": 0.7806, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.46626790930588397, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00018854142806006902, |
|
"loss": 0.9135, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.46682431492558074, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018851427636585623, |
|
"loss": 0.836, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.4673807205452775, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00018848709450082637, |
|
"loss": 0.8926, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.4679371261649743, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001884598824742445, |
|
"loss": 0.8962, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.468493531784671, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001884326402953862, |
|
"loss": 0.8185, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.4690499374043678, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001884053679735371, |
|
"loss": 0.948, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.46960634302406457, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.0001883780655179932, |
|
"loss": 1.0652, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.4701627486437613, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00018835073293806075, |
|
"loss": 0.9291, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.47071915426345806, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00018832337024305638, |
|
"loss": 0.9503, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.47127555988315484, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00018829597744230678, |
|
"loss": 0.8685, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.47183196550285156, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018826855454514907, |
|
"loss": 0.8796, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.47238837112254833, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00018824110156093053, |
|
"loss": 0.7906, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.4729447767422451, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018821361849900877, |
|
"loss": 0.9085, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.47350118236194183, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018818610536875164, |
|
"loss": 0.9018, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.4740575879816386, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00018815856217953714, |
|
"loss": 0.8632, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.4746139936013354, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018813098894075365, |
|
"loss": 0.9047, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.47517039922103216, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018810338566179973, |
|
"loss": 0.9046, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.4757268048407289, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00018807575235208415, |
|
"loss": 0.8683, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.47628321046042565, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018804808902102597, |
|
"loss": 0.8541, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.47683961608012243, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00018802039567805448, |
|
"loss": 0.8484, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.47739602169981915, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018799267233260918, |
|
"loss": 0.8559, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.4779524273195159, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018796491899413976, |
|
"loss": 0.8646, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.4785088329392127, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001879371356721062, |
|
"loss": 0.9625, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.4790652385589094, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.0001879093223759787, |
|
"loss": 0.7927, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.4796216441786062, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00018788147911523762, |
|
"loss": 0.8716, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.480178049798303, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00018785360589937354, |
|
"loss": 0.7913, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.4807344554179997, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018782570273788736, |
|
"loss": 0.8636, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.48129086103769647, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018779776964029, |
|
"loss": 0.8082, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.48184726665739325, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018776980661610273, |
|
"loss": 0.8549, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.48240367227709, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.000187741813674857, |
|
"loss": 0.8543, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.48296007789678674, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00018771379082609436, |
|
"loss": 0.8735, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.4835164835164835, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001876857380793667, |
|
"loss": 0.8338, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.4840728891361803, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00018765765544423606, |
|
"loss": 0.8904, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.484629294755877, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00018762954293027453, |
|
"loss": 0.7981, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.4851857003755738, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018760140054706453, |
|
"loss": 0.9093, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.48574210599527057, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018757322830419867, |
|
"loss": 0.9216, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.4862985116149673, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00018754502621127966, |
|
"loss": 0.7949, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.48685491723466406, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018751679427792038, |
|
"loss": 0.9682, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.48741132285436084, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00018748853251374396, |
|
"loss": 0.792, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.48796772847405756, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00018746024092838366, |
|
"loss": 0.8901, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.48852413409375434, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00018743191953148286, |
|
"loss": 0.9955, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.4890805397134511, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018740356833269516, |
|
"loss": 0.9246, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.4896369453331479, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018737518734168428, |
|
"loss": 0.9122, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.4901933509528446, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001873467765681242, |
|
"loss": 0.9119, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.4907497565725414, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00018731833602169884, |
|
"loss": 0.7738, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.49130616219223816, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00018728986571210244, |
|
"loss": 0.8866, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.4918625678119349, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00018726136564903938, |
|
"loss": 0.9322, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.49241897343163166, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001872328358422241, |
|
"loss": 0.7841, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.49297537905132843, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00018720427630138122, |
|
"loss": 0.9459, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.49353178467102515, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001871756870362455, |
|
"loss": 0.8372, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.49408819029072193, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018714706805656185, |
|
"loss": 0.8801, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.4946445959104187, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00018711841937208527, |
|
"loss": 0.7583, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.4952010015301155, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00018708974099258086, |
|
"loss": 0.8007, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.4957574071498122, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001870610329278239, |
|
"loss": 0.9087, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.496313812769509, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00018703229518759985, |
|
"loss": 0.8809, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.49687021838920575, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00018700352778170407, |
|
"loss": 0.8517, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.4974266240089025, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018697473071994225, |
|
"loss": 0.9004, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.49798302962859925, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001869459040121301, |
|
"loss": 0.8786, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.498539435248296, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001869170476680934, |
|
"loss": 0.911, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.49909584086799275, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001868881616976681, |
|
"loss": 0.8994, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.4996522464876895, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00018685924611070025, |
|
"loss": 0.8162, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.5002086521073863, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018683030091704592, |
|
"loss": 0.891, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.5002086521073863, |
|
"eval_loss": 0.8054929971694946, |
|
"eval_runtime": 1766.0961, |
|
"eval_samples_per_second": 2.656, |
|
"eval_steps_per_second": 1.328, |
|
"step": 899 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 5391,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 899,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.746246500779295e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}