{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.99953509995351,
  "eval_steps": 500,
  "global_step": 430,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0023245002324500234,
      "grad_norm": 24.9628849029541,
      "learning_rate": 7.222222222222221e-06,
      "loss": 2.9398,
      "step": 1
    },
    {
      "epoch": 0.004649000464900047,
      "grad_norm": 27.249839782714844,
      "learning_rate": 1.4444444444444442e-05,
      "loss": 3.0436,
      "step": 2
    },
    {
      "epoch": 0.00697350069735007,
      "grad_norm": 29.58257293701172,
      "learning_rate": 2.1666666666666664e-05,
      "loss": 2.5493,
      "step": 3
    },
    {
      "epoch": 0.009298000929800094,
      "grad_norm": 21.416799545288086,
      "learning_rate": 2.8888888888888885e-05,
      "loss": 1.5315,
      "step": 4
    },
    {
      "epoch": 0.011622501162250116,
      "grad_norm": 6.383184909820557,
      "learning_rate": 3.611111111111111e-05,
      "loss": 1.2681,
      "step": 5
    },
    {
      "epoch": 0.01394700139470014,
      "grad_norm": 3.6736369132995605,
      "learning_rate": 4.333333333333333e-05,
      "loss": 1.1031,
      "step": 6
    },
    {
      "epoch": 0.016271501627150162,
      "grad_norm": 2.394563913345337,
      "learning_rate": 5.055555555555555e-05,
      "loss": 1.1564,
      "step": 7
    },
    {
      "epoch": 0.018596001859600187,
      "grad_norm": 2.2405858039855957,
      "learning_rate": 5.777777777777777e-05,
      "loss": 1.1077,
      "step": 8
    },
    {
      "epoch": 0.02092050209205021,
      "grad_norm": 2.2083897590637207,
      "learning_rate": 6.5e-05,
      "loss": 1.0652,
      "step": 9
    },
    {
      "epoch": 0.023245002324500233,
      "grad_norm": 2.0182244777679443,
      "learning_rate": 6.499909512851264e-05,
      "loss": 1.0865,
      "step": 10
    },
    {
      "epoch": 0.025569502556950254,
      "grad_norm": 1.935754418373108,
      "learning_rate": 6.499638056443783e-05,
      "loss": 1.1143,
      "step": 11
    },
    {
      "epoch": 0.02789400278940028,
      "grad_norm": 1.7560354471206665,
      "learning_rate": 6.499185645893443e-05,
      "loss": 1.0293,
      "step": 12
    },
    {
      "epoch": 0.030218503021850304,
      "grad_norm": 1.882704496383667,
      "learning_rate": 6.498552306392452e-05,
      "loss": 1.1189,
      "step": 13
    },
    {
      "epoch": 0.032543003254300325,
      "grad_norm": 1.6570043563842773,
      "learning_rate": 6.49773807320794e-05,
      "loss": 1.024,
      "step": 14
    },
    {
      "epoch": 0.03486750348675035,
      "grad_norm": 1.6351194381713867,
      "learning_rate": 6.496742991679994e-05,
      "loss": 1.1053,
      "step": 15
    },
    {
      "epoch": 0.037192003719200374,
      "grad_norm": 1.693750023841858,
      "learning_rate": 6.495567117219131e-05,
      "loss": 1.0443,
      "step": 16
    },
    {
      "epoch": 0.03951650395165039,
      "grad_norm": 1.6009650230407715,
      "learning_rate": 6.494210515303213e-05,
      "loss": 0.9832,
      "step": 17
    },
    {
      "epoch": 0.04184100418410042,
      "grad_norm": 1.6692452430725098,
      "learning_rate": 6.492673261473802e-05,
      "loss": 0.9498,
      "step": 18
    },
    {
      "epoch": 0.04416550441655044,
      "grad_norm": 1.6801658868789673,
      "learning_rate": 6.490955441331956e-05,
      "loss": 0.9983,
      "step": 19
    },
    {
      "epoch": 0.046490004649000466,
      "grad_norm": 1.5920137166976929,
      "learning_rate": 6.489057150533456e-05,
      "loss": 1.0731,
      "step": 20
    },
    {
      "epoch": 0.04881450488145049,
      "grad_norm": 1.5493357181549072,
      "learning_rate": 6.486978494783486e-05,
      "loss": 0.9685,
      "step": 21
    },
    {
      "epoch": 0.05113900511390051,
      "grad_norm": 1.4238895177841187,
      "learning_rate": 6.484719589830741e-05,
      "loss": 0.9721,
      "step": 22
    },
    {
      "epoch": 0.05346350534635053,
      "grad_norm": 1.572223424911499,
      "learning_rate": 6.482280561460986e-05,
      "loss": 1.0263,
      "step": 23
    },
    {
      "epoch": 0.05578800557880056,
      "grad_norm": 1.4538493156433105,
      "learning_rate": 6.479661545490052e-05,
      "loss": 1.0008,
      "step": 24
    },
    {
      "epoch": 0.05811250581125058,
      "grad_norm": 1.4649128913879395,
      "learning_rate": 6.47686268775627e-05,
      "loss": 1.0391,
      "step": 25
    },
    {
      "epoch": 0.06043700604370061,
      "grad_norm": 1.4427037239074707,
      "learning_rate": 6.473884144112351e-05,
      "loss": 0.9842,
      "step": 26
    },
    {
      "epoch": 0.06276150627615062,
      "grad_norm": 1.5365402698516846,
      "learning_rate": 6.470726080416707e-05,
      "loss": 1.027,
      "step": 27
    },
    {
      "epoch": 0.06508600650860065,
      "grad_norm": 1.58499014377594,
      "learning_rate": 6.467388672524219e-05,
      "loss": 1.0134,
      "step": 28
    },
    {
      "epoch": 0.06741050674105067,
      "grad_norm": 1.4400190114974976,
      "learning_rate": 6.463872106276442e-05,
      "loss": 0.9216,
      "step": 29
    },
    {
      "epoch": 0.0697350069735007,
      "grad_norm": 1.5131999254226685,
      "learning_rate": 6.46017657749125e-05,
      "loss": 0.9949,
      "step": 30
    },
    {
      "epoch": 0.07205950720595072,
      "grad_norm": 1.4283956289291382,
      "learning_rate": 6.456302291951948e-05,
      "loss": 0.9467,
      "step": 31
    },
    {
      "epoch": 0.07438400743840075,
      "grad_norm": 1.48639714717865,
      "learning_rate": 6.452249465395796e-05,
      "loss": 1.007,
      "step": 32
    },
    {
      "epoch": 0.07670850767085077,
      "grad_norm": 1.3868178129196167,
      "learning_rate": 6.448018323502007e-05,
      "loss": 0.9728,
      "step": 33
    },
    {
      "epoch": 0.07903300790330078,
      "grad_norm": 1.442160725593567,
      "learning_rate": 6.443609101879175e-05,
      "loss": 0.9678,
      "step": 34
    },
    {
      "epoch": 0.08135750813575081,
      "grad_norm": 1.334540843963623,
      "learning_rate": 6.439022046052158e-05,
      "loss": 0.9515,
      "step": 35
    },
    {
      "epoch": 0.08368200836820083,
      "grad_norm": 1.3423985242843628,
      "learning_rate": 6.434257411448404e-05,
      "loss": 0.9431,
      "step": 36
    },
    {
      "epoch": 0.08600650860065086,
      "grad_norm": 1.3876471519470215,
      "learning_rate": 6.429315463383725e-05,
      "loss": 0.9427,
      "step": 37
    },
    {
      "epoch": 0.08833100883310088,
      "grad_norm": 1.3787826299667358,
      "learning_rate": 6.424196477047534e-05,
      "loss": 0.956,
      "step": 38
    },
    {
      "epoch": 0.09065550906555091,
      "grad_norm": 1.393442153930664,
      "learning_rate": 6.418900737487508e-05,
      "loss": 0.9742,
      "step": 39
    },
    {
      "epoch": 0.09298000929800093,
      "grad_norm": 1.3465863466262817,
      "learning_rate": 6.413428539593724e-05,
      "loss": 0.9512,
      "step": 40
    },
    {
      "epoch": 0.09530450953045096,
      "grad_norm": 1.3853354454040527,
      "learning_rate": 6.407780188082231e-05,
      "loss": 0.9802,
      "step": 41
    },
    {
      "epoch": 0.09762900976290098,
      "grad_norm": 1.506191372871399,
      "learning_rate": 6.401955997478094e-05,
      "loss": 0.9839,
      "step": 42
    },
    {
      "epoch": 0.099953509995351,
      "grad_norm": 1.346642255783081,
      "learning_rate": 6.395956292097865e-05,
      "loss": 0.9612,
      "step": 43
    },
    {
      "epoch": 0.10227801022780102,
      "grad_norm": 1.4252711534500122,
      "learning_rate": 6.389781406031533e-05,
      "loss": 0.966,
      "step": 44
    },
    {
      "epoch": 0.10460251046025104,
      "grad_norm": 1.3196333646774292,
      "learning_rate": 6.383431683123921e-05,
      "loss": 0.9783,
      "step": 45
    },
    {
      "epoch": 0.10692701069270107,
      "grad_norm": 1.334774136543274,
      "learning_rate": 6.376907476955534e-05,
      "loss": 0.9661,
      "step": 46
    },
    {
      "epoch": 0.10925151092515109,
      "grad_norm": 1.3319814205169678,
      "learning_rate": 6.370209150822871e-05,
      "loss": 0.9326,
      "step": 47
    },
    {
      "epoch": 0.11157601115760112,
      "grad_norm": 1.3688098192214966,
      "learning_rate": 6.3633370777182e-05,
      "loss": 0.9141,
      "step": 48
    },
    {
      "epoch": 0.11390051139005114,
      "grad_norm": 1.375450849533081,
      "learning_rate": 6.356291640308783e-05,
      "loss": 0.937,
      "step": 49
    },
    {
      "epoch": 0.11622501162250116,
      "grad_norm": 1.4250009059906006,
      "learning_rate": 6.349073230915569e-05,
      "loss": 0.9604,
      "step": 50
    },
    {
      "epoch": 0.11854951185495119,
      "grad_norm": 1.3212535381317139,
      "learning_rate": 6.34168225149135e-05,
      "loss": 0.9432,
      "step": 51
    },
    {
      "epoch": 0.12087401208740121,
      "grad_norm": 1.3916277885437012,
      "learning_rate": 6.334119113598371e-05,
      "loss": 1.0155,
      "step": 52
    },
    {
      "epoch": 0.12319851231985124,
      "grad_norm": 1.278640627861023,
      "learning_rate": 6.326384238385426e-05,
      "loss": 0.9462,
      "step": 53
    },
    {
      "epoch": 0.12552301255230125,
      "grad_norm": 1.3200207948684692,
      "learning_rate": 6.318478056564394e-05,
      "loss": 0.9506,
      "step": 54
    },
    {
      "epoch": 0.1278475127847513,
      "grad_norm": 1.2866770029067993,
      "learning_rate": 6.310401008386256e-05,
      "loss": 0.9299,
      "step": 55
    },
    {
      "epoch": 0.1301720130172013,
      "grad_norm": 1.3080073595046997,
      "learning_rate": 6.302153543616591e-05,
      "loss": 0.9416,
      "step": 56
    },
    {
      "epoch": 0.13249651324965134,
      "grad_norm": 1.306003451347351,
      "learning_rate": 6.293736121510519e-05,
      "loss": 0.96,
      "step": 57
    },
    {
      "epoch": 0.13482101348210135,
      "grad_norm": 1.2852219343185425,
      "learning_rate": 6.285149210787132e-05,
      "loss": 0.9077,
      "step": 58
    },
    {
      "epoch": 0.13714551371455136,
      "grad_norm": 1.312796711921692,
      "learning_rate": 6.276393289603395e-05,
      "loss": 0.96,
      "step": 59
    },
    {
      "epoch": 0.1394700139470014,
      "grad_norm": 1.3223918676376343,
      "learning_rate": 6.267468845527521e-05,
      "loss": 0.9349,
      "step": 60
    },
    {
      "epoch": 0.1417945141794514,
      "grad_norm": 1.2744101285934448,
      "learning_rate": 6.258376375511813e-05,
      "loss": 0.9634,
      "step": 61
    },
    {
      "epoch": 0.14411901441190145,
      "grad_norm": 1.2165145874023438,
      "learning_rate": 6.249116385865003e-05,
      "loss": 0.883,
      "step": 62
    },
    {
      "epoch": 0.14644351464435146,
      "grad_norm": 1.3080806732177734,
      "learning_rate": 6.239689392224053e-05,
      "loss": 0.9183,
      "step": 63
    },
    {
      "epoch": 0.1487680148768015,
      "grad_norm": 1.3560563325881958,
      "learning_rate": 6.230095919525438e-05,
      "loss": 0.8918,
      "step": 64
    },
    {
      "epoch": 0.1510925151092515,
      "grad_norm": 1.3362810611724854,
      "learning_rate": 6.220336501975922e-05,
      "loss": 0.9218,
      "step": 65
    },
    {
      "epoch": 0.15341701534170155,
      "grad_norm": 1.4007967710494995,
      "learning_rate": 6.21041168302281e-05,
      "loss": 0.9743,
      "step": 66
    },
    {
      "epoch": 0.15574151557415156,
      "grad_norm": 1.2750542163848877,
      "learning_rate": 6.200322015323678e-05,
      "loss": 0.9101,
      "step": 67
    },
    {
      "epoch": 0.15806601580660157,
      "grad_norm": 1.3171645402908325,
      "learning_rate": 6.190068060715615e-05,
      "loss": 0.9494,
      "step": 68
    },
    {
      "epoch": 0.1603905160390516,
      "grad_norm": 1.2462354898452759,
      "learning_rate": 6.179650390183923e-05,
      "loss": 0.9074,
      "step": 69
    },
    {
      "epoch": 0.16271501627150162,
      "grad_norm": 1.2816126346588135,
      "learning_rate": 6.169069583830324e-05,
      "loss": 0.9672,
      "step": 70
    },
    {
      "epoch": 0.16503951650395166,
      "grad_norm": 1.360295057296753,
      "learning_rate": 6.158326230840664e-05,
      "loss": 0.9216,
      "step": 71
    },
    {
      "epoch": 0.16736401673640167,
      "grad_norm": 1.454693078994751,
      "learning_rate": 6.1474209294521e-05,
      "loss": 0.9648,
      "step": 72
    },
    {
      "epoch": 0.1696885169688517,
      "grad_norm": 1.2523744106292725,
      "learning_rate": 6.136354286919789e-05,
      "loss": 0.9098,
      "step": 73
    },
    {
      "epoch": 0.17201301720130172,
      "grad_norm": 1.265788197517395,
      "learning_rate": 6.125126919483069e-05,
      "loss": 0.8955,
      "step": 74
    },
    {
      "epoch": 0.17433751743375175,
      "grad_norm": 1.2782336473464966,
      "learning_rate": 6.113739452331156e-05,
      "loss": 0.9257,
      "step": 75
    },
    {
      "epoch": 0.17666201766620176,
      "grad_norm": 1.3308346271514893,
      "learning_rate": 6.102192519568312e-05,
      "loss": 0.9581,
      "step": 76
    },
    {
      "epoch": 0.1789865178986518,
      "grad_norm": 1.3193368911743164,
      "learning_rate": 6.0904867641785524e-05,
      "loss": 0.9376,
      "step": 77
    },
    {
      "epoch": 0.18131101813110181,
      "grad_norm": 1.2379558086395264,
      "learning_rate": 6.078622837989834e-05,
      "loss": 0.8897,
      "step": 78
    },
    {
      "epoch": 0.18363551836355183,
      "grad_norm": 1.314134120941162,
      "learning_rate": 6.0666014016377575e-05,
      "loss": 0.9065,
      "step": 79
    },
    {
      "epoch": 0.18596001859600186,
      "grad_norm": 1.3453565835952759,
      "learning_rate": 6.0544231245287847e-05,
      "loss": 0.9992,
      "step": 80
    },
    {
      "epoch": 0.18828451882845187,
      "grad_norm": 1.2375950813293457,
      "learning_rate": 6.042088684802962e-05,
      "loss": 0.8979,
      "step": 81
    },
    {
      "epoch": 0.1906090190609019,
      "grad_norm": 1.2984182834625244,
      "learning_rate": 6.029598769296152e-05,
      "loss": 0.9209,
      "step": 82
    },
    {
      "epoch": 0.19293351929335192,
      "grad_norm": 1.264028787612915,
      "learning_rate": 6.0169540735017986e-05,
      "loss": 0.9202,
      "step": 83
    },
    {
      "epoch": 0.19525801952580196,
      "grad_norm": 1.2835426330566406,
      "learning_rate": 6.004155301532189e-05,
      "loss": 0.9103,
      "step": 84
    },
    {
      "epoch": 0.19758251975825197,
      "grad_norm": 1.3315404653549194,
      "learning_rate": 5.991203166079251e-05,
      "loss": 0.9246,
      "step": 85
    },
    {
      "epoch": 0.199907019990702,
      "grad_norm": 1.2901430130004883,
      "learning_rate": 5.978098388374867e-05,
      "loss": 0.9187,
      "step": 86
    },
    {
      "epoch": 0.20223152022315202,
      "grad_norm": 1.2905139923095703,
      "learning_rate": 5.964841698150709e-05,
      "loss": 0.8755,
      "step": 87
    },
    {
      "epoch": 0.20455602045560203,
      "grad_norm": 1.284635305404663,
      "learning_rate": 5.95143383359761e-05,
      "loss": 0.9376,
      "step": 88
    },
    {
      "epoch": 0.20688052068805207,
      "grad_norm": 1.274927020072937,
      "learning_rate": 5.93787554132445e-05,
      "loss": 0.8953,
      "step": 89
    },
    {
      "epoch": 0.20920502092050208,
      "grad_norm": 1.275505542755127,
      "learning_rate": 5.92416757631659e-05,
      "loss": 0.9458,
      "step": 90
    },
    {
      "epoch": 0.21152952115295212,
      "grad_norm": 1.249010443687439,
      "learning_rate": 5.910310701893825e-05,
      "loss": 0.9195,
      "step": 91
    },
    {
      "epoch": 0.21385402138540213,
      "grad_norm": 1.2102458477020264,
      "learning_rate": 5.896305689667885e-05,
      "loss": 0.9237,
      "step": 92
    },
    {
      "epoch": 0.21617852161785217,
      "grad_norm": 1.2785658836364746,
      "learning_rate": 5.882153319499459e-05,
      "loss": 0.9474,
      "step": 93
    },
    {
      "epoch": 0.21850302185030218,
      "grad_norm": 1.2559230327606201,
      "learning_rate": 5.867854379454777e-05,
      "loss": 0.8634,
      "step": 94
    },
    {
      "epoch": 0.22082752208275222,
      "grad_norm": 1.1916835308074951,
      "learning_rate": 5.8534096657617286e-05,
      "loss": 0.8826,
      "step": 95
    },
    {
      "epoch": 0.22315202231520223,
      "grad_norm": 1.2259260416030884,
      "learning_rate": 5.838819982765514e-05,
      "loss": 0.8864,
      "step": 96
    },
    {
      "epoch": 0.22547652254765224,
      "grad_norm": 1.2614275217056274,
      "learning_rate": 5.824086142883867e-05,
      "loss": 0.8649,
      "step": 97
    },
    {
      "epoch": 0.22780102278010228,
      "grad_norm": 1.3275364637374878,
      "learning_rate": 5.8092089665618096e-05,
      "loss": 0.9218,
      "step": 98
    },
    {
      "epoch": 0.2301255230125523,
      "grad_norm": 1.2847840785980225,
      "learning_rate": 5.7941892822259686e-05,
      "loss": 0.8976,
      "step": 99
    },
    {
      "epoch": 0.23245002324500233,
      "grad_norm": 1.2817707061767578,
      "learning_rate": 5.779027926238441e-05,
      "loss": 0.9278,
      "step": 100
    },
    {
      "epoch": 0.23477452347745234,
      "grad_norm": 1.2560011148452759,
      "learning_rate": 5.7637257428502266e-05,
      "loss": 0.9113,
      "step": 101
    },
    {
      "epoch": 0.23709902370990238,
      "grad_norm": 1.222548484802246,
      "learning_rate": 5.748283584154215e-05,
      "loss": 0.8772,
      "step": 102
    },
    {
      "epoch": 0.2394235239423524,
      "grad_norm": 1.2913312911987305,
      "learning_rate": 5.7327023100377346e-05,
      "loss": 0.9342,
      "step": 103
    },
    {
      "epoch": 0.24174802417480243,
      "grad_norm": 1.1847671270370483,
      "learning_rate": 5.716982788134676e-05,
      "loss": 0.8713,
      "step": 104
    },
    {
      "epoch": 0.24407252440725244,
      "grad_norm": 1.2654389142990112,
      "learning_rate": 5.70112589377717e-05,
      "loss": 0.8818,
      "step": 105
    },
    {
      "epoch": 0.24639702463970248,
      "grad_norm": 1.2268662452697754,
      "learning_rate": 5.6851325099468526e-05,
      "loss": 0.8754,
      "step": 106
    },
    {
      "epoch": 0.2487215248721525,
      "grad_norm": 1.30552339553833,
      "learning_rate": 5.669003527225696e-05,
      "loss": 0.946,
      "step": 107
    },
    {
      "epoch": 0.2510460251046025,
      "grad_norm": 1.239740252494812,
      "learning_rate": 5.65273984374641e-05,
      "loss": 0.9061,
      "step": 108
    },
    {
      "epoch": 0.2533705253370525,
      "grad_norm": 1.2037532329559326,
      "learning_rate": 5.6363423651424416e-05,
      "loss": 0.9221,
      "step": 109
    },
    {
      "epoch": 0.2556950255695026,
      "grad_norm": 1.3354305028915405,
      "learning_rate": 5.6198120044975337e-05,
      "loss": 0.9359,
      "step": 110
    },
    {
      "epoch": 0.2580195258019526,
      "grad_norm": 1.3739912509918213,
      "learning_rate": 5.603149682294886e-05,
      "loss": 0.9305,
      "step": 111
    },
    {
      "epoch": 0.2603440260344026,
      "grad_norm": 1.2662415504455566,
      "learning_rate": 5.586356326365904e-05,
      "loss": 0.8996,
      "step": 112
    },
    {
      "epoch": 0.2626685262668526,
      "grad_norm": 1.3930978775024414,
      "learning_rate": 5.5694328718385214e-05,
      "loss": 0.9477,
      "step": 113
    },
    {
      "epoch": 0.2649930264993027,
      "grad_norm": 1.276788592338562,
      "learning_rate": 5.552380261085135e-05,
      "loss": 0.886,
      "step": 114
    },
    {
      "epoch": 0.2673175267317527,
      "grad_norm": 1.2634483575820923,
      "learning_rate": 5.5351994436701326e-05,
      "loss": 0.9207,
      "step": 115
    },
    {
      "epoch": 0.2696420269642027,
      "grad_norm": 1.3280141353607178,
      "learning_rate": 5.5178913762970074e-05,
      "loss": 0.916,
      "step": 116
    },
    {
      "epoch": 0.2719665271966527,
      "grad_norm": 1.2456930875778198,
      "learning_rate": 5.500457022755095e-05,
      "loss": 0.8524,
      "step": 117
    },
    {
      "epoch": 0.2742910274291027,
      "grad_norm": 1.2636594772338867,
      "learning_rate": 5.4828973538658974e-05,
      "loss": 0.9525,
      "step": 118
    },
    {
      "epoch": 0.2766155276615528,
      "grad_norm": 1.223232626914978,
      "learning_rate": 5.465213347429026e-05,
      "loss": 0.8556,
      "step": 119
    },
    {
      "epoch": 0.2789400278940028,
      "grad_norm": 1.414523720741272,
      "learning_rate": 5.4474059881677566e-05,
      "loss": 0.9719,
      "step": 120
    },
    {
      "epoch": 0.2812645281264528,
      "grad_norm": 1.2748632431030273,
      "learning_rate": 5.42947626767419e-05,
      "loss": 0.9076,
      "step": 121
    },
    {
      "epoch": 0.2835890283589028,
      "grad_norm": 1.1875295639038086,
      "learning_rate": 5.411425184354042e-05,
      "loss": 0.8475,
      "step": 122
    },
    {
      "epoch": 0.2859135285913529,
      "grad_norm": 1.168205976486206,
      "learning_rate": 5.393253743371041e-05,
      "loss": 0.8827,
      "step": 123
    },
    {
      "epoch": 0.2882380288238029,
      "grad_norm": 1.2521716356277466,
      "learning_rate": 5.3749629565909614e-05,
      "loss": 0.9238,
      "step": 124
    },
    {
      "epoch": 0.2905625290562529,
      "grad_norm": 1.3135292530059814,
      "learning_rate": 5.356553842525278e-05,
      "loss": 0.9562,
      "step": 125
    },
    {
      "epoch": 0.2928870292887029,
      "grad_norm": 1.1815370321273804,
      "learning_rate": 5.3380274262744447e-05,
      "loss": 0.8571,
      "step": 126
    },
    {
      "epoch": 0.2952115295211529,
      "grad_norm": 1.2776923179626465,
      "learning_rate": 5.319384739470821e-05,
      "loss": 0.9234,
      "step": 127
    },
    {
      "epoch": 0.297536029753603,
      "grad_norm": 1.3336985111236572,
      "learning_rate": 5.300626820221224e-05,
      "loss": 0.9257,
      "step": 128
    },
    {
      "epoch": 0.299860529986053,
      "grad_norm": 1.2534116506576538,
      "learning_rate": 5.2817547130491154e-05,
      "loss": 0.8918,
      "step": 129
    },
    {
      "epoch": 0.302185030218503,
      "grad_norm": 1.2334403991699219,
      "learning_rate": 5.262769468836446e-05,
      "loss": 0.8407,
      "step": 130
    },
    {
      "epoch": 0.304509530450953,
      "grad_norm": 1.2081224918365479,
      "learning_rate": 5.2436721447651325e-05,
      "loss": 0.8976,
      "step": 131
    },
    {
      "epoch": 0.3068340306834031,
      "grad_norm": 1.3717191219329834,
      "learning_rate": 5.224463804258194e-05,
      "loss": 0.9156,
      "step": 132
    },
    {
      "epoch": 0.3091585309158531,
      "grad_norm": 1.2610055208206177,
      "learning_rate": 5.205145516920531e-05,
      "loss": 0.925,
      "step": 133
    },
    {
      "epoch": 0.3114830311483031,
      "grad_norm": 1.2727302312850952,
      "learning_rate": 5.185718358479369e-05,
      "loss": 0.9217,
      "step": 134
    },
    {
      "epoch": 0.3138075313807531,
      "grad_norm": 1.2891085147857666,
      "learning_rate": 5.166183410724353e-05,
      "loss": 0.9583,
      "step": 135
    },
    {
      "epoch": 0.31613203161320313,
      "grad_norm": 1.1891586780548096,
      "learning_rate": 5.146541761447313e-05,
      "loss": 0.8329,
      "step": 136
    },
    {
      "epoch": 0.3184565318456532,
      "grad_norm": 1.203373670578003,
      "learning_rate": 5.1267945043816886e-05,
      "loss": 0.8716,
      "step": 137
    },
    {
      "epoch": 0.3207810320781032,
      "grad_norm": 1.2259798049926758,
      "learning_rate": 5.106942739141625e-05,
      "loss": 0.9246,
      "step": 138
    },
    {
      "epoch": 0.3231055323105532,
      "grad_norm": 1.237679123878479,
      "learning_rate": 5.086987571160746e-05,
      "loss": 0.8769,
      "step": 139
    },
    {
      "epoch": 0.32543003254300323,
      "grad_norm": 1.2703795433044434,
      "learning_rate": 5.0669301116305886e-05,
      "loss": 0.8838,
      "step": 140
    },
    {
      "epoch": 0.3277545327754533,
      "grad_norm": 1.2701797485351562,
      "learning_rate": 5.0467714774387386e-05,
      "loss": 0.8438,
      "step": 141
    },
    {
      "epoch": 0.3300790330079033,
      "grad_norm": 1.2709927558898926,
      "learning_rate": 5.0265127911066295e-05,
      "loss": 0.9252,
      "step": 142
    },
    {
      "epoch": 0.3324035332403533,
      "grad_norm": 1.291172981262207,
      "learning_rate": 5.0061551807270376e-05,
      "loss": 0.93,
      "step": 143
    },
    {
      "epoch": 0.33472803347280333,
      "grad_norm": 1.2257442474365234,
      "learning_rate": 4.9856997799012677e-05,
      "loss": 0.8957,
      "step": 144
    },
    {
      "epoch": 0.33705253370525334,
      "grad_norm": 1.2257241010665894,
      "learning_rate": 4.965147727676027e-05,
      "loss": 0.9213,
      "step": 145
    },
    {
      "epoch": 0.3393770339377034,
      "grad_norm": 1.16828191280365,
      "learning_rate": 4.944500168479995e-05,
      "loss": 0.8625,
      "step": 146
    },
    {
      "epoch": 0.3417015341701534,
      "grad_norm": 1.2331054210662842,
      "learning_rate": 4.9237582520600986e-05,
      "loss": 0.9318,
      "step": 147
    },
    {
      "epoch": 0.34402603440260343,
      "grad_norm": 1.2196905612945557,
      "learning_rate": 4.902923133417496e-05,
      "loss": 0.8709,
      "step": 148
    },
    {
      "epoch": 0.34635053463505344,
      "grad_norm": 1.2139414548873901,
      "learning_rate": 4.8819959727432496e-05,
      "loss": 0.9081,
      "step": 149
    },
    {
      "epoch": 0.3486750348675035,
      "grad_norm": 1.204951524734497,
      "learning_rate": 4.86097793535373e-05,
      "loss": 0.8331,
      "step": 150
    },
    {
      "epoch": 0.3509995350999535,
      "grad_norm": 1.1808617115020752,
      "learning_rate": 4.839870191625722e-05,
      "loss": 0.865,
      "step": 151
    },
    {
      "epoch": 0.35332403533240353,
      "grad_norm": 1.1742596626281738,
      "learning_rate": 4.818673916931252e-05,
      "loss": 0.8266,
      "step": 152
    },
    {
      "epoch": 0.35564853556485354,
      "grad_norm": 1.2377365827560425,
      "learning_rate": 4.797390291572145e-05,
      "loss": 0.841,
      "step": 153
    },
    {
      "epoch": 0.3579730357973036,
      "grad_norm": 1.2187622785568237,
      "learning_rate": 4.7760205007142904e-05,
      "loss": 0.875,
      "step": 154
    },
    {
      "epoch": 0.3602975360297536,
      "grad_norm": 1.182664394378662,
      "learning_rate": 4.754565734321654e-05,
      "loss": 0.8742,
      "step": 155
    },
    {
      "epoch": 0.36262203626220363,
      "grad_norm": 1.2196147441864014,
      "learning_rate": 4.733027187090013e-05,
      "loss": 0.8849,
      "step": 156
    },
    {
      "epoch": 0.36494653649465364,
      "grad_norm": 1.2542427778244019,
      "learning_rate": 4.711406058380429e-05,
      "loss": 0.847,
      "step": 157
    },
    {
      "epoch": 0.36727103672710365,
      "grad_norm": 1.2659164667129517,
      "learning_rate": 4.689703552152467e-05,
      "loss": 0.8557,
      "step": 158
    },
    {
      "epoch": 0.3695955369595537,
      "grad_norm": 1.1970021724700928,
      "learning_rate": 4.667920876897147e-05,
      "loss": 0.8474,
      "step": 159
    },
    {
      "epoch": 0.3719200371920037,
      "grad_norm": 1.2822171449661255,
      "learning_rate": 4.646059245569654e-05,
      "loss": 0.9273,
      "step": 160
    },
    {
      "epoch": 0.37424453742445374,
      "grad_norm": 1.233355164527893,
      "learning_rate": 4.6241198755217925e-05,
      "loss": 0.8528,
      "step": 161
    },
    {
      "epoch": 0.37656903765690375,
      "grad_norm": 1.2058496475219727,
      "learning_rate": 4.602103988434206e-05,
      "loss": 0.86,
      "step": 162
    },
    {
      "epoch": 0.3788935378893538,
      "grad_norm": 1.3541826009750366,
      "learning_rate": 4.5800128102483376e-05,
      "loss": 0.9412,
      "step": 163
    },
    {
      "epoch": 0.3812180381218038,
      "grad_norm": 1.1209250688552856,
      "learning_rate": 4.5578475710981745e-05,
      "loss": 0.8332,
      "step": 164
    },
    {
      "epoch": 0.38354253835425384,
      "grad_norm": 1.2058111429214478,
      "learning_rate": 4.535609505241742e-05,
      "loss": 0.857,
      "step": 165
    },
    {
      "epoch": 0.38586703858670385,
      "grad_norm": 1.2212828397750854,
      "learning_rate": 4.513299850992376e-05,
      "loss": 0.8301,
      "step": 166
    },
    {
      "epoch": 0.38819153881915386,
      "grad_norm": 1.2507742643356323,
      "learning_rate": 4.4909198506497725e-05,
      "loss": 0.88,
      "step": 167
    },
    {
      "epoch": 0.3905160390516039,
      "grad_norm": 1.1851963996887207,
      "learning_rate": 4.4684707504308036e-05,
      "loss": 0.8504,
      "step": 168
    },
    {
      "epoch": 0.39284053928405394,
      "grad_norm": 1.1980507373809814,
      "learning_rate": 4.4459538004001276e-05,
      "loss": 0.8413,
      "step": 169
    },
    {
      "epoch": 0.39516503951650395,
      "grad_norm": 1.210808515548706,
      "learning_rate": 4.42337025440058e-05,
      "loss": 0.8669,
      "step": 170
    },
    {
      "epoch": 0.39748953974895396,
      "grad_norm": 1.1731034517288208,
      "learning_rate": 4.400721369983352e-05,
      "loss": 0.9147,
      "step": 171
    },
    {
      "epoch": 0.399814039981404,
      "grad_norm": 1.2603881359100342,
      "learning_rate": 4.3780084083379654e-05,
      "loss": 0.8829,
      "step": 172
    },
    {
      "epoch": 0.40213854021385403,
      "grad_norm": 1.2504175901412964,
      "learning_rate": 4.3552326342220444e-05,
      "loss": 0.8924,
      "step": 173
    },
    {
      "epoch": 0.40446304044630405,
      "grad_norm": 1.1380208730697632,
      "learning_rate": 4.332395315890887e-05,
      "loss": 0.7826,
      "step": 174
    },
    {
      "epoch": 0.40678754067875406,
      "grad_norm": 1.2040411233901978,
      "learning_rate": 4.309497725026844e-05,
      "loss": 0.8476,
      "step": 175
    },
    {
      "epoch": 0.40911204091120407,
      "grad_norm": 1.2187401056289673,
      "learning_rate": 4.2865411366685064e-05,
      "loss": 0.8605,
      "step": 176
    },
    {
      "epoch": 0.41143654114365413,
      "grad_norm": 1.2218343019485474,
      "learning_rate": 4.2635268291397054e-05,
      "loss": 0.8408,
      "step": 177
    },
    {
      "epoch": 0.41376104137610414,
      "grad_norm": 1.3197338581085205,
      "learning_rate": 4.240456083978329e-05,
      "loss": 0.8658,
      "step": 178
    },
    {
      "epoch": 0.41608554160855415,
      "grad_norm": 1.3240693807601929,
      "learning_rate": 4.2173301858649605e-05,
      "loss": 0.9434,
      "step": 179
    },
    {
      "epoch": 0.41841004184100417,
      "grad_norm": 1.2702267169952393,
      "learning_rate": 4.1941504225513445e-05,
      "loss": 0.8643,
      "step": 180
    },
    {
      "epoch": 0.42073454207345423,
      "grad_norm": 1.2468559741973877,
      "learning_rate": 4.1709180847886745e-05,
      "loss": 0.8797,
      "step": 181
    },
    {
      "epoch": 0.42305904230590424,
      "grad_norm": 1.2787050008773804,
      "learning_rate": 4.147634466255721e-05,
      "loss": 0.8794,
      "step": 182
    },
    {
      "epoch": 0.42538354253835425,
      "grad_norm": 1.222352147102356,
      "learning_rate": 4.124300863486793e-05,
      "loss": 0.8459,
      "step": 183
    },
    {
      "epoch": 0.42770804277080426,
      "grad_norm": 1.203860878944397,
      "learning_rate": 4.100918575799543e-05,
      "loss": 0.9002,
      "step": 184
    },
    {
      "epoch": 0.4300325430032543,
      "grad_norm": 1.1671602725982666,
      "learning_rate": 4.077488905222613e-05,
      "loss": 0.8686,
      "step": 185
    },
    {
      "epoch": 0.43235704323570434,
      "grad_norm": 1.172710657119751,
      "learning_rate": 4.0540131564231346e-05,
      "loss": 0.8665,
      "step": 186
    },
    {
      "epoch": 0.43468154346815435,
      "grad_norm": 1.196799635887146,
      "learning_rate": 4.0304926366340746e-05,
      "loss": 0.874,
      "step": 187
    },
    {
      "epoch": 0.43700604370060436,
      "grad_norm": 1.3162767887115479,
      "learning_rate": 4.006928655581446e-05,
      "loss": 0.9128,
      "step": 188
    },
    {
      "epoch": 0.4393305439330544,
      "grad_norm": 1.1845192909240723,
      "learning_rate": 3.983322525411379e-05,
      "loss": 0.8354,
      "step": 189
    },
    {
      "epoch": 0.44165504416550444,
      "grad_norm": 1.2140437364578247,
      "learning_rate": 3.959675560617049e-05,
      "loss": 0.8716,
      "step": 190
    },
    {
      "epoch": 0.44397954439795445,
      "grad_norm": 1.1984777450561523,
      "learning_rate": 3.9359890779654846e-05,
      "loss": 0.8063,
      "step": 191
    },
    {
      "epoch": 0.44630404463040446,
      "grad_norm": 1.1856093406677246,
      "learning_rate": 3.91226439642424e-05,
      "loss": 0.7776,
      "step": 192
    },
    {
      "epoch": 0.4486285448628545,
      "grad_norm": 1.2061065435409546,
      "learning_rate": 3.8885028370879544e-05,
      "loss": 0.8475,
      "step": 193
    },
    {
      "epoch": 0.4509530450953045,
      "grad_norm": 1.2898370027542114,
      "learning_rate": 3.864705723104786e-05,
      "loss": 0.8788,
      "step": 194
    },
    {
      "epoch": 0.45327754532775455,
      "grad_norm": 1.2373768091201782,
      "learning_rate": 3.840874379602728e-05,
      "loss": 0.8424,
      "step": 195
    },
    {
      "epoch": 0.45560204556020456,
      "grad_norm": 1.2365310192108154,
      "learning_rate": 3.817010133615827e-05,
      "loss": 0.8429,
      "step": 196
    },
    {
      "epoch": 0.45792654579265457,
      "grad_norm": 1.2247048616409302,
      "learning_rate": 3.793114314010282e-05,
      "loss": 0.8806,
      "step": 197
    },
    {
      "epoch": 0.4602510460251046,
      "grad_norm": 1.2044016122817993,
      "learning_rate": 3.769188251410454e-05,
      "loss": 0.8633,
      "step": 198
    },
    {
      "epoch": 0.46257554625755465,
      "grad_norm": 1.1678348779678345,
      "learning_rate": 3.7452332781247633e-05,
      "loss": 0.8325,
      "step": 199
    },
    {
      "epoch": 0.46490004649000466,
      "grad_norm": 1.2637572288513184,
      "learning_rate": 3.7212507280715056e-05,
      "loss": 0.8471,
      "step": 200
    },
    {
      "epoch": 0.46722454672245467,
      "grad_norm": 1.1990036964416504,
      "learning_rate": 3.6972419367045726e-05,
      "loss": 0.8821,
      "step": 201
    },
    {
      "epoch": 0.4695490469549047,
      "grad_norm": 1.1562861204147339,
      "learning_rate": 3.673208240939088e-05,
      "loss": 0.8486,
      "step": 202
    },
    {
      "epoch": 0.4718735471873547,
      "grad_norm": 1.2566421031951904,
      "learning_rate": 3.6491509790769616e-05,
      "loss": 0.9117,
      "step": 203
    },
    {
      "epoch": 0.47419804741980476,
      "grad_norm": 1.2128387689590454,
      "learning_rate": 3.625071490732366e-05,
      "loss": 0.8905,
      "step": 204
    },
    {
      "epoch": 0.47652254765225477,
      "grad_norm": 1.2243783473968506,
      "learning_rate": 3.6009711167571434e-05,
      "loss": 0.8362,
      "step": 205
    },
    {
      "epoch": 0.4788470478847048,
      "grad_norm": 1.2012783288955688,
      "learning_rate": 3.57685119916614e-05,
      "loss": 0.841,
      "step": 206
    },
    {
      "epoch": 0.4811715481171548,
      "grad_norm": 1.1792044639587402,
      "learning_rate": 3.552713081062478e-05,
      "loss": 0.8673,
      "step": 207
    },
    {
      "epoch": 0.48349604834960486,
      "grad_norm": 1.2149136066436768,
      "learning_rate": 3.528558106562759e-05,
      "loss": 0.8776,
      "step": 208
    },
    {
      "epoch": 0.48582054858205487,
      "grad_norm": 1.2262108325958252,
      "learning_rate": 3.50438762072223e-05,
      "loss": 0.8598,
      "step": 209
    },
    {
      "epoch": 0.4881450488145049,
      "grad_norm": 1.1815216541290283,
      "learning_rate": 3.4802029694598705e-05,
      "loss": 0.7776,
      "step": 210
    },
    {
      "epoch": 0.4904695490469549,
      "grad_norm": 1.1577028036117554,
      "learning_rate": 3.456005499483456e-05,
      "loss": 0.8072,
      "step": 211
    },
    {
      "epoch": 0.49279404927940496,
      "grad_norm": 1.1453114748001099,
      "learning_rate": 3.4317965582145684e-05,
      "loss": 0.8518,
      "step": 212
    },
    {
      "epoch": 0.49511854951185497,
      "grad_norm": 1.2426217794418335,
      "learning_rate": 3.4075774937135554e-05,
      "loss": 0.88,
      "step": 213
    },
    {
      "epoch": 0.497443049744305,
      "grad_norm": 1.2424201965332031,
      "learning_rate": 3.3833496546044746e-05,
      "loss": 0.8669,
      "step": 214
    },
    {
      "epoch": 0.499767549976755,
      "grad_norm": 1.2596735954284668,
      "learning_rate": 3.3591143899999905e-05,
      "loss": 0.8546,
      "step": 215
    },
    {
      "epoch": 0.502092050209205,
      "grad_norm": 1.2579164505004883,
      "learning_rate": 3.334873049426253e-05,
      "loss": 0.8954,
      "step": 216
    },
    {
      "epoch": 0.504416550441655,
      "grad_norm": 1.1875728368759155,
      "learning_rate": 3.3106269827477484e-05,
      "loss": 0.8319,
      "step": 217
    },
    {
      "epoch": 0.506741050674105,
      "grad_norm": 1.241857647895813,
      "learning_rate": 3.286377540092133e-05,
      "loss": 0.9209,
      "step": 218
    },
    {
      "epoch": 0.5090655509065551,
      "grad_norm": 1.1753859519958496,
      "learning_rate": 3.2621260717750515e-05,
      "loss": 0.8092,
      "step": 219
    },
    {
      "epoch": 0.5113900511390052,
      "grad_norm": 1.1673084497451782,
      "learning_rate": 3.237873928224948e-05,
      "loss": 0.831,
      "step": 220
    },
    {
      "epoch": 0.5137145513714552,
      "grad_norm": 1.2066866159439087,
      "learning_rate": 3.213622459907867e-05,
      "loss": 0.8413,
      "step": 221
    },
    {
      "epoch": 0.5160390516039052,
      "grad_norm": 1.1958924531936646,
      "learning_rate": 3.189373017252252e-05,
      "loss": 0.8648,
      "step": 222
    },
    {
      "epoch": 0.5183635518363552,
      "grad_norm": 1.1267814636230469,
      "learning_rate": 3.165126950573747e-05,
      "loss": 0.8551,
      "step": 223
    },
    {
      "epoch": 0.5206880520688052,
      "grad_norm": 1.1723332405090332,
      "learning_rate": 3.140885610000009e-05,
      "loss": 0.8732,
      "step": 224
    },
    {
      "epoch": 0.5230125523012552,
      "grad_norm": 1.2160817384719849,
      "learning_rate": 3.116650345395524e-05,
      "loss": 0.874,
      "step": 225
    },
    {
      "epoch": 0.5253370525337052,
      "grad_norm": 1.1341142654418945,
      "learning_rate": 3.0924225062864434e-05,
      "loss": 0.8044,
      "step": 226
    },
    {
      "epoch": 0.5276615527661552,
      "grad_norm": 1.1896854639053345,
      "learning_rate": 3.0682034417854324e-05,
      "loss": 0.8364,
      "step": 227
    },
    {
      "epoch": 0.5299860529986054,
      "grad_norm": 1.229346513748169,
      "learning_rate": 3.0439945005165433e-05,
      "loss": 0.8821,
      "step": 228
    },
    {
      "epoch": 0.5323105532310554,
      "grad_norm": 1.1989872455596924,
      "learning_rate": 3.0197970305401306e-05,
      "loss": 0.8566,
      "step": 229
    },
    {
      "epoch": 0.5346350534635054,
      "grad_norm": 1.248273491859436,
      "learning_rate": 2.9956123792777708e-05,
      "loss": 0.7907,
      "step": 230
    },
    {
      "epoch": 0.5369595536959554,
      "grad_norm": 1.235098958015442,
      "learning_rate": 2.9714418934372402e-05,
      "loss": 0.8838,
      "step": 231
    },
    {
      "epoch": 0.5392840539284054,
      "grad_norm": 1.2413426637649536,
      "learning_rate": 2.9472869189375224e-05,
      "loss": 0.8614,
      "step": 232
    },
    {
      "epoch": 0.5416085541608554,
      "grad_norm": 1.2023251056671143,
      "learning_rate": 2.9231488008338592e-05,
      "loss": 0.8328,
      "step": 233
    },
    {
      "epoch": 0.5439330543933054,
      "grad_norm": 1.2137963771820068,
      "learning_rate": 2.899028883242856e-05,
      "loss": 0.8174,
      "step": 234
    },
    {
      "epoch": 0.5462575546257554,
      "grad_norm": 1.1812171936035156,
      "learning_rate": 2.8749285092676343e-05,
      "loss": 0.8636,
      "step": 235
    },
    {
      "epoch": 0.5485820548582054,
      "grad_norm": 1.2329221963882446,
      "learning_rate": 2.8508490209230388e-05,
      "loss": 0.8419,
      "step": 236
    },
    {
      "epoch": 0.5509065550906556,
      "grad_norm": 1.176352620124817,
      "learning_rate": 2.8267917590609115e-05,
      "loss": 0.8174,
      "step": 237
    },
    {
      "epoch": 0.5532310553231056,
      "grad_norm": 1.2109792232513428,
      "learning_rate": 2.802758063295427e-05,
      "loss": 0.8718,
      "step": 238
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.155416488647461,
      "learning_rate": 2.7787492719284935e-05,
      "loss": 0.8365,
      "step": 239
    },
    {
      "epoch": 0.5578800557880056,
      "grad_norm": 1.2347787618637085,
      "learning_rate": 2.7547667218752358e-05,
      "loss": 0.9275,
      "step": 240
    },
    {
      "epoch": 0.5602045560204556,
      "grad_norm": 1.2063627243041992,
      "learning_rate": 2.7308117485895463e-05,
      "loss": 0.8702,
      "step": 241
    },
    {
      "epoch": 0.5625290562529056,
      "grad_norm": 1.1815749406814575,
      "learning_rate": 2.7068856859897173e-05,
      "loss": 0.8222,
      "step": 242
    },
    {
      "epoch": 0.5648535564853556,
      "grad_norm": 1.1446418762207031,
      "learning_rate": 2.6829898663841733e-05,
      "loss": 0.7562,
      "step": 243
    },
    {
      "epoch": 0.5671780567178056,
      "grad_norm": 1.117566466331482,
      "learning_rate": 2.659125620397272e-05,
      "loss": 0.7828,
      "step": 244
    },
    {
      "epoch": 0.5695025569502556,
      "grad_norm": 1.1796785593032837,
      "learning_rate": 2.6352942768952144e-05,
      "loss": 0.8555,
      "step": 245
    },
    {
      "epoch": 0.5718270571827058,
      "grad_norm": 1.1962978839874268,
      "learning_rate": 2.6114971629120454e-05,
      "loss": 0.8359,
      "step": 246
    },
    {
      "epoch": 0.5741515574151558,
      "grad_norm": 1.2345088720321655,
      "learning_rate": 2.5877356035757598e-05,
      "loss": 0.857,
      "step": 247
    },
    {
      "epoch": 0.5764760576476058,
      "grad_norm": 1.1942895650863647,
      "learning_rate": 2.564010922034516e-05,
      "loss": 0.8233,
      "step": 248
    },
    {
      "epoch": 0.5788005578800558,
      "grad_norm": 1.2165658473968506,
      "learning_rate": 2.5403244393829505e-05,
      "loss": 0.8141,
      "step": 249
    },
    {
      "epoch": 0.5811250581125058,
      "grad_norm": 1.2151166200637817,
      "learning_rate": 2.5166774745886202e-05,
      "loss": 0.8538,
      "step": 250
    },
    {
      "epoch": 0.5834495583449558,
      "grad_norm": 1.203872799873352,
      "learning_rate": 2.493071344418553e-05,
      "loss": 0.8177,
      "step": 251
    },
    {
      "epoch": 0.5857740585774058,
      "grad_norm": 1.2117934226989746,
      "learning_rate": 2.4695073633659245e-05,
      "loss": 0.8277,
      "step": 252
    },
    {
      "epoch": 0.5880985588098558,
      "grad_norm": 1.1932367086410522,
      "learning_rate": 2.4459868435768644e-05,
      "loss": 0.8342,
      "step": 253
    },
    {
      "epoch": 0.5904230590423059,
      "grad_norm": 1.1477673053741455,
      "learning_rate": 2.4225110947773856e-05,
      "loss": 0.751,
      "step": 254
    },
    {
      "epoch": 0.592747559274756,
      "grad_norm": 1.243074655532837,
      "learning_rate": 2.3990814242004573e-05,
      "loss": 0.8784,
      "step": 255
    },
    {
      "epoch": 0.595072059507206,
      "grad_norm": 1.1835278272628784,
      "learning_rate": 2.375699136513207e-05,
      "loss": 0.8597,
      "step": 256
    },
    {
      "epoch": 0.597396559739656,
      "grad_norm": 1.2624067068099976,
      "learning_rate": 2.3523655337442798e-05,
      "loss": 0.8519,
      "step": 257
    },
    {
      "epoch": 0.599721059972106,
      "grad_norm": 1.1431169509887695,
      "learning_rate": 2.3290819152113256e-05,
      "loss": 0.8444,
      "step": 258
    },
    {
      "epoch": 0.602045560204556,
      "grad_norm": 1.1751455068588257,
      "learning_rate": 2.3058495774486553e-05,
      "loss": 0.8608,
      "step": 259
    },
    {
      "epoch": 0.604370060437006,
      "grad_norm": 1.1753759384155273,
      "learning_rate": 2.2826698141350392e-05,
      "loss": 0.8567,
      "step": 260
    },
    {
      "epoch": 0.606694560669456,
      "grad_norm": 1.22267746925354,
      "learning_rate": 2.2595439160216713e-05,
      "loss": 0.8707,
      "step": 261
    },
    {
      "epoch": 0.609019060901906,
      "grad_norm": 1.1453746557235718,
      "learning_rate": 2.2364731708602947e-05,
      "loss": 0.831,
      "step": 262
    },
    {
      "epoch": 0.6113435611343561,
      "grad_norm": 1.1886532306671143,
      "learning_rate": 2.2134588633314937e-05,
      "loss": 0.763,
      "step": 263
    },
    {
      "epoch": 0.6136680613668062,
      "grad_norm": 1.3295722007751465,
      "learning_rate": 2.1905022749731563e-05,
      "loss": 0.8818,
      "step": 264
    },
    {
      "epoch": 0.6159925615992562,
      "grad_norm": 1.124739408493042,
      "learning_rate": 2.1676046841091133e-05,
      "loss": 0.7534,
      "step": 265
    },
    {
      "epoch": 0.6183170618317062,
      "grad_norm": 1.2315418720245361,
      "learning_rate": 2.144767365777955e-05,
      "loss": 0.8528,
      "step": 266
    },
    {
      "epoch": 0.6206415620641562,
      "grad_norm": 1.248775601387024,
      "learning_rate": 2.121991591662034e-05,
      "loss": 0.8536,
      "step": 267
    },
    {
      "epoch": 0.6229660622966062,
      "grad_norm": 1.2271955013275146,
      "learning_rate": 2.0992786300166472e-05,
      "loss": 0.8731,
      "step": 268
    },
    {
      "epoch": 0.6252905625290562,
      "grad_norm": 1.1660230159759521,
      "learning_rate": 2.0766297455994204e-05,
      "loss": 0.8301,
      "step": 269
    },
    {
      "epoch": 0.6276150627615062,
      "grad_norm": 1.2100571393966675,
      "learning_rate": 2.054046199599873e-05,
      "loss": 0.867,
      "step": 270
    },
    {
      "epoch": 0.6299395629939563,
      "grad_norm": 1.1913954019546509,
      "learning_rate": 2.031529249569197e-05,
      "loss": 0.8322,
      "step": 271
    },
    {
      "epoch": 0.6322640632264063,
      "grad_norm": 1.1850714683532715,
      "learning_rate": 2.0090801493502276e-05,
      "loss": 0.8616,
      "step": 272
    },
    {
      "epoch": 0.6345885634588564,
      "grad_norm": 1.152343511581421,
      "learning_rate": 1.986700149007624e-05,
      "loss": 0.8155,
      "step": 273
    },
    {
      "epoch": 0.6369130636913064,
      "grad_norm": 1.195383906364441,
      "learning_rate": 1.964390494758258e-05,
      "loss": 0.8021,
      "step": 274
    },
    {
      "epoch": 0.6392375639237564,
      "grad_norm": 1.4834905862808228,
      "learning_rate": 1.9421524289018253e-05,
      "loss": 0.8191,
      "step": 275
    },
    {
      "epoch": 0.6415620641562064,
      "grad_norm": 1.250483751296997,
      "learning_rate": 1.919987189751662e-05,
      "loss": 0.8497,
      "step": 276
    },
    {
      "epoch": 0.6438865643886564,
      "grad_norm": 1.1749329566955566,
      "learning_rate": 1.8978960115657944e-05,
      "loss": 0.837,
      "step": 277
    },
    {
      "epoch": 0.6462110646211064,
      "grad_norm": 1.1642099618911743,
      "learning_rate": 1.875880124478207e-05,
      "loss": 0.8454,
      "step": 278
    },
    {
      "epoch": 0.6485355648535565,
      "grad_norm": 1.2152022123336792,
      "learning_rate": 1.853940754430346e-05,
      "loss": 0.8492,
      "step": 279
    },
    {
      "epoch": 0.6508600650860065,
      "grad_norm": 1.1679767370224,
      "learning_rate": 1.8320791231028526e-05,
      "loss": 0.8454,
      "step": 280
    },
    {
      "epoch": 0.6531845653184565,
      "grad_norm": 1.2204087972640991,
      "learning_rate": 1.8102964478475316e-05,
      "loss": 0.9385,
      "step": 281
    },
    {
      "epoch": 0.6555090655509066,
      "grad_norm": 1.1549757719039917,
      "learning_rate": 1.7885939416195687e-05,
      "loss": 0.7963,
      "step": 282
    },
    {
      "epoch": 0.6578335657833566,
      "grad_norm": 1.1434992551803589,
      "learning_rate": 1.766972812909988e-05,
      "loss": 0.7884,
      "step": 283
    },
    {
      "epoch": 0.6601580660158066,
      "grad_norm": 1.151331901550293,
      "learning_rate": 1.7454342656783464e-05,
      "loss": 0.767,
      "step": 284
    },
    {
      "epoch": 0.6624825662482566,
      "grad_norm": 1.1396957635879517,
      "learning_rate": 1.7239794992857097e-05,
      "loss": 0.8191,
      "step": 285
    },
    {
      "epoch": 0.6648070664807066,
      "grad_norm": 1.1968632936477661,
      "learning_rate": 1.7026097084278556e-05,
      "loss": 0.8241,
      "step": 286
    },
    {
      "epoch": 0.6671315667131567,
      "grad_norm": 1.1648362874984741,
      "learning_rate": 1.6813260830687472e-05,
      "loss": 0.7965,
      "step": 287
    },
    {
      "epoch": 0.6694560669456067,
      "grad_norm": 1.226285457611084,
      "learning_rate": 1.660129808374279e-05,
      "loss": 0.9029,
      "step": 288
    },
    {
      "epoch": 0.6717805671780567,
      "grad_norm": 1.178755760192871,
      "learning_rate": 1.6390220646462693e-05,
      "loss": 0.7986,
      "step": 289
    },
    {
      "epoch": 0.6741050674105067,
      "grad_norm": 1.1674259901046753,
      "learning_rate": 1.618004027256749e-05,
      "loss": 0.7532,
      "step": 290
    },
    {
      "epoch": 0.6764295676429568,
      "grad_norm": 1.196860671043396,
      "learning_rate": 1.5970768665825036e-05,
      "loss": 0.8043,
      "step": 291
    },
    {
      "epoch": 0.6787540678754068,
      "grad_norm": 1.2226951122283936,
      "learning_rate": 1.5762417479399005e-05,
      "loss": 0.847,
      "step": 292
    },
    {
      "epoch": 0.6810785681078568,
      "grad_norm": 1.1900945901870728,
      "learning_rate": 1.555499831520006e-05,
      "loss": 0.847,
      "step": 293
    },
    {
      "epoch": 0.6834030683403068,
      "grad_norm": 1.176953911781311,
      "learning_rate": 1.5348522723239726e-05,
      "loss": 0.8145,
      "step": 294
    },
    {
      "epoch": 0.6857275685727569,
      "grad_norm": 1.1760255098342896,
      "learning_rate": 1.5143002200987304e-05,
      "loss": 0.792,
      "step": 295
    },
    {
      "epoch": 0.6880520688052069,
      "grad_norm": 1.1875512599945068,
      "learning_rate": 1.4938448192729617e-05,
      "loss": 0.8698,
      "step": 296
    },
    {
      "epoch": 0.6903765690376569,
      "grad_norm": 1.142947793006897,
      "learning_rate": 1.4734872088933713e-05,
      "loss": 0.8222,
      "step": 297
    },
    {
      "epoch": 0.6927010692701069,
      "grad_norm": 1.2026643753051758,
      "learning_rate": 1.4532285225612613e-05,
      "loss": 0.8494,
      "step": 298
    },
    {
      "epoch": 0.695025569502557,
      "grad_norm": 1.1509863138198853,
      "learning_rate": 1.4330698883694115e-05,
      "loss": 0.8091,
      "step": 299
    },
    {
      "epoch": 0.697350069735007,
      "grad_norm": 1.237417459487915,
      "learning_rate": 1.413012428839254e-05,
      "loss": 0.8465,
      "step": 300
    },
    {
      "epoch": 0.699674569967457,
      "grad_norm": 1.1145235300064087,
      "learning_rate": 1.3930572608583745e-05,
      "loss": 0.7913,
      "step": 301
    },
    {
      "epoch": 0.701999070199907,
      "grad_norm": 1.1738826036453247,
      "learning_rate": 1.3732054956183111e-05,
      "loss": 0.8164,
      "step": 302
    },
    {
      "epoch": 0.704323570432357,
      "grad_norm": 1.1653589010238647,
      "learning_rate": 1.3534582385526876e-05,
      "loss": 0.7946,
      "step": 303
    },
    {
      "epoch": 0.7066480706648071,
      "grad_norm": 1.1790781021118164,
      "learning_rate": 1.3338165892756468e-05,
      "loss": 0.8287,
      "step": 304
    },
    {
      "epoch": 0.7089725708972571,
      "grad_norm": 1.1557509899139404,
      "learning_rate": 1.3142816415206304e-05,
      "loss": 0.7584,
      "step": 305
    },
    {
      "epoch": 0.7112970711297071,
      "grad_norm": 1.2092468738555908,
      "learning_rate": 1.2948544830794682e-05,
      "loss": 0.8641,
      "step": 306
    },
    {
      "epoch": 0.7136215713621571,
      "grad_norm": 1.1953829526901245,
      "learning_rate": 1.275536195741805e-05,
      "loss": 0.8669,
      "step": 307
    },
    {
      "epoch": 0.7159460715946072,
      "grad_norm": 1.1542140245437622,
      "learning_rate": 1.2563278552348676e-05,
      "loss": 0.8841,
      "step": 308
    },
    {
      "epoch": 0.7182705718270572,
      "grad_norm": 1.2032291889190674,
      "learning_rate": 1.2372305311635541e-05,
      "loss": 0.8485,
      "step": 309
    },
    {
      "epoch": 0.7205950720595072,
      "grad_norm": 1.1283597946166992,
      "learning_rate": 1.2182452869508834e-05,
      "loss": 0.7937,
      "step": 310
    },
    {
      "epoch": 0.7229195722919572,
      "grad_norm": 1.133653163909912,
      "learning_rate": 1.1993731797787766e-05,
      "loss": 0.81,
      "step": 311
    },
    {
      "epoch": 0.7252440725244073,
      "grad_norm": 1.1899718046188354,
      "learning_rate": 1.1806152605291783e-05,
      "loss": 0.8785,
      "step": 312
    },
    {
      "epoch": 0.7275685727568573,
      "grad_norm": 1.182756781578064,
      "learning_rate": 1.1619725737255553e-05,
      "loss": 0.8157,
      "step": 313
    },
    {
      "epoch": 0.7298930729893073,
      "grad_norm": 1.1462957859039307,
      "learning_rate": 1.143446157474723e-05,
      "loss": 0.8032,
      "step": 314
    },
    {
      "epoch": 0.7322175732217573,
      "grad_norm": 1.1228857040405273,
      "learning_rate": 1.1250370434090382e-05,
      "loss": 0.7521,
      "step": 315
    },
    {
      "epoch": 0.7345420734542073,
      "grad_norm": 1.1679176092147827,
      "learning_rate": 1.1067462566289592e-05,
      "loss": 0.8729,
      "step": 316
    },
    {
      "epoch": 0.7368665736866574,
      "grad_norm": 1.1772230863571167,
      "learning_rate": 1.0885748156459578e-05,
      "loss": 0.8305,
      "step": 317
    },
    {
      "epoch": 0.7391910739191074,
      "grad_norm": 1.1800023317337036,
      "learning_rate": 1.0705237323258088e-05,
      "loss": 0.8285,
      "step": 318
    },
    {
      "epoch": 0.7415155741515574,
      "grad_norm": 1.1623144149780273,
      "learning_rate": 1.0525940118322434e-05,
      "loss": 0.8343,
      "step": 319
    },
    {
      "epoch": 0.7438400743840075,
      "grad_norm": 1.2074713706970215,
      "learning_rate": 1.0347866525709737e-05,
      "loss": 0.8442,
      "step": 320
    },
    {
      "epoch": 0.7461645746164575,
      "grad_norm": 1.1325371265411377,
      "learning_rate": 1.017102646134103e-05,
      "loss": 0.8072,
      "step": 321
    },
    {
      "epoch": 0.7484890748489075,
      "grad_norm": 1.176443099975586,
      "learning_rate": 9.995429772449044e-06,
      "loss": 0.8585,
      "step": 322
    },
    {
      "epoch": 0.7508135750813575,
      "grad_norm": 1.159180760383606,
      "learning_rate": 9.82108623702991e-06,
      "loss": 0.8226,
      "step": 323
    },
    {
      "epoch": 0.7531380753138075,
      "grad_norm": 1.180493950843811,
      "learning_rate": 9.648005563298672e-06,
      "loss": 0.8364,
      "step": 324
    },
    {
      "epoch": 0.7554625755462575,
      "grad_norm": 1.2017948627471924,
      "learning_rate": 9.476197389148645e-06,
      "loss": 0.8531,
      "step": 325
    },
    {
      "epoch": 0.7577870757787076,
      "grad_norm": 1.156783103942871,
      "learning_rate": 9.305671281614786e-06,
      "loss": 0.8105,
      "step": 326
    },
    {
      "epoch": 0.7601115760111576,
      "grad_norm": 1.145729660987854,
      "learning_rate": 9.136436736340962e-06,
      "loss": 0.8248,
      "step": 327
    },
    {
      "epoch": 0.7624360762436077,
      "grad_norm": 1.2108309268951416,
      "learning_rate": 8.968503177051128e-06,
      "loss": 0.8042,
      "step": 328
    },
    {
      "epoch": 0.7647605764760577,
      "grad_norm": 1.193647861480713,
      "learning_rate": 8.801879955024671e-06,
      "loss": 0.8598,
      "step": 329
    },
    {
      "epoch": 0.7670850767085077,
      "grad_norm": 1.2257269620895386,
      "learning_rate": 8.636576348575587e-06,
      "loss": 0.8747,
      "step": 330
    },
    {
      "epoch": 0.7694095769409577,
      "grad_norm": 1.2016220092773438,
      "learning_rate": 8.472601562535888e-06,
      "loss": 0.799,
      "step": 331
    },
    {
      "epoch": 0.7717340771734077,
      "grad_norm": 1.243636131286621,
      "learning_rate": 8.309964727743042e-06,
      "loss": 0.8876,
      "step": 332
    },
    {
      "epoch": 0.7740585774058577,
      "grad_norm": 1.2658741474151611,
      "learning_rate": 8.148674900531466e-06,
      "loss": 0.8943,
      "step": 333
    },
    {
      "epoch": 0.7763830776383077,
      "grad_norm": 1.1937084197998047,
      "learning_rate": 7.988741062228303e-06,
      "loss": 0.8072,
      "step": 334
    },
    {
      "epoch": 0.7787075778707578,
      "grad_norm": 1.188068151473999,
      "learning_rate": 7.830172118653236e-06,
      "loss": 0.8405,
      "step": 335
    },
    {
      "epoch": 0.7810320781032078,
      "grad_norm": 1.1817395687103271,
      "learning_rate": 7.672976899622636e-06,
      "loss": 0.8474,
      "step": 336
    },
    {
      "epoch": 0.7833565783356579,
      "grad_norm": 1.2230587005615234,
      "learning_rate": 7.517164158457843e-06,
      "loss": 0.8168,
      "step": 337
    },
    {
      "epoch": 0.7856810785681079,
      "grad_norm": 1.1503418684005737,
      "learning_rate": 7.362742571497723e-06,
      "loss": 0.7769,
      "step": 338
    },
    {
      "epoch": 0.7880055788005579,
      "grad_norm": 1.1554263830184937,
      "learning_rate": 7.209720737615588e-06,
      "loss": 0.7941,
      "step": 339
    },
    {
      "epoch": 0.7903300790330079,
      "grad_norm": 1.167829155921936,
      "learning_rate": 7.058107177740316e-06,
      "loss": 0.861,
      "step": 340
    },
    {
      "epoch": 0.7926545792654579,
      "grad_norm": 1.1501438617706299,
      "learning_rate": 6.907910334381895e-06,
      "loss": 0.7728,
      "step": 341
    },
    {
      "epoch": 0.7949790794979079,
      "grad_norm": 1.2136327028274536,
      "learning_rate": 6.759138571161328e-06,
      "loss": 0.8745,
      "step": 342
    },
    {
      "epoch": 0.7973035797303579,
      "grad_norm": 1.15041184425354,
      "learning_rate": 6.611800172344858e-06,
      "loss": 0.8245,
      "step": 343
    },
    {
      "epoch": 0.799628079962808,
      "grad_norm": 1.2242484092712402,
      "learning_rate": 6.4659033423827105e-06,
      "loss": 0.8703,
      "step": 344
    },
    {
      "epoch": 0.8019525801952581,
      "grad_norm": 1.2068572044372559,
      "learning_rate": 6.321456205452219e-06,
      "loss": 0.8453,
      "step": 345
    },
    {
      "epoch": 0.8042770804277081,
      "grad_norm": 1.138300895690918,
      "learning_rate": 6.17846680500541e-06,
      "loss": 0.8205,
      "step": 346
    },
    {
      "epoch": 0.8066015806601581,
      "grad_norm": 1.2071059942245483,
      "learning_rate": 6.0369431033211525e-06,
      "loss": 0.873,
      "step": 347
    },
    {
      "epoch": 0.8089260808926081,
      "grad_norm": 1.2351478338241577,
      "learning_rate": 5.896892981061738e-06,
      "loss": 0.8453,
      "step": 348
    },
    {
      "epoch": 0.8112505811250581,
      "grad_norm": 1.1644549369812012,
      "learning_rate": 5.758324236834091e-06,
      "loss": 0.8112,
      "step": 349
    },
    {
      "epoch": 0.8135750813575081,
      "grad_norm": 1.2177077531814575,
      "learning_rate": 5.621244586755496e-06,
      "loss": 0.8216,
      "step": 350
    },
{ |
|
"epoch": 0.8158995815899581, |
|
"grad_norm": 1.202233076095581, |
|
"learning_rate": 5.485661664023893e-06, |
|
"loss": 0.8821, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.8182240818224081, |
|
"grad_norm": 1.173756718635559, |
|
"learning_rate": 5.351583018492902e-06, |
|
"loss": 0.8337, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.8205485820548583, |
|
"grad_norm": 1.1963239908218384, |
|
"learning_rate": 5.219016116251332e-06, |
|
"loss": 0.8751, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.8228730822873083, |
|
"grad_norm": 1.1608357429504395, |
|
"learning_rate": 5.087968339207486e-06, |
|
"loss": 0.8938, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.8251975825197583, |
|
"grad_norm": 1.1277998685836792, |
|
"learning_rate": 4.958446984678114e-06, |
|
"loss": 0.7774, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.8275220827522083, |
|
"grad_norm": 1.2397881746292114, |
|
"learning_rate": 4.830459264982011e-06, |
|
"loss": 0.8269, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.8298465829846583, |
|
"grad_norm": 1.2230621576309204, |
|
"learning_rate": 4.704012307038478e-06, |
|
"loss": 0.8511, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.8321710832171083, |
|
"grad_norm": 1.1905261278152466, |
|
"learning_rate": 4.579113151970381e-06, |
|
"loss": 0.8424, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.8344955834495583, |
|
"grad_norm": 1.2389678955078125, |
|
"learning_rate": 4.455768754712141e-06, |
|
"loss": 0.8978, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.8368200836820083, |
|
"grad_norm": 1.2028136253356934, |
|
"learning_rate": 4.3339859836224235e-06, |
|
"loss": 0.8501, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.8391445839144583, |
|
"grad_norm": 1.1460210084915161, |
|
"learning_rate": 4.213771620101661e-06, |
|
"loss": 0.7621, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.8414690841469085, |
|
"grad_norm": 1.1692181825637817, |
|
"learning_rate": 4.095132358214476e-06, |
|
"loss": 0.8351, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.8437935843793585, |
|
"grad_norm": 1.1694318056106567, |
|
"learning_rate": 3.97807480431688e-06, |
|
"loss": 0.7781, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.8461180846118085, |
|
"grad_norm": 1.1537806987762451, |
|
"learning_rate": 3.862605476688435e-06, |
|
"loss": 0.7383, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.8484425848442585, |
|
"grad_norm": 1.1844308376312256, |
|
"learning_rate": 3.748730805169292e-06, |
|
"loss": 0.8434, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.8507670850767085, |
|
"grad_norm": 1.2213958501815796, |
|
"learning_rate": 3.6364571308021075e-06, |
|
"loss": 0.9058, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.8530915853091585, |
|
"grad_norm": 1.2063052654266357, |
|
"learning_rate": 3.5257907054789965e-06, |
|
"loss": 0.864, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.8554160855416085, |
|
"grad_norm": 1.2098087072372437, |
|
"learning_rate": 3.416737691593364e-06, |
|
"loss": 0.8397, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.8577405857740585, |
|
"grad_norm": 1.197662591934204, |
|
"learning_rate": 3.309304161696762e-06, |
|
"loss": 0.8456, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.8600650860065086, |
|
"grad_norm": 1.167975902557373, |
|
"learning_rate": 3.203496098160773e-06, |
|
"loss": 0.8325, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.8623895862389587, |
|
"grad_norm": 1.1775121688842773, |
|
"learning_rate": 3.0993193928438364e-06, |
|
"loss": 0.8519, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.8647140864714087, |
|
"grad_norm": 1.1968019008636475, |
|
"learning_rate": 2.996779846763207e-06, |
|
"loss": 0.8299, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.8670385867038587, |
|
"grad_norm": 1.1625730991363525, |
|
"learning_rate": 2.8958831697719116e-06, |
|
"loss": 0.7785, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.8693630869363087, |
|
"grad_norm": 1.2199307680130005, |
|
"learning_rate": 2.796634980240773e-06, |
|
"loss": 0.8107, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.8716875871687587, |
|
"grad_norm": 1.2188974618911743, |
|
"learning_rate": 2.6990408047456208e-06, |
|
"loss": 0.8457, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.8740120874012087, |
|
"grad_norm": 1.194596767425537, |
|
"learning_rate": 2.603106077759467e-06, |
|
"loss": 0.8295, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.8763365876336587, |
|
"grad_norm": 1.0834101438522339, |
|
"learning_rate": 2.508836141349955e-06, |
|
"loss": 0.7247, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.8786610878661087, |
|
"grad_norm": 1.1972309350967407, |
|
"learning_rate": 2.4162362448818634e-06, |
|
"loss": 0.9107, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.8809855880985588, |
|
"grad_norm": 1.1724853515625, |
|
"learning_rate": 2.3253115447247866e-06, |
|
"loss": 0.8098, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.8833100883310089, |
|
"grad_norm": 1.1860358715057373, |
|
"learning_rate": 2.2360671039660346e-06, |
|
"loss": 0.8301, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8856345885634589, |
|
"grad_norm": 1.2200249433517456, |
|
"learning_rate": 2.148507892128674e-06, |
|
"loss": 0.8012, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.8879590887959089, |
|
"grad_norm": 1.118643045425415, |
|
"learning_rate": 2.062638784894803e-06, |
|
"loss": 0.7925, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.8902835890283589, |
|
"grad_norm": 1.1284154653549194, |
|
"learning_rate": 1.9784645638340827e-06, |
|
"loss": 0.825, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.8926080892608089, |
|
"grad_norm": 1.2449028491973877, |
|
"learning_rate": 1.8959899161374317e-06, |
|
"loss": 0.8187, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.8949325894932589, |
|
"grad_norm": 1.1662657260894775, |
|
"learning_rate": 1.8152194343560626e-06, |
|
"loss": 0.8131, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.897257089725709, |
|
"grad_norm": 1.2282719612121582, |
|
"learning_rate": 1.736157616145729e-06, |
|
"loss": 0.8428, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.899581589958159, |
|
"grad_norm": 1.112457275390625, |
|
"learning_rate": 1.6588088640162775e-06, |
|
"loss": 0.767, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.901906090190609, |
|
"grad_norm": 1.1861826181411743, |
|
"learning_rate": 1.5831774850865059e-06, |
|
"loss": 0.8122, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.9042305904230591, |
|
"grad_norm": 1.1628504991531372, |
|
"learning_rate": 1.5092676908443047e-06, |
|
"loss": 0.7754, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.9065550906555091, |
|
"grad_norm": 1.1673952341079712, |
|
"learning_rate": 1.4370835969121649e-06, |
|
"loss": 0.8309, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.9088795908879591, |
|
"grad_norm": 1.203006386756897, |
|
"learning_rate": 1.366629222817995e-06, |
|
"loss": 0.7955, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.9112040911204091, |
|
"grad_norm": 1.1598377227783203, |
|
"learning_rate": 1.2979084917712842e-06, |
|
"loss": 0.8337, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.9135285913528591, |
|
"grad_norm": 1.1832410097122192, |
|
"learning_rate": 1.2309252304446607e-06, |
|
"loss": 0.8024, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.9158530915853091, |
|
"grad_norm": 1.1913425922393799, |
|
"learning_rate": 1.1656831687607826e-06, |
|
"loss": 0.8127, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.9181775918177592, |
|
"grad_norm": 1.1691555976867676, |
|
"learning_rate": 1.1021859396846632e-06, |
|
"loss": 0.7681, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.9205020920502092, |
|
"grad_norm": 1.2271596193313599, |
|
"learning_rate": 1.0404370790213507e-06, |
|
"loss": 0.8653, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.9228265922826592, |
|
"grad_norm": 1.1940650939941406, |
|
"learning_rate": 9.804400252190588e-07, |
|
"loss": 0.8275, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.9251510925151093, |
|
"grad_norm": 1.322935938835144, |
|
"learning_rate": 9.221981191776801e-07, |
|
"loss": 0.9482, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.9274755927475593, |
|
"grad_norm": 1.226189374923706, |
|
"learning_rate": 8.657146040627655e-07, |
|
"loss": 0.8803, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.9298000929800093, |
|
"grad_norm": 1.2069227695465088, |
|
"learning_rate": 8.109926251249163e-07, |
|
"loss": 0.8547, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.9321245932124593, |
|
"grad_norm": 1.2206929922103882, |
|
"learning_rate": 7.58035229524658e-07, |
|
"loss": 0.8214, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.9344490934449093, |
|
"grad_norm": 1.2493535280227661, |
|
"learning_rate": 7.068453661627424e-07, |
|
"loss": 0.8802, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.9367735936773594, |
|
"grad_norm": 1.1659430265426636, |
|
"learning_rate": 6.574258855159656e-07, |
|
"loss": 0.803, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.9390980939098094, |
|
"grad_norm": 1.2101560831069946, |
|
"learning_rate": 6.097795394784138e-07, |
|
"loss": 0.8255, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.9414225941422594, |
|
"grad_norm": 1.1926182508468628, |
|
"learning_rate": 5.639089812082365e-07, |
|
"loss": 0.7878, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.9437470943747094, |
|
"grad_norm": 1.1893055438995361, |
|
"learning_rate": 5.198167649799214e-07, |
|
"loss": 0.8213, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.9460715946071595, |
|
"grad_norm": 1.2222219705581665, |
|
"learning_rate": 4.775053460420305e-07, |
|
"loss": 0.8765, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.9483960948396095, |
|
"grad_norm": 1.1839882135391235, |
|
"learning_rate": 4.369770804805123e-07, |
|
"loss": 0.7754, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.9507205950720595, |
|
"grad_norm": 1.1785272359848022, |
|
"learning_rate": 3.98234225087489e-07, |
|
"loss": 0.8291, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.9530450953045095, |
|
"grad_norm": 1.1746994256973267, |
|
"learning_rate": 3.6127893723558233e-07, |
|
"loss": 0.8198, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9553695955369595, |
|
"grad_norm": 1.218136191368103, |
|
"learning_rate": 3.2611327475779545e-07, |
|
"loss": 0.8316, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.9576940957694096, |
|
"grad_norm": 1.2541922330856323, |
|
"learning_rate": 2.927391958329195e-07, |
|
"loss": 0.8747, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.9600185960018596, |
|
"grad_norm": 1.1385949850082397, |
|
"learning_rate": 2.6115855887648195e-07, |
|
"loss": 0.7893, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.9623430962343096, |
|
"grad_norm": 1.142892837524414, |
|
"learning_rate": 2.3137312243728834e-07, |
|
"loss": 0.7838, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.9646675964667596, |
|
"grad_norm": 1.2635773420333862, |
|
"learning_rate": 2.033845450994659e-07, |
|
"loss": 0.8358, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.9669920966992097, |
|
"grad_norm": 1.141421914100647, |
|
"learning_rate": 1.771943853901292e-07, |
|
"loss": 0.8355, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.9693165969316597, |
|
"grad_norm": 1.1934841871261597, |
|
"learning_rate": 1.5280410169258794e-07, |
|
"loss": 0.8171, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.9716410971641097, |
|
"grad_norm": 1.1529548168182373, |
|
"learning_rate": 1.3021505216514017e-07, |
|
"loss": 0.7629, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.9739655973965597, |
|
"grad_norm": 1.207676649093628, |
|
"learning_rate": 1.0942849466543308e-07, |
|
"loss": 0.8672, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.9762900976290098, |
|
"grad_norm": 1.264219045639038, |
|
"learning_rate": 9.044558668043461e-08, |
|
"loss": 0.9103, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9786145978614598, |
|
"grad_norm": 1.2143738269805908, |
|
"learning_rate": 7.32673852619689e-08, |
|
"loss": 0.8604, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.9809390980939098, |
|
"grad_norm": 1.172698974609375, |
|
"learning_rate": 5.7894846967866115e-08, |
|
"loss": 0.823, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.9832635983263598, |
|
"grad_norm": 1.194075584411621, |
|
"learning_rate": 4.432882780868341e-08, |
|
"loss": 0.8139, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.9855880985588099, |
|
"grad_norm": 1.191976547241211, |
|
"learning_rate": 3.257008320005111e-08, |
|
"loss": 0.8117, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.9879125987912599, |
|
"grad_norm": 1.202752709388733, |
|
"learning_rate": 2.261926792058999e-08, |
|
"loss": 0.8479, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9902370990237099, |
|
"grad_norm": 1.2659246921539307, |
|
"learning_rate": 1.4476936075475397e-08, |
|
"loss": 0.8472, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9925615992561599, |
|
"grad_norm": 1.1615605354309082, |
|
"learning_rate": 8.143541065568914e-09, |
|
"loss": 0.778, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.99488609948861, |
|
"grad_norm": 1.23662531375885, |
|
"learning_rate": 3.619435562157158e-09, |
|
"loss": 0.8868, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.99721059972106, |
|
"grad_norm": 1.181993007659912, |
|
"learning_rate": 9.048714873446938e-10, |
|
"loss": 0.8409, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.99953509995351, |
|
"grad_norm": 1.1682720184326172, |
|
"learning_rate": 0.0, |
|
"loss": 0.8589, |
|
"step": 430 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 430, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.6061243889342874e+17, |
|
"train_batch_size": 6, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |