{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 129.03225806451613,
  "eval_steps": 500,
  "global_step": 18000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.14336917562724014,
      "grad_norm": 0.7797384262084961,
      "learning_rate": 4e-05,
      "loss": 2.2646,
      "step": 20
    },
    {
      "epoch": 0.2867383512544803,
      "grad_norm": 0.8766279220581055,
      "learning_rate": 8e-05,
      "loss": 2.2106,
      "step": 40
    },
    {
      "epoch": 0.43010752688172044,
      "grad_norm": 0.9374061822891235,
      "learning_rate": 0.00012,
      "loss": 1.9441,
      "step": 60
    },
    {
      "epoch": 0.5734767025089605,
      "grad_norm": 0.9437981247901917,
      "learning_rate": 0.00016,
      "loss": 1.8573,
      "step": 80
    },
    {
      "epoch": 0.7168458781362007,
      "grad_norm": 1.16326904296875,
      "learning_rate": 0.0002,
      "loss": 1.7739,
      "step": 100
    },
    {
      "epoch": 0.8602150537634409,
      "grad_norm": 1.1639043092727661,
      "learning_rate": 0.00019988455988455988,
      "loss": 1.6996,
      "step": 120
    },
    {
      "epoch": 1.003584229390681,
      "grad_norm": 1.3743420839309692,
      "learning_rate": 0.00019976911976911978,
      "loss": 1.6048,
      "step": 140
    },
    {
      "epoch": 1.146953405017921,
      "grad_norm": 1.3700010776519775,
      "learning_rate": 0.00019965367965367966,
      "loss": 1.5738,
      "step": 160
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 1.8980423212051392,
      "learning_rate": 0.00019953823953823956,
      "loss": 1.4982,
      "step": 180
    },
    {
      "epoch": 1.4336917562724014,
      "grad_norm": 1.8157719373703003,
      "learning_rate": 0.00019942279942279943,
      "loss": 1.5173,
      "step": 200
    },
    {
      "epoch": 1.5770609318996416,
      "grad_norm": 2.1088435649871826,
      "learning_rate": 0.0001993073593073593,
      "loss": 1.4963,
      "step": 220
    },
    {
      "epoch": 1.7204301075268817,
      "grad_norm": 2.1570374965667725,
      "learning_rate": 0.0001991919191919192,
      "loss": 1.4934,
      "step": 240
    },
    {
      "epoch": 1.863799283154122,
      "grad_norm": 2.2034499645233154,
      "learning_rate": 0.00019907647907647908,
      "loss": 1.429,
      "step": 260
    },
    {
      "epoch": 2.007168458781362,
      "grad_norm": 1.7870151996612549,
      "learning_rate": 0.00019896103896103898,
      "loss": 1.4812,
      "step": 280
    },
    {
      "epoch": 2.150537634408602,
      "grad_norm": 2.705448627471924,
      "learning_rate": 0.00019884559884559885,
      "loss": 1.3495,
      "step": 300
    },
    {
      "epoch": 2.293906810035842,
      "grad_norm": 2.151088237762451,
      "learning_rate": 0.00019873015873015875,
      "loss": 1.3401,
      "step": 320
    },
    {
      "epoch": 2.4372759856630823,
      "grad_norm": 2.273388385772705,
      "learning_rate": 0.00019861471861471863,
      "loss": 1.261,
      "step": 340
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 3.103841543197632,
      "learning_rate": 0.00019849927849927853,
      "loss": 1.2768,
      "step": 360
    },
    {
      "epoch": 2.7240143369175627,
      "grad_norm": 2.259409189224243,
      "learning_rate": 0.00019838383838383837,
      "loss": 1.2932,
      "step": 380
    },
    {
      "epoch": 2.867383512544803,
      "grad_norm": 3.061185359954834,
      "learning_rate": 0.00019826839826839827,
      "loss": 1.2528,
      "step": 400
    },
    {
      "epoch": 3.010752688172043,
      "grad_norm": 2.311734199523926,
      "learning_rate": 0.00019815295815295815,
      "loss": 1.2575,
      "step": 420
    },
    {
      "epoch": 3.154121863799283,
      "grad_norm": 3.4611690044403076,
      "learning_rate": 0.00019803751803751805,
      "loss": 1.1404,
      "step": 440
    },
    {
      "epoch": 3.2974910394265233,
      "grad_norm": 3.1576485633850098,
      "learning_rate": 0.00019792207792207792,
      "loss": 1.1195,
      "step": 460
    },
    {
      "epoch": 3.4408602150537635,
      "grad_norm": 2.9822025299072266,
      "learning_rate": 0.00019780663780663782,
      "loss": 1.1651,
      "step": 480
    },
    {
      "epoch": 3.5842293906810037,
      "grad_norm": 2.700014591217041,
      "learning_rate": 0.0001976911976911977,
      "loss": 1.1024,
      "step": 500
    },
    {
      "epoch": 3.727598566308244,
      "grad_norm": 3.5420925617218018,
      "learning_rate": 0.0001975757575757576,
      "loss": 1.1593,
      "step": 520
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 3.261657238006592,
      "learning_rate": 0.00019746031746031747,
      "loss": 1.1687,
      "step": 540
    },
    {
      "epoch": 4.014336917562724,
      "grad_norm": 2.6333651542663574,
      "learning_rate": 0.00019734487734487734,
      "loss": 1.0741,
      "step": 560
    },
    {
      "epoch": 4.157706093189964,
      "grad_norm": 3.958665609359741,
      "learning_rate": 0.00019722943722943722,
      "loss": 0.9966,
      "step": 580
    },
    {
      "epoch": 4.301075268817204,
      "grad_norm": 3.4680724143981934,
      "learning_rate": 0.00019711399711399712,
      "loss": 0.9532,
      "step": 600
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 3.7936246395111084,
      "learning_rate": 0.000196998556998557,
      "loss": 1.0428,
      "step": 620
    },
    {
      "epoch": 4.587813620071684,
      "grad_norm": 3.4765727519989014,
      "learning_rate": 0.0001968831168831169,
      "loss": 1.0067,
      "step": 640
    },
    {
      "epoch": 4.731182795698925,
      "grad_norm": 4.042802810668945,
      "learning_rate": 0.00019676767676767677,
      "loss": 1.0318,
      "step": 660
    },
    {
      "epoch": 4.874551971326165,
      "grad_norm": 3.597938060760498,
      "learning_rate": 0.00019665223665223667,
      "loss": 0.9431,
      "step": 680
    },
    {
      "epoch": 5.017921146953405,
      "grad_norm": 3.7627108097076416,
      "learning_rate": 0.00019653679653679654,
      "loss": 1.0065,
      "step": 700
    },
    {
      "epoch": 5.161290322580645,
      "grad_norm": 3.363626003265381,
      "learning_rate": 0.00019642135642135644,
      "loss": 0.8925,
      "step": 720
    },
    {
      "epoch": 5.304659498207886,
      "grad_norm": 3.777928352355957,
      "learning_rate": 0.00019630591630591631,
      "loss": 0.8798,
      "step": 740
    },
    {
      "epoch": 5.448028673835125,
      "grad_norm": 3.517575740814209,
      "learning_rate": 0.0001961904761904762,
      "loss": 0.8555,
      "step": 760
    },
    {
      "epoch": 5.591397849462366,
      "grad_norm": 3.2290761470794678,
      "learning_rate": 0.0001960750360750361,
      "loss": 0.8826,
      "step": 780
    },
    {
      "epoch": 5.734767025089606,
      "grad_norm": 3.73039174079895,
      "learning_rate": 0.00019595959595959596,
      "loss": 0.9197,
      "step": 800
    },
    {
      "epoch": 5.878136200716845,
      "grad_norm": 3.3132483959198,
      "learning_rate": 0.00019584415584415586,
      "loss": 0.9388,
      "step": 820
    },
    {
      "epoch": 6.021505376344086,
      "grad_norm": 3.7642362117767334,
      "learning_rate": 0.00019572871572871574,
      "loss": 0.8208,
      "step": 840
    },
    {
      "epoch": 6.164874551971327,
      "grad_norm": 4.448329448699951,
      "learning_rate": 0.00019561327561327564,
      "loss": 0.7378,
      "step": 860
    },
    {
      "epoch": 6.308243727598566,
      "grad_norm": 3.517230987548828,
      "learning_rate": 0.0001954978354978355,
      "loss": 0.7511,
      "step": 880
    },
    {
      "epoch": 6.451612903225806,
      "grad_norm": 4.085216999053955,
      "learning_rate": 0.0001953823953823954,
      "loss": 0.7659,
      "step": 900
    },
    {
      "epoch": 6.594982078853047,
      "grad_norm": 4.3206634521484375,
      "learning_rate": 0.00019526695526695528,
      "loss": 0.8262,
      "step": 920
    },
    {
      "epoch": 6.738351254480286,
      "grad_norm": 3.6010711193084717,
      "learning_rate": 0.00019515151515151516,
      "loss": 0.8121,
      "step": 940
    },
    {
      "epoch": 6.881720430107527,
      "grad_norm": 4.308924198150635,
      "learning_rate": 0.00019503607503607503,
      "loss": 0.83,
      "step": 960
    },
    {
      "epoch": 7.025089605734767,
      "grad_norm": 3.5463600158691406,
      "learning_rate": 0.00019492063492063493,
      "loss": 0.7746,
      "step": 980
    },
    {
      "epoch": 7.168458781362007,
      "grad_norm": 3.1309635639190674,
      "learning_rate": 0.0001948051948051948,
      "loss": 0.6994,
      "step": 1000
    },
    {
      "epoch": 7.311827956989247,
      "grad_norm": 4.161702632904053,
      "learning_rate": 0.0001946897546897547,
      "loss": 0.6635,
      "step": 1020
    },
    {
      "epoch": 7.455197132616488,
      "grad_norm": 3.9364986419677734,
      "learning_rate": 0.00019457431457431458,
      "loss": 0.7223,
      "step": 1040
    },
    {
      "epoch": 7.598566308243727,
      "grad_norm": 3.674201250076294,
      "learning_rate": 0.00019445887445887448,
      "loss": 0.737,
      "step": 1060
    },
    {
      "epoch": 7.741935483870968,
      "grad_norm": 5.211554050445557,
      "learning_rate": 0.00019434343434343435,
      "loss": 0.6624,
      "step": 1080
    },
    {
      "epoch": 7.885304659498208,
      "grad_norm": 3.496736764907837,
      "learning_rate": 0.00019422799422799425,
      "loss": 0.6788,
      "step": 1100
    },
    {
      "epoch": 8.028673835125447,
      "grad_norm": 4.182429790496826,
      "learning_rate": 0.0001941125541125541,
      "loss": 0.6958,
      "step": 1120
    },
    {
      "epoch": 8.172043010752688,
      "grad_norm": 3.506474018096924,
      "learning_rate": 0.000193997113997114,
      "loss": 0.5827,
      "step": 1140
    },
    {
      "epoch": 8.315412186379929,
      "grad_norm": 4.012503623962402,
      "learning_rate": 0.00019388167388167387,
      "loss": 0.5956,
      "step": 1160
    },
    {
      "epoch": 8.45878136200717,
      "grad_norm": 4.3530497550964355,
      "learning_rate": 0.00019376623376623377,
      "loss": 0.6149,
      "step": 1180
    },
    {
      "epoch": 8.602150537634408,
      "grad_norm": 3.8034679889678955,
      "learning_rate": 0.00019365079365079365,
      "loss": 0.6482,
      "step": 1200
    },
{
|
|
"epoch": 8.745519713261649,
|
|
"grad_norm": 4.107527256011963,
|
|
"learning_rate": 0.00019353535353535355,
|
|
"loss": 0.601,
|
|
"step": 1220
|
|
},
|
|
{
|
|
"epoch": 8.88888888888889,
|
|
"grad_norm": 3.9704201221466064,
|
|
"learning_rate": 0.00019341991341991342,
|
|
"loss": 0.6424,
|
|
"step": 1240
|
|
},
|
|
{
|
|
"epoch": 9.03225806451613,
|
|
"grad_norm": 5.5366315841674805,
|
|
"learning_rate": 0.00019330447330447332,
|
|
"loss": 0.6291,
|
|
"step": 1260
|
|
},
|
|
{
|
|
"epoch": 9.175627240143369,
|
|
"grad_norm": 4.817638397216797,
|
|
"learning_rate": 0.0001931890331890332,
|
|
"loss": 0.5392,
|
|
"step": 1280
|
|
},
|
|
{
|
|
"epoch": 9.31899641577061,
|
|
"grad_norm": 4.172895431518555,
|
|
"learning_rate": 0.00019307359307359307,
|
|
"loss": 0.5134,
|
|
"step": 1300
|
|
},
|
|
{
|
|
"epoch": 9.46236559139785,
|
|
"grad_norm": 4.357290267944336,
|
|
"learning_rate": 0.00019295815295815297,
|
|
"loss": 0.5292,
|
|
"step": 1320
|
|
},
|
|
{
|
|
"epoch": 9.60573476702509,
|
|
"grad_norm": 4.0718512535095215,
|
|
"learning_rate": 0.00019284271284271284,
|
|
"loss": 0.5353,
|
|
"step": 1340
|
|
},
|
|
{
|
|
"epoch": 9.74910394265233,
|
|
"grad_norm": 4.535477638244629,
|
|
"learning_rate": 0.00019272727272727274,
|
|
"loss": 0.5357,
|
|
"step": 1360
|
|
},
|
|
{
|
|
"epoch": 9.89247311827957,
|
|
"grad_norm": 5.310904026031494,
|
|
"learning_rate": 0.00019261183261183262,
|
|
"loss": 0.5955,
|
|
"step": 1380
|
|
},
|
|
{
|
|
"epoch": 10.03584229390681,
|
|
"grad_norm": 4.394368648529053,
|
|
"learning_rate": 0.00019249639249639252,
|
|
"loss": 0.5394,
|
|
"step": 1400
|
|
},
|
|
{
|
|
"epoch": 10.17921146953405,
|
|
"grad_norm": 4.731152057647705,
|
|
"learning_rate": 0.0001923809523809524,
|
|
"loss": 0.4465,
|
|
"step": 1420
|
|
},
|
|
{
|
|
"epoch": 10.32258064516129,
|
|
"grad_norm": 4.1888041496276855,
|
|
"learning_rate": 0.0001922655122655123,
|
|
"loss": 0.4734,
|
|
"step": 1440
|
|
},
|
|
{
|
|
"epoch": 10.46594982078853,
|
|
"grad_norm": 4.412322044372559,
|
|
"learning_rate": 0.00019215007215007217,
|
|
"loss": 0.4749,
|
|
"step": 1460
|
|
},
|
|
{
|
|
"epoch": 10.609318996415771,
|
|
"grad_norm": 4.3348612785339355,
|
|
"learning_rate": 0.00019203463203463204,
|
|
"loss": 0.4751,
|
|
"step": 1480
|
|
},
|
|
{
|
|
"epoch": 10.75268817204301,
|
|
"grad_norm": 4.405585765838623,
|
|
"learning_rate": 0.00019191919191919191,
|
|
"loss": 0.5176,
|
|
"step": 1500
|
|
},
|
|
{
|
|
"epoch": 10.89605734767025,
|
|
"grad_norm": 3.9980640411376953,
|
|
"learning_rate": 0.00019180375180375181,
|
|
"loss": 0.5078,
|
|
"step": 1520
|
|
},
|
|
{
|
|
"epoch": 11.039426523297491,
|
|
"grad_norm": 4.7061028480529785,
|
|
"learning_rate": 0.0001916883116883117,
|
|
"loss": 0.4775,
|
|
"step": 1540
|
|
},
|
|
{
|
|
"epoch": 11.182795698924732,
|
|
"grad_norm": 5.084146022796631,
|
|
"learning_rate": 0.0001915728715728716,
|
|
"loss": 0.3826,
|
|
"step": 1560
|
|
},
|
|
{
|
|
"epoch": 11.32616487455197,
|
|
"grad_norm": 3.631321907043457,
|
|
"learning_rate": 0.00019145743145743146,
|
|
"loss": 0.4259,
|
|
"step": 1580
|
|
},
|
|
{
|
|
"epoch": 11.469534050179211,
|
|
"grad_norm": 5.307252407073975,
|
|
"learning_rate": 0.00019134199134199136,
|
|
"loss": 0.409,
|
|
"step": 1600
|
|
},
|
|
{
|
|
"epoch": 11.612903225806452,
|
|
"grad_norm": 4.273385047912598,
|
|
"learning_rate": 0.00019122655122655124,
|
|
"loss": 0.4716,
|
|
"step": 1620
|
|
},
|
|
{
|
|
"epoch": 11.756272401433693,
|
|
"grad_norm": 3.9993953704833984,
|
|
"learning_rate": 0.00019111111111111114,
|
|
"loss": 0.4482,
|
|
"step": 1640
|
|
},
|
|
{
|
|
"epoch": 11.899641577060931,
|
|
"grad_norm": 4.456310749053955,
|
|
"learning_rate": 0.00019099567099567098,
|
|
"loss": 0.4285,
|
|
"step": 1660
|
|
},
|
|
{
|
|
"epoch": 12.043010752688172,
|
|
"grad_norm": 3.430715322494507,
|
|
"learning_rate": 0.00019088023088023088,
|
|
"loss": 0.4146,
|
|
"step": 1680
|
|
},
|
|
{
|
|
"epoch": 12.186379928315413,
|
|
"grad_norm": 4.638120174407959,
|
|
"learning_rate": 0.00019076479076479076,
|
|
"loss": 0.3632,
|
|
"step": 1700
|
|
},
|
|
{
|
|
"epoch": 12.329749103942653,
|
|
"grad_norm": 4.701437950134277,
|
|
"learning_rate": 0.00019064935064935066,
|
|
"loss": 0.3433,
|
|
"step": 1720
|
|
},
|
|
{
|
|
"epoch": 12.473118279569892,
|
|
"grad_norm": 4.129784107208252,
|
|
"learning_rate": 0.00019053391053391053,
|
|
"loss": 0.3695,
|
|
"step": 1740
|
|
},
|
|
{
|
|
"epoch": 12.616487455197133,
|
|
"grad_norm": 4.153416633605957,
|
|
"learning_rate": 0.00019041847041847043,
|
|
"loss": 0.3807,
|
|
"step": 1760
|
|
},
|
|
{
|
|
"epoch": 12.759856630824373,
|
|
"grad_norm": 4.782583713531494,
|
|
"learning_rate": 0.0001903030303030303,
|
|
"loss": 0.4431,
|
|
"step": 1780
|
|
},
|
|
{
|
|
"epoch": 12.903225806451612,
|
|
"grad_norm": 3.8527839183807373,
|
|
"learning_rate": 0.0001901875901875902,
|
|
"loss": 0.3609,
|
|
"step": 1800
|
|
},
|
|
{
|
|
"epoch": 13.046594982078853,
|
|
"grad_norm": 3.863771438598633,
|
|
"learning_rate": 0.00019007215007215008,
|
|
"loss": 0.3711,
|
|
"step": 1820
|
|
},
|
|
{
|
|
"epoch": 13.189964157706093,
|
|
"grad_norm": 4.283368110656738,
|
|
"learning_rate": 0.00018995670995670995,
|
|
"loss": 0.3066,
|
|
"step": 1840
|
|
},
|
|
{
|
|
"epoch": 13.333333333333334,
|
|
"grad_norm": 4.724956512451172,
|
|
"learning_rate": 0.00018984126984126985,
|
|
"loss": 0.3394,
|
|
"step": 1860
|
|
},
|
|
{
|
|
"epoch": 13.476702508960573,
|
|
"grad_norm": 4.810844421386719,
|
|
"learning_rate": 0.00018972582972582973,
|
|
"loss": 0.31,
|
|
"step": 1880
|
|
},
|
|
{
|
|
"epoch": 13.620071684587813,
|
|
"grad_norm": 4.2984466552734375,
|
|
"learning_rate": 0.00018961038961038963,
|
|
"loss": 0.3296,
|
|
"step": 1900
|
|
},
|
|
{
|
|
"epoch": 13.763440860215054,
|
|
"grad_norm": 4.326981067657471,
|
|
"learning_rate": 0.0001894949494949495,
|
|
"loss": 0.3428,
|
|
"step": 1920
|
|
},
|
|
{
|
|
"epoch": 13.906810035842295,
|
|
"grad_norm": 3.757113218307495,
|
|
"learning_rate": 0.0001893795093795094,
|
|
"loss": 0.3637,
|
|
"step": 1940
|
|
},
|
|
{
|
|
"epoch": 14.050179211469533,
|
|
"grad_norm": 4.3763861656188965,
|
|
"learning_rate": 0.00018926406926406928,
|
|
"loss": 0.3518,
|
|
"step": 1960
|
|
},
|
|
{
|
|
"epoch": 14.193548387096774,
|
|
"grad_norm": 4.564425468444824,
|
|
"learning_rate": 0.00018914862914862918,
|
|
"loss": 0.2795,
|
|
"step": 1980
|
|
},
|
|
{
|
|
"epoch": 14.336917562724015,
|
|
"grad_norm": 4.2235283851623535,
|
|
"learning_rate": 0.00018903318903318905,
|
|
"loss": 0.2836,
|
|
"step": 2000
|
|
},
|
|
{
|
|
"epoch": 14.480286738351255,
|
|
"grad_norm": 5.298072338104248,
|
|
"learning_rate": 0.00018891774891774892,
|
|
"loss": 0.2968,
|
|
"step": 2020
|
|
},
|
|
{
|
|
"epoch": 14.623655913978494,
|
|
"grad_norm": 4.486312389373779,
|
|
"learning_rate": 0.0001888023088023088,
|
|
"loss": 0.2891,
|
|
"step": 2040
|
|
},
|
|
{
|
|
"epoch": 14.767025089605735,
|
|
"grad_norm": 9.727706909179688,
|
|
"learning_rate": 0.0001886868686868687,
|
|
"loss": 0.3302,
|
|
"step": 2060
|
|
},
|
|
{
|
|
"epoch": 14.910394265232975,
|
|
"grad_norm": 4.170314788818359,
|
|
"learning_rate": 0.00018857142857142857,
|
|
"loss": 0.3139,
|
|
"step": 2080
|
|
},
|
|
{
|
|
"epoch": 15.053763440860216,
|
|
"grad_norm": 4.840059757232666,
|
|
"learning_rate": 0.00018845598845598847,
|
|
"loss": 0.2774,
|
|
"step": 2100
|
|
},
|
|
{
|
|
"epoch": 15.197132616487455,
|
|
"grad_norm": 4.770188808441162,
|
|
"learning_rate": 0.00018834054834054834,
|
|
"loss": 0.2681,
|
|
"step": 2120
|
|
},
|
|
{
|
|
"epoch": 15.340501792114695,
|
|
"grad_norm": 4.672384262084961,
|
|
"learning_rate": 0.00018822510822510825,
|
|
"loss": 0.2499,
|
|
"step": 2140
|
|
},
|
|
{
|
|
"epoch": 15.483870967741936,
|
|
"grad_norm": 4.933983325958252,
|
|
"learning_rate": 0.00018810966810966812,
|
|
"loss": 0.2398,
|
|
"step": 2160
|
|
},
|
|
{
|
|
"epoch": 15.627240143369175,
|
|
"grad_norm": 3.96673583984375,
|
|
"learning_rate": 0.00018799422799422802,
|
|
"loss": 0.2917,
|
|
"step": 2180
|
|
},
|
|
{
|
|
"epoch": 15.770609318996415,
|
|
"grad_norm": 4.215606212615967,
|
|
"learning_rate": 0.0001878787878787879,
|
|
"loss": 0.272,
|
|
"step": 2200
|
|
},
|
|
{
|
|
"epoch": 15.913978494623656,
|
|
"grad_norm": 4.834962844848633,
|
|
"learning_rate": 0.00018776334776334777,
|
|
"loss": 0.2717,
|
|
"step": 2220
|
|
},
|
|
{
|
|
"epoch": 16.057347670250895,
|
|
"grad_norm": 3.783409595489502,
|
|
"learning_rate": 0.00018764790764790764,
|
|
"loss": 0.2594,
|
|
"step": 2240
|
|
},
|
|
{
|
|
"epoch": 16.200716845878137,
|
|
"grad_norm": 3.0773093700408936,
|
|
"learning_rate": 0.00018753246753246754,
|
|
"loss": 0.2007,
|
|
"step": 2260
|
|
},
|
|
{
|
|
"epoch": 16.344086021505376,
|
|
"grad_norm": 3.63726806640625,
|
|
"learning_rate": 0.00018741702741702741,
|
|
"loss": 0.2213,
|
|
"step": 2280
|
|
},
|
|
{
|
|
"epoch": 16.487455197132615,
|
|
"grad_norm": 2.6716272830963135,
|
|
"learning_rate": 0.00018730158730158731,
|
|
"loss": 0.2239,
|
|
"step": 2300
|
|
},
|
|
{
|
|
"epoch": 16.630824372759857,
|
|
"grad_norm": 3.748936414718628,
|
|
"learning_rate": 0.0001871861471861472,
|
|
"loss": 0.2455,
|
|
"step": 2320
|
|
},
|
|
{
|
|
"epoch": 16.774193548387096,
|
|
"grad_norm": 4.2871785163879395,
|
|
"learning_rate": 0.0001870707070707071,
|
|
"loss": 0.2833,
|
|
"step": 2340
|
|
},
|
|
{
|
|
"epoch": 16.91756272401434,
|
|
"grad_norm": 4.30325984954834,
|
|
"learning_rate": 0.00018695526695526696,
|
|
"loss": 0.2538,
|
|
"step": 2360
|
|
},
|
|
{
|
|
"epoch": 17.060931899641577,
|
|
"grad_norm": 3.659284830093384,
|
|
"learning_rate": 0.00018683982683982686,
|
|
"loss": 0.2033,
|
|
"step": 2380
|
|
},
|
|
{
|
|
"epoch": 17.204301075268816,
|
|
"grad_norm": 3.3626983165740967,
|
|
"learning_rate": 0.00018672438672438674,
|
|
"loss": 0.1804,
|
|
"step": 2400
|
|
},
|
|
{
|
|
"epoch": 17.34767025089606,
|
|
"grad_norm": 4.500549793243408,
|
|
"learning_rate": 0.0001866089466089466,
|
|
"loss": 0.2,
|
|
"step": 2420
|
|
},
|
|
{
|
|
"epoch": 17.491039426523297,
|
|
"grad_norm": 3.9919979572296143,
|
|
"learning_rate": 0.0001864935064935065,
|
|
"loss": 0.2469,
|
|
"step": 2440
|
|
},
|
|
{
|
|
"epoch": 17.634408602150536,
|
|
"grad_norm": 3.7051095962524414,
|
|
"learning_rate": 0.00018637806637806638,
|
|
"loss": 0.1967,
|
|
"step": 2460
|
|
},
|
|
{
|
|
"epoch": 17.77777777777778,
|
|
"grad_norm": 5.194777011871338,
|
|
"learning_rate": 0.00018626262626262628,
|
|
"loss": 0.2353,
|
|
"step": 2480
|
|
},
|
|
{
|
|
"epoch": 17.921146953405017,
|
|
"grad_norm": 4.292696952819824,
|
|
"learning_rate": 0.00018614718614718616,
|
|
"loss": 0.2291,
|
|
"step": 2500
|
|
},
|
|
{
|
|
"epoch": 18.06451612903226,
|
|
"grad_norm": 5.281682968139648,
|
|
"learning_rate": 0.00018603174603174606,
|
|
"loss": 0.2102,
|
|
"step": 2520
|
|
},
|
|
{
|
|
"epoch": 18.2078853046595,
|
|
"grad_norm": 4.829092502593994,
|
|
"learning_rate": 0.00018591630591630593,
|
|
"loss": 0.1822,
|
|
"step": 2540
|
|
},
|
|
{
|
|
"epoch": 18.351254480286737,
|
|
"grad_norm": 4.637266635894775,
|
|
"learning_rate": 0.00018580086580086583,
|
|
"loss": 0.1774,
|
|
"step": 2560
|
|
},
|
|
{
|
|
"epoch": 18.49462365591398,
|
|
"grad_norm": 4.328069686889648,
|
|
"learning_rate": 0.00018568542568542568,
|
|
"loss": 0.1867,
|
|
"step": 2580
|
|
},
|
|
{
|
|
"epoch": 18.63799283154122,
|
|
"grad_norm": 3.5773091316223145,
|
|
"learning_rate": 0.00018556998556998558,
|
|
"loss": 0.1957,
|
|
"step": 2600
|
|
},
|
|
{
|
|
"epoch": 18.781362007168457,
|
|
"grad_norm": 3.868898391723633,
|
|
"learning_rate": 0.00018545454545454545,
|
|
"loss": 0.2005,
|
|
"step": 2620
|
|
},
|
|
{
|
|
"epoch": 18.9247311827957,
|
|
"grad_norm": 3.5077199935913086,
|
|
"learning_rate": 0.00018533910533910535,
|
|
"loss": 0.2185,
|
|
"step": 2640
|
|
},
|
|
{
|
|
"epoch": 19.06810035842294,
|
|
"grad_norm": 4.56404972076416,
|
|
"learning_rate": 0.00018522366522366523,
|
|
"loss": 0.1939,
|
|
"step": 2660
|
|
},
|
|
{
|
|
"epoch": 19.211469534050178,
|
|
"grad_norm": 3.897749662399292,
|
|
"learning_rate": 0.00018510822510822513,
|
|
"loss": 0.1669,
|
|
"step": 2680
|
|
},
|
|
{
|
|
"epoch": 19.35483870967742,
|
|
"grad_norm": 5.491626262664795,
|
|
"learning_rate": 0.000184992784992785,
|
|
"loss": 0.1685,
|
|
"step": 2700
|
|
},
|
|
{
|
|
"epoch": 19.49820788530466,
|
|
"grad_norm": 4.0989813804626465,
|
|
"learning_rate": 0.0001848773448773449,
|
|
"loss": 0.1582,
|
|
"step": 2720
|
|
},
|
|
{
|
|
"epoch": 19.6415770609319,
|
|
"grad_norm": 3.87003755569458,
|
|
"learning_rate": 0.00018476190476190478,
|
|
"loss": 0.1644,
|
|
"step": 2740
|
|
},
|
|
{
|
|
"epoch": 19.78494623655914,
|
|
"grad_norm": 4.180914402008057,
|
|
"learning_rate": 0.00018464646464646465,
|
|
"loss": 0.1731,
|
|
"step": 2760
|
|
},
|
|
{
|
|
"epoch": 19.92831541218638,
|
|
"grad_norm": 4.430409908294678,
|
|
"learning_rate": 0.00018453102453102452,
|
|
"loss": 0.191,
|
|
"step": 2780
|
|
},
|
|
{
|
|
"epoch": 20.07168458781362,
|
|
"grad_norm": 4.21690559387207,
|
|
"learning_rate": 0.00018441558441558442,
|
|
"loss": 0.1707,
|
|
"step": 2800
|
|
},
|
|
{
|
|
"epoch": 20.21505376344086,
|
|
"grad_norm": 3.604231595993042,
|
|
"learning_rate": 0.0001843001443001443,
|
|
"loss": 0.1358,
|
|
"step": 2820
|
|
},
|
|
{
|
|
"epoch": 20.3584229390681,
|
|
"grad_norm": 3.138463020324707,
|
|
"learning_rate": 0.0001841847041847042,
|
|
"loss": 0.1588,
|
|
"step": 2840
|
|
},
|
|
{
|
|
"epoch": 20.50179211469534,
|
|
"grad_norm": 4.0394978523254395,
|
|
"learning_rate": 0.00018406926406926407,
|
|
"loss": 0.1727,
|
|
"step": 2860
|
|
},
|
|
{
|
|
"epoch": 20.64516129032258,
|
|
"grad_norm": 3.6658740043640137,
|
|
"learning_rate": 0.00018395382395382397,
|
|
"loss": 0.1759,
|
|
"step": 2880
|
|
},
|
|
{
|
|
"epoch": 20.788530465949822,
|
|
"grad_norm": 3.8693015575408936,
|
|
"learning_rate": 0.00018383838383838384,
|
|
"loss": 0.1486,
|
|
"step": 2900
|
|
},
|
|
{
|
|
"epoch": 20.93189964157706,
|
|
"grad_norm": 4.257044315338135,
|
|
"learning_rate": 0.00018372294372294375,
|
|
"loss": 0.1338,
|
|
"step": 2920
|
|
},
|
|
{
|
|
"epoch": 21.0752688172043,
|
|
"grad_norm": 2.618070125579834,
|
|
"learning_rate": 0.00018360750360750362,
|
|
"loss": 0.1527,
|
|
"step": 2940
|
|
},
|
|
{
|
|
"epoch": 21.218637992831543,
|
|
"grad_norm": 4.323324203491211,
|
|
"learning_rate": 0.0001834920634920635,
|
|
"loss": 0.1519,
|
|
"step": 2960
|
|
},
|
|
{
|
|
"epoch": 21.36200716845878,
|
|
"grad_norm": 3.53774356842041,
|
|
"learning_rate": 0.0001833766233766234,
|
|
"loss": 0.1175,
|
|
"step": 2980
|
|
},
|
|
{
|
|
"epoch": 21.50537634408602,
|
|
"grad_norm": 5.719931125640869,
|
|
"learning_rate": 0.00018326118326118327,
|
|
"loss": 0.1323,
|
|
"step": 3000
|
|
},
|
|
{
|
|
"epoch": 21.648745519713263,
|
|
"grad_norm": 4.327314376831055,
|
|
"learning_rate": 0.00018314574314574317,
|
|
"loss": 0.1344,
|
|
"step": 3020
|
|
},
|
|
{
|
|
"epoch": 21.7921146953405,
|
|
"grad_norm": 4.0956501960754395,
|
|
"learning_rate": 0.00018303030303030304,
|
|
"loss": 0.1339,
|
|
"step": 3040
|
|
},
|
|
{
|
|
"epoch": 21.93548387096774,
|
|
"grad_norm": 4.5580830574035645,
|
|
"learning_rate": 0.00018291486291486294,
|
|
"loss": 0.1707,
|
|
"step": 3060
|
|
},
|
|
{
|
|
"epoch": 22.078853046594983,
|
|
"grad_norm": 2.9596970081329346,
|
|
"learning_rate": 0.00018279942279942281,
|
|
"loss": 0.137,
|
|
"step": 3080
|
|
},
|
|
{
|
|
"epoch": 22.22222222222222,
|
|
"grad_norm": 3.2811834812164307,
|
|
"learning_rate": 0.00018268398268398272,
|
|
"loss": 0.1299,
|
|
"step": 3100
|
|
},
|
|
{
|
|
"epoch": 22.365591397849464,
|
|
"grad_norm": 2.986440420150757,
|
|
"learning_rate": 0.00018256854256854256,
|
|
"loss": 0.1318,
|
|
"step": 3120
|
|
},
|
|
{
|
|
"epoch": 22.508960573476703,
|
|
"grad_norm": 3.6425325870513916,
|
|
"learning_rate": 0.00018245310245310246,
|
|
"loss": 0.1234,
|
|
"step": 3140
|
|
},
|
|
{
|
|
"epoch": 22.65232974910394,
|
|
"grad_norm": 4.4187846183776855,
|
|
"learning_rate": 0.00018233766233766234,
|
|
"loss": 0.1333,
|
|
"step": 3160
|
|
},
|
|
{
|
|
"epoch": 22.795698924731184,
|
|
"grad_norm": 4.019857406616211,
|
|
"learning_rate": 0.00018222222222222224,
|
|
"loss": 0.126,
|
|
"step": 3180
|
|
},
|
|
{
|
|
"epoch": 22.939068100358423,
|
|
"grad_norm": 3.8513457775115967,
|
|
"learning_rate": 0.0001821067821067821,
|
|
"loss": 0.1346,
|
|
"step": 3200
|
|
},
|
|
{
|
|
"epoch": 23.08243727598566,
|
|
"grad_norm": 2.8631300926208496,
|
|
"learning_rate": 0.000181991341991342,
|
|
"loss": 0.1151,
|
|
"step": 3220
|
|
},
|
|
{
|
|
"epoch": 23.225806451612904,
|
|
"grad_norm": 2.056180477142334,
|
|
"learning_rate": 0.00018187590187590188,
|
|
"loss": 0.093,
|
|
"step": 3240
|
|
},
|
|
{
|
|
"epoch": 23.369175627240143,
|
|
"grad_norm": 4.408998966217041,
|
|
"learning_rate": 0.00018176046176046178,
|
|
"loss": 0.1404,
|
|
"step": 3260
|
|
},
|
|
{
|
|
"epoch": 23.512544802867385,
|
|
"grad_norm": 4.436832904815674,
|
|
"learning_rate": 0.00018164502164502166,
|
|
"loss": 0.1199,
|
|
"step": 3280
|
|
},
|
|
{
|
|
"epoch": 23.655913978494624,
|
|
"grad_norm": 3.343065023422241,
|
|
"learning_rate": 0.00018152958152958153,
|
|
"loss": 0.1341,
|
|
"step": 3300
|
|
},
|
|
{
|
|
"epoch": 23.799283154121863,
|
|
"grad_norm": 3.611084461212158,
|
|
"learning_rate": 0.0001814141414141414,
|
|
"loss": 0.1149,
|
|
"step": 3320
|
|
},
|
|
{
|
|
"epoch": 23.942652329749105,
|
|
"grad_norm": 5.188615322113037,
|
|
"learning_rate": 0.0001812987012987013,
|
|
"loss": 0.1342,
|
|
"step": 3340
|
|
},
|
|
{
|
|
"epoch": 24.086021505376344,
|
|
"grad_norm": 3.88657546043396,
|
|
"learning_rate": 0.00018118326118326118,
|
|
"loss": 0.1084,
|
|
"step": 3360
|
|
},
|
|
{
|
|
"epoch": 24.229390681003583,
|
|
"grad_norm": 2.9031708240509033,
|
|
"learning_rate": 0.00018106782106782108,
|
|
"loss": 0.1054,
|
|
"step": 3380
|
|
},
|
|
{
|
|
"epoch": 24.372759856630825,
|
|
"grad_norm": 3.283027172088623,
|
|
"learning_rate": 0.00018095238095238095,
|
|
"loss": 0.1049,
|
|
"step": 3400
|
|
},
|
|
{
|
|
"epoch": 24.516129032258064,
|
|
"grad_norm": 3.191681385040283,
|
|
"learning_rate": 0.00018083694083694085,
|
|
"loss": 0.114,
|
|
"step": 3420
|
|
},
|
|
{
|
|
"epoch": 24.659498207885306,
|
|
"grad_norm": 2.726076602935791,
|
|
"learning_rate": 0.00018072150072150073,
|
|
"loss": 0.1119,
|
|
"step": 3440
|
|
},
|
|
{
|
|
"epoch": 24.802867383512545,
|
|
"grad_norm": 3.3401198387145996,
|
|
"learning_rate": 0.00018060606060606063,
|
|
"loss": 0.1098,
|
|
"step": 3460
|
|
},
|
|
{
|
|
"epoch": 24.946236559139784,
|
|
"grad_norm": 3.8354945182800293,
|
|
"learning_rate": 0.0001804906204906205,
|
|
"loss": 0.1212,
|
|
"step": 3480
|
|
},
|
|
{
|
|
"epoch": 25.089605734767026,
|
|
"grad_norm": 3.6096622943878174,
|
|
"learning_rate": 0.00018037518037518038,
|
|
"loss": 0.0942,
|
|
"step": 3500
|
|
},
|
|
{
|
|
"epoch": 25.232974910394265,
|
|
"grad_norm": 2.9408082962036133,
|
|
"learning_rate": 0.00018025974025974025,
|
|
"loss": 0.0873,
|
|
"step": 3520
|
|
},
|
|
{
|
|
"epoch": 25.376344086021504,
|
|
"grad_norm": 2.9690041542053223,
|
|
"learning_rate": 0.00018014430014430015,
|
|
"loss": 0.0999,
|
|
"step": 3540
|
|
},
|
|
{
|
|
"epoch": 25.519713261648747,
|
|
"grad_norm": 2.772616147994995,
|
|
"learning_rate": 0.00018002886002886002,
|
|
"loss": 0.1108,
|
|
"step": 3560
|
|
},
|
|
{
|
|
"epoch": 25.663082437275985,
|
|
"grad_norm": 2.993304491043091,
|
|
"learning_rate": 0.00017991341991341992,
|
|
"loss": 0.1065,
|
|
"step": 3580
|
|
},
|
|
{
|
|
"epoch": 25.806451612903224,
|
|
"grad_norm": 3.176945209503174,
|
|
"learning_rate": 0.0001797979797979798,
|
|
"loss": 0.1009,
|
|
"step": 3600
|
|
},
|
|
{
|
|
"epoch": 25.949820788530467,
|
|
"grad_norm": 3.697052001953125,
|
|
"learning_rate": 0.0001796825396825397,
|
|
"loss": 0.135,
|
|
"step": 3620
|
|
},
|
|
{
|
|
"epoch": 26.093189964157705,
|
|
"grad_norm": 2.9682657718658447,
|
|
"learning_rate": 0.00017956709956709957,
|
|
"loss": 0.0872,
|
|
"step": 3640
|
|
},
|
|
{
|
|
"epoch": 26.236559139784948,
|
|
"grad_norm": 2.0252315998077393,
|
|
"learning_rate": 0.00017945165945165947,
|
|
"loss": 0.0871,
|
|
"step": 3660
|
|
},
|
|
{
|
|
"epoch": 26.379928315412187,
|
|
"grad_norm": 2.12572979927063,
|
|
"learning_rate": 0.00017933621933621934,
|
|
"loss": 0.0881,
|
|
"step": 3680
|
|
},
|
|
{
|
|
"epoch": 26.523297491039425,
|
|
"grad_norm": 3.1188809871673584,
|
|
"learning_rate": 0.00017922077922077922,
|
|
"loss": 0.0943,
|
|
"step": 3700
|
|
},
|
|
{
|
|
"epoch": 26.666666666666668,
|
|
"grad_norm": 3.05329966545105,
|
|
"learning_rate": 0.00017910533910533912,
|
|
"loss": 0.1057,
|
|
"step": 3720
|
|
},
|
|
{
|
|
"epoch": 26.810035842293907,
|
|
"grad_norm": 3.4610469341278076,
|
|
"learning_rate": 0.000178989898989899,
|
|
"loss": 0.0933,
|
|
"step": 3740
|
|
},
|
|
{
|
|
"epoch": 26.953405017921146,
|
|
"grad_norm": 4.512834072113037,
|
|
"learning_rate": 0.0001788744588744589,
|
|
"loss": 0.1219,
|
|
"step": 3760
|
|
},
|
|
{
|
|
"epoch": 27.096774193548388,
|
|
"grad_norm": 6.541393756866455,
|
|
"learning_rate": 0.00017875901875901877,
|
|
"loss": 0.0882,
|
|
"step": 3780
|
|
},
|
|
{
|
|
"epoch": 27.240143369175627,
|
|
"grad_norm": 3.331212043762207,
|
|
"learning_rate": 0.00017864357864357867,
|
|
"loss": 0.081,
|
|
"step": 3800
|
|
},
|
|
{
|
|
"epoch": 27.38351254480287,
|
|
"grad_norm": 2.9885828495025635,
|
|
"learning_rate": 0.00017852813852813854,
|
|
"loss": 0.0919,
|
|
"step": 3820
|
|
},
|
|
{
|
|
"epoch": 27.526881720430108,
|
|
"grad_norm": 3.2344679832458496,
|
|
"learning_rate": 0.00017841269841269844,
|
|
"loss": 0.0774,
|
|
"step": 3840
|
|
},
|
|
{
|
|
"epoch": 27.670250896057347,
|
|
"grad_norm": 4.0804290771484375,
|
|
"learning_rate": 0.0001782972582972583,
|
|
"loss": 0.1038,
|
|
"step": 3860
|
|
},
|
|
{
|
|
"epoch": 27.81362007168459,
|
|
"grad_norm": 4.064199924468994,
|
|
"learning_rate": 0.0001781818181818182,
|
|
"loss": 0.0962,
|
|
"step": 3880
|
|
},
|
|
{
|
|
"epoch": 27.956989247311828,
|
|
"grad_norm": 3.9135751724243164,
|
|
"learning_rate": 0.00017806637806637806,
|
|
"loss": 0.0937,
|
|
"step": 3900
|
|
},
|
|
{
|
|
"epoch": 28.100358422939067,
|
|
"grad_norm": 2.8442928791046143,
|
|
"learning_rate": 0.00017795093795093796,
|
|
"loss": 0.0873,
|
|
"step": 3920
|
|
},
|
|
{
|
|
"epoch": 28.24372759856631,
|
|
"grad_norm": 2.8450095653533936,
|
|
"learning_rate": 0.00017783549783549784,
|
|
"loss": 0.0865,
|
|
"step": 3940
|
|
},
|
|
{
|
|
"epoch": 28.387096774193548,
|
|
"grad_norm": 5.168785572052002,
|
|
"learning_rate": 0.00017772005772005774,
|
|
"loss": 0.0819,
|
|
"step": 3960
|
|
},
|
|
{
|
|
"epoch": 28.530465949820787,
|
|
"grad_norm": 2.829857587814331,
|
|
"learning_rate": 0.0001776046176046176,
|
|
"loss": 0.0777,
|
|
"step": 3980
|
|
},
|
|
{
|
|
"epoch": 28.67383512544803,
|
|
"grad_norm": 3.645796060562134,
|
|
"learning_rate": 0.0001774891774891775,
|
|
"loss": 0.0784,
|
|
"step": 4000
|
|
},
|
|
{
|
|
"epoch": 28.817204301075268,
|
|
"grad_norm": 2.782454252243042,
|
|
"learning_rate": 0.00017737373737373738,
|
|
"loss": 0.0907,
|
|
"step": 4020
|
|
},
|
|
{
|
|
"epoch": 28.96057347670251,
|
|
"grad_norm": 3.160844564437866,
|
|
"learning_rate": 0.00017725829725829726,
|
|
"loss": 0.0893,
|
|
"step": 4040
|
|
},
|
|
{
|
|
"epoch": 29.10394265232975,
|
|
"grad_norm": 2.9098074436187744,
|
|
"learning_rate": 0.00017714285714285713,
|
|
"loss": 0.0696,
|
|
"step": 4060
|
|
},
|
|
{
|
|
"epoch": 29.247311827956988,
|
|
"grad_norm": 2.2926838397979736,
|
|
"learning_rate": 0.00017702741702741703,
|
|
"loss": 0.0762,
|
|
"step": 4080
|
|
},
|
|
{
|
|
"epoch": 29.39068100358423,
|
|
"grad_norm": 3.952270030975342,
|
|
"learning_rate": 0.0001769119769119769,
|
|
"loss": 0.0754,
|
|
"step": 4100
|
|
},
|
|
{
|
|
"epoch": 29.53405017921147,
|
|
"grad_norm": 2.679051160812378,
|
|
"learning_rate": 0.0001767965367965368,
|
|
"loss": 0.0753,
|
|
"step": 4120
|
|
},
|
|
{
|
|
"epoch": 29.677419354838708,
|
|
"grad_norm": 3.0351693630218506,
|
|
"learning_rate": 0.00017668109668109668,
|
|
"loss": 0.0889,
|
|
"step": 4140
|
|
},
|
|
{
|
|
"epoch": 29.82078853046595,
|
|
"grad_norm": 2.2097012996673584,
|
|
"learning_rate": 0.00017656565656565658,
|
|
"loss": 0.0882,
|
|
"step": 4160
|
|
},
|
|
{
|
|
"epoch": 29.96415770609319,
|
|
"grad_norm": 3.2388463020324707,
|
|
"learning_rate": 0.00017645021645021645,
|
|
"loss": 0.0828,
|
|
"step": 4180
|
|
},
|
|
{
|
|
"epoch": 30.107526881720432,
|
|
"grad_norm": 2.688284158706665,
|
|
"learning_rate": 0.00017633477633477635,
|
|
"loss": 0.0639,
|
|
"step": 4200
|
|
},
|
|
{
|
|
"epoch": 30.25089605734767,
|
|
"grad_norm": 3.341397523880005,
|
|
"learning_rate": 0.00017621933621933623,
|
|
"loss": 0.0651,
|
|
"step": 4220
|
|
},
|
|
{
|
|
"epoch": 30.39426523297491,
|
|
"grad_norm": 3.0759265422821045,
|
|
"learning_rate": 0.0001761038961038961,
|
|
"loss": 0.0728,
|
|
"step": 4240
|
|
},
|
|
{
|
|
"epoch": 30.537634408602152,
|
|
"grad_norm": 2.7288103103637695,
|
|
"learning_rate": 0.000175988455988456,
|
|
"loss": 0.0719,
|
|
"step": 4260
|
|
},
|
|
{
|
|
"epoch": 30.68100358422939,
|
|
"grad_norm": 3.3915891647338867,
|
|
"learning_rate": 0.00017587301587301588,
|
|
"loss": 0.0758,
|
|
"step": 4280
|
|
},
|
|
{
|
|
"epoch": 30.82437275985663,
|
|
"grad_norm": 3.2848050594329834,
|
|
"learning_rate": 0.00017575757575757578,
|
|
"loss": 0.0842,
|
|
"step": 4300
|
|
},
|
|
{
|
|
"epoch": 30.967741935483872,
|
|
"grad_norm": 3.1872403621673584,
|
|
"learning_rate": 0.00017564213564213565,
|
|
"loss": 0.0895,
|
|
"step": 4320
|
|
},
|
|
{
|
|
"epoch": 31.11111111111111,
|
|
"grad_norm": 2.675920248031616,
|
|
"learning_rate": 0.00017552669552669555,
|
|
"loss": 0.067,
|
|
"step": 4340
|
|
},
|
|
{
|
|
"epoch": 31.25448028673835,
|
|
"grad_norm": 3.840548276901245,
|
|
"learning_rate": 0.00017541125541125542,
|
|
"loss": 0.0614,
|
|
"step": 4360
|
|
},
|
|
{
|
|
"epoch": 31.397849462365592,
|
|
"grad_norm": 3.0870628356933594,
|
|
"learning_rate": 0.00017529581529581532,
|
|
"loss": 0.0726,
|
|
"step": 4380
|
|
},
|
|
{
|
|
"epoch": 31.54121863799283,
|
|
"grad_norm": 3.956441640853882,
|
|
"learning_rate": 0.00017518037518037517,
|
|
"loss": 0.0673,
|
|
"step": 4400
|
|
},
|
|
{
|
|
"epoch": 31.684587813620073,
|
|
"grad_norm": 2.798611640930176,
|
|
"learning_rate": 0.00017506493506493507,
|
|
"loss": 0.0755,
|
|
"step": 4420
|
|
},
|
|
{
|
|
"epoch": 31.827956989247312,
|
|
"grad_norm": 3.071885585784912,
|
|
"learning_rate": 0.00017494949494949494,
|
|
"loss": 0.0697,
|
|
"step": 4440
|
|
},
|
|
{
|
|
"epoch": 31.97132616487455,
|
|
"grad_norm": 4.090572834014893,
|
|
"learning_rate": 0.00017483405483405485,
|
|
"loss": 0.0833,
|
|
"step": 4460
|
|
},
|
|
{
|
|
"epoch": 32.11469534050179,
|
|
"grad_norm": 3.011218309402466,
|
|
"learning_rate": 0.00017471861471861472,
|
|
"loss": 0.0625,
|
|
"step": 4480
|
|
},
|
|
{
|
|
"epoch": 32.25806451612903,
|
|
"grad_norm": 2.3517768383026123,
|
|
"learning_rate": 0.00017460317460317462,
|
|
"loss": 0.0658,
|
|
"step": 4500
|
|
},
|
|
{
|
|
"epoch": 32.401433691756274,
|
|
"grad_norm": 2.255727767944336,
|
|
"learning_rate": 0.0001744877344877345,
|
|
"loss": 0.0595,
|
|
"step": 4520
|
|
},
|
|
{
|
|
"epoch": 32.54480286738351,
|
|
"grad_norm": 3.6209774017333984,
|
|
"learning_rate": 0.0001743722943722944,
|
|
"loss": 0.0696,
|
|
"step": 4540
|
|
},
|
|
{
|
|
"epoch": 32.68817204301075,
|
|
"grad_norm": 2.5406808853149414,
|
|
"learning_rate": 0.00017425685425685427,
|
|
"loss": 0.0689,
|
|
"step": 4560
|
|
},
|
|
{
|
|
"epoch": 32.831541218637994,
|
|
"grad_norm": 3.2799172401428223,
|
|
"learning_rate": 0.00017414141414141414,
|
|
"loss": 0.0737,
|
|
"step": 4580
|
|
},
|
|
{
|
|
"epoch": 32.97491039426523,
|
|
"grad_norm": 2.7221455574035645,
|
|
"learning_rate": 0.00017402597402597401,
|
|
"loss": 0.0786,
|
|
"step": 4600
|
|
},
|
|
{
|
|
"epoch": 33.11827956989247,
|
|
"grad_norm": 1.5500837564468384,
|
|
"learning_rate": 0.00017391053391053391,
|
|
"loss": 0.0558,
|
|
"step": 4620
|
|
},
|
|
{
|
|
"epoch": 33.261648745519715,
|
|
"grad_norm": 2.5242912769317627,
|
|
"learning_rate": 0.0001737950937950938,
|
|
"loss": 0.0578,
|
|
"step": 4640
|
|
},
|
|
{
|
|
"epoch": 33.40501792114696,
|
|
"grad_norm": 3.8142666816711426,
|
|
"learning_rate": 0.0001736796536796537,
|
|
"loss": 0.062,
|
|
"step": 4660
|
|
},
|
|
{
|
|
"epoch": 33.54838709677419,
|
|
"grad_norm": 3.5291833877563477,
|
|
"learning_rate": 0.00017356421356421356,
|
|
"loss": 0.0669,
|
|
"step": 4680
|
|
},
|
|
{
|
|
"epoch": 33.691756272401435,
|
|
"grad_norm": 2.1203930377960205,
|
|
"learning_rate": 0.00017344877344877346,
|
|
"loss": 0.0658,
|
|
"step": 4700
|
|
},
|
|
{
|
|
"epoch": 33.83512544802868,
|
|
"grad_norm": 3.0345406532287598,
|
|
"learning_rate": 0.00017333333333333334,
|
|
"loss": 0.0652,
|
|
"step": 4720
|
|
},
|
|
{
|
|
"epoch": 33.97849462365591,
|
|
"grad_norm": 3.3054096698760986,
|
|
"learning_rate": 0.00017321789321789324,
|
|
"loss": 0.0662,
|
|
"step": 4740
|
|
},
|
|
{
|
|
"epoch": 34.121863799283155,
|
|
"grad_norm": 2.796600580215454,
|
|
"learning_rate": 0.0001731024531024531,
|
|
"loss": 0.053,
|
|
"step": 4760
|
|
},
|
|
{
|
|
"epoch": 34.2652329749104,
|
|
"grad_norm": 2.0653796195983887,
|
|
"learning_rate": 0.00017298701298701298,
|
|
"loss": 0.0626,
|
|
"step": 4780
|
|
},
|
|
{
|
|
"epoch": 34.40860215053763,
|
|
"grad_norm": 1.7784148454666138,
|
|
"learning_rate": 0.00017287157287157288,
|
|
"loss": 0.0527,
|
|
"step": 4800
|
|
},
|
|
{
|
|
"epoch": 34.551971326164875,
|
|
"grad_norm": 1.5484881401062012,
|
|
"learning_rate": 0.00017275613275613276,
|
|
"loss": 0.0597,
|
|
"step": 4820
|
|
},
|
|
{
|
|
"epoch": 34.69534050179212,
|
|
"grad_norm": 2.585890769958496,
|
|
"learning_rate": 0.00017264069264069266,
|
|
"loss": 0.0585,
|
|
"step": 4840
|
|
},
|
|
{
|
|
"epoch": 34.83870967741935,
|
|
"grad_norm": 2.978422164916992,
|
|
"learning_rate": 0.00017252525252525253,
|
|
"loss": 0.0629,
|
|
"step": 4860
|
|
},
|
|
{
|
|
"epoch": 34.982078853046595,
|
|
"grad_norm": 2.5631103515625,
|
|
"learning_rate": 0.00017240981240981243,
|
|
"loss": 0.078,
|
|
"step": 4880
|
|
},
|
|
{
|
|
"epoch": 35.12544802867384,
|
|
"grad_norm": 2.0318007469177246,
|
|
"learning_rate": 0.0001722943722943723,
|
|
"loss": 0.0588,
|
|
"step": 4900
|
|
},
|
|
{
|
|
"epoch": 35.26881720430107,
|
|
"grad_norm": 2.345930576324463,
|
|
"learning_rate": 0.0001721789321789322,
|
|
"loss": 0.0569,
|
|
"step": 4920
|
|
},
|
|
{
|
|
"epoch": 35.412186379928315,
|
|
"grad_norm": 3.440155506134033,
|
|
"learning_rate": 0.00017206349206349208,
|
|
"loss": 0.0528,
|
|
"step": 4940
|
|
},
|
|
{
|
|
"epoch": 35.55555555555556,
|
|
"grad_norm": 2.1735332012176514,
|
|
"learning_rate": 0.00017194805194805195,
|
|
"loss": 0.0576,
|
|
"step": 4960
|
|
},
|
|
{
|
|
"epoch": 35.69892473118279,
|
|
"grad_norm": 2.1158435344696045,
|
|
"learning_rate": 0.00017183261183261183,
|
|
"loss": 0.0593,
|
|
"step": 4980
|
|
},
|
|
{
|
|
"epoch": 35.842293906810035,
|
|
"grad_norm": 2.6468334197998047,
|
|
"learning_rate": 0.00017171717171717173,
|
|
"loss": 0.065,
|
|
"step": 5000
|
|
},
|
|
{
|
|
"epoch": 35.98566308243728,
|
|
"grad_norm": 2.7233901023864746,
|
|
"learning_rate": 0.0001716017316017316,
|
|
"loss": 0.0557,
|
|
"step": 5020
|
|
},
|
|
{
|
|
"epoch": 36.12903225806452,
|
|
"grad_norm": 2.840747594833374,
|
|
"learning_rate": 0.0001714862914862915,
|
|
"loss": 0.0483,
|
|
"step": 5040
|
|
},
|
|
{
|
|
"epoch": 36.272401433691755,
|
|
"grad_norm": 2.235607624053955,
|
|
"learning_rate": 0.00017137085137085138,
|
|
"loss": 0.0504,
|
|
"step": 5060
|
|
},
|
|
{
|
|
"epoch": 36.415770609319,
|
|
"grad_norm": 3.1793267726898193,
|
|
"learning_rate": 0.00017125541125541128,
|
|
"loss": 0.055,
|
|
"step": 5080
|
|
},
|
|
{
|
|
"epoch": 36.55913978494624,
|
|
"grad_norm": 2.583932638168335,
|
|
"learning_rate": 0.00017113997113997115,
|
|
"loss": 0.0553,
|
|
"step": 5100
|
|
},
|
|
{
|
|
"epoch": 36.702508960573475,
|
|
"grad_norm": 3.4074461460113525,
|
|
"learning_rate": 0.00017102453102453105,
|
|
"loss": 0.0591,
|
|
"step": 5120
|
|
},
|
|
{
|
|
"epoch": 36.84587813620072,
|
|
"grad_norm": 2.3088393211364746,
|
|
"learning_rate": 0.0001709090909090909,
|
|
"loss": 0.0577,
|
|
"step": 5140
|
|
},
|
|
{
|
|
"epoch": 36.98924731182796,
|
|
"grad_norm": 2.301844358444214,
|
|
"learning_rate": 0.0001707936507936508,
|
|
"loss": 0.0609,
|
|
"step": 5160
|
|
},
|
|
{
|
|
"epoch": 37.132616487455195,
|
|
"grad_norm": 1.6736223697662354,
|
|
"learning_rate": 0.00017067821067821067,
|
|
"loss": 0.0483,
|
|
"step": 5180
|
|
},
|
|
{
|
|
"epoch": 37.27598566308244,
|
|
"grad_norm": 2.2391276359558105,
|
|
"learning_rate": 0.00017056277056277057,
|
|
"loss": 0.044,
|
|
"step": 5200
|
|
},
|
|
{
|
|
"epoch": 37.41935483870968,
|
|
"grad_norm": 4.5583062171936035,
|
|
"learning_rate": 0.00017044733044733044,
|
|
"loss": 0.0529,
|
|
"step": 5220
|
|
},
|
|
{
|
|
"epoch": 37.562724014336915,
|
|
"grad_norm": 2.835345983505249,
|
|
"learning_rate": 0.00017033189033189035,
|
|
"loss": 0.0547,
|
|
"step": 5240
|
|
},
|
|
{
|
|
"epoch": 37.70609318996416,
|
|
"grad_norm": 2.906676769256592,
|
|
"learning_rate": 0.00017021645021645022,
|
|
"loss": 0.0592,
|
|
"step": 5260
|
|
},
|
|
{
|
|
"epoch": 37.8494623655914,
|
|
"grad_norm": 2.9492480754852295,
|
|
"learning_rate": 0.00017010101010101012,
|
|
"loss": 0.0597,
|
|
"step": 5280
|
|
},
|
|
{
|
|
"epoch": 37.992831541218635,
|
|
"grad_norm": 2.3341236114501953,
|
|
"learning_rate": 0.00016998556998557,
|
|
"loss": 0.0597,
|
|
"step": 5300
|
|
},
|
|
{
|
|
"epoch": 38.13620071684588,
|
|
"grad_norm": 2.3535149097442627,
|
|
"learning_rate": 0.00016987012987012987,
|
|
"loss": 0.0455,
|
|
"step": 5320
|
|
},
|
|
{
|
|
"epoch": 38.27956989247312,
|
|
"grad_norm": 2.001614809036255,
|
|
"learning_rate": 0.00016975468975468977,
|
|
"loss": 0.048,
|
|
"step": 5340
|
|
},
|
|
{
|
|
"epoch": 38.422939068100355,
|
|
"grad_norm": 2.9416966438293457,
|
|
"learning_rate": 0.00016963924963924964,
|
|
"loss": 0.0458,
|
|
"step": 5360
|
|
},
|
|
{
|
|
"epoch": 38.5663082437276,
|
|
"grad_norm": 2.1588425636291504,
|
|
"learning_rate": 0.00016952380952380954,
|
|
"loss": 0.0503,
|
|
"step": 5380
|
|
},
|
|
{
|
|
"epoch": 38.70967741935484,
|
|
"grad_norm": 1.9148027896881104,
|
|
"learning_rate": 0.00016940836940836941,
|
|
"loss": 0.056,
|
|
"step": 5400
|
|
},
|
|
{
|
|
"epoch": 38.85304659498208,
|
|
"grad_norm": 3.3233532905578613,
|
|
"learning_rate": 0.00016929292929292932,
|
|
"loss": 0.0541,
|
|
"step": 5420
|
|
},
|
|
{
|
|
"epoch": 38.99641577060932,
|
|
"grad_norm": 2.9712846279144287,
|
|
"learning_rate": 0.0001691774891774892,
|
|
"loss": 0.0584,
|
|
"step": 5440
|
|
},
|
|
{
|
|
"epoch": 39.13978494623656,
|
|
"grad_norm": 3.8493432998657227,
|
|
"learning_rate": 0.0001690620490620491,
|
|
"loss": 0.0408,
|
|
"step": 5460
|
|
},
|
|
{
|
|
"epoch": 39.2831541218638,
|
|
"grad_norm": 2.1640853881835938,
|
|
"learning_rate": 0.00016894660894660896,
|
|
"loss": 0.0521,
|
|
"step": 5480
|
|
},
|
|
{
|
|
"epoch": 39.42652329749104,
|
|
"grad_norm": 3.0871174335479736,
|
|
"learning_rate": 0.00016883116883116884,
|
|
"loss": 0.0434,
|
|
"step": 5500
|
|
},
|
|
{
|
|
"epoch": 39.56989247311828,
|
|
"grad_norm": 4.486806869506836,
|
|
"learning_rate": 0.0001687157287157287,
|
|
"loss": 0.0511,
|
|
"step": 5520
|
|
},
|
|
{
|
|
"epoch": 39.71326164874552,
|
|
"grad_norm": 2.457530975341797,
|
|
"learning_rate": 0.0001686002886002886,
|
|
"loss": 0.0552,
|
|
"step": 5540
|
|
},
|
|
{
|
|
"epoch": 39.85663082437276,
|
|
"grad_norm": 2.0484917163848877,
|
|
"learning_rate": 0.00016848484848484848,
|
|
"loss": 0.0507,
|
|
"step": 5560
|
|
},
|
|
{
|
|
"epoch": 40.0,
|
|
"grad_norm": 1.594878911972046,
|
|
"learning_rate": 0.00016836940836940838,
|
|
"loss": 0.0519,
|
|
"step": 5580
|
|
},
|
|
{
|
|
"epoch": 40.14336917562724,
|
|
"grad_norm": 2.9362168312072754,
|
|
"learning_rate": 0.00016825396825396826,
|
|
"loss": 0.0367,
|
|
"step": 5600
|
|
},
|
|
{
|
|
"epoch": 40.28673835125448,
|
|
"grad_norm": 1.8991386890411377,
|
|
"learning_rate": 0.00016813852813852816,
|
|
"loss": 0.0409,
|
|
"step": 5620
|
|
},
|
|
{
|
|
"epoch": 40.43010752688172,
|
|
"grad_norm": 2.8097732067108154,
|
|
"learning_rate": 0.00016802308802308803,
|
|
"loss": 0.0469,
|
|
"step": 5640
|
|
},
|
|
{
|
|
"epoch": 40.57347670250896,
|
|
"grad_norm": 2.691704511642456,
|
|
"learning_rate": 0.00016790764790764793,
|
|
"loss": 0.0486,
|
|
"step": 5660
|
|
},
|
|
{
|
|
"epoch": 40.7168458781362,
|
|
"grad_norm": 1.8878296613693237,
|
|
"learning_rate": 0.00016779220779220778,
|
|
"loss": 0.0508,
|
|
"step": 5680
|
|
},
|
|
{
|
|
"epoch": 40.86021505376344,
|
|
"grad_norm": 1.7793047428131104,
|
|
"learning_rate": 0.00016767676767676768,
|
|
"loss": 0.0564,
|
|
"step": 5700
|
|
},
|
|
{
|
|
"epoch": 41.00358422939068,
|
|
"grad_norm": 1.3552976846694946,
|
|
"learning_rate": 0.00016756132756132755,
|
|
"loss": 0.0588,
|
|
"step": 5720
|
|
},
|
|
{
|
|
"epoch": 41.14695340501792,
|
|
"grad_norm": 1.7316395044326782,
|
|
"learning_rate": 0.00016744588744588745,
|
|
"loss": 0.0444,
|
|
"step": 5740
|
|
},
|
|
{
|
|
"epoch": 41.29032258064516,
|
|
"grad_norm": 2.8789002895355225,
|
|
"learning_rate": 0.00016733044733044733,
|
|
"loss": 0.0369,
|
|
"step": 5760
|
|
},
|
|
{
|
|
"epoch": 41.4336917562724,
|
|
"grad_norm": 1.6924148797988892,
|
|
"learning_rate": 0.00016721500721500723,
|
|
"loss": 0.0457,
|
|
"step": 5780
|
|
},
|
|
{
|
|
"epoch": 41.577060931899645,
|
|
"grad_norm": 1.9911508560180664,
|
|
"learning_rate": 0.0001670995670995671,
|
|
"loss": 0.0433,
|
|
"step": 5800
|
|
},
|
|
{
|
|
"epoch": 41.72043010752688,
|
|
"grad_norm": 1.8963024616241455,
|
|
"learning_rate": 0.000166984126984127,
|
|
"loss": 0.0486,
|
|
"step": 5820
|
|
},
|
|
{
|
|
"epoch": 41.86379928315412,
|
|
"grad_norm": 2.808363437652588,
|
|
"learning_rate": 0.00016686868686868688,
|
|
"loss": 0.0476,
|
|
"step": 5840
|
|
},
|
|
{
|
|
"epoch": 42.007168458781365,
|
|
"grad_norm": 2.1983284950256348,
|
|
"learning_rate": 0.00016675324675324675,
|
|
"loss": 0.0464,
|
|
"step": 5860
|
|
},
|
|
{
|
|
"epoch": 42.1505376344086,
|
|
"grad_norm": 1.6120527982711792,
|
|
"learning_rate": 0.00016663780663780665,
|
|
"loss": 0.0361,
|
|
"step": 5880
|
|
},
|
|
{
|
|
"epoch": 42.29390681003584,
|
|
"grad_norm": 3.206881046295166,
|
|
"learning_rate": 0.00016652236652236652,
|
|
"loss": 0.0465,
|
|
"step": 5900
|
|
},
|
|
{
|
|
"epoch": 42.437275985663085,
|
|
"grad_norm": 2.2303574085235596,
|
|
"learning_rate": 0.00016640692640692642,
|
|
"loss": 0.0468,
|
|
"step": 5920
|
|
},
|
|
{
|
|
"epoch": 42.58064516129032,
|
|
"grad_norm": 2.191185235977173,
|
|
"learning_rate": 0.0001662914862914863,
|
|
"loss": 0.0453,
|
|
"step": 5940
|
|
},
|
|
{
|
|
"epoch": 42.72401433691756,
|
|
"grad_norm": 4.653162002563477,
|
|
"learning_rate": 0.0001661760461760462,
|
|
"loss": 0.0498,
|
|
"step": 5960
|
|
},
|
|
{
|
|
"epoch": 42.867383512544805,
|
|
"grad_norm": 1.5531796216964722,
|
|
"learning_rate": 0.00016606060606060607,
|
|
"loss": 0.0493,
|
|
"step": 5980
|
|
},
|
|
{
|
|
"epoch": 43.01075268817204,
|
|
"grad_norm": 2.2170863151550293,
|
|
"learning_rate": 0.00016594516594516597,
|
|
"loss": 0.0479,
|
|
"step": 6000
|
|
},
|
|
{
|
|
"epoch": 43.15412186379928,
|
|
"grad_norm": 2.6806721687316895,
|
|
"learning_rate": 0.00016582972582972585,
|
|
"loss": 0.0364,
|
|
"step": 6020
|
|
},
|
|
{
|
|
"epoch": 43.297491039426525,
|
|
"grad_norm": 2.155580997467041,
|
|
"learning_rate": 0.00016571428571428575,
|
|
"loss": 0.0413,
|
|
"step": 6040
|
|
},
|
|
{
|
|
"epoch": 43.44086021505376,
|
|
"grad_norm": 1.8151005506515503,
|
|
"learning_rate": 0.0001655988455988456,
|
|
"loss": 0.0446,
|
|
"step": 6060
|
|
},
|
|
{
|
|
"epoch": 43.584229390681,
|
|
"grad_norm": 1.0290437936782837,
|
|
"learning_rate": 0.0001654834054834055,
|
|
"loss": 0.0455,
|
|
"step": 6080
|
|
},
|
|
{
|
|
"epoch": 43.727598566308245,
|
|
"grad_norm": 1.4907678365707397,
|
|
"learning_rate": 0.00016536796536796537,
|
|
"loss": 0.0437,
|
|
"step": 6100
|
|
},
|
|
{
|
|
"epoch": 43.87096774193548,
|
|
"grad_norm": 2.121614933013916,
|
|
"learning_rate": 0.00016525252525252527,
|
|
"loss": 0.0525,
|
|
"step": 6120
|
|
},
|
|
{
|
|
"epoch": 44.01433691756272,
|
|
"grad_norm": 1.2397854328155518,
|
|
"learning_rate": 0.00016513708513708514,
|
|
"loss": 0.0467,
|
|
"step": 6140
|
|
},
|
|
{
|
|
"epoch": 44.157706093189965,
|
|
"grad_norm": 1.3860089778900146,
|
|
"learning_rate": 0.00016502164502164504,
|
|
"loss": 0.0335,
|
|
"step": 6160
|
|
},
|
|
{
|
|
"epoch": 44.30107526881721,
|
|
"grad_norm": 3.316824436187744,
|
|
"learning_rate": 0.00016490620490620491,
|
|
"loss": 0.0408,
|
|
"step": 6180
|
|
},
|
|
{
|
|
"epoch": 44.44444444444444,
|
|
"grad_norm": 1.873334527015686,
|
|
"learning_rate": 0.00016479076479076482,
|
|
"loss": 0.0377,
|
|
"step": 6200
|
|
},
|
|
{
|
|
"epoch": 44.587813620071685,
|
|
"grad_norm": 2.275757312774658,
|
|
"learning_rate": 0.0001646753246753247,
|
|
"loss": 0.0415,
|
|
"step": 6220
|
|
},
|
|
{
|
|
"epoch": 44.73118279569893,
|
|
"grad_norm": 3.352830648422241,
|
|
"learning_rate": 0.00016455988455988456,
|
|
"loss": 0.0444,
|
|
"step": 6240
|
|
},
|
|
{
|
|
"epoch": 44.87455197132616,
|
|
"grad_norm": 0.8780913352966309,
|
|
"learning_rate": 0.00016444444444444444,
|
|
"loss": 0.0437,
|
|
"step": 6260
|
|
},
|
|
{
|
|
"epoch": 45.017921146953405,
|
|
"grad_norm": 1.7202258110046387,
|
|
"learning_rate": 0.00016432900432900434,
|
|
"loss": 0.043,
|
|
"step": 6280
|
|
},
|
|
{
|
|
"epoch": 45.16129032258065,
|
|
"grad_norm": 1.2132396697998047,
|
|
"learning_rate": 0.0001642135642135642,
|
|
"loss": 0.0375,
|
|
"step": 6300
|
|
},
|
|
{
|
|
"epoch": 45.30465949820788,
|
|
"grad_norm": 1.2273428440093994,
|
|
"learning_rate": 0.0001640981240981241,
|
|
"loss": 0.0343,
|
|
"step": 6320
|
|
},
|
|
{
|
|
"epoch": 45.448028673835125,
|
|
"grad_norm": 1.7960898876190186,
|
|
"learning_rate": 0.00016398268398268398,
|
|
"loss": 0.0395,
|
|
"step": 6340
|
|
},
|
|
{
|
|
"epoch": 45.59139784946237,
|
|
"grad_norm": 3.1496095657348633,
|
|
"learning_rate": 0.00016386724386724388,
|
|
"loss": 0.0392,
|
|
"step": 6360
|
|
},
|
|
{
|
|
"epoch": 45.7347670250896,
|
|
"grad_norm": 0.9285885691642761,
|
|
"learning_rate": 0.00016375180375180376,
|
|
"loss": 0.0383,
|
|
"step": 6380
|
|
},
|
|
{
|
|
"epoch": 45.878136200716845,
|
|
"grad_norm": 2.420919179916382,
|
|
"learning_rate": 0.00016363636363636366,
|
|
"loss": 0.0472,
|
|
"step": 6400
|
|
},
|
|
{
|
|
"epoch": 46.02150537634409,
|
|
"grad_norm": 1.5770702362060547,
|
|
"learning_rate": 0.00016352092352092353,
|
|
"loss": 0.0396,
|
|
"step": 6420
|
|
},
|
|
{
|
|
"epoch": 46.16487455197132,
|
|
"grad_norm": 1.161670207977295,
|
|
"learning_rate": 0.0001634054834054834,
|
|
"loss": 0.0317,
|
|
"step": 6440
|
|
},
|
|
{
|
|
"epoch": 46.308243727598565,
|
|
"grad_norm": 1.5375560522079468,
|
|
"learning_rate": 0.00016329004329004328,
|
|
"loss": 0.0415,
|
|
"step": 6460
|
|
},
|
|
{
|
|
"epoch": 46.45161290322581,
|
|
"grad_norm": 1.4522629976272583,
|
|
"learning_rate": 0.00016317460317460318,
|
|
"loss": 0.0356,
|
|
"step": 6480
|
|
},
|
|
{
|
|
"epoch": 46.59498207885304,
|
|
"grad_norm": 2.3963541984558105,
|
|
"learning_rate": 0.00016305916305916305,
|
|
"loss": 0.038,
|
|
"step": 6500
|
|
},
|
|
{
|
|
"epoch": 46.738351254480285,
|
|
"grad_norm": 1.1542603969573975,
|
|
"learning_rate": 0.00016294372294372295,
|
|
"loss": 0.0461,
|
|
"step": 6520
|
|
},
|
|
{
|
|
"epoch": 46.88172043010753,
|
|
"grad_norm": 2.11688232421875,
|
|
"learning_rate": 0.00016282828282828283,
|
|
"loss": 0.0372,
|
|
"step": 6540
|
|
},
|
|
{
|
|
"epoch": 47.02508960573477,
|
|
"grad_norm": 0.9798230528831482,
|
|
"learning_rate": 0.00016271284271284273,
|
|
"loss": 0.0447,
|
|
"step": 6560
|
|
},
|
|
{
|
|
"epoch": 47.168458781362006,
|
|
"grad_norm": 2.257840394973755,
|
|
"learning_rate": 0.0001625974025974026,
|
|
"loss": 0.0297,
|
|
"step": 6580
|
|
},
|
|
{
|
|
"epoch": 47.31182795698925,
|
|
"grad_norm": 2.5821969509124756,
|
|
"learning_rate": 0.00016248196248196248,
|
|
"loss": 0.0349,
|
|
"step": 6600
|
|
},
|
|
{
|
|
"epoch": 47.45519713261649,
|
|
"grad_norm": 1.9042233228683472,
|
|
"learning_rate": 0.00016236652236652238,
|
|
"loss": 0.0394,
|
|
"step": 6620
|
|
},
|
|
{
|
|
"epoch": 47.598566308243726,
|
|
"grad_norm": 1.7701516151428223,
|
|
"learning_rate": 0.00016225108225108225,
|
|
"loss": 0.0377,
|
|
"step": 6640
|
|
},
|
|
{
|
|
"epoch": 47.74193548387097,
|
|
"grad_norm": 1.6117067337036133,
|
|
"learning_rate": 0.00016213564213564215,
|
|
"loss": 0.0435,
|
|
"step": 6660
|
|
},
|
|
{
|
|
"epoch": 47.88530465949821,
|
|
"grad_norm": 2.874051570892334,
|
|
"learning_rate": 0.00016202020202020202,
|
|
"loss": 0.0414,
|
|
"step": 6680
|
|
},
|
|
{
|
|
"epoch": 48.028673835125446,
|
|
"grad_norm": 1.6802932024002075,
|
|
"learning_rate": 0.00016190476190476192,
|
|
"loss": 0.0378,
|
|
"step": 6700
|
|
},
|
|
{
|
|
"epoch": 48.17204301075269,
|
|
"grad_norm": 1.4522868394851685,
|
|
"learning_rate": 0.0001617893217893218,
|
|
"loss": 0.0302,
|
|
"step": 6720
|
|
},
|
|
{
|
|
"epoch": 48.31541218637993,
|
|
"grad_norm": 1.544982671737671,
|
|
"learning_rate": 0.0001616738816738817,
|
|
"loss": 0.0365,
|
|
"step": 6740
|
|
},
|
|
{
|
|
"epoch": 48.458781362007166,
|
|
"grad_norm": 1.7399832010269165,
|
|
"learning_rate": 0.00016155844155844157,
|
|
"loss": 0.0365,
|
|
"step": 6760
|
|
},
|
|
{
|
|
"epoch": 48.60215053763441,
|
|
"grad_norm": 1.5386717319488525,
|
|
"learning_rate": 0.00016144300144300145,
|
|
"loss": 0.0352,
|
|
"step": 6780
|
|
},
|
|
{
|
|
"epoch": 48.74551971326165,
|
|
"grad_norm": 2.235114812850952,
|
|
"learning_rate": 0.00016132756132756132,
|
|
"loss": 0.0364,
|
|
"step": 6800
|
|
},
|
|
{
|
|
"epoch": 48.888888888888886,
|
|
"grad_norm": 2.3979759216308594,
|
|
"learning_rate": 0.00016121212121212122,
|
|
"loss": 0.0419,
|
|
"step": 6820
|
|
},
|
|
{
|
|
"epoch": 49.03225806451613,
|
|
"grad_norm": 1.5085747241973877,
|
|
"learning_rate": 0.0001610966810966811,
|
|
"loss": 0.0429,
|
|
"step": 6840
|
|
},
|
|
{
|
|
"epoch": 49.17562724014337,
|
|
"grad_norm": 2.1474428176879883,
|
|
"learning_rate": 0.000160981240981241,
|
|
"loss": 0.0307,
|
|
"step": 6860
|
|
},
|
|
{
"epoch": 49.31899641577061,
"grad_norm": 1.3394923210144043,
"learning_rate": 0.00016086580086580087,
"loss": 0.0428,
"step": 6880
},
{
"epoch": 49.46236559139785,
"grad_norm": 2.126370668411255,
"learning_rate": 0.00016075036075036077,
"loss": 0.0379,
"step": 6900
},
{
"epoch": 49.60573476702509,
"grad_norm": 1.1088324785232544,
"learning_rate": 0.00016063492063492064,
"loss": 0.0356,
"step": 6920
},
{
"epoch": 49.74910394265233,
"grad_norm": 2.978712320327759,
"learning_rate": 0.00016051948051948054,
"loss": 0.0424,
"step": 6940
},
{
"epoch": 49.89247311827957,
"grad_norm": 2.286139488220215,
"learning_rate": 0.0001604040404040404,
"loss": 0.0386,
"step": 6960
},
{
"epoch": 50.03584229390681,
"grad_norm": 0.7451670169830322,
"learning_rate": 0.0001602886002886003,
"loss": 0.0427,
"step": 6980
},
{
"epoch": 50.17921146953405,
"grad_norm": 1.148743748664856,
"learning_rate": 0.00016017316017316016,
"loss": 0.0305,
"step": 7000
},
{
"epoch": 50.32258064516129,
"grad_norm": 1.3162598609924316,
"learning_rate": 0.00016005772005772006,
"loss": 0.0334,
"step": 7020
},
{
"epoch": 50.46594982078853,
"grad_norm": 2.752922296524048,
"learning_rate": 0.00015994227994227994,
"loss": 0.0366,
"step": 7040
},
{
"epoch": 50.60931899641577,
"grad_norm": 1.8845641613006592,
"learning_rate": 0.00015982683982683984,
"loss": 0.0347,
"step": 7060
},
{
"epoch": 50.75268817204301,
"grad_norm": 2.329726219177246,
"learning_rate": 0.0001597113997113997,
"loss": 0.0369,
"step": 7080
},
{
"epoch": 50.89605734767025,
"grad_norm": 2.6540048122406006,
"learning_rate": 0.0001595959595959596,
"loss": 0.0401,
"step": 7100
},
{
"epoch": 51.03942652329749,
"grad_norm": 1.353549599647522,
"learning_rate": 0.00015948051948051948,
"loss": 0.0403,
"step": 7120
},
{
"epoch": 51.18279569892473,
"grad_norm": 1.1914341449737549,
"learning_rate": 0.00015936507936507936,
"loss": 0.0314,
"step": 7140
},
{
"epoch": 51.32616487455197,
"grad_norm": 2.1381866931915283,
"learning_rate": 0.00015924963924963926,
"loss": 0.0362,
"step": 7160
},
{
"epoch": 51.46953405017921,
"grad_norm": 0.9165394902229309,
"learning_rate": 0.00015913419913419913,
"loss": 0.0313,
"step": 7180
},
{
"epoch": 51.61290322580645,
"grad_norm": 1.80477774143219,
"learning_rate": 0.00015901875901875903,
"loss": 0.0368,
"step": 7200
},
{
"epoch": 51.75627240143369,
"grad_norm": 1.940472960472107,
"learning_rate": 0.0001589033189033189,
"loss": 0.0327,
"step": 7220
},
{
"epoch": 51.89964157706093,
"grad_norm": 2.8632259368896484,
"learning_rate": 0.0001587878787878788,
"loss": 0.0352,
"step": 7240
},
{
|
|
"epoch": 52.043010752688176,
|
|
"grad_norm": 1.245902419090271,
|
|
"learning_rate": 0.00015867243867243868,
|
|
"loss": 0.036,
|
|
"step": 7260
|
|
},
|
|
{
|
|
"epoch": 52.18637992831541,
|
|
"grad_norm": 1.5645310878753662,
|
|
"learning_rate": 0.00015855699855699858,
|
|
"loss": 0.0251,
|
|
"step": 7280
|
|
},
|
|
{
|
|
"epoch": 52.32974910394265,
|
|
"grad_norm": 2.4143242835998535,
|
|
"learning_rate": 0.00015844155844155845,
|
|
"loss": 0.0347,
|
|
"step": 7300
|
|
},
|
|
{
|
|
"epoch": 52.473118279569896,
|
|
"grad_norm": 1.4213330745697021,
|
|
"learning_rate": 0.00015832611832611836,
|
|
"loss": 0.035,
|
|
"step": 7320
|
|
},
|
|
{
|
|
"epoch": 52.61648745519713,
|
|
"grad_norm": 1.924867033958435,
|
|
"learning_rate": 0.0001582106782106782,
|
|
"loss": 0.0362,
|
|
"step": 7340
|
|
},
|
|
{
|
|
"epoch": 52.75985663082437,
|
|
"grad_norm": 2.1168925762176514,
|
|
"learning_rate": 0.0001580952380952381,
|
|
"loss": 0.037,
|
|
"step": 7360
|
|
},
|
|
{
|
|
"epoch": 52.903225806451616,
|
|
"grad_norm": 1.1882020235061646,
|
|
"learning_rate": 0.00015797979797979798,
|
|
"loss": 0.0324,
|
|
"step": 7380
|
|
},
|
|
{
|
|
"epoch": 53.04659498207885,
|
|
"grad_norm": 1.3322278261184692,
|
|
"learning_rate": 0.00015786435786435788,
|
|
"loss": 0.0316,
|
|
"step": 7400
|
|
},
|
|
{
|
|
"epoch": 53.18996415770609,
|
|
"grad_norm": 1.4307975769042969,
|
|
"learning_rate": 0.00015774891774891775,
|
|
"loss": 0.029,
|
|
"step": 7420
|
|
},
|
|
{
|
|
"epoch": 53.333333333333336,
|
|
"grad_norm": 1.0495163202285767,
|
|
"learning_rate": 0.00015763347763347765,
|
|
"loss": 0.0269,
|
|
"step": 7440
|
|
},
|
|
{
|
|
"epoch": 53.47670250896057,
|
|
"grad_norm": 2.268535614013672,
|
|
"learning_rate": 0.00015751803751803752,
|
|
"loss": 0.0335,
|
|
"step": 7460
|
|
},
|
|
{
|
|
"epoch": 53.62007168458781,
|
|
"grad_norm": 1.3576409816741943,
|
|
"learning_rate": 0.00015740259740259742,
|
|
"loss": 0.0346,
|
|
"step": 7480
|
|
},
|
|
{
|
|
"epoch": 53.763440860215056,
|
|
"grad_norm": 0.9467073678970337,
|
|
"learning_rate": 0.0001572871572871573,
|
|
"loss": 0.0338,
|
|
"step": 7500
|
|
},
|
|
{
|
|
"epoch": 53.90681003584229,
|
|
"grad_norm": 1.293380856513977,
|
|
"learning_rate": 0.00015717171717171717,
|
|
"loss": 0.0333,
|
|
"step": 7520
|
|
},
|
|
{
|
|
"epoch": 54.05017921146953,
|
|
"grad_norm": 2.2245001792907715,
|
|
"learning_rate": 0.00015705627705627704,
|
|
"loss": 0.0283,
|
|
"step": 7540
|
|
},
|
|
{
|
|
"epoch": 54.193548387096776,
|
|
"grad_norm": 1.7361971139907837,
|
|
"learning_rate": 0.00015694083694083695,
|
|
"loss": 0.027,
|
|
"step": 7560
|
|
},
|
|
{
|
|
"epoch": 54.33691756272401,
|
|
"grad_norm": 1.952705979347229,
|
|
"learning_rate": 0.00015682539682539682,
|
|
"loss": 0.0325,
|
|
"step": 7580
|
|
},
|
|
{
|
|
"epoch": 54.48028673835125,
|
|
"grad_norm": 1.6739161014556885,
|
|
"learning_rate": 0.00015670995670995672,
|
|
"loss": 0.0265,
|
|
"step": 7600
|
|
},
|
|
{
|
|
"epoch": 54.623655913978496,
|
|
"grad_norm": 1.3664783239364624,
|
|
"learning_rate": 0.0001565945165945166,
|
|
"loss": 0.0323,
|
|
"step": 7620
|
|
},
|
|
{
|
|
"epoch": 54.76702508960574,
|
|
"grad_norm": 0.8929353952407837,
|
|
"learning_rate": 0.0001564790764790765,
|
|
"loss": 0.0274,
|
|
"step": 7640
|
|
},
|
|
{
|
|
"epoch": 54.91039426523297,
|
|
"grad_norm": 2.452836036682129,
|
|
"learning_rate": 0.00015636363636363637,
|
|
"loss": 0.0369,
|
|
"step": 7660
|
|
},
|
|
{
|
|
"epoch": 55.053763440860216,
|
|
"grad_norm": 1.300386905670166,
|
|
"learning_rate": 0.00015624819624819627,
|
|
"loss": 0.0328,
|
|
"step": 7680
|
|
},
|
|
{
|
|
"epoch": 55.19713261648746,
|
|
"grad_norm": 1.373857021331787,
|
|
"learning_rate": 0.00015613275613275614,
|
|
"loss": 0.0267,
|
|
"step": 7700
|
|
},
|
|
{
|
|
"epoch": 55.340501792114694,
|
|
"grad_norm": 0.9818399548530579,
|
|
"learning_rate": 0.00015601731601731601,
|
|
"loss": 0.0291,
|
|
"step": 7720
|
|
},
|
|
{
|
|
"epoch": 55.483870967741936,
|
|
"grad_norm": 2.268507719039917,
|
|
"learning_rate": 0.00015590187590187592,
|
|
"loss": 0.0307,
|
|
"step": 7740
|
|
},
|
|
{
|
|
"epoch": 55.62724014336918,
|
|
"grad_norm": 1.9051214456558228,
|
|
"learning_rate": 0.0001557864357864358,
|
|
"loss": 0.032,
|
|
"step": 7760
|
|
},
|
|
{
|
|
"epoch": 55.770609318996414,
|
|
"grad_norm": 2.82845401763916,
|
|
"learning_rate": 0.0001556709956709957,
|
|
"loss": 0.0324,
|
|
"step": 7780
|
|
},
|
|
{
|
|
"epoch": 55.913978494623656,
|
|
"grad_norm": 1.909113883972168,
|
|
"learning_rate": 0.00015555555555555556,
|
|
"loss": 0.0337,
|
|
"step": 7800
|
|
},
|
|
{
|
|
"epoch": 56.0573476702509,
|
|
"grad_norm": 1.3736584186553955,
|
|
"learning_rate": 0.00015544011544011546,
|
|
"loss": 0.0287,
|
|
"step": 7820
|
|
},
|
|
{
|
|
"epoch": 56.200716845878134,
|
|
"grad_norm": 1.8787893056869507,
|
|
"learning_rate": 0.00015532467532467534,
|
|
"loss": 0.0229,
|
|
"step": 7840
|
|
},
|
|
{
|
|
"epoch": 56.344086021505376,
|
|
"grad_norm": 2.683082103729248,
|
|
"learning_rate": 0.00015520923520923524,
|
|
"loss": 0.0279,
|
|
"step": 7860
|
|
},
|
|
{
|
|
"epoch": 56.48745519713262,
|
|
"grad_norm": 2.0692434310913086,
|
|
"learning_rate": 0.00015509379509379508,
|
|
"loss": 0.0325,
|
|
"step": 7880
|
|
},
|
|
{
|
|
"epoch": 56.630824372759854,
|
|
"grad_norm": 1.6145623922348022,
|
|
"learning_rate": 0.00015497835497835498,
|
|
"loss": 0.0283,
|
|
"step": 7900
|
|
},
|
|
{
|
|
"epoch": 56.774193548387096,
|
|
"grad_norm": 1.7083507776260376,
|
|
"learning_rate": 0.00015486291486291486,
|
|
"loss": 0.0333,
|
|
"step": 7920
|
|
},
|
|
{
|
|
"epoch": 56.91756272401434,
|
|
"grad_norm": 1.145247220993042,
|
|
"learning_rate": 0.00015474747474747476,
|
|
"loss": 0.0317,
|
|
"step": 7940
|
|
},
|
|
{
|
|
"epoch": 57.060931899641574,
|
|
"grad_norm": 0.9719469547271729,
|
|
"learning_rate": 0.00015463203463203463,
|
|
"loss": 0.0286,
|
|
"step": 7960
|
|
},
|
|
{
|
|
"epoch": 57.204301075268816,
|
|
"grad_norm": 2.092046022415161,
|
|
"learning_rate": 0.00015451659451659453,
|
|
"loss": 0.0261,
|
|
"step": 7980
|
|
},
|
|
{
|
|
"epoch": 57.34767025089606,
|
|
"grad_norm": 1.0875440835952759,
|
|
"learning_rate": 0.0001544011544011544,
|
|
"loss": 0.0269,
|
|
"step": 8000
|
|
},
|
|
{
|
|
"epoch": 57.4910394265233,
|
|
"grad_norm": 0.8553919792175293,
|
|
"learning_rate": 0.0001542857142857143,
|
|
"loss": 0.025,
|
|
"step": 8020
|
|
},
|
|
{
|
|
"epoch": 57.634408602150536,
|
|
"grad_norm": 2.4628798961639404,
|
|
"learning_rate": 0.00015417027417027418,
|
|
"loss": 0.0277,
|
|
"step": 8040
|
|
},
|
|
{
|
|
"epoch": 57.77777777777778,
|
|
"grad_norm": 1.3310191631317139,
|
|
"learning_rate": 0.00015405483405483405,
|
|
"loss": 0.0346,
|
|
"step": 8060
|
|
},
|
|
{
|
|
"epoch": 57.92114695340502,
|
|
"grad_norm": 1.6072683334350586,
|
|
"learning_rate": 0.00015393939393939393,
|
|
"loss": 0.0337,
|
|
"step": 8080
|
|
},
|
|
{
|
|
"epoch": 58.064516129032256,
|
|
"grad_norm": 1.4923372268676758,
|
|
"learning_rate": 0.00015382395382395383,
|
|
"loss": 0.0266,
|
|
"step": 8100
|
|
},
|
|
{
|
|
"epoch": 58.2078853046595,
|
|
"grad_norm": 1.3505113124847412,
|
|
"learning_rate": 0.0001537085137085137,
|
|
"loss": 0.0274,
|
|
"step": 8120
|
|
},
|
|
{
|
|
"epoch": 58.35125448028674,
|
|
"grad_norm": 1.782718300819397,
|
|
"learning_rate": 0.0001535930735930736,
|
|
"loss": 0.0249,
|
|
"step": 8140
|
|
},
|
|
{
|
|
"epoch": 58.494623655913976,
|
|
"grad_norm": 2.466642141342163,
|
|
"learning_rate": 0.00015347763347763348,
|
|
"loss": 0.029,
|
|
"step": 8160
|
|
},
|
|
{
|
|
"epoch": 58.63799283154122,
|
|
"grad_norm": 1.3576604127883911,
|
|
"learning_rate": 0.00015336219336219338,
|
|
"loss": 0.0265,
|
|
"step": 8180
|
|
},
|
|
{
|
|
"epoch": 58.78136200716846,
|
|
"grad_norm": 1.5334093570709229,
|
|
"learning_rate": 0.00015324675324675325,
|
|
"loss": 0.0306,
|
|
"step": 8200
|
|
},
|
|
{
|
|
"epoch": 58.924731182795696,
|
|
"grad_norm": 0.9467904567718506,
|
|
"learning_rate": 0.00015313131313131315,
|
|
"loss": 0.0277,
|
|
"step": 8220
|
|
},
|
|
{
|
|
"epoch": 59.06810035842294,
|
|
"grad_norm": 2.2288928031921387,
|
|
"learning_rate": 0.00015301587301587302,
|
|
"loss": 0.0276,
|
|
"step": 8240
|
|
},
|
|
{
|
|
"epoch": 59.21146953405018,
|
|
"grad_norm": 2.1053996086120605,
|
|
"learning_rate": 0.0001529004329004329,
|
|
"loss": 0.0251,
|
|
"step": 8260
|
|
},
|
|
{
|
|
"epoch": 59.354838709677416,
|
|
"grad_norm": 1.3560117483139038,
|
|
"learning_rate": 0.0001527849927849928,
|
|
"loss": 0.0277,
|
|
"step": 8280
|
|
},
|
|
{
|
|
"epoch": 59.49820788530466,
|
|
"grad_norm": 2.2819387912750244,
|
|
"learning_rate": 0.00015266955266955267,
|
|
"loss": 0.0266,
|
|
"step": 8300
|
|
},
|
|
{
|
|
"epoch": 59.6415770609319,
|
|
"grad_norm": 2.4547369480133057,
|
|
"learning_rate": 0.00015255411255411257,
|
|
"loss": 0.0283,
|
|
"step": 8320
|
|
},
|
|
{
|
|
"epoch": 59.784946236559136,
|
|
"grad_norm": 1.3507577180862427,
|
|
"learning_rate": 0.00015243867243867245,
|
|
"loss": 0.0242,
|
|
"step": 8340
|
|
},
|
|
{
|
|
"epoch": 59.92831541218638,
|
|
"grad_norm": 1.0877169370651245,
|
|
"learning_rate": 0.00015232323232323235,
|
|
"loss": 0.0303,
|
|
"step": 8360
|
|
},
|
|
{
|
|
"epoch": 60.07168458781362,
|
|
"grad_norm": 0.7371624112129211,
|
|
"learning_rate": 0.00015220779220779222,
|
|
"loss": 0.0248,
|
|
"step": 8380
|
|
},
|
|
{
|
|
"epoch": 60.215053763440864,
|
|
"grad_norm": 1.8010125160217285,
|
|
"learning_rate": 0.00015209235209235212,
|
|
"loss": 0.0216,
|
|
"step": 8400
|
|
},
|
|
{
|
|
"epoch": 60.3584229390681,
|
|
"grad_norm": 1.563842535018921,
|
|
"learning_rate": 0.00015197691197691197,
|
|
"loss": 0.0218,
|
|
"step": 8420
|
|
},
|
|
{
|
|
"epoch": 60.50179211469534,
|
|
"grad_norm": 0.9503732323646545,
|
|
"learning_rate": 0.00015186147186147187,
|
|
"loss": 0.0269,
|
|
"step": 8440
|
|
},
|
|
{
|
|
"epoch": 60.645161290322584,
|
|
"grad_norm": 1.7325998544692993,
|
|
"learning_rate": 0.00015174603174603174,
|
|
"loss": 0.0328,
|
|
"step": 8460
|
|
},
|
|
{
|
|
"epoch": 60.78853046594982,
|
|
"grad_norm": 2.546757221221924,
|
|
"learning_rate": 0.00015163059163059164,
|
|
"loss": 0.0298,
|
|
"step": 8480
|
|
},
|
|
{
|
|
"epoch": 60.93189964157706,
|
|
"grad_norm": 2.4068710803985596,
|
|
"learning_rate": 0.00015151515151515152,
|
|
"loss": 0.0322,
|
|
"step": 8500
|
|
},
|
|
{
|
|
"epoch": 61.075268817204304,
|
|
"grad_norm": 1.4139975309371948,
|
|
"learning_rate": 0.00015139971139971142,
|
|
"loss": 0.0257,
|
|
"step": 8520
|
|
},
|
|
{
|
|
"epoch": 61.21863799283154,
|
|
"grad_norm": 1.5787806510925293,
|
|
"learning_rate": 0.0001512842712842713,
|
|
"loss": 0.0226,
|
|
"step": 8540
|
|
},
|
|
{
|
|
"epoch": 61.36200716845878,
|
|
"grad_norm": 1.1099748611450195,
|
|
"learning_rate": 0.0001511688311688312,
|
|
"loss": 0.0264,
|
|
"step": 8560
|
|
},
|
|
{
|
|
"epoch": 61.505376344086024,
|
|
"grad_norm": 1.5145684480667114,
|
|
"learning_rate": 0.00015105339105339106,
|
|
"loss": 0.0278,
|
|
"step": 8580
|
|
},
|
|
{
|
|
"epoch": 61.64874551971326,
|
|
"grad_norm": 1.3687940835952759,
|
|
"learning_rate": 0.00015093795093795096,
|
|
"loss": 0.0234,
|
|
"step": 8600
|
|
},
|
|
{
|
|
"epoch": 61.7921146953405,
|
|
"grad_norm": 1.019043207168579,
|
|
"learning_rate": 0.0001508225108225108,
|
|
"loss": 0.0272,
|
|
"step": 8620
|
|
},
|
|
{
|
|
"epoch": 61.935483870967744,
|
|
"grad_norm": 1.8688873052597046,
|
|
"learning_rate": 0.0001507070707070707,
|
|
"loss": 0.0294,
|
|
"step": 8640
|
|
},
|
|
{
|
|
"epoch": 62.07885304659498,
|
|
"grad_norm": 0.7723051309585571,
|
|
"learning_rate": 0.00015059163059163058,
|
|
"loss": 0.0264,
|
|
"step": 8660
|
|
},
|
|
{
|
|
"epoch": 62.22222222222222,
|
|
"grad_norm": 0.8704918026924133,
|
|
"learning_rate": 0.00015047619047619048,
|
|
"loss": 0.0245,
|
|
"step": 8680
|
|
},
|
|
{
|
|
"epoch": 62.365591397849464,
|
|
"grad_norm": 1.2645530700683594,
|
|
"learning_rate": 0.00015036075036075036,
|
|
"loss": 0.0239,
|
|
"step": 8700
|
|
},
|
|
{
|
|
"epoch": 62.5089605734767,
|
|
"grad_norm": 2.082028388977051,
|
|
"learning_rate": 0.00015024531024531026,
|
|
"loss": 0.0295,
|
|
"step": 8720
|
|
},
|
|
{
|
|
"epoch": 62.65232974910394,
|
|
"grad_norm": 1.1436362266540527,
|
|
"learning_rate": 0.00015012987012987013,
|
|
"loss": 0.0247,
|
|
"step": 8740
|
|
},
|
|
{
|
|
"epoch": 62.795698924731184,
|
|
"grad_norm": 1.4623620510101318,
|
|
"learning_rate": 0.00015001443001443003,
|
|
"loss": 0.0275,
|
|
"step": 8760
|
|
},
|
|
{
|
|
"epoch": 62.939068100358426,
|
|
"grad_norm": 0.524268388748169,
|
|
"learning_rate": 0.0001498989898989899,
|
|
"loss": 0.0253,
|
|
"step": 8780
|
|
},
|
|
{
|
|
"epoch": 63.08243727598566,
|
|
"grad_norm": 0.9553951621055603,
|
|
"learning_rate": 0.00014978354978354978,
|
|
"loss": 0.0223,
|
|
"step": 8800
|
|
},
|
|
{
|
|
"epoch": 63.225806451612904,
|
|
"grad_norm": 1.3312689065933228,
|
|
"learning_rate": 0.00014966810966810968,
|
|
"loss": 0.0244,
|
|
"step": 8820
|
|
},
|
|
{
|
|
"epoch": 63.369175627240146,
|
|
"grad_norm": 1.1657428741455078,
|
|
"learning_rate": 0.00014955266955266955,
|
|
"loss": 0.0274,
|
|
"step": 8840
|
|
},
|
|
{
|
|
"epoch": 63.51254480286738,
|
|
"grad_norm": 2.2913668155670166,
|
|
"learning_rate": 0.00014943722943722945,
|
|
"loss": 0.0234,
|
|
"step": 8860
|
|
},
|
|
{
|
|
"epoch": 63.655913978494624,
|
|
"grad_norm": 0.7200100421905518,
|
|
"learning_rate": 0.00014932178932178933,
|
|
"loss": 0.0247,
|
|
"step": 8880
|
|
},
|
|
{
|
|
"epoch": 63.799283154121866,
|
|
"grad_norm": 1.651200532913208,
|
|
"learning_rate": 0.00014920634920634923,
|
|
"loss": 0.026,
|
|
"step": 8900
|
|
},
|
|
{
|
|
"epoch": 63.9426523297491,
|
|
"grad_norm": 1.192811131477356,
|
|
"learning_rate": 0.0001490909090909091,
|
|
"loss": 0.0283,
|
|
"step": 8920
|
|
},
|
|
{
|
|
"epoch": 64.08602150537635,
|
|
"grad_norm": 1.5972771644592285,
|
|
"learning_rate": 0.000148975468975469,
|
|
"loss": 0.026,
|
|
"step": 8940
|
|
},
|
|
{
|
|
"epoch": 64.22939068100358,
|
|
"grad_norm": 0.8746197819709778,
|
|
"learning_rate": 0.00014886002886002888,
|
|
"loss": 0.0237,
|
|
"step": 8960
|
|
},
|
|
{
|
|
"epoch": 64.37275985663082,
|
|
"grad_norm": 1.0549620389938354,
|
|
"learning_rate": 0.00014874458874458875,
|
|
"loss": 0.0249,
|
|
"step": 8980
|
|
},
|
|
{
|
|
"epoch": 64.51612903225806,
|
|
"grad_norm": 0.6685066819190979,
|
|
"learning_rate": 0.00014862914862914862,
|
|
"loss": 0.024,
|
|
"step": 9000
|
|
},
|
|
{
|
|
"epoch": 64.6594982078853,
|
|
"grad_norm": 0.803246021270752,
|
|
"learning_rate": 0.00014851370851370852,
|
|
"loss": 0.0228,
|
|
"step": 9020
|
|
},
|
|
{
|
|
"epoch": 64.80286738351255,
|
|
"grad_norm": 0.9190162420272827,
|
|
"learning_rate": 0.0001483982683982684,
|
|
"loss": 0.0224,
|
|
"step": 9040
|
|
},
|
|
{
|
|
"epoch": 64.94623655913979,
|
|
"grad_norm": 1.1969116926193237,
|
|
"learning_rate": 0.0001482828282828283,
|
|
"loss": 0.0285,
|
|
"step": 9060
|
|
},
|
|
{
|
|
"epoch": 65.08960573476702,
|
|
"grad_norm": 1.0449926853179932,
|
|
"learning_rate": 0.00014816738816738817,
|
|
"loss": 0.0265,
|
|
"step": 9080
|
|
},
|
|
{
|
|
"epoch": 65.23297491039426,
|
|
"grad_norm": 0.6982151865959167,
|
|
"learning_rate": 0.00014805194805194807,
|
|
"loss": 0.0204,
|
|
"step": 9100
|
|
},
|
|
{
|
|
"epoch": 65.3763440860215,
|
|
"grad_norm": 1.496858835220337,
|
|
"learning_rate": 0.00014793650793650795,
|
|
"loss": 0.0238,
|
|
"step": 9120
|
|
},
|
|
{
|
|
"epoch": 65.51971326164875,
|
|
"grad_norm": 0.826849639415741,
|
|
"learning_rate": 0.00014782106782106785,
|
|
"loss": 0.027,
|
|
"step": 9140
|
|
},
|
|
{
|
|
"epoch": 65.66308243727599,
|
|
"grad_norm": 1.4536947011947632,
|
|
"learning_rate": 0.0001477056277056277,
|
|
"loss": 0.0263,
|
|
"step": 9160
|
|
},
|
|
{
|
|
"epoch": 65.80645161290323,
|
|
"grad_norm": 2.4352056980133057,
|
|
"learning_rate": 0.0001475901875901876,
|
|
"loss": 0.0271,
|
|
"step": 9180
|
|
},
|
|
{
|
|
"epoch": 65.94982078853046,
|
|
"grad_norm": 1.828048586845398,
|
|
"learning_rate": 0.00014747474747474747,
|
|
"loss": 0.025,
|
|
"step": 9200
|
|
},
|
|
{
|
|
"epoch": 66.0931899641577,
|
|
"grad_norm": 0.8592638969421387,
|
|
"learning_rate": 0.00014735930735930737,
|
|
"loss": 0.0234,
|
|
"step": 9220
|
|
},
|
|
{
|
|
"epoch": 66.23655913978494,
|
|
"grad_norm": 1.3824059963226318,
|
|
"learning_rate": 0.00014724386724386724,
|
|
"loss": 0.0198,
|
|
"step": 9240
|
|
},
|
|
{
|
|
"epoch": 66.37992831541219,
|
|
"grad_norm": 1.9825196266174316,
|
|
"learning_rate": 0.00014712842712842714,
|
|
"loss": 0.0251,
|
|
"step": 9260
|
|
},
|
|
{
|
|
"epoch": 66.52329749103943,
|
|
"grad_norm": 2.2713663578033447,
|
|
"learning_rate": 0.00014701298701298702,
|
|
"loss": 0.0234,
|
|
"step": 9280
|
|
},
|
|
{
|
|
"epoch": 66.66666666666667,
|
|
"grad_norm": 1.5792700052261353,
|
|
"learning_rate": 0.00014689754689754692,
|
|
"loss": 0.0265,
|
|
"step": 9300
|
|
},
|
|
{
|
|
"epoch": 66.81003584229391,
|
|
"grad_norm": 2.062516689300537,
|
|
"learning_rate": 0.0001467821067821068,
|
|
"loss": 0.0255,
|
|
"step": 9320
|
|
},
|
|
{
|
|
"epoch": 66.95340501792114,
|
|
"grad_norm": 1.345128059387207,
|
|
"learning_rate": 0.00014666666666666666,
|
|
"loss": 0.0254,
|
|
"step": 9340
|
|
},
|
|
{
|
|
"epoch": 67.09677419354838,
|
|
"grad_norm": 0.9115346074104309,
|
|
"learning_rate": 0.00014655122655122656,
|
|
"loss": 0.0229,
|
|
"step": 9360
|
|
},
|
|
{
|
|
"epoch": 67.24014336917563,
|
|
"grad_norm": 0.7011423110961914,
|
|
"learning_rate": 0.00014643578643578644,
|
|
"loss": 0.0201,
|
|
"step": 9380
|
|
},
|
|
{
|
|
"epoch": 67.38351254480287,
|
|
"grad_norm": 0.8718897700309753,
|
|
"learning_rate": 0.00014632034632034634,
|
|
"loss": 0.0227,
|
|
"step": 9400
|
|
},
|
|
{
|
|
"epoch": 67.52688172043011,
|
|
"grad_norm": 2.1501848697662354,
|
|
"learning_rate": 0.0001462049062049062,
|
|
"loss": 0.0232,
|
|
"step": 9420
|
|
},
|
|
{
|
|
"epoch": 67.67025089605735,
|
|
"grad_norm": 1.4368712902069092,
|
|
"learning_rate": 0.0001460894660894661,
|
|
"loss": 0.0253,
|
|
"step": 9440
|
|
},
|
|
{
|
|
"epoch": 67.81362007168458,
|
|
"grad_norm": 0.9436593651771545,
|
|
"learning_rate": 0.00014597402597402599,
|
|
"loss": 0.0253,
|
|
"step": 9460
|
|
},
|
|
{
|
|
"epoch": 67.95698924731182,
|
|
"grad_norm": 1.4069864749908447,
|
|
"learning_rate": 0.00014585858585858586,
|
|
"loss": 0.026,
|
|
"step": 9480
|
|
},
|
|
{
|
|
"epoch": 68.10035842293907,
|
|
"grad_norm": 0.6093623042106628,
|
|
"learning_rate": 0.00014574314574314576,
|
|
"loss": 0.0243,
|
|
"step": 9500
|
|
},
|
|
{
|
|
"epoch": 68.24372759856631,
|
|
"grad_norm": 1.180405616760254,
|
|
"learning_rate": 0.00014562770562770563,
|
|
"loss": 0.0214,
|
|
"step": 9520
|
|
},
|
|
{
|
|
"epoch": 68.38709677419355,
|
|
"grad_norm": 1.2015615701675415,
|
|
"learning_rate": 0.0001455122655122655,
|
|
"loss": 0.0226,
|
|
"step": 9540
|
|
},
|
|
{
|
|
"epoch": 68.5304659498208,
|
|
"grad_norm": 1.7189544439315796,
|
|
"learning_rate": 0.0001453968253968254,
|
|
"loss": 0.0213,
|
|
"step": 9560
|
|
},
|
|
{
|
|
"epoch": 68.67383512544802,
|
|
"grad_norm": 1.5872224569320679,
|
|
"learning_rate": 0.00014528138528138528,
|
|
"loss": 0.0241,
|
|
"step": 9580
|
|
},
|
|
{
|
|
"epoch": 68.81720430107526,
|
|
"grad_norm": 1.9644558429718018,
|
|
"learning_rate": 0.00014516594516594518,
|
|
"loss": 0.0237,
|
|
"step": 9600
|
|
},
|
|
{
|
|
"epoch": 68.9605734767025,
|
|
"grad_norm": 1.3132842779159546,
|
|
"learning_rate": 0.00014505050505050505,
|
|
"loss": 0.0249,
|
|
"step": 9620
|
|
},
|
|
{
|
|
"epoch": 69.10394265232975,
|
|
"grad_norm": 1.6341869831085205,
|
|
"learning_rate": 0.00014493506493506496,
|
|
"loss": 0.0192,
|
|
"step": 9640
|
|
},
|
|
{
|
|
"epoch": 69.24731182795699,
|
|
"grad_norm": 0.9933415651321411,
|
|
"learning_rate": 0.00014481962481962483,
|
|
"loss": 0.0167,
|
|
"step": 9660
|
|
},
|
|
{
|
|
"epoch": 69.39068100358423,
|
|
"grad_norm": 1.0422909259796143,
|
|
"learning_rate": 0.00014470418470418473,
|
|
"loss": 0.0204,
|
|
"step": 9680
|
|
},
|
|
{
|
|
"epoch": 69.53405017921148,
|
|
"grad_norm": 1.594971776008606,
|
|
"learning_rate": 0.00014458874458874458,
|
|
"loss": 0.0239,
|
|
"step": 9700
|
|
},
|
|
{
|
|
"epoch": 69.6774193548387,
|
|
"grad_norm": 2.2464418411254883,
|
|
"learning_rate": 0.00014447330447330448,
|
|
"loss": 0.0204,
|
|
"step": 9720
|
|
},
|
|
{
|
|
"epoch": 69.82078853046595,
|
|
"grad_norm": 1.5175591707229614,
|
|
"learning_rate": 0.00014435786435786435,
|
|
"loss": 0.0288,
|
|
"step": 9740
|
|
},
|
|
{
|
|
"epoch": 69.96415770609319,
|
|
"grad_norm": 0.9416744112968445,
|
|
"learning_rate": 0.00014424242424242425,
|
|
"loss": 0.0276,
|
|
"step": 9760
|
|
},
|
|
{
|
|
"epoch": 70.10752688172043,
|
|
"grad_norm": 1.7233195304870605,
|
|
"learning_rate": 0.00014412698412698412,
|
|
"loss": 0.022,
|
|
"step": 9780
|
|
},
|
|
{
|
|
"epoch": 70.25089605734767,
|
|
"grad_norm": 1.2204762697219849,
|
|
"learning_rate": 0.00014401154401154402,
|
|
"loss": 0.0173,
|
|
"step": 9800
|
|
},
|
|
{
|
|
"epoch": 70.39426523297492,
|
|
"grad_norm": 0.8648879528045654,
|
|
"learning_rate": 0.0001438961038961039,
|
|
"loss": 0.0191,
|
|
"step": 9820
|
|
},
|
|
{
|
|
"epoch": 70.53763440860214,
|
|
"grad_norm": 1.511797547340393,
|
|
"learning_rate": 0.0001437806637806638,
|
|
"loss": 0.0245,
|
|
"step": 9840
|
|
},
|
|
{
|
|
"epoch": 70.68100358422939,
|
|
"grad_norm": 0.8332564234733582,
|
|
"learning_rate": 0.00014366522366522367,
|
|
"loss": 0.0229,
|
|
"step": 9860
|
|
},
|
|
{
|
|
"epoch": 70.82437275985663,
|
|
"grad_norm": 1.21042799949646,
|
|
"learning_rate": 0.00014354978354978357,
|
|
"loss": 0.0215,
|
|
"step": 9880
|
|
},
|
|
{
|
|
"epoch": 70.96774193548387,
|
|
"grad_norm": 1.0077437162399292,
|
|
"learning_rate": 0.00014343434343434342,
|
|
"loss": 0.0247,
|
|
"step": 9900
|
|
},
|
|
{
|
|
"epoch": 71.11111111111111,
|
|
"grad_norm": 1.8682667016983032,
|
|
"learning_rate": 0.00014331890331890332,
|
|
"loss": 0.0181,
|
|
"step": 9920
|
|
},
|
|
{
|
|
"epoch": 71.25448028673836,
|
|
"grad_norm": 1.4242209196090698,
|
|
"learning_rate": 0.0001432034632034632,
|
|
"loss": 0.0203,
|
|
"step": 9940
|
|
},
|
|
{
|
|
"epoch": 71.39784946236558,
|
|
"grad_norm": 0.8294592499732971,
|
|
"learning_rate": 0.0001430880230880231,
|
|
"loss": 0.0178,
|
|
"step": 9960
|
|
},
|
|
{
|
|
"epoch": 71.54121863799283,
|
|
"grad_norm": 0.8091810345649719,
|
|
"learning_rate": 0.00014297258297258297,
|
|
"loss": 0.0171,
|
|
"step": 9980
|
|
},
|
|
{
|
|
"epoch": 71.68458781362007,
|
|
"grad_norm": 1.304630994796753,
|
|
"learning_rate": 0.00014285714285714287,
|
|
"loss": 0.0228,
|
|
"step": 10000
|
|
},
|
|
{
|
|
"epoch": 71.82795698924731,
|
|
"grad_norm": 0.9604668021202087,
|
|
"learning_rate": 0.00014274170274170274,
|
|
"loss": 0.0246,
|
|
"step": 10020
|
|
},
|
|
{
|
|
"epoch": 71.97132616487455,
|
|
"grad_norm": 0.9887527227401733,
|
|
"learning_rate": 0.00014262626262626264,
|
|
"loss": 0.0252,
|
|
"step": 10040
|
|
},
|
|
{
|
|
"epoch": 72.1146953405018,
|
|
"grad_norm": 0.6128952503204346,
|
|
"learning_rate": 0.00014251082251082252,
|
|
"loss": 0.0187,
|
|
"step": 10060
|
|
},
|
|
{
|
|
"epoch": 72.25806451612904,
|
|
"grad_norm": 1.252211093902588,
|
|
"learning_rate": 0.0001423953823953824,
|
|
"loss": 0.0181,
|
|
"step": 10080
|
|
},
|
|
{
|
|
"epoch": 72.40143369175627,
|
|
"grad_norm": 1.664756417274475,
|
|
"learning_rate": 0.0001422799422799423,
|
|
"loss": 0.0195,
|
|
"step": 10100
|
|
},
|
|
{
|
|
"epoch": 72.54480286738351,
|
|
"grad_norm": 1.0579140186309814,
|
|
"learning_rate": 0.00014216450216450216,
|
|
"loss": 0.022,
|
|
"step": 10120
|
|
},
|
|
{
|
|
"epoch": 72.68817204301075,
|
|
"grad_norm": 1.5698117017745972,
|
|
"learning_rate": 0.00014204906204906206,
|
|
"loss": 0.0184,
|
|
"step": 10140
|
|
},
|
|
{
|
|
"epoch": 72.831541218638,
|
|
"grad_norm": 1.4136704206466675,
|
|
"learning_rate": 0.00014193362193362194,
|
|
"loss": 0.026,
|
|
"step": 10160
|
|
},
|
|
{
|
|
"epoch": 72.97491039426524,
|
|
"grad_norm": 1.1597872972488403,
|
|
"learning_rate": 0.00014181818181818184,
|
|
"loss": 0.0235,
|
|
"step": 10180
|
|
},
|
|
{
|
|
"epoch": 73.11827956989248,
|
|
"grad_norm": 0.9069903492927551,
|
|
"learning_rate": 0.0001417027417027417,
|
|
"loss": 0.0199,
|
|
"step": 10200
|
|
},
|
|
{
|
|
"epoch": 73.26164874551971,
|
|
"grad_norm": 0.9252745509147644,
|
|
"learning_rate": 0.0001415873015873016,
|
|
"loss": 0.0158,
|
|
"step": 10220
|
|
},
|
|
{
|
|
"epoch": 73.40501792114695,
|
|
"grad_norm": 1.0820692777633667,
|
|
"learning_rate": 0.00014147186147186149,
|
|
"loss": 0.0203,
|
|
"step": 10240
|
|
},
|
|
{
|
|
"epoch": 73.54838709677419,
|
|
"grad_norm": 0.9505143165588379,
|
|
"learning_rate": 0.00014135642135642136,
|
|
"loss": 0.0197,
|
|
"step": 10260
|
|
},
|
|
{
|
|
"epoch": 73.69175627240143,
|
|
"grad_norm": 0.6456949710845947,
|
|
"learning_rate": 0.00014124098124098123,
|
|
"loss": 0.0218,
|
|
"step": 10280
|
|
},
|
|
{
|
|
"epoch": 73.83512544802868,
|
|
"grad_norm": 1.034869909286499,
|
|
"learning_rate": 0.00014112554112554113,
|
|
"loss": 0.0216,
|
|
"step": 10300
|
|
},
|
|
{
|
|
"epoch": 73.97849462365592,
|
|
"grad_norm": 2.550605297088623,
|
|
"learning_rate": 0.000141010101010101,
|
|
"loss": 0.0216,
|
|
"step": 10320
|
|
},
|
|
{
|
|
"epoch": 74.12186379928315,
|
|
"grad_norm": 1.9784917831420898,
|
|
"learning_rate": 0.0001408946608946609,
|
|
"loss": 0.0159,
|
|
"step": 10340
|
|
},
|
|
{
|
|
"epoch": 74.26523297491039,
|
|
"grad_norm": 0.852441132068634,
|
|
"learning_rate": 0.00014077922077922078,
|
|
"loss": 0.0184,
|
|
"step": 10360
|
|
},
|
|
{
|
|
"epoch": 74.40860215053763,
|
|
"grad_norm": 1.1380398273468018,
|
|
"learning_rate": 0.00014066378066378068,
|
|
"loss": 0.0189,
|
|
"step": 10380
|
|
},
|
|
{
|
|
"epoch": 74.55197132616487,
|
|
"grad_norm": 4.045578479766846,
|
|
"learning_rate": 0.00014054834054834055,
|
|
"loss": 0.0199,
|
|
"step": 10400
|
|
},
|
|
{
|
|
"epoch": 74.69534050179212,
|
|
"grad_norm": 1.1933624744415283,
|
|
"learning_rate": 0.00014043290043290046,
|
|
"loss": 0.0225,
|
|
"step": 10420
|
|
},
|
|
{
|
|
"epoch": 74.83870967741936,
|
|
"grad_norm": 1.0848654508590698,
|
|
"learning_rate": 0.0001403174603174603,
|
|
"loss": 0.022,
|
|
"step": 10440
|
|
},
|
|
{
|
|
"epoch": 74.9820788530466,
|
|
"grad_norm": 1.3202558755874634,
|
|
"learning_rate": 0.0001402020202020202,
|
|
"loss": 0.0262,
|
|
"step": 10460
|
|
},
|
|
{
|
|
"epoch": 75.12544802867383,
|
|
"grad_norm": 0.5207009315490723,
|
|
"learning_rate": 0.00014008658008658008,
|
|
"loss": 0.0163,
|
|
"step": 10480
|
|
},
|
|
{
|
|
"epoch": 75.26881720430107,
|
|
"grad_norm": 1.06955885887146,
|
|
"learning_rate": 0.00013997113997113998,
|
|
"loss": 0.0211,
|
|
"step": 10500
|
|
},
|
|
{
|
|
"epoch": 75.41218637992831,
|
|
"grad_norm": 0.9280649423599243,
|
|
"learning_rate": 0.00013985569985569985,
|
|
"loss": 0.023,
|
|
"step": 10520
|
|
},
|
|
{
|
|
"epoch": 75.55555555555556,
|
|
"grad_norm": 0.8869330883026123,
|
|
"learning_rate": 0.00013974025974025975,
|
|
"loss": 0.0222,
|
|
"step": 10540
|
|
},
|
|
{
|
|
"epoch": 75.6989247311828,
|
|
"grad_norm": 1.6605570316314697,
|
|
"learning_rate": 0.00013962481962481962,
|
|
"loss": 0.02,
|
|
"step": 10560
|
|
},
|
|
{
|
|
"epoch": 75.84229390681004,
|
|
"grad_norm": 2.232680082321167,
|
|
"learning_rate": 0.00013950937950937952,
|
|
"loss": 0.0236,
|
|
"step": 10580
|
|
},
|
|
{
|
|
"epoch": 75.98566308243727,
|
|
"grad_norm": 1.2954185009002686,
|
|
"learning_rate": 0.0001393939393939394,
|
|
"loss": 0.0231,
|
|
"step": 10600
|
|
},
|
|
{
|
|
"epoch": 76.12903225806451,
|
|
"grad_norm": 3.355668783187866,
|
|
"learning_rate": 0.00013927849927849927,
|
|
"loss": 0.0167,
|
|
"step": 10620
|
|
},
|
|
{
|
|
"epoch": 76.27240143369175,
|
|
"grad_norm": 0.5665978193283081,
|
|
"learning_rate": 0.00013916305916305917,
|
|
"loss": 0.0198,
|
|
"step": 10640
|
|
},
|
|
{
|
|
"epoch": 76.415770609319,
|
|
"grad_norm": 1.1628668308258057,
|
|
"learning_rate": 0.00013904761904761905,
|
|
"loss": 0.0166,
|
|
"step": 10660
|
|
},
|
|
{
|
|
"epoch": 76.55913978494624,
|
|
"grad_norm": 1.055058240890503,
|
|
"learning_rate": 0.00013893217893217895,
|
|
"loss": 0.0171,
|
|
"step": 10680
|
|
},
|
|
{
|
|
"epoch": 76.70250896057348,
|
|
"grad_norm": 1.6783472299575806,
|
|
"learning_rate": 0.00013881673881673882,
|
|
"loss": 0.0181,
|
|
"step": 10700
|
|
},
|
|
{
|
|
"epoch": 76.84587813620071,
|
|
"grad_norm": 0.7342981100082397,
|
|
"learning_rate": 0.00013870129870129872,
|
|
"loss": 0.0218,
|
|
"step": 10720
|
|
},
|
|
{
|
|
"epoch": 76.98924731182795,
|
|
"grad_norm": 1.552182912826538,
|
|
"learning_rate": 0.0001385858585858586,
|
|
"loss": 0.0221,
|
|
"step": 10740
|
|
},
|
|
{
|
|
"epoch": 77.1326164874552,
|
|
"grad_norm": 1.4156135320663452,
|
|
"learning_rate": 0.0001384704184704185,
|
|
"loss": 0.0158,
|
|
"step": 10760
|
|
},
|
|
{
|
|
"epoch": 77.27598566308244,
|
|
"grad_norm": 0.9503490924835205,
|
|
"learning_rate": 0.00013835497835497837,
|
|
"loss": 0.0146,
|
|
"step": 10780
|
|
},
|
|
{
|
|
"epoch": 77.41935483870968,
|
|
"grad_norm": 1.0030412673950195,
|
|
"learning_rate": 0.00013823953823953824,
|
|
"loss": 0.021,
|
|
"step": 10800
|
|
},
|
|
{
|
|
"epoch": 77.56272401433692,
|
|
"grad_norm": 1.6275995969772339,
|
|
"learning_rate": 0.00013812409812409812,
|
|
"loss": 0.0245,
|
|
"step": 10820
|
|
},
|
|
{
|
|
"epoch": 77.70609318996416,
|
|
"grad_norm": 1.2786650657653809,
|
|
"learning_rate": 0.00013800865800865802,
|
|
"loss": 0.0211,
|
|
"step": 10840
|
|
},
|
|
{
|
|
"epoch": 77.84946236559139,
|
|
"grad_norm": 1.184510588645935,
|
|
"learning_rate": 0.0001378932178932179,
|
|
"loss": 0.0215,
|
|
"step": 10860
|
|
},
|
|
{
|
|
"epoch": 77.99283154121864,
|
|
"grad_norm": 1.4506416320800781,
|
|
"learning_rate": 0.0001377777777777778,
|
|
"loss": 0.0229,
|
|
"step": 10880
|
|
},
|
|
{
|
|
"epoch": 78.13620071684588,
|
|
"grad_norm": 0.8410572409629822,
|
|
"learning_rate": 0.00013766233766233766,
|
|
"loss": 0.0127,
|
|
"step": 10900
|
|
},
|
|
{
|
|
"epoch": 78.27956989247312,
|
|
"grad_norm": 1.0638360977172852,
|
|
"learning_rate": 0.00013754689754689756,
|
|
"loss": 0.0183,
|
|
"step": 10920
|
|
},
|
|
{
|
|
"epoch": 78.42293906810036,
|
|
"grad_norm": 0.7749863862991333,
|
|
"learning_rate": 0.00013743145743145744,
|
|
"loss": 0.0196,
|
|
"step": 10940
|
|
},
|
|
{
|
|
"epoch": 78.5663082437276,
|
|
"grad_norm": 0.7387521266937256,
|
|
"learning_rate": 0.00013731601731601734,
|
|
"loss": 0.0216,
|
|
"step": 10960
|
|
},
|
|
{
|
|
"epoch": 78.70967741935483,
|
|
"grad_norm": 0.6283668279647827,
|
|
"learning_rate": 0.00013720057720057718,
|
|
"loss": 0.0181,
|
|
"step": 10980
|
|
},
|
|
{
|
|
"epoch": 78.85304659498208,
|
|
"grad_norm": 0.7480463981628418,
|
|
"learning_rate": 0.00013708513708513709,
|
|
"loss": 0.0216,
|
|
"step": 11000
|
|
},
|
|
{
|
|
"epoch": 78.99641577060932,
|
|
"grad_norm": 0.9228019714355469,
|
|
"learning_rate": 0.00013696969696969696,
|
|
"loss": 0.0236,
|
|
"step": 11020
|
|
},
|
|
{
|
|
"epoch": 79.13978494623656,
|
|
"grad_norm": 0.8929498195648193,
|
|
"learning_rate": 0.00013685425685425686,
|
|
"loss": 0.0165,
|
|
"step": 11040
|
|
},
|
|
{
|
|
"epoch": 79.2831541218638,
|
|
"grad_norm": 0.7602713704109192,
|
|
"learning_rate": 0.00013673881673881673,
|
|
"loss": 0.0159,
|
|
"step": 11060
|
|
},
|
|
{
|
|
"epoch": 79.42652329749104,
|
|
"grad_norm": 0.8371136784553528,
|
|
"learning_rate": 0.00013662337662337663,
|
|
"loss": 0.0171,
|
|
"step": 11080
|
|
},
|
|
{
|
|
"epoch": 79.56989247311827,
|
|
"grad_norm": 0.8401156067848206,
|
|
"learning_rate": 0.0001365079365079365,
|
|
"loss": 0.0174,
|
|
"step": 11100
|
|
},
|
|
{
|
|
"epoch": 79.71326164874552,
|
|
"grad_norm": 2.196991443634033,
|
|
"learning_rate": 0.0001363924963924964,
|
|
"loss": 0.0214,
|
|
"step": 11120
|
|
},
|
|
{
|
|
"epoch": 79.85663082437276,
|
|
"grad_norm": 1.0070806741714478,
|
|
"learning_rate": 0.00013627705627705628,
|
|
"loss": 0.0237,
|
|
"step": 11140
|
|
},
|
|
{
|
|
"epoch": 80.0,
|
|
"grad_norm": 0.8702186346054077,
|
|
"learning_rate": 0.00013616161616161618,
|
|
"loss": 0.0219,
|
|
"step": 11160
|
|
},
|
|
{
|
|
"epoch": 80.14336917562724,
|
|
"grad_norm": 0.9072542190551758,
|
|
"learning_rate": 0.00013604617604617605,
|
|
"loss": 0.0168,
|
|
"step": 11180
|
|
},
|
|
{
|
|
"epoch": 80.28673835125448,
|
|
"grad_norm": 0.9638134837150574,
|
|
"learning_rate": 0.00013593073593073593,
|
|
"loss": 0.0181,
|
|
"step": 11200
|
|
},
|
|
{
|
|
"epoch": 80.43010752688173,
|
|
"grad_norm": 0.8404929041862488,
|
|
"learning_rate": 0.00013581529581529583,
|
|
"loss": 0.0174,
|
|
"step": 11220
|
|
},
|
|
{
|
|
"epoch": 80.57347670250896,
|
|
"grad_norm": 1.2767741680145264,
|
|
"learning_rate": 0.0001356998556998557,
|
|
"loss": 0.0179,
|
|
"step": 11240
|
|
},
|
|
{
|
|
"epoch": 80.7168458781362,
|
|
"grad_norm": 0.7579748630523682,
|
|
"learning_rate": 0.0001355844155844156,
|
|
"loss": 0.0229,
|
|
"step": 11260
|
|
},
|
|
{
|
|
"epoch": 80.86021505376344,
|
|
"grad_norm": 2.516218900680542,
|
|
"learning_rate": 0.00013546897546897548,
|
|
"loss": 0.0186,
|
|
"step": 11280
|
|
},
|
|
{
|
|
"epoch": 81.00358422939068,
|
|
"grad_norm": 0.5348778963088989,
|
|
"learning_rate": 0.00013535353535353538,
|
|
"loss": 0.0198,
|
|
"step": 11300
|
|
},
|
|
{
|
|
"epoch": 81.14695340501792,
|
|
"grad_norm": 1.0473957061767578,
|
|
"learning_rate": 0.00013523809523809525,
|
|
"loss": 0.0152,
|
|
"step": 11320
|
|
},
|
|
{
|
|
"epoch": 81.29032258064517,
|
|
"grad_norm": 0.797150194644928,
|
|
"learning_rate": 0.00013512265512265515,
|
|
"loss": 0.0167,
|
|
"step": 11340
|
|
},
|
|
{
|
|
"epoch": 81.4336917562724,
|
|
"grad_norm": 1.723746418952942,
|
|
"learning_rate": 0.000135007215007215,
|
|
"loss": 0.0193,
|
|
"step": 11360
|
|
},
|
|
{
|
|
"epoch": 81.57706093189964,
|
|
"grad_norm": 2.0251080989837646,
|
|
"learning_rate": 0.0001348917748917749,
|
|
"loss": 0.019,
|
|
"step": 11380
|
|
},
|
|
{
|
|
"epoch": 81.72043010752688,
|
|
"grad_norm": 1.2227544784545898,
|
|
"learning_rate": 0.00013477633477633477,
|
|
"loss": 0.0204,
|
|
"step": 11400
|
|
},
|
|
{
|
|
"epoch": 81.86379928315412,
|
|
"grad_norm": 0.6503992080688477,
|
|
"learning_rate": 0.00013466089466089467,
|
|
"loss": 0.0205,
|
|
"step": 11420
|
|
},
|
|
{
|
|
"epoch": 82.00716845878136,
|
|
"grad_norm": 0.4469417333602905,
|
|
"learning_rate": 0.00013454545454545455,
|
|
"loss": 0.021,
|
|
"step": 11440
|
|
},
|
|
{
|
|
"epoch": 82.15053763440861,
|
|
"grad_norm": 4.052209854125977,
|
|
"learning_rate": 0.00013443001443001445,
|
|
"loss": 0.0132,
|
|
"step": 11460
|
|
},
|
|
{
|
|
"epoch": 82.29390681003584,
|
|
"grad_norm": 2.3262343406677246,
|
|
"learning_rate": 0.00013431457431457432,
|
|
"loss": 0.0151,
|
|
"step": 11480
|
|
},
|
|
{
|
|
"epoch": 82.43727598566308,
|
|
"grad_norm": 1.2867971658706665,
|
|
"learning_rate": 0.00013419913419913422,
|
|
"loss": 0.0187,
|
|
"step": 11500
|
|
},
|
|
{
|
|
"epoch": 82.58064516129032,
|
|
"grad_norm": 1.421526312828064,
|
|
"learning_rate": 0.0001340836940836941,
|
|
"loss": 0.0197,
|
|
"step": 11520
|
|
},
|
|
{
|
|
"epoch": 82.72401433691756,
|
|
"grad_norm": 1.2142455577850342,
|
|
"learning_rate": 0.00013396825396825397,
|
|
"loss": 0.0144,
|
|
"step": 11540
|
|
},
|
|
{
|
|
"epoch": 82.8673835125448,
|
|
"grad_norm": 0.8395231366157532,
|
|
"learning_rate": 0.00013385281385281384,
|
|
"loss": 0.0207,
|
|
"step": 11560
|
|
},
|
|
{
|
|
"epoch": 83.01075268817205,
|
|
"grad_norm": 1.0998722314834595,
|
|
"learning_rate": 0.00013373737373737374,
|
|
"loss": 0.0209,
|
|
"step": 11580
|
|
},
|
|
{
|
|
"epoch": 83.15412186379929,
|
|
"grad_norm": 0.8316681385040283,
|
|
"learning_rate": 0.00013362193362193362,
|
|
"loss": 0.0145,
|
|
"step": 11600
|
|
},
|
|
{
|
|
"epoch": 83.29749103942652,
|
|
"grad_norm": 0.5516606569290161,
|
|
"learning_rate": 0.00013350649350649352,
|
|
"loss": 0.0131,
|
|
"step": 11620
|
|
},
|
|
{
|
|
"epoch": 83.44086021505376,
|
|
"grad_norm": 0.5891528129577637,
|
|
"learning_rate": 0.0001333910533910534,
|
|
"loss": 0.0172,
|
|
"step": 11640
|
|
},
|
|
{
|
|
"epoch": 83.584229390681,
|
|
"grad_norm": 1.044071912765503,
|
|
"learning_rate": 0.0001332756132756133,
|
|
"loss": 0.0156,
|
|
"step": 11660
|
|
},
|
|
{
|
|
"epoch": 83.72759856630825,
|
|
"grad_norm": 1.1857376098632812,
|
|
"learning_rate": 0.00013316017316017316,
|
|
"loss": 0.0161,
|
|
"step": 11680
|
|
},
|
|
{
|
|
"epoch": 83.87096774193549,
|
|
"grad_norm": 0.78326416015625,
|
|
"learning_rate": 0.00013304473304473306,
|
|
"loss": 0.021,
|
|
"step": 11700
|
|
},
|
|
{
|
|
"epoch": 84.01433691756273,
|
|
"grad_norm": 1.841831088066101,
|
|
"learning_rate": 0.00013292929292929294,
|
|
"loss": 0.0207,
|
|
"step": 11720
|
|
},
|
|
{
|
|
"epoch": 84.15770609318996,
|
|
"grad_norm": 0.4260961711406708,
|
|
"learning_rate": 0.0001328138528138528,
|
|
"loss": 0.0146,
|
|
"step": 11740
|
|
},
|
|
{
|
|
"epoch": 84.3010752688172,
|
|
"grad_norm": 0.2751542925834656,
|
|
"learning_rate": 0.0001326984126984127,
|
|
"loss": 0.0155,
|
|
"step": 11760
|
|
},
|
|
{
|
|
"epoch": 84.44444444444444,
|
|
"grad_norm": 0.8890261650085449,
|
|
"learning_rate": 0.00013258297258297259,
|
|
"loss": 0.0117,
|
|
"step": 11780
|
|
},
|
|
{
|
|
"epoch": 84.58781362007169,
|
|
"grad_norm": 0.4691111147403717,
|
|
"learning_rate": 0.00013246753246753249,
|
|
"loss": 0.0156,
|
|
"step": 11800
|
|
},
|
|
{
|
|
"epoch": 84.73118279569893,
|
|
"grad_norm": 1.0129485130310059,
|
|
"learning_rate": 0.00013235209235209236,
|
|
"loss": 0.0159,
|
|
"step": 11820
|
|
},
|
|
{
|
|
"epoch": 84.87455197132617,
|
|
"grad_norm": 1.2997750043869019,
|
|
"learning_rate": 0.00013223665223665226,
|
|
"loss": 0.0179,
|
|
"step": 11840
|
|
},
|
|
{
|
|
"epoch": 85.0179211469534,
|
|
"grad_norm": 1.8712488412857056,
|
|
"learning_rate": 0.00013212121212121213,
|
|
"loss": 0.0175,
|
|
"step": 11860
|
|
},
|
|
{
|
|
"epoch": 85.16129032258064,
|
|
"grad_norm": 1.115181803703308,
|
|
"learning_rate": 0.00013200577200577203,
|
|
"loss": 0.0131,
|
|
"step": 11880
|
|
},
|
|
{
|
|
"epoch": 85.30465949820788,
|
|
"grad_norm": 0.5553827285766602,
|
|
"learning_rate": 0.00013189033189033188,
|
|
"loss": 0.014,
|
|
"step": 11900
|
|
},
|
|
{
|
|
"epoch": 85.44802867383513,
|
|
"grad_norm": 4.3839921951293945,
|
|
"learning_rate": 0.00013177489177489178,
|
|
"loss": 0.0154,
|
|
"step": 11920
|
|
},
|
|
{
|
|
"epoch": 85.59139784946237,
|
|
"grad_norm": 0.6795554161071777,
|
|
"learning_rate": 0.00013165945165945165,
|
|
"loss": 0.0135,
|
|
"step": 11940
|
|
},
|
|
{
|
|
"epoch": 85.73476702508961,
|
|
"grad_norm": 1.588478446006775,
|
|
"learning_rate": 0.00013154401154401156,
|
|
"loss": 0.0193,
|
|
"step": 11960
|
|
},
|
|
{
|
|
"epoch": 85.87813620071685,
|
|
"grad_norm": 0.7655717134475708,
|
|
"learning_rate": 0.00013142857142857143,
|
|
"loss": 0.018,
|
|
"step": 11980
|
|
},
|
|
{
|
|
"epoch": 86.02150537634408,
|
|
"grad_norm": 0.6319197416305542,
|
|
"learning_rate": 0.00013131313131313133,
|
|
"loss": 0.0179,
|
|
"step": 12000
|
|
},
|
|
{
|
|
"epoch": 86.16487455197132,
|
|
"grad_norm": 2.254366397857666,
|
|
"learning_rate": 0.0001311976911976912,
|
|
"loss": 0.0125,
|
|
"step": 12020
|
|
},
|
|
{
|
|
"epoch": 86.30824372759857,
|
|
"grad_norm": 1.710524082183838,
|
|
"learning_rate": 0.0001310822510822511,
|
|
"loss": 0.0157,
|
|
"step": 12040
|
|
},
|
|
{
|
|
"epoch": 86.45161290322581,
|
|
"grad_norm": 0.9524160027503967,
|
|
"learning_rate": 0.00013096681096681098,
|
|
"loss": 0.0167,
|
|
"step": 12060
|
|
},
|
|
{
|
|
"epoch": 86.59498207885305,
|
|
"grad_norm": 1.7875772714614868,
|
|
"learning_rate": 0.00013085137085137085,
|
|
"loss": 0.0168,
|
|
"step": 12080
|
|
},
|
|
{
|
|
"epoch": 86.73835125448029,
|
|
"grad_norm": 0.5251893997192383,
|
|
"learning_rate": 0.00013073593073593072,
|
|
"loss": 0.0173,
|
|
"step": 12100
|
|
},
|
|
{
|
|
"epoch": 86.88172043010752,
|
|
"grad_norm": 1.2652454376220703,
|
|
"learning_rate": 0.00013062049062049062,
|
|
"loss": 0.0196,
|
|
"step": 12120
|
|
},
|
|
{
|
|
"epoch": 87.02508960573476,
|
|
"grad_norm": 0.43472713232040405,
|
|
"learning_rate": 0.0001305050505050505,
|
|
"loss": 0.0178,
|
|
"step": 12140
|
|
},
|
|
{
|
|
"epoch": 87.168458781362,
|
|
"grad_norm": 0.7071630358695984,
|
|
"learning_rate": 0.0001303896103896104,
|
|
"loss": 0.011,
|
|
"step": 12160
|
|
},
|
|
{
|
|
"epoch": 87.31182795698925,
|
|
"grad_norm": 2.188324451446533,
|
|
"learning_rate": 0.00013027417027417027,
|
|
"loss": 0.0147,
|
|
"step": 12180
|
|
},
|
|
{
|
|
"epoch": 87.45519713261649,
|
|
"grad_norm": 0.4888250231742859,
|
|
"learning_rate": 0.00013015873015873017,
|
|
"loss": 0.015,
|
|
"step": 12200
|
|
},
|
|
{
|
|
"epoch": 87.59856630824373,
|
|
"grad_norm": 2.0377535820007324,
|
|
"learning_rate": 0.00013004329004329005,
|
|
"loss": 0.0195,
|
|
"step": 12220
|
|
},
|
|
{
|
|
"epoch": 87.74193548387096,
|
|
"grad_norm": 0.30090615153312683,
|
|
"learning_rate": 0.00012992784992784995,
|
|
"loss": 0.0166,
|
|
"step": 12240
|
|
},
|
|
{
|
|
"epoch": 87.8853046594982,
|
|
"grad_norm": 1.111887812614441,
|
|
"learning_rate": 0.00012981240981240982,
|
|
"loss": 0.0192,
|
|
"step": 12260
|
|
},
|
|
{
|
|
"epoch": 88.02867383512545,
|
|
"grad_norm": 1.8173977136611938,
|
|
"learning_rate": 0.0001296969696969697,
|
|
"loss": 0.0187,
|
|
"step": 12280
|
|
},
|
|
{
|
|
"epoch": 88.17204301075269,
|
|
"grad_norm": 1.127140760421753,
|
|
"learning_rate": 0.0001295815295815296,
|
|
"loss": 0.015,
|
|
"step": 12300
|
|
},
|
|
{
|
|
"epoch": 88.31541218637993,
|
|
"grad_norm": 0.8404380083084106,
|
|
"learning_rate": 0.00012946608946608947,
|
|
"loss": 0.0148,
|
|
"step": 12320
|
|
},
|
|
{
|
|
"epoch": 88.45878136200717,
|
|
"grad_norm": 3.1833529472351074,
|
|
"learning_rate": 0.00012935064935064937,
|
|
"loss": 0.0186,
|
|
"step": 12340
|
|
},
|
|
{
|
|
"epoch": 88.60215053763442,
|
|
"grad_norm": 0.8558987379074097,
|
|
"learning_rate": 0.00012923520923520924,
|
|
"loss": 0.0141,
|
|
"step": 12360
|
|
},
|
|
{
|
|
"epoch": 88.74551971326164,
|
|
"grad_norm": 0.6127412915229797,
|
|
"learning_rate": 0.00012911976911976914,
|
|
"loss": 0.0176,
|
|
"step": 12380
|
|
},
|
|
{
|
|
"epoch": 88.88888888888889,
|
|
"grad_norm": 1.3192341327667236,
|
|
"learning_rate": 0.00012900432900432902,
|
|
"loss": 0.0184,
|
|
"step": 12400
|
|
},
|
|
{
|
|
"epoch": 89.03225806451613,
|
|
"grad_norm": 1.5425376892089844,
|
|
"learning_rate": 0.00012888888888888892,
|
|
"loss": 0.0153,
|
|
"step": 12420
|
|
},
|
|
{
|
|
"epoch": 89.17562724014337,
|
|
"grad_norm": 0.9999290108680725,
|
|
"learning_rate": 0.0001287734487734488,
|
|
"loss": 0.0128,
|
|
"step": 12440
|
|
},
|
|
{
|
|
"epoch": 89.31899641577061,
|
|
"grad_norm": 2.734896421432495,
|
|
"learning_rate": 0.00012865800865800866,
|
|
"loss": 0.0147,
|
|
"step": 12460
|
|
},
|
|
{
|
|
"epoch": 89.46236559139786,
|
|
"grad_norm": 0.6258257031440735,
|
|
"learning_rate": 0.00012854256854256854,
|
|
"loss": 0.0159,
|
|
"step": 12480
|
|
},
|
|
{
|
|
"epoch": 89.60573476702508,
|
|
"grad_norm": 1.4892915487289429,
|
|
"learning_rate": 0.00012842712842712844,
|
|
"loss": 0.0162,
|
|
"step": 12500
|
|
},
|
|
{
|
|
"epoch": 89.74910394265233,
|
|
"grad_norm": 1.593381404876709,
|
|
"learning_rate": 0.0001283116883116883,
|
|
"loss": 0.0187,
|
|
"step": 12520
|
|
},
|
|
{
|
|
"epoch": 89.89247311827957,
|
|
"grad_norm": 1.5302436351776123,
|
|
"learning_rate": 0.0001281962481962482,
|
|
"loss": 0.0175,
|
|
"step": 12540
|
|
},
|
|
{
|
|
"epoch": 90.03584229390681,
|
|
"grad_norm": 0.5006189942359924,
|
|
"learning_rate": 0.00012808080808080809,
|
|
"loss": 0.0168,
|
|
"step": 12560
|
|
},
|
|
{
|
|
"epoch": 90.17921146953405,
|
|
"grad_norm": 0.46206581592559814,
|
|
"learning_rate": 0.00012796536796536799,
|
|
"loss": 0.0161,
|
|
"step": 12580
|
|
},
|
|
{
|
|
"epoch": 90.3225806451613,
|
|
"grad_norm": 0.9318781495094299,
|
|
"learning_rate": 0.00012784992784992786,
|
|
"loss": 0.0138,
|
|
"step": 12600
|
|
},
|
|
{
|
|
"epoch": 90.46594982078852,
|
|
"grad_norm": 3.1766090393066406,
|
|
"learning_rate": 0.00012773448773448776,
|
|
"loss": 0.0181,
|
|
"step": 12620
|
|
},
|
|
{
|
|
"epoch": 90.60931899641577,
|
|
"grad_norm": 0.80521559715271,
|
|
"learning_rate": 0.0001276190476190476,
|
|
"loss": 0.0162,
|
|
"step": 12640
|
|
},
|
|
{
|
|
"epoch": 90.75268817204301,
|
|
"grad_norm": 0.8221294283866882,
|
|
"learning_rate": 0.0001275036075036075,
|
|
"loss": 0.017,
|
|
"step": 12660
|
|
},
|
|
{
|
|
"epoch": 90.89605734767025,
|
|
"grad_norm": 0.4975380599498749,
|
|
"learning_rate": 0.00012738816738816738,
|
|
"loss": 0.02,
|
|
"step": 12680
|
|
},
|
|
{
|
|
"epoch": 91.0394265232975,
|
|
"grad_norm": 1.241468071937561,
|
|
"learning_rate": 0.00012727272727272728,
|
|
"loss": 0.0187,
|
|
"step": 12700
|
|
},
|
|
{
|
|
"epoch": 91.18279569892474,
|
|
"grad_norm": 2.9104273319244385,
|
|
"learning_rate": 0.00012715728715728715,
|
|
"loss": 0.0148,
|
|
"step": 12720
|
|
},
|
|
{
|
|
"epoch": 91.32616487455198,
|
|
"grad_norm": 0.4875599443912506,
|
|
"learning_rate": 0.00012704184704184706,
|
|
"loss": 0.0162,
|
|
"step": 12740
|
|
},
|
|
{
|
|
"epoch": 91.4695340501792,
|
|
"grad_norm": 0.7503949403762817,
|
|
"learning_rate": 0.00012692640692640693,
|
|
"loss": 0.0172,
|
|
"step": 12760
|
|
},
|
|
{
|
|
"epoch": 91.61290322580645,
|
|
"grad_norm": 1.3120399713516235,
|
|
"learning_rate": 0.00012681096681096683,
|
|
"loss": 0.0159,
|
|
"step": 12780
|
|
},
|
|
{
|
|
"epoch": 91.75627240143369,
|
|
"grad_norm": 1.3051767349243164,
|
|
"learning_rate": 0.0001266955266955267,
|
|
"loss": 0.017,
|
|
"step": 12800
|
|
},
|
|
{
|
|
"epoch": 91.89964157706093,
|
|
"grad_norm": 1.0927627086639404,
|
|
"learning_rate": 0.00012658008658008658,
|
|
"loss": 0.0189,
|
|
"step": 12820
|
|
},
|
|
{
|
|
"epoch": 92.04301075268818,
|
|
"grad_norm": 0.5616310834884644,
|
|
"learning_rate": 0.00012646464646464645,
|
|
"loss": 0.016,
|
|
"step": 12840
|
|
},
|
|
{
|
|
"epoch": 92.18637992831542,
|
|
"grad_norm": 1.0195703506469727,
|
|
"learning_rate": 0.00012634920634920635,
|
|
"loss": 0.0129,
|
|
"step": 12860
|
|
},
|
|
{
|
|
"epoch": 92.32974910394265,
|
|
"grad_norm": 0.35718274116516113,
|
|
"learning_rate": 0.00012623376623376622,
|
|
"loss": 0.0118,
|
|
"step": 12880
|
|
},
|
|
{
|
|
"epoch": 92.47311827956989,
|
|
"grad_norm": 0.5305570363998413,
|
|
"learning_rate": 0.00012611832611832612,
|
|
"loss": 0.0145,
|
|
"step": 12900
|
|
},
|
|
{
|
|
"epoch": 92.61648745519713,
|
|
"grad_norm": 1.3988255262374878,
|
|
"learning_rate": 0.000126002886002886,
|
|
"loss": 0.0176,
|
|
"step": 12920
|
|
},
|
|
{
|
|
"epoch": 92.75985663082437,
|
|
"grad_norm": 1.0325313806533813,
|
|
"learning_rate": 0.0001258874458874459,
|
|
"loss": 0.019,
|
|
"step": 12940
|
|
},
|
|
{
|
|
"epoch": 92.90322580645162,
|
|
"grad_norm": 0.871889054775238,
|
|
"learning_rate": 0.00012577200577200577,
|
|
"loss": 0.0169,
|
|
"step": 12960
|
|
},
|
|
{
|
|
"epoch": 93.04659498207886,
|
|
"grad_norm": 0.7299668192863464,
|
|
"learning_rate": 0.00012565656565656567,
|
|
"loss": 0.0173,
|
|
"step": 12980
|
|
},
|
|
{
|
|
"epoch": 93.18996415770609,
|
|
"grad_norm": 0.7993871569633484,
|
|
"learning_rate": 0.00012554112554112555,
|
|
"loss": 0.0143,
|
|
"step": 13000
|
|
},
|
|
{
|
|
"epoch": 93.33333333333333,
|
|
"grad_norm": 0.5431854128837585,
|
|
"learning_rate": 0.00012542568542568542,
|
|
"loss": 0.0119,
|
|
"step": 13020
|
|
},
|
|
{
|
|
"epoch": 93.47670250896057,
|
|
"grad_norm": 0.27925625443458557,
|
|
"learning_rate": 0.00012531024531024532,
|
|
"loss": 0.0152,
|
|
"step": 13040
|
|
},
|
|
{
|
|
"epoch": 93.62007168458781,
|
|
"grad_norm": 0.7711465358734131,
|
|
"learning_rate": 0.0001251948051948052,
|
|
"loss": 0.0148,
|
|
"step": 13060
|
|
},
|
|
{
|
|
"epoch": 93.76344086021506,
|
|
"grad_norm": 0.9410423040390015,
|
|
"learning_rate": 0.0001250793650793651,
|
|
"loss": 0.0177,
|
|
"step": 13080
|
|
},
|
|
{
|
|
"epoch": 93.9068100358423,
|
|
"grad_norm": 0.6102956533432007,
|
|
"learning_rate": 0.00012496392496392497,
|
|
"loss": 0.0159,
|
|
"step": 13100
|
|
},
|
|
{
|
|
"epoch": 94.05017921146954,
|
|
"grad_norm": 0.8722354173660278,
|
|
"learning_rate": 0.00012484848484848487,
|
|
"loss": 0.0179,
|
|
"step": 13120
|
|
},
|
|
{
|
|
"epoch": 94.19354838709677,
|
|
"grad_norm": 1.3206337690353394,
|
|
"learning_rate": 0.00012473304473304474,
|
|
"loss": 0.0124,
|
|
"step": 13140
|
|
},
|
|
{
|
|
"epoch": 94.33691756272401,
|
|
"grad_norm": 0.6697111129760742,
|
|
"learning_rate": 0.00012461760461760464,
|
|
"loss": 0.0158,
|
|
"step": 13160
|
|
},
|
|
{
|
|
"epoch": 94.48028673835125,
|
|
"grad_norm": 0.6329695582389832,
|
|
"learning_rate": 0.0001245021645021645,
|
|
"loss": 0.0137,
|
|
"step": 13180
|
|
},
|
|
{
|
|
"epoch": 94.6236559139785,
|
|
"grad_norm": 0.5048131346702576,
|
|
"learning_rate": 0.0001243867243867244,
|
|
"loss": 0.0147,
|
|
"step": 13200
|
|
},
|
|
{
|
|
"epoch": 94.76702508960574,
|
|
"grad_norm": 1.4308075904846191,
|
|
"learning_rate": 0.00012427128427128426,
|
|
"loss": 0.019,
|
|
"step": 13220
|
|
},
|
|
{
|
|
"epoch": 94.91039426523298,
|
|
"grad_norm": 1.982608437538147,
|
|
"learning_rate": 0.00012415584415584416,
|
|
"loss": 0.0166,
|
|
"step": 13240
|
|
},
|
|
{
|
|
"epoch": 95.05376344086021,
|
|
"grad_norm": 0.6681750416755676,
|
|
"learning_rate": 0.00012404040404040404,
|
|
"loss": 0.0157,
|
|
"step": 13260
|
|
},
|
|
{
|
|
"epoch": 95.19713261648745,
|
|
"grad_norm": 1.096899390220642,
|
|
"learning_rate": 0.00012392496392496394,
|
|
"loss": 0.0125,
|
|
"step": 13280
|
|
},
|
|
{
|
|
"epoch": 95.3405017921147,
|
|
"grad_norm": 0.9610181450843811,
|
|
"learning_rate": 0.0001238095238095238,
|
|
"loss": 0.0124,
|
|
"step": 13300
|
|
},
|
|
{
|
|
"epoch": 95.48387096774194,
|
|
"grad_norm": 0.4483550488948822,
|
|
"learning_rate": 0.0001236940836940837,
|
|
"loss": 0.0153,
|
|
"step": 13320
|
|
},
|
|
{
|
|
"epoch": 95.62724014336918,
|
|
"grad_norm": 1.6430498361587524,
|
|
"learning_rate": 0.00012357864357864359,
|
|
"loss": 0.0159,
|
|
"step": 13340
|
|
},
|
|
{
|
|
"epoch": 95.77060931899642,
|
|
"grad_norm": 0.6202759742736816,
|
|
"learning_rate": 0.00012346320346320346,
|
|
"loss": 0.0158,
|
|
"step": 13360
|
|
},
|
|
{
|
|
"epoch": 95.91397849462365,
|
|
"grad_norm": 0.6338889002799988,
|
|
"learning_rate": 0.00012334776334776333,
|
|
"loss": 0.0141,
|
|
"step": 13380
|
|
},
|
|
{
|
|
"epoch": 96.05734767025089,
|
|
"grad_norm": 0.5451236963272095,
|
|
"learning_rate": 0.00012323232323232323,
|
|
"loss": 0.0143,
|
|
"step": 13400
|
|
},
|
|
{
|
|
"epoch": 96.20071684587813,
|
|
"grad_norm": 0.7059997916221619,
|
|
"learning_rate": 0.0001231168831168831,
|
|
"loss": 0.0117,
|
|
"step": 13420
|
|
},
|
|
{
|
|
"epoch": 96.34408602150538,
|
|
"grad_norm": 2.3459508419036865,
|
|
"learning_rate": 0.000123001443001443,
|
|
"loss": 0.0156,
|
|
"step": 13440
|
|
},
|
|
{
|
|
"epoch": 96.48745519713262,
|
|
"grad_norm": 1.0656720399856567,
|
|
"learning_rate": 0.00012288600288600288,
|
|
"loss": 0.0154,
|
|
"step": 13460
|
|
},
|
|
{
|
|
"epoch": 96.63082437275986,
|
|
"grad_norm": 3.8411576747894287,
|
|
"learning_rate": 0.00012277056277056278,
|
|
"loss": 0.0182,
|
|
"step": 13480
|
|
},
|
|
{
|
|
"epoch": 96.7741935483871,
|
|
"grad_norm": 3.2692503929138184,
|
|
"learning_rate": 0.00012265512265512266,
|
|
"loss": 0.0152,
|
|
"step": 13500
|
|
},
|
|
{
|
|
"epoch": 96.91756272401433,
|
|
"grad_norm": 1.409364938735962,
|
|
"learning_rate": 0.00012253968253968256,
|
|
"loss": 0.02,
|
|
"step": 13520
|
|
},
|
|
{
|
|
"epoch": 97.06093189964157,
|
|
"grad_norm": 0.6051990389823914,
|
|
"learning_rate": 0.00012242424242424243,
|
|
"loss": 0.013,
|
|
"step": 13540
|
|
},
|
|
{
|
|
"epoch": 97.20430107526882,
|
|
"grad_norm": 0.6656172275543213,
|
|
"learning_rate": 0.0001223088023088023,
|
|
"loss": 0.0107,
|
|
"step": 13560
|
|
},
|
|
{
|
|
"epoch": 97.34767025089606,
|
|
"grad_norm": 0.4472416639328003,
|
|
"learning_rate": 0.0001221933621933622,
|
|
"loss": 0.0113,
|
|
"step": 13580
|
|
},
|
|
{
|
|
"epoch": 97.4910394265233,
|
|
"grad_norm": 1.116843581199646,
|
|
"learning_rate": 0.00012207792207792208,
|
|
"loss": 0.0134,
|
|
"step": 13600
|
|
},
|
|
{
|
|
"epoch": 97.63440860215054,
|
|
"grad_norm": 0.8457857370376587,
|
|
"learning_rate": 0.00012196248196248196,
|
|
"loss": 0.017,
|
|
"step": 13620
|
|
},
|
|
{
|
|
"epoch": 97.77777777777777,
|
|
"grad_norm": 1.092576026916504,
|
|
"learning_rate": 0.00012184704184704185,
|
|
"loss": 0.0156,
|
|
"step": 13640
|
|
},
|
|
{
|
|
"epoch": 97.92114695340501,
|
|
"grad_norm": 0.45171117782592773,
|
|
"learning_rate": 0.00012173160173160174,
|
|
"loss": 0.0139,
|
|
"step": 13660
|
|
},
|
|
{
|
|
"epoch": 98.06451612903226,
|
|
"grad_norm": 1.2349461317062378,
|
|
"learning_rate": 0.00012161616161616162,
|
|
"loss": 0.0144,
|
|
"step": 13680
|
|
},
|
|
{
|
|
"epoch": 98.2078853046595,
|
|
"grad_norm": 1.1139470338821411,
|
|
"learning_rate": 0.00012150072150072151,
|
|
"loss": 0.0113,
|
|
"step": 13700
|
|
},
|
|
{
|
|
"epoch": 98.35125448028674,
|
|
"grad_norm": 0.3060425817966461,
|
|
"learning_rate": 0.0001213852813852814,
|
|
"loss": 0.0148,
|
|
"step": 13720
|
|
},
|
|
{
|
|
"epoch": 98.49462365591398,
|
|
"grad_norm": 0.7443995475769043,
|
|
"learning_rate": 0.00012126984126984127,
|
|
"loss": 0.0149,
|
|
"step": 13740
|
|
},
|
|
{
|
|
"epoch": 98.63799283154123,
|
|
"grad_norm": 0.3905450403690338,
|
|
"learning_rate": 0.00012115440115440116,
|
|
"loss": 0.0147,
|
|
"step": 13760
|
|
},
|
|
{
|
|
"epoch": 98.78136200716845,
|
|
"grad_norm": 0.6178728342056274,
|
|
"learning_rate": 0.00012103896103896105,
|
|
"loss": 0.0165,
|
|
"step": 13780
|
|
},
|
|
{
|
|
"epoch": 98.9247311827957,
|
|
"grad_norm": 0.6815348267555237,
|
|
"learning_rate": 0.00012092352092352093,
|
|
"loss": 0.0115,
|
|
"step": 13800
|
|
},
|
|
{
|
|
"epoch": 99.06810035842294,
|
|
"grad_norm": 0.34794047474861145,
|
|
"learning_rate": 0.00012080808080808082,
|
|
"loss": 0.0151,
|
|
"step": 13820
|
|
},
|
|
{
|
|
"epoch": 99.21146953405018,
|
|
"grad_norm": 1.1684291362762451,
|
|
"learning_rate": 0.00012069264069264071,
|
|
"loss": 0.013,
|
|
"step": 13840
|
|
},
|
|
{
|
|
"epoch": 99.35483870967742,
|
|
"grad_norm": 0.4920693635940552,
|
|
"learning_rate": 0.0001205772005772006,
|
|
"loss": 0.0125,
|
|
"step": 13860
|
|
},
|
|
{
|
|
"epoch": 99.49820788530467,
|
|
"grad_norm": 0.8987365961074829,
|
|
"learning_rate": 0.00012046176046176048,
|
|
"loss": 0.0142,
|
|
"step": 13880
|
|
},
|
|
{
|
|
"epoch": 99.6415770609319,
|
|
"grad_norm": 0.49470409750938416,
|
|
"learning_rate": 0.00012034632034632037,
|
|
"loss": 0.0148,
|
|
"step": 13900
|
|
},
|
|
{
|
|
"epoch": 99.78494623655914,
|
|
"grad_norm": 0.33893218636512756,
|
|
"learning_rate": 0.00012023088023088023,
|
|
"loss": 0.0162,
|
|
"step": 13920
|
|
},
|
|
{
|
|
"epoch": 99.92831541218638,
|
|
"grad_norm": 1.1363176107406616,
|
|
"learning_rate": 0.00012011544011544012,
|
|
"loss": 0.0135,
|
|
"step": 13940
|
|
},
|
|
{
|
|
"epoch": 100.07168458781362,
|
|
"grad_norm": 0.8836669325828552,
|
|
"learning_rate": 0.00012,
|
|
"loss": 0.0124,
|
|
"step": 13960
|
|
},
|
|
{
|
|
"epoch": 100.21505376344086,
|
|
"grad_norm": 0.31579235196113586,
|
|
"learning_rate": 0.00011988455988455989,
|
|
"loss": 0.0112,
|
|
"step": 13980
|
|
},
|
|
{
|
|
"epoch": 100.3584229390681,
|
|
"grad_norm": 0.5097656846046448,
|
|
"learning_rate": 0.00011976911976911978,
|
|
"loss": 0.0126,
|
|
"step": 14000
|
|
},
|
|
{
|
|
"epoch": 100.50179211469533,
|
|
"grad_norm": 1.554952621459961,
|
|
"learning_rate": 0.00011965367965367966,
|
|
"loss": 0.0113,
|
|
"step": 14020
|
|
},
|
|
{
|
|
"epoch": 100.64516129032258,
|
|
"grad_norm": 0.4777323603630066,
|
|
"learning_rate": 0.00011953823953823955,
|
|
"loss": 0.013,
|
|
"step": 14040
|
|
},
|
|
{
|
|
"epoch": 100.78853046594982,
|
|
"grad_norm": 0.7135176062583923,
|
|
"learning_rate": 0.00011942279942279944,
|
|
"loss": 0.016,
|
|
"step": 14060
|
|
},
|
|
{
|
|
"epoch": 100.93189964157706,
|
|
"grad_norm": 1.4053717851638794,
|
|
"learning_rate": 0.00011930735930735933,
|
|
"loss": 0.0175,
|
|
"step": 14080
|
|
},
|
|
{
|
|
"epoch": 101.0752688172043,
|
|
"grad_norm": 0.2957761287689209,
|
|
"learning_rate": 0.00011919191919191919,
|
|
"loss": 0.0103,
|
|
"step": 14100
|
|
},
|
|
{
|
|
"epoch": 101.21863799283155,
|
|
"grad_norm": 0.4688275456428528,
|
|
"learning_rate": 0.00011907647907647907,
|
|
"loss": 0.0108,
|
|
"step": 14120
|
|
},
|
|
{
|
|
"epoch": 101.36200716845877,
|
|
"grad_norm": 0.5214554667472839,
|
|
"learning_rate": 0.00011896103896103896,
|
|
"loss": 0.0117,
|
|
"step": 14140
|
|
},
|
|
{
|
|
"epoch": 101.50537634408602,
|
|
"grad_norm": 1.1007990837097168,
|
|
"learning_rate": 0.00011884559884559885,
|
|
"loss": 0.0131,
|
|
"step": 14160
|
|
},
|
|
{
|
|
"epoch": 101.64874551971326,
|
|
"grad_norm": 0.45049968361854553,
|
|
"learning_rate": 0.00011873015873015873,
|
|
"loss": 0.014,
|
|
"step": 14180
|
|
},
|
|
{
|
|
"epoch": 101.7921146953405,
|
|
"grad_norm": 0.5373572707176208,
|
|
"learning_rate": 0.00011861471861471862,
|
|
"loss": 0.0151,
|
|
"step": 14200
|
|
},
|
|
{
|
|
"epoch": 101.93548387096774,
|
|
"grad_norm": 0.9874764680862427,
|
|
"learning_rate": 0.00011849927849927851,
|
|
"loss": 0.0173,
|
|
"step": 14220
|
|
},
|
|
{
|
|
"epoch": 102.07885304659499,
|
|
"grad_norm": 0.35346361994743347,
|
|
"learning_rate": 0.0001183838383838384,
|
|
"loss": 0.0132,
|
|
"step": 14240
|
|
},
|
|
{
|
|
"epoch": 102.22222222222223,
|
|
"grad_norm": 0.9317113757133484,
|
|
"learning_rate": 0.00011826839826839828,
|
|
"loss": 0.0091,
|
|
"step": 14260
|
|
},
|
|
{
|
|
"epoch": 102.36559139784946,
|
|
"grad_norm": 0.4453873038291931,
|
|
"learning_rate": 0.00011815295815295816,
|
|
"loss": 0.0151,
|
|
"step": 14280
|
|
},
|
|
{
|
|
"epoch": 102.5089605734767,
|
|
"grad_norm": 0.3758510649204254,
|
|
"learning_rate": 0.00011803751803751804,
|
|
"loss": 0.0115,
|
|
"step": 14300
|
|
},
|
|
{
|
|
"epoch": 102.65232974910394,
|
|
"grad_norm": 0.5492348074913025,
|
|
"learning_rate": 0.00011792207792207793,
|
|
"loss": 0.0164,
|
|
"step": 14320
|
|
},
|
|
{
|
|
"epoch": 102.79569892473118,
|
|
"grad_norm": 0.48428672552108765,
|
|
"learning_rate": 0.0001178066378066378,
|
|
"loss": 0.0162,
|
|
"step": 14340
|
|
},
|
|
{
|
|
"epoch": 102.93906810035843,
|
|
"grad_norm": 0.8958579897880554,
|
|
"learning_rate": 0.00011769119769119769,
|
|
"loss": 0.0133,
|
|
"step": 14360
|
|
},
|
|
{
|
|
"epoch": 103.08243727598567,
|
|
"grad_norm": 0.705040454864502,
|
|
"learning_rate": 0.00011757575757575758,
|
|
"loss": 0.0126,
|
|
"step": 14380
|
|
},
|
|
{
|
|
"epoch": 103.2258064516129,
|
|
"grad_norm": 0.8866944313049316,
|
|
"learning_rate": 0.00011746031746031746,
|
|
"loss": 0.0116,
|
|
"step": 14400
|
|
},
|
|
{
|
|
"epoch": 103.36917562724014,
|
|
"grad_norm": 0.6136330366134644,
|
|
"learning_rate": 0.00011734487734487735,
|
|
"loss": 0.0135,
|
|
"step": 14420
|
|
},
|
|
{
|
|
"epoch": 103.51254480286738,
|
|
"grad_norm": 1.5731008052825928,
|
|
"learning_rate": 0.00011722943722943724,
|
|
"loss": 0.0136,
|
|
"step": 14440
|
|
},
|
|
{
|
|
"epoch": 103.65591397849462,
|
|
"grad_norm": 1.265551209449768,
|
|
"learning_rate": 0.00011711399711399711,
|
|
"loss": 0.012,
|
|
"step": 14460
|
|
},
|
|
{
|
|
"epoch": 103.79928315412187,
|
|
"grad_norm": 0.4641808271408081,
|
|
"learning_rate": 0.000116998556998557,
|
|
"loss": 0.0142,
|
|
"step": 14480
|
|
},
|
|
{
|
|
"epoch": 103.94265232974911,
|
|
"grad_norm": 1.5429835319519043,
|
|
"learning_rate": 0.00011688311688311689,
|
|
"loss": 0.0138,
|
|
"step": 14500
|
|
},
|
|
{
|
|
"epoch": 104.08602150537635,
|
|
"grad_norm": 6.428526401519775,
|
|
"learning_rate": 0.00011676767676767677,
|
|
"loss": 0.0176,
|
|
"step": 14520
|
|
},
|
|
{
|
|
"epoch": 104.22939068100358,
|
|
"grad_norm": 0.5054616332054138,
|
|
"learning_rate": 0.00011665223665223666,
|
|
"loss": 0.01,
|
|
"step": 14540
|
|
},
|
|
{
|
|
"epoch": 104.37275985663082,
|
|
"grad_norm": 0.7927387952804565,
|
|
"learning_rate": 0.00011653679653679655,
|
|
"loss": 0.0129,
|
|
"step": 14560
|
|
},
|
|
{
|
|
"epoch": 104.51612903225806,
|
|
"grad_norm": 0.763381838798523,
|
|
"learning_rate": 0.00011642135642135643,
|
|
"loss": 0.0116,
|
|
"step": 14580
|
|
},
|
|
{
|
|
"epoch": 104.6594982078853,
|
|
"grad_norm": 0.696645200252533,
|
|
"learning_rate": 0.00011630591630591632,
|
|
"loss": 0.0138,
|
|
"step": 14600
|
|
},
|
|
{
|
|
"epoch": 104.80286738351255,
|
|
"grad_norm": 0.8061676621437073,
|
|
"learning_rate": 0.00011619047619047621,
|
|
"loss": 0.015,
|
|
"step": 14620
|
|
},
|
|
{
|
|
"epoch": 104.94623655913979,
|
|
"grad_norm": 0.5591301918029785,
|
|
"learning_rate": 0.00011607503607503607,
|
|
"loss": 0.0188,
|
|
"step": 14640
|
|
},
|
|
{
|
|
"epoch": 105.08960573476702,
|
|
"grad_norm": 0.8842107057571411,
|
|
"learning_rate": 0.00011595959595959596,
|
|
"loss": 0.0149,
|
|
"step": 14660
|
|
},
|
|
{
|
|
"epoch": 105.23297491039426,
|
|
"grad_norm": 1.3089277744293213,
|
|
"learning_rate": 0.00011584415584415584,
|
|
"loss": 0.0119,
|
|
"step": 14680
|
|
},
|
|
{
|
|
"epoch": 105.3763440860215,
|
|
"grad_norm": 0.8796935677528381,
|
|
"learning_rate": 0.00011572871572871573,
|
|
"loss": 0.0125,
|
|
"step": 14700
|
|
},
|
|
{
|
|
"epoch": 105.51971326164875,
|
|
"grad_norm": 0.3819722831249237,
|
|
"learning_rate": 0.00011561327561327562,
|
|
"loss": 0.0136,
|
|
"step": 14720
|
|
},
|
|
{
|
|
"epoch": 105.66308243727599,
|
|
"grad_norm": 0.7016351222991943,
|
|
"learning_rate": 0.0001154978354978355,
|
|
"loss": 0.0117,
|
|
"step": 14740
|
|
},
|
|
{
|
|
"epoch": 105.80645161290323,
|
|
"grad_norm": 0.5755144953727722,
|
|
"learning_rate": 0.00011538239538239539,
|
|
"loss": 0.0138,
|
|
"step": 14760
|
|
},
|
|
{
|
|
"epoch": 105.94982078853046,
|
|
"grad_norm": 0.44824662804603577,
|
|
"learning_rate": 0.00011526695526695528,
|
|
"loss": 0.0141,
|
|
"step": 14780
|
|
},
|
|
{
|
|
"epoch": 106.0931899641577,
|
|
"grad_norm": 0.17359097301959991,
|
|
"learning_rate": 0.00011515151515151516,
|
|
"loss": 0.012,
|
|
"step": 14800
|
|
},
|
|
{
|
|
"epoch": 106.23655913978494,
|
|
"grad_norm": 0.7815229892730713,
|
|
"learning_rate": 0.00011503607503607502,
|
|
"loss": 0.0098,
|
|
"step": 14820
|
|
},
|
|
{
|
|
"epoch": 106.37992831541219,
|
|
"grad_norm": 0.7781772613525391,
|
|
"learning_rate": 0.00011492063492063491,
|
|
"loss": 0.0117,
|
|
"step": 14840
|
|
},
|
|
{
|
|
"epoch": 106.52329749103943,
|
|
"grad_norm": 0.7882605791091919,
|
|
"learning_rate": 0.0001148051948051948,
|
|
"loss": 0.011,
|
|
"step": 14860
|
|
},
|
|
{
|
|
"epoch": 106.66666666666667,
|
|
"grad_norm": 0.6973186731338501,
|
|
"learning_rate": 0.00011468975468975469,
|
|
"loss": 0.015,
|
|
"step": 14880
|
|
},
|
|
{
|
|
"epoch": 106.81003584229391,
|
|
"grad_norm": 0.43788307905197144,
|
|
"learning_rate": 0.00011457431457431457,
|
|
"loss": 0.0141,
|
|
"step": 14900
|
|
},
|
|
{
|
|
"epoch": 106.95340501792114,
|
|
"grad_norm": 0.6005880236625671,
|
|
"learning_rate": 0.00011445887445887446,
|
|
"loss": 0.0135,
|
|
"step": 14920
|
|
},
|
|
{
|
|
"epoch": 107.09677419354838,
|
|
"grad_norm": 0.5299505591392517,
|
|
"learning_rate": 0.00011434343434343435,
|
|
"loss": 0.0102,
|
|
"step": 14940
|
|
},
|
|
{
|
|
"epoch": 107.24014336917563,
|
|
"grad_norm": 0.3653981685638428,
|
|
"learning_rate": 0.00011422799422799423,
|
|
"loss": 0.0105,
|
|
"step": 14960
|
|
},
|
|
{
|
|
"epoch": 107.38351254480287,
|
|
"grad_norm": 0.7234962582588196,
|
|
"learning_rate": 0.00011411255411255412,
|
|
"loss": 0.0142,
|
|
"step": 14980
|
|
},
|
|
{
|
|
"epoch": 107.52688172043011,
|
|
"grad_norm": 1.8402234315872192,
|
|
"learning_rate": 0.00011399711399711401,
|
|
"loss": 0.0107,
|
|
"step": 15000
|
|
},
|
|
{
|
|
"epoch": 107.67025089605735,
|
|
"grad_norm": 0.3934486508369446,
|
|
"learning_rate": 0.00011388167388167388,
|
|
"loss": 0.0136,
|
|
"step": 15020
|
|
},
|
|
{
|
|
"epoch": 107.81362007168458,
|
|
"grad_norm": 0.3657015562057495,
|
|
"learning_rate": 0.00011376623376623377,
|
|
"loss": 0.0137,
|
|
"step": 15040
|
|
},
|
|
{
|
|
"epoch": 107.95698924731182,
|
|
"grad_norm": 0.5700513124465942,
|
|
"learning_rate": 0.00011365079365079366,
|
|
"loss": 0.0119,
|
|
"step": 15060
|
|
},
|
|
{
|
|
"epoch": 108.10035842293907,
|
|
"grad_norm": 0.28370949625968933,
|
|
"learning_rate": 0.00011353535353535354,
|
|
"loss": 0.0132,
|
|
"step": 15080
|
|
},
|
|
{
|
|
"epoch": 108.24372759856631,
|
|
"grad_norm": 1.1346427202224731,
|
|
"learning_rate": 0.00011341991341991343,
|
|
"loss": 0.0163,
|
|
"step": 15100
|
|
},
|
|
{
|
|
"epoch": 108.38709677419355,
|
|
"grad_norm": 1.7276256084442139,
|
|
"learning_rate": 0.00011330447330447332,
|
|
"loss": 0.0129,
|
|
"step": 15120
|
|
},
|
|
{
|
|
"epoch": 108.5304659498208,
|
|
"grad_norm": 0.44223088026046753,
|
|
"learning_rate": 0.0001131890331890332,
|
|
"loss": 0.0115,
|
|
"step": 15140
|
|
},
|
|
{
|
|
"epoch": 108.67383512544802,
|
|
"grad_norm": 0.6865257024765015,
|
|
"learning_rate": 0.00011307359307359309,
|
|
"loss": 0.0175,
|
|
"step": 15160
|
|
},
|
|
{
|
|
"epoch": 108.81720430107526,
|
|
"grad_norm": 0.4634477198123932,
|
|
"learning_rate": 0.00011295815295815298,
|
|
"loss": 0.0134,
|
|
"step": 15180
|
|
},
|
|
{
|
|
"epoch": 108.9605734767025,
|
|
"grad_norm": 1.737423062324524,
|
|
"learning_rate": 0.00011284271284271284,
|
|
"loss": 0.0137,
|
|
"step": 15200
|
|
},
|
|
{
|
|
"epoch": 109.10394265232975,
|
|
"grad_norm": 0.5774843096733093,
|
|
"learning_rate": 0.00011272727272727272,
|
|
"loss": 0.0113,
|
|
"step": 15220
|
|
},
|
|
{
|
|
"epoch": 109.24731182795699,
|
|
"grad_norm": 0.47207480669021606,
|
|
"learning_rate": 0.00011261183261183261,
|
|
"loss": 0.0093,
|
|
"step": 15240
|
|
},
|
|
{
|
|
"epoch": 109.39068100358423,
|
|
"grad_norm": 0.2144831120967865,
|
|
"learning_rate": 0.0001124963924963925,
|
|
"loss": 0.0122,
|
|
"step": 15260
|
|
},
|
|
{
|
|
"epoch": 109.53405017921148,
|
|
"grad_norm": 0.5294771790504456,
|
|
"learning_rate": 0.00011238095238095239,
|
|
"loss": 0.0127,
|
|
"step": 15280
|
|
},
|
|
{
|
|
"epoch": 109.6774193548387,
|
|
"grad_norm": 1.598105788230896,
|
|
"learning_rate": 0.00011226551226551227,
|
|
"loss": 0.0152,
|
|
"step": 15300
|
|
},
|
|
{
|
|
"epoch": 109.82078853046595,
|
|
"grad_norm": 0.500149667263031,
|
|
"learning_rate": 0.00011215007215007216,
|
|
"loss": 0.0119,
|
|
"step": 15320
|
|
},
|
|
{
|
|
"epoch": 109.96415770609319,
|
|
"grad_norm": 0.9765909314155579,
|
|
"learning_rate": 0.00011203463203463205,
|
|
"loss": 0.0143,
|
|
"step": 15340
|
|
},
|
|
{
|
|
"epoch": 110.10752688172043,
|
|
"grad_norm": 0.4274522066116333,
|
|
"learning_rate": 0.00011191919191919193,
|
|
"loss": 0.011,
|
|
"step": 15360
|
|
},
|
|
{
|
|
"epoch": 110.25089605734767,
|
|
"grad_norm": 0.5223116874694824,
|
|
"learning_rate": 0.0001118037518037518,
|
|
"loss": 0.0156,
|
|
"step": 15380
|
|
},
|
|
{
|
|
"epoch": 110.39426523297492,
|
|
"grad_norm": 0.6910545825958252,
|
|
"learning_rate": 0.00011168831168831168,
|
|
"loss": 0.0125,
|
|
"step": 15400
|
|
},
|
|
{
|
|
"epoch": 110.53763440860214,
|
|
"grad_norm": 0.39949220418930054,
|
|
"learning_rate": 0.00011157287157287157,
|
|
"loss": 0.0118,
|
|
"step": 15420
|
|
},
|
|
{
|
|
"epoch": 110.68100358422939,
|
|
"grad_norm": 0.28121069073677063,
|
|
"learning_rate": 0.00011145743145743146,
|
|
"loss": 0.0117,
|
|
"step": 15440
|
|
},
|
|
{
|
|
"epoch": 110.82437275985663,
|
|
"grad_norm": 0.4827168583869934,
|
|
"learning_rate": 0.00011134199134199134,
|
|
"loss": 0.0111,
|
|
"step": 15460
|
|
},
|
|
{
|
|
"epoch": 110.96774193548387,
|
|
"grad_norm": 1.9229991436004639,
|
|
"learning_rate": 0.00011122655122655123,
|
|
"loss": 0.013,
|
|
"step": 15480
|
|
},
|
|
{
|
|
"epoch": 111.11111111111111,
|
|
"grad_norm": 0.5128869414329529,
|
|
"learning_rate": 0.00011111111111111112,
|
|
"loss": 0.0112,
|
|
"step": 15500
|
|
},
|
|
{
|
|
"epoch": 111.25448028673836,
|
|
"grad_norm": 0.4633185565471649,
|
|
"learning_rate": 0.000110995670995671,
|
|
"loss": 0.0119,
|
|
"step": 15520
|
|
},
|
|
{
|
|
"epoch": 111.39784946236558,
|
|
"grad_norm": 1.0069903135299683,
|
|
"learning_rate": 0.00011088023088023089,
|
|
"loss": 0.0119,
|
|
"step": 15540
|
|
},
|
|
{
|
|
"epoch": 111.54121863799283,
|
|
"grad_norm": 0.7252398133277893,
|
|
"learning_rate": 0.00011076479076479076,
|
|
"loss": 0.0098,
|
|
"step": 15560
|
|
},
|
|
{
|
|
"epoch": 111.68458781362007,
|
|
"grad_norm": 0.7625533938407898,
|
|
"learning_rate": 0.00011064935064935065,
|
|
"loss": 0.0146,
|
|
"step": 15580
|
|
},
|
|
{
|
|
"epoch": 111.82795698924731,
|
|
"grad_norm": 2.35369873046875,
|
|
"learning_rate": 0.00011053391053391054,
|
|
"loss": 0.014,
|
|
"step": 15600
|
|
},
|
|
{
|
|
"epoch": 111.97132616487455,
|
|
"grad_norm": 0.6633703708648682,
|
|
"learning_rate": 0.00011041847041847043,
|
|
"loss": 0.014,
|
|
"step": 15620
|
|
},
|
|
{
|
|
"epoch": 112.1146953405018,
|
|
"grad_norm": 0.577873706817627,
|
|
"learning_rate": 0.00011030303030303031,
|
|
"loss": 0.01,
|
|
"step": 15640
|
|
},
|
|
{
|
|
"epoch": 112.25806451612904,
|
|
"grad_norm": 1.2911592721939087,
|
|
"learning_rate": 0.0001101875901875902,
|
|
"loss": 0.0094,
|
|
"step": 15660
|
|
},
|
|
{
|
|
"epoch": 112.40143369175627,
|
|
"grad_norm": 0.7414108514785767,
|
|
"learning_rate": 0.00011007215007215009,
|
|
"loss": 0.0116,
|
|
"step": 15680
|
|
},
|
|
{
|
|
"epoch": 112.54480286738351,
|
|
"grad_norm": 1.0763784646987915,
|
|
"learning_rate": 0.00010995670995670997,
|
|
"loss": 0.0134,
|
|
"step": 15700
|
|
},
|
|
{
|
|
"epoch": 112.68817204301075,
|
|
"grad_norm": 0.8727330565452576,
|
|
"learning_rate": 0.00010984126984126986,
|
|
"loss": 0.0146,
|
|
"step": 15720
|
|
},
|
|
{
|
|
"epoch": 112.831541218638,
|
|
"grad_norm": 0.8735530376434326,
|
|
"learning_rate": 0.00010972582972582972,
|
|
"loss": 0.0133,
|
|
"step": 15740
|
|
},
|
|
{
|
|
"epoch": 112.97491039426524,
|
|
"grad_norm": 1.0024839639663696,
|
|
"learning_rate": 0.00010961038961038961,
|
|
"loss": 0.0146,
|
|
"step": 15760
|
|
},
|
|
{
|
|
"epoch": 113.11827956989248,
|
|
"grad_norm": 0.4074417054653168,
|
|
"learning_rate": 0.0001094949494949495,
|
|
"loss": 0.0104,
|
|
"step": 15780
|
|
},
|
|
{
|
|
"epoch": 113.26164874551971,
|
|
"grad_norm": 0.3941658139228821,
|
|
"learning_rate": 0.00010937950937950938,
|
|
"loss": 0.0121,
|
|
"step": 15800
|
|
},
|
|
{
|
|
"epoch": 113.40501792114695,
|
|
"grad_norm": 0.41017812490463257,
|
|
"learning_rate": 0.00010926406926406927,
|
|
"loss": 0.0098,
|
|
"step": 15820
|
|
},
|
|
{
|
|
"epoch": 113.54838709677419,
|
|
"grad_norm": 0.4483148157596588,
|
|
"learning_rate": 0.00010914862914862916,
|
|
"loss": 0.0103,
|
|
"step": 15840
|
|
},
|
|
{
|
|
"epoch": 113.69175627240143,
|
|
"grad_norm": 0.8790960311889648,
|
|
"learning_rate": 0.00010903318903318904,
|
|
"loss": 0.0117,
|
|
"step": 15860
|
|
},
|
|
{
|
|
"epoch": 113.83512544802868,
|
|
"grad_norm": 0.35701245069503784,
|
|
"learning_rate": 0.00010891774891774893,
|
|
"loss": 0.0125,
|
|
"step": 15880
|
|
},
|
|
{
|
|
"epoch": 113.97849462365592,
|
|
"grad_norm": 0.30300161242485046,
|
|
"learning_rate": 0.00010880230880230882,
|
|
"loss": 0.0138,
|
|
"step": 15900
|
|
},
|
|
{
|
|
"epoch": 114.12186379928315,
|
|
"grad_norm": 0.48139527440071106,
|
|
"learning_rate": 0.00010868686868686868,
|
|
"loss": 0.01,
|
|
"step": 15920
|
|
},
|
|
{
|
|
"epoch": 114.26523297491039,
|
|
"grad_norm": 0.7350351214408875,
|
|
"learning_rate": 0.00010857142857142856,
|
|
"loss": 0.0081,
|
|
"step": 15940
|
|
},
|
|
{
|
|
"epoch": 114.40860215053763,
|
|
"grad_norm": 0.19255736470222473,
|
|
"learning_rate": 0.00010845598845598845,
|
|
"loss": 0.0119,
|
|
"step": 15960
|
|
},
|
|
{
|
|
"epoch": 114.55197132616487,
|
|
"grad_norm": 0.902733325958252,
|
|
"learning_rate": 0.00010834054834054834,
|
|
"loss": 0.012,
|
|
"step": 15980
|
|
},
|
|
{
|
|
"epoch": 114.69534050179212,
|
|
"grad_norm": 2.632317066192627,
|
|
"learning_rate": 0.00010822510822510823,
|
|
"loss": 0.0123,
|
|
"step": 16000
|
|
},
|
|
{
|
|
"epoch": 114.83870967741936,
|
|
"grad_norm": 0.21073153614997864,
|
|
"learning_rate": 0.00010810966810966811,
|
|
"loss": 0.0121,
|
|
"step": 16020
|
|
},
|
|
{
|
|
"epoch": 114.9820788530466,
|
|
"grad_norm": 0.49080151319503784,
|
|
"learning_rate": 0.000107994227994228,
|
|
"loss": 0.0104,
|
|
"step": 16040
|
|
},
|
|
{
|
|
"epoch": 115.12544802867383,
|
|
"grad_norm": 0.2161736786365509,
|
|
"learning_rate": 0.00010787878787878789,
|
|
"loss": 0.0095,
|
|
"step": 16060
|
|
},
|
|
{
|
|
"epoch": 115.26881720430107,
|
|
"grad_norm": 2.276884078979492,
|
|
"learning_rate": 0.00010776334776334777,
|
|
"loss": 0.0101,
|
|
"step": 16080
|
|
},
|
|
{
|
|
"epoch": 115.41218637992831,
|
|
"grad_norm": 0.3416621685028076,
|
|
"learning_rate": 0.00010764790764790765,
|
|
"loss": 0.0089,
|
|
"step": 16100
|
|
},
|
|
{
|
|
"epoch": 115.55555555555556,
|
|
"grad_norm": 0.5193475484848022,
|
|
"learning_rate": 0.00010753246753246753,
|
|
"loss": 0.0096,
|
|
"step": 16120
|
|
},
|
|
{
|
|
"epoch": 115.6989247311828,
|
|
"grad_norm": 0.8061926960945129,
|
|
"learning_rate": 0.00010741702741702742,
|
|
"loss": 0.0108,
|
|
"step": 16140
|
|
},
|
|
{
|
|
"epoch": 115.84229390681004,
|
|
"grad_norm": 0.6256706714630127,
|
|
"learning_rate": 0.00010730158730158731,
|
|
"loss": 0.0128,
|
|
"step": 16160
|
|
},
|
|
{
|
|
"epoch": 115.98566308243727,
|
|
"grad_norm": 0.3403368890285492,
|
|
"learning_rate": 0.0001071861471861472,
|
|
"loss": 0.0137,
|
|
"step": 16180
|
|
},
|
|
{
|
|
"epoch": 116.12903225806451,
|
|
"grad_norm": 2.7123234272003174,
|
|
"learning_rate": 0.00010707070707070708,
|
|
"loss": 0.0107,
|
|
"step": 16200
|
|
},
|
|
{
|
|
"epoch": 116.27240143369175,
|
|
"grad_norm": 0.8956249952316284,
|
|
"learning_rate": 0.00010695526695526697,
|
|
"loss": 0.0112,
|
|
"step": 16220
|
|
},
|
|
{
|
|
"epoch": 116.415770609319,
|
|
"grad_norm": 1.533353567123413,
|
|
"learning_rate": 0.00010683982683982686,
|
|
"loss": 0.0108,
|
|
"step": 16240
|
|
},
|
|
{
|
|
"epoch": 116.55913978494624,
|
|
"grad_norm": 1.1206146478652954,
|
|
"learning_rate": 0.00010672438672438674,
|
|
"loss": 0.0121,
|
|
"step": 16260
|
|
},
|
|
{
|
|
"epoch": 116.70250896057348,
|
|
"grad_norm": 0.8611764311790466,
|
|
"learning_rate": 0.00010660894660894663,
|
|
"loss": 0.012,
|
|
"step": 16280
|
|
},
|
|
{
|
|
"epoch": 116.84587813620071,
|
|
"grad_norm": 0.5663005113601685,
|
|
"learning_rate": 0.00010649350649350649,
|
|
"loss": 0.0105,
|
|
"step": 16300
|
|
},
|
|
{
|
|
"epoch": 116.98924731182795,
|
|
"grad_norm": 0.4195761978626251,
|
|
"learning_rate": 0.00010637806637806638,
|
|
"loss": 0.0137,
|
|
"step": 16320
|
|
},
|
|
{
|
|
"epoch": 117.1326164874552,
|
|
"grad_norm": 0.14845073223114014,
|
|
"learning_rate": 0.00010626262626262626,
|
|
"loss": 0.0082,
|
|
"step": 16340
|
|
},
|
|
{
|
|
"epoch": 117.27598566308244,
|
|
"grad_norm": 0.36522069573402405,
|
|
"learning_rate": 0.00010614718614718615,
|
|
"loss": 0.0106,
|
|
"step": 16360
|
|
},
|
|
{
|
|
"epoch": 117.41935483870968,
|
|
"grad_norm": 0.4705219566822052,
|
|
"learning_rate": 0.00010603174603174604,
|
|
"loss": 0.0106,
|
|
"step": 16380
|
|
},
|
|
{
|
|
"epoch": 117.56272401433692,
|
|
"grad_norm": 0.8641894459724426,
|
|
"learning_rate": 0.00010591630591630593,
|
|
"loss": 0.012,
|
|
"step": 16400
|
|
},
|
|
{
|
|
"epoch": 117.70609318996416,
|
|
"grad_norm": 1.0649274587631226,
|
|
"learning_rate": 0.00010580086580086581,
|
|
"loss": 0.0105,
|
|
"step": 16420
|
|
},
|
|
{
|
|
"epoch": 117.84946236559139,
|
|
"grad_norm": 0.6413153409957886,
|
|
"learning_rate": 0.0001056854256854257,
|
|
"loss": 0.0156,
|
|
"step": 16440
|
|
},
|
|
{
|
|
"epoch": 117.99283154121864,
|
|
"grad_norm": 0.7136850953102112,
|
|
"learning_rate": 0.00010556998556998559,
|
|
"loss": 0.0122,
|
|
"step": 16460
|
|
},
|
|
{
|
|
"epoch": 118.13620071684588,
|
|
"grad_norm": 0.36151137948036194,
|
|
"learning_rate": 0.00010545454545454545,
|
|
"loss": 0.0071,
|
|
"step": 16480
|
|
},
|
|
{
|
|
"epoch": 118.27956989247312,
|
|
"grad_norm": 0.320170521736145,
|
|
"learning_rate": 0.00010533910533910533,
|
|
"loss": 0.0098,
|
|
"step": 16500
|
|
},
|
|
{
|
|
"epoch": 118.42293906810036,
|
|
"grad_norm": 0.43986791372299194,
|
|
"learning_rate": 0.00010522366522366522,
|
|
"loss": 0.0117,
|
|
"step": 16520
|
|
},
|
|
{
|
|
"epoch": 118.5663082437276,
|
|
"grad_norm": 0.3372204601764679,
|
|
"learning_rate": 0.00010510822510822511,
|
|
"loss": 0.0104,
|
|
"step": 16540
|
|
},
|
|
{
|
|
"epoch": 118.70967741935483,
|
|
"grad_norm": 1.0731576681137085,
|
|
"learning_rate": 0.000104992784992785,
|
|
"loss": 0.0112,
|
|
"step": 16560
|
|
},
|
|
{
|
|
"epoch": 118.85304659498208,
|
|
"grad_norm": 0.4361036419868469,
|
|
"learning_rate": 0.00010487734487734488,
|
|
"loss": 0.0121,
|
|
"step": 16580
|
|
},
|
|
{
|
|
"epoch": 118.99641577060932,
|
|
"grad_norm": 0.6188058257102966,
|
|
"learning_rate": 0.00010476190476190477,
|
|
"loss": 0.0106,
|
|
"step": 16600
|
|
},
|
|
{
|
|
"epoch": 119.13978494623656,
|
|
"grad_norm": 0.8371912240982056,
|
|
"learning_rate": 0.00010464646464646466,
|
|
"loss": 0.0116,
|
|
"step": 16620
|
|
},
|
|
{
|
|
"epoch": 119.2831541218638,
|
|
"grad_norm": 0.561147928237915,
|
|
"learning_rate": 0.00010453102453102454,
|
|
"loss": 0.0104,
|
|
"step": 16640
|
|
},
|
|
{
|
|
"epoch": 119.42652329749104,
|
|
"grad_norm": 0.12699294090270996,
|
|
"learning_rate": 0.00010441558441558442,
|
|
"loss": 0.0102,
|
|
"step": 16660
|
|
},
|
|
{
|
|
"epoch": 119.56989247311827,
|
|
"grad_norm": 0.541074812412262,
|
|
"learning_rate": 0.0001043001443001443,
|
|
"loss": 0.0106,
|
|
"step": 16680
|
|
},
|
|
{
|
|
"epoch": 119.71326164874552,
|
|
"grad_norm": 1.2384388446807861,
|
|
"learning_rate": 0.00010418470418470419,
|
|
"loss": 0.0105,
|
|
"step": 16700
|
|
},
|
|
{
|
|
"epoch": 119.85663082437276,
|
|
"grad_norm": 0.4829830527305603,
|
|
"learning_rate": 0.00010406926406926408,
|
|
"loss": 0.0125,
|
|
"step": 16720
|
|
},
|
|
{
|
|
"epoch": 120.0,
|
|
"grad_norm": 0.30892565846443176,
|
|
"learning_rate": 0.00010395382395382396,
|
|
"loss": 0.0107,
|
|
"step": 16740
|
|
},
|
|
{
|
|
"epoch": 120.14336917562724,
|
|
"grad_norm": 0.6136677265167236,
|
|
"learning_rate": 0.00010383838383838385,
|
|
"loss": 0.0098,
|
|
"step": 16760
|
|
},
|
|
{
|
|
"epoch": 120.28673835125448,
|
|
"grad_norm": 0.4109688699245453,
|
|
"learning_rate": 0.00010372294372294374,
|
|
"loss": 0.0089,
|
|
"step": 16780
|
|
},
|
|
{
|
|
"epoch": 120.43010752688173,
|
|
"grad_norm": 0.8578640818595886,
|
|
"learning_rate": 0.00010360750360750363,
|
|
"loss": 0.0103,
|
|
"step": 16800
|
|
},
|
|
{
|
|
"epoch": 120.57347670250896,
|
|
"grad_norm": 0.3595373034477234,
|
|
"learning_rate": 0.00010349206349206351,
|
|
"loss": 0.0119,
|
|
"step": 16820
|
|
},
|
|
{
|
|
"epoch": 120.7168458781362,
|
|
"grad_norm": 0.9312357306480408,
|
|
"learning_rate": 0.00010337662337662337,
|
|
"loss": 0.0126,
|
|
"step": 16840
|
|
},
|
|
{
|
|
"epoch": 120.86021505376344,
|
|
"grad_norm": 0.8884514570236206,
|
|
"learning_rate": 0.00010326118326118326,
|
|
"loss": 0.0115,
|
|
"step": 16860
|
|
},
|
|
{
|
|
"epoch": 121.00358422939068,
|
|
"grad_norm": 1.535401463508606,
|
|
"learning_rate": 0.00010314574314574315,
|
|
"loss": 0.0104,
|
|
"step": 16880
|
|
},
|
|
{
|
|
"epoch": 121.14695340501792,
|
|
"grad_norm": 0.4228200316429138,
|
|
"learning_rate": 0.00010303030303030303,
|
|
"loss": 0.0094,
|
|
"step": 16900
|
|
},
|
|
{
|
|
"epoch": 121.29032258064517,
|
|
"grad_norm": 0.5223821997642517,
|
|
"learning_rate": 0.00010291486291486292,
|
|
"loss": 0.0094,
|
|
"step": 16920
|
|
},
|
|
{
|
|
"epoch": 121.4336917562724,
|
|
"grad_norm": 0.7498956322669983,
|
|
"learning_rate": 0.00010279942279942281,
|
|
"loss": 0.0093,
|
|
"step": 16940
|
|
},
|
|
{
|
|
"epoch": 121.57706093189964,
|
|
"grad_norm": 0.39098238945007324,
|
|
"learning_rate": 0.0001026839826839827,
|
|
"loss": 0.0127,
|
|
"step": 16960
|
|
},
|
|
{
|
|
"epoch": 121.72043010752688,
|
|
"grad_norm": 0.18215855956077576,
|
|
"learning_rate": 0.00010256854256854258,
|
|
"loss": 0.0083,
|
|
"step": 16980
|
|
},
|
|
{
|
|
"epoch": 121.86379928315412,
|
|
"grad_norm": 0.30135685205459595,
|
|
"learning_rate": 0.00010245310245310247,
|
|
"loss": 0.008,
|
|
"step": 17000
|
|
},
|
|
{
|
|
"epoch": 122.00716845878136,
|
|
"grad_norm": 1.6250956058502197,
|
|
"learning_rate": 0.00010233766233766233,
|
|
"loss": 0.0105,
|
|
"step": 17020
|
|
},
|
|
{
|
|
"epoch": 122.15053763440861,
|
|
"grad_norm": 0.3971248269081116,
|
|
"learning_rate": 0.00010222222222222222,
|
|
"loss": 0.0098,
|
|
"step": 17040
|
|
},
|
|
{
|
|
"epoch": 122.29390681003584,
|
|
"grad_norm": 0.5935524106025696,
|
|
"learning_rate": 0.0001021067821067821,
|
|
"loss": 0.0082,
|
|
"step": 17060
|
|
},
|
|
{
|
|
"epoch": 122.43727598566308,
|
|
"grad_norm": 0.3369162976741791,
|
|
"learning_rate": 0.00010199134199134199,
|
|
"loss": 0.0117,
|
|
"step": 17080
|
|
},
|
|
{
|
|
"epoch": 122.58064516129032,
|
|
"grad_norm": 0.379986047744751,
|
|
"learning_rate": 0.00010187590187590188,
|
|
"loss": 0.0094,
|
|
"step": 17100
|
|
},
|
|
{
|
|
"epoch": 122.72401433691756,
|
|
"grad_norm": 0.3556450307369232,
|
|
"learning_rate": 0.00010176046176046176,
|
|
"loss": 0.01,
|
|
"step": 17120
|
|
},
|
|
{
|
|
"epoch": 122.8673835125448,
|
|
"grad_norm": 0.5440447330474854,
|
|
"learning_rate": 0.00010164502164502165,
|
|
"loss": 0.012,
|
|
"step": 17140
|
|
},
|
|
{
|
|
"epoch": 123.01075268817205,
|
|
"grad_norm": 0.3880947232246399,
|
|
"learning_rate": 0.00010152958152958154,
|
|
"loss": 0.0113,
|
|
"step": 17160
|
|
},
|
|
{
|
|
"epoch": 123.15412186379929,
|
|
"grad_norm": 0.3488018214702606,
|
|
"learning_rate": 0.00010141414141414143,
|
|
"loss": 0.0082,
|
|
"step": 17180
|
|
},
|
|
{
|
|
"epoch": 123.29749103942652,
|
|
"grad_norm": 0.4776211082935333,
|
|
"learning_rate": 0.0001012987012987013,
|
|
"loss": 0.0079,
|
|
"step": 17200
|
|
},
|
|
{
|
|
"epoch": 123.44086021505376,
|
|
"grad_norm": 1.3282997608184814,
|
|
"learning_rate": 0.00010118326118326119,
|
|
"loss": 0.0106,
|
|
"step": 17220
|
|
},
|
|
{
|
|
"epoch": 123.584229390681,
|
|
"grad_norm": 0.6149415373802185,
|
|
"learning_rate": 0.00010106782106782107,
|
|
"loss": 0.0126,
|
|
"step": 17240
|
|
},
|
|
{
|
|
"epoch": 123.72759856630825,
|
|
"grad_norm": 0.9233654737472534,
|
|
"learning_rate": 0.00010095238095238096,
|
|
"loss": 0.0089,
|
|
"step": 17260
|
|
},
|
|
{
|
|
"epoch": 123.87096774193549,
|
|
"grad_norm": 0.4630541503429413,
|
|
"learning_rate": 0.00010083694083694085,
|
|
"loss": 0.0104,
|
|
"step": 17280
|
|
},
|
|
{
|
|
"epoch": 124.01433691756273,
|
|
"grad_norm": 0.15139320492744446,
|
|
"learning_rate": 0.00010072150072150073,
|
|
"loss": 0.0107,
|
|
"step": 17300
|
|
},
|
|
{
|
|
"epoch": 124.15770609318996,
|
|
"grad_norm": 1.2937678098678589,
|
|
"learning_rate": 0.00010060606060606062,
|
|
"loss": 0.01,
|
|
"step": 17320
|
|
},
|
|
{
|
|
"epoch": 124.3010752688172,
|
|
"grad_norm": 0.6447067260742188,
|
|
"learning_rate": 0.00010049062049062051,
|
|
"loss": 0.0103,
|
|
"step": 17340
|
|
},
|
|
{
|
|
"epoch": 124.44444444444444,
|
|
"grad_norm": 3.1351418495178223,
|
|
"learning_rate": 0.00010037518037518038,
|
|
"loss": 0.0093,
|
|
"step": 17360
|
|
},
|
|
{
|
|
"epoch": 124.58781362007169,
|
|
"grad_norm": 0.3367327153682709,
|
|
"learning_rate": 0.00010025974025974026,
|
|
"loss": 0.0114,
|
|
"step": 17380
|
|
},
|
|
{
|
|
"epoch": 124.73118279569893,
|
|
"grad_norm": 0.43587976694107056,
|
|
"learning_rate": 0.00010014430014430014,
|
|
"loss": 0.0083,
|
|
"step": 17400
|
|
},
|
|
{
|
|
"epoch": 124.87455197132617,
|
|
"grad_norm": 0.1791679859161377,
|
|
"learning_rate": 0.00010002886002886003,
|
|
"loss": 0.0105,
|
|
"step": 17420
|
|
},
|
|
{
|
|
"epoch": 125.0179211469534,
|
|
"grad_norm": 0.24327871203422546,
|
|
"learning_rate": 9.991341991341992e-05,
|
|
"loss": 0.014,
|
|
"step": 17440
|
|
},
|
|
{
|
|
"epoch": 125.16129032258064,
|
|
"grad_norm": 3.729423761367798,
|
|
"learning_rate": 9.97979797979798e-05,
|
|
"loss": 0.0105,
|
|
"step": 17460
|
|
},
|
|
{
|
|
"epoch": 125.30465949820788,
|
|
"grad_norm": 0.41952335834503174,
|
|
"learning_rate": 9.968253968253969e-05,
|
|
"loss": 0.0101,
|
|
"step": 17480
|
|
},
|
|
{
|
|
"epoch": 125.44802867383513,
|
|
"grad_norm": 0.5933164358139038,
|
|
"learning_rate": 9.956709956709958e-05,
|
|
"loss": 0.0098,
|
|
"step": 17500
|
|
},
|
|
{
|
|
"epoch": 125.59139784946237,
|
|
"grad_norm": 1.627990484237671,
|
|
"learning_rate": 9.945165945165945e-05,
|
|
"loss": 0.0118,
|
|
"step": 17520
|
|
},
|
|
{
|
|
"epoch": 125.73476702508961,
|
|
"grad_norm": 1.5514627695083618,
|
|
"learning_rate": 9.933621933621934e-05,
|
|
"loss": 0.0112,
|
|
"step": 17540
|
|
},
|
|
{
|
|
"epoch": 125.87813620071685,
|
|
"grad_norm": 0.5078562498092651,
|
|
"learning_rate": 9.922077922077923e-05,
|
|
"loss": 0.011,
|
|
"step": 17560
|
|
},
|
|
{
|
|
"epoch": 126.02150537634408,
|
|
"grad_norm": 1.107245922088623,
|
|
"learning_rate": 9.910533910533911e-05,
|
|
"loss": 0.0115,
|
|
"step": 17580
|
|
},
|
|
{
|
|
"epoch": 126.16487455197132,
|
|
"grad_norm": 1.2128745317459106,
|
|
"learning_rate": 9.8989898989899e-05,
|
|
"loss": 0.0105,
|
|
"step": 17600
|
|
},
|
|
{
|
|
"epoch": 126.30824372759857,
|
|
"grad_norm": 1.18942129611969,
|
|
"learning_rate": 9.887445887445887e-05,
|
|
"loss": 0.0105,
|
|
"step": 17620
|
|
},
|
|
{
|
|
"epoch": 126.45161290322581,
|
|
"grad_norm": 1.619493842124939,
|
|
"learning_rate": 9.875901875901876e-05,
|
|
"loss": 0.0092,
|
|
"step": 17640
|
|
},
|
|
{
|
|
"epoch": 126.59498207885305,
|
|
"grad_norm": 0.41182196140289307,
|
|
"learning_rate": 9.864357864357865e-05,
|
|
"loss": 0.0128,
|
|
"step": 17660
|
|
},
|
|
{
|
|
"epoch": 126.73835125448029,
|
|
"grad_norm": 0.3715384304523468,
|
|
"learning_rate": 9.852813852813853e-05,
|
|
"loss": 0.0101,
|
|
"step": 17680
|
|
},
|
|
{
|
|
"epoch": 126.88172043010752,
|
|
"grad_norm": 0.7196559309959412,
|
|
"learning_rate": 9.841269841269841e-05,
|
|
"loss": 0.0109,
|
|
"step": 17700
|
|
},
|
|
{
|
|
"epoch": 127.02508960573476,
|
|
"grad_norm": 0.9756142497062683,
|
|
"learning_rate": 9.82972582972583e-05,
|
|
"loss": 0.0127,
|
|
"step": 17720
|
|
},
|
|
{
|
|
"epoch": 127.168458781362,
|
|
"grad_norm": 0.3491683602333069,
|
|
"learning_rate": 9.818181818181818e-05,
|
|
"loss": 0.0097,
|
|
"step": 17740
|
|
},
|
|
{
|
|
"epoch": 127.31182795698925,
|
|
"grad_norm": 0.4264160394668579,
|
|
"learning_rate": 9.806637806637807e-05,
|
|
"loss": 0.0099,
|
|
"step": 17760
|
|
},
|
|
{
|
|
"epoch": 127.45519713261649,
|
|
"grad_norm": 0.49018484354019165,
|
|
"learning_rate": 9.795093795093796e-05,
|
|
"loss": 0.0102,
|
|
"step": 17780
|
|
},
|
|
{
|
|
"epoch": 127.59856630824373,
|
|
"grad_norm": 0.2783392667770386,
|
|
"learning_rate": 9.783549783549783e-05,
|
|
"loss": 0.0111,
|
|
"step": 17800
|
|
},
|
|
{
|
|
"epoch": 127.74193548387096,
|
|
"grad_norm": 2.4563422203063965,
|
|
"learning_rate": 9.772005772005772e-05,
|
|
"loss": 0.0084,
|
|
"step": 17820
|
|
},
|
|
{
|
|
"epoch": 127.8853046594982,
|
|
"grad_norm": 0.4553118050098419,
|
|
"learning_rate": 9.76046176046176e-05,
|
|
"loss": 0.0096,
|
|
"step": 17840
|
|
},
|
|
{
|
|
"epoch": 128.02867383512546,
|
|
"grad_norm": 0.25094160437583923,
|
|
"learning_rate": 9.748917748917749e-05,
|
|
"loss": 0.0107,
|
|
"step": 17860
|
|
},
|
|
{
|
|
"epoch": 128.1720430107527,
|
|
"grad_norm": 1.0729650259017944,
|
|
"learning_rate": 9.737373737373738e-05,
|
|
"loss": 0.0091,
|
|
"step": 17880
|
|
},
|
|
{
|
|
"epoch": 128.31541218637992,
|
|
"grad_norm": 1.0814895629882812,
|
|
"learning_rate": 9.725829725829726e-05,
|
|
"loss": 0.0139,
|
|
"step": 17900
|
|
},
|
|
{
|
|
"epoch": 128.45878136200716,
|
|
"grad_norm": 0.3357614576816559,
|
|
"learning_rate": 9.714285714285715e-05,
|
|
"loss": 0.0103,
|
|
"step": 17920
|
|
},
|
|
{
|
|
"epoch": 128.6021505376344,
|
|
"grad_norm": 0.6389890909194946,
|
|
"learning_rate": 9.702741702741704e-05,
|
|
"loss": 0.0116,
|
|
"step": 17940
|
|
},
|
|
{
|
|
"epoch": 128.74551971326164,
|
|
"grad_norm": 0.26751846075057983,
|
|
"learning_rate": 9.691197691197693e-05,
|
|
"loss": 0.0095,
|
|
"step": 17960
|
|
},
|
|
{
|
|
"epoch": 128.88888888888889,
|
|
"grad_norm": 0.659002959728241,
|
|
"learning_rate": 9.67965367965368e-05,
|
|
"loss": 0.0101,
|
|
"step": 17980
|
|
},
|
|
{
|
|
"epoch": 129.03225806451613,
|
|
"grad_norm": 0.32723554968833923,
|
|
"learning_rate": 9.668109668109669e-05,
|
|
"loss": 0.0092,
|
|
"step": 18000
|
|
}
|
|
],
|
|
"logging_steps": 20,
|
|
"max_steps": 34750,
|
|
"num_input_tokens_seen": 0,
|
|
"num_train_epochs": 250,
|
|
"save_steps": 500,
|
|
"stateful_callbacks": {
|
|
"TrainerControl": {
|
|
"args": {
|
|
"should_epoch_stop": false,
|
|
"should_evaluate": false,
|
|
"should_log": false,
|
|
"should_save": true,
|
|
"should_training_stop": false
|
|
},
|
|
"attributes": {}
|
|
}
|
|
},
|
|
"total_flos": 1.9243805639684506e+17,
|
|
"train_batch_size": 1,
|
|
"trial_name": null,
|
|
"trial_params": null
|
|
}
|
|
|