{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9998799759951991,
  "eval_steps": 500,
  "global_step": 6248,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00016003200640128025,
      "grad_norm": 1.1826485395431519,
      "learning_rate": 8e-09,
      "loss": 1.8238,
      "step": 1
    },
    {
      "epoch": 0.0016003200640128026,
      "grad_norm": 1.2571839094161987,
      "learning_rate": 8e-08,
      "loss": 1.8529,
      "step": 10
    },
    {
      "epoch": 0.003200640128025605,
      "grad_norm": 1.3447990417480469,
      "learning_rate": 1.6e-07,
      "loss": 1.8769,
      "step": 20
    },
    {
      "epoch": 0.004800960192038408,
      "grad_norm": 1.0940663814544678,
      "learning_rate": 2.4000000000000003e-07,
      "loss": 1.9161,
      "step": 30
    },
    {
      "epoch": 0.00640128025605121,
      "grad_norm": 1.1993372440338135,
      "learning_rate": 3.2e-07,
      "loss": 1.8078,
      "step": 40
    },
    {
      "epoch": 0.008001600320064013,
      "grad_norm": 1.06308913230896,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.9163,
      "step": 50
    },
    {
      "epoch": 0.009601920384076815,
      "grad_norm": 1.1543391942977905,
      "learning_rate": 4.800000000000001e-07,
      "loss": 1.8377,
      "step": 60
    },
    {
      "epoch": 0.011202240448089618,
      "grad_norm": 0.8527859449386597,
      "learning_rate": 5.6e-07,
      "loss": 1.8549,
      "step": 70
    },
    {
      "epoch": 0.01280256051210242,
      "grad_norm": 0.9085479974746704,
      "learning_rate": 6.4e-07,
      "loss": 1.789,
      "step": 80
    },
    {
      "epoch": 0.014402880576115223,
      "grad_norm": 0.7586134672164917,
      "learning_rate": 7.2e-07,
      "loss": 1.7819,
      "step": 90
    },
    {
      "epoch": 0.016003200640128026,
      "grad_norm": 0.8043696284294128,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.7761,
      "step": 100
    },
    {
      "epoch": 0.017603520704140826,
      "grad_norm": 0.7043126821517944,
      "learning_rate": 8.8e-07,
      "loss": 1.7702,
      "step": 110
    },
    {
      "epoch": 0.01920384076815363,
      "grad_norm": 0.6750304698944092,
      "learning_rate": 9.600000000000001e-07,
      "loss": 1.7808,
      "step": 120
    },
    {
      "epoch": 0.02080416083216643,
      "grad_norm": 0.6183897852897644,
      "learning_rate": 1.04e-06,
      "loss": 1.7655,
      "step": 130
    },
    {
      "epoch": 0.022404480896179236,
      "grad_norm": 0.6176135540008545,
      "learning_rate": 1.12e-06,
      "loss": 1.7363,
      "step": 140
    },
    {
      "epoch": 0.024004800960192037,
      "grad_norm": 0.5962868928909302,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.7132,
      "step": 150
    },
    {
      "epoch": 0.02560512102420484,
      "grad_norm": 0.5577073693275452,
      "learning_rate": 1.28e-06,
      "loss": 1.7228,
      "step": 160
    },
    {
      "epoch": 0.027205441088217642,
      "grad_norm": 0.5972305536270142,
      "learning_rate": 1.3600000000000001e-06,
      "loss": 1.7134,
      "step": 170
    },
    {
      "epoch": 0.028805761152230446,
      "grad_norm": 0.891796886920929,
      "learning_rate": 1.44e-06,
      "loss": 1.677,
      "step": 180
    },
    {
      "epoch": 0.030406081216243247,
      "grad_norm": 0.500298023223877,
      "learning_rate": 1.52e-06,
      "loss": 1.6674,
      "step": 190
    },
    {
      "epoch": 0.03200640128025605,
      "grad_norm": 0.4819110929965973,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.6432,
      "step": 200
    },
    {
      "epoch": 0.033606721344268856,
      "grad_norm": 0.48815402388572693,
      "learning_rate": 1.6800000000000002e-06,
      "loss": 1.6579,
      "step": 210
    },
    {
      "epoch": 0.03520704140828165,
      "grad_norm": 0.43313154578208923,
      "learning_rate": 1.76e-06,
      "loss": 1.5858,
      "step": 220
    },
    {
      "epoch": 0.03680736147229446,
      "grad_norm": 0.4705139100551605,
      "learning_rate": 1.8400000000000002e-06,
      "loss": 1.6045,
      "step": 230
    },
    {
      "epoch": 0.03840768153630726,
      "grad_norm": 0.46406659483909607,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 1.6324,
      "step": 240
    },
    {
      "epoch": 0.040008001600320066,
      "grad_norm": 0.46592777967453003,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.601,
      "step": 250
    },
    {
      "epoch": 0.04160832166433286,
      "grad_norm": 0.477504163980484,
      "learning_rate": 2.08e-06,
      "loss": 1.5639,
      "step": 260
    },
    {
      "epoch": 0.04320864172834567,
      "grad_norm": 0.45321983098983765,
      "learning_rate": 2.16e-06,
      "loss": 1.5627,
      "step": 270
    },
    {
      "epoch": 0.04480896179235847,
      "grad_norm": 0.463728129863739,
      "learning_rate": 2.24e-06,
      "loss": 1.5359,
      "step": 280
    },
    {
      "epoch": 0.046409281856371276,
      "grad_norm": 0.43756529688835144,
      "learning_rate": 2.3200000000000002e-06,
      "loss": 1.4996,
      "step": 290
    },
    {
      "epoch": 0.048009601920384073,
      "grad_norm": 0.4395667314529419,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.51,
      "step": 300
    },
    {
      "epoch": 0.04960992198439688,
      "grad_norm": 0.44691407680511475,
      "learning_rate": 2.4800000000000004e-06,
      "loss": 1.4876,
      "step": 310
    },
    {
      "epoch": 0.05121024204840968,
      "grad_norm": 0.4260852336883545,
      "learning_rate": 2.56e-06,
      "loss": 1.4463,
      "step": 320
    },
    {
      "epoch": 0.052810562112422486,
      "grad_norm": 0.4296850264072418,
      "learning_rate": 2.64e-06,
      "loss": 1.4195,
      "step": 330
    },
    {
      "epoch": 0.054410882176435284,
      "grad_norm": 0.3987746834754944,
      "learning_rate": 2.7200000000000002e-06,
      "loss": 1.4248,
      "step": 340
    },
    {
      "epoch": 0.05601120224044809,
      "grad_norm": 0.38749226927757263,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.4151,
      "step": 350
    },
    {
      "epoch": 0.05761152230446089,
      "grad_norm": 0.3994350731372833,
      "learning_rate": 2.88e-06,
      "loss": 1.4625,
      "step": 360
    },
    {
      "epoch": 0.0592118423684737,
      "grad_norm": 0.36386868357658386,
      "learning_rate": 2.96e-06,
      "loss": 1.4048,
      "step": 370
    },
    {
      "epoch": 0.060812162432486494,
      "grad_norm": 0.3657371699810028,
      "learning_rate": 3.04e-06,
      "loss": 1.4143,
      "step": 380
    },
    {
      "epoch": 0.0624124824964993,
      "grad_norm": 0.3797203302383423,
      "learning_rate": 3.12e-06,
      "loss": 1.4394,
      "step": 390
    },
    {
      "epoch": 0.0640128025605121,
      "grad_norm": 0.36554431915283203,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.3839,
      "step": 400
    },
    {
      "epoch": 0.0656131226245249,
      "grad_norm": 0.3783222734928131,
      "learning_rate": 3.2800000000000004e-06,
      "loss": 1.4041,
      "step": 410
    },
    {
      "epoch": 0.06721344268853771,
      "grad_norm": 0.36894097924232483,
      "learning_rate": 3.3600000000000004e-06,
      "loss": 1.3962,
      "step": 420
    },
    {
      "epoch": 0.06881376275255051,
      "grad_norm": 0.3558606207370758,
      "learning_rate": 3.44e-06,
      "loss": 1.4449,
      "step": 430
    },
    {
      "epoch": 0.0704140828165633,
      "grad_norm": 0.5871737003326416,
      "learning_rate": 3.52e-06,
      "loss": 1.4062,
      "step": 440
    },
    {
      "epoch": 0.07201440288057612,
      "grad_norm": 0.3803611099720001,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.3951,
      "step": 450
    },
    {
      "epoch": 0.07361472294458891,
      "grad_norm": 0.37580356001853943,
      "learning_rate": 3.6800000000000003e-06,
      "loss": 1.3606,
      "step": 460
    },
    {
      "epoch": 0.07521504300860173,
      "grad_norm": 0.3693610429763794,
      "learning_rate": 3.7600000000000004e-06,
      "loss": 1.4153,
      "step": 470
    },
    {
      "epoch": 0.07681536307261452,
      "grad_norm": 0.3637755811214447,
      "learning_rate": 3.8400000000000005e-06,
      "loss": 1.4622,
      "step": 480
    },
    {
      "epoch": 0.07841568313662732,
      "grad_norm": 0.3501175343990326,
      "learning_rate": 3.920000000000001e-06,
      "loss": 1.4107,
      "step": 490
    },
    {
      "epoch": 0.08001600320064013,
      "grad_norm": 2.7416932582855225,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.3932,
      "step": 500
    },
    {
      "epoch": 0.08161632326465293,
      "grad_norm": 0.7613168358802795,
      "learning_rate": 4.08e-06,
      "loss": 1.3947,
      "step": 510
    },
    {
      "epoch": 0.08321664332866573,
      "grad_norm": 0.34814804792404175,
      "learning_rate": 4.16e-06,
      "loss": 1.4168,
      "step": 520
    },
    {
      "epoch": 0.08481696339267854,
      "grad_norm": 0.3658451735973358,
      "learning_rate": 4.24e-06,
      "loss": 1.383,
      "step": 530
    },
    {
      "epoch": 0.08641728345669134,
      "grad_norm": 0.3520263135433197,
      "learning_rate": 4.32e-06,
      "loss": 1.3419,
      "step": 540
    },
    {
      "epoch": 0.08801760352070415,
      "grad_norm": 0.37928885221481323,
      "learning_rate": 4.4e-06,
      "loss": 1.3982,
      "step": 550
    },
    {
      "epoch": 0.08961792358471694,
      "grad_norm": 1.466685175895691,
      "learning_rate": 4.48e-06,
      "loss": 1.3914,
      "step": 560
    },
    {
      "epoch": 0.09121824364872974,
      "grad_norm": 0.37202754616737366,
      "learning_rate": 4.56e-06,
      "loss": 1.3421,
      "step": 570
    },
    {
      "epoch": 0.09281856371274255,
      "grad_norm": 0.37455129623413086,
      "learning_rate": 4.6400000000000005e-06,
      "loss": 1.3564,
      "step": 580
    },
    {
      "epoch": 0.09441888377675535,
      "grad_norm": 0.3516671359539032,
      "learning_rate": 4.7200000000000005e-06,
      "loss": 1.3219,
      "step": 590
    },
    {
      "epoch": 0.09601920384076815,
      "grad_norm": 0.37462764978408813,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.3639,
      "step": 600
    },
    {
      "epoch": 0.09761952390478096,
      "grad_norm": 0.3746024966239929,
      "learning_rate": 4.880000000000001e-06,
      "loss": 1.3789,
      "step": 610
    },
    {
      "epoch": 0.09921984396879376,
      "grad_norm": 0.3724232316017151,
      "learning_rate": 4.960000000000001e-06,
      "loss": 1.3499,
      "step": 620
    },
    {
      "epoch": 0.10082016403280657,
      "grad_norm": 0.38169583678245544,
      "learning_rate": 4.999990245313474e-06,
      "loss": 1.3086,
      "step": 630
    },
    {
      "epoch": 0.10242048409681936,
      "grad_norm": 0.3765912353992462,
      "learning_rate": 4.999912208277997e-06,
      "loss": 1.3307,
      "step": 640
    },
    {
      "epoch": 0.10402080416083216,
      "grad_norm": 0.38425111770629883,
      "learning_rate": 4.999756136642964e-06,
      "loss": 1.3168,
      "step": 650
    },
    {
      "epoch": 0.10562112422484497,
      "grad_norm": 0.38071689009666443,
      "learning_rate": 4.999522035280142e-06,
      "loss": 1.3717,
      "step": 660
    },
    {
      "epoch": 0.10722144428885777,
      "grad_norm": 0.3890415132045746,
      "learning_rate": 4.999209911496989e-06,
      "loss": 1.362,
      "step": 670
    },
    {
      "epoch": 0.10882176435287057,
      "grad_norm": 0.3932015001773834,
      "learning_rate": 4.99881977503643e-06,
      "loss": 1.3367,
      "step": 680
    },
    {
      "epoch": 0.11042208441688338,
      "grad_norm": 0.39485597610473633,
      "learning_rate": 4.998351638076548e-06,
      "loss": 1.3263,
      "step": 690
    },
    {
      "epoch": 0.11202240448089618,
      "grad_norm": 0.40244272351264954,
      "learning_rate": 4.99780551523021e-06,
      "loss": 1.3483,
      "step": 700
    },
    {
      "epoch": 0.11362272454490899,
      "grad_norm": 0.38777467608451843,
      "learning_rate": 4.9971814235446035e-06,
      "loss": 1.4173,
      "step": 710
    },
    {
      "epoch": 0.11522304460892178,
      "grad_norm": 0.3693300485610962,
      "learning_rate": 4.996479382500714e-06,
      "loss": 1.3711,
      "step": 720
    },
    {
      "epoch": 0.11682336467293458,
      "grad_norm": 0.39682939648628235,
      "learning_rate": 4.995699414012704e-06,
      "loss": 1.2961,
      "step": 730
    },
    {
      "epoch": 0.1184236847369474,
      "grad_norm": 0.4018958806991577,
      "learning_rate": 4.994841542427242e-06,
      "loss": 1.3109,
      "step": 740
    },
    {
      "epoch": 0.12002400480096019,
      "grad_norm": 0.3853909969329834,
      "learning_rate": 4.993905794522734e-06,
      "loss": 1.3114,
      "step": 750
    },
    {
      "epoch": 0.12162432486497299,
      "grad_norm": 0.4010235071182251,
      "learning_rate": 4.992892199508492e-06,
      "loss": 1.3348,
      "step": 760
    },
    {
      "epoch": 0.1232246449289858,
      "grad_norm": 0.3733123242855072,
      "learning_rate": 4.991800789023818e-06,
      "loss": 1.324,
      "step": 770
    },
    {
      "epoch": 0.1248249649929986,
      "grad_norm": 0.4108160138130188,
      "learning_rate": 4.990631597137021e-06,
      "loss": 1.2966,
      "step": 780
    },
    {
      "epoch": 0.1264252850570114,
      "grad_norm": 0.41119498014450073,
      "learning_rate": 4.989384660344351e-06,
      "loss": 1.3555,
      "step": 790
    },
    {
      "epoch": 0.1280256051210242,
      "grad_norm": 0.39664438366889954,
      "learning_rate": 4.9880600175688596e-06,
      "loss": 1.326,
      "step": 800
    },
    {
      "epoch": 0.12962592518503702,
      "grad_norm": 0.40418845415115356,
      "learning_rate": 4.986657710159187e-06,
      "loss": 1.3018,
      "step": 810
    },
    {
      "epoch": 0.1312262452490498,
      "grad_norm": 0.4098711609840393,
      "learning_rate": 4.98517778188827e-06,
      "loss": 1.3963,
      "step": 820
    },
    {
      "epoch": 0.1328265653130626,
      "grad_norm": 0.43664970993995667,
      "learning_rate": 4.983620278951973e-06,
      "loss": 1.3343,
      "step": 830
    },
    {
      "epoch": 0.13442688537707542,
      "grad_norm": 0.4087508022785187,
      "learning_rate": 4.981985249967651e-06,
      "loss": 1.2787,
      "step": 840
    },
    {
      "epoch": 0.1360272054410882,
      "grad_norm": 0.38437142968177795,
      "learning_rate": 4.980272745972629e-06,
      "loss": 1.3563,
      "step": 850
    },
    {
      "epoch": 0.13762752550510102,
      "grad_norm": 0.41200581192970276,
      "learning_rate": 4.978482820422612e-06,
      "loss": 1.3338,
      "step": 860
    },
    {
      "epoch": 0.13922784556911383,
      "grad_norm": 0.40172791481018066,
      "learning_rate": 4.976615529190008e-06,
      "loss": 1.3087,
      "step": 870
    },
    {
      "epoch": 0.1408281656331266,
      "grad_norm": 0.44374722242355347,
      "learning_rate": 4.974670930562195e-06,
      "loss": 1.3104,
      "step": 880
    },
    {
      "epoch": 0.14242848569713942,
      "grad_norm": 0.4120323657989502,
      "learning_rate": 4.972649085239694e-06,
      "loss": 1.303,
      "step": 890
    },
    {
      "epoch": 0.14402880576115223,
      "grad_norm": 0.42948517203330994,
      "learning_rate": 4.970550056334277e-06,
      "loss": 1.2743,
      "step": 900
    },
    {
      "epoch": 0.14562912582516505,
      "grad_norm": 0.4419565498828888,
      "learning_rate": 4.968373909366998e-06,
      "loss": 1.3082,
      "step": 910
    },
    {
      "epoch": 0.14722944588917783,
      "grad_norm": 0.411284476518631,
      "learning_rate": 4.966120712266146e-06,
      "loss": 1.3116,
      "step": 920
    },
    {
      "epoch": 0.14882976595319064,
      "grad_norm": 0.4247114062309265,
      "learning_rate": 4.963790535365122e-06,
      "loss": 1.3486,
      "step": 930
    },
    {
      "epoch": 0.15043008601720345,
      "grad_norm": 0.4018177092075348,
      "learning_rate": 4.961383451400251e-06,
      "loss": 1.3029,
      "step": 940
    },
    {
      "epoch": 0.15203040608121624,
      "grad_norm": 0.40833476185798645,
      "learning_rate": 4.958899535508503e-06,
      "loss": 1.2706,
      "step": 950
    },
    {
      "epoch": 0.15363072614522905,
      "grad_norm": 0.41154181957244873,
      "learning_rate": 4.956338865225155e-06,
      "loss": 1.3063,
      "step": 960
    },
    {
      "epoch": 0.15523104620924186,
      "grad_norm": 0.42829248309135437,
      "learning_rate": 4.953701520481365e-06,
      "loss": 1.3119,
      "step": 970
    },
    {
      "epoch": 0.15683136627325464,
      "grad_norm": 0.42823630571365356,
      "learning_rate": 4.950987583601681e-06,
      "loss": 1.276,
      "step": 980
    },
    {
      "epoch": 0.15843168633726745,
      "grad_norm": 0.4209652841091156,
      "learning_rate": 4.9481971393014686e-06,
      "loss": 1.2947,
      "step": 990
    },
    {
      "epoch": 0.16003200640128026,
      "grad_norm": 0.4024667739868164,
      "learning_rate": 4.945330274684268e-06,
      "loss": 1.2704,
      "step": 1000
    },
    {
      "epoch": 0.16163232646529305,
      "grad_norm": 0.4125162363052368,
      "learning_rate": 4.942387079239075e-06,
      "loss": 1.2686,
      "step": 1010
    },
    {
      "epoch": 0.16323264652930586,
      "grad_norm": 0.39515283703804016,
      "learning_rate": 4.9393676448375475e-06,
      "loss": 1.2875,
      "step": 1020
    },
    {
      "epoch": 0.16483296659331867,
      "grad_norm": 0.42820730805397034,
      "learning_rate": 4.936272065731135e-06,
      "loss": 1.2633,
      "step": 1030
    },
    {
      "epoch": 0.16643328665733145,
      "grad_norm": 0.43181583285331726,
      "learning_rate": 4.933100438548144e-06,
      "loss": 1.3641,
      "step": 1040
    },
    {
      "epoch": 0.16803360672134426,
      "grad_norm": 0.4250759482383728,
      "learning_rate": 4.929852862290711e-06,
      "loss": 1.3123,
      "step": 1050
    },
    {
      "epoch": 0.16963392678535708,
      "grad_norm": 0.4282747805118561,
      "learning_rate": 4.926529438331724e-06,
      "loss": 1.3036,
      "step": 1060
    },
    {
      "epoch": 0.1712342468493699,
      "grad_norm": 0.5093165040016174,
      "learning_rate": 4.923130270411648e-06,
      "loss": 1.2802,
      "step": 1070
    },
    {
      "epoch": 0.17283456691338267,
      "grad_norm": 0.4356524348258972,
      "learning_rate": 4.919655464635291e-06,
      "loss": 1.3567,
      "step": 1080
    },
    {
      "epoch": 0.17443488697739548,
      "grad_norm": 0.4331202507019043,
      "learning_rate": 4.916105129468493e-06,
      "loss": 1.3056,
      "step": 1090
    },
    {
      "epoch": 0.1760352070414083,
      "grad_norm": 0.437448650598526,
      "learning_rate": 4.91247937573474e-06,
      "loss": 1.3202,
      "step": 1100
    },
    {
      "epoch": 0.17763552710542108,
      "grad_norm": 0.4280548095703125,
      "learning_rate": 4.9087783166117035e-06,
      "loss": 1.2888,
      "step": 1110
    },
    {
      "epoch": 0.1792358471694339,
      "grad_norm": 0.430753231048584,
      "learning_rate": 4.905002067627704e-06,
      "loss": 1.2891,
      "step": 1120
    },
    {
      "epoch": 0.1808361672334467,
      "grad_norm": 0.42238640785217285,
      "learning_rate": 4.9011507466581146e-06,
      "loss": 1.2956,
      "step": 1130
    },
    {
      "epoch": 0.18243648729745948,
      "grad_norm": 0.4425515830516815,
      "learning_rate": 4.897224473921672e-06,
      "loss": 1.2969,
      "step": 1140
    },
    {
      "epoch": 0.1840368073614723,
      "grad_norm": 0.4294049143791199,
      "learning_rate": 4.893223371976728e-06,
      "loss": 1.2962,
      "step": 1150
    },
    {
      "epoch": 0.1856371274254851,
      "grad_norm": 0.44323623180389404,
      "learning_rate": 4.889147565717425e-06,
      "loss": 1.2914,
      "step": 1160
    },
    {
      "epoch": 0.1872374474894979,
      "grad_norm": 1.6440542936325073,
      "learning_rate": 4.8849971823697925e-06,
      "loss": 1.2961,
      "step": 1170
    },
    {
      "epoch": 0.1888377675535107,
      "grad_norm": 0.44181469082832336,
      "learning_rate": 4.880772351487781e-06,
      "loss": 1.2617,
      "step": 1180
    },
    {
      "epoch": 0.1904380876175235,
      "grad_norm": 0.42513954639434814,
      "learning_rate": 4.8764732049492194e-06,
      "loss": 1.3279,
      "step": 1190
    },
    {
      "epoch": 0.1920384076815363,
      "grad_norm": 0.4171765446662903,
      "learning_rate": 4.8720998769516876e-06,
      "loss": 1.2486,
      "step": 1200
    },
    {
      "epoch": 0.1936387277455491,
      "grad_norm": 0.43634432554244995,
      "learning_rate": 4.867652504008341e-06,
      "loss": 1.2848,
      "step": 1210
    },
    {
      "epoch": 0.19523904780956192,
      "grad_norm": 0.4300241768360138,
      "learning_rate": 4.863131224943641e-06,
      "loss": 1.3023,
      "step": 1220
    },
    {
      "epoch": 0.19683936787357473,
      "grad_norm": 0.41159215569496155,
      "learning_rate": 4.858536180889025e-06,
      "loss": 1.2471,
      "step": 1230
    },
    {
      "epoch": 0.1984396879375875,
      "grad_norm": 0.44021639227867126,
      "learning_rate": 4.853867515278499e-06,
      "loss": 1.3048,
      "step": 1240
    },
    {
      "epoch": 0.20004000800160032,
      "grad_norm": 0.43915456533432007,
      "learning_rate": 4.849125373844159e-06,
      "loss": 1.2624,
      "step": 1250
    },
    {
      "epoch": 0.20164032806561313,
      "grad_norm": 0.4190748929977417,
      "learning_rate": 4.844309904611648e-06,
      "loss": 1.261,
      "step": 1260
    },
    {
      "epoch": 0.20324064812962592,
      "grad_norm": 0.443705677986145,
      "learning_rate": 4.839421257895531e-06,
      "loss": 1.2977,
      "step": 1270
    },
    {
      "epoch": 0.20484096819363873,
      "grad_norm": 0.5231160521507263,
      "learning_rate": 4.8344595862945995e-06,
      "loss": 1.3784,
      "step": 1280
    },
    {
      "epoch": 0.20644128825765154,
      "grad_norm": 0.44119977951049805,
      "learning_rate": 4.829425044687117e-06,
      "loss": 1.275,
      "step": 1290
    },
    {
      "epoch": 0.20804160832166432,
      "grad_norm": 0.4395291209220886,
      "learning_rate": 4.8243177902259775e-06,
      "loss": 1.2103,
      "step": 1300
    },
    {
      "epoch": 0.20964192838567713,
      "grad_norm": 0.4597066044807434,
      "learning_rate": 4.819137982333801e-06,
      "loss": 1.3092,
      "step": 1310
    },
    {
      "epoch": 0.21124224844968995,
      "grad_norm": 0.4260525107383728,
      "learning_rate": 4.81388578269796e-06,
      "loss": 1.2564,
      "step": 1320
    },
    {
      "epoch": 0.21284256851370273,
      "grad_norm": 0.419204980134964,
      "learning_rate": 4.80856135526553e-06,
      "loss": 1.3276,
      "step": 1330
    },
    {
      "epoch": 0.21444288857771554,
      "grad_norm": 0.4245491027832031,
      "learning_rate": 4.8031648662381715e-06,
      "loss": 1.297,
      "step": 1340
    },
    {
      "epoch": 0.21604320864172835,
      "grad_norm": 0.4363405704498291,
      "learning_rate": 4.797696484066945e-06,
      "loss": 1.2796,
      "step": 1350
    },
    {
      "epoch": 0.21764352870574113,
      "grad_norm": 0.4211016297340393,
      "learning_rate": 4.7921563794470495e-06,
      "loss": 1.2903,
      "step": 1360
    },
    {
      "epoch": 0.21924384876975395,
      "grad_norm": 0.44692331552505493,
      "learning_rate": 4.786544725312496e-06,
      "loss": 1.2893,
      "step": 1370
    },
    {
      "epoch": 0.22084416883376676,
      "grad_norm": 0.4414706826210022,
      "learning_rate": 4.78086169683071e-06,
      "loss": 1.2812,
      "step": 1380
    },
    {
      "epoch": 0.22244448889777957,
      "grad_norm": 0.4407265782356262,
      "learning_rate": 4.77510747139706e-06,
      "loss": 1.2196,
      "step": 1390
    },
    {
      "epoch": 0.22404480896179235,
      "grad_norm": 0.43710222840309143,
      "learning_rate": 4.7692822286293266e-06,
      "loss": 1.2762,
      "step": 1400
    },
    {
      "epoch": 0.22564512902580516,
      "grad_norm": 0.4352550506591797,
      "learning_rate": 4.76338615036209e-06,
      "loss": 1.2983,
      "step": 1410
    },
    {
      "epoch": 0.22724544908981797,
      "grad_norm": 0.45670264959335327,
      "learning_rate": 4.757419420641053e-06,
      "loss": 1.2998,
      "step": 1420
    },
    {
      "epoch": 0.22884576915383076,
      "grad_norm": 0.4622712731361389,
      "learning_rate": 4.751382225717304e-06,
      "loss": 1.2664,
      "step": 1430
    },
    {
      "epoch": 0.23044608921784357,
      "grad_norm": 0.4442577064037323,
      "learning_rate": 4.745274754041497e-06,
      "loss": 1.3014,
      "step": 1440
    },
    {
      "epoch": 0.23204640928185638,
      "grad_norm": 0.4509070813655853,
      "learning_rate": 4.739097196257967e-06,
      "loss": 1.3166,
      "step": 1450
    },
    {
      "epoch": 0.23364672934586916,
      "grad_norm": 0.4595312476158142,
      "learning_rate": 4.732849745198786e-06,
      "loss": 1.2714,
      "step": 1460
    },
    {
      "epoch": 0.23524704940988198,
      "grad_norm": 0.46154943108558655,
      "learning_rate": 4.72653259587774e-06,
      "loss": 1.2457,
      "step": 1470
    },
    {
      "epoch": 0.2368473694738948,
      "grad_norm": 0.45601826906204224,
      "learning_rate": 4.7201459454842395e-06,
      "loss": 1.3244,
      "step": 1480
    },
    {
      "epoch": 0.23844768953790757,
      "grad_norm": 0.4444175958633423,
      "learning_rate": 4.713689993377167e-06,
      "loss": 1.2842,
      "step": 1490
    },
    {
      "epoch": 0.24004800960192038,
      "grad_norm": 0.4575673043727875,
      "learning_rate": 4.707164941078656e-06,
      "loss": 1.2829,
      "step": 1500
    },
    {
      "epoch": 0.2416483296659332,
      "grad_norm": 0.44316327571868896,
      "learning_rate": 4.700570992267795e-06,
      "loss": 1.2571,
      "step": 1510
    },
    {
      "epoch": 0.24324864972994598,
      "grad_norm": 0.4629514515399933,
      "learning_rate": 4.6939083527742735e-06,
      "loss": 1.3043,
      "step": 1520
    },
    {
      "epoch": 0.2448489697939588,
      "grad_norm": 0.45411327481269836,
      "learning_rate": 4.6871772305719586e-06,
      "loss": 1.3144,
      "step": 1530
    },
    {
      "epoch": 0.2464492898579716,
      "grad_norm": 0.44114208221435547,
      "learning_rate": 4.680377835772399e-06,
      "loss": 1.2906,
      "step": 1540
    },
    {
      "epoch": 0.2480496099219844,
      "grad_norm": 0.4612123966217041,
      "learning_rate": 4.673510380618266e-06,
      "loss": 1.2272,
      "step": 1550
    },
    {
      "epoch": 0.2496499299859972,
      "grad_norm": 0.5552570819854736,
      "learning_rate": 4.666575079476733e-06,
      "loss": 1.3235,
      "step": 1560
    },
    {
      "epoch": 0.25125025005001,
      "grad_norm": 0.4699633717536926,
      "learning_rate": 4.659572148832783e-06,
      "loss": 1.2614,
      "step": 1570
    },
    {
      "epoch": 0.2528505701140228,
      "grad_norm": 0.4418560564517975,
      "learning_rate": 4.652501807282448e-06,
      "loss": 1.2433,
      "step": 1580
    },
    {
      "epoch": 0.2544508901780356,
      "grad_norm": 0.4806090295314789,
      "learning_rate": 4.645364275525984e-06,
      "loss": 1.2728,
      "step": 1590
    },
    {
      "epoch": 0.2560512102420484,
      "grad_norm": 0.4657137393951416,
      "learning_rate": 4.63815977636099e-06,
      "loss": 1.2605,
      "step": 1600
    },
    {
      "epoch": 0.2576515303060612,
      "grad_norm": 0.48473361134529114,
      "learning_rate": 4.630888534675446e-06,
      "loss": 1.3102,
      "step": 1610
    },
    {
      "epoch": 0.25925185037007403,
      "grad_norm": 0.4496365785598755,
      "learning_rate": 4.6235507774406966e-06,
      "loss": 1.2934,
      "step": 1620
    },
    {
      "epoch": 0.2608521704340868,
      "grad_norm": 0.4331839978694916,
      "learning_rate": 4.616146733704362e-06,
      "loss": 1.2459,
      "step": 1630
    },
    {
      "epoch": 0.2624524904980996,
      "grad_norm": 0.46064773201942444,
      "learning_rate": 4.608676634583194e-06,
      "loss": 1.2778,
      "step": 1640
    },
    {
      "epoch": 0.26405281056211244,
      "grad_norm": 0.4446019232273102,
      "learning_rate": 4.60114071325586e-06,
      "loss": 1.2879,
      "step": 1650
    },
    {
      "epoch": 0.2656531306261252,
      "grad_norm": 0.4695674479007721,
      "learning_rate": 4.593539204955661e-06,
      "loss": 1.294,
      "step": 1660
    },
    {
      "epoch": 0.267253450690138,
      "grad_norm": 0.4513859450817108,
      "learning_rate": 4.585872346963192e-06,
      "loss": 1.241,
      "step": 1670
    },
    {
      "epoch": 0.26885377075415084,
      "grad_norm": 0.4444881081581116,
      "learning_rate": 4.578140378598935e-06,
      "loss": 1.2657,
      "step": 1680
    },
    {
      "epoch": 0.27045409081816363,
      "grad_norm": 0.4365733563899994,
      "learning_rate": 4.570343541215787e-06,
      "loss": 1.2716,
      "step": 1690
    },
    {
      "epoch": 0.2720544108821764,
      "grad_norm": 0.4717450737953186,
      "learning_rate": 4.562482078191527e-06,
      "loss": 1.2458,
      "step": 1700
    },
    {
      "epoch": 0.27365473094618925,
      "grad_norm": 0.45678940415382385,
      "learning_rate": 4.554556234921221e-06,
      "loss": 1.2837,
      "step": 1710
    },
    {
      "epoch": 0.27525505101020203,
      "grad_norm": 0.4608180522918701,
      "learning_rate": 4.54656625880956e-06,
      "loss": 1.2805,
      "step": 1720
    },
    {
      "epoch": 0.2768553710742148,
      "grad_norm": 0.46855640411376953,
      "learning_rate": 4.538512399263136e-06,
      "loss": 1.2629,
      "step": 1730
    },
    {
      "epoch": 0.27845569113822766,
      "grad_norm": 0.4587608575820923,
      "learning_rate": 4.530394907682659e-06,
      "loss": 1.2804,
      "step": 1740
    },
    {
      "epoch": 0.28005601120224044,
      "grad_norm": 0.4596967101097107,
      "learning_rate": 4.52221403745511e-06,
      "loss": 1.2699,
      "step": 1750
    },
    {
      "epoch": 0.2816563312662532,
      "grad_norm": 0.4687954783439636,
      "learning_rate": 4.513970043945828e-06,
      "loss": 1.2563,
      "step": 1760
    },
    {
      "epoch": 0.28325665133026606,
      "grad_norm": 0.4607718884944916,
      "learning_rate": 4.505663184490543e-06,
      "loss": 1.2504,
      "step": 1770
    },
    {
      "epoch": 0.28485697139427885,
      "grad_norm": 0.4863135814666748,
      "learning_rate": 4.497293718387342e-06,
      "loss": 1.2735,
      "step": 1780
    },
    {
      "epoch": 0.2864572914582917,
      "grad_norm": 0.4534025192260742,
      "learning_rate": 4.488861906888574e-06,
      "loss": 1.2426,
      "step": 1790
    },
    {
      "epoch": 0.28805761152230447,
      "grad_norm": 0.4449337422847748,
      "learning_rate": 4.4803680131926935e-06,
      "loss": 1.2569,
      "step": 1800
    },
    {
      "epoch": 0.28965793158631725,
      "grad_norm": 0.4723162055015564,
      "learning_rate": 4.47181230243605e-06,
      "loss": 1.2444,
      "step": 1810
    },
    {
      "epoch": 0.2912582516503301,
      "grad_norm": 0.45309850573539734,
      "learning_rate": 4.463195041684607e-06,
      "loss": 1.2356,
      "step": 1820
    },
    {
      "epoch": 0.2928585717143429,
      "grad_norm": 0.4788224399089813,
      "learning_rate": 4.454516499925606e-06,
      "loss": 1.2534,
      "step": 1830
    },
    {
      "epoch": 0.29445889177835566,
      "grad_norm": 0.4625834822654724,
      "learning_rate": 4.445776948059174e-06,
      "loss": 1.2631,
      "step": 1840
    },
    {
      "epoch": 0.2960592118423685,
      "grad_norm": 0.47085145115852356,
      "learning_rate": 4.436976658889862e-06,
      "loss": 1.3068,
      "step": 1850
    },
    {
      "epoch": 0.2976595319063813,
      "grad_norm": 0.4663547873497009,
      "learning_rate": 4.428115907118134e-06,
      "loss": 1.2664,
      "step": 1860
    },
    {
      "epoch": 0.29925985197039406,
      "grad_norm": 0.45759037137031555,
      "learning_rate": 4.4191949693317886e-06,
      "loss": 1.3019,
      "step": 1870
    },
    {
      "epoch": 0.3008601720344069,
      "grad_norm": 0.4738781154155731,
      "learning_rate": 4.410214123997328e-06,
      "loss": 1.2699,
      "step": 1880
    },
    {
      "epoch": 0.3024604920984197,
      "grad_norm": 0.4601910710334778,
      "learning_rate": 4.401173651451264e-06,
      "loss": 1.2779,
      "step": 1890
    },
    {
      "epoch": 0.30406081216243247,
      "grad_norm": 0.4631597399711609,
      "learning_rate": 4.392073833891369e-06,
      "loss": 1.2617,
      "step": 1900
    },
    {
      "epoch": 0.3056611322264453,
      "grad_norm": 0.46061640977859497,
      "learning_rate": 4.3829149553678666e-06,
      "loss": 1.2971,
      "step": 1910
    },
    {
      "epoch": 0.3072614522904581,
      "grad_norm": 0.4908485412597656,
      "learning_rate": 4.373697301774561e-06,
      "loss": 1.2483,
      "step": 1920
    },
    {
      "epoch": 0.3088617723544709,
      "grad_norm": 0.5022984147071838,
      "learning_rate": 4.364421160839921e-06,
      "loss": 1.2754,
      "step": 1930
    },
    {
      "epoch": 0.3104620924184837,
      "grad_norm": 0.46905580163002014,
      "learning_rate": 4.355086822118092e-06,
      "loss": 1.2467,
      "step": 1940
    },
    {
      "epoch": 0.3120624124824965,
      "grad_norm": 0.44887256622314453,
      "learning_rate": 4.345694576979858e-06,
      "loss": 1.2803,
      "step": 1950
    },
    {
      "epoch": 0.3136627325465093,
      "grad_norm": 0.4691268503665924,
      "learning_rate": 4.336244718603553e-06,
      "loss": 1.2655,
      "step": 1960
    },
    {
      "epoch": 0.3152630526105221,
      "grad_norm": 0.4812462329864502,
      "learning_rate": 4.326737541965899e-06,
      "loss": 1.2945,
      "step": 1970
    },
    {
      "epoch": 0.3168633726745349,
      "grad_norm": 0.47187143564224243,
      "learning_rate": 4.3171733438328065e-06,
      "loss": 1.2625,
      "step": 1980
    },
    {
      "epoch": 0.3184636927385477,
      "grad_norm": 0.46066814661026,
      "learning_rate": 4.3075524227501074e-06,
      "loss": 1.3126,
      "step": 1990
    },
    {
      "epoch": 0.3200640128025605,
      "grad_norm": 0.4603062570095062,
      "learning_rate": 4.297875079034239e-06,
      "loss": 1.234,
      "step": 2000
    },
    {
      "epoch": 0.3216643328665733,
      "grad_norm": 0.4697874188423157,
      "learning_rate": 4.288141614762864e-06,
      "loss": 1.2536,
      "step": 2010
    },
    {
      "epoch": 0.3232646529305861,
      "grad_norm": 0.48067206144332886,
      "learning_rate": 4.278352333765449e-06,
      "loss": 1.2311,
      "step": 2020
    },
    {
      "epoch": 0.32486497299459893,
      "grad_norm": 0.4830579459667206,
      "learning_rate": 4.26850754161377e-06,
      "loss": 1.2606,
      "step": 2030
    },
    {
      "epoch": 0.3264652930586117,
      "grad_norm": 0.47166574001312256,
      "learning_rate": 4.258607545612389e-06,
      "loss": 1.2638,
      "step": 2040
    },
    {
      "epoch": 0.3280656131226245,
      "grad_norm": 0.47056472301483154,
      "learning_rate": 4.248652654789043e-06,
      "loss": 1.2242,
      "step": 2050
    },
    {
      "epoch": 0.32966593318663734,
      "grad_norm": 0.4559212625026703,
      "learning_rate": 4.238643179885013e-06,
      "loss": 1.2496,
      "step": 2060
    },
    {
      "epoch": 0.3312662532506501,
      "grad_norm": 0.48663806915283203,
      "learning_rate": 4.228579433345418e-06,
      "loss": 1.2716,
      "step": 2070
    },
    {
      "epoch": 0.3328665733146629,
      "grad_norm": 0.47868451476097107,
      "learning_rate": 4.2184617293094625e-06,
      "loss": 1.235,
      "step": 2080
    },
    {
      "epoch": 0.33446689337867574,
      "grad_norm": 0.4848126769065857,
      "learning_rate": 4.20829038360063e-06,
      "loss": 1.2852,
      "step": 2090
    },
    {
      "epoch": 0.33606721344268853,
      "grad_norm": 0.46406739950180054,
      "learning_rate": 4.198065713716824e-06,
      "loss": 1.2625,
      "step": 2100
    },
    {
      "epoch": 0.33766753350670137,
      "grad_norm": 0.4762243628501892,
      "learning_rate": 4.187788038820462e-06,
      "loss": 1.2587,
      "step": 2110
    },
    {
      "epoch": 0.33926785357071415,
      "grad_norm": 0.48254549503326416,
      "learning_rate": 4.177457679728508e-06,
      "loss": 1.2214,
      "step": 2120
    },
    {
      "epoch": 0.34086817363472693,
      "grad_norm": 0.5035943388938904,
      "learning_rate": 4.1670749589024605e-06,
      "loss": 1.2365,
      "step": 2130
    },
    {
      "epoch": 0.3424684936987398,
      "grad_norm": 0.4957742393016815,
      "learning_rate": 4.156640200438283e-06,
      "loss": 1.2667,
      "step": 2140
    },
    {
      "epoch": 0.34406881376275256,
      "grad_norm": 0.47412991523742676,
      "learning_rate": 4.146153730056296e-06,
      "loss": 1.2442,
      "step": 2150
    },
    {
      "epoch": 0.34566913382676534,
      "grad_norm": 0.461995393037796,
      "learning_rate": 4.135615875090998e-06,
      "loss": 1.3429,
      "step": 2160
    },
    {
      "epoch": 0.3472694538907782,
      "grad_norm": 0.4934525787830353,
      "learning_rate": 4.125026964480861e-06,
      "loss": 1.2672,
      "step": 2170
    },
    {
      "epoch": 0.34886977395479096,
      "grad_norm": 0.4495723247528076,
      "learning_rate": 4.1143873287580485e-06,
      "loss": 1.3079,
      "step": 2180
    },
    {
      "epoch": 0.35047009401880375,
      "grad_norm": 0.4944385886192322,
      "learning_rate": 4.10369730003811e-06,
      "loss": 1.2615,
      "step": 2190
    },
    {
      "epoch": 0.3520704140828166,
      "grad_norm": 0.5282748341560364,
      "learning_rate": 4.092957212009607e-06,
      "loss": 1.26,
      "step": 2200
    },
    {
      "epoch": 0.35367073414682937,
      "grad_norm": 0.4717164635658264,
      "learning_rate": 4.082167399923701e-06,
      "loss": 1.2839,
      "step": 2210
    },
    {
      "epoch": 0.35527105421084215,
      "grad_norm": 0.4664457142353058,
      "learning_rate": 4.071328200583684e-06,
      "loss": 1.2246,
      "step": 2220
    },
    {
      "epoch": 0.356871374274855,
      "grad_norm": 0.4893302917480469,
      "learning_rate": 4.0604399523344715e-06,
      "loss": 1.2885,
      "step": 2230
    },
    {
      "epoch": 0.3584716943388678,
      "grad_norm": 0.4661487638950348,
      "learning_rate": 4.049502995052034e-06,
      "loss": 1.287,
      "step": 2240
    },
    {
      "epoch": 0.36007201440288056,
      "grad_norm": 0.4612460732460022,
      "learning_rate": 4.038517670132794e-06,
      "loss": 1.2483,
      "step": 2250
    },
    {
      "epoch": 0.3616723344668934,
      "grad_norm": 0.4863792657852173,
      "learning_rate": 4.0274843204829645e-06,
      "loss": 1.231,
      "step": 2260
    },
    {
      "epoch": 0.3632726545309062,
      "grad_norm": 0.514241635799408,
      "learning_rate": 4.01640329050785e-06,
      "loss": 1.2583,
      "step": 2270
    },
    {
      "epoch": 0.36487297459491896,
      "grad_norm": 0.5051952004432678,
      "learning_rate": 4.00527492610109e-06,
      "loss": 1.246,
      "step": 2280
    },
    {
      "epoch": 0.3664732946589318,
      "grad_norm": 0.4585234224796295,
      "learning_rate": 3.994099574633869e-06,
      "loss": 1.2585,
      "step": 2290
    },
    {
      "epoch": 0.3680736147229446,
      "grad_norm": 0.4963371753692627,
      "learning_rate": 3.982877584944066e-06,
      "loss": 1.2529,
      "step": 2300
    },
    {
      "epoch": 0.36967393478695737,
      "grad_norm": 0.4735323488712311,
      "learning_rate": 3.971609307325373e-06,
      "loss": 1.2464,
      "step": 2310
    },
    {
      "epoch": 0.3712742548509702,
      "grad_norm": 0.4705098271369934,
      "learning_rate": 3.960295093516352e-06,
      "loss": 1.2705,
      "step": 2320
    },
    {
      "epoch": 0.372874574914983,
      "grad_norm": 0.5204774737358093,
      "learning_rate": 3.948935296689464e-06,
      "loss": 1.2127,
      "step": 2330
    },
    {
      "epoch": 0.3744748949789958,
      "grad_norm": 0.46412527561187744,
      "learning_rate": 3.9375302714400415e-06,
      "loss": 1.3021,
      "step": 2340
    },
    {
      "epoch": 0.3760752150430086,
      "grad_norm": 0.47340336441993713,
      "learning_rate": 3.926080373775215e-06,
      "loss": 1.2325,
      "step": 2350
    },
    {
      "epoch": 0.3776755351070214,
      "grad_norm": 0.5133469700813293,
      "learning_rate": 3.914585961102808e-06,
      "loss": 1.3012,
      "step": 2360
    },
    {
      "epoch": 0.3792758551710342,
      "grad_norm": 0.5203788876533508,
      "learning_rate": 3.903047392220176e-06,
      "loss": 1.1977,
      "step": 2370
    },
    {
      "epoch": 0.380876175235047,
      "grad_norm": 0.5032314658164978,
      "learning_rate": 3.8914650273030085e-06,
      "loss": 1.2758,
      "step": 2380
    },
    {
      "epoch": 0.3824764952990598,
      "grad_norm": 0.4731563627719879,
      "learning_rate": 3.8798392278940846e-06,
      "loss": 1.2457,
      "step": 2390
    },
    {
      "epoch": 0.3840768153630726,
      "grad_norm": 0.5058057308197021,
      "learning_rate": 3.86817035689199e-06,
      "loss": 1.2344,
      "step": 2400
    },
    {
      "epoch": 0.3856771354270854,
      "grad_norm": 0.4878542125225067,
      "learning_rate": 3.856458778539784e-06,
      "loss": 1.2323,
      "step": 2410
    },
    {
      "epoch": 0.3872774554910982,
      "grad_norm": 0.4845779836177826,
      "learning_rate": 3.844704858413637e-06,
      "loss": 1.2345,
      "step": 2420
    },
    {
      "epoch": 0.38887777555511105,
      "grad_norm": 0.4679320454597473,
      "learning_rate": 3.832908963411412e-06,
      "loss": 1.22,
      "step": 2430
    },
    {
      "epoch": 0.39047809561912383,
      "grad_norm": 0.4730748236179352,
      "learning_rate": 3.821071461741217e-06,
      "loss": 1.2737,
      "step": 2440
    },
    {
      "epoch": 0.3920784156831366,
      "grad_norm": 0.4998788833618164,
      "learning_rate": 3.8091927229099094e-06,
      "loss": 1.2222,
      "step": 2450
    },
    {
      "epoch": 0.39367873574714946,
      "grad_norm": 0.4714483618736267,
      "learning_rate": 3.797273117711561e-06,
      "loss": 1.2434,
      "step": 2460
    },
    {
      "epoch": 0.39527905581116224,
      "grad_norm": 0.5025110840797424,
      "learning_rate": 3.7853130182158837e-06,
      "loss": 1.2855,
      "step": 2470
    },
    {
      "epoch": 0.396879375875175,
      "grad_norm": 0.4985499978065491,
      "learning_rate": 3.77331279775662e-06,
      "loss": 1.2668,
      "step": 2480
    },
    {
      "epoch": 0.39847969593918786,
      "grad_norm": 0.49666157364845276,
      "learning_rate": 3.7612728309198822e-06,
      "loss": 1.26,
      "step": 2490
    },
    {
      "epoch": 0.40008001600320064,
      "grad_norm": 0.47888752818107605,
      "learning_rate": 3.7491934935324636e-06,
      "loss": 1.2683,
      "step": 2500
    },
    {
      "epoch": 0.40168033606721343,
      "grad_norm": 0.49205294251441956,
      "learning_rate": 3.737075162650109e-06,
      "loss": 1.2587,
      "step": 2510
    },
    {
      "epoch": 0.40328065613122627,
      "grad_norm": 0.47279250621795654,
      "learning_rate": 3.7249182165457405e-06,
      "loss": 1.2564,
      "step": 2520
    },
    {
      "epoch": 0.40488097619523905,
      "grad_norm": 0.4705461263656616,
      "learning_rate": 3.7127230346976527e-06,
      "loss": 1.2814,
      "step": 2530
    },
    {
      "epoch": 0.40648129625925183,
      "grad_norm": 0.48040762543678284,
      "learning_rate": 3.700489997777666e-06,
      "loss": 1.2257,
      "step": 2540
    },
    {
      "epoch": 0.4080816163232647,
      "grad_norm": 0.48487865924835205,
      "learning_rate": 3.6882194876392454e-06,
      "loss": 1.268,
      "step": 2550
    },
    {
      "epoch": 0.40968193638727746,
      "grad_norm": 0.49241048097610474,
      "learning_rate": 3.675911887305579e-06,
      "loss": 1.2189,
      "step": 2560
    },
    {
      "epoch": 0.41128225645129024,
      "grad_norm": 0.5074785351753235,
      "learning_rate": 3.6635675809576234e-06,
      "loss": 1.2319,
      "step": 2570
    },
    {
      "epoch": 0.4128825765153031,
      "grad_norm": 0.5021569728851318,
      "learning_rate": 3.65118695392211e-06,
      "loss": 1.241,
      "step": 2580
    },
    {
      "epoch": 0.41448289657931586,
      "grad_norm": 0.527664303779602,
      "learning_rate": 3.638770392659522e-06,
      "loss": 1.2692,
      "step": 2590
    },
    {
      "epoch": 0.41608321664332865,
      "grad_norm": 0.4816740155220032,
      "learning_rate": 3.626318284752022e-06,
      "loss": 1.2027,
      "step": 2600
    },
    {
      "epoch": 0.4176835367073415,
      "grad_norm": 0.4745207130908966,
      "learning_rate": 3.6138310188913617e-06,
      "loss": 1.2242,
      "step": 2610
    },
    {
      "epoch": 0.41928385677135427,
      "grad_norm": 0.4819655120372772,
      "learning_rate": 3.601308984866746e-06,
      "loss": 1.2833,
      "step": 2620
    },
    {
      "epoch": 0.42088417683536705,
      "grad_norm": 0.47727903723716736,
      "learning_rate": 3.5887525735526645e-06,
      "loss": 1.2501,
      "step": 2630
    },
    {
      "epoch": 0.4224844968993799,
      "grad_norm": 0.4700309932231903,
      "learning_rate": 3.5761621768966947e-06,
      "loss": 1.2608,
      "step": 2640
    },
    {
      "epoch": 0.4240848169633927,
      "grad_norm": 0.5003221035003662,
      "learning_rate": 3.56353818790726e-06,
      "loss": 1.2303,
      "step": 2650
    },
    {
      "epoch": 0.42568513702740546,
      "grad_norm": 0.47159165143966675,
      "learning_rate": 3.55088100064137e-06,
      "loss": 1.2353,
      "step": 2660
    },
    {
      "epoch": 0.4272854570914183,
      "grad_norm": 0.48961141705513,
      "learning_rate": 3.538191010192314e-06,
      "loss": 1.2455,
      "step": 2670
    },
    {
      "epoch": 0.4288857771554311,
      "grad_norm": 0.5218635201454163,
      "learning_rate": 3.525468612677333e-06,
      "loss": 1.2207,
      "step": 2680
    },
    {
      "epoch": 0.43048609721944386,
      "grad_norm": 0.5075445771217346,
      "learning_rate": 3.512714205225249e-06,
      "loss": 1.2562,
      "step": 2690
    },
    {
      "epoch": 0.4320864172834567,
      "grad_norm": 0.4947759509086609,
      "learning_rate": 3.4999281859640753e-06,
      "loss": 1.2768,
      "step": 2700
    },
    {
      "epoch": 0.4336867373474695,
      "grad_norm": 0.47496354579925537,
      "learning_rate": 3.4871109540085853e-06,
      "loss": 1.2451,
      "step": 2710
    },
    {
      "epoch": 0.43528705741148227,
      "grad_norm": 0.4932936429977417,
      "learning_rate": 3.474262909447853e-06,
      "loss": 1.2292,
      "step": 2720
    },
    {
      "epoch": 0.4368873774754951,
      "grad_norm": 0.5209819078445435,
      "learning_rate": 3.4613844533327677e-06,
      "loss": 1.2483,
      "step": 2730
    },
    {
      "epoch": 0.4384876975395079,
      "grad_norm": 0.49459630250930786,
      "learning_rate": 3.448475987663511e-06,
      "loss": 1.2562,
      "step": 2740
    },
    {
      "epoch": 0.44008801760352073,
      "grad_norm": 0.49290767312049866,
      "learning_rate": 3.435537915377015e-06,
      "loss": 1.2563,
      "step": 2750
    },
    {
      "epoch": 0.4416883376675335,
      "grad_norm": 0.46836143732070923,
      "learning_rate": 3.4225706403343767e-06,
      "loss": 1.2324,
      "step": 2760
    },
    {
      "epoch": 0.4432886577315463,
      "grad_norm": 0.47990599274635315,
      "learning_rate": 3.4095745673082557e-06,
      "loss": 1.2031,
      "step": 2770
    },
    {
      "epoch": 0.44488897779555914,
      "grad_norm": 0.4839983880519867,
      "learning_rate": 3.3965501019702406e-06,
      "loss": 1.2534,
      "step": 2780
    },
    {
      "epoch": 0.4464892978595719,
      "grad_norm": 0.4967963695526123,
      "learning_rate": 3.383497650878183e-06,
      "loss": 1.2496,
      "step": 2790
    },
    {
      "epoch": 0.4480896179235847,
      "grad_norm": 0.5163443088531494,
      "learning_rate": 3.370417621463508e-06,
      "loss": 1.2078,
      "step": 2800
    },
    {
      "epoch": 0.44968993798759754,
      "grad_norm": 0.49203282594680786,
      "learning_rate": 3.3573104220184966e-06,
      "loss": 1.1888,
      "step": 2810
    },
    {
      "epoch": 0.4512902580516103,
      "grad_norm": 0.5206897854804993,
      "learning_rate": 3.3441764616835405e-06,
      "loss": 1.2233,
      "step": 2820
    },
    {
      "epoch": 0.4528905781156231,
      "grad_norm": 0.48824429512023926,
      "learning_rate": 3.3310161504343715e-06,
      "loss": 1.2682,
      "step": 2830
    },
    {
      "epoch": 0.45449089817963595,
      "grad_norm": 0.4975123703479767,
      "learning_rate": 3.3178298990692614e-06,
      "loss": 1.2532,
      "step": 2840
    },
    {
      "epoch": 0.45609121824364873,
      "grad_norm": 0.5045737028121948,
      "learning_rate": 3.3046181191962028e-06,
      "loss": 1.2544,
      "step": 2850
    },
    {
      "epoch": 0.4576915383076615,
      "grad_norm": 0.47918882966041565,
      "learning_rate": 3.2913812232200593e-06,
      "loss": 1.2512,
      "step": 2860
    },
    {
      "epoch": 0.45929185837167436,
      "grad_norm": 0.5058532953262329,
      "learning_rate": 3.278119624329692e-06,
      "loss": 1.245,
      "step": 2870
    },
    {
      "epoch": 0.46089217843568714,
      "grad_norm": 1.186800241470337,
      "learning_rate": 3.2648337364850602e-06,
      "loss": 1.2404,
      "step": 2880
    },
    {
      "epoch": 0.4624924984996999,
      "grad_norm": 0.5249418616294861,
      "learning_rate": 3.2515239744043033e-06,
      "loss": 1.245,
      "step": 2890
    },
    {
      "epoch": 0.46409281856371276,
      "grad_norm": 0.5032217502593994,
      "learning_rate": 3.2381907535507912e-06,
      "loss": 1.2201,
      "step": 2900
    },
    {
      "epoch": 0.46569313862772554,
      "grad_norm": 0.49591124057769775,
      "learning_rate": 3.2248344901201618e-06,
      "loss": 1.2129,
      "step": 2910
    },
    {
      "epoch": 0.46729345869173833,
      "grad_norm": 0.5112331509590149,
      "learning_rate": 3.2114556010273213e-06,
      "loss": 1.2446,
      "step": 2920
    },
    {
      "epoch": 0.46889377875575117,
      "grad_norm": 0.5048455595970154,
      "learning_rate": 3.198054503893436e-06,
      "loss": 1.2595,
      "step": 2930
    },
    {
      "epoch": 0.47049409881976395,
      "grad_norm": 0.4961850643157959,
      "learning_rate": 3.184631617032897e-06,
      "loss": 1.2276,
      "step": 2940
    },
    {
      "epoch": 0.47209441888377673,
      "grad_norm": 0.4945153594017029,
      "learning_rate": 3.171187359440257e-06,
      "loss": 1.2423,
      "step": 2950
    },
    {
      "epoch": 0.4736947389477896,
      "grad_norm": 0.49535536766052246,
      "learning_rate": 3.157722150777156e-06,
      "loss": 1.2322,
      "step": 2960
    },
    {
      "epoch": 0.47529505901180236,
      "grad_norm": 0.47433891892433167,
      "learning_rate": 3.144236411359221e-06,
      "loss": 1.2201,
      "step": 2970
    },
    {
      "epoch": 0.47689537907581514,
      "grad_norm": 0.4769941568374634,
      "learning_rate": 3.1307305621429453e-06,
      "loss": 1.2169,
      "step": 2980
    },
    {
      "epoch": 0.478495699139828,
      "grad_norm": 0.5070396065711975,
      "learning_rate": 3.117205024712546e-06,
      "loss": 1.2166,
      "step": 2990
    },
    {
      "epoch": 0.48009601920384076,
      "grad_norm": 1.291608214378357,
      "learning_rate": 3.10366022126681e-06,
      "loss": 1.2227,
      "step": 3000
    },
    {
      "epoch": 0.48169633926785355,
      "grad_norm": 0.4981076717376709,
      "learning_rate": 3.090096574605908e-06,
      "loss": 1.244,
      "step": 3010
    },
    {
      "epoch": 0.4832966593318664,
      "grad_norm": 0.49318239092826843,
      "learning_rate": 3.0765145081182045e-06,
      "loss": 1.2612,
      "step": 3020
    },
    {
      "epoch": 0.48489697939587917,
      "grad_norm": 0.48173773288726807,
      "learning_rate": 3.0629144457670334e-06,
      "loss": 1.2403,
      "step": 3030
    },
    {
      "epoch": 0.48649729945989195,
      "grad_norm": 0.5013173222541809,
      "learning_rate": 3.0492968120774714e-06,
      "loss": 1.2635,
      "step": 3040
    },
    {
      "epoch": 0.4880976195239048,
      "grad_norm": 0.5153574347496033,
      "learning_rate": 3.0356620321230823e-06,
      "loss": 1.2283,
      "step": 3050
    },
    {
      "epoch": 0.4896979395879176,
      "grad_norm": 0.49809423089027405,
      "learning_rate": 3.0220105315126496e-06,
      "loss": 1.2643,
      "step": 3060
    },
    {
      "epoch": 0.4912982596519304,
      "grad_norm": 0.4933251142501831,
      "learning_rate": 3.0083427363768908e-06,
      "loss": 1.2687,
      "step": 3070
    },
    {
      "epoch": 0.4928985797159432,
      "grad_norm": 0.497218519449234,
      "learning_rate": 2.9946590733551562e-06,
      "loss": 1.2131,
      "step": 3080
    },
    {
      "epoch": 0.494498899779956,
      "grad_norm": 0.49667471647262573,
      "learning_rate": 2.9809599695821114e-06,
      "loss": 1.258,
      "step": 3090
    },
    {
      "epoch": 0.4960992198439688,
      "grad_norm": 0.46937358379364014,
      "learning_rate": 2.9672458526744034e-06,
      "loss": 1.2243,
      "step": 3100
    },
    {
      "epoch": 0.4976995399079816,
      "grad_norm": 0.466854065656662,
      "learning_rate": 2.9535171507173144e-06,
      "loss": 1.2265,
      "step": 3110
    },
    {
      "epoch": 0.4992998599719944,
      "grad_norm": 0.5092979669570923,
      "learning_rate": 2.939774292251395e-06,
      "loss": 1.2694,
      "step": 3120
    },
    {
      "epoch": 0.5009001800360072,
      "grad_norm": 0.5075433850288391,
      "learning_rate": 2.926017706259095e-06,
      "loss": 1.2295,
      "step": 3130
    },
    {
      "epoch": 0.50250050010002,
      "grad_norm": 0.4989834129810333,
      "learning_rate": 2.912247822151365e-06,
      "loss": 1.2688,
      "step": 3140
    },
    {
      "epoch": 0.5041008201640328,
      "grad_norm": 0.49576446413993835,
      "learning_rate": 2.898465069754255e-06,
      "loss": 1.251,
      "step": 3150
    },
    {
      "epoch": 0.5057011402280456,
      "grad_norm": 0.49369537830352783,
      "learning_rate": 2.8846698792954995e-06,
      "loss": 1.2792,
      "step": 3160
    },
    {
      "epoch": 0.5073014602920584,
      "grad_norm": 0.48225724697113037,
      "learning_rate": 2.8708626813910857e-06,
      "loss": 1.2142,
      "step": 3170
    },
    {
      "epoch": 0.5089017803560713,
      "grad_norm": 0.48354271054267883,
      "learning_rate": 2.8570439070318116e-06,
      "loss": 1.2099,
      "step": 3180
    },
    {
      "epoch": 0.510502100420084,
      "grad_norm": 0.49547797441482544,
      "learning_rate": 2.843213987569835e-06,
      "loss": 1.2254,
      "step": 3190
    },
    {
      "epoch": 0.5121024204840968,
      "grad_norm": 0.49686744809150696,
      "learning_rate": 2.8293733547052067e-06,
      "loss": 1.255,
      "step": 3200
    },
    {
      "epoch": 0.5137027405481096,
      "grad_norm": 0.49578046798706055,
      "learning_rate": 2.8155224404723946e-06,
      "loss": 1.1932,
      "step": 3210
    },
    {
      "epoch": 0.5153030606121224,
      "grad_norm": 0.48902279138565063,
      "learning_rate": 2.801661677226801e-06,
      "loss": 1.2343,
      "step": 3220
    },
    {
      "epoch": 0.5169033806761353,
      "grad_norm": 0.50181645154953,
      "learning_rate": 2.787791497631263e-06,
      "loss": 1.1983,
      "step": 3230
    },
    {
      "epoch": 0.5185037007401481,
      "grad_norm": 0.49499696493148804,
      "learning_rate": 2.7739123346425485e-06,
      "loss": 1.2036,
      "step": 3240
    },
    {
      "epoch": 0.5201040208041608,
      "grad_norm": 0.5074306130409241,
      "learning_rate": 2.760024621497843e-06,
      "loss": 1.2691,
      "step": 3250
    },
    {
      "epoch": 0.5217043408681736,
      "grad_norm": 0.5145530700683594,
      "learning_rate": 2.7461287917012207e-06,
      "loss": 1.2452,
      "step": 3260
    },
    {
      "epoch": 0.5233046609321864,
      "grad_norm": 0.5158199071884155,
      "learning_rate": 2.732225279010121e-06,
      "loss": 1.2463,
      "step": 3270
    },
    {
      "epoch": 0.5249049809961992,
      "grad_norm": 0.48611709475517273,
      "learning_rate": 2.7183145174218024e-06,
      "loss": 1.2657,
      "step": 3280
    },
    {
      "epoch": 0.5265053010602121,
      "grad_norm": 0.516374945640564,
      "learning_rate": 2.704396941159798e-06,
      "loss": 1.2098,
      "step": 3290
    },
    {
      "epoch": 0.5281056211242249,
      "grad_norm": 0.5058311223983765,
      "learning_rate": 2.690472984660359e-06,
      "loss": 1.2323,
      "step": 3300
    },
    {
      "epoch": 0.5297059411882377,
      "grad_norm": 0.5268154740333557,
      "learning_rate": 2.676543082558896e-06,
      "loss": 1.2465,
      "step": 3310
    },
    {
      "epoch": 0.5313062612522504,
      "grad_norm": 0.5180853605270386,
      "learning_rate": 2.6626076696764125e-06,
      "loss": 1.2701,
      "step": 3320
    },
    {
      "epoch": 0.5329065813162632,
      "grad_norm": 0.47433140873908997,
      "learning_rate": 2.648667181005929e-06,
      "loss": 1.2379,
      "step": 3330
    },
    {
      "epoch": 0.534506901380276,
      "grad_norm": 0.5192251205444336,
      "learning_rate": 2.6347220516989064e-06,
      "loss": 1.2161,
      "step": 3340
    },
    {
      "epoch": 0.5361072214442889,
      "grad_norm": 0.4724452495574951,
      "learning_rate": 2.620772717051664e-06,
      "loss": 1.2482,
      "step": 3350
    },
    {
      "epoch": 0.5377075415083017,
      "grad_norm": 0.48347294330596924,
      "learning_rate": 2.60681961249179e-06,
      "loss": 1.2619,
      "step": 3360
    },
    {
      "epoch": 0.5393078615723145,
      "grad_norm": 0.5179809331893921,
      "learning_rate": 2.5928631735645503e-06,
      "loss": 1.2228,
      "step": 3370
    },
    {
      "epoch": 0.5409081816363273,
      "grad_norm": 0.5063818693161011,
      "learning_rate": 2.5789038359192912e-06,
      "loss": 1.1893,
      "step": 3380
    },
    {
      "epoch": 0.54250850170034,
      "grad_norm": 0.5198962688446045,
      "learning_rate": 2.5649420352958452e-06,
      "loss": 1.2239,
      "step": 3390
    },
    {
      "epoch": 0.5441088217643528,
      "grad_norm": 0.5000635981559753,
      "learning_rate": 2.550978207510925e-06,
      "loss": 1.2476,
      "step": 3400
    },
    {
      "epoch": 0.5457091418283657,
      "grad_norm": 0.5031649470329285,
      "learning_rate": 2.53701278844452e-06,
      "loss": 1.2153,
      "step": 3410
    },
    {
      "epoch": 0.5473094618923785,
      "grad_norm": 0.5141199231147766,
      "learning_rate": 2.5230462140262936e-06,
      "loss": 1.1893,
      "step": 3420
    },
    {
      "epoch": 0.5489097819563913,
      "grad_norm": 0.499374657869339,
      "learning_rate": 2.5090789202219705e-06,
      "loss": 1.3086,
      "step": 3430
    },
    {
      "epoch": 0.5505101020204041,
      "grad_norm": 0.5020270943641663,
      "learning_rate": 2.495111343019735e-06,
      "loss": 1.2257,
      "step": 3440
    },
    {
      "epoch": 0.5521104220844169,
      "grad_norm": 0.5148738026618958,
      "learning_rate": 2.4811439184166137e-06,
      "loss": 1.2291,
      "step": 3450
    },
    {
      "epoch": 0.5537107421484296,
      "grad_norm": 0.49779781699180603,
      "learning_rate": 2.467177082404871e-06,
      "loss": 1.2537,
      "step": 3460
    },
    {
      "epoch": 0.5553110622124425,
      "grad_norm": 0.5102514028549194,
      "learning_rate": 2.453211270958401e-06,
      "loss": 1.2495,
      "step": 3470
    },
    {
      "epoch": 0.5569113822764553,
      "grad_norm": 0.49807655811309814,
      "learning_rate": 2.4392469200191135e-06,
      "loss": 1.2416,
      "step": 3480
    },
    {
      "epoch": 0.5585117023404681,
      "grad_norm": 0.5306846499443054,
      "learning_rate": 2.4252844654833278e-06,
      "loss": 1.2343,
      "step": 3490
    },
    {
      "epoch": 0.5601120224044809,
      "grad_norm": 0.5187516212463379,
      "learning_rate": 2.4113243431881696e-06,
      "loss": 1.2483,
      "step": 3500
    },
    {
      "epoch": 0.5617123424684937,
      "grad_norm": 0.48813924193382263,
      "learning_rate": 2.3973669888979627e-06,
      "loss": 1.2557,
      "step": 3510
    },
    {
      "epoch": 0.5633126625325064,
      "grad_norm": 0.5215795040130615,
      "learning_rate": 2.3834128382906277e-06,
      "loss": 1.2194,
      "step": 3520
    },
    {
      "epoch": 0.5649129825965193,
      "grad_norm": 0.515504777431488,
      "learning_rate": 2.369462326944082e-06,
      "loss": 1.246,
      "step": 3530
    },
    {
      "epoch": 0.5665133026605321,
      "grad_norm": 0.5169162154197693,
      "learning_rate": 2.3555158903226446e-06,
      "loss": 1.2787,
      "step": 3540
    },
    {
      "epoch": 0.5681136227245449,
      "grad_norm": 0.5044717788696289,
      "learning_rate": 2.341573963763442e-06,
      "loss": 1.2552,
      "step": 3550
    },
    {
      "epoch": 0.5697139427885577,
      "grad_norm": 0.49591878056526184,
      "learning_rate": 2.3276369824628193e-06,
      "loss": 1.229,
      "step": 3560
    },
    {
      "epoch": 0.5713142628525705,
      "grad_norm": 2.2468647956848145,
      "learning_rate": 2.313705381462755e-06,
      "loss": 1.3349,
      "step": 3570
    },
    {
      "epoch": 0.5729145829165834,
      "grad_norm": 0.5160189867019653,
      "learning_rate": 2.299779595637284e-06,
      "loss": 1.2395,
      "step": 3580
    },
    {
      "epoch": 0.5745149029805962,
      "grad_norm": 0.4940637946128845,
      "learning_rate": 2.2858600596789186e-06,
      "loss": 1.2176,
      "step": 3590
    },
    {
      "epoch": 0.5761152230446089,
      "grad_norm": 0.5026009678840637,
      "learning_rate": 2.2719472080850827e-06,
      "loss": 1.2506,
      "step": 3600
    },
    {
      "epoch": 0.5777155431086217,
      "grad_norm": 0.4862099587917328,
      "learning_rate": 2.2580414751445483e-06,
      "loss": 1.1798,
      "step": 3610
    },
    {
      "epoch": 0.5793158631726345,
      "grad_norm": 0.4969867467880249,
      "learning_rate": 2.2441432949238783e-06,
      "loss": 1.2579,
      "step": 3620
    },
    {
      "epoch": 0.5809161832366473,
      "grad_norm": 0.5029799938201904,
      "learning_rate": 2.2302531012538796e-06,
      "loss": 1.2203,
      "step": 3630
    },
    {
      "epoch": 0.5825165033006602,
      "grad_norm": 0.5118884444236755,
      "learning_rate": 2.2163713277160583e-06,
      "loss": 1.2164,
      "step": 3640
    },
    {
      "epoch": 0.584116823364673,
      "grad_norm": 0.4936463236808777,
      "learning_rate": 2.202498407629086e-06,
      "loss": 1.2404,
      "step": 3650
    },
    {
      "epoch": 0.5857171434286857,
      "grad_norm": 0.5048061609268188,
      "learning_rate": 2.188634774035276e-06,
      "loss": 1.2116,
      "step": 3660
    },
    {
      "epoch": 0.5873174634926985,
      "grad_norm": 0.5140055418014526,
      "learning_rate": 2.1747808596870636e-06,
      "loss": 1.187,
      "step": 3670
    },
    {
      "epoch": 0.5889177835567113,
|
"grad_norm": 0.4869968891143799, |
|
"learning_rate": 2.160937097033497e-06, |
|
"loss": 1.3085, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.5905181036207241, |
|
"grad_norm": 0.4946134090423584, |
|
"learning_rate": 2.1471039182067427e-06, |
|
"loss": 1.2291, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.592118423684737, |
|
"grad_norm": 0.5053882002830505, |
|
"learning_rate": 2.133281755008592e-06, |
|
"loss": 1.2178, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.5937187437487498, |
|
"grad_norm": 0.5099985599517822, |
|
"learning_rate": 2.1194710388969846e-06, |
|
"loss": 1.2427, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.5953190638127626, |
|
"grad_norm": 0.5032602548599243, |
|
"learning_rate": 2.1056722009725386e-06, |
|
"loss": 1.2182, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.5969193838767753, |
|
"grad_norm": 0.9163219332695007, |
|
"learning_rate": 2.0918856719650975e-06, |
|
"loss": 1.249, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.5985197039407881, |
|
"grad_norm": 0.5141546130180359, |
|
"learning_rate": 2.078111882220282e-06, |
|
"loss": 1.2313, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.6001200240048009, |
|
"grad_norm": 0.5054259300231934, |
|
"learning_rate": 2.0643512616860562e-06, |
|
"loss": 1.2089, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.6017203440688138, |
|
"grad_norm": 0.506846010684967, |
|
"learning_rate": 2.050604239899311e-06, |
|
"loss": 1.2399, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.6033206641328266, |
|
"grad_norm": 0.49931642413139343, |
|
"learning_rate": 2.03687124597245e-06, |
|
"loss": 1.2006, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.6049209841968394, |
|
"grad_norm": 0.5294950604438782, |
|
"learning_rate": 2.0231527085799985e-06, |
|
"loss": 1.2871, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.6065213042608522, |
|
"grad_norm": 0.49745187163352966, |
|
"learning_rate": 2.009449055945226e-06, |
|
"loss": 1.2371, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.6081216243248649, |
|
"grad_norm": 0.5167328119277954, |
|
"learning_rate": 1.99576071582677e-06, |
|
"loss": 1.2754, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.6097219443888777, |
|
"grad_norm": 0.4940430819988251, |
|
"learning_rate": 1.9820881155052922e-06, |
|
"loss": 1.2542, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.6113222644528906, |
|
"grad_norm": 0.500819206237793, |
|
"learning_rate": 1.9684316817701365e-06, |
|
"loss": 1.2487, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.6129225845169034, |
|
"grad_norm": 0.5038882493972778, |
|
"learning_rate": 1.9547918409060076e-06, |
|
"loss": 1.2785, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.6145229045809162, |
|
"grad_norm": 0.5073249936103821, |
|
"learning_rate": 1.941169018679664e-06, |
|
"loss": 1.2316, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.616123224644929, |
|
"grad_norm": 0.5036085247993469, |
|
"learning_rate": 1.9275636403266297e-06, |
|
"loss": 1.2145, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.6177235447089418, |
|
"grad_norm": 0.5362631678581238, |
|
"learning_rate": 1.91397613053792e-06, |
|
"loss": 1.2449, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.6193238647729545, |
|
"grad_norm": 0.5268213748931885, |
|
"learning_rate": 1.9004069134467834e-06, |
|
"loss": 1.2062, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.6209241848369674, |
|
"grad_norm": 0.4909802973270416, |
|
"learning_rate": 1.8868564126154613e-06, |
|
"loss": 1.2528, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.6225245049009802, |
|
"grad_norm": 0.5266659259796143, |
|
"learning_rate": 1.8733250510219693e-06, |
|
"loss": 1.1999, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.624124824964993, |
|
"grad_norm": 0.5033228397369385, |
|
"learning_rate": 1.8598132510468967e-06, |
|
"loss": 1.2249, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.6257251450290058, |
|
"grad_norm": 0.510733425617218, |
|
"learning_rate": 1.846321434460212e-06, |
|
"loss": 1.2873, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.6273254650930186, |
|
"grad_norm": 0.49860432744026184, |
|
"learning_rate": 1.8328500224081069e-06, |
|
"loss": 1.2465, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.6289257851570315, |
|
"grad_norm": 0.5228532552719116, |
|
"learning_rate": 1.8193994353998462e-06, |
|
"loss": 1.2396, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.6305261052210442, |
|
"grad_norm": 0.507424533367157, |
|
"learning_rate": 1.8059700932946434e-06, |
|
"loss": 1.2233, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.632126425285057, |
|
"grad_norm": 0.52374267578125, |
|
"learning_rate": 1.7925624152885512e-06, |
|
"loss": 1.2586, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.6337267453490698, |
|
"grad_norm": 0.5252387523651123, |
|
"learning_rate": 1.7791768199013808e-06, |
|
"loss": 1.2898, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.6353270654130826, |
|
"grad_norm": 0.496242880821228, |
|
"learning_rate": 1.7658137249636358e-06, |
|
"loss": 1.2213, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.6369273854770954, |
|
"grad_norm": 0.49199914932250977, |
|
"learning_rate": 1.7524735476034689e-06, |
|
"loss": 1.2509, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.6385277055411083, |
|
"grad_norm": 0.5169931650161743, |
|
"learning_rate": 1.7391567042336626e-06, |
|
"loss": 1.2099, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.640128025605121, |
|
"grad_norm": 0.5024374723434448, |
|
"learning_rate": 1.7258636105386289e-06, |
|
"loss": 1.2134, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.6417283456691338, |
|
"grad_norm": 0.4861580431461334, |
|
"learning_rate": 1.7125946814614396e-06, |
|
"loss": 1.2247, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.6433286657331466, |
|
"grad_norm": 0.5099141001701355, |
|
"learning_rate": 1.6993503311908638e-06, |
|
"loss": 1.235, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.6449289857971594, |
|
"grad_norm": 0.5228403210639954, |
|
"learning_rate": 1.6861309731484487e-06, |
|
"loss": 1.2156, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.6465293058611722, |
|
"grad_norm": 0.5319278240203857, |
|
"learning_rate": 1.6729370199756083e-06, |
|
"loss": 1.2589, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.6481296259251851, |
|
"grad_norm": 0.5031079649925232, |
|
"learning_rate": 1.6597688835207477e-06, |
|
"loss": 1.2299, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.6497299459891979, |
|
"grad_norm": 0.4944058954715729, |
|
"learning_rate": 1.646626974826403e-06, |
|
"loss": 1.2086, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.6513302660532106, |
|
"grad_norm": 0.5121133327484131, |
|
"learning_rate": 1.633511704116412e-06, |
|
"loss": 1.2477, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.6529305861172234, |
|
"grad_norm": 0.5161622166633606, |
|
"learning_rate": 1.620423480783111e-06, |
|
"loss": 1.2717, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.6545309061812362, |
|
"grad_norm": 0.4925840198993683, |
|
"learning_rate": 1.6073627133745531e-06, |
|
"loss": 1.2057, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.656131226245249, |
|
"grad_norm": 0.5267453193664551, |
|
"learning_rate": 1.5943298095817572e-06, |
|
"loss": 1.2009, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.6577315463092619, |
|
"grad_norm": 0.5138763785362244, |
|
"learning_rate": 1.5813251762259813e-06, |
|
"loss": 1.2124, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.6593318663732747, |
|
"grad_norm": 0.5108798146247864, |
|
"learning_rate": 1.5683492192460226e-06, |
|
"loss": 1.2226, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.6609321864372875, |
|
"grad_norm": 0.5101296901702881, |
|
"learning_rate": 1.5554023436855488e-06, |
|
"loss": 1.2533, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.6625325065013002, |
|
"grad_norm": 0.4998767375946045, |
|
"learning_rate": 1.5424849536804497e-06, |
|
"loss": 1.195, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.664132826565313, |
|
"grad_norm": 0.4969738721847534, |
|
"learning_rate": 1.529597452446229e-06, |
|
"loss": 1.2269, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.6657331466293258, |
|
"grad_norm": 0.47680023312568665, |
|
"learning_rate": 1.5167402422654122e-06, |
|
"loss": 1.2024, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.6673334666933387, |
|
"grad_norm": 0.49921032786369324, |
|
"learning_rate": 1.503913724474992e-06, |
|
"loss": 1.1991, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.6689337867573515, |
|
"grad_norm": 0.5103694796562195, |
|
"learning_rate": 1.491118299453901e-06, |
|
"loss": 1.2349, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.6705341068213643, |
|
"grad_norm": 0.5037977695465088, |
|
"learning_rate": 1.478354366610511e-06, |
|
"loss": 1.2537, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.6721344268853771, |
|
"grad_norm": 0.5193389654159546, |
|
"learning_rate": 1.4656223243701692e-06, |
|
"loss": 1.2304, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.6737347469493898, |
|
"grad_norm": 7.260618209838867, |
|
"learning_rate": 1.4529225701627587e-06, |
|
"loss": 1.2909, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.6753350670134027, |
|
"grad_norm": 0.5033296942710876, |
|
"learning_rate": 1.4402555004102946e-06, |
|
"loss": 1.2202, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.6769353870774155, |
|
"grad_norm": 0.49899816513061523, |
|
"learning_rate": 1.4276215105145486e-06, |
|
"loss": 1.1994, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.6785357071414283, |
|
"grad_norm": 0.4914785325527191, |
|
"learning_rate": 1.415020994844706e-06, |
|
"loss": 1.2149, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.6801360272054411, |
|
"grad_norm": 0.5077610015869141, |
|
"learning_rate": 1.4024543467250573e-06, |
|
"loss": 1.213, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.6817363472694539, |
|
"grad_norm": 0.5179877281188965, |
|
"learning_rate": 1.389921958422719e-06, |
|
"loss": 1.2507, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.6833366673334667, |
|
"grad_norm": 0.5066015124320984, |
|
"learning_rate": 1.3774242211353906e-06, |
|
"loss": 1.2518, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.6849369873974795, |
|
"grad_norm": 0.5118170380592346, |
|
"learning_rate": 1.3649615249791396e-06, |
|
"loss": 1.2214, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.6865373074614923, |
|
"grad_norm": 0.502426266670227, |
|
"learning_rate": 1.3525342589762298e-06, |
|
"loss": 1.2264, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.6881376275255051, |
|
"grad_norm": 0.5098256468772888, |
|
"learning_rate": 1.3401428110429734e-06, |
|
"loss": 1.2319, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.6897379475895179, |
|
"grad_norm": 0.49994170665740967, |
|
"learning_rate": 1.3277875679776252e-06, |
|
"loss": 1.2075, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.6913382676535307, |
|
"grad_norm": 0.5266156196594238, |
|
"learning_rate": 1.3154689154483055e-06, |
|
"loss": 1.2298, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.6929385877175435, |
|
"grad_norm": 0.5013164281845093, |
|
"learning_rate": 1.3031872379809663e-06, |
|
"loss": 1.1819, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.6945389077815564, |
|
"grad_norm": 0.5253974199295044, |
|
"learning_rate": 1.2909429189473833e-06, |
|
"loss": 1.2661, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.6961392278455691, |
|
"grad_norm": 0.4963892996311188, |
|
"learning_rate": 1.27873634055319e-06, |
|
"loss": 1.2064, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.6977395479095819, |
|
"grad_norm": 0.5143953561782837, |
|
"learning_rate": 1.2665678838259498e-06, |
|
"loss": 1.2714, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.6993398679735947, |
|
"grad_norm": 0.5049063563346863, |
|
"learning_rate": 1.254437928603261e-06, |
|
"loss": 1.1939, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.7009401880376075, |
|
"grad_norm": 0.5228173136711121, |
|
"learning_rate": 1.242346853520899e-06, |
|
"loss": 1.2205, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.7025405081016203, |
|
"grad_norm": 0.5170574188232422, |
|
"learning_rate": 1.2302950360009994e-06, |
|
"loss": 1.2494, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.7041408281656332, |
|
"grad_norm": 0.5283452272415161, |
|
"learning_rate": 1.2182828522402737e-06, |
|
"loss": 1.2615, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.705741148229646, |
|
"grad_norm": 0.5086333751678467, |
|
"learning_rate": 1.2063106771982693e-06, |
|
"loss": 1.2446, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.7073414682936587, |
|
"grad_norm": 0.5221850872039795, |
|
"learning_rate": 1.194378884585664e-06, |
|
"loss": 1.2235, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.7089417883576715, |
|
"grad_norm": 0.525809109210968, |
|
"learning_rate": 1.1824878468526001e-06, |
|
"loss": 1.2425, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.7105421084216843, |
|
"grad_norm": 0.5064604878425598, |
|
"learning_rate": 1.1706379351770597e-06, |
|
"loss": 1.2386, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.7121424284856971, |
|
"grad_norm": 0.50041264295578, |
|
"learning_rate": 1.158829519453277e-06, |
|
"loss": 1.2283, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.71374274854971, |
|
"grad_norm": 0.5169569253921509, |
|
"learning_rate": 1.1470629682801925e-06, |
|
"loss": 1.2147, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.7153430686137228, |
|
"grad_norm": 0.5069741010665894, |
|
"learning_rate": 1.1353386489499487e-06, |
|
"loss": 1.2122, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.7169433886777355, |
|
"grad_norm": 0.5095933079719543, |
|
"learning_rate": 1.1236569274364224e-06, |
|
"loss": 1.2038, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.7185437087417483, |
|
"grad_norm": 0.5300613641738892, |
|
"learning_rate": 1.112018168383803e-06, |
|
"loss": 1.2187, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.7201440288057611, |
|
"grad_norm": 0.5320760011672974, |
|
"learning_rate": 1.1004227350952102e-06, |
|
"loss": 1.2461, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.7217443488697739, |
|
"grad_norm": 0.5251277685165405, |
|
"learning_rate": 1.0888709895213498e-06, |
|
"loss": 1.2513, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.7233446689337868, |
|
"grad_norm": 0.4987024962902069, |
|
"learning_rate": 1.077363292249222e-06, |
|
"loss": 1.2544, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.7249449889977996, |
|
"grad_norm": 0.5069317817687988, |
|
"learning_rate": 1.0659000024908588e-06, |
|
"loss": 1.2288, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.7265453090618124, |
|
"grad_norm": 0.5270032286643982, |
|
"learning_rate": 1.0544814780721185e-06, |
|
"loss": 1.2335, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.7281456291258251, |
|
"grad_norm": 0.5044519305229187, |
|
"learning_rate": 1.0431080754215092e-06, |
|
"loss": 1.2408, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.7297459491898379, |
|
"grad_norm": 0.5111990571022034, |
|
"learning_rate": 1.031780149559066e-06, |
|
"loss": 1.2371, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.7313462692538508, |
|
"grad_norm": 0.520692765712738, |
|
"learning_rate": 1.020498054085271e-06, |
|
"loss": 1.2359, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.7329465893178636, |
|
"grad_norm": 0.5196593403816223, |
|
"learning_rate": 1.0092621411700101e-06, |
|
"loss": 1.237, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.7345469093818764, |
|
"grad_norm": 0.5122175216674805, |
|
"learning_rate": 9.980727615415867e-07, |
|
"loss": 1.2347, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.7361472294458892, |
|
"grad_norm": 0.5103949308395386, |
|
"learning_rate": 9.869302644757696e-07, |
|
"loss": 1.1981, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.737747549509902, |
|
"grad_norm": 0.49739348888397217, |
|
"learning_rate": 9.758349977848917e-07, |
|
"loss": 1.2809, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.7393478695739147, |
|
"grad_norm": 0.5084463953971863, |
|
"learning_rate": 9.647873078069923e-07, |
|
"loss": 1.2486, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.7409481896379276, |
|
"grad_norm": 0.5033512711524963, |
|
"learning_rate": 9.537875393950067e-07, |
|
"loss": 1.2317, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.7425485097019404, |
|
"grad_norm": 0.5531705617904663, |
|
"learning_rate": 9.428360359060021e-07, |
|
"loss": 1.2196, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.7441488297659532, |
|
"grad_norm": 0.5110276937484741, |
|
"learning_rate": 9.319331391904587e-07, |
|
"loss": 1.2432, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.745749149829966, |
|
"grad_norm": 0.5248765349388123, |
|
"learning_rate": 9.210791895815996e-07, |
|
"loss": 1.2137, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.7473494698939788, |
|
"grad_norm": 0.4994560778141022, |
|
"learning_rate": 9.10274525884767e-07, |
|
"loss": 1.196, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.7489497899579916, |
|
"grad_norm": 0.49998924136161804, |
|
"learning_rate": 8.995194853668466e-07, |
|
"loss": 1.2414, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.7505501100220044, |
|
"grad_norm": 0.5122482776641846, |
|
"learning_rate": 8.888144037457397e-07, |
|
"loss": 1.2107, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.7521504300860172, |
|
"grad_norm": 0.5213466286659241, |
|
"learning_rate": 8.781596151798838e-07, |
|
"loss": 1.1997, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.75375075015003, |
|
"grad_norm": 0.5351124405860901, |
|
"learning_rate": 8.675554522578219e-07, |
|
"loss": 1.2645, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.7553510702140428, |
|
"grad_norm": 0.4969538450241089, |
|
"learning_rate": 8.57002245987821e-07, |
|
"loss": 1.2251, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.7569513902780556, |
|
"grad_norm": 0.5164848566055298, |
|
"learning_rate": 8.465003257875392e-07, |
|
"loss": 1.2453, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.7585517103420684, |
|
"grad_norm": 0.6240895986557007, |
|
"learning_rate": 8.360500194737445e-07, |
|
"loss": 1.2244, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.7601520304060813, |
|
"grad_norm": 0.5174912214279175, |
|
"learning_rate": 8.256516532520761e-07, |
|
"loss": 1.2375, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.761752350470094, |
|
"grad_norm": 0.5274839401245117, |
|
"learning_rate": 8.153055517068733e-07, |
|
"loss": 1.1858, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.7633526705341068, |
|
"grad_norm": 0.5240426659584045, |
|
"learning_rate": 8.050120377910326e-07, |
|
"loss": 1.2411, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.7649529905981196, |
|
"grad_norm": 0.5028566718101501, |
|
"learning_rate": 7.947714328159314e-07, |
|
"loss": 1.1977, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.7665533106621324, |
|
"grad_norm": 0.5158740282058716, |
|
"learning_rate": 7.845840564413992e-07, |
|
"loss": 1.2006, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.7681536307261452, |
|
"grad_norm": 0.5100390911102295, |
|
"learning_rate": 7.744502266657372e-07, |
|
"loss": 1.2377, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.7697539507901581, |
|
"grad_norm": 0.5089078545570374, |
|
"learning_rate": 7.643702598157937e-07, |
|
"loss": 1.2058, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.7713542708541709, |
|
"grad_norm": 0.5130503177642822, |
|
"learning_rate": 7.543444705370873e-07, |
|
"loss": 1.223, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.7729545909181836, |
|
"grad_norm": 0.5297360420227051, |
|
"learning_rate": 7.443731717839891e-07, |
|
"loss": 1.2248, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.7745549109821964, |
|
"grad_norm": 0.5005588531494141, |
|
"learning_rate": 7.344566748099518e-07, |
|
"loss": 1.2036, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.7761552310462092, |
|
"grad_norm": 0.5383617877960205, |
|
"learning_rate": 7.245952891577926e-07, |
|
"loss": 1.2134, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.7777555511102221, |
|
"grad_norm": 0.5036467909812927, |
|
"learning_rate": 7.147893226500349e-07, |
|
"loss": 1.2188, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.7793558711742349, |
|
"grad_norm": 0.5479145646095276, |
|
"learning_rate": 7.050390813792951e-07, |
|
"loss": 1.2245, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.7809561912382477, |
|
"grad_norm": 0.5276610851287842, |
|
"learning_rate": 6.953448696987308e-07, |
|
"loss": 1.2519, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.7825565113022604, |
|
"grad_norm": 0.5226728916168213, |
|
"learning_rate": 6.857069902125377e-07, |
|
"loss": 1.166, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.7841568313662732, |
|
"grad_norm": 0.5151464343070984, |
|
"learning_rate": 6.761257437665075e-07, |
|
"loss": 1.2345, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.785757151430286, |
|
"grad_norm": 0.512322187423706, |
|
"learning_rate": 6.666014294386347e-07, |
|
"loss": 1.2238, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.7873574714942989, |
|
"grad_norm": 0.5147780179977417, |
|
"learning_rate": 6.571343445297817e-07, |
|
"loss": 1.218, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.7889577915583117, |
|
"grad_norm": 1.6754525899887085, |
|
"learning_rate": 6.47724784554398e-07, |
|
"loss": 1.2543, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.7905581116223245, |
|
"grad_norm": 0.5027932524681091, |
|
"learning_rate": 6.383730432312954e-07, |
|
"loss": 1.2238, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.7921584316863373, |
|
"grad_norm": 0.5050190687179565, |
|
"learning_rate": 6.290794124744809e-07, |
|
"loss": 1.2441, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.79375875175035, |
|
"grad_norm": 0.5184482932090759, |
|
"learning_rate": 6.198441823840439e-07, |
|
"loss": 1.2078, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.7953590718143628, |
|
"grad_norm": 0.5082205533981323, |
|
"learning_rate": 6.106676412371002e-07, |
|
"loss": 1.2144, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.7969593918783757, |
|
"grad_norm": 0.5187720656394958, |
|
"learning_rate": 6.015500754787942e-07, |
|
"loss": 1.2269, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.7985597119423885, |
|
"grad_norm": 0.5122787952423096, |
|
"learning_rate": 5.924917697133579e-07, |
|
"loss": 1.2332, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.8001600320064013, |
|
"grad_norm": 0.5063095092773438, |
|
"learning_rate": 5.834930066952254e-07, |
|
"loss": 1.2522, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.8017603520704141, |
|
"grad_norm": 0.5298603177070618, |
|
"learning_rate": 5.745540673202088e-07, |
|
"loss": 1.2776, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.8033606721344269, |
|
"grad_norm": 0.532569944858551, |
|
"learning_rate": 5.656752306167279e-07, |
|
"loss": 1.2488, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.8049609921984396, |
|
"grad_norm": 0.5093346834182739, |
|
"learning_rate": 5.568567737371023e-07, |
|
"loss": 1.2637, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.8065613122624525, |
|
"grad_norm": 0.5202427506446838, |
|
"learning_rate": 5.480989719489e-07, |
|
"loss": 1.2482, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.8081616323264653, |
|
"grad_norm": 0.5047549605369568, |
|
"learning_rate": 5.394020986263412e-07, |
|
"loss": 1.2504, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.8097619523904781, |
|
"grad_norm": 0.5232532620429993, |
|
"learning_rate": 5.30766425241771e-07, |
|
"loss": 1.2636, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.8113622724544909, |
|
"grad_norm": 0.49118658900260925, |
|
"learning_rate": 5.221922213571809e-07, |
|
"loss": 1.2292, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.8129625925185037, |
|
"grad_norm": 0.5222775936126709, |
|
"learning_rate": 5.136797546157974e-07, |
|
"loss": 1.2117, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.8145629125825165, |
|
"grad_norm": 0.49552589654922485, |
|
"learning_rate": 5.052292907337239e-07, |
|
"loss": 1.192, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.8161632326465293, |
|
"grad_norm": 0.5257281064987183, |
|
"learning_rate": 4.968410934916502e-07, |
|
"loss": 1.2259, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.8177635527105421, |
|
"grad_norm": 0.5237207412719727, |
|
"learning_rate": 4.885154247266163e-07, |
|
"loss": 1.2252, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.8193638727745549, |
|
"grad_norm": 5.458524227142334, |
|
"learning_rate": 4.80252544323839e-07, |
|
"loss": 1.2544, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.8209641928385677, |
|
"grad_norm": 0.4922438859939575, |
|
"learning_rate": 4.7205271020860143e-07, |
|
"loss": 1.2508, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.8225645129025805, |
|
"grad_norm": 0.5238988995552063, |
|
"learning_rate": 4.6391617833820066e-07, |
|
"loss": 1.2277, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.8241648329665933, |
|
"grad_norm": 0.5212239623069763, |
|
"learning_rate": 4.5584320269395827e-07, |
|
"loss": 1.256, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.8257651530306062, |
|
"grad_norm": 0.4987280070781708, |
|
"learning_rate": 4.478340352732924e-07, |
|
"loss": 1.2181, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.8273654730946189, |
|
"grad_norm": 0.519672155380249, |
|
"learning_rate": 4.3988892608185035e-07, |
|
"loss": 1.2403, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.8289657931586317, |
|
"grad_norm": 0.5267203450202942, |
|
"learning_rate": 4.3200812312570894e-07, |
|
"loss": 1.1784, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.8305661132226445, |
|
"grad_norm": 0.5164812207221985, |
|
"learning_rate": 4.241918724036262e-07, |
|
"loss": 1.2642, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.8321664332866573, |
|
"grad_norm": 0.5407664775848389, |
|
"learning_rate": 4.164404178993689e-07, |
|
"loss": 1.2224, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.8337667533506702, |
|
"grad_norm": 0.5061764717102051, |
|
"learning_rate": 4.087540015740929e-07, |
|
"loss": 1.2286, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.835367073414683, |
|
"grad_norm": 0.8010200262069702, |
|
"learning_rate": 4.011328633587919e-07, |
|
"loss": 1.2238, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.8369673934786958, |
|
"grad_norm": 0.5010237693786621, |
|
"learning_rate": 3.935772411468075e-07, |
|
"loss": 1.2296, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.8385677135427085, |
|
"grad_norm": 0.5492453575134277, |
|
"learning_rate": 3.8608737078640326e-07, |
|
"loss": 1.2459, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.8401680336067213, |
|
"grad_norm": 0.492929071187973, |
|
"learning_rate": 3.7866348607340284e-07, |
|
"loss": 1.1894, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.8417683536707341, |
|
"grad_norm": 0.49885085225105286, |
|
"learning_rate": 3.7130581874389285e-07, |
|
"loss": 1.1877, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.843368673734747, |
|
"grad_norm": 0.5160171389579773, |
|
"learning_rate": 3.640145984669882e-07, |
|
"loss": 1.1935, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.8449689937987598, |
|
"grad_norm": 0.4957447052001953, |
|
"learning_rate": 3.5679005283766136e-07, |
|
"loss": 1.2315, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.8465693138627726, |
|
"grad_norm": 0.4999656677246094, |
|
"learning_rate": 3.496324073696439e-07, |
|
"loss": 1.2198, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.8481696339267853, |
|
"grad_norm": 0.5246298909187317, |
|
"learning_rate": 3.4254188548837967e-07, |
|
"loss": 1.1953, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.8497699539907981, |
|
"grad_norm": 0.5125941634178162, |
|
"learning_rate": 3.355187085240555e-07, |
|
"loss": 1.193, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.8513702740548109, |
|
"grad_norm": 0.5065252184867859, |
|
"learning_rate": 3.285630957046912e-07, |
|
"loss": 1.2423, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.8529705941188238, |
|
"grad_norm": 0.48778149485588074, |
|
"learning_rate": 3.216752641492951e-07, |
|
"loss": 1.229, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.8545709141828366, |
|
"grad_norm": 0.5513453483581543, |
|
"learning_rate": 3.1485542886108945e-07, |
|
"loss": 1.2244, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.8561712342468494, |
|
"grad_norm": 0.5131115317344666, |
|
"learning_rate": 3.081038027207944e-07, |
|
"loss": 1.1894, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.8577715543108622, |
|
"grad_norm": 0.49720299243927, |
|
"learning_rate": 3.014205964799888e-07, |
|
"loss": 1.2314, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.8593718743748749, |
|
"grad_norm": 0.511868417263031, |
|
"learning_rate": 2.948060187545276e-07, |
|
"loss": 1.2065, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.8609721944388877, |
|
"grad_norm": 0.5046089291572571, |
|
"learning_rate": 2.8826027601803085e-07, |
|
"loss": 1.2016, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.8625725145029006, |
|
"grad_norm": 0.5005461573600769, |
|
"learning_rate": 2.8178357259543876e-07, |
|
"loss": 1.2073, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.8641728345669134, |
|
"grad_norm": 0.5057045221328735, |
|
"learning_rate": 2.7537611065663497e-07, |
|
"loss": 1.2147, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.8657731546309262, |
|
"grad_norm": 0.4953499436378479, |
|
"learning_rate": 2.690380902101339e-07, |
|
"loss": 1.2347, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.867373474694939, |
|
"grad_norm": 0.5268727540969849, |
|
"learning_rate": 2.6276970909683674e-07, |
|
"loss": 1.2551, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.8689737947589518, |
|
"grad_norm": 0.5176235437393188, |
|
"learning_rate": 2.5657116298385964e-07, |
|
"loss": 1.2194, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.8705741148229645, |
|
"grad_norm": 0.5456222891807556, |
|
"learning_rate": 2.5044264535842244e-07, |
|
"loss": 1.2365, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.8721744348869774, |
|
"grad_norm": 0.5065416693687439, |
|
"learning_rate": 2.443843475218105e-07, |
|
"loss": 1.2296, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.8737747549509902, |
|
"grad_norm": 0.5186034440994263, |
|
"learning_rate": 2.383964585834031e-07, |
|
"loss": 1.2013, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.875375075015003, |
|
"grad_norm": 0.5220500826835632, |
|
"learning_rate": 2.3247916545477007e-07, |
|
"loss": 1.2373, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.8769753950790158, |
|
"grad_norm": 0.5157853960990906, |
|
"learning_rate": 2.2663265284383757e-07, |
|
"loss": 1.241, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.8785757151430286, |
|
"grad_norm": 0.48508715629577637, |
|
"learning_rate": 2.2085710324912275e-07, |
|
"loss": 1.2457, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.8801760352070415, |
|
"grad_norm": 0.5108803510665894, |
|
"learning_rate": 2.1515269695403656e-07, |
|
"loss": 1.1906, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.8817763552710542, |
|
"grad_norm": 0.5034008026123047, |
|
"learning_rate": 2.0951961202125588e-07, |
|
"loss": 1.1725, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.883376675335067, |
|
"grad_norm": 0.5276970863342285, |
|
"learning_rate": 2.0395802428716666e-07, |
|
"loss": 1.2374, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.8849769953990798, |
|
"grad_norm": 0.4890348017215729, |
|
"learning_rate": 1.9846810735637378e-07, |
|
"loss": 1.2215, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.8865773154630926, |
|
"grad_norm": 0.5114810466766357, |
|
"learning_rate": 1.9305003259628307e-07, |
|
"loss": 1.2035, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.8881776355271054, |
|
"grad_norm": 0.506430983543396, |
|
"learning_rate": 1.8770396913175115e-07, |
|
"loss": 1.2278, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.8897779555911183, |
|
"grad_norm": 0.5225040912628174, |
|
"learning_rate": 1.8243008383980716e-07, |
|
"loss": 1.212, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.8913782756551311, |
|
"grad_norm": 0.5081613659858704, |
|
"learning_rate": 1.7722854134444246e-07, |
|
"loss": 1.2222, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.8929785957191438, |
|
"grad_norm": 0.5115835070610046, |
|
"learning_rate": 1.7209950401147368e-07, |
|
"loss": 1.2043, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.8945789157831566, |
|
"grad_norm": 0.5055177211761475, |
|
"learning_rate": 1.6704313194347204e-07, |
|
"loss": 1.2989, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.8961792358471694, |
|
"grad_norm": 0.49186971783638, |
|
"learning_rate": 1.6205958297476787e-07, |
|
"loss": 1.2083, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.8977795559111822, |
|
"grad_norm": 0.5076245665550232, |
|
"learning_rate": 1.5714901266652426e-07, |
|
"loss": 1.2493, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.8993798759751951, |
|
"grad_norm": 0.5159875154495239, |
|
"learning_rate": 1.5231157430187853e-07, |
|
"loss": 1.2123, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.9009801960392079, |
|
"grad_norm": 0.48809802532196045, |
|
"learning_rate": 1.4754741888115959e-07, |
|
"loss": 1.2238, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.9025805161032207, |
|
"grad_norm": 0.5183054208755493, |
|
"learning_rate": 1.4285669511717364e-07, |
|
"loss": 1.2591, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.9041808361672334, |
|
"grad_norm": 0.5310025215148926, |
|
"learning_rate": 1.3823954943056355e-07, |
|
"loss": 1.215, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.9057811562312462, |
|
"grad_norm": 0.4965580105781555, |
|
"learning_rate": 1.3369612594523544e-07, |
|
"loss": 1.2061, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.907381476295259, |
|
"grad_norm": 0.5230700969696045, |
|
"learning_rate": 1.2922656648386266e-07, |
|
"loss": 1.2409, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.9089817963592719, |
|
"grad_norm": 0.5221272706985474, |
|
"learning_rate": 1.2483101056345815e-07, |
|
"loss": 1.2387, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.9105821164232847, |
|
"grad_norm": 0.5355955958366394, |
|
"learning_rate": 1.2050959539101846e-07, |
|
"loss": 1.2481, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.9121824364872975, |
|
"grad_norm": 0.4908137619495392, |
|
"learning_rate": 1.1626245585924123e-07, |
|
"loss": 1.196, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.9137827565513102, |
|
"grad_norm": 0.5292563438415527, |
|
"learning_rate": 1.1208972454231526e-07, |
|
"loss": 1.2779, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.915383076615323, |
|
"grad_norm": 0.5044008493423462, |
|
"learning_rate": 1.0799153169178206e-07, |
|
"loss": 1.2197, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.9169833966793358, |
|
"grad_norm": 1.3565585613250732, |
|
"learning_rate": 1.0396800523246847e-07, |
|
"loss": 1.2523, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.9185837167433487, |
|
"grad_norm": 0.5061067938804626, |
|
"learning_rate": 1.0001927075849543e-07, |
|
"loss": 1.231, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.9201840368073615, |
|
"grad_norm": 0.5245989561080933, |
|
"learning_rate": 9.614545152935695e-08, |
|
"loss": 1.2345, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.9217843568713743, |
|
"grad_norm": 0.5027105808258057, |
|
"learning_rate": 9.234666846607204e-08, |
|
"loss": 1.3076, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.9233846769353871, |
|
"grad_norm": 0.5443490743637085, |
|
"learning_rate": 8.862304014741113e-08, |
|
"loss": 1.2521, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.9249849969993998, |
|
"grad_norm": 0.49271267652511597, |
|
"learning_rate": 8.497468280619397e-08, |
|
"loss": 1.2442, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.9265853170634126, |
|
"grad_norm": 0.5291959047317505, |
|
"learning_rate": 8.140171032566091e-08, |
|
"loss": 1.2284, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.9281856371274255, |
|
"grad_norm": 0.5186416506767273, |
|
"learning_rate": 7.790423423591958e-08, |
|
"loss": 1.2079, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.9297859571914383, |
|
"grad_norm": 0.5180310010910034, |
|
"learning_rate": 7.44823637104622e-08, |
|
"loss": 1.2409, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.9313862772554511, |
|
"grad_norm": 0.5114362835884094, |
|
"learning_rate": 7.113620556275819e-08, |
|
"loss": 1.2422, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.9329865973194639, |
|
"grad_norm": 0.48198938369750977, |
|
"learning_rate": 6.786586424291975e-08, |
|
"loss": 1.2177, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.9345869173834767, |
|
"grad_norm": 0.5073717832565308, |
|
"learning_rate": 6.467144183444213e-08, |
|
"loss": 1.2919, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.9361872374474896, |
|
"grad_norm": 0.5134086608886719, |
|
"learning_rate": 6.155303805101597e-08, |
|
"loss": 1.2253, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.9377875575115023, |
|
"grad_norm": 0.5133287310600281, |
|
"learning_rate": 5.851075023341618e-08, |
|
"loss": 1.2342, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.9393878775755151, |
|
"grad_norm": 0.5021890997886658, |
|
"learning_rate": 5.55446733464618e-08, |
|
"loss": 1.2029, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.9409881976395279, |
|
"grad_norm": 0.5119520425796509, |
|
"learning_rate": 5.265489997605372e-08, |
|
"loss": 1.2402, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.9425885177035407, |
|
"grad_norm": 0.4851899743080139, |
|
"learning_rate": 4.98415203262817e-08, |
|
"loss": 1.1988, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.9441888377675535, |
|
"grad_norm": 0.4942720830440521, |
|
"learning_rate": 4.7104622216611884e-08, |
|
"loss": 1.2398, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.9457891578315664, |
|
"grad_norm": 3.8196513652801514, |
|
"learning_rate": 4.444429107914372e-08, |
|
"loss": 1.3265, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.9473894778955791, |
|
"grad_norm": 0.5300382971763611, |
|
"learning_rate": 4.186060995594293e-08, |
|
"loss": 1.2989, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.9489897979595919, |
|
"grad_norm": 0.5063347816467285, |
|
"learning_rate": 3.935365949645109e-08, |
|
"loss": 1.2128, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.9505901180236047, |
|
"grad_norm": 0.5129570364952087, |
|
"learning_rate": 3.692351795496568e-08, |
|
"loss": 1.2242, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.9521904380876175, |
|
"grad_norm": 0.4922707974910736, |
|
"learning_rate": 3.457026118820012e-08, |
|
"loss": 1.2326, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.9537907581516303, |
|
"grad_norm": 0.48577573895454407, |
|
"learning_rate": 3.229396265291285e-08, |
|
"loss": 1.2291, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.9553910782156432, |
|
"grad_norm": 0.491740882396698, |
|
"learning_rate": 3.009469340361726e-08, |
|
"loss": 1.228, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.956991398279656, |
|
"grad_norm": 0.5267091393470764, |
|
"learning_rate": 2.797252209036233e-08, |
|
"loss": 1.2411, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.9585917183436687, |
|
"grad_norm": 0.5358031988143921, |
|
"learning_rate": 2.5927514956589628e-08, |
|
"loss": 1.2506, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.9601920384076815, |
|
"grad_norm": 0.50943922996521, |
|
"learning_rate": 2.395973583706607e-08, |
|
"loss": 1.2033, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.9617923584716943, |
|
"grad_norm": 0.5052767992019653, |
|
"learning_rate": 2.206924615589079e-08, |
|
"loss": 1.1939, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.9633926785357071, |
|
"grad_norm": 0.5414103269577026, |
|
"learning_rate": 2.0256104924578357e-08, |
|
"loss": 1.2267, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.96499299859972, |
|
"grad_norm": 0.501817524433136, |
|
"learning_rate": 1.8520368740215787e-08, |
|
"loss": 1.2503, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.9665933186637328, |
|
"grad_norm": 0.5033535957336426, |
|
"learning_rate": 1.6862091783697576e-08, |
|
"loss": 1.2043, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.9681936387277456, |
|
"grad_norm": 0.5262925624847412, |
|
"learning_rate": 1.5281325818032045e-08, |
|
"loss": 1.2777, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.9697939587917583, |
|
"grad_norm": 0.503886342048645, |
|
"learning_rate": 1.3778120186728472e-08, |
|
"loss": 1.2007, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.9713942788557711, |
|
"grad_norm": 4.596221923828125, |
|
"learning_rate": 1.2352521812253603e-08, |
|
"loss": 1.2417, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.9729945989197839, |
|
"grad_norm": 0.5384092330932617, |
|
"learning_rate": 1.1004575194570044e-08, |
|
"loss": 1.2333, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.9745949189837968, |
|
"grad_norm": 0.4920881390571594, |
|
"learning_rate": 9.734322409744867e-09, |
|
"loss": 1.2, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.9761952390478096, |
|
"grad_norm": 0.5168455839157104, |
|
"learning_rate": 8.541803108637613e-09, |
|
"loss": 1.1556, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.9777955591118224, |
|
"grad_norm": 0.5123913884162903, |
|
"learning_rate": 7.427054515661835e-09, |
|
"loss": 1.2644, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.9793958791758351, |
|
"grad_norm": 0.5197492837905884, |
|
"learning_rate": 6.390111427623524e-09, |
|
"loss": 1.2119, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.9809961992398479, |
|
"grad_norm": 0.5286774039268494, |
|
"learning_rate": 5.43100621263476e-09, |
|
"loss": 1.208, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.9825965193038608, |
|
"grad_norm": 0.5219496488571167, |
|
"learning_rate": 4.549768809103406e-09, |
|
"loss": 1.2614, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.9841968393678736, |
|
"grad_norm": 0.4979791045188904, |
|
"learning_rate": 3.74642672479858e-09, |
|
"loss": 1.2224, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.9857971594318864, |
|
"grad_norm": 0.5516430735588074, |
|
"learning_rate": 3.021005035992175e-09, |
|
"loss": 1.2278, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.9873974794958992, |
|
"grad_norm": 0.5197909474372864, |
|
"learning_rate": 2.373526386675873e-09, |
|
"loss": 1.2345, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.988997799559912, |
|
"grad_norm": 0.5151229500770569, |
|
"learning_rate": 1.8040109878539326e-09, |
|
"loss": 1.2125, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.9905981196239247, |
|
"grad_norm": 0.5217447280883789, |
|
"learning_rate": 1.3124766169131386e-09, |
|
"loss": 1.2251, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.9921984396879376, |
|
"grad_norm": 0.4913589060306549, |
|
"learning_rate": 8.989386170674131e-10, |
|
"loss": 1.232, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.9937987597519504, |
|
"grad_norm": 0.5131626725196838, |
|
"learning_rate": 5.634098968793078e-10, |
|
"loss": 1.2282, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.9953990798159632, |
|
"grad_norm": 0.48121556639671326, |
|
"learning_rate": 3.059009298558846e-10, |
|
"loss": 1.2193, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.996999399879976, |
|
"grad_norm": 0.5183552503585815, |
|
"learning_rate": 1.264197541234191e-10, |
|
"loss": 1.2284, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.9985997199439888, |
|
"grad_norm": 0.513854444026947, |
|
"learning_rate": 2.4971972175102943e-11, |
|
"loss": 1.177, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.9998799759951991, |
|
"step": 6248, |
|
"total_flos": 1.306193982790828e+18, |
|
"train_loss": 1.279416140650665, |
|
"train_runtime": 62601.4894, |
|
"train_samples_per_second": 1.597, |
|
"train_steps_per_second": 0.1 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 6248, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.306193982790828e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|