qd-zh-phi-1_5 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.9999777750613963,
"eval_steps": 500,
"global_step": 51422,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 4.884139060974121,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.1318,
"step": 100
},
{
"epoch": 0.02,
"grad_norm": 3.1436049938201904,
"learning_rate": 4.0000000000000003e-07,
"loss": 3.778,
"step": 200
},
{
"epoch": 0.02,
"grad_norm": 2.9424259662628174,
"learning_rate": 6.000000000000001e-07,
"loss": 3.3925,
"step": 300
},
{
"epoch": 0.03,
"grad_norm": 4.712306022644043,
"learning_rate": 8.000000000000001e-07,
"loss": 3.0483,
"step": 400
},
{
"epoch": 0.04,
"grad_norm": 3.5699660778045654,
"learning_rate": 1.0000000000000002e-06,
"loss": 2.7775,
"step": 500
},
{
"epoch": 0.05,
"grad_norm": 3.2760748863220215,
"learning_rate": 1.2000000000000002e-06,
"loss": 2.5476,
"step": 600
},
{
"epoch": 0.05,
"grad_norm": 4.388134002685547,
"learning_rate": 1.4000000000000001e-06,
"loss": 2.3882,
"step": 700
},
{
"epoch": 0.06,
"grad_norm": 3.93226957321167,
"learning_rate": 1.6000000000000001e-06,
"loss": 2.2654,
"step": 800
},
{
"epoch": 0.07,
"grad_norm": 3.6425278186798096,
"learning_rate": 1.8000000000000001e-06,
"loss": 2.1592,
"step": 900
},
{
"epoch": 0.08,
"grad_norm": 3.260810613632202,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.032,
"step": 1000
},
{
"epoch": 0.09,
"grad_norm": 3.389195203781128,
"learning_rate": 2.2e-06,
"loss": 1.9446,
"step": 1100
},
{
"epoch": 0.09,
"grad_norm": 3.6105241775512695,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.8368,
"step": 1200
},
{
"epoch": 0.1,
"grad_norm": 3.270883083343506,
"learning_rate": 2.6e-06,
"loss": 1.7411,
"step": 1300
},
{
"epoch": 0.11,
"grad_norm": 2.950645685195923,
"learning_rate": 2.8000000000000003e-06,
"loss": 1.6893,
"step": 1400
},
{
"epoch": 0.12,
"grad_norm": 2.6197257041931152,
"learning_rate": 3e-06,
"loss": 1.6173,
"step": 1500
},
{
"epoch": 0.12,
"grad_norm": 3.4023056030273438,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.5633,
"step": 1600
},
{
"epoch": 0.13,
"grad_norm": 3.480717897415161,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.4983,
"step": 1700
},
{
"epoch": 0.14,
"grad_norm": 3.2709102630615234,
"learning_rate": 3.6000000000000003e-06,
"loss": 1.4771,
"step": 1800
},
{
"epoch": 0.15,
"grad_norm": 2.3203020095825195,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.4192,
"step": 1900
},
{
"epoch": 0.16,
"grad_norm": 2.908491373062134,
"learning_rate": 4.000000000000001e-06,
"loss": 1.3816,
"step": 2000
},
{
"epoch": 0.16,
"grad_norm": 2.9639596939086914,
"learning_rate": 4.2000000000000004e-06,
"loss": 1.3685,
"step": 2100
},
{
"epoch": 0.17,
"grad_norm": 3.183112621307373,
"learning_rate": 4.4e-06,
"loss": 1.3234,
"step": 2200
},
{
"epoch": 0.18,
"grad_norm": 2.953932285308838,
"learning_rate": 4.600000000000001e-06,
"loss": 1.3108,
"step": 2300
},
{
"epoch": 0.19,
"grad_norm": 2.4042069911956787,
"learning_rate": 4.800000000000001e-06,
"loss": 1.278,
"step": 2400
},
{
"epoch": 0.19,
"grad_norm": 2.5564184188842773,
"learning_rate": 5e-06,
"loss": 1.2395,
"step": 2500
},
{
"epoch": 0.2,
"grad_norm": 2.2440097332000732,
"learning_rate": 5.2e-06,
"loss": 1.2298,
"step": 2600
},
{
"epoch": 0.21,
"grad_norm": 2.2690846920013428,
"learning_rate": 5.400000000000001e-06,
"loss": 1.2058,
"step": 2700
},
{
"epoch": 0.22,
"grad_norm": 2.678061008453369,
"learning_rate": 5.600000000000001e-06,
"loss": 1.1865,
"step": 2800
},
{
"epoch": 0.23,
"grad_norm": 2.536303758621216,
"learning_rate": 5.8e-06,
"loss": 1.1636,
"step": 2900
},
{
"epoch": 0.23,
"grad_norm": 2.5350258350372314,
"learning_rate": 6e-06,
"loss": 1.1476,
"step": 3000
},
{
"epoch": 0.24,
"grad_norm": 2.197143316268921,
"learning_rate": 6.200000000000001e-06,
"loss": 1.1329,
"step": 3100
},
{
"epoch": 0.25,
"grad_norm": 1.8976725339889526,
"learning_rate": 6.4000000000000006e-06,
"loss": 1.1356,
"step": 3200
},
{
"epoch": 0.26,
"grad_norm": 2.1533405780792236,
"learning_rate": 6.600000000000001e-06,
"loss": 1.1107,
"step": 3300
},
{
"epoch": 0.26,
"grad_norm": 2.5432329177856445,
"learning_rate": 6.800000000000001e-06,
"loss": 1.1003,
"step": 3400
},
{
"epoch": 0.27,
"grad_norm": 2.5095925331115723,
"learning_rate": 7e-06,
"loss": 1.0753,
"step": 3500
},
{
"epoch": 0.28,
"grad_norm": 2.0674924850463867,
"learning_rate": 7.2000000000000005e-06,
"loss": 1.056,
"step": 3600
},
{
"epoch": 0.29,
"grad_norm": 1.9934577941894531,
"learning_rate": 7.4e-06,
"loss": 1.0414,
"step": 3700
},
{
"epoch": 0.3,
"grad_norm": 1.6853481531143188,
"learning_rate": 7.600000000000001e-06,
"loss": 1.0322,
"step": 3800
},
{
"epoch": 0.3,
"grad_norm": 2.2070693969726562,
"learning_rate": 7.800000000000002e-06,
"loss": 1.047,
"step": 3900
},
{
"epoch": 0.31,
"grad_norm": 2.402536153793335,
"learning_rate": 8.000000000000001e-06,
"loss": 1.0119,
"step": 4000
},
{
"epoch": 0.32,
"grad_norm": 1.525566577911377,
"learning_rate": 8.2e-06,
"loss": 1.0262,
"step": 4100
},
{
"epoch": 0.33,
"grad_norm": 1.9584845304489136,
"learning_rate": 8.400000000000001e-06,
"loss": 0.9944,
"step": 4200
},
{
"epoch": 0.33,
"grad_norm": 2.0090949535369873,
"learning_rate": 8.6e-06,
"loss": 0.9995,
"step": 4300
},
{
"epoch": 0.34,
"grad_norm": 1.8657029867172241,
"learning_rate": 8.8e-06,
"loss": 0.9853,
"step": 4400
},
{
"epoch": 0.35,
"grad_norm": 2.4438652992248535,
"learning_rate": 9e-06,
"loss": 0.9879,
"step": 4500
},
{
"epoch": 0.36,
"grad_norm": 1.7610666751861572,
"learning_rate": 9.200000000000002e-06,
"loss": 0.9821,
"step": 4600
},
{
"epoch": 0.37,
"grad_norm": 1.8249353170394897,
"learning_rate": 9.4e-06,
"loss": 0.9569,
"step": 4700
},
{
"epoch": 0.37,
"grad_norm": 2.3479127883911133,
"learning_rate": 9.600000000000001e-06,
"loss": 0.9497,
"step": 4800
},
{
"epoch": 0.38,
"grad_norm": 1.7944222688674927,
"learning_rate": 9.800000000000001e-06,
"loss": 0.9274,
"step": 4900
},
{
"epoch": 0.39,
"grad_norm": 1.6285799741744995,
"learning_rate": 1e-05,
"loss": 0.9467,
"step": 5000
},
{
"epoch": 0.4,
"grad_norm": 1.8791054487228394,
"learning_rate": 9.983129481231549e-06,
"loss": 0.9475,
"step": 5100
},
{
"epoch": 0.4,
"grad_norm": 2.0971226692199707,
"learning_rate": 9.966258962463097e-06,
"loss": 0.9199,
"step": 5200
},
{
"epoch": 0.41,
"grad_norm": 1.7968696355819702,
"learning_rate": 9.949388443694644e-06,
"loss": 0.9175,
"step": 5300
},
{
"epoch": 0.42,
"grad_norm": 1.9605484008789062,
"learning_rate": 9.932517924926192e-06,
"loss": 0.9133,
"step": 5400
},
{
"epoch": 0.43,
"grad_norm": 1.3476393222808838,
"learning_rate": 9.91564740615774e-06,
"loss": 0.8838,
"step": 5500
},
{
"epoch": 0.44,
"grad_norm": 1.5439773797988892,
"learning_rate": 9.898776887389287e-06,
"loss": 0.8928,
"step": 5600
},
{
"epoch": 0.44,
"grad_norm": 1.4250659942626953,
"learning_rate": 9.881906368620837e-06,
"loss": 0.8641,
"step": 5700
},
{
"epoch": 0.45,
"grad_norm": 1.425870656967163,
"learning_rate": 9.865035849852383e-06,
"loss": 0.8678,
"step": 5800
},
{
"epoch": 0.46,
"grad_norm": 1.879429578781128,
"learning_rate": 9.848165331083931e-06,
"loss": 0.8709,
"step": 5900
},
{
"epoch": 0.47,
"grad_norm": 1.5838955640792847,
"learning_rate": 9.83129481231548e-06,
"loss": 0.8754,
"step": 6000
},
{
"epoch": 0.47,
"grad_norm": 1.762133240699768,
"learning_rate": 9.814424293547026e-06,
"loss": 0.8494,
"step": 6100
},
{
"epoch": 0.48,
"grad_norm": 1.445033073425293,
"learning_rate": 9.797553774778576e-06,
"loss": 0.8417,
"step": 6200
},
{
"epoch": 0.49,
"grad_norm": 1.6173651218414307,
"learning_rate": 9.780683256010123e-06,
"loss": 0.8511,
"step": 6300
},
{
"epoch": 0.5,
"grad_norm": 1.468042254447937,
"learning_rate": 9.76381273724167e-06,
"loss": 0.8587,
"step": 6400
},
{
"epoch": 0.51,
"grad_norm": 1.739689588546753,
"learning_rate": 9.746942218473219e-06,
"loss": 0.8272,
"step": 6500
},
{
"epoch": 0.51,
"grad_norm": 1.5442858934402466,
"learning_rate": 9.730071699704765e-06,
"loss": 0.8188,
"step": 6600
},
{
"epoch": 0.52,
"grad_norm": 1.511681318283081,
"learning_rate": 9.713201180936315e-06,
"loss": 0.837,
"step": 6700
},
{
"epoch": 0.53,
"grad_norm": 1.3714661598205566,
"learning_rate": 9.696330662167862e-06,
"loss": 0.8192,
"step": 6800
},
{
"epoch": 0.54,
"grad_norm": 1.3328415155410767,
"learning_rate": 9.67946014339941e-06,
"loss": 0.8184,
"step": 6900
},
{
"epoch": 0.54,
"grad_norm": 1.334816336631775,
"learning_rate": 9.662589624630958e-06,
"loss": 0.8153,
"step": 7000
},
{
"epoch": 0.55,
"grad_norm": 1.5207791328430176,
"learning_rate": 9.645719105862507e-06,
"loss": 0.8119,
"step": 7100
},
{
"epoch": 0.56,
"grad_norm": 1.4754551649093628,
"learning_rate": 9.628848587094055e-06,
"loss": 0.8048,
"step": 7200
},
{
"epoch": 0.57,
"grad_norm": 1.8752347230911255,
"learning_rate": 9.611978068325601e-06,
"loss": 0.8074,
"step": 7300
},
{
"epoch": 0.58,
"grad_norm": 1.369080662727356,
"learning_rate": 9.59510754955715e-06,
"loss": 0.8029,
"step": 7400
},
{
"epoch": 0.58,
"grad_norm": 1.7981059551239014,
"learning_rate": 9.578237030788698e-06,
"loss": 0.7985,
"step": 7500
},
{
"epoch": 0.59,
"grad_norm": 1.5368590354919434,
"learning_rate": 9.561366512020246e-06,
"loss": 0.7893,
"step": 7600
},
{
"epoch": 0.6,
"grad_norm": 1.3475455045700073,
"learning_rate": 9.544495993251794e-06,
"loss": 0.7867,
"step": 7700
},
{
"epoch": 0.61,
"grad_norm": 1.386738657951355,
"learning_rate": 9.52762547448334e-06,
"loss": 0.7761,
"step": 7800
},
{
"epoch": 0.61,
"grad_norm": 1.6115236282348633,
"learning_rate": 9.510754955714889e-06,
"loss": 0.8028,
"step": 7900
},
{
"epoch": 0.62,
"grad_norm": 1.6878514289855957,
"learning_rate": 9.493884436946437e-06,
"loss": 0.7722,
"step": 8000
},
{
"epoch": 0.63,
"grad_norm": 1.4150365591049194,
"learning_rate": 9.477013918177985e-06,
"loss": 0.7835,
"step": 8100
},
{
"epoch": 0.64,
"grad_norm": 1.39914870262146,
"learning_rate": 9.460143399409532e-06,
"loss": 0.7764,
"step": 8200
},
{
"epoch": 0.65,
"grad_norm": 1.3100072145462036,
"learning_rate": 9.44327288064108e-06,
"loss": 0.7824,
"step": 8300
},
{
"epoch": 0.65,
"grad_norm": 1.5020335912704468,
"learning_rate": 9.426402361872628e-06,
"loss": 0.7649,
"step": 8400
},
{
"epoch": 0.66,
"grad_norm": 1.3903230428695679,
"learning_rate": 9.409531843104176e-06,
"loss": 0.7664,
"step": 8500
},
{
"epoch": 0.67,
"grad_norm": 1.3603887557983398,
"learning_rate": 9.392661324335725e-06,
"loss": 0.7588,
"step": 8600
},
{
"epoch": 0.68,
"grad_norm": 1.2914139032363892,
"learning_rate": 9.375790805567271e-06,
"loss": 0.7467,
"step": 8700
},
{
"epoch": 0.68,
"grad_norm": 1.2703696489334106,
"learning_rate": 9.35892028679882e-06,
"loss": 0.7515,
"step": 8800
},
{
"epoch": 0.69,
"grad_norm": 1.3954447507858276,
"learning_rate": 9.342049768030368e-06,
"loss": 0.752,
"step": 8900
},
{
"epoch": 0.7,
"grad_norm": 1.2770296335220337,
"learning_rate": 9.325179249261916e-06,
"loss": 0.7462,
"step": 9000
},
{
"epoch": 0.71,
"grad_norm": 1.3484748601913452,
"learning_rate": 9.308308730493464e-06,
"loss": 0.7368,
"step": 9100
},
{
"epoch": 0.72,
"grad_norm": 1.2490614652633667,
"learning_rate": 9.29143821172501e-06,
"loss": 0.7488,
"step": 9200
},
{
"epoch": 0.72,
"grad_norm": 1.2963238954544067,
"learning_rate": 9.274567692956559e-06,
"loss": 0.739,
"step": 9300
},
{
"epoch": 0.73,
"grad_norm": 1.4898951053619385,
"learning_rate": 9.257697174188107e-06,
"loss": 0.7448,
"step": 9400
},
{
"epoch": 0.74,
"grad_norm": 1.5196597576141357,
"learning_rate": 9.240826655419655e-06,
"loss": 0.7271,
"step": 9500
},
{
"epoch": 0.75,
"grad_norm": 1.3974419832229614,
"learning_rate": 9.223956136651203e-06,
"loss": 0.7492,
"step": 9600
},
{
"epoch": 0.75,
"grad_norm": 1.3109018802642822,
"learning_rate": 9.20708561788275e-06,
"loss": 0.7374,
"step": 9700
},
{
"epoch": 0.76,
"grad_norm": 1.616546630859375,
"learning_rate": 9.190215099114298e-06,
"loss": 0.7221,
"step": 9800
},
{
"epoch": 0.77,
"grad_norm": 1.382163405418396,
"learning_rate": 9.173344580345846e-06,
"loss": 0.7263,
"step": 9900
},
{
"epoch": 0.78,
"grad_norm": 1.368768572807312,
"learning_rate": 9.156474061577395e-06,
"loss": 0.7352,
"step": 10000
},
{
"epoch": 0.79,
"grad_norm": 1.3974610567092896,
"learning_rate": 9.139603542808943e-06,
"loss": 0.7276,
"step": 10100
},
{
"epoch": 0.79,
"grad_norm": 1.3801538944244385,
"learning_rate": 9.12273302404049e-06,
"loss": 0.7234,
"step": 10200
},
{
"epoch": 0.8,
"grad_norm": 1.2477465867996216,
"learning_rate": 9.10586250527204e-06,
"loss": 0.7254,
"step": 10300
},
{
"epoch": 0.81,
"grad_norm": 1.3689539432525635,
"learning_rate": 9.088991986503586e-06,
"loss": 0.7094,
"step": 10400
},
{
"epoch": 0.82,
"grad_norm": 1.0961337089538574,
"learning_rate": 9.072121467735134e-06,
"loss": 0.7262,
"step": 10500
},
{
"epoch": 0.82,
"grad_norm": 1.39137601852417,
"learning_rate": 9.055250948966682e-06,
"loss": 0.7249,
"step": 10600
},
{
"epoch": 0.83,
"grad_norm": 1.2879551649093628,
"learning_rate": 9.038380430198229e-06,
"loss": 0.7056,
"step": 10700
},
{
"epoch": 0.84,
"grad_norm": 1.1503087282180786,
"learning_rate": 9.021509911429777e-06,
"loss": 0.7096,
"step": 10800
},
{
"epoch": 0.85,
"grad_norm": 1.3748828172683716,
"learning_rate": 9.004639392661325e-06,
"loss": 0.6976,
"step": 10900
},
{
"epoch": 0.86,
"grad_norm": 1.1935720443725586,
"learning_rate": 8.987768873892873e-06,
"loss": 0.7062,
"step": 11000
},
{
"epoch": 0.86,
"grad_norm": 1.3598394393920898,
"learning_rate": 8.970898355124422e-06,
"loss": 0.7095,
"step": 11100
},
{
"epoch": 0.87,
"grad_norm": 1.1463404893875122,
"learning_rate": 8.954027836355968e-06,
"loss": 0.7084,
"step": 11200
},
{
"epoch": 0.88,
"grad_norm": 1.1318614482879639,
"learning_rate": 8.937157317587516e-06,
"loss": 0.7034,
"step": 11300
},
{
"epoch": 0.89,
"grad_norm": 1.3571819067001343,
"learning_rate": 8.920286798819064e-06,
"loss": 0.6979,
"step": 11400
},
{
"epoch": 0.89,
"grad_norm": 1.1747997999191284,
"learning_rate": 8.903416280050613e-06,
"loss": 0.6922,
"step": 11500
},
{
"epoch": 0.9,
"grad_norm": 1.3801376819610596,
"learning_rate": 8.886545761282161e-06,
"loss": 0.7025,
"step": 11600
},
{
"epoch": 0.91,
"grad_norm": 1.2742717266082764,
"learning_rate": 8.869675242513707e-06,
"loss": 0.7007,
"step": 11700
},
{
"epoch": 0.92,
"grad_norm": 1.261022925376892,
"learning_rate": 8.852804723745256e-06,
"loss": 0.7088,
"step": 11800
},
{
"epoch": 0.93,
"grad_norm": 1.1409653425216675,
"learning_rate": 8.835934204976804e-06,
"loss": 0.6876,
"step": 11900
},
{
"epoch": 0.93,
"grad_norm": 1.2370312213897705,
"learning_rate": 8.81906368620835e-06,
"loss": 0.6842,
"step": 12000
},
{
"epoch": 0.94,
"grad_norm": 1.2493584156036377,
"learning_rate": 8.8021931674399e-06,
"loss": 0.6861,
"step": 12100
},
{
"epoch": 0.95,
"grad_norm": 1.0940289497375488,
"learning_rate": 8.785322648671447e-06,
"loss": 0.678,
"step": 12200
},
{
"epoch": 0.96,
"grad_norm": 1.0800695419311523,
"learning_rate": 8.768452129902995e-06,
"loss": 0.6762,
"step": 12300
},
{
"epoch": 0.96,
"grad_norm": 1.2263818979263306,
"learning_rate": 8.751581611134543e-06,
"loss": 0.6882,
"step": 12400
},
{
"epoch": 0.97,
"grad_norm": 1.094193696975708,
"learning_rate": 8.73471109236609e-06,
"loss": 0.6704,
"step": 12500
},
{
"epoch": 0.98,
"grad_norm": 1.4435601234436035,
"learning_rate": 8.71784057359764e-06,
"loss": 0.68,
"step": 12600
},
{
"epoch": 0.99,
"grad_norm": 1.3831571340560913,
"learning_rate": 8.700970054829186e-06,
"loss": 0.6804,
"step": 12700
},
{
"epoch": 1.0,
"grad_norm": 1.1377003192901611,
"learning_rate": 8.684099536060734e-06,
"loss": 0.6801,
"step": 12800
},
{
"epoch": 1.0,
"eval_loss": 0.6581470370292664,
"eval_runtime": 240.2734,
"eval_samples_per_second": 26.836,
"eval_steps_per_second": 26.836,
"step": 12855
},
{
"epoch": 1.0,
"grad_norm": 1.3885269165039062,
"learning_rate": 8.667229017292283e-06,
"loss": 0.6454,
"step": 12900
},
{
"epoch": 1.01,
"grad_norm": 1.1746355295181274,
"learning_rate": 8.650358498523829e-06,
"loss": 0.6355,
"step": 13000
},
{
"epoch": 1.02,
"grad_norm": 1.184806227684021,
"learning_rate": 8.633487979755379e-06,
"loss": 0.6237,
"step": 13100
},
{
"epoch": 1.03,
"grad_norm": 1.1578764915466309,
"learning_rate": 8.616617460986926e-06,
"loss": 0.629,
"step": 13200
},
{
"epoch": 1.03,
"grad_norm": 1.0176867246627808,
"learning_rate": 8.599746942218474e-06,
"loss": 0.6272,
"step": 13300
},
{
"epoch": 1.04,
"grad_norm": 1.1205527782440186,
"learning_rate": 8.582876423450022e-06,
"loss": 0.6241,
"step": 13400
},
{
"epoch": 1.05,
"grad_norm": 1.56365966796875,
"learning_rate": 8.566005904681568e-06,
"loss": 0.6259,
"step": 13500
},
{
"epoch": 1.06,
"grad_norm": 1.3043173551559448,
"learning_rate": 8.549135385913118e-06,
"loss": 0.6168,
"step": 13600
},
{
"epoch": 1.07,
"grad_norm": 1.1044868230819702,
"learning_rate": 8.532264867144665e-06,
"loss": 0.6379,
"step": 13700
},
{
"epoch": 1.07,
"grad_norm": 1.189816951751709,
"learning_rate": 8.515394348376213e-06,
"loss": 0.6197,
"step": 13800
},
{
"epoch": 1.08,
"grad_norm": 1.2163881063461304,
"learning_rate": 8.498523829607761e-06,
"loss": 0.6254,
"step": 13900
},
{
"epoch": 1.09,
"grad_norm": 1.2914482355117798,
"learning_rate": 8.48165331083931e-06,
"loss": 0.6198,
"step": 14000
},
{
"epoch": 1.1,
"grad_norm": 1.1523815393447876,
"learning_rate": 8.464782792070858e-06,
"loss": 0.6319,
"step": 14100
},
{
"epoch": 1.1,
"grad_norm": 1.0500476360321045,
"learning_rate": 8.447912273302404e-06,
"loss": 0.6264,
"step": 14200
},
{
"epoch": 1.11,
"grad_norm": 1.2976247072219849,
"learning_rate": 8.431041754533953e-06,
"loss": 0.6291,
"step": 14300
},
{
"epoch": 1.12,
"grad_norm": 1.1747026443481445,
"learning_rate": 8.4141712357655e-06,
"loss": 0.6247,
"step": 14400
},
{
"epoch": 1.13,
"grad_norm": 1.2161647081375122,
"learning_rate": 8.397300716997049e-06,
"loss": 0.6271,
"step": 14500
},
{
"epoch": 1.14,
"grad_norm": 1.1042088270187378,
"learning_rate": 8.380430198228595e-06,
"loss": 0.6225,
"step": 14600
},
{
"epoch": 1.14,
"grad_norm": 1.235410213470459,
"learning_rate": 8.363559679460144e-06,
"loss": 0.6192,
"step": 14700
},
{
"epoch": 1.15,
"grad_norm": 1.341143012046814,
"learning_rate": 8.346689160691692e-06,
"loss": 0.6185,
"step": 14800
},
{
"epoch": 1.16,
"grad_norm": 1.378353476524353,
"learning_rate": 8.32981864192324e-06,
"loss": 0.6186,
"step": 14900
},
{
"epoch": 1.17,
"grad_norm": 1.2785013914108276,
"learning_rate": 8.312948123154788e-06,
"loss": 0.6284,
"step": 15000
},
{
"epoch": 1.17,
"grad_norm": 1.2899855375289917,
"learning_rate": 8.296077604386335e-06,
"loss": 0.6173,
"step": 15100
},
{
"epoch": 1.18,
"grad_norm": 1.2241288423538208,
"learning_rate": 8.279207085617883e-06,
"loss": 0.6248,
"step": 15200
},
{
"epoch": 1.19,
"grad_norm": 1.124097228050232,
"learning_rate": 8.262336566849431e-06,
"loss": 0.6221,
"step": 15300
},
{
"epoch": 1.2,
"grad_norm": 1.1559983491897583,
"learning_rate": 8.24546604808098e-06,
"loss": 0.619,
"step": 15400
},
{
"epoch": 1.21,
"grad_norm": 1.0540140867233276,
"learning_rate": 8.228595529312528e-06,
"loss": 0.6095,
"step": 15500
},
{
"epoch": 1.21,
"grad_norm": 1.301646113395691,
"learning_rate": 8.211725010544074e-06,
"loss": 0.6211,
"step": 15600
},
{
"epoch": 1.22,
"grad_norm": 1.4309414625167847,
"learning_rate": 8.194854491775622e-06,
"loss": 0.6244,
"step": 15700
},
{
"epoch": 1.23,
"grad_norm": 1.49406099319458,
"learning_rate": 8.17798397300717e-06,
"loss": 0.6177,
"step": 15800
},
{
"epoch": 1.24,
"grad_norm": 1.1188528537750244,
"learning_rate": 8.161113454238719e-06,
"loss": 0.6346,
"step": 15900
},
{
"epoch": 1.24,
"grad_norm": 1.14681875705719,
"learning_rate": 8.144242935470267e-06,
"loss": 0.6218,
"step": 16000
},
{
"epoch": 1.25,
"grad_norm": 1.106151819229126,
"learning_rate": 8.127372416701814e-06,
"loss": 0.6173,
"step": 16100
},
{
"epoch": 1.26,
"grad_norm": 1.0186794996261597,
"learning_rate": 8.110501897933362e-06,
"loss": 0.6126,
"step": 16200
},
{
"epoch": 1.27,
"grad_norm": 1.2177096605300903,
"learning_rate": 8.09363137916491e-06,
"loss": 0.6262,
"step": 16300
},
{
"epoch": 1.28,
"grad_norm": 1.1168888807296753,
"learning_rate": 8.076760860396458e-06,
"loss": 0.6099,
"step": 16400
},
{
"epoch": 1.28,
"grad_norm": 1.5206681489944458,
"learning_rate": 8.059890341628006e-06,
"loss": 0.6182,
"step": 16500
},
{
"epoch": 1.29,
"grad_norm": 1.2879040241241455,
"learning_rate": 8.043019822859553e-06,
"loss": 0.6217,
"step": 16600
},
{
"epoch": 1.3,
"grad_norm": 1.1366697549819946,
"learning_rate": 8.026149304091101e-06,
"loss": 0.6114,
"step": 16700
},
{
"epoch": 1.31,
"grad_norm": 1.0332260131835938,
"learning_rate": 8.00927878532265e-06,
"loss": 0.607,
"step": 16800
},
{
"epoch": 1.31,
"grad_norm": 1.2021900415420532,
"learning_rate": 7.992408266554198e-06,
"loss": 0.6062,
"step": 16900
},
{
"epoch": 1.32,
"grad_norm": 1.1737139225006104,
"learning_rate": 7.975537747785746e-06,
"loss": 0.6109,
"step": 17000
},
{
"epoch": 1.33,
"grad_norm": 1.2494922876358032,
"learning_rate": 7.958667229017292e-06,
"loss": 0.6139,
"step": 17100
},
{
"epoch": 1.34,
"grad_norm": 1.04032301902771,
"learning_rate": 7.94179671024884e-06,
"loss": 0.5907,
"step": 17200
},
{
"epoch": 1.35,
"grad_norm": 1.016192078590393,
"learning_rate": 7.924926191480389e-06,
"loss": 0.607,
"step": 17300
},
{
"epoch": 1.35,
"grad_norm": 1.055438756942749,
"learning_rate": 7.908055672711937e-06,
"loss": 0.5982,
"step": 17400
},
{
"epoch": 1.36,
"grad_norm": 1.1405280828475952,
"learning_rate": 7.891185153943485e-06,
"loss": 0.6077,
"step": 17500
},
{
"epoch": 1.37,
"grad_norm": 1.2112674713134766,
"learning_rate": 7.874314635175032e-06,
"loss": 0.6004,
"step": 17600
},
{
"epoch": 1.38,
"grad_norm": 1.1471858024597168,
"learning_rate": 7.85744411640658e-06,
"loss": 0.6066,
"step": 17700
},
{
"epoch": 1.38,
"grad_norm": 1.1858110427856445,
"learning_rate": 7.840573597638128e-06,
"loss": 0.6071,
"step": 17800
},
{
"epoch": 1.39,
"grad_norm": 1.2066441774368286,
"learning_rate": 7.823703078869675e-06,
"loss": 0.6009,
"step": 17900
},
{
"epoch": 1.4,
"grad_norm": 1.2638628482818604,
"learning_rate": 7.806832560101225e-06,
"loss": 0.6135,
"step": 18000
},
{
"epoch": 1.41,
"grad_norm": 1.1812059879302979,
"learning_rate": 7.789962041332771e-06,
"loss": 0.6101,
"step": 18100
},
{
"epoch": 1.42,
"grad_norm": 1.1751078367233276,
"learning_rate": 7.77309152256432e-06,
"loss": 0.5935,
"step": 18200
},
{
"epoch": 1.42,
"grad_norm": 1.1354562044143677,
"learning_rate": 7.756221003795868e-06,
"loss": 0.6121,
"step": 18300
},
{
"epoch": 1.43,
"grad_norm": 1.133031964302063,
"learning_rate": 7.739350485027414e-06,
"loss": 0.6087,
"step": 18400
},
{
"epoch": 1.44,
"grad_norm": 1.0784614086151123,
"learning_rate": 7.722479966258964e-06,
"loss": 0.5897,
"step": 18500
},
{
"epoch": 1.45,
"grad_norm": 1.2417889833450317,
"learning_rate": 7.70560944749051e-06,
"loss": 0.6003,
"step": 18600
},
{
"epoch": 1.45,
"grad_norm": 1.0895743370056152,
"learning_rate": 7.688738928722059e-06,
"loss": 0.5951,
"step": 18700
},
{
"epoch": 1.46,
"grad_norm": 0.9925142526626587,
"learning_rate": 7.671868409953607e-06,
"loss": 0.5913,
"step": 18800
},
{
"epoch": 1.47,
"grad_norm": 1.1214051246643066,
"learning_rate": 7.654997891185153e-06,
"loss": 0.601,
"step": 18900
},
{
"epoch": 1.48,
"grad_norm": 1.155522346496582,
"learning_rate": 7.638127372416703e-06,
"loss": 0.5898,
"step": 19000
},
{
"epoch": 1.49,
"grad_norm": 1.2364145517349243,
"learning_rate": 7.62125685364825e-06,
"loss": 0.5968,
"step": 19100
},
{
"epoch": 1.49,
"grad_norm": 1.1163456439971924,
"learning_rate": 7.604386334879799e-06,
"loss": 0.5887,
"step": 19200
},
{
"epoch": 1.5,
"grad_norm": 1.2798086404800415,
"learning_rate": 7.587515816111346e-06,
"loss": 0.6007,
"step": 19300
},
{
"epoch": 1.51,
"grad_norm": 1.0055946111679077,
"learning_rate": 7.570645297342894e-06,
"loss": 0.5816,
"step": 19400
},
{
"epoch": 1.52,
"grad_norm": 1.117680549621582,
"learning_rate": 7.553774778574442e-06,
"loss": 0.5953,
"step": 19500
},
{
"epoch": 1.52,
"grad_norm": 1.2904775142669678,
"learning_rate": 7.536904259805989e-06,
"loss": 0.5959,
"step": 19600
},
{
"epoch": 1.53,
"grad_norm": 1.199350118637085,
"learning_rate": 7.520033741037538e-06,
"loss": 0.5871,
"step": 19700
},
{
"epoch": 1.54,
"grad_norm": 1.219810128211975,
"learning_rate": 7.503163222269086e-06,
"loss": 0.5873,
"step": 19800
},
{
"epoch": 1.55,
"grad_norm": 1.2980371713638306,
"learning_rate": 7.486292703500633e-06,
"loss": 0.5932,
"step": 19900
},
{
"epoch": 1.56,
"grad_norm": 1.1398423910140991,
"learning_rate": 7.469422184732181e-06,
"loss": 0.5862,
"step": 20000
},
{
"epoch": 1.56,
"grad_norm": 1.477073311805725,
"learning_rate": 7.452551665963729e-06,
"loss": 0.5921,
"step": 20100
},
{
"epoch": 1.57,
"grad_norm": 1.0095638036727905,
"learning_rate": 7.435681147195278e-06,
"loss": 0.5907,
"step": 20200
},
{
"epoch": 1.58,
"grad_norm": 1.077609658241272,
"learning_rate": 7.418810628426825e-06,
"loss": 0.5817,
"step": 20300
},
{
"epoch": 1.59,
"grad_norm": 0.9858334064483643,
"learning_rate": 7.401940109658372e-06,
"loss": 0.5939,
"step": 20400
},
{
"epoch": 1.59,
"grad_norm": 1.268883228302002,
"learning_rate": 7.385069590889921e-06,
"loss": 0.5885,
"step": 20500
},
{
"epoch": 1.6,
"grad_norm": 1.2403576374053955,
"learning_rate": 7.368199072121468e-06,
"loss": 0.6015,
"step": 20600
},
{
"epoch": 1.61,
"grad_norm": 1.0347434282302856,
"learning_rate": 7.351328553353016e-06,
"loss": 0.5851,
"step": 20700
},
{
"epoch": 1.62,
"grad_norm": 0.9588680863380432,
"learning_rate": 7.3344580345845635e-06,
"loss": 0.5959,
"step": 20800
},
{
"epoch": 1.63,
"grad_norm": 1.133475661277771,
"learning_rate": 7.317587515816113e-06,
"loss": 0.5887,
"step": 20900
},
{
"epoch": 1.63,
"grad_norm": 1.2239305973052979,
"learning_rate": 7.30071699704766e-06,
"loss": 0.5774,
"step": 21000
},
{
"epoch": 1.64,
"grad_norm": 1.2144396305084229,
"learning_rate": 7.283846478279207e-06,
"loss": 0.5829,
"step": 21100
},
{
"epoch": 1.65,
"grad_norm": 1.1166149377822876,
"learning_rate": 7.2669759595107556e-06,
"loss": 0.5807,
"step": 21200
},
{
"epoch": 1.66,
"grad_norm": 1.1699156761169434,
"learning_rate": 7.250105440742303e-06,
"loss": 0.5769,
"step": 21300
},
{
"epoch": 1.66,
"grad_norm": 1.0418583154678345,
"learning_rate": 7.233234921973852e-06,
"loss": 0.586,
"step": 21400
},
{
"epoch": 1.67,
"grad_norm": 1.097359538078308,
"learning_rate": 7.216364403205399e-06,
"loss": 0.5736,
"step": 21500
},
{
"epoch": 1.68,
"grad_norm": 1.157788872718811,
"learning_rate": 7.199493884436947e-06,
"loss": 0.5813,
"step": 21600
},
{
"epoch": 1.69,
"grad_norm": 1.1046146154403687,
"learning_rate": 7.182623365668495e-06,
"loss": 0.56,
"step": 21700
},
{
"epoch": 1.7,
"grad_norm": 1.3684812784194946,
"learning_rate": 7.165752846900042e-06,
"loss": 0.5738,
"step": 21800
},
{
"epoch": 1.7,
"grad_norm": 1.2205299139022827,
"learning_rate": 7.148882328131591e-06,
"loss": 0.5714,
"step": 21900
},
{
"epoch": 1.71,
"grad_norm": 1.132609248161316,
"learning_rate": 7.132011809363139e-06,
"loss": 0.5836,
"step": 22000
},
{
"epoch": 1.72,
"grad_norm": 1.446101427078247,
"learning_rate": 7.115141290594686e-06,
"loss": 0.5876,
"step": 22100
},
{
"epoch": 1.73,
"grad_norm": 1.0895814895629883,
"learning_rate": 7.098270771826234e-06,
"loss": 0.5722,
"step": 22200
},
{
"epoch": 1.73,
"grad_norm": 1.146032452583313,
"learning_rate": 7.081400253057782e-06,
"loss": 0.5642,
"step": 22300
},
{
"epoch": 1.74,
"grad_norm": 1.3625069856643677,
"learning_rate": 7.064529734289331e-06,
"loss": 0.5746,
"step": 22400
},
{
"epoch": 1.75,
"grad_norm": 0.9880527257919312,
"learning_rate": 7.047659215520878e-06,
"loss": 0.5745,
"step": 22500
},
{
"epoch": 1.76,
"grad_norm": 1.0360558032989502,
"learning_rate": 7.0307886967524255e-06,
"loss": 0.5711,
"step": 22600
},
{
"epoch": 1.77,
"grad_norm": 1.2373803853988647,
"learning_rate": 7.013918177983974e-06,
"loss": 0.575,
"step": 22700
},
{
"epoch": 1.77,
"grad_norm": 1.0527061223983765,
"learning_rate": 6.997047659215521e-06,
"loss": 0.5709,
"step": 22800
},
{
"epoch": 1.78,
"grad_norm": 1.0829005241394043,
"learning_rate": 6.98017714044707e-06,
"loss": 0.572,
"step": 22900
},
{
"epoch": 1.79,
"grad_norm": 1.1301981210708618,
"learning_rate": 6.9633066216786175e-06,
"loss": 0.5697,
"step": 23000
},
{
"epoch": 1.8,
"grad_norm": 1.336841344833374,
"learning_rate": 6.946436102910165e-06,
"loss": 0.5756,
"step": 23100
},
{
"epoch": 1.8,
"grad_norm": 1.3953934907913208,
"learning_rate": 6.929565584141713e-06,
"loss": 0.573,
"step": 23200
},
{
"epoch": 1.81,
"grad_norm": 1.0496814250946045,
"learning_rate": 6.91269506537326e-06,
"loss": 0.5732,
"step": 23300
},
{
"epoch": 1.82,
"grad_norm": 1.1564157009124756,
"learning_rate": 6.895824546604809e-06,
"loss": 0.5682,
"step": 23400
},
{
"epoch": 1.83,
"grad_norm": 1.307882308959961,
"learning_rate": 6.878954027836357e-06,
"loss": 0.5709,
"step": 23500
},
{
"epoch": 1.84,
"grad_norm": 1.1765997409820557,
"learning_rate": 6.862083509067904e-06,
"loss": 0.5773,
"step": 23600
},
{
"epoch": 1.84,
"grad_norm": 1.1779475212097168,
"learning_rate": 6.8452129902994524e-06,
"loss": 0.5654,
"step": 23700
},
{
"epoch": 1.85,
"grad_norm": 1.0180901288986206,
"learning_rate": 6.828342471531e-06,
"loss": 0.5704,
"step": 23800
},
{
"epoch": 1.86,
"grad_norm": 1.0048550367355347,
"learning_rate": 6.811471952762548e-06,
"loss": 0.566,
"step": 23900
},
{
"epoch": 1.87,
"grad_norm": 1.1614845991134644,
"learning_rate": 6.794601433994095e-06,
"loss": 0.5765,
"step": 24000
},
{
"epoch": 1.87,
"grad_norm": 1.1774615049362183,
"learning_rate": 6.777730915225644e-06,
"loss": 0.5675,
"step": 24100
},
{
"epoch": 1.88,
"grad_norm": 1.2056039571762085,
"learning_rate": 6.760860396457192e-06,
"loss": 0.5784,
"step": 24200
},
{
"epoch": 1.89,
"grad_norm": 1.303844690322876,
"learning_rate": 6.743989877688739e-06,
"loss": 0.5676,
"step": 24300
},
{
"epoch": 1.9,
"grad_norm": 1.2364120483398438,
"learning_rate": 6.727119358920287e-06,
"loss": 0.5629,
"step": 24400
},
{
"epoch": 1.91,
"grad_norm": 1.1283705234527588,
"learning_rate": 6.710248840151835e-06,
"loss": 0.5737,
"step": 24500
},
{
"epoch": 1.91,
"grad_norm": 1.0897367000579834,
"learning_rate": 6.693378321383384e-06,
"loss": 0.5628,
"step": 24600
},
{
"epoch": 1.92,
"grad_norm": 1.104775309562683,
"learning_rate": 6.676507802614931e-06,
"loss": 0.5568,
"step": 24700
},
{
"epoch": 1.93,
"grad_norm": 1.400399088859558,
"learning_rate": 6.6596372838464785e-06,
"loss": 0.581,
"step": 24800
},
{
"epoch": 1.94,
"grad_norm": 1.2806612253189087,
"learning_rate": 6.642766765078027e-06,
"loss": 0.5746,
"step": 24900
},
{
"epoch": 1.94,
"grad_norm": 1.1554964780807495,
"learning_rate": 6.625896246309574e-06,
"loss": 0.5481,
"step": 25000
},
{
"epoch": 1.95,
"grad_norm": 1.6145806312561035,
"learning_rate": 6.609025727541123e-06,
"loss": 0.5639,
"step": 25100
},
{
"epoch": 1.96,
"grad_norm": 1.025269865989685,
"learning_rate": 6.5921552087726706e-06,
"loss": 0.5586,
"step": 25200
},
{
"epoch": 1.97,
"grad_norm": 1.1193889379501343,
"learning_rate": 6.575284690004218e-06,
"loss": 0.5573,
"step": 25300
},
{
"epoch": 1.98,
"grad_norm": 1.177747130393982,
"learning_rate": 6.558414171235766e-06,
"loss": 0.5559,
"step": 25400
},
{
"epoch": 1.98,
"grad_norm": 0.9178218245506287,
"learning_rate": 6.5415436524673135e-06,
"loss": 0.5635,
"step": 25500
},
{
"epoch": 1.99,
"grad_norm": 1.3144500255584717,
"learning_rate": 6.5246731336988626e-06,
"loss": 0.5552,
"step": 25600
},
{
"epoch": 2.0,
"grad_norm": 1.1707431077957153,
"learning_rate": 6.50780261493041e-06,
"loss": 0.5691,
"step": 25700
},
{
"epoch": 2.0,
"eval_loss": 0.5625693798065186,
"eval_runtime": 238.6823,
"eval_samples_per_second": 27.015,
"eval_steps_per_second": 27.015,
"step": 25711
},
{
"epoch": 2.01,
"grad_norm": 0.9804295301437378,
"learning_rate": 6.490932096161957e-06,
"loss": 0.5096,
"step": 25800
},
{
"epoch": 2.01,
"grad_norm": 1.2259230613708496,
"learning_rate": 6.4740615773935055e-06,
"loss": 0.502,
"step": 25900
},
{
"epoch": 2.02,
"grad_norm": 1.1402134895324707,
"learning_rate": 6.457191058625053e-06,
"loss": 0.5045,
"step": 26000
},
{
"epoch": 2.03,
"grad_norm": 1.1775941848754883,
"learning_rate": 6.440320539856602e-06,
"loss": 0.5069,
"step": 26100
},
{
"epoch": 2.04,
"grad_norm": 1.0790044069290161,
"learning_rate": 6.423450021088149e-06,
"loss": 0.4966,
"step": 26200
},
{
"epoch": 2.05,
"grad_norm": 1.1552369594573975,
"learning_rate": 6.406579502319697e-06,
"loss": 0.5031,
"step": 26300
},
{
"epoch": 2.05,
"grad_norm": 1.1360880136489868,
"learning_rate": 6.389708983551245e-06,
"loss": 0.4948,
"step": 26400
},
{
"epoch": 2.06,
"grad_norm": 1.1748837232589722,
"learning_rate": 6.372838464782792e-06,
"loss": 0.5089,
"step": 26500
},
{
"epoch": 2.07,
"grad_norm": 1.2439712285995483,
"learning_rate": 6.3559679460143405e-06,
"loss": 0.5025,
"step": 26600
},
{
"epoch": 2.08,
"grad_norm": 1.229835867881775,
"learning_rate": 6.339097427245889e-06,
"loss": 0.498,
"step": 26700
},
{
"epoch": 2.08,
"grad_norm": 1.0424373149871826,
"learning_rate": 6.322226908477436e-06,
"loss": 0.504,
"step": 26800
},
{
"epoch": 2.09,
"grad_norm": 1.0707651376724243,
"learning_rate": 6.305356389708984e-06,
"loss": 0.5014,
"step": 26900
},
{
"epoch": 2.1,
"grad_norm": 1.3633686304092407,
"learning_rate": 6.288485870940532e-06,
"loss": 0.4993,
"step": 27000
},
{
"epoch": 2.11,
"grad_norm": 1.135166883468628,
"learning_rate": 6.27161535217208e-06,
"loss": 0.5029,
"step": 27100
},
{
"epoch": 2.12,
"grad_norm": 1.0198315382003784,
"learning_rate": 6.254744833403627e-06,
"loss": 0.5071,
"step": 27200
},
{
"epoch": 2.12,
"grad_norm": 1.0351316928863525,
"learning_rate": 6.237874314635175e-06,
"loss": 0.493,
"step": 27300
},
{
"epoch": 2.13,
"grad_norm": 1.2323417663574219,
"learning_rate": 6.221003795866724e-06,
"loss": 0.4972,
"step": 27400
},
{
"epoch": 2.14,
"grad_norm": 1.0917974710464478,
"learning_rate": 6.204133277098271e-06,
"loss": 0.515,
"step": 27500
},
{
"epoch": 2.15,
"grad_norm": 1.0742385387420654,
"learning_rate": 6.187262758329819e-06,
"loss": 0.5003,
"step": 27600
},
{
"epoch": 2.15,
"grad_norm": 1.1752365827560425,
"learning_rate": 6.170392239561367e-06,
"loss": 0.5037,
"step": 27700
},
{
"epoch": 2.16,
"grad_norm": 1.3029072284698486,
"learning_rate": 6.153521720792916e-06,
"loss": 0.5058,
"step": 27800
},
{
"epoch": 2.17,
"grad_norm": 1.1643210649490356,
"learning_rate": 6.136651202024463e-06,
"loss": 0.4972,
"step": 27900
},
{
"epoch": 2.18,
"grad_norm": 1.242964267730713,
"learning_rate": 6.11978068325601e-06,
"loss": 0.4997,
"step": 28000
},
{
"epoch": 2.19,
"grad_norm": 1.3172551393508911,
"learning_rate": 6.102910164487559e-06,
"loss": 0.5094,
"step": 28100
},
{
"epoch": 2.19,
"grad_norm": 1.2467583417892456,
"learning_rate": 6.086039645719106e-06,
"loss": 0.5076,
"step": 28200
},
{
"epoch": 2.2,
"grad_norm": 1.3781509399414062,
"learning_rate": 6.069169126950655e-06,
"loss": 0.4913,
"step": 28300
},
{
"epoch": 2.21,
"grad_norm": 1.1692475080490112,
"learning_rate": 6.052298608182202e-06,
"loss": 0.5068,
"step": 28400
},
{
"epoch": 2.22,
"grad_norm": 1.2849936485290527,
"learning_rate": 6.03542808941375e-06,
"loss": 0.4904,
"step": 28500
},
{
"epoch": 2.22,
"grad_norm": 1.156049132347107,
"learning_rate": 6.018557570645298e-06,
"loss": 0.4945,
"step": 28600
},
{
"epoch": 2.23,
"grad_norm": 1.1158158779144287,
"learning_rate": 6.001687051876845e-06,
"loss": 0.5085,
"step": 28700
},
{
"epoch": 2.24,
"grad_norm": 1.1390436887741089,
"learning_rate": 5.984816533108394e-06,
"loss": 0.5008,
"step": 28800
},
{
"epoch": 2.25,
"grad_norm": 1.285714030265808,
"learning_rate": 5.967946014339942e-06,
"loss": 0.5041,
"step": 28900
},
{
"epoch": 2.26,
"grad_norm": 1.1822518110275269,
"learning_rate": 5.951075495571489e-06,
"loss": 0.4991,
"step": 29000
},
{
"epoch": 2.26,
"grad_norm": 1.0039293766021729,
"learning_rate": 5.934204976803037e-06,
"loss": 0.5014,
"step": 29100
},
{
"epoch": 2.27,
"grad_norm": 1.0583579540252686,
"learning_rate": 5.917334458034585e-06,
"loss": 0.5017,
"step": 29200
},
{
"epoch": 2.28,
"grad_norm": 1.2026630640029907,
"learning_rate": 5.900463939266134e-06,
"loss": 0.4909,
"step": 29300
},
{
"epoch": 2.29,
"grad_norm": 1.3809479475021362,
"learning_rate": 5.883593420497681e-06,
"loss": 0.5,
"step": 29400
},
{
"epoch": 2.29,
"grad_norm": 1.2457770109176636,
"learning_rate": 5.8667229017292285e-06,
"loss": 0.5024,
"step": 29500
},
{
"epoch": 2.3,
"grad_norm": 1.1686639785766602,
"learning_rate": 5.849852382960777e-06,
"loss": 0.4969,
"step": 29600
},
{
"epoch": 2.31,
"grad_norm": 1.1628988981246948,
"learning_rate": 5.832981864192324e-06,
"loss": 0.5021,
"step": 29700
},
{
"epoch": 2.32,
"grad_norm": 1.1939592361450195,
"learning_rate": 5.816111345423872e-06,
"loss": 0.4984,
"step": 29800
},
{
"epoch": 2.33,
"grad_norm": 1.1496870517730713,
"learning_rate": 5.7992408266554205e-06,
"loss": 0.5074,
"step": 29900
},
{
"epoch": 2.33,
"grad_norm": 1.0704864263534546,
"learning_rate": 5.782370307886968e-06,
"loss": 0.4897,
"step": 30000
},
{
"epoch": 2.34,
"grad_norm": 1.049353003501892,
"learning_rate": 5.765499789118516e-06,
"loss": 0.4952,
"step": 30100
},
{
"epoch": 2.35,
"grad_norm": 1.1420207023620605,
"learning_rate": 5.7486292703500635e-06,
"loss": 0.4928,
"step": 30200
},
{
"epoch": 2.36,
"grad_norm": 1.196271538734436,
"learning_rate": 5.731758751581612e-06,
"loss": 0.4954,
"step": 30300
},
{
"epoch": 2.36,
"grad_norm": 1.2674270868301392,
"learning_rate": 5.714888232813159e-06,
"loss": 0.4899,
"step": 30400
},
{
"epoch": 2.37,
"grad_norm": 1.076567530632019,
"learning_rate": 5.698017714044707e-06,
"loss": 0.4878,
"step": 30500
},
{
"epoch": 2.38,
"grad_norm": 1.1435110569000244,
"learning_rate": 5.6811471952762555e-06,
"loss": 0.4915,
"step": 30600
},
{
"epoch": 2.39,
"grad_norm": 1.187680959701538,
"learning_rate": 5.664276676507803e-06,
"loss": 0.498,
"step": 30700
},
{
"epoch": 2.4,
"grad_norm": 1.0326035022735596,
"learning_rate": 5.647406157739351e-06,
"loss": 0.4895,
"step": 30800
},
{
"epoch": 2.4,
"grad_norm": 1.1324219703674316,
"learning_rate": 5.630535638970898e-06,
"loss": 0.4973,
"step": 30900
},
{
"epoch": 2.41,
"grad_norm": 1.1349315643310547,
"learning_rate": 5.613665120202446e-06,
"loss": 0.4999,
"step": 31000
},
{
"epoch": 2.42,
"grad_norm": 1.0533714294433594,
"learning_rate": 5.596794601433995e-06,
"loss": 0.4883,
"step": 31100
},
{
"epoch": 2.43,
"grad_norm": 1.2699588537216187,
"learning_rate": 5.579924082665542e-06,
"loss": 0.4918,
"step": 31200
},
{
"epoch": 2.43,
"grad_norm": 1.2485431432724,
"learning_rate": 5.56305356389709e-06,
"loss": 0.4955,
"step": 31300
},
{
"epoch": 2.44,
"grad_norm": 1.4232758283615112,
"learning_rate": 5.546183045128638e-06,
"loss": 0.4865,
"step": 31400
},
{
"epoch": 2.45,
"grad_norm": 1.0299222469329834,
"learning_rate": 5.529312526360187e-06,
"loss": 0.4988,
"step": 31500
},
{
"epoch": 2.46,
"grad_norm": 1.3187662363052368,
"learning_rate": 5.512442007591734e-06,
"loss": 0.5004,
"step": 31600
},
{
"epoch": 2.47,
"grad_norm": 1.3091824054718018,
"learning_rate": 5.495571488823282e-06,
"loss": 0.5027,
"step": 31700
},
{
"epoch": 2.47,
"grad_norm": 1.0004264116287231,
"learning_rate": 5.47870097005483e-06,
"loss": 0.5021,
"step": 31800
},
{
"epoch": 2.48,
"grad_norm": 1.1970808506011963,
"learning_rate": 5.461830451286377e-06,
"loss": 0.4947,
"step": 31900
},
{
"epoch": 2.49,
"grad_norm": 1.0422667264938354,
"learning_rate": 5.444959932517926e-06,
"loss": 0.4853,
"step": 32000
},
{
"epoch": 2.5,
"grad_norm": 1.0415763854980469,
"learning_rate": 5.428089413749474e-06,
"loss": 0.4999,
"step": 32100
},
{
"epoch": 2.5,
"grad_norm": 1.0286779403686523,
"learning_rate": 5.411218894981021e-06,
"loss": 0.4914,
"step": 32200
},
{
"epoch": 2.51,
"grad_norm": 1.2340902090072632,
"learning_rate": 5.394348376212569e-06,
"loss": 0.5021,
"step": 32300
},
{
"epoch": 2.52,
"grad_norm": 1.0882303714752197,
"learning_rate": 5.3774778574441165e-06,
"loss": 0.5031,
"step": 32400
},
{
"epoch": 2.53,
"grad_norm": 1.0787783861160278,
"learning_rate": 5.360607338675666e-06,
"loss": 0.4958,
"step": 32500
},
{
"epoch": 2.54,
"grad_norm": 1.0476319789886475,
"learning_rate": 5.343736819907213e-06,
"loss": 0.4946,
"step": 32600
},
{
"epoch": 2.54,
"grad_norm": 1.2390849590301514,
"learning_rate": 5.32686630113876e-06,
"loss": 0.5019,
"step": 32700
},
{
"epoch": 2.55,
"grad_norm": 1.2755340337753296,
"learning_rate": 5.3099957823703085e-06,
"loss": 0.4964,
"step": 32800
},
{
"epoch": 2.56,
"grad_norm": 1.060534954071045,
"learning_rate": 5.293125263601856e-06,
"loss": 0.4905,
"step": 32900
},
{
"epoch": 2.57,
"grad_norm": 1.1471514701843262,
"learning_rate": 5.276254744833404e-06,
"loss": 0.4991,
"step": 33000
},
{
"epoch": 2.57,
"grad_norm": 1.350637435913086,
"learning_rate": 5.2593842260649515e-06,
"loss": 0.4872,
"step": 33100
},
{
"epoch": 2.58,
"grad_norm": 1.0406626462936401,
"learning_rate": 5.2425137072965e-06,
"loss": 0.4794,
"step": 33200
},
{
"epoch": 2.59,
"grad_norm": 1.4993454217910767,
"learning_rate": 5.225643188528048e-06,
"loss": 0.4817,
"step": 33300
},
{
"epoch": 2.6,
"grad_norm": 1.3099007606506348,
"learning_rate": 5.208772669759595e-06,
"loss": 0.4965,
"step": 33400
},
{
"epoch": 2.61,
"grad_norm": 1.256925344467163,
"learning_rate": 5.1919021509911435e-06,
"loss": 0.4926,
"step": 33500
},
{
"epoch": 2.61,
"grad_norm": 1.0852125883102417,
"learning_rate": 5.175031632222691e-06,
"loss": 0.4912,
"step": 33600
},
{
"epoch": 2.62,
"grad_norm": 1.2201043367385864,
"learning_rate": 5.158161113454238e-06,
"loss": 0.4885,
"step": 33700
},
{
"epoch": 2.63,
"grad_norm": 1.1780471801757812,
"learning_rate": 5.141290594685787e-06,
"loss": 0.4938,
"step": 33800
},
{
"epoch": 2.64,
"grad_norm": 1.1712925434112549,
"learning_rate": 5.124420075917335e-06,
"loss": 0.4964,
"step": 33900
},
{
"epoch": 2.64,
"grad_norm": 1.0011783838272095,
"learning_rate": 5.107549557148883e-06,
"loss": 0.492,
"step": 34000
},
{
"epoch": 2.65,
"grad_norm": 1.116950273513794,
"learning_rate": 5.09067903838043e-06,
"loss": 0.4899,
"step": 34100
},
{
"epoch": 2.66,
"grad_norm": 1.209444522857666,
"learning_rate": 5.073808519611978e-06,
"loss": 0.4977,
"step": 34200
},
{
"epoch": 2.67,
"grad_norm": 1.210232138633728,
"learning_rate": 5.056938000843527e-06,
"loss": 0.5006,
"step": 34300
},
{
"epoch": 2.68,
"grad_norm": 1.2212296724319458,
"learning_rate": 5.040067482075074e-06,
"loss": 0.4899,
"step": 34400
},
{
"epoch": 2.68,
"grad_norm": 1.0587881803512573,
"learning_rate": 5.023196963306622e-06,
"loss": 0.4978,
"step": 34500
},
{
"epoch": 2.69,
"grad_norm": 1.0898345708847046,
"learning_rate": 5.00632644453817e-06,
"loss": 0.4823,
"step": 34600
},
{
"epoch": 2.7,
"grad_norm": 1.101915717124939,
"learning_rate": 4.989455925769718e-06,
"loss": 0.4865,
"step": 34700
},
{
"epoch": 2.71,
"grad_norm": 1.0491071939468384,
"learning_rate": 4.972585407001266e-06,
"loss": 0.4902,
"step": 34800
},
{
"epoch": 2.71,
"grad_norm": 1.1705042123794556,
"learning_rate": 4.955714888232813e-06,
"loss": 0.4818,
"step": 34900
},
{
"epoch": 2.72,
"grad_norm": 0.9806114435195923,
"learning_rate": 4.938844369464361e-06,
"loss": 0.4917,
"step": 35000
},
{
"epoch": 2.73,
"grad_norm": 1.0800176858901978,
"learning_rate": 4.921973850695909e-06,
"loss": 0.487,
"step": 35100
},
{
"epoch": 2.74,
"grad_norm": 1.1579550504684448,
"learning_rate": 4.905103331927457e-06,
"loss": 0.4953,
"step": 35200
},
{
"epoch": 2.75,
"grad_norm": 1.4026445150375366,
"learning_rate": 4.888232813159005e-06,
"loss": 0.4854,
"step": 35300
},
{
"epoch": 2.75,
"grad_norm": 1.0393800735473633,
"learning_rate": 4.871362294390553e-06,
"loss": 0.4869,
"step": 35400
},
{
"epoch": 2.76,
"grad_norm": 0.9717263579368591,
"learning_rate": 4.8544917756221e-06,
"loss": 0.49,
"step": 35500
},
{
"epoch": 2.77,
"grad_norm": 1.0537793636322021,
"learning_rate": 4.837621256853648e-06,
"loss": 0.4837,
"step": 35600
},
{
"epoch": 2.78,
"grad_norm": 1.1512401103973389,
"learning_rate": 4.820750738085197e-06,
"loss": 0.4922,
"step": 35700
},
{
"epoch": 2.78,
"grad_norm": 1.2357419729232788,
"learning_rate": 4.803880219316745e-06,
"loss": 0.4788,
"step": 35800
},
{
"epoch": 2.79,
"grad_norm": 1.27003014087677,
"learning_rate": 4.787009700548292e-06,
"loss": 0.4984,
"step": 35900
},
{
"epoch": 2.8,
"grad_norm": 1.1796925067901611,
"learning_rate": 4.77013918177984e-06,
"loss": 0.4865,
"step": 36000
},
{
"epoch": 2.81,
"grad_norm": 1.284103274345398,
"learning_rate": 4.753268663011388e-06,
"loss": 0.4865,
"step": 36100
},
{
"epoch": 2.82,
"grad_norm": 1.2095571756362915,
"learning_rate": 4.736398144242936e-06,
"loss": 0.4972,
"step": 36200
},
{
"epoch": 2.82,
"grad_norm": 1.256468415260315,
"learning_rate": 4.719527625474483e-06,
"loss": 0.4912,
"step": 36300
},
{
"epoch": 2.83,
"grad_norm": 1.1643502712249756,
"learning_rate": 4.7026571067060315e-06,
"loss": 0.4873,
"step": 36400
},
{
"epoch": 2.84,
"grad_norm": 1.1925140619277954,
"learning_rate": 4.68578658793758e-06,
"loss": 0.4985,
"step": 36500
},
{
"epoch": 2.85,
"grad_norm": 1.3707034587860107,
"learning_rate": 4.668916069169127e-06,
"loss": 0.4847,
"step": 36600
},
{
"epoch": 2.85,
"grad_norm": 1.2967514991760254,
"learning_rate": 4.652045550400675e-06,
"loss": 0.492,
"step": 36700
},
{
"epoch": 2.86,
"grad_norm": 1.3054057359695435,
"learning_rate": 4.635175031632223e-06,
"loss": 0.4884,
"step": 36800
},
{
"epoch": 2.87,
"grad_norm": 1.2648892402648926,
"learning_rate": 4.618304512863771e-06,
"loss": 0.4999,
"step": 36900
},
{
"epoch": 2.88,
"grad_norm": 1.1818757057189941,
"learning_rate": 4.601433994095319e-06,
"loss": 0.4901,
"step": 37000
},
{
"epoch": 2.89,
"grad_norm": 1.2684180736541748,
"learning_rate": 4.5845634753268665e-06,
"loss": 0.4854,
"step": 37100
},
{
"epoch": 2.89,
"grad_norm": 1.2977045774459839,
"learning_rate": 4.567692956558415e-06,
"loss": 0.4925,
"step": 37200
},
{
"epoch": 2.9,
"grad_norm": 1.263070821762085,
"learning_rate": 4.550822437789962e-06,
"loss": 0.4856,
"step": 37300
},
{
"epoch": 2.91,
"grad_norm": 1.204965591430664,
"learning_rate": 4.53395191902151e-06,
"loss": 0.4954,
"step": 37400
},
{
"epoch": 2.92,
"grad_norm": 1.445685625076294,
"learning_rate": 4.5170814002530585e-06,
"loss": 0.4922,
"step": 37500
},
{
"epoch": 2.92,
"grad_norm": 1.2058970928192139,
"learning_rate": 4.500210881484606e-06,
"loss": 0.4963,
"step": 37600
},
{
"epoch": 2.93,
"grad_norm": 1.1493515968322754,
"learning_rate": 4.483340362716154e-06,
"loss": 0.4833,
"step": 37700
},
{
"epoch": 2.94,
"grad_norm": 1.1071513891220093,
"learning_rate": 4.4664698439477014e-06,
"loss": 0.4802,
"step": 37800
},
{
"epoch": 2.95,
"grad_norm": 1.1301243305206299,
"learning_rate": 4.44959932517925e-06,
"loss": 0.4854,
"step": 37900
},
{
"epoch": 2.96,
"grad_norm": 1.1066009998321533,
"learning_rate": 4.432728806410798e-06,
"loss": 0.4801,
"step": 38000
},
{
"epoch": 2.96,
"grad_norm": 1.0919671058654785,
"learning_rate": 4.415858287642345e-06,
"loss": 0.4875,
"step": 38100
},
{
"epoch": 2.97,
"grad_norm": 1.0837271213531494,
"learning_rate": 4.398987768873893e-06,
"loss": 0.4859,
"step": 38200
},
{
"epoch": 2.98,
"grad_norm": 1.156788945198059,
"learning_rate": 4.382117250105441e-06,
"loss": 0.4875,
"step": 38300
},
{
"epoch": 2.99,
"grad_norm": 1.0719399452209473,
"learning_rate": 4.365246731336989e-06,
"loss": 0.485,
"step": 38400
},
{
"epoch": 2.99,
"grad_norm": 1.0001031160354614,
"learning_rate": 4.348376212568537e-06,
"loss": 0.484,
"step": 38500
},
{
"epoch": 3.0,
"eval_loss": 0.5327300429344177,
"eval_runtime": 238.1908,
"eval_samples_per_second": 27.071,
"eval_steps_per_second": 27.071,
"step": 38566
},
{
"epoch": 3.0,
"grad_norm": 1.18661367893219,
"learning_rate": 4.331505693800085e-06,
"loss": 0.4712,
"step": 38600
},
{
"epoch": 3.01,
"grad_norm": 1.1101746559143066,
"learning_rate": 4.314635175031632e-06,
"loss": 0.4248,
"step": 38700
},
{
"epoch": 3.02,
"grad_norm": 1.123779296875,
"learning_rate": 4.29776465626318e-06,
"loss": 0.4205,
"step": 38800
},
{
"epoch": 3.03,
"grad_norm": 1.0496944189071655,
"learning_rate": 4.280894137494728e-06,
"loss": 0.4265,
"step": 38900
},
{
"epoch": 3.03,
"grad_norm": 1.2305235862731934,
"learning_rate": 4.264023618726277e-06,
"loss": 0.427,
"step": 39000
},
{
"epoch": 3.04,
"grad_norm": 1.2351257801055908,
"learning_rate": 4.247153099957824e-06,
"loss": 0.416,
"step": 39100
},
{
"epoch": 3.05,
"grad_norm": 1.4697481393814087,
"learning_rate": 4.230282581189372e-06,
"loss": 0.4305,
"step": 39200
},
{
"epoch": 3.06,
"grad_norm": 1.2224518060684204,
"learning_rate": 4.2134120624209196e-06,
"loss": 0.4332,
"step": 39300
},
{
"epoch": 3.06,
"grad_norm": 1.1773486137390137,
"learning_rate": 4.196541543652468e-06,
"loss": 0.4253,
"step": 39400
},
{
"epoch": 3.07,
"grad_norm": 1.1467719078063965,
"learning_rate": 4.179671024884015e-06,
"loss": 0.4229,
"step": 39500
},
{
"epoch": 3.08,
"grad_norm": 1.3091965913772583,
"learning_rate": 4.162800506115563e-06,
"loss": 0.4227,
"step": 39600
},
{
"epoch": 3.09,
"grad_norm": 1.208458423614502,
"learning_rate": 4.145929987347112e-06,
"loss": 0.4265,
"step": 39700
},
{
"epoch": 3.1,
"grad_norm": 1.4349595308303833,
"learning_rate": 4.129059468578659e-06,
"loss": 0.4263,
"step": 39800
},
{
"epoch": 3.1,
"grad_norm": 1.4481394290924072,
"learning_rate": 4.112188949810207e-06,
"loss": 0.4316,
"step": 39900
},
{
"epoch": 3.11,
"grad_norm": 1.0939385890960693,
"learning_rate": 4.0953184310417545e-06,
"loss": 0.4304,
"step": 40000
},
{
"epoch": 3.12,
"grad_norm": 1.3906654119491577,
"learning_rate": 4.078447912273303e-06,
"loss": 0.4235,
"step": 40100
},
{
"epoch": 3.13,
"grad_norm": 1.0942233800888062,
"learning_rate": 4.061577393504851e-06,
"loss": 0.4201,
"step": 40200
},
{
"epoch": 3.13,
"grad_norm": 1.2476675510406494,
"learning_rate": 4.044706874736398e-06,
"loss": 0.4362,
"step": 40300
},
{
"epoch": 3.14,
"grad_norm": 1.206559658050537,
"learning_rate": 4.0278363559679465e-06,
"loss": 0.4238,
"step": 40400
},
{
"epoch": 3.15,
"grad_norm": 1.1425930261611938,
"learning_rate": 4.010965837199494e-06,
"loss": 0.4232,
"step": 40500
},
{
"epoch": 3.16,
"grad_norm": 1.048159122467041,
"learning_rate": 3.994095318431042e-06,
"loss": 0.4259,
"step": 40600
},
{
"epoch": 3.17,
"grad_norm": 1.2630932331085205,
"learning_rate": 3.97722479966259e-06,
"loss": 0.428,
"step": 40700
},
{
"epoch": 3.17,
"grad_norm": 1.336988091468811,
"learning_rate": 3.960354280894138e-06,
"loss": 0.4276,
"step": 40800
},
{
"epoch": 3.18,
"grad_norm": 1.2166807651519775,
"learning_rate": 3.943483762125686e-06,
"loss": 0.4247,
"step": 40900
},
{
"epoch": 3.19,
"grad_norm": 1.1539608240127563,
"learning_rate": 3.926613243357233e-06,
"loss": 0.4279,
"step": 41000
},
{
"epoch": 3.2,
"grad_norm": 1.1006641387939453,
"learning_rate": 3.9097427245887815e-06,
"loss": 0.4166,
"step": 41100
},
{
"epoch": 3.2,
"grad_norm": 1.218772292137146,
"learning_rate": 3.89287220582033e-06,
"loss": 0.4283,
"step": 41200
},
{
"epoch": 3.21,
"grad_norm": 1.3342804908752441,
"learning_rate": 3.876001687051877e-06,
"loss": 0.4299,
"step": 41300
},
{
"epoch": 3.22,
"grad_norm": 1.1451455354690552,
"learning_rate": 3.8591311682834244e-06,
"loss": 0.4211,
"step": 41400
},
{
"epoch": 3.23,
"grad_norm": 1.171906590461731,
"learning_rate": 3.842260649514973e-06,
"loss": 0.4244,
"step": 41500
},
{
"epoch": 3.24,
"grad_norm": 1.0812770128250122,
"learning_rate": 3.825390130746521e-06,
"loss": 0.4269,
"step": 41600
},
{
"epoch": 3.24,
"grad_norm": 1.1748661994934082,
"learning_rate": 3.8085196119780687e-06,
"loss": 0.4225,
"step": 41700
},
{
"epoch": 3.25,
"grad_norm": 1.3125067949295044,
"learning_rate": 3.791649093209617e-06,
"loss": 0.4204,
"step": 41800
},
{
"epoch": 3.26,
"grad_norm": 1.4073984622955322,
"learning_rate": 3.7747785744411642e-06,
"loss": 0.4172,
"step": 41900
},
{
"epoch": 3.27,
"grad_norm": 1.184374451637268,
"learning_rate": 3.757908055672712e-06,
"loss": 0.4228,
"step": 42000
},
{
"epoch": 3.27,
"grad_norm": 1.4073774814605713,
"learning_rate": 3.7410375369042602e-06,
"loss": 0.4257,
"step": 42100
},
{
"epoch": 3.28,
"grad_norm": 1.1929515600204468,
"learning_rate": 3.724167018135808e-06,
"loss": 0.4272,
"step": 42200
},
{
"epoch": 3.29,
"grad_norm": 1.3336668014526367,
"learning_rate": 3.707296499367356e-06,
"loss": 0.4177,
"step": 42300
},
{
"epoch": 3.3,
"grad_norm": 1.2955700159072876,
"learning_rate": 3.6904259805989036e-06,
"loss": 0.4267,
"step": 42400
},
{
"epoch": 3.31,
"grad_norm": 1.4237792491912842,
"learning_rate": 3.6735554618304514e-06,
"loss": 0.4338,
"step": 42500
},
{
"epoch": 3.31,
"grad_norm": 1.1870936155319214,
"learning_rate": 3.656684943061999e-06,
"loss": 0.4301,
"step": 42600
},
{
"epoch": 3.32,
"grad_norm": 1.4807709455490112,
"learning_rate": 3.6398144242935474e-06,
"loss": 0.4217,
"step": 42700
},
{
"epoch": 3.33,
"grad_norm": 1.181097388267517,
"learning_rate": 3.622943905525095e-06,
"loss": 0.4307,
"step": 42800
},
{
"epoch": 3.34,
"grad_norm": 1.41066312789917,
"learning_rate": 3.6060733867566434e-06,
"loss": 0.4294,
"step": 42900
},
{
"epoch": 3.34,
"grad_norm": 1.1162735223770142,
"learning_rate": 3.5892028679881908e-06,
"loss": 0.429,
"step": 43000
},
{
"epoch": 3.35,
"grad_norm": 1.1872411966323853,
"learning_rate": 3.5723323492197386e-06,
"loss": 0.4248,
"step": 43100
},
{
"epoch": 3.36,
"grad_norm": 1.2379745244979858,
"learning_rate": 3.5554618304512868e-06,
"loss": 0.4173,
"step": 43200
},
{
"epoch": 3.37,
"grad_norm": 1.1592068672180176,
"learning_rate": 3.5385913116828346e-06,
"loss": 0.4225,
"step": 43300
},
{
"epoch": 3.38,
"grad_norm": 1.1917668581008911,
"learning_rate": 3.5217207929143828e-06,
"loss": 0.4257,
"step": 43400
},
{
"epoch": 3.38,
"grad_norm": 1.2930108308792114,
"learning_rate": 3.50485027414593e-06,
"loss": 0.4229,
"step": 43500
},
{
"epoch": 3.39,
"grad_norm": 1.3443032503128052,
"learning_rate": 3.487979755377478e-06,
"loss": 0.4215,
"step": 43600
},
{
"epoch": 3.4,
"grad_norm": 1.313826084136963,
"learning_rate": 3.471109236609026e-06,
"loss": 0.4241,
"step": 43700
},
{
"epoch": 3.41,
"grad_norm": 1.2277220487594604,
"learning_rate": 3.454238717840574e-06,
"loss": 0.4213,
"step": 43800
},
{
"epoch": 3.41,
"grad_norm": 1.154130220413208,
"learning_rate": 3.4373681990721217e-06,
"loss": 0.4247,
"step": 43900
},
{
"epoch": 3.42,
"grad_norm": 1.444671630859375,
"learning_rate": 3.4204976803036695e-06,
"loss": 0.4259,
"step": 44000
},
{
"epoch": 3.43,
"grad_norm": 1.3900195360183716,
"learning_rate": 3.4036271615352173e-06,
"loss": 0.416,
"step": 44100
},
{
"epoch": 3.44,
"grad_norm": 1.1950856447219849,
"learning_rate": 3.386756642766765e-06,
"loss": 0.4281,
"step": 44200
},
{
"epoch": 3.45,
"grad_norm": 1.34796142578125,
"learning_rate": 3.3698861239983133e-06,
"loss": 0.4288,
"step": 44300
},
{
"epoch": 3.45,
"grad_norm": 1.180707573890686,
"learning_rate": 3.353015605229861e-06,
"loss": 0.4199,
"step": 44400
},
{
"epoch": 3.46,
"grad_norm": 1.4567344188690186,
"learning_rate": 3.3361450864614093e-06,
"loss": 0.4267,
"step": 44500
},
{
"epoch": 3.47,
"grad_norm": 1.1246074438095093,
"learning_rate": 3.3192745676929567e-06,
"loss": 0.4231,
"step": 44600
},
{
"epoch": 3.48,
"grad_norm": 1.1688181161880493,
"learning_rate": 3.3024040489245045e-06,
"loss": 0.4304,
"step": 44700
},
{
"epoch": 3.48,
"grad_norm": 1.411210536956787,
"learning_rate": 3.2855335301560527e-06,
"loss": 0.4233,
"step": 44800
},
{
"epoch": 3.49,
"grad_norm": 1.2641918659210205,
"learning_rate": 3.2686630113876005e-06,
"loss": 0.4189,
"step": 44900
},
{
"epoch": 3.5,
"grad_norm": 1.5290361642837524,
"learning_rate": 3.2517924926191487e-06,
"loss": 0.4248,
"step": 45000
},
{
"epoch": 3.51,
"grad_norm": 1.1927236318588257,
"learning_rate": 3.234921973850696e-06,
"loss": 0.4187,
"step": 45100
},
{
"epoch": 3.52,
"grad_norm": 1.2530293464660645,
"learning_rate": 3.218051455082244e-06,
"loss": 0.4278,
"step": 45200
},
{
"epoch": 3.52,
"grad_norm": 1.3620356321334839,
"learning_rate": 3.201180936313792e-06,
"loss": 0.4212,
"step": 45300
},
{
"epoch": 3.53,
"grad_norm": 1.2663601636886597,
"learning_rate": 3.18431041754534e-06,
"loss": 0.4219,
"step": 45400
},
{
"epoch": 3.54,
"grad_norm": 1.377575159072876,
"learning_rate": 3.1674398987768877e-06,
"loss": 0.423,
"step": 45500
},
{
"epoch": 3.55,
"grad_norm": 1.579256296157837,
"learning_rate": 3.1505693800084354e-06,
"loss": 0.4266,
"step": 45600
},
{
"epoch": 3.55,
"grad_norm": 1.2571321725845337,
"learning_rate": 3.1336988612399832e-06,
"loss": 0.4277,
"step": 45700
},
{
"epoch": 3.56,
"grad_norm": 1.2397903203964233,
"learning_rate": 3.116828342471531e-06,
"loss": 0.4255,
"step": 45800
},
{
"epoch": 3.57,
"grad_norm": 1.5481135845184326,
"learning_rate": 3.0999578237030792e-06,
"loss": 0.4236,
"step": 45900
},
{
"epoch": 3.58,
"grad_norm": 1.1892071962356567,
"learning_rate": 3.083087304934627e-06,
"loss": 0.4256,
"step": 46000
},
{
"epoch": 3.59,
"grad_norm": 1.4242011308670044,
"learning_rate": 3.0662167861661752e-06,
"loss": 0.4238,
"step": 46100
},
{
"epoch": 3.59,
"grad_norm": 1.2539838552474976,
"learning_rate": 3.0493462673977226e-06,
"loss": 0.4344,
"step": 46200
},
{
"epoch": 3.6,
"grad_norm": 1.354716181755066,
"learning_rate": 3.0324757486292704e-06,
"loss": 0.4226,
"step": 46300
},
{
"epoch": 3.61,
"grad_norm": 1.1949005126953125,
"learning_rate": 3.0156052298608186e-06,
"loss": 0.4229,
"step": 46400
},
{
"epoch": 3.62,
"grad_norm": 1.1654438972473145,
"learning_rate": 2.9987347110923664e-06,
"loss": 0.4188,
"step": 46500
},
{
"epoch": 3.62,
"grad_norm": 1.3851518630981445,
"learning_rate": 2.9818641923239146e-06,
"loss": 0.4164,
"step": 46600
},
{
"epoch": 3.63,
"grad_norm": 1.6049551963806152,
"learning_rate": 2.964993673555462e-06,
"loss": 0.4248,
"step": 46700
},
{
"epoch": 3.64,
"grad_norm": 1.437743067741394,
"learning_rate": 2.9481231547870098e-06,
"loss": 0.4189,
"step": 46800
},
{
"epoch": 3.65,
"grad_norm": 1.1186052560806274,
"learning_rate": 2.931252636018558e-06,
"loss": 0.4135,
"step": 46900
},
{
"epoch": 3.66,
"grad_norm": 1.1863722801208496,
"learning_rate": 2.9143821172501058e-06,
"loss": 0.428,
"step": 47000
},
{
"epoch": 3.66,
"grad_norm": 1.1537821292877197,
"learning_rate": 2.8975115984816536e-06,
"loss": 0.4305,
"step": 47100
},
{
"epoch": 3.67,
"grad_norm": 1.332077980041504,
"learning_rate": 2.8806410797132014e-06,
"loss": 0.4222,
"step": 47200
},
{
"epoch": 3.68,
"grad_norm": 1.0979773998260498,
"learning_rate": 2.863770560944749e-06,
"loss": 0.4182,
"step": 47300
},
{
"epoch": 3.69,
"grad_norm": 1.34776771068573,
"learning_rate": 2.846900042176297e-06,
"loss": 0.4182,
"step": 47400
},
{
"epoch": 3.69,
"grad_norm": 1.5361868143081665,
"learning_rate": 2.830029523407845e-06,
"loss": 0.4177,
"step": 47500
},
{
"epoch": 3.7,
"grad_norm": 1.4123471975326538,
"learning_rate": 2.813159004639393e-06,
"loss": 0.417,
"step": 47600
},
{
"epoch": 3.71,
"grad_norm": 1.23320472240448,
"learning_rate": 2.7962884858709403e-06,
"loss": 0.4217,
"step": 47700
},
{
"epoch": 3.72,
"grad_norm": 1.4047470092773438,
"learning_rate": 2.7794179671024885e-06,
"loss": 0.4194,
"step": 47800
},
{
"epoch": 3.73,
"grad_norm": 1.2386730909347534,
"learning_rate": 2.7625474483340363e-06,
"loss": 0.4248,
"step": 47900
},
{
"epoch": 3.73,
"grad_norm": 1.4956306219100952,
"learning_rate": 2.7456769295655845e-06,
"loss": 0.4129,
"step": 48000
},
{
"epoch": 3.74,
"grad_norm": 1.058263897895813,
"learning_rate": 2.7288064107971323e-06,
"loss": 0.4158,
"step": 48100
},
{
"epoch": 3.75,
"grad_norm": 1.4023634195327759,
"learning_rate": 2.7119358920286805e-06,
"loss": 0.422,
"step": 48200
},
{
"epoch": 3.76,
"grad_norm": 1.2374383211135864,
"learning_rate": 2.695065373260228e-06,
"loss": 0.4285,
"step": 48300
},
{
"epoch": 3.76,
"grad_norm": 1.3916772603988647,
"learning_rate": 2.6781948544917757e-06,
"loss": 0.4254,
"step": 48400
},
{
"epoch": 3.77,
"grad_norm": 1.2032909393310547,
"learning_rate": 2.661324335723324e-06,
"loss": 0.4283,
"step": 48500
},
{
"epoch": 3.78,
"grad_norm": 1.2075848579406738,
"learning_rate": 2.6444538169548717e-06,
"loss": 0.4193,
"step": 48600
},
{
"epoch": 3.79,
"grad_norm": 1.1569247245788574,
"learning_rate": 2.6275832981864195e-06,
"loss": 0.4261,
"step": 48700
},
{
"epoch": 3.8,
"grad_norm": 1.4597578048706055,
"learning_rate": 2.6107127794179673e-06,
"loss": 0.4283,
"step": 48800
},
{
"epoch": 3.8,
"grad_norm": 1.3192527294158936,
"learning_rate": 2.593842260649515e-06,
"loss": 0.4228,
"step": 48900
},
{
"epoch": 3.81,
"grad_norm": 1.1565971374511719,
"learning_rate": 2.576971741881063e-06,
"loss": 0.4276,
"step": 49000
},
{
"epoch": 3.82,
"grad_norm": 1.205554485321045,
"learning_rate": 2.560101223112611e-06,
"loss": 0.4197,
"step": 49100
},
{
"epoch": 3.83,
"grad_norm": 1.1162891387939453,
"learning_rate": 2.543230704344159e-06,
"loss": 0.418,
"step": 49200
},
{
"epoch": 3.83,
"grad_norm": 1.4133708477020264,
"learning_rate": 2.5263601855757062e-06,
"loss": 0.4266,
"step": 49300
},
{
"epoch": 3.84,
"grad_norm": 1.1502392292022705,
"learning_rate": 2.5094896668072544e-06,
"loss": 0.4254,
"step": 49400
},
{
"epoch": 3.85,
"grad_norm": 1.2038531303405762,
"learning_rate": 2.4926191480388022e-06,
"loss": 0.4235,
"step": 49500
},
{
"epoch": 3.86,
"grad_norm": 1.2624757289886475,
"learning_rate": 2.4757486292703504e-06,
"loss": 0.425,
"step": 49600
},
{
"epoch": 3.87,
"grad_norm": 1.3285934925079346,
"learning_rate": 2.458878110501898e-06,
"loss": 0.4228,
"step": 49700
},
{
"epoch": 3.87,
"grad_norm": 1.2643487453460693,
"learning_rate": 2.442007591733446e-06,
"loss": 0.4244,
"step": 49800
},
{
"epoch": 3.88,
"grad_norm": 1.1776206493377686,
"learning_rate": 2.425137072964994e-06,
"loss": 0.4243,
"step": 49900
},
{
"epoch": 3.89,
"grad_norm": 1.3528225421905518,
"learning_rate": 2.4082665541965416e-06,
"loss": 0.4253,
"step": 50000
},
{
"epoch": 3.9,
"grad_norm": 1.3553099632263184,
"learning_rate": 2.39139603542809e-06,
"loss": 0.4221,
"step": 50100
},
{
"epoch": 3.9,
"grad_norm": 1.1443736553192139,
"learning_rate": 2.3745255166596376e-06,
"loss": 0.4125,
"step": 50200
},
{
"epoch": 3.91,
"grad_norm": 1.3355522155761719,
"learning_rate": 2.3576549978911854e-06,
"loss": 0.4217,
"step": 50300
},
{
"epoch": 3.92,
"grad_norm": 1.3759735822677612,
"learning_rate": 2.340784479122733e-06,
"loss": 0.4228,
"step": 50400
},
{
"epoch": 3.93,
"grad_norm": 1.3781483173370361,
"learning_rate": 2.323913960354281e-06,
"loss": 0.4258,
"step": 50500
},
{
"epoch": 3.94,
"grad_norm": 1.255956768989563,
"learning_rate": 2.3070434415858288e-06,
"loss": 0.4167,
"step": 50600
},
{
"epoch": 3.94,
"grad_norm": 1.239749789237976,
"learning_rate": 2.290172922817377e-06,
"loss": 0.4234,
"step": 50700
},
{
"epoch": 3.95,
"grad_norm": 1.4020352363586426,
"learning_rate": 2.2733024040489248e-06,
"loss": 0.418,
"step": 50800
},
{
"epoch": 3.96,
"grad_norm": 1.4167559146881104,
"learning_rate": 2.2564318852804726e-06,
"loss": 0.4228,
"step": 50900
},
{
"epoch": 3.97,
"grad_norm": 1.1842854022979736,
"learning_rate": 2.2395613665120204e-06,
"loss": 0.4147,
"step": 51000
},
{
"epoch": 3.97,
"grad_norm": 1.1870306730270386,
"learning_rate": 2.222690847743568e-06,
"loss": 0.4119,
"step": 51100
},
{
"epoch": 3.98,
"grad_norm": 1.261056900024414,
"learning_rate": 2.2058203289751164e-06,
"loss": 0.4209,
"step": 51200
},
{
"epoch": 3.99,
"grad_norm": 1.1587308645248413,
"learning_rate": 2.1889498102066637e-06,
"loss": 0.4263,
"step": 51300
},
{
"epoch": 4.0,
"grad_norm": 1.261036992073059,
"learning_rate": 2.172079291438212e-06,
"loss": 0.4107,
"step": 51400
},
{
"epoch": 4.0,
"eval_loss": 0.5302631258964539,
"eval_runtime": 238.7038,
"eval_samples_per_second": 27.013,
"eval_steps_per_second": 27.013,
"step": 51422
}
],
"logging_steps": 100,
"max_steps": 64275,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"total_flos": 2.2978476544119404e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
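
The JSON above is the trainer_state.json that the Hugging Face transformers Trainer writes alongside a checkpoint: log_history holds one entry per logging_steps (100) training steps, plus one evaluation entry at the end of each epoch. Below is a minimal sketch for summarising it; it assumes the file has been saved locally as "trainer_state.json" (the path is illustrative, not part of the upload).

# Minimal sketch: summarise the Trainer log in trainer_state.json.
# Assumes the JSON shown above was saved locally under this name.
import json

with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Training entries carry "loss"; per-epoch evaluation entries carry "eval_loss".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"logged training points: {len(train_logs)} (every {state['logging_steps']} steps)")
print(f"final training loss   : {train_logs[-1]['loss']} at step {train_logs[-1]['step']}")

for e in eval_logs:
    print(f"epoch {e['epoch']:.0f}: eval_loss={e['eval_loss']:.4f} "
          f"({e['eval_samples_per_second']:.1f} samples/s)")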