{
  "best_metric": 0.9938708156529938,
  "best_model_checkpoint": "beit-large-patch16-224-finetuned-BreastCancer-Classification-BreakHis-AH-60-20-20/checkpoint-2388",
  "epoch": 12.0,
  "global_step": 2388,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.1627906976744186e-08,
      "loss": 0.7335,
      "step": 5
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.3255813953488372e-08,
      "loss": 0.7745,
      "step": 10
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.488372093023256e-08,
      "loss": 0.7823,
      "step": 15
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.6511627906976744e-08,
      "loss": 0.7606,
      "step": 20
    },
    {
      "epoch": 0.13,
      "learning_rate": 5.8139534883720935e-08,
      "loss": 0.7698,
      "step": 25
    },
    {
      "epoch": 0.15,
      "learning_rate": 6.976744186046512e-08,
      "loss": 0.7576,
      "step": 30
    },
    {
      "epoch": 0.18,
      "learning_rate": 8.139534883720931e-08,
      "loss": 0.7102,
      "step": 35
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.302325581395349e-08,
      "loss": 0.7435,
      "step": 40
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.0465116279069768e-07,
      "loss": 0.7648,
      "step": 45
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.1627906976744187e-07,
      "loss": 0.7704,
      "step": 50
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.2790697674418605e-07,
      "loss": 0.688,
      "step": 55
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.3953488372093024e-07,
      "loss": 0.7343,
      "step": 60
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.5116279069767443e-07,
      "loss": 0.7055,
      "step": 65
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.6279069767441862e-07,
      "loss": 0.7237,
      "step": 70
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.744186046511628e-07,
      "loss": 0.7374,
      "step": 75
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8604651162790698e-07,
      "loss": 0.653,
      "step": 80
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9767441860465117e-07,
      "loss": 0.6931,
      "step": 85
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.0930232558139536e-07,
      "loss": 0.638,
      "step": 90
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.2093023255813955e-07,
      "loss": 0.6868,
      "step": 95
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.3255813953488374e-07,
      "loss": 0.716,
      "step": 100
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4418604651162793e-07,
      "loss": 0.725,
      "step": 105
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.558139534883721e-07,
      "loss": 0.6195,
      "step": 110
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.674418604651163e-07,
      "loss": 0.5791,
      "step": 115
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.790697674418605e-07,
      "loss": 0.6498,
      "step": 120
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.906976744186047e-07,
      "loss": 0.6431,
      "step": 125
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.0232558139534886e-07,
      "loss": 0.5811,
      "step": 130
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.139534883720931e-07,
      "loss": 0.5803,
      "step": 135
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.2558139534883724e-07,
      "loss": 0.5635,
      "step": 140
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.3720930232558146e-07,
      "loss": 0.5369,
      "step": 145
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.488372093023256e-07,
      "loss": 0.5395,
      "step": 150
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.6046511627906984e-07,
      "loss": 0.5645,
      "step": 155
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.7209302325581396e-07,
      "loss": 0.5327,
      "step": 160
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.837209302325582e-07,
      "loss": 0.5035,
      "step": 165
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.9534883720930234e-07,
      "loss": 0.5256,
      "step": 170
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.0697674418604655e-07,
      "loss": 0.5225,
      "step": 175
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.186046511627907e-07,
      "loss": 0.5197,
      "step": 180
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.3023255813953494e-07,
      "loss": 0.5052,
      "step": 185
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.418604651162791e-07,
      "loss": 0.4875,
      "step": 190
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.534883720930233e-07,
      "loss": 0.46,
      "step": 195
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8481848184818482,
      "eval_loss": 0.39499393105506897,
      "eval_runtime": 90.9996,
      "eval_samples_per_second": 23.308,
      "eval_steps_per_second": 1.462,
      "step": 199
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.651162790697675e-07,
      "loss": 0.5344,
      "step": 200
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.767441860465117e-07,
      "loss": 0.4448,
      "step": 205
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.883720930232559e-07,
      "loss": 0.5011,
      "step": 210
    },
    {
      "epoch": 1.08,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.4055,
      "step": 215
    },
    {
      "epoch": 1.11,
      "learning_rate": 5.116279069767442e-07,
      "loss": 0.4714,
      "step": 220
    },
    {
      "epoch": 1.13,
      "learning_rate": 5.232558139534884e-07,
      "loss": 0.4132,
      "step": 225
    },
    {
      "epoch": 1.16,
      "learning_rate": 5.348837209302326e-07,
      "loss": 0.4394,
      "step": 230
    },
    {
      "epoch": 1.18,
      "learning_rate": 5.465116279069768e-07,
      "loss": 0.3649,
      "step": 235
    },
    {
      "epoch": 1.21,
      "learning_rate": 5.58139534883721e-07,
      "loss": 0.3948,
      "step": 240
    },
    {
      "epoch": 1.23,
      "learning_rate": 5.697674418604652e-07,
      "loss": 0.4236,
      "step": 245
    },
    {
      "epoch": 1.26,
      "learning_rate": 5.813953488372094e-07,
      "loss": 0.4115,
      "step": 250
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.930232558139536e-07,
      "loss": 0.465,
      "step": 255
    },
    {
      "epoch": 1.31,
      "learning_rate": 6.046511627906977e-07,
      "loss": 0.3335,
      "step": 260
    },
    {
      "epoch": 1.33,
      "learning_rate": 6.162790697674419e-07,
      "loss": 0.3746,
      "step": 265
    },
    {
      "epoch": 1.36,
      "learning_rate": 6.279069767441862e-07,
      "loss": 0.3266,
      "step": 270
    },
    {
      "epoch": 1.38,
      "learning_rate": 6.395348837209303e-07,
      "loss": 0.3936,
      "step": 275
    },
    {
      "epoch": 1.41,
      "learning_rate": 6.511627906976745e-07,
      "loss": 0.3193,
      "step": 280
    },
    {
      "epoch": 1.43,
      "learning_rate": 6.627906976744186e-07,
      "loss": 0.3624,
      "step": 285
    },
    {
      "epoch": 1.46,
      "learning_rate": 6.744186046511629e-07,
      "loss": 0.303,
      "step": 290
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.86046511627907e-07,
      "loss": 0.3427,
      "step": 295
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.976744186046513e-07,
      "loss": 0.2622,
      "step": 300
    },
    {
      "epoch": 1.53,
      "learning_rate": 7.093023255813954e-07,
      "loss": 0.2665,
      "step": 305
    },
    {
      "epoch": 1.56,
      "learning_rate": 7.209302325581397e-07,
      "loss": 0.3244,
      "step": 310
    },
    {
      "epoch": 1.58,
      "learning_rate": 7.325581395348838e-07,
      "loss": 0.2232,
      "step": 315
    },
    {
      "epoch": 1.61,
      "learning_rate": 7.441860465116279e-07,
      "loss": 0.2798,
      "step": 320
    },
    {
      "epoch": 1.63,
      "learning_rate": 7.558139534883721e-07,
      "loss": 0.261,
      "step": 325
    },
    {
      "epoch": 1.66,
      "learning_rate": 7.674418604651165e-07,
      "loss": 0.2758,
      "step": 330
    },
    {
      "epoch": 1.68,
      "learning_rate": 7.790697674418606e-07,
      "loss": 0.3212,
      "step": 335
    },
    {
      "epoch": 1.71,
      "learning_rate": 7.906976744186047e-07,
      "loss": 0.2536,
      "step": 340
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.023255813953489e-07,
      "loss": 0.2719,
      "step": 345
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.139534883720931e-07,
      "loss": 0.2754,
      "step": 350
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.255813953488373e-07,
      "loss": 0.187,
      "step": 355
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.372093023255814e-07,
      "loss": 0.2383,
      "step": 360
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.488372093023257e-07,
      "loss": 0.3075,
      "step": 365
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.604651162790699e-07,
      "loss": 0.2802,
      "step": 370
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.720930232558141e-07,
      "loss": 0.2815,
      "step": 375
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.837209302325582e-07,
      "loss": 0.2916,
      "step": 380
    },
    {
      "epoch": 1.93,
      "learning_rate": 8.953488372093023e-07,
      "loss": 0.2769,
      "step": 385
    },
    {
      "epoch": 1.96,
      "learning_rate": 9.069767441860466e-07,
      "loss": 0.2174,
      "step": 390
    },
    {
      "epoch": 1.98,
      "learning_rate": 9.186046511627907e-07,
      "loss": 0.2048,
      "step": 395
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.918906176331919,
      "eval_loss": 0.18860779702663422,
      "eval_runtime": 91.0197,
      "eval_samples_per_second": 23.303,
      "eval_steps_per_second": 1.461,
      "step": 398
    },
    {
      "epoch": 2.01,
      "learning_rate": 9.30232558139535e-07,
      "loss": 0.2442,
      "step": 400
    },
    {
      "epoch": 2.04,
      "learning_rate": 9.418604651162791e-07,
      "loss": 0.2001,
      "step": 405
    },
    {
      "epoch": 2.06,
      "learning_rate": 9.534883720930234e-07,
      "loss": 0.2522,
      "step": 410
    },
    {
      "epoch": 2.09,
      "learning_rate": 9.651162790697675e-07,
      "loss": 0.1624,
      "step": 415
    },
    {
      "epoch": 2.11,
      "learning_rate": 9.767441860465117e-07,
      "loss": 0.2131,
      "step": 420
    },
    {
      "epoch": 2.14,
      "learning_rate": 9.88372093023256e-07,
      "loss": 0.1761,
      "step": 425
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.2182,
      "step": 430
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.0116279069767444e-06,
      "loss": 0.2105,
      "step": 435
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.0232558139534884e-06,
      "loss": 0.2178,
      "step": 440
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.0348837209302326e-06,
      "loss": 0.1795,
      "step": 445
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.0465116279069768e-06,
      "loss": 0.1685,
      "step": 450
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.058139534883721e-06,
      "loss": 0.2405,
      "step": 455
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.0697674418604653e-06,
      "loss": 0.1603,
      "step": 460
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0813953488372093e-06,
      "loss": 0.2294,
      "step": 465
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0930232558139537e-06,
      "loss": 0.2107,
      "step": 470
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.1046511627906977e-06,
      "loss": 0.1622,
      "step": 475
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.116279069767442e-06,
      "loss": 0.1739,
      "step": 480
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.1279069767441861e-06,
      "loss": 0.2156,
      "step": 485
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.1395348837209304e-06,
      "loss": 0.2137,
      "step": 490
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.1511627906976746e-06,
      "loss": 0.2177,
      "step": 495
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.1627906976744188e-06,
      "loss": 0.2202,
      "step": 500
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.1744186046511628e-06,
      "loss": 0.1772,
      "step": 505
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.1860465116279072e-06,
      "loss": 0.1435,
      "step": 510
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.1976744186046512e-06,
      "loss": 0.1728,
      "step": 515
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.2093023255813954e-06,
      "loss": 0.1686,
      "step": 520
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.2209302325581397e-06,
      "loss": 0.1815,
      "step": 525
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.2325581395348839e-06,
      "loss": 0.2048,
      "step": 530
    },
    {
      "epoch": 2.69,
      "learning_rate": 1.244186046511628e-06,
      "loss": 0.1522,
      "step": 535
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.2558139534883723e-06,
      "loss": 0.1424,
      "step": 540
    },
    {
      "epoch": 2.74,
      "learning_rate": 1.2674418604651165e-06,
      "loss": 0.1503,
      "step": 545
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.2790697674418605e-06,
      "loss": 0.2181,
      "step": 550
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.2906976744186048e-06,
      "loss": 0.1607,
      "step": 555
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.302325581395349e-06,
      "loss": 0.1129,
      "step": 560
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.313953488372093e-06,
      "loss": 0.1496,
      "step": 565
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.3255813953488372e-06,
      "loss": 0.1261,
      "step": 570
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.3372093023255814e-06,
      "loss": 0.1951,
      "step": 575
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.3488372093023258e-06,
      "loss": 0.2033,
      "step": 580
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.36046511627907e-06,
      "loss": 0.2468,
      "step": 585
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.372093023255814e-06,
      "loss": 0.1552,
      "step": 590
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.3837209302325583e-06,
      "loss": 0.182,
      "step": 595
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9481376709099482,
      "eval_loss": 0.1382409781217575,
      "eval_runtime": 91.1965,
      "eval_samples_per_second": 23.257,
      "eval_steps_per_second": 1.458,
      "step": 597
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.3953488372093025e-06,
      "loss": 0.1781,
      "step": 600
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.4069767441860465e-06,
      "loss": 0.2034,
      "step": 605
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.4186046511627907e-06,
      "loss": 0.1188,
      "step": 610
    },
    {
      "epoch": 3.09,
      "learning_rate": 1.430232558139535e-06,
      "loss": 0.1816,
      "step": 615
    },
    {
      "epoch": 3.12,
      "learning_rate": 1.4418604651162794e-06,
      "loss": 0.1081,
      "step": 620
    },
    {
      "epoch": 3.14,
      "learning_rate": 1.4534883720930234e-06,
      "loss": 0.1409,
      "step": 625
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.4651162790697676e-06,
      "loss": 0.1834,
      "step": 630
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.4767441860465118e-06,
      "loss": 0.2097,
      "step": 635
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.4883720930232558e-06,
      "loss": 0.145,
      "step": 640
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.5e-06,
      "loss": 0.156,
      "step": 645
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.5116279069767443e-06,
      "loss": 0.1742,
      "step": 650
    },
    {
      "epoch": 3.29,
      "learning_rate": 1.5232558139534885e-06,
      "loss": 0.1477,
      "step": 655
    },
    {
      "epoch": 3.32,
      "learning_rate": 1.534883720930233e-06,
      "loss": 0.0985,
      "step": 660
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.546511627906977e-06,
      "loss": 0.1889,
      "step": 665
    },
    {
      "epoch": 3.37,
      "learning_rate": 1.5581395348837211e-06,
      "loss": 0.1832,
      "step": 670
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.5697674418604653e-06,
      "loss": 0.1582,
      "step": 675
    },
    {
      "epoch": 3.42,
      "learning_rate": 1.5813953488372093e-06,
      "loss": 0.1872,
      "step": 680
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.5930232558139536e-06,
      "loss": 0.1571,
      "step": 685
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.6046511627906978e-06,
      "loss": 0.1407,
      "step": 690
    },
    {
      "epoch": 3.49,
      "learning_rate": 1.6162790697674418e-06,
      "loss": 0.0965,
      "step": 695
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.6279069767441862e-06,
      "loss": 0.0725,
      "step": 700
    },
    {
      "epoch": 3.54,
      "learning_rate": 1.6395348837209304e-06,
      "loss": 0.1101,
      "step": 705
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.6511627906976747e-06,
      "loss": 0.1048,
      "step": 710
    },
    {
      "epoch": 3.59,
      "learning_rate": 1.6627906976744187e-06,
      "loss": 0.1131,
      "step": 715
    },
    {
      "epoch": 3.62,
      "learning_rate": 1.6744186046511629e-06,
      "loss": 0.1339,
      "step": 720
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.686046511627907e-06,
      "loss": 0.1134,
      "step": 725
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.6976744186046513e-06,
      "loss": 0.1316,
      "step": 730
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.7093023255813953e-06,
      "loss": 0.1277,
      "step": 735
    },
    {
      "epoch": 3.72,
      "learning_rate": 1.7209302325581397e-06,
      "loss": 0.1207,
      "step": 740
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.732558139534884e-06,
      "loss": 0.1694,
      "step": 745
    },
    {
      "epoch": 3.77,
      "learning_rate": 1.7441860465116282e-06,
      "loss": 0.1486,
      "step": 750
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.7558139534883722e-06,
      "loss": 0.0856,
      "step": 755
    },
    {
      "epoch": 3.82,
      "learning_rate": 1.7674418604651164e-06,
      "loss": 0.1068,
      "step": 760
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.7790697674418606e-06,
      "loss": 0.0985,
      "step": 765
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.7906976744186046e-06,
      "loss": 0.1445,
      "step": 770
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.8023255813953488e-06,
      "loss": 0.0848,
      "step": 775
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.8139534883720933e-06,
      "loss": 0.1124,
      "step": 780
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.8255813953488375e-06,
      "loss": 0.0806,
      "step": 785
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.8372093023255815e-06,
      "loss": 0.078,
      "step": 790
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.8488372093023257e-06,
      "loss": 0.0826,
      "step": 795
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9693540782649693,
      "eval_loss": 0.0760357454419136,
      "eval_runtime": 90.9354,
      "eval_samples_per_second": 23.324,
      "eval_steps_per_second": 1.463,
      "step": 796
    },
    {
      "epoch": 4.02,
      "learning_rate": 1.86046511627907e-06,
      "loss": 0.0578,
      "step": 800
    },
    {
      "epoch": 4.05,
      "learning_rate": 1.872093023255814e-06,
      "loss": 0.2264,
      "step": 805
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.8837209302325582e-06,
      "loss": 0.0851,
      "step": 810
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.8953488372093024e-06,
      "loss": 0.0803,
      "step": 815
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.9069767441860468e-06,
      "loss": 0.104,
      "step": 820
    },
    {
      "epoch": 4.15,
      "learning_rate": 1.918604651162791e-06,
      "loss": 0.1087,
      "step": 825
    },
    {
      "epoch": 4.17,
      "learning_rate": 1.930232558139535e-06,
      "loss": 0.1494,
      "step": 830
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.941860465116279e-06,
      "loss": 0.1459,
      "step": 835
    },
    {
      "epoch": 4.22,
      "learning_rate": 1.9534883720930235e-06,
      "loss": 0.0806,
      "step": 840
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.9651162790697675e-06,
      "loss": 0.0517,
      "step": 845
    },
    {
      "epoch": 4.27,
      "learning_rate": 1.976744186046512e-06,
      "loss": 0.1205,
      "step": 850
    },
    {
      "epoch": 4.3,
      "learning_rate": 1.988372093023256e-06,
      "loss": 0.1168,
      "step": 855
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1178,
      "step": 860
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.0116279069767443e-06,
      "loss": 0.0972,
      "step": 865
    },
    {
      "epoch": 4.37,
      "learning_rate": 2.0232558139534888e-06,
      "loss": 0.1218,
      "step": 870
    },
    {
      "epoch": 4.4,
      "learning_rate": 2.0348837209302328e-06,
      "loss": 0.0609,
      "step": 875
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.0465116279069768e-06,
      "loss": 0.0926,
      "step": 880
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.058139534883721e-06,
      "loss": 0.0923,
      "step": 885
    },
    {
      "epoch": 4.47,
      "learning_rate": 2.0697674418604652e-06,
      "loss": 0.0673,
      "step": 890
    },
    {
      "epoch": 4.5,
      "learning_rate": 2.0813953488372092e-06,
      "loss": 0.0915,
      "step": 895
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.0930232558139536e-06,
      "loss": 0.0987,
      "step": 900
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.104651162790698e-06,
      "loss": 0.1189,
      "step": 905
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.116279069767442e-06,
      "loss": 0.0615,
      "step": 910
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.127906976744186e-06,
      "loss": 0.1131,
      "step": 915
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.1395348837209305e-06,
      "loss": 0.1153,
      "step": 920
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.1511627906976745e-06,
      "loss": 0.0741,
      "step": 925
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.1627906976744185e-06,
      "loss": 0.1233,
      "step": 930
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.174418604651163e-06,
      "loss": 0.1245,
      "step": 935
    },
    {
      "epoch": 4.72,
      "learning_rate": 2.1860465116279074e-06,
      "loss": 0.058,
      "step": 940
    },
    {
      "epoch": 4.75,
      "learning_rate": 2.1976744186046514e-06,
      "loss": 0.0409,
      "step": 945
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.2093023255813954e-06,
      "loss": 0.0828,
      "step": 950
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.22093023255814e-06,
      "loss": 0.1098,
      "step": 955
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.232558139534884e-06,
      "loss": 0.0561,
      "step": 960
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.244186046511628e-06,
      "loss": 0.1561,
      "step": 965
    },
    {
      "epoch": 4.87,
      "learning_rate": 2.2558139534883723e-06,
      "loss": 0.1465,
      "step": 970
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.2674418604651163e-06,
      "loss": 0.068,
      "step": 975
    },
    {
      "epoch": 4.92,
      "learning_rate": 2.2790697674418607e-06,
      "loss": 0.1774,
      "step": 980
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.2906976744186047e-06,
      "loss": 0.1152,
      "step": 985
    },
    {
      "epoch": 4.97,
      "learning_rate": 2.302325581395349e-06,
      "loss": 0.0964,
      "step": 990
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.313953488372093e-06,
      "loss": 0.0886,
      "step": 995
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9787835926449788,
      "eval_loss": 0.059965480118989944,
      "eval_runtime": 90.9509,
      "eval_samples_per_second": 23.32,
      "eval_steps_per_second": 1.462,
      "step": 995
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.3255813953488376e-06,
      "loss": 0.0869,
      "step": 1000
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.3372093023255816e-06,
      "loss": 0.1005,
      "step": 1005
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.3488372093023256e-06,
      "loss": 0.0859,
      "step": 1010
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.36046511627907e-06,
      "loss": 0.0807,
      "step": 1015
    },
    {
      "epoch": 5.13,
      "learning_rate": 2.3720930232558144e-06,
      "loss": 0.0645,
      "step": 1020
    },
    {
      "epoch": 5.15,
      "learning_rate": 2.3837209302325585e-06,
      "loss": 0.0598,
      "step": 1025
    },
    {
      "epoch": 5.18,
      "learning_rate": 2.3953488372093025e-06,
      "loss": 0.1259,
      "step": 1030
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.406976744186047e-06,
      "loss": 0.0914,
      "step": 1035
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.418604651162791e-06,
      "loss": 0.1101,
      "step": 1040
    },
    {
      "epoch": 5.25,
      "learning_rate": 2.430232558139535e-06,
      "loss": 0.1553,
      "step": 1045
    },
    {
      "epoch": 5.28,
      "learning_rate": 2.4418604651162793e-06,
      "loss": 0.0465,
      "step": 1050
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.4534883720930233e-06,
      "loss": 0.0226,
      "step": 1055
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.4651162790697678e-06,
      "loss": 0.089,
      "step": 1060
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.4767441860465118e-06,
      "loss": 0.0578,
      "step": 1065
    },
    {
      "epoch": 5.38,
      "learning_rate": 2.488372093023256e-06,
      "loss": 0.0772,
      "step": 1070
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.5e-06,
      "loss": 0.066,
      "step": 1075
    },
    {
      "epoch": 5.43,
      "learning_rate": 2.5116279069767446e-06,
      "loss": 0.0877,
      "step": 1080
    },
    {
      "epoch": 5.45,
      "learning_rate": 2.5232558139534886e-06,
      "loss": 0.107,
      "step": 1085
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.534883720930233e-06,
      "loss": 0.1334,
      "step": 1090
    },
    {
      "epoch": 5.5,
      "learning_rate": 2.5465116279069767e-06,
      "loss": 0.0577,
      "step": 1095
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.558139534883721e-06,
      "loss": 0.1463,
      "step": 1100
    },
    {
      "epoch": 5.55,
      "learning_rate": 2.569767441860465e-06,
      "loss": 0.097,
      "step": 1105
    },
    {
      "epoch": 5.58,
      "learning_rate": 2.5813953488372095e-06,
      "loss": 0.0964,
      "step": 1110
    },
    {
      "epoch": 5.6,
      "learning_rate": 2.593023255813954e-06,
      "loss": 0.0306,
      "step": 1115
    },
    {
      "epoch": 5.63,
      "learning_rate": 2.604651162790698e-06,
      "loss": 0.1012,
      "step": 1120
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.6162790697674424e-06,
      "loss": 0.1271,
      "step": 1125
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.627906976744186e-06,
      "loss": 0.0381,
      "step": 1130
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.6395348837209304e-06,
      "loss": 0.1062,
      "step": 1135
    },
    {
      "epoch": 5.73,
      "learning_rate": 2.6511627906976744e-06,
      "loss": 0.0798,
      "step": 1140
    },
    {
      "epoch": 5.75,
      "learning_rate": 2.662790697674419e-06,
      "loss": 0.0583,
      "step": 1145
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.674418604651163e-06,
      "loss": 0.1107,
      "step": 1150
    },
    {
      "epoch": 5.8,
      "learning_rate": 2.6860465116279073e-06,
      "loss": 0.0535,
      "step": 1155
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.6976744186046517e-06,
      "loss": 0.1115,
      "step": 1160
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.7093023255813957e-06,
      "loss": 0.0569,
      "step": 1165
    },
    {
      "epoch": 5.88,
      "learning_rate": 2.72093023255814e-06,
      "loss": 0.0927,
      "step": 1170
    },
    {
      "epoch": 5.9,
      "learning_rate": 2.7325581395348837e-06,
      "loss": 0.0326,
      "step": 1175
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.744186046511628e-06,
      "loss": 0.1111,
      "step": 1180
    },
    {
      "epoch": 5.95,
      "learning_rate": 2.755813953488372e-06,
      "loss": 0.0345,
      "step": 1185
    },
    {
      "epoch": 5.98,
      "learning_rate": 2.7674418604651166e-06,
      "loss": 0.0896,
      "step": 1190
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.9801980198019802,
      "eval_loss": 0.05234839394688606,
      "eval_runtime": 91.0139,
      "eval_samples_per_second": 23.304,
      "eval_steps_per_second": 1.461,
      "step": 1194
    },
    {
      "epoch": 6.01,
      "learning_rate": 2.779069767441861e-06,
      "loss": 0.0247,
      "step": 1195
    },
    {
      "epoch": 6.03,
      "learning_rate": 2.790697674418605e-06,
      "loss": 0.06,
      "step": 1200
    },
    {
      "epoch": 6.06,
      "learning_rate": 2.8023255813953494e-06,
      "loss": 0.0695,
      "step": 1205
    },
    {
      "epoch": 6.08,
      "learning_rate": 2.813953488372093e-06,
      "loss": 0.0554,
      "step": 1210
    },
    {
      "epoch": 6.11,
      "learning_rate": 2.8255813953488374e-06,
      "loss": 0.1587,
      "step": 1215
    },
    {
      "epoch": 6.13,
      "learning_rate": 2.8372093023255815e-06,
      "loss": 0.0628,
      "step": 1220
    },
    {
      "epoch": 6.16,
      "learning_rate": 2.848837209302326e-06,
      "loss": 0.073,
      "step": 1225
    },
    {
      "epoch": 6.18,
      "learning_rate": 2.86046511627907e-06,
      "loss": 0.0835,
      "step": 1230
    },
    {
      "epoch": 6.21,
      "learning_rate": 2.8720930232558143e-06,
      "loss": 0.1044,
      "step": 1235
    },
    {
      "epoch": 6.23,
      "learning_rate": 2.8837209302325587e-06,
      "loss": 0.0537,
      "step": 1240
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.8953488372093023e-06,
      "loss": 0.1153,
      "step": 1245
    },
    {
      "epoch": 6.28,
      "learning_rate": 2.9069767441860468e-06,
      "loss": 0.0685,
      "step": 1250
    },
    {
      "epoch": 6.31,
      "learning_rate": 2.9186046511627908e-06,
      "loss": 0.0602,
      "step": 1255
    },
    {
      "epoch": 6.33,
      "learning_rate": 2.930232558139535e-06,
      "loss": 0.0731,
      "step": 1260
    },
    {
      "epoch": 6.36,
      "learning_rate": 2.941860465116279e-06,
      "loss": 0.0509,
      "step": 1265
    },
    {
      "epoch": 6.38,
      "learning_rate": 2.9534883720930236e-06,
      "loss": 0.0426,
      "step": 1270
    },
    {
      "epoch": 6.41,
      "learning_rate": 2.965116279069768e-06,
      "loss": 0.0578,
      "step": 1275
    },
    {
      "epoch": 6.43,
      "learning_rate": 2.9767441860465116e-06,
      "loss": 0.0487,
      "step": 1280
    },
    {
      "epoch": 6.46,
      "learning_rate": 2.988372093023256e-06,
      "loss": 0.0266,
      "step": 1285
    },
    {
      "epoch": 6.48,
      "learning_rate": 3e-06,
      "loss": 0.0566,
      "step": 1290
    },
    {
      "epoch": 6.51,
      "learning_rate": 3.0116279069767445e-06,
      "loss": 0.0443,
      "step": 1295
    },
    {
      "epoch": 6.53,
      "learning_rate": 3.0232558139534885e-06,
      "loss": 0.0457,
      "step": 1300
    },
    {
      "epoch": 6.56,
      "learning_rate": 3.034883720930233e-06,
      "loss": 0.122,
      "step": 1305
    },
    {
      "epoch": 6.58,
      "learning_rate": 3.046511627906977e-06,
      "loss": 0.1308,
      "step": 1310
    },
    {
      "epoch": 6.61,
      "learning_rate": 3.0581395348837214e-06,
      "loss": 0.0771,
      "step": 1315
    },
    {
      "epoch": 6.63,
      "learning_rate": 3.069767441860466e-06,
      "loss": 0.0594,
      "step": 1320
    },
    {
      "epoch": 6.66,
      "learning_rate": 3.0813953488372094e-06,
      "loss": 0.0398,
      "step": 1325
    },
    {
      "epoch": 6.68,
      "learning_rate": 3.093023255813954e-06,
      "loss": 0.0973,
      "step": 1330
    },
    {
      "epoch": 6.71,
      "learning_rate": 3.104651162790698e-06,
      "loss": 0.174,
      "step": 1335
    },
    {
      "epoch": 6.73,
      "learning_rate": 3.1162790697674423e-06,
      "loss": 0.0669,
      "step": 1340
    },
    {
      "epoch": 6.76,
      "learning_rate": 3.1279069767441863e-06,
      "loss": 0.0592,
      "step": 1345
    },
    {
      "epoch": 6.78,
      "learning_rate": 3.1395348837209307e-06,
      "loss": 0.0337,
      "step": 1350
    },
    {
      "epoch": 6.81,
      "learning_rate": 3.151162790697675e-06,
      "loss": 0.0338,
      "step": 1355
    },
    {
      "epoch": 6.83,
      "learning_rate": 3.1627906976744187e-06,
      "loss": 0.0956,
      "step": 1360
    },
    {
      "epoch": 6.86,
      "learning_rate": 3.174418604651163e-06,
      "loss": 0.0236,
      "step": 1365
    },
    {
      "epoch": 6.88,
      "learning_rate": 3.186046511627907e-06,
      "loss": 0.0381,
      "step": 1370
    },
    {
      "epoch": 6.91,
      "learning_rate": 3.1976744186046516e-06,
      "loss": 0.0674,
      "step": 1375
    },
    {
      "epoch": 6.93,
      "learning_rate": 3.2093023255813956e-06,
      "loss": 0.0448,
      "step": 1380
    },
    {
      "epoch": 6.96,
      "learning_rate": 3.22093023255814e-06,
      "loss": 0.0564,
      "step": 1385
    },
    {
      "epoch": 6.98,
      "learning_rate": 3.2325581395348836e-06,
      "loss": 0.0774,
      "step": 1390
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.9825553983969826,
      "eval_loss": 0.04816773906350136,
      "eval_runtime": 90.6447,
      "eval_samples_per_second": 23.399,
      "eval_steps_per_second": 1.467,
      "step": 1393
    },
    {
      "epoch": 7.01,
      "learning_rate": 3.244186046511628e-06,
      "loss": 0.1127,
      "step": 1395
    },
    {
      "epoch": 7.04,
      "learning_rate": 3.2558139534883724e-06,
      "loss": 0.0509,
      "step": 1400
    },
    {
      "epoch": 7.06,
      "learning_rate": 3.2674418604651164e-06,
      "loss": 0.1105,
      "step": 1405
    },
    {
      "epoch": 7.09,
      "learning_rate": 3.279069767441861e-06,
      "loss": 0.0226,
      "step": 1410
    },
    {
      "epoch": 7.11,
      "learning_rate": 3.290697674418605e-06,
      "loss": 0.0518,
      "step": 1415
    },
    {
      "epoch": 7.14,
      "learning_rate": 3.3023255813953493e-06,
      "loss": 0.155,
      "step": 1420
    },
    {
      "epoch": 7.16,
      "learning_rate": 3.313953488372093e-06,
"loss": 0.0815, | |
"step": 1425 | |
}, | |
{ | |
"epoch": 7.19, | |
"learning_rate": 3.3255813953488373e-06, | |
"loss": 0.0785, | |
"step": 1430 | |
}, | |
{ | |
"epoch": 7.21, | |
"learning_rate": 3.3372093023255817e-06, | |
"loss": 0.0339, | |
"step": 1435 | |
}, | |
{ | |
"epoch": 7.24, | |
"learning_rate": 3.3488372093023258e-06, | |
"loss": 0.0358, | |
"step": 1440 | |
}, | |
{ | |
"epoch": 7.26, | |
"learning_rate": 3.36046511627907e-06, | |
"loss": 0.0259, | |
"step": 1445 | |
}, | |
{ | |
"epoch": 7.29, | |
"learning_rate": 3.372093023255814e-06, | |
"loss": 0.0391, | |
"step": 1450 | |
}, | |
{ | |
"epoch": 7.31, | |
"learning_rate": 3.3837209302325586e-06, | |
"loss": 0.0233, | |
"step": 1455 | |
}, | |
{ | |
"epoch": 7.34, | |
"learning_rate": 3.3953488372093026e-06, | |
"loss": 0.024, | |
"step": 1460 | |
}, | |
{ | |
"epoch": 7.36, | |
"learning_rate": 3.406976744186047e-06, | |
"loss": 0.0789, | |
"step": 1465 | |
}, | |
{ | |
"epoch": 7.39, | |
"learning_rate": 3.4186046511627906e-06, | |
"loss": 0.0406, | |
"step": 1470 | |
}, | |
{ | |
"epoch": 7.41, | |
"learning_rate": 3.430232558139535e-06, | |
"loss": 0.0524, | |
"step": 1475 | |
}, | |
{ | |
"epoch": 7.44, | |
"learning_rate": 3.4418604651162795e-06, | |
"loss": 0.1186, | |
"step": 1480 | |
}, | |
{ | |
"epoch": 7.46, | |
"learning_rate": 3.4534883720930235e-06, | |
"loss": 0.0434, | |
"step": 1485 | |
}, | |
{ | |
"epoch": 7.49, | |
"learning_rate": 3.465116279069768e-06, | |
"loss": 0.084, | |
"step": 1490 | |
}, | |
{ | |
"epoch": 7.51, | |
"learning_rate": 3.476744186046512e-06, | |
"loss": 0.0475, | |
"step": 1495 | |
}, | |
{ | |
"epoch": 7.54, | |
"learning_rate": 3.4883720930232564e-06, | |
"loss": 0.1015, | |
"step": 1500 | |
}, | |
{ | |
"epoch": 7.56, | |
"learning_rate": 3.5e-06, | |
"loss": 0.042, | |
"step": 1505 | |
}, | |
{ | |
"epoch": 7.59, | |
"learning_rate": 3.5116279069767444e-06, | |
"loss": 0.0287, | |
"step": 1510 | |
}, | |
{ | |
"epoch": 7.61, | |
"learning_rate": 3.523255813953489e-06, | |
"loss": 0.064, | |
"step": 1515 | |
}, | |
{ | |
"epoch": 7.64, | |
"learning_rate": 3.534883720930233e-06, | |
"loss": 0.095, | |
"step": 1520 | |
}, | |
{ | |
"epoch": 7.66, | |
"learning_rate": 3.5465116279069772e-06, | |
"loss": 0.0677, | |
"step": 1525 | |
}, | |
{ | |
"epoch": 7.69, | |
"learning_rate": 3.5581395348837212e-06, | |
"loss": 0.05, | |
"step": 1530 | |
}, | |
{ | |
"epoch": 7.71, | |
"learning_rate": 3.5697674418604657e-06, | |
"loss": 0.0435, | |
"step": 1535 | |
}, | |
{ | |
"epoch": 7.74, | |
"learning_rate": 3.5813953488372093e-06, | |
"loss": 0.1378, | |
"step": 1540 | |
}, | |
{ | |
"epoch": 7.76, | |
"learning_rate": 3.5930232558139537e-06, | |
"loss": 0.1013, | |
"step": 1545 | |
}, | |
{ | |
"epoch": 7.79, | |
"learning_rate": 3.6046511627906977e-06, | |
"loss": 0.0508, | |
"step": 1550 | |
}, | |
{ | |
"epoch": 7.81, | |
"learning_rate": 3.616279069767442e-06, | |
"loss": 0.0078, | |
"step": 1555 | |
}, | |
{ | |
"epoch": 7.84, | |
"learning_rate": 3.6279069767441866e-06, | |
"loss": 0.0256, | |
"step": 1560 | |
}, | |
{ | |
"epoch": 7.86, | |
"learning_rate": 3.6395348837209306e-06, | |
"loss": 0.0381, | |
"step": 1565 | |
}, | |
{ | |
"epoch": 7.89, | |
"learning_rate": 3.651162790697675e-06, | |
"loss": 0.0318, | |
"step": 1570 | |
}, | |
{ | |
"epoch": 7.91, | |
"learning_rate": 3.6627906976744186e-06, | |
"loss": 0.1263, | |
"step": 1575 | |
}, | |
{ | |
"epoch": 7.94, | |
"learning_rate": 3.674418604651163e-06, | |
"loss": 0.0309, | |
"step": 1580 | |
}, | |
{ | |
"epoch": 7.96, | |
"learning_rate": 3.686046511627907e-06, | |
"loss": 0.0699, | |
"step": 1585 | |
}, | |
{ | |
"epoch": 7.99, | |
"learning_rate": 3.6976744186046514e-06, | |
"loss": 0.0876, | |
"step": 1590 | |
}, | |
{ | |
"epoch": 8.0, | |
"eval_accuracy": 0.9877416313059877, | |
"eval_loss": 0.0289468877017498, | |
"eval_runtime": 90.5383, | |
"eval_samples_per_second": 23.427, | |
"eval_steps_per_second": 1.469, | |
"step": 1592 | |
}, | |
{ | |
"epoch": 8.02, | |
"learning_rate": 3.709302325581396e-06, | |
"loss": 0.0537, | |
"step": 1595 | |
}, | |
{ | |
"epoch": 8.04, | |
"learning_rate": 3.72093023255814e-06, | |
"loss": 0.0633, | |
"step": 1600 | |
}, | |
{ | |
"epoch": 8.07, | |
"learning_rate": 3.7325581395348843e-06, | |
"loss": 0.0803, | |
"step": 1605 | |
}, | |
{ | |
"epoch": 8.09, | |
"learning_rate": 3.744186046511628e-06, | |
"loss": 0.0594, | |
"step": 1610 | |
}, | |
{ | |
"epoch": 8.12, | |
"learning_rate": 3.7558139534883727e-06, | |
"loss": 0.0894, | |
"step": 1615 | |
}, | |
{ | |
"epoch": 8.14, | |
"learning_rate": 3.7674418604651163e-06, | |
"loss": 0.0336, | |
"step": 1620 | |
}, | |
{ | |
"epoch": 8.17, | |
"learning_rate": 3.7790697674418607e-06, | |
"loss": 0.0151, | |
"step": 1625 | |
}, | |
{ | |
"epoch": 8.19, | |
"learning_rate": 3.7906976744186048e-06, | |
"loss": 0.0575, | |
"step": 1630 | |
}, | |
{ | |
"epoch": 8.22, | |
"learning_rate": 3.802325581395349e-06, | |
"loss": 0.0224, | |
"step": 1635 | |
}, | |
{ | |
"epoch": 8.24, | |
"learning_rate": 3.8139534883720936e-06, | |
"loss": 0.0561, | |
"step": 1640 | |
}, | |
{ | |
"epoch": 8.27, | |
"learning_rate": 3.825581395348838e-06, | |
"loss": 0.0333, | |
"step": 1645 | |
}, | |
{ | |
"epoch": 8.29, | |
"learning_rate": 3.837209302325582e-06, | |
"loss": 0.0881, | |
"step": 1650 | |
}, | |
{ | |
"epoch": 8.32, | |
"learning_rate": 3.848837209302326e-06, | |
"loss": 0.0562, | |
"step": 1655 | |
}, | |
{ | |
"epoch": 8.34, | |
"learning_rate": 3.86046511627907e-06, | |
"loss": 0.0381, | |
"step": 1660 | |
}, | |
{ | |
"epoch": 8.37, | |
"learning_rate": 3.872093023255814e-06, | |
"loss": 0.0172, | |
"step": 1665 | |
}, | |
{ | |
"epoch": 8.39, | |
"learning_rate": 3.883720930232558e-06, | |
"loss": 0.0262, | |
"step": 1670 | |
}, | |
{ | |
"epoch": 8.42, | |
"learning_rate": 3.8953488372093025e-06, | |
"loss": 0.1236, | |
"step": 1675 | |
}, | |
{ | |
"epoch": 8.44, | |
"learning_rate": 3.906976744186047e-06, | |
"loss": 0.0794, | |
"step": 1680 | |
}, | |
{ | |
"epoch": 8.47, | |
"learning_rate": 3.918604651162791e-06, | |
"loss": 0.0319, | |
"step": 1685 | |
}, | |
{ | |
"epoch": 8.49, | |
"learning_rate": 3.930232558139535e-06, | |
"loss": 0.0114, | |
"step": 1690 | |
}, | |
{ | |
"epoch": 8.52, | |
"learning_rate": 3.941860465116279e-06, | |
"loss": 0.0872, | |
"step": 1695 | |
}, | |
{ | |
"epoch": 8.54, | |
"learning_rate": 3.953488372093024e-06, | |
"loss": 0.0592, | |
"step": 1700 | |
}, | |
{ | |
"epoch": 8.57, | |
"learning_rate": 3.965116279069768e-06, | |
"loss": 0.0436, | |
"step": 1705 | |
}, | |
{ | |
"epoch": 8.59, | |
"learning_rate": 3.976744186046512e-06, | |
"loss": 0.0328, | |
"step": 1710 | |
}, | |
{ | |
"epoch": 8.62, | |
"learning_rate": 3.988372093023256e-06, | |
"loss": 0.0602, | |
"step": 1715 | |
}, | |
{ | |
"epoch": 8.64, | |
"learning_rate": 4.000000000000001e-06, | |
"loss": 0.0482, | |
"step": 1720 | |
}, | |
{ | |
"epoch": 8.67, | |
"learning_rate": 4.011627906976744e-06, | |
"loss": 0.012, | |
"step": 1725 | |
}, | |
{ | |
"epoch": 8.69, | |
"learning_rate": 4.023255813953489e-06, | |
"loss": 0.0723, | |
"step": 1730 | |
}, | |
{ | |
"epoch": 8.72, | |
"learning_rate": 4.034883720930233e-06, | |
"loss": 0.0854, | |
"step": 1735 | |
}, | |
{ | |
"epoch": 8.74, | |
"learning_rate": 4.0465116279069775e-06, | |
"loss": 0.0185, | |
"step": 1740 | |
}, | |
{ | |
"epoch": 8.77, | |
"learning_rate": 4.058139534883721e-06, | |
"loss": 0.0684, | |
"step": 1745 | |
}, | |
{ | |
"epoch": 8.79, | |
"learning_rate": 4.0697674418604655e-06, | |
"loss": 0.0528, | |
"step": 1750 | |
}, | |
{ | |
"epoch": 8.82, | |
"learning_rate": 4.08139534883721e-06, | |
"loss": 0.0708, | |
"step": 1755 | |
}, | |
{ | |
"epoch": 8.84, | |
"learning_rate": 4.0930232558139536e-06, | |
"loss": 0.0283, | |
"step": 1760 | |
}, | |
{ | |
"epoch": 8.87, | |
"learning_rate": 4.104651162790698e-06, | |
"loss": 0.0126, | |
"step": 1765 | |
}, | |
{ | |
"epoch": 8.89, | |
"learning_rate": 4.116279069767442e-06, | |
"loss": 0.1266, | |
"step": 1770 | |
}, | |
{ | |
"epoch": 8.92, | |
"learning_rate": 4.127906976744187e-06, | |
"loss": 0.138, | |
"step": 1775 | |
}, | |
{ | |
"epoch": 8.94, | |
"learning_rate": 4.1395348837209304e-06, | |
"loss": 0.1034, | |
"step": 1780 | |
}, | |
{ | |
"epoch": 8.97, | |
"learning_rate": 4.151162790697675e-06, | |
"loss": 0.0554, | |
"step": 1785 | |
}, | |
{ | |
"epoch": 8.99, | |
"learning_rate": 4.1627906976744184e-06, | |
"loss": 0.1105, | |
"step": 1790 | |
}, | |
{ | |
"epoch": 9.0, | |
"eval_accuracy": 0.9820839226779821, | |
"eval_loss": 0.05802316591143608, | |
"eval_runtime": 90.801, | |
"eval_samples_per_second": 23.359, | |
"eval_steps_per_second": 1.465, | |
"step": 1791 | |
}, | |
{ | |
"epoch": 9.02, | |
"learning_rate": 4.174418604651163e-06, | |
"loss": 0.0654, | |
"step": 1795 | |
}, | |
{ | |
"epoch": 9.05, | |
"learning_rate": 4.186046511627907e-06, | |
"loss": 0.0111, | |
"step": 1800 | |
}, | |
{ | |
"epoch": 9.07, | |
"learning_rate": 4.197674418604652e-06, | |
"loss": 0.0222, | |
"step": 1805 | |
}, | |
{ | |
"epoch": 9.1, | |
"learning_rate": 4.209302325581396e-06, | |
"loss": 0.0517, | |
"step": 1810 | |
}, | |
{ | |
"epoch": 9.12, | |
"learning_rate": 4.22093023255814e-06, | |
"loss": 0.0376, | |
"step": 1815 | |
}, | |
{ | |
"epoch": 9.15, | |
"learning_rate": 4.232558139534884e-06, | |
"loss": 0.0881, | |
"step": 1820 | |
}, | |
{ | |
"epoch": 9.17, | |
"learning_rate": 4.244186046511628e-06, | |
"loss": 0.0317, | |
"step": 1825 | |
}, | |
{ | |
"epoch": 9.2, | |
"learning_rate": 4.255813953488372e-06, | |
"loss": 0.0075, | |
"step": 1830 | |
}, | |
{ | |
"epoch": 9.22, | |
"learning_rate": 4.267441860465117e-06, | |
"loss": 0.0807, | |
"step": 1835 | |
}, | |
{ | |
"epoch": 9.25, | |
"learning_rate": 4.279069767441861e-06, | |
"loss": 0.0307, | |
"step": 1840 | |
}, | |
{ | |
"epoch": 9.27, | |
"learning_rate": 4.2906976744186055e-06, | |
"loss": 0.0826, | |
"step": 1845 | |
}, | |
{ | |
"epoch": 9.3, | |
"learning_rate": 4.302325581395349e-06, | |
"loss": 0.0526, | |
"step": 1850 | |
}, | |
{ | |
"epoch": 9.32, | |
"learning_rate": 4.3139534883720935e-06, | |
"loss": 0.0748, | |
"step": 1855 | |
}, | |
{ | |
"epoch": 9.35, | |
"learning_rate": 4.325581395348837e-06, | |
"loss": 0.02, | |
"step": 1860 | |
}, | |
{ | |
"epoch": 9.37, | |
"learning_rate": 4.3372093023255815e-06, | |
"loss": 0.0865, | |
"step": 1865 | |
}, | |
{ | |
"epoch": 9.4, | |
"learning_rate": 4.348837209302326e-06, | |
"loss": 0.0962, | |
"step": 1870 | |
}, | |
{ | |
"epoch": 9.42, | |
"learning_rate": 4.36046511627907e-06, | |
"loss": 0.0448, | |
"step": 1875 | |
}, | |
{ | |
"epoch": 9.45, | |
"learning_rate": 4.372093023255815e-06, | |
"loss": 0.0414, | |
"step": 1880 | |
}, | |
{ | |
"epoch": 9.47, | |
"learning_rate": 4.383720930232558e-06, | |
"loss": 0.0367, | |
"step": 1885 | |
}, | |
{ | |
"epoch": 9.5, | |
"learning_rate": 4.395348837209303e-06, | |
"loss": 0.018, | |
"step": 1890 | |
}, | |
{ | |
"epoch": 9.52, | |
"learning_rate": 4.406976744186046e-06, | |
"loss": 0.0238, | |
"step": 1895 | |
}, | |
{ | |
"epoch": 9.55, | |
"learning_rate": 4.418604651162791e-06, | |
"loss": 0.0449, | |
"step": 1900 | |
}, | |
{ | |
"epoch": 9.57, | |
"learning_rate": 4.430232558139535e-06, | |
"loss": 0.0461, | |
"step": 1905 | |
}, | |
{ | |
"epoch": 9.6, | |
"learning_rate": 4.44186046511628e-06, | |
"loss": 0.0269, | |
"step": 1910 | |
}, | |
{ | |
"epoch": 9.62, | |
"learning_rate": 4.453488372093024e-06, | |
"loss": 0.1015, | |
"step": 1915 | |
}, | |
{ | |
"epoch": 9.65, | |
"learning_rate": 4.465116279069768e-06, | |
"loss": 0.0235, | |
"step": 1920 | |
}, | |
{ | |
"epoch": 9.67, | |
"learning_rate": 4.476744186046512e-06, | |
"loss": 0.0445, | |
"step": 1925 | |
}, | |
{ | |
"epoch": 9.7, | |
"learning_rate": 4.488372093023256e-06, | |
"loss": 0.0707, | |
"step": 1930 | |
}, | |
{ | |
"epoch": 9.72, | |
"learning_rate": 4.5e-06, | |
"loss": 0.0562, | |
"step": 1935 | |
}, | |
{ | |
"epoch": 9.75, | |
"learning_rate": 4.5116279069767445e-06, | |
"loss": 0.0444, | |
"step": 1940 | |
}, | |
{ | |
"epoch": 9.77, | |
"learning_rate": 4.523255813953489e-06, | |
"loss": 0.0733, | |
"step": 1945 | |
}, | |
{ | |
"epoch": 9.8, | |
"learning_rate": 4.5348837209302326e-06, | |
"loss": 0.041, | |
"step": 1950 | |
}, | |
{ | |
"epoch": 9.82, | |
"learning_rate": 4.546511627906977e-06, | |
"loss": 0.0205, | |
"step": 1955 | |
}, | |
{ | |
"epoch": 9.85, | |
"learning_rate": 4.558139534883721e-06, | |
"loss": 0.067, | |
"step": 1960 | |
}, | |
{ | |
"epoch": 9.87, | |
"learning_rate": 4.569767441860465e-06, | |
"loss": 0.0548, | |
"step": 1965 | |
}, | |
{ | |
"epoch": 9.9, | |
"learning_rate": 4.5813953488372094e-06, | |
"loss": 0.0704, | |
"step": 1970 | |
}, | |
{ | |
"epoch": 9.92, | |
"learning_rate": 4.593023255813954e-06, | |
"loss": 0.0203, | |
"step": 1975 | |
}, | |
{ | |
"epoch": 9.95, | |
"learning_rate": 4.604651162790698e-06, | |
"loss": 0.0189, | |
"step": 1980 | |
}, | |
{ | |
"epoch": 9.97, | |
"learning_rate": 4.616279069767442e-06, | |
"loss": 0.0342, | |
"step": 1985 | |
}, | |
{ | |
"epoch": 10.0, | |
"learning_rate": 4.627906976744186e-06, | |
"loss": 0.0289, | |
"step": 1990 | |
}, | |
{ | |
"epoch": 10.0, | |
"eval_accuracy": 0.9924563884959925, | |
"eval_loss": 0.029388291761279106, | |
"eval_runtime": 90.9871, | |
"eval_samples_per_second": 23.311, | |
"eval_steps_per_second": 1.462, | |
"step": 1990 | |
}, | |
{ | |
"epoch": 10.03, | |
"learning_rate": 4.639534883720931e-06, | |
"loss": 0.0206, | |
"step": 1995 | |
}, | |
{ | |
"epoch": 10.05, | |
"learning_rate": 4.651162790697675e-06, | |
"loss": 0.0817, | |
"step": 2000 | |
}, | |
{ | |
"epoch": 10.08, | |
"learning_rate": 4.66279069767442e-06, | |
"loss": 0.0143, | |
"step": 2005 | |
}, | |
{ | |
"epoch": 10.1, | |
"learning_rate": 4.674418604651163e-06, | |
"loss": 0.0036, | |
"step": 2010 | |
}, | |
{ | |
"epoch": 10.13, | |
"learning_rate": 4.686046511627908e-06, | |
"loss": 0.0335, | |
"step": 2015 | |
}, | |
{ | |
"epoch": 10.15, | |
"learning_rate": 4.697674418604651e-06, | |
"loss": 0.0304, | |
"step": 2020 | |
}, | |
{ | |
"epoch": 10.18, | |
"learning_rate": 4.709302325581396e-06, | |
"loss": 0.099, | |
"step": 2025 | |
}, | |
{ | |
"epoch": 10.2, | |
"learning_rate": 4.72093023255814e-06, | |
"loss": 0.037, | |
"step": 2030 | |
}, | |
{ | |
"epoch": 10.23, | |
"learning_rate": 4.7325581395348845e-06, | |
"loss": 0.0534, | |
"step": 2035 | |
}, | |
{ | |
"epoch": 10.25, | |
"learning_rate": 4.744186046511629e-06, | |
"loss": 0.0079, | |
"step": 2040 | |
}, | |
{ | |
"epoch": 10.28, | |
"learning_rate": 4.7558139534883725e-06, | |
"loss": 0.0551, | |
"step": 2045 | |
}, | |
{ | |
"epoch": 10.3, | |
"learning_rate": 4.767441860465117e-06, | |
"loss": 0.0175, | |
"step": 2050 | |
}, | |
{ | |
"epoch": 10.33, | |
"learning_rate": 4.7790697674418605e-06, | |
"loss": 0.0048, | |
"step": 2055 | |
}, | |
{ | |
"epoch": 10.35, | |
"learning_rate": 4.790697674418605e-06, | |
"loss": 0.0216, | |
"step": 2060 | |
}, | |
{ | |
"epoch": 10.38, | |
"learning_rate": 4.802325581395349e-06, | |
"loss": 0.0821, | |
"step": 2065 | |
}, | |
{ | |
"epoch": 10.4, | |
"learning_rate": 4.813953488372094e-06, | |
"loss": 0.0045, | |
"step": 2070 | |
}, | |
{ | |
"epoch": 10.43, | |
"learning_rate": 4.825581395348838e-06, | |
"loss": 0.0971, | |
"step": 2075 | |
}, | |
{ | |
"epoch": 10.45, | |
"learning_rate": 4.837209302325582e-06, | |
"loss": 0.0088, | |
"step": 2080 | |
}, | |
{ | |
"epoch": 10.48, | |
"learning_rate": 4.848837209302326e-06, | |
"loss": 0.0364, | |
"step": 2085 | |
}, | |
{ | |
"epoch": 10.5, | |
"learning_rate": 4.86046511627907e-06, | |
"loss": 0.0391, | |
"step": 2090 | |
}, | |
{ | |
"epoch": 10.53, | |
"learning_rate": 4.872093023255814e-06, | |
"loss": 0.0233, | |
"step": 2095 | |
}, | |
{ | |
"epoch": 10.55, | |
"learning_rate": 4.883720930232559e-06, | |
"loss": 0.0171, | |
"step": 2100 | |
}, | |
{ | |
"epoch": 10.58, | |
"learning_rate": 4.895348837209303e-06, | |
"loss": 0.0406, | |
"step": 2105 | |
}, | |
{ | |
"epoch": 10.6, | |
"learning_rate": 4.906976744186047e-06, | |
"loss": 0.0124, | |
"step": 2110 | |
}, | |
{ | |
"epoch": 10.63, | |
"learning_rate": 4.918604651162791e-06, | |
"loss": 0.042, | |
"step": 2115 | |
}, | |
{ | |
"epoch": 10.65, | |
"learning_rate": 4.9302325581395355e-06, | |
"loss": 0.0625, | |
"step": 2120 | |
}, | |
{ | |
"epoch": 10.68, | |
"learning_rate": 4.941860465116279e-06, | |
"loss": 0.0485, | |
"step": 2125 | |
}, | |
{ | |
"epoch": 10.7, | |
"learning_rate": 4.9534883720930235e-06, | |
"loss": 0.0038, | |
"step": 2130 | |
}, | |
{ | |
"epoch": 10.73, | |
"learning_rate": 4.965116279069768e-06, | |
"loss": 0.0635, | |
"step": 2135 | |
}, | |
{ | |
"epoch": 10.75, | |
"learning_rate": 4.976744186046512e-06, | |
"loss": 0.0032, | |
"step": 2140 | |
}, | |
{ | |
"epoch": 10.78, | |
"learning_rate": 4.988372093023256e-06, | |
"loss": 0.0495, | |
"step": 2145 | |
}, | |
{ | |
"epoch": 10.8, | |
"learning_rate": 5e-06, | |
"loss": 0.08, | |
"step": 2150 | |
}, | |
{ | |
"epoch": 10.83, | |
"learning_rate": 4.894957983193277e-06, | |
"loss": 0.0515, | |
"step": 2155 | |
}, | |
{ | |
"epoch": 10.85, | |
"learning_rate": 4.7899159663865555e-06, | |
"loss": 0.0506, | |
"step": 2160 | |
}, | |
{ | |
"epoch": 10.88, | |
"learning_rate": 4.684873949579832e-06, | |
"loss": 0.0511, | |
"step": 2165 | |
}, | |
{ | |
"epoch": 10.9, | |
"learning_rate": 4.57983193277311e-06, | |
"loss": 0.0343, | |
"step": 2170 | |
}, | |
{ | |
"epoch": 10.93, | |
"learning_rate": 4.474789915966387e-06, | |
"loss": 0.129, | |
"step": 2175 | |
}, | |
{ | |
"epoch": 10.95, | |
"learning_rate": 4.369747899159664e-06, | |
"loss": 0.0301, | |
"step": 2180 | |
}, | |
{ | |
"epoch": 10.98, | |
"learning_rate": 4.264705882352942e-06, | |
"loss": 0.0594, | |
"step": 2185 | |
}, | |
{ | |
"epoch": 11.0, | |
"eval_accuracy": 0.9905704856199906, | |
"eval_loss": 0.03312527760863304, | |
"eval_runtime": 90.7854, | |
"eval_samples_per_second": 23.363, | |
"eval_steps_per_second": 1.465, | |
"step": 2189 | |
}, | |
{ | |
"epoch": 11.01, | |
"learning_rate": 4.159663865546219e-06, | |
"loss": 0.0181, | |
"step": 2190 | |
}, | |
{ | |
"epoch": 11.03, | |
"learning_rate": 4.054621848739496e-06, | |
"loss": 0.0074, | |
"step": 2195 | |
}, | |
{ | |
"epoch": 11.06, | |
"learning_rate": 3.9495798319327735e-06, | |
"loss": 0.0418, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 11.08, | |
"learning_rate": 3.844537815126051e-06, | |
"loss": 0.0897, | |
"step": 2205 | |
}, | |
{ | |
"epoch": 11.11, | |
"learning_rate": 3.739495798319328e-06, | |
"loss": 0.0387, | |
"step": 2210 | |
}, | |
{ | |
"epoch": 11.13, | |
"learning_rate": 3.6344537815126053e-06, | |
"loss": 0.0034, | |
"step": 2215 | |
}, | |
{ | |
"epoch": 11.16, | |
"learning_rate": 3.529411764705883e-06, | |
"loss": 0.0177, | |
"step": 2220 | |
}, | |
{ | |
"epoch": 11.18, | |
"learning_rate": 3.42436974789916e-06, | |
"loss": 0.0186, | |
"step": 2225 | |
}, | |
{ | |
"epoch": 11.21, | |
"learning_rate": 3.319327731092437e-06, | |
"loss": 0.0361, | |
"step": 2230 | |
}, | |
{ | |
"epoch": 11.23, | |
"learning_rate": 3.2142857142857147e-06, | |
"loss": 0.0547, | |
"step": 2235 | |
}, | |
{ | |
"epoch": 11.26, | |
"learning_rate": 3.109243697478992e-06, | |
"loss": 0.1177, | |
"step": 2240 | |
}, | |
{ | |
"epoch": 11.28, | |
"learning_rate": 3.004201680672269e-06, | |
"loss": 0.0343, | |
"step": 2245 | |
}, | |
{ | |
"epoch": 11.31, | |
"learning_rate": 2.8991596638655466e-06, | |
"loss": 0.0807, | |
"step": 2250 | |
}, | |
{ | |
"epoch": 11.33, | |
"learning_rate": 2.7941176470588237e-06, | |
"loss": 0.0048, | |
"step": 2255 | |
}, | |
{ | |
"epoch": 11.36, | |
"learning_rate": 2.689075630252101e-06, | |
"loss": 0.0128, | |
"step": 2260 | |
}, | |
{ | |
"epoch": 11.38, | |
"learning_rate": 2.5840336134453784e-06, | |
"loss": 0.0142, | |
"step": 2265 | |
}, | |
{ | |
"epoch": 11.41, | |
"learning_rate": 2.4789915966386555e-06, | |
"loss": 0.0401, | |
"step": 2270 | |
}, | |
{ | |
"epoch": 11.43, | |
"learning_rate": 2.3739495798319327e-06, | |
"loss": 0.002, | |
"step": 2275 | |
}, | |
{ | |
"epoch": 11.46, | |
"learning_rate": 2.2689075630252102e-06, | |
"loss": 0.0072, | |
"step": 2280 | |
}, | |
{ | |
"epoch": 11.48, | |
"learning_rate": 2.163865546218488e-06, | |
"loss": 0.0341, | |
"step": 2285 | |
}, | |
{ | |
"epoch": 11.51, | |
"learning_rate": 2.058823529411765e-06, | |
"loss": 0.0059, | |
"step": 2290 | |
}, | |
{ | |
"epoch": 11.53, | |
"learning_rate": 1.953781512605042e-06, | |
"loss": 0.0588, | |
"step": 2295 | |
}, | |
{ | |
"epoch": 11.56, | |
"learning_rate": 1.8487394957983196e-06, | |
"loss": 0.0601, | |
"step": 2300 | |
}, | |
{ | |
"epoch": 11.58, | |
"learning_rate": 1.7436974789915968e-06, | |
"loss": 0.0146, | |
"step": 2305 | |
}, | |
{ | |
"epoch": 11.61, | |
"learning_rate": 1.6386554621848741e-06, | |
"loss": 0.0052, | |
"step": 2310 | |
}, | |
{ | |
"epoch": 11.63, | |
"learning_rate": 1.5336134453781515e-06, | |
"loss": 0.0437, | |
"step": 2315 | |
}, | |
{ | |
"epoch": 11.66, | |
"learning_rate": 1.4285714285714286e-06, | |
"loss": 0.0075, | |
"step": 2320 | |
}, | |
{ | |
"epoch": 11.68, | |
"learning_rate": 1.323529411764706e-06, | |
"loss": 0.0561, | |
"step": 2325 | |
}, | |
{ | |
"epoch": 11.71, | |
"learning_rate": 1.2184873949579833e-06, | |
"loss": 0.0718, | |
"step": 2330 | |
}, | |
{ | |
"epoch": 11.73, | |
"learning_rate": 1.1134453781512607e-06, | |
"loss": 0.0006, | |
"step": 2335 | |
}, | |
{ | |
"epoch": 11.76, | |
"learning_rate": 1.0084033613445378e-06, | |
"loss": 0.0184, | |
"step": 2340 | |
}, | |
{ | |
"epoch": 11.78, | |
"learning_rate": 9.033613445378152e-07, | |
"loss": 0.0341, | |
"step": 2345 | |
}, | |
{ | |
"epoch": 11.81, | |
"learning_rate": 7.983193277310924e-07, | |
"loss": 0.0058, | |
"step": 2350 | |
}, | |
{ | |
"epoch": 11.83, | |
"learning_rate": 6.932773109243699e-07, | |
"loss": 0.0086, | |
"step": 2355 | |
}, | |
{ | |
"epoch": 11.86, | |
"learning_rate": 5.882352941176471e-07, | |
"loss": 0.0563, | |
"step": 2360 | |
}, | |
{ | |
"epoch": 11.88, | |
"learning_rate": 4.831932773109245e-07, | |
"loss": 0.0036, | |
"step": 2365 | |
}, | |
{ | |
"epoch": 11.91, | |
"learning_rate": 3.781512605042017e-07, | |
"loss": 0.018, | |
"step": 2370 | |
}, | |
{ | |
"epoch": 11.93, | |
"learning_rate": 2.73109243697479e-07, | |
"loss": 0.0058, | |
"step": 2375 | |
}, | |
{ | |
"epoch": 11.96, | |
"learning_rate": 1.680672268907563e-07, | |
"loss": 0.0008, | |
"step": 2380 | |
}, | |
{ | |
"epoch": 11.98, | |
"learning_rate": 6.302521008403361e-08, | |
"loss": 0.0011, | |
"step": 2385 | |
}, | |
{ | |
"epoch": 12.0, | |
"eval_accuracy": 0.9938708156529938, | |
"eval_loss": 0.0275154709815979, | |
"eval_runtime": 90.8878, | |
"eval_samples_per_second": 23.336, | |
"eval_steps_per_second": 1.463, | |
"step": 2388 | |
}, | |
{ | |
"epoch": 12.0, | |
"step": 2388, | |
"total_flos": 2.0923685902916665e+19, | |
"train_loss": 0.1486589150943784, | |
"train_runtime": 11149.9067, | |
"train_samples_per_second": 6.848, | |
"train_steps_per_second": 0.214 | |
} | |
], | |
"max_steps": 2388, | |
"num_train_epochs": 12, | |
"total_flos": 2.0923685902916665e+19, | |
"trial_name": null, | |
"trial_params": null | |
} | |