{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.4857444561774025, |
|
"eval_steps": 60, |
|
"global_step": 1480, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 26.0, |
|
"learning_rate": 6.666666666666667e-08, |
|
"loss": 2.5036, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"eval_loss": 2.734731435775757, |
|
"eval_runtime": 73.3418, |
|
"eval_samples_per_second": 8.849, |
|
"eval_steps_per_second": 8.849, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 25.25, |
|
"learning_rate": 1.3333333333333334e-07, |
|
"loss": 2.8732, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 23.0, |
|
"learning_rate": 2e-07, |
|
"loss": 2.6451, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 26.375, |
|
"learning_rate": 2.6666666666666667e-07, |
|
"loss": 2.6047, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 23.0, |
|
"learning_rate": 3.333333333333333e-07, |
|
"loss": 2.7992, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 24.75, |
|
"learning_rate": 4e-07, |
|
"loss": 2.7625, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 25.5, |
|
"learning_rate": 4.6666666666666666e-07, |
|
"loss": 2.5595, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 29.25, |
|
"learning_rate": 5.333333333333333e-07, |
|
"loss": 2.8246, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 19.75, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 2.4177, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 23.375, |
|
"learning_rate": 6.666666666666666e-07, |
|
"loss": 2.5124, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 26.375, |
|
"learning_rate": 7.333333333333333e-07, |
|
"loss": 2.4839, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 23.75, |
|
"learning_rate": 8e-07, |
|
"loss": 2.7422, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 26.5, |
|
"learning_rate": 8.666666666666666e-07, |
|
"loss": 2.7973, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 25.0, |
|
"learning_rate": 9.333333333333333e-07, |
|
"loss": 2.914, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 23.75, |
|
"learning_rate": 1e-06, |
|
"loss": 2.9031, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 23.125, |
|
"learning_rate": 1.0666666666666667e-06, |
|
"loss": 2.6065, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 22.75, |
|
"learning_rate": 1.1333333333333334e-06, |
|
"loss": 2.7575, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 20.375, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 2.8768, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 17.375, |
|
"learning_rate": 1.2666666666666667e-06, |
|
"loss": 2.7704, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 15.25, |
|
"learning_rate": 1.3333333333333332e-06, |
|
"loss": 2.5928, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 15.625, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 2.5997, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 18.25, |
|
"learning_rate": 1.4666666666666667e-06, |
|
"loss": 2.605, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 14.4375, |
|
"learning_rate": 1.5333333333333332e-06, |
|
"loss": 2.7531, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 14.625, |
|
"learning_rate": 1.6e-06, |
|
"loss": 2.6625, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 1.6666666666666669e-06, |
|
"loss": 2.5257, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 11.875, |
|
"learning_rate": 1.7333333333333332e-06, |
|
"loss": 2.5876, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 9.0, |
|
"learning_rate": 1.8e-06, |
|
"loss": 2.473, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 1.8666666666666667e-06, |
|
"loss": 2.2859, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 8.5625, |
|
"learning_rate": 1.9333333333333336e-06, |
|
"loss": 2.6034, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 8.6875, |
|
"learning_rate": 2e-06, |
|
"loss": 2.745, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 8.125, |
|
"learning_rate": 2.0666666666666666e-06, |
|
"loss": 2.597, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.125, |
|
"learning_rate": 2.1333333333333334e-06, |
|
"loss": 2.4495, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.75, |
|
"learning_rate": 2.1999999999999997e-06, |
|
"loss": 2.5442, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 7.09375, |
|
"learning_rate": 2.266666666666667e-06, |
|
"loss": 2.5841, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 2.3333333333333336e-06, |
|
"loss": 2.3175, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.03125, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 2.4678, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.96875, |
|
"learning_rate": 2.4666666666666666e-06, |
|
"loss": 2.3823, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.5, |
|
"learning_rate": 2.5333333333333334e-06, |
|
"loss": 2.2722, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.78125, |
|
"learning_rate": 2.6e-06, |
|
"loss": 2.6632, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.75, |
|
"learning_rate": 2.6666666666666664e-06, |
|
"loss": 2.3124, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 2.733333333333333e-06, |
|
"loss": 2.3315, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.796875, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 2.3679, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 2.866666666666667e-06, |
|
"loss": 2.3584, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 2.9333333333333333e-06, |
|
"loss": 2.1787, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 3e-06, |
|
"loss": 2.4291, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.921875, |
|
"learning_rate": 3.0666666666666664e-06, |
|
"loss": 2.3665, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 3.1333333333333335e-06, |
|
"loss": 2.4683, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 3.2e-06, |
|
"loss": 2.327, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 3.2666666666666666e-06, |
|
"loss": 2.3169, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 3.3333333333333337e-06, |
|
"loss": 2.043, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.0, |
|
"learning_rate": 3.4e-06, |
|
"loss": 2.4445, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 3.4666666666666664e-06, |
|
"loss": 2.6345, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 3.5333333333333335e-06, |
|
"loss": 2.4101, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 3.6e-06, |
|
"loss": 2.3681, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.5, |
|
"learning_rate": 3.666666666666667e-06, |
|
"loss": 2.4041, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 3.7333333333333333e-06, |
|
"loss": 2.5763, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.96875, |
|
"learning_rate": 3.8e-06, |
|
"loss": 2.2547, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.25, |
|
"learning_rate": 3.866666666666667e-06, |
|
"loss": 2.3994, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 3.9333333333333335e-06, |
|
"loss": 2.3216, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.25, |
|
"learning_rate": 4e-06, |
|
"loss": 2.239, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 2.207598924636841, |
|
"eval_runtime": 72.558, |
|
"eval_samples_per_second": 8.945, |
|
"eval_steps_per_second": 8.945, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.0, |
|
"learning_rate": 4.066666666666667e-06, |
|
"loss": 2.0404, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 4.133333333333333e-06, |
|
"loss": 2.0953, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 4.2e-06, |
|
"loss": 2.2906, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 4.266666666666667e-06, |
|
"loss": 2.2262, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 4.333333333333333e-06, |
|
"loss": 2.2607, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.25, |
|
"learning_rate": 4.399999999999999e-06, |
|
"loss": 2.2938, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.4666666666666665e-06, |
|
"loss": 2.1453, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.533333333333334e-06, |
|
"loss": 2.0149, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 2.4088, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 2.1434, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 4.7333333333333335e-06, |
|
"loss": 2.0821, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 2.1367, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.866666666666667e-06, |
|
"loss": 2.3554, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 4.933333333333333e-06, |
|
"loss": 2.1236, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 5e-06, |
|
"loss": 2.2571, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 5.066666666666667e-06, |
|
"loss": 2.3625, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 5.133333333333333e-06, |
|
"loss": 2.224, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 5.2e-06, |
|
"loss": 2.2386, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 5.2666666666666665e-06, |
|
"loss": 2.2665, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.333333333333333e-06, |
|
"loss": 1.9822, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 5.4e-06, |
|
"loss": 2.1562, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.9453125, |
|
"learning_rate": 5.466666666666666e-06, |
|
"loss": 2.1552, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 5.5333333333333334e-06, |
|
"loss": 2.0242, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.9113, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.5, |
|
"learning_rate": 5.666666666666667e-06, |
|
"loss": 2.0728, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 5.733333333333334e-06, |
|
"loss": 2.1963, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 5.8e-06, |
|
"loss": 2.3849, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.866666666666667e-06, |
|
"loss": 2.0595, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.75, |
|
"learning_rate": 5.933333333333334e-06, |
|
"loss": 2.0931, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5, |
|
"learning_rate": 6e-06, |
|
"loss": 1.9384, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 5.999995296023366e-06, |
|
"loss": 2.2192, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 5.999981184109854e-06, |
|
"loss": 2.0251, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.999957664308636e-06, |
|
"loss": 1.9813, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 5.999924736701666e-06, |
|
"loss": 2.1143, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.999882401403676e-06, |
|
"loss": 1.8317, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.25, |
|
"learning_rate": 5.999830658562182e-06, |
|
"loss": 2.2928, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.999769508357479e-06, |
|
"loss": 2.0814, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.999698951002636e-06, |
|
"loss": 1.9344, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.99961898674351e-06, |
|
"loss": 1.951, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 5.999529615858727e-06, |
|
"loss": 2.0772, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.999430838659697e-06, |
|
"loss": 2.1198, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.75, |
|
"learning_rate": 5.999322655490598e-06, |
|
"loss": 2.0501, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.999205066728389e-06, |
|
"loss": 2.1693, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 5.999078072782799e-06, |
|
"loss": 2.143, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.9989416740963284e-06, |
|
"loss": 2.2024, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.96875, |
|
"learning_rate": 5.998795871144249e-06, |
|
"loss": 2.1681, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 5.9986406644346e-06, |
|
"loss": 2.0761, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 5.998476054508189e-06, |
|
"loss": 2.3697, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.8671875, |
|
"learning_rate": 5.998302041938586e-06, |
|
"loss": 2.2053, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.8671875, |
|
"learning_rate": 5.998118627332126e-06, |
|
"loss": 2.1624, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.997925811327904e-06, |
|
"loss": 2.2316, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.75, |
|
"learning_rate": 5.997723594597772e-06, |
|
"loss": 2.0206, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.997511977846342e-06, |
|
"loss": 1.9041, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 5.997290961810975e-06, |
|
"loss": 1.9657, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 5.997060547261785e-06, |
|
"loss": 2.0243, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 5.996820735001635e-06, |
|
"loss": 2.0708, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.9965715258661355e-06, |
|
"loss": 2.02, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.996312920723635e-06, |
|
"loss": 2.118, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.996044920475225e-06, |
|
"loss": 1.9616, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 5.995767526054733e-06, |
|
"loss": 2.1868, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 2.064619541168213, |
|
"eval_runtime": 72.5673, |
|
"eval_samples_per_second": 8.943, |
|
"eval_steps_per_second": 8.943, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.99548073842872e-06, |
|
"loss": 2.0924, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 5.995184558596476e-06, |
|
"loss": 2.2306, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.9948789875900195e-06, |
|
"loss": 2.0417, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.994564026474088e-06, |
|
"loss": 2.1642, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.994239676346142e-06, |
|
"loss": 2.253, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 5.9939059383363556e-06, |
|
"loss": 1.9898, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.875, |
|
"learning_rate": 5.993562813607616e-06, |
|
"loss": 2.1938, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 5.993210303355513e-06, |
|
"loss": 2.1159, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.9928484088083454e-06, |
|
"loss": 2.1059, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.992477131227108e-06, |
|
"loss": 2.1826, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 5.992096471905491e-06, |
|
"loss": 1.8935, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 5.991706432169873e-06, |
|
"loss": 2.1573, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.9453125, |
|
"learning_rate": 5.991307013379319e-06, |
|
"loss": 2.0751, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 5.990898216925575e-06, |
|
"loss": 2.1524, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 5.990480044233063e-06, |
|
"loss": 2.0496, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.990052496758875e-06, |
|
"loss": 2.0499, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.989615575992769e-06, |
|
"loss": 1.8557, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 5.989169283457163e-06, |
|
"loss": 1.9044, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.98871362070713e-06, |
|
"loss": 1.9648, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.96875, |
|
"learning_rate": 5.988248589330397e-06, |
|
"loss": 2.1693, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.987774190947329e-06, |
|
"loss": 2.1912, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.987290427210933e-06, |
|
"loss": 2.0069, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 5.98679729980685e-06, |
|
"loss": 1.9199, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 5.986294810453345e-06, |
|
"loss": 2.2007, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.985782960901305e-06, |
|
"loss": 1.8919, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.985261752934234e-06, |
|
"loss": 1.9178, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.984731188368243e-06, |
|
"loss": 2.0098, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.984191269052045e-06, |
|
"loss": 2.0263, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.98364199686695e-06, |
|
"loss": 2.1398, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.983083373726857e-06, |
|
"loss": 2.0072, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.9453125, |
|
"learning_rate": 5.982515401578246e-06, |
|
"loss": 2.1237, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.981938082400176e-06, |
|
"loss": 2.2744, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.9813514182042745e-06, |
|
"loss": 1.999, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.980755411034728e-06, |
|
"loss": 1.9153, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.980150062968282e-06, |
|
"loss": 1.9111, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.979535376114227e-06, |
|
"loss": 1.9874, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 5.978911352614394e-06, |
|
"loss": 2.213, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 5.9782779946431475e-06, |
|
"loss": 2.0593, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.9776353044073774e-06, |
|
"loss": 1.9598, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.875, |
|
"learning_rate": 5.9769832841464914e-06, |
|
"loss": 2.3477, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.976321936132406e-06, |
|
"loss": 1.8547, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 5.975651262669542e-06, |
|
"loss": 2.2347, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 5.97497126609481e-06, |
|
"loss": 2.0735, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.974281948777609e-06, |
|
"loss": 1.8234, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.973583313119815e-06, |
|
"loss": 1.9371, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.972875361555773e-06, |
|
"loss": 2.1144, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.97215809655229e-06, |
|
"loss": 2.0255, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.971431520608621e-06, |
|
"loss": 2.0186, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.97069563625647e-06, |
|
"loss": 1.933, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.9699504460599696e-06, |
|
"loss": 1.9893, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.969195952615683e-06, |
|
"loss": 1.9635, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.968432158552586e-06, |
|
"loss": 1.7703, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.967659066532064e-06, |
|
"loss": 1.9872, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 5.966876679247899e-06, |
|
"loss": 2.2503, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.966084999426265e-06, |
|
"loss": 2.0597, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.9652840298257104e-06, |
|
"loss": 1.9385, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.9644737732371555e-06, |
|
"loss": 2.0229, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.9636542324838804e-06, |
|
"loss": 2.0867, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.962825410421516e-06, |
|
"loss": 2.2203, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.9619873099380325e-06, |
|
"loss": 1.8822, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 2.0232019424438477, |
|
"eval_runtime": 72.5156, |
|
"eval_samples_per_second": 8.95, |
|
"eval_steps_per_second": 8.95, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 5.96113993395373e-06, |
|
"loss": 1.8759, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.960283285421226e-06, |
|
"loss": 2.0248, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.959417367325453e-06, |
|
"loss": 2.1588, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.958542182683637e-06, |
|
"loss": 2.0872, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.957657734545297e-06, |
|
"loss": 1.8537, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.956764025992228e-06, |
|
"loss": 2.0764, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.9558610601384905e-06, |
|
"loss": 2.0138, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 5.9549488401304055e-06, |
|
"loss": 2.0897, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.954027369146535e-06, |
|
"loss": 2.0864, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 5.9530966503976796e-06, |
|
"loss": 2.128, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.952156687126859e-06, |
|
"loss": 2.1293, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.951207482609308e-06, |
|
"loss": 2.0601, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.95024904015246e-06, |
|
"loss": 2.0303, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 5.949281363095939e-06, |
|
"loss": 2.0027, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.9483044548115445e-06, |
|
"loss": 2.0815, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.947318318703242e-06, |
|
"loss": 2.111, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.94632295820715e-06, |
|
"loss": 2.0938, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 5.945318376791531e-06, |
|
"loss": 1.8695, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.944304577956776e-06, |
|
"loss": 1.947, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.9432815652353924e-06, |
|
"loss": 1.9312, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 5.942249342191993e-06, |
|
"loss": 2.0169, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.941207912423285e-06, |
|
"loss": 1.9519, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 5.940157279558055e-06, |
|
"loss": 2.0954, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 5.9390974472571535e-06, |
|
"loss": 2.1871, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.9380284192134925e-06, |
|
"loss": 1.9921, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 5.936950199152021e-06, |
|
"loss": 2.0208, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.935862790829718e-06, |
|
"loss": 1.9512, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.934766198035577e-06, |
|
"loss": 1.9911, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.933660424590599e-06, |
|
"loss": 1.9075, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 5.932545474347769e-06, |
|
"loss": 2.1189, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.931421351192052e-06, |
|
"loss": 2.0672, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 5.9302880590403685e-06, |
|
"loss": 2.1281, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.929145601841597e-06, |
|
"loss": 1.7286, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 5.927993983576545e-06, |
|
"loss": 2.0305, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 5.926833208257942e-06, |
|
"loss": 2.1612, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.925663279930424e-06, |
|
"loss": 2.0265, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.924484202670524e-06, |
|
"loss": 1.7555, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.923295980586648e-06, |
|
"loss": 2.053, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.922098617819071e-06, |
|
"loss": 2.162, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 5.920892118539916e-06, |
|
"loss": 2.1272, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.919676486953141e-06, |
|
"loss": 1.8841, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.918451727294526e-06, |
|
"loss": 1.8209, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.917217843831657e-06, |
|
"loss": 1.965, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.915974840863911e-06, |
|
"loss": 2.0254, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.9147227227224415e-06, |
|
"loss": 2.0056, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.913461493770162e-06, |
|
"loss": 2.0089, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 5.912191158401734e-06, |
|
"loss": 1.9855, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.910911721043549e-06, |
|
"loss": 2.051, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.90962318615371e-06, |
|
"loss": 1.9122, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 5.908325558222027e-06, |
|
"loss": 2.1777, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.907018841769988e-06, |
|
"loss": 1.9074, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.905703041350752e-06, |
|
"loss": 1.9642, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.904378161549129e-06, |
|
"loss": 1.8773, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.903044206981566e-06, |
|
"loss": 2.2381, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.901701182296131e-06, |
|
"loss": 1.9551, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.900349092172495e-06, |
|
"loss": 1.9436, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 5.898987941321919e-06, |
|
"loss": 1.8453, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.897617734487233e-06, |
|
"loss": 1.9952, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.896238476442822e-06, |
|
"loss": 2.1063, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.894850171994611e-06, |
|
"loss": 1.9618, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"eval_loss": 2.005347967147827, |
|
"eval_runtime": 72.6485, |
|
"eval_samples_per_second": 8.933, |
|
"eval_steps_per_second": 8.933, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 5.893452825980047e-06, |
|
"loss": 1.947, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.892046443268078e-06, |
|
"loss": 2.2132, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.890631028759143e-06, |
|
"loss": 2.0863, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.8892065873851515e-06, |
|
"loss": 2.0921, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 5.887773124109465e-06, |
|
"loss": 1.7609, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.886330643926881e-06, |
|
"loss": 2.0622, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.884879151863618e-06, |
|
"loss": 2.05, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 5.8834186529772945e-06, |
|
"loss": 1.9644, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.881949152356911e-06, |
|
"loss": 1.912, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.880470655122837e-06, |
|
"loss": 1.9836, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.878983166426787e-06, |
|
"loss": 2.1238, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.8774866914518075e-06, |
|
"loss": 1.9278, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 5.875981235412256e-06, |
|
"loss": 1.9561, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.874466803553785e-06, |
|
"loss": 1.9926, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.8729434011533235e-06, |
|
"loss": 1.9142, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 5.871411033519052e-06, |
|
"loss": 1.964, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.8698697059903975e-06, |
|
"loss": 1.8374, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.868319423938003e-06, |
|
"loss": 2.068, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.866760192763712e-06, |
|
"loss": 2.1991, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.865192017900552e-06, |
|
"loss": 2.0453, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.863614904812716e-06, |
|
"loss": 1.8102, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.862028858995538e-06, |
|
"loss": 1.9502, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 5.8604338859754785e-06, |
|
"loss": 2.2038, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.858829991310106e-06, |
|
"loss": 2.0411, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.857217180588075e-06, |
|
"loss": 2.1519, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.855595459429105e-06, |
|
"loss": 2.0955, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.853964833483967e-06, |
|
"loss": 1.9125, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.852325308434457e-06, |
|
"loss": 2.0982, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.850676889993383e-06, |
|
"loss": 2.0996, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 5.849019583904536e-06, |
|
"loss": 2.049, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.84735339594268e-06, |
|
"loss": 1.9399, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 5.845678331913525e-06, |
|
"loss": 2.0352, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.843994397653709e-06, |
|
"loss": 1.9942, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.842301599030779e-06, |
|
"loss": 2.0872, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.840599941943169e-06, |
|
"loss": 1.9365, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.838889432320178e-06, |
|
"loss": 1.9919, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.8371700761219525e-06, |
|
"loss": 2.0302, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.835441879339465e-06, |
|
"loss": 2.0213, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 5.833704847994489e-06, |
|
"loss": 1.9393, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.831958988139585e-06, |
|
"loss": 1.8929, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 5.830204305858075e-06, |
|
"loss": 2.0949, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.828440807264021e-06, |
|
"loss": 1.953, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.826668498502205e-06, |
|
"loss": 1.8952, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.824887385748107e-06, |
|
"loss": 2.1153, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 5.823097475207888e-06, |
|
"loss": 2.1172, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.821298773118357e-06, |
|
"loss": 1.8706, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.8194912857469615e-06, |
|
"loss": 2.0176, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.81767501939176e-06, |
|
"loss": 2.0283, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.815849980381401e-06, |
|
"loss": 1.9713, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 5.814016175075099e-06, |
|
"loss": 2.022, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 5.8121736098626144e-06, |
|
"loss": 2.1311, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 5.8103222911642335e-06, |
|
"loss": 2.0839, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 5.808462225430739e-06, |
|
"loss": 1.9339, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.8065934191433955e-06, |
|
"loss": 1.9463, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.804715878813923e-06, |
|
"loss": 1.9973, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.8028296109844736e-06, |
|
"loss": 1.9792, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.800934622227608e-06, |
|
"loss": 2.005, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.79903091914628e-06, |
|
"loss": 1.9834, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 5.797118508373802e-06, |
|
"loss": 2.2975, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.79519739657383e-06, |
|
"loss": 2.0384, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"eval_loss": 1.993146538734436, |
|
"eval_runtime": 73.1349, |
|
"eval_samples_per_second": 8.874, |
|
"eval_steps_per_second": 8.874, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.793267590440339e-06, |
|
"loss": 2.041, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.791329096697598e-06, |
|
"loss": 1.7858, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 5.789381922100146e-06, |
|
"loss": 2.1009, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.787426073432772e-06, |
|
"loss": 2.0599, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 5.785461557510487e-06, |
|
"loss": 1.8, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.783488381178505e-06, |
|
"loss": 2.0379, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.781506551312215e-06, |
|
"loss": 1.9764, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.779516074817158e-06, |
|
"loss": 2.0071, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.777516958629004e-06, |
|
"loss": 1.9035, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.77550920971353e-06, |
|
"loss": 1.8642, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.773492835066588e-06, |
|
"loss": 2.0698, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.77146784171409e-06, |
|
"loss": 2.0487, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.769434236711978e-06, |
|
"loss": 2.0178, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.7673920271462e-06, |
|
"loss": 2.0158, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.765341220132685e-06, |
|
"loss": 1.8776, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.763281822817322e-06, |
|
"loss": 1.945, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.761213842375933e-06, |
|
"loss": 1.9996, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.759137286014241e-06, |
|
"loss": 2.0427, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.757052160967858e-06, |
|
"loss": 1.9874, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 5.75495847450225e-06, |
|
"loss": 1.7006, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.752856233912714e-06, |
|
"loss": 2.0536, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 5.750745446524355e-06, |
|
"loss": 2.08, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.748626119692058e-06, |
|
"loss": 1.9435, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.746498260800462e-06, |
|
"loss": 2.0348, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 5.744361877263938e-06, |
|
"loss": 2.2069, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.742216976526557e-06, |
|
"loss": 2.002, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.7400635660620685e-06, |
|
"loss": 2.0927, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.737901653373878e-06, |
|
"loss": 1.6721, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.7357312459950085e-06, |
|
"loss": 1.8958, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.733552351488089e-06, |
|
"loss": 1.8287, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.7313649774453185e-06, |
|
"loss": 1.9405, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.729169131488443e-06, |
|
"loss": 1.8981, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.726964821268725e-06, |
|
"loss": 1.8419, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.724752054466926e-06, |
|
"loss": 1.9149, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.722530838793268e-06, |
|
"loss": 2.0117, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.720301181987417e-06, |
|
"loss": 2.0528, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.7180630918184485e-06, |
|
"loss": 2.0948, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.7158165760848265e-06, |
|
"loss": 1.9506, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 5.713561642614369e-06, |
|
"loss": 1.8097, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.71129829926423e-06, |
|
"loss": 1.9875, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 5.709026553920862e-06, |
|
"loss": 2.1816, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.706746414499997e-06, |
|
"loss": 2.0024, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.704457888946614e-06, |
|
"loss": 1.8431, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 5.702160985234916e-06, |
|
"loss": 2.3138, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.6998557113682935e-06, |
|
"loss": 2.0798, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.697542075379308e-06, |
|
"loss": 1.8796, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 5.695220085329653e-06, |
|
"loss": 2.0439, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.692889749310135e-06, |
|
"loss": 2.0048, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.690551075440642e-06, |
|
"loss": 1.9745, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.688204071870108e-06, |
|
"loss": 2.0887, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.685848746776501e-06, |
|
"loss": 2.0145, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.683485108366776e-06, |
|
"loss": 1.9202, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.681113164876861e-06, |
|
"loss": 2.1363, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.678732924571618e-06, |
|
"loss": 1.8549, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 5.6763443957448236e-06, |
|
"loss": 1.8595, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.67394758671913e-06, |
|
"loss": 1.9278, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.671542505846046e-06, |
|
"loss": 1.973, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.669129161505899e-06, |
|
"loss": 1.9534, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.666707562107812e-06, |
|
"loss": 1.9176, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 5.6642777160896705e-06, |
|
"loss": 1.978, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"eval_loss": 1.9843242168426514, |
|
"eval_runtime": 72.526, |
|
"eval_samples_per_second": 8.949, |
|
"eval_steps_per_second": 8.949, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.661839631918097e-06, |
|
"loss": 1.7639, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.75, |
|
"learning_rate": 5.659393318088419e-06, |
|
"loss": 2.1257, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.656938783124639e-06, |
|
"loss": 2.0958, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.654476035579402e-06, |
|
"loss": 1.9213, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.652005084033976e-06, |
|
"loss": 2.1154, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.64952593709821e-06, |
|
"loss": 2.1574, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.64703860341051e-06, |
|
"loss": 1.964, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.64454309163781e-06, |
|
"loss": 1.9656, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 5.64203941047554e-06, |
|
"loss": 2.008, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.639527568647593e-06, |
|
"loss": 1.9648, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.6370075749063e-06, |
|
"loss": 1.9933, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.634479438032394e-06, |
|
"loss": 2.0599, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.631943166834985e-06, |
|
"loss": 1.9217, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 5.629398770151526e-06, |
|
"loss": 2.035, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.626846256847779e-06, |
|
"loss": 1.7794, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.624285635817793e-06, |
|
"loss": 2.104, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.621716915983865e-06, |
|
"loss": 1.859, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.619140106296511e-06, |
|
"loss": 1.8899, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.616555215734438e-06, |
|
"loss": 1.9897, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 5.6139622533045085e-06, |
|
"loss": 2.2321, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.61136122804171e-06, |
|
"loss": 1.9103, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.608752149009127e-06, |
|
"loss": 1.9372, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 3.125, |
|
"learning_rate": 5.606135025297905e-06, |
|
"loss": 2.1246, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.60350986602722e-06, |
|
"loss": 1.9047, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.600876680344249e-06, |
|
"loss": 1.755, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 5.5982354774241355e-06, |
|
"loss": 1.925, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.595586266469958e-06, |
|
"loss": 1.9832, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.5929290567127e-06, |
|
"loss": 2.0026, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.590263857411215e-06, |
|
"loss": 2.0023, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.587590677852195e-06, |
|
"loss": 1.801, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.584909527350141e-06, |
|
"loss": 1.8022, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.582220415247325e-06, |
|
"loss": 2.1502, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.579523350913763e-06, |
|
"loss": 1.9927, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.576818343747179e-06, |
|
"loss": 2.1792, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 5.574105403172973e-06, |
|
"loss": 1.832, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 5.571384538644189e-06, |
|
"loss": 1.9887, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.568655759641482e-06, |
|
"loss": 2.0233, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.565919075673082e-06, |
|
"loss": 1.8999, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.563174496274767e-06, |
|
"loss": 2.0715, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.560422031009821e-06, |
|
"loss": 2.0547, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.557661689469011e-06, |
|
"loss": 2.1603, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.554893481270546e-06, |
|
"loss": 1.9174, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.552117416060045e-06, |
|
"loss": 1.8991, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.549333503510504e-06, |
|
"loss": 1.8692, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 5.546541753322264e-06, |
|
"loss": 1.9568, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.543742175222977e-06, |
|
"loss": 2.1146, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.540934778967567e-06, |
|
"loss": 1.9401, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.538119574338203e-06, |
|
"loss": 1.878, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.535296571144259e-06, |
|
"loss": 2.0552, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.532465779222283e-06, |
|
"loss": 2.1755, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.529627208435967e-06, |
|
"loss": 1.9412, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.5267808686761e-06, |
|
"loss": 1.9386, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.523926769860549e-06, |
|
"loss": 1.9339, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.52106492193421e-06, |
|
"loss": 1.8639, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 5.5181953348689865e-06, |
|
"loss": 2.0624, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 5.515318018663744e-06, |
|
"loss": 1.8617, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.512432983344281e-06, |
|
"loss": 2.2311, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.509540238963293e-06, |
|
"loss": 1.9526, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.506639795600338e-06, |
|
"loss": 2.0589, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.5037316633618e-06, |
|
"loss": 1.8341, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"eval_loss": 1.9771074056625366, |
|
"eval_runtime": 72.6606, |
|
"eval_samples_per_second": 8.932, |
|
"eval_steps_per_second": 8.932, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.50081585238085e-06, |
|
"loss": 1.8993, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.497892372817423e-06, |
|
"loss": 1.9337, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.494961234858169e-06, |
|
"loss": 2.0068, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.492022448716425e-06, |
|
"loss": 2.0354, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.489076024632178e-06, |
|
"loss": 1.8277, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 5.486121972872026e-06, |
|
"loss": 1.8635, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.483160303729148e-06, |
|
"loss": 2.0839, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.480191027523265e-06, |
|
"loss": 2.0322, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.477214154600602e-06, |
|
"loss": 1.9341, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.474229695333859e-06, |
|
"loss": 2.0939, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.4712376601221625e-06, |
|
"loss": 1.9471, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.468238059391045e-06, |
|
"loss": 2.1356, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.465230903592394e-06, |
|
"loss": 2.104, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.462216203204427e-06, |
|
"loss": 1.8952, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.459193968731645e-06, |
|
"loss": 1.9731, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.456164210704808e-06, |
|
"loss": 1.9873, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.453126939680882e-06, |
|
"loss": 1.9, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.45008216624302e-06, |
|
"loss": 1.8434, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.447029901000513e-06, |
|
"loss": 1.7828, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.443970154588756e-06, |
|
"loss": 2.045, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.440902937669213e-06, |
|
"loss": 2.0628, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 5.4378282609293775e-06, |
|
"loss": 2.138, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.434746135082736e-06, |
|
"loss": 1.8824, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.431656570868731e-06, |
|
"loss": 2.0143, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.428559579052726e-06, |
|
"loss": 2.0449, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 5.425455170425963e-06, |
|
"loss": 1.734, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 5.422343355805526e-06, |
|
"loss": 1.7411, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.419224146034307e-06, |
|
"loss": 1.9042, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.4160975519809665e-06, |
|
"loss": 2.1657, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.412963584539892e-06, |
|
"loss": 2.1677, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.409822254631165e-06, |
|
"loss": 1.6379, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.40667357320052e-06, |
|
"loss": 1.9644, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.403517551219309e-06, |
|
"loss": 1.9929, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 5.400354199684463e-06, |
|
"loss": 2.0832, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.3971835296184455e-06, |
|
"loss": 1.9306, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.394005552069228e-06, |
|
"loss": 2.2007, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.39082027811024e-06, |
|
"loss": 1.881, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.625, |
|
"learning_rate": 5.387627718840339e-06, |
|
"loss": 2.0157, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.3844278853837626e-06, |
|
"loss": 1.9403, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.3812207888900985e-06, |
|
"loss": 2.1445, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 5.378006440534241e-06, |
|
"loss": 1.9988, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.374784851516352e-06, |
|
"loss": 2.1576, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.3715560330618245e-06, |
|
"loss": 1.9325, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.3683199964212405e-06, |
|
"loss": 1.9047, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.365076752870335e-06, |
|
"loss": 2.0143, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.361826313709952e-06, |
|
"loss": 1.8602, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.358568690266011e-06, |
|
"loss": 2.1187, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.355303893889463e-06, |
|
"loss": 1.9371, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.3520319359562545e-06, |
|
"loss": 1.8244, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.348752827867283e-06, |
|
"loss": 1.8302, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.345466581048362e-06, |
|
"loss": 1.8959, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.34217320695018e-06, |
|
"loss": 1.8984, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.338872717048259e-06, |
|
"loss": 1.9635, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.3355651228429135e-06, |
|
"loss": 1.8611, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.332250435859216e-06, |
|
"loss": 2.0737, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.3289286676469534e-06, |
|
"loss": 1.8734, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.325599829780581e-06, |
|
"loss": 1.9806, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.322263933859196e-06, |
|
"loss": 1.9084, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.318920991506481e-06, |
|
"loss": 2.115, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.315571014370679e-06, |
|
"loss": 1.7521, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"eval_loss": 1.9717546701431274, |
|
"eval_runtime": 73.075, |
|
"eval_samples_per_second": 8.881, |
|
"eval_steps_per_second": 8.881, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.312214014124538e-06, |
|
"loss": 2.1913, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.308850002465282e-06, |
|
"loss": 2.0011, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.305478991114565e-06, |
|
"loss": 2.1085, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.302100991818428e-06, |
|
"loss": 1.83, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.298716016347267e-06, |
|
"loss": 2.2321, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.29532407649578e-06, |
|
"loss": 1.9749, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.291925184082935e-06, |
|
"loss": 1.8921, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.288519350951925e-06, |
|
"loss": 1.9127, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.2851065889701265e-06, |
|
"loss": 1.8932, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 5.281686910029061e-06, |
|
"loss": 2.2113, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.27826032604435e-06, |
|
"loss": 1.9799, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.274826848955677e-06, |
|
"loss": 1.9245, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.271386490726741e-06, |
|
"loss": 1.8607, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 5.26793926334522e-06, |
|
"loss": 1.8167, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 5.264485178822728e-06, |
|
"loss": 2.165, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.261024249194769e-06, |
|
"loss": 1.8488, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.2575564865207e-06, |
|
"loss": 1.8926, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.254081902883689e-06, |
|
"loss": 1.9392, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.250600510390669e-06, |
|
"loss": 2.1148, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.247112321172299e-06, |
|
"loss": 2.0925, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.243617347382919e-06, |
|
"loss": 1.898, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.240115601200511e-06, |
|
"loss": 2.0985, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 5.236607094826653e-06, |
|
"loss": 1.9642, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.2330918404864815e-06, |
|
"loss": 1.8557, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.229569850428646e-06, |
|
"loss": 1.8097, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 5.22604113692526e-06, |
|
"loss": 1.9262, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.22250571227187e-06, |
|
"loss": 2.0331, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.218963588787407e-06, |
|
"loss": 1.7557, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.215414778814143e-06, |
|
"loss": 2.0086, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.211859294717644e-06, |
|
"loss": 1.9163, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.2082971488867385e-06, |
|
"loss": 2.1011, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.204728353733462e-06, |
|
"loss": 1.9178, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.201152921693024e-06, |
|
"loss": 1.9985, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.197570865223754e-06, |
|
"loss": 1.8964, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.193982196807067e-06, |
|
"loss": 1.9792, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 5.190386928947419e-06, |
|
"loss": 1.7589, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.186785074172257e-06, |
|
"loss": 2.0091, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.183176645031982e-06, |
|
"loss": 2.1835, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.179561654099903e-06, |
|
"loss": 1.8255, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 5.1759401139721905e-06, |
|
"loss": 1.8562, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.1723120372678385e-06, |
|
"loss": 1.9574, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.168677436628616e-06, |
|
"loss": 1.8051, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.165036324719025e-06, |
|
"loss": 1.9382, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.161388714226253e-06, |
|
"loss": 1.7465, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.157734617860134e-06, |
|
"loss": 1.8892, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.1540740483531e-06, |
|
"loss": 1.994, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.150407018460139e-06, |
|
"loss": 1.8736, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 5.4375, |
|
"learning_rate": 5.146733540958751e-06, |
|
"loss": 1.876, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.143053628648897e-06, |
|
"loss": 1.9668, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.139367294352966e-06, |
|
"loss": 2.0875, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.135674550915721e-06, |
|
"loss": 1.8277, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.131975411204257e-06, |
|
"loss": 1.9919, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 5.1282698881079574e-06, |
|
"loss": 1.8748, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.124557994538446e-06, |
|
"loss": 1.6749, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.1208397434295465e-06, |
|
"loss": 2.0019, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.117115147737234e-06, |
|
"loss": 2.0223, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 5.1133842204395914e-06, |
|
"loss": 1.9228, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.109646974536761e-06, |
|
"loss": 2.0, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.105903423050907e-06, |
|
"loss": 1.9856, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.102153579026159e-06, |
|
"loss": 1.9863, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"eval_loss": 1.9678696393966675, |
|
"eval_runtime": 72.6404, |
|
"eval_samples_per_second": 8.934, |
|
"eval_steps_per_second": 8.934, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.098397455528576e-06, |
|
"loss": 1.9114, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.094635065646096e-06, |
|
"loss": 2.1427, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.090866422488495e-06, |
|
"loss": 2.0377, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.9140625, |
|
"learning_rate": 5.087091539187333e-06, |
|
"loss": 2.0124, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.083310428895916e-06, |
|
"loss": 1.878, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 5.079523104789248e-06, |
|
"loss": 1.6719, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.0757295800639846e-06, |
|
"loss": 1.8464, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.071929867938385e-06, |
|
"loss": 2.02, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.06812398165227e-06, |
|
"loss": 2.1547, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.064311934466972e-06, |
|
"loss": 1.8403, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 5.060493739665293e-06, |
|
"loss": 1.9878, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.056669410551454e-06, |
|
"loss": 1.9279, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.05283896045105e-06, |
|
"loss": 2.2803, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.049002402711008e-06, |
|
"loss": 1.9853, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.045159750699532e-06, |
|
"loss": 2.0884, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.041311017806063e-06, |
|
"loss": 1.9208, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 5.037456217441231e-06, |
|
"loss": 2.0662, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.033595363036805e-06, |
|
"loss": 1.9996, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.029728468045654e-06, |
|
"loss": 1.8858, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.0258555459416885e-06, |
|
"loss": 1.8735, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.021976610219827e-06, |
|
"loss": 1.8138, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.018091674395937e-06, |
|
"loss": 2.1505, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 5.014200752006792e-06, |
|
"loss": 1.8376, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.0103038566100295e-06, |
|
"loss": 2.1275, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.006401001784099e-06, |
|
"loss": 1.9623, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.002492201128211e-06, |
|
"loss": 1.7547, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.998577468262297e-06, |
|
"loss": 2.0941, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.9946568168269615e-06, |
|
"loss": 1.9377, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.990730260483425e-06, |
|
"loss": 2.0837, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.98679781291349e-06, |
|
"loss": 1.8037, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.98285948781948e-06, |
|
"loss": 2.0301, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.9789152989242045e-06, |
|
"loss": 1.999, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.974965259970902e-06, |
|
"loss": 1.9322, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.9710093847231955e-06, |
|
"loss": 1.8256, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 4.967047686965043e-06, |
|
"loss": 2.0594, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.963080180500692e-06, |
|
"loss": 2.1973, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.95910687915463e-06, |
|
"loss": 2.0154, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.9551277967715376e-06, |
|
"loss": 1.9276, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.951142947216235e-06, |
|
"loss": 1.8117, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.9471523443736395e-06, |
|
"loss": 1.9427, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.943156002148719e-06, |
|
"loss": 1.9366, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.939153934466435e-06, |
|
"loss": 1.9254, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.9351461552717004e-06, |
|
"loss": 1.9774, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.9311326785293315e-06, |
|
"loss": 1.8385, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.927113518223993e-06, |
|
"loss": 1.9024, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.923088688360158e-06, |
|
"loss": 1.8878, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.919058202962052e-06, |
|
"loss": 1.8002, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.915022076073608e-06, |
|
"loss": 1.9392, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.910980321758417e-06, |
|
"loss": 2.0364, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.906932954099677e-06, |
|
"loss": 1.8918, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.902879987200145e-06, |
|
"loss": 2.2092, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.898821435182088e-06, |
|
"loss": 1.985, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.894757312187237e-06, |
|
"loss": 1.9997, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.890687632376731e-06, |
|
"loss": 1.9681, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 4.886612409931073e-06, |
|
"loss": 1.9677, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 4.882531659050079e-06, |
|
"loss": 1.9783, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.8784453939528295e-06, |
|
"loss": 2.1565, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 4.8743536288776144e-06, |
|
"loss": 2.0744, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.8702563780818926e-06, |
|
"loss": 1.9511, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.8661536558422356e-06, |
|
"loss": 1.7847, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"eval_loss": 1.9644558429718018, |
|
"eval_runtime": 72.8255, |
|
"eval_samples_per_second": 8.912, |
|
"eval_steps_per_second": 8.912, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.86204547645428e-06, |
|
"loss": 2.1285, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.857931854232679e-06, |
|
"loss": 1.7975, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.853812803511047e-06, |
|
"loss": 1.8594, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.849688338641919e-06, |
|
"loss": 1.6401, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.84555847399669e-06, |
|
"loss": 1.8446, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.8414232239655735e-06, |
|
"loss": 1.8468, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.837282602957547e-06, |
|
"loss": 1.922, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.833136625400302e-06, |
|
"loss": 2.0706, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 4.828985305740195e-06, |
|
"loss": 1.9775, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.824828658442199e-06, |
|
"loss": 1.9182, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.820666697989845e-06, |
|
"loss": 2.1004, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.816499438885184e-06, |
|
"loss": 1.8518, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.812326895648727e-06, |
|
"loss": 2.0677, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 4.808149082819394e-06, |
|
"loss": 1.9105, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 4.803966014954471e-06, |
|
"loss": 1.9669, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.799777706629555e-06, |
|
"loss": 1.9302, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.795584172438498e-06, |
|
"loss": 1.7689, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.791385426993366e-06, |
|
"loss": 1.8315, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.787181484924381e-06, |
|
"loss": 1.8672, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 4.7829723608798736e-06, |
|
"loss": 2.0861, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.7787580695262295e-06, |
|
"loss": 1.9532, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.7745386255478385e-06, |
|
"loss": 1.7258, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.7703140436470486e-06, |
|
"loss": 1.7769, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 4.766084338544105e-06, |
|
"loss": 2.0005, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.761849524977107e-06, |
|
"loss": 1.9948, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.757609617701957e-06, |
|
"loss": 1.9489, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.753364631492299e-06, |
|
"loss": 1.863, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.74911458113948e-06, |
|
"loss": 1.947, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 4.744859481452491e-06, |
|
"loss": 1.9949, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.740599347257917e-06, |
|
"loss": 1.9907, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.736334193399884e-06, |
|
"loss": 2.0075, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.73206403474001e-06, |
|
"loss": 1.7911, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.7277888861573546e-06, |
|
"loss": 1.8767, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.723508762548356e-06, |
|
"loss": 1.8936, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 4.719223678826798e-06, |
|
"loss": 2.052, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 4.714933649923741e-06, |
|
"loss": 2.0797, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.710638690787478e-06, |
|
"loss": 1.9925, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.70633881638348e-06, |
|
"loss": 1.9232, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 4.702034041694348e-06, |
|
"loss": 1.8591, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.6977243817197546e-06, |
|
"loss": 1.723, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.693409851476397e-06, |
|
"loss": 2.0273, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.6890904659979405e-06, |
|
"loss": 1.9799, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.6847662403349685e-06, |
|
"loss": 1.9064, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.680437189554931e-06, |
|
"loss": 1.8878, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.676103328742091e-06, |
|
"loss": 2.0737, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 2.0, |
|
"learning_rate": 4.671764672997469e-06, |
|
"loss": 1.7712, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 4.6674212374387955e-06, |
|
"loss": 1.7269, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.663073037200457e-06, |
|
"loss": 2.0273, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.658720087433437e-06, |
|
"loss": 2.0479, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.654362403305274e-06, |
|
"loss": 1.9415, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 4.65e-06, |
|
"loss": 1.9311, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.645632892718092e-06, |
|
"loss": 1.8846, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.641261096676417e-06, |
|
"loss": 1.9105, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.636884627108181e-06, |
|
"loss": 1.7741, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.63250349926287e-06, |
|
"loss": 2.0834, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 4.628117728406209e-06, |
|
"loss": 1.9291, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.623727329820092e-06, |
|
"loss": 1.9338, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.619332318802547e-06, |
|
"loss": 1.9905, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.6149327106676686e-06, |
|
"loss": 2.0903, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 4.61052852074557e-06, |
|
"loss": 1.9864, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"eval_loss": 1.9622352123260498, |
|
"eval_runtime": 72.6996, |
|
"eval_samples_per_second": 8.927, |
|
"eval_steps_per_second": 8.927, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.606119764382332e-06, |
|
"loss": 2.0866, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.601706456939942e-06, |
|
"loss": 1.8834, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.597288613796251e-06, |
|
"loss": 2.0631, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.592866250344909e-06, |
|
"loss": 1.9722, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.588439381995321e-06, |
|
"loss": 1.9687, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.584008024172588e-06, |
|
"loss": 1.9895, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.57957219231745e-06, |
|
"loss": 1.8916, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.575131901886244e-06, |
|
"loss": 1.9296, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.570687168350835e-06, |
|
"loss": 1.9725, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 4.566238007198575e-06, |
|
"loss": 1.9202, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 4.561784433932243e-06, |
|
"loss": 1.9359, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.557326464069988e-06, |
|
"loss": 2.1014, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.552864113145281e-06, |
|
"loss": 1.9649, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.548397396706861e-06, |
|
"loss": 1.7436, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 4.543926330318675e-06, |
|
"loss": 1.7933, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.5394509295598275e-06, |
|
"loss": 1.8842, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.534971210024528e-06, |
|
"loss": 1.9636, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.530487187322031e-06, |
|
"loss": 1.9325, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 4.525998877076587e-06, |
|
"loss": 2.1198, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.521506294927387e-06, |
|
"loss": 1.8449, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.517009456528504e-06, |
|
"loss": 1.7361, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.512508377548847e-06, |
|
"loss": 1.9924, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.5080030736720956e-06, |
|
"loss": 1.888, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.503493560596654e-06, |
|
"loss": 1.809, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.49897985403559e-06, |
|
"loss": 1.8108, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 4.494461969716588e-06, |
|
"loss": 2.2171, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 4.489939923381884e-06, |
|
"loss": 1.7997, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.485413730788222e-06, |
|
"loss": 1.9431, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.4808834077067865e-06, |
|
"loss": 1.7227, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 4.476348969923162e-06, |
|
"loss": 1.96, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.471810433237263e-06, |
|
"loss": 1.9317, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.467267813463294e-06, |
|
"loss": 1.933, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.462721126429678e-06, |
|
"loss": 1.9302, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.458170387979017e-06, |
|
"loss": 2.0257, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.453615613968028e-06, |
|
"loss": 2.0193, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.44905682026749e-06, |
|
"loss": 1.9601, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 4.444494022762184e-06, |
|
"loss": 2.0479, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 4.43992723735085e-06, |
|
"loss": 1.9051, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.435356479946116e-06, |
|
"loss": 1.8617, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.430781766474452e-06, |
|
"loss": 1.8091, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 4.426203112876117e-06, |
|
"loss": 2.005, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 4.421620535105094e-06, |
|
"loss": 1.9742, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.417034049129042e-06, |
|
"loss": 1.856, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 4.4124436709292355e-06, |
|
"loss": 2.0985, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.407849416500515e-06, |
|
"loss": 1.9179, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.403251301851225e-06, |
|
"loss": 1.7981, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.398649343003162e-06, |
|
"loss": 1.9737, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 4.394043555991512e-06, |
|
"loss": 1.782, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.3894339568648086e-06, |
|
"loss": 1.8265, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.384820561684862e-06, |
|
"loss": 1.9757, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.3802033865267116e-06, |
|
"loss": 1.8808, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 4.375582447478569e-06, |
|
"loss": 1.908, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 4.370957760641759e-06, |
|
"loss": 1.837, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.366329342130667e-06, |
|
"loss": 1.8169, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.361697208072677e-06, |
|
"loss": 1.8081, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 4.357061374608125e-06, |
|
"loss": 1.9752, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.352421857890234e-06, |
|
"loss": 1.8072, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.347778674085065e-06, |
|
"loss": 1.896, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 4.343131839371447e-06, |
|
"loss": 1.752, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.338481369940943e-06, |
|
"loss": 1.8197, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"eval_loss": 1.9600226879119873, |
|
"eval_runtime": 72.5961, |
|
"eval_samples_per_second": 8.94, |
|
"eval_steps_per_second": 8.94, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.3338272819977736e-06, |
|
"loss": 1.9461, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.329169591758768e-06, |
|
"loss": 1.9262, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.324508315453309e-06, |
|
"loss": 1.8615, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.319843469323273e-06, |
|
"loss": 1.7861, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.315175069622978e-06, |
|
"loss": 1.8771, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.310503132619122e-06, |
|
"loss": 2.0245, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.305827674590731e-06, |
|
"loss": 1.8933, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.301148711829094e-06, |
|
"loss": 2.0775, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 4.2964662606377195e-06, |
|
"loss": 2.0916, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.291780337332265e-06, |
|
"loss": 1.9606, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 4.28709095824049e-06, |
|
"loss": 1.9152, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.282398139702194e-06, |
|
"loss": 1.8195, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 4.277701898069161e-06, |
|
"loss": 1.7624, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.273002249705102e-06, |
|
"loss": 1.8801, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 4.268299210985601e-06, |
|
"loss": 2.0596, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.2635927982980544e-06, |
|
"loss": 2.053, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.258883028041611e-06, |
|
"loss": 1.902, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.254169916627126e-06, |
|
"loss": 1.735, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.249453480477089e-06, |
|
"loss": 1.8353, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 4.244733736025581e-06, |
|
"loss": 1.9626, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.240010699718206e-06, |
|
"loss": 1.821, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.23528438801204e-06, |
|
"loss": 1.7279, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.230554817375572e-06, |
|
"loss": 1.9597, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 4.225822004288648e-06, |
|
"loss": 2.2468, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.221085965242408e-06, |
|
"loss": 1.9827, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.216346716739235e-06, |
|
"loss": 1.8237, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.211604275292696e-06, |
|
"loss": 1.9025, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.206858657427482e-06, |
|
"loss": 2.0194, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 4.202109879679353e-06, |
|
"loss": 1.9849, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.1973579585950805e-06, |
|
"loss": 2.0062, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 4.192602910732385e-06, |
|
"loss": 2.0088, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.1878447526598875e-06, |
|
"loss": 1.8793, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.183083500957039e-06, |
|
"loss": 1.8007, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.178319172214075e-06, |
|
"loss": 1.6391, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 4.1735517830319554e-06, |
|
"loss": 2.0541, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.168781350022296e-06, |
|
"loss": 1.8274, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.164007889807325e-06, |
|
"loss": 1.9349, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.159231419019818e-06, |
|
"loss": 2.2892, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.154451954303038e-06, |
|
"loss": 2.0556, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.1496695123106805e-06, |
|
"loss": 1.9248, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.1448841097068166e-06, |
|
"loss": 2.0287, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.140095763165836e-06, |
|
"loss": 1.9871, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.135304489372379e-06, |
|
"loss": 1.8919, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 4.130510305021293e-06, |
|
"loss": 2.1305, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.125713226817563e-06, |
|
"loss": 1.8925, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.120913271476259e-06, |
|
"loss": 1.8463, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.116110455722474e-06, |
|
"loss": 1.5727, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 4.111304796291272e-06, |
|
"loss": 1.9468, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.10649630992762e-06, |
|
"loss": 1.9911, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 4.1016850133863425e-06, |
|
"loss": 2.0594, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.096870923432048e-06, |
|
"loss": 1.838, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.092054056839085e-06, |
|
"loss": 1.8663, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.087234430391472e-06, |
|
"loss": 2.1704, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.0824120608828466e-06, |
|
"loss": 1.8005, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 4.077586965116406e-06, |
|
"loss": 1.9346, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 4.072759159904841e-06, |
|
"loss": 2.179, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.067928662070291e-06, |
|
"loss": 2.0209, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 4.063095488444271e-06, |
|
"loss": 1.9372, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.058259655867623e-06, |
|
"loss": 1.9265, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.053421181190454e-06, |
|
"loss": 1.9848, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"eval_loss": 1.9584399461746216, |
|
"eval_runtime": 72.5746, |
|
"eval_samples_per_second": 8.943, |
|
"eval_steps_per_second": 8.943, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 4.048580081272078e-06, |
|
"loss": 2.0108, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.0437363729809515e-06, |
|
"loss": 1.8503, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.038890073194625e-06, |
|
"loss": 1.7675, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.034041198799675e-06, |
|
"loss": 2.0385, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.029189766691654e-06, |
|
"loss": 1.8441, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.024335793775022e-06, |
|
"loss": 1.8735, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.019479296963094e-06, |
|
"loss": 2.027, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.0146202931779806e-06, |
|
"loss": 1.9666, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 4.009758799350525e-06, |
|
"loss": 1.8787, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.004894832420249e-06, |
|
"loss": 1.8996, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.0000284093352885e-06, |
|
"loss": 1.8736, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 3.995159547052345e-06, |
|
"loss": 2.0839, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.9902882625366095e-06, |
|
"loss": 2.0342, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.985414572761721e-06, |
|
"loss": 1.9169, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.980538494709696e-06, |
|
"loss": 1.8123, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.975660045370871e-06, |
|
"loss": 1.7921, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.970779241743851e-06, |
|
"loss": 1.7977, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 3.965896100835437e-06, |
|
"loss": 1.826, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.96101063966058e-06, |
|
"loss": 1.9129, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 3.956122875242313e-06, |
|
"loss": 2.0396, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 3.9512328246116925e-06, |
|
"loss": 2.0096, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.946340504807748e-06, |
|
"loss": 1.9644, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 3.9414459328774095e-06, |
|
"loss": 2.0962, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.936549125875454e-06, |
|
"loss": 2.032, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 3.931650100864452e-06, |
|
"loss": 2.0593, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.9267488749146966e-06, |
|
"loss": 1.9617, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 3.921845465104155e-06, |
|
"loss": 1.9175, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 3.916939888518399e-06, |
|
"loss": 1.8773, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.9120321622505544e-06, |
|
"loss": 2.002, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.907122303401236e-06, |
|
"loss": 1.9718, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 3.902210329078487e-06, |
|
"loss": 2.0348, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.897296256397727e-06, |
|
"loss": 1.8621, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.8923801024816825e-06, |
|
"loss": 2.0355, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.887461884460332e-06, |
|
"loss": 1.7751, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.882541619470851e-06, |
|
"loss": 1.876, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 3.8776193246575425e-06, |
|
"loss": 1.9031, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 3.872695017171786e-06, |
|
"loss": 1.8683, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 3.867768714171971e-06, |
|
"loss": 2.0642, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 3.862840432823439e-06, |
|
"loss": 1.9005, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.857910190298431e-06, |
|
"loss": 1.9538, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.852978003776015e-06, |
|
"loss": 1.7715, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.848043890442038e-06, |
|
"loss": 1.8265, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 3.843107867489056e-06, |
|
"loss": 1.9873, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.8381699521162834e-06, |
|
"loss": 2.0191, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.8332301615295265e-06, |
|
"loss": 1.8491, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 3.828288512941122e-06, |
|
"loss": 1.9185, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 3.823345023569889e-06, |
|
"loss": 2.0706, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.818399710641055e-06, |
|
"loss": 2.1711, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 3.8134525913861997e-06, |
|
"loss": 1.8938, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 3.8085036830432033e-06, |
|
"loss": 1.8994, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 3.8035530028561736e-06, |
|
"loss": 1.7224, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 3.798600568075397e-06, |
|
"loss": 2.2053, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.7936463959572707e-06, |
|
"loss": 1.8359, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 3.7886905037642454e-06, |
|
"loss": 1.8931, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.783732908764769e-06, |
|
"loss": 1.7162, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.778773628233217e-06, |
|
"loss": 1.9625, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.7738126794498437e-06, |
|
"loss": 1.847, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.7688500797007126e-06, |
|
"loss": 1.6776, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 3.7638858462776402e-06, |
|
"loss": 1.8384, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.7589199964781372e-06, |
|
"loss": 1.9262, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"eval_loss": 1.9571281671524048, |
|
"eval_runtime": 72.4769, |
|
"eval_samples_per_second": 8.955, |
|
"eval_steps_per_second": 8.955, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 3.753952547605344e-06, |
|
"loss": 1.9915, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.748983516967977e-06, |
|
"loss": 2.0314, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.7440129218802585e-06, |
|
"loss": 1.9247, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.739040779661867e-06, |
|
"loss": 1.9936, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.7340671076378677e-06, |
|
"loss": 1.6597, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 3.72909192313866e-06, |
|
"loss": 1.6903, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.724115243499912e-06, |
|
"loss": 1.7956, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 3.7191370860624998e-06, |
|
"loss": 1.8839, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.7141574681724506e-06, |
|
"loss": 1.8973, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 3.7091764071808814e-06, |
|
"loss": 2.0264, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.7041939204439346e-06, |
|
"loss": 1.7518, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.6992100253227237e-06, |
|
"loss": 1.9336, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.6942247391832676e-06, |
|
"loss": 1.7735, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 3.689238079396432e-06, |
|
"loss": 1.891, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.6842500633378707e-06, |
|
"loss": 2.0223, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.6792607083879604e-06, |
|
"loss": 2.0054, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.6742700319317468e-06, |
|
"loss": 1.81, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.669278051358877e-06, |
|
"loss": 1.9736, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.664284784063544e-06, |
|
"loss": 1.9764, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 3.6592902474444248e-06, |
|
"loss": 2.1843, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 3.6542944589046175e-06, |
|
"loss": 1.8826, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.6492974358515837e-06, |
|
"loss": 1.8483, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.644299195697085e-06, |
|
"loss": 1.6964, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.625, |
|
"learning_rate": 3.639299755857125e-06, |
|
"loss": 2.0857, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.6342991337518898e-06, |
|
"loss": 1.8794, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.62929734680568e-06, |
|
"loss": 1.8154, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.6242944124468605e-06, |
|
"loss": 1.852, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.6192903481077883e-06, |
|
"loss": 2.0246, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.614285171224765e-06, |
|
"loss": 1.953, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.6092788992379605e-06, |
|
"loss": 1.7793, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.604271549591367e-06, |
|
"loss": 1.846, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.5992631397327303e-06, |
|
"loss": 1.84, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.5942536871134875e-06, |
|
"loss": 1.9115, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 3.589243209188713e-06, |
|
"loss": 1.7918, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.584231723417051e-06, |
|
"loss": 2.1052, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.5792192472606596e-06, |
|
"loss": 2.0174, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 3.5742057981851454e-06, |
|
"loss": 1.9988, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.5691913936595063e-06, |
|
"loss": 1.9073, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.564176051156072e-06, |
|
"loss": 1.9155, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.559159788150435e-06, |
|
"loss": 1.8487, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.554142622121399e-06, |
|
"loss": 2.015, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 3.5491245705509153e-06, |
|
"loss": 2.0941, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.5441056509240177e-06, |
|
"loss": 1.9948, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 3.5390858807287653e-06, |
|
"loss": 2.1411, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 3.534065277456182e-06, |
|
"loss": 2.1164, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 3.529043858600194e-06, |
|
"loss": 1.8789, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 3.5240216416575704e-06, |
|
"loss": 1.9746, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.518998644127859e-06, |
|
"loss": 1.7627, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.513974883513329e-06, |
|
"loss": 1.938, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.5089503773189084e-06, |
|
"loss": 1.9516, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 3.5039251430521237e-06, |
|
"loss": 1.8827, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 3.4988991982230364e-06, |
|
"loss": 1.8046, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.4938725603441848e-06, |
|
"loss": 1.9488, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 3.4888452469305233e-06, |
|
"loss": 1.9693, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.4838172754993586e-06, |
|
"loss": 1.9955, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 3.478788663570291e-06, |
|
"loss": 1.947, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.473759428665152e-06, |
|
"loss": 1.9457, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 3.4687295883079457e-06, |
|
"loss": 1.9994, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 3.463699160024783e-06, |
|
"loss": 1.7545, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 3.4586681613438247e-06, |
|
"loss": 2.0359, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"eval_loss": 1.9557842016220093, |
|
"eval_runtime": 72.8827, |
|
"eval_samples_per_second": 8.905, |
|
"eval_steps_per_second": 8.905, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 3.4536366097952182e-06, |
|
"loss": 1.8549, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.448604522911041e-06, |
|
"loss": 1.8572, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.4435719182252302e-06, |
|
"loss": 1.8748, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.438538813273532e-06, |
|
"loss": 1.9317, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.4335052255934334e-06, |
|
"loss": 1.928, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.428471172724105e-06, |
|
"loss": 1.933, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.423436672206335e-06, |
|
"loss": 1.9946, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 3.418401741582474e-06, |
|
"loss": 1.9355, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 3.413366398396373e-06, |
|
"loss": 1.9021, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 3.4083306601933165e-06, |
|
"loss": 2.0507, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.403294544519968e-06, |
|
"loss": 1.9046, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 3.3982580689243053e-06, |
|
"loss": 1.8577, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 3.3932212509555613e-06, |
|
"loss": 1.9135, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.388184108164161e-06, |
|
"loss": 2.0145, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.383146658101661e-06, |
|
"loss": 2.0523, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.378108918320688e-06, |
|
"loss": 1.9216, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.373070906374882e-06, |
|
"loss": 1.78, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 3.368032639818825e-06, |
|
"loss": 1.8309, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 3.3629941362079933e-06, |
|
"loss": 1.861, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.3579554130986842e-06, |
|
"loss": 2.0568, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.35291648804796e-06, |
|
"loss": 1.8546, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 3.34787737861359e-06, |
|
"loss": 1.9779, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.3428381023539818e-06, |
|
"loss": 1.876, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.3377986768281275e-06, |
|
"loss": 1.8134, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.3327591195955384e-06, |
|
"loss": 1.999, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 3.327719448216183e-06, |
|
"loss": 2.1062, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.3226796802504303e-06, |
|
"loss": 2.1174, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.317639833258985e-06, |
|
"loss": 1.7734, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.312599924802826e-06, |
|
"loss": 1.8079, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.3075599724431464e-06, |
|
"loss": 1.7901, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.302519993741294e-06, |
|
"loss": 2.0501, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.2974800062587066e-06, |
|
"loss": 1.8301, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.2924400275568548e-06, |
|
"loss": 1.8754, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 3.287400075197175e-06, |
|
"loss": 1.767, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 3.2823601667410156e-06, |
|
"loss": 2.1026, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.2773203197495704e-06, |
|
"loss": 1.929, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 3.272280551783818e-06, |
|
"loss": 1.9192, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.2672408804044636e-06, |
|
"loss": 1.8981, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.2622013231718724e-06, |
|
"loss": 1.9101, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.2571618976460186e-06, |
|
"loss": 2.024, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 3.2521226213864105e-06, |
|
"loss": 1.8604, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.24708351195204e-06, |
|
"loss": 1.8735, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.2420445869013165e-06, |
|
"loss": 1.8987, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.2370058637920066e-06, |
|
"loss": 1.9249, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.2319673601811747e-06, |
|
"loss": 1.82, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 3.2269290936251187e-06, |
|
"loss": 1.9913, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 3.2218910816793125e-06, |
|
"loss": 1.9907, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.75, |
|
"learning_rate": 3.2168533418983404e-06, |
|
"loss": 2.0234, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.21181589183584e-06, |
|
"loss": 1.7599, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 3.2067787490444394e-06, |
|
"loss": 1.7259, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.2017419310756955e-06, |
|
"loss": 1.9728, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.196705455480033e-06, |
|
"loss": 1.83, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.1916693398066847e-06, |
|
"loss": 1.662, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 3.186633601603627e-06, |
|
"loss": 1.8832, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.181598258417526e-06, |
|
"loss": 2.0136, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.1765633277936655e-06, |
|
"loss": 1.8746, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 3.171528827275896e-06, |
|
"loss": 1.8762, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.1664947744065665e-06, |
|
"loss": 1.9944, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.1614611867264677e-06, |
|
"loss": 1.8449, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.1564280817747697e-06, |
|
"loss": 1.8945, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"eval_loss": 1.9549872875213623, |
|
"eval_runtime": 72.7206, |
|
"eval_samples_per_second": 8.925, |
|
"eval_steps_per_second": 8.925, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 3.1513954770889597e-06, |
|
"loss": 1.8645, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 3.146363390204782e-06, |
|
"loss": 1.8943, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 3.1413318386561765e-06, |
|
"loss": 2.1025, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 3.136300839975218e-06, |
|
"loss": 1.9225, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.1312704116920546e-06, |
|
"loss": 2.0847, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 3.126240571334848e-06, |
|
"loss": 1.7204, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.12121133642971e-06, |
|
"loss": 1.8552, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 3.116182724500642e-06, |
|
"loss": 1.8653, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 3.1111547530694767e-06, |
|
"loss": 1.9546, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.106127439655816e-06, |
|
"loss": 2.0012, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.1011008017769644e-06, |
|
"loss": 1.9694, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 3.096074856947877e-06, |
|
"loss": 2.0119, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 3.0910496226810924e-06, |
|
"loss": 1.7685, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.086025116486671e-06, |
|
"loss": 1.9522, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.081001355872141e-06, |
|
"loss": 2.0921, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 3.0759783583424303e-06, |
|
"loss": 1.9473, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.0709561413998067e-06, |
|
"loss": 1.868, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 3.065934722543819e-06, |
|
"loss": 2.0399, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.0609141192712355e-06, |
|
"loss": 2.0686, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.055894349075983e-06, |
|
"loss": 1.7349, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.0508754294490854e-06, |
|
"loss": 1.8013, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.045857377878601e-06, |
|
"loss": 1.7967, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.0408402118495656e-06, |
|
"loss": 1.8798, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.035823948843928e-06, |
|
"loss": 1.9516, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 3.0308086063404936e-06, |
|
"loss": 1.9727, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.0257942018148554e-06, |
|
"loss": 1.7369, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.020780752739341e-06, |
|
"loss": 1.9381, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 3.0157682765829498e-06, |
|
"loss": 1.8097, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.010756790811287e-06, |
|
"loss": 1.8961, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.0057463128865125e-06, |
|
"loss": 1.9253, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.0007368602672705e-06, |
|
"loss": 1.861, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 2.9957284504086337e-06, |
|
"loss": 2.093, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.9907211007620407e-06, |
|
"loss": 1.873, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.985714828775237e-06, |
|
"loss": 1.9115, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 2.9807096518922116e-06, |
|
"loss": 1.8313, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.975705587553141e-06, |
|
"loss": 1.8919, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.9707026531943204e-06, |
|
"loss": 1.8009, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.965700866248111e-06, |
|
"loss": 2.006, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.9607002441428745e-06, |
|
"loss": 1.9849, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.9557008043029154e-06, |
|
"loss": 1.8528, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 2.950702564148417e-06, |
|
"loss": 2.0375, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.945705541095383e-06, |
|
"loss": 1.9146, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 2.9407097525555764e-06, |
|
"loss": 1.8071, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.9357152159364554e-06, |
|
"loss": 1.8935, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.9307219486411228e-06, |
|
"loss": 1.8324, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.925729968068254e-06, |
|
"loss": 1.9883, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.92073929161204e-06, |
|
"loss": 1.9158, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 2.915749936662131e-06, |
|
"loss": 1.9927, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 2.9107619206035687e-06, |
|
"loss": 1.9653, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.905775260816733e-06, |
|
"loss": 1.9164, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.900789974677276e-06, |
|
"loss": 1.8986, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 2.895806079556066e-06, |
|
"loss": 2.0182, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 2.8908235928191198e-06, |
|
"loss": 1.7159, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.8858425318275493e-06, |
|
"loss": 2.0026, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.880862913937501e-06, |
|
"loss": 2.0248, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.8758847565000887e-06, |
|
"loss": 1.8751, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.8709080768613406e-06, |
|
"loss": 1.7352, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.8659328923621335e-06, |
|
"loss": 1.9528, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.860959220338134e-06, |
|
"loss": 1.8967, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.8559870781197414e-06, |
|
"loss": 1.9923, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"eval_loss": 1.953945279121399, |
|
"eval_runtime": 72.8216, |
|
"eval_samples_per_second": 8.912, |
|
"eval_steps_per_second": 8.912, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.851016483032024e-06, |
|
"loss": 1.8657, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 2.846047452394656e-06, |
|
"loss": 1.8847, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.8410800035218635e-06, |
|
"loss": 2.02, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.83611415372236e-06, |
|
"loss": 1.8547, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.831149920299288e-06, |
|
"loss": 1.7854, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.826187320550157e-06, |
|
"loss": 1.7301, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.8212263717667836e-06, |
|
"loss": 1.8653, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.8162670912352323e-06, |
|
"loss": 1.7948, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 2.811309496235754e-06, |
|
"loss": 1.9075, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.80635360404273e-06, |
|
"loss": 1.8065, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.801399431924603e-06, |
|
"loss": 1.9205, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 2.796446997143827e-06, |
|
"loss": 1.9284, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.791496316956798e-06, |
|
"loss": 1.9412, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.7865474086138003e-06, |
|
"loss": 1.985, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.781600289358946e-06, |
|
"loss": 1.8522, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.776654976430111e-06, |
|
"loss": 1.8031, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.771711487058878e-06, |
|
"loss": 1.8725, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.766769838470476e-06, |
|
"loss": 2.0065, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 2.761830047883717e-06, |
|
"loss": 2.0794, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.7568921325109447e-06, |
|
"loss": 1.7627, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.7519561095579635e-06, |
|
"loss": 1.8511, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.7470219962239855e-06, |
|
"loss": 1.9931, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.742089809701571e-06, |
|
"loss": 1.8826, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 2.737159567176561e-06, |
|
"loss": 1.9399, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.7322312858280303e-06, |
|
"loss": 1.7728, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.727304982828215e-06, |
|
"loss": 2.062, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.722380675342458e-06, |
|
"loss": 1.8854, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 2.71745838052915e-06, |
|
"loss": 2.0153, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 2.7125381155396677e-06, |
|
"loss": 1.7867, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 2.7076198975183187e-06, |
|
"loss": 1.6952, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.7027037436022736e-06, |
|
"loss": 1.8918, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 2.6977896709215137e-06, |
|
"loss": 1.8403, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.6928776965987655e-06, |
|
"loss": 1.8227, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.625, |
|
"learning_rate": 2.6879678377494463e-06, |
|
"loss": 1.8661, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 2.6830601114816014e-06, |
|
"loss": 1.905, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.6781545348958467e-06, |
|
"loss": 1.6843, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.6732511250853046e-06, |
|
"loss": 1.8012, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.6683498991355496e-06, |
|
"loss": 1.9774, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 2.6634508741245463e-06, |
|
"loss": 2.1853, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.658554067122592e-06, |
|
"loss": 1.9452, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.653659495192253e-06, |
|
"loss": 1.9929, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.6487671753883074e-06, |
|
"loss": 2.026, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.643877124757689e-06, |
|
"loss": 1.9421, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.63898936033942e-06, |
|
"loss": 2.0657, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 2.634103899164563e-06, |
|
"loss": 1.964, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.6292207582561494e-06, |
|
"loss": 1.938, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 2.6243399546291285e-06, |
|
"loss": 2.0169, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.619461505290305e-06, |
|
"loss": 1.913, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.6145854272382794e-06, |
|
"loss": 1.8626, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.6097117374633904e-06, |
|
"loss": 1.8187, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.604840452947656e-06, |
|
"loss": 2.019, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.5999715906647114e-06, |
|
"loss": 1.9029, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.5951051675797527e-06, |
|
"loss": 1.8707, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.5902412006494745e-06, |
|
"loss": 1.9438, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.5853797068220198e-06, |
|
"loss": 2.0, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.580520703036906e-06, |
|
"loss": 1.8732, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 2.575664206224979e-06, |
|
"loss": 1.9073, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 2.5708102333083473e-06, |
|
"loss": 1.8259, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.5659588012003246e-06, |
|
"loss": 1.9296, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.561109926805377e-06, |
|
"loss": 1.9241, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"eval_loss": 1.953582763671875, |
|
"eval_runtime": 72.6019, |
|
"eval_samples_per_second": 8.939, |
|
"eval_steps_per_second": 8.939, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 2.5562636270190496e-06, |
|
"loss": 1.9805, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.551419918727923e-06, |
|
"loss": 1.9033, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 2.5465788188095457e-06, |
|
"loss": 1.8066, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.541740344132377e-06, |
|
"loss": 1.8673, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.5369045115557296e-06, |
|
"loss": 1.8727, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.5320713379297097e-06, |
|
"loss": 1.8473, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 2.52724084009516e-06, |
|
"loss": 1.802, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.522413034883596e-06, |
|
"loss": 2.0202, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.5175879391171534e-06, |
|
"loss": 1.8701, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.512765569608529e-06, |
|
"loss": 2.0256, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.507945943160916e-06, |
|
"loss": 1.8134, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.5031290765679528e-06, |
|
"loss": 1.8631, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 2.4983149866136587e-06, |
|
"loss": 1.8127, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 2.4935036900723794e-06, |
|
"loss": 1.9904, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.488695203708729e-06, |
|
"loss": 1.7608, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.4838895442775264e-06, |
|
"loss": 1.8589, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 2.4790867285237423e-06, |
|
"loss": 2.0151, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.474286773182438e-06, |
|
"loss": 1.8685, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 2.4694896949787075e-06, |
|
"loss": 1.683, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.464695510627622e-06, |
|
"loss": 1.9681, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.4599042368341653e-06, |
|
"loss": 1.794, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.455115890293184e-06, |
|
"loss": 1.7639, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 2.450330487689321e-06, |
|
"loss": 1.8602, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.445548045696964e-06, |
|
"loss": 1.7026, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.440768580980183e-06, |
|
"loss": 1.8607, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.4359921101926755e-06, |
|
"loss": 1.8726, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 2.431218649977705e-06, |
|
"loss": 2.0619, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.426448216968046e-06, |
|
"loss": 1.7456, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.4216808277859242e-06, |
|
"loss": 1.8947, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.4169164990429614e-06, |
|
"loss": 1.8927, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.75, |
|
"learning_rate": 2.4121552473401137e-06, |
|
"loss": 1.8026, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 2.407397089267614e-06, |
|
"loss": 1.9866, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.4026420414049195e-06, |
|
"loss": 1.8591, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 2.3978901203206457e-06, |
|
"loss": 2.0265, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.393141342572518e-06, |
|
"loss": 1.7797, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 2.388395724707305e-06, |
|
"loss": 2.0544, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.383653283260766e-06, |
|
"loss": 1.8944, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.378914034757593e-06, |
|
"loss": 2.0089, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.3741779957113526e-06, |
|
"loss": 1.7302, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 2.369445182624428e-06, |
|
"loss": 1.9784, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 2.3647156119879606e-06, |
|
"loss": 2.0007, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.359989300281795e-06, |
|
"loss": 2.016, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 2.3552662639744206e-06, |
|
"loss": 1.9814, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.3505465195229108e-06, |
|
"loss": 1.7757, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.345830083372875e-06, |
|
"loss": 1.9739, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 2.3411169719583887e-06, |
|
"loss": 1.9702, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.3364072017019467e-06, |
|
"loss": 1.7358, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.331700789014399e-06, |
|
"loss": 1.8713, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.326997750294898e-06, |
|
"loss": 1.9347, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.32229810193084e-06, |
|
"loss": 1.6961, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.317601860297807e-06, |
|
"loss": 1.7551, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.3129090417595107e-06, |
|
"loss": 1.998, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.308219662667736e-06, |
|
"loss": 1.8426, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.3035337393622816e-06, |
|
"loss": 1.9068, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.2988512881709063e-06, |
|
"loss": 1.9824, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.29417232540927e-06, |
|
"loss": 1.8839, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.289496867380878e-06, |
|
"loss": 1.7661, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 2.284824930377023e-06, |
|
"loss": 1.9512, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.280156530676727e-06, |
|
"loss": 1.9426, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 2.275491684546692e-06, |
|
"loss": 2.1988, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"eval_loss": 1.9526869058609009, |
|
"eval_runtime": 72.6995, |
|
"eval_samples_per_second": 8.927, |
|
"eval_steps_per_second": 8.927, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.270830408241233e-06, |
|
"loss": 1.7793, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 2.266172718002227e-06, |
|
"loss": 1.838, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.2615186300590567e-06, |
|
"loss": 1.6998, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 2.2568681606285527e-06, |
|
"loss": 1.8002, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 2.252221325914937e-06, |
|
"loss": 2.073, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.2475781421097657e-06, |
|
"loss": 1.887, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.242938625391876e-06, |
|
"loss": 1.7672, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.238302791927324e-06, |
|
"loss": 1.8141, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.2336706578693345e-06, |
|
"loss": 1.8366, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 2.2290422393582413e-06, |
|
"loss": 1.9289, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.2244175525214313e-06, |
|
"loss": 2.059, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.219796613473289e-06, |
|
"loss": 1.8753, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.215179438315139e-06, |
|
"loss": 1.774, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 2.210566043135192e-06, |
|
"loss": 1.8905, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.2059564440084882e-06, |
|
"loss": 1.6761, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.201350656996839e-06, |
|
"loss": 1.8904, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.1967486981487753e-06, |
|
"loss": 1.8736, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.1921505834994855e-06, |
|
"loss": 1.8248, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.875, |
|
"learning_rate": 2.187556329070765e-06, |
|
"loss": 1.9285, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 2.182965950870959e-06, |
|
"loss": 1.9256, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.178379464894907e-06, |
|
"loss": 1.9867, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 2.173796887123885e-06, |
|
"loss": 1.7247, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.1692182335255495e-06, |
|
"loss": 1.9383, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.1646435200538856e-06, |
|
"loss": 1.7344, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.160072762649151e-06, |
|
"loss": 2.015, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 2.155505977237816e-06, |
|
"loss": 2.136, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 2.150943179732511e-06, |
|
"loss": 1.8755, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.1463843860319723e-06, |
|
"loss": 1.8817, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 2.1418296120209825e-06, |
|
"loss": 1.8505, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.1372788735703227e-06, |
|
"loss": 1.814, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 2.132732186536708e-06, |
|
"loss": 1.9639, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.1281895667627374e-06, |
|
"loss": 1.7337, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 2.1236510300768383e-06, |
|
"loss": 1.9272, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 2.1191165922932134e-06, |
|
"loss": 1.9235, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.11458626921178e-06, |
|
"loss": 2.0302, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.110060076618117e-06, |
|
"loss": 2.0152, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.105538030283414e-06, |
|
"loss": 1.9395, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 2.1010201459644097e-06, |
|
"loss": 1.9026, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 2.096506439403347e-06, |
|
"loss": 2.0946, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 2.091996926327905e-06, |
|
"loss": 1.8123, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 2.087491622451154e-06, |
|
"loss": 2.0841, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 2.082990543471497e-06, |
|
"loss": 1.8813, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.0784937050726136e-06, |
|
"loss": 1.8978, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 2.0740011229234137e-06, |
|
"loss": 1.7163, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 2.0695128126779696e-06, |
|
"loss": 1.9937, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 2.0650287899754726e-06, |
|
"loss": 2.1517, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.060549070440172e-06, |
|
"loss": 1.7763, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 2.056073669681324e-06, |
|
"loss": 1.9543, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 2.051602603293139e-06, |
|
"loss": 1.8608, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.0471358868547196e-06, |
|
"loss": 1.9044, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 2.0426735359300136e-06, |
|
"loss": 1.861, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.038215566067759e-06, |
|
"loss": 1.9506, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.033761992801425e-06, |
|
"loss": 1.9469, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.029312831649165e-06, |
|
"loss": 1.8447, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.0248680981137574e-06, |
|
"loss": 1.9012, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 2.0204278076825505e-06, |
|
"loss": 1.9715, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.0159919758274136e-06, |
|
"loss": 2.2625, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.011560618004679e-06, |
|
"loss": 1.9621, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.0071337496550908e-06, |
|
"loss": 2.0286, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.00271138620375e-06, |
|
"loss": 1.7899, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"eval_loss": 1.952858805656433, |
|
"eval_runtime": 72.6356, |
|
"eval_samples_per_second": 8.935, |
|
"eval_steps_per_second": 8.935, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.998293543060058e-06, |
|
"loss": 1.9355, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.9938802356176688e-06, |
|
"loss": 1.8697, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.989471479254429e-06, |
|
"loss": 1.7158, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.985067289332332e-06, |
|
"loss": 1.8872, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.9806676811974535e-06, |
|
"loss": 2.0431, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.976272670179909e-06, |
|
"loss": 1.8193, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.9718822715937937e-06, |
|
"loss": 1.9256, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.9674965007371302e-06, |
|
"loss": 1.8832, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.96311537289182e-06, |
|
"loss": 1.8657, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.958738903323583e-06, |
|
"loss": 1.9369, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.954367107281908e-06, |
|
"loss": 2.0463, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.950000000000001e-06, |
|
"loss": 1.9903, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.9456375966947263e-06, |
|
"loss": 1.8925, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.9412799125665633e-06, |
|
"loss": 1.7835, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.936926962799544e-06, |
|
"loss": 1.9661, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.9325787625612044e-06, |
|
"loss": 1.5986, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.9282353270025313e-06, |
|
"loss": 1.8952, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.92389667125791e-06, |
|
"loss": 1.9931, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.9195628104450694e-06, |
|
"loss": 1.9302, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.9152337596650327e-06, |
|
"loss": 1.7964, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.910909534002061e-06, |
|
"loss": 1.8921, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.9065901485236046e-06, |
|
"loss": 1.8691, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.9022756182802456e-06, |
|
"loss": 1.7877, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.8979659583056525e-06, |
|
"loss": 1.9212, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.8936611836165204e-06, |
|
"loss": 1.7434, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.8893613092125233e-06, |
|
"loss": 1.6815, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.8850663500762597e-06, |
|
"loss": 1.7606, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.8807763211732013e-06, |
|
"loss": 1.9047, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.8764912374516437e-06, |
|
"loss": 1.8633, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.8722111138426466e-06, |
|
"loss": 1.9066, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.8679359652599896e-06, |
|
"loss": 1.8429, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.8636658066001164e-06, |
|
"loss": 1.9299, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.8594006527420838e-06, |
|
"loss": 1.8668, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.8551405185475094e-06, |
|
"loss": 1.778, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.8508854188605209e-06, |
|
"loss": 1.8144, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.846635368507702e-06, |
|
"loss": 1.7307, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 1.8423903822980448e-06, |
|
"loss": 1.9232, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.8381504750228928e-06, |
|
"loss": 1.993, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.8339156614558957e-06, |
|
"loss": 1.8617, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.8296859563529524e-06, |
|
"loss": 1.7955, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.825461374452162e-06, |
|
"loss": 1.8889, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.8212419304737717e-06, |
|
"loss": 1.8679, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.8170276391201263e-06, |
|
"loss": 2.0214, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.8128185150756188e-06, |
|
"loss": 2.0213, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.8086145730066346e-06, |
|
"loss": 1.7442, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.8044158275615025e-06, |
|
"loss": 1.9483, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.800222293370446e-06, |
|
"loss": 1.8626, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.7960339850455288e-06, |
|
"loss": 1.8822, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.7918509171806064e-06, |
|
"loss": 1.6832, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.7876731043512742e-06, |
|
"loss": 1.9366, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.7835005611148163e-06, |
|
"loss": 1.7482, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.7793333020101554e-06, |
|
"loss": 1.926, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.7751713415578018e-06, |
|
"loss": 1.6752, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.7710146942598051e-06, |
|
"loss": 1.7899, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.7668633745996988e-06, |
|
"loss": 1.8611, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.7627173970424541e-06, |
|
"loss": 1.9212, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.7585767760344273e-06, |
|
"loss": 1.803, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.75444152600331e-06, |
|
"loss": 1.9312, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.750311661358081e-06, |
|
"loss": 1.8898, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.7461871964889528e-06, |
|
"loss": 1.8785, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"eval_loss": 1.9536255598068237, |
|
"eval_runtime": 72.8027, |
|
"eval_samples_per_second": 8.915, |
|
"eval_steps_per_second": 8.915, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.7420681457673218e-06, |
|
"loss": 1.8501, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.73795452354572e-06, |
|
"loss": 2.0064, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.7338463441577654e-06, |
|
"loss": 1.9383, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.7297436219181084e-06, |
|
"loss": 1.8458, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.7256463711223872e-06, |
|
"loss": 1.935, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.7215546060471724e-06, |
|
"loss": 1.755, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.7174683409499212e-06, |
|
"loss": 1.8595, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.7133875900689265e-06, |
|
"loss": 1.8653, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.7093123676232692e-06, |
|
"loss": 1.7989, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.7052426878127634e-06, |
|
"loss": 1.8884, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.7011785648179122e-06, |
|
"loss": 1.8548, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.6971200127998564e-06, |
|
"loss": 1.8745, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.693067045900323e-06, |
|
"loss": 1.7291, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.6890196782415828e-06, |
|
"loss": 1.9105, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.684977923926392e-06, |
|
"loss": 1.9538, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.6809417970379485e-06, |
|
"loss": 1.8723, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.676911311639843e-06, |
|
"loss": 1.9465, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 1.672886481776008e-06, |
|
"loss": 1.8716, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.6688673214706703e-06, |
|
"loss": 1.8797, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.6648538447283005e-06, |
|
"loss": 1.8554, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.6608460655335666e-06, |
|
"loss": 2.0352, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.6568439978512823e-06, |
|
"loss": 1.9972, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.6528476556263606e-06, |
|
"loss": 1.8229, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.6488570527837662e-06, |
|
"loss": 1.7553, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.6448722032284636e-06, |
|
"loss": 1.9446, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.6408931208453699e-06, |
|
"loss": 1.9821, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.6369198194993087e-06, |
|
"loss": 1.8894, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.6329523130349573e-06, |
|
"loss": 1.66, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.6289906152768046e-06, |
|
"loss": 1.8564, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.6250347400290976e-06, |
|
"loss": 1.9308, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.6210847010757952e-06, |
|
"loss": 1.9478, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.61714051218052e-06, |
|
"loss": 1.9453, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 1.6132021870865111e-06, |
|
"loss": 1.977, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.609269739516575e-06, |
|
"loss": 1.7592, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.6053431831730393e-06, |
|
"loss": 1.9182, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.6014225317377031e-06, |
|
"loss": 1.8977, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.5975077988717903e-06, |
|
"loss": 1.6754, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.5935989982159013e-06, |
|
"loss": 2.006, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.5896961433899705e-06, |
|
"loss": 2.0322, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.5857992479932086e-06, |
|
"loss": 1.8969, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.5819083256040646e-06, |
|
"loss": 1.7744, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.5780233897801735e-06, |
|
"loss": 1.7507, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 1.5741444540583106e-06, |
|
"loss": 1.7593, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 1.5702715319543467e-06, |
|
"loss": 2.0022, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.566404636963195e-06, |
|
"loss": 2.1224, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.5625437825587703e-06, |
|
"loss": 1.9616, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.5586889821939378e-06, |
|
"loss": 1.9013, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.5548402493004688e-06, |
|
"loss": 1.7741, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.625, |
|
"learning_rate": 1.5509975972889925e-06, |
|
"loss": 1.8791, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.5471610395489502e-06, |
|
"loss": 1.8647, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.5433305894485472e-06, |
|
"loss": 1.8453, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.539506260334708e-06, |
|
"loss": 1.8188, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 1.535688065533028e-06, |
|
"loss": 1.9197, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.5318760183477305e-06, |
|
"loss": 1.9553, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.528070132061615e-06, |
|
"loss": 1.9533, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.5242704199360158e-06, |
|
"loss": 1.7673, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.520476895210752e-06, |
|
"loss": 2.0074, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.5166895711040836e-06, |
|
"loss": 1.9309, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.512908460812667e-06, |
|
"loss": 1.8433, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 1.5091335775115052e-06, |
|
"loss": 1.7208, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"eval_loss": 1.9534618854522705, |
|
"eval_runtime": 72.4867, |
|
"eval_samples_per_second": 8.953, |
|
"eval_steps_per_second": 8.953, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.5053649343539036e-06, |
|
"loss": 1.9372, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.5016025444714247e-06, |
|
"loss": 1.948, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.497846420973842e-06, |
|
"loss": 1.9946, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.4940965769490942e-06, |
|
"loss": 2.0408, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.4903530254632393e-06, |
|
"loss": 1.8153, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.625, |
|
"learning_rate": 1.4866157795604095e-06, |
|
"loss": 1.8674, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.4828848522627667e-06, |
|
"loss": 1.8996, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.4791602565704536e-06, |
|
"loss": 1.9489, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.4754420054615543e-06, |
|
"loss": 2.0395, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.4717301118920437e-06, |
|
"loss": 1.8873, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.4680245887957436e-06, |
|
"loss": 1.7733, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.4643254490842798e-06, |
|
"loss": 1.826, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.4606327056470336e-06, |
|
"loss": 2.0092, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.456946371351103e-06, |
|
"loss": 1.9064, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.4532664590412498e-06, |
|
"loss": 2.0053, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.4495929815398604e-06, |
|
"loss": 1.8985, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.4459259516468998e-06, |
|
"loss": 1.666, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.442265382139866e-06, |
|
"loss": 1.8388, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.4386112857737473e-06, |
|
"loss": 1.9407, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.4349636752809757e-06, |
|
"loss": 1.8795, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.4313225633713843e-06, |
|
"loss": 1.9593, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.4276879627321624e-06, |
|
"loss": 1.7267, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.42405988602781e-06, |
|
"loss": 1.9268, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.4204383459000984e-06, |
|
"loss": 1.8601, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.4168233549680186e-06, |
|
"loss": 1.9448, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.4132149258277436e-06, |
|
"loss": 1.9202, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.4096130710525814e-06, |
|
"loss": 1.9888, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.4060178031929324e-06, |
|
"loss": 1.9818, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.4024291347762465e-06, |
|
"loss": 1.7319, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.3988470783069768e-06, |
|
"loss": 1.8546, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.3952716462665378e-06, |
|
"loss": 1.7922, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.3917028511132623e-06, |
|
"loss": 1.7291, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.3881407052823564e-06, |
|
"loss": 1.894, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.3845852211858582e-06, |
|
"loss": 1.9447, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.381036411212593e-06, |
|
"loss": 1.8265, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.3774942877281303e-06, |
|
"loss": 1.837, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.3739588630747414e-06, |
|
"loss": 1.8976, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.3704301495713553e-06, |
|
"loss": 1.9525, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.3669081595135182e-06, |
|
"loss": 1.8708, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.3633929051733473e-06, |
|
"loss": 1.9955, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.3598843987994905e-06, |
|
"loss": 1.8324, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.3563826526170821e-06, |
|
"loss": 1.9427, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.352887678827701e-06, |
|
"loss": 2.0053, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.3493994896093306e-06, |
|
"loss": 1.9065, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.3459180971163105e-06, |
|
"loss": 1.9167, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.3424435134793002e-06, |
|
"loss": 1.772, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 1.338975750805232e-06, |
|
"loss": 2.0155, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.3355148211772732e-06, |
|
"loss": 1.9183, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.3320607366547808e-06, |
|
"loss": 1.8341, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 1.3286135092732599e-06, |
|
"loss": 1.9931, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.3251731510443243e-06, |
|
"loss": 2.027, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.3217396739556507e-06, |
|
"loss": 1.8582, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.3183130899709393e-06, |
|
"loss": 1.8663, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.3148934110298738e-06, |
|
"loss": 1.8957, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.311480649048076e-06, |
|
"loss": 1.7826, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.308074815917066e-06, |
|
"loss": 1.7203, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.304675923504221e-06, |
|
"loss": 1.9513, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.301283983652733e-06, |
|
"loss": 2.0152, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.2978990081815716e-06, |
|
"loss": 1.9463, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.294521008885436e-06, |
|
"loss": 1.9643, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"eval_loss": 1.953567385673523, |
|
"eval_runtime": 72.5011, |
|
"eval_samples_per_second": 8.952, |
|
"eval_steps_per_second": 8.952, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.2911499975347184e-06, |
|
"loss": 2.1475, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.2877859858754626e-06, |
|
"loss": 1.8881, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.284428985629322e-06, |
|
"loss": 1.7487, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 1.281079008493519e-06, |
|
"loss": 1.891, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.2777360661408053e-06, |
|
"loss": 1.8939, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.2744001702194199e-06, |
|
"loss": 2.0425, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.625, |
|
"learning_rate": 1.2710713323530486e-06, |
|
"loss": 2.2004, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.2677495641407838e-06, |
|
"loss": 1.6771, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.264434877157087e-06, |
|
"loss": 1.9232, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.2611272829517423e-06, |
|
"loss": 2.0209, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.2578267930498205e-06, |
|
"loss": 2.027, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 1.254533418951638e-06, |
|
"loss": 1.9216, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.2512471721327165e-06, |
|
"loss": 1.8335, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.2479680640437454e-06, |
|
"loss": 1.9975, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.2446961061105367e-06, |
|
"loss": 1.7855, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.2414313097339893e-06, |
|
"loss": 1.8574, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.2381736862900485e-06, |
|
"loss": 2.0234, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.2349232471296659e-06, |
|
"loss": 1.8838, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.2316800035787598e-06, |
|
"loss": 1.9116, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 1.2284439669381758e-06, |
|
"loss": 2.0191, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.2252151484836484e-06, |
|
"loss": 1.9649, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.22199355946576e-06, |
|
"loss": 1.8811, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 1.2187792111099016e-06, |
|
"loss": 1.9588, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.2155721146162376e-06, |
|
"loss": 1.8881, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.212372281159662e-06, |
|
"loss": 1.7173, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.20917972188976e-06, |
|
"loss": 1.8664, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.205994447930773e-06, |
|
"loss": 1.9208, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.2028164703815544e-06, |
|
"loss": 1.9237, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.1996458003155372e-06, |
|
"loss": 1.8487, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.625, |
|
"learning_rate": 1.19648244878069e-06, |
|
"loss": 1.9926, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.1933264267994795e-06, |
|
"loss": 1.8457, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.1901777453688357e-06, |
|
"loss": 1.9027, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.1870364154601086e-06, |
|
"loss": 2.1226, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.183902448019034e-06, |
|
"loss": 1.9132, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.180775853965693e-06, |
|
"loss": 1.7712, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.1776566441944746e-06, |
|
"loss": 1.8856, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.1745448295740368e-06, |
|
"loss": 2.0439, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.1714404209472736e-06, |
|
"loss": 1.8875, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.1683434291312685e-06, |
|
"loss": 1.7842, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.165253864917265e-06, |
|
"loss": 1.8657, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.1621717390706236e-06, |
|
"loss": 2.0253, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.159097062330787e-06, |
|
"loss": 1.8576, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.1560298454112436e-06, |
|
"loss": 1.9776, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.1529700989994868e-06, |
|
"loss": 1.9832, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.1499178337569794e-06, |
|
"loss": 2.0509, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.146873060319118e-06, |
|
"loss": 1.9491, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 1.1438357892951932e-06, |
|
"loss": 1.9559, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.140806031268355e-06, |
|
"loss": 1.8652, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.137783796795574e-06, |
|
"loss": 1.7705, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.1347690964076065e-06, |
|
"loss": 1.8483, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.131761940608956e-06, |
|
"loss": 1.9094, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.1287623398778372e-06, |
|
"loss": 1.8403, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.1257703046661418e-06, |
|
"loss": 1.778, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.1227858453993977e-06, |
|
"loss": 2.0312, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.1198089724767356e-06, |
|
"loss": 2.0088, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.1168396962708524e-06, |
|
"loss": 1.8605, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.113878027127974e-06, |
|
"loss": 1.9716, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.1109239753678222e-06, |
|
"loss": 1.9367, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.1079775512835748e-06, |
|
"loss": 1.9521, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.1050387651418307e-06, |
|
"loss": 1.9099, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"eval_loss": 1.953331470489502, |
|
"eval_runtime": 72.686, |
|
"eval_samples_per_second": 8.929, |
|
"eval_steps_per_second": 8.929, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.1021076271825771e-06, |
|
"loss": 1.7965, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.0991841476191504e-06, |
|
"loss": 1.8824, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.0962683366382017e-06, |
|
"loss": 1.8532, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.0933602043996624e-06, |
|
"loss": 1.9575, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.090459761036707e-06, |
|
"loss": 1.717, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.08756701665572e-06, |
|
"loss": 1.8202, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.0846819813362563e-06, |
|
"loss": 2.0111, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.081804665131014e-06, |
|
"loss": 1.9468, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.0789350780657898e-06, |
|
"loss": 2.1138, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.0760732301394517e-06, |
|
"loss": 1.903, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 1.0732191313239e-06, |
|
"loss": 1.7584, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.0703727915640331e-06, |
|
"loss": 1.8592, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.0675342207777166e-06, |
|
"loss": 1.8527, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 1.0647034288557416e-06, |
|
"loss": 1.8394, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.0618804256617976e-06, |
|
"loss": 2.0342, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.059065221032433e-06, |
|
"loss": 1.8342, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.0562578247770232e-06, |
|
"loss": 1.7947, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.0534582466777362e-06, |
|
"loss": 1.8489, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.0506664964894971e-06, |
|
"loss": 1.9293, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.0478825839399567e-06, |
|
"loss": 1.7394, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.0451065187294553e-06, |
|
"loss": 1.6778, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.042338310530989e-06, |
|
"loss": 1.7995, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.0395779689901793e-06, |
|
"loss": 1.9903, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.036825503725234e-06, |
|
"loss": 1.7324, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 1.0340809243269183e-06, |
|
"loss": 1.8404, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.0313442403585187e-06, |
|
"loss": 1.8433, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.0286154613558106e-06, |
|
"loss": 1.8111, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 1.025894596827027e-06, |
|
"loss": 1.9719, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.023181656252821e-06, |
|
"loss": 1.8663, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.020476649086237e-06, |
|
"loss": 1.6536, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.0177795847526752e-06, |
|
"loss": 1.7711, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 1.0150904726498593e-06, |
|
"loss": 2.0681, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.0124093221478047e-06, |
|
"loss": 1.9075, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.0097361425887852e-06, |
|
"loss": 1.9701, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.0070709432873005e-06, |
|
"loss": 1.7582, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 1.0044137335300421e-06, |
|
"loss": 2.0247, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.0017645225758648e-06, |
|
"loss": 1.9363, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 9.99123319655751e-07, |
|
"loss": 2.0171, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 9.9649013397278e-07, |
|
"loss": 1.8441, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 9.938649747020956e-07, |
|
"loss": 1.8559, |
|
"step": 1480 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1773, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 148, |
|
"total_flos": 8.73512712108245e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|