{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 48,
  "global_step": 190,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "grad_norm": 51.5, "learning_rate": 2.0000000000000003e-06, "loss": 5.2368, "step": 1 },
    { "epoch": 0.01, "eval_loss": 4.782614707946777, "eval_runtime": 50.8138, "eval_samples_per_second": 20.742, "eval_steps_per_second": 20.742, "step": 1 },
    { "epoch": 0.01, "grad_norm": 48.25, "learning_rate": 4.000000000000001e-06, "loss": 4.8965, "step": 2 },
    { "epoch": 0.02, "grad_norm": 49.25, "learning_rate": 6e-06, "loss": 5.0969, "step": 3 },
    { "epoch": 0.02, "grad_norm": 51.5, "learning_rate": 8.000000000000001e-06, "loss": 5.0122, "step": 4 },
    { "epoch": 0.03, "grad_norm": 48.75, "learning_rate": 1e-05, "loss": 5.2505, "step": 5 },
    { "epoch": 0.03, "grad_norm": 45.25, "learning_rate": 1.2e-05, "loss": 4.8966, "step": 6 },
    { "epoch": 0.04, "grad_norm": 37.5, "learning_rate": 1.4000000000000001e-05, "loss": 4.4979, "step": 7 },
    { "epoch": 0.04, "grad_norm": 30.125, "learning_rate": 1.6000000000000003e-05, "loss": 4.2038, "step": 8 },
    { "epoch": 0.05, "grad_norm": 29.625, "learning_rate": 1.8e-05, "loss": 4.3498, "step": 9 },
    { "epoch": 0.05, "grad_norm": 27.5, "learning_rate": 2e-05, "loss": 4.0182, "step": 10 },
    { "epoch": 0.06, "grad_norm": 29.875, "learning_rate": 2.2000000000000003e-05, "loss": 3.6018, "step": 11 },
    { "epoch": 0.06, "grad_norm": 27.375, "learning_rate": 2.4e-05, "loss": 3.4676, "step": 12 },
    { "epoch": 0.07, "grad_norm": 33.5, "learning_rate": 2.6000000000000002e-05, "loss": 3.1663, "step": 13 },
    { "epoch": 0.07, "grad_norm": 20.0, "learning_rate": 2.8000000000000003e-05, "loss": 3.0102, "step": 14 },
    { "epoch": 0.08, "grad_norm": 15.5, "learning_rate": 3e-05, "loss": 2.9281, "step": 15 },
    { "epoch": 0.08, "grad_norm": 17.75, "learning_rate": 3.2000000000000005e-05, "loss": 2.7986, "step": 16 },
    { "epoch": 0.09, "grad_norm": 11.0, "learning_rate": 3.4000000000000007e-05, "loss": 2.7211, "step": 17 },
    { "epoch": 0.09, "grad_norm": 15.5, "learning_rate": 3.6e-05, "loss": 2.5806, "step": 18 },
    { "epoch": 0.1, "grad_norm": 17.375, "learning_rate": 3.8e-05, "loss": 2.4546, "step": 19 },
    { "epoch": 0.11, "grad_norm": 10.0625, "learning_rate": 4e-05, "loss": 2.6552, "step": 20 },
    { "epoch": 0.11, "grad_norm": 11.0625, "learning_rate": 4.2e-05, "loss": 2.4723, "step": 21 },
    { "epoch": 0.12, "grad_norm": 10.375, "learning_rate": 4.4000000000000006e-05, "loss": 2.5615, "step": 22 },
    { "epoch": 0.12, "grad_norm": 12.5, "learning_rate": 4.600000000000001e-05, "loss": 2.4721, "step": 23 },
    { "epoch": 0.13, "grad_norm": 8.75, "learning_rate": 4.8e-05, "loss": 2.3344, "step": 24 },
    { "epoch": 0.13, "grad_norm": 14.25, "learning_rate": 5e-05, "loss": 2.4028, "step": 25 },
    { "epoch": 0.14, "grad_norm": 9.5625, "learning_rate": 5.2000000000000004e-05, "loss": 2.3864, "step": 26 },
    { "epoch": 0.14, "grad_norm": 11.75, "learning_rate": 5.4000000000000005e-05, "loss": 2.3027, "step": 27 },
    { "epoch": 0.15, "grad_norm": 11.5625, "learning_rate": 5.6000000000000006e-05, "loss": 2.4142, "step": 28 },
    { "epoch": 0.15, "grad_norm": 8.5625, "learning_rate": 5.8e-05, "loss": 2.3577, "step": 29 },
    { "epoch": 0.16, "grad_norm": 10.8125, "learning_rate": 6e-05, "loss": 2.2604, "step": 30 },
    { "epoch": 0.16, "grad_norm": 12.0625, "learning_rate": 6.2e-05, "loss": 2.1379, "step": 31 },
    { "epoch": 0.17, "grad_norm": 12.875, "learning_rate": 6.400000000000001e-05, "loss": 2.2754, "step": 32 },
    { "epoch": 0.17, "grad_norm": 14.1875, "learning_rate": 6.6e-05, "loss": 2.1848, "step": 33 },
    { "epoch": 0.18, "grad_norm": 12.5625, "learning_rate": 6.800000000000001e-05, "loss": 2.2751, "step": 34 },
    { "epoch": 0.18, "grad_norm": 10.125, "learning_rate": 7e-05, "loss": 2.2342, "step": 35 },
    { "epoch": 0.19, "grad_norm": 10.3125, "learning_rate": 7.2e-05, "loss": 2.1432, "step": 36 },
    { "epoch": 0.19, "grad_norm": 15.5, "learning_rate": 7.4e-05, "loss": 2.4477, "step": 37 },
    { "epoch": 0.2, "grad_norm": 13.0625, "learning_rate": 7.6e-05, "loss": 2.4045, "step": 38 },
    { "epoch": 0.21, "grad_norm": 9.125, "learning_rate": 7.800000000000001e-05, "loss": 2.4156, "step": 39 },
    { "epoch": 0.21, "grad_norm": 10.5, "learning_rate": 8e-05, "loss": 2.2679, "step": 40 },
    { "epoch": 0.22, "grad_norm": 11.75, "learning_rate": 8.2e-05, "loss": 2.3045, "step": 41 },
    { "epoch": 0.22, "grad_norm": 13.5, "learning_rate": 8.4e-05, "loss": 2.0572, "step": 42 },
    { "epoch": 0.23, "grad_norm": 13.75, "learning_rate": 8.6e-05, "loss": 2.1523, "step": 43 },
    { "epoch": 0.23, "grad_norm": 11.875, "learning_rate": 8.800000000000001e-05, "loss": 2.1531, "step": 44 },
    { "epoch": 0.24, "grad_norm": 11.6875, "learning_rate": 9e-05, "loss": 1.9768, "step": 45 },
    { "epoch": 0.24, "grad_norm": 8.125, "learning_rate": 9.200000000000001e-05, "loss": 2.4555, "step": 46 },
    { "epoch": 0.25, "grad_norm": 6.625, "learning_rate": 9.4e-05, "loss": 2.3451, "step": 47 },
    { "epoch": 0.25, "grad_norm": 16.5, "learning_rate": 9.6e-05, "loss": 2.2757, "step": 48 },
    { "epoch": 0.25, "eval_loss": 2.3981776237487793, "eval_runtime": 50.6747, "eval_samples_per_second": 20.799, "eval_steps_per_second": 20.799, "step": 48 },
    { "epoch": 0.26, "grad_norm": 16.25, "learning_rate": 9.8e-05, "loss": 2.4249, "step": 49 },
    { "epoch": 0.26, "grad_norm": 5.84375, "learning_rate": 0.0001, "loss": 2.306, "step": 50 },
    { "epoch": 0.27, "grad_norm": 7.75, "learning_rate": 0.00010200000000000001, "loss": 2.4711, "step": 51 },
    { "epoch": 0.27, "grad_norm": 9.1875, "learning_rate": 0.00010400000000000001, "loss": 2.3926, "step": 52 },
    { "epoch": 0.28, "grad_norm": 13.125, "learning_rate": 0.00010600000000000002, "loss": 2.1229, "step": 53 },
    { "epoch": 0.28, "grad_norm": 9.8125, "learning_rate": 0.00010800000000000001, "loss": 2.0945, "step": 54 },
    { "epoch": 0.29, "grad_norm": 12.375, "learning_rate": 0.00011000000000000002, "loss": 2.248, "step": 55 },
    { "epoch": 0.29, "grad_norm": 8.125, "learning_rate": 0.00011200000000000001, "loss": 2.349, "step": 56 },
    { "epoch": 0.3, "grad_norm": 15.5625, "learning_rate": 0.00011399999999999999, "loss": 2.2546, "step": 57 },
    { "epoch": 0.31, "grad_norm": 13.5, "learning_rate": 0.000116, "loss": 2.4298, "step": 58 },
    { "epoch": 0.31, "grad_norm": 8.3125, "learning_rate": 0.000118, "loss": 2.1131, "step": 59 },
    { "epoch": 0.32, "grad_norm": 8.625, "learning_rate": 0.00012, "loss": 2.2696, "step": 60 },
    { "epoch": 0.32, "grad_norm": 6.21875, "learning_rate": 0.000122, "loss": 2.244, "step": 61 },
    { "epoch": 0.33, "grad_norm": 5.875, "learning_rate": 0.000124, "loss": 2.1473, "step": 62 },
    { "epoch": 0.33, "grad_norm": 7.28125, "learning_rate": 0.000126, "loss": 2.2231, "step": 63 },
    { "epoch": 0.34, "grad_norm": 5.1875, "learning_rate": 0.00012800000000000002, "loss": 2.0796, "step": 64 },
    { "epoch": 0.34, "grad_norm": 7.59375, "learning_rate": 0.00013000000000000002, "loss": 2.3191, "step": 65 },
    { "epoch": 0.35, "grad_norm": 6.53125, "learning_rate": 0.000132, "loss": 2.214, "step": 66 },
    { "epoch": 0.35, "grad_norm": 8.375, "learning_rate": 0.000134, "loss": 2.3527, "step": 67 },
    { "epoch": 0.36, "grad_norm": 7.09375, "learning_rate": 0.00013600000000000003, "loss": 2.3337, "step": 68 },
    { "epoch": 0.36, "grad_norm": 7.28125, "learning_rate": 0.000138, "loss": 2.2933, "step": 69 },
    { "epoch": 0.37, "grad_norm": 19.75, "learning_rate": 0.00014, "loss": 2.3885, "step": 70 },
    { "epoch": 0.37, "grad_norm": 16.625, "learning_rate": 0.000142, "loss": 2.348, "step": 71 },
    { "epoch": 0.38, "grad_norm": 6.0, "learning_rate": 0.000144, "loss": 2.2594, "step": 72 },
    { "epoch": 0.38, "grad_norm": 11.875, "learning_rate": 0.000146, "loss": 2.5611, "step": 73 },
    { "epoch": 0.39, "grad_norm": 5.875, "learning_rate": 0.000148, "loss": 2.0386, "step": 74 },
    { "epoch": 0.39, "grad_norm": 16.0, "learning_rate": 0.00015000000000000001, "loss": 2.656, "step": 75 },
    { "epoch": 0.4, "grad_norm": 8.1875, "learning_rate": 0.000152, "loss": 2.3885, "step": 76 },
    { "epoch": 0.41, "grad_norm": 11.375, "learning_rate": 0.000154, "loss": 2.701, "step": 77 },
    { "epoch": 0.41, "grad_norm": 8.75, "learning_rate": 0.00015600000000000002, "loss": 2.5388, "step": 78 },
    { "epoch": 0.42, "grad_norm": 7.84375, "learning_rate": 0.00015800000000000002, "loss": 2.5263, "step": 79 },
    { "epoch": 0.42, "grad_norm": 6.3125, "learning_rate": 0.00016, "loss": 2.7169, "step": 80 },
    { "epoch": 0.43, "grad_norm": 19.125, "learning_rate": 0.000162, "loss": 2.504, "step": 81 },
    { "epoch": 0.43, "grad_norm": 8.25, "learning_rate": 0.000164, "loss": 2.4614, "step": 82 },
    { "epoch": 0.44, "grad_norm": 5.5625, "learning_rate": 0.000166, "loss": 2.7586, "step": 83 },
    { "epoch": 0.44, "grad_norm": 8.3125, "learning_rate": 0.000168, "loss": 2.6373, "step": 84 },
    { "epoch": 0.45, "grad_norm": 8.0625, "learning_rate": 0.00017, "loss": 2.3237, "step": 85 },
    { "epoch": 0.45, "grad_norm": 8.4375, "learning_rate": 0.000172, "loss": 2.1896, "step": 86 },
    { "epoch": 0.46, "grad_norm": 11.5625, "learning_rate": 0.000174, "loss": 2.5089, "step": 87 },
    { "epoch": 0.46, "grad_norm": 7.15625, "learning_rate": 0.00017600000000000002, "loss": 2.5806, "step": 88 },
    { "epoch": 0.47, "grad_norm": 8.875, "learning_rate": 0.00017800000000000002, "loss": 2.5497, "step": 89 },
    { "epoch": 0.47, "grad_norm": 8.6875, "learning_rate": 0.00018, "loss": 2.3526, "step": 90 },
    { "epoch": 0.48, "grad_norm": 5.96875, "learning_rate": 0.000182, "loss": 2.283, "step": 91 },
    { "epoch": 0.48, "grad_norm": 6.375, "learning_rate": 0.00018400000000000003, "loss": 2.5388, "step": 92 },
    { "epoch": 0.49, "grad_norm": 5.5625, "learning_rate": 0.00018600000000000002, "loss": 2.4216, "step": 93 },
    { "epoch": 0.49, "grad_norm": 5.5625, "learning_rate": 0.000188, "loss": 2.4199, "step": 94 },
    { "epoch": 0.5, "grad_norm": 6.5, "learning_rate": 0.00019, "loss": 2.6459, "step": 95 },
    { "epoch": 0.51, "grad_norm": 5.46875, "learning_rate": 0.000192, "loss": 2.5869, "step": 96 },
    { "epoch": 0.51, "eval_loss": 2.5119166374206543, "eval_runtime": 50.7277, "eval_samples_per_second": 20.778, "eval_steps_per_second": 20.778, "step": 96 },
    { "epoch": 0.51, "grad_norm": 5.3125, "learning_rate": 0.000194, "loss": 2.6162, "step": 97 },
    { "epoch": 0.52, "grad_norm": 6.5625, "learning_rate": 0.000196, "loss": 2.605, "step": 98 },
    { "epoch": 0.52, "grad_norm": 4.9375, "learning_rate": 0.00019800000000000002, "loss": 2.4971, "step": 99 },
    { "epoch": 0.53, "grad_norm": 8.8125, "learning_rate": 0.0002, "loss": 2.4572, "step": 100 },
    { "epoch": 0.53, "grad_norm": 5.96875, "learning_rate": 0.0001999390827019096, "loss": 2.4781, "step": 101 },
    { "epoch": 0.54, "grad_norm": 9.3125, "learning_rate": 0.00019975640502598244, "loss": 2.5578, "step": 102 },
    { "epoch": 0.54, "grad_norm": 5.625, "learning_rate": 0.00019945218953682734, "loss": 2.4608, "step": 103 },
    { "epoch": 0.55, "grad_norm": 6.65625, "learning_rate": 0.00019902680687415705, "loss": 2.874, "step": 104 },
    { "epoch": 0.55, "grad_norm": 10.875, "learning_rate": 0.00019848077530122083, "loss": 2.8733, "step": 105 },
    { "epoch": 0.56, "grad_norm": 1568.0, "learning_rate": 0.00019781476007338058, "loss": 6.3646, "step": 106 },
    { "epoch": 0.56, "grad_norm": 648.0, "learning_rate": 0.00019702957262759965, "loss": 3.75, "step": 107 },
    { "epoch": 0.57, "grad_norm": 11.0625, "learning_rate": 0.0001961261695938319, "loss": 2.8153, "step": 108 },
    { "epoch": 0.57, "grad_norm": 6.375, "learning_rate": 0.00019510565162951537, "loss": 2.7939, "step": 109 },
    { "epoch": 0.58, "grad_norm": 8.25, "learning_rate": 0.00019396926207859084, "loss": 2.8736, "step": 110 },
    { "epoch": 0.58, "grad_norm": 7.21875, "learning_rate": 0.00019271838545667876, "loss": 2.4797, "step": 111 },
    { "epoch": 0.59, "grad_norm": 5.6875, "learning_rate": 0.0001913545457642601, "loss": 2.7665, "step": 112 },
    { "epoch": 0.59, "grad_norm": 6.625, "learning_rate": 0.0001898794046299167, "loss": 2.6241, "step": 113 },
    { "epoch": 0.6, "grad_norm": 4.65625, "learning_rate": 0.00018829475928589271, "loss": 2.5569, "step": 114 },
    { "epoch": 0.61, "grad_norm": 6.6875, "learning_rate": 0.00018660254037844388, "loss": 2.6452, "step": 115 },
    { "epoch": 0.61, "grad_norm": 4.6875, "learning_rate": 0.0001848048096156426, "loss": 2.6205, "step": 116 },
    { "epoch": 0.62, "grad_norm": 6.53125, "learning_rate": 0.00018290375725550417, "loss": 2.5831, "step": 117 },
    { "epoch": 0.62, "grad_norm": 5.90625, "learning_rate": 0.00018090169943749476, "loss": 2.6601, "step": 118 },
    { "epoch": 0.63, "grad_norm": 6.6875, "learning_rate": 0.00017880107536067218, "loss": 2.5187, "step": 119 },
    { "epoch": 0.63, "grad_norm": 5.53125, "learning_rate": 0.0001766044443118978, "loss": 2.5023, "step": 120 },
    { "epoch": 0.64, "grad_norm": 7.84375, "learning_rate": 0.00017431448254773944, "loss": 2.6064, "step": 121 },
    { "epoch": 0.64, "grad_norm": 7.65625, "learning_rate": 0.0001719339800338651, "loss": 2.5065, "step": 122 },
    { "epoch": 0.65, "grad_norm": 6.15625, "learning_rate": 0.00016946583704589973, "loss": 2.6022, "step": 123 },
    { "epoch": 0.65, "grad_norm": 5.15625, "learning_rate": 0.00016691306063588583, "loss": 2.5981, "step": 124 },
    { "epoch": 0.66, "grad_norm": 5.28125, "learning_rate": 0.00016427876096865394, "loss": 2.6804, "step": 125 },
    { "epoch": 0.66, "grad_norm": 5.71875, "learning_rate": 0.0001615661475325658, "loss": 2.7233, "step": 126 },
    { "epoch": 0.67, "grad_norm": 3.84375, "learning_rate": 0.00015877852522924732, "loss": 2.4712, "step": 127 },
    { "epoch": 0.67, "grad_norm": 5.9375, "learning_rate": 0.0001559192903470747, "loss": 2.4518, "step": 128 },
    { "epoch": 0.68, "grad_norm": 4.53125, "learning_rate": 0.0001529919264233205, "loss": 2.5456, "step": 129 },
    { "epoch": 0.68, "grad_norm": 5.4375, "learning_rate": 0.00015000000000000001, "loss": 2.4622, "step": 130 },
    { "epoch": 0.69, "grad_norm": 4.84375, "learning_rate": 0.00014694715627858908, "loss": 2.6133, "step": 131 },
    { "epoch": 0.69, "grad_norm": 5.65625, "learning_rate": 0.00014383711467890774, "loss": 2.4844, "step": 132 },
    { "epoch": 0.7, "grad_norm": 5.09375, "learning_rate": 0.00014067366430758004, "loss": 2.676, "step": 133 },
    { "epoch": 0.71, "grad_norm": 6.65625, "learning_rate": 0.00013746065934159123, "loss": 2.6896, "step": 134 },
    { "epoch": 0.71, "grad_norm": 5.5625, "learning_rate": 0.00013420201433256689, "loss": 2.5363, "step": 135 },
    { "epoch": 0.72, "grad_norm": 4.34375, "learning_rate": 0.00013090169943749476, "loss": 2.2558, "step": 136 },
    { "epoch": 0.72, "grad_norm": 6.53125, "learning_rate": 0.0001275637355816999, "loss": 2.5952, "step": 137 },
    { "epoch": 0.73, "grad_norm": 4.28125, "learning_rate": 0.00012419218955996676, "loss": 2.3442, "step": 138 },
    { "epoch": 0.73, "grad_norm": 4.28125, "learning_rate": 0.00012079116908177593, "loss": 2.4271, "step": 139 },
    { "epoch": 0.74, "grad_norm": 4.28125, "learning_rate": 0.00011736481776669306, "loss": 2.4587, "step": 140 },
    { "epoch": 0.74, "grad_norm": 3.671875, "learning_rate": 0.00011391731009600654, "loss": 2.5501, "step": 141 },
    { "epoch": 0.75, "grad_norm": 5.375, "learning_rate": 0.00011045284632676536, "loss": 2.4936, "step": 142 },
    { "epoch": 0.75, "grad_norm": 5.5625, "learning_rate": 0.00010697564737441252, "loss": 2.1856, "step": 143 },
    { "epoch": 0.76, "grad_norm": 4.03125, "learning_rate": 0.00010348994967025012, "loss": 2.664, "step": 144 },
    { "epoch": 0.76, "eval_loss": 2.378908395767212, "eval_runtime": 50.7719, "eval_samples_per_second": 20.76, "eval_steps_per_second": 20.76, "step": 144 },
    { "epoch": 0.76, "grad_norm": 5.15625, "learning_rate": 0.0001, "loss": 2.4778, "step": 145 },
    { "epoch": 0.77, "grad_norm": 4.625, "learning_rate": 9.651005032974994e-05, "loss": 2.388, "step": 146 },
    { "epoch": 0.77, "grad_norm": 4.1875, "learning_rate": 9.302435262558747e-05, "loss": 2.4377, "step": 147 },
    { "epoch": 0.78, "grad_norm": 6.40625, "learning_rate": 8.954715367323468e-05, "loss": 2.5183, "step": 148 },
    { "epoch": 0.78, "grad_norm": 5.09375, "learning_rate": 8.608268990399349e-05, "loss": 2.5759, "step": 149 },
    { "epoch": 0.79, "grad_norm": 4.4375, "learning_rate": 8.263518223330697e-05, "loss": 2.6291, "step": 150 },
    { "epoch": 0.79, "grad_norm": 5.0625, "learning_rate": 7.920883091822408e-05, "loss": 2.3462, "step": 151 },
    { "epoch": 0.8, "grad_norm": 3.875, "learning_rate": 7.580781044003324e-05, "loss": 2.1391, "step": 152 },
    { "epoch": 0.81, "grad_norm": 3.625, "learning_rate": 7.243626441830009e-05, "loss": 2.3882, "step": 153 },
    { "epoch": 0.81, "grad_norm": 4.21875, "learning_rate": 6.909830056250527e-05, "loss": 2.3264, "step": 154 },
    { "epoch": 0.82, "grad_norm": 3.90625, "learning_rate": 6.579798566743314e-05, "loss": 2.2026, "step": 155 },
    { "epoch": 0.82, "grad_norm": 5.75, "learning_rate": 6.25393406584088e-05, "loss": 2.5855, "step": 156 },
    { "epoch": 0.83, "grad_norm": 4.6875, "learning_rate": 5.9326335692419995e-05, "loss": 2.2483, "step": 157 },
    { "epoch": 0.83, "grad_norm": 3.65625, "learning_rate": 5.616288532109225e-05, "loss": 2.1449, "step": 158 },
    { "epoch": 0.84, "grad_norm": 3.578125, "learning_rate": 5.305284372141095e-05, "loss": 2.2383, "step": 159 },
    { "epoch": 0.84, "grad_norm": 3.65625, "learning_rate": 5.000000000000002e-05, "loss": 2.2851, "step": 160 },
    { "epoch": 0.85, "grad_norm": 4.125, "learning_rate": 4.700807357667952e-05, "loss": 2.4617, "step": 161 },
    { "epoch": 0.85, "grad_norm": 4.4375, "learning_rate": 4.4080709652925336e-05, "loss": 2.1831, "step": 162 },
    { "epoch": 0.86, "grad_norm": 4.3125, "learning_rate": 4.12214747707527e-05, "loss": 2.2079, "step": 163 },
    { "epoch": 0.86, "grad_norm": 3.28125, "learning_rate": 3.843385246743417e-05, "loss": 2.309, "step": 164 },
    { "epoch": 0.87, "grad_norm": 5.15625, "learning_rate": 3.5721239031346066e-05, "loss": 2.4679, "step": 165 },
    { "epoch": 0.87, "grad_norm": 5.09375, "learning_rate": 3.308693936411421e-05, "loss": 2.3249, "step": 166 },
    { "epoch": 0.88, "grad_norm": 4.625, "learning_rate": 3.053416295410026e-05, "loss": 2.2908, "step": 167 },
    { "epoch": 0.88, "grad_norm": 3.90625, "learning_rate": 2.8066019966134904e-05, "loss": 2.3121, "step": 168 },
    { "epoch": 0.89, "grad_norm": 3.8125, "learning_rate": 2.5685517452260567e-05, "loss": 2.2577, "step": 169 },
    { "epoch": 0.89, "grad_norm": 3.453125, "learning_rate": 2.339555568810221e-05, "loss": 2.2002, "step": 170 },
    { "epoch": 0.9, "grad_norm": 3.84375, "learning_rate": 2.119892463932781e-05, "loss": 2.4345, "step": 171 },
    { "epoch": 0.91, "grad_norm": 4.21875, "learning_rate": 1.9098300562505266e-05, "loss": 2.2662, "step": 172 },
    { "epoch": 0.91, "grad_norm": 3.546875, "learning_rate": 1.7096242744495837e-05, "loss": 2.4411, "step": 173 },
    { "epoch": 0.92, "grad_norm": 3.640625, "learning_rate": 1.5195190384357404e-05, "loss": 2.4609, "step": 174 },
    { "epoch": 0.92, "grad_norm": 4.375, "learning_rate": 1.339745962155613e-05, "loss": 2.3615, "step": 175 },
    { "epoch": 0.93, "grad_norm": 3.4375, "learning_rate": 1.1705240714107302e-05, "loss": 2.2071, "step": 176 },
    { "epoch": 0.93, "grad_norm": 3.578125, "learning_rate": 1.0120595370083318e-05, "loss": 2.3504, "step": 177 },
    { "epoch": 0.94, "grad_norm": 3.578125, "learning_rate": 8.645454235739903e-06, "loss": 2.2771, "step": 178 },
    { "epoch": 0.94, "grad_norm": 3.5625, "learning_rate": 7.281614543321269e-06, "loss": 2.3663, "step": 179 },
    { "epoch": 0.95, "grad_norm": 3.234375, "learning_rate": 6.030737921409169e-06, "loss": 2.4842, "step": 180 },
    { "epoch": 0.95, "grad_norm": 3.5625, "learning_rate": 4.8943483704846475e-06, "loss": 2.3921, "step": 181 },
    { "epoch": 0.96, "grad_norm": 3.484375, "learning_rate": 3.873830406168111e-06, "loss": 2.3467, "step": 182 },
    { "epoch": 0.96, "grad_norm": 3.34375, "learning_rate": 2.970427372400353e-06, "loss": 2.3193, "step": 183 },
    { "epoch": 0.97, "grad_norm": 3.71875, "learning_rate": 2.1852399266194314e-06, "loss": 2.3215, "step": 184 },
    { "epoch": 0.97, "grad_norm": 3.5625, "learning_rate": 1.5192246987791981e-06, "loss": 2.1687, "step": 185 },
    { "epoch": 0.98, "grad_norm": 3.359375, "learning_rate": 9.731931258429638e-07, "loss": 2.2471, "step": 186 },
    { "epoch": 0.98, "grad_norm": 3.203125, "learning_rate": 5.478104631726711e-07, "loss": 2.356, "step": 187 },
    { "epoch": 0.99, "grad_norm": 3.25, "learning_rate": 2.4359497401758024e-07, "loss": 2.2989, "step": 188 },
    { "epoch": 0.99, "grad_norm": 3.15625, "learning_rate": 6.09172980904238e-08, "loss": 2.0376, "step": 189 },
    { "epoch": 1.0, "grad_norm": 3.625, "learning_rate": 0.0, "loss": 2.5224, "step": 190 }
  ],
  "logging_steps": 1,
  "max_steps": 190,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 2415296753172480.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}