{
  "best_metric": 0.12461505830287933,
  "best_model_checkpoint": "strategytransitionplanv1/checkpoint-155",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 155,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03225806451612903,
      "grad_norm": 1.7076008319854736,
      "learning_rate": 6.25e-07,
      "loss": 1.0981,
      "step": 1
    },
    {
      "epoch": 0.06451612903225806,
      "grad_norm": 1.3594564199447632,
      "learning_rate": 1.25e-06,
      "loss": 1.0979,
      "step": 2
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 4.986942768096924,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 1.12,
      "step": 3
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 3.718357563018799,
      "learning_rate": 2.5e-06,
      "loss": 1.1224,
      "step": 4
    },
    {
      "epoch": 0.16129032258064516,
      "grad_norm": 3.902071952819824,
      "learning_rate": 3.125e-06,
      "loss": 1.1079,
      "step": 5
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 1.6320927143096924,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 1.1149,
      "step": 6
    },
    {
      "epoch": 0.22580645161290322,
      "grad_norm": 2.3765382766723633,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 1.1274,
      "step": 7
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 1.9507699012756348,
      "learning_rate": 5e-06,
      "loss": 1.0572,
      "step": 8
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 2.0720622539520264,
      "learning_rate": 5.625e-06,
      "loss": 1.0771,
      "step": 9
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 3.6346395015716553,
      "learning_rate": 6.25e-06,
      "loss": 1.0732,
      "step": 10
    },
    {
      "epoch": 0.3548387096774194,
      "grad_norm": 3.3288047313690186,
      "learning_rate": 6.875e-06,
      "loss": 1.0486,
      "step": 11
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 3.1521379947662354,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.0992,
      "step": 12
    },
    {
      "epoch": 0.41935483870967744,
      "grad_norm": 5.848731994628906,
      "learning_rate": 8.125000000000001e-06,
      "loss": 1.0845,
      "step": 13
    },
    {
      "epoch": 0.45161290322580644,
      "grad_norm": 4.110851287841797,
      "learning_rate": 8.750000000000001e-06,
      "loss": 1.093,
      "step": 14
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 1.617586374282837,
      "learning_rate": 9.375000000000001e-06,
      "loss": 1.0811,
      "step": 15
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 2.5444748401641846,
      "learning_rate": 1e-05,
      "loss": 1.0869,
      "step": 16
    },
    {
      "epoch": 0.5483870967741935,
      "grad_norm": 2.050872802734375,
      "learning_rate": 9.928057553956835e-06,
      "loss": 1.0879,
      "step": 17
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 4.142770290374756,
      "learning_rate": 9.85611510791367e-06,
      "loss": 1.0807,
      "step": 18
    },
    {
      "epoch": 0.6129032258064516,
      "grad_norm": 2.6557066440582275,
      "learning_rate": 9.784172661870505e-06,
      "loss": 1.0831,
      "step": 19
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 2.187577724456787,
      "learning_rate": 9.712230215827338e-06,
      "loss": 1.0701,
      "step": 20
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 2.1984810829162598,
      "learning_rate": 9.640287769784174e-06,
      "loss": 1.0397,
      "step": 21
    },
    {
      "epoch": 0.7096774193548387,
      "grad_norm": 2.2228214740753174,
      "learning_rate": 9.568345323741008e-06,
      "loss": 1.0466,
      "step": 22
    },
    {
      "epoch": 0.7419354838709677,
      "grad_norm": 2.3713221549987793,
      "learning_rate": 9.496402877697842e-06,
      "loss": 1.0695,
      "step": 23
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 3.8912224769592285,
      "learning_rate": 9.424460431654678e-06,
      "loss": 1.0865,
      "step": 24
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 4.4668869972229,
      "learning_rate": 9.35251798561151e-06,
      "loss": 1.087,
      "step": 25
    },
    {
      "epoch": 0.8387096774193549,
      "grad_norm": 4.023162364959717,
      "learning_rate": 9.280575539568346e-06,
      "loss": 1.075,
      "step": 26
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 2.8331081867218018,
      "learning_rate": 9.20863309352518e-06,
      "loss": 1.0941,
      "step": 27
    },
    {
      "epoch": 0.9032258064516129,
      "grad_norm": 3.602895498275757,
      "learning_rate": 9.136690647482015e-06,
      "loss": 1.0431,
      "step": 28
    },
    {
      "epoch": 0.9354838709677419,
      "grad_norm": 3.7651658058166504,
      "learning_rate": 9.064748201438849e-06,
      "loss": 1.0362,
      "step": 29
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 1.5405668020248413,
      "learning_rate": 8.992805755395683e-06,
      "loss": 1.0579,
      "step": 30
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.124188423156738,
      "learning_rate": 8.92086330935252e-06,
      "loss": 1.0354,
      "step": 31
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8387096774193549,
      "eval_f1_macro": 0.8319607843137254,
      "eval_f1_micro": 0.8387096774193549,
      "eval_f1_weighted": 0.8339943074003796,
      "eval_loss": 1.025422215461731,
      "eval_precision_macro": 0.8604269293924466,
      "eval_precision_micro": 0.8387096774193549,
      "eval_precision_weighted": 0.8604798982996981,
      "eval_recall_macro": 0.834920634920635,
      "eval_recall_micro": 0.8387096774193549,
      "eval_recall_weighted": 0.8387096774193549,
      "eval_runtime": 0.3641,
      "eval_samples_per_second": 170.282,
      "eval_steps_per_second": 10.986,
      "step": 31
    },
    {
      "epoch": 1.032258064516129,
      "grad_norm": 4.104296684265137,
      "learning_rate": 8.848920863309353e-06,
      "loss": 0.9849,
      "step": 32
    },
    {
      "epoch": 1.064516129032258,
      "grad_norm": 3.3923492431640625,
      "learning_rate": 8.776978417266188e-06,
      "loss": 1.0266,
      "step": 33
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 5.368731498718262,
      "learning_rate": 8.705035971223022e-06,
      "loss": 0.9705,
      "step": 34
    },
    {
      "epoch": 1.129032258064516,
      "grad_norm": 5.603680610656738,
      "learning_rate": 8.633093525179856e-06,
      "loss": 1.0303,
      "step": 35
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 2.2851974964141846,
      "learning_rate": 8.561151079136692e-06,
      "loss": 0.9739,
      "step": 36
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 4.70708703994751,
      "learning_rate": 8.489208633093526e-06,
      "loss": 0.9742,
      "step": 37
    },
    {
      "epoch": 1.2258064516129032,
      "grad_norm": 7.92202091217041,
      "learning_rate": 8.41726618705036e-06,
      "loss": 0.957,
      "step": 38
    },
    {
      "epoch": 1.2580645161290323,
      "grad_norm": 4.823170185089111,
      "learning_rate": 8.345323741007195e-06,
      "loss": 0.9731,
      "step": 39
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 3.4790797233581543,
      "learning_rate": 8.273381294964029e-06,
      "loss": 0.9411,
      "step": 40
    },
    {
      "epoch": 1.3225806451612903,
      "grad_norm": 4.450028419494629,
      "learning_rate": 8.201438848920865e-06,
      "loss": 0.9636,
      "step": 41
    },
    {
      "epoch": 1.3548387096774195,
      "grad_norm": 3.824451446533203,
      "learning_rate": 8.129496402877699e-06,
      "loss": 0.938,
      "step": 42
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 4.1180419921875,
      "learning_rate": 8.057553956834533e-06,
      "loss": 0.8856,
      "step": 43
    },
    {
      "epoch": 1.4193548387096775,
      "grad_norm": 6.481126308441162,
      "learning_rate": 7.985611510791367e-06,
      "loss": 0.9327,
      "step": 44
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 3.7428112030029297,
      "learning_rate": 7.913669064748202e-06,
      "loss": 0.8892,
      "step": 45
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 5.0267791748046875,
      "learning_rate": 7.841726618705036e-06,
      "loss": 0.8205,
      "step": 46
    },
    {
      "epoch": 1.5161290322580645,
      "grad_norm": 5.348020553588867,
      "learning_rate": 7.769784172661872e-06,
      "loss": 0.8093,
      "step": 47
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 7.635085582733154,
      "learning_rate": 7.697841726618706e-06,
      "loss": 0.8342,
      "step": 48
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 5.7552056312561035,
      "learning_rate": 7.62589928057554e-06,
      "loss": 0.8389,
      "step": 49
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 6.611269950866699,
      "learning_rate": 7.5539568345323745e-06,
      "loss": 0.9106,
      "step": 50
    },
    {
      "epoch": 1.6451612903225805,
      "grad_norm": 4.329951286315918,
      "learning_rate": 7.48201438848921e-06,
      "loss": 0.7197,
      "step": 51
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 5.155747890472412,
      "learning_rate": 7.410071942446043e-06,
      "loss": 0.6425,
      "step": 52
    },
    {
      "epoch": 1.7096774193548387,
      "grad_norm": 5.479297637939453,
      "learning_rate": 7.338129496402878e-06,
      "loss": 0.7609,
      "step": 53
    },
    {
      "epoch": 1.7419354838709677,
      "grad_norm": 5.481513977050781,
      "learning_rate": 7.266187050359713e-06,
      "loss": 0.8313,
      "step": 54
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 5.614933967590332,
      "learning_rate": 7.194244604316547e-06,
      "loss": 0.7208,
      "step": 55
    },
    {
      "epoch": 1.8064516129032258,
      "grad_norm": 4.680207252502441,
      "learning_rate": 7.122302158273382e-06,
      "loss": 0.6751,
      "step": 56
    },
    {
      "epoch": 1.838709677419355,
      "grad_norm": 5.295926094055176,
      "learning_rate": 7.050359712230216e-06,
      "loss": 0.8244,
      "step": 57
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 4.341407775878906,
      "learning_rate": 6.978417266187051e-06,
      "loss": 0.6805,
      "step": 58
    },
    {
      "epoch": 1.903225806451613,
      "grad_norm": 5.317811489105225,
      "learning_rate": 6.906474820143886e-06,
      "loss": 0.6056,
      "step": 59
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 5.420223236083984,
      "learning_rate": 6.834532374100719e-06,
      "loss": 0.6867,
      "step": 60
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 7.248617649078369,
      "learning_rate": 6.762589928057554e-06,
      "loss": 0.7531,
      "step": 61
    },
    {
      "epoch": 2.0,
      "grad_norm": 9.447990417480469,
      "learning_rate": 6.6906474820143886e-06,
      "loss": 0.5974,
      "step": 62
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.967741935483871,
      "eval_f1_macro": 0.967304625199362,
      "eval_f1_micro": 0.967741935483871,
      "eval_f1_weighted": 0.967626176879148,
      "eval_loss": 0.5691941380500793,
      "eval_precision_macro": 0.9710144927536232,
      "eval_precision_micro": 0.967741935483871,
      "eval_precision_weighted": 0.97054698457223,
      "eval_recall_macro": 0.9666666666666667,
      "eval_recall_micro": 0.967741935483871,
      "eval_recall_weighted": 0.967741935483871,
      "eval_runtime": 0.3633,
      "eval_samples_per_second": 170.675,
      "eval_steps_per_second": 11.011,
      "step": 62
    },
    {
      "epoch": 2.032258064516129,
      "grad_norm": 3.9469432830810547,
      "learning_rate": 6.618705035971224e-06,
      "loss": 0.6395,
      "step": 63
    },
    {
      "epoch": 2.064516129032258,
      "grad_norm": 4.58109188079834,
      "learning_rate": 6.546762589928059e-06,
      "loss": 0.5742,
      "step": 64
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 5.2513933181762695,
      "learning_rate": 6.474820143884892e-06,
      "loss": 0.665,
      "step": 65
    },
    {
      "epoch": 2.129032258064516,
      "grad_norm": 4.5661301612854,
      "learning_rate": 6.402877697841727e-06,
      "loss": 0.5077,
      "step": 66
    },
    {
      "epoch": 2.161290322580645,
      "grad_norm": 6.208225727081299,
      "learning_rate": 6.330935251798561e-06,
      "loss": 0.5777,
      "step": 67
    },
    {
      "epoch": 2.193548387096774,
      "grad_norm": 5.244273662567139,
      "learning_rate": 6.2589928057553964e-06,
      "loss": 0.6024,
      "step": 68
    },
    {
      "epoch": 2.225806451612903,
      "grad_norm": 6.400041580200195,
      "learning_rate": 6.1870503597122315e-06,
      "loss": 0.7029,
      "step": 69
    },
    {
      "epoch": 2.258064516129032,
      "grad_norm": 3.543893575668335,
      "learning_rate": 6.115107913669065e-06,
      "loss": 0.4423,
      "step": 70
    },
    {
      "epoch": 2.2903225806451615,
      "grad_norm": 4.5264787673950195,
      "learning_rate": 6.0431654676259e-06,
      "loss": 0.4439,
      "step": 71
    },
    {
      "epoch": 2.3225806451612905,
      "grad_norm": 6.25119686126709,
      "learning_rate": 5.971223021582734e-06,
      "loss": 0.5624,
      "step": 72
    },
    {
      "epoch": 2.3548387096774195,
      "grad_norm": 3.226388931274414,
      "learning_rate": 5.899280575539568e-06,
      "loss": 0.3964,
      "step": 73
    },
    {
      "epoch": 2.3870967741935485,
      "grad_norm": 3.7347843647003174,
      "learning_rate": 5.8273381294964035e-06,
      "loss": 0.5466,
      "step": 74
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 3.9312491416931152,
      "learning_rate": 5.755395683453238e-06,
      "loss": 0.5006,
      "step": 75
    },
    {
      "epoch": 2.4516129032258065,
      "grad_norm": 5.62445592880249,
      "learning_rate": 5.683453237410073e-06,
      "loss": 0.3485,
      "step": 76
    },
    {
      "epoch": 2.4838709677419355,
      "grad_norm": 3.4454846382141113,
      "learning_rate": 5.611510791366906e-06,
      "loss": 0.399,
      "step": 77
    },
    {
      "epoch": 2.5161290322580645,
      "grad_norm": 3.8090169429779053,
      "learning_rate": 5.539568345323741e-06,
      "loss": 0.3801,
      "step": 78
    },
    {
      "epoch": 2.5483870967741935,
      "grad_norm": 3.244086742401123,
      "learning_rate": 5.467625899280576e-06,
      "loss": 0.3907,
      "step": 79
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 4.637303352355957,
      "learning_rate": 5.3956834532374105e-06,
      "loss": 0.4531,
      "step": 80
    },
    {
      "epoch": 2.6129032258064515,
      "grad_norm": 3.3718318939208984,
      "learning_rate": 5.3237410071942456e-06,
      "loss": 0.3218,
      "step": 81
    },
    {
      "epoch": 2.6451612903225805,
      "grad_norm": 3.9827523231506348,
      "learning_rate": 5.251798561151079e-06,
      "loss": 0.3826,
      "step": 82
    },
    {
      "epoch": 2.6774193548387095,
      "grad_norm": 4.160843372344971,
      "learning_rate": 5.179856115107914e-06,
      "loss": 0.3748,
      "step": 83
    },
    {
      "epoch": 2.709677419354839,
      "grad_norm": 3.411928653717041,
      "learning_rate": 5.107913669064749e-06,
      "loss": 0.353,
      "step": 84
    },
    {
      "epoch": 2.741935483870968,
      "grad_norm": 5.943264007568359,
      "learning_rate": 5.035971223021583e-06,
      "loss": 0.2906,
      "step": 85
    },
    {
      "epoch": 2.774193548387097,
      "grad_norm": 4.6810150146484375,
      "learning_rate": 4.9640287769784175e-06,
      "loss": 0.2362,
      "step": 86
    },
    {
      "epoch": 2.806451612903226,
      "grad_norm": 3.2993741035461426,
      "learning_rate": 4.892086330935253e-06,
      "loss": 0.2516,
      "step": 87
    },
    {
      "epoch": 2.838709677419355,
      "grad_norm": 2.4518892765045166,
      "learning_rate": 4.820143884892087e-06,
      "loss": 0.2009,
      "step": 88
    },
    {
      "epoch": 2.870967741935484,
      "grad_norm": 6.592429161071777,
      "learning_rate": 4.748201438848921e-06,
      "loss": 0.5157,
      "step": 89
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 2.818664312362671,
      "learning_rate": 4.676258992805755e-06,
      "loss": 0.2421,
      "step": 90
    },
    {
      "epoch": 2.935483870967742,
      "grad_norm": 4.569883346557617,
      "learning_rate": 4.60431654676259e-06,
      "loss": 0.2791,
      "step": 91
    },
    {
      "epoch": 2.967741935483871,
      "grad_norm": 5.113462924957275,
      "learning_rate": 4.5323741007194245e-06,
      "loss": 0.2892,
      "step": 92
    },
    {
      "epoch": 3.0,
      "grad_norm": 4.380687713623047,
      "learning_rate": 4.46043165467626e-06,
      "loss": 0.3231,
      "step": 93
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.967741935483871,
      "eval_f1_macro": 0.967304625199362,
      "eval_f1_micro": 0.967741935483871,
      "eval_f1_weighted": 0.967626176879148,
      "eval_loss": 0.23027083277702332,
      "eval_precision_macro": 0.9710144927536232,
      "eval_precision_micro": 0.967741935483871,
      "eval_precision_weighted": 0.97054698457223,
      "eval_recall_macro": 0.9666666666666667,
      "eval_recall_micro": 0.967741935483871,
      "eval_recall_weighted": 0.967741935483871,
      "eval_runtime": 0.3631,
      "eval_samples_per_second": 170.75,
      "eval_steps_per_second": 11.016,
      "step": 93
    },
    {
      "epoch": 3.032258064516129,
      "grad_norm": 3.396799087524414,
      "learning_rate": 4.388489208633094e-06,
      "loss": 0.2294,
      "step": 94
    },
    {
      "epoch": 3.064516129032258,
      "grad_norm": 2.6190600395202637,
      "learning_rate": 4.316546762589928e-06,
      "loss": 0.2166,
      "step": 95
    },
    {
      "epoch": 3.096774193548387,
      "grad_norm": 4.4704060554504395,
      "learning_rate": 4.244604316546763e-06,
      "loss": 0.3076,
      "step": 96
    },
    {
      "epoch": 3.129032258064516,
      "grad_norm": 3.570626974105835,
      "learning_rate": 4.172661870503597e-06,
      "loss": 0.2187,
      "step": 97
    },
    {
      "epoch": 3.161290322580645,
      "grad_norm": 3.120173931121826,
      "learning_rate": 4.100719424460432e-06,
      "loss": 0.4226,
      "step": 98
    },
    {
      "epoch": 3.193548387096774,
      "grad_norm": 2.8124935626983643,
      "learning_rate": 4.028776978417267e-06,
      "loss": 0.2103,
      "step": 99
    },
    {
      "epoch": 3.225806451612903,
      "grad_norm": 3.7060863971710205,
      "learning_rate": 3.956834532374101e-06,
      "loss": 0.2506,
      "step": 100
    },
    {
      "epoch": 3.258064516129032,
      "grad_norm": 2.422107458114624,
      "learning_rate": 3.884892086330936e-06,
      "loss": 0.174,
      "step": 101
    },
    {
      "epoch": 3.2903225806451615,
      "grad_norm": 2.4062161445617676,
      "learning_rate": 3.81294964028777e-06,
      "loss": 0.2021,
      "step": 102
    },
    {
      "epoch": 3.3225806451612905,
      "grad_norm": 2.4910507202148438,
      "learning_rate": 3.741007194244605e-06,
      "loss": 0.1937,
      "step": 103
    },
    {
      "epoch": 3.3548387096774195,
      "grad_norm": 2.2566051483154297,
      "learning_rate": 3.669064748201439e-06,
      "loss": 0.1584,
      "step": 104
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 2.0386288166046143,
      "learning_rate": 3.5971223021582737e-06,
      "loss": 0.1443,
      "step": 105
    },
    {
      "epoch": 3.4193548387096775,
      "grad_norm": 2.2671079635620117,
      "learning_rate": 3.525179856115108e-06,
      "loss": 0.181,
      "step": 106
    },
    {
      "epoch": 3.4516129032258065,
      "grad_norm": 1.894828200340271,
      "learning_rate": 3.453237410071943e-06,
      "loss": 0.1515,
      "step": 107
    },
    {
      "epoch": 3.4838709677419355,
      "grad_norm": 4.2288384437561035,
      "learning_rate": 3.381294964028777e-06,
      "loss": 0.272,
      "step": 108
    },
    {
      "epoch": 3.5161290322580645,
      "grad_norm": 3.274885416030884,
      "learning_rate": 3.309352517985612e-06,
      "loss": 0.172,
      "step": 109
    },
    {
      "epoch": 3.5483870967741935,
      "grad_norm": 3.967803955078125,
      "learning_rate": 3.237410071942446e-06,
      "loss": 0.4134,
      "step": 110
    },
    {
      "epoch": 3.5806451612903225,
      "grad_norm": 3.1320571899414062,
      "learning_rate": 3.1654676258992807e-06,
      "loss": 0.2018,
      "step": 111
    },
    {
      "epoch": 3.6129032258064515,
      "grad_norm": 2.877068519592285,
      "learning_rate": 3.0935251798561158e-06,
      "loss": 0.1946,
      "step": 112
    },
    {
      "epoch": 3.6451612903225805,
      "grad_norm": 2.552804470062256,
      "learning_rate": 3.02158273381295e-06,
      "loss": 0.1403,
      "step": 113
    },
    {
      "epoch": 3.6774193548387095,
      "grad_norm": 5.13293981552124,
      "learning_rate": 2.949640287769784e-06,
      "loss": 0.3705,
      "step": 114
    },
    {
      "epoch": 3.709677419354839,
      "grad_norm": 1.5643055438995361,
      "learning_rate": 2.877697841726619e-06,
      "loss": 0.1245,
      "step": 115
    },
    {
      "epoch": 3.741935483870968,
      "grad_norm": 1.7311073541641235,
      "learning_rate": 2.805755395683453e-06,
      "loss": 0.1252,
      "step": 116
    },
    {
      "epoch": 3.774193548387097,
      "grad_norm": 1.9834305047988892,
      "learning_rate": 2.733812949640288e-06,
      "loss": 0.1294,
      "step": 117
    },
    {
      "epoch": 3.806451612903226,
      "grad_norm": 1.7714146375656128,
      "learning_rate": 2.6618705035971228e-06,
      "loss": 0.1248,
      "step": 118
    },
    {
      "epoch": 3.838709677419355,
      "grad_norm": 2.352647304534912,
      "learning_rate": 2.589928057553957e-06,
      "loss": 0.3432,
      "step": 119
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 1.8365036249160767,
      "learning_rate": 2.5179856115107916e-06,
      "loss": 0.1135,
      "step": 120
    },
    {
      "epoch": 3.903225806451613,
      "grad_norm": 9.964637756347656,
      "learning_rate": 2.4460431654676263e-06,
      "loss": 0.6474,
      "step": 121
    },
    {
      "epoch": 3.935483870967742,
      "grad_norm": 2.3334484100341797,
      "learning_rate": 2.3741007194244605e-06,
      "loss": 0.1629,
      "step": 122
    },
    {
      "epoch": 3.967741935483871,
      "grad_norm": 4.90928316116333,
      "learning_rate": 2.302158273381295e-06,
      "loss": 0.1706,
      "step": 123
    },
    {
      "epoch": 4.0,
      "grad_norm": Infinity,
      "learning_rate": 2.302158273381295e-06,
      "loss": 0.4531,
      "step": 124
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.967741935483871,
      "eval_f1_macro": 0.967304625199362,
      "eval_f1_micro": 0.967741935483871,
      "eval_f1_weighted": 0.967626176879148,
      "eval_loss": 0.1408514678478241,
      "eval_precision_macro": 0.9710144927536232,
      "eval_precision_micro": 0.967741935483871,
      "eval_precision_weighted": 0.97054698457223,
      "eval_recall_macro": 0.9666666666666667,
      "eval_recall_micro": 0.967741935483871,
      "eval_recall_weighted": 0.967741935483871,
      "eval_runtime": 0.364,
      "eval_samples_per_second": 170.325,
      "eval_steps_per_second": 10.989,
      "step": 124
    },
    {
      "epoch": 4.032258064516129,
      "grad_norm": 1.4911006689071655,
      "learning_rate": 2.23021582733813e-06,
      "loss": 0.1036,
      "step": 125
    },
    {
      "epoch": 4.064516129032258,
      "grad_norm": 1.8918758630752563,
      "learning_rate": 2.158273381294964e-06,
      "loss": 0.1496,
      "step": 126
    },
    {
      "epoch": 4.096774193548387,
      "grad_norm": 7.057977199554443,
      "learning_rate": 2.0863309352517987e-06,
      "loss": 0.4655,
      "step": 127
    },
    {
      "epoch": 4.129032258064516,
      "grad_norm": 1.8621776103973389,
      "learning_rate": 2.0143884892086333e-06,
      "loss": 0.1149,
      "step": 128
    },
    {
      "epoch": 4.161290322580645,
      "grad_norm": 1.6348507404327393,
      "learning_rate": 1.942446043165468e-06,
      "loss": 0.0879,
      "step": 129
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 2.331972360610962,
      "learning_rate": 1.8705035971223024e-06,
      "loss": 0.1251,
      "step": 130
    },
    {
      "epoch": 4.225806451612903,
      "grad_norm": 1.8028337955474854,
      "learning_rate": 1.7985611510791368e-06,
      "loss": 0.1074,
      "step": 131
    },
    {
      "epoch": 4.258064516129032,
      "grad_norm": 1.5170693397521973,
      "learning_rate": 1.7266187050359715e-06,
      "loss": 0.0963,
      "step": 132
    },
    {
      "epoch": 4.290322580645161,
      "grad_norm": 1.7213988304138184,
      "learning_rate": 1.654676258992806e-06,
      "loss": 0.1002,
      "step": 133
    },
    {
      "epoch": 4.32258064516129,
      "grad_norm": 1.1244333982467651,
      "learning_rate": 1.5827338129496403e-06,
      "loss": 0.0825,
      "step": 134
    },
    {
      "epoch": 4.354838709677419,
      "grad_norm": 1.8200867176055908,
      "learning_rate": 1.510791366906475e-06,
      "loss": 0.1212,
      "step": 135
    },
    {
      "epoch": 4.387096774193548,
      "grad_norm": 1.1631309986114502,
      "learning_rate": 1.4388489208633094e-06,
      "loss": 0.0765,
      "step": 136
    },
    {
      "epoch": 4.419354838709677,
      "grad_norm": 1.3975659608840942,
      "learning_rate": 1.366906474820144e-06,
      "loss": 0.1039,
      "step": 137
    },
    {
      "epoch": 4.451612903225806,
      "grad_norm": 3.7922534942626953,
      "learning_rate": 1.2949640287769785e-06,
      "loss": 0.434,
      "step": 138
    },
    {
      "epoch": 4.483870967741936,
      "grad_norm": 1.495962381362915,
      "learning_rate": 1.2230215827338131e-06,
      "loss": 0.0988,
      "step": 139
    },
    {
      "epoch": 4.516129032258064,
      "grad_norm": 6.633321285247803,
      "learning_rate": 1.1510791366906476e-06,
      "loss": 0.3248,
      "step": 140
    },
    {
      "epoch": 4.548387096774194,
      "grad_norm": 0.9505869746208191,
      "learning_rate": 1.079136690647482e-06,
      "loss": 0.0635,
      "step": 141
    },
    {
      "epoch": 4.580645161290323,
      "grad_norm": 4.179118633270264,
      "learning_rate": 1.0071942446043167e-06,
      "loss": 0.4312,
      "step": 142
    },
    {
      "epoch": 4.612903225806452,
      "grad_norm": 4.571844100952148,
      "learning_rate": 9.352517985611512e-07,
      "loss": 0.4026,
      "step": 143
    },
    {
      "epoch": 4.645161290322581,
      "grad_norm": 6.533665180206299,
      "learning_rate": 8.633093525179857e-07,
      "loss": 0.2858,
      "step": 144
    },
    {
      "epoch": 4.67741935483871,
      "grad_norm": 1.06562077999115,
      "learning_rate": 7.913669064748202e-07,
      "loss": 0.0803,
      "step": 145
    },
    {
      "epoch": 4.709677419354839,
      "grad_norm": 1.3917797803878784,
      "learning_rate": 7.194244604316547e-07,
      "loss": 0.0883,
      "step": 146
    },
    {
      "epoch": 4.741935483870968,
      "grad_norm": 2.7961225509643555,
      "learning_rate": 6.474820143884893e-07,
      "loss": 0.1603,
      "step": 147
    },
    {
      "epoch": 4.774193548387097,
      "grad_norm": 2.315228223800659,
      "learning_rate": 5.755395683453238e-07,
      "loss": 0.1078,
      "step": 148
    },
    {
      "epoch": 4.806451612903226,
      "grad_norm": 1.2167779207229614,
      "learning_rate": 5.035971223021583e-07,
      "loss": 0.0807,
      "step": 149
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 3.9313344955444336,
      "learning_rate": 4.3165467625899287e-07,
      "loss": 0.1382,
      "step": 150
    },
    {
      "epoch": 4.870967741935484,
      "grad_norm": 1.4970654249191284,
      "learning_rate": 3.5971223021582736e-07,
      "loss": 0.1097,
      "step": 151
    },
    {
      "epoch": 4.903225806451613,
      "grad_norm": 2.4545068740844727,
      "learning_rate": 2.877697841726619e-07,
      "loss": 0.1327,
      "step": 152
    },
    {
      "epoch": 4.935483870967742,
      "grad_norm": 5.53891134262085,
      "learning_rate": 2.1582733812949643e-07,
      "loss": 0.1897,
      "step": 153
    },
    {
      "epoch": 4.967741935483871,
      "grad_norm": 1.297095537185669,
      "learning_rate": 1.4388489208633095e-07,
      "loss": 0.0874,
      "step": 154
    },
    {
      "epoch": 5.0,
      "grad_norm": 8.026548385620117,
      "learning_rate": 7.194244604316547e-08,
      "loss": 0.7796,
      "step": 155
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9838709677419355,
      "eval_f1_macro": 0.9837010534684953,
      "eval_f1_micro": 0.9838709677419355,
      "eval_f1_weighted": 0.9838517321638102,
      "eval_loss": 0.12461505830287933,
      "eval_precision_macro": 0.9848484848484849,
      "eval_precision_micro": 0.9838709677419355,
      "eval_precision_weighted": 0.9846041055718475,
      "eval_recall_macro": 0.9833333333333334,
      "eval_recall_micro": 0.9838709677419355,
      "eval_recall_weighted": 0.9838709677419355,
      "eval_runtime": 0.3625,
      "eval_samples_per_second": 171.046,
      "eval_steps_per_second": 11.035,
      "step": 155
    }
  ],
  "logging_steps": 1,
  "max_steps": 155,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 161613108449280.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}