|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"global_step": 791, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.105263157894737e-07, |
|
"loss": 1.5774, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.210526315789474e-07, |
|
"loss": 2.0325, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.315789473684211e-07, |
|
"loss": 1.6573, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.421052631578948e-07, |
|
"loss": 1.5422, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0526315789473685e-06, |
|
"loss": 1.4652, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2631578947368422e-06, |
|
"loss": 1.4239, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4736842105263159e-06, |
|
"loss": 1.3821, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.6842105263157895e-06, |
|
"loss": 1.3956, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8947368421052634e-06, |
|
"loss": 1.3272, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.105263157894737e-06, |
|
"loss": 1.2911, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.3157894736842105e-06, |
|
"loss": 1.2653, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.5263157894736844e-06, |
|
"loss": 1.2739, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.7368421052631583e-06, |
|
"loss": 1.2295, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.9473684210526317e-06, |
|
"loss": 1.2498, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 1.2149, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.368421052631579e-06, |
|
"loss": 1.207, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.578947368421053e-06, |
|
"loss": 1.1964, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.789473684210527e-06, |
|
"loss": 1.1957, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.2128, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.210526315789474e-06, |
|
"loss": 1.1877, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.4210526315789476e-06, |
|
"loss": 1.1839, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.631578947368421e-06, |
|
"loss": 1.1865, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.842105263157895e-06, |
|
"loss": 1.1608, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.052631578947369e-06, |
|
"loss": 1.1334, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 1.1492, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.4736842105263165e-06, |
|
"loss": 1.1678, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.68421052631579e-06, |
|
"loss": 1.1471, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 5.8947368421052634e-06, |
|
"loss": 1.1156, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.105263157894738e-06, |
|
"loss": 1.1435, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 1.1368, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.526315789473685e-06, |
|
"loss": 1.0934, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.736842105263158e-06, |
|
"loss": 1.1093, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.947368421052632e-06, |
|
"loss": 1.1067, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.157894736842106e-06, |
|
"loss": 1.1155, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.368421052631579e-06, |
|
"loss": 1.1333, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.578947368421054e-06, |
|
"loss": 1.1088, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.789473684210526e-06, |
|
"loss": 1.1035, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.09, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.210526315789475e-06, |
|
"loss": 1.0929, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 1.0959, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.631578947368422e-06, |
|
"loss": 1.0963, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.842105263157895e-06, |
|
"loss": 1.1186, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.05263157894737e-06, |
|
"loss": 1.0964, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.263157894736842e-06, |
|
"loss": 1.0705, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 1.0615, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.68421052631579e-06, |
|
"loss": 1.0857, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.894736842105264e-06, |
|
"loss": 1.0757, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.0105263157894738e-05, |
|
"loss": 1.0795, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.0315789473684213e-05, |
|
"loss": 1.041, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 1.0754, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.073684210526316e-05, |
|
"loss": 1.0703, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.0947368421052633e-05, |
|
"loss": 1.0828, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1157894736842105e-05, |
|
"loss": 1.0721, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.136842105263158e-05, |
|
"loss": 1.0521, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1578947368421053e-05, |
|
"loss": 1.0308, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1789473684210527e-05, |
|
"loss": 1.0462, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.0658, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.2210526315789475e-05, |
|
"loss": 1.0586, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.2421052631578949e-05, |
|
"loss": 1.0702, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 1.0468, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2842105263157896e-05, |
|
"loss": 1.0407, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.305263157894737e-05, |
|
"loss": 1.0314, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3263157894736843e-05, |
|
"loss": 1.0773, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3473684210526316e-05, |
|
"loss": 1.0317, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3684210526315791e-05, |
|
"loss": 1.049, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3894736842105265e-05, |
|
"loss": 1.0123, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.4105263157894738e-05, |
|
"loss": 1.0257, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4315789473684212e-05, |
|
"loss": 1.0412, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4526315789473687e-05, |
|
"loss": 1.0419, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4736842105263159e-05, |
|
"loss": 1.0322, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4947368421052632e-05, |
|
"loss": 1.0398, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.5157894736842107e-05, |
|
"loss": 1.0773, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.536842105263158e-05, |
|
"loss": 1.0233, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.5578947368421052e-05, |
|
"loss": 1.025, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.578947368421053e-05, |
|
"loss": 1.026, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.0122, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6210526315789473e-05, |
|
"loss": 1.0413, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.642105263157895e-05, |
|
"loss": 1.0411, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6631578947368423e-05, |
|
"loss": 1.0096, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6842105263157896e-05, |
|
"loss": 1.0063, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.705263157894737e-05, |
|
"loss": 1.0144, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.7263157894736843e-05, |
|
"loss": 1.0264, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.7473684210526317e-05, |
|
"loss": 1.0291, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.768421052631579e-05, |
|
"loss": 1.0117, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.7894736842105264e-05, |
|
"loss": 1.0204, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.810526315789474e-05, |
|
"loss": 1.0234, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.831578947368421e-05, |
|
"loss": 1.0144, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8526315789473684e-05, |
|
"loss": 1.0317, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.873684210526316e-05, |
|
"loss": 1.0133, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.894736842105263e-05, |
|
"loss": 1.0236, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9157894736842108e-05, |
|
"loss": 1.0168, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.936842105263158e-05, |
|
"loss": 1.0287, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9578947368421055e-05, |
|
"loss": 1.0184, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9789473684210528e-05, |
|
"loss": 1.037, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0157, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.999999049040018e-05, |
|
"loss": 1.0175, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9999961961618814e-05, |
|
"loss": 1.0371, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9999914413710156e-05, |
|
"loss": 1.0348, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999847846764634e-05, |
|
"loss": 0.9883, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999762260908862e-05, |
|
"loss": 1.0285, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999657656305617e-05, |
|
"loss": 0.9958, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999534033153842e-05, |
|
"loss": 1.0154, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999391391688664e-05, |
|
"loss": 0.9959, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999922973218137e-05, |
|
"loss": 0.9833, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999904905493943e-05, |
|
"loss": 0.9956, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999884936030647e-05, |
|
"loss": 0.9985, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9998630648662304e-05, |
|
"loss": 1.0135, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9998392920422897e-05, |
|
"loss": 1.0056, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9998136176040385e-05, |
|
"loss": 0.9693, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9997860416003086e-05, |
|
"loss": 0.9946, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9997565640835462e-05, |
|
"loss": 1.0038, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.999725185109816e-05, |
|
"loss": 1.0056, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.999691904738798e-05, |
|
"loss": 0.9862, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9996567230337888e-05, |
|
"loss": 0.9854, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9996196400617015e-05, |
|
"loss": 0.979, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9995806558930647e-05, |
|
"loss": 0.9778, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9995397706020227e-05, |
|
"loss": 1.0034, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9994969842663368e-05, |
|
"loss": 0.995, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999452296967383e-05, |
|
"loss": 0.9927, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9994057087901526e-05, |
|
"loss": 1.004, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9993572198232528e-05, |
|
"loss": 0.9946, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999306830158906e-05, |
|
"loss": 1.0066, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9992545398929488e-05, |
|
"loss": 0.9626, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9992003491248338e-05, |
|
"loss": 1.0083, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9991442579576268e-05, |
|
"loss": 0.9809, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.999086266498009e-05, |
|
"loss": 0.9915, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9990263748562754e-05, |
|
"loss": 0.9763, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9989645831463352e-05, |
|
"loss": 0.9745, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9989008914857115e-05, |
|
"loss": 0.9734, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9988352999955407e-05, |
|
"loss": 0.9847, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.998767808800572e-05, |
|
"loss": 1.0072, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9986984180291688e-05, |
|
"loss": 1.0006, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9986271278133066e-05, |
|
"loss": 0.9877, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.998553938288574e-05, |
|
"loss": 0.9584, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.998478849594171e-05, |
|
"loss": 0.9812, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9984018618729107e-05, |
|
"loss": 1.0122, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9983229752712178e-05, |
|
"loss": 0.9799, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.998242189939128e-05, |
|
"loss": 0.9948, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9981595060302883e-05, |
|
"loss": 0.9882, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9980749237019576e-05, |
|
"loss": 1.0044, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9979884431150037e-05, |
|
"loss": 0.9617, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9979000644339067e-05, |
|
"loss": 0.9794, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9978097878267554e-05, |
|
"loss": 0.9804, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9977176134652482e-05, |
|
"loss": 0.9694, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.997623541524694e-05, |
|
"loss": 0.9682, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9975275721840105e-05, |
|
"loss": 0.9545, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9974297056257223e-05, |
|
"loss": 0.9859, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.997329942035965e-05, |
|
"loss": 0.9864, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.997228281604481e-05, |
|
"loss": 0.9902, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9971247245246194e-05, |
|
"loss": 1.0059, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.997019270993338e-05, |
|
"loss": 0.9901, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9969119212112005e-05, |
|
"loss": 0.9865, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.996802675382379e-05, |
|
"loss": 0.9762, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9966915337146484e-05, |
|
"loss": 0.9687, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9965784964193925e-05, |
|
"loss": 0.9613, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9964635637115988e-05, |
|
"loss": 0.9625, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9963467358098607e-05, |
|
"loss": 0.9396, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9962280129363746e-05, |
|
"loss": 0.9731, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9961073953169422e-05, |
|
"loss": 0.9582, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9959848831809688e-05, |
|
"loss": 0.9309, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9958604767614626e-05, |
|
"loss": 0.988, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9957341762950346e-05, |
|
"loss": 0.9342, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9956059820218982e-05, |
|
"loss": 0.9972, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9954758941858685e-05, |
|
"loss": 0.9445, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9953439130343627e-05, |
|
"loss": 0.9711, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9952100388183973e-05, |
|
"loss": 0.9613, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9950742717925916e-05, |
|
"loss": 0.9613, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9949366122151627e-05, |
|
"loss": 0.9619, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9947970603479286e-05, |
|
"loss": 0.9873, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9946556164563055e-05, |
|
"loss": 0.9672, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9945122808093088e-05, |
|
"loss": 0.9687, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9943670536795504e-05, |
|
"loss": 0.9848, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.994219935343242e-05, |
|
"loss": 0.9629, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.99407092608019e-05, |
|
"loss": 0.9584, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9939200261737986e-05, |
|
"loss": 0.9649, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.993767235911067e-05, |
|
"loss": 0.9527, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.99361255558259e-05, |
|
"loss": 0.9832, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9934559854825574e-05, |
|
"loss": 0.9724, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9932975259087533e-05, |
|
"loss": 0.9466, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9931371771625545e-05, |
|
"loss": 0.9431, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9929749395489316e-05, |
|
"loss": 0.9612, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.992810813376448e-05, |
|
"loss": 0.9508, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.992644798957258e-05, |
|
"loss": 0.9415, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9924768966071087e-05, |
|
"loss": 0.9516, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9923071066453356e-05, |
|
"loss": 0.973, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9921354293948666e-05, |
|
"loss": 0.9281, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9919618651822174e-05, |
|
"loss": 0.9668, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9917864143374942e-05, |
|
"loss": 0.9713, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9916090771943893e-05, |
|
"loss": 0.9621, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9914298540901845e-05, |
|
"loss": 0.9716, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9912487453657478e-05, |
|
"loss": 0.9318, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.991065751365533e-05, |
|
"loss": 0.9341, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9908808724375807e-05, |
|
"loss": 0.9497, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.990694108933515e-05, |
|
"loss": 0.9681, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.990505461208546e-05, |
|
"loss": 0.9289, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.990314929621466e-05, |
|
"loss": 0.9432, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.990122514534651e-05, |
|
"loss": 0.9455, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.989928216314059e-05, |
|
"loss": 0.9737, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9897320353292304e-05, |
|
"loss": 0.977, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9895339719532848e-05, |
|
"loss": 0.9488, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.989334026562923e-05, |
|
"loss": 0.9708, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9891321995384254e-05, |
|
"loss": 0.9761, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.988928491263651e-05, |
|
"loss": 0.9425, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.988722902126036e-05, |
|
"loss": 0.9622, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.988515432516595e-05, |
|
"loss": 0.953, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9883060828299187e-05, |
|
"loss": 0.9425, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.988094853464173e-05, |
|
"loss": 0.9659, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9878817448210998e-05, |
|
"loss": 0.9257, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.987666757306014e-05, |
|
"loss": 0.9447, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9874498913278054e-05, |
|
"loss": 0.9591, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.987231147298935e-05, |
|
"loss": 0.9217, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9870105256354367e-05, |
|
"loss": 0.9525, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.986788026756916e-05, |
|
"loss": 0.954, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9865636510865466e-05, |
|
"loss": 0.9624, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.986337399051074e-05, |
|
"loss": 0.9416, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.986109271080811e-05, |
|
"loss": 0.9483, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9858792676096395e-05, |
|
"loss": 0.9335, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9856473890750067e-05, |
|
"loss": 0.936, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.985413635917928e-05, |
|
"loss": 0.9771, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.985178008582982e-05, |
|
"loss": 0.9358, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9849405075183136e-05, |
|
"loss": 0.9546, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9847011331756315e-05, |
|
"loss": 0.9571, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9844598860102057e-05, |
|
"loss": 0.9538, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.984216766480869e-05, |
|
"loss": 0.9795, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.983971775050016e-05, |
|
"loss": 0.9397, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9837249121836e-05, |
|
"loss": 0.9334, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.983476178351135e-05, |
|
"loss": 0.9456, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9832255740256926e-05, |
|
"loss": 0.9642, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.982973099683902e-05, |
|
"loss": 0.957, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9827187558059492e-05, |
|
"loss": 0.9557, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.982462542875576e-05, |
|
"loss": 0.9086, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9822044613800793e-05, |
|
"loss": 0.9432, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.981944511810309e-05, |
|
"loss": 0.9622, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9816826946606686e-05, |
|
"loss": 0.9462, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9814190104291128e-05, |
|
"loss": 0.9231, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9811534596171483e-05, |
|
"loss": 0.9376, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9808860427298323e-05, |
|
"loss": 0.9512, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9806167602757688e-05, |
|
"loss": 0.935, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9803456127671122e-05, |
|
"loss": 0.9497, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.980072600719564e-05, |
|
"loss": 0.9221, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9797977246523697e-05, |
|
"loss": 0.9381, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.979520985088323e-05, |
|
"loss": 0.9642, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9792423825537602e-05, |
|
"loss": 0.9308, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9789619175785604e-05, |
|
"loss": 0.9334, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.978679590696146e-05, |
|
"loss": 0.9275, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.97839540244348e-05, |
|
"loss": 0.9258, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9781093533610655e-05, |
|
"loss": 0.9465, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9778214439929453e-05, |
|
"loss": 0.927, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9775316748867e-05, |
|
"loss": 0.9282, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9772400465934468e-05, |
|
"loss": 0.9568, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.97694655966784e-05, |
|
"loss": 0.9256, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.976651214668068e-05, |
|
"loss": 0.9267, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9763540121558526e-05, |
|
"loss": 0.9174, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9760549526964505e-05, |
|
"loss": 0.9346, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.975754036858648e-05, |
|
"loss": 0.9255, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9754512652147632e-05, |
|
"loss": 0.942, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9751466383406434e-05, |
|
"loss": 0.9342, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9748401568156645e-05, |
|
"loss": 0.9335, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.97453182122273e-05, |
|
"loss": 0.9326, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9742216321482698e-05, |
|
"loss": 0.9336, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9739095901822384e-05, |
|
"loss": 0.922, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.973595695918114e-05, |
|
"loss": 0.9543, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9732799499528993e-05, |
|
"loss": 0.9252, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9729623528871178e-05, |
|
"loss": 0.958, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.972642905324813e-05, |
|
"loss": 0.9599, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9723216078735495e-05, |
|
"loss": 0.9147, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9719984611444086e-05, |
|
"loss": 0.9138, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9716734657519896e-05, |
|
"loss": 0.9464, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.971346622314408e-05, |
|
"loss": 0.9438, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.971017931453294e-05, |
|
"loss": 0.932, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9706873937937908e-05, |
|
"loss": 0.9205, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.970355009964555e-05, |
|
"loss": 0.9476, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.970020780597754e-05, |
|
"loss": 0.9302, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.969684706329065e-05, |
|
"loss": 0.9119, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9693467877976745e-05, |
|
"loss": 0.951, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9690070256462767e-05, |
|
"loss": 0.9773, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.968665420521072e-05, |
|
"loss": 0.932, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9683219730717658e-05, |
|
"loss": 0.9197, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9679766839515677e-05, |
|
"loss": 0.9243, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.96762955381719e-05, |
|
"loss": 0.945, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9672805833288465e-05, |
|
"loss": 0.9157, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.966929773150251e-05, |
|
"loss": 0.929, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9665771239486163e-05, |
|
"loss": 0.9445, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9662226363946534e-05, |
|
"loss": 0.9293, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.965866311162569e-05, |
|
"loss": 0.9472, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9655081489300646e-05, |
|
"loss": 0.9103, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.965148150378337e-05, |
|
"loss": 0.941, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9647863161920747e-05, |
|
"loss": 0.9211, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9644226470594567e-05, |
|
"loss": 0.9194, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9640571436721524e-05, |
|
"loss": 0.9219, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.963689806725321e-05, |
|
"loss": 0.9213, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.963320636917607e-05, |
|
"loss": 0.9437, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9629496349511423e-05, |
|
"loss": 0.9327, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.962576801531543e-05, |
|
"loss": 0.93, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9622021373679085e-05, |
|
"loss": 0.9287, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.961825643172819e-05, |
|
"loss": 0.9137, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9614473196623377e-05, |
|
"loss": 0.9097, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9610671675560054e-05, |
|
"loss": 0.9169, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9606851875768404e-05, |
|
"loss": 0.9305, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9603013804513385e-05, |
|
"loss": 0.9336, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9599157469094696e-05, |
|
"loss": 0.9549, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9595282876846785e-05, |
|
"loss": 0.9209, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9591390035138812e-05, |
|
"loss": 0.9132, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.958747895137465e-05, |
|
"loss": 0.9188, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9583549632992872e-05, |
|
"loss": 0.9158, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9579602087466726e-05, |
|
"loss": 0.9424, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9575636322304122e-05, |
|
"loss": 0.9398, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9571652345047632e-05, |
|
"loss": 0.9084, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.956765016327446e-05, |
|
"loss": 0.9229, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.956362978459644e-05, |
|
"loss": 0.9358, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9559591216660007e-05, |
|
"loss": 0.9316, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9555534467146194e-05, |
|
"loss": 0.9104, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9551459543770614e-05, |
|
"loss": 0.8928, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9547366454283447e-05, |
|
"loss": 0.9528, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9543255206469417e-05, |
|
"loss": 0.9328, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.953912580814779e-05, |
|
"loss": 0.9272, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9534978267172357e-05, |
|
"loss": 0.914, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9530812591431403e-05, |
|
"loss": 0.9044, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.952662878884771e-05, |
|
"loss": 0.8994, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.952242686737854e-05, |
|
"loss": 0.9459, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.95182068350156e-05, |
|
"loss": 0.9199, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.951396869978507e-05, |
|
"loss": 0.9258, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9509712469747535e-05, |
|
"loss": 0.9449, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9505438152998005e-05, |
|
"loss": 0.9069, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9501145757665884e-05, |
|
"loss": 0.9179, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9496835291914972e-05, |
|
"loss": 0.9168, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9492506763943426e-05, |
|
"loss": 0.9289, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9488160181983763e-05, |
|
"loss": 0.9098, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9483795554302827e-05, |
|
"loss": 0.9042, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9479412889201796e-05, |
|
"loss": 0.9175, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9475012195016148e-05, |
|
"loss": 0.9149, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.947059348011565e-05, |
|
"loss": 0.9149, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9466156752904344e-05, |
|
"loss": 0.8985, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.946170202182053e-05, |
|
"loss": 0.9222, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.945722929533675e-05, |
|
"loss": 0.9138, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9452738581959775e-05, |
|
"loss": 0.9146, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9448229890230574e-05, |
|
"loss": 0.9004, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9443703228724327e-05, |
|
"loss": 0.9188, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9439158606050377e-05, |
|
"loss": 0.9147, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9434596030852234e-05, |
|
"loss": 0.9046, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9430015511807552e-05, |
|
"loss": 0.9348, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.942541705762811e-05, |
|
"loss": 0.9293, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9420800677059797e-05, |
|
"loss": 0.9332, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9416166378882604e-05, |
|
"loss": 0.9082, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9411514171910595e-05, |
|
"loss": 0.9015, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9406844064991892e-05, |
|
"loss": 0.9277, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.940215606700867e-05, |
|
"loss": 0.9195, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9397450186877117e-05, |
|
"loss": 0.9082, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.939272643354745e-05, |
|
"loss": 0.9159, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9387984816003868e-05, |
|
"loss": 0.8845, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.938322534326454e-05, |
|
"loss": 0.915, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.937844802438161e-05, |
|
"loss": 0.9067, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9373652868441155e-05, |
|
"loss": 0.9209, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9368839884563175e-05, |
|
"loss": 0.9384, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.936400908190158e-05, |
|
"loss": 0.8906, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.935916046964418e-05, |
|
"loss": 0.9128, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9354294057012635e-05, |
|
"loss": 0.908, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9349409853262474e-05, |
|
"loss": 0.9341, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9344507867683065e-05, |
|
"loss": 0.9417, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9339588109597597e-05, |
|
"loss": 0.9092, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9334650588363047e-05, |
|
"loss": 0.9371, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9329695313370188e-05, |
|
"loss": 0.8856, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.932472229404356e-05, |
|
"loss": 0.8909, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9319731539841445e-05, |
|
"loss": 0.8955, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9314723060255857e-05, |
|
"loss": 0.9132, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9309696864812523e-05, |
|
"loss": 0.9164, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9304652963070868e-05, |
|
"loss": 0.9025, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9299591364623987e-05, |
|
"loss": 0.9206, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9294512079098636e-05, |
|
"loss": 0.9451, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9289415116155207e-05, |
|
"loss": 0.9042, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.928430048548772e-05, |
|
"loss": 0.9101, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.927916819682379e-05, |
|
"loss": 0.8794, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9274018259924618e-05, |
|
"loss": 0.8981, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9268850684584975e-05, |
|
"loss": 0.8975, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9263665480633177e-05, |
|
"loss": 0.9116, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9258462657931064e-05, |
|
"loss": 0.8874, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9253242226373986e-05, |
|
"loss": 0.872, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9248004195890796e-05, |
|
"loss": 0.909, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9242748576443792e-05, |
|
"loss": 0.9167, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.923747537802876e-05, |
|
"loss": 0.9047, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9232184610674886e-05, |
|
"loss": 0.8909, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9226876284444795e-05, |
|
"loss": 0.9321, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9221550409434496e-05, |
|
"loss": 0.9289, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9216206995773373e-05, |
|
"loss": 0.8834, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.921084605362418e-05, |
|
"loss": 0.9221, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9205467593182998e-05, |
|
"loss": 0.9352, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.920007162467922e-05, |
|
"loss": 0.8981, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9194658158375553e-05, |
|
"loss": 0.8973, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9189227204567977e-05, |
|
"loss": 0.9233, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.918377877358573e-05, |
|
"loss": 0.8806, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.917831287579129e-05, |
|
"loss": 0.9191, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9172829521580358e-05, |
|
"loss": 0.9011, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9167328721381835e-05, |
|
"loss": 0.9209, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9161810485657807e-05, |
|
"loss": 0.9088, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.915627482490351e-05, |
|
"loss": 0.9122, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.915072174964733e-05, |
|
"loss": 0.9123, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9145151270450773e-05, |
|
"loss": 0.884, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9139563397908443e-05, |
|
"loss": 0.9058, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9133958142648028e-05, |
|
"loss": 0.9292, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9128335515330275e-05, |
|
"loss": 0.9134, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9122695526648968e-05, |
|
"loss": 0.9215, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.911703818733092e-05, |
|
"loss": 0.9035, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.911136350813593e-05, |
|
"loss": 0.9261, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.910567149985679e-05, |
|
"loss": 0.914, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.909996217331924e-05, |
|
"loss": 0.8913, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9094235539381965e-05, |
|
"loss": 0.9326, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9088491608936564e-05, |
|
"loss": 0.8968, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.908273039290753e-05, |
|
"loss": 0.9046, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9076951902252238e-05, |
|
"loss": 0.9075, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9071156147960912e-05, |
|
"loss": 0.893, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9065343141056616e-05, |
|
"loss": 0.9237, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.905951289259522e-05, |
|
"loss": 0.9066, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9053665413665397e-05, |
|
"loss": 0.9137, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9047800715388578e-05, |
|
"loss": 0.8853, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.904191880891895e-05, |
|
"loss": 0.9168, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.903601970544343e-05, |
|
"loss": 0.9233, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9030103416181637e-05, |
|
"loss": 0.8915, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9024169952385887e-05, |
|
"loss": 0.9097, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9018219325341146e-05, |
|
"loss": 0.9165, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9012251546365033e-05, |
|
"loss": 0.9207, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9006266626807788e-05, |
|
"loss": 0.8928, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9000264578052244e-05, |
|
"loss": 0.9091, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8994245411513825e-05, |
|
"loss": 0.8946, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8988209138640496e-05, |
|
"loss": 0.8954, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8982155770912772e-05, |
|
"loss": 0.9082, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8976085319843668e-05, |
|
"loss": 0.8897, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.89699977969787e-05, |
|
"loss": 0.9085, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8963893213895844e-05, |
|
"loss": 0.89, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8957771582205536e-05, |
|
"loss": 0.8951, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8951632913550625e-05, |
|
"loss": 0.89, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8945477219606367e-05, |
|
"loss": 0.8862, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.89393045120804e-05, |
|
"loss": 0.9058, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8933114802712726e-05, |
|
"loss": 0.8961, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8926908103275663e-05, |
|
"loss": 0.9204, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8920684425573865e-05, |
|
"loss": 0.9098, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8914443781444273e-05, |
|
"loss": 0.8867, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8908186182756084e-05, |
|
"loss": 0.8762, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.890191164141076e-05, |
|
"loss": 0.9006, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.889562016934196e-05, |
|
"loss": 0.8832, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8889311778515577e-05, |
|
"loss": 0.9193, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8882986480929658e-05, |
|
"loss": 0.9148, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8876644288614417e-05, |
|
"loss": 0.8919, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.887028521363219e-05, |
|
"loss": 0.9054, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.886390926807743e-05, |
|
"loss": 0.8911, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.885751646407668e-05, |
|
"loss": 0.9038, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8851106813788537e-05, |
|
"loss": 0.8997, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.884468032940364e-05, |
|
"loss": 0.918, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.883823702314466e-05, |
|
"loss": 0.8851, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8831776907266235e-05, |
|
"loss": 0.8954, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8825299994055e-05, |
|
"loss": 0.8852, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8818806295829516e-05, |
|
"loss": 0.896, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8812295824940284e-05, |
|
"loss": 0.9127, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.88057685937697e-05, |
|
"loss": 0.8899, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.879922461473203e-05, |
|
"loss": 0.8797, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.87926639002734e-05, |
|
"loss": 0.8876, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8786086462871764e-05, |
|
"loss": 0.9196, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.877949231503688e-05, |
|
"loss": 0.9034, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8772881469310293e-05, |
|
"loss": 0.9233, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.87662539382653e-05, |
|
"loss": 0.8956, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8759609734506935e-05, |
|
"loss": 0.9225, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8752948870671942e-05, |
|
"loss": 0.9015, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8746271359428752e-05, |
|
"loss": 0.9209, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8739577213477456e-05, |
|
"loss": 0.8963, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.873286644554978e-05, |
|
"loss": 0.8775, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8726139068409075e-05, |
|
"loss": 0.9067, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8719395094850267e-05, |
|
"loss": 0.8907, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.871263453769986e-05, |
|
"loss": 0.8898, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8705857409815887e-05, |
|
"loss": 0.9054, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8699063724087905e-05, |
|
"loss": 0.8808, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8692253493436962e-05, |
|
"loss": 0.8916, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.868542673081557e-05, |
|
"loss": 0.9066, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8678583449207685e-05, |
|
"loss": 0.8933, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8671723661628683e-05, |
|
"loss": 0.8896, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8664847381125327e-05, |
|
"loss": 0.9074, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8657954620775757e-05, |
|
"loss": 0.911, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.865104539368945e-05, |
|
"loss": 0.8993, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8644119713007197e-05, |
|
"loss": 0.8925, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8637177591901096e-05, |
|
"loss": 0.8738, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8630219043574504e-05, |
|
"loss": 0.8911, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8623244081262022e-05, |
|
"loss": 0.8848, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.861625271822947e-05, |
|
"loss": 0.8986, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.860924496777386e-05, |
|
"loss": 0.9051, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8602220843223377e-05, |
|
"loss": 0.9231, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8595180357937336e-05, |
|
"loss": 0.8871, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.858812352530618e-05, |
|
"loss": 0.9075, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8581050358751444e-05, |
|
"loss": 0.9086, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.857396087172572e-05, |
|
"loss": 0.9005, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8566855077712647e-05, |
|
"loss": 0.9124, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8559732990226874e-05, |
|
"loss": 0.8944, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8552594622814045e-05, |
|
"loss": 0.9002, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.854543998905076e-05, |
|
"loss": 0.8933, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8538269102544563e-05, |
|
"loss": 0.9103, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8531081976933904e-05, |
|
"loss": 0.881, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8523878625888122e-05, |
|
"loss": 0.8961, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8516659063107413e-05, |
|
"loss": 0.8955, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8509423302322805e-05, |
|
"loss": 0.8837, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8502171357296144e-05, |
|
"loss": 0.8956, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.849490324182004e-05, |
|
"loss": 0.8842, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8487618969717872e-05, |
|
"loss": 0.8848, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8480318554843744e-05, |
|
"loss": 0.8903, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8473002011082453e-05, |
|
"loss": 0.8818, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8465669352349487e-05, |
|
"loss": 0.851, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8458320592590976e-05, |
|
"loss": 0.9054, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8450955745783667e-05, |
|
"loss": 0.8893, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8443574825934915e-05, |
|
"loss": 0.9287, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8436177847082636e-05, |
|
"loss": 0.8935, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8428764823295293e-05, |
|
"loss": 0.9067, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8421335768671868e-05, |
|
"loss": 0.8949, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8413890697341815e-05, |
|
"loss": 0.8699, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.840642962346508e-05, |
|
"loss": 0.8966, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8398952561232016e-05, |
|
"loss": 0.904, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8391459524863403e-05, |
|
"loss": 0.8795, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8383950528610395e-05, |
|
"loss": 0.8705, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8376425586754502e-05, |
|
"loss": 0.8809, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.836888471360756e-05, |
|
"loss": 0.8939, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8361327923511712e-05, |
|
"loss": 0.9027, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.835375523083936e-05, |
|
"loss": 0.8844, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8346166649993162e-05, |
|
"loss": 0.887, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8338562195405995e-05, |
|
"loss": 0.897, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8330941881540917e-05, |
|
"loss": 0.899, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.832330572289116e-05, |
|
"loss": 0.8848, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8315653733980086e-05, |
|
"loss": 0.8973, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8307985929361162e-05, |
|
"loss": 0.8942, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8300302323617944e-05, |
|
"loss": 0.8972, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8292602931364026e-05, |
|
"loss": 0.8888, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8284887767243046e-05, |
|
"loss": 0.9092, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8277156845928624e-05, |
|
"loss": 0.8699, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8269410182124354e-05, |
|
"loss": 0.8733, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.826164779056377e-05, |
|
"loss": 0.923, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8253869686010324e-05, |
|
"loss": 0.8816, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.824607588325734e-05, |
|
"loss": 0.8766, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8238266397128014e-05, |
|
"loss": 0.9013, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8230441242475365e-05, |
|
"loss": 0.879, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.82226004341822e-05, |
|
"loss": 0.8988, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.821474398716112e-05, |
|
"loss": 0.874, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8206871916354453e-05, |
|
"loss": 0.8795, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8198984236734246e-05, |
|
"loss": 0.919, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.819108096330224e-05, |
|
"loss": 0.8943, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8183162111089826e-05, |
|
"loss": 0.8748, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8175227695158028e-05, |
|
"loss": 0.8988, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.816727773059747e-05, |
|
"loss": 0.876, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8159312232528342e-05, |
|
"loss": 0.8783, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.815133121610039e-05, |
|
"loss": 0.8847, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.814333469649287e-05, |
|
"loss": 0.9016, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.813532268891452e-05, |
|
"loss": 0.8971, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.812729520860354e-05, |
|
"loss": 0.8913, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8119252270827546e-05, |
|
"loss": 0.8705, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.811119389088357e-05, |
|
"loss": 0.8989, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8103120084098003e-05, |
|
"loss": 0.8941, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.809503086582658e-05, |
|
"loss": 0.8896, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8086926251454347e-05, |
|
"loss": 0.8901, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.807880625639563e-05, |
|
"loss": 0.8821, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8070670896094015e-05, |
|
"loss": 0.8936, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.80625201860223e-05, |
|
"loss": 0.9096, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8054354141682485e-05, |
|
"loss": 0.9003, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8046172778605736e-05, |
|
"loss": 0.8815, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8037976112352348e-05, |
|
"loss": 0.8955, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.802976415851172e-05, |
|
"loss": 0.865, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.802153693270234e-05, |
|
"loss": 0.8834, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8013294450571725e-05, |
|
"loss": 0.896, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.800503672779642e-05, |
|
"loss": 0.8951, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7996763780081954e-05, |
|
"loss": 0.9067, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7988475623162807e-05, |
|
"loss": 0.8994, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7980172272802398e-05, |
|
"loss": 0.8974, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7971853744793026e-05, |
|
"loss": 0.8836, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.796352005495587e-05, |
|
"loss": 0.8656, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.795517121914094e-05, |
|
"loss": 0.8945, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7946807253227054e-05, |
|
"loss": 0.8527, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7938428173121806e-05, |
|
"loss": 0.876, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7930033994761534e-05, |
|
"loss": 0.8942, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7921624734111292e-05, |
|
"loss": 0.8992, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.791320040716483e-05, |
|
"loss": 0.8686, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7904761029944534e-05, |
|
"loss": 0.8778, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7896306618501425e-05, |
|
"loss": 0.8721, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7887837188915123e-05, |
|
"loss": 0.8698, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.78793527572938e-05, |
|
"loss": 0.8748, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7870853339774165e-05, |
|
"loss": 0.8875, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7862338952521432e-05, |
|
"loss": 0.8777, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7853809611729285e-05, |
|
"loss": 0.8703, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7845265333619845e-05, |
|
"loss": 0.8627, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.783670613444365e-05, |
|
"loss": 0.8811, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7828132030479604e-05, |
|
"loss": 0.9072, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7819543038034973e-05, |
|
"loss": 0.8675, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7810939173445333e-05, |
|
"loss": 0.8889, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.780232045307454e-05, |
|
"loss": 0.8738, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7793686893314716e-05, |
|
"loss": 0.8937, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.77850385105862e-05, |
|
"loss": 0.8725, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7776375321337523e-05, |
|
"loss": 0.8709, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7767697342045376e-05, |
|
"loss": 0.9043, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.775900458921458e-05, |
|
"loss": 0.8823, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.775029707937806e-05, |
|
"loss": 0.9034, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7741574829096803e-05, |
|
"loss": 0.8626, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7732837854959822e-05, |
|
"loss": 0.8928, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7724086173584153e-05, |
|
"loss": 0.8833, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7715319801614788e-05, |
|
"loss": 0.8755, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7706538755724667e-05, |
|
"loss": 0.8746, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7697743052614637e-05, |
|
"loss": 0.8628, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7688932709013418e-05, |
|
"loss": 0.8742, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.768010774167758e-05, |
|
"loss": 0.8613, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7671268167391505e-05, |
|
"loss": 0.8693, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7662414002967353e-05, |
|
"loss": 0.885, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7653545265245043e-05, |
|
"loss": 0.8836, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.76446619710922e-05, |
|
"loss": 0.8528, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7635764137404136e-05, |
|
"loss": 0.8895, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.762685178110382e-05, |
|
"loss": 0.8816, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7617924919141843e-05, |
|
"loss": 0.8765, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.760898356849638e-05, |
|
"loss": 0.8849, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7600027746173168e-05, |
|
"loss": 0.8925, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.759105746920546e-05, |
|
"loss": 0.8843, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7582072754654005e-05, |
|
"loss": 0.8528, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7573073619607012e-05, |
|
"loss": 0.8762, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7564060081180113e-05, |
|
"loss": 0.8497, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7555032156516342e-05, |
|
"loss": 0.8872, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7545989862786087e-05, |
|
"loss": 0.8856, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7536933217187062e-05, |
|
"loss": 0.8848, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.752786223694429e-05, |
|
"loss": 0.8871, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7518776939310045e-05, |
|
"loss": 0.8799, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7509677341563837e-05, |
|
"loss": 0.8694, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.750056346101237e-05, |
|
"loss": 0.8579, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.749143531498952e-05, |
|
"loss": 0.8879, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7482292920856285e-05, |
|
"loss": 0.8786, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.747313629600077e-05, |
|
"loss": 0.8711, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7463965457838145e-05, |
|
"loss": 0.8841, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.745478042381061e-05, |
|
"loss": 0.8571, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7445581211387358e-05, |
|
"loss": 0.8811, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.743636783806456e-05, |
|
"loss": 0.857, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.742714032136531e-05, |
|
"loss": 0.9036, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.741789867883961e-05, |
|
"loss": 0.8687, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7408642928064327e-05, |
|
"loss": 0.88, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.739937308664316e-05, |
|
"loss": 0.8685, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7390089172206594e-05, |
|
"loss": 0.8835, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.73807912024119e-05, |
|
"loss": 0.9031, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7371479194943068e-05, |
|
"loss": 0.8672, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7362153167510797e-05, |
|
"loss": 0.8657, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7352813137852437e-05, |
|
"loss": 0.8915, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7343459123731983e-05, |
|
"loss": 0.8775, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7334091142940018e-05, |
|
"loss": 0.8956, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7324709213293696e-05, |
|
"loss": 0.8972, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.731531335263669e-05, |
|
"loss": 0.8736, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.730590357883918e-05, |
|
"loss": 0.8883, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.72964799097978e-05, |
|
"loss": 0.8563, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7287042363435614e-05, |
|
"loss": 0.8843, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7277590957702082e-05, |
|
"loss": 0.8553, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.726812571057302e-05, |
|
"loss": 0.8679, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.725864664005057e-05, |
|
"loss": 0.882, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7249153764163166e-05, |
|
"loss": 0.8697, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.72396471009655e-05, |
|
"loss": 0.8874, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.723012666853848e-05, |
|
"loss": 0.8775, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.722059248498921e-05, |
|
"loss": 0.8998, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.721104456845094e-05, |
|
"loss": 0.899, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7201482937083048e-05, |
|
"loss": 0.8743, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7191907609070994e-05, |
|
"loss": 0.8789, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.718231860262628e-05, |
|
"loss": 0.8732, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.717271593598643e-05, |
|
"loss": 0.8757, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.716309962741494e-05, |
|
"loss": 0.8729, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7153469695201278e-05, |
|
"loss": 0.8643, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7143826157660788e-05, |
|
"loss": 0.8786, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.713416903313471e-05, |
|
"loss": 0.8917, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7124498339990124e-05, |
|
"loss": 0.8658, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7114814096619916e-05, |
|
"loss": 0.8795, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.710511632144274e-05, |
|
"loss": 0.8652, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7095405032902987e-05, |
|
"loss": 0.8847, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.708568024947075e-05, |
|
"loss": 0.8334, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7075941989641792e-05, |
|
"loss": 0.8682, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7066190271937504e-05, |
|
"loss": 0.8787, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7056425114904868e-05, |
|
"loss": 0.8678, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7046646537116432e-05, |
|
"loss": 0.876, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7036854557170274e-05, |
|
"loss": 0.8952, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.702704919368995e-05, |
|
"loss": 0.8634, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.7017230465324477e-05, |
|
"loss": 0.907, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.7007398390748296e-05, |
|
"loss": 0.8827, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.699755298866122e-05, |
|
"loss": 0.8856, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.698769427778842e-05, |
|
"loss": 0.8959, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6977822276880368e-05, |
|
"loss": 0.872, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.696793700471283e-05, |
|
"loss": 0.848, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6958038480086792e-05, |
|
"loss": 0.8633, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6948126721828466e-05, |
|
"loss": 0.8743, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6938201748789214e-05, |
|
"loss": 0.8662, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6928263579845546e-05, |
|
"loss": 0.8743, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6918312233899058e-05, |
|
"loss": 0.8841, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6908347729876423e-05, |
|
"loss": 0.8639, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.689837008672932e-05, |
|
"loss": 0.8824, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6888379323434433e-05, |
|
"loss": 0.8687, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6878375458993397e-05, |
|
"loss": 0.8531, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6868358512432755e-05, |
|
"loss": 0.8865, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.685832850280394e-05, |
|
"loss": 0.89, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6848285449183227e-05, |
|
"loss": 0.8859, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6838229370671704e-05, |
|
"loss": 0.8611, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6828160286395222e-05, |
|
"loss": 0.8547, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.681807821550438e-05, |
|
"loss": 0.8834, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.680798317717446e-05, |
|
"loss": 0.8706, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6797875190605428e-05, |
|
"loss": 0.8837, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.678775427502186e-05, |
|
"loss": 0.8706, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6777620449672927e-05, |
|
"loss": 0.8807, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6767473733832355e-05, |
|
"loss": 0.8592, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.675731414679838e-05, |
|
"loss": 0.8483, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.674714170789373e-05, |
|
"loss": 0.856, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6736956436465573e-05, |
|
"loss": 0.8903, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.672675835188547e-05, |
|
"loss": 0.8966, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6716547473549372e-05, |
|
"loss": 0.8456, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6706323820877545e-05, |
|
"loss": 0.8882, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6696087413314557e-05, |
|
"loss": 0.8789, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6685838270329243e-05, |
|
"loss": 0.8772, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6675576411414644e-05, |
|
"loss": 0.8908, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6665301856088004e-05, |
|
"loss": 0.8826, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6655014623890697e-05, |
|
"loss": 0.8726, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.664471473438822e-05, |
|
"loss": 0.8612, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6634402207170134e-05, |
|
"loss": 0.8757, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6624077061850047e-05, |
|
"loss": 0.8547, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.661373931806555e-05, |
|
"loss": 0.8637, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6603388995478214e-05, |
|
"loss": 0.8599, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6593026113773516e-05, |
|
"loss": 0.892, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6582650692660833e-05, |
|
"loss": 0.8571, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6572262751873383e-05, |
|
"loss": 0.8716, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.65618623111682e-05, |
|
"loss": 0.8634, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6551449390326087e-05, |
|
"loss": 0.8638, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.654102400915159e-05, |
|
"loss": 0.855, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6530586187472944e-05, |
|
"loss": 0.8796, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6520135945142057e-05, |
|
"loss": 0.8685, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6509673302034444e-05, |
|
"loss": 0.8623, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6499198278049228e-05, |
|
"loss": 0.863, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6488710893109054e-05, |
|
"loss": 0.8977, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6478211167160093e-05, |
|
"loss": 0.8513, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.646769912017199e-05, |
|
"loss": 0.8825, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6457174772137804e-05, |
|
"loss": 0.8584, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6446638143074016e-05, |
|
"loss": 0.8474, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6436089253020444e-05, |
|
"loss": 0.861, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.642552812204023e-05, |
|
"loss": 0.8652, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6414954770219807e-05, |
|
"loss": 0.8599, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.640436921766884e-05, |
|
"loss": 0.8628, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6393771484520203e-05, |
|
"loss": 0.8593, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6383161590929933e-05, |
|
"loss": 0.8662, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6372539557077204e-05, |
|
"loss": 0.8584, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6361905403164275e-05, |
|
"loss": 0.8661, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.635125914941645e-05, |
|
"loss": 0.8651, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6340600816082056e-05, |
|
"loss": 0.8746, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6329930423432383e-05, |
|
"loss": 0.8446, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6319247991761672e-05, |
|
"loss": 0.8458, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.630855354138705e-05, |
|
"loss": 0.8928, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6297847092648508e-05, |
|
"loss": 0.8491, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.628712866590885e-05, |
|
"loss": 0.8788, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.627639828155367e-05, |
|
"loss": 0.8532, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6265655959991295e-05, |
|
"loss": 0.8952, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6254901721652764e-05, |
|
"loss": 0.8634, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6244135586991784e-05, |
|
"loss": 0.8624, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6233357576484666e-05, |
|
"loss": 0.8491, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.622256771063034e-05, |
|
"loss": 0.8691, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6211766009950255e-05, |
|
"loss": 0.8462, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.620095249498839e-05, |
|
"loss": 0.869, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6190127186311178e-05, |
|
"loss": 0.8698, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6179290104507493e-05, |
|
"loss": 0.8611, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.61684412701886e-05, |
|
"loss": 0.8777, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6157580703988105e-05, |
|
"loss": 0.8752, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6146708426561944e-05, |
|
"loss": 0.8703, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6135824458588318e-05, |
|
"loss": 0.8613, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6124928820767657e-05, |
|
"loss": 0.883, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6114021533822595e-05, |
|
"loss": 0.8651, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6103102618497922e-05, |
|
"loss": 0.8556, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6092172095560538e-05, |
|
"loss": 0.8611, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.608122998579942e-05, |
|
"loss": 0.8623, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.607027631002559e-05, |
|
"loss": 0.8862, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6059311089072064e-05, |
|
"loss": 0.8791, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6048334343793814e-05, |
|
"loss": 0.8602, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.603734609506772e-05, |
|
"loss": 0.8531, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6026346363792565e-05, |
|
"loss": 0.8605, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6015335170888953e-05, |
|
"loss": 0.8793, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.600431253729929e-05, |
|
"loss": 0.8857, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5993278483987743e-05, |
|
"loss": 0.8849, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.59822330319402e-05, |
|
"loss": 0.8619, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5971176202164228e-05, |
|
"loss": 0.8597, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5960108015689027e-05, |
|
"loss": 0.8746, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.594902849356541e-05, |
|
"loss": 0.8734, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5937937656865733e-05, |
|
"loss": 0.8774, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5926835526683884e-05, |
|
"loss": 0.8712, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5915722124135227e-05, |
|
"loss": 0.8711, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5904597470356557e-05, |
|
"loss": 0.8763, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5893461586506087e-05, |
|
"loss": 0.8625, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.588231449376337e-05, |
|
"loss": 0.8665, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.587115621332928e-05, |
|
"loss": 0.8574, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.585998676642598e-05, |
|
"loss": 0.8585, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5848806174296863e-05, |
|
"loss": 0.861, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5837614458206522e-05, |
|
"loss": 0.8801, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5826411639440698e-05, |
|
"loss": 0.8839, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.581519773930626e-05, |
|
"loss": 0.8756, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5803972779131154e-05, |
|
"loss": 0.8738, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5792736780264346e-05, |
|
"loss": 0.8683, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5781489764075816e-05, |
|
"loss": 0.8403, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.577023175195648e-05, |
|
"loss": 0.8483, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5758962765318177e-05, |
|
"loss": 0.8787, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5747682825593624e-05, |
|
"loss": 0.8522, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.573639195423636e-05, |
|
"loss": 0.9269, |
|
"step": 791 |
|
} |
|
], |
|
"max_steps": 2373, |
|
"num_train_epochs": 3, |
|
"total_flos": 1.0466956714212065e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|