{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.0105,
  "eval_steps": 500,
  "global_step": 1050,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1e-05,
      "grad_norm": 1.4574347149106512,
      "learning_rate": 3e-06,
      "loss": 10.8576,
      "step": 1
    },
    {
      "epoch": 2e-05,
      "grad_norm": 1.4482443114713344,
      "learning_rate": 6e-06,
      "loss": 10.8575,
      "step": 2
    },
    {
      "epoch": 3e-05,
      "grad_norm": 1.4642003089092823,
      "learning_rate": 9e-06,
      "loss": 10.8576,
      "step": 3
    },
    {
      "epoch": 4e-05,
      "grad_norm": 1.4485584072048276,
      "learning_rate": 1.2e-05,
      "loss": 10.8569,
      "step": 4
    },
    {
      "epoch": 5e-05,
      "grad_norm": 1.4574321657516995,
      "learning_rate": 1.5e-05,
      "loss": 10.8527,
      "step": 5
    },
    {
      "epoch": 6e-05,
      "grad_norm": 1.4585537094370684,
      "learning_rate": 1.8e-05,
      "loss": 10.8518,
      "step": 6
    },
    {
      "epoch": 7e-05,
      "grad_norm": 1.4183780170798466,
      "learning_rate": 2.1000000000000002e-05,
      "loss": 10.8383,
      "step": 7
    },
    {
      "epoch": 8e-05,
      "grad_norm": 1.286272643239374,
      "learning_rate": 2.4e-05,
      "loss": 10.8119,
      "step": 8
    },
    {
      "epoch": 9e-05,
      "grad_norm": 1.246364249616181,
      "learning_rate": 2.7e-05,
      "loss": 10.8063,
      "step": 9
    },
    {
      "epoch": 0.0001,
      "grad_norm": 1.2300728857390288,
      "learning_rate": 3e-05,
      "loss": 10.7913,
      "step": 10
    },
    {
      "epoch": 0.00011,
      "grad_norm": 1.1639358472437353,
      "learning_rate": 3.2999999999999996e-05,
      "loss": 10.7756,
      "step": 11
    },
    {
      "epoch": 0.00012,
      "grad_norm": 1.1455069336623074,
      "learning_rate": 3.6e-05,
      "loss": 10.7622,
      "step": 12
    },
    {
      "epoch": 0.00013,
      "grad_norm": 1.104711768149659,
      "learning_rate": 3.9e-05,
      "loss": 10.7415,
      "step": 13
    },
    {
      "epoch": 0.00014,
      "grad_norm": 1.0916409179063882,
      "learning_rate": 4.2000000000000004e-05,
      "loss": 10.7296,
      "step": 14
    },
    {
      "epoch": 0.00015,
      "grad_norm": 1.0723152562842644,
      "learning_rate": 4.4999999999999996e-05,
      "loss": 10.7183,
      "step": 15
    },
    {
      "epoch": 0.00016,
      "grad_norm": 1.0297043497697835,
      "learning_rate": 4.8e-05,
      "loss": 10.7,
      "step": 16
    },
    {
      "epoch": 0.00017,
      "grad_norm": 0.9882210863590011,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 10.6838,
      "step": 17
    },
    {
      "epoch": 0.00018,
      "grad_norm": 0.966791538351231,
      "learning_rate": 5.4e-05,
      "loss": 10.6659,
      "step": 18
    },
    {
      "epoch": 0.00019,
      "grad_norm": 0.9358769725124266,
      "learning_rate": 5.7e-05,
      "loss": 10.6509,
      "step": 19
    },
    {
      "epoch": 0.0002,
      "grad_norm": 0.9313245306892226,
      "learning_rate": 6e-05,
      "loss": 10.638,
      "step": 20
    },
    {
      "epoch": 0.00021,
      "grad_norm": 0.9056933112957348,
      "learning_rate": 6.3e-05,
      "loss": 10.6235,
      "step": 21
    },
    {
      "epoch": 0.00022,
      "grad_norm": 0.9013277325687112,
      "learning_rate": 6.599999999999999e-05,
      "loss": 10.6069,
      "step": 22
    },
    {
      "epoch": 0.00023,
      "grad_norm": 0.8982860560109955,
      "learning_rate": 6.9e-05,
      "loss": 10.5935,
      "step": 23
    },
    {
      "epoch": 0.00024,
      "grad_norm": 0.8941575476865218,
      "learning_rate": 7.2e-05,
      "loss": 10.5802,
      "step": 24
    },
    {
      "epoch": 0.00025,
      "grad_norm": 0.8955811690787305,
      "learning_rate": 7.500000000000001e-05,
      "loss": 10.5652,
      "step": 25
    },
    {
      "epoch": 0.00026,
      "grad_norm": 0.9005417636666915,
      "learning_rate": 7.8e-05,
      "loss": 10.5507,
      "step": 26
    },
    {
      "epoch": 0.00027,
      "grad_norm": 0.8929394290279239,
      "learning_rate": 8.1e-05,
      "loss": 10.5379,
      "step": 27
    },
    {
      "epoch": 0.00028,
      "grad_norm": 0.892893092343029,
      "learning_rate": 8.400000000000001e-05,
      "loss": 10.5234,
      "step": 28
    },
    {
      "epoch": 0.00029,
      "grad_norm": 0.8951984198637418,
      "learning_rate": 8.7e-05,
      "loss": 10.5078,
      "step": 29
    },
    {
      "epoch": 0.0003,
      "grad_norm": 0.901037080772758,
      "learning_rate": 8.999999999999999e-05,
      "loss": 10.4913,
      "step": 30
    },
    {
      "epoch": 0.00031,
      "grad_norm": 0.899757879512845,
      "learning_rate": 9.3e-05,
      "loss": 10.4759,
      "step": 31
    },
    {
      "epoch": 0.00032,
      "grad_norm": 0.8963415000423225,
      "learning_rate": 9.6e-05,
      "loss": 10.4612,
      "step": 32
    },
    {
      "epoch": 0.00033,
      "grad_norm": 0.8971468155029705,
      "learning_rate": 9.900000000000001e-05,
      "loss": 10.4429,
      "step": 33
    },
    {
      "epoch": 0.00034,
      "grad_norm": 0.8964368558533553,
      "learning_rate": 0.00010200000000000001,
      "loss": 10.426,
      "step": 34
    },
    {
      "epoch": 0.00035,
      "grad_norm": 0.9007340868108898,
      "learning_rate": 0.00010500000000000002,
      "loss": 10.4083,
      "step": 35
    },
    {
      "epoch": 0.00036,
      "grad_norm": 0.8977475222905443,
      "learning_rate": 0.000108,
      "loss": 10.3895,
      "step": 36
    },
    {
      "epoch": 0.00037,
      "grad_norm": 0.8929896912849768,
      "learning_rate": 0.000111,
      "loss": 10.3721,
      "step": 37
    },
    {
      "epoch": 0.00038,
      "grad_norm": 0.8925204062685723,
      "learning_rate": 0.000114,
      "loss": 10.3515,
      "step": 38
    },
    {
      "epoch": 0.00039,
      "grad_norm": 0.8947925451707294,
      "learning_rate": 0.000117,
      "loss": 10.3314,
      "step": 39
    },
    {
      "epoch": 0.0004,
      "grad_norm": 0.8990961452390619,
      "learning_rate": 0.00012,
      "loss": 10.3088,
      "step": 40
    },
    {
      "epoch": 0.00041,
      "grad_norm": 0.8951984584897338,
      "learning_rate": 0.000123,
      "loss": 10.2891,
      "step": 41
    },
    {
      "epoch": 0.00042,
      "grad_norm": 0.8942493459254965,
      "learning_rate": 0.000126,
      "loss": 10.2679,
      "step": 42
    },
    {
      "epoch": 0.00043,
      "grad_norm": 0.8983684110980745,
      "learning_rate": 0.000129,
      "loss": 10.243,
      "step": 43
    },
    {
      "epoch": 0.00044,
      "grad_norm": 0.8985869855625094,
      "learning_rate": 0.00013199999999999998,
      "loss": 10.2206,
      "step": 44
    },
    {
      "epoch": 0.00045,
      "grad_norm": 0.899962207774676,
      "learning_rate": 0.000135,
      "loss": 10.1962,
      "step": 45
    },
    {
      "epoch": 0.00046,
      "grad_norm": 0.8914153211826606,
      "learning_rate": 0.000138,
      "loss": 10.1735,
      "step": 46
    },
    {
      "epoch": 0.00047,
      "grad_norm": 0.8980761529388176,
      "learning_rate": 0.000141,
      "loss": 10.146,
      "step": 47
    },
    {
      "epoch": 0.00048,
      "grad_norm": 0.9007540011988344,
      "learning_rate": 0.000144,
      "loss": 10.1222,
      "step": 48
    },
    {
      "epoch": 0.00049,
      "grad_norm": 0.896344611975273,
      "learning_rate": 0.000147,
      "loss": 10.0974,
      "step": 49
    },
    {
      "epoch": 0.0005,
      "grad_norm": 0.9015948414141589,
      "learning_rate": 0.00015000000000000001,
      "loss": 10.0706,
      "step": 50
    },
    {
      "epoch": 0.00051,
      "grad_norm": 0.9016102183936469,
      "learning_rate": 0.000153,
      "loss": 10.0433,
      "step": 51
    },
    {
      "epoch": 0.00052,
      "grad_norm": 0.8993585803645682,
      "learning_rate": 0.000156,
      "loss": 10.0158,
      "step": 52
    },
    {
      "epoch": 0.00053,
      "grad_norm": 0.8901736410301992,
      "learning_rate": 0.000159,
      "loss": 9.9917,
      "step": 53
    },
    {
      "epoch": 0.00054,
      "grad_norm": 0.9083328832659304,
      "learning_rate": 0.000162,
      "loss": 9.9578,
      "step": 54
    },
    {
      "epoch": 0.00055,
      "grad_norm": 0.8975570632788401,
      "learning_rate": 0.000165,
      "loss": 9.9337,
      "step": 55
    },
    {
      "epoch": 0.00056,
      "grad_norm": 0.8924582131156604,
      "learning_rate": 0.00016800000000000002,
      "loss": 9.904,
      "step": 56
    },
    {
      "epoch": 0.00057,
      "grad_norm": 0.8937117244788118,
      "learning_rate": 0.000171,
      "loss": 9.8765,
      "step": 57
    },
    {
      "epoch": 0.00058,
      "grad_norm": 0.8912667557674427,
      "learning_rate": 0.000174,
      "loss": 9.8514,
      "step": 58
    },
    {
      "epoch": 0.00059,
      "grad_norm": 0.899445317105929,
      "learning_rate": 0.000177,
      "loss": 9.819,
      "step": 59
    },
    {
      "epoch": 0.0006,
      "grad_norm": 0.8827247644946434,
      "learning_rate": 0.00017999999999999998,
      "loss": 9.7927,
      "step": 60
    },
    {
      "epoch": 0.00061,
      "grad_norm": 0.8856006114384688,
      "learning_rate": 0.000183,
      "loss": 9.7645,
      "step": 61
    },
    {
      "epoch": 0.00062,
      "grad_norm": 0.8887861476206323,
      "learning_rate": 0.000186,
      "loss": 9.7329,
      "step": 62
    },
    {
      "epoch": 0.00063,
      "grad_norm": 0.8870948257177573,
      "learning_rate": 0.000189,
      "loss": 9.7044,
      "step": 63
    },
    {
      "epoch": 0.00064,
      "grad_norm": 0.8854465518366905,
      "learning_rate": 0.000192,
      "loss": 9.6741,
      "step": 64
    },
    {
      "epoch": 0.00065,
      "grad_norm": 0.8913964558604793,
      "learning_rate": 0.00019500000000000002,
      "loss": 9.6417,
      "step": 65
    },
    {
      "epoch": 0.00066,
      "grad_norm": 0.8860459843940278,
      "learning_rate": 0.00019800000000000002,
      "loss": 9.6181,
      "step": 66
    },
    {
      "epoch": 0.00067,
      "grad_norm": 0.8855602986146246,
      "learning_rate": 0.000201,
      "loss": 9.5886,
      "step": 67
    },
    {
      "epoch": 0.00068,
      "grad_norm": 0.89182613307124,
      "learning_rate": 0.00020400000000000003,
      "loss": 9.5554,
      "step": 68
    },
    {
      "epoch": 0.00069,
      "grad_norm": 0.8788147221752699,
      "learning_rate": 0.00020700000000000002,
      "loss": 9.5276,
      "step": 69
    },
    {
      "epoch": 0.0007,
      "grad_norm": 0.89013480384513,
      "learning_rate": 0.00021000000000000004,
      "loss": 9.4911,
      "step": 70
    },
    {
      "epoch": 0.00071,
      "grad_norm": 0.8832608390392153,
      "learning_rate": 0.00021299999999999997,
      "loss": 9.4637,
      "step": 71
    },
    {
      "epoch": 0.00072,
      "grad_norm": 0.8850551460752941,
      "learning_rate": 0.000216,
      "loss": 9.4371,
      "step": 72
    },
    {
      "epoch": 0.00073,
      "grad_norm": 0.8854430272945265,
      "learning_rate": 0.00021899999999999998,
      "loss": 9.4029,
      "step": 73
    },
    {
      "epoch": 0.00074,
      "grad_norm": 0.881388508551569,
      "learning_rate": 0.000222,
      "loss": 9.3766,
      "step": 74
    },
    {
      "epoch": 0.00075,
      "grad_norm": 0.8804734413831777,
      "learning_rate": 0.000225,
      "loss": 9.3413,
      "step": 75
    },
    {
      "epoch": 0.00076,
      "grad_norm": 0.891470306594604,
      "learning_rate": 0.000228,
      "loss": 9.3136,
      "step": 76
    },
    {
      "epoch": 0.00077,
      "grad_norm": 0.8835806398314079,
      "learning_rate": 0.000231,
      "loss": 9.2843,
      "step": 77
    },
    {
      "epoch": 0.00078,
      "grad_norm": 0.8778470434340718,
      "learning_rate": 0.000234,
      "loss": 9.2534,
      "step": 78
    },
    {
      "epoch": 0.00079,
      "grad_norm": 0.8849142747864956,
      "learning_rate": 0.00023700000000000001,
      "loss": 9.2174,
      "step": 79
    },
    {
      "epoch": 0.0008,
      "grad_norm": 0.8831859916332898,
      "learning_rate": 0.00024,
      "loss": 9.1835,
      "step": 80
    },
    {
      "epoch": 0.00081,
      "grad_norm": 0.8883671155155716,
      "learning_rate": 0.00024300000000000002,
      "loss": 9.1516,
      "step": 81
    },
    {
      "epoch": 0.00082,
      "grad_norm": 0.8919005998406658,
      "learning_rate": 0.000246,
      "loss": 9.1278,
      "step": 82
    },
    {
      "epoch": 0.00083,
      "grad_norm": 0.8904109612127737,
      "learning_rate": 0.00024900000000000004,
      "loss": 9.0926,
      "step": 83
    },
    {
      "epoch": 0.00084,
      "grad_norm": 0.8922919840670517,
      "learning_rate": 0.000252,
      "loss": 9.0627,
      "step": 84
    },
    {
      "epoch": 0.00085,
      "grad_norm": 0.8851770394662277,
      "learning_rate": 0.000255,
      "loss": 9.0389,
      "step": 85
    },
    {
      "epoch": 0.00086,
      "grad_norm": 0.892287423580011,
      "learning_rate": 0.000258,
      "loss": 9.0029,
      "step": 86
    },
    {
      "epoch": 0.00087,
      "grad_norm": 0.8819038715566697,
      "learning_rate": 0.000261,
      "loss": 8.9819,
      "step": 87
    },
    {
      "epoch": 0.00088,
      "grad_norm": 0.8825670273484024,
      "learning_rate": 0.00026399999999999997,
      "loss": 8.9452,
      "step": 88
    },
    {
      "epoch": 0.00089,
      "grad_norm": 0.87734677010751,
      "learning_rate": 0.000267,
      "loss": 8.92,
      "step": 89
    },
    {
      "epoch": 0.0009,
      "grad_norm": 0.8800982265590005,
      "learning_rate": 0.00027,
      "loss": 8.8935,
      "step": 90
    },
    {
      "epoch": 0.00091,
      "grad_norm": 0.8751738333241339,
      "learning_rate": 0.000273,
      "loss": 8.8635,
      "step": 91
    },
    {
      "epoch": 0.00092,
      "grad_norm": 0.8805112062017327,
      "learning_rate": 0.000276,
      "loss": 8.8313,
      "step": 92
    },
    {
      "epoch": 0.00093,
      "grad_norm": 0.8782110873043902,
      "learning_rate": 0.000279,
      "loss": 8.8018,
      "step": 93
    },
    {
      "epoch": 0.00094,
      "grad_norm": 0.8793675998346636,
      "learning_rate": 0.000282,
      "loss": 8.7769,
      "step": 94
    },
    {
      "epoch": 0.00095,
      "grad_norm": 0.8739437022686155,
      "learning_rate": 0.000285,
      "loss": 8.7468,
      "step": 95
    },
    {
      "epoch": 0.00096,
      "grad_norm": 0.8792093745975903,
      "learning_rate": 0.000288,
      "loss": 8.7137,
      "step": 96
    },
    {
      "epoch": 0.00097,
      "grad_norm": 0.8733827683228169,
      "learning_rate": 0.000291,
      "loss": 8.6904,
      "step": 97
    },
    {
      "epoch": 0.00098,
      "grad_norm": 0.8720585520116785,
      "learning_rate": 0.000294,
      "loss": 8.6531,
      "step": 98
    },
    {
      "epoch": 0.00099,
      "grad_norm": 0.8706078195741852,
      "learning_rate": 0.000297,
      "loss": 8.6333,
      "step": 99
    },
    {
      "epoch": 0.001,
      "grad_norm": 0.868963072522692,
      "learning_rate": 0.00030000000000000003,
      "loss": 8.6078,
      "step": 100
    },
    {
      "epoch": 0.00101,
      "grad_norm": 0.8733428125714966,
      "learning_rate": 0.00030300000000000005,
      "loss": 8.5742,
      "step": 101
    },
    {
      "epoch": 0.00102,
      "grad_norm": 0.8594222895452354,
      "learning_rate": 0.000306,
      "loss": 8.5555,
      "step": 102
    },
    {
      "epoch": 0.00103,
      "grad_norm": 0.8663526773106932,
      "learning_rate": 0.000309,
      "loss": 8.5314,
      "step": 103
    },
    {
      "epoch": 0.00104,
      "grad_norm": 0.8588890352637412,
      "learning_rate": 0.000312,
      "loss": 8.5113,
      "step": 104
    },
    {
      "epoch": 0.00105,
      "grad_norm": 0.862875507176608,
      "learning_rate": 0.000315,
      "loss": 8.4848,
      "step": 105
    },
    {
      "epoch": 0.00106,
      "grad_norm": 0.858707935395144,
      "learning_rate": 0.000318,
      "loss": 8.4562,
      "step": 106
    },
    {
      "epoch": 0.00107,
      "grad_norm": 0.8477892944676333,
      "learning_rate": 0.000321,
      "loss": 8.4313,
      "step": 107
    },
    {
      "epoch": 0.00108,
      "grad_norm": 0.8523289020593828,
      "learning_rate": 0.000324,
      "loss": 8.4028,
      "step": 108
    },
    {
      "epoch": 0.00109,
      "grad_norm": 0.847730490341604,
      "learning_rate": 0.000327,
      "loss": 8.3837,
      "step": 109
    },
    {
      "epoch": 0.0011,
      "grad_norm": 0.8528691839316361,
      "learning_rate": 0.00033,
      "loss": 8.3605,
      "step": 110
    },
    {
      "epoch": 0.00111,
      "grad_norm": 0.8490568938398722,
      "learning_rate": 0.000333,
      "loss": 8.3302,
      "step": 111
    },
    {
      "epoch": 0.00112,
      "grad_norm": 0.8687159568709311,
      "learning_rate": 0.00033600000000000004,
      "loss": 8.3067,
      "step": 112
    },
    {
      "epoch": 0.00113,
      "grad_norm": 0.9514329048402193,
      "learning_rate": 0.000339,
      "loss": 8.2682,
      "step": 113
    },
    {
      "epoch": 0.00114,
      "grad_norm": 1.0232734531890686,
      "learning_rate": 0.000342,
      "loss": 8.2639,
      "step": 114
    },
    {
      "epoch": 0.00115,
      "grad_norm": 0.88782361437861,
      "learning_rate": 0.00034500000000000004,
      "loss": 8.2339,
      "step": 115
    },
    {
      "epoch": 0.00116,
      "grad_norm": 0.8477580889036649,
      "learning_rate": 0.000348,
      "loss": 8.2115,
      "step": 116
    },
    {
      "epoch": 0.00117,
      "grad_norm": 0.9025353672223668,
      "learning_rate": 0.000351,
      "loss": 8.182,
      "step": 117
    },
    {
      "epoch": 0.00118,
      "grad_norm": 0.8194804043714383,
      "learning_rate": 0.000354,
      "loss": 8.1632,
      "step": 118
    },
    {
      "epoch": 0.00119,
      "grad_norm": 0.8747187640572928,
      "learning_rate": 0.000357,
      "loss": 8.1526,
      "step": 119
    },
    {
      "epoch": 0.0012,
      "grad_norm": 0.819302051650556,
      "learning_rate": 0.00035999999999999997,
      "loss": 8.125,
      "step": 120
    },
    {
      "epoch": 0.00121,
      "grad_norm": 0.8357670289489353,
      "learning_rate": 0.000363,
      "loss": 8.0977,
      "step": 121
    },
    {
      "epoch": 0.00122,
      "grad_norm": 0.8269622916799205,
      "learning_rate": 0.000366,
      "loss": 8.0806,
      "step": 122
    },
    {
      "epoch": 0.00123,
      "grad_norm": 0.7923909888868983,
      "learning_rate": 0.000369,
      "loss": 8.0558,
      "step": 123
    },
    {
      "epoch": 0.00124,
      "grad_norm": 0.8405159164509748,
      "learning_rate": 0.000372,
      "loss": 8.0354,
      "step": 124
    },
    {
      "epoch": 0.00125,
      "grad_norm": 0.7957014508382675,
      "learning_rate": 0.000375,
      "loss": 8.0077,
      "step": 125
    },
    {
      "epoch": 0.00126,
      "grad_norm": 0.8032762193906265,
      "learning_rate": 0.000378,
      "loss": 7.9954,
      "step": 126
    },
    {
      "epoch": 0.00127,
      "grad_norm": 0.8534337354315669,
      "learning_rate": 0.000381,
      "loss": 7.9712,
      "step": 127
    },
    {
      "epoch": 0.00128,
      "grad_norm": 0.7889902179927859,
      "learning_rate": 0.000384,
      "loss": 7.9489,
      "step": 128
    },
    {
      "epoch": 0.00129,
      "grad_norm": 0.7667804338310586,
      "learning_rate": 0.00038700000000000003,
      "loss": 7.936,
      "step": 129
    },
    {
      "epoch": 0.0013,
      "grad_norm": 0.9090077886672961,
      "learning_rate": 0.00039000000000000005,
      "loss": 7.9113,
      "step": 130
    },
    {
      "epoch": 0.00131,
      "grad_norm": 1.1017726636538738,
      "learning_rate": 0.000393,
      "loss": 7.8813,
      "step": 131
    },
    {
      "epoch": 0.00132,
      "grad_norm": 0.9363739073599702,
      "learning_rate": 0.00039600000000000003,
      "loss": 7.8715,
      "step": 132
    },
    {
      "epoch": 0.00133,
      "grad_norm": 0.7265974712286192,
      "learning_rate": 0.00039900000000000005,
      "loss": 7.8436,
      "step": 133
    },
    {
      "epoch": 0.00134,
      "grad_norm": 1.085264808454749,
      "learning_rate": 0.000402,
      "loss": 7.8358,
      "step": 134
    },
    {
      "epoch": 0.00135,
      "grad_norm": 1.1024504993278263,
      "learning_rate": 0.00040500000000000003,
      "loss": 7.8094,
      "step": 135
    },
    {
      "epoch": 0.00136,
      "grad_norm": 0.7406778288782102,
      "learning_rate": 0.00040800000000000005,
      "loss": 7.7862,
      "step": 136
    },
    {
      "epoch": 0.00137,
      "grad_norm": 0.948337541042023,
      "learning_rate": 0.000411,
      "loss": 7.7688,
      "step": 137
    },
    {
      "epoch": 0.00138,
      "grad_norm": 0.9084991981726261,
      "learning_rate": 0.00041400000000000003,
      "loss": 7.7417,
      "step": 138
    },
    {
      "epoch": 0.00139,
      "grad_norm": 0.7074494223870315,
      "learning_rate": 0.00041700000000000005,
      "loss": 7.7227,
      "step": 139
    },
    {
      "epoch": 0.0014,
      "grad_norm": 0.7974190203181878,
      "learning_rate": 0.00042000000000000007,
      "loss": 7.7086,
      "step": 140
    },
    {
      "epoch": 0.00141,
      "grad_norm": 0.6861992297830735,
      "learning_rate": 0.000423,
      "loss": 7.6913,
      "step": 141
    },
    {
      "epoch": 0.00142,
      "grad_norm": 0.7349980301116962,
      "learning_rate": 0.00042599999999999995,
      "loss": 7.677,
      "step": 142
    },
    {
      "epoch": 0.00143,
      "grad_norm": 0.7684994731116016,
      "learning_rate": 0.00042899999999999997,
      "loss": 7.6484,
      "step": 143
    },
    {
      "epoch": 0.00144,
      "grad_norm": 0.6719380878952201,
      "learning_rate": 0.000432,
      "loss": 7.629,
      "step": 144
    },
    {
      "epoch": 0.00145,
      "grad_norm": 0.7001362890446781,
      "learning_rate": 0.000435,
      "loss": 7.6232,
      "step": 145
    },
    {
      "epoch": 0.00146,
      "grad_norm": 0.7384188504614574,
      "learning_rate": 0.00043799999999999997,
      "loss": 7.5849,
      "step": 146
    },
    {
      "epoch": 0.00147,
      "grad_norm": 0.6536906507985312,
      "learning_rate": 0.000441,
      "loss": 7.5755,
      "step": 147
    },
    {
      "epoch": 0.00148,
      "grad_norm": 0.6570387947682329,
      "learning_rate": 0.000444,
      "loss": 7.5632,
      "step": 148
    },
    {
      "epoch": 0.00149,
      "grad_norm": 0.7127206041028632,
      "learning_rate": 0.00044699999999999997,
      "loss": 7.5571,
      "step": 149
    },
    {
      "epoch": 0.0015,
      "grad_norm": 0.7308289900379658,
      "learning_rate": 0.00045,
      "loss": 7.5253,
      "step": 150
    },
    {
      "epoch": 0.00151,
      "grad_norm": 0.8664894863114535,
      "learning_rate": 0.000453,
      "loss": 7.505,
      "step": 151
    },
    {
      "epoch": 0.00152,
      "grad_norm": 0.8895841422117233,
      "learning_rate": 0.000456,
      "loss": 7.5034,
      "step": 152
    },
    {
      "epoch": 0.00153,
      "grad_norm": 0.662621907628313,
      "learning_rate": 0.000459,
      "loss": 7.4855,
      "step": 153
    },
    {
      "epoch": 0.00154,
      "grad_norm": 0.7176449004695503,
      "learning_rate": 0.000462,
      "loss": 7.4655,
      "step": 154
    },
    {
      "epoch": 0.00155,
      "grad_norm": 0.7407325792933065,
      "learning_rate": 0.000465,
      "loss": 7.4508,
      "step": 155
    },
    {
      "epoch": 0.00156,
      "grad_norm": 0.7037717820949844,
      "learning_rate": 0.000468,
      "loss": 7.4265,
      "step": 156
    },
    {
      "epoch": 0.00157,
      "grad_norm": 0.5688508632010355,
      "learning_rate": 0.000471,
      "loss": 7.4159,
      "step": 157
    },
    {
      "epoch": 0.00158,
      "grad_norm": 0.812094320370736,
      "learning_rate": 0.00047400000000000003,
      "loss": 7.4015,
      "step": 158
    },
    {
      "epoch": 0.00159,
      "grad_norm": 0.6851465337071334,
      "learning_rate": 0.000477,
      "loss": 7.3879,
      "step": 159
    },
    {
      "epoch": 0.0016,
      "grad_norm": 0.9553203809874583,
      "learning_rate": 0.00048,
      "loss": 7.3739,
      "step": 160
    },
    {
      "epoch": 0.00161,
      "grad_norm": 0.7328141993841643,
      "learning_rate": 0.00048300000000000003,
      "loss": 7.3497,
      "step": 161
    },
    {
      "epoch": 0.00162,
      "grad_norm": 0.6781537584586433,
      "learning_rate": 0.00048600000000000005,
      "loss": 7.3535,
      "step": 162
    },
    {
      "epoch": 0.00163,
      "grad_norm": 0.5807738383940755,
      "learning_rate": 0.0004890000000000001,
      "loss": 7.3306,
      "step": 163
    },
    {
      "epoch": 0.00164,
      "grad_norm": 0.6466438270814104,
      "learning_rate": 0.000492,
      "loss": 7.3371,
      "step": 164
    },
    {
      "epoch": 0.00165,
      "grad_norm": 0.4365334647721832,
      "learning_rate": 0.000495,
      "loss": 7.3038,
      "step": 165
    },
    {
      "epoch": 0.00166,
      "grad_norm": 0.7387692677149859,
      "learning_rate": 0.0004980000000000001,
      "loss": 7.2849,
      "step": 166
    },
    {
      "epoch": 0.00167,
      "grad_norm": 0.6489238814400348,
      "learning_rate": 0.000501,
      "loss": 7.2765,
      "step": 167
    },
    {
      "epoch": 0.00168,
      "grad_norm": 0.4943076416069544,
      "learning_rate": 0.000504,
      "loss": 7.2512,
      "step": 168
    },
    {
      "epoch": 0.00169,
      "grad_norm": 0.6623047646201438,
      "learning_rate": 0.0005070000000000001,
      "loss": 7.2485,
      "step": 169
    },
    {
      "epoch": 0.0017,
      "grad_norm": 0.5841690806336316,
      "learning_rate": 0.00051,
      "loss": 7.2331,
      "step": 170
    },
    {
      "epoch": 0.00171,
      "grad_norm": 0.455313434659774,
      "learning_rate": 0.000513,
      "loss": 7.2132,
      "step": 171
    },
    {
      "epoch": 0.00172,
      "grad_norm": 0.6012704724036779,
      "learning_rate": 0.000516,
      "loss": 7.214,
      "step": 172
    },
    {
      "epoch": 0.00173,
      "grad_norm": 0.5433283029678282,
      "learning_rate": 0.0005189999999999999,
      "loss": 7.1993,
      "step": 173
    },
    {
      "epoch": 0.00174,
      "grad_norm": 0.38251135313099477,
      "learning_rate": 0.000522,
      "loss": 7.2121,
      "step": 174
    },
    {
      "epoch": 0.00175,
      "grad_norm": 0.6483519243845758,
      "learning_rate": 0.000525,
      "loss": 7.1796,
      "step": 175
    },
    {
      "epoch": 0.00176,
      "grad_norm": 0.5106681215905619,
      "learning_rate": 0.0005279999999999999,
      "loss": 7.1531,
      "step": 176
    },
    {
      "epoch": 0.00177,
      "grad_norm": 0.3867365673342131,
      "learning_rate": 0.000531,
      "loss": 7.1566,
      "step": 177
    },
    {
      "epoch": 0.00178,
      "grad_norm": 0.6803005272577738,
      "learning_rate": 0.000534,
      "loss": 7.1525,
      "step": 178
    },
    {
      "epoch": 0.00179,
      "grad_norm": 0.44918531151657043,
      "learning_rate": 0.000537,
      "loss": 7.1407,
      "step": 179
    },
    {
      "epoch": 0.0018,
      "grad_norm": 0.39944612038572996,
      "learning_rate": 0.00054,
      "loss": 7.1121,
      "step": 180
    },
    {
      "epoch": 0.00181,
      "grad_norm": 0.5937704312371144,
      "learning_rate": 0.000543,
      "loss": 7.1037,
      "step": 181
    },
    {
      "epoch": 0.00182,
      "grad_norm": 0.3956001398507633,
      "learning_rate": 0.000546,
      "loss": 7.0976,
      "step": 182
    },
    {
      "epoch": 0.00183,
      "grad_norm": 0.36202282333499824,
      "learning_rate": 0.000549,
      "loss": 7.0864,
      "step": 183
    },
    {
      "epoch": 0.00184,
      "grad_norm": 0.564760920006622,
      "learning_rate": 0.000552,
      "loss": 7.0785,
      "step": 184
    },
    {
      "epoch": 0.00185,
      "grad_norm": 0.36333201524042624,
      "learning_rate": 0.000555,
      "loss": 7.0515,
      "step": 185
    },
    {
      "epoch": 0.00186,
      "grad_norm": 0.3517854453126823,
      "learning_rate": 0.000558,
      "loss": 7.0614,
      "step": 186
    },
    {
      "epoch": 0.00187,
      "grad_norm": 0.5413049623390372,
      "learning_rate": 0.000561,
      "loss": 7.0394,
      "step": 187
    },
    {
      "epoch": 0.00188,
      "grad_norm": 0.4452202297805183,
      "learning_rate": 0.000564,
      "loss": 7.0168,
      "step": 188
    },
    {
      "epoch": 0.00189,
      "grad_norm": 0.2888404630990515,
      "learning_rate": 0.000567,
      "loss": 7.0108,
      "step": 189
    },
    {
      "epoch": 0.0019,
      "grad_norm": 0.5069984229642092,
      "learning_rate": 0.00057,
      "loss": 6.9967,
      "step": 190
    },
    {
      "epoch": 0.00191,
      "grad_norm": 0.5049159939798347,
      "learning_rate": 0.000573,
      "loss": 6.9897,
      "step": 191
    },
    {
      "epoch": 0.00192,
      "grad_norm": 0.353459237780404,
      "learning_rate": 0.000576,
      "loss": 6.9784,
      "step": 192
    },
    {
      "epoch": 0.00193,
      "grad_norm": 0.4583730098712965,
      "learning_rate": 0.000579,
      "loss": 6.9798,
      "step": 193
    },
    {
      "epoch": 0.00194,
      "grad_norm": 0.5934016188318005,
      "learning_rate": 0.000582,
      "loss": 6.9757,
      "step": 194
    },
    {
      "epoch": 0.00195,
      "grad_norm": 0.6611510755360627,
      "learning_rate": 0.000585,
      "loss": 6.9556,
      "step": 195
    },
    {
      "epoch": 0.00196,
      "grad_norm": 0.6794801564658052,
      "learning_rate": 0.000588,
      "loss": 6.9455,
      "step": 196
    },
    {
      "epoch": 0.00197,
      "grad_norm": 0.6965025324131076,
      "learning_rate": 0.000591,
      "loss": 6.9424,
      "step": 197
    },
    {
      "epoch": 0.00198,
      "grad_norm": 1.0010424094015926,
      "learning_rate": 0.000594,
      "loss": 6.9428,
      "step": 198
    },
    {
      "epoch": 0.00199,
      "grad_norm": 0.8473959375492113,
      "learning_rate": 0.0005970000000000001,
      "loss": 6.9257,
      "step": 199
    },
    {
      "epoch": 0.002,
      "grad_norm": 0.4674164352666973,
      "learning_rate": 0.0006000000000000001,
      "loss": 6.9135,
      "step": 200
    },
    {
      "epoch": 0.00201,
      "grad_norm": 0.6130101305251902,
      "learning_rate": 0.000603,
      "loss": 6.8907,
      "step": 201
    },
    {
      "epoch": 0.00202,
      "grad_norm": 0.5766204915712545,
      "learning_rate": 0.0006060000000000001,
      "loss": 6.8786,
      "step": 202
    },
    {
      "epoch": 0.00203,
      "grad_norm": 0.42726729176690026,
      "learning_rate": 0.0006090000000000001,
      "loss": 6.8819,
      "step": 203
    },
    {
      "epoch": 0.00204,
      "grad_norm": 0.5161441207128264,
      "learning_rate": 0.000612,
      "loss": 6.8675,
      "step": 204
    },
    {
      "epoch": 0.00205,
      "grad_norm": 0.44005998133884483,
      "learning_rate": 0.000615,
      "loss": 6.852,
      "step": 205
    },
    {
      "epoch": 0.00206,
      "grad_norm": 0.4214710856814876,
      "learning_rate": 0.000618,
      "loss": 6.8455,
      "step": 206
    },
    {
      "epoch": 0.00207,
      "grad_norm": 0.3835330802523119,
      "learning_rate": 0.000621,
      "loss": 6.8358,
      "step": 207
    },
    {
      "epoch": 0.00208,
      "grad_norm": 0.4118282878888594,
      "learning_rate": 0.000624,
      "loss": 6.8325,
      "step": 208
    },
    {
      "epoch": 0.00209,
      "grad_norm": 0.35207765117623135,
      "learning_rate": 0.000627,
      "loss": 6.8108,
      "step": 209
    },
    {
      "epoch": 0.0021,
      "grad_norm": 0.35165038308098223,
      "learning_rate": 0.00063,
      "loss": 6.8001,
      "step": 210
    },
    {
      "epoch": 0.00211,
      "grad_norm": 0.2907998960376576,
      "learning_rate": 0.000633,
      "loss": 6.794,
      "step": 211
    },
    {
      "epoch": 0.00212,
      "grad_norm": 0.34183459291083,
      "learning_rate": 0.000636,
      "loss": 6.7864,
      "step": 212
    },
    {
      "epoch": 0.00213,
      "grad_norm": 0.31625023606106545,
      "learning_rate": 0.000639,
      "loss": 6.7664,
      "step": 213
    },
    {
      "epoch": 0.00214,
      "grad_norm": 0.3389167462098675,
      "learning_rate": 0.000642,
      "loss": 6.7645,
      "step": 214
    },
    {
      "epoch": 0.00215,
      "grad_norm": 0.4689783367804683,
      "learning_rate": 0.000645,
      "loss": 6.7617,
      "step": 215
    },
    {
      "epoch": 0.00216,
      "grad_norm": 0.7554849005001304,
      "learning_rate": 0.000648,
      "loss": 6.763,
      "step": 216
    },
    {
      "epoch": 0.00217,
      "grad_norm": 1.3852957930548755,
      "learning_rate": 0.000651,
      "loss": 6.7541,
      "step": 217
    },
    {
      "epoch": 0.00218,
      "grad_norm": 0.6607509792225358,
      "learning_rate": 0.000654,
      "loss": 6.7405,
      "step": 218
    },
    {
      "epoch": 0.00219,
      "grad_norm": 0.6480761266032634,
      "learning_rate": 0.000657,
      "loss": 6.7122,
      "step": 219
    },
    {
      "epoch": 0.0022,
      "grad_norm": 1.2227203355321279,
      "learning_rate": 0.00066,
      "loss": 6.7471,
      "step": 220
    },
    {
      "epoch": 0.00221,
      "grad_norm": 1.1390371544106608,
      "learning_rate": 0.0006630000000000001,
      "loss": 6.7248,
      "step": 221
    },
    {
      "epoch": 0.00222,
      "grad_norm": 0.7354052143767025,
      "learning_rate": 0.000666,
      "loss": 6.7013,
      "step": 222
    },
    {
      "epoch": 0.00223,
      "grad_norm": 0.6134943537271429,
      "learning_rate": 0.000669,
      "loss": 6.6937,
      "step": 223
    },
    {
      "epoch": 0.00224,
      "grad_norm": 0.5502679770385809,
      "learning_rate": 0.0006720000000000001,
      "loss": 6.6934,
      "step": 224
    },
    {
      "epoch": 0.00225,
      "grad_norm": 0.47738132510350817,
      "learning_rate": 0.000675,
      "loss": 6.6778,
      "step": 225
    },
    {
      "epoch": 0.00226,
      "grad_norm": 0.507114065421453,
      "learning_rate": 0.000678,
      "loss": 6.6712,
      "step": 226
    },
    {
      "epoch": 0.00227,
      "grad_norm": 0.4403172481189424,
      "learning_rate": 0.0006810000000000001,
      "loss": 6.646,
      "step": 227
    },
    {
      "epoch": 0.00228,
      "grad_norm": 0.34083753361811386,
      "learning_rate": 0.000684,
      "loss": 6.6465,
      "step": 228
    },
    {
      "epoch": 0.00229,
      "grad_norm": 0.3502366193988861,
      "learning_rate": 0.000687,
      "loss": 6.6418,
      "step": 229
    },
    {
      "epoch": 0.0023,
      "grad_norm": 0.40867670062411404,
      "learning_rate": 0.0006900000000000001,
      "loss": 6.6218,
      "step": 230
    },
    {
      "epoch": 0.00231,
      "grad_norm": 0.3512396474179769,
      "learning_rate": 0.000693,
      "loss": 6.605,
      "step": 231
    },
    {
      "epoch": 0.00232,
      "grad_norm": 0.37061772246720376,
      "learning_rate": 0.000696,
      "loss": 6.6164,
      "step": 232
    },
    {
      "epoch": 0.00233,
      "grad_norm": 0.30975845835339183,
      "learning_rate": 0.0006990000000000001,
      "loss": 6.6026,
      "step": 233
    },
    {
      "epoch": 0.00234,
      "grad_norm": 0.31056463949410484,
      "learning_rate": 0.000702,
      "loss": 6.5934,
      "step": 234
    },
    {
      "epoch": 0.00235,
      "grad_norm": 0.3416829938754332,
      "learning_rate": 0.000705,
      "loss": 6.5798,
      "step": 235
    },
    {
      "epoch": 0.00236,
      "grad_norm": 0.3580449551775802,
      "learning_rate": 0.000708,
      "loss": 6.5618,
      "step": 236
    },
    {
      "epoch": 0.00237,
      "grad_norm": 0.31186166958910727,
      "learning_rate": 0.0007109999999999999,
      "loss": 6.5709,
      "step": 237
    },
    {
      "epoch": 0.00238,
      "grad_norm": 0.35712039334009765,
      "learning_rate": 0.000714,
      "loss": 6.5601,
      "step": 238
    },
    {
      "epoch": 0.00239,
      "grad_norm": 0.4689874670209286,
      "learning_rate": 0.000717,
      "loss": 6.5431,
      "step": 239
    },
    {
      "epoch": 0.0024,
      "grad_norm": 0.8385556457838154,
      "learning_rate": 0.0007199999999999999,
      "loss": 6.5652,
      "step": 240
    },
    {
      "epoch": 0.00241,
      "grad_norm": 1.2597764356570753,
      "learning_rate": 0.000723,
      "loss": 6.5721,
      "step": 241
    },
    {
      "epoch": 0.00242,
      "grad_norm": 0.7018547016449656,
      "learning_rate": 0.000726,
      "loss": 6.5258,
      "step": 242
    },
    {
      "epoch": 0.00243,
      "grad_norm": 0.9679840331331718,
      "learning_rate": 0.000729,
      "loss": 6.5235,
      "step": 243
    },
    {
      "epoch": 0.00244,
      "grad_norm": 0.8151244219176279,
      "learning_rate": 0.000732,
      "loss": 6.5289,
      "step": 244
    },
    {
      "epoch": 0.00245,
      "grad_norm": 0.7408239035858821,
      "learning_rate": 0.000735,
      "loss": 6.5159,
      "step": 245
    },
    {
      "epoch": 0.00246,
      "grad_norm": 0.7269888029650857,
      "learning_rate": 0.000738,
      "loss": 6.5006,
      "step": 246
    },
    {
      "epoch": 0.00247,
      "grad_norm": 0.6715573633422388,
      "learning_rate": 0.000741,
      "loss": 6.4916,
      "step": 247
    },
    {
      "epoch": 0.00248,
      "grad_norm": 0.5355440416613977,
      "learning_rate": 0.000744,
      "loss": 6.4965,
      "step": 248
    },
    {
      "epoch": 0.00249,
      "grad_norm": 0.47837466185087324,
      "learning_rate": 0.000747,
      "loss": 6.474,
      "step": 249
    },
    {
      "epoch": 0.0025,
      "grad_norm": 0.5421674013984196,
      "learning_rate": 0.00075,
      "loss": 6.4781,
      "step": 250
    },
    {
      "epoch": 0.00251,
      "grad_norm": 0.5046285559759315,
      "learning_rate": 0.000753,
      "loss": 6.4394,
      "step": 251
    },
    {
      "epoch": 0.00252,
      "grad_norm": 0.4902399046559828,
      "learning_rate": 0.000756,
      "loss": 6.4478,
      "step": 252
    },
    {
      "epoch": 0.00253,
      "grad_norm": 0.6026763597870729,
      "learning_rate": 0.000759,
      "loss": 6.4417,
      "step": 253
    },
    {
      "epoch": 0.00254,
      "grad_norm": 0.6534629043130656,
      "learning_rate": 0.000762,
      "loss": 6.4357,
      "step": 254
    },
    {
      "epoch": 0.00255,
      "grad_norm": 0.5879568865224861,
      "learning_rate": 0.0007650000000000001,
      "loss": 6.4331,
      "step": 255
    },
    {
      "epoch": 0.00256,
      "grad_norm": 0.587549512889128,
      "learning_rate": 0.000768,
      "loss": 6.4121,
      "step": 256
    },
    {
      "epoch": 0.00257,
      "grad_norm": 0.4948417519702818,
      "learning_rate": 0.000771,
      "loss": 6.4144,
      "step": 257
    },
    {
      "epoch": 0.00258,
      "grad_norm": 0.500640645286767,
      "learning_rate": 0.0007740000000000001,
      "loss": 6.3915,
      "step": 258
    },
    {
      "epoch": 0.00259,
      "grad_norm": 0.46096026590523803,
      "learning_rate": 0.000777,
      "loss": 6.3904,
      "step": 259
    },
    {
      "epoch": 0.0026,
      "grad_norm": 0.36710644781296853,
      "learning_rate": 0.0007800000000000001,
      "loss": 6.3738,
      "step": 260
    },
    {
      "epoch": 0.00261,
      "grad_norm": 0.3228962429333065,
      "learning_rate": 0.0007830000000000001,
      "loss": 6.3839,
      "step": 261
    },
    {
      "epoch": 0.00262,
      "grad_norm": 0.33723485694460476,
      "learning_rate": 0.000786,
      "loss": 6.3601,
      "step": 262
    },
    {
      "epoch": 0.00263,
      "grad_norm": 0.3792844357509868,
      "learning_rate": 0.0007890000000000001,
      "loss": 6.3664,
      "step": 263
    },
    {
      "epoch": 0.00264,
      "grad_norm": 0.41999467297545995,
      "learning_rate": 0.0007920000000000001,
      "loss": 6.3505,
      "step": 264
    },
    {
      "epoch": 0.00265,
      "grad_norm": 0.5058327981670172,
      "learning_rate": 0.000795,
      "loss": 6.3427,
      "step": 265
    },
    {
      "epoch": 0.00266,
      "grad_norm": 0.728666013298328,
      "learning_rate": 0.0007980000000000001,
      "loss": 6.3445,
      "step": 266
    },
    {
      "epoch": 0.00267,
      "grad_norm": 1.051597167429085,
      "learning_rate": 0.0008010000000000001,
      "loss": 6.3504,
      "step": 267
    },
    {
      "epoch": 0.00268,
      "grad_norm": 0.8184600443662068,
      "learning_rate": 0.000804,
      "loss": 6.3425,
      "step": 268
    },
    {
      "epoch": 0.00269,
      "grad_norm": 0.9044150906220578,
      "learning_rate": 0.0008070000000000001,
      "loss": 6.3249,
      "step": 269
    },
    {
      "epoch": 0.0027,
      "grad_norm": 1.128589514160359,
      "learning_rate": 0.0008100000000000001,
      "loss": 6.3418,
      "step": 270
    },
    {
      "epoch": 0.00271,
      "grad_norm": 0.8269681270889044,
      "learning_rate": 0.000813,
      "loss": 6.3324,
      "step": 271
    },
    {
      "epoch": 0.00272,
      "grad_norm": 0.9605488749490486,
      "learning_rate": 0.0008160000000000001,
      "loss": 6.3229,
      "step": 272
    },
    {
      "epoch": 0.00273,
      "grad_norm": 1.4256959145221717,
      "learning_rate": 0.0008190000000000001,
      "loss": 6.3332,
      "step": 273
    },
    {
      "epoch": 0.00274,
      "grad_norm": 0.8746481700595169,
      "learning_rate": 0.000822,
      "loss": 6.301,
      "step": 274
    },
    {
      "epoch": 0.00275,
      "grad_norm": 0.7483839590071554,
      "learning_rate": 0.0008250000000000001,
      "loss": 6.3047,
      "step": 275
    },
    {
      "epoch": 0.00276,
      "grad_norm": 0.9683696395132253,
      "learning_rate": 0.0008280000000000001,
      "loss": 6.2965,
      "step": 276
    },
    {
      "epoch": 0.00277,
      "grad_norm": 0.9445364353770574,
      "learning_rate": 0.0008310000000000001,
      "loss": 6.3021,
      "step": 277
    },
    {
      "epoch": 0.00278,
      "grad_norm": 0.5637393663463164,
      "learning_rate": 0.0008340000000000001,
      "loss": 6.2711,
      "step": 278
    },
    {
      "epoch": 0.00279,
      "grad_norm": 0.6474632709262491,
      "learning_rate": 0.0008370000000000001,
      "loss": 6.2722,
      "step": 279
    },
    {
      "epoch": 0.0028,
      "grad_norm": 0.5477535984985658,
      "learning_rate": 0.0008400000000000001,
      "loss": 6.2548,
      "step": 280
    },
    {
      "epoch": 0.00281,
      "grad_norm": 0.5235139271800053,
      "learning_rate": 0.0008430000000000001,
      "loss": 6.2428,
      "step": 281
    },
    {
      "epoch": 0.00282,
      "grad_norm": 0.4309641347762167,
      "learning_rate": 0.000846,
      "loss": 6.2382,
      "step": 282
    },
    {
      "epoch": 0.00283,
      "grad_norm": 0.47598469419389666,
      "learning_rate": 0.0008489999999999999,
      "loss": 6.2321,
      "step": 283
    },
    {
      "epoch": 0.00284,
      "grad_norm": 0.41791767912211497,
      "learning_rate": 0.0008519999999999999,
      "loss": 6.2199,
      "step": 284
    },
    {
      "epoch": 0.00285,
      "grad_norm": 0.4068034669968041,
      "learning_rate": 0.000855,
      "loss": 6.2226,
      "step": 285
    },
    {
      "epoch": 0.00286,
      "grad_norm": 0.461449105404332,
      "learning_rate": 0.0008579999999999999,
      "loss": 6.2004,
      "step": 286
    },
    {
      "epoch": 0.00287,
      "grad_norm": 0.3833700707849029,
      "learning_rate": 0.000861,
      "loss": 6.2013,
      "step": 287
    },
    {
      "epoch": 0.00288,
      "grad_norm": 0.3960645710165419,
      "learning_rate": 0.000864,
      "loss": 6.1818,
      "step": 288
    },
    {
      "epoch": 0.00289,
      "grad_norm": 0.44431212101949524,
      "learning_rate": 0.0008669999999999999,
      "loss": 6.1961,
      "step": 289
    },
    {
      "epoch": 0.0029,
      "grad_norm": 0.5861155579490576,
      "learning_rate": 0.00087,
      "loss": 6.1786,
      "step": 290
    },
    {
      "epoch": 0.00291,
      "grad_norm": 0.6639450218120423,
      "learning_rate": 0.000873,
      "loss": 6.1739,
      "step": 291
    },
    {
      "epoch": 0.00292,
      "grad_norm": 0.8465664162859037,
      "learning_rate": 0.0008759999999999999,
      "loss": 6.1756,
      "step": 292
    },
    {
      "epoch": 0.00293,
      "grad_norm": 0.9291009677838105,
      "learning_rate": 0.000879,
      "loss": 6.1618,
      "step": 293
    },
    {
      "epoch": 0.00294,
      "grad_norm": 1.3082966000676708,
      "learning_rate": 0.000882,
      "loss": 6.1833,
      "step": 294
    },
    {
      "epoch": 0.00295,
      "grad_norm": 1.1893342417992165,
      "learning_rate": 0.0008849999999999999,
      "loss": 6.1586,
      "step": 295
    },
    {
      "epoch": 0.00296,
      "grad_norm": 0.6546841726146672,
      "learning_rate": 0.000888,
      "loss": 6.1551,
      "step": 296
    },
    {
      "epoch": 0.00297,
      "grad_norm": 0.8416245966488266,
      "learning_rate": 0.000891,
      "loss": 6.1315,
      "step": 297
    },
    {
      "epoch": 0.00298,
      "grad_norm": 0.806301942306892,
      "learning_rate": 0.0008939999999999999,
      "loss": 6.1452,
      "step": 298
    },
    {
      "epoch": 0.00299,
      "grad_norm": 1.0994469943740992,
      "learning_rate": 0.000897,
      "loss": 6.1288,
      "step": 299
    },
    {
      "epoch": 0.003,
      "grad_norm": 0.7425213774845364,
      "learning_rate": 0.0009,
      "loss": 6.123,
      "step": 300
    },
    {
      "epoch": 0.00301,
      "grad_norm": 0.6433032250398036,
      "learning_rate": 0.0009029999999999999,
      "loss": 6.1149,
      "step": 301
    },
    {
      "epoch": 0.00302,
      "grad_norm": 0.5449990789285414,
      "learning_rate": 0.000906,
      "loss": 6.0952,
      "step": 302
    },
    {
      "epoch": 0.00303,
      "grad_norm": 0.5356674479383384,
      "learning_rate": 0.000909,
      "loss": 6.1067,
      "step": 303
    },
    {
      "epoch": 0.00304,
      "grad_norm": 0.5049891146020773,
      "learning_rate": 0.000912,
      "loss": 6.1013,
      "step": 304
    },
    {
      "epoch": 0.00305,
      "grad_norm": 0.48068374440179407,
      "learning_rate": 0.000915,
      "loss": 6.0985,
      "step": 305
    },
    {
      "epoch": 0.00306,
      "grad_norm": 0.39305660615027715,
      "learning_rate": 0.000918,
      "loss": 6.0835,
      "step": 306
    },
    {
      "epoch": 0.00307,
      "grad_norm": 0.4044411514506039,
      "learning_rate": 0.000921,
      "loss": 6.0774,
      "step": 307
    },
    {
      "epoch": 0.00308,
      "grad_norm": 0.3317383414417003,
      "learning_rate": 0.000924,
      "loss": 6.0578,
      "step": 308
    },
    {
      "epoch": 0.00309,
      "grad_norm": 0.4170081186564571,
      "learning_rate": 0.000927,
      "loss": 6.0741,
      "step": 309
    },
    {
      "epoch": 0.0031,
      "grad_norm": 0.3537372632819937,
      "learning_rate": 0.00093,
      "loss": 6.0544,
      "step": 310
    },
    {
      "epoch": 0.00311,
      "grad_norm": 0.3807795877969161,
      "learning_rate": 0.000933,
      "loss": 6.0455,
      "step": 311
    },
    {
      "epoch": 0.00312,
      "grad_norm": 0.36301395919450175,
      "learning_rate": 0.000936,
      "loss": 6.025,
      "step": 312
    },
    {
      "epoch": 0.00313,
      "grad_norm": 0.456154645430038,
      "learning_rate": 0.0009390000000000001,
      "loss": 6.0399,
      "step": 313
    },
    {
      "epoch": 0.00314,
      "grad_norm": 0.6252307526263491,
      "learning_rate": 0.000942,
      "loss": 6.048,
      "step": 314
    },
    {
      "epoch": 0.00315,
      "grad_norm": 1.0054536709858048,
      "learning_rate": 0.000945,
      "loss": 6.0332,
      "step": 315
    },
    {
      "epoch": 0.00316,
      "grad_norm": 1.2671801420052267,
      "learning_rate": 0.0009480000000000001,
      "loss": 6.0322,
      "step": 316
    },
    {
      "epoch": 0.00317,
      "grad_norm": 0.5267566088823273,
      "learning_rate": 0.000951,
      "loss": 6.0198,
      "step": 317
    },
    {
      "epoch": 0.00318,
      "grad_norm": 1.2005145600955376,
      "learning_rate": 0.000954,
      "loss": 6.041,
      "step": 318
    },
    {
      "epoch": 0.00319,
      "grad_norm": 1.004603435172909,
      "learning_rate": 0.0009570000000000001,
      "loss": 6.025,
      "step": 319
    },
    {
      "epoch": 0.0032,
      "grad_norm": 1.1201777687880106,
      "learning_rate": 0.00096,
      "loss": 6.0356,
      "step": 320
    },
    {
      "epoch": 0.00321,
      "grad_norm": 1.0167944206203925,
      "learning_rate": 0.000963,
      "loss": 6.012,
      "step": 321
    },
    {
      "epoch": 0.00322,
      "grad_norm": 1.374478470085335,
      "learning_rate": 0.0009660000000000001,
      "loss": 6.025,
      "step": 322
    },
    {
      "epoch": 0.00323,
      "grad_norm": 0.7765654772896999,
      "learning_rate": 0.000969,
      "loss": 6.0057,
      "step": 323
    },
    {
      "epoch": 0.00324,
      "grad_norm": 0.6987857474960232,
      "learning_rate": 0.0009720000000000001,
      "loss": 6.0035,
      "step": 324
    },
    {
      "epoch": 0.00325,
      "grad_norm": 0.7564069291902213,
      "learning_rate": 0.0009750000000000001,
      "loss": 5.9742,
      "step": 325
    },
    {
      "epoch": 0.00326,
      "grad_norm": 0.7275905653375951,
      "learning_rate": 0.0009780000000000001,
      "loss": 5.9877,
      "step": 326
    },
    {
      "epoch": 0.00327,
      "grad_norm": 0.6984989438722892,
      "learning_rate": 0.000981,
      "loss": 5.9857,
      "step": 327
    },
    {
      "epoch": 0.00328,
      "grad_norm": 0.721382136086564,
      "learning_rate": 0.000984,
      "loss": 5.9627,
      "step": 328
    },
    {
      "epoch": 0.00329,
      "grad_norm": 0.7841656285031653,
      "learning_rate": 0.000987,
      "loss": 5.9649,
      "step": 329
    },
    {
      "epoch": 0.0033,
      "grad_norm": 0.8334397806028965,
      "learning_rate": 0.00099,
      "loss": 5.9568,
      "step": 330
    },
    {
      "epoch": 0.00331,
      "grad_norm": 1.070828806370832,
      "learning_rate": 0.0009930000000000002,
      "loss": 5.9642,
      "step": 331
    },
    {
      "epoch": 0.00332,
      "grad_norm": 1.0826703064607723,
      "learning_rate": 0.0009960000000000001,
      "loss": 5.9538,
      "step": 332
    },
    {
      "epoch": 0.00333,
      "grad_norm": 0.9458285226462658,
      "learning_rate": 0.000999,
      "loss": 5.9361,
      "step": 333
    },
    {
      "epoch": 0.00334,
      "grad_norm": 0.8783572499928525,
      "learning_rate": 0.001002,
      "loss": 5.9443,
      "step": 334
    },
    {
      "epoch": 0.00335,
      "grad_norm": 0.5654549281403035,
      "learning_rate": 0.001005,
      "loss": 5.9354,
      "step": 335
    },
    {
      "epoch": 0.00336,
      "grad_norm": 0.6516350129184051,
      "learning_rate": 0.001008,
      "loss": 5.9285,
      "step": 336
    },
    {
      "epoch": 0.00337,
      "grad_norm": 0.5946351510133966,
      "learning_rate": 0.0010110000000000002,
      "loss": 5.9026,
      "step": 337
    },
    {
      "epoch": 0.00338,
      "grad_norm": 0.46824568572142555,
      "learning_rate": 0.0010140000000000001,
      "loss": 5.9165,
      "step": 338
    },
    {
      "epoch": 0.00339,
      "grad_norm": 0.5098867793571775,
      "learning_rate": 0.0010170000000000001,
      "loss": 5.9215,
      "step": 339
    },
    {
      "epoch": 0.0034,
      "grad_norm": 0.5202026348670868,
      "learning_rate": 0.00102,
      "loss": 5.8881,
      "step": 340
    },
    {
      "epoch": 0.00341,
      "grad_norm": 0.4748523902202147,
      "learning_rate": 0.001023,
      "loss": 5.8705,
      "step": 341
    },
    {
      "epoch": 0.00342,
      "grad_norm": 0.42065780247182966,
      "learning_rate": 0.001026,
      "loss": 5.8732,
      "step": 342
    },
    {
      "epoch": 0.00343,
      "grad_norm": 0.4385189981740614,
      "learning_rate": 0.0010290000000000002,
      "loss": 5.8762,
      "step": 343
    },
    {
      "epoch": 0.00344,
      "grad_norm": 0.44131626809067537,
      "learning_rate": 0.001032,
      "loss": 5.8646,
      "step": 344
    },
    {
      "epoch": 0.00345,
      "grad_norm": 0.5602404326473228,
      "learning_rate": 0.001035,
      "loss": 5.8576,
      "step": 345
    },
    {
      "epoch": 0.00346,
      "grad_norm": 0.7338782884674983,
      "learning_rate": 0.0010379999999999999,
      "loss": 5.8684,
      "step": 346
    },
    {
      "epoch": 0.00347,
      "grad_norm": 0.8871217127911775,
      "learning_rate": 0.001041,
      "loss": 5.8429,
      "step": 347
    },
    {
      "epoch": 0.00348,
      "grad_norm": 0.7294005585257834,
      "learning_rate": 0.001044,
      "loss": 5.8498,
      "step": 348
    },
    {
      "epoch": 0.00349,
      "grad_norm": 0.672488440407315,
      "learning_rate": 0.001047,
      "loss": 5.8268,
      "step": 349
    },
    {
      "epoch": 0.0035,
      "grad_norm": 0.944274771030096,
      "learning_rate": 0.00105,
      "loss": 5.8574,
      "step": 350
    },
    {
      "epoch": 0.00351,
      "grad_norm": 0.8431353927439175,
      "learning_rate": 0.001053,
      "loss": 5.8586,
      "step": 351
    },
    {
      "epoch": 0.00352,
      "grad_norm": 0.8223595555288745,
      "learning_rate": 0.0010559999999999999,
      "loss": 5.8326,
      "step": 352
    },
    {
      "epoch": 0.00353,
      "grad_norm": 0.7412573085023916,
      "learning_rate": 0.001059,
      "loss": 5.8336,
      "step": 353
    },
    {
      "epoch": 0.00354,
      "grad_norm": 0.7219192917744488,
      "learning_rate": 0.001062,
      "loss": 5.8047,
      "step": 354
    },
    {
      "epoch": 0.00355,
      "grad_norm": 0.7332482461779326,
      "learning_rate": 0.001065,
      "loss": 5.8213,
      "step": 355
    },
    {
      "epoch": 0.00356,
      "grad_norm": 0.6300796792438244,
      "learning_rate": 0.001068,
      "loss": 5.8096,
      "step": 356
    },
    {
      "epoch": 0.00357,
      "grad_norm": 0.7796926368866208,
      "learning_rate": 0.001071,
      "loss": 5.802,
      "step": 357
    },
    {
      "epoch": 0.00358,
      "grad_norm": 1.048118676098695,
      "learning_rate": 0.001074,
      "loss": 5.8089,
      "step": 358
    },
    {
      "epoch": 0.00359,
      "grad_norm": 0.8469283052653666,
      "learning_rate": 0.001077,
      "loss": 5.7908,
      "step": 359
    },
    {
      "epoch": 0.0036,
      "grad_norm": 0.824969051981288,
      "learning_rate": 0.00108,
      "loss": 5.7948,
      "step": 360
    },
    {
      "epoch": 0.00361,
      "grad_norm": 0.8007424119602606,
      "learning_rate": 0.001083,
      "loss": 5.7876,
      "step": 361
    },
    {
      "epoch": 0.00362,
      "grad_norm": 0.7740442530973947,
      "learning_rate": 0.001086,
      "loss": 5.7713,
      "step": 362
    },
    {
      "epoch": 0.00363,
      "grad_norm": 0.8217790100779416,
      "learning_rate": 0.001089,
      "loss": 5.7673,
      "step": 363
    },
    {
      "epoch": 0.00364,
      "grad_norm": 0.9222078053619298,
      "learning_rate": 0.001092,
      "loss": 5.7813,
      "step": 364
    },
    {
      "epoch": 0.00365,
      "grad_norm": 1.3725693861639678,
      "learning_rate": 0.001095,
      "loss": 5.801,
      "step": 365
    },
    {
      "epoch": 0.00366,
      "grad_norm": 0.7809021294214551,
      "learning_rate": 0.001098,
      "loss": 5.7587,
      "step": 366
    },
    {
      "epoch": 0.00367,
      "grad_norm": 1.1843193874441642,
      "learning_rate": 0.001101,
      "loss": 5.7731,
      "step": 367
    },
    {
      "epoch": 0.00368,
      "grad_norm": 0.7875897064576857,
      "learning_rate": 0.001104,
      "loss": 5.7636,
      "step": 368
    },
    {
      "epoch": 0.00369,
      "grad_norm": 0.6858717859751121,
      "learning_rate": 0.001107,
      "loss": 5.7637,
      "step": 369
    },
    {
      "epoch": 0.0037,
      "grad_norm": 0.6252688126791845,
      "learning_rate": 0.00111,
      "loss": 5.7428,
      "step": 370
    },
    {
      "epoch": 0.00371,
      "grad_norm": 0.6219765776345385,
      "learning_rate": 0.001113,
      "loss": 5.7116,
      "step": 371
    },
    {
      "epoch": 0.00372,
      "grad_norm": 0.5998751494739524,
      "learning_rate": 0.001116,
      "loss": 5.7331,
      "step": 372
    },
    {
      "epoch": 0.00373,
      "grad_norm": 0.5714053922949759,
      "learning_rate": 0.001119,
      "loss": 5.72,
      "step": 373
    },
    {
      "epoch": 0.00374,
      "grad_norm": 0.628102864915935,
      "learning_rate": 0.001122,
      "loss": 5.7262,
      "step": 374
    },
    {
      "epoch": 0.00375,
      "grad_norm": 0.5909991857975568,
      "learning_rate": 0.0011250000000000001,
      "loss": 5.7276,
      "step": 375
    },
    {
      "epoch": 0.00376,
      "grad_norm": 0.5216592986380127,
      "learning_rate": 0.001128,
      "loss": 5.72,
      "step": 376
    },
    {
      "epoch": 0.00377,
      "grad_norm": 0.5469626358369448,
      "learning_rate": 0.001131,
      "loss": 5.7145,
      "step": 377
    },
    {
      "epoch": 0.00378,
      "grad_norm": 0.6291207985788981,
      "learning_rate": 0.001134,
      "loss": 5.7123,
      "step": 378
    },
    {
      "epoch": 0.00379,
      "grad_norm": 0.6445779804368097,
      "learning_rate": 0.001137,
      "loss": 5.698,
      "step": 379
    },
    {
      "epoch": 0.0038,
      "grad_norm": 0.7034810438425685,
      "learning_rate": 0.00114,
      "loss": 5.7177,
      "step": 380
    },
    {
      "epoch": 0.00381,
      "grad_norm": 0.9330449756794958,
      "learning_rate": 0.0011430000000000001,
      "loss": 5.6924,
      "step": 381
    },
    {
      "epoch": 0.00382,
      "grad_norm": 0.9449582673655755,
      "learning_rate": 0.001146,
      "loss": 5.6863,
      "step": 382
    },
    {
      "epoch": 0.00383,
      "grad_norm": 0.6715143882341864,
      "learning_rate": 0.001149,
      "loss": 5.6846,
      "step": 383
    },
    {
      "epoch": 0.00384,
      "grad_norm": 0.8662082921806001,
      "learning_rate": 0.001152,
      "loss": 5.699,
      "step": 384
    },
    {
      "epoch": 0.00385,
      "grad_norm": 0.8482542645994808,
      "learning_rate": 0.001155,
      "loss": 5.6818,
      "step": 385
    },
    {
      "epoch": 0.00386,
      "grad_norm": 0.8694120663581818,
      "learning_rate": 0.001158,
      "loss": 5.6914,
      "step": 386
    },
    {
      "epoch": 0.00387,
      "grad_norm": 1.382803938865885,
      "learning_rate": 0.0011610000000000001,
      "loss": 5.6813,
      "step": 387
    },
    {
      "epoch": 0.00388,
      "grad_norm": 0.9644228724095026,
      "learning_rate": 0.001164,
      "loss": 5.6689,
      "step": 388
    },
    {
      "epoch": 0.00389,
      "grad_norm": 0.7396617808819147,
      "learning_rate": 0.001167,
      "loss": 5.6709,
      "step": 389
    },
    {
      "epoch": 0.0039,
      "grad_norm": 0.5609185173455993,
      "learning_rate": 0.00117,
      "loss": 5.648,
      "step": 390
    },
    {
      "epoch": 0.00391,
      "grad_norm": 0.735416794156057,
      "learning_rate": 0.001173,
      "loss": 5.6589,
      "step": 391
    },
    {
      "epoch": 0.00392,
      "grad_norm": 0.7564046131732848,
      "learning_rate": 0.001176,
      "loss": 5.6662,
      "step": 392
    },
    {
      "epoch": 0.00393,
      "grad_norm": 0.7233997204719024,
      "learning_rate": 0.0011790000000000001,
      "loss": 5.6377,
      "step": 393
    },
    {
      "epoch": 0.00394,
      "grad_norm": 0.6288246845478384,
      "learning_rate": 0.001182,
      "loss": 5.6239,
      "step": 394
    },
    {
      "epoch": 0.00395,
      "grad_norm": 0.6696319293361586,
      "learning_rate": 0.001185,
      "loss": 5.6102,
      "step": 395
    },
    {
      "epoch": 0.00396,
      "grad_norm": 0.7214553632515296,
      "learning_rate": 0.001188,
      "loss": 5.6276,
      "step": 396
    },
    {
      "epoch": 0.00397,
      "grad_norm": 0.7104651338358826,
      "learning_rate": 0.001191,
      "loss": 5.619,
      "step": 397
    },
    {
      "epoch": 0.00398,
      "grad_norm": 0.5955487966253655,
      "learning_rate": 0.0011940000000000002,
      "loss": 5.6195,
      "step": 398
    },
    {
      "epoch": 0.00399,
      "grad_norm": 0.6061151049974988,
      "learning_rate": 0.0011970000000000001,
      "loss": 5.6211,
      "step": 399
    },
    {
      "epoch": 0.004,
      "grad_norm": 0.747975845260549,
      "learning_rate": 0.0012000000000000001,
      "loss": 5.6119,
      "step": 400
    },
    {
      "epoch": 0.00401,
      "grad_norm": 1.066378470212407,
      "learning_rate": 0.001203,
      "loss": 5.6055,
      "step": 401
    },
    {
      "epoch": 0.00402,
      "grad_norm": 1.1521859370487337,
      "learning_rate": 0.001206,
      "loss": 5.6144,
      "step": 402
    },
    {
      "epoch": 0.00403,
      "grad_norm": 1.0137206411467539,
      "learning_rate": 0.001209,
      "loss": 5.6121,
      "step": 403
    },
    {
      "epoch": 0.00404,
      "grad_norm": 1.1663989264954397,
      "learning_rate": 0.0012120000000000002,
      "loss": 5.6177,
      "step": 404
    },
    {
      "epoch": 0.00405,
      "grad_norm": 1.1363050593119097,
      "learning_rate": 0.0012150000000000002,
      "loss": 5.6098,
      "step": 405
    },
    {
      "epoch": 0.00406,
      "grad_norm": 1.0087910495777368,
      "learning_rate": 0.0012180000000000001,
      "loss": 5.5952,
      "step": 406
    },
    {
      "epoch": 0.00407,
      "grad_norm": 0.9520738669869687,
      "learning_rate": 0.0012209999999999999,
      "loss": 5.5915,
      "step": 407
    },
    {
      "epoch": 0.00408,
      "grad_norm": 1.0377478362348644,
      "learning_rate": 0.001224,
      "loss": 5.6129,
      "step": 408
    },
    {
      "epoch": 0.00409,
      "grad_norm": 1.1016437762309168,
      "learning_rate": 0.001227,
      "loss": 5.5927,
      "step": 409
    },
    {
      "epoch": 0.0041,
      "grad_norm": 0.9709841842795465,
      "learning_rate": 0.00123,
      "loss": 5.5883,
      "step": 410
    },
    {
      "epoch": 0.00411,
      "grad_norm": 0.8240534275873757,
      "learning_rate": 0.001233,
      "loss": 5.5977,
      "step": 411
    },
    {
      "epoch": 0.00412,
      "grad_norm": 0.8077831494606339,
      "learning_rate": 0.001236,
      "loss": 5.5894,
      "step": 412
    },
    {
      "epoch": 0.00413,
      "grad_norm": 0.7433003641205871,
      "learning_rate": 0.0012389999999999999,
      "loss": 5.5815,
      "step": 413
    },
    {
      "epoch": 0.00414,
      "grad_norm": 0.615571577509929,
      "learning_rate": 0.001242,
      "loss": 5.5582,
      "step": 414
    },
    {
      "epoch": 0.00415,
      "grad_norm": 0.656841702591939,
      "learning_rate": 0.001245,
      "loss": 5.5606,
      "step": 415
    },
    {
      "epoch": 0.00416,
      "grad_norm": 0.613196865035469,
      "learning_rate": 0.001248,
      "loss": 5.5418,
      "step": 416
    },
    {
      "epoch": 0.00417,
      "grad_norm": 0.48631699887544017,
      "learning_rate": 0.001251,
      "loss": 5.5683,
      "step": 417
    },
    {
      "epoch": 0.00418,
      "grad_norm": 0.4611320175617472,
      "learning_rate": 0.001254,
      "loss": 5.5401,
      "step": 418
    },
    {
      "epoch": 0.00419,
      "grad_norm": 0.5824396705507784,
      "learning_rate": 0.0012569999999999999,
      "loss": 5.5305,
      "step": 419
    },
    {
      "epoch": 0.0042,
      "grad_norm": 0.8055087238093066,
      "learning_rate": 0.00126,
      "loss": 5.5467,
      "step": 420
    },
    {
      "epoch": 0.00421,
      "grad_norm": 0.9831917562206494,
      "learning_rate": 0.001263,
      "loss": 5.5503,
      "step": 421
    },
    {
      "epoch": 0.00422,
      "grad_norm": 0.8124592037579013,
      "learning_rate": 0.001266,
      "loss": 5.5357,
      "step": 422
    },
    {
      "epoch": 0.00423,
      "grad_norm": 1.0116811247233115,
      "learning_rate": 0.001269,
      "loss": 5.5337,
      "step": 423
    },
    {
      "epoch": 0.00424,
      "grad_norm": 1.368200107021336,
|
"learning_rate": 0.001272, |
|
"loss": 5.5595, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.00425, |
|
"grad_norm": 0.9353292964204464, |
|
"learning_rate": 0.001275, |
|
"loss": 5.5445, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.00426, |
|
"grad_norm": 0.9604600896045714, |
|
"learning_rate": 0.001278, |
|
"loss": 5.5493, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.00427, |
|
"grad_norm": 0.9684632680550875, |
|
"learning_rate": 0.001281, |
|
"loss": 5.5328, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.00428, |
|
"grad_norm": 0.9533788870858234, |
|
"learning_rate": 0.001284, |
|
"loss": 5.5058, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.00429, |
|
"grad_norm": 1.113271451738206, |
|
"learning_rate": 0.001287, |
|
"loss": 5.5322, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.0043, |
|
"grad_norm": 0.9492520989573756, |
|
"learning_rate": 0.00129, |
|
"loss": 5.5226, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.00431, |
|
"grad_norm": 1.1309185966069144, |
|
"learning_rate": 0.001293, |
|
"loss": 5.5118, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.00432, |
|
"grad_norm": 1.2851571092183034, |
|
"learning_rate": 0.001296, |
|
"loss": 5.5367, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.00433, |
|
"grad_norm": 0.7219415163940603, |
|
"learning_rate": 0.001299, |
|
"loss": 5.5106, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.00434, |
|
"grad_norm": 0.7943786167833424, |
|
"learning_rate": 0.001302, |
|
"loss": 5.5109, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.00435, |
|
"grad_norm": 0.7514833526497924, |
|
"learning_rate": 0.001305, |
|
"loss": 5.5051, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.00436, |
|
"grad_norm": 0.5499552719714149, |
|
"learning_rate": 0.001308, |
|
"loss": 5.4758, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.00437, |
|
"grad_norm": 0.5803132978630802, |
|
"learning_rate": 0.001311, |
|
"loss": 5.494, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.00438, |
|
"grad_norm": 0.4343754614294673, |
|
"learning_rate": 0.001314, |
|
"loss": 5.4701, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.00439, |
|
"grad_norm": 0.49450539118744047, |
|
"learning_rate": 0.001317, |
|
"loss": 5.4997, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.0044, |
|
"grad_norm": 0.3670496641121159, |
|
"learning_rate": 0.00132, |
|
"loss": 5.4633, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.00441, |
|
"grad_norm": 0.44023069828614625, |
|
"learning_rate": 0.001323, |
|
"loss": 5.4624, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.00442, |
|
"grad_norm": 0.45124022563783683, |
|
"learning_rate": 0.0013260000000000001, |
|
"loss": 5.4541, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.00443, |
|
"grad_norm": 0.5170657075032803, |
|
"learning_rate": 0.001329, |
|
"loss": 5.4572, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.00444, |
|
"grad_norm": 0.6406930762525409, |
|
"learning_rate": 0.001332, |
|
"loss": 5.4608, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.00445, |
|
"grad_norm": 0.7431205120059805, |
|
"learning_rate": 0.001335, |
|
"loss": 5.4462, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.00446, |
|
"grad_norm": 0.9849435734941417, |
|
"learning_rate": 0.001338, |
|
"loss": 5.4578, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.00447, |
|
"grad_norm": 1.1031854356255555, |
|
"learning_rate": 0.001341, |
|
"loss": 5.469, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.00448, |
|
"grad_norm": 0.8550264287932019, |
|
"learning_rate": 0.0013440000000000001, |
|
"loss": 5.4411, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.00449, |
|
"grad_norm": 0.8569858280927948, |
|
"learning_rate": 0.001347, |
|
"loss": 5.4491, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.0045, |
|
"grad_norm": 0.766625402040655, |
|
"learning_rate": 0.00135, |
|
"loss": 5.4461, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.00451, |
|
"grad_norm": 0.8992860281272513, |
|
"learning_rate": 0.001353, |
|
"loss": 5.4411, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.00452, |
|
"grad_norm": 0.9587820940422671, |
|
"learning_rate": 0.001356, |
|
"loss": 5.4273, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.00453, |
|
"grad_norm": 0.9706881368414635, |
|
"learning_rate": 0.001359, |
|
"loss": 5.4204, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.00454, |
|
"grad_norm": 1.1942708782025127, |
|
"learning_rate": 0.0013620000000000001, |
|
"loss": 5.4481, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.00455, |
|
"grad_norm": 1.0651260215062406, |
|
"learning_rate": 0.0013650000000000001, |
|
"loss": 5.4483, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.00456, |
|
"grad_norm": 1.0894309440916736, |
|
"learning_rate": 0.001368, |
|
"loss": 5.4405, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.00457, |
|
"grad_norm": 1.3632436316792311, |
|
"learning_rate": 0.001371, |
|
"loss": 5.442, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.00458, |
|
"grad_norm": 0.9802910013598249, |
|
"learning_rate": 0.001374, |
|
"loss": 5.4423, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.00459, |
|
"grad_norm": 0.9681577835390196, |
|
"learning_rate": 0.0013770000000000002, |
|
"loss": 5.4314, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.0046, |
|
"grad_norm": 0.955881976726698, |
|
"learning_rate": 0.0013800000000000002, |
|
"loss": 5.4146, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.00461, |
|
"grad_norm": 1.072138092727722, |
|
"learning_rate": 0.0013830000000000001, |
|
"loss": 5.4186, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.00462, |
|
"grad_norm": 0.8672451154734739, |
|
"learning_rate": 0.001386, |
|
"loss": 5.4193, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.00463, |
|
"grad_norm": 0.9908998654710571, |
|
"learning_rate": 0.001389, |
|
"loss": 5.4011, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.00464, |
|
"grad_norm": 1.1599842863140275, |
|
"learning_rate": 0.001392, |
|
"loss": 5.4321, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.00465, |
|
"grad_norm": 0.698212811337685, |
|
"learning_rate": 0.0013950000000000002, |
|
"loss": 5.3861, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.00466, |
|
"grad_norm": 0.6209828526031017, |
|
"learning_rate": 0.0013980000000000002, |
|
"loss": 5.4004, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.00467, |
|
"grad_norm": 0.626908730655191, |
|
"learning_rate": 0.0014010000000000001, |
|
"loss": 5.3894, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.00468, |
|
"grad_norm": 0.6046317726003113, |
|
"learning_rate": 0.001404, |
|
"loss": 5.3787, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.00469, |
|
"grad_norm": 0.5283854127913247, |
|
"learning_rate": 0.001407, |
|
"loss": 5.3767, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.0047, |
|
"grad_norm": 0.5494342704278756, |
|
"learning_rate": 0.00141, |
|
"loss": 5.3859, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.00471, |
|
"grad_norm": 0.6548172150181142, |
|
"learning_rate": 0.001413, |
|
"loss": 5.3653, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.00472, |
|
"grad_norm": 0.8352552132751145, |
|
"learning_rate": 0.001416, |
|
"loss": 5.3799, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.00473, |
|
"grad_norm": 1.04775859687285, |
|
"learning_rate": 0.001419, |
|
"loss": 5.3985, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.00474, |
|
"grad_norm": 0.9808295146566943, |
|
"learning_rate": 0.0014219999999999999, |
|
"loss": 5.3787, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.00475, |
|
"grad_norm": 1.0065096729700158, |
|
"learning_rate": 0.001425, |
|
"loss": 5.3725, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.00476, |
|
"grad_norm": 1.0604383614131039, |
|
"learning_rate": 0.001428, |
|
"loss": 5.4172, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.00477, |
|
"grad_norm": 0.5551620314639825, |
|
"learning_rate": 0.001431, |
|
"loss": 5.3484, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.00478, |
|
"grad_norm": 0.6446758952809472, |
|
"learning_rate": 0.001434, |
|
"loss": 5.3819, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.00479, |
|
"grad_norm": 0.6811842486215005, |
|
"learning_rate": 0.001437, |
|
"loss": 5.3551, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.0048, |
|
"grad_norm": 0.6985415352827566, |
|
"learning_rate": 0.0014399999999999999, |
|
"loss": 5.3502, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.00481, |
|
"grad_norm": 0.7430626707063542, |
|
"learning_rate": 0.001443, |
|
"loss": 5.3537, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.00482, |
|
"grad_norm": 0.7883555304691023, |
|
"learning_rate": 0.001446, |
|
"loss": 5.3281, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.00483, |
|
"grad_norm": 0.9604272864715826, |
|
"learning_rate": 0.001449, |
|
"loss": 5.3455, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.00484, |
|
"grad_norm": 0.9435245256880889, |
|
"learning_rate": 0.001452, |
|
"loss": 5.3494, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.00485, |
|
"grad_norm": 0.8042883678590446, |
|
"learning_rate": 0.001455, |
|
"loss": 5.3511, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.00486, |
|
"grad_norm": 0.7958734942371152, |
|
"learning_rate": 0.001458, |
|
"loss": 5.3247, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.00487, |
|
"grad_norm": 0.7735191187819994, |
|
"learning_rate": 0.001461, |
|
"loss": 5.3397, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.00488, |
|
"grad_norm": 1.012081783792506, |
|
"learning_rate": 0.001464, |
|
"loss": 5.3265, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.00489, |
|
"grad_norm": 0.8905045147335564, |
|
"learning_rate": 0.001467, |
|
"loss": 5.3131, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.0049, |
|
"grad_norm": 0.8898357576674187, |
|
"learning_rate": 0.00147, |
|
"loss": 5.3259, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.00491, |
|
"grad_norm": 0.9415043482581119, |
|
"learning_rate": 0.001473, |
|
"loss": 5.3269, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.00492, |
|
"grad_norm": 0.8320755719970162, |
|
"learning_rate": 0.001476, |
|
"loss": 5.3196, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.00493, |
|
"grad_norm": 0.8445852574519659, |
|
"learning_rate": 0.001479, |
|
"loss": 5.3145, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.00494, |
|
"grad_norm": 1.055556747775556, |
|
"learning_rate": 0.001482, |
|
"loss": 5.3274, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.00495, |
|
"grad_norm": 0.8469107680360277, |
|
"learning_rate": 0.001485, |
|
"loss": 5.3268, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.00496, |
|
"grad_norm": 0.7635320378283988, |
|
"learning_rate": 0.001488, |
|
"loss": 5.3083, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.00497, |
|
"grad_norm": 0.8508130432624411, |
|
"learning_rate": 0.001491, |
|
"loss": 5.2927, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.00498, |
|
"grad_norm": 0.6847944056515368, |
|
"learning_rate": 0.001494, |
|
"loss": 5.3134, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.00499, |
|
"grad_norm": 0.73441383592788, |
|
"learning_rate": 0.001497, |
|
"loss": 5.2894, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.005, |
|
"grad_norm": 0.6599139353534871, |
|
"learning_rate": 0.0015, |
|
"loss": 5.2898, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.00501, |
|
"grad_norm": 0.5852764030687648, |
|
"learning_rate": 0.001503, |
|
"loss": 5.2881, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.00502, |
|
"grad_norm": 0.6853021422001514, |
|
"learning_rate": 0.001506, |
|
"loss": 5.2946, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.00503, |
|
"grad_norm": 0.720141729806561, |
|
"learning_rate": 0.0015090000000000001, |
|
"loss": 5.2988, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.00504, |
|
"grad_norm": 0.7571759373525393, |
|
"learning_rate": 0.001512, |
|
"loss": 5.2829, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.00505, |
|
"grad_norm": 0.7892633543044816, |
|
"learning_rate": 0.001515, |
|
"loss": 5.2795, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.00506, |
|
"grad_norm": 0.7628738296895545, |
|
"learning_rate": 0.001518, |
|
"loss": 5.2778, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.00507, |
|
"grad_norm": 0.7334519380566521, |
|
"learning_rate": 0.001521, |
|
"loss": 5.2722, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.00508, |
|
"grad_norm": 0.7350882061617702, |
|
"learning_rate": 0.001524, |
|
"loss": 5.2693, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.00509, |
|
"grad_norm": 0.7407389039658853, |
|
"learning_rate": 0.0015270000000000001, |
|
"loss": 5.2562, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.0051, |
|
"grad_norm": 0.7104400166661097, |
|
"learning_rate": 0.0015300000000000001, |
|
"loss": 5.2696, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.00511, |
|
"grad_norm": 0.8484400275698938, |
|
"learning_rate": 0.001533, |
|
"loss": 5.2541, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.00512, |
|
"grad_norm": 0.7909533536283434, |
|
"learning_rate": 0.001536, |
|
"loss": 5.2475, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.00513, |
|
"grad_norm": 0.9481517521047255, |
|
"learning_rate": 0.001539, |
|
"loss": 5.2684, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.00514, |
|
"grad_norm": 1.405665610624422, |
|
"learning_rate": 0.001542, |
|
"loss": 5.2742, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.00515, |
|
"grad_norm": 0.9237050984729354, |
|
"learning_rate": 0.0015450000000000001, |
|
"loss": 5.2636, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.00516, |
|
"grad_norm": 1.1634719669204763, |
|
"learning_rate": 0.0015480000000000001, |
|
"loss": 5.2755, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.00517, |
|
"grad_norm": 0.8585760471561034, |
|
"learning_rate": 0.001551, |
|
"loss": 5.2577, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.00518, |
|
"grad_norm": 0.728905665920263, |
|
"learning_rate": 0.001554, |
|
"loss": 5.2422, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.00519, |
|
"grad_norm": 0.841003643450708, |
|
"learning_rate": 0.001557, |
|
"loss": 5.2472, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.0052, |
|
"grad_norm": 0.8542329179478354, |
|
"learning_rate": 0.0015600000000000002, |
|
"loss": 5.2573, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.00521, |
|
"grad_norm": 0.9136344705581151, |
|
"learning_rate": 0.0015630000000000002, |
|
"loss": 5.2327, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.00522, |
|
"grad_norm": 1.1142541660381047, |
|
"learning_rate": 0.0015660000000000001, |
|
"loss": 5.2435, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.00523, |
|
"grad_norm": 1.2424782927197504, |
|
"learning_rate": 0.001569, |
|
"loss": 5.2642, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.00524, |
|
"grad_norm": 0.9564855927553343, |
|
"learning_rate": 0.001572, |
|
"loss": 5.264, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.00525, |
|
"grad_norm": 0.8662515649518988, |
|
"learning_rate": 0.001575, |
|
"loss": 5.2537, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.00526, |
|
"grad_norm": 0.72079723918141, |
|
"learning_rate": 0.0015780000000000002, |
|
"loss": 5.2224, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.00527, |
|
"grad_norm": 0.6822789926865414, |
|
"learning_rate": 0.0015810000000000002, |
|
"loss": 5.2353, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.00528, |
|
"grad_norm": 0.6986034229049856, |
|
"learning_rate": 0.0015840000000000001, |
|
"loss": 5.2128, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.00529, |
|
"grad_norm": 0.6935526974121139, |
|
"learning_rate": 0.001587, |
|
"loss": 5.2236, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.0053, |
|
"grad_norm": 0.6635846542220551, |
|
"learning_rate": 0.00159, |
|
"loss": 5.2302, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.00531, |
|
"grad_norm": 0.7197123783695031, |
|
"learning_rate": 0.001593, |
|
"loss": 5.2167, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.00532, |
|
"grad_norm": 0.6862921713046577, |
|
"learning_rate": 0.0015960000000000002, |
|
"loss": 5.2205, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.00533, |
|
"grad_norm": 0.672254585940336, |
|
"learning_rate": 0.0015990000000000002, |
|
"loss": 5.2165, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.00534, |
|
"grad_norm": 0.6180247915886188, |
|
"learning_rate": 0.0016020000000000001, |
|
"loss": 5.2018, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.00535, |
|
"grad_norm": 0.7076887136252737, |
|
"learning_rate": 0.001605, |
|
"loss": 5.2099, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.00536, |
|
"grad_norm": 0.8627381010586813, |
|
"learning_rate": 0.001608, |
|
"loss": 5.2158, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.00537, |
|
"grad_norm": 0.9890089503230703, |
|
"learning_rate": 0.0016110000000000002, |
|
"loss": 5.2125, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.00538, |
|
"grad_norm": 1.0890684861329838, |
|
"learning_rate": 0.0016140000000000002, |
|
"loss": 5.1997, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.00539, |
|
"grad_norm": 0.7898695514456295, |
|
"learning_rate": 0.0016170000000000002, |
|
"loss": 5.1885, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.0054, |
|
"grad_norm": 0.7877858658336557, |
|
"learning_rate": 0.0016200000000000001, |
|
"loss": 5.1979, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.00541, |
|
"grad_norm": 0.8613625325852945, |
|
"learning_rate": 0.001623, |
|
"loss": 5.1884, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.00542, |
|
"grad_norm": 0.9310959295325639, |
|
"learning_rate": 0.001626, |
|
"loss": 5.2135, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.00543, |
|
"grad_norm": 1.0650490028229627, |
|
"learning_rate": 0.0016290000000000002, |
|
"loss": 5.1821, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.00544, |
|
"grad_norm": 1.058155288535174, |
|
"learning_rate": 0.0016320000000000002, |
|
"loss": 5.2106, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.00545, |
|
"grad_norm": 0.7888748411290551, |
|
"learning_rate": 0.0016350000000000002, |
|
"loss": 5.2093, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.00546, |
|
"grad_norm": 0.8327623405474364, |
|
"learning_rate": 0.0016380000000000001, |
|
"loss": 5.1821, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.00547, |
|
"grad_norm": 0.9359889846839972, |
|
"learning_rate": 0.001641, |
|
"loss": 5.1867, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.00548, |
|
"grad_norm": 0.8250520922796094, |
|
"learning_rate": 0.001644, |
|
"loss": 5.1781, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.00549, |
|
"grad_norm": 0.8346002681162655, |
|
"learning_rate": 0.0016470000000000002, |
|
"loss": 5.1786, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.0055, |
|
"grad_norm": 0.8307775357404767, |
|
"learning_rate": 0.0016500000000000002, |
|
"loss": 5.1901, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.00551, |
|
"grad_norm": 0.8868587049067305, |
|
"learning_rate": 0.0016530000000000002, |
|
"loss": 5.1882, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.00552, |
|
"grad_norm": 1.0021721179116465, |
|
"learning_rate": 0.0016560000000000001, |
|
"loss": 5.1887, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.00553, |
|
"grad_norm": 1.0435640829012027, |
|
"learning_rate": 0.001659, |
|
"loss": 5.1828, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.00554, |
|
"grad_norm": 1.091952614784726, |
|
"learning_rate": 0.0016620000000000003, |
|
"loss": 5.1843, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.00555, |
|
"grad_norm": 0.7530351004511261, |
|
"learning_rate": 0.0016650000000000002, |
|
"loss": 5.1804, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.00556, |
|
"grad_norm": 0.7840106393058217, |
|
"learning_rate": 0.0016680000000000002, |
|
"loss": 5.1664, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.00557, |
|
"grad_norm": 0.6999816254444311, |
|
"learning_rate": 0.0016710000000000002, |
|
"loss": 5.1437, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.00558, |
|
"grad_norm": 0.7845980119871422, |
|
"learning_rate": 0.0016740000000000001, |
|
"loss": 5.1792, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.00559, |
|
"grad_norm": 1.002237738003299, |
|
"learning_rate": 0.001677, |
|
"loss": 5.165, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.0056, |
|
"grad_norm": 1.1590360602458978, |
|
"learning_rate": 0.0016800000000000003, |
|
"loss": 5.15, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.00561, |
|
"grad_norm": 0.9693350319936842, |
|
"learning_rate": 0.0016830000000000003, |
|
"loss": 5.1673, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.00562, |
|
"grad_norm": 0.9337806686381701, |
|
"learning_rate": 0.0016860000000000002, |
|
"loss": 5.1656, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.00563, |
|
"grad_norm": 0.7021371152666548, |
|
"learning_rate": 0.001689, |
|
"loss": 5.1554, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.00564, |
|
"grad_norm": 0.761762323285238, |
|
"learning_rate": 0.001692, |
|
"loss": 5.1364, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.00565, |
|
"grad_norm": 0.9962272803832337, |
|
"learning_rate": 0.001695, |
|
"loss": 5.1465, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.00566, |
|
"grad_norm": 0.9270663941212276, |
|
"learning_rate": 0.0016979999999999999, |
|
"loss": 5.1454, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.00567, |
|
"grad_norm": 1.0296751705367089, |
|
"learning_rate": 0.0017009999999999998, |
|
"loss": 5.1403, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.00568, |
|
"grad_norm": 1.0921308967550072, |
|
"learning_rate": 0.0017039999999999998, |
|
"loss": 5.1647, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.00569, |
|
"grad_norm": 0.9680118526613578, |
|
"learning_rate": 0.001707, |
|
"loss": 5.1376, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.0057, |
|
"grad_norm": 1.1373894554942883, |
|
"learning_rate": 0.00171, |
|
"loss": 5.1667, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.00571, |
|
"grad_norm": 0.9491793536820188, |
|
"learning_rate": 0.001713, |
|
"loss": 5.1618, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.00572, |
|
"grad_norm": 1.0938883440367575, |
|
"learning_rate": 0.0017159999999999999, |
|
"loss": 5.1499, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.00573, |
|
"grad_norm": 0.8953824806877001, |
|
"learning_rate": 0.0017189999999999998, |
|
"loss": 5.1459, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.00574, |
|
"grad_norm": 0.8777825611555061, |
|
"learning_rate": 0.001722, |
|
"loss": 5.1467, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.00575, |
|
"grad_norm": 0.9427806830230203, |
|
"learning_rate": 0.001725, |
|
"loss": 5.132, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.00576, |
|
"grad_norm": 0.8846520959631657, |
|
"learning_rate": 0.001728, |
|
"loss": 5.1269, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.00577, |
|
"grad_norm": 0.6344552018416748, |
|
"learning_rate": 0.001731, |
|
"loss": 5.1395, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.00578, |
|
"grad_norm": 0.6771922101340231, |
|
"learning_rate": 0.0017339999999999999, |
|
"loss": 5.1199, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.00579, |
|
"grad_norm": 0.5381676919488529, |
|
"learning_rate": 0.0017369999999999998, |
|
"loss": 5.1001, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.0058, |
|
"grad_norm": 0.5416857271346956, |
|
"learning_rate": 0.00174, |
|
"loss": 5.1179, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.00581, |
|
"grad_norm": 0.4886409554358658, |
|
"learning_rate": 0.001743, |
|
"loss": 5.1131, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.00582, |
|
"grad_norm": 0.3953796837190132, |
|
"learning_rate": 0.001746, |
|
"loss": 5.0842, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.00583, |
|
"grad_norm": 0.3816231764982968, |
|
"learning_rate": 0.001749, |
|
"loss": 5.1076, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.00584, |
|
"grad_norm": 0.39062356918599367, |
|
"learning_rate": 0.0017519999999999999, |
|
"loss": 5.1003, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.00585, |
|
"grad_norm": 0.3915858898605007, |
|
"learning_rate": 0.0017549999999999998, |
|
"loss": 5.0837, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.00586, |
|
"grad_norm": 0.4063811781565053, |
|
"learning_rate": 0.001758, |
|
"loss": 5.0866, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.00587, |
|
"grad_norm": 0.4274699383917251, |
|
"learning_rate": 0.001761, |
|
"loss": 5.0709, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.00588, |
|
"grad_norm": 0.5008029306674315, |
|
"learning_rate": 0.001764, |
|
"loss": 5.0767, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.00589, |
|
"grad_norm": 0.7229655060897183, |
|
"learning_rate": 0.001767, |
|
"loss": 5.084, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.0059, |
|
"grad_norm": 0.9281114393382421, |
|
"learning_rate": 0.0017699999999999999, |
|
"loss": 5.0779, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.00591, |
|
"grad_norm": 0.9689787780419555, |
|
"learning_rate": 0.001773, |
|
"loss": 5.0756, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.00592, |
|
"grad_norm": 0.8435580831880815, |
|
"learning_rate": 0.001776, |
|
"loss": 5.0975, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.00593, |
|
"grad_norm": 0.9986679613531879, |
|
"learning_rate": 0.001779, |
|
"loss": 5.098, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.00594, |
|
"grad_norm": 1.0996840470415932, |
|
"learning_rate": 0.001782, |
|
"loss": 5.1014, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.00595, |
|
"grad_norm": 0.9601369241773534, |
|
"learning_rate": 0.001785, |
|
"loss": 5.0957, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.00596, |
|
"grad_norm": 0.8606352953891906, |
|
"learning_rate": 0.0017879999999999999, |
|
"loss": 5.0932, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.00597, |
|
"grad_norm": 0.8716680293105395, |
|
"learning_rate": 0.001791, |
|
"loss": 5.066, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.00598, |
|
"grad_norm": 0.9954805607751368, |
|
"learning_rate": 0.001794, |
|
"loss": 5.0837, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.00599, |
|
"grad_norm": 1.0139742121604893, |
|
"learning_rate": 0.001797, |
|
"loss": 5.0892, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.006, |
|
"grad_norm": 0.8994983668637855, |
|
"learning_rate": 0.0018, |
|
"loss": 5.083, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.00601, |
|
"grad_norm": 1.012817382509115, |
|
"learning_rate": 0.001803, |
|
"loss": 5.0872, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.00602, |
|
"grad_norm": 0.829792766990954, |
|
"learning_rate": 0.0018059999999999999, |
|
"loss": 5.0655, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.00603, |
|
"grad_norm": 0.8978641621726422, |
|
"learning_rate": 0.001809, |
|
"loss": 5.0896, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.00604, |
|
"grad_norm": 1.006526980400353, |
|
"learning_rate": 0.001812, |
|
"loss": 5.0733, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.00605, |
|
"grad_norm": 0.9664384705752951, |
|
"learning_rate": 0.001815, |
|
"loss": 5.0828, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.00606, |
|
"grad_norm": 1.0962791606856168, |
|
"learning_rate": 0.001818, |
|
"loss": 5.0888, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.00607, |
|
"grad_norm": 0.9313257227556363, |
|
"learning_rate": 0.001821, |
|
"loss": 5.0876, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.00608, |
|
"grad_norm": 0.9207616431206566, |
|
"learning_rate": 0.001824, |
|
"loss": 5.068, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.00609, |
|
"grad_norm": 0.9552257534317795, |
|
"learning_rate": 0.001827, |
|
"loss": 5.0751, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.0061, |
|
"grad_norm": 1.1338881088580717, |
|
"learning_rate": 0.00183, |
|
"loss": 5.062, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.00611, |
|
"grad_norm": 0.907481169345242, |
|
"learning_rate": 0.001833, |
|
"loss": 5.0576, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.00612, |
|
"grad_norm": 1.04757168088542, |
|
"learning_rate": 0.001836, |
|
"loss": 5.0686, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.00613, |
|
"grad_norm": 0.9021638869008188, |
|
"learning_rate": 0.001839, |
|
"loss": 5.0559, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.00614, |
|
"grad_norm": 0.7877209937196055, |
|
"learning_rate": 0.001842, |
|
"loss": 5.0768, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.00615, |
|
"grad_norm": 0.7456491069500266, |
|
"learning_rate": 0.001845, |
|
"loss": 5.0572, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.00616, |
|
"grad_norm": 0.7643816652567319, |
|
"learning_rate": 0.001848, |
|
"loss": 5.043, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.00617, |
|
"grad_norm": 0.8920948602141958, |
|
"learning_rate": 0.001851, |
|
"loss": 5.0555, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.00618, |
|
"grad_norm": 1.0257178323795717, |
|
"learning_rate": 0.001854, |
|
"loss": 5.0507, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.00619, |
|
"grad_norm": 0.7092119630036949, |
|
"learning_rate": 0.001857, |
|
"loss": 5.0429, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.0062, |
|
"grad_norm": 0.6092469798525071, |
|
"learning_rate": 0.00186, |
|
"loss": 5.0262, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.00621, |
|
"grad_norm": 0.7552706683211996, |
|
"learning_rate": 0.001863, |
|
"loss": 5.032, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.00622, |
|
"grad_norm": 0.6988650940233329, |
|
"learning_rate": 0.001866, |
|
"loss": 5.0245, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.00623, |
|
"grad_norm": 0.6062593525325608, |
|
"learning_rate": 0.001869, |
|
"loss": 5.0091, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.00624, |
|
"grad_norm": 0.555479795278756, |
|
"learning_rate": 0.001872, |
|
"loss": 5.0161, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.00625, |
|
"grad_norm": 0.6339519418385815, |
|
"learning_rate": 0.001875, |
|
"loss": 5.0276, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.00626, |
|
"grad_norm": 0.7820017260467441, |
|
"learning_rate": 0.0018780000000000001, |
|
"loss": 5.0084, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.00627, |
|
"grad_norm": 0.8263278266088275, |
|
"learning_rate": 0.001881, |
|
"loss": 5.0003, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.00628, |
|
"grad_norm": 0.5717634931424201, |
|
"learning_rate": 0.001884, |
|
"loss": 5.0204, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.00629, |
|
"grad_norm": 0.5505525631139665, |
|
"learning_rate": 0.001887, |
|
"loss": 4.9928, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.0063, |
|
"grad_norm": 0.47331922258372455, |
|
"learning_rate": 0.00189, |
|
"loss": 4.9837, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.00631, |
|
"grad_norm": 0.4173073508747504, |
|
"learning_rate": 0.0018930000000000002, |
|
"loss": 4.9757, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.00632, |
|
"grad_norm": 0.42808966750972455, |
|
"learning_rate": 0.0018960000000000001, |
|
"loss": 4.9928, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.00633, |
|
"grad_norm": 0.46750467774391197, |
|
"learning_rate": 0.001899, |
|
"loss": 4.9879, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.00634, |
|
"grad_norm": 0.5696215613123979, |
|
"learning_rate": 0.001902, |
|
"loss": 4.9763, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.00635, |
|
"grad_norm": 0.7138673203190975, |
|
"learning_rate": 0.001905, |
|
"loss": 4.9686, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.00636, |
|
"grad_norm": 0.8836310721952346, |
|
"learning_rate": 0.001908, |
|
"loss": 4.9828, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.00637, |
|
"grad_norm": 0.9755328249694639, |
|
"learning_rate": 0.0019110000000000002, |
|
"loss": 5.002, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.00638, |
|
"grad_norm": 1.3272264449165134, |
|
"learning_rate": 0.0019140000000000001, |
|
"loss": 5.0051, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.00639, |
|
"grad_norm": 0.9140856571338883, |
|
"learning_rate": 0.001917, |
|
"loss": 4.9726, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.0064, |
|
"grad_norm": 1.0466736994412218, |
|
"learning_rate": 0.00192, |
|
"loss": 4.9869, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.00641, |
|
"grad_norm": 1.1161814084223103, |
|
"learning_rate": 0.001923, |
|
"loss": 5.0003, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.00642, |
|
"grad_norm": 1.1352153221992676, |
|
"learning_rate": 0.001926, |
|
"loss": 4.992, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.00643, |
|
"grad_norm": 1.2268384891507862, |
|
"learning_rate": 0.0019290000000000002, |
|
"loss": 5.008, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.00644, |
|
"grad_norm": 0.9564122465750423, |
|
"learning_rate": 0.0019320000000000001, |
|
"loss": 4.9857, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.00645, |
|
"grad_norm": 0.9066234784688915, |
|
"learning_rate": 0.001935, |
|
"loss": 4.9889, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.00646, |
|
"grad_norm": 0.894776309426942, |
|
"learning_rate": 0.001938, |
|
"loss": 4.995, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.00647, |
|
"grad_norm": 1.036514260058091, |
|
"learning_rate": 0.001941, |
|
"loss": 5.0081, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.00648, |
|
"grad_norm": 1.0669688261896326, |
|
"learning_rate": 0.0019440000000000002, |
|
"loss": 4.9991, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.00649, |
|
"grad_norm": 1.0027602192299327, |
|
"learning_rate": 0.0019470000000000002, |
|
"loss": 4.9834, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.0065, |
|
"grad_norm": 1.2453243505592877, |
|
"learning_rate": 0.0019500000000000001, |
|
"loss": 5.0183, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.00651, |
|
"grad_norm": 0.9810465710779535, |
|
"learning_rate": 0.001953, |
|
"loss": 4.9899, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.00652, |
|
"grad_norm": 1.0529607937914427, |
|
"learning_rate": 0.0019560000000000003, |
|
"loss": 4.9855, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.00653, |
|
"grad_norm": 0.8367844014470341, |
|
"learning_rate": 0.0019590000000000002, |
|
"loss": 4.9719, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.00654, |
|
"grad_norm": 0.9556352679930769, |
|
"learning_rate": 0.001962, |
|
"loss": 4.96, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.00655, |
|
"grad_norm": 1.0712859822115042, |
|
"learning_rate": 0.001965, |
|
"loss": 4.9811, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.00656, |
|
"grad_norm": 0.8381525542412449, |
|
"learning_rate": 0.001968, |
|
"loss": 4.9628, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.00657, |
|
"grad_norm": 0.853522104902103, |
|
"learning_rate": 0.001971, |
|
"loss": 4.9486, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.00658, |
|
"grad_norm": 0.9276507218825019, |
|
"learning_rate": 0.001974, |
|
"loss": 4.9746, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.00659, |
|
"grad_norm": 1.0041122671115763, |
|
"learning_rate": 0.001977, |
|
"loss": 4.9552, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.0066, |
|
"grad_norm": 0.925609681342879, |
|
"learning_rate": 0.00198, |
|
"loss": 4.9516, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.00661, |
|
"grad_norm": 0.9340897694356347, |
|
"learning_rate": 0.001983, |
|
"loss": 4.9301, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.00662, |
|
"grad_norm": 1.1039924911609775, |
|
"learning_rate": 0.0019860000000000004, |
|
"loss": 4.9353, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.00663, |
|
"grad_norm": 0.842271096957419, |
|
"learning_rate": 0.0019890000000000003, |
|
"loss": 4.942, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.00664, |
|
"grad_norm": 0.9255021185692508, |
|
"learning_rate": 0.0019920000000000003, |
|
"loss": 4.9294, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.00665, |
|
"grad_norm": 0.9624840652606003, |
|
"learning_rate": 0.0019950000000000002, |
|
"loss": 4.9033, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.00666, |
|
"grad_norm": 0.8706912476713129, |
|
"learning_rate": 0.001998, |
|
"loss": 4.9275, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.00667, |
|
"grad_norm": 0.9355886153293348, |
|
"learning_rate": 0.002001, |
|
"loss": 4.9186, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.00668, |
|
"grad_norm": 0.9525697321085718, |
|
"learning_rate": 0.002004, |
|
"loss": 4.9422, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.00669, |
|
"grad_norm": 0.9357519892866003, |
|
"learning_rate": 0.002007, |
|
"loss": 4.9423, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.0067, |
|
"grad_norm": 0.8227655724451596, |
|
"learning_rate": 0.00201, |
|
"loss": 4.9287, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.00671, |
|
"grad_norm": 0.6363703099565691, |
|
"learning_rate": 0.002013, |
|
"loss": 4.8975, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.00672, |
|
"grad_norm": 0.5774852286623988, |
|
"learning_rate": 0.002016, |
|
"loss": 4.894, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.00673, |
|
"grad_norm": 0.5472152449551199, |
|
"learning_rate": 0.002019, |
|
"loss": 4.8881, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.00674, |
|
"grad_norm": 0.5267919584843693, |
|
"learning_rate": 0.0020220000000000004, |
|
"loss": 4.8857, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.00675, |
|
"grad_norm": 0.4823533534454447, |
|
"learning_rate": 0.0020250000000000003, |
|
"loss": 4.8922, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.00676, |
|
"grad_norm": 0.5135326218761617, |
|
"learning_rate": 0.0020280000000000003, |
|
"loss": 4.8739, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.00677, |
|
"grad_norm": 0.5629070673817519, |
|
"learning_rate": 0.0020310000000000003, |
|
"loss": 4.8753, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.00678, |
|
"grad_norm": 0.6636518437686225, |
|
"learning_rate": 0.0020340000000000002, |
|
"loss": 4.8844, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.00679, |
|
"grad_norm": 0.7939150938071041, |
|
"learning_rate": 0.002037, |
|
"loss": 4.8694, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.0068, |
|
"grad_norm": 0.6961050241971515, |
|
"learning_rate": 0.00204, |
|
"loss": 4.8681, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.00681, |
|
"grad_norm": 0.55281231514776, |
|
"learning_rate": 0.002043, |
|
"loss": 4.854, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.00682, |
|
"grad_norm": 0.6966375314025457, |
|
"learning_rate": 0.002046, |
|
"loss": 4.8606, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.00683, |
|
"grad_norm": 0.7919918537287608, |
|
"learning_rate": 0.002049, |
|
"loss": 4.8437, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.00684, |
|
"grad_norm": 1.014505658611748, |
|
"learning_rate": 0.002052, |
|
"loss": 4.868, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.00685, |
|
"grad_norm": 1.1979460789898289, |
|
"learning_rate": 0.0020550000000000004, |
|
"loss": 4.8824, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.00686, |
|
"grad_norm": 1.0209197434565493, |
|
"learning_rate": 0.0020580000000000004, |
|
"loss": 4.8555, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.00687, |
|
"grad_norm": 0.8666785819902827, |
|
"learning_rate": 0.0020610000000000003, |
|
"loss": 4.8457, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.00688, |
|
"grad_norm": 1.066277682185938, |
|
"learning_rate": 0.002064, |
|
"loss": 4.8576, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.00689, |
|
"grad_norm": 0.9564373481813865, |
|
"learning_rate": 0.002067, |
|
"loss": 4.8511, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.0069, |
|
"grad_norm": 1.0921039434762347, |
|
"learning_rate": 0.00207, |
|
"loss": 4.854, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.00691, |
|
"grad_norm": 0.9682767243250711, |
|
"learning_rate": 0.0020729999999999998, |
|
"loss": 4.8265, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.00692, |
|
"grad_norm": 1.0182149348151321, |
|
"learning_rate": 0.0020759999999999997, |
|
"loss": 4.8523, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.00693, |
|
"grad_norm": 1.1559582715656997, |
|
"learning_rate": 0.0020789999999999997, |
|
"loss": 4.8626, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.00694, |
|
"grad_norm": 0.8731253854999043, |
|
"learning_rate": 0.002082, |
|
"loss": 4.8314, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.00695, |
|
"grad_norm": 1.02924772696324, |
|
"learning_rate": 0.002085, |
|
"loss": 4.8388, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.00696, |
|
"grad_norm": 1.1148665982097032, |
|
"learning_rate": 0.002088, |
|
"loss": 4.8532, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.00697, |
|
"grad_norm": 1.1306884538795905, |
|
"learning_rate": 0.002091, |
|
"loss": 4.8556, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.00698, |
|
"grad_norm": 1.267565149046305, |
|
"learning_rate": 0.002094, |
|
"loss": 4.8474, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.00699, |
|
"grad_norm": 0.9067921371830103, |
|
"learning_rate": 0.002097, |
|
"loss": 4.8312, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.007, |
|
"grad_norm": 0.8797095491461238, |
|
"learning_rate": 0.0021, |
|
"loss": 4.8378, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.00701, |
|
"grad_norm": 0.9764684557107473, |
|
"learning_rate": 0.002103, |
|
"loss": 4.8231, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.00702, |
|
"grad_norm": 0.9975561372265659, |
|
"learning_rate": 0.002106, |
|
"loss": 4.8269, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.00703, |
|
"grad_norm": 0.8409706105195134, |
|
"learning_rate": 0.0021089999999999998, |
|
"loss": 4.8046, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.00704, |
|
"grad_norm": 0.8602349489288845, |
|
"learning_rate": 0.0021119999999999997, |
|
"loss": 4.8112, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.00705, |
|
"grad_norm": 0.810283974291816, |
|
"learning_rate": 0.002115, |
|
"loss": 4.8189, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.00706, |
|
"grad_norm": 0.7904979262360543, |
|
"learning_rate": 0.002118, |
|
"loss": 4.8127, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.00707, |
|
"grad_norm": 0.8638007527609218, |
|
"learning_rate": 0.002121, |
|
"loss": 4.7902, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.00708, |
|
"grad_norm": 0.9274642295068019, |
|
"learning_rate": 0.002124, |
|
"loss": 4.7754, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.00709, |
|
"grad_norm": 0.9265048264631317, |
|
"learning_rate": 0.002127, |
|
"loss": 4.8051, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.0071, |
|
"grad_norm": 1.0021235226233682, |
|
"learning_rate": 0.00213, |
|
"loss": 4.8021, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.00711, |
|
"grad_norm": 0.8616225643918476, |
|
"learning_rate": 0.002133, |
|
"loss": 4.7687, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.00712, |
|
"grad_norm": 1.1509118831082872, |
|
"learning_rate": 0.002136, |
|
"loss": 4.8063, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.00713, |
|
"grad_norm": 0.8979386810595619, |
|
"learning_rate": 0.002139, |
|
"loss": 4.8085, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.00714, |
|
"grad_norm": 0.9672478380991173, |
|
"learning_rate": 0.002142, |
|
"loss": 4.7999, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.00715, |
|
"grad_norm": 1.0283238664767786, |
|
"learning_rate": 0.0021449999999999998, |
|
"loss": 4.7746, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.00716, |
|
"grad_norm": 0.8031836486660412, |
|
"learning_rate": 0.002148, |
|
"loss": 4.7611, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.00717, |
|
"grad_norm": 0.8079202930068127, |
|
"learning_rate": 0.002151, |
|
"loss": 4.7802, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.00718, |
|
"grad_norm": 0.7625878769693025, |
|
"learning_rate": 0.002154, |
|
"loss": 4.7607, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.00719, |
|
"grad_norm": 0.8833410128202536, |
|
"learning_rate": 0.002157, |
|
"loss": 4.7767, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.0072, |
|
"grad_norm": 1.0099718860880083, |
|
"learning_rate": 0.00216, |
|
"loss": 4.7733, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.00721, |
|
"grad_norm": 1.0037155368349988, |
|
"learning_rate": 0.002163, |
|
"loss": 4.78, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.00722, |
|
"grad_norm": 0.8678480066188063, |
|
"learning_rate": 0.002166, |
|
"loss": 4.7474, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.00723, |
|
"grad_norm": 0.9399210002572385, |
|
"learning_rate": 0.002169, |
|
"loss": 4.7657, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.00724, |
|
"grad_norm": 0.8184829699796181, |
|
"learning_rate": 0.002172, |
|
"loss": 4.7533, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.00725, |
|
"grad_norm": 0.8676839571587074, |
|
"learning_rate": 0.002175, |
|
"loss": 4.7513, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.00726, |
|
"grad_norm": 0.9799992988904748, |
|
"learning_rate": 0.002178, |
|
"loss": 4.7626, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.00727, |
|
"grad_norm": 1.2281779129682024, |
|
"learning_rate": 0.0021809999999999998, |
|
"loss": 4.7581, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.00728, |
|
"grad_norm": 1.082945747060172, |
|
"learning_rate": 0.002184, |
|
"loss": 4.7657, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.00729, |
|
"grad_norm": 1.0915510364818644, |
|
"learning_rate": 0.002187, |
|
"loss": 4.7617, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.0073, |
|
"grad_norm": 1.0738468909531949, |
|
"learning_rate": 0.00219, |
|
"loss": 4.7676, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.00731, |
|
"grad_norm": 1.0774407965183543, |
|
"learning_rate": 0.002193, |
|
"loss": 4.7572, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.00732, |
|
"grad_norm": 0.9732910355796593, |
|
"learning_rate": 0.002196, |
|
"loss": 4.7638, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.00733, |
|
"grad_norm": 1.02567545008427, |
|
"learning_rate": 0.002199, |
|
"loss": 4.7467, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.00734, |
|
"grad_norm": 0.9058765241181546, |
|
"learning_rate": 0.002202, |
|
"loss": 4.7686, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.00735, |
|
"grad_norm": 0.9682392169542167, |
|
"learning_rate": 0.002205, |
|
"loss": 4.7759, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.00736, |
|
"grad_norm": 0.9938811207200824, |
|
"learning_rate": 0.002208, |
|
"loss": 4.7615, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.00737, |
|
"grad_norm": 1.0964297344539389, |
|
"learning_rate": 0.002211, |
|
"loss": 4.7347, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.00738, |
|
"grad_norm": 0.8707349769325928, |
|
"learning_rate": 0.002214, |
|
"loss": 4.7342, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.00739, |
|
"grad_norm": 0.7399818233744658, |
|
"learning_rate": 0.0022170000000000002, |
|
"loss": 4.717, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.0074, |
|
"grad_norm": 0.9377539124718652, |
|
"learning_rate": 0.00222, |
|
"loss": 4.7301, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.00741, |
|
"grad_norm": 0.8596400455739317, |
|
"learning_rate": 0.002223, |
|
"loss": 4.699, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.00742, |
|
"grad_norm": 0.6578432901740889, |
|
"learning_rate": 0.002226, |
|
"loss": 4.7249, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.00743, |
|
"grad_norm": 0.6175884285032084, |
|
"learning_rate": 0.002229, |
|
"loss": 4.6843, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.00744, |
|
"grad_norm": 0.6615203369086972, |
|
"learning_rate": 0.002232, |
|
"loss": 4.6918, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.00745, |
|
"grad_norm": 0.6999197355703424, |
|
"learning_rate": 0.002235, |
|
"loss": 4.7005, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.00746, |
|
"grad_norm": 0.7056349857734648, |
|
"learning_rate": 0.002238, |
|
"loss": 4.6964, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.00747, |
|
"grad_norm": 0.6678161149510893, |
|
"learning_rate": 0.002241, |
|
"loss": 4.6817, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.00748, |
|
"grad_norm": 0.6725287147155753, |
|
"learning_rate": 0.002244, |
|
"loss": 4.6915, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.00749, |
|
"grad_norm": 0.7368138262221237, |
|
"learning_rate": 0.002247, |
|
"loss": 4.6725, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.0075, |
|
"grad_norm": 0.7550252977049275, |
|
"learning_rate": 0.0022500000000000003, |
|
"loss": 4.7124, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.00751, |
|
"grad_norm": 0.6461697616177359, |
|
"learning_rate": 0.0022530000000000002, |
|
"loss": 4.6948, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.00752, |
|
"grad_norm": 0.6473605328229959, |
|
"learning_rate": 0.002256, |
|
"loss": 4.648, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.00753, |
|
"grad_norm": 0.8172272904356894, |
|
"learning_rate": 0.002259, |
|
"loss": 4.6929, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.00754, |
|
"grad_norm": 0.8400684755887758, |
|
"learning_rate": 0.002262, |
|
"loss": 4.7068, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.00755, |
|
"grad_norm": 0.7078832518794317, |
|
"learning_rate": 0.002265, |
|
"loss": 4.6656, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.00756, |
|
"grad_norm": 0.6076060117836831, |
|
"learning_rate": 0.002268, |
|
"loss": 4.6484, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.00757, |
|
"grad_norm": 0.7133093934008413, |
|
"learning_rate": 0.002271, |
|
"loss": 4.6658, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.00758, |
|
"grad_norm": 0.7661771348142844, |
|
"learning_rate": 0.002274, |
|
"loss": 4.6521, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.00759, |
|
"grad_norm": 0.9250288948777622, |
|
"learning_rate": 0.002277, |
|
"loss": 4.6753, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.0076, |
|
"grad_norm": 1.037174236565274, |
|
"learning_rate": 0.00228, |
|
"loss": 4.669, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.00761, |
|
"grad_norm": 0.9678315157211191, |
|
"learning_rate": 0.002283, |
|
"loss": 4.6392, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.00762, |
|
"grad_norm": 1.3728001530688312, |
|
"learning_rate": 0.0022860000000000003, |
|
"loss": 4.6453, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.00763, |
|
"grad_norm": 1.0284727877786697, |
|
"learning_rate": 0.0022890000000000002, |
|
"loss": 4.6793, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.00764, |
|
"grad_norm": 0.9914794664489192, |
|
"learning_rate": 0.002292, |
|
"loss": 4.6942, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.00765, |
|
"grad_norm": 0.984322504117537, |
|
"learning_rate": 0.002295, |
|
"loss": 4.6765, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.00766, |
|
"grad_norm": 0.9320893698991433, |
|
"learning_rate": 0.002298, |
|
"loss": 4.6792, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.00767, |
|
"grad_norm": 1.0651442494276249, |
|
"learning_rate": 0.002301, |
|
"loss": 4.6823, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.00768, |
|
"grad_norm": 0.9179111624711317, |
|
"learning_rate": 0.002304, |
|
"loss": 4.6817, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.00769, |
|
"grad_norm": 1.026483766110404, |
|
"learning_rate": 0.002307, |
|
"loss": 4.6987, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.0077, |
|
"grad_norm": 1.1653119571960542, |
|
"learning_rate": 0.00231, |
|
"loss": 4.6621, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.00771, |
|
"grad_norm": 0.8477238808348645, |
|
"learning_rate": 0.002313, |
|
"loss": 4.689, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.00772, |
|
"grad_norm": 0.6519421622488206, |
|
"learning_rate": 0.002316, |
|
"loss": 4.6631, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.00773, |
|
"grad_norm": 0.6177861857364649, |
|
"learning_rate": 0.0023190000000000003, |
|
"loss": 4.6627, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.00774, |
|
"grad_norm": 0.6901665734497584, |
|
"learning_rate": 0.0023220000000000003, |
|
"loss": 4.6775, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.00775, |
|
"grad_norm": 0.7356087164350898, |
|
"learning_rate": 0.0023250000000000002, |
|
"loss": 4.6725, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.00776, |
|
"grad_norm": 0.8693432194982287, |
|
"learning_rate": 0.002328, |
|
"loss": 4.6722, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.00777, |
|
"grad_norm": 1.099570573598906, |
|
"learning_rate": 0.002331, |
|
"loss": 4.6596, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.00778, |
|
"grad_norm": 1.0694357671416344, |
|
"learning_rate": 0.002334, |
|
"loss": 4.6725, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.00779, |
|
"grad_norm": 1.0144407661707453, |
|
"learning_rate": 0.002337, |
|
"loss": 4.6345, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.0078, |
|
"grad_norm": 0.9392788681741788, |
|
"learning_rate": 0.00234, |
|
"loss": 4.6579, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.00781, |
|
"grad_norm": 0.9039044158767507, |
|
"learning_rate": 0.002343, |
|
"loss": 4.6528, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.00782, |
|
"grad_norm": 0.9671545635863801, |
|
"learning_rate": 0.002346, |
|
"loss": 4.6552, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.00783, |
|
"grad_norm": 0.9789758043675277, |
|
"learning_rate": 0.002349, |
|
"loss": 4.6404, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.00784, |
|
"grad_norm": 0.7674708275760124, |
|
"learning_rate": 0.002352, |
|
"loss": 4.6449, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.00785, |
|
"grad_norm": 0.6509064102772842, |
|
"learning_rate": 0.0023550000000000003, |
|
"loss": 4.6391, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.00786, |
|
"grad_norm": 0.6282839162170526, |
|
"learning_rate": 0.0023580000000000003, |
|
"loss": 4.6356, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.00787, |
|
"grad_norm": 0.6520823986249177, |
|
"learning_rate": 0.0023610000000000003, |
|
"loss": 4.6384, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.00788, |
|
"grad_norm": 0.6693545084259133, |
|
"learning_rate": 0.002364, |
|
"loss": 4.6342, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.00789, |
|
"grad_norm": 0.7061838607079715, |
|
"learning_rate": 0.002367, |
|
"loss": 4.6597, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.0079, |
|
"grad_norm": 0.7001074671969121, |
|
"learning_rate": 0.00237, |
|
"loss": 4.6333, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.00791, |
|
"grad_norm": 0.6934841557036142, |
|
"learning_rate": 0.002373, |
|
"loss": 4.5873, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.00792, |
|
"grad_norm": 0.6555126375785874, |
|
"learning_rate": 0.002376, |
|
"loss": 4.6158, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.00793, |
|
"grad_norm": 0.6198619755446345, |
|
"learning_rate": 0.002379, |
|
"loss": 4.6082, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.00794, |
|
"grad_norm": 0.5540734864838481, |
|
"learning_rate": 0.002382, |
|
"loss": 4.615, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.00795, |
|
"grad_norm": 0.5756469670820633, |
|
"learning_rate": 0.002385, |
|
"loss": 4.5927, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.00796, |
|
"grad_norm": 0.5912602735893169, |
|
"learning_rate": 0.0023880000000000004, |
|
"loss": 4.5839, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.00797, |
|
"grad_norm": 0.6477479187436139, |
|
"learning_rate": 0.0023910000000000003, |
|
"loss": 4.628, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.00798, |
|
"grad_norm": 0.6568610896012951, |
|
"learning_rate": 0.0023940000000000003, |
|
"loss": 4.5975, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.00799, |
|
"grad_norm": 0.8964867071559416, |
|
"learning_rate": 0.0023970000000000003, |
|
"loss": 4.6327, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.008, |
|
"grad_norm": 1.1130272400690795, |
|
"learning_rate": 0.0024000000000000002, |
|
"loss": 4.6127, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.00801, |
|
"grad_norm": 0.8936330318178004, |
|
"learning_rate": 0.002403, |
|
"loss": 4.624, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.00802, |
|
"grad_norm": 1.279259611288336, |
|
"learning_rate": 0.002406, |
|
"loss": 4.6431, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.00803, |
|
"grad_norm": 0.7588568023085343, |
|
"learning_rate": 0.002409, |
|
"loss": 4.5967, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.00804, |
|
"grad_norm": 0.9911172738466991, |
|
"learning_rate": 0.002412, |
|
"loss": 4.6083, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.00805, |
|
"grad_norm": 1.0297855025384943, |
|
"learning_rate": 0.002415, |
|
"loss": 4.6095, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.00806, |
|
"grad_norm": 0.8656546102408385, |
|
"learning_rate": 0.002418, |
|
"loss": 4.6231, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.00807, |
|
"grad_norm": 0.7922998530369505, |
|
"learning_rate": 0.0024210000000000004, |
|
"loss": 4.6144, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.00808, |
|
"grad_norm": 0.7119238411669042, |
|
"learning_rate": 0.0024240000000000004, |
|
"loss": 4.5933, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.00809, |
|
"grad_norm": 0.6923611180056076, |
|
"learning_rate": 0.0024270000000000003, |
|
"loss": 4.5855, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.0081, |
|
"grad_norm": 0.759382946117684, |
|
"learning_rate": 0.0024300000000000003, |
|
"loss": 4.6149, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.00811, |
|
"grad_norm": 0.9629068291697402, |
|
"learning_rate": 0.0024330000000000003, |
|
"loss": 4.5955, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.00812, |
|
"grad_norm": 0.9897740547487952, |
|
"learning_rate": 0.0024360000000000002, |
|
"loss": 4.5793, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.00813, |
|
"grad_norm": 1.0343029805479964, |
|
"learning_rate": 0.0024389999999999998, |
|
"loss": 4.6192, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.00814, |
|
"grad_norm": 1.188458156645309, |
|
"learning_rate": 0.0024419999999999997, |
|
"loss": 4.6409, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.00815, |
|
"grad_norm": 0.960728927074141, |
|
"learning_rate": 0.0024449999999999997, |
|
"loss": 4.6319, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.00816, |
|
"grad_norm": 1.0099133377105225, |
|
"learning_rate": 0.002448, |
|
"loss": 4.6186, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.00817, |
|
"grad_norm": 0.9311887346236664, |
|
"learning_rate": 0.002451, |
|
"loss": 4.6246, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.00818, |
|
"grad_norm": 1.2609797944179002, |
|
"learning_rate": 0.002454, |
|
"loss": 4.625, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.00819, |
|
"grad_norm": 1.0447566978787928, |
|
"learning_rate": 0.002457, |
|
"loss": 4.6401, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.0082, |
|
"grad_norm": 1.1000322233283122, |
|
"learning_rate": 0.00246, |
|
"loss": 4.6239, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.00821, |
|
"grad_norm": 0.8928816997822231, |
|
"learning_rate": 0.002463, |
|
"loss": 4.5932, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.00822, |
|
"grad_norm": 0.9105337296182261, |
|
"learning_rate": 0.002466, |
|
"loss": 4.5996, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.00823, |
|
"grad_norm": 0.6537296194510775, |
|
"learning_rate": 0.002469, |
|
"loss": 4.5905, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.00824, |
|
"grad_norm": 0.6615102644633621, |
|
"learning_rate": 0.002472, |
|
"loss": 4.61, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.00825, |
|
"grad_norm": 0.7605760557018463, |
|
"learning_rate": 0.0024749999999999998, |
|
"loss": 4.6148, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.00826, |
|
"grad_norm": 0.9288808111624368, |
|
"learning_rate": 0.0024779999999999997, |
|
"loss": 4.5806, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.00827, |
|
"grad_norm": 0.8099205132023622, |
|
"learning_rate": 0.002481, |
|
"loss": 4.6084, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.00828, |
|
"grad_norm": 0.7444084275861881, |
|
"learning_rate": 0.002484, |
|
"loss": 4.5965, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.00829, |
|
"grad_norm": 0.6438605280155706, |
|
"learning_rate": 0.002487, |
|
"loss": 4.5891, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.0083, |
|
"grad_norm": 0.6242869251575957, |
|
"learning_rate": 0.00249, |
|
"loss": 4.5955, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.00831, |
|
"grad_norm": 0.6705073637500226, |
|
"learning_rate": 0.002493, |
|
"loss": 4.576, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.00832, |
|
"grad_norm": 0.7603459804613621, |
|
"learning_rate": 0.002496, |
|
"loss": 4.5759, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.00833, |
|
"grad_norm": 0.7662808437283888, |
|
"learning_rate": 0.002499, |
|
"loss": 4.5696, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.00834, |
|
"grad_norm": 0.8012380275176963, |
|
"learning_rate": 0.002502, |
|
"loss": 4.5743, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.00835, |
|
"grad_norm": 1.0075156101089233, |
|
"learning_rate": 0.002505, |
|
"loss": 4.5783, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.00836, |
|
"grad_norm": 1.0847073328294785, |
|
"learning_rate": 0.002508, |
|
"loss": 4.6155, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.00837, |
|
"grad_norm": 0.8849400794535106, |
|
"learning_rate": 0.0025109999999999998, |
|
"loss": 4.5603, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.00838, |
|
"grad_norm": 0.8832989831172253, |
|
"learning_rate": 0.0025139999999999997, |
|
"loss": 4.5816, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.00839, |
|
"grad_norm": 0.9511730781538094, |
|
"learning_rate": 0.002517, |
|
"loss": 4.5843, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.0084, |
|
"grad_norm": 1.037817208386755, |
|
"learning_rate": 0.00252, |
|
"loss": 4.5831, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.00841, |
|
"grad_norm": 0.8472466492390959, |
|
"learning_rate": 0.002523, |
|
"loss": 4.5624, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.00842, |
|
"grad_norm": 0.9269217528832409, |
|
"learning_rate": 0.002526, |
|
"loss": 4.5661, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.00843, |
|
"grad_norm": 0.8716247137682855, |
|
"learning_rate": 0.002529, |
|
"loss": 4.5688, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.00844, |
|
"grad_norm": 0.7416972066179122, |
|
"learning_rate": 0.002532, |
|
"loss": 4.585, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.00845, |
|
"grad_norm": 0.6177824387251759, |
|
"learning_rate": 0.002535, |
|
"loss": 4.5442, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.00846, |
|
"grad_norm": 0.5865348849804463, |
|
"learning_rate": 0.002538, |
|
"loss": 4.5656, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.00847, |
|
"grad_norm": 0.5324615561823309, |
|
"learning_rate": 0.002541, |
|
"loss": 4.5442, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.00848, |
|
"grad_norm": 0.5568583953268653, |
|
"learning_rate": 0.002544, |
|
"loss": 4.5407, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.00849, |
|
"grad_norm": 0.600307108588631, |
|
"learning_rate": 0.002547, |
|
"loss": 4.5371, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.0085, |
|
"grad_norm": 0.5343909127282478, |
|
"learning_rate": 0.00255, |
|
"loss": 4.5435, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.00851, |
|
"grad_norm": 0.5790732432599173, |
|
"learning_rate": 0.002553, |
|
"loss": 4.5588, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.00852, |
|
"grad_norm": 0.6406298666409433, |
|
"learning_rate": 0.002556, |
|
"loss": 4.5503, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.00853, |
|
"grad_norm": 0.6711876835719848, |
|
"learning_rate": 0.002559, |
|
"loss": 4.5298, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.00854, |
|
"grad_norm": 0.643558144142947, |
|
"learning_rate": 0.002562, |
|
"loss": 4.5219, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.00855, |
|
"grad_norm": 0.621905866188772, |
|
"learning_rate": 0.002565, |
|
"loss": 4.5026, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.00856, |
|
"grad_norm": 0.7503391003054042, |
|
"learning_rate": 0.002568, |
|
"loss": 4.5375, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.00857, |
|
"grad_norm": 0.90802719077466, |
|
"learning_rate": 0.002571, |
|
"loss": 4.5344, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.00858, |
|
"grad_norm": 0.9157789056738207, |
|
"learning_rate": 0.002574, |
|
"loss": 4.5308, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.00859, |
|
"grad_norm": 0.8455467899517649, |
|
"learning_rate": 0.002577, |
|
"loss": 4.5208, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.0086, |
|
"grad_norm": 0.929986585155377, |
|
"learning_rate": 0.00258, |
|
"loss": 4.5557, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.00861, |
|
"grad_norm": 1.062443351155347, |
|
"learning_rate": 0.0025830000000000002, |
|
"loss": 4.5815, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.00862, |
|
"grad_norm": 1.0476479971551458, |
|
"learning_rate": 0.002586, |
|
"loss": 4.5382, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.00863, |
|
"grad_norm": 0.9516272804079478, |
|
"learning_rate": 0.002589, |
|
"loss": 4.5686, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.00864, |
|
"grad_norm": 1.0775847362135182, |
|
"learning_rate": 0.002592, |
|
"loss": 4.5222, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.00865, |
|
"grad_norm": 0.902924351244226, |
|
"learning_rate": 0.002595, |
|
"loss": 4.5357, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.00866, |
|
"grad_norm": 0.889409265471235, |
|
"learning_rate": 0.002598, |
|
"loss": 4.5623, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.00867, |
|
"grad_norm": 1.1312843257696636, |
|
"learning_rate": 0.002601, |
|
"loss": 4.5565, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.00868, |
|
"grad_norm": 1.0195609890673947, |
|
"learning_rate": 0.002604, |
|
"loss": 4.5479, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.00869, |
|
"grad_norm": 0.8378311515658349, |
|
"learning_rate": 0.002607, |
|
"loss": 4.5492, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.0087, |
|
"grad_norm": 0.93569973985326, |
|
"learning_rate": 0.00261, |
|
"loss": 4.5413, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.00871, |
|
"grad_norm": 0.9947247250751194, |
|
"learning_rate": 0.002613, |
|
"loss": 4.5608, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.00872, |
|
"grad_norm": 0.9456334146879876, |
|
"learning_rate": 0.002616, |
|
"loss": 4.5489, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.00873, |
|
"grad_norm": 0.9088952463307589, |
|
"learning_rate": 0.0026190000000000002, |
|
"loss": 4.5587, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.00874, |
|
"grad_norm": 0.7636008668853458, |
|
"learning_rate": 0.002622, |
|
"loss": 4.5702, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.00875, |
|
"grad_norm": 0.8446171091890929, |
|
"learning_rate": 0.002625, |
|
"loss": 4.5191, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.00876, |
|
"grad_norm": 0.8731048122579586, |
|
"learning_rate": 0.002628, |
|
"loss": 4.551, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.00877, |
|
"grad_norm": 0.9444127454444267, |
|
"learning_rate": 0.002631, |
|
"loss": 4.5701, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.00878, |
|
"grad_norm": 0.9863621265162925, |
|
"learning_rate": 0.002634, |
|
"loss": 4.5108, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.00879, |
|
"grad_norm": 0.7746522972030878, |
|
"learning_rate": 0.002637, |
|
"loss": 4.5293, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.0088, |
|
"grad_norm": 0.7150869014843059, |
|
"learning_rate": 0.00264, |
|
"loss": 4.5368, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.00881, |
|
"grad_norm": 0.866142425879982, |
|
"learning_rate": 0.002643, |
|
"loss": 4.5448, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.00882, |
|
"grad_norm": 1.0565004714930601, |
|
"learning_rate": 0.002646, |
|
"loss": 4.5522, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.00883, |
|
"grad_norm": 0.88323512650563, |
|
"learning_rate": 0.002649, |
|
"loss": 4.5382, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.00884, |
|
"grad_norm": 0.8377440854137777, |
|
"learning_rate": 0.0026520000000000003, |
|
"loss": 4.5227, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.00885, |
|
"grad_norm": 0.8587569363150891, |
|
"learning_rate": 0.0026550000000000002, |
|
"loss": 4.5189, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.00886, |
|
"grad_norm": 0.9455034420832737, |
|
"learning_rate": 0.002658, |
|
"loss": 4.5539, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.00887, |
|
"grad_norm": 0.8258368480900744, |
|
"learning_rate": 0.002661, |
|
"loss": 4.536, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.00888, |
|
"grad_norm": 0.927023832062946, |
|
"learning_rate": 0.002664, |
|
"loss": 4.5392, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.00889, |
|
"grad_norm": 0.9905400773231482, |
|
"learning_rate": 0.002667, |
|
"loss": 4.5077, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.0089, |
|
"grad_norm": 1.0181625806478707, |
|
"learning_rate": 0.00267, |
|
"loss": 4.551, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.00891, |
|
"grad_norm": 1.0618776306697646, |
|
"learning_rate": 0.002673, |
|
"loss": 4.5446, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.00892, |
|
"grad_norm": 0.9464629097549706, |
|
"learning_rate": 0.002676, |
|
"loss": 4.5493, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.00893, |
|
"grad_norm": 1.02959831042168, |
|
"learning_rate": 0.002679, |
|
"loss": 4.5321, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.00894, |
|
"grad_norm": 0.8717588229222071, |
|
"learning_rate": 0.002682, |
|
"loss": 4.5126, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.00895, |
|
"grad_norm": 0.7766302283006379, |
|
"learning_rate": 0.0026850000000000003, |
|
"loss": 4.5191, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.00896, |
|
"grad_norm": 0.7210349653989065, |
|
"learning_rate": 0.0026880000000000003, |
|
"loss": 4.5061, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.00897, |
|
"grad_norm": 0.7840425119747009, |
|
"learning_rate": 0.0026910000000000002, |
|
"loss": 4.5249, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.00898, |
|
"grad_norm": 0.7907811575022647, |
|
"learning_rate": 0.002694, |
|
"loss": 4.518, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.00899, |
|
"grad_norm": 0.7866169386193732, |
|
"learning_rate": 0.002697, |
|
"loss": 4.5195, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.009, |
|
"grad_norm": 0.7303478296484299, |
|
"learning_rate": 0.0027, |
|
"loss": 4.5449, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.00901, |
|
"grad_norm": 0.6444049592088867, |
|
"learning_rate": 0.002703, |
|
"loss": 4.4943, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.00902, |
|
"grad_norm": 0.5222361304775145, |
|
"learning_rate": 0.002706, |
|
"loss": 4.498, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.00903, |
|
"grad_norm": 0.6152635474063327, |
|
"learning_rate": 0.002709, |
|
"loss": 4.4967, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.00904, |
|
"grad_norm": 0.6931791624762859, |
|
"learning_rate": 0.002712, |
|
"loss": 4.4818, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.00905, |
|
"grad_norm": 0.6467753244335184, |
|
"learning_rate": 0.002715, |
|
"loss": 4.4929, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.00906, |
|
"grad_norm": 0.6638188976320862, |
|
"learning_rate": 0.002718, |
|
"loss": 4.5049, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.00907, |
|
"grad_norm": 0.503631484987646, |
|
"learning_rate": 0.0027210000000000003, |
|
"loss": 4.5121, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.00908, |
|
"grad_norm": 0.4745734069368428, |
|
"learning_rate": 0.0027240000000000003, |
|
"loss": 4.481, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.00909, |
|
"grad_norm": 0.5648420216556126, |
|
"learning_rate": 0.0027270000000000003, |
|
"loss": 4.5013, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.0091, |
|
"grad_norm": 0.5471966544704259, |
|
"learning_rate": 0.0027300000000000002, |
|
"loss": 4.5027, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.00911, |
|
"grad_norm": 0.6580432739675335, |
|
"learning_rate": 0.002733, |
|
"loss": 4.4619, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.00912, |
|
"grad_norm": 0.652804172410743, |
|
"learning_rate": 0.002736, |
|
"loss": 4.4678, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.00913, |
|
"grad_norm": 0.6108886726833864, |
|
"learning_rate": 0.002739, |
|
"loss": 4.4601, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.00914, |
|
"grad_norm": 0.5246452629450868, |
|
"learning_rate": 0.002742, |
|
"loss": 4.4865, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.00915, |
|
"grad_norm": 0.6046562570851716, |
|
"learning_rate": 0.002745, |
|
"loss": 4.4501, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.00916, |
|
"grad_norm": 0.6470672223067736, |
|
"learning_rate": 0.002748, |
|
"loss": 4.4304, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.00917, |
|
"grad_norm": 0.6458845430146066, |
|
"learning_rate": 0.002751, |
|
"loss": 4.4431, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.00918, |
|
"grad_norm": 0.6107710101855935, |
|
"learning_rate": 0.0027540000000000004, |
|
"loss": 4.4622, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.00919, |
|
"grad_norm": 0.6056157888916294, |
|
"learning_rate": 0.0027570000000000003, |
|
"loss": 4.4436, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.0092, |
|
"grad_norm": 0.6858065732895877, |
|
"learning_rate": 0.0027600000000000003, |
|
"loss": 4.462, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.00921, |
|
"grad_norm": 0.9391926745722488, |
|
"learning_rate": 0.0027630000000000003, |
|
"loss": 4.4556, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.00922, |
|
"grad_norm": 1.1348542218598812, |
|
"learning_rate": 0.0027660000000000002, |
|
"loss": 4.5063, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.00923, |
|
"grad_norm": 1.0421503736233508, |
|
"learning_rate": 0.002769, |
|
"loss": 4.4713, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.00924, |
|
"grad_norm": 1.0588478925879097, |
|
"learning_rate": 0.002772, |
|
"loss": 4.4959, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.00925, |
|
"grad_norm": 0.9781832929515508, |
|
"learning_rate": 0.002775, |
|
"loss": 4.4806, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.00926, |
|
"grad_norm": 0.7776497655560727, |
|
"learning_rate": 0.002778, |
|
"loss": 4.488, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.00927, |
|
"grad_norm": 0.7034519817521436, |
|
"learning_rate": 0.002781, |
|
"loss": 4.4448, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.00928, |
|
"grad_norm": 0.9270286412247504, |
|
"learning_rate": 0.002784, |
|
"loss": 4.4965, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.00929, |
|
"grad_norm": 1.0728170784278697, |
|
"learning_rate": 0.0027870000000000004, |
|
"loss": 4.4869, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.0093, |
|
"grad_norm": 0.9492127377122095, |
|
"learning_rate": 0.0027900000000000004, |
|
"loss": 4.4732, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.00931, |
|
"grad_norm": 0.9513900088751025, |
|
"learning_rate": 0.0027930000000000003, |
|
"loss": 4.4833, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.00932, |
|
"grad_norm": 1.4536440294804005, |
|
"learning_rate": 0.0027960000000000003, |
|
"loss": 4.5168, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.00933, |
|
"grad_norm": 1.132862147568844, |
|
"learning_rate": 0.0027990000000000003, |
|
"loss": 4.4769, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.00934, |
|
"grad_norm": 0.8513409648274727, |
|
"learning_rate": 0.0028020000000000002, |
|
"loss": 4.4941, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.00935, |
|
"grad_norm": 0.8677938211616196, |
|
"learning_rate": 0.002805, |
|
"loss": 4.5057, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.00936, |
|
"grad_norm": 0.8298971693271944, |
|
"learning_rate": 0.002808, |
|
"loss": 4.5081, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.00937, |
|
"grad_norm": 0.8363829119527492, |
|
"learning_rate": 0.002811, |
|
"loss": 4.521, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.00938, |
|
"grad_norm": 1.0036953395826609, |
|
"learning_rate": 0.002814, |
|
"loss": 4.5006, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.00939, |
|
"grad_norm": 1.0054078613176451, |
|
"learning_rate": 0.002817, |
|
"loss": 4.5073, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.0094, |
|
"grad_norm": 0.8667374621688471, |
|
"learning_rate": 0.00282, |
|
"loss": 4.4958, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.00941, |
|
"grad_norm": 0.9205808599892458, |
|
"learning_rate": 0.002823, |
|
"loss": 4.4733, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.00942, |
|
"grad_norm": 0.9778408651584425, |
|
"learning_rate": 0.002826, |
|
"loss": 4.5008, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.00943, |
|
"grad_norm": 1.082550194860624, |
|
"learning_rate": 0.002829, |
|
"loss": 4.4958, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.00944, |
|
"grad_norm": 1.355725245571646, |
|
"learning_rate": 0.002832, |
|
"loss": 4.5179, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.00945, |
|
"grad_norm": 0.8345488861943434, |
|
"learning_rate": 0.002835, |
|
"loss": 4.5015, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.00946, |
|
"grad_norm": 1.0009931674850154, |
|
"learning_rate": 0.002838, |
|
"loss": 4.5193, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.00947, |
|
"grad_norm": 1.0176650405493206, |
|
"learning_rate": 0.0028409999999999998, |
|
"loss": 4.4912, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.00948, |
|
"grad_norm": 0.836896216552642, |
|
"learning_rate": 0.0028439999999999997, |
|
"loss": 4.472, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.00949, |
|
"grad_norm": 0.6881971993105954, |
|
"learning_rate": 0.002847, |
|
"loss": 4.5057, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.0095, |
|
"grad_norm": 0.8926787293989068, |
|
"learning_rate": 0.00285, |
|
"loss": 4.5027, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.00951, |
|
"grad_norm": 1.0437772730376889, |
|
"learning_rate": 0.002853, |
|
"loss": 4.4861, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.00952, |
|
"grad_norm": 0.8745743913439339, |
|
"learning_rate": 0.002856, |
|
"loss": 4.5041, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.00953, |
|
"grad_norm": 0.7847706213592531, |
|
"learning_rate": 0.002859, |
|
"loss": 4.4446, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.00954, |
|
"grad_norm": 0.6236105730880978, |
|
"learning_rate": 0.002862, |
|
"loss": 4.4945, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.00955, |
|
"grad_norm": 0.5696186051972435, |
|
"learning_rate": 0.002865, |
|
"loss": 4.475, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.00956, |
|
"grad_norm": 0.5459272314199634, |
|
"learning_rate": 0.002868, |
|
"loss": 4.4518, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.00957, |
|
"grad_norm": 0.5100325019322003, |
|
"learning_rate": 0.002871, |
|
"loss": 4.4721, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.00958, |
|
"grad_norm": 0.6617022302690957, |
|
"learning_rate": 0.002874, |
|
"loss": 4.4567, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.00959, |
|
"grad_norm": 0.7948420381771908, |
|
"learning_rate": 0.002877, |
|
"loss": 4.4693, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.0096, |
|
"grad_norm": 1.0277128972108451, |
|
"learning_rate": 0.0028799999999999997, |
|
"loss": 4.4747, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.00961, |
|
"grad_norm": 0.9925275083373442, |
|
"learning_rate": 0.002883, |
|
"loss": 4.4507, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.00962, |
|
"grad_norm": 1.0177847800658486, |
|
"learning_rate": 0.002886, |
|
"loss": 4.4861, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.00963, |
|
"grad_norm": 1.0118802628275685, |
|
"learning_rate": 0.002889, |
|
"loss": 4.4865, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.00964, |
|
"grad_norm": 1.1856958623453784, |
|
"learning_rate": 0.002892, |
|
"loss": 4.4868, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.00965, |
|
"grad_norm": 0.8341279969334199, |
|
"learning_rate": 0.002895, |
|
"loss": 4.4524, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.00966, |
|
"grad_norm": 0.6756655743310646, |
|
"learning_rate": 0.002898, |
|
"loss": 4.4552, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.00967, |
|
"grad_norm": 0.6634929266596646, |
|
"learning_rate": 0.002901, |
|
"loss": 4.4933, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.00968, |
|
"grad_norm": 0.6850072615364151, |
|
"learning_rate": 0.002904, |
|
"loss": 4.4564, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.00969, |
|
"grad_norm": 0.6166935294692494, |
|
"learning_rate": 0.002907, |
|
"loss": 4.4862, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.0097, |
|
"grad_norm": 0.6616720297110108, |
|
"learning_rate": 0.00291, |
|
"loss": 4.4401, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.00971, |
|
"grad_norm": 0.5463010292381552, |
|
"learning_rate": 0.002913, |
|
"loss": 4.4415, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.00972, |
|
"grad_norm": 0.5115285821904162, |
|
"learning_rate": 0.002916, |
|
"loss": 4.4454, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.00973, |
|
"grad_norm": 0.543781975527911, |
|
"learning_rate": 0.002919, |
|
"loss": 4.4402, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.00974, |
|
"grad_norm": 0.5167263203162235, |
|
"learning_rate": 0.002922, |
|
"loss": 4.4246, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.00975, |
|
"grad_norm": 0.4676730441900605, |
|
"learning_rate": 0.002925, |
|
"loss": 4.4434, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.00976, |
|
"grad_norm": 0.4483140513716565, |
|
"learning_rate": 0.002928, |
|
"loss": 4.4259, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.00977, |
|
"grad_norm": 0.45300675248114197, |
|
"learning_rate": 0.002931, |
|
"loss": 4.4582, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.00978, |
|
"grad_norm": 0.524704391185419, |
|
"learning_rate": 0.002934, |
|
"loss": 4.4335, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.00979, |
|
"grad_norm": 0.6643809565846889, |
|
"learning_rate": 0.002937, |
|
"loss": 4.4266, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.0098, |
|
"grad_norm": 0.9159629342753403, |
|
"learning_rate": 0.00294, |
|
"loss": 4.433, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.00981, |
|
"grad_norm": 1.0134318026936866, |
|
"learning_rate": 0.002943, |
|
"loss": 4.4516, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.00982, |
|
"grad_norm": 0.7532241089289973, |
|
"learning_rate": 0.002946, |
|
"loss": 4.4387, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.00983, |
|
"grad_norm": 0.7539130672753217, |
|
"learning_rate": 0.0029490000000000002, |
|
"loss": 4.4372, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.00984, |
|
"grad_norm": 0.7045823796689694, |
|
"learning_rate": 0.002952, |
|
"loss": 4.4213, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.00985, |
|
"grad_norm": 0.6478785171714704, |
|
"learning_rate": 0.002955, |
|
"loss": 4.444, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.00986, |
|
"grad_norm": 0.5640139479823427, |
|
"learning_rate": 0.002958, |
|
"loss": 4.3997, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.00987, |
|
"grad_norm": 0.6145958247621988, |
|
"learning_rate": 0.002961, |
|
"loss": 4.4141, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.00988, |
|
"grad_norm": 0.7121366087401472, |
|
"learning_rate": 0.002964, |
|
"loss": 4.4512, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.00989, |
|
"grad_norm": 0.7474063595618389, |
|
"learning_rate": 0.002967, |
|
"loss": 4.3897, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.0099, |
|
"grad_norm": 0.7608901784540225, |
|
"learning_rate": 0.00297, |
|
"loss": 4.4296, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.00991, |
|
"grad_norm": 0.8136541225519112, |
|
"learning_rate": 0.002973, |
|
"loss": 4.4314, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.00992, |
|
"grad_norm": 0.7865701920195308, |
|
"learning_rate": 0.002976, |
|
"loss": 4.4266, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.00993, |
|
"grad_norm": 0.8315737176917932, |
|
"learning_rate": 0.002979, |
|
"loss": 4.4267, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.00994, |
|
"grad_norm": 0.7821802322270756, |
|
"learning_rate": 0.002982, |
|
"loss": 4.4281, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.00995, |
|
"grad_norm": 0.8705272973695986, |
|
"learning_rate": 0.0029850000000000002, |
|
"loss": 4.4331, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.00996, |
|
"grad_norm": 0.9954881536889274, |
|
"learning_rate": 0.002988, |
|
"loss": 4.4408, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.00997, |
|
"grad_norm": 0.9720370341600497, |
|
"learning_rate": 0.002991, |
|
"loss": 4.4354, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.00998, |
|
"grad_norm": 0.7522042168889891, |
|
"learning_rate": 0.002994, |
|
"loss": 4.4331, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.00999, |
|
"grad_norm": 0.9425882614336212, |
|
"learning_rate": 0.002997, |
|
"loss": 4.4261, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.9689827453070083, |
|
"learning_rate": 0.003, |
|
"loss": 4.458, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.01001, |
|
"grad_norm": 0.9802085236972514, |
|
"learning_rate": 0.003, |
|
"loss": 4.4569, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.01002, |
|
"grad_norm": 0.9742392022619593, |
|
"learning_rate": 0.003, |
|
"loss": 4.4262, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.01003, |
|
"grad_norm": 0.9115330327806416, |
|
"learning_rate": 0.003, |
|
"loss": 4.4513, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.01004, |
|
"grad_norm": 0.8562148232052564, |
|
"learning_rate": 0.003, |
|
"loss": 4.4397, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.01005, |
|
"grad_norm": 0.9105663755998641, |
|
"learning_rate": 0.003, |
|
"loss": 4.4154, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.01006, |
|
"grad_norm": 1.018045955439956, |
|
"learning_rate": 0.003, |
|
"loss": 4.433, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.01007, |
|
"grad_norm": 0.9357911521230117, |
|
"learning_rate": 0.003, |
|
"loss": 4.4713, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.01008, |
|
"grad_norm": 0.9681416399703533, |
|
"learning_rate": 0.003, |
|
"loss": 4.4846, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.01009, |
|
"grad_norm": 1.030244827646589, |
|
"learning_rate": 0.003, |
|
"loss": 4.4619, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.0101, |
|
"grad_norm": 1.0010362528613534, |
|
"learning_rate": 0.003, |
|
"loss": 4.4688, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.01011, |
|
"grad_norm": 1.0284858383133122, |
|
"learning_rate": 0.003, |
|
"loss": 4.4881, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.01012, |
|
"grad_norm": 0.9778345429910184, |
|
"learning_rate": 0.003, |
|
"loss": 4.4674, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.01013, |
|
"grad_norm": 0.8705611016855861, |
|
"learning_rate": 0.003, |
|
"loss": 4.4919, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.01014, |
|
"grad_norm": 0.8083144078788229, |
|
"learning_rate": 0.003, |
|
"loss": 4.4268, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.01015, |
|
"grad_norm": 0.7155205086193928, |
|
"learning_rate": 0.003, |
|
"loss": 4.4657, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.01016, |
|
"grad_norm": 0.5844948538982189, |
|
"learning_rate": 0.003, |
|
"loss": 4.466, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.01017, |
|
"grad_norm": 0.50419422786756, |
|
"learning_rate": 0.003, |
|
"loss": 4.4019, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.01018, |
|
"grad_norm": 0.5273528303530755, |
|
"learning_rate": 0.003, |
|
"loss": 4.4545, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.01019, |
|
"grad_norm": 0.5329688106940915, |
|
"learning_rate": 0.003, |
|
"loss": 4.4315, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.0102, |
|
"grad_norm": 0.6285459843623249, |
|
"learning_rate": 0.003, |
|
"loss": 4.4292, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.01021, |
|
"grad_norm": 0.6444525489483212, |
|
"learning_rate": 0.003, |
|
"loss": 4.412, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.01022, |
|
"grad_norm": 0.6609826768689684, |
|
"learning_rate": 0.003, |
|
"loss": 4.4194, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.01023, |
|
"grad_norm": 0.6479610287689606, |
|
"learning_rate": 0.003, |
|
"loss": 4.4151, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.01024, |
|
"grad_norm": 0.7091931447524652, |
|
"learning_rate": 0.003, |
|
"loss": 4.3798, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.01025, |
|
"grad_norm": 0.7026391214213478, |
|
"learning_rate": 0.003, |
|
"loss": 4.4116, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.01026, |
|
"grad_norm": 0.6731332289892269, |
|
"learning_rate": 0.003, |
|
"loss": 4.3991, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.01027, |
|
"grad_norm": 0.6590644472165706, |
|
"learning_rate": 0.003, |
|
"loss": 4.4177, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.01028, |
|
"grad_norm": 0.7485101036485022, |
|
"learning_rate": 0.003, |
|
"loss": 4.4014, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.01029, |
|
"grad_norm": 0.7198477689690366, |
|
"learning_rate": 0.003, |
|
"loss": 4.3803, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.0103, |
|
"grad_norm": 0.5542753635749327, |
|
"learning_rate": 0.003, |
|
"loss": 4.4023, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.01031, |
|
"grad_norm": 0.5292390629019561, |
|
"learning_rate": 0.003, |
|
"loss": 4.4144, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.01032, |
|
"grad_norm": 0.500926566427153, |
|
"learning_rate": 0.003, |
|
"loss": 4.3837, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.01033, |
|
"grad_norm": 0.49056326531128164, |
|
"learning_rate": 0.003, |
|
"loss": 4.4201, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.01034, |
|
"grad_norm": 0.5309292795236984, |
|
"learning_rate": 0.003, |
|
"loss": 4.3865, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.01035, |
|
"grad_norm": 0.6084296376545847, |
|
"learning_rate": 0.003, |
|
"loss": 4.3892, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.01036, |
|
"grad_norm": 0.8615895093148164, |
|
"learning_rate": 0.003, |
|
"loss": 4.3883, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.01037, |
|
"grad_norm": 0.8936988458454226, |
|
"learning_rate": 0.003, |
|
"loss": 4.4336, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.01038, |
|
"grad_norm": 0.6948017038229403, |
|
"learning_rate": 0.003, |
|
"loss": 4.4116, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.01039, |
|
"grad_norm": 0.9114790426144561, |
|
"learning_rate": 0.003, |
|
"loss": 4.4152, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.0104, |
|
"grad_norm": 0.8448966818619524, |
|
"learning_rate": 0.003, |
|
"loss": 4.4288, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.01041, |
|
"grad_norm": 0.7995140521375168, |
|
"learning_rate": 0.003, |
|
"loss": 4.404, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.01042, |
|
"grad_norm": 0.8979223927667839, |
|
"learning_rate": 0.003, |
|
"loss": 4.4357, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.01043, |
|
"grad_norm": 0.7233892787514891, |
|
"learning_rate": 0.003, |
|
"loss": 4.3775, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.01044, |
|
"grad_norm": 0.782866039824708, |
|
"learning_rate": 0.003, |
|
"loss": 4.4144, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.01045, |
|
"grad_norm": 0.841755171402396, |
|
"learning_rate": 0.003, |
|
"loss": 4.4353, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.01046, |
|
"grad_norm": 0.780348729633882, |
|
"learning_rate": 0.003, |
|
"loss": 4.4043, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.01047, |
|
"grad_norm": 0.8623775519511051, |
|
"learning_rate": 0.003, |
|
"loss": 4.4135, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.01048, |
|
"grad_norm": 0.9009461265034386, |
|
"learning_rate": 0.003, |
|
"loss": 4.4049, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.01049, |
|
"grad_norm": 0.739794637514069, |
|
"learning_rate": 0.003, |
|
"loss": 4.3786, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.0105, |
|
"grad_norm": 0.6259101207085414, |
|
"learning_rate": 0.003, |
|
"loss": 4.4154, |
|
"step": 1050 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 100000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 25, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.16258249981952e+16, |
|
"train_batch_size": 256, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|