{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 205,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 2.857142857142857e-08, "loss": 1.2934, "step": 1 },
    { "epoch": 0.01, "learning_rate": 5.714285714285714e-08, "loss": 1.2798, "step": 2 },
    { "epoch": 0.01, "learning_rate": 8.57142857142857e-08, "loss": 1.2641, "step": 3 },
    { "epoch": 0.02, "learning_rate": 1.1428571428571427e-07, "loss": 1.2162, "step": 4 },
    { "epoch": 0.02, "learning_rate": 1.4285714285714285e-07, "loss": 1.3073, "step": 5 },
    { "epoch": 0.03, "learning_rate": 1.714285714285714e-07, "loss": 1.3019, "step": 6 },
    { "epoch": 0.03, "learning_rate": 2e-07, "loss": 1.2939, "step": 7 },
    { "epoch": 0.04, "learning_rate": 1.999874127673875e-07, "loss": 1.2734, "step": 8 },
    { "epoch": 0.04, "learning_rate": 1.9994965423831852e-07, "loss": 1.3112, "step": 9 },
    { "epoch": 0.05, "learning_rate": 1.998867339183008e-07, "loss": 1.3059, "step": 10 },
    { "epoch": 0.05, "learning_rate": 1.9979866764718843e-07, "loss": 1.3516, "step": 11 },
    { "epoch": 0.06, "learning_rate": 1.9968547759519424e-07, "loss": 1.3082, "step": 12 },
    { "epoch": 0.06, "learning_rate": 1.9954719225730845e-07, "loss": 1.255, "step": 13 },
    { "epoch": 0.07, "learning_rate": 1.993838464461254e-07, "loss": 1.2618, "step": 14 },
    { "epoch": 0.07, "learning_rate": 1.9919548128307953e-07, "loss": 1.3239, "step": 15 },
    { "epoch": 0.08, "learning_rate": 1.9898214418809327e-07, "loss": 1.285, "step": 16 },
    { "epoch": 0.08, "learning_rate": 1.9874388886763942e-07, "loss": 1.2076, "step": 17 },
    { "epoch": 0.09, "learning_rate": 1.984807753012208e-07, "loss": 1.3292, "step": 18 },
    { "epoch": 0.09, "learning_rate": 1.9819286972627065e-07, "loss": 1.2239, "step": 19 },
    { "epoch": 0.1, "learning_rate": 1.9788024462147787e-07, "loss": 1.2907, "step": 20 },
    { "epoch": 0.1, "learning_rate": 1.975429786885407e-07, "loss": 1.2698, "step": 21 },
    { "epoch": 0.11, "learning_rate": 1.9718115683235416e-07, "loss": 1.3123, "step": 22 },
    { "epoch": 0.11, "learning_rate": 1.9679487013963562e-07, "loss": 1.2604, "step": 23 },
    { "epoch": 0.12, "learning_rate": 1.9638421585599422e-07, "loss": 1.3007, "step": 24 },
    { "epoch": 0.12, "learning_rate": 1.9594929736144973e-07, "loss": 1.3232, "step": 25 },
    { "epoch": 0.13, "learning_rate": 1.9549022414440737e-07, "loss": 1.2522, "step": 26 },
    { "epoch": 0.13, "learning_rate": 1.9500711177409452e-07, "loss": 1.2735, "step": 27 },
    { "epoch": 0.14, "learning_rate": 1.9450008187146683e-07, "loss": 1.2489, "step": 28 },
    { "epoch": 0.14, "learning_rate": 1.9396926207859085e-07, "loss": 1.2745, "step": 29 },
    { "epoch": 0.15, "learning_rate": 1.9341478602651066e-07, "loss": 1.3546, "step": 30 },
    { "epoch": 0.15, "learning_rate": 1.9283679330160724e-07, "loss": 1.3067, "step": 31 },
    { "epoch": 0.16, "learning_rate": 1.9223542941045813e-07, "loss": 1.2281, "step": 32 },
    { "epoch": 0.16, "learning_rate": 1.9161084574320693e-07, "loss": 1.2836, "step": 33 },
    { "epoch": 0.17, "learning_rate": 1.9096319953545185e-07, "loss": 1.2204, "step": 34 },
    { "epoch": 0.17, "learning_rate": 1.9029265382866213e-07, "loss": 1.2464, "step": 35 },
    { "epoch": 0.18, "learning_rate": 1.8959937742913357e-07, "loss": 1.2542, "step": 36 },
    { "epoch": 0.18, "learning_rate": 1.8888354486549234e-07, "loss": 1.3529, "step": 37 },
    { "epoch": 0.19, "learning_rate": 1.881453363447582e-07, "loss": 1.2395, "step": 38 },
    { "epoch": 0.19, "learning_rate": 1.873849377069785e-07, "loss": 1.2126, "step": 39 },
    { "epoch": 0.2, "learning_rate": 1.8660254037844388e-07, "loss": 1.206, "step": 40 },
    { "epoch": 0.2, "learning_rate": 1.8579834132349769e-07, "loss": 1.2899, "step": 41 },
    { "epoch": 0.2, "learning_rate": 1.8497254299495145e-07, "loss": 1.2112, "step": 42 },
    { "epoch": 0.21, "learning_rate": 1.8412535328311812e-07, "loss": 1.2797, "step": 43 },
    { "epoch": 0.21, "learning_rate": 1.8325698546347712e-07, "loss": 1.2513, "step": 44 },
    { "epoch": 0.22, "learning_rate": 1.8236765814298328e-07, "loss": 1.2499, "step": 45 },
    { "epoch": 0.22, "learning_rate": 1.8145759520503356e-07, "loss": 1.2332, "step": 46 },
    { "epoch": 0.23, "learning_rate": 1.8052702575310586e-07, "loss": 1.1517, "step": 47 },
    { "epoch": 0.23, "learning_rate": 1.795761840530832e-07, "loss": 1.2733, "step": 48 },
    { "epoch": 0.24, "learning_rate": 1.7860530947427875e-07, "loss": 1.199, "step": 49 },
    { "epoch": 0.24, "learning_rate": 1.7761464642917567e-07, "loss": 1.2419, "step": 50 },
    { "epoch": 0.25, "learning_rate": 1.766044443118978e-07, "loss": 1.2258, "step": 51 },
    { "epoch": 0.25, "learning_rate": 1.7557495743542582e-07, "loss": 1.2609, "step": 52 },
    { "epoch": 0.26, "learning_rate": 1.7452644496757548e-07, "loss": 1.192, "step": 53 },
    { "epoch": 0.26, "learning_rate": 1.734591708657533e-07, "loss": 1.1758, "step": 54 },
    { "epoch": 0.27, "learning_rate": 1.72373403810507e-07, "loss": 1.2181, "step": 55 },
    { "epoch": 0.27, "learning_rate": 1.712694171378863e-07, "loss": 1.2666, "step": 56 },
    { "epoch": 0.28, "learning_rate": 1.7014748877063213e-07, "loss": 1.2084, "step": 57 },
    { "epoch": 0.28, "learning_rate": 1.690079011482112e-07, "loss": 1.2985, "step": 58 },
    { "epoch": 0.29, "learning_rate": 1.678509411557132e-07, "loss": 1.2617, "step": 59 },
    { "epoch": 0.29, "learning_rate": 1.6667690005162916e-07, "loss": 1.1919, "step": 60 },
    { "epoch": 0.3, "learning_rate": 1.6548607339452852e-07, "loss": 1.1732, "step": 61 },
    { "epoch": 0.3, "learning_rate": 1.6427876096865392e-07, "loss": 1.2219, "step": 62 },
    { "epoch": 0.31, "learning_rate": 1.6305526670845225e-07, "loss": 1.2498, "step": 63 },
    { "epoch": 0.31, "learning_rate": 1.6181589862206052e-07, "loss": 1.2018, "step": 64 },
    { "epoch": 0.32, "learning_rate": 1.6056096871376665e-07, "loss": 1.2268, "step": 65 },
    { "epoch": 0.32, "learning_rate": 1.5929079290546407e-07, "loss": 1.2108, "step": 66 },
    { "epoch": 0.33, "learning_rate": 1.5800569095711981e-07, "loss": 1.2669, "step": 67 },
    { "epoch": 0.33, "learning_rate": 1.5670598638627704e-07, "loss": 1.2009, "step": 68 },
    { "epoch": 0.34, "learning_rate": 1.5539200638661102e-07, "loss": 1.249, "step": 69 },
    { "epoch": 0.34, "learning_rate": 1.5406408174555975e-07, "loss": 1.2238, "step": 70 },
    { "epoch": 0.35, "learning_rate": 1.5272254676105024e-07, "loss": 1.1932, "step": 71 },
    { "epoch": 0.35, "learning_rate": 1.5136773915734065e-07, "loss": 1.146, "step": 72 },
    { "epoch": 0.36, "learning_rate": 1.5e-07, "loss": 1.1604, "step": 73 },
    { "epoch": 0.36, "learning_rate": 1.4861967361004686e-07, "loss": 1.152, "step": 74 },
    { "epoch": 0.37, "learning_rate": 1.4722710747726827e-07, "loss": 1.2761, "step": 75 },
    { "epoch": 0.37, "learning_rate": 1.4582265217274104e-07, "loss": 1.2246, "step": 76 },
    { "epoch": 0.38, "learning_rate": 1.4440666126057742e-07, "loss": 1.1919, "step": 77 },
    { "epoch": 0.38, "learning_rate": 1.4297949120891716e-07, "loss": 1.1962, "step": 78 },
    { "epoch": 0.39, "learning_rate": 1.4154150130018864e-07, "loss": 1.145, "step": 79 },
    { "epoch": 0.39, "learning_rate": 1.4009305354066136e-07, "loss": 1.2599, "step": 80 },
    { "epoch": 0.4, "learning_rate": 1.3863451256931284e-07, "loss": 1.1728, "step": 81 },
    { "epoch": 0.4, "learning_rate": 1.3716624556603275e-07, "loss": 1.2814, "step": 82 },
    { "epoch": 0.4, "learning_rate": 1.3568862215918716e-07, "loss": 1.1492, "step": 83 },
    { "epoch": 0.41, "learning_rate": 1.342020143325669e-07, "loss": 1.1769, "step": 84 },
    { "epoch": 0.41, "learning_rate": 1.3270679633174217e-07, "loss": 1.1795, "step": 85 },
    { "epoch": 0.42, "learning_rate": 1.3120334456984869e-07, "loss": 1.187, "step": 86 },
    { "epoch": 0.42, "learning_rate": 1.296920375328275e-07, "loss": 1.2158, "step": 87 },
    { "epoch": 0.43, "learning_rate": 1.2817325568414297e-07, "loss": 1.1903, "step": 88 },
    { "epoch": 0.43, "learning_rate": 1.2664738136900349e-07, "loss": 1.2518, "step": 89 },
    { "epoch": 0.44, "learning_rate": 1.2511479871810792e-07, "loss": 1.1396, "step": 90 },
    { "epoch": 0.44, "learning_rate": 1.2357589355094274e-07, "loss": 1.2396, "step": 91 },
    { "epoch": 0.45, "learning_rate": 1.2203105327865406e-07, "loss": 1.2634, "step": 92 },
    { "epoch": 0.45, "learning_rate": 1.2048066680651908e-07, "loss": 1.1883, "step": 93 },
    { "epoch": 0.46, "learning_rate": 1.1892512443604101e-07, "loss": 1.2214, "step": 94 },
    { "epoch": 0.46, "learning_rate": 1.1736481776669305e-07, "loss": 1.2324, "step": 95 },
    { "epoch": 0.47, "learning_rate": 1.1580013959733499e-07, "loss": 1.165, "step": 96 },
    { "epoch": 0.47, "learning_rate": 1.1423148382732852e-07, "loss": 1.1884, "step": 97 },
    { "epoch": 0.48, "learning_rate": 1.1265924535737492e-07, "loss": 1.1774, "step": 98 },
    { "epoch": 0.48, "learning_rate": 1.110838199901011e-07, "loss": 1.1927, "step": 99 },
    { "epoch": 0.49, "learning_rate": 1.0950560433041824e-07, "loss": 1.2058, "step": 100 },
    { "epoch": 0.49, "learning_rate": 1.0792499568567884e-07, "loss": 1.1488, "step": 101 },
    { "epoch": 0.5, "learning_rate": 1.0634239196565645e-07, "loss": 1.1537, "step": 102 },
    { "epoch": 0.5, "learning_rate": 1.0475819158237424e-07, "loss": 1.2555, "step": 103 },
    { "epoch": 0.51, "learning_rate": 1.0317279334980678e-07, "loss": 1.1353, "step": 104 },
    { "epoch": 0.51, "learning_rate": 1.015865963834808e-07, "loss": 1.1506, "step": 105 },
    { "epoch": 0.52, "learning_rate": 1e-07, "loss": 1.2785, "step": 106 },
    { "epoch": 0.52, "learning_rate": 9.841340361651919e-08, "loss": 1.221, "step": 107 },
    { "epoch": 0.53, "learning_rate": 9.682720665019325e-08, "loss": 1.2045, "step": 108 },
    { "epoch": 0.53, "learning_rate": 9.524180841762577e-08, "loss": 1.2851, "step": 109 },
    { "epoch": 0.54, "learning_rate": 9.365760803434354e-08, "loss": 1.2263, "step": 110 },
    { "epoch": 0.54, "learning_rate": 9.207500431432114e-08, "loss": 1.2084, "step": 111 },
    { "epoch": 0.55, "learning_rate": 9.049439566958175e-08, "loss": 1.2495, "step": 112 },
    { "epoch": 0.55, "learning_rate": 8.89161800098989e-08, "loss": 1.1412, "step": 113 },
    { "epoch": 0.56, "learning_rate": 8.734075464262506e-08, "loss": 1.2215, "step": 114 },
    { "epoch": 0.56, "learning_rate": 8.576851617267149e-08, "loss": 1.1579, "step": 115 },
    { "epoch": 0.57, "learning_rate": 8.4199860402665e-08, "loss": 1.1943, "step": 116 },
    { "epoch": 0.57, "learning_rate": 8.263518223330696e-08, "loss": 1.1636, "step": 117 },
    { "epoch": 0.58, "learning_rate": 8.107487556395901e-08, "loss": 1.2279, "step": 118 },
    { "epoch": 0.58, "learning_rate": 7.951933319348094e-08, "loss": 1.1739, "step": 119 },
    { "epoch": 0.59, "learning_rate": 7.796894672134593e-08, "loss": 1.1788, "step": 120 },
    { "epoch": 0.59, "learning_rate": 7.642410644905725e-08, "loss": 1.2144, "step": 121 },
    { "epoch": 0.6, "learning_rate": 7.488520128189209e-08, "loss": 1.1632, "step": 122 },
    { "epoch": 0.6, "learning_rate": 7.33526186309965e-08, "loss": 1.189, "step": 123 },
    { "epoch": 0.6, "learning_rate": 7.182674431585702e-08, "loss": 1.1962, "step": 124 },
    { "epoch": 0.61, "learning_rate": 7.030796246717254e-08, "loss": 1.1785, "step": 125 },
    { "epoch": 0.61, "learning_rate": 6.879665543015129e-08, "loss": 1.1997, "step": 126 },
    { "epoch": 0.62, "learning_rate": 6.729320366825783e-08, "loss": 1.1685, "step": 127 },
    { "epoch": 0.62, "learning_rate": 6.579798566743313e-08, "loss": 1.2235, "step": 128 },
    { "epoch": 0.63, "learning_rate": 6.431137784081282e-08, "loss": 1.253, "step": 129 },
    { "epoch": 0.63, "learning_rate": 6.283375443396726e-08, "loss": 1.1822, "step": 130 },
    { "epoch": 0.64, "learning_rate": 6.136548743068712e-08, "loss": 1.1801, "step": 131 },
    { "epoch": 0.64, "learning_rate": 5.990694645933865e-08, "loss": 1.2556, "step": 132 },
    { "epoch": 0.65, "learning_rate": 5.845849869981137e-08, "loss": 1.186, "step": 133 },
    { "epoch": 0.65, "learning_rate": 5.7020508791082835e-08, "loss": 1.1407, "step": 134 },
    { "epoch": 0.66, "learning_rate": 5.559333873942258e-08, "loss": 1.1429, "step": 135 },
    { "epoch": 0.66, "learning_rate": 5.4177347827258954e-08, "loss": 1.1542, "step": 136 },
    { "epoch": 0.67, "learning_rate": 5.2772892522731736e-08, "loss": 1.1555, "step": 137 },
    { "epoch": 0.67, "learning_rate": 5.138032638995314e-08, "loss": 1.1717, "step": 138 },
    { "epoch": 0.68, "learning_rate": 5.000000000000002e-08, "loss": 1.207, "step": 139 },
    { "epoch": 0.68, "learning_rate": 4.863226084265939e-08, "loss": 1.2138, "step": 140 },
    { "epoch": 0.69, "learning_rate": 4.727745323894975e-08, "loss": 1.1169, "step": 141 },
    { "epoch": 0.69, "learning_rate": 4.5935918254440276e-08, "loss": 1.1844, "step": 142 },
    { "epoch": 0.7, "learning_rate": 4.460799361338897e-08, "loss": 1.1074, "step": 143 },
    { "epoch": 0.7, "learning_rate": 4.3294013613722934e-08, "loss": 1.2708, "step": 144 },
    { "epoch": 0.71, "learning_rate": 4.199430904288019e-08, "loss": 1.1677, "step": 145 },
    { "epoch": 0.71, "learning_rate": 4.070920709453597e-08, "loss": 1.1804, "step": 146 },
    { "epoch": 0.72, "learning_rate": 3.943903128623335e-08, "loss": 1.2236, "step": 147 },
    { "epoch": 0.72, "learning_rate": 3.818410137793947e-08, "loss": 1.2135, "step": 148 },
    { "epoch": 0.73, "learning_rate": 3.694473329154778e-08, "loss": 1.1679, "step": 149 },
    { "epoch": 0.73, "learning_rate": 3.572123903134606e-08, "loss": 1.1737, "step": 150 },
    { "epoch": 0.74, "learning_rate": 3.45139266054715e-08, "loss": 1.2123, "step": 151 },
    { "epoch": 0.74, "learning_rate": 3.3323099948370846e-08, "loss": 1.1855, "step": 152 },
    { "epoch": 0.75, "learning_rate": 3.214905884428679e-08, "loss": 1.1712, "step": 153 },
    { "epoch": 0.75, "learning_rate": 3.0992098851788814e-08, "loss": 1.0934, "step": 154 },
    { "epoch": 0.76, "learning_rate": 2.985251122936786e-08, "loss": 1.2342, "step": 155 },
    { "epoch": 0.76, "learning_rate": 2.873058286211374e-08, "loss": 1.1785, "step": 156 },
    { "epoch": 0.77, "learning_rate": 2.762659618949298e-08, "loss": 1.1129, "step": 157 },
    { "epoch": 0.77, "learning_rate": 2.654082913424668e-08, "loss": 1.2042, "step": 158 },
    { "epoch": 0.78, "learning_rate": 2.547355503242453e-08, "loss": 1.1776, "step": 159 },
    { "epoch": 0.78, "learning_rate": 2.4425042564574183e-08, "loss": 1.2015, "step": 160 },
    { "epoch": 0.79, "learning_rate": 2.339555568810221e-08, "loss": 1.2125, "step": 161 },
    { "epoch": 0.79, "learning_rate": 2.2385353570824306e-08, "loss": 1.1405, "step": 162 },
    { "epoch": 0.8, "learning_rate": 2.139469052572127e-08, "loss": 1.1515, "step": 163 },
    { "epoch": 0.8, "learning_rate": 2.042381594691678e-08, "loss": 1.0014, "step": 164 },
    { "epoch": 0.8, "learning_rate": 1.9472974246894136e-08, "loss": 1.145, "step": 165 },
    { "epoch": 0.81, "learning_rate": 1.8542404794966426e-08, "loss": 1.2162, "step": 166 },
    { "epoch": 0.81, "learning_rate": 1.763234185701673e-08, "loss": 1.2195, "step": 167 },
    { "epoch": 0.82, "learning_rate": 1.674301453652287e-08, "loss": 1.1321, "step": 168 },
    { "epoch": 0.82, "learning_rate": 1.5874646716881866e-08, "loss": 1.1922, "step": 169 },
    { "epoch": 0.83, "learning_rate": 1.502745700504857e-08, "loss": 1.1713, "step": 170 },
    { "epoch": 0.83, "learning_rate": 1.4201658676502293e-08, "loss": 1.2084, "step": 171 },
    { "epoch": 0.84, "learning_rate": 1.3397459621556128e-08, "loss": 1.1722, "step": 172 },
    { "epoch": 0.84, "learning_rate": 1.2615062293021505e-08, "loss": 1.1945, "step": 173 },
    { "epoch": 0.85, "learning_rate": 1.1854663655241803e-08, "loss": 1.1389, "step": 174 },
    { "epoch": 0.85, "learning_rate": 1.1116455134507664e-08, "loss": 1.1721, "step": 175 },
    { "epoch": 0.86, "learning_rate": 1.0400622570866425e-08, "loss": 1.1674, "step": 176 },
    { "epoch": 0.86, "learning_rate": 9.707346171337894e-09, "loss": 1.2015, "step": 177 },
    { "epoch": 0.87, "learning_rate": 9.036800464548155e-09, "loss": 1.2152, "step": 178 },
    { "epoch": 0.87, "learning_rate": 8.38915425679304e-09, "loss": 1.1764, "step": 179 },
    { "epoch": 0.88, "learning_rate": 7.764570589541875e-09, "loss": 1.1159, "step": 180 },
    { "epoch": 0.88, "learning_rate": 7.163206698392743e-09, "loss": 1.1123, "step": 181 },
    { "epoch": 0.89, "learning_rate": 6.585213973489334e-09, "loss": 1.1876, "step": 182 },
    { "epoch": 0.89, "learning_rate": 6.030737921409168e-09, "loss": 1.2447, "step": 183 },
    { "epoch": 0.9, "learning_rate": 5.499918128533154e-09, "loss": 1.245, "step": 184 },
    { "epoch": 0.9, "learning_rate": 4.9928882259054674e-09, "loss": 1.1083, "step": 185 },
    { "epoch": 0.91, "learning_rate": 4.509775855592613e-09, "loss": 1.1305, "step": 186 },
    { "epoch": 0.91, "learning_rate": 4.050702638550274e-09, "loss": 1.1942, "step": 187 },
    { "epoch": 0.92, "learning_rate": 3.6157841440057958e-09, "loss": 1.1729, "step": 188 },
    { "epoch": 0.92, "learning_rate": 3.205129860364375e-09, "loss": 1.0882, "step": 189 },
    { "epoch": 0.93, "learning_rate": 2.8188431676458345e-09, "loss": 1.198, "step": 190 },
    { "epoch": 0.93, "learning_rate": 2.457021311459295e-09, "loss": 1.194, "step": 191 },
    { "epoch": 0.94, "learning_rate": 2.1197553785221366e-09, "loss": 1.1636, "step": 192 },
    { "epoch": 0.94, "learning_rate": 1.8071302737293292e-09, "loss": 1.1798, "step": 193 },
    { "epoch": 0.95, "learning_rate": 1.519224698779198e-09, "loss": 1.1553, "step": 194 },
    { "epoch": 0.95, "learning_rate": 1.256111132360571e-09, "loss": 1.159, "step": 195 },
    { "epoch": 0.96, "learning_rate": 1.0178558119067315e-09, "loss": 1.2488, "step": 196 },
    { "epoch": 0.96, "learning_rate": 8.045187169204659e-10, "loss": 1.1661, "step": 197 },
    { "epoch": 0.97, "learning_rate": 6.161535538745877e-10, "loss": 1.1698, "step": 198 },
    { "epoch": 0.97, "learning_rate": 4.528077426915411e-10, "loss": 1.1729, "step": 199 },
    { "epoch": 0.98, "learning_rate": 3.1452240480577264e-10, "loss": 1.1583, "step": 200 },
    { "epoch": 0.98, "learning_rate": 2.0133235281156735e-10, "loss": 1.2136, "step": 201 },
    { "epoch": 0.99, "learning_rate": 1.132660816992037e-10, "loss": 1.1568, "step": 202 },
    { "epoch": 0.99, "learning_rate": 5.0345761681491736e-11, "loss": 1.2338, "step": 203 },
    { "epoch": 1.0, "learning_rate": 1.2587232612493171e-11, "loss": 1.2104, "step": 204 },
    { "epoch": 1.0, "learning_rate": 0.0, "loss": 1.1763, "step": 205 },
    { "epoch": 1.0, "step": 205, "total_flos": 46263470604288.0, "train_loss": 1.2101893407542532, "train_runtime": 1529.7798, "train_samples_per_second": 17.114, "train_steps_per_second": 0.134 }
  ],
  "logging_steps": 1.0,
  "max_steps": 205,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50000,
  "total_flos": 46263470604288.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}