|
{ |
|
"best_metric": 0.7565978169441223, |
|
"best_model_checkpoint": "autotrain-80y7u-aoweu/checkpoint-1017", |
|
"epoch": 9.0, |
|
"eval_steps": 500, |
|
"global_step": 1017, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.017699115044247787, |
|
"grad_norm": Infinity, |
|
"learning_rate": 0.0, |
|
"loss": 1.8501, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"grad_norm": 45.10062026977539, |
|
"learning_rate": 8.849557522123894e-07, |
|
"loss": 1.857, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.05309734513274336, |
|
"grad_norm": 58.66633605957031, |
|
"learning_rate": 1.3274336283185841e-06, |
|
"loss": 1.8116, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"grad_norm": 56.49775695800781, |
|
"learning_rate": 2.2123893805309734e-06, |
|
"loss": 2.0345, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.08849557522123894, |
|
"grad_norm": 58.526424407958984, |
|
"learning_rate": 3.097345132743363e-06, |
|
"loss": 1.9499, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"grad_norm": 52.64912796020508, |
|
"learning_rate": 3.982300884955752e-06, |
|
"loss": 1.9647, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.12389380530973451, |
|
"grad_norm": 31.514617919921875, |
|
"learning_rate": 4.867256637168142e-06, |
|
"loss": 1.6022, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"grad_norm": 39.92071533203125, |
|
"learning_rate": 5.752212389380531e-06, |
|
"loss": 1.7882, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.1592920353982301, |
|
"grad_norm": 57.345947265625, |
|
"learning_rate": 6.6371681415929215e-06, |
|
"loss": 1.7493, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"grad_norm": 40.93531799316406, |
|
"learning_rate": 7.52212389380531e-06, |
|
"loss": 1.5886, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.19469026548672566, |
|
"grad_norm": 34.53605651855469, |
|
"learning_rate": 8.407079646017701e-06, |
|
"loss": 1.6985, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"grad_norm": 59.89328384399414, |
|
"learning_rate": 9.29203539823009e-06, |
|
"loss": 1.8046, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.23008849557522124, |
|
"grad_norm": 36.242183685302734, |
|
"learning_rate": 1.0176991150442479e-05, |
|
"loss": 1.5316, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.24778761061946902, |
|
"grad_norm": 19.723634719848633, |
|
"learning_rate": 1.1061946902654869e-05, |
|
"loss": 1.612, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.26548672566371684, |
|
"grad_norm": 33.05853271484375, |
|
"learning_rate": 1.1946902654867258e-05, |
|
"loss": 1.7078, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.2831858407079646, |
|
"grad_norm": 25.069561004638672, |
|
"learning_rate": 1.2831858407079647e-05, |
|
"loss": 1.616, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.3008849557522124, |
|
"grad_norm": 58.163509368896484, |
|
"learning_rate": 1.3716814159292036e-05, |
|
"loss": 1.6574, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.3185840707964602, |
|
"grad_norm": 32.350101470947266, |
|
"learning_rate": 1.4601769911504426e-05, |
|
"loss": 1.3009, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.336283185840708, |
|
"grad_norm": 36.51224899291992, |
|
"learning_rate": 1.5486725663716813e-05, |
|
"loss": 1.36, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.35398230088495575, |
|
"grad_norm": 32.10768508911133, |
|
"learning_rate": 1.6371681415929206e-05, |
|
"loss": 1.515, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.37168141592920356, |
|
"grad_norm": 25.605300903320312, |
|
"learning_rate": 1.7256637168141594e-05, |
|
"loss": 1.3663, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.3893805309734513, |
|
"grad_norm": 146.0785369873047, |
|
"learning_rate": 1.8141592920353983e-05, |
|
"loss": 1.3139, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.40707964601769914, |
|
"grad_norm": 37.9253044128418, |
|
"learning_rate": 1.9026548672566372e-05, |
|
"loss": 1.3613, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.4247787610619469, |
|
"grad_norm": 36.18649673461914, |
|
"learning_rate": 1.991150442477876e-05, |
|
"loss": 1.4165, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.4424778761061947, |
|
"grad_norm": 22.322145462036133, |
|
"learning_rate": 2.079646017699115e-05, |
|
"loss": 1.3432, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.46017699115044247, |
|
"grad_norm": 28.047632217407227, |
|
"learning_rate": 2.1681415929203542e-05, |
|
"loss": 1.1527, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.4778761061946903, |
|
"grad_norm": 34.25035858154297, |
|
"learning_rate": 2.2566371681415928e-05, |
|
"loss": 1.0954, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.49557522123893805, |
|
"grad_norm": 154.13543701171875, |
|
"learning_rate": 2.345132743362832e-05, |
|
"loss": 1.289, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5132743362831859, |
|
"grad_norm": 34.313114166259766, |
|
"learning_rate": 2.433628318584071e-05, |
|
"loss": 1.4081, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5309734513274337, |
|
"grad_norm": 31.601171493530273, |
|
"learning_rate": 2.5221238938053098e-05, |
|
"loss": 1.1908, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5486725663716814, |
|
"grad_norm": 40.591888427734375, |
|
"learning_rate": 2.610619469026549e-05, |
|
"loss": 1.3728, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.5663716814159292, |
|
"grad_norm": 39.01655578613281, |
|
"learning_rate": 2.6991150442477875e-05, |
|
"loss": 1.1882, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.584070796460177, |
|
"grad_norm": 35.67778778076172, |
|
"learning_rate": 2.7876106194690264e-05, |
|
"loss": 0.9822, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.6017699115044248, |
|
"grad_norm": 37.64238357543945, |
|
"learning_rate": 2.8761061946902656e-05, |
|
"loss": 0.9998, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6194690265486725, |
|
"grad_norm": 29.477062225341797, |
|
"learning_rate": 2.9203539823008852e-05, |
|
"loss": 1.3209, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.6371681415929203, |
|
"grad_norm": 21.486434936523438, |
|
"learning_rate": 3.008849557522124e-05, |
|
"loss": 1.2007, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.6548672566371682, |
|
"grad_norm": 32.44023513793945, |
|
"learning_rate": 3.097345132743363e-05, |
|
"loss": 1.2391, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.672566371681416, |
|
"grad_norm": 87.39546203613281, |
|
"learning_rate": 3.185840707964602e-05, |
|
"loss": 1.2293, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.6902654867256637, |
|
"grad_norm": 22.6693058013916, |
|
"learning_rate": 3.274336283185841e-05, |
|
"loss": 1.3051, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.7079646017699115, |
|
"grad_norm": 22.473268508911133, |
|
"learning_rate": 3.3628318584070804e-05, |
|
"loss": 1.1976, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.7256637168141593, |
|
"grad_norm": 27.98000144958496, |
|
"learning_rate": 3.451327433628319e-05, |
|
"loss": 1.172, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.7433628318584071, |
|
"grad_norm": 30.333744049072266, |
|
"learning_rate": 3.5398230088495574e-05, |
|
"loss": 1.4081, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.7610619469026548, |
|
"grad_norm": 32.933128356933594, |
|
"learning_rate": 3.628318584070797e-05, |
|
"loss": 1.0802, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.7787610619469026, |
|
"grad_norm": 29.343582153320312, |
|
"learning_rate": 3.716814159292036e-05, |
|
"loss": 1.0651, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.7964601769911505, |
|
"grad_norm": 28.437707901000977, |
|
"learning_rate": 3.8053097345132744e-05, |
|
"loss": 1.079, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.8141592920353983, |
|
"grad_norm": 34.3442268371582, |
|
"learning_rate": 3.893805309734514e-05, |
|
"loss": 1.1715, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.831858407079646, |
|
"grad_norm": 24.838363647460938, |
|
"learning_rate": 3.982300884955752e-05, |
|
"loss": 1.2568, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.8495575221238938, |
|
"grad_norm": 19.97493553161621, |
|
"learning_rate": 4.0707964601769914e-05, |
|
"loss": 1.0946, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.8672566371681416, |
|
"grad_norm": 26.681554794311523, |
|
"learning_rate": 4.15929203539823e-05, |
|
"loss": 0.9971, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.8849557522123894, |
|
"grad_norm": 20.66066551208496, |
|
"learning_rate": 4.247787610619469e-05, |
|
"loss": 1.205, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.9026548672566371, |
|
"grad_norm": 30.359079360961914, |
|
"learning_rate": 4.3362831858407084e-05, |
|
"loss": 1.0563, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.9203539823008849, |
|
"grad_norm": 42.22723388671875, |
|
"learning_rate": 4.4247787610619477e-05, |
|
"loss": 1.0019, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.9380530973451328, |
|
"grad_norm": 28.618349075317383, |
|
"learning_rate": 4.5132743362831855e-05, |
|
"loss": 1.1699, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.9557522123893806, |
|
"grad_norm": 47.17839431762695, |
|
"learning_rate": 4.601769911504425e-05, |
|
"loss": 1.0793, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.9734513274336283, |
|
"grad_norm": 28.66024398803711, |
|
"learning_rate": 4.690265486725664e-05, |
|
"loss": 1.1115, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.9911504424778761, |
|
"grad_norm": 24.186906814575195, |
|
"learning_rate": 4.778761061946903e-05, |
|
"loss": 0.9751, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.1276780366897583, |
|
"eval_map": 0.1062, |
|
"eval_map_50": 0.2136, |
|
"eval_map_75": 0.0953, |
|
"eval_map_large": 0.1451, |
|
"eval_map_medium": 0.0275, |
|
"eval_map_per_class": 0.1062, |
|
"eval_map_small": 0.0, |
|
"eval_mar_1": 0.1671, |
|
"eval_mar_10": 0.3786, |
|
"eval_mar_100": 0.6103, |
|
"eval_mar_100_per_class": 0.6103, |
|
"eval_mar_large": 0.6775, |
|
"eval_mar_medium": 0.4422, |
|
"eval_mar_small": 0.0, |
|
"eval_runtime": 4.6231, |
|
"eval_samples_per_second": 21.631, |
|
"eval_steps_per_second": 1.514, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.008849557522124, |
|
"grad_norm": 29.109996795654297, |
|
"learning_rate": 4.867256637168142e-05, |
|
"loss": 1.0909, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.0265486725663717, |
|
"grad_norm": 36.879417419433594, |
|
"learning_rate": 4.955752212389381e-05, |
|
"loss": 1.0213, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.0442477876106195, |
|
"grad_norm": 25.578012466430664, |
|
"learning_rate": 4.9950835791543757e-05, |
|
"loss": 1.0314, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.0619469026548674, |
|
"grad_norm": 20.469444274902344, |
|
"learning_rate": 4.985250737463127e-05, |
|
"loss": 1.0589, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.079646017699115, |
|
"grad_norm": 18.58279037475586, |
|
"learning_rate": 4.9754178957718786e-05, |
|
"loss": 1.0507, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.0973451327433628, |
|
"grad_norm": 29.6292724609375, |
|
"learning_rate": 4.9655850540806295e-05, |
|
"loss": 1.0577, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.1150442477876106, |
|
"grad_norm": 57.019222259521484, |
|
"learning_rate": 4.955752212389381e-05, |
|
"loss": 1.0879, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.1327433628318584, |
|
"grad_norm": 39.2066650390625, |
|
"learning_rate": 4.9459193706981325e-05, |
|
"loss": 1.1121, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.1504424778761062, |
|
"grad_norm": 26.280122756958008, |
|
"learning_rate": 4.936086529006883e-05, |
|
"loss": 1.001, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.168141592920354, |
|
"grad_norm": 41.13880920410156, |
|
"learning_rate": 4.926253687315635e-05, |
|
"loss": 1.0486, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.1858407079646018, |
|
"grad_norm": 45.9874267578125, |
|
"learning_rate": 4.9164208456243856e-05, |
|
"loss": 1.0237, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.2035398230088497, |
|
"grad_norm": 31.62242317199707, |
|
"learning_rate": 4.906588003933137e-05, |
|
"loss": 1.0973, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.2212389380530975, |
|
"grad_norm": 30.55558967590332, |
|
"learning_rate": 4.8967551622418886e-05, |
|
"loss": 1.0251, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.238938053097345, |
|
"grad_norm": 40.2532844543457, |
|
"learning_rate": 4.8869223205506394e-05, |
|
"loss": 0.9459, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.2566371681415929, |
|
"grad_norm": 77.14492797851562, |
|
"learning_rate": 4.877089478859391e-05, |
|
"loss": 1.0721, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.2743362831858407, |
|
"grad_norm": 30.2625732421875, |
|
"learning_rate": 4.867256637168142e-05, |
|
"loss": 1.0249, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.2920353982300885, |
|
"grad_norm": 39.786590576171875, |
|
"learning_rate": 4.857423795476893e-05, |
|
"loss": 1.0735, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.3097345132743363, |
|
"grad_norm": 33.97883605957031, |
|
"learning_rate": 4.847590953785645e-05, |
|
"loss": 0.9914, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.3274336283185841, |
|
"grad_norm": 27.49061393737793, |
|
"learning_rate": 4.8377581120943956e-05, |
|
"loss": 1.2146, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.3451327433628317, |
|
"grad_norm": 25.95128059387207, |
|
"learning_rate": 4.827925270403147e-05, |
|
"loss": 1.048, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.3628318584070795, |
|
"grad_norm": 25.32373809814453, |
|
"learning_rate": 4.818092428711898e-05, |
|
"loss": 1.0952, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.3805309734513274, |
|
"grad_norm": 29.068500518798828, |
|
"learning_rate": 4.8082595870206494e-05, |
|
"loss": 1.0903, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.3982300884955752, |
|
"grad_norm": 30.730670928955078, |
|
"learning_rate": 4.7984267453294e-05, |
|
"loss": 1.0388, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.415929203539823, |
|
"grad_norm": 34.4339714050293, |
|
"learning_rate": 4.788593903638152e-05, |
|
"loss": 1.0369, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.4336283185840708, |
|
"grad_norm": 74.3895263671875, |
|
"learning_rate": 4.778761061946903e-05, |
|
"loss": 1.0317, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.4513274336283186, |
|
"grad_norm": 25.546483993530273, |
|
"learning_rate": 4.768928220255654e-05, |
|
"loss": 0.9741, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.4690265486725664, |
|
"grad_norm": 26.646549224853516, |
|
"learning_rate": 4.7590953785644055e-05, |
|
"loss": 1.1132, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.4867256637168142, |
|
"grad_norm": 27.06178092956543, |
|
"learning_rate": 4.749262536873156e-05, |
|
"loss": 0.9592, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.504424778761062, |
|
"grad_norm": 27.895265579223633, |
|
"learning_rate": 4.739429695181908e-05, |
|
"loss": 1.0061, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.5221238938053099, |
|
"grad_norm": 28.37946319580078, |
|
"learning_rate": 4.729596853490659e-05, |
|
"loss": 0.9069, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.5398230088495575, |
|
"grad_norm": 22.880842208862305, |
|
"learning_rate": 4.71976401179941e-05, |
|
"loss": 1.0585, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.5575221238938053, |
|
"grad_norm": 27.391437530517578, |
|
"learning_rate": 4.7099311701081617e-05, |
|
"loss": 1.1805, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.575221238938053, |
|
"grad_norm": 76.33792877197266, |
|
"learning_rate": 4.7000983284169125e-05, |
|
"loss": 0.93, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.592920353982301, |
|
"grad_norm": 32.36155700683594, |
|
"learning_rate": 4.690265486725664e-05, |
|
"loss": 1.066, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.6106194690265485, |
|
"grad_norm": 23.659805297851562, |
|
"learning_rate": 4.680432645034415e-05, |
|
"loss": 0.9452, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.6283185840707963, |
|
"grad_norm": 40.60184097290039, |
|
"learning_rate": 4.670599803343166e-05, |
|
"loss": 0.9825, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.6460176991150441, |
|
"grad_norm": 47.14199447631836, |
|
"learning_rate": 4.660766961651918e-05, |
|
"loss": 0.9014, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.663716814159292, |
|
"grad_norm": 26.055511474609375, |
|
"learning_rate": 4.6509341199606686e-05, |
|
"loss": 1.0034, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.6814159292035398, |
|
"grad_norm": 31.73177146911621, |
|
"learning_rate": 4.64110127826942e-05, |
|
"loss": 1.0716, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.6991150442477876, |
|
"grad_norm": 28.099885940551758, |
|
"learning_rate": 4.631268436578171e-05, |
|
"loss": 1.1829, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.7168141592920354, |
|
"grad_norm": 13.378188133239746, |
|
"learning_rate": 4.6214355948869224e-05, |
|
"loss": 0.9111, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.7345132743362832, |
|
"grad_norm": 22.647581100463867, |
|
"learning_rate": 4.611602753195674e-05, |
|
"loss": 1.0079, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.752212389380531, |
|
"grad_norm": 22.10202407836914, |
|
"learning_rate": 4.601769911504425e-05, |
|
"loss": 1.0541, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.7699115044247788, |
|
"grad_norm": 19.46767807006836, |
|
"learning_rate": 4.591937069813176e-05, |
|
"loss": 0.8407, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.7876106194690267, |
|
"grad_norm": 34.163822174072266, |
|
"learning_rate": 4.582104228121927e-05, |
|
"loss": 1.157, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.8053097345132745, |
|
"grad_norm": 24.074146270751953, |
|
"learning_rate": 4.5722713864306786e-05, |
|
"loss": 0.9934, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.823008849557522, |
|
"grad_norm": 150.62364196777344, |
|
"learning_rate": 4.5624385447394294e-05, |
|
"loss": 0.9615, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.8407079646017699, |
|
"grad_norm": 20.424564361572266, |
|
"learning_rate": 4.552605703048181e-05, |
|
"loss": 1.0251, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.8584070796460177, |
|
"grad_norm": 30.88136100769043, |
|
"learning_rate": 4.5427728613569324e-05, |
|
"loss": 0.9429, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.8761061946902655, |
|
"grad_norm": 25.247404098510742, |
|
"learning_rate": 4.532940019665683e-05, |
|
"loss": 0.9711, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.893805309734513, |
|
"grad_norm": 37.985774993896484, |
|
"learning_rate": 4.523107177974435e-05, |
|
"loss": 0.9705, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.911504424778761, |
|
"grad_norm": 22.748384475708008, |
|
"learning_rate": 4.5132743362831855e-05, |
|
"loss": 1.0017, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.9292035398230087, |
|
"grad_norm": 19.267290115356445, |
|
"learning_rate": 4.503441494591937e-05, |
|
"loss": 0.9072, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.9469026548672566, |
|
"grad_norm": 19.34298324584961, |
|
"learning_rate": 4.4936086529006885e-05, |
|
"loss": 1.0597, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.9646017699115044, |
|
"grad_norm": 23.347171783447266, |
|
"learning_rate": 4.48377581120944e-05, |
|
"loss": 0.9139, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.9823008849557522, |
|
"grad_norm": 20.579362869262695, |
|
"learning_rate": 4.473942969518191e-05, |
|
"loss": 0.8591, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 64.1324691772461, |
|
"learning_rate": 4.464110127826942e-05, |
|
"loss": 1.0063, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 0.9410255551338196, |
|
"eval_map": 0.2993, |
|
"eval_map_50": 0.536, |
|
"eval_map_75": 0.3136, |
|
"eval_map_large": 0.3631, |
|
"eval_map_medium": 0.1534, |
|
"eval_map_per_class": 0.2993, |
|
"eval_map_small": 0.0, |
|
"eval_mar_1": 0.2427, |
|
"eval_mar_10": 0.5261, |
|
"eval_mar_100": 0.6393, |
|
"eval_mar_100_per_class": 0.6393, |
|
"eval_mar_large": 0.7107, |
|
"eval_mar_medium": 0.4609, |
|
"eval_mar_small": 0.0, |
|
"eval_runtime": 3.9582, |
|
"eval_samples_per_second": 25.264, |
|
"eval_steps_per_second": 1.768, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.017699115044248, |
|
"grad_norm": 27.324804306030273, |
|
"learning_rate": 4.454277286135694e-05, |
|
"loss": 0.9027, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.0353982300884956, |
|
"grad_norm": 20.43166732788086, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 0.9257, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.0530973451327434, |
|
"grad_norm": 18.177186965942383, |
|
"learning_rate": 4.434611602753196e-05, |
|
"loss": 0.9379, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.0707964601769913, |
|
"grad_norm": 24.545679092407227, |
|
"learning_rate": 4.4247787610619477e-05, |
|
"loss": 1.0571, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.088495575221239, |
|
"grad_norm": 21.458559036254883, |
|
"learning_rate": 4.4149459193706985e-05, |
|
"loss": 1.0239, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.106194690265487, |
|
"grad_norm": 20.25364112854004, |
|
"learning_rate": 4.40511307767945e-05, |
|
"loss": 0.9194, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.1238938053097347, |
|
"grad_norm": 24.264408111572266, |
|
"learning_rate": 4.395280235988201e-05, |
|
"loss": 0.9601, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.1415929203539825, |
|
"grad_norm": 17.67040252685547, |
|
"learning_rate": 4.385447394296952e-05, |
|
"loss": 1.1069, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.15929203539823, |
|
"grad_norm": 25.185571670532227, |
|
"learning_rate": 4.375614552605704e-05, |
|
"loss": 0.9537, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.1769911504424777, |
|
"grad_norm": 39.48079299926758, |
|
"learning_rate": 4.3657817109144546e-05, |
|
"loss": 1.082, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.1946902654867255, |
|
"grad_norm": 22.608633041381836, |
|
"learning_rate": 4.355948869223206e-05, |
|
"loss": 0.8366, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.2123893805309733, |
|
"grad_norm": 57.35368347167969, |
|
"learning_rate": 4.346116027531957e-05, |
|
"loss": 0.8564, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.230088495575221, |
|
"grad_norm": 34.98164367675781, |
|
"learning_rate": 4.3362831858407084e-05, |
|
"loss": 0.8928, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.247787610619469, |
|
"grad_norm": 19.134063720703125, |
|
"learning_rate": 4.326450344149459e-05, |
|
"loss": 0.9219, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.265486725663717, |
|
"grad_norm": 33.00546646118164, |
|
"learning_rate": 4.316617502458211e-05, |
|
"loss": 0.8816, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.2831858407079646, |
|
"grad_norm": 26.70623779296875, |
|
"learning_rate": 4.306784660766962e-05, |
|
"loss": 0.9003, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.3008849557522124, |
|
"grad_norm": 16.002517700195312, |
|
"learning_rate": 4.296951819075713e-05, |
|
"loss": 0.9086, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.3185840707964602, |
|
"grad_norm": 26.876773834228516, |
|
"learning_rate": 4.2871189773844646e-05, |
|
"loss": 0.8936, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.336283185840708, |
|
"grad_norm": 19.438966751098633, |
|
"learning_rate": 4.2772861356932154e-05, |
|
"loss": 0.886, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.353982300884956, |
|
"grad_norm": 18.447446823120117, |
|
"learning_rate": 4.267453294001967e-05, |
|
"loss": 0.9216, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.3716814159292037, |
|
"grad_norm": 17.754484176635742, |
|
"learning_rate": 4.2576204523107184e-05, |
|
"loss": 0.8503, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.3893805309734515, |
|
"grad_norm": 21.90740394592285, |
|
"learning_rate": 4.247787610619469e-05, |
|
"loss": 0.8937, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.4070796460176993, |
|
"grad_norm": 17.798200607299805, |
|
"learning_rate": 4.237954768928221e-05, |
|
"loss": 0.97, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.4247787610619467, |
|
"grad_norm": 26.43834114074707, |
|
"learning_rate": 4.2281219272369715e-05, |
|
"loss": 0.7791, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.442477876106195, |
|
"grad_norm": 18.365053176879883, |
|
"learning_rate": 4.218289085545723e-05, |
|
"loss": 0.9906, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.4601769911504423, |
|
"grad_norm": 26.327442169189453, |
|
"learning_rate": 4.208456243854474e-05, |
|
"loss": 1.001, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.47787610619469, |
|
"grad_norm": 22.054363250732422, |
|
"learning_rate": 4.1986234021632253e-05, |
|
"loss": 1.0239, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.495575221238938, |
|
"grad_norm": 39.17606735229492, |
|
"learning_rate": 4.188790560471977e-05, |
|
"loss": 0.9184, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.5132743362831858, |
|
"grad_norm": 18.545665740966797, |
|
"learning_rate": 4.178957718780728e-05, |
|
"loss": 0.8107, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.5309734513274336, |
|
"grad_norm": 15.98513412475586, |
|
"learning_rate": 4.169124877089479e-05, |
|
"loss": 1.0371, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.5486725663716814, |
|
"grad_norm": 23.60863494873047, |
|
"learning_rate": 4.15929203539823e-05, |
|
"loss": 0.8471, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.566371681415929, |
|
"grad_norm": 39.60649108886719, |
|
"learning_rate": 4.1494591937069815e-05, |
|
"loss": 0.9184, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.584070796460177, |
|
"grad_norm": 32.43935012817383, |
|
"learning_rate": 4.139626352015733e-05, |
|
"loss": 0.8087, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.601769911504425, |
|
"grad_norm": 22.511394500732422, |
|
"learning_rate": 4.129793510324484e-05, |
|
"loss": 0.8604, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.6194690265486726, |
|
"grad_norm": 17.795698165893555, |
|
"learning_rate": 4.119960668633235e-05, |
|
"loss": 0.9068, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.6371681415929205, |
|
"grad_norm": 25.883586883544922, |
|
"learning_rate": 4.110127826941986e-05, |
|
"loss": 0.7654, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.6548672566371683, |
|
"grad_norm": 17.679630279541016, |
|
"learning_rate": 4.1002949852507376e-05, |
|
"loss": 0.8956, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.672566371681416, |
|
"grad_norm": 40.860904693603516, |
|
"learning_rate": 4.0904621435594884e-05, |
|
"loss": 0.9776, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.6902654867256635, |
|
"grad_norm": 30.485763549804688, |
|
"learning_rate": 4.08062930186824e-05, |
|
"loss": 0.7637, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.7079646017699117, |
|
"grad_norm": 20.94835090637207, |
|
"learning_rate": 4.0707964601769914e-05, |
|
"loss": 0.972, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.725663716814159, |
|
"grad_norm": 28.85026741027832, |
|
"learning_rate": 4.060963618485742e-05, |
|
"loss": 1.0257, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.7433628318584073, |
|
"grad_norm": 20.580015182495117, |
|
"learning_rate": 4.051130776794494e-05, |
|
"loss": 0.9242, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.7610619469026547, |
|
"grad_norm": 19.40909767150879, |
|
"learning_rate": 4.0412979351032446e-05, |
|
"loss": 1.0149, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.7787610619469025, |
|
"grad_norm": 17.306678771972656, |
|
"learning_rate": 4.031465093411996e-05, |
|
"loss": 0.8118, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.7964601769911503, |
|
"grad_norm": 30.43448829650879, |
|
"learning_rate": 4.0216322517207476e-05, |
|
"loss": 0.8623, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.814159292035398, |
|
"grad_norm": 20.09423828125, |
|
"learning_rate": 4.0117994100294984e-05, |
|
"loss": 0.8364, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.831858407079646, |
|
"grad_norm": 16.069490432739258, |
|
"learning_rate": 4.00196656833825e-05, |
|
"loss": 0.812, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.849557522123894, |
|
"grad_norm": 25.968656539916992, |
|
"learning_rate": 3.992133726647001e-05, |
|
"loss": 0.8953, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.8672566371681416, |
|
"grad_norm": 28.93232536315918, |
|
"learning_rate": 3.982300884955752e-05, |
|
"loss": 1.0083, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.8849557522123894, |
|
"grad_norm": 32.24972152709961, |
|
"learning_rate": 3.972468043264504e-05, |
|
"loss": 0.9767, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.9026548672566372, |
|
"grad_norm": 32.170352935791016, |
|
"learning_rate": 3.9626352015732545e-05, |
|
"loss": 0.787, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.920353982300885, |
|
"grad_norm": 19.10132598876953, |
|
"learning_rate": 3.952802359882006e-05, |
|
"loss": 0.8321, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.938053097345133, |
|
"grad_norm": 30.334962844848633, |
|
"learning_rate": 3.9429695181907575e-05, |
|
"loss": 0.8306, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.9557522123893807, |
|
"grad_norm": 24.146038055419922, |
|
"learning_rate": 3.9331366764995083e-05, |
|
"loss": 0.8859, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.9734513274336285, |
|
"grad_norm": 24.66895294189453, |
|
"learning_rate": 3.92330383480826e-05, |
|
"loss": 0.8035, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.991150442477876, |
|
"grad_norm": 18.740558624267578, |
|
"learning_rate": 3.9134709931170113e-05, |
|
"loss": 0.8284, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 0.883878231048584, |
|
"eval_map": 0.4451, |
|
"eval_map_50": 0.8057, |
|
"eval_map_75": 0.399, |
|
"eval_map_large": 0.5228, |
|
"eval_map_medium": 0.2384, |
|
"eval_map_per_class": 0.4451, |
|
"eval_map_small": 0.0123, |
|
"eval_mar_1": 0.2662, |
|
"eval_mar_10": 0.5637, |
|
"eval_mar_100": 0.6295, |
|
"eval_mar_100_per_class": 0.6295, |
|
"eval_mar_large": 0.6976, |
|
"eval_mar_medium": 0.4547, |
|
"eval_mar_small": 0.3, |
|
"eval_runtime": 4.2753, |
|
"eval_samples_per_second": 23.39, |
|
"eval_steps_per_second": 1.637, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 3.0088495575221237, |
|
"grad_norm": 17.387792587280273, |
|
"learning_rate": 3.903638151425762e-05, |
|
"loss": 0.9204, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.0265486725663715, |
|
"grad_norm": 23.816524505615234, |
|
"learning_rate": 3.893805309734514e-05, |
|
"loss": 0.7563, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 3.0442477876106193, |
|
"grad_norm": 23.070165634155273, |
|
"learning_rate": 3.883972468043265e-05, |
|
"loss": 0.6889, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.061946902654867, |
|
"grad_norm": 21.282630920410156, |
|
"learning_rate": 3.874139626352016e-05, |
|
"loss": 0.7727, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.079646017699115, |
|
"grad_norm": 32.04922866821289, |
|
"learning_rate": 3.8643067846607675e-05, |
|
"loss": 0.7494, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.0973451327433628, |
|
"grad_norm": 22.994802474975586, |
|
"learning_rate": 3.854473942969518e-05, |
|
"loss": 0.9539, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.1150442477876106, |
|
"grad_norm": 19.214599609375, |
|
"learning_rate": 3.84464110127827e-05, |
|
"loss": 0.6915, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.1327433628318584, |
|
"grad_norm": 15.473613739013672, |
|
"learning_rate": 3.834808259587021e-05, |
|
"loss": 0.8087, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.150442477876106, |
|
"grad_norm": 27.318443298339844, |
|
"learning_rate": 3.824975417895772e-05, |
|
"loss": 0.7237, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.168141592920354, |
|
"grad_norm": 20.30460548400879, |
|
"learning_rate": 3.8151425762045236e-05, |
|
"loss": 0.8245, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.185840707964602, |
|
"grad_norm": 20.886333465576172, |
|
"learning_rate": 3.8053097345132744e-05, |
|
"loss": 0.9283, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.2035398230088497, |
|
"grad_norm": 19.330215454101562, |
|
"learning_rate": 3.795476892822026e-05, |
|
"loss": 0.8195, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.2212389380530975, |
|
"grad_norm": 28.00650405883789, |
|
"learning_rate": 3.7856440511307774e-05, |
|
"loss": 0.8749, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.2389380530973453, |
|
"grad_norm": 23.165494918823242, |
|
"learning_rate": 3.775811209439528e-05, |
|
"loss": 0.8077, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.256637168141593, |
|
"grad_norm": 24.138504028320312, |
|
"learning_rate": 3.76597836774828e-05, |
|
"loss": 0.7378, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.274336283185841, |
|
"grad_norm": 26.703155517578125, |
|
"learning_rate": 3.7561455260570306e-05, |
|
"loss": 0.771, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.2920353982300883, |
|
"grad_norm": 19.84184455871582, |
|
"learning_rate": 3.746312684365782e-05, |
|
"loss": 0.8013, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.309734513274336, |
|
"grad_norm": 23.238510131835938, |
|
"learning_rate": 3.736479842674533e-05, |
|
"loss": 0.7144, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.327433628318584, |
|
"grad_norm": 27.819055557250977, |
|
"learning_rate": 3.7266470009832844e-05, |
|
"loss": 0.9366, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.3451327433628317, |
|
"grad_norm": 18.144052505493164, |
|
"learning_rate": 3.716814159292036e-05, |
|
"loss": 0.8564, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.3628318584070795, |
|
"grad_norm": 16.014623641967773, |
|
"learning_rate": 3.706981317600787e-05, |
|
"loss": 0.9854, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.3805309734513274, |
|
"grad_norm": 19.18610191345215, |
|
"learning_rate": 3.697148475909538e-05, |
|
"loss": 0.8153, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.398230088495575, |
|
"grad_norm": 28.12213134765625, |
|
"learning_rate": 3.687315634218289e-05, |
|
"loss": 0.8107, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.415929203539823, |
|
"grad_norm": 42.245262145996094, |
|
"learning_rate": 3.6774827925270405e-05, |
|
"loss": 0.8498, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.433628318584071, |
|
"grad_norm": 20.642005920410156, |
|
"learning_rate": 3.667649950835792e-05, |
|
"loss": 0.8426, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.4513274336283186, |
|
"grad_norm": 20.72900390625, |
|
"learning_rate": 3.657817109144543e-05, |
|
"loss": 0.7979, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.4690265486725664, |
|
"grad_norm": 36.909027099609375, |
|
"learning_rate": 3.6479842674532944e-05, |
|
"loss": 1.0862, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.4867256637168142, |
|
"grad_norm": 34.34043884277344, |
|
"learning_rate": 3.638151425762045e-05, |
|
"loss": 0.9602, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.504424778761062, |
|
"grad_norm": 17.44400978088379, |
|
"learning_rate": 3.628318584070797e-05, |
|
"loss": 0.7522, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.52212389380531, |
|
"grad_norm": 29.59943962097168, |
|
"learning_rate": 3.6184857423795475e-05, |
|
"loss": 0.8881, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.5398230088495577, |
|
"grad_norm": 33.33137893676758, |
|
"learning_rate": 3.608652900688299e-05, |
|
"loss": 0.9806, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.557522123893805, |
|
"grad_norm": 25.366241455078125, |
|
"learning_rate": 3.5988200589970505e-05, |
|
"loss": 1.0134, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 3.5752212389380533, |
|
"grad_norm": 28.731691360473633, |
|
"learning_rate": 3.588987217305801e-05, |
|
"loss": 0.926, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.5929203539823007, |
|
"grad_norm": 24.277082443237305, |
|
"learning_rate": 3.579154375614553e-05, |
|
"loss": 0.8202, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.6106194690265485, |
|
"grad_norm": 15.626072883605957, |
|
"learning_rate": 3.5693215339233036e-05, |
|
"loss": 0.8415, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.6283185840707963, |
|
"grad_norm": 35.861610412597656, |
|
"learning_rate": 3.559488692232055e-05, |
|
"loss": 0.8739, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.646017699115044, |
|
"grad_norm": 30.36337661743164, |
|
"learning_rate": 3.5496558505408066e-05, |
|
"loss": 0.7045, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.663716814159292, |
|
"grad_norm": 25.959348678588867, |
|
"learning_rate": 3.5398230088495574e-05, |
|
"loss": 0.6826, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.6814159292035398, |
|
"grad_norm": 20.39558219909668, |
|
"learning_rate": 3.529990167158309e-05, |
|
"loss": 0.8847, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.6991150442477876, |
|
"grad_norm": 13.092119216918945, |
|
"learning_rate": 3.52015732546706e-05, |
|
"loss": 1.0101, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.7168141592920354, |
|
"grad_norm": 32.8824462890625, |
|
"learning_rate": 3.510324483775811e-05, |
|
"loss": 0.8229, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.734513274336283, |
|
"grad_norm": 52.47900390625, |
|
"learning_rate": 3.500491642084562e-05, |
|
"loss": 0.8438, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.752212389380531, |
|
"grad_norm": 40.38644790649414, |
|
"learning_rate": 3.4906588003933136e-05, |
|
"loss": 0.8129, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.769911504424779, |
|
"grad_norm": 24.974903106689453, |
|
"learning_rate": 3.480825958702065e-05, |
|
"loss": 0.8478, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.7876106194690267, |
|
"grad_norm": 26.479198455810547, |
|
"learning_rate": 3.470993117010816e-05, |
|
"loss": 0.8803, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.8053097345132745, |
|
"grad_norm": 20.01055908203125, |
|
"learning_rate": 3.4611602753195674e-05, |
|
"loss": 0.8078, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.823008849557522, |
|
"grad_norm": 34.75425720214844, |
|
"learning_rate": 3.451327433628319e-05, |
|
"loss": 0.9117, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.84070796460177, |
|
"grad_norm": 30.763608932495117, |
|
"learning_rate": 3.44149459193707e-05, |
|
"loss": 0.7776, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.8584070796460175, |
|
"grad_norm": 38.77566909790039, |
|
"learning_rate": 3.431661750245821e-05, |
|
"loss": 0.8284, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.8761061946902657, |
|
"grad_norm": 24.3690128326416, |
|
"learning_rate": 3.421828908554573e-05, |
|
"loss": 0.7673, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.893805309734513, |
|
"grad_norm": 22.95048713684082, |
|
"learning_rate": 3.4119960668633235e-05, |
|
"loss": 0.7641, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.911504424778761, |
|
"grad_norm": 21.6431827545166, |
|
"learning_rate": 3.402163225172075e-05, |
|
"loss": 0.832, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.9292035398230087, |
|
"grad_norm": 30.03487777709961, |
|
"learning_rate": 3.3923303834808265e-05, |
|
"loss": 0.8857, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.9469026548672566, |
|
"grad_norm": 32.90857696533203, |
|
"learning_rate": 3.3824975417895774e-05, |
|
"loss": 0.8063, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.9646017699115044, |
|
"grad_norm": 18.506166458129883, |
|
"learning_rate": 3.372664700098329e-05, |
|
"loss": 0.8189, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.982300884955752, |
|
"grad_norm": 21.59197235107422, |
|
"learning_rate": 3.3628318584070804e-05, |
|
"loss": 0.711, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 55.65620422363281, |
|
"learning_rate": 3.352999016715831e-05, |
|
"loss": 0.8934, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 0.8967828154563904, |
|
"eval_map": 0.4488, |
|
"eval_map_50": 0.8568, |
|
"eval_map_75": 0.4271, |
|
"eval_map_large": 0.5174, |
|
"eval_map_medium": 0.264, |
|
"eval_map_per_class": 0.4488, |
|
"eval_map_small": 0.0, |
|
"eval_mar_1": 0.2483, |
|
"eval_mar_10": 0.5628, |
|
"eval_mar_100": 0.6197, |
|
"eval_mar_100_per_class": 0.6197, |
|
"eval_mar_large": 0.6834, |
|
"eval_mar_medium": 0.4609, |
|
"eval_mar_small": 0.0, |
|
"eval_runtime": 3.9666, |
|
"eval_samples_per_second": 25.21, |
|
"eval_steps_per_second": 1.765, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.017699115044247, |
|
"grad_norm": 27.480350494384766, |
|
"learning_rate": 3.343166175024583e-05, |
|
"loss": 0.8197, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.035398230088496, |
|
"grad_norm": 17.332786560058594, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.8171, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.053097345132743, |
|
"grad_norm": 14.860098838806152, |
|
"learning_rate": 3.323500491642085e-05, |
|
"loss": 0.7993, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.070796460176991, |
|
"grad_norm": 16.08732795715332, |
|
"learning_rate": 3.313667649950836e-05, |
|
"loss": 0.7328, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.088495575221239, |
|
"grad_norm": 24.5137939453125, |
|
"learning_rate": 3.303834808259587e-05, |
|
"loss": 0.7629, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.106194690265487, |
|
"grad_norm": 28.86885643005371, |
|
"learning_rate": 3.294001966568339e-05, |
|
"loss": 0.9142, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.123893805309734, |
|
"grad_norm": 26.640892028808594, |
|
"learning_rate": 3.2841691248770896e-05, |
|
"loss": 0.8952, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.1415929203539825, |
|
"grad_norm": 16.20265769958496, |
|
"learning_rate": 3.274336283185841e-05, |
|
"loss": 0.7812, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.15929203539823, |
|
"grad_norm": 22.30803871154785, |
|
"learning_rate": 3.264503441494592e-05, |
|
"loss": 0.8641, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.176991150442478, |
|
"grad_norm": 17.450302124023438, |
|
"learning_rate": 3.2546705998033434e-05, |
|
"loss": 0.8061, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.1946902654867255, |
|
"grad_norm": 20.605792999267578, |
|
"learning_rate": 3.244837758112095e-05, |
|
"loss": 0.8872, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.212389380530974, |
|
"grad_norm": 21.550548553466797, |
|
"learning_rate": 3.235004916420846e-05, |
|
"loss": 0.8599, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.230088495575221, |
|
"grad_norm": 14.21965217590332, |
|
"learning_rate": 3.225172074729597e-05, |
|
"loss": 0.7412, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.247787610619469, |
|
"grad_norm": 24.305540084838867, |
|
"learning_rate": 3.215339233038348e-05, |
|
"loss": 0.7362, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.265486725663717, |
|
"grad_norm": 26.687883377075195, |
|
"learning_rate": 3.2055063913470996e-05, |
|
"loss": 0.8364, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.283185840707965, |
|
"grad_norm": 23.61547088623047, |
|
"learning_rate": 3.1956735496558504e-05, |
|
"loss": 0.6807, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.300884955752212, |
|
"grad_norm": 36.08306121826172, |
|
"learning_rate": 3.185840707964602e-05, |
|
"loss": 0.7484, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.31858407079646, |
|
"grad_norm": 24.663066864013672, |
|
"learning_rate": 3.1760078662733534e-05, |
|
"loss": 0.7722, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.336283185840708, |
|
"grad_norm": 25.614458084106445, |
|
"learning_rate": 3.166175024582104e-05, |
|
"loss": 0.9276, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.353982300884955, |
|
"grad_norm": 22.014490127563477, |
|
"learning_rate": 3.156342182890856e-05, |
|
"loss": 0.8552, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.371681415929204, |
|
"grad_norm": 16.620935440063477, |
|
"learning_rate": 3.1465093411996065e-05, |
|
"loss": 0.8883, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.389380530973451, |
|
"grad_norm": 19.76045036315918, |
|
"learning_rate": 3.136676499508358e-05, |
|
"loss": 0.5994, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.407079646017699, |
|
"grad_norm": 27.53520393371582, |
|
"learning_rate": 3.1268436578171095e-05, |
|
"loss": 0.7127, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.424778761061947, |
|
"grad_norm": 17.578168869018555, |
|
"learning_rate": 3.1170108161258604e-05, |
|
"loss": 0.8606, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.442477876106195, |
|
"grad_norm": 25.594207763671875, |
|
"learning_rate": 3.107177974434612e-05, |
|
"loss": 0.736, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 4.460176991150442, |
|
"grad_norm": 17.3150691986084, |
|
"learning_rate": 3.097345132743363e-05, |
|
"loss": 0.8871, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 4.477876106194691, |
|
"grad_norm": 34.62860107421875, |
|
"learning_rate": 3.087512291052114e-05, |
|
"loss": 0.8176, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 4.495575221238938, |
|
"grad_norm": 21.978734970092773, |
|
"learning_rate": 3.077679449360865e-05, |
|
"loss": 0.8523, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 4.513274336283186, |
|
"grad_norm": 37.72077941894531, |
|
"learning_rate": 3.0678466076696165e-05, |
|
"loss": 0.6939, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 4.530973451327434, |
|
"grad_norm": 29.62040901184082, |
|
"learning_rate": 3.058013765978368e-05, |
|
"loss": 0.9062, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 4.548672566371682, |
|
"grad_norm": 43.172821044921875, |
|
"learning_rate": 3.048180924287119e-05, |
|
"loss": 0.9389, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 4.566371681415929, |
|
"grad_norm": 19.374061584472656, |
|
"learning_rate": 3.0383480825958703e-05, |
|
"loss": 0.8577, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 4.584070796460177, |
|
"grad_norm": 18.854230880737305, |
|
"learning_rate": 3.0285152409046215e-05, |
|
"loss": 0.7861, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 4.601769911504425, |
|
"grad_norm": 27.53508949279785, |
|
"learning_rate": 3.018682399213373e-05, |
|
"loss": 0.8935, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 4.619469026548672, |
|
"grad_norm": 25.085142135620117, |
|
"learning_rate": 3.008849557522124e-05, |
|
"loss": 0.6619, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 4.6371681415929205, |
|
"grad_norm": 22.648717880249023, |
|
"learning_rate": 2.9990167158308753e-05, |
|
"loss": 0.7065, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 4.654867256637168, |
|
"grad_norm": 19.692138671875, |
|
"learning_rate": 2.9891838741396268e-05, |
|
"loss": 0.8524, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 4.672566371681416, |
|
"grad_norm": 17.703353881835938, |
|
"learning_rate": 2.9793510324483776e-05, |
|
"loss": 0.8405, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 4.6902654867256635, |
|
"grad_norm": 19.434688568115234, |
|
"learning_rate": 2.969518190757129e-05, |
|
"loss": 0.7932, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 4.707964601769912, |
|
"grad_norm": 19.711090087890625, |
|
"learning_rate": 2.95968534906588e-05, |
|
"loss": 0.6708, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 4.725663716814159, |
|
"grad_norm": 32.07950973510742, |
|
"learning_rate": 2.9498525073746314e-05, |
|
"loss": 0.8732, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 4.743362831858407, |
|
"grad_norm": 26.682344436645508, |
|
"learning_rate": 2.940019665683383e-05, |
|
"loss": 0.8234, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 4.761061946902655, |
|
"grad_norm": 20.226280212402344, |
|
"learning_rate": 2.9301868239921338e-05, |
|
"loss": 0.6903, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 4.778761061946903, |
|
"grad_norm": 28.748722076416016, |
|
"learning_rate": 2.9203539823008852e-05, |
|
"loss": 0.8606, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 4.79646017699115, |
|
"grad_norm": 21.78730583190918, |
|
"learning_rate": 2.910521140609636e-05, |
|
"loss": 0.7924, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 4.814159292035399, |
|
"grad_norm": 24.920272827148438, |
|
"learning_rate": 2.9006882989183876e-05, |
|
"loss": 0.7917, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 4.831858407079646, |
|
"grad_norm": 32.964298248291016, |
|
"learning_rate": 2.890855457227139e-05, |
|
"loss": 0.8017, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 4.849557522123893, |
|
"grad_norm": 27.990001678466797, |
|
"learning_rate": 2.88102261553589e-05, |
|
"loss": 0.8392, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 4.867256637168142, |
|
"grad_norm": 24.096538543701172, |
|
"learning_rate": 2.8711897738446414e-05, |
|
"loss": 0.9032, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 4.88495575221239, |
|
"grad_norm": 22.516420364379883, |
|
"learning_rate": 2.8613569321533922e-05, |
|
"loss": 0.8411, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 4.902654867256637, |
|
"grad_norm": 32.133506774902344, |
|
"learning_rate": 2.8515240904621437e-05, |
|
"loss": 0.824, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 4.920353982300885, |
|
"grad_norm": 20.993709564208984, |
|
"learning_rate": 2.8416912487708945e-05, |
|
"loss": 0.6233, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 4.938053097345133, |
|
"grad_norm": 17.405899047851562, |
|
"learning_rate": 2.831858407079646e-05, |
|
"loss": 0.8157, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 4.95575221238938, |
|
"grad_norm": 18.684974670410156, |
|
"learning_rate": 2.8220255653883975e-05, |
|
"loss": 0.889, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.9734513274336285, |
|
"grad_norm": 22.140762329101562, |
|
"learning_rate": 2.8121927236971483e-05, |
|
"loss": 0.7555, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 4.991150442477876, |
|
"grad_norm": 93.78765869140625, |
|
"learning_rate": 2.8023598820059e-05, |
|
"loss": 0.8166, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 0.8204176425933838, |
|
"eval_map": 0.4963, |
|
"eval_map_50": 0.8784, |
|
"eval_map_75": 0.5214, |
|
"eval_map_large": 0.56, |
|
"eval_map_medium": 0.3309, |
|
"eval_map_per_class": 0.4963, |
|
"eval_map_small": 0.0209, |
|
"eval_mar_1": 0.2598, |
|
"eval_mar_10": 0.5983, |
|
"eval_mar_100": 0.6581, |
|
"eval_mar_100_per_class": 0.6581, |
|
"eval_mar_large": 0.7207, |
|
"eval_mar_medium": 0.4969, |
|
"eval_mar_small": 0.4, |
|
"eval_runtime": 4.466, |
|
"eval_samples_per_second": 22.392, |
|
"eval_steps_per_second": 1.567, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.008849557522124, |
|
"grad_norm": 17.966142654418945, |
|
"learning_rate": 2.792527040314651e-05, |
|
"loss": 0.7743, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.0265486725663715, |
|
"grad_norm": 17.44831657409668, |
|
"learning_rate": 2.782694198623402e-05, |
|
"loss": 0.8002, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.04424778761062, |
|
"grad_norm": 16.89792823791504, |
|
"learning_rate": 2.7728613569321537e-05, |
|
"loss": 0.8238, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.061946902654867, |
|
"grad_norm": 15.499351501464844, |
|
"learning_rate": 2.7630285152409048e-05, |
|
"loss": 0.5594, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.079646017699115, |
|
"grad_norm": 26.665563583374023, |
|
"learning_rate": 2.753195673549656e-05, |
|
"loss": 0.7994, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.097345132743363, |
|
"grad_norm": 19.839536666870117, |
|
"learning_rate": 2.743362831858407e-05, |
|
"loss": 0.6578, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.115044247787611, |
|
"grad_norm": 24.534902572631836, |
|
"learning_rate": 2.7335299901671586e-05, |
|
"loss": 0.7748, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.132743362831858, |
|
"grad_norm": 23.61812400817871, |
|
"learning_rate": 2.7236971484759095e-05, |
|
"loss": 0.7771, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.150442477876107, |
|
"grad_norm": 17.933223724365234, |
|
"learning_rate": 2.713864306784661e-05, |
|
"loss": 0.7349, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.168141592920354, |
|
"grad_norm": 18.23980712890625, |
|
"learning_rate": 2.7040314650934125e-05, |
|
"loss": 0.7771, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.185840707964601, |
|
"grad_norm": 20.185016632080078, |
|
"learning_rate": 2.6941986234021633e-05, |
|
"loss": 0.8009, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.20353982300885, |
|
"grad_norm": 26.6129093170166, |
|
"learning_rate": 2.6843657817109148e-05, |
|
"loss": 0.8614, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.221238938053097, |
|
"grad_norm": 27.28291893005371, |
|
"learning_rate": 2.6745329400196656e-05, |
|
"loss": 0.8594, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.238938053097345, |
|
"grad_norm": 19.851408004760742, |
|
"learning_rate": 2.664700098328417e-05, |
|
"loss": 0.6753, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 5.256637168141593, |
|
"grad_norm": 15.925873756408691, |
|
"learning_rate": 2.6548672566371686e-05, |
|
"loss": 0.7364, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 5.274336283185841, |
|
"grad_norm": 16.969070434570312, |
|
"learning_rate": 2.6450344149459194e-05, |
|
"loss": 0.7607, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 5.292035398230088, |
|
"grad_norm": 29.575332641601562, |
|
"learning_rate": 2.635201573254671e-05, |
|
"loss": 0.6162, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 5.3097345132743365, |
|
"grad_norm": 27.425867080688477, |
|
"learning_rate": 2.6253687315634217e-05, |
|
"loss": 0.7873, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 5.327433628318584, |
|
"grad_norm": 36.430233001708984, |
|
"learning_rate": 2.6155358898721732e-05, |
|
"loss": 0.7635, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 5.345132743362832, |
|
"grad_norm": 23.08193588256836, |
|
"learning_rate": 2.605703048180924e-05, |
|
"loss": 0.8998, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 5.3628318584070795, |
|
"grad_norm": 19.06671714782715, |
|
"learning_rate": 2.5958702064896756e-05, |
|
"loss": 0.6907, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 5.380530973451328, |
|
"grad_norm": 17.793241500854492, |
|
"learning_rate": 2.586037364798427e-05, |
|
"loss": 0.7653, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 5.398230088495575, |
|
"grad_norm": 18.47113800048828, |
|
"learning_rate": 2.576204523107178e-05, |
|
"loss": 0.7344, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 5.415929203539823, |
|
"grad_norm": 23.754230499267578, |
|
"learning_rate": 2.5663716814159294e-05, |
|
"loss": 0.7098, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 5.433628318584071, |
|
"grad_norm": 17.286996841430664, |
|
"learning_rate": 2.5565388397246802e-05, |
|
"loss": 0.7242, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 5.451327433628318, |
|
"grad_norm": 33.41328811645508, |
|
"learning_rate": 2.5467059980334317e-05, |
|
"loss": 0.8577, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 5.469026548672566, |
|
"grad_norm": 23.295421600341797, |
|
"learning_rate": 2.5368731563421832e-05, |
|
"loss": 0.765, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 5.486725663716814, |
|
"grad_norm": 29.50783348083496, |
|
"learning_rate": 2.527040314650934e-05, |
|
"loss": 0.7021, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 5.504424778761062, |
|
"grad_norm": 20.60035514831543, |
|
"learning_rate": 2.5172074729596855e-05, |
|
"loss": 0.8331, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 5.522123893805309, |
|
"grad_norm": 28.808502197265625, |
|
"learning_rate": 2.5073746312684367e-05, |
|
"loss": 0.9017, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 5.539823008849558, |
|
"grad_norm": 63.102195739746094, |
|
"learning_rate": 2.4975417895771878e-05, |
|
"loss": 0.7724, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 5.557522123893805, |
|
"grad_norm": 31.407791137695312, |
|
"learning_rate": 2.4877089478859393e-05, |
|
"loss": 0.775, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 5.575221238938053, |
|
"grad_norm": 27.927444458007812, |
|
"learning_rate": 2.4778761061946905e-05, |
|
"loss": 0.6807, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 5.592920353982301, |
|
"grad_norm": 31.49755096435547, |
|
"learning_rate": 2.4680432645034416e-05, |
|
"loss": 0.7617, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 5.610619469026549, |
|
"grad_norm": 24.49897003173828, |
|
"learning_rate": 2.4582104228121928e-05, |
|
"loss": 0.7275, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 5.628318584070796, |
|
"grad_norm": 29.279388427734375, |
|
"learning_rate": 2.4483775811209443e-05, |
|
"loss": 0.6349, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 5.646017699115045, |
|
"grad_norm": 18.994455337524414, |
|
"learning_rate": 2.4385447394296955e-05, |
|
"loss": 0.6773, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 5.663716814159292, |
|
"grad_norm": 21.28229522705078, |
|
"learning_rate": 2.4287118977384466e-05, |
|
"loss": 0.8142, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 5.68141592920354, |
|
"grad_norm": 28.956829071044922, |
|
"learning_rate": 2.4188790560471978e-05, |
|
"loss": 0.7215, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 5.699115044247788, |
|
"grad_norm": 35.786991119384766, |
|
"learning_rate": 2.409046214355949e-05, |
|
"loss": 0.8148, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 5.716814159292035, |
|
"grad_norm": 17.956323623657227, |
|
"learning_rate": 2.3992133726647e-05, |
|
"loss": 0.9103, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 5.734513274336283, |
|
"grad_norm": 21.570697784423828, |
|
"learning_rate": 2.3893805309734516e-05, |
|
"loss": 0.6904, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 5.752212389380531, |
|
"grad_norm": 13.703173637390137, |
|
"learning_rate": 2.3795476892822028e-05, |
|
"loss": 0.6716, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 5.769911504424779, |
|
"grad_norm": 19.16945457458496, |
|
"learning_rate": 2.369714847590954e-05, |
|
"loss": 0.8447, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 5.787610619469026, |
|
"grad_norm": 15.37072467803955, |
|
"learning_rate": 2.359882005899705e-05, |
|
"loss": 0.7729, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 5.8053097345132745, |
|
"grad_norm": 16.8475341796875, |
|
"learning_rate": 2.3500491642084562e-05, |
|
"loss": 0.8331, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 5.823008849557522, |
|
"grad_norm": 23.530323028564453, |
|
"learning_rate": 2.3402163225172074e-05, |
|
"loss": 0.7384, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 5.84070796460177, |
|
"grad_norm": 29.106124877929688, |
|
"learning_rate": 2.330383480825959e-05, |
|
"loss": 0.7302, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 5.8584070796460175, |
|
"grad_norm": 13.785088539123535, |
|
"learning_rate": 2.32055063913471e-05, |
|
"loss": 0.6624, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 5.876106194690266, |
|
"grad_norm": 24.307811737060547, |
|
"learning_rate": 2.3107177974434612e-05, |
|
"loss": 0.8129, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 5.893805309734513, |
|
"grad_norm": 32.765541076660156, |
|
"learning_rate": 2.3008849557522124e-05, |
|
"loss": 0.7463, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 5.911504424778761, |
|
"grad_norm": 15.36600112915039, |
|
"learning_rate": 2.2910521140609635e-05, |
|
"loss": 0.8945, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 5.929203539823009, |
|
"grad_norm": 21.934234619140625, |
|
"learning_rate": 2.2812192723697147e-05, |
|
"loss": 0.7991, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 5.946902654867257, |
|
"grad_norm": 18.248523712158203, |
|
"learning_rate": 2.2713864306784662e-05, |
|
"loss": 0.7406, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 5.964601769911504, |
|
"grad_norm": 18.006677627563477, |
|
"learning_rate": 2.2615535889872174e-05, |
|
"loss": 0.7947, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 5.982300884955752, |
|
"grad_norm": 34.55229568481445, |
|
"learning_rate": 2.2517207472959685e-05, |
|
"loss": 0.7464, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"grad_norm": 36.384239196777344, |
|
"learning_rate": 2.24188790560472e-05, |
|
"loss": 0.6889, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 0.7955648303031921, |
|
"eval_map": 0.5081, |
|
"eval_map_50": 0.8984, |
|
"eval_map_75": 0.5276, |
|
"eval_map_large": 0.5756, |
|
"eval_map_medium": 0.3127, |
|
"eval_map_per_class": 0.5081, |
|
"eval_map_small": 0.0226, |
|
"eval_mar_1": 0.2658, |
|
"eval_mar_10": 0.5991, |
|
"eval_mar_100": 0.6538, |
|
"eval_mar_100_per_class": 0.6538, |
|
"eval_mar_large": 0.713, |
|
"eval_mar_medium": 0.5016, |
|
"eval_mar_small": 0.4, |
|
"eval_runtime": 4.0138, |
|
"eval_samples_per_second": 24.914, |
|
"eval_steps_per_second": 1.744, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 6.017699115044247, |
|
"grad_norm": 30.158843994140625, |
|
"learning_rate": 2.232055063913471e-05, |
|
"loss": 0.7447, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 6.035398230088496, |
|
"grad_norm": 21.382171630859375, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.7444, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 6.053097345132743, |
|
"grad_norm": 16.47271156311035, |
|
"learning_rate": 2.2123893805309738e-05, |
|
"loss": 0.7564, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 6.070796460176991, |
|
"grad_norm": 26.326805114746094, |
|
"learning_rate": 2.202556538839725e-05, |
|
"loss": 0.7716, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 6.088495575221239, |
|
"grad_norm": 21.000934600830078, |
|
"learning_rate": 2.192723697148476e-05, |
|
"loss": 0.9769, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 6.106194690265487, |
|
"grad_norm": 14.34850025177002, |
|
"learning_rate": 2.1828908554572273e-05, |
|
"loss": 0.6075, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 6.123893805309734, |
|
"grad_norm": 14.81210994720459, |
|
"learning_rate": 2.1730580137659785e-05, |
|
"loss": 0.6219, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 6.1415929203539825, |
|
"grad_norm": 15.356616973876953, |
|
"learning_rate": 2.1632251720747296e-05, |
|
"loss": 0.6604, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 6.15929203539823, |
|
"grad_norm": 25.215105056762695, |
|
"learning_rate": 2.153392330383481e-05, |
|
"loss": 0.8016, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 6.176991150442478, |
|
"grad_norm": 29.563343048095703, |
|
"learning_rate": 2.1435594886922323e-05, |
|
"loss": 0.6862, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 6.1946902654867255, |
|
"grad_norm": 21.926513671875, |
|
"learning_rate": 2.1337266470009834e-05, |
|
"loss": 0.7859, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 6.212389380530974, |
|
"grad_norm": 23.07330322265625, |
|
"learning_rate": 2.1238938053097346e-05, |
|
"loss": 0.6345, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 6.230088495575221, |
|
"grad_norm": 17.459535598754883, |
|
"learning_rate": 2.1140609636184858e-05, |
|
"loss": 0.7799, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 6.247787610619469, |
|
"grad_norm": 16.414749145507812, |
|
"learning_rate": 2.104228121927237e-05, |
|
"loss": 0.8234, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 6.265486725663717, |
|
"grad_norm": 30.402633666992188, |
|
"learning_rate": 2.0943952802359884e-05, |
|
"loss": 0.8438, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 6.283185840707965, |
|
"grad_norm": 36.2429084777832, |
|
"learning_rate": 2.0845624385447396e-05, |
|
"loss": 0.8062, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 6.300884955752212, |
|
"grad_norm": 29.136394500732422, |
|
"learning_rate": 2.0747295968534907e-05, |
|
"loss": 0.7717, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 6.31858407079646, |
|
"grad_norm": 22.905851364135742, |
|
"learning_rate": 2.064896755162242e-05, |
|
"loss": 0.7359, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 6.336283185840708, |
|
"grad_norm": 20.239849090576172, |
|
"learning_rate": 2.055063913470993e-05, |
|
"loss": 0.6981, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 6.353982300884955, |
|
"grad_norm": 24.594646453857422, |
|
"learning_rate": 2.0452310717797442e-05, |
|
"loss": 0.6381, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 6.371681415929204, |
|
"grad_norm": 26.90498161315918, |
|
"learning_rate": 2.0353982300884957e-05, |
|
"loss": 0.8903, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 6.389380530973451, |
|
"grad_norm": 29.696321487426758, |
|
"learning_rate": 2.025565388397247e-05, |
|
"loss": 0.6345, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 6.407079646017699, |
|
"grad_norm": 30.505664825439453, |
|
"learning_rate": 2.015732546705998e-05, |
|
"loss": 0.7814, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 6.424778761061947, |
|
"grad_norm": 36.33932876586914, |
|
"learning_rate": 2.0058997050147492e-05, |
|
"loss": 0.7754, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 6.442477876106195, |
|
"grad_norm": 18.5018310546875, |
|
"learning_rate": 1.9960668633235004e-05, |
|
"loss": 0.7061, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 6.460176991150442, |
|
"grad_norm": 16.27545166015625, |
|
"learning_rate": 1.986234021632252e-05, |
|
"loss": 0.7759, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 6.477876106194691, |
|
"grad_norm": 19.90315055847168, |
|
"learning_rate": 1.976401179941003e-05, |
|
"loss": 0.7227, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 6.495575221238938, |
|
"grad_norm": 23.48577880859375, |
|
"learning_rate": 1.9665683382497542e-05, |
|
"loss": 0.7967, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 6.513274336283186, |
|
"grad_norm": 25.51618003845215, |
|
"learning_rate": 1.9567354965585057e-05, |
|
"loss": 0.7768, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 6.530973451327434, |
|
"grad_norm": 17.899826049804688, |
|
"learning_rate": 1.946902654867257e-05, |
|
"loss": 0.6657, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 6.548672566371682, |
|
"grad_norm": 19.610185623168945, |
|
"learning_rate": 1.937069813176008e-05, |
|
"loss": 0.8086, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 6.566371681415929, |
|
"grad_norm": 25.385841369628906, |
|
"learning_rate": 1.927236971484759e-05, |
|
"loss": 0.6468, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 6.584070796460177, |
|
"grad_norm": 15.946438789367676, |
|
"learning_rate": 1.9174041297935107e-05, |
|
"loss": 0.676, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 6.601769911504425, |
|
"grad_norm": 17.454030990600586, |
|
"learning_rate": 1.9075712881022618e-05, |
|
"loss": 0.7586, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 6.619469026548672, |
|
"grad_norm": 24.09688949584961, |
|
"learning_rate": 1.897738446411013e-05, |
|
"loss": 0.5866, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 6.6371681415929205, |
|
"grad_norm": 20.896167755126953, |
|
"learning_rate": 1.887905604719764e-05, |
|
"loss": 0.6377, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 6.654867256637168, |
|
"grad_norm": 13.922218322753906, |
|
"learning_rate": 1.8780727630285153e-05, |
|
"loss": 0.6401, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 6.672566371681416, |
|
"grad_norm": 22.96921157836914, |
|
"learning_rate": 1.8682399213372664e-05, |
|
"loss": 0.7318, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 6.6902654867256635, |
|
"grad_norm": 24.575101852416992, |
|
"learning_rate": 1.858407079646018e-05, |
|
"loss": 0.7185, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 6.707964601769912, |
|
"grad_norm": 19.46567726135254, |
|
"learning_rate": 1.848574237954769e-05, |
|
"loss": 0.7333, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 6.725663716814159, |
|
"grad_norm": 16.00038719177246, |
|
"learning_rate": 1.8387413962635203e-05, |
|
"loss": 0.6284, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 6.743362831858407, |
|
"grad_norm": 19.083911895751953, |
|
"learning_rate": 1.8289085545722714e-05, |
|
"loss": 0.7064, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 6.761061946902655, |
|
"grad_norm": 21.226545333862305, |
|
"learning_rate": 1.8190757128810226e-05, |
|
"loss": 0.8701, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 6.778761061946903, |
|
"grad_norm": 20.60919189453125, |
|
"learning_rate": 1.8092428711897737e-05, |
|
"loss": 0.8677, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 6.79646017699115, |
|
"grad_norm": 26.225242614746094, |
|
"learning_rate": 1.7994100294985252e-05, |
|
"loss": 0.7429, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 6.814159292035399, |
|
"grad_norm": 23.108654022216797, |
|
"learning_rate": 1.7895771878072764e-05, |
|
"loss": 0.6417, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 6.831858407079646, |
|
"grad_norm": 21.556283950805664, |
|
"learning_rate": 1.7797443461160276e-05, |
|
"loss": 0.6965, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 6.849557522123893, |
|
"grad_norm": 14.243925094604492, |
|
"learning_rate": 1.7699115044247787e-05, |
|
"loss": 0.6269, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 6.867256637168142, |
|
"grad_norm": 25.8636417388916, |
|
"learning_rate": 1.76007866273353e-05, |
|
"loss": 0.707, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 6.88495575221239, |
|
"grad_norm": 15.025083541870117, |
|
"learning_rate": 1.750245821042281e-05, |
|
"loss": 0.5885, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 6.902654867256637, |
|
"grad_norm": 12.319948196411133, |
|
"learning_rate": 1.7404129793510325e-05, |
|
"loss": 0.6618, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 6.920353982300885, |
|
"grad_norm": 18.02228355407715, |
|
"learning_rate": 1.7305801376597837e-05, |
|
"loss": 0.7721, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 6.938053097345133, |
|
"grad_norm": 13.130449295043945, |
|
"learning_rate": 1.720747295968535e-05, |
|
"loss": 0.7447, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 6.95575221238938, |
|
"grad_norm": 30.51601219177246, |
|
"learning_rate": 1.7109144542772864e-05, |
|
"loss": 0.7415, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 6.9734513274336285, |
|
"grad_norm": 26.357257843017578, |
|
"learning_rate": 1.7010816125860375e-05, |
|
"loss": 0.6869, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 6.991150442477876, |
|
"grad_norm": 23.887100219726562, |
|
"learning_rate": 1.6912487708947887e-05, |
|
"loss": 0.673, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 0.7656337022781372, |
|
"eval_map": 0.5423, |
|
"eval_map_50": 0.9234, |
|
"eval_map_75": 0.5491, |
|
"eval_map_large": 0.6107, |
|
"eval_map_medium": 0.361, |
|
"eval_map_per_class": 0.5423, |
|
"eval_map_small": 0.025, |
|
"eval_mar_1": 0.2774, |
|
"eval_mar_10": 0.6252, |
|
"eval_mar_100": 0.6808, |
|
"eval_mar_100_per_class": 0.6808, |
|
"eval_mar_large": 0.7473, |
|
"eval_mar_medium": 0.5094, |
|
"eval_mar_small": 0.4, |
|
"eval_runtime": 3.6757, |
|
"eval_samples_per_second": 27.206, |
|
"eval_steps_per_second": 1.904, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 7.008849557522124, |
|
"grad_norm": 16.251768112182617, |
|
"learning_rate": 1.6814159292035402e-05, |
|
"loss": 0.6063, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 7.0265486725663715, |
|
"grad_norm": 15.13443660736084, |
|
"learning_rate": 1.6715830875122913e-05, |
|
"loss": 0.7639, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 7.04424778761062, |
|
"grad_norm": 18.05428123474121, |
|
"learning_rate": 1.6617502458210425e-05, |
|
"loss": 0.6947, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 7.061946902654867, |
|
"grad_norm": 18.904369354248047, |
|
"learning_rate": 1.6519174041297937e-05, |
|
"loss": 0.6764, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 7.079646017699115, |
|
"grad_norm": 25.891990661621094, |
|
"learning_rate": 1.6420845624385448e-05, |
|
"loss": 0.6985, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 7.097345132743363, |
|
"grad_norm": 21.057519912719727, |
|
"learning_rate": 1.632251720747296e-05, |
|
"loss": 0.7379, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 7.115044247787611, |
|
"grad_norm": 19.18006706237793, |
|
"learning_rate": 1.6224188790560475e-05, |
|
"loss": 0.6897, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 7.132743362831858, |
|
"grad_norm": 18.494470596313477, |
|
"learning_rate": 1.6125860373647986e-05, |
|
"loss": 0.6482, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 7.150442477876107, |
|
"grad_norm": 30.54767608642578, |
|
"learning_rate": 1.6027531956735498e-05, |
|
"loss": 0.7792, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 7.168141592920354, |
|
"grad_norm": 26.467973709106445, |
|
"learning_rate": 1.592920353982301e-05, |
|
"loss": 0.6576, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 7.185840707964601, |
|
"grad_norm": 24.232133865356445, |
|
"learning_rate": 1.583087512291052e-05, |
|
"loss": 0.7487, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 7.20353982300885, |
|
"grad_norm": 22.034992218017578, |
|
"learning_rate": 1.5732546705998033e-05, |
|
"loss": 0.7208, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 7.221238938053097, |
|
"grad_norm": 17.85079574584961, |
|
"learning_rate": 1.5634218289085548e-05, |
|
"loss": 0.7738, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 7.238938053097345, |
|
"grad_norm": 27.755950927734375, |
|
"learning_rate": 1.553588987217306e-05, |
|
"loss": 0.6782, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 7.256637168141593, |
|
"grad_norm": 28.554393768310547, |
|
"learning_rate": 1.543756145526057e-05, |
|
"loss": 0.8659, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 7.274336283185841, |
|
"grad_norm": 17.34409523010254, |
|
"learning_rate": 1.5339233038348082e-05, |
|
"loss": 0.6944, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 7.292035398230088, |
|
"grad_norm": 17.795570373535156, |
|
"learning_rate": 1.5240904621435596e-05, |
|
"loss": 0.804, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 7.3097345132743365, |
|
"grad_norm": 23.85445213317871, |
|
"learning_rate": 1.5142576204523107e-05, |
|
"loss": 0.7226, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 7.327433628318584, |
|
"grad_norm": 22.983423233032227, |
|
"learning_rate": 1.504424778761062e-05, |
|
"loss": 0.7246, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 7.345132743362832, |
|
"grad_norm": 22.66045570373535, |
|
"learning_rate": 1.4945919370698134e-05, |
|
"loss": 0.596, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 7.3628318584070795, |
|
"grad_norm": 21.034364700317383, |
|
"learning_rate": 1.4847590953785646e-05, |
|
"loss": 0.709, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 7.380530973451328, |
|
"grad_norm": 22.207504272460938, |
|
"learning_rate": 1.4749262536873157e-05, |
|
"loss": 0.6881, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 7.398230088495575, |
|
"grad_norm": 32.008148193359375, |
|
"learning_rate": 1.4650934119960669e-05, |
|
"loss": 0.5796, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 7.415929203539823, |
|
"grad_norm": 20.24285125732422, |
|
"learning_rate": 1.455260570304818e-05, |
|
"loss": 0.6238, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 7.433628318584071, |
|
"grad_norm": 21.326202392578125, |
|
"learning_rate": 1.4454277286135695e-05, |
|
"loss": 0.7368, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 7.451327433628318, |
|
"grad_norm": 30.127544403076172, |
|
"learning_rate": 1.4355948869223207e-05, |
|
"loss": 0.7563, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 7.469026548672566, |
|
"grad_norm": 21.667497634887695, |
|
"learning_rate": 1.4257620452310719e-05, |
|
"loss": 0.7823, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 7.486725663716814, |
|
"grad_norm": 25.011871337890625, |
|
"learning_rate": 1.415929203539823e-05, |
|
"loss": 0.7403, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 7.504424778761062, |
|
"grad_norm": 22.891185760498047, |
|
"learning_rate": 1.4060963618485742e-05, |
|
"loss": 0.765, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 7.522123893805309, |
|
"grad_norm": 20.289926528930664, |
|
"learning_rate": 1.3962635201573255e-05, |
|
"loss": 0.6539, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 7.539823008849558, |
|
"grad_norm": 34.45566177368164, |
|
"learning_rate": 1.3864306784660768e-05, |
|
"loss": 0.6781, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 7.557522123893805, |
|
"grad_norm": 16.83544921875, |
|
"learning_rate": 1.376597836774828e-05, |
|
"loss": 0.6451, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 7.575221238938053, |
|
"grad_norm": 23.303050994873047, |
|
"learning_rate": 1.3667649950835793e-05, |
|
"loss": 0.6929, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 7.592920353982301, |
|
"grad_norm": 22.088911056518555, |
|
"learning_rate": 1.3569321533923305e-05, |
|
"loss": 0.5961, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 7.610619469026549, |
|
"grad_norm": 14.59677791595459, |
|
"learning_rate": 1.3470993117010816e-05, |
|
"loss": 0.6993, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 7.628318584070796, |
|
"grad_norm": 23.7571964263916, |
|
"learning_rate": 1.3372664700098328e-05, |
|
"loss": 0.7123, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 7.646017699115045, |
|
"grad_norm": 22.422454833984375, |
|
"learning_rate": 1.3274336283185843e-05, |
|
"loss": 0.8684, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 7.663716814159292, |
|
"grad_norm": 18.472415924072266, |
|
"learning_rate": 1.3176007866273355e-05, |
|
"loss": 0.7778, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 7.68141592920354, |
|
"grad_norm": 18.021146774291992, |
|
"learning_rate": 1.3077679449360866e-05, |
|
"loss": 0.8397, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 7.699115044247788, |
|
"grad_norm": 19.535381317138672, |
|
"learning_rate": 1.2979351032448378e-05, |
|
"loss": 0.7257, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 7.716814159292035, |
|
"grad_norm": 20.843671798706055, |
|
"learning_rate": 1.288102261553589e-05, |
|
"loss": 0.7802, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 7.734513274336283, |
|
"grad_norm": 14.444415092468262, |
|
"learning_rate": 1.2782694198623401e-05, |
|
"loss": 0.6855, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 7.752212389380531, |
|
"grad_norm": 15.192424774169922, |
|
"learning_rate": 1.2684365781710916e-05, |
|
"loss": 0.7157, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 7.769911504424779, |
|
"grad_norm": 14.531706809997559, |
|
"learning_rate": 1.2586037364798428e-05, |
|
"loss": 0.7733, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 7.787610619469026, |
|
"grad_norm": 22.154911041259766, |
|
"learning_rate": 1.2487708947885939e-05, |
|
"loss": 0.6404, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 7.8053097345132745, |
|
"grad_norm": 18.85184669494629, |
|
"learning_rate": 1.2389380530973452e-05, |
|
"loss": 0.5891, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 7.823008849557522, |
|
"grad_norm": 18.25426483154297, |
|
"learning_rate": 1.2291052114060964e-05, |
|
"loss": 0.8318, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 7.84070796460177, |
|
"grad_norm": 15.124105453491211, |
|
"learning_rate": 1.2192723697148477e-05, |
|
"loss": 0.728, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 7.8584070796460175, |
|
"grad_norm": 18.860633850097656, |
|
"learning_rate": 1.2094395280235989e-05, |
|
"loss": 0.8461, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 7.876106194690266, |
|
"grad_norm": 21.146745681762695, |
|
"learning_rate": 1.19960668633235e-05, |
|
"loss": 0.7089, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 7.893805309734513, |
|
"grad_norm": 19.12373924255371, |
|
"learning_rate": 1.1897738446411014e-05, |
|
"loss": 0.668, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 7.911504424778761, |
|
"grad_norm": 12.939081192016602, |
|
"learning_rate": 1.1799410029498525e-05, |
|
"loss": 0.8069, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 7.929203539823009, |
|
"grad_norm": 14.852071762084961, |
|
"learning_rate": 1.1701081612586037e-05, |
|
"loss": 0.6977, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 7.946902654867257, |
|
"grad_norm": 16.5046329498291, |
|
"learning_rate": 1.160275319567355e-05, |
|
"loss": 0.6836, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 7.964601769911504, |
|
"grad_norm": 27.28510284423828, |
|
"learning_rate": 1.1504424778761062e-05, |
|
"loss": 0.7441, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 7.982300884955752, |
|
"grad_norm": 20.414901733398438, |
|
"learning_rate": 1.1406096361848573e-05, |
|
"loss": 0.7189, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 15.717076301574707, |
|
"learning_rate": 1.1307767944936087e-05, |
|
"loss": 0.7197, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 0.7693684101104736, |
|
"eval_map": 0.5471, |
|
"eval_map_50": 0.9308, |
|
"eval_map_75": 0.5913, |
|
"eval_map_large": 0.6096, |
|
"eval_map_medium": 0.3715, |
|
"eval_map_per_class": 0.5471, |
|
"eval_map_small": 0.0188, |
|
"eval_mar_1": 0.2833, |
|
"eval_mar_10": 0.6235, |
|
"eval_mar_100": 0.6761, |
|
"eval_mar_100_per_class": 0.6761, |
|
"eval_mar_large": 0.7278, |
|
"eval_mar_medium": 0.5453, |
|
"eval_mar_small": 0.3, |
|
"eval_runtime": 3.669, |
|
"eval_samples_per_second": 27.255, |
|
"eval_steps_per_second": 1.908, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 8.017699115044248, |
|
"grad_norm": 19.057044982910156, |
|
"learning_rate": 1.12094395280236e-05, |
|
"loss": 0.6378, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 8.035398230088495, |
|
"grad_norm": 26.174707412719727, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 0.6774, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 8.053097345132743, |
|
"grad_norm": 23.515504837036133, |
|
"learning_rate": 1.1012782694198625e-05, |
|
"loss": 0.7632, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 8.070796460176991, |
|
"grad_norm": 32.522216796875, |
|
"learning_rate": 1.0914454277286137e-05, |
|
"loss": 0.7866, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 8.08849557522124, |
|
"grad_norm": 17.90611457824707, |
|
"learning_rate": 1.0816125860373648e-05, |
|
"loss": 0.6607, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 8.106194690265486, |
|
"grad_norm": 17.308259963989258, |
|
"learning_rate": 1.0717797443461161e-05, |
|
"loss": 0.9028, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 8.123893805309734, |
|
"grad_norm": 23.00870704650879, |
|
"learning_rate": 1.0619469026548673e-05, |
|
"loss": 0.6444, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 8.141592920353983, |
|
"grad_norm": 23.513565063476562, |
|
"learning_rate": 1.0521140609636185e-05, |
|
"loss": 0.7422, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 8.15929203539823, |
|
"grad_norm": 22.787883758544922, |
|
"learning_rate": 1.0422812192723698e-05, |
|
"loss": 0.8156, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 8.176991150442477, |
|
"grad_norm": 26.117023468017578, |
|
"learning_rate": 1.032448377581121e-05, |
|
"loss": 0.538, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 8.194690265486726, |
|
"grad_norm": 21.87485122680664, |
|
"learning_rate": 1.0226155358898721e-05, |
|
"loss": 0.6871, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 8.212389380530974, |
|
"grad_norm": 21.752988815307617, |
|
"learning_rate": 1.0127826941986234e-05, |
|
"loss": 0.7181, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 8.230088495575222, |
|
"grad_norm": 24.260828018188477, |
|
"learning_rate": 1.0029498525073746e-05, |
|
"loss": 0.7211, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 8.247787610619469, |
|
"grad_norm": 12.92242431640625, |
|
"learning_rate": 9.93117010816126e-06, |
|
"loss": 0.8347, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 8.265486725663717, |
|
"grad_norm": 16.984403610229492, |
|
"learning_rate": 9.832841691248771e-06, |
|
"loss": 0.7955, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 8.283185840707965, |
|
"grad_norm": 36.490318298339844, |
|
"learning_rate": 9.734513274336284e-06, |
|
"loss": 0.7464, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 8.300884955752213, |
|
"grad_norm": 16.81266975402832, |
|
"learning_rate": 9.636184857423796e-06, |
|
"loss": 0.5796, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 8.31858407079646, |
|
"grad_norm": 14.971821784973145, |
|
"learning_rate": 9.537856440511309e-06, |
|
"loss": 0.6221, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 8.336283185840708, |
|
"grad_norm": 21.590499877929688, |
|
"learning_rate": 9.43952802359882e-06, |
|
"loss": 0.6271, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 8.353982300884956, |
|
"grad_norm": 31.352338790893555, |
|
"learning_rate": 9.341199606686332e-06, |
|
"loss": 0.7592, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 8.371681415929203, |
|
"grad_norm": 23.651779174804688, |
|
"learning_rate": 9.242871189773846e-06, |
|
"loss": 0.6352, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 8.389380530973451, |
|
"grad_norm": 20.58877182006836, |
|
"learning_rate": 9.144542772861357e-06, |
|
"loss": 0.6383, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 8.4070796460177, |
|
"grad_norm": 24.20329475402832, |
|
"learning_rate": 9.046214355948869e-06, |
|
"loss": 0.7669, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 8.424778761061948, |
|
"grad_norm": 18.367490768432617, |
|
"learning_rate": 8.947885939036382e-06, |
|
"loss": 0.6304, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 8.442477876106194, |
|
"grad_norm": 27.963157653808594, |
|
"learning_rate": 8.849557522123894e-06, |
|
"loss": 0.7402, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 8.460176991150442, |
|
"grad_norm": 19.442922592163086, |
|
"learning_rate": 8.751229105211405e-06, |
|
"loss": 0.6278, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 8.47787610619469, |
|
"grad_norm": 13.04371166229248, |
|
"learning_rate": 8.652900688298919e-06, |
|
"loss": 0.7028, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 8.495575221238939, |
|
"grad_norm": 25.378374099731445, |
|
"learning_rate": 8.554572271386432e-06, |
|
"loss": 0.6889, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 8.513274336283185, |
|
"grad_norm": 19.712556838989258, |
|
"learning_rate": 8.456243854473943e-06, |
|
"loss": 0.581, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 8.530973451327434, |
|
"grad_norm": 20.15717315673828, |
|
"learning_rate": 8.357915437561457e-06, |
|
"loss": 0.6877, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 8.548672566371682, |
|
"grad_norm": 24.568340301513672, |
|
"learning_rate": 8.259587020648968e-06, |
|
"loss": 0.5734, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 8.56637168141593, |
|
"grad_norm": 20.580244064331055, |
|
"learning_rate": 8.16125860373648e-06, |
|
"loss": 0.695, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 8.584070796460177, |
|
"grad_norm": 25.639179229736328, |
|
"learning_rate": 8.062930186823993e-06, |
|
"loss": 0.7367, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 8.601769911504425, |
|
"grad_norm": 14.882777214050293, |
|
"learning_rate": 7.964601769911505e-06, |
|
"loss": 0.7085, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 8.619469026548673, |
|
"grad_norm": 25.450916290283203, |
|
"learning_rate": 7.866273352999016e-06, |
|
"loss": 0.7158, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 8.63716814159292, |
|
"grad_norm": 17.252824783325195, |
|
"learning_rate": 7.76794493608653e-06, |
|
"loss": 0.722, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 8.654867256637168, |
|
"grad_norm": 17.026081085205078, |
|
"learning_rate": 7.669616519174041e-06, |
|
"loss": 0.7374, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 8.672566371681416, |
|
"grad_norm": 17.954788208007812, |
|
"learning_rate": 7.571288102261554e-06, |
|
"loss": 0.7943, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 8.690265486725664, |
|
"grad_norm": 14.818877220153809, |
|
"learning_rate": 7.472959685349067e-06, |
|
"loss": 0.687, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 8.70796460176991, |
|
"grad_norm": 14.548257827758789, |
|
"learning_rate": 7.374631268436579e-06, |
|
"loss": 0.6824, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 8.725663716814159, |
|
"grad_norm": 26.486650466918945, |
|
"learning_rate": 7.27630285152409e-06, |
|
"loss": 0.7428, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 8.743362831858407, |
|
"grad_norm": 14.338964462280273, |
|
"learning_rate": 7.1779744346116035e-06, |
|
"loss": 0.6337, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 8.761061946902656, |
|
"grad_norm": 11.065580368041992, |
|
"learning_rate": 7.079646017699115e-06, |
|
"loss": 0.7577, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 8.778761061946902, |
|
"grad_norm": 20.75477409362793, |
|
"learning_rate": 6.9813176007866275e-06, |
|
"loss": 0.8219, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 8.79646017699115, |
|
"grad_norm": 29.077869415283203, |
|
"learning_rate": 6.88298918387414e-06, |
|
"loss": 0.6982, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 8.814159292035399, |
|
"grad_norm": 19.636323928833008, |
|
"learning_rate": 6.784660766961652e-06, |
|
"loss": 0.7256, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 8.831858407079647, |
|
"grad_norm": 34.08713912963867, |
|
"learning_rate": 6.686332350049164e-06, |
|
"loss": 0.6156, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 8.849557522123893, |
|
"grad_norm": 21.24959945678711, |
|
"learning_rate": 6.588003933136677e-06, |
|
"loss": 0.7501, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 8.867256637168142, |
|
"grad_norm": 24.227680206298828, |
|
"learning_rate": 6.489675516224189e-06, |
|
"loss": 0.7819, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 8.88495575221239, |
|
"grad_norm": 19.83379364013672, |
|
"learning_rate": 6.3913470993117005e-06, |
|
"loss": 0.6253, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 8.902654867256636, |
|
"grad_norm": 17.381193161010742, |
|
"learning_rate": 6.293018682399214e-06, |
|
"loss": 0.7019, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 8.920353982300885, |
|
"grad_norm": 19.214319229125977, |
|
"learning_rate": 6.194690265486726e-06, |
|
"loss": 0.7062, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 8.938053097345133, |
|
"grad_norm": 13.928613662719727, |
|
"learning_rate": 6.096361848574239e-06, |
|
"loss": 0.6825, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 8.955752212389381, |
|
"grad_norm": 29.10097885131836, |
|
"learning_rate": 5.99803343166175e-06, |
|
"loss": 0.6878, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 8.973451327433628, |
|
"grad_norm": 24.121362686157227, |
|
"learning_rate": 5.899705014749263e-06, |
|
"loss": 0.7148, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 8.991150442477876, |
|
"grad_norm": 23.926654815673828, |
|
"learning_rate": 5.801376597836775e-06, |
|
"loss": 0.7509, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 0.7565978169441223, |
|
"eval_map": 0.5582, |
|
"eval_map_50": 0.9326, |
|
"eval_map_75": 0.6075, |
|
"eval_map_large": 0.618, |
|
"eval_map_medium": 0.3937, |
|
"eval_map_per_class": 0.5582, |
|
"eval_map_small": 0.025, |
|
"eval_mar_1": 0.2923, |
|
"eval_mar_10": 0.6329, |
|
"eval_mar_100": 0.697, |
|
"eval_mar_100_per_class": 0.697, |
|
"eval_mar_large": 0.7497, |
|
"eval_mar_medium": 0.5641, |
|
"eval_mar_small": 0.3, |
|
"eval_runtime": 3.7908, |
|
"eval_samples_per_second": 26.38, |
|
"eval_steps_per_second": 1.847, |
|
"step": 1017 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 1130, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.01 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.3929164854288e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|