|
{ |
|
"best_metric": 1.4606529474258423, |
|
"best_model_checkpoint": "mit-b0-finetuned-human-parsing-dataset/checkpoint-1000", |
|
"epoch": 10.0, |
|
"eval_steps": 20, |
|
"global_step": 1000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.234813690185547, |
|
"learning_rate": 9.990000000000001e-06, |
|
"loss": 2.9544, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.482877254486084, |
|
"learning_rate": 9.980000000000001e-06, |
|
"loss": 2.9782, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.2303290367126465, |
|
"learning_rate": 9.970000000000001e-06, |
|
"loss": 2.9407, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.319561004638672, |
|
"learning_rate": 9.960000000000001e-06, |
|
"loss": 2.957, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.1521172523498535, |
|
"learning_rate": 9.950000000000001e-06, |
|
"loss": 2.9248, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.158083438873291, |
|
"learning_rate": 9.940000000000001e-06, |
|
"loss": 2.917, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.390671253204346, |
|
"learning_rate": 9.930000000000001e-06, |
|
"loss": 2.9315, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 4.232095718383789, |
|
"learning_rate": 9.920000000000002e-06, |
|
"loss": 2.8976, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.352518081665039, |
|
"learning_rate": 9.91e-06, |
|
"loss": 2.9013, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.207855701446533, |
|
"learning_rate": 9.9e-06, |
|
"loss": 2.9322, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.342838764190674, |
|
"learning_rate": 9.89e-06, |
|
"loss": 2.9101, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.032674789428711, |
|
"learning_rate": 9.88e-06, |
|
"loss": 2.895, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.308503150939941, |
|
"learning_rate": 9.87e-06, |
|
"loss": 2.8912, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.204571723937988, |
|
"learning_rate": 9.86e-06, |
|
"loss": 2.9022, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.331779956817627, |
|
"learning_rate": 9.85e-06, |
|
"loss": 2.8694, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.142786026000977, |
|
"learning_rate": 9.84e-06, |
|
"loss": 2.8598, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.46060848236084, |
|
"learning_rate": 9.83e-06, |
|
"loss": 2.877, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.331510066986084, |
|
"learning_rate": 9.820000000000001e-06, |
|
"loss": 2.8599, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.420529365539551, |
|
"learning_rate": 9.810000000000001e-06, |
|
"loss": 2.8277, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.404209613800049, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 2.8373, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.0589728355407715, |
|
"learning_rate": 9.790000000000001e-06, |
|
"loss": 2.8069, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.358518600463867, |
|
"learning_rate": 9.780000000000001e-06, |
|
"loss": 2.8124, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.316899299621582, |
|
"learning_rate": 9.770000000000001e-06, |
|
"loss": 2.8338, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.143735408782959, |
|
"learning_rate": 9.760000000000001e-06, |
|
"loss": 2.824, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 4.233428478240967, |
|
"learning_rate": 9.75e-06, |
|
"loss": 2.7872, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 4.530389308929443, |
|
"learning_rate": 9.74e-06, |
|
"loss": 2.8217, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 4.209986686706543, |
|
"learning_rate": 9.73e-06, |
|
"loss": 2.7753, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 4.464058876037598, |
|
"learning_rate": 9.72e-06, |
|
"loss": 2.7692, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 4.205682754516602, |
|
"learning_rate": 9.71e-06, |
|
"loss": 2.7632, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 4.321238994598389, |
|
"learning_rate": 9.7e-06, |
|
"loss": 2.7516, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 4.201343059539795, |
|
"learning_rate": 9.69e-06, |
|
"loss": 2.7675, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 4.461489677429199, |
|
"learning_rate": 9.68e-06, |
|
"loss": 2.7395, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.33034610748291, |
|
"learning_rate": 9.67e-06, |
|
"loss": 2.7237, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 4.418294906616211, |
|
"learning_rate": 9.66e-06, |
|
"loss": 2.741, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 4.152261257171631, |
|
"learning_rate": 9.65e-06, |
|
"loss": 2.7291, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 4.155457496643066, |
|
"learning_rate": 9.640000000000001e-06, |
|
"loss": 2.689, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 4.235340118408203, |
|
"learning_rate": 9.630000000000001e-06, |
|
"loss": 2.6799, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 4.320679664611816, |
|
"learning_rate": 9.620000000000001e-06, |
|
"loss": 2.724, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 4.223893165588379, |
|
"learning_rate": 9.610000000000001e-06, |
|
"loss": 2.7029, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 4.128443241119385, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 2.6838, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.292538166046143, |
|
"learning_rate": 9.59e-06, |
|
"loss": 2.6862, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 4.063634395599365, |
|
"learning_rate": 9.58e-06, |
|
"loss": 2.6165, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.104532241821289, |
|
"learning_rate": 9.57e-06, |
|
"loss": 2.6584, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.567317962646484, |
|
"learning_rate": 9.56e-06, |
|
"loss": 2.7009, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.2191853523254395, |
|
"learning_rate": 9.55e-06, |
|
"loss": 2.6215, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.206693172454834, |
|
"learning_rate": 9.54e-06, |
|
"loss": 2.6121, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.210999965667725, |
|
"learning_rate": 9.53e-06, |
|
"loss": 2.6421, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.174373626708984, |
|
"learning_rate": 9.52e-06, |
|
"loss": 2.6417, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.117812633514404, |
|
"learning_rate": 9.51e-06, |
|
"loss": 2.6148, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 4.169362545013428, |
|
"learning_rate": 9.5e-06, |
|
"loss": 2.6173, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 4.231525421142578, |
|
"learning_rate": 9.49e-06, |
|
"loss": 2.5996, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.312726974487305, |
|
"learning_rate": 9.48e-06, |
|
"loss": 2.6272, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.259713172912598, |
|
"learning_rate": 9.47e-06, |
|
"loss": 2.5771, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 4.279856204986572, |
|
"learning_rate": 9.460000000000001e-06, |
|
"loss": 2.5871, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.321524143218994, |
|
"learning_rate": 9.450000000000001e-06, |
|
"loss": 2.5957, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 4.146280765533447, |
|
"learning_rate": 9.440000000000001e-06, |
|
"loss": 2.5519, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 4.274604797363281, |
|
"learning_rate": 9.43e-06, |
|
"loss": 2.5572, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 4.1859660148620605, |
|
"learning_rate": 9.42e-06, |
|
"loss": 2.5933, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.148138046264648, |
|
"learning_rate": 9.41e-06, |
|
"loss": 2.5072, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 4.229477882385254, |
|
"learning_rate": 9.4e-06, |
|
"loss": 2.559, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 4.231686115264893, |
|
"learning_rate": 9.39e-06, |
|
"loss": 2.5582, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 4.0880937576293945, |
|
"learning_rate": 9.38e-06, |
|
"loss": 2.5129, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 4.442220687866211, |
|
"learning_rate": 9.370000000000002e-06, |
|
"loss": 2.5395, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 4.162861347198486, |
|
"learning_rate": 9.360000000000002e-06, |
|
"loss": 2.5599, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.077202320098877, |
|
"learning_rate": 9.350000000000002e-06, |
|
"loss": 2.4959, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 4.41274356842041, |
|
"learning_rate": 9.340000000000002e-06, |
|
"loss": 2.5087, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.205565452575684, |
|
"learning_rate": 9.33e-06, |
|
"loss": 2.4967, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.4488301277160645, |
|
"learning_rate": 9.32e-06, |
|
"loss": 2.5008, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 4.2641215324401855, |
|
"learning_rate": 9.31e-06, |
|
"loss": 2.4962, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.122101783752441, |
|
"learning_rate": 9.3e-06, |
|
"loss": 2.4618, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 4.225414752960205, |
|
"learning_rate": 9.29e-06, |
|
"loss": 2.4764, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.134582996368408, |
|
"learning_rate": 9.280000000000001e-06, |
|
"loss": 2.4755, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.329803466796875, |
|
"learning_rate": 9.270000000000001e-06, |
|
"loss": 2.451, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 4.251955986022949, |
|
"learning_rate": 9.260000000000001e-06, |
|
"loss": 2.4543, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 4.2452874183654785, |
|
"learning_rate": 9.250000000000001e-06, |
|
"loss": 2.4764, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.201510429382324, |
|
"learning_rate": 9.240000000000001e-06, |
|
"loss": 2.4099, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.222127914428711, |
|
"learning_rate": 9.230000000000001e-06, |
|
"loss": 2.4549, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.239346504211426, |
|
"learning_rate": 9.220000000000002e-06, |
|
"loss": 2.401, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.115464210510254, |
|
"learning_rate": 9.210000000000002e-06, |
|
"loss": 2.4632, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 4.2422380447387695, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 2.4088, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.1780829429626465, |
|
"learning_rate": 9.190000000000002e-06, |
|
"loss": 2.4062, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 4.678599834442139, |
|
"learning_rate": 9.180000000000002e-06, |
|
"loss": 2.4069, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.137949466705322, |
|
"learning_rate": 9.17e-06, |
|
"loss": 2.4054, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.363401412963867, |
|
"learning_rate": 9.16e-06, |
|
"loss": 2.389, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 4.235044956207275, |
|
"learning_rate": 9.15e-06, |
|
"loss": 2.4167, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.4384026527404785, |
|
"learning_rate": 9.14e-06, |
|
"loss": 2.387, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.342342376708984, |
|
"learning_rate": 9.13e-06, |
|
"loss": 2.3607, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.32953405380249, |
|
"learning_rate": 9.12e-06, |
|
"loss": 2.4294, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.2945332527160645, |
|
"learning_rate": 9.110000000000001e-06, |
|
"loss": 2.3819, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.32944917678833, |
|
"learning_rate": 9.100000000000001e-06, |
|
"loss": 2.425, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 4.604047775268555, |
|
"learning_rate": 9.090000000000001e-06, |
|
"loss": 2.305, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.261952877044678, |
|
"learning_rate": 9.080000000000001e-06, |
|
"loss": 2.4083, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.248727321624756, |
|
"learning_rate": 9.070000000000001e-06, |
|
"loss": 2.3476, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.361407279968262, |
|
"learning_rate": 9.060000000000001e-06, |
|
"loss": 2.3573, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 4.175127029418945, |
|
"learning_rate": 9.050000000000001e-06, |
|
"loss": 2.3338, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.134150981903076, |
|
"learning_rate": 9.040000000000002e-06, |
|
"loss": 2.2981, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.137899875640869, |
|
"learning_rate": 9.030000000000002e-06, |
|
"loss": 2.3634, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.461462497711182, |
|
"learning_rate": 9.020000000000002e-06, |
|
"loss": 2.3432, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.128418445587158, |
|
"learning_rate": 9.01e-06, |
|
"loss": 2.36, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.3132243156433105, |
|
"learning_rate": 9e-06, |
|
"loss": 2.3018, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0004242982626285018, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0, |
|
"eval_accuracy_Face": 0.0, |
|
"eval_accuracy_Hair": 0.7491044848017727, |
|
"eval_accuracy_Hat": 0.012600869543447964, |
|
"eval_accuracy_Left-arm": 0.000600905527840321, |
|
"eval_accuracy_Left-leg": 0.168196153470596, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.4504217042326877, |
|
"eval_accuracy_Right-arm": 0.09564491616214466, |
|
"eval_accuracy_Right-leg": 0.6099398212539764, |
|
"eval_accuracy_Right-shoe": 0.06388818074631146, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.0963603056555026, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.5957944559486313, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.00042370463117808913, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.0, |
|
"eval_iou_Face": 0.0, |
|
"eval_iou_Hair": 0.44102861090675605, |
|
"eval_iou_Hat": 0.007479061105415137, |
|
"eval_iou_Left-arm": 0.0005830695637687712, |
|
"eval_iou_Left-leg": 0.08776529416444462, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.24111432727486445, |
|
"eval_iou_Right-arm": 0.04182582831243258, |
|
"eval_iou_Right-leg": 0.1302883591842858, |
|
"eval_iou_Right-shoe": 0.05600698384984723, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.07954996199139007, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.34940081904561693, |
|
"eval_loss": 2.3396735191345215, |
|
"eval_mean_accuracy": 0.16723388797679645, |
|
"eval_mean_iou": 0.07974811222388889, |
|
"eval_overall_accuracy": 0.33045138702368393, |
|
"eval_runtime": 22.3961, |
|
"eval_samples_per_second": 8.93, |
|
"eval_steps_per_second": 4.465, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 4.286224365234375, |
|
"learning_rate": 8.99e-06, |
|
"loss": 2.3669, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 4.451814651489258, |
|
"learning_rate": 8.98e-06, |
|
"loss": 2.3646, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 4.178186416625977, |
|
"learning_rate": 8.97e-06, |
|
"loss": 2.2924, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 4.435781955718994, |
|
"learning_rate": 8.96e-06, |
|
"loss": 2.3132, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 4.4983649253845215, |
|
"learning_rate": 8.95e-06, |
|
"loss": 2.3042, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 4.294534206390381, |
|
"learning_rate": 8.94e-06, |
|
"loss": 2.2876, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 4.1910400390625, |
|
"learning_rate": 8.930000000000001e-06, |
|
"loss": 2.2721, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 4.531942844390869, |
|
"learning_rate": 8.920000000000001e-06, |
|
"loss": 2.3367, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 4.372553825378418, |
|
"learning_rate": 8.910000000000001e-06, |
|
"loss": 2.27, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 4.451037406921387, |
|
"learning_rate": 8.900000000000001e-06, |
|
"loss": 2.316, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 4.161291122436523, |
|
"learning_rate": 8.890000000000001e-06, |
|
"loss": 2.2651, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 4.308383941650391, |
|
"learning_rate": 8.880000000000001e-06, |
|
"loss": 2.282, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 4.404356479644775, |
|
"learning_rate": 8.870000000000001e-06, |
|
"loss": 2.3226, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 4.139420509338379, |
|
"learning_rate": 8.860000000000002e-06, |
|
"loss": 2.2672, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 4.047986030578613, |
|
"learning_rate": 8.85e-06, |
|
"loss": 2.2245, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 4.221693515777588, |
|
"learning_rate": 8.84e-06, |
|
"loss": 2.2595, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 4.234687328338623, |
|
"learning_rate": 8.83e-06, |
|
"loss": 2.2448, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 4.4049577713012695, |
|
"learning_rate": 8.82e-06, |
|
"loss": 2.2624, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 4.35460901260376, |
|
"learning_rate": 8.81e-06, |
|
"loss": 2.3176, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 4.06089448928833, |
|
"learning_rate": 8.8e-06, |
|
"loss": 2.2436, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 4.414927959442139, |
|
"learning_rate": 8.79e-06, |
|
"loss": 2.3054, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 4.067784309387207, |
|
"learning_rate": 8.78e-06, |
|
"loss": 2.196, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 4.225794315338135, |
|
"learning_rate": 8.77e-06, |
|
"loss": 2.2262, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 4.199454307556152, |
|
"learning_rate": 8.76e-06, |
|
"loss": 2.2372, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 4.410147666931152, |
|
"learning_rate": 8.750000000000001e-06, |
|
"loss": 2.2219, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 4.723634719848633, |
|
"learning_rate": 8.740000000000001e-06, |
|
"loss": 2.2056, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 4.29050874710083, |
|
"learning_rate": 8.730000000000001e-06, |
|
"loss": 2.2307, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 4.196619510650635, |
|
"learning_rate": 8.720000000000001e-06, |
|
"loss": 2.1879, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 4.161979675292969, |
|
"learning_rate": 8.710000000000001e-06, |
|
"loss": 2.2397, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 4.27070426940918, |
|
"learning_rate": 8.700000000000001e-06, |
|
"loss": 2.1536, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.183769226074219, |
|
"learning_rate": 8.690000000000002e-06, |
|
"loss": 2.2254, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 4.2046732902526855, |
|
"learning_rate": 8.68e-06, |
|
"loss": 2.1642, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 4.474245548248291, |
|
"learning_rate": 8.67e-06, |
|
"loss": 2.2762, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 4.33955192565918, |
|
"learning_rate": 8.66e-06, |
|
"loss": 2.1745, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 4.123064041137695, |
|
"learning_rate": 8.65e-06, |
|
"loss": 2.1694, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 4.026848316192627, |
|
"learning_rate": 8.64e-06, |
|
"loss": 2.1587, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 4.1834588050842285, |
|
"learning_rate": 8.63e-06, |
|
"loss": 2.2318, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 5.340048789978027, |
|
"learning_rate": 8.62e-06, |
|
"loss": 2.1738, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 4.085732936859131, |
|
"learning_rate": 8.61e-06, |
|
"loss": 2.1923, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 4.373236656188965, |
|
"learning_rate": 8.6e-06, |
|
"loss": 2.1845, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 4.328385353088379, |
|
"learning_rate": 8.59e-06, |
|
"loss": 2.1633, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 4.406303882598877, |
|
"learning_rate": 8.580000000000001e-06, |
|
"loss": 2.1604, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.517782688140869, |
|
"learning_rate": 8.570000000000001e-06, |
|
"loss": 2.1907, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 4.278939247131348, |
|
"learning_rate": 8.560000000000001e-06, |
|
"loss": 2.1701, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 4.1413469314575195, |
|
"learning_rate": 8.550000000000001e-06, |
|
"loss": 2.1394, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 4.0532026290893555, |
|
"learning_rate": 8.540000000000001e-06, |
|
"loss": 2.1431, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 4.254635810852051, |
|
"learning_rate": 8.530000000000001e-06, |
|
"loss": 2.2317, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 4.055116176605225, |
|
"learning_rate": 8.52e-06, |
|
"loss": 2.1935, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 4.118351936340332, |
|
"learning_rate": 8.51e-06, |
|
"loss": 2.1636, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 4.374722003936768, |
|
"learning_rate": 8.5e-06, |
|
"loss": 2.1559, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 4.5501933097839355, |
|
"learning_rate": 8.49e-06, |
|
"loss": 2.1764, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 4.26231575012207, |
|
"learning_rate": 8.48e-06, |
|
"loss": 2.1498, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 7.63175106048584, |
|
"learning_rate": 8.47e-06, |
|
"loss": 2.1612, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 3.973308563232422, |
|
"learning_rate": 8.46e-06, |
|
"loss": 2.1384, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 4.09283447265625, |
|
"learning_rate": 8.45e-06, |
|
"loss": 2.1002, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 4.324411392211914, |
|
"learning_rate": 8.44e-06, |
|
"loss": 2.0922, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 4.172537803649902, |
|
"learning_rate": 8.43e-06, |
|
"loss": 2.1144, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 5.155778408050537, |
|
"learning_rate": 8.42e-06, |
|
"loss": 2.2074, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 4.4674296379089355, |
|
"learning_rate": 8.41e-06, |
|
"loss": 2.0831, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 4.196486949920654, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 2.1758, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 4.47146463394165, |
|
"learning_rate": 8.390000000000001e-06, |
|
"loss": 2.1384, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 4.0660200119018555, |
|
"learning_rate": 8.380000000000001e-06, |
|
"loss": 2.1295, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 4.404181480407715, |
|
"learning_rate": 8.370000000000001e-06, |
|
"loss": 2.1443, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 4.747539520263672, |
|
"learning_rate": 8.36e-06, |
|
"loss": 2.1839, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 4.183701992034912, |
|
"learning_rate": 8.35e-06, |
|
"loss": 2.1427, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 4.322850227355957, |
|
"learning_rate": 8.34e-06, |
|
"loss": 2.1349, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 4.506062984466553, |
|
"learning_rate": 8.33e-06, |
|
"loss": 2.1562, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 4.082846164703369, |
|
"learning_rate": 8.32e-06, |
|
"loss": 2.1612, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 4.112331867218018, |
|
"learning_rate": 8.31e-06, |
|
"loss": 2.0689, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 4.14832878112793, |
|
"learning_rate": 8.3e-06, |
|
"loss": 2.0736, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 4.264697074890137, |
|
"learning_rate": 8.29e-06, |
|
"loss": 2.1141, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 4.074257850646973, |
|
"learning_rate": 8.28e-06, |
|
"loss": 2.1276, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 4.264401435852051, |
|
"learning_rate": 8.27e-06, |
|
"loss": 2.1429, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 4.2053117752075195, |
|
"learning_rate": 8.26e-06, |
|
"loss": 2.1004, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 4.097294330596924, |
|
"learning_rate": 8.25e-06, |
|
"loss": 2.0208, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 4.364256381988525, |
|
"learning_rate": 8.24e-06, |
|
"loss": 2.0695, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 4.236232280731201, |
|
"learning_rate": 8.23e-06, |
|
"loss": 2.1557, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 4.267558574676514, |
|
"learning_rate": 8.220000000000001e-06, |
|
"loss": 2.0741, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 4.059826850891113, |
|
"learning_rate": 8.210000000000001e-06, |
|
"loss": 2.0801, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 3.9970080852508545, |
|
"learning_rate": 8.2e-06, |
|
"loss": 2.0269, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 4.119307041168213, |
|
"learning_rate": 8.19e-06, |
|
"loss": 2.0623, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 4.0061163902282715, |
|
"learning_rate": 8.18e-06, |
|
"loss": 2.0713, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 4.1727070808410645, |
|
"learning_rate": 8.17e-06, |
|
"loss": 2.0429, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 4.1692728996276855, |
|
"learning_rate": 8.16e-06, |
|
"loss": 2.1827, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 4.165792465209961, |
|
"learning_rate": 8.15e-06, |
|
"loss": 2.0486, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 4.03242826461792, |
|
"learning_rate": 8.14e-06, |
|
"loss": 2.0572, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 4.288402080535889, |
|
"learning_rate": 8.13e-06, |
|
"loss": 2.0689, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 4.761503219604492, |
|
"learning_rate": 8.120000000000002e-06, |
|
"loss": 2.0299, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 3.990095615386963, |
|
"learning_rate": 8.110000000000002e-06, |
|
"loss": 2.0568, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 4.078096389770508, |
|
"learning_rate": 8.1e-06, |
|
"loss": 2.0691, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 4.596857070922852, |
|
"learning_rate": 8.09e-06, |
|
"loss": 2.125, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 4.260069847106934, |
|
"learning_rate": 8.08e-06, |
|
"loss": 2.081, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 4.099155426025391, |
|
"learning_rate": 8.07e-06, |
|
"loss": 2.0735, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 4.057877063751221, |
|
"learning_rate": 8.06e-06, |
|
"loss": 2.0659, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 5.057287216186523, |
|
"learning_rate": 8.050000000000001e-06, |
|
"loss": 2.0161, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 4.245755195617676, |
|
"learning_rate": 8.040000000000001e-06, |
|
"loss": 2.106, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 4.133609294891357, |
|
"learning_rate": 8.030000000000001e-06, |
|
"loss": 2.1208, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 4.171890735626221, |
|
"learning_rate": 8.020000000000001e-06, |
|
"loss": 2.0807, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 4.291568756103516, |
|
"learning_rate": 8.010000000000001e-06, |
|
"loss": 2.108, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 4.7984395027160645, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.9921, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0001524010086175843, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0, |
|
"eval_accuracy_Face": 0.0, |
|
"eval_accuracy_Hair": 0.8151702875861072, |
|
"eval_accuracy_Hat": 0.0008511845651865513, |
|
"eval_accuracy_Left-arm": 0.0, |
|
"eval_accuracy_Left-leg": 0.035928236033761204, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.5551989602697041, |
|
"eval_accuracy_Right-arm": 0.12874654308991704, |
|
"eval_accuracy_Right-leg": 0.5839644986711101, |
|
"eval_accuracy_Right-shoe": 0.032396195149931487, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.06263091441208025, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.8937049089042407, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.00015239625797915636, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.0, |
|
"eval_iou_Face": 0.0, |
|
"eval_iou_Hair": 0.46177313952206467, |
|
"eval_iou_Hat": 0.0008418064175359831, |
|
"eval_iou_Left-arm": 0.0, |
|
"eval_iou_Left-leg": 0.03438585676084672, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.2898815955791097, |
|
"eval_iou_Right-arm": 0.08055891596617695, |
|
"eval_iou_Right-leg": 0.27754043507072557, |
|
"eval_iou_Right-shoe": 0.0303856772983535, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.05422349063260212, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.4120677337941092, |
|
"eval_loss": 2.011120557785034, |
|
"eval_mean_accuracy": 0.18286730174650923, |
|
"eval_mean_iou": 0.09121172484997243, |
|
"eval_overall_accuracy": 0.41548018981977597, |
|
"eval_runtime": 20.8064, |
|
"eval_samples_per_second": 9.612, |
|
"eval_steps_per_second": 4.806, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 4.045700550079346, |
|
"learning_rate": 7.990000000000001e-06, |
|
"loss": 1.9668, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 4.148788928985596, |
|
"learning_rate": 7.980000000000002e-06, |
|
"loss": 2.0369, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 4.063356876373291, |
|
"learning_rate": 7.970000000000002e-06, |
|
"loss": 1.9951, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 4.149481296539307, |
|
"learning_rate": 7.960000000000002e-06, |
|
"loss": 1.9882, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 4.301010608673096, |
|
"learning_rate": 7.950000000000002e-06, |
|
"loss": 2.0667, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 4.157419681549072, |
|
"learning_rate": 7.94e-06, |
|
"loss": 2.0454, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 5.0409135818481445, |
|
"learning_rate": 7.93e-06, |
|
"loss": 2.0474, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 3.9728503227233887, |
|
"learning_rate": 7.92e-06, |
|
"loss": 2.0432, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 3.9620349407196045, |
|
"learning_rate": 7.91e-06, |
|
"loss": 2.0618, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 4.344201564788818, |
|
"learning_rate": 7.9e-06, |
|
"loss": 2.0413, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 3.976184606552124, |
|
"learning_rate": 7.89e-06, |
|
"loss": 2.0701, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 4.014520168304443, |
|
"learning_rate": 7.88e-06, |
|
"loss": 2.0095, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 4.018935203552246, |
|
"learning_rate": 7.870000000000001e-06, |
|
"loss": 2.02, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 4.141693115234375, |
|
"learning_rate": 7.860000000000001e-06, |
|
"loss": 2.0509, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 4.22378396987915, |
|
"learning_rate": 7.850000000000001e-06, |
|
"loss": 2.0261, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 4.042500019073486, |
|
"learning_rate": 7.840000000000001e-06, |
|
"loss": 2.0405, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 4.180110931396484, |
|
"learning_rate": 7.830000000000001e-06, |
|
"loss": 2.0558, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 4.0605573654174805, |
|
"learning_rate": 7.820000000000001e-06, |
|
"loss": 2.0333, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 4.012240886688232, |
|
"learning_rate": 7.810000000000001e-06, |
|
"loss": 1.9888, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 4.105309009552002, |
|
"learning_rate": 7.800000000000002e-06, |
|
"loss": 2.0076, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 3.9832894802093506, |
|
"learning_rate": 7.790000000000002e-06, |
|
"loss": 2.0743, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 4.073923587799072, |
|
"learning_rate": 7.78e-06, |
|
"loss": 2.0394, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 4.040003776550293, |
|
"learning_rate": 7.77e-06, |
|
"loss": 1.9874, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 4.035463333129883, |
|
"learning_rate": 7.76e-06, |
|
"loss": 1.9818, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 4.258572578430176, |
|
"learning_rate": 7.75e-06, |
|
"loss": 2.0686, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 4.1776299476623535, |
|
"learning_rate": 7.74e-06, |
|
"loss": 2.0114, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 4.2493720054626465, |
|
"learning_rate": 7.73e-06, |
|
"loss": 1.9849, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 4.026092052459717, |
|
"learning_rate": 7.72e-06, |
|
"loss": 2.0081, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 4.408401966094971, |
|
"learning_rate": 7.71e-06, |
|
"loss": 1.9841, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 4.104363918304443, |
|
"learning_rate": 7.7e-06, |
|
"loss": 1.9689, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 4.193114757537842, |
|
"learning_rate": 7.690000000000001e-06, |
|
"loss": 1.9288, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 4.143477916717529, |
|
"learning_rate": 7.680000000000001e-06, |
|
"loss": 1.9632, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 4.406826019287109, |
|
"learning_rate": 7.670000000000001e-06, |
|
"loss": 1.9716, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 4.276412010192871, |
|
"learning_rate": 7.660000000000001e-06, |
|
"loss": 1.9888, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 4.386712074279785, |
|
"learning_rate": 7.650000000000001e-06, |
|
"loss": 1.9709, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 4.155978202819824, |
|
"learning_rate": 7.640000000000001e-06, |
|
"loss": 2.0018, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 4.175647258758545, |
|
"learning_rate": 7.630000000000001e-06, |
|
"loss": 2.0384, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 4.876473426818848, |
|
"learning_rate": 7.620000000000001e-06, |
|
"loss": 1.9503, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 3.9242897033691406, |
|
"learning_rate": 7.610000000000001e-06, |
|
"loss": 2.001, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 4.103695869445801, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 1.9621, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 4.3203125, |
|
"learning_rate": 7.590000000000001e-06, |
|
"loss": 1.9524, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 4.360772132873535, |
|
"learning_rate": 7.58e-06, |
|
"loss": 2.0937, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 4.1674957275390625, |
|
"learning_rate": 7.57e-06, |
|
"loss": 1.96, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 4.108003616333008, |
|
"learning_rate": 7.5600000000000005e-06, |
|
"loss": 1.9627, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 4.0955963134765625, |
|
"learning_rate": 7.5500000000000006e-06, |
|
"loss": 1.9612, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 4.028230667114258, |
|
"learning_rate": 7.540000000000001e-06, |
|
"loss": 2.0314, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 4.203660011291504, |
|
"learning_rate": 7.530000000000001e-06, |
|
"loss": 1.936, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 4.051420211791992, |
|
"learning_rate": 7.520000000000001e-06, |
|
"loss": 1.9686, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 4.281933307647705, |
|
"learning_rate": 7.510000000000001e-06, |
|
"loss": 2.0066, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 4.1710405349731445, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.9595, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 4.02245569229126, |
|
"learning_rate": 7.49e-06, |
|
"loss": 1.924, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 4.0477070808410645, |
|
"learning_rate": 7.48e-06, |
|
"loss": 1.8846, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 3.970536470413208, |
|
"learning_rate": 7.4700000000000005e-06, |
|
"loss": 1.9435, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 4.245212554931641, |
|
"learning_rate": 7.4600000000000006e-06, |
|
"loss": 2.0053, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 4.036355018615723, |
|
"learning_rate": 7.450000000000001e-06, |
|
"loss": 1.9436, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 4.107367038726807, |
|
"learning_rate": 7.440000000000001e-06, |
|
"loss": 1.8638, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 4.085211753845215, |
|
"learning_rate": 7.430000000000001e-06, |
|
"loss": 1.9129, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 4.601454734802246, |
|
"learning_rate": 7.420000000000001e-06, |
|
"loss": 1.939, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 4.423075199127197, |
|
"learning_rate": 7.41e-06, |
|
"loss": 1.9247, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 4.0297040939331055, |
|
"learning_rate": 7.4e-06, |
|
"loss": 1.9338, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 4.133910179138184, |
|
"learning_rate": 7.39e-06, |
|
"loss": 1.9503, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 3.9908201694488525, |
|
"learning_rate": 7.3800000000000005e-06, |
|
"loss": 1.9911, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 4.061481475830078, |
|
"learning_rate": 7.370000000000001e-06, |
|
"loss": 1.9758, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 3.9681100845336914, |
|
"learning_rate": 7.360000000000001e-06, |
|
"loss": 1.9507, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 4.01893424987793, |
|
"learning_rate": 7.350000000000001e-06, |
|
"loss": 1.9537, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 4.067238807678223, |
|
"learning_rate": 7.340000000000001e-06, |
|
"loss": 1.9459, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 3.9909346103668213, |
|
"learning_rate": 7.33e-06, |
|
"loss": 1.9526, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 3.92202091217041, |
|
"learning_rate": 7.32e-06, |
|
"loss": 1.9245, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 4.3507866859436035, |
|
"learning_rate": 7.31e-06, |
|
"loss": 2.0039, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 4.007924556732178, |
|
"learning_rate": 7.3e-06, |
|
"loss": 1.9026, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 4.544615745544434, |
|
"learning_rate": 7.2900000000000005e-06, |
|
"loss": 1.8988, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 4.161890983581543, |
|
"learning_rate": 7.280000000000001e-06, |
|
"loss": 1.8955, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 4.098465442657471, |
|
"learning_rate": 7.270000000000001e-06, |
|
"loss": 1.9041, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 4.141757011413574, |
|
"learning_rate": 7.260000000000001e-06, |
|
"loss": 1.8529, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 3.9224603176116943, |
|
"learning_rate": 7.25e-06, |
|
"loss": 1.9443, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 4.068088531494141, |
|
"learning_rate": 7.24e-06, |
|
"loss": 1.921, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 4.020340442657471, |
|
"learning_rate": 7.23e-06, |
|
"loss": 1.9318, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 3.9954025745391846, |
|
"learning_rate": 7.22e-06, |
|
"loss": 1.8985, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 4.023241996765137, |
|
"learning_rate": 7.2100000000000004e-06, |
|
"loss": 1.861, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 4.951297283172607, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 1.9629, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 4.812080383300781, |
|
"learning_rate": 7.190000000000001e-06, |
|
"loss": 1.8649, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 4.293552875518799, |
|
"learning_rate": 7.180000000000001e-06, |
|
"loss": 1.8988, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 4.053041934967041, |
|
"learning_rate": 7.17e-06, |
|
"loss": 1.8989, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 4.39008092880249, |
|
"learning_rate": 7.16e-06, |
|
"loss": 1.9082, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 3.992619752883911, |
|
"learning_rate": 7.15e-06, |
|
"loss": 1.9064, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 4.384286403656006, |
|
"learning_rate": 7.14e-06, |
|
"loss": 1.9225, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 4.078967571258545, |
|
"learning_rate": 7.13e-06, |
|
"loss": 2.0009, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 4.019415378570557, |
|
"learning_rate": 7.1200000000000004e-06, |
|
"loss": 1.8821, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 4.355335712432861, |
|
"learning_rate": 7.1100000000000005e-06, |
|
"loss": 1.9277, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 4.152808666229248, |
|
"learning_rate": 7.100000000000001e-06, |
|
"loss": 1.9106, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 4.32996129989624, |
|
"learning_rate": 7.09e-06, |
|
"loss": 2.011, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 3.9992029666900635, |
|
"learning_rate": 7.08e-06, |
|
"loss": 1.8785, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 4.624423980712891, |
|
"learning_rate": 7.07e-06, |
|
"loss": 1.9402, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 3.9507522583007812, |
|
"learning_rate": 7.06e-06, |
|
"loss": 1.8818, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 4.057668685913086, |
|
"learning_rate": 7.05e-06, |
|
"loss": 1.9491, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 3.9667344093322754, |
|
"learning_rate": 7.04e-06, |
|
"loss": 1.8763, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 3.984171152114868, |
|
"learning_rate": 7.0300000000000005e-06, |
|
"loss": 1.8966, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 4.4407219886779785, |
|
"learning_rate": 7.0200000000000006e-06, |
|
"loss": 1.8848, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 3.9947664737701416, |
|
"learning_rate": 7.01e-06, |
|
"loss": 1.8761, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 3.957763195037842, |
|
"learning_rate": 7e-06, |
|
"loss": 1.9356, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.000216478705422705, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0, |
|
"eval_accuracy_Face": 0.13377628981028414, |
|
"eval_accuracy_Hair": 0.8592629702779517, |
|
"eval_accuracy_Hat": 0.00011682925404521292, |
|
"eval_accuracy_Left-arm": 0.0008633418195909919, |
|
"eval_accuracy_Left-leg": 0.38908636789568085, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.6946684315731616, |
|
"eval_accuracy_Right-arm": 0.28640904900715053, |
|
"eval_accuracy_Right-leg": 0.5265330432267926, |
|
"eval_accuracy_Right-shoe": 0.0318903157961824, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.08470603404443731, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.8534177875998399, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.00021643634953258406, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.0, |
|
"eval_iou_Face": 0.13214727880196056, |
|
"eval_iou_Hair": 0.4867109389794084, |
|
"eval_iou_Hat": 0.00011654623555659152, |
|
"eval_iou_Left-arm": 0.0008633121756447862, |
|
"eval_iou_Left-leg": 0.3196635712898404, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.33922237684628787, |
|
"eval_iou_Right-arm": 0.17098812441990527, |
|
"eval_iou_Right-leg": 0.3099714533845017, |
|
"eval_iou_Right-shoe": 0.02950889763663438, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.07372372737984706, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.4317668477897235, |
|
"eval_loss": 1.8783364295959473, |
|
"eval_mean_accuracy": 0.22711452582414945, |
|
"eval_mean_iou": 0.12749441729382463, |
|
"eval_overall_accuracy": 0.45668173275369917, |
|
"eval_runtime": 20.4818, |
|
"eval_samples_per_second": 9.765, |
|
"eval_steps_per_second": 4.882, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 4.003926753997803, |
|
"learning_rate": 6.99e-06, |
|
"loss": 1.8087, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"grad_norm": 5.012253761291504, |
|
"learning_rate": 6.98e-06, |
|
"loss": 1.933, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"grad_norm": 4.140829563140869, |
|
"learning_rate": 6.97e-06, |
|
"loss": 1.8835, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 4.647022724151611, |
|
"learning_rate": 6.96e-06, |
|
"loss": 1.9417, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"grad_norm": 4.209743499755859, |
|
"learning_rate": 6.95e-06, |
|
"loss": 1.9871, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 4.242799282073975, |
|
"learning_rate": 6.9400000000000005e-06, |
|
"loss": 1.9349, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 4.059386253356934, |
|
"learning_rate": 6.93e-06, |
|
"loss": 1.8512, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"grad_norm": 4.615259170532227, |
|
"learning_rate": 6.92e-06, |
|
"loss": 1.8653, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"grad_norm": 4.544364929199219, |
|
"learning_rate": 6.91e-06, |
|
"loss": 1.9228, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"grad_norm": 4.089725017547607, |
|
"learning_rate": 6.9e-06, |
|
"loss": 1.883, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 3.990675687789917, |
|
"learning_rate": 6.89e-06, |
|
"loss": 1.8422, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 4.0297980308532715, |
|
"learning_rate": 6.88e-06, |
|
"loss": 1.8746, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"grad_norm": 3.964310884475708, |
|
"learning_rate": 6.870000000000001e-06, |
|
"loss": 1.8863, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 4.886177062988281, |
|
"learning_rate": 6.860000000000001e-06, |
|
"loss": 1.8894, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 4.124607563018799, |
|
"learning_rate": 6.850000000000001e-06, |
|
"loss": 1.9095, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 3.994948625564575, |
|
"learning_rate": 6.8400000000000014e-06, |
|
"loss": 1.8547, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 3.945030927658081, |
|
"learning_rate": 6.830000000000001e-06, |
|
"loss": 1.7959, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"grad_norm": 4.07049560546875, |
|
"learning_rate": 6.820000000000001e-06, |
|
"loss": 1.8175, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 4.00924015045166, |
|
"learning_rate": 6.810000000000001e-06, |
|
"loss": 1.8117, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 3.9380557537078857, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 1.8589, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"grad_norm": 5.128328323364258, |
|
"learning_rate": 6.790000000000001e-06, |
|
"loss": 1.8012, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 4.0204997062683105, |
|
"learning_rate": 6.780000000000001e-06, |
|
"loss": 1.7897, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"grad_norm": 3.9723575115203857, |
|
"learning_rate": 6.770000000000001e-06, |
|
"loss": 1.7977, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 4.08590030670166, |
|
"learning_rate": 6.760000000000001e-06, |
|
"loss": 1.8906, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"grad_norm": 4.190238952636719, |
|
"learning_rate": 6.750000000000001e-06, |
|
"loss": 1.8767, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 3.9300475120544434, |
|
"learning_rate": 6.740000000000001e-06, |
|
"loss": 1.8214, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"grad_norm": 3.9917032718658447, |
|
"learning_rate": 6.730000000000001e-06, |
|
"loss": 1.8616, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"grad_norm": 4.450725555419922, |
|
"learning_rate": 6.720000000000001e-06, |
|
"loss": 1.8484, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"grad_norm": 4.116485118865967, |
|
"learning_rate": 6.710000000000001e-06, |
|
"loss": 1.9084, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 4.216673851013184, |
|
"learning_rate": 6.700000000000001e-06, |
|
"loss": 1.8647, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"grad_norm": 4.016748428344727, |
|
"learning_rate": 6.690000000000001e-06, |
|
"loss": 1.8363, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 4.239066123962402, |
|
"learning_rate": 6.680000000000001e-06, |
|
"loss": 1.874, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 3.9612040519714355, |
|
"learning_rate": 6.6700000000000005e-06, |
|
"loss": 1.8357, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"grad_norm": 3.9869627952575684, |
|
"learning_rate": 6.660000000000001e-06, |
|
"loss": 1.844, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"grad_norm": 4.674553871154785, |
|
"learning_rate": 6.650000000000001e-06, |
|
"loss": 1.838, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 4.254626274108887, |
|
"learning_rate": 6.640000000000001e-06, |
|
"loss": 1.8847, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 4.037281513214111, |
|
"learning_rate": 6.630000000000001e-06, |
|
"loss": 1.8034, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"grad_norm": 4.079427719116211, |
|
"learning_rate": 6.620000000000001e-06, |
|
"loss": 1.8936, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 3.9827044010162354, |
|
"learning_rate": 6.610000000000001e-06, |
|
"loss": 1.8376, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"grad_norm": 4.275979518890381, |
|
"learning_rate": 6.600000000000001e-06, |
|
"loss": 1.8784, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 4.256450176239014, |
|
"learning_rate": 6.5900000000000004e-06, |
|
"loss": 1.896, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"grad_norm": 4.032690525054932, |
|
"learning_rate": 6.5800000000000005e-06, |
|
"loss": 1.8182, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"grad_norm": 3.9834063053131104, |
|
"learning_rate": 6.570000000000001e-06, |
|
"loss": 1.866, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 3.9587719440460205, |
|
"learning_rate": 6.560000000000001e-06, |
|
"loss": 1.8362, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"grad_norm": 4.204609394073486, |
|
"learning_rate": 6.550000000000001e-06, |
|
"loss": 1.7791, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 4.014987945556641, |
|
"learning_rate": 6.540000000000001e-06, |
|
"loss": 1.8339, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"grad_norm": 3.9102377891540527, |
|
"learning_rate": 6.530000000000001e-06, |
|
"loss": 1.834, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 4.002016067504883, |
|
"learning_rate": 6.520000000000001e-06, |
|
"loss": 1.7964, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 4.090336799621582, |
|
"learning_rate": 6.51e-06, |
|
"loss": 1.8072, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 3.968061923980713, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 1.8277, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"grad_norm": 3.973949432373047, |
|
"learning_rate": 6.4900000000000005e-06, |
|
"loss": 1.8506, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 3.9202258586883545, |
|
"learning_rate": 6.480000000000001e-06, |
|
"loss": 1.8283, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 4.712566375732422, |
|
"learning_rate": 6.470000000000001e-06, |
|
"loss": 1.7885, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"grad_norm": 3.9483203887939453, |
|
"learning_rate": 6.460000000000001e-06, |
|
"loss": 1.7676, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 3.901716947555542, |
|
"learning_rate": 6.450000000000001e-06, |
|
"loss": 1.8014, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 4.3014092445373535, |
|
"learning_rate": 6.440000000000001e-06, |
|
"loss": 1.8463, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"grad_norm": 4.11579704284668, |
|
"learning_rate": 6.43e-06, |
|
"loss": 1.7782, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"grad_norm": 3.999476432800293, |
|
"learning_rate": 6.42e-06, |
|
"loss": 1.8609, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 4.3478193283081055, |
|
"learning_rate": 6.4100000000000005e-06, |
|
"loss": 1.8069, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 5.4783148765563965, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 1.8138, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"grad_norm": 4.044590473175049, |
|
"learning_rate": 6.390000000000001e-06, |
|
"loss": 1.779, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"grad_norm": 8.013321876525879, |
|
"learning_rate": 6.380000000000001e-06, |
|
"loss": 1.8059, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 4.031410217285156, |
|
"learning_rate": 6.370000000000001e-06, |
|
"loss": 1.7424, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"grad_norm": 4.058076858520508, |
|
"learning_rate": 6.360000000000001e-06, |
|
"loss": 1.8143, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"grad_norm": 4.283531188964844, |
|
"learning_rate": 6.35e-06, |
|
"loss": 1.8077, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 4.0810041427612305, |
|
"learning_rate": 6.34e-06, |
|
"loss": 1.873, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"grad_norm": 3.9741570949554443, |
|
"learning_rate": 6.33e-06, |
|
"loss": 1.7648, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 3.9928829669952393, |
|
"learning_rate": 6.3200000000000005e-06, |
|
"loss": 1.8732, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"grad_norm": 4.0868353843688965, |
|
"learning_rate": 6.3100000000000006e-06, |
|
"loss": 1.8418, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 3.9633114337921143, |
|
"learning_rate": 6.300000000000001e-06, |
|
"loss": 1.8612, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"grad_norm": 3.941833972930908, |
|
"learning_rate": 6.290000000000001e-06, |
|
"loss": 1.7949, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 3.9225800037384033, |
|
"learning_rate": 6.280000000000001e-06, |
|
"loss": 1.8074, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"grad_norm": 4.004086017608643, |
|
"learning_rate": 6.27e-06, |
|
"loss": 1.8615, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 3.866331100463867, |
|
"learning_rate": 6.26e-06, |
|
"loss": 1.754, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 4.170721054077148, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.8606, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 4.022711753845215, |
|
"learning_rate": 6.24e-06, |
|
"loss": 1.7216, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"grad_norm": 3.9754745960235596, |
|
"learning_rate": 6.2300000000000005e-06, |
|
"loss": 1.8139, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"grad_norm": 3.899186372756958, |
|
"learning_rate": 6.220000000000001e-06, |
|
"loss": 1.7894, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 4.057887554168701, |
|
"learning_rate": 6.210000000000001e-06, |
|
"loss": 1.8337, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"grad_norm": 3.93603777885437, |
|
"learning_rate": 6.200000000000001e-06, |
|
"loss": 1.766, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 3.942586898803711, |
|
"learning_rate": 6.190000000000001e-06, |
|
"loss": 1.7758, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"grad_norm": 3.9234979152679443, |
|
"learning_rate": 6.18e-06, |
|
"loss": 1.8028, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 4.158355712890625, |
|
"learning_rate": 6.17e-06, |
|
"loss": 1.7271, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 3.9193642139434814, |
|
"learning_rate": 6.16e-06, |
|
"loss": 1.7709, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"grad_norm": 4.086193084716797, |
|
"learning_rate": 6.15e-06, |
|
"loss": 1.8225, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"grad_norm": 4.275905132293701, |
|
"learning_rate": 6.1400000000000005e-06, |
|
"loss": 1.7797, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"grad_norm": 4.014064311981201, |
|
"learning_rate": 6.130000000000001e-06, |
|
"loss": 1.7737, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 4.341443061828613, |
|
"learning_rate": 6.120000000000001e-06, |
|
"loss": 1.8007, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"grad_norm": 3.9263601303100586, |
|
"learning_rate": 6.110000000000001e-06, |
|
"loss": 1.8191, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"grad_norm": 3.908109188079834, |
|
"learning_rate": 6.1e-06, |
|
"loss": 1.8143, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 4.180897235870361, |
|
"learning_rate": 6.09e-06, |
|
"loss": 1.6849, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 4.73223876953125, |
|
"learning_rate": 6.08e-06, |
|
"loss": 1.7972, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 3.970501184463501, |
|
"learning_rate": 6.07e-06, |
|
"loss": 1.797, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"grad_norm": 4.551540851593018, |
|
"learning_rate": 6.0600000000000004e-06, |
|
"loss": 1.7708, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"grad_norm": 3.9829537868499756, |
|
"learning_rate": 6.0500000000000005e-06, |
|
"loss": 1.7374, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 3.864532470703125, |
|
"learning_rate": 6.040000000000001e-06, |
|
"loss": 1.7764, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"grad_norm": 4.039094924926758, |
|
"learning_rate": 6.030000000000001e-06, |
|
"loss": 1.7655, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"grad_norm": 4.147581100463867, |
|
"learning_rate": 6.02e-06, |
|
"loss": 1.8172, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"grad_norm": 3.9826345443725586, |
|
"learning_rate": 6.01e-06, |
|
"loss": 1.7737, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 4.246896743774414, |
|
"learning_rate": 6e-06, |
|
"loss": 1.7506, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0006182631826872454, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0005081570324998075, |
|
"eval_accuracy_Face": 0.3401682547858185, |
|
"eval_accuracy_Hair": 0.8490611301122405, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.005574931692983876, |
|
"eval_accuracy_Left-leg": 0.5808852122580501, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.6755268815433463, |
|
"eval_accuracy_Right-arm": 0.33479333283504, |
|
"eval_accuracy_Right-leg": 0.5197870333393008, |
|
"eval_accuracy_Right-shoe": 0.01559064402026704, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.11172701197661604, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.8918252214314302, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.0006176844618618775, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.0005080753183168851, |
|
"eval_iou_Face": 0.328217845564617, |
|
"eval_iou_Hair": 0.5368625738101287, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.00554931640625, |
|
"eval_iou_Left-leg": 0.41831202235339837, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.37582063763596807, |
|
"eval_iou_Right-arm": 0.19133540080819472, |
|
"eval_iou_Right-leg": 0.3503233927525219, |
|
"eval_iou_Right-shoe": 0.014809657682746408, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.09438058853388519, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.43714558514733404, |
|
"eval_loss": 1.7308621406555176, |
|
"eval_mean_accuracy": 0.2544744749535459, |
|
"eval_mean_iou": 0.1529934878041791, |
|
"eval_overall_accuracy": 0.4868066795335941, |
|
"eval_runtime": 20.0252, |
|
"eval_samples_per_second": 9.987, |
|
"eval_steps_per_second": 4.994, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"grad_norm": 3.906787872314453, |
|
"learning_rate": 5.99e-06, |
|
"loss": 1.7166, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"grad_norm": 3.9316136837005615, |
|
"learning_rate": 5.98e-06, |
|
"loss": 1.7602, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"grad_norm": 4.142512321472168, |
|
"learning_rate": 5.9700000000000004e-06, |
|
"loss": 1.7018, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 3.9604501724243164, |
|
"learning_rate": 5.9600000000000005e-06, |
|
"loss": 1.7908, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"grad_norm": 3.9293625354766846, |
|
"learning_rate": 5.950000000000001e-06, |
|
"loss": 1.7457, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"grad_norm": 4.6011881828308105, |
|
"learning_rate": 5.94e-06, |
|
"loss": 1.7951, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"grad_norm": 4.185412406921387, |
|
"learning_rate": 5.93e-06, |
|
"loss": 1.8099, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 5.215122222900391, |
|
"learning_rate": 5.92e-06, |
|
"loss": 1.7177, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"grad_norm": 4.188220977783203, |
|
"learning_rate": 5.91e-06, |
|
"loss": 1.8003, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"grad_norm": 3.847362995147705, |
|
"learning_rate": 5.9e-06, |
|
"loss": 1.7339, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"grad_norm": 3.900439500808716, |
|
"learning_rate": 5.89e-06, |
|
"loss": 1.7613, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"grad_norm": 4.307349681854248, |
|
"learning_rate": 5.8800000000000005e-06, |
|
"loss": 1.7702, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 3.981179714202881, |
|
"learning_rate": 5.8700000000000005e-06, |
|
"loss": 1.7913, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"grad_norm": 3.9282853603363037, |
|
"learning_rate": 5.86e-06, |
|
"loss": 1.7618, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 3.7949161529541016, |
|
"learning_rate": 5.85e-06, |
|
"loss": 1.7126, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 3.823367118835449, |
|
"learning_rate": 5.84e-06, |
|
"loss": 1.701, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"grad_norm": 4.349286079406738, |
|
"learning_rate": 5.83e-06, |
|
"loss": 1.7988, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"grad_norm": 3.832627296447754, |
|
"learning_rate": 5.82e-06, |
|
"loss": 1.7517, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 3.9570107460021973, |
|
"learning_rate": 5.81e-06, |
|
"loss": 1.7215, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"grad_norm": 3.8986053466796875, |
|
"learning_rate": 5.8e-06, |
|
"loss": 1.7607, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"grad_norm": 3.9459450244903564, |
|
"learning_rate": 5.7900000000000005e-06, |
|
"loss": 1.7081, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"grad_norm": 4.1990509033203125, |
|
"learning_rate": 5.78e-06, |
|
"loss": 1.7438, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"grad_norm": 9.36162281036377, |
|
"learning_rate": 5.77e-06, |
|
"loss": 1.7954, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 3.7987680435180664, |
|
"learning_rate": 5.76e-06, |
|
"loss": 1.7231, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"grad_norm": 3.9184491634368896, |
|
"learning_rate": 5.75e-06, |
|
"loss": 1.7387, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"grad_norm": 3.9590370655059814, |
|
"learning_rate": 5.74e-06, |
|
"loss": 1.6951, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"grad_norm": 3.9022176265716553, |
|
"learning_rate": 5.73e-06, |
|
"loss": 1.7064, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"grad_norm": 4.193649768829346, |
|
"learning_rate": 5.72e-06, |
|
"loss": 1.6982, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 4.376874923706055, |
|
"learning_rate": 5.71e-06, |
|
"loss": 1.7261, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"grad_norm": 4.793328762054443, |
|
"learning_rate": 5.7e-06, |
|
"loss": 1.6951, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"grad_norm": 4.027029514312744, |
|
"learning_rate": 5.69e-06, |
|
"loss": 1.768, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 3.9366440773010254, |
|
"learning_rate": 5.68e-06, |
|
"loss": 1.731, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"grad_norm": 3.912431478500366, |
|
"learning_rate": 5.67e-06, |
|
"loss": 1.7468, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"grad_norm": 3.943423271179199, |
|
"learning_rate": 5.66e-06, |
|
"loss": 1.7997, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"grad_norm": 3.9392988681793213, |
|
"learning_rate": 5.65e-06, |
|
"loss": 1.7279, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"grad_norm": 3.8220345973968506, |
|
"learning_rate": 5.64e-06, |
|
"loss": 1.6975, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"grad_norm": 5.20045804977417, |
|
"learning_rate": 5.63e-06, |
|
"loss": 1.7261, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"grad_norm": 3.8737871646881104, |
|
"learning_rate": 5.620000000000001e-06, |
|
"loss": 1.7512, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"grad_norm": 3.9536356925964355, |
|
"learning_rate": 5.610000000000001e-06, |
|
"loss": 1.7412, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 3.815074920654297, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.7231, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"grad_norm": 4.8853864669799805, |
|
"learning_rate": 5.590000000000001e-06, |
|
"loss": 1.7886, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"grad_norm": 3.9920268058776855, |
|
"learning_rate": 5.580000000000001e-06, |
|
"loss": 1.7154, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"grad_norm": 4.06306266784668, |
|
"learning_rate": 5.570000000000001e-06, |
|
"loss": 1.7192, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 3.9798781871795654, |
|
"learning_rate": 5.560000000000001e-06, |
|
"loss": 1.7948, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"grad_norm": 4.121651649475098, |
|
"learning_rate": 5.550000000000001e-06, |
|
"loss": 1.7477, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 3.9828436374664307, |
|
"learning_rate": 5.540000000000001e-06, |
|
"loss": 1.6689, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"grad_norm": 3.8925745487213135, |
|
"learning_rate": 5.530000000000001e-06, |
|
"loss": 1.6934, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 4.046696662902832, |
|
"learning_rate": 5.5200000000000005e-06, |
|
"loss": 1.7039, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"grad_norm": 3.9007444381713867, |
|
"learning_rate": 5.510000000000001e-06, |
|
"loss": 1.6938, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 4.0163655281066895, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 1.7053, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"grad_norm": 3.9669549465179443, |
|
"learning_rate": 5.490000000000001e-06, |
|
"loss": 1.764, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"grad_norm": 3.8638901710510254, |
|
"learning_rate": 5.480000000000001e-06, |
|
"loss": 1.6782, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"grad_norm": 4.246222019195557, |
|
"learning_rate": 5.470000000000001e-06, |
|
"loss": 1.7227, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"grad_norm": 3.9141976833343506, |
|
"learning_rate": 5.460000000000001e-06, |
|
"loss": 1.7518, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"grad_norm": 3.987290143966675, |
|
"learning_rate": 5.450000000000001e-06, |
|
"loss": 1.7515, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"grad_norm": 3.853123426437378, |
|
"learning_rate": 5.4400000000000004e-06, |
|
"loss": 1.6767, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"grad_norm": 3.992387294769287, |
|
"learning_rate": 5.4300000000000005e-06, |
|
"loss": 1.6721, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"grad_norm": 3.875953197479248, |
|
"learning_rate": 5.420000000000001e-06, |
|
"loss": 1.6866, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 3.852982521057129, |
|
"learning_rate": 5.410000000000001e-06, |
|
"loss": 1.6505, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"grad_norm": 3.951180934906006, |
|
"learning_rate": 5.400000000000001e-06, |
|
"loss": 1.671, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"grad_norm": 3.888502836227417, |
|
"learning_rate": 5.390000000000001e-06, |
|
"loss": 1.6969, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"grad_norm": 3.87117600440979, |
|
"learning_rate": 5.380000000000001e-06, |
|
"loss": 1.649, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 4.048139572143555, |
|
"learning_rate": 5.370000000000001e-06, |
|
"loss": 1.7443, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 3.8636538982391357, |
|
"learning_rate": 5.36e-06, |
|
"loss": 1.653, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"grad_norm": 4.008995056152344, |
|
"learning_rate": 5.3500000000000004e-06, |
|
"loss": 1.7473, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"grad_norm": 3.8537802696228027, |
|
"learning_rate": 5.3400000000000005e-06, |
|
"loss": 1.7315, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 3.8246848583221436, |
|
"learning_rate": 5.330000000000001e-06, |
|
"loss": 1.6694, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"grad_norm": 4.352231502532959, |
|
"learning_rate": 5.320000000000001e-06, |
|
"loss": 1.6741, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"grad_norm": 5.090497970581055, |
|
"learning_rate": 5.310000000000001e-06, |
|
"loss": 1.6921, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"grad_norm": 3.982093095779419, |
|
"learning_rate": 5.300000000000001e-06, |
|
"loss": 1.6371, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 3.979620933532715, |
|
"learning_rate": 5.290000000000001e-06, |
|
"loss": 1.6924, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 4.002071380615234, |
|
"learning_rate": 5.28e-06, |
|
"loss": 1.6912, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"grad_norm": 3.9546217918395996, |
|
"learning_rate": 5.27e-06, |
|
"loss": 1.6869, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 3.870701551437378, |
|
"learning_rate": 5.2600000000000005e-06, |
|
"loss": 1.7529, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"grad_norm": 3.8638765811920166, |
|
"learning_rate": 5.2500000000000006e-06, |
|
"loss": 1.7197, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"grad_norm": 3.89359450340271, |
|
"learning_rate": 5.240000000000001e-06, |
|
"loss": 1.6631, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"grad_norm": 3.901143789291382, |
|
"learning_rate": 5.230000000000001e-06, |
|
"loss": 1.757, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"grad_norm": 4.2106757164001465, |
|
"learning_rate": 5.220000000000001e-06, |
|
"loss": 1.6399, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"grad_norm": 4.360852241516113, |
|
"learning_rate": 5.210000000000001e-06, |
|
"loss": 1.6512, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 4.084482669830322, |
|
"learning_rate": 5.2e-06, |
|
"loss": 1.695, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 3.9859793186187744, |
|
"learning_rate": 5.19e-06, |
|
"loss": 1.7407, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"grad_norm": 3.7998862266540527, |
|
"learning_rate": 5.18e-06, |
|
"loss": 1.696, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"grad_norm": 3.849905252456665, |
|
"learning_rate": 5.1700000000000005e-06, |
|
"loss": 1.6359, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"grad_norm": 3.900766611099243, |
|
"learning_rate": 5.1600000000000006e-06, |
|
"loss": 1.6532, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"grad_norm": 3.866856336593628, |
|
"learning_rate": 5.150000000000001e-06, |
|
"loss": 1.6706, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"grad_norm": 3.8643901348114014, |
|
"learning_rate": 5.140000000000001e-06, |
|
"loss": 1.7067, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"grad_norm": 3.936910390853882, |
|
"learning_rate": 5.130000000000001e-06, |
|
"loss": 1.6667, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 3.799140691757202, |
|
"learning_rate": 5.12e-06, |
|
"loss": 1.6669, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"grad_norm": 4.125818252563477, |
|
"learning_rate": 5.11e-06, |
|
"loss": 1.7095, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"grad_norm": 3.8781321048736572, |
|
"learning_rate": 5.1e-06, |
|
"loss": 1.7054, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"grad_norm": 3.777982473373413, |
|
"learning_rate": 5.09e-06, |
|
"loss": 1.6945, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"grad_norm": 3.943347930908203, |
|
"learning_rate": 5.0800000000000005e-06, |
|
"loss": 1.7176, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"grad_norm": 4.336722373962402, |
|
"learning_rate": 5.070000000000001e-06, |
|
"loss": 1.6816, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"grad_norm": 3.8266570568084717, |
|
"learning_rate": 5.060000000000001e-06, |
|
"loss": 1.6898, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"grad_norm": 3.9701790809631348, |
|
"learning_rate": 5.050000000000001e-06, |
|
"loss": 1.693, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 3.8597071170806885, |
|
"learning_rate": 5.04e-06, |
|
"loss": 1.6664, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"grad_norm": 4.002712249755859, |
|
"learning_rate": 5.03e-06, |
|
"loss": 1.6646, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"grad_norm": 3.9471821784973145, |
|
"learning_rate": 5.02e-06, |
|
"loss": 1.7108, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"grad_norm": 3.9103269577026367, |
|
"learning_rate": 5.01e-06, |
|
"loss": 1.6578, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 4.6496968269348145, |
|
"learning_rate": 5e-06, |
|
"loss": 1.6655, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0005593809748122697, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.00039437776428015366, |
|
"eval_accuracy_Face": 0.526036569662632, |
|
"eval_accuracy_Hair": 0.841159978804374, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.014227971293884499, |
|
"eval_accuracy_Left-leg": 0.650094989781532, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.6487939005051119, |
|
"eval_accuracy_Right-arm": 0.33363230934050875, |
|
"eval_accuracy_Right-leg": 0.5098960987096686, |
|
"eval_accuracy_Right-shoe": 0.0097112902711832, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.19349545433535012, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.9096570205509249, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.000558735562394156, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.00039420914462601436, |
|
"eval_iou_Face": 0.48030361697358087, |
|
"eval_iou_Hair": 0.5798027129296479, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.014113119045533725, |
|
"eval_iou_Left-leg": 0.43610983103359907, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.40184011533021435, |
|
"eval_iou_Right-arm": 0.18888736862446945, |
|
"eval_iou_Right-leg": 0.3519069840379781, |
|
"eval_iou_Right-shoe": 0.009437869937016348, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.15035155722032084, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.4436276211239529, |
|
"eval_loss": 1.65687894821167, |
|
"eval_mean_accuracy": 0.2728034907055448, |
|
"eval_mean_iou": 0.16985187449796296, |
|
"eval_overall_accuracy": 0.5058896326306082, |
|
"eval_runtime": 19.9046, |
|
"eval_samples_per_second": 10.048, |
|
"eval_steps_per_second": 5.024, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"grad_norm": 3.804652214050293, |
|
"learning_rate": 4.9900000000000005e-06, |
|
"loss": 1.6349, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"grad_norm": 4.142456531524658, |
|
"learning_rate": 4.980000000000001e-06, |
|
"loss": 1.6559, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"grad_norm": 3.9484057426452637, |
|
"learning_rate": 4.970000000000001e-06, |
|
"loss": 1.7418, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 3.9426329135894775, |
|
"learning_rate": 4.960000000000001e-06, |
|
"loss": 1.6736, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"grad_norm": 3.8289520740509033, |
|
"learning_rate": 4.95e-06, |
|
"loss": 1.7213, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"grad_norm": 3.924334764480591, |
|
"learning_rate": 4.94e-06, |
|
"loss": 1.6024, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"grad_norm": 4.055715084075928, |
|
"learning_rate": 4.93e-06, |
|
"loss": 1.6369, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"grad_norm": 3.9286556243896484, |
|
"learning_rate": 4.92e-06, |
|
"loss": 1.6621, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"grad_norm": 3.855855703353882, |
|
"learning_rate": 4.9100000000000004e-06, |
|
"loss": 1.6163, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"grad_norm": 3.9543490409851074, |
|
"learning_rate": 4.9000000000000005e-06, |
|
"loss": 1.6825, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"grad_norm": 3.8890864849090576, |
|
"learning_rate": 4.890000000000001e-06, |
|
"loss": 1.656, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"grad_norm": 3.9430246353149414, |
|
"learning_rate": 4.880000000000001e-06, |
|
"loss": 1.7152, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"grad_norm": 3.8309576511383057, |
|
"learning_rate": 4.87e-06, |
|
"loss": 1.64, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"grad_norm": 4.385008811950684, |
|
"learning_rate": 4.86e-06, |
|
"loss": 1.6569, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"grad_norm": 3.956075429916382, |
|
"learning_rate": 4.85e-06, |
|
"loss": 1.6464, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"grad_norm": 4.677767276763916, |
|
"learning_rate": 4.84e-06, |
|
"loss": 1.5964, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"grad_norm": 3.972614049911499, |
|
"learning_rate": 4.83e-06, |
|
"loss": 1.6642, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"grad_norm": 3.726994037628174, |
|
"learning_rate": 4.8200000000000004e-06, |
|
"loss": 1.6123, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"grad_norm": 4.8021697998046875, |
|
"learning_rate": 4.8100000000000005e-06, |
|
"loss": 1.7095, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"grad_norm": 3.8248772621154785, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.6826, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"grad_norm": 3.8613359928131104, |
|
"learning_rate": 4.79e-06, |
|
"loss": 1.6423, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"grad_norm": 4.146315097808838, |
|
"learning_rate": 4.78e-06, |
|
"loss": 1.6788, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"grad_norm": 3.8883233070373535, |
|
"learning_rate": 4.77e-06, |
|
"loss": 1.7306, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"grad_norm": 4.360881328582764, |
|
"learning_rate": 4.76e-06, |
|
"loss": 1.6162, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"grad_norm": 3.7286407947540283, |
|
"learning_rate": 4.75e-06, |
|
"loss": 1.6046, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"grad_norm": 3.8278820514678955, |
|
"learning_rate": 4.74e-06, |
|
"loss": 1.6989, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"grad_norm": 3.7664759159088135, |
|
"learning_rate": 4.7300000000000005e-06, |
|
"loss": 1.6151, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"grad_norm": 11.46270751953125, |
|
"learning_rate": 4.7200000000000005e-06, |
|
"loss": 1.6346, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"grad_norm": 4.157445430755615, |
|
"learning_rate": 4.71e-06, |
|
"loss": 1.6747, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"grad_norm": 3.6979782581329346, |
|
"learning_rate": 4.7e-06, |
|
"loss": 1.6155, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"grad_norm": 4.047948837280273, |
|
"learning_rate": 4.69e-06, |
|
"loss": 1.6717, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"grad_norm": 3.965113639831543, |
|
"learning_rate": 4.680000000000001e-06, |
|
"loss": 1.6782, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"grad_norm": 3.9399850368499756, |
|
"learning_rate": 4.670000000000001e-06, |
|
"loss": 1.6168, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"grad_norm": 5.138143062591553, |
|
"learning_rate": 4.66e-06, |
|
"loss": 1.721, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"grad_norm": 3.855048656463623, |
|
"learning_rate": 4.65e-06, |
|
"loss": 1.6508, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"grad_norm": 3.7510130405426025, |
|
"learning_rate": 4.6400000000000005e-06, |
|
"loss": 1.6291, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"grad_norm": 3.8167595863342285, |
|
"learning_rate": 4.6300000000000006e-06, |
|
"loss": 1.6111, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"grad_norm": 3.90309739112854, |
|
"learning_rate": 4.620000000000001e-06, |
|
"loss": 1.6145, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"grad_norm": 3.7262179851531982, |
|
"learning_rate": 4.610000000000001e-06, |
|
"loss": 1.6275, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"grad_norm": 3.779728651046753, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 1.6267, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"grad_norm": 3.8452236652374268, |
|
"learning_rate": 4.590000000000001e-06, |
|
"loss": 1.6215, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"grad_norm": 4.098355770111084, |
|
"learning_rate": 4.58e-06, |
|
"loss": 1.6625, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"grad_norm": 3.868823289871216, |
|
"learning_rate": 4.57e-06, |
|
"loss": 1.6565, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"grad_norm": 3.8368430137634277, |
|
"learning_rate": 4.56e-06, |
|
"loss": 1.7015, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"grad_norm": 3.9534285068511963, |
|
"learning_rate": 4.5500000000000005e-06, |
|
"loss": 1.6231, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"grad_norm": 4.724597930908203, |
|
"learning_rate": 4.540000000000001e-06, |
|
"loss": 1.6184, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"grad_norm": 3.994943141937256, |
|
"learning_rate": 4.530000000000001e-06, |
|
"loss": 1.6957, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"grad_norm": 4.073677062988281, |
|
"learning_rate": 4.520000000000001e-06, |
|
"loss": 1.6397, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"grad_norm": 3.8306500911712646, |
|
"learning_rate": 4.510000000000001e-06, |
|
"loss": 1.6241, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"grad_norm": 4.004467487335205, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.6076, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"grad_norm": 3.7737276554107666, |
|
"learning_rate": 4.49e-06, |
|
"loss": 1.6359, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"grad_norm": 3.8704159259796143, |
|
"learning_rate": 4.48e-06, |
|
"loss": 1.6447, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"grad_norm": 3.7226834297180176, |
|
"learning_rate": 4.47e-06, |
|
"loss": 1.5993, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"grad_norm": 3.87687611579895, |
|
"learning_rate": 4.4600000000000005e-06, |
|
"loss": 1.628, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"grad_norm": 7.174743175506592, |
|
"learning_rate": 4.450000000000001e-06, |
|
"loss": 1.5938, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"grad_norm": 4.229218006134033, |
|
"learning_rate": 4.440000000000001e-06, |
|
"loss": 1.6151, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"grad_norm": 3.7360026836395264, |
|
"learning_rate": 4.430000000000001e-06, |
|
"loss": 1.6209, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"grad_norm": 3.7990097999572754, |
|
"learning_rate": 4.42e-06, |
|
"loss": 1.6359, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"grad_norm": 3.7567763328552246, |
|
"learning_rate": 4.41e-06, |
|
"loss": 1.6551, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"grad_norm": 3.8619561195373535, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.6461, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"grad_norm": 3.8153328895568848, |
|
"learning_rate": 4.39e-06, |
|
"loss": 1.566, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"grad_norm": 3.813605785369873, |
|
"learning_rate": 4.38e-06, |
|
"loss": 1.5936, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"grad_norm": 4.274602890014648, |
|
"learning_rate": 4.3700000000000005e-06, |
|
"loss": 1.6444, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 4.587132930755615, |
|
"learning_rate": 4.360000000000001e-06, |
|
"loss": 1.6594, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"grad_norm": 3.864215135574341, |
|
"learning_rate": 4.350000000000001e-06, |
|
"loss": 1.6513, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"grad_norm": 3.656938314437866, |
|
"learning_rate": 4.34e-06, |
|
"loss": 1.5682, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"grad_norm": 3.8212952613830566, |
|
"learning_rate": 4.33e-06, |
|
"loss": 1.6297, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"grad_norm": 3.8825900554656982, |
|
"learning_rate": 4.32e-06, |
|
"loss": 1.6938, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"grad_norm": 3.691157817840576, |
|
"learning_rate": 4.31e-06, |
|
"loss": 1.5491, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"grad_norm": 3.7921180725097656, |
|
"learning_rate": 4.3e-06, |
|
"loss": 1.5975, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"grad_norm": 3.862549304962158, |
|
"learning_rate": 4.2900000000000004e-06, |
|
"loss": 1.6195, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"grad_norm": 3.726428508758545, |
|
"learning_rate": 4.2800000000000005e-06, |
|
"loss": 1.6305, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"grad_norm": 3.906430959701538, |
|
"learning_rate": 4.270000000000001e-06, |
|
"loss": 1.6224, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"grad_norm": 3.78315806388855, |
|
"learning_rate": 4.26e-06, |
|
"loss": 1.6003, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"grad_norm": 3.9849722385406494, |
|
"learning_rate": 4.25e-06, |
|
"loss": 1.6364, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"grad_norm": 4.186992168426514, |
|
"learning_rate": 4.24e-06, |
|
"loss": 1.577, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"grad_norm": 3.8204498291015625, |
|
"learning_rate": 4.23e-06, |
|
"loss": 1.5832, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"grad_norm": 3.7085537910461426, |
|
"learning_rate": 4.22e-06, |
|
"loss": 1.5836, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"grad_norm": 3.88561749458313, |
|
"learning_rate": 4.21e-06, |
|
"loss": 1.5925, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"grad_norm": 3.90736985206604, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.6804, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 3.7693142890930176, |
|
"learning_rate": 4.1900000000000005e-06, |
|
"loss": 1.6092, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"grad_norm": 3.933356761932373, |
|
"learning_rate": 4.18e-06, |
|
"loss": 1.6444, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"grad_norm": 3.68426775932312, |
|
"learning_rate": 4.17e-06, |
|
"loss": 1.5705, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"grad_norm": 3.745680809020996, |
|
"learning_rate": 4.16e-06, |
|
"loss": 1.6241, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 3.7543203830718994, |
|
"learning_rate": 4.15e-06, |
|
"loss": 1.6066, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"grad_norm": 3.801584005355835, |
|
"learning_rate": 4.14e-06, |
|
"loss": 1.5934, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"grad_norm": 4.025122165679932, |
|
"learning_rate": 4.13e-06, |
|
"loss": 1.6515, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"grad_norm": 3.7570040225982666, |
|
"learning_rate": 4.12e-06, |
|
"loss": 1.6435, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"grad_norm": 3.7467195987701416, |
|
"learning_rate": 4.1100000000000005e-06, |
|
"loss": 1.6262, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"grad_norm": 3.7339022159576416, |
|
"learning_rate": 4.1e-06, |
|
"loss": 1.549, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"grad_norm": 4.661428928375244, |
|
"learning_rate": 4.09e-06, |
|
"loss": 1.6164, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"grad_norm": 3.7260732650756836, |
|
"learning_rate": 4.08e-06, |
|
"loss": 1.5802, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"grad_norm": 3.751892566680908, |
|
"learning_rate": 4.07e-06, |
|
"loss": 1.6012, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"grad_norm": 3.8394174575805664, |
|
"learning_rate": 4.060000000000001e-06, |
|
"loss": 1.5969, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"grad_norm": 3.7985377311706543, |
|
"learning_rate": 4.05e-06, |
|
"loss": 1.6176, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"grad_norm": 3.592172384262085, |
|
"learning_rate": 4.04e-06, |
|
"loss": 1.5839, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"grad_norm": 3.72371768951416, |
|
"learning_rate": 4.03e-06, |
|
"loss": 1.631, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"grad_norm": 3.73107647895813, |
|
"learning_rate": 4.0200000000000005e-06, |
|
"loss": 1.5953, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"grad_norm": 4.2740254402160645, |
|
"learning_rate": 4.0100000000000006e-06, |
|
"loss": 1.5528, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"grad_norm": 3.7860562801361084, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.5728, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0007013910055695641, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0010180250314390084, |
|
"eval_accuracy_Face": 0.6261412996823762, |
|
"eval_accuracy_Hair": 0.834858133821475, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.024570904399609534, |
|
"eval_accuracy_Left-leg": 0.7271917457391645, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.6714103675218778, |
|
"eval_accuracy_Right-arm": 0.3287266113561053, |
|
"eval_accuracy_Right-leg": 0.4377418510817026, |
|
"eval_accuracy_Right-shoe": 0.010109620470985629, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.26314617894129927, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.903813318948266, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.0007005381170573248, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.0010174470799268805, |
|
"eval_iou_Face": 0.543490928057822, |
|
"eval_iou_Hair": 0.6015820199009051, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.024346972304043084, |
|
"eval_iou_Left-leg": 0.4255940326041186, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.42178875709341845, |
|
"eval_iou_Right-arm": 0.19686193061691593, |
|
"eval_iou_Right-leg": 0.3129657411429957, |
|
"eval_iou_Right-shoe": 0.00991115884018354, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.19120046383123895, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.4563441571415292, |
|
"eval_loss": 1.5538502931594849, |
|
"eval_mean_accuracy": 0.28408408517646294, |
|
"eval_mean_iou": 0.17698911926278638, |
|
"eval_overall_accuracy": 0.5175217267677371, |
|
"eval_runtime": 19.3428, |
|
"eval_samples_per_second": 10.34, |
|
"eval_steps_per_second": 5.17, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"grad_norm": 3.988349676132202, |
|
"learning_rate": 3.990000000000001e-06, |
|
"loss": 1.553, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"grad_norm": 3.8314743041992188, |
|
"learning_rate": 3.980000000000001e-06, |
|
"loss": 1.609, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"grad_norm": 3.7854995727539062, |
|
"learning_rate": 3.97e-06, |
|
"loss": 1.578, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"grad_norm": 3.587686777114868, |
|
"learning_rate": 3.96e-06, |
|
"loss": 1.5419, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"grad_norm": 3.9213783740997314, |
|
"learning_rate": 3.95e-06, |
|
"loss": 1.6277, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"grad_norm": 3.775315284729004, |
|
"learning_rate": 3.94e-06, |
|
"loss": 1.6061, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"grad_norm": 3.6883187294006348, |
|
"learning_rate": 3.9300000000000005e-06, |
|
"loss": 1.5696, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"grad_norm": 4.554684162139893, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 1.6241, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"grad_norm": 3.797959566116333, |
|
"learning_rate": 3.910000000000001e-06, |
|
"loss": 1.6631, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"grad_norm": 3.717813491821289, |
|
"learning_rate": 3.900000000000001e-06, |
|
"loss": 1.5584, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"grad_norm": 3.7369353771209717, |
|
"learning_rate": 3.89e-06, |
|
"loss": 1.6131, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"grad_norm": 3.7472145557403564, |
|
"learning_rate": 3.88e-06, |
|
"loss": 1.6325, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"grad_norm": 4.524040699005127, |
|
"learning_rate": 3.87e-06, |
|
"loss": 1.5712, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"grad_norm": 3.7258477210998535, |
|
"learning_rate": 3.86e-06, |
|
"loss": 1.5946, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"grad_norm": 3.7628583908081055, |
|
"learning_rate": 3.85e-06, |
|
"loss": 1.5788, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"grad_norm": 3.590494155883789, |
|
"learning_rate": 3.8400000000000005e-06, |
|
"loss": 1.5368, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"grad_norm": 3.6877408027648926, |
|
"learning_rate": 3.830000000000001e-06, |
|
"loss": 1.6084, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"grad_norm": 3.8319718837738037, |
|
"learning_rate": 3.820000000000001e-06, |
|
"loss": 1.5497, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"grad_norm": 3.6777663230895996, |
|
"learning_rate": 3.8100000000000004e-06, |
|
"loss": 1.6109, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"grad_norm": 3.733649492263794, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 1.572, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"grad_norm": 3.7246081829071045, |
|
"learning_rate": 3.79e-06, |
|
"loss": 1.5634, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"grad_norm": 3.7477712631225586, |
|
"learning_rate": 3.7800000000000002e-06, |
|
"loss": 1.6007, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"grad_norm": 3.7148218154907227, |
|
"learning_rate": 3.7700000000000003e-06, |
|
"loss": 1.5698, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"grad_norm": 3.8456928730010986, |
|
"learning_rate": 3.7600000000000004e-06, |
|
"loss": 1.6518, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"grad_norm": 4.332857608795166, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.6462, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"grad_norm": 3.7697911262512207, |
|
"learning_rate": 3.74e-06, |
|
"loss": 1.5998, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"grad_norm": 3.7395827770233154, |
|
"learning_rate": 3.7300000000000003e-06, |
|
"loss": 1.5759, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"grad_norm": 4.004927158355713, |
|
"learning_rate": 3.7200000000000004e-06, |
|
"loss": 1.6583, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"grad_norm": 3.816969633102417, |
|
"learning_rate": 3.7100000000000005e-06, |
|
"loss": 1.5696, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"grad_norm": 3.666729211807251, |
|
"learning_rate": 3.7e-06, |
|
"loss": 1.5726, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"grad_norm": 4.094972610473633, |
|
"learning_rate": 3.6900000000000002e-06, |
|
"loss": 1.6314, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"grad_norm": 3.57910418510437, |
|
"learning_rate": 3.6800000000000003e-06, |
|
"loss": 1.5194, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"grad_norm": 3.8278920650482178, |
|
"learning_rate": 3.6700000000000004e-06, |
|
"loss": 1.6331, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"grad_norm": 3.8611884117126465, |
|
"learning_rate": 3.66e-06, |
|
"loss": 1.6281, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"grad_norm": 3.753445863723755, |
|
"learning_rate": 3.65e-06, |
|
"loss": 1.5807, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"grad_norm": 3.8415653705596924, |
|
"learning_rate": 3.6400000000000003e-06, |
|
"loss": 1.5694, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"grad_norm": 3.6803395748138428, |
|
"learning_rate": 3.6300000000000004e-06, |
|
"loss": 1.5912, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"grad_norm": 5.002444744110107, |
|
"learning_rate": 3.62e-06, |
|
"loss": 1.5818, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"grad_norm": 3.776406764984131, |
|
"learning_rate": 3.61e-06, |
|
"loss": 1.6067, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"grad_norm": 3.6537561416625977, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.5453, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"grad_norm": 3.739206314086914, |
|
"learning_rate": 3.5900000000000004e-06, |
|
"loss": 1.5898, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"grad_norm": 3.748415946960449, |
|
"learning_rate": 3.58e-06, |
|
"loss": 1.6067, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"grad_norm": 3.616525173187256, |
|
"learning_rate": 3.57e-06, |
|
"loss": 1.4957, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"grad_norm": 4.066712379455566, |
|
"learning_rate": 3.5600000000000002e-06, |
|
"loss": 1.6655, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"grad_norm": 3.7741849422454834, |
|
"learning_rate": 3.5500000000000003e-06, |
|
"loss": 1.5733, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"grad_norm": 3.923842430114746, |
|
"learning_rate": 3.54e-06, |
|
"loss": 1.5473, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"grad_norm": 4.060410976409912, |
|
"learning_rate": 3.53e-06, |
|
"loss": 1.6088, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"grad_norm": 3.7101454734802246, |
|
"learning_rate": 3.52e-06, |
|
"loss": 1.5492, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"grad_norm": 3.6631035804748535, |
|
"learning_rate": 3.5100000000000003e-06, |
|
"loss": 1.5445, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"grad_norm": 3.797605276107788, |
|
"learning_rate": 3.5e-06, |
|
"loss": 1.5246, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"grad_norm": 3.5534019470214844, |
|
"learning_rate": 3.49e-06, |
|
"loss": 1.5443, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"grad_norm": 3.7080326080322266, |
|
"learning_rate": 3.48e-06, |
|
"loss": 1.5701, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"grad_norm": 4.018558025360107, |
|
"learning_rate": 3.4700000000000002e-06, |
|
"loss": 1.6453, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"grad_norm": 4.576212406158447, |
|
"learning_rate": 3.46e-06, |
|
"loss": 1.5754, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"grad_norm": 3.820403814315796, |
|
"learning_rate": 3.45e-06, |
|
"loss": 1.5593, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"grad_norm": 4.293280124664307, |
|
"learning_rate": 3.44e-06, |
|
"loss": 1.6198, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"grad_norm": 3.841649293899536, |
|
"learning_rate": 3.4300000000000006e-06, |
|
"loss": 1.5033, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"grad_norm": 3.6984217166900635, |
|
"learning_rate": 3.4200000000000007e-06, |
|
"loss": 1.5733, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"grad_norm": 3.884720802307129, |
|
"learning_rate": 3.4100000000000004e-06, |
|
"loss": 1.5565, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"grad_norm": 3.9936680793762207, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.5067, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"grad_norm": 3.644507884979248, |
|
"learning_rate": 3.3900000000000006e-06, |
|
"loss": 1.5657, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"grad_norm": 3.6582627296447754, |
|
"learning_rate": 3.3800000000000007e-06, |
|
"loss": 1.5496, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"grad_norm": 3.7611379623413086, |
|
"learning_rate": 3.3700000000000003e-06, |
|
"loss": 1.609, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"grad_norm": 3.7463736534118652, |
|
"learning_rate": 3.3600000000000004e-06, |
|
"loss": 1.5572, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"grad_norm": 3.672816038131714, |
|
"learning_rate": 3.3500000000000005e-06, |
|
"loss": 1.509, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"grad_norm": 3.750598430633545, |
|
"learning_rate": 3.3400000000000006e-06, |
|
"loss": 1.5722, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"grad_norm": 3.681091070175171, |
|
"learning_rate": 3.3300000000000003e-06, |
|
"loss": 1.5474, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"grad_norm": 3.844376564025879, |
|
"learning_rate": 3.3200000000000004e-06, |
|
"loss": 1.5528, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"grad_norm": 3.9930875301361084, |
|
"learning_rate": 3.3100000000000005e-06, |
|
"loss": 1.5911, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"grad_norm": 3.7618513107299805, |
|
"learning_rate": 3.3000000000000006e-06, |
|
"loss": 1.6154, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"grad_norm": 4.1283087730407715, |
|
"learning_rate": 3.2900000000000003e-06, |
|
"loss": 1.6, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"grad_norm": 3.6537392139434814, |
|
"learning_rate": 3.2800000000000004e-06, |
|
"loss": 1.5778, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"grad_norm": 3.699431896209717, |
|
"learning_rate": 3.2700000000000005e-06, |
|
"loss": 1.5573, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"grad_norm": 3.5754897594451904, |
|
"learning_rate": 3.2600000000000006e-06, |
|
"loss": 1.523, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"grad_norm": 4.889479637145996, |
|
"learning_rate": 3.2500000000000002e-06, |
|
"loss": 1.5217, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"grad_norm": 3.9556918144226074, |
|
"learning_rate": 3.2400000000000003e-06, |
|
"loss": 1.5648, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"grad_norm": 3.928116798400879, |
|
"learning_rate": 3.2300000000000004e-06, |
|
"loss": 1.5848, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"grad_norm": 7.319475173950195, |
|
"learning_rate": 3.2200000000000005e-06, |
|
"loss": 1.6142, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"grad_norm": 3.798788547515869, |
|
"learning_rate": 3.21e-06, |
|
"loss": 1.5714, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"grad_norm": 3.664579153060913, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.5536, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"grad_norm": 4.740457057952881, |
|
"learning_rate": 3.1900000000000004e-06, |
|
"loss": 1.5049, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"grad_norm": 3.767961025238037, |
|
"learning_rate": 3.1800000000000005e-06, |
|
"loss": 1.5265, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"grad_norm": 5.350912570953369, |
|
"learning_rate": 3.17e-06, |
|
"loss": 1.5874, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"grad_norm": 3.9624764919281006, |
|
"learning_rate": 3.1600000000000002e-06, |
|
"loss": 1.5962, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"grad_norm": 5.696044445037842, |
|
"learning_rate": 3.1500000000000003e-06, |
|
"loss": 1.5823, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"grad_norm": 3.7998106479644775, |
|
"learning_rate": 3.1400000000000004e-06, |
|
"loss": 1.545, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"grad_norm": 3.566140651702881, |
|
"learning_rate": 3.13e-06, |
|
"loss": 1.5136, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"grad_norm": 5.0331034660339355, |
|
"learning_rate": 3.12e-06, |
|
"loss": 1.6512, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"grad_norm": 3.7487294673919678, |
|
"learning_rate": 3.1100000000000003e-06, |
|
"loss": 1.6111, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"grad_norm": 3.6235270500183105, |
|
"learning_rate": 3.1000000000000004e-06, |
|
"loss": 1.5063, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"grad_norm": 3.680666446685791, |
|
"learning_rate": 3.09e-06, |
|
"loss": 1.5583, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"grad_norm": 3.614833354949951, |
|
"learning_rate": 3.08e-06, |
|
"loss": 1.5614, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"grad_norm": 3.607713460922241, |
|
"learning_rate": 3.0700000000000003e-06, |
|
"loss": 1.4991, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"grad_norm": 3.688390016555786, |
|
"learning_rate": 3.0600000000000003e-06, |
|
"loss": 1.5396, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"grad_norm": 3.798908233642578, |
|
"learning_rate": 3.05e-06, |
|
"loss": 1.5662, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 3.712977647781372, |
|
"learning_rate": 3.04e-06, |
|
"loss": 1.5635, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"grad_norm": 3.8752996921539307, |
|
"learning_rate": 3.0300000000000002e-06, |
|
"loss": 1.5521, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"grad_norm": 3.8525609970092773, |
|
"learning_rate": 3.0200000000000003e-06, |
|
"loss": 1.568, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"grad_norm": 3.535076856613159, |
|
"learning_rate": 3.01e-06, |
|
"loss": 1.5145, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"grad_norm": 3.7408225536346436, |
|
"learning_rate": 3e-06, |
|
"loss": 1.5283, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0009126742220621242, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0019231262778780594, |
|
"eval_accuracy_Face": 0.578734655335222, |
|
"eval_accuracy_Hair": 0.845249771183583, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.036022446887309365, |
|
"eval_accuracy_Left-leg": 0.6706449883254219, |
|
"eval_accuracy_Left-shoe": 4.069209108517669e-06, |
|
"eval_accuracy_Pants": 0.6728932458016567, |
|
"eval_accuracy_Right-arm": 0.30538655106261053, |
|
"eval_accuracy_Right-leg": 0.480935937865789, |
|
"eval_accuracy_Right-shoe": 0.011455976546317836, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.27110261257118945, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.9136871595872413, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.0009116259116259116, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.0019225440525997106, |
|
"eval_iou_Face": 0.5178848523397338, |
|
"eval_iou_Hair": 0.6007418484996638, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.03568764381850742, |
|
"eval_iou_Left-leg": 0.4374997206166995, |
|
"eval_iou_Left-shoe": 4.069209108517669e-06, |
|
"eval_iou_Pants": 0.419367916862678, |
|
"eval_iou_Right-arm": 0.1942821547562062, |
|
"eval_iou_Right-leg": 0.3581906432208735, |
|
"eval_iou_Right-shoe": 0.011219649210411336, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.19643190800026794, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.45058670738637385, |
|
"eval_loss": 1.4944405555725098, |
|
"eval_mean_accuracy": 0.2817031302867876, |
|
"eval_mean_iou": 0.1791517379935972, |
|
"eval_overall_accuracy": 0.5183315661883767, |
|
"eval_runtime": 19.4698, |
|
"eval_samples_per_second": 10.272, |
|
"eval_steps_per_second": 5.136, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"grad_norm": 3.7130165100097656, |
|
"learning_rate": 2.99e-06, |
|
"loss": 1.484, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"grad_norm": 3.7528035640716553, |
|
"learning_rate": 2.9800000000000003e-06, |
|
"loss": 1.5472, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"grad_norm": 4.869040012359619, |
|
"learning_rate": 2.97e-06, |
|
"loss": 1.5262, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"grad_norm": 3.6537253856658936, |
|
"learning_rate": 2.96e-06, |
|
"loss": 1.5337, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"grad_norm": 3.6546456813812256, |
|
"learning_rate": 2.95e-06, |
|
"loss": 1.5214, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"grad_norm": 3.727203845977783, |
|
"learning_rate": 2.9400000000000002e-06, |
|
"loss": 1.5664, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"grad_norm": 3.654703140258789, |
|
"learning_rate": 2.93e-06, |
|
"loss": 1.5727, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"grad_norm": 3.8210673332214355, |
|
"learning_rate": 2.92e-06, |
|
"loss": 1.6212, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"grad_norm": 3.768174409866333, |
|
"learning_rate": 2.91e-06, |
|
"loss": 1.583, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"grad_norm": 3.725416898727417, |
|
"learning_rate": 2.9e-06, |
|
"loss": 1.5524, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"grad_norm": 4.663346767425537, |
|
"learning_rate": 2.89e-06, |
|
"loss": 1.5127, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"grad_norm": 3.6472418308258057, |
|
"learning_rate": 2.88e-06, |
|
"loss": 1.57, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"grad_norm": 3.8266072273254395, |
|
"learning_rate": 2.87e-06, |
|
"loss": 1.5692, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"grad_norm": 4.420265197753906, |
|
"learning_rate": 2.86e-06, |
|
"loss": 1.5352, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"grad_norm": 3.6510508060455322, |
|
"learning_rate": 2.85e-06, |
|
"loss": 1.5221, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"grad_norm": 4.078074932098389, |
|
"learning_rate": 2.84e-06, |
|
"loss": 1.5972, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"grad_norm": 3.7629568576812744, |
|
"learning_rate": 2.83e-06, |
|
"loss": 1.5468, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"grad_norm": 3.631978750228882, |
|
"learning_rate": 2.82e-06, |
|
"loss": 1.5277, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"grad_norm": 3.8016979694366455, |
|
"learning_rate": 2.8100000000000006e-06, |
|
"loss": 1.5902, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 3.8197274208068848, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.5703, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"grad_norm": 3.7942843437194824, |
|
"learning_rate": 2.7900000000000004e-06, |
|
"loss": 1.5542, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"grad_norm": 3.6853299140930176, |
|
"learning_rate": 2.7800000000000005e-06, |
|
"loss": 1.5363, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"grad_norm": 3.8867900371551514, |
|
"learning_rate": 2.7700000000000006e-06, |
|
"loss": 1.5655, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"grad_norm": 3.6021687984466553, |
|
"learning_rate": 2.7600000000000003e-06, |
|
"loss": 1.5484, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"grad_norm": 3.616196632385254, |
|
"learning_rate": 2.7500000000000004e-06, |
|
"loss": 1.5237, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"grad_norm": 3.6985888481140137, |
|
"learning_rate": 2.7400000000000004e-06, |
|
"loss": 1.4881, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"grad_norm": 3.6621720790863037, |
|
"learning_rate": 2.7300000000000005e-06, |
|
"loss": 1.5581, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"grad_norm": 3.9236061573028564, |
|
"learning_rate": 2.7200000000000002e-06, |
|
"loss": 1.5638, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"grad_norm": 3.6243274211883545, |
|
"learning_rate": 2.7100000000000003e-06, |
|
"loss": 1.5136, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"grad_norm": 3.638765573501587, |
|
"learning_rate": 2.7000000000000004e-06, |
|
"loss": 1.5479, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"grad_norm": 4.021576404571533, |
|
"learning_rate": 2.6900000000000005e-06, |
|
"loss": 1.5911, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"grad_norm": 3.783817768096924, |
|
"learning_rate": 2.68e-06, |
|
"loss": 1.5876, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"grad_norm": 3.9485607147216797, |
|
"learning_rate": 2.6700000000000003e-06, |
|
"loss": 1.4901, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"grad_norm": 3.484511375427246, |
|
"learning_rate": 2.6600000000000004e-06, |
|
"loss": 1.487, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"grad_norm": 3.701113224029541, |
|
"learning_rate": 2.6500000000000005e-06, |
|
"loss": 1.5362, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"grad_norm": 3.749748945236206, |
|
"learning_rate": 2.64e-06, |
|
"loss": 1.5197, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"grad_norm": 3.566333532333374, |
|
"learning_rate": 2.6300000000000002e-06, |
|
"loss": 1.5267, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"grad_norm": 3.6239655017852783, |
|
"learning_rate": 2.6200000000000003e-06, |
|
"loss": 1.5088, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"grad_norm": 3.600148916244507, |
|
"learning_rate": 2.6100000000000004e-06, |
|
"loss": 1.5092, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"grad_norm": 3.764092206954956, |
|
"learning_rate": 2.6e-06, |
|
"loss": 1.5429, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"grad_norm": 3.622396230697632, |
|
"learning_rate": 2.59e-06, |
|
"loss": 1.4913, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"grad_norm": 3.618483543395996, |
|
"learning_rate": 2.5800000000000003e-06, |
|
"loss": 1.5355, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"grad_norm": 3.7289323806762695, |
|
"learning_rate": 2.5700000000000004e-06, |
|
"loss": 1.5694, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 3.675210475921631, |
|
"learning_rate": 2.56e-06, |
|
"loss": 1.5125, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"grad_norm": 5.952239990234375, |
|
"learning_rate": 2.55e-06, |
|
"loss": 1.5209, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"grad_norm": 3.7280726432800293, |
|
"learning_rate": 2.5400000000000002e-06, |
|
"loss": 1.5112, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"grad_norm": 3.7634828090667725, |
|
"learning_rate": 2.5300000000000003e-06, |
|
"loss": 1.5861, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"grad_norm": 3.5279695987701416, |
|
"learning_rate": 2.52e-06, |
|
"loss": 1.5223, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"grad_norm": 4.113193035125732, |
|
"learning_rate": 2.51e-06, |
|
"loss": 1.5657, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"grad_norm": 4.778825283050537, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.6106, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"grad_norm": 3.8941431045532227, |
|
"learning_rate": 2.4900000000000003e-06, |
|
"loss": 1.5927, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"grad_norm": 3.94014048576355, |
|
"learning_rate": 2.4800000000000004e-06, |
|
"loss": 1.5941, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"grad_norm": 3.9820590019226074, |
|
"learning_rate": 2.47e-06, |
|
"loss": 1.5982, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"grad_norm": 3.7743656635284424, |
|
"learning_rate": 2.46e-06, |
|
"loss": 1.5324, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"grad_norm": 3.545868396759033, |
|
"learning_rate": 2.4500000000000003e-06, |
|
"loss": 1.5289, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"grad_norm": 3.651278495788574, |
|
"learning_rate": 2.4400000000000004e-06, |
|
"loss": 1.5539, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"grad_norm": 3.6776418685913086, |
|
"learning_rate": 2.43e-06, |
|
"loss": 1.4985, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"grad_norm": 3.6130895614624023, |
|
"learning_rate": 2.42e-06, |
|
"loss": 1.5035, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"grad_norm": 3.8193483352661133, |
|
"learning_rate": 2.4100000000000002e-06, |
|
"loss": 1.4696, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"grad_norm": 4.524262428283691, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.5263, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"grad_norm": 3.712820291519165, |
|
"learning_rate": 2.39e-06, |
|
"loss": 1.5367, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"grad_norm": 3.8065736293792725, |
|
"learning_rate": 2.38e-06, |
|
"loss": 1.5408, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"grad_norm": 3.8932955265045166, |
|
"learning_rate": 2.37e-06, |
|
"loss": 1.5534, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"grad_norm": 3.821714401245117, |
|
"learning_rate": 2.3600000000000003e-06, |
|
"loss": 1.5752, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"grad_norm": 3.6142208576202393, |
|
"learning_rate": 2.35e-06, |
|
"loss": 1.5256, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"grad_norm": 3.7436635494232178, |
|
"learning_rate": 2.3400000000000005e-06, |
|
"loss": 1.551, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"grad_norm": 3.660123109817505, |
|
"learning_rate": 2.33e-06, |
|
"loss": 1.4843, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"grad_norm": 3.7594404220581055, |
|
"learning_rate": 2.3200000000000002e-06, |
|
"loss": 1.4972, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"grad_norm": 3.8042383193969727, |
|
"learning_rate": 2.3100000000000003e-06, |
|
"loss": 1.5341, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"grad_norm": 3.5988681316375732, |
|
"learning_rate": 2.3000000000000004e-06, |
|
"loss": 1.5041, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"grad_norm": 3.490817070007324, |
|
"learning_rate": 2.29e-06, |
|
"loss": 1.5127, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"grad_norm": 3.794908046722412, |
|
"learning_rate": 2.28e-06, |
|
"loss": 1.5646, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"grad_norm": 3.616783380508423, |
|
"learning_rate": 2.2700000000000003e-06, |
|
"loss": 1.4901, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"grad_norm": 3.681711435317993, |
|
"learning_rate": 2.2600000000000004e-06, |
|
"loss": 1.5315, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"grad_norm": 3.7920382022857666, |
|
"learning_rate": 2.25e-06, |
|
"loss": 1.509, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"grad_norm": 3.6532371044158936, |
|
"learning_rate": 2.24e-06, |
|
"loss": 1.5448, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"grad_norm": 3.788717031478882, |
|
"learning_rate": 2.2300000000000002e-06, |
|
"loss": 1.5913, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"grad_norm": 3.6618025302886963, |
|
"learning_rate": 2.2200000000000003e-06, |
|
"loss": 1.5931, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"grad_norm": 3.660717010498047, |
|
"learning_rate": 2.21e-06, |
|
"loss": 1.5322, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"grad_norm": 3.6478917598724365, |
|
"learning_rate": 2.2e-06, |
|
"loss": 1.5965, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"grad_norm": 3.695657968521118, |
|
"learning_rate": 2.19e-06, |
|
"loss": 1.5281, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"grad_norm": 3.769746780395508, |
|
"learning_rate": 2.1800000000000003e-06, |
|
"loss": 1.4949, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"grad_norm": 3.935413360595703, |
|
"learning_rate": 2.17e-06, |
|
"loss": 1.5479, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"grad_norm": 3.6024951934814453, |
|
"learning_rate": 2.16e-06, |
|
"loss": 1.5072, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"grad_norm": 3.811274528503418, |
|
"learning_rate": 2.15e-06, |
|
"loss": 1.4751, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"grad_norm": 3.6077747344970703, |
|
"learning_rate": 2.1400000000000003e-06, |
|
"loss": 1.4972, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"grad_norm": 4.010575294494629, |
|
"learning_rate": 2.13e-06, |
|
"loss": 1.5247, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"grad_norm": 3.8022336959838867, |
|
"learning_rate": 2.12e-06, |
|
"loss": 1.4417, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"grad_norm": 3.602715253829956, |
|
"learning_rate": 2.11e-06, |
|
"loss": 1.5313, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"grad_norm": 3.749661445617676, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 1.4681, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"grad_norm": 3.915527105331421, |
|
"learning_rate": 2.09e-06, |
|
"loss": 1.5141, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"grad_norm": 3.765939235687256, |
|
"learning_rate": 2.08e-06, |
|
"loss": 1.4743, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"grad_norm": 3.658564805984497, |
|
"learning_rate": 2.07e-06, |
|
"loss": 1.5254, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"grad_norm": 3.61059308052063, |
|
"learning_rate": 2.06e-06, |
|
"loss": 1.5296, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"grad_norm": 3.76487398147583, |
|
"learning_rate": 2.05e-06, |
|
"loss": 1.5297, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"grad_norm": 3.7161519527435303, |
|
"learning_rate": 2.04e-06, |
|
"loss": 1.5504, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"grad_norm": 3.8170692920684814, |
|
"learning_rate": 2.0300000000000005e-06, |
|
"loss": 1.5587, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"grad_norm": 3.55039119720459, |
|
"learning_rate": 2.02e-06, |
|
"loss": 1.4905, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"grad_norm": 3.5521974563598633, |
|
"learning_rate": 2.0100000000000002e-06, |
|
"loss": 1.554, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 3.8850300312042236, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.5141, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0014339549447199978, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.0039951066359833354, |
|
"eval_accuracy_Face": 0.6319237702807108, |
|
"eval_accuracy_Hair": 0.8404903897104871, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.0420462182194556, |
|
"eval_accuracy_Left-leg": 0.7109552597272448, |
|
"eval_accuracy_Left-shoe": 4.069209108517669e-06, |
|
"eval_accuracy_Pants": 0.660479200933781, |
|
"eval_accuracy_Right-arm": 0.281411166753868, |
|
"eval_accuracy_Right-leg": 0.4871758679097181, |
|
"eval_accuracy_Right-shoe": 0.008870813549600077, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.30314486523570067, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.9182612629857135, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.0014318125224801483, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.003990218411055724, |
|
"eval_iou_Face": 0.5504740282756857, |
|
"eval_iou_Hair": 0.6120304644737821, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.0416618061631185, |
|
"eval_iou_Left-leg": 0.4381265465724155, |
|
"eval_iou_Left-shoe": 4.069209108517669e-06, |
|
"eval_iou_Pants": 0.4309463728612006, |
|
"eval_iou_Right-arm": 0.18934599775031138, |
|
"eval_iou_Right-leg": 0.35409428317983965, |
|
"eval_iou_Right-shoe": 0.008738130738444636, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.2113208827083289, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.45485907051723534, |
|
"eval_loss": 1.4899530410766602, |
|
"eval_mean_accuracy": 0.28765834977035837, |
|
"eval_mean_iou": 0.18316798241016705, |
|
"eval_overall_accuracy": 0.5244818678183832, |
|
"eval_runtime": 21.1255, |
|
"eval_samples_per_second": 9.467, |
|
"eval_steps_per_second": 4.734, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"grad_norm": 3.8289945125579834, |
|
"learning_rate": 1.9900000000000004e-06, |
|
"loss": 1.5278, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"grad_norm": 3.7112457752227783, |
|
"learning_rate": 1.98e-06, |
|
"loss": 1.5109, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"grad_norm": 3.4778196811676025, |
|
"learning_rate": 1.97e-06, |
|
"loss": 1.5195, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"grad_norm": 4.177113056182861, |
|
"learning_rate": 1.9600000000000003e-06, |
|
"loss": 1.5636, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"grad_norm": 4.124269008636475, |
|
"learning_rate": 1.9500000000000004e-06, |
|
"loss": 1.5282, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"grad_norm": 3.603883981704712, |
|
"learning_rate": 1.94e-06, |
|
"loss": 1.5311, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"grad_norm": 3.7415599822998047, |
|
"learning_rate": 1.93e-06, |
|
"loss": 1.4982, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"grad_norm": 3.6410176753997803, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 1.5194, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"grad_norm": 3.6034183502197266, |
|
"learning_rate": 1.9100000000000003e-06, |
|
"loss": 1.5339, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"grad_norm": 3.5757575035095215, |
|
"learning_rate": 1.9000000000000002e-06, |
|
"loss": 1.5118, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"grad_norm": 3.724278688430786, |
|
"learning_rate": 1.8900000000000001e-06, |
|
"loss": 1.4946, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"grad_norm": 3.567145586013794, |
|
"learning_rate": 1.8800000000000002e-06, |
|
"loss": 1.4762, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"grad_norm": 3.6245245933532715, |
|
"learning_rate": 1.87e-06, |
|
"loss": 1.5236, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"grad_norm": 3.5970418453216553, |
|
"learning_rate": 1.8600000000000002e-06, |
|
"loss": 1.5411, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"grad_norm": 3.745891809463501, |
|
"learning_rate": 1.85e-06, |
|
"loss": 1.4836, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"grad_norm": 3.6910178661346436, |
|
"learning_rate": 1.8400000000000002e-06, |
|
"loss": 1.4475, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"grad_norm": 6.758592128753662, |
|
"learning_rate": 1.83e-06, |
|
"loss": 1.525, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"grad_norm": 3.565028429031372, |
|
"learning_rate": 1.8200000000000002e-06, |
|
"loss": 1.515, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"grad_norm": 3.6472854614257812, |
|
"learning_rate": 1.81e-06, |
|
"loss": 1.5248, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"grad_norm": 3.801252603530884, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 1.4472, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"grad_norm": 3.6744468212127686, |
|
"learning_rate": 1.79e-06, |
|
"loss": 1.4936, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"grad_norm": 3.6374239921569824, |
|
"learning_rate": 1.7800000000000001e-06, |
|
"loss": 1.5305, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"grad_norm": 3.754434108734131, |
|
"learning_rate": 1.77e-06, |
|
"loss": 1.5504, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"grad_norm": 3.609375476837158, |
|
"learning_rate": 1.76e-06, |
|
"loss": 1.4758, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"grad_norm": 3.6927921772003174, |
|
"learning_rate": 1.75e-06, |
|
"loss": 1.5141, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"grad_norm": 3.788146495819092, |
|
"learning_rate": 1.74e-06, |
|
"loss": 1.5064, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"grad_norm": 3.8312742710113525, |
|
"learning_rate": 1.73e-06, |
|
"loss": 1.47, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"grad_norm": 3.9256484508514404, |
|
"learning_rate": 1.72e-06, |
|
"loss": 1.5341, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"grad_norm": 4.1321330070495605, |
|
"learning_rate": 1.7100000000000004e-06, |
|
"loss": 1.4789, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"grad_norm": 3.580707550048828, |
|
"learning_rate": 1.7000000000000002e-06, |
|
"loss": 1.4593, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"grad_norm": 3.616859197616577, |
|
"learning_rate": 1.6900000000000003e-06, |
|
"loss": 1.4941, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"grad_norm": 3.6412851810455322, |
|
"learning_rate": 1.6800000000000002e-06, |
|
"loss": 1.4852, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"grad_norm": 3.6023621559143066, |
|
"learning_rate": 1.6700000000000003e-06, |
|
"loss": 1.498, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"grad_norm": 3.5858147144317627, |
|
"learning_rate": 1.6600000000000002e-06, |
|
"loss": 1.5442, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"grad_norm": 3.4679300785064697, |
|
"learning_rate": 1.6500000000000003e-06, |
|
"loss": 1.4991, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"grad_norm": 3.636946439743042, |
|
"learning_rate": 1.6400000000000002e-06, |
|
"loss": 1.5151, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"grad_norm": 3.7489140033721924, |
|
"learning_rate": 1.6300000000000003e-06, |
|
"loss": 1.557, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"grad_norm": 5.346006393432617, |
|
"learning_rate": 1.6200000000000002e-06, |
|
"loss": 1.4376, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"grad_norm": 3.636017322540283, |
|
"learning_rate": 1.6100000000000003e-06, |
|
"loss": 1.5406, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"grad_norm": 3.614189624786377, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.5203, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"grad_norm": 3.575302839279175, |
|
"learning_rate": 1.5900000000000002e-06, |
|
"loss": 1.5213, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"grad_norm": 3.6021499633789062, |
|
"learning_rate": 1.5800000000000001e-06, |
|
"loss": 1.4498, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"grad_norm": 3.5428478717803955, |
|
"learning_rate": 1.5700000000000002e-06, |
|
"loss": 1.4813, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"grad_norm": 3.575767755508423, |
|
"learning_rate": 1.56e-06, |
|
"loss": 1.4918, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"grad_norm": 3.663245677947998, |
|
"learning_rate": 1.5500000000000002e-06, |
|
"loss": 1.4605, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"grad_norm": 3.5065038204193115, |
|
"learning_rate": 1.54e-06, |
|
"loss": 1.4743, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"grad_norm": 3.814398765563965, |
|
"learning_rate": 1.5300000000000002e-06, |
|
"loss": 1.5622, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"grad_norm": 3.6801810264587402, |
|
"learning_rate": 1.52e-06, |
|
"loss": 1.5411, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"grad_norm": 3.9393820762634277, |
|
"learning_rate": 1.5100000000000002e-06, |
|
"loss": 1.5145, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"grad_norm": 3.565678119659424, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.4361, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"grad_norm": 3.7065446376800537, |
|
"learning_rate": 1.4900000000000001e-06, |
|
"loss": 1.5213, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"grad_norm": 3.5156848430633545, |
|
"learning_rate": 1.48e-06, |
|
"loss": 1.5117, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"grad_norm": 3.9525537490844727, |
|
"learning_rate": 1.4700000000000001e-06, |
|
"loss": 1.5415, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"grad_norm": 3.5773215293884277, |
|
"learning_rate": 1.46e-06, |
|
"loss": 1.4983, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"grad_norm": 4.079376697540283, |
|
"learning_rate": 1.45e-06, |
|
"loss": 1.5875, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"grad_norm": 4.09572696685791, |
|
"learning_rate": 1.44e-06, |
|
"loss": 1.5173, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"grad_norm": 3.643479347229004, |
|
"learning_rate": 1.43e-06, |
|
"loss": 1.5536, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"grad_norm": 4.02496862411499, |
|
"learning_rate": 1.42e-06, |
|
"loss": 1.5821, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"grad_norm": 4.333177089691162, |
|
"learning_rate": 1.41e-06, |
|
"loss": 1.4706, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"grad_norm": 3.50635027885437, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 1.4768, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"grad_norm": 4.010918617248535, |
|
"learning_rate": 1.3900000000000002e-06, |
|
"loss": 1.4998, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"grad_norm": 3.5356197357177734, |
|
"learning_rate": 1.3800000000000001e-06, |
|
"loss": 1.5336, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"grad_norm": 3.6609745025634766, |
|
"learning_rate": 1.3700000000000002e-06, |
|
"loss": 1.4741, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"grad_norm": 3.625298500061035, |
|
"learning_rate": 1.3600000000000001e-06, |
|
"loss": 1.4695, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"grad_norm": 4.100505828857422, |
|
"learning_rate": 1.3500000000000002e-06, |
|
"loss": 1.5354, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"grad_norm": 4.842679500579834, |
|
"learning_rate": 1.34e-06, |
|
"loss": 1.5656, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"grad_norm": 4.331446170806885, |
|
"learning_rate": 1.3300000000000002e-06, |
|
"loss": 1.4827, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"grad_norm": 5.948760986328125, |
|
"learning_rate": 1.32e-06, |
|
"loss": 1.4965, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"grad_norm": 3.821892261505127, |
|
"learning_rate": 1.3100000000000002e-06, |
|
"loss": 1.5706, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"grad_norm": 3.4270920753479004, |
|
"learning_rate": 1.3e-06, |
|
"loss": 1.468, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"grad_norm": 3.6648504734039307, |
|
"learning_rate": 1.2900000000000001e-06, |
|
"loss": 1.5191, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"grad_norm": 3.6058945655822754, |
|
"learning_rate": 1.28e-06, |
|
"loss": 1.4853, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"grad_norm": 3.8144729137420654, |
|
"learning_rate": 1.2700000000000001e-06, |
|
"loss": 1.4729, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"grad_norm": 3.588139772415161, |
|
"learning_rate": 1.26e-06, |
|
"loss": 1.4795, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"grad_norm": 3.4725489616394043, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.4724, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"grad_norm": 3.60231876373291, |
|
"learning_rate": 1.2400000000000002e-06, |
|
"loss": 1.5059, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"grad_norm": 3.6680707931518555, |
|
"learning_rate": 1.23e-06, |
|
"loss": 1.5639, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"grad_norm": 3.7948758602142334, |
|
"learning_rate": 1.2200000000000002e-06, |
|
"loss": 1.4903, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"grad_norm": 3.6697640419006348, |
|
"learning_rate": 1.21e-06, |
|
"loss": 1.4904, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"grad_norm": 3.5780935287475586, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.4581, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"grad_norm": 3.8164820671081543, |
|
"learning_rate": 1.19e-06, |
|
"loss": 1.5288, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"grad_norm": 3.647608757019043, |
|
"learning_rate": 1.1800000000000001e-06, |
|
"loss": 1.5303, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"grad_norm": 3.4735324382781982, |
|
"learning_rate": 1.1700000000000002e-06, |
|
"loss": 1.4956, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"grad_norm": 3.616305112838745, |
|
"learning_rate": 1.1600000000000001e-06, |
|
"loss": 1.4892, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"grad_norm": 3.66984224319458, |
|
"learning_rate": 1.1500000000000002e-06, |
|
"loss": 1.5317, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"grad_norm": 3.8444058895111084, |
|
"learning_rate": 1.14e-06, |
|
"loss": 1.515, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"grad_norm": 4.0910186767578125, |
|
"learning_rate": 1.1300000000000002e-06, |
|
"loss": 1.4947, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"grad_norm": 3.7761266231536865, |
|
"learning_rate": 1.12e-06, |
|
"loss": 1.4598, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"grad_norm": 4.1520466804504395, |
|
"learning_rate": 1.1100000000000002e-06, |
|
"loss": 1.5511, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"grad_norm": 3.530054807662964, |
|
"learning_rate": 1.1e-06, |
|
"loss": 1.4704, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"grad_norm": 3.4831347465515137, |
|
"learning_rate": 1.0900000000000002e-06, |
|
"loss": 1.4447, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"grad_norm": 3.4318552017211914, |
|
"learning_rate": 1.08e-06, |
|
"loss": 1.4308, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"grad_norm": 3.5509145259857178, |
|
"learning_rate": 1.0700000000000001e-06, |
|
"loss": 1.4265, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"grad_norm": 4.087670803070068, |
|
"learning_rate": 1.06e-06, |
|
"loss": 1.4684, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"grad_norm": 4.161181449890137, |
|
"learning_rate": 1.0500000000000001e-06, |
|
"loss": 1.5536, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"grad_norm": 3.9097800254821777, |
|
"learning_rate": 1.04e-06, |
|
"loss": 1.4921, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"grad_norm": 3.7469077110290527, |
|
"learning_rate": 1.03e-06, |
|
"loss": 1.4285, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"grad_norm": 3.770315170288086, |
|
"learning_rate": 1.02e-06, |
|
"loss": 1.5288, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"grad_norm": 3.7717573642730713, |
|
"learning_rate": 1.01e-06, |
|
"loss": 1.5141, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"grad_norm": 3.617389678955078, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.5519, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.003072265787359029, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.00474878735253608, |
|
"eval_accuracy_Face": 0.6375894926603142, |
|
"eval_accuracy_Hair": 0.8471169131461053, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.04899955361303646, |
|
"eval_accuracy_Left-leg": 0.7140216566423467, |
|
"eval_accuracy_Left-shoe": 2.034604554258834e-05, |
|
"eval_accuracy_Pants": 0.704087899479929, |
|
"eval_accuracy_Right-arm": 0.2775095298602287, |
|
"eval_accuracy_Right-leg": 0.4916927165815167, |
|
"eval_accuracy_Right-shoe": 0.007381058602338995, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.3431435515301021, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.8986799380514034, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.0030653538980450162, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.00473680482845189, |
|
"eval_iou_Face": 0.5557427831734583, |
|
"eval_iou_Hair": 0.6135876733264014, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.048533170082305725, |
|
"eval_iou_Left-leg": 0.4329384196389064, |
|
"eval_iou_Left-shoe": 2.034604554258834e-05, |
|
"eval_iou_Pants": 0.43646755253215225, |
|
"eval_iou_Right-arm": 0.1887960404766344, |
|
"eval_iou_Right-leg": 0.3513980658725287, |
|
"eval_iou_Right-shoe": 0.007283375587131262, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.2286983845940569, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.46615604522805204, |
|
"eval_loss": 1.485084056854248, |
|
"eval_mean_accuracy": 0.29282727702075056, |
|
"eval_mean_iou": 0.18541244529353704, |
|
"eval_overall_accuracy": 0.5295694045481099, |
|
"eval_runtime": 20.3708, |
|
"eval_samples_per_second": 9.818, |
|
"eval_steps_per_second": 4.909, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"grad_norm": 3.8818020820617676, |
|
"learning_rate": 9.9e-07, |
|
"loss": 1.5319, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"grad_norm": 3.656554937362671, |
|
"learning_rate": 9.800000000000001e-07, |
|
"loss": 1.5407, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"grad_norm": 3.5967953205108643, |
|
"learning_rate": 9.7e-07, |
|
"loss": 1.4693, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"grad_norm": 3.6473309993743896, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 1.5023, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"grad_norm": 3.9312899112701416, |
|
"learning_rate": 9.500000000000001e-07, |
|
"loss": 1.5816, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"grad_norm": 3.5336856842041016, |
|
"learning_rate": 9.400000000000001e-07, |
|
"loss": 1.4675, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"grad_norm": 5.659257888793945, |
|
"learning_rate": 9.300000000000001e-07, |
|
"loss": 1.5115, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"grad_norm": 3.7190868854522705, |
|
"learning_rate": 9.200000000000001e-07, |
|
"loss": 1.4899, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"grad_norm": 3.895996332168579, |
|
"learning_rate": 9.100000000000001e-07, |
|
"loss": 1.5517, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"grad_norm": 3.491661548614502, |
|
"learning_rate": 9.000000000000001e-07, |
|
"loss": 1.4908, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"grad_norm": 3.505462169647217, |
|
"learning_rate": 8.900000000000001e-07, |
|
"loss": 1.4581, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"grad_norm": 3.5954673290252686, |
|
"learning_rate": 8.8e-07, |
|
"loss": 1.5062, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"grad_norm": 3.5745086669921875, |
|
"learning_rate": 8.7e-07, |
|
"loss": 1.4829, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"grad_norm": 3.609250545501709, |
|
"learning_rate": 8.6e-07, |
|
"loss": 1.4689, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"grad_norm": 4.004081726074219, |
|
"learning_rate": 8.500000000000001e-07, |
|
"loss": 1.513, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"grad_norm": 3.6088292598724365, |
|
"learning_rate": 8.400000000000001e-07, |
|
"loss": 1.5226, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"grad_norm": 3.3713223934173584, |
|
"learning_rate": 8.300000000000001e-07, |
|
"loss": 1.4688, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"grad_norm": 3.4937918186187744, |
|
"learning_rate": 8.200000000000001e-07, |
|
"loss": 1.4888, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"grad_norm": 4.056433200836182, |
|
"learning_rate": 8.100000000000001e-07, |
|
"loss": 1.4864, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"grad_norm": 4.000036239624023, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.5375, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"grad_norm": 3.6113851070404053, |
|
"learning_rate": 7.900000000000001e-07, |
|
"loss": 1.482, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"grad_norm": 3.656848192214966, |
|
"learning_rate": 7.8e-07, |
|
"loss": 1.4768, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"grad_norm": 5.102404594421387, |
|
"learning_rate": 7.7e-07, |
|
"loss": 1.5656, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"grad_norm": 4.507122993469238, |
|
"learning_rate": 7.6e-07, |
|
"loss": 1.4941, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"grad_norm": 3.8796324729919434, |
|
"learning_rate": 7.5e-07, |
|
"loss": 1.537, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"grad_norm": 3.546234130859375, |
|
"learning_rate": 7.4e-07, |
|
"loss": 1.471, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"grad_norm": 5.03427791595459, |
|
"learning_rate": 7.3e-07, |
|
"loss": 1.4713, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"grad_norm": 3.627492904663086, |
|
"learning_rate": 7.2e-07, |
|
"loss": 1.4444, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"grad_norm": 3.6240904331207275, |
|
"learning_rate": 7.1e-07, |
|
"loss": 1.4876, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"grad_norm": 3.4112884998321533, |
|
"learning_rate": 7.000000000000001e-07, |
|
"loss": 1.4615, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"grad_norm": 3.6896421909332275, |
|
"learning_rate": 6.900000000000001e-07, |
|
"loss": 1.5532, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"grad_norm": 3.6501705646514893, |
|
"learning_rate": 6.800000000000001e-07, |
|
"loss": 1.482, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"grad_norm": 3.548887014389038, |
|
"learning_rate": 6.7e-07, |
|
"loss": 1.4896, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"grad_norm": 3.5884876251220703, |
|
"learning_rate": 6.6e-07, |
|
"loss": 1.4985, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"grad_norm": 3.5559868812561035, |
|
"learning_rate": 6.5e-07, |
|
"loss": 1.467, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"grad_norm": 4.355540752410889, |
|
"learning_rate": 6.4e-07, |
|
"loss": 1.4938, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"grad_norm": 3.697014331817627, |
|
"learning_rate": 6.3e-07, |
|
"loss": 1.5162, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"grad_norm": 3.850156784057617, |
|
"learning_rate": 6.200000000000001e-07, |
|
"loss": 1.5463, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"grad_norm": 3.7023043632507324, |
|
"learning_rate": 6.100000000000001e-07, |
|
"loss": 1.5325, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"grad_norm": 3.533128261566162, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.5011, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"grad_norm": 3.618462562561035, |
|
"learning_rate": 5.900000000000001e-07, |
|
"loss": 1.4593, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"grad_norm": 3.6282787322998047, |
|
"learning_rate": 5.800000000000001e-07, |
|
"loss": 1.5022, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"grad_norm": 3.6790478229522705, |
|
"learning_rate": 5.7e-07, |
|
"loss": 1.5246, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"grad_norm": 3.531965732574463, |
|
"learning_rate": 5.6e-07, |
|
"loss": 1.4611, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"grad_norm": 5.838464736938477, |
|
"learning_rate": 5.5e-07, |
|
"loss": 1.4679, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"grad_norm": 4.229060649871826, |
|
"learning_rate": 5.4e-07, |
|
"loss": 1.5022, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"grad_norm": 3.606503486633301, |
|
"learning_rate": 5.3e-07, |
|
"loss": 1.4992, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"grad_norm": 4.053056240081787, |
|
"learning_rate": 5.2e-07, |
|
"loss": 1.4904, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"grad_norm": 3.6705806255340576, |
|
"learning_rate": 5.1e-07, |
|
"loss": 1.4901, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"grad_norm": 3.6708433628082275, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 1.5384, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"grad_norm": 3.6792004108428955, |
|
"learning_rate": 4.900000000000001e-07, |
|
"loss": 1.5062, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 3.6054112911224365, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 1.5037, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"grad_norm": 3.4688820838928223, |
|
"learning_rate": 4.7000000000000005e-07, |
|
"loss": 1.4723, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"grad_norm": 3.427682638168335, |
|
"learning_rate": 4.6000000000000004e-07, |
|
"loss": 1.4699, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"grad_norm": 3.6832149028778076, |
|
"learning_rate": 4.5000000000000003e-07, |
|
"loss": 1.4784, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"grad_norm": 3.405571222305298, |
|
"learning_rate": 4.4e-07, |
|
"loss": 1.4407, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"grad_norm": 3.500774621963501, |
|
"learning_rate": 4.3e-07, |
|
"loss": 1.4638, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"grad_norm": 3.620715618133545, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 1.5111, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"grad_norm": 3.919987916946411, |
|
"learning_rate": 4.1000000000000004e-07, |
|
"loss": 1.5272, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"grad_norm": 3.4874136447906494, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.452, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"grad_norm": 3.940392255783081, |
|
"learning_rate": 3.9e-07, |
|
"loss": 1.5017, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"grad_norm": 3.5874853134155273, |
|
"learning_rate": 3.8e-07, |
|
"loss": 1.4985, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"grad_norm": 3.5837271213531494, |
|
"learning_rate": 3.7e-07, |
|
"loss": 1.4875, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"grad_norm": 3.666430950164795, |
|
"learning_rate": 3.6e-07, |
|
"loss": 1.5117, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"grad_norm": 3.5945935249328613, |
|
"learning_rate": 3.5000000000000004e-07, |
|
"loss": 1.478, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"grad_norm": 3.785447835922241, |
|
"learning_rate": 3.4000000000000003e-07, |
|
"loss": 1.5521, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"grad_norm": 3.7975199222564697, |
|
"learning_rate": 3.3e-07, |
|
"loss": 1.5358, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"grad_norm": 3.343806743621826, |
|
"learning_rate": 3.2e-07, |
|
"loss": 1.465, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"grad_norm": 3.770555257797241, |
|
"learning_rate": 3.1000000000000005e-07, |
|
"loss": 1.4692, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"grad_norm": 3.662292003631592, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.5106, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"grad_norm": 4.143115997314453, |
|
"learning_rate": 2.9000000000000003e-07, |
|
"loss": 1.4888, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"grad_norm": 3.734050750732422, |
|
"learning_rate": 2.8e-07, |
|
"loss": 1.4891, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"grad_norm": 3.8173704147338867, |
|
"learning_rate": 2.7e-07, |
|
"loss": 1.4368, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"grad_norm": 3.636580228805542, |
|
"learning_rate": 2.6e-07, |
|
"loss": 1.4263, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"grad_norm": 3.6115496158599854, |
|
"learning_rate": 2.5000000000000004e-07, |
|
"loss": 1.5288, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"grad_norm": 3.625690221786499, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 1.4836, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"grad_norm": 3.6790812015533447, |
|
"learning_rate": 2.3000000000000002e-07, |
|
"loss": 1.4854, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"grad_norm": 4.2709760665893555, |
|
"learning_rate": 2.2e-07, |
|
"loss": 1.5266, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"grad_norm": 3.527794599533081, |
|
"learning_rate": 2.1000000000000003e-07, |
|
"loss": 1.4624, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"grad_norm": 3.573599100112915, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 1.4549, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"grad_norm": 3.921391248703003, |
|
"learning_rate": 1.9e-07, |
|
"loss": 1.4461, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"grad_norm": 3.568066120147705, |
|
"learning_rate": 1.8e-07, |
|
"loss": 1.4996, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"grad_norm": 5.041318893432617, |
|
"learning_rate": 1.7000000000000001e-07, |
|
"loss": 1.5472, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"grad_norm": 3.500187397003174, |
|
"learning_rate": 1.6e-07, |
|
"loss": 1.4739, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"grad_norm": 4.885345935821533, |
|
"learning_rate": 1.5000000000000002e-07, |
|
"loss": 1.4583, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"grad_norm": 3.598309278488159, |
|
"learning_rate": 1.4e-07, |
|
"loss": 1.5187, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"grad_norm": 3.5442376136779785, |
|
"learning_rate": 1.3e-07, |
|
"loss": 1.4594, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"grad_norm": 3.703713893890381, |
|
"learning_rate": 1.2000000000000002e-07, |
|
"loss": 1.4952, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"grad_norm": 3.474971055984497, |
|
"learning_rate": 1.1e-07, |
|
"loss": 1.5144, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"grad_norm": 4.749277591705322, |
|
"learning_rate": 1.0000000000000001e-07, |
|
"loss": 1.4724, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"grad_norm": 3.81951904296875, |
|
"learning_rate": 9e-08, |
|
"loss": 1.5222, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"grad_norm": 3.663991928100586, |
|
"learning_rate": 8e-08, |
|
"loss": 1.4863, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"grad_norm": 3.5149052143096924, |
|
"learning_rate": 7e-08, |
|
"loss": 1.45, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"grad_norm": 3.5175232887268066, |
|
"learning_rate": 6.000000000000001e-08, |
|
"loss": 1.5463, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"grad_norm": 3.833277463912964, |
|
"learning_rate": 5.0000000000000004e-08, |
|
"loss": 1.489, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"grad_norm": 3.59645414352417, |
|
"learning_rate": 4e-08, |
|
"loss": 1.5371, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"grad_norm": 3.5608158111572266, |
|
"learning_rate": 3.0000000000000004e-08, |
|
"loss": 1.4857, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"grad_norm": 4.02113676071167, |
|
"learning_rate": 2e-08, |
|
"loss": 1.4749, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"grad_norm": 5.692778587341309, |
|
"learning_rate": 1e-08, |
|
"loss": 1.5184, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 3.5329225063323975, |
|
"learning_rate": 0.0, |
|
"loss": 1.5166, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy_Background": NaN, |
|
"eval_accuracy_Bag": 0.0040559450247998, |
|
"eval_accuracy_Belt": 0.0, |
|
"eval_accuracy_Dress": 0.004398894715680152, |
|
"eval_accuracy_Face": 0.6567155979053996, |
|
"eval_accuracy_Hair": 0.8342588756683847, |
|
"eval_accuracy_Hat": 0.0, |
|
"eval_accuracy_Left-arm": 0.04175189714459504, |
|
"eval_accuracy_Left-leg": 0.7527387721050387, |
|
"eval_accuracy_Left-shoe": 0.0, |
|
"eval_accuracy_Pants": 0.6638292295140126, |
|
"eval_accuracy_Right-arm": 0.29192017340608417, |
|
"eval_accuracy_Right-leg": 0.4466120192241486, |
|
"eval_accuracy_Right-shoe": 0.007042477932506931, |
|
"eval_accuracy_Scarf": 0.0, |
|
"eval_accuracy_Skirt": 0.35244410061038656, |
|
"eval_accuracy_Sunglasses": 0.0, |
|
"eval_accuracy_Upper-clothes": 0.9139391302834671, |
|
"eval_iou_Background": 0.0, |
|
"eval_iou_Bag": 0.004045981141789008, |
|
"eval_iou_Belt": 0.0, |
|
"eval_iou_Dress": 0.004388978559558063, |
|
"eval_iou_Face": 0.5668027472975276, |
|
"eval_iou_Hair": 0.6182097654866752, |
|
"eval_iou_Hat": 0.0, |
|
"eval_iou_Left-arm": 0.041412445871648905, |
|
"eval_iou_Left-leg": 0.4301887642165467, |
|
"eval_iou_Left-shoe": 0.0, |
|
"eval_iou_Pants": 0.4407660242581031, |
|
"eval_iou_Right-arm": 0.19617488443114675, |
|
"eval_iou_Right-leg": 0.33068482640043945, |
|
"eval_iou_Right-shoe": 0.00694378986395199, |
|
"eval_iou_Scarf": 0.0, |
|
"eval_iou_Skirt": 0.22994156724845466, |
|
"eval_iou_Sunglasses": 0.0, |
|
"eval_iou_Upper-clothes": 0.4633347642354916, |
|
"eval_loss": 1.4606529474258423, |
|
"eval_mean_accuracy": 0.2923357125608531, |
|
"eval_mean_iou": 0.18516080772285184, |
|
"eval_overall_accuracy": 0.5286778820561939, |
|
"eval_runtime": 21.1371, |
|
"eval_samples_per_second": 9.462, |
|
"eval_steps_per_second": 4.731, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 1000, |
|
"total_flos": 1.4037886697472e+17, |
|
"train_loss": 1.804133994936943, |
|
"train_runtime": 966.4336, |
|
"train_samples_per_second": 8.278, |
|
"train_steps_per_second": 1.035 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 20, |
|
"total_flos": 1.4037886697472e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|