diff --git "a/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.log" "b/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.log" new file mode 100644--- /dev/null +++ "b/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.log" @@ -0,0 +1,5154 @@ +2022-06-05 01:41:25,360 - mmseg - INFO - Environment info: +------------------------------------------------------------ +sys.platform: linux +Python: 3.7.11 (default, Jul 27 2021, 14:32:16) [GCC 7.5.0] +CUDA available: True +GPU 0,1,2,3,4,5,6,7: A100-SXM-80GB +CUDA_HOME: /mnt/lustre/share/cuda-11.1 +NVCC: Build cuda_11.1.TC455_06.29069683_0 +GCC: gcc (GCC) 7.3.0 +PyTorch: 1.9.0+cu111 +PyTorch compiling details: PyTorch built with: + - GCC 7.3 + - C++ Version: 201402 + - Intel(R) Math Kernel Library Version 2020.0.0 Product Build 20191122 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v2.1.2 (Git Hash 98be7e8afa711dc9b66c8ff3504129cb82013cdb) + - OpenMP 201511 (a.k.a. OpenMP 4.5) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.1 + - NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86 + - CuDNN 8.0.5 + - Magma 2.5.2 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.1, CUDNN_VERSION=8.0.5, CXX_COMPILER=/opt/rh/devtoolset-7/root/usr/bin/c++, CXX_FLAGS= -Wno-deprecated -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -fopenmp -DNDEBUG -DUSE_KINETO -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wno-narrowing -Wall -Wextra -Werror=return-type -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-sign-compare -Wno-unused-parameter -Wno-unused-variable -Wno-unused-function -Wno-unused-result -Wno-unused-local-typedefs -Wno-strict-overflow -Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=1.9.0, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, + +TorchVision: 0.10.0+cu111 +OpenCV: 4.5.5 +MMCV: 1.4.2 +MMCV Compiler: GCC 7.3 +MMCV CUDA Compiler: 11.1 +MMSegmentation: 0.20.2+ +------------------------------------------------------------ + +2022-06-05 01:41:25,361 - mmseg - INFO - Distributed training: True +2022-06-05 01:41:25,977 - mmseg - INFO - Config: +num_things_classes = 29 +num_stuff_classes = 30 +num_classes = 59 +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + type='EncoderDecoderMask2Former', + pretrained='pretrained/beit_base_patch16_224_pt22k_ft22k.pth', + backbone=dict( + type='BEiTAdapter', + patch_size=16, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4, + qkv_bias=True, + use_abs_pos_emb=False, + use_rel_pos_bias=True, + img_size=480, + init_values=1e-06, + drop_path_rate=0.1, + conv_inplane=64, + n_points=4, + deform_num_heads=12, + cffn_ratio=0.25, + deform_ratio=0.5, + interaction_indexes=[[0, 2], [3, 5], [6, 8], [9, 11]]), + 
decode_head=dict( + type='Mask2FormerHead', + in_channels=[768, 768, 768, 768], + feat_channels=256, + out_channels=256, + in_index=[0, 1, 2, 3], + num_things_classes=29, + num_stuff_classes=30, + num_queries=100, + num_transformer_feat_level=3, + pixel_decoder=dict( + type='MSDeformAttnPixelDecoder', + num_outs=3, + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU'), + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', + embed_dims=256, + num_heads=8, + num_levels=3, + num_points=4, + im2col_step=64, + dropout=0.0, + batch_first=False, + norm_cfg=None, + init_cfg=None), + ffn_cfgs=dict( + type='FFN', + embed_dims=256, + feedforward_channels=1024, + num_fcs=2, + ffn_drop=0.0, + act_cfg=dict(type='ReLU', inplace=True)), + operation_order=('self_attn', 'norm', 'ffn', 'norm')), + init_cfg=None), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + init_cfg=None), + enforce_decoder_input_project=False, + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + transformer_decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=9, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + attn_drop=0.0, + proj_drop=0.0, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=256, + feedforward_channels=2048, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.0, + dropout_layer=None, + add_identity=True), + feedforward_channels=2048, + operation_order=('cross_attn', 'norm', 'self_attn', 'norm', + 'ffn', 'norm')), + init_cfg=None), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=2.0, + reduction='mean', + class_weight=[ + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.1 + ]), + loss_mask=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + reduction='mean', + loss_weight=5.0), + loss_dice=dict( + type='DiceLoss', + use_sigmoid=True, + activate=True, + reduction='mean', + naive_dice=True, + eps=1.0, + loss_weight=5.0)), + train_cfg=dict( + num_points=12544, + oversample_ratio=3.0, + importance_sample_ratio=0.75, + assigner=dict( + type='MaskHungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=2.0), + mask_cost=dict( + type='CrossEntropyLossCost', weight=5.0, use_sigmoid=True), + dice_cost=dict( + type='DiceCost', weight=5.0, pred_act=True, eps=1.0)), + sampler=dict(type='MaskPseudoSampler')), + test_cfg=dict( + panoptic_on=True, + semantic_on=False, + instance_on=True, + max_per_image=100, + iou_thr=0.8, + filter_low_score=True, + mode='slide', + crop_size=(480, 480), + stride=(320, 320)), + init_cfg=None) +find_unused_parameters = True +dataset_type = 'PascalContextDataset59' +data_root = 'data/VOCdevkit/VOC2010/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +img_scale = (520, 520) +crop_size = (480, 480) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', reduce_zero_label=True), + dict(type='Resize', img_scale=(520, 520), 
ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(480, 480), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(480, 480), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(4096, 520), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='ResizeToMultiple', size_divisor=32), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=4, + train=dict( + type='PascalContextDataset59', + data_root='data/VOCdevkit/VOC2010/', + img_dir='JPEGImages', + ann_dir='SegmentationClassContext', + split='ImageSets/SegmentationContext/train.txt', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', reduce_zero_label=True), + dict(type='Resize', img_scale=(520, 520), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(480, 480), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(480, 480), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) + ]), + val=dict( + type='PascalContextDataset59', + data_root='data/VOCdevkit/VOC2010/', + img_dir='JPEGImages', + ann_dir='SegmentationClassContext', + split='ImageSets/SegmentationContext/val.txt', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(4096, 520), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='ResizeToMultiple', size_divisor=32), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ]), + test=dict( + type='PascalContextDataset59', + data_root='data/VOCdevkit/VOC2010/', + img_dir='JPEGImages', + ann_dir='SegmentationClassContext', + split='ImageSets/SegmentationContext/val.txt', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(4096, 520), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='ResizeToMultiple', size_divisor=32), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ])) +log_config = dict( + interval=50, hooks=[dict(type='TextLoggerHook', by_epoch=False)]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = None +workflow = [('train', 1)] +cudnn_benchmark = True +optimizer = dict( + type='AdamW', + lr=3e-05, + betas=(0.9, 0.999), + weight_decay=0.05, + constructor='LayerDecayOptimizerConstructor', + paramwise_cfg=dict(num_layers=12, 
layer_decay_rate=0.9)) +optimizer_config = dict() +lr_config = dict( + policy='poly', + warmup='linear', + warmup_iters=1500, + warmup_ratio=1e-06, + power=1.0, + min_lr=0.0, + by_epoch=False) +runner = dict(type='IterBasedRunner', max_iters=40000) +checkpoint_config = dict(by_epoch=False, interval=1000, max_keep_ckpts=1) +evaluation = dict( + interval=4000, metric='mIoU', pre_eval=True, save_best='mIoU') +pretrained = 'pretrained/beit_base_patch16_224_pt22k_ft22k.pth' +work_dir = './work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss' +gpu_ids = range(0, 8) +auto_resume = False + +2022-06-05 01:41:34,614 - mmseg - INFO - Set random seed to 51259253, deterministic: False +2022-06-05 01:41:37,165 - mmseg - WARNING - The model and loaded state dict do not match exactly + +unexpected key in source state_dict: fc_norm.weight, fc_norm.bias, head.weight, head.bias + +missing keys in source state_dict: blocks.0.attn.relative_position_index, blocks.1.attn.relative_position_index, blocks.2.attn.relative_position_index, blocks.3.attn.relative_position_index, blocks.4.attn.relative_position_index, blocks.5.attn.relative_position_index, blocks.6.attn.relative_position_index, blocks.7.attn.relative_position_index, blocks.8.attn.relative_position_index, blocks.9.attn.relative_position_index, blocks.10.attn.relative_position_index, blocks.11.attn.relative_position_index + +Name of parameter - Initialization information + +backbone.cls_token - torch.Size([1, 1, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.level_embed - torch.Size([3, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.weight - torch.Size([768, 3, 16, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.0.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.2.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.bias - torch.Size([768]): 
+The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.5.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and 
after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_1 - torch.Size([768]): 
+The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.9.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.bias - torch.Size([768]): +The value is the same before 
and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_1 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_2 - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.q_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.v_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.relative_position_bias_table - torch.Size([3484, 12]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.qkv.weight - torch.Size([2304, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.weight - torch.Size([768, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.weight - torch.Size([3072, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.weight - torch.Size([768, 3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.0.weight - torch.Size([64, 3, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.1.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.1.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.3.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.4.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.4.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.6.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.7.weight - torch.Size([64]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.stem.7.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv2.0.weight - torch.Size([128, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv2.1.weight - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv2.1.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv3.0.weight - torch.Size([256, 128, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv3.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv3.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv4.0.weight - torch.Size([256, 256, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv4.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.conv4.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc1.weight - torch.Size([768, 64, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc2.weight - torch.Size([768, 128, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc3.weight - torch.Size([768, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc3.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc4.weight - torch.Size([768, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.spm.fc4.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.gamma - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interactions.0.injector.attn.sampling_offsets.weight - torch.Size([288, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.sampling_offsets.bias - torch.Size([288]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.attention_weights.weight - torch.Size([144, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.attention_weights.bias - torch.Size([144]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.injector.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.sampling_offsets.weight - torch.Size([96, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.sampling_offsets.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.attention_weights.weight - torch.Size([48, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.attention_weights.bias - torch.Size([48]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn.fc1.weight - 
torch.Size([192, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn.fc1.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn.dwconv.dwconv.weight - torch.Size([192, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn.dwconv.dwconv.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn.fc2.weight - torch.Size([768, 192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.0.extractor.ffn_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.gamma - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.sampling_offsets.weight - torch.Size([288, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.sampling_offsets.bias - torch.Size([288]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.attention_weights.weight - torch.Size([144, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.attention_weights.bias - torch.Size([144]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.injector.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interactions.1.extractor.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.sampling_offsets.weight - torch.Size([96, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.sampling_offsets.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.attention_weights.weight - torch.Size([48, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.attention_weights.bias - torch.Size([48]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn.fc1.weight - torch.Size([192, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn.fc1.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn.dwconv.dwconv.weight - torch.Size([192, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn.dwconv.dwconv.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn.fc2.weight - torch.Size([768, 192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.1.extractor.ffn_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.gamma - torch.Size([768]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.sampling_offsets.weight - torch.Size([288, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.sampling_offsets.bias - torch.Size([288]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.attention_weights.weight - torch.Size([144, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.attention_weights.bias - torch.Size([144]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.injector.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.sampling_offsets.weight - torch.Size([96, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.sampling_offsets.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.attention_weights.weight - torch.Size([48, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.attention_weights.bias - torch.Size([48]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interactions.2.extractor.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn.fc1.weight - torch.Size([192, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn.fc1.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn.dwconv.dwconv.weight - torch.Size([192, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn.dwconv.dwconv.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn.fc2.weight - torch.Size([768, 192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.2.extractor.ffn_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.gamma - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.sampling_offsets.weight - torch.Size([288, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.sampling_offsets.bias - torch.Size([288]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.attention_weights.weight - torch.Size([144, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.attention_weights.bias - torch.Size([144]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.injector.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.sampling_offsets.weight - torch.Size([96, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.sampling_offsets.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.attention_weights.weight - torch.Size([48, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.attention_weights.bias - torch.Size([48]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn.fc1.weight - torch.Size([192, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn.fc1.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn.dwconv.dwconv.weight - torch.Size([192, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn.dwconv.dwconv.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
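The extractor FFN entries (fc1 768->192, a 3x3 depthwise convolution on 192 channels, fc2 192->768) describe a convolutional FFN whose hidden size is one quarter of the embedding dimension. Below is a minimal sketch of such a module, assuming the common Linear / depthwise Conv2d / Linear layout; the class and argument names are illustrative, not the adapter's actual implementation.

import torch.nn as nn

class ConvFFNSketch(nn.Module):
    # fc1.weight [192, 768], dwconv.weight [192, 1, 3, 3], fc2.weight [768, 192]
    def __init__(self, dim=768, hidden=192):
        super().__init__()
        self.fc1 = nn.Linear(dim, hidden)
        self.dwconv = nn.Conv2d(hidden, hidden, 3, padding=1, groups=hidden)  # depthwise
        self.act = nn.GELU()
        self.fc2 = nn.Linear(hidden, dim)

    def forward(self, x, H, W):
        # x: [B, N, dim] token sequence; the depthwise conv runs on the H x W layout.
        x = self.fc1(x)
        B, N, C = x.shape
        x = self.dwconv(x.transpose(1, 2).reshape(B, C, H, W))
        x = x.flatten(2).transpose(1, 2)
        return self.fc2(self.act(x))
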
+backbone.interactions.3.extractor.ffn.fc2.weight - torch.Size([768, 192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extractor.ffn_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.sampling_offsets.weight - torch.Size([96, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.sampling_offsets.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.attention_weights.weight - torch.Size([48, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.attention_weights.bias - torch.Size([48]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn.fc1.weight - torch.Size([192, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn.fc1.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn.dwconv.dwconv.weight - torch.Size([192, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn.dwconv.dwconv.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn.fc2.weight - torch.Size([768, 192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.0.ffn_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.query_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.query_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.feat_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.feat_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.sampling_offsets.weight - torch.Size([96, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.sampling_offsets.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.attention_weights.weight - torch.Size([48, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.attention_weights.bias - torch.Size([48]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.value_proj.weight - torch.Size([384, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.value_proj.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.output_proj.weight - torch.Size([768, 384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.attn.output_proj.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn.fc1.weight - torch.Size([192, 768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn.fc1.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn.dwconv.dwconv.weight - torch.Size([192, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn.dwconv.dwconv.bias - torch.Size([192]): 
+The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn.fc2.weight - torch.Size([768, 192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn.fc2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn_norm.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interactions.3.extra_extractors.1.ffn_norm.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.weight - torch.Size([768, 768, 2, 2]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.weight - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.weight - torch.Size([59, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.bias - torch.Size([59]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.conv.weight - torch.Size([256, 768, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.0.conv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.conv.weight - torch.Size([256, 768, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.1.conv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.weight - torch.Size([256]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.conv.weight - torch.Size([256, 768, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.2.conv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.weight - torch.Size([192, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.weight - torch.Size([96, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.weight - torch.Size([256, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.weight - torch.Size([192, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.weight - torch.Size([96, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.weight - torch.Size([256, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.weight - torch.Size([192, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.weight - 
torch.Size([96, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.weight - torch.Size([256, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.weight - torch.Size([192, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.weight - torch.Size([96, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
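The pixel decoder's encoder layers repeat the same deformable-attention bookkeeping at 256 dimensions: with 8 heads, 3 feature levels and 4 points, sampling_offsets has 8 * 3 * 4 * 2 = 192 output rows and attention_weights has 8 * 3 * 4 = 96, matching the sizes logged for each of the six encoder layers. A two-line sanity check:

num_heads, num_levels, num_points = 8, 3, 4
assert num_heads * num_levels * num_points * 2 == 192  # sampling_offsets rows
assert num_heads * num_levels * num_points == 96       # attention_weights rows
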
+decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.weight - torch.Size([256, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.weight - torch.Size([192, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.weight - torch.Size([96, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.weight - torch.Size([256, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.weight - torch.Size([192, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.weight - torch.Size([96, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.bias - torch.Size([96]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.weight - torch.Size([256, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.bias - torch.Size([256]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.level_encoding.weight - torch.Size([3, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.conv.weight - torch.Size([256, 768, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.gn.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.lateral_convs.0.gn.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.conv.weight - torch.Size([256, 256, 3, 3]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.output_convs.0.gn.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.gn.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.mask_feature.weight - torch.Size([256, 256, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.mask_feature.bias - torch.Size([256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
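In the transformer decoder entries, the [768, 256] in_proj_weight is unrelated to the backbone's 768-dim embedding: torch.nn.MultiheadAttention packs the query, key and value projections into one matrix, so with 256-dim embeddings it has 3 * 256 = 768 output rows, while out_proj stays [256, 256] and the FFN expands 256 -> 2048 -> 256. A quick check in plain PyTorch:

import torch.nn as nn

mha = nn.MultiheadAttention(embed_dim=256, num_heads=8)
print(mha.in_proj_weight.shape)   # torch.Size([768, 256])
print(mha.in_proj_bias.shape)     # torch.Size([768])
print(mha.out_proj.weight.shape)  # torch.Size([256, 256])
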
+decode_head.transformer_decoder.layers.0.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.weight - torch.Size([256]): +The value is the 
same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.2.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.bias - torch.Size([256]): +The value is 
the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
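Across the decoder layers the log shows a consistent split: multi-dimensional weights are reported as "Initialized by user-defined `init_weights` in Mask2FormerHead", while biases and LayerNorm parameters are "the same before and after". This is the usual signature of a custom init that re-initializes only parameters with more than one dimension and leaves 1-D parameters at their constructor defaults; the sketch below shows that pattern under that assumption and is not necessarily the exact Mask2FormerHead code.

import torch.nn as nn

def init_decoder_params(module: nn.Module) -> None:
    for p in module.parameters():
        if p.dim() > 1:
            # e.g. in_proj_weight, out_proj.weight, FFN weights
            nn.init.xavier_uniform_(p)
        # 1-D parameters (biases, LayerNorm weight/bias) keep their defaults,
        # which the log reports as unchanged by init_weights.
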
+decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_weight - 
torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in 
Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.bias - torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_weight - torch.Size([768, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.weight - torch.Size([256, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.weight - torch.Size([2048, 256]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.bias - 
torch.Size([2048]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.weight - torch.Size([256, 2048]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_embed.weight - torch.Size([100, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_feat.weight - torch.Size([100, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.level_embed.weight - torch.Size([3, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.weight - torch.Size([60, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.bias - torch.Size([60]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.weight - torch.Size([256, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.weight - torch.Size([256, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.weight - torch.Size([256, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former +2022-06-05 01:41:37,743 - mmseg - INFO - EncoderDecoderMask2Former( + (backbone): BEiTAdapter( + (patch_embed): 
PatchEmbed( + (proj): Conv2d(3, 768, kernel_size=(16, 16), stride=(16, 16)) + ) + (pos_drop): Dropout(p=0.0, inplace=False) + (blocks): ModuleList( + (0): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): Identity() + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (1): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.00909090880304575) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (2): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0181818176060915) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (3): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.027272727340459824) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (4): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.036363635212183) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (5): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, 
out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.045454543083906174) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (6): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.054545458406209946) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (7): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.06363636255264282) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (8): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0727272778749466) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (9): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.08181818574666977) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (10): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.09090909361839294) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): 
Dropout(p=0.0, inplace=False) + ) + ) + (11): Block( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=768, out_features=2304, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.10000000149011612) + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU() + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + (spm): SpatialPriorModule( + (stem): Sequential( + (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + (6): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (7): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (8): ReLU(inplace=True) + (9): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) + ) + (conv2): Sequential( + (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv3): Sequential( + (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv4): Sequential( + (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (fc1): Conv2d(64, 768, kernel_size=(1, 1), stride=(1, 1)) + (fc2): Conv2d(128, 768, kernel_size=(1, 1), stride=(1, 1)) + (fc3): Conv2d(256, 768, kernel_size=(1, 1), stride=(1, 1)) + (fc4): Conv2d(256, 768, kernel_size=(1, 1), stride=(1, 1)) + ) + (interactions): Sequential( + (0): InteractionBlockWithCls( + (injector): Injector( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=288, bias=True) + (attention_weights): Linear(in_features=768, out_features=144, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + ) + (extractor): Extractor( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=96, bias=True) + (attention_weights): Linear(in_features=768, out_features=48, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=768, out_features=192, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), 
padding=(1, 1), groups=192) + ) + (act): GELU() + (fc2): Linear(in_features=192, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + ) + (1): InteractionBlockWithCls( + (injector): Injector( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=288, bias=True) + (attention_weights): Linear(in_features=768, out_features=144, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + ) + (extractor): Extractor( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=96, bias=True) + (attention_weights): Linear(in_features=768, out_features=48, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=768, out_features=192, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192) + ) + (act): GELU() + (fc2): Linear(in_features=192, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + ) + (2): InteractionBlockWithCls( + (injector): Injector( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=288, bias=True) + (attention_weights): Linear(in_features=768, out_features=144, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + ) + (extractor): Extractor( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=96, bias=True) + (attention_weights): Linear(in_features=768, out_features=48, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=768, out_features=192, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192) + ) + (act): GELU() + (fc2): Linear(in_features=192, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + ) + (3): InteractionBlockWithCls( + (injector): Injector( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=288, bias=True) + (attention_weights): Linear(in_features=768, out_features=144, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): 
Linear(in_features=384, out_features=768, bias=True) + ) + ) + (extractor): Extractor( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=96, bias=True) + (attention_weights): Linear(in_features=768, out_features=48, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=768, out_features=192, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192) + ) + (act): GELU() + (fc2): Linear(in_features=192, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (extra_extractors): Sequential( + (0): Extractor( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=96, bias=True) + (attention_weights): Linear(in_features=768, out_features=48, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=768, out_features=192, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192) + ) + (act): GELU() + (fc2): Linear(in_features=192, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + (1): Extractor( + (query_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=768, out_features=96, bias=True) + (attention_weights): Linear(in_features=768, out_features=48, bias=True) + (value_proj): Linear(in_features=768, out_features=384, bias=True) + (output_proj): Linear(in_features=384, out_features=768, bias=True) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=768, out_features=192, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192) + ) + (act): GELU() + (fc2): Linear(in_features=192, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + ) + ) + ) + (up): ConvTranspose2d(768, 768, kernel_size=(2, 2), stride=(2, 2)) + (norm1): SyncBatchNorm(768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm2): SyncBatchNorm(768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm3): SyncBatchNorm(768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm4): SyncBatchNorm(768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + ) + (decode_head): Mask2FormerHead( + input_transform=multiple_select, ignore_index=255, align_corners=False + (loss_decode): CrossEntropyLoss(avg_non_ignore=False) + (conv_seg): Conv2d(256, 59, kernel_size=(1, 1), stride=(1, 1)) + (dropout): Dropout2d(p=0.1, inplace=False) + (pixel_decoder): MSDeformAttnPixelDecoder( + 
(input_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(768, 256, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 256, eps=1e-05, affine=True) + ) + (1): ConvModule( + (conv): Conv2d(768, 256, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 256, eps=1e-05, affine=True) + ) + (2): ConvModule( + (conv): Conv2d(768, 256, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 256, eps=1e-05, affine=True) + ) + ) + (encoder): DetrTransformerEncoder( + (layers): ModuleList( + (0): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=256, out_features=192, bias=True) + (attention_weights): Linear(in_features=256, out_features=96, bias=True) + (value_proj): Linear(in_features=256, out_features=256, bias=True) + (output_proj): Linear(in_features=256, out_features=256, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=1024, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=256, out_features=192, bias=True) + (attention_weights): Linear(in_features=256, out_features=96, bias=True) + (value_proj): Linear(in_features=256, out_features=256, bias=True) + (output_proj): Linear(in_features=256, out_features=256, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=1024, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=256, out_features=192, bias=True) + (attention_weights): Linear(in_features=256, out_features=96, bias=True) + (value_proj): Linear(in_features=256, out_features=256, bias=True) + (output_proj): Linear(in_features=256, out_features=256, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=1024, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + 
(sampling_offsets): Linear(in_features=256, out_features=192, bias=True) + (attention_weights): Linear(in_features=256, out_features=96, bias=True) + (value_proj): Linear(in_features=256, out_features=256, bias=True) + (output_proj): Linear(in_features=256, out_features=256, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=1024, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=256, out_features=192, bias=True) + (attention_weights): Linear(in_features=256, out_features=96, bias=True) + (value_proj): Linear(in_features=256, out_features=256, bias=True) + (output_proj): Linear(in_features=256, out_features=256, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=1024, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=256, out_features=192, bias=True) + (attention_weights): Linear(in_features=256, out_features=96, bias=True) + (value_proj): Linear(in_features=256, out_features=256, bias=True) + (output_proj): Linear(in_features=256, out_features=256, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=1024, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + ) + (postional_encoding): SinePositionalEncoding(num_feats=128, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (level_encoding): Embedding(3, 256) + (lateral_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(768, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) + (gn): GroupNorm(32, 256, eps=1e-05, affine=True) + ) + ) + (output_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (gn): GroupNorm(32, 256, eps=1e-05, affine=True) + (activate): ReLU(inplace=True) + ) + ) + (mask_feature): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + ) + (transformer_decoder): DetrTransformerDecoder( + (layers): ModuleList( + (0): DetrTransformerDecoderLayer( + (attentions): 
ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): 
Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (6): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): 
NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (7): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + (8): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=256, out_features=256, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=256, out_features=2048, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=2048, out_features=256, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + (post_norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + ) + (decoder_input_projs): ModuleList( + (0): Identity() + (1): Identity() + (2): Identity() + ) + (decoder_positional_encoding): SinePositionalEncoding(num_feats=128, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (query_embed): Embedding(100, 256) + (query_feat): Embedding(100, 256) + (level_embed): Embedding(3, 256) + (cls_embed): Linear(in_features=256, out_features=60, bias=True) + (mask_embed): Sequential( + (0): Linear(in_features=256, 
out_features=256, bias=True) + (1): ReLU(inplace=True) + (2): Linear(in_features=256, out_features=256, bias=True) + (3): ReLU(inplace=True) + (4): Linear(in_features=256, out_features=256, bias=True) + ) + (loss_cls): CrossEntropyLoss(avg_non_ignore=False) + (loss_mask): CrossEntropyLoss(avg_non_ignore=False) + (loss_dice): DiceLoss() + ) +) +2022-06-05 01:41:37,793 - mmseg - INFO - Loaded 4996 images +2022-06-05 01:41:38,973 - mmseg - INFO - Loaded 5104 images +2022-06-05 01:41:38,973 - mmseg - INFO - Start running, host: chenzhe.vendor@SH-IDC1-10-140-1-150, work_dir: /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss +2022-06-05 01:41:38,973 - mmseg - INFO - Hooks will be executed in the following order: +before_run: +(VERY_HIGH ) PolyLrUpdaterHook +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_epoch: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_iter: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook + -------------------- +after_train_iter: +(ABOVE_NORMAL) OptimizerHook +(NORMAL ) CheckpointHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +after_train_epoch: +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_epoch: +(LOW ) IterTimerHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_epoch: +(VERY_LOW ) TextLoggerHook + -------------------- +after_run: +(VERY_LOW ) TextLoggerHook + -------------------- +2022-06-05 01:41:38,973 - mmseg - INFO - workflow: [('train', 1)], max: 40000 iters +2022-06-05 01:41:38,974 - mmseg - INFO - Checkpoints will be saved to /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss by HardDiskBackend. 
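[Editorial note on the iteration records that follow] Each training line emitted by TextLoggerHook decomposes the Mask2Former objective into the final prediction's losses (decode.loss_cls / decode.loss_mask / decode.loss_dice) plus nine auxiliary triples (decode.d0.* through decode.d8.*) from deep supervision of the intermediate decoder predictions; the trailing `loss` value is simply the sum of all these components. The snippet below is a small, hypothetical sanity check (not part of mmseg or of this run) that re-parses the Iter [50/40000] record and confirms the components add up to the logged total within print rounding.

import re

# Hypothetical helper: pull the "name: value" loss pairs out of one
# TextLoggerHook line and compare their sum with the aggregate "loss".
ITER_50 = (
    "decode.loss_cls: 8.3385, decode.loss_mask: 2.9073, decode.loss_dice: 4.0495, "
    "decode.d0.loss_cls: 8.5090, decode.d0.loss_mask: 2.3531, decode.d0.loss_dice: 3.7653, "
    "decode.d1.loss_cls: 7.9831, decode.d1.loss_mask: 2.6963, decode.d1.loss_dice: 3.8258, "
    "decode.d2.loss_cls: 8.3870, decode.d2.loss_mask: 2.8662, decode.d2.loss_dice: 3.9038, "
    "decode.d3.loss_cls: 9.3211, decode.d3.loss_mask: 3.0021, decode.d3.loss_dice: 3.9786, "
    "decode.d4.loss_cls: 9.2480, decode.d4.loss_mask: 2.7875, decode.d4.loss_dice: 3.9916, "
    "decode.d5.loss_cls: 10.1731, decode.d5.loss_mask: 3.3287, decode.d5.loss_dice: 3.9911, "
    "decode.d6.loss_cls: 8.5606, decode.d6.loss_mask: 3.4602, decode.d6.loss_dice: 4.0034, "
    "decode.d7.loss_cls: 8.5884, decode.d7.loss_mask: 3.1527, decode.d7.loss_dice: 4.0242, "
    "decode.d8.loss_cls: 8.2071, decode.d8.loss_mask: 3.2329, decode.d8.loss_dice: 4.0217, "
    "loss: 156.6581"
)

def split_losses(line):
    # Matches either a per-component name like decode.d3.loss_dice or the bare total "loss".
    pairs = re.findall(r"(decode\.[\w.]*loss_\w+|loss): ([0-9.]+)", line)
    return {name: float(value) for name, value in pairs}

losses = split_losses(ITER_50)
total = losses.pop("loss")               # aggregate reported by the logger
component_sum = sum(losses.values())     # 3 final + 9 x 3 auxiliary terms = 30 values
print(f"{component_sum:.4f} vs logged {total}")  # ~156.6579 vs 156.6581 (rounding of printed values)

The log below continues verbatim from the run.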
+2022-06-05 01:42:38,303 - mmseg - INFO - Iter [50/40000] lr: 2.488e-07, eta: 5:28:32, time: 0.493, data_time: 0.010, memory: 31652, decode.loss_cls: 8.3385, decode.loss_mask: 2.9073, decode.loss_dice: 4.0495, decode.d0.loss_cls: 8.5090, decode.d0.loss_mask: 2.3531, decode.d0.loss_dice: 3.7653, decode.d1.loss_cls: 7.9831, decode.d1.loss_mask: 2.6963, decode.d1.loss_dice: 3.8258, decode.d2.loss_cls: 8.3870, decode.d2.loss_mask: 2.8662, decode.d2.loss_dice: 3.9038, decode.d3.loss_cls: 9.3211, decode.d3.loss_mask: 3.0021, decode.d3.loss_dice: 3.9786, decode.d4.loss_cls: 9.2480, decode.d4.loss_mask: 2.7875, decode.d4.loss_dice: 3.9916, decode.d5.loss_cls: 10.1731, decode.d5.loss_mask: 3.3287, decode.d5.loss_dice: 3.9911, decode.d6.loss_cls: 8.5606, decode.d6.loss_mask: 3.4602, decode.d6.loss_dice: 4.0034, decode.d7.loss_cls: 8.5884, decode.d7.loss_mask: 3.1527, decode.d7.loss_dice: 4.0242, decode.d8.loss_cls: 8.2071, decode.d8.loss_mask: 3.2329, decode.d8.loss_dice: 4.0217, loss: 156.6581 +2022-06-05 01:42:59,417 - mmseg - INFO - Iter [100/40000] lr: 5.021e-07, eta: 5:03:53, time: 0.421, data_time: 0.007, memory: 31652, decode.loss_cls: 6.7568, decode.loss_mask: 2.4583, decode.loss_dice: 4.1403, decode.d0.loss_cls: 8.5181, decode.d0.loss_mask: 2.1904, decode.d0.loss_dice: 3.7120, decode.d1.loss_cls: 7.3231, decode.d1.loss_mask: 2.3459, decode.d1.loss_dice: 3.7576, decode.d2.loss_cls: 7.2021, decode.d2.loss_mask: 2.3436, decode.d2.loss_dice: 3.8666, decode.d3.loss_cls: 7.8294, decode.d3.loss_mask: 2.3732, decode.d3.loss_dice: 3.9604, decode.d4.loss_cls: 7.6884, decode.d4.loss_mask: 2.2993, decode.d4.loss_dice: 4.0860, decode.d5.loss_cls: 8.4995, decode.d5.loss_mask: 2.6612, decode.d5.loss_dice: 3.9869, decode.d6.loss_cls: 7.0504, decode.d6.loss_mask: 2.7918, decode.d6.loss_dice: 3.9963, decode.d7.loss_cls: 6.8756, decode.d7.loss_mask: 2.5528, decode.d7.loss_dice: 4.0926, decode.d8.loss_cls: 6.5187, decode.d8.loss_mask: 2.6316, decode.d8.loss_dice: 4.0746, loss: 138.5834 +2022-06-05 01:43:20,649 - mmseg - INFO - Iter [150/40000] lr: 7.547e-07, eta: 4:56:42, time: 0.426, data_time: 0.010, memory: 31652, decode.loss_cls: 4.4363, decode.loss_mask: 2.4961, decode.loss_dice: 4.0751, decode.d0.loss_cls: 8.5124, decode.d0.loss_mask: 2.1486, decode.d0.loss_dice: 3.6702, decode.d1.loss_cls: 6.0777, decode.d1.loss_mask: 2.2405, decode.d1.loss_dice: 3.7218, decode.d2.loss_cls: 5.2171, decode.d2.loss_mask: 2.2375, decode.d2.loss_dice: 3.8321, decode.d3.loss_cls: 5.1252, decode.d3.loss_mask: 2.2699, decode.d3.loss_dice: 3.9244, decode.d4.loss_cls: 4.8840, decode.d4.loss_mask: 2.2925, decode.d4.loss_dice: 4.0067, decode.d5.loss_cls: 5.2179, decode.d5.loss_mask: 2.4892, decode.d5.loss_dice: 3.9682, decode.d6.loss_cls: 4.5268, decode.d6.loss_mask: 2.5016, decode.d6.loss_dice: 4.0073, decode.d7.loss_cls: 4.4290, decode.d7.loss_mask: 2.4697, decode.d7.loss_dice: 4.0623, decode.d8.loss_cls: 4.3583, decode.d8.loss_mask: 2.5055, decode.d8.loss_dice: 4.0685, loss: 115.7725 +2022-06-05 01:43:41,308 - mmseg - INFO - Iter [200/40000] lr: 1.007e-06, eta: 4:50:46, time: 0.413, data_time: 0.007, memory: 31652, decode.loss_cls: 3.8526, decode.loss_mask: 2.4160, decode.loss_dice: 4.0940, decode.d0.loss_cls: 8.5233, decode.d0.loss_mask: 2.1341, decode.d0.loss_dice: 3.6554, decode.d1.loss_cls: 4.9444, decode.d1.loss_mask: 2.2101, decode.d1.loss_dice: 3.7137, decode.d2.loss_cls: 4.0871, decode.d2.loss_mask: 2.2065, decode.d2.loss_dice: 3.7953, decode.d3.loss_cls: 4.0048, decode.d3.loss_mask: 2.2284, decode.d3.loss_dice: 3.8741, 
decode.d4.loss_cls: 3.9688, decode.d4.loss_mask: 2.2574, decode.d4.loss_dice: 3.9195, decode.d5.loss_cls: 3.9350, decode.d5.loss_mask: 2.3504, decode.d5.loss_dice: 3.9645, decode.d6.loss_cls: 3.9051, decode.d6.loss_mask: 2.3711, decode.d6.loss_dice: 4.0315, decode.d7.loss_cls: 3.8798, decode.d7.loss_mask: 2.4061, decode.d7.loss_dice: 4.0599, decode.d8.loss_cls: 3.8803, decode.d8.loss_mask: 2.4424, decode.d8.loss_dice: 4.0796, loss: 107.1916 +2022-06-05 01:44:02,178 - mmseg - INFO - Iter [250/40000] lr: 1.258e-06, eta: 4:47:38, time: 0.417, data_time: 0.007, memory: 31652, decode.loss_cls: 3.8061, decode.loss_mask: 2.3722, decode.loss_dice: 4.0568, decode.d0.loss_cls: 8.5235, decode.d0.loss_mask: 2.1002, decode.d0.loss_dice: 3.6394, decode.d1.loss_cls: 4.3639, decode.d1.loss_mask: 2.1197, decode.d1.loss_dice: 3.7123, decode.d2.loss_cls: 3.8857, decode.d2.loss_mask: 2.0926, decode.d2.loss_dice: 3.7825, decode.d3.loss_cls: 3.8835, decode.d3.loss_mask: 2.0932, decode.d3.loss_dice: 3.8332, decode.d4.loss_cls: 3.9098, decode.d4.loss_mask: 2.1193, decode.d4.loss_dice: 3.8509, decode.d5.loss_cls: 3.8519, decode.d5.loss_mask: 2.1997, decode.d5.loss_dice: 3.8874, decode.d6.loss_cls: 3.8496, decode.d6.loss_mask: 2.2566, decode.d6.loss_dice: 3.9440, decode.d7.loss_cls: 3.8340, decode.d7.loss_mask: 2.3317, decode.d7.loss_dice: 3.9909, decode.d8.loss_cls: 3.8306, decode.d8.loss_mask: 2.3596, decode.d8.loss_dice: 4.0291, loss: 104.5096 +2022-06-05 01:44:22,965 - mmseg - INFO - Iter [300/40000] lr: 1.509e-06, eta: 4:45:14, time: 0.416, data_time: 0.007, memory: 31652, decode.loss_cls: 3.6259, decode.loss_mask: 2.2965, decode.loss_dice: 3.9100, decode.d0.loss_cls: 8.5189, decode.d0.loss_mask: 2.0975, decode.d0.loss_dice: 3.5565, decode.d1.loss_cls: 4.0403, decode.d1.loss_mask: 2.0741, decode.d1.loss_dice: 3.6205, decode.d2.loss_cls: 3.6838, decode.d2.loss_mask: 2.0551, decode.d2.loss_dice: 3.6708, decode.d3.loss_cls: 3.6681, decode.d3.loss_mask: 2.0431, decode.d3.loss_dice: 3.6963, decode.d4.loss_cls: 3.6839, decode.d4.loss_mask: 2.0416, decode.d4.loss_dice: 3.6928, decode.d5.loss_cls: 3.6524, decode.d5.loss_mask: 2.0886, decode.d5.loss_dice: 3.7262, decode.d6.loss_cls: 3.6353, decode.d6.loss_mask: 2.1304, decode.d6.loss_dice: 3.7628, decode.d7.loss_cls: 3.6344, decode.d7.loss_mask: 2.2107, decode.d7.loss_dice: 3.8042, decode.d8.loss_cls: 3.6489, decode.d8.loss_mask: 2.2597, decode.d8.loss_dice: 3.8618, loss: 100.3912 +2022-06-05 01:44:46,011 - mmseg - INFO - Iter [350/40000] lr: 1.759e-06, eta: 4:47:41, time: 0.461, data_time: 0.055, memory: 31652, decode.loss_cls: 3.6180, decode.loss_mask: 2.0999, decode.loss_dice: 3.7672, decode.d0.loss_cls: 8.5229, decode.d0.loss_mask: 2.0620, decode.d0.loss_dice: 3.5326, decode.d1.loss_cls: 3.9646, decode.d1.loss_mask: 2.0081, decode.d1.loss_dice: 3.5672, decode.d2.loss_cls: 3.6806, decode.d2.loss_mask: 1.9903, decode.d2.loss_dice: 3.5774, decode.d3.loss_cls: 3.6197, decode.d3.loss_mask: 1.9740, decode.d3.loss_dice: 3.5847, decode.d4.loss_cls: 3.6371, decode.d4.loss_mask: 1.9677, decode.d4.loss_dice: 3.5666, decode.d5.loss_cls: 3.6080, decode.d5.loss_mask: 1.9811, decode.d5.loss_dice: 3.5894, decode.d6.loss_cls: 3.5939, decode.d6.loss_mask: 1.9906, decode.d6.loss_dice: 3.6064, decode.d7.loss_cls: 3.6021, decode.d7.loss_mask: 2.0082, decode.d7.loss_dice: 3.6468, decode.d8.loss_cls: 3.6240, decode.d8.loss_mask: 2.0487, decode.d8.loss_dice: 3.7148, loss: 97.7543 +2022-06-05 01:45:06,624 - mmseg - INFO - Iter [400/40000] lr: 2.008e-06, eta: 4:45:25, time: 0.412, 
data_time: 0.007, memory: 31652, decode.loss_cls: 3.5831, decode.loss_mask: 1.9523, decode.loss_dice: 3.5750, decode.d0.loss_cls: 8.5245, decode.d0.loss_mask: 1.9935, decode.d0.loss_dice: 3.4844, decode.d1.loss_cls: 3.8876, decode.d1.loss_mask: 1.9468, decode.d1.loss_dice: 3.4868, decode.d2.loss_cls: 3.6464, decode.d2.loss_mask: 1.9234, decode.d2.loss_dice: 3.4588, decode.d3.loss_cls: 3.5777, decode.d3.loss_mask: 1.9076, decode.d3.loss_dice: 3.4429, decode.d4.loss_cls: 3.5899, decode.d4.loss_mask: 1.9136, decode.d4.loss_dice: 3.4268, decode.d5.loss_cls: 3.5588, decode.d5.loss_mask: 1.9175, decode.d5.loss_dice: 3.4535, decode.d6.loss_cls: 3.5644, decode.d6.loss_mask: 1.9206, decode.d6.loss_dice: 3.4694, decode.d7.loss_cls: 3.5478, decode.d7.loss_mask: 1.9170, decode.d7.loss_dice: 3.4764, decode.d8.loss_cls: 3.5764, decode.d8.loss_mask: 1.9305, decode.d8.loss_dice: 3.5215, loss: 95.1753 +2022-06-05 01:45:27,051 - mmseg - INFO - Iter [450/40000] lr: 2.257e-06, eta: 4:43:18, time: 0.408, data_time: 0.007, memory: 31652, decode.loss_cls: 3.5138, decode.loss_mask: 1.9540, decode.loss_dice: 3.4204, decode.d0.loss_cls: 8.5322, decode.d0.loss_mask: 1.9950, decode.d0.loss_dice: 3.4041, decode.d1.loss_cls: 3.7645, decode.d1.loss_mask: 1.9500, decode.d1.loss_dice: 3.3585, decode.d2.loss_cls: 3.5568, decode.d2.loss_mask: 1.9281, decode.d2.loss_dice: 3.3367, decode.d3.loss_cls: 3.4802, decode.d3.loss_mask: 1.9206, decode.d3.loss_dice: 3.3155, decode.d4.loss_cls: 3.4858, decode.d4.loss_mask: 1.9201, decode.d4.loss_dice: 3.2986, decode.d5.loss_cls: 3.4782, decode.d5.loss_mask: 1.9355, decode.d5.loss_dice: 3.3223, decode.d6.loss_cls: 3.4776, decode.d6.loss_mask: 1.9326, decode.d6.loss_dice: 3.3346, decode.d7.loss_cls: 3.4681, decode.d7.loss_mask: 1.9329, decode.d7.loss_dice: 3.3387, decode.d8.loss_cls: 3.4967, decode.d8.loss_mask: 1.9495, decode.d8.loss_dice: 3.3811, loss: 93.1824 +2022-06-05 01:45:47,747 - mmseg - INFO - Iter [500/40000] lr: 2.505e-06, eta: 4:41:54, time: 0.414, data_time: 0.008, memory: 31652, decode.loss_cls: 3.4427, decode.loss_mask: 1.9377, decode.loss_dice: 3.2632, decode.d0.loss_cls: 8.5239, decode.d0.loss_mask: 1.9942, decode.d0.loss_dice: 3.3298, decode.d1.loss_cls: 3.7156, decode.d1.loss_mask: 1.9428, decode.d1.loss_dice: 3.2538, decode.d2.loss_cls: 3.5071, decode.d2.loss_mask: 1.9155, decode.d2.loss_dice: 3.2044, decode.d3.loss_cls: 3.4293, decode.d3.loss_mask: 1.9013, decode.d3.loss_dice: 3.1826, decode.d4.loss_cls: 3.4274, decode.d4.loss_mask: 1.9041, decode.d4.loss_dice: 3.1675, decode.d5.loss_cls: 3.3986, decode.d5.loss_mask: 1.9055, decode.d5.loss_dice: 3.1825, decode.d6.loss_cls: 3.3964, decode.d6.loss_mask: 1.9098, decode.d6.loss_dice: 3.1868, decode.d7.loss_cls: 3.3911, decode.d7.loss_mask: 1.9206, decode.d7.loss_dice: 3.1909, decode.d8.loss_cls: 3.4071, decode.d8.loss_mask: 1.9260, decode.d8.loss_dice: 3.2215, loss: 91.0798 +2022-06-05 01:46:08,232 - mmseg - INFO - Iter [550/40000] lr: 2.753e-06, eta: 4:40:26, time: 0.410, data_time: 0.007, memory: 31652, decode.loss_cls: 3.4172, decode.loss_mask: 1.8598, decode.loss_dice: 3.2001, decode.d0.loss_cls: 8.5248, decode.d0.loss_mask: 1.9298, decode.d0.loss_dice: 3.3240, decode.d1.loss_cls: 3.7128, decode.d1.loss_mask: 1.8700, decode.d1.loss_dice: 3.2187, decode.d2.loss_cls: 3.5039, decode.d2.loss_mask: 1.8448, decode.d2.loss_dice: 3.1673, decode.d3.loss_cls: 3.4260, decode.d3.loss_mask: 1.8304, decode.d3.loss_dice: 3.1392, decode.d4.loss_cls: 3.4070, decode.d4.loss_mask: 1.8369, decode.d4.loss_dice: 3.1219, 
decode.d5.loss_cls: 3.3728, decode.d5.loss_mask: 1.8387, decode.d5.loss_dice: 3.1411, decode.d6.loss_cls: 3.3749, decode.d6.loss_mask: 1.8444, decode.d6.loss_dice: 3.1402, decode.d7.loss_cls: 3.3772, decode.d7.loss_mask: 1.8417, decode.d7.loss_dice: 3.1398, decode.d8.loss_cls: 3.3914, decode.d8.loss_mask: 1.8520, decode.d8.loss_dice: 3.1621, loss: 89.8109 +2022-06-05 01:46:29,436 - mmseg - INFO - Iter [600/40000] lr: 3.000e-06, eta: 4:39:56, time: 0.424, data_time: 0.007, memory: 31652, decode.loss_cls: 3.3030, decode.loss_mask: 1.8543, decode.loss_dice: 3.0046, decode.d0.loss_cls: 8.5198, decode.d0.loss_mask: 1.9395, decode.d0.loss_dice: 3.2178, decode.d1.loss_cls: 3.5904, decode.d1.loss_mask: 1.8696, decode.d1.loss_dice: 3.0889, decode.d2.loss_cls: 3.3857, decode.d2.loss_mask: 1.8478, decode.d2.loss_dice: 3.0275, decode.d3.loss_cls: 3.3372, decode.d3.loss_mask: 1.8341, decode.d3.loss_dice: 2.9853, decode.d4.loss_cls: 3.3049, decode.d4.loss_mask: 1.8353, decode.d4.loss_dice: 2.9567, decode.d5.loss_cls: 3.2548, decode.d5.loss_mask: 1.8402, decode.d5.loss_dice: 2.9632, decode.d6.loss_cls: 3.2735, decode.d6.loss_mask: 1.8441, decode.d6.loss_dice: 2.9572, decode.d7.loss_cls: 3.2657, decode.d7.loss_mask: 1.8455, decode.d7.loss_dice: 2.9515, decode.d8.loss_cls: 3.2772, decode.d8.loss_mask: 1.8568, decode.d8.loss_dice: 2.9736, loss: 87.2057 +2022-06-05 01:46:52,451 - mmseg - INFO - Iter [650/40000] lr: 3.246e-06, eta: 4:41:16, time: 0.460, data_time: 0.057, memory: 31652, decode.loss_cls: 3.2621, decode.loss_mask: 1.7138, decode.loss_dice: 2.8826, decode.d0.loss_cls: 8.5349, decode.d0.loss_mask: 1.8286, decode.d0.loss_dice: 3.1689, decode.d1.loss_cls: 3.5821, decode.d1.loss_mask: 1.7500, decode.d1.loss_dice: 3.0069, decode.d2.loss_cls: 3.3699, decode.d2.loss_mask: 1.7245, decode.d2.loss_dice: 2.9397, decode.d3.loss_cls: 3.3049, decode.d3.loss_mask: 1.7047, decode.d3.loss_dice: 2.8819, decode.d4.loss_cls: 3.2528, decode.d4.loss_mask: 1.7029, decode.d4.loss_dice: 2.8529, decode.d5.loss_cls: 3.1978, decode.d5.loss_mask: 1.7108, decode.d5.loss_dice: 2.8504, decode.d6.loss_cls: 3.2041, decode.d6.loss_mask: 1.7086, decode.d6.loss_dice: 2.8480, decode.d7.loss_cls: 3.2098, decode.d7.loss_mask: 1.7120, decode.d7.loss_dice: 2.8382, decode.d8.loss_cls: 3.2302, decode.d8.loss_mask: 1.7148, decode.d8.loss_dice: 2.8568, loss: 84.5454 +2022-06-05 01:47:13,071 - mmseg - INFO - Iter [700/40000] lr: 3.491e-06, eta: 4:40:10, time: 0.413, data_time: 0.008, memory: 31652, decode.loss_cls: 3.1279, decode.loss_mask: 1.6921, decode.loss_dice: 2.7123, decode.d0.loss_cls: 8.5254, decode.d0.loss_mask: 1.8476, decode.d0.loss_dice: 3.0632, decode.d1.loss_cls: 3.4818, decode.d1.loss_mask: 1.7511, decode.d1.loss_dice: 2.8704, decode.d2.loss_cls: 3.2710, decode.d2.loss_mask: 1.7113, decode.d2.loss_dice: 2.7817, decode.d3.loss_cls: 3.1892, decode.d3.loss_mask: 1.6917, decode.d3.loss_dice: 2.7279, decode.d4.loss_cls: 3.1354, decode.d4.loss_mask: 1.6908, decode.d4.loss_dice: 2.6901, decode.d5.loss_cls: 3.0916, decode.d5.loss_mask: 1.6908, decode.d5.loss_dice: 2.6890, decode.d6.loss_cls: 3.0793, decode.d6.loss_mask: 1.6841, decode.d6.loss_dice: 2.6768, decode.d7.loss_cls: 3.0854, decode.d7.loss_mask: 1.6787, decode.d7.loss_dice: 2.6632, decode.d8.loss_cls: 3.1049, decode.d8.loss_mask: 1.6898, decode.d8.loss_dice: 2.6808, loss: 81.7751 +2022-06-05 01:47:33,768 - mmseg - INFO - Iter [750/40000] lr: 3.736e-06, eta: 4:39:13, time: 0.414, data_time: 0.008, memory: 31652, decode.loss_cls: 3.0892, decode.loss_mask: 1.6220, 
decode.loss_dice: 2.5609, decode.d0.loss_cls: 8.5279, decode.d0.loss_mask: 1.7819, decode.d0.loss_dice: 2.9574, decode.d1.loss_cls: 3.4610, decode.d1.loss_mask: 1.6728, decode.d1.loss_dice: 2.7337, decode.d2.loss_cls: 3.2446, decode.d2.loss_mask: 1.6294, decode.d2.loss_dice: 2.6339, decode.d3.loss_cls: 3.1484, decode.d3.loss_mask: 1.6234, decode.d3.loss_dice: 2.5819, decode.d4.loss_cls: 3.0920, decode.d4.loss_mask: 1.6212, decode.d4.loss_dice: 2.5447, decode.d5.loss_cls: 3.0579, decode.d5.loss_mask: 1.6258, decode.d5.loss_dice: 2.5533, decode.d6.loss_cls: 3.0631, decode.d6.loss_mask: 1.6210, decode.d6.loss_dice: 2.5293, decode.d7.loss_cls: 3.0575, decode.d7.loss_mask: 1.6187, decode.d7.loss_dice: 2.5297, decode.d8.loss_cls: 3.0655, decode.d8.loss_mask: 1.6279, decode.d8.loss_dice: 2.5382, loss: 79.4143 +2022-06-05 01:47:54,831 - mmseg - INFO - Iter [800/40000] lr: 3.981e-06, eta: 4:38:36, time: 0.421, data_time: 0.008, memory: 31652, decode.loss_cls: 3.1071, decode.loss_mask: 1.5651, decode.loss_dice: 2.5004, decode.d0.loss_cls: 8.5068, decode.d0.loss_mask: 1.7252, decode.d0.loss_dice: 2.9325, decode.d1.loss_cls: 3.4989, decode.d1.loss_mask: 1.6050, decode.d1.loss_dice: 2.6855, decode.d2.loss_cls: 3.2776, decode.d2.loss_mask: 1.5642, decode.d2.loss_dice: 2.5770, decode.d3.loss_cls: 3.1825, decode.d3.loss_mask: 1.5498, decode.d3.loss_dice: 2.5218, decode.d4.loss_cls: 3.1325, decode.d4.loss_mask: 1.5480, decode.d4.loss_dice: 2.4815, decode.d5.loss_cls: 3.0846, decode.d5.loss_mask: 1.5577, decode.d5.loss_dice: 2.4872, decode.d6.loss_cls: 3.0758, decode.d6.loss_mask: 1.5691, decode.d6.loss_dice: 2.4731, decode.d7.loss_cls: 3.0615, decode.d7.loss_mask: 1.5582, decode.d7.loss_dice: 2.4748, decode.d8.loss_cls: 3.0825, decode.d8.loss_mask: 1.5679, decode.d8.loss_dice: 2.4824, loss: 78.4364 +2022-06-05 01:48:15,309 - mmseg - INFO - Iter [850/40000] lr: 4.224e-06, eta: 4:37:37, time: 0.410, data_time: 0.009, memory: 31652, decode.loss_cls: 2.9389, decode.loss_mask: 1.4952, decode.loss_dice: 2.3438, decode.d0.loss_cls: 8.5139, decode.d0.loss_mask: 1.6864, decode.d0.loss_dice: 2.8247, decode.d1.loss_cls: 3.3681, decode.d1.loss_mask: 1.5500, decode.d1.loss_dice: 2.5497, decode.d2.loss_cls: 3.1414, decode.d2.loss_mask: 1.5050, decode.d2.loss_dice: 2.4294, decode.d3.loss_cls: 3.0308, decode.d3.loss_mask: 1.4976, decode.d3.loss_dice: 2.3770, decode.d4.loss_cls: 2.9837, decode.d4.loss_mask: 1.4950, decode.d4.loss_dice: 2.3341, decode.d5.loss_cls: 2.9294, decode.d5.loss_mask: 1.4944, decode.d5.loss_dice: 2.3443, decode.d6.loss_cls: 2.9266, decode.d6.loss_mask: 1.4844, decode.d6.loss_dice: 2.3249, decode.d7.loss_cls: 2.9140, decode.d7.loss_mask: 1.4812, decode.d7.loss_dice: 2.3288, decode.d8.loss_cls: 2.9264, decode.d8.loss_mask: 1.4904, decode.d8.loss_dice: 2.3285, loss: 75.0380 +2022-06-05 01:48:35,816 - mmseg - INFO - Iter [900/40000] lr: 4.468e-06, eta: 4:36:43, time: 0.410, data_time: 0.007, memory: 31652, decode.loss_cls: 2.8733, decode.loss_mask: 1.4160, decode.loss_dice: 2.2276, decode.d0.loss_cls: 8.5068, decode.d0.loss_mask: 1.6059, decode.d0.loss_dice: 2.7045, decode.d1.loss_cls: 3.3440, decode.d1.loss_mask: 1.4681, decode.d1.loss_dice: 2.4234, decode.d2.loss_cls: 3.0923, decode.d2.loss_mask: 1.4205, decode.d2.loss_dice: 2.2963, decode.d3.loss_cls: 2.9696, decode.d3.loss_mask: 1.4062, decode.d3.loss_dice: 2.2557, decode.d4.loss_cls: 2.9035, decode.d4.loss_mask: 1.4195, decode.d4.loss_dice: 2.2295, decode.d5.loss_cls: 2.8570, decode.d5.loss_mask: 1.4181, decode.d5.loss_dice: 2.2281, 
decode.d6.loss_cls: 2.8486, decode.d6.loss_mask: 1.4133, decode.d6.loss_dice: 2.2171, decode.d7.loss_cls: 2.8326, decode.d7.loss_mask: 1.4191, decode.d7.loss_dice: 2.2136, decode.d8.loss_cls: 2.8500, decode.d8.loss_mask: 1.4166, decode.d8.loss_dice: 2.2141, loss: 72.4911 +2022-06-05 01:48:58,742 - mmseg - INFO - Iter [950/40000] lr: 4.710e-06, eta: 4:37:31, time: 0.458, data_time: 0.057, memory: 31652, decode.loss_cls: 2.8096, decode.loss_mask: 1.3440, decode.loss_dice: 2.1386, decode.d0.loss_cls: 8.4932, decode.d0.loss_mask: 1.5459, decode.d0.loss_dice: 2.6536, decode.d1.loss_cls: 3.3078, decode.d1.loss_mask: 1.3953, decode.d1.loss_dice: 2.3486, decode.d2.loss_cls: 3.0608, decode.d2.loss_mask: 1.3654, decode.d2.loss_dice: 2.2395, decode.d3.loss_cls: 2.9236, decode.d3.loss_mask: 1.3526, decode.d3.loss_dice: 2.1735, decode.d4.loss_cls: 2.8624, decode.d4.loss_mask: 1.3593, decode.d4.loss_dice: 2.1448, decode.d5.loss_cls: 2.8187, decode.d5.loss_mask: 1.3568, decode.d5.loss_dice: 2.1443, decode.d6.loss_cls: 2.7837, decode.d6.loss_mask: 1.3565, decode.d6.loss_dice: 2.1386, decode.d7.loss_cls: 2.7757, decode.d7.loss_mask: 1.3450, decode.d7.loss_dice: 2.1257, decode.d8.loss_cls: 2.7850, decode.d8.loss_mask: 1.3457, decode.d8.loss_dice: 2.1267, loss: 70.6209 +2022-06-05 01:49:19,110 - mmseg - INFO - Saving checkpoint at 1000 iterations +2022-06-05 01:49:21,817 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 01:49:21,818 - mmseg - INFO - Iter [1000/40000] lr: 4.952e-06, eta: 4:38:18, time: 0.461, data_time: 0.008, memory: 31652, decode.loss_cls: 2.7105, decode.loss_mask: 1.2831, decode.loss_dice: 2.0281, decode.d0.loss_cls: 8.4971, decode.d0.loss_mask: 1.4858, decode.d0.loss_dice: 2.5725, decode.d1.loss_cls: 3.2498, decode.d1.loss_mask: 1.3203, decode.d1.loss_dice: 2.2207, decode.d2.loss_cls: 2.9767, decode.d2.loss_mask: 1.2879, decode.d2.loss_dice: 2.1078, decode.d3.loss_cls: 2.8439, decode.d3.loss_mask: 1.2789, decode.d3.loss_dice: 2.0586, decode.d4.loss_cls: 2.7670, decode.d4.loss_mask: 1.2876, decode.d4.loss_dice: 2.0389, decode.d5.loss_cls: 2.7105, decode.d5.loss_mask: 1.2941, decode.d5.loss_dice: 2.0340, decode.d6.loss_cls: 2.6912, decode.d6.loss_mask: 1.2931, decode.d6.loss_dice: 2.0248, decode.d7.loss_cls: 2.6793, decode.d7.loss_mask: 1.2913, decode.d7.loss_dice: 2.0200, decode.d8.loss_cls: 2.6793, decode.d8.loss_mask: 1.2946, decode.d8.loss_dice: 2.0241, loss: 68.0517 +2022-06-05 01:49:41,865 - mmseg - INFO - Iter [1050/40000] lr: 5.193e-06, eta: 4:37:06, time: 0.401, data_time: 0.006, memory: 31652, decode.loss_cls: 2.6133, decode.loss_mask: 1.2299, decode.loss_dice: 1.8956, decode.d0.loss_cls: 8.4814, decode.d0.loss_mask: 1.4349, decode.d0.loss_dice: 2.4641, decode.d1.loss_cls: 3.1680, decode.d1.loss_mask: 1.2662, decode.d1.loss_dice: 2.0862, decode.d2.loss_cls: 2.8836, decode.d2.loss_mask: 1.2445, decode.d2.loss_dice: 1.9885, decode.d3.loss_cls: 2.7590, decode.d3.loss_mask: 1.2330, decode.d3.loss_dice: 1.9383, decode.d4.loss_cls: 2.6878, decode.d4.loss_mask: 1.2350, decode.d4.loss_dice: 1.9069, decode.d5.loss_cls: 2.6246, decode.d5.loss_mask: 1.2377, decode.d5.loss_dice: 1.9076, decode.d6.loss_cls: 2.6039, decode.d6.loss_mask: 1.2268, decode.d6.loss_dice: 1.8847, decode.d7.loss_cls: 2.5941, decode.d7.loss_mask: 1.2232, decode.d7.loss_dice: 1.8763, decode.d8.loss_cls: 2.5956, decode.d8.loss_mask: 1.2267, decode.d8.loss_dice: 1.8815, loss: 65.3990 +2022-06-05 01:50:02,290 - mmseg - INFO - Iter [1100/40000] lr: 5.434e-06, eta: 4:36:13, 
time: 0.409, data_time: 0.007, memory: 31652, decode.loss_cls: 2.5995, decode.loss_mask: 1.2074, decode.loss_dice: 1.8860, decode.d0.loss_cls: 8.4701, decode.d0.loss_mask: 1.3789, decode.d0.loss_dice: 2.4391, decode.d1.loss_cls: 3.1850, decode.d1.loss_mask: 1.2440, decode.d1.loss_dice: 2.0727, decode.d2.loss_cls: 2.8788, decode.d2.loss_mask: 1.2096, decode.d2.loss_dice: 1.9595, decode.d3.loss_cls: 2.7608, decode.d3.loss_mask: 1.2013, decode.d3.loss_dice: 1.9168, decode.d4.loss_cls: 2.6959, decode.d4.loss_mask: 1.1976, decode.d4.loss_dice: 1.8830, decode.d5.loss_cls: 2.6313, decode.d5.loss_mask: 1.1980, decode.d5.loss_dice: 1.8766, decode.d6.loss_cls: 2.6030, decode.d6.loss_mask: 1.2082, decode.d6.loss_dice: 1.8820, decode.d7.loss_cls: 2.5903, decode.d7.loss_mask: 1.1992, decode.d7.loss_dice: 1.8739, decode.d8.loss_cls: 2.5847, decode.d8.loss_mask: 1.1964, decode.d8.loss_dice: 1.8729, loss: 64.9024 +2022-06-05 01:50:22,778 - mmseg - INFO - Iter [1150/40000] lr: 5.673e-06, eta: 4:35:23, time: 0.409, data_time: 0.007, memory: 31652, decode.loss_cls: 2.4316, decode.loss_mask: 1.1370, decode.loss_dice: 1.7697, decode.d0.loss_cls: 8.4747, decode.d0.loss_mask: 1.3018, decode.d0.loss_dice: 2.3260, decode.d1.loss_cls: 3.0183, decode.d1.loss_mask: 1.1510, decode.d1.loss_dice: 1.9323, decode.d2.loss_cls: 2.7118, decode.d2.loss_mask: 1.1388, decode.d2.loss_dice: 1.8295, decode.d3.loss_cls: 2.5984, decode.d3.loss_mask: 1.1292, decode.d3.loss_dice: 1.7889, decode.d4.loss_cls: 2.5279, decode.d4.loss_mask: 1.1293, decode.d4.loss_dice: 1.7580, decode.d5.loss_cls: 2.4651, decode.d5.loss_mask: 1.1308, decode.d5.loss_dice: 1.7670, decode.d6.loss_cls: 2.4245, decode.d6.loss_mask: 1.1400, decode.d6.loss_dice: 1.7688, decode.d7.loss_cls: 2.4153, decode.d7.loss_mask: 1.1393, decode.d7.loss_dice: 1.7696, decode.d8.loss_cls: 2.4161, decode.d8.loss_mask: 1.1384, decode.d8.loss_dice: 1.7657, loss: 61.4949 +2022-06-05 01:50:43,221 - mmseg - INFO - Iter [1200/40000] lr: 5.913e-06, eta: 4:34:36, time: 0.409, data_time: 0.008, memory: 31652, decode.loss_cls: 2.3245, decode.loss_mask: 1.1376, decode.loss_dice: 1.7194, decode.d0.loss_cls: 8.4512, decode.d0.loss_mask: 1.2764, decode.d0.loss_dice: 2.2447, decode.d1.loss_cls: 2.9491, decode.d1.loss_mask: 1.1517, decode.d1.loss_dice: 1.8737, decode.d2.loss_cls: 2.6389, decode.d2.loss_mask: 1.1293, decode.d2.loss_dice: 1.7724, decode.d3.loss_cls: 2.5049, decode.d3.loss_mask: 1.1235, decode.d3.loss_dice: 1.7357, decode.d4.loss_cls: 2.4408, decode.d4.loss_mask: 1.1311, decode.d4.loss_dice: 1.7129, decode.d5.loss_cls: 2.3740, decode.d5.loss_mask: 1.1424, decode.d5.loss_dice: 1.7153, decode.d6.loss_cls: 2.3300, decode.d6.loss_mask: 1.1398, decode.d6.loss_dice: 1.7103, decode.d7.loss_cls: 2.3246, decode.d7.loss_mask: 1.1341, decode.d7.loss_dice: 1.7107, decode.d8.loss_cls: 2.3175, decode.d8.loss_mask: 1.1399, decode.d8.loss_dice: 1.7049, loss: 60.0611 +2022-06-05 01:51:06,220 - mmseg - INFO - Iter [1250/40000] lr: 6.151e-06, eta: 4:35:09, time: 0.460, data_time: 0.057, memory: 31652, decode.loss_cls: 2.3265, decode.loss_mask: 1.0939, decode.loss_dice: 1.6857, decode.d0.loss_cls: 8.4395, decode.d0.loss_mask: 1.2402, decode.d0.loss_dice: 2.2457, decode.d1.loss_cls: 2.9258, decode.d1.loss_mask: 1.1077, decode.d1.loss_dice: 1.8494, decode.d2.loss_cls: 2.6252, decode.d2.loss_mask: 1.0889, decode.d2.loss_dice: 1.7471, decode.d3.loss_cls: 2.5042, decode.d3.loss_mask: 1.0850, decode.d3.loss_dice: 1.7135, decode.d4.loss_cls: 2.4192, decode.d4.loss_mask: 1.0844, decode.d4.loss_dice: 1.6932, 
decode.d5.loss_cls: 2.3611, decode.d5.loss_mask: 1.0837, decode.d5.loss_dice: 1.6816, decode.d6.loss_cls: 2.3268, decode.d6.loss_mask: 1.0874, decode.d6.loss_dice: 1.6863, decode.d7.loss_cls: 2.3149, decode.d7.loss_mask: 1.0803, decode.d7.loss_dice: 1.6797, decode.d8.loss_cls: 2.3038, decode.d8.loss_mask: 1.0885, decode.d8.loss_dice: 1.6758, loss: 59.2452 +2022-06-05 01:51:26,672 - mmseg - INFO - Iter [1300/40000] lr: 6.389e-06, eta: 4:34:23, time: 0.409, data_time: 0.008, memory: 31652, decode.loss_cls: 2.2138, decode.loss_mask: 1.0748, decode.loss_dice: 1.5761, decode.d0.loss_cls: 8.4249, decode.d0.loss_mask: 1.2153, decode.d0.loss_dice: 2.1326, decode.d1.loss_cls: 2.8380, decode.d1.loss_mask: 1.0858, decode.d1.loss_dice: 1.7332, decode.d2.loss_cls: 2.5266, decode.d2.loss_mask: 1.0742, decode.d2.loss_dice: 1.6262, decode.d3.loss_cls: 2.3974, decode.d3.loss_mask: 1.0744, decode.d3.loss_dice: 1.5907, decode.d4.loss_cls: 2.3335, decode.d4.loss_mask: 1.0819, decode.d4.loss_dice: 1.5871, decode.d5.loss_cls: 2.2716, decode.d5.loss_mask: 1.0789, decode.d5.loss_dice: 1.5930, decode.d6.loss_cls: 2.2364, decode.d6.loss_mask: 1.0748, decode.d6.loss_dice: 1.5778, decode.d7.loss_cls: 2.2141, decode.d7.loss_mask: 1.0692, decode.d7.loss_dice: 1.5755, decode.d8.loss_cls: 2.2076, decode.d8.loss_mask: 1.0682, decode.d8.loss_dice: 1.5702, loss: 57.1237 +2022-06-05 01:51:47,129 - mmseg - INFO - Iter [1350/40000] lr: 6.627e-06, eta: 4:33:38, time: 0.409, data_time: 0.009, memory: 31652, decode.loss_cls: 2.1640, decode.loss_mask: 1.0288, decode.loss_dice: 1.5853, decode.d0.loss_cls: 8.4042, decode.d0.loss_mask: 1.1406, decode.d0.loss_dice: 2.1210, decode.d1.loss_cls: 2.7919, decode.d1.loss_mask: 1.0282, decode.d1.loss_dice: 1.7118, decode.d2.loss_cls: 2.4706, decode.d2.loss_mask: 1.0104, decode.d2.loss_dice: 1.6157, decode.d3.loss_cls: 2.3399, decode.d3.loss_mask: 1.0106, decode.d3.loss_dice: 1.5880, decode.d4.loss_cls: 2.2695, decode.d4.loss_mask: 1.0221, decode.d4.loss_dice: 1.5748, decode.d5.loss_cls: 2.1961, decode.d5.loss_mask: 1.0297, decode.d5.loss_dice: 1.5808, decode.d6.loss_cls: 2.1588, decode.d6.loss_mask: 1.0262, decode.d6.loss_dice: 1.5706, decode.d7.loss_cls: 2.1364, decode.d7.loss_mask: 1.0249, decode.d7.loss_dice: 1.5714, decode.d8.loss_cls: 2.1283, decode.d8.loss_mask: 1.0345, decode.d8.loss_dice: 1.5827, loss: 55.9179 +2022-06-05 01:52:07,612 - mmseg - INFO - Iter [1400/40000] lr: 6.863e-06, eta: 4:32:55, time: 0.409, data_time: 0.007, memory: 31652, decode.loss_cls: 2.1323, decode.loss_mask: 1.0504, decode.loss_dice: 1.5778, decode.d0.loss_cls: 8.3915, decode.d0.loss_mask: 1.1607, decode.d0.loss_dice: 2.1056, decode.d1.loss_cls: 2.7567, decode.d1.loss_mask: 1.0644, decode.d1.loss_dice: 1.7192, decode.d2.loss_cls: 2.4437, decode.d2.loss_mask: 1.0572, decode.d2.loss_dice: 1.6291, decode.d3.loss_cls: 2.3111, decode.d3.loss_mask: 1.0502, decode.d3.loss_dice: 1.5967, decode.d4.loss_cls: 2.2512, decode.d4.loss_mask: 1.0454, decode.d4.loss_dice: 1.5712, decode.d5.loss_cls: 2.1882, decode.d5.loss_mask: 1.0459, decode.d5.loss_dice: 1.5780, decode.d6.loss_cls: 2.1449, decode.d6.loss_mask: 1.0477, decode.d6.loss_dice: 1.5669, decode.d7.loss_cls: 2.1313, decode.d7.loss_mask: 1.0463, decode.d7.loss_dice: 1.5657, decode.d8.loss_cls: 2.1218, decode.d8.loss_mask: 1.0589, decode.d8.loss_dice: 1.5667, loss: 55.9767 +2022-06-05 01:52:28,904 - mmseg - INFO - Iter [1450/40000] lr: 7.099e-06, eta: 4:32:37, time: 0.427, data_time: 0.008, memory: 31652, decode.loss_cls: 1.9931, decode.loss_mask: 1.0146, 
decode.loss_dice: 1.4790, decode.d0.loss_cls: 8.3821, decode.d0.loss_mask: 1.1393, decode.d0.loss_dice: 1.9830, decode.d1.loss_cls: 2.6240, decode.d1.loss_mask: 1.0279, decode.d1.loss_dice: 1.5867, decode.d2.loss_cls: 2.3277, decode.d2.loss_mask: 1.0121, decode.d2.loss_dice: 1.5013, decode.d3.loss_cls: 2.1992, decode.d3.loss_mask: 1.0089, decode.d3.loss_dice: 1.4931, decode.d4.loss_cls: 2.1228, decode.d4.loss_mask: 1.0124, decode.d4.loss_dice: 1.4754, decode.d5.loss_cls: 2.0559, decode.d5.loss_mask: 1.0160, decode.d5.loss_dice: 1.4864, decode.d6.loss_cls: 2.0228, decode.d6.loss_mask: 1.0233, decode.d6.loss_dice: 1.4766, decode.d7.loss_cls: 1.9970, decode.d7.loss_mask: 1.0138, decode.d7.loss_dice: 1.4764, decode.d8.loss_cls: 1.9805, decode.d8.loss_mask: 1.0143, decode.d8.loss_dice: 1.4690, loss: 53.4144 +2022-06-05 01:52:49,626 - mmseg - INFO - Iter [1500/40000] lr: 7.335e-06, eta: 4:32:03, time: 0.414, data_time: 0.007, memory: 31652, decode.loss_cls: 1.9108, decode.loss_mask: 1.0415, decode.loss_dice: 1.4461, decode.d0.loss_cls: 8.3538, decode.d0.loss_mask: 1.1287, decode.d0.loss_dice: 1.9410, decode.d1.loss_cls: 2.5523, decode.d1.loss_mask: 1.0319, decode.d1.loss_dice: 1.5459, decode.d2.loss_cls: 2.2550, decode.d2.loss_mask: 1.0277, decode.d2.loss_dice: 1.4609, decode.d3.loss_cls: 2.1036, decode.d3.loss_mask: 1.0329, decode.d3.loss_dice: 1.4463, decode.d4.loss_cls: 2.0372, decode.d4.loss_mask: 1.0347, decode.d4.loss_dice: 1.4351, decode.d5.loss_cls: 1.9837, decode.d5.loss_mask: 1.0302, decode.d5.loss_dice: 1.4441, decode.d6.loss_cls: 1.9454, decode.d6.loss_mask: 1.0337, decode.d6.loss_dice: 1.4313, decode.d7.loss_cls: 1.9181, decode.d7.loss_mask: 1.0333, decode.d7.loss_dice: 1.4327, decode.d8.loss_cls: 1.9020, decode.d8.loss_mask: 1.0402, decode.d8.loss_dice: 1.4384, loss: 52.4184 +2022-06-05 01:53:10,596 - mmseg - INFO - Iter [1550/40000] lr: 7.330e-06, eta: 4:31:36, time: 0.419, data_time: 0.008, memory: 31652, decode.loss_cls: 1.9352, decode.loss_mask: 1.0109, decode.loss_dice: 1.4505, decode.d0.loss_cls: 8.3429, decode.d0.loss_mask: 1.0868, decode.d0.loss_dice: 1.9314, decode.d1.loss_cls: 2.5647, decode.d1.loss_mask: 1.0021, decode.d1.loss_dice: 1.5479, decode.d2.loss_cls: 2.2670, decode.d2.loss_mask: 0.9988, decode.d2.loss_dice: 1.4633, decode.d3.loss_cls: 2.0981, decode.d3.loss_mask: 1.0051, decode.d3.loss_dice: 1.4561, decode.d4.loss_cls: 2.0395, decode.d4.loss_mask: 1.0039, decode.d4.loss_dice: 1.4437, decode.d5.loss_cls: 1.9785, decode.d5.loss_mask: 1.0047, decode.d5.loss_dice: 1.4397, decode.d6.loss_cls: 1.9428, decode.d6.loss_mask: 1.0060, decode.d6.loss_dice: 1.4361, decode.d7.loss_cls: 1.9292, decode.d7.loss_mask: 1.0085, decode.d7.loss_dice: 1.4363, decode.d8.loss_cls: 1.9175, decode.d8.loss_mask: 1.0135, decode.d8.loss_dice: 1.4479, loss: 52.2085 +2022-06-05 01:53:33,560 - mmseg - INFO - Iter [1600/40000] lr: 7.321e-06, eta: 4:31:57, time: 0.459, data_time: 0.054, memory: 31652, decode.loss_cls: 1.8554, decode.loss_mask: 0.9706, decode.loss_dice: 1.4303, decode.d0.loss_cls: 8.3234, decode.d0.loss_mask: 1.0443, decode.d0.loss_dice: 1.8852, decode.d1.loss_cls: 2.5054, decode.d1.loss_mask: 0.9703, decode.d1.loss_dice: 1.5259, decode.d2.loss_cls: 2.1947, decode.d2.loss_mask: 0.9602, decode.d2.loss_dice: 1.4461, decode.d3.loss_cls: 2.0460, decode.d3.loss_mask: 0.9637, decode.d3.loss_dice: 1.4414, decode.d4.loss_cls: 1.9686, decode.d4.loss_mask: 0.9658, decode.d4.loss_dice: 1.4297, decode.d5.loss_cls: 1.9214, decode.d5.loss_mask: 0.9672, decode.d5.loss_dice: 1.4378, 
decode.d6.loss_cls: 1.8805, decode.d6.loss_mask: 0.9683, decode.d6.loss_dice: 1.4306, decode.d7.loss_cls: 1.8609, decode.d7.loss_mask: 0.9648, decode.d7.loss_dice: 1.4296, decode.d8.loss_cls: 1.8441, decode.d8.loss_mask: 0.9696, decode.d8.loss_dice: 1.4325, loss: 51.0343 +2022-06-05 01:53:54,385 - mmseg - INFO - Iter [1650/40000] lr: 7.311e-06, eta: 4:31:26, time: 0.417, data_time: 0.008, memory: 31652, decode.loss_cls: 1.8273, decode.loss_mask: 0.9511, decode.loss_dice: 1.4103, decode.d0.loss_cls: 8.2941, decode.d0.loss_mask: 1.0354, decode.d0.loss_dice: 1.8882, decode.d1.loss_cls: 2.4597, decode.d1.loss_mask: 0.9549, decode.d1.loss_dice: 1.4957, decode.d2.loss_cls: 2.1650, decode.d2.loss_mask: 0.9494, decode.d2.loss_dice: 1.4315, decode.d3.loss_cls: 2.0126, decode.d3.loss_mask: 0.9595, decode.d3.loss_dice: 1.4205, decode.d4.loss_cls: 1.9443, decode.d4.loss_mask: 0.9557, decode.d4.loss_dice: 1.4177, decode.d5.loss_cls: 1.8827, decode.d5.loss_mask: 0.9606, decode.d5.loss_dice: 1.4325, decode.d6.loss_cls: 1.8505, decode.d6.loss_mask: 0.9520, decode.d6.loss_dice: 1.4156, decode.d7.loss_cls: 1.8247, decode.d7.loss_mask: 0.9598, decode.d7.loss_dice: 1.4101, decode.d8.loss_cls: 1.8126, decode.d8.loss_mask: 0.9597, decode.d8.loss_dice: 1.4152, loss: 50.4489 +2022-06-05 01:54:15,050 - mmseg - INFO - Iter [1700/40000] lr: 7.302e-06, eta: 4:30:52, time: 0.413, data_time: 0.007, memory: 31652, decode.loss_cls: 1.7274, decode.loss_mask: 0.9859, decode.loss_dice: 1.3488, decode.d0.loss_cls: 8.2997, decode.d0.loss_mask: 1.0329, decode.d0.loss_dice: 1.7821, decode.d1.loss_cls: 2.3656, decode.d1.loss_mask: 0.9714, decode.d1.loss_dice: 1.4295, decode.d2.loss_cls: 2.0629, decode.d2.loss_mask: 0.9732, decode.d2.loss_dice: 1.3631, decode.d3.loss_cls: 1.9071, decode.d3.loss_mask: 0.9782, decode.d3.loss_dice: 1.3646, decode.d4.loss_cls: 1.8395, decode.d4.loss_mask: 0.9749, decode.d4.loss_dice: 1.3552, decode.d5.loss_cls: 1.7845, decode.d5.loss_mask: 0.9668, decode.d5.loss_dice: 1.3580, decode.d6.loss_cls: 1.7507, decode.d6.loss_mask: 0.9707, decode.d6.loss_dice: 1.3524, decode.d7.loss_cls: 1.7330, decode.d7.loss_mask: 0.9671, decode.d7.loss_dice: 1.3504, decode.d8.loss_cls: 1.7177, decode.d8.loss_mask: 0.9789, decode.d8.loss_dice: 1.3524, loss: 49.0445 +2022-06-05 01:54:35,846 - mmseg - INFO - Iter [1750/40000] lr: 7.292e-06, eta: 4:30:22, time: 0.416, data_time: 0.008, memory: 31652, decode.loss_cls: 1.6600, decode.loss_mask: 0.9758, decode.loss_dice: 1.3517, decode.d0.loss_cls: 8.2624, decode.d0.loss_mask: 1.0288, decode.d0.loss_dice: 1.8111, decode.d1.loss_cls: 2.3247, decode.d1.loss_mask: 0.9556, decode.d1.loss_dice: 1.4359, decode.d2.loss_cls: 2.0207, decode.d2.loss_mask: 0.9664, decode.d2.loss_dice: 1.3728, decode.d3.loss_cls: 1.8686, decode.d3.loss_mask: 0.9554, decode.d3.loss_dice: 1.3606, decode.d4.loss_cls: 1.8004, decode.d4.loss_mask: 0.9617, decode.d4.loss_dice: 1.3507, decode.d5.loss_cls: 1.7531, decode.d5.loss_mask: 0.9641, decode.d5.loss_dice: 1.3622, decode.d6.loss_cls: 1.7065, decode.d6.loss_mask: 0.9592, decode.d6.loss_dice: 1.3502, decode.d7.loss_cls: 1.6708, decode.d7.loss_mask: 0.9692, decode.d7.loss_dice: 1.3502, decode.d8.loss_cls: 1.6600, decode.d8.loss_mask: 0.9683, decode.d8.loss_dice: 1.3534, loss: 48.5306 +2022-06-05 01:54:56,622 - mmseg - INFO - Iter [1800/40000] lr: 7.283e-06, eta: 4:29:52, time: 0.416, data_time: 0.008, memory: 31652, decode.loss_cls: 1.6516, decode.loss_mask: 0.9644, decode.loss_dice: 1.3466, decode.d0.loss_cls: 8.2411, decode.d0.loss_mask: 1.0093, 
decode.d0.loss_dice: 1.7826, decode.d1.loss_cls: 2.3277, decode.d1.loss_mask: 0.9515, decode.d1.loss_dice: 1.4206, decode.d2.loss_cls: 2.0158, decode.d2.loss_mask: 0.9547, decode.d2.loss_dice: 1.3596, decode.d3.loss_cls: 1.8583, decode.d3.loss_mask: 0.9472, decode.d3.loss_dice: 1.3449, decode.d4.loss_cls: 1.7861, decode.d4.loss_mask: 0.9465, decode.d4.loss_dice: 1.3397, decode.d5.loss_cls: 1.7305, decode.d5.loss_mask: 0.9444, decode.d5.loss_dice: 1.3454, decode.d6.loss_cls: 1.6956, decode.d6.loss_mask: 0.9533, decode.d6.loss_dice: 1.3442, decode.d7.loss_cls: 1.6659, decode.d7.loss_mask: 0.9569, decode.d7.loss_dice: 1.3432, decode.d8.loss_cls: 1.6507, decode.d8.loss_mask: 0.9656, decode.d8.loss_dice: 1.3419, loss: 48.1859 +2022-06-05 01:55:17,007 - mmseg - INFO - Iter [1850/40000] lr: 7.273e-06, eta: 4:29:14, time: 0.408, data_time: 0.008, memory: 31652, decode.loss_cls: 1.6322, decode.loss_mask: 0.9729, decode.loss_dice: 1.4108, decode.d0.loss_cls: 8.2327, decode.d0.loss_mask: 1.0150, decode.d0.loss_dice: 1.8465, decode.d1.loss_cls: 2.3475, decode.d1.loss_mask: 0.9640, decode.d1.loss_dice: 1.4963, decode.d2.loss_cls: 2.0444, decode.d2.loss_mask: 0.9670, decode.d2.loss_dice: 1.4280, decode.d3.loss_cls: 1.8712, decode.d3.loss_mask: 0.9601, decode.d3.loss_dice: 1.4081, decode.d4.loss_cls: 1.7861, decode.d4.loss_mask: 0.9679, decode.d4.loss_dice: 1.4125, decode.d5.loss_cls: 1.7123, decode.d5.loss_mask: 0.9669, decode.d5.loss_dice: 1.4111, decode.d6.loss_cls: 1.6802, decode.d6.loss_mask: 0.9709, decode.d6.loss_dice: 1.4078, decode.d7.loss_cls: 1.6535, decode.d7.loss_mask: 0.9679, decode.d7.loss_dice: 1.4033, decode.d8.loss_cls: 1.6359, decode.d8.loss_mask: 0.9728, decode.d8.loss_dice: 1.4084, loss: 48.9541 +2022-06-05 01:55:39,805 - mmseg - INFO - Iter [1900/40000] lr: 7.264e-06, eta: 4:29:25, time: 0.456, data_time: 0.058, memory: 31652, decode.loss_cls: 1.5672, decode.loss_mask: 0.9315, decode.loss_dice: 1.2948, decode.d0.loss_cls: 8.1962, decode.d0.loss_mask: 0.9675, decode.d0.loss_dice: 1.7099, decode.d1.loss_cls: 2.2568, decode.d1.loss_mask: 0.9171, decode.d1.loss_dice: 1.3679, decode.d2.loss_cls: 1.9480, decode.d2.loss_mask: 0.9176, decode.d2.loss_dice: 1.3145, decode.d3.loss_cls: 1.7860, decode.d3.loss_mask: 0.9135, decode.d3.loss_dice: 1.2979, decode.d4.loss_cls: 1.7093, decode.d4.loss_mask: 0.9203, decode.d4.loss_dice: 1.2931, decode.d5.loss_cls: 1.6469, decode.d5.loss_mask: 0.9106, decode.d5.loss_dice: 1.2893, decode.d6.loss_cls: 1.6090, decode.d6.loss_mask: 0.9094, decode.d6.loss_dice: 1.2788, decode.d7.loss_cls: 1.5834, decode.d7.loss_mask: 0.9172, decode.d7.loss_dice: 1.2876, decode.d8.loss_cls: 1.5728, decode.d8.loss_mask: 0.9187, decode.d8.loss_dice: 1.2843, loss: 46.5174 +2022-06-05 01:56:00,585 - mmseg - INFO - Iter [1950/40000] lr: 7.254e-06, eta: 4:28:55, time: 0.416, data_time: 0.007, memory: 31652, decode.loss_cls: 1.5688, decode.loss_mask: 0.9341, decode.loss_dice: 1.3006, decode.d0.loss_cls: 8.1894, decode.d0.loss_mask: 0.9903, decode.d0.loss_dice: 1.7406, decode.d1.loss_cls: 2.2482, decode.d1.loss_mask: 0.9247, decode.d1.loss_dice: 1.3897, decode.d2.loss_cls: 1.9395, decode.d2.loss_mask: 0.9328, decode.d2.loss_dice: 1.3359, decode.d3.loss_cls: 1.7720, decode.d3.loss_mask: 0.9300, decode.d3.loss_dice: 1.3207, decode.d4.loss_cls: 1.6755, decode.d4.loss_mask: 0.9356, decode.d4.loss_dice: 1.3118, decode.d5.loss_cls: 1.6221, decode.d5.loss_mask: 0.9332, decode.d5.loss_dice: 1.3147, decode.d6.loss_cls: 1.5994, decode.d6.loss_mask: 0.9340, decode.d6.loss_dice: 1.3124, 
decode.d7.loss_cls: 1.5700, decode.d7.loss_mask: 0.9341, decode.d7.loss_dice: 1.3082, decode.d8.loss_cls: 1.5699, decode.d8.loss_mask: 0.9398, decode.d8.loss_dice: 1.2983, loss: 46.7765 +2022-06-05 01:56:21,448 - mmseg - INFO - Saving checkpoint at 2000 iterations +2022-06-05 01:56:24,017 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 01:56:24,017 - mmseg - INFO - Iter [2000/40000] lr: 7.245e-06, eta: 4:29:17, time: 0.469, data_time: 0.008, memory: 31652, decode.loss_cls: 1.4339, decode.loss_mask: 0.9360, decode.loss_dice: 1.2695, decode.d0.loss_cls: 8.1649, decode.d0.loss_mask: 0.9794, decode.d0.loss_dice: 1.6713, decode.d1.loss_cls: 2.1357, decode.d1.loss_mask: 0.9256, decode.d1.loss_dice: 1.3306, decode.d2.loss_cls: 1.8196, decode.d2.loss_mask: 0.9229, decode.d2.loss_dice: 1.2832, decode.d3.loss_cls: 1.6443, decode.d3.loss_mask: 0.9289, decode.d3.loss_dice: 1.2704, decode.d4.loss_cls: 1.5532, decode.d4.loss_mask: 0.9228, decode.d4.loss_dice: 1.2619, decode.d5.loss_cls: 1.4861, decode.d5.loss_mask: 0.9326, decode.d5.loss_dice: 1.2643, decode.d6.loss_cls: 1.4757, decode.d6.loss_mask: 0.9260, decode.d6.loss_dice: 1.2491, decode.d7.loss_cls: 1.4378, decode.d7.loss_mask: 0.9264, decode.d7.loss_dice: 1.2674, decode.d8.loss_cls: 1.4267, decode.d8.loss_mask: 0.9328, decode.d8.loss_dice: 1.2617, loss: 45.0407 +2022-06-05 01:56:44,961 - mmseg - INFO - Iter [2050/40000] lr: 7.235e-06, eta: 4:28:49, time: 0.418, data_time: 0.007, memory: 31652, decode.loss_cls: 1.4919, decode.loss_mask: 0.9177, decode.loss_dice: 1.3124, decode.d0.loss_cls: 8.1413, decode.d0.loss_mask: 0.9635, decode.d0.loss_dice: 1.7152, decode.d1.loss_cls: 2.1802, decode.d1.loss_mask: 0.9098, decode.d1.loss_dice: 1.3833, decode.d2.loss_cls: 1.8762, decode.d2.loss_mask: 0.9075, decode.d2.loss_dice: 1.3215, decode.d3.loss_cls: 1.7043, decode.d3.loss_mask: 0.9047, decode.d3.loss_dice: 1.3049, decode.d4.loss_cls: 1.6168, decode.d4.loss_mask: 0.9067, decode.d4.loss_dice: 1.3098, decode.d5.loss_cls: 1.5461, decode.d5.loss_mask: 0.9096, decode.d5.loss_dice: 1.3127, decode.d6.loss_cls: 1.5244, decode.d6.loss_mask: 0.9133, decode.d6.loss_dice: 1.3019, decode.d7.loss_cls: 1.5107, decode.d7.loss_mask: 0.9149, decode.d7.loss_dice: 1.3101, decode.d8.loss_cls: 1.4925, decode.d8.loss_mask: 0.9138, decode.d8.loss_dice: 1.3150, loss: 45.8327 +2022-06-05 01:57:05,789 - mmseg - INFO - Iter [2100/40000] lr: 7.225e-06, eta: 4:28:20, time: 0.417, data_time: 0.008, memory: 31652, decode.loss_cls: 1.4620, decode.loss_mask: 0.8980, decode.loss_dice: 1.2551, decode.d0.loss_cls: 8.1217, decode.d0.loss_mask: 0.9412, decode.d0.loss_dice: 1.6419, decode.d1.loss_cls: 2.1047, decode.d1.loss_mask: 0.8827, decode.d1.loss_dice: 1.3195, decode.d2.loss_cls: 1.7938, decode.d2.loss_mask: 0.8782, decode.d2.loss_dice: 1.2614, decode.d3.loss_cls: 1.6563, decode.d3.loss_mask: 0.8775, decode.d3.loss_dice: 1.2508, decode.d4.loss_cls: 1.5648, decode.d4.loss_mask: 0.8850, decode.d4.loss_dice: 1.2470, decode.d5.loss_cls: 1.5037, decode.d5.loss_mask: 0.8903, decode.d5.loss_dice: 1.2516, decode.d6.loss_cls: 1.4731, decode.d6.loss_mask: 0.8813, decode.d6.loss_dice: 1.2482, decode.d7.loss_cls: 1.4566, decode.d7.loss_mask: 0.8834, decode.d7.loss_dice: 1.2420, decode.d8.loss_cls: 1.4610, decode.d8.loss_mask: 0.8883, decode.d8.loss_dice: 1.2509, loss: 44.4719 +2022-06-05 01:57:26,726 - mmseg - INFO - Iter [2150/40000] lr: 7.216e-06, eta: 4:27:53, time: 0.418, data_time: 0.008, memory: 31652, decode.loss_cls: 1.4086, 
decode.loss_mask: 0.9006, decode.loss_dice: 1.2903, decode.d0.loss_cls: 8.1164, decode.d0.loss_mask: 0.9272, decode.d0.loss_dice: 1.6773, decode.d1.loss_cls: 2.0964, decode.d1.loss_mask: 0.8975, decode.d1.loss_dice: 1.3596, decode.d2.loss_cls: 1.7771, decode.d2.loss_mask: 0.8940, decode.d2.loss_dice: 1.2939, decode.d3.loss_cls: 1.6001, decode.d3.loss_mask: 0.8998, decode.d3.loss_dice: 1.2863, decode.d4.loss_cls: 1.5169, decode.d4.loss_mask: 0.8990, decode.d4.loss_dice: 1.2905, decode.d5.loss_cls: 1.4674, decode.d5.loss_mask: 0.9024, decode.d5.loss_dice: 1.2953, decode.d6.loss_cls: 1.4334, decode.d6.loss_mask: 0.8979, decode.d6.loss_dice: 1.2887, decode.d7.loss_cls: 1.4177, decode.d7.loss_mask: 0.9002, decode.d7.loss_dice: 1.2924, decode.d8.loss_cls: 1.3930, decode.d8.loss_mask: 0.9058, decode.d8.loss_dice: 1.2862, loss: 44.6117 +2022-06-05 01:57:50,027 - mmseg - INFO - Iter [2200/40000] lr: 7.206e-06, eta: 4:28:08, time: 0.467, data_time: 0.056, memory: 31652, decode.loss_cls: 1.4241, decode.loss_mask: 0.8803, decode.loss_dice: 1.2800, decode.d0.loss_cls: 8.0946, decode.d0.loss_mask: 0.9098, decode.d0.loss_dice: 1.6682, decode.d1.loss_cls: 2.1251, decode.d1.loss_mask: 0.8724, decode.d1.loss_dice: 1.3486, decode.d2.loss_cls: 1.8065, decode.d2.loss_mask: 0.8656, decode.d2.loss_dice: 1.2921, decode.d3.loss_cls: 1.6254, decode.d3.loss_mask: 0.8677, decode.d3.loss_dice: 1.2817, decode.d4.loss_cls: 1.5373, decode.d4.loss_mask: 0.8739, decode.d4.loss_dice: 1.2766, decode.d5.loss_cls: 1.4862, decode.d5.loss_mask: 0.8784, decode.d5.loss_dice: 1.2852, decode.d6.loss_cls: 1.4557, decode.d6.loss_mask: 0.8825, decode.d6.loss_dice: 1.2815, decode.d7.loss_cls: 1.4324, decode.d7.loss_mask: 0.8783, decode.d7.loss_dice: 1.2794, decode.d8.loss_cls: 1.4195, decode.d8.loss_mask: 0.8800, decode.d8.loss_dice: 1.2838, loss: 44.4729 +2022-06-05 01:58:10,666 - mmseg - INFO - Iter [2250/40000] lr: 7.197e-06, eta: 4:27:36, time: 0.413, data_time: 0.007, memory: 31652, decode.loss_cls: 1.3095, decode.loss_mask: 0.8911, decode.loss_dice: 1.2039, decode.d0.loss_cls: 8.0582, decode.d0.loss_mask: 0.9180, decode.d0.loss_dice: 1.5794, decode.d1.loss_cls: 1.9902, decode.d1.loss_mask: 0.8964, decode.d1.loss_dice: 1.2867, decode.d2.loss_cls: 1.6865, decode.d2.loss_mask: 0.8803, decode.d2.loss_dice: 1.2211, decode.d3.loss_cls: 1.5107, decode.d3.loss_mask: 0.8809, decode.d3.loss_dice: 1.2088, decode.d4.loss_cls: 1.4233, decode.d4.loss_mask: 0.8823, decode.d4.loss_dice: 1.2119, decode.d5.loss_cls: 1.3639, decode.d5.loss_mask: 0.8895, decode.d5.loss_dice: 1.2231, decode.d6.loss_cls: 1.3423, decode.d6.loss_mask: 0.8873, decode.d6.loss_dice: 1.2101, decode.d7.loss_cls: 1.3157, decode.d7.loss_mask: 0.8906, decode.d7.loss_dice: 1.2139, decode.d8.loss_cls: 1.3086, decode.d8.loss_mask: 0.8891, decode.d8.loss_dice: 1.2076, loss: 42.7811 +2022-06-05 01:58:31,650 - mmseg - INFO - Iter [2300/40000] lr: 7.187e-06, eta: 4:27:10, time: 0.420, data_time: 0.007, memory: 31652, decode.loss_cls: 1.3544, decode.loss_mask: 0.9073, decode.loss_dice: 1.2583, decode.d0.loss_cls: 8.0366, decode.d0.loss_mask: 0.9297, decode.d0.loss_dice: 1.6337, decode.d1.loss_cls: 2.0280, decode.d1.loss_mask: 0.9004, decode.d1.loss_dice: 1.3254, decode.d2.loss_cls: 1.7071, decode.d2.loss_mask: 0.8981, decode.d2.loss_dice: 1.2737, decode.d3.loss_cls: 1.5418, decode.d3.loss_mask: 0.8912, decode.d3.loss_dice: 1.2595, decode.d4.loss_cls: 1.4571, decode.d4.loss_mask: 0.8996, decode.d4.loss_dice: 1.2643, decode.d5.loss_cls: 1.3941, decode.d5.loss_mask: 0.8997, 
decode.d5.loss_dice: 1.2631, decode.d6.loss_cls: 1.3749, decode.d6.loss_mask: 0.8987, decode.d6.loss_dice: 1.2501, decode.d7.loss_cls: 1.3491, decode.d7.loss_mask: 0.9000, decode.d7.loss_dice: 1.2501, decode.d8.loss_cls: 1.3489, decode.d8.loss_mask: 0.9004, decode.d8.loss_dice: 1.2559, loss: 43.6514 +2022-06-05 01:58:52,410 - mmseg - INFO - Iter [2350/40000] lr: 7.178e-06, eta: 4:26:40, time: 0.415, data_time: 0.008, memory: 31652, decode.loss_cls: 1.3113, decode.loss_mask: 0.8820, decode.loss_dice: 1.2297, decode.d0.loss_cls: 8.0241, decode.d0.loss_mask: 0.8984, decode.d0.loss_dice: 1.5888, decode.d1.loss_cls: 1.9904, decode.d1.loss_mask: 0.8735, decode.d1.loss_dice: 1.3030, decode.d2.loss_cls: 1.6659, decode.d2.loss_mask: 0.8682, decode.d2.loss_dice: 1.2285, decode.d3.loss_cls: 1.5051, decode.d3.loss_mask: 0.8643, decode.d3.loss_dice: 1.2118, decode.d4.loss_cls: 1.4204, decode.d4.loss_mask: 0.8635, decode.d4.loss_dice: 1.2127, decode.d5.loss_cls: 1.3615, decode.d5.loss_mask: 0.8727, decode.d5.loss_dice: 1.2196, decode.d6.loss_cls: 1.3378, decode.d6.loss_mask: 0.8688, decode.d6.loss_dice: 1.2107, decode.d7.loss_cls: 1.3196, decode.d7.loss_mask: 0.8745, decode.d7.loss_dice: 1.2220, decode.d8.loss_cls: 1.3029, decode.d8.loss_mask: 0.8806, decode.d8.loss_dice: 1.2295, loss: 42.6418 +2022-06-05 01:59:13,417 - mmseg - INFO - Iter [2400/40000] lr: 7.168e-06, eta: 4:26:16, time: 0.421, data_time: 0.009, memory: 31652, decode.loss_cls: 1.2425, decode.loss_mask: 0.8915, decode.loss_dice: 1.2100, decode.d0.loss_cls: 7.9944, decode.d0.loss_mask: 0.9060, decode.d0.loss_dice: 1.5471, decode.d1.loss_cls: 1.8996, decode.d1.loss_mask: 0.8836, decode.d1.loss_dice: 1.2581, decode.d2.loss_cls: 1.6004, decode.d2.loss_mask: 0.8720, decode.d2.loss_dice: 1.2067, decode.d3.loss_cls: 1.4229, decode.d3.loss_mask: 0.8747, decode.d3.loss_dice: 1.1949, decode.d4.loss_cls: 1.3445, decode.d4.loss_mask: 0.8798, decode.d4.loss_dice: 1.2015, decode.d5.loss_cls: 1.2927, decode.d5.loss_mask: 0.8833, decode.d5.loss_dice: 1.1980, decode.d6.loss_cls: 1.2688, decode.d6.loss_mask: 0.8869, decode.d6.loss_dice: 1.1960, decode.d7.loss_cls: 1.2493, decode.d7.loss_mask: 0.8925, decode.d7.loss_dice: 1.2068, decode.d8.loss_cls: 1.2476, decode.d8.loss_mask: 0.8875, decode.d8.loss_dice: 1.2092, loss: 41.8489 +2022-06-05 01:59:34,484 - mmseg - INFO - Iter [2450/40000] lr: 7.159e-06, eta: 4:25:52, time: 0.421, data_time: 0.009, memory: 31652, decode.loss_cls: 1.3400, decode.loss_mask: 0.8850, decode.loss_dice: 1.2495, decode.d0.loss_cls: 7.9690, decode.d0.loss_mask: 0.9226, decode.d0.loss_dice: 1.6121, decode.d1.loss_cls: 2.0329, decode.d1.loss_mask: 0.8810, decode.d1.loss_dice: 1.3244, decode.d2.loss_cls: 1.6861, decode.d2.loss_mask: 0.8837, decode.d2.loss_dice: 1.2661, decode.d3.loss_cls: 1.5234, decode.d3.loss_mask: 0.8730, decode.d3.loss_dice: 1.2450, decode.d4.loss_cls: 1.4443, decode.d4.loss_mask: 0.8761, decode.d4.loss_dice: 1.2449, decode.d5.loss_cls: 1.3881, decode.d5.loss_mask: 0.8778, decode.d5.loss_dice: 1.2546, decode.d6.loss_cls: 1.3648, decode.d6.loss_mask: 0.8765, decode.d6.loss_dice: 1.2453, decode.d7.loss_cls: 1.3491, decode.d7.loss_mask: 0.8781, decode.d7.loss_dice: 1.2500, decode.d8.loss_cls: 1.3477, decode.d8.loss_mask: 0.8838, decode.d8.loss_dice: 1.2465, loss: 43.2213 +2022-06-05 01:59:57,571 - mmseg - INFO - Iter [2500/40000] lr: 7.149e-06, eta: 4:25:58, time: 0.461, data_time: 0.060, memory: 31652, decode.loss_cls: 1.2924, decode.loss_mask: 0.8813, decode.loss_dice: 1.2193, decode.d0.loss_cls: 7.9501, 
decode.d0.loss_mask: 0.9001, decode.d0.loss_dice: 1.5955, decode.d1.loss_cls: 1.9999, decode.d1.loss_mask: 0.8718, decode.d1.loss_dice: 1.2944, decode.d2.loss_cls: 1.6541, decode.d2.loss_mask: 0.8632, decode.d2.loss_dice: 1.2306, decode.d3.loss_cls: 1.4808, decode.d3.loss_mask: 0.8643, decode.d3.loss_dice: 1.2043, decode.d4.loss_cls: 1.3973, decode.d4.loss_mask: 0.8657, decode.d4.loss_dice: 1.2132, decode.d5.loss_cls: 1.3444, decode.d5.loss_mask: 0.8693, decode.d5.loss_dice: 1.2168, decode.d6.loss_cls: 1.3169, decode.d6.loss_mask: 0.8600, decode.d6.loss_dice: 1.2069, decode.d7.loss_cls: 1.3072, decode.d7.loss_mask: 0.8666, decode.d7.loss_dice: 1.2122, decode.d8.loss_cls: 1.2981, decode.d8.loss_mask: 0.8716, decode.d8.loss_dice: 1.2148, loss: 42.3629 +2022-06-05 02:00:20,728 - mmseg - INFO - Iter [2550/40000] lr: 7.140e-06, eta: 4:26:03, time: 0.462, data_time: 0.008, memory: 31652, decode.loss_cls: 1.2277, decode.loss_mask: 0.8635, decode.loss_dice: 1.2206, decode.d0.loss_cls: 7.9249, decode.d0.loss_mask: 0.8929, decode.d0.loss_dice: 1.5736, decode.d1.loss_cls: 1.9132, decode.d1.loss_mask: 0.8642, decode.d1.loss_dice: 1.2737, decode.d2.loss_cls: 1.5741, decode.d2.loss_mask: 0.8658, decode.d2.loss_dice: 1.2237, decode.d3.loss_cls: 1.4012, decode.d3.loss_mask: 0.8651, decode.d3.loss_dice: 1.2093, decode.d4.loss_cls: 1.3118, decode.d4.loss_mask: 0.8594, decode.d4.loss_dice: 1.2110, decode.d5.loss_cls: 1.2560, decode.d5.loss_mask: 0.8687, decode.d5.loss_dice: 1.2103, decode.d6.loss_cls: 1.2402, decode.d6.loss_mask: 0.8667, decode.d6.loss_dice: 1.2116, decode.d7.loss_cls: 1.2318, decode.d7.loss_mask: 0.8625, decode.d7.loss_dice: 1.2119, decode.d8.loss_cls: 1.2331, decode.d8.loss_mask: 0.8596, decode.d8.loss_dice: 1.2156, loss: 41.5435 +2022-06-05 02:00:42,344 - mmseg - INFO - Iter [2600/40000] lr: 7.130e-06, eta: 4:25:47, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 1.1919, decode.loss_mask: 0.8701, decode.loss_dice: 1.1852, decode.d0.loss_cls: 7.9029, decode.d0.loss_mask: 0.8934, decode.d0.loss_dice: 1.5239, decode.d1.loss_cls: 1.8656, decode.d1.loss_mask: 0.8688, decode.d1.loss_dice: 1.2486, decode.d2.loss_cls: 1.5204, decode.d2.loss_mask: 0.8631, decode.d2.loss_dice: 1.1976, decode.d3.loss_cls: 1.3557, decode.d3.loss_mask: 0.8656, decode.d3.loss_dice: 1.1782, decode.d4.loss_cls: 1.2696, decode.d4.loss_mask: 0.8653, decode.d4.loss_dice: 1.1865, decode.d5.loss_cls: 1.2143, decode.d5.loss_mask: 0.8711, decode.d5.loss_dice: 1.1884, decode.d6.loss_cls: 1.2083, decode.d6.loss_mask: 0.8659, decode.d6.loss_dice: 1.1839, decode.d7.loss_cls: 1.1970, decode.d7.loss_mask: 0.8707, decode.d7.loss_dice: 1.1857, decode.d8.loss_cls: 1.1909, decode.d8.loss_mask: 0.8740, decode.d8.loss_dice: 1.1870, loss: 40.8893 +2022-06-05 02:01:03,427 - mmseg - INFO - Iter [2650/40000] lr: 7.121e-06, eta: 4:25:23, time: 0.422, data_time: 0.008, memory: 31652, decode.loss_cls: 1.2038, decode.loss_mask: 0.8568, decode.loss_dice: 1.1694, decode.d0.loss_cls: 7.8734, decode.d0.loss_mask: 0.8939, decode.d0.loss_dice: 1.5118, decode.d1.loss_cls: 1.8526, decode.d1.loss_mask: 0.8674, decode.d1.loss_dice: 1.2360, decode.d2.loss_cls: 1.5069, decode.d2.loss_mask: 0.8588, decode.d2.loss_dice: 1.1879, decode.d3.loss_cls: 1.3529, decode.d3.loss_mask: 0.8522, decode.d3.loss_dice: 1.1630, decode.d4.loss_cls: 1.2716, decode.d4.loss_mask: 0.8612, decode.d4.loss_dice: 1.1677, decode.d5.loss_cls: 1.2245, decode.d5.loss_mask: 0.8710, decode.d5.loss_dice: 1.1742, decode.d6.loss_cls: 1.2031, decode.d6.loss_mask: 0.8663, 
decode.d6.loss_dice: 1.1650, decode.d7.loss_cls: 1.1899, decode.d7.loss_mask: 0.8660, decode.d7.loss_dice: 1.1705, decode.d8.loss_cls: 1.1963, decode.d8.loss_mask: 0.8625, decode.d8.loss_dice: 1.1674, loss: 40.6439 +2022-06-05 02:01:24,198 - mmseg - INFO - Iter [2700/40000] lr: 7.111e-06, eta: 4:24:54, time: 0.415, data_time: 0.007, memory: 31652, decode.loss_cls: 1.1428, decode.loss_mask: 0.8415, decode.loss_dice: 1.1513, decode.d0.loss_cls: 7.8452, decode.d0.loss_mask: 0.8566, decode.d0.loss_dice: 1.4785, decode.d1.loss_cls: 1.8019, decode.d1.loss_mask: 0.8430, decode.d1.loss_dice: 1.2096, decode.d2.loss_cls: 1.4593, decode.d2.loss_mask: 0.8313, decode.d2.loss_dice: 1.1649, decode.d3.loss_cls: 1.3035, decode.d3.loss_mask: 0.8323, decode.d3.loss_dice: 1.1410, decode.d4.loss_cls: 1.2236, decode.d4.loss_mask: 0.8432, decode.d4.loss_dice: 1.1477, decode.d5.loss_cls: 1.1743, decode.d5.loss_mask: 0.8465, decode.d5.loss_dice: 1.1585, decode.d6.loss_cls: 1.1459, decode.d6.loss_mask: 0.8445, decode.d6.loss_dice: 1.1502, decode.d7.loss_cls: 1.1382, decode.d7.loss_mask: 0.8420, decode.d7.loss_dice: 1.1526, decode.d8.loss_cls: 1.1403, decode.d8.loss_mask: 0.8393, decode.d8.loss_dice: 1.1526, loss: 39.7023 +2022-06-05 02:01:45,165 - mmseg - INFO - Iter [2750/40000] lr: 7.102e-06, eta: 4:24:28, time: 0.419, data_time: 0.007, memory: 31652, decode.loss_cls: 1.1567, decode.loss_mask: 0.8784, decode.loss_dice: 1.2053, decode.d0.loss_cls: 7.8268, decode.d0.loss_mask: 0.8990, decode.d0.loss_dice: 1.5243, decode.d1.loss_cls: 1.8001, decode.d1.loss_mask: 0.8845, decode.d1.loss_dice: 1.2626, decode.d2.loss_cls: 1.4539, decode.d2.loss_mask: 0.8668, decode.d2.loss_dice: 1.2135, decode.d3.loss_cls: 1.3097, decode.d3.loss_mask: 0.8623, decode.d3.loss_dice: 1.1959, decode.d4.loss_cls: 1.2428, decode.d4.loss_mask: 0.8653, decode.d4.loss_dice: 1.2015, decode.d5.loss_cls: 1.1922, decode.d5.loss_mask: 0.8776, decode.d5.loss_dice: 1.2043, decode.d6.loss_cls: 1.1653, decode.d6.loss_mask: 0.8791, decode.d6.loss_dice: 1.1972, decode.d7.loss_cls: 1.1573, decode.d7.loss_mask: 0.8766, decode.d7.loss_dice: 1.2065, decode.d8.loss_cls: 1.1554, decode.d8.loss_mask: 0.8761, decode.d8.loss_dice: 1.2079, loss: 40.6449 +2022-06-05 02:02:06,276 - mmseg - INFO - Iter [2800/40000] lr: 7.092e-06, eta: 4:24:04, time: 0.422, data_time: 0.007, memory: 31652, decode.loss_cls: 1.2088, decode.loss_mask: 0.8687, decode.loss_dice: 1.1968, decode.d0.loss_cls: 7.8042, decode.d0.loss_mask: 0.8774, decode.d0.loss_dice: 1.5305, decode.d1.loss_cls: 1.8744, decode.d1.loss_mask: 0.8567, decode.d1.loss_dice: 1.2655, decode.d2.loss_cls: 1.5094, decode.d2.loss_mask: 0.8456, decode.d2.loss_dice: 1.2105, decode.d3.loss_cls: 1.3509, decode.d3.loss_mask: 0.8453, decode.d3.loss_dice: 1.1976, decode.d4.loss_cls: 1.2785, decode.d4.loss_mask: 0.8490, decode.d4.loss_dice: 1.1971, decode.d5.loss_cls: 1.2361, decode.d5.loss_mask: 0.8522, decode.d5.loss_dice: 1.1935, decode.d6.loss_cls: 1.2190, decode.d6.loss_mask: 0.8544, decode.d6.loss_dice: 1.1947, decode.d7.loss_cls: 1.2050, decode.d7.loss_mask: 0.8596, decode.d7.loss_dice: 1.1977, decode.d8.loss_cls: 1.2065, decode.d8.loss_mask: 0.8651, decode.d8.loss_dice: 1.1999, loss: 40.8504 +2022-06-05 02:02:29,782 - mmseg - INFO - Iter [2850/40000] lr: 7.082e-06, eta: 4:24:11, time: 0.469, data_time: 0.055, memory: 31652, decode.loss_cls: 1.1470, decode.loss_mask: 0.8324, decode.loss_dice: 1.1844, decode.d0.loss_cls: 7.7819, decode.d0.loss_mask: 0.8553, decode.d0.loss_dice: 1.5289, decode.d1.loss_cls: 1.8017, 
decode.d1.loss_mask: 0.8369, decode.d1.loss_dice: 1.2593, decode.d2.loss_cls: 1.4363, decode.d2.loss_mask: 0.8248, decode.d2.loss_dice: 1.1959, decode.d3.loss_cls: 1.2863, decode.d3.loss_mask: 0.8230, decode.d3.loss_dice: 1.1778, decode.d4.loss_cls: 1.2142, decode.d4.loss_mask: 0.8306, decode.d4.loss_dice: 1.1798, decode.d5.loss_cls: 1.1788, decode.d5.loss_mask: 0.8365, decode.d5.loss_dice: 1.1860, decode.d6.loss_cls: 1.1575, decode.d6.loss_mask: 0.8304, decode.d6.loss_dice: 1.1744, decode.d7.loss_cls: 1.1560, decode.d7.loss_mask: 0.8336, decode.d7.loss_dice: 1.1828, decode.d8.loss_cls: 1.1546, decode.d8.loss_mask: 0.8272, decode.d8.loss_dice: 1.1818, loss: 39.8958 +2022-06-05 02:02:50,608 - mmseg - INFO - Iter [2900/40000] lr: 7.073e-06, eta: 4:23:44, time: 0.417, data_time: 0.008, memory: 31652, decode.loss_cls: 1.1232, decode.loss_mask: 0.8308, decode.loss_dice: 1.1370, decode.d0.loss_cls: 7.7511, decode.d0.loss_mask: 0.8535, decode.d0.loss_dice: 1.4814, decode.d1.loss_cls: 1.7622, decode.d1.loss_mask: 0.8365, decode.d1.loss_dice: 1.2115, decode.d2.loss_cls: 1.4180, decode.d2.loss_mask: 0.8194, decode.d2.loss_dice: 1.1538, decode.d3.loss_cls: 1.2649, decode.d3.loss_mask: 0.8165, decode.d3.loss_dice: 1.1407, decode.d4.loss_cls: 1.1997, decode.d4.loss_mask: 0.8151, decode.d4.loss_dice: 1.1375, decode.d5.loss_cls: 1.1485, decode.d5.loss_mask: 0.8236, decode.d5.loss_dice: 1.1406, decode.d6.loss_cls: 1.1339, decode.d6.loss_mask: 0.8203, decode.d6.loss_dice: 1.1279, decode.d7.loss_cls: 1.1207, decode.d7.loss_mask: 0.8253, decode.d7.loss_dice: 1.1432, decode.d8.loss_cls: 1.1108, decode.d8.loss_mask: 0.8280, decode.d8.loss_dice: 1.1411, loss: 39.1169 +2022-06-05 02:03:11,327 - mmseg - INFO - Iter [2950/40000] lr: 7.063e-06, eta: 4:23:15, time: 0.414, data_time: 0.008, memory: 31652, decode.loss_cls: 1.0693, decode.loss_mask: 0.8343, decode.loss_dice: 1.1386, decode.d0.loss_cls: 7.7219, decode.d0.loss_mask: 0.8563, decode.d0.loss_dice: 1.4340, decode.d1.loss_cls: 1.7069, decode.d1.loss_mask: 0.8334, decode.d1.loss_dice: 1.1903, decode.d2.loss_cls: 1.3634, decode.d2.loss_mask: 0.8220, decode.d2.loss_dice: 1.1323, decode.d3.loss_cls: 1.2192, decode.d3.loss_mask: 0.8233, decode.d3.loss_dice: 1.1195, decode.d4.loss_cls: 1.1519, decode.d4.loss_mask: 0.8265, decode.d4.loss_dice: 1.1265, decode.d5.loss_cls: 1.1096, decode.d5.loss_mask: 0.8348, decode.d5.loss_dice: 1.1365, decode.d6.loss_cls: 1.0896, decode.d6.loss_mask: 0.8374, decode.d6.loss_dice: 1.1370, decode.d7.loss_cls: 1.0824, decode.d7.loss_mask: 0.8338, decode.d7.loss_dice: 1.1394, decode.d8.loss_cls: 1.0629, decode.d8.loss_mask: 0.8392, decode.d8.loss_dice: 1.1352, loss: 38.6074 +2022-06-05 02:03:32,070 - mmseg - INFO - Saving checkpoint at 3000 iterations +2022-06-05 02:03:34,395 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:03:34,395 - mmseg - INFO - Iter [3000/40000] lr: 7.054e-06, eta: 4:23:15, time: 0.461, data_time: 0.007, memory: 31652, decode.loss_cls: 1.0966, decode.loss_mask: 0.8193, decode.loss_dice: 1.1243, decode.d0.loss_cls: 7.7090, decode.d0.loss_mask: 0.8533, decode.d0.loss_dice: 1.4502, decode.d1.loss_cls: 1.7474, decode.d1.loss_mask: 0.8304, decode.d1.loss_dice: 1.1968, decode.d2.loss_cls: 1.3955, decode.d2.loss_mask: 0.8189, decode.d2.loss_dice: 1.1378, decode.d3.loss_cls: 1.2466, decode.d3.loss_mask: 0.8121, decode.d3.loss_dice: 1.1198, decode.d4.loss_cls: 1.1706, decode.d4.loss_mask: 0.8111, decode.d4.loss_dice: 1.1173, decode.d5.loss_cls: 1.1336, 
decode.d5.loss_mask: 0.8181, decode.d5.loss_dice: 1.1311, decode.d6.loss_cls: 1.1042, decode.d6.loss_mask: 0.8186, decode.d6.loss_dice: 1.1152, decode.d7.loss_cls: 1.1015, decode.d7.loss_mask: 0.8215, decode.d7.loss_dice: 1.1192, decode.d8.loss_cls: 1.0921, decode.d8.loss_mask: 0.8209, decode.d8.loss_dice: 1.1219, loss: 38.6550 +2022-06-05 02:03:54,978 - mmseg - INFO - Iter [3050/40000] lr: 7.044e-06, eta: 4:22:44, time: 0.411, data_time: 0.007, memory: 31652, decode.loss_cls: 1.1112, decode.loss_mask: 0.8479, decode.loss_dice: 1.1311, decode.d0.loss_cls: 7.6813, decode.d0.loss_mask: 0.8638, decode.d0.loss_dice: 1.4271, decode.d1.loss_cls: 1.7108, decode.d1.loss_mask: 0.8470, decode.d1.loss_dice: 1.1881, decode.d2.loss_cls: 1.3692, decode.d2.loss_mask: 0.8365, decode.d2.loss_dice: 1.1413, decode.d3.loss_cls: 1.2403, decode.d3.loss_mask: 0.8302, decode.d3.loss_dice: 1.1191, decode.d4.loss_cls: 1.1749, decode.d4.loss_mask: 0.8411, decode.d4.loss_dice: 1.1236, decode.d5.loss_cls: 1.1411, decode.d5.loss_mask: 0.8389, decode.d5.loss_dice: 1.1290, decode.d6.loss_cls: 1.1210, decode.d6.loss_mask: 0.8448, decode.d6.loss_dice: 1.1211, decode.d7.loss_cls: 1.1092, decode.d7.loss_mask: 0.8499, decode.d7.loss_dice: 1.1284, decode.d8.loss_cls: 1.1073, decode.d8.loss_mask: 0.8534, decode.d8.loss_dice: 1.1363, loss: 38.8649 +2022-06-05 02:04:16,029 - mmseg - INFO - Iter [3100/40000] lr: 7.035e-06, eta: 4:22:20, time: 0.422, data_time: 0.008, memory: 31652, decode.loss_cls: 1.1014, decode.loss_mask: 0.8436, decode.loss_dice: 1.1789, decode.d0.loss_cls: 7.6568, decode.d0.loss_mask: 0.8597, decode.d0.loss_dice: 1.4989, decode.d1.loss_cls: 1.7332, decode.d1.loss_mask: 0.8517, decode.d1.loss_dice: 1.2409, decode.d2.loss_cls: 1.3774, decode.d2.loss_mask: 0.8384, decode.d2.loss_dice: 1.2005, decode.d3.loss_cls: 1.2435, decode.d3.loss_mask: 0.8330, decode.d3.loss_dice: 1.1737, decode.d4.loss_cls: 1.1677, decode.d4.loss_mask: 0.8364, decode.d4.loss_dice: 1.1840, decode.d5.loss_cls: 1.1351, decode.d5.loss_mask: 0.8369, decode.d5.loss_dice: 1.1828, decode.d6.loss_cls: 1.1199, decode.d6.loss_mask: 0.8394, decode.d6.loss_dice: 1.1741, decode.d7.loss_cls: 1.1027, decode.d7.loss_mask: 0.8417, decode.d7.loss_dice: 1.1679, decode.d8.loss_cls: 1.1019, decode.d8.loss_mask: 0.8465, decode.d8.loss_dice: 1.1800, loss: 39.3485 +2022-06-05 02:04:38,945 - mmseg - INFO - Iter [3150/40000] lr: 7.025e-06, eta: 4:22:17, time: 0.458, data_time: 0.057, memory: 31652, decode.loss_cls: 1.0495, decode.loss_mask: 0.8272, decode.loss_dice: 1.1336, decode.d0.loss_cls: 7.6348, decode.d0.loss_mask: 0.8475, decode.d0.loss_dice: 1.4270, decode.d1.loss_cls: 1.7102, decode.d1.loss_mask: 0.8354, decode.d1.loss_dice: 1.1849, decode.d2.loss_cls: 1.3408, decode.d2.loss_mask: 0.8211, decode.d2.loss_dice: 1.1413, decode.d3.loss_cls: 1.2066, decode.d3.loss_mask: 0.8162, decode.d3.loss_dice: 1.1253, decode.d4.loss_cls: 1.1326, decode.d4.loss_mask: 0.8204, decode.d4.loss_dice: 1.1235, decode.d5.loss_cls: 1.0939, decode.d5.loss_mask: 0.8220, decode.d5.loss_dice: 1.1276, decode.d6.loss_cls: 1.0714, decode.d6.loss_mask: 0.8208, decode.d6.loss_dice: 1.1289, decode.d7.loss_cls: 1.0507, decode.d7.loss_mask: 0.8295, decode.d7.loss_dice: 1.1363, decode.d8.loss_cls: 1.0398, decode.d8.loss_mask: 0.8253, decode.d8.loss_dice: 1.1376, loss: 38.2619 +2022-06-05 02:04:59,853 - mmseg - INFO - Iter [3200/40000] lr: 7.016e-06, eta: 4:21:50, time: 0.418, data_time: 0.007, memory: 31652, decode.loss_cls: 1.0271, decode.loss_mask: 0.7968, decode.loss_dice: 1.1307, 
decode.d0.loss_cls: 7.5984, decode.d0.loss_mask: 0.8196, decode.d0.loss_dice: 1.4324, decode.d1.loss_cls: 1.6548, decode.d1.loss_mask: 0.8044, decode.d1.loss_dice: 1.1916, decode.d2.loss_cls: 1.2943, decode.d2.loss_mask: 0.7881, decode.d2.loss_dice: 1.1271, decode.d3.loss_cls: 1.1557, decode.d3.loss_mask: 0.7844, decode.d3.loss_dice: 1.1161, decode.d4.loss_cls: 1.0940, decode.d4.loss_mask: 0.7861, decode.d4.loss_dice: 1.1196, decode.d5.loss_cls: 1.0666, decode.d5.loss_mask: 0.7881, decode.d5.loss_dice: 1.1244, decode.d6.loss_cls: 1.0413, decode.d6.loss_mask: 0.7887, decode.d6.loss_dice: 1.1204, decode.d7.loss_cls: 1.0289, decode.d7.loss_mask: 0.7891, decode.d7.loss_dice: 1.1286, decode.d8.loss_cls: 1.0215, decode.d8.loss_mask: 0.7963, decode.d8.loss_dice: 1.1308, loss: 37.5462 +2022-06-05 02:05:20,299 - mmseg - INFO - Iter [3250/40000] lr: 7.006e-06, eta: 4:21:19, time: 0.409, data_time: 0.008, memory: 31652, decode.loss_cls: 1.0246, decode.loss_mask: 0.7993, decode.loss_dice: 1.1184, decode.d0.loss_cls: 7.5575, decode.d0.loss_mask: 0.8260, decode.d0.loss_dice: 1.4137, decode.d1.loss_cls: 1.6456, decode.d1.loss_mask: 0.8013, decode.d1.loss_dice: 1.1770, decode.d2.loss_cls: 1.2823, decode.d2.loss_mask: 0.7962, decode.d2.loss_dice: 1.1294, decode.d3.loss_cls: 1.1576, decode.d3.loss_mask: 0.7886, decode.d3.loss_dice: 1.1062, decode.d4.loss_cls: 1.0936, decode.d4.loss_mask: 0.7985, decode.d4.loss_dice: 1.1126, decode.d5.loss_cls: 1.0575, decode.d5.loss_mask: 0.7947, decode.d5.loss_dice: 1.1106, decode.d6.loss_cls: 1.0294, decode.d6.loss_mask: 0.7959, decode.d6.loss_dice: 1.1060, decode.d7.loss_cls: 1.0211, decode.d7.loss_mask: 0.7983, decode.d7.loss_dice: 1.1062, decode.d8.loss_cls: 1.0282, decode.d8.loss_mask: 0.7932, decode.d8.loss_dice: 1.1086, loss: 37.3783 +2022-06-05 02:05:41,033 - mmseg - INFO - Iter [3300/40000] lr: 6.997e-06, eta: 4:20:51, time: 0.415, data_time: 0.008, memory: 31652, decode.loss_cls: 1.0695, decode.loss_mask: 0.8101, decode.loss_dice: 1.1263, decode.d0.loss_cls: 7.5590, decode.d0.loss_mask: 0.8331, decode.d0.loss_dice: 1.4258, decode.d1.loss_cls: 1.6234, decode.d1.loss_mask: 0.8139, decode.d1.loss_dice: 1.1903, decode.d2.loss_cls: 1.2897, decode.d2.loss_mask: 0.8118, decode.d2.loss_dice: 1.1358, decode.d3.loss_cls: 1.1578, decode.d3.loss_mask: 0.8100, decode.d3.loss_dice: 1.1177, decode.d4.loss_cls: 1.1083, decode.d4.loss_mask: 0.8113, decode.d4.loss_dice: 1.1228, decode.d5.loss_cls: 1.0863, decode.d5.loss_mask: 0.8030, decode.d5.loss_dice: 1.1219, decode.d6.loss_cls: 1.0699, decode.d6.loss_mask: 0.8074, decode.d6.loss_dice: 1.1224, decode.d7.loss_cls: 1.0624, decode.d7.loss_mask: 0.8105, decode.d7.loss_dice: 1.1218, decode.d8.loss_cls: 1.0627, decode.d8.loss_mask: 0.8142, decode.d8.loss_dice: 1.1265, loss: 37.8252 +2022-06-05 02:06:01,511 - mmseg - INFO - Iter [3350/40000] lr: 6.987e-06, eta: 4:20:21, time: 0.410, data_time: 0.007, memory: 31652, decode.loss_cls: 1.0112, decode.loss_mask: 0.8217, decode.loss_dice: 1.1443, decode.d0.loss_cls: 7.5143, decode.d0.loss_mask: 0.8395, decode.d0.loss_dice: 1.4528, decode.d1.loss_cls: 1.6334, decode.d1.loss_mask: 0.8288, decode.d1.loss_dice: 1.2033, decode.d2.loss_cls: 1.2648, decode.d2.loss_mask: 0.8294, decode.d2.loss_dice: 1.1592, decode.d3.loss_cls: 1.1247, decode.d3.loss_mask: 0.8211, decode.d3.loss_dice: 1.1313, decode.d4.loss_cls: 1.0782, decode.d4.loss_mask: 0.8230, decode.d4.loss_dice: 1.1342, decode.d5.loss_cls: 1.0438, decode.d5.loss_mask: 0.8272, decode.d5.loss_dice: 1.1365, decode.d6.loss_cls: 1.0204, 
decode.d6.loss_mask: 0.8238, decode.d6.loss_dice: 1.1403, decode.d7.loss_cls: 1.0175, decode.d7.loss_mask: 0.8230, decode.d7.loss_dice: 1.1437, decode.d8.loss_cls: 1.0165, decode.d8.loss_mask: 0.8194, decode.d8.loss_dice: 1.1339, loss: 37.7609 +2022-06-05 02:06:22,802 - mmseg - INFO - Iter [3400/40000] lr: 6.978e-06, eta: 4:19:59, time: 0.426, data_time: 0.007, memory: 31652, decode.loss_cls: 1.0237, decode.loss_mask: 0.8031, decode.loss_dice: 1.0907, decode.d0.loss_cls: 7.5062, decode.d0.loss_mask: 0.8185, decode.d0.loss_dice: 1.3748, decode.d1.loss_cls: 1.6213, decode.d1.loss_mask: 0.8079, decode.d1.loss_dice: 1.1542, decode.d2.loss_cls: 1.2738, decode.d2.loss_mask: 0.8054, decode.d2.loss_dice: 1.1028, decode.d3.loss_cls: 1.1456, decode.d3.loss_mask: 0.7969, decode.d3.loss_dice: 1.0804, decode.d4.loss_cls: 1.0797, decode.d4.loss_mask: 0.7996, decode.d4.loss_dice: 1.0840, decode.d5.loss_cls: 1.0462, decode.d5.loss_mask: 0.8104, decode.d5.loss_dice: 1.0923, decode.d6.loss_cls: 1.0402, decode.d6.loss_mask: 0.8064, decode.d6.loss_dice: 1.0818, decode.d7.loss_cls: 1.0216, decode.d7.loss_mask: 0.8044, decode.d7.loss_dice: 1.0897, decode.d8.loss_cls: 1.0203, decode.d8.loss_mask: 0.8041, decode.d8.loss_dice: 1.0898, loss: 37.0756 +2022-06-05 02:06:46,063 - mmseg - INFO - Iter [3450/40000] lr: 6.968e-06, eta: 4:19:58, time: 0.465, data_time: 0.056, memory: 31652, decode.loss_cls: 1.0155, decode.loss_mask: 0.7987, decode.loss_dice: 1.0943, decode.d0.loss_cls: 7.4749, decode.d0.loss_mask: 0.8312, decode.d0.loss_dice: 1.3788, decode.d1.loss_cls: 1.5997, decode.d1.loss_mask: 0.8058, decode.d1.loss_dice: 1.1630, decode.d2.loss_cls: 1.2519, decode.d2.loss_mask: 0.8030, decode.d2.loss_dice: 1.1138, decode.d3.loss_cls: 1.1250, decode.d3.loss_mask: 0.7961, decode.d3.loss_dice: 1.0962, decode.d4.loss_cls: 1.0713, decode.d4.loss_mask: 0.7972, decode.d4.loss_dice: 1.0955, decode.d5.loss_cls: 1.0381, decode.d5.loss_mask: 0.8018, decode.d5.loss_dice: 1.1023, decode.d6.loss_cls: 1.0191, decode.d6.loss_mask: 0.8024, decode.d6.loss_dice: 1.0955, decode.d7.loss_cls: 1.0303, decode.d7.loss_mask: 0.7966, decode.d7.loss_dice: 1.0962, decode.d8.loss_cls: 1.0187, decode.d8.loss_mask: 0.7994, decode.d8.loss_dice: 1.0969, loss: 37.0094 +2022-06-05 02:07:06,785 - mmseg - INFO - Iter [3500/40000] lr: 6.959e-06, eta: 4:19:31, time: 0.415, data_time: 0.009, memory: 31652, decode.loss_cls: 0.9754, decode.loss_mask: 0.7897, decode.loss_dice: 1.0823, decode.d0.loss_cls: 7.4403, decode.d0.loss_mask: 0.8019, decode.d0.loss_dice: 1.3829, decode.d1.loss_cls: 1.5643, decode.d1.loss_mask: 0.7894, decode.d1.loss_dice: 1.1445, decode.d2.loss_cls: 1.2139, decode.d2.loss_mask: 0.7822, decode.d2.loss_dice: 1.0985, decode.d3.loss_cls: 1.0852, decode.d3.loss_mask: 0.7753, decode.d3.loss_dice: 1.0831, decode.d4.loss_cls: 1.0296, decode.d4.loss_mask: 0.7841, decode.d4.loss_dice: 1.0895, decode.d5.loss_cls: 0.9983, decode.d5.loss_mask: 0.7894, decode.d5.loss_dice: 1.0915, decode.d6.loss_cls: 0.9810, decode.d6.loss_mask: 0.7854, decode.d6.loss_dice: 1.0854, decode.d7.loss_cls: 0.9803, decode.d7.loss_mask: 0.7869, decode.d7.loss_dice: 1.0875, decode.d8.loss_cls: 0.9699, decode.d8.loss_mask: 0.7911, decode.d8.loss_dice: 1.0900, loss: 36.3488 +2022-06-05 02:07:27,894 - mmseg - INFO - Iter [3550/40000] lr: 6.949e-06, eta: 4:19:07, time: 0.422, data_time: 0.008, memory: 31652, decode.loss_cls: 0.9989, decode.loss_mask: 0.7768, decode.loss_dice: 1.1061, decode.d0.loss_cls: 7.4154, decode.d0.loss_mask: 0.8055, decode.d0.loss_dice: 1.4179, 
decode.d1.loss_cls: 1.5973, decode.d1.loss_mask: 0.7858, decode.d1.loss_dice: 1.1770, decode.d2.loss_cls: 1.2446, decode.d2.loss_mask: 0.7731, decode.d2.loss_dice: 1.1169, decode.d3.loss_cls: 1.1085, decode.d3.loss_mask: 0.7749, decode.d3.loss_dice: 1.1014, decode.d4.loss_cls: 1.0546, decode.d4.loss_mask: 0.7824, decode.d4.loss_dice: 1.1023, decode.d5.loss_cls: 1.0230, decode.d5.loss_mask: 0.7849, decode.d5.loss_dice: 1.1065, decode.d6.loss_cls: 1.0047, decode.d6.loss_mask: 0.7832, decode.d6.loss_dice: 1.1017, decode.d7.loss_cls: 0.9996, decode.d7.loss_mask: 0.7803, decode.d7.loss_dice: 1.1070, decode.d8.loss_cls: 0.9971, decode.d8.loss_mask: 0.7794, decode.d8.loss_dice: 1.1067, loss: 36.7134 +2022-06-05 02:07:48,324 - mmseg - INFO - Iter [3600/40000] lr: 6.939e-06, eta: 4:18:36, time: 0.408, data_time: 0.007, memory: 31652, decode.loss_cls: 0.9291, decode.loss_mask: 0.7865, decode.loss_dice: 1.0636, decode.d0.loss_cls: 7.3820, decode.d0.loss_mask: 0.8158, decode.d0.loss_dice: 1.3300, decode.d1.loss_cls: 1.4987, decode.d1.loss_mask: 0.7995, decode.d1.loss_dice: 1.1148, decode.d2.loss_cls: 1.1455, decode.d2.loss_mask: 0.7906, decode.d2.loss_dice: 1.0737, decode.d3.loss_cls: 1.0370, decode.d3.loss_mask: 0.7753, decode.d3.loss_dice: 1.0553, decode.d4.loss_cls: 0.9826, decode.d4.loss_mask: 0.7796, decode.d4.loss_dice: 1.0626, decode.d5.loss_cls: 0.9534, decode.d5.loss_mask: 0.7823, decode.d5.loss_dice: 1.0577, decode.d6.loss_cls: 0.9364, decode.d6.loss_mask: 0.7822, decode.d6.loss_dice: 1.0614, decode.d7.loss_cls: 0.9324, decode.d7.loss_mask: 0.7853, decode.d7.loss_dice: 1.0676, decode.d8.loss_cls: 0.9317, decode.d8.loss_mask: 0.7903, decode.d8.loss_dice: 1.0663, loss: 35.5692 +2022-06-05 02:08:09,028 - mmseg - INFO - Iter [3650/40000] lr: 6.930e-06, eta: 4:18:09, time: 0.415, data_time: 0.008, memory: 31652, decode.loss_cls: 0.9557, decode.loss_mask: 0.8045, decode.loss_dice: 1.1008, decode.d0.loss_cls: 7.3532, decode.d0.loss_mask: 0.8163, decode.d0.loss_dice: 1.3712, decode.d1.loss_cls: 1.5321, decode.d1.loss_mask: 0.8014, decode.d1.loss_dice: 1.1560, decode.d2.loss_cls: 1.1761, decode.d2.loss_mask: 0.8005, decode.d2.loss_dice: 1.1128, decode.d3.loss_cls: 1.0548, decode.d3.loss_mask: 0.7993, decode.d3.loss_dice: 1.0889, decode.d4.loss_cls: 1.0018, decode.d4.loss_mask: 0.8036, decode.d4.loss_dice: 1.0978, decode.d5.loss_cls: 0.9760, decode.d5.loss_mask: 0.8048, decode.d5.loss_dice: 1.1006, decode.d6.loss_cls: 0.9470, decode.d6.loss_mask: 0.8104, decode.d6.loss_dice: 1.0970, decode.d7.loss_cls: 0.9565, decode.d7.loss_mask: 0.8035, decode.d7.loss_dice: 1.0969, decode.d8.loss_cls: 0.9536, decode.d8.loss_mask: 0.8002, decode.d8.loss_dice: 1.0955, loss: 36.2689 +2022-06-05 02:08:29,777 - mmseg - INFO - Iter [3700/40000] lr: 6.920e-06, eta: 4:17:42, time: 0.415, data_time: 0.008, memory: 31652, decode.loss_cls: 0.9433, decode.loss_mask: 0.8040, decode.loss_dice: 1.0842, decode.d0.loss_cls: 7.3305, decode.d0.loss_mask: 0.8095, decode.d0.loss_dice: 1.3538, decode.d1.loss_cls: 1.5105, decode.d1.loss_mask: 0.8070, decode.d1.loss_dice: 1.1399, decode.d2.loss_cls: 1.1527, decode.d2.loss_mask: 0.8046, decode.d2.loss_dice: 1.1016, decode.d3.loss_cls: 1.0501, decode.d3.loss_mask: 0.7938, decode.d3.loss_dice: 1.0797, decode.d4.loss_cls: 0.9926, decode.d4.loss_mask: 0.8024, decode.d4.loss_dice: 1.0870, decode.d5.loss_cls: 0.9636, decode.d5.loss_mask: 0.8023, decode.d5.loss_dice: 1.0868, decode.d6.loss_cls: 0.9529, decode.d6.loss_mask: 0.8031, decode.d6.loss_dice: 1.0752, decode.d7.loss_cls: 0.9481, 
decode.d7.loss_mask: 0.8064, decode.d7.loss_dice: 1.0748, decode.d8.loss_cls: 0.9408, decode.d8.loss_mask: 0.8082, decode.d8.loss_dice: 1.0797, loss: 35.9891 +2022-06-05 02:08:52,545 - mmseg - INFO - Iter [3750/40000] lr: 6.911e-06, eta: 4:17:35, time: 0.455, data_time: 0.055, memory: 31652, decode.loss_cls: 0.9608, decode.loss_mask: 0.7961, decode.loss_dice: 1.0846, decode.d0.loss_cls: 7.3107, decode.d0.loss_mask: 0.8151, decode.d0.loss_dice: 1.3737, decode.d1.loss_cls: 1.5343, decode.d1.loss_mask: 0.7974, decode.d1.loss_dice: 1.1419, decode.d2.loss_cls: 1.1975, decode.d2.loss_mask: 0.7871, decode.d2.loss_dice: 1.0885, decode.d3.loss_cls: 1.0919, decode.d3.loss_mask: 0.7796, decode.d3.loss_dice: 1.0715, decode.d4.loss_cls: 1.0218, decode.d4.loss_mask: 0.7924, decode.d4.loss_dice: 1.0788, decode.d5.loss_cls: 0.9920, decode.d5.loss_mask: 0.7938, decode.d5.loss_dice: 1.0796, decode.d6.loss_cls: 0.9841, decode.d6.loss_mask: 0.7877, decode.d6.loss_dice: 1.0756, decode.d7.loss_cls: 0.9730, decode.d7.loss_mask: 0.7911, decode.d7.loss_dice: 1.0728, decode.d8.loss_cls: 0.9611, decode.d8.loss_mask: 0.7927, decode.d8.loss_dice: 1.0732, loss: 36.1006 +2022-06-05 02:09:13,617 - mmseg - INFO - Iter [3800/40000] lr: 6.901e-06, eta: 4:17:11, time: 0.421, data_time: 0.007, memory: 31652, decode.loss_cls: 0.9625, decode.loss_mask: 0.7750, decode.loss_dice: 1.0811, decode.d0.loss_cls: 7.2711, decode.d0.loss_mask: 0.8018, decode.d0.loss_dice: 1.3569, decode.d1.loss_cls: 1.4905, decode.d1.loss_mask: 0.7847, decode.d1.loss_dice: 1.1274, decode.d2.loss_cls: 1.1638, decode.d2.loss_mask: 0.7715, decode.d2.loss_dice: 1.0790, decode.d3.loss_cls: 1.0579, decode.d3.loss_mask: 0.7631, decode.d3.loss_dice: 1.0633, decode.d4.loss_cls: 1.0053, decode.d4.loss_mask: 0.7691, decode.d4.loss_dice: 1.0697, decode.d5.loss_cls: 0.9734, decode.d5.loss_mask: 0.7754, decode.d5.loss_dice: 1.0765, decode.d6.loss_cls: 0.9646, decode.d6.loss_mask: 0.7698, decode.d6.loss_dice: 1.0764, decode.d7.loss_cls: 0.9619, decode.d7.loss_mask: 0.7740, decode.d7.loss_dice: 1.0731, decode.d8.loss_cls: 0.9563, decode.d8.loss_mask: 0.7734, decode.d8.loss_dice: 1.0734, loss: 35.6420 +2022-06-05 02:09:34,285 - mmseg - INFO - Iter [3850/40000] lr: 6.892e-06, eta: 4:16:44, time: 0.414, data_time: 0.009, memory: 31652, decode.loss_cls: 0.9184, decode.loss_mask: 0.7710, decode.loss_dice: 1.0511, decode.d0.loss_cls: 7.2522, decode.d0.loss_mask: 0.7898, decode.d0.loss_dice: 1.3432, decode.d1.loss_cls: 1.4822, decode.d1.loss_mask: 0.7831, decode.d1.loss_dice: 1.1179, decode.d2.loss_cls: 1.1396, decode.d2.loss_mask: 0.7647, decode.d2.loss_dice: 1.0610, decode.d3.loss_cls: 1.0130, decode.d3.loss_mask: 0.7661, decode.d3.loss_dice: 1.0513, decode.d4.loss_cls: 0.9668, decode.d4.loss_mask: 0.7686, decode.d4.loss_dice: 1.0473, decode.d5.loss_cls: 0.9337, decode.d5.loss_mask: 0.7677, decode.d5.loss_dice: 1.0511, decode.d6.loss_cls: 0.9311, decode.d6.loss_mask: 0.7622, decode.d6.loss_dice: 1.0441, decode.d7.loss_cls: 0.9276, decode.d7.loss_mask: 0.7655, decode.d7.loss_dice: 1.0444, decode.d8.loss_cls: 0.9207, decode.d8.loss_mask: 0.7679, decode.d8.loss_dice: 1.0537, loss: 35.0570 +2022-06-05 02:09:54,860 - mmseg - INFO - Iter [3900/40000] lr: 6.882e-06, eta: 4:16:16, time: 0.411, data_time: 0.008, memory: 31652, decode.loss_cls: 0.9044, decode.loss_mask: 0.7553, decode.loss_dice: 1.0223, decode.d0.loss_cls: 7.2126, decode.d0.loss_mask: 0.7760, decode.d0.loss_dice: 1.3005, decode.d1.loss_cls: 1.4485, decode.d1.loss_mask: 0.7680, decode.d1.loss_dice: 1.0869, 
decode.d2.loss_cls: 1.1014, decode.d2.loss_mask: 0.7546, decode.d2.loss_dice: 1.0462, decode.d3.loss_cls: 0.9996, decode.d3.loss_mask: 0.7485, decode.d3.loss_dice: 1.0244, decode.d4.loss_cls: 0.9441, decode.d4.loss_mask: 0.7510, decode.d4.loss_dice: 1.0265, decode.d5.loss_cls: 0.9166, decode.d5.loss_mask: 0.7553, decode.d5.loss_dice: 1.0314, decode.d6.loss_cls: 0.9133, decode.d6.loss_mask: 0.7528, decode.d6.loss_dice: 1.0251, decode.d7.loss_cls: 0.9032, decode.d7.loss_mask: 0.7571, decode.d7.loss_dice: 1.0254, decode.d8.loss_cls: 0.8917, decode.d8.loss_mask: 0.7602, decode.d8.loss_dice: 1.0332, loss: 34.4363 +2022-06-05 02:10:15,538 - mmseg - INFO - Iter [3950/40000] lr: 6.873e-06, eta: 4:15:49, time: 0.414, data_time: 0.008, memory: 31652, decode.loss_cls: 0.9259, decode.loss_mask: 0.7354, decode.loss_dice: 1.0451, decode.d0.loss_cls: 7.1946, decode.d0.loss_mask: 0.7584, decode.d0.loss_dice: 1.3359, decode.d1.loss_cls: 1.4608, decode.d1.loss_mask: 0.7497, decode.d1.loss_dice: 1.1087, decode.d2.loss_cls: 1.1331, decode.d2.loss_mask: 0.7404, decode.d2.loss_dice: 1.0629, decode.d3.loss_cls: 1.0200, decode.d3.loss_mask: 0.7366, decode.d3.loss_dice: 1.0455, decode.d4.loss_cls: 0.9741, decode.d4.loss_mask: 0.7352, decode.d4.loss_dice: 1.0432, decode.d5.loss_cls: 0.9487, decode.d5.loss_mask: 0.7418, decode.d5.loss_dice: 1.0554, decode.d6.loss_cls: 0.9385, decode.d6.loss_mask: 0.7330, decode.d6.loss_dice: 1.0455, decode.d7.loss_cls: 0.9255, decode.d7.loss_mask: 0.7400, decode.d7.loss_dice: 1.0480, decode.d8.loss_cls: 0.9204, decode.d8.loss_mask: 0.7372, decode.d8.loss_dice: 1.0459, loss: 34.6855 +2022-06-05 02:10:36,123 - mmseg - INFO - Saving checkpoint at 4000 iterations +2022-06-05 02:10:38,504 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:10:38,504 - mmseg - INFO - Iter [4000/40000] lr: 6.863e-06, eta: 4:15:43, time: 0.459, data_time: 0.008, memory: 31652, decode.loss_cls: 0.8582, decode.loss_mask: 0.7673, decode.loss_dice: 1.0471, decode.d0.loss_cls: 7.1468, decode.d0.loss_mask: 0.7795, decode.d0.loss_dice: 1.2950, decode.d1.loss_cls: 1.3861, decode.d1.loss_mask: 0.7706, decode.d1.loss_dice: 1.0970, decode.d2.loss_cls: 1.0498, decode.d2.loss_mask: 0.7654, decode.d2.loss_dice: 1.0542, decode.d3.loss_cls: 0.9493, decode.d3.loss_mask: 0.7605, decode.d3.loss_dice: 1.0406, decode.d4.loss_cls: 0.9026, decode.d4.loss_mask: 0.7615, decode.d4.loss_dice: 1.0479, decode.d5.loss_cls: 0.8727, decode.d5.loss_mask: 0.7666, decode.d5.loss_dice: 1.0513, decode.d6.loss_cls: 0.8711, decode.d6.loss_mask: 0.7729, decode.d6.loss_dice: 1.0422, decode.d7.loss_cls: 0.8631, decode.d7.loss_mask: 0.7723, decode.d7.loss_dice: 1.0455, decode.d8.loss_cls: 0.8637, decode.d8.loss_mask: 0.7671, decode.d8.loss_dice: 1.0416, loss: 34.2098 +2022-06-05 02:13:58,865 - mmseg - INFO - per class results: +2022-06-05 02:13:58,870 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 86.5 | 90.45 | +| bag | 0.82 | 0.84 | +| bed | 0.0 | 0.0 | +| bedclothes | 34.24 | 48.18 | +| bench | 0.0 | 0.0 | +| bicycle | 81.79 | 91.7 | +| bird | 92.4 | 96.58 | +| boat | 77.58 | 82.73 | +| book | 46.64 | 65.26 | +| bottle | 82.07 | 95.15 | +| building | 62.06 | 79.4 | +| bus | 94.19 | 96.3 | +| cabinet | 38.12 | 65.31 | +| car | 85.74 | 96.57 | +| cat | 93.47 | 97.59 | +| ceiling | 62.6 | 77.42 | +| chair | 57.87 | 73.98 | +| cloth | 24.4 | 54.47 | +| computer | 0.07 | 0.07 | +| cow | 94.16 | 96.27 | +| cup | 31.12 | 48.58 | 
+| curtain | 55.7 | 74.61 | +| dog | 90.54 | 96.88 | +| door | 17.37 | 27.88 | +| fence | 37.41 | 49.15 | +| floor | 71.76 | 89.07 | +| flower | 0.16 | 0.16 | +| food | 27.41 | 32.98 | +| grass | 81.93 | 89.64 | +| ground | 55.59 | 66.8 | +| horse | 93.61 | 97.22 | +| keyboard | 71.1 | 78.02 | +| light | 46.5 | 70.82 | +| motorbike | 88.22 | 94.55 | +| mountain | 50.1 | 80.73 | +| mouse | 25.82 | 25.85 | +| person | 88.97 | 95.29 | +| plate | 10.17 | 11.94 | +| platform | 35.64 | 39.6 | +| pottedplant | 72.88 | 89.44 | +| road | 54.28 | 72.19 | +| rock | 40.71 | 59.66 | +| sheep | 92.8 | 95.94 | +| shelves | 26.77 | 34.86 | +| sidewalk | 22.73 | 48.45 | +| sign | 33.47 | 38.93 | +| sky | 94.35 | 95.75 | +| snow | 67.15 | 72.94 | +| sofa | 54.04 | 64.08 | +| table | 64.93 | 82.53 | +| track | 64.93 | 74.51 | +| train | 89.68 | 92.29 | +| tree | 79.36 | 89.79 | +| truck | 0.0 | 0.0 | +| tvmonitor | 81.36 | 89.8 | +| wall | 70.41 | 82.09 | +| water | 89.09 | 96.39 | +| window | 40.11 | 57.54 | +| wood | 13.31 | 15.2 | ++-------------+-------+-------+ +2022-06-05 02:13:58,870 - mmseg - INFO - Summary: +2022-06-05 02:13:58,870 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 83.59 | 55.02 | 64.92 | ++-------+-------+-------+ +2022-06-05 02:14:01,371 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_4000.pth. +2022-06-05 02:14:01,372 - mmseg - INFO - Best mIoU is 0.5502 at 4000 iter. +2022-06-05 02:14:01,393 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:14:01,393 - mmseg - INFO - Iter(val) [638] aAcc: 0.8359, mIoU: 0.5502, mAcc: 0.6492, IoU.aeroplane: 0.8650, IoU.bag: 0.0082, IoU.bed: 0.0000, IoU.bedclothes: 0.3424, IoU.bench: 0.0000, IoU.bicycle: 0.8179, IoU.bird: 0.9240, IoU.boat: 0.7758, IoU.book: 0.4664, IoU.bottle: 0.8207, IoU.building: 0.6206, IoU.bus: 0.9419, IoU.cabinet: 0.3812, IoU.car: 0.8574, IoU.cat: 0.9347, IoU.ceiling: 0.6260, IoU.chair: 0.5787, IoU.cloth: 0.2440, IoU.computer: 0.0007, IoU.cow: 0.9416, IoU.cup: 0.3112, IoU.curtain: 0.5570, IoU.dog: 0.9054, IoU.door: 0.1737, IoU.fence: 0.3741, IoU.floor: 0.7176, IoU.flower: 0.0016, IoU.food: 0.2741, IoU.grass: 0.8193, IoU.ground: 0.5559, IoU.horse: 0.9361, IoU.keyboard: 0.7110, IoU.light: 0.4650, IoU.motorbike: 0.8822, IoU.mountain: 0.5010, IoU.mouse: 0.2582, IoU.person: 0.8897, IoU.plate: 0.1017, IoU.platform: 0.3564, IoU.pottedplant: 0.7288, IoU.road: 0.5428, IoU.rock: 0.4071, IoU.sheep: 0.9280, IoU.shelves: 0.2677, IoU.sidewalk: 0.2273, IoU.sign: 0.3347, IoU.sky: 0.9435, IoU.snow: 0.6715, IoU.sofa: 0.5404, IoU.table: 0.6493, IoU.track: 0.6493, IoU.train: 0.8968, IoU.tree: 0.7936, IoU.truck: 0.0000, IoU.tvmonitor: 0.8136, IoU.wall: 0.7041, IoU.water: 0.8909, IoU.window: 0.4011, IoU.wood: 0.1331, Acc.aeroplane: 0.9045, Acc.bag: 0.0084, Acc.bed: 0.0000, Acc.bedclothes: 0.4818, Acc.bench: 0.0000, Acc.bicycle: 0.9170, Acc.bird: 0.9658, Acc.boat: 0.8273, Acc.book: 0.6526, Acc.bottle: 0.9515, Acc.building: 0.7940, Acc.bus: 0.9630, Acc.cabinet: 0.6531, Acc.car: 0.9657, Acc.cat: 0.9759, Acc.ceiling: 0.7742, Acc.chair: 0.7398, Acc.cloth: 0.5447, Acc.computer: 0.0007, Acc.cow: 0.9627, Acc.cup: 0.4858, Acc.curtain: 0.7461, Acc.dog: 0.9688, Acc.door: 0.2788, Acc.fence: 0.4915, Acc.floor: 0.8907, Acc.flower: 0.0016, Acc.food: 0.3298, Acc.grass: 0.8964, Acc.ground: 0.6680, Acc.horse: 0.9722, Acc.keyboard: 0.7802, Acc.light: 0.7082, Acc.motorbike: 0.9455, Acc.mountain: 0.8073, Acc.mouse: 0.2585, Acc.person: 0.9529, Acc.plate: 
0.1194, Acc.platform: 0.3960, Acc.pottedplant: 0.8944, Acc.road: 0.7219, Acc.rock: 0.5966, Acc.sheep: 0.9594, Acc.shelves: 0.3486, Acc.sidewalk: 0.4845, Acc.sign: 0.3893, Acc.sky: 0.9575, Acc.snow: 0.7294, Acc.sofa: 0.6408, Acc.table: 0.8253, Acc.track: 0.7451, Acc.train: 0.9229, Acc.tree: 0.8979, Acc.truck: 0.0000, Acc.tvmonitor: 0.8980, Acc.wall: 0.8209, Acc.water: 0.9639, Acc.window: 0.5754, Acc.wood: 0.1520 +2022-06-05 02:14:23,306 - mmseg - INFO - Iter [4050/40000] lr: 6.854e-06, eta: 4:45:28, time: 4.496, data_time: 4.065, memory: 31652, decode.loss_cls: 0.9185, decode.loss_mask: 0.7710, decode.loss_dice: 1.0948, decode.d0.loss_cls: 7.1185, decode.d0.loss_mask: 0.7899, decode.d0.loss_dice: 1.3601, decode.d1.loss_cls: 1.4438, decode.d1.loss_mask: 0.7831, decode.d1.loss_dice: 1.1524, decode.d2.loss_cls: 1.1091, decode.d2.loss_mask: 0.7761, decode.d2.loss_dice: 1.0964, decode.d3.loss_cls: 1.0069, decode.d3.loss_mask: 0.7614, decode.d3.loss_dice: 1.0755, decode.d4.loss_cls: 0.9536, decode.d4.loss_mask: 0.7649, decode.d4.loss_dice: 1.0776, decode.d5.loss_cls: 0.9257, decode.d5.loss_mask: 0.7695, decode.d5.loss_dice: 1.0862, decode.d6.loss_cls: 0.9189, decode.d6.loss_mask: 0.7676, decode.d6.loss_dice: 1.0853, decode.d7.loss_cls: 0.9179, decode.d7.loss_mask: 0.7698, decode.d7.loss_dice: 1.0842, decode.d8.loss_cls: 0.9106, decode.d8.loss_mask: 0.7741, decode.d8.loss_dice: 1.0899, loss: 35.1534 +2022-06-05 02:14:48,715 - mmseg - INFO - Iter [4100/40000] lr: 6.844e-06, eta: 4:45:18, time: 0.507, data_time: 0.058, memory: 31652, decode.loss_cls: 0.8684, decode.loss_mask: 0.7448, decode.loss_dice: 1.0089, decode.d0.loss_cls: 7.1047, decode.d0.loss_mask: 0.7594, decode.d0.loss_dice: 1.2616, decode.d1.loss_cls: 1.3953, decode.d1.loss_mask: 0.7498, decode.d1.loss_dice: 1.0679, decode.d2.loss_cls: 1.0583, decode.d2.loss_mask: 0.7412, decode.d2.loss_dice: 1.0194, decode.d3.loss_cls: 0.9474, decode.d3.loss_mask: 0.7427, decode.d3.loss_dice: 1.0127, decode.d4.loss_cls: 0.9066, decode.d4.loss_mask: 0.7382, decode.d4.loss_dice: 1.0114, decode.d5.loss_cls: 0.8899, decode.d5.loss_mask: 0.7403, decode.d5.loss_dice: 1.0093, decode.d6.loss_cls: 0.8675, decode.d6.loss_mask: 0.7424, decode.d6.loss_dice: 1.0110, decode.d7.loss_cls: 0.8708, decode.d7.loss_mask: 0.7413, decode.d7.loss_dice: 1.0124, decode.d8.loss_cls: 0.8636, decode.d8.loss_mask: 0.7419, decode.d8.loss_dice: 1.0147, loss: 33.6438 +2022-06-05 02:15:11,317 - mmseg - INFO - Iter [4150/40000] lr: 6.835e-06, eta: 4:44:43, time: 0.453, data_time: 0.009, memory: 31652, decode.loss_cls: 0.8693, decode.loss_mask: 0.7801, decode.loss_dice: 1.0381, decode.d0.loss_cls: 7.0686, decode.d0.loss_mask: 0.7903, decode.d0.loss_dice: 1.3054, decode.d1.loss_cls: 1.3972, decode.d1.loss_mask: 0.7819, decode.d1.loss_dice: 1.1072, decode.d2.loss_cls: 1.0545, decode.d2.loss_mask: 0.7741, decode.d2.loss_dice: 1.0555, decode.d3.loss_cls: 0.9583, decode.d3.loss_mask: 0.7739, decode.d3.loss_dice: 1.0415, decode.d4.loss_cls: 0.9133, decode.d4.loss_mask: 0.7764, decode.d4.loss_dice: 1.0431, decode.d5.loss_cls: 0.8913, decode.d5.loss_mask: 0.7769, decode.d5.loss_dice: 1.0355, decode.d6.loss_cls: 0.8748, decode.d6.loss_mask: 0.7791, decode.d6.loss_dice: 1.0422, decode.d7.loss_cls: 0.8708, decode.d7.loss_mask: 0.7777, decode.d7.loss_dice: 1.0400, decode.d8.loss_cls: 0.8658, decode.d8.loss_mask: 0.7840, decode.d8.loss_dice: 1.0429, loss: 34.3096 +2022-06-05 02:15:33,957 - mmseg - INFO - Iter [4200/40000] lr: 6.825e-06, eta: 4:44:10, time: 0.453, data_time: 0.008, memory: 31652, 
decode.loss_cls: 0.8614, decode.loss_mask: 0.7597, decode.loss_dice: 1.0755, decode.d0.loss_cls: 7.0356, decode.d0.loss_mask: 0.7757, decode.d0.loss_dice: 1.3189, decode.d1.loss_cls: 1.3851, decode.d1.loss_mask: 0.7711, decode.d1.loss_dice: 1.1192, decode.d2.loss_cls: 1.0448, decode.d2.loss_mask: 0.7652, decode.d2.loss_dice: 1.0731, decode.d3.loss_cls: 0.9453, decode.d3.loss_mask: 0.7567, decode.d3.loss_dice: 1.0571, decode.d4.loss_cls: 0.9091, decode.d4.loss_mask: 0.7625, decode.d4.loss_dice: 1.0713, decode.d5.loss_cls: 0.8901, decode.d5.loss_mask: 0.7611, decode.d5.loss_dice: 1.0683, decode.d6.loss_cls: 0.8789, decode.d6.loss_mask: 0.7583, decode.d6.loss_dice: 1.0636, decode.d7.loss_cls: 0.8692, decode.d7.loss_mask: 0.7593, decode.d7.loss_dice: 1.0737, decode.d8.loss_cls: 0.8595, decode.d8.loss_mask: 0.7600, decode.d8.loss_dice: 1.0727, loss: 34.3020 +2022-06-05 02:15:56,025 - mmseg - INFO - Iter [4250/40000] lr: 6.816e-06, eta: 4:43:31, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.8822, decode.loss_mask: 0.7635, decode.loss_dice: 1.0253, decode.d0.loss_cls: 7.0159, decode.d0.loss_mask: 0.7857, decode.d0.loss_dice: 1.2985, decode.d1.loss_cls: 1.3892, decode.d1.loss_mask: 0.7725, decode.d1.loss_dice: 1.0953, decode.d2.loss_cls: 1.0598, decode.d2.loss_mask: 0.7660, decode.d2.loss_dice: 1.0466, decode.d3.loss_cls: 0.9639, decode.d3.loss_mask: 0.7562, decode.d3.loss_dice: 1.0286, decode.d4.loss_cls: 0.9232, decode.d4.loss_mask: 0.7612, decode.d4.loss_dice: 1.0290, decode.d5.loss_cls: 0.9024, decode.d5.loss_mask: 0.7598, decode.d5.loss_dice: 1.0309, decode.d6.loss_cls: 0.8909, decode.d6.loss_mask: 0.7587, decode.d6.loss_dice: 1.0274, decode.d7.loss_cls: 0.8853, decode.d7.loss_mask: 0.7581, decode.d7.loss_dice: 1.0267, decode.d8.loss_cls: 0.8729, decode.d8.loss_mask: 0.7612, decode.d8.loss_dice: 1.0292, loss: 34.0660 +2022-06-05 02:16:18,899 - mmseg - INFO - Iter [4300/40000] lr: 6.806e-06, eta: 4:43:00, time: 0.458, data_time: 0.008, memory: 31652, decode.loss_cls: 0.8636, decode.loss_mask: 0.7960, decode.loss_dice: 1.0851, decode.d0.loss_cls: 6.9606, decode.d0.loss_mask: 0.8073, decode.d0.loss_dice: 1.3305, decode.d1.loss_cls: 1.3361, decode.d1.loss_mask: 0.8032, decode.d1.loss_dice: 1.1258, decode.d2.loss_cls: 1.0123, decode.d2.loss_mask: 0.7974, decode.d2.loss_dice: 1.0893, decode.d3.loss_cls: 0.9310, decode.d3.loss_mask: 0.7908, decode.d3.loss_dice: 1.0754, decode.d4.loss_cls: 0.8913, decode.d4.loss_mask: 0.7969, decode.d4.loss_dice: 1.0764, decode.d5.loss_cls: 0.8781, decode.d5.loss_mask: 0.7989, decode.d5.loss_dice: 1.0761, decode.d6.loss_cls: 0.8659, decode.d6.loss_mask: 0.7980, decode.d6.loss_dice: 1.0754, decode.d7.loss_cls: 0.8681, decode.d7.loss_mask: 0.7964, decode.d7.loss_dice: 1.0770, decode.d8.loss_cls: 0.8651, decode.d8.loss_mask: 0.7966, decode.d8.loss_dice: 1.0837, loss: 34.5484 +2022-06-05 02:16:42,048 - mmseg - INFO - Iter [4350/40000] lr: 6.797e-06, eta: 4:42:31, time: 0.463, data_time: 0.008, memory: 31652, decode.loss_cls: 0.8739, decode.loss_mask: 0.7858, decode.loss_dice: 1.0333, decode.d0.loss_cls: 6.9347, decode.d0.loss_mask: 0.7848, decode.d0.loss_dice: 1.2867, decode.d1.loss_cls: 1.3442, decode.d1.loss_mask: 0.7860, decode.d1.loss_dice: 1.0994, decode.d2.loss_cls: 1.0367, decode.d2.loss_mask: 0.7815, decode.d2.loss_dice: 1.0509, decode.d3.loss_cls: 0.9630, decode.d3.loss_mask: 0.7700, decode.d3.loss_dice: 1.0358, decode.d4.loss_cls: 0.9180, decode.d4.loss_mask: 0.7748, decode.d4.loss_dice: 1.0406, decode.d5.loss_cls: 0.8944, 
decode.d5.loss_mask: 0.7813, decode.d5.loss_dice: 1.0407, decode.d6.loss_cls: 0.8835, decode.d6.loss_mask: 0.7785, decode.d6.loss_dice: 1.0311, decode.d7.loss_cls: 0.8769, decode.d7.loss_mask: 0.7823, decode.d7.loss_dice: 1.0372, decode.d8.loss_cls: 0.8756, decode.d8.loss_mask: 0.7777, decode.d8.loss_dice: 1.0393, loss: 34.0989 +2022-06-05 02:17:07,543 - mmseg - INFO - Iter [4400/40000] lr: 6.787e-06, eta: 4:42:21, time: 0.510, data_time: 0.058, memory: 31652, decode.loss_cls: 0.8242, decode.loss_mask: 0.7269, decode.loss_dice: 0.9937, decode.d0.loss_cls: 6.8917, decode.d0.loss_mask: 0.7628, decode.d0.loss_dice: 1.2485, decode.d1.loss_cls: 1.3199, decode.d1.loss_mask: 0.7476, decode.d1.loss_dice: 1.0574, decode.d2.loss_cls: 1.0106, decode.d2.loss_mask: 0.7326, decode.d2.loss_dice: 1.0025, decode.d3.loss_cls: 0.8980, decode.d3.loss_mask: 0.7333, decode.d3.loss_dice: 0.9950, decode.d4.loss_cls: 0.8599, decode.d4.loss_mask: 0.7325, decode.d4.loss_dice: 1.0020, decode.d5.loss_cls: 0.8386, decode.d5.loss_mask: 0.7374, decode.d5.loss_dice: 1.0010, decode.d6.loss_cls: 0.8281, decode.d6.loss_mask: 0.7329, decode.d6.loss_dice: 0.9895, decode.d7.loss_cls: 0.8236, decode.d7.loss_mask: 0.7356, decode.d7.loss_dice: 0.9934, decode.d8.loss_cls: 0.8186, decode.d8.loss_mask: 0.7349, decode.d8.loss_dice: 0.9895, loss: 32.7621 +2022-06-05 02:17:29,504 - mmseg - INFO - Iter [4450/40000] lr: 6.777e-06, eta: 4:41:42, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.8332, decode.loss_mask: 0.7519, decode.loss_dice: 1.0289, decode.d0.loss_cls: 6.8991, decode.d0.loss_mask: 0.7665, decode.d0.loss_dice: 1.2905, decode.d1.loss_cls: 1.3469, decode.d1.loss_mask: 0.7628, decode.d1.loss_dice: 1.0935, decode.d2.loss_cls: 1.0328, decode.d2.loss_mask: 0.7470, decode.d2.loss_dice: 1.0443, decode.d3.loss_cls: 0.9243, decode.d3.loss_mask: 0.7436, decode.d3.loss_dice: 1.0296, decode.d4.loss_cls: 0.8895, decode.d4.loss_mask: 0.7405, decode.d4.loss_dice: 1.0336, decode.d5.loss_cls: 0.8633, decode.d5.loss_mask: 0.7454, decode.d5.loss_dice: 1.0416, decode.d6.loss_cls: 0.8418, decode.d6.loss_mask: 0.7520, decode.d6.loss_dice: 1.0356, decode.d7.loss_cls: 0.8416, decode.d7.loss_mask: 0.7504, decode.d7.loss_dice: 1.0296, decode.d8.loss_cls: 0.8323, decode.d8.loss_mask: 0.7506, decode.d8.loss_dice: 1.0339, loss: 33.4764 +2022-06-05 02:17:51,776 - mmseg - INFO - Iter [4500/40000] lr: 6.768e-06, eta: 4:41:07, time: 0.446, data_time: 0.007, memory: 31652, decode.loss_cls: 0.8512, decode.loss_mask: 0.7505, decode.loss_dice: 0.9979, decode.d0.loss_cls: 6.8456, decode.d0.loss_mask: 0.7645, decode.d0.loss_dice: 1.2657, decode.d1.loss_cls: 1.2947, decode.d1.loss_mask: 0.7593, decode.d1.loss_dice: 1.0622, decode.d2.loss_cls: 0.9902, decode.d2.loss_mask: 0.7481, decode.d2.loss_dice: 1.0174, decode.d3.loss_cls: 0.9118, decode.d3.loss_mask: 0.7436, decode.d3.loss_dice: 1.0022, decode.d4.loss_cls: 0.8778, decode.d4.loss_mask: 0.7437, decode.d4.loss_dice: 1.0041, decode.d5.loss_cls: 0.8562, decode.d5.loss_mask: 0.7424, decode.d5.loss_dice: 1.0003, decode.d6.loss_cls: 0.8440, decode.d6.loss_mask: 0.7452, decode.d6.loss_dice: 0.9915, decode.d7.loss_cls: 0.8419, decode.d7.loss_mask: 0.7459, decode.d7.loss_dice: 0.9987, decode.d8.loss_cls: 0.8381, decode.d8.loss_mask: 0.7452, decode.d8.loss_dice: 0.9971, loss: 32.9771 +2022-06-05 02:18:14,293 - mmseg - INFO - Iter [4550/40000] lr: 6.758e-06, eta: 4:40:33, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.8364, decode.loss_mask: 0.7352, decode.loss_dice: 1.0038, 
decode.d0.loss_cls: 6.8258, decode.d0.loss_mask: 0.7570, decode.d0.loss_dice: 1.2579, decode.d1.loss_cls: 1.2826, decode.d1.loss_mask: 0.7426, decode.d1.loss_dice: 1.0692, decode.d2.loss_cls: 0.9866, decode.d2.loss_mask: 0.7286, decode.d2.loss_dice: 1.0178, decode.d3.loss_cls: 0.9103, decode.d3.loss_mask: 0.7242, decode.d3.loss_dice: 1.0021, decode.d4.loss_cls: 0.8655, decode.d4.loss_mask: 0.7249, decode.d4.loss_dice: 1.0108, decode.d5.loss_cls: 0.8452, decode.d5.loss_mask: 0.7319, decode.d5.loss_dice: 1.0123, decode.d6.loss_cls: 0.8387, decode.d6.loss_mask: 0.7332, decode.d6.loss_dice: 1.0068, decode.d7.loss_cls: 0.8296, decode.d7.loss_mask: 0.7354, decode.d7.loss_dice: 1.0075, decode.d8.loss_cls: 0.8330, decode.d8.loss_mask: 0.7314, decode.d8.loss_dice: 1.0029, loss: 32.7891 +2022-06-05 02:18:36,143 - mmseg - INFO - Iter [4600/40000] lr: 6.749e-06, eta: 4:39:55, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.8235, decode.loss_mask: 0.7574, decode.loss_dice: 1.0150, decode.d0.loss_cls: 6.7914, decode.d0.loss_mask: 0.7819, decode.d0.loss_dice: 1.2631, decode.d1.loss_cls: 1.3046, decode.d1.loss_mask: 0.7670, decode.d1.loss_dice: 1.0644, decode.d2.loss_cls: 0.9894, decode.d2.loss_mask: 0.7598, decode.d2.loss_dice: 1.0178, decode.d3.loss_cls: 0.8997, decode.d3.loss_mask: 0.7546, decode.d3.loss_dice: 1.0077, decode.d4.loss_cls: 0.8592, decode.d4.loss_mask: 0.7569, decode.d4.loss_dice: 1.0158, decode.d5.loss_cls: 0.8414, decode.d5.loss_mask: 0.7652, decode.d5.loss_dice: 1.0165, decode.d6.loss_cls: 0.8206, decode.d6.loss_mask: 0.7612, decode.d6.loss_dice: 1.0087, decode.d7.loss_cls: 0.8146, decode.d7.loss_mask: 0.7625, decode.d7.loss_dice: 1.0127, decode.d8.loss_cls: 0.8085, decode.d8.loss_mask: 0.7634, decode.d8.loss_dice: 1.0161, loss: 33.0204 +2022-06-05 02:18:58,288 - mmseg - INFO - Iter [4650/40000] lr: 6.739e-06, eta: 4:39:19, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.7995, decode.loss_mask: 0.7346, decode.loss_dice: 0.9700, decode.d0.loss_cls: 6.7392, decode.d0.loss_mask: 0.7515, decode.d0.loss_dice: 1.2117, decode.d1.loss_cls: 1.2477, decode.d1.loss_mask: 0.7558, decode.d1.loss_dice: 1.0376, decode.d2.loss_cls: 0.9650, decode.d2.loss_mask: 0.7353, decode.d2.loss_dice: 0.9899, decode.d3.loss_cls: 0.8705, decode.d3.loss_mask: 0.7392, decode.d3.loss_dice: 0.9768, decode.d4.loss_cls: 0.8356, decode.d4.loss_mask: 0.7360, decode.d4.loss_dice: 0.9756, decode.d5.loss_cls: 0.8194, decode.d5.loss_mask: 0.7414, decode.d5.loss_dice: 0.9718, decode.d6.loss_cls: 0.8038, decode.d6.loss_mask: 0.7357, decode.d6.loss_dice: 0.9753, decode.d7.loss_cls: 0.8031, decode.d7.loss_mask: 0.7394, decode.d7.loss_dice: 0.9759, decode.d8.loss_cls: 0.8005, decode.d8.loss_mask: 0.7356, decode.d8.loss_dice: 0.9773, loss: 32.1508 +2022-06-05 02:19:23,543 - mmseg - INFO - Iter [4700/40000] lr: 6.730e-06, eta: 4:39:07, time: 0.505, data_time: 0.057, memory: 31652, decode.loss_cls: 0.8362, decode.loss_mask: 0.7250, decode.loss_dice: 1.0203, decode.d0.loss_cls: 6.7185, decode.d0.loss_mask: 0.7345, decode.d0.loss_dice: 1.2459, decode.d1.loss_cls: 1.2866, decode.d1.loss_mask: 0.7368, decode.d1.loss_dice: 1.0647, decode.d2.loss_cls: 1.0001, decode.d2.loss_mask: 0.7329, decode.d2.loss_dice: 1.0280, decode.d3.loss_cls: 0.8864, decode.d3.loss_mask: 0.7273, decode.d3.loss_dice: 1.0150, decode.d4.loss_cls: 0.8625, decode.d4.loss_mask: 0.7316, decode.d4.loss_dice: 1.0145, decode.d5.loss_cls: 0.8431, decode.d5.loss_mask: 0.7340, decode.d5.loss_dice: 1.0211, decode.d6.loss_cls: 0.8306, 
decode.d6.loss_mask: 0.7276, decode.d6.loss_dice: 1.0189, decode.d7.loss_cls: 0.8281, decode.d7.loss_mask: 0.7268, decode.d7.loss_dice: 1.0171, decode.d8.loss_cls: 0.8248, decode.d8.loss_mask: 0.7268, decode.d8.loss_dice: 1.0212, loss: 32.6871 +2022-06-05 02:19:45,903 - mmseg - INFO - Iter [4750/40000] lr: 6.720e-06, eta: 4:38:33, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7762, decode.loss_mask: 0.7261, decode.loss_dice: 0.9896, decode.d0.loss_cls: 6.6782, decode.d0.loss_mask: 0.7524, decode.d0.loss_dice: 1.2334, decode.d1.loss_cls: 1.2351, decode.d1.loss_mask: 0.7405, decode.d1.loss_dice: 1.0565, decode.d2.loss_cls: 0.9416, decode.d2.loss_mask: 0.7288, decode.d2.loss_dice: 1.0014, decode.d3.loss_cls: 0.8493, decode.d3.loss_mask: 0.7263, decode.d3.loss_dice: 0.9996, decode.d4.loss_cls: 0.8175, decode.d4.loss_mask: 0.7261, decode.d4.loss_dice: 0.9949, decode.d5.loss_cls: 0.7907, decode.d5.loss_mask: 0.7266, decode.d5.loss_dice: 0.9944, decode.d6.loss_cls: 0.7801, decode.d6.loss_mask: 0.7249, decode.d6.loss_dice: 0.9854, decode.d7.loss_cls: 0.7885, decode.d7.loss_mask: 0.7242, decode.d7.loss_dice: 0.9859, decode.d8.loss_cls: 0.7774, decode.d8.loss_mask: 0.7293, decode.d8.loss_dice: 0.9860, loss: 31.9669 +2022-06-05 02:20:08,139 - mmseg - INFO - Iter [4800/40000] lr: 6.711e-06, eta: 4:37:59, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7583, decode.loss_mask: 0.7356, decode.loss_dice: 0.9939, decode.d0.loss_cls: 6.6545, decode.d0.loss_mask: 0.7434, decode.d0.loss_dice: 1.2173, decode.d1.loss_cls: 1.2274, decode.d1.loss_mask: 0.7401, decode.d1.loss_dice: 1.0394, decode.d2.loss_cls: 0.9400, decode.d2.loss_mask: 0.7270, decode.d2.loss_dice: 0.9960, decode.d3.loss_cls: 0.8429, decode.d3.loss_mask: 0.7260, decode.d3.loss_dice: 0.9808, decode.d4.loss_cls: 0.8187, decode.d4.loss_mask: 0.7275, decode.d4.loss_dice: 0.9828, decode.d5.loss_cls: 0.7912, decode.d5.loss_mask: 0.7302, decode.d5.loss_dice: 0.9856, decode.d6.loss_cls: 0.7732, decode.d6.loss_mask: 0.7292, decode.d6.loss_dice: 0.9809, decode.d7.loss_cls: 0.7649, decode.d7.loss_mask: 0.7382, decode.d7.loss_dice: 0.9863, decode.d8.loss_cls: 0.7566, decode.d8.loss_mask: 0.7337, decode.d8.loss_dice: 0.9914, loss: 31.8130 +2022-06-05 02:20:30,413 - mmseg - INFO - Iter [4850/40000] lr: 6.701e-06, eta: 4:37:25, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7876, decode.loss_mask: 0.7322, decode.loss_dice: 0.9995, decode.d0.loss_cls: 6.6339, decode.d0.loss_mask: 0.7524, decode.d0.loss_dice: 1.2405, decode.d1.loss_cls: 1.2136, decode.d1.loss_mask: 0.7480, decode.d1.loss_dice: 1.0544, decode.d2.loss_cls: 0.9501, decode.d2.loss_mask: 0.7359, decode.d2.loss_dice: 1.0126, decode.d3.loss_cls: 0.8637, decode.d3.loss_mask: 0.7263, decode.d3.loss_dice: 0.9927, decode.d4.loss_cls: 0.8277, decode.d4.loss_mask: 0.7280, decode.d4.loss_dice: 1.0009, decode.d5.loss_cls: 0.8078, decode.d5.loss_mask: 0.7325, decode.d5.loss_dice: 1.0018, decode.d6.loss_cls: 0.7937, decode.d6.loss_mask: 0.7299, decode.d6.loss_dice: 0.9953, decode.d7.loss_cls: 0.7895, decode.d7.loss_mask: 0.7296, decode.d7.loss_dice: 0.9977, decode.d8.loss_cls: 0.7907, decode.d8.loss_mask: 0.7286, decode.d8.loss_dice: 0.9986, loss: 32.0957 +2022-06-05 02:20:52,842 - mmseg - INFO - Iter [4900/40000] lr: 6.692e-06, eta: 4:36:52, time: 0.448, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7915, decode.loss_mask: 0.7456, decode.loss_dice: 0.9902, decode.d0.loss_cls: 6.6070, decode.d0.loss_mask: 0.7636, decode.d0.loss_dice: 1.2384, 
decode.d1.loss_cls: 1.2292, decode.d1.loss_mask: 0.7583, decode.d1.loss_dice: 1.0515, decode.d2.loss_cls: 0.9339, decode.d2.loss_mask: 0.7504, decode.d2.loss_dice: 0.9956, decode.d3.loss_cls: 0.8620, decode.d3.loss_mask: 0.7459, decode.d3.loss_dice: 0.9901, decode.d4.loss_cls: 0.8312, decode.d4.loss_mask: 0.7485, decode.d4.loss_dice: 0.9960, decode.d5.loss_cls: 0.8107, decode.d5.loss_mask: 0.7466, decode.d5.loss_dice: 0.9957, decode.d6.loss_cls: 0.7992, decode.d6.loss_mask: 0.7463, decode.d6.loss_dice: 0.9914, decode.d7.loss_cls: 0.7951, decode.d7.loss_mask: 0.7443, decode.d7.loss_dice: 0.9969, decode.d8.loss_cls: 0.7926, decode.d8.loss_mask: 0.7430, decode.d8.loss_dice: 0.9886, loss: 32.1792 +2022-06-05 02:21:15,997 - mmseg - INFO - Iter [4950/40000] lr: 6.682e-06, eta: 4:36:25, time: 0.464, data_time: 0.009, memory: 31652, decode.loss_cls: 0.7932, decode.loss_mask: 0.7317, decode.loss_dice: 1.0088, decode.d0.loss_cls: 6.5848, decode.d0.loss_mask: 0.7520, decode.d0.loss_dice: 1.2305, decode.d1.loss_cls: 1.2330, decode.d1.loss_mask: 0.7351, decode.d1.loss_dice: 1.0527, decode.d2.loss_cls: 0.9422, decode.d2.loss_mask: 0.7301, decode.d2.loss_dice: 1.0101, decode.d3.loss_cls: 0.8561, decode.d3.loss_mask: 0.7260, decode.d3.loss_dice: 1.0102, decode.d4.loss_cls: 0.8171, decode.d4.loss_mask: 0.7266, decode.d4.loss_dice: 1.0072, decode.d5.loss_cls: 0.8106, decode.d5.loss_mask: 0.7279, decode.d5.loss_dice: 0.9988, decode.d6.loss_cls: 0.7963, decode.d6.loss_mask: 0.7369, decode.d6.loss_dice: 0.9994, decode.d7.loss_cls: 0.7944, decode.d7.loss_mask: 0.7279, decode.d7.loss_dice: 1.0033, decode.d8.loss_cls: 0.7890, decode.d8.loss_mask: 0.7320, decode.d8.loss_dice: 1.0078, loss: 32.0719 +2022-06-05 02:21:40,708 - mmseg - INFO - Saving checkpoint at 5000 iterations +2022-06-05 02:21:43,363 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:21:43,364 - mmseg - INFO - Iter [5000/40000] lr: 6.673e-06, eta: 4:36:27, time: 0.547, data_time: 0.058, memory: 31652, decode.loss_cls: 0.8147, decode.loss_mask: 0.7028, decode.loss_dice: 1.0135, decode.d0.loss_cls: 6.5391, decode.d0.loss_mask: 0.7250, decode.d0.loss_dice: 1.2609, decode.d1.loss_cls: 1.2758, decode.d1.loss_mask: 0.7104, decode.d1.loss_dice: 1.0654, decode.d2.loss_cls: 0.9752, decode.d2.loss_mask: 0.6995, decode.d2.loss_dice: 1.0136, decode.d3.loss_cls: 0.8810, decode.d3.loss_mask: 0.6993, decode.d3.loss_dice: 1.0052, decode.d4.loss_cls: 0.8513, decode.d4.loss_mask: 0.7002, decode.d4.loss_dice: 1.0066, decode.d5.loss_cls: 0.8312, decode.d5.loss_mask: 0.6988, decode.d5.loss_dice: 1.0019, decode.d6.loss_cls: 0.8119, decode.d6.loss_mask: 0.7028, decode.d6.loss_dice: 0.9978, decode.d7.loss_cls: 0.8061, decode.d7.loss_mask: 0.7017, decode.d7.loss_dice: 1.0065, decode.d8.loss_cls: 0.8118, decode.d8.loss_mask: 0.7040, decode.d8.loss_dice: 1.0135, loss: 32.0276 +2022-06-05 02:22:05,872 - mmseg - INFO - Iter [5050/40000] lr: 6.663e-06, eta: 4:35:55, time: 0.450, data_time: 0.007, memory: 31652, decode.loss_cls: 0.7447, decode.loss_mask: 0.7132, decode.loss_dice: 0.9787, decode.d0.loss_cls: 6.5144, decode.d0.loss_mask: 0.7250, decode.d0.loss_dice: 1.2010, decode.d1.loss_cls: 1.2008, decode.d1.loss_mask: 0.7163, decode.d1.loss_dice: 1.0199, decode.d2.loss_cls: 0.9078, decode.d2.loss_mask: 0.7107, decode.d2.loss_dice: 0.9765, decode.d3.loss_cls: 0.8292, decode.d3.loss_mask: 0.7094, decode.d3.loss_dice: 0.9681, decode.d4.loss_cls: 0.7957, decode.d4.loss_mask: 0.7066, decode.d4.loss_dice: 0.9688, 
decode.d5.loss_cls: 0.7709, decode.d5.loss_mask: 0.7115, decode.d5.loss_dice: 0.9685, decode.d6.loss_cls: 0.7598, decode.d6.loss_mask: 0.7134, decode.d6.loss_dice: 0.9743, decode.d7.loss_cls: 0.7520, decode.d7.loss_mask: 0.7135, decode.d7.loss_dice: 0.9776, decode.d8.loss_cls: 0.7446, decode.d8.loss_mask: 0.7149, decode.d8.loss_dice: 0.9771, loss: 31.1646 +2022-06-05 02:22:27,988 - mmseg - INFO - Iter [5100/40000] lr: 6.654e-06, eta: 4:35:21, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7258, decode.loss_mask: 0.7115, decode.loss_dice: 0.9595, decode.d0.loss_cls: 6.4636, decode.d0.loss_mask: 0.7334, decode.d0.loss_dice: 1.1992, decode.d1.loss_cls: 1.1597, decode.d1.loss_mask: 0.7183, decode.d1.loss_dice: 1.0139, decode.d2.loss_cls: 0.8685, decode.d2.loss_mask: 0.7086, decode.d2.loss_dice: 0.9630, decode.d3.loss_cls: 0.7925, decode.d3.loss_mask: 0.7023, decode.d3.loss_dice: 0.9541, decode.d4.loss_cls: 0.7580, decode.d4.loss_mask: 0.7049, decode.d4.loss_dice: 0.9584, decode.d5.loss_cls: 0.7415, decode.d5.loss_mask: 0.7077, decode.d5.loss_dice: 0.9559, decode.d6.loss_cls: 0.7298, decode.d6.loss_mask: 0.7004, decode.d6.loss_dice: 0.9458, decode.d7.loss_cls: 0.7248, decode.d7.loss_mask: 0.7073, decode.d7.loss_dice: 0.9535, decode.d8.loss_cls: 0.7220, decode.d8.loss_mask: 0.7082, decode.d8.loss_dice: 0.9595, loss: 30.6516 +2022-06-05 02:22:50,479 - mmseg - INFO - Iter [5150/40000] lr: 6.644e-06, eta: 4:34:49, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7341, decode.loss_mask: 0.7077, decode.loss_dice: 0.9664, decode.d0.loss_cls: 6.4486, decode.d0.loss_mask: 0.7390, decode.d0.loss_dice: 1.2036, decode.d1.loss_cls: 1.1433, decode.d1.loss_mask: 0.7257, decode.d1.loss_dice: 1.0324, decode.d2.loss_cls: 0.8646, decode.d2.loss_mask: 0.7151, decode.d2.loss_dice: 0.9843, decode.d3.loss_cls: 0.7969, decode.d3.loss_mask: 0.7106, decode.d3.loss_dice: 0.9688, decode.d4.loss_cls: 0.7595, decode.d4.loss_mask: 0.7115, decode.d4.loss_dice: 0.9678, decode.d5.loss_cls: 0.7524, decode.d5.loss_mask: 0.7079, decode.d5.loss_dice: 0.9661, decode.d6.loss_cls: 0.7357, decode.d6.loss_mask: 0.7092, decode.d6.loss_dice: 0.9592, decode.d7.loss_cls: 0.7355, decode.d7.loss_mask: 0.7115, decode.d7.loss_dice: 0.9687, decode.d8.loss_cls: 0.7142, decode.d8.loss_mask: 0.7104, decode.d8.loss_dice: 0.9685, loss: 30.8191 +2022-06-05 02:23:13,048 - mmseg - INFO - Iter [5200/40000] lr: 6.634e-06, eta: 4:34:18, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7340, decode.loss_mask: 0.7091, decode.loss_dice: 0.9897, decode.d0.loss_cls: 6.4119, decode.d0.loss_mask: 0.7233, decode.d0.loss_dice: 1.2166, decode.d1.loss_cls: 1.1630, decode.d1.loss_mask: 0.7175, decode.d1.loss_dice: 1.0444, decode.d2.loss_cls: 0.8697, decode.d2.loss_mask: 0.7096, decode.d2.loss_dice: 1.0048, decode.d3.loss_cls: 0.7971, decode.d3.loss_mask: 0.7102, decode.d3.loss_dice: 0.9925, decode.d4.loss_cls: 0.7667, decode.d4.loss_mask: 0.7126, decode.d4.loss_dice: 0.9997, decode.d5.loss_cls: 0.7482, decode.d5.loss_mask: 0.7098, decode.d5.loss_dice: 0.9931, decode.d6.loss_cls: 0.7379, decode.d6.loss_mask: 0.7086, decode.d6.loss_dice: 0.9900, decode.d7.loss_cls: 0.7298, decode.d7.loss_mask: 0.7104, decode.d7.loss_dice: 0.9897, decode.d8.loss_cls: 0.7333, decode.d8.loss_mask: 0.7077, decode.d8.loss_dice: 0.9979, loss: 31.0287 +2022-06-05 02:23:36,170 - mmseg - INFO - Iter [5250/40000] lr: 6.625e-06, eta: 4:33:51, time: 0.463, data_time: 0.009, memory: 31652, decode.loss_cls: 0.7687, decode.loss_mask: 0.7171, 
decode.loss_dice: 0.9865, decode.d0.loss_cls: 6.3963, decode.d0.loss_mask: 0.7234, decode.d0.loss_dice: 1.2192, decode.d1.loss_cls: 1.1832, decode.d1.loss_mask: 0.7231, decode.d1.loss_dice: 1.0481, decode.d2.loss_cls: 0.9168, decode.d2.loss_mask: 0.7140, decode.d2.loss_dice: 1.0084, decode.d3.loss_cls: 0.8295, decode.d3.loss_mask: 0.7048, decode.d3.loss_dice: 0.9876, decode.d4.loss_cls: 0.7974, decode.d4.loss_mask: 0.7096, decode.d4.loss_dice: 0.9936, decode.d5.loss_cls: 0.7885, decode.d5.loss_mask: 0.7079, decode.d5.loss_dice: 0.9899, decode.d6.loss_cls: 0.7693, decode.d6.loss_mask: 0.7058, decode.d6.loss_dice: 0.9853, decode.d7.loss_cls: 0.7819, decode.d7.loss_mask: 0.7093, decode.d7.loss_dice: 0.9842, decode.d8.loss_cls: 0.7697, decode.d8.loss_mask: 0.7090, decode.d8.loss_dice: 0.9860, loss: 31.3139 +2022-06-05 02:23:58,263 - mmseg - INFO - Iter [5300/40000] lr: 6.615e-06, eta: 4:33:17, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7611, decode.loss_mask: 0.7213, decode.loss_dice: 0.9817, decode.d0.loss_cls: 6.3523, decode.d0.loss_mask: 0.7378, decode.d0.loss_dice: 1.2035, decode.d1.loss_cls: 1.1682, decode.d1.loss_mask: 0.7345, decode.d1.loss_dice: 1.0351, decode.d2.loss_cls: 0.8959, decode.d2.loss_mask: 0.7239, decode.d2.loss_dice: 0.9892, decode.d3.loss_cls: 0.8225, decode.d3.loss_mask: 0.7197, decode.d3.loss_dice: 0.9731, decode.d4.loss_cls: 0.8008, decode.d4.loss_mask: 0.7225, decode.d4.loss_dice: 0.9710, decode.d5.loss_cls: 0.7745, decode.d5.loss_mask: 0.7290, decode.d5.loss_dice: 0.9780, decode.d6.loss_cls: 0.7665, decode.d6.loss_mask: 0.7273, decode.d6.loss_dice: 0.9700, decode.d7.loss_cls: 0.7648, decode.d7.loss_mask: 0.7266, decode.d7.loss_dice: 0.9740, decode.d8.loss_cls: 0.7695, decode.d8.loss_mask: 0.7248, decode.d8.loss_dice: 0.9798, loss: 31.1989 +2022-06-05 02:24:23,085 - mmseg - INFO - Iter [5350/40000] lr: 6.606e-06, eta: 4:33:02, time: 0.496, data_time: 0.061, memory: 31652, decode.loss_cls: 0.7630, decode.loss_mask: 0.6878, decode.loss_dice: 0.9788, decode.d0.loss_cls: 6.3080, decode.d0.loss_mask: 0.7021, decode.d0.loss_dice: 1.2021, decode.d1.loss_cls: 1.1900, decode.d1.loss_mask: 0.7009, decode.d1.loss_dice: 1.0375, decode.d2.loss_cls: 0.9061, decode.d2.loss_mask: 0.6943, decode.d2.loss_dice: 0.9941, decode.d3.loss_cls: 0.8183, decode.d3.loss_mask: 0.6905, decode.d3.loss_dice: 0.9813, decode.d4.loss_cls: 0.7911, decode.d4.loss_mask: 0.6870, decode.d4.loss_dice: 0.9779, decode.d5.loss_cls: 0.7752, decode.d5.loss_mask: 0.6905, decode.d5.loss_dice: 0.9799, decode.d6.loss_cls: 0.7665, decode.d6.loss_mask: 0.6849, decode.d6.loss_dice: 0.9742, decode.d7.loss_cls: 0.7620, decode.d7.loss_mask: 0.6904, decode.d7.loss_dice: 0.9794, decode.d8.loss_cls: 0.7585, decode.d8.loss_mask: 0.6859, decode.d8.loss_dice: 0.9792, loss: 30.8373 +2022-06-05 02:24:45,853 - mmseg - INFO - Iter [5400/40000] lr: 6.596e-06, eta: 4:32:32, time: 0.455, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6953, decode.loss_mask: 0.7225, decode.loss_dice: 0.9747, decode.d0.loss_cls: 6.2701, decode.d0.loss_mask: 0.7342, decode.d0.loss_dice: 1.1946, decode.d1.loss_cls: 1.0784, decode.d1.loss_mask: 0.7364, decode.d1.loss_dice: 1.0339, decode.d2.loss_cls: 0.8278, decode.d2.loss_mask: 0.7229, decode.d2.loss_dice: 0.9847, decode.d3.loss_cls: 0.7650, decode.d3.loss_mask: 0.7177, decode.d3.loss_dice: 0.9763, decode.d4.loss_cls: 0.7253, decode.d4.loss_mask: 0.7135, decode.d4.loss_dice: 0.9751, decode.d5.loss_cls: 0.7150, decode.d5.loss_mask: 0.7160, decode.d5.loss_dice: 0.9692, 
decode.d6.loss_cls: 0.6998, decode.d6.loss_mask: 0.7225, decode.d6.loss_dice: 0.9776, decode.d7.loss_cls: 0.6990, decode.d7.loss_mask: 0.7244, decode.d7.loss_dice: 0.9751, decode.d8.loss_cls: 0.6993, decode.d8.loss_mask: 0.7231, decode.d8.loss_dice: 0.9734, loss: 30.4427 +2022-06-05 02:25:08,272 - mmseg - INFO - Iter [5450/40000] lr: 6.587e-06, eta: 4:32:01, time: 0.448, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6939, decode.loss_mask: 0.7194, decode.loss_dice: 0.9379, decode.d0.loss_cls: 6.2572, decode.d0.loss_mask: 0.7339, decode.d0.loss_dice: 1.1502, decode.d1.loss_cls: 1.0902, decode.d1.loss_mask: 0.7306, decode.d1.loss_dice: 0.9896, decode.d2.loss_cls: 0.8229, decode.d2.loss_mask: 0.7212, decode.d2.loss_dice: 0.9504, decode.d3.loss_cls: 0.7561, decode.d3.loss_mask: 0.7153, decode.d3.loss_dice: 0.9408, decode.d4.loss_cls: 0.7242, decode.d4.loss_mask: 0.7121, decode.d4.loss_dice: 0.9353, decode.d5.loss_cls: 0.7075, decode.d5.loss_mask: 0.7151, decode.d5.loss_dice: 0.9357, decode.d6.loss_cls: 0.7014, decode.d6.loss_mask: 0.7143, decode.d6.loss_dice: 0.9350, decode.d7.loss_cls: 0.7026, decode.d7.loss_mask: 0.7126, decode.d7.loss_dice: 0.9344, decode.d8.loss_cls: 0.6922, decode.d8.loss_mask: 0.7153, decode.d8.loss_dice: 0.9419, loss: 29.9895 +2022-06-05 02:25:30,321 - mmseg - INFO - Iter [5500/40000] lr: 6.577e-06, eta: 4:31:28, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7142, decode.loss_mask: 0.6942, decode.loss_dice: 0.9636, decode.d0.loss_cls: 6.2000, decode.d0.loss_mask: 0.7173, decode.d0.loss_dice: 1.1813, decode.d1.loss_cls: 1.0911, decode.d1.loss_mask: 0.7159, decode.d1.loss_dice: 1.0260, decode.d2.loss_cls: 0.8392, decode.d2.loss_mask: 0.7054, decode.d2.loss_dice: 0.9732, decode.d3.loss_cls: 0.7635, decode.d3.loss_mask: 0.7017, decode.d3.loss_dice: 0.9586, decode.d4.loss_cls: 0.7363, decode.d4.loss_mask: 0.6982, decode.d4.loss_dice: 0.9630, decode.d5.loss_cls: 0.7245, decode.d5.loss_mask: 0.6991, decode.d5.loss_dice: 0.9584, decode.d6.loss_cls: 0.7087, decode.d6.loss_mask: 0.6966, decode.d6.loss_dice: 0.9530, decode.d7.loss_cls: 0.7157, decode.d7.loss_mask: 0.6998, decode.d7.loss_dice: 0.9624, decode.d8.loss_cls: 0.7070, decode.d8.loss_mask: 0.7018, decode.d8.loss_dice: 0.9650, loss: 30.1347 +2022-06-05 02:25:52,499 - mmseg - INFO - Iter [5550/40000] lr: 6.568e-06, eta: 4:30:55, time: 0.444, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6866, decode.loss_mask: 0.7076, decode.loss_dice: 0.9555, decode.d0.loss_cls: 6.1958, decode.d0.loss_mask: 0.7232, decode.d0.loss_dice: 1.1620, decode.d1.loss_cls: 1.0735, decode.d1.loss_mask: 0.7137, decode.d1.loss_dice: 1.0017, decode.d2.loss_cls: 0.8049, decode.d2.loss_mask: 0.7083, decode.d2.loss_dice: 0.9656, decode.d3.loss_cls: 0.7416, decode.d3.loss_mask: 0.7079, decode.d3.loss_dice: 0.9584, decode.d4.loss_cls: 0.7110, decode.d4.loss_mask: 0.7082, decode.d4.loss_dice: 0.9607, decode.d5.loss_cls: 0.6972, decode.d5.loss_mask: 0.7026, decode.d5.loss_dice: 0.9555, decode.d6.loss_cls: 0.6953, decode.d6.loss_mask: 0.7044, decode.d6.loss_dice: 0.9530, decode.d7.loss_cls: 0.6843, decode.d7.loss_mask: 0.7063, decode.d7.loss_dice: 0.9583, decode.d8.loss_cls: 0.6825, decode.d8.loss_mask: 0.7074, decode.d8.loss_dice: 0.9606, loss: 29.8934 +2022-06-05 02:26:14,413 - mmseg - INFO - Iter [5600/40000] lr: 6.558e-06, eta: 4:30:21, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.7269, decode.loss_mask: 0.7256, decode.loss_dice: 0.9701, decode.d0.loss_cls: 6.1816, decode.d0.loss_mask: 0.7382, 
decode.d0.loss_dice: 1.2007, decode.d1.loss_cls: 1.1540, decode.d1.loss_mask: 0.7375, decode.d1.loss_dice: 1.0284, decode.d2.loss_cls: 0.8905, decode.d2.loss_mask: 0.7269, decode.d2.loss_dice: 0.9785, decode.d3.loss_cls: 0.8057, decode.d3.loss_mask: 0.7233, decode.d3.loss_dice: 0.9568, decode.d4.loss_cls: 0.7647, decode.d4.loss_mask: 0.7208, decode.d4.loss_dice: 0.9672, decode.d5.loss_cls: 0.7500, decode.d5.loss_mask: 0.7228, decode.d5.loss_dice: 0.9629, decode.d6.loss_cls: 0.7417, decode.d6.loss_mask: 0.7215, decode.d6.loss_dice: 0.9595, decode.d7.loss_cls: 0.7307, decode.d7.loss_mask: 0.7184, decode.d7.loss_dice: 0.9648, decode.d8.loss_cls: 0.7260, decode.d8.loss_mask: 0.7217, decode.d8.loss_dice: 0.9661, loss: 30.6834 +2022-06-05 02:26:38,762 - mmseg - INFO - Iter [5650/40000] lr: 6.549e-06, eta: 4:30:02, time: 0.488, data_time: 0.059, memory: 31652, decode.loss_cls: 0.6986, decode.loss_mask: 0.6886, decode.loss_dice: 0.9791, decode.d0.loss_cls: 6.1048, decode.d0.loss_mask: 0.6990, decode.d0.loss_dice: 1.1901, decode.d1.loss_cls: 1.0546, decode.d1.loss_mask: 0.7000, decode.d1.loss_dice: 1.0328, decode.d2.loss_cls: 0.8157, decode.d2.loss_mask: 0.6888, decode.d2.loss_dice: 0.9855, decode.d3.loss_cls: 0.7488, decode.d3.loss_mask: 0.6885, decode.d3.loss_dice: 0.9788, decode.d4.loss_cls: 0.7251, decode.d4.loss_mask: 0.6869, decode.d4.loss_dice: 0.9791, decode.d5.loss_cls: 0.7147, decode.d5.loss_mask: 0.6917, decode.d5.loss_dice: 0.9769, decode.d6.loss_cls: 0.6999, decode.d6.loss_mask: 0.6909, decode.d6.loss_dice: 0.9776, decode.d7.loss_cls: 0.6988, decode.d7.loss_mask: 0.6883, decode.d7.loss_dice: 0.9805, decode.d8.loss_cls: 0.6965, decode.d8.loss_mask: 0.6893, decode.d8.loss_dice: 0.9822, loss: 29.9322 +2022-06-05 02:27:00,543 - mmseg - INFO - Iter [5700/40000] lr: 6.539e-06, eta: 4:29:28, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6775, decode.loss_mask: 0.6734, decode.loss_dice: 0.9043, decode.d0.loss_cls: 6.0984, decode.d0.loss_mask: 0.6927, decode.d0.loss_dice: 1.1208, decode.d1.loss_cls: 1.0445, decode.d1.loss_mask: 0.6882, decode.d1.loss_dice: 0.9641, decode.d2.loss_cls: 0.7968, decode.d2.loss_mask: 0.6705, decode.d2.loss_dice: 0.9172, decode.d3.loss_cls: 0.7306, decode.d3.loss_mask: 0.6684, decode.d3.loss_dice: 0.9025, decode.d4.loss_cls: 0.7019, decode.d4.loss_mask: 0.6703, decode.d4.loss_dice: 0.9000, decode.d5.loss_cls: 0.6935, decode.d5.loss_mask: 0.6717, decode.d5.loss_dice: 0.9021, decode.d6.loss_cls: 0.6807, decode.d6.loss_mask: 0.6691, decode.d6.loss_dice: 0.8977, decode.d7.loss_cls: 0.6787, decode.d7.loss_mask: 0.6724, decode.d7.loss_dice: 0.9028, decode.d8.loss_cls: 0.6741, decode.d8.loss_mask: 0.6726, decode.d8.loss_dice: 0.9079, loss: 28.8453 +2022-06-05 02:27:22,362 - mmseg - INFO - Iter [5750/40000] lr: 6.530e-06, eta: 4:28:54, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.7169, decode.loss_mask: 0.6695, decode.loss_dice: 0.9390, decode.d0.loss_cls: 6.0674, decode.d0.loss_mask: 0.6940, decode.d0.loss_dice: 1.1643, decode.d1.loss_cls: 1.1126, decode.d1.loss_mask: 0.6820, decode.d1.loss_dice: 1.0021, decode.d2.loss_cls: 0.8516, decode.d2.loss_mask: 0.6711, decode.d2.loss_dice: 0.9595, decode.d3.loss_cls: 0.7724, decode.d3.loss_mask: 0.6721, decode.d3.loss_dice: 0.9523, decode.d4.loss_cls: 0.7409, decode.d4.loss_mask: 0.6766, decode.d4.loss_dice: 0.9440, decode.d5.loss_cls: 0.7309, decode.d5.loss_mask: 0.6732, decode.d5.loss_dice: 0.9441, decode.d6.loss_cls: 0.7240, decode.d6.loss_mask: 0.6737, decode.d6.loss_dice: 0.9365, 
decode.d7.loss_cls: 0.7211, decode.d7.loss_mask: 0.6735, decode.d7.loss_dice: 0.9393, decode.d8.loss_cls: 0.7214, decode.d8.loss_mask: 0.6730, decode.d8.loss_dice: 0.9386, loss: 29.6375 +2022-06-05 02:27:44,290 - mmseg - INFO - Iter [5800/40000] lr: 6.520e-06, eta: 4:28:21, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6948, decode.loss_mask: 0.6992, decode.loss_dice: 0.9585, decode.d0.loss_cls: 6.0248, decode.d0.loss_mask: 0.7113, decode.d0.loss_dice: 1.1543, decode.d1.loss_cls: 1.0830, decode.d1.loss_mask: 0.7011, decode.d1.loss_dice: 1.0028, decode.d2.loss_cls: 0.8359, decode.d2.loss_mask: 0.6964, decode.d2.loss_dice: 0.9595, decode.d3.loss_cls: 0.7586, decode.d3.loss_mask: 0.6945, decode.d3.loss_dice: 0.9536, decode.d4.loss_cls: 0.7379, decode.d4.loss_mask: 0.6935, decode.d4.loss_dice: 0.9556, decode.d5.loss_cls: 0.7206, decode.d5.loss_mask: 0.6979, decode.d5.loss_dice: 0.9496, decode.d6.loss_cls: 0.7099, decode.d6.loss_mask: 0.6928, decode.d6.loss_dice: 0.9451, decode.d7.loss_cls: 0.7029, decode.d7.loss_mask: 0.6946, decode.d7.loss_dice: 0.9519, decode.d8.loss_cls: 0.7057, decode.d8.loss_mask: 0.6940, decode.d8.loss_dice: 0.9526, loss: 29.7329 +2022-06-05 02:28:06,398 - mmseg - INFO - Iter [5850/40000] lr: 6.511e-06, eta: 4:27:49, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6940, decode.loss_mask: 0.7148, decode.loss_dice: 0.9615, decode.d0.loss_cls: 6.0030, decode.d0.loss_mask: 0.7400, decode.d0.loss_dice: 1.1839, decode.d1.loss_cls: 1.1007, decode.d1.loss_mask: 0.7301, decode.d1.loss_dice: 1.0201, decode.d2.loss_cls: 0.8282, decode.d2.loss_mask: 0.7200, decode.d2.loss_dice: 0.9750, decode.d3.loss_cls: 0.7576, decode.d3.loss_mask: 0.7100, decode.d3.loss_dice: 0.9651, decode.d4.loss_cls: 0.7284, decode.d4.loss_mask: 0.7083, decode.d4.loss_dice: 0.9673, decode.d5.loss_cls: 0.7057, decode.d5.loss_mask: 0.7117, decode.d5.loss_dice: 0.9592, decode.d6.loss_cls: 0.6985, decode.d6.loss_mask: 0.7111, decode.d6.loss_dice: 0.9622, decode.d7.loss_cls: 0.6951, decode.d7.loss_mask: 0.7124, decode.d7.loss_dice: 0.9624, decode.d8.loss_cls: 0.6905, decode.d8.loss_mask: 0.7151, decode.d8.loss_dice: 0.9678, loss: 29.9996 +2022-06-05 02:28:28,255 - mmseg - INFO - Iter [5900/40000] lr: 6.501e-06, eta: 4:27:16, time: 0.438, data_time: 0.009, memory: 31652, decode.loss_cls: 0.6757, decode.loss_mask: 0.7044, decode.loss_dice: 0.9612, decode.d0.loss_cls: 5.9517, decode.d0.loss_mask: 0.7303, decode.d0.loss_dice: 1.1700, decode.d1.loss_cls: 1.0542, decode.d1.loss_mask: 0.7160, decode.d1.loss_dice: 1.0112, decode.d2.loss_cls: 0.7980, decode.d2.loss_mask: 0.7067, decode.d2.loss_dice: 0.9708, decode.d3.loss_cls: 0.7383, decode.d3.loss_mask: 0.6987, decode.d3.loss_dice: 0.9636, decode.d4.loss_cls: 0.7140, decode.d4.loss_mask: 0.6970, decode.d4.loss_dice: 0.9641, decode.d5.loss_cls: 0.6918, decode.d5.loss_mask: 0.7036, decode.d5.loss_dice: 0.9604, decode.d6.loss_cls: 0.6842, decode.d6.loss_mask: 0.7023, decode.d6.loss_dice: 0.9559, decode.d7.loss_cls: 0.6796, decode.d7.loss_mask: 0.7025, decode.d7.loss_dice: 0.9649, decode.d8.loss_cls: 0.6696, decode.d8.loss_mask: 0.7034, decode.d8.loss_dice: 0.9636, loss: 29.6076 +2022-06-05 02:28:52,711 - mmseg - INFO - Iter [5950/40000] lr: 6.491e-06, eta: 4:26:57, time: 0.489, data_time: 0.056, memory: 31652, decode.loss_cls: 0.6522, decode.loss_mask: 0.6751, decode.loss_dice: 0.9237, decode.d0.loss_cls: 5.9109, decode.d0.loss_mask: 0.6871, decode.d0.loss_dice: 1.1355, decode.d1.loss_cls: 1.0227, decode.d1.loss_mask: 0.6808, 
decode.d1.loss_dice: 0.9737, decode.d2.loss_cls: 0.7647, decode.d2.loss_mask: 0.6659, decode.d2.loss_dice: 0.9335, decode.d3.loss_cls: 0.7028, decode.d3.loss_mask: 0.6644, decode.d3.loss_dice: 0.9191, decode.d4.loss_cls: 0.6746, decode.d4.loss_mask: 0.6665, decode.d4.loss_dice: 0.9231, decode.d5.loss_cls: 0.6599, decode.d5.loss_mask: 0.6680, decode.d5.loss_dice: 0.9248, decode.d6.loss_cls: 0.6607, decode.d6.loss_mask: 0.6622, decode.d6.loss_dice: 0.9169, decode.d7.loss_cls: 0.6490, decode.d7.loss_mask: 0.6680, decode.d7.loss_dice: 0.9148, decode.d8.loss_cls: 0.6444, decode.d8.loss_mask: 0.6710, decode.d8.loss_dice: 0.9193, loss: 28.5352 +2022-06-05 02:29:14,667 - mmseg - INFO - Saving checkpoint at 6000 iterations +2022-06-05 02:29:17,032 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:29:17,032 - mmseg - INFO - Iter [6000/40000] lr: 6.482e-06, eta: 4:26:38, time: 0.486, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6537, decode.loss_mask: 0.6587, decode.loss_dice: 0.9169, decode.d0.loss_cls: 5.8895, decode.d0.loss_mask: 0.6776, decode.d0.loss_dice: 1.1373, decode.d1.loss_cls: 1.0307, decode.d1.loss_mask: 0.6692, decode.d1.loss_dice: 0.9691, decode.d2.loss_cls: 0.7724, decode.d2.loss_mask: 0.6630, decode.d2.loss_dice: 0.9304, decode.d3.loss_cls: 0.7118, decode.d3.loss_mask: 0.6613, decode.d3.loss_dice: 0.9199, decode.d4.loss_cls: 0.6831, decode.d4.loss_mask: 0.6580, decode.d4.loss_dice: 0.9179, decode.d5.loss_cls: 0.6746, decode.d5.loss_mask: 0.6571, decode.d5.loss_dice: 0.9135, decode.d6.loss_cls: 0.6570, decode.d6.loss_mask: 0.6597, decode.d6.loss_dice: 0.9193, decode.d7.loss_cls: 0.6442, decode.d7.loss_mask: 0.6674, decode.d7.loss_dice: 0.9227, decode.d8.loss_cls: 0.6519, decode.d8.loss_mask: 0.6627, decode.d8.loss_dice: 0.9216, loss: 28.4721 +2022-06-05 02:29:38,996 - mmseg - INFO - Iter [6050/40000] lr: 6.472e-06, eta: 4:26:06, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6810, decode.loss_mask: 0.6883, decode.loss_dice: 0.8992, decode.d0.loss_cls: 5.8659, decode.d0.loss_mask: 0.7143, decode.d0.loss_dice: 1.1180, decode.d1.loss_cls: 1.0588, decode.d1.loss_mask: 0.7010, decode.d1.loss_dice: 0.9554, decode.d2.loss_cls: 0.8042, decode.d2.loss_mask: 0.6845, decode.d2.loss_dice: 0.9122, decode.d3.loss_cls: 0.7344, decode.d3.loss_mask: 0.6818, decode.d3.loss_dice: 0.8957, decode.d4.loss_cls: 0.7031, decode.d4.loss_mask: 0.6847, decode.d4.loss_dice: 0.9060, decode.d5.loss_cls: 0.6901, decode.d5.loss_mask: 0.6883, decode.d5.loss_dice: 0.8962, decode.d6.loss_cls: 0.6800, decode.d6.loss_mask: 0.6893, decode.d6.loss_dice: 0.8984, decode.d7.loss_cls: 0.6806, decode.d7.loss_mask: 0.6850, decode.d7.loss_dice: 0.8964, decode.d8.loss_cls: 0.6704, decode.d8.loss_mask: 0.6857, decode.d8.loss_dice: 0.9005, loss: 28.7495 +2022-06-05 02:30:01,027 - mmseg - INFO - Iter [6100/40000] lr: 6.463e-06, eta: 4:25:34, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6782, decode.loss_mask: 0.6962, decode.loss_dice: 0.9338, decode.d0.loss_cls: 5.8263, decode.d0.loss_mask: 0.7217, decode.d0.loss_dice: 1.1574, decode.d1.loss_cls: 1.0615, decode.d1.loss_mask: 0.7080, decode.d1.loss_dice: 0.9981, decode.d2.loss_cls: 0.8141, decode.d2.loss_mask: 0.6986, decode.d2.loss_dice: 0.9506, decode.d3.loss_cls: 0.7496, decode.d3.loss_mask: 0.6874, decode.d3.loss_dice: 0.9378, decode.d4.loss_cls: 0.7088, decode.d4.loss_mask: 0.6900, decode.d4.loss_dice: 0.9442, decode.d5.loss_cls: 0.7023, decode.d5.loss_mask: 0.6897, 
decode.d5.loss_dice: 0.9426, decode.d6.loss_cls: 0.6802, decode.d6.loss_mask: 0.6941, decode.d6.loss_dice: 0.9407, decode.d7.loss_cls: 0.6902, decode.d7.loss_mask: 0.6892, decode.d7.loss_dice: 0.9395, decode.d8.loss_cls: 0.6785, decode.d8.loss_mask: 0.6908, decode.d8.loss_dice: 0.9371, loss: 29.2371 +2022-06-05 02:30:23,027 - mmseg - INFO - Iter [6150/40000] lr: 6.453e-06, eta: 4:25:03, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6917, decode.loss_mask: 0.7055, decode.loss_dice: 0.9850, decode.d0.loss_cls: 5.8131, decode.d0.loss_mask: 0.7235, decode.d0.loss_dice: 1.1929, decode.d1.loss_cls: 1.0487, decode.d1.loss_mask: 0.7216, decode.d1.loss_dice: 1.0278, decode.d2.loss_cls: 0.8077, decode.d2.loss_mask: 0.7106, decode.d2.loss_dice: 0.9881, decode.d3.loss_cls: 0.7412, decode.d3.loss_mask: 0.7086, decode.d3.loss_dice: 0.9837, decode.d4.loss_cls: 0.7173, decode.d4.loss_mask: 0.7095, decode.d4.loss_dice: 0.9852, decode.d5.loss_cls: 0.7054, decode.d5.loss_mask: 0.7074, decode.d5.loss_dice: 0.9809, decode.d6.loss_cls: 0.6987, decode.d6.loss_mask: 0.7107, decode.d6.loss_dice: 0.9790, decode.d7.loss_cls: 0.6979, decode.d7.loss_mask: 0.7021, decode.d7.loss_dice: 0.9713, decode.d8.loss_cls: 0.6893, decode.d8.loss_mask: 0.7014, decode.d8.loss_dice: 0.9857, loss: 29.7914 +2022-06-05 02:30:44,946 - mmseg - INFO - Iter [6200/40000] lr: 6.444e-06, eta: 4:24:31, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6104, decode.loss_mask: 0.6860, decode.loss_dice: 0.9211, decode.d0.loss_cls: 5.7480, decode.d0.loss_mask: 0.7054, decode.d0.loss_dice: 1.1201, decode.d1.loss_cls: 0.9510, decode.d1.loss_mask: 0.7064, decode.d1.loss_dice: 0.9770, decode.d2.loss_cls: 0.7120, decode.d2.loss_mask: 0.6841, decode.d2.loss_dice: 0.9333, decode.d3.loss_cls: 0.6645, decode.d3.loss_mask: 0.6744, decode.d3.loss_dice: 0.9217, decode.d4.loss_cls: 0.6273, decode.d4.loss_mask: 0.6820, decode.d4.loss_dice: 0.9280, decode.d5.loss_cls: 0.6199, decode.d5.loss_mask: 0.6834, decode.d5.loss_dice: 0.9267, decode.d6.loss_cls: 0.6132, decode.d6.loss_mask: 0.6826, decode.d6.loss_dice: 0.9182, decode.d7.loss_cls: 0.6068, decode.d7.loss_mask: 0.6850, decode.d7.loss_dice: 0.9184, decode.d8.loss_cls: 0.6064, decode.d8.loss_mask: 0.6876, decode.d8.loss_dice: 0.9233, loss: 28.1240 +2022-06-05 02:31:09,455 - mmseg - INFO - Iter [6250/40000] lr: 6.434e-06, eta: 4:24:13, time: 0.490, data_time: 0.055, memory: 31652, decode.loss_cls: 0.6427, decode.loss_mask: 0.6710, decode.loss_dice: 0.9354, decode.d0.loss_cls: 5.7305, decode.d0.loss_mask: 0.6914, decode.d0.loss_dice: 1.1368, decode.d1.loss_cls: 1.0146, decode.d1.loss_mask: 0.6821, decode.d1.loss_dice: 0.9941, decode.d2.loss_cls: 0.7705, decode.d2.loss_mask: 0.6665, decode.d2.loss_dice: 0.9496, decode.d3.loss_cls: 0.7070, decode.d3.loss_mask: 0.6637, decode.d3.loss_dice: 0.9380, decode.d4.loss_cls: 0.6713, decode.d4.loss_mask: 0.6683, decode.d4.loss_dice: 0.9399, decode.d5.loss_cls: 0.6653, decode.d5.loss_mask: 0.6718, decode.d5.loss_dice: 0.9363, decode.d6.loss_cls: 0.6479, decode.d6.loss_mask: 0.6650, decode.d6.loss_dice: 0.9346, decode.d7.loss_cls: 0.6492, decode.d7.loss_mask: 0.6654, decode.d7.loss_dice: 0.9370, decode.d8.loss_cls: 0.6526, decode.d8.loss_mask: 0.6674, decode.d8.loss_dice: 0.9376, loss: 28.5035 +2022-06-05 02:31:31,240 - mmseg - INFO - Iter [6300/40000] lr: 6.425e-06, eta: 4:23:40, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6414, decode.loss_mask: 0.6511, decode.loss_dice: 0.9225, decode.d0.loss_cls: 5.7107, 
decode.d0.loss_mask: 0.6905, decode.d0.loss_dice: 1.1553, decode.d1.loss_cls: 1.0193, decode.d1.loss_mask: 0.6733, decode.d1.loss_dice: 0.9801, decode.d2.loss_cls: 0.7766, decode.d2.loss_mask: 0.6582, decode.d2.loss_dice: 0.9386, decode.d3.loss_cls: 0.6978, decode.d3.loss_mask: 0.6550, decode.d3.loss_dice: 0.9246, decode.d4.loss_cls: 0.6686, decode.d4.loss_mask: 0.6530, decode.d4.loss_dice: 0.9285, decode.d5.loss_cls: 0.6641, decode.d5.loss_mask: 0.6498, decode.d5.loss_dice: 0.9238, decode.d6.loss_cls: 0.6521, decode.d6.loss_mask: 0.6499, decode.d6.loss_dice: 0.9260, decode.d7.loss_cls: 0.6481, decode.d7.loss_mask: 0.6486, decode.d7.loss_dice: 0.9256, decode.d8.loss_cls: 0.6430, decode.d8.loss_mask: 0.6531, decode.d8.loss_dice: 0.9271, loss: 28.2564 +2022-06-05 02:31:53,295 - mmseg - INFO - Iter [6350/40000] lr: 6.415e-06, eta: 4:23:09, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6173, decode.loss_mask: 0.6990, decode.loss_dice: 0.9173, decode.d0.loss_cls: 5.6605, decode.d0.loss_mask: 0.7072, decode.d0.loss_dice: 1.1144, decode.d1.loss_cls: 0.9904, decode.d1.loss_mask: 0.7005, decode.d1.loss_dice: 0.9608, decode.d2.loss_cls: 0.7516, decode.d2.loss_mask: 0.6945, decode.d2.loss_dice: 0.9237, decode.d3.loss_cls: 0.6778, decode.d3.loss_mask: 0.6939, decode.d3.loss_dice: 0.9149, decode.d4.loss_cls: 0.6568, decode.d4.loss_mask: 0.6955, decode.d4.loss_dice: 0.9121, decode.d5.loss_cls: 0.6365, decode.d5.loss_mask: 0.6970, decode.d5.loss_dice: 0.9204, decode.d6.loss_cls: 0.6344, decode.d6.loss_mask: 0.6914, decode.d6.loss_dice: 0.9114, decode.d7.loss_cls: 0.6268, decode.d7.loss_mask: 0.6966, decode.d7.loss_dice: 0.9129, decode.d8.loss_cls: 0.6205, decode.d8.loss_mask: 0.6989, decode.d8.loss_dice: 0.9142, loss: 28.2491 +2022-06-05 02:32:16,103 - mmseg - INFO - Iter [6400/40000] lr: 6.406e-06, eta: 4:22:42, time: 0.456, data_time: 0.009, memory: 31652, decode.loss_cls: 0.6735, decode.loss_mask: 0.6580, decode.loss_dice: 0.9367, decode.d0.loss_cls: 5.6260, decode.d0.loss_mask: 0.6805, decode.d0.loss_dice: 1.1412, decode.d1.loss_cls: 1.0273, decode.d1.loss_mask: 0.6712, decode.d1.loss_dice: 0.9704, decode.d2.loss_cls: 0.8008, decode.d2.loss_mask: 0.6567, decode.d2.loss_dice: 0.9399, decode.d3.loss_cls: 0.7344, decode.d3.loss_mask: 0.6583, decode.d3.loss_dice: 0.9219, decode.d4.loss_cls: 0.6977, decode.d4.loss_mask: 0.6580, decode.d4.loss_dice: 0.9293, decode.d5.loss_cls: 0.6888, decode.d5.loss_mask: 0.6581, decode.d5.loss_dice: 0.9290, decode.d6.loss_cls: 0.6722, decode.d6.loss_mask: 0.6611, decode.d6.loss_dice: 0.9267, decode.d7.loss_cls: 0.6738, decode.d7.loss_mask: 0.6648, decode.d7.loss_dice: 0.9367, decode.d8.loss_cls: 0.6702, decode.d8.loss_mask: 0.6589, decode.d8.loss_dice: 0.9328, loss: 28.4547 +2022-06-05 02:32:39,339 - mmseg - INFO - Iter [6450/40000] lr: 6.396e-06, eta: 4:22:18, time: 0.465, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6079, decode.loss_mask: 0.6310, decode.loss_dice: 0.9090, decode.d0.loss_cls: 5.5777, decode.d0.loss_mask: 0.6547, decode.d0.loss_dice: 1.1033, decode.d1.loss_cls: 0.9482, decode.d1.loss_mask: 0.6551, decode.d1.loss_dice: 0.9639, decode.d2.loss_cls: 0.7391, decode.d2.loss_mask: 0.6407, decode.d2.loss_dice: 0.9193, decode.d3.loss_cls: 0.6688, decode.d3.loss_mask: 0.6382, decode.d3.loss_dice: 0.9060, decode.d4.loss_cls: 0.6368, decode.d4.loss_mask: 0.6336, decode.d4.loss_dice: 0.9044, decode.d5.loss_cls: 0.6325, decode.d5.loss_mask: 0.6334, decode.d5.loss_dice: 0.9063, decode.d6.loss_cls: 0.6193, decode.d6.loss_mask: 0.6406, 
decode.d6.loss_dice: 0.8988, decode.d7.loss_cls: 0.6131, decode.d7.loss_mask: 0.6354, decode.d7.loss_dice: 0.9091, decode.d8.loss_cls: 0.6154, decode.d8.loss_mask: 0.6322, decode.d8.loss_dice: 0.9093, loss: 27.3835 +2022-06-05 02:33:02,088 - mmseg - INFO - Iter [6500/40000] lr: 6.387e-06, eta: 4:21:50, time: 0.455, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6455, decode.loss_mask: 0.6462, decode.loss_dice: 0.9094, decode.d0.loss_cls: 5.5654, decode.d0.loss_mask: 0.6727, decode.d0.loss_dice: 1.1158, decode.d1.loss_cls: 0.9948, decode.d1.loss_mask: 0.6600, decode.d1.loss_dice: 0.9664, decode.d2.loss_cls: 0.7552, decode.d2.loss_mask: 0.6423, decode.d2.loss_dice: 0.9202, decode.d3.loss_cls: 0.6955, decode.d3.loss_mask: 0.6422, decode.d3.loss_dice: 0.9085, decode.d4.loss_cls: 0.6800, decode.d4.loss_mask: 0.6424, decode.d4.loss_dice: 0.9088, decode.d5.loss_cls: 0.6633, decode.d5.loss_mask: 0.6448, decode.d5.loss_dice: 0.9058, decode.d6.loss_cls: 0.6500, decode.d6.loss_mask: 0.6425, decode.d6.loss_dice: 0.9058, decode.d7.loss_cls: 0.6466, decode.d7.loss_mask: 0.6434, decode.d7.loss_dice: 0.9029, decode.d8.loss_cls: 0.6408, decode.d8.loss_mask: 0.6458, decode.d8.loss_dice: 0.9102, loss: 27.7731 +2022-06-05 02:33:24,636 - mmseg - INFO - Iter [6550/40000] lr: 6.377e-06, eta: 4:21:22, time: 0.451, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6367, decode.loss_mask: 0.7111, decode.loss_dice: 0.9357, decode.d0.loss_cls: 5.5408, decode.d0.loss_mask: 0.7279, decode.d0.loss_dice: 1.1369, decode.d1.loss_cls: 0.9852, decode.d1.loss_mask: 0.7194, decode.d1.loss_dice: 0.9909, decode.d2.loss_cls: 0.7727, decode.d2.loss_mask: 0.7033, decode.d2.loss_dice: 0.9418, decode.d3.loss_cls: 0.6885, decode.d3.loss_mask: 0.7038, decode.d3.loss_dice: 0.9352, decode.d4.loss_cls: 0.6664, decode.d4.loss_mask: 0.7111, decode.d4.loss_dice: 0.9440, decode.d5.loss_cls: 0.6563, decode.d5.loss_mask: 0.7073, decode.d5.loss_dice: 0.9353, decode.d6.loss_cls: 0.6433, decode.d6.loss_mask: 0.7072, decode.d6.loss_dice: 0.9358, decode.d7.loss_cls: 0.6372, decode.d7.loss_mask: 0.7069, decode.d7.loss_dice: 0.9348, decode.d8.loss_cls: 0.6435, decode.d8.loss_mask: 0.7046, decode.d8.loss_dice: 0.9362, loss: 28.5997 +2022-06-05 02:33:49,207 - mmseg - INFO - Iter [6600/40000] lr: 6.368e-06, eta: 4:21:05, time: 0.492, data_time: 0.054, memory: 31652, decode.loss_cls: 0.6166, decode.loss_mask: 0.6647, decode.loss_dice: 0.9294, decode.d0.loss_cls: 5.4934, decode.d0.loss_mask: 0.6975, decode.d0.loss_dice: 1.1317, decode.d1.loss_cls: 0.9637, decode.d1.loss_mask: 0.6792, decode.d1.loss_dice: 0.9735, decode.d2.loss_cls: 0.7420, decode.d2.loss_mask: 0.6729, decode.d2.loss_dice: 0.9399, decode.d3.loss_cls: 0.6691, decode.d3.loss_mask: 0.6656, decode.d3.loss_dice: 0.9244, decode.d4.loss_cls: 0.6458, decode.d4.loss_mask: 0.6678, decode.d4.loss_dice: 0.9296, decode.d5.loss_cls: 0.6408, decode.d5.loss_mask: 0.6637, decode.d5.loss_dice: 0.9288, decode.d6.loss_cls: 0.6252, decode.d6.loss_mask: 0.6689, decode.d6.loss_dice: 0.9322, decode.d7.loss_cls: 0.6176, decode.d7.loss_mask: 0.6643, decode.d7.loss_dice: 0.9320, decode.d8.loss_cls: 0.6163, decode.d8.loss_mask: 0.6646, decode.d8.loss_dice: 0.9269, loss: 27.8880 +2022-06-05 02:34:11,636 - mmseg - INFO - Iter [6650/40000] lr: 6.358e-06, eta: 4:20:36, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6157, decode.loss_mask: 0.6580, decode.loss_dice: 0.9232, decode.d0.loss_cls: 5.4608, decode.d0.loss_mask: 0.6897, decode.d0.loss_dice: 1.1313, decode.d1.loss_cls: 0.9819, 
decode.d1.loss_mask: 0.6723, decode.d1.loss_dice: 0.9693, decode.d2.loss_cls: 0.7350, decode.d2.loss_mask: 0.6573, decode.d2.loss_dice: 0.9295, decode.d3.loss_cls: 0.6714, decode.d3.loss_mask: 0.6580, decode.d3.loss_dice: 0.9149, decode.d4.loss_cls: 0.6363, decode.d4.loss_mask: 0.6607, decode.d4.loss_dice: 0.9210, decode.d5.loss_cls: 0.6250, decode.d5.loss_mask: 0.6594, decode.d5.loss_dice: 0.9189, decode.d6.loss_cls: 0.6213, decode.d6.loss_mask: 0.6595, decode.d6.loss_dice: 0.9099, decode.d7.loss_cls: 0.6128, decode.d7.loss_mask: 0.6636, decode.d7.loss_dice: 0.9115, decode.d8.loss_cls: 0.6104, decode.d8.loss_mask: 0.6623, decode.d8.loss_dice: 0.9211, loss: 27.6620 +2022-06-05 02:34:33,859 - mmseg - INFO - Iter [6700/40000] lr: 6.349e-06, eta: 4:20:07, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5919, decode.loss_mask: 0.6682, decode.loss_dice: 0.9073, decode.d0.loss_cls: 5.4209, decode.d0.loss_mask: 0.6889, decode.d0.loss_dice: 1.1008, decode.d1.loss_cls: 0.9349, decode.d1.loss_mask: 0.6790, decode.d1.loss_dice: 0.9462, decode.d2.loss_cls: 0.7117, decode.d2.loss_mask: 0.6707, decode.d2.loss_dice: 0.9200, decode.d3.loss_cls: 0.6434, decode.d3.loss_mask: 0.6667, decode.d3.loss_dice: 0.9104, decode.d4.loss_cls: 0.6222, decode.d4.loss_mask: 0.6713, decode.d4.loss_dice: 0.9117, decode.d5.loss_cls: 0.6124, decode.d5.loss_mask: 0.6720, decode.d5.loss_dice: 0.9120, decode.d6.loss_cls: 0.5986, decode.d6.loss_mask: 0.6732, decode.d6.loss_dice: 0.9080, decode.d7.loss_cls: 0.5944, decode.d7.loss_mask: 0.6705, decode.d7.loss_dice: 0.9069, decode.d8.loss_cls: 0.5948, decode.d8.loss_mask: 0.6685, decode.d8.loss_dice: 0.9091, loss: 27.3867 +2022-06-05 02:34:57,007 - mmseg - INFO - Iter [6750/40000] lr: 6.339e-06, eta: 4:19:42, time: 0.463, data_time: 0.007, memory: 31652, decode.loss_cls: 0.5891, decode.loss_mask: 0.6446, decode.loss_dice: 0.8662, decode.d0.loss_cls: 5.3849, decode.d0.loss_mask: 0.6790, decode.d0.loss_dice: 1.0878, decode.d1.loss_cls: 0.9314, decode.d1.loss_mask: 0.6577, decode.d1.loss_dice: 0.9328, decode.d2.loss_cls: 0.7038, decode.d2.loss_mask: 0.6501, decode.d2.loss_dice: 0.8976, decode.d3.loss_cls: 0.6314, decode.d3.loss_mask: 0.6425, decode.d3.loss_dice: 0.8848, decode.d4.loss_cls: 0.6089, decode.d4.loss_mask: 0.6440, decode.d4.loss_dice: 0.8756, decode.d5.loss_cls: 0.5954, decode.d5.loss_mask: 0.6416, decode.d5.loss_dice: 0.8747, decode.d6.loss_cls: 0.5898, decode.d6.loss_mask: 0.6448, decode.d6.loss_dice: 0.8654, decode.d7.loss_cls: 0.5822, decode.d7.loss_mask: 0.6449, decode.d7.loss_dice: 0.8693, decode.d8.loss_cls: 0.5749, decode.d8.loss_mask: 0.6447, decode.d8.loss_dice: 0.8743, loss: 26.7143 +2022-06-05 02:35:19,710 - mmseg - INFO - Iter [6800/40000] lr: 6.329e-06, eta: 4:19:15, time: 0.454, data_time: 0.007, memory: 31652, decode.loss_cls: 0.5933, decode.loss_mask: 0.6507, decode.loss_dice: 0.8755, decode.d0.loss_cls: 5.3656, decode.d0.loss_mask: 0.6781, decode.d0.loss_dice: 1.0835, decode.d1.loss_cls: 0.9491, decode.d1.loss_mask: 0.6613, decode.d1.loss_dice: 0.9323, decode.d2.loss_cls: 0.7094, decode.d2.loss_mask: 0.6473, decode.d2.loss_dice: 0.8872, decode.d3.loss_cls: 0.6490, decode.d3.loss_mask: 0.6462, decode.d3.loss_dice: 0.8720, decode.d4.loss_cls: 0.6195, decode.d4.loss_mask: 0.6464, decode.d4.loss_dice: 0.8777, decode.d5.loss_cls: 0.6139, decode.d5.loss_mask: 0.6462, decode.d5.loss_dice: 0.8777, decode.d6.loss_cls: 0.5951, decode.d6.loss_mask: 0.6502, decode.d6.loss_dice: 0.8744, decode.d7.loss_cls: 0.5961, decode.d7.loss_mask: 0.6493, 
decode.d7.loss_dice: 0.8808, decode.d8.loss_cls: 0.5869, decode.d8.loss_mask: 0.6521, decode.d8.loss_dice: 0.8784, loss: 26.8453 +2022-06-05 02:35:41,930 - mmseg - INFO - Iter [6850/40000] lr: 6.320e-06, eta: 4:18:46, time: 0.444, data_time: 0.009, memory: 31652, decode.loss_cls: 0.6284, decode.loss_mask: 0.6634, decode.loss_dice: 0.8902, decode.d0.loss_cls: 5.3480, decode.d0.loss_mask: 0.6818, decode.d0.loss_dice: 1.1005, decode.d1.loss_cls: 0.9679, decode.d1.loss_mask: 0.6792, decode.d1.loss_dice: 0.9297, decode.d2.loss_cls: 0.7384, decode.d2.loss_mask: 0.6677, decode.d2.loss_dice: 0.8965, decode.d3.loss_cls: 0.6799, decode.d3.loss_mask: 0.6638, decode.d3.loss_dice: 0.8856, decode.d4.loss_cls: 0.6490, decode.d4.loss_mask: 0.6628, decode.d4.loss_dice: 0.8919, decode.d5.loss_cls: 0.6394, decode.d5.loss_mask: 0.6606, decode.d5.loss_dice: 0.8943, decode.d6.loss_cls: 0.6323, decode.d6.loss_mask: 0.6560, decode.d6.loss_dice: 0.8855, decode.d7.loss_cls: 0.6321, decode.d7.loss_mask: 0.6608, decode.d7.loss_dice: 0.8868, decode.d8.loss_cls: 0.6273, decode.d8.loss_mask: 0.6614, decode.d8.loss_dice: 0.8876, loss: 27.3487 +2022-06-05 02:36:06,666 - mmseg - INFO - Iter [6900/40000] lr: 6.310e-06, eta: 4:18:28, time: 0.495, data_time: 0.055, memory: 31652, decode.loss_cls: 0.5870, decode.loss_mask: 0.6443, decode.loss_dice: 0.9020, decode.d0.loss_cls: 5.2828, decode.d0.loss_mask: 0.6735, decode.d0.loss_dice: 1.1009, decode.d1.loss_cls: 0.9095, decode.d1.loss_mask: 0.6639, decode.d1.loss_dice: 0.9536, decode.d2.loss_cls: 0.6833, decode.d2.loss_mask: 0.6532, decode.d2.loss_dice: 0.9194, decode.d3.loss_cls: 0.6325, decode.d3.loss_mask: 0.6422, decode.d3.loss_dice: 0.9047, decode.d4.loss_cls: 0.6211, decode.d4.loss_mask: 0.6428, decode.d4.loss_dice: 0.9019, decode.d5.loss_cls: 0.6022, decode.d5.loss_mask: 0.6438, decode.d5.loss_dice: 0.8977, decode.d6.loss_cls: 0.5966, decode.d6.loss_mask: 0.6405, decode.d6.loss_dice: 0.8915, decode.d7.loss_cls: 0.5891, decode.d7.loss_mask: 0.6405, decode.d7.loss_dice: 0.8949, decode.d8.loss_cls: 0.5915, decode.d8.loss_mask: 0.6415, decode.d8.loss_dice: 0.8990, loss: 26.8475 +2022-06-05 02:36:29,266 - mmseg - INFO - Iter [6950/40000] lr: 6.301e-06, eta: 4:18:01, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6060, decode.loss_mask: 0.6357, decode.loss_dice: 0.8816, decode.d0.loss_cls: 5.2594, decode.d0.loss_mask: 0.6624, decode.d0.loss_dice: 1.0898, decode.d1.loss_cls: 0.9396, decode.d1.loss_mask: 0.6523, decode.d1.loss_dice: 0.9399, decode.d2.loss_cls: 0.7253, decode.d2.loss_mask: 0.6426, decode.d2.loss_dice: 0.8933, decode.d3.loss_cls: 0.6489, decode.d3.loss_mask: 0.6395, decode.d3.loss_dice: 0.8897, decode.d4.loss_cls: 0.6313, decode.d4.loss_mask: 0.6411, decode.d4.loss_dice: 0.8865, decode.d5.loss_cls: 0.6182, decode.d5.loss_mask: 0.6369, decode.d5.loss_dice: 0.8942, decode.d6.loss_cls: 0.6116, decode.d6.loss_mask: 0.6360, decode.d6.loss_dice: 0.8839, decode.d7.loss_cls: 0.6055, decode.d7.loss_mask: 0.6375, decode.d7.loss_dice: 0.8929, decode.d8.loss_cls: 0.6016, decode.d8.loss_mask: 0.6391, decode.d8.loss_dice: 0.8880, loss: 26.8102 +2022-06-05 02:36:52,147 - mmseg - INFO - Saving checkpoint at 7000 iterations +2022-06-05 02:36:56,159 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:36:56,160 - mmseg - INFO - Iter [7000/40000] lr: 6.291e-06, eta: 4:17:52, time: 0.528, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6063, decode.loss_mask: 0.6601, decode.loss_dice: 0.9157, decode.d0.loss_cls: 
5.2332, decode.d0.loss_mask: 0.6900, decode.d0.loss_dice: 1.1128, decode.d1.loss_cls: 0.9197, decode.d1.loss_mask: 0.6836, decode.d1.loss_dice: 0.9632, decode.d2.loss_cls: 0.7142, decode.d2.loss_mask: 0.6704, decode.d2.loss_dice: 0.9265, decode.d3.loss_cls: 0.6469, decode.d3.loss_mask: 0.6629, decode.d3.loss_dice: 0.9122, decode.d4.loss_cls: 0.6263, decode.d4.loss_mask: 0.6622, decode.d4.loss_dice: 0.9135, decode.d5.loss_cls: 0.6082, decode.d5.loss_mask: 0.6616, decode.d5.loss_dice: 0.9085, decode.d6.loss_cls: 0.6039, decode.d6.loss_mask: 0.6620, decode.d6.loss_dice: 0.9016, decode.d7.loss_cls: 0.5964, decode.d7.loss_mask: 0.6659, decode.d7.loss_dice: 0.9115, decode.d8.loss_cls: 0.5953, decode.d8.loss_mask: 0.6644, decode.d8.loss_dice: 0.9155, loss: 27.2145 +2022-06-05 02:37:18,608 - mmseg - INFO - Iter [7050/40000] lr: 6.282e-06, eta: 4:17:26, time: 0.459, data_time: 0.017, memory: 31652, decode.loss_cls: 0.5897, decode.loss_mask: 0.6532, decode.loss_dice: 0.8946, decode.d0.loss_cls: 5.1910, decode.d0.loss_mask: 0.6858, decode.d0.loss_dice: 1.0917, decode.d1.loss_cls: 0.9046, decode.d1.loss_mask: 0.6742, decode.d1.loss_dice: 0.9544, decode.d2.loss_cls: 0.6868, decode.d2.loss_mask: 0.6660, decode.d2.loss_dice: 0.9075, decode.d3.loss_cls: 0.6344, decode.d3.loss_mask: 0.6572, decode.d3.loss_dice: 0.8921, decode.d4.loss_cls: 0.6086, decode.d4.loss_mask: 0.6550, decode.d4.loss_dice: 0.8961, decode.d5.loss_cls: 0.6023, decode.d5.loss_mask: 0.6552, decode.d5.loss_dice: 0.8912, decode.d6.loss_cls: 0.5935, decode.d6.loss_mask: 0.6564, decode.d6.loss_dice: 0.8913, decode.d7.loss_cls: 0.5881, decode.d7.loss_mask: 0.6536, decode.d7.loss_dice: 0.8898, decode.d8.loss_cls: 0.5902, decode.d8.loss_mask: 0.6520, decode.d8.loss_dice: 0.8875, loss: 26.7943 +2022-06-05 02:37:41,428 - mmseg - INFO - Iter [7100/40000] lr: 6.272e-06, eta: 4:17:00, time: 0.456, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5954, decode.loss_mask: 0.6809, decode.loss_dice: 0.8834, decode.d0.loss_cls: 5.1744, decode.d0.loss_mask: 0.6997, decode.d0.loss_dice: 1.1070, decode.d1.loss_cls: 0.9643, decode.d1.loss_mask: 0.6894, decode.d1.loss_dice: 0.9513, decode.d2.loss_cls: 0.7118, decode.d2.loss_mask: 0.6795, decode.d2.loss_dice: 0.9061, decode.d3.loss_cls: 0.6522, decode.d3.loss_mask: 0.6734, decode.d3.loss_dice: 0.8893, decode.d4.loss_cls: 0.6242, decode.d4.loss_mask: 0.6734, decode.d4.loss_dice: 0.8890, decode.d5.loss_cls: 0.6145, decode.d5.loss_mask: 0.6729, decode.d5.loss_dice: 0.8883, decode.d6.loss_cls: 0.5977, decode.d6.loss_mask: 0.6747, decode.d6.loss_dice: 0.8832, decode.d7.loss_cls: 0.5994, decode.d7.loss_mask: 0.6704, decode.d7.loss_dice: 0.8868, decode.d8.loss_cls: 0.5967, decode.d8.loss_mask: 0.6677, decode.d8.loss_dice: 0.8854, loss: 27.0819 +2022-06-05 02:38:04,342 - mmseg - INFO - Iter [7150/40000] lr: 6.263e-06, eta: 4:16:34, time: 0.458, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5871, decode.loss_mask: 0.6732, decode.loss_dice: 0.9180, decode.d0.loss_cls: 5.1435, decode.d0.loss_mask: 0.6972, decode.d0.loss_dice: 1.1208, decode.d1.loss_cls: 0.9331, decode.d1.loss_mask: 0.6886, decode.d1.loss_dice: 0.9699, decode.d2.loss_cls: 0.7064, decode.d2.loss_mask: 0.6780, decode.d2.loss_dice: 0.9399, decode.d3.loss_cls: 0.6307, decode.d3.loss_mask: 0.6740, decode.d3.loss_dice: 0.9226, decode.d4.loss_cls: 0.6158, decode.d4.loss_mask: 0.6792, decode.d4.loss_dice: 0.9268, decode.d5.loss_cls: 0.5944, decode.d5.loss_mask: 0.6811, decode.d5.loss_dice: 0.9184, decode.d6.loss_cls: 0.5900, decode.d6.loss_mask: 0.6783, 
decode.d6.loss_dice: 0.9236, decode.d7.loss_cls: 0.5919, decode.d7.loss_mask: 0.6734, decode.d7.loss_dice: 0.9242, decode.d8.loss_cls: 0.5924, decode.d8.loss_mask: 0.6720, decode.d8.loss_dice: 0.9226, loss: 27.2670 +2022-06-05 02:38:29,234 - mmseg - INFO - Iter [7200/40000] lr: 6.253e-06, eta: 4:16:17, time: 0.498, data_time: 0.059, memory: 31652, decode.loss_cls: 0.5786, decode.loss_mask: 0.6563, decode.loss_dice: 0.8670, decode.d0.loss_cls: 5.0784, decode.d0.loss_mask: 0.6840, decode.d0.loss_dice: 1.0563, decode.d1.loss_cls: 0.8728, decode.d1.loss_mask: 0.6692, decode.d1.loss_dice: 0.9210, decode.d2.loss_cls: 0.6630, decode.d2.loss_mask: 0.6580, decode.d2.loss_dice: 0.8810, decode.d3.loss_cls: 0.6176, decode.d3.loss_mask: 0.6545, decode.d3.loss_dice: 0.8670, decode.d4.loss_cls: 0.6037, decode.d4.loss_mask: 0.6555, decode.d4.loss_dice: 0.8678, decode.d5.loss_cls: 0.5926, decode.d5.loss_mask: 0.6520, decode.d5.loss_dice: 0.8675, decode.d6.loss_cls: 0.5738, decode.d6.loss_mask: 0.6571, decode.d6.loss_dice: 0.8691, decode.d7.loss_cls: 0.5799, decode.d7.loss_mask: 0.6536, decode.d7.loss_dice: 0.8653, decode.d8.loss_cls: 0.5745, decode.d8.loss_mask: 0.6579, decode.d8.loss_dice: 0.8678, loss: 26.2630 +2022-06-05 02:38:51,813 - mmseg - INFO - Iter [7250/40000] lr: 6.244e-06, eta: 4:15:50, time: 0.451, data_time: 0.007, memory: 31652, decode.loss_cls: 0.6024, decode.loss_mask: 0.6298, decode.loss_dice: 0.8787, decode.d0.loss_cls: 5.0478, decode.d0.loss_mask: 0.6594, decode.d0.loss_dice: 1.0755, decode.d1.loss_cls: 0.9026, decode.d1.loss_mask: 0.6423, decode.d1.loss_dice: 0.9340, decode.d2.loss_cls: 0.6908, decode.d2.loss_mask: 0.6373, decode.d2.loss_dice: 0.9009, decode.d3.loss_cls: 0.6215, decode.d3.loss_mask: 0.6330, decode.d3.loss_dice: 0.8912, decode.d4.loss_cls: 0.6107, decode.d4.loss_mask: 0.6297, decode.d4.loss_dice: 0.8933, decode.d5.loss_cls: 0.6001, decode.d5.loss_mask: 0.6304, decode.d5.loss_dice: 0.8850, decode.d6.loss_cls: 0.5958, decode.d6.loss_mask: 0.6300, decode.d6.loss_dice: 0.8831, decode.d7.loss_cls: 0.5931, decode.d7.loss_mask: 0.6277, decode.d7.loss_dice: 0.8899, decode.d8.loss_cls: 0.5926, decode.d8.loss_mask: 0.6308, decode.d8.loss_dice: 0.8834, loss: 26.3228 +2022-06-05 02:39:14,510 - mmseg - INFO - Iter [7300/40000] lr: 6.234e-06, eta: 4:15:23, time: 0.453, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5284, decode.loss_mask: 0.6379, decode.loss_dice: 0.8843, decode.d0.loss_cls: 5.0232, decode.d0.loss_mask: 0.6603, decode.d0.loss_dice: 1.0765, decode.d1.loss_cls: 0.8658, decode.d1.loss_mask: 0.6570, decode.d1.loss_dice: 0.9432, decode.d2.loss_cls: 0.6393, decode.d2.loss_mask: 0.6392, decode.d2.loss_dice: 0.9011, decode.d3.loss_cls: 0.5874, decode.d3.loss_mask: 0.6350, decode.d3.loss_dice: 0.8859, decode.d4.loss_cls: 0.5665, decode.d4.loss_mask: 0.6314, decode.d4.loss_dice: 0.8818, decode.d5.loss_cls: 0.5485, decode.d5.loss_mask: 0.6327, decode.d5.loss_dice: 0.8787, decode.d6.loss_cls: 0.5378, decode.d6.loss_mask: 0.6329, decode.d6.loss_dice: 0.8758, decode.d7.loss_cls: 0.5335, decode.d7.loss_mask: 0.6369, decode.d7.loss_dice: 0.8845, decode.d8.loss_cls: 0.5257, decode.d8.loss_mask: 0.6385, decode.d8.loss_dice: 0.8879, loss: 25.8577 +2022-06-05 02:39:36,974 - mmseg - INFO - Iter [7350/40000] lr: 6.225e-06, eta: 4:14:55, time: 0.451, data_time: 0.010, memory: 31652, decode.loss_cls: 0.5564, decode.loss_mask: 0.6564, decode.loss_dice: 0.8574, decode.d0.loss_cls: 4.9919, decode.d0.loss_mask: 0.6861, decode.d0.loss_dice: 1.0478, decode.d1.loss_cls: 0.8669, 
decode.d1.loss_mask: 0.6734, decode.d1.loss_dice: 0.9055, decode.d2.loss_cls: 0.6597, decode.d2.loss_mask: 0.6608, decode.d2.loss_dice: 0.8621, decode.d3.loss_cls: 0.6001, decode.d3.loss_mask: 0.6569, decode.d3.loss_dice: 0.8533, decode.d4.loss_cls: 0.5798, decode.d4.loss_mask: 0.6572, decode.d4.loss_dice: 0.8559, decode.d5.loss_cls: 0.5667, decode.d5.loss_mask: 0.6563, decode.d5.loss_dice: 0.8521, decode.d6.loss_cls: 0.5549, decode.d6.loss_mask: 0.6580, decode.d6.loss_dice: 0.8584, decode.d7.loss_cls: 0.5547, decode.d7.loss_mask: 0.6613, decode.d7.loss_dice: 0.8608, decode.d8.loss_cls: 0.5572, decode.d8.loss_mask: 0.6553, decode.d8.loss_dice: 0.8588, loss: 25.9222 +2022-06-05 02:39:59,076 - mmseg - INFO - Iter [7400/40000] lr: 6.215e-06, eta: 4:14:26, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5539, decode.loss_mask: 0.6488, decode.loss_dice: 0.9006, decode.d0.loss_cls: 4.9490, decode.d0.loss_mask: 0.6838, decode.d0.loss_dice: 1.1045, decode.d1.loss_cls: 0.8761, decode.d1.loss_mask: 0.6689, decode.d1.loss_dice: 0.9488, decode.d2.loss_cls: 0.6584, decode.d2.loss_mask: 0.6589, decode.d2.loss_dice: 0.9117, decode.d3.loss_cls: 0.6003, decode.d3.loss_mask: 0.6539, decode.d3.loss_dice: 0.9013, decode.d4.loss_cls: 0.5856, decode.d4.loss_mask: 0.6532, decode.d4.loss_dice: 0.9034, decode.d5.loss_cls: 0.5685, decode.d5.loss_mask: 0.6577, decode.d5.loss_dice: 0.9013, decode.d6.loss_cls: 0.5570, decode.d6.loss_mask: 0.6532, decode.d6.loss_dice: 0.8942, decode.d7.loss_cls: 0.5514, decode.d7.loss_mask: 0.6517, decode.d7.loss_dice: 0.8949, decode.d8.loss_cls: 0.5551, decode.d8.loss_mask: 0.6503, decode.d8.loss_dice: 0.9023, loss: 26.2986 +2022-06-05 02:40:21,778 - mmseg - INFO - Iter [7450/40000] lr: 6.206e-06, eta: 4:13:59, time: 0.454, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5911, decode.loss_mask: 0.6407, decode.loss_dice: 0.8720, decode.d0.loss_cls: 4.9284, decode.d0.loss_mask: 0.6779, decode.d0.loss_dice: 1.0670, decode.d1.loss_cls: 0.9106, decode.d1.loss_mask: 0.6577, decode.d1.loss_dice: 0.9222, decode.d2.loss_cls: 0.7024, decode.d2.loss_mask: 0.6457, decode.d2.loss_dice: 0.8808, decode.d3.loss_cls: 0.6422, decode.d3.loss_mask: 0.6393, decode.d3.loss_dice: 0.8731, decode.d4.loss_cls: 0.6135, decode.d4.loss_mask: 0.6428, decode.d4.loss_dice: 0.8726, decode.d5.loss_cls: 0.6067, decode.d5.loss_mask: 0.6467, decode.d5.loss_dice: 0.8737, decode.d6.loss_cls: 0.6005, decode.d6.loss_mask: 0.6429, decode.d6.loss_dice: 0.8674, decode.d7.loss_cls: 0.5978, decode.d7.loss_mask: 0.6450, decode.d7.loss_dice: 0.8686, decode.d8.loss_cls: 0.5883, decode.d8.loss_mask: 0.6441, decode.d8.loss_dice: 0.8725, loss: 26.2343 +2022-06-05 02:40:46,779 - mmseg - INFO - Iter [7500/40000] lr: 6.196e-06, eta: 4:13:43, time: 0.500, data_time: 0.052, memory: 31652, decode.loss_cls: 0.5747, decode.loss_mask: 0.6598, decode.loss_dice: 0.8963, decode.d0.loss_cls: 4.9182, decode.d0.loss_mask: 0.6965, decode.d0.loss_dice: 1.0977, decode.d1.loss_cls: 0.8798, decode.d1.loss_mask: 0.6843, decode.d1.loss_dice: 0.9501, decode.d2.loss_cls: 0.6775, decode.d2.loss_mask: 0.6681, decode.d2.loss_dice: 0.9175, decode.d3.loss_cls: 0.6150, decode.d3.loss_mask: 0.6682, decode.d3.loss_dice: 0.9097, decode.d4.loss_cls: 0.5960, decode.d4.loss_mask: 0.6637, decode.d4.loss_dice: 0.9042, decode.d5.loss_cls: 0.5863, decode.d5.loss_mask: 0.6599, decode.d5.loss_dice: 0.8984, decode.d6.loss_cls: 0.5780, decode.d6.loss_mask: 0.6615, decode.d6.loss_dice: 0.8924, decode.d7.loss_cls: 0.5728, decode.d7.loss_mask: 0.6636, 
decode.d7.loss_dice: 0.8999, decode.d8.loss_cls: 0.5751, decode.d8.loss_mask: 0.6599, decode.d8.loss_dice: 0.8976, loss: 26.5224 +2022-06-05 02:41:09,193 - mmseg - INFO - Iter [7550/40000] lr: 6.186e-06, eta: 4:13:15, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5433, decode.loss_mask: 0.6321, decode.loss_dice: 0.8616, decode.d0.loss_cls: 4.8541, decode.d0.loss_mask: 0.6733, decode.d0.loss_dice: 1.0644, decode.d1.loss_cls: 0.8537, decode.d1.loss_mask: 0.6530, decode.d1.loss_dice: 0.9170, decode.d2.loss_cls: 0.6632, decode.d2.loss_mask: 0.6417, decode.d2.loss_dice: 0.8775, decode.d3.loss_cls: 0.5942, decode.d3.loss_mask: 0.6353, decode.d3.loss_dice: 0.8673, decode.d4.loss_cls: 0.5683, decode.d4.loss_mask: 0.6344, decode.d4.loss_dice: 0.8610, decode.d5.loss_cls: 0.5566, decode.d5.loss_mask: 0.6340, decode.d5.loss_dice: 0.8631, decode.d6.loss_cls: 0.5548, decode.d6.loss_mask: 0.6350, decode.d6.loss_dice: 0.8546, decode.d7.loss_cls: 0.5514, decode.d7.loss_mask: 0.6278, decode.d7.loss_dice: 0.8584, decode.d8.loss_cls: 0.5487, decode.d8.loss_mask: 0.6343, decode.d8.loss_dice: 0.8584, loss: 25.5725 +2022-06-05 02:41:31,964 - mmseg - INFO - Iter [7600/40000] lr: 6.177e-06, eta: 4:12:49, time: 0.456, data_time: 0.010, memory: 31652, decode.loss_cls: 0.5333, decode.loss_mask: 0.6305, decode.loss_dice: 0.8841, decode.d0.loss_cls: 4.8289, decode.d0.loss_mask: 0.6594, decode.d0.loss_dice: 1.0688, decode.d1.loss_cls: 0.8488, decode.d1.loss_mask: 0.6455, decode.d1.loss_dice: 0.9416, decode.d2.loss_cls: 0.6367, decode.d2.loss_mask: 0.6330, decode.d2.loss_dice: 0.9020, decode.d3.loss_cls: 0.5814, decode.d3.loss_mask: 0.6317, decode.d3.loss_dice: 0.8909, decode.d4.loss_cls: 0.5544, decode.d4.loss_mask: 0.6325, decode.d4.loss_dice: 0.8890, decode.d5.loss_cls: 0.5489, decode.d5.loss_mask: 0.6292, decode.d5.loss_dice: 0.8975, decode.d6.loss_cls: 0.5390, decode.d6.loss_mask: 0.6313, decode.d6.loss_dice: 0.8892, decode.d7.loss_cls: 0.5340, decode.d7.loss_mask: 0.6325, decode.d7.loss_dice: 0.8887, decode.d8.loss_cls: 0.5278, decode.d8.loss_mask: 0.6348, decode.d8.loss_dice: 0.8933, loss: 25.6388 +2022-06-05 02:41:54,529 - mmseg - INFO - Iter [7650/40000] lr: 6.167e-06, eta: 4:12:22, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.6069, decode.loss_mask: 0.6349, decode.loss_dice: 0.9164, decode.d0.loss_cls: 4.8038, decode.d0.loss_mask: 0.6597, decode.d0.loss_dice: 1.1082, decode.d1.loss_cls: 0.9162, decode.d1.loss_mask: 0.6521, decode.d1.loss_dice: 0.9643, decode.d2.loss_cls: 0.7071, decode.d2.loss_mask: 0.6376, decode.d2.loss_dice: 0.9281, decode.d3.loss_cls: 0.6450, decode.d3.loss_mask: 0.6343, decode.d3.loss_dice: 0.9161, decode.d4.loss_cls: 0.6275, decode.d4.loss_mask: 0.6373, decode.d4.loss_dice: 0.9221, decode.d5.loss_cls: 0.6208, decode.d5.loss_mask: 0.6381, decode.d5.loss_dice: 0.9125, decode.d6.loss_cls: 0.6063, decode.d6.loss_mask: 0.6361, decode.d6.loss_dice: 0.9192, decode.d7.loss_cls: 0.6113, decode.d7.loss_mask: 0.6359, decode.d7.loss_dice: 0.9161, decode.d8.loss_cls: 0.6097, decode.d8.loss_mask: 0.6318, decode.d8.loss_dice: 0.9165, loss: 26.5720 +2022-06-05 02:42:16,945 - mmseg - INFO - Iter [7700/40000] lr: 6.158e-06, eta: 4:11:54, time: 0.448, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5166, decode.loss_mask: 0.6253, decode.loss_dice: 0.8662, decode.d0.loss_cls: 4.7473, decode.d0.loss_mask: 0.6527, decode.d0.loss_dice: 1.0377, decode.d1.loss_cls: 0.8482, decode.d1.loss_mask: 0.6351, decode.d1.loss_dice: 0.9016, decode.d2.loss_cls: 0.6375, 
decode.d2.loss_mask: 0.6266, decode.d2.loss_dice: 0.8780, decode.d3.loss_cls: 0.5746, decode.d3.loss_mask: 0.6250, decode.d3.loss_dice: 0.8690, decode.d4.loss_cls: 0.5524, decode.d4.loss_mask: 0.6243, decode.d4.loss_dice: 0.8655, decode.d5.loss_cls: 0.5362, decode.d5.loss_mask: 0.6279, decode.d5.loss_dice: 0.8664, decode.d6.loss_cls: 0.5276, decode.d6.loss_mask: 0.6268, decode.d6.loss_dice: 0.8634, decode.d7.loss_cls: 0.5211, decode.d7.loss_mask: 0.6262, decode.d7.loss_dice: 0.8640, decode.d8.loss_cls: 0.5245, decode.d8.loss_mask: 0.6249, decode.d8.loss_dice: 0.8633, loss: 25.1561 +2022-06-05 02:42:39,568 - mmseg - INFO - Iter [7750/40000] lr: 6.148e-06, eta: 4:11:28, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5779, decode.loss_mask: 0.6414, decode.loss_dice: 0.8814, decode.d0.loss_cls: 4.7508, decode.d0.loss_mask: 0.6585, decode.d0.loss_dice: 1.0769, decode.d1.loss_cls: 0.8674, decode.d1.loss_mask: 0.6480, decode.d1.loss_dice: 0.9283, decode.d2.loss_cls: 0.6664, decode.d2.loss_mask: 0.6370, decode.d2.loss_dice: 0.8974, decode.d3.loss_cls: 0.6125, decode.d3.loss_mask: 0.6366, decode.d3.loss_dice: 0.8843, decode.d4.loss_cls: 0.5966, decode.d4.loss_mask: 0.6356, decode.d4.loss_dice: 0.8780, decode.d5.loss_cls: 0.5793, decode.d5.loss_mask: 0.6407, decode.d5.loss_dice: 0.8885, decode.d6.loss_cls: 0.5686, decode.d6.loss_mask: 0.6458, decode.d6.loss_dice: 0.8789, decode.d7.loss_cls: 0.5757, decode.d7.loss_mask: 0.6474, decode.d7.loss_dice: 0.8834, decode.d8.loss_cls: 0.5784, decode.d8.loss_mask: 0.6411, decode.d8.loss_dice: 0.8775, loss: 25.8803 +2022-06-05 02:43:01,712 - mmseg - INFO - Iter [7800/40000] lr: 6.139e-06, eta: 4:10:59, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5745, decode.loss_mask: 0.6646, decode.loss_dice: 0.8871, decode.d0.loss_cls: 4.7302, decode.d0.loss_mask: 0.7014, decode.d0.loss_dice: 1.0936, decode.d1.loss_cls: 0.8782, decode.d1.loss_mask: 0.6885, decode.d1.loss_dice: 0.9519, decode.d2.loss_cls: 0.6773, decode.d2.loss_mask: 0.6673, decode.d2.loss_dice: 0.9075, decode.d3.loss_cls: 0.6266, decode.d3.loss_mask: 0.6633, decode.d3.loss_dice: 0.8901, decode.d4.loss_cls: 0.6007, decode.d4.loss_mask: 0.6613, decode.d4.loss_dice: 0.8869, decode.d5.loss_cls: 0.5854, decode.d5.loss_mask: 0.6663, decode.d5.loss_dice: 0.8892, decode.d6.loss_cls: 0.5793, decode.d6.loss_mask: 0.6632, decode.d6.loss_dice: 0.8867, decode.d7.loss_cls: 0.5739, decode.d7.loss_mask: 0.6652, decode.d7.loss_dice: 0.8855, decode.d8.loss_cls: 0.5783, decode.d8.loss_mask: 0.6616, decode.d8.loss_dice: 0.8917, loss: 26.2773 +2022-06-05 02:43:26,717 - mmseg - INFO - Iter [7850/40000] lr: 6.129e-06, eta: 4:10:42, time: 0.500, data_time: 0.061, memory: 31652, decode.loss_cls: 0.5287, decode.loss_mask: 0.6199, decode.loss_dice: 0.8675, decode.d0.loss_cls: 4.6630, decode.d0.loss_mask: 0.6463, decode.d0.loss_dice: 1.0456, decode.d1.loss_cls: 0.8424, decode.d1.loss_mask: 0.6351, decode.d1.loss_dice: 0.9101, decode.d2.loss_cls: 0.6409, decode.d2.loss_mask: 0.6258, decode.d2.loss_dice: 0.8728, decode.d3.loss_cls: 0.5731, decode.d3.loss_mask: 0.6146, decode.d3.loss_dice: 0.8635, decode.d4.loss_cls: 0.5543, decode.d4.loss_mask: 0.6176, decode.d4.loss_dice: 0.8666, decode.d5.loss_cls: 0.5464, decode.d5.loss_mask: 0.6160, decode.d5.loss_dice: 0.8636, decode.d6.loss_cls: 0.5284, decode.d6.loss_mask: 0.6198, decode.d6.loss_dice: 0.8628, decode.d7.loss_cls: 0.5328, decode.d7.loss_mask: 0.6144, decode.d7.loss_dice: 0.8651, decode.d8.loss_cls: 0.5307, decode.d8.loss_mask: 0.6181, 
decode.d8.loss_dice: 0.8680, loss: 25.0536
+2022-06-05 02:43:49,244 - mmseg - INFO - Iter [7900/40000] lr: 6.120e-06, eta: 4:10:16, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5381, decode.loss_mask: 0.6234, decode.loss_dice: 0.8570, decode.d0.loss_cls: 4.6278, decode.d0.loss_mask: 0.6567, decode.d0.loss_dice: 1.0458, decode.d1.loss_cls: 0.8230, decode.d1.loss_mask: 0.6416, decode.d1.loss_dice: 0.9040, decode.d2.loss_cls: 0.6247, decode.d2.loss_mask: 0.6293, decode.d2.loss_dice: 0.8701, decode.d3.loss_cls: 0.5723, decode.d3.loss_mask: 0.6243, decode.d3.loss_dice: 0.8672, decode.d4.loss_cls: 0.5479, decode.d4.loss_mask: 0.6257, decode.d4.loss_dice: 0.8700, decode.d5.loss_cls: 0.5454, decode.d5.loss_mask: 0.6246, decode.d5.loss_dice: 0.8607, decode.d6.loss_cls: 0.5385, decode.d6.loss_mask: 0.6256, decode.d6.loss_dice: 0.8593, decode.d7.loss_cls: 0.5353, decode.d7.loss_mask: 0.6248, decode.d7.loss_dice: 0.8630, decode.d8.loss_cls: 0.5290, decode.d8.loss_mask: 0.6222, decode.d8.loss_dice: 0.8560, loss: 25.0334
+2022-06-05 02:44:11,052 - mmseg - INFO - Iter [7950/40000] lr: 6.110e-06, eta: 4:09:46, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.5245, decode.loss_mask: 0.6249, decode.loss_dice: 0.8570, decode.d0.loss_cls: 4.6096, decode.d0.loss_mask: 0.6524, decode.d0.loss_dice: 1.0529, decode.d1.loss_cls: 0.8328, decode.d1.loss_mask: 0.6398, decode.d1.loss_dice: 0.9034, decode.d2.loss_cls: 0.6474, decode.d2.loss_mask: 0.6299, decode.d2.loss_dice: 0.8663, decode.d3.loss_cls: 0.5860, decode.d3.loss_mask: 0.6196, decode.d3.loss_dice: 0.8559, decode.d4.loss_cls: 0.5523, decode.d4.loss_mask: 0.6218, decode.d4.loss_dice: 0.8610, decode.d5.loss_cls: 0.5519, decode.d5.loss_mask: 0.6226, decode.d5.loss_dice: 0.8577, decode.d6.loss_cls: 0.5356, decode.d6.loss_mask: 0.6238, decode.d6.loss_dice: 0.8466, decode.d7.loss_cls: 0.5278, decode.d7.loss_mask: 0.6230, decode.d7.loss_dice: 0.8567, decode.d8.loss_cls: 0.5237, decode.d8.loss_mask: 0.6246, decode.d8.loss_dice: 0.8587, loss: 24.9901
+2022-06-05 02:44:33,631 - mmseg - INFO - Saving checkpoint at 8000 iterations
+2022-06-05 02:44:36,065 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py
+2022-06-05 02:44:36,065 - mmseg - INFO - Iter [8000/40000] lr: 6.101e-06, eta: 4:09:29, time: 0.500, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5418, decode.loss_mask: 0.6234, decode.loss_dice: 0.8692, decode.d0.loss_cls: 4.5750, decode.d0.loss_mask: 0.6575, decode.d0.loss_dice: 1.0638, decode.d1.loss_cls: 0.8258, decode.d1.loss_mask: 0.6459, decode.d1.loss_dice: 0.9236, decode.d2.loss_cls: 0.6419, decode.d2.loss_mask: 0.6313, decode.d2.loss_dice: 0.8856, decode.d3.loss_cls: 0.5781, decode.d3.loss_mask: 0.6287, decode.d3.loss_dice: 0.8757, decode.d4.loss_cls: 0.5592, decode.d4.loss_mask: 0.6324, decode.d4.loss_dice: 0.8838, decode.d5.loss_cls: 0.5553, decode.d5.loss_mask: 0.6293, decode.d5.loss_dice: 0.8711, decode.d6.loss_cls: 0.5400, decode.d6.loss_mask: 0.6244, decode.d6.loss_dice: 0.8735, decode.d7.loss_cls: 0.5450, decode.d7.loss_mask: 0.6249, decode.d7.loss_dice: 0.8712, decode.d8.loss_cls: 0.5435, decode.d8.loss_mask: 0.6293, decode.d8.loss_dice: 0.8735, loss: 25.2240
+2022-06-05 02:47:13,641 - mmseg - INFO - per class results:
+2022-06-05 02:47:13,647 - mmseg - INFO -
++-------------+-------+-------+
+|    Class    |  IoU  |  Acc  |
++-------------+-------+-------+
+| aeroplane   |  89.2 | 92.41 |
+| bag         | 25.82 | 30.18 |
+| bed         |  8.91 |  9.48 |
+| bedclothes  |  42.5 | 65.05 |
+| bench       |  9.98 | 10.56 |
+| bicycle     | 82.44 | 90.86 |
+| bird        | 93.46 | 96.77 |
+| boat        | 81.06 | 88.16 |
+| book        | 52.29 | 63.76 |
+| bottle      | 87.05 |  93.2 |
+| building    | 64.63 | 78.33 |
+| bus         | 93.66 | 95.96 |
+| cabinet     | 39.59 | 67.13 |
+| car         | 89.62 | 95.45 |
+| cat         | 93.89 | 97.85 |
+| ceiling     | 57.56 | 65.46 |
+| chair       | 60.78 | 82.18 |
+| cloth       | 26.92 | 43.59 |
+| computer    | 33.97 | 47.76 |
+| cow         | 94.66 | 96.33 |
+| cup         |  41.6 | 64.43 |
+| curtain     | 57.51 |  70.3 |
+| dog         | 91.26 | 96.86 |
+| door        | 30.01 | 53.71 |
+| fence       | 45.52 | 62.81 |
+| floor       | 71.04 | 81.73 |
+| flower      | 39.12 | 55.18 |
+| food        | 33.63 | 41.92 |
+| grass       | 81.68 |  90.9 |
+| ground      | 58.34 |  75.6 |
+| horse       | 94.04 | 96.56 |
+| keyboard    | 77.32 | 81.03 |
+| light       | 57.47 | 67.68 |
+| motorbike   | 89.68 | 94.96 |
+| mountain    | 55.98 | 75.31 |
+| mouse       | 71.98 | 81.35 |
+| person      | 89.88 | 96.31 |
+| plate       | 26.66 | 34.87 |
+| platform    |  49.8 | 64.03 |
+| pottedplant | 79.63 | 86.26 |
+| road        | 55.58 | 69.26 |
+| rock        | 48.74 | 62.42 |
+| sheep       |  94.2 |  96.7 |
+| shelves     | 37.45 | 58.77 |
+| sidewalk    | 29.91 | 48.83 |
+| sign        | 45.54 |  58.7 |
+| sky         | 94.66 | 97.36 |
+| snow        | 73.41 | 84.78 |
+| sofa        |  56.6 | 61.56 |
+| table       | 69.56 | 82.37 |
+| track       | 66.65 | 75.48 |
+| train       |  92.4 | 95.96 |
+| tree        |  80.8 | 89.45 |
+| truck       | 39.79 | 46.95 |
+| tvmonitor   | 84.18 | 89.51 |
+| wall        |  70.1 |  81.3 |
+| water       | 91.21 | 94.57 |
+| window      | 44.17 | 61.48 |
+| wood        | 29.69 | 44.81 |
++-------------+-------+-------+
+2022-06-05 02:47:13,647 - mmseg - INFO - Summary:
+2022-06-05 02:47:13,648 - mmseg - INFO -
++-------+-------+-------+
+|  aAcc |  mIoU |  mAcc |
++-------+-------+-------+
+| 85.04 | 62.28 | 72.59 |
++-------+-------+-------+
+2022-06-05 02:47:13,662 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_4000.pth was removed
+2022-06-05 02:47:16,126 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_8000.pth.
+2022-06-05 02:47:16,126 - mmseg - INFO - Best mIoU is 0.6228 at 8000 iter.
+2022-06-05 02:47:16,147 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:47:16,147 - mmseg - INFO - Iter(val) [638] aAcc: 0.8504, mIoU: 0.6228, mAcc: 0.7259, IoU.aeroplane: 0.8920, IoU.bag: 0.2582, IoU.bed: 0.0891, IoU.bedclothes: 0.4250, IoU.bench: 0.0998, IoU.bicycle: 0.8244, IoU.bird: 0.9346, IoU.boat: 0.8106, IoU.book: 0.5229, IoU.bottle: 0.8705, IoU.building: 0.6463, IoU.bus: 0.9366, IoU.cabinet: 0.3959, IoU.car: 0.8962, IoU.cat: 0.9389, IoU.ceiling: 0.5756, IoU.chair: 0.6078, IoU.cloth: 0.2692, IoU.computer: 0.3397, IoU.cow: 0.9466, IoU.cup: 0.4160, IoU.curtain: 0.5751, IoU.dog: 0.9126, IoU.door: 0.3001, IoU.fence: 0.4552, IoU.floor: 0.7104, IoU.flower: 0.3912, IoU.food: 0.3363, IoU.grass: 0.8168, IoU.ground: 0.5834, IoU.horse: 0.9404, IoU.keyboard: 0.7732, IoU.light: 0.5747, IoU.motorbike: 0.8968, IoU.mountain: 0.5598, IoU.mouse: 0.7198, IoU.person: 0.8988, IoU.plate: 0.2666, IoU.platform: 0.4980, IoU.pottedplant: 0.7963, IoU.road: 0.5558, IoU.rock: 0.4874, IoU.sheep: 0.9420, IoU.shelves: 0.3745, IoU.sidewalk: 0.2991, IoU.sign: 0.4554, IoU.sky: 0.9466, IoU.snow: 0.7341, IoU.sofa: 0.5660, IoU.table: 0.6956, IoU.track: 0.6665, IoU.train: 0.9240, IoU.tree: 0.8080, IoU.truck: 0.3979, IoU.tvmonitor: 0.8418, IoU.wall: 0.7010, IoU.water: 0.9121, IoU.window: 0.4417, IoU.wood: 0.2969, Acc.aeroplane: 0.9241, Acc.bag: 0.3018, Acc.bed: 0.0948, Acc.bedclothes: 0.6505, Acc.bench: 0.1056, Acc.bicycle: 0.9086, Acc.bird: 0.9677, Acc.boat: 0.8816, Acc.book: 0.6376, Acc.bottle: 0.9320, Acc.building: 0.7833, Acc.bus: 0.9596, Acc.cabinet: 0.6713, Acc.car: 0.9545, Acc.cat: 0.9785, Acc.ceiling: 0.6546, Acc.chair: 0.8218, Acc.cloth: 0.4359, Acc.computer: 0.4776, Acc.cow: 0.9633, Acc.cup: 0.6443, Acc.curtain: 0.7030, Acc.dog: 0.9686, Acc.door: 0.5371, Acc.fence: 0.6281, Acc.floor: 0.8173, Acc.flower: 0.5518, Acc.food: 0.4192, Acc.grass: 0.9090, Acc.ground: 0.7560, Acc.horse: 0.9656, Acc.keyboard: 0.8103, Acc.light: 0.6768, Acc.motorbike: 0.9496, Acc.mountain: 0.7531, Acc.mouse: 0.8135, Acc.person: 0.9631, Acc.plate: 0.3487, Acc.platform: 0.6403, Acc.pottedplant: 0.8626, Acc.road: 0.6926, Acc.rock: 0.6242, Acc.sheep: 0.9670, Acc.shelves: 0.5877, Acc.sidewalk: 0.4883, Acc.sign: 0.5870, Acc.sky: 0.9736, Acc.snow: 0.8478, Acc.sofa: 0.6156, Acc.table: 0.8237, Acc.track: 0.7548, Acc.train: 0.9596, Acc.tree: 0.8945, Acc.truck: 0.4695, Acc.tvmonitor: 0.8951, Acc.wall: 0.8130, Acc.water: 0.9457, Acc.window: 0.6148, Acc.wood: 0.4481 +2022-06-05 02:47:38,472 - mmseg - INFO - Iter [8050/40000] lr: 6.091e-06, eta: 4:19:37, time: 3.648, data_time: 3.210, memory: 31652, decode.loss_cls: 0.5081, decode.loss_mask: 0.6245, decode.loss_dice: 0.8480, decode.d0.loss_cls: 4.5570, decode.d0.loss_mask: 0.6496, decode.d0.loss_dice: 1.0201, decode.d1.loss_cls: 0.8063, decode.d1.loss_mask: 0.6364, decode.d1.loss_dice: 0.8936, decode.d2.loss_cls: 0.6135, decode.d2.loss_mask: 0.6295, decode.d2.loss_dice: 0.8624, decode.d3.loss_cls: 0.5590, decode.d3.loss_mask: 0.6284, decode.d3.loss_dice: 0.8550, decode.d4.loss_cls: 0.5362, decode.d4.loss_mask: 0.6254, decode.d4.loss_dice: 0.8524, decode.d5.loss_cls: 0.5300, decode.d5.loss_mask: 0.6274, decode.d5.loss_dice: 0.8486, decode.d6.loss_cls: 0.5192, decode.d6.loss_mask: 0.6205, decode.d6.loss_dice: 0.8445, decode.d7.loss_cls: 0.5100, decode.d7.loss_mask: 0.6265, decode.d7.loss_dice: 0.8480, decode.d8.loss_cls: 0.5097, decode.d8.loss_mask: 0.6236, decode.d8.loss_dice: 0.8482, loss: 24.6618 +2022-06-05 02:48:00,657 - mmseg - INFO - Iter 
[8100/40000] lr: 6.082e-06, eta: 4:19:04, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5418, decode.loss_mask: 0.6615, decode.loss_dice: 0.8747, decode.d0.loss_cls: 4.5506, decode.d0.loss_mask: 0.6906, decode.d0.loss_dice: 1.0761, decode.d1.loss_cls: 0.8531, decode.d1.loss_mask: 0.6725, decode.d1.loss_dice: 0.9353, decode.d2.loss_cls: 0.6517, decode.d2.loss_mask: 0.6629, decode.d2.loss_dice: 0.8950, decode.d3.loss_cls: 0.5889, decode.d3.loss_mask: 0.6632, decode.d3.loss_dice: 0.8871, decode.d4.loss_cls: 0.5637, decode.d4.loss_mask: 0.6622, decode.d4.loss_dice: 0.8811, decode.d5.loss_cls: 0.5543, decode.d5.loss_mask: 0.6617, decode.d5.loss_dice: 0.8778, decode.d6.loss_cls: 0.5476, decode.d6.loss_mask: 0.6614, decode.d6.loss_dice: 0.8718, decode.d7.loss_cls: 0.5410, decode.d7.loss_mask: 0.6603, decode.d7.loss_dice: 0.8830, decode.d8.loss_cls: 0.5482, decode.d8.loss_mask: 0.6620, decode.d8.loss_dice: 0.8776, loss: 25.6590 +2022-06-05 02:48:25,102 - mmseg - INFO - Iter [8150/40000] lr: 6.072e-06, eta: 4:18:40, time: 0.489, data_time: 0.060, memory: 31652, decode.loss_cls: 0.5107, decode.loss_mask: 0.6045, decode.loss_dice: 0.8071, decode.d0.loss_cls: 4.4923, decode.d0.loss_mask: 0.6536, decode.d0.loss_dice: 0.9988, decode.d1.loss_cls: 0.8243, decode.d1.loss_mask: 0.6263, decode.d1.loss_dice: 0.8545, decode.d2.loss_cls: 0.6108, decode.d2.loss_mask: 0.6193, decode.d2.loss_dice: 0.8232, decode.d3.loss_cls: 0.5486, decode.d3.loss_mask: 0.6134, decode.d3.loss_dice: 0.8091, decode.d4.loss_cls: 0.5335, decode.d4.loss_mask: 0.6104, decode.d4.loss_dice: 0.8102, decode.d5.loss_cls: 0.5170, decode.d5.loss_mask: 0.6064, decode.d5.loss_dice: 0.8111, decode.d6.loss_cls: 0.5138, decode.d6.loss_mask: 0.6045, decode.d6.loss_dice: 0.8066, decode.d7.loss_cls: 0.5131, decode.d7.loss_mask: 0.6049, decode.d7.loss_dice: 0.8090, decode.d8.loss_cls: 0.5074, decode.d8.loss_mask: 0.6091, decode.d8.loss_dice: 0.8067, loss: 24.0603 +2022-06-05 02:48:47,010 - mmseg - INFO - Iter [8200/40000] lr: 6.063e-06, eta: 4:18:06, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5076, decode.loss_mask: 0.6224, decode.loss_dice: 0.8657, decode.d0.loss_cls: 4.4570, decode.d0.loss_mask: 0.6498, decode.d0.loss_dice: 1.0467, decode.d1.loss_cls: 0.7983, decode.d1.loss_mask: 0.6345, decode.d1.loss_dice: 0.9150, decode.d2.loss_cls: 0.6016, decode.d2.loss_mask: 0.6236, decode.d2.loss_dice: 0.8783, decode.d3.loss_cls: 0.5416, decode.d3.loss_mask: 0.6210, decode.d3.loss_dice: 0.8651, decode.d4.loss_cls: 0.5202, decode.d4.loss_mask: 0.6228, decode.d4.loss_dice: 0.8697, decode.d5.loss_cls: 0.5086, decode.d5.loss_mask: 0.6221, decode.d5.loss_dice: 0.8624, decode.d6.loss_cls: 0.5055, decode.d6.loss_mask: 0.6190, decode.d6.loss_dice: 0.8625, decode.d7.loss_cls: 0.5036, decode.d7.loss_mask: 0.6240, decode.d7.loss_dice: 0.8641, decode.d8.loss_cls: 0.5042, decode.d8.loss_mask: 0.6185, decode.d8.loss_dice: 0.8617, loss: 24.5971 +2022-06-05 02:49:08,954 - mmseg - INFO - Iter [8250/40000] lr: 6.053e-06, eta: 4:17:32, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5402, decode.loss_mask: 0.6388, decode.loss_dice: 0.8768, decode.d0.loss_cls: 4.4524, decode.d0.loss_mask: 0.6771, decode.d0.loss_dice: 1.0711, decode.d1.loss_cls: 0.8398, decode.d1.loss_mask: 0.6603, decode.d1.loss_dice: 0.9317, decode.d2.loss_cls: 0.6355, decode.d2.loss_mask: 0.6445, decode.d2.loss_dice: 0.8944, decode.d3.loss_cls: 0.5853, decode.d3.loss_mask: 0.6404, decode.d3.loss_dice: 0.8816, decode.d4.loss_cls: 0.5745, 
decode.d4.loss_mask: 0.6395, decode.d4.loss_dice: 0.8806, decode.d5.loss_cls: 0.5521, decode.d5.loss_mask: 0.6418, decode.d5.loss_dice: 0.8788, decode.d6.loss_cls: 0.5414, decode.d6.loss_mask: 0.6418, decode.d6.loss_dice: 0.8782, decode.d7.loss_cls: 0.5462, decode.d7.loss_mask: 0.6381, decode.d7.loss_dice: 0.8784, decode.d8.loss_cls: 0.5397, decode.d8.loss_mask: 0.6412, decode.d8.loss_dice: 0.8791, loss: 25.3211 +2022-06-05 02:49:30,890 - mmseg - INFO - Iter [8300/40000] lr: 6.043e-06, eta: 4:16:59, time: 0.439, data_time: 0.009, memory: 31652, decode.loss_cls: 0.5127, decode.loss_mask: 0.5957, decode.loss_dice: 0.8173, decode.d0.loss_cls: 4.3748, decode.d0.loss_mask: 0.6327, decode.d0.loss_dice: 1.0101, decode.d1.loss_cls: 0.8221, decode.d1.loss_mask: 0.6123, decode.d1.loss_dice: 0.8747, decode.d2.loss_cls: 0.6280, decode.d2.loss_mask: 0.6018, decode.d2.loss_dice: 0.8371, decode.d3.loss_cls: 0.5635, decode.d3.loss_mask: 0.5993, decode.d3.loss_dice: 0.8199, decode.d4.loss_cls: 0.5443, decode.d4.loss_mask: 0.5989, decode.d4.loss_dice: 0.8188, decode.d5.loss_cls: 0.5269, decode.d5.loss_mask: 0.5936, decode.d5.loss_dice: 0.8214, decode.d6.loss_cls: 0.5162, decode.d6.loss_mask: 0.5961, decode.d6.loss_dice: 0.8168, decode.d7.loss_cls: 0.5174, decode.d7.loss_mask: 0.5927, decode.d7.loss_dice: 0.8120, decode.d8.loss_cls: 0.5109, decode.d8.loss_mask: 0.5954, decode.d8.loss_dice: 0.8149, loss: 23.9783 +2022-06-05 02:49:52,727 - mmseg - INFO - Iter [8350/40000] lr: 6.034e-06, eta: 4:16:25, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4977, decode.loss_mask: 0.6082, decode.loss_dice: 0.8001, decode.d0.loss_cls: 4.3283, decode.d0.loss_mask: 0.6438, decode.d0.loss_dice: 0.9735, decode.d1.loss_cls: 0.7576, decode.d1.loss_mask: 0.6348, decode.d1.loss_dice: 0.8554, decode.d2.loss_cls: 0.5789, decode.d2.loss_mask: 0.6169, decode.d2.loss_dice: 0.8186, decode.d3.loss_cls: 0.5345, decode.d3.loss_mask: 0.6129, decode.d3.loss_dice: 0.8057, decode.d4.loss_cls: 0.5179, decode.d4.loss_mask: 0.6149, decode.d4.loss_dice: 0.8067, decode.d5.loss_cls: 0.5026, decode.d5.loss_mask: 0.6137, decode.d5.loss_dice: 0.8009, decode.d6.loss_cls: 0.4958, decode.d6.loss_mask: 0.6143, decode.d6.loss_dice: 0.8053, decode.d7.loss_cls: 0.4989, decode.d7.loss_mask: 0.6116, decode.d7.loss_dice: 0.8071, decode.d8.loss_cls: 0.4892, decode.d8.loss_mask: 0.6112, decode.d8.loss_dice: 0.8055, loss: 23.6624 +2022-06-05 02:50:14,737 - mmseg - INFO - Iter [8400/40000] lr: 6.024e-06, eta: 4:15:52, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5242, decode.loss_mask: 0.6221, decode.loss_dice: 0.8726, decode.d0.loss_cls: 4.3556, decode.d0.loss_mask: 0.6508, decode.d0.loss_dice: 1.0525, decode.d1.loss_cls: 0.8226, decode.d1.loss_mask: 0.6429, decode.d1.loss_dice: 0.9240, decode.d2.loss_cls: 0.6328, decode.d2.loss_mask: 0.6321, decode.d2.loss_dice: 0.8819, decode.d3.loss_cls: 0.5691, decode.d3.loss_mask: 0.6251, decode.d3.loss_dice: 0.8757, decode.d4.loss_cls: 0.5537, decode.d4.loss_mask: 0.6249, decode.d4.loss_dice: 0.8839, decode.d5.loss_cls: 0.5462, decode.d5.loss_mask: 0.6239, decode.d5.loss_dice: 0.8763, decode.d6.loss_cls: 0.5345, decode.d6.loss_mask: 0.6230, decode.d6.loss_dice: 0.8736, decode.d7.loss_cls: 0.5342, decode.d7.loss_mask: 0.6237, decode.d7.loss_dice: 0.8744, decode.d8.loss_cls: 0.5246, decode.d8.loss_mask: 0.6237, decode.d8.loss_dice: 0.8739, loss: 24.8784 +2022-06-05 02:50:39,630 - mmseg - INFO - Iter [8450/40000] lr: 6.015e-06, eta: 4:15:30, time: 0.498, data_time: 0.059, memory: 31652, 
decode.loss_cls: 0.5252, decode.loss_mask: 0.6051, decode.loss_dice: 0.8344, decode.d0.loss_cls: 4.3336, decode.d0.loss_mask: 0.6466, decode.d0.loss_dice: 1.0264, decode.d1.loss_cls: 0.8047, decode.d1.loss_mask: 0.6215, decode.d1.loss_dice: 0.8950, decode.d2.loss_cls: 0.5998, decode.d2.loss_mask: 0.6130, decode.d2.loss_dice: 0.8601, decode.d3.loss_cls: 0.5382, decode.d3.loss_mask: 0.6090, decode.d3.loss_dice: 0.8481, decode.d4.loss_cls: 0.5280, decode.d4.loss_mask: 0.6068, decode.d4.loss_dice: 0.8487, decode.d5.loss_cls: 0.5235, decode.d5.loss_mask: 0.6068, decode.d5.loss_dice: 0.8436, decode.d6.loss_cls: 0.5210, decode.d6.loss_mask: 0.6125, decode.d6.loss_dice: 0.8365, decode.d7.loss_cls: 0.5140, decode.d7.loss_mask: 0.6079, decode.d7.loss_dice: 0.8365, decode.d8.loss_cls: 0.5187, decode.d8.loss_mask: 0.6095, decode.d8.loss_dice: 0.8360, loss: 24.2110 +2022-06-05 02:51:01,874 - mmseg - INFO - Iter [8500/40000] lr: 6.005e-06, eta: 4:14:58, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4940, decode.loss_mask: 0.6171, decode.loss_dice: 0.8315, decode.d0.loss_cls: 4.2857, decode.d0.loss_mask: 0.6510, decode.d0.loss_dice: 1.0087, decode.d1.loss_cls: 0.7876, decode.d1.loss_mask: 0.6360, decode.d1.loss_dice: 0.8777, decode.d2.loss_cls: 0.5886, decode.d2.loss_mask: 0.6256, decode.d2.loss_dice: 0.8474, decode.d3.loss_cls: 0.5285, decode.d3.loss_mask: 0.6194, decode.d3.loss_dice: 0.8376, decode.d4.loss_cls: 0.5098, decode.d4.loss_mask: 0.6156, decode.d4.loss_dice: 0.8335, decode.d5.loss_cls: 0.4985, decode.d5.loss_mask: 0.6154, decode.d5.loss_dice: 0.8303, decode.d6.loss_cls: 0.4897, decode.d6.loss_mask: 0.6166, decode.d6.loss_dice: 0.8258, decode.d7.loss_cls: 0.4892, decode.d7.loss_mask: 0.6119, decode.d7.loss_dice: 0.8298, decode.d8.loss_cls: 0.4872, decode.d8.loss_mask: 0.6130, decode.d8.loss_dice: 0.8300, loss: 23.9327 +2022-06-05 02:51:24,727 - mmseg - INFO - Iter [8550/40000] lr: 5.996e-06, eta: 4:14:28, time: 0.457, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4699, decode.loss_mask: 0.6189, decode.loss_dice: 0.8497, decode.d0.loss_cls: 4.2406, decode.d0.loss_mask: 0.6424, decode.d0.loss_dice: 1.0260, decode.d1.loss_cls: 0.7553, decode.d1.loss_mask: 0.6358, decode.d1.loss_dice: 0.8959, decode.d2.loss_cls: 0.5762, decode.d2.loss_mask: 0.6271, decode.d2.loss_dice: 0.8657, decode.d3.loss_cls: 0.5278, decode.d3.loss_mask: 0.6187, decode.d3.loss_dice: 0.8529, decode.d4.loss_cls: 0.5032, decode.d4.loss_mask: 0.6222, decode.d4.loss_dice: 0.8544, decode.d5.loss_cls: 0.4963, decode.d5.loss_mask: 0.6206, decode.d5.loss_dice: 0.8552, decode.d6.loss_cls: 0.4822, decode.d6.loss_mask: 0.6180, decode.d6.loss_dice: 0.8450, decode.d7.loss_cls: 0.4785, decode.d7.loss_mask: 0.6161, decode.d7.loss_dice: 0.8478, decode.d8.loss_cls: 0.4695, decode.d8.loss_mask: 0.6167, decode.d8.loss_dice: 0.8486, loss: 23.9774 +2022-06-05 02:51:46,967 - mmseg - INFO - Iter [8600/40000] lr: 5.986e-06, eta: 4:13:57, time: 0.445, data_time: 0.007, memory: 31652, decode.loss_cls: 0.5282, decode.loss_mask: 0.6060, decode.loss_dice: 0.8379, decode.d0.loss_cls: 4.2135, decode.d0.loss_mask: 0.6372, decode.d0.loss_dice: 1.0156, decode.d1.loss_cls: 0.7926, decode.d1.loss_mask: 0.6272, decode.d1.loss_dice: 0.8873, decode.d2.loss_cls: 0.6060, decode.d2.loss_mask: 0.6157, decode.d2.loss_dice: 0.8572, decode.d3.loss_cls: 0.5587, decode.d3.loss_mask: 0.6122, decode.d3.loss_dice: 0.8402, decode.d4.loss_cls: 0.5438, decode.d4.loss_mask: 0.6086, decode.d4.loss_dice: 0.8446, decode.d5.loss_cls: 0.5302, 
decode.d5.loss_mask: 0.6097, decode.d5.loss_dice: 0.8431, decode.d6.loss_cls: 0.5269, decode.d6.loss_mask: 0.6086, decode.d6.loss_dice: 0.8374, decode.d7.loss_cls: 0.5299, decode.d7.loss_mask: 0.6077, decode.d7.loss_dice: 0.8348, decode.d8.loss_cls: 0.5242, decode.d8.loss_mask: 0.6084, decode.d8.loss_dice: 0.8381, loss: 24.1314 +2022-06-05 02:52:09,628 - mmseg - INFO - Iter [8650/40000] lr: 5.977e-06, eta: 4:13:27, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5533, decode.loss_mask: 0.6189, decode.loss_dice: 0.8442, decode.d0.loss_cls: 4.2062, decode.d0.loss_mask: 0.6565, decode.d0.loss_dice: 1.0404, decode.d1.loss_cls: 0.8330, decode.d1.loss_mask: 0.6399, decode.d1.loss_dice: 0.8976, decode.d2.loss_cls: 0.6403, decode.d2.loss_mask: 0.6223, decode.d2.loss_dice: 0.8543, decode.d3.loss_cls: 0.5875, decode.d3.loss_mask: 0.6203, decode.d3.loss_dice: 0.8361, decode.d4.loss_cls: 0.5684, decode.d4.loss_mask: 0.6209, decode.d4.loss_dice: 0.8438, decode.d5.loss_cls: 0.5593, decode.d5.loss_mask: 0.6223, decode.d5.loss_dice: 0.8382, decode.d6.loss_cls: 0.5482, decode.d6.loss_mask: 0.6203, decode.d6.loss_dice: 0.8417, decode.d7.loss_cls: 0.5420, decode.d7.loss_mask: 0.6197, decode.d7.loss_dice: 0.8441, decode.d8.loss_cls: 0.5452, decode.d8.loss_mask: 0.6205, decode.d8.loss_dice: 0.8392, loss: 24.5248 +2022-06-05 02:52:32,027 - mmseg - INFO - Iter [8700/40000] lr: 5.967e-06, eta: 4:12:56, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.5021, decode.loss_mask: 0.6125, decode.loss_dice: 0.8244, decode.d0.loss_cls: 4.1570, decode.d0.loss_mask: 0.6507, decode.d0.loss_dice: 1.0124, decode.d1.loss_cls: 0.7562, decode.d1.loss_mask: 0.6363, decode.d1.loss_dice: 0.8882, decode.d2.loss_cls: 0.5847, decode.d2.loss_mask: 0.6205, decode.d2.loss_dice: 0.8435, decode.d3.loss_cls: 0.5394, decode.d3.loss_mask: 0.6152, decode.d3.loss_dice: 0.8268, decode.d4.loss_cls: 0.5178, decode.d4.loss_mask: 0.6188, decode.d4.loss_dice: 0.8298, decode.d5.loss_cls: 0.5120, decode.d5.loss_mask: 0.6157, decode.d5.loss_dice: 0.8291, decode.d6.loss_cls: 0.5025, decode.d6.loss_mask: 0.6173, decode.d6.loss_dice: 0.8243, decode.d7.loss_cls: 0.4974, decode.d7.loss_mask: 0.6198, decode.d7.loss_dice: 0.8246, decode.d8.loss_cls: 0.4936, decode.d8.loss_mask: 0.6175, decode.d8.loss_dice: 0.8249, loss: 23.8152 +2022-06-05 02:52:56,917 - mmseg - INFO - Iter [8750/40000] lr: 5.958e-06, eta: 4:12:34, time: 0.498, data_time: 0.056, memory: 31652, decode.loss_cls: 0.4951, decode.loss_mask: 0.5849, decode.loss_dice: 0.8312, decode.d0.loss_cls: 4.1107, decode.d0.loss_mask: 0.6246, decode.d0.loss_dice: 1.0170, decode.d1.loss_cls: 0.7601, decode.d1.loss_mask: 0.6087, decode.d1.loss_dice: 0.8858, decode.d2.loss_cls: 0.5891, decode.d2.loss_mask: 0.5898, decode.d2.loss_dice: 0.8493, decode.d3.loss_cls: 0.5438, decode.d3.loss_mask: 0.5881, decode.d3.loss_dice: 0.8302, decode.d4.loss_cls: 0.5195, decode.d4.loss_mask: 0.5891, decode.d4.loss_dice: 0.8303, decode.d5.loss_cls: 0.5054, decode.d5.loss_mask: 0.5863, decode.d5.loss_dice: 0.8317, decode.d6.loss_cls: 0.4975, decode.d6.loss_mask: 0.5884, decode.d6.loss_dice: 0.8301, decode.d7.loss_cls: 0.4914, decode.d7.loss_mask: 0.5884, decode.d7.loss_dice: 0.8361, decode.d8.loss_cls: 0.4870, decode.d8.loss_mask: 0.5887, decode.d8.loss_dice: 0.8293, loss: 23.5074 +2022-06-05 02:53:19,880 - mmseg - INFO - Iter [8800/40000] lr: 5.948e-06, eta: 4:12:05, time: 0.459, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4993, decode.loss_mask: 0.6163, decode.loss_dice: 0.8354, 
decode.d0.loss_cls: 4.1028, decode.d0.loss_mask: 0.6628, decode.d0.loss_dice: 1.0225, decode.d1.loss_cls: 0.7666, decode.d1.loss_mask: 0.6339, decode.d1.loss_dice: 0.8799, decode.d2.loss_cls: 0.5868, decode.d2.loss_mask: 0.6221, decode.d2.loss_dice: 0.8467, decode.d3.loss_cls: 0.5304, decode.d3.loss_mask: 0.6150, decode.d3.loss_dice: 0.8284, decode.d4.loss_cls: 0.5206, decode.d4.loss_mask: 0.6147, decode.d4.loss_dice: 0.8335, decode.d5.loss_cls: 0.5056, decode.d5.loss_mask: 0.6148, decode.d5.loss_dice: 0.8319, decode.d6.loss_cls: 0.4957, decode.d6.loss_mask: 0.6174, decode.d6.loss_dice: 0.8313, decode.d7.loss_cls: 0.4918, decode.d7.loss_mask: 0.6167, decode.d7.loss_dice: 0.8302, decode.d8.loss_cls: 0.4991, decode.d8.loss_mask: 0.6188, decode.d8.loss_dice: 0.8325, loss: 23.8036 +2022-06-05 02:53:42,152 - mmseg - INFO - Iter [8850/40000] lr: 5.939e-06, eta: 4:11:34, time: 0.446, data_time: 0.009, memory: 31652, decode.loss_cls: 0.5217, decode.loss_mask: 0.5935, decode.loss_dice: 0.8400, decode.d0.loss_cls: 4.0870, decode.d0.loss_mask: 0.6385, decode.d0.loss_dice: 1.0338, decode.d1.loss_cls: 0.7927, decode.d1.loss_mask: 0.6160, decode.d1.loss_dice: 0.8955, decode.d2.loss_cls: 0.6059, decode.d2.loss_mask: 0.6018, decode.d2.loss_dice: 0.8569, decode.d3.loss_cls: 0.5526, decode.d3.loss_mask: 0.5944, decode.d3.loss_dice: 0.8426, decode.d4.loss_cls: 0.5418, decode.d4.loss_mask: 0.5955, decode.d4.loss_dice: 0.8418, decode.d5.loss_cls: 0.5296, decode.d5.loss_mask: 0.5934, decode.d5.loss_dice: 0.8417, decode.d6.loss_cls: 0.5199, decode.d6.loss_mask: 0.5923, decode.d6.loss_dice: 0.8337, decode.d7.loss_cls: 0.5253, decode.d7.loss_mask: 0.5921, decode.d7.loss_dice: 0.8356, decode.d8.loss_cls: 0.5149, decode.d8.loss_mask: 0.5913, decode.d8.loss_dice: 0.8407, loss: 23.8626 +2022-06-05 02:54:04,761 - mmseg - INFO - Iter [8900/40000] lr: 5.929e-06, eta: 4:11:04, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4347, decode.loss_mask: 0.6089, decode.loss_dice: 0.7844, decode.d0.loss_cls: 3.9902, decode.d0.loss_mask: 0.6518, decode.d0.loss_dice: 0.9622, decode.d1.loss_cls: 0.6694, decode.d1.loss_mask: 0.6319, decode.d1.loss_dice: 0.8490, decode.d2.loss_cls: 0.5054, decode.d2.loss_mask: 0.6147, decode.d2.loss_dice: 0.7958, decode.d3.loss_cls: 0.4696, decode.d3.loss_mask: 0.6097, decode.d3.loss_dice: 0.7872, decode.d4.loss_cls: 0.4565, decode.d4.loss_mask: 0.6097, decode.d4.loss_dice: 0.7836, decode.d5.loss_cls: 0.4489, decode.d5.loss_mask: 0.6101, decode.d5.loss_dice: 0.7787, decode.d6.loss_cls: 0.4412, decode.d6.loss_mask: 0.6111, decode.d6.loss_dice: 0.7850, decode.d7.loss_cls: 0.4422, decode.d7.loss_mask: 0.6093, decode.d7.loss_dice: 0.7855, decode.d8.loss_cls: 0.4401, decode.d8.loss_mask: 0.6087, decode.d8.loss_dice: 0.7898, loss: 22.5653 +2022-06-05 02:54:27,271 - mmseg - INFO - Iter [8950/40000] lr: 5.920e-06, eta: 4:10:34, time: 0.450, data_time: 0.007, memory: 31652, decode.loss_cls: 0.5128, decode.loss_mask: 0.5995, decode.loss_dice: 0.8554, decode.d0.loss_cls: 4.0304, decode.d0.loss_mask: 0.6311, decode.d0.loss_dice: 1.0384, decode.d1.loss_cls: 0.7864, decode.d1.loss_mask: 0.6130, decode.d1.loss_dice: 0.9077, decode.d2.loss_cls: 0.5973, decode.d2.loss_mask: 0.6019, decode.d2.loss_dice: 0.8719, decode.d3.loss_cls: 0.5422, decode.d3.loss_mask: 0.5982, decode.d3.loss_dice: 0.8590, decode.d4.loss_cls: 0.5322, decode.d4.loss_mask: 0.6011, decode.d4.loss_dice: 0.8600, decode.d5.loss_cls: 0.5259, decode.d5.loss_mask: 0.5993, decode.d5.loss_dice: 0.8568, decode.d6.loss_cls: 0.5095, 
decode.d6.loss_mask: 0.5992, decode.d6.loss_dice: 0.8536, decode.d7.loss_cls: 0.5098, decode.d7.loss_mask: 0.6007, decode.d7.loss_dice: 0.8572, decode.d8.loss_cls: 0.5111, decode.d8.loss_mask: 0.5986, decode.d8.loss_dice: 0.8558, loss: 23.9159 +2022-06-05 02:54:49,975 - mmseg - INFO - Saving checkpoint at 9000 iterations +2022-06-05 02:54:52,219 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 02:54:52,219 - mmseg - INFO - Iter [9000/40000] lr: 5.910e-06, eta: 4:10:12, time: 0.499, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4708, decode.loss_mask: 0.6088, decode.loss_dice: 0.8134, decode.d0.loss_cls: 3.9937, decode.d0.loss_mask: 0.6384, decode.d0.loss_dice: 0.9823, decode.d1.loss_cls: 0.7432, decode.d1.loss_mask: 0.6252, decode.d1.loss_dice: 0.8586, decode.d2.loss_cls: 0.5564, decode.d2.loss_mask: 0.6178, decode.d2.loss_dice: 0.8205, decode.d3.loss_cls: 0.5047, decode.d3.loss_mask: 0.6132, decode.d3.loss_dice: 0.8214, decode.d4.loss_cls: 0.4940, decode.d4.loss_mask: 0.6135, decode.d4.loss_dice: 0.8217, decode.d5.loss_cls: 0.4794, decode.d5.loss_mask: 0.6043, decode.d5.loss_dice: 0.8241, decode.d6.loss_cls: 0.4683, decode.d6.loss_mask: 0.6052, decode.d6.loss_dice: 0.8106, decode.d7.loss_cls: 0.4685, decode.d7.loss_mask: 0.6103, decode.d7.loss_dice: 0.8104, decode.d8.loss_cls: 0.4672, decode.d8.loss_mask: 0.6089, decode.d8.loss_dice: 0.8154, loss: 23.1701 +2022-06-05 02:55:17,362 - mmseg - INFO - Iter [9050/40000] lr: 5.900e-06, eta: 4:09:51, time: 0.503, data_time: 0.058, memory: 31652, decode.loss_cls: 0.5350, decode.loss_mask: 0.6059, decode.loss_dice: 0.8602, decode.d0.loss_cls: 4.0057, decode.d0.loss_mask: 0.6352, decode.d0.loss_dice: 1.0303, decode.d1.loss_cls: 0.8126, decode.d1.loss_mask: 0.6230, decode.d1.loss_dice: 0.9127, decode.d2.loss_cls: 0.6184, decode.d2.loss_mask: 0.6157, decode.d2.loss_dice: 0.8687, decode.d3.loss_cls: 0.5747, decode.d3.loss_mask: 0.6039, decode.d3.loss_dice: 0.8590, decode.d4.loss_cls: 0.5523, decode.d4.loss_mask: 0.6109, decode.d4.loss_dice: 0.8594, decode.d5.loss_cls: 0.5515, decode.d5.loss_mask: 0.6070, decode.d5.loss_dice: 0.8558, decode.d6.loss_cls: 0.5386, decode.d6.loss_mask: 0.6085, decode.d6.loss_dice: 0.8591, decode.d7.loss_cls: 0.5383, decode.d7.loss_mask: 0.6059, decode.d7.loss_dice: 0.8479, decode.d8.loss_cls: 0.5403, decode.d8.loss_mask: 0.6027, decode.d8.loss_dice: 0.8517, loss: 24.1908 +2022-06-05 02:55:40,189 - mmseg - INFO - Iter [9100/40000] lr: 5.891e-06, eta: 4:09:22, time: 0.457, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4557, decode.loss_mask: 0.5706, decode.loss_dice: 0.8065, decode.d0.loss_cls: 3.9362, decode.d0.loss_mask: 0.6101, decode.d0.loss_dice: 0.9864, decode.d1.loss_cls: 0.7298, decode.d1.loss_mask: 0.5941, decode.d1.loss_dice: 0.8598, decode.d2.loss_cls: 0.5543, decode.d2.loss_mask: 0.5777, decode.d2.loss_dice: 0.8149, decode.d3.loss_cls: 0.4938, decode.d3.loss_mask: 0.5702, decode.d3.loss_dice: 0.8060, decode.d4.loss_cls: 0.4776, decode.d4.loss_mask: 0.5651, decode.d4.loss_dice: 0.8065, decode.d5.loss_cls: 0.4649, decode.d5.loss_mask: 0.5697, decode.d5.loss_dice: 0.8109, decode.d6.loss_cls: 0.4583, decode.d6.loss_mask: 0.5703, decode.d6.loss_dice: 0.8078, decode.d7.loss_cls: 0.4571, decode.d7.loss_mask: 0.5691, decode.d7.loss_dice: 0.8029, decode.d8.loss_cls: 0.4547, decode.d8.loss_mask: 0.5702, decode.d8.loss_dice: 0.8050, loss: 22.5562 +2022-06-05 02:56:02,385 - mmseg - INFO - Iter [9150/40000] lr: 5.881e-06, eta: 4:08:51, time: 0.443, data_time: 
0.008, memory: 31652, decode.loss_cls: 0.4873, decode.loss_mask: 0.6242, decode.loss_dice: 0.8517, decode.d0.loss_cls: 3.9061, decode.d0.loss_mask: 0.6582, decode.d0.loss_dice: 1.0304, decode.d1.loss_cls: 0.7464, decode.d1.loss_mask: 0.6447, decode.d1.loss_dice: 0.9021, decode.d2.loss_cls: 0.5787, decode.d2.loss_mask: 0.6306, decode.d2.loss_dice: 0.8662, decode.d3.loss_cls: 0.5335, decode.d3.loss_mask: 0.6290, decode.d3.loss_dice: 0.8525, decode.d4.loss_cls: 0.5176, decode.d4.loss_mask: 0.6300, decode.d4.loss_dice: 0.8580, decode.d5.loss_cls: 0.5015, decode.d5.loss_mask: 0.6338, decode.d5.loss_dice: 0.8556, decode.d6.loss_cls: 0.4923, decode.d6.loss_mask: 0.6318, decode.d6.loss_dice: 0.8548, decode.d7.loss_cls: 0.4921, decode.d7.loss_mask: 0.6268, decode.d7.loss_dice: 0.8536, decode.d8.loss_cls: 0.4854, decode.d8.loss_mask: 0.6295, decode.d8.loss_dice: 0.8557, loss: 23.8599 +2022-06-05 02:56:24,579 - mmseg - INFO - Iter [9200/40000] lr: 5.872e-06, eta: 4:08:20, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4539, decode.loss_mask: 0.5873, decode.loss_dice: 0.7738, decode.d0.loss_cls: 3.8594, decode.d0.loss_mask: 0.6212, decode.d0.loss_dice: 0.9394, decode.d1.loss_cls: 0.7050, decode.d1.loss_mask: 0.6029, decode.d1.loss_dice: 0.8179, decode.d2.loss_cls: 0.5406, decode.d2.loss_mask: 0.5887, decode.d2.loss_dice: 0.7866, decode.d3.loss_cls: 0.4889, decode.d3.loss_mask: 0.5844, decode.d3.loss_dice: 0.7769, decode.d4.loss_cls: 0.4762, decode.d4.loss_mask: 0.5830, decode.d4.loss_dice: 0.7762, decode.d5.loss_cls: 0.4593, decode.d5.loss_mask: 0.5840, decode.d5.loss_dice: 0.7750, decode.d6.loss_cls: 0.4517, decode.d6.loss_mask: 0.5849, decode.d6.loss_dice: 0.7726, decode.d7.loss_cls: 0.4488, decode.d7.loss_mask: 0.5864, decode.d7.loss_dice: 0.7739, decode.d8.loss_cls: 0.4478, decode.d8.loss_mask: 0.5865, decode.d8.loss_dice: 0.7707, loss: 22.2038 +2022-06-05 02:56:46,966 - mmseg - INFO - Iter [9250/40000] lr: 5.862e-06, eta: 4:07:50, time: 0.448, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4508, decode.loss_mask: 0.5939, decode.loss_dice: 0.8427, decode.d0.loss_cls: 3.8412, decode.d0.loss_mask: 0.6323, decode.d0.loss_dice: 1.0184, decode.d1.loss_cls: 0.7195, decode.d1.loss_mask: 0.6166, decode.d1.loss_dice: 0.8887, decode.d2.loss_cls: 0.5427, decode.d2.loss_mask: 0.5993, decode.d2.loss_dice: 0.8448, decode.d3.loss_cls: 0.4901, decode.d3.loss_mask: 0.5997, decode.d3.loss_dice: 0.8371, decode.d4.loss_cls: 0.4689, decode.d4.loss_mask: 0.6013, decode.d4.loss_dice: 0.8417, decode.d5.loss_cls: 0.4592, decode.d5.loss_mask: 0.6017, decode.d5.loss_dice: 0.8416, decode.d6.loss_cls: 0.4573, decode.d6.loss_mask: 0.5990, decode.d6.loss_dice: 0.8432, decode.d7.loss_cls: 0.4531, decode.d7.loss_mask: 0.6003, decode.d7.loss_dice: 0.8409, decode.d8.loss_cls: 0.4425, decode.d8.loss_mask: 0.5989, decode.d8.loss_dice: 0.8479, loss: 23.0153 +2022-06-05 02:57:09,361 - mmseg - INFO - Iter [9300/40000] lr: 5.853e-06, eta: 4:07:20, time: 0.448, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4875, decode.loss_mask: 0.6047, decode.loss_dice: 0.8287, decode.d0.loss_cls: 3.8330, decode.d0.loss_mask: 0.6465, decode.d0.loss_dice: 1.0101, decode.d1.loss_cls: 0.7289, decode.d1.loss_mask: 0.6271, decode.d1.loss_dice: 0.8878, decode.d2.loss_cls: 0.5700, decode.d2.loss_mask: 0.6091, decode.d2.loss_dice: 0.8439, decode.d3.loss_cls: 0.5236, decode.d3.loss_mask: 0.6050, decode.d3.loss_dice: 0.8339, decode.d4.loss_cls: 0.5146, decode.d4.loss_mask: 0.6035, decode.d4.loss_dice: 0.8332, decode.d5.loss_cls: 
0.5047, decode.d5.loss_mask: 0.6046, decode.d5.loss_dice: 0.8367, decode.d6.loss_cls: 0.4915, decode.d6.loss_mask: 0.6064, decode.d6.loss_dice: 0.8312, decode.d7.loss_cls: 0.4851, decode.d7.loss_mask: 0.6064, decode.d7.loss_dice: 0.8330, decode.d8.loss_cls: 0.4885, decode.d8.loss_mask: 0.6075, decode.d8.loss_dice: 0.8314, loss: 23.3182 +2022-06-05 02:57:32,276 - mmseg - INFO - Iter [9350/40000] lr: 5.843e-06, eta: 4:06:52, time: 0.458, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4573, decode.loss_mask: 0.6100, decode.loss_dice: 0.8425, decode.d0.loss_cls: 3.8104, decode.d0.loss_mask: 0.6518, decode.d0.loss_dice: 1.0130, decode.d1.loss_cls: 0.7118, decode.d1.loss_mask: 0.6362, decode.d1.loss_dice: 0.8999, decode.d2.loss_cls: 0.5446, decode.d2.loss_mask: 0.6229, decode.d2.loss_dice: 0.8572, decode.d3.loss_cls: 0.4977, decode.d3.loss_mask: 0.6195, decode.d3.loss_dice: 0.8440, decode.d4.loss_cls: 0.4862, decode.d4.loss_mask: 0.6149, decode.d4.loss_dice: 0.8407, decode.d5.loss_cls: 0.4739, decode.d5.loss_mask: 0.6131, decode.d5.loss_dice: 0.8426, decode.d6.loss_cls: 0.4646, decode.d6.loss_mask: 0.6138, decode.d6.loss_dice: 0.8378, decode.d7.loss_cls: 0.4678, decode.d7.loss_mask: 0.6078, decode.d7.loss_dice: 0.8339, decode.d8.loss_cls: 0.4607, decode.d8.loss_mask: 0.6079, decode.d8.loss_dice: 0.8359, loss: 23.2206 +2022-06-05 02:57:57,737 - mmseg - INFO - Iter [9400/40000] lr: 5.834e-06, eta: 4:06:32, time: 0.509, data_time: 0.058, memory: 31652, decode.loss_cls: 0.4617, decode.loss_mask: 0.5879, decode.loss_dice: 0.8482, decode.d0.loss_cls: 3.7592, decode.d0.loss_mask: 0.6203, decode.d0.loss_dice: 1.0203, decode.d1.loss_cls: 0.7349, decode.d1.loss_mask: 0.6072, decode.d1.loss_dice: 0.9000, decode.d2.loss_cls: 0.5562, decode.d2.loss_mask: 0.5948, decode.d2.loss_dice: 0.8615, decode.d3.loss_cls: 0.5012, decode.d3.loss_mask: 0.5953, decode.d3.loss_dice: 0.8535, decode.d4.loss_cls: 0.4922, decode.d4.loss_mask: 0.5881, decode.d4.loss_dice: 0.8491, decode.d5.loss_cls: 0.4728, decode.d5.loss_mask: 0.5918, decode.d5.loss_dice: 0.8504, decode.d6.loss_cls: 0.4684, decode.d6.loss_mask: 0.5879, decode.d6.loss_dice: 0.8412, decode.d7.loss_cls: 0.4729, decode.d7.loss_mask: 0.5865, decode.d7.loss_dice: 0.8415, decode.d8.loss_cls: 0.4735, decode.d8.loss_mask: 0.5846, decode.d8.loss_dice: 0.8499, loss: 23.0531 +2022-06-05 02:58:20,199 - mmseg - INFO - Iter [9450/40000] lr: 5.824e-06, eta: 4:06:02, time: 0.450, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4481, decode.loss_mask: 0.5607, decode.loss_dice: 0.7965, decode.d0.loss_cls: 3.7427, decode.d0.loss_mask: 0.6019, decode.d0.loss_dice: 0.9712, decode.d1.loss_cls: 0.7046, decode.d1.loss_mask: 0.5809, decode.d1.loss_dice: 0.8543, decode.d2.loss_cls: 0.5374, decode.d2.loss_mask: 0.5701, decode.d2.loss_dice: 0.8129, decode.d3.loss_cls: 0.4837, decode.d3.loss_mask: 0.5614, decode.d3.loss_dice: 0.8028, decode.d4.loss_cls: 0.4691, decode.d4.loss_mask: 0.5631, decode.d4.loss_dice: 0.8041, decode.d5.loss_cls: 0.4578, decode.d5.loss_mask: 0.5614, decode.d5.loss_dice: 0.8020, decode.d6.loss_cls: 0.4532, decode.d6.loss_mask: 0.5610, decode.d6.loss_dice: 0.7987, decode.d7.loss_cls: 0.4477, decode.d7.loss_mask: 0.5598, decode.d7.loss_dice: 0.8014, decode.d8.loss_cls: 0.4484, decode.d8.loss_mask: 0.5595, decode.d8.loss_dice: 0.7984, loss: 22.1150 +2022-06-05 02:58:42,796 - mmseg - INFO - Iter [9500/40000] lr: 5.815e-06, eta: 4:05:33, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4677, decode.loss_mask: 0.5850, decode.loss_dice: 0.8081, 
decode.d0.loss_cls: 3.7224, decode.d0.loss_mask: 0.6147, decode.d0.loss_dice: 0.9920, decode.d1.loss_cls: 0.7358, decode.d1.loss_mask: 0.6014, decode.d1.loss_dice: 0.8594, decode.d2.loss_cls: 0.5683, decode.d2.loss_mask: 0.5864, decode.d2.loss_dice: 0.8240, decode.d3.loss_cls: 0.4992, decode.d3.loss_mask: 0.5842, decode.d3.loss_dice: 0.8090, decode.d4.loss_cls: 0.4850, decode.d4.loss_mask: 0.5877, decode.d4.loss_dice: 0.8150, decode.d5.loss_cls: 0.4809, decode.d5.loss_mask: 0.5865, decode.d5.loss_dice: 0.8124, decode.d6.loss_cls: 0.4755, decode.d6.loss_mask: 0.5832, decode.d6.loss_dice: 0.8029, decode.d7.loss_cls: 0.4700, decode.d7.loss_mask: 0.5825, decode.d7.loss_dice: 0.8090, decode.d8.loss_cls: 0.4722, decode.d8.loss_mask: 0.5859, decode.d8.loss_dice: 0.8094, loss: 22.6160 +2022-06-05 02:59:05,084 - mmseg - INFO - Iter [9550/40000] lr: 5.805e-06, eta: 4:05:03, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4538, decode.loss_mask: 0.6132, decode.loss_dice: 0.8223, decode.d0.loss_cls: 3.7285, decode.d0.loss_mask: 0.6434, decode.d0.loss_dice: 1.0000, decode.d1.loss_cls: 0.7222, decode.d1.loss_mask: 0.6325, decode.d1.loss_dice: 0.8700, decode.d2.loss_cls: 0.5535, decode.d2.loss_mask: 0.6167, decode.d2.loss_dice: 0.8316, decode.d3.loss_cls: 0.5019, decode.d3.loss_mask: 0.6150, decode.d3.loss_dice: 0.8229, decode.d4.loss_cls: 0.4774, decode.d4.loss_mask: 0.6164, decode.d4.loss_dice: 0.8291, decode.d5.loss_cls: 0.4726, decode.d5.loss_mask: 0.6131, decode.d5.loss_dice: 0.8212, decode.d6.loss_cls: 0.4535, decode.d6.loss_mask: 0.6128, decode.d6.loss_dice: 0.8234, decode.d7.loss_cls: 0.4586, decode.d7.loss_mask: 0.6135, decode.d7.loss_dice: 0.8224, decode.d8.loss_cls: 0.4549, decode.d8.loss_mask: 0.6133, decode.d8.loss_dice: 0.8184, loss: 22.9277 +2022-06-05 02:59:27,278 - mmseg - INFO - Iter [9600/40000] lr: 5.796e-06, eta: 4:04:32, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4421, decode.loss_mask: 0.5770, decode.loss_dice: 0.7974, decode.d0.loss_cls: 3.6345, decode.d0.loss_mask: 0.6106, decode.d0.loss_dice: 0.9678, decode.d1.loss_cls: 0.6776, decode.d1.loss_mask: 0.5899, decode.d1.loss_dice: 0.8453, decode.d2.loss_cls: 0.5227, decode.d2.loss_mask: 0.5797, decode.d2.loss_dice: 0.8111, decode.d3.loss_cls: 0.4880, decode.d3.loss_mask: 0.5720, decode.d3.loss_dice: 0.7981, decode.d4.loss_cls: 0.4676, decode.d4.loss_mask: 0.5755, decode.d4.loss_dice: 0.7964, decode.d5.loss_cls: 0.4551, decode.d5.loss_mask: 0.5798, decode.d5.loss_dice: 0.7969, decode.d6.loss_cls: 0.4494, decode.d6.loss_mask: 0.5764, decode.d6.loss_dice: 0.7966, decode.d7.loss_cls: 0.4373, decode.d7.loss_mask: 0.5755, decode.d7.loss_dice: 0.7986, decode.d8.loss_cls: 0.4391, decode.d8.loss_mask: 0.5815, decode.d8.loss_dice: 0.7958, loss: 22.0353 +2022-06-05 02:59:49,351 - mmseg - INFO - Iter [9650/40000] lr: 5.786e-06, eta: 4:04:02, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4792, decode.loss_mask: 0.5957, decode.loss_dice: 0.8242, decode.d0.loss_cls: 3.6612, decode.d0.loss_mask: 0.6339, decode.d0.loss_dice: 1.0174, decode.d1.loss_cls: 0.7410, decode.d1.loss_mask: 0.6181, decode.d1.loss_dice: 0.8881, decode.d2.loss_cls: 0.5781, decode.d2.loss_mask: 0.6027, decode.d2.loss_dice: 0.8448, decode.d3.loss_cls: 0.5225, decode.d3.loss_mask: 0.5957, decode.d3.loss_dice: 0.8310, decode.d4.loss_cls: 0.5066, decode.d4.loss_mask: 0.5929, decode.d4.loss_dice: 0.8340, decode.d5.loss_cls: 0.4910, decode.d5.loss_mask: 0.5954, decode.d5.loss_dice: 0.8338, decode.d6.loss_cls: 0.4856, 
decode.d6.loss_mask: 0.5927, decode.d6.loss_dice: 0.8308, decode.d7.loss_cls: 0.4827, decode.d7.loss_mask: 0.5919, decode.d7.loss_dice: 0.8279, decode.d8.loss_cls: 0.4841, decode.d8.loss_mask: 0.5930, decode.d8.loss_dice: 0.8277, loss: 23.0037 +2022-06-05 03:00:14,671 - mmseg - INFO - Iter [9700/40000] lr: 5.777e-06, eta: 4:03:41, time: 0.506, data_time: 0.060, memory: 31652, decode.loss_cls: 0.4559, decode.loss_mask: 0.5623, decode.loss_dice: 0.7996, decode.d0.loss_cls: 3.6093, decode.d0.loss_mask: 0.6008, decode.d0.loss_dice: 0.9716, decode.d1.loss_cls: 0.7111, decode.d1.loss_mask: 0.5815, decode.d1.loss_dice: 0.8410, decode.d2.loss_cls: 0.5481, decode.d2.loss_mask: 0.5678, decode.d2.loss_dice: 0.8075, decode.d3.loss_cls: 0.4923, decode.d3.loss_mask: 0.5646, decode.d3.loss_dice: 0.7977, decode.d4.loss_cls: 0.4790, decode.d4.loss_mask: 0.5625, decode.d4.loss_dice: 0.7997, decode.d5.loss_cls: 0.4751, decode.d5.loss_mask: 0.5610, decode.d5.loss_dice: 0.8037, decode.d6.loss_cls: 0.4622, decode.d6.loss_mask: 0.5629, decode.d6.loss_dice: 0.7940, decode.d7.loss_cls: 0.4602, decode.d7.loss_mask: 0.5647, decode.d7.loss_dice: 0.7988, decode.d8.loss_cls: 0.4482, decode.d8.loss_mask: 0.5648, decode.d8.loss_dice: 0.8050, loss: 22.0530 +2022-06-05 03:00:37,315 - mmseg - INFO - Iter [9750/40000] lr: 5.767e-06, eta: 4:03:13, time: 0.453, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4299, decode.loss_mask: 0.5937, decode.loss_dice: 0.8045, decode.d0.loss_cls: 3.5916, decode.d0.loss_mask: 0.6355, decode.d0.loss_dice: 0.9755, decode.d1.loss_cls: 0.6981, decode.d1.loss_mask: 0.6115, decode.d1.loss_dice: 0.8580, decode.d2.loss_cls: 0.5311, decode.d2.loss_mask: 0.5939, decode.d2.loss_dice: 0.8217, decode.d3.loss_cls: 0.4753, decode.d3.loss_mask: 0.5897, decode.d3.loss_dice: 0.8057, decode.d4.loss_cls: 0.4617, decode.d4.loss_mask: 0.5852, decode.d4.loss_dice: 0.8063, decode.d5.loss_cls: 0.4466, decode.d5.loss_mask: 0.5846, decode.d5.loss_dice: 0.8040, decode.d6.loss_cls: 0.4408, decode.d6.loss_mask: 0.5898, decode.d6.loss_dice: 0.8064, decode.d7.loss_cls: 0.4406, decode.d7.loss_mask: 0.5876, decode.d7.loss_dice: 0.8043, decode.d8.loss_cls: 0.4385, decode.d8.loss_mask: 0.5913, decode.d8.loss_dice: 0.8059, loss: 22.2095 +2022-06-05 03:01:00,158 - mmseg - INFO - Iter [9800/40000] lr: 5.758e-06, eta: 4:02:45, time: 0.456, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4520, decode.loss_mask: 0.5594, decode.loss_dice: 0.8292, decode.d0.loss_cls: 3.5726, decode.d0.loss_mask: 0.5911, decode.d0.loss_dice: 0.9974, decode.d1.loss_cls: 0.7175, decode.d1.loss_mask: 0.5797, decode.d1.loss_dice: 0.8815, decode.d2.loss_cls: 0.5571, decode.d2.loss_mask: 0.5632, decode.d2.loss_dice: 0.8404, decode.d3.loss_cls: 0.5022, decode.d3.loss_mask: 0.5613, decode.d3.loss_dice: 0.8304, decode.d4.loss_cls: 0.4858, decode.d4.loss_mask: 0.5625, decode.d4.loss_dice: 0.8339, decode.d5.loss_cls: 0.4708, decode.d5.loss_mask: 0.5628, decode.d5.loss_dice: 0.8253, decode.d6.loss_cls: 0.4552, decode.d6.loss_mask: 0.5609, decode.d6.loss_dice: 0.8247, decode.d7.loss_cls: 0.4585, decode.d7.loss_mask: 0.5604, decode.d7.loss_dice: 0.8259, decode.d8.loss_cls: 0.4509, decode.d8.loss_mask: 0.5594, decode.d8.loss_dice: 0.8278, loss: 22.2999 +2022-06-05 03:01:22,434 - mmseg - INFO - Iter [9850/40000] lr: 5.748e-06, eta: 4:02:15, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4424, decode.loss_mask: 0.6043, decode.loss_dice: 0.8030, decode.d0.loss_cls: 3.5228, decode.d0.loss_mask: 0.6409, decode.d0.loss_dice: 0.9728, 
decode.d1.loss_cls: 0.6813, decode.d1.loss_mask: 0.6268, decode.d1.loss_dice: 0.8586, decode.d2.loss_cls: 0.5247, decode.d2.loss_mask: 0.6135, decode.d2.loss_dice: 0.8208, decode.d3.loss_cls: 0.4733, decode.d3.loss_mask: 0.6068, decode.d3.loss_dice: 0.8096, decode.d4.loss_cls: 0.4640, decode.d4.loss_mask: 0.6064, decode.d4.loss_dice: 0.8124, decode.d5.loss_cls: 0.4574, decode.d5.loss_mask: 0.6021, decode.d5.loss_dice: 0.8134, decode.d6.loss_cls: 0.4429, decode.d6.loss_mask: 0.6037, decode.d6.loss_dice: 0.8032, decode.d7.loss_cls: 0.4412, decode.d7.loss_mask: 0.6054, decode.d7.loss_dice: 0.8023, decode.d8.loss_cls: 0.4423, decode.d8.loss_mask: 0.6022, decode.d8.loss_dice: 0.8007, loss: 22.3008 +2022-06-05 03:01:44,549 - mmseg - INFO - Iter [9900/40000] lr: 5.738e-06, eta: 4:01:45, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4539, decode.loss_mask: 0.5919, decode.loss_dice: 0.8086, decode.d0.loss_cls: 3.5118, decode.d0.loss_mask: 0.6295, decode.d0.loss_dice: 0.9779, decode.d1.loss_cls: 0.6930, decode.d1.loss_mask: 0.6147, decode.d1.loss_dice: 0.8571, decode.d2.loss_cls: 0.5328, decode.d2.loss_mask: 0.5922, decode.d2.loss_dice: 0.8242, decode.d3.loss_cls: 0.4788, decode.d3.loss_mask: 0.5916, decode.d3.loss_dice: 0.8102, decode.d4.loss_cls: 0.4632, decode.d4.loss_mask: 0.5935, decode.d4.loss_dice: 0.8158, decode.d5.loss_cls: 0.4551, decode.d5.loss_mask: 0.5882, decode.d5.loss_dice: 0.8145, decode.d6.loss_cls: 0.4476, decode.d6.loss_mask: 0.5921, decode.d6.loss_dice: 0.8076, decode.d7.loss_cls: 0.4451, decode.d7.loss_mask: 0.5909, decode.d7.loss_dice: 0.8111, decode.d8.loss_cls: 0.4404, decode.d8.loss_mask: 0.5904, decode.d8.loss_dice: 0.8104, loss: 22.2338 +2022-06-05 03:02:06,986 - mmseg - INFO - Iter [9950/40000] lr: 5.729e-06, eta: 4:01:16, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4296, decode.loss_mask: 0.6021, decode.loss_dice: 0.7821, decode.d0.loss_cls: 3.5039, decode.d0.loss_mask: 0.6432, decode.d0.loss_dice: 0.9474, decode.d1.loss_cls: 0.6764, decode.d1.loss_mask: 0.6143, decode.d1.loss_dice: 0.8183, decode.d2.loss_cls: 0.5062, decode.d2.loss_mask: 0.6012, decode.d2.loss_dice: 0.7896, decode.d3.loss_cls: 0.4582, decode.d3.loss_mask: 0.6022, decode.d3.loss_dice: 0.7820, decode.d4.loss_cls: 0.4487, decode.d4.loss_mask: 0.6027, decode.d4.loss_dice: 0.7793, decode.d5.loss_cls: 0.4271, decode.d5.loss_mask: 0.6057, decode.d5.loss_dice: 0.7797, decode.d6.loss_cls: 0.4273, decode.d6.loss_mask: 0.6018, decode.d6.loss_dice: 0.7762, decode.d7.loss_cls: 0.4270, decode.d7.loss_mask: 0.6026, decode.d7.loss_dice: 0.7837, decode.d8.loss_cls: 0.4216, decode.d8.loss_mask: 0.6045, decode.d8.loss_dice: 0.7859, loss: 21.8307 +2022-06-05 03:02:31,317 - mmseg - INFO - Saving checkpoint at 10000 iterations +2022-06-05 03:02:33,546 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:02:33,546 - mmseg - INFO - Iter [10000/40000] lr: 5.719e-06, eta: 4:00:59, time: 0.531, data_time: 0.054, memory: 31652, decode.loss_cls: 0.4597, decode.loss_mask: 0.5876, decode.loss_dice: 0.8237, decode.d0.loss_cls: 3.4711, decode.d0.loss_mask: 0.6311, decode.d0.loss_dice: 1.0083, decode.d1.loss_cls: 0.6901, decode.d1.loss_mask: 0.6111, decode.d1.loss_dice: 0.8823, decode.d2.loss_cls: 0.5402, decode.d2.loss_mask: 0.5963, decode.d2.loss_dice: 0.8451, decode.d3.loss_cls: 0.4915, decode.d3.loss_mask: 0.5891, decode.d3.loss_dice: 0.8249, decode.d4.loss_cls: 0.4760, decode.d4.loss_mask: 0.5878, decode.d4.loss_dice: 0.8233, 
decode.d5.loss_cls: 0.4663, decode.d5.loss_mask: 0.5881, decode.d5.loss_dice: 0.8238, decode.d6.loss_cls: 0.4549, decode.d6.loss_mask: 0.5859, decode.d6.loss_dice: 0.8222, decode.d7.loss_cls: 0.4626, decode.d7.loss_mask: 0.5863, decode.d7.loss_dice: 0.8180, decode.d8.loss_cls: 0.4592, decode.d8.loss_mask: 0.5844, decode.d8.loss_dice: 0.8218, loss: 22.4126 +2022-06-05 03:02:56,066 - mmseg - INFO - Iter [10050/40000] lr: 5.710e-06, eta: 4:00:30, time: 0.450, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4279, decode.loss_mask: 0.5898, decode.loss_dice: 0.8031, decode.d0.loss_cls: 3.4019, decode.d0.loss_mask: 0.6224, decode.d0.loss_dice: 0.9560, decode.d1.loss_cls: 0.6481, decode.d1.loss_mask: 0.6076, decode.d1.loss_dice: 0.8504, decode.d2.loss_cls: 0.4972, decode.d2.loss_mask: 0.5970, decode.d2.loss_dice: 0.8169, decode.d3.loss_cls: 0.4588, decode.d3.loss_mask: 0.5916, decode.d3.loss_dice: 0.8053, decode.d4.loss_cls: 0.4430, decode.d4.loss_mask: 0.5913, decode.d4.loss_dice: 0.8007, decode.d5.loss_cls: 0.4309, decode.d5.loss_mask: 0.5904, decode.d5.loss_dice: 0.7991, decode.d6.loss_cls: 0.4203, decode.d6.loss_mask: 0.5891, decode.d6.loss_dice: 0.7950, decode.d7.loss_cls: 0.4196, decode.d7.loss_mask: 0.5878, decode.d7.loss_dice: 0.8018, decode.d8.loss_cls: 0.4173, decode.d8.loss_mask: 0.5874, decode.d8.loss_dice: 0.8087, loss: 21.7564 +2022-06-05 03:03:18,203 - mmseg - INFO - Iter [10100/40000] lr: 5.700e-06, eta: 4:00:00, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4644, decode.loss_mask: 0.5657, decode.loss_dice: 0.8206, decode.d0.loss_cls: 3.4477, decode.d0.loss_mask: 0.5911, decode.d0.loss_dice: 1.0013, decode.d1.loss_cls: 0.7089, decode.d1.loss_mask: 0.5814, decode.d1.loss_dice: 0.8804, decode.d2.loss_cls: 0.5532, decode.d2.loss_mask: 0.5646, decode.d2.loss_dice: 0.8437, decode.d3.loss_cls: 0.4953, decode.d3.loss_mask: 0.5631, decode.d3.loss_dice: 0.8262, decode.d4.loss_cls: 0.4809, decode.d4.loss_mask: 0.5654, decode.d4.loss_dice: 0.8269, decode.d5.loss_cls: 0.4663, decode.d5.loss_mask: 0.5673, decode.d5.loss_dice: 0.8268, decode.d6.loss_cls: 0.4661, decode.d6.loss_mask: 0.5639, decode.d6.loss_dice: 0.8165, decode.d7.loss_cls: 0.4627, decode.d7.loss_mask: 0.5634, decode.d7.loss_dice: 0.8208, decode.d8.loss_cls: 0.4658, decode.d8.loss_mask: 0.5617, decode.d8.loss_dice: 0.8199, loss: 22.1822 +2022-06-05 03:03:40,193 - mmseg - INFO - Iter [10150/40000] lr: 5.691e-06, eta: 3:59:30, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4773, decode.loss_mask: 0.6067, decode.loss_dice: 0.7889, decode.d0.loss_cls: 3.4188, decode.d0.loss_mask: 0.6370, decode.d0.loss_dice: 0.9674, decode.d1.loss_cls: 0.7157, decode.d1.loss_mask: 0.6233, decode.d1.loss_dice: 0.8462, decode.d2.loss_cls: 0.5609, decode.d2.loss_mask: 0.6081, decode.d2.loss_dice: 0.8109, decode.d3.loss_cls: 0.5204, decode.d3.loss_mask: 0.6040, decode.d3.loss_dice: 0.7915, decode.d4.loss_cls: 0.5011, decode.d4.loss_mask: 0.6039, decode.d4.loss_dice: 0.8044, decode.d5.loss_cls: 0.4857, decode.d5.loss_mask: 0.6044, decode.d5.loss_dice: 0.7972, decode.d6.loss_cls: 0.4748, decode.d6.loss_mask: 0.6080, decode.d6.loss_dice: 0.7932, decode.d7.loss_cls: 0.4783, decode.d7.loss_mask: 0.6074, decode.d7.loss_dice: 0.7979, decode.d8.loss_cls: 0.4808, decode.d8.loss_mask: 0.6056, decode.d8.loss_dice: 0.7957, loss: 22.4155 +2022-06-05 03:04:02,759 - mmseg - INFO - Iter [10200/40000] lr: 5.681e-06, eta: 3:59:01, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4075, decode.loss_mask: 0.5820, 
decode.loss_dice: 0.7835, decode.d0.loss_cls: 3.3895, decode.d0.loss_mask: 0.6228, decode.d0.loss_dice: 0.9436, decode.d1.loss_cls: 0.6904, decode.d1.loss_mask: 0.5995, decode.d1.loss_dice: 0.8298, decode.d2.loss_cls: 0.5088, decode.d2.loss_mask: 0.5838, decode.d2.loss_dice: 0.7946, decode.d3.loss_cls: 0.4457, decode.d3.loss_mask: 0.5817, decode.d3.loss_dice: 0.7838, decode.d4.loss_cls: 0.4313, decode.d4.loss_mask: 0.5831, decode.d4.loss_dice: 0.7860, decode.d5.loss_cls: 0.4261, decode.d5.loss_mask: 0.5807, decode.d5.loss_dice: 0.7851, decode.d6.loss_cls: 0.4115, decode.d6.loss_mask: 0.5836, decode.d6.loss_dice: 0.7841, decode.d7.loss_cls: 0.4118, decode.d7.loss_mask: 0.5812, decode.d7.loss_dice: 0.7876, decode.d8.loss_cls: 0.4106, decode.d8.loss_mask: 0.5786, decode.d8.loss_dice: 0.7848, loss: 21.4731 +2022-06-05 03:04:25,397 - mmseg - INFO - Iter [10250/40000] lr: 5.672e-06, eta: 3:58:33, time: 0.453, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4306, decode.loss_mask: 0.5629, decode.loss_dice: 0.7935, decode.d0.loss_cls: 3.3497, decode.d0.loss_mask: 0.6030, decode.d0.loss_dice: 0.9554, decode.d1.loss_cls: 0.6727, decode.d1.loss_mask: 0.5796, decode.d1.loss_dice: 0.8427, decode.d2.loss_cls: 0.4982, decode.d2.loss_mask: 0.5690, decode.d2.loss_dice: 0.8174, decode.d3.loss_cls: 0.4608, decode.d3.loss_mask: 0.5649, decode.d3.loss_dice: 0.8020, decode.d4.loss_cls: 0.4535, decode.d4.loss_mask: 0.5647, decode.d4.loss_dice: 0.8006, decode.d5.loss_cls: 0.4381, decode.d5.loss_mask: 0.5635, decode.d5.loss_dice: 0.8004, decode.d6.loss_cls: 0.4425, decode.d6.loss_mask: 0.5626, decode.d6.loss_dice: 0.7901, decode.d7.loss_cls: 0.4282, decode.d7.loss_mask: 0.5633, decode.d7.loss_dice: 0.7930, decode.d8.loss_cls: 0.4322, decode.d8.loss_mask: 0.5629, decode.d8.loss_dice: 0.7943, loss: 21.4924 +2022-06-05 03:04:50,143 - mmseg - INFO - Iter [10300/40000] lr: 5.662e-06, eta: 3:58:11, time: 0.494, data_time: 0.057, memory: 31652, decode.loss_cls: 0.4178, decode.loss_mask: 0.5912, decode.loss_dice: 0.8046, decode.d0.loss_cls: 3.3356, decode.d0.loss_mask: 0.6263, decode.d0.loss_dice: 0.9742, decode.d1.loss_cls: 0.6503, decode.d1.loss_mask: 0.6127, decode.d1.loss_dice: 0.8541, decode.d2.loss_cls: 0.4961, decode.d2.loss_mask: 0.6001, decode.d2.loss_dice: 0.8228, decode.d3.loss_cls: 0.4501, decode.d3.loss_mask: 0.5899, decode.d3.loss_dice: 0.8027, decode.d4.loss_cls: 0.4340, decode.d4.loss_mask: 0.5934, decode.d4.loss_dice: 0.8032, decode.d5.loss_cls: 0.4252, decode.d5.loss_mask: 0.5892, decode.d5.loss_dice: 0.8061, decode.d6.loss_cls: 0.4181, decode.d6.loss_mask: 0.5919, decode.d6.loss_dice: 0.8011, decode.d7.loss_cls: 0.4158, decode.d7.loss_mask: 0.5911, decode.d7.loss_dice: 0.8065, decode.d8.loss_cls: 0.4188, decode.d8.loss_mask: 0.5906, decode.d8.loss_dice: 0.8036, loss: 21.7173 +2022-06-05 03:05:12,426 - mmseg - INFO - Iter [10350/40000] lr: 5.653e-06, eta: 3:57:42, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4134, decode.loss_mask: 0.5710, decode.loss_dice: 0.7899, decode.d0.loss_cls: 3.3124, decode.d0.loss_mask: 0.6110, decode.d0.loss_dice: 0.9613, decode.d1.loss_cls: 0.6531, decode.d1.loss_mask: 0.5906, decode.d1.loss_dice: 0.8357, decode.d2.loss_cls: 0.4974, decode.d2.loss_mask: 0.5780, decode.d2.loss_dice: 0.7996, decode.d3.loss_cls: 0.4462, decode.d3.loss_mask: 0.5746, decode.d3.loss_dice: 0.7903, decode.d4.loss_cls: 0.4323, decode.d4.loss_mask: 0.5756, decode.d4.loss_dice: 0.7925, decode.d5.loss_cls: 0.4329, decode.d5.loss_mask: 0.5723, decode.d5.loss_dice: 0.7889, 
decode.d6.loss_cls: 0.4224, decode.d6.loss_mask: 0.5731, decode.d6.loss_dice: 0.7888, decode.d7.loss_cls: 0.4139, decode.d7.loss_mask: 0.5718, decode.d7.loss_dice: 0.7928, decode.d8.loss_cls: 0.4057, decode.d8.loss_mask: 0.5713, decode.d8.loss_dice: 0.7908, loss: 21.3493 +2022-06-05 03:05:34,551 - mmseg - INFO - Iter [10400/40000] lr: 5.643e-06, eta: 3:57:13, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3993, decode.loss_mask: 0.5718, decode.loss_dice: 0.8093, decode.d0.loss_cls: 3.2709, decode.d0.loss_mask: 0.6133, decode.d0.loss_dice: 0.9600, decode.d1.loss_cls: 0.6291, decode.d1.loss_mask: 0.5950, decode.d1.loss_dice: 0.8686, decode.d2.loss_cls: 0.4764, decode.d2.loss_mask: 0.5846, decode.d2.loss_dice: 0.8337, decode.d3.loss_cls: 0.4322, decode.d3.loss_mask: 0.5787, decode.d3.loss_dice: 0.8173, decode.d4.loss_cls: 0.4182, decode.d4.loss_mask: 0.5770, decode.d4.loss_dice: 0.8163, decode.d5.loss_cls: 0.4187, decode.d5.loss_mask: 0.5750, decode.d5.loss_dice: 0.8156, decode.d6.loss_cls: 0.4028, decode.d6.loss_mask: 0.5757, decode.d6.loss_dice: 0.8079, decode.d7.loss_cls: 0.3949, decode.d7.loss_mask: 0.5754, decode.d7.loss_dice: 0.8098, decode.d8.loss_cls: 0.3991, decode.d8.loss_mask: 0.5740, decode.d8.loss_dice: 0.8092, loss: 21.4099 +2022-06-05 03:05:56,302 - mmseg - INFO - Iter [10450/40000] lr: 5.634e-06, eta: 3:56:42, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4125, decode.loss_mask: 0.5832, decode.loss_dice: 0.7485, decode.d0.loss_cls: 3.2336, decode.d0.loss_mask: 0.6259, decode.d0.loss_dice: 0.9144, decode.d1.loss_cls: 0.6381, decode.d1.loss_mask: 0.6023, decode.d1.loss_dice: 0.7961, decode.d2.loss_cls: 0.4866, decode.d2.loss_mask: 0.5943, decode.d2.loss_dice: 0.7672, decode.d3.loss_cls: 0.4417, decode.d3.loss_mask: 0.5891, decode.d3.loss_dice: 0.7599, decode.d4.loss_cls: 0.4226, decode.d4.loss_mask: 0.5865, decode.d4.loss_dice: 0.7582, decode.d5.loss_cls: 0.4230, decode.d5.loss_mask: 0.5852, decode.d5.loss_dice: 0.7584, decode.d6.loss_cls: 0.4086, decode.d6.loss_mask: 0.5839, decode.d6.loss_dice: 0.7507, decode.d7.loss_cls: 0.4132, decode.d7.loss_mask: 0.5823, decode.d7.loss_dice: 0.7542, decode.d8.loss_cls: 0.4086, decode.d8.loss_mask: 0.5810, decode.d8.loss_dice: 0.7520, loss: 20.9620 +2022-06-05 03:06:18,672 - mmseg - INFO - Iter [10500/40000] lr: 5.624e-06, eta: 3:56:13, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4571, decode.loss_mask: 0.5735, decode.loss_dice: 0.8091, decode.d0.loss_cls: 3.2730, decode.d0.loss_mask: 0.6133, decode.d0.loss_dice: 0.9912, decode.d1.loss_cls: 0.7166, decode.d1.loss_mask: 0.6003, decode.d1.loss_dice: 0.8666, decode.d2.loss_cls: 0.5422, decode.d2.loss_mask: 0.5816, decode.d2.loss_dice: 0.8281, decode.d3.loss_cls: 0.4885, decode.d3.loss_mask: 0.5748, decode.d3.loss_dice: 0.8104, decode.d4.loss_cls: 0.4754, decode.d4.loss_mask: 0.5745, decode.d4.loss_dice: 0.8103, decode.d5.loss_cls: 0.4722, decode.d5.loss_mask: 0.5720, decode.d5.loss_dice: 0.8054, decode.d6.loss_cls: 0.4654, decode.d6.loss_mask: 0.5698, decode.d6.loss_dice: 0.8090, decode.d7.loss_cls: 0.4632, decode.d7.loss_mask: 0.5724, decode.d7.loss_dice: 0.8079, decode.d8.loss_cls: 0.4599, decode.d8.loss_mask: 0.5719, decode.d8.loss_dice: 0.8082, loss: 21.9638 +2022-06-05 03:06:40,676 - mmseg - INFO - Iter [10550/40000] lr: 5.615e-06, eta: 3:55:44, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4168, decode.loss_mask: 0.5728, decode.loss_dice: 0.7674, decode.d0.loss_cls: 3.1846, decode.d0.loss_mask: 0.6138, 
decode.d0.loss_dice: 0.9376, decode.d1.loss_cls: 0.6335, decode.d1.loss_mask: 0.5922, decode.d1.loss_dice: 0.8216, decode.d2.loss_cls: 0.4827, decode.d2.loss_mask: 0.5802, decode.d2.loss_dice: 0.7871, decode.d3.loss_cls: 0.4410, decode.d3.loss_mask: 0.5742, decode.d3.loss_dice: 0.7755, decode.d4.loss_cls: 0.4312, decode.d4.loss_mask: 0.5729, decode.d4.loss_dice: 0.7776, decode.d5.loss_cls: 0.4202, decode.d5.loss_mask: 0.5733, decode.d5.loss_dice: 0.7745, decode.d6.loss_cls: 0.4167, decode.d6.loss_mask: 0.5747, decode.d6.loss_dice: 0.7701, decode.d7.loss_cls: 0.4199, decode.d7.loss_mask: 0.5732, decode.d7.loss_dice: 0.7712, decode.d8.loss_cls: 0.4161, decode.d8.loss_mask: 0.5713, decode.d8.loss_dice: 0.7712, loss: 21.0148 +2022-06-05 03:07:02,718 - mmseg - INFO - Iter [10600/40000] lr: 5.605e-06, eta: 3:55:14, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4746, decode.loss_mask: 0.5838, decode.loss_dice: 0.8192, decode.d0.loss_cls: 3.2380, decode.d0.loss_mask: 0.6227, decode.d0.loss_dice: 0.9909, decode.d1.loss_cls: 0.7166, decode.d1.loss_mask: 0.6019, decode.d1.loss_dice: 0.8583, decode.d2.loss_cls: 0.5649, decode.d2.loss_mask: 0.5903, decode.d2.loss_dice: 0.8294, decode.d3.loss_cls: 0.5066, decode.d3.loss_mask: 0.5842, decode.d3.loss_dice: 0.8171, decode.d4.loss_cls: 0.4900, decode.d4.loss_mask: 0.5844, decode.d4.loss_dice: 0.8223, decode.d5.loss_cls: 0.4879, decode.d5.loss_mask: 0.5825, decode.d5.loss_dice: 0.8188, decode.d6.loss_cls: 0.4798, decode.d6.loss_mask: 0.5834, decode.d6.loss_dice: 0.8197, decode.d7.loss_cls: 0.4757, decode.d7.loss_mask: 0.5816, decode.d7.loss_dice: 0.8221, decode.d8.loss_cls: 0.4704, decode.d8.loss_mask: 0.5833, decode.d8.loss_dice: 0.8226, loss: 22.2230 +2022-06-05 03:07:27,651 - mmseg - INFO - Iter [10650/40000] lr: 5.595e-06, eta: 3:54:53, time: 0.499, data_time: 0.060, memory: 31652, decode.loss_cls: 0.4020, decode.loss_mask: 0.5701, decode.loss_dice: 0.7719, decode.d0.loss_cls: 3.1399, decode.d0.loss_mask: 0.5997, decode.d0.loss_dice: 0.9187, decode.d1.loss_cls: 0.6289, decode.d1.loss_mask: 0.5879, decode.d1.loss_dice: 0.8158, decode.d2.loss_cls: 0.4868, decode.d2.loss_mask: 0.5767, decode.d2.loss_dice: 0.7799, decode.d3.loss_cls: 0.4340, decode.d3.loss_mask: 0.5725, decode.d3.loss_dice: 0.7764, decode.d4.loss_cls: 0.4224, decode.d4.loss_mask: 0.5718, decode.d4.loss_dice: 0.7750, decode.d5.loss_cls: 0.4098, decode.d5.loss_mask: 0.5729, decode.d5.loss_dice: 0.7743, decode.d6.loss_cls: 0.4070, decode.d6.loss_mask: 0.5714, decode.d6.loss_dice: 0.7725, decode.d7.loss_cls: 0.4020, decode.d7.loss_mask: 0.5720, decode.d7.loss_dice: 0.7741, decode.d8.loss_cls: 0.4046, decode.d8.loss_mask: 0.5725, decode.d8.loss_dice: 0.7707, loss: 20.8341 +2022-06-05 03:07:49,539 - mmseg - INFO - Iter [10700/40000] lr: 5.586e-06, eta: 3:54:23, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4499, decode.loss_mask: 0.5784, decode.loss_dice: 0.8316, decode.d0.loss_cls: 3.1426, decode.d0.loss_mask: 0.6175, decode.d0.loss_dice: 0.9837, decode.d1.loss_cls: 0.6901, decode.d1.loss_mask: 0.5963, decode.d1.loss_dice: 0.8745, decode.d2.loss_cls: 0.5286, decode.d2.loss_mask: 0.5815, decode.d2.loss_dice: 0.8405, decode.d3.loss_cls: 0.4918, decode.d3.loss_mask: 0.5797, decode.d3.loss_dice: 0.8292, decode.d4.loss_cls: 0.4715, decode.d4.loss_mask: 0.5796, decode.d4.loss_dice: 0.8307, decode.d5.loss_cls: 0.4662, decode.d5.loss_mask: 0.5794, decode.d5.loss_dice: 0.8321, decode.d6.loss_cls: 0.4598, decode.d6.loss_mask: 0.5788, decode.d6.loss_dice: 0.8272, 
decode.d7.loss_cls: 0.4606, decode.d7.loss_mask: 0.5773, decode.d7.loss_dice: 0.8255, decode.d8.loss_cls: 0.4496, decode.d8.loss_mask: 0.5773, decode.d8.loss_dice: 0.8352, loss: 21.9665 +2022-06-05 03:08:11,902 - mmseg - INFO - Iter [10750/40000] lr: 5.576e-06, eta: 3:53:55, time: 0.448, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4480, decode.loss_mask: 0.5811, decode.loss_dice: 0.8125, decode.d0.loss_cls: 3.1908, decode.d0.loss_mask: 0.6311, decode.d0.loss_dice: 0.9992, decode.d1.loss_cls: 0.7159, decode.d1.loss_mask: 0.6077, decode.d1.loss_dice: 0.8640, decode.d2.loss_cls: 0.5323, decode.d2.loss_mask: 0.5936, decode.d2.loss_dice: 0.8284, decode.d3.loss_cls: 0.4847, decode.d3.loss_mask: 0.5869, decode.d3.loss_dice: 0.8097, decode.d4.loss_cls: 0.4713, decode.d4.loss_mask: 0.5860, decode.d4.loss_dice: 0.8138, decode.d5.loss_cls: 0.4623, decode.d5.loss_mask: 0.5886, decode.d5.loss_dice: 0.8193, decode.d6.loss_cls: 0.4468, decode.d6.loss_mask: 0.5865, decode.d6.loss_dice: 0.8148, decode.d7.loss_cls: 0.4484, decode.d7.loss_mask: 0.5851, decode.d7.loss_dice: 0.8138, decode.d8.loss_cls: 0.4474, decode.d8.loss_mask: 0.5817, decode.d8.loss_dice: 0.8160, loss: 21.9677 +2022-06-05 03:08:33,940 - mmseg - INFO - Iter [10800/40000] lr: 5.567e-06, eta: 3:53:25, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4181, decode.loss_mask: 0.5858, decode.loss_dice: 0.8096, decode.d0.loss_cls: 3.1223, decode.d0.loss_mask: 0.6231, decode.d0.loss_dice: 0.9687, decode.d1.loss_cls: 0.6496, decode.d1.loss_mask: 0.5985, decode.d1.loss_dice: 0.8549, decode.d2.loss_cls: 0.5054, decode.d2.loss_mask: 0.5848, decode.d2.loss_dice: 0.8270, decode.d3.loss_cls: 0.4584, decode.d3.loss_mask: 0.5861, decode.d3.loss_dice: 0.8080, decode.d4.loss_cls: 0.4396, decode.d4.loss_mask: 0.5830, decode.d4.loss_dice: 0.8117, decode.d5.loss_cls: 0.4190, decode.d5.loss_mask: 0.5889, decode.d5.loss_dice: 0.8163, decode.d6.loss_cls: 0.4178, decode.d6.loss_mask: 0.5874, decode.d6.loss_dice: 0.8080, decode.d7.loss_cls: 0.4147, decode.d7.loss_mask: 0.5851, decode.d7.loss_dice: 0.8075, decode.d8.loss_cls: 0.4123, decode.d8.loss_mask: 0.5901, decode.d8.loss_dice: 0.8083, loss: 21.4899 +2022-06-05 03:08:56,053 - mmseg - INFO - Iter [10850/40000] lr: 5.557e-06, eta: 3:52:56, time: 0.442, data_time: 0.007, memory: 31652, decode.loss_cls: 0.4082, decode.loss_mask: 0.5541, decode.loss_dice: 0.7581, decode.d0.loss_cls: 3.1256, decode.d0.loss_mask: 0.5893, decode.d0.loss_dice: 0.9106, decode.d1.loss_cls: 0.6481, decode.d1.loss_mask: 0.5775, decode.d1.loss_dice: 0.8051, decode.d2.loss_cls: 0.4870, decode.d2.loss_mask: 0.5651, decode.d2.loss_dice: 0.7785, decode.d3.loss_cls: 0.4476, decode.d3.loss_mask: 0.5581, decode.d3.loss_dice: 0.7574, decode.d4.loss_cls: 0.4300, decode.d4.loss_mask: 0.5547, decode.d4.loss_dice: 0.7621, decode.d5.loss_cls: 0.4222, decode.d5.loss_mask: 0.5573, decode.d5.loss_dice: 0.7641, decode.d6.loss_cls: 0.4129, decode.d6.loss_mask: 0.5573, decode.d6.loss_dice: 0.7584, decode.d7.loss_cls: 0.4138, decode.d7.loss_mask: 0.5562, decode.d7.loss_dice: 0.7584, decode.d8.loss_cls: 0.4172, decode.d8.loss_mask: 0.5558, decode.d8.loss_dice: 0.7559, loss: 20.6467 +2022-06-05 03:09:18,356 - mmseg - INFO - Iter [10900/40000] lr: 5.548e-06, eta: 3:52:28, time: 0.446, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4103, decode.loss_mask: 0.5661, decode.loss_dice: 0.7641, decode.d0.loss_cls: 3.0611, decode.d0.loss_mask: 0.5997, decode.d0.loss_dice: 0.9280, decode.d1.loss_cls: 0.6298, decode.d1.loss_mask: 0.5823, 
decode.d1.loss_dice: 0.8239, decode.d2.loss_cls: 0.4831, decode.d2.loss_mask: 0.5702, decode.d2.loss_dice: 0.7883, decode.d3.loss_cls: 0.4438, decode.d3.loss_mask: 0.5665, decode.d3.loss_dice: 0.7659, decode.d4.loss_cls: 0.4345, decode.d4.loss_mask: 0.5651, decode.d4.loss_dice: 0.7642, decode.d5.loss_cls: 0.4232, decode.d5.loss_mask: 0.5620, decode.d5.loss_dice: 0.7656, decode.d6.loss_cls: 0.4169, decode.d6.loss_mask: 0.5659, decode.d6.loss_dice: 0.7644, decode.d7.loss_cls: 0.4128, decode.d7.loss_mask: 0.5689, decode.d7.loss_dice: 0.7648, decode.d8.loss_cls: 0.4072, decode.d8.loss_mask: 0.5673, decode.d8.loss_dice: 0.7659, loss: 20.7315 +2022-06-05 03:09:42,778 - mmseg - INFO - Iter [10950/40000] lr: 5.538e-06, eta: 3:52:05, time: 0.489, data_time: 0.058, memory: 31652, decode.loss_cls: 0.3924, decode.loss_mask: 0.5748, decode.loss_dice: 0.7629, decode.d0.loss_cls: 3.0260, decode.d0.loss_mask: 0.6165, decode.d0.loss_dice: 0.9254, decode.d1.loss_cls: 0.6285, decode.d1.loss_mask: 0.5964, decode.d1.loss_dice: 0.8160, decode.d2.loss_cls: 0.4683, decode.d2.loss_mask: 0.5795, decode.d2.loss_dice: 0.7833, decode.d3.loss_cls: 0.4211, decode.d3.loss_mask: 0.5763, decode.d3.loss_dice: 0.7725, decode.d4.loss_cls: 0.4187, decode.d4.loss_mask: 0.5764, decode.d4.loss_dice: 0.7692, decode.d5.loss_cls: 0.4084, decode.d5.loss_mask: 0.5767, decode.d5.loss_dice: 0.7670, decode.d6.loss_cls: 0.3875, decode.d6.loss_mask: 0.5752, decode.d6.loss_dice: 0.7676, decode.d7.loss_cls: 0.3929, decode.d7.loss_mask: 0.5761, decode.d7.loss_dice: 0.7690, decode.d8.loss_cls: 0.3991, decode.d8.loss_mask: 0.5704, decode.d8.loss_dice: 0.7653, loss: 20.6594 +2022-06-05 03:10:04,997 - mmseg - INFO - Saving checkpoint at 11000 iterations +2022-06-05 03:10:07,680 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:10:07,680 - mmseg - INFO - Iter [11000/40000] lr: 5.529e-06, eta: 3:51:44, time: 0.498, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4204, decode.loss_mask: 0.5535, decode.loss_dice: 0.7694, decode.d0.loss_cls: 3.0566, decode.d0.loss_mask: 0.6011, decode.d0.loss_dice: 0.9454, decode.d1.loss_cls: 0.6592, decode.d1.loss_mask: 0.5785, decode.d1.loss_dice: 0.8279, decode.d2.loss_cls: 0.5053, decode.d2.loss_mask: 0.5644, decode.d2.loss_dice: 0.7886, decode.d3.loss_cls: 0.4523, decode.d3.loss_mask: 0.5593, decode.d3.loss_dice: 0.7810, decode.d4.loss_cls: 0.4393, decode.d4.loss_mask: 0.5563, decode.d4.loss_dice: 0.7746, decode.d5.loss_cls: 0.4294, decode.d5.loss_mask: 0.5581, decode.d5.loss_dice: 0.7740, decode.d6.loss_cls: 0.4244, decode.d6.loss_mask: 0.5556, decode.d6.loss_dice: 0.7727, decode.d7.loss_cls: 0.4265, decode.d7.loss_mask: 0.5534, decode.d7.loss_dice: 0.7697, decode.d8.loss_cls: 0.4155, decode.d8.loss_mask: 0.5542, decode.d8.loss_dice: 0.7722, loss: 20.8386 +2022-06-05 03:10:29,776 - mmseg - INFO - Iter [11050/40000] lr: 5.519e-06, eta: 3:51:15, time: 0.442, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4109, decode.loss_mask: 0.5771, decode.loss_dice: 0.7907, decode.d0.loss_cls: 3.0169, decode.d0.loss_mask: 0.6176, decode.d0.loss_dice: 0.9599, decode.d1.loss_cls: 0.6450, decode.d1.loss_mask: 0.5952, decode.d1.loss_dice: 0.8424, decode.d2.loss_cls: 0.4832, decode.d2.loss_mask: 0.5848, decode.d2.loss_dice: 0.8072, decode.d3.loss_cls: 0.4448, decode.d3.loss_mask: 0.5778, decode.d3.loss_dice: 0.7982, decode.d4.loss_cls: 0.4283, decode.d4.loss_mask: 0.5799, decode.d4.loss_dice: 0.7976, decode.d5.loss_cls: 0.4212, decode.d5.loss_mask: 0.5761, 
decode.d5.loss_dice: 0.7954, decode.d6.loss_cls: 0.4161, decode.d6.loss_mask: 0.5782, decode.d6.loss_dice: 0.7862, decode.d7.loss_cls: 0.4164, decode.d7.loss_mask: 0.5741, decode.d7.loss_dice: 0.7896, decode.d8.loss_cls: 0.4111, decode.d8.loss_mask: 0.5770, decode.d8.loss_dice: 0.7933, loss: 21.0924 +2022-06-05 03:10:51,818 - mmseg - INFO - Iter [11100/40000] lr: 5.510e-06, eta: 3:50:46, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3952, decode.loss_mask: 0.5580, decode.loss_dice: 0.7775, decode.d0.loss_cls: 3.0040, decode.d0.loss_mask: 0.5986, decode.d0.loss_dice: 0.9443, decode.d1.loss_cls: 0.6125, decode.d1.loss_mask: 0.5875, decode.d1.loss_dice: 0.8348, decode.d2.loss_cls: 0.4725, decode.d2.loss_mask: 0.5679, decode.d2.loss_dice: 0.8013, decode.d3.loss_cls: 0.4300, decode.d3.loss_mask: 0.5609, decode.d3.loss_dice: 0.7823, decode.d4.loss_cls: 0.4138, decode.d4.loss_mask: 0.5624, decode.d4.loss_dice: 0.7854, decode.d5.loss_cls: 0.4114, decode.d5.loss_mask: 0.5583, decode.d5.loss_dice: 0.7834, decode.d6.loss_cls: 0.4009, decode.d6.loss_mask: 0.5606, decode.d6.loss_dice: 0.7750, decode.d7.loss_cls: 0.4017, decode.d7.loss_mask: 0.5607, decode.d7.loss_dice: 0.7792, decode.d8.loss_cls: 0.3949, decode.d8.loss_mask: 0.5578, decode.d8.loss_dice: 0.7812, loss: 20.6542 +2022-06-05 03:11:14,184 - mmseg - INFO - Iter [11150/40000] lr: 5.500e-06, eta: 3:50:18, time: 0.447, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4402, decode.loss_mask: 0.5727, decode.loss_dice: 0.7902, decode.d0.loss_cls: 3.0050, decode.d0.loss_mask: 0.6204, decode.d0.loss_dice: 0.9511, decode.d1.loss_cls: 0.6609, decode.d1.loss_mask: 0.5990, decode.d1.loss_dice: 0.8388, decode.d2.loss_cls: 0.5220, decode.d2.loss_mask: 0.5883, decode.d2.loss_dice: 0.8008, decode.d3.loss_cls: 0.4762, decode.d3.loss_mask: 0.5833, decode.d3.loss_dice: 0.7913, decode.d4.loss_cls: 0.4584, decode.d4.loss_mask: 0.5769, decode.d4.loss_dice: 0.7850, decode.d5.loss_cls: 0.4469, decode.d5.loss_mask: 0.5810, decode.d5.loss_dice: 0.7903, decode.d6.loss_cls: 0.4345, decode.d6.loss_mask: 0.5815, decode.d6.loss_dice: 0.7858, decode.d7.loss_cls: 0.4476, decode.d7.loss_mask: 0.5784, decode.d7.loss_dice: 0.7860, decode.d8.loss_cls: 0.4398, decode.d8.loss_mask: 0.5762, decode.d8.loss_dice: 0.7911, loss: 21.2995 +2022-06-05 03:11:36,032 - mmseg - INFO - Iter [11200/40000] lr: 5.491e-06, eta: 3:49:48, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3732, decode.loss_mask: 0.5625, decode.loss_dice: 0.7657, decode.d0.loss_cls: 2.9532, decode.d0.loss_mask: 0.6000, decode.d0.loss_dice: 0.9110, decode.d1.loss_cls: 0.5880, decode.d1.loss_mask: 0.5783, decode.d1.loss_dice: 0.8129, decode.d2.loss_cls: 0.4543, decode.d2.loss_mask: 0.5661, decode.d2.loss_dice: 0.7853, decode.d3.loss_cls: 0.4116, decode.d3.loss_mask: 0.5635, decode.d3.loss_dice: 0.7681, decode.d4.loss_cls: 0.4017, decode.d4.loss_mask: 0.5631, decode.d4.loss_dice: 0.7677, decode.d5.loss_cls: 0.3862, decode.d5.loss_mask: 0.5649, decode.d5.loss_dice: 0.7673, decode.d6.loss_cls: 0.3763, decode.d6.loss_mask: 0.5626, decode.d6.loss_dice: 0.7709, decode.d7.loss_cls: 0.3682, decode.d7.loss_mask: 0.5667, decode.d7.loss_dice: 0.7724, decode.d8.loss_cls: 0.3718, decode.d8.loss_mask: 0.5658, decode.d8.loss_dice: 0.7730, loss: 20.2719 +2022-06-05 03:12:00,594 - mmseg - INFO - Iter [11250/40000] lr: 5.481e-06, eta: 3:49:26, time: 0.491, data_time: 0.059, memory: 31652, decode.loss_cls: 0.4199, decode.loss_mask: 0.5556, decode.loss_dice: 0.7920, decode.d0.loss_cls: 2.9349, 
decode.d0.loss_mask: 0.6033, decode.d0.loss_dice: 0.9632, decode.d1.loss_cls: 0.6361, decode.d1.loss_mask: 0.5823, decode.d1.loss_dice: 0.8448, decode.d2.loss_cls: 0.4940, decode.d2.loss_mask: 0.5636, decode.d2.loss_dice: 0.7940, decode.d3.loss_cls: 0.4493, decode.d3.loss_mask: 0.5623, decode.d3.loss_dice: 0.7867, decode.d4.loss_cls: 0.4434, decode.d4.loss_mask: 0.5592, decode.d4.loss_dice: 0.7907, decode.d5.loss_cls: 0.4305, decode.d5.loss_mask: 0.5542, decode.d5.loss_dice: 0.7875, decode.d6.loss_cls: 0.4228, decode.d6.loss_mask: 0.5551, decode.d6.loss_dice: 0.7833, decode.d7.loss_cls: 0.4243, decode.d7.loss_mask: 0.5577, decode.d7.loss_dice: 0.7837, decode.d8.loss_cls: 0.4199, decode.d8.loss_mask: 0.5559, decode.d8.loss_dice: 0.7887, loss: 20.8389 +2022-06-05 03:12:22,894 - mmseg - INFO - Iter [11300/40000] lr: 5.472e-06, eta: 3:48:58, time: 0.446, data_time: 0.010, memory: 31652, decode.loss_cls: 0.3947, decode.loss_mask: 0.5732, decode.loss_dice: 0.7815, decode.d0.loss_cls: 2.9036, decode.d0.loss_mask: 0.6206, decode.d0.loss_dice: 0.9480, decode.d1.loss_cls: 0.6349, decode.d1.loss_mask: 0.5974, decode.d1.loss_dice: 0.8273, decode.d2.loss_cls: 0.4758, decode.d2.loss_mask: 0.5809, decode.d2.loss_dice: 0.7993, decode.d3.loss_cls: 0.4342, decode.d3.loss_mask: 0.5754, decode.d3.loss_dice: 0.7917, decode.d4.loss_cls: 0.4143, decode.d4.loss_mask: 0.5781, decode.d4.loss_dice: 0.7912, decode.d5.loss_cls: 0.4080, decode.d5.loss_mask: 0.5744, decode.d5.loss_dice: 0.7840, decode.d6.loss_cls: 0.3976, decode.d6.loss_mask: 0.5740, decode.d6.loss_dice: 0.7774, decode.d7.loss_cls: 0.4068, decode.d7.loss_mask: 0.5757, decode.d7.loss_dice: 0.7782, decode.d8.loss_cls: 0.3957, decode.d8.loss_mask: 0.5741, decode.d8.loss_dice: 0.7854, loss: 20.7535 +2022-06-05 03:12:45,498 - mmseg - INFO - Iter [11350/40000] lr: 5.462e-06, eta: 3:48:30, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4164, decode.loss_mask: 0.5680, decode.loss_dice: 0.7704, decode.d0.loss_cls: 2.9361, decode.d0.loss_mask: 0.6114, decode.d0.loss_dice: 0.9504, decode.d1.loss_cls: 0.6359, decode.d1.loss_mask: 0.5816, decode.d1.loss_dice: 0.8202, decode.d2.loss_cls: 0.4820, decode.d2.loss_mask: 0.5719, decode.d2.loss_dice: 0.7880, decode.d3.loss_cls: 0.4403, decode.d3.loss_mask: 0.5669, decode.d3.loss_dice: 0.7807, decode.d4.loss_cls: 0.4273, decode.d4.loss_mask: 0.5690, decode.d4.loss_dice: 0.7789, decode.d5.loss_cls: 0.4189, decode.d5.loss_mask: 0.5700, decode.d5.loss_dice: 0.7755, decode.d6.loss_cls: 0.4251, decode.d6.loss_mask: 0.5657, decode.d6.loss_dice: 0.7672, decode.d7.loss_cls: 0.4144, decode.d7.loss_mask: 0.5667, decode.d7.loss_dice: 0.7655, decode.d8.loss_cls: 0.4203, decode.d8.loss_mask: 0.5660, decode.d8.loss_dice: 0.7657, loss: 20.7164 +2022-06-05 03:13:07,315 - mmseg - INFO - Iter [11400/40000] lr: 5.452e-06, eta: 3:48:01, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3788, decode.loss_mask: 0.5573, decode.loss_dice: 0.7648, decode.d0.loss_cls: 2.9047, decode.d0.loss_mask: 0.5909, decode.d0.loss_dice: 0.9264, decode.d1.loss_cls: 0.5978, decode.d1.loss_mask: 0.5730, decode.d1.loss_dice: 0.8153, decode.d2.loss_cls: 0.4582, decode.d2.loss_mask: 0.5629, decode.d2.loss_dice: 0.7848, decode.d3.loss_cls: 0.4091, decode.d3.loss_mask: 0.5580, decode.d3.loss_dice: 0.7717, decode.d4.loss_cls: 0.3993, decode.d4.loss_mask: 0.5578, decode.d4.loss_dice: 0.7675, decode.d5.loss_cls: 0.3947, decode.d5.loss_mask: 0.5584, decode.d5.loss_dice: 0.7706, decode.d6.loss_cls: 0.3793, decode.d6.loss_mask: 0.5601, 
decode.d6.loss_dice: 0.7716, decode.d7.loss_cls: 0.3783, decode.d7.loss_mask: 0.5579, decode.d7.loss_dice: 0.7687, decode.d8.loss_cls: 0.3770, decode.d8.loss_mask: 0.5564, decode.d8.loss_dice: 0.7690, loss: 20.2201 +2022-06-05 03:13:29,172 - mmseg - INFO - Iter [11450/40000] lr: 5.443e-06, eta: 3:47:32, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3883, decode.loss_mask: 0.5455, decode.loss_dice: 0.7728, decode.d0.loss_cls: 2.8775, decode.d0.loss_mask: 0.5724, decode.d0.loss_dice: 0.9199, decode.d1.loss_cls: 0.5959, decode.d1.loss_mask: 0.5611, decode.d1.loss_dice: 0.8190, decode.d2.loss_cls: 0.4676, decode.d2.loss_mask: 0.5480, decode.d2.loss_dice: 0.7812, decode.d3.loss_cls: 0.4159, decode.d3.loss_mask: 0.5455, decode.d3.loss_dice: 0.7757, decode.d4.loss_cls: 0.3999, decode.d4.loss_mask: 0.5453, decode.d4.loss_dice: 0.7772, decode.d5.loss_cls: 0.3865, decode.d5.loss_mask: 0.5430, decode.d5.loss_dice: 0.7774, decode.d6.loss_cls: 0.3863, decode.d6.loss_mask: 0.5455, decode.d6.loss_dice: 0.7683, decode.d7.loss_cls: 0.3891, decode.d7.loss_mask: 0.5442, decode.d7.loss_dice: 0.7741, decode.d8.loss_cls: 0.3912, decode.d8.loss_mask: 0.5441, decode.d8.loss_dice: 0.7708, loss: 20.1294 +2022-06-05 03:13:50,864 - mmseg - INFO - Iter [11500/40000] lr: 5.433e-06, eta: 3:47:03, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3878, decode.loss_mask: 0.5640, decode.loss_dice: 0.7539, decode.d0.loss_cls: 2.8543, decode.d0.loss_mask: 0.6169, decode.d0.loss_dice: 0.9248, decode.d1.loss_cls: 0.6042, decode.d1.loss_mask: 0.5903, decode.d1.loss_dice: 0.8106, decode.d2.loss_cls: 0.4616, decode.d2.loss_mask: 0.5736, decode.d2.loss_dice: 0.7793, decode.d3.loss_cls: 0.4143, decode.d3.loss_mask: 0.5726, decode.d3.loss_dice: 0.7608, decode.d4.loss_cls: 0.4095, decode.d4.loss_mask: 0.5705, decode.d4.loss_dice: 0.7594, decode.d5.loss_cls: 0.4057, decode.d5.loss_mask: 0.5693, decode.d5.loss_dice: 0.7603, decode.d6.loss_cls: 0.3948, decode.d6.loss_mask: 0.5693, decode.d6.loss_dice: 0.7553, decode.d7.loss_cls: 0.3902, decode.d7.loss_mask: 0.5655, decode.d7.loss_dice: 0.7530, decode.d8.loss_cls: 0.3912, decode.d8.loss_mask: 0.5631, decode.d8.loss_dice: 0.7571, loss: 20.2833 +2022-06-05 03:14:15,365 - mmseg - INFO - Iter [11550/40000] lr: 5.424e-06, eta: 3:46:40, time: 0.490, data_time: 0.058, memory: 31652, decode.loss_cls: 0.3925, decode.loss_mask: 0.5520, decode.loss_dice: 0.7677, decode.d0.loss_cls: 2.8173, decode.d0.loss_mask: 0.5944, decode.d0.loss_dice: 0.9202, decode.d1.loss_cls: 0.5826, decode.d1.loss_mask: 0.5721, decode.d1.loss_dice: 0.8144, decode.d2.loss_cls: 0.4590, decode.d2.loss_mask: 0.5593, decode.d2.loss_dice: 0.7801, decode.d3.loss_cls: 0.4067, decode.d3.loss_mask: 0.5563, decode.d3.loss_dice: 0.7745, decode.d4.loss_cls: 0.3999, decode.d4.loss_mask: 0.5556, decode.d4.loss_dice: 0.7813, decode.d5.loss_cls: 0.4002, decode.d5.loss_mask: 0.5535, decode.d5.loss_dice: 0.7722, decode.d6.loss_cls: 0.3884, decode.d6.loss_mask: 0.5509, decode.d6.loss_dice: 0.7673, decode.d7.loss_cls: 0.3894, decode.d7.loss_mask: 0.5509, decode.d7.loss_dice: 0.7692, decode.d8.loss_cls: 0.3831, decode.d8.loss_mask: 0.5541, decode.d8.loss_dice: 0.7714, loss: 20.1362 +2022-06-05 03:14:37,650 - mmseg - INFO - Iter [11600/40000] lr: 5.414e-06, eta: 3:46:13, time: 0.446, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3949, decode.loss_mask: 0.5432, decode.loss_dice: 0.7473, decode.d0.loss_cls: 2.8322, decode.d0.loss_mask: 0.5862, decode.d0.loss_dice: 0.9262, decode.d1.loss_cls: 0.6173, 
decode.d1.loss_mask: 0.5677, decode.d1.loss_dice: 0.8095, decode.d2.loss_cls: 0.4793, decode.d2.loss_mask: 0.5518, decode.d2.loss_dice: 0.7648, decode.d3.loss_cls: 0.4344, decode.d3.loss_mask: 0.5490, decode.d3.loss_dice: 0.7605, decode.d4.loss_cls: 0.4130, decode.d4.loss_mask: 0.5449, decode.d4.loss_dice: 0.7591, decode.d5.loss_cls: 0.4069, decode.d5.loss_mask: 0.5460, decode.d5.loss_dice: 0.7587, decode.d6.loss_cls: 0.3988, decode.d6.loss_mask: 0.5445, decode.d6.loss_dice: 0.7530, decode.d7.loss_cls: 0.3963, decode.d7.loss_mask: 0.5427, decode.d7.loss_dice: 0.7500, decode.d8.loss_cls: 0.3919, decode.d8.loss_mask: 0.5409, decode.d8.loss_dice: 0.7507, loss: 20.0616 +2022-06-05 03:14:59,578 - mmseg - INFO - Iter [11650/40000] lr: 5.405e-06, eta: 3:45:44, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3886, decode.loss_mask: 0.5669, decode.loss_dice: 0.7823, decode.d0.loss_cls: 2.8132, decode.d0.loss_mask: 0.6105, decode.d0.loss_dice: 0.9408, decode.d1.loss_cls: 0.6288, decode.d1.loss_mask: 0.5891, decode.d1.loss_dice: 0.8362, decode.d2.loss_cls: 0.4771, decode.d2.loss_mask: 0.5721, decode.d2.loss_dice: 0.8007, decode.d3.loss_cls: 0.4269, decode.d3.loss_mask: 0.5689, decode.d3.loss_dice: 0.7836, decode.d4.loss_cls: 0.4019, decode.d4.loss_mask: 0.5755, decode.d4.loss_dice: 0.7918, decode.d5.loss_cls: 0.4046, decode.d5.loss_mask: 0.5700, decode.d5.loss_dice: 0.7774, decode.d6.loss_cls: 0.3993, decode.d6.loss_mask: 0.5674, decode.d6.loss_dice: 0.7782, decode.d7.loss_cls: 0.4009, decode.d7.loss_mask: 0.5674, decode.d7.loss_dice: 0.7770, decode.d8.loss_cls: 0.3938, decode.d8.loss_mask: 0.5660, decode.d8.loss_dice: 0.7853, loss: 20.5419 +2022-06-05 03:15:21,900 - mmseg - INFO - Iter [11700/40000] lr: 5.395e-06, eta: 3:45:16, time: 0.446, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3736, decode.loss_mask: 0.5527, decode.loss_dice: 0.7848, decode.d0.loss_cls: 2.8078, decode.d0.loss_mask: 0.5902, decode.d0.loss_dice: 0.9394, decode.d1.loss_cls: 0.5915, decode.d1.loss_mask: 0.5735, decode.d1.loss_dice: 0.8389, decode.d2.loss_cls: 0.4404, decode.d2.loss_mask: 0.5602, decode.d2.loss_dice: 0.8049, decode.d3.loss_cls: 0.4033, decode.d3.loss_mask: 0.5544, decode.d3.loss_dice: 0.7860, decode.d4.loss_cls: 0.3887, decode.d4.loss_mask: 0.5518, decode.d4.loss_dice: 0.7884, decode.d5.loss_cls: 0.3801, decode.d5.loss_mask: 0.5525, decode.d5.loss_dice: 0.7913, decode.d6.loss_cls: 0.3737, decode.d6.loss_mask: 0.5548, decode.d6.loss_dice: 0.7870, decode.d7.loss_cls: 0.3728, decode.d7.loss_mask: 0.5545, decode.d7.loss_dice: 0.7874, decode.d8.loss_cls: 0.3754, decode.d8.loss_mask: 0.5561, decode.d8.loss_dice: 0.7883, loss: 20.2041 +2022-06-05 03:15:43,912 - mmseg - INFO - Iter [11750/40000] lr: 5.386e-06, eta: 3:44:48, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3708, decode.loss_mask: 0.5594, decode.loss_dice: 0.7717, decode.d0.loss_cls: 2.7898, decode.d0.loss_mask: 0.5955, decode.d0.loss_dice: 0.9186, decode.d1.loss_cls: 0.5963, decode.d1.loss_mask: 0.5802, decode.d1.loss_dice: 0.8183, decode.d2.loss_cls: 0.4549, decode.d2.loss_mask: 0.5655, decode.d2.loss_dice: 0.7926, decode.d3.loss_cls: 0.4135, decode.d3.loss_mask: 0.5596, decode.d3.loss_dice: 0.7779, decode.d4.loss_cls: 0.3913, decode.d4.loss_mask: 0.5661, decode.d4.loss_dice: 0.7802, decode.d5.loss_cls: 0.3810, decode.d5.loss_mask: 0.5630, decode.d5.loss_dice: 0.7856, decode.d6.loss_cls: 0.3811, decode.d6.loss_mask: 0.5616, decode.d6.loss_dice: 0.7690, decode.d7.loss_cls: 0.3757, decode.d7.loss_mask: 0.5614, 
decode.d7.loss_dice: 0.7724, decode.d8.loss_cls: 0.3741, decode.d8.loss_mask: 0.5605, decode.d8.loss_dice: 0.7741, loss: 20.1616 +2022-06-05 03:16:06,409 - mmseg - INFO - Iter [11800/40000] lr: 5.376e-06, eta: 3:44:21, time: 0.450, data_time: 0.009, memory: 31652, decode.loss_cls: 0.4062, decode.loss_mask: 0.5679, decode.loss_dice: 0.7716, decode.d0.loss_cls: 2.7504, decode.d0.loss_mask: 0.6044, decode.d0.loss_dice: 0.9274, decode.d1.loss_cls: 0.6177, decode.d1.loss_mask: 0.5915, decode.d1.loss_dice: 0.8204, decode.d2.loss_cls: 0.4790, decode.d2.loss_mask: 0.5780, decode.d2.loss_dice: 0.7852, decode.d3.loss_cls: 0.4400, decode.d3.loss_mask: 0.5667, decode.d3.loss_dice: 0.7800, decode.d4.loss_cls: 0.4294, decode.d4.loss_mask: 0.5677, decode.d4.loss_dice: 0.7737, decode.d5.loss_cls: 0.4240, decode.d5.loss_mask: 0.5649, decode.d5.loss_dice: 0.7729, decode.d6.loss_cls: 0.4092, decode.d6.loss_mask: 0.5678, decode.d6.loss_dice: 0.7670, decode.d7.loss_cls: 0.4043, decode.d7.loss_mask: 0.5699, decode.d7.loss_dice: 0.7711, decode.d8.loss_cls: 0.4111, decode.d8.loss_mask: 0.5673, decode.d8.loss_dice: 0.7684, loss: 20.4552 +2022-06-05 03:16:28,584 - mmseg - INFO - Iter [11850/40000] lr: 5.367e-06, eta: 3:43:53, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.4144, decode.loss_mask: 0.5508, decode.loss_dice: 0.7723, decode.d0.loss_cls: 2.7988, decode.d0.loss_mask: 0.5950, decode.d0.loss_dice: 0.9364, decode.d1.loss_cls: 0.6321, decode.d1.loss_mask: 0.5748, decode.d1.loss_dice: 0.8217, decode.d2.loss_cls: 0.4965, decode.d2.loss_mask: 0.5581, decode.d2.loss_dice: 0.7855, decode.d3.loss_cls: 0.4470, decode.d3.loss_mask: 0.5534, decode.d3.loss_dice: 0.7751, decode.d4.loss_cls: 0.4298, decode.d4.loss_mask: 0.5526, decode.d4.loss_dice: 0.7740, decode.d5.loss_cls: 0.4226, decode.d5.loss_mask: 0.5487, decode.d5.loss_dice: 0.7726, decode.d6.loss_cls: 0.4182, decode.d6.loss_mask: 0.5491, decode.d6.loss_dice: 0.7659, decode.d7.loss_cls: 0.4189, decode.d7.loss_mask: 0.5488, decode.d7.loss_dice: 0.7719, decode.d8.loss_cls: 0.4164, decode.d8.loss_mask: 0.5495, decode.d8.loss_dice: 0.7721, loss: 20.4231 +2022-06-05 03:16:52,814 - mmseg - INFO - Iter [11900/40000] lr: 5.357e-06, eta: 3:43:30, time: 0.485, data_time: 0.056, memory: 31652, decode.loss_cls: 0.3712, decode.loss_mask: 0.5455, decode.loss_dice: 0.7596, decode.d0.loss_cls: 2.7466, decode.d0.loss_mask: 0.5905, decode.d0.loss_dice: 0.9235, decode.d1.loss_cls: 0.5916, decode.d1.loss_mask: 0.5694, decode.d1.loss_dice: 0.8069, decode.d2.loss_cls: 0.4449, decode.d2.loss_mask: 0.5568, decode.d2.loss_dice: 0.7786, decode.d3.loss_cls: 0.4021, decode.d3.loss_mask: 0.5503, decode.d3.loss_dice: 0.7610, decode.d4.loss_cls: 0.3914, decode.d4.loss_mask: 0.5482, decode.d4.loss_dice: 0.7627, decode.d5.loss_cls: 0.3780, decode.d5.loss_mask: 0.5456, decode.d5.loss_dice: 0.7608, decode.d6.loss_cls: 0.3753, decode.d6.loss_mask: 0.5455, decode.d6.loss_dice: 0.7550, decode.d7.loss_cls: 0.3780, decode.d7.loss_mask: 0.5469, decode.d7.loss_dice: 0.7590, decode.d8.loss_cls: 0.3742, decode.d8.loss_mask: 0.5461, decode.d8.loss_dice: 0.7578, loss: 19.8232 +2022-06-05 03:17:15,062 - mmseg - INFO - Iter [11950/40000] lr: 5.348e-06, eta: 3:43:02, time: 0.445, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3926, decode.loss_mask: 0.5385, decode.loss_dice: 0.7573, decode.d0.loss_cls: 2.7310, decode.d0.loss_mask: 0.5697, decode.d0.loss_dice: 0.9273, decode.d1.loss_cls: 0.6068, decode.d1.loss_mask: 0.5592, decode.d1.loss_dice: 0.8115, decode.d2.loss_cls: 0.4584, 
decode.d2.loss_mask: 0.5490, decode.d2.loss_dice: 0.7764, decode.d3.loss_cls: 0.4184, decode.d3.loss_mask: 0.5413, decode.d3.loss_dice: 0.7640, decode.d4.loss_cls: 0.3983, decode.d4.loss_mask: 0.5437, decode.d4.loss_dice: 0.7691, decode.d5.loss_cls: 0.3969, decode.d5.loss_mask: 0.5410, decode.d5.loss_dice: 0.7624, decode.d6.loss_cls: 0.3959, decode.d6.loss_mask: 0.5374, decode.d6.loss_dice: 0.7563, decode.d7.loss_cls: 0.3918, decode.d7.loss_mask: 0.5391, decode.d7.loss_dice: 0.7610, decode.d8.loss_cls: 0.3891, decode.d8.loss_mask: 0.5388, decode.d8.loss_dice: 0.7664, loss: 19.8885 +2022-06-05 03:17:37,553 - mmseg - INFO - Saving checkpoint at 12000 iterations +2022-06-05 03:17:40,213 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:17:40,214 - mmseg - INFO - Iter [12000/40000] lr: 5.338e-06, eta: 3:42:41, time: 0.503, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3605, decode.loss_mask: 0.5354, decode.loss_dice: 0.7563, decode.d0.loss_cls: 2.7243, decode.d0.loss_mask: 0.5775, decode.d0.loss_dice: 0.9016, decode.d1.loss_cls: 0.5753, decode.d1.loss_mask: 0.5579, decode.d1.loss_dice: 0.8033, decode.d2.loss_cls: 0.4361, decode.d2.loss_mask: 0.5424, decode.d2.loss_dice: 0.7735, decode.d3.loss_cls: 0.3952, decode.d3.loss_mask: 0.5352, decode.d3.loss_dice: 0.7592, decode.d4.loss_cls: 0.3827, decode.d4.loss_mask: 0.5352, decode.d4.loss_dice: 0.7661, decode.d5.loss_cls: 0.3716, decode.d5.loss_mask: 0.5379, decode.d5.loss_dice: 0.7672, decode.d6.loss_cls: 0.3620, decode.d6.loss_mask: 0.5369, decode.d6.loss_dice: 0.7574, decode.d7.loss_cls: 0.3635, decode.d7.loss_mask: 0.5371, decode.d7.loss_dice: 0.7575, decode.d8.loss_cls: 0.3617, decode.d8.loss_mask: 0.5353, decode.d8.loss_dice: 0.7615, loss: 19.5673 +2022-06-05 03:20:20,605 - mmseg - INFO - per class results: +2022-06-05 03:20:20,612 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 90.83 | 96.02 | +| bag | 34.65 | 46.52 | +| bed | 29.54 | 37.0 | +| bedclothes | 45.45 | 75.04 | +| bench | 16.72 | 18.31 | +| bicycle | 83.78 | 92.92 | +| bird | 94.56 | 97.19 | +| boat | 81.55 | 90.23 | +| book | 51.91 | 64.72 | +| bottle | 88.35 | 95.01 | +| building | 64.35 | 79.22 | +| bus | 93.28 | 96.55 | +| cabinet | 45.05 | 63.44 | +| car | 90.43 | 95.85 | +| cat | 93.46 | 97.32 | +| ceiling | 61.19 | 81.67 | +| chair | 60.33 | 78.13 | +| cloth | 18.64 | 24.39 | +| computer | 41.36 | 54.82 | +| cow | 94.08 | 97.0 | +| cup | 42.21 | 55.11 | +| curtain | 54.8 | 69.81 | +| dog | 91.32 | 96.36 | +| door | 29.96 | 51.38 | +| fence | 44.04 | 60.76 | +| floor | 72.56 | 87.26 | +| flower | 31.2 | 54.17 | +| food | 35.03 | 42.67 | +| grass | 81.69 | 88.87 | +| ground | 57.26 | 72.36 | +| horse | 93.3 | 96.67 | +| keyboard | 86.84 | 94.09 | +| light | 57.28 | 71.48 | +| motorbike | 90.02 | 95.54 | +| mountain | 54.76 | 72.31 | +| mouse | 79.42 | 85.0 | +| person | 89.91 | 94.86 | +| plate | 23.84 | 26.66 | +| platform | 51.87 | 70.29 | +| pottedplant | 79.37 | 87.75 | +| road | 53.32 | 67.22 | +| rock | 51.46 | 67.13 | +| sheep | 93.84 | 96.79 | +| shelves | 33.54 | 45.57 | +| sidewalk | 29.88 | 54.16 | +| sign | 45.48 | 56.29 | +| sky | 94.97 | 97.45 | +| snow | 75.14 | 87.48 | +| sofa | 57.82 | 74.95 | +| table | 69.75 | 85.72 | +| track | 68.76 | 78.7 | +| train | 92.63 | 95.98 | +| tree | 80.87 | 89.18 | +| truck | 37.05 | 46.05 | +| tvmonitor | 86.97 | 93.16 | +| wall | 70.01 | 80.72 | +| water | 91.57 | 95.89 | +| window | 41.06 | 
53.21 | +| wood | 26.73 | 43.42 | ++-------------+-------+-------+ +2022-06-05 03:20:20,612 - mmseg - INFO - Summary: +2022-06-05 03:20:20,612 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 85.1 | 63.17 | 73.96 | ++------+-------+-------+ +2022-06-05 03:20:20,614 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_8000.pth was removed +2022-06-05 03:20:23,335 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_12000.pth. +2022-06-05 03:20:23,335 - mmseg - INFO - Best mIoU is 0.6317 at 12000 iter. +2022-06-05 03:20:23,362 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:20:23,362 - mmseg - INFO - Iter(val) [638] aAcc: 0.8510, mIoU: 0.6317, mAcc: 0.7396, IoU.aeroplane: 0.9083, IoU.bag: 0.3465, IoU.bed: 0.2954, IoU.bedclothes: 0.4545, IoU.bench: 0.1672, IoU.bicycle: 0.8378, IoU.bird: 0.9456, IoU.boat: 0.8155, IoU.book: 0.5191, IoU.bottle: 0.8835, IoU.building: 0.6435, IoU.bus: 0.9328, IoU.cabinet: 0.4505, IoU.car: 0.9043, IoU.cat: 0.9346, IoU.ceiling: 0.6119, IoU.chair: 0.6033, IoU.cloth: 0.1864, IoU.computer: 0.4136, IoU.cow: 0.9408, IoU.cup: 0.4221, IoU.curtain: 0.5480, IoU.dog: 0.9132, IoU.door: 0.2996, IoU.fence: 0.4404, IoU.floor: 0.7256, IoU.flower: 0.3120, IoU.food: 0.3503, IoU.grass: 0.8169, IoU.ground: 0.5726, IoU.horse: 0.9330, IoU.keyboard: 0.8684, IoU.light: 0.5728, IoU.motorbike: 0.9002, IoU.mountain: 0.5476, IoU.mouse: 0.7942, IoU.person: 0.8991, IoU.plate: 0.2384, IoU.platform: 0.5187, IoU.pottedplant: 0.7937, IoU.road: 0.5332, IoU.rock: 0.5146, IoU.sheep: 0.9384, IoU.shelves: 0.3354, IoU.sidewalk: 0.2988, IoU.sign: 0.4548, IoU.sky: 0.9497, IoU.snow: 0.7514, IoU.sofa: 0.5782, IoU.table: 0.6975, IoU.track: 0.6876, IoU.train: 0.9263, IoU.tree: 0.8087, IoU.truck: 0.3705, IoU.tvmonitor: 0.8697, IoU.wall: 0.7001, IoU.water: 0.9157, IoU.window: 0.4106, IoU.wood: 0.2673, Acc.aeroplane: 0.9602, Acc.bag: 0.4652, Acc.bed: 0.3700, Acc.bedclothes: 0.7504, Acc.bench: 0.1831, Acc.bicycle: 0.9292, Acc.bird: 0.9719, Acc.boat: 0.9023, Acc.book: 0.6472, Acc.bottle: 0.9501, Acc.building: 0.7922, Acc.bus: 0.9655, Acc.cabinet: 0.6344, Acc.car: 0.9585, Acc.cat: 0.9732, Acc.ceiling: 0.8167, Acc.chair: 0.7813, Acc.cloth: 0.2439, Acc.computer: 0.5482, Acc.cow: 0.9700, Acc.cup: 0.5511, Acc.curtain: 0.6981, Acc.dog: 0.9636, Acc.door: 0.5138, Acc.fence: 0.6076, Acc.floor: 0.8726, Acc.flower: 0.5417, Acc.food: 0.4267, Acc.grass: 0.8887, Acc.ground: 0.7236, Acc.horse: 0.9667, Acc.keyboard: 0.9409, Acc.light: 0.7148, Acc.motorbike: 0.9554, Acc.mountain: 0.7231, Acc.mouse: 0.8500, Acc.person: 0.9486, Acc.plate: 0.2666, Acc.platform: 0.7029, Acc.pottedplant: 0.8775, Acc.road: 0.6722, Acc.rock: 0.6713, Acc.sheep: 0.9679, Acc.shelves: 0.4557, Acc.sidewalk: 0.5416, Acc.sign: 0.5629, Acc.sky: 0.9745, Acc.snow: 0.8748, Acc.sofa: 0.7495, Acc.table: 0.8572, Acc.track: 0.7870, Acc.train: 0.9598, Acc.tree: 0.8918, Acc.truck: 0.4605, Acc.tvmonitor: 0.9316, Acc.wall: 0.8072, Acc.water: 0.9589, Acc.window: 0.5321, Acc.wood: 0.4342 +2022-06-05 03:20:45,409 - mmseg - INFO - Iter [12050/40000] lr: 5.329e-06, eta: 3:48:32, time: 3.704, data_time: 3.272, memory: 31652, decode.loss_cls: 0.3724, decode.loss_mask: 0.5692, decode.loss_dice: 0.7612, decode.d0.loss_cls: 2.7144, decode.d0.loss_mask: 0.6083, decode.d0.loss_dice: 0.9234, decode.d1.loss_cls: 0.6054, 
decode.d1.loss_mask: 0.5911, decode.d1.loss_dice: 0.8183, decode.d2.loss_cls: 0.4576, decode.d2.loss_mask: 0.5755, decode.d2.loss_dice: 0.7830, decode.d3.loss_cls: 0.4155, decode.d3.loss_mask: 0.5681, decode.d3.loss_dice: 0.7626, decode.d4.loss_cls: 0.3934, decode.d4.loss_mask: 0.5675, decode.d4.loss_dice: 0.7714, decode.d5.loss_cls: 0.3906, decode.d5.loss_mask: 0.5668, decode.d5.loss_dice: 0.7642, decode.d6.loss_cls: 0.3791, decode.d6.loss_mask: 0.5675, decode.d6.loss_dice: 0.7624, decode.d7.loss_cls: 0.3752, decode.d7.loss_mask: 0.5656, decode.d7.loss_dice: 0.7610, decode.d8.loss_cls: 0.3749, decode.d8.loss_mask: 0.5674, decode.d8.loss_dice: 0.7594, loss: 20.0922 +2022-06-05 03:21:07,788 - mmseg - INFO - Iter [12100/40000] lr: 5.319e-06, eta: 3:48:02, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3782, decode.loss_mask: 0.5524, decode.loss_dice: 0.7790, decode.d0.loss_cls: 2.6590, decode.d0.loss_mask: 0.5962, decode.d0.loss_dice: 0.9308, decode.d1.loss_cls: 0.6118, decode.d1.loss_mask: 0.5706, decode.d1.loss_dice: 0.8238, decode.d2.loss_cls: 0.4689, decode.d2.loss_mask: 0.5544, decode.d2.loss_dice: 0.7938, decode.d3.loss_cls: 0.4250, decode.d3.loss_mask: 0.5542, decode.d3.loss_dice: 0.7828, decode.d4.loss_cls: 0.4063, decode.d4.loss_mask: 0.5533, decode.d4.loss_dice: 0.7824, decode.d5.loss_cls: 0.3977, decode.d5.loss_mask: 0.5532, decode.d5.loss_dice: 0.7817, decode.d6.loss_cls: 0.3846, decode.d6.loss_mask: 0.5519, decode.d6.loss_dice: 0.7842, decode.d7.loss_cls: 0.3790, decode.d7.loss_mask: 0.5511, decode.d7.loss_dice: 0.7845, decode.d8.loss_cls: 0.3870, decode.d8.loss_mask: 0.5483, decode.d8.loss_dice: 0.7797, loss: 20.1060 +2022-06-05 03:21:29,603 - mmseg - INFO - Iter [12150/40000] lr: 5.310e-06, eta: 3:47:31, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3508, decode.loss_mask: 0.5392, decode.loss_dice: 0.7185, decode.d0.loss_cls: 2.6346, decode.d0.loss_mask: 0.5745, decode.d0.loss_dice: 0.8683, decode.d1.loss_cls: 0.5582, decode.d1.loss_mask: 0.5589, decode.d1.loss_dice: 0.7638, decode.d2.loss_cls: 0.4187, decode.d2.loss_mask: 0.5456, decode.d2.loss_dice: 0.7340, decode.d3.loss_cls: 0.3790, decode.d3.loss_mask: 0.5408, decode.d3.loss_dice: 0.7255, decode.d4.loss_cls: 0.3669, decode.d4.loss_mask: 0.5398, decode.d4.loss_dice: 0.7275, decode.d5.loss_cls: 0.3586, decode.d5.loss_mask: 0.5416, decode.d5.loss_dice: 0.7270, decode.d6.loss_cls: 0.3493, decode.d6.loss_mask: 0.5444, decode.d6.loss_dice: 0.7257, decode.d7.loss_cls: 0.3529, decode.d7.loss_mask: 0.5397, decode.d7.loss_dice: 0.7202, decode.d8.loss_cls: 0.3482, decode.d8.loss_mask: 0.5353, decode.d8.loss_dice: 0.7249, loss: 19.0123 +2022-06-05 03:21:54,375 - mmseg - INFO - Iter [12200/40000] lr: 5.300e-06, eta: 3:47:08, time: 0.495, data_time: 0.056, memory: 31652, decode.loss_cls: 0.3699, decode.loss_mask: 0.5292, decode.loss_dice: 0.7656, decode.d0.loss_cls: 2.6812, decode.d0.loss_mask: 0.5652, decode.d0.loss_dice: 0.9164, decode.d1.loss_cls: 0.6010, decode.d1.loss_mask: 0.5512, decode.d1.loss_dice: 0.8168, decode.d2.loss_cls: 0.4536, decode.d2.loss_mask: 0.5381, decode.d2.loss_dice: 0.7781, decode.d3.loss_cls: 0.4120, decode.d3.loss_mask: 0.5279, decode.d3.loss_dice: 0.7639, decode.d4.loss_cls: 0.3925, decode.d4.loss_mask: 0.5305, decode.d4.loss_dice: 0.7659, decode.d5.loss_cls: 0.3790, decode.d5.loss_mask: 0.5263, decode.d5.loss_dice: 0.7691, decode.d6.loss_cls: 0.3744, decode.d6.loss_mask: 0.5261, decode.d6.loss_dice: 0.7620, decode.d7.loss_cls: 0.3718, decode.d7.loss_mask: 0.5258, 
decode.d7.loss_dice: 0.7655, decode.d8.loss_cls: 0.3712, decode.d8.loss_mask: 0.5263, decode.d8.loss_dice: 0.7620, loss: 19.6185 +2022-06-05 03:22:16,069 - mmseg - INFO - Iter [12250/40000] lr: 5.290e-06, eta: 3:46:37, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3487, decode.loss_mask: 0.5497, decode.loss_dice: 0.7585, decode.d0.loss_cls: 2.5775, decode.d0.loss_mask: 0.6046, decode.d0.loss_dice: 0.9198, decode.d1.loss_cls: 0.5631, decode.d1.loss_mask: 0.5723, decode.d1.loss_dice: 0.8137, decode.d2.loss_cls: 0.4266, decode.d2.loss_mask: 0.5594, decode.d2.loss_dice: 0.7751, decode.d3.loss_cls: 0.3871, decode.d3.loss_mask: 0.5530, decode.d3.loss_dice: 0.7637, decode.d4.loss_cls: 0.3699, decode.d4.loss_mask: 0.5515, decode.d4.loss_dice: 0.7725, decode.d5.loss_cls: 0.3594, decode.d5.loss_mask: 0.5476, decode.d5.loss_dice: 0.7659, decode.d6.loss_cls: 0.3543, decode.d6.loss_mask: 0.5467, decode.d6.loss_dice: 0.7598, decode.d7.loss_cls: 0.3529, decode.d7.loss_mask: 0.5468, decode.d7.loss_dice: 0.7617, decode.d8.loss_cls: 0.3509, decode.d8.loss_mask: 0.5476, decode.d8.loss_dice: 0.7578, loss: 19.5184 +2022-06-05 03:22:37,650 - mmseg - INFO - Iter [12300/40000] lr: 5.281e-06, eta: 3:46:06, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3570, decode.loss_mask: 0.5387, decode.loss_dice: 0.7586, decode.d0.loss_cls: 2.6327, decode.d0.loss_mask: 0.5797, decode.d0.loss_dice: 0.9092, decode.d1.loss_cls: 0.5873, decode.d1.loss_mask: 0.5603, decode.d1.loss_dice: 0.8083, decode.d2.loss_cls: 0.4454, decode.d2.loss_mask: 0.5462, decode.d2.loss_dice: 0.7783, decode.d3.loss_cls: 0.3998, decode.d3.loss_mask: 0.5445, decode.d3.loss_dice: 0.7628, decode.d4.loss_cls: 0.3813, decode.d4.loss_mask: 0.5423, decode.d4.loss_dice: 0.7648, decode.d5.loss_cls: 0.3684, decode.d5.loss_mask: 0.5414, decode.d5.loss_dice: 0.7640, decode.d6.loss_cls: 0.3567, decode.d6.loss_mask: 0.5451, decode.d6.loss_dice: 0.7608, decode.d7.loss_cls: 0.3557, decode.d7.loss_mask: 0.5407, decode.d7.loss_dice: 0.7582, decode.d8.loss_cls: 0.3585, decode.d8.loss_mask: 0.5394, decode.d8.loss_dice: 0.7587, loss: 19.5448 +2022-06-05 03:22:59,474 - mmseg - INFO - Iter [12350/40000] lr: 5.271e-06, eta: 3:45:35, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3870, decode.loss_mask: 0.5331, decode.loss_dice: 0.7693, decode.d0.loss_cls: 2.6352, decode.d0.loss_mask: 0.5749, decode.d0.loss_dice: 0.9237, decode.d1.loss_cls: 0.6020, decode.d1.loss_mask: 0.5539, decode.d1.loss_dice: 0.8223, decode.d2.loss_cls: 0.4654, decode.d2.loss_mask: 0.5447, decode.d2.loss_dice: 0.7874, decode.d3.loss_cls: 0.4181, decode.d3.loss_mask: 0.5388, decode.d3.loss_dice: 0.7730, decode.d4.loss_cls: 0.4128, decode.d4.loss_mask: 0.5366, decode.d4.loss_dice: 0.7689, decode.d5.loss_cls: 0.3988, decode.d5.loss_mask: 0.5354, decode.d5.loss_dice: 0.7719, decode.d6.loss_cls: 0.3925, decode.d6.loss_mask: 0.5381, decode.d6.loss_dice: 0.7686, decode.d7.loss_cls: 0.3910, decode.d7.loss_mask: 0.5371, decode.d7.loss_dice: 0.7705, decode.d8.loss_cls: 0.3901, decode.d8.loss_mask: 0.5330, decode.d8.loss_dice: 0.7718, loss: 19.8459 +2022-06-05 03:23:21,004 - mmseg - INFO - Iter [12400/40000] lr: 5.262e-06, eta: 3:45:04, time: 0.431, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3743, decode.loss_mask: 0.5417, decode.loss_dice: 0.7620, decode.d0.loss_cls: 2.5943, decode.d0.loss_mask: 0.5943, decode.d0.loss_dice: 0.9282, decode.d1.loss_cls: 0.5968, decode.d1.loss_mask: 0.5667, decode.d1.loss_dice: 0.8191, decode.d2.loss_cls: 0.4558, 
decode.d2.loss_mask: 0.5534, decode.d2.loss_dice: 0.7889, decode.d3.loss_cls: 0.4054, decode.d3.loss_mask: 0.5489, decode.d3.loss_dice: 0.7704, decode.d4.loss_cls: 0.3907, decode.d4.loss_mask: 0.5479, decode.d4.loss_dice: 0.7764, decode.d5.loss_cls: 0.3903, decode.d5.loss_mask: 0.5454, decode.d5.loss_dice: 0.7743, decode.d6.loss_cls: 0.3892, decode.d6.loss_mask: 0.5419, decode.d6.loss_dice: 0.7589, decode.d7.loss_cls: 0.3824, decode.d7.loss_mask: 0.5422, decode.d7.loss_dice: 0.7595, decode.d8.loss_cls: 0.3774, decode.d8.loss_mask: 0.5421, decode.d8.loss_dice: 0.7620, loss: 19.7809 +2022-06-05 03:23:43,023 - mmseg - INFO - Iter [12450/40000] lr: 5.252e-06, eta: 3:44:34, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3465, decode.loss_mask: 0.5670, decode.loss_dice: 0.7523, decode.d0.loss_cls: 2.5823, decode.d0.loss_mask: 0.6100, decode.d0.loss_dice: 0.9016, decode.d1.loss_cls: 0.5802, decode.d1.loss_mask: 0.5861, decode.d1.loss_dice: 0.8012, decode.d2.loss_cls: 0.4370, decode.d2.loss_mask: 0.5751, decode.d2.loss_dice: 0.7697, decode.d3.loss_cls: 0.3930, decode.d3.loss_mask: 0.5717, decode.d3.loss_dice: 0.7544, decode.d4.loss_cls: 0.3763, decode.d4.loss_mask: 0.5699, decode.d4.loss_dice: 0.7598, decode.d5.loss_cls: 0.3692, decode.d5.loss_mask: 0.5682, decode.d5.loss_dice: 0.7568, decode.d6.loss_cls: 0.3574, decode.d6.loss_mask: 0.5661, decode.d6.loss_dice: 0.7577, decode.d7.loss_cls: 0.3549, decode.d7.loss_mask: 0.5662, decode.d7.loss_dice: 0.7550, decode.d8.loss_cls: 0.3545, decode.d8.loss_mask: 0.5672, decode.d8.loss_dice: 0.7539, loss: 19.6613 +2022-06-05 03:24:06,887 - mmseg - INFO - Iter [12500/40000] lr: 5.243e-06, eta: 3:44:08, time: 0.478, data_time: 0.059, memory: 31652, decode.loss_cls: 0.3570, decode.loss_mask: 0.5391, decode.loss_dice: 0.7477, decode.d0.loss_cls: 2.5756, decode.d0.loss_mask: 0.5824, decode.d0.loss_dice: 0.9066, decode.d1.loss_cls: 0.6021, decode.d1.loss_mask: 0.5612, decode.d1.loss_dice: 0.7991, decode.d2.loss_cls: 0.4517, decode.d2.loss_mask: 0.5470, decode.d2.loss_dice: 0.7594, decode.d3.loss_cls: 0.4020, decode.d3.loss_mask: 0.5416, decode.d3.loss_dice: 0.7491, decode.d4.loss_cls: 0.3903, decode.d4.loss_mask: 0.5392, decode.d4.loss_dice: 0.7495, decode.d5.loss_cls: 0.3801, decode.d5.loss_mask: 0.5398, decode.d5.loss_dice: 0.7512, decode.d6.loss_cls: 0.3687, decode.d6.loss_mask: 0.5365, decode.d6.loss_dice: 0.7510, decode.d7.loss_cls: 0.3611, decode.d7.loss_mask: 0.5361, decode.d7.loss_dice: 0.7422, decode.d8.loss_cls: 0.3651, decode.d8.loss_mask: 0.5385, decode.d8.loss_dice: 0.7431, loss: 19.4141 +2022-06-05 03:24:28,165 - mmseg - INFO - Iter [12550/40000] lr: 5.233e-06, eta: 3:43:37, time: 0.426, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3626, decode.loss_mask: 0.5587, decode.loss_dice: 0.7624, decode.d0.loss_cls: 2.5543, decode.d0.loss_mask: 0.5971, decode.d0.loss_dice: 0.9147, decode.d1.loss_cls: 0.5710, decode.d1.loss_mask: 0.5764, decode.d1.loss_dice: 0.8165, decode.d2.loss_cls: 0.4418, decode.d2.loss_mask: 0.5639, decode.d2.loss_dice: 0.7796, decode.d3.loss_cls: 0.4011, decode.d3.loss_mask: 0.5625, decode.d3.loss_dice: 0.7662, decode.d4.loss_cls: 0.3853, decode.d4.loss_mask: 0.5583, decode.d4.loss_dice: 0.7695, decode.d5.loss_cls: 0.3750, decode.d5.loss_mask: 0.5604, decode.d5.loss_dice: 0.7704, decode.d6.loss_cls: 0.3748, decode.d6.loss_mask: 0.5590, decode.d6.loss_dice: 0.7613, decode.d7.loss_cls: 0.3697, decode.d7.loss_mask: 0.5581, decode.d7.loss_dice: 0.7616, decode.d8.loss_cls: 0.3663, decode.d8.loss_mask: 0.5588, 
decode.d8.loss_dice: 0.7627, loss: 19.7199 +2022-06-05 03:24:49,858 - mmseg - INFO - Iter [12600/40000] lr: 5.224e-06, eta: 3:43:07, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3385, decode.loss_mask: 0.5246, decode.loss_dice: 0.7481, decode.d0.loss_cls: 2.5646, decode.d0.loss_mask: 0.5640, decode.d0.loss_dice: 0.8979, decode.d1.loss_cls: 0.5493, decode.d1.loss_mask: 0.5427, decode.d1.loss_dice: 0.7931, decode.d2.loss_cls: 0.4181, decode.d2.loss_mask: 0.5295, decode.d2.loss_dice: 0.7640, decode.d3.loss_cls: 0.3660, decode.d3.loss_mask: 0.5252, decode.d3.loss_dice: 0.7540, decode.d4.loss_cls: 0.3557, decode.d4.loss_mask: 0.5254, decode.d4.loss_dice: 0.7544, decode.d5.loss_cls: 0.3466, decode.d5.loss_mask: 0.5260, decode.d5.loss_dice: 0.7470, decode.d6.loss_cls: 0.3355, decode.d6.loss_mask: 0.5258, decode.d6.loss_dice: 0.7473, decode.d7.loss_cls: 0.3380, decode.d7.loss_mask: 0.5241, decode.d7.loss_dice: 0.7468, decode.d8.loss_cls: 0.3347, decode.d8.loss_mask: 0.5273, decode.d8.loss_dice: 0.7473, loss: 18.9615 +2022-06-05 03:25:11,553 - mmseg - INFO - Iter [12650/40000] lr: 5.214e-06, eta: 3:42:36, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3937, decode.loss_mask: 0.5268, decode.loss_dice: 0.7660, decode.d0.loss_cls: 2.5690, decode.d0.loss_mask: 0.5633, decode.d0.loss_dice: 0.9166, decode.d1.loss_cls: 0.6115, decode.d1.loss_mask: 0.5507, decode.d1.loss_dice: 0.8147, decode.d2.loss_cls: 0.4744, decode.d2.loss_mask: 0.5361, decode.d2.loss_dice: 0.7808, decode.d3.loss_cls: 0.4386, decode.d3.loss_mask: 0.5334, decode.d3.loss_dice: 0.7669, decode.d4.loss_cls: 0.4210, decode.d4.loss_mask: 0.5299, decode.d4.loss_dice: 0.7620, decode.d5.loss_cls: 0.4081, decode.d5.loss_mask: 0.5274, decode.d5.loss_dice: 0.7639, decode.d6.loss_cls: 0.3980, decode.d6.loss_mask: 0.5266, decode.d6.loss_dice: 0.7690, decode.d7.loss_cls: 0.4000, decode.d7.loss_mask: 0.5239, decode.d7.loss_dice: 0.7674, decode.d8.loss_cls: 0.3979, decode.d8.loss_mask: 0.5229, decode.d8.loss_dice: 0.7657, loss: 19.7265 +2022-06-05 03:25:33,762 - mmseg - INFO - Iter [12700/40000] lr: 5.205e-06, eta: 3:42:07, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3616, decode.loss_mask: 0.5469, decode.loss_dice: 0.7710, decode.d0.loss_cls: 2.5119, decode.d0.loss_mask: 0.5858, decode.d0.loss_dice: 0.9213, decode.d1.loss_cls: 0.5658, decode.d1.loss_mask: 0.5663, decode.d1.loss_dice: 0.8207, decode.d2.loss_cls: 0.4447, decode.d2.loss_mask: 0.5525, decode.d2.loss_dice: 0.7863, decode.d3.loss_cls: 0.4037, decode.d3.loss_mask: 0.5494, decode.d3.loss_dice: 0.7697, decode.d4.loss_cls: 0.3873, decode.d4.loss_mask: 0.5506, decode.d4.loss_dice: 0.7744, decode.d5.loss_cls: 0.3787, decode.d5.loss_mask: 0.5468, decode.d5.loss_dice: 0.7717, decode.d6.loss_cls: 0.3670, decode.d6.loss_mask: 0.5477, decode.d6.loss_dice: 0.7690, decode.d7.loss_cls: 0.3755, decode.d7.loss_mask: 0.5449, decode.d7.loss_dice: 0.7699, decode.d8.loss_cls: 0.3690, decode.d8.loss_mask: 0.5471, decode.d8.loss_dice: 0.7714, loss: 19.6285 +2022-06-05 03:25:55,254 - mmseg - INFO - Iter [12750/40000] lr: 5.195e-06, eta: 3:41:36, time: 0.430, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3396, decode.loss_mask: 0.5416, decode.loss_dice: 0.7365, decode.d0.loss_cls: 2.4721, decode.d0.loss_mask: 0.5863, decode.d0.loss_dice: 0.8813, decode.d1.loss_cls: 0.5217, decode.d1.loss_mask: 0.5682, decode.d1.loss_dice: 0.7841, decode.d2.loss_cls: 0.4133, decode.d2.loss_mask: 0.5498, decode.d2.loss_dice: 0.7519, decode.d3.loss_cls: 0.3684, 
decode.d3.loss_mask: 0.5468, decode.d3.loss_dice: 0.7392, decode.d4.loss_cls: 0.3533, decode.d4.loss_mask: 0.5432, decode.d4.loss_dice: 0.7425, decode.d5.loss_cls: 0.3523, decode.d5.loss_mask: 0.5444, decode.d5.loss_dice: 0.7422, decode.d6.loss_cls: 0.3369, decode.d6.loss_mask: 0.5441, decode.d6.loss_dice: 0.7399, decode.d7.loss_cls: 0.3300, decode.d7.loss_mask: 0.5442, decode.d7.loss_dice: 0.7422, decode.d8.loss_cls: 0.3366, decode.d8.loss_mask: 0.5435, decode.d8.loss_dice: 0.7377, loss: 18.9336 +2022-06-05 03:26:19,423 - mmseg - INFO - Iter [12800/40000] lr: 5.186e-06, eta: 3:41:12, time: 0.484, data_time: 0.060, memory: 31652, decode.loss_cls: 0.3606, decode.loss_mask: 0.5483, decode.loss_dice: 0.7539, decode.d0.loss_cls: 2.5416, decode.d0.loss_mask: 0.5972, decode.d0.loss_dice: 0.9027, decode.d1.loss_cls: 0.5932, decode.d1.loss_mask: 0.5718, decode.d1.loss_dice: 0.8022, decode.d2.loss_cls: 0.4475, decode.d2.loss_mask: 0.5582, decode.d2.loss_dice: 0.7686, decode.d3.loss_cls: 0.3951, decode.d3.loss_mask: 0.5501, decode.d3.loss_dice: 0.7566, decode.d4.loss_cls: 0.3781, decode.d4.loss_mask: 0.5497, decode.d4.loss_dice: 0.7601, decode.d5.loss_cls: 0.3703, decode.d5.loss_mask: 0.5498, decode.d5.loss_dice: 0.7550, decode.d6.loss_cls: 0.3584, decode.d6.loss_mask: 0.5486, decode.d6.loss_dice: 0.7505, decode.d7.loss_cls: 0.3587, decode.d7.loss_mask: 0.5514, decode.d7.loss_dice: 0.7565, decode.d8.loss_cls: 0.3569, decode.d8.loss_mask: 0.5497, decode.d8.loss_dice: 0.7559, loss: 19.4971 +2022-06-05 03:26:41,282 - mmseg - INFO - Iter [12850/40000] lr: 5.176e-06, eta: 3:40:42, time: 0.437, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3531, decode.loss_mask: 0.5395, decode.loss_dice: 0.7216, decode.d0.loss_cls: 2.4743, decode.d0.loss_mask: 0.5909, decode.d0.loss_dice: 0.8588, decode.d1.loss_cls: 0.5515, decode.d1.loss_mask: 0.5689, decode.d1.loss_dice: 0.7706, decode.d2.loss_cls: 0.4257, decode.d2.loss_mask: 0.5538, decode.d2.loss_dice: 0.7409, decode.d3.loss_cls: 0.3792, decode.d3.loss_mask: 0.5466, decode.d3.loss_dice: 0.7303, decode.d4.loss_cls: 0.3680, decode.d4.loss_mask: 0.5497, decode.d4.loss_dice: 0.7347, decode.d5.loss_cls: 0.3656, decode.d5.loss_mask: 0.5472, decode.d5.loss_dice: 0.7293, decode.d6.loss_cls: 0.3631, decode.d6.loss_mask: 0.5441, decode.d6.loss_dice: 0.7252, decode.d7.loss_cls: 0.3582, decode.d7.loss_mask: 0.5420, decode.d7.loss_dice: 0.7212, decode.d8.loss_cls: 0.3571, decode.d8.loss_mask: 0.5403, decode.d8.loss_dice: 0.7306, loss: 18.9820 +2022-06-05 03:27:03,060 - mmseg - INFO - Iter [12900/40000] lr: 5.167e-06, eta: 3:40:12, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3735, decode.loss_mask: 0.5208, decode.loss_dice: 0.7749, decode.d0.loss_cls: 2.5183, decode.d0.loss_mask: 0.5599, decode.d0.loss_dice: 0.9308, decode.d1.loss_cls: 0.5865, decode.d1.loss_mask: 0.5358, decode.d1.loss_dice: 0.8242, decode.d2.loss_cls: 0.4527, decode.d2.loss_mask: 0.5249, decode.d2.loss_dice: 0.7877, decode.d3.loss_cls: 0.4057, decode.d3.loss_mask: 0.5185, decode.d3.loss_dice: 0.7795, decode.d4.loss_cls: 0.3944, decode.d4.loss_mask: 0.5207, decode.d4.loss_dice: 0.7758, decode.d5.loss_cls: 0.3842, decode.d5.loss_mask: 0.5218, decode.d5.loss_dice: 0.7745, decode.d6.loss_cls: 0.3795, decode.d6.loss_mask: 0.5202, decode.d6.loss_dice: 0.7707, decode.d7.loss_cls: 0.3743, decode.d7.loss_mask: 0.5206, decode.d7.loss_dice: 0.7724, decode.d8.loss_cls: 0.3699, decode.d8.loss_mask: 0.5239, decode.d8.loss_dice: 0.7739, loss: 19.4708 +2022-06-05 03:27:25,950 - mmseg - INFO - Iter 
[12950/40000] lr: 5.157e-06, eta: 3:39:44, time: 0.458, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3448, decode.loss_mask: 0.5285, decode.loss_dice: 0.7398, decode.d0.loss_cls: 2.4707, decode.d0.loss_mask: 0.5687, decode.d0.loss_dice: 0.8893, decode.d1.loss_cls: 0.5602, decode.d1.loss_mask: 0.5461, decode.d1.loss_dice: 0.7908, decode.d2.loss_cls: 0.4237, decode.d2.loss_mask: 0.5304, decode.d2.loss_dice: 0.7567, decode.d3.loss_cls: 0.3740, decode.d3.loss_mask: 0.5302, decode.d3.loss_dice: 0.7471, decode.d4.loss_cls: 0.3640, decode.d4.loss_mask: 0.5239, decode.d4.loss_dice: 0.7444, decode.d5.loss_cls: 0.3508, decode.d5.loss_mask: 0.5278, decode.d5.loss_dice: 0.7426, decode.d6.loss_cls: 0.3462, decode.d6.loss_mask: 0.5281, decode.d6.loss_dice: 0.7431, decode.d7.loss_cls: 0.3511, decode.d7.loss_mask: 0.5258, decode.d7.loss_dice: 0.7373, decode.d8.loss_cls: 0.3552, decode.d8.loss_mask: 0.5249, decode.d8.loss_dice: 0.7393, loss: 18.9055 +2022-06-05 03:27:48,231 - mmseg - INFO - Saving checkpoint at 13000 iterations +2022-06-05 03:27:50,695 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:27:50,696 - mmseg - INFO - Iter [13000/40000] lr: 5.147e-06, eta: 3:39:21, time: 0.495, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3395, decode.loss_mask: 0.5356, decode.loss_dice: 0.7113, decode.d0.loss_cls: 2.4434, decode.d0.loss_mask: 0.5812, decode.d0.loss_dice: 0.8715, decode.d1.loss_cls: 0.5328, decode.d1.loss_mask: 0.5590, decode.d1.loss_dice: 0.7611, decode.d2.loss_cls: 0.4060, decode.d2.loss_mask: 0.5492, decode.d2.loss_dice: 0.7356, decode.d3.loss_cls: 0.3710, decode.d3.loss_mask: 0.5398, decode.d3.loss_dice: 0.7189, decode.d4.loss_cls: 0.3535, decode.d4.loss_mask: 0.5400, decode.d4.loss_dice: 0.7201, decode.d5.loss_cls: 0.3548, decode.d5.loss_mask: 0.5376, decode.d5.loss_dice: 0.7143, decode.d6.loss_cls: 0.3541, decode.d6.loss_mask: 0.5356, decode.d6.loss_dice: 0.7130, decode.d7.loss_cls: 0.3461, decode.d7.loss_mask: 0.5361, decode.d7.loss_dice: 0.7135, decode.d8.loss_cls: 0.3461, decode.d8.loss_mask: 0.5366, decode.d8.loss_dice: 0.7082, loss: 18.6656 +2022-06-05 03:28:12,613 - mmseg - INFO - Iter [13050/40000] lr: 5.138e-06, eta: 3:38:51, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3463, decode.loss_mask: 0.5487, decode.loss_dice: 0.7597, decode.d0.loss_cls: 2.4430, decode.d0.loss_mask: 0.5849, decode.d0.loss_dice: 0.9009, decode.d1.loss_cls: 0.5531, decode.d1.loss_mask: 0.5641, decode.d1.loss_dice: 0.8038, decode.d2.loss_cls: 0.4237, decode.d2.loss_mask: 0.5526, decode.d2.loss_dice: 0.7667, decode.d3.loss_cls: 0.3800, decode.d3.loss_mask: 0.5536, decode.d3.loss_dice: 0.7564, decode.d4.loss_cls: 0.3722, decode.d4.loss_mask: 0.5516, decode.d4.loss_dice: 0.7579, decode.d5.loss_cls: 0.3646, decode.d5.loss_mask: 0.5529, decode.d5.loss_dice: 0.7534, decode.d6.loss_cls: 0.3527, decode.d6.loss_mask: 0.5489, decode.d6.loss_dice: 0.7554, decode.d7.loss_cls: 0.3427, decode.d7.loss_mask: 0.5509, decode.d7.loss_dice: 0.7611, decode.d8.loss_cls: 0.3429, decode.d8.loss_mask: 0.5516, decode.d8.loss_dice: 0.7612, loss: 19.2575 +2022-06-05 03:28:34,827 - mmseg - INFO - Iter [13100/40000] lr: 5.128e-06, eta: 3:38:23, time: 0.444, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3415, decode.loss_mask: 0.5470, decode.loss_dice: 0.7361, decode.d0.loss_cls: 2.4156, decode.d0.loss_mask: 0.5873, decode.d0.loss_dice: 0.8693, decode.d1.loss_cls: 0.5665, decode.d1.loss_mask: 0.5656, decode.d1.loss_dice: 0.7837, decode.d2.loss_cls: 
0.4234, decode.d2.loss_mask: 0.5563, decode.d2.loss_dice: 0.7575, decode.d3.loss_cls: 0.3727, decode.d3.loss_mask: 0.5509, decode.d3.loss_dice: 0.7442, decode.d4.loss_cls: 0.3589, decode.d4.loss_mask: 0.5491, decode.d4.loss_dice: 0.7409, decode.d5.loss_cls: 0.3491, decode.d5.loss_mask: 0.5505, decode.d5.loss_dice: 0.7456, decode.d6.loss_cls: 0.3445, decode.d6.loss_mask: 0.5487, decode.d6.loss_dice: 0.7437, decode.d7.loss_cls: 0.3497, decode.d7.loss_mask: 0.5438, decode.d7.loss_dice: 0.7433, decode.d8.loss_cls: 0.3433, decode.d8.loss_mask: 0.5433, decode.d8.loss_dice: 0.7384, loss: 19.0105 +2022-06-05 03:28:59,834 - mmseg - INFO - Iter [13150/40000] lr: 5.119e-06, eta: 3:37:59, time: 0.498, data_time: 0.060, memory: 31652, decode.loss_cls: 0.3670, decode.loss_mask: 0.5275, decode.loss_dice: 0.7428, decode.d0.loss_cls: 2.4618, decode.d0.loss_mask: 0.5635, decode.d0.loss_dice: 0.8925, decode.d1.loss_cls: 0.5752, decode.d1.loss_mask: 0.5445, decode.d1.loss_dice: 0.7929, decode.d2.loss_cls: 0.4493, decode.d2.loss_mask: 0.5343, decode.d2.loss_dice: 0.7581, decode.d3.loss_cls: 0.4029, decode.d3.loss_mask: 0.5295, decode.d3.loss_dice: 0.7516, decode.d4.loss_cls: 0.3917, decode.d4.loss_mask: 0.5249, decode.d4.loss_dice: 0.7472, decode.d5.loss_cls: 0.3859, decode.d5.loss_mask: 0.5268, decode.d5.loss_dice: 0.7469, decode.d6.loss_cls: 0.3681, decode.d6.loss_mask: 0.5268, decode.d6.loss_dice: 0.7407, decode.d7.loss_cls: 0.3715, decode.d7.loss_mask: 0.5272, decode.d7.loss_dice: 0.7428, decode.d8.loss_cls: 0.3650, decode.d8.loss_mask: 0.5275, decode.d8.loss_dice: 0.7473, loss: 19.1338 +2022-06-05 03:29:21,709 - mmseg - INFO - Iter [13200/40000] lr: 5.109e-06, eta: 3:37:30, time: 0.439, data_time: 0.010, memory: 31652, decode.loss_cls: 0.3057, decode.loss_mask: 0.5174, decode.loss_dice: 0.7004, decode.d0.loss_cls: 2.3536, decode.d0.loss_mask: 0.5696, decode.d0.loss_dice: 0.8412, decode.d1.loss_cls: 0.4809, decode.d1.loss_mask: 0.5442, decode.d1.loss_dice: 0.7562, decode.d2.loss_cls: 0.3710, decode.d2.loss_mask: 0.5300, decode.d2.loss_dice: 0.7243, decode.d3.loss_cls: 0.3330, decode.d3.loss_mask: 0.5227, decode.d3.loss_dice: 0.7037, decode.d4.loss_cls: 0.3161, decode.d4.loss_mask: 0.5236, decode.d4.loss_dice: 0.7091, decode.d5.loss_cls: 0.3057, decode.d5.loss_mask: 0.5218, decode.d5.loss_dice: 0.7042, decode.d6.loss_cls: 0.3063, decode.d6.loss_mask: 0.5208, decode.d6.loss_dice: 0.7035, decode.d7.loss_cls: 0.3057, decode.d7.loss_mask: 0.5208, decode.d7.loss_dice: 0.7025, decode.d8.loss_cls: 0.3049, decode.d8.loss_mask: 0.5209, decode.d8.loss_dice: 0.7032, loss: 17.9230 +2022-06-05 03:29:43,280 - mmseg - INFO - Iter [13250/40000] lr: 5.100e-06, eta: 3:37:00, time: 0.431, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3607, decode.loss_mask: 0.5445, decode.loss_dice: 0.7721, decode.d0.loss_cls: 2.4409, decode.d0.loss_mask: 0.5837, decode.d0.loss_dice: 0.9164, decode.d1.loss_cls: 0.5778, decode.d1.loss_mask: 0.5635, decode.d1.loss_dice: 0.8087, decode.d2.loss_cls: 0.4365, decode.d2.loss_mask: 0.5465, decode.d2.loss_dice: 0.7850, decode.d3.loss_cls: 0.3941, decode.d3.loss_mask: 0.5420, decode.d3.loss_dice: 0.7740, decode.d4.loss_cls: 0.3867, decode.d4.loss_mask: 0.5425, decode.d4.loss_dice: 0.7763, decode.d5.loss_cls: 0.3756, decode.d5.loss_mask: 0.5426, decode.d5.loss_dice: 0.7720, decode.d6.loss_cls: 0.3657, decode.d6.loss_mask: 0.5399, decode.d6.loss_dice: 0.7673, decode.d7.loss_cls: 0.3616, decode.d7.loss_mask: 0.5424, decode.d7.loss_dice: 0.7683, decode.d8.loss_cls: 0.3607, decode.d8.loss_mask: 
0.5438, decode.d8.loss_dice: 0.7704, loss: 19.4623 +2022-06-05 03:30:05,805 - mmseg - INFO - Iter [13300/40000] lr: 5.090e-06, eta: 3:36:32, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3553, decode.loss_mask: 0.5355, decode.loss_dice: 0.7595, decode.d0.loss_cls: 2.3824, decode.d0.loss_mask: 0.5732, decode.d0.loss_dice: 0.8952, decode.d1.loss_cls: 0.5564, decode.d1.loss_mask: 0.5583, decode.d1.loss_dice: 0.8047, decode.d2.loss_cls: 0.4255, decode.d2.loss_mask: 0.5387, decode.d2.loss_dice: 0.7760, decode.d3.loss_cls: 0.3807, decode.d3.loss_mask: 0.5354, decode.d3.loss_dice: 0.7703, decode.d4.loss_cls: 0.3678, decode.d4.loss_mask: 0.5361, decode.d4.loss_dice: 0.7666, decode.d5.loss_cls: 0.3645, decode.d5.loss_mask: 0.5360, decode.d5.loss_dice: 0.7643, decode.d6.loss_cls: 0.3573, decode.d6.loss_mask: 0.5363, decode.d6.loss_dice: 0.7646, decode.d7.loss_cls: 0.3607, decode.d7.loss_mask: 0.5352, decode.d7.loss_dice: 0.7597, decode.d8.loss_cls: 0.3553, decode.d8.loss_mask: 0.5348, decode.d8.loss_dice: 0.7591, loss: 19.1457 +2022-06-05 03:30:27,834 - mmseg - INFO - Iter [13350/40000] lr: 5.081e-06, eta: 3:36:03, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3742, decode.loss_mask: 0.5529, decode.loss_dice: 0.7575, decode.d0.loss_cls: 2.4439, decode.d0.loss_mask: 0.5865, decode.d0.loss_dice: 0.9058, decode.d1.loss_cls: 0.5944, decode.d1.loss_mask: 0.5704, decode.d1.loss_dice: 0.8035, decode.d2.loss_cls: 0.4518, decode.d2.loss_mask: 0.5543, decode.d2.loss_dice: 0.7689, decode.d3.loss_cls: 0.4109, decode.d3.loss_mask: 0.5545, decode.d3.loss_dice: 0.7620, decode.d4.loss_cls: 0.3978, decode.d4.loss_mask: 0.5516, decode.d4.loss_dice: 0.7595, decode.d5.loss_cls: 0.3864, decode.d5.loss_mask: 0.5506, decode.d5.loss_dice: 0.7598, decode.d6.loss_cls: 0.3833, decode.d6.loss_mask: 0.5522, decode.d6.loss_dice: 0.7550, decode.d7.loss_cls: 0.3779, decode.d7.loss_mask: 0.5505, decode.d7.loss_dice: 0.7627, decode.d8.loss_cls: 0.3761, decode.d8.loss_mask: 0.5528, decode.d8.loss_dice: 0.7585, loss: 19.5662 +2022-06-05 03:30:49,735 - mmseg - INFO - Iter [13400/40000] lr: 5.071e-06, eta: 3:35:34, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3473, decode.loss_mask: 0.5326, decode.loss_dice: 0.7135, decode.d0.loss_cls: 2.3467, decode.d0.loss_mask: 0.5759, decode.d0.loss_dice: 0.8618, decode.d1.loss_cls: 0.5444, decode.d1.loss_mask: 0.5556, decode.d1.loss_dice: 0.7684, decode.d2.loss_cls: 0.4168, decode.d2.loss_mask: 0.5399, decode.d2.loss_dice: 0.7310, decode.d3.loss_cls: 0.3687, decode.d3.loss_mask: 0.5378, decode.d3.loss_dice: 0.7262, decode.d4.loss_cls: 0.3630, decode.d4.loss_mask: 0.5363, decode.d4.loss_dice: 0.7229, decode.d5.loss_cls: 0.3513, decode.d5.loss_mask: 0.5371, decode.d5.loss_dice: 0.7217, decode.d6.loss_cls: 0.3472, decode.d6.loss_mask: 0.5336, decode.d6.loss_dice: 0.7209, decode.d7.loss_cls: 0.3503, decode.d7.loss_mask: 0.5337, decode.d7.loss_dice: 0.7175, decode.d8.loss_cls: 0.3410, decode.d8.loss_mask: 0.5341, decode.d8.loss_dice: 0.7185, loss: 18.5955 +2022-06-05 03:31:14,145 - mmseg - INFO - Iter [13450/40000] lr: 5.062e-06, eta: 3:35:10, time: 0.489, data_time: 0.056, memory: 31652, decode.loss_cls: 0.3502, decode.loss_mask: 0.5236, decode.loss_dice: 0.7562, decode.d0.loss_cls: 2.4010, decode.d0.loss_mask: 0.5712, decode.d0.loss_dice: 0.9001, decode.d1.loss_cls: 0.5622, decode.d1.loss_mask: 0.5525, decode.d1.loss_dice: 0.8070, decode.d2.loss_cls: 0.4284, decode.d2.loss_mask: 0.5343, decode.d2.loss_dice: 0.7712, decode.d3.loss_cls: 0.3785, 
decode.d3.loss_mask: 0.5299, decode.d3.loss_dice: 0.7616, decode.d4.loss_cls: 0.3759, decode.d4.loss_mask: 0.5280, decode.d4.loss_dice: 0.7624, decode.d5.loss_cls: 0.3578, decode.d5.loss_mask: 0.5284, decode.d5.loss_dice: 0.7604, decode.d6.loss_cls: 0.3501, decode.d6.loss_mask: 0.5246, decode.d6.loss_dice: 0.7575, decode.d7.loss_cls: 0.3510, decode.d7.loss_mask: 0.5254, decode.d7.loss_dice: 0.7542, decode.d8.loss_cls: 0.3543, decode.d8.loss_mask: 0.5253, decode.d8.loss_dice: 0.7550, loss: 19.0383 +2022-06-05 03:31:36,126 - mmseg - INFO - Iter [13500/40000] lr: 5.052e-06, eta: 3:34:41, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3584, decode.loss_mask: 0.5302, decode.loss_dice: 0.7497, decode.d0.loss_cls: 2.3886, decode.d0.loss_mask: 0.5634, decode.d0.loss_dice: 0.9073, decode.d1.loss_cls: 0.5569, decode.d1.loss_mask: 0.5455, decode.d1.loss_dice: 0.7951, decode.d2.loss_cls: 0.4243, decode.d2.loss_mask: 0.5325, decode.d2.loss_dice: 0.7661, decode.d3.loss_cls: 0.3851, decode.d3.loss_mask: 0.5345, decode.d3.loss_dice: 0.7604, decode.d4.loss_cls: 0.3709, decode.d4.loss_mask: 0.5339, decode.d4.loss_dice: 0.7640, decode.d5.loss_cls: 0.3682, decode.d5.loss_mask: 0.5301, decode.d5.loss_dice: 0.7549, decode.d6.loss_cls: 0.3557, decode.d6.loss_mask: 0.5338, decode.d6.loss_dice: 0.7515, decode.d7.loss_cls: 0.3557, decode.d7.loss_mask: 0.5309, decode.d7.loss_dice: 0.7554, decode.d8.loss_cls: 0.3498, decode.d8.loss_mask: 0.5309, decode.d8.loss_dice: 0.7521, loss: 19.0357 +2022-06-05 03:31:58,403 - mmseg - INFO - Iter [13550/40000] lr: 5.043e-06, eta: 3:34:13, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3655, decode.loss_mask: 0.5309, decode.loss_dice: 0.7395, decode.d0.loss_cls: 2.3633, decode.d0.loss_mask: 0.5742, decode.d0.loss_dice: 0.8892, decode.d1.loss_cls: 0.5606, decode.d1.loss_mask: 0.5524, decode.d1.loss_dice: 0.7920, decode.d2.loss_cls: 0.4341, decode.d2.loss_mask: 0.5390, decode.d2.loss_dice: 0.7550, decode.d3.loss_cls: 0.4005, decode.d3.loss_mask: 0.5370, decode.d3.loss_dice: 0.7369, decode.d4.loss_cls: 0.3870, decode.d4.loss_mask: 0.5381, decode.d4.loss_dice: 0.7369, decode.d5.loss_cls: 0.3785, decode.d5.loss_mask: 0.5361, decode.d5.loss_dice: 0.7336, decode.d6.loss_cls: 0.3742, decode.d6.loss_mask: 0.5322, decode.d6.loss_dice: 0.7364, decode.d7.loss_cls: 0.3742, decode.d7.loss_mask: 0.5304, decode.d7.loss_dice: 0.7344, decode.d8.loss_cls: 0.3672, decode.d8.loss_mask: 0.5296, decode.d8.loss_dice: 0.7347, loss: 18.9935 +2022-06-05 03:32:20,792 - mmseg - INFO - Iter [13600/40000] lr: 5.033e-06, eta: 3:33:45, time: 0.448, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3185, decode.loss_mask: 0.5408, decode.loss_dice: 0.7175, decode.d0.loss_cls: 2.3231, decode.d0.loss_mask: 0.5792, decode.d0.loss_dice: 0.8608, decode.d1.loss_cls: 0.5044, decode.d1.loss_mask: 0.5648, decode.d1.loss_dice: 0.7669, decode.d2.loss_cls: 0.3853, decode.d2.loss_mask: 0.5502, decode.d2.loss_dice: 0.7430, decode.d3.loss_cls: 0.3458, decode.d3.loss_mask: 0.5449, decode.d3.loss_dice: 0.7282, decode.d4.loss_cls: 0.3386, decode.d4.loss_mask: 0.5446, decode.d4.loss_dice: 0.7331, decode.d5.loss_cls: 0.3280, decode.d5.loss_mask: 0.5422, decode.d5.loss_dice: 0.7305, decode.d6.loss_cls: 0.3251, decode.d6.loss_mask: 0.5416, decode.d6.loss_dice: 0.7237, decode.d7.loss_cls: 0.3204, decode.d7.loss_mask: 0.5403, decode.d7.loss_dice: 0.7239, decode.d8.loss_cls: 0.3178, decode.d8.loss_mask: 0.5412, decode.d8.loss_dice: 0.7232, loss: 18.4477 +2022-06-05 03:32:42,864 - mmseg - INFO - Iter 
[13650/40000] lr: 5.024e-06, eta: 3:33:16, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3468, decode.loss_mask: 0.5479, decode.loss_dice: 0.7438, decode.d0.loss_cls: 2.3332, decode.d0.loss_mask: 0.5846, decode.d0.loss_dice: 0.8823, decode.d1.loss_cls: 0.5566, decode.d1.loss_mask: 0.5637, decode.d1.loss_dice: 0.7928, decode.d2.loss_cls: 0.4390, decode.d2.loss_mask: 0.5503, decode.d2.loss_dice: 0.7634, decode.d3.loss_cls: 0.3923, decode.d3.loss_mask: 0.5452, decode.d3.loss_dice: 0.7502, decode.d4.loss_cls: 0.3802, decode.d4.loss_mask: 0.5441, decode.d4.loss_dice: 0.7429, decode.d5.loss_cls: 0.3676, decode.d5.loss_mask: 0.5437, decode.d5.loss_dice: 0.7448, decode.d6.loss_cls: 0.3569, decode.d6.loss_mask: 0.5408, decode.d6.loss_dice: 0.7378, decode.d7.loss_cls: 0.3552, decode.d7.loss_mask: 0.5437, decode.d7.loss_dice: 0.7424, decode.d8.loss_cls: 0.3550, decode.d8.loss_mask: 0.5436, decode.d8.loss_dice: 0.7385, loss: 19.0292 +2022-06-05 03:33:04,873 - mmseg - INFO - Iter [13700/40000] lr: 5.014e-06, eta: 3:32:48, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3437, decode.loss_mask: 0.5391, decode.loss_dice: 0.7176, decode.d0.loss_cls: 2.3043, decode.d0.loss_mask: 0.5791, decode.d0.loss_dice: 0.8619, decode.d1.loss_cls: 0.5404, decode.d1.loss_mask: 0.5602, decode.d1.loss_dice: 0.7682, decode.d2.loss_cls: 0.4074, decode.d2.loss_mask: 0.5467, decode.d2.loss_dice: 0.7347, decode.d3.loss_cls: 0.3690, decode.d3.loss_mask: 0.5452, decode.d3.loss_dice: 0.7173, decode.d4.loss_cls: 0.3574, decode.d4.loss_mask: 0.5460, decode.d4.loss_dice: 0.7190, decode.d5.loss_cls: 0.3461, decode.d5.loss_mask: 0.5466, decode.d5.loss_dice: 0.7162, decode.d6.loss_cls: 0.3445, decode.d6.loss_mask: 0.5399, decode.d6.loss_dice: 0.7197, decode.d7.loss_cls: 0.3511, decode.d7.loss_mask: 0.5386, decode.d7.loss_dice: 0.7175, decode.d8.loss_cls: 0.3355, decode.d8.loss_mask: 0.5436, decode.d8.loss_dice: 0.7215, loss: 18.5780 +2022-06-05 03:33:29,092 - mmseg - INFO - Iter [13750/40000] lr: 5.004e-06, eta: 3:32:23, time: 0.484, data_time: 0.059, memory: 31652, decode.loss_cls: 0.3204, decode.loss_mask: 0.5358, decode.loss_dice: 0.7314, decode.d0.loss_cls: 2.2938, decode.d0.loss_mask: 0.5794, decode.d0.loss_dice: 0.8703, decode.d1.loss_cls: 0.5070, decode.d1.loss_mask: 0.5557, decode.d1.loss_dice: 0.7832, decode.d2.loss_cls: 0.3798, decode.d2.loss_mask: 0.5399, decode.d2.loss_dice: 0.7544, decode.d3.loss_cls: 0.3572, decode.d3.loss_mask: 0.5370, decode.d3.loss_dice: 0.7409, decode.d4.loss_cls: 0.3461, decode.d4.loss_mask: 0.5351, decode.d4.loss_dice: 0.7449, decode.d5.loss_cls: 0.3344, decode.d5.loss_mask: 0.5338, decode.d5.loss_dice: 0.7398, decode.d6.loss_cls: 0.3307, decode.d6.loss_mask: 0.5343, decode.d6.loss_dice: 0.7381, decode.d7.loss_cls: 0.3221, decode.d7.loss_mask: 0.5362, decode.d7.loss_dice: 0.7363, decode.d8.loss_cls: 0.3189, decode.d8.loss_mask: 0.5351, decode.d8.loss_dice: 0.7372, loss: 18.5093 +2022-06-05 03:33:51,595 - mmseg - INFO - Iter [13800/40000] lr: 4.995e-06, eta: 3:31:56, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2870, decode.loss_mask: 0.5145, decode.loss_dice: 0.7083, decode.d0.loss_cls: 2.2932, decode.d0.loss_mask: 0.5599, decode.d0.loss_dice: 0.8447, decode.d1.loss_cls: 0.4953, decode.d1.loss_mask: 0.5361, decode.d1.loss_dice: 0.7518, decode.d2.loss_cls: 0.3690, decode.d2.loss_mask: 0.5225, decode.d2.loss_dice: 0.7176, decode.d3.loss_cls: 0.3161, decode.d3.loss_mask: 0.5187, decode.d3.loss_dice: 0.7111, decode.d4.loss_cls: 0.3082, 
decode.d4.loss_mask: 0.5169, decode.d4.loss_dice: 0.7122, decode.d5.loss_cls: 0.2988, decode.d5.loss_mask: 0.5175, decode.d5.loss_dice: 0.7097, decode.d6.loss_cls: 0.2932, decode.d6.loss_mask: 0.5152, decode.d6.loss_dice: 0.7080, decode.d7.loss_cls: 0.2888, decode.d7.loss_mask: 0.5144, decode.d7.loss_dice: 0.7112, decode.d8.loss_cls: 0.2811, decode.d8.loss_mask: 0.5136, decode.d8.loss_dice: 0.7131, loss: 17.7479 +2022-06-05 03:34:13,891 - mmseg - INFO - Iter [13850/40000] lr: 4.985e-06, eta: 3:31:28, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3178, decode.loss_mask: 0.5256, decode.loss_dice: 0.7087, decode.d0.loss_cls: 2.2955, decode.d0.loss_mask: 0.5694, decode.d0.loss_dice: 0.8434, decode.d1.loss_cls: 0.5196, decode.d1.loss_mask: 0.5443, decode.d1.loss_dice: 0.7462, decode.d2.loss_cls: 0.4050, decode.d2.loss_mask: 0.5345, decode.d2.loss_dice: 0.7150, decode.d3.loss_cls: 0.3567, decode.d3.loss_mask: 0.5281, decode.d3.loss_dice: 0.7081, decode.d4.loss_cls: 0.3457, decode.d4.loss_mask: 0.5288, decode.d4.loss_dice: 0.7154, decode.d5.loss_cls: 0.3363, decode.d5.loss_mask: 0.5260, decode.d5.loss_dice: 0.7056, decode.d6.loss_cls: 0.3279, decode.d6.loss_mask: 0.5258, decode.d6.loss_dice: 0.7016, decode.d7.loss_cls: 0.3194, decode.d7.loss_mask: 0.5249, decode.d7.loss_dice: 0.7076, decode.d8.loss_cls: 0.3253, decode.d8.loss_mask: 0.5273, decode.d8.loss_dice: 0.7050, loss: 18.1404 +2022-06-05 03:34:36,465 - mmseg - INFO - Iter [13900/40000] lr: 4.976e-06, eta: 3:31:00, time: 0.451, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3036, decode.loss_mask: 0.5105, decode.loss_dice: 0.7162, decode.d0.loss_cls: 2.2792, decode.d0.loss_mask: 0.5461, decode.d0.loss_dice: 0.8606, decode.d1.loss_cls: 0.4985, decode.d1.loss_mask: 0.5342, decode.d1.loss_dice: 0.7668, decode.d2.loss_cls: 0.3837, decode.d2.loss_mask: 0.5217, decode.d2.loss_dice: 0.7363, decode.d3.loss_cls: 0.3320, decode.d3.loss_mask: 0.5171, decode.d3.loss_dice: 0.7258, decode.d4.loss_cls: 0.3221, decode.d4.loss_mask: 0.5148, decode.d4.loss_dice: 0.7264, decode.d5.loss_cls: 0.3108, decode.d5.loss_mask: 0.5128, decode.d5.loss_dice: 0.7271, decode.d6.loss_cls: 0.3040, decode.d6.loss_mask: 0.5147, decode.d6.loss_dice: 0.7216, decode.d7.loss_cls: 0.3051, decode.d7.loss_mask: 0.5121, decode.d7.loss_dice: 0.7204, decode.d8.loss_cls: 0.3052, decode.d8.loss_mask: 0.5099, decode.d8.loss_dice: 0.7168, loss: 17.9557 +2022-06-05 03:34:58,691 - mmseg - INFO - Iter [13950/40000] lr: 4.966e-06, eta: 3:30:32, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3303, decode.loss_mask: 0.5512, decode.loss_dice: 0.7634, decode.d0.loss_cls: 2.3250, decode.d0.loss_mask: 0.5943, decode.d0.loss_dice: 0.9053, decode.d1.loss_cls: 0.5494, decode.d1.loss_mask: 0.5688, decode.d1.loss_dice: 0.8123, decode.d2.loss_cls: 0.4285, decode.d2.loss_mask: 0.5521, decode.d2.loss_dice: 0.7743, decode.d3.loss_cls: 0.3787, decode.d3.loss_mask: 0.5473, decode.d3.loss_dice: 0.7631, decode.d4.loss_cls: 0.3606, decode.d4.loss_mask: 0.5486, decode.d4.loss_dice: 0.7590, decode.d5.loss_cls: 0.3543, decode.d5.loss_mask: 0.5510, decode.d5.loss_dice: 0.7653, decode.d6.loss_cls: 0.3447, decode.d6.loss_mask: 0.5461, decode.d6.loss_dice: 0.7606, decode.d7.loss_cls: 0.3407, decode.d7.loss_mask: 0.5480, decode.d7.loss_dice: 0.7604, decode.d8.loss_cls: 0.3329, decode.d8.loss_mask: 0.5494, decode.d8.loss_dice: 0.7644, loss: 19.1298 +2022-06-05 03:35:20,806 - mmseg - INFO - Saving checkpoint at 14000 iterations +2022-06-05 03:35:23,187 - mmseg - INFO - Exp name: 
mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:35:23,187 - mmseg - INFO - Iter [14000/40000] lr: 4.957e-06, eta: 3:30:08, time: 0.490, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3689, decode.loss_mask: 0.5328, decode.loss_dice: 0.7507, decode.d0.loss_cls: 2.2856, decode.d0.loss_mask: 0.5743, decode.d0.loss_dice: 0.9058, decode.d1.loss_cls: 0.5800, decode.d1.loss_mask: 0.5559, decode.d1.loss_dice: 0.8052, decode.d2.loss_cls: 0.4454, decode.d2.loss_mask: 0.5460, decode.d2.loss_dice: 0.7722, decode.d3.loss_cls: 0.4062, decode.d3.loss_mask: 0.5395, decode.d3.loss_dice: 0.7560, decode.d4.loss_cls: 0.3925, decode.d4.loss_mask: 0.5386, decode.d4.loss_dice: 0.7551, decode.d5.loss_cls: 0.3908, decode.d5.loss_mask: 0.5336, decode.d5.loss_dice: 0.7551, decode.d6.loss_cls: 0.3789, decode.d6.loss_mask: 0.5360, decode.d6.loss_dice: 0.7496, decode.d7.loss_cls: 0.3792, decode.d7.loss_mask: 0.5307, decode.d7.loss_dice: 0.7508, decode.d8.loss_cls: 0.3738, decode.d8.loss_mask: 0.5293, decode.d8.loss_dice: 0.7506, loss: 19.1691 +2022-06-05 03:35:47,877 - mmseg - INFO - Iter [14050/40000] lr: 4.947e-06, eta: 3:29:45, time: 0.493, data_time: 0.055, memory: 31652, decode.loss_cls: 0.3083, decode.loss_mask: 0.5239, decode.loss_dice: 0.7147, decode.d0.loss_cls: 2.2272, decode.d0.loss_mask: 0.5650, decode.d0.loss_dice: 0.8526, decode.d1.loss_cls: 0.4956, decode.d1.loss_mask: 0.5473, decode.d1.loss_dice: 0.7706, decode.d2.loss_cls: 0.3684, decode.d2.loss_mask: 0.5317, decode.d2.loss_dice: 0.7375, decode.d3.loss_cls: 0.3379, decode.d3.loss_mask: 0.5313, decode.d3.loss_dice: 0.7202, decode.d4.loss_cls: 0.3246, decode.d4.loss_mask: 0.5272, decode.d4.loss_dice: 0.7192, decode.d5.loss_cls: 0.3165, decode.d5.loss_mask: 0.5275, decode.d5.loss_dice: 0.7153, decode.d6.loss_cls: 0.3052, decode.d6.loss_mask: 0.5259, decode.d6.loss_dice: 0.7163, decode.d7.loss_cls: 0.2975, decode.d7.loss_mask: 0.5265, decode.d7.loss_dice: 0.7182, decode.d8.loss_cls: 0.3008, decode.d8.loss_mask: 0.5261, decode.d8.loss_dice: 0.7206, loss: 17.9997 +2022-06-05 03:36:09,814 - mmseg - INFO - Iter [14100/40000] lr: 4.938e-06, eta: 3:29:17, time: 0.439, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3218, decode.loss_mask: 0.5069, decode.loss_dice: 0.7076, decode.d0.loss_cls: 2.2416, decode.d0.loss_mask: 0.5542, decode.d0.loss_dice: 0.8463, decode.d1.loss_cls: 0.5065, decode.d1.loss_mask: 0.5361, decode.d1.loss_dice: 0.7533, decode.d2.loss_cls: 0.3798, decode.d2.loss_mask: 0.5205, decode.d2.loss_dice: 0.7230, decode.d3.loss_cls: 0.3384, decode.d3.loss_mask: 0.5129, decode.d3.loss_dice: 0.7160, decode.d4.loss_cls: 0.3353, decode.d4.loss_mask: 0.5123, decode.d4.loss_dice: 0.7177, decode.d5.loss_cls: 0.3271, decode.d5.loss_mask: 0.5108, decode.d5.loss_dice: 0.7121, decode.d6.loss_cls: 0.3200, decode.d6.loss_mask: 0.5105, decode.d6.loss_dice: 0.7107, decode.d7.loss_cls: 0.3197, decode.d7.loss_mask: 0.5089, decode.d7.loss_dice: 0.7052, decode.d8.loss_cls: 0.3151, decode.d8.loss_mask: 0.5067, decode.d8.loss_dice: 0.7029, loss: 17.8801 +2022-06-05 03:36:32,389 - mmseg - INFO - Iter [14150/40000] lr: 4.928e-06, eta: 3:28:49, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3349, decode.loss_mask: 0.5354, decode.loss_dice: 0.7412, decode.d0.loss_cls: 2.2658, decode.d0.loss_mask: 0.5718, decode.d0.loss_dice: 0.8792, decode.d1.loss_cls: 0.5377, decode.d1.loss_mask: 0.5549, decode.d1.loss_dice: 0.7800, decode.d2.loss_cls: 0.4121, decode.d2.loss_mask: 0.5375, decode.d2.loss_dice: 0.7465, 
decode.d3.loss_cls: 0.3635, decode.d3.loss_mask: 0.5389, decode.d3.loss_dice: 0.7393, decode.d4.loss_cls: 0.3521, decode.d4.loss_mask: 0.5352, decode.d4.loss_dice: 0.7382, decode.d5.loss_cls: 0.3425, decode.d5.loss_mask: 0.5363, decode.d5.loss_dice: 0.7376, decode.d6.loss_cls: 0.3376, decode.d6.loss_mask: 0.5344, decode.d6.loss_dice: 0.7337, decode.d7.loss_cls: 0.3368, decode.d7.loss_mask: 0.5326, decode.d7.loss_dice: 0.7391, decode.d8.loss_cls: 0.3348, decode.d8.loss_mask: 0.5338, decode.d8.loss_dice: 0.7389, loss: 18.6025 +2022-06-05 03:36:54,736 - mmseg - INFO - Iter [14200/40000] lr: 4.919e-06, eta: 3:28:22, time: 0.447, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3106, decode.loss_mask: 0.5172, decode.loss_dice: 0.7245, decode.d0.loss_cls: 2.2227, decode.d0.loss_mask: 0.5639, decode.d0.loss_dice: 0.8582, decode.d1.loss_cls: 0.5058, decode.d1.loss_mask: 0.5405, decode.d1.loss_dice: 0.7747, decode.d2.loss_cls: 0.3823, decode.d2.loss_mask: 0.5275, decode.d2.loss_dice: 0.7457, decode.d3.loss_cls: 0.3423, decode.d3.loss_mask: 0.5216, decode.d3.loss_dice: 0.7271, decode.d4.loss_cls: 0.3278, decode.d4.loss_mask: 0.5235, decode.d4.loss_dice: 0.7277, decode.d5.loss_cls: 0.3221, decode.d5.loss_mask: 0.5195, decode.d5.loss_dice: 0.7236, decode.d6.loss_cls: 0.3177, decode.d6.loss_mask: 0.5156, decode.d6.loss_dice: 0.7204, decode.d7.loss_cls: 0.3103, decode.d7.loss_mask: 0.5162, decode.d7.loss_dice: 0.7257, decode.d8.loss_cls: 0.3113, decode.d8.loss_mask: 0.5174, decode.d8.loss_dice: 0.7207, loss: 18.0641 +2022-06-05 03:37:17,256 - mmseg - INFO - Iter [14250/40000] lr: 4.909e-06, eta: 3:27:54, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3234, decode.loss_mask: 0.5191, decode.loss_dice: 0.7426, decode.d0.loss_cls: 2.2246, decode.d0.loss_mask: 0.5666, decode.d0.loss_dice: 0.8799, decode.d1.loss_cls: 0.5348, decode.d1.loss_mask: 0.5436, decode.d1.loss_dice: 0.7955, decode.d2.loss_cls: 0.4136, decode.d2.loss_mask: 0.5279, decode.d2.loss_dice: 0.7609, decode.d3.loss_cls: 0.3688, decode.d3.loss_mask: 0.5267, decode.d3.loss_dice: 0.7525, decode.d4.loss_cls: 0.3544, decode.d4.loss_mask: 0.5234, decode.d4.loss_dice: 0.7517, decode.d5.loss_cls: 0.3450, decode.d5.loss_mask: 0.5259, decode.d5.loss_dice: 0.7454, decode.d6.loss_cls: 0.3349, decode.d6.loss_mask: 0.5239, decode.d6.loss_dice: 0.7378, decode.d7.loss_cls: 0.3359, decode.d7.loss_mask: 0.5232, decode.d7.loss_dice: 0.7432, decode.d8.loss_cls: 0.3335, decode.d8.loss_mask: 0.5209, decode.d8.loss_dice: 0.7453, loss: 18.5250 +2022-06-05 03:37:39,233 - mmseg - INFO - Iter [14300/40000] lr: 4.900e-06, eta: 3:27:26, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3077, decode.loss_mask: 0.5016, decode.loss_dice: 0.7054, decode.d0.loss_cls: 2.2204, decode.d0.loss_mask: 0.5426, decode.d0.loss_dice: 0.8361, decode.d1.loss_cls: 0.5119, decode.d1.loss_mask: 0.5216, decode.d1.loss_dice: 0.7594, decode.d2.loss_cls: 0.3917, decode.d2.loss_mask: 0.5067, decode.d2.loss_dice: 0.7203, decode.d3.loss_cls: 0.3451, decode.d3.loss_mask: 0.5067, decode.d3.loss_dice: 0.7135, decode.d4.loss_cls: 0.3385, decode.d4.loss_mask: 0.5006, decode.d4.loss_dice: 0.7079, decode.d5.loss_cls: 0.3230, decode.d5.loss_mask: 0.5011, decode.d5.loss_dice: 0.7070, decode.d6.loss_cls: 0.3165, decode.d6.loss_mask: 0.5007, decode.d6.loss_dice: 0.7024, decode.d7.loss_cls: 0.3190, decode.d7.loss_mask: 0.4996, decode.d7.loss_dice: 0.7031, decode.d8.loss_cls: 0.3133, decode.d8.loss_mask: 0.5001, decode.d8.loss_dice: 0.7090, loss: 17.7326 +2022-06-05 
03:38:00,723 - mmseg - INFO - Iter [14350/40000] lr: 4.890e-06, eta: 3:26:57, time: 0.430, data_time: 0.009, memory: 31652, decode.loss_cls: 0.3268, decode.loss_mask: 0.5380, decode.loss_dice: 0.7522, decode.d0.loss_cls: 2.2566, decode.d0.loss_mask: 0.5791, decode.d0.loss_dice: 0.8733, decode.d1.loss_cls: 0.5043, decode.d1.loss_mask: 0.5617, decode.d1.loss_dice: 0.8005, decode.d2.loss_cls: 0.3879, decode.d2.loss_mask: 0.5438, decode.d2.loss_dice: 0.7607, decode.d3.loss_cls: 0.3538, decode.d3.loss_mask: 0.5403, decode.d3.loss_dice: 0.7516, decode.d4.loss_cls: 0.3458, decode.d4.loss_mask: 0.5375, decode.d4.loss_dice: 0.7510, decode.d5.loss_cls: 0.3393, decode.d5.loss_mask: 0.5363, decode.d5.loss_dice: 0.7487, decode.d6.loss_cls: 0.3267, decode.d6.loss_mask: 0.5396, decode.d6.loss_dice: 0.7479, decode.d7.loss_cls: 0.3256, decode.d7.loss_mask: 0.5379, decode.d7.loss_dice: 0.7495, decode.d8.loss_cls: 0.3201, decode.d8.loss_mask: 0.5379, decode.d8.loss_dice: 0.7520, loss: 18.6265 +2022-06-05 03:38:24,701 - mmseg - INFO - Iter [14400/40000] lr: 4.881e-06, eta: 3:26:32, time: 0.480, data_time: 0.056, memory: 31652, decode.loss_cls: 0.3113, decode.loss_mask: 0.5022, decode.loss_dice: 0.7085, decode.d0.loss_cls: 2.1798, decode.d0.loss_mask: 0.5429, decode.d0.loss_dice: 0.8487, decode.d1.loss_cls: 0.5167, decode.d1.loss_mask: 0.5194, decode.d1.loss_dice: 0.7576, decode.d2.loss_cls: 0.3799, decode.d2.loss_mask: 0.5035, decode.d2.loss_dice: 0.7207, decode.d3.loss_cls: 0.3467, decode.d3.loss_mask: 0.5033, decode.d3.loss_dice: 0.7122, decode.d4.loss_cls: 0.3432, decode.d4.loss_mask: 0.5019, decode.d4.loss_dice: 0.7106, decode.d5.loss_cls: 0.3309, decode.d5.loss_mask: 0.5011, decode.d5.loss_dice: 0.7088, decode.d6.loss_cls: 0.3221, decode.d6.loss_mask: 0.5006, decode.d6.loss_dice: 0.7038, decode.d7.loss_cls: 0.3198, decode.d7.loss_mask: 0.5021, decode.d7.loss_dice: 0.7064, decode.d8.loss_cls: 0.3125, decode.d8.loss_mask: 0.5029, decode.d8.loss_dice: 0.7113, loss: 17.7318 +2022-06-05 03:38:46,342 - mmseg - INFO - Iter [14450/40000] lr: 4.871e-06, eta: 3:26:04, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3055, decode.loss_mask: 0.5269, decode.loss_dice: 0.7414, decode.d0.loss_cls: 2.1890, decode.d0.loss_mask: 0.5725, decode.d0.loss_dice: 0.8729, decode.d1.loss_cls: 0.5199, decode.d1.loss_mask: 0.5573, decode.d1.loss_dice: 0.7901, decode.d2.loss_cls: 0.3919, decode.d2.loss_mask: 0.5406, decode.d2.loss_dice: 0.7586, decode.d3.loss_cls: 0.3464, decode.d3.loss_mask: 0.5353, decode.d3.loss_dice: 0.7443, decode.d4.loss_cls: 0.3369, decode.d4.loss_mask: 0.5332, decode.d4.loss_dice: 0.7428, decode.d5.loss_cls: 0.3246, decode.d5.loss_mask: 0.5333, decode.d5.loss_dice: 0.7406, decode.d6.loss_cls: 0.3203, decode.d6.loss_mask: 0.5299, decode.d6.loss_dice: 0.7372, decode.d7.loss_cls: 0.3124, decode.d7.loss_mask: 0.5293, decode.d7.loss_dice: 0.7385, decode.d8.loss_cls: 0.3069, decode.d8.loss_mask: 0.5311, decode.d8.loss_dice: 0.7393, loss: 18.3488 +2022-06-05 03:39:08,473 - mmseg - INFO - Iter [14500/40000] lr: 4.862e-06, eta: 3:25:36, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2886, decode.loss_mask: 0.5041, decode.loss_dice: 0.6837, decode.d0.loss_cls: 2.1714, decode.d0.loss_mask: 0.5390, decode.d0.loss_dice: 0.8238, decode.d1.loss_cls: 0.5068, decode.d1.loss_mask: 0.5170, decode.d1.loss_dice: 0.7229, decode.d2.loss_cls: 0.3781, decode.d2.loss_mask: 0.5065, decode.d2.loss_dice: 0.6985, decode.d3.loss_cls: 0.3315, decode.d3.loss_mask: 0.5046, decode.d3.loss_dice: 0.6877, 
decode.d4.loss_cls: 0.3155, decode.d4.loss_mask: 0.5054, decode.d4.loss_dice: 0.6879, decode.d5.loss_cls: 0.3023, decode.d5.loss_mask: 0.5048, decode.d5.loss_dice: 0.6864, decode.d6.loss_cls: 0.3023, decode.d6.loss_mask: 0.5023, decode.d6.loss_dice: 0.6886, decode.d7.loss_cls: 0.2955, decode.d7.loss_mask: 0.5021, decode.d7.loss_dice: 0.6875, decode.d8.loss_cls: 0.2985, decode.d8.loss_mask: 0.5021, decode.d8.loss_dice: 0.6839, loss: 17.3290 +2022-06-05 03:39:30,275 - mmseg - INFO - Iter [14550/40000] lr: 4.852e-06, eta: 3:25:07, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3362, decode.loss_mask: 0.5035, decode.loss_dice: 0.7349, decode.d0.loss_cls: 2.2447, decode.d0.loss_mask: 0.5477, decode.d0.loss_dice: 0.8842, decode.d1.loss_cls: 0.5444, decode.d1.loss_mask: 0.5303, decode.d1.loss_dice: 0.7891, decode.d2.loss_cls: 0.4243, decode.d2.loss_mask: 0.5114, decode.d2.loss_dice: 0.7512, decode.d3.loss_cls: 0.3686, decode.d3.loss_mask: 0.5075, decode.d3.loss_dice: 0.7438, decode.d4.loss_cls: 0.3496, decode.d4.loss_mask: 0.5034, decode.d4.loss_dice: 0.7417, decode.d5.loss_cls: 0.3572, decode.d5.loss_mask: 0.5030, decode.d5.loss_dice: 0.7395, decode.d6.loss_cls: 0.3493, decode.d6.loss_mask: 0.5053, decode.d6.loss_dice: 0.7308, decode.d7.loss_cls: 0.3455, decode.d7.loss_mask: 0.5030, decode.d7.loss_dice: 0.7323, decode.d8.loss_cls: 0.3340, decode.d8.loss_mask: 0.5029, decode.d8.loss_dice: 0.7375, loss: 18.3568 +2022-06-05 03:39:52,254 - mmseg - INFO - Iter [14600/40000] lr: 4.842e-06, eta: 3:24:39, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3329, decode.loss_mask: 0.5193, decode.loss_dice: 0.7192, decode.d0.loss_cls: 2.1710, decode.d0.loss_mask: 0.5581, decode.d0.loss_dice: 0.8590, decode.d1.loss_cls: 0.4973, decode.d1.loss_mask: 0.5422, decode.d1.loss_dice: 0.7669, decode.d2.loss_cls: 0.3859, decode.d2.loss_mask: 0.5295, decode.d2.loss_dice: 0.7388, decode.d3.loss_cls: 0.3489, decode.d3.loss_mask: 0.5229, decode.d3.loss_dice: 0.7248, decode.d4.loss_cls: 0.3400, decode.d4.loss_mask: 0.5202, decode.d4.loss_dice: 0.7276, decode.d5.loss_cls: 0.3407, decode.d5.loss_mask: 0.5197, decode.d5.loss_dice: 0.7240, decode.d6.loss_cls: 0.3319, decode.d6.loss_mask: 0.5199, decode.d6.loss_dice: 0.7186, decode.d7.loss_cls: 0.3359, decode.d7.loss_mask: 0.5180, decode.d7.loss_dice: 0.7201, decode.d8.loss_cls: 0.3293, decode.d8.loss_mask: 0.5187, decode.d8.loss_dice: 0.7235, loss: 18.1047 +2022-06-05 03:40:13,794 - mmseg - INFO - Iter [14650/40000] lr: 4.833e-06, eta: 3:24:11, time: 0.430, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3137, decode.loss_mask: 0.5209, decode.loss_dice: 0.7170, decode.d0.loss_cls: 2.2182, decode.d0.loss_mask: 0.5512, decode.d0.loss_dice: 0.8514, decode.d1.loss_cls: 0.5081, decode.d1.loss_mask: 0.5416, decode.d1.loss_dice: 0.7662, decode.d2.loss_cls: 0.3814, decode.d2.loss_mask: 0.5349, decode.d2.loss_dice: 0.7402, decode.d3.loss_cls: 0.3444, decode.d3.loss_mask: 0.5267, decode.d3.loss_dice: 0.7286, decode.d4.loss_cls: 0.3286, decode.d4.loss_mask: 0.5260, decode.d4.loss_dice: 0.7300, decode.d5.loss_cls: 0.3186, decode.d5.loss_mask: 0.5239, decode.d5.loss_dice: 0.7196, decode.d6.loss_cls: 0.3095, decode.d6.loss_mask: 0.5231, decode.d6.loss_dice: 0.7162, decode.d7.loss_cls: 0.3060, decode.d7.loss_mask: 0.5216, decode.d7.loss_dice: 0.7184, decode.d8.loss_cls: 0.3095, decode.d8.loss_mask: 0.5210, decode.d8.loss_dice: 0.7167, loss: 18.0331 +2022-06-05 03:40:38,055 - mmseg - INFO - Iter [14700/40000] lr: 4.823e-06, eta: 3:23:47, time: 0.485, 
data_time: 0.057, memory: 31652, decode.loss_cls: 0.2978, decode.loss_mask: 0.5029, decode.loss_dice: 0.7236, decode.d0.loss_cls: 2.1518, decode.d0.loss_mask: 0.5431, decode.d0.loss_dice: 0.8604, decode.d1.loss_cls: 0.4971, decode.d1.loss_mask: 0.5210, decode.d1.loss_dice: 0.7714, decode.d2.loss_cls: 0.3777, decode.d2.loss_mask: 0.5083, decode.d2.loss_dice: 0.7417, decode.d3.loss_cls: 0.3350, decode.d3.loss_mask: 0.5039, decode.d3.loss_dice: 0.7303, decode.d4.loss_cls: 0.3161, decode.d4.loss_mask: 0.5055, decode.d4.loss_dice: 0.7341, decode.d5.loss_cls: 0.3169, decode.d5.loss_mask: 0.5022, decode.d5.loss_dice: 0.7305, decode.d6.loss_cls: 0.3010, decode.d6.loss_mask: 0.5032, decode.d6.loss_dice: 0.7253, decode.d7.loss_cls: 0.3043, decode.d7.loss_mask: 0.5011, decode.d7.loss_dice: 0.7281, decode.d8.loss_cls: 0.3023, decode.d8.loss_mask: 0.4994, decode.d8.loss_dice: 0.7263, loss: 17.7623 +2022-06-05 03:40:59,689 - mmseg - INFO - Iter [14750/40000] lr: 4.814e-06, eta: 3:23:18, time: 0.433, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2969, decode.loss_mask: 0.5067, decode.loss_dice: 0.6737, decode.d0.loss_cls: 2.1202, decode.d0.loss_mask: 0.5523, decode.d0.loss_dice: 0.8152, decode.d1.loss_cls: 0.4785, decode.d1.loss_mask: 0.5317, decode.d1.loss_dice: 0.7279, decode.d2.loss_cls: 0.3717, decode.d2.loss_mask: 0.5172, decode.d2.loss_dice: 0.6950, decode.d3.loss_cls: 0.3229, decode.d3.loss_mask: 0.5114, decode.d3.loss_dice: 0.6849, decode.d4.loss_cls: 0.3144, decode.d4.loss_mask: 0.5113, decode.d4.loss_dice: 0.6841, decode.d5.loss_cls: 0.3039, decode.d5.loss_mask: 0.5097, decode.d5.loss_dice: 0.6833, decode.d6.loss_cls: 0.2909, decode.d6.loss_mask: 0.5093, decode.d6.loss_dice: 0.6811, decode.d7.loss_cls: 0.2926, decode.d7.loss_mask: 0.5076, decode.d7.loss_dice: 0.6826, decode.d8.loss_cls: 0.2925, decode.d8.loss_mask: 0.5075, decode.d8.loss_dice: 0.6800, loss: 17.2572 +2022-06-05 03:41:21,393 - mmseg - INFO - Iter [14800/40000] lr: 4.804e-06, eta: 3:22:50, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3062, decode.loss_mask: 0.5248, decode.loss_dice: 0.7121, decode.d0.loss_cls: 2.1864, decode.d0.loss_mask: 0.5638, decode.d0.loss_dice: 0.8462, decode.d1.loss_cls: 0.5092, decode.d1.loss_mask: 0.5477, decode.d1.loss_dice: 0.7604, decode.d2.loss_cls: 0.3796, decode.d2.loss_mask: 0.5335, decode.d2.loss_dice: 0.7276, decode.d3.loss_cls: 0.3355, decode.d3.loss_mask: 0.5284, decode.d3.loss_dice: 0.7184, decode.d4.loss_cls: 0.3282, decode.d4.loss_mask: 0.5282, decode.d4.loss_dice: 0.7192, decode.d5.loss_cls: 0.3174, decode.d5.loss_mask: 0.5296, decode.d5.loss_dice: 0.7141, decode.d6.loss_cls: 0.3115, decode.d6.loss_mask: 0.5264, decode.d6.loss_dice: 0.7139, decode.d7.loss_cls: 0.3042, decode.d7.loss_mask: 0.5292, decode.d7.loss_dice: 0.7131, decode.d8.loss_cls: 0.3001, decode.d8.loss_mask: 0.5252, decode.d8.loss_dice: 0.7151, loss: 17.9554 +2022-06-05 03:41:43,025 - mmseg - INFO - Iter [14850/40000] lr: 4.795e-06, eta: 3:22:21, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3097, decode.loss_mask: 0.5169, decode.loss_dice: 0.7110, decode.d0.loss_cls: 2.1798, decode.d0.loss_mask: 0.5523, decode.d0.loss_dice: 0.8449, decode.d1.loss_cls: 0.5201, decode.d1.loss_mask: 0.5384, decode.d1.loss_dice: 0.7571, decode.d2.loss_cls: 0.3815, decode.d2.loss_mask: 0.5251, decode.d2.loss_dice: 0.7273, decode.d3.loss_cls: 0.3325, decode.d3.loss_mask: 0.5208, decode.d3.loss_dice: 0.7164, decode.d4.loss_cls: 0.3261, decode.d4.loss_mask: 0.5234, decode.d4.loss_dice: 0.7158, 
decode.d5.loss_cls: 0.3230, decode.d5.loss_mask: 0.5179, decode.d5.loss_dice: 0.7136, decode.d6.loss_cls: 0.3194, decode.d6.loss_mask: 0.5166, decode.d6.loss_dice: 0.7109, decode.d7.loss_cls: 0.3093, decode.d7.loss_mask: 0.5175, decode.d7.loss_dice: 0.7121, decode.d8.loss_cls: 0.3105, decode.d8.loss_mask: 0.5165, decode.d8.loss_dice: 0.7141, loss: 17.8806 +2022-06-05 03:42:04,500 - mmseg - INFO - Iter [14900/40000] lr: 4.785e-06, eta: 3:21:53, time: 0.430, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3496, decode.loss_mask: 0.5089, decode.loss_dice: 0.7348, decode.d0.loss_cls: 2.1903, decode.d0.loss_mask: 0.5585, decode.d0.loss_dice: 0.8853, decode.d1.loss_cls: 0.5269, decode.d1.loss_mask: 0.5321, decode.d1.loss_dice: 0.7895, decode.d2.loss_cls: 0.4130, decode.d2.loss_mask: 0.5183, decode.d2.loss_dice: 0.7511, decode.d3.loss_cls: 0.3844, decode.d3.loss_mask: 0.5120, decode.d3.loss_dice: 0.7391, decode.d4.loss_cls: 0.3640, decode.d4.loss_mask: 0.5123, decode.d4.loss_dice: 0.7440, decode.d5.loss_cls: 0.3582, decode.d5.loss_mask: 0.5100, decode.d5.loss_dice: 0.7408, decode.d6.loss_cls: 0.3465, decode.d6.loss_mask: 0.5104, decode.d6.loss_dice: 0.7362, decode.d7.loss_cls: 0.3470, decode.d7.loss_mask: 0.5099, decode.d7.loss_dice: 0.7357, decode.d8.loss_cls: 0.3426, decode.d8.loss_mask: 0.5119, decode.d8.loss_dice: 0.7355, loss: 18.3987 +2022-06-05 03:42:26,320 - mmseg - INFO - Iter [14950/40000] lr: 4.776e-06, eta: 3:21:25, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3003, decode.loss_mask: 0.5055, decode.loss_dice: 0.7254, decode.d0.loss_cls: 2.1372, decode.d0.loss_mask: 0.5436, decode.d0.loss_dice: 0.8590, decode.d1.loss_cls: 0.5055, decode.d1.loss_mask: 0.5266, decode.d1.loss_dice: 0.7754, decode.d2.loss_cls: 0.3807, decode.d2.loss_mask: 0.5125, decode.d2.loss_dice: 0.7392, decode.d3.loss_cls: 0.3356, decode.d3.loss_mask: 0.5087, decode.d3.loss_dice: 0.7243, decode.d4.loss_cls: 0.3142, decode.d4.loss_mask: 0.5097, decode.d4.loss_dice: 0.7314, decode.d5.loss_cls: 0.3132, decode.d5.loss_mask: 0.5067, decode.d5.loss_dice: 0.7299, decode.d6.loss_cls: 0.3048, decode.d6.loss_mask: 0.5047, decode.d6.loss_dice: 0.7219, decode.d7.loss_cls: 0.3055, decode.d7.loss_mask: 0.5069, decode.d7.loss_dice: 0.7270, decode.d8.loss_cls: 0.2993, decode.d8.loss_mask: 0.5051, decode.d8.loss_dice: 0.7246, loss: 17.7843 +2022-06-05 03:42:50,295 - mmseg - INFO - Saving checkpoint at 15000 iterations +2022-06-05 03:42:52,610 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:42:52,610 - mmseg - INFO - Iter [15000/40000] lr: 4.766e-06, eta: 3:21:04, time: 0.526, data_time: 0.059, memory: 31652, decode.loss_cls: 0.3176, decode.loss_mask: 0.4879, decode.loss_dice: 0.7053, decode.d0.loss_cls: 2.1690, decode.d0.loss_mask: 0.5309, decode.d0.loss_dice: 0.8386, decode.d1.loss_cls: 0.5080, decode.d1.loss_mask: 0.5112, decode.d1.loss_dice: 0.7528, decode.d2.loss_cls: 0.3833, decode.d2.loss_mask: 0.4965, decode.d2.loss_dice: 0.7235, decode.d3.loss_cls: 0.3381, decode.d3.loss_mask: 0.4899, decode.d3.loss_dice: 0.7155, decode.d4.loss_cls: 0.3337, decode.d4.loss_mask: 0.4879, decode.d4.loss_dice: 0.7142, decode.d5.loss_cls: 0.3137, decode.d5.loss_mask: 0.4886, decode.d5.loss_dice: 0.7134, decode.d6.loss_cls: 0.3146, decode.d6.loss_mask: 0.4884, decode.d6.loss_dice: 0.7049, decode.d7.loss_cls: 0.3120, decode.d7.loss_mask: 0.4894, decode.d7.loss_dice: 0.7072, decode.d8.loss_cls: 0.3156, decode.d8.loss_mask: 0.4861, decode.d8.loss_dice: 0.7050, loss: 17.5428 
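The Iter entries above and below each log the three Mask2Former loss terms (loss_cls, loss_mask, loss_dice) once for the final prediction (decode.loss_*) and once for each of the nine auxiliary outputs used for deep supervision (decode.d0.* through decode.d8.*); the trailing "loss" value is the sum of these 30 terms (17.5428 for the Iter [15000/40000] entry just above). The short Python sketch below shows one way to pull these values out of a single log line, e.g. for plotting the training curve; the helper name and regexes are illustrative additions, not mmseg code and not part of this log.

    import re

    def parse_losses(entry: str):
        """Split one Iter log entry into its decode.* loss terms and the logged total."""
        # e.g. "decode.d8.loss_dice: 0.7050" -> terms["decode.d8.loss_dice"] = 0.7050
        terms = {k: float(v) for k, v in re.findall(r"(decode\.[\w.]+): ([\d.]+)", entry)}
        # the overall total appears as ", loss: <value>" at the end of each entry
        total = float(re.search(r", loss: ([\d.]+)", entry).group(1))
        return terms, total

    # For the Iter [15000/40000] entry above, sum(parse_losses(line)[0].values())
    # reproduces the logged 17.5428: 3 terms (cls, mask, dice) x 10 prediction stages
    # (the final prediction plus the auxiliary predictions d0-d8).

A similar regex over the Iter(val) summary lines further down recovers aAcc, mIoU, and mAcc per checkpoint when tracking validation quality across the run.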
+2022-06-05 03:43:14,415 - mmseg - INFO - Iter [15050/40000] lr: 4.757e-06, eta: 3:20:36, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2873, decode.loss_mask: 0.5129, decode.loss_dice: 0.7015, decode.d0.loss_cls: 2.1388, decode.d0.loss_mask: 0.5477, decode.d0.loss_dice: 0.8427, decode.d1.loss_cls: 0.4811, decode.d1.loss_mask: 0.5345, decode.d1.loss_dice: 0.7514, decode.d2.loss_cls: 0.3576, decode.d2.loss_mask: 0.5216, decode.d2.loss_dice: 0.7212, decode.d3.loss_cls: 0.3111, decode.d3.loss_mask: 0.5201, decode.d3.loss_dice: 0.7102, decode.d4.loss_cls: 0.3080, decode.d4.loss_mask: 0.5182, decode.d4.loss_dice: 0.7064, decode.d5.loss_cls: 0.2934, decode.d5.loss_mask: 0.5179, decode.d5.loss_dice: 0.7056, decode.d6.loss_cls: 0.2875, decode.d6.loss_mask: 0.5172, decode.d6.loss_dice: 0.7028, decode.d7.loss_cls: 0.2909, decode.d7.loss_mask: 0.5160, decode.d7.loss_dice: 0.7036, decode.d8.loss_cls: 0.2850, decode.d8.loss_mask: 0.5150, decode.d8.loss_dice: 0.7028, loss: 17.5100 +2022-06-05 03:43:36,081 - mmseg - INFO - Iter [15100/40000] lr: 4.747e-06, eta: 3:20:08, time: 0.433, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3041, decode.loss_mask: 0.4922, decode.loss_dice: 0.6963, decode.d0.loss_cls: 2.1475, decode.d0.loss_mask: 0.5430, decode.d0.loss_dice: 0.8374, decode.d1.loss_cls: 0.4957, decode.d1.loss_mask: 0.5136, decode.d1.loss_dice: 0.7390, decode.d2.loss_cls: 0.3756, decode.d2.loss_mask: 0.5001, decode.d2.loss_dice: 0.7103, decode.d3.loss_cls: 0.3368, decode.d3.loss_mask: 0.4988, decode.d3.loss_dice: 0.7040, decode.d4.loss_cls: 0.3246, decode.d4.loss_mask: 0.4945, decode.d4.loss_dice: 0.7017, decode.d5.loss_cls: 0.3147, decode.d5.loss_mask: 0.4966, decode.d5.loss_dice: 0.7004, decode.d6.loss_cls: 0.3125, decode.d6.loss_mask: 0.4958, decode.d6.loss_dice: 0.6974, decode.d7.loss_cls: 0.3108, decode.d7.loss_mask: 0.4922, decode.d7.loss_dice: 0.6999, decode.d8.loss_cls: 0.3021, decode.d8.loss_mask: 0.4922, decode.d8.loss_dice: 0.6999, loss: 17.4297 +2022-06-05 03:43:57,778 - mmseg - INFO - Iter [15150/40000] lr: 4.738e-06, eta: 3:19:40, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2912, decode.loss_mask: 0.5284, decode.loss_dice: 0.7123, decode.d0.loss_cls: 2.0976, decode.d0.loss_mask: 0.5665, decode.d0.loss_dice: 0.8305, decode.d1.loss_cls: 0.4841, decode.d1.loss_mask: 0.5479, decode.d1.loss_dice: 0.7495, decode.d2.loss_cls: 0.3628, decode.d2.loss_mask: 0.5353, decode.d2.loss_dice: 0.7241, decode.d3.loss_cls: 0.3326, decode.d3.loss_mask: 0.5288, decode.d3.loss_dice: 0.7113, decode.d4.loss_cls: 0.3225, decode.d4.loss_mask: 0.5260, decode.d4.loss_dice: 0.7104, decode.d5.loss_cls: 0.3130, decode.d5.loss_mask: 0.5245, decode.d5.loss_dice: 0.7107, decode.d6.loss_cls: 0.2988, decode.d6.loss_mask: 0.5255, decode.d6.loss_dice: 0.7115, decode.d7.loss_cls: 0.2971, decode.d7.loss_mask: 0.5261, decode.d7.loss_dice: 0.7082, decode.d8.loss_cls: 0.2980, decode.d8.loss_mask: 0.5297, decode.d8.loss_dice: 0.7109, loss: 17.7155 +2022-06-05 03:44:19,408 - mmseg - INFO - Iter [15200/40000] lr: 4.728e-06, eta: 3:19:12, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2907, decode.loss_mask: 0.5074, decode.loss_dice: 0.7279, decode.d0.loss_cls: 2.0756, decode.d0.loss_mask: 0.5467, decode.d0.loss_dice: 0.8629, decode.d1.loss_cls: 0.4634, decode.d1.loss_mask: 0.5273, decode.d1.loss_dice: 0.7765, decode.d2.loss_cls: 0.3557, decode.d2.loss_mask: 0.5113, decode.d2.loss_dice: 0.7426, decode.d3.loss_cls: 0.3211, decode.d3.loss_mask: 0.5106, decode.d3.loss_dice: 
0.7267, decode.d4.loss_cls: 0.3098, decode.d4.loss_mask: 0.5078, decode.d4.loss_dice: 0.7286, decode.d5.loss_cls: 0.3018, decode.d5.loss_mask: 0.5083, decode.d5.loss_dice: 0.7264, decode.d6.loss_cls: 0.3004, decode.d6.loss_mask: 0.5083, decode.d6.loss_dice: 0.7260, decode.d7.loss_cls: 0.2958, decode.d7.loss_mask: 0.5076, decode.d7.loss_dice: 0.7243, decode.d8.loss_cls: 0.2877, decode.d8.loss_mask: 0.5074, decode.d8.loss_dice: 0.7219, loss: 17.6084 +2022-06-05 03:44:41,064 - mmseg - INFO - Iter [15250/40000] lr: 4.719e-06, eta: 3:18:44, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3353, decode.loss_mask: 0.5090, decode.loss_dice: 0.7291, decode.d0.loss_cls: 2.1770, decode.d0.loss_mask: 0.5494, decode.d0.loss_dice: 0.8655, decode.d1.loss_cls: 0.5524, decode.d1.loss_mask: 0.5281, decode.d1.loss_dice: 0.7753, decode.d2.loss_cls: 0.4181, decode.d2.loss_mask: 0.5156, decode.d2.loss_dice: 0.7431, decode.d3.loss_cls: 0.3704, decode.d3.loss_mask: 0.5125, decode.d3.loss_dice: 0.7332, decode.d4.loss_cls: 0.3534, decode.d4.loss_mask: 0.5125, decode.d4.loss_dice: 0.7290, decode.d5.loss_cls: 0.3457, decode.d5.loss_mask: 0.5102, decode.d5.loss_dice: 0.7277, decode.d6.loss_cls: 0.3360, decode.d6.loss_mask: 0.5106, decode.d6.loss_dice: 0.7262, decode.d7.loss_cls: 0.3275, decode.d7.loss_mask: 0.5106, decode.d7.loss_dice: 0.7261, decode.d8.loss_cls: 0.3415, decode.d8.loss_mask: 0.5085, decode.d8.loss_dice: 0.7261, loss: 18.2057 +2022-06-05 03:45:05,825 - mmseg - INFO - Iter [15300/40000] lr: 4.709e-06, eta: 3:18:21, time: 0.496, data_time: 0.062, memory: 31652, decode.loss_cls: 0.2948, decode.loss_mask: 0.4986, decode.loss_dice: 0.6850, decode.d0.loss_cls: 2.1323, decode.d0.loss_mask: 0.5432, decode.d0.loss_dice: 0.8115, decode.d1.loss_cls: 0.4725, decode.d1.loss_mask: 0.5214, decode.d1.loss_dice: 0.7350, decode.d2.loss_cls: 0.3614, decode.d2.loss_mask: 0.5080, decode.d2.loss_dice: 0.7026, decode.d3.loss_cls: 0.3270, decode.d3.loss_mask: 0.5038, decode.d3.loss_dice: 0.6952, decode.d4.loss_cls: 0.3137, decode.d4.loss_mask: 0.5019, decode.d4.loss_dice: 0.6982, decode.d5.loss_cls: 0.3082, decode.d5.loss_mask: 0.5006, decode.d5.loss_dice: 0.6962, decode.d6.loss_cls: 0.2957, decode.d6.loss_mask: 0.5008, decode.d6.loss_dice: 0.6895, decode.d7.loss_cls: 0.2904, decode.d7.loss_mask: 0.4994, decode.d7.loss_dice: 0.6944, decode.d8.loss_cls: 0.2903, decode.d8.loss_mask: 0.5007, decode.d8.loss_dice: 0.6925, loss: 17.2653 +2022-06-05 03:45:28,223 - mmseg - INFO - Iter [15350/40000] lr: 4.699e-06, eta: 3:17:54, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3237, decode.loss_mask: 0.4919, decode.loss_dice: 0.6986, decode.d0.loss_cls: 2.1377, decode.d0.loss_mask: 0.5364, decode.d0.loss_dice: 0.8429, decode.d1.loss_cls: 0.5260, decode.d1.loss_mask: 0.5142, decode.d1.loss_dice: 0.7457, decode.d2.loss_cls: 0.3952, decode.d2.loss_mask: 0.4996, decode.d2.loss_dice: 0.7210, decode.d3.loss_cls: 0.3569, decode.d3.loss_mask: 0.4954, decode.d3.loss_dice: 0.7089, decode.d4.loss_cls: 0.3402, decode.d4.loss_mask: 0.4951, decode.d4.loss_dice: 0.7079, decode.d5.loss_cls: 0.3335, decode.d5.loss_mask: 0.4973, decode.d5.loss_dice: 0.7068, decode.d6.loss_cls: 0.3264, decode.d6.loss_mask: 0.4961, decode.d6.loss_dice: 0.7016, decode.d7.loss_cls: 0.3247, decode.d7.loss_mask: 0.4922, decode.d7.loss_dice: 0.7042, decode.d8.loss_cls: 0.3222, decode.d8.loss_mask: 0.4927, decode.d8.loss_dice: 0.6993, loss: 17.6343 +2022-06-05 03:45:49,860 - mmseg - INFO - Iter [15400/40000] lr: 4.690e-06, eta: 3:17:26, time: 
0.433, data_time: 0.010, memory: 31652, decode.loss_cls: 0.2877, decode.loss_mask: 0.4921, decode.loss_dice: 0.7155, decode.d0.loss_cls: 2.1102, decode.d0.loss_mask: 0.5308, decode.d0.loss_dice: 0.8445, decode.d1.loss_cls: 0.4918, decode.d1.loss_mask: 0.5116, decode.d1.loss_dice: 0.7619, decode.d2.loss_cls: 0.3664, decode.d2.loss_mask: 0.4992, decode.d2.loss_dice: 0.7311, decode.d3.loss_cls: 0.3203, decode.d3.loss_mask: 0.4952, decode.d3.loss_dice: 0.7222, decode.d4.loss_cls: 0.3115, decode.d4.loss_mask: 0.4963, decode.d4.loss_dice: 0.7203, decode.d5.loss_cls: 0.3036, decode.d5.loss_mask: 0.4941, decode.d5.loss_dice: 0.7187, decode.d6.loss_cls: 0.2998, decode.d6.loss_mask: 0.4942, decode.d6.loss_dice: 0.7079, decode.d7.loss_cls: 0.2904, decode.d7.loss_mask: 0.4921, decode.d7.loss_dice: 0.7173, decode.d8.loss_cls: 0.2884, decode.d8.loss_mask: 0.4939, decode.d8.loss_dice: 0.7158, loss: 17.4248 +2022-06-05 03:46:12,211 - mmseg - INFO - Iter [15450/40000] lr: 4.680e-06, eta: 3:16:59, time: 0.447, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2873, decode.loss_mask: 0.5059, decode.loss_dice: 0.6862, decode.d0.loss_cls: 2.0817, decode.d0.loss_mask: 0.5558, decode.d0.loss_dice: 0.8304, decode.d1.loss_cls: 0.4865, decode.d1.loss_mask: 0.5265, decode.d1.loss_dice: 0.7399, decode.d2.loss_cls: 0.3652, decode.d2.loss_mask: 0.5159, decode.d2.loss_dice: 0.7084, decode.d3.loss_cls: 0.3300, decode.d3.loss_mask: 0.5089, decode.d3.loss_dice: 0.6841, decode.d4.loss_cls: 0.3137, decode.d4.loss_mask: 0.5099, decode.d4.loss_dice: 0.6900, decode.d5.loss_cls: 0.3080, decode.d5.loss_mask: 0.5073, decode.d5.loss_dice: 0.6898, decode.d6.loss_cls: 0.3010, decode.d6.loss_mask: 0.5072, decode.d6.loss_dice: 0.6824, decode.d7.loss_cls: 0.2931, decode.d7.loss_mask: 0.5074, decode.d7.loss_dice: 0.6877, decode.d8.loss_cls: 0.2948, decode.d8.loss_mask: 0.5066, decode.d8.loss_dice: 0.6873, loss: 17.2988 +2022-06-05 03:46:34,731 - mmseg - INFO - Iter [15500/40000] lr: 4.671e-06, eta: 3:16:33, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3093, decode.loss_mask: 0.5285, decode.loss_dice: 0.7057, decode.d0.loss_cls: 2.0688, decode.d0.loss_mask: 0.5697, decode.d0.loss_dice: 0.8343, decode.d1.loss_cls: 0.4799, decode.d1.loss_mask: 0.5460, decode.d1.loss_dice: 0.7512, decode.d2.loss_cls: 0.3561, decode.d2.loss_mask: 0.5349, decode.d2.loss_dice: 0.7248, decode.d3.loss_cls: 0.3344, decode.d3.loss_mask: 0.5301, decode.d3.loss_dice: 0.7117, decode.d4.loss_cls: 0.3247, decode.d4.loss_mask: 0.5292, decode.d4.loss_dice: 0.7078, decode.d5.loss_cls: 0.3220, decode.d5.loss_mask: 0.5290, decode.d5.loss_dice: 0.7066, decode.d6.loss_cls: 0.3141, decode.d6.loss_mask: 0.5282, decode.d6.loss_dice: 0.7048, decode.d7.loss_cls: 0.3102, decode.d7.loss_mask: 0.5263, decode.d7.loss_dice: 0.7078, decode.d8.loss_cls: 0.3075, decode.d8.loss_mask: 0.5287, decode.d8.loss_dice: 0.7086, loss: 17.7409 +2022-06-05 03:46:56,702 - mmseg - INFO - Iter [15550/40000] lr: 4.661e-06, eta: 3:16:05, time: 0.440, data_time: 0.010, memory: 31652, decode.loss_cls: 0.2814, decode.loss_mask: 0.4971, decode.loss_dice: 0.6824, decode.d0.loss_cls: 2.0655, decode.d0.loss_mask: 0.5450, decode.d0.loss_dice: 0.8125, decode.d1.loss_cls: 0.4815, decode.d1.loss_mask: 0.5253, decode.d1.loss_dice: 0.7259, decode.d2.loss_cls: 0.3544, decode.d2.loss_mask: 0.5072, decode.d2.loss_dice: 0.6960, decode.d3.loss_cls: 0.3121, decode.d3.loss_mask: 0.5029, decode.d3.loss_dice: 0.6860, decode.d4.loss_cls: 0.3019, decode.d4.loss_mask: 0.5038, decode.d4.loss_dice: 0.6855, 
decode.d5.loss_cls: 0.2956, decode.d5.loss_mask: 0.5036, decode.d5.loss_dice: 0.6813, decode.d6.loss_cls: 0.2797, decode.d6.loss_mask: 0.5003, decode.d6.loss_dice: 0.6857, decode.d7.loss_cls: 0.2835, decode.d7.loss_mask: 0.4975, decode.d7.loss_dice: 0.6845, decode.d8.loss_cls: 0.2817, decode.d8.loss_mask: 0.5009, decode.d8.loss_dice: 0.6827, loss: 17.0435 +2022-06-05 03:47:18,865 - mmseg - INFO - Iter [15600/40000] lr: 4.652e-06, eta: 3:15:38, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2959, decode.loss_mask: 0.5083, decode.loss_dice: 0.7026, decode.d0.loss_cls: 2.0879, decode.d0.loss_mask: 0.5509, decode.d0.loss_dice: 0.8404, decode.d1.loss_cls: 0.4837, decode.d1.loss_mask: 0.5276, decode.d1.loss_dice: 0.7581, decode.d2.loss_cls: 0.3737, decode.d2.loss_mask: 0.5129, decode.d2.loss_dice: 0.7262, decode.d3.loss_cls: 0.3329, decode.d3.loss_mask: 0.5124, decode.d3.loss_dice: 0.7115, decode.d4.loss_cls: 0.3207, decode.d4.loss_mask: 0.5128, decode.d4.loss_dice: 0.7107, decode.d5.loss_cls: 0.3091, decode.d5.loss_mask: 0.5102, decode.d5.loss_dice: 0.7107, decode.d6.loss_cls: 0.2956, decode.d6.loss_mask: 0.5103, decode.d6.loss_dice: 0.7095, decode.d7.loss_cls: 0.3020, decode.d7.loss_mask: 0.5109, decode.d7.loss_dice: 0.7062, decode.d8.loss_cls: 0.2973, decode.d8.loss_mask: 0.5093, decode.d8.loss_dice: 0.7079, loss: 17.5484 +2022-06-05 03:47:43,222 - mmseg - INFO - Iter [15650/40000] lr: 4.642e-06, eta: 3:15:15, time: 0.487, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2999, decode.loss_mask: 0.4941, decode.loss_dice: 0.7118, decode.d0.loss_cls: 2.0868, decode.d0.loss_mask: 0.5391, decode.d0.loss_dice: 0.8466, decode.d1.loss_cls: 0.5195, decode.d1.loss_mask: 0.5124, decode.d1.loss_dice: 0.7583, decode.d2.loss_cls: 0.3805, decode.d2.loss_mask: 0.5032, decode.d2.loss_dice: 0.7300, decode.d3.loss_cls: 0.3375, decode.d3.loss_mask: 0.4990, decode.d3.loss_dice: 0.7199, decode.d4.loss_cls: 0.3291, decode.d4.loss_mask: 0.4978, decode.d4.loss_dice: 0.7171, decode.d5.loss_cls: 0.3191, decode.d5.loss_mask: 0.4958, decode.d5.loss_dice: 0.7124, decode.d6.loss_cls: 0.3090, decode.d6.loss_mask: 0.4983, decode.d6.loss_dice: 0.7137, decode.d7.loss_cls: 0.3096, decode.d7.loss_mask: 0.4947, decode.d7.loss_dice: 0.7089, decode.d8.loss_cls: 0.3020, decode.d8.loss_mask: 0.4962, decode.d8.loss_dice: 0.7104, loss: 17.5527 +2022-06-05 03:48:04,999 - mmseg - INFO - Iter [15700/40000] lr: 4.633e-06, eta: 3:14:47, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2568, decode.loss_mask: 0.4839, decode.loss_dice: 0.6613, decode.d0.loss_cls: 2.0039, decode.d0.loss_mask: 0.5287, decode.d0.loss_dice: 0.7895, decode.d1.loss_cls: 0.4259, decode.d1.loss_mask: 0.5065, decode.d1.loss_dice: 0.7079, decode.d2.loss_cls: 0.3221, decode.d2.loss_mask: 0.4915, decode.d2.loss_dice: 0.6779, decode.d3.loss_cls: 0.2891, decode.d3.loss_mask: 0.4880, decode.d3.loss_dice: 0.6719, decode.d4.loss_cls: 0.2751, decode.d4.loss_mask: 0.4852, decode.d4.loss_dice: 0.6712, decode.d5.loss_cls: 0.2629, decode.d5.loss_mask: 0.4865, decode.d5.loss_dice: 0.6656, decode.d6.loss_cls: 0.2573, decode.d6.loss_mask: 0.4856, decode.d6.loss_dice: 0.6607, decode.d7.loss_cls: 0.2651, decode.d7.loss_mask: 0.4839, decode.d7.loss_dice: 0.6617, decode.d8.loss_cls: 0.2573, decode.d8.loss_mask: 0.4837, decode.d8.loss_dice: 0.6602, loss: 16.3670 +2022-06-05 03:48:27,007 - mmseg - INFO - Iter [15750/40000] lr: 4.623e-06, eta: 3:14:20, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2589, decode.loss_mask: 0.5141, 
decode.loss_dice: 0.6933, decode.d0.loss_cls: 2.0300, decode.d0.loss_mask: 0.5560, decode.d0.loss_dice: 0.8204, decode.d1.loss_cls: 0.4617, decode.d1.loss_mask: 0.5343, decode.d1.loss_dice: 0.7344, decode.d2.loss_cls: 0.3384, decode.d2.loss_mask: 0.5211, decode.d2.loss_dice: 0.7076, decode.d3.loss_cls: 0.2951, decode.d3.loss_mask: 0.5159, decode.d3.loss_dice: 0.6998, decode.d4.loss_cls: 0.2768, decode.d4.loss_mask: 0.5151, decode.d4.loss_dice: 0.6978, decode.d5.loss_cls: 0.2772, decode.d5.loss_mask: 0.5133, decode.d5.loss_dice: 0.6999, decode.d6.loss_cls: 0.2679, decode.d6.loss_mask: 0.5153, decode.d6.loss_dice: 0.6941, decode.d7.loss_cls: 0.2595, decode.d7.loss_mask: 0.5148, decode.d7.loss_dice: 0.6987, decode.d8.loss_cls: 0.2585, decode.d8.loss_mask: 0.5135, decode.d8.loss_dice: 0.6954, loss: 17.0789 +2022-06-05 03:48:49,362 - mmseg - INFO - Iter [15800/40000] lr: 4.614e-06, eta: 3:13:53, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2955, decode.loss_mask: 0.5101, decode.loss_dice: 0.7096, decode.d0.loss_cls: 2.0648, decode.d0.loss_mask: 0.5508, decode.d0.loss_dice: 0.8491, decode.d1.loss_cls: 0.4753, decode.d1.loss_mask: 0.5323, decode.d1.loss_dice: 0.7614, decode.d2.loss_cls: 0.3589, decode.d2.loss_mask: 0.5168, decode.d2.loss_dice: 0.7262, decode.d3.loss_cls: 0.3186, decode.d3.loss_mask: 0.5140, decode.d3.loss_dice: 0.7159, decode.d4.loss_cls: 0.3093, decode.d4.loss_mask: 0.5109, decode.d4.loss_dice: 0.7160, decode.d5.loss_cls: 0.3051, decode.d5.loss_mask: 0.5118, decode.d5.loss_dice: 0.7081, decode.d6.loss_cls: 0.2996, decode.d6.loss_mask: 0.5107, decode.d6.loss_dice: 0.7098, decode.d7.loss_cls: 0.2951, decode.d7.loss_mask: 0.5116, decode.d7.loss_dice: 0.7064, decode.d8.loss_cls: 0.2949, decode.d8.loss_mask: 0.5101, decode.d8.loss_dice: 0.7077, loss: 17.5065 +2022-06-05 03:49:12,400 - mmseg - INFO - Iter [15850/40000] lr: 4.604e-06, eta: 3:13:28, time: 0.461, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2944, decode.loss_mask: 0.5182, decode.loss_dice: 0.6992, decode.d0.loss_cls: 2.0465, decode.d0.loss_mask: 0.5661, decode.d0.loss_dice: 0.8310, decode.d1.loss_cls: 0.4578, decode.d1.loss_mask: 0.5418, decode.d1.loss_dice: 0.7425, decode.d2.loss_cls: 0.3600, decode.d2.loss_mask: 0.5248, decode.d2.loss_dice: 0.7112, decode.d3.loss_cls: 0.3177, decode.d3.loss_mask: 0.5244, decode.d3.loss_dice: 0.7047, decode.d4.loss_cls: 0.3073, decode.d4.loss_mask: 0.5219, decode.d4.loss_dice: 0.7003, decode.d5.loss_cls: 0.2974, decode.d5.loss_mask: 0.5231, decode.d5.loss_dice: 0.6991, decode.d6.loss_cls: 0.2932, decode.d6.loss_mask: 0.5219, decode.d6.loss_dice: 0.6974, decode.d7.loss_cls: 0.2892, decode.d7.loss_mask: 0.5217, decode.d7.loss_dice: 0.7001, decode.d8.loss_cls: 0.2889, decode.d8.loss_mask: 0.5197, decode.d8.loss_dice: 0.7007, loss: 17.4225 +2022-06-05 03:49:34,564 - mmseg - INFO - Iter [15900/40000] lr: 4.595e-06, eta: 3:13:01, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.3190, decode.loss_mask: 0.5022, decode.loss_dice: 0.7133, decode.d0.loss_cls: 2.1098, decode.d0.loss_mask: 0.5435, decode.d0.loss_dice: 0.8643, decode.d1.loss_cls: 0.5142, decode.d1.loss_mask: 0.5260, decode.d1.loss_dice: 0.7628, decode.d2.loss_cls: 0.3905, decode.d2.loss_mask: 0.5085, decode.d2.loss_dice: 0.7299, decode.d3.loss_cls: 0.3546, decode.d3.loss_mask: 0.5031, decode.d3.loss_dice: 0.7240, decode.d4.loss_cls: 0.3403, decode.d4.loss_mask: 0.5023, decode.d4.loss_dice: 0.7182, decode.d5.loss_cls: 0.3278, decode.d5.loss_mask: 0.5014, decode.d5.loss_dice: 0.7192, 
decode.d6.loss_cls: 0.3258, decode.d6.loss_mask: 0.5024, decode.d6.loss_dice: 0.7130, decode.d7.loss_cls: 0.3263, decode.d7.loss_mask: 0.5034, decode.d7.loss_dice: 0.7098, decode.d8.loss_cls: 0.3213, decode.d8.loss_mask: 0.5024, decode.d8.loss_dice: 0.7097, loss: 17.7889 +2022-06-05 03:49:58,889 - mmseg - INFO - Iter [15950/40000] lr: 4.585e-06, eta: 3:12:37, time: 0.486, data_time: 0.055, memory: 31652, decode.loss_cls: 0.2743, decode.loss_mask: 0.5182, decode.loss_dice: 0.7052, decode.d0.loss_cls: 2.0462, decode.d0.loss_mask: 0.5567, decode.d0.loss_dice: 0.8345, decode.d1.loss_cls: 0.4601, decode.d1.loss_mask: 0.5413, decode.d1.loss_dice: 0.7568, decode.d2.loss_cls: 0.3507, decode.d2.loss_mask: 0.5219, decode.d2.loss_dice: 0.7201, decode.d3.loss_cls: 0.3100, decode.d3.loss_mask: 0.5165, decode.d3.loss_dice: 0.7045, decode.d4.loss_cls: 0.2930, decode.d4.loss_mask: 0.5166, decode.d4.loss_dice: 0.7041, decode.d5.loss_cls: 0.2811, decode.d5.loss_mask: 0.5193, decode.d5.loss_dice: 0.7017, decode.d6.loss_cls: 0.2800, decode.d6.loss_mask: 0.5155, decode.d6.loss_dice: 0.6992, decode.d7.loss_cls: 0.2685, decode.d7.loss_mask: 0.5171, decode.d7.loss_dice: 0.7049, decode.d8.loss_cls: 0.2718, decode.d8.loss_mask: 0.5176, decode.d8.loss_dice: 0.7020, loss: 17.3096 +2022-06-05 03:50:21,076 - mmseg - INFO - Saving checkpoint at 16000 iterations +2022-06-05 03:50:23,361 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:50:23,362 - mmseg - INFO - Iter [16000/40000] lr: 4.576e-06, eta: 3:12:14, time: 0.489, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2642, decode.loss_mask: 0.4900, decode.loss_dice: 0.6872, decode.d0.loss_cls: 2.0295, decode.d0.loss_mask: 0.5260, decode.d0.loss_dice: 0.8001, decode.d1.loss_cls: 0.4516, decode.d1.loss_mask: 0.5099, decode.d1.loss_dice: 0.7298, decode.d2.loss_cls: 0.3355, decode.d2.loss_mask: 0.4963, decode.d2.loss_dice: 0.6939, decode.d3.loss_cls: 0.2929, decode.d3.loss_mask: 0.4933, decode.d3.loss_dice: 0.6866, decode.d4.loss_cls: 0.2735, decode.d4.loss_mask: 0.4942, decode.d4.loss_dice: 0.6916, decode.d5.loss_cls: 0.2695, decode.d5.loss_mask: 0.4934, decode.d5.loss_dice: 0.6906, decode.d6.loss_cls: 0.2668, decode.d6.loss_mask: 0.4929, decode.d6.loss_dice: 0.6814, decode.d7.loss_cls: 0.2634, decode.d7.loss_mask: 0.4898, decode.d7.loss_dice: 0.6899, decode.d8.loss_cls: 0.2589, decode.d8.loss_mask: 0.4912, decode.d8.loss_dice: 0.6842, loss: 16.7181 +2022-06-05 03:53:01,796 - mmseg - INFO - per class results: +2022-06-05 03:53:01,802 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 90.59 | 94.69 | +| bag | 36.43 | 52.84 | +| bed | 17.5 | 19.85 | +| bedclothes | 44.02 | 71.67 | +| bench | 19.64 | 24.94 | +| bicycle | 83.82 | 93.41 | +| bird | 94.16 | 96.49 | +| boat | 82.41 | 89.83 | +| book | 51.33 | 60.85 | +| bottle | 86.89 | 95.04 | +| building | 63.01 | 72.19 | +| bus | 92.39 | 95.54 | +| cabinet | 45.2 | 61.2 | +| car | 90.17 | 96.18 | +| cat | 93.77 | 97.82 | +| ceiling | 61.5 | 76.83 | +| chair | 60.8 | 80.47 | +| cloth | 25.86 | 41.62 | +| computer | 33.18 | 51.55 | +| cow | 94.84 | 96.85 | +| cup | 44.01 | 63.11 | +| curtain | 50.9 | 70.25 | +| dog | 91.34 | 96.71 | +| door | 29.58 | 50.31 | +| fence | 44.09 | 57.08 | +| floor | 72.2 | 85.4 | +| flower | 35.69 | 58.47 | +| food | 33.48 | 41.55 | +| grass | 82.13 | 91.52 | +| ground | 55.15 | 66.53 | +| horse | 94.24 | 97.1 | +| keyboard | 77.28 | 82.75 | +| light | 57.65 | 74.85 | +| 
motorbike | 90.91 | 95.63 | +| mountain | 52.43 | 75.14 | +| mouse | 78.98 | 85.35 | +| person | 90.35 | 95.44 | +| plate | 25.2 | 34.13 | +| platform | 56.43 | 69.41 | +| pottedplant | 79.15 | 89.4 | +| road | 54.25 | 74.7 | +| rock | 48.45 | 65.79 | +| sheep | 93.97 | 96.89 | +| shelves | 36.25 | 56.13 | +| sidewalk | 30.38 | 55.04 | +| sign | 47.15 | 64.37 | +| sky | 95.05 | 97.27 | +| snow | 75.72 | 86.17 | +| sofa | 59.47 | 68.33 | +| table | 67.19 | 79.08 | +| track | 69.5 | 79.81 | +| train | 92.12 | 95.9 | +| tree | 81.02 | 89.51 | +| truck | 40.29 | 55.66 | +| tvmonitor | 87.15 | 93.41 | +| wall | 68.76 | 84.73 | +| water | 90.82 | 95.55 | +| window | 43.24 | 65.26 | +| wood | 28.64 | 40.97 | ++-------------+-------+-------+ +2022-06-05 03:53:01,802 - mmseg - INFO - Summary: +2022-06-05 03:53:01,802 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 84.92 | 63.02 | 74.48 | ++-------+-------+-------+ +2022-06-05 03:53:01,816 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 03:53:01,816 - mmseg - INFO - Iter(val) [638] aAcc: 0.8492, mIoU: 0.6302, mAcc: 0.7448, IoU.aeroplane: 0.9059, IoU.bag: 0.3643, IoU.bed: 0.1750, IoU.bedclothes: 0.4402, IoU.bench: 0.1964, IoU.bicycle: 0.8382, IoU.bird: 0.9416, IoU.boat: 0.8241, IoU.book: 0.5133, IoU.bottle: 0.8689, IoU.building: 0.6301, IoU.bus: 0.9239, IoU.cabinet: 0.4520, IoU.car: 0.9017, IoU.cat: 0.9377, IoU.ceiling: 0.6150, IoU.chair: 0.6080, IoU.cloth: 0.2586, IoU.computer: 0.3318, IoU.cow: 0.9484, IoU.cup: 0.4401, IoU.curtain: 0.5090, IoU.dog: 0.9134, IoU.door: 0.2958, IoU.fence: 0.4409, IoU.floor: 0.7220, IoU.flower: 0.3569, IoU.food: 0.3348, IoU.grass: 0.8213, IoU.ground: 0.5515, IoU.horse: 0.9424, IoU.keyboard: 0.7728, IoU.light: 0.5765, IoU.motorbike: 0.9091, IoU.mountain: 0.5243, IoU.mouse: 0.7898, IoU.person: 0.9035, IoU.plate: 0.2520, IoU.platform: 0.5643, IoU.pottedplant: 0.7915, IoU.road: 0.5425, IoU.rock: 0.4845, IoU.sheep: 0.9397, IoU.shelves: 0.3625, IoU.sidewalk: 0.3038, IoU.sign: 0.4715, IoU.sky: 0.9505, IoU.snow: 0.7572, IoU.sofa: 0.5947, IoU.table: 0.6719, IoU.track: 0.6950, IoU.train: 0.9212, IoU.tree: 0.8102, IoU.truck: 0.4029, IoU.tvmonitor: 0.8715, IoU.wall: 0.6876, IoU.water: 0.9082, IoU.window: 0.4324, IoU.wood: 0.2864, Acc.aeroplane: 0.9469, Acc.bag: 0.5284, Acc.bed: 0.1985, Acc.bedclothes: 0.7167, Acc.bench: 0.2494, Acc.bicycle: 0.9341, Acc.bird: 0.9649, Acc.boat: 0.8983, Acc.book: 0.6085, Acc.bottle: 0.9504, Acc.building: 0.7219, Acc.bus: 0.9554, Acc.cabinet: 0.6120, Acc.car: 0.9618, Acc.cat: 0.9782, Acc.ceiling: 0.7683, Acc.chair: 0.8047, Acc.cloth: 0.4162, Acc.computer: 0.5155, Acc.cow: 0.9685, Acc.cup: 0.6311, Acc.curtain: 0.7025, Acc.dog: 0.9671, Acc.door: 0.5031, Acc.fence: 0.5708, Acc.floor: 0.8540, Acc.flower: 0.5847, Acc.food: 0.4155, Acc.grass: 0.9152, Acc.ground: 0.6653, Acc.horse: 0.9710, Acc.keyboard: 0.8275, Acc.light: 0.7485, Acc.motorbike: 0.9563, Acc.mountain: 0.7514, Acc.mouse: 0.8535, Acc.person: 0.9544, Acc.plate: 0.3413, Acc.platform: 0.6941, Acc.pottedplant: 0.8940, Acc.road: 0.7470, Acc.rock: 0.6579, Acc.sheep: 0.9689, Acc.shelves: 0.5613, Acc.sidewalk: 0.5504, Acc.sign: 0.6437, Acc.sky: 0.9727, Acc.snow: 0.8617, Acc.sofa: 0.6833, Acc.table: 0.7908, Acc.track: 0.7981, Acc.train: 0.9590, Acc.tree: 0.8951, Acc.truck: 0.5566, Acc.tvmonitor: 0.9341, Acc.wall: 0.8473, Acc.water: 0.9555, Acc.window: 0.6526, Acc.wood: 0.4097 +2022-06-05 03:53:24,430 - mmseg - INFO - Iter [16050/40000] lr: 4.566e-06, eta: 
3:15:44, time: 3.621, data_time: 3.176, memory: 31652, decode.loss_cls: 0.2641, decode.loss_mask: 0.5054, decode.loss_dice: 0.6927, decode.d0.loss_cls: 2.0163, decode.d0.loss_mask: 0.5486, decode.d0.loss_dice: 0.8182, decode.d1.loss_cls: 0.4635, decode.d1.loss_mask: 0.5225, decode.d1.loss_dice: 0.7397, decode.d2.loss_cls: 0.3451, decode.d2.loss_mask: 0.5082, decode.d2.loss_dice: 0.7061, decode.d3.loss_cls: 0.3025, decode.d3.loss_mask: 0.5039, decode.d3.loss_dice: 0.6954, decode.d4.loss_cls: 0.2931, decode.d4.loss_mask: 0.5034, decode.d4.loss_dice: 0.7018, decode.d5.loss_cls: 0.2774, decode.d5.loss_mask: 0.5047, decode.d5.loss_dice: 0.6959, decode.d6.loss_cls: 0.2728, decode.d6.loss_mask: 0.5070, decode.d6.loss_dice: 0.6926, decode.d7.loss_cls: 0.2670, decode.d7.loss_mask: 0.5051, decode.d7.loss_dice: 0.6917, decode.d8.loss_cls: 0.2692, decode.d8.loss_mask: 0.5042, decode.d8.loss_dice: 0.6891, loss: 17.0072 +2022-06-05 03:53:46,518 - mmseg - INFO - Iter [16100/40000] lr: 4.556e-06, eta: 3:15:16, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2866, decode.loss_mask: 0.4897, decode.loss_dice: 0.7017, decode.d0.loss_cls: 2.0425, decode.d0.loss_mask: 0.5280, decode.d0.loss_dice: 0.8344, decode.d1.loss_cls: 0.4729, decode.d1.loss_mask: 0.5128, decode.d1.loss_dice: 0.7458, decode.d2.loss_cls: 0.3612, decode.d2.loss_mask: 0.4970, decode.d2.loss_dice: 0.7190, decode.d3.loss_cls: 0.3216, decode.d3.loss_mask: 0.4918, decode.d3.loss_dice: 0.7075, decode.d4.loss_cls: 0.3039, decode.d4.loss_mask: 0.4921, decode.d4.loss_dice: 0.7066, decode.d5.loss_cls: 0.2906, decode.d5.loss_mask: 0.4916, decode.d5.loss_dice: 0.7014, decode.d6.loss_cls: 0.2855, decode.d6.loss_mask: 0.4926, decode.d6.loss_dice: 0.7044, decode.d7.loss_cls: 0.2860, decode.d7.loss_mask: 0.4888, decode.d7.loss_dice: 0.7024, decode.d8.loss_cls: 0.2829, decode.d8.loss_mask: 0.4875, decode.d8.loss_dice: 0.7043, loss: 17.1331 +2022-06-05 03:54:09,099 - mmseg - INFO - Iter [16150/40000] lr: 4.547e-06, eta: 3:14:49, time: 0.452, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2891, decode.loss_mask: 0.5111, decode.loss_dice: 0.6992, decode.d0.loss_cls: 2.0477, decode.d0.loss_mask: 0.5511, decode.d0.loss_dice: 0.8322, decode.d1.loss_cls: 0.4900, decode.d1.loss_mask: 0.5281, decode.d1.loss_dice: 0.7439, decode.d2.loss_cls: 0.3652, decode.d2.loss_mask: 0.5169, decode.d2.loss_dice: 0.7188, decode.d3.loss_cls: 0.3211, decode.d3.loss_mask: 0.5094, decode.d3.loss_dice: 0.7125, decode.d4.loss_cls: 0.3074, decode.d4.loss_mask: 0.5130, decode.d4.loss_dice: 0.7086, decode.d5.loss_cls: 0.3009, decode.d5.loss_mask: 0.5094, decode.d5.loss_dice: 0.7034, decode.d6.loss_cls: 0.2833, decode.d6.loss_mask: 0.5090, decode.d6.loss_dice: 0.7025, decode.d7.loss_cls: 0.2868, decode.d7.loss_mask: 0.5080, decode.d7.loss_dice: 0.7002, decode.d8.loss_cls: 0.2886, decode.d8.loss_mask: 0.5074, decode.d8.loss_dice: 0.7016, loss: 17.3664 +2022-06-05 03:54:31,695 - mmseg - INFO - Iter [16200/40000] lr: 4.537e-06, eta: 3:14:21, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3095, decode.loss_mask: 0.4971, decode.loss_dice: 0.7086, decode.d0.loss_cls: 2.0488, decode.d0.loss_mask: 0.5403, decode.d0.loss_dice: 0.8509, decode.d1.loss_cls: 0.4850, decode.d1.loss_mask: 0.5194, decode.d1.loss_dice: 0.7570, decode.d2.loss_cls: 0.3745, decode.d2.loss_mask: 0.5018, decode.d2.loss_dice: 0.7326, decode.d3.loss_cls: 0.3315, decode.d3.loss_mask: 0.4981, decode.d3.loss_dice: 0.7196, decode.d4.loss_cls: 0.3194, decode.d4.loss_mask: 0.4967, 
decode.d4.loss_dice: 0.7170, decode.d5.loss_cls: 0.3109, decode.d5.loss_mask: 0.4966, decode.d5.loss_dice: 0.7149, decode.d6.loss_cls: 0.3079, decode.d6.loss_mask: 0.4965, decode.d6.loss_dice: 0.7114, decode.d7.loss_cls: 0.3106, decode.d7.loss_mask: 0.4998, decode.d7.loss_dice: 0.7116, decode.d8.loss_cls: 0.3102, decode.d8.loss_mask: 0.4966, decode.d8.loss_dice: 0.7111, loss: 17.4863 +2022-06-05 03:54:56,347 - mmseg - INFO - Iter [16250/40000] lr: 4.528e-06, eta: 3:13:57, time: 0.493, data_time: 0.055, memory: 31652, decode.loss_cls: 0.2833, decode.loss_mask: 0.4983, decode.loss_dice: 0.7017, decode.d0.loss_cls: 1.9814, decode.d0.loss_mask: 0.5445, decode.d0.loss_dice: 0.8300, decode.d1.loss_cls: 0.4621, decode.d1.loss_mask: 0.5167, decode.d1.loss_dice: 0.7447, decode.d2.loss_cls: 0.3615, decode.d2.loss_mask: 0.5046, decode.d2.loss_dice: 0.7100, decode.d3.loss_cls: 0.3185, decode.d3.loss_mask: 0.5025, decode.d3.loss_dice: 0.7045, decode.d4.loss_cls: 0.3005, decode.d4.loss_mask: 0.5018, decode.d4.loss_dice: 0.7068, decode.d5.loss_cls: 0.2986, decode.d5.loss_mask: 0.4997, decode.d5.loss_dice: 0.7030, decode.d6.loss_cls: 0.2765, decode.d6.loss_mask: 0.5020, decode.d6.loss_dice: 0.7036, decode.d7.loss_cls: 0.2830, decode.d7.loss_mask: 0.4988, decode.d7.loss_dice: 0.7026, decode.d8.loss_cls: 0.2819, decode.d8.loss_mask: 0.4976, decode.d8.loss_dice: 0.6999, loss: 17.1205 +2022-06-05 03:55:18,295 - mmseg - INFO - Iter [16300/40000] lr: 4.518e-06, eta: 3:13:29, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2561, decode.loss_mask: 0.4954, decode.loss_dice: 0.6622, decode.d0.loss_cls: 1.9961, decode.d0.loss_mask: 0.5386, decode.d0.loss_dice: 0.7893, decode.d1.loss_cls: 0.4401, decode.d1.loss_mask: 0.5198, decode.d1.loss_dice: 0.7067, decode.d2.loss_cls: 0.3295, decode.d2.loss_mask: 0.5058, decode.d2.loss_dice: 0.6784, decode.d3.loss_cls: 0.2830, decode.d3.loss_mask: 0.5027, decode.d3.loss_dice: 0.6668, decode.d4.loss_cls: 0.2697, decode.d4.loss_mask: 0.4998, decode.d4.loss_dice: 0.6657, decode.d5.loss_cls: 0.2657, decode.d5.loss_mask: 0.4969, decode.d5.loss_dice: 0.6646, decode.d6.loss_cls: 0.2561, decode.d6.loss_mask: 0.4967, decode.d6.loss_dice: 0.6642, decode.d7.loss_cls: 0.2566, decode.d7.loss_mask: 0.4970, decode.d7.loss_dice: 0.6650, decode.d8.loss_cls: 0.2563, decode.d8.loss_mask: 0.4956, decode.d8.loss_dice: 0.6621, loss: 16.4823 +2022-06-05 03:55:40,919 - mmseg - INFO - Iter [16350/40000] lr: 4.509e-06, eta: 3:13:02, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2987, decode.loss_mask: 0.4951, decode.loss_dice: 0.6867, decode.d0.loss_cls: 1.9973, decode.d0.loss_mask: 0.5392, decode.d0.loss_dice: 0.8265, decode.d1.loss_cls: 0.4838, decode.d1.loss_mask: 0.5162, decode.d1.loss_dice: 0.7369, decode.d2.loss_cls: 0.3742, decode.d2.loss_mask: 0.5000, decode.d2.loss_dice: 0.7044, decode.d3.loss_cls: 0.3341, decode.d3.loss_mask: 0.4982, decode.d3.loss_dice: 0.6921, decode.d4.loss_cls: 0.3222, decode.d4.loss_mask: 0.4960, decode.d4.loss_dice: 0.6914, decode.d5.loss_cls: 0.3139, decode.d5.loss_mask: 0.4957, decode.d5.loss_dice: 0.6948, decode.d6.loss_cls: 0.3048, decode.d6.loss_mask: 0.4929, decode.d6.loss_dice: 0.6847, decode.d7.loss_cls: 0.2997, decode.d7.loss_mask: 0.4953, decode.d7.loss_dice: 0.6847, decode.d8.loss_cls: 0.2936, decode.d8.loss_mask: 0.4941, decode.d8.loss_dice: 0.6906, loss: 17.1380 +2022-06-05 03:56:02,784 - mmseg - INFO - Iter [16400/40000] lr: 4.499e-06, eta: 3:12:33, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2978, 
decode.loss_mask: 0.4720, decode.loss_dice: 0.7128, decode.d0.loss_cls: 2.0773, decode.d0.loss_mask: 0.5111, decode.d0.loss_dice: 0.8544, decode.d1.loss_cls: 0.4993, decode.d1.loss_mask: 0.4930, decode.d1.loss_dice: 0.7573, decode.d2.loss_cls: 0.3842, decode.d2.loss_mask: 0.4796, decode.d2.loss_dice: 0.7249, decode.d3.loss_cls: 0.3397, decode.d3.loss_mask: 0.4746, decode.d3.loss_dice: 0.7155, decode.d4.loss_cls: 0.3231, decode.d4.loss_mask: 0.4756, decode.d4.loss_dice: 0.7176, decode.d5.loss_cls: 0.3076, decode.d5.loss_mask: 0.4736, decode.d5.loss_dice: 0.7172, decode.d6.loss_cls: 0.3029, decode.d6.loss_mask: 0.4744, decode.d6.loss_dice: 0.7188, decode.d7.loss_cls: 0.2941, decode.d7.loss_mask: 0.4754, decode.d7.loss_dice: 0.7186, decode.d8.loss_cls: 0.2968, decode.d8.loss_mask: 0.4735, decode.d8.loss_dice: 0.7156, loss: 17.2784 +2022-06-05 03:56:24,447 - mmseg - INFO - Iter [16450/40000] lr: 4.490e-06, eta: 3:12:05, time: 0.433, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2552, decode.loss_mask: 0.5162, decode.loss_dice: 0.7044, decode.d0.loss_cls: 1.9822, decode.d0.loss_mask: 0.5551, decode.d0.loss_dice: 0.8433, decode.d1.loss_cls: 0.4474, decode.d1.loss_mask: 0.5408, decode.d1.loss_dice: 0.7521, decode.d2.loss_cls: 0.3254, decode.d2.loss_mask: 0.5251, decode.d2.loss_dice: 0.7224, decode.d3.loss_cls: 0.2879, decode.d3.loss_mask: 0.5214, decode.d3.loss_dice: 0.7081, decode.d4.loss_cls: 0.2738, decode.d4.loss_mask: 0.5153, decode.d4.loss_dice: 0.7097, decode.d5.loss_cls: 0.2707, decode.d5.loss_mask: 0.5157, decode.d5.loss_dice: 0.7080, decode.d6.loss_cls: 0.2671, decode.d6.loss_mask: 0.5138, decode.d6.loss_dice: 0.7028, decode.d7.loss_cls: 0.2606, decode.d7.loss_mask: 0.5137, decode.d7.loss_dice: 0.7045, decode.d8.loss_cls: 0.2612, decode.d8.loss_mask: 0.5159, decode.d8.loss_dice: 0.7054, loss: 17.1253 +2022-06-05 03:56:46,222 - mmseg - INFO - Iter [16500/40000] lr: 4.480e-06, eta: 3:11:37, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2940, decode.loss_mask: 0.5079, decode.loss_dice: 0.7074, decode.d0.loss_cls: 2.0187, decode.d0.loss_mask: 0.5484, decode.d0.loss_dice: 0.8430, decode.d1.loss_cls: 0.5045, decode.d1.loss_mask: 0.5228, decode.d1.loss_dice: 0.7548, decode.d2.loss_cls: 0.3856, decode.d2.loss_mask: 0.5112, decode.d2.loss_dice: 0.7220, decode.d3.loss_cls: 0.3375, decode.d3.loss_mask: 0.5099, decode.d3.loss_dice: 0.7073, decode.d4.loss_cls: 0.3211, decode.d4.loss_mask: 0.5082, decode.d4.loss_dice: 0.7087, decode.d5.loss_cls: 0.3075, decode.d5.loss_mask: 0.5090, decode.d5.loss_dice: 0.7119, decode.d6.loss_cls: 0.3121, decode.d6.loss_mask: 0.5075, decode.d6.loss_dice: 0.7077, decode.d7.loss_cls: 0.3057, decode.d7.loss_mask: 0.5064, decode.d7.loss_dice: 0.7049, decode.d8.loss_cls: 0.3001, decode.d8.loss_mask: 0.5087, decode.d8.loss_dice: 0.7059, loss: 17.5001 +2022-06-05 03:57:10,758 - mmseg - INFO - Iter [16550/40000] lr: 4.471e-06, eta: 3:11:12, time: 0.491, data_time: 0.061, memory: 31652, decode.loss_cls: 0.2712, decode.loss_mask: 0.5084, decode.loss_dice: 0.6762, decode.d0.loss_cls: 1.9625, decode.d0.loss_mask: 0.5496, decode.d0.loss_dice: 0.8054, decode.d1.loss_cls: 0.4576, decode.d1.loss_mask: 0.5342, decode.d1.loss_dice: 0.7228, decode.d2.loss_cls: 0.3447, decode.d2.loss_mask: 0.5162, decode.d2.loss_dice: 0.6923, decode.d3.loss_cls: 0.3046, decode.d3.loss_mask: 0.5143, decode.d3.loss_dice: 0.6881, decode.d4.loss_cls: 0.2943, decode.d4.loss_mask: 0.5117, decode.d4.loss_dice: 0.6828, decode.d5.loss_cls: 0.2807, decode.d5.loss_mask: 0.5137, 
decode.d5.loss_dice: 0.6837, decode.d6.loss_cls: 0.2764, decode.d6.loss_mask: 0.5073, decode.d6.loss_dice: 0.6793, decode.d7.loss_cls: 0.2778, decode.d7.loss_mask: 0.5095, decode.d7.loss_dice: 0.6736, decode.d8.loss_cls: 0.2741, decode.d8.loss_mask: 0.5068, decode.d8.loss_dice: 0.6802, loss: 16.8999 +2022-06-05 03:57:33,010 - mmseg - INFO - Iter [16600/40000] lr: 4.461e-06, eta: 3:10:45, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2677, decode.loss_mask: 0.5012, decode.loss_dice: 0.6825, decode.d0.loss_cls: 1.9742, decode.d0.loss_mask: 0.5408, decode.d0.loss_dice: 0.8041, decode.d1.loss_cls: 0.4448, decode.d1.loss_mask: 0.5220, decode.d1.loss_dice: 0.7253, decode.d2.loss_cls: 0.3417, decode.d2.loss_mask: 0.5075, decode.d2.loss_dice: 0.6967, decode.d3.loss_cls: 0.3087, decode.d3.loss_mask: 0.5011, decode.d3.loss_dice: 0.6831, decode.d4.loss_cls: 0.2946, decode.d4.loss_mask: 0.5000, decode.d4.loss_dice: 0.6838, decode.d5.loss_cls: 0.2821, decode.d5.loss_mask: 0.4996, decode.d5.loss_dice: 0.6828, decode.d6.loss_cls: 0.2716, decode.d6.loss_mask: 0.5016, decode.d6.loss_dice: 0.6820, decode.d7.loss_cls: 0.2710, decode.d7.loss_mask: 0.4994, decode.d7.loss_dice: 0.6787, decode.d8.loss_cls: 0.2710, decode.d8.loss_mask: 0.4989, decode.d8.loss_dice: 0.6831, loss: 16.8018 +2022-06-05 03:57:54,683 - mmseg - INFO - Iter [16650/40000] lr: 4.452e-06, eta: 3:10:16, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.3168, decode.loss_mask: 0.4831, decode.loss_dice: 0.7100, decode.d0.loss_cls: 2.0243, decode.d0.loss_mask: 0.5235, decode.d0.loss_dice: 0.8477, decode.d1.loss_cls: 0.5101, decode.d1.loss_mask: 0.5004, decode.d1.loss_dice: 0.7560, decode.d2.loss_cls: 0.3986, decode.d2.loss_mask: 0.4895, decode.d2.loss_dice: 0.7280, decode.d3.loss_cls: 0.3500, decode.d3.loss_mask: 0.4881, decode.d3.loss_dice: 0.7142, decode.d4.loss_cls: 0.3367, decode.d4.loss_mask: 0.4870, decode.d4.loss_dice: 0.7163, decode.d5.loss_cls: 0.3227, decode.d5.loss_mask: 0.4874, decode.d5.loss_dice: 0.7151, decode.d6.loss_cls: 0.3207, decode.d6.loss_mask: 0.4853, decode.d6.loss_dice: 0.7136, decode.d7.loss_cls: 0.3113, decode.d7.loss_mask: 0.4845, decode.d7.loss_dice: 0.7128, decode.d8.loss_cls: 0.3131, decode.d8.loss_mask: 0.4840, decode.d8.loss_dice: 0.7154, loss: 17.4459 +2022-06-05 03:58:16,465 - mmseg - INFO - Iter [16700/40000] lr: 4.442e-06, eta: 3:09:48, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2477, decode.loss_mask: 0.4767, decode.loss_dice: 0.6796, decode.d0.loss_cls: 1.9668, decode.d0.loss_mask: 0.5145, decode.d0.loss_dice: 0.8027, decode.d1.loss_cls: 0.4348, decode.d1.loss_mask: 0.4937, decode.d1.loss_dice: 0.7256, decode.d2.loss_cls: 0.3131, decode.d2.loss_mask: 0.4854, decode.d2.loss_dice: 0.6959, decode.d3.loss_cls: 0.2777, decode.d3.loss_mask: 0.4802, decode.d3.loss_dice: 0.6867, decode.d4.loss_cls: 0.2640, decode.d4.loss_mask: 0.4799, decode.d4.loss_dice: 0.6864, decode.d5.loss_cls: 0.2525, decode.d5.loss_mask: 0.4781, decode.d5.loss_dice: 0.6846, decode.d6.loss_cls: 0.2484, decode.d6.loss_mask: 0.4769, decode.d6.loss_dice: 0.6863, decode.d7.loss_cls: 0.2424, decode.d7.loss_mask: 0.4762, decode.d7.loss_dice: 0.6798, decode.d8.loss_cls: 0.2415, decode.d8.loss_mask: 0.4756, decode.d8.loss_dice: 0.6813, loss: 16.3354 +2022-06-05 03:58:38,214 - mmseg - INFO - Iter [16750/40000] lr: 4.433e-06, eta: 3:09:20, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2572, decode.loss_mask: 0.4935, decode.loss_dice: 0.6771, decode.d0.loss_cls: 1.9287, 
decode.d0.loss_mask: 0.5285, decode.d0.loss_dice: 0.7855, decode.d1.loss_cls: 0.4557, decode.d1.loss_mask: 0.5075, decode.d1.loss_dice: 0.7164, decode.d2.loss_cls: 0.3346, decode.d2.loss_mask: 0.4972, decode.d2.loss_dice: 0.6887, decode.d3.loss_cls: 0.2979, decode.d3.loss_mask: 0.4942, decode.d3.loss_dice: 0.6775, decode.d4.loss_cls: 0.2808, decode.d4.loss_mask: 0.4926, decode.d4.loss_dice: 0.6743, decode.d5.loss_cls: 0.2771, decode.d5.loss_mask: 0.4910, decode.d5.loss_dice: 0.6745, decode.d6.loss_cls: 0.2696, decode.d6.loss_mask: 0.4922, decode.d6.loss_dice: 0.6735, decode.d7.loss_cls: 0.2702, decode.d7.loss_mask: 0.4908, decode.d7.loss_dice: 0.6753, decode.d8.loss_cls: 0.2628, decode.d8.loss_mask: 0.4932, decode.d8.loss_dice: 0.6756, loss: 16.5337 +2022-06-05 03:59:00,451 - mmseg - INFO - Iter [16800/40000] lr: 4.423e-06, eta: 3:08:53, time: 0.445, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2944, decode.loss_mask: 0.5072, decode.loss_dice: 0.7095, decode.d0.loss_cls: 2.0565, decode.d0.loss_mask: 0.5484, decode.d0.loss_dice: 0.8425, decode.d1.loss_cls: 0.4911, decode.d1.loss_mask: 0.5259, decode.d1.loss_dice: 0.7623, decode.d2.loss_cls: 0.3686, decode.d2.loss_mask: 0.5122, decode.d2.loss_dice: 0.7339, decode.d3.loss_cls: 0.3337, decode.d3.loss_mask: 0.5098, decode.d3.loss_dice: 0.7196, decode.d4.loss_cls: 0.3188, decode.d4.loss_mask: 0.5074, decode.d4.loss_dice: 0.7163, decode.d5.loss_cls: 0.3101, decode.d5.loss_mask: 0.5041, decode.d5.loss_dice: 0.7137, decode.d6.loss_cls: 0.2996, decode.d6.loss_mask: 0.5041, decode.d6.loss_dice: 0.7140, decode.d7.loss_cls: 0.2946, decode.d7.loss_mask: 0.5051, decode.d7.loss_dice: 0.7162, decode.d8.loss_cls: 0.2912, decode.d8.loss_mask: 0.5057, decode.d8.loss_dice: 0.7149, loss: 17.5314 +2022-06-05 03:59:25,192 - mmseg - INFO - Iter [16850/40000] lr: 4.414e-06, eta: 3:08:29, time: 0.495, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2640, decode.loss_mask: 0.5080, decode.loss_dice: 0.6715, decode.d0.loss_cls: 1.9674, decode.d0.loss_mask: 0.5472, decode.d0.loss_dice: 0.7967, decode.d1.loss_cls: 0.4370, decode.d1.loss_mask: 0.5276, decode.d1.loss_dice: 0.7185, decode.d2.loss_cls: 0.3288, decode.d2.loss_mask: 0.5185, decode.d2.loss_dice: 0.6922, decode.d3.loss_cls: 0.2964, decode.d3.loss_mask: 0.5120, decode.d3.loss_dice: 0.6740, decode.d4.loss_cls: 0.2800, decode.d4.loss_mask: 0.5124, decode.d4.loss_dice: 0.6792, decode.d5.loss_cls: 0.2740, decode.d5.loss_mask: 0.5106, decode.d5.loss_dice: 0.6739, decode.d6.loss_cls: 0.2669, decode.d6.loss_mask: 0.5100, decode.d6.loss_dice: 0.6692, decode.d7.loss_cls: 0.2632, decode.d7.loss_mask: 0.5084, decode.d7.loss_dice: 0.6706, decode.d8.loss_cls: 0.2621, decode.d8.loss_mask: 0.5063, decode.d8.loss_dice: 0.6750, loss: 16.7213 +2022-06-05 03:59:47,351 - mmseg - INFO - Iter [16900/40000] lr: 4.404e-06, eta: 3:08:01, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2834, decode.loss_mask: 0.5053, decode.loss_dice: 0.6945, decode.d0.loss_cls: 2.0057, decode.d0.loss_mask: 0.5436, decode.d0.loss_dice: 0.8217, decode.d1.loss_cls: 0.4640, decode.d1.loss_mask: 0.5255, decode.d1.loss_dice: 0.7389, decode.d2.loss_cls: 0.3461, decode.d2.loss_mask: 0.5100, decode.d2.loss_dice: 0.7088, decode.d3.loss_cls: 0.3099, decode.d3.loss_mask: 0.5066, decode.d3.loss_dice: 0.6977, decode.d4.loss_cls: 0.3021, decode.d4.loss_mask: 0.5074, decode.d4.loss_dice: 0.7013, decode.d5.loss_cls: 0.2961, decode.d5.loss_mask: 0.5064, decode.d5.loss_dice: 0.6968, decode.d6.loss_cls: 0.2841, decode.d6.loss_mask: 0.5074, 
decode.d6.loss_dice: 0.6933, decode.d7.loss_cls: 0.2895, decode.d7.loss_mask: 0.5058, decode.d7.loss_dice: 0.6935, decode.d8.loss_cls: 0.2847, decode.d8.loss_mask: 0.5063, decode.d8.loss_dice: 0.6965, loss: 17.1331 +2022-06-05 04:00:08,767 - mmseg - INFO - Iter [16950/40000] lr: 4.394e-06, eta: 3:07:32, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2723, decode.loss_mask: 0.5000, decode.loss_dice: 0.7031, decode.d0.loss_cls: 1.9856, decode.d0.loss_mask: 0.5425, decode.d0.loss_dice: 0.8328, decode.d1.loss_cls: 0.4687, decode.d1.loss_mask: 0.5187, decode.d1.loss_dice: 0.7561, decode.d2.loss_cls: 0.3430, decode.d2.loss_mask: 0.5061, decode.d2.loss_dice: 0.7244, decode.d3.loss_cls: 0.2965, decode.d3.loss_mask: 0.5048, decode.d3.loss_dice: 0.7112, decode.d4.loss_cls: 0.2904, decode.d4.loss_mask: 0.5051, decode.d4.loss_dice: 0.7138, decode.d5.loss_cls: 0.2767, decode.d5.loss_mask: 0.5033, decode.d5.loss_dice: 0.7082, decode.d6.loss_cls: 0.2651, decode.d6.loss_mask: 0.4993, decode.d6.loss_dice: 0.7082, decode.d7.loss_cls: 0.2670, decode.d7.loss_mask: 0.5015, decode.d7.loss_dice: 0.7104, decode.d8.loss_cls: 0.2705, decode.d8.loss_mask: 0.5027, decode.d8.loss_dice: 0.7027, loss: 17.0905 +2022-06-05 04:00:30,143 - mmseg - INFO - Saving checkpoint at 17000 iterations +2022-06-05 04:00:32,828 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:00:32,828 - mmseg - INFO - Iter [17000/40000] lr: 4.385e-06, eta: 3:07:08, time: 0.482, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2480, decode.loss_mask: 0.4842, decode.loss_dice: 0.6878, decode.d0.loss_cls: 1.9449, decode.d0.loss_mask: 0.5262, decode.d0.loss_dice: 0.8067, decode.d1.loss_cls: 0.4221, decode.d1.loss_mask: 0.5053, decode.d1.loss_dice: 0.7244, decode.d2.loss_cls: 0.3232, decode.d2.loss_mask: 0.4916, decode.d2.loss_dice: 0.6987, decode.d3.loss_cls: 0.2844, decode.d3.loss_mask: 0.4884, decode.d3.loss_dice: 0.6861, decode.d4.loss_cls: 0.2713, decode.d4.loss_mask: 0.4870, decode.d4.loss_dice: 0.6897, decode.d5.loss_cls: 0.2677, decode.d5.loss_mask: 0.4878, decode.d5.loss_dice: 0.6937, decode.d6.loss_cls: 0.2508, decode.d6.loss_mask: 0.4885, decode.d6.loss_dice: 0.6860, decode.d7.loss_cls: 0.2473, decode.d7.loss_mask: 0.4879, decode.d7.loss_dice: 0.6834, decode.d8.loss_cls: 0.2518, decode.d8.loss_mask: 0.4863, decode.d8.loss_dice: 0.6898, loss: 16.4911 +2022-06-05 04:00:54,957 - mmseg - INFO - Iter [17050/40000] lr: 4.375e-06, eta: 3:06:40, time: 0.443, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2778, decode.loss_mask: 0.4942, decode.loss_dice: 0.6897, decode.d0.loss_cls: 1.9327, decode.d0.loss_mask: 0.5335, decode.d0.loss_dice: 0.8196, decode.d1.loss_cls: 0.4681, decode.d1.loss_mask: 0.5120, decode.d1.loss_dice: 0.7400, decode.d2.loss_cls: 0.3532, decode.d2.loss_mask: 0.4990, decode.d2.loss_dice: 0.7066, decode.d3.loss_cls: 0.3172, decode.d3.loss_mask: 0.4973, decode.d3.loss_dice: 0.6908, decode.d4.loss_cls: 0.3072, decode.d4.loss_mask: 0.4974, decode.d4.loss_dice: 0.6871, decode.d5.loss_cls: 0.2933, decode.d5.loss_mask: 0.4981, decode.d5.loss_dice: 0.6899, decode.d6.loss_cls: 0.2854, decode.d6.loss_mask: 0.4960, decode.d6.loss_dice: 0.6862, decode.d7.loss_cls: 0.2775, decode.d7.loss_mask: 0.4959, decode.d7.loss_dice: 0.6919, decode.d8.loss_cls: 0.2777, decode.d8.loss_mask: 0.4968, decode.d8.loss_dice: 0.6898, loss: 16.9018 +2022-06-05 04:01:16,536 - mmseg - INFO - Iter [17100/40000] lr: 4.366e-06, eta: 3:06:12, time: 0.432, data_time: 0.007, memory: 31652, 
decode.loss_cls: 0.2912, decode.loss_mask: 0.4846, decode.loss_dice: 0.7068, decode.d0.loss_cls: 1.9666, decode.d0.loss_mask: 0.5232, decode.d0.loss_dice: 0.8332, decode.d1.loss_cls: 0.4851, decode.d1.loss_mask: 0.5046, decode.d1.loss_dice: 0.7575, decode.d2.loss_cls: 0.3653, decode.d2.loss_mask: 0.4927, decode.d2.loss_dice: 0.7225, decode.d3.loss_cls: 0.3182, decode.d3.loss_mask: 0.4871, decode.d3.loss_dice: 0.7104, decode.d4.loss_cls: 0.3123, decode.d4.loss_mask: 0.4856, decode.d4.loss_dice: 0.7083, decode.d5.loss_cls: 0.3037, decode.d5.loss_mask: 0.4839, decode.d5.loss_dice: 0.7083, decode.d6.loss_cls: 0.2907, decode.d6.loss_mask: 0.4877, decode.d6.loss_dice: 0.7077, decode.d7.loss_cls: 0.2939, decode.d7.loss_mask: 0.4859, decode.d7.loss_dice: 0.7026, decode.d8.loss_cls: 0.2846, decode.d8.loss_mask: 0.4840, decode.d8.loss_dice: 0.7063, loss: 17.0942 +2022-06-05 04:01:38,260 - mmseg - INFO - Iter [17150/40000] lr: 4.356e-06, eta: 3:05:44, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2517, decode.loss_mask: 0.4943, decode.loss_dice: 0.6522, decode.d0.loss_cls: 1.9573, decode.d0.loss_mask: 0.5339, decode.d0.loss_dice: 0.7835, decode.d1.loss_cls: 0.4560, decode.d1.loss_mask: 0.5149, decode.d1.loss_dice: 0.7011, decode.d2.loss_cls: 0.3309, decode.d2.loss_mask: 0.5011, decode.d2.loss_dice: 0.6756, decode.d3.loss_cls: 0.2890, decode.d3.loss_mask: 0.4961, decode.d3.loss_dice: 0.6611, decode.d4.loss_cls: 0.2760, decode.d4.loss_mask: 0.4947, decode.d4.loss_dice: 0.6622, decode.d5.loss_cls: 0.2639, decode.d5.loss_mask: 0.4975, decode.d5.loss_dice: 0.6621, decode.d6.loss_cls: 0.2638, decode.d6.loss_mask: 0.4926, decode.d6.loss_dice: 0.6564, decode.d7.loss_cls: 0.2524, decode.d7.loss_mask: 0.4971, decode.d7.loss_dice: 0.6570, decode.d8.loss_cls: 0.2532, decode.d8.loss_mask: 0.4948, decode.d8.loss_dice: 0.6567, loss: 16.3792 +2022-06-05 04:02:02,813 - mmseg - INFO - Iter [17200/40000] lr: 4.347e-06, eta: 3:05:20, time: 0.491, data_time: 0.062, memory: 31652, decode.loss_cls: 0.2731, decode.loss_mask: 0.4893, decode.loss_dice: 0.6859, decode.d0.loss_cls: 1.9736, decode.d0.loss_mask: 0.5353, decode.d0.loss_dice: 0.8179, decode.d1.loss_cls: 0.4637, decode.d1.loss_mask: 0.5119, decode.d1.loss_dice: 0.7410, decode.d2.loss_cls: 0.3499, decode.d2.loss_mask: 0.4954, decode.d2.loss_dice: 0.7076, decode.d3.loss_cls: 0.3064, decode.d3.loss_mask: 0.4931, decode.d3.loss_dice: 0.6947, decode.d4.loss_cls: 0.2897, decode.d4.loss_mask: 0.4902, decode.d4.loss_dice: 0.6946, decode.d5.loss_cls: 0.2894, decode.d5.loss_mask: 0.4882, decode.d5.loss_dice: 0.6840, decode.d6.loss_cls: 0.2716, decode.d6.loss_mask: 0.4901, decode.d6.loss_dice: 0.6850, decode.d7.loss_cls: 0.2712, decode.d7.loss_mask: 0.4904, decode.d7.loss_dice: 0.6860, decode.d8.loss_cls: 0.2694, decode.d8.loss_mask: 0.4930, decode.d8.loss_dice: 0.6873, loss: 16.8190 +2022-06-05 04:02:24,311 - mmseg - INFO - Iter [17250/40000] lr: 4.337e-06, eta: 3:04:52, time: 0.430, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2328, decode.loss_mask: 0.4865, decode.loss_dice: 0.6452, decode.d0.loss_cls: 1.9154, decode.d0.loss_mask: 0.5263, decode.d0.loss_dice: 0.7676, decode.d1.loss_cls: 0.4362, decode.d1.loss_mask: 0.5064, decode.d1.loss_dice: 0.6861, decode.d2.loss_cls: 0.3115, decode.d2.loss_mask: 0.4927, decode.d2.loss_dice: 0.6639, decode.d3.loss_cls: 0.2718, decode.d3.loss_mask: 0.4898, decode.d3.loss_dice: 0.6501, decode.d4.loss_cls: 0.2578, decode.d4.loss_mask: 0.4873, decode.d4.loss_dice: 0.6538, decode.d5.loss_cls: 0.2510, 
decode.d5.loss_mask: 0.4866, decode.d5.loss_dice: 0.6504, decode.d6.loss_cls: 0.2436, decode.d6.loss_mask: 0.4840, decode.d6.loss_dice: 0.6520, decode.d7.loss_cls: 0.2390, decode.d7.loss_mask: 0.4833, decode.d7.loss_dice: 0.6464, decode.d8.loss_cls: 0.2346, decode.d8.loss_mask: 0.4859, decode.d8.loss_dice: 0.6483, loss: 15.9860 +2022-06-05 04:02:46,190 - mmseg - INFO - Iter [17300/40000] lr: 4.328e-06, eta: 3:04:24, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2642, decode.loss_mask: 0.4861, decode.loss_dice: 0.6835, decode.d0.loss_cls: 1.9392, decode.d0.loss_mask: 0.5211, decode.d0.loss_dice: 0.7979, decode.d1.loss_cls: 0.4372, decode.d1.loss_mask: 0.5029, decode.d1.loss_dice: 0.7324, decode.d2.loss_cls: 0.3338, decode.d2.loss_mask: 0.4898, decode.d2.loss_dice: 0.7027, decode.d3.loss_cls: 0.2934, decode.d3.loss_mask: 0.4881, decode.d3.loss_dice: 0.6870, decode.d4.loss_cls: 0.2803, decode.d4.loss_mask: 0.4895, decode.d4.loss_dice: 0.6887, decode.d5.loss_cls: 0.2711, decode.d5.loss_mask: 0.4892, decode.d5.loss_dice: 0.6878, decode.d6.loss_cls: 0.2644, decode.d6.loss_mask: 0.4866, decode.d6.loss_dice: 0.6853, decode.d7.loss_cls: 0.2608, decode.d7.loss_mask: 0.4868, decode.d7.loss_dice: 0.6797, decode.d8.loss_cls: 0.2627, decode.d8.loss_mask: 0.4819, decode.d8.loss_dice: 0.6845, loss: 16.5584 +2022-06-05 04:03:08,059 - mmseg - INFO - Iter [17350/40000] lr: 4.318e-06, eta: 3:03:56, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2747, decode.loss_mask: 0.4835, decode.loss_dice: 0.6840, decode.d0.loss_cls: 1.9816, decode.d0.loss_mask: 0.5255, decode.d0.loss_dice: 0.8118, decode.d1.loss_cls: 0.4595, decode.d1.loss_mask: 0.5008, decode.d1.loss_dice: 0.7268, decode.d2.loss_cls: 0.3542, decode.d2.loss_mask: 0.4873, decode.d2.loss_dice: 0.6959, decode.d3.loss_cls: 0.3193, decode.d3.loss_mask: 0.4815, decode.d3.loss_dice: 0.6828, decode.d4.loss_cls: 0.3061, decode.d4.loss_mask: 0.4810, decode.d4.loss_dice: 0.6812, decode.d5.loss_cls: 0.2926, decode.d5.loss_mask: 0.4812, decode.d5.loss_dice: 0.6803, decode.d6.loss_cls: 0.2828, decode.d6.loss_mask: 0.4822, decode.d6.loss_dice: 0.6837, decode.d7.loss_cls: 0.2788, decode.d7.loss_mask: 0.4807, decode.d7.loss_dice: 0.6798, decode.d8.loss_cls: 0.2729, decode.d8.loss_mask: 0.4818, decode.d8.loss_dice: 0.6888, loss: 16.7229 +2022-06-05 04:03:30,336 - mmseg - INFO - Iter [17400/40000] lr: 4.309e-06, eta: 3:03:29, time: 0.446, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2566, decode.loss_mask: 0.4826, decode.loss_dice: 0.6676, decode.d0.loss_cls: 1.9258, decode.d0.loss_mask: 0.5239, decode.d0.loss_dice: 0.7839, decode.d1.loss_cls: 0.4275, decode.d1.loss_mask: 0.5026, decode.d1.loss_dice: 0.7090, decode.d2.loss_cls: 0.3275, decode.d2.loss_mask: 0.4924, decode.d2.loss_dice: 0.6793, decode.d3.loss_cls: 0.2878, decode.d3.loss_mask: 0.4856, decode.d3.loss_dice: 0.6679, decode.d4.loss_cls: 0.2784, decode.d4.loss_mask: 0.4834, decode.d4.loss_dice: 0.6667, decode.d5.loss_cls: 0.2628, decode.d5.loss_mask: 0.4848, decode.d5.loss_dice: 0.6726, decode.d6.loss_cls: 0.2669, decode.d6.loss_mask: 0.4823, decode.d6.loss_dice: 0.6694, decode.d7.loss_cls: 0.2644, decode.d7.loss_mask: 0.4810, decode.d7.loss_dice: 0.6663, decode.d8.loss_cls: 0.2583, decode.d8.loss_mask: 0.4811, decode.d8.loss_dice: 0.6679, loss: 16.3062 +2022-06-05 04:03:52,461 - mmseg - INFO - Iter [17450/40000] lr: 4.299e-06, eta: 3:03:02, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2822, decode.loss_mask: 0.5025, decode.loss_dice: 0.7073, 
decode.d0.loss_cls: 1.9245, decode.d0.loss_mask: 0.5462, decode.d0.loss_dice: 0.8348, decode.d1.loss_cls: 0.4719, decode.d1.loss_mask: 0.5194, decode.d1.loss_dice: 0.7560, decode.d2.loss_cls: 0.3476, decode.d2.loss_mask: 0.5079, decode.d2.loss_dice: 0.7252, decode.d3.loss_cls: 0.3155, decode.d3.loss_mask: 0.5042, decode.d3.loss_dice: 0.7155, decode.d4.loss_cls: 0.2995, decode.d4.loss_mask: 0.5045, decode.d4.loss_dice: 0.7174, decode.d5.loss_cls: 0.2965, decode.d5.loss_mask: 0.5019, decode.d5.loss_dice: 0.7092, decode.d6.loss_cls: 0.2888, decode.d6.loss_mask: 0.5009, decode.d6.loss_dice: 0.7058, decode.d7.loss_cls: 0.2787, decode.d7.loss_mask: 0.5040, decode.d7.loss_dice: 0.7070, decode.d8.loss_cls: 0.2838, decode.d8.loss_mask: 0.5025, decode.d8.loss_dice: 0.7066, loss: 17.1680 +2022-06-05 04:04:17,519 - mmseg - INFO - Iter [17500/40000] lr: 4.290e-06, eta: 3:02:39, time: 0.501, data_time: 0.059, memory: 31652, decode.loss_cls: 0.2721, decode.loss_mask: 0.4781, decode.loss_dice: 0.6834, decode.d0.loss_cls: 1.9839, decode.d0.loss_mask: 0.5211, decode.d0.loss_dice: 0.8195, decode.d1.loss_cls: 0.4560, decode.d1.loss_mask: 0.4963, decode.d1.loss_dice: 0.7365, decode.d2.loss_cls: 0.3393, decode.d2.loss_mask: 0.4855, decode.d2.loss_dice: 0.7089, decode.d3.loss_cls: 0.3038, decode.d3.loss_mask: 0.4841, decode.d3.loss_dice: 0.6919, decode.d4.loss_cls: 0.2901, decode.d4.loss_mask: 0.4799, decode.d4.loss_dice: 0.6915, decode.d5.loss_cls: 0.2811, decode.d5.loss_mask: 0.4787, decode.d5.loss_dice: 0.6915, decode.d6.loss_cls: 0.2760, decode.d6.loss_mask: 0.4781, decode.d6.loss_dice: 0.6869, decode.d7.loss_cls: 0.2735, decode.d7.loss_mask: 0.4781, decode.d7.loss_dice: 0.6922, decode.d8.loss_cls: 0.2685, decode.d8.loss_mask: 0.4753, decode.d8.loss_dice: 0.6903, loss: 16.6920 +2022-06-05 04:04:39,361 - mmseg - INFO - Iter [17550/40000] lr: 4.280e-06, eta: 3:02:11, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2589, decode.loss_mask: 0.5052, decode.loss_dice: 0.6877, decode.d0.loss_cls: 1.9206, decode.d0.loss_mask: 0.5480, decode.d0.loss_dice: 0.8158, decode.d1.loss_cls: 0.4429, decode.d1.loss_mask: 0.5242, decode.d1.loss_dice: 0.7232, decode.d2.loss_cls: 0.3373, decode.d2.loss_mask: 0.5136, decode.d2.loss_dice: 0.6963, decode.d3.loss_cls: 0.2914, decode.d3.loss_mask: 0.5092, decode.d3.loss_dice: 0.6875, decode.d4.loss_cls: 0.2763, decode.d4.loss_mask: 0.5073, decode.d4.loss_dice: 0.6900, decode.d5.loss_cls: 0.2729, decode.d5.loss_mask: 0.5057, decode.d5.loss_dice: 0.6831, decode.d6.loss_cls: 0.2651, decode.d6.loss_mask: 0.5061, decode.d6.loss_dice: 0.6772, decode.d7.loss_cls: 0.2600, decode.d7.loss_mask: 0.5051, decode.d7.loss_dice: 0.6789, decode.d8.loss_cls: 0.2587, decode.d8.loss_mask: 0.5039, decode.d8.loss_dice: 0.6773, loss: 16.7295 +2022-06-05 04:05:01,754 - mmseg - INFO - Iter [17600/40000] lr: 4.271e-06, eta: 3:01:44, time: 0.448, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2631, decode.loss_mask: 0.4876, decode.loss_dice: 0.6774, decode.d0.loss_cls: 1.9379, decode.d0.loss_mask: 0.5287, decode.d0.loss_dice: 0.7969, decode.d1.loss_cls: 0.4390, decode.d1.loss_mask: 0.5095, decode.d1.loss_dice: 0.7258, decode.d2.loss_cls: 0.3358, decode.d2.loss_mask: 0.4962, decode.d2.loss_dice: 0.6951, decode.d3.loss_cls: 0.3019, decode.d3.loss_mask: 0.4904, decode.d3.loss_dice: 0.6783, decode.d4.loss_cls: 0.2891, decode.d4.loss_mask: 0.4881, decode.d4.loss_dice: 0.6795, decode.d5.loss_cls: 0.2772, decode.d5.loss_mask: 0.4874, decode.d5.loss_dice: 0.6844, decode.d6.loss_cls: 0.2729, 
decode.d6.loss_mask: 0.4872, decode.d6.loss_dice: 0.6798, decode.d7.loss_cls: 0.2678, decode.d7.loss_mask: 0.4892, decode.d7.loss_dice: 0.6812, decode.d8.loss_cls: 0.2688, decode.d8.loss_mask: 0.4901, decode.d8.loss_dice: 0.6755, loss: 16.5819 +2022-06-05 04:05:23,925 - mmseg - INFO - Iter [17650/40000] lr: 4.261e-06, eta: 3:01:17, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2739, decode.loss_mask: 0.4767, decode.loss_dice: 0.6639, decode.d0.loss_cls: 1.8878, decode.d0.loss_mask: 0.5182, decode.d0.loss_dice: 0.7911, decode.d1.loss_cls: 0.4379, decode.d1.loss_mask: 0.5006, decode.d1.loss_dice: 0.7138, decode.d2.loss_cls: 0.3432, decode.d2.loss_mask: 0.4839, decode.d2.loss_dice: 0.6781, decode.d3.loss_cls: 0.3010, decode.d3.loss_mask: 0.4800, decode.d3.loss_dice: 0.6687, decode.d4.loss_cls: 0.2923, decode.d4.loss_mask: 0.4796, decode.d4.loss_dice: 0.6691, decode.d5.loss_cls: 0.2831, decode.d5.loss_mask: 0.4773, decode.d5.loss_dice: 0.6658, decode.d6.loss_cls: 0.2818, decode.d6.loss_mask: 0.4758, decode.d6.loss_dice: 0.6635, decode.d7.loss_cls: 0.2788, decode.d7.loss_mask: 0.4746, decode.d7.loss_dice: 0.6634, decode.d8.loss_cls: 0.2748, decode.d8.loss_mask: 0.4748, decode.d8.loss_dice: 0.6674, loss: 16.3407 +2022-06-05 04:05:45,949 - mmseg - INFO - Iter [17700/40000] lr: 4.251e-06, eta: 3:00:50, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2710, decode.loss_mask: 0.4863, decode.loss_dice: 0.6995, decode.d0.loss_cls: 1.9258, decode.d0.loss_mask: 0.5249, decode.d0.loss_dice: 0.8110, decode.d1.loss_cls: 0.4618, decode.d1.loss_mask: 0.5039, decode.d1.loss_dice: 0.7438, decode.d2.loss_cls: 0.3459, decode.d2.loss_mask: 0.4920, decode.d2.loss_dice: 0.7141, decode.d3.loss_cls: 0.3162, decode.d3.loss_mask: 0.4872, decode.d3.loss_dice: 0.7036, decode.d4.loss_cls: 0.2995, decode.d4.loss_mask: 0.4903, decode.d4.loss_dice: 0.7015, decode.d5.loss_cls: 0.2834, decode.d5.loss_mask: 0.4882, decode.d5.loss_dice: 0.7033, decode.d6.loss_cls: 0.2767, decode.d6.loss_mask: 0.4875, decode.d6.loss_dice: 0.7000, decode.d7.loss_cls: 0.2806, decode.d7.loss_mask: 0.4850, decode.d7.loss_dice: 0.7019, decode.d8.loss_cls: 0.2737, decode.d8.loss_mask: 0.4858, decode.d8.loss_dice: 0.7018, loss: 16.8460 +2022-06-05 04:06:08,136 - mmseg - INFO - Iter [17750/40000] lr: 4.242e-06, eta: 3:00:23, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2613, decode.loss_mask: 0.4834, decode.loss_dice: 0.6625, decode.d0.loss_cls: 1.9075, decode.d0.loss_mask: 0.5261, decode.d0.loss_dice: 0.7915, decode.d1.loss_cls: 0.4398, decode.d1.loss_mask: 0.5022, decode.d1.loss_dice: 0.7149, decode.d2.loss_cls: 0.3319, decode.d2.loss_mask: 0.4898, decode.d2.loss_dice: 0.6774, decode.d3.loss_cls: 0.2957, decode.d3.loss_mask: 0.4869, decode.d3.loss_dice: 0.6734, decode.d4.loss_cls: 0.2795, decode.d4.loss_mask: 0.4861, decode.d4.loss_dice: 0.6749, decode.d5.loss_cls: 0.2751, decode.d5.loss_mask: 0.4862, decode.d5.loss_dice: 0.6732, decode.d6.loss_cls: 0.2692, decode.d6.loss_mask: 0.4836, decode.d6.loss_dice: 0.6653, decode.d7.loss_cls: 0.2681, decode.d7.loss_mask: 0.4839, decode.d7.loss_dice: 0.6641, decode.d8.loss_cls: 0.2630, decode.d8.loss_mask: 0.4842, decode.d8.loss_dice: 0.6647, loss: 16.3654 +2022-06-05 04:06:32,581 - mmseg - INFO - Iter [17800/40000] lr: 4.232e-06, eta: 2:59:59, time: 0.489, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2623, decode.loss_mask: 0.4726, decode.loss_dice: 0.6599, decode.d0.loss_cls: 1.8872, decode.d0.loss_mask: 0.5097, decode.d0.loss_dice: 0.7856, 
decode.d1.loss_cls: 0.4435, decode.d1.loss_mask: 0.4918, decode.d1.loss_dice: 0.7042, decode.d2.loss_cls: 0.3344, decode.d2.loss_mask: 0.4754, decode.d2.loss_dice: 0.6705, decode.d3.loss_cls: 0.2899, decode.d3.loss_mask: 0.4693, decode.d3.loss_dice: 0.6627, decode.d4.loss_cls: 0.2781, decode.d4.loss_mask: 0.4708, decode.d4.loss_dice: 0.6611, decode.d5.loss_cls: 0.2720, decode.d5.loss_mask: 0.4702, decode.d5.loss_dice: 0.6617, decode.d6.loss_cls: 0.2694, decode.d6.loss_mask: 0.4696, decode.d6.loss_dice: 0.6596, decode.d7.loss_cls: 0.2638, decode.d7.loss_mask: 0.4697, decode.d7.loss_dice: 0.6596, decode.d8.loss_cls: 0.2592, decode.d8.loss_mask: 0.4731, decode.d8.loss_dice: 0.6622, loss: 16.1194 +2022-06-05 04:06:55,112 - mmseg - INFO - Iter [17850/40000] lr: 4.223e-06, eta: 2:59:32, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2312, decode.loss_mask: 0.4930, decode.loss_dice: 0.6700, decode.d0.loss_cls: 1.8915, decode.d0.loss_mask: 0.5314, decode.d0.loss_dice: 0.7834, decode.d1.loss_cls: 0.4138, decode.d1.loss_mask: 0.5121, decode.d1.loss_dice: 0.7022, decode.d2.loss_cls: 0.3029, decode.d2.loss_mask: 0.5000, decode.d2.loss_dice: 0.6844, decode.d3.loss_cls: 0.2599, decode.d3.loss_mask: 0.4963, decode.d3.loss_dice: 0.6709, decode.d4.loss_cls: 0.2502, decode.d4.loss_mask: 0.4941, decode.d4.loss_dice: 0.6744, decode.d5.loss_cls: 0.2498, decode.d5.loss_mask: 0.4924, decode.d5.loss_dice: 0.6709, decode.d6.loss_cls: 0.2339, decode.d6.loss_mask: 0.4920, decode.d6.loss_dice: 0.6661, decode.d7.loss_cls: 0.2355, decode.d7.loss_mask: 0.4935, decode.d7.loss_dice: 0.6654, decode.d8.loss_cls: 0.2388, decode.d8.loss_mask: 0.4929, decode.d8.loss_dice: 0.6647, loss: 16.1572 +2022-06-05 04:07:17,551 - mmseg - INFO - Iter [17900/40000] lr: 4.213e-06, eta: 2:59:06, time: 0.449, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2653, decode.loss_mask: 0.4980, decode.loss_dice: 0.6629, decode.d0.loss_cls: 1.8771, decode.d0.loss_mask: 0.5436, decode.d0.loss_dice: 0.7890, decode.d1.loss_cls: 0.4300, decode.d1.loss_mask: 0.5177, decode.d1.loss_dice: 0.7091, decode.d2.loss_cls: 0.3285, decode.d2.loss_mask: 0.5048, decode.d2.loss_dice: 0.6789, decode.d3.loss_cls: 0.2929, decode.d3.loss_mask: 0.5001, decode.d3.loss_dice: 0.6674, decode.d4.loss_cls: 0.2845, decode.d4.loss_mask: 0.4996, decode.d4.loss_dice: 0.6619, decode.d5.loss_cls: 0.2729, decode.d5.loss_mask: 0.4996, decode.d5.loss_dice: 0.6654, decode.d6.loss_cls: 0.2678, decode.d6.loss_mask: 0.5015, decode.d6.loss_dice: 0.6633, decode.d7.loss_cls: 0.2711, decode.d7.loss_mask: 0.4993, decode.d7.loss_dice: 0.6652, decode.d8.loss_cls: 0.2599, decode.d8.loss_mask: 0.4991, decode.d8.loss_dice: 0.6651, loss: 16.4416 +2022-06-05 04:07:39,591 - mmseg - INFO - Iter [17950/40000] lr: 4.204e-06, eta: 2:58:38, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2509, decode.loss_mask: 0.4766, decode.loss_dice: 0.6891, decode.d0.loss_cls: 1.9568, decode.d0.loss_mask: 0.5139, decode.d0.loss_dice: 0.8061, decode.d1.loss_cls: 0.4446, decode.d1.loss_mask: 0.4916, decode.d1.loss_dice: 0.7296, decode.d2.loss_cls: 0.3419, decode.d2.loss_mask: 0.4819, decode.d2.loss_dice: 0.7009, decode.d3.loss_cls: 0.2918, decode.d3.loss_mask: 0.4779, decode.d3.loss_dice: 0.6911, decode.d4.loss_cls: 0.2728, decode.d4.loss_mask: 0.4775, decode.d4.loss_dice: 0.6906, decode.d5.loss_cls: 0.2691, decode.d5.loss_mask: 0.4751, decode.d5.loss_dice: 0.6905, decode.d6.loss_cls: 0.2597, decode.d6.loss_mask: 0.4789, decode.d6.loss_dice: 0.6878, decode.d7.loss_cls: 0.2561, 
decode.d7.loss_mask: 0.4744, decode.d7.loss_dice: 0.6849, decode.d8.loss_cls: 0.2500, decode.d8.loss_mask: 0.4751, decode.d8.loss_dice: 0.6909, loss: 16.4783 +2022-06-05 04:08:02,011 - mmseg - INFO - Saving checkpoint at 18000 iterations +2022-06-05 04:08:04,341 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:08:04,341 - mmseg - INFO - Iter [18000/40000] lr: 4.194e-06, eta: 2:58:15, time: 0.495, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2548, decode.loss_mask: 0.4690, decode.loss_dice: 0.6766, decode.d0.loss_cls: 1.9107, decode.d0.loss_mask: 0.5021, decode.d0.loss_dice: 0.7929, decode.d1.loss_cls: 0.4419, decode.d1.loss_mask: 0.4853, decode.d1.loss_dice: 0.7213, decode.d2.loss_cls: 0.3305, decode.d2.loss_mask: 0.4746, decode.d2.loss_dice: 0.6858, decode.d3.loss_cls: 0.2884, decode.d3.loss_mask: 0.4717, decode.d3.loss_dice: 0.6752, decode.d4.loss_cls: 0.2771, decode.d4.loss_mask: 0.4704, decode.d4.loss_dice: 0.6743, decode.d5.loss_cls: 0.2682, decode.d5.loss_mask: 0.4715, decode.d5.loss_dice: 0.6741, decode.d6.loss_cls: 0.2579, decode.d6.loss_mask: 0.4683, decode.d6.loss_dice: 0.6726, decode.d7.loss_cls: 0.2579, decode.d7.loss_mask: 0.4670, decode.d7.loss_dice: 0.6724, decode.d8.loss_cls: 0.2530, decode.d8.loss_mask: 0.4681, decode.d8.loss_dice: 0.6733, loss: 16.2069 +2022-06-05 04:08:26,833 - mmseg - INFO - Iter [18050/40000] lr: 4.185e-06, eta: 2:57:48, time: 0.450, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2684, decode.loss_mask: 0.5008, decode.loss_dice: 0.6709, decode.d0.loss_cls: 1.8954, decode.d0.loss_mask: 0.5371, decode.d0.loss_dice: 0.7903, decode.d1.loss_cls: 0.4333, decode.d1.loss_mask: 0.5183, decode.d1.loss_dice: 0.7179, decode.d2.loss_cls: 0.3351, decode.d2.loss_mask: 0.5085, decode.d2.loss_dice: 0.6877, decode.d3.loss_cls: 0.2855, decode.d3.loss_mask: 0.5074, decode.d3.loss_dice: 0.6778, decode.d4.loss_cls: 0.2807, decode.d4.loss_mask: 0.5077, decode.d4.loss_dice: 0.6747, decode.d5.loss_cls: 0.2693, decode.d5.loss_mask: 0.5055, decode.d5.loss_dice: 0.6769, decode.d6.loss_cls: 0.2695, decode.d6.loss_mask: 0.5040, decode.d6.loss_dice: 0.6764, decode.d7.loss_cls: 0.2702, decode.d7.loss_mask: 0.5007, decode.d7.loss_dice: 0.6729, decode.d8.loss_cls: 0.2678, decode.d8.loss_mask: 0.5024, decode.d8.loss_dice: 0.6771, loss: 16.5906 +2022-06-05 04:08:51,817 - mmseg - INFO - Iter [18100/40000] lr: 4.175e-06, eta: 2:57:25, time: 0.500, data_time: 0.060, memory: 31652, decode.loss_cls: 0.2667, decode.loss_mask: 0.4780, decode.loss_dice: 0.6908, decode.d0.loss_cls: 1.9310, decode.d0.loss_mask: 0.5158, decode.d0.loss_dice: 0.8081, decode.d1.loss_cls: 0.4500, decode.d1.loss_mask: 0.4920, decode.d1.loss_dice: 0.7325, decode.d2.loss_cls: 0.3355, decode.d2.loss_mask: 0.4844, decode.d2.loss_dice: 0.7100, decode.d3.loss_cls: 0.3046, decode.d3.loss_mask: 0.4793, decode.d3.loss_dice: 0.6948, decode.d4.loss_cls: 0.2983, decode.d4.loss_mask: 0.4737, decode.d4.loss_dice: 0.6940, decode.d5.loss_cls: 0.2809, decode.d5.loss_mask: 0.4784, decode.d5.loss_dice: 0.6937, decode.d6.loss_cls: 0.2739, decode.d6.loss_mask: 0.4750, decode.d6.loss_dice: 0.6886, decode.d7.loss_cls: 0.2734, decode.d7.loss_mask: 0.4802, decode.d7.loss_dice: 0.6954, decode.d8.loss_cls: 0.2703, decode.d8.loss_mask: 0.4772, decode.d8.loss_dice: 0.6932, loss: 16.6195 +2022-06-05 04:09:13,922 - mmseg - INFO - Iter [18150/40000] lr: 4.166e-06, eta: 2:56:58, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2533, decode.loss_mask: 0.4918, 
decode.loss_dice: 0.6760, decode.d0.loss_cls: 1.9218, decode.d0.loss_mask: 0.5277, decode.d0.loss_dice: 0.7950, decode.d1.loss_cls: 0.4355, decode.d1.loss_mask: 0.5110, decode.d1.loss_dice: 0.7215, decode.d2.loss_cls: 0.3181, decode.d2.loss_mask: 0.4979, decode.d2.loss_dice: 0.6937, decode.d3.loss_cls: 0.2839, decode.d3.loss_mask: 0.4932, decode.d3.loss_dice: 0.6793, decode.d4.loss_cls: 0.2729, decode.d4.loss_mask: 0.4912, decode.d4.loss_dice: 0.6749, decode.d5.loss_cls: 0.2616, decode.d5.loss_mask: 0.4923, decode.d5.loss_dice: 0.6770, decode.d6.loss_cls: 0.2548, decode.d6.loss_mask: 0.4919, decode.d6.loss_dice: 0.6769, decode.d7.loss_cls: 0.2559, decode.d7.loss_mask: 0.4913, decode.d7.loss_dice: 0.6742, decode.d8.loss_cls: 0.2506, decode.d8.loss_mask: 0.4915, decode.d8.loss_dice: 0.6799, loss: 16.4367 +2022-06-05 04:09:36,106 - mmseg - INFO - Iter [18200/40000] lr: 4.156e-06, eta: 2:56:31, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2418, decode.loss_mask: 0.4899, decode.loss_dice: 0.6659, decode.d0.loss_cls: 1.8517, decode.d0.loss_mask: 0.5322, decode.d0.loss_dice: 0.7878, decode.d1.loss_cls: 0.4307, decode.d1.loss_mask: 0.5078, decode.d1.loss_dice: 0.7113, decode.d2.loss_cls: 0.3272, decode.d2.loss_mask: 0.4936, decode.d2.loss_dice: 0.6864, decode.d3.loss_cls: 0.2739, decode.d3.loss_mask: 0.4949, decode.d3.loss_dice: 0.6717, decode.d4.loss_cls: 0.2593, decode.d4.loss_mask: 0.4919, decode.d4.loss_dice: 0.6739, decode.d5.loss_cls: 0.2495, decode.d5.loss_mask: 0.4936, decode.d5.loss_dice: 0.6736, decode.d6.loss_cls: 0.2395, decode.d6.loss_mask: 0.4909, decode.d6.loss_dice: 0.6662, decode.d7.loss_cls: 0.2379, decode.d7.loss_mask: 0.4909, decode.d7.loss_dice: 0.6667, decode.d8.loss_cls: 0.2351, decode.d8.loss_mask: 0.4903, decode.d8.loss_dice: 0.6708, loss: 16.1966 +2022-06-05 04:09:57,962 - mmseg - INFO - Iter [18250/40000] lr: 4.147e-06, eta: 2:56:04, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2615, decode.loss_mask: 0.4727, decode.loss_dice: 0.6799, decode.d0.loss_cls: 1.8842, decode.d0.loss_mask: 0.5148, decode.d0.loss_dice: 0.8060, decode.d1.loss_cls: 0.4289, decode.d1.loss_mask: 0.4917, decode.d1.loss_dice: 0.7252, decode.d2.loss_cls: 0.3205, decode.d2.loss_mask: 0.4779, decode.d2.loss_dice: 0.6928, decode.d3.loss_cls: 0.2907, decode.d3.loss_mask: 0.4751, decode.d3.loss_dice: 0.6828, decode.d4.loss_cls: 0.2807, decode.d4.loss_mask: 0.4716, decode.d4.loss_dice: 0.6811, decode.d5.loss_cls: 0.2733, decode.d5.loss_mask: 0.4706, decode.d5.loss_dice: 0.6772, decode.d6.loss_cls: 0.2637, decode.d6.loss_mask: 0.4703, decode.d6.loss_dice: 0.6769, decode.d7.loss_cls: 0.2623, decode.d7.loss_mask: 0.4717, decode.d7.loss_dice: 0.6784, decode.d8.loss_cls: 0.2578, decode.d8.loss_mask: 0.4728, decode.d8.loss_dice: 0.6814, loss: 16.2945 +2022-06-05 04:10:20,140 - mmseg - INFO - Iter [18300/40000] lr: 4.137e-06, eta: 2:55:37, time: 0.444, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2370, decode.loss_mask: 0.4765, decode.loss_dice: 0.6586, decode.d0.loss_cls: 1.8732, decode.d0.loss_mask: 0.5131, decode.d0.loss_dice: 0.7830, decode.d1.loss_cls: 0.4168, decode.d1.loss_mask: 0.4909, decode.d1.loss_dice: 0.7001, decode.d2.loss_cls: 0.3005, decode.d2.loss_mask: 0.4843, decode.d2.loss_dice: 0.6726, decode.d3.loss_cls: 0.2676, decode.d3.loss_mask: 0.4815, decode.d3.loss_dice: 0.6629, decode.d4.loss_cls: 0.2560, decode.d4.loss_mask: 0.4816, decode.d4.loss_dice: 0.6620, decode.d5.loss_cls: 0.2497, decode.d5.loss_mask: 0.4793, decode.d5.loss_dice: 0.6600, 
decode.d6.loss_cls: 0.2486, decode.d6.loss_mask: 0.4791, decode.d6.loss_dice: 0.6599, decode.d7.loss_cls: 0.2433, decode.d7.loss_mask: 0.4772, decode.d7.loss_dice: 0.6577, decode.d8.loss_cls: 0.2388, decode.d8.loss_mask: 0.4772, decode.d8.loss_dice: 0.6598, loss: 15.9488 +2022-06-05 04:10:42,047 - mmseg - INFO - Iter [18350/40000] lr: 4.128e-06, eta: 2:55:10, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2535, decode.loss_mask: 0.4811, decode.loss_dice: 0.6772, decode.d0.loss_cls: 1.8645, decode.d0.loss_mask: 0.5225, decode.d0.loss_dice: 0.8028, decode.d1.loss_cls: 0.4382, decode.d1.loss_mask: 0.4922, decode.d1.loss_dice: 0.7190, decode.d2.loss_cls: 0.3215, decode.d2.loss_mask: 0.4850, decode.d2.loss_dice: 0.6961, decode.d3.loss_cls: 0.2828, decode.d3.loss_mask: 0.4818, decode.d3.loss_dice: 0.6883, decode.d4.loss_cls: 0.2714, decode.d4.loss_mask: 0.4811, decode.d4.loss_dice: 0.6811, decode.d5.loss_cls: 0.2767, decode.d5.loss_mask: 0.4809, decode.d5.loss_dice: 0.6769, decode.d6.loss_cls: 0.2614, decode.d6.loss_mask: 0.4798, decode.d6.loss_dice: 0.6773, decode.d7.loss_cls: 0.2617, decode.d7.loss_mask: 0.4815, decode.d7.loss_dice: 0.6744, decode.d8.loss_cls: 0.2606, decode.d8.loss_mask: 0.4796, decode.d8.loss_dice: 0.6804, loss: 16.3315 +2022-06-05 04:11:03,929 - mmseg - INFO - Iter [18400/40000] lr: 4.118e-06, eta: 2:54:43, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2511, decode.loss_mask: 0.4841, decode.loss_dice: 0.6532, decode.d0.loss_cls: 1.8511, decode.d0.loss_mask: 0.5189, decode.d0.loss_dice: 0.7657, decode.d1.loss_cls: 0.4395, decode.d1.loss_mask: 0.4957, decode.d1.loss_dice: 0.6869, decode.d2.loss_cls: 0.3302, decode.d2.loss_mask: 0.4884, decode.d2.loss_dice: 0.6652, decode.d3.loss_cls: 0.2859, decode.d3.loss_mask: 0.4849, decode.d3.loss_dice: 0.6571, decode.d4.loss_cls: 0.2744, decode.d4.loss_mask: 0.4843, decode.d4.loss_dice: 0.6585, decode.d5.loss_cls: 0.2614, decode.d5.loss_mask: 0.4858, decode.d5.loss_dice: 0.6566, decode.d6.loss_cls: 0.2589, decode.d6.loss_mask: 0.4853, decode.d6.loss_dice: 0.6558, decode.d7.loss_cls: 0.2594, decode.d7.loss_mask: 0.4816, decode.d7.loss_dice: 0.6556, decode.d8.loss_cls: 0.2562, decode.d8.loss_mask: 0.4831, decode.d8.loss_dice: 0.6535, loss: 16.0684 +2022-06-05 04:11:28,060 - mmseg - INFO - Iter [18450/40000] lr: 4.108e-06, eta: 2:54:18, time: 0.482, data_time: 0.054, memory: 31652, decode.loss_cls: 0.2496, decode.loss_mask: 0.4859, decode.loss_dice: 0.6479, decode.d0.loss_cls: 1.8613, decode.d0.loss_mask: 0.5255, decode.d0.loss_dice: 0.7655, decode.d1.loss_cls: 0.4379, decode.d1.loss_mask: 0.5017, decode.d1.loss_dice: 0.6881, decode.d2.loss_cls: 0.3213, decode.d2.loss_mask: 0.4913, decode.d2.loss_dice: 0.6615, decode.d3.loss_cls: 0.2798, decode.d3.loss_mask: 0.4898, decode.d3.loss_dice: 0.6534, decode.d4.loss_cls: 0.2743, decode.d4.loss_mask: 0.4896, decode.d4.loss_dice: 0.6574, decode.d5.loss_cls: 0.2646, decode.d5.loss_mask: 0.4858, decode.d5.loss_dice: 0.6517, decode.d6.loss_cls: 0.2524, decode.d6.loss_mask: 0.4845, decode.d6.loss_dice: 0.6512, decode.d7.loss_cls: 0.2533, decode.d7.loss_mask: 0.4871, decode.d7.loss_dice: 0.6466, decode.d8.loss_cls: 0.2534, decode.d8.loss_mask: 0.4863, decode.d8.loss_dice: 0.6481, loss: 16.0467 +2022-06-05 04:11:49,837 - mmseg - INFO - Iter [18500/40000] lr: 4.099e-06, eta: 2:53:51, time: 0.436, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2497, decode.loss_mask: 0.4585, decode.loss_dice: 0.6585, decode.d0.loss_cls: 1.9091, decode.d0.loss_mask: 0.5043, 
decode.d0.loss_dice: 0.7886, decode.d1.loss_cls: 0.4377, decode.d1.loss_mask: 0.4766, decode.d1.loss_dice: 0.7037, decode.d2.loss_cls: 0.3228, decode.d2.loss_mask: 0.4678, decode.d2.loss_dice: 0.6782, decode.d3.loss_cls: 0.2792, decode.d3.loss_mask: 0.4626, decode.d3.loss_dice: 0.6651, decode.d4.loss_cls: 0.2710, decode.d4.loss_mask: 0.4626, decode.d4.loss_dice: 0.6655, decode.d5.loss_cls: 0.2601, decode.d5.loss_mask: 0.4610, decode.d5.loss_dice: 0.6641, decode.d6.loss_cls: 0.2572, decode.d6.loss_mask: 0.4611, decode.d6.loss_dice: 0.6649, decode.d7.loss_cls: 0.2493, decode.d7.loss_mask: 0.4603, decode.d7.loss_dice: 0.6661, decode.d8.loss_cls: 0.2528, decode.d8.loss_mask: 0.4602, decode.d8.loss_dice: 0.6633, loss: 15.9821 +2022-06-05 04:12:11,638 - mmseg - INFO - Iter [18550/40000] lr: 4.089e-06, eta: 2:53:24, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2439, decode.loss_mask: 0.4775, decode.loss_dice: 0.6812, decode.d0.loss_cls: 1.8320, decode.d0.loss_mask: 0.5150, decode.d0.loss_dice: 0.7944, decode.d1.loss_cls: 0.4127, decode.d1.loss_mask: 0.4956, decode.d1.loss_dice: 0.7225, decode.d2.loss_cls: 0.3146, decode.d2.loss_mask: 0.4775, decode.d2.loss_dice: 0.6917, decode.d3.loss_cls: 0.2786, decode.d3.loss_mask: 0.4752, decode.d3.loss_dice: 0.6807, decode.d4.loss_cls: 0.2655, decode.d4.loss_mask: 0.4740, decode.d4.loss_dice: 0.6829, decode.d5.loss_cls: 0.2517, decode.d5.loss_mask: 0.4737, decode.d5.loss_dice: 0.6854, decode.d6.loss_cls: 0.2511, decode.d6.loss_mask: 0.4759, decode.d6.loss_dice: 0.6847, decode.d7.loss_cls: 0.2385, decode.d7.loss_mask: 0.4791, decode.d7.loss_dice: 0.6824, decode.d8.loss_cls: 0.2402, decode.d8.loss_mask: 0.4763, decode.d8.loss_dice: 0.6844, loss: 16.1389 +2022-06-05 04:12:34,296 - mmseg - INFO - Iter [18600/40000] lr: 4.080e-06, eta: 2:52:58, time: 0.453, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2398, decode.loss_mask: 0.4904, decode.loss_dice: 0.6847, decode.d0.loss_cls: 1.8432, decode.d0.loss_mask: 0.5291, decode.d0.loss_dice: 0.7880, decode.d1.loss_cls: 0.4277, decode.d1.loss_mask: 0.5103, decode.d1.loss_dice: 0.7178, decode.d2.loss_cls: 0.3155, decode.d2.loss_mask: 0.4985, decode.d2.loss_dice: 0.6957, decode.d3.loss_cls: 0.2815, decode.d3.loss_mask: 0.4945, decode.d3.loss_dice: 0.6891, decode.d4.loss_cls: 0.2660, decode.d4.loss_mask: 0.4915, decode.d4.loss_dice: 0.6816, decode.d5.loss_cls: 0.2632, decode.d5.loss_mask: 0.4919, decode.d5.loss_dice: 0.6856, decode.d6.loss_cls: 0.2497, decode.d6.loss_mask: 0.4928, decode.d6.loss_dice: 0.6805, decode.d7.loss_cls: 0.2418, decode.d7.loss_mask: 0.4930, decode.d7.loss_dice: 0.6816, decode.d8.loss_cls: 0.2403, decode.d8.loss_mask: 0.4908, decode.d8.loss_dice: 0.6808, loss: 16.3367 +2022-06-05 04:12:56,848 - mmseg - INFO - Iter [18650/40000] lr: 4.070e-06, eta: 2:52:32, time: 0.451, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2634, decode.loss_mask: 0.4750, decode.loss_dice: 0.6734, decode.d0.loss_cls: 1.9598, decode.d0.loss_mask: 0.5217, decode.d0.loss_dice: 0.8005, decode.d1.loss_cls: 0.4490, decode.d1.loss_mask: 0.4961, decode.d1.loss_dice: 0.7191, decode.d2.loss_cls: 0.3369, decode.d2.loss_mask: 0.4837, decode.d2.loss_dice: 0.6856, decode.d3.loss_cls: 0.3030, decode.d3.loss_mask: 0.4796, decode.d3.loss_dice: 0.6722, decode.d4.loss_cls: 0.2861, decode.d4.loss_mask: 0.4795, decode.d4.loss_dice: 0.6734, decode.d5.loss_cls: 0.2760, decode.d5.loss_mask: 0.4785, decode.d5.loss_dice: 0.6705, decode.d6.loss_cls: 0.2739, decode.d6.loss_mask: 0.4766, decode.d6.loss_dice: 0.6706, 
decode.d7.loss_cls: 0.2641, decode.d7.loss_mask: 0.4762, decode.d7.loss_dice: 0.6709, decode.d8.loss_cls: 0.2617, decode.d8.loss_mask: 0.4766, decode.d8.loss_dice: 0.6745, loss: 16.4279 +2022-06-05 04:13:18,996 - mmseg - INFO - Iter [18700/40000] lr: 4.061e-06, eta: 2:52:05, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2392, decode.loss_mask: 0.4824, decode.loss_dice: 0.6592, decode.d0.loss_cls: 1.8543, decode.d0.loss_mask: 0.5296, decode.d0.loss_dice: 0.7748, decode.d1.loss_cls: 0.4140, decode.d1.loss_mask: 0.5027, decode.d1.loss_dice: 0.6984, decode.d2.loss_cls: 0.3161, decode.d2.loss_mask: 0.4902, decode.d2.loss_dice: 0.6709, decode.d3.loss_cls: 0.2665, decode.d3.loss_mask: 0.4886, decode.d3.loss_dice: 0.6700, decode.d4.loss_cls: 0.2620, decode.d4.loss_mask: 0.4838, decode.d4.loss_dice: 0.6639, decode.d5.loss_cls: 0.2568, decode.d5.loss_mask: 0.4835, decode.d5.loss_dice: 0.6633, decode.d6.loss_cls: 0.2544, decode.d6.loss_mask: 0.4830, decode.d6.loss_dice: 0.6604, decode.d7.loss_cls: 0.2446, decode.d7.loss_mask: 0.4824, decode.d7.loss_dice: 0.6615, decode.d8.loss_cls: 0.2426, decode.d8.loss_mask: 0.4830, decode.d8.loss_dice: 0.6601, loss: 16.0425 +2022-06-05 04:13:43,231 - mmseg - INFO - Iter [18750/40000] lr: 4.051e-06, eta: 2:51:41, time: 0.485, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2541, decode.loss_mask: 0.4661, decode.loss_dice: 0.6731, decode.d0.loss_cls: 1.8980, decode.d0.loss_mask: 0.5098, decode.d0.loss_dice: 0.7894, decode.d1.loss_cls: 0.4454, decode.d1.loss_mask: 0.4844, decode.d1.loss_dice: 0.7096, decode.d2.loss_cls: 0.3325, decode.d2.loss_mask: 0.4730, decode.d2.loss_dice: 0.6856, decode.d3.loss_cls: 0.2889, decode.d3.loss_mask: 0.4706, decode.d3.loss_dice: 0.6763, decode.d4.loss_cls: 0.2728, decode.d4.loss_mask: 0.4700, decode.d4.loss_dice: 0.6770, decode.d5.loss_cls: 0.2664, decode.d5.loss_mask: 0.4710, decode.d5.loss_dice: 0.6735, decode.d6.loss_cls: 0.2604, decode.d6.loss_mask: 0.4711, decode.d6.loss_dice: 0.6739, decode.d7.loss_cls: 0.2533, decode.d7.loss_mask: 0.4680, decode.d7.loss_dice: 0.6744, decode.d8.loss_cls: 0.2508, decode.d8.loss_mask: 0.4678, decode.d8.loss_dice: 0.6772, loss: 16.1842 +2022-06-05 04:14:05,373 - mmseg - INFO - Iter [18800/40000] lr: 4.042e-06, eta: 2:51:14, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2434, decode.loss_mask: 0.4864, decode.loss_dice: 0.6627, decode.d0.loss_cls: 1.8724, decode.d0.loss_mask: 0.5278, decode.d0.loss_dice: 0.7887, decode.d1.loss_cls: 0.4450, decode.d1.loss_mask: 0.5041, decode.d1.loss_dice: 0.7036, decode.d2.loss_cls: 0.3260, decode.d2.loss_mask: 0.4893, decode.d2.loss_dice: 0.6813, decode.d3.loss_cls: 0.2786, decode.d3.loss_mask: 0.4879, decode.d3.loss_dice: 0.6667, decode.d4.loss_cls: 0.2620, decode.d4.loss_mask: 0.4893, decode.d4.loss_dice: 0.6740, decode.d5.loss_cls: 0.2609, decode.d5.loss_mask: 0.4872, decode.d5.loss_dice: 0.6663, decode.d6.loss_cls: 0.2497, decode.d6.loss_mask: 0.4877, decode.d6.loss_dice: 0.6649, decode.d7.loss_cls: 0.2479, decode.d7.loss_mask: 0.4863, decode.d7.loss_dice: 0.6677, decode.d8.loss_cls: 0.2439, decode.d8.loss_mask: 0.4849, decode.d8.loss_dice: 0.6643, loss: 16.2010 +2022-06-05 04:14:26,936 - mmseg - INFO - Iter [18850/40000] lr: 4.032e-06, eta: 2:50:47, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2416, decode.loss_mask: 0.4990, decode.loss_dice: 0.7054, decode.d0.loss_cls: 1.8156, decode.d0.loss_mask: 0.5420, decode.d0.loss_dice: 0.8174, decode.d1.loss_cls: 0.4188, decode.d1.loss_mask: 0.5154, 
decode.d1.loss_dice: 0.7492, decode.d2.loss_cls: 0.3046, decode.d2.loss_mask: 0.5055, decode.d2.loss_dice: 0.7229, decode.d3.loss_cls: 0.2731, decode.d3.loss_mask: 0.5029, decode.d3.loss_dice: 0.7142, decode.d4.loss_cls: 0.2640, decode.d4.loss_mask: 0.4993, decode.d4.loss_dice: 0.7101, decode.d5.loss_cls: 0.2529, decode.d5.loss_mask: 0.5000, decode.d5.loss_dice: 0.7079, decode.d6.loss_cls: 0.2491, decode.d6.loss_mask: 0.4990, decode.d6.loss_dice: 0.7055, decode.d7.loss_cls: 0.2453, decode.d7.loss_mask: 0.4982, decode.d7.loss_dice: 0.7085, decode.d8.loss_cls: 0.2473, decode.d8.loss_mask: 0.4992, decode.d8.loss_dice: 0.7058, loss: 16.6199 +2022-06-05 04:14:48,249 - mmseg - INFO - Iter [18900/40000] lr: 4.023e-06, eta: 2:50:20, time: 0.426, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2452, decode.loss_mask: 0.4747, decode.loss_dice: 0.6669, decode.d0.loss_cls: 1.8578, decode.d0.loss_mask: 0.5107, decode.d0.loss_dice: 0.7732, decode.d1.loss_cls: 0.4156, decode.d1.loss_mask: 0.4912, decode.d1.loss_dice: 0.7054, decode.d2.loss_cls: 0.3015, decode.d2.loss_mask: 0.4816, decode.d2.loss_dice: 0.6824, decode.d3.loss_cls: 0.2710, decode.d3.loss_mask: 0.4778, decode.d3.loss_dice: 0.6711, decode.d4.loss_cls: 0.2588, decode.d4.loss_mask: 0.4785, decode.d4.loss_dice: 0.6695, decode.d5.loss_cls: 0.2570, decode.d5.loss_mask: 0.4758, decode.d5.loss_dice: 0.6680, decode.d6.loss_cls: 0.2460, decode.d6.loss_mask: 0.4774, decode.d6.loss_dice: 0.6664, decode.d7.loss_cls: 0.2467, decode.d7.loss_mask: 0.4767, decode.d7.loss_dice: 0.6643, decode.d8.loss_cls: 0.2399, decode.d8.loss_mask: 0.4755, decode.d8.loss_dice: 0.6667, loss: 15.9936 +2022-06-05 04:15:09,794 - mmseg - INFO - Iter [18950/40000] lr: 4.013e-06, eta: 2:49:52, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2474, decode.loss_mask: 0.4755, decode.loss_dice: 0.6645, decode.d0.loss_cls: 1.8392, decode.d0.loss_mask: 0.5127, decode.d0.loss_dice: 0.7854, decode.d1.loss_cls: 0.4209, decode.d1.loss_mask: 0.4979, decode.d1.loss_dice: 0.7191, decode.d2.loss_cls: 0.3222, decode.d2.loss_mask: 0.4782, decode.d2.loss_dice: 0.6834, decode.d3.loss_cls: 0.2803, decode.d3.loss_mask: 0.4745, decode.d3.loss_dice: 0.6638, decode.d4.loss_cls: 0.2654, decode.d4.loss_mask: 0.4763, decode.d4.loss_dice: 0.6661, decode.d5.loss_cls: 0.2517, decode.d5.loss_mask: 0.4746, decode.d5.loss_dice: 0.6690, decode.d6.loss_cls: 0.2513, decode.d6.loss_mask: 0.4732, decode.d6.loss_dice: 0.6646, decode.d7.loss_cls: 0.2495, decode.d7.loss_mask: 0.4722, decode.d7.loss_dice: 0.6646, decode.d8.loss_cls: 0.2502, decode.d8.loss_mask: 0.4738, decode.d8.loss_dice: 0.6643, loss: 16.0318 +2022-06-05 04:15:31,365 - mmseg - INFO - Saving checkpoint at 19000 iterations +2022-06-05 04:15:33,813 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:15:33,813 - mmseg - INFO - Iter [19000/40000] lr: 4.004e-06, eta: 2:49:28, time: 0.479, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2469, decode.loss_mask: 0.4580, decode.loss_dice: 0.6638, decode.d0.loss_cls: 1.8421, decode.d0.loss_mask: 0.4990, decode.d0.loss_dice: 0.7780, decode.d1.loss_cls: 0.4193, decode.d1.loss_mask: 0.4768, decode.d1.loss_dice: 0.7002, decode.d2.loss_cls: 0.3237, decode.d2.loss_mask: 0.4666, decode.d2.loss_dice: 0.6792, decode.d3.loss_cls: 0.2785, decode.d3.loss_mask: 0.4627, decode.d3.loss_dice: 0.6679, decode.d4.loss_cls: 0.2632, decode.d4.loss_mask: 0.4627, decode.d4.loss_dice: 0.6648, decode.d5.loss_cls: 0.2526, decode.d5.loss_mask: 0.4602, 
decode.d5.loss_dice: 0.6640, decode.d6.loss_cls: 0.2474, decode.d6.loss_mask: 0.4603, decode.d6.loss_dice: 0.6670, decode.d7.loss_cls: 0.2486, decode.d7.loss_mask: 0.4593, decode.d7.loss_dice: 0.6637, decode.d8.loss_cls: 0.2463, decode.d8.loss_mask: 0.4584, decode.d8.loss_dice: 0.6646, loss: 15.8460 +2022-06-05 04:15:58,174 - mmseg - INFO - Iter [19050/40000] lr: 3.994e-06, eta: 2:49:04, time: 0.488, data_time: 0.059, memory: 31652, decode.loss_cls: 0.2395, decode.loss_mask: 0.4769, decode.loss_dice: 0.6700, decode.d0.loss_cls: 1.8634, decode.d0.loss_mask: 0.5224, decode.d0.loss_dice: 0.7916, decode.d1.loss_cls: 0.4115, decode.d1.loss_mask: 0.4990, decode.d1.loss_dice: 0.7205, decode.d2.loss_cls: 0.3070, decode.d2.loss_mask: 0.4864, decode.d2.loss_dice: 0.6932, decode.d3.loss_cls: 0.2675, decode.d3.loss_mask: 0.4825, decode.d3.loss_dice: 0.6792, decode.d4.loss_cls: 0.2546, decode.d4.loss_mask: 0.4791, decode.d4.loss_dice: 0.6764, decode.d5.loss_cls: 0.2528, decode.d5.loss_mask: 0.4788, decode.d5.loss_dice: 0.6776, decode.d6.loss_cls: 0.2365, decode.d6.loss_mask: 0.4799, decode.d6.loss_dice: 0.6746, decode.d7.loss_cls: 0.2393, decode.d7.loss_mask: 0.4791, decode.d7.loss_dice: 0.6740, decode.d8.loss_cls: 0.2364, decode.d8.loss_mask: 0.4761, decode.d8.loss_dice: 0.6702, loss: 16.0960 +2022-06-05 04:16:19,589 - mmseg - INFO - Iter [19100/40000] lr: 3.985e-06, eta: 2:48:37, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2546, decode.loss_mask: 0.4689, decode.loss_dice: 0.6848, decode.d0.loss_cls: 1.8287, decode.d0.loss_mask: 0.5151, decode.d0.loss_dice: 0.8050, decode.d1.loss_cls: 0.4433, decode.d1.loss_mask: 0.4893, decode.d1.loss_dice: 0.7264, decode.d2.loss_cls: 0.3347, decode.d2.loss_mask: 0.4739, decode.d2.loss_dice: 0.6970, decode.d3.loss_cls: 0.2884, decode.d3.loss_mask: 0.4733, decode.d3.loss_dice: 0.6856, decode.d4.loss_cls: 0.2735, decode.d4.loss_mask: 0.4719, decode.d4.loss_dice: 0.6915, decode.d5.loss_cls: 0.2643, decode.d5.loss_mask: 0.4706, decode.d5.loss_dice: 0.6872, decode.d6.loss_cls: 0.2596, decode.d6.loss_mask: 0.4697, decode.d6.loss_dice: 0.6871, decode.d7.loss_cls: 0.2519, decode.d7.loss_mask: 0.4699, decode.d7.loss_dice: 0.6902, decode.d8.loss_cls: 0.2560, decode.d8.loss_mask: 0.4719, decode.d8.loss_dice: 0.6892, loss: 16.2734 +2022-06-05 04:16:41,122 - mmseg - INFO - Iter [19150/40000] lr: 3.975e-06, eta: 2:48:10, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2482, decode.loss_mask: 0.4757, decode.loss_dice: 0.6882, decode.d0.loss_cls: 1.8636, decode.d0.loss_mask: 0.5121, decode.d0.loss_dice: 0.7900, decode.d1.loss_cls: 0.4297, decode.d1.loss_mask: 0.4909, decode.d1.loss_dice: 0.7176, decode.d2.loss_cls: 0.3239, decode.d2.loss_mask: 0.4845, decode.d2.loss_dice: 0.6965, decode.d3.loss_cls: 0.2874, decode.d3.loss_mask: 0.4792, decode.d3.loss_dice: 0.6887, decode.d4.loss_cls: 0.2755, decode.d4.loss_mask: 0.4784, decode.d4.loss_dice: 0.6854, decode.d5.loss_cls: 0.2662, decode.d5.loss_mask: 0.4807, decode.d5.loss_dice: 0.6882, decode.d6.loss_cls: 0.2619, decode.d6.loss_mask: 0.4780, decode.d6.loss_dice: 0.6823, decode.d7.loss_cls: 0.2515, decode.d7.loss_mask: 0.4777, decode.d7.loss_dice: 0.6877, decode.d8.loss_cls: 0.2501, decode.d8.loss_mask: 0.4770, decode.d8.loss_dice: 0.6873, loss: 16.3039 +2022-06-05 04:17:03,007 - mmseg - INFO - Iter [19200/40000] lr: 3.966e-06, eta: 2:47:43, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2407, decode.loss_mask: 0.4547, decode.loss_dice: 0.6651, decode.d0.loss_cls: 1.8431, 
decode.d0.loss_mask: 0.4998, decode.d0.loss_dice: 0.7786, decode.d1.loss_cls: 0.4252, decode.d1.loss_mask: 0.4706, decode.d1.loss_dice: 0.6964, decode.d2.loss_cls: 0.3117, decode.d2.loss_mask: 0.4605, decode.d2.loss_dice: 0.6747, decode.d3.loss_cls: 0.2655, decode.d3.loss_mask: 0.4557, decode.d3.loss_dice: 0.6674, decode.d4.loss_cls: 0.2510, decode.d4.loss_mask: 0.4559, decode.d4.loss_dice: 0.6667, decode.d5.loss_cls: 0.2438, decode.d5.loss_mask: 0.4546, decode.d5.loss_dice: 0.6641, decode.d6.loss_cls: 0.2398, decode.d6.loss_mask: 0.4558, decode.d6.loss_dice: 0.6645, decode.d7.loss_cls: 0.2420, decode.d7.loss_mask: 0.4544, decode.d7.loss_dice: 0.6677, decode.d8.loss_cls: 0.2427, decode.d8.loss_mask: 0.4538, decode.d8.loss_dice: 0.6674, loss: 15.7337 +2022-06-05 04:17:24,696 - mmseg - INFO - Iter [19250/40000] lr: 3.956e-06, eta: 2:47:16, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2435, decode.loss_mask: 0.4678, decode.loss_dice: 0.6587, decode.d0.loss_cls: 1.7994, decode.d0.loss_mask: 0.5089, decode.d0.loss_dice: 0.7679, decode.d1.loss_cls: 0.4194, decode.d1.loss_mask: 0.4830, decode.d1.loss_dice: 0.6971, decode.d2.loss_cls: 0.3112, decode.d2.loss_mask: 0.4716, decode.d2.loss_dice: 0.6713, decode.d3.loss_cls: 0.2734, decode.d3.loss_mask: 0.4693, decode.d3.loss_dice: 0.6623, decode.d4.loss_cls: 0.2676, decode.d4.loss_mask: 0.4679, decode.d4.loss_dice: 0.6574, decode.d5.loss_cls: 0.2579, decode.d5.loss_mask: 0.4693, decode.d5.loss_dice: 0.6562, decode.d6.loss_cls: 0.2503, decode.d6.loss_mask: 0.4669, decode.d6.loss_dice: 0.6573, decode.d7.loss_cls: 0.2496, decode.d7.loss_mask: 0.4668, decode.d7.loss_dice: 0.6598, decode.d8.loss_cls: 0.2510, decode.d8.loss_mask: 0.4661, decode.d8.loss_dice: 0.6546, loss: 15.8034 +2022-06-05 04:17:46,855 - mmseg - INFO - Iter [19300/40000] lr: 3.946e-06, eta: 2:46:50, time: 0.444, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2401, decode.loss_mask: 0.4820, decode.loss_dice: 0.6704, decode.d0.loss_cls: 1.8467, decode.d0.loss_mask: 0.5295, decode.d0.loss_dice: 0.7909, decode.d1.loss_cls: 0.4383, decode.d1.loss_mask: 0.5030, decode.d1.loss_dice: 0.7079, decode.d2.loss_cls: 0.3206, decode.d2.loss_mask: 0.4875, decode.d2.loss_dice: 0.6809, decode.d3.loss_cls: 0.2812, decode.d3.loss_mask: 0.4840, decode.d3.loss_dice: 0.6720, decode.d4.loss_cls: 0.2705, decode.d4.loss_mask: 0.4816, decode.d4.loss_dice: 0.6712, decode.d5.loss_cls: 0.2546, decode.d5.loss_mask: 0.4840, decode.d5.loss_dice: 0.6714, decode.d6.loss_cls: 0.2490, decode.d6.loss_mask: 0.4799, decode.d6.loss_dice: 0.6694, decode.d7.loss_cls: 0.2421, decode.d7.loss_mask: 0.4789, decode.d7.loss_dice: 0.6698, decode.d8.loss_cls: 0.2401, decode.d8.loss_mask: 0.4799, decode.d8.loss_dice: 0.6686, loss: 16.1460 +2022-06-05 04:18:11,083 - mmseg - INFO - Iter [19350/40000] lr: 3.937e-06, eta: 2:46:25, time: 0.485, data_time: 0.060, memory: 31652, decode.loss_cls: 0.2296, decode.loss_mask: 0.4836, decode.loss_dice: 0.6443, decode.d0.loss_cls: 1.7726, decode.d0.loss_mask: 0.5314, decode.d0.loss_dice: 0.7442, decode.d1.loss_cls: 0.3900, decode.d1.loss_mask: 0.5016, decode.d1.loss_dice: 0.6771, decode.d2.loss_cls: 0.2939, decode.d2.loss_mask: 0.4918, decode.d2.loss_dice: 0.6535, decode.d3.loss_cls: 0.2573, decode.d3.loss_mask: 0.4909, decode.d3.loss_dice: 0.6423, decode.d4.loss_cls: 0.2386, decode.d4.loss_mask: 0.4890, decode.d4.loss_dice: 0.6486, decode.d5.loss_cls: 0.2356, decode.d5.loss_mask: 0.4849, decode.d5.loss_dice: 0.6454, decode.d6.loss_cls: 0.2327, decode.d6.loss_mask: 0.4846, 
decode.d6.loss_dice: 0.6385, decode.d7.loss_cls: 0.2300, decode.d7.loss_mask: 0.4841, decode.d7.loss_dice: 0.6440, decode.d8.loss_cls: 0.2313, decode.d8.loss_mask: 0.4846, decode.d8.loss_dice: 0.6441, loss: 15.6199 +2022-06-05 04:18:32,569 - mmseg - INFO - Iter [19400/40000] lr: 3.927e-06, eta: 2:45:58, time: 0.430, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2495, decode.loss_mask: 0.4843, decode.loss_dice: 0.6984, decode.d0.loss_cls: 1.8728, decode.d0.loss_mask: 0.5232, decode.d0.loss_dice: 0.8215, decode.d1.loss_cls: 0.4224, decode.d1.loss_mask: 0.5022, decode.d1.loss_dice: 0.7376, decode.d2.loss_cls: 0.3079, decode.d2.loss_mask: 0.4907, decode.d2.loss_dice: 0.7143, decode.d3.loss_cls: 0.2763, decode.d3.loss_mask: 0.4873, decode.d3.loss_dice: 0.7031, decode.d4.loss_cls: 0.2655, decode.d4.loss_mask: 0.4868, decode.d4.loss_dice: 0.7049, decode.d5.loss_cls: 0.2599, decode.d5.loss_mask: 0.4851, decode.d5.loss_dice: 0.7011, decode.d6.loss_cls: 0.2519, decode.d6.loss_mask: 0.4832, decode.d6.loss_dice: 0.6979, decode.d7.loss_cls: 0.2498, decode.d7.loss_mask: 0.4810, decode.d7.loss_dice: 0.7010, decode.d8.loss_cls: 0.2457, decode.d8.loss_mask: 0.4845, decode.d8.loss_dice: 0.7001, loss: 16.4895 +2022-06-05 04:18:53,982 - mmseg - INFO - Iter [19450/40000] lr: 3.918e-06, eta: 2:45:31, time: 0.428, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2529, decode.loss_mask: 0.4750, decode.loss_dice: 0.6554, decode.d0.loss_cls: 1.8253, decode.d0.loss_mask: 0.5127, decode.d0.loss_dice: 0.7787, decode.d1.loss_cls: 0.4373, decode.d1.loss_mask: 0.4917, decode.d1.loss_dice: 0.7042, decode.d2.loss_cls: 0.3189, decode.d2.loss_mask: 0.4809, decode.d2.loss_dice: 0.6723, decode.d3.loss_cls: 0.2794, decode.d3.loss_mask: 0.4760, decode.d3.loss_dice: 0.6607, decode.d4.loss_cls: 0.2690, decode.d4.loss_mask: 0.4762, decode.d4.loss_dice: 0.6610, decode.d5.loss_cls: 0.2600, decode.d5.loss_mask: 0.4774, decode.d5.loss_dice: 0.6642, decode.d6.loss_cls: 0.2538, decode.d6.loss_mask: 0.4746, decode.d6.loss_dice: 0.6570, decode.d7.loss_cls: 0.2491, decode.d7.loss_mask: 0.4727, decode.d7.loss_dice: 0.6591, decode.d8.loss_cls: 0.2521, decode.d8.loss_mask: 0.4750, decode.d8.loss_dice: 0.6570, loss: 15.9796 +2022-06-05 04:19:15,835 - mmseg - INFO - Iter [19500/40000] lr: 3.908e-06, eta: 2:45:05, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2425, decode.loss_mask: 0.4753, decode.loss_dice: 0.6617, decode.d0.loss_cls: 1.8189, decode.d0.loss_mask: 0.5198, decode.d0.loss_dice: 0.7758, decode.d1.loss_cls: 0.4141, decode.d1.loss_mask: 0.4937, decode.d1.loss_dice: 0.7043, decode.d2.loss_cls: 0.3143, decode.d2.loss_mask: 0.4849, decode.d2.loss_dice: 0.6716, decode.d3.loss_cls: 0.2832, decode.d3.loss_mask: 0.4775, decode.d3.loss_dice: 0.6621, decode.d4.loss_cls: 0.2700, decode.d4.loss_mask: 0.4788, decode.d4.loss_dice: 0.6622, decode.d5.loss_cls: 0.2593, decode.d5.loss_mask: 0.4761, decode.d5.loss_dice: 0.6563, decode.d6.loss_cls: 0.2501, decode.d6.loss_mask: 0.4794, decode.d6.loss_dice: 0.6534, decode.d7.loss_cls: 0.2478, decode.d7.loss_mask: 0.4739, decode.d7.loss_dice: 0.6557, decode.d8.loss_cls: 0.2504, decode.d8.loss_mask: 0.4751, decode.d8.loss_dice: 0.6585, loss: 15.9468 +2022-06-05 04:19:37,600 - mmseg - INFO - Iter [19550/40000] lr: 3.899e-06, eta: 2:44:38, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2598, decode.loss_mask: 0.4570, decode.loss_dice: 0.6611, decode.d0.loss_cls: 1.8596, decode.d0.loss_mask: 0.4945, decode.d0.loss_dice: 0.7780, decode.d1.loss_cls: 0.4256, 
decode.d1.loss_mask: 0.4808, decode.d1.loss_dice: 0.7036, decode.d2.loss_cls: 0.3225, decode.d2.loss_mask: 0.4674, decode.d2.loss_dice: 0.6707, decode.d3.loss_cls: 0.2859, decode.d3.loss_mask: 0.4629, decode.d3.loss_dice: 0.6639, decode.d4.loss_cls: 0.2696, decode.d4.loss_mask: 0.4602, decode.d4.loss_dice: 0.6608, decode.d5.loss_cls: 0.2688, decode.d5.loss_mask: 0.4609, decode.d5.loss_dice: 0.6587, decode.d6.loss_cls: 0.2629, decode.d6.loss_mask: 0.4592, decode.d6.loss_dice: 0.6594, decode.d7.loss_cls: 0.2587, decode.d7.loss_mask: 0.4585, decode.d7.loss_dice: 0.6622, decode.d8.loss_cls: 0.2558, decode.d8.loss_mask: 0.4585, decode.d8.loss_dice: 0.6628, loss: 15.9101 +2022-06-05 04:19:59,405 - mmseg - INFO - Iter [19600/40000] lr: 3.889e-06, eta: 2:44:11, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2305, decode.loss_mask: 0.4747, decode.loss_dice: 0.6357, decode.d0.loss_cls: 1.8020, decode.d0.loss_mask: 0.5255, decode.d0.loss_dice: 0.7636, decode.d1.loss_cls: 0.4052, decode.d1.loss_mask: 0.4945, decode.d1.loss_dice: 0.6847, decode.d2.loss_cls: 0.3034, decode.d2.loss_mask: 0.4840, decode.d2.loss_dice: 0.6553, decode.d3.loss_cls: 0.2551, decode.d3.loss_mask: 0.4821, decode.d3.loss_dice: 0.6428, decode.d4.loss_cls: 0.2535, decode.d4.loss_mask: 0.4797, decode.d4.loss_dice: 0.6441, decode.d5.loss_cls: 0.2365, decode.d5.loss_mask: 0.4787, decode.d5.loss_dice: 0.6490, decode.d6.loss_cls: 0.2302, decode.d6.loss_mask: 0.4755, decode.d6.loss_dice: 0.6405, decode.d7.loss_cls: 0.2280, decode.d7.loss_mask: 0.4770, decode.d7.loss_dice: 0.6397, decode.d8.loss_cls: 0.2299, decode.d8.loss_mask: 0.4739, decode.d8.loss_dice: 0.6382, loss: 15.6136 +2022-06-05 04:20:21,197 - mmseg - INFO - Iter [19650/40000] lr: 3.880e-06, eta: 2:43:45, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2320, decode.loss_mask: 0.4833, decode.loss_dice: 0.6535, decode.d0.loss_cls: 1.8077, decode.d0.loss_mask: 0.5297, decode.d0.loss_dice: 0.7679, decode.d1.loss_cls: 0.3956, decode.d1.loss_mask: 0.5023, decode.d1.loss_dice: 0.6942, decode.d2.loss_cls: 0.2989, decode.d2.loss_mask: 0.4896, decode.d2.loss_dice: 0.6740, decode.d3.loss_cls: 0.2653, decode.d3.loss_mask: 0.4845, decode.d3.loss_dice: 0.6621, decode.d4.loss_cls: 0.2582, decode.d4.loss_mask: 0.4850, decode.d4.loss_dice: 0.6570, decode.d5.loss_cls: 0.2473, decode.d5.loss_mask: 0.4854, decode.d5.loss_dice: 0.6568, decode.d6.loss_cls: 0.2357, decode.d6.loss_mask: 0.4826, decode.d6.loss_dice: 0.6503, decode.d7.loss_cls: 0.2303, decode.d7.loss_mask: 0.4804, decode.d7.loss_dice: 0.6554, decode.d8.loss_cls: 0.2324, decode.d8.loss_mask: 0.4807, decode.d8.loss_dice: 0.6487, loss: 15.8266 +2022-06-05 04:20:45,636 - mmseg - INFO - Iter [19700/40000] lr: 3.870e-06, eta: 2:43:21, time: 0.489, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2692, decode.loss_mask: 0.4865, decode.loss_dice: 0.6652, decode.d0.loss_cls: 1.8234, decode.d0.loss_mask: 0.5337, decode.d0.loss_dice: 0.7991, decode.d1.loss_cls: 0.4433, decode.d1.loss_mask: 0.5064, decode.d1.loss_dice: 0.7136, decode.d2.loss_cls: 0.3321, decode.d2.loss_mask: 0.4932, decode.d2.loss_dice: 0.6798, decode.d3.loss_cls: 0.2925, decode.d3.loss_mask: 0.4906, decode.d3.loss_dice: 0.6708, decode.d4.loss_cls: 0.2810, decode.d4.loss_mask: 0.4916, decode.d4.loss_dice: 0.6752, decode.d5.loss_cls: 0.2674, decode.d5.loss_mask: 0.4915, decode.d5.loss_dice: 0.6705, decode.d6.loss_cls: 0.2676, decode.d6.loss_mask: 0.4899, decode.d6.loss_dice: 0.6667, decode.d7.loss_cls: 0.2594, decode.d7.loss_mask: 0.4869, 
decode.d7.loss_dice: 0.6697, decode.d8.loss_cls: 0.2616, decode.d8.loss_mask: 0.4875, decode.d8.loss_dice: 0.6733, loss: 16.3392 +2022-06-05 04:21:07,266 - mmseg - INFO - Iter [19750/40000] lr: 3.861e-06, eta: 2:42:54, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2217, decode.loss_mask: 0.4754, decode.loss_dice: 0.6707, decode.d0.loss_cls: 1.7652, decode.d0.loss_mask: 0.5101, decode.d0.loss_dice: 0.7782, decode.d1.loss_cls: 0.4029, decode.d1.loss_mask: 0.4950, decode.d1.loss_dice: 0.7192, decode.d2.loss_cls: 0.2844, decode.d2.loss_mask: 0.4807, decode.d2.loss_dice: 0.6915, decode.d3.loss_cls: 0.2434, decode.d3.loss_mask: 0.4801, decode.d3.loss_dice: 0.6746, decode.d4.loss_cls: 0.2430, decode.d4.loss_mask: 0.4783, decode.d4.loss_dice: 0.6747, decode.d5.loss_cls: 0.2254, decode.d5.loss_mask: 0.4790, decode.d5.loss_dice: 0.6784, decode.d6.loss_cls: 0.2250, decode.d6.loss_mask: 0.4779, decode.d6.loss_dice: 0.6816, decode.d7.loss_cls: 0.2193, decode.d7.loss_mask: 0.4786, decode.d7.loss_dice: 0.6751, decode.d8.loss_cls: 0.2214, decode.d8.loss_mask: 0.4767, decode.d8.loss_dice: 0.6745, loss: 15.8019 +2022-06-05 04:21:29,634 - mmseg - INFO - Iter [19800/40000] lr: 3.851e-06, eta: 2:42:28, time: 0.448, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2336, decode.loss_mask: 0.4638, decode.loss_dice: 0.6515, decode.d0.loss_cls: 1.8141, decode.d0.loss_mask: 0.5053, decode.d0.loss_dice: 0.7750, decode.d1.loss_cls: 0.4133, decode.d1.loss_mask: 0.4773, decode.d1.loss_dice: 0.6934, decode.d2.loss_cls: 0.3069, decode.d2.loss_mask: 0.4692, decode.d2.loss_dice: 0.6636, decode.d3.loss_cls: 0.2684, decode.d3.loss_mask: 0.4637, decode.d3.loss_dice: 0.6585, decode.d4.loss_cls: 0.2559, decode.d4.loss_mask: 0.4628, decode.d4.loss_dice: 0.6575, decode.d5.loss_cls: 0.2446, decode.d5.loss_mask: 0.4638, decode.d5.loss_dice: 0.6565, decode.d6.loss_cls: 0.2421, decode.d6.loss_mask: 0.4625, decode.d6.loss_dice: 0.6529, decode.d7.loss_cls: 0.2315, decode.d7.loss_mask: 0.4641, decode.d7.loss_dice: 0.6517, decode.d8.loss_cls: 0.2364, decode.d8.loss_mask: 0.4633, decode.d8.loss_dice: 0.6543, loss: 15.6575 +2022-06-05 04:21:51,654 - mmseg - INFO - Iter [19850/40000] lr: 3.842e-06, eta: 2:42:02, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2372, decode.loss_mask: 0.4682, decode.loss_dice: 0.6351, decode.d0.loss_cls: 1.8112, decode.d0.loss_mask: 0.5148, decode.d0.loss_dice: 0.7535, decode.d1.loss_cls: 0.3909, decode.d1.loss_mask: 0.4917, decode.d1.loss_dice: 0.6836, decode.d2.loss_cls: 0.3090, decode.d2.loss_mask: 0.4786, decode.d2.loss_dice: 0.6513, decode.d3.loss_cls: 0.2586, decode.d3.loss_mask: 0.4775, decode.d3.loss_dice: 0.6440, decode.d4.loss_cls: 0.2482, decode.d4.loss_mask: 0.4747, decode.d4.loss_dice: 0.6458, decode.d5.loss_cls: 0.2483, decode.d5.loss_mask: 0.4719, decode.d5.loss_dice: 0.6419, decode.d6.loss_cls: 0.2432, decode.d6.loss_mask: 0.4735, decode.d6.loss_dice: 0.6377, decode.d7.loss_cls: 0.2384, decode.d7.loss_mask: 0.4720, decode.d7.loss_dice: 0.6416, decode.d8.loss_cls: 0.2355, decode.d8.loss_mask: 0.4700, decode.d8.loss_dice: 0.6367, loss: 15.5844 +2022-06-05 04:22:13,767 - mmseg - INFO - Iter [19900/40000] lr: 3.832e-06, eta: 2:41:36, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2600, decode.loss_mask: 0.4856, decode.loss_dice: 0.6667, decode.d0.loss_cls: 1.8336, decode.d0.loss_mask: 0.5257, decode.d0.loss_dice: 0.7903, decode.d1.loss_cls: 0.4173, decode.d1.loss_mask: 0.5069, decode.d1.loss_dice: 0.7132, decode.d2.loss_cls: 0.3245, 
decode.d2.loss_mask: 0.4958, decode.d2.loss_dice: 0.6830, decode.d3.loss_cls: 0.2913, decode.d3.loss_mask: 0.4906, decode.d3.loss_dice: 0.6704, decode.d4.loss_cls: 0.2801, decode.d4.loss_mask: 0.4880, decode.d4.loss_dice: 0.6705, decode.d5.loss_cls: 0.2738, decode.d5.loss_mask: 0.4861, decode.d5.loss_dice: 0.6665, decode.d6.loss_cls: 0.2646, decode.d6.loss_mask: 0.4846, decode.d6.loss_dice: 0.6643, decode.d7.loss_cls: 0.2601, decode.d7.loss_mask: 0.4855, decode.d7.loss_dice: 0.6617, decode.d8.loss_cls: 0.2587, decode.d8.loss_mask: 0.4849, decode.d8.loss_dice: 0.6639, loss: 16.2482 +2022-06-05 04:22:35,737 - mmseg - INFO - Iter [19950/40000] lr: 3.823e-06, eta: 2:41:10, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2313, decode.loss_mask: 0.4759, decode.loss_dice: 0.6573, decode.d0.loss_cls: 1.8257, decode.d0.loss_mask: 0.5185, decode.d0.loss_dice: 0.7695, decode.d1.loss_cls: 0.4312, decode.d1.loss_mask: 0.4971, decode.d1.loss_dice: 0.6993, decode.d2.loss_cls: 0.3122, decode.d2.loss_mask: 0.4819, decode.d2.loss_dice: 0.6716, decode.d3.loss_cls: 0.2690, decode.d3.loss_mask: 0.4847, decode.d3.loss_dice: 0.6616, decode.d4.loss_cls: 0.2581, decode.d4.loss_mask: 0.4811, decode.d4.loss_dice: 0.6642, decode.d5.loss_cls: 0.2468, decode.d5.loss_mask: 0.4788, decode.d5.loss_dice: 0.6634, decode.d6.loss_cls: 0.2425, decode.d6.loss_mask: 0.4768, decode.d6.loss_dice: 0.6575, decode.d7.loss_cls: 0.2381, decode.d7.loss_mask: 0.4783, decode.d7.loss_dice: 0.6571, decode.d8.loss_cls: 0.2324, decode.d8.loss_mask: 0.4750, decode.d8.loss_dice: 0.6610, loss: 15.8981 +2022-06-05 04:23:00,318 - mmseg - INFO - Saving checkpoint at 20000 iterations +2022-06-05 04:23:02,986 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:23:02,986 - mmseg - INFO - Iter [20000/40000] lr: 3.813e-06, eta: 2:40:49, time: 0.545, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2342, decode.loss_mask: 0.4763, decode.loss_dice: 0.6558, decode.d0.loss_cls: 1.8017, decode.d0.loss_mask: 0.5224, decode.d0.loss_dice: 0.7803, decode.d1.loss_cls: 0.4184, decode.d1.loss_mask: 0.4949, decode.d1.loss_dice: 0.6980, decode.d2.loss_cls: 0.3153, decode.d2.loss_mask: 0.4833, decode.d2.loss_dice: 0.6778, decode.d3.loss_cls: 0.2813, decode.d3.loss_mask: 0.4754, decode.d3.loss_dice: 0.6622, decode.d4.loss_cls: 0.2602, decode.d4.loss_mask: 0.4768, decode.d4.loss_dice: 0.6665, decode.d5.loss_cls: 0.2502, decode.d5.loss_mask: 0.4757, decode.d5.loss_dice: 0.6605, decode.d6.loss_cls: 0.2391, decode.d6.loss_mask: 0.4781, decode.d6.loss_dice: 0.6576, decode.d7.loss_cls: 0.2407, decode.d7.loss_mask: 0.4761, decode.d7.loss_dice: 0.6612, decode.d8.loss_cls: 0.2396, decode.d8.loss_mask: 0.4753, decode.d8.loss_dice: 0.6619, loss: 15.8971 +2022-06-05 04:25:42,763 - mmseg - INFO - per class results: +2022-06-05 04:25:42,772 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 90.57 | 94.78 | +| bag | 35.08 | 44.15 | +| bed | 33.77 | 48.4 | +| bedclothes | 43.19 | 63.13 | +| bench | 21.9 | 27.76 | +| bicycle | 84.81 | 92.92 | +| bird | 94.45 | 96.68 | +| boat | 84.95 | 91.73 | +| book | 51.1 | 64.03 | +| bottle | 88.29 | 96.18 | +| building | 63.55 | 73.89 | +| bus | 94.79 | 96.77 | +| cabinet | 43.8 | 63.12 | +| car | 91.31 | 96.0 | +| cat | 93.55 | 97.91 | +| ceiling | 59.15 | 72.2 | +| chair | 59.85 | 80.19 | +| cloth | 22.04 | 29.44 | +| computer | 34.54 | 43.72 | +| cow | 94.12 | 96.74 | +| cup | 42.16 | 54.05 | +| curtain | 
52.98 | 61.06 | +| dog | 91.53 | 96.72 | +| door | 31.26 | 44.75 | +| fence | 44.32 | 60.59 | +| floor | 73.55 | 86.33 | +| flower | 38.52 | 60.88 | +| food | 32.92 | 38.5 | +| grass | 81.62 | 90.56 | +| ground | 57.77 | 77.15 | +| horse | 94.03 | 97.13 | +| keyboard | 85.17 | 91.04 | +| light | 56.78 | 70.79 | +| motorbike | 89.89 | 94.86 | +| mountain | 56.22 | 70.97 | +| mouse | 76.07 | 81.68 | +| person | 90.3 | 95.61 | +| plate | 23.78 | 27.14 | +| platform | 49.96 | 63.34 | +| pottedplant | 80.34 | 89.16 | +| road | 50.44 | 60.69 | +| rock | 52.01 | 62.36 | +| sheep | 93.91 | 96.67 | +| shelves | 35.71 | 51.37 | +| sidewalk | 26.7 | 47.55 | +| sign | 47.4 | 58.01 | +| sky | 94.83 | 97.3 | +| snow | 73.21 | 85.82 | +| sofa | 59.65 | 68.83 | +| table | 69.16 | 86.73 | +| track | 69.22 | 80.09 | +| train | 91.27 | 96.32 | +| tree | 81.13 | 89.59 | +| truck | 39.8 | 58.59 | +| tvmonitor | 85.43 | 92.91 | +| wall | 69.46 | 86.61 | +| water | 90.79 | 94.47 | +| window | 41.99 | 53.73 | +| wood | 30.18 | 44.73 | ++-------------+-------+-------+ +2022-06-05 04:25:42,772 - mmseg - INFO - Summary: +2022-06-05 04:25:42,772 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 85.2 | 63.33 | 73.46 | ++------+-------+-------+ +2022-06-05 04:25:42,783 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_12000.pth was removed +2022-06-05 04:25:45,450 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_20000.pth. +2022-06-05 04:25:45,451 - mmseg - INFO - Best mIoU is 0.6333 at 20000 iter. +2022-06-05 04:25:45,473 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:25:45,473 - mmseg - INFO - Iter(val) [638] aAcc: 0.8520, mIoU: 0.6333, mAcc: 0.7346, IoU.aeroplane: 0.9057, IoU.bag: 0.3508, IoU.bed: 0.3377, IoU.bedclothes: 0.4319, IoU.bench: 0.2190, IoU.bicycle: 0.8481, IoU.bird: 0.9445, IoU.boat: 0.8495, IoU.book: 0.5110, IoU.bottle: 0.8829, IoU.building: 0.6355, IoU.bus: 0.9479, IoU.cabinet: 0.4380, IoU.car: 0.9131, IoU.cat: 0.9355, IoU.ceiling: 0.5915, IoU.chair: 0.5985, IoU.cloth: 0.2204, IoU.computer: 0.3454, IoU.cow: 0.9412, IoU.cup: 0.4216, IoU.curtain: 0.5298, IoU.dog: 0.9153, IoU.door: 0.3126, IoU.fence: 0.4432, IoU.floor: 0.7355, IoU.flower: 0.3852, IoU.food: 0.3292, IoU.grass: 0.8162, IoU.ground: 0.5777, IoU.horse: 0.9403, IoU.keyboard: 0.8517, IoU.light: 0.5678, IoU.motorbike: 0.8989, IoU.mountain: 0.5622, IoU.mouse: 0.7607, IoU.person: 0.9030, IoU.plate: 0.2378, IoU.platform: 0.4996, IoU.pottedplant: 0.8034, IoU.road: 0.5044, IoU.rock: 0.5201, IoU.sheep: 0.9391, IoU.shelves: 0.3571, IoU.sidewalk: 0.2670, IoU.sign: 0.4740, IoU.sky: 0.9483, IoU.snow: 0.7321, IoU.sofa: 0.5965, IoU.table: 0.6916, IoU.track: 0.6922, IoU.train: 0.9127, IoU.tree: 0.8113, IoU.truck: 0.3980, IoU.tvmonitor: 0.8543, IoU.wall: 0.6946, IoU.water: 0.9079, IoU.window: 0.4199, IoU.wood: 0.3018, Acc.aeroplane: 0.9478, Acc.bag: 0.4415, Acc.bed: 0.4840, Acc.bedclothes: 0.6313, Acc.bench: 0.2776, Acc.bicycle: 0.9292, Acc.bird: 0.9668, Acc.boat: 0.9173, Acc.book: 0.6403, Acc.bottle: 0.9618, Acc.building: 0.7389, Acc.bus: 0.9677, Acc.cabinet: 0.6312, Acc.car: 0.9600, Acc.cat: 0.9791, Acc.ceiling: 0.7220, Acc.chair: 0.8019, Acc.cloth: 0.2944, Acc.computer: 0.4372, Acc.cow: 0.9674, Acc.cup: 0.5405, Acc.curtain: 0.6106, Acc.dog: 0.9672, Acc.door: 0.4475, Acc.fence: 0.6059, Acc.floor: 
0.8633, Acc.flower: 0.6088, Acc.food: 0.3850, Acc.grass: 0.9056, Acc.ground: 0.7715, Acc.horse: 0.9713, Acc.keyboard: 0.9104, Acc.light: 0.7079, Acc.motorbike: 0.9486, Acc.mountain: 0.7097, Acc.mouse: 0.8168, Acc.person: 0.9561, Acc.plate: 0.2714, Acc.platform: 0.6334, Acc.pottedplant: 0.8916, Acc.road: 0.6069, Acc.rock: 0.6236, Acc.sheep: 0.9667, Acc.shelves: 0.5137, Acc.sidewalk: 0.4755, Acc.sign: 0.5801, Acc.sky: 0.9730, Acc.snow: 0.8582, Acc.sofa: 0.6883, Acc.table: 0.8673, Acc.track: 0.8009, Acc.train: 0.9632, Acc.tree: 0.8959, Acc.truck: 0.5859, Acc.tvmonitor: 0.9291, Acc.wall: 0.8661, Acc.water: 0.9447, Acc.window: 0.5373, Acc.wood: 0.4473 +2022-06-05 04:26:08,609 - mmseg - INFO - Iter [20050/40000] lr: 3.803e-06, eta: 2:43:05, time: 3.712, data_time: 3.258, memory: 31652, decode.loss_cls: 0.2332, decode.loss_mask: 0.4568, decode.loss_dice: 0.6528, decode.d0.loss_cls: 1.8212, decode.d0.loss_mask: 0.4985, decode.d0.loss_dice: 0.7716, decode.d1.loss_cls: 0.3995, decode.d1.loss_mask: 0.4770, decode.d1.loss_dice: 0.6993, decode.d2.loss_cls: 0.3025, decode.d2.loss_mask: 0.4643, decode.d2.loss_dice: 0.6709, decode.d3.loss_cls: 0.2627, decode.d3.loss_mask: 0.4600, decode.d3.loss_dice: 0.6637, decode.d4.loss_cls: 0.2535, decode.d4.loss_mask: 0.4598, decode.d4.loss_dice: 0.6617, decode.d5.loss_cls: 0.2421, decode.d5.loss_mask: 0.4586, decode.d5.loss_dice: 0.6555, decode.d6.loss_cls: 0.2385, decode.d6.loss_mask: 0.4576, decode.d6.loss_dice: 0.6511, decode.d7.loss_cls: 0.2380, decode.d7.loss_mask: 0.4548, decode.d7.loss_dice: 0.6535, decode.d8.loss_cls: 0.2331, decode.d8.loss_mask: 0.4575, decode.d8.loss_dice: 0.6564, loss: 15.6059 +2022-06-05 04:26:30,632 - mmseg - INFO - Iter [20100/40000] lr: 3.794e-06, eta: 2:42:38, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2421, decode.loss_mask: 0.4645, decode.loss_dice: 0.6469, decode.d0.loss_cls: 1.8036, decode.d0.loss_mask: 0.5013, decode.d0.loss_dice: 0.7571, decode.d1.loss_cls: 0.4180, decode.d1.loss_mask: 0.4788, decode.d1.loss_dice: 0.6810, decode.d2.loss_cls: 0.3091, decode.d2.loss_mask: 0.4691, decode.d2.loss_dice: 0.6603, decode.d3.loss_cls: 0.2667, decode.d3.loss_mask: 0.4676, decode.d3.loss_dice: 0.6482, decode.d4.loss_cls: 0.2576, decode.d4.loss_mask: 0.4671, decode.d4.loss_dice: 0.6473, decode.d5.loss_cls: 0.2481, decode.d5.loss_mask: 0.4668, decode.d5.loss_dice: 0.6488, decode.d6.loss_cls: 0.2449, decode.d6.loss_mask: 0.4653, decode.d6.loss_dice: 0.6440, decode.d7.loss_cls: 0.2457, decode.d7.loss_mask: 0.4646, decode.d7.loss_dice: 0.6442, decode.d8.loss_cls: 0.2347, decode.d8.loss_mask: 0.4623, decode.d8.loss_dice: 0.6473, loss: 15.6030 +2022-06-05 04:26:52,704 - mmseg - INFO - Iter [20150/40000] lr: 3.784e-06, eta: 2:42:11, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2130, decode.loss_mask: 0.4656, decode.loss_dice: 0.6563, decode.d0.loss_cls: 1.7614, decode.d0.loss_mask: 0.5045, decode.d0.loss_dice: 0.7711, decode.d1.loss_cls: 0.3848, decode.d1.loss_mask: 0.4835, decode.d1.loss_dice: 0.6888, decode.d2.loss_cls: 0.2892, decode.d2.loss_mask: 0.4720, decode.d2.loss_dice: 0.6638, decode.d3.loss_cls: 0.2546, decode.d3.loss_mask: 0.4672, decode.d3.loss_dice: 0.6596, decode.d4.loss_cls: 0.2360, decode.d4.loss_mask: 0.4663, decode.d4.loss_dice: 0.6610, decode.d5.loss_cls: 0.2269, decode.d5.loss_mask: 0.4641, decode.d5.loss_dice: 0.6604, decode.d6.loss_cls: 0.2196, decode.d6.loss_mask: 0.4648, decode.d6.loss_dice: 0.6586, decode.d7.loss_cls: 0.2153, decode.d7.loss_mask: 0.4639, decode.d7.loss_dice: 
0.6580, decode.d8.loss_cls: 0.2198, decode.d8.loss_mask: 0.4645, decode.d8.loss_dice: 0.6624, loss: 15.4773 +2022-06-05 04:27:15,180 - mmseg - INFO - Iter [20200/40000] lr: 3.775e-06, eta: 2:41:45, time: 0.450, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2281, decode.loss_mask: 0.4555, decode.loss_dice: 0.6412, decode.d0.loss_cls: 1.7797, decode.d0.loss_mask: 0.4998, decode.d0.loss_dice: 0.7492, decode.d1.loss_cls: 0.4212, decode.d1.loss_mask: 0.4733, decode.d1.loss_dice: 0.6808, decode.d2.loss_cls: 0.3032, decode.d2.loss_mask: 0.4639, decode.d2.loss_dice: 0.6529, decode.d3.loss_cls: 0.2665, decode.d3.loss_mask: 0.4583, decode.d3.loss_dice: 0.6417, decode.d4.loss_cls: 0.2557, decode.d4.loss_mask: 0.4572, decode.d4.loss_dice: 0.6419, decode.d5.loss_cls: 0.2434, decode.d5.loss_mask: 0.4572, decode.d5.loss_dice: 0.6474, decode.d6.loss_cls: 0.2426, decode.d6.loss_mask: 0.4544, decode.d6.loss_dice: 0.6425, decode.d7.loss_cls: 0.2306, decode.d7.loss_mask: 0.4529, decode.d7.loss_dice: 0.6432, decode.d8.loss_cls: 0.2311, decode.d8.loss_mask: 0.4536, decode.d8.loss_dice: 0.6409, loss: 15.4097 +2022-06-05 04:27:36,842 - mmseg - INFO - Iter [20250/40000] lr: 3.765e-06, eta: 2:41:18, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2264, decode.loss_mask: 0.4738, decode.loss_dice: 0.6671, decode.d0.loss_cls: 1.8100, decode.d0.loss_mask: 0.5140, decode.d0.loss_dice: 0.7692, decode.d1.loss_cls: 0.4144, decode.d1.loss_mask: 0.4965, decode.d1.loss_dice: 0.7056, decode.d2.loss_cls: 0.3034, decode.d2.loss_mask: 0.4827, decode.d2.loss_dice: 0.6807, decode.d3.loss_cls: 0.2688, decode.d3.loss_mask: 0.4798, decode.d3.loss_dice: 0.6669, decode.d4.loss_cls: 0.2551, decode.d4.loss_mask: 0.4765, decode.d4.loss_dice: 0.6678, decode.d5.loss_cls: 0.2400, decode.d5.loss_mask: 0.4771, decode.d5.loss_dice: 0.6712, decode.d6.loss_cls: 0.2397, decode.d6.loss_mask: 0.4749, decode.d6.loss_dice: 0.6674, decode.d7.loss_cls: 0.2315, decode.d7.loss_mask: 0.4754, decode.d7.loss_dice: 0.6662, decode.d8.loss_cls: 0.2266, decode.d8.loss_mask: 0.4742, decode.d8.loss_dice: 0.6677, loss: 15.8708 +2022-06-05 04:28:01,431 - mmseg - INFO - Iter [20300/40000] lr: 3.756e-06, eta: 2:40:53, time: 0.492, data_time: 0.058, memory: 31652, decode.loss_cls: 0.2181, decode.loss_mask: 0.4727, decode.loss_dice: 0.6669, decode.d0.loss_cls: 1.8176, decode.d0.loss_mask: 0.5105, decode.d0.loss_dice: 0.7760, decode.d1.loss_cls: 0.3984, decode.d1.loss_mask: 0.4874, decode.d1.loss_dice: 0.7031, decode.d2.loss_cls: 0.2977, decode.d2.loss_mask: 0.4769, decode.d2.loss_dice: 0.6763, decode.d3.loss_cls: 0.2536, decode.d3.loss_mask: 0.4739, decode.d3.loss_dice: 0.6658, decode.d4.loss_cls: 0.2462, decode.d4.loss_mask: 0.4722, decode.d4.loss_dice: 0.6656, decode.d5.loss_cls: 0.2428, decode.d5.loss_mask: 0.4724, decode.d5.loss_dice: 0.6648, decode.d6.loss_cls: 0.2343, decode.d6.loss_mask: 0.4736, decode.d6.loss_dice: 0.6635, decode.d7.loss_cls: 0.2314, decode.d7.loss_mask: 0.4700, decode.d7.loss_dice: 0.6629, decode.d8.loss_cls: 0.2260, decode.d8.loss_mask: 0.4720, decode.d8.loss_dice: 0.6644, loss: 15.7571 +2022-06-05 04:28:23,865 - mmseg - INFO - Iter [20350/40000] lr: 3.746e-06, eta: 2:40:27, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2225, decode.loss_mask: 0.4653, decode.loss_dice: 0.6553, decode.d0.loss_cls: 1.7718, decode.d0.loss_mask: 0.5042, decode.d0.loss_dice: 0.7649, decode.d1.loss_cls: 0.3901, decode.d1.loss_mask: 0.4823, decode.d1.loss_dice: 0.7003, decode.d2.loss_cls: 0.2877, decode.d2.loss_mask: 0.4728, 
decode.d2.loss_dice: 0.6712, decode.d3.loss_cls: 0.2538, decode.d3.loss_mask: 0.4674, decode.d3.loss_dice: 0.6638, decode.d4.loss_cls: 0.2386, decode.d4.loss_mask: 0.4687, decode.d4.loss_dice: 0.6706, decode.d5.loss_cls: 0.2337, decode.d5.loss_mask: 0.4676, decode.d5.loss_dice: 0.6628, decode.d6.loss_cls: 0.2303, decode.d6.loss_mask: 0.4656, decode.d6.loss_dice: 0.6574, decode.d7.loss_cls: 0.2225, decode.d7.loss_mask: 0.4649, decode.d7.loss_dice: 0.6594, decode.d8.loss_cls: 0.2200, decode.d8.loss_mask: 0.4653, decode.d8.loss_dice: 0.6573, loss: 15.5582 +2022-06-05 04:28:46,148 - mmseg - INFO - Iter [20400/40000] lr: 3.737e-06, eta: 2:40:00, time: 0.446, data_time: 0.010, memory: 31652, decode.loss_cls: 0.2361, decode.loss_mask: 0.4642, decode.loss_dice: 0.6654, decode.d0.loss_cls: 1.8243, decode.d0.loss_mask: 0.5065, decode.d0.loss_dice: 0.7841, decode.d1.loss_cls: 0.4042, decode.d1.loss_mask: 0.4811, decode.d1.loss_dice: 0.7075, decode.d2.loss_cls: 0.3101, decode.d2.loss_mask: 0.4693, decode.d2.loss_dice: 0.6769, decode.d3.loss_cls: 0.2645, decode.d3.loss_mask: 0.4641, decode.d3.loss_dice: 0.6723, decode.d4.loss_cls: 0.2561, decode.d4.loss_mask: 0.4677, decode.d4.loss_dice: 0.6701, decode.d5.loss_cls: 0.2471, decode.d5.loss_mask: 0.4659, decode.d5.loss_dice: 0.6690, decode.d6.loss_cls: 0.2407, decode.d6.loss_mask: 0.4647, decode.d6.loss_dice: 0.6657, decode.d7.loss_cls: 0.2397, decode.d7.loss_mask: 0.4649, decode.d7.loss_dice: 0.6605, decode.d8.loss_cls: 0.2390, decode.d8.loss_mask: 0.4647, decode.d8.loss_dice: 0.6684, loss: 15.8145 +2022-06-05 04:29:08,036 - mmseg - INFO - Iter [20450/40000] lr: 3.727e-06, eta: 2:39:33, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2564, decode.loss_mask: 0.4639, decode.loss_dice: 0.6722, decode.d0.loss_cls: 1.8488, decode.d0.loss_mask: 0.5061, decode.d0.loss_dice: 0.7897, decode.d1.loss_cls: 0.4391, decode.d1.loss_mask: 0.4840, decode.d1.loss_dice: 0.7147, decode.d2.loss_cls: 0.3331, decode.d2.loss_mask: 0.4738, decode.d2.loss_dice: 0.6865, decode.d3.loss_cls: 0.2884, decode.d3.loss_mask: 0.4677, decode.d3.loss_dice: 0.6769, decode.d4.loss_cls: 0.2820, decode.d4.loss_mask: 0.4660, decode.d4.loss_dice: 0.6726, decode.d5.loss_cls: 0.2748, decode.d5.loss_mask: 0.4661, decode.d5.loss_dice: 0.6737, decode.d6.loss_cls: 0.2643, decode.d6.loss_mask: 0.4658, decode.d6.loss_dice: 0.6666, decode.d7.loss_cls: 0.2608, decode.d7.loss_mask: 0.4648, decode.d7.loss_dice: 0.6725, decode.d8.loss_cls: 0.2578, decode.d8.loss_mask: 0.4629, decode.d8.loss_dice: 0.6720, loss: 16.1239 +2022-06-05 04:29:30,173 - mmseg - INFO - Iter [20500/40000] lr: 3.718e-06, eta: 2:39:06, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2232, decode.loss_mask: 0.4635, decode.loss_dice: 0.6311, decode.d0.loss_cls: 1.7215, decode.d0.loss_mask: 0.5092, decode.d0.loss_dice: 0.7372, decode.d1.loss_cls: 0.3820, decode.d1.loss_mask: 0.4813, decode.d1.loss_dice: 0.6683, decode.d2.loss_cls: 0.2909, decode.d2.loss_mask: 0.4675, decode.d2.loss_dice: 0.6480, decode.d3.loss_cls: 0.2431, decode.d3.loss_mask: 0.4697, decode.d3.loss_dice: 0.6335, decode.d4.loss_cls: 0.2393, decode.d4.loss_mask: 0.4679, decode.d4.loss_dice: 0.6388, decode.d5.loss_cls: 0.2359, decode.d5.loss_mask: 0.4652, decode.d5.loss_dice: 0.6360, decode.d6.loss_cls: 0.2293, decode.d6.loss_mask: 0.4650, decode.d6.loss_dice: 0.6320, decode.d7.loss_cls: 0.2258, decode.d7.loss_mask: 0.4636, decode.d7.loss_dice: 0.6334, decode.d8.loss_cls: 0.2202, decode.d8.loss_mask: 0.4638, decode.d8.loss_dice: 0.6338, loss: 
15.2199 +2022-06-05 04:29:52,336 - mmseg - INFO - Iter [20550/40000] lr: 3.708e-06, eta: 2:38:40, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2392, decode.loss_mask: 0.4760, decode.loss_dice: 0.6653, decode.d0.loss_cls: 1.8113, decode.d0.loss_mask: 0.5209, decode.d0.loss_dice: 0.7842, decode.d1.loss_cls: 0.4198, decode.d1.loss_mask: 0.4955, decode.d1.loss_dice: 0.7158, decode.d2.loss_cls: 0.3167, decode.d2.loss_mask: 0.4828, decode.d2.loss_dice: 0.6843, decode.d3.loss_cls: 0.2792, decode.d3.loss_mask: 0.4763, decode.d3.loss_dice: 0.6692, decode.d4.loss_cls: 0.2672, decode.d4.loss_mask: 0.4749, decode.d4.loss_dice: 0.6741, decode.d5.loss_cls: 0.2557, decode.d5.loss_mask: 0.4758, decode.d5.loss_dice: 0.6734, decode.d6.loss_cls: 0.2490, decode.d6.loss_mask: 0.4761, decode.d6.loss_dice: 0.6670, decode.d7.loss_cls: 0.2392, decode.d7.loss_mask: 0.4773, decode.d7.loss_dice: 0.6694, decode.d8.loss_cls: 0.2351, decode.d8.loss_mask: 0.4760, decode.d8.loss_dice: 0.6687, loss: 16.0154 +2022-06-05 04:30:17,292 - mmseg - INFO - Iter [20600/40000] lr: 3.699e-06, eta: 2:38:16, time: 0.499, data_time: 0.059, memory: 31652, decode.loss_cls: 0.2260, decode.loss_mask: 0.4639, decode.loss_dice: 0.6613, decode.d0.loss_cls: 1.7854, decode.d0.loss_mask: 0.5052, decode.d0.loss_dice: 0.7669, decode.d1.loss_cls: 0.4151, decode.d1.loss_mask: 0.4836, decode.d1.loss_dice: 0.6983, decode.d2.loss_cls: 0.2994, decode.d2.loss_mask: 0.4706, decode.d2.loss_dice: 0.6716, decode.d3.loss_cls: 0.2607, decode.d3.loss_mask: 0.4666, decode.d3.loss_dice: 0.6623, decode.d4.loss_cls: 0.2476, decode.d4.loss_mask: 0.4652, decode.d4.loss_dice: 0.6668, decode.d5.loss_cls: 0.2392, decode.d5.loss_mask: 0.4660, decode.d5.loss_dice: 0.6661, decode.d6.loss_cls: 0.2304, decode.d6.loss_mask: 0.4663, decode.d6.loss_dice: 0.6602, decode.d7.loss_cls: 0.2234, decode.d7.loss_mask: 0.4630, decode.d7.loss_dice: 0.6644, decode.d8.loss_cls: 0.2283, decode.d8.loss_mask: 0.4631, decode.d8.loss_dice: 0.6629, loss: 15.6497 +2022-06-05 04:30:39,438 - mmseg - INFO - Iter [20650/40000] lr: 3.689e-06, eta: 2:37:49, time: 0.443, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2311, decode.loss_mask: 0.4672, decode.loss_dice: 0.6478, decode.d0.loss_cls: 1.8038, decode.d0.loss_mask: 0.5081, decode.d0.loss_dice: 0.7560, decode.d1.loss_cls: 0.4130, decode.d1.loss_mask: 0.4913, decode.d1.loss_dice: 0.6830, decode.d2.loss_cls: 0.3123, decode.d2.loss_mask: 0.4767, decode.d2.loss_dice: 0.6588, decode.d3.loss_cls: 0.2695, decode.d3.loss_mask: 0.4702, decode.d3.loss_dice: 0.6491, decode.d4.loss_cls: 0.2638, decode.d4.loss_mask: 0.4683, decode.d4.loss_dice: 0.6502, decode.d5.loss_cls: 0.2509, decode.d5.loss_mask: 0.4658, decode.d5.loss_dice: 0.6499, decode.d6.loss_cls: 0.2300, decode.d6.loss_mask: 0.4668, decode.d6.loss_dice: 0.6434, decode.d7.loss_cls: 0.2302, decode.d7.loss_mask: 0.4647, decode.d7.loss_dice: 0.6494, decode.d8.loss_cls: 0.2280, decode.d8.loss_mask: 0.4665, decode.d8.loss_dice: 0.6456, loss: 15.6112 +2022-06-05 04:31:01,071 - mmseg - INFO - Iter [20700/40000] lr: 3.680e-06, eta: 2:37:22, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2168, decode.loss_mask: 0.4636, decode.loss_dice: 0.6223, decode.d0.loss_cls: 1.7560, decode.d0.loss_mask: 0.5067, decode.d0.loss_dice: 0.7358, decode.d1.loss_cls: 0.3843, decode.d1.loss_mask: 0.4804, decode.d1.loss_dice: 0.6608, decode.d2.loss_cls: 0.2898, decode.d2.loss_mask: 0.4690, decode.d2.loss_dice: 0.6392, decode.d3.loss_cls: 0.2396, decode.d3.loss_mask: 0.4651, 
decode.d3.loss_dice: 0.6367, decode.d4.loss_cls: 0.2357, decode.d4.loss_mask: 0.4649, decode.d4.loss_dice: 0.6315, decode.d5.loss_cls: 0.2252, decode.d5.loss_mask: 0.4625, decode.d5.loss_dice: 0.6270, decode.d6.loss_cls: 0.2171, decode.d6.loss_mask: 0.4634, decode.d6.loss_dice: 0.6292, decode.d7.loss_cls: 0.2142, decode.d7.loss_mask: 0.4631, decode.d7.loss_dice: 0.6244, decode.d8.loss_cls: 0.2183, decode.d8.loss_mask: 0.4630, decode.d8.loss_dice: 0.6208, loss: 15.1265 +2022-06-05 04:31:23,366 - mmseg - INFO - Iter [20750/40000] lr: 3.670e-06, eta: 2:36:55, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2222, decode.loss_mask: 0.4844, decode.loss_dice: 0.6589, decode.d0.loss_cls: 1.7968, decode.d0.loss_mask: 0.5235, decode.d0.loss_dice: 0.7725, decode.d1.loss_cls: 0.3897, decode.d1.loss_mask: 0.5000, decode.d1.loss_dice: 0.7061, decode.d2.loss_cls: 0.2922, decode.d2.loss_mask: 0.4911, decode.d2.loss_dice: 0.6739, decode.d3.loss_cls: 0.2574, decode.d3.loss_mask: 0.4860, decode.d3.loss_dice: 0.6606, decode.d4.loss_cls: 0.2425, decode.d4.loss_mask: 0.4840, decode.d4.loss_dice: 0.6658, decode.d5.loss_cls: 0.2322, decode.d5.loss_mask: 0.4838, decode.d5.loss_dice: 0.6616, decode.d6.loss_cls: 0.2257, decode.d6.loss_mask: 0.4835, decode.d6.loss_dice: 0.6575, decode.d7.loss_cls: 0.2258, decode.d7.loss_mask: 0.4806, decode.d7.loss_dice: 0.6547, decode.d8.loss_cls: 0.2164, decode.d8.loss_mask: 0.4834, decode.d8.loss_dice: 0.6613, loss: 15.7741 +2022-06-05 04:31:45,691 - mmseg - INFO - Iter [20800/40000] lr: 3.660e-06, eta: 2:36:29, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2204, decode.loss_mask: 0.4676, decode.loss_dice: 0.6443, decode.d0.loss_cls: 1.7530, decode.d0.loss_mask: 0.5092, decode.d0.loss_dice: 0.7442, decode.d1.loss_cls: 0.3890, decode.d1.loss_mask: 0.4851, decode.d1.loss_dice: 0.6817, decode.d2.loss_cls: 0.2849, decode.d2.loss_mask: 0.4748, decode.d2.loss_dice: 0.6595, decode.d3.loss_cls: 0.2516, decode.d3.loss_mask: 0.4707, decode.d3.loss_dice: 0.6506, decode.d4.loss_cls: 0.2380, decode.d4.loss_mask: 0.4684, decode.d4.loss_dice: 0.6497, decode.d5.loss_cls: 0.2332, decode.d5.loss_mask: 0.4683, decode.d5.loss_dice: 0.6438, decode.d6.loss_cls: 0.2284, decode.d6.loss_mask: 0.4683, decode.d6.loss_dice: 0.6440, decode.d7.loss_cls: 0.2193, decode.d7.loss_mask: 0.4674, decode.d7.loss_dice: 0.6435, decode.d8.loss_cls: 0.2197, decode.d8.loss_mask: 0.4659, decode.d8.loss_dice: 0.6457, loss: 15.3905 +2022-06-05 04:32:08,151 - mmseg - INFO - Iter [20850/40000] lr: 3.651e-06, eta: 2:36:03, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2454, decode.loss_mask: 0.4737, decode.loss_dice: 0.6584, decode.d0.loss_cls: 1.8016, decode.d0.loss_mask: 0.5084, decode.d0.loss_dice: 0.7660, decode.d1.loss_cls: 0.4290, decode.d1.loss_mask: 0.4907, decode.d1.loss_dice: 0.6967, decode.d2.loss_cls: 0.3265, decode.d2.loss_mask: 0.4805, decode.d2.loss_dice: 0.6689, decode.d3.loss_cls: 0.2871, decode.d3.loss_mask: 0.4738, decode.d3.loss_dice: 0.6608, decode.d4.loss_cls: 0.2728, decode.d4.loss_mask: 0.4735, decode.d4.loss_dice: 0.6583, decode.d5.loss_cls: 0.2547, decode.d5.loss_mask: 0.4758, decode.d5.loss_dice: 0.6574, decode.d6.loss_cls: 0.2534, decode.d6.loss_mask: 0.4740, decode.d6.loss_dice: 0.6564, decode.d7.loss_cls: 0.2472, decode.d7.loss_mask: 0.4731, decode.d7.loss_dice: 0.6585, decode.d8.loss_cls: 0.2485, decode.d8.loss_mask: 0.4732, decode.d8.loss_dice: 0.6582, loss: 15.9026 +2022-06-05 04:32:29,744 - mmseg - INFO - Iter [20900/40000] lr: 3.641e-06, 
eta: 2:35:36, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2252, decode.loss_mask: 0.4698, decode.loss_dice: 0.6465, decode.d0.loss_cls: 1.7727, decode.d0.loss_mask: 0.5086, decode.d0.loss_dice: 0.7461, decode.d1.loss_cls: 0.3948, decode.d1.loss_mask: 0.4927, decode.d1.loss_dice: 0.6867, decode.d2.loss_cls: 0.2905, decode.d2.loss_mask: 0.4800, decode.d2.loss_dice: 0.6593, decode.d3.loss_cls: 0.2490, decode.d3.loss_mask: 0.4744, decode.d3.loss_dice: 0.6483, decode.d4.loss_cls: 0.2343, decode.d4.loss_mask: 0.4741, decode.d4.loss_dice: 0.6478, decode.d5.loss_cls: 0.2318, decode.d5.loss_mask: 0.4694, decode.d5.loss_dice: 0.6454, decode.d6.loss_cls: 0.2295, decode.d6.loss_mask: 0.4732, decode.d6.loss_dice: 0.6415, decode.d7.loss_cls: 0.2276, decode.d7.loss_mask: 0.4695, decode.d7.loss_dice: 0.6462, decode.d8.loss_cls: 0.2234, decode.d8.loss_mask: 0.4686, decode.d8.loss_dice: 0.6451, loss: 15.4719 +2022-06-05 04:32:53,847 - mmseg - INFO - Iter [20950/40000] lr: 3.632e-06, eta: 2:35:11, time: 0.482, data_time: 0.056, memory: 31652, decode.loss_cls: 0.2218, decode.loss_mask: 0.4670, decode.loss_dice: 0.6589, decode.d0.loss_cls: 1.7882, decode.d0.loss_mask: 0.5120, decode.d0.loss_dice: 0.7712, decode.d1.loss_cls: 0.4081, decode.d1.loss_mask: 0.4874, decode.d1.loss_dice: 0.7042, decode.d2.loss_cls: 0.2913, decode.d2.loss_mask: 0.4756, decode.d2.loss_dice: 0.6777, decode.d3.loss_cls: 0.2570, decode.d3.loss_mask: 0.4703, decode.d3.loss_dice: 0.6630, decode.d4.loss_cls: 0.2421, decode.d4.loss_mask: 0.4709, decode.d4.loss_dice: 0.6658, decode.d5.loss_cls: 0.2325, decode.d5.loss_mask: 0.4694, decode.d5.loss_dice: 0.6627, decode.d6.loss_cls: 0.2294, decode.d6.loss_mask: 0.4663, decode.d6.loss_dice: 0.6596, decode.d7.loss_cls: 0.2240, decode.d7.loss_mask: 0.4672, decode.d7.loss_dice: 0.6677, decode.d8.loss_cls: 0.2216, decode.d8.loss_mask: 0.4667, decode.d8.loss_dice: 0.6629, loss: 15.6626 +2022-06-05 04:33:15,357 - mmseg - INFO - Saving checkpoint at 21000 iterations +2022-06-05 04:33:18,334 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:33:18,335 - mmseg - INFO - Iter [21000/40000] lr: 3.622e-06, eta: 2:34:46, time: 0.490, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2155, decode.loss_mask: 0.4611, decode.loss_dice: 0.6212, decode.d0.loss_cls: 1.7488, decode.d0.loss_mask: 0.5017, decode.d0.loss_dice: 0.7344, decode.d1.loss_cls: 0.3795, decode.d1.loss_mask: 0.4752, decode.d1.loss_dice: 0.6595, decode.d2.loss_cls: 0.2799, decode.d2.loss_mask: 0.4668, decode.d2.loss_dice: 0.6337, decode.d3.loss_cls: 0.2372, decode.d3.loss_mask: 0.4663, decode.d3.loss_dice: 0.6269, decode.d4.loss_cls: 0.2379, decode.d4.loss_mask: 0.4660, decode.d4.loss_dice: 0.6252, decode.d5.loss_cls: 0.2335, decode.d5.loss_mask: 0.4608, decode.d5.loss_dice: 0.6209, decode.d6.loss_cls: 0.2195, decode.d6.loss_mask: 0.4613, decode.d6.loss_dice: 0.6198, decode.d7.loss_cls: 0.2188, decode.d7.loss_mask: 0.4601, decode.d7.loss_dice: 0.6224, decode.d8.loss_cls: 0.2155, decode.d8.loss_mask: 0.4596, decode.d8.loss_dice: 0.6182, loss: 15.0472 +2022-06-05 04:33:39,780 - mmseg - INFO - Iter [21050/40000] lr: 3.613e-06, eta: 2:34:19, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2087, decode.loss_mask: 0.4550, decode.loss_dice: 0.6098, decode.d0.loss_cls: 1.7530, decode.d0.loss_mask: 0.5001, decode.d0.loss_dice: 0.7258, decode.d1.loss_cls: 0.3876, decode.d1.loss_mask: 0.4764, decode.d1.loss_dice: 0.6512, decode.d2.loss_cls: 0.2855, decode.d2.loss_mask: 
0.4644, decode.d2.loss_dice: 0.6260, decode.d3.loss_cls: 0.2427, decode.d3.loss_mask: 0.4591, decode.d3.loss_dice: 0.6170, decode.d4.loss_cls: 0.2368, decode.d4.loss_mask: 0.4571, decode.d4.loss_dice: 0.6140, decode.d5.loss_cls: 0.2268, decode.d5.loss_mask: 0.4581, decode.d5.loss_dice: 0.6122, decode.d6.loss_cls: 0.2228, decode.d6.loss_mask: 0.4578, decode.d6.loss_dice: 0.6157, decode.d7.loss_cls: 0.2170, decode.d7.loss_mask: 0.4570, decode.d7.loss_dice: 0.6119, decode.d8.loss_cls: 0.2126, decode.d8.loss_mask: 0.4567, decode.d8.loss_dice: 0.6139, loss: 14.9324 +2022-06-05 04:34:01,291 - mmseg - INFO - Iter [21100/40000] lr: 3.603e-06, eta: 2:33:52, time: 0.430, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2180, decode.loss_mask: 0.4720, decode.loss_dice: 0.6463, decode.d0.loss_cls: 1.7632, decode.d0.loss_mask: 0.5111, decode.d0.loss_dice: 0.7590, decode.d1.loss_cls: 0.3865, decode.d1.loss_mask: 0.4922, decode.d1.loss_dice: 0.6917, decode.d2.loss_cls: 0.2842, decode.d2.loss_mask: 0.4827, decode.d2.loss_dice: 0.6656, decode.d3.loss_cls: 0.2491, decode.d3.loss_mask: 0.4750, decode.d3.loss_dice: 0.6542, decode.d4.loss_cls: 0.2378, decode.d4.loss_mask: 0.4736, decode.d4.loss_dice: 0.6546, decode.d5.loss_cls: 0.2329, decode.d5.loss_mask: 0.4730, decode.d5.loss_dice: 0.6539, decode.d6.loss_cls: 0.2215, decode.d6.loss_mask: 0.4729, decode.d6.loss_dice: 0.6463, decode.d7.loss_cls: 0.2193, decode.d7.loss_mask: 0.4703, decode.d7.loss_dice: 0.6507, decode.d8.loss_cls: 0.2188, decode.d8.loss_mask: 0.4720, decode.d8.loss_dice: 0.6507, loss: 15.4993 +2022-06-05 04:34:22,828 - mmseg - INFO - Iter [21150/40000] lr: 3.594e-06, eta: 2:33:25, time: 0.431, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2053, decode.loss_mask: 0.4610, decode.loss_dice: 0.6439, decode.d0.loss_cls: 1.7725, decode.d0.loss_mask: 0.4984, decode.d0.loss_dice: 0.7598, decode.d1.loss_cls: 0.3881, decode.d1.loss_mask: 0.4820, decode.d1.loss_dice: 0.6880, decode.d2.loss_cls: 0.2950, decode.d2.loss_mask: 0.4686, decode.d2.loss_dice: 0.6644, decode.d3.loss_cls: 0.2490, decode.d3.loss_mask: 0.4678, decode.d3.loss_dice: 0.6534, decode.d4.loss_cls: 0.2403, decode.d4.loss_mask: 0.4620, decode.d4.loss_dice: 0.6511, decode.d5.loss_cls: 0.2207, decode.d5.loss_mask: 0.4635, decode.d5.loss_dice: 0.6470, decode.d6.loss_cls: 0.2162, decode.d6.loss_mask: 0.4610, decode.d6.loss_dice: 0.6447, decode.d7.loss_cls: 0.2127, decode.d7.loss_mask: 0.4613, decode.d7.loss_dice: 0.6527, decode.d8.loss_cls: 0.2064, decode.d8.loss_mask: 0.4614, decode.d8.loss_dice: 0.6461, loss: 15.3445 +2022-06-05 04:34:44,272 - mmseg - INFO - Iter [21200/40000] lr: 3.584e-06, eta: 2:32:58, time: 0.429, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2339, decode.loss_mask: 0.4688, decode.loss_dice: 0.6603, decode.d0.loss_cls: 1.7781, decode.d0.loss_mask: 0.5080, decode.d0.loss_dice: 0.7800, decode.d1.loss_cls: 0.4039, decode.d1.loss_mask: 0.4899, decode.d1.loss_dice: 0.7109, decode.d2.loss_cls: 0.3109, decode.d2.loss_mask: 0.4762, decode.d2.loss_dice: 0.6732, decode.d3.loss_cls: 0.2687, decode.d3.loss_mask: 0.4714, decode.d3.loss_dice: 0.6608, decode.d4.loss_cls: 0.2594, decode.d4.loss_mask: 0.4699, decode.d4.loss_dice: 0.6556, decode.d5.loss_cls: 0.2526, decode.d5.loss_mask: 0.4697, decode.d5.loss_dice: 0.6583, decode.d6.loss_cls: 0.2413, decode.d6.loss_mask: 0.4708, decode.d6.loss_dice: 0.6544, decode.d7.loss_cls: 0.2349, decode.d7.loss_mask: 0.4677, decode.d7.loss_dice: 0.6595, decode.d8.loss_cls: 0.2286, decode.d8.loss_mask: 0.4713, decode.d8.loss_dice: 
0.6560, loss: 15.7452 +2022-06-05 04:35:08,566 - mmseg - INFO - Iter [21250/40000] lr: 3.575e-06, eta: 2:32:34, time: 0.486, data_time: 0.058, memory: 31652, decode.loss_cls: 0.2218, decode.loss_mask: 0.4740, decode.loss_dice: 0.6692, decode.d0.loss_cls: 1.7486, decode.d0.loss_mask: 0.5184, decode.d0.loss_dice: 0.7855, decode.d1.loss_cls: 0.3828, decode.d1.loss_mask: 0.4923, decode.d1.loss_dice: 0.7134, decode.d2.loss_cls: 0.2764, decode.d2.loss_mask: 0.4837, decode.d2.loss_dice: 0.6859, decode.d3.loss_cls: 0.2454, decode.d3.loss_mask: 0.4804, decode.d3.loss_dice: 0.6724, decode.d4.loss_cls: 0.2321, decode.d4.loss_mask: 0.4780, decode.d4.loss_dice: 0.6770, decode.d5.loss_cls: 0.2267, decode.d5.loss_mask: 0.4781, decode.d5.loss_dice: 0.6766, decode.d6.loss_cls: 0.2180, decode.d6.loss_mask: 0.4751, decode.d6.loss_dice: 0.6717, decode.d7.loss_cls: 0.2149, decode.d7.loss_mask: 0.4752, decode.d7.loss_dice: 0.6739, decode.d8.loss_cls: 0.2089, decode.d8.loss_mask: 0.4744, decode.d8.loss_dice: 0.6707, loss: 15.7019 +2022-06-05 04:35:30,167 - mmseg - INFO - Iter [21300/40000] lr: 3.565e-06, eta: 2:32:07, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2115, decode.loss_mask: 0.4540, decode.loss_dice: 0.6585, decode.d0.loss_cls: 1.7704, decode.d0.loss_mask: 0.4949, decode.d0.loss_dice: 0.7613, decode.d1.loss_cls: 0.3669, decode.d1.loss_mask: 0.4698, decode.d1.loss_dice: 0.6940, decode.d2.loss_cls: 0.2859, decode.d2.loss_mask: 0.4596, decode.d2.loss_dice: 0.6687, decode.d3.loss_cls: 0.2510, decode.d3.loss_mask: 0.4556, decode.d3.loss_dice: 0.6595, decode.d4.loss_cls: 0.2397, decode.d4.loss_mask: 0.4559, decode.d4.loss_dice: 0.6633, decode.d5.loss_cls: 0.2304, decode.d5.loss_mask: 0.4538, decode.d5.loss_dice: 0.6590, decode.d6.loss_cls: 0.2254, decode.d6.loss_mask: 0.4547, decode.d6.loss_dice: 0.6558, decode.d7.loss_cls: 0.2180, decode.d7.loss_mask: 0.4542, decode.d7.loss_dice: 0.6576, decode.d8.loss_cls: 0.2133, decode.d8.loss_mask: 0.4538, decode.d8.loss_dice: 0.6581, loss: 15.3547 +2022-06-05 04:35:52,216 - mmseg - INFO - Iter [21350/40000] lr: 3.556e-06, eta: 2:31:40, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2342, decode.loss_mask: 0.4535, decode.loss_dice: 0.6378, decode.d0.loss_cls: 1.7440, decode.d0.loss_mask: 0.4912, decode.d0.loss_dice: 0.7471, decode.d1.loss_cls: 0.4103, decode.d1.loss_mask: 0.4688, decode.d1.loss_dice: 0.6759, decode.d2.loss_cls: 0.3064, decode.d2.loss_mask: 0.4610, decode.d2.loss_dice: 0.6499, decode.d3.loss_cls: 0.2565, decode.d3.loss_mask: 0.4612, decode.d3.loss_dice: 0.6431, decode.d4.loss_cls: 0.2457, decode.d4.loss_mask: 0.4572, decode.d4.loss_dice: 0.6442, decode.d5.loss_cls: 0.2374, decode.d5.loss_mask: 0.4563, decode.d5.loss_dice: 0.6394, decode.d6.loss_cls: 0.2293, decode.d6.loss_mask: 0.4561, decode.d6.loss_dice: 0.6404, decode.d7.loss_cls: 0.2323, decode.d7.loss_mask: 0.4538, decode.d7.loss_dice: 0.6365, decode.d8.loss_cls: 0.2346, decode.d8.loss_mask: 0.4534, decode.d8.loss_dice: 0.6351, loss: 15.2926 +2022-06-05 04:36:14,045 - mmseg - INFO - Iter [21400/40000] lr: 3.546e-06, eta: 2:31:14, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2264, decode.loss_mask: 0.4666, decode.loss_dice: 0.6306, decode.d0.loss_cls: 1.7519, decode.d0.loss_mask: 0.5096, decode.d0.loss_dice: 0.7472, decode.d1.loss_cls: 0.4047, decode.d1.loss_mask: 0.4853, decode.d1.loss_dice: 0.6700, decode.d2.loss_cls: 0.2925, decode.d2.loss_mask: 0.4737, decode.d2.loss_dice: 0.6453, decode.d3.loss_cls: 0.2552, decode.d3.loss_mask: 0.4696, 
decode.d3.loss_dice: 0.6339, decode.d4.loss_cls: 0.2456, decode.d4.loss_mask: 0.4689, decode.d4.loss_dice: 0.6336, decode.d5.loss_cls: 0.2354, decode.d5.loss_mask: 0.4682, decode.d5.loss_dice: 0.6254, decode.d6.loss_cls: 0.2342, decode.d6.loss_mask: 0.4677, decode.d6.loss_dice: 0.6273, decode.d7.loss_cls: 0.2271, decode.d7.loss_mask: 0.4687, decode.d7.loss_dice: 0.6280, decode.d8.loss_cls: 0.2318, decode.d8.loss_mask: 0.4667, decode.d8.loss_dice: 0.6308, loss: 15.3220 +2022-06-05 04:36:35,561 - mmseg - INFO - Iter [21450/40000] lr: 3.537e-06, eta: 2:30:47, time: 0.431, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2562, decode.loss_mask: 0.4649, decode.loss_dice: 0.6799, decode.d0.loss_cls: 1.8025, decode.d0.loss_mask: 0.4993, decode.d0.loss_dice: 0.7926, decode.d1.loss_cls: 0.4326, decode.d1.loss_mask: 0.4838, decode.d1.loss_dice: 0.7216, decode.d2.loss_cls: 0.3260, decode.d2.loss_mask: 0.4729, decode.d2.loss_dice: 0.6934, decode.d3.loss_cls: 0.2949, decode.d3.loss_mask: 0.4677, decode.d3.loss_dice: 0.6841, decode.d4.loss_cls: 0.2833, decode.d4.loss_mask: 0.4643, decode.d4.loss_dice: 0.6879, decode.d5.loss_cls: 0.2724, decode.d5.loss_mask: 0.4670, decode.d5.loss_dice: 0.6847, decode.d6.loss_cls: 0.2677, decode.d6.loss_mask: 0.4664, decode.d6.loss_dice: 0.6787, decode.d7.loss_cls: 0.2642, decode.d7.loss_mask: 0.4621, decode.d7.loss_dice: 0.6767, decode.d8.loss_cls: 0.2633, decode.d8.loss_mask: 0.4639, decode.d8.loss_dice: 0.6761, loss: 16.1507 +2022-06-05 04:36:57,435 - mmseg - INFO - Iter [21500/40000] lr: 3.527e-06, eta: 2:30:20, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2049, decode.loss_mask: 0.4533, decode.loss_dice: 0.6353, decode.d0.loss_cls: 1.7213, decode.d0.loss_mask: 0.4930, decode.d0.loss_dice: 0.7421, decode.d1.loss_cls: 0.3725, decode.d1.loss_mask: 0.4706, decode.d1.loss_dice: 0.6781, decode.d2.loss_cls: 0.2754, decode.d2.loss_mask: 0.4576, decode.d2.loss_dice: 0.6502, decode.d3.loss_cls: 0.2442, decode.d3.loss_mask: 0.4562, decode.d3.loss_dice: 0.6452, decode.d4.loss_cls: 0.2277, decode.d4.loss_mask: 0.4548, decode.d4.loss_dice: 0.6411, decode.d5.loss_cls: 0.2197, decode.d5.loss_mask: 0.4528, decode.d5.loss_dice: 0.6384, decode.d6.loss_cls: 0.2101, decode.d6.loss_mask: 0.4521, decode.d6.loss_dice: 0.6369, decode.d7.loss_cls: 0.2064, decode.d7.loss_mask: 0.4524, decode.d7.loss_dice: 0.6375, decode.d8.loss_cls: 0.2030, decode.d8.loss_mask: 0.4535, decode.d8.loss_dice: 0.6391, loss: 15.0257 +2022-06-05 04:37:21,731 - mmseg - INFO - Iter [21550/40000] lr: 3.517e-06, eta: 2:29:56, time: 0.486, data_time: 0.057, memory: 31652, decode.loss_cls: 0.2209, decode.loss_mask: 0.4624, decode.loss_dice: 0.6509, decode.d0.loss_cls: 1.7528, decode.d0.loss_mask: 0.5100, decode.d0.loss_dice: 0.7714, decode.d1.loss_cls: 0.4068, decode.d1.loss_mask: 0.4850, decode.d1.loss_dice: 0.6955, decode.d2.loss_cls: 0.2956, decode.d2.loss_mask: 0.4692, decode.d2.loss_dice: 0.6688, decode.d3.loss_cls: 0.2543, decode.d3.loss_mask: 0.4645, decode.d3.loss_dice: 0.6597, decode.d4.loss_cls: 0.2487, decode.d4.loss_mask: 0.4614, decode.d4.loss_dice: 0.6539, decode.d5.loss_cls: 0.2330, decode.d5.loss_mask: 0.4647, decode.d5.loss_dice: 0.6573, decode.d6.loss_cls: 0.2300, decode.d6.loss_mask: 0.4610, decode.d6.loss_dice: 0.6510, decode.d7.loss_cls: 0.2238, decode.d7.loss_mask: 0.4610, decode.d7.loss_dice: 0.6537, decode.d8.loss_cls: 0.2215, decode.d8.loss_mask: 0.4633, decode.d8.loss_dice: 0.6526, loss: 15.5047 +2022-06-05 04:37:43,545 - mmseg - INFO - Iter [21600/40000] lr: 3.508e-06, 
eta: 2:29:29, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2032, decode.loss_mask: 0.4554, decode.loss_dice: 0.6334, decode.d0.loss_cls: 1.7639, decode.d0.loss_mask: 0.4966, decode.d0.loss_dice: 0.7438, decode.d1.loss_cls: 0.3797, decode.d1.loss_mask: 0.4723, decode.d1.loss_dice: 0.6681, decode.d2.loss_cls: 0.2765, decode.d2.loss_mask: 0.4644, decode.d2.loss_dice: 0.6422, decode.d3.loss_cls: 0.2352, decode.d3.loss_mask: 0.4603, decode.d3.loss_dice: 0.6353, decode.d4.loss_cls: 0.2256, decode.d4.loss_mask: 0.4592, decode.d4.loss_dice: 0.6395, decode.d5.loss_cls: 0.2193, decode.d5.loss_mask: 0.4577, decode.d5.loss_dice: 0.6376, decode.d6.loss_cls: 0.2104, decode.d6.loss_mask: 0.4558, decode.d6.loss_dice: 0.6347, decode.d7.loss_cls: 0.2182, decode.d7.loss_mask: 0.4551, decode.d7.loss_dice: 0.6300, decode.d8.loss_cls: 0.2064, decode.d8.loss_mask: 0.4550, decode.d8.loss_dice: 0.6294, loss: 15.0644 +2022-06-05 04:38:05,169 - mmseg - INFO - Iter [21650/40000] lr: 3.498e-06, eta: 2:29:03, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2266, decode.loss_mask: 0.4561, decode.loss_dice: 0.6566, decode.d0.loss_cls: 1.7780, decode.d0.loss_mask: 0.4980, decode.d0.loss_dice: 0.7692, decode.d1.loss_cls: 0.4087, decode.d1.loss_mask: 0.4741, decode.d1.loss_dice: 0.6948, decode.d2.loss_cls: 0.3114, decode.d2.loss_mask: 0.4617, decode.d2.loss_dice: 0.6663, decode.d3.loss_cls: 0.2573, decode.d3.loss_mask: 0.4590, decode.d3.loss_dice: 0.6613, decode.d4.loss_cls: 0.2529, decode.d4.loss_mask: 0.4555, decode.d4.loss_dice: 0.6543, decode.d5.loss_cls: 0.2415, decode.d5.loss_mask: 0.4556, decode.d5.loss_dice: 0.6561, decode.d6.loss_cls: 0.2319, decode.d6.loss_mask: 0.4545, decode.d6.loss_dice: 0.6538, decode.d7.loss_cls: 0.2335, decode.d7.loss_mask: 0.4534, decode.d7.loss_dice: 0.6547, decode.d8.loss_cls: 0.2293, decode.d8.loss_mask: 0.4555, decode.d8.loss_dice: 0.6559, loss: 15.5177 +2022-06-05 04:38:26,760 - mmseg - INFO - Iter [21700/40000] lr: 3.489e-06, eta: 2:28:36, time: 0.432, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1989, decode.loss_mask: 0.4732, decode.loss_dice: 0.6252, decode.d0.loss_cls: 1.7291, decode.d0.loss_mask: 0.5159, decode.d0.loss_dice: 0.7232, decode.d1.loss_cls: 0.3572, decode.d1.loss_mask: 0.4949, decode.d1.loss_dice: 0.6634, decode.d2.loss_cls: 0.2662, decode.d2.loss_mask: 0.4801, decode.d2.loss_dice: 0.6389, decode.d3.loss_cls: 0.2276, decode.d3.loss_mask: 0.4756, decode.d3.loss_dice: 0.6260, decode.d4.loss_cls: 0.2168, decode.d4.loss_mask: 0.4755, decode.d4.loss_dice: 0.6248, decode.d5.loss_cls: 0.2081, decode.d5.loss_mask: 0.4728, decode.d5.loss_dice: 0.6266, decode.d6.loss_cls: 0.2055, decode.d6.loss_mask: 0.4756, decode.d6.loss_dice: 0.6261, decode.d7.loss_cls: 0.2017, decode.d7.loss_mask: 0.4719, decode.d7.loss_dice: 0.6254, decode.d8.loss_cls: 0.2052, decode.d8.loss_mask: 0.4725, decode.d8.loss_dice: 0.6260, loss: 15.0299 +2022-06-05 04:38:48,286 - mmseg - INFO - Iter [21750/40000] lr: 3.479e-06, eta: 2:28:09, time: 0.431, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2054, decode.loss_mask: 0.4470, decode.loss_dice: 0.6212, decode.d0.loss_cls: 1.7158, decode.d0.loss_mask: 0.4865, decode.d0.loss_dice: 0.7230, decode.d1.loss_cls: 0.3659, decode.d1.loss_mask: 0.4589, decode.d1.loss_dice: 0.6539, decode.d2.loss_cls: 0.2669, decode.d2.loss_mask: 0.4502, decode.d2.loss_dice: 0.6346, decode.d3.loss_cls: 0.2309, decode.d3.loss_mask: 0.4463, decode.d3.loss_dice: 0.6266, decode.d4.loss_cls: 0.2162, decode.d4.loss_mask: 0.4474, 
decode.d4.loss_dice: 0.6279, decode.d5.loss_cls: 0.2106, decode.d5.loss_mask: 0.4479, decode.d5.loss_dice: 0.6252, decode.d6.loss_cls: 0.2098, decode.d6.loss_mask: 0.4457, decode.d6.loss_dice: 0.6228, decode.d7.loss_cls: 0.2033, decode.d7.loss_mask: 0.4472, decode.d7.loss_dice: 0.6250, decode.d8.loss_cls: 0.2063, decode.d8.loss_mask: 0.4487, decode.d8.loss_dice: 0.6229, loss: 14.7401 +2022-06-05 04:39:09,843 - mmseg - INFO - Iter [21800/40000] lr: 3.470e-06, eta: 2:27:42, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2142, decode.loss_mask: 0.4539, decode.loss_dice: 0.6376, decode.d0.loss_cls: 1.7332, decode.d0.loss_mask: 0.4969, decode.d0.loss_dice: 0.7343, decode.d1.loss_cls: 0.3852, decode.d1.loss_mask: 0.4747, decode.d1.loss_dice: 0.6750, decode.d2.loss_cls: 0.2961, decode.d2.loss_mask: 0.4613, decode.d2.loss_dice: 0.6473, decode.d3.loss_cls: 0.2531, decode.d3.loss_mask: 0.4594, decode.d3.loss_dice: 0.6401, decode.d4.loss_cls: 0.2391, decode.d4.loss_mask: 0.4579, decode.d4.loss_dice: 0.6405, decode.d5.loss_cls: 0.2236, decode.d5.loss_mask: 0.4596, decode.d5.loss_dice: 0.6445, decode.d6.loss_cls: 0.2136, decode.d6.loss_mask: 0.4575, decode.d6.loss_dice: 0.6385, decode.d7.loss_cls: 0.2164, decode.d7.loss_mask: 0.4552, decode.d7.loss_dice: 0.6379, decode.d8.loss_cls: 0.2157, decode.d8.loss_mask: 0.4541, decode.d8.loss_dice: 0.6391, loss: 15.1556 +2022-06-05 04:39:34,333 - mmseg - INFO - Iter [21850/40000] lr: 3.460e-06, eta: 2:27:18, time: 0.490, data_time: 0.058, memory: 31652, decode.loss_cls: 0.2100, decode.loss_mask: 0.4563, decode.loss_dice: 0.6448, decode.d0.loss_cls: 1.7586, decode.d0.loss_mask: 0.4966, decode.d0.loss_dice: 0.7500, decode.d1.loss_cls: 0.3931, decode.d1.loss_mask: 0.4747, decode.d1.loss_dice: 0.6922, decode.d2.loss_cls: 0.2946, decode.d2.loss_mask: 0.4624, decode.d2.loss_dice: 0.6608, decode.d3.loss_cls: 0.2469, decode.d3.loss_mask: 0.4582, decode.d3.loss_dice: 0.6494, decode.d4.loss_cls: 0.2368, decode.d4.loss_mask: 0.4579, decode.d4.loss_dice: 0.6478, decode.d5.loss_cls: 0.2270, decode.d5.loss_mask: 0.4567, decode.d5.loss_dice: 0.6459, decode.d6.loss_cls: 0.2236, decode.d6.loss_mask: 0.4545, decode.d6.loss_dice: 0.6449, decode.d7.loss_cls: 0.2151, decode.d7.loss_mask: 0.4568, decode.d7.loss_dice: 0.6461, decode.d8.loss_cls: 0.2123, decode.d8.loss_mask: 0.4567, decode.d8.loss_dice: 0.6465, loss: 15.2772 +2022-06-05 04:39:56,726 - mmseg - INFO - Iter [21900/40000] lr: 3.451e-06, eta: 2:26:52, time: 0.448, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2142, decode.loss_mask: 0.4479, decode.loss_dice: 0.6277, decode.d0.loss_cls: 1.7286, decode.d0.loss_mask: 0.4935, decode.d0.loss_dice: 0.7384, decode.d1.loss_cls: 0.3652, decode.d1.loss_mask: 0.4643, decode.d1.loss_dice: 0.6698, decode.d2.loss_cls: 0.2819, decode.d2.loss_mask: 0.4547, decode.d2.loss_dice: 0.6429, decode.d3.loss_cls: 0.2453, decode.d3.loss_mask: 0.4482, decode.d3.loss_dice: 0.6330, decode.d4.loss_cls: 0.2317, decode.d4.loss_mask: 0.4484, decode.d4.loss_dice: 0.6316, decode.d5.loss_cls: 0.2258, decode.d5.loss_mask: 0.4495, decode.d5.loss_dice: 0.6312, decode.d6.loss_cls: 0.2209, decode.d6.loss_mask: 0.4482, decode.d6.loss_dice: 0.6287, decode.d7.loss_cls: 0.2146, decode.d7.loss_mask: 0.4481, decode.d7.loss_dice: 0.6318, decode.d8.loss_cls: 0.2191, decode.d8.loss_mask: 0.4494, decode.d8.loss_dice: 0.6312, loss: 14.9658 +2022-06-05 04:40:19,203 - mmseg - INFO - Iter [21950/40000] lr: 3.441e-06, eta: 2:26:26, time: 0.449, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2192, 
decode.loss_mask: 0.4727, decode.loss_dice: 0.6394, decode.d0.loss_cls: 1.7309, decode.d0.loss_mask: 0.5196, decode.d0.loss_dice: 0.7578, decode.d1.loss_cls: 0.3835, decode.d1.loss_mask: 0.4959, decode.d1.loss_dice: 0.6879, decode.d2.loss_cls: 0.2844, decode.d2.loss_mask: 0.4831, decode.d2.loss_dice: 0.6551, decode.d3.loss_cls: 0.2530, decode.d3.loss_mask: 0.4794, decode.d3.loss_dice: 0.6447, decode.d4.loss_cls: 0.2379, decode.d4.loss_mask: 0.4776, decode.d4.loss_dice: 0.6472, decode.d5.loss_cls: 0.2327, decode.d5.loss_mask: 0.4771, decode.d5.loss_dice: 0.6486, decode.d6.loss_cls: 0.2215, decode.d6.loss_mask: 0.4752, decode.d6.loss_dice: 0.6458, decode.d7.loss_cls: 0.2188, decode.d7.loss_mask: 0.4751, decode.d7.loss_dice: 0.6442, decode.d8.loss_cls: 0.2132, decode.d8.loss_mask: 0.4749, decode.d8.loss_dice: 0.6484, loss: 15.4449 +2022-06-05 04:40:41,240 - mmseg - INFO - Saving checkpoint at 22000 iterations +2022-06-05 04:40:43,687 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:40:43,688 - mmseg - INFO - Iter [22000/40000] lr: 3.432e-06, eta: 2:26:02, time: 0.490, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1919, decode.loss_mask: 0.4577, decode.loss_dice: 0.6092, decode.d0.loss_cls: 1.6866, decode.d0.loss_mask: 0.4904, decode.d0.loss_dice: 0.7137, decode.d1.loss_cls: 0.3454, decode.d1.loss_mask: 0.4728, decode.d1.loss_dice: 0.6537, decode.d2.loss_cls: 0.2606, decode.d2.loss_mask: 0.4659, decode.d2.loss_dice: 0.6194, decode.d3.loss_cls: 0.2194, decode.d3.loss_mask: 0.4615, decode.d3.loss_dice: 0.6115, decode.d4.loss_cls: 0.2156, decode.d4.loss_mask: 0.4579, decode.d4.loss_dice: 0.6160, decode.d5.loss_cls: 0.2060, decode.d5.loss_mask: 0.4597, decode.d5.loss_dice: 0.6103, decode.d6.loss_cls: 0.1982, decode.d6.loss_mask: 0.4573, decode.d6.loss_dice: 0.6128, decode.d7.loss_cls: 0.1962, decode.d7.loss_mask: 0.4586, decode.d7.loss_dice: 0.6145, decode.d8.loss_cls: 0.1955, decode.d8.loss_mask: 0.4570, decode.d8.loss_dice: 0.6109, loss: 14.6262 +2022-06-05 04:41:05,591 - mmseg - INFO - Iter [22050/40000] lr: 3.422e-06, eta: 2:25:36, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2204, decode.loss_mask: 0.4490, decode.loss_dice: 0.6569, decode.d0.loss_cls: 1.7448, decode.d0.loss_mask: 0.4868, decode.d0.loss_dice: 0.7612, decode.d1.loss_cls: 0.4115, decode.d1.loss_mask: 0.4651, decode.d1.loss_dice: 0.6979, decode.d2.loss_cls: 0.2929, decode.d2.loss_mask: 0.4562, decode.d2.loss_dice: 0.6684, decode.d3.loss_cls: 0.2601, decode.d3.loss_mask: 0.4545, decode.d3.loss_dice: 0.6579, decode.d4.loss_cls: 0.2511, decode.d4.loss_mask: 0.4525, decode.d4.loss_dice: 0.6551, decode.d5.loss_cls: 0.2358, decode.d5.loss_mask: 0.4526, decode.d5.loss_dice: 0.6555, decode.d6.loss_cls: 0.2407, decode.d6.loss_mask: 0.4512, decode.d6.loss_dice: 0.6548, decode.d7.loss_cls: 0.2246, decode.d7.loss_mask: 0.4499, decode.d7.loss_dice: 0.6540, decode.d8.loss_cls: 0.2199, decode.d8.loss_mask: 0.4488, decode.d8.loss_dice: 0.6602, loss: 15.3902 +2022-06-05 04:41:27,451 - mmseg - INFO - Iter [22100/40000] lr: 3.413e-06, eta: 2:25:09, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2143, decode.loss_mask: 0.4657, decode.loss_dice: 0.6456, decode.d0.loss_cls: 1.7390, decode.d0.loss_mask: 0.5065, decode.d0.loss_dice: 0.7518, decode.d1.loss_cls: 0.3882, decode.d1.loss_mask: 0.4869, decode.d1.loss_dice: 0.6848, decode.d2.loss_cls: 0.2766, decode.d2.loss_mask: 0.4747, decode.d2.loss_dice: 0.6560, decode.d3.loss_cls: 0.2513, 
decode.d3.loss_mask: 0.4705, decode.d3.loss_dice: 0.6481, decode.d4.loss_cls: 0.2373, decode.d4.loss_mask: 0.4686, decode.d4.loss_dice: 0.6483, decode.d5.loss_cls: 0.2307, decode.d5.loss_mask: 0.4676, decode.d5.loss_dice: 0.6467, decode.d6.loss_cls: 0.2191, decode.d6.loss_mask: 0.4663, decode.d6.loss_dice: 0.6470, decode.d7.loss_cls: 0.2155, decode.d7.loss_mask: 0.4652, decode.d7.loss_dice: 0.6451, decode.d8.loss_cls: 0.2145, decode.d8.loss_mask: 0.4656, decode.d8.loss_dice: 0.6461, loss: 15.3438 +2022-06-05 04:41:49,790 - mmseg - INFO - Iter [22150/40000] lr: 3.403e-06, eta: 2:24:44, time: 0.447, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2299, decode.loss_mask: 0.4811, decode.loss_dice: 0.6676, decode.d0.loss_cls: 1.7885, decode.d0.loss_mask: 0.5224, decode.d0.loss_dice: 0.7942, decode.d1.loss_cls: 0.4142, decode.d1.loss_mask: 0.5000, decode.d1.loss_dice: 0.7129, decode.d2.loss_cls: 0.3081, decode.d2.loss_mask: 0.4855, decode.d2.loss_dice: 0.6842, decode.d3.loss_cls: 0.2653, decode.d3.loss_mask: 0.4813, decode.d3.loss_dice: 0.6732, decode.d4.loss_cls: 0.2571, decode.d4.loss_mask: 0.4805, decode.d4.loss_dice: 0.6735, decode.d5.loss_cls: 0.2491, decode.d5.loss_mask: 0.4813, decode.d5.loss_dice: 0.6751, decode.d6.loss_cls: 0.2425, decode.d6.loss_mask: 0.4830, decode.d6.loss_dice: 0.6678, decode.d7.loss_cls: 0.2420, decode.d7.loss_mask: 0.4795, decode.d7.loss_dice: 0.6662, decode.d8.loss_cls: 0.2381, decode.d8.loss_mask: 0.4815, decode.d8.loss_dice: 0.6688, loss: 15.9944 +2022-06-05 04:42:14,135 - mmseg - INFO - Iter [22200/40000] lr: 3.394e-06, eta: 2:24:19, time: 0.487, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1797, decode.loss_mask: 0.4567, decode.loss_dice: 0.6174, decode.d0.loss_cls: 1.6870, decode.d0.loss_mask: 0.5003, decode.d0.loss_dice: 0.7230, decode.d1.loss_cls: 0.3485, decode.d1.loss_mask: 0.4748, decode.d1.loss_dice: 0.6576, decode.d2.loss_cls: 0.2496, decode.d2.loss_mask: 0.4614, decode.d2.loss_dice: 0.6319, decode.d3.loss_cls: 0.2202, decode.d3.loss_mask: 0.4592, decode.d3.loss_dice: 0.6272, decode.d4.loss_cls: 0.2100, decode.d4.loss_mask: 0.4557, decode.d4.loss_dice: 0.6236, decode.d5.loss_cls: 0.1974, decode.d5.loss_mask: 0.4572, decode.d5.loss_dice: 0.6229, decode.d6.loss_cls: 0.1914, decode.d6.loss_mask: 0.4568, decode.d6.loss_dice: 0.6192, decode.d7.loss_cls: 0.1883, decode.d7.loss_mask: 0.4554, decode.d7.loss_dice: 0.6132, decode.d8.loss_cls: 0.1808, decode.d8.loss_mask: 0.4560, decode.d8.loss_dice: 0.6139, loss: 14.6362 +2022-06-05 04:42:36,118 - mmseg - INFO - Iter [22250/40000] lr: 3.384e-06, eta: 2:23:53, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2112, decode.loss_mask: 0.4766, decode.loss_dice: 0.6203, decode.d0.loss_cls: 1.7265, decode.d0.loss_mask: 0.5164, decode.d0.loss_dice: 0.7348, decode.d1.loss_cls: 0.3728, decode.d1.loss_mask: 0.5007, decode.d1.loss_dice: 0.6604, decode.d2.loss_cls: 0.2865, decode.d2.loss_mask: 0.4877, decode.d2.loss_dice: 0.6376, decode.d3.loss_cls: 0.2409, decode.d3.loss_mask: 0.4829, decode.d3.loss_dice: 0.6329, decode.d4.loss_cls: 0.2310, decode.d4.loss_mask: 0.4823, decode.d4.loss_dice: 0.6289, decode.d5.loss_cls: 0.2291, decode.d5.loss_mask: 0.4783, decode.d5.loss_dice: 0.6259, decode.d6.loss_cls: 0.2207, decode.d6.loss_mask: 0.4778, decode.d6.loss_dice: 0.6235, decode.d7.loss_cls: 0.2128, decode.d7.loss_mask: 0.4765, decode.d7.loss_dice: 0.6235, decode.d8.loss_cls: 0.2092, decode.d8.loss_mask: 0.4777, decode.d8.loss_dice: 0.6221, loss: 15.2077 +2022-06-05 04:42:58,445 - mmseg - INFO - Iter 
[22300/40000] lr: 3.375e-06, eta: 2:23:27, time: 0.447, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2008, decode.loss_mask: 0.4528, decode.loss_dice: 0.6402, decode.d0.loss_cls: 1.7156, decode.d0.loss_mask: 0.4924, decode.d0.loss_dice: 0.7514, decode.d1.loss_cls: 0.3765, decode.d1.loss_mask: 0.4721, decode.d1.loss_dice: 0.6884, decode.d2.loss_cls: 0.2718, decode.d2.loss_mask: 0.4621, decode.d2.loss_dice: 0.6588, decode.d3.loss_cls: 0.2225, decode.d3.loss_mask: 0.4580, decode.d3.loss_dice: 0.6491, decode.d4.loss_cls: 0.2179, decode.d4.loss_mask: 0.4549, decode.d4.loss_dice: 0.6442, decode.d5.loss_cls: 0.2071, decode.d5.loss_mask: 0.4541, decode.d5.loss_dice: 0.6460, decode.d6.loss_cls: 0.2063, decode.d6.loss_mask: 0.4518, decode.d6.loss_dice: 0.6383, decode.d7.loss_cls: 0.2002, decode.d7.loss_mask: 0.4539, decode.d7.loss_dice: 0.6397, decode.d8.loss_cls: 0.2001, decode.d8.loss_mask: 0.4546, decode.d8.loss_dice: 0.6384, loss: 15.0202 +2022-06-05 04:43:21,023 - mmseg - INFO - Iter [22350/40000] lr: 3.365e-06, eta: 2:23:01, time: 0.452, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2076, decode.loss_mask: 0.4487, decode.loss_dice: 0.6326, decode.d0.loss_cls: 1.7227, decode.d0.loss_mask: 0.4908, decode.d0.loss_dice: 0.7377, decode.d1.loss_cls: 0.3551, decode.d1.loss_mask: 0.4672, decode.d1.loss_dice: 0.6713, decode.d2.loss_cls: 0.2767, decode.d2.loss_mask: 0.4589, decode.d2.loss_dice: 0.6479, decode.d3.loss_cls: 0.2414, decode.d3.loss_mask: 0.4545, decode.d3.loss_dice: 0.6361, decode.d4.loss_cls: 0.2262, decode.d4.loss_mask: 0.4541, decode.d4.loss_dice: 0.6333, decode.d5.loss_cls: 0.2207, decode.d5.loss_mask: 0.4493, decode.d5.loss_dice: 0.6344, decode.d6.loss_cls: 0.2140, decode.d6.loss_mask: 0.4502, decode.d6.loss_dice: 0.6286, decode.d7.loss_cls: 0.2079, decode.d7.loss_mask: 0.4489, decode.d7.loss_dice: 0.6316, decode.d8.loss_cls: 0.2083, decode.d8.loss_mask: 0.4478, decode.d8.loss_dice: 0.6327, loss: 14.9372 +2022-06-05 04:43:43,053 - mmseg - INFO - Iter [22400/40000] lr: 3.355e-06, eta: 2:22:35, time: 0.440, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1986, decode.loss_mask: 0.4607, decode.loss_dice: 0.6318, decode.d0.loss_cls: 1.6854, decode.d0.loss_mask: 0.5050, decode.d0.loss_dice: 0.7346, decode.d1.loss_cls: 0.3548, decode.d1.loss_mask: 0.4812, decode.d1.loss_dice: 0.6690, decode.d2.loss_cls: 0.2533, decode.d2.loss_mask: 0.4680, decode.d2.loss_dice: 0.6500, decode.d3.loss_cls: 0.2271, decode.d3.loss_mask: 0.4644, decode.d3.loss_dice: 0.6419, decode.d4.loss_cls: 0.2154, decode.d4.loss_mask: 0.4643, decode.d4.loss_dice: 0.6431, decode.d5.loss_cls: 0.2127, decode.d5.loss_mask: 0.4629, decode.d5.loss_dice: 0.6358, decode.d6.loss_cls: 0.2110, decode.d6.loss_mask: 0.4611, decode.d6.loss_dice: 0.6333, decode.d7.loss_cls: 0.2083, decode.d7.loss_mask: 0.4605, decode.d7.loss_dice: 0.6327, decode.d8.loss_cls: 0.2008, decode.d8.loss_mask: 0.4602, decode.d8.loss_dice: 0.6333, loss: 14.9612 +2022-06-05 04:44:05,517 - mmseg - INFO - Iter [22450/40000] lr: 3.346e-06, eta: 2:22:10, time: 0.450, data_time: 0.009, memory: 31652, decode.loss_cls: 0.2016, decode.loss_mask: 0.4606, decode.loss_dice: 0.6477, decode.d0.loss_cls: 1.7534, decode.d0.loss_mask: 0.5011, decode.d0.loss_dice: 0.7617, decode.d1.loss_cls: 0.3797, decode.d1.loss_mask: 0.4784, decode.d1.loss_dice: 0.6856, decode.d2.loss_cls: 0.2745, decode.d2.loss_mask: 0.4661, decode.d2.loss_dice: 0.6577, decode.d3.loss_cls: 0.2396, decode.d3.loss_mask: 0.4628, decode.d3.loss_dice: 0.6481, decode.d4.loss_cls: 0.2237, 
decode.d4.loss_mask: 0.4628, decode.d4.loss_dice: 0.6500, decode.d5.loss_cls: 0.2147, decode.d5.loss_mask: 0.4632, decode.d5.loss_dice: 0.6470, decode.d6.loss_cls: 0.2053, decode.d6.loss_mask: 0.4619, decode.d6.loss_dice: 0.6458, decode.d7.loss_cls: 0.2045, decode.d7.loss_mask: 0.4610, decode.d7.loss_dice: 0.6468, decode.d8.loss_cls: 0.1990, decode.d8.loss_mask: 0.4599, decode.d8.loss_dice: 0.6456, loss: 15.2096 +2022-06-05 04:44:30,427 - mmseg - INFO - Iter [22500/40000] lr: 3.336e-06, eta: 2:21:46, time: 0.498, data_time: 0.059, memory: 31652, decode.loss_cls: 0.2024, decode.loss_mask: 0.4515, decode.loss_dice: 0.6169, decode.d0.loss_cls: 1.6481, decode.d0.loss_mask: 0.4929, decode.d0.loss_dice: 0.7152, decode.d1.loss_cls: 0.3603, decode.d1.loss_mask: 0.4725, decode.d1.loss_dice: 0.6544, decode.d2.loss_cls: 0.2787, decode.d2.loss_mask: 0.4607, decode.d2.loss_dice: 0.6276, decode.d3.loss_cls: 0.2392, decode.d3.loss_mask: 0.4561, decode.d3.loss_dice: 0.6195, decode.d4.loss_cls: 0.2329, decode.d4.loss_mask: 0.4534, decode.d4.loss_dice: 0.6225, decode.d5.loss_cls: 0.2149, decode.d5.loss_mask: 0.4530, decode.d5.loss_dice: 0.6230, decode.d6.loss_cls: 0.2063, decode.d6.loss_mask: 0.4520, decode.d6.loss_dice: 0.6210, decode.d7.loss_cls: 0.2061, decode.d7.loss_mask: 0.4505, decode.d7.loss_dice: 0.6158, decode.d8.loss_cls: 0.2031, decode.d8.loss_mask: 0.4520, decode.d8.loss_dice: 0.6188, loss: 14.7211 +2022-06-05 04:44:52,793 - mmseg - INFO - Iter [22550/40000] lr: 3.327e-06, eta: 2:21:20, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1991, decode.loss_mask: 0.4529, decode.loss_dice: 0.6190, decode.d0.loss_cls: 1.6723, decode.d0.loss_mask: 0.4961, decode.d0.loss_dice: 0.7272, decode.d1.loss_cls: 0.3722, decode.d1.loss_mask: 0.4740, decode.d1.loss_dice: 0.6511, decode.d2.loss_cls: 0.2564, decode.d2.loss_mask: 0.4592, decode.d2.loss_dice: 0.6282, decode.d3.loss_cls: 0.2201, decode.d3.loss_mask: 0.4569, decode.d3.loss_dice: 0.6220, decode.d4.loss_cls: 0.2156, decode.d4.loss_mask: 0.4557, decode.d4.loss_dice: 0.6192, decode.d5.loss_cls: 0.2039, decode.d5.loss_mask: 0.4543, decode.d5.loss_dice: 0.6211, decode.d6.loss_cls: 0.2017, decode.d6.loss_mask: 0.4545, decode.d6.loss_dice: 0.6224, decode.d7.loss_cls: 0.1947, decode.d7.loss_mask: 0.4543, decode.d7.loss_dice: 0.6162, decode.d8.loss_cls: 0.1966, decode.d8.loss_mask: 0.4543, decode.d8.loss_dice: 0.6167, loss: 14.6880 +2022-06-05 04:45:14,850 - mmseg - INFO - Iter [22600/40000] lr: 3.317e-06, eta: 2:20:54, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2099, decode.loss_mask: 0.4550, decode.loss_dice: 0.6461, decode.d0.loss_cls: 1.7279, decode.d0.loss_mask: 0.4915, decode.d0.loss_dice: 0.7488, decode.d1.loss_cls: 0.3763, decode.d1.loss_mask: 0.4700, decode.d1.loss_dice: 0.6905, decode.d2.loss_cls: 0.2835, decode.d2.loss_mask: 0.4572, decode.d2.loss_dice: 0.6598, decode.d3.loss_cls: 0.2479, decode.d3.loss_mask: 0.4568, decode.d3.loss_dice: 0.6497, decode.d4.loss_cls: 0.2338, decode.d4.loss_mask: 0.4564, decode.d4.loss_dice: 0.6499, decode.d5.loss_cls: 0.2290, decode.d5.loss_mask: 0.4568, decode.d5.loss_dice: 0.6467, decode.d6.loss_cls: 0.2151, decode.d6.loss_mask: 0.4573, decode.d6.loss_dice: 0.6461, decode.d7.loss_cls: 0.2154, decode.d7.loss_mask: 0.4562, decode.d7.loss_dice: 0.6473, decode.d8.loss_cls: 0.2092, decode.d8.loss_mask: 0.4544, decode.d8.loss_dice: 0.6478, loss: 15.1922 +2022-06-05 04:45:37,369 - mmseg - INFO - Iter [22650/40000] lr: 3.308e-06, eta: 2:20:28, time: 0.451, data_time: 0.009, memory: 31652, 
decode.loss_cls: 0.2252, decode.loss_mask: 0.4564, decode.loss_dice: 0.6402, decode.d0.loss_cls: 1.7572, decode.d0.loss_mask: 0.5000, decode.d0.loss_dice: 0.7579, decode.d1.loss_cls: 0.3988, decode.d1.loss_mask: 0.4739, decode.d1.loss_dice: 0.6810, decode.d2.loss_cls: 0.2958, decode.d2.loss_mask: 0.4636, decode.d2.loss_dice: 0.6580, decode.d3.loss_cls: 0.2530, decode.d3.loss_mask: 0.4594, decode.d3.loss_dice: 0.6445, decode.d4.loss_cls: 0.2437, decode.d4.loss_mask: 0.4579, decode.d4.loss_dice: 0.6461, decode.d5.loss_cls: 0.2398, decode.d5.loss_mask: 0.4562, decode.d5.loss_dice: 0.6425, decode.d6.loss_cls: 0.2316, decode.d6.loss_mask: 0.4539, decode.d6.loss_dice: 0.6353, decode.d7.loss_cls: 0.2280, decode.d7.loss_mask: 0.4558, decode.d7.loss_dice: 0.6369, decode.d8.loss_cls: 0.2278, decode.d8.loss_mask: 0.4548, decode.d8.loss_dice: 0.6364, loss: 15.3115 +2022-06-05 04:46:00,182 - mmseg - INFO - Iter [22700/40000] lr: 3.298e-06, eta: 2:20:03, time: 0.456, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1972, decode.loss_mask: 0.4495, decode.loss_dice: 0.6333, decode.d0.loss_cls: 1.7154, decode.d0.loss_mask: 0.4932, decode.d0.loss_dice: 0.7378, decode.d1.loss_cls: 0.3811, decode.d1.loss_mask: 0.4655, decode.d1.loss_dice: 0.6733, decode.d2.loss_cls: 0.2685, decode.d2.loss_mask: 0.4544, decode.d2.loss_dice: 0.6477, decode.d3.loss_cls: 0.2239, decode.d3.loss_mask: 0.4509, decode.d3.loss_dice: 0.6415, decode.d4.loss_cls: 0.2175, decode.d4.loss_mask: 0.4516, decode.d4.loss_dice: 0.6369, decode.d5.loss_cls: 0.2080, decode.d5.loss_mask: 0.4489, decode.d5.loss_dice: 0.6362, decode.d6.loss_cls: 0.2030, decode.d6.loss_mask: 0.4479, decode.d6.loss_dice: 0.6364, decode.d7.loss_cls: 0.1977, decode.d7.loss_mask: 0.4493, decode.d7.loss_dice: 0.6353, decode.d8.loss_cls: 0.1938, decode.d8.loss_mask: 0.4492, decode.d8.loss_dice: 0.6353, loss: 14.8804 +2022-06-05 04:46:22,182 - mmseg - INFO - Iter [22750/40000] lr: 3.289e-06, eta: 2:19:37, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2055, decode.loss_mask: 0.4491, decode.loss_dice: 0.6439, decode.d0.loss_cls: 1.7345, decode.d0.loss_mask: 0.4947, decode.d0.loss_dice: 0.7443, decode.d1.loss_cls: 0.3927, decode.d1.loss_mask: 0.4694, decode.d1.loss_dice: 0.6830, decode.d2.loss_cls: 0.2872, decode.d2.loss_mask: 0.4576, decode.d2.loss_dice: 0.6490, decode.d3.loss_cls: 0.2368, decode.d3.loss_mask: 0.4529, decode.d3.loss_dice: 0.6476, decode.d4.loss_cls: 0.2248, decode.d4.loss_mask: 0.4538, decode.d4.loss_dice: 0.6480, decode.d5.loss_cls: 0.2185, decode.d5.loss_mask: 0.4525, decode.d5.loss_dice: 0.6404, decode.d6.loss_cls: 0.2087, decode.d6.loss_mask: 0.4503, decode.d6.loss_dice: 0.6461, decode.d7.loss_cls: 0.2121, decode.d7.loss_mask: 0.4505, decode.d7.loss_dice: 0.6435, decode.d8.loss_cls: 0.2067, decode.d8.loss_mask: 0.4484, decode.d8.loss_dice: 0.6398, loss: 15.0924 +2022-06-05 04:46:46,175 - mmseg - INFO - Iter [22800/40000] lr: 3.279e-06, eta: 2:19:12, time: 0.480, data_time: 0.060, memory: 31652, decode.loss_cls: 0.1931, decode.loss_mask: 0.4535, decode.loss_dice: 0.6242, decode.d0.loss_cls: 1.7053, decode.d0.loss_mask: 0.4944, decode.d0.loss_dice: 0.7344, decode.d1.loss_cls: 0.3679, decode.d1.loss_mask: 0.4700, decode.d1.loss_dice: 0.6615, decode.d2.loss_cls: 0.2727, decode.d2.loss_mask: 0.4589, decode.d2.loss_dice: 0.6336, decode.d3.loss_cls: 0.2303, decode.d3.loss_mask: 0.4553, decode.d3.loss_dice: 0.6276, decode.d4.loss_cls: 0.2147, decode.d4.loss_mask: 0.4549, decode.d4.loss_dice: 0.6228, decode.d5.loss_cls: 0.2116, 
decode.d5.loss_mask: 0.4551, decode.d5.loss_dice: 0.6269, decode.d6.loss_cls: 0.2054, decode.d6.loss_mask: 0.4539, decode.d6.loss_dice: 0.6179, decode.d7.loss_cls: 0.2017, decode.d7.loss_mask: 0.4536, decode.d7.loss_dice: 0.6209, decode.d8.loss_cls: 0.2023, decode.d8.loss_mask: 0.4540, decode.d8.loss_dice: 0.6242, loss: 14.8027 +2022-06-05 04:47:08,382 - mmseg - INFO - Iter [22850/40000] lr: 3.270e-06, eta: 2:18:46, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2074, decode.loss_mask: 0.4425, decode.loss_dice: 0.6314, decode.d0.loss_cls: 1.7275, decode.d0.loss_mask: 0.4894, decode.d0.loss_dice: 0.7444, decode.d1.loss_cls: 0.3868, decode.d1.loss_mask: 0.4590, decode.d1.loss_dice: 0.6766, decode.d2.loss_cls: 0.2775, decode.d2.loss_mask: 0.4508, decode.d2.loss_dice: 0.6513, decode.d3.loss_cls: 0.2313, decode.d3.loss_mask: 0.4467, decode.d3.loss_dice: 0.6377, decode.d4.loss_cls: 0.2154, decode.d4.loss_mask: 0.4461, decode.d4.loss_dice: 0.6366, decode.d5.loss_cls: 0.2153, decode.d5.loss_mask: 0.4455, decode.d5.loss_dice: 0.6350, decode.d6.loss_cls: 0.2079, decode.d6.loss_mask: 0.4455, decode.d6.loss_dice: 0.6322, decode.d7.loss_cls: 0.2058, decode.d7.loss_mask: 0.4451, decode.d7.loss_dice: 0.6308, decode.d8.loss_cls: 0.2050, decode.d8.loss_mask: 0.4447, decode.d8.loss_dice: 0.6332, loss: 14.9044 +2022-06-05 04:47:30,566 - mmseg - INFO - Iter [22900/40000] lr: 3.260e-06, eta: 2:18:21, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1957, decode.loss_mask: 0.4364, decode.loss_dice: 0.6308, decode.d0.loss_cls: 1.7300, decode.d0.loss_mask: 0.4815, decode.d0.loss_dice: 0.7370, decode.d1.loss_cls: 0.3627, decode.d1.loss_mask: 0.4571, decode.d1.loss_dice: 0.6753, decode.d2.loss_cls: 0.2659, decode.d2.loss_mask: 0.4446, decode.d2.loss_dice: 0.6449, decode.d3.loss_cls: 0.2276, decode.d3.loss_mask: 0.4412, decode.d3.loss_dice: 0.6381, decode.d4.loss_cls: 0.2176, decode.d4.loss_mask: 0.4387, decode.d4.loss_dice: 0.6366, decode.d5.loss_cls: 0.2104, decode.d5.loss_mask: 0.4384, decode.d5.loss_dice: 0.6327, decode.d6.loss_cls: 0.2032, decode.d6.loss_mask: 0.4364, decode.d6.loss_dice: 0.6315, decode.d7.loss_cls: 0.1981, decode.d7.loss_mask: 0.4360, decode.d7.loss_dice: 0.6352, decode.d8.loss_cls: 0.2007, decode.d8.loss_mask: 0.4369, decode.d8.loss_dice: 0.6333, loss: 14.7545 +2022-06-05 04:47:52,789 - mmseg - INFO - Iter [22950/40000] lr: 3.251e-06, eta: 2:17:55, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1835, decode.loss_mask: 0.4432, decode.loss_dice: 0.6054, decode.d0.loss_cls: 1.6239, decode.d0.loss_mask: 0.4855, decode.d0.loss_dice: 0.7094, decode.d1.loss_cls: 0.3387, decode.d1.loss_mask: 0.4618, decode.d1.loss_dice: 0.6399, decode.d2.loss_cls: 0.2541, decode.d2.loss_mask: 0.4499, decode.d2.loss_dice: 0.6149, decode.d3.loss_cls: 0.2088, decode.d3.loss_mask: 0.4486, decode.d3.loss_dice: 0.6088, decode.d4.loss_cls: 0.2020, decode.d4.loss_mask: 0.4474, decode.d4.loss_dice: 0.6121, decode.d5.loss_cls: 0.1976, decode.d5.loss_mask: 0.4464, decode.d5.loss_dice: 0.6087, decode.d6.loss_cls: 0.1891, decode.d6.loss_mask: 0.4470, decode.d6.loss_dice: 0.6082, decode.d7.loss_cls: 0.1920, decode.d7.loss_mask: 0.4455, decode.d7.loss_dice: 0.6064, decode.d8.loss_cls: 0.1898, decode.d8.loss_mask: 0.4441, decode.d8.loss_dice: 0.6035, loss: 14.3162 +2022-06-05 04:48:14,768 - mmseg - INFO - Saving checkpoint at 23000 iterations +2022-06-05 04:48:18,180 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:48:18,180 - 
mmseg - INFO - Iter [23000/40000] lr: 3.241e-06, eta: 2:17:31, time: 0.508, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1792, decode.loss_mask: 0.4681, decode.loss_dice: 0.6459, decode.d0.loss_cls: 1.7092, decode.d0.loss_mask: 0.5091, decode.d0.loss_dice: 0.7437, decode.d1.loss_cls: 0.3694, decode.d1.loss_mask: 0.4844, decode.d1.loss_dice: 0.6727, decode.d2.loss_cls: 0.2640, decode.d2.loss_mask: 0.4730, decode.d2.loss_dice: 0.6538, decode.d3.loss_cls: 0.2184, decode.d3.loss_mask: 0.4717, decode.d3.loss_dice: 0.6494, decode.d4.loss_cls: 0.2120, decode.d4.loss_mask: 0.4691, decode.d4.loss_dice: 0.6428, decode.d5.loss_cls: 0.2003, decode.d5.loss_mask: 0.4658, decode.d5.loss_dice: 0.6474, decode.d6.loss_cls: 0.1908, decode.d6.loss_mask: 0.4673, decode.d6.loss_dice: 0.6487, decode.d7.loss_cls: 0.1860, decode.d7.loss_mask: 0.4655, decode.d7.loss_dice: 0.6439, decode.d8.loss_cls: 0.1881, decode.d8.loss_mask: 0.4674, decode.d8.loss_dice: 0.6456, loss: 15.0527 +2022-06-05 04:48:40,096 - mmseg - INFO - Iter [23050/40000] lr: 3.232e-06, eta: 2:17:05, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1930, decode.loss_mask: 0.4653, decode.loss_dice: 0.6207, decode.d0.loss_cls: 1.6909, decode.d0.loss_mask: 0.5050, decode.d0.loss_dice: 0.7172, decode.d1.loss_cls: 0.3693, decode.d1.loss_mask: 0.4853, decode.d1.loss_dice: 0.6610, decode.d2.loss_cls: 0.2640, decode.d2.loss_mask: 0.4754, decode.d2.loss_dice: 0.6346, decode.d3.loss_cls: 0.2336, decode.d3.loss_mask: 0.4691, decode.d3.loss_dice: 0.6224, decode.d4.loss_cls: 0.2169, decode.d4.loss_mask: 0.4677, decode.d4.loss_dice: 0.6276, decode.d5.loss_cls: 0.2080, decode.d5.loss_mask: 0.4680, decode.d5.loss_dice: 0.6220, decode.d6.loss_cls: 0.2037, decode.d6.loss_mask: 0.4651, decode.d6.loss_dice: 0.6261, decode.d7.loss_cls: 0.1990, decode.d7.loss_mask: 0.4672, decode.d7.loss_dice: 0.6274, decode.d8.loss_cls: 0.1964, decode.d8.loss_mask: 0.4667, decode.d8.loss_dice: 0.6278, loss: 14.8965 +2022-06-05 04:49:05,027 - mmseg - INFO - Iter [23100/40000] lr: 3.222e-06, eta: 2:16:42, time: 0.499, data_time: 0.060, memory: 31652, decode.loss_cls: 0.2126, decode.loss_mask: 0.4666, decode.loss_dice: 0.6437, decode.d0.loss_cls: 1.7370, decode.d0.loss_mask: 0.5085, decode.d0.loss_dice: 0.7556, decode.d1.loss_cls: 0.3860, decode.d1.loss_mask: 0.4876, decode.d1.loss_dice: 0.6909, decode.d2.loss_cls: 0.2783, decode.d2.loss_mask: 0.4737, decode.d2.loss_dice: 0.6592, decode.d3.loss_cls: 0.2431, decode.d3.loss_mask: 0.4691, decode.d3.loss_dice: 0.6406, decode.d4.loss_cls: 0.2271, decode.d4.loss_mask: 0.4688, decode.d4.loss_dice: 0.6439, decode.d5.loss_cls: 0.2258, decode.d5.loss_mask: 0.4685, decode.d5.loss_dice: 0.6431, decode.d6.loss_cls: 0.2150, decode.d6.loss_mask: 0.4663, decode.d6.loss_dice: 0.6379, decode.d7.loss_cls: 0.2128, decode.d7.loss_mask: 0.4681, decode.d7.loss_dice: 0.6376, decode.d8.loss_cls: 0.2077, decode.d8.loss_mask: 0.4665, decode.d8.loss_dice: 0.6394, loss: 15.2809 +2022-06-05 04:49:26,910 - mmseg - INFO - Iter [23150/40000] lr: 3.212e-06, eta: 2:16:16, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1990, decode.loss_mask: 0.4403, decode.loss_dice: 0.6039, decode.d0.loss_cls: 1.6463, decode.d0.loss_mask: 0.4806, decode.d0.loss_dice: 0.7097, decode.d1.loss_cls: 0.3577, decode.d1.loss_mask: 0.4573, decode.d1.loss_dice: 0.6407, decode.d2.loss_cls: 0.2628, decode.d2.loss_mask: 0.4483, decode.d2.loss_dice: 0.6148, decode.d3.loss_cls: 0.2332, decode.d3.loss_mask: 0.4432, decode.d3.loss_dice: 0.6051, decode.d4.loss_cls: 
0.2215, decode.d4.loss_mask: 0.4423, decode.d4.loss_dice: 0.6052, decode.d5.loss_cls: 0.2143, decode.d5.loss_mask: 0.4429, decode.d5.loss_dice: 0.5996, decode.d6.loss_cls: 0.2069, decode.d6.loss_mask: 0.4411, decode.d6.loss_dice: 0.5996, decode.d7.loss_cls: 0.2038, decode.d7.loss_mask: 0.4412, decode.d7.loss_dice: 0.6011, decode.d8.loss_cls: 0.2018, decode.d8.loss_mask: 0.4412, decode.d8.loss_dice: 0.5995, loss: 14.4046 +2022-06-05 04:49:48,852 - mmseg - INFO - Iter [23200/40000] lr: 3.203e-06, eta: 2:15:50, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1932, decode.loss_mask: 0.4556, decode.loss_dice: 0.6381, decode.d0.loss_cls: 1.6830, decode.d0.loss_mask: 0.4871, decode.d0.loss_dice: 0.7407, decode.d1.loss_cls: 0.3661, decode.d1.loss_mask: 0.4664, decode.d1.loss_dice: 0.6760, decode.d2.loss_cls: 0.2609, decode.d2.loss_mask: 0.4607, decode.d2.loss_dice: 0.6534, decode.d3.loss_cls: 0.2243, decode.d3.loss_mask: 0.4580, decode.d3.loss_dice: 0.6439, decode.d4.loss_cls: 0.2157, decode.d4.loss_mask: 0.4577, decode.d4.loss_dice: 0.6432, decode.d5.loss_cls: 0.2069, decode.d5.loss_mask: 0.4544, decode.d5.loss_dice: 0.6448, decode.d6.loss_cls: 0.1950, decode.d6.loss_mask: 0.4558, decode.d6.loss_dice: 0.6418, decode.d7.loss_cls: 0.1905, decode.d7.loss_mask: 0.4554, decode.d7.loss_dice: 0.6466, decode.d8.loss_cls: 0.1933, decode.d8.loss_mask: 0.4529, decode.d8.loss_dice: 0.6407, loss: 14.9021 +2022-06-05 04:50:10,992 - mmseg - INFO - Iter [23250/40000] lr: 3.193e-06, eta: 2:15:24, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1831, decode.loss_mask: 0.4507, decode.loss_dice: 0.6015, decode.d0.loss_cls: 1.6850, decode.d0.loss_mask: 0.4927, decode.d0.loss_dice: 0.7185, decode.d1.loss_cls: 0.3507, decode.d1.loss_mask: 0.4710, decode.d1.loss_dice: 0.6449, decode.d2.loss_cls: 0.2562, decode.d2.loss_mask: 0.4605, decode.d2.loss_dice: 0.6174, decode.d3.loss_cls: 0.2070, decode.d3.loss_mask: 0.4547, decode.d3.loss_dice: 0.6076, decode.d4.loss_cls: 0.1938, decode.d4.loss_mask: 0.4521, decode.d4.loss_dice: 0.6127, decode.d5.loss_cls: 0.1910, decode.d5.loss_mask: 0.4519, decode.d5.loss_dice: 0.6061, decode.d6.loss_cls: 0.1860, decode.d6.loss_mask: 0.4489, decode.d6.loss_dice: 0.6018, decode.d7.loss_cls: 0.1878, decode.d7.loss_mask: 0.4491, decode.d7.loss_dice: 0.6042, decode.d8.loss_cls: 0.1837, decode.d8.loss_mask: 0.4504, decode.d8.loss_dice: 0.5986, loss: 14.4198 +2022-06-05 04:50:33,082 - mmseg - INFO - Iter [23300/40000] lr: 3.184e-06, eta: 2:14:58, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1890, decode.loss_mask: 0.4234, decode.loss_dice: 0.6236, decode.d0.loss_cls: 1.6908, decode.d0.loss_mask: 0.4678, decode.d0.loss_dice: 0.7331, decode.d1.loss_cls: 0.3663, decode.d1.loss_mask: 0.4456, decode.d1.loss_dice: 0.6600, decode.d2.loss_cls: 0.2603, decode.d2.loss_mask: 0.4317, decode.d2.loss_dice: 0.6343, decode.d3.loss_cls: 0.2238, decode.d3.loss_mask: 0.4256, decode.d3.loss_dice: 0.6227, decode.d4.loss_cls: 0.2058, decode.d4.loss_mask: 0.4257, decode.d4.loss_dice: 0.6262, decode.d5.loss_cls: 0.2051, decode.d5.loss_mask: 0.4242, decode.d5.loss_dice: 0.6195, decode.d6.loss_cls: 0.1981, decode.d6.loss_mask: 0.4231, decode.d6.loss_dice: 0.6193, decode.d7.loss_cls: 0.1934, decode.d7.loss_mask: 0.4231, decode.d7.loss_dice: 0.6188, decode.d8.loss_cls: 0.1928, decode.d8.loss_mask: 0.4231, decode.d8.loss_dice: 0.6177, loss: 14.4141 +2022-06-05 04:50:54,800 - mmseg - INFO - Iter [23350/40000] lr: 3.174e-06, eta: 2:14:32, time: 0.434, data_time: 0.008, 
memory: 31652, decode.loss_cls: 0.1907, decode.loss_mask: 0.4532, decode.loss_dice: 0.6259, decode.d0.loss_cls: 1.7376, decode.d0.loss_mask: 0.4974, decode.d0.loss_dice: 0.7389, decode.d1.loss_cls: 0.3689, decode.d1.loss_mask: 0.4723, decode.d1.loss_dice: 0.6683, decode.d2.loss_cls: 0.2713, decode.d2.loss_mask: 0.4592, decode.d2.loss_dice: 0.6460, decode.d3.loss_cls: 0.2303, decode.d3.loss_mask: 0.4540, decode.d3.loss_dice: 0.6353, decode.d4.loss_cls: 0.2176, decode.d4.loss_mask: 0.4522, decode.d4.loss_dice: 0.6293, decode.d5.loss_cls: 0.2057, decode.d5.loss_mask: 0.4534, decode.d5.loss_dice: 0.6296, decode.d6.loss_cls: 0.2010, decode.d6.loss_mask: 0.4530, decode.d6.loss_dice: 0.6264, decode.d7.loss_cls: 0.1915, decode.d7.loss_mask: 0.4520, decode.d7.loss_dice: 0.6278, decode.d8.loss_cls: 0.1961, decode.d8.loss_mask: 0.4519, decode.d8.loss_dice: 0.6293, loss: 14.8663 +2022-06-05 04:51:16,263 - mmseg - INFO - Iter [23400/40000] lr: 3.165e-06, eta: 2:14:06, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1976, decode.loss_mask: 0.4572, decode.loss_dice: 0.6385, decode.d0.loss_cls: 1.7354, decode.d0.loss_mask: 0.5000, decode.d0.loss_dice: 0.7506, decode.d1.loss_cls: 0.3817, decode.d1.loss_mask: 0.4774, decode.d1.loss_dice: 0.6782, decode.d2.loss_cls: 0.2783, decode.d2.loss_mask: 0.4666, decode.d2.loss_dice: 0.6502, decode.d3.loss_cls: 0.2249, decode.d3.loss_mask: 0.4640, decode.d3.loss_dice: 0.6446, decode.d4.loss_cls: 0.2181, decode.d4.loss_mask: 0.4622, decode.d4.loss_dice: 0.6446, decode.d5.loss_cls: 0.2073, decode.d5.loss_mask: 0.4613, decode.d5.loss_dice: 0.6389, decode.d6.loss_cls: 0.2040, decode.d6.loss_mask: 0.4601, decode.d6.loss_dice: 0.6382, decode.d7.loss_cls: 0.1991, decode.d7.loss_mask: 0.4581, decode.d7.loss_dice: 0.6416, decode.d8.loss_cls: 0.2026, decode.d8.loss_mask: 0.4566, decode.d8.loss_dice: 0.6373, loss: 15.0753 +2022-06-05 04:51:40,922 - mmseg - INFO - Iter [23450/40000] lr: 3.155e-06, eta: 2:13:42, time: 0.493, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1752, decode.loss_mask: 0.4493, decode.loss_dice: 0.6037, decode.d0.loss_cls: 1.6939, decode.d0.loss_mask: 0.4895, decode.d0.loss_dice: 0.7193, decode.d1.loss_cls: 0.3640, decode.d1.loss_mask: 0.4653, decode.d1.loss_dice: 0.6444, decode.d2.loss_cls: 0.2490, decode.d2.loss_mask: 0.4550, decode.d2.loss_dice: 0.6205, decode.d3.loss_cls: 0.2042, decode.d3.loss_mask: 0.4535, decode.d3.loss_dice: 0.6090, decode.d4.loss_cls: 0.1975, decode.d4.loss_mask: 0.4523, decode.d4.loss_dice: 0.6070, decode.d5.loss_cls: 0.1856, decode.d5.loss_mask: 0.4514, decode.d5.loss_dice: 0.6103, decode.d6.loss_cls: 0.1772, decode.d6.loss_mask: 0.4503, decode.d6.loss_dice: 0.6055, decode.d7.loss_cls: 0.1777, decode.d7.loss_mask: 0.4495, decode.d7.loss_dice: 0.6079, decode.d8.loss_cls: 0.1750, decode.d8.loss_mask: 0.4499, decode.d8.loss_dice: 0.6060, loss: 14.3991 +2022-06-05 04:52:02,900 - mmseg - INFO - Iter [23500/40000] lr: 3.146e-06, eta: 2:13:16, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1920, decode.loss_mask: 0.4251, decode.loss_dice: 0.6264, decode.d0.loss_cls: 1.6861, decode.d0.loss_mask: 0.4715, decode.d0.loss_dice: 0.7435, decode.d1.loss_cls: 0.3671, decode.d1.loss_mask: 0.4458, decode.d1.loss_dice: 0.6661, decode.d2.loss_cls: 0.2713, decode.d2.loss_mask: 0.4315, decode.d2.loss_dice: 0.6438, decode.d3.loss_cls: 0.2242, decode.d3.loss_mask: 0.4286, decode.d3.loss_dice: 0.6344, decode.d4.loss_cls: 0.2153, decode.d4.loss_mask: 0.4269, decode.d4.loss_dice: 0.6332, decode.d5.loss_cls: 0.2062, 
decode.d5.loss_mask: 0.4258, decode.d5.loss_dice: 0.6305, decode.d6.loss_cls: 0.1999, decode.d6.loss_mask: 0.4263, decode.d6.loss_dice: 0.6262, decode.d7.loss_cls: 0.1962, decode.d7.loss_mask: 0.4262, decode.d7.loss_dice: 0.6261, decode.d8.loss_cls: 0.1902, decode.d8.loss_mask: 0.4255, decode.d8.loss_dice: 0.6257, loss: 14.5376 +2022-06-05 04:52:24,744 - mmseg - INFO - Iter [23550/40000] lr: 3.136e-06, eta: 2:12:50, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1902, decode.loss_mask: 0.4404, decode.loss_dice: 0.6155, decode.d0.loss_cls: 1.6740, decode.d0.loss_mask: 0.4771, decode.d0.loss_dice: 0.7157, decode.d1.loss_cls: 0.3527, decode.d1.loss_mask: 0.4551, decode.d1.loss_dice: 0.6511, decode.d2.loss_cls: 0.2714, decode.d2.loss_mask: 0.4442, decode.d2.loss_dice: 0.6252, decode.d3.loss_cls: 0.2254, decode.d3.loss_mask: 0.4422, decode.d3.loss_dice: 0.6192, decode.d4.loss_cls: 0.2126, decode.d4.loss_mask: 0.4416, decode.d4.loss_dice: 0.6173, decode.d5.loss_cls: 0.2045, decode.d5.loss_mask: 0.4400, decode.d5.loss_dice: 0.6203, decode.d6.loss_cls: 0.1955, decode.d6.loss_mask: 0.4402, decode.d6.loss_dice: 0.6194, decode.d7.loss_cls: 0.1881, decode.d7.loss_mask: 0.4382, decode.d7.loss_dice: 0.6190, decode.d8.loss_cls: 0.1960, decode.d8.loss_mask: 0.4384, decode.d8.loss_dice: 0.6140, loss: 14.4844 +2022-06-05 04:52:46,298 - mmseg - INFO - Iter [23600/40000] lr: 3.127e-06, eta: 2:12:24, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1864, decode.loss_mask: 0.4438, decode.loss_dice: 0.6291, decode.d0.loss_cls: 1.7099, decode.d0.loss_mask: 0.4808, decode.d0.loss_dice: 0.7328, decode.d1.loss_cls: 0.3699, decode.d1.loss_mask: 0.4547, decode.d1.loss_dice: 0.6639, decode.d2.loss_cls: 0.2769, decode.d2.loss_mask: 0.4487, decode.d2.loss_dice: 0.6428, decode.d3.loss_cls: 0.2278, decode.d3.loss_mask: 0.4428, decode.d3.loss_dice: 0.6273, decode.d4.loss_cls: 0.2115, decode.d4.loss_mask: 0.4435, decode.d4.loss_dice: 0.6343, decode.d5.loss_cls: 0.2051, decode.d5.loss_mask: 0.4424, decode.d5.loss_dice: 0.6248, decode.d6.loss_cls: 0.1949, decode.d6.loss_mask: 0.4427, decode.d6.loss_dice: 0.6250, decode.d7.loss_cls: 0.1914, decode.d7.loss_mask: 0.4432, decode.d7.loss_dice: 0.6227, decode.d8.loss_cls: 0.1848, decode.d8.loss_mask: 0.4419, decode.d8.loss_dice: 0.6256, loss: 14.6713 +2022-06-05 04:53:08,244 - mmseg - INFO - Iter [23650/40000] lr: 3.117e-06, eta: 2:11:58, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1898, decode.loss_mask: 0.4547, decode.loss_dice: 0.6489, decode.d0.loss_cls: 1.7119, decode.d0.loss_mask: 0.4948, decode.d0.loss_dice: 0.7542, decode.d1.loss_cls: 0.3745, decode.d1.loss_mask: 0.4663, decode.d1.loss_dice: 0.6864, decode.d2.loss_cls: 0.2668, decode.d2.loss_mask: 0.4562, decode.d2.loss_dice: 0.6674, decode.d3.loss_cls: 0.2306, decode.d3.loss_mask: 0.4546, decode.d3.loss_dice: 0.6527, decode.d4.loss_cls: 0.2171, decode.d4.loss_mask: 0.4545, decode.d4.loss_dice: 0.6535, decode.d5.loss_cls: 0.2065, decode.d5.loss_mask: 0.4546, decode.d5.loss_dice: 0.6534, decode.d6.loss_cls: 0.1997, decode.d6.loss_mask: 0.4520, decode.d6.loss_dice: 0.6493, decode.d7.loss_cls: 0.2022, decode.d7.loss_mask: 0.4512, decode.d7.loss_dice: 0.6469, decode.d8.loss_cls: 0.1942, decode.d8.loss_mask: 0.4527, decode.d8.loss_dice: 0.6484, loss: 15.0461 +2022-06-05 04:53:29,716 - mmseg - INFO - Iter [23700/40000] lr: 3.108e-06, eta: 2:11:32, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2013, decode.loss_mask: 0.4645, decode.loss_dice: 0.6157, 
decode.d0.loss_cls: 1.6493, decode.d0.loss_mask: 0.5009, decode.d0.loss_dice: 0.7132, decode.d1.loss_cls: 0.3636, decode.d1.loss_mask: 0.4823, decode.d1.loss_dice: 0.6570, decode.d2.loss_cls: 0.2604, decode.d2.loss_mask: 0.4703, decode.d2.loss_dice: 0.6296, decode.d3.loss_cls: 0.2300, decode.d3.loss_mask: 0.4643, decode.d3.loss_dice: 0.6157, decode.d4.loss_cls: 0.2204, decode.d4.loss_mask: 0.4632, decode.d4.loss_dice: 0.6183, decode.d5.loss_cls: 0.2101, decode.d5.loss_mask: 0.4629, decode.d5.loss_dice: 0.6142, decode.d6.loss_cls: 0.2025, decode.d6.loss_mask: 0.4627, decode.d6.loss_dice: 0.6152, decode.d7.loss_cls: 0.2022, decode.d7.loss_mask: 0.4619, decode.d7.loss_dice: 0.6159, decode.d8.loss_cls: 0.1989, decode.d8.loss_mask: 0.4641, decode.d8.loss_dice: 0.6162, loss: 14.7466 +2022-06-05 04:53:53,781 - mmseg - INFO - Iter [23750/40000] lr: 3.098e-06, eta: 2:11:08, time: 0.481, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1944, decode.loss_mask: 0.4528, decode.loss_dice: 0.6387, decode.d0.loss_cls: 1.7123, decode.d0.loss_mask: 0.4915, decode.d0.loss_dice: 0.7417, decode.d1.loss_cls: 0.3621, decode.d1.loss_mask: 0.4708, decode.d1.loss_dice: 0.6794, decode.d2.loss_cls: 0.2704, decode.d2.loss_mask: 0.4591, decode.d2.loss_dice: 0.6534, decode.d3.loss_cls: 0.2273, decode.d3.loss_mask: 0.4551, decode.d3.loss_dice: 0.6459, decode.d4.loss_cls: 0.2192, decode.d4.loss_mask: 0.4533, decode.d4.loss_dice: 0.6414, decode.d5.loss_cls: 0.2145, decode.d5.loss_mask: 0.4543, decode.d5.loss_dice: 0.6440, decode.d6.loss_cls: 0.2053, decode.d6.loss_mask: 0.4545, decode.d6.loss_dice: 0.6401, decode.d7.loss_cls: 0.2065, decode.d7.loss_mask: 0.4528, decode.d7.loss_dice: 0.6398, decode.d8.loss_cls: 0.2049, decode.d8.loss_mask: 0.4518, decode.d8.loss_dice: 0.6373, loss: 14.9745 +2022-06-05 04:54:15,598 - mmseg - INFO - Iter [23800/40000] lr: 3.089e-06, eta: 2:10:42, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1778, decode.loss_mask: 0.4484, decode.loss_dice: 0.6248, decode.d0.loss_cls: 1.6714, decode.d0.loss_mask: 0.4872, decode.d0.loss_dice: 0.7260, decode.d1.loss_cls: 0.3323, decode.d1.loss_mask: 0.4666, decode.d1.loss_dice: 0.6690, decode.d2.loss_cls: 0.2562, decode.d2.loss_mask: 0.4529, decode.d2.loss_dice: 0.6406, decode.d3.loss_cls: 0.2000, decode.d3.loss_mask: 0.4504, decode.d3.loss_dice: 0.6335, decode.d4.loss_cls: 0.1952, decode.d4.loss_mask: 0.4500, decode.d4.loss_dice: 0.6342, decode.d5.loss_cls: 0.1834, decode.d5.loss_mask: 0.4499, decode.d5.loss_dice: 0.6309, decode.d6.loss_cls: 0.1800, decode.d6.loss_mask: 0.4480, decode.d6.loss_dice: 0.6267, decode.d7.loss_cls: 0.1818, decode.d7.loss_mask: 0.4486, decode.d7.loss_dice: 0.6290, decode.d8.loss_cls: 0.1763, decode.d8.loss_mask: 0.4486, decode.d8.loss_dice: 0.6343, loss: 14.5540 +2022-06-05 04:54:37,293 - mmseg - INFO - Iter [23850/40000] lr: 3.079e-06, eta: 2:10:16, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1792, decode.loss_mask: 0.4280, decode.loss_dice: 0.5907, decode.d0.loss_cls: 1.6697, decode.d0.loss_mask: 0.4689, decode.d0.loss_dice: 0.7018, decode.d1.loss_cls: 0.3434, decode.d1.loss_mask: 0.4473, decode.d1.loss_dice: 0.6333, decode.d2.loss_cls: 0.2529, decode.d2.loss_mask: 0.4351, decode.d2.loss_dice: 0.6039, decode.d3.loss_cls: 0.2097, decode.d3.loss_mask: 0.4307, decode.d3.loss_dice: 0.5940, decode.d4.loss_cls: 0.2015, decode.d4.loss_mask: 0.4271, decode.d4.loss_dice: 0.5949, decode.d5.loss_cls: 0.1884, decode.d5.loss_mask: 0.4270, decode.d5.loss_dice: 0.5940, decode.d6.loss_cls: 0.1811, 
decode.d6.loss_mask: 0.4265, decode.d6.loss_dice: 0.5893, decode.d7.loss_cls: 0.1817, decode.d7.loss_mask: 0.4269, decode.d7.loss_dice: 0.5928, decode.d8.loss_cls: 0.1787, decode.d8.loss_mask: 0.4270, decode.d8.loss_dice: 0.5912, loss: 14.0171 +2022-06-05 04:54:58,909 - mmseg - INFO - Iter [23900/40000] lr: 3.069e-06, eta: 2:09:50, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1792, decode.loss_mask: 0.4384, decode.loss_dice: 0.6002, decode.d0.loss_cls: 1.6704, decode.d0.loss_mask: 0.4788, decode.d0.loss_dice: 0.7007, decode.d1.loss_cls: 0.3416, decode.d1.loss_mask: 0.4577, decode.d1.loss_dice: 0.6389, decode.d2.loss_cls: 0.2464, decode.d2.loss_mask: 0.4450, decode.d2.loss_dice: 0.6160, decode.d3.loss_cls: 0.2105, decode.d3.loss_mask: 0.4407, decode.d3.loss_dice: 0.6061, decode.d4.loss_cls: 0.2017, decode.d4.loss_mask: 0.4393, decode.d4.loss_dice: 0.6044, decode.d5.loss_cls: 0.1833, decode.d5.loss_mask: 0.4406, decode.d5.loss_dice: 0.6075, decode.d6.loss_cls: 0.1833, decode.d6.loss_mask: 0.4382, decode.d6.loss_dice: 0.6009, decode.d7.loss_cls: 0.1804, decode.d7.loss_mask: 0.4380, decode.d7.loss_dice: 0.5992, decode.d8.loss_cls: 0.1785, decode.d8.loss_mask: 0.4379, decode.d8.loss_dice: 0.6024, loss: 14.2065 +2022-06-05 04:55:20,535 - mmseg - INFO - Iter [23950/40000] lr: 3.060e-06, eta: 2:09:24, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1921, decode.loss_mask: 0.4503, decode.loss_dice: 0.6450, decode.d0.loss_cls: 1.6668, decode.d0.loss_mask: 0.4902, decode.d0.loss_dice: 0.7416, decode.d1.loss_cls: 0.3757, decode.d1.loss_mask: 0.4646, decode.d1.loss_dice: 0.6783, decode.d2.loss_cls: 0.2749, decode.d2.loss_mask: 0.4547, decode.d2.loss_dice: 0.6575, decode.d3.loss_cls: 0.2306, decode.d3.loss_mask: 0.4523, decode.d3.loss_dice: 0.6489, decode.d4.loss_cls: 0.2208, decode.d4.loss_mask: 0.4523, decode.d4.loss_dice: 0.6458, decode.d5.loss_cls: 0.2172, decode.d5.loss_mask: 0.4500, decode.d5.loss_dice: 0.6443, decode.d6.loss_cls: 0.1997, decode.d6.loss_mask: 0.4517, decode.d6.loss_dice: 0.6456, decode.d7.loss_cls: 0.2070, decode.d7.loss_mask: 0.4503, decode.d7.loss_dice: 0.6440, decode.d8.loss_cls: 0.1967, decode.d8.loss_mask: 0.4519, decode.d8.loss_dice: 0.6464, loss: 14.9473 +2022-06-05 04:55:42,371 - mmseg - INFO - Saving checkpoint at 24000 iterations +2022-06-05 04:55:45,327 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:55:45,328 - mmseg - INFO - Iter [24000/40000] lr: 3.050e-06, eta: 2:09:01, time: 0.496, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2038, decode.loss_mask: 0.4436, decode.loss_dice: 0.6196, decode.d0.loss_cls: 1.6737, decode.d0.loss_mask: 0.4792, decode.d0.loss_dice: 0.7296, decode.d1.loss_cls: 0.3675, decode.d1.loss_mask: 0.4646, decode.d1.loss_dice: 0.6641, decode.d2.loss_cls: 0.2679, decode.d2.loss_mask: 0.4518, decode.d2.loss_dice: 0.6357, decode.d3.loss_cls: 0.2416, decode.d3.loss_mask: 0.4458, decode.d3.loss_dice: 0.6286, decode.d4.loss_cls: 0.2297, decode.d4.loss_mask: 0.4470, decode.d4.loss_dice: 0.6277, decode.d5.loss_cls: 0.2222, decode.d5.loss_mask: 0.4463, decode.d5.loss_dice: 0.6247, decode.d6.loss_cls: 0.2164, decode.d6.loss_mask: 0.4440, decode.d6.loss_dice: 0.6239, decode.d7.loss_cls: 0.2110, decode.d7.loss_mask: 0.4440, decode.d7.loss_dice: 0.6237, decode.d8.loss_cls: 0.2123, decode.d8.loss_mask: 0.4424, decode.d8.loss_dice: 0.6254, loss: 14.7578 +2022-06-05 04:58:24,441 - mmseg - INFO - per class results: +2022-06-05 04:58:24,448 - mmseg - INFO - 
++-------------+-------+-------+
+| Class       | IoU   | Acc   |
++-------------+-------+-------+
+| aeroplane   | 91.13 | 94.36 |
+| bag         | 38.26 | 48.8  |
+| bed         | 31.92 | 58.07 |
+| bedclothes  | 40.69 | 59.27 |
+| bench       | 18.25 | 20.99 |
+| bicycle     | 83.85 | 92.15 |
+| bird        | 94.48 | 97.22 |
+| boat        | 84.57 | 90.56 |
+| book        | 50.93 | 62.82 |
+| bottle      | 87.62 | 95.39 |
+| building    | 65.62 | 79.53 |
+| bus         | 93.12 | 97.12 |
+| cabinet     | 42.77 | 67.37 |
+| car         | 91.79 | 95.23 |
+| cat         | 93.73 | 98.04 |
+| ceiling     | 57.94 | 70.99 |
+| chair       | 60.21 | 79.9  |
+| cloth       | 23.71 | 32.16 |
+| computer    | 37.98 | 54.0  |
+| cow         | 94.8  | 96.98 |
+| cup         | 44.02 | 56.64 |
+| curtain     | 56.0  | 66.88 |
+| dog         | 91.74 | 97.03 |
+| door        | 31.94 | 43.62 |
+| fence       | 43.71 | 56.39 |
+| floor       | 73.38 | 86.76 |
+| flower      | 38.37 | 59.23 |
+| food        | 38.02 | 46.19 |
+| grass       | 82.05 | 91.8  |
+| ground      | 56.62 | 71.49 |
+| horse       | 93.88 | 97.32 |
+| keyboard    | 78.28 | 82.38 |
+| light       | 58.3  | 70.93 |
+| motorbike   | 89.18 | 95.56 |
+| mountain    | 55.68 | 72.23 |
+| mouse       | 75.03 | 85.13 |
+| person      | 90.2  | 96.37 |
+| plate       | 26.84 | 33.93 |
+| platform    | 54.91 | 68.41 |
+| pottedplant | 79.44 | 87.33 |
+| road        | 53.08 | 68.26 |
+| rock        | 52.08 | 63.98 |
+| sheep       | 94.41 | 97.06 |
+| shelves     | 36.51 | 47.17 |
+| sidewalk    | 27.76 | 48.63 |
+| sign        | 45.13 | 57.49 |
+| sky         | 94.97 | 97.36 |
+| snow        | 74.53 | 88.05 |
+| sofa        | 57.2  | 63.17 |
+| table       | 69.81 | 82.42 |
+| track       | 69.55 | 81.83 |
+| train       | 92.74 | 95.63 |
+| tree        | 81.0  | 89.23 |
+| truck       | 38.33 | 47.03 |
+| tvmonitor   | 87.43 | 93.51 |
+| wall        | 70.29 | 84.26 |
+| water       | 91.42 | 96.13 |
+| window      | 45.32 | 58.0  |
+| wood        | 29.37 | 44.13 |
++-------------+-------+-------+
+2022-06-05 04:58:24,448 - mmseg - INFO - Summary:
+2022-06-05 04:58:24,448 - mmseg - INFO - 
++-------+-------+------+
+| aAcc  | mIoU  | mAcc |
++-------+-------+------+
+| 85.37 | 63.59 | 73.9 |
++-------+-------+------+
+2022-06-05 04:58:24,450 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_20000.pth was removed
+2022-06-05 04:58:27,426 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_24000.pth.
+2022-06-05 04:58:27,426 - mmseg - INFO - Best mIoU is 0.6359 at 24000 iter.
+2022-06-05 04:58:27,447 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 04:58:27,447 - mmseg - INFO - Iter(val) [638] aAcc: 0.8537, mIoU: 0.6359, mAcc: 0.7390, IoU.aeroplane: 0.9113, IoU.bag: 0.3826, IoU.bed: 0.3192, IoU.bedclothes: 0.4069, IoU.bench: 0.1825, IoU.bicycle: 0.8385, IoU.bird: 0.9448, IoU.boat: 0.8457, IoU.book: 0.5093, IoU.bottle: 0.8762, IoU.building: 0.6562, IoU.bus: 0.9312, IoU.cabinet: 0.4277, IoU.car: 0.9179, IoU.cat: 0.9373, IoU.ceiling: 0.5794, IoU.chair: 0.6021, IoU.cloth: 0.2371, IoU.computer: 0.3798, IoU.cow: 0.9480, IoU.cup: 0.4402, IoU.curtain: 0.5600, IoU.dog: 0.9174, IoU.door: 0.3194, IoU.fence: 0.4371, IoU.floor: 0.7338, IoU.flower: 0.3837, IoU.food: 0.3802, IoU.grass: 0.8205, IoU.ground: 0.5662, IoU.horse: 0.9388, IoU.keyboard: 0.7828, IoU.light: 0.5830, IoU.motorbike: 0.8918, IoU.mountain: 0.5568, IoU.mouse: 0.7503, IoU.person: 0.9020, IoU.plate: 0.2684, IoU.platform: 0.5491, IoU.pottedplant: 0.7944, IoU.road: 0.5308, IoU.rock: 0.5208, IoU.sheep: 0.9441, IoU.shelves: 0.3651, IoU.sidewalk: 0.2776, IoU.sign: 0.4513, IoU.sky: 0.9497, IoU.snow: 0.7453, IoU.sofa: 0.5720, IoU.table: 0.6981, IoU.track: 0.6955, IoU.train: 0.9274, IoU.tree: 0.8100, IoU.truck: 0.3833, IoU.tvmonitor: 0.8743, IoU.wall: 0.7029, IoU.water: 0.9142, IoU.window: 0.4532, IoU.wood: 0.2937, Acc.aeroplane: 0.9436, Acc.bag: 0.4880, Acc.bed: 0.5807, Acc.bedclothes: 0.5927, Acc.bench: 0.2099, Acc.bicycle: 0.9215, Acc.bird: 0.9722, Acc.boat: 0.9056, Acc.book: 0.6282, Acc.bottle: 0.9539, Acc.building: 0.7953, Acc.bus: 0.9712, Acc.cabinet: 0.6737, Acc.car: 0.9523, Acc.cat: 0.9804, Acc.ceiling: 0.7099, Acc.chair: 0.7990, Acc.cloth: 0.3216, Acc.computer: 0.5400, Acc.cow: 0.9698, Acc.cup: 0.5664, Acc.curtain: 0.6688, Acc.dog: 0.9703, Acc.door: 0.4362, Acc.fence: 0.5639, Acc.floor: 0.8676, Acc.flower: 0.5923, Acc.food: 0.4619, Acc.grass: 0.9180, Acc.ground: 0.7149, Acc.horse: 0.9732, Acc.keyboard: 0.8238, Acc.light: 0.7093, Acc.motorbike: 0.9556, Acc.mountain: 0.7223, Acc.mouse: 0.8513, Acc.person: 0.9637, Acc.plate: 0.3393, Acc.platform: 0.6841, Acc.pottedplant: 0.8733, Acc.road: 0.6826, Acc.rock: 0.6398, Acc.sheep: 0.9706, Acc.shelves: 0.4717, Acc.sidewalk: 0.4863, Acc.sign: 0.5749, Acc.sky: 0.9736, Acc.snow: 0.8805, Acc.sofa: 0.6317, Acc.table: 0.8242, Acc.track: 0.8183, Acc.train: 0.9563, Acc.tree: 0.8923, Acc.truck: 0.4703, Acc.tvmonitor: 0.9351, Acc.wall: 0.8426, Acc.water: 0.9613, Acc.window: 0.5800, Acc.wood: 0.4413 +2022-06-05 04:58:52,459 - mmseg - INFO - Iter [24050/40000] lr: 3.041e-06, eta: 2:10:24, time: 3.742, data_time: 3.299, memory: 31652, decode.loss_cls: 0.1823, decode.loss_mask: 0.4422, decode.loss_dice: 0.6031, decode.d0.loss_cls: 1.6261, decode.d0.loss_mask: 0.4837, decode.d0.loss_dice: 0.7028, decode.d1.loss_cls: 0.3330, decode.d1.loss_mask: 0.4610, decode.d1.loss_dice: 0.6337, decode.d2.loss_cls: 0.2449, decode.d2.loss_mask: 0.4488, decode.d2.loss_dice: 0.6170, decode.d3.loss_cls: 0.2036, decode.d3.loss_mask: 0.4450, decode.d3.loss_dice: 0.6084, decode.d4.loss_cls: 0.1992, decode.d4.loss_mask: 0.4452, decode.d4.loss_dice: 0.6076, decode.d5.loss_cls: 0.1873, decode.d5.loss_mask: 0.4440, decode.d5.loss_dice: 0.6044, decode.d6.loss_cls: 0.1838, decode.d6.loss_mask: 0.4420, decode.d6.loss_dice: 0.6033, decode.d7.loss_cls: 0.1814, decode.d7.loss_mask: 0.4417, decode.d7.loss_dice: 0.6026, decode.d8.loss_cls: 0.1762, decode.d8.loss_mask: 0.4418, decode.d8.loss_dice: 0.6054, loss: 14.2014 +2022-06-05 04:59:14,609 - mmseg - INFO - Iter 
[24100/40000] lr: 3.031e-06, eta: 2:09:58, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1950, decode.loss_mask: 0.4382, decode.loss_dice: 0.6114, decode.d0.loss_cls: 1.6746, decode.d0.loss_mask: 0.4742, decode.d0.loss_dice: 0.7062, decode.d1.loss_cls: 0.3469, decode.d1.loss_mask: 0.4562, decode.d1.loss_dice: 0.6395, decode.d2.loss_cls: 0.2599, decode.d2.loss_mask: 0.4425, decode.d2.loss_dice: 0.6214, decode.d3.loss_cls: 0.2243, decode.d3.loss_mask: 0.4403, decode.d3.loss_dice: 0.6131, decode.d4.loss_cls: 0.2085, decode.d4.loss_mask: 0.4390, decode.d4.loss_dice: 0.6144, decode.d5.loss_cls: 0.2061, decode.d5.loss_mask: 0.4390, decode.d5.loss_dice: 0.6183, decode.d6.loss_cls: 0.1945, decode.d6.loss_mask: 0.4387, decode.d6.loss_dice: 0.6142, decode.d7.loss_cls: 0.1989, decode.d7.loss_mask: 0.4383, decode.d7.loss_dice: 0.6124, decode.d8.loss_cls: 0.1924, decode.d8.loss_mask: 0.4373, decode.d8.loss_dice: 0.6062, loss: 14.4016 +2022-06-05 04:59:36,758 - mmseg - INFO - Iter [24150/40000] lr: 3.022e-06, eta: 2:09:32, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2070, decode.loss_mask: 0.4392, decode.loss_dice: 0.6199, decode.d0.loss_cls: 1.6776, decode.d0.loss_mask: 0.4780, decode.d0.loss_dice: 0.7256, decode.d1.loss_cls: 0.3561, decode.d1.loss_mask: 0.4562, decode.d1.loss_dice: 0.6574, decode.d2.loss_cls: 0.2621, decode.d2.loss_mask: 0.4440, decode.d2.loss_dice: 0.6320, decode.d3.loss_cls: 0.2302, decode.d3.loss_mask: 0.4388, decode.d3.loss_dice: 0.6217, decode.d4.loss_cls: 0.2198, decode.d4.loss_mask: 0.4362, decode.d4.loss_dice: 0.6205, decode.d5.loss_cls: 0.2128, decode.d5.loss_mask: 0.4379, decode.d5.loss_dice: 0.6209, decode.d6.loss_cls: 0.2065, decode.d6.loss_mask: 0.4377, decode.d6.loss_dice: 0.6199, decode.d7.loss_cls: 0.2042, decode.d7.loss_mask: 0.4388, decode.d7.loss_dice: 0.6215, decode.d8.loss_cls: 0.2055, decode.d8.loss_mask: 0.4385, decode.d8.loss_dice: 0.6232, loss: 14.5896 +2022-06-05 04:59:59,045 - mmseg - INFO - Iter [24200/40000] lr: 3.012e-06, eta: 2:09:06, time: 0.445, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1997, decode.loss_mask: 0.4493, decode.loss_dice: 0.6283, decode.d0.loss_cls: 1.7129, decode.d0.loss_mask: 0.4906, decode.d0.loss_dice: 0.7535, decode.d1.loss_cls: 0.3781, decode.d1.loss_mask: 0.4672, decode.d1.loss_dice: 0.6736, decode.d2.loss_cls: 0.2663, decode.d2.loss_mask: 0.4539, decode.d2.loss_dice: 0.6462, decode.d3.loss_cls: 0.2364, decode.d3.loss_mask: 0.4514, decode.d3.loss_dice: 0.6335, decode.d4.loss_cls: 0.2233, decode.d4.loss_mask: 0.4516, decode.d4.loss_dice: 0.6338, decode.d5.loss_cls: 0.2080, decode.d5.loss_mask: 0.4508, decode.d5.loss_dice: 0.6353, decode.d6.loss_cls: 0.2054, decode.d6.loss_mask: 0.4469, decode.d6.loss_dice: 0.6305, decode.d7.loss_cls: 0.2032, decode.d7.loss_mask: 0.4485, decode.d7.loss_dice: 0.6352, decode.d8.loss_cls: 0.2022, decode.d8.loss_mask: 0.4485, decode.d8.loss_dice: 0.6312, loss: 14.8954 +2022-06-05 05:00:21,620 - mmseg - INFO - Iter [24250/40000] lr: 3.003e-06, eta: 2:08:40, time: 0.452, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1953, decode.loss_mask: 0.4436, decode.loss_dice: 0.6384, decode.d0.loss_cls: 1.6667, decode.d0.loss_mask: 0.4894, decode.d0.loss_dice: 0.7398, decode.d1.loss_cls: 0.3581, decode.d1.loss_mask: 0.4650, decode.d1.loss_dice: 0.6736, decode.d2.loss_cls: 0.2718, decode.d2.loss_mask: 0.4523, decode.d2.loss_dice: 0.6461, decode.d3.loss_cls: 0.2312, decode.d3.loss_mask: 0.4489, decode.d3.loss_dice: 0.6325, decode.d4.loss_cls: 0.2159, 
decode.d4.loss_mask: 0.4470, decode.d4.loss_dice: 0.6372, decode.d5.loss_cls: 0.2054, decode.d5.loss_mask: 0.4476, decode.d5.loss_dice: 0.6401, decode.d6.loss_cls: 0.1974, decode.d6.loss_mask: 0.4454, decode.d6.loss_dice: 0.6309, decode.d7.loss_cls: 0.1974, decode.d7.loss_mask: 0.4450, decode.d7.loss_dice: 0.6352, decode.d8.loss_cls: 0.1909, decode.d8.loss_mask: 0.4462, decode.d8.loss_dice: 0.6364, loss: 14.7707 +2022-06-05 05:00:44,057 - mmseg - INFO - Iter [24300/40000] lr: 2.993e-06, eta: 2:08:15, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2112, decode.loss_mask: 0.4465, decode.loss_dice: 0.6285, decode.d0.loss_cls: 1.6843, decode.d0.loss_mask: 0.4928, decode.d0.loss_dice: 0.7351, decode.d1.loss_cls: 0.3688, decode.d1.loss_mask: 0.4717, decode.d1.loss_dice: 0.6699, decode.d2.loss_cls: 0.2758, decode.d2.loss_mask: 0.4586, decode.d2.loss_dice: 0.6481, decode.d3.loss_cls: 0.2387, decode.d3.loss_mask: 0.4511, decode.d3.loss_dice: 0.6353, decode.d4.loss_cls: 0.2311, decode.d4.loss_mask: 0.4487, decode.d4.loss_dice: 0.6325, decode.d5.loss_cls: 0.2264, decode.d5.loss_mask: 0.4473, decode.d5.loss_dice: 0.6270, decode.d6.loss_cls: 0.2132, decode.d6.loss_mask: 0.4474, decode.d6.loss_dice: 0.6250, decode.d7.loss_cls: 0.2119, decode.d7.loss_mask: 0.4457, decode.d7.loss_dice: 0.6224, decode.d8.loss_cls: 0.2087, decode.d8.loss_mask: 0.4470, decode.d8.loss_dice: 0.6223, loss: 14.8731 +2022-06-05 05:01:09,259 - mmseg - INFO - Iter [24350/40000] lr: 2.984e-06, eta: 2:07:51, time: 0.504, data_time: 0.061, memory: 31652, decode.loss_cls: 0.1819, decode.loss_mask: 0.4462, decode.loss_dice: 0.6165, decode.d0.loss_cls: 1.6291, decode.d0.loss_mask: 0.4891, decode.d0.loss_dice: 0.7244, decode.d1.loss_cls: 0.3600, decode.d1.loss_mask: 0.4665, decode.d1.loss_dice: 0.6559, decode.d2.loss_cls: 0.2526, decode.d2.loss_mask: 0.4538, decode.d2.loss_dice: 0.6300, decode.d3.loss_cls: 0.2138, decode.d3.loss_mask: 0.4512, decode.d3.loss_dice: 0.6203, decode.d4.loss_cls: 0.2025, decode.d4.loss_mask: 0.4478, decode.d4.loss_dice: 0.6215, decode.d5.loss_cls: 0.1977, decode.d5.loss_mask: 0.4486, decode.d5.loss_dice: 0.6208, decode.d6.loss_cls: 0.1891, decode.d6.loss_mask: 0.4473, decode.d6.loss_dice: 0.6138, decode.d7.loss_cls: 0.1831, decode.d7.loss_mask: 0.4462, decode.d7.loss_dice: 0.6191, decode.d8.loss_cls: 0.1798, decode.d8.loss_mask: 0.4475, decode.d8.loss_dice: 0.6187, loss: 14.4749 +2022-06-05 05:01:30,549 - mmseg - INFO - Iter [24400/40000] lr: 2.974e-06, eta: 2:07:24, time: 0.426, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1782, decode.loss_mask: 0.4427, decode.loss_dice: 0.6124, decode.d0.loss_cls: 1.6031, decode.d0.loss_mask: 0.4888, decode.d0.loss_dice: 0.7270, decode.d1.loss_cls: 0.3345, decode.d1.loss_mask: 0.4639, decode.d1.loss_dice: 0.6592, decode.d2.loss_cls: 0.2496, decode.d2.loss_mask: 0.4473, decode.d2.loss_dice: 0.6302, decode.d3.loss_cls: 0.2104, decode.d3.loss_mask: 0.4443, decode.d3.loss_dice: 0.6159, decode.d4.loss_cls: 0.1980, decode.d4.loss_mask: 0.4439, decode.d4.loss_dice: 0.6208, decode.d5.loss_cls: 0.1907, decode.d5.loss_mask: 0.4420, decode.d5.loss_dice: 0.6156, decode.d6.loss_cls: 0.1824, decode.d6.loss_mask: 0.4428, decode.d6.loss_dice: 0.6127, decode.d7.loss_cls: 0.1810, decode.d7.loss_mask: 0.4424, decode.d7.loss_dice: 0.6107, decode.d8.loss_cls: 0.1777, decode.d8.loss_mask: 0.4427, decode.d8.loss_dice: 0.6132, loss: 14.3241 +2022-06-05 05:01:52,960 - mmseg - INFO - Iter [24450/40000] lr: 2.965e-06, eta: 2:06:58, time: 0.448, data_time: 0.007, memory: 31652, 
decode.loss_cls: 0.1833, decode.loss_mask: 0.4327, decode.loss_dice: 0.6150, decode.d0.loss_cls: 1.6528, decode.d0.loss_mask: 0.4703, decode.d0.loss_dice: 0.7163, decode.d1.loss_cls: 0.3469, decode.d1.loss_mask: 0.4475, decode.d1.loss_dice: 0.6507, decode.d2.loss_cls: 0.2636, decode.d2.loss_mask: 0.4375, decode.d2.loss_dice: 0.6271, decode.d3.loss_cls: 0.2154, decode.d3.loss_mask: 0.4359, decode.d3.loss_dice: 0.6198, decode.d4.loss_cls: 0.2022, decode.d4.loss_mask: 0.4347, decode.d4.loss_dice: 0.6179, decode.d5.loss_cls: 0.1963, decode.d5.loss_mask: 0.4332, decode.d5.loss_dice: 0.6156, decode.d6.loss_cls: 0.1909, decode.d6.loss_mask: 0.4331, decode.d6.loss_dice: 0.6128, decode.d7.loss_cls: 0.1830, decode.d7.loss_mask: 0.4323, decode.d7.loss_dice: 0.6144, decode.d8.loss_cls: 0.1873, decode.d8.loss_mask: 0.4313, decode.d8.loss_dice: 0.6186, loss: 14.3183 +2022-06-05 05:02:14,776 - mmseg - INFO - Iter [24500/40000] lr: 2.955e-06, eta: 2:06:32, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1951, decode.loss_mask: 0.4372, decode.loss_dice: 0.6303, decode.d0.loss_cls: 1.7122, decode.d0.loss_mask: 0.4752, decode.d0.loss_dice: 0.7328, decode.d1.loss_cls: 0.3597, decode.d1.loss_mask: 0.4543, decode.d1.loss_dice: 0.6695, decode.d2.loss_cls: 0.2605, decode.d2.loss_mask: 0.4415, decode.d2.loss_dice: 0.6446, decode.d3.loss_cls: 0.2248, decode.d3.loss_mask: 0.4391, decode.d3.loss_dice: 0.6340, decode.d4.loss_cls: 0.2066, decode.d4.loss_mask: 0.4371, decode.d4.loss_dice: 0.6382, decode.d5.loss_cls: 0.1991, decode.d5.loss_mask: 0.4365, decode.d5.loss_dice: 0.6333, decode.d6.loss_cls: 0.1960, decode.d6.loss_mask: 0.4371, decode.d6.loss_dice: 0.6310, decode.d7.loss_cls: 0.1885, decode.d7.loss_mask: 0.4352, decode.d7.loss_dice: 0.6309, decode.d8.loss_cls: 0.1942, decode.d8.loss_mask: 0.4352, decode.d8.loss_dice: 0.6333, loss: 14.6430 +2022-06-05 05:02:37,213 - mmseg - INFO - Iter [24550/40000] lr: 2.946e-06, eta: 2:06:06, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2228, decode.loss_mask: 0.4446, decode.loss_dice: 0.6368, decode.d0.loss_cls: 1.7731, decode.d0.loss_mask: 0.4836, decode.d0.loss_dice: 0.7539, decode.d1.loss_cls: 0.4097, decode.d1.loss_mask: 0.4631, decode.d1.loss_dice: 0.6753, decode.d2.loss_cls: 0.3055, decode.d2.loss_mask: 0.4533, decode.d2.loss_dice: 0.6541, decode.d3.loss_cls: 0.2532, decode.d3.loss_mask: 0.4491, decode.d3.loss_dice: 0.6390, decode.d4.loss_cls: 0.2439, decode.d4.loss_mask: 0.4474, decode.d4.loss_dice: 0.6425, decode.d5.loss_cls: 0.2423, decode.d5.loss_mask: 0.4443, decode.d5.loss_dice: 0.6436, decode.d6.loss_cls: 0.2261, decode.d6.loss_mask: 0.4469, decode.d6.loss_dice: 0.6387, decode.d7.loss_cls: 0.2275, decode.d7.loss_mask: 0.4451, decode.d7.loss_dice: 0.6333, decode.d8.loss_cls: 0.2247, decode.d8.loss_mask: 0.4437, decode.d8.loss_dice: 0.6382, loss: 15.2052 +2022-06-05 05:02:59,307 - mmseg - INFO - Iter [24600/40000] lr: 2.936e-06, eta: 2:05:40, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1764, decode.loss_mask: 0.4439, decode.loss_dice: 0.6209, decode.d0.loss_cls: 1.6201, decode.d0.loss_mask: 0.4875, decode.d0.loss_dice: 0.7223, decode.d1.loss_cls: 0.3345, decode.d1.loss_mask: 0.4642, decode.d1.loss_dice: 0.6580, decode.d2.loss_cls: 0.2453, decode.d2.loss_mask: 0.4511, decode.d2.loss_dice: 0.6371, decode.d3.loss_cls: 0.2141, decode.d3.loss_mask: 0.4488, decode.d3.loss_dice: 0.6224, decode.d4.loss_cls: 0.1991, decode.d4.loss_mask: 0.4459, decode.d4.loss_dice: 0.6232, decode.d5.loss_cls: 0.1878, 
decode.d5.loss_mask: 0.4486, decode.d5.loss_dice: 0.6254, decode.d6.loss_cls: 0.1778, decode.d6.loss_mask: 0.4478, decode.d6.loss_dice: 0.6246, decode.d7.loss_cls: 0.1827, decode.d7.loss_mask: 0.4452, decode.d7.loss_dice: 0.6206, decode.d8.loss_cls: 0.1777, decode.d8.loss_mask: 0.4456, decode.d8.loss_dice: 0.6274, loss: 14.4259 +2022-06-05 05:03:23,556 - mmseg - INFO - Iter [24650/40000] lr: 2.927e-06, eta: 2:05:16, time: 0.485, data_time: 0.055, memory: 31652, decode.loss_cls: 0.1882, decode.loss_mask: 0.4511, decode.loss_dice: 0.6080, decode.d0.loss_cls: 1.6330, decode.d0.loss_mask: 0.4914, decode.d0.loss_dice: 0.7096, decode.d1.loss_cls: 0.3328, decode.d1.loss_mask: 0.4679, decode.d1.loss_dice: 0.6473, decode.d2.loss_cls: 0.2483, decode.d2.loss_mask: 0.4582, decode.d2.loss_dice: 0.6258, decode.d3.loss_cls: 0.2187, decode.d3.loss_mask: 0.4552, decode.d3.loss_dice: 0.6107, decode.d4.loss_cls: 0.2047, decode.d4.loss_mask: 0.4526, decode.d4.loss_dice: 0.6126, decode.d5.loss_cls: 0.1946, decode.d5.loss_mask: 0.4527, decode.d5.loss_dice: 0.6130, decode.d6.loss_cls: 0.1906, decode.d6.loss_mask: 0.4515, decode.d6.loss_dice: 0.6113, decode.d7.loss_cls: 0.1879, decode.d7.loss_mask: 0.4513, decode.d7.loss_dice: 0.6096, decode.d8.loss_cls: 0.1870, decode.d8.loss_mask: 0.4526, decode.d8.loss_dice: 0.6100, loss: 14.4282 +2022-06-05 05:03:45,552 - mmseg - INFO - Iter [24700/40000] lr: 2.917e-06, eta: 2:04:49, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1772, decode.loss_mask: 0.4344, decode.loss_dice: 0.6199, decode.d0.loss_cls: 1.6612, decode.d0.loss_mask: 0.4761, decode.d0.loss_dice: 0.7211, decode.d1.loss_cls: 0.3497, decode.d1.loss_mask: 0.4586, decode.d1.loss_dice: 0.6579, decode.d2.loss_cls: 0.2524, decode.d2.loss_mask: 0.4420, decode.d2.loss_dice: 0.6329, decode.d3.loss_cls: 0.2047, decode.d3.loss_mask: 0.4382, decode.d3.loss_dice: 0.6222, decode.d4.loss_cls: 0.2004, decode.d4.loss_mask: 0.4372, decode.d4.loss_dice: 0.6251, decode.d5.loss_cls: 0.1848, decode.d5.loss_mask: 0.4400, decode.d5.loss_dice: 0.6260, decode.d6.loss_cls: 0.1821, decode.d6.loss_mask: 0.4360, decode.d6.loss_dice: 0.6248, decode.d7.loss_cls: 0.1792, decode.d7.loss_mask: 0.4374, decode.d7.loss_dice: 0.6275, decode.d8.loss_cls: 0.1801, decode.d8.loss_mask: 0.4351, decode.d8.loss_dice: 0.6231, loss: 14.3875 +2022-06-05 05:04:07,401 - mmseg - INFO - Iter [24750/40000] lr: 2.907e-06, eta: 2:04:23, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1744, decode.loss_mask: 0.4370, decode.loss_dice: 0.6140, decode.d0.loss_cls: 1.6713, decode.d0.loss_mask: 0.4802, decode.d0.loss_dice: 0.7145, decode.d1.loss_cls: 0.3418, decode.d1.loss_mask: 0.4538, decode.d1.loss_dice: 0.6473, decode.d2.loss_cls: 0.2460, decode.d2.loss_mask: 0.4433, decode.d2.loss_dice: 0.6215, decode.d3.loss_cls: 0.2066, decode.d3.loss_mask: 0.4417, decode.d3.loss_dice: 0.6151, decode.d4.loss_cls: 0.1972, decode.d4.loss_mask: 0.4419, decode.d4.loss_dice: 0.6125, decode.d5.loss_cls: 0.1833, decode.d5.loss_mask: 0.4391, decode.d5.loss_dice: 0.6126, decode.d6.loss_cls: 0.1812, decode.d6.loss_mask: 0.4401, decode.d6.loss_dice: 0.6076, decode.d7.loss_cls: 0.1801, decode.d7.loss_mask: 0.4399, decode.d7.loss_dice: 0.6070, decode.d8.loss_cls: 0.1798, decode.d8.loss_mask: 0.4390, decode.d8.loss_dice: 0.6091, loss: 14.2788 +2022-06-05 05:04:29,886 - mmseg - INFO - Iter [24800/40000] lr: 2.898e-06, eta: 2:03:58, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1689, decode.loss_mask: 0.4365, decode.loss_dice: 0.6122, 
decode.d0.loss_cls: 1.6347, decode.d0.loss_mask: 0.4754, decode.d0.loss_dice: 0.7091, decode.d1.loss_cls: 0.3471, decode.d1.loss_mask: 0.4591, decode.d1.loss_dice: 0.6501, decode.d2.loss_cls: 0.2457, decode.d2.loss_mask: 0.4465, decode.d2.loss_dice: 0.6265, decode.d3.loss_cls: 0.2111, decode.d3.loss_mask: 0.4389, decode.d3.loss_dice: 0.6173, decode.d4.loss_cls: 0.1922, decode.d4.loss_mask: 0.4388, decode.d4.loss_dice: 0.6166, decode.d5.loss_cls: 0.1839, decode.d5.loss_mask: 0.4378, decode.d5.loss_dice: 0.6175, decode.d6.loss_cls: 0.1775, decode.d6.loss_mask: 0.4379, decode.d6.loss_dice: 0.6120, decode.d7.loss_cls: 0.1794, decode.d7.loss_mask: 0.4388, decode.d7.loss_dice: 0.6112, decode.d8.loss_cls: 0.1780, decode.d8.loss_mask: 0.4377, decode.d8.loss_dice: 0.6133, loss: 14.2516 +2022-06-05 05:04:51,876 - mmseg - INFO - Iter [24850/40000] lr: 2.888e-06, eta: 2:03:32, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1814, decode.loss_mask: 0.4173, decode.loss_dice: 0.6095, decode.d0.loss_cls: 1.7028, decode.d0.loss_mask: 0.4561, decode.d0.loss_dice: 0.7111, decode.d1.loss_cls: 0.3529, decode.d1.loss_mask: 0.4328, decode.d1.loss_dice: 0.6461, decode.d2.loss_cls: 0.2477, decode.d2.loss_mask: 0.4237, decode.d2.loss_dice: 0.6226, decode.d3.loss_cls: 0.2127, decode.d3.loss_mask: 0.4209, decode.d3.loss_dice: 0.6113, decode.d4.loss_cls: 0.2043, decode.d4.loss_mask: 0.4203, decode.d4.loss_dice: 0.6115, decode.d5.loss_cls: 0.1943, decode.d5.loss_mask: 0.4177, decode.d5.loss_dice: 0.6118, decode.d6.loss_cls: 0.1847, decode.d6.loss_mask: 0.4172, decode.d6.loss_dice: 0.6110, decode.d7.loss_cls: 0.1865, decode.d7.loss_mask: 0.4183, decode.d7.loss_dice: 0.6123, decode.d8.loss_cls: 0.1799, decode.d8.loss_mask: 0.4186, decode.d8.loss_dice: 0.6137, loss: 14.1510 +2022-06-05 05:05:13,593 - mmseg - INFO - Iter [24900/40000] lr: 2.879e-06, eta: 2:03:06, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2033, decode.loss_mask: 0.4413, decode.loss_dice: 0.6236, decode.d0.loss_cls: 1.7087, decode.d0.loss_mask: 0.4824, decode.d0.loss_dice: 0.7328, decode.d1.loss_cls: 0.3769, decode.d1.loss_mask: 0.4598, decode.d1.loss_dice: 0.6698, decode.d2.loss_cls: 0.2765, decode.d2.loss_mask: 0.4476, decode.d2.loss_dice: 0.6395, decode.d3.loss_cls: 0.2386, decode.d3.loss_mask: 0.4449, decode.d3.loss_dice: 0.6296, decode.d4.loss_cls: 0.2259, decode.d4.loss_mask: 0.4436, decode.d4.loss_dice: 0.6274, decode.d5.loss_cls: 0.2166, decode.d5.loss_mask: 0.4442, decode.d5.loss_dice: 0.6268, decode.d6.loss_cls: 0.2077, decode.d6.loss_mask: 0.4418, decode.d6.loss_dice: 0.6272, decode.d7.loss_cls: 0.2123, decode.d7.loss_mask: 0.4401, decode.d7.loss_dice: 0.6206, decode.d8.loss_cls: 0.2066, decode.d8.loss_mask: 0.4405, decode.d8.loss_dice: 0.6253, loss: 14.7820 +2022-06-05 05:05:35,599 - mmseg - INFO - Iter [24950/40000] lr: 2.869e-06, eta: 2:02:40, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1809, decode.loss_mask: 0.4470, decode.loss_dice: 0.5973, decode.d0.loss_cls: 1.6059, decode.d0.loss_mask: 0.4867, decode.d0.loss_dice: 0.6885, decode.d1.loss_cls: 0.3339, decode.d1.loss_mask: 0.4647, decode.d1.loss_dice: 0.6355, decode.d2.loss_cls: 0.2414, decode.d2.loss_mask: 0.4535, decode.d2.loss_dice: 0.6124, decode.d3.loss_cls: 0.2086, decode.d3.loss_mask: 0.4516, decode.d3.loss_dice: 0.6033, decode.d4.loss_cls: 0.2001, decode.d4.loss_mask: 0.4469, decode.d4.loss_dice: 0.6023, decode.d5.loss_cls: 0.1868, decode.d5.loss_mask: 0.4476, decode.d5.loss_dice: 0.6002, decode.d6.loss_cls: 0.1860, 
decode.d6.loss_mask: 0.4455, decode.d6.loss_dice: 0.5970, decode.d7.loss_cls: 0.1851, decode.d7.loss_mask: 0.4476, decode.d7.loss_dice: 0.5967, decode.d8.loss_cls: 0.1796, decode.d8.loss_mask: 0.4460, decode.d8.loss_dice: 0.5968, loss: 14.1755 +2022-06-05 05:05:59,639 - mmseg - INFO - Saving checkpoint at 25000 iterations +2022-06-05 05:06:02,189 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:06:02,189 - mmseg - INFO - Iter [25000/40000] lr: 2.860e-06, eta: 2:02:17, time: 0.532, data_time: 0.054, memory: 31652, decode.loss_cls: 0.1673, decode.loss_mask: 0.4341, decode.loss_dice: 0.6017, decode.d0.loss_cls: 1.5842, decode.d0.loss_mask: 0.4737, decode.d0.loss_dice: 0.6981, decode.d1.loss_cls: 0.3243, decode.d1.loss_mask: 0.4513, decode.d1.loss_dice: 0.6343, decode.d2.loss_cls: 0.2407, decode.d2.loss_mask: 0.4385, decode.d2.loss_dice: 0.6112, decode.d3.loss_cls: 0.1925, decode.d3.loss_mask: 0.4384, decode.d3.loss_dice: 0.6055, decode.d4.loss_cls: 0.1837, decode.d4.loss_mask: 0.4365, decode.d4.loss_dice: 0.6054, decode.d5.loss_cls: 0.1738, decode.d5.loss_mask: 0.4372, decode.d5.loss_dice: 0.6062, decode.d6.loss_cls: 0.1811, decode.d6.loss_mask: 0.4366, decode.d6.loss_dice: 0.6033, decode.d7.loss_cls: 0.1691, decode.d7.loss_mask: 0.4370, decode.d7.loss_dice: 0.6039, decode.d8.loss_cls: 0.1622, decode.d8.loss_mask: 0.4360, decode.d8.loss_dice: 0.6061, loss: 13.9739 +2022-06-05 05:06:24,210 - mmseg - INFO - Iter [25050/40000] lr: 2.850e-06, eta: 2:01:51, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1896, decode.loss_mask: 0.4327, decode.loss_dice: 0.6129, decode.d0.loss_cls: 1.6642, decode.d0.loss_mask: 0.4690, decode.d0.loss_dice: 0.7141, decode.d1.loss_cls: 0.3696, decode.d1.loss_mask: 0.4505, decode.d1.loss_dice: 0.6541, decode.d2.loss_cls: 0.2694, decode.d2.loss_mask: 0.4404, decode.d2.loss_dice: 0.6291, decode.d3.loss_cls: 0.2260, decode.d3.loss_mask: 0.4337, decode.d3.loss_dice: 0.6201, decode.d4.loss_cls: 0.2126, decode.d4.loss_mask: 0.4346, decode.d4.loss_dice: 0.6226, decode.d5.loss_cls: 0.2047, decode.d5.loss_mask: 0.4317, decode.d5.loss_dice: 0.6165, decode.d6.loss_cls: 0.1994, decode.d6.loss_mask: 0.4310, decode.d6.loss_dice: 0.6158, decode.d7.loss_cls: 0.1983, decode.d7.loss_mask: 0.4314, decode.d7.loss_dice: 0.6133, decode.d8.loss_cls: 0.1970, decode.d8.loss_mask: 0.4292, decode.d8.loss_dice: 0.6129, loss: 14.4264 +2022-06-05 05:06:47,158 - mmseg - INFO - Iter [25100/40000] lr: 2.841e-06, eta: 2:01:25, time: 0.458, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1768, decode.loss_mask: 0.4448, decode.loss_dice: 0.6013, decode.d0.loss_cls: 1.5998, decode.d0.loss_mask: 0.4886, decode.d0.loss_dice: 0.6954, decode.d1.loss_cls: 0.3401, decode.d1.loss_mask: 0.4654, decode.d1.loss_dice: 0.6439, decode.d2.loss_cls: 0.2455, decode.d2.loss_mask: 0.4543, decode.d2.loss_dice: 0.6226, decode.d3.loss_cls: 0.2079, decode.d3.loss_mask: 0.4493, decode.d3.loss_dice: 0.6114, decode.d4.loss_cls: 0.1966, decode.d4.loss_mask: 0.4470, decode.d4.loss_dice: 0.6126, decode.d5.loss_cls: 0.1877, decode.d5.loss_mask: 0.4454, decode.d5.loss_dice: 0.6055, decode.d6.loss_cls: 0.1835, decode.d6.loss_mask: 0.4468, decode.d6.loss_dice: 0.6027, decode.d7.loss_cls: 0.1735, decode.d7.loss_mask: 0.4475, decode.d7.loss_dice: 0.6049, decode.d8.loss_cls: 0.1775, decode.d8.loss_mask: 0.4451, decode.d8.loss_dice: 0.6003, loss: 14.2237 +2022-06-05 05:07:09,158 - mmseg - INFO - Iter [25150/40000] lr: 2.831e-06, eta: 2:00:59, time: 0.440, data_time: 
0.008, memory: 31652, decode.loss_cls: 0.2131, decode.loss_mask: 0.4511, decode.loss_dice: 0.6350, decode.d0.loss_cls: 1.6961, decode.d0.loss_mask: 0.4919, decode.d0.loss_dice: 0.7495, decode.d1.loss_cls: 0.3718, decode.d1.loss_mask: 0.4711, decode.d1.loss_dice: 0.6799, decode.d2.loss_cls: 0.2804, decode.d2.loss_mask: 0.4611, decode.d2.loss_dice: 0.6526, decode.d3.loss_cls: 0.2414, decode.d3.loss_mask: 0.4585, decode.d3.loss_dice: 0.6427, decode.d4.loss_cls: 0.2326, decode.d4.loss_mask: 0.4560, decode.d4.loss_dice: 0.6443, decode.d5.loss_cls: 0.2239, decode.d5.loss_mask: 0.4537, decode.d5.loss_dice: 0.6363, decode.d6.loss_cls: 0.2178, decode.d6.loss_mask: 0.4531, decode.d6.loss_dice: 0.6384, decode.d7.loss_cls: 0.2157, decode.d7.loss_mask: 0.4523, decode.d7.loss_dice: 0.6400, decode.d8.loss_cls: 0.2162, decode.d8.loss_mask: 0.4511, decode.d8.loss_dice: 0.6334, loss: 15.0610 +2022-06-05 05:07:31,355 - mmseg - INFO - Iter [25200/40000] lr: 2.822e-06, eta: 2:00:34, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1864, decode.loss_mask: 0.4357, decode.loss_dice: 0.6053, decode.d0.loss_cls: 1.7041, decode.d0.loss_mask: 0.4775, decode.d0.loss_dice: 0.7196, decode.d1.loss_cls: 0.3698, decode.d1.loss_mask: 0.4521, decode.d1.loss_dice: 0.6449, decode.d2.loss_cls: 0.2722, decode.d2.loss_mask: 0.4391, decode.d2.loss_dice: 0.6148, decode.d3.loss_cls: 0.2205, decode.d3.loss_mask: 0.4363, decode.d3.loss_dice: 0.6070, decode.d4.loss_cls: 0.2070, decode.d4.loss_mask: 0.4347, decode.d4.loss_dice: 0.6113, decode.d5.loss_cls: 0.2029, decode.d5.loss_mask: 0.4344, decode.d5.loss_dice: 0.6094, decode.d6.loss_cls: 0.1906, decode.d6.loss_mask: 0.4341, decode.d6.loss_dice: 0.6073, decode.d7.loss_cls: 0.1894, decode.d7.loss_mask: 0.4362, decode.d7.loss_dice: 0.6058, decode.d8.loss_cls: 0.1901, decode.d8.loss_mask: 0.4337, decode.d8.loss_dice: 0.6019, loss: 14.3741 +2022-06-05 05:07:53,156 - mmseg - INFO - Iter [25250/40000] lr: 2.812e-06, eta: 2:00:08, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1607, decode.loss_mask: 0.4311, decode.loss_dice: 0.5922, decode.d0.loss_cls: 1.5944, decode.d0.loss_mask: 0.4702, decode.d0.loss_dice: 0.6867, decode.d1.loss_cls: 0.3258, decode.d1.loss_mask: 0.4492, decode.d1.loss_dice: 0.6319, decode.d2.loss_cls: 0.2295, decode.d2.loss_mask: 0.4381, decode.d2.loss_dice: 0.6028, decode.d3.loss_cls: 0.1900, decode.d3.loss_mask: 0.4324, decode.d3.loss_dice: 0.5959, decode.d4.loss_cls: 0.1820, decode.d4.loss_mask: 0.4305, decode.d4.loss_dice: 0.5966, decode.d5.loss_cls: 0.1747, decode.d5.loss_mask: 0.4301, decode.d5.loss_dice: 0.5919, decode.d6.loss_cls: 0.1688, decode.d6.loss_mask: 0.4308, decode.d6.loss_dice: 0.5905, decode.d7.loss_cls: 0.1630, decode.d7.loss_mask: 0.4307, decode.d7.loss_dice: 0.5894, decode.d8.loss_cls: 0.1589, decode.d8.loss_mask: 0.4307, decode.d8.loss_dice: 0.5917, loss: 13.7912 +2022-06-05 05:08:16,871 - mmseg - INFO - Iter [25300/40000] lr: 2.803e-06, eta: 1:59:43, time: 0.474, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1895, decode.loss_mask: 0.4445, decode.loss_dice: 0.6279, decode.d0.loss_cls: 1.6679, decode.d0.loss_mask: 0.4903, decode.d0.loss_dice: 0.7326, decode.d1.loss_cls: 0.3511, decode.d1.loss_mask: 0.4677, decode.d1.loss_dice: 0.6753, decode.d2.loss_cls: 0.2419, decode.d2.loss_mask: 0.4528, decode.d2.loss_dice: 0.6468, decode.d3.loss_cls: 0.2056, decode.d3.loss_mask: 0.4507, decode.d3.loss_dice: 0.6353, decode.d4.loss_cls: 0.2090, decode.d4.loss_mask: 0.4485, decode.d4.loss_dice: 0.6316, decode.d5.loss_cls: 
0.2008, decode.d5.loss_mask: 0.4485, decode.d5.loss_dice: 0.6313, decode.d6.loss_cls: 0.1872, decode.d6.loss_mask: 0.4466, decode.d6.loss_dice: 0.6303, decode.d7.loss_cls: 0.1903, decode.d7.loss_mask: 0.4462, decode.d7.loss_dice: 0.6310, decode.d8.loss_cls: 0.1877, decode.d8.loss_mask: 0.4445, decode.d8.loss_dice: 0.6258, loss: 14.6394 +2022-06-05 05:08:38,445 - mmseg - INFO - Iter [25350/40000] lr: 2.793e-06, eta: 1:59:17, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1671, decode.loss_mask: 0.4481, decode.loss_dice: 0.6057, decode.d0.loss_cls: 1.6478, decode.d0.loss_mask: 0.4899, decode.d0.loss_dice: 0.7120, decode.d1.loss_cls: 0.3282, decode.d1.loss_mask: 0.4658, decode.d1.loss_dice: 0.6427, decode.d2.loss_cls: 0.2388, decode.d2.loss_mask: 0.4543, decode.d2.loss_dice: 0.6174, decode.d3.loss_cls: 0.2049, decode.d3.loss_mask: 0.4515, decode.d3.loss_dice: 0.6141, decode.d4.loss_cls: 0.1873, decode.d4.loss_mask: 0.4510, decode.d4.loss_dice: 0.6118, decode.d5.loss_cls: 0.1804, decode.d5.loss_mask: 0.4478, decode.d5.loss_dice: 0.6061, decode.d6.loss_cls: 0.1790, decode.d6.loss_mask: 0.4481, decode.d6.loss_dice: 0.6053, decode.d7.loss_cls: 0.1692, decode.d7.loss_mask: 0.4500, decode.d7.loss_dice: 0.6058, decode.d8.loss_cls: 0.1707, decode.d8.loss_mask: 0.4487, decode.d8.loss_dice: 0.6048, loss: 14.2545 +2022-06-05 05:09:00,107 - mmseg - INFO - Iter [25400/40000] lr: 2.784e-06, eta: 1:58:51, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1759, decode.loss_mask: 0.4334, decode.loss_dice: 0.6199, decode.d0.loss_cls: 1.6240, decode.d0.loss_mask: 0.4778, decode.d0.loss_dice: 0.7163, decode.d1.loss_cls: 0.3551, decode.d1.loss_mask: 0.4516, decode.d1.loss_dice: 0.6519, decode.d2.loss_cls: 0.2546, decode.d2.loss_mask: 0.4420, decode.d2.loss_dice: 0.6363, decode.d3.loss_cls: 0.2058, decode.d3.loss_mask: 0.4370, decode.d3.loss_dice: 0.6294, decode.d4.loss_cls: 0.1997, decode.d4.loss_mask: 0.4363, decode.d4.loss_dice: 0.6243, decode.d5.loss_cls: 0.1870, decode.d5.loss_mask: 0.4369, decode.d5.loss_dice: 0.6253, decode.d6.loss_cls: 0.1832, decode.d6.loss_mask: 0.4333, decode.d6.loss_dice: 0.6181, decode.d7.loss_cls: 0.1843, decode.d7.loss_mask: 0.4337, decode.d7.loss_dice: 0.6155, decode.d8.loss_cls: 0.1770, decode.d8.loss_mask: 0.4313, decode.d8.loss_dice: 0.6236, loss: 14.3203 +2022-06-05 05:09:21,538 - mmseg - INFO - Iter [25450/40000] lr: 2.774e-06, eta: 1:58:25, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1861, decode.loss_mask: 0.4340, decode.loss_dice: 0.6069, decode.d0.loss_cls: 1.6475, decode.d0.loss_mask: 0.4735, decode.d0.loss_dice: 0.7041, decode.d1.loss_cls: 0.3467, decode.d1.loss_mask: 0.4470, decode.d1.loss_dice: 0.6468, decode.d2.loss_cls: 0.2489, decode.d2.loss_mask: 0.4399, decode.d2.loss_dice: 0.6211, decode.d3.loss_cls: 0.2180, decode.d3.loss_mask: 0.4372, decode.d3.loss_dice: 0.6124, decode.d4.loss_cls: 0.2016, decode.d4.loss_mask: 0.4356, decode.d4.loss_dice: 0.6086, decode.d5.loss_cls: 0.1956, decode.d5.loss_mask: 0.4360, decode.d5.loss_dice: 0.6167, decode.d6.loss_cls: 0.1915, decode.d6.loss_mask: 0.4359, decode.d6.loss_dice: 0.6058, decode.d7.loss_cls: 0.1872, decode.d7.loss_mask: 0.4359, decode.d7.loss_dice: 0.6093, decode.d8.loss_cls: 0.1837, decode.d8.loss_mask: 0.4358, decode.d8.loss_dice: 0.6073, loss: 14.2567 +2022-06-05 05:09:43,351 - mmseg - INFO - Iter [25500/40000] lr: 2.764e-06, eta: 1:57:59, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2009, decode.loss_mask: 0.4208, decode.loss_dice: 0.6188, 
decode.d0.loss_cls: 1.6381, decode.d0.loss_mask: 0.4608, decode.d0.loss_dice: 0.7189, decode.d1.loss_cls: 0.3638, decode.d1.loss_mask: 0.4341, decode.d1.loss_dice: 0.6509, decode.d2.loss_cls: 0.2753, decode.d2.loss_mask: 0.4274, decode.d2.loss_dice: 0.6253, decode.d3.loss_cls: 0.2334, decode.d3.loss_mask: 0.4265, decode.d3.loss_dice: 0.6181, decode.d4.loss_cls: 0.2245, decode.d4.loss_mask: 0.4244, decode.d4.loss_dice: 0.6204, decode.d5.loss_cls: 0.2127, decode.d5.loss_mask: 0.4250, decode.d5.loss_dice: 0.6203, decode.d6.loss_cls: 0.2049, decode.d6.loss_mask: 0.4240, decode.d6.loss_dice: 0.6172, decode.d7.loss_cls: 0.2037, decode.d7.loss_mask: 0.4218, decode.d7.loss_dice: 0.6197, decode.d8.loss_cls: 0.2022, decode.d8.loss_mask: 0.4222, decode.d8.loss_dice: 0.6196, loss: 14.3759 +2022-06-05 05:10:05,473 - mmseg - INFO - Iter [25550/40000] lr: 2.755e-06, eta: 1:57:33, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1903, decode.loss_mask: 0.4354, decode.loss_dice: 0.6202, decode.d0.loss_cls: 1.6712, decode.d0.loss_mask: 0.4771, decode.d0.loss_dice: 0.7375, decode.d1.loss_cls: 0.3533, decode.d1.loss_mask: 0.4530, decode.d1.loss_dice: 0.6637, decode.d2.loss_cls: 0.2616, decode.d2.loss_mask: 0.4421, decode.d2.loss_dice: 0.6445, decode.d3.loss_cls: 0.2242, decode.d3.loss_mask: 0.4348, decode.d3.loss_dice: 0.6290, decode.d4.loss_cls: 0.2142, decode.d4.loss_mask: 0.4341, decode.d4.loss_dice: 0.6288, decode.d5.loss_cls: 0.1910, decode.d5.loss_mask: 0.4364, decode.d5.loss_dice: 0.6305, decode.d6.loss_cls: 0.1939, decode.d6.loss_mask: 0.4378, decode.d6.loss_dice: 0.6258, decode.d7.loss_cls: 0.1899, decode.d7.loss_mask: 0.4349, decode.d7.loss_dice: 0.6192, decode.d8.loss_cls: 0.1905, decode.d8.loss_mask: 0.4350, decode.d8.loss_dice: 0.6239, loss: 14.5239 +2022-06-05 05:10:30,299 - mmseg - INFO - Iter [25600/40000] lr: 2.745e-06, eta: 1:57:09, time: 0.497, data_time: 0.061, memory: 31652, decode.loss_cls: 0.1905, decode.loss_mask: 0.4406, decode.loss_dice: 0.6082, decode.d0.loss_cls: 1.6419, decode.d0.loss_mask: 0.4805, decode.d0.loss_dice: 0.7098, decode.d1.loss_cls: 0.3553, decode.d1.loss_mask: 0.4577, decode.d1.loss_dice: 0.6491, decode.d2.loss_cls: 0.2537, decode.d2.loss_mask: 0.4514, decode.d2.loss_dice: 0.6271, decode.d3.loss_cls: 0.2150, decode.d3.loss_mask: 0.4451, decode.d3.loss_dice: 0.6210, decode.d4.loss_cls: 0.2043, decode.d4.loss_mask: 0.4431, decode.d4.loss_dice: 0.6163, decode.d5.loss_cls: 0.2018, decode.d5.loss_mask: 0.4403, decode.d5.loss_dice: 0.6123, decode.d6.loss_cls: 0.1929, decode.d6.loss_mask: 0.4392, decode.d6.loss_dice: 0.6121, decode.d7.loss_cls: 0.1912, decode.d7.loss_mask: 0.4389, decode.d7.loss_dice: 0.6138, decode.d8.loss_cls: 0.1876, decode.d8.loss_mask: 0.4399, decode.d8.loss_dice: 0.6139, loss: 14.3946 +2022-06-05 05:10:51,953 - mmseg - INFO - Iter [25650/40000] lr: 2.736e-06, eta: 1:56:43, time: 0.433, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1812, decode.loss_mask: 0.4358, decode.loss_dice: 0.6033, decode.d0.loss_cls: 1.5929, decode.d0.loss_mask: 0.4734, decode.d0.loss_dice: 0.6993, decode.d1.loss_cls: 0.3350, decode.d1.loss_mask: 0.4537, decode.d1.loss_dice: 0.6380, decode.d2.loss_cls: 0.2371, decode.d2.loss_mask: 0.4430, decode.d2.loss_dice: 0.6157, decode.d3.loss_cls: 0.2059, decode.d3.loss_mask: 0.4389, decode.d3.loss_dice: 0.6085, decode.d4.loss_cls: 0.1978, decode.d4.loss_mask: 0.4383, decode.d4.loss_dice: 0.6055, decode.d5.loss_cls: 0.1848, decode.d5.loss_mask: 0.4368, decode.d5.loss_dice: 0.6062, decode.d6.loss_cls: 0.1845, 
decode.d6.loss_mask: 0.4362, decode.d6.loss_dice: 0.6009, decode.d7.loss_cls: 0.1819, decode.d7.loss_mask: 0.4361, decode.d7.loss_dice: 0.6016, decode.d8.loss_cls: 0.1791, decode.d8.loss_mask: 0.4371, decode.d8.loss_dice: 0.6023, loss: 14.0910 +2022-06-05 05:11:13,527 - mmseg - INFO - Iter [25700/40000] lr: 2.726e-06, eta: 1:56:17, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1918, decode.loss_mask: 0.4241, decode.loss_dice: 0.6236, decode.d0.loss_cls: 1.7122, decode.d0.loss_mask: 0.4702, decode.d0.loss_dice: 0.7286, decode.d1.loss_cls: 0.3560, decode.d1.loss_mask: 0.4418, decode.d1.loss_dice: 0.6605, decode.d2.loss_cls: 0.2636, decode.d2.loss_mask: 0.4311, decode.d2.loss_dice: 0.6433, decode.d3.loss_cls: 0.2171, decode.d3.loss_mask: 0.4281, decode.d3.loss_dice: 0.6346, decode.d4.loss_cls: 0.2083, decode.d4.loss_mask: 0.4288, decode.d4.loss_dice: 0.6331, decode.d5.loss_cls: 0.1979, decode.d5.loss_mask: 0.4288, decode.d5.loss_dice: 0.6313, decode.d6.loss_cls: 0.1885, decode.d6.loss_mask: 0.4271, decode.d6.loss_dice: 0.6295, decode.d7.loss_cls: 0.1886, decode.d7.loss_mask: 0.4263, decode.d7.loss_dice: 0.6310, decode.d8.loss_cls: 0.1910, decode.d8.loss_mask: 0.4275, decode.d8.loss_dice: 0.6252, loss: 14.4891 +2022-06-05 05:11:35,282 - mmseg - INFO - Iter [25750/40000] lr: 2.717e-06, eta: 1:55:51, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2012, decode.loss_mask: 0.4327, decode.loss_dice: 0.6052, decode.d0.loss_cls: 1.6383, decode.d0.loss_mask: 0.4764, decode.d0.loss_dice: 0.7014, decode.d1.loss_cls: 0.3551, decode.d1.loss_mask: 0.4498, decode.d1.loss_dice: 0.6394, decode.d2.loss_cls: 0.2671, decode.d2.loss_mask: 0.4405, decode.d2.loss_dice: 0.6203, decode.d3.loss_cls: 0.2246, decode.d3.loss_mask: 0.4373, decode.d3.loss_dice: 0.6071, decode.d4.loss_cls: 0.2130, decode.d4.loss_mask: 0.4363, decode.d4.loss_dice: 0.6102, decode.d5.loss_cls: 0.2008, decode.d5.loss_mask: 0.4363, decode.d5.loss_dice: 0.6129, decode.d6.loss_cls: 0.1958, decode.d6.loss_mask: 0.4344, decode.d6.loss_dice: 0.6019, decode.d7.loss_cls: 0.1944, decode.d7.loss_mask: 0.4341, decode.d7.loss_dice: 0.6044, decode.d8.loss_cls: 0.1973, decode.d8.loss_mask: 0.4332, decode.d8.loss_dice: 0.6012, loss: 14.3030 +2022-06-05 05:11:56,473 - mmseg - INFO - Iter [25800/40000] lr: 2.707e-06, eta: 1:55:25, time: 0.423, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1444, decode.loss_mask: 0.4391, decode.loss_dice: 0.6068, decode.d0.loss_cls: 1.6298, decode.d0.loss_mask: 0.4818, decode.d0.loss_dice: 0.6981, decode.d1.loss_cls: 0.3306, decode.d1.loss_mask: 0.4561, decode.d1.loss_dice: 0.6457, decode.d2.loss_cls: 0.2131, decode.d2.loss_mask: 0.4462, decode.d2.loss_dice: 0.6211, decode.d3.loss_cls: 0.1743, decode.d3.loss_mask: 0.4451, decode.d3.loss_dice: 0.6156, decode.d4.loss_cls: 0.1679, decode.d4.loss_mask: 0.4448, decode.d4.loss_dice: 0.6189, decode.d5.loss_cls: 0.1572, decode.d5.loss_mask: 0.4417, decode.d5.loss_dice: 0.6086, decode.d6.loss_cls: 0.1497, decode.d6.loss_mask: 0.4406, decode.d6.loss_dice: 0.6078, decode.d7.loss_cls: 0.1552, decode.d7.loss_mask: 0.4413, decode.d7.loss_dice: 0.6057, decode.d8.loss_cls: 0.1516, decode.d8.loss_mask: 0.4401, decode.d8.loss_dice: 0.6070, loss: 13.9858 +2022-06-05 05:12:17,890 - mmseg - INFO - Iter [25850/40000] lr: 2.698e-06, eta: 1:54:59, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1777, decode.loss_mask: 0.4305, decode.loss_dice: 0.6024, decode.d0.loss_cls: 1.6524, decode.d0.loss_mask: 0.4702, decode.d0.loss_dice: 0.7027, 
decode.d1.loss_cls: 0.3484, decode.d1.loss_mask: 0.4482, decode.d1.loss_dice: 0.6458, decode.d2.loss_cls: 0.2388, decode.d2.loss_mask: 0.4378, decode.d2.loss_dice: 0.6242, decode.d3.loss_cls: 0.2105, decode.d3.loss_mask: 0.4327, decode.d3.loss_dice: 0.6047, decode.d4.loss_cls: 0.2064, decode.d4.loss_mask: 0.4302, decode.d4.loss_dice: 0.6066, decode.d5.loss_cls: 0.1912, decode.d5.loss_mask: 0.4302, decode.d5.loss_dice: 0.6035, decode.d6.loss_cls: 0.1817, decode.d6.loss_mask: 0.4296, decode.d6.loss_dice: 0.6029, decode.d7.loss_cls: 0.1793, decode.d7.loss_mask: 0.4302, decode.d7.loss_dice: 0.6027, decode.d8.loss_cls: 0.1782, decode.d8.loss_mask: 0.4304, decode.d8.loss_dice: 0.6044, loss: 14.1345 +2022-06-05 05:12:42,303 - mmseg - INFO - Iter [25900/40000] lr: 2.688e-06, eta: 1:54:34, time: 0.488, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1852, decode.loss_mask: 0.4447, decode.loss_dice: 0.6117, decode.d0.loss_cls: 1.6598, decode.d0.loss_mask: 0.4882, decode.d0.loss_dice: 0.7097, decode.d1.loss_cls: 0.3440, decode.d1.loss_mask: 0.4617, decode.d1.loss_dice: 0.6518, decode.d2.loss_cls: 0.2516, decode.d2.loss_mask: 0.4513, decode.d2.loss_dice: 0.6273, decode.d3.loss_cls: 0.2147, decode.d3.loss_mask: 0.4461, decode.d3.loss_dice: 0.6171, decode.d4.loss_cls: 0.2120, decode.d4.loss_mask: 0.4447, decode.d4.loss_dice: 0.6161, decode.d5.loss_cls: 0.2033, decode.d5.loss_mask: 0.4440, decode.d5.loss_dice: 0.6132, decode.d6.loss_cls: 0.1939, decode.d6.loss_mask: 0.4453, decode.d6.loss_dice: 0.6074, decode.d7.loss_cls: 0.1873, decode.d7.loss_mask: 0.4463, decode.d7.loss_dice: 0.6092, decode.d8.loss_cls: 0.1831, decode.d8.loss_mask: 0.4479, decode.d8.loss_dice: 0.6132, loss: 14.4319 +2022-06-05 05:13:04,048 - mmseg - INFO - Iter [25950/40000] lr: 2.679e-06, eta: 1:54:09, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1806, decode.loss_mask: 0.4463, decode.loss_dice: 0.6052, decode.d0.loss_cls: 1.6677, decode.d0.loss_mask: 0.4862, decode.d0.loss_dice: 0.7044, decode.d1.loss_cls: 0.3456, decode.d1.loss_mask: 0.4638, decode.d1.loss_dice: 0.6435, decode.d2.loss_cls: 0.2485, decode.d2.loss_mask: 0.4513, decode.d2.loss_dice: 0.6151, decode.d3.loss_cls: 0.2112, decode.d3.loss_mask: 0.4488, decode.d3.loss_dice: 0.6094, decode.d4.loss_cls: 0.2059, decode.d4.loss_mask: 0.4479, decode.d4.loss_dice: 0.6100, decode.d5.loss_cls: 0.1954, decode.d5.loss_mask: 0.4477, decode.d5.loss_dice: 0.6043, decode.d6.loss_cls: 0.1858, decode.d6.loss_mask: 0.4460, decode.d6.loss_dice: 0.6017, decode.d7.loss_cls: 0.1808, decode.d7.loss_mask: 0.4470, decode.d7.loss_dice: 0.6026, decode.d8.loss_cls: 0.1794, decode.d8.loss_mask: 0.4472, decode.d8.loss_dice: 0.6041, loss: 14.3335 +2022-06-05 05:13:25,697 - mmseg - INFO - Saving checkpoint at 26000 iterations +2022-06-05 05:13:28,906 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:13:28,906 - mmseg - INFO - Iter [26000/40000] lr: 2.669e-06, eta: 1:53:45, time: 0.498, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1825, decode.loss_mask: 0.4331, decode.loss_dice: 0.6189, decode.d0.loss_cls: 1.6273, decode.d0.loss_mask: 0.4788, decode.d0.loss_dice: 0.7212, decode.d1.loss_cls: 0.3505, decode.d1.loss_mask: 0.4542, decode.d1.loss_dice: 0.6553, decode.d2.loss_cls: 0.2534, decode.d2.loss_mask: 0.4407, decode.d2.loss_dice: 0.6364, decode.d3.loss_cls: 0.2194, decode.d3.loss_mask: 0.4361, decode.d3.loss_dice: 0.6227, decode.d4.loss_cls: 0.2056, decode.d4.loss_mask: 0.4354, decode.d4.loss_dice: 0.6213, 
decode.d5.loss_cls: 0.1997, decode.d5.loss_mask: 0.4344, decode.d5.loss_dice: 0.6210, decode.d6.loss_cls: 0.1859, decode.d6.loss_mask: 0.4343, decode.d6.loss_dice: 0.6147, decode.d7.loss_cls: 0.1781, decode.d7.loss_mask: 0.4339, decode.d7.loss_dice: 0.6180, decode.d8.loss_cls: 0.1812, decode.d8.loss_mask: 0.4314, decode.d8.loss_dice: 0.6210, loss: 14.3462 +2022-06-05 05:13:51,186 - mmseg - INFO - Iter [26050/40000] lr: 2.660e-06, eta: 1:53:19, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1921, decode.loss_mask: 0.4332, decode.loss_dice: 0.6450, decode.d0.loss_cls: 1.6171, decode.d0.loss_mask: 0.4774, decode.d0.loss_dice: 0.7460, decode.d1.loss_cls: 0.3560, decode.d1.loss_mask: 0.4513, decode.d1.loss_dice: 0.6789, decode.d2.loss_cls: 0.2489, decode.d2.loss_mask: 0.4425, decode.d2.loss_dice: 0.6605, decode.d3.loss_cls: 0.2185, decode.d3.loss_mask: 0.4382, decode.d3.loss_dice: 0.6471, decode.d4.loss_cls: 0.2132, decode.d4.loss_mask: 0.4363, decode.d4.loss_dice: 0.6482, decode.d5.loss_cls: 0.2083, decode.d5.loss_mask: 0.4367, decode.d5.loss_dice: 0.6423, decode.d6.loss_cls: 0.1986, decode.d6.loss_mask: 0.4345, decode.d6.loss_dice: 0.6411, decode.d7.loss_cls: 0.1940, decode.d7.loss_mask: 0.4351, decode.d7.loss_dice: 0.6451, decode.d8.loss_cls: 0.1887, decode.d8.loss_mask: 0.4328, decode.d8.loss_dice: 0.6495, loss: 14.6574 +2022-06-05 05:14:13,224 - mmseg - INFO - Iter [26100/40000] lr: 2.650e-06, eta: 1:52:53, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2029, decode.loss_mask: 0.4339, decode.loss_dice: 0.5900, decode.d0.loss_cls: 1.6424, decode.d0.loss_mask: 0.4771, decode.d0.loss_dice: 0.7010, decode.d1.loss_cls: 0.3644, decode.d1.loss_mask: 0.4494, decode.d1.loss_dice: 0.6333, decode.d2.loss_cls: 0.2688, decode.d2.loss_mask: 0.4418, decode.d2.loss_dice: 0.6098, decode.d3.loss_cls: 0.2324, decode.d3.loss_mask: 0.4372, decode.d3.loss_dice: 0.6005, decode.d4.loss_cls: 0.2216, decode.d4.loss_mask: 0.4376, decode.d4.loss_dice: 0.6024, decode.d5.loss_cls: 0.2123, decode.d5.loss_mask: 0.4350, decode.d5.loss_dice: 0.5988, decode.d6.loss_cls: 0.2032, decode.d6.loss_mask: 0.4342, decode.d6.loss_dice: 0.5926, decode.d7.loss_cls: 0.2043, decode.d7.loss_mask: 0.4329, decode.d7.loss_dice: 0.5908, decode.d8.loss_cls: 0.1987, decode.d8.loss_mask: 0.4340, decode.d8.loss_dice: 0.5982, loss: 14.2816 +2022-06-05 05:14:34,320 - mmseg - INFO - Iter [26150/40000] lr: 2.641e-06, eta: 1:52:27, time: 0.422, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1669, decode.loss_mask: 0.4355, decode.loss_dice: 0.5758, decode.d0.loss_cls: 1.5883, decode.d0.loss_mask: 0.4810, decode.d0.loss_dice: 0.6722, decode.d1.loss_cls: 0.3223, decode.d1.loss_mask: 0.4543, decode.d1.loss_dice: 0.6015, decode.d2.loss_cls: 0.2300, decode.d2.loss_mask: 0.4445, decode.d2.loss_dice: 0.5876, decode.d3.loss_cls: 0.1840, decode.d3.loss_mask: 0.4411, decode.d3.loss_dice: 0.5785, decode.d4.loss_cls: 0.1764, decode.d4.loss_mask: 0.4410, decode.d4.loss_dice: 0.5809, decode.d5.loss_cls: 0.1713, decode.d5.loss_mask: 0.4401, decode.d5.loss_dice: 0.5796, decode.d6.loss_cls: 0.1644, decode.d6.loss_mask: 0.4388, decode.d6.loss_dice: 0.5770, decode.d7.loss_cls: 0.1625, decode.d7.loss_mask: 0.4371, decode.d7.loss_dice: 0.5737, decode.d8.loss_cls: 0.1655, decode.d8.loss_mask: 0.4351, decode.d8.loss_dice: 0.5802, loss: 13.6869 +2022-06-05 05:14:55,951 - mmseg - INFO - Iter [26200/40000] lr: 2.631e-06, eta: 1:52:02, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1745, decode.loss_mask: 0.4301, 
decode.loss_dice: 0.6031, decode.d0.loss_cls: 1.6085, decode.d0.loss_mask: 0.4752, decode.d0.loss_dice: 0.6952, decode.d1.loss_cls: 0.3461, decode.d1.loss_mask: 0.4496, decode.d1.loss_dice: 0.6400, decode.d2.loss_cls: 0.2451, decode.d2.loss_mask: 0.4385, decode.d2.loss_dice: 0.6159, decode.d3.loss_cls: 0.2100, decode.d3.loss_mask: 0.4335, decode.d3.loss_dice: 0.6048, decode.d4.loss_cls: 0.2013, decode.d4.loss_mask: 0.4325, decode.d4.loss_dice: 0.6044, decode.d5.loss_cls: 0.1916, decode.d5.loss_mask: 0.4320, decode.d5.loss_dice: 0.6066, decode.d6.loss_cls: 0.1812, decode.d6.loss_mask: 0.4327, decode.d6.loss_dice: 0.6015, decode.d7.loss_cls: 0.1724, decode.d7.loss_mask: 0.4306, decode.d7.loss_dice: 0.6033, decode.d8.loss_cls: 0.1756, decode.d8.loss_mask: 0.4306, decode.d8.loss_dice: 0.6054, loss: 14.0718 +2022-06-05 05:15:20,456 - mmseg - INFO - Iter [26250/40000] lr: 2.621e-06, eta: 1:51:37, time: 0.490, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1756, decode.loss_mask: 0.4340, decode.loss_dice: 0.6112, decode.d0.loss_cls: 1.6453, decode.d0.loss_mask: 0.4742, decode.d0.loss_dice: 0.7120, decode.d1.loss_cls: 0.3470, decode.d1.loss_mask: 0.4480, decode.d1.loss_dice: 0.6510, decode.d2.loss_cls: 0.2466, decode.d2.loss_mask: 0.4373, decode.d2.loss_dice: 0.6197, decode.d3.loss_cls: 0.2040, decode.d3.loss_mask: 0.4367, decode.d3.loss_dice: 0.6144, decode.d4.loss_cls: 0.1957, decode.d4.loss_mask: 0.4351, decode.d4.loss_dice: 0.6144, decode.d5.loss_cls: 0.1822, decode.d5.loss_mask: 0.4337, decode.d5.loss_dice: 0.6172, decode.d6.loss_cls: 0.1847, decode.d6.loss_mask: 0.4327, decode.d6.loss_dice: 0.6132, decode.d7.loss_cls: 0.1737, decode.d7.loss_mask: 0.4323, decode.d7.loss_dice: 0.6157, decode.d8.loss_cls: 0.1824, decode.d8.loss_mask: 0.4326, decode.d8.loss_dice: 0.6131, loss: 14.2157 +2022-06-05 05:15:42,625 - mmseg - INFO - Iter [26300/40000] lr: 2.612e-06, eta: 1:51:12, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1813, decode.loss_mask: 0.4459, decode.loss_dice: 0.6193, decode.d0.loss_cls: 1.6456, decode.d0.loss_mask: 0.4885, decode.d0.loss_dice: 0.7221, decode.d1.loss_cls: 0.3513, decode.d1.loss_mask: 0.4651, decode.d1.loss_dice: 0.6540, decode.d2.loss_cls: 0.2506, decode.d2.loss_mask: 0.4530, decode.d2.loss_dice: 0.6313, decode.d3.loss_cls: 0.2150, decode.d3.loss_mask: 0.4496, decode.d3.loss_dice: 0.6250, decode.d4.loss_cls: 0.2053, decode.d4.loss_mask: 0.4496, decode.d4.loss_dice: 0.6282, decode.d5.loss_cls: 0.2001, decode.d5.loss_mask: 0.4494, decode.d5.loss_dice: 0.6237, decode.d6.loss_cls: 0.1923, decode.d6.loss_mask: 0.4470, decode.d6.loss_dice: 0.6190, decode.d7.loss_cls: 0.1871, decode.d7.loss_mask: 0.4465, decode.d7.loss_dice: 0.6186, decode.d8.loss_cls: 0.1847, decode.d8.loss_mask: 0.4454, decode.d8.loss_dice: 0.6202, loss: 14.5151 +2022-06-05 05:16:05,205 - mmseg - INFO - Iter [26350/40000] lr: 2.602e-06, eta: 1:50:46, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1661, decode.loss_mask: 0.4359, decode.loss_dice: 0.6053, decode.d0.loss_cls: 1.5905, decode.d0.loss_mask: 0.4696, decode.d0.loss_dice: 0.7051, decode.d1.loss_cls: 0.3306, decode.d1.loss_mask: 0.4501, decode.d1.loss_dice: 0.6448, decode.d2.loss_cls: 0.2318, decode.d2.loss_mask: 0.4444, decode.d2.loss_dice: 0.6249, decode.d3.loss_cls: 0.2004, decode.d3.loss_mask: 0.4399, decode.d3.loss_dice: 0.6120, decode.d4.loss_cls: 0.1867, decode.d4.loss_mask: 0.4372, decode.d4.loss_dice: 0.6083, decode.d5.loss_cls: 0.1786, decode.d5.loss_mask: 0.4353, decode.d5.loss_dice: 0.6068, 
decode.d6.loss_cls: 0.1727, decode.d6.loss_mask: 0.4353, decode.d6.loss_dice: 0.6075, decode.d7.loss_cls: 0.1706, decode.d7.loss_mask: 0.4346, decode.d7.loss_dice: 0.6062, decode.d8.loss_cls: 0.1703, decode.d8.loss_mask: 0.4364, decode.d8.loss_dice: 0.6066, loss: 14.0444 +2022-06-05 05:16:27,083 - mmseg - INFO - Iter [26400/40000] lr: 2.593e-06, eta: 1:50:21, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1958, decode.loss_mask: 0.4367, decode.loss_dice: 0.6099, decode.d0.loss_cls: 1.6119, decode.d0.loss_mask: 0.4707, decode.d0.loss_dice: 0.7126, decode.d1.loss_cls: 0.3444, decode.d1.loss_mask: 0.4508, decode.d1.loss_dice: 0.6511, decode.d2.loss_cls: 0.2617, decode.d2.loss_mask: 0.4417, decode.d2.loss_dice: 0.6246, decode.d3.loss_cls: 0.2222, decode.d3.loss_mask: 0.4383, decode.d3.loss_dice: 0.6108, decode.d4.loss_cls: 0.2151, decode.d4.loss_mask: 0.4391, decode.d4.loss_dice: 0.6090, decode.d5.loss_cls: 0.2047, decode.d5.loss_mask: 0.4374, decode.d5.loss_dice: 0.6139, decode.d6.loss_cls: 0.2007, decode.d6.loss_mask: 0.4370, decode.d6.loss_dice: 0.6121, decode.d7.loss_cls: 0.1932, decode.d7.loss_mask: 0.4374, decode.d7.loss_dice: 0.6118, decode.d8.loss_cls: 0.1962, decode.d8.loss_mask: 0.4367, decode.d8.loss_dice: 0.6097, loss: 14.3373 +2022-06-05 05:16:49,856 - mmseg - INFO - Iter [26450/40000] lr: 2.583e-06, eta: 1:49:56, time: 0.455, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1825, decode.loss_mask: 0.4428, decode.loss_dice: 0.6051, decode.d0.loss_cls: 1.6339, decode.d0.loss_mask: 0.4807, decode.d0.loss_dice: 0.6952, decode.d1.loss_cls: 0.3455, decode.d1.loss_mask: 0.4619, decode.d1.loss_dice: 0.6422, decode.d2.loss_cls: 0.2471, decode.d2.loss_mask: 0.4497, decode.d2.loss_dice: 0.6208, decode.d3.loss_cls: 0.2082, decode.d3.loss_mask: 0.4450, decode.d3.loss_dice: 0.6136, decode.d4.loss_cls: 0.1918, decode.d4.loss_mask: 0.4466, decode.d4.loss_dice: 0.6162, decode.d5.loss_cls: 0.1882, decode.d5.loss_mask: 0.4434, decode.d5.loss_dice: 0.6104, decode.d6.loss_cls: 0.1759, decode.d6.loss_mask: 0.4454, decode.d6.loss_dice: 0.6103, decode.d7.loss_cls: 0.1733, decode.d7.loss_mask: 0.4453, decode.d7.loss_dice: 0.6147, decode.d8.loss_cls: 0.1753, decode.d8.loss_mask: 0.4461, decode.d8.loss_dice: 0.6090, loss: 14.2662 +2022-06-05 05:17:11,831 - mmseg - INFO - Iter [26500/40000] lr: 2.574e-06, eta: 1:49:30, time: 0.439, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1909, decode.loss_mask: 0.4292, decode.loss_dice: 0.6203, decode.d0.loss_cls: 1.6193, decode.d0.loss_mask: 0.4699, decode.d0.loss_dice: 0.7167, decode.d1.loss_cls: 0.3510, decode.d1.loss_mask: 0.4485, decode.d1.loss_dice: 0.6614, decode.d2.loss_cls: 0.2528, decode.d2.loss_mask: 0.4343, decode.d2.loss_dice: 0.6319, decode.d3.loss_cls: 0.2278, decode.d3.loss_mask: 0.4315, decode.d3.loss_dice: 0.6196, decode.d4.loss_cls: 0.2103, decode.d4.loss_mask: 0.4307, decode.d4.loss_dice: 0.6228, decode.d5.loss_cls: 0.2028, decode.d5.loss_mask: 0.4290, decode.d5.loss_dice: 0.6190, decode.d6.loss_cls: 0.1910, decode.d6.loss_mask: 0.4317, decode.d6.loss_dice: 0.6230, decode.d7.loss_cls: 0.1914, decode.d7.loss_mask: 0.4282, decode.d7.loss_dice: 0.6144, decode.d8.loss_cls: 0.1929, decode.d8.loss_mask: 0.4284, decode.d8.loss_dice: 0.6175, loss: 14.3381 +2022-06-05 05:17:36,277 - mmseg - INFO - Iter [26550/40000] lr: 2.564e-06, eta: 1:49:06, time: 0.488, data_time: 0.064, memory: 31652, decode.loss_cls: 0.1634, decode.loss_mask: 0.4232, decode.loss_dice: 0.6085, decode.d0.loss_cls: 1.6189, decode.d0.loss_mask: 0.4658, 
decode.d0.loss_dice: 0.7005, decode.d1.loss_cls: 0.3235, decode.d1.loss_mask: 0.4376, decode.d1.loss_dice: 0.6413, decode.d2.loss_cls: 0.2400, decode.d2.loss_mask: 0.4270, decode.d2.loss_dice: 0.6182, decode.d3.loss_cls: 0.2023, decode.d3.loss_mask: 0.4233, decode.d3.loss_dice: 0.6029, decode.d4.loss_cls: 0.1892, decode.d4.loss_mask: 0.4244, decode.d4.loss_dice: 0.6104, decode.d5.loss_cls: 0.1799, decode.d5.loss_mask: 0.4233, decode.d5.loss_dice: 0.6036, decode.d6.loss_cls: 0.1737, decode.d6.loss_mask: 0.4228, decode.d6.loss_dice: 0.6002, decode.d7.loss_cls: 0.1691, decode.d7.loss_mask: 0.4235, decode.d7.loss_dice: 0.6039, decode.d8.loss_cls: 0.1698, decode.d8.loss_mask: 0.4221, decode.d8.loss_dice: 0.6064, loss: 13.9186 +2022-06-05 05:17:59,125 - mmseg - INFO - Iter [26600/40000] lr: 2.555e-06, eta: 1:48:41, time: 0.457, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1678, decode.loss_mask: 0.4392, decode.loss_dice: 0.5820, decode.d0.loss_cls: 1.6070, decode.d0.loss_mask: 0.4788, decode.d0.loss_dice: 0.6767, decode.d1.loss_cls: 0.3276, decode.d1.loss_mask: 0.4528, decode.d1.loss_dice: 0.6238, decode.d2.loss_cls: 0.2313, decode.d2.loss_mask: 0.4431, decode.d2.loss_dice: 0.5950, decode.d3.loss_cls: 0.2000, decode.d3.loss_mask: 0.4393, decode.d3.loss_dice: 0.5813, decode.d4.loss_cls: 0.1874, decode.d4.loss_mask: 0.4403, decode.d4.loss_dice: 0.5844, decode.d5.loss_cls: 0.1832, decode.d5.loss_mask: 0.4411, decode.d5.loss_dice: 0.5772, decode.d6.loss_cls: 0.1717, decode.d6.loss_mask: 0.4414, decode.d6.loss_dice: 0.5791, decode.d7.loss_cls: 0.1723, decode.d7.loss_mask: 0.4369, decode.d7.loss_dice: 0.5777, decode.d8.loss_cls: 0.1689, decode.d8.loss_mask: 0.4390, decode.d8.loss_dice: 0.5827, loss: 13.8289 +2022-06-05 05:18:21,091 - mmseg - INFO - Iter [26650/40000] lr: 2.545e-06, eta: 1:48:15, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1810, decode.loss_mask: 0.4383, decode.loss_dice: 0.6066, decode.d0.loss_cls: 1.6250, decode.d0.loss_mask: 0.4818, decode.d0.loss_dice: 0.7026, decode.d1.loss_cls: 0.3457, decode.d1.loss_mask: 0.4570, decode.d1.loss_dice: 0.6404, decode.d2.loss_cls: 0.2494, decode.d2.loss_mask: 0.4438, decode.d2.loss_dice: 0.6178, decode.d3.loss_cls: 0.2120, decode.d3.loss_mask: 0.4386, decode.d3.loss_dice: 0.6024, decode.d4.loss_cls: 0.1980, decode.d4.loss_mask: 0.4401, decode.d4.loss_dice: 0.6083, decode.d5.loss_cls: 0.1926, decode.d5.loss_mask: 0.4381, decode.d5.loss_dice: 0.6061, decode.d6.loss_cls: 0.1837, decode.d6.loss_mask: 0.4386, decode.d6.loss_dice: 0.6080, decode.d7.loss_cls: 0.1831, decode.d7.loss_mask: 0.4390, decode.d7.loss_dice: 0.6057, decode.d8.loss_cls: 0.1759, decode.d8.loss_mask: 0.4397, decode.d8.loss_dice: 0.6031, loss: 14.2021 +2022-06-05 05:18:43,114 - mmseg - INFO - Iter [26700/40000] lr: 2.536e-06, eta: 1:47:50, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1712, decode.loss_mask: 0.4305, decode.loss_dice: 0.5931, decode.d0.loss_cls: 1.6318, decode.d0.loss_mask: 0.4766, decode.d0.loss_dice: 0.6940, decode.d1.loss_cls: 0.3394, decode.d1.loss_mask: 0.4520, decode.d1.loss_dice: 0.6352, decode.d2.loss_cls: 0.2416, decode.d2.loss_mask: 0.4393, decode.d2.loss_dice: 0.6080, decode.d3.loss_cls: 0.2089, decode.d3.loss_mask: 0.4361, decode.d3.loss_dice: 0.5985, decode.d4.loss_cls: 0.1922, decode.d4.loss_mask: 0.4350, decode.d4.loss_dice: 0.5992, decode.d5.loss_cls: 0.1858, decode.d5.loss_mask: 0.4296, decode.d5.loss_dice: 0.5966, decode.d6.loss_cls: 0.1805, decode.d6.loss_mask: 0.4309, decode.d6.loss_dice: 0.5962, 
decode.d7.loss_cls: 0.1771, decode.d7.loss_mask: 0.4301, decode.d7.loss_dice: 0.5922, decode.d8.loss_cls: 0.1675, decode.d8.loss_mask: 0.4319, decode.d8.loss_dice: 0.5933, loss: 13.9942 +2022-06-05 05:19:05,143 - mmseg - INFO - Iter [26750/40000] lr: 2.526e-06, eta: 1:47:24, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1832, decode.loss_mask: 0.4417, decode.loss_dice: 0.6353, decode.d0.loss_cls: 1.6083, decode.d0.loss_mask: 0.4819, decode.d0.loss_dice: 0.7375, decode.d1.loss_cls: 0.3528, decode.d1.loss_mask: 0.4589, decode.d1.loss_dice: 0.6682, decode.d2.loss_cls: 0.2552, decode.d2.loss_mask: 0.4467, decode.d2.loss_dice: 0.6479, decode.d3.loss_cls: 0.2179, decode.d3.loss_mask: 0.4465, decode.d3.loss_dice: 0.6380, decode.d4.loss_cls: 0.2146, decode.d4.loss_mask: 0.4453, decode.d4.loss_dice: 0.6347, decode.d5.loss_cls: 0.2024, decode.d5.loss_mask: 0.4441, decode.d5.loss_dice: 0.6359, decode.d6.loss_cls: 0.1916, decode.d6.loss_mask: 0.4430, decode.d6.loss_dice: 0.6317, decode.d7.loss_cls: 0.1825, decode.d7.loss_mask: 0.4431, decode.d7.loss_dice: 0.6365, decode.d8.loss_cls: 0.1841, decode.d8.loss_mask: 0.4417, decode.d8.loss_dice: 0.6357, loss: 14.5870 +2022-06-05 05:19:27,392 - mmseg - INFO - Iter [26800/40000] lr: 2.517e-06, eta: 1:46:59, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1581, decode.loss_mask: 0.4243, decode.loss_dice: 0.6062, decode.d0.loss_cls: 1.6106, decode.d0.loss_mask: 0.4586, decode.d0.loss_dice: 0.7062, decode.d1.loss_cls: 0.3295, decode.d1.loss_mask: 0.4409, decode.d1.loss_dice: 0.6529, decode.d2.loss_cls: 0.2353, decode.d2.loss_mask: 0.4286, decode.d2.loss_dice: 0.6222, decode.d3.loss_cls: 0.1954, decode.d3.loss_mask: 0.4239, decode.d3.loss_dice: 0.6086, decode.d4.loss_cls: 0.1800, decode.d4.loss_mask: 0.4235, decode.d4.loss_dice: 0.6110, decode.d5.loss_cls: 0.1727, decode.d5.loss_mask: 0.4243, decode.d5.loss_dice: 0.6114, decode.d6.loss_cls: 0.1664, decode.d6.loss_mask: 0.4237, decode.d6.loss_dice: 0.6058, decode.d7.loss_cls: 0.1658, decode.d7.loss_mask: 0.4232, decode.d7.loss_dice: 0.6082, decode.d8.loss_cls: 0.1627, decode.d8.loss_mask: 0.4243, decode.d8.loss_dice: 0.6065, loss: 13.9107 +2022-06-05 05:19:52,328 - mmseg - INFO - Iter [26850/40000] lr: 2.507e-06, eta: 1:46:35, time: 0.499, data_time: 0.059, memory: 31652, decode.loss_cls: 0.2232, decode.loss_mask: 0.4456, decode.loss_dice: 0.6288, decode.d0.loss_cls: 1.6529, decode.d0.loss_mask: 0.4831, decode.d0.loss_dice: 0.7321, decode.d1.loss_cls: 0.3735, decode.d1.loss_mask: 0.4610, decode.d1.loss_dice: 0.6685, decode.d2.loss_cls: 0.2771, decode.d2.loss_mask: 0.4542, decode.d2.loss_dice: 0.6459, decode.d3.loss_cls: 0.2421, decode.d3.loss_mask: 0.4495, decode.d3.loss_dice: 0.6361, decode.d4.loss_cls: 0.2372, decode.d4.loss_mask: 0.4464, decode.d4.loss_dice: 0.6421, decode.d5.loss_cls: 0.2297, decode.d5.loss_mask: 0.4447, decode.d5.loss_dice: 0.6321, decode.d6.loss_cls: 0.2246, decode.d6.loss_mask: 0.4434, decode.d6.loss_dice: 0.6374, decode.d7.loss_cls: 0.2187, decode.d7.loss_mask: 0.4423, decode.d7.loss_dice: 0.6398, decode.d8.loss_cls: 0.2184, decode.d8.loss_mask: 0.4488, decode.d8.loss_dice: 0.6329, loss: 14.9121 +2022-06-05 05:20:14,319 - mmseg - INFO - Iter [26900/40000] lr: 2.498e-06, eta: 1:46:10, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1770, decode.loss_mask: 0.4262, decode.loss_dice: 0.5952, decode.d0.loss_cls: 1.6030, decode.d0.loss_mask: 0.4707, decode.d0.loss_dice: 0.6953, decode.d1.loss_cls: 0.3553, decode.d1.loss_mask: 0.4387, 
decode.d1.loss_dice: 0.6289, decode.d2.loss_cls: 0.2534, decode.d2.loss_mask: 0.4289, decode.d2.loss_dice: 0.6079, decode.d3.loss_cls: 0.2126, decode.d3.loss_mask: 0.4282, decode.d3.loss_dice: 0.5986, decode.d4.loss_cls: 0.1985, decode.d4.loss_mask: 0.4249, decode.d4.loss_dice: 0.5948, decode.d5.loss_cls: 0.1922, decode.d5.loss_mask: 0.4249, decode.d5.loss_dice: 0.5916, decode.d6.loss_cls: 0.1847, decode.d6.loss_mask: 0.4244, decode.d6.loss_dice: 0.5970, decode.d7.loss_cls: 0.1852, decode.d7.loss_mask: 0.4258, decode.d7.loss_dice: 0.5935, decode.d8.loss_cls: 0.1812, decode.d8.loss_mask: 0.4231, decode.d8.loss_dice: 0.5955, loss: 13.9567 +2022-06-05 05:20:36,087 - mmseg - INFO - Iter [26950/40000] lr: 2.488e-06, eta: 1:45:44, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1611, decode.loss_mask: 0.4378, decode.loss_dice: 0.6104, decode.d0.loss_cls: 1.6165, decode.d0.loss_mask: 0.4753, decode.d0.loss_dice: 0.6992, decode.d1.loss_cls: 0.3253, decode.d1.loss_mask: 0.4523, decode.d1.loss_dice: 0.6430, decode.d2.loss_cls: 0.2278, decode.d2.loss_mask: 0.4460, decode.d2.loss_dice: 0.6190, decode.d3.loss_cls: 0.1906, decode.d3.loss_mask: 0.4441, decode.d3.loss_dice: 0.6129, decode.d4.loss_cls: 0.1830, decode.d4.loss_mask: 0.4430, decode.d4.loss_dice: 0.6142, decode.d5.loss_cls: 0.1824, decode.d5.loss_mask: 0.4414, decode.d5.loss_dice: 0.6089, decode.d6.loss_cls: 0.1686, decode.d6.loss_mask: 0.4425, decode.d6.loss_dice: 0.6105, decode.d7.loss_cls: 0.1702, decode.d7.loss_mask: 0.4406, decode.d7.loss_dice: 0.6115, decode.d8.loss_cls: 0.1629, decode.d8.loss_mask: 0.4398, decode.d8.loss_dice: 0.6105, loss: 14.0913 +2022-06-05 05:20:58,363 - mmseg - INFO - Saving checkpoint at 27000 iterations +2022-06-05 05:21:02,026 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:21:02,027 - mmseg - INFO - Iter [27000/40000] lr: 2.479e-06, eta: 1:45:20, time: 0.518, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1751, decode.loss_mask: 0.4343, decode.loss_dice: 0.6012, decode.d0.loss_cls: 1.6111, decode.d0.loss_mask: 0.4790, decode.d0.loss_dice: 0.6938, decode.d1.loss_cls: 0.3304, decode.d1.loss_mask: 0.4535, decode.d1.loss_dice: 0.6294, decode.d2.loss_cls: 0.2395, decode.d2.loss_mask: 0.4454, decode.d2.loss_dice: 0.6099, decode.d3.loss_cls: 0.2087, decode.d3.loss_mask: 0.4391, decode.d3.loss_dice: 0.6017, decode.d4.loss_cls: 0.2005, decode.d4.loss_mask: 0.4378, decode.d4.loss_dice: 0.6003, decode.d5.loss_cls: 0.1877, decode.d5.loss_mask: 0.4352, decode.d5.loss_dice: 0.6004, decode.d6.loss_cls: 0.1813, decode.d6.loss_mask: 0.4345, decode.d6.loss_dice: 0.5990, decode.d7.loss_cls: 0.1780, decode.d7.loss_mask: 0.4331, decode.d7.loss_dice: 0.5989, decode.d8.loss_cls: 0.1749, decode.d8.loss_mask: 0.4341, decode.d8.loss_dice: 0.5996, loss: 14.0473 +2022-06-05 05:21:23,988 - mmseg - INFO - Iter [27050/40000] lr: 2.469e-06, eta: 1:44:55, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1681, decode.loss_mask: 0.4420, decode.loss_dice: 0.5956, decode.d0.loss_cls: 1.5879, decode.d0.loss_mask: 0.4845, decode.d0.loss_dice: 0.6853, decode.d1.loss_cls: 0.3210, decode.d1.loss_mask: 0.4596, decode.d1.loss_dice: 0.6314, decode.d2.loss_cls: 0.2325, decode.d2.loss_mask: 0.4496, decode.d2.loss_dice: 0.6109, decode.d3.loss_cls: 0.2002, decode.d3.loss_mask: 0.4449, decode.d3.loss_dice: 0.6006, decode.d4.loss_cls: 0.1865, decode.d4.loss_mask: 0.4440, decode.d4.loss_dice: 0.6010, decode.d5.loss_cls: 0.1850, decode.d5.loss_mask: 0.4447, 
decode.d5.loss_dice: 0.5945, decode.d6.loss_cls: 0.1715, decode.d6.loss_mask: 0.4431, decode.d6.loss_dice: 0.5942, decode.d7.loss_cls: 0.1690, decode.d7.loss_mask: 0.4427, decode.d7.loss_dice: 0.5933, decode.d8.loss_cls: 0.1705, decode.d8.loss_mask: 0.4428, decode.d8.loss_dice: 0.5972, loss: 13.9941 +2022-06-05 05:21:46,111 - mmseg - INFO - Iter [27100/40000] lr: 2.459e-06, eta: 1:44:30, time: 0.442, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1599, decode.loss_mask: 0.4284, decode.loss_dice: 0.6045, decode.d0.loss_cls: 1.6014, decode.d0.loss_mask: 0.4713, decode.d0.loss_dice: 0.6994, decode.d1.loss_cls: 0.3162, decode.d1.loss_mask: 0.4450, decode.d1.loss_dice: 0.6389, decode.d2.loss_cls: 0.2348, decode.d2.loss_mask: 0.4332, decode.d2.loss_dice: 0.6146, decode.d3.loss_cls: 0.1957, decode.d3.loss_mask: 0.4300, decode.d3.loss_dice: 0.6073, decode.d4.loss_cls: 0.1897, decode.d4.loss_mask: 0.4290, decode.d4.loss_dice: 0.6068, decode.d5.loss_cls: 0.1765, decode.d5.loss_mask: 0.4290, decode.d5.loss_dice: 0.6023, decode.d6.loss_cls: 0.1666, decode.d6.loss_mask: 0.4303, decode.d6.loss_dice: 0.6005, decode.d7.loss_cls: 0.1594, decode.d7.loss_mask: 0.4286, decode.d7.loss_dice: 0.6031, decode.d8.loss_cls: 0.1627, decode.d8.loss_mask: 0.4271, decode.d8.loss_dice: 0.6038, loss: 13.8960 +2022-06-05 05:22:11,302 - mmseg - INFO - Iter [27150/40000] lr: 2.450e-06, eta: 1:44:06, time: 0.503, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1899, decode.loss_mask: 0.4270, decode.loss_dice: 0.6233, decode.d0.loss_cls: 1.6588, decode.d0.loss_mask: 0.4744, decode.d0.loss_dice: 0.7345, decode.d1.loss_cls: 0.3475, decode.d1.loss_mask: 0.4465, decode.d1.loss_dice: 0.6625, decode.d2.loss_cls: 0.2560, decode.d2.loss_mask: 0.4333, decode.d2.loss_dice: 0.6388, decode.d3.loss_cls: 0.2209, decode.d3.loss_mask: 0.4274, decode.d3.loss_dice: 0.6251, decode.d4.loss_cls: 0.2075, decode.d4.loss_mask: 0.4294, decode.d4.loss_dice: 0.6321, decode.d5.loss_cls: 0.1960, decode.d5.loss_mask: 0.4306, decode.d5.loss_dice: 0.6301, decode.d6.loss_cls: 0.1922, decode.d6.loss_mask: 0.4281, decode.d6.loss_dice: 0.6243, decode.d7.loss_cls: 0.1853, decode.d7.loss_mask: 0.4273, decode.d7.loss_dice: 0.6273, decode.d8.loss_cls: 0.1899, decode.d8.loss_mask: 0.4257, decode.d8.loss_dice: 0.6232, loss: 14.4147 +2022-06-05 05:22:34,040 - mmseg - INFO - Iter [27200/40000] lr: 2.440e-06, eta: 1:43:41, time: 0.455, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1808, decode.loss_mask: 0.4268, decode.loss_dice: 0.6127, decode.d0.loss_cls: 1.6532, decode.d0.loss_mask: 0.4617, decode.d0.loss_dice: 0.7119, decode.d1.loss_cls: 0.3456, decode.d1.loss_mask: 0.4437, decode.d1.loss_dice: 0.6466, decode.d2.loss_cls: 0.2439, decode.d2.loss_mask: 0.4336, decode.d2.loss_dice: 0.6303, decode.d3.loss_cls: 0.2069, decode.d3.loss_mask: 0.4294, decode.d3.loss_dice: 0.6195, decode.d4.loss_cls: 0.1952, decode.d4.loss_mask: 0.4293, decode.d4.loss_dice: 0.6175, decode.d5.loss_cls: 0.1886, decode.d5.loss_mask: 0.4293, decode.d5.loss_dice: 0.6138, decode.d6.loss_cls: 0.1861, decode.d6.loss_mask: 0.4261, decode.d6.loss_dice: 0.6139, decode.d7.loss_cls: 0.1827, decode.d7.loss_mask: 0.4273, decode.d7.loss_dice: 0.6125, decode.d8.loss_cls: 0.1786, decode.d8.loss_mask: 0.4277, decode.d8.loss_dice: 0.6138, loss: 14.1890 +2022-06-05 05:22:55,895 - mmseg - INFO - Iter [27250/40000] lr: 2.431e-06, eta: 1:43:15, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1552, decode.loss_mask: 0.4338, decode.loss_dice: 0.6055, decode.d0.loss_cls: 1.5997, 
decode.d0.loss_mask: 0.4830, decode.d0.loss_dice: 0.7070, decode.d1.loss_cls: 0.3221, decode.d1.loss_mask: 0.4542, decode.d1.loss_dice: 0.6443, decode.d2.loss_cls: 0.2292, decode.d2.loss_mask: 0.4414, decode.d2.loss_dice: 0.6125, decode.d3.loss_cls: 0.1918, decode.d3.loss_mask: 0.4380, decode.d3.loss_dice: 0.6070, decode.d4.loss_cls: 0.1786, decode.d4.loss_mask: 0.4374, decode.d4.loss_dice: 0.6054, decode.d5.loss_cls: 0.1728, decode.d5.loss_mask: 0.4376, decode.d5.loss_dice: 0.6082, decode.d6.loss_cls: 0.1698, decode.d6.loss_mask: 0.4363, decode.d6.loss_dice: 0.6050, decode.d7.loss_cls: 0.1664, decode.d7.loss_mask: 0.4333, decode.d7.loss_dice: 0.6044, decode.d8.loss_cls: 0.1633, decode.d8.loss_mask: 0.4327, decode.d8.loss_dice: 0.6041, loss: 13.9800 +2022-06-05 05:23:17,794 - mmseg - INFO - Iter [27300/40000] lr: 2.421e-06, eta: 1:42:50, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1775, decode.loss_mask: 0.4337, decode.loss_dice: 0.6214, decode.d0.loss_cls: 1.6097, decode.d0.loss_mask: 0.4793, decode.d0.loss_dice: 0.7203, decode.d1.loss_cls: 0.3154, decode.d1.loss_mask: 0.4514, decode.d1.loss_dice: 0.6523, decode.d2.loss_cls: 0.2278, decode.d2.loss_mask: 0.4407, decode.d2.loss_dice: 0.6318, decode.d3.loss_cls: 0.2028, decode.d3.loss_mask: 0.4356, decode.d3.loss_dice: 0.6193, decode.d4.loss_cls: 0.1954, decode.d4.loss_mask: 0.4359, decode.d4.loss_dice: 0.6224, decode.d5.loss_cls: 0.1889, decode.d5.loss_mask: 0.4347, decode.d5.loss_dice: 0.6214, decode.d6.loss_cls: 0.1842, decode.d6.loss_mask: 0.4334, decode.d6.loss_dice: 0.6196, decode.d7.loss_cls: 0.1787, decode.d7.loss_mask: 0.4335, decode.d7.loss_dice: 0.6195, decode.d8.loss_cls: 0.1703, decode.d8.loss_mask: 0.4342, decode.d8.loss_dice: 0.6221, loss: 14.2132 +2022-06-05 05:23:39,805 - mmseg - INFO - Iter [27350/40000] lr: 2.412e-06, eta: 1:42:25, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1594, decode.loss_mask: 0.4359, decode.loss_dice: 0.5837, decode.d0.loss_cls: 1.5487, decode.d0.loss_mask: 0.4798, decode.d0.loss_dice: 0.6806, decode.d1.loss_cls: 0.3212, decode.d1.loss_mask: 0.4533, decode.d1.loss_dice: 0.6212, decode.d2.loss_cls: 0.2205, decode.d2.loss_mask: 0.4421, decode.d2.loss_dice: 0.5962, decode.d3.loss_cls: 0.1846, decode.d3.loss_mask: 0.4397, decode.d3.loss_dice: 0.5877, decode.d4.loss_cls: 0.1719, decode.d4.loss_mask: 0.4403, decode.d4.loss_dice: 0.5869, decode.d5.loss_cls: 0.1686, decode.d5.loss_mask: 0.4388, decode.d5.loss_dice: 0.5852, decode.d6.loss_cls: 0.1609, decode.d6.loss_mask: 0.4370, decode.d6.loss_dice: 0.5857, decode.d7.loss_cls: 0.1607, decode.d7.loss_mask: 0.4345, decode.d7.loss_dice: 0.5864, decode.d8.loss_cls: 0.1611, decode.d8.loss_mask: 0.4349, decode.d8.loss_dice: 0.5840, loss: 13.6917 +2022-06-05 05:24:02,064 - mmseg - INFO - Iter [27400/40000] lr: 2.402e-06, eta: 1:41:59, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1796, decode.loss_mask: 0.4247, decode.loss_dice: 0.6015, decode.d0.loss_cls: 1.6084, decode.d0.loss_mask: 0.4693, decode.d0.loss_dice: 0.6881, decode.d1.loss_cls: 0.3424, decode.d1.loss_mask: 0.4402, decode.d1.loss_dice: 0.6398, decode.d2.loss_cls: 0.2426, decode.d2.loss_mask: 0.4303, decode.d2.loss_dice: 0.6175, decode.d3.loss_cls: 0.2128, decode.d3.loss_mask: 0.4255, decode.d3.loss_dice: 0.6091, decode.d4.loss_cls: 0.1964, decode.d4.loss_mask: 0.4264, decode.d4.loss_dice: 0.6069, decode.d5.loss_cls: 0.1981, decode.d5.loss_mask: 0.4251, decode.d5.loss_dice: 0.6021, decode.d6.loss_cls: 0.1873, decode.d6.loss_mask: 0.4262, 
decode.d6.loss_dice: 0.6037, decode.d7.loss_cls: 0.1850, decode.d7.loss_mask: 0.4255, decode.d7.loss_dice: 0.6040, decode.d8.loss_cls: 0.1774, decode.d8.loss_mask: 0.4253, decode.d8.loss_dice: 0.5984, loss: 14.0194 +2022-06-05 05:24:23,962 - mmseg - INFO - Iter [27450/40000] lr: 2.393e-06, eta: 1:41:34, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1573, decode.loss_mask: 0.4219, decode.loss_dice: 0.5811, decode.d0.loss_cls: 1.6063, decode.d0.loss_mask: 0.4649, decode.d0.loss_dice: 0.6772, decode.d1.loss_cls: 0.3137, decode.d1.loss_mask: 0.4368, decode.d1.loss_dice: 0.6192, decode.d2.loss_cls: 0.2209, decode.d2.loss_mask: 0.4261, decode.d2.loss_dice: 0.5913, decode.d3.loss_cls: 0.1872, decode.d3.loss_mask: 0.4237, decode.d3.loss_dice: 0.5894, decode.d4.loss_cls: 0.1785, decode.d4.loss_mask: 0.4233, decode.d4.loss_dice: 0.5822, decode.d5.loss_cls: 0.1716, decode.d5.loss_mask: 0.4238, decode.d5.loss_dice: 0.5808, decode.d6.loss_cls: 0.1695, decode.d6.loss_mask: 0.4234, decode.d6.loss_dice: 0.5794, decode.d7.loss_cls: 0.1645, decode.d7.loss_mask: 0.4219, decode.d7.loss_dice: 0.5741, decode.d8.loss_cls: 0.1612, decode.d8.loss_mask: 0.4222, decode.d8.loss_dice: 0.5800, loss: 13.5731 +2022-06-05 05:24:48,697 - mmseg - INFO - Iter [27500/40000] lr: 2.383e-06, eta: 1:41:10, time: 0.494, data_time: 0.054, memory: 31652, decode.loss_cls: 0.1687, decode.loss_mask: 0.4215, decode.loss_dice: 0.5904, decode.d0.loss_cls: 1.5733, decode.d0.loss_mask: 0.4648, decode.d0.loss_dice: 0.6865, decode.d1.loss_cls: 0.3327, decode.d1.loss_mask: 0.4403, decode.d1.loss_dice: 0.6265, decode.d2.loss_cls: 0.2402, decode.d2.loss_mask: 0.4292, decode.d2.loss_dice: 0.6070, decode.d3.loss_cls: 0.2042, decode.d3.loss_mask: 0.4242, decode.d3.loss_dice: 0.5974, decode.d4.loss_cls: 0.1934, decode.d4.loss_mask: 0.4241, decode.d4.loss_dice: 0.5954, decode.d5.loss_cls: 0.1792, decode.d5.loss_mask: 0.4244, decode.d5.loss_dice: 0.5921, decode.d6.loss_cls: 0.1724, decode.d6.loss_mask: 0.4255, decode.d6.loss_dice: 0.5935, decode.d7.loss_cls: 0.1671, decode.d7.loss_mask: 0.4236, decode.d7.loss_dice: 0.5924, decode.d8.loss_cls: 0.1731, decode.d8.loss_mask: 0.4231, decode.d8.loss_dice: 0.5894, loss: 13.7758 +2022-06-05 05:25:11,053 - mmseg - INFO - Iter [27550/40000] lr: 2.374e-06, eta: 1:40:45, time: 0.448, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1784, decode.loss_mask: 0.4252, decode.loss_dice: 0.5932, decode.d0.loss_cls: 1.5921, decode.d0.loss_mask: 0.4622, decode.d0.loss_dice: 0.6910, decode.d1.loss_cls: 0.3355, decode.d1.loss_mask: 0.4399, decode.d1.loss_dice: 0.6384, decode.d2.loss_cls: 0.2408, decode.d2.loss_mask: 0.4302, decode.d2.loss_dice: 0.6147, decode.d3.loss_cls: 0.2008, decode.d3.loss_mask: 0.4281, decode.d3.loss_dice: 0.6063, decode.d4.loss_cls: 0.1911, decode.d4.loss_mask: 0.4284, decode.d4.loss_dice: 0.6046, decode.d5.loss_cls: 0.1842, decode.d5.loss_mask: 0.4274, decode.d5.loss_dice: 0.6046, decode.d6.loss_cls: 0.1784, decode.d6.loss_mask: 0.4261, decode.d6.loss_dice: 0.6007, decode.d7.loss_cls: 0.1738, decode.d7.loss_mask: 0.4259, decode.d7.loss_dice: 0.6008, decode.d8.loss_cls: 0.1700, decode.d8.loss_mask: 0.4260, decode.d8.loss_dice: 0.6000, loss: 13.9190 +2022-06-05 05:25:33,039 - mmseg - INFO - Iter [27600/40000] lr: 2.364e-06, eta: 1:40:19, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1556, decode.loss_mask: 0.4231, decode.loss_dice: 0.5747, decode.d0.loss_cls: 1.5520, decode.d0.loss_mask: 0.4644, decode.d0.loss_dice: 0.6663, decode.d1.loss_cls: 0.2996, 
decode.d1.loss_mask: 0.4412, decode.d1.loss_dice: 0.6111, decode.d2.loss_cls: 0.2236, decode.d2.loss_mask: 0.4305, decode.d2.loss_dice: 0.5901, decode.d3.loss_cls: 0.1870, decode.d3.loss_mask: 0.4272, decode.d3.loss_dice: 0.5798, decode.d4.loss_cls: 0.1786, decode.d4.loss_mask: 0.4256, decode.d4.loss_dice: 0.5788, decode.d5.loss_cls: 0.1703, decode.d5.loss_mask: 0.4249, decode.d5.loss_dice: 0.5798, decode.d6.loss_cls: 0.1646, decode.d6.loss_mask: 0.4234, decode.d6.loss_dice: 0.5773, decode.d7.loss_cls: 0.1607, decode.d7.loss_mask: 0.4239, decode.d7.loss_dice: 0.5731, decode.d8.loss_cls: 0.1641, decode.d8.loss_mask: 0.4245, decode.d8.loss_dice: 0.5722, loss: 13.4679 +2022-06-05 05:25:55,233 - mmseg - INFO - Iter [27650/40000] lr: 2.355e-06, eta: 1:39:54, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1695, decode.loss_mask: 0.4368, decode.loss_dice: 0.6206, decode.d0.loss_cls: 1.5973, decode.d0.loss_mask: 0.4824, decode.d0.loss_dice: 0.7262, decode.d1.loss_cls: 0.3204, decode.d1.loss_mask: 0.4527, decode.d1.loss_dice: 0.6589, decode.d2.loss_cls: 0.2299, decode.d2.loss_mask: 0.4409, decode.d2.loss_dice: 0.6282, decode.d3.loss_cls: 0.1905, decode.d3.loss_mask: 0.4361, decode.d3.loss_dice: 0.6249, decode.d4.loss_cls: 0.1846, decode.d4.loss_mask: 0.4372, decode.d4.loss_dice: 0.6254, decode.d5.loss_cls: 0.1783, decode.d5.loss_mask: 0.4379, decode.d5.loss_dice: 0.6262, decode.d6.loss_cls: 0.1737, decode.d6.loss_mask: 0.4365, decode.d6.loss_dice: 0.6180, decode.d7.loss_cls: 0.1741, decode.d7.loss_mask: 0.4363, decode.d7.loss_dice: 0.6207, decode.d8.loss_cls: 0.1695, decode.d8.loss_mask: 0.4358, decode.d8.loss_dice: 0.6202, loss: 14.1896 +2022-06-05 05:26:17,069 - mmseg - INFO - Iter [27700/40000] lr: 2.345e-06, eta: 1:39:29, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1635, decode.loss_mask: 0.4214, decode.loss_dice: 0.6078, decode.d0.loss_cls: 1.6475, decode.d0.loss_mask: 0.4619, decode.d0.loss_dice: 0.7043, decode.d1.loss_cls: 0.3321, decode.d1.loss_mask: 0.4374, decode.d1.loss_dice: 0.6382, decode.d2.loss_cls: 0.2384, decode.d2.loss_mask: 0.4278, decode.d2.loss_dice: 0.6138, decode.d3.loss_cls: 0.1905, decode.d3.loss_mask: 0.4251, decode.d3.loss_dice: 0.6092, decode.d4.loss_cls: 0.1756, decode.d4.loss_mask: 0.4217, decode.d4.loss_dice: 0.6082, decode.d5.loss_cls: 0.1727, decode.d5.loss_mask: 0.4219, decode.d5.loss_dice: 0.6081, decode.d6.loss_cls: 0.1717, decode.d6.loss_mask: 0.4218, decode.d6.loss_dice: 0.6050, decode.d7.loss_cls: 0.1640, decode.d7.loss_mask: 0.4224, decode.d7.loss_dice: 0.6066, decode.d8.loss_cls: 0.1636, decode.d8.loss_mask: 0.4219, decode.d8.loss_dice: 0.6097, loss: 13.9138 +2022-06-05 05:26:38,758 - mmseg - INFO - Iter [27750/40000] lr: 2.336e-06, eta: 1:39:03, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1610, decode.loss_mask: 0.4295, decode.loss_dice: 0.6098, decode.d0.loss_cls: 1.6146, decode.d0.loss_mask: 0.4727, decode.d0.loss_dice: 0.7036, decode.d1.loss_cls: 0.3242, decode.d1.loss_mask: 0.4464, decode.d1.loss_dice: 0.6417, decode.d2.loss_cls: 0.2331, decode.d2.loss_mask: 0.4385, decode.d2.loss_dice: 0.6250, decode.d3.loss_cls: 0.1966, decode.d3.loss_mask: 0.4332, decode.d3.loss_dice: 0.6143, decode.d4.loss_cls: 0.1893, decode.d4.loss_mask: 0.4318, decode.d4.loss_dice: 0.6117, decode.d5.loss_cls: 0.1801, decode.d5.loss_mask: 0.4308, decode.d5.loss_dice: 0.6130, decode.d6.loss_cls: 0.1672, decode.d6.loss_mask: 0.4311, decode.d6.loss_dice: 0.6086, decode.d7.loss_cls: 0.1658, decode.d7.loss_mask: 0.4290, 
decode.d7.loss_dice: 0.6116, decode.d8.loss_cls: 0.1674, decode.d8.loss_mask: 0.4286, decode.d8.loss_dice: 0.6112, loss: 14.0216 +2022-06-05 05:27:02,624 - mmseg - INFO - Iter [27800/40000] lr: 2.326e-06, eta: 1:38:39, time: 0.477, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1685, decode.loss_mask: 0.4051, decode.loss_dice: 0.5822, decode.d0.loss_cls: 1.6033, decode.d0.loss_mask: 0.4443, decode.d0.loss_dice: 0.6858, decode.d1.loss_cls: 0.3312, decode.d1.loss_mask: 0.4241, decode.d1.loss_dice: 0.6305, decode.d2.loss_cls: 0.2452, decode.d2.loss_mask: 0.4102, decode.d2.loss_dice: 0.5998, decode.d3.loss_cls: 0.1993, decode.d3.loss_mask: 0.4083, decode.d3.loss_dice: 0.5865, decode.d4.loss_cls: 0.1848, decode.d4.loss_mask: 0.4080, decode.d4.loss_dice: 0.5899, decode.d5.loss_cls: 0.1789, decode.d5.loss_mask: 0.4086, decode.d5.loss_dice: 0.5885, decode.d6.loss_cls: 0.1748, decode.d6.loss_mask: 0.4046, decode.d6.loss_dice: 0.5843, decode.d7.loss_cls: 0.1684, decode.d7.loss_mask: 0.4049, decode.d7.loss_dice: 0.5835, decode.d8.loss_cls: 0.1661, decode.d8.loss_mask: 0.4039, decode.d8.loss_dice: 0.5855, loss: 13.5593 +2022-06-05 05:27:24,374 - mmseg - INFO - Iter [27850/40000] lr: 2.316e-06, eta: 1:38:14, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1640, decode.loss_mask: 0.4290, decode.loss_dice: 0.5965, decode.d0.loss_cls: 1.5849, decode.d0.loss_mask: 0.4683, decode.d0.loss_dice: 0.6911, decode.d1.loss_cls: 0.3242, decode.d1.loss_mask: 0.4398, decode.d1.loss_dice: 0.6262, decode.d2.loss_cls: 0.2339, decode.d2.loss_mask: 0.4349, decode.d2.loss_dice: 0.6084, decode.d3.loss_cls: 0.1863, decode.d3.loss_mask: 0.4315, decode.d3.loss_dice: 0.6040, decode.d4.loss_cls: 0.1780, decode.d4.loss_mask: 0.4282, decode.d4.loss_dice: 0.6046, decode.d5.loss_cls: 0.1677, decode.d5.loss_mask: 0.4282, decode.d5.loss_dice: 0.6004, decode.d6.loss_cls: 0.1635, decode.d6.loss_mask: 0.4284, decode.d6.loss_dice: 0.5965, decode.d7.loss_cls: 0.1656, decode.d7.loss_mask: 0.4286, decode.d7.loss_dice: 0.5978, decode.d8.loss_cls: 0.1626, decode.d8.loss_mask: 0.4289, decode.d8.loss_dice: 0.6002, loss: 13.8019 +2022-06-05 05:27:46,121 - mmseg - INFO - Iter [27900/40000] lr: 2.307e-06, eta: 1:37:48, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1813, decode.loss_mask: 0.4334, decode.loss_dice: 0.6090, decode.d0.loss_cls: 1.6185, decode.d0.loss_mask: 0.4765, decode.d0.loss_dice: 0.7063, decode.d1.loss_cls: 0.3469, decode.d1.loss_mask: 0.4489, decode.d1.loss_dice: 0.6425, decode.d2.loss_cls: 0.2469, decode.d2.loss_mask: 0.4385, decode.d2.loss_dice: 0.6214, decode.d3.loss_cls: 0.2087, decode.d3.loss_mask: 0.4368, decode.d3.loss_dice: 0.6140, decode.d4.loss_cls: 0.2082, decode.d4.loss_mask: 0.4364, decode.d4.loss_dice: 0.6118, decode.d5.loss_cls: 0.1922, decode.d5.loss_mask: 0.4353, decode.d5.loss_dice: 0.6071, decode.d6.loss_cls: 0.1811, decode.d6.loss_mask: 0.4359, decode.d6.loss_dice: 0.6080, decode.d7.loss_cls: 0.1772, decode.d7.loss_mask: 0.4338, decode.d7.loss_dice: 0.6082, decode.d8.loss_cls: 0.1773, decode.d8.loss_mask: 0.4348, decode.d8.loss_dice: 0.6028, loss: 14.1795 +2022-06-05 05:28:07,707 - mmseg - INFO - Iter [27950/40000] lr: 2.297e-06, eta: 1:37:23, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1581, decode.loss_mask: 0.4272, decode.loss_dice: 0.5749, decode.d0.loss_cls: 1.5760, decode.d0.loss_mask: 0.4691, decode.d0.loss_dice: 0.6741, decode.d1.loss_cls: 0.2978, decode.d1.loss_mask: 0.4440, decode.d1.loss_dice: 0.6132, decode.d2.loss_cls: 0.2169, 
decode.d2.loss_mask: 0.4361, decode.d2.loss_dice: 0.5924, decode.d3.loss_cls: 0.1807, decode.d3.loss_mask: 0.4314, decode.d3.loss_dice: 0.5813, decode.d4.loss_cls: 0.1751, decode.d4.loss_mask: 0.4297, decode.d4.loss_dice: 0.5802, decode.d5.loss_cls: 0.1647, decode.d5.loss_mask: 0.4275, decode.d5.loss_dice: 0.5766, decode.d6.loss_cls: 0.1601, decode.d6.loss_mask: 0.4276, decode.d6.loss_dice: 0.5784, decode.d7.loss_cls: 0.1610, decode.d7.loss_mask: 0.4280, decode.d7.loss_dice: 0.5768, decode.d8.loss_cls: 0.1518, decode.d8.loss_mask: 0.4274, decode.d8.loss_dice: 0.5759, loss: 13.5141 +2022-06-05 05:28:29,300 - mmseg - INFO - Saving checkpoint at 28000 iterations +2022-06-05 05:28:31,786 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:28:31,786 - mmseg - INFO - Iter [28000/40000] lr: 2.288e-06, eta: 1:36:59, time: 0.482, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1680, decode.loss_mask: 0.4339, decode.loss_dice: 0.6130, decode.d0.loss_cls: 1.5976, decode.d0.loss_mask: 0.4776, decode.d0.loss_dice: 0.7069, decode.d1.loss_cls: 0.3074, decode.d1.loss_mask: 0.4535, decode.d1.loss_dice: 0.6521, decode.d2.loss_cls: 0.2363, decode.d2.loss_mask: 0.4418, decode.d2.loss_dice: 0.6256, decode.d3.loss_cls: 0.1926, decode.d3.loss_mask: 0.4383, decode.d3.loss_dice: 0.6160, decode.d4.loss_cls: 0.1864, decode.d4.loss_mask: 0.4357, decode.d4.loss_dice: 0.6144, decode.d5.loss_cls: 0.1842, decode.d5.loss_mask: 0.4356, decode.d5.loss_dice: 0.6129, decode.d6.loss_cls: 0.1688, decode.d6.loss_mask: 0.4361, decode.d6.loss_dice: 0.6143, decode.d7.loss_cls: 0.1770, decode.d7.loss_mask: 0.4354, decode.d7.loss_dice: 0.6130, decode.d8.loss_cls: 0.1711, decode.d8.loss_mask: 0.4327, decode.d8.loss_dice: 0.6074, loss: 14.0857 +2022-06-05 05:31:11,207 - mmseg - INFO - per class results: +2022-06-05 05:31:11,211 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 90.48 | 95.1 | +| bag | 40.61 | 56.49 | +| bed | 35.45 | 47.45 | +| bedclothes | 42.82 | 59.78 | +| bench | 22.85 | 30.34 | +| bicycle | 84.3 | 93.0 | +| bird | 93.98 | 96.93 | +| boat | 84.41 | 91.07 | +| book | 50.55 | 63.06 | +| bottle | 86.99 | 95.76 | +| building | 65.01 | 77.96 | +| bus | 93.78 | 97.01 | +| cabinet | 43.6 | 62.83 | +| car | 91.06 | 95.06 | +| cat | 93.84 | 98.06 | +| ceiling | 60.87 | 76.19 | +| chair | 58.86 | 80.49 | +| cloth | 26.0 | 41.85 | +| computer | 37.37 | 54.92 | +| cow | 94.14 | 97.08 | +| cup | 43.59 | 59.51 | +| curtain | 55.32 | 67.88 | +| dog | 91.54 | 96.94 | +| door | 32.99 | 52.37 | +| fence | 46.21 | 61.85 | +| floor | 72.71 | 87.1 | +| flower | 38.81 | 57.3 | +| food | 35.8 | 47.97 | +| grass | 82.25 | 92.58 | +| ground | 55.2 | 67.63 | +| horse | 94.18 | 97.56 | +| keyboard | 77.0 | 82.2 | +| light | 57.45 | 76.46 | +| motorbike | 89.67 | 94.79 | +| mountain | 54.62 | 72.72 | +| mouse | 75.16 | 78.33 | +| person | 90.33 | 96.14 | +| plate | 26.57 | 35.44 | +| platform | 56.0 | 76.73 | +| pottedplant | 80.18 | 88.96 | +| road | 52.79 | 69.76 | +| rock | 47.92 | 56.72 | +| sheep | 94.02 | 97.04 | +| shelves | 35.05 | 56.53 | +| sidewalk | 26.62 | 53.15 | +| sign | 47.26 | 58.91 | +| sky | 94.77 | 97.38 | +| snow | 73.89 | 85.38 | +| sofa | 57.57 | 64.3 | +| table | 68.55 | 80.17 | +| track | 69.02 | 79.23 | +| train | 92.94 | 96.04 | +| tree | 81.21 | 89.9 | +| truck | 38.19 | 48.11 | +| tvmonitor | 87.48 | 93.27 | +| wall | 70.07 | 82.53 | +| water | 91.49 | 95.99 | +| window | 44.65 | 57.98 | 
+| wood | 29.36 | 40.94 | ++-------------+-------+-------+ +2022-06-05 05:31:11,211 - mmseg - INFO - Summary: +2022-06-05 05:31:11,211 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 85.23 | 63.65 | 74.65 | ++-------+-------+-------+ +2022-06-05 05:31:11,214 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_24000.pth was removed +2022-06-05 05:31:14,068 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_28000.pth. +2022-06-05 05:31:14,069 - mmseg - INFO - Best mIoU is 0.6365 at 28000 iter. +2022-06-05 05:31:14,098 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:31:14,098 - mmseg - INFO - Iter(val) [638] aAcc: 0.8523, mIoU: 0.6365, mAcc: 0.7465, IoU.aeroplane: 0.9048, IoU.bag: 0.4061, IoU.bed: 0.3545, IoU.bedclothes: 0.4282, IoU.bench: 0.2285, IoU.bicycle: 0.8430, IoU.bird: 0.9398, IoU.boat: 0.8441, IoU.book: 0.5055, IoU.bottle: 0.8699, IoU.building: 0.6501, IoU.bus: 0.9378, IoU.cabinet: 0.4360, IoU.car: 0.9106, IoU.cat: 0.9384, IoU.ceiling: 0.6087, IoU.chair: 0.5886, IoU.cloth: 0.2600, IoU.computer: 0.3737, IoU.cow: 0.9414, IoU.cup: 0.4359, IoU.curtain: 0.5532, IoU.dog: 0.9154, IoU.door: 0.3299, IoU.fence: 0.4621, IoU.floor: 0.7271, IoU.flower: 0.3881, IoU.food: 0.3580, IoU.grass: 0.8225, IoU.ground: 0.5520, IoU.horse: 0.9418, IoU.keyboard: 0.7700, IoU.light: 0.5745, IoU.motorbike: 0.8967, IoU.mountain: 0.5462, IoU.mouse: 0.7516, IoU.person: 0.9033, IoU.plate: 0.2657, IoU.platform: 0.5600, IoU.pottedplant: 0.8018, IoU.road: 0.5279, IoU.rock: 0.4792, IoU.sheep: 0.9402, IoU.shelves: 0.3505, IoU.sidewalk: 0.2662, IoU.sign: 0.4726, IoU.sky: 0.9477, IoU.snow: 0.7389, IoU.sofa: 0.5757, IoU.table: 0.6855, IoU.track: 0.6902, IoU.train: 0.9294, IoU.tree: 0.8121, IoU.truck: 0.3819, IoU.tvmonitor: 0.8748, IoU.wall: 0.7007, IoU.water: 0.9149, IoU.window: 0.4465, IoU.wood: 0.2936, Acc.aeroplane: 0.9510, Acc.bag: 0.5649, Acc.bed: 0.4745, Acc.bedclothes: 0.5978, Acc.bench: 0.3034, Acc.bicycle: 0.9300, Acc.bird: 0.9693, Acc.boat: 0.9107, Acc.book: 0.6306, Acc.bottle: 0.9576, Acc.building: 0.7796, Acc.bus: 0.9701, Acc.cabinet: 0.6283, Acc.car: 0.9506, Acc.cat: 0.9806, Acc.ceiling: 0.7619, Acc.chair: 0.8049, Acc.cloth: 0.4185, Acc.computer: 0.5492, Acc.cow: 0.9708, Acc.cup: 0.5951, Acc.curtain: 0.6788, Acc.dog: 0.9694, Acc.door: 0.5237, Acc.fence: 0.6185, Acc.floor: 0.8710, Acc.flower: 0.5730, Acc.food: 0.4797, Acc.grass: 0.9258, Acc.ground: 0.6763, Acc.horse: 0.9756, Acc.keyboard: 0.8220, Acc.light: 0.7646, Acc.motorbike: 0.9479, Acc.mountain: 0.7272, Acc.mouse: 0.7833, Acc.person: 0.9614, Acc.plate: 0.3544, Acc.platform: 0.7673, Acc.pottedplant: 0.8896, Acc.road: 0.6976, Acc.rock: 0.5672, Acc.sheep: 0.9704, Acc.shelves: 0.5653, Acc.sidewalk: 0.5315, Acc.sign: 0.5891, Acc.sky: 0.9738, Acc.snow: 0.8538, Acc.sofa: 0.6430, Acc.table: 0.8017, Acc.track: 0.7923, Acc.train: 0.9604, Acc.tree: 0.8990, Acc.truck: 0.4811, Acc.tvmonitor: 0.9327, Acc.wall: 0.8253, Acc.water: 0.9599, Acc.window: 0.5798, Acc.wood: 0.4094 +2022-06-05 05:31:36,499 - mmseg - INFO - Iter [28050/40000] lr: 2.278e-06, eta: 1:37:43, time: 3.694, data_time: 3.254, memory: 31652, decode.loss_cls: 0.1600, decode.loss_mask: 0.4243, decode.loss_dice: 0.6028, decode.d0.loss_cls: 1.5986, decode.d0.loss_mask: 0.4723, decode.d0.loss_dice: 0.7048, decode.d1.loss_cls: 0.3322, 
decode.d1.loss_mask: 0.4448, decode.d1.loss_dice: 0.6419, decode.d2.loss_cls: 0.2394, decode.d2.loss_mask: 0.4323, decode.d2.loss_dice: 0.6142, decode.d3.loss_cls: 0.1988, decode.d3.loss_mask: 0.4271, decode.d3.loss_dice: 0.6025, decode.d4.loss_cls: 0.1871, decode.d4.loss_mask: 0.4262, decode.d4.loss_dice: 0.6031, decode.d5.loss_cls: 0.1707, decode.d5.loss_mask: 0.4254, decode.d5.loss_dice: 0.6031, decode.d6.loss_cls: 0.1703, decode.d6.loss_mask: 0.4239, decode.d6.loss_dice: 0.5998, decode.d7.loss_cls: 0.1641, decode.d7.loss_mask: 0.4246, decode.d7.loss_dice: 0.5994, decode.d8.loss_cls: 0.1634, decode.d8.loss_mask: 0.4249, decode.d8.loss_dice: 0.5972, loss: 13.8789 +2022-06-05 05:32:01,128 - mmseg - INFO - Iter [28100/40000] lr: 2.269e-06, eta: 1:37:18, time: 0.492, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1630, decode.loss_mask: 0.4213, decode.loss_dice: 0.6119, decode.d0.loss_cls: 1.6318, decode.d0.loss_mask: 0.4613, decode.d0.loss_dice: 0.7098, decode.d1.loss_cls: 0.3238, decode.d1.loss_mask: 0.4374, decode.d1.loss_dice: 0.6520, decode.d2.loss_cls: 0.2360, decode.d2.loss_mask: 0.4297, decode.d2.loss_dice: 0.6256, decode.d3.loss_cls: 0.1929, decode.d3.loss_mask: 0.4260, decode.d3.loss_dice: 0.6209, decode.d4.loss_cls: 0.1852, decode.d4.loss_mask: 0.4229, decode.d4.loss_dice: 0.6170, decode.d5.loss_cls: 0.1797, decode.d5.loss_mask: 0.4211, decode.d5.loss_dice: 0.6122, decode.d6.loss_cls: 0.1687, decode.d6.loss_mask: 0.4214, decode.d6.loss_dice: 0.6114, decode.d7.loss_cls: 0.1696, decode.d7.loss_mask: 0.4190, decode.d7.loss_dice: 0.6115, decode.d8.loss_cls: 0.1644, decode.d8.loss_mask: 0.4200, decode.d8.loss_dice: 0.6107, loss: 13.9783 +2022-06-05 05:32:23,045 - mmseg - INFO - Iter [28150/40000] lr: 2.259e-06, eta: 1:36:53, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1717, decode.loss_mask: 0.4505, decode.loss_dice: 0.6100, decode.d0.loss_cls: 1.5818, decode.d0.loss_mask: 0.4906, decode.d0.loss_dice: 0.7031, decode.d1.loss_cls: 0.3254, decode.d1.loss_mask: 0.4644, decode.d1.loss_dice: 0.6458, decode.d2.loss_cls: 0.2395, decode.d2.loss_mask: 0.4557, decode.d2.loss_dice: 0.6195, decode.d3.loss_cls: 0.2077, decode.d3.loss_mask: 0.4510, decode.d3.loss_dice: 0.6123, decode.d4.loss_cls: 0.1955, decode.d4.loss_mask: 0.4521, decode.d4.loss_dice: 0.6157, decode.d5.loss_cls: 0.1804, decode.d5.loss_mask: 0.4524, decode.d5.loss_dice: 0.6158, decode.d6.loss_cls: 0.1725, decode.d6.loss_mask: 0.4508, decode.d6.loss_dice: 0.6117, decode.d7.loss_cls: 0.1690, decode.d7.loss_mask: 0.4496, decode.d7.loss_dice: 0.6132, decode.d8.loss_cls: 0.1718, decode.d8.loss_mask: 0.4505, decode.d8.loss_dice: 0.6135, loss: 14.2434 +2022-06-05 05:32:44,955 - mmseg - INFO - Iter [28200/40000] lr: 2.250e-06, eta: 1:36:27, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1528, decode.loss_mask: 0.4254, decode.loss_dice: 0.6005, decode.d0.loss_cls: 1.5762, decode.d0.loss_mask: 0.4658, decode.d0.loss_dice: 0.6914, decode.d1.loss_cls: 0.2970, decode.d1.loss_mask: 0.4424, decode.d1.loss_dice: 0.6353, decode.d2.loss_cls: 0.2091, decode.d2.loss_mask: 0.4295, decode.d2.loss_dice: 0.6129, decode.d3.loss_cls: 0.1831, decode.d3.loss_mask: 0.4268, decode.d3.loss_dice: 0.6036, decode.d4.loss_cls: 0.1648, decode.d4.loss_mask: 0.4288, decode.d4.loss_dice: 0.6026, decode.d5.loss_cls: 0.1562, decode.d5.loss_mask: 0.4278, decode.d5.loss_dice: 0.6008, decode.d6.loss_cls: 0.1601, decode.d6.loss_mask: 0.4269, decode.d6.loss_dice: 0.5963, decode.d7.loss_cls: 0.1509, decode.d7.loss_mask: 0.4266, 
decode.d7.loss_dice: 0.5993, decode.d8.loss_cls: 0.1539, decode.d8.loss_mask: 0.4263, decode.d8.loss_dice: 0.5982, loss: 13.6712 +2022-06-05 05:33:06,898 - mmseg - INFO - Iter [28250/40000] lr: 2.240e-06, eta: 1:36:01, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1565, decode.loss_mask: 0.4205, decode.loss_dice: 0.5732, decode.d0.loss_cls: 1.6059, decode.d0.loss_mask: 0.4647, decode.d0.loss_dice: 0.6738, decode.d1.loss_cls: 0.3038, decode.d1.loss_mask: 0.4370, decode.d1.loss_dice: 0.6096, decode.d2.loss_cls: 0.2180, decode.d2.loss_mask: 0.4265, decode.d2.loss_dice: 0.5897, decode.d3.loss_cls: 0.1847, decode.d3.loss_mask: 0.4208, decode.d3.loss_dice: 0.5761, decode.d4.loss_cls: 0.1740, decode.d4.loss_mask: 0.4209, decode.d4.loss_dice: 0.5785, decode.d5.loss_cls: 0.1671, decode.d5.loss_mask: 0.4194, decode.d5.loss_dice: 0.5752, decode.d6.loss_cls: 0.1571, decode.d6.loss_mask: 0.4195, decode.d6.loss_dice: 0.5730, decode.d7.loss_cls: 0.1586, decode.d7.loss_mask: 0.4198, decode.d7.loss_dice: 0.5746, decode.d8.loss_cls: 0.1594, decode.d8.loss_mask: 0.4193, decode.d8.loss_dice: 0.5752, loss: 13.4525 +2022-06-05 05:33:29,186 - mmseg - INFO - Iter [28300/40000] lr: 2.231e-06, eta: 1:35:36, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.2023, decode.loss_mask: 0.4173, decode.loss_dice: 0.6007, decode.d0.loss_cls: 1.6236, decode.d0.loss_mask: 0.4587, decode.d0.loss_dice: 0.7049, decode.d1.loss_cls: 0.3840, decode.d1.loss_mask: 0.4345, decode.d1.loss_dice: 0.6336, decode.d2.loss_cls: 0.2684, decode.d2.loss_mask: 0.4233, decode.d2.loss_dice: 0.6156, decode.d3.loss_cls: 0.2304, decode.d3.loss_mask: 0.4188, decode.d3.loss_dice: 0.6061, decode.d4.loss_cls: 0.2162, decode.d4.loss_mask: 0.4175, decode.d4.loss_dice: 0.6012, decode.d5.loss_cls: 0.2087, decode.d5.loss_mask: 0.4157, decode.d5.loss_dice: 0.5964, decode.d6.loss_cls: 0.2079, decode.d6.loss_mask: 0.4161, decode.d6.loss_dice: 0.5984, decode.d7.loss_cls: 0.2067, decode.d7.loss_mask: 0.4148, decode.d7.loss_dice: 0.5985, decode.d8.loss_cls: 0.2032, decode.d8.loss_mask: 0.4172, decode.d8.loss_dice: 0.5999, loss: 14.1408 +2022-06-05 05:33:50,962 - mmseg - INFO - Iter [28350/40000] lr: 2.221e-06, eta: 1:35:10, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1552, decode.loss_mask: 0.4280, decode.loss_dice: 0.5824, decode.d0.loss_cls: 1.6124, decode.d0.loss_mask: 0.4729, decode.d0.loss_dice: 0.6785, decode.d1.loss_cls: 0.3215, decode.d1.loss_mask: 0.4481, decode.d1.loss_dice: 0.6159, decode.d2.loss_cls: 0.2292, decode.d2.loss_mask: 0.4349, decode.d2.loss_dice: 0.5921, decode.d3.loss_cls: 0.1893, decode.d3.loss_mask: 0.4319, decode.d3.loss_dice: 0.5831, decode.d4.loss_cls: 0.1794, decode.d4.loss_mask: 0.4313, decode.d4.loss_dice: 0.5861, decode.d5.loss_cls: 0.1646, decode.d5.loss_mask: 0.4290, decode.d5.loss_dice: 0.5859, decode.d6.loss_cls: 0.1586, decode.d6.loss_mask: 0.4300, decode.d6.loss_dice: 0.5799, decode.d7.loss_cls: 0.1560, decode.d7.loss_mask: 0.4285, decode.d7.loss_dice: 0.5845, decode.d8.loss_cls: 0.1581, decode.d8.loss_mask: 0.4280, decode.d8.loss_dice: 0.5837, loss: 13.6589 +2022-06-05 05:34:15,626 - mmseg - INFO - Iter [28400/40000] lr: 2.212e-06, eta: 1:34:46, time: 0.494, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1576, decode.loss_mask: 0.4098, decode.loss_dice: 0.5696, decode.d0.loss_cls: 1.5744, decode.d0.loss_mask: 0.4504, decode.d0.loss_dice: 0.6665, decode.d1.loss_cls: 0.3250, decode.d1.loss_mask: 0.4281, decode.d1.loss_dice: 0.6046, decode.d2.loss_cls: 0.2360, 
decode.d2.loss_mask: 0.4159, decode.d2.loss_dice: 0.5795, decode.d3.loss_cls: 0.1958, decode.d3.loss_mask: 0.4138, decode.d3.loss_dice: 0.5714, decode.d4.loss_cls: 0.1860, decode.d4.loss_mask: 0.4133, decode.d4.loss_dice: 0.5718, decode.d5.loss_cls: 0.1780, decode.d5.loss_mask: 0.4099, decode.d5.loss_dice: 0.5719, decode.d6.loss_cls: 0.1703, decode.d6.loss_mask: 0.4100, decode.d6.loss_dice: 0.5685, decode.d7.loss_cls: 0.1647, decode.d7.loss_mask: 0.4093, decode.d7.loss_dice: 0.5716, decode.d8.loss_cls: 0.1657, decode.d8.loss_mask: 0.4107, decode.d8.loss_dice: 0.5676, loss: 13.3677 +2022-06-05 05:34:37,739 - mmseg - INFO - Iter [28450/40000] lr: 2.202e-06, eta: 1:34:20, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1632, decode.loss_mask: 0.4262, decode.loss_dice: 0.5909, decode.d0.loss_cls: 1.5916, decode.d0.loss_mask: 0.4643, decode.d0.loss_dice: 0.6888, decode.d1.loss_cls: 0.3057, decode.d1.loss_mask: 0.4445, decode.d1.loss_dice: 0.6276, decode.d2.loss_cls: 0.2330, decode.d2.loss_mask: 0.4340, decode.d2.loss_dice: 0.6021, decode.d3.loss_cls: 0.1908, decode.d3.loss_mask: 0.4289, decode.d3.loss_dice: 0.5943, decode.d4.loss_cls: 0.1806, decode.d4.loss_mask: 0.4276, decode.d4.loss_dice: 0.5960, decode.d5.loss_cls: 0.1735, decode.d5.loss_mask: 0.4264, decode.d5.loss_dice: 0.5913, decode.d6.loss_cls: 0.1714, decode.d6.loss_mask: 0.4264, decode.d6.loss_dice: 0.5910, decode.d7.loss_cls: 0.1649, decode.d7.loss_mask: 0.4270, decode.d7.loss_dice: 0.5940, decode.d8.loss_cls: 0.1663, decode.d8.loss_mask: 0.4292, decode.d8.loss_dice: 0.5945, loss: 13.7459 +2022-06-05 05:34:59,695 - mmseg - INFO - Iter [28500/40000] lr: 2.193e-06, eta: 1:33:55, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1650, decode.loss_mask: 0.4300, decode.loss_dice: 0.5913, decode.d0.loss_cls: 1.6065, decode.d0.loss_mask: 0.4736, decode.d0.loss_dice: 0.7032, decode.d1.loss_cls: 0.3218, decode.d1.loss_mask: 0.4449, decode.d1.loss_dice: 0.6294, decode.d2.loss_cls: 0.2247, decode.d2.loss_mask: 0.4341, decode.d2.loss_dice: 0.6067, decode.d3.loss_cls: 0.1971, decode.d3.loss_mask: 0.4319, decode.d3.loss_dice: 0.5916, decode.d4.loss_cls: 0.1850, decode.d4.loss_mask: 0.4313, decode.d4.loss_dice: 0.5961, decode.d5.loss_cls: 0.1776, decode.d5.loss_mask: 0.4310, decode.d5.loss_dice: 0.5902, decode.d6.loss_cls: 0.1736, decode.d6.loss_mask: 0.4307, decode.d6.loss_dice: 0.5883, decode.d7.loss_cls: 0.1731, decode.d7.loss_mask: 0.4309, decode.d7.loss_dice: 0.5926, decode.d8.loss_cls: 0.1696, decode.d8.loss_mask: 0.4294, decode.d8.loss_dice: 0.5893, loss: 13.8405 +2022-06-05 05:35:21,794 - mmseg - INFO - Iter [28550/40000] lr: 2.183e-06, eta: 1:33:29, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1676, decode.loss_mask: 0.4311, decode.loss_dice: 0.6078, decode.d0.loss_cls: 1.5810, decode.d0.loss_mask: 0.4716, decode.d0.loss_dice: 0.7036, decode.d1.loss_cls: 0.3210, decode.d1.loss_mask: 0.4462, decode.d1.loss_dice: 0.6438, decode.d2.loss_cls: 0.2379, decode.d2.loss_mask: 0.4375, decode.d2.loss_dice: 0.6237, decode.d3.loss_cls: 0.2006, decode.d3.loss_mask: 0.4349, decode.d3.loss_dice: 0.6148, decode.d4.loss_cls: 0.1853, decode.d4.loss_mask: 0.4339, decode.d4.loss_dice: 0.6151, decode.d5.loss_cls: 0.1812, decode.d5.loss_mask: 0.4348, decode.d5.loss_dice: 0.6076, decode.d6.loss_cls: 0.1704, decode.d6.loss_mask: 0.4344, decode.d6.loss_dice: 0.6112, decode.d7.loss_cls: 0.1672, decode.d7.loss_mask: 0.4329, decode.d7.loss_dice: 0.6101, decode.d8.loss_cls: 0.1645, decode.d8.loss_mask: 0.4314, 
decode.d8.loss_dice: 0.6076, loss: 14.0106 +2022-06-05 05:35:43,697 - mmseg - INFO - Iter [28600/40000] lr: 2.173e-06, eta: 1:33:04, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1792, decode.loss_mask: 0.4151, decode.loss_dice: 0.5942, decode.d0.loss_cls: 1.5649, decode.d0.loss_mask: 0.4535, decode.d0.loss_dice: 0.6899, decode.d1.loss_cls: 0.3488, decode.d1.loss_mask: 0.4297, decode.d1.loss_dice: 0.6325, decode.d2.loss_cls: 0.2545, decode.d2.loss_mask: 0.4211, decode.d2.loss_dice: 0.6133, decode.d3.loss_cls: 0.2195, decode.d3.loss_mask: 0.4174, decode.d3.loss_dice: 0.5980, decode.d4.loss_cls: 0.2055, decode.d4.loss_mask: 0.4182, decode.d4.loss_dice: 0.6015, decode.d5.loss_cls: 0.1969, decode.d5.loss_mask: 0.4161, decode.d5.loss_dice: 0.5997, decode.d6.loss_cls: 0.1853, decode.d6.loss_mask: 0.4160, decode.d6.loss_dice: 0.5937, decode.d7.loss_cls: 0.1825, decode.d7.loss_mask: 0.4158, decode.d7.loss_dice: 0.5973, decode.d8.loss_cls: 0.1802, decode.d8.loss_mask: 0.4157, decode.d8.loss_dice: 0.5986, loss: 13.8546 +2022-06-05 05:36:05,990 - mmseg - INFO - Iter [28650/40000] lr: 2.164e-06, eta: 1:32:38, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1593, decode.loss_mask: 0.4194, decode.loss_dice: 0.5916, decode.d0.loss_cls: 1.6118, decode.d0.loss_mask: 0.4611, decode.d0.loss_dice: 0.6832, decode.d1.loss_cls: 0.3291, decode.d1.loss_mask: 0.4345, decode.d1.loss_dice: 0.6215, decode.d2.loss_cls: 0.2395, decode.d2.loss_mask: 0.4237, decode.d2.loss_dice: 0.5999, decode.d3.loss_cls: 0.1899, decode.d3.loss_mask: 0.4224, decode.d3.loss_dice: 0.5915, decode.d4.loss_cls: 0.1744, decode.d4.loss_mask: 0.4213, decode.d4.loss_dice: 0.5898, decode.d5.loss_cls: 0.1703, decode.d5.loss_mask: 0.4208, decode.d5.loss_dice: 0.5915, decode.d6.loss_cls: 0.1653, decode.d6.loss_mask: 0.4191, decode.d6.loss_dice: 0.5895, decode.d7.loss_cls: 0.1627, decode.d7.loss_mask: 0.4192, decode.d7.loss_dice: 0.5905, decode.d8.loss_cls: 0.1629, decode.d8.loss_mask: 0.4198, decode.d8.loss_dice: 0.5906, loss: 13.6658 +2022-06-05 05:36:27,960 - mmseg - INFO - Iter [28700/40000] lr: 2.154e-06, eta: 1:32:13, time: 0.440, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1536, decode.loss_mask: 0.4200, decode.loss_dice: 0.5721, decode.d0.loss_cls: 1.5918, decode.d0.loss_mask: 0.4665, decode.d0.loss_dice: 0.6667, decode.d1.loss_cls: 0.3221, decode.d1.loss_mask: 0.4378, decode.d1.loss_dice: 0.6007, decode.d2.loss_cls: 0.2199, decode.d2.loss_mask: 0.4282, decode.d2.loss_dice: 0.5791, decode.d3.loss_cls: 0.1797, decode.d3.loss_mask: 0.4252, decode.d3.loss_dice: 0.5737, decode.d4.loss_cls: 0.1686, decode.d4.loss_mask: 0.4227, decode.d4.loss_dice: 0.5819, decode.d5.loss_cls: 0.1561, decode.d5.loss_mask: 0.4227, decode.d5.loss_dice: 0.5781, decode.d6.loss_cls: 0.1572, decode.d6.loss_mask: 0.4196, decode.d6.loss_dice: 0.5735, decode.d7.loss_cls: 0.1546, decode.d7.loss_mask: 0.4207, decode.d7.loss_dice: 0.5733, decode.d8.loss_cls: 0.1501, decode.d8.loss_mask: 0.4191, decode.d8.loss_dice: 0.5746, loss: 13.4097 +2022-06-05 05:36:53,843 - mmseg - INFO - Iter [28750/40000] lr: 2.145e-06, eta: 1:31:49, time: 0.517, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1411, decode.loss_mask: 0.4202, decode.loss_dice: 0.6027, decode.d0.loss_cls: 1.5456, decode.d0.loss_mask: 0.4649, decode.d0.loss_dice: 0.6968, decode.d1.loss_cls: 0.3097, decode.d1.loss_mask: 0.4390, decode.d1.loss_dice: 0.6381, decode.d2.loss_cls: 0.2163, decode.d2.loss_mask: 0.4291, decode.d2.loss_dice: 0.6140, decode.d3.loss_cls: 0.1728, 
decode.d3.loss_mask: 0.4253, decode.d3.loss_dice: 0.6074, decode.d4.loss_cls: 0.1645, decode.d4.loss_mask: 0.4229, decode.d4.loss_dice: 0.6074, decode.d5.loss_cls: 0.1545, decode.d5.loss_mask: 0.4238, decode.d5.loss_dice: 0.6048, decode.d6.loss_cls: 0.1513, decode.d6.loss_mask: 0.4224, decode.d6.loss_dice: 0.6067, decode.d7.loss_cls: 0.1504, decode.d7.loss_mask: 0.4221, decode.d7.loss_dice: 0.6062, decode.d8.loss_cls: 0.1433, decode.d8.loss_mask: 0.4218, decode.d8.loss_dice: 0.6040, loss: 13.6292 +2022-06-05 05:37:16,098 - mmseg - INFO - Iter [28800/40000] lr: 2.135e-06, eta: 1:31:24, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1648, decode.loss_mask: 0.4161, decode.loss_dice: 0.5549, decode.d0.loss_cls: 1.5638, decode.d0.loss_mask: 0.4585, decode.d0.loss_dice: 0.6604, decode.d1.loss_cls: 0.3160, decode.d1.loss_mask: 0.4367, decode.d1.loss_dice: 0.5957, decode.d2.loss_cls: 0.2311, decode.d2.loss_mask: 0.4238, decode.d2.loss_dice: 0.5713, decode.d3.loss_cls: 0.1955, decode.d3.loss_mask: 0.4197, decode.d3.loss_dice: 0.5668, decode.d4.loss_cls: 0.1819, decode.d4.loss_mask: 0.4189, decode.d4.loss_dice: 0.5609, decode.d5.loss_cls: 0.1732, decode.d5.loss_mask: 0.4198, decode.d5.loss_dice: 0.5592, decode.d6.loss_cls: 0.1684, decode.d6.loss_mask: 0.4157, decode.d6.loss_dice: 0.5570, decode.d7.loss_cls: 0.1619, decode.d7.loss_mask: 0.4169, decode.d7.loss_dice: 0.5586, decode.d8.loss_cls: 0.1693, decode.d8.loss_mask: 0.4165, decode.d8.loss_dice: 0.5598, loss: 13.3131 +2022-06-05 05:37:38,045 - mmseg - INFO - Iter [28850/40000] lr: 2.126e-06, eta: 1:30:58, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1568, decode.loss_mask: 0.4252, decode.loss_dice: 0.5883, decode.d0.loss_cls: 1.5868, decode.d0.loss_mask: 0.4726, decode.d0.loss_dice: 0.6922, decode.d1.loss_cls: 0.3223, decode.d1.loss_mask: 0.4451, decode.d1.loss_dice: 0.6308, decode.d2.loss_cls: 0.2296, decode.d2.loss_mask: 0.4309, decode.d2.loss_dice: 0.6056, decode.d3.loss_cls: 0.1918, decode.d3.loss_mask: 0.4281, decode.d3.loss_dice: 0.5935, decode.d4.loss_cls: 0.1875, decode.d4.loss_mask: 0.4258, decode.d4.loss_dice: 0.5949, decode.d5.loss_cls: 0.1755, decode.d5.loss_mask: 0.4252, decode.d5.loss_dice: 0.5914, decode.d6.loss_cls: 0.1747, decode.d6.loss_mask: 0.4236, decode.d6.loss_dice: 0.5888, decode.d7.loss_cls: 0.1674, decode.d7.loss_mask: 0.4245, decode.d7.loss_dice: 0.5910, decode.d8.loss_cls: 0.1663, decode.d8.loss_mask: 0.4239, decode.d8.loss_dice: 0.5897, loss: 13.7498 +2022-06-05 05:38:00,173 - mmseg - INFO - Iter [28900/40000] lr: 2.116e-06, eta: 1:30:33, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1477, decode.loss_mask: 0.4057, decode.loss_dice: 0.5684, decode.d0.loss_cls: 1.5748, decode.d0.loss_mask: 0.4465, decode.d0.loss_dice: 0.6674, decode.d1.loss_cls: 0.3052, decode.d1.loss_mask: 0.4226, decode.d1.loss_dice: 0.6042, decode.d2.loss_cls: 0.2139, decode.d2.loss_mask: 0.4122, decode.d2.loss_dice: 0.5798, decode.d3.loss_cls: 0.1712, decode.d3.loss_mask: 0.4115, decode.d3.loss_dice: 0.5724, decode.d4.loss_cls: 0.1641, decode.d4.loss_mask: 0.4088, decode.d4.loss_dice: 0.5727, decode.d5.loss_cls: 0.1532, decode.d5.loss_mask: 0.4072, decode.d5.loss_dice: 0.5715, decode.d6.loss_cls: 0.1522, decode.d6.loss_mask: 0.4054, decode.d6.loss_dice: 0.5697, decode.d7.loss_cls: 0.1537, decode.d7.loss_mask: 0.4043, decode.d7.loss_dice: 0.5682, decode.d8.loss_cls: 0.1513, decode.d8.loss_mask: 0.4054, decode.d8.loss_dice: 0.5714, loss: 13.1625 +2022-06-05 05:38:22,207 - mmseg - INFO - Iter 
[28950/40000] lr: 2.107e-06, eta: 1:30:07, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1645, decode.loss_mask: 0.4207, decode.loss_dice: 0.5895, decode.d0.loss_cls: 1.5788, decode.d0.loss_mask: 0.4631, decode.d0.loss_dice: 0.6897, decode.d1.loss_cls: 0.3213, decode.d1.loss_mask: 0.4374, decode.d1.loss_dice: 0.6266, decode.d2.loss_cls: 0.2235, decode.d2.loss_mask: 0.4276, decode.d2.loss_dice: 0.5994, decode.d3.loss_cls: 0.1895, decode.d3.loss_mask: 0.4247, decode.d3.loss_dice: 0.5909, decode.d4.loss_cls: 0.1761, decode.d4.loss_mask: 0.4234, decode.d4.loss_dice: 0.5943, decode.d5.loss_cls: 0.1726, decode.d5.loss_mask: 0.4214, decode.d5.loss_dice: 0.5921, decode.d6.loss_cls: 0.1605, decode.d6.loss_mask: 0.4216, decode.d6.loss_dice: 0.5908, decode.d7.loss_cls: 0.1686, decode.d7.loss_mask: 0.4201, decode.d7.loss_dice: 0.5874, decode.d8.loss_cls: 0.1607, decode.d8.loss_mask: 0.4199, decode.d8.loss_dice: 0.5914, loss: 13.6481 +2022-06-05 05:38:44,338 - mmseg - INFO - Saving checkpoint at 29000 iterations +2022-06-05 05:38:47,278 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:38:47,279 - mmseg - INFO - Iter [29000/40000] lr: 2.097e-06, eta: 1:29:43, time: 0.501, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1619, decode.loss_mask: 0.4257, decode.loss_dice: 0.5989, decode.d0.loss_cls: 1.6372, decode.d0.loss_mask: 0.4687, decode.d0.loss_dice: 0.6935, decode.d1.loss_cls: 0.3235, decode.d1.loss_mask: 0.4442, decode.d1.loss_dice: 0.6363, decode.d2.loss_cls: 0.2296, decode.d2.loss_mask: 0.4334, decode.d2.loss_dice: 0.6116, decode.d3.loss_cls: 0.1988, decode.d3.loss_mask: 0.4313, decode.d3.loss_dice: 0.6039, decode.d4.loss_cls: 0.1909, decode.d4.loss_mask: 0.4287, decode.d4.loss_dice: 0.6039, decode.d5.loss_cls: 0.1698, decode.d5.loss_mask: 0.4288, decode.d5.loss_dice: 0.6064, decode.d6.loss_cls: 0.1707, decode.d6.loss_mask: 0.4276, decode.d6.loss_dice: 0.6026, decode.d7.loss_cls: 0.1670, decode.d7.loss_mask: 0.4266, decode.d7.loss_dice: 0.5983, decode.d8.loss_cls: 0.1677, decode.d8.loss_mask: 0.4268, decode.d8.loss_dice: 0.6007, loss: 13.9149 +2022-06-05 05:39:12,046 - mmseg - INFO - Iter [29050/40000] lr: 2.088e-06, eta: 1:29:19, time: 0.495, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1601, decode.loss_mask: 0.4122, decode.loss_dice: 0.6020, decode.d0.loss_cls: 1.5763, decode.d0.loss_mask: 0.4551, decode.d0.loss_dice: 0.7051, decode.d1.loss_cls: 0.3167, decode.d1.loss_mask: 0.4275, decode.d1.loss_dice: 0.6394, decode.d2.loss_cls: 0.2346, decode.d2.loss_mask: 0.4187, decode.d2.loss_dice: 0.6156, decode.d3.loss_cls: 0.1997, decode.d3.loss_mask: 0.4151, decode.d3.loss_dice: 0.6071, decode.d4.loss_cls: 0.1827, decode.d4.loss_mask: 0.4158, decode.d4.loss_dice: 0.6083, decode.d5.loss_cls: 0.1747, decode.d5.loss_mask: 0.4146, decode.d5.loss_dice: 0.6067, decode.d6.loss_cls: 0.1662, decode.d6.loss_mask: 0.4135, decode.d6.loss_dice: 0.6049, decode.d7.loss_cls: 0.1591, decode.d7.loss_mask: 0.4135, decode.d7.loss_dice: 0.6092, decode.d8.loss_cls: 0.1614, decode.d8.loss_mask: 0.4118, decode.d8.loss_dice: 0.6074, loss: 13.7351 +2022-06-05 05:39:34,128 - mmseg - INFO - Iter [29100/40000] lr: 2.078e-06, eta: 1:28:53, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1564, decode.loss_mask: 0.4208, decode.loss_dice: 0.6094, decode.d0.loss_cls: 1.5557, decode.d0.loss_mask: 0.4603, decode.d0.loss_dice: 0.7011, decode.d1.loss_cls: 0.3204, decode.d1.loss_mask: 0.4342, decode.d1.loss_dice: 0.6402, decode.d2.loss_cls: 
0.2249, decode.d2.loss_mask: 0.4268, decode.d2.loss_dice: 0.6223, decode.d3.loss_cls: 0.1941, decode.d3.loss_mask: 0.4235, decode.d3.loss_dice: 0.6107, decode.d4.loss_cls: 0.1859, decode.d4.loss_mask: 0.4209, decode.d4.loss_dice: 0.6093, decode.d5.loss_cls: 0.1723, decode.d5.loss_mask: 0.4232, decode.d5.loss_dice: 0.6104, decode.d6.loss_cls: 0.1660, decode.d6.loss_mask: 0.4218, decode.d6.loss_dice: 0.6076, decode.d7.loss_cls: 0.1601, decode.d7.loss_mask: 0.4224, decode.d7.loss_dice: 0.6064, decode.d8.loss_cls: 0.1604, decode.d8.loss_mask: 0.4201, decode.d8.loss_dice: 0.6067, loss: 13.7944 +2022-06-05 05:39:56,315 - mmseg - INFO - Iter [29150/40000] lr: 2.069e-06, eta: 1:28:28, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1686, decode.loss_mask: 0.4350, decode.loss_dice: 0.5871, decode.d0.loss_cls: 1.5652, decode.d0.loss_mask: 0.4760, decode.d0.loss_dice: 0.6784, decode.d1.loss_cls: 0.3448, decode.d1.loss_mask: 0.4492, decode.d1.loss_dice: 0.6215, decode.d2.loss_cls: 0.2395, decode.d2.loss_mask: 0.4390, decode.d2.loss_dice: 0.5976, decode.d3.loss_cls: 0.1979, decode.d3.loss_mask: 0.4365, decode.d3.loss_dice: 0.5858, decode.d4.loss_cls: 0.1880, decode.d4.loss_mask: 0.4361, decode.d4.loss_dice: 0.5862, decode.d5.loss_cls: 0.1782, decode.d5.loss_mask: 0.4372, decode.d5.loss_dice: 0.5878, decode.d6.loss_cls: 0.1784, decode.d6.loss_mask: 0.4364, decode.d6.loss_dice: 0.5872, decode.d7.loss_cls: 0.1680, decode.d7.loss_mask: 0.4352, decode.d7.loss_dice: 0.5852, decode.d8.loss_cls: 0.1685, decode.d8.loss_mask: 0.4343, decode.d8.loss_dice: 0.5840, loss: 13.8126 +2022-06-05 05:40:18,796 - mmseg - INFO - Iter [29200/40000] lr: 2.059e-06, eta: 1:28:03, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1753, decode.loss_mask: 0.4202, decode.loss_dice: 0.5950, decode.d0.loss_cls: 1.6029, decode.d0.loss_mask: 0.4609, decode.d0.loss_dice: 0.6989, decode.d1.loss_cls: 0.3238, decode.d1.loss_mask: 0.4390, decode.d1.loss_dice: 0.6346, decode.d2.loss_cls: 0.2358, decode.d2.loss_mask: 0.4280, decode.d2.loss_dice: 0.6061, decode.d3.loss_cls: 0.2025, decode.d3.loss_mask: 0.4238, decode.d3.loss_dice: 0.6021, decode.d4.loss_cls: 0.1887, decode.d4.loss_mask: 0.4234, decode.d4.loss_dice: 0.6004, decode.d5.loss_cls: 0.1771, decode.d5.loss_mask: 0.4224, decode.d5.loss_dice: 0.6021, decode.d6.loss_cls: 0.1714, decode.d6.loss_mask: 0.4217, decode.d6.loss_dice: 0.5980, decode.d7.loss_cls: 0.1719, decode.d7.loss_mask: 0.4209, decode.d7.loss_dice: 0.6020, decode.d8.loss_cls: 0.1721, decode.d8.loss_mask: 0.4196, decode.d8.loss_dice: 0.5980, loss: 13.8386 +2022-06-05 05:40:40,703 - mmseg - INFO - Iter [29250/40000] lr: 2.050e-06, eta: 1:27:38, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1836, decode.loss_mask: 0.4152, decode.loss_dice: 0.5917, decode.d0.loss_cls: 1.5856, decode.d0.loss_mask: 0.4608, decode.d0.loss_dice: 0.6941, decode.d1.loss_cls: 0.3307, decode.d1.loss_mask: 0.4346, decode.d1.loss_dice: 0.6338, decode.d2.loss_cls: 0.2401, decode.d2.loss_mask: 0.4207, decode.d2.loss_dice: 0.6080, decode.d3.loss_cls: 0.2158, decode.d3.loss_mask: 0.4176, decode.d3.loss_dice: 0.5977, decode.d4.loss_cls: 0.2085, decode.d4.loss_mask: 0.4159, decode.d4.loss_dice: 0.5980, decode.d5.loss_cls: 0.1986, decode.d5.loss_mask: 0.4159, decode.d5.loss_dice: 0.5934, decode.d6.loss_cls: 0.1913, decode.d6.loss_mask: 0.4150, decode.d6.loss_dice: 0.5892, decode.d7.loss_cls: 0.1891, decode.d7.loss_mask: 0.4163, decode.d7.loss_dice: 0.5894, decode.d8.loss_cls: 0.1892, decode.d8.loss_mask: 
0.4158, decode.d8.loss_dice: 0.5890, loss: 13.8448 +2022-06-05 05:41:02,700 - mmseg - INFO - Iter [29300/40000] lr: 2.040e-06, eta: 1:27:12, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1782, decode.loss_mask: 0.4070, decode.loss_dice: 0.5993, decode.d0.loss_cls: 1.6041, decode.d0.loss_mask: 0.4519, decode.d0.loss_dice: 0.6939, decode.d1.loss_cls: 0.3305, decode.d1.loss_mask: 0.4260, decode.d1.loss_dice: 0.6382, decode.d2.loss_cls: 0.2479, decode.d2.loss_mask: 0.4132, decode.d2.loss_dice: 0.6109, decode.d3.loss_cls: 0.2065, decode.d3.loss_mask: 0.4119, decode.d3.loss_dice: 0.6014, decode.d4.loss_cls: 0.2000, decode.d4.loss_mask: 0.4098, decode.d4.loss_dice: 0.5991, decode.d5.loss_cls: 0.1884, decode.d5.loss_mask: 0.4084, decode.d5.loss_dice: 0.6006, decode.d6.loss_cls: 0.1812, decode.d6.loss_mask: 0.4092, decode.d6.loss_dice: 0.6011, decode.d7.loss_cls: 0.1799, decode.d7.loss_mask: 0.4085, decode.d7.loss_dice: 0.6033, decode.d8.loss_cls: 0.1781, decode.d8.loss_mask: 0.4068, decode.d8.loss_dice: 0.5994, loss: 13.7945 +2022-06-05 05:41:27,339 - mmseg - INFO - Iter [29350/40000] lr: 2.031e-06, eta: 1:26:48, time: 0.493, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1691, decode.loss_mask: 0.4044, decode.loss_dice: 0.5746, decode.d0.loss_cls: 1.5780, decode.d0.loss_mask: 0.4453, decode.d0.loss_dice: 0.6697, decode.d1.loss_cls: 0.3163, decode.d1.loss_mask: 0.4216, decode.d1.loss_dice: 0.6152, decode.d2.loss_cls: 0.2274, decode.d2.loss_mask: 0.4144, decode.d2.loss_dice: 0.5952, decode.d3.loss_cls: 0.1876, decode.d3.loss_mask: 0.4103, decode.d3.loss_dice: 0.5871, decode.d4.loss_cls: 0.1788, decode.d4.loss_mask: 0.4088, decode.d4.loss_dice: 0.5856, decode.d5.loss_cls: 0.1748, decode.d5.loss_mask: 0.4062, decode.d5.loss_dice: 0.5767, decode.d6.loss_cls: 0.1645, decode.d6.loss_mask: 0.4055, decode.d6.loss_dice: 0.5760, decode.d7.loss_cls: 0.1672, decode.d7.loss_mask: 0.4058, decode.d7.loss_dice: 0.5771, decode.d8.loss_cls: 0.1680, decode.d8.loss_mask: 0.4042, decode.d8.loss_dice: 0.5779, loss: 13.3931 +2022-06-05 05:41:49,823 - mmseg - INFO - Iter [29400/40000] lr: 2.021e-06, eta: 1:26:23, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1585, decode.loss_mask: 0.4327, decode.loss_dice: 0.5882, decode.d0.loss_cls: 1.5901, decode.d0.loss_mask: 0.4801, decode.d0.loss_dice: 0.6933, decode.d1.loss_cls: 0.3261, decode.d1.loss_mask: 0.4515, decode.d1.loss_dice: 0.6259, decode.d2.loss_cls: 0.2283, decode.d2.loss_mask: 0.4401, decode.d2.loss_dice: 0.6043, decode.d3.loss_cls: 0.1934, decode.d3.loss_mask: 0.4367, decode.d3.loss_dice: 0.5913, decode.d4.loss_cls: 0.1780, decode.d4.loss_mask: 0.4340, decode.d4.loss_dice: 0.5905, decode.d5.loss_cls: 0.1719, decode.d5.loss_mask: 0.4329, decode.d5.loss_dice: 0.5919, decode.d6.loss_cls: 0.1652, decode.d6.loss_mask: 0.4318, decode.d6.loss_dice: 0.5894, decode.d7.loss_cls: 0.1648, decode.d7.loss_mask: 0.4313, decode.d7.loss_dice: 0.5863, decode.d8.loss_cls: 0.1615, decode.d8.loss_mask: 0.4318, decode.d8.loss_dice: 0.5872, loss: 13.7890 +2022-06-05 05:42:11,693 - mmseg - INFO - Iter [29450/40000] lr: 2.011e-06, eta: 1:25:57, time: 0.437, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1562, decode.loss_mask: 0.4147, decode.loss_dice: 0.5732, decode.d0.loss_cls: 1.5168, decode.d0.loss_mask: 0.4594, decode.d0.loss_dice: 0.6633, decode.d1.loss_cls: 0.2922, decode.d1.loss_mask: 0.4342, decode.d1.loss_dice: 0.6097, decode.d2.loss_cls: 0.2168, decode.d2.loss_mask: 0.4235, decode.d2.loss_dice: 0.5870, decode.d3.loss_cls: 0.1865, 
decode.d3.loss_mask: 0.4187, decode.d3.loss_dice: 0.5807, decode.d4.loss_cls: 0.1712, decode.d4.loss_mask: 0.4186, decode.d4.loss_dice: 0.5809, decode.d5.loss_cls: 0.1718, decode.d5.loss_mask: 0.4171, decode.d5.loss_dice: 0.5787, decode.d6.loss_cls: 0.1563, decode.d6.loss_mask: 0.4162, decode.d6.loss_dice: 0.5776, decode.d7.loss_cls: 0.1550, decode.d7.loss_mask: 0.4172, decode.d7.loss_dice: 0.5783, decode.d8.loss_cls: 0.1585, decode.d8.loss_mask: 0.4168, decode.d8.loss_dice: 0.5767, loss: 13.3240 +2022-06-05 05:42:33,831 - mmseg - INFO - Iter [29500/40000] lr: 2.002e-06, eta: 1:25:32, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1519, decode.loss_mask: 0.4271, decode.loss_dice: 0.6041, decode.d0.loss_cls: 1.6002, decode.d0.loss_mask: 0.4720, decode.d0.loss_dice: 0.7105, decode.d1.loss_cls: 0.3126, decode.d1.loss_mask: 0.4434, decode.d1.loss_dice: 0.6495, decode.d2.loss_cls: 0.2309, decode.d2.loss_mask: 0.4319, decode.d2.loss_dice: 0.6203, decode.d3.loss_cls: 0.1769, decode.d3.loss_mask: 0.4302, decode.d3.loss_dice: 0.6163, decode.d4.loss_cls: 0.1746, decode.d4.loss_mask: 0.4291, decode.d4.loss_dice: 0.6132, decode.d5.loss_cls: 0.1634, decode.d5.loss_mask: 0.4285, decode.d5.loss_dice: 0.6131, decode.d6.loss_cls: 0.1544, decode.d6.loss_mask: 0.4290, decode.d6.loss_dice: 0.6097, decode.d7.loss_cls: 0.1602, decode.d7.loss_mask: 0.4261, decode.d7.loss_dice: 0.6027, decode.d8.loss_cls: 0.1552, decode.d8.loss_mask: 0.4271, decode.d8.loss_dice: 0.6079, loss: 13.8722 +2022-06-05 05:42:56,115 - mmseg - INFO - Iter [29550/40000] lr: 1.992e-06, eta: 1:25:07, time: 0.446, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1592, decode.loss_mask: 0.4156, decode.loss_dice: 0.5942, decode.d0.loss_cls: 1.6228, decode.d0.loss_mask: 0.4566, decode.d0.loss_dice: 0.6860, decode.d1.loss_cls: 0.3238, decode.d1.loss_mask: 0.4334, decode.d1.loss_dice: 0.6306, decode.d2.loss_cls: 0.2295, decode.d2.loss_mask: 0.4221, decode.d2.loss_dice: 0.6081, decode.d3.loss_cls: 0.1933, decode.d3.loss_mask: 0.4200, decode.d3.loss_dice: 0.6008, decode.d4.loss_cls: 0.1811, decode.d4.loss_mask: 0.4174, decode.d4.loss_dice: 0.5994, decode.d5.loss_cls: 0.1710, decode.d5.loss_mask: 0.4168, decode.d5.loss_dice: 0.5975, decode.d6.loss_cls: 0.1649, decode.d6.loss_mask: 0.4157, decode.d6.loss_dice: 0.5967, decode.d7.loss_cls: 0.1673, decode.d7.loss_mask: 0.4170, decode.d7.loss_dice: 0.5970, decode.d8.loss_cls: 0.1599, decode.d8.loss_mask: 0.4156, decode.d8.loss_dice: 0.5988, loss: 13.7122 +2022-06-05 05:43:18,335 - mmseg - INFO - Iter [29600/40000] lr: 1.983e-06, eta: 1:24:42, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1721, decode.loss_mask: 0.4306, decode.loss_dice: 0.6219, decode.d0.loss_cls: 1.6030, decode.d0.loss_mask: 0.4699, decode.d0.loss_dice: 0.7190, decode.d1.loss_cls: 0.3130, decode.d1.loss_mask: 0.4470, decode.d1.loss_dice: 0.6605, decode.d2.loss_cls: 0.2331, decode.d2.loss_mask: 0.4349, decode.d2.loss_dice: 0.6323, decode.d3.loss_cls: 0.1957, decode.d3.loss_mask: 0.4331, decode.d3.loss_dice: 0.6289, decode.d4.loss_cls: 0.1773, decode.d4.loss_mask: 0.4337, decode.d4.loss_dice: 0.6334, decode.d5.loss_cls: 0.1755, decode.d5.loss_mask: 0.4315, decode.d5.loss_dice: 0.6292, decode.d6.loss_cls: 0.1719, decode.d6.loss_mask: 0.4320, decode.d6.loss_dice: 0.6261, decode.d7.loss_cls: 0.1644, decode.d7.loss_mask: 0.4352, decode.d7.loss_dice: 0.6323, decode.d8.loss_cls: 0.1628, decode.d8.loss_mask: 0.4336, decode.d8.loss_dice: 0.6283, loss: 14.1620 +2022-06-05 05:43:42,910 - mmseg - INFO - Iter 
[29650/40000] lr: 1.973e-06, eta: 1:24:17, time: 0.491, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1637, decode.loss_mask: 0.4285, decode.loss_dice: 0.5956, decode.d0.loss_cls: 1.5658, decode.d0.loss_mask: 0.4682, decode.d0.loss_dice: 0.6858, decode.d1.loss_cls: 0.3142, decode.d1.loss_mask: 0.4453, decode.d1.loss_dice: 0.6336, decode.d2.loss_cls: 0.2263, decode.d2.loss_mask: 0.4363, decode.d2.loss_dice: 0.6082, decode.d3.loss_cls: 0.1862, decode.d3.loss_mask: 0.4331, decode.d3.loss_dice: 0.6029, decode.d4.loss_cls: 0.1774, decode.d4.loss_mask: 0.4334, decode.d4.loss_dice: 0.6013, decode.d5.loss_cls: 0.1652, decode.d5.loss_mask: 0.4313, decode.d5.loss_dice: 0.6045, decode.d6.loss_cls: 0.1615, decode.d6.loss_mask: 0.4305, decode.d6.loss_dice: 0.5935, decode.d7.loss_cls: 0.1614, decode.d7.loss_mask: 0.4295, decode.d7.loss_dice: 0.5974, decode.d8.loss_cls: 0.1607, decode.d8.loss_mask: 0.4286, decode.d8.loss_dice: 0.5966, loss: 13.7665 +2022-06-05 05:44:04,852 - mmseg - INFO - Iter [29700/40000] lr: 1.964e-06, eta: 1:23:52, time: 0.439, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1486, decode.loss_mask: 0.4357, decode.loss_dice: 0.6042, decode.d0.loss_cls: 1.5482, decode.d0.loss_mask: 0.4778, decode.d0.loss_dice: 0.6995, decode.d1.loss_cls: 0.3159, decode.d1.loss_mask: 0.4525, decode.d1.loss_dice: 0.6382, decode.d2.loss_cls: 0.2281, decode.d2.loss_mask: 0.4396, decode.d2.loss_dice: 0.6217, decode.d3.loss_cls: 0.1836, decode.d3.loss_mask: 0.4386, decode.d3.loss_dice: 0.6102, decode.d4.loss_cls: 0.1710, decode.d4.loss_mask: 0.4372, decode.d4.loss_dice: 0.6093, decode.d5.loss_cls: 0.1566, decode.d5.loss_mask: 0.4373, decode.d5.loss_dice: 0.6081, decode.d6.loss_cls: 0.1559, decode.d6.loss_mask: 0.4353, decode.d6.loss_dice: 0.6035, decode.d7.loss_cls: 0.1509, decode.d7.loss_mask: 0.4353, decode.d7.loss_dice: 0.6074, decode.d8.loss_cls: 0.1492, decode.d8.loss_mask: 0.4350, decode.d8.loss_dice: 0.6041, loss: 13.8384 +2022-06-05 05:44:26,435 - mmseg - INFO - Iter [29750/40000] lr: 1.954e-06, eta: 1:23:27, time: 0.432, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1437, decode.loss_mask: 0.4030, decode.loss_dice: 0.5575, decode.d0.loss_cls: 1.5639, decode.d0.loss_mask: 0.4449, decode.d0.loss_dice: 0.6501, decode.d1.loss_cls: 0.3037, decode.d1.loss_mask: 0.4207, decode.d1.loss_dice: 0.5911, decode.d2.loss_cls: 0.2137, decode.d2.loss_mask: 0.4083, decode.d2.loss_dice: 0.5716, decode.d3.loss_cls: 0.1728, decode.d3.loss_mask: 0.4056, decode.d3.loss_dice: 0.5653, decode.d4.loss_cls: 0.1596, decode.d4.loss_mask: 0.4030, decode.d4.loss_dice: 0.5620, decode.d5.loss_cls: 0.1532, decode.d5.loss_mask: 0.4041, decode.d5.loss_dice: 0.5647, decode.d6.loss_cls: 0.1463, decode.d6.loss_mask: 0.4037, decode.d6.loss_dice: 0.5617, decode.d7.loss_cls: 0.1458, decode.d7.loss_mask: 0.4028, decode.d7.loss_dice: 0.5625, decode.d8.loss_cls: 0.1446, decode.d8.loss_mask: 0.4013, decode.d8.loss_dice: 0.5597, loss: 12.9913 +2022-06-05 05:44:47,814 - mmseg - INFO - Iter [29800/40000] lr: 1.945e-06, eta: 1:23:01, time: 0.427, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1612, decode.loss_mask: 0.4221, decode.loss_dice: 0.5885, decode.d0.loss_cls: 1.5599, decode.d0.loss_mask: 0.4653, decode.d0.loss_dice: 0.6796, decode.d1.loss_cls: 0.3098, decode.d1.loss_mask: 0.4386, decode.d1.loss_dice: 0.6245, decode.d2.loss_cls: 0.2227, decode.d2.loss_mask: 0.4278, decode.d2.loss_dice: 0.6020, decode.d3.loss_cls: 0.1817, decode.d3.loss_mask: 0.4265, decode.d3.loss_dice: 0.5912, decode.d4.loss_cls: 0.1752, 
decode.d4.loss_mask: 0.4254, decode.d4.loss_dice: 0.5964, decode.d5.loss_cls: 0.1628, decode.d5.loss_mask: 0.4243, decode.d5.loss_dice: 0.5902, decode.d6.loss_cls: 0.1624, decode.d6.loss_mask: 0.4235, decode.d6.loss_dice: 0.5891, decode.d7.loss_cls: 0.1616, decode.d7.loss_mask: 0.4226, decode.d7.loss_dice: 0.5885, decode.d8.loss_cls: 0.1589, decode.d8.loss_mask: 0.4231, decode.d8.loss_dice: 0.5888, loss: 13.5940 +2022-06-05 05:45:09,269 - mmseg - INFO - Iter [29850/40000] lr: 1.935e-06, eta: 1:22:36, time: 0.430, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1569, decode.loss_mask: 0.4051, decode.loss_dice: 0.5683, decode.d0.loss_cls: 1.5836, decode.d0.loss_mask: 0.4437, decode.d0.loss_dice: 0.6639, decode.d1.loss_cls: 0.3283, decode.d1.loss_mask: 0.4204, decode.d1.loss_dice: 0.6074, decode.d2.loss_cls: 0.2307, decode.d2.loss_mask: 0.4109, decode.d2.loss_dice: 0.5802, decode.d3.loss_cls: 0.1903, decode.d3.loss_mask: 0.4083, decode.d3.loss_dice: 0.5768, decode.d4.loss_cls: 0.1763, decode.d4.loss_mask: 0.4078, decode.d4.loss_dice: 0.5787, decode.d5.loss_cls: 0.1681, decode.d5.loss_mask: 0.4090, decode.d5.loss_dice: 0.5749, decode.d6.loss_cls: 0.1612, decode.d6.loss_mask: 0.4074, decode.d6.loss_dice: 0.5729, decode.d7.loss_cls: 0.1636, decode.d7.loss_mask: 0.4057, decode.d7.loss_dice: 0.5736, decode.d8.loss_cls: 0.1617, decode.d8.loss_mask: 0.4057, decode.d8.loss_dice: 0.5695, loss: 13.3108 +2022-06-05 05:45:30,695 - mmseg - INFO - Iter [29900/40000] lr: 1.926e-06, eta: 1:22:10, time: 0.428, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1643, decode.loss_mask: 0.4127, decode.loss_dice: 0.5987, decode.d0.loss_cls: 1.5800, decode.d0.loss_mask: 0.4495, decode.d0.loss_dice: 0.6949, decode.d1.loss_cls: 0.3294, decode.d1.loss_mask: 0.4313, decode.d1.loss_dice: 0.6296, decode.d2.loss_cls: 0.2371, decode.d2.loss_mask: 0.4203, decode.d2.loss_dice: 0.6117, decode.d3.loss_cls: 0.1995, decode.d3.loss_mask: 0.4161, decode.d3.loss_dice: 0.6028, decode.d4.loss_cls: 0.1888, decode.d4.loss_mask: 0.4135, decode.d4.loss_dice: 0.6049, decode.d5.loss_cls: 0.1782, decode.d5.loss_mask: 0.4133, decode.d5.loss_dice: 0.6014, decode.d6.loss_cls: 0.1671, decode.d6.loss_mask: 0.4150, decode.d6.loss_dice: 0.6041, decode.d7.loss_cls: 0.1674, decode.d7.loss_mask: 0.4142, decode.d7.loss_dice: 0.5951, decode.d8.loss_cls: 0.1671, decode.d8.loss_mask: 0.4136, decode.d8.loss_dice: 0.5980, loss: 13.7197 +2022-06-05 05:45:52,131 - mmseg - INFO - Iter [29950/40000] lr: 1.916e-06, eta: 1:21:45, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1504, decode.loss_mask: 0.4289, decode.loss_dice: 0.5861, decode.d0.loss_cls: 1.5588, decode.d0.loss_mask: 0.4725, decode.d0.loss_dice: 0.6837, decode.d1.loss_cls: 0.3224, decode.d1.loss_mask: 0.4440, decode.d1.loss_dice: 0.6212, decode.d2.loss_cls: 0.2218, decode.d2.loss_mask: 0.4360, decode.d2.loss_dice: 0.6006, decode.d3.loss_cls: 0.1755, decode.d3.loss_mask: 0.4315, decode.d3.loss_dice: 0.5899, decode.d4.loss_cls: 0.1690, decode.d4.loss_mask: 0.4304, decode.d4.loss_dice: 0.5911, decode.d5.loss_cls: 0.1588, decode.d5.loss_mask: 0.4294, decode.d5.loss_dice: 0.5899, decode.d6.loss_cls: 0.1583, decode.d6.loss_mask: 0.4285, decode.d6.loss_dice: 0.5856, decode.d7.loss_cls: 0.1558, decode.d7.loss_mask: 0.4285, decode.d7.loss_dice: 0.5843, decode.d8.loss_cls: 0.1480, decode.d8.loss_mask: 0.4287, decode.d8.loss_dice: 0.5885, loss: 13.5979 +2022-06-05 05:46:16,830 - mmseg - INFO - Saving checkpoint at 30000 iterations +2022-06-05 05:46:19,286 - mmseg - INFO - Exp name: 
mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:46:19,287 - mmseg - INFO - Iter [30000/40000] lr: 1.907e-06, eta: 1:21:21, time: 0.543, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1627, decode.loss_mask: 0.4007, decode.loss_dice: 0.5819, decode.d0.loss_cls: 1.6167, decode.d0.loss_mask: 0.4436, decode.d0.loss_dice: 0.6817, decode.d1.loss_cls: 0.3154, decode.d1.loss_mask: 0.4189, decode.d1.loss_dice: 0.6206, decode.d2.loss_cls: 0.2175, decode.d2.loss_mask: 0.4077, decode.d2.loss_dice: 0.5950, decode.d3.loss_cls: 0.1877, decode.d3.loss_mask: 0.4059, decode.d3.loss_dice: 0.5858, decode.d4.loss_cls: 0.1798, decode.d4.loss_mask: 0.4041, decode.d4.loss_dice: 0.5811, decode.d5.loss_cls: 0.1681, decode.d5.loss_mask: 0.4027, decode.d5.loss_dice: 0.5819, decode.d6.loss_cls: 0.1581, decode.d6.loss_mask: 0.4030, decode.d6.loss_dice: 0.5815, decode.d7.loss_cls: 0.1595, decode.d7.loss_mask: 0.4007, decode.d7.loss_dice: 0.5847, decode.d8.loss_cls: 0.1602, decode.d8.loss_mask: 0.4011, decode.d8.loss_dice: 0.5825, loss: 13.3908 +2022-06-05 05:46:40,805 - mmseg - INFO - Iter [30050/40000] lr: 1.897e-06, eta: 1:20:56, time: 0.431, data_time: 0.007, memory: 31652, decode.loss_cls: 0.2011, decode.loss_mask: 0.4150, decode.loss_dice: 0.5967, decode.d0.loss_cls: 1.6170, decode.d0.loss_mask: 0.4565, decode.d0.loss_dice: 0.7038, decode.d1.loss_cls: 0.3582, decode.d1.loss_mask: 0.4332, decode.d1.loss_dice: 0.6392, decode.d2.loss_cls: 0.2607, decode.d2.loss_mask: 0.4211, decode.d2.loss_dice: 0.6139, decode.d3.loss_cls: 0.2237, decode.d3.loss_mask: 0.4160, decode.d3.loss_dice: 0.6031, decode.d4.loss_cls: 0.2192, decode.d4.loss_mask: 0.4148, decode.d4.loss_dice: 0.5954, decode.d5.loss_cls: 0.2053, decode.d5.loss_mask: 0.4174, decode.d5.loss_dice: 0.6032, decode.d6.loss_cls: 0.2041, decode.d6.loss_mask: 0.4150, decode.d6.loss_dice: 0.5997, decode.d7.loss_cls: 0.2043, decode.d7.loss_mask: 0.4142, decode.d7.loss_dice: 0.5964, decode.d8.loss_cls: 0.1968, decode.d8.loss_mask: 0.4155, decode.d8.loss_dice: 0.6035, loss: 14.0642 +2022-06-05 05:47:02,277 - mmseg - INFO - Iter [30100/40000] lr: 1.888e-06, eta: 1:20:31, time: 0.429, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1415, decode.loss_mask: 0.4135, decode.loss_dice: 0.5752, decode.d0.loss_cls: 1.4939, decode.d0.loss_mask: 0.4592, decode.d0.loss_dice: 0.6706, decode.d1.loss_cls: 0.2858, decode.d1.loss_mask: 0.4284, decode.d1.loss_dice: 0.6105, decode.d2.loss_cls: 0.2066, decode.d2.loss_mask: 0.4178, decode.d2.loss_dice: 0.5884, decode.d3.loss_cls: 0.1692, decode.d3.loss_mask: 0.4149, decode.d3.loss_dice: 0.5798, decode.d4.loss_cls: 0.1597, decode.d4.loss_mask: 0.4157, decode.d4.loss_dice: 0.5825, decode.d5.loss_cls: 0.1500, decode.d5.loss_mask: 0.4159, decode.d5.loss_dice: 0.5776, decode.d6.loss_cls: 0.1422, decode.d6.loss_mask: 0.4156, decode.d6.loss_dice: 0.5738, decode.d7.loss_cls: 0.1435, decode.d7.loss_mask: 0.4142, decode.d7.loss_dice: 0.5752, decode.d8.loss_cls: 0.1409, decode.d8.loss_mask: 0.4120, decode.d8.loss_dice: 0.5751, loss: 13.1494 +2022-06-05 05:47:24,240 - mmseg - INFO - Iter [30150/40000] lr: 1.878e-06, eta: 1:20:05, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1460, decode.loss_mask: 0.4077, decode.loss_dice: 0.5835, decode.d0.loss_cls: 1.5567, decode.d0.loss_mask: 0.4455, decode.d0.loss_dice: 0.6696, decode.d1.loss_cls: 0.3082, decode.d1.loss_mask: 0.4215, decode.d1.loss_dice: 0.6145, decode.d2.loss_cls: 0.2205, decode.d2.loss_mask: 0.4124, decode.d2.loss_dice: 0.5901, 
decode.d3.loss_cls: 0.1826, decode.d3.loss_mask: 0.4087, decode.d3.loss_dice: 0.5824, decode.d4.loss_cls: 0.1665, decode.d4.loss_mask: 0.4083, decode.d4.loss_dice: 0.5837, decode.d5.loss_cls: 0.1650, decode.d5.loss_mask: 0.4075, decode.d5.loss_dice: 0.5801, decode.d6.loss_cls: 0.1553, decode.d6.loss_mask: 0.4067, decode.d6.loss_dice: 0.5783, decode.d7.loss_cls: 0.1458, decode.d7.loss_mask: 0.4077, decode.d7.loss_dice: 0.5829, decode.d8.loss_cls: 0.1515, decode.d8.loss_mask: 0.4069, decode.d8.loss_dice: 0.5813, loss: 13.2775 +2022-06-05 05:47:46,156 - mmseg - INFO - Iter [30200/40000] lr: 1.868e-06, eta: 1:19:40, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1598, decode.loss_mask: 0.4232, decode.loss_dice: 0.5923, decode.d0.loss_cls: 1.5409, decode.d0.loss_mask: 0.4679, decode.d0.loss_dice: 0.6877, decode.d1.loss_cls: 0.3128, decode.d1.loss_mask: 0.4402, decode.d1.loss_dice: 0.6306, decode.d2.loss_cls: 0.2219, decode.d2.loss_mask: 0.4283, decode.d2.loss_dice: 0.6117, decode.d3.loss_cls: 0.1906, decode.d3.loss_mask: 0.4258, decode.d3.loss_dice: 0.6003, decode.d4.loss_cls: 0.1815, decode.d4.loss_mask: 0.4238, decode.d4.loss_dice: 0.6020, decode.d5.loss_cls: 0.1746, decode.d5.loss_mask: 0.4241, decode.d5.loss_dice: 0.6003, decode.d6.loss_cls: 0.1668, decode.d6.loss_mask: 0.4230, decode.d6.loss_dice: 0.5950, decode.d7.loss_cls: 0.1583, decode.d7.loss_mask: 0.4237, decode.d7.loss_dice: 0.5984, decode.d8.loss_cls: 0.1554, decode.d8.loss_mask: 0.4220, decode.d8.loss_dice: 0.5969, loss: 13.6798 +2022-06-05 05:48:07,521 - mmseg - INFO - Iter [30250/40000] lr: 1.859e-06, eta: 1:19:15, time: 0.427, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1689, decode.loss_mask: 0.4158, decode.loss_dice: 0.5998, decode.d0.loss_cls: 1.6321, decode.d0.loss_mask: 0.4578, decode.d0.loss_dice: 0.7026, decode.d1.loss_cls: 0.3364, decode.d1.loss_mask: 0.4319, decode.d1.loss_dice: 0.6400, decode.d2.loss_cls: 0.2382, decode.d2.loss_mask: 0.4240, decode.d2.loss_dice: 0.6170, decode.d3.loss_cls: 0.2050, decode.d3.loss_mask: 0.4217, decode.d3.loss_dice: 0.6068, decode.d4.loss_cls: 0.1912, decode.d4.loss_mask: 0.4197, decode.d4.loss_dice: 0.6067, decode.d5.loss_cls: 0.1827, decode.d5.loss_mask: 0.4194, decode.d5.loss_dice: 0.6049, decode.d6.loss_cls: 0.1796, decode.d6.loss_mask: 0.4188, decode.d6.loss_dice: 0.6024, decode.d7.loss_cls: 0.1705, decode.d7.loss_mask: 0.4190, decode.d7.loss_dice: 0.6000, decode.d8.loss_cls: 0.1675, decode.d8.loss_mask: 0.4174, decode.d8.loss_dice: 0.6026, loss: 13.9004 +2022-06-05 05:48:31,558 - mmseg - INFO - Iter [30300/40000] lr: 1.849e-06, eta: 1:18:50, time: 0.481, data_time: 0.055, memory: 31652, decode.loss_cls: 0.1578, decode.loss_mask: 0.4195, decode.loss_dice: 0.5886, decode.d0.loss_cls: 1.5699, decode.d0.loss_mask: 0.4674, decode.d0.loss_dice: 0.6972, decode.d1.loss_cls: 0.3036, decode.d1.loss_mask: 0.4421, decode.d1.loss_dice: 0.6278, decode.d2.loss_cls: 0.2089, decode.d2.loss_mask: 0.4301, decode.d2.loss_dice: 0.6068, decode.d3.loss_cls: 0.1795, decode.d3.loss_mask: 0.4256, decode.d3.loss_dice: 0.5981, decode.d4.loss_cls: 0.1745, decode.d4.loss_mask: 0.4236, decode.d4.loss_dice: 0.5939, decode.d5.loss_cls: 0.1669, decode.d5.loss_mask: 0.4224, decode.d5.loss_dice: 0.5921, decode.d6.loss_cls: 0.1610, decode.d6.loss_mask: 0.4216, decode.d6.loss_dice: 0.5923, decode.d7.loss_cls: 0.1565, decode.d7.loss_mask: 0.4210, decode.d7.loss_dice: 0.5925, decode.d8.loss_cls: 0.1550, decode.d8.loss_mask: 0.4220, decode.d8.loss_dice: 0.5883, loss: 13.6066 +2022-06-05 
05:48:53,380 - mmseg - INFO - Iter [30350/40000] lr: 1.840e-06, eta: 1:18:25, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1634, decode.loss_mask: 0.4217, decode.loss_dice: 0.5857, decode.d0.loss_cls: 1.5488, decode.d0.loss_mask: 0.4622, decode.d0.loss_dice: 0.6812, decode.d1.loss_cls: 0.3181, decode.d1.loss_mask: 0.4404, decode.d1.loss_dice: 0.6197, decode.d2.loss_cls: 0.2212, decode.d2.loss_mask: 0.4297, decode.d2.loss_dice: 0.5943, decode.d3.loss_cls: 0.1869, decode.d3.loss_mask: 0.4260, decode.d3.loss_dice: 0.5882, decode.d4.loss_cls: 0.1743, decode.d4.loss_mask: 0.4260, decode.d4.loss_dice: 0.5911, decode.d5.loss_cls: 0.1656, decode.d5.loss_mask: 0.4256, decode.d5.loss_dice: 0.5894, decode.d6.loss_cls: 0.1633, decode.d6.loss_mask: 0.4243, decode.d6.loss_dice: 0.5834, decode.d7.loss_cls: 0.1584, decode.d7.loss_mask: 0.4230, decode.d7.loss_dice: 0.5842, decode.d8.loss_cls: 0.1556, decode.d8.loss_mask: 0.4236, decode.d8.loss_dice: 0.5844, loss: 13.5597 +2022-06-05 05:49:15,354 - mmseg - INFO - Iter [30400/40000] lr: 1.830e-06, eta: 1:18:00, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1529, decode.loss_mask: 0.4190, decode.loss_dice: 0.5909, decode.d0.loss_cls: 1.5762, decode.d0.loss_mask: 0.4635, decode.d0.loss_dice: 0.6972, decode.d1.loss_cls: 0.3158, decode.d1.loss_mask: 0.4352, decode.d1.loss_dice: 0.6274, decode.d2.loss_cls: 0.2134, decode.d2.loss_mask: 0.4244, decode.d2.loss_dice: 0.6110, decode.d3.loss_cls: 0.1773, decode.d3.loss_mask: 0.4221, decode.d3.loss_dice: 0.5999, decode.d4.loss_cls: 0.1704, decode.d4.loss_mask: 0.4195, decode.d4.loss_dice: 0.5934, decode.d5.loss_cls: 0.1632, decode.d5.loss_mask: 0.4181, decode.d5.loss_dice: 0.5956, decode.d6.loss_cls: 0.1574, decode.d6.loss_mask: 0.4185, decode.d6.loss_dice: 0.5936, decode.d7.loss_cls: 0.1537, decode.d7.loss_mask: 0.4184, decode.d7.loss_dice: 0.5948, decode.d8.loss_cls: 0.1526, decode.d8.loss_mask: 0.4190, decode.d8.loss_dice: 0.5945, loss: 13.5888 +2022-06-05 05:49:37,372 - mmseg - INFO - Iter [30450/40000] lr: 1.821e-06, eta: 1:17:35, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1607, decode.loss_mask: 0.4192, decode.loss_dice: 0.5734, decode.d0.loss_cls: 1.5889, decode.d0.loss_mask: 0.4641, decode.d0.loss_dice: 0.6729, decode.d1.loss_cls: 0.3159, decode.d1.loss_mask: 0.4360, decode.d1.loss_dice: 0.6051, decode.d2.loss_cls: 0.2273, decode.d2.loss_mask: 0.4244, decode.d2.loss_dice: 0.5878, decode.d3.loss_cls: 0.1930, decode.d3.loss_mask: 0.4218, decode.d3.loss_dice: 0.5766, decode.d4.loss_cls: 0.1803, decode.d4.loss_mask: 0.4226, decode.d4.loss_dice: 0.5800, decode.d5.loss_cls: 0.1782, decode.d5.loss_mask: 0.4215, decode.d5.loss_dice: 0.5734, decode.d6.loss_cls: 0.1645, decode.d6.loss_mask: 0.4215, decode.d6.loss_dice: 0.5756, decode.d7.loss_cls: 0.1645, decode.d7.loss_mask: 0.4202, decode.d7.loss_dice: 0.5742, decode.d8.loss_cls: 0.1624, decode.d8.loss_mask: 0.4193, decode.d8.loss_dice: 0.5732, loss: 13.4985 +2022-06-05 05:49:59,007 - mmseg - INFO - Iter [30500/40000] lr: 1.811e-06, eta: 1:17:10, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1408, decode.loss_mask: 0.4295, decode.loss_dice: 0.5960, decode.d0.loss_cls: 1.5322, decode.d0.loss_mask: 0.4680, decode.d0.loss_dice: 0.6881, decode.d1.loss_cls: 0.2930, decode.d1.loss_mask: 0.4458, decode.d1.loss_dice: 0.6357, decode.d2.loss_cls: 0.2144, decode.d2.loss_mask: 0.4343, decode.d2.loss_dice: 0.6100, decode.d3.loss_cls: 0.1724, decode.d3.loss_mask: 0.4326, decode.d3.loss_dice: 0.6034, 
decode.d4.loss_cls: 0.1620, decode.d4.loss_mask: 0.4310, decode.d4.loss_dice: 0.6051, decode.d5.loss_cls: 0.1558, decode.d5.loss_mask: 0.4294, decode.d5.loss_dice: 0.6015, decode.d6.loss_cls: 0.1485, decode.d6.loss_mask: 0.4296, decode.d6.loss_dice: 0.5963, decode.d7.loss_cls: 0.1458, decode.d7.loss_mask: 0.4304, decode.d7.loss_dice: 0.5964, decode.d8.loss_cls: 0.1452, decode.d8.loss_mask: 0.4292, decode.d8.loss_dice: 0.5967, loss: 13.5992 +2022-06-05 05:50:20,651 - mmseg - INFO - Iter [30550/40000] lr: 1.802e-06, eta: 1:16:44, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1527, decode.loss_mask: 0.4108, decode.loss_dice: 0.5844, decode.d0.loss_cls: 1.5595, decode.d0.loss_mask: 0.4525, decode.d0.loss_dice: 0.6839, decode.d1.loss_cls: 0.3110, decode.d1.loss_mask: 0.4248, decode.d1.loss_dice: 0.6186, decode.d2.loss_cls: 0.2172, decode.d2.loss_mask: 0.4175, decode.d2.loss_dice: 0.5966, decode.d3.loss_cls: 0.1824, decode.d3.loss_mask: 0.4148, decode.d3.loss_dice: 0.5883, decode.d4.loss_cls: 0.1730, decode.d4.loss_mask: 0.4123, decode.d4.loss_dice: 0.5871, decode.d5.loss_cls: 0.1616, decode.d5.loss_mask: 0.4122, decode.d5.loss_dice: 0.5913, decode.d6.loss_cls: 0.1551, decode.d6.loss_mask: 0.4122, decode.d6.loss_dice: 0.5879, decode.d7.loss_cls: 0.1521, decode.d7.loss_mask: 0.4118, decode.d7.loss_dice: 0.5874, decode.d8.loss_cls: 0.1539, decode.d8.loss_mask: 0.4117, decode.d8.loss_dice: 0.5899, loss: 13.4145 +2022-06-05 05:50:45,207 - mmseg - INFO - Iter [30600/40000] lr: 1.792e-06, eta: 1:16:20, time: 0.491, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1766, decode.loss_mask: 0.4035, decode.loss_dice: 0.5825, decode.d0.loss_cls: 1.5809, decode.d0.loss_mask: 0.4416, decode.d0.loss_dice: 0.6844, decode.d1.loss_cls: 0.3190, decode.d1.loss_mask: 0.4213, decode.d1.loss_dice: 0.6153, decode.d2.loss_cls: 0.2402, decode.d2.loss_mask: 0.4111, decode.d2.loss_dice: 0.5941, decode.d3.loss_cls: 0.2019, decode.d3.loss_mask: 0.4076, decode.d3.loss_dice: 0.5857, decode.d4.loss_cls: 0.1951, decode.d4.loss_mask: 0.4066, decode.d4.loss_dice: 0.5872, decode.d5.loss_cls: 0.1823, decode.d5.loss_mask: 0.4084, decode.d5.loss_dice: 0.5861, decode.d6.loss_cls: 0.1740, decode.d6.loss_mask: 0.4069, decode.d6.loss_dice: 0.5846, decode.d7.loss_cls: 0.1753, decode.d7.loss_mask: 0.4042, decode.d7.loss_dice: 0.5832, decode.d8.loss_cls: 0.1685, decode.d8.loss_mask: 0.4046, decode.d8.loss_dice: 0.5874, loss: 13.5200 +2022-06-05 05:51:07,515 - mmseg - INFO - Iter [30650/40000] lr: 1.783e-06, eta: 1:15:55, time: 0.446, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1515, decode.loss_mask: 0.4139, decode.loss_dice: 0.5700, decode.d0.loss_cls: 1.5356, decode.d0.loss_mask: 0.4580, decode.d0.loss_dice: 0.6593, decode.d1.loss_cls: 0.3054, decode.d1.loss_mask: 0.4356, decode.d1.loss_dice: 0.6067, decode.d2.loss_cls: 0.2149, decode.d2.loss_mask: 0.4211, decode.d2.loss_dice: 0.5816, decode.d3.loss_cls: 0.1719, decode.d3.loss_mask: 0.4186, decode.d3.loss_dice: 0.5752, decode.d4.loss_cls: 0.1667, decode.d4.loss_mask: 0.4163, decode.d4.loss_dice: 0.5762, decode.d5.loss_cls: 0.1568, decode.d5.loss_mask: 0.4170, decode.d5.loss_dice: 0.5722, decode.d6.loss_cls: 0.1503, decode.d6.loss_mask: 0.4152, decode.d6.loss_dice: 0.5723, decode.d7.loss_cls: 0.1455, decode.d7.loss_mask: 0.4153, decode.d7.loss_dice: 0.5715, decode.d8.loss_cls: 0.1465, decode.d8.loss_mask: 0.4157, decode.d8.loss_dice: 0.5705, loss: 13.2272 +2022-06-05 05:51:29,718 - mmseg - INFO - Iter [30700/40000] lr: 1.773e-06, eta: 1:15:30, time: 0.445, 
data_time: 0.009, memory: 31652, decode.loss_cls: 0.1589, decode.loss_mask: 0.4284, decode.loss_dice: 0.5871, decode.d0.loss_cls: 1.5497, decode.d0.loss_mask: 0.4689, decode.d0.loss_dice: 0.6803, decode.d1.loss_cls: 0.3361, decode.d1.loss_mask: 0.4443, decode.d1.loss_dice: 0.6222, decode.d2.loss_cls: 0.2292, decode.d2.loss_mask: 0.4336, decode.d2.loss_dice: 0.5961, decode.d3.loss_cls: 0.1861, decode.d3.loss_mask: 0.4306, decode.d3.loss_dice: 0.5879, decode.d4.loss_cls: 0.1764, decode.d4.loss_mask: 0.4293, decode.d4.loss_dice: 0.5877, decode.d5.loss_cls: 0.1723, decode.d5.loss_mask: 0.4311, decode.d5.loss_dice: 0.5868, decode.d6.loss_cls: 0.1649, decode.d6.loss_mask: 0.4282, decode.d6.loss_dice: 0.5808, decode.d7.loss_cls: 0.1581, decode.d7.loss_mask: 0.4288, decode.d7.loss_dice: 0.5843, decode.d8.loss_cls: 0.1573, decode.d8.loss_mask: 0.4293, decode.d8.loss_dice: 0.5825, loss: 13.6374 +2022-06-05 05:51:51,294 - mmseg - INFO - Iter [30750/40000] lr: 1.764e-06, eta: 1:15:05, time: 0.432, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1504, decode.loss_mask: 0.4119, decode.loss_dice: 0.5938, decode.d0.loss_cls: 1.5527, decode.d0.loss_mask: 0.4488, decode.d0.loss_dice: 0.6762, decode.d1.loss_cls: 0.3204, decode.d1.loss_mask: 0.4294, decode.d1.loss_dice: 0.6333, decode.d2.loss_cls: 0.2177, decode.d2.loss_mask: 0.4176, decode.d2.loss_dice: 0.6054, decode.d3.loss_cls: 0.1811, decode.d3.loss_mask: 0.4173, decode.d3.loss_dice: 0.5988, decode.d4.loss_cls: 0.1690, decode.d4.loss_mask: 0.4150, decode.d4.loss_dice: 0.5940, decode.d5.loss_cls: 0.1601, decode.d5.loss_mask: 0.4155, decode.d5.loss_dice: 0.5959, decode.d6.loss_cls: 0.1577, decode.d6.loss_mask: 0.4140, decode.d6.loss_dice: 0.5921, decode.d7.loss_cls: 0.1538, decode.d7.loss_mask: 0.4117, decode.d7.loss_dice: 0.5904, decode.d8.loss_cls: 0.1472, decode.d8.loss_mask: 0.4117, decode.d8.loss_dice: 0.5965, loss: 13.4793 +2022-06-05 05:52:13,468 - mmseg - INFO - Iter [30800/40000] lr: 1.754e-06, eta: 1:14:40, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1465, decode.loss_mask: 0.4344, decode.loss_dice: 0.5794, decode.d0.loss_cls: 1.5757, decode.d0.loss_mask: 0.4775, decode.d0.loss_dice: 0.6767, decode.d1.loss_cls: 0.3148, decode.d1.loss_mask: 0.4496, decode.d1.loss_dice: 0.6011, decode.d2.loss_cls: 0.2169, decode.d2.loss_mask: 0.4387, decode.d2.loss_dice: 0.5862, decode.d3.loss_cls: 0.1792, decode.d3.loss_mask: 0.4342, decode.d3.loss_dice: 0.5818, decode.d4.loss_cls: 0.1741, decode.d4.loss_mask: 0.4329, decode.d4.loss_dice: 0.5787, decode.d5.loss_cls: 0.1596, decode.d5.loss_mask: 0.4336, decode.d5.loss_dice: 0.5780, decode.d6.loss_cls: 0.1520, decode.d6.loss_mask: 0.4351, decode.d6.loss_dice: 0.5763, decode.d7.loss_cls: 0.1521, decode.d7.loss_mask: 0.4332, decode.d7.loss_dice: 0.5774, decode.d8.loss_cls: 0.1497, decode.d8.loss_mask: 0.4328, decode.d8.loss_dice: 0.5749, loss: 13.5330 +2022-06-05 05:52:35,211 - mmseg - INFO - Iter [30850/40000] lr: 1.745e-06, eta: 1:14:15, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1548, decode.loss_mask: 0.4096, decode.loss_dice: 0.5791, decode.d0.loss_cls: 1.5570, decode.d0.loss_mask: 0.4446, decode.d0.loss_dice: 0.6656, decode.d1.loss_cls: 0.3011, decode.d1.loss_mask: 0.4244, decode.d1.loss_dice: 0.6085, decode.d2.loss_cls: 0.2156, decode.d2.loss_mask: 0.4166, decode.d2.loss_dice: 0.5858, decode.d3.loss_cls: 0.1765, decode.d3.loss_mask: 0.4167, decode.d3.loss_dice: 0.5784, decode.d4.loss_cls: 0.1699, decode.d4.loss_mask: 0.4135, decode.d4.loss_dice: 0.5806, 
decode.d5.loss_cls: 0.1649, decode.d5.loss_mask: 0.4119, decode.d5.loss_dice: 0.5780, decode.d6.loss_cls: 0.1555, decode.d6.loss_mask: 0.4122, decode.d6.loss_dice: 0.5726, decode.d7.loss_cls: 0.1565, decode.d7.loss_mask: 0.4109, decode.d7.loss_dice: 0.5727, decode.d8.loss_cls: 0.1537, decode.d8.loss_mask: 0.4084, decode.d8.loss_dice: 0.5739, loss: 13.2695 +2022-06-05 05:52:59,701 - mmseg - INFO - Iter [30900/40000] lr: 1.735e-06, eta: 1:13:50, time: 0.490, data_time: 0.055, memory: 31652, decode.loss_cls: 0.1785, decode.loss_mask: 0.4199, decode.loss_dice: 0.6058, decode.d0.loss_cls: 1.6472, decode.d0.loss_mask: 0.4645, decode.d0.loss_dice: 0.7285, decode.d1.loss_cls: 0.3451, decode.d1.loss_mask: 0.4404, decode.d1.loss_dice: 0.6466, decode.d2.loss_cls: 0.2553, decode.d2.loss_mask: 0.4301, decode.d2.loss_dice: 0.6219, decode.d3.loss_cls: 0.2132, decode.d3.loss_mask: 0.4259, decode.d3.loss_dice: 0.6131, decode.d4.loss_cls: 0.2012, decode.d4.loss_mask: 0.4228, decode.d4.loss_dice: 0.6109, decode.d5.loss_cls: 0.1888, decode.d5.loss_mask: 0.4230, decode.d5.loss_dice: 0.6135, decode.d6.loss_cls: 0.1778, decode.d6.loss_mask: 0.4227, decode.d6.loss_dice: 0.6087, decode.d7.loss_cls: 0.1770, decode.d7.loss_mask: 0.4233, decode.d7.loss_dice: 0.6132, decode.d8.loss_cls: 0.1833, decode.d8.loss_mask: 0.4210, decode.d8.loss_dice: 0.6133, loss: 14.1362 +2022-06-05 05:53:21,564 - mmseg - INFO - Iter [30950/40000] lr: 1.725e-06, eta: 1:13:25, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1466, decode.loss_mask: 0.4139, decode.loss_dice: 0.5853, decode.d0.loss_cls: 1.5528, decode.d0.loss_mask: 0.4642, decode.d0.loss_dice: 0.6862, decode.d1.loss_cls: 0.3132, decode.d1.loss_mask: 0.4344, decode.d1.loss_dice: 0.6212, decode.d2.loss_cls: 0.2108, decode.d2.loss_mask: 0.4215, decode.d2.loss_dice: 0.5968, decode.d3.loss_cls: 0.1787, decode.d3.loss_mask: 0.4174, decode.d3.loss_dice: 0.5886, decode.d4.loss_cls: 0.1700, decode.d4.loss_mask: 0.4169, decode.d4.loss_dice: 0.5853, decode.d5.loss_cls: 0.1549, decode.d5.loss_mask: 0.4179, decode.d5.loss_dice: 0.5881, decode.d6.loss_cls: 0.1475, decode.d6.loss_mask: 0.4157, decode.d6.loss_dice: 0.5877, decode.d7.loss_cls: 0.1536, decode.d7.loss_mask: 0.4136, decode.d7.loss_dice: 0.5850, decode.d8.loss_cls: 0.1464, decode.d8.loss_mask: 0.4139, decode.d8.loss_dice: 0.5838, loss: 13.4116 +2022-06-05 05:53:44,223 - mmseg - INFO - Saving checkpoint at 31000 iterations +2022-06-05 05:53:47,223 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 05:53:47,224 - mmseg - INFO - Iter [31000/40000] lr: 1.716e-06, eta: 1:13:01, time: 0.513, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1454, decode.loss_mask: 0.4126, decode.loss_dice: 0.5953, decode.d0.loss_cls: 1.5598, decode.d0.loss_mask: 0.4607, decode.d0.loss_dice: 0.6964, decode.d1.loss_cls: 0.3063, decode.d1.loss_mask: 0.4300, decode.d1.loss_dice: 0.6308, decode.d2.loss_cls: 0.2159, decode.d2.loss_mask: 0.4193, decode.d2.loss_dice: 0.6014, decode.d3.loss_cls: 0.1741, decode.d3.loss_mask: 0.4152, decode.d3.loss_dice: 0.5994, decode.d4.loss_cls: 0.1679, decode.d4.loss_mask: 0.4128, decode.d4.loss_dice: 0.6025, decode.d5.loss_cls: 0.1593, decode.d5.loss_mask: 0.4127, decode.d5.loss_dice: 0.5980, decode.d6.loss_cls: 0.1536, decode.d6.loss_mask: 0.4109, decode.d6.loss_dice: 0.5952, decode.d7.loss_cls: 0.1550, decode.d7.loss_mask: 0.4109, decode.d7.loss_dice: 0.5985, decode.d8.loss_cls: 0.1496, decode.d8.loss_mask: 0.4117, decode.d8.loss_dice: 0.5956, loss: 13.4970 
+2022-06-05 05:54:09,246 - mmseg - INFO - Iter [31050/40000] lr: 1.706e-06, eta: 1:12:36, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1305, decode.loss_mask: 0.4024, decode.loss_dice: 0.5645, decode.d0.loss_cls: 1.5072, decode.d0.loss_mask: 0.4485, decode.d0.loss_dice: 0.6646, decode.d1.loss_cls: 0.2922, decode.d1.loss_mask: 0.4177, decode.d1.loss_dice: 0.6010, decode.d2.loss_cls: 0.2025, decode.d2.loss_mask: 0.4093, decode.d2.loss_dice: 0.5779, decode.d3.loss_cls: 0.1633, decode.d3.loss_mask: 0.4068, decode.d3.loss_dice: 0.5746, decode.d4.loss_cls: 0.1541, decode.d4.loss_mask: 0.4044, decode.d4.loss_dice: 0.5707, decode.d5.loss_cls: 0.1388, decode.d5.loss_mask: 0.4047, decode.d5.loss_dice: 0.5716, decode.d6.loss_cls: 0.1372, decode.d6.loss_mask: 0.4034, decode.d6.loss_dice: 0.5661, decode.d7.loss_cls: 0.1362, decode.d7.loss_mask: 0.4029, decode.d7.loss_dice: 0.5691, decode.d8.loss_cls: 0.1307, decode.d8.loss_mask: 0.4027, decode.d8.loss_dice: 0.5661, loss: 12.9219 +2022-06-05 05:54:31,451 - mmseg - INFO - Iter [31100/40000] lr: 1.697e-06, eta: 1:12:11, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1491, decode.loss_mask: 0.4105, decode.loss_dice: 0.5741, decode.d0.loss_cls: 1.5665, decode.d0.loss_mask: 0.4529, decode.d0.loss_dice: 0.6679, decode.d1.loss_cls: 0.2936, decode.d1.loss_mask: 0.4262, decode.d1.loss_dice: 0.6098, decode.d2.loss_cls: 0.2018, decode.d2.loss_mask: 0.4170, decode.d2.loss_dice: 0.5894, decode.d3.loss_cls: 0.1774, decode.d3.loss_mask: 0.4155, decode.d3.loss_dice: 0.5768, decode.d4.loss_cls: 0.1672, decode.d4.loss_mask: 0.4159, decode.d4.loss_dice: 0.5816, decode.d5.loss_cls: 0.1571, decode.d5.loss_mask: 0.4138, decode.d5.loss_dice: 0.5797, decode.d6.loss_cls: 0.1481, decode.d6.loss_mask: 0.4129, decode.d6.loss_dice: 0.5740, decode.d7.loss_cls: 0.1511, decode.d7.loss_mask: 0.4131, decode.d7.loss_dice: 0.5757, decode.d8.loss_cls: 0.1491, decode.d8.loss_mask: 0.4122, decode.d8.loss_dice: 0.5783, loss: 13.2583 +2022-06-05 05:54:53,668 - mmseg - INFO - Iter [31150/40000] lr: 1.687e-06, eta: 1:11:47, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1477, decode.loss_mask: 0.4106, decode.loss_dice: 0.5721, decode.d0.loss_cls: 1.5720, decode.d0.loss_mask: 0.4549, decode.d0.loss_dice: 0.6742, decode.d1.loss_cls: 0.3199, decode.d1.loss_mask: 0.4242, decode.d1.loss_dice: 0.6105, decode.d2.loss_cls: 0.2316, decode.d2.loss_mask: 0.4153, decode.d2.loss_dice: 0.5897, decode.d3.loss_cls: 0.1903, decode.d3.loss_mask: 0.4121, decode.d3.loss_dice: 0.5790, decode.d4.loss_cls: 0.1745, decode.d4.loss_mask: 0.4124, decode.d4.loss_dice: 0.5833, decode.d5.loss_cls: 0.1671, decode.d5.loss_mask: 0.4127, decode.d5.loss_dice: 0.5829, decode.d6.loss_cls: 0.1492, decode.d6.loss_mask: 0.4122, decode.d6.loss_dice: 0.5786, decode.d7.loss_cls: 0.1500, decode.d7.loss_mask: 0.4107, decode.d7.loss_dice: 0.5808, decode.d8.loss_cls: 0.1483, decode.d8.loss_mask: 0.4101, decode.d8.loss_dice: 0.5793, loss: 13.3565 +2022-06-05 05:55:15,843 - mmseg - INFO - Iter [31200/40000] lr: 1.678e-06, eta: 1:11:22, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1696, decode.loss_mask: 0.4273, decode.loss_dice: 0.5929, decode.d0.loss_cls: 1.5847, decode.d0.loss_mask: 0.4715, decode.d0.loss_dice: 0.6931, decode.d1.loss_cls: 0.3168, decode.d1.loss_mask: 0.4455, decode.d1.loss_dice: 0.6380, decode.d2.loss_cls: 0.2410, decode.d2.loss_mask: 0.4335, decode.d2.loss_dice: 0.6073, decode.d3.loss_cls: 0.2065, decode.d3.loss_mask: 0.4316, decode.d3.loss_dice: 
0.6007, decode.d4.loss_cls: 0.1976, decode.d4.loss_mask: 0.4308, decode.d4.loss_dice: 0.5980, decode.d5.loss_cls: 0.1877, decode.d5.loss_mask: 0.4279, decode.d5.loss_dice: 0.5960, decode.d6.loss_cls: 0.1760, decode.d6.loss_mask: 0.4269, decode.d6.loss_dice: 0.5973, decode.d7.loss_cls: 0.1739, decode.d7.loss_mask: 0.4261, decode.d7.loss_dice: 0.5942, decode.d8.loss_cls: 0.1726, decode.d8.loss_mask: 0.4284, decode.d8.loss_dice: 0.5938, loss: 13.8872 +2022-06-05 05:55:40,854 - mmseg - INFO - Iter [31250/40000] lr: 1.668e-06, eta: 1:10:57, time: 0.500, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1696, decode.loss_mask: 0.4217, decode.loss_dice: 0.5612, decode.d0.loss_cls: 1.5641, decode.d0.loss_mask: 0.4613, decode.d0.loss_dice: 0.6608, decode.d1.loss_cls: 0.3308, decode.d1.loss_mask: 0.4387, decode.d1.loss_dice: 0.6054, decode.d2.loss_cls: 0.2395, decode.d2.loss_mask: 0.4260, decode.d2.loss_dice: 0.5780, decode.d3.loss_cls: 0.1988, decode.d3.loss_mask: 0.4252, decode.d3.loss_dice: 0.5670, decode.d4.loss_cls: 0.1938, decode.d4.loss_mask: 0.4223, decode.d4.loss_dice: 0.5679, decode.d5.loss_cls: 0.1835, decode.d5.loss_mask: 0.4211, decode.d5.loss_dice: 0.5612, decode.d6.loss_cls: 0.1787, decode.d6.loss_mask: 0.4204, decode.d6.loss_dice: 0.5596, decode.d7.loss_cls: 0.1750, decode.d7.loss_mask: 0.4197, decode.d7.loss_dice: 0.5616, decode.d8.loss_cls: 0.1724, decode.d8.loss_mask: 0.4195, decode.d8.loss_dice: 0.5627, loss: 13.4676 +2022-06-05 05:56:02,857 - mmseg - INFO - Iter [31300/40000] lr: 1.659e-06, eta: 1:10:32, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1560, decode.loss_mask: 0.4169, decode.loss_dice: 0.5795, decode.d0.loss_cls: 1.5520, decode.d0.loss_mask: 0.4605, decode.d0.loss_dice: 0.6801, decode.d1.loss_cls: 0.3063, decode.d1.loss_mask: 0.4316, decode.d1.loss_dice: 0.6209, decode.d2.loss_cls: 0.2209, decode.d2.loss_mask: 0.4227, decode.d2.loss_dice: 0.5954, decode.d3.loss_cls: 0.1836, decode.d3.loss_mask: 0.4215, decode.d3.loss_dice: 0.5860, decode.d4.loss_cls: 0.1709, decode.d4.loss_mask: 0.4202, decode.d4.loss_dice: 0.5856, decode.d5.loss_cls: 0.1645, decode.d5.loss_mask: 0.4213, decode.d5.loss_dice: 0.5854, decode.d6.loss_cls: 0.1566, decode.d6.loss_mask: 0.4198, decode.d6.loss_dice: 0.5816, decode.d7.loss_cls: 0.1572, decode.d7.loss_mask: 0.4200, decode.d7.loss_dice: 0.5849, decode.d8.loss_cls: 0.1510, decode.d8.loss_mask: 0.4178, decode.d8.loss_dice: 0.5822, loss: 13.4529 +2022-06-05 05:56:24,606 - mmseg - INFO - Iter [31350/40000] lr: 1.649e-06, eta: 1:10:07, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1500, decode.loss_mask: 0.4068, decode.loss_dice: 0.5897, decode.d0.loss_cls: 1.5935, decode.d0.loss_mask: 0.4502, decode.d0.loss_dice: 0.6925, decode.d1.loss_cls: 0.3019, decode.d1.loss_mask: 0.4229, decode.d1.loss_dice: 0.6326, decode.d2.loss_cls: 0.2174, decode.d2.loss_mask: 0.4130, decode.d2.loss_dice: 0.6042, decode.d3.loss_cls: 0.1768, decode.d3.loss_mask: 0.4108, decode.d3.loss_dice: 0.5963, decode.d4.loss_cls: 0.1654, decode.d4.loss_mask: 0.4077, decode.d4.loss_dice: 0.5993, decode.d5.loss_cls: 0.1595, decode.d5.loss_mask: 0.4077, decode.d5.loss_dice: 0.5948, decode.d6.loss_cls: 0.1579, decode.d6.loss_mask: 0.4060, decode.d6.loss_dice: 0.5874, decode.d7.loss_cls: 0.1532, decode.d7.loss_mask: 0.4074, decode.d7.loss_dice: 0.5902, decode.d8.loss_cls: 0.1486, decode.d8.loss_mask: 0.4093, decode.d8.loss_dice: 0.5929, loss: 13.4457 +2022-06-05 05:56:46,644 - mmseg - INFO - Iter [31400/40000] lr: 1.640e-06, eta: 1:09:42, time: 
0.441, data_time: 0.010, memory: 31652, decode.loss_cls: 0.1503, decode.loss_mask: 0.4013, decode.loss_dice: 0.5866, decode.d0.loss_cls: 1.5766, decode.d0.loss_mask: 0.4375, decode.d0.loss_dice: 0.6729, decode.d1.loss_cls: 0.3008, decode.d1.loss_mask: 0.4195, decode.d1.loss_dice: 0.6246, decode.d2.loss_cls: 0.2186, decode.d2.loss_mask: 0.4071, decode.d2.loss_dice: 0.6035, decode.d3.loss_cls: 0.1792, decode.d3.loss_mask: 0.4039, decode.d3.loss_dice: 0.5893, decode.d4.loss_cls: 0.1710, decode.d4.loss_mask: 0.4014, decode.d4.loss_dice: 0.5877, decode.d5.loss_cls: 0.1641, decode.d5.loss_mask: 0.4020, decode.d5.loss_dice: 0.5879, decode.d6.loss_cls: 0.1609, decode.d6.loss_mask: 0.3997, decode.d6.loss_dice: 0.5862, decode.d7.loss_cls: 0.1547, decode.d7.loss_mask: 0.3987, decode.d7.loss_dice: 0.5839, decode.d8.loss_cls: 0.1541, decode.d8.loss_mask: 0.4010, decode.d8.loss_dice: 0.5870, loss: 13.3119 +2022-06-05 05:57:08,457 - mmseg - INFO - Iter [31450/40000] lr: 1.630e-06, eta: 1:09:17, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1447, decode.loss_mask: 0.3779, decode.loss_dice: 0.5542, decode.d0.loss_cls: 1.5659, decode.d0.loss_mask: 0.4205, decode.d0.loss_dice: 0.6433, decode.d1.loss_cls: 0.3065, decode.d1.loss_mask: 0.3958, decode.d1.loss_dice: 0.5913, decode.d2.loss_cls: 0.2221, decode.d2.loss_mask: 0.3851, decode.d2.loss_dice: 0.5642, decode.d3.loss_cls: 0.1728, decode.d3.loss_mask: 0.3841, decode.d3.loss_dice: 0.5628, decode.d4.loss_cls: 0.1641, decode.d4.loss_mask: 0.3815, decode.d4.loss_dice: 0.5569, decode.d5.loss_cls: 0.1563, decode.d5.loss_mask: 0.3809, decode.d5.loss_dice: 0.5562, decode.d6.loss_cls: 0.1482, decode.d6.loss_mask: 0.3792, decode.d6.loss_dice: 0.5587, decode.d7.loss_cls: 0.1415, decode.d7.loss_mask: 0.3794, decode.d7.loss_dice: 0.5571, decode.d8.loss_cls: 0.1448, decode.d8.loss_mask: 0.3797, decode.d8.loss_dice: 0.5573, loss: 12.7329 +2022-06-05 05:57:30,763 - mmseg - INFO - Iter [31500/40000] lr: 1.621e-06, eta: 1:08:53, time: 0.447, data_time: 0.010, memory: 31652, decode.loss_cls: 0.1441, decode.loss_mask: 0.4079, decode.loss_dice: 0.5735, decode.d0.loss_cls: 1.5202, decode.d0.loss_mask: 0.4501, decode.d0.loss_dice: 0.6620, decode.d1.loss_cls: 0.2876, decode.d1.loss_mask: 0.4213, decode.d1.loss_dice: 0.6027, decode.d2.loss_cls: 0.2028, decode.d2.loss_mask: 0.4121, decode.d2.loss_dice: 0.5823, decode.d3.loss_cls: 0.1675, decode.d3.loss_mask: 0.4112, decode.d3.loss_dice: 0.5757, decode.d4.loss_cls: 0.1630, decode.d4.loss_mask: 0.4090, decode.d4.loss_dice: 0.5778, decode.d5.loss_cls: 0.1566, decode.d5.loss_mask: 0.4087, decode.d5.loss_dice: 0.5693, decode.d6.loss_cls: 0.1484, decode.d6.loss_mask: 0.4087, decode.d6.loss_dice: 0.5704, decode.d7.loss_cls: 0.1424, decode.d7.loss_mask: 0.4074, decode.d7.loss_dice: 0.5742, decode.d8.loss_cls: 0.1427, decode.d8.loss_mask: 0.4079, decode.d8.loss_dice: 0.5713, loss: 13.0790 +2022-06-05 05:57:55,727 - mmseg - INFO - Iter [31550/40000] lr: 1.611e-06, eta: 1:08:28, time: 0.499, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1406, decode.loss_mask: 0.4118, decode.loss_dice: 0.5828, decode.d0.loss_cls: 1.5668, decode.d0.loss_mask: 0.4520, decode.d0.loss_dice: 0.6699, decode.d1.loss_cls: 0.2879, decode.d1.loss_mask: 0.4279, decode.d1.loss_dice: 0.6192, decode.d2.loss_cls: 0.1954, decode.d2.loss_mask: 0.4184, decode.d2.loss_dice: 0.5980, decode.d3.loss_cls: 0.1728, decode.d3.loss_mask: 0.4158, decode.d3.loss_dice: 0.5866, decode.d4.loss_cls: 0.1593, decode.d4.loss_mask: 0.4148, decode.d4.loss_dice: 0.5885, 
decode.d5.loss_cls: 0.1528, decode.d5.loss_mask: 0.4134, decode.d5.loss_dice: 0.5836, decode.d6.loss_cls: 0.1478, decode.d6.loss_mask: 0.4139, decode.d6.loss_dice: 0.5822, decode.d7.loss_cls: 0.1437, decode.d7.loss_mask: 0.4128, decode.d7.loss_dice: 0.5860, decode.d8.loss_cls: 0.1408, decode.d8.loss_mask: 0.4120, decode.d8.loss_dice: 0.5819, loss: 13.2791 +2022-06-05 05:58:17,377 - mmseg - INFO - Iter [31600/40000] lr: 1.602e-06, eta: 1:08:03, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1463, decode.loss_mask: 0.4167, decode.loss_dice: 0.5589, decode.d0.loss_cls: 1.5467, decode.d0.loss_mask: 0.4579, decode.d0.loss_dice: 0.6609, decode.d1.loss_cls: 0.2896, decode.d1.loss_mask: 0.4346, decode.d1.loss_dice: 0.5975, decode.d2.loss_cls: 0.2066, decode.d2.loss_mask: 0.4236, decode.d2.loss_dice: 0.5707, decode.d3.loss_cls: 0.1707, decode.d3.loss_mask: 0.4189, decode.d3.loss_dice: 0.5631, decode.d4.loss_cls: 0.1687, decode.d4.loss_mask: 0.4159, decode.d4.loss_dice: 0.5635, decode.d5.loss_cls: 0.1540, decode.d5.loss_mask: 0.4192, decode.d5.loss_dice: 0.5644, decode.d6.loss_cls: 0.1472, decode.d6.loss_mask: 0.4187, decode.d6.loss_dice: 0.5608, decode.d7.loss_cls: 0.1477, decode.d7.loss_mask: 0.4174, decode.d7.loss_dice: 0.5603, decode.d8.loss_cls: 0.1463, decode.d8.loss_mask: 0.4175, decode.d8.loss_dice: 0.5632, loss: 13.1274 +2022-06-05 05:58:39,956 - mmseg - INFO - Iter [31650/40000] lr: 1.592e-06, eta: 1:07:39, time: 0.452, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1501, decode.loss_mask: 0.4127, decode.loss_dice: 0.5883, decode.d0.loss_cls: 1.4962, decode.d0.loss_mask: 0.4521, decode.d0.loss_dice: 0.6690, decode.d1.loss_cls: 0.3097, decode.d1.loss_mask: 0.4282, decode.d1.loss_dice: 0.6167, decode.d2.loss_cls: 0.2198, decode.d2.loss_mask: 0.4192, decode.d2.loss_dice: 0.5954, decode.d3.loss_cls: 0.1797, decode.d3.loss_mask: 0.4155, decode.d3.loss_dice: 0.5922, decode.d4.loss_cls: 0.1668, decode.d4.loss_mask: 0.4153, decode.d4.loss_dice: 0.5920, decode.d5.loss_cls: 0.1600, decode.d5.loss_mask: 0.4142, decode.d5.loss_dice: 0.5869, decode.d6.loss_cls: 0.1556, decode.d6.loss_mask: 0.4132, decode.d6.loss_dice: 0.5870, decode.d7.loss_cls: 0.1533, decode.d7.loss_mask: 0.4124, decode.d7.loss_dice: 0.5842, decode.d8.loss_cls: 0.1490, decode.d8.loss_mask: 0.4138, decode.d8.loss_dice: 0.5881, loss: 13.3363 +2022-06-05 05:59:02,218 - mmseg - INFO - Iter [31700/40000] lr: 1.583e-06, eta: 1:07:14, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1503, decode.loss_mask: 0.4133, decode.loss_dice: 0.5905, decode.d0.loss_cls: 1.5872, decode.d0.loss_mask: 0.4528, decode.d0.loss_dice: 0.6860, decode.d1.loss_cls: 0.3117, decode.d1.loss_mask: 0.4278, decode.d1.loss_dice: 0.6225, decode.d2.loss_cls: 0.2126, decode.d2.loss_mask: 0.4189, decode.d2.loss_dice: 0.5992, decode.d3.loss_cls: 0.1739, decode.d3.loss_mask: 0.4144, decode.d3.loss_dice: 0.5931, decode.d4.loss_cls: 0.1680, decode.d4.loss_mask: 0.4136, decode.d4.loss_dice: 0.5912, decode.d5.loss_cls: 0.1545, decode.d5.loss_mask: 0.4144, decode.d5.loss_dice: 0.5923, decode.d6.loss_cls: 0.1521, decode.d6.loss_mask: 0.4146, decode.d6.loss_dice: 0.5894, decode.d7.loss_cls: 0.1453, decode.d7.loss_mask: 0.4144, decode.d7.loss_dice: 0.5900, decode.d8.loss_cls: 0.1498, decode.d8.loss_mask: 0.4124, decode.d8.loss_dice: 0.5876, loss: 13.4440 +2022-06-05 05:59:24,369 - mmseg - INFO - Iter [31750/40000] lr: 1.573e-06, eta: 1:06:49, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1361, decode.loss_mask: 0.4213, 
decode.loss_dice: 0.5674, decode.d0.loss_cls: 1.5011, decode.d0.loss_mask: 0.4644, decode.d0.loss_dice: 0.6675, decode.d1.loss_cls: 0.2982, decode.d1.loss_mask: 0.4374, decode.d1.loss_dice: 0.6014, decode.d2.loss_cls: 0.2034, decode.d2.loss_mask: 0.4297, decode.d2.loss_dice: 0.5786, decode.d3.loss_cls: 0.1638, decode.d3.loss_mask: 0.4277, decode.d3.loss_dice: 0.5757, decode.d4.loss_cls: 0.1583, decode.d4.loss_mask: 0.4251, decode.d4.loss_dice: 0.5691, decode.d5.loss_cls: 0.1514, decode.d5.loss_mask: 0.4228, decode.d5.loss_dice: 0.5709, decode.d6.loss_cls: 0.1429, decode.d6.loss_mask: 0.4233, decode.d6.loss_dice: 0.5731, decode.d7.loss_cls: 0.1425, decode.d7.loss_mask: 0.4222, decode.d7.loss_dice: 0.5705, decode.d8.loss_cls: 0.1378, decode.d8.loss_mask: 0.4213, decode.d8.loss_dice: 0.5718, loss: 13.1768 +2022-06-05 05:59:46,590 - mmseg - INFO - Iter [31800/40000] lr: 1.563e-06, eta: 1:06:24, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1542, decode.loss_mask: 0.4062, decode.loss_dice: 0.5759, decode.d0.loss_cls: 1.5596, decode.d0.loss_mask: 0.4436, decode.d0.loss_dice: 0.6704, decode.d1.loss_cls: 0.3217, decode.d1.loss_mask: 0.4212, decode.d1.loss_dice: 0.6130, decode.d2.loss_cls: 0.2294, decode.d2.loss_mask: 0.4099, decode.d2.loss_dice: 0.5916, decode.d3.loss_cls: 0.1847, decode.d3.loss_mask: 0.4080, decode.d3.loss_dice: 0.5850, decode.d4.loss_cls: 0.1798, decode.d4.loss_mask: 0.4086, decode.d4.loss_dice: 0.5828, decode.d5.loss_cls: 0.1715, decode.d5.loss_mask: 0.4076, decode.d5.loss_dice: 0.5809, decode.d6.loss_cls: 0.1578, decode.d6.loss_mask: 0.4083, decode.d6.loss_dice: 0.5760, decode.d7.loss_cls: 0.1571, decode.d7.loss_mask: 0.4053, decode.d7.loss_dice: 0.5776, decode.d8.loss_cls: 0.1547, decode.d8.loss_mask: 0.4066, decode.d8.loss_dice: 0.5783, loss: 13.3274 +2022-06-05 06:00:11,821 - mmseg - INFO - Iter [31850/40000] lr: 1.554e-06, eta: 1:06:00, time: 0.505, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1454, decode.loss_mask: 0.4144, decode.loss_dice: 0.5713, decode.d0.loss_cls: 1.5378, decode.d0.loss_mask: 0.4571, decode.d0.loss_dice: 0.6621, decode.d1.loss_cls: 0.2850, decode.d1.loss_mask: 0.4318, decode.d1.loss_dice: 0.6085, decode.d2.loss_cls: 0.2144, decode.d2.loss_mask: 0.4226, decode.d2.loss_dice: 0.5858, decode.d3.loss_cls: 0.1729, decode.d3.loss_mask: 0.4217, decode.d3.loss_dice: 0.5772, decode.d4.loss_cls: 0.1707, decode.d4.loss_mask: 0.4176, decode.d4.loss_dice: 0.5791, decode.d5.loss_cls: 0.1537, decode.d5.loss_mask: 0.4175, decode.d5.loss_dice: 0.5767, decode.d6.loss_cls: 0.1499, decode.d6.loss_mask: 0.4145, decode.d6.loss_dice: 0.5709, decode.d7.loss_cls: 0.1498, decode.d7.loss_mask: 0.4143, decode.d7.loss_dice: 0.5794, decode.d8.loss_cls: 0.1461, decode.d8.loss_mask: 0.4145, decode.d8.loss_dice: 0.5776, loss: 13.2404 +2022-06-05 06:00:34,281 - mmseg - INFO - Iter [31900/40000] lr: 1.544e-06, eta: 1:05:35, time: 0.448, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1407, decode.loss_mask: 0.4125, decode.loss_dice: 0.5733, decode.d0.loss_cls: 1.5607, decode.d0.loss_mask: 0.4593, decode.d0.loss_dice: 0.6710, decode.d1.loss_cls: 0.3237, decode.d1.loss_mask: 0.4310, decode.d1.loss_dice: 0.6100, decode.d2.loss_cls: 0.2131, decode.d2.loss_mask: 0.4222, decode.d2.loss_dice: 0.5873, decode.d3.loss_cls: 0.1755, decode.d3.loss_mask: 0.4179, decode.d3.loss_dice: 0.5814, decode.d4.loss_cls: 0.1637, decode.d4.loss_mask: 0.4164, decode.d4.loss_dice: 0.5805, decode.d5.loss_cls: 0.1511, decode.d5.loss_mask: 0.4164, decode.d5.loss_dice: 0.5807, 
decode.d6.loss_cls: 0.1512, decode.d6.loss_mask: 0.4152, decode.d6.loss_dice: 0.5725, decode.d7.loss_cls: 0.1448, decode.d7.loss_mask: 0.4139, decode.d7.loss_dice: 0.5761, decode.d8.loss_cls: 0.1410, decode.d8.loss_mask: 0.4131, decode.d8.loss_dice: 0.5748, loss: 13.2910 +2022-06-05 06:00:56,306 - mmseg - INFO - Iter [31950/40000] lr: 1.535e-06, eta: 1:05:10, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1545, decode.loss_mask: 0.4097, decode.loss_dice: 0.5849, decode.d0.loss_cls: 1.5663, decode.d0.loss_mask: 0.4482, decode.d0.loss_dice: 0.6807, decode.d1.loss_cls: 0.3163, decode.d1.loss_mask: 0.4262, decode.d1.loss_dice: 0.6218, decode.d2.loss_cls: 0.2219, decode.d2.loss_mask: 0.4165, decode.d2.loss_dice: 0.5985, decode.d3.loss_cls: 0.1883, decode.d3.loss_mask: 0.4137, decode.d3.loss_dice: 0.5948, decode.d4.loss_cls: 0.1831, decode.d4.loss_mask: 0.4110, decode.d4.loss_dice: 0.5944, decode.d5.loss_cls: 0.1664, decode.d5.loss_mask: 0.4111, decode.d5.loss_dice: 0.5869, decode.d6.loss_cls: 0.1653, decode.d6.loss_mask: 0.4121, decode.d6.loss_dice: 0.5826, decode.d7.loss_cls: 0.1598, decode.d7.loss_mask: 0.4102, decode.d7.loss_dice: 0.5860, decode.d8.loss_cls: 0.1555, decode.d8.loss_mask: 0.4110, decode.d8.loss_dice: 0.5897, loss: 13.4675 +2022-06-05 06:01:18,278 - mmseg - INFO - Saving checkpoint at 32000 iterations +2022-06-05 06:01:21,566 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:01:21,567 - mmseg - INFO - Iter [32000/40000] lr: 1.525e-06, eta: 1:04:46, time: 0.506, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1475, decode.loss_mask: 0.4123, decode.loss_dice: 0.5793, decode.d0.loss_cls: 1.5349, decode.d0.loss_mask: 0.4502, decode.d0.loss_dice: 0.6633, decode.d1.loss_cls: 0.3149, decode.d1.loss_mask: 0.4273, decode.d1.loss_dice: 0.6103, decode.d2.loss_cls: 0.2214, decode.d2.loss_mask: 0.4186, decode.d2.loss_dice: 0.5900, decode.d3.loss_cls: 0.1859, decode.d3.loss_mask: 0.4158, decode.d3.loss_dice: 0.5848, decode.d4.loss_cls: 0.1743, decode.d4.loss_mask: 0.4142, decode.d4.loss_dice: 0.5851, decode.d5.loss_cls: 0.1647, decode.d5.loss_mask: 0.4152, decode.d5.loss_dice: 0.5824, decode.d6.loss_cls: 0.1590, decode.d6.loss_mask: 0.4130, decode.d6.loss_dice: 0.5781, decode.d7.loss_cls: 0.1525, decode.d7.loss_mask: 0.4137, decode.d7.loss_dice: 0.5812, decode.d8.loss_cls: 0.1486, decode.d8.loss_mask: 0.4121, decode.d8.loss_dice: 0.5808, loss: 13.3314 +2022-06-05 06:04:00,604 - mmseg - INFO - per class results: +2022-06-05 06:04:00,624 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 90.79 | 95.85 | +| bag | 39.76 | 52.26 | +| bed | 31.93 | 40.45 | +| bedclothes | 43.67 | 60.81 | +| bench | 20.63 | 25.08 | +| bicycle | 83.58 | 93.32 | +| bird | 93.66 | 96.98 | +| boat | 83.84 | 92.59 | +| book | 50.92 | 63.49 | +| bottle | 86.17 | 95.75 | +| building | 65.19 | 77.75 | +| bus | 93.91 | 97.0 | +| cabinet | 43.5 | 63.18 | +| car | 90.76 | 95.93 | +| cat | 93.4 | 98.03 | +| ceiling | 61.93 | 78.77 | +| chair | 58.31 | 78.34 | +| cloth | 26.97 | 39.51 | +| computer | 40.12 | 51.96 | +| cow | 94.91 | 97.19 | +| cup | 43.83 | 58.21 | +| curtain | 56.39 | 71.55 | +| dog | 91.62 | 97.24 | +| door | 31.04 | 51.64 | +| fence | 44.94 | 59.25 | +| floor | 72.24 | 84.39 | +| flower | 41.37 | 65.29 | +| food | 35.55 | 48.65 | +| grass | 82.33 | 91.14 | +| ground | 55.7 | 69.2 | +| horse | 93.85 | 97.42 | +| keyboard | 87.06 | 94.16 | +| light | 57.51 | 73.82 | 
+| motorbike | 89.69 | 95.22 | +| mountain | 53.87 | 73.9 | +| mouse | 74.82 | 82.49 | +| person | 90.27 | 96.07 | +| plate | 27.08 | 36.37 | +| platform | 52.34 | 65.54 | +| pottedplant | 80.25 | 89.66 | +| road | 53.72 | 73.93 | +| rock | 50.22 | 59.67 | +| sheep | 94.17 | 97.37 | +| shelves | 36.52 | 53.71 | +| sidewalk | 28.46 | 51.88 | +| sign | 46.8 | 59.12 | +| sky | 94.96 | 97.22 | +| snow | 73.8 | 86.28 | +| sofa | 58.11 | 70.26 | +| table | 66.99 | 77.93 | +| track | 69.53 | 81.83 | +| train | 92.47 | 96.58 | +| tree | 80.93 | 90.27 | +| truck | 41.62 | 52.36 | +| tvmonitor | 85.43 | 94.18 | +| wall | 70.37 | 81.54 | +| water | 91.75 | 95.14 | +| window | 43.1 | 57.69 | +| wood | 29.29 | 41.37 | ++-------------+-------+-------+ +2022-06-05 06:04:00,624 - mmseg - INFO - Summary: +2022-06-05 06:04:00,624 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 85.26 | 63.8 | 74.81 | ++-------+------+-------+ +2022-06-05 06:04:00,626 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_28000.pth was removed +2022-06-05 06:04:03,535 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_32000.pth. +2022-06-05 06:04:03,536 - mmseg - INFO - Best mIoU is 0.6380 at 32000 iter. +2022-06-05 06:04:03,558 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:04:03,558 - mmseg - INFO - Iter(val) [638] aAcc: 0.8526, mIoU: 0.6380, mAcc: 0.7481, IoU.aeroplane: 0.9079, IoU.bag: 0.3976, IoU.bed: 0.3193, IoU.bedclothes: 0.4367, IoU.bench: 0.2063, IoU.bicycle: 0.8358, IoU.bird: 0.9366, IoU.boat: 0.8384, IoU.book: 0.5092, IoU.bottle: 0.8617, IoU.building: 0.6519, IoU.bus: 0.9391, IoU.cabinet: 0.4350, IoU.car: 0.9076, IoU.cat: 0.9340, IoU.ceiling: 0.6193, IoU.chair: 0.5831, IoU.cloth: 0.2697, IoU.computer: 0.4012, IoU.cow: 0.9491, IoU.cup: 0.4383, IoU.curtain: 0.5639, IoU.dog: 0.9162, IoU.door: 0.3104, IoU.fence: 0.4494, IoU.floor: 0.7224, IoU.flower: 0.4137, IoU.food: 0.3555, IoU.grass: 0.8233, IoU.ground: 0.5570, IoU.horse: 0.9385, IoU.keyboard: 0.8706, IoU.light: 0.5751, IoU.motorbike: 0.8969, IoU.mountain: 0.5387, IoU.mouse: 0.7482, IoU.person: 0.9027, IoU.plate: 0.2708, IoU.platform: 0.5234, IoU.pottedplant: 0.8025, IoU.road: 0.5372, IoU.rock: 0.5022, IoU.sheep: 0.9417, IoU.shelves: 0.3652, IoU.sidewalk: 0.2846, IoU.sign: 0.4680, IoU.sky: 0.9496, IoU.snow: 0.7380, IoU.sofa: 0.5811, IoU.table: 0.6699, IoU.track: 0.6953, IoU.train: 0.9247, IoU.tree: 0.8093, IoU.truck: 0.4162, IoU.tvmonitor: 0.8543, IoU.wall: 0.7037, IoU.water: 0.9175, IoU.window: 0.4310, IoU.wood: 0.2929, Acc.aeroplane: 0.9585, Acc.bag: 0.5226, Acc.bed: 0.4045, Acc.bedclothes: 0.6081, Acc.bench: 0.2508, Acc.bicycle: 0.9332, Acc.bird: 0.9698, Acc.boat: 0.9259, Acc.book: 0.6349, Acc.bottle: 0.9575, Acc.building: 0.7775, Acc.bus: 0.9700, Acc.cabinet: 0.6318, Acc.car: 0.9593, Acc.cat: 0.9803, Acc.ceiling: 0.7877, Acc.chair: 0.7834, Acc.cloth: 0.3951, Acc.computer: 0.5196, Acc.cow: 0.9719, Acc.cup: 0.5821, Acc.curtain: 0.7155, Acc.dog: 0.9724, Acc.door: 0.5164, Acc.fence: 0.5925, Acc.floor: 0.8439, Acc.flower: 0.6529, Acc.food: 0.4865, Acc.grass: 0.9114, Acc.ground: 0.6920, Acc.horse: 0.9742, Acc.keyboard: 0.9416, Acc.light: 0.7382, Acc.motorbike: 0.9522, Acc.mountain: 0.7390, Acc.mouse: 0.8249, Acc.person: 0.9607, Acc.plate: 0.3637, Acc.platform: 0.6554, Acc.pottedplant: 0.8966, Acc.road: 
0.7393, Acc.rock: 0.5967, Acc.sheep: 0.9737, Acc.shelves: 0.5371, Acc.sidewalk: 0.5188, Acc.sign: 0.5912, Acc.sky: 0.9722, Acc.snow: 0.8628, Acc.sofa: 0.7026, Acc.table: 0.7793, Acc.track: 0.8183, Acc.train: 0.9658, Acc.tree: 0.9027, Acc.truck: 0.5236, Acc.tvmonitor: 0.9418, Acc.wall: 0.8154, Acc.water: 0.9514, Acc.window: 0.5769, Acc.wood: 0.4137 +2022-06-05 06:04:25,086 - mmseg - INFO - Iter [32050/40000] lr: 1.516e-06, eta: 1:05:02, time: 3.670, data_time: 3.248, memory: 31652, decode.loss_cls: 0.1506, decode.loss_mask: 0.4105, decode.loss_dice: 0.5656, decode.d0.loss_cls: 1.4994, decode.d0.loss_mask: 0.4531, decode.d0.loss_dice: 0.6564, decode.d1.loss_cls: 0.2899, decode.d1.loss_mask: 0.4286, decode.d1.loss_dice: 0.5977, decode.d2.loss_cls: 0.1990, decode.d2.loss_mask: 0.4177, decode.d2.loss_dice: 0.5779, decode.d3.loss_cls: 0.1740, decode.d3.loss_mask: 0.4148, decode.d3.loss_dice: 0.5695, decode.d4.loss_cls: 0.1599, decode.d4.loss_mask: 0.4124, decode.d4.loss_dice: 0.5722, decode.d5.loss_cls: 0.1526, decode.d5.loss_mask: 0.4116, decode.d5.loss_dice: 0.5677, decode.d6.loss_cls: 0.1489, decode.d6.loss_mask: 0.4100, decode.d6.loss_dice: 0.5652, decode.d7.loss_cls: 0.1455, decode.d7.loss_mask: 0.4121, decode.d7.loss_dice: 0.5649, decode.d8.loss_cls: 0.1479, decode.d8.loss_mask: 0.4110, decode.d8.loss_dice: 0.5674, loss: 13.0541 +2022-06-05 06:04:46,753 - mmseg - INFO - Iter [32100/40000] lr: 1.506e-06, eta: 1:04:36, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1534, decode.loss_mask: 0.4242, decode.loss_dice: 0.5885, decode.d0.loss_cls: 1.5591, decode.d0.loss_mask: 0.4609, decode.d0.loss_dice: 0.6795, decode.d1.loss_cls: 0.3102, decode.d1.loss_mask: 0.4346, decode.d1.loss_dice: 0.6252, decode.d2.loss_cls: 0.2261, decode.d2.loss_mask: 0.4271, decode.d2.loss_dice: 0.6047, decode.d3.loss_cls: 0.1970, decode.d3.loss_mask: 0.4255, decode.d3.loss_dice: 0.5944, decode.d4.loss_cls: 0.1833, decode.d4.loss_mask: 0.4248, decode.d4.loss_dice: 0.5921, decode.d5.loss_cls: 0.1693, decode.d5.loss_mask: 0.4245, decode.d5.loss_dice: 0.5909, decode.d6.loss_cls: 0.1672, decode.d6.loss_mask: 0.4216, decode.d6.loss_dice: 0.5914, decode.d7.loss_cls: 0.1593, decode.d7.loss_mask: 0.4232, decode.d7.loss_dice: 0.5906, decode.d8.loss_cls: 0.1559, decode.d8.loss_mask: 0.4229, decode.d8.loss_dice: 0.5900, loss: 13.6174 +2022-06-05 06:05:10,848 - mmseg - INFO - Iter [32150/40000] lr: 1.497e-06, eta: 1:04:12, time: 0.482, data_time: 0.060, memory: 31652, decode.loss_cls: 0.1549, decode.loss_mask: 0.4162, decode.loss_dice: 0.5993, decode.d0.loss_cls: 1.5461, decode.d0.loss_mask: 0.4553, decode.d0.loss_dice: 0.6837, decode.d1.loss_cls: 0.3103, decode.d1.loss_mask: 0.4291, decode.d1.loss_dice: 0.6275, decode.d2.loss_cls: 0.2219, decode.d2.loss_mask: 0.4204, decode.d2.loss_dice: 0.6067, decode.d3.loss_cls: 0.1859, decode.d3.loss_mask: 0.4195, decode.d3.loss_dice: 0.5985, decode.d4.loss_cls: 0.1702, decode.d4.loss_mask: 0.4170, decode.d4.loss_dice: 0.5962, decode.d5.loss_cls: 0.1580, decode.d5.loss_mask: 0.4172, decode.d5.loss_dice: 0.5992, decode.d6.loss_cls: 0.1522, decode.d6.loss_mask: 0.4173, decode.d6.loss_dice: 0.5975, decode.d7.loss_cls: 0.1505, decode.d7.loss_mask: 0.4143, decode.d7.loss_dice: 0.5942, decode.d8.loss_cls: 0.1540, decode.d8.loss_mask: 0.4173, decode.d8.loss_dice: 0.5950, loss: 13.5251 +2022-06-05 06:05:32,802 - mmseg - INFO - Iter [32200/40000] lr: 1.487e-06, eta: 1:03:47, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1296, decode.loss_mask: 0.4099, 
decode.loss_dice: 0.5840, decode.d0.loss_cls: 1.5273, decode.d0.loss_mask: 0.4505, decode.d0.loss_dice: 0.6679, decode.d1.loss_cls: 0.2804, decode.d1.loss_mask: 0.4272, decode.d1.loss_dice: 0.6121, decode.d2.loss_cls: 0.1935, decode.d2.loss_mask: 0.4154, decode.d2.loss_dice: 0.5969, decode.d3.loss_cls: 0.1639, decode.d3.loss_mask: 0.4119, decode.d3.loss_dice: 0.5904, decode.d4.loss_cls: 0.1501, decode.d4.loss_mask: 0.4110, decode.d4.loss_dice: 0.5892, decode.d5.loss_cls: 0.1445, decode.d5.loss_mask: 0.4101, decode.d5.loss_dice: 0.5891, decode.d6.loss_cls: 0.1380, decode.d6.loss_mask: 0.4093, decode.d6.loss_dice: 0.5848, decode.d7.loss_cls: 0.1353, decode.d7.loss_mask: 0.4093, decode.d7.loss_dice: 0.5804, decode.d8.loss_cls: 0.1361, decode.d8.loss_mask: 0.4090, decode.d8.loss_dice: 0.5833, loss: 13.1405 +2022-06-05 06:05:54,327 - mmseg - INFO - Iter [32250/40000] lr: 1.478e-06, eta: 1:03:21, time: 0.430, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1398, decode.loss_mask: 0.4045, decode.loss_dice: 0.5900, decode.d0.loss_cls: 1.5486, decode.d0.loss_mask: 0.4446, decode.d0.loss_dice: 0.6760, decode.d1.loss_cls: 0.2898, decode.d1.loss_mask: 0.4199, decode.d1.loss_dice: 0.6211, decode.d2.loss_cls: 0.2057, decode.d2.loss_mask: 0.4113, decode.d2.loss_dice: 0.5992, decode.d3.loss_cls: 0.1715, decode.d3.loss_mask: 0.4090, decode.d3.loss_dice: 0.5889, decode.d4.loss_cls: 0.1583, decode.d4.loss_mask: 0.4066, decode.d4.loss_dice: 0.5867, decode.d5.loss_cls: 0.1507, decode.d5.loss_mask: 0.4059, decode.d5.loss_dice: 0.5908, decode.d6.loss_cls: 0.1468, decode.d6.loss_mask: 0.4052, decode.d6.loss_dice: 0.5873, decode.d7.loss_cls: 0.1486, decode.d7.loss_mask: 0.4065, decode.d7.loss_dice: 0.5847, decode.d8.loss_cls: 0.1386, decode.d8.loss_mask: 0.4054, decode.d8.loss_dice: 0.5855, loss: 13.2275 +2022-06-05 06:06:16,230 - mmseg - INFO - Iter [32300/40000] lr: 1.468e-06, eta: 1:02:56, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1399, decode.loss_mask: 0.4237, decode.loss_dice: 0.5634, decode.d0.loss_cls: 1.5226, decode.d0.loss_mask: 0.4709, decode.d0.loss_dice: 0.6639, decode.d1.loss_cls: 0.2799, decode.d1.loss_mask: 0.4480, decode.d1.loss_dice: 0.6040, decode.d2.loss_cls: 0.2024, decode.d2.loss_mask: 0.4334, decode.d2.loss_dice: 0.5877, decode.d3.loss_cls: 0.1687, decode.d3.loss_mask: 0.4303, decode.d3.loss_dice: 0.5749, decode.d4.loss_cls: 0.1520, decode.d4.loss_mask: 0.4284, decode.d4.loss_dice: 0.5733, decode.d5.loss_cls: 0.1457, decode.d5.loss_mask: 0.4277, decode.d5.loss_dice: 0.5733, decode.d6.loss_cls: 0.1393, decode.d6.loss_mask: 0.4263, decode.d6.loss_dice: 0.5695, decode.d7.loss_cls: 0.1380, decode.d7.loss_mask: 0.4273, decode.d7.loss_dice: 0.5694, decode.d8.loss_cls: 0.1366, decode.d8.loss_mask: 0.4272, decode.d8.loss_dice: 0.5682, loss: 13.2159 +2022-06-05 06:06:37,779 - mmseg - INFO - Iter [32350/40000] lr: 1.459e-06, eta: 1:02:31, time: 0.431, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1558, decode.loss_mask: 0.4136, decode.loss_dice: 0.5753, decode.d0.loss_cls: 1.5345, decode.d0.loss_mask: 0.4551, decode.d0.loss_dice: 0.6623, decode.d1.loss_cls: 0.3128, decode.d1.loss_mask: 0.4289, decode.d1.loss_dice: 0.6058, decode.d2.loss_cls: 0.2231, decode.d2.loss_mask: 0.4180, decode.d2.loss_dice: 0.5892, decode.d3.loss_cls: 0.1796, decode.d3.loss_mask: 0.4173, decode.d3.loss_dice: 0.5796, decode.d4.loss_cls: 0.1706, decode.d4.loss_mask: 0.4169, decode.d4.loss_dice: 0.5809, decode.d5.loss_cls: 0.1671, decode.d5.loss_mask: 0.4168, decode.d5.loss_dice: 0.5777, 
decode.d6.loss_cls: 0.1582, decode.d6.loss_mask: 0.4143, decode.d6.loss_dice: 0.5766, decode.d7.loss_cls: 0.1563, decode.d7.loss_mask: 0.4140, decode.d7.loss_dice: 0.5776, decode.d8.loss_cls: 0.1537, decode.d8.loss_mask: 0.4139, decode.d8.loss_dice: 0.5729, loss: 13.3184 +2022-06-05 06:06:59,773 - mmseg - INFO - Iter [32400/40000] lr: 1.449e-06, eta: 1:02:06, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1533, decode.loss_mask: 0.4093, decode.loss_dice: 0.5688, decode.d0.loss_cls: 1.5405, decode.d0.loss_mask: 0.4481, decode.d0.loss_dice: 0.6575, decode.d1.loss_cls: 0.3105, decode.d1.loss_mask: 0.4267, decode.d1.loss_dice: 0.6003, decode.d2.loss_cls: 0.2219, decode.d2.loss_mask: 0.4178, decode.d2.loss_dice: 0.5836, decode.d3.loss_cls: 0.1769, decode.d3.loss_mask: 0.4135, decode.d3.loss_dice: 0.5735, decode.d4.loss_cls: 0.1711, decode.d4.loss_mask: 0.4118, decode.d4.loss_dice: 0.5772, decode.d5.loss_cls: 0.1586, decode.d5.loss_mask: 0.4107, decode.d5.loss_dice: 0.5710, decode.d6.loss_cls: 0.1533, decode.d6.loss_mask: 0.4088, decode.d6.loss_dice: 0.5715, decode.d7.loss_cls: 0.1515, decode.d7.loss_mask: 0.4090, decode.d7.loss_dice: 0.5732, decode.d8.loss_cls: 0.1455, decode.d8.loss_mask: 0.4089, decode.d8.loss_dice: 0.5742, loss: 13.1982 +2022-06-05 06:07:23,803 - mmseg - INFO - Iter [32450/40000] lr: 1.440e-06, eta: 1:01:41, time: 0.481, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1497, decode.loss_mask: 0.4113, decode.loss_dice: 0.5906, decode.d0.loss_cls: 1.5706, decode.d0.loss_mask: 0.4543, decode.d0.loss_dice: 0.6803, decode.d1.loss_cls: 0.3000, decode.d1.loss_mask: 0.4343, decode.d1.loss_dice: 0.6283, decode.d2.loss_cls: 0.2215, decode.d2.loss_mask: 0.4216, decode.d2.loss_dice: 0.6046, decode.d3.loss_cls: 0.1830, decode.d3.loss_mask: 0.4169, decode.d3.loss_dice: 0.5936, decode.d4.loss_cls: 0.1742, decode.d4.loss_mask: 0.4143, decode.d4.loss_dice: 0.5951, decode.d5.loss_cls: 0.1629, decode.d5.loss_mask: 0.4151, decode.d5.loss_dice: 0.5994, decode.d6.loss_cls: 0.1575, decode.d6.loss_mask: 0.4142, decode.d6.loss_dice: 0.5953, decode.d7.loss_cls: 0.1482, decode.d7.loss_mask: 0.4127, decode.d7.loss_dice: 0.5961, decode.d8.loss_cls: 0.1553, decode.d8.loss_mask: 0.4114, decode.d8.loss_dice: 0.5917, loss: 13.5041 +2022-06-05 06:07:45,178 - mmseg - INFO - Iter [32500/40000] lr: 1.430e-06, eta: 1:01:16, time: 0.427, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1340, decode.loss_mask: 0.4145, decode.loss_dice: 0.5587, decode.d0.loss_cls: 1.5424, decode.d0.loss_mask: 0.4585, decode.d0.loss_dice: 0.6577, decode.d1.loss_cls: 0.2887, decode.d1.loss_mask: 0.4308, decode.d1.loss_dice: 0.5939, decode.d2.loss_cls: 0.2075, decode.d2.loss_mask: 0.4230, decode.d2.loss_dice: 0.5700, decode.d3.loss_cls: 0.1691, decode.d3.loss_mask: 0.4193, decode.d3.loss_dice: 0.5662, decode.d4.loss_cls: 0.1535, decode.d4.loss_mask: 0.4179, decode.d4.loss_dice: 0.5684, decode.d5.loss_cls: 0.1523, decode.d5.loss_mask: 0.4174, decode.d5.loss_dice: 0.5641, decode.d6.loss_cls: 0.1453, decode.d6.loss_mask: 0.4166, decode.d6.loss_dice: 0.5645, decode.d7.loss_cls: 0.1421, decode.d7.loss_mask: 0.4145, decode.d7.loss_dice: 0.5609, decode.d8.loss_cls: 0.1375, decode.d8.loss_mask: 0.4139, decode.d8.loss_dice: 0.5612, loss: 13.0642 +2022-06-05 06:08:06,905 - mmseg - INFO - Iter [32550/40000] lr: 1.420e-06, eta: 1:00:51, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1429, decode.loss_mask: 0.4154, decode.loss_dice: 0.5680, decode.d0.loss_cls: 1.5517, decode.d0.loss_mask: 0.4612, 
decode.d0.loss_dice: 0.6728, decode.d1.loss_cls: 0.2997, decode.d1.loss_mask: 0.4342, decode.d1.loss_dice: 0.6129, decode.d2.loss_cls: 0.1961, decode.d2.loss_mask: 0.4236, decode.d2.loss_dice: 0.5928, decode.d3.loss_cls: 0.1692, decode.d3.loss_mask: 0.4199, decode.d3.loss_dice: 0.5810, decode.d4.loss_cls: 0.1587, decode.d4.loss_mask: 0.4175, decode.d4.loss_dice: 0.5788, decode.d5.loss_cls: 0.1491, decode.d5.loss_mask: 0.4165, decode.d5.loss_dice: 0.5788, decode.d6.loss_cls: 0.1433, decode.d6.loss_mask: 0.4175, decode.d6.loss_dice: 0.5744, decode.d7.loss_cls: 0.1404, decode.d7.loss_mask: 0.4163, decode.d7.loss_dice: 0.5710, decode.d8.loss_cls: 0.1403, decode.d8.loss_mask: 0.4143, decode.d8.loss_dice: 0.5674, loss: 13.2259 +2022-06-05 06:08:28,335 - mmseg - INFO - Iter [32600/40000] lr: 1.411e-06, eta: 1:00:26, time: 0.429, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1723, decode.loss_mask: 0.4026, decode.loss_dice: 0.5890, decode.d0.loss_cls: 1.5775, decode.d0.loss_mask: 0.4387, decode.d0.loss_dice: 0.6971, decode.d1.loss_cls: 0.3210, decode.d1.loss_mask: 0.4135, decode.d1.loss_dice: 0.6287, decode.d2.loss_cls: 0.2401, decode.d2.loss_mask: 0.4031, decode.d2.loss_dice: 0.6044, decode.d3.loss_cls: 0.1974, decode.d3.loss_mask: 0.4038, decode.d3.loss_dice: 0.6012, decode.d4.loss_cls: 0.1837, decode.d4.loss_mask: 0.4031, decode.d4.loss_dice: 0.5990, decode.d5.loss_cls: 0.1789, decode.d5.loss_mask: 0.4031, decode.d5.loss_dice: 0.5953, decode.d6.loss_cls: 0.1715, decode.d6.loss_mask: 0.4024, decode.d6.loss_dice: 0.5946, decode.d7.loss_cls: 0.1704, decode.d7.loss_mask: 0.4019, decode.d7.loss_dice: 0.5979, decode.d8.loss_cls: 0.1670, decode.d8.loss_mask: 0.4006, decode.d8.loss_dice: 0.5926, loss: 13.5525 +2022-06-05 06:08:49,735 - mmseg - INFO - Iter [32650/40000] lr: 1.401e-06, eta: 1:00:00, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1489, decode.loss_mask: 0.4262, decode.loss_dice: 0.5938, decode.d0.loss_cls: 1.5438, decode.d0.loss_mask: 0.4698, decode.d0.loss_dice: 0.6735, decode.d1.loss_cls: 0.3080, decode.d1.loss_mask: 0.4411, decode.d1.loss_dice: 0.6183, decode.d2.loss_cls: 0.2215, decode.d2.loss_mask: 0.4322, decode.d2.loss_dice: 0.5976, decode.d3.loss_cls: 0.1750, decode.d3.loss_mask: 0.4295, decode.d3.loss_dice: 0.5943, decode.d4.loss_cls: 0.1702, decode.d4.loss_mask: 0.4284, decode.d4.loss_dice: 0.5931, decode.d5.loss_cls: 0.1590, decode.d5.loss_mask: 0.4270, decode.d5.loss_dice: 0.5875, decode.d6.loss_cls: 0.1512, decode.d6.loss_mask: 0.4260, decode.d6.loss_dice: 0.5917, decode.d7.loss_cls: 0.1499, decode.d7.loss_mask: 0.4264, decode.d7.loss_dice: 0.5902, decode.d8.loss_cls: 0.1557, decode.d8.loss_mask: 0.4247, decode.d8.loss_dice: 0.5899, loss: 13.5441 +2022-06-05 06:09:11,511 - mmseg - INFO - Iter [32700/40000] lr: 1.392e-06, eta: 0:59:35, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1538, decode.loss_mask: 0.4014, decode.loss_dice: 0.5878, decode.d0.loss_cls: 1.5470, decode.d0.loss_mask: 0.4496, decode.d0.loss_dice: 0.6882, decode.d1.loss_cls: 0.3006, decode.d1.loss_mask: 0.4200, decode.d1.loss_dice: 0.6341, decode.d2.loss_cls: 0.2097, decode.d2.loss_mask: 0.4088, decode.d2.loss_dice: 0.6109, decode.d3.loss_cls: 0.1865, decode.d3.loss_mask: 0.4066, decode.d3.loss_dice: 0.5960, decode.d4.loss_cls: 0.1716, decode.d4.loss_mask: 0.4050, decode.d4.loss_dice: 0.5944, decode.d5.loss_cls: 0.1613, decode.d5.loss_mask: 0.4043, decode.d5.loss_dice: 0.5943, decode.d6.loss_cls: 0.1569, decode.d6.loss_mask: 0.4029, decode.d6.loss_dice: 0.5910, 
decode.d7.loss_cls: 0.1540, decode.d7.loss_mask: 0.4034, decode.d7.loss_dice: 0.5944, decode.d8.loss_cls: 0.1556, decode.d8.loss_mask: 0.4021, decode.d8.loss_dice: 0.5873, loss: 13.3796 +2022-06-05 06:09:33,267 - mmseg - INFO - Iter [32750/40000] lr: 1.382e-06, eta: 0:59:10, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1255, decode.loss_mask: 0.4138, decode.loss_dice: 0.5698, decode.d0.loss_cls: 1.5065, decode.d0.loss_mask: 0.4492, decode.d0.loss_dice: 0.6561, decode.d1.loss_cls: 0.2822, decode.d1.loss_mask: 0.4286, decode.d1.loss_dice: 0.6009, decode.d2.loss_cls: 0.1849, decode.d2.loss_mask: 0.4199, decode.d2.loss_dice: 0.5801, decode.d3.loss_cls: 0.1546, decode.d3.loss_mask: 0.4174, decode.d3.loss_dice: 0.5713, decode.d4.loss_cls: 0.1409, decode.d4.loss_mask: 0.4160, decode.d4.loss_dice: 0.5760, decode.d5.loss_cls: 0.1357, decode.d5.loss_mask: 0.4161, decode.d5.loss_dice: 0.5732, decode.d6.loss_cls: 0.1341, decode.d6.loss_mask: 0.4149, decode.d6.loss_dice: 0.5663, decode.d7.loss_cls: 0.1234, decode.d7.loss_mask: 0.4142, decode.d7.loss_dice: 0.5672, decode.d8.loss_cls: 0.1244, decode.d8.loss_mask: 0.4152, decode.d8.loss_dice: 0.5675, loss: 12.9459 +2022-06-05 06:09:57,707 - mmseg - INFO - Iter [32800/40000] lr: 1.373e-06, eta: 0:58:46, time: 0.489, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1452, decode.loss_mask: 0.4020, decode.loss_dice: 0.5952, decode.d0.loss_cls: 1.5795, decode.d0.loss_mask: 0.4395, decode.d0.loss_dice: 0.6829, decode.d1.loss_cls: 0.3038, decode.d1.loss_mask: 0.4163, decode.d1.loss_dice: 0.6315, decode.d2.loss_cls: 0.2126, decode.d2.loss_mask: 0.4076, decode.d2.loss_dice: 0.6126, decode.d3.loss_cls: 0.1746, decode.d3.loss_mask: 0.4066, decode.d3.loss_dice: 0.6042, decode.d4.loss_cls: 0.1615, decode.d4.loss_mask: 0.4049, decode.d4.loss_dice: 0.6033, decode.d5.loss_cls: 0.1530, decode.d5.loss_mask: 0.4041, decode.d5.loss_dice: 0.6002, decode.d6.loss_cls: 0.1527, decode.d6.loss_mask: 0.4025, decode.d6.loss_dice: 0.5966, decode.d7.loss_cls: 0.1415, decode.d7.loss_mask: 0.4033, decode.d7.loss_dice: 0.5993, decode.d8.loss_cls: 0.1393, decode.d8.loss_mask: 0.4038, decode.d8.loss_dice: 0.5999, loss: 13.3801 +2022-06-05 06:10:19,657 - mmseg - INFO - Iter [32850/40000] lr: 1.363e-06, eta: 0:58:21, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1238, decode.loss_mask: 0.4093, decode.loss_dice: 0.5587, decode.d0.loss_cls: 1.5059, decode.d0.loss_mask: 0.4546, decode.d0.loss_dice: 0.6522, decode.d1.loss_cls: 0.2767, decode.d1.loss_mask: 0.4275, decode.d1.loss_dice: 0.5971, decode.d2.loss_cls: 0.2063, decode.d2.loss_mask: 0.4162, decode.d2.loss_dice: 0.5703, decode.d3.loss_cls: 0.1610, decode.d3.loss_mask: 0.4133, decode.d3.loss_dice: 0.5657, decode.d4.loss_cls: 0.1501, decode.d4.loss_mask: 0.4106, decode.d4.loss_dice: 0.5633, decode.d5.loss_cls: 0.1406, decode.d5.loss_mask: 0.4113, decode.d5.loss_dice: 0.5612, decode.d6.loss_cls: 0.1319, decode.d6.loss_mask: 0.4100, decode.d6.loss_dice: 0.5588, decode.d7.loss_cls: 0.1307, decode.d7.loss_mask: 0.4085, decode.d7.loss_dice: 0.5583, decode.d8.loss_cls: 0.1297, decode.d8.loss_mask: 0.4099, decode.d8.loss_dice: 0.5568, loss: 12.8703 +2022-06-05 06:10:41,746 - mmseg - INFO - Iter [32900/40000] lr: 1.354e-06, eta: 0:57:56, time: 0.442, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1295, decode.loss_mask: 0.4280, decode.loss_dice: 0.5717, decode.d0.loss_cls: 1.5413, decode.d0.loss_mask: 0.4675, decode.d0.loss_dice: 0.6608, decode.d1.loss_cls: 0.2852, decode.d1.loss_mask: 0.4409, 
decode.d1.loss_dice: 0.6008, decode.d2.loss_cls: 0.2018, decode.d2.loss_mask: 0.4286, decode.d2.loss_dice: 0.5783, decode.d3.loss_cls: 0.1641, decode.d3.loss_mask: 0.4271, decode.d3.loss_dice: 0.5744, decode.d4.loss_cls: 0.1504, decode.d4.loss_mask: 0.4248, decode.d4.loss_dice: 0.5755, decode.d5.loss_cls: 0.1400, decode.d5.loss_mask: 0.4256, decode.d5.loss_dice: 0.5766, decode.d6.loss_cls: 0.1323, decode.d6.loss_mask: 0.4271, decode.d6.loss_dice: 0.5717, decode.d7.loss_cls: 0.1297, decode.d7.loss_mask: 0.4279, decode.d7.loss_dice: 0.5751, decode.d8.loss_cls: 0.1287, decode.d8.loss_mask: 0.4276, decode.d8.loss_dice: 0.5737, loss: 13.1870 +2022-06-05 06:11:03,859 - mmseg - INFO - Iter [32950/40000] lr: 1.344e-06, eta: 0:57:31, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1593, decode.loss_mask: 0.4211, decode.loss_dice: 0.5955, decode.d0.loss_cls: 1.4974, decode.d0.loss_mask: 0.4644, decode.d0.loss_dice: 0.6942, decode.d1.loss_cls: 0.3113, decode.d1.loss_mask: 0.4378, decode.d1.loss_dice: 0.6342, decode.d2.loss_cls: 0.2148, decode.d2.loss_mask: 0.4288, decode.d2.loss_dice: 0.6138, decode.d3.loss_cls: 0.1913, decode.d3.loss_mask: 0.4257, decode.d3.loss_dice: 0.6044, decode.d4.loss_cls: 0.1788, decode.d4.loss_mask: 0.4240, decode.d4.loss_dice: 0.6058, decode.d5.loss_cls: 0.1646, decode.d5.loss_mask: 0.4234, decode.d5.loss_dice: 0.6017, decode.d6.loss_cls: 0.1618, decode.d6.loss_mask: 0.4227, decode.d6.loss_dice: 0.6011, decode.d7.loss_cls: 0.1627, decode.d7.loss_mask: 0.4201, decode.d7.loss_dice: 0.5989, decode.d8.loss_cls: 0.1608, decode.d8.loss_mask: 0.4222, decode.d8.loss_dice: 0.5989, loss: 13.6414 +2022-06-05 06:11:26,062 - mmseg - INFO - Saving checkpoint at 33000 iterations +2022-06-05 06:11:29,267 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:11:29,268 - mmseg - INFO - Iter [33000/40000] lr: 1.335e-06, eta: 0:57:06, time: 0.508, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1616, decode.loss_mask: 0.4065, decode.loss_dice: 0.5860, decode.d0.loss_cls: 1.6056, decode.d0.loss_mask: 0.4534, decode.d0.loss_dice: 0.6834, decode.d1.loss_cls: 0.3367, decode.d1.loss_mask: 0.4209, decode.d1.loss_dice: 0.6194, decode.d2.loss_cls: 0.2454, decode.d2.loss_mask: 0.4117, decode.d2.loss_dice: 0.5943, decode.d3.loss_cls: 0.1994, decode.d3.loss_mask: 0.4079, decode.d3.loss_dice: 0.5878, decode.d4.loss_cls: 0.1860, decode.d4.loss_mask: 0.4076, decode.d4.loss_dice: 0.5891, decode.d5.loss_cls: 0.1823, decode.d5.loss_mask: 0.4071, decode.d5.loss_dice: 0.5893, decode.d6.loss_cls: 0.1742, decode.d6.loss_mask: 0.4065, decode.d6.loss_dice: 0.5830, decode.d7.loss_cls: 0.1703, decode.d7.loss_mask: 0.4053, decode.d7.loss_dice: 0.5854, decode.d8.loss_cls: 0.1666, decode.d8.loss_mask: 0.4060, decode.d8.loss_dice: 0.5811, loss: 13.5600 +2022-06-05 06:11:51,628 - mmseg - INFO - Iter [33050/40000] lr: 1.325e-06, eta: 0:56:41, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1481, decode.loss_mask: 0.4055, decode.loss_dice: 0.5747, decode.d0.loss_cls: 1.5201, decode.d0.loss_mask: 0.4484, decode.d0.loss_dice: 0.6698, decode.d1.loss_cls: 0.2948, decode.d1.loss_mask: 0.4191, decode.d1.loss_dice: 0.6072, decode.d2.loss_cls: 0.2174, decode.d2.loss_mask: 0.4107, decode.d2.loss_dice: 0.5842, decode.d3.loss_cls: 0.1812, decode.d3.loss_mask: 0.4102, decode.d3.loss_dice: 0.5768, decode.d4.loss_cls: 0.1709, decode.d4.loss_mask: 0.4083, decode.d4.loss_dice: 0.5784, decode.d5.loss_cls: 0.1620, decode.d5.loss_mask: 0.4075, 
decode.d5.loss_dice: 0.5771, decode.d6.loss_cls: 0.1558, decode.d6.loss_mask: 0.4061, decode.d6.loss_dice: 0.5732, decode.d7.loss_cls: 0.1558, decode.d7.loss_mask: 0.4061, decode.d7.loss_dice: 0.5733, decode.d8.loss_cls: 0.1509, decode.d8.loss_mask: 0.4074, decode.d8.loss_dice: 0.5732, loss: 13.1741 +2022-06-05 06:12:16,104 - mmseg - INFO - Iter [33100/40000] lr: 1.316e-06, eta: 0:56:17, time: 0.489, data_time: 0.061, memory: 31652, decode.loss_cls: 0.1437, decode.loss_mask: 0.4082, decode.loss_dice: 0.5460, decode.d0.loss_cls: 1.5268, decode.d0.loss_mask: 0.4520, decode.d0.loss_dice: 0.6340, decode.d1.loss_cls: 0.2898, decode.d1.loss_mask: 0.4275, decode.d1.loss_dice: 0.5829, decode.d2.loss_cls: 0.2055, decode.d2.loss_mask: 0.4154, decode.d2.loss_dice: 0.5575, decode.d3.loss_cls: 0.1679, decode.d3.loss_mask: 0.4120, decode.d3.loss_dice: 0.5522, decode.d4.loss_cls: 0.1595, decode.d4.loss_mask: 0.4104, decode.d4.loss_dice: 0.5510, decode.d5.loss_cls: 0.1554, decode.d5.loss_mask: 0.4124, decode.d5.loss_dice: 0.5489, decode.d6.loss_cls: 0.1500, decode.d6.loss_mask: 0.4103, decode.d6.loss_dice: 0.5452, decode.d7.loss_cls: 0.1494, decode.d7.loss_mask: 0.4095, decode.d7.loss_dice: 0.5465, decode.d8.loss_cls: 0.1504, decode.d8.loss_mask: 0.4088, decode.d8.loss_dice: 0.5436, loss: 12.8728 +2022-06-05 06:12:38,396 - mmseg - INFO - Iter [33150/40000] lr: 1.306e-06, eta: 0:55:52, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1631, decode.loss_mask: 0.4083, decode.loss_dice: 0.5889, decode.d0.loss_cls: 1.5765, decode.d0.loss_mask: 0.4561, decode.d0.loss_dice: 0.6836, decode.d1.loss_cls: 0.3329, decode.d1.loss_mask: 0.4298, decode.d1.loss_dice: 0.6235, decode.d2.loss_cls: 0.2298, decode.d2.loss_mask: 0.4163, decode.d2.loss_dice: 0.5983, decode.d3.loss_cls: 0.1930, decode.d3.loss_mask: 0.4146, decode.d3.loss_dice: 0.5886, decode.d4.loss_cls: 0.1773, decode.d4.loss_mask: 0.4146, decode.d4.loss_dice: 0.5889, decode.d5.loss_cls: 0.1674, decode.d5.loss_mask: 0.4133, decode.d5.loss_dice: 0.5871, decode.d6.loss_cls: 0.1577, decode.d6.loss_mask: 0.4126, decode.d6.loss_dice: 0.5838, decode.d7.loss_cls: 0.1643, decode.d7.loss_mask: 0.4113, decode.d7.loss_dice: 0.5881, decode.d8.loss_cls: 0.1647, decode.d8.loss_mask: 0.4104, decode.d8.loss_dice: 0.5852, loss: 13.5304 +2022-06-05 06:13:00,095 - mmseg - INFO - Iter [33200/40000] lr: 1.297e-06, eta: 0:55:27, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1384, decode.loss_mask: 0.4139, decode.loss_dice: 0.5549, decode.d0.loss_cls: 1.4969, decode.d0.loss_mask: 0.4548, decode.d0.loss_dice: 0.6493, decode.d1.loss_cls: 0.3029, decode.d1.loss_mask: 0.4247, decode.d1.loss_dice: 0.5872, decode.d2.loss_cls: 0.2013, decode.d2.loss_mask: 0.4192, decode.d2.loss_dice: 0.5683, decode.d3.loss_cls: 0.1646, decode.d3.loss_mask: 0.4177, decode.d3.loss_dice: 0.5626, decode.d4.loss_cls: 0.1526, decode.d4.loss_mask: 0.4164, decode.d4.loss_dice: 0.5641, decode.d5.loss_cls: 0.1471, decode.d5.loss_mask: 0.4154, decode.d5.loss_dice: 0.5569, decode.d6.loss_cls: 0.1396, decode.d6.loss_mask: 0.4161, decode.d6.loss_dice: 0.5555, decode.d7.loss_cls: 0.1371, decode.d7.loss_mask: 0.4129, decode.d7.loss_dice: 0.5564, decode.d8.loss_cls: 0.1380, decode.d8.loss_mask: 0.4135, decode.d8.loss_dice: 0.5543, loss: 12.9326 +2022-06-05 06:13:22,644 - mmseg - INFO - Iter [33250/40000] lr: 1.287e-06, eta: 0:55:02, time: 0.451, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1342, decode.loss_mask: 0.4013, decode.loss_dice: 0.5730, decode.d0.loss_cls: 1.5455, 
decode.d0.loss_mask: 0.4421, decode.d0.loss_dice: 0.6663, decode.d1.loss_cls: 0.2995, decode.d1.loss_mask: 0.4172, decode.d1.loss_dice: 0.6086, decode.d2.loss_cls: 0.2102, decode.d2.loss_mask: 0.4068, decode.d2.loss_dice: 0.5811, decode.d3.loss_cls: 0.1656, decode.d3.loss_mask: 0.4060, decode.d3.loss_dice: 0.5741, decode.d4.loss_cls: 0.1572, decode.d4.loss_mask: 0.4034, decode.d4.loss_dice: 0.5753, decode.d5.loss_cls: 0.1504, decode.d5.loss_mask: 0.4019, decode.d5.loss_dice: 0.5734, decode.d6.loss_cls: 0.1398, decode.d6.loss_mask: 0.4021, decode.d6.loss_dice: 0.5718, decode.d7.loss_cls: 0.1380, decode.d7.loss_mask: 0.4009, decode.d7.loss_dice: 0.5724, decode.d8.loss_cls: 0.1338, decode.d8.loss_mask: 0.4010, decode.d8.loss_dice: 0.5719, loss: 13.0247 +2022-06-05 06:13:44,570 - mmseg - INFO - Iter [33300/40000] lr: 1.277e-06, eta: 0:54:37, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1591, decode.loss_mask: 0.3952, decode.loss_dice: 0.5891, decode.d0.loss_cls: 1.5584, decode.d0.loss_mask: 0.4329, decode.d0.loss_dice: 0.6758, decode.d1.loss_cls: 0.3108, decode.d1.loss_mask: 0.4103, decode.d1.loss_dice: 0.6209, decode.d2.loss_cls: 0.2311, decode.d2.loss_mask: 0.4020, decode.d2.loss_dice: 0.5970, decode.d3.loss_cls: 0.1916, decode.d3.loss_mask: 0.3980, decode.d3.loss_dice: 0.5924, decode.d4.loss_cls: 0.1776, decode.d4.loss_mask: 0.3973, decode.d4.loss_dice: 0.5905, decode.d5.loss_cls: 0.1650, decode.d5.loss_mask: 0.3972, decode.d5.loss_dice: 0.5883, decode.d6.loss_cls: 0.1594, decode.d6.loss_mask: 0.3968, decode.d6.loss_dice: 0.5898, decode.d7.loss_cls: 0.1573, decode.d7.loss_mask: 0.3924, decode.d7.loss_dice: 0.5824, decode.d8.loss_cls: 0.1576, decode.d8.loss_mask: 0.3944, decode.d8.loss_dice: 0.5899, loss: 13.3005 +2022-06-05 06:14:06,595 - mmseg - INFO - Iter [33350/40000] lr: 1.268e-06, eta: 0:54:12, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1511, decode.loss_mask: 0.4150, decode.loss_dice: 0.5935, decode.d0.loss_cls: 1.5375, decode.d0.loss_mask: 0.4560, decode.d0.loss_dice: 0.6827, decode.d1.loss_cls: 0.2979, decode.d1.loss_mask: 0.4325, decode.d1.loss_dice: 0.6224, decode.d2.loss_cls: 0.2097, decode.d2.loss_mask: 0.4243, decode.d2.loss_dice: 0.6084, decode.d3.loss_cls: 0.1808, decode.d3.loss_mask: 0.4173, decode.d3.loss_dice: 0.5948, decode.d4.loss_cls: 0.1695, decode.d4.loss_mask: 0.4182, decode.d4.loss_dice: 0.5971, decode.d5.loss_cls: 0.1619, decode.d5.loss_mask: 0.4166, decode.d5.loss_dice: 0.5932, decode.d6.loss_cls: 0.1608, decode.d6.loss_mask: 0.4149, decode.d6.loss_dice: 0.5899, decode.d7.loss_cls: 0.1555, decode.d7.loss_mask: 0.4150, decode.d7.loss_dice: 0.5891, decode.d8.loss_cls: 0.1497, decode.d8.loss_mask: 0.4139, decode.d8.loss_dice: 0.5946, loss: 13.4637 +2022-06-05 06:14:31,035 - mmseg - INFO - Iter [33400/40000] lr: 1.258e-06, eta: 0:53:48, time: 0.489, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1371, decode.loss_mask: 0.4163, decode.loss_dice: 0.5974, decode.d0.loss_cls: 1.5398, decode.d0.loss_mask: 0.4549, decode.d0.loss_dice: 0.6920, decode.d1.loss_cls: 0.2988, decode.d1.loss_mask: 0.4349, decode.d1.loss_dice: 0.6367, decode.d2.loss_cls: 0.2076, decode.d2.loss_mask: 0.4237, decode.d2.loss_dice: 0.6171, decode.d3.loss_cls: 0.1680, decode.d3.loss_mask: 0.4195, decode.d3.loss_dice: 0.6072, decode.d4.loss_cls: 0.1574, decode.d4.loss_mask: 0.4180, decode.d4.loss_dice: 0.6020, decode.d5.loss_cls: 0.1557, decode.d5.loss_mask: 0.4176, decode.d5.loss_dice: 0.6008, decode.d6.loss_cls: 0.1462, decode.d6.loss_mask: 0.4167, 
decode.d6.loss_dice: 0.5954, decode.d7.loss_cls: 0.1435, decode.d7.loss_mask: 0.4152, decode.d7.loss_dice: 0.5960, decode.d8.loss_cls: 0.1431, decode.d8.loss_mask: 0.4142, decode.d8.loss_dice: 0.5979, loss: 13.4707 +2022-06-05 06:14:52,884 - mmseg - INFO - Iter [33450/40000] lr: 1.249e-06, eta: 0:53:23, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1338, decode.loss_mask: 0.4009, decode.loss_dice: 0.5663, decode.d0.loss_cls: 1.5304, decode.d0.loss_mask: 0.4416, decode.d0.loss_dice: 0.6563, decode.d1.loss_cls: 0.2836, decode.d1.loss_mask: 0.4180, decode.d1.loss_dice: 0.6039, decode.d2.loss_cls: 0.1917, decode.d2.loss_mask: 0.4074, decode.d2.loss_dice: 0.5801, decode.d3.loss_cls: 0.1560, decode.d3.loss_mask: 0.4052, decode.d3.loss_dice: 0.5766, decode.d4.loss_cls: 0.1448, decode.d4.loss_mask: 0.4044, decode.d4.loss_dice: 0.5765, decode.d5.loss_cls: 0.1382, decode.d5.loss_mask: 0.4030, decode.d5.loss_dice: 0.5732, decode.d6.loss_cls: 0.1352, decode.d6.loss_mask: 0.4020, decode.d6.loss_dice: 0.5705, decode.d7.loss_cls: 0.1370, decode.d7.loss_mask: 0.4010, decode.d7.loss_dice: 0.5669, decode.d8.loss_cls: 0.1358, decode.d8.loss_mask: 0.4021, decode.d8.loss_dice: 0.5686, loss: 12.9112 +2022-06-05 06:15:15,836 - mmseg - INFO - Iter [33500/40000] lr: 1.239e-06, eta: 0:52:58, time: 0.459, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1370, decode.loss_mask: 0.4016, decode.loss_dice: 0.5755, decode.d0.loss_cls: 1.5511, decode.d0.loss_mask: 0.4426, decode.d0.loss_dice: 0.6683, decode.d1.loss_cls: 0.2782, decode.d1.loss_mask: 0.4172, decode.d1.loss_dice: 0.6109, decode.d2.loss_cls: 0.2093, decode.d2.loss_mask: 0.4085, decode.d2.loss_dice: 0.5878, decode.d3.loss_cls: 0.1653, decode.d3.loss_mask: 0.4056, decode.d3.loss_dice: 0.5807, decode.d4.loss_cls: 0.1608, decode.d4.loss_mask: 0.4036, decode.d4.loss_dice: 0.5802, decode.d5.loss_cls: 0.1456, decode.d5.loss_mask: 0.4021, decode.d5.loss_dice: 0.5772, decode.d6.loss_cls: 0.1378, decode.d6.loss_mask: 0.4012, decode.d6.loss_dice: 0.5766, decode.d7.loss_cls: 0.1384, decode.d7.loss_mask: 0.4022, decode.d7.loss_dice: 0.5742, decode.d8.loss_cls: 0.1358, decode.d8.loss_mask: 0.4010, decode.d8.loss_dice: 0.5753, loss: 13.0517 +2022-06-05 06:15:37,967 - mmseg - INFO - Iter [33550/40000] lr: 1.230e-06, eta: 0:52:33, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1384, decode.loss_mask: 0.4263, decode.loss_dice: 0.5722, decode.d0.loss_cls: 1.4971, decode.d0.loss_mask: 0.4680, decode.d0.loss_dice: 0.6577, decode.d1.loss_cls: 0.2840, decode.d1.loss_mask: 0.4427, decode.d1.loss_dice: 0.6021, decode.d2.loss_cls: 0.1945, decode.d2.loss_mask: 0.4315, decode.d2.loss_dice: 0.5858, decode.d3.loss_cls: 0.1579, decode.d3.loss_mask: 0.4281, decode.d3.loss_dice: 0.5806, decode.d4.loss_cls: 0.1444, decode.d4.loss_mask: 0.4275, decode.d4.loss_dice: 0.5768, decode.d5.loss_cls: 0.1419, decode.d5.loss_mask: 0.4257, decode.d5.loss_dice: 0.5774, decode.d6.loss_cls: 0.1337, decode.d6.loss_mask: 0.4259, decode.d6.loss_dice: 0.5741, decode.d7.loss_cls: 0.1302, decode.d7.loss_mask: 0.4246, decode.d7.loss_dice: 0.5730, decode.d8.loss_cls: 0.1320, decode.d8.loss_mask: 0.4259, decode.d8.loss_dice: 0.5766, loss: 13.1566 +2022-06-05 06:16:00,057 - mmseg - INFO - Iter [33600/40000] lr: 1.220e-06, eta: 0:52:08, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1416, decode.loss_mask: 0.3956, decode.loss_dice: 0.5533, decode.d0.loss_cls: 1.5289, decode.d0.loss_mask: 0.4389, decode.d0.loss_dice: 0.6545, decode.d1.loss_cls: 0.2936, 
decode.d1.loss_mask: 0.4132, decode.d1.loss_dice: 0.5900, decode.d2.loss_cls: 0.2175, decode.d2.loss_mask: 0.4026, decode.d2.loss_dice: 0.5737, decode.d3.loss_cls: 0.1733, decode.d3.loss_mask: 0.3983, decode.d3.loss_dice: 0.5630, decode.d4.loss_cls: 0.1645, decode.d4.loss_mask: 0.3978, decode.d4.loss_dice: 0.5626, decode.d5.loss_cls: 0.1534, decode.d5.loss_mask: 0.3976, decode.d5.loss_dice: 0.5583, decode.d6.loss_cls: 0.1436, decode.d6.loss_mask: 0.3947, decode.d6.loss_dice: 0.5543, decode.d7.loss_cls: 0.1455, decode.d7.loss_mask: 0.3944, decode.d7.loss_dice: 0.5500, decode.d8.loss_cls: 0.1435, decode.d8.loss_mask: 0.3948, decode.d8.loss_dice: 0.5542, loss: 12.8473 +2022-06-05 06:16:22,540 - mmseg - INFO - Iter [33650/40000] lr: 1.211e-06, eta: 0:51:43, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1526, decode.loss_mask: 0.4224, decode.loss_dice: 0.5830, decode.d0.loss_cls: 1.5574, decode.d0.loss_mask: 0.4603, decode.d0.loss_dice: 0.6748, decode.d1.loss_cls: 0.3146, decode.d1.loss_mask: 0.4360, decode.d1.loss_dice: 0.6118, decode.d2.loss_cls: 0.2208, decode.d2.loss_mask: 0.4269, decode.d2.loss_dice: 0.5952, decode.d3.loss_cls: 0.1888, decode.d3.loss_mask: 0.4248, decode.d3.loss_dice: 0.5896, decode.d4.loss_cls: 0.1742, decode.d4.loss_mask: 0.4230, decode.d4.loss_dice: 0.5873, decode.d5.loss_cls: 0.1634, decode.d5.loss_mask: 0.4213, decode.d5.loss_dice: 0.5872, decode.d6.loss_cls: 0.1590, decode.d6.loss_mask: 0.4198, decode.d6.loss_dice: 0.5827, decode.d7.loss_cls: 0.1522, decode.d7.loss_mask: 0.4225, decode.d7.loss_dice: 0.5850, decode.d8.loss_cls: 0.1591, decode.d8.loss_mask: 0.4211, decode.d8.loss_dice: 0.5810, loss: 13.4977 +2022-06-05 06:16:46,643 - mmseg - INFO - Iter [33700/40000] lr: 1.201e-06, eta: 0:51:19, time: 0.483, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1536, decode.loss_mask: 0.4066, decode.loss_dice: 0.5772, decode.d0.loss_cls: 1.5277, decode.d0.loss_mask: 0.4507, decode.d0.loss_dice: 0.6785, decode.d1.loss_cls: 0.3104, decode.d1.loss_mask: 0.4243, decode.d1.loss_dice: 0.6097, decode.d2.loss_cls: 0.2282, decode.d2.loss_mask: 0.4117, decode.d2.loss_dice: 0.5921, decode.d3.loss_cls: 0.1841, decode.d3.loss_mask: 0.4123, decode.d3.loss_dice: 0.5823, decode.d4.loss_cls: 0.1764, decode.d4.loss_mask: 0.4097, decode.d4.loss_dice: 0.5803, decode.d5.loss_cls: 0.1644, decode.d5.loss_mask: 0.4085, decode.d5.loss_dice: 0.5810, decode.d6.loss_cls: 0.1605, decode.d6.loss_mask: 0.4075, decode.d6.loss_dice: 0.5740, decode.d7.loss_cls: 0.1512, decode.d7.loss_mask: 0.4090, decode.d7.loss_dice: 0.5778, decode.d8.loss_cls: 0.1543, decode.d8.loss_mask: 0.4066, decode.d8.loss_dice: 0.5750, loss: 13.2853 +2022-06-05 06:17:08,509 - mmseg - INFO - Iter [33750/40000] lr: 1.192e-06, eta: 0:50:54, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1494, decode.loss_mask: 0.4151, decode.loss_dice: 0.5773, decode.d0.loss_cls: 1.5568, decode.d0.loss_mask: 0.4552, decode.d0.loss_dice: 0.6602, decode.d1.loss_cls: 0.2864, decode.d1.loss_mask: 0.4318, decode.d1.loss_dice: 0.6133, decode.d2.loss_cls: 0.2121, decode.d2.loss_mask: 0.4238, decode.d2.loss_dice: 0.5885, decode.d3.loss_cls: 0.1737, decode.d3.loss_mask: 0.4202, decode.d3.loss_dice: 0.5843, decode.d4.loss_cls: 0.1626, decode.d4.loss_mask: 0.4186, decode.d4.loss_dice: 0.5841, decode.d5.loss_cls: 0.1536, decode.d5.loss_mask: 0.4187, decode.d5.loss_dice: 0.5843, decode.d6.loss_cls: 0.1586, decode.d6.loss_mask: 0.4178, decode.d6.loss_dice: 0.5793, decode.d7.loss_cls: 0.1499, decode.d7.loss_mask: 0.4168, 
decode.d7.loss_dice: 0.5769, decode.d8.loss_cls: 0.1480, decode.d8.loss_mask: 0.4164, decode.d8.loss_dice: 0.5792, loss: 13.3128 +2022-06-05 06:17:30,555 - mmseg - INFO - Iter [33800/40000] lr: 1.182e-06, eta: 0:50:29, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1534, decode.loss_mask: 0.3991, decode.loss_dice: 0.5895, decode.d0.loss_cls: 1.5674, decode.d0.loss_mask: 0.4469, decode.d0.loss_dice: 0.6913, decode.d1.loss_cls: 0.3144, decode.d1.loss_mask: 0.4153, decode.d1.loss_dice: 0.6258, decode.d2.loss_cls: 0.2165, decode.d2.loss_mask: 0.4048, decode.d2.loss_dice: 0.6026, decode.d3.loss_cls: 0.1805, decode.d3.loss_mask: 0.4016, decode.d3.loss_dice: 0.5906, decode.d4.loss_cls: 0.1733, decode.d4.loss_mask: 0.4012, decode.d4.loss_dice: 0.5908, decode.d5.loss_cls: 0.1667, decode.d5.loss_mask: 0.4012, decode.d5.loss_dice: 0.5921, decode.d6.loss_cls: 0.1620, decode.d6.loss_mask: 0.4002, decode.d6.loss_dice: 0.5891, decode.d7.loss_cls: 0.1533, decode.d7.loss_mask: 0.3996, decode.d7.loss_dice: 0.5900, decode.d8.loss_cls: 0.1544, decode.d8.loss_mask: 0.3991, decode.d8.loss_dice: 0.5933, loss: 13.3656 +2022-06-05 06:17:52,809 - mmseg - INFO - Iter [33850/40000] lr: 1.173e-06, eta: 0:50:04, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1496, decode.loss_mask: 0.3989, decode.loss_dice: 0.5799, decode.d0.loss_cls: 1.5303, decode.d0.loss_mask: 0.4358, decode.d0.loss_dice: 0.6815, decode.d1.loss_cls: 0.3157, decode.d1.loss_mask: 0.4138, decode.d1.loss_dice: 0.6132, decode.d2.loss_cls: 0.2262, decode.d2.loss_mask: 0.4045, decode.d2.loss_dice: 0.5908, decode.d3.loss_cls: 0.1820, decode.d3.loss_mask: 0.3994, decode.d3.loss_dice: 0.5828, decode.d4.loss_cls: 0.1749, decode.d4.loss_mask: 0.3994, decode.d4.loss_dice: 0.5903, decode.d5.loss_cls: 0.1704, decode.d5.loss_mask: 0.3987, decode.d5.loss_dice: 0.5816, decode.d6.loss_cls: 0.1615, decode.d6.loss_mask: 0.3987, decode.d6.loss_dice: 0.5843, decode.d7.loss_cls: 0.1587, decode.d7.loss_mask: 0.3979, decode.d7.loss_dice: 0.5810, decode.d8.loss_cls: 0.1574, decode.d8.loss_mask: 0.3988, decode.d8.loss_dice: 0.5818, loss: 13.2397 +2022-06-05 06:18:14,684 - mmseg - INFO - Iter [33900/40000] lr: 1.163e-06, eta: 0:49:39, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1364, decode.loss_mask: 0.4023, decode.loss_dice: 0.5614, decode.d0.loss_cls: 1.5086, decode.d0.loss_mask: 0.4398, decode.d0.loss_dice: 0.6510, decode.d1.loss_cls: 0.2892, decode.d1.loss_mask: 0.4183, decode.d1.loss_dice: 0.5964, decode.d2.loss_cls: 0.1958, decode.d2.loss_mask: 0.4093, decode.d2.loss_dice: 0.5742, decode.d3.loss_cls: 0.1615, decode.d3.loss_mask: 0.4052, decode.d3.loss_dice: 0.5692, decode.d4.loss_cls: 0.1496, decode.d4.loss_mask: 0.4053, decode.d4.loss_dice: 0.5666, decode.d5.loss_cls: 0.1458, decode.d5.loss_mask: 0.4029, decode.d5.loss_dice: 0.5623, decode.d6.loss_cls: 0.1372, decode.d6.loss_mask: 0.4016, decode.d6.loss_dice: 0.5622, decode.d7.loss_cls: 0.1330, decode.d7.loss_mask: 0.4012, decode.d7.loss_dice: 0.5597, decode.d8.loss_cls: 0.1352, decode.d8.loss_mask: 0.4010, decode.d8.loss_dice: 0.5615, loss: 12.8437 +2022-06-05 06:18:36,672 - mmseg - INFO - Iter [33950/40000] lr: 1.154e-06, eta: 0:49:15, time: 0.440, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1467, decode.loss_mask: 0.4092, decode.loss_dice: 0.5538, decode.d0.loss_cls: 1.5132, decode.d0.loss_mask: 0.4572, decode.d0.loss_dice: 0.6468, decode.d1.loss_cls: 0.2776, decode.d1.loss_mask: 0.4308, decode.d1.loss_dice: 0.5946, decode.d2.loss_cls: 0.2097, 
decode.d2.loss_mask: 0.4184, decode.d2.loss_dice: 0.5666, decode.d3.loss_cls: 0.1704, decode.d3.loss_mask: 0.4134, decode.d3.loss_dice: 0.5601, decode.d4.loss_cls: 0.1626, decode.d4.loss_mask: 0.4116, decode.d4.loss_dice: 0.5598, decode.d5.loss_cls: 0.1527, decode.d5.loss_mask: 0.4108, decode.d5.loss_dice: 0.5584, decode.d6.loss_cls: 0.1469, decode.d6.loss_mask: 0.4101, decode.d6.loss_dice: 0.5597, decode.d7.loss_cls: 0.1462, decode.d7.loss_mask: 0.4097, decode.d7.loss_dice: 0.5547, decode.d8.loss_cls: 0.1446, decode.d8.loss_mask: 0.4096, decode.d8.loss_dice: 0.5566, loss: 12.9626 +2022-06-05 06:18:58,454 - mmseg - INFO - Saving checkpoint at 34000 iterations +2022-06-05 06:19:01,682 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:19:01,682 - mmseg - INFO - Iter [34000/40000] lr: 1.144e-06, eta: 0:48:50, time: 0.500, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1366, decode.loss_mask: 0.4132, decode.loss_dice: 0.5635, decode.d0.loss_cls: 1.4806, decode.d0.loss_mask: 0.4596, decode.d0.loss_dice: 0.6668, decode.d1.loss_cls: 0.2978, decode.d1.loss_mask: 0.4292, decode.d1.loss_dice: 0.5968, decode.d2.loss_cls: 0.2056, decode.d2.loss_mask: 0.4219, decode.d2.loss_dice: 0.5776, decode.d3.loss_cls: 0.1685, decode.d3.loss_mask: 0.4192, decode.d3.loss_dice: 0.5686, decode.d4.loss_cls: 0.1520, decode.d4.loss_mask: 0.4171, decode.d4.loss_dice: 0.5711, decode.d5.loss_cls: 0.1473, decode.d5.loss_mask: 0.4154, decode.d5.loss_dice: 0.5661, decode.d6.loss_cls: 0.1384, decode.d6.loss_mask: 0.4137, decode.d6.loss_dice: 0.5664, decode.d7.loss_cls: 0.1379, decode.d7.loss_mask: 0.4133, decode.d7.loss_dice: 0.5638, decode.d8.loss_cls: 0.1349, decode.d8.loss_mask: 0.4139, decode.d8.loss_dice: 0.5666, loss: 13.0234 +2022-06-05 06:19:26,215 - mmseg - INFO - Iter [34050/40000] lr: 1.134e-06, eta: 0:48:26, time: 0.490, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1435, decode.loss_mask: 0.4108, decode.loss_dice: 0.5938, decode.d0.loss_cls: 1.5508, decode.d0.loss_mask: 0.4530, decode.d0.loss_dice: 0.6814, decode.d1.loss_cls: 0.3117, decode.d1.loss_mask: 0.4278, decode.d1.loss_dice: 0.6245, decode.d2.loss_cls: 0.2224, decode.d2.loss_mask: 0.4169, decode.d2.loss_dice: 0.6047, decode.d3.loss_cls: 0.1834, decode.d3.loss_mask: 0.4129, decode.d3.loss_dice: 0.5965, decode.d4.loss_cls: 0.1700, decode.d4.loss_mask: 0.4126, decode.d4.loss_dice: 0.5995, decode.d5.loss_cls: 0.1561, decode.d5.loss_mask: 0.4116, decode.d5.loss_dice: 0.5934, decode.d6.loss_cls: 0.1522, decode.d6.loss_mask: 0.4115, decode.d6.loss_dice: 0.5944, decode.d7.loss_cls: 0.1494, decode.d7.loss_mask: 0.4122, decode.d7.loss_dice: 0.5913, decode.d8.loss_cls: 0.1453, decode.d8.loss_mask: 0.4120, decode.d8.loss_dice: 0.5970, loss: 13.4427 +2022-06-05 06:19:48,339 - mmseg - INFO - Iter [34100/40000] lr: 1.125e-06, eta: 0:48:01, time: 0.443, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1485, decode.loss_mask: 0.4086, decode.loss_dice: 0.5555, decode.d0.loss_cls: 1.5429, decode.d0.loss_mask: 0.4527, decode.d0.loss_dice: 0.6517, decode.d1.loss_cls: 0.3045, decode.d1.loss_mask: 0.4293, decode.d1.loss_dice: 0.5971, decode.d2.loss_cls: 0.2015, decode.d2.loss_mask: 0.4204, decode.d2.loss_dice: 0.5751, decode.d3.loss_cls: 0.1727, decode.d3.loss_mask: 0.4148, decode.d3.loss_dice: 0.5644, decode.d4.loss_cls: 0.1682, decode.d4.loss_mask: 0.4127, decode.d4.loss_dice: 0.5625, decode.d5.loss_cls: 0.1632, decode.d5.loss_mask: 0.4105, decode.d5.loss_dice: 0.5599, decode.d6.loss_cls: 0.1534, 
decode.d6.loss_mask: 0.4089, decode.d6.loss_dice: 0.5578, decode.d7.loss_cls: 0.1489, decode.d7.loss_mask: 0.4085, decode.d7.loss_dice: 0.5580, decode.d8.loss_cls: 0.1494, decode.d8.loss_mask: 0.4089, decode.d8.loss_dice: 0.5556, loss: 13.0662 +2022-06-05 06:20:10,389 - mmseg - INFO - Iter [34150/40000] lr: 1.115e-06, eta: 0:47:36, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1266, decode.loss_mask: 0.4112, decode.loss_dice: 0.5752, decode.d0.loss_cls: 1.5213, decode.d0.loss_mask: 0.4516, decode.d0.loss_dice: 0.6718, decode.d1.loss_cls: 0.2918, decode.d1.loss_mask: 0.4272, decode.d1.loss_dice: 0.6043, decode.d2.loss_cls: 0.1985, decode.d2.loss_mask: 0.4184, decode.d2.loss_dice: 0.5860, decode.d3.loss_cls: 0.1637, decode.d3.loss_mask: 0.4168, decode.d3.loss_dice: 0.5802, decode.d4.loss_cls: 0.1548, decode.d4.loss_mask: 0.4148, decode.d4.loss_dice: 0.5772, decode.d5.loss_cls: 0.1463, decode.d5.loss_mask: 0.4123, decode.d5.loss_dice: 0.5772, decode.d6.loss_cls: 0.1359, decode.d6.loss_mask: 0.4123, decode.d6.loss_dice: 0.5736, decode.d7.loss_cls: 0.1311, decode.d7.loss_mask: 0.4126, decode.d7.loss_dice: 0.5759, decode.d8.loss_cls: 0.1296, decode.d8.loss_mask: 0.4120, decode.d8.loss_dice: 0.5753, loss: 13.0856 +2022-06-05 06:20:32,130 - mmseg - INFO - Iter [34200/40000] lr: 1.106e-06, eta: 0:47:11, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1245, decode.loss_mask: 0.4168, decode.loss_dice: 0.5641, decode.d0.loss_cls: 1.4792, decode.d0.loss_mask: 0.4633, decode.d0.loss_dice: 0.6538, decode.d1.loss_cls: 0.2775, decode.d1.loss_mask: 0.4349, decode.d1.loss_dice: 0.6008, decode.d2.loss_cls: 0.1892, decode.d2.loss_mask: 0.4237, decode.d2.loss_dice: 0.5799, decode.d3.loss_cls: 0.1532, decode.d3.loss_mask: 0.4221, decode.d3.loss_dice: 0.5692, decode.d4.loss_cls: 0.1496, decode.d4.loss_mask: 0.4215, decode.d4.loss_dice: 0.5681, decode.d5.loss_cls: 0.1332, decode.d5.loss_mask: 0.4207, decode.d5.loss_dice: 0.5653, decode.d6.loss_cls: 0.1311, decode.d6.loss_mask: 0.4188, decode.d6.loss_dice: 0.5646, decode.d7.loss_cls: 0.1279, decode.d7.loss_mask: 0.4172, decode.d7.loss_dice: 0.5664, decode.d8.loss_cls: 0.1335, decode.d8.loss_mask: 0.4179, decode.d8.loss_dice: 0.5644, loss: 12.9523 +2022-06-05 06:20:53,985 - mmseg - INFO - Iter [34250/40000] lr: 1.096e-06, eta: 0:46:47, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1217, decode.loss_mask: 0.4016, decode.loss_dice: 0.5405, decode.d0.loss_cls: 1.5360, decode.d0.loss_mask: 0.4420, decode.d0.loss_dice: 0.6241, decode.d1.loss_cls: 0.2757, decode.d1.loss_mask: 0.4161, decode.d1.loss_dice: 0.5717, decode.d2.loss_cls: 0.1893, decode.d2.loss_mask: 0.4044, decode.d2.loss_dice: 0.5519, decode.d3.loss_cls: 0.1532, decode.d3.loss_mask: 0.4022, decode.d3.loss_dice: 0.5445, decode.d4.loss_cls: 0.1429, decode.d4.loss_mask: 0.4005, decode.d4.loss_dice: 0.5409, decode.d5.loss_cls: 0.1373, decode.d5.loss_mask: 0.4015, decode.d5.loss_dice: 0.5420, decode.d6.loss_cls: 0.1287, decode.d6.loss_mask: 0.4022, decode.d6.loss_dice: 0.5427, decode.d7.loss_cls: 0.1290, decode.d7.loss_mask: 0.4014, decode.d7.loss_dice: 0.5386, decode.d8.loss_cls: 0.1275, decode.d8.loss_mask: 0.4018, decode.d8.loss_dice: 0.5417, loss: 12.5535 +2022-06-05 06:21:15,880 - mmseg - INFO - Iter [34300/40000] lr: 1.087e-06, eta: 0:46:22, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1540, decode.loss_mask: 0.3978, decode.loss_dice: 0.5700, decode.d0.loss_cls: 1.5662, decode.d0.loss_mask: 0.4393, decode.d0.loss_dice: 0.6606, 
decode.d1.loss_cls: 0.3181, decode.d1.loss_mask: 0.4137, decode.d1.loss_dice: 0.6107, decode.d2.loss_cls: 0.2212, decode.d2.loss_mask: 0.4025, decode.d2.loss_dice: 0.5844, decode.d3.loss_cls: 0.1878, decode.d3.loss_mask: 0.4008, decode.d3.loss_dice: 0.5800, decode.d4.loss_cls: 0.1676, decode.d4.loss_mask: 0.4003, decode.d4.loss_dice: 0.5776, decode.d5.loss_cls: 0.1614, decode.d5.loss_mask: 0.3997, decode.d5.loss_dice: 0.5786, decode.d6.loss_cls: 0.1580, decode.d6.loss_mask: 0.3972, decode.d6.loss_dice: 0.5734, decode.d7.loss_cls: 0.1541, decode.d7.loss_mask: 0.3975, decode.d7.loss_dice: 0.5737, decode.d8.loss_cls: 0.1506, decode.d8.loss_mask: 0.3972, decode.d8.loss_dice: 0.5727, loss: 13.1671 +2022-06-05 06:21:40,324 - mmseg - INFO - Iter [34350/40000] lr: 1.077e-06, eta: 0:45:57, time: 0.488, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1450, decode.loss_mask: 0.4039, decode.loss_dice: 0.5591, decode.d0.loss_cls: 1.4828, decode.d0.loss_mask: 0.4421, decode.d0.loss_dice: 0.6491, decode.d1.loss_cls: 0.3062, decode.d1.loss_mask: 0.4182, decode.d1.loss_dice: 0.5938, decode.d2.loss_cls: 0.2100, decode.d2.loss_mask: 0.4082, decode.d2.loss_dice: 0.5718, decode.d3.loss_cls: 0.1681, decode.d3.loss_mask: 0.4079, decode.d3.loss_dice: 0.5689, decode.d4.loss_cls: 0.1670, decode.d4.loss_mask: 0.4055, decode.d4.loss_dice: 0.5625, decode.d5.loss_cls: 0.1603, decode.d5.loss_mask: 0.4040, decode.d5.loss_dice: 0.5606, decode.d6.loss_cls: 0.1470, decode.d6.loss_mask: 0.4036, decode.d6.loss_dice: 0.5625, decode.d7.loss_cls: 0.1491, decode.d7.loss_mask: 0.4019, decode.d7.loss_dice: 0.5619, decode.d8.loss_cls: 0.1490, decode.d8.loss_mask: 0.4025, decode.d8.loss_dice: 0.5619, loss: 12.9344 +2022-06-05 06:22:01,914 - mmseg - INFO - Iter [34400/40000] lr: 1.068e-06, eta: 0:45:32, time: 0.432, data_time: 0.010, memory: 31652, decode.loss_cls: 0.1392, decode.loss_mask: 0.4055, decode.loss_dice: 0.5728, decode.d0.loss_cls: 1.5617, decode.d0.loss_mask: 0.4447, decode.d0.loss_dice: 0.6711, decode.d1.loss_cls: 0.2948, decode.d1.loss_mask: 0.4209, decode.d1.loss_dice: 0.6132, decode.d2.loss_cls: 0.2087, decode.d2.loss_mask: 0.4106, decode.d2.loss_dice: 0.5877, decode.d3.loss_cls: 0.1665, decode.d3.loss_mask: 0.4074, decode.d3.loss_dice: 0.5754, decode.d4.loss_cls: 0.1596, decode.d4.loss_mask: 0.4075, decode.d4.loss_dice: 0.5769, decode.d5.loss_cls: 0.1528, decode.d5.loss_mask: 0.4065, decode.d5.loss_dice: 0.5719, decode.d6.loss_cls: 0.1445, decode.d6.loss_mask: 0.4062, decode.d6.loss_dice: 0.5749, decode.d7.loss_cls: 0.1402, decode.d7.loss_mask: 0.4059, decode.d7.loss_dice: 0.5755, decode.d8.loss_cls: 0.1450, decode.d8.loss_mask: 0.4050, decode.d8.loss_dice: 0.5718, loss: 13.1246 +2022-06-05 06:22:23,644 - mmseg - INFO - Iter [34450/40000] lr: 1.058e-06, eta: 0:45:08, time: 0.435, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1465, decode.loss_mask: 0.4042, decode.loss_dice: 0.5704, decode.d0.loss_cls: 1.5361, decode.d0.loss_mask: 0.4414, decode.d0.loss_dice: 0.6571, decode.d1.loss_cls: 0.2941, decode.d1.loss_mask: 0.4203, decode.d1.loss_dice: 0.5994, decode.d2.loss_cls: 0.2092, decode.d2.loss_mask: 0.4119, decode.d2.loss_dice: 0.5824, decode.d3.loss_cls: 0.1728, decode.d3.loss_mask: 0.4085, decode.d3.loss_dice: 0.5732, decode.d4.loss_cls: 0.1602, decode.d4.loss_mask: 0.4072, decode.d4.loss_dice: 0.5721, decode.d5.loss_cls: 0.1546, decode.d5.loss_mask: 0.4052, decode.d5.loss_dice: 0.5740, decode.d6.loss_cls: 0.1512, decode.d6.loss_mask: 0.4051, decode.d6.loss_dice: 0.5760, decode.d7.loss_cls: 0.1448, 
decode.d7.loss_mask: 0.4050, decode.d7.loss_dice: 0.5695, decode.d8.loss_cls: 0.1485, decode.d8.loss_mask: 0.4044, decode.d8.loss_dice: 0.5717, loss: 13.0770 +2022-06-05 06:22:44,958 - mmseg - INFO - Iter [34500/40000] lr: 1.049e-06, eta: 0:44:43, time: 0.426, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1315, decode.loss_mask: 0.3999, decode.loss_dice: 0.5671, decode.d0.loss_cls: 1.5187, decode.d0.loss_mask: 0.4404, decode.d0.loss_dice: 0.6475, decode.d1.loss_cls: 0.2922, decode.d1.loss_mask: 0.4144, decode.d1.loss_dice: 0.5998, decode.d2.loss_cls: 0.2020, decode.d2.loss_mask: 0.4027, decode.d2.loss_dice: 0.5782, decode.d3.loss_cls: 0.1620, decode.d3.loss_mask: 0.4002, decode.d3.loss_dice: 0.5678, decode.d4.loss_cls: 0.1429, decode.d4.loss_mask: 0.4026, decode.d4.loss_dice: 0.5717, decode.d5.loss_cls: 0.1391, decode.d5.loss_mask: 0.3993, decode.d5.loss_dice: 0.5689, decode.d6.loss_cls: 0.1372, decode.d6.loss_mask: 0.3990, decode.d6.loss_dice: 0.5674, decode.d7.loss_cls: 0.1347, decode.d7.loss_mask: 0.3989, decode.d7.loss_dice: 0.5664, decode.d8.loss_cls: 0.1315, decode.d8.loss_mask: 0.3993, decode.d8.loss_dice: 0.5665, loss: 12.8500 +2022-06-05 06:23:06,393 - mmseg - INFO - Iter [34550/40000] lr: 1.039e-06, eta: 0:44:18, time: 0.429, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1484, decode.loss_mask: 0.4356, decode.loss_dice: 0.5794, decode.d0.loss_cls: 1.5156, decode.d0.loss_mask: 0.4775, decode.d0.loss_dice: 0.6587, decode.d1.loss_cls: 0.2834, decode.d1.loss_mask: 0.4530, decode.d1.loss_dice: 0.6079, decode.d2.loss_cls: 0.2095, decode.d2.loss_mask: 0.4434, decode.d2.loss_dice: 0.5894, decode.d3.loss_cls: 0.1793, decode.d3.loss_mask: 0.4387, decode.d3.loss_dice: 0.5844, decode.d4.loss_cls: 0.1691, decode.d4.loss_mask: 0.4382, decode.d4.loss_dice: 0.5794, decode.d5.loss_cls: 0.1563, decode.d5.loss_mask: 0.4363, decode.d5.loss_dice: 0.5799, decode.d6.loss_cls: 0.1524, decode.d6.loss_mask: 0.4371, decode.d6.loss_dice: 0.5780, decode.d7.loss_cls: 0.1519, decode.d7.loss_mask: 0.4364, decode.d7.loss_dice: 0.5787, decode.d8.loss_cls: 0.1492, decode.d8.loss_mask: 0.4367, decode.d8.loss_dice: 0.5754, loss: 13.4589 +2022-06-05 06:23:28,032 - mmseg - INFO - Iter [34600/40000] lr: 1.030e-06, eta: 0:43:53, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1444, decode.loss_mask: 0.3924, decode.loss_dice: 0.5579, decode.d0.loss_cls: 1.5357, decode.d0.loss_mask: 0.4278, decode.d0.loss_dice: 0.6555, decode.d1.loss_cls: 0.2818, decode.d1.loss_mask: 0.4064, decode.d1.loss_dice: 0.5989, decode.d2.loss_cls: 0.1968, decode.d2.loss_mask: 0.3977, decode.d2.loss_dice: 0.5771, decode.d3.loss_cls: 0.1665, decode.d3.loss_mask: 0.3952, decode.d3.loss_dice: 0.5679, decode.d4.loss_cls: 0.1545, decode.d4.loss_mask: 0.3950, decode.d4.loss_dice: 0.5708, decode.d5.loss_cls: 0.1499, decode.d5.loss_mask: 0.3958, decode.d5.loss_dice: 0.5652, decode.d6.loss_cls: 0.1454, decode.d6.loss_mask: 0.3931, decode.d6.loss_dice: 0.5677, decode.d7.loss_cls: 0.1433, decode.d7.loss_mask: 0.3944, decode.d7.loss_dice: 0.5632, decode.d8.loss_cls: 0.1423, decode.d8.loss_mask: 0.3931, decode.d8.loss_dice: 0.5673, loss: 12.8431 +2022-06-05 06:23:52,195 - mmseg - INFO - Iter [34650/40000] lr: 1.020e-06, eta: 0:43:29, time: 0.483, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1293, decode.loss_mask: 0.4082, decode.loss_dice: 0.5833, decode.d0.loss_cls: 1.5353, decode.d0.loss_mask: 0.4501, decode.d0.loss_dice: 0.6805, decode.d1.loss_cls: 0.2787, decode.d1.loss_mask: 0.4253, decode.d1.loss_dice: 0.6162, 
decode.d2.loss_cls: 0.1941, decode.d2.loss_mask: 0.4151, decode.d2.loss_dice: 0.5987, decode.d3.loss_cls: 0.1582, decode.d3.loss_mask: 0.4125, decode.d3.loss_dice: 0.5890, decode.d4.loss_cls: 0.1465, decode.d4.loss_mask: 0.4110, decode.d4.loss_dice: 0.5880, decode.d5.loss_cls: 0.1420, decode.d5.loss_mask: 0.4102, decode.d5.loss_dice: 0.5862, decode.d6.loss_cls: 0.1419, decode.d6.loss_mask: 0.4089, decode.d6.loss_dice: 0.5814, decode.d7.loss_cls: 0.1303, decode.d7.loss_mask: 0.4085, decode.d7.loss_dice: 0.5870, decode.d8.loss_cls: 0.1299, decode.d8.loss_mask: 0.4099, decode.d8.loss_dice: 0.5878, loss: 13.1440 +2022-06-05 06:24:13,975 - mmseg - INFO - Iter [34700/40000] lr: 1.011e-06, eta: 0:43:04, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1310, decode.loss_mask: 0.4066, decode.loss_dice: 0.5644, decode.d0.loss_cls: 1.5250, decode.d0.loss_mask: 0.4514, decode.d0.loss_dice: 0.6502, decode.d1.loss_cls: 0.2900, decode.d1.loss_mask: 0.4252, decode.d1.loss_dice: 0.5946, decode.d2.loss_cls: 0.1949, decode.d2.loss_mask: 0.4155, decode.d2.loss_dice: 0.5778, decode.d3.loss_cls: 0.1599, decode.d3.loss_mask: 0.4119, decode.d3.loss_dice: 0.5683, decode.d4.loss_cls: 0.1513, decode.d4.loss_mask: 0.4090, decode.d4.loss_dice: 0.5653, decode.d5.loss_cls: 0.1433, decode.d5.loss_mask: 0.4100, decode.d5.loss_dice: 0.5682, decode.d6.loss_cls: 0.1300, decode.d6.loss_mask: 0.4092, decode.d6.loss_dice: 0.5675, decode.d7.loss_cls: 0.1304, decode.d7.loss_mask: 0.4076, decode.d7.loss_dice: 0.5672, decode.d8.loss_cls: 0.1258, decode.d8.loss_mask: 0.4074, decode.d8.loss_dice: 0.5699, loss: 12.9289 +2022-06-05 06:24:35,455 - mmseg - INFO - Iter [34750/40000] lr: 1.001e-06, eta: 0:42:39, time: 0.429, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1259, decode.loss_mask: 0.4089, decode.loss_dice: 0.5637, decode.d0.loss_cls: 1.4917, decode.d0.loss_mask: 0.4463, decode.d0.loss_dice: 0.6537, decode.d1.loss_cls: 0.2799, decode.d1.loss_mask: 0.4235, decode.d1.loss_dice: 0.5996, decode.d2.loss_cls: 0.1911, decode.d2.loss_mask: 0.4167, decode.d2.loss_dice: 0.5732, decode.d3.loss_cls: 0.1558, decode.d3.loss_mask: 0.4138, decode.d3.loss_dice: 0.5704, decode.d4.loss_cls: 0.1522, decode.d4.loss_mask: 0.4127, decode.d4.loss_dice: 0.5676, decode.d5.loss_cls: 0.1431, decode.d5.loss_mask: 0.4120, decode.d5.loss_dice: 0.5638, decode.d6.loss_cls: 0.1368, decode.d6.loss_mask: 0.4110, decode.d6.loss_dice: 0.5644, decode.d7.loss_cls: 0.1262, decode.d7.loss_mask: 0.4099, decode.d7.loss_dice: 0.5640, decode.d8.loss_cls: 0.1224, decode.d8.loss_mask: 0.4099, decode.d8.loss_dice: 0.5652, loss: 12.8755 +2022-06-05 06:24:57,024 - mmseg - INFO - Iter [34800/40000] lr: 9.915e-07, eta: 0:42:14, time: 0.432, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1488, decode.loss_mask: 0.4120, decode.loss_dice: 0.5629, decode.d0.loss_cls: 1.5170, decode.d0.loss_mask: 0.4556, decode.d0.loss_dice: 0.6509, decode.d1.loss_cls: 0.2964, decode.d1.loss_mask: 0.4308, decode.d1.loss_dice: 0.5995, decode.d2.loss_cls: 0.2188, decode.d2.loss_mask: 0.4201, decode.d2.loss_dice: 0.5761, decode.d3.loss_cls: 0.1798, decode.d3.loss_mask: 0.4157, decode.d3.loss_dice: 0.5639, decode.d4.loss_cls: 0.1700, decode.d4.loss_mask: 0.4153, decode.d4.loss_dice: 0.5670, decode.d5.loss_cls: 0.1578, decode.d5.loss_mask: 0.4145, decode.d5.loss_dice: 0.5631, decode.d6.loss_cls: 0.1578, decode.d6.loss_mask: 0.4134, decode.d6.loss_dice: 0.5634, decode.d7.loss_cls: 0.1565, decode.d7.loss_mask: 0.4128, decode.d7.loss_dice: 0.5646, decode.d8.loss_cls: 0.1493, 
decode.d8.loss_mask: 0.4121, decode.d8.loss_dice: 0.5640, loss: 13.1300 +2022-06-05 06:25:18,552 - mmseg - INFO - Iter [34850/40000] lr: 9.820e-07, eta: 0:41:49, time: 0.430, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1419, decode.loss_mask: 0.3972, decode.loss_dice: 0.5790, decode.d0.loss_cls: 1.5490, decode.d0.loss_mask: 0.4380, decode.d0.loss_dice: 0.6698, decode.d1.loss_cls: 0.2905, decode.d1.loss_mask: 0.4135, decode.d1.loss_dice: 0.6175, decode.d2.loss_cls: 0.1981, decode.d2.loss_mask: 0.4033, decode.d2.loss_dice: 0.5929, decode.d3.loss_cls: 0.1627, decode.d3.loss_mask: 0.4000, decode.d3.loss_dice: 0.5870, decode.d4.loss_cls: 0.1516, decode.d4.loss_mask: 0.4000, decode.d4.loss_dice: 0.5869, decode.d5.loss_cls: 0.1496, decode.d5.loss_mask: 0.4004, decode.d5.loss_dice: 0.5866, decode.d6.loss_cls: 0.1425, decode.d6.loss_mask: 0.3992, decode.d6.loss_dice: 0.5837, decode.d7.loss_cls: 0.1468, decode.d7.loss_mask: 0.3992, decode.d7.loss_dice: 0.5792, decode.d8.loss_cls: 0.1393, decode.d8.loss_mask: 0.3970, decode.d8.loss_dice: 0.5835, loss: 13.0858 +2022-06-05 06:25:39,920 - mmseg - INFO - Iter [34900/40000] lr: 9.725e-07, eta: 0:41:25, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1344, decode.loss_mask: 0.4129, decode.loss_dice: 0.5810, decode.d0.loss_cls: 1.5171, decode.d0.loss_mask: 0.4591, decode.d0.loss_dice: 0.6766, decode.d1.loss_cls: 0.3000, decode.d1.loss_mask: 0.4273, decode.d1.loss_dice: 0.6145, decode.d2.loss_cls: 0.2124, decode.d2.loss_mask: 0.4184, decode.d2.loss_dice: 0.5938, decode.d3.loss_cls: 0.1691, decode.d3.loss_mask: 0.4170, decode.d3.loss_dice: 0.5871, decode.d4.loss_cls: 0.1559, decode.d4.loss_mask: 0.4150, decode.d4.loss_dice: 0.5854, decode.d5.loss_cls: 0.1441, decode.d5.loss_mask: 0.4136, decode.d5.loss_dice: 0.5854, decode.d6.loss_cls: 0.1372, decode.d6.loss_mask: 0.4135, decode.d6.loss_dice: 0.5831, decode.d7.loss_cls: 0.1361, decode.d7.loss_mask: 0.4135, decode.d7.loss_dice: 0.5810, decode.d8.loss_cls: 0.1350, decode.d8.loss_mask: 0.4134, decode.d8.loss_dice: 0.5786, loss: 13.2114 +2022-06-05 06:26:04,164 - mmseg - INFO - Iter [34950/40000] lr: 9.629e-07, eta: 0:41:00, time: 0.485, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1309, decode.loss_mask: 0.3999, decode.loss_dice: 0.5678, decode.d0.loss_cls: 1.5189, decode.d0.loss_mask: 0.4391, decode.d0.loss_dice: 0.6579, decode.d1.loss_cls: 0.2731, decode.d1.loss_mask: 0.4156, decode.d1.loss_dice: 0.5987, decode.d2.loss_cls: 0.1901, decode.d2.loss_mask: 0.4047, decode.d2.loss_dice: 0.5817, decode.d3.loss_cls: 0.1515, decode.d3.loss_mask: 0.4030, decode.d3.loss_dice: 0.5751, decode.d4.loss_cls: 0.1476, decode.d4.loss_mask: 0.4008, decode.d4.loss_dice: 0.5712, decode.d5.loss_cls: 0.1398, decode.d5.loss_mask: 0.4006, decode.d5.loss_dice: 0.5714, decode.d6.loss_cls: 0.1354, decode.d6.loss_mask: 0.3998, decode.d6.loss_dice: 0.5661, decode.d7.loss_cls: 0.1341, decode.d7.loss_mask: 0.4005, decode.d7.loss_dice: 0.5670, decode.d8.loss_cls: 0.1355, decode.d8.loss_mask: 0.4000, decode.d8.loss_dice: 0.5691, loss: 12.8469 +2022-06-05 06:26:25,783 - mmseg - INFO - Saving checkpoint at 35000 iterations +2022-06-05 06:26:29,116 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:26:29,117 - mmseg - INFO - Iter [35000/40000] lr: 9.534e-07, eta: 0:40:36, time: 0.499, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1578, decode.loss_mask: 0.4037, decode.loss_dice: 0.5710, decode.d0.loss_cls: 1.5213, decode.d0.loss_mask: 0.4489, 
decode.d0.loss_dice: 0.6793, decode.d1.loss_cls: 0.2948, decode.d1.loss_mask: 0.4192, decode.d1.loss_dice: 0.6105, decode.d2.loss_cls: 0.2230, decode.d2.loss_mask: 0.4081, decode.d2.loss_dice: 0.5868, decode.d3.loss_cls: 0.1775, decode.d3.loss_mask: 0.4068, decode.d3.loss_dice: 0.5766, decode.d4.loss_cls: 0.1671, decode.d4.loss_mask: 0.4061, decode.d4.loss_dice: 0.5742, decode.d5.loss_cls: 0.1655, decode.d5.loss_mask: 0.4052, decode.d5.loss_dice: 0.5703, decode.d6.loss_cls: 0.1529, decode.d6.loss_mask: 0.4051, decode.d6.loss_dice: 0.5715, decode.d7.loss_cls: 0.1532, decode.d7.loss_mask: 0.4058, decode.d7.loss_dice: 0.5728, decode.d8.loss_cls: 0.1516, decode.d8.loss_mask: 0.4044, decode.d8.loss_dice: 0.5703, loss: 13.1613 +2022-06-05 06:26:50,631 - mmseg - INFO - Iter [35050/40000] lr: 9.439e-07, eta: 0:40:11, time: 0.430, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1332, decode.loss_mask: 0.4024, decode.loss_dice: 0.5542, decode.d0.loss_cls: 1.5174, decode.d0.loss_mask: 0.4450, decode.d0.loss_dice: 0.6483, decode.d1.loss_cls: 0.2952, decode.d1.loss_mask: 0.4181, decode.d1.loss_dice: 0.5936, decode.d2.loss_cls: 0.2012, decode.d2.loss_mask: 0.4105, decode.d2.loss_dice: 0.5698, decode.d3.loss_cls: 0.1597, decode.d3.loss_mask: 0.4062, decode.d3.loss_dice: 0.5650, decode.d4.loss_cls: 0.1533, decode.d4.loss_mask: 0.4046, decode.d4.loss_dice: 0.5651, decode.d5.loss_cls: 0.1455, decode.d5.loss_mask: 0.4042, decode.d5.loss_dice: 0.5624, decode.d6.loss_cls: 0.1411, decode.d6.loss_mask: 0.4032, decode.d6.loss_dice: 0.5590, decode.d7.loss_cls: 0.1367, decode.d7.loss_mask: 0.4020, decode.d7.loss_dice: 0.5577, decode.d8.loss_cls: 0.1313, decode.d8.loss_mask: 0.4017, decode.d8.loss_dice: 0.5571, loss: 12.8448 +2022-06-05 06:27:13,075 - mmseg - INFO - Iter [35100/40000] lr: 9.343e-07, eta: 0:39:47, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1307, decode.loss_mask: 0.3860, decode.loss_dice: 0.5424, decode.d0.loss_cls: 1.5086, decode.d0.loss_mask: 0.4299, decode.d0.loss_dice: 0.6369, decode.d1.loss_cls: 0.2804, decode.d1.loss_mask: 0.4047, decode.d1.loss_dice: 0.5782, decode.d2.loss_cls: 0.1946, decode.d2.loss_mask: 0.3951, decode.d2.loss_dice: 0.5580, decode.d3.loss_cls: 0.1531, decode.d3.loss_mask: 0.3915, decode.d3.loss_dice: 0.5502, decode.d4.loss_cls: 0.1446, decode.d4.loss_mask: 0.3892, decode.d4.loss_dice: 0.5494, decode.d5.loss_cls: 0.1427, decode.d5.loss_mask: 0.3887, decode.d5.loss_dice: 0.5453, decode.d6.loss_cls: 0.1381, decode.d6.loss_mask: 0.3871, decode.d6.loss_dice: 0.5425, decode.d7.loss_cls: 0.1306, decode.d7.loss_mask: 0.3868, decode.d7.loss_dice: 0.5437, decode.d8.loss_cls: 0.1320, decode.d8.loss_mask: 0.3854, decode.d8.loss_dice: 0.5406, loss: 12.4870 +2022-06-05 06:27:35,280 - mmseg - INFO - Iter [35150/40000] lr: 9.248e-07, eta: 0:39:22, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1368, decode.loss_mask: 0.4103, decode.loss_dice: 0.5531, decode.d0.loss_cls: 1.5561, decode.d0.loss_mask: 0.4554, decode.d0.loss_dice: 0.6531, decode.d1.loss_cls: 0.2902, decode.d1.loss_mask: 0.4312, decode.d1.loss_dice: 0.5976, decode.d2.loss_cls: 0.1922, decode.d2.loss_mask: 0.4197, decode.d2.loss_dice: 0.5755, decode.d3.loss_cls: 0.1578, decode.d3.loss_mask: 0.4154, decode.d3.loss_dice: 0.5630, decode.d4.loss_cls: 0.1500, decode.d4.loss_mask: 0.4142, decode.d4.loss_dice: 0.5582, decode.d5.loss_cls: 0.1446, decode.d5.loss_mask: 0.4123, decode.d5.loss_dice: 0.5591, decode.d6.loss_cls: 0.1388, decode.d6.loss_mask: 0.4118, decode.d6.loss_dice: 0.5584, 
decode.d7.loss_cls: 0.1386, decode.d7.loss_mask: 0.4119, decode.d7.loss_dice: 0.5596, decode.d8.loss_cls: 0.1414, decode.d8.loss_mask: 0.4117, decode.d8.loss_dice: 0.5555, loss: 12.9735 +2022-06-05 06:27:57,224 - mmseg - INFO - Iter [35200/40000] lr: 9.153e-07, eta: 0:38:57, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1633, decode.loss_mask: 0.4082, decode.loss_dice: 0.5890, decode.d0.loss_cls: 1.5693, decode.d0.loss_mask: 0.4518, decode.d0.loss_dice: 0.6794, decode.d1.loss_cls: 0.3277, decode.d1.loss_mask: 0.4262, decode.d1.loss_dice: 0.6201, decode.d2.loss_cls: 0.2316, decode.d2.loss_mask: 0.4166, decode.d2.loss_dice: 0.6026, decode.d3.loss_cls: 0.1857, decode.d3.loss_mask: 0.4121, decode.d3.loss_dice: 0.5974, decode.d4.loss_cls: 0.1801, decode.d4.loss_mask: 0.4129, decode.d4.loss_dice: 0.5969, decode.d5.loss_cls: 0.1799, decode.d5.loss_mask: 0.4102, decode.d5.loss_dice: 0.5910, decode.d6.loss_cls: 0.1690, decode.d6.loss_mask: 0.4104, decode.d6.loss_dice: 0.5909, decode.d7.loss_cls: 0.1639, decode.d7.loss_mask: 0.4104, decode.d7.loss_dice: 0.5872, decode.d8.loss_cls: 0.1670, decode.d8.loss_mask: 0.4093, decode.d8.loss_dice: 0.5863, loss: 13.5462 +2022-06-05 06:28:19,513 - mmseg - INFO - Iter [35250/40000] lr: 9.057e-07, eta: 0:38:33, time: 0.446, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1230, decode.loss_mask: 0.4109, decode.loss_dice: 0.5607, decode.d0.loss_cls: 1.4941, decode.d0.loss_mask: 0.4487, decode.d0.loss_dice: 0.6456, decode.d1.loss_cls: 0.2814, decode.d1.loss_mask: 0.4261, decode.d1.loss_dice: 0.5955, decode.d2.loss_cls: 0.1860, decode.d2.loss_mask: 0.4170, decode.d2.loss_dice: 0.5750, decode.d3.loss_cls: 0.1491, decode.d3.loss_mask: 0.4123, decode.d3.loss_dice: 0.5683, decode.d4.loss_cls: 0.1409, decode.d4.loss_mask: 0.4112, decode.d4.loss_dice: 0.5655, decode.d5.loss_cls: 0.1313, decode.d5.loss_mask: 0.4102, decode.d5.loss_dice: 0.5644, decode.d6.loss_cls: 0.1307, decode.d6.loss_mask: 0.4090, decode.d6.loss_dice: 0.5596, decode.d7.loss_cls: 0.1292, decode.d7.loss_mask: 0.4099, decode.d7.loss_dice: 0.5615, decode.d8.loss_cls: 0.1270, decode.d8.loss_mask: 0.4099, decode.d8.loss_dice: 0.5594, loss: 12.8133 +2022-06-05 06:28:44,298 - mmseg - INFO - Iter [35300/40000] lr: 8.962e-07, eta: 0:38:08, time: 0.496, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1272, decode.loss_mask: 0.4037, decode.loss_dice: 0.5734, decode.d0.loss_cls: 1.5056, decode.d0.loss_mask: 0.4467, decode.d0.loss_dice: 0.6628, decode.d1.loss_cls: 0.2870, decode.d1.loss_mask: 0.4191, decode.d1.loss_dice: 0.6017, decode.d2.loss_cls: 0.1897, decode.d2.loss_mask: 0.4108, decode.d2.loss_dice: 0.5853, decode.d3.loss_cls: 0.1551, decode.d3.loss_mask: 0.4076, decode.d3.loss_dice: 0.5773, decode.d4.loss_cls: 0.1445, decode.d4.loss_mask: 0.4051, decode.d4.loss_dice: 0.5724, decode.d5.loss_cls: 0.1361, decode.d5.loss_mask: 0.4051, decode.d5.loss_dice: 0.5719, decode.d6.loss_cls: 0.1355, decode.d6.loss_mask: 0.4037, decode.d6.loss_dice: 0.5695, decode.d7.loss_cls: 0.1276, decode.d7.loss_mask: 0.4026, decode.d7.loss_dice: 0.5681, decode.d8.loss_cls: 0.1296, decode.d8.loss_mask: 0.4017, decode.d8.loss_dice: 0.5701, loss: 12.8966 +2022-06-05 06:29:06,828 - mmseg - INFO - Iter [35350/40000] lr: 8.867e-07, eta: 0:37:44, time: 0.451, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1446, decode.loss_mask: 0.4036, decode.loss_dice: 0.5775, decode.d0.loss_cls: 1.5559, decode.d0.loss_mask: 0.4442, decode.d0.loss_dice: 0.6707, decode.d1.loss_cls: 0.3020, decode.d1.loss_mask: 0.4193, 
decode.d1.loss_dice: 0.6126, decode.d2.loss_cls: 0.2104, decode.d2.loss_mask: 0.4122, decode.d2.loss_dice: 0.5909, decode.d3.loss_cls: 0.1745, decode.d3.loss_mask: 0.4076, decode.d3.loss_dice: 0.5853, decode.d4.loss_cls: 0.1602, decode.d4.loss_mask: 0.4054, decode.d4.loss_dice: 0.5831, decode.d5.loss_cls: 0.1551, decode.d5.loss_mask: 0.4029, decode.d5.loss_dice: 0.5816, decode.d6.loss_cls: 0.1506, decode.d6.loss_mask: 0.4040, decode.d6.loss_dice: 0.5801, decode.d7.loss_cls: 0.1472, decode.d7.loss_mask: 0.4050, decode.d7.loss_dice: 0.5773, decode.d8.loss_cls: 0.1406, decode.d8.loss_mask: 0.4037, decode.d8.loss_dice: 0.5799, loss: 13.1881 +2022-06-05 06:29:28,606 - mmseg - INFO - Iter [35400/40000] lr: 8.771e-07, eta: 0:37:19, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1468, decode.loss_mask: 0.4015, decode.loss_dice: 0.5758, decode.d0.loss_cls: 1.5184, decode.d0.loss_mask: 0.4423, decode.d0.loss_dice: 0.6764, decode.d1.loss_cls: 0.3148, decode.d1.loss_mask: 0.4164, decode.d1.loss_dice: 0.6131, decode.d2.loss_cls: 0.2191, decode.d2.loss_mask: 0.4079, decode.d2.loss_dice: 0.5929, decode.d3.loss_cls: 0.1747, decode.d3.loss_mask: 0.4058, decode.d3.loss_dice: 0.5846, decode.d4.loss_cls: 0.1606, decode.d4.loss_mask: 0.4041, decode.d4.loss_dice: 0.5803, decode.d5.loss_cls: 0.1555, decode.d5.loss_mask: 0.4048, decode.d5.loss_dice: 0.5812, decode.d6.loss_cls: 0.1511, decode.d6.loss_mask: 0.4032, decode.d6.loss_dice: 0.5809, decode.d7.loss_cls: 0.1467, decode.d7.loss_mask: 0.4018, decode.d7.loss_dice: 0.5776, decode.d8.loss_cls: 0.1414, decode.d8.loss_mask: 0.4015, decode.d8.loss_dice: 0.5808, loss: 13.1621 +2022-06-05 06:29:50,584 - mmseg - INFO - Iter [35450/40000] lr: 8.676e-07, eta: 0:36:54, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1573, decode.loss_mask: 0.3925, decode.loss_dice: 0.5697, decode.d0.loss_cls: 1.5214, decode.d0.loss_mask: 0.4381, decode.d0.loss_dice: 0.6856, decode.d1.loss_cls: 0.2907, decode.d1.loss_mask: 0.4087, decode.d1.loss_dice: 0.6175, decode.d2.loss_cls: 0.2098, decode.d2.loss_mask: 0.3986, decode.d2.loss_dice: 0.5882, decode.d3.loss_cls: 0.1699, decode.d3.loss_mask: 0.3965, decode.d3.loss_dice: 0.5784, decode.d4.loss_cls: 0.1609, decode.d4.loss_mask: 0.3967, decode.d4.loss_dice: 0.5787, decode.d5.loss_cls: 0.1581, decode.d5.loss_mask: 0.3966, decode.d5.loss_dice: 0.5766, decode.d6.loss_cls: 0.1513, decode.d6.loss_mask: 0.3954, decode.d6.loss_dice: 0.5733, decode.d7.loss_cls: 0.1493, decode.d7.loss_mask: 0.3952, decode.d7.loss_dice: 0.5735, decode.d8.loss_cls: 0.1506, decode.d8.loss_mask: 0.3941, decode.d8.loss_dice: 0.5715, loss: 13.0448 +2022-06-05 06:30:12,808 - mmseg - INFO - Iter [35500/40000] lr: 8.581e-07, eta: 0:36:30, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1195, decode.loss_mask: 0.4078, decode.loss_dice: 0.5551, decode.d0.loss_cls: 1.4843, decode.d0.loss_mask: 0.4516, decode.d0.loss_dice: 0.6447, decode.d1.loss_cls: 0.2676, decode.d1.loss_mask: 0.4261, decode.d1.loss_dice: 0.5848, decode.d2.loss_cls: 0.1754, decode.d2.loss_mask: 0.4142, decode.d2.loss_dice: 0.5623, decode.d3.loss_cls: 0.1491, decode.d3.loss_mask: 0.4103, decode.d3.loss_dice: 0.5581, decode.d4.loss_cls: 0.1354, decode.d4.loss_mask: 0.4088, decode.d4.loss_dice: 0.5575, decode.d5.loss_cls: 0.1304, decode.d5.loss_mask: 0.4090, decode.d5.loss_dice: 0.5550, decode.d6.loss_cls: 0.1241, decode.d6.loss_mask: 0.4088, decode.d6.loss_dice: 0.5555, decode.d7.loss_cls: 0.1228, decode.d7.loss_mask: 0.4072, decode.d7.loss_dice: 0.5581, 
decode.d8.loss_cls: 0.1228, decode.d8.loss_mask: 0.4079, decode.d8.loss_dice: 0.5544, loss: 12.6688 +2022-06-05 06:30:34,782 - mmseg - INFO - Iter [35550/40000] lr: 8.485e-07, eta: 0:36:05, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1347, decode.loss_mask: 0.4022, decode.loss_dice: 0.5618, decode.d0.loss_cls: 1.5408, decode.d0.loss_mask: 0.4442, decode.d0.loss_dice: 0.6442, decode.d1.loss_cls: 0.2920, decode.d1.loss_mask: 0.4183, decode.d1.loss_dice: 0.5886, decode.d2.loss_cls: 0.2087, decode.d2.loss_mask: 0.4076, decode.d2.loss_dice: 0.5698, decode.d3.loss_cls: 0.1697, decode.d3.loss_mask: 0.4064, decode.d3.loss_dice: 0.5660, decode.d4.loss_cls: 0.1541, decode.d4.loss_mask: 0.4050, decode.d4.loss_dice: 0.5623, decode.d5.loss_cls: 0.1457, decode.d5.loss_mask: 0.4050, decode.d5.loss_dice: 0.5623, decode.d6.loss_cls: 0.1391, decode.d6.loss_mask: 0.4038, decode.d6.loss_dice: 0.5587, decode.d7.loss_cls: 0.1371, decode.d7.loss_mask: 0.4028, decode.d7.loss_dice: 0.5628, decode.d8.loss_cls: 0.1400, decode.d8.loss_mask: 0.4016, decode.d8.loss_dice: 0.5598, loss: 12.8954 +2022-06-05 06:30:59,483 - mmseg - INFO - Iter [35600/40000] lr: 8.390e-07, eta: 0:35:41, time: 0.493, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1265, decode.loss_mask: 0.4031, decode.loss_dice: 0.5522, decode.d0.loss_cls: 1.4669, decode.d0.loss_mask: 0.4495, decode.d0.loss_dice: 0.6419, decode.d1.loss_cls: 0.2745, decode.d1.loss_mask: 0.4206, decode.d1.loss_dice: 0.5813, decode.d2.loss_cls: 0.1921, decode.d2.loss_mask: 0.4103, decode.d2.loss_dice: 0.5666, decode.d3.loss_cls: 0.1602, decode.d3.loss_mask: 0.4073, decode.d3.loss_dice: 0.5561, decode.d4.loss_cls: 0.1479, decode.d4.loss_mask: 0.4049, decode.d4.loss_dice: 0.5555, decode.d5.loss_cls: 0.1422, decode.d5.loss_mask: 0.4041, decode.d5.loss_dice: 0.5563, decode.d6.loss_cls: 0.1340, decode.d6.loss_mask: 0.4025, decode.d6.loss_dice: 0.5555, decode.d7.loss_cls: 0.1324, decode.d7.loss_mask: 0.4034, decode.d7.loss_dice: 0.5580, decode.d8.loss_cls: 0.1269, decode.d8.loss_mask: 0.4022, decode.d8.loss_dice: 0.5522, loss: 12.6873 +2022-06-05 06:31:21,614 - mmseg - INFO - Iter [35650/40000] lr: 8.295e-07, eta: 0:35:16, time: 0.443, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1404, decode.loss_mask: 0.3977, decode.loss_dice: 0.5695, decode.d0.loss_cls: 1.5272, decode.d0.loss_mask: 0.4366, decode.d0.loss_dice: 0.6642, decode.d1.loss_cls: 0.3107, decode.d1.loss_mask: 0.4138, decode.d1.loss_dice: 0.6060, decode.d2.loss_cls: 0.2099, decode.d2.loss_mask: 0.4036, decode.d2.loss_dice: 0.5839, decode.d3.loss_cls: 0.1671, decode.d3.loss_mask: 0.4019, decode.d3.loss_dice: 0.5791, decode.d4.loss_cls: 0.1562, decode.d4.loss_mask: 0.4013, decode.d4.loss_dice: 0.5800, decode.d5.loss_cls: 0.1495, decode.d5.loss_mask: 0.3997, decode.d5.loss_dice: 0.5791, decode.d6.loss_cls: 0.1431, decode.d6.loss_mask: 0.3996, decode.d6.loss_dice: 0.5752, decode.d7.loss_cls: 0.1418, decode.d7.loss_mask: 0.3981, decode.d7.loss_dice: 0.5727, decode.d8.loss_cls: 0.1387, decode.d8.loss_mask: 0.3989, decode.d8.loss_dice: 0.5759, loss: 13.0212 +2022-06-05 06:31:44,138 - mmseg - INFO - Iter [35700/40000] lr: 8.199e-07, eta: 0:34:52, time: 0.450, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1395, decode.loss_mask: 0.3930, decode.loss_dice: 0.5633, decode.d0.loss_cls: 1.5217, decode.d0.loss_mask: 0.4323, decode.d0.loss_dice: 0.6565, decode.d1.loss_cls: 0.3000, decode.d1.loss_mask: 0.4053, decode.d1.loss_dice: 0.5954, decode.d2.loss_cls: 0.2147, decode.d2.loss_mask: 0.3969, 
decode.d2.loss_dice: 0.5738, decode.d3.loss_cls: 0.1692, decode.d3.loss_mask: 0.3949, decode.d3.loss_dice: 0.5676, decode.d4.loss_cls: 0.1624, decode.d4.loss_mask: 0.3943, decode.d4.loss_dice: 0.5682, decode.d5.loss_cls: 0.1519, decode.d5.loss_mask: 0.3940, decode.d5.loss_dice: 0.5687, decode.d6.loss_cls: 0.1471, decode.d6.loss_mask: 0.3930, decode.d6.loss_dice: 0.5642, decode.d7.loss_cls: 0.1400, decode.d7.loss_mask: 0.3938, decode.d7.loss_dice: 0.5677, decode.d8.loss_cls: 0.1472, decode.d8.loss_mask: 0.3922, decode.d8.loss_dice: 0.5660, loss: 12.8749 +2022-06-05 06:32:06,039 - mmseg - INFO - Iter [35750/40000] lr: 8.104e-07, eta: 0:34:27, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1211, decode.loss_mask: 0.3867, decode.loss_dice: 0.5489, decode.d0.loss_cls: 1.4952, decode.d0.loss_mask: 0.4263, decode.d0.loss_dice: 0.6408, decode.d1.loss_cls: 0.2804, decode.d1.loss_mask: 0.4040, decode.d1.loss_dice: 0.5828, decode.d2.loss_cls: 0.1765, decode.d2.loss_mask: 0.3928, decode.d2.loss_dice: 0.5633, decode.d3.loss_cls: 0.1512, decode.d3.loss_mask: 0.3897, decode.d3.loss_dice: 0.5471, decode.d4.loss_cls: 0.1347, decode.d4.loss_mask: 0.3887, decode.d4.loss_dice: 0.5558, decode.d5.loss_cls: 0.1273, decode.d5.loss_mask: 0.3881, decode.d5.loss_dice: 0.5489, decode.d6.loss_cls: 0.1249, decode.d6.loss_mask: 0.3868, decode.d6.loss_dice: 0.5511, decode.d7.loss_cls: 0.1231, decode.d7.loss_mask: 0.3857, decode.d7.loss_dice: 0.5487, decode.d8.loss_cls: 0.1207, decode.d8.loss_mask: 0.3863, decode.d8.loss_dice: 0.5480, loss: 12.4257 +2022-06-05 06:32:28,571 - mmseg - INFO - Iter [35800/40000] lr: 8.009e-07, eta: 0:34:03, time: 0.450, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1399, decode.loss_mask: 0.3986, decode.loss_dice: 0.5365, decode.d0.loss_cls: 1.5377, decode.d0.loss_mask: 0.4427, decode.d0.loss_dice: 0.6295, decode.d1.loss_cls: 0.2882, decode.d1.loss_mask: 0.4154, decode.d1.loss_dice: 0.5673, decode.d2.loss_cls: 0.2023, decode.d2.loss_mask: 0.4061, decode.d2.loss_dice: 0.5471, decode.d3.loss_cls: 0.1592, decode.d3.loss_mask: 0.4034, decode.d3.loss_dice: 0.5411, decode.d4.loss_cls: 0.1550, decode.d4.loss_mask: 0.4030, decode.d4.loss_dice: 0.5416, decode.d5.loss_cls: 0.1470, decode.d5.loss_mask: 0.4017, decode.d5.loss_dice: 0.5406, decode.d6.loss_cls: 0.1432, decode.d6.loss_mask: 0.4004, decode.d6.loss_dice: 0.5383, decode.d7.loss_cls: 0.1391, decode.d7.loss_mask: 0.3990, decode.d7.loss_dice: 0.5385, decode.d8.loss_cls: 0.1369, decode.d8.loss_mask: 0.3996, decode.d8.loss_dice: 0.5385, loss: 12.6374 +2022-06-05 06:32:50,775 - mmseg - INFO - Iter [35850/40000] lr: 7.913e-07, eta: 0:33:38, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1514, decode.loss_mask: 0.3942, decode.loss_dice: 0.5779, decode.d0.loss_cls: 1.5468, decode.d0.loss_mask: 0.4354, decode.d0.loss_dice: 0.6694, decode.d1.loss_cls: 0.3129, decode.d1.loss_mask: 0.4104, decode.d1.loss_dice: 0.6105, decode.d2.loss_cls: 0.2093, decode.d2.loss_mask: 0.4015, decode.d2.loss_dice: 0.5929, decode.d3.loss_cls: 0.1776, decode.d3.loss_mask: 0.3996, decode.d3.loss_dice: 0.5842, decode.d4.loss_cls: 0.1680, decode.d4.loss_mask: 0.3981, decode.d4.loss_dice: 0.5828, decode.d5.loss_cls: 0.1602, decode.d5.loss_mask: 0.3957, decode.d5.loss_dice: 0.5815, decode.d6.loss_cls: 0.1483, decode.d6.loss_mask: 0.3959, decode.d6.loss_dice: 0.5789, decode.d7.loss_cls: 0.1551, decode.d7.loss_mask: 0.3954, decode.d7.loss_dice: 0.5831, decode.d8.loss_cls: 0.1507, decode.d8.loss_mask: 0.3955, decode.d8.loss_dice: 0.5786, loss: 
13.1418
+2022-06-05 06:33:15,002 - mmseg - INFO - Iter [35900/40000] lr: 7.818e-07, eta: 0:33:14, time: 0.484, data_time: 0.058, memory: 31652, decode.loss_cls: 0.1630, decode.loss_mask: 0.3928, decode.loss_dice: 0.5907, decode.d0.loss_cls: 1.5837, decode.d0.loss_mask: 0.4307, decode.d0.loss_dice: 0.6903, decode.d1.loss_cls: 0.3186, decode.d1.loss_mask: 0.4055, decode.d1.loss_dice: 0.6333, decode.d2.loss_cls: 0.2282, decode.d2.loss_mask: 0.4001, decode.d2.loss_dice: 0.6080, decode.d3.loss_cls: 0.1853, decode.d3.loss_mask: 0.3982, decode.d3.loss_dice: 0.5959, decode.d4.loss_cls: 0.1761, decode.d4.loss_mask: 0.3961, decode.d4.loss_dice: 0.5981, decode.d5.loss_cls: 0.1690, decode.d5.loss_mask: 0.3938, decode.d5.loss_dice: 0.5952, decode.d6.loss_cls: 0.1609, decode.d6.loss_mask: 0.3938, decode.d6.loss_dice: 0.5938, decode.d7.loss_cls: 0.1628, decode.d7.loss_mask: 0.3930, decode.d7.loss_dice: 0.5894, decode.d8.loss_cls: 0.1527, decode.d8.loss_mask: 0.3927, decode.d8.loss_dice: 0.5946, loss: 13.3860
+2022-06-05 06:33:37,368 - mmseg - INFO - Iter [35950/40000] lr: 7.723e-07, eta: 0:32:49, time: 0.448, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1262, decode.loss_mask: 0.3893, decode.loss_dice: 0.5593, decode.d0.loss_cls: 1.5080, decode.d0.loss_mask: 0.4286, decode.d0.loss_dice: 0.6482, decode.d1.loss_cls: 0.2978, decode.d1.loss_mask: 0.4019, decode.d1.loss_dice: 0.5893, decode.d2.loss_cls: 0.2048, decode.d2.loss_mask: 0.3939, decode.d2.loss_dice: 0.5751, decode.d3.loss_cls: 0.1620, decode.d3.loss_mask: 0.3919, decode.d3.loss_dice: 0.5641, decode.d4.loss_cls: 0.1466, decode.d4.loss_mask: 0.3900, decode.d4.loss_dice: 0.5669, decode.d5.loss_cls: 0.1375, decode.d5.loss_mask: 0.3902, decode.d5.loss_dice: 0.5622, decode.d6.loss_cls: 0.1282, decode.d6.loss_mask: 0.3901, decode.d6.loss_dice: 0.5582, decode.d7.loss_cls: 0.1294, decode.d7.loss_mask: 0.3891, decode.d7.loss_dice: 0.5591, decode.d8.loss_cls: 0.1251, decode.d8.loss_mask: 0.3886, decode.d8.loss_dice: 0.5589, loss: 12.6605
+2022-06-05 06:33:59,150 - mmseg - INFO - Saving checkpoint at 36000 iterations
+2022-06-05 06:34:01,575 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py
+2022-06-05 06:34:01,575 - mmseg - INFO - Iter [36000/40000] lr: 7.628e-07, eta: 0:32:25, time: 0.484, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1281, decode.loss_mask: 0.3939, decode.loss_dice: 0.5222, decode.d0.loss_cls: 1.5056, decode.d0.loss_mask: 0.4320, decode.d0.loss_dice: 0.6105, decode.d1.loss_cls: 0.2662, decode.d1.loss_mask: 0.4119, decode.d1.loss_dice: 0.5557, decode.d2.loss_cls: 0.1840, decode.d2.loss_mask: 0.4013, decode.d2.loss_dice: 0.5336, decode.d3.loss_cls: 0.1574, decode.d3.loss_mask: 0.3979, decode.d3.loss_dice: 0.5265, decode.d4.loss_cls: 0.1424, decode.d4.loss_mask: 0.3965, decode.d4.loss_dice: 0.5266, decode.d5.loss_cls: 0.1375, decode.d5.loss_mask: 0.3965, decode.d5.loss_dice: 0.5283, decode.d6.loss_cls: 0.1333, decode.d6.loss_mask: 0.3963, decode.d6.loss_dice: 0.5225, decode.d7.loss_cls: 0.1308, decode.d7.loss_mask: 0.3945, decode.d7.loss_dice: 0.5218, decode.d8.loss_cls: 0.1270, decode.d8.loss_mask: 0.3948, decode.d8.loss_dice: 0.5257, loss: 12.3014
+2022-06-05 06:36:42,072 - mmseg - INFO - per class results:
+2022-06-05 06:36:42,079 - mmseg - INFO -
++-------------+-------+-------+
+| Class | IoU | Acc |
++-------------+-------+-------+
+| aeroplane | 90.9 | 95.4 |
+| bag | 39.45 | 50.11 |
+| bed | 32.37 | 43.92 |
+| bedclothes | 43.87 | 62.22 |
+| bench | 25.49 | 30.33 |
+| bicycle | 83.93 | 93.28 |
+| bird | 94.41 | 96.97 |
+| boat | 84.27 | 91.41 |
+| book | 52.33 | 65.01 |
+| bottle | 87.04 | 96.43 |
+| building | 65.15 | 77.51 |
+| bus | 93.87 | 97.24 |
+| cabinet | 43.86 | 63.09 |
+| car | 91.17 | 95.59 |
+| cat | 93.79 | 97.97 |
+| ceiling | 60.3 | 74.96 |
+| chair | 59.1 | 77.74 |
+| cloth | 24.6 | 35.64 |
+| computer | 41.46 | 54.31 |
+| cow | 95.03 | 97.19 |
+| cup | 43.03 | 57.71 |
+| curtain | 55.72 | 69.03 |
+| dog | 91.6 | 96.93 |
+| door | 30.67 | 48.31 |
+| fence | 43.76 | 56.47 |
+| floor | 72.83 | 85.72 |
+| flower | 41.84 | 62.04 |
+| food | 35.97 | 47.54 |
+| grass | 82.36 | 91.76 |
+| ground | 57.08 | 73.43 |
+| horse | 93.99 | 97.56 |
+| keyboard | 81.86 | 87.91 |
+| light | 57.63 | 73.03 |
+| motorbike | 89.47 | 94.64 |
+| mountain | 53.95 | 73.5 |
+| mouse | 77.5 | 83.58 |
+| person | 90.42 | 96.1 |
+| plate | 27.65 | 37.85 |
+| platform | 54.77 | 71.8 |
+| pottedplant | 80.65 | 88.46 |
+| road | 50.19 | 62.43 |
+| rock | 49.56 | 59.21 |
+| sheep | 94.02 | 97.19 |
+| shelves | 35.74 | 49.43 |
+| sidewalk | 28.44 | 46.4 |
+| sign | 47.19 | 57.52 |
+| sky | 95.01 | 97.48 |
+| snow | 74.18 | 86.1 |
+| sofa | 58.79 | 70.2 |
+| table | 67.35 | 78.86 |
+| track | 69.52 | 79.24 |
+| train | 92.16 | 96.28 |
+| tree | 81.03 | 90.55 |
+| truck | 39.04 | 52.51 |
+| tvmonitor | 86.89 | 94.02 |
+| wall | 70.55 | 83.79 |
+| water | 91.71 | 95.8 |
+| window | 45.27 | 60.5 |
+| wood | 27.91 | 41.85 |
++-------------+-------+-------+
+2022-06-05 06:36:42,079 - mmseg - INFO - Summary:
+2022-06-05 06:36:42,079 - mmseg - INFO -
++-------+-------+-------+
+| aAcc | mIoU | mAcc |
++-------+-------+-------+
+| 85.37 | 63.89 | 74.39 |
++-------+-------+-------+
+2022-06-05 06:36:42,081 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_32000.pth was removed
+2022-06-05 06:36:45,157 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_36000.pth.
+2022-06-05 06:36:45,158 - mmseg - INFO - Best mIoU is 0.6389 at 36000 iter.
+2022-06-05 06:36:45,184 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:36:45,184 - mmseg - INFO - Iter(val) [638] aAcc: 0.8537, mIoU: 0.6389, mAcc: 0.7439, IoU.aeroplane: 0.9090, IoU.bag: 0.3945, IoU.bed: 0.3237, IoU.bedclothes: 0.4387, IoU.bench: 0.2549, IoU.bicycle: 0.8393, IoU.bird: 0.9441, IoU.boat: 0.8427, IoU.book: 0.5233, IoU.bottle: 0.8704, IoU.building: 0.6515, IoU.bus: 0.9387, IoU.cabinet: 0.4386, IoU.car: 0.9117, IoU.cat: 0.9379, IoU.ceiling: 0.6030, IoU.chair: 0.5910, IoU.cloth: 0.2460, IoU.computer: 0.4146, IoU.cow: 0.9503, IoU.cup: 0.4303, IoU.curtain: 0.5572, IoU.dog: 0.9160, IoU.door: 0.3067, IoU.fence: 0.4376, IoU.floor: 0.7283, IoU.flower: 0.4184, IoU.food: 0.3597, IoU.grass: 0.8236, IoU.ground: 0.5708, IoU.horse: 0.9399, IoU.keyboard: 0.8186, IoU.light: 0.5763, IoU.motorbike: 0.8947, IoU.mountain: 0.5395, IoU.mouse: 0.7750, IoU.person: 0.9042, IoU.plate: 0.2765, IoU.platform: 0.5477, IoU.pottedplant: 0.8065, IoU.road: 0.5019, IoU.rock: 0.4956, IoU.sheep: 0.9402, IoU.shelves: 0.3574, IoU.sidewalk: 0.2844, IoU.sign: 0.4719, IoU.sky: 0.9501, IoU.snow: 0.7418, IoU.sofa: 0.5879, IoU.table: 0.6735, IoU.track: 0.6952, IoU.train: 0.9216, IoU.tree: 0.8103, IoU.truck: 0.3904, IoU.tvmonitor: 0.8689, IoU.wall: 0.7055, IoU.water: 0.9171, IoU.window: 0.4527, IoU.wood: 0.2791, Acc.aeroplane: 0.9540, Acc.bag: 0.5011, Acc.bed: 0.4392, Acc.bedclothes: 0.6222, Acc.bench: 0.3033, Acc.bicycle: 0.9328, Acc.bird: 0.9697, Acc.boat: 0.9141, Acc.book: 0.6501, Acc.bottle: 0.9643, Acc.building: 0.7751, Acc.bus: 0.9724, Acc.cabinet: 0.6309, Acc.car: 0.9559, Acc.cat: 0.9797, Acc.ceiling: 0.7496, Acc.chair: 0.7774, Acc.cloth: 0.3564, Acc.computer: 0.5431, Acc.cow: 0.9719, Acc.cup: 0.5771, Acc.curtain: 0.6903, Acc.dog: 0.9693, Acc.door: 0.4831, Acc.fence: 0.5647, Acc.floor: 0.8572, Acc.flower: 0.6204, Acc.food: 0.4754, Acc.grass: 0.9176, Acc.ground: 0.7343, Acc.horse: 0.9756, Acc.keyboard: 0.8791, Acc.light: 0.7303, Acc.motorbike: 0.9464, Acc.mountain: 0.7350, Acc.mouse: 0.8358, Acc.person: 0.9610, Acc.plate: 0.3785, Acc.platform: 0.7180, Acc.pottedplant: 0.8846, Acc.road: 0.6243, Acc.rock: 0.5921, Acc.sheep: 0.9719, Acc.shelves: 0.4943, Acc.sidewalk: 0.4640, Acc.sign: 0.5752, Acc.sky: 0.9748, Acc.snow: 0.8610, Acc.sofa: 0.7020, Acc.table: 0.7886, Acc.track: 0.7924, Acc.train: 0.9628, Acc.tree: 0.9055, Acc.truck: 0.5251, Acc.tvmonitor: 0.9402, Acc.wall: 0.8379, Acc.water: 0.9580, Acc.window: 0.6050, Acc.wood: 0.4185 +2022-06-05 06:37:07,619 - mmseg - INFO - Iter [36050/40000] lr: 7.532e-07, eta: 0:32:18, time: 3.721, data_time: 3.279, memory: 31652, decode.loss_cls: 0.1194, decode.loss_mask: 0.4110, decode.loss_dice: 0.5450, decode.d0.loss_cls: 1.4792, decode.d0.loss_mask: 0.4515, decode.d0.loss_dice: 0.6230, decode.d1.loss_cls: 0.2837, decode.d1.loss_mask: 0.4223, decode.d1.loss_dice: 0.5746, decode.d2.loss_cls: 0.1856, decode.d2.loss_mask: 0.4172, decode.d2.loss_dice: 0.5544, decode.d3.loss_cls: 0.1511, decode.d3.loss_mask: 0.4138, decode.d3.loss_dice: 0.5450, decode.d4.loss_cls: 0.1375, decode.d4.loss_mask: 0.4131, decode.d4.loss_dice: 0.5472, decode.d5.loss_cls: 0.1342, decode.d5.loss_mask: 0.4114, decode.d5.loss_dice: 0.5458, decode.d6.loss_cls: 0.1310, decode.d6.loss_mask: 0.4110, decode.d6.loss_dice: 0.5444, decode.d7.loss_cls: 0.1251, decode.d7.loss_mask: 0.4092, decode.d7.loss_dice: 0.5412, decode.d8.loss_cls: 0.1232, decode.d8.loss_mask: 0.4097, decode.d8.loss_dice: 0.5469, loss: 12.6080 +2022-06-05 06:37:29,971 - mmseg - INFO - Iter 
[36100/40000] lr: 7.437e-07, eta: 0:31:53, time: 0.447, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1567, decode.loss_mask: 0.4131, decode.loss_dice: 0.5681, decode.d0.loss_cls: 1.5665, decode.d0.loss_mask: 0.4504, decode.d0.loss_dice: 0.6591, decode.d1.loss_cls: 0.3043, decode.d1.loss_mask: 0.4279, decode.d1.loss_dice: 0.6065, decode.d2.loss_cls: 0.2320, decode.d2.loss_mask: 0.4186, decode.d2.loss_dice: 0.5795, decode.d3.loss_cls: 0.1888, decode.d3.loss_mask: 0.4155, decode.d3.loss_dice: 0.5672, decode.d4.loss_cls: 0.1740, decode.d4.loss_mask: 0.4148, decode.d4.loss_dice: 0.5737, decode.d5.loss_cls: 0.1691, decode.d5.loss_mask: 0.4146, decode.d5.loss_dice: 0.5740, decode.d6.loss_cls: 0.1603, decode.d6.loss_mask: 0.4137, decode.d6.loss_dice: 0.5674, decode.d7.loss_cls: 0.1598, decode.d7.loss_mask: 0.4135, decode.d7.loss_dice: 0.5674, decode.d8.loss_cls: 0.1559, decode.d8.loss_mask: 0.4131, decode.d8.loss_dice: 0.5680, loss: 13.2935 +2022-06-05 06:37:51,986 - mmseg - INFO - Iter [36150/40000] lr: 7.342e-07, eta: 0:31:29, time: 0.441, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1298, decode.loss_mask: 0.3980, decode.loss_dice: 0.5773, decode.d0.loss_cls: 1.5276, decode.d0.loss_mask: 0.4357, decode.d0.loss_dice: 0.6661, decode.d1.loss_cls: 0.2946, decode.d1.loss_mask: 0.4147, decode.d1.loss_dice: 0.6146, decode.d2.loss_cls: 0.2145, decode.d2.loss_mask: 0.4040, decode.d2.loss_dice: 0.5905, decode.d3.loss_cls: 0.1636, decode.d3.loss_mask: 0.4037, decode.d3.loss_dice: 0.5819, decode.d4.loss_cls: 0.1507, decode.d4.loss_mask: 0.4017, decode.d4.loss_dice: 0.5863, decode.d5.loss_cls: 0.1416, decode.d5.loss_mask: 0.4003, decode.d5.loss_dice: 0.5826, decode.d6.loss_cls: 0.1355, decode.d6.loss_mask: 0.4004, decode.d6.loss_dice: 0.5829, decode.d7.loss_cls: 0.1367, decode.d7.loss_mask: 0.3996, decode.d7.loss_dice: 0.5825, decode.d8.loss_cls: 0.1356, decode.d8.loss_mask: 0.3981, decode.d8.loss_dice: 0.5792, loss: 13.0304 +2022-06-05 06:38:16,998 - mmseg - INFO - Iter [36200/40000] lr: 7.246e-07, eta: 0:31:04, time: 0.500, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1506, decode.loss_mask: 0.4051, decode.loss_dice: 0.5690, decode.d0.loss_cls: 1.4978, decode.d0.loss_mask: 0.4449, decode.d0.loss_dice: 0.6543, decode.d1.loss_cls: 0.3128, decode.d1.loss_mask: 0.4199, decode.d1.loss_dice: 0.5969, decode.d2.loss_cls: 0.2213, decode.d2.loss_mask: 0.4120, decode.d2.loss_dice: 0.5797, decode.d3.loss_cls: 0.1805, decode.d3.loss_mask: 0.4120, decode.d3.loss_dice: 0.5706, decode.d4.loss_cls: 0.1635, decode.d4.loss_mask: 0.4073, decode.d4.loss_dice: 0.5725, decode.d5.loss_cls: 0.1589, decode.d5.loss_mask: 0.4078, decode.d5.loss_dice: 0.5697, decode.d6.loss_cls: 0.1569, decode.d6.loss_mask: 0.4069, decode.d6.loss_dice: 0.5690, decode.d7.loss_cls: 0.1580, decode.d7.loss_mask: 0.4053, decode.d7.loss_dice: 0.5695, decode.d8.loss_cls: 0.1525, decode.d8.loss_mask: 0.4055, decode.d8.loss_dice: 0.5673, loss: 13.0982 +2022-06-05 06:38:38,755 - mmseg - INFO - Iter [36250/40000] lr: 7.151e-07, eta: 0:30:39, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1376, decode.loss_mask: 0.3907, decode.loss_dice: 0.5634, decode.d0.loss_cls: 1.5527, decode.d0.loss_mask: 0.4291, decode.d0.loss_dice: 0.6540, decode.d1.loss_cls: 0.2871, decode.d1.loss_mask: 0.4017, decode.d1.loss_dice: 0.5906, decode.d2.loss_cls: 0.1980, decode.d2.loss_mask: 0.3983, decode.d2.loss_dice: 0.5754, decode.d3.loss_cls: 0.1595, decode.d3.loss_mask: 0.3975, decode.d3.loss_dice: 0.5665, decode.d4.loss_cls: 0.1594, 
decode.d4.loss_mask: 0.3950, decode.d4.loss_dice: 0.5680, decode.d5.loss_cls: 0.1498, decode.d5.loss_mask: 0.3946, decode.d5.loss_dice: 0.5726, decode.d6.loss_cls: 0.1452, decode.d6.loss_mask: 0.3929, decode.d6.loss_dice: 0.5659, decode.d7.loss_cls: 0.1388, decode.d7.loss_mask: 0.3928, decode.d7.loss_dice: 0.5677, decode.d8.loss_cls: 0.1376, decode.d8.loss_mask: 0.3914, decode.d8.loss_dice: 0.5665, loss: 12.8401 +2022-06-05 06:39:00,142 - mmseg - INFO - Iter [36300/40000] lr: 7.056e-07, eta: 0:30:14, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1317, decode.loss_mask: 0.4002, decode.loss_dice: 0.5593, decode.d0.loss_cls: 1.5148, decode.d0.loss_mask: 0.4439, decode.d0.loss_dice: 0.6500, decode.d1.loss_cls: 0.2841, decode.d1.loss_mask: 0.4159, decode.d1.loss_dice: 0.5881, decode.d2.loss_cls: 0.1933, decode.d2.loss_mask: 0.4070, decode.d2.loss_dice: 0.5667, decode.d3.loss_cls: 0.1560, decode.d3.loss_mask: 0.4060, decode.d3.loss_dice: 0.5656, decode.d4.loss_cls: 0.1528, decode.d4.loss_mask: 0.4035, decode.d4.loss_dice: 0.5625, decode.d5.loss_cls: 0.1449, decode.d5.loss_mask: 0.4009, decode.d5.loss_dice: 0.5600, decode.d6.loss_cls: 0.1379, decode.d6.loss_mask: 0.3999, decode.d6.loss_dice: 0.5591, decode.d7.loss_cls: 0.1363, decode.d7.loss_mask: 0.3997, decode.d7.loss_dice: 0.5566, decode.d8.loss_cls: 0.1305, decode.d8.loss_mask: 0.3994, decode.d8.loss_dice: 0.5552, loss: 12.7816 +2022-06-05 06:39:21,545 - mmseg - INFO - Iter [36350/40000] lr: 6.960e-07, eta: 0:29:50, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1235, decode.loss_mask: 0.3999, decode.loss_dice: 0.5611, decode.d0.loss_cls: 1.4948, decode.d0.loss_mask: 0.4426, decode.d0.loss_dice: 0.6475, decode.d1.loss_cls: 0.2692, decode.d1.loss_mask: 0.4157, decode.d1.loss_dice: 0.5915, decode.d2.loss_cls: 0.1864, decode.d2.loss_mask: 0.4065, decode.d2.loss_dice: 0.5731, decode.d3.loss_cls: 0.1488, decode.d3.loss_mask: 0.4049, decode.d3.loss_dice: 0.5668, decode.d4.loss_cls: 0.1407, decode.d4.loss_mask: 0.4043, decode.d4.loss_dice: 0.5709, decode.d5.loss_cls: 0.1318, decode.d5.loss_mask: 0.4023, decode.d5.loss_dice: 0.5665, decode.d6.loss_cls: 0.1264, decode.d6.loss_mask: 0.4021, decode.d6.loss_dice: 0.5630, decode.d7.loss_cls: 0.1261, decode.d7.loss_mask: 0.3994, decode.d7.loss_dice: 0.5600, decode.d8.loss_cls: 0.1219, decode.d8.loss_mask: 0.4006, decode.d8.loss_dice: 0.5623, loss: 12.7105 +2022-06-05 06:39:43,502 - mmseg - INFO - Iter [36400/40000] lr: 6.865e-07, eta: 0:29:25, time: 0.439, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1329, decode.loss_mask: 0.4073, decode.loss_dice: 0.5684, decode.d0.loss_cls: 1.5072, decode.d0.loss_mask: 0.4479, decode.d0.loss_dice: 0.6586, decode.d1.loss_cls: 0.2857, decode.d1.loss_mask: 0.4243, decode.d1.loss_dice: 0.5969, decode.d2.loss_cls: 0.1900, decode.d2.loss_mask: 0.4129, decode.d2.loss_dice: 0.5751, decode.d3.loss_cls: 0.1610, decode.d3.loss_mask: 0.4085, decode.d3.loss_dice: 0.5633, decode.d4.loss_cls: 0.1513, decode.d4.loss_mask: 0.4073, decode.d4.loss_dice: 0.5658, decode.d5.loss_cls: 0.1405, decode.d5.loss_mask: 0.4086, decode.d5.loss_dice: 0.5675, decode.d6.loss_cls: 0.1334, decode.d6.loss_mask: 0.4073, decode.d6.loss_dice: 0.5661, decode.d7.loss_cls: 0.1351, decode.d7.loss_mask: 0.4073, decode.d7.loss_dice: 0.5669, decode.d8.loss_cls: 0.1399, decode.d8.loss_mask: 0.4068, decode.d8.loss_dice: 0.5651, loss: 12.9088 +2022-06-05 06:40:05,531 - mmseg - INFO - Iter [36450/40000] lr: 6.770e-07, eta: 0:29:00, time: 0.441, data_time: 0.008, memory: 31652, 
decode.loss_cls: 0.1376, decode.loss_mask: 0.4059, decode.loss_dice: 0.5618, decode.d0.loss_cls: 1.5140, decode.d0.loss_mask: 0.4464, decode.d0.loss_dice: 0.6566, decode.d1.loss_cls: 0.2781, decode.d1.loss_mask: 0.4212, decode.d1.loss_dice: 0.5979, decode.d2.loss_cls: 0.2015, decode.d2.loss_mask: 0.4128, decode.d2.loss_dice: 0.5743, decode.d3.loss_cls: 0.1654, decode.d3.loss_mask: 0.4106, decode.d3.loss_dice: 0.5679, decode.d4.loss_cls: 0.1542, decode.d4.loss_mask: 0.4081, decode.d4.loss_dice: 0.5678, decode.d5.loss_cls: 0.1525, decode.d5.loss_mask: 0.4083, decode.d5.loss_dice: 0.5688, decode.d6.loss_cls: 0.1413, decode.d6.loss_mask: 0.4067, decode.d6.loss_dice: 0.5662, decode.d7.loss_cls: 0.1370, decode.d7.loss_mask: 0.4052, decode.d7.loss_dice: 0.5630, decode.d8.loss_cls: 0.1359, decode.d8.loss_mask: 0.4067, decode.d8.loss_dice: 0.5649, loss: 12.9383 +2022-06-05 06:40:27,140 - mmseg - INFO - Iter [36500/40000] lr: 6.674e-07, eta: 0:28:35, time: 0.432, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1378, decode.loss_mask: 0.4074, decode.loss_dice: 0.5593, decode.d0.loss_cls: 1.5201, decode.d0.loss_mask: 0.4464, decode.d0.loss_dice: 0.6458, decode.d1.loss_cls: 0.2879, decode.d1.loss_mask: 0.4201, decode.d1.loss_dice: 0.5921, decode.d2.loss_cls: 0.2009, decode.d2.loss_mask: 0.4108, decode.d2.loss_dice: 0.5711, decode.d3.loss_cls: 0.1606, decode.d3.loss_mask: 0.4103, decode.d3.loss_dice: 0.5613, decode.d4.loss_cls: 0.1570, decode.d4.loss_mask: 0.4097, decode.d4.loss_dice: 0.5630, decode.d5.loss_cls: 0.1514, decode.d5.loss_mask: 0.4085, decode.d5.loss_dice: 0.5588, decode.d6.loss_cls: 0.1440, decode.d6.loss_mask: 0.4073, decode.d6.loss_dice: 0.5598, decode.d7.loss_cls: 0.1398, decode.d7.loss_mask: 0.4084, decode.d7.loss_dice: 0.5596, decode.d8.loss_cls: 0.1395, decode.d8.loss_mask: 0.4079, decode.d8.loss_dice: 0.5580, loss: 12.9046 +2022-06-05 06:40:51,662 - mmseg - INFO - Iter [36550/40000] lr: 6.579e-07, eta: 0:28:11, time: 0.490, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1209, decode.loss_mask: 0.3971, decode.loss_dice: 0.5525, decode.d0.loss_cls: 1.5084, decode.d0.loss_mask: 0.4413, decode.d0.loss_dice: 0.6451, decode.d1.loss_cls: 0.2617, decode.d1.loss_mask: 0.4163, decode.d1.loss_dice: 0.5903, decode.d2.loss_cls: 0.1812, decode.d2.loss_mask: 0.4044, decode.d2.loss_dice: 0.5646, decode.d3.loss_cls: 0.1458, decode.d3.loss_mask: 0.4018, decode.d3.loss_dice: 0.5565, decode.d4.loss_cls: 0.1330, decode.d4.loss_mask: 0.4006, decode.d4.loss_dice: 0.5583, decode.d5.loss_cls: 0.1275, decode.d5.loss_mask: 0.4009, decode.d5.loss_dice: 0.5571, decode.d6.loss_cls: 0.1234, decode.d6.loss_mask: 0.3991, decode.d6.loss_dice: 0.5524, decode.d7.loss_cls: 0.1175, decode.d7.loss_mask: 0.4003, decode.d7.loss_dice: 0.5535, decode.d8.loss_cls: 0.1179, decode.d8.loss_mask: 0.3991, decode.d8.loss_dice: 0.5522, loss: 12.5807 +2022-06-05 06:41:13,435 - mmseg - INFO - Iter [36600/40000] lr: 6.484e-07, eta: 0:27:46, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1253, decode.loss_mask: 0.3950, decode.loss_dice: 0.5583, decode.d0.loss_cls: 1.5174, decode.d0.loss_mask: 0.4317, decode.d0.loss_dice: 0.6442, decode.d1.loss_cls: 0.2683, decode.d1.loss_mask: 0.4128, decode.d1.loss_dice: 0.5924, decode.d2.loss_cls: 0.1925, decode.d2.loss_mask: 0.4033, decode.d2.loss_dice: 0.5675, decode.d3.loss_cls: 0.1538, decode.d3.loss_mask: 0.3992, decode.d3.loss_dice: 0.5661, decode.d4.loss_cls: 0.1429, decode.d4.loss_mask: 0.3975, decode.d4.loss_dice: 0.5625, decode.d5.loss_cls: 0.1402, 
decode.d5.loss_mask: 0.3984, decode.d5.loss_dice: 0.5622, decode.d6.loss_cls: 0.1349, decode.d6.loss_mask: 0.3961, decode.d6.loss_dice: 0.5631, decode.d7.loss_cls: 0.1283, decode.d7.loss_mask: 0.3957, decode.d7.loss_dice: 0.5638, decode.d8.loss_cls: 0.1232, decode.d8.loss_mask: 0.3963, decode.d8.loss_dice: 0.5611, loss: 12.6941 +2022-06-05 06:41:35,357 - mmseg - INFO - Iter [36650/40000] lr: 6.388e-07, eta: 0:27:21, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1408, decode.loss_mask: 0.4011, decode.loss_dice: 0.5775, decode.d0.loss_cls: 1.5326, decode.d0.loss_mask: 0.4412, decode.d0.loss_dice: 0.6801, decode.d1.loss_cls: 0.2982, decode.d1.loss_mask: 0.4155, decode.d1.loss_dice: 0.6099, decode.d2.loss_cls: 0.2152, decode.d2.loss_mask: 0.4084, decode.d2.loss_dice: 0.5854, decode.d3.loss_cls: 0.1729, decode.d3.loss_mask: 0.4050, decode.d3.loss_dice: 0.5811, decode.d4.loss_cls: 0.1608, decode.d4.loss_mask: 0.4033, decode.d4.loss_dice: 0.5793, decode.d5.loss_cls: 0.1607, decode.d5.loss_mask: 0.4025, decode.d5.loss_dice: 0.5774, decode.d6.loss_cls: 0.1539, decode.d6.loss_mask: 0.4014, decode.d6.loss_dice: 0.5777, decode.d7.loss_cls: 0.1472, decode.d7.loss_mask: 0.4013, decode.d7.loss_dice: 0.5743, decode.d8.loss_cls: 0.1482, decode.d8.loss_mask: 0.4008, decode.d8.loss_dice: 0.5758, loss: 13.1296 +2022-06-05 06:41:56,736 - mmseg - INFO - Iter [36700/40000] lr: 6.293e-07, eta: 0:26:56, time: 0.428, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1374, decode.loss_mask: 0.4124, decode.loss_dice: 0.5842, decode.d0.loss_cls: 1.5355, decode.d0.loss_mask: 0.4517, decode.d0.loss_dice: 0.6697, decode.d1.loss_cls: 0.2956, decode.d1.loss_mask: 0.4277, decode.d1.loss_dice: 0.6166, decode.d2.loss_cls: 0.2068, decode.d2.loss_mask: 0.4184, decode.d2.loss_dice: 0.5994, decode.d3.loss_cls: 0.1633, decode.d3.loss_mask: 0.4153, decode.d3.loss_dice: 0.5878, decode.d4.loss_cls: 0.1546, decode.d4.loss_mask: 0.4150, decode.d4.loss_dice: 0.5913, decode.d5.loss_cls: 0.1446, decode.d5.loss_mask: 0.4155, decode.d5.loss_dice: 0.5890, decode.d6.loss_cls: 0.1362, decode.d6.loss_mask: 0.4142, decode.d6.loss_dice: 0.5825, decode.d7.loss_cls: 0.1390, decode.d7.loss_mask: 0.4133, decode.d7.loss_dice: 0.5843, decode.d8.loss_cls: 0.1339, decode.d8.loss_mask: 0.4129, decode.d8.loss_dice: 0.5848, loss: 13.2329 +2022-06-05 06:42:18,518 - mmseg - INFO - Iter [36750/40000] lr: 6.198e-07, eta: 0:26:32, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1333, decode.loss_mask: 0.3999, decode.loss_dice: 0.5561, decode.d0.loss_cls: 1.4980, decode.d0.loss_mask: 0.4416, decode.d0.loss_dice: 0.6531, decode.d1.loss_cls: 0.2893, decode.d1.loss_mask: 0.4155, decode.d1.loss_dice: 0.5950, decode.d2.loss_cls: 0.2032, decode.d2.loss_mask: 0.4074, decode.d2.loss_dice: 0.5761, decode.d3.loss_cls: 0.1711, decode.d3.loss_mask: 0.4008, decode.d3.loss_dice: 0.5649, decode.d4.loss_cls: 0.1667, decode.d4.loss_mask: 0.4007, decode.d4.loss_dice: 0.5581, decode.d5.loss_cls: 0.1482, decode.d5.loss_mask: 0.4027, decode.d5.loss_dice: 0.5601, decode.d6.loss_cls: 0.1417, decode.d6.loss_mask: 0.4021, decode.d6.loss_dice: 0.5584, decode.d7.loss_cls: 0.1360, decode.d7.loss_mask: 0.4008, decode.d7.loss_dice: 0.5605, decode.d8.loss_cls: 0.1350, decode.d8.loss_mask: 0.4002, decode.d8.loss_dice: 0.5575, loss: 12.8338 +2022-06-05 06:42:40,161 - mmseg - INFO - Iter [36800/40000] lr: 6.102e-07, eta: 0:26:07, time: 0.432, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1434, decode.loss_mask: 0.4021, decode.loss_dice: 0.5657, 
decode.d0.loss_cls: 1.5337, decode.d0.loss_mask: 0.4429, decode.d0.loss_dice: 0.6621, decode.d1.loss_cls: 0.3334, decode.d1.loss_mask: 0.4178, decode.d1.loss_dice: 0.6066, decode.d2.loss_cls: 0.2237, decode.d2.loss_mask: 0.4089, decode.d2.loss_dice: 0.5859, decode.d3.loss_cls: 0.1758, decode.d3.loss_mask: 0.4061, decode.d3.loss_dice: 0.5761, decode.d4.loss_cls: 0.1724, decode.d4.loss_mask: 0.4046, decode.d4.loss_dice: 0.5720, decode.d5.loss_cls: 0.1547, decode.d5.loss_mask: 0.4042, decode.d5.loss_dice: 0.5689, decode.d6.loss_cls: 0.1543, decode.d6.loss_mask: 0.4019, decode.d6.loss_dice: 0.5716, decode.d7.loss_cls: 0.1537, decode.d7.loss_mask: 0.4018, decode.d7.loss_dice: 0.5691, decode.d8.loss_cls: 0.1510, decode.d8.loss_mask: 0.4020, decode.d8.loss_dice: 0.5672, loss: 13.1335 +2022-06-05 06:43:04,287 - mmseg - INFO - Iter [36850/40000] lr: 6.007e-07, eta: 0:25:42, time: 0.483, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1323, decode.loss_mask: 0.4111, decode.loss_dice: 0.5783, decode.d0.loss_cls: 1.5178, decode.d0.loss_mask: 0.4581, decode.d0.loss_dice: 0.6655, decode.d1.loss_cls: 0.2811, decode.d1.loss_mask: 0.4256, decode.d1.loss_dice: 0.6086, decode.d2.loss_cls: 0.1917, decode.d2.loss_mask: 0.4176, decode.d2.loss_dice: 0.5927, decode.d3.loss_cls: 0.1591, decode.d3.loss_mask: 0.4129, decode.d3.loss_dice: 0.5882, decode.d4.loss_cls: 0.1528, decode.d4.loss_mask: 0.4125, decode.d4.loss_dice: 0.5849, decode.d5.loss_cls: 0.1470, decode.d5.loss_mask: 0.4131, decode.d5.loss_dice: 0.5843, decode.d6.loss_cls: 0.1419, decode.d6.loss_mask: 0.4119, decode.d6.loss_dice: 0.5839, decode.d7.loss_cls: 0.1383, decode.d7.loss_mask: 0.4089, decode.d7.loss_dice: 0.5823, decode.d8.loss_cls: 0.1319, decode.d8.loss_mask: 0.4102, decode.d8.loss_dice: 0.5835, loss: 13.1282 +2022-06-05 06:43:25,490 - mmseg - INFO - Iter [36900/40000] lr: 5.912e-07, eta: 0:25:18, time: 0.424, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1383, decode.loss_mask: 0.4182, decode.loss_dice: 0.5756, decode.d0.loss_cls: 1.5214, decode.d0.loss_mask: 0.4624, decode.d0.loss_dice: 0.6594, decode.d1.loss_cls: 0.3070, decode.d1.loss_mask: 0.4318, decode.d1.loss_dice: 0.5992, decode.d2.loss_cls: 0.2093, decode.d2.loss_mask: 0.4246, decode.d2.loss_dice: 0.5849, decode.d3.loss_cls: 0.1670, decode.d3.loss_mask: 0.4224, decode.d3.loss_dice: 0.5793, decode.d4.loss_cls: 0.1624, decode.d4.loss_mask: 0.4207, decode.d4.loss_dice: 0.5786, decode.d5.loss_cls: 0.1497, decode.d5.loss_mask: 0.4187, decode.d5.loss_dice: 0.5750, decode.d6.loss_cls: 0.1461, decode.d6.loss_mask: 0.4193, decode.d6.loss_dice: 0.5750, decode.d7.loss_cls: 0.1408, decode.d7.loss_mask: 0.4184, decode.d7.loss_dice: 0.5764, decode.d8.loss_cls: 0.1406, decode.d8.loss_mask: 0.4179, decode.d8.loss_dice: 0.5747, loss: 13.2152 +2022-06-05 06:43:47,185 - mmseg - INFO - Iter [36950/40000] lr: 5.816e-07, eta: 0:24:53, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1491, decode.loss_mask: 0.4044, decode.loss_dice: 0.5741, decode.d0.loss_cls: 1.5302, decode.d0.loss_mask: 0.4466, decode.d0.loss_dice: 0.6672, decode.d1.loss_cls: 0.2997, decode.d1.loss_mask: 0.4196, decode.d1.loss_dice: 0.6090, decode.d2.loss_cls: 0.2173, decode.d2.loss_mask: 0.4112, decode.d2.loss_dice: 0.5891, decode.d3.loss_cls: 0.1752, decode.d3.loss_mask: 0.4108, decode.d3.loss_dice: 0.5797, decode.d4.loss_cls: 0.1635, decode.d4.loss_mask: 0.4080, decode.d4.loss_dice: 0.5817, decode.d5.loss_cls: 0.1556, decode.d5.loss_mask: 0.4070, decode.d5.loss_dice: 0.5812, decode.d6.loss_cls: 0.1515, 
decode.d6.loss_mask: 0.4051, decode.d6.loss_dice: 0.5794, decode.d7.loss_cls: 0.1506, decode.d7.loss_mask: 0.4053, decode.d7.loss_dice: 0.5780, decode.d8.loss_cls: 0.1481, decode.d8.loss_mask: 0.4038, decode.d8.loss_dice: 0.5744, loss: 13.1763 +2022-06-05 06:44:09,350 - mmseg - INFO - Saving checkpoint at 37000 iterations +2022-06-05 06:44:12,935 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:44:12,936 - mmseg - INFO - Iter [37000/40000] lr: 5.721e-07, eta: 0:24:29, time: 0.515, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1324, decode.loss_mask: 0.4140, decode.loss_dice: 0.5817, decode.d0.loss_cls: 1.5050, decode.d0.loss_mask: 0.4529, decode.d0.loss_dice: 0.6679, decode.d1.loss_cls: 0.2707, decode.d1.loss_mask: 0.4284, decode.d1.loss_dice: 0.6144, decode.d2.loss_cls: 0.1968, decode.d2.loss_mask: 0.4200, decode.d2.loss_dice: 0.5960, decode.d3.loss_cls: 0.1673, decode.d3.loss_mask: 0.4151, decode.d3.loss_dice: 0.5855, decode.d4.loss_cls: 0.1523, decode.d4.loss_mask: 0.4161, decode.d4.loss_dice: 0.5869, decode.d5.loss_cls: 0.1461, decode.d5.loss_mask: 0.4134, decode.d5.loss_dice: 0.5830, decode.d6.loss_cls: 0.1387, decode.d6.loss_mask: 0.4151, decode.d6.loss_dice: 0.5865, decode.d7.loss_cls: 0.1397, decode.d7.loss_mask: 0.4127, decode.d7.loss_dice: 0.5820, decode.d8.loss_cls: 0.1336, decode.d8.loss_mask: 0.4122, decode.d8.loss_dice: 0.5805, loss: 13.1469 +2022-06-05 06:44:34,990 - mmseg - INFO - Iter [37050/40000] lr: 5.626e-07, eta: 0:24:04, time: 0.441, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1530, decode.loss_mask: 0.4071, decode.loss_dice: 0.5582, decode.d0.loss_cls: 1.5210, decode.d0.loss_mask: 0.4480, decode.d0.loss_dice: 0.6519, decode.d1.loss_cls: 0.2921, decode.d1.loss_mask: 0.4219, decode.d1.loss_dice: 0.5906, decode.d2.loss_cls: 0.2027, decode.d2.loss_mask: 0.4136, decode.d2.loss_dice: 0.5736, decode.d3.loss_cls: 0.1764, decode.d3.loss_mask: 0.4111, decode.d3.loss_dice: 0.5639, decode.d4.loss_cls: 0.1673, decode.d4.loss_mask: 0.4070, decode.d4.loss_dice: 0.5683, decode.d5.loss_cls: 0.1574, decode.d5.loss_mask: 0.4066, decode.d5.loss_dice: 0.5633, decode.d6.loss_cls: 0.1571, decode.d6.loss_mask: 0.4070, decode.d6.loss_dice: 0.5635, decode.d7.loss_cls: 0.1523, decode.d7.loss_mask: 0.4072, decode.d7.loss_dice: 0.5637, decode.d8.loss_cls: 0.1526, decode.d8.loss_mask: 0.4068, decode.d8.loss_dice: 0.5660, loss: 13.0312 +2022-06-05 06:44:56,350 - mmseg - INFO - Iter [37100/40000] lr: 5.530e-07, eta: 0:23:39, time: 0.427, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1328, decode.loss_mask: 0.3931, decode.loss_dice: 0.5563, decode.d0.loss_cls: 1.5538, decode.d0.loss_mask: 0.4370, decode.d0.loss_dice: 0.6490, decode.d1.loss_cls: 0.2809, decode.d1.loss_mask: 0.4044, decode.d1.loss_dice: 0.5872, decode.d2.loss_cls: 0.1998, decode.d2.loss_mask: 0.3982, decode.d2.loss_dice: 0.5676, decode.d3.loss_cls: 0.1604, decode.d3.loss_mask: 0.3954, decode.d3.loss_dice: 0.5624, decode.d4.loss_cls: 0.1588, decode.d4.loss_mask: 0.3927, decode.d4.loss_dice: 0.5580, decode.d5.loss_cls: 0.1484, decode.d5.loss_mask: 0.3951, decode.d5.loss_dice: 0.5574, decode.d6.loss_cls: 0.1407, decode.d6.loss_mask: 0.3933, decode.d6.loss_dice: 0.5559, decode.d7.loss_cls: 0.1419, decode.d7.loss_mask: 0.3922, decode.d7.loss_dice: 0.5498, decode.d8.loss_cls: 0.1324, decode.d8.loss_mask: 0.3934, decode.d8.loss_dice: 0.5542, loss: 12.7426 +2022-06-05 06:45:20,667 - mmseg - INFO - Iter [37150/40000] lr: 5.435e-07, eta: 0:23:15, time: 0.486, data_time: 
0.056, memory: 31652, decode.loss_cls: 0.1211, decode.loss_mask: 0.3986, decode.loss_dice: 0.5430, decode.d0.loss_cls: 1.4915, decode.d0.loss_mask: 0.4399, decode.d0.loss_dice: 0.6315, decode.d1.loss_cls: 0.2695, decode.d1.loss_mask: 0.4160, decode.d1.loss_dice: 0.5786, decode.d2.loss_cls: 0.1781, decode.d2.loss_mask: 0.4080, decode.d2.loss_dice: 0.5573, decode.d3.loss_cls: 0.1481, decode.d3.loss_mask: 0.4046, decode.d3.loss_dice: 0.5458, decode.d4.loss_cls: 0.1431, decode.d4.loss_mask: 0.3995, decode.d4.loss_dice: 0.5487, decode.d5.loss_cls: 0.1330, decode.d5.loss_mask: 0.4002, decode.d5.loss_dice: 0.5487, decode.d6.loss_cls: 0.1233, decode.d6.loss_mask: 0.4000, decode.d6.loss_dice: 0.5463, decode.d7.loss_cls: 0.1246, decode.d7.loss_mask: 0.3989, decode.d7.loss_dice: 0.5468, decode.d8.loss_cls: 0.1204, decode.d8.loss_mask: 0.3997, decode.d8.loss_dice: 0.5446, loss: 12.5096 +2022-06-05 06:45:42,642 - mmseg - INFO - Iter [37200/40000] lr: 5.340e-07, eta: 0:22:50, time: 0.439, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1294, decode.loss_mask: 0.3903, decode.loss_dice: 0.5608, decode.d0.loss_cls: 1.5022, decode.d0.loss_mask: 0.4369, decode.d0.loss_dice: 0.6531, decode.d1.loss_cls: 0.2863, decode.d1.loss_mask: 0.4088, decode.d1.loss_dice: 0.5968, decode.d2.loss_cls: 0.1915, decode.d2.loss_mask: 0.3984, decode.d2.loss_dice: 0.5743, decode.d3.loss_cls: 0.1559, decode.d3.loss_mask: 0.3947, decode.d3.loss_dice: 0.5661, decode.d4.loss_cls: 0.1446, decode.d4.loss_mask: 0.3934, decode.d4.loss_dice: 0.5653, decode.d5.loss_cls: 0.1368, decode.d5.loss_mask: 0.3925, decode.d5.loss_dice: 0.5644, decode.d6.loss_cls: 0.1335, decode.d6.loss_mask: 0.3929, decode.d6.loss_dice: 0.5620, decode.d7.loss_cls: 0.1303, decode.d7.loss_mask: 0.3916, decode.d7.loss_dice: 0.5623, decode.d8.loss_cls: 0.1279, decode.d8.loss_mask: 0.3906, decode.d8.loss_dice: 0.5642, loss: 12.6981 +2022-06-05 06:46:04,466 - mmseg - INFO - Iter [37250/40000] lr: 5.245e-07, eta: 0:22:25, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1325, decode.loss_mask: 0.4073, decode.loss_dice: 0.5592, decode.d0.loss_cls: 1.5075, decode.d0.loss_mask: 0.4478, decode.d0.loss_dice: 0.6448, decode.d1.loss_cls: 0.2902, decode.d1.loss_mask: 0.4242, decode.d1.loss_dice: 0.5936, decode.d2.loss_cls: 0.2045, decode.d2.loss_mask: 0.4140, decode.d2.loss_dice: 0.5764, decode.d3.loss_cls: 0.1582, decode.d3.loss_mask: 0.4118, decode.d3.loss_dice: 0.5643, decode.d4.loss_cls: 0.1510, decode.d4.loss_mask: 0.4110, decode.d4.loss_dice: 0.5657, decode.d5.loss_cls: 0.1483, decode.d5.loss_mask: 0.4093, decode.d5.loss_dice: 0.5627, decode.d6.loss_cls: 0.1367, decode.d6.loss_mask: 0.4102, decode.d6.loss_dice: 0.5614, decode.d7.loss_cls: 0.1358, decode.d7.loss_mask: 0.4088, decode.d7.loss_dice: 0.5599, decode.d8.loss_cls: 0.1351, decode.d8.loss_mask: 0.4080, decode.d8.loss_dice: 0.5581, loss: 12.8984 +2022-06-05 06:46:27,294 - mmseg - INFO - Iter [37300/40000] lr: 5.149e-07, eta: 0:22:01, time: 0.457, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1353, decode.loss_mask: 0.4054, decode.loss_dice: 0.5623, decode.d0.loss_cls: 1.4909, decode.d0.loss_mask: 0.4479, decode.d0.loss_dice: 0.6526, decode.d1.loss_cls: 0.2730, decode.d1.loss_mask: 0.4225, decode.d1.loss_dice: 0.5958, decode.d2.loss_cls: 0.1991, decode.d2.loss_mask: 0.4113, decode.d2.loss_dice: 0.5729, decode.d3.loss_cls: 0.1688, decode.d3.loss_mask: 0.4087, decode.d3.loss_dice: 0.5656, decode.d4.loss_cls: 0.1524, decode.d4.loss_mask: 0.4084, decode.d4.loss_dice: 0.5678, decode.d5.loss_cls: 
0.1448, decode.d5.loss_mask: 0.4078, decode.d5.loss_dice: 0.5652, decode.d6.loss_cls: 0.1400, decode.d6.loss_mask: 0.4069, decode.d6.loss_dice: 0.5606, decode.d7.loss_cls: 0.1407, decode.d7.loss_mask: 0.4056, decode.d7.loss_dice: 0.5614, decode.d8.loss_cls: 0.1342, decode.d8.loss_mask: 0.4063, decode.d8.loss_dice: 0.5659, loss: 12.8801 +2022-06-05 06:46:49,491 - mmseg - INFO - Iter [37350/40000] lr: 5.054e-07, eta: 0:21:36, time: 0.444, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1171, decode.loss_mask: 0.4244, decode.loss_dice: 0.5493, decode.d0.loss_cls: 1.4820, decode.d0.loss_mask: 0.4683, decode.d0.loss_dice: 0.6377, decode.d1.loss_cls: 0.2492, decode.d1.loss_mask: 0.4442, decode.d1.loss_dice: 0.5866, decode.d2.loss_cls: 0.1696, decode.d2.loss_mask: 0.4335, decode.d2.loss_dice: 0.5661, decode.d3.loss_cls: 0.1478, decode.d3.loss_mask: 0.4293, decode.d3.loss_dice: 0.5564, decode.d4.loss_cls: 0.1343, decode.d4.loss_mask: 0.4278, decode.d4.loss_dice: 0.5555, decode.d5.loss_cls: 0.1315, decode.d5.loss_mask: 0.4256, decode.d5.loss_dice: 0.5558, decode.d6.loss_cls: 0.1247, decode.d6.loss_mask: 0.4238, decode.d6.loss_dice: 0.5497, decode.d7.loss_cls: 0.1190, decode.d7.loss_mask: 0.4253, decode.d7.loss_dice: 0.5513, decode.d8.loss_cls: 0.1164, decode.d8.loss_mask: 0.4250, decode.d8.loss_dice: 0.5522, loss: 12.7793 +2022-06-05 06:47:11,539 - mmseg - INFO - Iter [37400/40000] lr: 4.959e-07, eta: 0:21:12, time: 0.440, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1458, decode.loss_mask: 0.4143, decode.loss_dice: 0.5877, decode.d0.loss_cls: 1.5099, decode.d0.loss_mask: 0.4560, decode.d0.loss_dice: 0.6742, decode.d1.loss_cls: 0.3026, decode.d1.loss_mask: 0.4320, decode.d1.loss_dice: 0.6168, decode.d2.loss_cls: 0.2021, decode.d2.loss_mask: 0.4248, decode.d2.loss_dice: 0.6043, decode.d3.loss_cls: 0.1681, decode.d3.loss_mask: 0.4226, decode.d3.loss_dice: 0.5941, decode.d4.loss_cls: 0.1616, decode.d4.loss_mask: 0.4208, decode.d4.loss_dice: 0.5947, decode.d5.loss_cls: 0.1562, decode.d5.loss_mask: 0.4186, decode.d5.loss_dice: 0.5913, decode.d6.loss_cls: 0.1469, decode.d6.loss_mask: 0.4163, decode.d6.loss_dice: 0.5878, decode.d7.loss_cls: 0.1460, decode.d7.loss_mask: 0.4133, decode.d7.loss_dice: 0.5869, decode.d8.loss_cls: 0.1373, decode.d8.loss_mask: 0.4178, decode.d8.loss_dice: 0.5926, loss: 13.3433 +2022-06-05 06:47:36,346 - mmseg - INFO - Iter [37450/40000] lr: 4.863e-07, eta: 0:20:47, time: 0.497, data_time: 0.060, memory: 31652, decode.loss_cls: 0.1529, decode.loss_mask: 0.4015, decode.loss_dice: 0.5892, decode.d0.loss_cls: 1.5589, decode.d0.loss_mask: 0.4444, decode.d0.loss_dice: 0.6834, decode.d1.loss_cls: 0.3300, decode.d1.loss_mask: 0.4172, decode.d1.loss_dice: 0.6293, decode.d2.loss_cls: 0.2384, decode.d2.loss_mask: 0.4056, decode.d2.loss_dice: 0.6037, decode.d3.loss_cls: 0.1841, decode.d3.loss_mask: 0.4019, decode.d3.loss_dice: 0.5962, decode.d4.loss_cls: 0.1745, decode.d4.loss_mask: 0.4025, decode.d4.loss_dice: 0.5955, decode.d5.loss_cls: 0.1672, decode.d5.loss_mask: 0.4016, decode.d5.loss_dice: 0.5916, decode.d6.loss_cls: 0.1653, decode.d6.loss_mask: 0.4011, decode.d6.loss_dice: 0.5892, decode.d7.loss_cls: 0.1613, decode.d7.loss_mask: 0.4002, decode.d7.loss_dice: 0.5883, decode.d8.loss_cls: 0.1582, decode.d8.loss_mask: 0.4008, decode.d8.loss_dice: 0.5883, loss: 13.4223 +2022-06-05 06:47:58,319 - mmseg - INFO - Iter [37500/40000] lr: 4.768e-07, eta: 0:20:22, time: 0.439, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1357, decode.loss_mask: 0.3901, decode.loss_dice: 0.5549, 
decode.d0.loss_cls: 1.5556, decode.d0.loss_mask: 0.4311, decode.d0.loss_dice: 0.6449, decode.d1.loss_cls: 0.3000, decode.d1.loss_mask: 0.4048, decode.d1.loss_dice: 0.5948, decode.d2.loss_cls: 0.2005, decode.d2.loss_mask: 0.3972, decode.d2.loss_dice: 0.5743, decode.d3.loss_cls: 0.1702, decode.d3.loss_mask: 0.3930, decode.d3.loss_dice: 0.5623, decode.d4.loss_cls: 0.1577, decode.d4.loss_mask: 0.3906, decode.d4.loss_dice: 0.5627, decode.d5.loss_cls: 0.1418, decode.d5.loss_mask: 0.3910, decode.d5.loss_dice: 0.5576, decode.d6.loss_cls: 0.1407, decode.d6.loss_mask: 0.3913, decode.d6.loss_dice: 0.5588, decode.d7.loss_cls: 0.1407, decode.d7.loss_mask: 0.3904, decode.d7.loss_dice: 0.5613, decode.d8.loss_cls: 0.1341, decode.d8.loss_mask: 0.3894, decode.d8.loss_dice: 0.5600, loss: 12.7774 +2022-06-05 06:48:20,667 - mmseg - INFO - Iter [37550/40000] lr: 4.673e-07, eta: 0:19:58, time: 0.447, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1418, decode.loss_mask: 0.3829, decode.loss_dice: 0.5566, decode.d0.loss_cls: 1.5084, decode.d0.loss_mask: 0.4228, decode.d0.loss_dice: 0.6452, decode.d1.loss_cls: 0.2901, decode.d1.loss_mask: 0.3946, decode.d1.loss_dice: 0.5876, decode.d2.loss_cls: 0.2086, decode.d2.loss_mask: 0.3883, decode.d2.loss_dice: 0.5682, decode.d3.loss_cls: 0.1753, decode.d3.loss_mask: 0.3876, decode.d3.loss_dice: 0.5663, decode.d4.loss_cls: 0.1624, decode.d4.loss_mask: 0.3856, decode.d4.loss_dice: 0.5641, decode.d5.loss_cls: 0.1526, decode.d5.loss_mask: 0.3835, decode.d5.loss_dice: 0.5593, decode.d6.loss_cls: 0.1473, decode.d6.loss_mask: 0.3833, decode.d6.loss_dice: 0.5624, decode.d7.loss_cls: 0.1468, decode.d7.loss_mask: 0.3823, decode.d7.loss_dice: 0.5577, decode.d8.loss_cls: 0.1465, decode.d8.loss_mask: 0.3825, decode.d8.loss_dice: 0.5571, loss: 12.6977 +2022-06-05 06:48:42,373 - mmseg - INFO - Iter [37600/40000] lr: 4.577e-07, eta: 0:19:33, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1199, decode.loss_mask: 0.3972, decode.loss_dice: 0.5556, decode.d0.loss_cls: 1.4660, decode.d0.loss_mask: 0.4353, decode.d0.loss_dice: 0.6391, decode.d1.loss_cls: 0.2662, decode.d1.loss_mask: 0.4135, decode.d1.loss_dice: 0.5868, decode.d2.loss_cls: 0.1756, decode.d2.loss_mask: 0.4050, decode.d2.loss_dice: 0.5667, decode.d3.loss_cls: 0.1514, decode.d3.loss_mask: 0.4003, decode.d3.loss_dice: 0.5613, decode.d4.loss_cls: 0.1368, decode.d4.loss_mask: 0.3992, decode.d4.loss_dice: 0.5614, decode.d5.loss_cls: 0.1270, decode.d5.loss_mask: 0.3989, decode.d5.loss_dice: 0.5580, decode.d6.loss_cls: 0.1252, decode.d6.loss_mask: 0.3977, decode.d6.loss_dice: 0.5591, decode.d7.loss_cls: 0.1225, decode.d7.loss_mask: 0.3973, decode.d7.loss_dice: 0.5531, decode.d8.loss_cls: 0.1232, decode.d8.loss_mask: 0.3967, decode.d8.loss_dice: 0.5545, loss: 12.5505 +2022-06-05 06:49:04,069 - mmseg - INFO - Iter [37650/40000] lr: 4.482e-07, eta: 0:19:09, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1433, decode.loss_mask: 0.4020, decode.loss_dice: 0.5714, decode.d0.loss_cls: 1.5348, decode.d0.loss_mask: 0.4433, decode.d0.loss_dice: 0.6631, decode.d1.loss_cls: 0.2886, decode.d1.loss_mask: 0.4173, decode.d1.loss_dice: 0.6037, decode.d2.loss_cls: 0.2053, decode.d2.loss_mask: 0.4088, decode.d2.loss_dice: 0.5821, decode.d3.loss_cls: 0.1731, decode.d3.loss_mask: 0.4044, decode.d3.loss_dice: 0.5764, decode.d4.loss_cls: 0.1604, decode.d4.loss_mask: 0.4029, decode.d4.loss_dice: 0.5742, decode.d5.loss_cls: 0.1537, decode.d5.loss_mask: 0.4027, decode.d5.loss_dice: 0.5697, decode.d6.loss_cls: 0.1491, 
decode.d6.loss_mask: 0.4041, decode.d6.loss_dice: 0.5693, decode.d7.loss_cls: 0.1491, decode.d7.loss_mask: 0.4027, decode.d7.loss_dice: 0.5672, decode.d8.loss_cls: 0.1429, decode.d8.loss_mask: 0.4029, decode.d8.loss_dice: 0.5709, loss: 13.0394 +2022-06-05 06:49:26,355 - mmseg - INFO - Iter [37700/40000] lr: 4.387e-07, eta: 0:18:44, time: 0.445, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1198, decode.loss_mask: 0.4099, decode.loss_dice: 0.5621, decode.d0.loss_cls: 1.5009, decode.d0.loss_mask: 0.4547, decode.d0.loss_dice: 0.6537, decode.d1.loss_cls: 0.2683, decode.d1.loss_mask: 0.4260, decode.d1.loss_dice: 0.5984, decode.d2.loss_cls: 0.1897, decode.d2.loss_mask: 0.4164, decode.d2.loss_dice: 0.5778, decode.d3.loss_cls: 0.1468, decode.d3.loss_mask: 0.4130, decode.d3.loss_dice: 0.5666, decode.d4.loss_cls: 0.1385, decode.d4.loss_mask: 0.4115, decode.d4.loss_dice: 0.5658, decode.d5.loss_cls: 0.1318, decode.d5.loss_mask: 0.4109, decode.d5.loss_dice: 0.5631, decode.d6.loss_cls: 0.1262, decode.d6.loss_mask: 0.4116, decode.d6.loss_dice: 0.5643, decode.d7.loss_cls: 0.1221, decode.d7.loss_mask: 0.4106, decode.d7.loss_dice: 0.5623, decode.d8.loss_cls: 0.1194, decode.d8.loss_mask: 0.4095, decode.d8.loss_dice: 0.5620, loss: 12.8135 +2022-06-05 06:49:48,287 - mmseg - INFO - Iter [37750/40000] lr: 4.291e-07, eta: 0:18:19, time: 0.439, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1576, decode.loss_mask: 0.4044, decode.loss_dice: 0.5766, decode.d0.loss_cls: 1.5481, decode.d0.loss_mask: 0.4476, decode.d0.loss_dice: 0.6717, decode.d1.loss_cls: 0.2914, decode.d1.loss_mask: 0.4211, decode.d1.loss_dice: 0.6111, decode.d2.loss_cls: 0.2135, decode.d2.loss_mask: 0.4117, decode.d2.loss_dice: 0.5858, decode.d3.loss_cls: 0.1872, decode.d3.loss_mask: 0.4091, decode.d3.loss_dice: 0.5800, decode.d4.loss_cls: 0.1721, decode.d4.loss_mask: 0.4073, decode.d4.loss_dice: 0.5789, decode.d5.loss_cls: 0.1653, decode.d5.loss_mask: 0.4068, decode.d5.loss_dice: 0.5807, decode.d6.loss_cls: 0.1549, decode.d6.loss_mask: 0.4071, decode.d6.loss_dice: 0.5798, decode.d7.loss_cls: 0.1546, decode.d7.loss_mask: 0.4045, decode.d7.loss_dice: 0.5810, decode.d8.loss_cls: 0.1528, decode.d8.loss_mask: 0.4045, decode.d8.loss_dice: 0.5795, loss: 13.2468 +2022-06-05 06:50:13,385 - mmseg - INFO - Iter [37800/40000] lr: 4.196e-07, eta: 0:17:55, time: 0.501, data_time: 0.060, memory: 31652, decode.loss_cls: 0.1240, decode.loss_mask: 0.4002, decode.loss_dice: 0.5780, decode.d0.loss_cls: 1.5652, decode.d0.loss_mask: 0.4409, decode.d0.loss_dice: 0.6612, decode.d1.loss_cls: 0.2945, decode.d1.loss_mask: 0.4157, decode.d1.loss_dice: 0.6090, decode.d2.loss_cls: 0.2015, decode.d2.loss_mask: 0.4085, decode.d2.loss_dice: 0.5910, decode.d3.loss_cls: 0.1517, decode.d3.loss_mask: 0.4049, decode.d3.loss_dice: 0.5875, decode.d4.loss_cls: 0.1428, decode.d4.loss_mask: 0.4037, decode.d4.loss_dice: 0.5880, decode.d5.loss_cls: 0.1351, decode.d5.loss_mask: 0.4024, decode.d5.loss_dice: 0.5833, decode.d6.loss_cls: 0.1240, decode.d6.loss_mask: 0.4018, decode.d6.loss_dice: 0.5826, decode.d7.loss_cls: 0.1276, decode.d7.loss_mask: 0.4016, decode.d7.loss_dice: 0.5807, decode.d8.loss_cls: 0.1240, decode.d8.loss_mask: 0.4005, decode.d8.loss_dice: 0.5825, loss: 13.0143 +2022-06-05 06:50:35,463 - mmseg - INFO - Iter [37850/40000] lr: 4.101e-07, eta: 0:17:30, time: 0.442, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1056, decode.loss_mask: 0.3897, decode.loss_dice: 0.5319, decode.d0.loss_cls: 1.4683, decode.d0.loss_mask: 0.4341, decode.d0.loss_dice: 0.6150, 
decode.d1.loss_cls: 0.2525, decode.d1.loss_mask: 0.4044, decode.d1.loss_dice: 0.5639, decode.d2.loss_cls: 0.1672, decode.d2.loss_mask: 0.3977, decode.d2.loss_dice: 0.5420, decode.d3.loss_cls: 0.1419, decode.d3.loss_mask: 0.3955, decode.d3.loss_dice: 0.5356, decode.d4.loss_cls: 0.1346, decode.d4.loss_mask: 0.3920, decode.d4.loss_dice: 0.5327, decode.d5.loss_cls: 0.1209, decode.d5.loss_mask: 0.3914, decode.d5.loss_dice: 0.5327, decode.d6.loss_cls: 0.1195, decode.d6.loss_mask: 0.3903, decode.d6.loss_dice: 0.5282, decode.d7.loss_cls: 0.1132, decode.d7.loss_mask: 0.3895, decode.d7.loss_dice: 0.5285, decode.d8.loss_cls: 0.1101, decode.d8.loss_mask: 0.3902, decode.d8.loss_dice: 0.5296, loss: 12.1488 +2022-06-05 06:50:57,240 - mmseg - INFO - Iter [37900/40000] lr: 4.005e-07, eta: 0:17:06, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1339, decode.loss_mask: 0.4015, decode.loss_dice: 0.5704, decode.d0.loss_cls: 1.5706, decode.d0.loss_mask: 0.4414, decode.d0.loss_dice: 0.6610, decode.d1.loss_cls: 0.2975, decode.d1.loss_mask: 0.4174, decode.d1.loss_dice: 0.6076, decode.d2.loss_cls: 0.2053, decode.d2.loss_mask: 0.4104, decode.d2.loss_dice: 0.5849, decode.d3.loss_cls: 0.1736, decode.d3.loss_mask: 0.4079, decode.d3.loss_dice: 0.5758, decode.d4.loss_cls: 0.1573, decode.d4.loss_mask: 0.4043, decode.d4.loss_dice: 0.5722, decode.d5.loss_cls: 0.1469, decode.d5.loss_mask: 0.4042, decode.d5.loss_dice: 0.5682, decode.d6.loss_cls: 0.1391, decode.d6.loss_mask: 0.4037, decode.d6.loss_dice: 0.5675, decode.d7.loss_cls: 0.1340, decode.d7.loss_mask: 0.4019, decode.d7.loss_dice: 0.5679, decode.d8.loss_cls: 0.1362, decode.d8.loss_mask: 0.4026, decode.d8.loss_dice: 0.5728, loss: 13.0380 +2022-06-05 06:51:19,378 - mmseg - INFO - Iter [37950/40000] lr: 3.910e-07, eta: 0:16:41, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1316, decode.loss_mask: 0.3861, decode.loss_dice: 0.5610, decode.d0.loss_cls: 1.4918, decode.d0.loss_mask: 0.4351, decode.d0.loss_dice: 0.6562, decode.d1.loss_cls: 0.2946, decode.d1.loss_mask: 0.4027, decode.d1.loss_dice: 0.5958, decode.d2.loss_cls: 0.2116, decode.d2.loss_mask: 0.3938, decode.d2.loss_dice: 0.5728, decode.d3.loss_cls: 0.1661, decode.d3.loss_mask: 0.3915, decode.d3.loss_dice: 0.5677, decode.d4.loss_cls: 0.1500, decode.d4.loss_mask: 0.3890, decode.d4.loss_dice: 0.5672, decode.d5.loss_cls: 0.1362, decode.d5.loss_mask: 0.3873, decode.d5.loss_dice: 0.5618, decode.d6.loss_cls: 0.1336, decode.d6.loss_mask: 0.3878, decode.d6.loss_dice: 0.5614, decode.d7.loss_cls: 0.1310, decode.d7.loss_mask: 0.3863, decode.d7.loss_dice: 0.5608, decode.d8.loss_cls: 0.1316, decode.d8.loss_mask: 0.3862, decode.d8.loss_dice: 0.5623, loss: 12.6908 +2022-06-05 06:51:41,196 - mmseg - INFO - Saving checkpoint at 38000 iterations +2022-06-05 06:51:44,552 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:51:44,552 - mmseg - INFO - Iter [38000/40000] lr: 3.815e-07, eta: 0:16:17, time: 0.504, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1384, decode.loss_mask: 0.4037, decode.loss_dice: 0.5683, decode.d0.loss_cls: 1.5048, decode.d0.loss_mask: 0.4426, decode.d0.loss_dice: 0.6524, decode.d1.loss_cls: 0.2808, decode.d1.loss_mask: 0.4213, decode.d1.loss_dice: 0.5958, decode.d2.loss_cls: 0.2016, decode.d2.loss_mask: 0.4108, decode.d2.loss_dice: 0.5748, decode.d3.loss_cls: 0.1691, decode.d3.loss_mask: 0.4075, decode.d3.loss_dice: 0.5644, decode.d4.loss_cls: 0.1547, decode.d4.loss_mask: 0.4056, decode.d4.loss_dice: 0.5666, 
decode.d5.loss_cls: 0.1478, decode.d5.loss_mask: 0.4061, decode.d5.loss_dice: 0.5692, decode.d6.loss_cls: 0.1402, decode.d6.loss_mask: 0.4054, decode.d6.loss_dice: 0.5656, decode.d7.loss_cls: 0.1363, decode.d7.loss_mask: 0.4029, decode.d7.loss_dice: 0.5678, decode.d8.loss_cls: 0.1353, decode.d8.loss_mask: 0.4041, decode.d8.loss_dice: 0.5673, loss: 12.9112 +2022-06-05 06:52:07,135 - mmseg - INFO - Iter [38050/40000] lr: 3.719e-07, eta: 0:15:52, time: 0.452, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1336, decode.loss_mask: 0.3859, decode.loss_dice: 0.5427, decode.d0.loss_cls: 1.5307, decode.d0.loss_mask: 0.4306, decode.d0.loss_dice: 0.6340, decode.d1.loss_cls: 0.2973, decode.d1.loss_mask: 0.4039, decode.d1.loss_dice: 0.5773, decode.d2.loss_cls: 0.2020, decode.d2.loss_mask: 0.3964, decode.d2.loss_dice: 0.5558, decode.d3.loss_cls: 0.1586, decode.d3.loss_mask: 0.3929, decode.d3.loss_dice: 0.5498, decode.d4.loss_cls: 0.1556, decode.d4.loss_mask: 0.3914, decode.d4.loss_dice: 0.5481, decode.d5.loss_cls: 0.1470, decode.d5.loss_mask: 0.3896, decode.d5.loss_dice: 0.5454, decode.d6.loss_cls: 0.1392, decode.d6.loss_mask: 0.3888, decode.d6.loss_dice: 0.5439, decode.d7.loss_cls: 0.1386, decode.d7.loss_mask: 0.3872, decode.d7.loss_dice: 0.5390, decode.d8.loss_cls: 0.1327, decode.d8.loss_mask: 0.3869, decode.d8.loss_dice: 0.5423, loss: 12.5673 +2022-06-05 06:52:31,680 - mmseg - INFO - Iter [38100/40000] lr: 3.624e-07, eta: 0:15:28, time: 0.491, data_time: 0.059, memory: 31652, decode.loss_cls: 0.1375, decode.loss_mask: 0.3975, decode.loss_dice: 0.5576, decode.d0.loss_cls: 1.4962, decode.d0.loss_mask: 0.4389, decode.d0.loss_dice: 0.6481, decode.d1.loss_cls: 0.2746, decode.d1.loss_mask: 0.4127, decode.d1.loss_dice: 0.5936, decode.d2.loss_cls: 0.2004, decode.d2.loss_mask: 0.4043, decode.d2.loss_dice: 0.5720, decode.d3.loss_cls: 0.1635, decode.d3.loss_mask: 0.4015, decode.d3.loss_dice: 0.5650, decode.d4.loss_cls: 0.1580, decode.d4.loss_mask: 0.4004, decode.d4.loss_dice: 0.5620, decode.d5.loss_cls: 0.1476, decode.d5.loss_mask: 0.3999, decode.d5.loss_dice: 0.5637, decode.d6.loss_cls: 0.1434, decode.d6.loss_mask: 0.3993, decode.d6.loss_dice: 0.5642, decode.d7.loss_cls: 0.1365, decode.d7.loss_mask: 0.3985, decode.d7.loss_dice: 0.5620, decode.d8.loss_cls: 0.1407, decode.d8.loss_mask: 0.3979, decode.d8.loss_dice: 0.5605, loss: 12.7982 +2022-06-05 06:52:53,326 - mmseg - INFO - Iter [38150/40000] lr: 3.529e-07, eta: 0:15:03, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1323, decode.loss_mask: 0.3905, decode.loss_dice: 0.5633, decode.d0.loss_cls: 1.5626, decode.d0.loss_mask: 0.4303, decode.d0.loss_dice: 0.6606, decode.d1.loss_cls: 0.2940, decode.d1.loss_mask: 0.4083, decode.d1.loss_dice: 0.5937, decode.d2.loss_cls: 0.2103, decode.d2.loss_mask: 0.3992, decode.d2.loss_dice: 0.5692, decode.d3.loss_cls: 0.1626, decode.d3.loss_mask: 0.3950, decode.d3.loss_dice: 0.5638, decode.d4.loss_cls: 0.1496, decode.d4.loss_mask: 0.3931, decode.d4.loss_dice: 0.5644, decode.d5.loss_cls: 0.1431, decode.d5.loss_mask: 0.3923, decode.d5.loss_dice: 0.5674, decode.d6.loss_cls: 0.1375, decode.d6.loss_mask: 0.3927, decode.d6.loss_dice: 0.5617, decode.d7.loss_cls: 0.1355, decode.d7.loss_mask: 0.3919, decode.d7.loss_dice: 0.5623, decode.d8.loss_cls: 0.1335, decode.d8.loss_mask: 0.3915, decode.d8.loss_dice: 0.5602, loss: 12.8126 +2022-06-05 06:53:15,007 - mmseg - INFO - Iter [38200/40000] lr: 3.433e-07, eta: 0:14:39, time: 0.434, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1139, decode.loss_mask: 0.4147, 
decode.loss_dice: 0.5467, decode.d0.loss_cls: 1.5060, decode.d0.loss_mask: 0.4571, decode.d0.loss_dice: 0.6443, decode.d1.loss_cls: 0.2670, decode.d1.loss_mask: 0.4292, decode.d1.loss_dice: 0.5817, decode.d2.loss_cls: 0.1788, decode.d2.loss_mask: 0.4207, decode.d2.loss_dice: 0.5625, decode.d3.loss_cls: 0.1389, decode.d3.loss_mask: 0.4196, decode.d3.loss_dice: 0.5556, decode.d4.loss_cls: 0.1324, decode.d4.loss_mask: 0.4175, decode.d4.loss_dice: 0.5540, decode.d5.loss_cls: 0.1265, decode.d5.loss_mask: 0.4150, decode.d5.loss_dice: 0.5535, decode.d6.loss_cls: 0.1213, decode.d6.loss_mask: 0.4166, decode.d6.loss_dice: 0.5537, decode.d7.loss_cls: 0.1148, decode.d7.loss_mask: 0.4163, decode.d7.loss_dice: 0.5521, decode.d8.loss_cls: 0.1091, decode.d8.loss_mask: 0.4149, decode.d8.loss_dice: 0.5487, loss: 12.6833 +2022-06-05 06:53:36,865 - mmseg - INFO - Iter [38250/40000] lr: 3.338e-07, eta: 0:14:14, time: 0.437, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1226, decode.loss_mask: 0.3916, decode.loss_dice: 0.5593, decode.d0.loss_cls: 1.4633, decode.d0.loss_mask: 0.4357, decode.d0.loss_dice: 0.6467, decode.d1.loss_cls: 0.2763, decode.d1.loss_mask: 0.4084, decode.d1.loss_dice: 0.5873, decode.d2.loss_cls: 0.1911, decode.d2.loss_mask: 0.3993, decode.d2.loss_dice: 0.5737, decode.d3.loss_cls: 0.1557, decode.d3.loss_mask: 0.3947, decode.d3.loss_dice: 0.5641, decode.d4.loss_cls: 0.1392, decode.d4.loss_mask: 0.3933, decode.d4.loss_dice: 0.5628, decode.d5.loss_cls: 0.1317, decode.d5.loss_mask: 0.3932, decode.d5.loss_dice: 0.5634, decode.d6.loss_cls: 0.1293, decode.d6.loss_mask: 0.3928, decode.d6.loss_dice: 0.5611, decode.d7.loss_cls: 0.1288, decode.d7.loss_mask: 0.3917, decode.d7.loss_dice: 0.5578, decode.d8.loss_cls: 0.1241, decode.d8.loss_mask: 0.3918, decode.d8.loss_dice: 0.5622, loss: 12.5930 +2022-06-05 06:53:58,636 - mmseg - INFO - Iter [38300/40000] lr: 3.243e-07, eta: 0:13:50, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1404, decode.loss_mask: 0.4046, decode.loss_dice: 0.5681, decode.d0.loss_cls: 1.5586, decode.d0.loss_mask: 0.4446, decode.d0.loss_dice: 0.6613, decode.d1.loss_cls: 0.3042, decode.d1.loss_mask: 0.4199, decode.d1.loss_dice: 0.6028, decode.d2.loss_cls: 0.2131, decode.d2.loss_mask: 0.4127, decode.d2.loss_dice: 0.5786, decode.d3.loss_cls: 0.1701, decode.d3.loss_mask: 0.4080, decode.d3.loss_dice: 0.5741, decode.d4.loss_cls: 0.1610, decode.d4.loss_mask: 0.4067, decode.d4.loss_dice: 0.5712, decode.d5.loss_cls: 0.1537, decode.d5.loss_mask: 0.4045, decode.d5.loss_dice: 0.5687, decode.d6.loss_cls: 0.1476, decode.d6.loss_mask: 0.4049, decode.d6.loss_dice: 0.5668, decode.d7.loss_cls: 0.1489, decode.d7.loss_mask: 0.4048, decode.d7.loss_dice: 0.5642, decode.d8.loss_cls: 0.1423, decode.d8.loss_mask: 0.4050, decode.d8.loss_dice: 0.5653, loss: 13.0768 +2022-06-05 06:54:21,451 - mmseg - INFO - Iter [38350/40000] lr: 3.147e-07, eta: 0:13:25, time: 0.456, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1109, decode.loss_mask: 0.3937, decode.loss_dice: 0.5391, decode.d0.loss_cls: 1.4747, decode.d0.loss_mask: 0.4358, decode.d0.loss_dice: 0.6318, decode.d1.loss_cls: 0.2696, decode.d1.loss_mask: 0.4132, decode.d1.loss_dice: 0.5718, decode.d2.loss_cls: 0.1801, decode.d2.loss_mask: 0.4033, decode.d2.loss_dice: 0.5512, decode.d3.loss_cls: 0.1432, decode.d3.loss_mask: 0.3989, decode.d3.loss_dice: 0.5454, decode.d4.loss_cls: 0.1319, decode.d4.loss_mask: 0.3965, decode.d4.loss_dice: 0.5449, decode.d5.loss_cls: 0.1272, decode.d5.loss_mask: 0.3950, decode.d5.loss_dice: 0.5380, 
decode.d6.loss_cls: 0.1198, decode.d6.loss_mask: 0.3950, decode.d6.loss_dice: 0.5402, decode.d7.loss_cls: 0.1165, decode.d7.loss_mask: 0.3938, decode.d7.loss_dice: 0.5385, decode.d8.loss_cls: 0.1185, decode.d8.loss_mask: 0.3936, decode.d8.loss_dice: 0.5387, loss: 12.3507 +2022-06-05 06:54:45,853 - mmseg - INFO - Iter [38400/40000] lr: 3.052e-07, eta: 0:13:01, time: 0.488, data_time: 0.060, memory: 31652, decode.loss_cls: 0.1399, decode.loss_mask: 0.3808, decode.loss_dice: 0.5335, decode.d0.loss_cls: 1.5096, decode.d0.loss_mask: 0.4227, decode.d0.loss_dice: 0.6366, decode.d1.loss_cls: 0.2811, decode.d1.loss_mask: 0.3974, decode.d1.loss_dice: 0.5799, decode.d2.loss_cls: 0.1943, decode.d2.loss_mask: 0.3895, decode.d2.loss_dice: 0.5557, decode.d3.loss_cls: 0.1625, decode.d3.loss_mask: 0.3855, decode.d3.loss_dice: 0.5495, decode.d4.loss_cls: 0.1457, decode.d4.loss_mask: 0.3839, decode.d4.loss_dice: 0.5488, decode.d5.loss_cls: 0.1394, decode.d5.loss_mask: 0.3833, decode.d5.loss_dice: 0.5457, decode.d6.loss_cls: 0.1405, decode.d6.loss_mask: 0.3841, decode.d6.loss_dice: 0.5409, decode.d7.loss_cls: 0.1356, decode.d7.loss_mask: 0.3806, decode.d7.loss_dice: 0.5383, decode.d8.loss_cls: 0.1317, decode.d8.loss_mask: 0.3808, decode.d8.loss_dice: 0.5406, loss: 12.4384 +2022-06-05 06:55:08,097 - mmseg - INFO - Iter [38450/40000] lr: 2.957e-07, eta: 0:12:36, time: 0.446, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1254, decode.loss_mask: 0.3945, decode.loss_dice: 0.5453, decode.d0.loss_cls: 1.5229, decode.d0.loss_mask: 0.4387, decode.d0.loss_dice: 0.6395, decode.d1.loss_cls: 0.2973, decode.d1.loss_mask: 0.4123, decode.d1.loss_dice: 0.5753, decode.d2.loss_cls: 0.1955, decode.d2.loss_mask: 0.4018, decode.d2.loss_dice: 0.5590, decode.d3.loss_cls: 0.1610, decode.d3.loss_mask: 0.3973, decode.d3.loss_dice: 0.5478, decode.d4.loss_cls: 0.1444, decode.d4.loss_mask: 0.3974, decode.d4.loss_dice: 0.5518, decode.d5.loss_cls: 0.1357, decode.d5.loss_mask: 0.3954, decode.d5.loss_dice: 0.5450, decode.d6.loss_cls: 0.1355, decode.d6.loss_mask: 0.3947, decode.d6.loss_dice: 0.5438, decode.d7.loss_cls: 0.1281, decode.d7.loss_mask: 0.3943, decode.d7.loss_dice: 0.5451, decode.d8.loss_cls: 0.1307, decode.d8.loss_mask: 0.3946, decode.d8.loss_dice: 0.5443, loss: 12.5946 +2022-06-05 06:55:29,938 - mmseg - INFO - Iter [38500/40000] lr: 2.862e-07, eta: 0:12:12, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1137, decode.loss_mask: 0.4010, decode.loss_dice: 0.5537, decode.d0.loss_cls: 1.4813, decode.d0.loss_mask: 0.4440, decode.d0.loss_dice: 0.6416, decode.d1.loss_cls: 0.2691, decode.d1.loss_mask: 0.4190, decode.d1.loss_dice: 0.5908, decode.d2.loss_cls: 0.1937, decode.d2.loss_mask: 0.4079, decode.d2.loss_dice: 0.5627, decode.d3.loss_cls: 0.1424, decode.d3.loss_mask: 0.4043, decode.d3.loss_dice: 0.5590, decode.d4.loss_cls: 0.1322, decode.d4.loss_mask: 0.4042, decode.d4.loss_dice: 0.5649, decode.d5.loss_cls: 0.1260, decode.d5.loss_mask: 0.4031, decode.d5.loss_dice: 0.5559, decode.d6.loss_cls: 0.1144, decode.d6.loss_mask: 0.4019, decode.d6.loss_dice: 0.5589, decode.d7.loss_cls: 0.1098, decode.d7.loss_mask: 0.4011, decode.d7.loss_dice: 0.5563, decode.d8.loss_cls: 0.1117, decode.d8.loss_mask: 0.4013, decode.d8.loss_dice: 0.5598, loss: 12.5857 +2022-06-05 06:55:52,377 - mmseg - INFO - Iter [38550/40000] lr: 2.766e-07, eta: 0:11:47, time: 0.449, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1323, decode.loss_mask: 0.4081, decode.loss_dice: 0.5614, decode.d0.loss_cls: 1.5131, decode.d0.loss_mask: 0.4560, 
decode.d0.loss_dice: 0.6547, decode.d1.loss_cls: 0.2788, decode.d1.loss_mask: 0.4258, decode.d1.loss_dice: 0.5968, decode.d2.loss_cls: 0.1951, decode.d2.loss_mask: 0.4196, decode.d2.loss_dice: 0.5827, decode.d3.loss_cls: 0.1642, decode.d3.loss_mask: 0.4153, decode.d3.loss_dice: 0.5687, decode.d4.loss_cls: 0.1555, decode.d4.loss_mask: 0.4121, decode.d4.loss_dice: 0.5705, decode.d5.loss_cls: 0.1386, decode.d5.loss_mask: 0.4118, decode.d5.loss_dice: 0.5688, decode.d6.loss_cls: 0.1364, decode.d6.loss_mask: 0.4109, decode.d6.loss_dice: 0.5648, decode.d7.loss_cls: 0.1330, decode.d7.loss_mask: 0.4089, decode.d7.loss_dice: 0.5612, decode.d8.loss_cls: 0.1347, decode.d8.loss_mask: 0.4093, decode.d8.loss_dice: 0.5661, loss: 12.9552 +2022-06-05 06:56:14,104 - mmseg - INFO - Iter [38600/40000] lr: 2.671e-07, eta: 0:11:23, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1491, decode.loss_mask: 0.4045, decode.loss_dice: 0.5756, decode.d0.loss_cls: 1.5346, decode.d0.loss_mask: 0.4481, decode.d0.loss_dice: 0.6708, decode.d1.loss_cls: 0.2948, decode.d1.loss_mask: 0.4218, decode.d1.loss_dice: 0.6162, decode.d2.loss_cls: 0.2130, decode.d2.loss_mask: 0.4105, decode.d2.loss_dice: 0.5910, decode.d3.loss_cls: 0.1782, decode.d3.loss_mask: 0.4085, decode.d3.loss_dice: 0.5871, decode.d4.loss_cls: 0.1682, decode.d4.loss_mask: 0.4078, decode.d4.loss_dice: 0.5845, decode.d5.loss_cls: 0.1530, decode.d5.loss_mask: 0.4062, decode.d5.loss_dice: 0.5796, decode.d6.loss_cls: 0.1523, decode.d6.loss_mask: 0.4058, decode.d6.loss_dice: 0.5778, decode.d7.loss_cls: 0.1480, decode.d7.loss_mask: 0.4052, decode.d7.loss_dice: 0.5750, decode.d8.loss_cls: 0.1481, decode.d8.loss_mask: 0.4042, decode.d8.loss_dice: 0.5734, loss: 13.1930 +2022-06-05 06:56:35,520 - mmseg - INFO - Iter [38650/40000] lr: 2.576e-07, eta: 0:10:58, time: 0.428, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1515, decode.loss_mask: 0.3937, decode.loss_dice: 0.5830, decode.d0.loss_cls: 1.5337, decode.d0.loss_mask: 0.4335, decode.d0.loss_dice: 0.6793, decode.d1.loss_cls: 0.3024, decode.d1.loss_mask: 0.4117, decode.d1.loss_dice: 0.6242, decode.d2.loss_cls: 0.2281, decode.d2.loss_mask: 0.4003, decode.d2.loss_dice: 0.5925, decode.d3.loss_cls: 0.1822, decode.d3.loss_mask: 0.3987, decode.d3.loss_dice: 0.5838, decode.d4.loss_cls: 0.1723, decode.d4.loss_mask: 0.3976, decode.d4.loss_dice: 0.5840, decode.d5.loss_cls: 0.1594, decode.d5.loss_mask: 0.3959, decode.d5.loss_dice: 0.5802, decode.d6.loss_cls: 0.1571, decode.d6.loss_mask: 0.3955, decode.d6.loss_dice: 0.5813, decode.d7.loss_cls: 0.1530, decode.d7.loss_mask: 0.3947, decode.d7.loss_dice: 0.5809, decode.d8.loss_cls: 0.1523, decode.d8.loss_mask: 0.3946, decode.d8.loss_dice: 0.5811, loss: 13.1785 +2022-06-05 06:56:59,626 - mmseg - INFO - Iter [38700/40000] lr: 2.480e-07, eta: 0:10:34, time: 0.482, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1316, decode.loss_mask: 0.4006, decode.loss_dice: 0.5555, decode.d0.loss_cls: 1.4882, decode.d0.loss_mask: 0.4398, decode.d0.loss_dice: 0.6426, decode.d1.loss_cls: 0.2815, decode.d1.loss_mask: 0.4119, decode.d1.loss_dice: 0.5841, decode.d2.loss_cls: 0.1939, decode.d2.loss_mask: 0.4062, decode.d2.loss_dice: 0.5709, decode.d3.loss_cls: 0.1629, decode.d3.loss_mask: 0.4032, decode.d3.loss_dice: 0.5593, decode.d4.loss_cls: 0.1527, decode.d4.loss_mask: 0.4013, decode.d4.loss_dice: 0.5587, decode.d5.loss_cls: 0.1432, decode.d5.loss_mask: 0.4013, decode.d5.loss_dice: 0.5543, decode.d6.loss_cls: 0.1383, decode.d6.loss_mask: 0.4006, decode.d6.loss_dice: 0.5528, 
decode.d7.loss_cls: 0.1361, decode.d7.loss_mask: 0.3999, decode.d7.loss_dice: 0.5545, decode.d8.loss_cls: 0.1413, decode.d8.loss_mask: 0.3996, decode.d8.loss_dice: 0.5519, loss: 12.7188 +2022-06-05 06:57:21,466 - mmseg - INFO - Iter [38750/40000] lr: 2.385e-07, eta: 0:10:09, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1149, decode.loss_mask: 0.4093, decode.loss_dice: 0.5427, decode.d0.loss_cls: 1.4958, decode.d0.loss_mask: 0.4566, decode.d0.loss_dice: 0.6227, decode.d1.loss_cls: 0.2764, decode.d1.loss_mask: 0.4278, decode.d1.loss_dice: 0.5691, decode.d2.loss_cls: 0.1828, decode.d2.loss_mask: 0.4187, decode.d2.loss_dice: 0.5563, decode.d3.loss_cls: 0.1477, decode.d3.loss_mask: 0.4129, decode.d3.loss_dice: 0.5443, decode.d4.loss_cls: 0.1380, decode.d4.loss_mask: 0.4125, decode.d4.loss_dice: 0.5413, decode.d5.loss_cls: 0.1323, decode.d5.loss_mask: 0.4105, decode.d5.loss_dice: 0.5398, decode.d6.loss_cls: 0.1227, decode.d6.loss_mask: 0.4094, decode.d6.loss_dice: 0.5398, decode.d7.loss_cls: 0.1202, decode.d7.loss_mask: 0.4092, decode.d7.loss_dice: 0.5407, decode.d8.loss_cls: 0.1170, decode.d8.loss_mask: 0.4092, decode.d8.loss_dice: 0.5390, loss: 12.5594 +2022-06-05 06:57:43,136 - mmseg - INFO - Iter [38800/40000] lr: 2.290e-07, eta: 0:09:45, time: 0.433, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1496, decode.loss_mask: 0.3850, decode.loss_dice: 0.5689, decode.d0.loss_cls: 1.5310, decode.d0.loss_mask: 0.4310, decode.d0.loss_dice: 0.6810, decode.d1.loss_cls: 0.3089, decode.d1.loss_mask: 0.4008, decode.d1.loss_dice: 0.6101, decode.d2.loss_cls: 0.2105, decode.d2.loss_mask: 0.3929, decode.d2.loss_dice: 0.5870, decode.d3.loss_cls: 0.1755, decode.d3.loss_mask: 0.3917, decode.d3.loss_dice: 0.5780, decode.d4.loss_cls: 0.1649, decode.d4.loss_mask: 0.3885, decode.d4.loss_dice: 0.5783, decode.d5.loss_cls: 0.1596, decode.d5.loss_mask: 0.3871, decode.d5.loss_dice: 0.5740, decode.d6.loss_cls: 0.1590, decode.d6.loss_mask: 0.3858, decode.d6.loss_dice: 0.5698, decode.d7.loss_cls: 0.1547, decode.d7.loss_mask: 0.3857, decode.d7.loss_dice: 0.5733, decode.d8.loss_cls: 0.1520, decode.d8.loss_mask: 0.3845, decode.d8.loss_dice: 0.5696, loss: 12.9887 +2022-06-05 06:58:04,866 - mmseg - INFO - Iter [38850/40000] lr: 2.194e-07, eta: 0:09:20, time: 0.435, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1466, decode.loss_mask: 0.4058, decode.loss_dice: 0.5807, decode.d0.loss_cls: 1.4977, decode.d0.loss_mask: 0.4506, decode.d0.loss_dice: 0.6759, decode.d1.loss_cls: 0.3007, decode.d1.loss_mask: 0.4231, decode.d1.loss_dice: 0.6204, decode.d2.loss_cls: 0.2066, decode.d2.loss_mask: 0.4148, decode.d2.loss_dice: 0.6001, decode.d3.loss_cls: 0.1731, decode.d3.loss_mask: 0.4126, decode.d3.loss_dice: 0.5872, decode.d4.loss_cls: 0.1631, decode.d4.loss_mask: 0.4112, decode.d4.loss_dice: 0.5899, decode.d5.loss_cls: 0.1583, decode.d5.loss_mask: 0.4089, decode.d5.loss_dice: 0.5861, decode.d6.loss_cls: 0.1543, decode.d6.loss_mask: 0.4081, decode.d6.loss_dice: 0.5860, decode.d7.loss_cls: 0.1468, decode.d7.loss_mask: 0.4077, decode.d7.loss_dice: 0.5806, decode.d8.loss_cls: 0.1410, decode.d8.loss_mask: 0.4077, decode.d8.loss_dice: 0.5811, loss: 13.2266 +2022-06-05 06:58:26,445 - mmseg - INFO - Iter [38900/40000] lr: 2.099e-07, eta: 0:08:56, time: 0.432, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1420, decode.loss_mask: 0.3897, decode.loss_dice: 0.5708, decode.d0.loss_cls: 1.5485, decode.d0.loss_mask: 0.4290, decode.d0.loss_dice: 0.6689, decode.d1.loss_cls: 0.3061, decode.d1.loss_mask: 0.4046, 
decode.d1.loss_dice: 0.6113, decode.d2.loss_cls: 0.2269, decode.d2.loss_mask: 0.3954, decode.d2.loss_dice: 0.5863, decode.d3.loss_cls: 0.1741, decode.d3.loss_mask: 0.3929, decode.d3.loss_dice: 0.5823, decode.d4.loss_cls: 0.1599, decode.d4.loss_mask: 0.3919, decode.d4.loss_dice: 0.5862, decode.d5.loss_cls: 0.1604, decode.d5.loss_mask: 0.3913, decode.d5.loss_dice: 0.5751, decode.d6.loss_cls: 0.1475, decode.d6.loss_mask: 0.3909, decode.d6.loss_dice: 0.5739, decode.d7.loss_cls: 0.1493, decode.d7.loss_mask: 0.3893, decode.d7.loss_dice: 0.5707, decode.d8.loss_cls: 0.1407, decode.d8.loss_mask: 0.3906, decode.d8.loss_dice: 0.5762, loss: 13.0227 +2022-06-05 06:58:48,173 - mmseg - INFO - Iter [38950/40000] lr: 2.004e-07, eta: 0:08:32, time: 0.434, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1362, decode.loss_mask: 0.3995, decode.loss_dice: 0.5412, decode.d0.loss_cls: 1.4905, decode.d0.loss_mask: 0.4423, decode.d0.loss_dice: 0.6369, decode.d1.loss_cls: 0.2868, decode.d1.loss_mask: 0.4191, decode.d1.loss_dice: 0.5802, decode.d2.loss_cls: 0.2079, decode.d2.loss_mask: 0.4105, decode.d2.loss_dice: 0.5592, decode.d3.loss_cls: 0.1622, decode.d3.loss_mask: 0.4050, decode.d3.loss_dice: 0.5467, decode.d4.loss_cls: 0.1460, decode.d4.loss_mask: 0.4039, decode.d4.loss_dice: 0.5474, decode.d5.loss_cls: 0.1418, decode.d5.loss_mask: 0.4031, decode.d5.loss_dice: 0.5444, decode.d6.loss_cls: 0.1410, decode.d6.loss_mask: 0.4030, decode.d6.loss_dice: 0.5416, decode.d7.loss_cls: 0.1409, decode.d7.loss_mask: 0.4009, decode.d7.loss_dice: 0.5418, decode.d8.loss_cls: 0.1346, decode.d8.loss_mask: 0.4008, decode.d8.loss_dice: 0.5398, loss: 12.6551 +2022-06-05 06:59:09,007 - mmseg - INFO - Saving checkpoint at 39000 iterations +2022-06-05 06:59:12,812 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 06:59:12,813 - mmseg - INFO - Iter [39000/40000] lr: 1.908e-07, eta: 0:08:07, time: 0.493, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1311, decode.loss_mask: 0.4029, decode.loss_dice: 0.5435, decode.d0.loss_cls: 1.5140, decode.d0.loss_mask: 0.4436, decode.d0.loss_dice: 0.6364, decode.d1.loss_cls: 0.2850, decode.d1.loss_mask: 0.4193, decode.d1.loss_dice: 0.5787, decode.d2.loss_cls: 0.1961, decode.d2.loss_mask: 0.4100, decode.d2.loss_dice: 0.5559, decode.d3.loss_cls: 0.1536, decode.d3.loss_mask: 0.4065, decode.d3.loss_dice: 0.5493, decode.d4.loss_cls: 0.1450, decode.d4.loss_mask: 0.4060, decode.d4.loss_dice: 0.5467, decode.d5.loss_cls: 0.1354, decode.d5.loss_mask: 0.4064, decode.d5.loss_dice: 0.5445, decode.d6.loss_cls: 0.1337, decode.d6.loss_mask: 0.4038, decode.d6.loss_dice: 0.5440, decode.d7.loss_cls: 0.1325, decode.d7.loss_mask: 0.4024, decode.d7.loss_dice: 0.5435, decode.d8.loss_cls: 0.1326, decode.d8.loss_mask: 0.4033, decode.d8.loss_dice: 0.5459, loss: 12.6516 +2022-06-05 06:59:36,758 - mmseg - INFO - Iter [39050/40000] lr: 1.813e-07, eta: 0:07:43, time: 0.478, data_time: 0.049, memory: 31652, decode.loss_cls: 0.1298, decode.loss_mask: 0.4016, decode.loss_dice: 0.5701, decode.d0.loss_cls: 1.5198, decode.d0.loss_mask: 0.4482, decode.d0.loss_dice: 0.6633, decode.d1.loss_cls: 0.2939, decode.d1.loss_mask: 0.4199, decode.d1.loss_dice: 0.6056, decode.d2.loss_cls: 0.1922, decode.d2.loss_mask: 0.4106, decode.d2.loss_dice: 0.5815, decode.d3.loss_cls: 0.1528, decode.d3.loss_mask: 0.4060, decode.d3.loss_dice: 0.5750, decode.d4.loss_cls: 0.1480, decode.d4.loss_mask: 0.4024, decode.d4.loss_dice: 0.5726, decode.d5.loss_cls: 0.1421, decode.d5.loss_mask: 0.4021, 
decode.d5.loss_dice: 0.5733, decode.d6.loss_cls: 0.1325, decode.d6.loss_mask: 0.4022, decode.d6.loss_dice: 0.5718, decode.d7.loss_cls: 0.1312, decode.d7.loss_mask: 0.4028, decode.d7.loss_dice: 0.5722, decode.d8.loss_cls: 0.1281, decode.d8.loss_mask: 0.4023, decode.d8.loss_dice: 0.5725, loss: 12.9264 +2022-06-05 06:59:58,169 - mmseg - INFO - Iter [39100/40000] lr: 1.718e-07, eta: 0:07:18, time: 0.429, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1361, decode.loss_mask: 0.3937, decode.loss_dice: 0.5406, decode.d0.loss_cls: 1.4814, decode.d0.loss_mask: 0.4368, decode.d0.loss_dice: 0.6380, decode.d1.loss_cls: 0.2715, decode.d1.loss_mask: 0.4134, decode.d1.loss_dice: 0.5767, decode.d2.loss_cls: 0.1988, decode.d2.loss_mask: 0.4035, decode.d2.loss_dice: 0.5572, decode.d3.loss_cls: 0.1602, decode.d3.loss_mask: 0.3998, decode.d3.loss_dice: 0.5481, decode.d4.loss_cls: 0.1535, decode.d4.loss_mask: 0.3977, decode.d4.loss_dice: 0.5455, decode.d5.loss_cls: 0.1463, decode.d5.loss_mask: 0.3970, decode.d5.loss_dice: 0.5463, decode.d6.loss_cls: 0.1383, decode.d6.loss_mask: 0.3974, decode.d6.loss_dice: 0.5440, decode.d7.loss_cls: 0.1374, decode.d7.loss_mask: 0.3956, decode.d7.loss_dice: 0.5432, decode.d8.loss_cls: 0.1343, decode.d8.loss_mask: 0.3949, decode.d8.loss_dice: 0.5403, loss: 12.5674 +2022-06-05 07:00:19,870 - mmseg - INFO - Iter [39150/40000] lr: 1.622e-07, eta: 0:06:54, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1292, decode.loss_mask: 0.3970, decode.loss_dice: 0.5813, decode.d0.loss_cls: 1.5172, decode.d0.loss_mask: 0.4363, decode.d0.loss_dice: 0.6725, decode.d1.loss_cls: 0.2844, decode.d1.loss_mask: 0.4111, decode.d1.loss_dice: 0.6167, decode.d2.loss_cls: 0.1885, decode.d2.loss_mask: 0.4040, decode.d2.loss_dice: 0.5955, decode.d3.loss_cls: 0.1529, decode.d3.loss_mask: 0.4007, decode.d3.loss_dice: 0.5862, decode.d4.loss_cls: 0.1483, decode.d4.loss_mask: 0.4007, decode.d4.loss_dice: 0.5846, decode.d5.loss_cls: 0.1417, decode.d5.loss_mask: 0.3995, decode.d5.loss_dice: 0.5868, decode.d6.loss_cls: 0.1363, decode.d6.loss_mask: 0.3995, decode.d6.loss_dice: 0.5833, decode.d7.loss_cls: 0.1343, decode.d7.loss_mask: 0.3979, decode.d7.loss_dice: 0.5825, decode.d8.loss_cls: 0.1305, decode.d8.loss_mask: 0.3964, decode.d8.loss_dice: 0.5829, loss: 12.9787 +2022-06-05 07:00:41,670 - mmseg - INFO - Iter [39200/40000] lr: 1.527e-07, eta: 0:06:29, time: 0.436, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1177, decode.loss_mask: 0.3968, decode.loss_dice: 0.5642, decode.d0.loss_cls: 1.5135, decode.d0.loss_mask: 0.4381, decode.d0.loss_dice: 0.6524, decode.d1.loss_cls: 0.2705, decode.d1.loss_mask: 0.4121, decode.d1.loss_dice: 0.5900, decode.d2.loss_cls: 0.1897, decode.d2.loss_mask: 0.4030, decode.d2.loss_dice: 0.5715, decode.d3.loss_cls: 0.1460, decode.d3.loss_mask: 0.4007, decode.d3.loss_dice: 0.5687, decode.d4.loss_cls: 0.1404, decode.d4.loss_mask: 0.3985, decode.d4.loss_dice: 0.5647, decode.d5.loss_cls: 0.1320, decode.d5.loss_mask: 0.3993, decode.d5.loss_dice: 0.5641, decode.d6.loss_cls: 0.1292, decode.d6.loss_mask: 0.3973, decode.d6.loss_dice: 0.5630, decode.d7.loss_cls: 0.1286, decode.d7.loss_mask: 0.3973, decode.d7.loss_dice: 0.5597, decode.d8.loss_cls: 0.1289, decode.d8.loss_mask: 0.3962, decode.d8.loss_dice: 0.5628, loss: 12.6969 +2022-06-05 07:01:03,834 - mmseg - INFO - Iter [39250/40000] lr: 1.432e-07, eta: 0:06:05, time: 0.443, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1261, decode.loss_mask: 0.4018, decode.loss_dice: 0.5692, decode.d0.loss_cls: 1.5227, 
decode.d0.loss_mask: 0.4456, decode.d0.loss_dice: 0.6644, decode.d1.loss_cls: 0.2882, decode.d1.loss_mask: 0.4204, decode.d1.loss_dice: 0.6085, decode.d2.loss_cls: 0.1939, decode.d2.loss_mask: 0.4082, decode.d2.loss_dice: 0.5842, decode.d3.loss_cls: 0.1519, decode.d3.loss_mask: 0.4058, decode.d3.loss_dice: 0.5754, decode.d4.loss_cls: 0.1427, decode.d4.loss_mask: 0.4036, decode.d4.loss_dice: 0.5719, decode.d5.loss_cls: 0.1347, decode.d5.loss_mask: 0.4031, decode.d5.loss_dice: 0.5706, decode.d6.loss_cls: 0.1284, decode.d6.loss_mask: 0.4029, decode.d6.loss_dice: 0.5699, decode.d7.loss_cls: 0.1297, decode.d7.loss_mask: 0.4020, decode.d7.loss_dice: 0.5685, decode.d8.loss_cls: 0.1243, decode.d8.loss_mask: 0.4012, decode.d8.loss_dice: 0.5691, loss: 12.8889 +2022-06-05 07:01:25,634 - mmseg - INFO - Iter [39300/40000] lr: 1.336e-07, eta: 0:05:41, time: 0.436, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1120, decode.loss_mask: 0.3915, decode.loss_dice: 0.5341, decode.d0.loss_cls: 1.5083, decode.d0.loss_mask: 0.4337, decode.d0.loss_dice: 0.6145, decode.d1.loss_cls: 0.2689, decode.d1.loss_mask: 0.4089, decode.d1.loss_dice: 0.5657, decode.d2.loss_cls: 0.1819, decode.d2.loss_mask: 0.3989, decode.d2.loss_dice: 0.5447, decode.d3.loss_cls: 0.1450, decode.d3.loss_mask: 0.3955, decode.d3.loss_dice: 0.5411, decode.d4.loss_cls: 0.1283, decode.d4.loss_mask: 0.3940, decode.d4.loss_dice: 0.5390, decode.d5.loss_cls: 0.1265, decode.d5.loss_mask: 0.3943, decode.d5.loss_dice: 0.5394, decode.d6.loss_cls: 0.1200, decode.d6.loss_mask: 0.3917, decode.d6.loss_dice: 0.5310, decode.d7.loss_cls: 0.1134, decode.d7.loss_mask: 0.3918, decode.d7.loss_dice: 0.5337, decode.d8.loss_cls: 0.1134, decode.d8.loss_mask: 0.3914, decode.d8.loss_dice: 0.5318, loss: 12.2844 +2022-06-05 07:01:49,847 - mmseg - INFO - Iter [39350/40000] lr: 1.241e-07, eta: 0:05:16, time: 0.484, data_time: 0.055, memory: 31652, decode.loss_cls: 0.1291, decode.loss_mask: 0.3958, decode.loss_dice: 0.5553, decode.d0.loss_cls: 1.5367, decode.d0.loss_mask: 0.4414, decode.d0.loss_dice: 0.6484, decode.d1.loss_cls: 0.2801, decode.d1.loss_mask: 0.4126, decode.d1.loss_dice: 0.5893, decode.d2.loss_cls: 0.1953, decode.d2.loss_mask: 0.4032, decode.d2.loss_dice: 0.5683, decode.d3.loss_cls: 0.1562, decode.d3.loss_mask: 0.3973, decode.d3.loss_dice: 0.5599, decode.d4.loss_cls: 0.1388, decode.d4.loss_mask: 0.3974, decode.d4.loss_dice: 0.5590, decode.d5.loss_cls: 0.1331, decode.d5.loss_mask: 0.3976, decode.d5.loss_dice: 0.5586, decode.d6.loss_cls: 0.1307, decode.d6.loss_mask: 0.3963, decode.d6.loss_dice: 0.5567, decode.d7.loss_cls: 0.1287, decode.d7.loss_mask: 0.3956, decode.d7.loss_dice: 0.5553, decode.d8.loss_cls: 0.1260, decode.d8.loss_mask: 0.3961, decode.d8.loss_dice: 0.5581, loss: 12.6969 +2022-06-05 07:02:11,507 - mmseg - INFO - Iter [39400/40000] lr: 1.146e-07, eta: 0:04:52, time: 0.433, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1173, decode.loss_mask: 0.3894, decode.loss_dice: 0.5572, decode.d0.loss_cls: 1.5087, decode.d0.loss_mask: 0.4264, decode.d0.loss_dice: 0.6335, decode.d1.loss_cls: 0.2622, decode.d1.loss_mask: 0.4032, decode.d1.loss_dice: 0.5871, decode.d2.loss_cls: 0.1879, decode.d2.loss_mask: 0.3928, decode.d2.loss_dice: 0.5628, decode.d3.loss_cls: 0.1434, decode.d3.loss_mask: 0.3922, decode.d3.loss_dice: 0.5613, decode.d4.loss_cls: 0.1291, decode.d4.loss_mask: 0.3918, decode.d4.loss_dice: 0.5600, decode.d5.loss_cls: 0.1240, decode.d5.loss_mask: 0.3912, decode.d5.loss_dice: 0.5594, decode.d6.loss_cls: 0.1194, decode.d6.loss_mask: 0.3901, 
decode.d6.loss_dice: 0.5566, decode.d7.loss_cls: 0.1178, decode.d7.loss_mask: 0.3895, decode.d7.loss_dice: 0.5584, decode.d8.loss_cls: 0.1115, decode.d8.loss_mask: 0.3892, decode.d8.loss_dice: 0.5575, loss: 12.4710 +2022-06-05 07:02:33,225 - mmseg - INFO - Iter [39450/40000] lr: 1.050e-07, eta: 0:04:27, time: 0.434, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1219, decode.loss_mask: 0.4012, decode.loss_dice: 0.5657, decode.d0.loss_cls: 1.5294, decode.d0.loss_mask: 0.4387, decode.d0.loss_dice: 0.6538, decode.d1.loss_cls: 0.2915, decode.d1.loss_mask: 0.4123, decode.d1.loss_dice: 0.5990, decode.d2.loss_cls: 0.1897, decode.d2.loss_mask: 0.4061, decode.d2.loss_dice: 0.5770, decode.d3.loss_cls: 0.1540, decode.d3.loss_mask: 0.4048, decode.d3.loss_dice: 0.5739, decode.d4.loss_cls: 0.1423, decode.d4.loss_mask: 0.4044, decode.d4.loss_dice: 0.5747, decode.d5.loss_cls: 0.1392, decode.d5.loss_mask: 0.4023, decode.d5.loss_dice: 0.5731, decode.d6.loss_cls: 0.1297, decode.d6.loss_mask: 0.4021, decode.d6.loss_dice: 0.5723, decode.d7.loss_cls: 0.1238, decode.d7.loss_mask: 0.4010, decode.d7.loss_dice: 0.5700, decode.d8.loss_cls: 0.1211, decode.d8.loss_mask: 0.4016, decode.d8.loss_dice: 0.5684, loss: 12.8452 +2022-06-05 07:02:55,232 - mmseg - INFO - Iter [39500/40000] lr: 9.551e-08, eta: 0:04:03, time: 0.441, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1531, decode.loss_mask: 0.3963, decode.loss_dice: 0.5539, decode.d0.loss_cls: 1.5265, decode.d0.loss_mask: 0.4383, decode.d0.loss_dice: 0.6513, decode.d1.loss_cls: 0.2893, decode.d1.loss_mask: 0.4100, decode.d1.loss_dice: 0.5932, decode.d2.loss_cls: 0.2111, decode.d2.loss_mask: 0.4037, decode.d2.loss_dice: 0.5719, decode.d3.loss_cls: 0.1778, decode.d3.loss_mask: 0.4013, decode.d3.loss_dice: 0.5637, decode.d4.loss_cls: 0.1677, decode.d4.loss_mask: 0.3993, decode.d4.loss_dice: 0.5597, decode.d5.loss_cls: 0.1622, decode.d5.loss_mask: 0.3988, decode.d5.loss_dice: 0.5586, decode.d6.loss_cls: 0.1561, decode.d6.loss_mask: 0.3990, decode.d6.loss_dice: 0.5576, decode.d7.loss_cls: 0.1539, decode.d7.loss_mask: 0.3979, decode.d7.loss_dice: 0.5547, decode.d8.loss_cls: 0.1510, decode.d8.loss_mask: 0.3973, decode.d8.loss_dice: 0.5577, loss: 12.9130 +2022-06-05 07:03:17,495 - mmseg - INFO - Iter [39550/40000] lr: 8.598e-08, eta: 0:03:39, time: 0.445, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1276, decode.loss_mask: 0.4142, decode.loss_dice: 0.5629, decode.d0.loss_cls: 1.5044, decode.d0.loss_mask: 0.4583, decode.d0.loss_dice: 0.6615, decode.d1.loss_cls: 0.2791, decode.d1.loss_mask: 0.4294, decode.d1.loss_dice: 0.6078, decode.d2.loss_cls: 0.1994, decode.d2.loss_mask: 0.4213, decode.d2.loss_dice: 0.5800, decode.d3.loss_cls: 0.1622, decode.d3.loss_mask: 0.4178, decode.d3.loss_dice: 0.5679, decode.d4.loss_cls: 0.1453, decode.d4.loss_mask: 0.4155, decode.d4.loss_dice: 0.5645, decode.d5.loss_cls: 0.1400, decode.d5.loss_mask: 0.4152, decode.d5.loss_dice: 0.5646, decode.d6.loss_cls: 0.1345, decode.d6.loss_mask: 0.4129, decode.d6.loss_dice: 0.5625, decode.d7.loss_cls: 0.1344, decode.d7.loss_mask: 0.4123, decode.d7.loss_dice: 0.5648, decode.d8.loss_cls: 0.1338, decode.d8.loss_mask: 0.4126, decode.d8.loss_dice: 0.5672, loss: 12.9740 +2022-06-05 07:03:39,385 - mmseg - INFO - Iter [39600/40000] lr: 7.645e-08, eta: 0:03:14, time: 0.438, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1194, decode.loss_mask: 0.3966, decode.loss_dice: 0.5530, decode.d0.loss_cls: 1.4944, decode.d0.loss_mask: 0.4379, decode.d0.loss_dice: 0.6466, decode.d1.loss_cls: 0.2785, 
decode.d1.loss_mask: 0.4134, decode.d1.loss_dice: 0.5924, decode.d2.loss_cls: 0.1868, decode.d2.loss_mask: 0.4037, decode.d2.loss_dice: 0.5652, decode.d3.loss_cls: 0.1478, decode.d3.loss_mask: 0.4011, decode.d3.loss_dice: 0.5607, decode.d4.loss_cls: 0.1397, decode.d4.loss_mask: 0.3986, decode.d4.loss_dice: 0.5569, decode.d5.loss_cls: 0.1304, decode.d5.loss_mask: 0.3988, decode.d5.loss_dice: 0.5559, decode.d6.loss_cls: 0.1249, decode.d6.loss_mask: 0.3977, decode.d6.loss_dice: 0.5547, decode.d7.loss_cls: 0.1197, decode.d7.loss_mask: 0.3974, decode.d7.loss_dice: 0.5549, decode.d8.loss_cls: 0.1157, decode.d8.loss_mask: 0.3974, decode.d8.loss_dice: 0.5577, loss: 12.5976 +2022-06-05 07:04:03,974 - mmseg - INFO - Iter [39650/40000] lr: 6.691e-08, eta: 0:02:50, time: 0.492, data_time: 0.057, memory: 31652, decode.loss_cls: 0.1238, decode.loss_mask: 0.3932, decode.loss_dice: 0.5542, decode.d0.loss_cls: 1.5182, decode.d0.loss_mask: 0.4321, decode.d0.loss_dice: 0.6520, decode.d1.loss_cls: 0.2787, decode.d1.loss_mask: 0.4080, decode.d1.loss_dice: 0.5917, decode.d2.loss_cls: 0.1902, decode.d2.loss_mask: 0.4007, decode.d2.loss_dice: 0.5713, decode.d3.loss_cls: 0.1543, decode.d3.loss_mask: 0.3948, decode.d3.loss_dice: 0.5609, decode.d4.loss_cls: 0.1451, decode.d4.loss_mask: 0.3951, decode.d4.loss_dice: 0.5645, decode.d5.loss_cls: 0.1415, decode.d5.loss_mask: 0.3943, decode.d5.loss_dice: 0.5587, decode.d6.loss_cls: 0.1337, decode.d6.loss_mask: 0.3936, decode.d6.loss_dice: 0.5586, decode.d7.loss_cls: 0.1234, decode.d7.loss_mask: 0.3923, decode.d7.loss_dice: 0.5582, decode.d8.loss_cls: 0.1265, decode.d8.loss_mask: 0.3921, decode.d8.loss_dice: 0.5567, loss: 12.6583 +2022-06-05 07:04:25,885 - mmseg - INFO - Iter [39700/40000] lr: 5.738e-08, eta: 0:02:26, time: 0.438, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1201, decode.loss_mask: 0.3919, decode.loss_dice: 0.5547, decode.d0.loss_cls: 1.5128, decode.d0.loss_mask: 0.4364, decode.d0.loss_dice: 0.6436, decode.d1.loss_cls: 0.2896, decode.d1.loss_mask: 0.4078, decode.d1.loss_dice: 0.5859, decode.d2.loss_cls: 0.1877, decode.d2.loss_mask: 0.3999, decode.d2.loss_dice: 0.5677, decode.d3.loss_cls: 0.1510, decode.d3.loss_mask: 0.3954, decode.d3.loss_dice: 0.5579, decode.d4.loss_cls: 0.1374, decode.d4.loss_mask: 0.3946, decode.d4.loss_dice: 0.5578, decode.d5.loss_cls: 0.1284, decode.d5.loss_mask: 0.3946, decode.d5.loss_dice: 0.5577, decode.d6.loss_cls: 0.1252, decode.d6.loss_mask: 0.3941, decode.d6.loss_dice: 0.5572, decode.d7.loss_cls: 0.1240, decode.d7.loss_mask: 0.3923, decode.d7.loss_dice: 0.5539, decode.d8.loss_cls: 0.1201, decode.d8.loss_mask: 0.3925, decode.d8.loss_dice: 0.5523, loss: 12.5845 +2022-06-05 07:04:47,997 - mmseg - INFO - Iter [39750/40000] lr: 4.785e-08, eta: 0:02:01, time: 0.442, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1365, decode.loss_mask: 0.3887, decode.loss_dice: 0.5441, decode.d0.loss_cls: 1.4811, decode.d0.loss_mask: 0.4279, decode.d0.loss_dice: 0.6456, decode.d1.loss_cls: 0.2784, decode.d1.loss_mask: 0.4012, decode.d1.loss_dice: 0.5846, decode.d2.loss_cls: 0.1911, decode.d2.loss_mask: 0.3948, decode.d2.loss_dice: 0.5604, decode.d3.loss_cls: 0.1600, decode.d3.loss_mask: 0.3911, decode.d3.loss_dice: 0.5549, decode.d4.loss_cls: 0.1555, decode.d4.loss_mask: 0.3903, decode.d4.loss_dice: 0.5503, decode.d5.loss_cls: 0.1509, decode.d5.loss_mask: 0.3890, decode.d5.loss_dice: 0.5487, decode.d6.loss_cls: 0.1416, decode.d6.loss_mask: 0.3888, decode.d6.loss_dice: 0.5470, decode.d7.loss_cls: 0.1437, decode.d7.loss_mask: 0.3878, 
decode.d7.loss_dice: 0.5459, decode.d8.loss_cls: 0.1366, decode.d8.loss_mask: 0.3877, decode.d8.loss_dice: 0.5471, loss: 12.5513 +2022-06-05 07:05:09,924 - mmseg - INFO - Iter [39800/40000] lr: 3.832e-08, eta: 0:01:37, time: 0.437, data_time: 0.008, memory: 31652, decode.loss_cls: 0.1276, decode.loss_mask: 0.3933, decode.loss_dice: 0.5593, decode.d0.loss_cls: 1.5086, decode.d0.loss_mask: 0.4334, decode.d0.loss_dice: 0.6479, decode.d1.loss_cls: 0.2866, decode.d1.loss_mask: 0.4096, decode.d1.loss_dice: 0.5969, decode.d2.loss_cls: 0.1986, decode.d2.loss_mask: 0.3989, decode.d2.loss_dice: 0.5706, decode.d3.loss_cls: 0.1628, decode.d3.loss_mask: 0.3957, decode.d3.loss_dice: 0.5648, decode.d4.loss_cls: 0.1498, decode.d4.loss_mask: 0.3931, decode.d4.loss_dice: 0.5651, decode.d5.loss_cls: 0.1417, decode.d5.loss_mask: 0.3940, decode.d5.loss_dice: 0.5633, decode.d6.loss_cls: 0.1336, decode.d6.loss_mask: 0.3926, decode.d6.loss_dice: 0.5626, decode.d7.loss_cls: 0.1332, decode.d7.loss_mask: 0.3938, decode.d7.loss_dice: 0.5609, decode.d8.loss_cls: 0.1320, decode.d8.loss_mask: 0.3927, decode.d8.loss_dice: 0.5603, loss: 12.7234 +2022-06-05 07:05:31,613 - mmseg - INFO - Iter [39850/40000] lr: 2.879e-08, eta: 0:01:13, time: 0.435, data_time: 0.009, memory: 31652, decode.loss_cls: 0.1454, decode.loss_mask: 0.4009, decode.loss_dice: 0.5640, decode.d0.loss_cls: 1.5295, decode.d0.loss_mask: 0.4379, decode.d0.loss_dice: 0.6539, decode.d1.loss_cls: 0.2970, decode.d1.loss_mask: 0.4164, decode.d1.loss_dice: 0.5986, decode.d2.loss_cls: 0.2151, decode.d2.loss_mask: 0.4075, decode.d2.loss_dice: 0.5813, decode.d3.loss_cls: 0.1712, decode.d3.loss_mask: 0.4022, decode.d3.loss_dice: 0.5655, decode.d4.loss_cls: 0.1651, decode.d4.loss_mask: 0.4028, decode.d4.loss_dice: 0.5703, decode.d5.loss_cls: 0.1568, decode.d5.loss_mask: 0.4015, decode.d5.loss_dice: 0.5672, decode.d6.loss_cls: 0.1503, decode.d6.loss_mask: 0.4005, decode.d6.loss_dice: 0.5634, decode.d7.loss_cls: 0.1440, decode.d7.loss_mask: 0.3994, decode.d7.loss_dice: 0.5668, decode.d8.loss_cls: 0.1439, decode.d8.loss_mask: 0.4008, decode.d8.loss_dice: 0.5642, loss: 12.9834 +2022-06-05 07:05:53,363 - mmseg - INFO - Iter [39900/40000] lr: 1.925e-08, eta: 0:00:48, time: 0.435, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1395, decode.loss_mask: 0.3976, decode.loss_dice: 0.5693, decode.d0.loss_cls: 1.5477, decode.d0.loss_mask: 0.4406, decode.d0.loss_dice: 0.6584, decode.d1.loss_cls: 0.2954, decode.d1.loss_mask: 0.4167, decode.d1.loss_dice: 0.6076, decode.d2.loss_cls: 0.2033, decode.d2.loss_mask: 0.4065, decode.d2.loss_dice: 0.5826, decode.d3.loss_cls: 0.1699, decode.d3.loss_mask: 0.4045, decode.d3.loss_dice: 0.5763, decode.d4.loss_cls: 0.1634, decode.d4.loss_mask: 0.4009, decode.d4.loss_dice: 0.5739, decode.d5.loss_cls: 0.1538, decode.d5.loss_mask: 0.4004, decode.d5.loss_dice: 0.5738, decode.d6.loss_cls: 0.1469, decode.d6.loss_mask: 0.3994, decode.d6.loss_dice: 0.5722, decode.d7.loss_cls: 0.1416, decode.d7.loss_mask: 0.3982, decode.d7.loss_dice: 0.5701, decode.d8.loss_cls: 0.1374, decode.d8.loss_mask: 0.3980, decode.d8.loss_dice: 0.5710, loss: 13.0167 +2022-06-05 07:06:18,043 - mmseg - INFO - Iter [39950/40000] lr: 9.723e-09, eta: 0:00:24, time: 0.494, data_time: 0.056, memory: 31652, decode.loss_cls: 0.1458, decode.loss_mask: 0.4070, decode.loss_dice: 0.5730, decode.d0.loss_cls: 1.5287, decode.d0.loss_mask: 0.4457, decode.d0.loss_dice: 0.6628, decode.d1.loss_cls: 0.3038, decode.d1.loss_mask: 0.4235, decode.d1.loss_dice: 0.6131, decode.d2.loss_cls: 0.2223, 
decode.d2.loss_mask: 0.4148, decode.d2.loss_dice: 0.5894, decode.d3.loss_cls: 0.1744, decode.d3.loss_mask: 0.4122, decode.d3.loss_dice: 0.5802, decode.d4.loss_cls: 0.1613, decode.d4.loss_mask: 0.4101, decode.d4.loss_dice: 0.5776, decode.d5.loss_cls: 0.1608, decode.d5.loss_mask: 0.4082, decode.d5.loss_dice: 0.5771, decode.d6.loss_cls: 0.1500, decode.d6.loss_mask: 0.4072, decode.d6.loss_dice: 0.5740, decode.d7.loss_cls: 0.1533, decode.d7.loss_mask: 0.4073, decode.d7.loss_dice: 0.5728, decode.d8.loss_cls: 0.1475, decode.d8.loss_mask: 0.4068, decode.d8.loss_dice: 0.5704, loss: 13.1809 +2022-06-05 07:06:39,805 - mmseg - INFO - Saving checkpoint at 40000 iterations +2022-06-05 07:06:43,247 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 07:06:43,248 - mmseg - INFO - Iter [40000/40000] lr: 1.906e-10, eta: 0:00:00, time: 0.504, data_time: 0.007, memory: 31652, decode.loss_cls: 0.1265, decode.loss_mask: 0.4020, decode.loss_dice: 0.5502, decode.d0.loss_cls: 1.4986, decode.d0.loss_mask: 0.4432, decode.d0.loss_dice: 0.6407, decode.d1.loss_cls: 0.2881, decode.d1.loss_mask: 0.4164, decode.d1.loss_dice: 0.5805, decode.d2.loss_cls: 0.1914, decode.d2.loss_mask: 0.4063, decode.d2.loss_dice: 0.5648, decode.d3.loss_cls: 0.1552, decode.d3.loss_mask: 0.4034, decode.d3.loss_dice: 0.5595, decode.d4.loss_cls: 0.1484, decode.d4.loss_mask: 0.4027, decode.d4.loss_dice: 0.5561, decode.d5.loss_cls: 0.1444, decode.d5.loss_mask: 0.4010, decode.d5.loss_dice: 0.5500, decode.d6.loss_cls: 0.1323, decode.d6.loss_mask: 0.4019, decode.d6.loss_dice: 0.5516, decode.d7.loss_cls: 0.1353, decode.d7.loss_mask: 0.4010, decode.d7.loss_dice: 0.5519, decode.d8.loss_cls: 0.1267, decode.d8.loss_mask: 0.4008, decode.d8.loss_dice: 0.5543, loss: 12.6853 +2022-06-05 07:09:21,083 - mmseg - INFO - per class results: +2022-06-05 07:09:21,091 - mmseg - INFO - ++-------------+-------+-------+ +| Class | IoU | Acc | ++-------------+-------+-------+ +| aeroplane | 90.47 | 95.37 | +| bag | 41.19 | 54.54 | +| bed | 31.96 | 42.44 | +| bedclothes | 44.46 | 63.75 | +| bench | 25.01 | 30.45 | +| bicycle | 84.16 | 93.4 | +| bird | 94.15 | 96.9 | +| boat | 84.44 | 91.74 | +| book | 51.61 | 63.96 | +| bottle | 86.76 | 95.92 | +| building | 65.43 | 79.06 | +| bus | 94.0 | 97.19 | +| cabinet | 42.81 | 64.8 | +| car | 91.4 | 95.63 | +| cat | 93.87 | 98.03 | +| ceiling | 60.66 | 76.53 | +| chair | 58.92 | 78.15 | +| cloth | 25.62 | 37.24 | +| computer | 42.17 | 54.78 | +| cow | 95.08 | 97.11 | +| cup | 44.46 | 59.3 | +| curtain | 56.05 | 70.88 | +| dog | 91.86 | 97.03 | +| door | 31.64 | 49.36 | +| fence | 44.24 | 57.35 | +| floor | 72.37 | 85.76 | +| flower | 41.64 | 62.54 | +| food | 35.79 | 46.49 | +| grass | 82.35 | 91.26 | +| ground | 55.79 | 70.63 | +| horse | 94.19 | 97.53 | +| keyboard | 86.26 | 91.9 | +| light | 57.52 | 73.28 | +| motorbike | 89.93 | 94.84 | +| mountain | 54.54 | 73.49 | +| mouse | 75.36 | 83.45 | +| person | 90.59 | 96.07 | +| plate | 27.85 | 37.18 | +| platform | 53.09 | 68.62 | +| pottedplant | 80.92 | 88.83 | +| road | 51.85 | 68.23 | +| rock | 50.18 | 59.07 | +| sheep | 93.86 | 97.32 | +| shelves | 35.07 | 50.03 | +| sidewalk | 27.14 | 51.52 | +| sign | 45.85 | 57.41 | +| sky | 94.95 | 97.51 | +| snow | 74.46 | 86.65 | +| sofa | 59.5 | 69.62 | +| table | 67.21 | 78.61 | +| track | 69.38 | 79.69 | +| train | 92.21 | 96.31 | +| tree | 81.24 | 90.34 | +| truck | 40.47 | 52.88 | +| tvmonitor | 86.98 | 93.94 | +| wall | 70.39 | 81.94 | +| water | 91.73 | 95.71 | +| window | 44.08 | 
57.32 | +| wood | 28.6 | 41.52 | ++-------------+-------+-------+ +2022-06-05 07:09:21,091 - mmseg - INFO - Summary: +2022-06-05 07:09:21,091 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 85.33 | 64.0 | 74.72 | ++-------+------+-------+ +2022-06-05 07:09:21,108 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter-Release/segmentation/work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/best_mIoU_iter_36000.pth was removed +2022-06-05 07:09:23,732 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_40000.pth. +2022-06-05 07:09:23,732 - mmseg - INFO - Best mIoU is 0.6400 at 40000 iter. +2022-06-05 07:09:23,755 - mmseg - INFO - Exp name: mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py +2022-06-05 07:09:23,755 - mmseg - INFO - Iter(val) [638] aAcc: 0.8533, mIoU: 0.6400, mAcc: 0.7472, IoU.aeroplane: 0.9047, IoU.bag: 0.4119, IoU.bed: 0.3196, IoU.bedclothes: 0.4446, IoU.bench: 0.2501, IoU.bicycle: 0.8416, IoU.bird: 0.9415, IoU.boat: 0.8444, IoU.book: 0.5161, IoU.bottle: 0.8676, IoU.building: 0.6543, IoU.bus: 0.9400, IoU.cabinet: 0.4281, IoU.car: 0.9140, IoU.cat: 0.9387, IoU.ceiling: 0.6066, IoU.chair: 0.5892, IoU.cloth: 0.2562, IoU.computer: 0.4217, IoU.cow: 0.9508, IoU.cup: 0.4446, IoU.curtain: 0.5605, IoU.dog: 0.9186, IoU.door: 0.3164, IoU.fence: 0.4424, IoU.floor: 0.7237, IoU.flower: 0.4164, IoU.food: 0.3579, IoU.grass: 0.8235, IoU.ground: 0.5579, IoU.horse: 0.9419, IoU.keyboard: 0.8626, IoU.light: 0.5752, IoU.motorbike: 0.8993, IoU.mountain: 0.5454, IoU.mouse: 0.7536, IoU.person: 0.9059, IoU.plate: 0.2785, IoU.platform: 0.5309, IoU.pottedplant: 0.8092, IoU.road: 0.5185, IoU.rock: 0.5018, IoU.sheep: 0.9386, IoU.shelves: 0.3507, IoU.sidewalk: 0.2714, IoU.sign: 0.4585, IoU.sky: 0.9495, IoU.snow: 0.7446, IoU.sofa: 0.5950, IoU.table: 0.6721, IoU.track: 0.6938, IoU.train: 0.9221, IoU.tree: 0.8124, IoU.truck: 0.4047, IoU.tvmonitor: 0.8698, IoU.wall: 0.7039, IoU.water: 0.9173, IoU.window: 0.4408, IoU.wood: 0.2860, Acc.aeroplane: 0.9537, Acc.bag: 0.5454, Acc.bed: 0.4244, Acc.bedclothes: 0.6375, Acc.bench: 0.3045, Acc.bicycle: 0.9340, Acc.bird: 0.9690, Acc.boat: 0.9174, Acc.book: 0.6396, Acc.bottle: 0.9592, Acc.building: 0.7906, Acc.bus: 0.9719, Acc.cabinet: 0.6480, Acc.car: 0.9563, Acc.cat: 0.9803, Acc.ceiling: 0.7653, Acc.chair: 0.7815, Acc.cloth: 0.3724, Acc.computer: 0.5478, Acc.cow: 0.9711, Acc.cup: 0.5930, Acc.curtain: 0.7088, Acc.dog: 0.9703, Acc.door: 0.4936, Acc.fence: 0.5735, Acc.floor: 0.8576, Acc.flower: 0.6254, Acc.food: 0.4649, Acc.grass: 0.9126, Acc.ground: 0.7063, Acc.horse: 0.9753, Acc.keyboard: 0.9190, Acc.light: 0.7328, Acc.motorbike: 0.9484, Acc.mountain: 0.7349, Acc.mouse: 0.8345, Acc.person: 0.9607, Acc.plate: 0.3718, Acc.platform: 0.6862, Acc.pottedplant: 0.8883, Acc.road: 0.6823, Acc.rock: 0.5907, Acc.sheep: 0.9732, Acc.shelves: 0.5003, Acc.sidewalk: 0.5152, Acc.sign: 0.5741, Acc.sky: 0.9751, Acc.snow: 0.8665, Acc.sofa: 0.6962, Acc.table: 0.7861, Acc.track: 0.7969, Acc.train: 0.9631, Acc.tree: 0.9034, Acc.truck: 0.5288, Acc.tvmonitor: 0.9394, Acc.wall: 0.8194, Acc.water: 0.9571, Acc.window: 0.5732, Acc.wood: 0.4152
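The per-iteration `loss` reported throughout this log is consistent with the plain sum of the 30 logged decode.* terms, i.e. the classification, mask, and dice losses of the final decoder layer plus the nine auxiliary decoder layers d0-d8 (each term already scaled by its loss weight). The following minimal Python sketch illustrates that check with the values copied from the Iter [40000/40000] entry above; the 0.0001 gap against the logged total of 12.6853 is just the four-decimal rounding in the log.

# Sanity check: total loss == sum of the 30 logged decode.* components.
# Values copied from the Iter [40000/40000] log entry above.
final = (0.1265, 0.4020, 0.5502)         # decode.loss_cls / loss_mask / loss_dice
aux = [                                  # (loss_cls, loss_mask, loss_dice) for d0..d8
    (1.4986, 0.4432, 0.6407),            # d0
    (0.2881, 0.4164, 0.5805),            # d1
    (0.1914, 0.4063, 0.5648),            # d2
    (0.1552, 0.4034, 0.5595),            # d3
    (0.1484, 0.4027, 0.5561),            # d4
    (0.1444, 0.4010, 0.5500),            # d5
    (0.1323, 0.4019, 0.5516),            # d6
    (0.1353, 0.4010, 0.5519),            # d7
    (0.1267, 0.4008, 0.5543),            # d8
]
total = sum(final) + sum(sum(layer) for layer in aux)
print(round(total, 4))                   # 12.6852 (log reports 12.6853)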
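Likewise, the mIoU of 64.0 in the summary matches the unweighted mean of the 59 per-class IoU values in the table above. The short sketch below assumes mIoU is that plain average over classes (which is how mmseg aggregates it); the list follows the order of the table.

# Reproduce the reported mIoU from the per-class IoU column.
per_class_iou = [
    90.47, 41.19, 31.96, 44.46, 25.01, 84.16, 94.15, 84.44, 51.61, 86.76,
    65.43, 94.00, 42.81, 91.40, 93.87, 60.66, 58.92, 25.62, 42.17, 95.08,
    44.46, 56.05, 91.86, 31.64, 44.24, 72.37, 41.64, 35.79, 82.35, 55.79,
    94.19, 86.26, 57.52, 89.93, 54.54, 75.36, 90.59, 27.85, 53.09, 80.92,
    51.85, 50.18, 93.86, 35.07, 27.14, 45.85, 94.95, 74.46, 59.50, 67.21,
    69.38, 92.21, 81.24, 40.47, 86.98, 70.39, 91.73, 44.08, 28.60,
]
assert len(per_class_iou) == 59
print(round(sum(per_class_iou) / len(per_class_iou), 2))  # 64.0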
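For reusing this run, the final best checkpoint (best_mIoU_iter_40000.pth, mIoU 0.6400) can be loaded with mmseg's standard single-image inference API. This is only a sketch: the config and checkpoint paths below are illustrative placeholders derived from the work_dir shown in the log, the input image 'demo.jpg' is hypothetical, and the ViT-Adapter custom modules must be importable/registered (as in the repository's own test scripts) before the model can be built.

from mmseg.apis import init_segmentor, inference_segmentor

# Hypothetical paths; adapt to the actual repository layout and work_dir.
config = 'mask2former_beit_adapter_base_480_40k_pascal_context_59_ss.py'
checkpoint = ('work_dirs/mask2former_beit_adapter_base_480_40k_pascal_context_59_ss/'
              'best_mIoU_iter_40000.pth')

model = init_segmentor(config, checkpoint, device='cuda:0')   # build model + load weights
result = inference_segmentor(model, 'demo.jpg')               # per-pixel class ids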