diff --git "a/mask2former_beit_adapter_large_896_80k_cityscapes_ss.log" "b/mask2former_beit_adapter_large_896_80k_cityscapes_ss.log" new file mode 100644--- /dev/null +++ "b/mask2former_beit_adapter_large_896_80k_cityscapes_ss.log" @@ -0,0 +1,17478 @@ +2022-05-09 17:25:05,566 - mmseg - INFO - Environment info: +------------------------------------------------------------ +sys.platform: linux +Python: 3.7.11 (default, Jul 27 2021, 14:32:16) [GCC 7.5.0] +CUDA available: True +GPU 0,1,2,3,4,5,6,7: A100-SXM-80GB +CUDA_HOME: /mnt/lustre/share/cuda-11.1 +NVCC: Build cuda_11.1.TC455_06.29069683_0 +GCC: gcc (GCC) 5.4.0 +PyTorch: 1.9.0+cu111 +PyTorch compiling details: PyTorch built with: + - GCC 7.3 + - C++ Version: 201402 + - Intel(R) Math Kernel Library Version 2020.0.0 Product Build 20191122 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v2.1.2 (Git Hash 98be7e8afa711dc9b66c8ff3504129cb82013cdb) + - OpenMP 201511 (a.k.a. OpenMP 4.5) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.1 + - NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86 + - CuDNN 8.0.5 + - Magma 2.5.2 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.1, CUDNN_VERSION=8.0.5, CXX_COMPILER=/opt/rh/devtoolset-7/root/usr/bin/c++, CXX_FLAGS= -Wno-deprecated -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -fopenmp -DNDEBUG -DUSE_KINETO -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wno-narrowing -Wall -Wextra -Werror=return-type -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-sign-compare -Wno-unused-parameter -Wno-unused-variable -Wno-unused-function -Wno-unused-result -Wno-unused-local-typedefs -Wno-strict-overflow 
-Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=1.9.0, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, + +TorchVision: 0.10.0+cu111 +OpenCV: 4.5.5 +MMCV: 1.4.2 +MMCV Compiler: GCC 7.3 +MMCV CUDA Compiler: 11.1 +MMSegmentation: 0.20.2+ +------------------------------------------------------------ + +2022-05-09 17:25:05,567 - mmseg - INFO - Distributed training: True +2022-05-09 17:25:06,172 - mmseg - INFO - Config: +num_things_classes = 8 +num_stuff_classes = 11 +num_classes = 19 +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + type='EncoderDecoderMask2Former', + pretrained='pretrained/beit_large_patch16_224_pt22k_ft22k.pth', + backbone=dict( + type='BEiTAdapter', + patch_size=16, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + qkv_bias=True, + use_abs_pos_emb=False, + use_rel_pos_bias=True, + img_size=896, + init_values=1e-06, + drop_path_rate=0.3, + conv_inplane=64, + n_points=4, + deform_num_heads=16, + interact_with_ffn=True, + interact_ffn_ratio=0.25, + interact_deform_ratio=0.5, + extract_with_ffn=True, + extract_ffn_ratio=0.25, + extract_deform_ratio=0.5, + num_extract_block=2, + add_vit_feature=True, + interact_indexes=[[0, 5], [6, 11], [12, 17], [18, 23]]), + decode_head=dict( + type='Mask2FormerHead', + in_channels=[1024, 1024, 1024, 1024], + feat_channels=1024, + out_channels=1024, + in_index=[0, 1, 2, 3], + num_things_classes=8, + num_stuff_classes=11, + num_queries=100, + num_transformer_feat_level=3, + pixel_decoder=dict( + 
type='MSDeformAttnPixelDecoder', + num_outs=3, + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU'), + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', + embed_dims=1024, + num_heads=32, + num_levels=3, + num_points=4, + im2col_step=64, + dropout=0.0, + batch_first=False, + norm_cfg=None, + init_cfg=None), + ffn_cfgs=dict( + type='FFN', + embed_dims=1024, + feedforward_channels=4096, + num_fcs=2, + ffn_drop=0.0, + act_cfg=dict(type='ReLU', inplace=True)), + operation_order=('self_attn', 'norm', 'ffn', 'norm')), + init_cfg=None), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=512, normalize=True), + init_cfg=None), + enforce_decoder_input_project=False, + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=512, normalize=True), + transformer_decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=9, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=1024, + num_heads=32, + attn_drop=0.0, + proj_drop=0.0, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=1024, + feedforward_channels=4096, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.0, + dropout_layer=None, + add_identity=True), + feedforward_channels=4096, + operation_order=('cross_attn', 'norm', 'self_attn', 'norm', + 'ffn', 'norm')), + init_cfg=None), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=2.0, + reduction='mean', + class_weight=[ + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.1 + ]), + loss_mask=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + reduction='mean', + loss_weight=5.0), + loss_dice=dict( + type='DiceLoss', + use_sigmoid=True, + activate=True, + reduction='mean', + 
naive_dice=True, + eps=1.0, + loss_weight=5.0)), + train_cfg=dict( + num_points=12544, + oversample_ratio=3.0, + importance_sample_ratio=0.75, + assigner=dict( + type='MaskHungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=2.0), + mask_cost=dict( + type='CrossEntropyLossCost', weight=5.0, use_sigmoid=True), + dice_cost=dict( + type='DiceCost', weight=5.0, pred_act=True, eps=1.0)), + sampler=dict(type='MaskPseudoSampler')), + test_cfg=dict( + panoptic_on=True, + semantic_on=False, + instance_on=True, + max_per_image=100, + iou_thr=0.8, + filter_low_score=True, + mode='slide', + crop_size=(896, 896), + stride=(512, 512)), + init_cfg=None) +find_unused_parameters = True +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +crop_size = (896, 896) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='Resize', img_scale=(2048, 1024), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(896, 896), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(896, 896), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + 
type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/train', + ann_dir='gtFine/train', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict( + type='Resize', img_scale=(2048, 1024), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(896, 896), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(896, 896), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) + ]), + val=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/val', + ann_dir='gtFine/val', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ]), + test=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/val', + ann_dir='gtFine/val', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ])) +log_config = dict( + interval=50, hooks=[dict(type='TextLoggerHook', by_epoch=False)]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = 
'work_dirs/mask2former_beit_adapter_large_896_80k_mapillary_ss/iter_80000.pth' +resume_from = None +workflow = [('train', 1)] +cudnn_benchmark = True +optimizer = dict( + type='AdamW', + lr=2e-05, + betas=(0.9, 0.999), + weight_decay=0.05, + constructor='LayerDecayOptimizerConstructor', + paramwise_cfg=dict(num_layers=24, layer_decay_rate=0.9)) +optimizer_config = dict() +lr_config = dict( + policy='poly', + warmup='linear', + warmup_iters=1500, + warmup_ratio=1e-06, + power=1.0, + min_lr=0.0, + by_epoch=False) +runner = dict(type='IterBasedRunner', max_iters=80000) +checkpoint_config = dict(by_epoch=False, interval=1000, max_keep_ckpts=1) +evaluation = dict( + interval=1000, metric='mIoU', pre_eval=True, save_best='mIoU') +work_dir = './work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss' +gpu_ids = range(0, 16) +auto_resume = False + +2022-05-09 17:25:13,813 - mmseg - INFO - Set random seed to 1139839196, deterministic: False +2022-05-09 17:25:53,734 - mmseg - WARNING - The model and loaded state dict do not match exactly + +unexpected key in source state_dict: fc_norm.weight, fc_norm.bias, head.weight, head.bias + +missing keys in source state_dict: blocks.0.attn.relative_position_index, blocks.1.attn.relative_position_index, blocks.2.attn.relative_position_index, blocks.3.attn.relative_position_index, blocks.4.attn.relative_position_index, blocks.5.attn.relative_position_index, blocks.6.attn.relative_position_index, blocks.7.attn.relative_position_index, blocks.8.attn.relative_position_index, blocks.9.attn.relative_position_index, blocks.10.attn.relative_position_index, blocks.11.attn.relative_position_index, blocks.12.attn.relative_position_index, blocks.13.attn.relative_position_index, blocks.14.attn.relative_position_index, blocks.15.attn.relative_position_index, blocks.16.attn.relative_position_index, blocks.17.attn.relative_position_index, blocks.18.attn.relative_position_index, blocks.19.attn.relative_position_index, 
blocks.20.attn.relative_position_index, blocks.21.attn.relative_position_index, blocks.22.attn.relative_position_index, blocks.23.attn.relative_position_index + +Name of parameter - Initialization information + +backbone.cls_token - torch.Size([1, 1, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.level_embed - torch.Size([3, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.weight - torch.Size([1024, 3, 16, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.v_bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.weight - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.bias - torch.Size([1024]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.4.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.5.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.q_bias - torch.Size([1024]): 
+The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_2 - torch.Size([1024]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.9.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.relative_position_bias_table - 
torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.bias - torch.Size([1024]): +The value is 
the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.gamma_1 - torch.Size([1024]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm2.weight - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.14.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.15.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.16.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.16.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.17.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.18.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.relative_position_bias_table - 
torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm1.bias - torch.Size([1024]): +The value is 
the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.gamma_1 - torch.Size([1024]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm2.weight - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.23.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.0.weight - torch.Size([64, 3, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.1.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.1.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.3.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.4.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.conv_branch.stem.4.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.6.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.7.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.7.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.0.weight - torch.Size([128, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.1.weight - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.1.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.0.weight - torch.Size([256, 128, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.0.weight - torch.Size([256, 256, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.conv_branch.fc1.weight - torch.Size([1024, 64, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc2.weight - torch.Size([1024, 128, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc3.weight - torch.Size([1024, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc3.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc4.weight - torch.Size([1024, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.sampling_offsets.weight - 
torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` 
of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.dwconv.dwconv.weight 
- torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.1.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value 
is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.1.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.2.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.3.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.attention_weights.bias - torch.Size([192]): +The value is 
the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn_norm.bias - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.extract_blocks.1.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.weight - torch.Size([1024, 1024, 2, 2]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.norm4.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.weight - torch.Size([19, 1024, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.bias - torch.Size([19]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.0.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.1.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined 
`init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.2.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.pixel_decoder.encoder.layers.1.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` 
in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.pixel_decoder.encoder.layers.3.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined 
`init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.pixel_decoder.level_encoding.weight - torch.Size([3, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.lateral_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.conv.weight - torch.Size([1024, 1024, 3, 3]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.output_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.mask_feature.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.mask_feature.bias - torch.Size([1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.2.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.bias - torch.Size([1024]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead 
+ +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.bias - torch.Size([1024]): +The value 
is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` 
in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by 
user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.weight 
- torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_embed.weight - torch.Size([100, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_feat.weight - torch.Size([100, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.level_embed.weight - torch.Size([3, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.weight - torch.Size([20, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.bias - torch.Size([20]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former +2022-05-09 17:26:24,916 - mmseg - INFO - 
EncoderDecoderMask2Former( + (backbone): BEiTAdapter( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 1024, kernel_size=(16, 16), stride=(16, 16)) + ) + (pos_drop): Dropout(p=0.0, inplace=False) + (blocks): ModuleList( + (0): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): Identity() + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (1): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.013043479062616825) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (2): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.02608695812523365) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, 
bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (3): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.03913043811917305) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (4): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0521739162504673) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (5): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.06521739810705185) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, 
out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (6): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0782608762383461) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (7): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.09130435436964035) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (8): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1043478325009346) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) 
+ ) + ) + (9): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.11739131063222885) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (10): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1304347962141037) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (11): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.14347827434539795) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (12): Block( + (norm1): LayerNorm((1024,), eps=1e-06, 
elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1565217524766922) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (13): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.16956523060798645) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (14): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1826087087392807) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (15): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): 
Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.19565218687057495) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (16): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2086956650018692) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (17): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.22173914313316345) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (18): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + 
(attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2347826212644577) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (19): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.24782609939575195) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (20): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.260869562625885) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (21): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): 
Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.27391305565834045) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (22): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2869565188884735) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (23): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.30000001192092896) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + (conv_branch): ConvBranch( + (stem): Sequential( + (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(64, 64, 
kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + (6): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (7): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (8): ReLU(inplace=True) + (9): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) + ) + (conv2): Sequential( + (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv3): Sequential( + (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv4): Sequential( + (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (fc1): Conv2d(64, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc2): Conv2d(128, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc4): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (interact_blocks): Sequential( + (0): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): 
LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (1): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): 
Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (2): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (3): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, 
out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + ) + (extract_blocks): Sequential( + (0): ExtractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + 
(drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + (1): ExtractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + ) + (up): ConvTranspose2d(1024, 1024, kernel_size=(2, 2), stride=(2, 2)) + (norm1): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm2): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm3): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm4): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + ) + (decode_head): Mask2FormerHead( + input_transform=multiple_select, ignore_index=255, align_corners=False + (loss_decode): CrossEntropyLoss(avg_non_ignore=False) + (conv_seg): Conv2d(1024, 19, kernel_size=(1, 1), stride=(1, 1)) + (dropout): Dropout2d(p=0.1, inplace=False) + (pixel_decoder): MSDeformAttnPixelDecoder( + (input_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, 
eps=1e-05, affine=True) + ) + (1): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + (2): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + ) + (encoder): DetrTransformerEncoder( + (layers): ModuleList( + (0): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): 
Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): 
Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, 
bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + ) + (postional_encoding): SinePositionalEncoding(num_feats=512, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (level_encoding): Embedding(3, 1024) + (lateral_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + ) + (output_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + (activate): ReLU(inplace=True) + ) + ) + (mask_feature): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (transformer_decoder): DetrTransformerDecoder( + (layers): ModuleList( + (0): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + 
(1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + 
(2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): 
Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (6): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, 
out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (7): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, 
elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (8): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + (post_norm): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + (decoder_input_projs): ModuleList( + (0): Identity() + (1): Identity() + (2): Identity() + ) + (decoder_positional_encoding): SinePositionalEncoding(num_feats=512, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (query_embed): Embedding(100, 1024) + (query_feat): Embedding(100, 1024) + (level_embed): Embedding(3, 1024) + (cls_embed): Linear(in_features=1024, out_features=20, bias=True) + (mask_embed): Sequential( + (0): Linear(in_features=1024, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Linear(in_features=1024, out_features=1024, bias=True) + (3): ReLU(inplace=True) + (4): Linear(in_features=1024, out_features=1024, bias=True) + ) + 
(loss_cls): CrossEntropyLoss(avg_non_ignore=False) + (loss_mask): CrossEntropyLoss(avg_non_ignore=False) + (loss_dice): DiceLoss() + ) +) +2022-05-09 17:26:25,000 - mmseg - INFO - Loaded 2975 images +2022-05-09 17:26:26,884 - mmseg - INFO - Loaded 500 images +2022-05-09 17:26:26,885 - mmseg - INFO - load checkpoint from local path: work_dirs/mask2former_beit_adapter_large_896_80k_mapillary_ss/iter_80000.pth +2022-05-09 17:27:24,924 - mmseg - INFO - Start running, host: chenzhe.vendor@SH-IDC1-10-140-1-143, work_dir: /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss +2022-05-09 17:27:24,930 - mmseg - INFO - Hooks will be executed in the following order: +before_run: +(VERY_HIGH ) PolyLrUpdaterHook +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_epoch: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_iter: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook + -------------------- +after_train_iter: +(ABOVE_NORMAL) OptimizerHook +(NORMAL ) CheckpointHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +after_train_epoch: +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_epoch: +(LOW ) IterTimerHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_epoch: +(VERY_LOW ) TextLoggerHook + -------------------- +after_run: +(VERY_LOW ) TextLoggerHook + -------------------- +2022-05-09 17:27:24,931 - mmseg - INFO - workflow: [('train', 1)], max: 80000 iters +2022-05-09 17:27:24,932 - mmseg - INFO - Checkpoints will be saved to 
/mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss by HardDiskBackend. +2022-05-09 17:29:39,669 - mmseg - INFO - Iter [50/80000] lr: 4.688e-08, eta: 2 days, 1:15:37, time: 2.218, data_time: 0.023, memory: 64699, decode.loss_cls: 0.3989, decode.loss_mask: 0.4912, decode.loss_dice: 0.9788, decode.d0.loss_cls: 0.5731, decode.d0.loss_mask: 0.5769, decode.d0.loss_dice: 1.0959, decode.d1.loss_cls: 0.3978, decode.d1.loss_mask: 0.5063, decode.d1.loss_dice: 1.0255, decode.d2.loss_cls: 0.3960, decode.d2.loss_mask: 0.4988, decode.d2.loss_dice: 1.0022, decode.d3.loss_cls: 0.3992, decode.d3.loss_mask: 0.4934, decode.d3.loss_dice: 0.9838, decode.d4.loss_cls: 0.3953, decode.d4.loss_mask: 0.4945, decode.d4.loss_dice: 0.9853, decode.d5.loss_cls: 0.3972, decode.d5.loss_mask: 0.4955, decode.d5.loss_dice: 0.9791, decode.d6.loss_cls: 0.3979, decode.d6.loss_mask: 0.4924, decode.d6.loss_dice: 0.9787, decode.d7.loss_cls: 0.3889, decode.d7.loss_mask: 0.4936, decode.d7.loss_dice: 0.9796, decode.d8.loss_cls: 0.3956, decode.d8.loss_mask: 0.4927, decode.d8.loss_dice: 0.9775, loss: 19.1617 +2022-05-09 17:31:09,417 - mmseg - INFO - Iter [100/80000] lr: 9.465e-08, eta: 1 day, 20:29:22, time: 1.791, data_time: 0.017, memory: 64699, decode.loss_cls: 0.3671, decode.loss_mask: 0.4318, decode.loss_dice: 0.9777, decode.d0.loss_cls: 0.5739, decode.d0.loss_mask: 0.5197, decode.d0.loss_dice: 1.0871, decode.d1.loss_cls: 0.3811, decode.d1.loss_mask: 0.4518, decode.d1.loss_dice: 1.0211, decode.d2.loss_cls: 0.3739, decode.d2.loss_mask: 0.4425, decode.d2.loss_dice: 0.9973, decode.d3.loss_cls: 0.3724, decode.d3.loss_mask: 0.4370, decode.d3.loss_dice: 0.9780, decode.d4.loss_cls: 0.3657, decode.d4.loss_mask: 0.4362, decode.d4.loss_dice: 0.9790, decode.d5.loss_cls: 0.3759, decode.d5.loss_mask: 0.4356, decode.d5.loss_dice: 0.9771, decode.d6.loss_cls: 0.3720, decode.d6.loss_mask: 0.4329, decode.d6.loss_dice: 0.9742, decode.d7.loss_cls: 0.3694, 
decode.d7.loss_mask: 0.4309, decode.d7.loss_dice: 0.9735, decode.d8.loss_cls: 0.3815, decode.d8.loss_mask: 0.4301, decode.d8.loss_dice: 0.9704, loss: 18.3169 +2022-05-09 17:32:38,181 - mmseg - INFO - Iter [150/80000] lr: 1.424e-07, eta: 1 day, 18:47:27, time: 1.779, data_time: 0.021, memory: 64699, decode.loss_cls: 0.3173, decode.loss_mask: 0.3843, decode.loss_dice: 0.9247, decode.d0.loss_cls: 0.5381, decode.d0.loss_mask: 0.4568, decode.d0.loss_dice: 1.0351, decode.d1.loss_cls: 0.3308, decode.d1.loss_mask: 0.3988, decode.d1.loss_dice: 0.9772, decode.d2.loss_cls: 0.3269, decode.d2.loss_mask: 0.3965, decode.d2.loss_dice: 0.9478, decode.d3.loss_cls: 0.3212, decode.d3.loss_mask: 0.3915, decode.d3.loss_dice: 0.9366, decode.d4.loss_cls: 0.3321, decode.d4.loss_mask: 0.3885, decode.d4.loss_dice: 0.9318, decode.d5.loss_cls: 0.3165, decode.d5.loss_mask: 0.3892, decode.d5.loss_dice: 0.9312, decode.d6.loss_cls: 0.3125, decode.d6.loss_mask: 0.3877, decode.d6.loss_dice: 0.9327, decode.d7.loss_cls: 0.3192, decode.d7.loss_mask: 0.3865, decode.d7.loss_dice: 0.9282, decode.d8.loss_cls: 0.3124, decode.d8.loss_mask: 0.3872, decode.d8.loss_dice: 0.9286, loss: 16.8680 +2022-05-09 17:34:09,525 - mmseg - INFO - Iter [200/80000] lr: 1.900e-07, eta: 1 day, 18:11:42, time: 1.827, data_time: 0.062, memory: 64699, decode.loss_cls: 0.3106, decode.loss_mask: 0.3627, decode.loss_dice: 0.9168, decode.d0.loss_cls: 0.5284, decode.d0.loss_mask: 0.4193, decode.d0.loss_dice: 1.0347, decode.d1.loss_cls: 0.3304, decode.d1.loss_mask: 0.3741, decode.d1.loss_dice: 0.9616, decode.d2.loss_cls: 0.3327, decode.d2.loss_mask: 0.3658, decode.d2.loss_dice: 0.9316, decode.d3.loss_cls: 0.3238, decode.d3.loss_mask: 0.3624, decode.d3.loss_dice: 0.9191, decode.d4.loss_cls: 0.3186, decode.d4.loss_mask: 0.3634, decode.d4.loss_dice: 0.9169, decode.d5.loss_cls: 0.3158, decode.d5.loss_mask: 0.3636, decode.d5.loss_dice: 0.9151, decode.d6.loss_cls: 0.3197, decode.d6.loss_mask: 0.3611, decode.d6.loss_dice: 0.9089, 
decode.d7.loss_cls: 0.3203, decode.d7.loss_mask: 0.3622, decode.d7.loss_dice: 0.9163, decode.d8.loss_cls: 0.3143, decode.d8.loss_mask: 0.3636, decode.d8.loss_dice: 0.9093, loss: 16.4431 +2022-05-09 17:35:37,347 - mmseg - INFO - Iter [250/80000] lr: 2.376e-07, eta: 1 day, 17:31:22, time: 1.758, data_time: 0.018, memory: 64699, decode.loss_cls: 0.3089, decode.loss_mask: 0.3618, decode.loss_dice: 0.8916, decode.d0.loss_cls: 0.5271, decode.d0.loss_mask: 0.4140, decode.d0.loss_dice: 1.0178, decode.d1.loss_cls: 0.3170, decode.d1.loss_mask: 0.3712, decode.d1.loss_dice: 0.9420, decode.d2.loss_cls: 0.3064, decode.d2.loss_mask: 0.3665, decode.d2.loss_dice: 0.9159, decode.d3.loss_cls: 0.3066, decode.d3.loss_mask: 0.3632, decode.d3.loss_dice: 0.9012, decode.d4.loss_cls: 0.3073, decode.d4.loss_mask: 0.3642, decode.d4.loss_dice: 0.8996, decode.d5.loss_cls: 0.3065, decode.d5.loss_mask: 0.3662, decode.d5.loss_dice: 0.8941, decode.d6.loss_cls: 0.3066, decode.d6.loss_mask: 0.3625, decode.d6.loss_dice: 0.8922, decode.d7.loss_cls: 0.3024, decode.d7.loss_mask: 0.3633, decode.d7.loss_dice: 0.8890, decode.d8.loss_cls: 0.3078, decode.d8.loss_mask: 0.3622, decode.d8.loss_dice: 0.8899, loss: 16.1249 +2022-05-09 17:37:05,075 - mmseg - INFO - Iter [300/80000] lr: 2.851e-07, eta: 1 day, 17:03:17, time: 1.755, data_time: 0.020, memory: 64699, decode.loss_cls: 0.3154, decode.loss_mask: 0.3624, decode.loss_dice: 0.8953, decode.d0.loss_cls: 0.5194, decode.d0.loss_mask: 0.4132, decode.d0.loss_dice: 1.0166, decode.d1.loss_cls: 0.3301, decode.d1.loss_mask: 0.3686, decode.d1.loss_dice: 0.9423, decode.d2.loss_cls: 0.3295, decode.d2.loss_mask: 0.3667, decode.d2.loss_dice: 0.9194, decode.d3.loss_cls: 0.3159, decode.d3.loss_mask: 0.3670, decode.d3.loss_dice: 0.9085, decode.d4.loss_cls: 0.3259, decode.d4.loss_mask: 0.3653, decode.d4.loss_dice: 0.8993, decode.d5.loss_cls: 0.3269, decode.d5.loss_mask: 0.3645, decode.d5.loss_dice: 0.8977, decode.d6.loss_cls: 0.3151, decode.d6.loss_mask: 0.3651, 
decode.d6.loss_dice: 0.8944, decode.d7.loss_cls: 0.3214, decode.d7.loss_mask: 0.3621, decode.d7.loss_dice: 0.8964, decode.d8.loss_cls: 0.3118, decode.d8.loss_mask: 0.3627, decode.d8.loss_dice: 0.8945, loss: 16.2731 +2022-05-09 17:38:33,395 - mmseg - INFO - Iter [350/80000] lr: 3.326e-07, eta: 1 day, 16:45:01, time: 1.766, data_time: 0.018, memory: 64699, decode.loss_cls: 0.3061, decode.loss_mask: 0.3624, decode.loss_dice: 0.8845, decode.d0.loss_cls: 0.5599, decode.d0.loss_mask: 0.4101, decode.d0.loss_dice: 0.9967, decode.d1.loss_cls: 0.3215, decode.d1.loss_mask: 0.3723, decode.d1.loss_dice: 0.9282, decode.d2.loss_cls: 0.3239, decode.d2.loss_mask: 0.3672, decode.d2.loss_dice: 0.9062, decode.d3.loss_cls: 0.3121, decode.d3.loss_mask: 0.3640, decode.d3.loss_dice: 0.8911, decode.d4.loss_cls: 0.3150, decode.d4.loss_mask: 0.3626, decode.d4.loss_dice: 0.8881, decode.d5.loss_cls: 0.3106, decode.d5.loss_mask: 0.3649, decode.d5.loss_dice: 0.8886, decode.d6.loss_cls: 0.3117, decode.d6.loss_mask: 0.3626, decode.d6.loss_dice: 0.8808, decode.d7.loss_cls: 0.3098, decode.d7.loss_mask: 0.3620, decode.d7.loss_dice: 0.8911, decode.d8.loss_cls: 0.3094, decode.d8.loss_mask: 0.3619, decode.d8.loss_dice: 0.8849, loss: 16.1104 +2022-05-09 17:40:03,882 - mmseg - INFO - Iter [400/80000] lr: 3.800e-07, eta: 1 day, 16:38:10, time: 1.810, data_time: 0.066, memory: 64699, decode.loss_cls: 0.3089, decode.loss_mask: 0.3460, decode.loss_dice: 0.8830, decode.d0.loss_cls: 0.5397, decode.d0.loss_mask: 0.3913, decode.d0.loss_dice: 0.9911, decode.d1.loss_cls: 0.3163, decode.d1.loss_mask: 0.3554, decode.d1.loss_dice: 0.9293, decode.d2.loss_cls: 0.3058, decode.d2.loss_mask: 0.3489, decode.d2.loss_dice: 0.9043, decode.d3.loss_cls: 0.3055, decode.d3.loss_mask: 0.3488, decode.d3.loss_dice: 0.8919, decode.d4.loss_cls: 0.3010, decode.d4.loss_mask: 0.3484, decode.d4.loss_dice: 0.8909, decode.d5.loss_cls: 0.2993, decode.d5.loss_mask: 0.3488, decode.d5.loss_dice: 0.8876, decode.d6.loss_cls: 0.3046, 
decode.d6.loss_mask: 0.3480, decode.d6.loss_dice: 0.8818, decode.d7.loss_cls: 0.2992, decode.d7.loss_mask: 0.3468, decode.d7.loss_dice: 0.8861, decode.d8.loss_cls: 0.3041, decode.d8.loss_mask: 0.3468, decode.d8.loss_dice: 0.8851, loss: 15.8450 +2022-05-09 17:41:32,310 - mmseg - INFO - Iter [450/80000] lr: 4.274e-07, eta: 1 day, 16:26:25, time: 1.769, data_time: 0.018, memory: 64699, decode.loss_cls: 0.3139, decode.loss_mask: 0.3537, decode.loss_dice: 0.8853, decode.d0.loss_cls: 0.5431, decode.d0.loss_mask: 0.4038, decode.d0.loss_dice: 0.9952, decode.d1.loss_cls: 0.3068, decode.d1.loss_mask: 0.3691, decode.d1.loss_dice: 0.9311, decode.d2.loss_cls: 0.3174, decode.d2.loss_mask: 0.3620, decode.d2.loss_dice: 0.9070, decode.d3.loss_cls: 0.3096, decode.d3.loss_mask: 0.3585, decode.d3.loss_dice: 0.8991, decode.d4.loss_cls: 0.3114, decode.d4.loss_mask: 0.3565, decode.d4.loss_dice: 0.8952, decode.d5.loss_cls: 0.3141, decode.d5.loss_mask: 0.3559, decode.d5.loss_dice: 0.8907, decode.d6.loss_cls: 0.3087, decode.d6.loss_mask: 0.3550, decode.d6.loss_dice: 0.8851, decode.d7.loss_cls: 0.3163, decode.d7.loss_mask: 0.3540, decode.d7.loss_dice: 0.8861, decode.d8.loss_cls: 0.3114, decode.d8.loss_mask: 0.3548, decode.d8.loss_dice: 0.8864, loss: 16.0371 +2022-05-09 17:43:01,546 - mmseg - INFO - Iter [500/80000] lr: 4.747e-07, eta: 1 day, 16:18:51, time: 1.784, data_time: 0.018, memory: 64699, decode.loss_cls: 0.3023, decode.loss_mask: 0.3418, decode.loss_dice: 0.8661, decode.d0.loss_cls: 0.5488, decode.d0.loss_mask: 0.3850, decode.d0.loss_dice: 0.9747, decode.d1.loss_cls: 0.3179, decode.d1.loss_mask: 0.3495, decode.d1.loss_dice: 0.9059, decode.d2.loss_cls: 0.3053, decode.d2.loss_mask: 0.3438, decode.d2.loss_dice: 0.8865, decode.d3.loss_cls: 0.2904, decode.d3.loss_mask: 0.3430, decode.d3.loss_dice: 0.8722, decode.d4.loss_cls: 0.3034, decode.d4.loss_mask: 0.3438, decode.d4.loss_dice: 0.8742, decode.d5.loss_cls: 0.3001, decode.d5.loss_mask: 0.3438, decode.d5.loss_dice: 0.8724, 
decode.d6.loss_cls: 0.2992, decode.d6.loss_mask: 0.3442, decode.d6.loss_dice: 0.8699, decode.d7.loss_cls: 0.3002, decode.d7.loss_mask: 0.3421, decode.d7.loss_dice: 0.8622, decode.d8.loss_cls: 0.2970, decode.d8.loss_mask: 0.3412, decode.d8.loss_dice: 0.8656, loss: 15.5924 +2022-05-09 17:44:30,533 - mmseg - INFO - Iter [550/80000] lr: 5.219e-07, eta: 1 day, 16:11:37, time: 1.778, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2946, decode.loss_mask: 0.3413, decode.loss_dice: 0.8860, decode.d0.loss_cls: 0.5261, decode.d0.loss_mask: 0.3845, decode.d0.loss_dice: 0.9956, decode.d1.loss_cls: 0.3062, decode.d1.loss_mask: 0.3526, decode.d1.loss_dice: 0.9313, decode.d2.loss_cls: 0.3003, decode.d2.loss_mask: 0.3476, decode.d2.loss_dice: 0.9077, decode.d3.loss_cls: 0.2928, decode.d3.loss_mask: 0.3445, decode.d3.loss_dice: 0.8951, decode.d4.loss_cls: 0.2997, decode.d4.loss_mask: 0.3452, decode.d4.loss_dice: 0.8900, decode.d5.loss_cls: 0.2994, decode.d5.loss_mask: 0.3435, decode.d5.loss_dice: 0.8893, decode.d6.loss_cls: 0.2956, decode.d6.loss_mask: 0.3439, decode.d6.loss_dice: 0.8845, decode.d7.loss_cls: 0.2969, decode.d7.loss_mask: 0.3419, decode.d7.loss_dice: 0.8883, decode.d8.loss_cls: 0.2948, decode.d8.loss_mask: 0.3419, decode.d8.loss_dice: 0.8869, loss: 15.7477 +2022-05-09 17:46:02,820 - mmseg - INFO - Iter [600/80000] lr: 5.691e-07, eta: 1 day, 16:13:00, time: 1.848, data_time: 0.066, memory: 64699, decode.loss_cls: 0.2667, decode.loss_mask: 0.3559, decode.loss_dice: 0.8711, decode.d0.loss_cls: 0.5149, decode.d0.loss_mask: 0.3940, decode.d0.loss_dice: 0.9669, decode.d1.loss_cls: 0.2964, decode.d1.loss_mask: 0.3593, decode.d1.loss_dice: 0.9046, decode.d2.loss_cls: 0.2827, decode.d2.loss_mask: 0.3579, decode.d2.loss_dice: 0.8906, decode.d3.loss_cls: 0.2790, decode.d3.loss_mask: 0.3570, decode.d3.loss_dice: 0.8754, decode.d4.loss_cls: 0.2752, decode.d4.loss_mask: 0.3569, decode.d4.loss_dice: 0.8735, decode.d5.loss_cls: 0.2733, decode.d5.loss_mask: 0.3561, 
decode.d5.loss_dice: 0.8755, decode.d6.loss_cls: 0.2709, decode.d6.loss_mask: 0.3560, decode.d6.loss_dice: 0.8714, decode.d7.loss_cls: 0.2744, decode.d7.loss_mask: 0.3565, decode.d7.loss_dice: 0.8728, decode.d8.loss_cls: 0.2728, decode.d8.loss_mask: 0.3559, decode.d8.loss_dice: 0.8718, loss: 15.4857 +2022-05-09 17:47:31,846 - mmseg - INFO - Iter [650/80000] lr: 6.162e-07, eta: 1 day, 16:07:07, time: 1.780, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2904, decode.loss_mask: 0.3279, decode.loss_dice: 0.8664, decode.d0.loss_cls: 0.5156, decode.d0.loss_mask: 0.3655, decode.d0.loss_dice: 0.9676, decode.d1.loss_cls: 0.3072, decode.d1.loss_mask: 0.3370, decode.d1.loss_dice: 0.9047, decode.d2.loss_cls: 0.2986, decode.d2.loss_mask: 0.3306, decode.d2.loss_dice: 0.8793, decode.d3.loss_cls: 0.2946, decode.d3.loss_mask: 0.3278, decode.d3.loss_dice: 0.8721, decode.d4.loss_cls: 0.2916, decode.d4.loss_mask: 0.3279, decode.d4.loss_dice: 0.8696, decode.d5.loss_cls: 0.2914, decode.d5.loss_mask: 0.3271, decode.d5.loss_dice: 0.8638, decode.d6.loss_cls: 0.2878, decode.d6.loss_mask: 0.3279, decode.d6.loss_dice: 0.8641, decode.d7.loss_cls: 0.2891, decode.d7.loss_mask: 0.3269, decode.d7.loss_dice: 0.8649, decode.d8.loss_cls: 0.2942, decode.d8.loss_mask: 0.3267, decode.d8.loss_dice: 0.8668, loss: 15.3049 +2022-05-09 17:49:00,970 - mmseg - INFO - Iter [700/80000] lr: 6.632e-07, eta: 1 day, 16:01:50, time: 1.780, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2844, decode.loss_mask: 0.3409, decode.loss_dice: 0.8982, decode.d0.loss_cls: 0.5411, decode.d0.loss_mask: 0.3778, decode.d0.loss_dice: 0.9924, decode.d1.loss_cls: 0.3104, decode.d1.loss_mask: 0.3477, decode.d1.loss_dice: 0.9290, decode.d2.loss_cls: 0.3007, decode.d2.loss_mask: 0.3405, decode.d2.loss_dice: 0.9032, decode.d3.loss_cls: 0.3075, decode.d3.loss_mask: 0.3398, decode.d3.loss_dice: 0.9001, decode.d4.loss_cls: 0.2893, decode.d4.loss_mask: 0.3420, decode.d4.loss_dice: 0.9005, decode.d5.loss_cls: 0.2896, 
decode.d5.loss_mask: 0.3433, decode.d5.loss_dice: 0.8948, decode.d6.loss_cls: 0.2812, decode.d6.loss_mask: 0.3410, decode.d6.loss_dice: 0.8987, decode.d7.loss_cls: 0.2891, decode.d7.loss_mask: 0.3401, decode.d7.loss_dice: 0.8903, decode.d8.loss_cls: 0.2826, decode.d8.loss_mask: 0.3404, decode.d8.loss_dice: 0.8961, loss: 15.7327 +2022-05-09 17:50:31,802 - mmseg - INFO - Iter [750/80000] lr: 7.102e-07, eta: 1 day, 16:00:22, time: 1.818, data_time: 0.068, memory: 64699, decode.loss_cls: 0.2880, decode.loss_mask: 0.3449, decode.loss_dice: 0.8700, decode.d0.loss_cls: 0.5250, decode.d0.loss_mask: 0.3823, decode.d0.loss_dice: 0.9712, decode.d1.loss_cls: 0.2999, decode.d1.loss_mask: 0.3525, decode.d1.loss_dice: 0.9101, decode.d2.loss_cls: 0.2987, decode.d2.loss_mask: 0.3495, decode.d2.loss_dice: 0.8916, decode.d3.loss_cls: 0.2945, decode.d3.loss_mask: 0.3455, decode.d3.loss_dice: 0.8795, decode.d4.loss_cls: 0.2842, decode.d4.loss_mask: 0.3472, decode.d4.loss_dice: 0.8729, decode.d5.loss_cls: 0.2887, decode.d5.loss_mask: 0.3468, decode.d5.loss_dice: 0.8752, decode.d6.loss_cls: 0.2786, decode.d6.loss_mask: 0.3439, decode.d6.loss_dice: 0.8722, decode.d7.loss_cls: 0.2853, decode.d7.loss_mask: 0.3452, decode.d7.loss_dice: 0.8763, decode.d8.loss_cls: 0.2891, decode.d8.loss_mask: 0.3453, decode.d8.loss_dice: 0.8687, loss: 15.5227 +2022-05-09 17:52:02,446 - mmseg - INFO - Iter [800/80000] lr: 7.572e-07, eta: 1 day, 15:58:32, time: 1.814, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2647, decode.loss_mask: 0.3367, decode.loss_dice: 0.8578, decode.d0.loss_cls: 0.5042, decode.d0.loss_mask: 0.3743, decode.d0.loss_dice: 0.9597, decode.d1.loss_cls: 0.2880, decode.d1.loss_mask: 0.3444, decode.d1.loss_dice: 0.8986, decode.d2.loss_cls: 0.2863, decode.d2.loss_mask: 0.3400, decode.d2.loss_dice: 0.8728, decode.d3.loss_cls: 0.2713, decode.d3.loss_mask: 0.3375, decode.d3.loss_dice: 0.8634, decode.d4.loss_cls: 0.2706, decode.d4.loss_mask: 0.3375, decode.d4.loss_dice: 0.8608, 
decode.d5.loss_cls: 0.2702, decode.d5.loss_mask: 0.3372, decode.d5.loss_dice: 0.8585, decode.d6.loss_cls: 0.2736, decode.d6.loss_mask: 0.3379, decode.d6.loss_dice: 0.8574, decode.d7.loss_cls: 0.2793, decode.d7.loss_mask: 0.3379, decode.d7.loss_dice: 0.8586, decode.d8.loss_cls: 0.2691, decode.d8.loss_mask: 0.3372, decode.d8.loss_dice: 0.8561, loss: 15.1417 +2022-05-09 17:53:31,975 - mmseg - INFO - Iter [850/80000] lr: 8.040e-07, eta: 1 day, 15:54:57, time: 1.790, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2963, decode.loss_mask: 0.3290, decode.loss_dice: 0.8754, decode.d0.loss_cls: 0.5249, decode.d0.loss_mask: 0.3662, decode.d0.loss_dice: 0.9777, decode.d1.loss_cls: 0.3254, decode.d1.loss_mask: 0.3365, decode.d1.loss_dice: 0.9107, decode.d2.loss_cls: 0.3146, decode.d2.loss_mask: 0.3315, decode.d2.loss_dice: 0.8936, decode.d3.loss_cls: 0.3005, decode.d3.loss_mask: 0.3303, decode.d3.loss_dice: 0.8812, decode.d4.loss_cls: 0.2937, decode.d4.loss_mask: 0.3282, decode.d4.loss_dice: 0.8797, decode.d5.loss_cls: 0.3047, decode.d5.loss_mask: 0.3281, decode.d5.loss_dice: 0.8783, decode.d6.loss_cls: 0.2993, decode.d6.loss_mask: 0.3301, decode.d6.loss_dice: 0.8732, decode.d7.loss_cls: 0.2961, decode.d7.loss_mask: 0.3304, decode.d7.loss_dice: 0.8759, decode.d8.loss_cls: 0.2987, decode.d8.loss_mask: 0.3296, decode.d8.loss_dice: 0.8727, loss: 15.5125 +2022-05-09 17:55:00,979 - mmseg - INFO - Iter [900/80000] lr: 8.509e-07, eta: 1 day, 15:50:52, time: 1.780, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2824, decode.loss_mask: 0.3327, decode.loss_dice: 0.8641, decode.d0.loss_cls: 0.5128, decode.d0.loss_mask: 0.3688, decode.d0.loss_dice: 0.9674, decode.d1.loss_cls: 0.2994, decode.d1.loss_mask: 0.3407, decode.d1.loss_dice: 0.8998, decode.d2.loss_cls: 0.2981, decode.d2.loss_mask: 0.3363, decode.d2.loss_dice: 0.8823, decode.d3.loss_cls: 0.2977, decode.d3.loss_mask: 0.3337, decode.d3.loss_dice: 0.8663, decode.d4.loss_cls: 0.3013, decode.d4.loss_mask: 0.3337, 
decode.d4.loss_dice: 0.8661, decode.d5.loss_cls: 0.2958, decode.d5.loss_mask: 0.3336, decode.d5.loss_dice: 0.8623, decode.d6.loss_cls: 0.2884, decode.d6.loss_mask: 0.3340, decode.d6.loss_dice: 0.8636, decode.d7.loss_cls: 0.2918, decode.d7.loss_mask: 0.3335, decode.d7.loss_dice: 0.8637, decode.d8.loss_cls: 0.2927, decode.d8.loss_mask: 0.3322, decode.d8.loss_dice: 0.8659, loss: 15.3411 +2022-05-09 17:56:32,194 - mmseg - INFO - Iter [950/80000] lr: 8.976e-07, eta: 1 day, 15:50:03, time: 1.824, data_time: 0.063, memory: 64699, decode.loss_cls: 0.2698, decode.loss_mask: 0.3299, decode.loss_dice: 0.8605, decode.d0.loss_cls: 0.4912, decode.d0.loss_mask: 0.3679, decode.d0.loss_dice: 0.9645, decode.d1.loss_cls: 0.2743, decode.d1.loss_mask: 0.3379, decode.d1.loss_dice: 0.9028, decode.d2.loss_cls: 0.2827, decode.d2.loss_mask: 0.3337, decode.d2.loss_dice: 0.8782, decode.d3.loss_cls: 0.2788, decode.d3.loss_mask: 0.3325, decode.d3.loss_dice: 0.8617, decode.d4.loss_cls: 0.2777, decode.d4.loss_mask: 0.3323, decode.d4.loss_dice: 0.8630, decode.d5.loss_cls: 0.2757, decode.d5.loss_mask: 0.3315, decode.d5.loss_dice: 0.8621, decode.d6.loss_cls: 0.2693, decode.d6.loss_mask: 0.3311, decode.d6.loss_dice: 0.8602, decode.d7.loss_cls: 0.2721, decode.d7.loss_mask: 0.3303, decode.d7.loss_dice: 0.8600, decode.d8.loss_cls: 0.2752, decode.d8.loss_mask: 0.3304, decode.d8.loss_dice: 0.8610, loss: 15.0983 +2022-05-09 17:58:02,400 - mmseg - INFO - Saving checkpoint at 1000 iterations +2022-05-09 17:58:37,860 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 17:58:37,860 - mmseg - INFO - Iter [1000/80000] lr: 9.443e-07, eta: 1 day, 16:34:38, time: 2.514, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2763, decode.loss_mask: 0.3421, decode.loss_dice: 0.8643, decode.d0.loss_cls: 0.4888, decode.d0.loss_mask: 0.3789, decode.d0.loss_dice: 0.9598, decode.d1.loss_cls: 0.2937, decode.d1.loss_mask: 0.3478, decode.d1.loss_dice: 0.8948, decode.d2.loss_cls: 
0.2911, decode.d2.loss_mask: 0.3435, decode.d2.loss_dice: 0.8749, decode.d3.loss_cls: 0.2926, decode.d3.loss_mask: 0.3432, decode.d3.loss_dice: 0.8667, decode.d4.loss_cls: 0.2823, decode.d4.loss_mask: 0.3434, decode.d4.loss_dice: 0.8668, decode.d5.loss_cls: 0.2840, decode.d5.loss_mask: 0.3413, decode.d5.loss_dice: 0.8654, decode.d6.loss_cls: 0.2844, decode.d6.loss_mask: 0.3428, decode.d6.loss_dice: 0.8615, decode.d7.loss_cls: 0.2773, decode.d7.loss_mask: 0.3416, decode.d7.loss_dice: 0.8597, decode.d8.loss_cls: 0.2845, decode.d8.loss_mask: 0.3426, decode.d8.loss_dice: 0.8568, loss: 15.2929 +2022-05-09 18:00:43,261 - mmseg - INFO - per class results: +2022-05-09 18:00:43,278 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.66 | 99.23 | +| sidewalk | 89.11 | 93.88 | +| building | 94.64 | 97.06 | +| wall | 70.3 | 83.8 | +| fence | 70.57 | 83.8 | +| pole | 72.07 | 84.3 | +| traffic light | 75.87 | 88.75 | +| traffic sign | 84.07 | 91.94 | +| vegetation | 93.43 | 96.66 | +| terrain | 69.06 | 82.47 | +| sky | 95.92 | 98.53 | +| person | 86.59 | 93.03 | +| rider | 72.34 | 85.83 | +| car | 96.37 | 98.15 | +| truck | 91.34 | 96.72 | +| bus | 93.79 | 97.12 | +| train | 87.8 | 91.58 | +| motorcycle | 74.7 | 87.17 | +| bicycle | 82.1 | 90.97 | ++---------------+-------+-------+ +2022-05-09 18:00:43,279 - mmseg - INFO - Summary: +2022-05-09 18:00:43,279 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.06 | 84.14 | 91.63 | ++-------+-------+-------+ +2022-05-09 18:01:16,778 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_1000.pth. +2022-05-09 18:01:16,792 - mmseg - INFO - Best mIoU is 0.8414 at 1000 iter. 
+2022-05-09 18:01:16,804 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 18:01:16,805 - mmseg - INFO - Iter(val) [32] aAcc: 0.9706, mIoU: 0.8414, mAcc: 0.9163, IoU.road: 0.9866, IoU.sidewalk: 0.8911, IoU.building: 0.9464, IoU.wall: 0.7030, IoU.fence: 0.7057, IoU.pole: 0.7207, IoU.traffic light: 0.7587, IoU.traffic sign: 0.8407, IoU.vegetation: 0.9343, IoU.terrain: 0.6906, IoU.sky: 0.9592, IoU.person: 0.8659, IoU.rider: 0.7234, IoU.car: 0.9637, IoU.truck: 0.9134, IoU.bus: 0.9379, IoU.train: 0.8780, IoU.motorcycle: 0.7470, IoU.bicycle: 0.8210, Acc.road: 0.9923, Acc.sidewalk: 0.9388, Acc.building: 0.9706, Acc.wall: 0.8380, Acc.fence: 0.8380, Acc.pole: 0.8430, Acc.traffic light: 0.8875, Acc.traffic sign: 0.9194, Acc.vegetation: 0.9666, Acc.terrain: 0.8247, Acc.sky: 0.9853, Acc.person: 0.9303, Acc.rider: 0.8583, Acc.car: 0.9815, Acc.truck: 0.9672, Acc.bus: 0.9712, Acc.train: 0.9158, Acc.motorcycle: 0.8717, Acc.bicycle: 0.9097 +2022-05-09 18:02:46,884 - mmseg - INFO - Iter [1050/80000] lr: 9.909e-07, eta: 1 day, 19:49:17, time: 4.980, data_time: 3.197, memory: 64699, decode.loss_cls: 0.2811, decode.loss_mask: 0.3340, decode.loss_dice: 0.8681, decode.d0.loss_cls: 0.5178, decode.d0.loss_mask: 0.3646, decode.d0.loss_dice: 0.9636, decode.d1.loss_cls: 0.2982, decode.d1.loss_mask: 0.3422, decode.d1.loss_dice: 0.9015, decode.d2.loss_cls: 0.2938, decode.d2.loss_mask: 0.3376, decode.d2.loss_dice: 0.8816, decode.d3.loss_cls: 0.2874, decode.d3.loss_mask: 0.3340, decode.d3.loss_dice: 0.8688, decode.d4.loss_cls: 0.2871, decode.d4.loss_mask: 0.3339, decode.d4.loss_dice: 0.8677, decode.d5.loss_cls: 0.2856, decode.d5.loss_mask: 0.3348, decode.d5.loss_dice: 0.8662, decode.d6.loss_cls: 0.2842, decode.d6.loss_mask: 0.3348, decode.d6.loss_dice: 0.8648, decode.d7.loss_cls: 0.2781, decode.d7.loss_mask: 0.3344, decode.d7.loss_dice: 0.8654, decode.d8.loss_cls: 0.2804, decode.d8.loss_mask: 0.3340, decode.d8.loss_dice: 0.8651, loss: 15.2908 
+2022-05-09 18:04:17,136 - mmseg - INFO - Iter [1100/80000] lr: 1.038e-06, eta: 1 day, 19:36:05, time: 1.805, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2597, decode.loss_mask: 0.3468, decode.loss_dice: 0.8738, decode.d0.loss_cls: 0.4917, decode.d0.loss_mask: 0.3861, decode.d0.loss_dice: 0.9659, decode.d1.loss_cls: 0.2714, decode.d1.loss_mask: 0.3523, decode.d1.loss_dice: 0.9052, decode.d2.loss_cls: 0.2750, decode.d2.loss_mask: 0.3470, decode.d2.loss_dice: 0.8871, decode.d3.loss_cls: 0.2644, decode.d3.loss_mask: 0.3455, decode.d3.loss_dice: 0.8836, decode.d4.loss_cls: 0.2778, decode.d4.loss_mask: 0.3447, decode.d4.loss_dice: 0.8752, decode.d5.loss_cls: 0.2703, decode.d5.loss_mask: 0.3451, decode.d5.loss_dice: 0.8723, decode.d6.loss_cls: 0.2679, decode.d6.loss_mask: 0.3461, decode.d6.loss_dice: 0.8724, decode.d7.loss_cls: 0.2684, decode.d7.loss_mask: 0.3457, decode.d7.loss_dice: 0.8655, decode.d8.loss_cls: 0.2617, decode.d8.loss_mask: 0.3453, decode.d8.loss_dice: 0.8697, loss: 15.2836 +2022-05-09 18:05:48,142 - mmseg - INFO - Iter [1150/80000] lr: 1.084e-06, eta: 1 day, 19:24:45, time: 1.820, data_time: 0.063, memory: 64699, decode.loss_cls: 0.2545, decode.loss_mask: 0.3278, decode.loss_dice: 0.8439, decode.d0.loss_cls: 0.4798, decode.d0.loss_mask: 0.3631, decode.d0.loss_dice: 0.9478, decode.d1.loss_cls: 0.2803, decode.d1.loss_mask: 0.3318, decode.d1.loss_dice: 0.8762, decode.d2.loss_cls: 0.2678, decode.d2.loss_mask: 0.3307, decode.d2.loss_dice: 0.8624, decode.d3.loss_cls: 0.2604, decode.d3.loss_mask: 0.3284, decode.d3.loss_dice: 0.8495, decode.d4.loss_cls: 0.2638, decode.d4.loss_mask: 0.3284, decode.d4.loss_dice: 0.8512, decode.d5.loss_cls: 0.2633, decode.d5.loss_mask: 0.3294, decode.d5.loss_dice: 0.8428, decode.d6.loss_cls: 0.2569, decode.d6.loss_mask: 0.3285, decode.d6.loss_dice: 0.8459, decode.d7.loss_cls: 0.2579, decode.d7.loss_mask: 0.3279, decode.d7.loss_dice: 0.8475, decode.d8.loss_cls: 0.2631, decode.d8.loss_mask: 0.3284, decode.d8.loss_dice: 
0.8437, loss: 14.7832 +2022-05-09 18:07:18,408 - mmseg - INFO - Iter [1200/80000] lr: 1.130e-06, eta: 1 day, 19:13:26, time: 1.805, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2782, decode.loss_mask: 0.3211, decode.loss_dice: 0.8529, decode.d0.loss_cls: 0.5245, decode.d0.loss_mask: 0.3516, decode.d0.loss_dice: 0.9509, decode.d1.loss_cls: 0.2844, decode.d1.loss_mask: 0.3292, decode.d1.loss_dice: 0.8899, decode.d2.loss_cls: 0.2869, decode.d2.loss_mask: 0.3241, decode.d2.loss_dice: 0.8665, decode.d3.loss_cls: 0.2832, decode.d3.loss_mask: 0.3231, decode.d3.loss_dice: 0.8587, decode.d4.loss_cls: 0.2806, decode.d4.loss_mask: 0.3235, decode.d4.loss_dice: 0.8537, decode.d5.loss_cls: 0.2742, decode.d5.loss_mask: 0.3229, decode.d5.loss_dice: 0.8555, decode.d6.loss_cls: 0.2715, decode.d6.loss_mask: 0.3217, decode.d6.loss_dice: 0.8524, decode.d7.loss_cls: 0.2827, decode.d7.loss_mask: 0.3221, decode.d7.loss_dice: 0.8532, decode.d8.loss_cls: 0.2833, decode.d8.loss_mask: 0.3202, decode.d8.loss_dice: 0.8521, loss: 14.9947 +2022-05-09 18:08:47,157 - mmseg - INFO - Iter [1250/80000] lr: 1.177e-06, eta: 1 day, 19:01:18, time: 1.775, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2986, decode.loss_mask: 0.3252, decode.loss_dice: 0.8719, decode.d0.loss_cls: 0.5341, decode.d0.loss_mask: 0.3614, decode.d0.loss_dice: 0.9731, decode.d1.loss_cls: 0.3143, decode.d1.loss_mask: 0.3327, decode.d1.loss_dice: 0.9082, decode.d2.loss_cls: 0.3102, decode.d2.loss_mask: 0.3274, decode.d2.loss_dice: 0.8868, decode.d3.loss_cls: 0.3043, decode.d3.loss_mask: 0.3240, decode.d3.loss_dice: 0.8728, decode.d4.loss_cls: 0.2988, decode.d4.loss_mask: 0.3232, decode.d4.loss_dice: 0.8725, decode.d5.loss_cls: 0.2961, decode.d5.loss_mask: 0.3258, decode.d5.loss_dice: 0.8794, decode.d6.loss_cls: 0.2987, decode.d6.loss_mask: 0.3252, decode.d6.loss_dice: 0.8686, decode.d7.loss_cls: 0.2997, decode.d7.loss_mask: 0.3247, decode.d7.loss_dice: 0.8646, decode.d8.loss_cls: 0.3050, decode.d8.loss_mask: 0.3242, 
decode.d8.loss_dice: 0.8657, loss: 15.4175 +2022-05-09 18:10:16,432 - mmseg - INFO - Iter [1300/80000] lr: 1.223e-06, eta: 1 day, 18:50:30, time: 1.785, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2657, decode.loss_mask: 0.3374, decode.loss_dice: 0.8574, decode.d0.loss_cls: 0.5130, decode.d0.loss_mask: 0.3669, decode.d0.loss_dice: 0.9496, decode.d1.loss_cls: 0.2908, decode.d1.loss_mask: 0.3424, decode.d1.loss_dice: 0.8884, decode.d2.loss_cls: 0.2794, decode.d2.loss_mask: 0.3384, decode.d2.loss_dice: 0.8676, decode.d3.loss_cls: 0.2793, decode.d3.loss_mask: 0.3374, decode.d3.loss_dice: 0.8591, decode.d4.loss_cls: 0.2721, decode.d4.loss_mask: 0.3381, decode.d4.loss_dice: 0.8532, decode.d5.loss_cls: 0.2769, decode.d5.loss_mask: 0.3365, decode.d5.loss_dice: 0.8603, decode.d6.loss_cls: 0.2703, decode.d6.loss_mask: 0.3381, decode.d6.loss_dice: 0.8559, decode.d7.loss_cls: 0.2692, decode.d7.loss_mask: 0.3372, decode.d7.loss_dice: 0.8557, decode.d8.loss_cls: 0.2732, decode.d8.loss_mask: 0.3372, decode.d8.loss_dice: 0.8609, loss: 15.1074 +2022-05-09 18:11:48,322 - mmseg - INFO - Iter [1350/80000] lr: 1.269e-06, eta: 1 day, 18:42:57, time: 1.838, data_time: 0.067, memory: 64699, decode.loss_cls: 0.2703, decode.loss_mask: 0.3335, decode.loss_dice: 0.8655, decode.d0.loss_cls: 0.4983, decode.d0.loss_mask: 0.3628, decode.d0.loss_dice: 0.9568, decode.d1.loss_cls: 0.2957, decode.d1.loss_mask: 0.3356, decode.d1.loss_dice: 0.8996, decode.d2.loss_cls: 0.2777, decode.d2.loss_mask: 0.3349, decode.d2.loss_dice: 0.8841, decode.d3.loss_cls: 0.2724, decode.d3.loss_mask: 0.3344, decode.d3.loss_dice: 0.8681, decode.d4.loss_cls: 0.2757, decode.d4.loss_mask: 0.3339, decode.d4.loss_dice: 0.8672, decode.d5.loss_cls: 0.2750, decode.d5.loss_mask: 0.3348, decode.d5.loss_dice: 0.8669, decode.d6.loss_cls: 0.2748, decode.d6.loss_mask: 0.3328, decode.d6.loss_dice: 0.8652, decode.d7.loss_cls: 0.2740, decode.d7.loss_mask: 0.3328, decode.d7.loss_dice: 0.8663, decode.d8.loss_cls: 0.2749, 
decode.d8.loss_mask: 0.3332, decode.d8.loss_dice: 0.8662, loss: 15.1636 +2022-05-09 18:13:16,803 - mmseg - INFO - Iter [1400/80000] lr: 1.316e-06, eta: 1 day, 18:32:39, time: 1.770, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2629, decode.loss_mask: 0.3281, decode.loss_dice: 0.8745, decode.d0.loss_cls: 0.4946, decode.d0.loss_mask: 0.3625, decode.d0.loss_dice: 0.9709, decode.d1.loss_cls: 0.2763, decode.d1.loss_mask: 0.3331, decode.d1.loss_dice: 0.9082, decode.d2.loss_cls: 0.2663, decode.d2.loss_mask: 0.3301, decode.d2.loss_dice: 0.8895, decode.d3.loss_cls: 0.2699, decode.d3.loss_mask: 0.3272, decode.d3.loss_dice: 0.8736, decode.d4.loss_cls: 0.2668, decode.d4.loss_mask: 0.3267, decode.d4.loss_dice: 0.8833, decode.d5.loss_cls: 0.2646, decode.d5.loss_mask: 0.3268, decode.d5.loss_dice: 0.8766, decode.d6.loss_cls: 0.2596, decode.d6.loss_mask: 0.3294, decode.d6.loss_dice: 0.8770, decode.d7.loss_cls: 0.2580, decode.d7.loss_mask: 0.3307, decode.d7.loss_dice: 0.8771, decode.d8.loss_cls: 0.2669, decode.d8.loss_mask: 0.3292, decode.d8.loss_dice: 0.8800, loss: 15.1204 +2022-05-09 18:14:46,148 - mmseg - INFO - Iter [1450/80000] lr: 1.362e-06, eta: 1 day, 18:23:43, time: 1.787, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2706, decode.loss_mask: 0.3398, decode.loss_dice: 0.8515, decode.d0.loss_cls: 0.4926, decode.d0.loss_mask: 0.3726, decode.d0.loss_dice: 0.9495, decode.d1.loss_cls: 0.2829, decode.d1.loss_mask: 0.3455, decode.d1.loss_dice: 0.8891, decode.d2.loss_cls: 0.2736, decode.d2.loss_mask: 0.3428, decode.d2.loss_dice: 0.8713, decode.d3.loss_cls: 0.2783, decode.d3.loss_mask: 0.3410, decode.d3.loss_dice: 0.8582, decode.d4.loss_cls: 0.2791, decode.d4.loss_mask: 0.3414, decode.d4.loss_dice: 0.8571, decode.d5.loss_cls: 0.2629, decode.d5.loss_mask: 0.3418, decode.d5.loss_dice: 0.8616, decode.d6.loss_cls: 0.2717, decode.d6.loss_mask: 0.3401, decode.d6.loss_dice: 0.8524, decode.d7.loss_cls: 0.2721, decode.d7.loss_mask: 0.3390, decode.d7.loss_dice: 0.8530, 
decode.d8.loss_cls: 0.2662, decode.d8.loss_mask: 0.3400, decode.d8.loss_dice: 0.8544, loss: 15.0920 +2022-05-09 18:16:17,865 - mmseg - INFO - Iter [1500/80000] lr: 1.408e-06, eta: 1 day, 18:17:22, time: 1.834, data_time: 0.068, memory: 64699, decode.loss_cls: 0.2965, decode.loss_mask: 0.3275, decode.loss_dice: 0.8565, decode.d0.loss_cls: 0.5034, decode.d0.loss_mask: 0.3658, decode.d0.loss_dice: 0.9545, decode.d1.loss_cls: 0.3122, decode.d1.loss_mask: 0.3334, decode.d1.loss_dice: 0.8892, decode.d2.loss_cls: 0.3064, decode.d2.loss_mask: 0.3308, decode.d2.loss_dice: 0.8713, decode.d3.loss_cls: 0.3019, decode.d3.loss_mask: 0.3299, decode.d3.loss_dice: 0.8584, decode.d4.loss_cls: 0.2996, decode.d4.loss_mask: 0.3302, decode.d4.loss_dice: 0.8601, decode.d5.loss_cls: 0.2983, decode.d5.loss_mask: 0.3288, decode.d5.loss_dice: 0.8556, decode.d6.loss_cls: 0.2865, decode.d6.loss_mask: 0.3302, decode.d6.loss_dice: 0.8607, decode.d7.loss_cls: 0.2895, decode.d7.loss_mask: 0.3291, decode.d7.loss_dice: 0.8582, decode.d8.loss_cls: 0.2900, decode.d8.loss_mask: 0.3289, decode.d8.loss_dice: 0.8595, loss: 15.2430 +2022-05-09 18:17:46,784 - mmseg - INFO - Iter [1550/80000] lr: 1.408e-06, eta: 1 day, 18:08:57, time: 1.778, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2555, decode.loss_mask: 0.3262, decode.loss_dice: 0.8480, decode.d0.loss_cls: 0.4992, decode.d0.loss_mask: 0.3545, decode.d0.loss_dice: 0.9346, decode.d1.loss_cls: 0.2745, decode.d1.loss_mask: 0.3285, decode.d1.loss_dice: 0.8716, decode.d2.loss_cls: 0.2715, decode.d2.loss_mask: 0.3263, decode.d2.loss_dice: 0.8572, decode.d3.loss_cls: 0.2544, decode.d3.loss_mask: 0.3265, decode.d3.loss_dice: 0.8489, decode.d4.loss_cls: 0.2546, decode.d4.loss_mask: 0.3259, decode.d4.loss_dice: 0.8497, decode.d5.loss_cls: 0.2504, decode.d5.loss_mask: 0.3270, decode.d5.loss_dice: 0.8497, decode.d6.loss_cls: 0.2572, decode.d6.loss_mask: 0.3259, decode.d6.loss_dice: 0.8455, decode.d7.loss_cls: 0.2535, decode.d7.loss_mask: 0.3265, 
decode.d7.loss_dice: 0.8464, decode.d8.loss_cls: 0.2557, decode.d8.loss_mask: 0.3269, decode.d8.loss_dice: 0.8468, loss: 14.7191 +2022-05-09 18:19:14,798 - mmseg - INFO - Iter [1600/80000] lr: 1.407e-06, eta: 1 day, 18:00:14, time: 1.760, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2617, decode.loss_mask: 0.3325, decode.loss_dice: 0.8629, decode.d0.loss_cls: 0.4904, decode.d0.loss_mask: 0.3578, decode.d0.loss_dice: 0.9525, decode.d1.loss_cls: 0.2908, decode.d1.loss_mask: 0.3308, decode.d1.loss_dice: 0.8901, decode.d2.loss_cls: 0.2810, decode.d2.loss_mask: 0.3294, decode.d2.loss_dice: 0.8782, decode.d3.loss_cls: 0.2709, decode.d3.loss_mask: 0.3322, decode.d3.loss_dice: 0.8659, decode.d4.loss_cls: 0.2712, decode.d4.loss_mask: 0.3332, decode.d4.loss_dice: 0.8655, decode.d5.loss_cls: 0.2696, decode.d5.loss_mask: 0.3323, decode.d5.loss_dice: 0.8648, decode.d6.loss_cls: 0.2609, decode.d6.loss_mask: 0.3315, decode.d6.loss_dice: 0.8590, decode.d7.loss_cls: 0.2726, decode.d7.loss_mask: 0.3315, decode.d7.loss_dice: 0.8629, decode.d8.loss_cls: 0.2732, decode.d8.loss_mask: 0.3323, decode.d8.loss_dice: 0.8644, loss: 15.0520 +2022-05-09 18:20:43,075 - mmseg - INFO - Iter [1650/80000] lr: 1.406e-06, eta: 1 day, 17:52:09, time: 1.765, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2723, decode.loss_mask: 0.3242, decode.loss_dice: 0.8489, decode.d0.loss_cls: 0.4987, decode.d0.loss_mask: 0.3523, decode.d0.loss_dice: 0.9367, decode.d1.loss_cls: 0.2951, decode.d1.loss_mask: 0.3300, decode.d1.loss_dice: 0.8811, decode.d2.loss_cls: 0.2812, decode.d2.loss_mask: 0.3265, decode.d2.loss_dice: 0.8630, decode.d3.loss_cls: 0.2734, decode.d3.loss_mask: 0.3250, decode.d3.loss_dice: 0.8519, decode.d4.loss_cls: 0.2751, decode.d4.loss_mask: 0.3268, decode.d4.loss_dice: 0.8504, decode.d5.loss_cls: 0.2717, decode.d5.loss_mask: 0.3259, decode.d5.loss_dice: 0.8500, decode.d6.loss_cls: 0.2739, decode.d6.loss_mask: 0.3248, decode.d6.loss_dice: 0.8469, decode.d7.loss_cls: 0.2789, 
decode.d7.loss_mask: 0.3252, decode.d7.loss_dice: 0.8468, decode.d8.loss_cls: 0.2762, decode.d8.loss_mask: 0.3240, decode.d8.loss_dice: 0.8448, loss: 14.9017 +2022-05-09 18:22:14,538 - mmseg - INFO - Iter [1700/80000] lr: 1.405e-06, eta: 1 day, 17:46:55, time: 1.829, data_time: 0.064, memory: 64699, decode.loss_cls: 0.2562, decode.loss_mask: 0.3065, decode.loss_dice: 0.8602, decode.d0.loss_cls: 0.4838, decode.d0.loss_mask: 0.3316, decode.d0.loss_dice: 0.9398, decode.d1.loss_cls: 0.2831, decode.d1.loss_mask: 0.3133, decode.d1.loss_dice: 0.8890, decode.d2.loss_cls: 0.2786, decode.d2.loss_mask: 0.3094, decode.d2.loss_dice: 0.8694, decode.d3.loss_cls: 0.2672, decode.d3.loss_mask: 0.3089, decode.d3.loss_dice: 0.8661, decode.d4.loss_cls: 0.2695, decode.d4.loss_mask: 0.3083, decode.d4.loss_dice: 0.8577, decode.d5.loss_cls: 0.2633, decode.d5.loss_mask: 0.3071, decode.d5.loss_dice: 0.8618, decode.d6.loss_cls: 0.2698, decode.d6.loss_mask: 0.3056, decode.d6.loss_dice: 0.8503, decode.d7.loss_cls: 0.2655, decode.d7.loss_mask: 0.3067, decode.d7.loss_dice: 0.8544, decode.d8.loss_cls: 0.2726, decode.d8.loss_mask: 0.3049, decode.d8.loss_dice: 0.8536, loss: 14.7141 +2022-05-09 18:23:43,550 - mmseg - INFO - Iter [1750/80000] lr: 1.404e-06, eta: 1 day, 17:40:05, time: 1.781, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2706, decode.loss_mask: 0.3228, decode.loss_dice: 0.8517, decode.d0.loss_cls: 0.4769, decode.d0.loss_mask: 0.3547, decode.d0.loss_dice: 0.9437, decode.d1.loss_cls: 0.2798, decode.d1.loss_mask: 0.3315, decode.d1.loss_dice: 0.8872, decode.d2.loss_cls: 0.2817, decode.d2.loss_mask: 0.3251, decode.d2.loss_dice: 0.8675, decode.d3.loss_cls: 0.2708, decode.d3.loss_mask: 0.3243, decode.d3.loss_dice: 0.8525, decode.d4.loss_cls: 0.2683, decode.d4.loss_mask: 0.3242, decode.d4.loss_dice: 0.8561, decode.d5.loss_cls: 0.2662, decode.d5.loss_mask: 0.3258, decode.d5.loss_dice: 0.8566, decode.d6.loss_cls: 0.2675, decode.d6.loss_mask: 0.3254, decode.d6.loss_dice: 0.8528, 
decode.d7.loss_cls: 0.2698, decode.d7.loss_mask: 0.3245, decode.d7.loss_dice: 0.8526, decode.d8.loss_cls: 0.2724, decode.d8.loss_mask: 0.3228, decode.d8.loss_dice: 0.8469, loss: 14.8726 +2022-05-09 18:25:13,426 - mmseg - INFO - Iter [1800/80000] lr: 1.404e-06, eta: 1 day, 17:34:07, time: 1.796, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2384, decode.loss_mask: 0.3114, decode.loss_dice: 0.8438, decode.d0.loss_cls: 0.4796, decode.d0.loss_mask: 0.3405, decode.d0.loss_dice: 0.9372, decode.d1.loss_cls: 0.2683, decode.d1.loss_mask: 0.3170, decode.d1.loss_dice: 0.8730, decode.d2.loss_cls: 0.2485, decode.d2.loss_mask: 0.3148, decode.d2.loss_dice: 0.8622, decode.d3.loss_cls: 0.2454, decode.d3.loss_mask: 0.3127, decode.d3.loss_dice: 0.8470, decode.d4.loss_cls: 0.2489, decode.d4.loss_mask: 0.3142, decode.d4.loss_dice: 0.8515, decode.d5.loss_cls: 0.2438, decode.d5.loss_mask: 0.3126, decode.d5.loss_dice: 0.8501, decode.d6.loss_cls: 0.2368, decode.d6.loss_mask: 0.3122, decode.d6.loss_dice: 0.8488, decode.d7.loss_cls: 0.2396, decode.d7.loss_mask: 0.3128, decode.d7.loss_dice: 0.8467, decode.d8.loss_cls: 0.2398, decode.d8.loss_mask: 0.3113, decode.d8.loss_dice: 0.8407, loss: 14.4496 +2022-05-09 18:26:42,149 - mmseg - INFO - Iter [1850/80000] lr: 1.403e-06, eta: 1 day, 17:27:40, time: 1.776, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2606, decode.loss_mask: 0.3206, decode.loss_dice: 0.8359, decode.d0.loss_cls: 0.4876, decode.d0.loss_mask: 0.3502, decode.d0.loss_dice: 0.9311, decode.d1.loss_cls: 0.2743, decode.d1.loss_mask: 0.3275, decode.d1.loss_dice: 0.8783, decode.d2.loss_cls: 0.2878, decode.d2.loss_mask: 0.3217, decode.d2.loss_dice: 0.8528, decode.d3.loss_cls: 0.2575, decode.d3.loss_mask: 0.3208, decode.d3.loss_dice: 0.8437, decode.d4.loss_cls: 0.2666, decode.d4.loss_mask: 0.3225, decode.d4.loss_dice: 0.8374, decode.d5.loss_cls: 0.2706, decode.d5.loss_mask: 0.3210, decode.d5.loss_dice: 0.8421, decode.d6.loss_cls: 0.2638, decode.d6.loss_mask: 0.3220, 
decode.d6.loss_dice: 0.8425, decode.d7.loss_cls: 0.2622, decode.d7.loss_mask: 0.3224, decode.d7.loss_dice: 0.8382, decode.d8.loss_cls: 0.2651, decode.d8.loss_mask: 0.3206, decode.d8.loss_dice: 0.8374, loss: 14.6848 +2022-05-09 18:28:13,968 - mmseg - INFO - Iter [1900/80000] lr: 1.402e-06, eta: 1 day, 17:23:32, time: 1.836, data_time: 0.066, memory: 64699, decode.loss_cls: 0.2598, decode.loss_mask: 0.3204, decode.loss_dice: 0.8444, decode.d0.loss_cls: 0.4759, decode.d0.loss_mask: 0.3405, decode.d0.loss_dice: 0.9426, decode.d1.loss_cls: 0.2782, decode.d1.loss_mask: 0.3247, decode.d1.loss_dice: 0.8780, decode.d2.loss_cls: 0.2748, decode.d2.loss_mask: 0.3198, decode.d2.loss_dice: 0.8617, decode.d3.loss_cls: 0.2577, decode.d3.loss_mask: 0.3194, decode.d3.loss_dice: 0.8468, decode.d4.loss_cls: 0.2614, decode.d4.loss_mask: 0.3208, decode.d4.loss_dice: 0.8453, decode.d5.loss_cls: 0.2646, decode.d5.loss_mask: 0.3205, decode.d5.loss_dice: 0.8482, decode.d6.loss_cls: 0.2653, decode.d6.loss_mask: 0.3190, decode.d6.loss_dice: 0.8459, decode.d7.loss_cls: 0.2547, decode.d7.loss_mask: 0.3205, decode.d7.loss_dice: 0.8422, decode.d8.loss_cls: 0.2617, decode.d8.loss_mask: 0.3203, decode.d8.loss_dice: 0.8434, loss: 14.6785 +2022-05-09 18:29:43,611 - mmseg - INFO - Iter [1950/80000] lr: 1.401e-06, eta: 1 day, 17:18:07, time: 1.793, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2650, decode.loss_mask: 0.3206, decode.loss_dice: 0.8564, decode.d0.loss_cls: 0.4762, decode.d0.loss_mask: 0.3477, decode.d0.loss_dice: 0.9432, decode.d1.loss_cls: 0.2828, decode.d1.loss_mask: 0.3251, decode.d1.loss_dice: 0.8888, decode.d2.loss_cls: 0.2784, decode.d2.loss_mask: 0.3218, decode.d2.loss_dice: 0.8685, decode.d3.loss_cls: 0.2632, decode.d3.loss_mask: 0.3206, decode.d3.loss_dice: 0.8638, decode.d4.loss_cls: 0.2691, decode.d4.loss_mask: 0.3216, decode.d4.loss_dice: 0.8626, decode.d5.loss_cls: 0.2597, decode.d5.loss_mask: 0.3212, decode.d5.loss_dice: 0.8630, decode.d6.loss_cls: 0.2552, 
decode.d6.loss_mask: 0.3222, decode.d6.loss_dice: 0.8603, decode.d7.loss_cls: 0.2647, decode.d7.loss_mask: 0.3204, decode.d7.loss_dice: 0.8629, decode.d8.loss_cls: 0.2700, decode.d8.loss_mask: 0.3203, decode.d8.loss_dice: 0.8538, loss: 14.8493 +2022-05-09 18:31:12,909 - mmseg - INFO - Saving checkpoint at 2000 iterations +2022-05-09 18:31:42,859 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 18:31:42,862 - mmseg - INFO - Iter [2000/80000] lr: 1.400e-06, eta: 1 day, 17:32:03, time: 2.383, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2449, decode.loss_mask: 0.3265, decode.loss_dice: 0.8370, decode.d0.loss_cls: 0.4746, decode.d0.loss_mask: 0.3598, decode.d0.loss_dice: 0.9225, decode.d1.loss_cls: 0.2624, decode.d1.loss_mask: 0.3316, decode.d1.loss_dice: 0.8616, decode.d2.loss_cls: 0.2561, decode.d2.loss_mask: 0.3282, decode.d2.loss_dice: 0.8466, decode.d3.loss_cls: 0.2370, decode.d3.loss_mask: 0.3293, decode.d3.loss_dice: 0.8391, decode.d4.loss_cls: 0.2473, decode.d4.loss_mask: 0.3284, decode.d4.loss_dice: 0.8384, decode.d5.loss_cls: 0.2469, decode.d5.loss_mask: 0.3271, decode.d5.loss_dice: 0.8340, decode.d6.loss_cls: 0.2424, decode.d6.loss_mask: 0.3269, decode.d6.loss_dice: 0.8367, decode.d7.loss_cls: 0.2437, decode.d7.loss_mask: 0.3287, decode.d7.loss_dice: 0.8363, decode.d8.loss_cls: 0.2463, decode.d8.loss_mask: 0.3281, decode.d8.loss_dice: 0.8379, loss: 14.5066 +2022-05-09 18:33:38,853 - mmseg - INFO - per class results: +2022-05-09 18:33:38,858 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.68 | 99.18 | +| sidewalk | 88.98 | 94.19 | +| building | 94.59 | 96.92 | +| wall | 67.37 | 84.18 | +| fence | 71.73 | 85.02 | +| pole | 72.58 | 84.68 | +| traffic light | 75.8 | 88.7 | +| traffic sign | 84.27 | 91.19 | +| vegetation | 93.5 | 96.87 | +| terrain | 68.36 | 80.85 | +| sky | 95.9 | 98.58 | +| person | 86.72 | 93.4 | +| rider | 72.96 | 
83.92 | +| car | 96.05 | 97.73 | +| truck | 89.74 | 93.86 | +| bus | 90.37 | 96.43 | +| train | 89.05 | 95.15 | +| motorcycle | 75.3 | 89.23 | +| bicycle | 82.18 | 90.7 | ++---------------+-------+-------+ +2022-05-09 18:33:38,858 - mmseg - INFO - Summary: +2022-05-09 18:33:38,858 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 97.03 | 83.9 | 91.62 | ++-------+------+-------+ +2022-05-09 18:33:38,862 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 18:33:38,862 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8390, mAcc: 0.9162, IoU.road: 0.9868, IoU.sidewalk: 0.8898, IoU.building: 0.9459, IoU.wall: 0.6737, IoU.fence: 0.7173, IoU.pole: 0.7258, IoU.traffic light: 0.7580, IoU.traffic sign: 0.8427, IoU.vegetation: 0.9350, IoU.terrain: 0.6836, IoU.sky: 0.9590, IoU.person: 0.8672, IoU.rider: 0.7296, IoU.car: 0.9605, IoU.truck: 0.8974, IoU.bus: 0.9037, IoU.train: 0.8905, IoU.motorcycle: 0.7530, IoU.bicycle: 0.8218, Acc.road: 0.9918, Acc.sidewalk: 0.9419, Acc.building: 0.9692, Acc.wall: 0.8418, Acc.fence: 0.8502, Acc.pole: 0.8468, Acc.traffic light: 0.8870, Acc.traffic sign: 0.9119, Acc.vegetation: 0.9687, Acc.terrain: 0.8085, Acc.sky: 0.9858, Acc.person: 0.9340, Acc.rider: 0.8392, Acc.car: 0.9773, Acc.truck: 0.9386, Acc.bus: 0.9643, Acc.train: 0.9515, Acc.motorcycle: 0.8923, Acc.bicycle: 0.9070 +2022-05-09 18:35:12,235 - mmseg - INFO - Iter [2050/80000] lr: 1.399e-06, eta: 1 day, 18:42:29, time: 4.190, data_time: 2.388, memory: 64699, decode.loss_cls: 0.2700, decode.loss_mask: 0.3225, decode.loss_dice: 0.8476, decode.d0.loss_cls: 0.4780, decode.d0.loss_mask: 0.3525, decode.d0.loss_dice: 0.9434, decode.d1.loss_cls: 0.2949, decode.d1.loss_mask: 0.3275, decode.d1.loss_dice: 0.8769, decode.d2.loss_cls: 0.2775, decode.d2.loss_mask: 0.3250, decode.d2.loss_dice: 0.8604, decode.d3.loss_cls: 0.2669, decode.d3.loss_mask: 0.3243, decode.d3.loss_dice: 0.8516, decode.d4.loss_cls: 
0.2712, decode.d4.loss_mask: 0.3231, decode.d4.loss_dice: 0.8523, decode.d5.loss_cls: 0.2723, decode.d5.loss_mask: 0.3222, decode.d5.loss_dice: 0.8484, decode.d6.loss_cls: 0.2690, decode.d6.loss_mask: 0.3220, decode.d6.loss_dice: 0.8504, decode.d7.loss_cls: 0.2732, decode.d7.loss_mask: 0.3219, decode.d7.loss_dice: 0.8502, decode.d8.loss_cls: 0.2685, decode.d8.loss_mask: 0.3230, decode.d8.loss_dice: 0.8529, loss: 14.8396 +2022-05-09 18:36:42,023 - mmseg - INFO - Iter [2100/80000] lr: 1.398e-06, eta: 1 day, 18:35:22, time: 1.796, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2609, decode.loss_mask: 0.3049, decode.loss_dice: 0.8388, decode.d0.loss_cls: 0.4711, decode.d0.loss_mask: 0.3339, decode.d0.loss_dice: 0.9229, decode.d1.loss_cls: 0.2751, decode.d1.loss_mask: 0.3099, decode.d1.loss_dice: 0.8599, decode.d2.loss_cls: 0.2763, decode.d2.loss_mask: 0.3067, decode.d2.loss_dice: 0.8487, decode.d3.loss_cls: 0.2612, decode.d3.loss_mask: 0.3054, decode.d3.loss_dice: 0.8409, decode.d4.loss_cls: 0.2572, decode.d4.loss_mask: 0.3046, decode.d4.loss_dice: 0.8398, decode.d5.loss_cls: 0.2680, decode.d5.loss_mask: 0.3054, decode.d5.loss_dice: 0.8410, decode.d6.loss_cls: 0.2702, decode.d6.loss_mask: 0.3054, decode.d6.loss_dice: 0.8326, decode.d7.loss_cls: 0.2646, decode.d7.loss_mask: 0.3050, decode.d7.loss_dice: 0.8357, decode.d8.loss_cls: 0.2593, decode.d8.loss_mask: 0.3062, decode.d8.loss_dice: 0.8407, loss: 14.4523 +2022-05-09 18:38:10,946 - mmseg - INFO - Iter [2150/80000] lr: 1.397e-06, eta: 1 day, 18:27:59, time: 1.778, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2501, decode.loss_mask: 0.3126, decode.loss_dice: 0.8314, decode.d0.loss_cls: 0.4784, decode.d0.loss_mask: 0.3413, decode.d0.loss_dice: 0.9243, decode.d1.loss_cls: 0.2714, decode.d1.loss_mask: 0.3172, decode.d1.loss_dice: 0.8628, decode.d2.loss_cls: 0.2586, decode.d2.loss_mask: 0.3174, decode.d2.loss_dice: 0.8507, decode.d3.loss_cls: 0.2549, decode.d3.loss_mask: 0.3147, decode.d3.loss_dice: 0.8369, 
decode.d4.loss_cls: 0.2583, decode.d4.loss_mask: 0.3136, decode.d4.loss_dice: 0.8312, decode.d5.loss_cls: 0.2523, decode.d5.loss_mask: 0.3129, decode.d5.loss_dice: 0.8347, decode.d6.loss_cls: 0.2534, decode.d6.loss_mask: 0.3127, decode.d6.loss_dice: 0.8336, decode.d7.loss_cls: 0.2513, decode.d7.loss_mask: 0.3126, decode.d7.loss_dice: 0.8325, decode.d8.loss_cls: 0.2607, decode.d8.loss_mask: 0.3115, decode.d8.loss_dice: 0.8277, loss: 14.4220 +2022-05-09 18:39:39,917 - mmseg - INFO - Iter [2200/80000] lr: 1.396e-06, eta: 1 day, 18:20:56, time: 1.780, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2358, decode.loss_mask: 0.3164, decode.loss_dice: 0.8418, decode.d0.loss_cls: 0.4443, decode.d0.loss_mask: 0.3434, decode.d0.loss_dice: 0.9123, decode.d1.loss_cls: 0.2510, decode.d1.loss_mask: 0.3216, decode.d1.loss_dice: 0.8636, decode.d2.loss_cls: 0.2501, decode.d2.loss_mask: 0.3180, decode.d2.loss_dice: 0.8468, decode.d3.loss_cls: 0.2453, decode.d3.loss_mask: 0.3167, decode.d3.loss_dice: 0.8375, decode.d4.loss_cls: 0.2419, decode.d4.loss_mask: 0.3169, decode.d4.loss_dice: 0.8403, decode.d5.loss_cls: 0.2412, decode.d5.loss_mask: 0.3184, decode.d5.loss_dice: 0.8331, decode.d6.loss_cls: 0.2417, decode.d6.loss_mask: 0.3180, decode.d6.loss_dice: 0.8382, decode.d7.loss_cls: 0.2385, decode.d7.loss_mask: 0.3178, decode.d7.loss_dice: 0.8419, decode.d8.loss_cls: 0.2444, decode.d8.loss_mask: 0.3178, decode.d8.loss_dice: 0.8369, loss: 14.3317 +2022-05-09 18:41:11,236 - mmseg - INFO - Iter [2250/80000] lr: 1.395e-06, eta: 1 day, 18:15:27, time: 1.826, data_time: 0.064, memory: 64699, decode.loss_cls: 0.2633, decode.loss_mask: 0.3232, decode.loss_dice: 0.8564, decode.d0.loss_cls: 0.4901, decode.d0.loss_mask: 0.3554, decode.d0.loss_dice: 0.9471, decode.d1.loss_cls: 0.2864, decode.d1.loss_mask: 0.3280, decode.d1.loss_dice: 0.8852, decode.d2.loss_cls: 0.2702, decode.d2.loss_mask: 0.3249, decode.d2.loss_dice: 0.8725, decode.d3.loss_cls: 0.2649, decode.d3.loss_mask: 0.3246, 
decode.d3.loss_dice: 0.8594, decode.d4.loss_cls: 0.2669, decode.d4.loss_mask: 0.3232, decode.d4.loss_dice: 0.8594, decode.d5.loss_cls: 0.2640, decode.d5.loss_mask: 0.3241, decode.d5.loss_dice: 0.8655, decode.d6.loss_cls: 0.2588, decode.d6.loss_mask: 0.3232, decode.d6.loss_dice: 0.8593, decode.d7.loss_cls: 0.2570, decode.d7.loss_mask: 0.3231, decode.d7.loss_dice: 0.8601, decode.d8.loss_cls: 0.2581, decode.d8.loss_mask: 0.3234, decode.d8.loss_dice: 0.8611, loss: 14.8785 +2022-05-09 18:42:40,102 - mmseg - INFO - Iter [2300/80000] lr: 1.395e-06, eta: 1 day, 18:08:47, time: 1.778, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2503, decode.loss_mask: 0.3223, decode.loss_dice: 0.8443, decode.d0.loss_cls: 0.4748, decode.d0.loss_mask: 0.3462, decode.d0.loss_dice: 0.9326, decode.d1.loss_cls: 0.2878, decode.d1.loss_mask: 0.3257, decode.d1.loss_dice: 0.8759, decode.d2.loss_cls: 0.2673, decode.d2.loss_mask: 0.3238, decode.d2.loss_dice: 0.8554, decode.d3.loss_cls: 0.2534, decode.d3.loss_mask: 0.3225, decode.d3.loss_dice: 0.8495, decode.d4.loss_cls: 0.2561, decode.d4.loss_mask: 0.3208, decode.d4.loss_dice: 0.8490, decode.d5.loss_cls: 0.2613, decode.d5.loss_mask: 0.3213, decode.d5.loss_dice: 0.8455, decode.d6.loss_cls: 0.2547, decode.d6.loss_mask: 0.3219, decode.d6.loss_dice: 0.8426, decode.d7.loss_cls: 0.2507, decode.d7.loss_mask: 0.3225, decode.d7.loss_dice: 0.8449, decode.d8.loss_cls: 0.2492, decode.d8.loss_mask: 0.3214, decode.d8.loss_dice: 0.8433, loss: 14.6369 +2022-05-09 18:44:08,706 - mmseg - INFO - Iter [2350/80000] lr: 1.394e-06, eta: 1 day, 18:02:11, time: 1.772, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2572, decode.loss_mask: 0.3209, decode.loss_dice: 0.8547, decode.d0.loss_cls: 0.4734, decode.d0.loss_mask: 0.3458, decode.d0.loss_dice: 0.9427, decode.d1.loss_cls: 0.2816, decode.d1.loss_mask: 0.3275, decode.d1.loss_dice: 0.8857, decode.d2.loss_cls: 0.2681, decode.d2.loss_mask: 0.3224, decode.d2.loss_dice: 0.8699, decode.d3.loss_cls: 0.2611, 
decode.d3.loss_mask: 0.3211, decode.d3.loss_dice: 0.8578, decode.d4.loss_cls: 0.2553, decode.d4.loss_mask: 0.3205, decode.d4.loss_dice: 0.8551, decode.d5.loss_cls: 0.2625, decode.d5.loss_mask: 0.3212, decode.d5.loss_dice: 0.8562, decode.d6.loss_cls: 0.2558, decode.d6.loss_mask: 0.3204, decode.d6.loss_dice: 0.8558, decode.d7.loss_cls: 0.2579, decode.d7.loss_mask: 0.3204, decode.d7.loss_dice: 0.8541, decode.d8.loss_cls: 0.2587, decode.d8.loss_mask: 0.3216, decode.d8.loss_dice: 0.8537, loss: 14.7592 +2022-05-09 18:45:37,295 - mmseg - INFO - Iter [2400/80000] lr: 1.393e-06, eta: 1 day, 17:55:47, time: 1.772, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2562, decode.loss_mask: 0.3145, decode.loss_dice: 0.8284, decode.d0.loss_cls: 0.4833, decode.d0.loss_mask: 0.3447, decode.d0.loss_dice: 0.9189, decode.d1.loss_cls: 0.2796, decode.d1.loss_mask: 0.3182, decode.d1.loss_dice: 0.8616, decode.d2.loss_cls: 0.2621, decode.d2.loss_mask: 0.3157, decode.d2.loss_dice: 0.8472, decode.d3.loss_cls: 0.2571, decode.d3.loss_mask: 0.3149, decode.d3.loss_dice: 0.8304, decode.d4.loss_cls: 0.2627, decode.d4.loss_mask: 0.3152, decode.d4.loss_dice: 0.8276, decode.d5.loss_cls: 0.2674, decode.d5.loss_mask: 0.3147, decode.d5.loss_dice: 0.8249, decode.d6.loss_cls: 0.2589, decode.d6.loss_mask: 0.3138, decode.d6.loss_dice: 0.8262, decode.d7.loss_cls: 0.2609, decode.d7.loss_mask: 0.3149, decode.d7.loss_dice: 0.8258, decode.d8.loss_cls: 0.2588, decode.d8.loss_mask: 0.3150, decode.d8.loss_dice: 0.8260, loss: 14.4454 +2022-05-09 18:47:07,998 - mmseg - INFO - Iter [2450/80000] lr: 1.392e-06, eta: 1 day, 17:50:42, time: 1.814, data_time: 0.063, memory: 64699, decode.loss_cls: 0.2502, decode.loss_mask: 0.3069, decode.loss_dice: 0.8302, decode.d0.loss_cls: 0.4640, decode.d0.loss_mask: 0.3287, decode.d0.loss_dice: 0.9190, decode.d1.loss_cls: 0.2695, decode.d1.loss_mask: 0.3120, decode.d1.loss_dice: 0.8605, decode.d2.loss_cls: 0.2566, decode.d2.loss_mask: 0.3074, decode.d2.loss_dice: 0.8471, 
decode.d3.loss_cls: 0.2526, decode.d3.loss_mask: 0.3062, decode.d3.loss_dice: 0.8320, decode.d4.loss_cls: 0.2496, decode.d4.loss_mask: 0.3070, decode.d4.loss_dice: 0.8398, decode.d5.loss_cls: 0.2464, decode.d5.loss_mask: 0.3070, decode.d5.loss_dice: 0.8347, decode.d6.loss_cls: 0.2499, decode.d6.loss_mask: 0.3078, decode.d6.loss_dice: 0.8290, decode.d7.loss_cls: 0.2513, decode.d7.loss_mask: 0.3068, decode.d7.loss_dice: 0.8330, decode.d8.loss_cls: 0.2485, decode.d8.loss_mask: 0.3068, decode.d8.loss_dice: 0.8382, loss: 14.2986 +2022-05-09 18:48:36,154 - mmseg - INFO - Iter [2500/80000] lr: 1.391e-06, eta: 1 day, 17:44:25, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2353, decode.loss_mask: 0.3267, decode.loss_dice: 0.8427, decode.d0.loss_cls: 0.4530, decode.d0.loss_mask: 0.3559, decode.d0.loss_dice: 0.9244, decode.d1.loss_cls: 0.2507, decode.d1.loss_mask: 0.3326, decode.d1.loss_dice: 0.8754, decode.d2.loss_cls: 0.2429, decode.d2.loss_mask: 0.3260, decode.d2.loss_dice: 0.8550, decode.d3.loss_cls: 0.2368, decode.d3.loss_mask: 0.3266, decode.d3.loss_dice: 0.8470, decode.d4.loss_cls: 0.2444, decode.d4.loss_mask: 0.3255, decode.d4.loss_dice: 0.8444, decode.d5.loss_cls: 0.2353, decode.d5.loss_mask: 0.3270, decode.d5.loss_dice: 0.8425, decode.d6.loss_cls: 0.2379, decode.d6.loss_mask: 0.3265, decode.d6.loss_dice: 0.8416, decode.d7.loss_cls: 0.2342, decode.d7.loss_mask: 0.3275, decode.d7.loss_dice: 0.8409, decode.d8.loss_cls: 0.2322, decode.d8.loss_mask: 0.3278, decode.d8.loss_dice: 0.8437, loss: 14.4623 +2022-05-09 18:50:05,646 - mmseg - INFO - Iter [2550/80000] lr: 1.390e-06, eta: 1 day, 17:39:03, time: 1.791, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2411, decode.loss_mask: 0.3075, decode.loss_dice: 0.8086, decode.d0.loss_cls: 0.4678, decode.d0.loss_mask: 0.3312, decode.d0.loss_dice: 0.8990, decode.d1.loss_cls: 0.2647, decode.d1.loss_mask: 0.3118, decode.d1.loss_dice: 0.8401, decode.d2.loss_cls: 0.2480, decode.d2.loss_mask: 0.3089, 
decode.d2.loss_dice: 0.8232, decode.d3.loss_cls: 0.2453, decode.d3.loss_mask: 0.3062, decode.d3.loss_dice: 0.8155, decode.d4.loss_cls: 0.2501, decode.d4.loss_mask: 0.3057, decode.d4.loss_dice: 0.8121, decode.d5.loss_cls: 0.2501, decode.d5.loss_mask: 0.3048, decode.d5.loss_dice: 0.8124, decode.d6.loss_cls: 0.2432, decode.d6.loss_mask: 0.3064, decode.d6.loss_dice: 0.8063, decode.d7.loss_cls: 0.2419, decode.d7.loss_mask: 0.3069, decode.d7.loss_dice: 0.8107, decode.d8.loss_cls: 0.2452, decode.d8.loss_mask: 0.3070, decode.d8.loss_dice: 0.8146, loss: 14.0363 +2022-05-09 18:51:35,012 - mmseg - INFO - Iter [2600/80000] lr: 1.389e-06, eta: 1 day, 17:33:45, time: 1.788, data_time: 0.020, memory: 64699, decode.loss_cls: 0.2412, decode.loss_mask: 0.3138, decode.loss_dice: 0.8185, decode.d0.loss_cls: 0.4627, decode.d0.loss_mask: 0.3386, decode.d0.loss_dice: 0.9028, decode.d1.loss_cls: 0.2715, decode.d1.loss_mask: 0.3158, decode.d1.loss_dice: 0.8450, decode.d2.loss_cls: 0.2569, decode.d2.loss_mask: 0.3133, decode.d2.loss_dice: 0.8291, decode.d3.loss_cls: 0.2486, decode.d3.loss_mask: 0.3137, decode.d3.loss_dice: 0.8182, decode.d4.loss_cls: 0.2450, decode.d4.loss_mask: 0.3151, decode.d4.loss_dice: 0.8186, decode.d5.loss_cls: 0.2419, decode.d5.loss_mask: 0.3157, decode.d5.loss_dice: 0.8201, decode.d6.loss_cls: 0.2418, decode.d6.loss_mask: 0.3143, decode.d6.loss_dice: 0.8191, decode.d7.loss_cls: 0.2308, decode.d7.loss_mask: 0.3144, decode.d7.loss_dice: 0.8182, decode.d8.loss_cls: 0.2377, decode.d8.loss_mask: 0.3144, decode.d8.loss_dice: 0.8216, loss: 14.1584 +2022-05-09 18:53:06,313 - mmseg - INFO - Iter [2650/80000] lr: 1.388e-06, eta: 1 day, 17:29:31, time: 1.825, data_time: 0.064, memory: 64699, decode.loss_cls: 0.2247, decode.loss_mask: 0.3044, decode.loss_dice: 0.8290, decode.d0.loss_cls: 0.4528, decode.d0.loss_mask: 0.3268, decode.d0.loss_dice: 0.9086, decode.d1.loss_cls: 0.2423, decode.d1.loss_mask: 0.3090, decode.d1.loss_dice: 0.8552, decode.d2.loss_cls: 0.2334, 
decode.d2.loss_mask: 0.3054, decode.d2.loss_dice: 0.8387, decode.d3.loss_cls: 0.2252, decode.d3.loss_mask: 0.3039, decode.d3.loss_dice: 0.8305, decode.d4.loss_cls: 0.2288, decode.d4.loss_mask: 0.3038, decode.d4.loss_dice: 0.8258, decode.d5.loss_cls: 0.2295, decode.d5.loss_mask: 0.3036, decode.d5.loss_dice: 0.8213, decode.d6.loss_cls: 0.2311, decode.d6.loss_mask: 0.3042, decode.d6.loss_dice: 0.8216, decode.d7.loss_cls: 0.2268, decode.d7.loss_mask: 0.3043, decode.d7.loss_dice: 0.8240, decode.d8.loss_cls: 0.2253, decode.d8.loss_mask: 0.3037, decode.d8.loss_dice: 0.8203, loss: 13.9640 +2022-05-09 18:54:35,936 - mmseg - INFO - Iter [2700/80000] lr: 1.387e-06, eta: 1 day, 17:24:37, time: 1.793, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2383, decode.loss_mask: 0.3077, decode.loss_dice: 0.8408, decode.d0.loss_cls: 0.4701, decode.d0.loss_mask: 0.3329, decode.d0.loss_dice: 0.9236, decode.d1.loss_cls: 0.2564, decode.d1.loss_mask: 0.3104, decode.d1.loss_dice: 0.8662, decode.d2.loss_cls: 0.2416, decode.d2.loss_mask: 0.3105, decode.d2.loss_dice: 0.8512, decode.d3.loss_cls: 0.2372, decode.d3.loss_mask: 0.3080, decode.d3.loss_dice: 0.8424, decode.d4.loss_cls: 0.2315, decode.d4.loss_mask: 0.3087, decode.d4.loss_dice: 0.8451, decode.d5.loss_cls: 0.2445, decode.d5.loss_mask: 0.3082, decode.d5.loss_dice: 0.8468, decode.d6.loss_cls: 0.2335, decode.d6.loss_mask: 0.3071, decode.d6.loss_dice: 0.8354, decode.d7.loss_cls: 0.2322, decode.d7.loss_mask: 0.3075, decode.d7.loss_dice: 0.8408, decode.d8.loss_cls: 0.2411, decode.d8.loss_mask: 0.3068, decode.d8.loss_dice: 0.8376, loss: 14.2640 +2022-05-09 18:56:04,897 - mmseg - INFO - Iter [2750/80000] lr: 1.386e-06, eta: 1 day, 17:19:31, time: 1.779, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2491, decode.loss_mask: 0.3138, decode.loss_dice: 0.8206, decode.d0.loss_cls: 0.4737, decode.d0.loss_mask: 0.3390, decode.d0.loss_dice: 0.9146, decode.d1.loss_cls: 0.2743, decode.d1.loss_mask: 0.3178, decode.d1.loss_dice: 0.8508, 
decode.d2.loss_cls: 0.2701, decode.d2.loss_mask: 0.3152, decode.d2.loss_dice: 0.8323, decode.d3.loss_cls: 0.2573, decode.d3.loss_mask: 0.3131, decode.d3.loss_dice: 0.8262, decode.d4.loss_cls: 0.2577, decode.d4.loss_mask: 0.3146, decode.d4.loss_dice: 0.8225, decode.d5.loss_cls: 0.2624, decode.d5.loss_mask: 0.3133, decode.d5.loss_dice: 0.8206, decode.d6.loss_cls: 0.2539, decode.d6.loss_mask: 0.3142, decode.d6.loss_dice: 0.8200, decode.d7.loss_cls: 0.2508, decode.d7.loss_mask: 0.3141, decode.d7.loss_dice: 0.8174, decode.d8.loss_cls: 0.2499, decode.d8.loss_mask: 0.3132, decode.d8.loss_dice: 0.8232, loss: 14.3159 +2022-05-09 18:57:37,463 - mmseg - INFO - Iter [2800/80000] lr: 1.386e-06, eta: 1 day, 17:16:07, time: 1.848, data_time: 0.066, memory: 64699, decode.loss_cls: 0.2581, decode.loss_mask: 0.3179, decode.loss_dice: 0.8254, decode.d0.loss_cls: 0.4590, decode.d0.loss_mask: 0.3411, decode.d0.loss_dice: 0.9142, decode.d1.loss_cls: 0.2776, decode.d1.loss_mask: 0.3207, decode.d1.loss_dice: 0.8541, decode.d2.loss_cls: 0.2674, decode.d2.loss_mask: 0.3178, decode.d2.loss_dice: 0.8356, decode.d3.loss_cls: 0.2527, decode.d3.loss_mask: 0.3171, decode.d3.loss_dice: 0.8295, decode.d4.loss_cls: 0.2501, decode.d4.loss_mask: 0.3185, decode.d4.loss_dice: 0.8294, decode.d5.loss_cls: 0.2471, decode.d5.loss_mask: 0.3182, decode.d5.loss_dice: 0.8257, decode.d6.loss_cls: 0.2521, decode.d6.loss_mask: 0.3172, decode.d6.loss_dice: 0.8239, decode.d7.loss_cls: 0.2479, decode.d7.loss_mask: 0.3188, decode.d7.loss_dice: 0.8237, decode.d8.loss_cls: 0.2511, decode.d8.loss_mask: 0.3178, decode.d8.loss_dice: 0.8250, loss: 14.3546 +2022-05-09 18:59:06,095 - mmseg - INFO - Iter [2850/80000] lr: 1.385e-06, eta: 1 day, 17:11:09, time: 1.775, data_time: 0.022, memory: 64699, decode.loss_cls: 0.2484, decode.loss_mask: 0.3082, decode.loss_dice: 0.8491, decode.d0.loss_cls: 0.4718, decode.d0.loss_mask: 0.3348, decode.d0.loss_dice: 0.9343, decode.d1.loss_cls: 0.2893, decode.d1.loss_mask: 0.3136, 
decode.d1.loss_dice: 0.8712, decode.d2.loss_cls: 0.2644, decode.d2.loss_mask: 0.3092, decode.d2.loss_dice: 0.8578, decode.d3.loss_cls: 0.2610, decode.d3.loss_mask: 0.3086, decode.d3.loss_dice: 0.8498, decode.d4.loss_cls: 0.2637, decode.d4.loss_mask: 0.3095, decode.d4.loss_dice: 0.8489, decode.d5.loss_cls: 0.2576, decode.d5.loss_mask: 0.3098, decode.d5.loss_dice: 0.8509, decode.d6.loss_cls: 0.2566, decode.d6.loss_mask: 0.3094, decode.d6.loss_dice: 0.8455, decode.d7.loss_cls: 0.2515, decode.d7.loss_mask: 0.3078, decode.d7.loss_dice: 0.8453, decode.d8.loss_cls: 0.2560, decode.d8.loss_mask: 0.3082, decode.d8.loss_dice: 0.8452, loss: 14.5376 +2022-05-09 19:00:34,988 - mmseg - INFO - Iter [2900/80000] lr: 1.384e-06, eta: 1 day, 17:06:22, time: 1.778, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2559, decode.loss_mask: 0.3152, decode.loss_dice: 0.8180, decode.d0.loss_cls: 0.4645, decode.d0.loss_mask: 0.3420, decode.d0.loss_dice: 0.9015, decode.d1.loss_cls: 0.2615, decode.d1.loss_mask: 0.3202, decode.d1.loss_dice: 0.8506, decode.d2.loss_cls: 0.2610, decode.d2.loss_mask: 0.3168, decode.d2.loss_dice: 0.8362, decode.d3.loss_cls: 0.2513, decode.d3.loss_mask: 0.3158, decode.d3.loss_dice: 0.8221, decode.d4.loss_cls: 0.2467, decode.d4.loss_mask: 0.3164, decode.d4.loss_dice: 0.8254, decode.d5.loss_cls: 0.2511, decode.d5.loss_mask: 0.3166, decode.d5.loss_dice: 0.8215, decode.d6.loss_cls: 0.2505, decode.d6.loss_mask: 0.3148, decode.d6.loss_dice: 0.8132, decode.d7.loss_cls: 0.2487, decode.d7.loss_mask: 0.3151, decode.d7.loss_dice: 0.8157, decode.d8.loss_cls: 0.2397, decode.d8.loss_mask: 0.3149, decode.d8.loss_dice: 0.8202, loss: 14.2432 +2022-05-09 19:02:02,394 - mmseg - INFO - Iter [2950/80000] lr: 1.383e-06, eta: 1 day, 17:01:03, time: 1.748, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2470, decode.loss_mask: 0.3126, decode.loss_dice: 0.8404, decode.d0.loss_cls: 0.4740, decode.d0.loss_mask: 0.3380, decode.d0.loss_dice: 0.9266, decode.d1.loss_cls: 0.2602, 
decode.d1.loss_mask: 0.3140, decode.d1.loss_dice: 0.8746, decode.d2.loss_cls: 0.2567, decode.d2.loss_mask: 0.3115, decode.d2.loss_dice: 0.8538, decode.d3.loss_cls: 0.2468, decode.d3.loss_mask: 0.3112, decode.d3.loss_dice: 0.8426, decode.d4.loss_cls: 0.2399, decode.d4.loss_mask: 0.3113, decode.d4.loss_dice: 0.8414, decode.d5.loss_cls: 0.2429, decode.d5.loss_mask: 0.3107, decode.d5.loss_dice: 0.8447, decode.d6.loss_cls: 0.2420, decode.d6.loss_mask: 0.3113, decode.d6.loss_dice: 0.8396, decode.d7.loss_cls: 0.2444, decode.d7.loss_mask: 0.3119, decode.d7.loss_dice: 0.8424, decode.d8.loss_cls: 0.2409, decode.d8.loss_mask: 0.3121, decode.d8.loss_dice: 0.8437, loss: 14.3888 +2022-05-09 19:03:33,511 - mmseg - INFO - Saving checkpoint at 3000 iterations +2022-05-09 19:04:03,390 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 19:04:03,393 - mmseg - INFO - Iter [3000/80000] lr: 1.382e-06, eta: 1 day, 17:10:10, time: 2.418, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2335, decode.loss_mask: 0.3104, decode.loss_dice: 0.8387, decode.d0.loss_cls: 0.4628, decode.d0.loss_mask: 0.3339, decode.d0.loss_dice: 0.9250, decode.d1.loss_cls: 0.2665, decode.d1.loss_mask: 0.3175, decode.d1.loss_dice: 0.8656, decode.d2.loss_cls: 0.2424, decode.d2.loss_mask: 0.3128, decode.d2.loss_dice: 0.8498, decode.d3.loss_cls: 0.2402, decode.d3.loss_mask: 0.3112, decode.d3.loss_dice: 0.8374, decode.d4.loss_cls: 0.2433, decode.d4.loss_mask: 0.3105, decode.d4.loss_dice: 0.8358, decode.d5.loss_cls: 0.2397, decode.d5.loss_mask: 0.3088, decode.d5.loss_dice: 0.8375, decode.d6.loss_cls: 0.2350, decode.d6.loss_mask: 0.3104, decode.d6.loss_dice: 0.8317, decode.d7.loss_cls: 0.2321, decode.d7.loss_mask: 0.3105, decode.d7.loss_dice: 0.8341, decode.d8.loss_cls: 0.2345, decode.d8.loss_mask: 0.3095, decode.d8.loss_dice: 0.8348, loss: 14.2557 +2022-05-09 19:05:58,821 - mmseg - INFO - per class results: +2022-05-09 19:05:58,825 - mmseg - INFO - 
++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.78 | 99.26 | +| sidewalk | 89.62 | 93.75 | +| building | 94.5 | 97.06 | +| wall | 65.94 | 80.24 | +| fence | 71.51 | 82.47 | +| pole | 72.33 | 85.08 | +| traffic light | 75.8 | 91.16 | +| traffic sign | 84.63 | 91.95 | +| vegetation | 93.48 | 96.49 | +| terrain | 68.24 | 86.59 | +| sky | 95.85 | 98.64 | +| person | 86.77 | 93.33 | +| rider | 72.96 | 86.22 | +| car | 96.16 | 97.98 | +| truck | 91.61 | 96.36 | +| bus | 91.02 | 97.02 | +| train | 87.42 | 90.47 | +| motorcycle | 71.38 | 90.67 | +| bicycle | 82.14 | 91.25 | ++---------------+-------+-------+ +2022-05-09 19:05:58,825 - mmseg - INFO - Summary: +2022-05-09 19:05:58,826 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.04 | 83.69 | 91.89 | ++-------+-------+-------+ +2022-05-09 19:05:58,830 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 19:05:58,831 - mmseg - INFO - Iter(val) [32] aAcc: 0.9704, mIoU: 0.8369, mAcc: 0.9189, IoU.road: 0.9878, IoU.sidewalk: 0.8962, IoU.building: 0.9450, IoU.wall: 0.6594, IoU.fence: 0.7151, IoU.pole: 0.7233, IoU.traffic light: 0.7580, IoU.traffic sign: 0.8463, IoU.vegetation: 0.9348, IoU.terrain: 0.6824, IoU.sky: 0.9585, IoU.person: 0.8677, IoU.rider: 0.7296, IoU.car: 0.9616, IoU.truck: 0.9161, IoU.bus: 0.9102, IoU.train: 0.8742, IoU.motorcycle: 0.7138, IoU.bicycle: 0.8214, Acc.road: 0.9926, Acc.sidewalk: 0.9375, Acc.building: 0.9706, Acc.wall: 0.8024, Acc.fence: 0.8247, Acc.pole: 0.8508, Acc.traffic light: 0.9116, Acc.traffic sign: 0.9195, Acc.vegetation: 0.9649, Acc.terrain: 0.8659, Acc.sky: 0.9864, Acc.person: 0.9333, Acc.rider: 0.8622, Acc.car: 0.9798, Acc.truck: 0.9636, Acc.bus: 0.9702, Acc.train: 0.9047, Acc.motorcycle: 0.9067, Acc.bicycle: 0.9125 +2022-05-09 19:07:28,342 - mmseg - INFO - Iter [3050/80000] lr: 1.381e-06, eta: 1 day, 17:54:16, time: 4.098, data_time: 
2.328, memory: 64699, decode.loss_cls: 0.2308, decode.loss_mask: 0.3056, decode.loss_dice: 0.8114, decode.d0.loss_cls: 0.4594, decode.d0.loss_mask: 0.3292, decode.d0.loss_dice: 0.8989, decode.d1.loss_cls: 0.2666, decode.d1.loss_mask: 0.3072, decode.d1.loss_dice: 0.8401, decode.d2.loss_cls: 0.2520, decode.d2.loss_mask: 0.3070, decode.d2.loss_dice: 0.8276, decode.d3.loss_cls: 0.2395, decode.d3.loss_mask: 0.3061, decode.d3.loss_dice: 0.8203, decode.d4.loss_cls: 0.2374, decode.d4.loss_mask: 0.3057, decode.d4.loss_dice: 0.8180, decode.d5.loss_cls: 0.2394, decode.d5.loss_mask: 0.3032, decode.d5.loss_dice: 0.8104, decode.d6.loss_cls: 0.2372, decode.d6.loss_mask: 0.3054, decode.d6.loss_dice: 0.8122, decode.d7.loss_cls: 0.2346, decode.d7.loss_mask: 0.3063, decode.d7.loss_dice: 0.8133, decode.d8.loss_cls: 0.2420, decode.d8.loss_mask: 0.3064, decode.d8.loss_dice: 0.8092, loss: 13.9825 +2022-05-09 19:08:57,056 - mmseg - INFO - Iter [3100/80000] lr: 1.380e-06, eta: 1 day, 17:48:49, time: 1.776, data_time: 0.020, memory: 64699, decode.loss_cls: 0.2458, decode.loss_mask: 0.3028, decode.loss_dice: 0.8231, decode.d0.loss_cls: 0.4659, decode.d0.loss_mask: 0.3243, decode.d0.loss_dice: 0.9034, decode.d1.loss_cls: 0.2652, decode.d1.loss_mask: 0.3081, decode.d1.loss_dice: 0.8456, decode.d2.loss_cls: 0.2575, decode.d2.loss_mask: 0.3048, decode.d2.loss_dice: 0.8352, decode.d3.loss_cls: 0.2424, decode.d3.loss_mask: 0.3031, decode.d3.loss_dice: 0.8292, decode.d4.loss_cls: 0.2357, decode.d4.loss_mask: 0.3030, decode.d4.loss_dice: 0.8210, decode.d5.loss_cls: 0.2454, decode.d5.loss_mask: 0.3029, decode.d5.loss_dice: 0.8229, decode.d6.loss_cls: 0.2327, decode.d6.loss_mask: 0.3030, decode.d6.loss_dice: 0.8263, decode.d7.loss_cls: 0.2369, decode.d7.loss_mask: 0.3028, decode.d7.loss_dice: 0.8258, decode.d8.loss_cls: 0.2348, decode.d8.loss_mask: 0.3044, decode.d8.loss_dice: 0.8233, loss: 14.0774 +2022-05-09 19:10:27,267 - mmseg - INFO - Iter [3150/80000] lr: 1.379e-06, eta: 1 day, 17:44:05, time: 
1.804, data_time: 0.022, memory: 64699, decode.loss_cls: 0.2568, decode.loss_mask: 0.3153, decode.loss_dice: 0.8485, decode.d0.loss_cls: 0.4726, decode.d0.loss_mask: 0.3382, decode.d0.loss_dice: 0.9288, decode.d1.loss_cls: 0.2755, decode.d1.loss_mask: 0.3186, decode.d1.loss_dice: 0.8758, decode.d2.loss_cls: 0.2659, decode.d2.loss_mask: 0.3174, decode.d2.loss_dice: 0.8528, decode.d3.loss_cls: 0.2657, decode.d3.loss_mask: 0.3151, decode.d3.loss_dice: 0.8504, decode.d4.loss_cls: 0.2619, decode.d4.loss_mask: 0.3144, decode.d4.loss_dice: 0.8496, decode.d5.loss_cls: 0.2599, decode.d5.loss_mask: 0.3151, decode.d5.loss_dice: 0.8459, decode.d6.loss_cls: 0.2619, decode.d6.loss_mask: 0.3151, decode.d6.loss_dice: 0.8464, decode.d7.loss_cls: 0.2628, decode.d7.loss_mask: 0.3156, decode.d7.loss_dice: 0.8464, decode.d8.loss_cls: 0.2547, decode.d8.loss_mask: 0.3149, decode.d8.loss_dice: 0.8468, loss: 14.6091 +2022-05-09 19:11:57,672 - mmseg - INFO - Iter [3200/80000] lr: 1.378e-06, eta: 1 day, 17:39:31, time: 1.809, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2489, decode.loss_mask: 0.3103, decode.loss_dice: 0.8256, decode.d0.loss_cls: 0.4579, decode.d0.loss_mask: 0.3357, decode.d0.loss_dice: 0.9081, decode.d1.loss_cls: 0.2801, decode.d1.loss_mask: 0.3142, decode.d1.loss_dice: 0.8479, decode.d2.loss_cls: 0.2578, decode.d2.loss_mask: 0.3113, decode.d2.loss_dice: 0.8425, decode.d3.loss_cls: 0.2531, decode.d3.loss_mask: 0.3098, decode.d3.loss_dice: 0.8311, decode.d4.loss_cls: 0.2546, decode.d4.loss_mask: 0.3099, decode.d4.loss_dice: 0.8236, decode.d5.loss_cls: 0.2596, decode.d5.loss_mask: 0.3084, decode.d5.loss_dice: 0.8253, decode.d6.loss_cls: 0.2542, decode.d6.loss_mask: 0.3086, decode.d6.loss_dice: 0.8208, decode.d7.loss_cls: 0.2471, decode.d7.loss_mask: 0.3088, decode.d7.loss_dice: 0.8279, decode.d8.loss_cls: 0.2482, decode.d8.loss_mask: 0.3096, decode.d8.loss_dice: 0.8253, loss: 14.2662 +2022-05-09 19:13:26,456 - mmseg - INFO - Iter [3250/80000] lr: 1.377e-06, eta: 1 day, 
17:34:25, time: 1.776, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2291, decode.loss_mask: 0.3031, decode.loss_dice: 0.8015, decode.d0.loss_cls: 0.4444, decode.d0.loss_mask: 0.3293, decode.d0.loss_dice: 0.8864, decode.d1.loss_cls: 0.2533, decode.d1.loss_mask: 0.3089, decode.d1.loss_dice: 0.8279, decode.d2.loss_cls: 0.2376, decode.d2.loss_mask: 0.3061, decode.d2.loss_dice: 0.8199, decode.d3.loss_cls: 0.2340, decode.d3.loss_mask: 0.3040, decode.d3.loss_dice: 0.8053, decode.d4.loss_cls: 0.2296, decode.d4.loss_mask: 0.3036, decode.d4.loss_dice: 0.8058, decode.d5.loss_cls: 0.2352, decode.d5.loss_mask: 0.3031, decode.d5.loss_dice: 0.8016, decode.d6.loss_cls: 0.2314, decode.d6.loss_mask: 0.3038, decode.d6.loss_dice: 0.8016, decode.d7.loss_cls: 0.2391, decode.d7.loss_mask: 0.3039, decode.d7.loss_dice: 0.7971, decode.d8.loss_cls: 0.2294, decode.d8.loss_mask: 0.3038, decode.d8.loss_dice: 0.8021, loss: 13.7818 +2022-05-09 19:14:56,884 - mmseg - INFO - Iter [3300/80000] lr: 1.377e-06, eta: 1 day, 17:30:03, time: 1.808, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2371, decode.loss_mask: 0.3118, decode.loss_dice: 0.8236, decode.d0.loss_cls: 0.4711, decode.d0.loss_mask: 0.3316, decode.d0.loss_dice: 0.9129, decode.d1.loss_cls: 0.2635, decode.d1.loss_mask: 0.3136, decode.d1.loss_dice: 0.8547, decode.d2.loss_cls: 0.2380, decode.d2.loss_mask: 0.3112, decode.d2.loss_dice: 0.8378, decode.d3.loss_cls: 0.2336, decode.d3.loss_mask: 0.3118, decode.d3.loss_dice: 0.8348, decode.d4.loss_cls: 0.2354, decode.d4.loss_mask: 0.3113, decode.d4.loss_dice: 0.8336, decode.d5.loss_cls: 0.2399, decode.d5.loss_mask: 0.3101, decode.d5.loss_dice: 0.8292, decode.d6.loss_cls: 0.2333, decode.d6.loss_mask: 0.3100, decode.d6.loss_dice: 0.8291, decode.d7.loss_cls: 0.2389, decode.d7.loss_mask: 0.3098, decode.d7.loss_dice: 0.8264, decode.d8.loss_cls: 0.2349, decode.d8.loss_mask: 0.3098, decode.d8.loss_dice: 0.8332, loss: 14.1720 +2022-05-09 19:16:29,067 - mmseg - INFO - Iter [3350/80000] lr: 
1.376e-06, eta: 1 day, 17:26:24, time: 1.842, data_time: 0.067, memory: 64699, decode.loss_cls: 0.2280, decode.loss_mask: 0.3070, decode.loss_dice: 0.8223, decode.d0.loss_cls: 0.4386, decode.d0.loss_mask: 0.3326, decode.d0.loss_dice: 0.8940, decode.d1.loss_cls: 0.2605, decode.d1.loss_mask: 0.3125, decode.d1.loss_dice: 0.8458, decode.d2.loss_cls: 0.2461, decode.d2.loss_mask: 0.3090, decode.d2.loss_dice: 0.8268, decode.d3.loss_cls: 0.2352, decode.d3.loss_mask: 0.3097, decode.d3.loss_dice: 0.8252, decode.d4.loss_cls: 0.2357, decode.d4.loss_mask: 0.3094, decode.d4.loss_dice: 0.8223, decode.d5.loss_cls: 0.2490, decode.d5.loss_mask: 0.3089, decode.d5.loss_dice: 0.8235, decode.d6.loss_cls: 0.2373, decode.d6.loss_mask: 0.3089, decode.d6.loss_dice: 0.8207, decode.d7.loss_cls: 0.2378, decode.d7.loss_mask: 0.3077, decode.d7.loss_dice: 0.8186, decode.d8.loss_cls: 0.2341, decode.d8.loss_mask: 0.3084, decode.d8.loss_dice: 0.8252, loss: 14.0407 +2022-05-09 19:17:57,827 - mmseg - INFO - Iter [3400/80000] lr: 1.375e-06, eta: 1 day, 17:21:36, time: 1.776, data_time: 0.021, memory: 64699, decode.loss_cls: 0.2535, decode.loss_mask: 0.3073, decode.loss_dice: 0.8247, decode.d0.loss_cls: 0.4672, decode.d0.loss_mask: 0.3335, decode.d0.loss_dice: 0.9130, decode.d1.loss_cls: 0.2716, decode.d1.loss_mask: 0.3120, decode.d1.loss_dice: 0.8600, decode.d2.loss_cls: 0.2613, decode.d2.loss_mask: 0.3097, decode.d2.loss_dice: 0.8366, decode.d3.loss_cls: 0.2531, decode.d3.loss_mask: 0.3066, decode.d3.loss_dice: 0.8338, decode.d4.loss_cls: 0.2552, decode.d4.loss_mask: 0.3063, decode.d4.loss_dice: 0.8348, decode.d5.loss_cls: 0.2572, decode.d5.loss_mask: 0.3055, decode.d5.loss_dice: 0.8315, decode.d6.loss_cls: 0.2560, decode.d6.loss_mask: 0.3057, decode.d6.loss_dice: 0.8282, decode.d7.loss_cls: 0.2498, decode.d7.loss_mask: 0.3066, decode.d7.loss_dice: 0.8338, decode.d8.loss_cls: 0.2537, decode.d8.loss_mask: 0.3067, decode.d8.loss_dice: 0.8279, loss: 14.3031 +2022-05-09 19:19:26,821 - mmseg - INFO - Iter 
[3450/80000] lr: 1.374e-06, eta: 1 day, 17:16:57, time: 1.780, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2298, decode.loss_mask: 0.3212, decode.loss_dice: 0.8284, decode.d0.loss_cls: 0.4661, decode.d0.loss_mask: 0.3497, decode.d0.loss_dice: 0.9094, decode.d1.loss_cls: 0.2707, decode.d1.loss_mask: 0.3265, decode.d1.loss_dice: 0.8543, decode.d2.loss_cls: 0.2423, decode.d2.loss_mask: 0.3235, decode.d2.loss_dice: 0.8417, decode.d3.loss_cls: 0.2316, decode.d3.loss_mask: 0.3229, decode.d3.loss_dice: 0.8312, decode.d4.loss_cls: 0.2325, decode.d4.loss_mask: 0.3236, decode.d4.loss_dice: 0.8299, decode.d5.loss_cls: 0.2320, decode.d5.loss_mask: 0.3227, decode.d5.loss_dice: 0.8312, decode.d6.loss_cls: 0.2357, decode.d6.loss_mask: 0.3208, decode.d6.loss_dice: 0.8247, decode.d7.loss_cls: 0.2337, decode.d7.loss_mask: 0.3213, decode.d7.loss_dice: 0.8252, decode.d8.loss_cls: 0.2325, decode.d8.loss_mask: 0.3217, decode.d8.loss_dice: 0.8293, loss: 14.2660 +2022-05-09 19:20:55,979 - mmseg - INFO - Iter [3500/80000] lr: 1.373e-06, eta: 1 day, 17:12:27, time: 1.783, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2238, decode.loss_mask: 0.3043, decode.loss_dice: 0.8216, decode.d0.loss_cls: 0.4219, decode.d0.loss_mask: 0.3253, decode.d0.loss_dice: 0.9072, decode.d1.loss_cls: 0.2373, decode.d1.loss_mask: 0.3100, decode.d1.loss_dice: 0.8489, decode.d2.loss_cls: 0.2329, decode.d2.loss_mask: 0.3069, decode.d2.loss_dice: 0.8348, decode.d3.loss_cls: 0.2311, decode.d3.loss_mask: 0.3047, decode.d3.loss_dice: 0.8190, decode.d4.loss_cls: 0.2295, decode.d4.loss_mask: 0.3049, decode.d4.loss_dice: 0.8233, decode.d5.loss_cls: 0.2223, decode.d5.loss_mask: 0.3056, decode.d5.loss_dice: 0.8216, decode.d6.loss_cls: 0.2255, decode.d6.loss_mask: 0.3040, decode.d6.loss_dice: 0.8158, decode.d7.loss_cls: 0.2289, decode.d7.loss_mask: 0.3050, decode.d7.loss_dice: 0.8208, decode.d8.loss_cls: 0.2218, decode.d8.loss_mask: 0.3048, decode.d8.loss_dice: 0.8237, loss: 13.8871 +2022-05-09 19:22:27,223 - 
mmseg - INFO - Iter [3550/80000] lr: 1.372e-06, eta: 1 day, 17:08:47, time: 1.825, data_time: 0.068, memory: 64699, decode.loss_cls: 0.2327, decode.loss_mask: 0.3065, decode.loss_dice: 0.8171, decode.d0.loss_cls: 0.4469, decode.d0.loss_mask: 0.3298, decode.d0.loss_dice: 0.9072, decode.d1.loss_cls: 0.2628, decode.d1.loss_mask: 0.3115, decode.d1.loss_dice: 0.8528, decode.d2.loss_cls: 0.2514, decode.d2.loss_mask: 0.3094, decode.d2.loss_dice: 0.8316, decode.d3.loss_cls: 0.2326, decode.d3.loss_mask: 0.3078, decode.d3.loss_dice: 0.8234, decode.d4.loss_cls: 0.2405, decode.d4.loss_mask: 0.3082, decode.d4.loss_dice: 0.8205, decode.d5.loss_cls: 0.2373, decode.d5.loss_mask: 0.3069, decode.d5.loss_dice: 0.8206, decode.d6.loss_cls: 0.2337, decode.d6.loss_mask: 0.3066, decode.d6.loss_dice: 0.8156, decode.d7.loss_cls: 0.2349, decode.d7.loss_mask: 0.3060, decode.d7.loss_dice: 0.8164, decode.d8.loss_cls: 0.2349, decode.d8.loss_mask: 0.3064, decode.d8.loss_dice: 0.8213, loss: 14.0334 +2022-05-09 19:23:55,987 - mmseg - INFO - Iter [3600/80000] lr: 1.371e-06, eta: 1 day, 17:04:18, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2232, decode.loss_mask: 0.3144, decode.loss_dice: 0.8190, decode.d0.loss_cls: 0.4454, decode.d0.loss_mask: 0.3438, decode.d0.loss_dice: 0.9072, decode.d1.loss_cls: 0.2562, decode.d1.loss_mask: 0.3184, decode.d1.loss_dice: 0.8451, decode.d2.loss_cls: 0.2467, decode.d2.loss_mask: 0.3142, decode.d2.loss_dice: 0.8290, decode.d3.loss_cls: 0.2346, decode.d3.loss_mask: 0.3138, decode.d3.loss_dice: 0.8203, decode.d4.loss_cls: 0.2357, decode.d4.loss_mask: 0.3142, decode.d4.loss_dice: 0.8237, decode.d5.loss_cls: 0.2449, decode.d5.loss_mask: 0.3144, decode.d5.loss_dice: 0.8156, decode.d6.loss_cls: 0.2379, decode.d6.loss_mask: 0.3144, decode.d6.loss_dice: 0.8199, decode.d7.loss_cls: 0.2284, decode.d7.loss_mask: 0.3139, decode.d7.loss_dice: 0.8179, decode.d8.loss_cls: 0.2322, decode.d8.loss_mask: 0.3134, decode.d8.loss_dice: 0.8186, loss: 14.0765 
+2022-05-09 19:25:25,141 - mmseg - INFO - Iter [3650/80000] lr: 1.370e-06, eta: 1 day, 17:00:01, time: 1.783, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2253, decode.loss_mask: 0.2991, decode.loss_dice: 0.8138, decode.d0.loss_cls: 0.4372, decode.d0.loss_mask: 0.3236, decode.d0.loss_dice: 0.9061, decode.d1.loss_cls: 0.2584, decode.d1.loss_mask: 0.3043, decode.d1.loss_dice: 0.8448, decode.d2.loss_cls: 0.2368, decode.d2.loss_mask: 0.3006, decode.d2.loss_dice: 0.8257, decode.d3.loss_cls: 0.2290, decode.d3.loss_mask: 0.3001, decode.d3.loss_dice: 0.8215, decode.d4.loss_cls: 0.2399, decode.d4.loss_mask: 0.2981, decode.d4.loss_dice: 0.8200, decode.d5.loss_cls: 0.2315, decode.d5.loss_mask: 0.2973, decode.d5.loss_dice: 0.8207, decode.d6.loss_cls: 0.2310, decode.d6.loss_mask: 0.2994, decode.d6.loss_dice: 0.8160, decode.d7.loss_cls: 0.2356, decode.d7.loss_mask: 0.2986, decode.d7.loss_dice: 0.8181, decode.d8.loss_cls: 0.2273, decode.d8.loss_mask: 0.2987, decode.d8.loss_dice: 0.8163, loss: 13.8746 +2022-05-09 19:26:53,890 - mmseg - INFO - Iter [3700/80000] lr: 1.369e-06, eta: 1 day, 16:55:42, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2139, decode.loss_mask: 0.3028, decode.loss_dice: 0.8056, decode.d0.loss_cls: 0.4496, decode.d0.loss_mask: 0.3262, decode.d0.loss_dice: 0.8746, decode.d1.loss_cls: 0.2459, decode.d1.loss_mask: 0.3093, decode.d1.loss_dice: 0.8248, decode.d2.loss_cls: 0.2309, decode.d2.loss_mask: 0.3058, decode.d2.loss_dice: 0.8117, decode.d3.loss_cls: 0.2192, decode.d3.loss_mask: 0.3036, decode.d3.loss_dice: 0.8054, decode.d4.loss_cls: 0.2205, decode.d4.loss_mask: 0.3040, decode.d4.loss_dice: 0.8038, decode.d5.loss_cls: 0.2168, decode.d5.loss_mask: 0.3042, decode.d5.loss_dice: 0.8009, decode.d6.loss_cls: 0.2184, decode.d6.loss_mask: 0.3039, decode.d6.loss_dice: 0.8042, decode.d7.loss_cls: 0.2087, decode.d7.loss_mask: 0.3041, decode.d7.loss_dice: 0.8035, decode.d8.loss_cls: 0.2164, decode.d8.loss_mask: 0.3030, decode.d8.loss_dice: 
0.7976, loss: 13.6393 +2022-05-09 19:28:23,870 - mmseg - INFO - Iter [3750/80000] lr: 1.369e-06, eta: 1 day, 16:51:51, time: 1.800, data_time: 0.064, memory: 64699, decode.loss_cls: 0.2342, decode.loss_mask: 0.3073, decode.loss_dice: 0.8246, decode.d0.loss_cls: 0.4369, decode.d0.loss_mask: 0.3345, decode.d0.loss_dice: 0.9062, decode.d1.loss_cls: 0.2454, decode.d1.loss_mask: 0.3132, decode.d1.loss_dice: 0.8520, decode.d2.loss_cls: 0.2396, decode.d2.loss_mask: 0.3107, decode.d2.loss_dice: 0.8372, decode.d3.loss_cls: 0.2341, decode.d3.loss_mask: 0.3090, decode.d3.loss_dice: 0.8273, decode.d4.loss_cls: 0.2351, decode.d4.loss_mask: 0.3065, decode.d4.loss_dice: 0.8266, decode.d5.loss_cls: 0.2332, decode.d5.loss_mask: 0.3083, decode.d5.loss_dice: 0.8264, decode.d6.loss_cls: 0.2300, decode.d6.loss_mask: 0.3085, decode.d6.loss_dice: 0.8196, decode.d7.loss_cls: 0.2260, decode.d7.loss_mask: 0.3069, decode.d7.loss_dice: 0.8205, decode.d8.loss_cls: 0.2393, decode.d8.loss_mask: 0.3069, decode.d8.loss_dice: 0.8237, loss: 14.0293 +2022-05-09 19:29:53,566 - mmseg - INFO - Iter [3800/80000] lr: 1.368e-06, eta: 1 day, 16:47:59, time: 1.794, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2263, decode.loss_mask: 0.2969, decode.loss_dice: 0.8218, decode.d0.loss_cls: 0.4503, decode.d0.loss_mask: 0.3201, decode.d0.loss_dice: 0.9130, decode.d1.loss_cls: 0.2553, decode.d1.loss_mask: 0.3018, decode.d1.loss_dice: 0.8540, decode.d2.loss_cls: 0.2380, decode.d2.loss_mask: 0.2986, decode.d2.loss_dice: 0.8402, decode.d3.loss_cls: 0.2275, decode.d3.loss_mask: 0.2983, decode.d3.loss_dice: 0.8281, decode.d4.loss_cls: 0.2299, decode.d4.loss_mask: 0.2986, decode.d4.loss_dice: 0.8239, decode.d5.loss_cls: 0.2255, decode.d5.loss_mask: 0.2983, decode.d5.loss_dice: 0.8232, decode.d6.loss_cls: 0.2164, decode.d6.loss_mask: 0.2981, decode.d6.loss_dice: 0.8205, decode.d7.loss_cls: 0.2262, decode.d7.loss_mask: 0.2974, decode.d7.loss_dice: 0.8198, decode.d8.loss_cls: 0.2221, decode.d8.loss_mask: 0.2975, 
decode.d8.loss_dice: 0.8228, loss: 13.8905 +2022-05-09 19:31:22,989 - mmseg - INFO - Iter [3850/80000] lr: 1.367e-06, eta: 1 day, 16:44:05, time: 1.788, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2375, decode.loss_mask: 0.3045, decode.loss_dice: 0.8211, decode.d0.loss_cls: 0.4400, decode.d0.loss_mask: 0.3229, decode.d0.loss_dice: 0.8935, decode.d1.loss_cls: 0.2594, decode.d1.loss_mask: 0.3067, decode.d1.loss_dice: 0.8363, decode.d2.loss_cls: 0.2446, decode.d2.loss_mask: 0.3050, decode.d2.loss_dice: 0.8287, decode.d3.loss_cls: 0.2349, decode.d3.loss_mask: 0.3051, decode.d3.loss_dice: 0.8165, decode.d4.loss_cls: 0.2408, decode.d4.loss_mask: 0.3039, decode.d4.loss_dice: 0.8184, decode.d5.loss_cls: 0.2391, decode.d5.loss_mask: 0.3037, decode.d5.loss_dice: 0.8130, decode.d6.loss_cls: 0.2371, decode.d6.loss_mask: 0.3027, decode.d6.loss_dice: 0.8167, decode.d7.loss_cls: 0.2387, decode.d7.loss_mask: 0.3030, decode.d7.loss_dice: 0.8183, decode.d8.loss_cls: 0.2379, decode.d8.loss_mask: 0.3022, decode.d8.loss_dice: 0.8144, loss: 13.9467 +2022-05-09 19:32:52,030 - mmseg - INFO - Iter [3900/80000] lr: 1.366e-06, eta: 1 day, 16:40:07, time: 1.781, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2235, decode.loss_mask: 0.3012, decode.loss_dice: 0.7974, decode.d0.loss_cls: 0.4460, decode.d0.loss_mask: 0.3251, decode.d0.loss_dice: 0.8801, decode.d1.loss_cls: 0.2440, decode.d1.loss_mask: 0.3062, decode.d1.loss_dice: 0.8282, decode.d2.loss_cls: 0.2393, decode.d2.loss_mask: 0.3022, decode.d2.loss_dice: 0.8129, decode.d3.loss_cls: 0.2193, decode.d3.loss_mask: 0.3021, decode.d3.loss_dice: 0.8023, decode.d4.loss_cls: 0.2255, decode.d4.loss_mask: 0.3027, decode.d4.loss_dice: 0.8014, decode.d5.loss_cls: 0.2239, decode.d5.loss_mask: 0.3022, decode.d5.loss_dice: 0.7990, decode.d6.loss_cls: 0.2134, decode.d6.loss_mask: 0.3034, decode.d6.loss_dice: 0.7942, decode.d7.loss_cls: 0.2269, decode.d7.loss_mask: 0.3021, decode.d7.loss_dice: 0.7945, decode.d8.loss_cls: 0.2196, 
decode.d8.loss_mask: 0.3025, decode.d8.loss_dice: 0.7986, loss: 13.6397 +2022-05-09 19:34:23,481 - mmseg - INFO - Iter [3950/80000] lr: 1.365e-06, eta: 1 day, 16:37:00, time: 1.829, data_time: 0.064, memory: 64699, decode.loss_cls: 0.2172, decode.loss_mask: 0.3063, decode.loss_dice: 0.8146, decode.d0.loss_cls: 0.4361, decode.d0.loss_mask: 0.3302, decode.d0.loss_dice: 0.8971, decode.d1.loss_cls: 0.2415, decode.d1.loss_mask: 0.3104, decode.d1.loss_dice: 0.8448, decode.d2.loss_cls: 0.2281, decode.d2.loss_mask: 0.3092, decode.d2.loss_dice: 0.8336, decode.d3.loss_cls: 0.2278, decode.d3.loss_mask: 0.3067, decode.d3.loss_dice: 0.8165, decode.d4.loss_cls: 0.2247, decode.d4.loss_mask: 0.3063, decode.d4.loss_dice: 0.8195, decode.d5.loss_cls: 0.2245, decode.d5.loss_mask: 0.3059, decode.d5.loss_dice: 0.8162, decode.d6.loss_cls: 0.2211, decode.d6.loss_mask: 0.3062, decode.d6.loss_dice: 0.8127, decode.d7.loss_cls: 0.2175, decode.d7.loss_mask: 0.3062, decode.d7.loss_dice: 0.8204, decode.d8.loss_cls: 0.2162, decode.d8.loss_mask: 0.3069, decode.d8.loss_dice: 0.8216, loss: 13.8458 +2022-05-09 19:35:53,488 - mmseg - INFO - Saving checkpoint at 4000 iterations +2022-05-09 19:36:28,900 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 19:36:28,911 - mmseg - INFO - Iter [4000/80000] lr: 1.364e-06, eta: 1 day, 16:44:38, time: 2.506, data_time: 0.020, memory: 64699, decode.loss_cls: 0.2191, decode.loss_mask: 0.3045, decode.loss_dice: 0.8223, decode.d0.loss_cls: 0.4356, decode.d0.loss_mask: 0.3264, decode.d0.loss_dice: 0.9001, decode.d1.loss_cls: 0.2417, decode.d1.loss_mask: 0.3101, decode.d1.loss_dice: 0.8457, decode.d2.loss_cls: 0.2283, decode.d2.loss_mask: 0.3060, decode.d2.loss_dice: 0.8399, decode.d3.loss_cls: 0.2280, decode.d3.loss_mask: 0.3036, decode.d3.loss_dice: 0.8233, decode.d4.loss_cls: 0.2296, decode.d4.loss_mask: 0.3041, decode.d4.loss_dice: 0.8193, decode.d5.loss_cls: 0.2216, decode.d5.loss_mask: 0.3047, decode.d5.loss_dice: 
0.8219, decode.d6.loss_cls: 0.2255, decode.d6.loss_mask: 0.3043, decode.d6.loss_dice: 0.8192, decode.d7.loss_cls: 0.2175, decode.d7.loss_mask: 0.3042, decode.d7.loss_dice: 0.8221, decode.d8.loss_cls: 0.2179, decode.d8.loss_mask: 0.3053, decode.d8.loss_dice: 0.8226, loss: 13.8747 +2022-05-09 19:38:24,390 - mmseg - INFO - per class results: +2022-05-09 19:38:24,396 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.8 | 99.28 | +| sidewalk | 89.41 | 94.93 | +| building | 94.5 | 97.28 | +| wall | 68.17 | 80.47 | +| fence | 71.16 | 84.6 | +| pole | 71.95 | 83.62 | +| traffic light | 77.07 | 89.45 | +| traffic sign | 84.66 | 91.73 | +| vegetation | 93.5 | 96.39 | +| terrain | 66.95 | 78.85 | +| sky | 95.87 | 98.7 | +| person | 86.76 | 92.9 | +| rider | 73.34 | 86.95 | +| car | 96.22 | 98.21 | +| truck | 88.23 | 92.41 | +| bus | 93.67 | 95.74 | +| train | 89.88 | 94.6 | +| motorcycle | 76.64 | 88.31 | +| bicycle | 82.13 | 90.19 | ++---------------+-------+-------+ +2022-05-09 19:38:24,397 - mmseg - INFO - Summary: +2022-05-09 19:38:24,397 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.06 | 84.15 | 91.29 | ++-------+-------+-------+ +2022-05-09 19:38:24,399 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_1000.pth was removed +2022-05-09 19:38:54,301 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_4000.pth. +2022-05-09 19:38:54,311 - mmseg - INFO - Best mIoU is 0.8415 at 4000 iter. 
+2022-05-09 19:38:54,321 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 19:38:54,321 - mmseg - INFO - Iter(val) [32] aAcc: 0.9706, mIoU: 0.8415, mAcc: 0.9129, IoU.road: 0.9880, IoU.sidewalk: 0.8941, IoU.building: 0.9450, IoU.wall: 0.6817, IoU.fence: 0.7116, IoU.pole: 0.7195, IoU.traffic light: 0.7707, IoU.traffic sign: 0.8466, IoU.vegetation: 0.9350, IoU.terrain: 0.6695, IoU.sky: 0.9587, IoU.person: 0.8676, IoU.rider: 0.7334, IoU.car: 0.9622, IoU.truck: 0.8823, IoU.bus: 0.9367, IoU.train: 0.8988, IoU.motorcycle: 0.7664, IoU.bicycle: 0.8213, Acc.road: 0.9928, Acc.sidewalk: 0.9493, Acc.building: 0.9728, Acc.wall: 0.8047, Acc.fence: 0.8460, Acc.pole: 0.8362, Acc.traffic light: 0.8945, Acc.traffic sign: 0.9173, Acc.vegetation: 0.9639, Acc.terrain: 0.7885, Acc.sky: 0.9870, Acc.person: 0.9290, Acc.rider: 0.8695, Acc.car: 0.9821, Acc.truck: 0.9241, Acc.bus: 0.9574, Acc.train: 0.9460, Acc.motorcycle: 0.8831, Acc.bicycle: 0.9019 +2022-05-09 19:40:24,277 - mmseg - INFO - Iter [4050/80000] lr: 1.363e-06, eta: 1 day, 17:26:28, time: 4.710, data_time: 2.930, memory: 64699, decode.loss_cls: 0.2195, decode.loss_mask: 0.2978, decode.loss_dice: 0.8008, decode.d0.loss_cls: 0.4338, decode.d0.loss_mask: 0.3192, decode.d0.loss_dice: 0.8795, decode.d1.loss_cls: 0.2413, decode.d1.loss_mask: 0.3037, decode.d1.loss_dice: 0.8280, decode.d2.loss_cls: 0.2305, decode.d2.loss_mask: 0.3012, decode.d2.loss_dice: 0.8137, decode.d3.loss_cls: 0.2162, decode.d3.loss_mask: 0.3005, decode.d3.loss_dice: 0.8044, decode.d4.loss_cls: 0.2099, decode.d4.loss_mask: 0.2994, decode.d4.loss_dice: 0.8063, decode.d5.loss_cls: 0.2216, decode.d5.loss_mask: 0.2988, decode.d5.loss_dice: 0.8006, decode.d6.loss_cls: 0.2219, decode.d6.loss_mask: 0.2987, decode.d6.loss_dice: 0.8001, decode.d7.loss_cls: 0.2212, decode.d7.loss_mask: 0.2977, decode.d7.loss_dice: 0.8008, decode.d8.loss_cls: 0.2108, decode.d8.loss_mask: 0.2986, decode.d8.loss_dice: 0.8038, loss: 13.5802 
+2022-05-09 19:41:55,457 - mmseg - INFO - Iter [4100/80000] lr: 1.362e-06, eta: 1 day, 17:22:39, time: 1.824, data_time: 0.067, memory: 64699, decode.loss_cls: 0.2385, decode.loss_mask: 0.2943, decode.loss_dice: 0.8068, decode.d0.loss_cls: 0.4173, decode.d0.loss_mask: 0.3187, decode.d0.loss_dice: 0.8990, decode.d1.loss_cls: 0.2585, decode.d1.loss_mask: 0.3010, decode.d1.loss_dice: 0.8373, decode.d2.loss_cls: 0.2535, decode.d2.loss_mask: 0.2974, decode.d2.loss_dice: 0.8241, decode.d3.loss_cls: 0.2413, decode.d3.loss_mask: 0.2967, decode.d3.loss_dice: 0.8125, decode.d4.loss_cls: 0.2349, decode.d4.loss_mask: 0.2960, decode.d4.loss_dice: 0.8158, decode.d5.loss_cls: 0.2356, decode.d5.loss_mask: 0.2949, decode.d5.loss_dice: 0.8155, decode.d6.loss_cls: 0.2372, decode.d6.loss_mask: 0.2942, decode.d6.loss_dice: 0.8129, decode.d7.loss_cls: 0.2445, decode.d7.loss_mask: 0.2955, decode.d7.loss_dice: 0.8113, decode.d8.loss_cls: 0.2345, decode.d8.loss_mask: 0.2952, decode.d8.loss_dice: 0.8123, loss: 13.8270 +2022-05-09 19:43:24,591 - mmseg - INFO - Iter [4150/80000] lr: 1.361e-06, eta: 1 day, 17:18:17, time: 1.783, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2006, decode.loss_mask: 0.3043, decode.loss_dice: 0.7983, decode.d0.loss_cls: 0.4236, decode.d0.loss_mask: 0.3250, decode.d0.loss_dice: 0.8663, decode.d1.loss_cls: 0.2412, decode.d1.loss_mask: 0.3103, decode.d1.loss_dice: 0.8210, decode.d2.loss_cls: 0.2200, decode.d2.loss_mask: 0.3058, decode.d2.loss_dice: 0.8018, decode.d3.loss_cls: 0.2107, decode.d3.loss_mask: 0.3052, decode.d3.loss_dice: 0.7968, decode.d4.loss_cls: 0.2151, decode.d4.loss_mask: 0.3049, decode.d4.loss_dice: 0.7967, decode.d5.loss_cls: 0.2146, decode.d5.loss_mask: 0.3037, decode.d5.loss_dice: 0.8022, decode.d6.loss_cls: 0.2108, decode.d6.loss_mask: 0.3035, decode.d6.loss_dice: 0.7953, decode.d7.loss_cls: 0.2101, decode.d7.loss_mask: 0.3051, decode.d7.loss_dice: 0.7981, decode.d8.loss_cls: 0.2068, decode.d8.loss_mask: 0.3038, decode.d8.loss_dice: 
0.7989, loss: 13.5007 +2022-05-09 19:44:55,025 - mmseg - INFO - Iter [4200/80000] lr: 1.360e-06, eta: 1 day, 17:14:22, time: 1.809, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2234, decode.loss_mask: 0.3028, decode.loss_dice: 0.8202, decode.d0.loss_cls: 0.4376, decode.d0.loss_mask: 0.3236, decode.d0.loss_dice: 0.8917, decode.d1.loss_cls: 0.2489, decode.d1.loss_mask: 0.3076, decode.d1.loss_dice: 0.8448, decode.d2.loss_cls: 0.2357, decode.d2.loss_mask: 0.3050, decode.d2.loss_dice: 0.8280, decode.d3.loss_cls: 0.2273, decode.d3.loss_mask: 0.3032, decode.d3.loss_dice: 0.8236, decode.d4.loss_cls: 0.2357, decode.d4.loss_mask: 0.3037, decode.d4.loss_dice: 0.8233, decode.d5.loss_cls: 0.2252, decode.d5.loss_mask: 0.3029, decode.d5.loss_dice: 0.8198, decode.d6.loss_cls: 0.2321, decode.d6.loss_mask: 0.3018, decode.d6.loss_dice: 0.8182, decode.d7.loss_cls: 0.2310, decode.d7.loss_mask: 0.3022, decode.d7.loss_dice: 0.8163, decode.d8.loss_cls: 0.2183, decode.d8.loss_mask: 0.3027, decode.d8.loss_dice: 0.8163, loss: 13.8730 +2022-05-09 19:46:22,825 - mmseg - INFO - Iter [4250/80000] lr: 1.360e-06, eta: 1 day, 17:09:43, time: 1.756, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2266, decode.loss_mask: 0.3028, decode.loss_dice: 0.8065, decode.d0.loss_cls: 0.4199, decode.d0.loss_mask: 0.3286, decode.d0.loss_dice: 0.8876, decode.d1.loss_cls: 0.2529, decode.d1.loss_mask: 0.3065, decode.d1.loss_dice: 0.8278, decode.d2.loss_cls: 0.2443, decode.d2.loss_mask: 0.3046, decode.d2.loss_dice: 0.8156, decode.d3.loss_cls: 0.2256, decode.d3.loss_mask: 0.3035, decode.d3.loss_dice: 0.8043, decode.d4.loss_cls: 0.2324, decode.d4.loss_mask: 0.3031, decode.d4.loss_dice: 0.8072, decode.d5.loss_cls: 0.2259, decode.d5.loss_mask: 0.3037, decode.d5.loss_dice: 0.8094, decode.d6.loss_cls: 0.2245, decode.d6.loss_mask: 0.3039, decode.d6.loss_dice: 0.8073, decode.d7.loss_cls: 0.2304, decode.d7.loss_mask: 0.3041, decode.d7.loss_dice: 0.8079, decode.d8.loss_cls: 0.2222, decode.d8.loss_mask: 0.3026, 
decode.d8.loss_dice: 0.8068, loss: 13.7486 +2022-05-09 19:47:54,446 - mmseg - INFO - Iter [4300/80000] lr: 1.359e-06, eta: 1 day, 17:06:16, time: 1.832, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2296, decode.loss_mask: 0.2960, decode.loss_dice: 0.8155, decode.d0.loss_cls: 0.4592, decode.d0.loss_mask: 0.3185, decode.d0.loss_dice: 0.9022, decode.d1.loss_cls: 0.2625, decode.d1.loss_mask: 0.3015, decode.d1.loss_dice: 0.8458, decode.d2.loss_cls: 0.2386, decode.d2.loss_mask: 0.2976, decode.d2.loss_dice: 0.8259, decode.d3.loss_cls: 0.2265, decode.d3.loss_mask: 0.2962, decode.d3.loss_dice: 0.8211, decode.d4.loss_cls: 0.2242, decode.d4.loss_mask: 0.2952, decode.d4.loss_dice: 0.8215, decode.d5.loss_cls: 0.2289, decode.d5.loss_mask: 0.2968, decode.d5.loss_dice: 0.8196, decode.d6.loss_cls: 0.2223, decode.d6.loss_mask: 0.2962, decode.d6.loss_dice: 0.8212, decode.d7.loss_cls: 0.2296, decode.d7.loss_mask: 0.2964, decode.d7.loss_dice: 0.8156, decode.d8.loss_cls: 0.2256, decode.d8.loss_mask: 0.2958, decode.d8.loss_dice: 0.8192, loss: 13.8446 +2022-05-09 19:49:24,336 - mmseg - INFO - Iter [4350/80000] lr: 1.358e-06, eta: 1 day, 17:02:22, time: 1.798, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2164, decode.loss_mask: 0.2914, decode.loss_dice: 0.8011, decode.d0.loss_cls: 0.4353, decode.d0.loss_mask: 0.3079, decode.d0.loss_dice: 0.8681, decode.d1.loss_cls: 0.2424, decode.d1.loss_mask: 0.2963, decode.d1.loss_dice: 0.8253, decode.d2.loss_cls: 0.2287, decode.d2.loss_mask: 0.2933, decode.d2.loss_dice: 0.8092, decode.d3.loss_cls: 0.2151, decode.d3.loss_mask: 0.2914, decode.d3.loss_dice: 0.8021, decode.d4.loss_cls: 0.2152, decode.d4.loss_mask: 0.2924, decode.d4.loss_dice: 0.8017, decode.d5.loss_cls: 0.2154, decode.d5.loss_mask: 0.2926, decode.d5.loss_dice: 0.8033, decode.d6.loss_cls: 0.2099, decode.d6.loss_mask: 0.2918, decode.d6.loss_dice: 0.8010, decode.d7.loss_cls: 0.2187, decode.d7.loss_mask: 0.2908, decode.d7.loss_dice: 0.7967, decode.d8.loss_cls: 0.2105, 
decode.d8.loss_mask: 0.2909, decode.d8.loss_dice: 0.8004, loss: 13.4554 +2022-05-09 19:50:53,273 - mmseg - INFO - Iter [4400/80000] lr: 1.357e-06, eta: 1 day, 16:58:14, time: 1.778, data_time: 0.016, memory: 64699, decode.loss_cls: 0.2292, decode.loss_mask: 0.3039, decode.loss_dice: 0.8238, decode.d0.loss_cls: 0.4342, decode.d0.loss_mask: 0.3242, decode.d0.loss_dice: 0.9066, decode.d1.loss_cls: 0.2704, decode.d1.loss_mask: 0.3064, decode.d1.loss_dice: 0.8500, decode.d2.loss_cls: 0.2477, decode.d2.loss_mask: 0.3028, decode.d2.loss_dice: 0.8337, decode.d3.loss_cls: 0.2290, decode.d3.loss_mask: 0.3035, decode.d3.loss_dice: 0.8284, decode.d4.loss_cls: 0.2236, decode.d4.loss_mask: 0.3046, decode.d4.loss_dice: 0.8286, decode.d5.loss_cls: 0.2268, decode.d5.loss_mask: 0.3039, decode.d5.loss_dice: 0.8229, decode.d6.loss_cls: 0.2232, decode.d6.loss_mask: 0.3041, decode.d6.loss_dice: 0.8237, decode.d7.loss_cls: 0.2184, decode.d7.loss_mask: 0.3037, decode.d7.loss_dice: 0.8228, decode.d8.loss_cls: 0.2233, decode.d8.loss_mask: 0.3042, decode.d8.loss_dice: 0.8224, loss: 13.9497 +2022-05-09 19:52:21,963 - mmseg - INFO - Iter [4450/80000] lr: 1.356e-06, eta: 1 day, 16:54:06, time: 1.773, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2193, decode.loss_mask: 0.2938, decode.loss_dice: 0.7896, decode.d0.loss_cls: 0.4294, decode.d0.loss_mask: 0.3145, decode.d0.loss_dice: 0.8653, decode.d1.loss_cls: 0.2419, decode.d1.loss_mask: 0.2990, decode.d1.loss_dice: 0.8132, decode.d2.loss_cls: 0.2259, decode.d2.loss_mask: 0.2962, decode.d2.loss_dice: 0.8007, decode.d3.loss_cls: 0.2167, decode.d3.loss_mask: 0.2947, decode.d3.loss_dice: 0.7912, decode.d4.loss_cls: 0.2154, decode.d4.loss_mask: 0.2953, decode.d4.loss_dice: 0.7936, decode.d5.loss_cls: 0.2152, decode.d5.loss_mask: 0.2944, decode.d5.loss_dice: 0.7925, decode.d6.loss_cls: 0.2170, decode.d6.loss_mask: 0.2936, decode.d6.loss_dice: 0.7873, decode.d7.loss_cls: 0.2147, decode.d7.loss_mask: 0.2936, decode.d7.loss_dice: 0.7893, 
decode.d8.loss_cls: 0.2172, decode.d8.loss_mask: 0.2936, decode.d8.loss_dice: 0.7884, loss: 13.3927 +2022-05-09 19:53:53,621 - mmseg - INFO - Iter [4500/80000] lr: 1.355e-06, eta: 1 day, 16:50:52, time: 1.833, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2152, decode.loss_mask: 0.2904, decode.loss_dice: 0.7817, decode.d0.loss_cls: 0.4237, decode.d0.loss_mask: 0.3122, decode.d0.loss_dice: 0.8579, decode.d1.loss_cls: 0.2470, decode.d1.loss_mask: 0.2957, decode.d1.loss_dice: 0.8014, decode.d2.loss_cls: 0.2243, decode.d2.loss_mask: 0.2919, decode.d2.loss_dice: 0.7897, decode.d3.loss_cls: 0.2176, decode.d3.loss_mask: 0.2920, decode.d3.loss_dice: 0.7826, decode.d4.loss_cls: 0.2153, decode.d4.loss_mask: 0.2912, decode.d4.loss_dice: 0.7842, decode.d5.loss_cls: 0.2133, decode.d5.loss_mask: 0.2911, decode.d5.loss_dice: 0.7817, decode.d6.loss_cls: 0.2084, decode.d6.loss_mask: 0.2908, decode.d6.loss_dice: 0.7826, decode.d7.loss_cls: 0.2107, decode.d7.loss_mask: 0.2917, decode.d7.loss_dice: 0.7815, decode.d8.loss_cls: 0.2158, decode.d8.loss_mask: 0.2893, decode.d8.loss_dice: 0.7780, loss: 13.2490 +2022-05-09 19:55:23,302 - mmseg - INFO - Iter [4550/80000] lr: 1.354e-06, eta: 1 day, 16:47:07, time: 1.794, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2166, decode.loss_mask: 0.3025, decode.loss_dice: 0.8067, decode.d0.loss_cls: 0.4459, decode.d0.loss_mask: 0.3243, decode.d0.loss_dice: 0.8865, decode.d1.loss_cls: 0.2482, decode.d1.loss_mask: 0.3080, decode.d1.loss_dice: 0.8364, decode.d2.loss_cls: 0.2323, decode.d2.loss_mask: 0.3045, decode.d2.loss_dice: 0.8231, decode.d3.loss_cls: 0.2220, decode.d3.loss_mask: 0.3040, decode.d3.loss_dice: 0.8121, decode.d4.loss_cls: 0.2192, decode.d4.loss_mask: 0.3036, decode.d4.loss_dice: 0.8079, decode.d5.loss_cls: 0.2256, decode.d5.loss_mask: 0.3040, decode.d5.loss_dice: 0.8075, decode.d6.loss_cls: 0.2147, decode.d6.loss_mask: 0.3028, decode.d6.loss_dice: 0.8083, decode.d7.loss_cls: 0.2200, decode.d7.loss_mask: 0.3043, 
decode.d7.loss_dice: 0.8083, decode.d8.loss_cls: 0.2189, decode.d8.loss_mask: 0.3040, decode.d8.loss_dice: 0.8091, loss: 13.7314 +2022-05-09 19:56:52,131 - mmseg - INFO - Iter [4600/80000] lr: 1.353e-06, eta: 1 day, 16:43:11, time: 1.776, data_time: 0.016, memory: 64699, decode.loss_cls: 0.2187, decode.loss_mask: 0.2952, decode.loss_dice: 0.8027, decode.d0.loss_cls: 0.4285, decode.d0.loss_mask: 0.3145, decode.d0.loss_dice: 0.8799, decode.d1.loss_cls: 0.2503, decode.d1.loss_mask: 0.2994, decode.d1.loss_dice: 0.8326, decode.d2.loss_cls: 0.2320, decode.d2.loss_mask: 0.2962, decode.d2.loss_dice: 0.8161, decode.d3.loss_cls: 0.2315, decode.d3.loss_mask: 0.2964, decode.d3.loss_dice: 0.8074, decode.d4.loss_cls: 0.2298, decode.d4.loss_mask: 0.2957, decode.d4.loss_dice: 0.8068, decode.d5.loss_cls: 0.2217, decode.d5.loss_mask: 0.2951, decode.d5.loss_dice: 0.8087, decode.d6.loss_cls: 0.2204, decode.d6.loss_mask: 0.2950, decode.d6.loss_dice: 0.8034, decode.d7.loss_cls: 0.2203, decode.d7.loss_mask: 0.2947, decode.d7.loss_dice: 0.8027, decode.d8.loss_cls: 0.2207, decode.d8.loss_mask: 0.2960, decode.d8.loss_dice: 0.8039, loss: 13.6162 +2022-05-09 19:58:20,362 - mmseg - INFO - Iter [4650/80000] lr: 1.352e-06, eta: 1 day, 16:39:08, time: 1.765, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2292, decode.loss_mask: 0.3037, decode.loss_dice: 0.8107, decode.d0.loss_cls: 0.4546, decode.d0.loss_mask: 0.3252, decode.d0.loss_dice: 0.8924, decode.d1.loss_cls: 0.2599, decode.d1.loss_mask: 0.3091, decode.d1.loss_dice: 0.8382, decode.d2.loss_cls: 0.2428, decode.d2.loss_mask: 0.3047, decode.d2.loss_dice: 0.8184, decode.d3.loss_cls: 0.2373, decode.d3.loss_mask: 0.3052, decode.d3.loss_dice: 0.8079, decode.d4.loss_cls: 0.2342, decode.d4.loss_mask: 0.3042, decode.d4.loss_dice: 0.8117, decode.d5.loss_cls: 0.2304, decode.d5.loss_mask: 0.3055, decode.d5.loss_dice: 0.8096, decode.d6.loss_cls: 0.2326, decode.d6.loss_mask: 0.3036, decode.d6.loss_dice: 0.8095, decode.d7.loss_cls: 0.2272, 
decode.d7.loss_mask: 0.3039, decode.d7.loss_dice: 0.8136, decode.d8.loss_cls: 0.2283, decode.d8.loss_mask: 0.3043, decode.d8.loss_dice: 0.8115, loss: 13.8695 +2022-05-09 19:59:52,281 - mmseg - INFO - Iter [4700/80000] lr: 1.351e-06, eta: 1 day, 16:36:07, time: 1.838, data_time: 0.063, memory: 64699, decode.loss_cls: 0.2242, decode.loss_mask: 0.2981, decode.loss_dice: 0.8106, decode.d0.loss_cls: 0.4331, decode.d0.loss_mask: 0.3186, decode.d0.loss_dice: 0.8824, decode.d1.loss_cls: 0.2415, decode.d1.loss_mask: 0.3033, decode.d1.loss_dice: 0.8363, decode.d2.loss_cls: 0.2367, decode.d2.loss_mask: 0.2991, decode.d2.loss_dice: 0.8245, decode.d3.loss_cls: 0.2308, decode.d3.loss_mask: 0.2982, decode.d3.loss_dice: 0.8084, decode.d4.loss_cls: 0.2200, decode.d4.loss_mask: 0.2993, decode.d4.loss_dice: 0.8099, decode.d5.loss_cls: 0.2218, decode.d5.loss_mask: 0.2997, decode.d5.loss_dice: 0.8132, decode.d6.loss_cls: 0.2189, decode.d6.loss_mask: 0.2995, decode.d6.loss_dice: 0.8062, decode.d7.loss_cls: 0.2172, decode.d7.loss_mask: 0.2992, decode.d7.loss_dice: 0.8070, decode.d8.loss_cls: 0.2167, decode.d8.loss_mask: 0.2984, decode.d8.loss_dice: 0.8060, loss: 13.6785 +2022-05-09 20:01:20,687 - mmseg - INFO - Iter [4750/80000] lr: 1.351e-06, eta: 1 day, 16:32:14, time: 1.768, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2203, decode.loss_mask: 0.3045, decode.loss_dice: 0.7969, decode.d0.loss_cls: 0.4267, decode.d0.loss_mask: 0.3228, decode.d0.loss_dice: 0.8776, decode.d1.loss_cls: 0.2360, decode.d1.loss_mask: 0.3094, decode.d1.loss_dice: 0.8252, decode.d2.loss_cls: 0.2365, decode.d2.loss_mask: 0.3069, decode.d2.loss_dice: 0.8108, decode.d3.loss_cls: 0.2296, decode.d3.loss_mask: 0.3054, decode.d3.loss_dice: 0.7985, decode.d4.loss_cls: 0.2257, decode.d4.loss_mask: 0.3042, decode.d4.loss_dice: 0.7984, decode.d5.loss_cls: 0.2206, decode.d5.loss_mask: 0.3036, decode.d5.loss_dice: 0.8004, decode.d6.loss_cls: 0.2246, decode.d6.loss_mask: 0.3039, decode.d6.loss_dice: 0.7953, 
decode.d7.loss_cls: 0.2267, decode.d7.loss_mask: 0.3030, decode.d7.loss_dice: 0.7951, decode.d8.loss_cls: 0.2294, decode.d8.loss_mask: 0.3040, decode.d8.loss_dice: 0.7958, loss: 13.6376 +2022-05-09 20:02:48,860 - mmseg - INFO - Iter [4800/80000] lr: 1.350e-06, eta: 1 day, 16:28:19, time: 1.763, data_time: 0.016, memory: 64699, decode.loss_cls: 0.2051, decode.loss_mask: 0.2947, decode.loss_dice: 0.7918, decode.d0.loss_cls: 0.4230, decode.d0.loss_mask: 0.3186, decode.d0.loss_dice: 0.8593, decode.d1.loss_cls: 0.2295, decode.d1.loss_mask: 0.2993, decode.d1.loss_dice: 0.8131, decode.d2.loss_cls: 0.2158, decode.d2.loss_mask: 0.2972, decode.d2.loss_dice: 0.8010, decode.d3.loss_cls: 0.2084, decode.d3.loss_mask: 0.2947, decode.d3.loss_dice: 0.7958, decode.d4.loss_cls: 0.2105, decode.d4.loss_mask: 0.2943, decode.d4.loss_dice: 0.7936, decode.d5.loss_cls: 0.2044, decode.d5.loss_mask: 0.2932, decode.d5.loss_dice: 0.7929, decode.d6.loss_cls: 0.2078, decode.d6.loss_mask: 0.2944, decode.d6.loss_dice: 0.7906, decode.d7.loss_cls: 0.2210, decode.d7.loss_mask: 0.2942, decode.d7.loss_dice: 0.7935, decode.d8.loss_cls: 0.2058, decode.d8.loss_mask: 0.2946, decode.d8.loss_dice: 0.7894, loss: 13.3272 +2022-05-09 20:04:20,847 - mmseg - INFO - Iter [4850/80000] lr: 1.349e-06, eta: 1 day, 16:25:26, time: 1.840, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2083, decode.loss_mask: 0.2966, decode.loss_dice: 0.8065, decode.d0.loss_cls: 0.4274, decode.d0.loss_mask: 0.3148, decode.d0.loss_dice: 0.8760, decode.d1.loss_cls: 0.2280, decode.d1.loss_mask: 0.3017, decode.d1.loss_dice: 0.8253, decode.d2.loss_cls: 0.2106, decode.d2.loss_mask: 0.2991, decode.d2.loss_dice: 0.8198, decode.d3.loss_cls: 0.2174, decode.d3.loss_mask: 0.2982, decode.d3.loss_dice: 0.8049, decode.d4.loss_cls: 0.2152, decode.d4.loss_mask: 0.2976, decode.d4.loss_dice: 0.8060, decode.d5.loss_cls: 0.2031, decode.d5.loss_mask: 0.2962, decode.d5.loss_dice: 0.8099, decode.d6.loss_cls: 0.2138, decode.d6.loss_mask: 0.2977, 
decode.d6.loss_dice: 0.8053, decode.d7.loss_cls: 0.2046, decode.d7.loss_mask: 0.2979, decode.d7.loss_dice: 0.8041, decode.d8.loss_cls: 0.2078, decode.d8.loss_mask: 0.2972, decode.d8.loss_dice: 0.8032, loss: 13.4940 +2022-05-09 20:05:50,449 - mmseg - INFO - Iter [4900/80000] lr: 1.348e-06, eta: 1 day, 16:21:58, time: 1.792, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2195, decode.loss_mask: 0.2998, decode.loss_dice: 0.8152, decode.d0.loss_cls: 0.4466, decode.d0.loss_mask: 0.3216, decode.d0.loss_dice: 0.8963, decode.d1.loss_cls: 0.2554, decode.d1.loss_mask: 0.3030, decode.d1.loss_dice: 0.8433, decode.d2.loss_cls: 0.2398, decode.d2.loss_mask: 0.2996, decode.d2.loss_dice: 0.8229, decode.d3.loss_cls: 0.2326, decode.d3.loss_mask: 0.2994, decode.d3.loss_dice: 0.8154, decode.d4.loss_cls: 0.2311, decode.d4.loss_mask: 0.2993, decode.d4.loss_dice: 0.8144, decode.d5.loss_cls: 0.2285, decode.d5.loss_mask: 0.2986, decode.d5.loss_dice: 0.8157, decode.d6.loss_cls: 0.2196, decode.d6.loss_mask: 0.2993, decode.d6.loss_dice: 0.8108, decode.d7.loss_cls: 0.2219, decode.d7.loss_mask: 0.2978, decode.d7.loss_dice: 0.8155, decode.d8.loss_cls: 0.2183, decode.d8.loss_mask: 0.2985, decode.d8.loss_dice: 0.8142, loss: 13.7940 +2022-05-09 20:07:18,562 - mmseg - INFO - Iter [4950/80000] lr: 1.347e-06, eta: 1 day, 16:18:11, time: 1.763, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2218, decode.loss_mask: 0.2974, decode.loss_dice: 0.8130, decode.d0.loss_cls: 0.4199, decode.d0.loss_mask: 0.3177, decode.d0.loss_dice: 0.8910, decode.d1.loss_cls: 0.2529, decode.d1.loss_mask: 0.3017, decode.d1.loss_dice: 0.8399, decode.d2.loss_cls: 0.2347, decode.d2.loss_mask: 0.2992, decode.d2.loss_dice: 0.8311, decode.d3.loss_cls: 0.2191, decode.d3.loss_mask: 0.2986, decode.d3.loss_dice: 0.8141, decode.d4.loss_cls: 0.2284, decode.d4.loss_mask: 0.2985, decode.d4.loss_dice: 0.8123, decode.d5.loss_cls: 0.2266, decode.d5.loss_mask: 0.2979, decode.d5.loss_dice: 0.8191, decode.d6.loss_cls: 0.2259, 
decode.d6.loss_mask: 0.2986, decode.d6.loss_dice: 0.8084, decode.d7.loss_cls: 0.2252, decode.d7.loss_mask: 0.2986, decode.d7.loss_dice: 0.8135, decode.d8.loss_cls: 0.2283, decode.d8.loss_mask: 0.2979, decode.d8.loss_dice: 0.8105, loss: 13.7419 +2022-05-09 20:08:46,630 - mmseg - INFO - Saving checkpoint at 5000 iterations +2022-05-09 20:09:18,899 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 20:09:18,910 - mmseg - INFO - Iter [5000/80000] lr: 1.346e-06, eta: 1 day, 16:22:27, time: 2.403, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1983, decode.loss_mask: 0.3057, decode.loss_dice: 0.7970, decode.d0.loss_cls: 0.4125, decode.d0.loss_mask: 0.3308, decode.d0.loss_dice: 0.8726, decode.d1.loss_cls: 0.2250, decode.d1.loss_mask: 0.3101, decode.d1.loss_dice: 0.8257, decode.d2.loss_cls: 0.2171, decode.d2.loss_mask: 0.3050, decode.d2.loss_dice: 0.8073, decode.d3.loss_cls: 0.2100, decode.d3.loss_mask: 0.3046, decode.d3.loss_dice: 0.7932, decode.d4.loss_cls: 0.2088, decode.d4.loss_mask: 0.3071, decode.d4.loss_dice: 0.7993, decode.d5.loss_cls: 0.2026, decode.d5.loss_mask: 0.3064, decode.d5.loss_dice: 0.7964, decode.d6.loss_cls: 0.2087, decode.d6.loss_mask: 0.3060, decode.d6.loss_dice: 0.7955, decode.d7.loss_cls: 0.2002, decode.d7.loss_mask: 0.3052, decode.d7.loss_dice: 0.7913, decode.d8.loss_cls: 0.2017, decode.d8.loss_mask: 0.3054, decode.d8.loss_dice: 0.7917, loss: 13.4414 +2022-05-09 20:11:14,744 - mmseg - INFO - per class results: +2022-05-09 20:11:14,756 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.69 | 99.15 | +| sidewalk | 89.14 | 94.78 | +| building | 94.35 | 97.16 | +| wall | 69.25 | 77.35 | +| fence | 71.61 | 83.26 | +| pole | 71.75 | 85.21 | +| traffic light | 76.79 | 89.02 | +| traffic sign | 84.66 | 91.4 | +| vegetation | 93.04 | 95.81 | +| terrain | 64.18 | 86.19 | +| sky | 95.48 | 98.91 | +| person | 86.83 | 92.86 | +| rider | 73.7 | 
85.45 | +| car | 96.31 | 98.13 | +| truck | 89.9 | 95.54 | +| bus | 92.92 | 96.13 | +| train | 87.04 | 90.95 | +| motorcycle | 74.06 | 85.48 | +| bicycle | 82.05 | 92.03 | ++---------------+-------+-------+ +2022-05-09 20:11:14,757 - mmseg - INFO - Summary: +2022-05-09 20:11:14,757 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.94 | 83.78 | 91.31 | ++-------+-------+-------+ +2022-05-09 20:11:14,761 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 20:11:14,762 - mmseg - INFO - Iter(val) [32] aAcc: 0.9694, mIoU: 0.8378, mAcc: 0.9131, IoU.road: 0.9869, IoU.sidewalk: 0.8914, IoU.building: 0.9435, IoU.wall: 0.6925, IoU.fence: 0.7161, IoU.pole: 0.7175, IoU.traffic light: 0.7679, IoU.traffic sign: 0.8466, IoU.vegetation: 0.9304, IoU.terrain: 0.6418, IoU.sky: 0.9548, IoU.person: 0.8683, IoU.rider: 0.7370, IoU.car: 0.9631, IoU.truck: 0.8990, IoU.bus: 0.9292, IoU.train: 0.8704, IoU.motorcycle: 0.7406, IoU.bicycle: 0.8205, Acc.road: 0.9915, Acc.sidewalk: 0.9478, Acc.building: 0.9716, Acc.wall: 0.7735, Acc.fence: 0.8326, Acc.pole: 0.8521, Acc.traffic light: 0.8902, Acc.traffic sign: 0.9140, Acc.vegetation: 0.9581, Acc.terrain: 0.8619, Acc.sky: 0.9891, Acc.person: 0.9286, Acc.rider: 0.8545, Acc.car: 0.9813, Acc.truck: 0.9554, Acc.bus: 0.9613, Acc.train: 0.9095, Acc.motorcycle: 0.8548, Acc.bicycle: 0.9203 +2022-05-09 20:12:45,926 - mmseg - INFO - Iter [5050/80000] lr: 1.345e-06, eta: 1 day, 16:48:07, time: 4.144, data_time: 2.384, memory: 64699, decode.loss_cls: 0.2111, decode.loss_mask: 0.3006, decode.loss_dice: 0.7826, decode.d0.loss_cls: 0.4347, decode.d0.loss_mask: 0.3225, decode.d0.loss_dice: 0.8620, decode.d1.loss_cls: 0.2334, decode.d1.loss_mask: 0.3066, decode.d1.loss_dice: 0.8150, decode.d2.loss_cls: 0.2232, decode.d2.loss_mask: 0.3038, decode.d2.loss_dice: 0.8002, decode.d3.loss_cls: 0.2210, decode.d3.loss_mask: 0.3027, decode.d3.loss_dice: 0.7926, decode.d4.loss_cls: 
0.2211, decode.d4.loss_mask: 0.3018, decode.d4.loss_dice: 0.7926, decode.d5.loss_cls: 0.2212, decode.d5.loss_mask: 0.3013, decode.d5.loss_dice: 0.7875, decode.d6.loss_cls: 0.2140, decode.d6.loss_mask: 0.3004, decode.d6.loss_dice: 0.7857, decode.d7.loss_cls: 0.2175, decode.d7.loss_mask: 0.3013, decode.d7.loss_dice: 0.7841, decode.d8.loss_cls: 0.2158, decode.d8.loss_mask: 0.3008, decode.d8.loss_dice: 0.7857, loss: 13.4431 +2022-05-09 20:14:14,969 - mmseg - INFO - Iter [5100/80000] lr: 1.344e-06, eta: 1 day, 16:44:14, time: 1.777, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2178, decode.loss_mask: 0.2924, decode.loss_dice: 0.8018, decode.d0.loss_cls: 0.4222, decode.d0.loss_mask: 0.3152, decode.d0.loss_dice: 0.8857, decode.d1.loss_cls: 0.2463, decode.d1.loss_mask: 0.2983, decode.d1.loss_dice: 0.8315, decode.d2.loss_cls: 0.2262, decode.d2.loss_mask: 0.2963, decode.d2.loss_dice: 0.8218, decode.d3.loss_cls: 0.2213, decode.d3.loss_mask: 0.2954, decode.d3.loss_dice: 0.8099, decode.d4.loss_cls: 0.2211, decode.d4.loss_mask: 0.2930, decode.d4.loss_dice: 0.8080, decode.d5.loss_cls: 0.2211, decode.d5.loss_mask: 0.2935, decode.d5.loss_dice: 0.8098, decode.d6.loss_cls: 0.2104, decode.d6.loss_mask: 0.2932, decode.d6.loss_dice: 0.8076, decode.d7.loss_cls: 0.2149, decode.d7.loss_mask: 0.2933, decode.d7.loss_dice: 0.8062, decode.d8.loss_cls: 0.2199, decode.d8.loss_mask: 0.2927, decode.d8.loss_dice: 0.8071, loss: 13.5738 +2022-05-09 20:15:44,203 - mmseg - INFO - Iter [5150/80000] lr: 1.343e-06, eta: 1 day, 16:40:33, time: 1.788, data_time: 0.020, memory: 64699, decode.loss_cls: 0.2103, decode.loss_mask: 0.2989, decode.loss_dice: 0.7927, decode.d0.loss_cls: 0.4339, decode.d0.loss_mask: 0.3164, decode.d0.loss_dice: 0.8599, decode.d1.loss_cls: 0.2317, decode.d1.loss_mask: 0.3012, decode.d1.loss_dice: 0.8169, decode.d2.loss_cls: 0.2156, decode.d2.loss_mask: 0.3005, decode.d2.loss_dice: 0.8070, decode.d3.loss_cls: 0.2180, decode.d3.loss_mask: 0.2982, decode.d3.loss_dice: 0.7961, 
decode.d4.loss_cls: 0.2139, decode.d4.loss_mask: 0.3000, decode.d4.loss_dice: 0.7978, decode.d5.loss_cls: 0.2084, decode.d5.loss_mask: 0.2985, decode.d5.loss_dice: 0.7955, decode.d6.loss_cls: 0.2133, decode.d6.loss_mask: 0.2989, decode.d6.loss_dice: 0.7952, decode.d7.loss_cls: 0.2030, decode.d7.loss_mask: 0.2985, decode.d7.loss_dice: 0.7959, decode.d8.loss_cls: 0.2151, decode.d8.loss_mask: 0.2989, decode.d8.loss_dice: 0.7944, loss: 13.4244 +2022-05-09 20:17:12,982 - mmseg - INFO - Iter [5200/80000] lr: 1.342e-06, eta: 1 day, 16:36:46, time: 1.776, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2188, decode.loss_mask: 0.2943, decode.loss_dice: 0.8050, decode.d0.loss_cls: 0.4297, decode.d0.loss_mask: 0.3185, decode.d0.loss_dice: 0.8815, decode.d1.loss_cls: 0.2435, decode.d1.loss_mask: 0.2963, decode.d1.loss_dice: 0.8300, decode.d2.loss_cls: 0.2426, decode.d2.loss_mask: 0.2965, decode.d2.loss_dice: 0.8131, decode.d3.loss_cls: 0.2268, decode.d3.loss_mask: 0.2969, decode.d3.loss_dice: 0.8059, decode.d4.loss_cls: 0.2291, decode.d4.loss_mask: 0.2964, decode.d4.loss_dice: 0.8063, decode.d5.loss_cls: 0.2252, decode.d5.loss_mask: 0.2962, decode.d5.loss_dice: 0.8046, decode.d6.loss_cls: 0.2249, decode.d6.loss_mask: 0.2956, decode.d6.loss_dice: 0.7985, decode.d7.loss_cls: 0.2194, decode.d7.loss_mask: 0.2953, decode.d7.loss_dice: 0.8049, decode.d8.loss_cls: 0.2183, decode.d8.loss_mask: 0.2958, decode.d8.loss_dice: 0.8035, loss: 13.6135 +2022-05-09 20:18:44,217 - mmseg - INFO - Iter [5250/80000] lr: 1.342e-06, eta: 1 day, 16:33:35, time: 1.824, data_time: 0.066, memory: 64699, decode.loss_cls: 0.2104, decode.loss_mask: 0.2990, decode.loss_dice: 0.7981, decode.d0.loss_cls: 0.4182, decode.d0.loss_mask: 0.3246, decode.d0.loss_dice: 0.8785, decode.d1.loss_cls: 0.2282, decode.d1.loss_mask: 0.3054, decode.d1.loss_dice: 0.8280, decode.d2.loss_cls: 0.2214, decode.d2.loss_mask: 0.3016, decode.d2.loss_dice: 0.8145, decode.d3.loss_cls: 0.2123, decode.d3.loss_mask: 0.3017, 
decode.d3.loss_dice: 0.8019, decode.d4.loss_cls: 0.2099, decode.d4.loss_mask: 0.3011, decode.d4.loss_dice: 0.7979, decode.d5.loss_cls: 0.2092, decode.d5.loss_mask: 0.3011, decode.d5.loss_dice: 0.7981, decode.d6.loss_cls: 0.2074, decode.d6.loss_mask: 0.3003, decode.d6.loss_dice: 0.7983, decode.d7.loss_cls: 0.2028, decode.d7.loss_mask: 0.3005, decode.d7.loss_dice: 0.8028, decode.d8.loss_cls: 0.2131, decode.d8.loss_mask: 0.2995, decode.d8.loss_dice: 0.7992, loss: 13.4849 +2022-05-09 20:20:12,799 - mmseg - INFO - Iter [5300/80000] lr: 1.341e-06, eta: 1 day, 16:29:50, time: 1.772, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2190, decode.loss_mask: 0.2970, decode.loss_dice: 0.7911, decode.d0.loss_cls: 0.4274, decode.d0.loss_mask: 0.3208, decode.d0.loss_dice: 0.8760, decode.d1.loss_cls: 0.2448, decode.d1.loss_mask: 0.3025, decode.d1.loss_dice: 0.8276, decode.d2.loss_cls: 0.2262, decode.d2.loss_mask: 0.2985, decode.d2.loss_dice: 0.8110, decode.d3.loss_cls: 0.2171, decode.d3.loss_mask: 0.2967, decode.d3.loss_dice: 0.7963, decode.d4.loss_cls: 0.2178, decode.d4.loss_mask: 0.2983, decode.d4.loss_dice: 0.7984, decode.d5.loss_cls: 0.2254, decode.d5.loss_mask: 0.2975, decode.d5.loss_dice: 0.7906, decode.d6.loss_cls: 0.2155, decode.d6.loss_mask: 0.2963, decode.d6.loss_dice: 0.7939, decode.d7.loss_cls: 0.2163, decode.d7.loss_mask: 0.2976, decode.d7.loss_dice: 0.7936, decode.d8.loss_cls: 0.2154, decode.d8.loss_mask: 0.2973, decode.d8.loss_dice: 0.7948, loss: 13.5007 +2022-05-09 20:21:41,353 - mmseg - INFO - Iter [5350/80000] lr: 1.340e-06, eta: 1 day, 16:26:06, time: 1.771, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2030, decode.loss_mask: 0.2845, decode.loss_dice: 0.7790, decode.d0.loss_cls: 0.4186, decode.d0.loss_mask: 0.3098, decode.d0.loss_dice: 0.8573, decode.d1.loss_cls: 0.2220, decode.d1.loss_mask: 0.2912, decode.d1.loss_dice: 0.8065, decode.d2.loss_cls: 0.2124, decode.d2.loss_mask: 0.2884, decode.d2.loss_dice: 0.7957, decode.d3.loss_cls: 0.2087, 
decode.d3.loss_mask: 0.2864, decode.d3.loss_dice: 0.7788, decode.d4.loss_cls: 0.2064, decode.d4.loss_mask: 0.2856, decode.d4.loss_dice: 0.7824, decode.d5.loss_cls: 0.2066, decode.d5.loss_mask: 0.2845, decode.d5.loss_dice: 0.7809, decode.d6.loss_cls: 0.2047, decode.d6.loss_mask: 0.2860, decode.d6.loss_dice: 0.7742, decode.d7.loss_cls: 0.2118, decode.d7.loss_mask: 0.2851, decode.d7.loss_dice: 0.7778, decode.d8.loss_cls: 0.2043, decode.d8.loss_mask: 0.2859, decode.d8.loss_dice: 0.7785, loss: 13.0971 +2022-05-09 20:23:12,285 - mmseg - INFO - Iter [5400/80000] lr: 1.339e-06, eta: 1 day, 16:22:58, time: 1.819, data_time: 0.064, memory: 64699, decode.loss_cls: 0.2066, decode.loss_mask: 0.2964, decode.loss_dice: 0.7870, decode.d0.loss_cls: 0.4348, decode.d0.loss_mask: 0.3177, decode.d0.loss_dice: 0.8683, decode.d1.loss_cls: 0.2376, decode.d1.loss_mask: 0.3002, decode.d1.loss_dice: 0.8132, decode.d2.loss_cls: 0.2195, decode.d2.loss_mask: 0.2975, decode.d2.loss_dice: 0.7992, decode.d3.loss_cls: 0.2110, decode.d3.loss_mask: 0.2985, decode.d3.loss_dice: 0.7878, decode.d4.loss_cls: 0.2088, decode.d4.loss_mask: 0.2970, decode.d4.loss_dice: 0.7905, decode.d5.loss_cls: 0.2059, decode.d5.loss_mask: 0.2972, decode.d5.loss_dice: 0.7904, decode.d6.loss_cls: 0.2171, decode.d6.loss_mask: 0.2973, decode.d6.loss_dice: 0.7879, decode.d7.loss_cls: 0.2100, decode.d7.loss_mask: 0.2956, decode.d7.loss_dice: 0.7867, decode.d8.loss_cls: 0.2129, decode.d8.loss_mask: 0.2969, decode.d8.loss_dice: 0.7922, loss: 13.3617 +2022-05-09 20:24:42,237 - mmseg - INFO - Iter [5450/80000] lr: 1.338e-06, eta: 1 day, 16:19:38, time: 1.799, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2153, decode.loss_mask: 0.2934, decode.loss_dice: 0.7888, decode.d0.loss_cls: 0.4125, decode.d0.loss_mask: 0.3133, decode.d0.loss_dice: 0.8715, decode.d1.loss_cls: 0.2389, decode.d1.loss_mask: 0.2968, decode.d1.loss_dice: 0.8199, decode.d2.loss_cls: 0.2245, decode.d2.loss_mask: 0.2946, decode.d2.loss_dice: 0.8004, 
decode.d3.loss_cls: 0.2141, decode.d3.loss_mask: 0.2923, decode.d3.loss_dice: 0.7877, decode.d4.loss_cls: 0.2234, decode.d4.loss_mask: 0.2928, decode.d4.loss_dice: 0.7917, decode.d5.loss_cls: 0.2174, decode.d5.loss_mask: 0.2922, decode.d5.loss_dice: 0.7967, decode.d6.loss_cls: 0.2207, decode.d6.loss_mask: 0.2929, decode.d6.loss_dice: 0.7930, decode.d7.loss_cls: 0.2188, decode.d7.loss_mask: 0.2921, decode.d7.loss_dice: 0.7881, decode.d8.loss_cls: 0.2120, decode.d8.loss_mask: 0.2918, decode.d8.loss_dice: 0.7899, loss: 13.3773 +2022-05-09 20:26:11,642 - mmseg - INFO - Iter [5500/80000] lr: 1.337e-06, eta: 1 day, 16:16:13, time: 1.788, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2021, decode.loss_mask: 0.2881, decode.loss_dice: 0.8113, decode.d0.loss_cls: 0.4266, decode.d0.loss_mask: 0.3094, decode.d0.loss_dice: 0.8845, decode.d1.loss_cls: 0.2384, decode.d1.loss_mask: 0.2922, decode.d1.loss_dice: 0.8341, decode.d2.loss_cls: 0.2233, decode.d2.loss_mask: 0.2887, decode.d2.loss_dice: 0.8218, decode.d3.loss_cls: 0.2077, decode.d3.loss_mask: 0.2882, decode.d3.loss_dice: 0.8077, decode.d4.loss_cls: 0.2095, decode.d4.loss_mask: 0.2886, decode.d4.loss_dice: 0.8116, decode.d5.loss_cls: 0.2080, decode.d5.loss_mask: 0.2890, decode.d5.loss_dice: 0.8141, decode.d6.loss_cls: 0.2104, decode.d6.loss_mask: 0.2883, decode.d6.loss_dice: 0.8100, decode.d7.loss_cls: 0.2053, decode.d7.loss_mask: 0.2883, decode.d7.loss_dice: 0.8095, decode.d8.loss_cls: 0.1995, decode.d8.loss_mask: 0.2887, decode.d8.loss_dice: 0.8121, loss: 13.4572 +2022-05-09 20:27:40,171 - mmseg - INFO - Iter [5550/80000] lr: 1.336e-06, eta: 1 day, 16:12:38, time: 1.771, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1985, decode.loss_mask: 0.2983, decode.loss_dice: 0.7991, decode.d0.loss_cls: 0.4403, decode.d0.loss_mask: 0.3152, decode.d0.loss_dice: 0.8691, decode.d1.loss_cls: 0.2335, decode.d1.loss_mask: 0.2996, decode.d1.loss_dice: 0.8275, decode.d2.loss_cls: 0.2167, decode.d2.loss_mask: 0.2989, 
decode.d2.loss_dice: 0.8108, decode.d3.loss_cls: 0.2103, decode.d3.loss_mask: 0.2979, decode.d3.loss_dice: 0.8004, decode.d4.loss_cls: 0.2159, decode.d4.loss_mask: 0.2979, decode.d4.loss_dice: 0.7988, decode.d5.loss_cls: 0.2081, decode.d5.loss_mask: 0.2985, decode.d5.loss_dice: 0.8009, decode.d6.loss_cls: 0.2064, decode.d6.loss_mask: 0.2981, decode.d6.loss_dice: 0.7965, decode.d7.loss_cls: 0.2100, decode.d7.loss_mask: 0.2974, decode.d7.loss_dice: 0.7957, decode.d8.loss_cls: 0.2094, decode.d8.loss_mask: 0.2973, decode.d8.loss_dice: 0.7951, loss: 13.4423 +2022-05-09 20:29:11,168 - mmseg - INFO - Iter [5600/80000] lr: 1.335e-06, eta: 1 day, 16:09:38, time: 1.820, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1843, decode.loss_mask: 0.2903, decode.loss_dice: 0.7847, decode.d0.loss_cls: 0.4161, decode.d0.loss_mask: 0.3095, decode.d0.loss_dice: 0.8560, decode.d1.loss_cls: 0.2089, decode.d1.loss_mask: 0.2938, decode.d1.loss_dice: 0.8076, decode.d2.loss_cls: 0.1934, decode.d2.loss_mask: 0.2922, decode.d2.loss_dice: 0.7978, decode.d3.loss_cls: 0.1986, decode.d3.loss_mask: 0.2910, decode.d3.loss_dice: 0.7864, decode.d4.loss_cls: 0.1968, decode.d4.loss_mask: 0.2896, decode.d4.loss_dice: 0.7859, decode.d5.loss_cls: 0.1909, decode.d5.loss_mask: 0.2910, decode.d5.loss_dice: 0.7840, decode.d6.loss_cls: 0.1870, decode.d6.loss_mask: 0.2900, decode.d6.loss_dice: 0.7836, decode.d7.loss_cls: 0.1899, decode.d7.loss_mask: 0.2902, decode.d7.loss_dice: 0.7879, decode.d8.loss_cls: 0.1963, decode.d8.loss_mask: 0.2898, decode.d8.loss_dice: 0.7840, loss: 13.0474 +2022-05-09 20:30:40,138 - mmseg - INFO - Iter [5650/80000] lr: 1.334e-06, eta: 1 day, 16:06:13, time: 1.779, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2180, decode.loss_mask: 0.2923, decode.loss_dice: 0.7875, decode.d0.loss_cls: 0.4329, decode.d0.loss_mask: 0.3112, decode.d0.loss_dice: 0.8603, decode.d1.loss_cls: 0.2381, decode.d1.loss_mask: 0.2980, decode.d1.loss_dice: 0.8150, decode.d2.loss_cls: 0.2286, 
decode.d2.loss_mask: 0.2931, decode.d2.loss_dice: 0.7984, decode.d3.loss_cls: 0.2143, decode.d3.loss_mask: 0.2924, decode.d3.loss_dice: 0.7959, decode.d4.loss_cls: 0.2181, decode.d4.loss_mask: 0.2934, decode.d4.loss_dice: 0.7900, decode.d5.loss_cls: 0.2201, decode.d5.loss_mask: 0.2919, decode.d5.loss_dice: 0.7902, decode.d6.loss_cls: 0.2119, decode.d6.loss_mask: 0.2922, decode.d6.loss_dice: 0.7882, decode.d7.loss_cls: 0.2136, decode.d7.loss_mask: 0.2920, decode.d7.loss_dice: 0.7929, decode.d8.loss_cls: 0.2186, decode.d8.loss_mask: 0.2923, decode.d8.loss_dice: 0.7899, loss: 13.3714 +2022-05-09 20:32:10,142 - mmseg - INFO - Iter [5700/80000] lr: 1.334e-06, eta: 1 day, 16:03:03, time: 1.800, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2105, decode.loss_mask: 0.2967, decode.loss_dice: 0.8027, decode.d0.loss_cls: 0.4048, decode.d0.loss_mask: 0.3181, decode.d0.loss_dice: 0.8734, decode.d1.loss_cls: 0.2244, decode.d1.loss_mask: 0.3017, decode.d1.loss_dice: 0.8264, decode.d2.loss_cls: 0.2277, decode.d2.loss_mask: 0.2981, decode.d2.loss_dice: 0.8136, decode.d3.loss_cls: 0.2156, decode.d3.loss_mask: 0.2971, decode.d3.loss_dice: 0.8041, decode.d4.loss_cls: 0.2151, decode.d4.loss_mask: 0.2976, decode.d4.loss_dice: 0.8070, decode.d5.loss_cls: 0.2116, decode.d5.loss_mask: 0.2980, decode.d5.loss_dice: 0.8041, decode.d6.loss_cls: 0.2170, decode.d6.loss_mask: 0.2968, decode.d6.loss_dice: 0.8012, decode.d7.loss_cls: 0.2119, decode.d7.loss_mask: 0.2972, decode.d7.loss_dice: 0.8016, decode.d8.loss_cls: 0.2110, decode.d8.loss_mask: 0.2974, decode.d8.loss_dice: 0.8050, loss: 13.4873 +2022-05-09 20:33:38,988 - mmseg - INFO - Iter [5750/80000] lr: 1.333e-06, eta: 1 day, 15:59:40, time: 1.777, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2068, decode.loss_mask: 0.2931, decode.loss_dice: 0.7772, decode.d0.loss_cls: 0.4114, decode.d0.loss_mask: 0.3134, decode.d0.loss_dice: 0.8565, decode.d1.loss_cls: 0.2343, decode.d1.loss_mask: 0.3001, decode.d1.loss_dice: 0.8062, 
decode.d2.loss_cls: 0.2148, decode.d2.loss_mask: 0.2965, decode.d2.loss_dice: 0.7899, decode.d3.loss_cls: 0.2124, decode.d3.loss_mask: 0.2943, decode.d3.loss_dice: 0.7841, decode.d4.loss_cls: 0.2132, decode.d4.loss_mask: 0.2951, decode.d4.loss_dice: 0.7819, decode.d5.loss_cls: 0.2165, decode.d5.loss_mask: 0.2928, decode.d5.loss_dice: 0.7795, decode.d6.loss_cls: 0.2067, decode.d6.loss_mask: 0.2927, decode.d6.loss_dice: 0.7764, decode.d7.loss_cls: 0.2162, decode.d7.loss_mask: 0.2917, decode.d7.loss_dice: 0.7773, decode.d8.loss_cls: 0.2113, decode.d8.loss_mask: 0.2929, decode.d8.loss_dice: 0.7800, loss: 13.2150 +2022-05-09 20:35:10,578 - mmseg - INFO - Iter [5800/80000] lr: 1.332e-06, eta: 1 day, 15:56:55, time: 1.832, data_time: 0.066, memory: 64699, decode.loss_cls: 0.2011, decode.loss_mask: 0.2929, decode.loss_dice: 0.7720, decode.d0.loss_cls: 0.3989, decode.d0.loss_mask: 0.3137, decode.d0.loss_dice: 0.8591, decode.d1.loss_cls: 0.2284, decode.d1.loss_mask: 0.2996, decode.d1.loss_dice: 0.8063, decode.d2.loss_cls: 0.2144, decode.d2.loss_mask: 0.2959, decode.d2.loss_dice: 0.7919, decode.d3.loss_cls: 0.2114, decode.d3.loss_mask: 0.2940, decode.d3.loss_dice: 0.7804, decode.d4.loss_cls: 0.2153, decode.d4.loss_mask: 0.2943, decode.d4.loss_dice: 0.7785, decode.d5.loss_cls: 0.2135, decode.d5.loss_mask: 0.2927, decode.d5.loss_dice: 0.7766, decode.d6.loss_cls: 0.2057, decode.d6.loss_mask: 0.2926, decode.d6.loss_dice: 0.7767, decode.d7.loss_cls: 0.1997, decode.d7.loss_mask: 0.2932, decode.d7.loss_dice: 0.7746, decode.d8.loss_cls: 0.2097, decode.d8.loss_mask: 0.2939, decode.d8.loss_dice: 0.7778, loss: 13.1546 +2022-05-09 20:36:40,048 - mmseg - INFO - Iter [5850/80000] lr: 1.331e-06, eta: 1 day, 15:53:44, time: 1.789, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1998, decode.loss_mask: 0.2864, decode.loss_dice: 0.7810, decode.d0.loss_cls: 0.4202, decode.d0.loss_mask: 0.3081, decode.d0.loss_dice: 0.8557, decode.d1.loss_cls: 0.2337, decode.d1.loss_mask: 0.2912, 
decode.d1.loss_dice: 0.8034, decode.d2.loss_cls: 0.2169, decode.d2.loss_mask: 0.2872, decode.d2.loss_dice: 0.7927, decode.d3.loss_cls: 0.2065, decode.d3.loss_mask: 0.2878, decode.d3.loss_dice: 0.7885, decode.d4.loss_cls: 0.2154, decode.d4.loss_mask: 0.2868, decode.d4.loss_dice: 0.7853, decode.d5.loss_cls: 0.2121, decode.d5.loss_mask: 0.2873, decode.d5.loss_dice: 0.7829, decode.d6.loss_cls: 0.2054, decode.d6.loss_mask: 0.2860, decode.d6.loss_dice: 0.7803, decode.d7.loss_cls: 0.2058, decode.d7.loss_mask: 0.2867, decode.d7.loss_dice: 0.7809, decode.d8.loss_cls: 0.2087, decode.d8.loss_mask: 0.2856, decode.d8.loss_dice: 0.7803, loss: 13.1484 +2022-05-09 20:38:09,537 - mmseg - INFO - Iter [5900/80000] lr: 1.330e-06, eta: 1 day, 15:50:34, time: 1.789, data_time: 0.021, memory: 64699, decode.loss_cls: 0.2021, decode.loss_mask: 0.2992, decode.loss_dice: 0.7783, decode.d0.loss_cls: 0.4115, decode.d0.loss_mask: 0.3211, decode.d0.loss_dice: 0.8557, decode.d1.loss_cls: 0.2333, decode.d1.loss_mask: 0.2996, decode.d1.loss_dice: 0.8001, decode.d2.loss_cls: 0.2167, decode.d2.loss_mask: 0.2981, decode.d2.loss_dice: 0.7892, decode.d3.loss_cls: 0.2152, decode.d3.loss_mask: 0.2962, decode.d3.loss_dice: 0.7781, decode.d4.loss_cls: 0.2109, decode.d4.loss_mask: 0.2967, decode.d4.loss_dice: 0.7803, decode.d5.loss_cls: 0.2112, decode.d5.loss_mask: 0.2962, decode.d5.loss_dice: 0.7778, decode.d6.loss_cls: 0.2047, decode.d6.loss_mask: 0.2965, decode.d6.loss_dice: 0.7765, decode.d7.loss_cls: 0.2070, decode.d7.loss_mask: 0.2972, decode.d7.loss_dice: 0.7770, decode.d8.loss_cls: 0.2022, decode.d8.loss_mask: 0.2991, decode.d8.loss_dice: 0.7753, loss: 13.2031 +2022-05-09 20:39:39,466 - mmseg - INFO - Iter [5950/80000] lr: 1.329e-06, eta: 1 day, 15:47:32, time: 1.799, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1984, decode.loss_mask: 0.2872, decode.loss_dice: 0.7919, decode.d0.loss_cls: 0.4179, decode.d0.loss_mask: 0.3078, decode.d0.loss_dice: 0.8604, decode.d1.loss_cls: 0.2304, 
decode.d1.loss_mask: 0.2920, decode.d1.loss_dice: 0.8152, decode.d2.loss_cls: 0.2020, decode.d2.loss_mask: 0.2888, decode.d2.loss_dice: 0.8005, decode.d3.loss_cls: 0.2044, decode.d3.loss_mask: 0.2879, decode.d3.loss_dice: 0.7932, decode.d4.loss_cls: 0.2108, decode.d4.loss_mask: 0.2878, decode.d4.loss_dice: 0.7945, decode.d5.loss_cls: 0.2061, decode.d5.loss_mask: 0.2867, decode.d5.loss_dice: 0.7993, decode.d6.loss_cls: 0.2046, decode.d6.loss_mask: 0.2872, decode.d6.loss_dice: 0.7876, decode.d7.loss_cls: 0.2008, decode.d7.loss_mask: 0.2871, decode.d7.loss_dice: 0.7901, decode.d8.loss_cls: 0.2037, decode.d8.loss_mask: 0.2866, decode.d8.loss_dice: 0.7902, loss: 13.2009 +2022-05-09 20:41:11,055 - mmseg - INFO - Saving checkpoint at 6000 iterations +2022-05-09 20:41:43,531 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 20:41:43,539 - mmseg - INFO - Iter [6000/80000] lr: 1.328e-06, eta: 1 day, 15:51:31, time: 2.479, data_time: 0.063, memory: 64699, decode.loss_cls: 0.1842, decode.loss_mask: 0.2962, decode.loss_dice: 0.7679, decode.d0.loss_cls: 0.4013, decode.d0.loss_mask: 0.3154, decode.d0.loss_dice: 0.8379, decode.d1.loss_cls: 0.2045, decode.d1.loss_mask: 0.3001, decode.d1.loss_dice: 0.7938, decode.d2.loss_cls: 0.1861, decode.d2.loss_mask: 0.2985, decode.d2.loss_dice: 0.7808, decode.d3.loss_cls: 0.1890, decode.d3.loss_mask: 0.2969, decode.d3.loss_dice: 0.7700, decode.d4.loss_cls: 0.1875, decode.d4.loss_mask: 0.2977, decode.d4.loss_dice: 0.7725, decode.d5.loss_cls: 0.1959, decode.d5.loss_mask: 0.2975, decode.d5.loss_dice: 0.7693, decode.d6.loss_cls: 0.1924, decode.d6.loss_mask: 0.2973, decode.d6.loss_dice: 0.7625, decode.d7.loss_cls: 0.1860, decode.d7.loss_mask: 0.2969, decode.d7.loss_dice: 0.7671, decode.d8.loss_cls: 0.1922, decode.d8.loss_mask: 0.2965, decode.d8.loss_dice: 0.7661, loss: 12.9001 +2022-05-09 20:43:39,224 - mmseg - INFO - per class results: +2022-05-09 20:43:39,230 - mmseg - INFO - 
++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.79 | 99.26 | +| sidewalk | 89.3 | 95.03 | +| building | 94.56 | 97.3 | +| wall | 74.25 | 80.74 | +| fence | 72.04 | 85.9 | +| pole | 72.25 | 85.57 | +| traffic light | 77.02 | 87.58 | +| traffic sign | 84.8 | 91.82 | +| vegetation | 93.47 | 96.41 | +| terrain | 65.47 | 76.09 | +| sky | 95.92 | 98.44 | +| person | 86.8 | 93.74 | +| rider | 72.79 | 85.02 | +| car | 95.87 | 98.0 | +| truck | 88.55 | 93.9 | +| bus | 93.91 | 95.64 | +| train | 90.52 | 95.13 | +| motorcycle | 75.17 | 85.44 | +| bicycle | 82.37 | 91.93 | ++---------------+-------+-------+ +2022-05-09 20:43:39,230 - mmseg - INFO - Summary: +2022-05-09 20:43:39,230 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.09 | 84.41 | 91.21 | ++-------+-------+-------+ +2022-05-09 20:43:39,233 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_4000.pth was removed +2022-05-09 20:44:11,955 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_6000.pth. +2022-05-09 20:44:11,968 - mmseg - INFO - Best mIoU is 0.8441 at 6000 iter. 
+2022-05-09 20:44:11,982 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 20:44:11,982 - mmseg - INFO - Iter(val) [32] aAcc: 0.9709, mIoU: 0.8441, mAcc: 0.9121, IoU.road: 0.9879, IoU.sidewalk: 0.8930, IoU.building: 0.9456, IoU.wall: 0.7425, IoU.fence: 0.7204, IoU.pole: 0.7225, IoU.traffic light: 0.7702, IoU.traffic sign: 0.8480, IoU.vegetation: 0.9347, IoU.terrain: 0.6547, IoU.sky: 0.9592, IoU.person: 0.8680, IoU.rider: 0.7279, IoU.car: 0.9587, IoU.truck: 0.8855, IoU.bus: 0.9391, IoU.train: 0.9052, IoU.motorcycle: 0.7517, IoU.bicycle: 0.8237, Acc.road: 0.9926, Acc.sidewalk: 0.9503, Acc.building: 0.9730, Acc.wall: 0.8074, Acc.fence: 0.8590, Acc.pole: 0.8557, Acc.traffic light: 0.8758, Acc.traffic sign: 0.9182, Acc.vegetation: 0.9641, Acc.terrain: 0.7609, Acc.sky: 0.9844, Acc.person: 0.9374, Acc.rider: 0.8502, Acc.car: 0.9800, Acc.truck: 0.9390, Acc.bus: 0.9564, Acc.train: 0.9513, Acc.motorcycle: 0.8544, Acc.bicycle: 0.9193 +2022-05-09 20:45:41,400 - mmseg - INFO - Iter [6050/80000] lr: 1.327e-06, eta: 1 day, 16:18:38, time: 4.760, data_time: 2.990, memory: 64699, decode.loss_cls: 0.2166, decode.loss_mask: 0.2999, decode.loss_dice: 0.7892, decode.d0.loss_cls: 0.4231, decode.d0.loss_mask: 0.3236, decode.d0.loss_dice: 0.8805, decode.d1.loss_cls: 0.2341, decode.d1.loss_mask: 0.3071, decode.d1.loss_dice: 0.8254, decode.d2.loss_cls: 0.2215, decode.d2.loss_mask: 0.3027, decode.d2.loss_dice: 0.8099, decode.d3.loss_cls: 0.2220, decode.d3.loss_mask: 0.2993, decode.d3.loss_dice: 0.7975, decode.d4.loss_cls: 0.2209, decode.d4.loss_mask: 0.3006, decode.d4.loss_dice: 0.7973, decode.d5.loss_cls: 0.2181, decode.d5.loss_mask: 0.2990, decode.d5.loss_dice: 0.7987, decode.d6.loss_cls: 0.2191, decode.d6.loss_mask: 0.3004, decode.d6.loss_dice: 0.7918, decode.d7.loss_cls: 0.2204, decode.d7.loss_mask: 0.3000, decode.d7.loss_dice: 0.7876, decode.d8.loss_cls: 0.2134, decode.d8.loss_mask: 0.3003, decode.d8.loss_dice: 0.7937, loss: 13.5137 
+2022-05-09 20:47:10,735 - mmseg - INFO - Iter [6100/80000] lr: 1.326e-06, eta: 1 day, 16:15:14, time: 1.787, data_time: 0.017, memory: 64699, decode.loss_cls: 0.2111, decode.loss_mask: 0.2905, decode.loss_dice: 0.7971, decode.d0.loss_cls: 0.4205, decode.d0.loss_mask: 0.3126, decode.d0.loss_dice: 0.8793, decode.d1.loss_cls: 0.2337, decode.d1.loss_mask: 0.2962, decode.d1.loss_dice: 0.8231, decode.d2.loss_cls: 0.2127, decode.d2.loss_mask: 0.2916, decode.d2.loss_dice: 0.8134, decode.d3.loss_cls: 0.2138, decode.d3.loss_mask: 0.2907, decode.d3.loss_dice: 0.8003, decode.d4.loss_cls: 0.2133, decode.d4.loss_mask: 0.2915, decode.d4.loss_dice: 0.8015, decode.d5.loss_cls: 0.2150, decode.d5.loss_mask: 0.2917, decode.d5.loss_dice: 0.8037, decode.d6.loss_cls: 0.2114, decode.d6.loss_mask: 0.2911, decode.d6.loss_dice: 0.7991, decode.d7.loss_cls: 0.2144, decode.d7.loss_mask: 0.2911, decode.d7.loss_dice: 0.8043, decode.d8.loss_cls: 0.2083, decode.d8.loss_mask: 0.2913, decode.d8.loss_dice: 0.8030, loss: 13.4171 +2022-05-09 20:48:42,257 - mmseg - INFO - Iter [6150/80000] lr: 1.325e-06, eta: 1 day, 16:12:17, time: 1.830, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2007, decode.loss_mask: 0.2830, decode.loss_dice: 0.7890, decode.d0.loss_cls: 0.4093, decode.d0.loss_mask: 0.3041, decode.d0.loss_dice: 0.8717, decode.d1.loss_cls: 0.2238, decode.d1.loss_mask: 0.2872, decode.d1.loss_dice: 0.8172, decode.d2.loss_cls: 0.2078, decode.d2.loss_mask: 0.2850, decode.d2.loss_dice: 0.8062, decode.d3.loss_cls: 0.2128, decode.d3.loss_mask: 0.2833, decode.d3.loss_dice: 0.7896, decode.d4.loss_cls: 0.2064, decode.d4.loss_mask: 0.2843, decode.d4.loss_dice: 0.7895, decode.d5.loss_cls: 0.2026, decode.d5.loss_mask: 0.2829, decode.d5.loss_dice: 0.7907, decode.d6.loss_cls: 0.2092, decode.d6.loss_mask: 0.2814, decode.d6.loss_dice: 0.7865, decode.d7.loss_cls: 0.2066, decode.d7.loss_mask: 0.2819, decode.d7.loss_dice: 0.7863, decode.d8.loss_cls: 0.2035, decode.d8.loss_mask: 0.2831, decode.d8.loss_dice: 
0.7862, loss: 13.1518 +2022-05-09 20:50:11,999 - mmseg - INFO - Iter [6200/80000] lr: 1.325e-06, eta: 1 day, 16:09:01, time: 1.795, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2022, decode.loss_mask: 0.3023, decode.loss_dice: 0.7838, decode.d0.loss_cls: 0.4112, decode.d0.loss_mask: 0.3196, decode.d0.loss_dice: 0.8493, decode.d1.loss_cls: 0.2201, decode.d1.loss_mask: 0.3056, decode.d1.loss_dice: 0.8009, decode.d2.loss_cls: 0.1993, decode.d2.loss_mask: 0.3024, decode.d2.loss_dice: 0.7904, decode.d3.loss_cls: 0.1984, decode.d3.loss_mask: 0.3024, decode.d3.loss_dice: 0.7782, decode.d4.loss_cls: 0.1980, decode.d4.loss_mask: 0.3028, decode.d4.loss_dice: 0.7809, decode.d5.loss_cls: 0.2017, decode.d5.loss_mask: 0.3004, decode.d5.loss_dice: 0.7791, decode.d6.loss_cls: 0.2007, decode.d6.loss_mask: 0.3029, decode.d6.loss_dice: 0.7749, decode.d7.loss_cls: 0.1989, decode.d7.loss_mask: 0.3022, decode.d7.loss_dice: 0.7745, decode.d8.loss_cls: 0.1962, decode.d8.loss_mask: 0.3027, decode.d8.loss_dice: 0.7777, loss: 13.1597 +2022-05-09 20:51:40,492 - mmseg - INFO - Iter [6250/80000] lr: 1.324e-06, eta: 1 day, 16:05:31, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2073, decode.loss_mask: 0.2946, decode.loss_dice: 0.7975, decode.d0.loss_cls: 0.4300, decode.d0.loss_mask: 0.3153, decode.d0.loss_dice: 0.8707, decode.d1.loss_cls: 0.2268, decode.d1.loss_mask: 0.2988, decode.d1.loss_dice: 0.8244, decode.d2.loss_cls: 0.2138, decode.d2.loss_mask: 0.2953, decode.d2.loss_dice: 0.8045, decode.d3.loss_cls: 0.2066, decode.d3.loss_mask: 0.2945, decode.d3.loss_dice: 0.8036, decode.d4.loss_cls: 0.2068, decode.d4.loss_mask: 0.2956, decode.d4.loss_dice: 0.8000, decode.d5.loss_cls: 0.2093, decode.d5.loss_mask: 0.2956, decode.d5.loss_dice: 0.7942, decode.d6.loss_cls: 0.2066, decode.d6.loss_mask: 0.2960, decode.d6.loss_dice: 0.7975, decode.d7.loss_cls: 0.2057, decode.d7.loss_mask: 0.2945, decode.d7.loss_dice: 0.7955, decode.d8.loss_cls: 0.2008, decode.d8.loss_mask: 0.2962, 
decode.d8.loss_dice: 0.8029, loss: 13.3809 +2022-05-09 20:53:09,175 - mmseg - INFO - Iter [6300/80000] lr: 1.323e-06, eta: 1 day, 16:02:06, time: 1.773, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1849, decode.loss_mask: 0.2893, decode.loss_dice: 0.7802, decode.d0.loss_cls: 0.4048, decode.d0.loss_mask: 0.3065, decode.d0.loss_dice: 0.8466, decode.d1.loss_cls: 0.2128, decode.d1.loss_mask: 0.2923, decode.d1.loss_dice: 0.8003, decode.d2.loss_cls: 0.1962, decode.d2.loss_mask: 0.2906, decode.d2.loss_dice: 0.7908, decode.d3.loss_cls: 0.1940, decode.d3.loss_mask: 0.2883, decode.d3.loss_dice: 0.7792, decode.d4.loss_cls: 0.1905, decode.d4.loss_mask: 0.2887, decode.d4.loss_dice: 0.7822, decode.d5.loss_cls: 0.1897, decode.d5.loss_mask: 0.2888, decode.d5.loss_dice: 0.7809, decode.d6.loss_cls: 0.1865, decode.d6.loss_mask: 0.2890, decode.d6.loss_dice: 0.7796, decode.d7.loss_cls: 0.1851, decode.d7.loss_mask: 0.2903, decode.d7.loss_dice: 0.7827, decode.d8.loss_cls: 0.1871, decode.d8.loss_mask: 0.2899, decode.d8.loss_dice: 0.7821, loss: 12.9497 +2022-05-09 20:54:40,946 - mmseg - INFO - Iter [6350/80000] lr: 1.322e-06, eta: 1 day, 15:59:19, time: 1.836, data_time: 0.066, memory: 64699, decode.loss_cls: 0.2075, decode.loss_mask: 0.2853, decode.loss_dice: 0.7850, decode.d0.loss_cls: 0.4144, decode.d0.loss_mask: 0.3026, decode.d0.loss_dice: 0.8648, decode.d1.loss_cls: 0.2422, decode.d1.loss_mask: 0.2892, decode.d1.loss_dice: 0.8125, decode.d2.loss_cls: 0.2219, decode.d2.loss_mask: 0.2863, decode.d2.loss_dice: 0.7939, decode.d3.loss_cls: 0.2055, decode.d3.loss_mask: 0.2852, decode.d3.loss_dice: 0.7948, decode.d4.loss_cls: 0.2090, decode.d4.loss_mask: 0.2867, decode.d4.loss_dice: 0.7898, decode.d5.loss_cls: 0.2050, decode.d5.loss_mask: 0.2854, decode.d5.loss_dice: 0.7868, decode.d6.loss_cls: 0.2072, decode.d6.loss_mask: 0.2859, decode.d6.loss_dice: 0.7881, decode.d7.loss_cls: 0.2125, decode.d7.loss_mask: 0.2862, decode.d7.loss_dice: 0.7873, decode.d8.loss_cls: 0.2050, 
decode.d8.loss_mask: 0.2858, decode.d8.loss_dice: 0.7897, loss: 13.2012 +2022-05-09 20:56:11,076 - mmseg - INFO - Iter [6400/80000] lr: 1.321e-06, eta: 1 day, 15:56:14, time: 1.803, data_time: 0.019, memory: 64699, decode.loss_cls: 0.2040, decode.loss_mask: 0.2796, decode.loss_dice: 0.7834, decode.d0.loss_cls: 0.4115, decode.d0.loss_mask: 0.2972, decode.d0.loss_dice: 0.8538, decode.d1.loss_cls: 0.2318, decode.d1.loss_mask: 0.2817, decode.d1.loss_dice: 0.8007, decode.d2.loss_cls: 0.2141, decode.d2.loss_mask: 0.2811, decode.d2.loss_dice: 0.7926, decode.d3.loss_cls: 0.2109, decode.d3.loss_mask: 0.2799, decode.d3.loss_dice: 0.7858, decode.d4.loss_cls: 0.2076, decode.d4.loss_mask: 0.2797, decode.d4.loss_dice: 0.7815, decode.d5.loss_cls: 0.2100, decode.d5.loss_mask: 0.2787, decode.d5.loss_dice: 0.7865, decode.d6.loss_cls: 0.2110, decode.d6.loss_mask: 0.2787, decode.d6.loss_dice: 0.7844, decode.d7.loss_cls: 0.2103, decode.d7.loss_mask: 0.2786, decode.d7.loss_dice: 0.7823, decode.d8.loss_cls: 0.2091, decode.d8.loss_mask: 0.2796, decode.d8.loss_dice: 0.7816, loss: 13.0674 +2022-05-09 20:57:41,247 - mmseg - INFO - Iter [6450/80000] lr: 1.320e-06, eta: 1 day, 15:53:10, time: 1.803, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1931, decode.loss_mask: 0.2868, decode.loss_dice: 0.7732, decode.d0.loss_cls: 0.4103, decode.d0.loss_mask: 0.3095, decode.d0.loss_dice: 0.8430, decode.d1.loss_cls: 0.2182, decode.d1.loss_mask: 0.2940, decode.d1.loss_dice: 0.8006, decode.d2.loss_cls: 0.2026, decode.d2.loss_mask: 0.2904, decode.d2.loss_dice: 0.7866, decode.d3.loss_cls: 0.1923, decode.d3.loss_mask: 0.2880, decode.d3.loss_dice: 0.7796, decode.d4.loss_cls: 0.1906, decode.d4.loss_mask: 0.2888, decode.d4.loss_dice: 0.7793, decode.d5.loss_cls: 0.1973, decode.d5.loss_mask: 0.2889, decode.d5.loss_dice: 0.7783, decode.d6.loss_cls: 0.1893, decode.d6.loss_mask: 0.2882, decode.d6.loss_dice: 0.7749, decode.d7.loss_cls: 0.1849, decode.d7.loss_mask: 0.2880, decode.d7.loss_dice: 0.7753, 
decode.d8.loss_cls: 0.1906, decode.d8.loss_mask: 0.2871, decode.d8.loss_dice: 0.7760, loss: 12.9455 +2022-05-09 20:59:10,468 - mmseg - INFO - Iter [6500/80000] lr: 1.319e-06, eta: 1 day, 15:49:58, time: 1.784, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1993, decode.loss_mask: 0.2810, decode.loss_dice: 0.7902, decode.d0.loss_cls: 0.4008, decode.d0.loss_mask: 0.2990, decode.d0.loss_dice: 0.8579, decode.d1.loss_cls: 0.2314, decode.d1.loss_mask: 0.2860, decode.d1.loss_dice: 0.8126, decode.d2.loss_cls: 0.2151, decode.d2.loss_mask: 0.2822, decode.d2.loss_dice: 0.8039, decode.d3.loss_cls: 0.2019, decode.d3.loss_mask: 0.2816, decode.d3.loss_dice: 0.7954, decode.d4.loss_cls: 0.1994, decode.d4.loss_mask: 0.2825, decode.d4.loss_dice: 0.7939, decode.d5.loss_cls: 0.1974, decode.d5.loss_mask: 0.2826, decode.d5.loss_dice: 0.7917, decode.d6.loss_cls: 0.2013, decode.d6.loss_mask: 0.2814, decode.d6.loss_dice: 0.7882, decode.d7.loss_cls: 0.2046, decode.d7.loss_mask: 0.2821, decode.d7.loss_dice: 0.7874, decode.d8.loss_cls: 0.2012, decode.d8.loss_mask: 0.2812, decode.d8.loss_dice: 0.7847, loss: 13.0980 +2022-05-09 21:00:42,609 - mmseg - INFO - Iter [6550/80000] lr: 1.318e-06, eta: 1 day, 15:47:20, time: 1.843, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2047, decode.loss_mask: 0.2893, decode.loss_dice: 0.7750, decode.d0.loss_cls: 0.4224, decode.d0.loss_mask: 0.3103, decode.d0.loss_dice: 0.8354, decode.d1.loss_cls: 0.2390, decode.d1.loss_mask: 0.2938, decode.d1.loss_dice: 0.7965, decode.d2.loss_cls: 0.2154, decode.d2.loss_mask: 0.2915, decode.d2.loss_dice: 0.7799, decode.d3.loss_cls: 0.2040, decode.d3.loss_mask: 0.2896, decode.d3.loss_dice: 0.7758, decode.d4.loss_cls: 0.2059, decode.d4.loss_mask: 0.2909, decode.d4.loss_dice: 0.7734, decode.d5.loss_cls: 0.2046, decode.d5.loss_mask: 0.2906, decode.d5.loss_dice: 0.7739, decode.d6.loss_cls: 0.2071, decode.d6.loss_mask: 0.2895, decode.d6.loss_dice: 0.7726, decode.d7.loss_cls: 0.2076, decode.d7.loss_mask: 0.2896, 
decode.d7.loss_dice: 0.7706, decode.d8.loss_cls: 0.2003, decode.d8.loss_mask: 0.2900, decode.d8.loss_dice: 0.7726, loss: 13.0620 +2022-05-09 21:02:10,903 - mmseg - INFO - Iter [6600/80000] lr: 1.317e-06, eta: 1 day, 15:44:00, time: 1.766, data_time: 0.018, memory: 64699, decode.loss_cls: 0.2045, decode.loss_mask: 0.2891, decode.loss_dice: 0.7873, decode.d0.loss_cls: 0.4104, decode.d0.loss_mask: 0.3081, decode.d0.loss_dice: 0.8540, decode.d1.loss_cls: 0.2341, decode.d1.loss_mask: 0.2923, decode.d1.loss_dice: 0.8039, decode.d2.loss_cls: 0.2053, decode.d2.loss_mask: 0.2897, decode.d2.loss_dice: 0.7940, decode.d3.loss_cls: 0.2043, decode.d3.loss_mask: 0.2906, decode.d3.loss_dice: 0.7902, decode.d4.loss_cls: 0.2115, decode.d4.loss_mask: 0.2904, decode.d4.loss_dice: 0.7847, decode.d5.loss_cls: 0.2040, decode.d5.loss_mask: 0.2890, decode.d5.loss_dice: 0.7915, decode.d6.loss_cls: 0.2072, decode.d6.loss_mask: 0.2893, decode.d6.loss_dice: 0.7790, decode.d7.loss_cls: 0.2119, decode.d7.loss_mask: 0.2889, decode.d7.loss_dice: 0.7794, decode.d8.loss_cls: 0.2022, decode.d8.loss_mask: 0.2888, decode.d8.loss_dice: 0.7875, loss: 13.1631 +2022-05-09 21:03:40,530 - mmseg - INFO - Iter [6650/80000] lr: 1.316e-06, eta: 1 day, 15:40:56, time: 1.792, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1956, decode.loss_mask: 0.2868, decode.loss_dice: 0.7739, decode.d0.loss_cls: 0.4015, decode.d0.loss_mask: 0.3086, decode.d0.loss_dice: 0.8514, decode.d1.loss_cls: 0.2144, decode.d1.loss_mask: 0.2926, decode.d1.loss_dice: 0.7995, decode.d2.loss_cls: 0.2033, decode.d2.loss_mask: 0.2915, decode.d2.loss_dice: 0.7899, decode.d3.loss_cls: 0.1966, decode.d3.loss_mask: 0.2898, decode.d3.loss_dice: 0.7804, decode.d4.loss_cls: 0.1972, decode.d4.loss_mask: 0.2878, decode.d4.loss_dice: 0.7824, decode.d5.loss_cls: 0.2041, decode.d5.loss_mask: 0.2885, decode.d5.loss_dice: 0.7791, decode.d6.loss_cls: 0.1909, decode.d6.loss_mask: 0.2889, decode.d6.loss_dice: 0.7812, decode.d7.loss_cls: 0.2040, 
decode.d7.loss_mask: 0.2876, decode.d7.loss_dice: 0.7745, decode.d8.loss_cls: 0.1996, decode.d8.loss_mask: 0.2873, decode.d8.loss_dice: 0.7780, loss: 13.0068 +2022-05-09 21:05:11,627 - mmseg - INFO - Iter [6700/80000] lr: 1.316e-06, eta: 1 day, 15:38:10, time: 1.822, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1923, decode.loss_mask: 0.2953, decode.loss_dice: 0.7875, decode.d0.loss_cls: 0.4159, decode.d0.loss_mask: 0.3169, decode.d0.loss_dice: 0.8591, decode.d1.loss_cls: 0.2259, decode.d1.loss_mask: 0.2979, decode.d1.loss_dice: 0.8123, decode.d2.loss_cls: 0.2135, decode.d2.loss_mask: 0.2967, decode.d2.loss_dice: 0.7958, decode.d3.loss_cls: 0.2032, decode.d3.loss_mask: 0.2967, decode.d3.loss_dice: 0.7902, decode.d4.loss_cls: 0.1988, decode.d4.loss_mask: 0.2966, decode.d4.loss_dice: 0.7965, decode.d5.loss_cls: 0.2055, decode.d5.loss_mask: 0.2957, decode.d5.loss_dice: 0.7897, decode.d6.loss_cls: 0.2018, decode.d6.loss_mask: 0.2953, decode.d6.loss_dice: 0.7850, decode.d7.loss_cls: 0.2011, decode.d7.loss_mask: 0.2955, decode.d7.loss_dice: 0.7910, decode.d8.loss_cls: 0.1954, decode.d8.loss_mask: 0.2965, decode.d8.loss_dice: 0.7903, loss: 13.2342 +2022-05-09 21:06:39,413 - mmseg - INFO - Iter [6750/80000] lr: 1.315e-06, eta: 1 day, 15:34:49, time: 1.755, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1841, decode.loss_mask: 0.2947, decode.loss_dice: 0.7457, decode.d0.loss_cls: 0.3886, decode.d0.loss_mask: 0.3114, decode.d0.loss_dice: 0.8192, decode.d1.loss_cls: 0.2052, decode.d1.loss_mask: 0.2975, decode.d1.loss_dice: 0.7722, decode.d2.loss_cls: 0.2009, decode.d2.loss_mask: 0.2946, decode.d2.loss_dice: 0.7572, decode.d3.loss_cls: 0.1991, decode.d3.loss_mask: 0.2925, decode.d3.loss_dice: 0.7473, decode.d4.loss_cls: 0.1970, decode.d4.loss_mask: 0.2942, decode.d4.loss_dice: 0.7482, decode.d5.loss_cls: 0.1900, decode.d5.loss_mask: 0.2937, decode.d5.loss_dice: 0.7494, decode.d6.loss_cls: 0.1904, decode.d6.loss_mask: 0.2944, decode.d6.loss_dice: 0.7470, 
decode.d7.loss_cls: 0.1810, decode.d7.loss_mask: 0.2953, decode.d7.loss_dice: 0.7481, decode.d8.loss_cls: 0.1890, decode.d8.loss_mask: 0.2954, decode.d8.loss_dice: 0.7466, loss: 12.6700 +2022-05-09 21:08:07,497 - mmseg - INFO - Iter [6800/80000] lr: 1.314e-06, eta: 1 day, 15:31:33, time: 1.762, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1826, decode.loss_mask: 0.2857, decode.loss_dice: 0.7744, decode.d0.loss_cls: 0.4058, decode.d0.loss_mask: 0.3070, decode.d0.loss_dice: 0.8462, decode.d1.loss_cls: 0.2100, decode.d1.loss_mask: 0.2876, decode.d1.loss_dice: 0.8012, decode.d2.loss_cls: 0.1998, decode.d2.loss_mask: 0.2861, decode.d2.loss_dice: 0.7857, decode.d3.loss_cls: 0.1920, decode.d3.loss_mask: 0.2862, decode.d3.loss_dice: 0.7781, decode.d4.loss_cls: 0.1834, decode.d4.loss_mask: 0.2862, decode.d4.loss_dice: 0.7784, decode.d5.loss_cls: 0.1847, decode.d5.loss_mask: 0.2858, decode.d5.loss_dice: 0.7759, decode.d6.loss_cls: 0.1891, decode.d6.loss_mask: 0.2856, decode.d6.loss_dice: 0.7730, decode.d7.loss_cls: 0.1916, decode.d7.loss_mask: 0.2851, decode.d7.loss_dice: 0.7764, decode.d8.loss_cls: 0.1879, decode.d8.loss_mask: 0.2849, decode.d8.loss_dice: 0.7741, loss: 12.8703 +2022-05-09 21:09:37,141 - mmseg - INFO - Iter [6850/80000] lr: 1.313e-06, eta: 1 day, 15:28:35, time: 1.793, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1990, decode.loss_mask: 0.2831, decode.loss_dice: 0.7797, decode.d0.loss_cls: 0.4236, decode.d0.loss_mask: 0.3005, decode.d0.loss_dice: 0.8457, decode.d1.loss_cls: 0.2260, decode.d1.loss_mask: 0.2877, decode.d1.loss_dice: 0.8026, decode.d2.loss_cls: 0.2109, decode.d2.loss_mask: 0.2848, decode.d2.loss_dice: 0.7848, decode.d3.loss_cls: 0.1996, decode.d3.loss_mask: 0.2827, decode.d3.loss_dice: 0.7792, decode.d4.loss_cls: 0.2051, decode.d4.loss_mask: 0.2824, decode.d4.loss_dice: 0.7788, decode.d5.loss_cls: 0.2023, decode.d5.loss_mask: 0.2832, decode.d5.loss_dice: 0.7748, decode.d6.loss_cls: 0.1968, decode.d6.loss_mask: 0.2839, 
decode.d6.loss_dice: 0.7755, decode.d7.loss_cls: 0.1982, decode.d7.loss_mask: 0.2833, decode.d7.loss_dice: 0.7789, decode.d8.loss_cls: 0.2003, decode.d8.loss_mask: 0.2833, decode.d8.loss_dice: 0.7778, loss: 12.9945 +2022-05-09 21:11:08,375 - mmseg - INFO - Iter [6900/80000] lr: 1.312e-06, eta: 1 day, 15:25:55, time: 1.825, data_time: 0.063, memory: 64699, decode.loss_cls: 0.1886, decode.loss_mask: 0.2814, decode.loss_dice: 0.7800, decode.d0.loss_cls: 0.3860, decode.d0.loss_mask: 0.3000, decode.d0.loss_dice: 0.8415, decode.d1.loss_cls: 0.2236, decode.d1.loss_mask: 0.2844, decode.d1.loss_dice: 0.7947, decode.d2.loss_cls: 0.2055, decode.d2.loss_mask: 0.2826, decode.d2.loss_dice: 0.7868, decode.d3.loss_cls: 0.1939, decode.d3.loss_mask: 0.2818, decode.d3.loss_dice: 0.7770, decode.d4.loss_cls: 0.1870, decode.d4.loss_mask: 0.2825, decode.d4.loss_dice: 0.7729, decode.d5.loss_cls: 0.1970, decode.d5.loss_mask: 0.2815, decode.d5.loss_dice: 0.7721, decode.d6.loss_cls: 0.1903, decode.d6.loss_mask: 0.2814, decode.d6.loss_dice: 0.7727, decode.d7.loss_cls: 0.1911, decode.d7.loss_mask: 0.2818, decode.d7.loss_dice: 0.7742, decode.d8.loss_cls: 0.1848, decode.d8.loss_mask: 0.2806, decode.d8.loss_dice: 0.7774, loss: 12.8352 +2022-05-09 21:12:37,547 - mmseg - INFO - Iter [6950/80000] lr: 1.311e-06, eta: 1 day, 15:22:55, time: 1.783, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1876, decode.loss_mask: 0.2797, decode.loss_dice: 0.7837, decode.d0.loss_cls: 0.3815, decode.d0.loss_mask: 0.2983, decode.d0.loss_dice: 0.8483, decode.d1.loss_cls: 0.2168, decode.d1.loss_mask: 0.2835, decode.d1.loss_dice: 0.8025, decode.d2.loss_cls: 0.2034, decode.d2.loss_mask: 0.2804, decode.d2.loss_dice: 0.7916, decode.d3.loss_cls: 0.1876, decode.d3.loss_mask: 0.2812, decode.d3.loss_dice: 0.7850, decode.d4.loss_cls: 0.1903, decode.d4.loss_mask: 0.2815, decode.d4.loss_dice: 0.7838, decode.d5.loss_cls: 0.1910, decode.d5.loss_mask: 0.2798, decode.d5.loss_dice: 0.7827, decode.d6.loss_cls: 0.1880, 
decode.d6.loss_mask: 0.2794, decode.d6.loss_dice: 0.7849, decode.d7.loss_cls: 0.1861, decode.d7.loss_mask: 0.2802, decode.d7.loss_dice: 0.7863, decode.d8.loss_cls: 0.1859, decode.d8.loss_mask: 0.2793, decode.d8.loss_dice: 0.7840, loss: 12.8743 +2022-05-09 21:14:06,309 - mmseg - INFO - Saving checkpoint at 7000 iterations +2022-05-09 21:14:40,313 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 21:14:40,321 - mmseg - INFO - Iter [7000/80000] lr: 1.310e-06, eta: 1 day, 15:25:45, time: 2.453, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1871, decode.loss_mask: 0.2857, decode.loss_dice: 0.7864, decode.d0.loss_cls: 0.3998, decode.d0.loss_mask: 0.3084, decode.d0.loss_dice: 0.8510, decode.d1.loss_cls: 0.2129, decode.d1.loss_mask: 0.2888, decode.d1.loss_dice: 0.8070, decode.d2.loss_cls: 0.1970, decode.d2.loss_mask: 0.2873, decode.d2.loss_dice: 0.7919, decode.d3.loss_cls: 0.1922, decode.d3.loss_mask: 0.2861, decode.d3.loss_dice: 0.7824, decode.d4.loss_cls: 0.1874, decode.d4.loss_mask: 0.2859, decode.d4.loss_dice: 0.7793, decode.d5.loss_cls: 0.1905, decode.d5.loss_mask: 0.2851, decode.d5.loss_dice: 0.7804, decode.d6.loss_cls: 0.1831, decode.d6.loss_mask: 0.2854, decode.d6.loss_dice: 0.7789, decode.d7.loss_cls: 0.1786, decode.d7.loss_mask: 0.2853, decode.d7.loss_dice: 0.7822, decode.d8.loss_cls: 0.1931, decode.d8.loss_mask: 0.2855, decode.d8.loss_dice: 0.7769, loss: 12.9215 +2022-05-09 21:16:35,962 - mmseg - INFO - per class results: +2022-05-09 21:16:35,971 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.76 | 99.22 | +| sidewalk | 89.67 | 95.43 | +| building | 94.52 | 97.16 | +| wall | 66.3 | 77.41 | +| fence | 73.19 | 83.15 | +| pole | 72.42 | 83.86 | +| traffic light | 76.96 | 88.12 | +| traffic sign | 84.81 | 90.58 | +| vegetation | 93.47 | 96.75 | +| terrain | 67.84 | 83.4 | +| sky | 96.03 | 98.38 | +| person | 86.83 | 92.1 | +| rider | 73.06 | 
87.2 | +| car | 96.3 | 98.2 | +| truck | 88.46 | 91.49 | +| bus | 93.89 | 97.35 | +| train | 87.86 | 90.88 | +| motorcycle | 72.84 | 88.33 | +| bicycle | 82.38 | 90.17 | ++---------------+-------+-------+ +2022-05-09 21:16:35,971 - mmseg - INFO - Summary: +2022-05-09 21:16:35,972 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.08 | 83.98 | 91.01 | ++-------+-------+-------+ +2022-05-09 21:16:35,976 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 21:16:35,976 - mmseg - INFO - Iter(val) [32] aAcc: 0.9708, mIoU: 0.8398, mAcc: 0.9101, IoU.road: 0.9876, IoU.sidewalk: 0.8967, IoU.building: 0.9452, IoU.wall: 0.6630, IoU.fence: 0.7319, IoU.pole: 0.7242, IoU.traffic light: 0.7696, IoU.traffic sign: 0.8481, IoU.vegetation: 0.9347, IoU.terrain: 0.6784, IoU.sky: 0.9603, IoU.person: 0.8683, IoU.rider: 0.7306, IoU.car: 0.9630, IoU.truck: 0.8846, IoU.bus: 0.9389, IoU.train: 0.8786, IoU.motorcycle: 0.7284, IoU.bicycle: 0.8238, Acc.road: 0.9922, Acc.sidewalk: 0.9543, Acc.building: 0.9716, Acc.wall: 0.7741, Acc.fence: 0.8315, Acc.pole: 0.8386, Acc.traffic light: 0.8812, Acc.traffic sign: 0.9058, Acc.vegetation: 0.9675, Acc.terrain: 0.8340, Acc.sky: 0.9838, Acc.person: 0.9210, Acc.rider: 0.8720, Acc.car: 0.9820, Acc.truck: 0.9149, Acc.bus: 0.9735, Acc.train: 0.9088, Acc.motorcycle: 0.8833, Acc.bicycle: 0.9017 +2022-05-09 21:18:03,590 - mmseg - INFO - Iter [7050/80000] lr: 1.309e-06, eta: 1 day, 15:42:26, time: 4.068, data_time: 2.333, memory: 64699, decode.loss_cls: 0.1870, decode.loss_mask: 0.2799, decode.loss_dice: 0.7835, decode.d0.loss_cls: 0.3972, decode.d0.loss_mask: 0.2997, decode.d0.loss_dice: 0.8531, decode.d1.loss_cls: 0.2064, decode.d1.loss_mask: 0.2824, decode.d1.loss_dice: 0.8039, decode.d2.loss_cls: 0.2002, decode.d2.loss_mask: 0.2799, decode.d2.loss_dice: 0.7921, decode.d3.loss_cls: 0.1851, decode.d3.loss_mask: 0.2804, decode.d3.loss_dice: 0.7842, decode.d4.loss_cls: 
0.1907, decode.d4.loss_mask: 0.2794, decode.d4.loss_dice: 0.7801, decode.d5.loss_cls: 0.1861, decode.d5.loss_mask: 0.2794, decode.d5.loss_dice: 0.7842, decode.d6.loss_cls: 0.1868, decode.d6.loss_mask: 0.2801, decode.d6.loss_dice: 0.7817, decode.d7.loss_cls: 0.1855, decode.d7.loss_mask: 0.2802, decode.d7.loss_dice: 0.7808, decode.d8.loss_cls: 0.1861, decode.d8.loss_mask: 0.2800, decode.d8.loss_dice: 0.7767, loss: 12.8528 +2022-05-09 21:19:34,727 - mmseg - INFO - Iter [7100/80000] lr: 1.308e-06, eta: 1 day, 15:39:38, time: 1.823, data_time: 0.065, memory: 64699, decode.loss_cls: 0.2070, decode.loss_mask: 0.2837, decode.loss_dice: 0.7706, decode.d0.loss_cls: 0.4103, decode.d0.loss_mask: 0.3020, decode.d0.loss_dice: 0.8494, decode.d1.loss_cls: 0.2264, decode.d1.loss_mask: 0.2892, decode.d1.loss_dice: 0.8010, decode.d2.loss_cls: 0.2214, decode.d2.loss_mask: 0.2845, decode.d2.loss_dice: 0.7862, decode.d3.loss_cls: 0.2091, decode.d3.loss_mask: 0.2838, decode.d3.loss_dice: 0.7774, decode.d4.loss_cls: 0.2124, decode.d4.loss_mask: 0.2820, decode.d4.loss_dice: 0.7771, decode.d5.loss_cls: 0.2093, decode.d5.loss_mask: 0.2821, decode.d5.loss_dice: 0.7806, decode.d6.loss_cls: 0.2015, decode.d6.loss_mask: 0.2839, decode.d6.loss_dice: 0.7752, decode.d7.loss_cls: 0.2038, decode.d7.loss_mask: 0.2827, decode.d7.loss_dice: 0.7783, decode.d8.loss_cls: 0.2047, decode.d8.loss_mask: 0.2823, decode.d8.loss_dice: 0.7737, loss: 13.0315 +2022-05-09 21:21:04,526 - mmseg - INFO - Iter [7150/80000] lr: 1.307e-06, eta: 1 day, 15:36:37, time: 1.796, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1884, decode.loss_mask: 0.2885, decode.loss_dice: 0.7467, decode.d0.loss_cls: 0.3989, decode.d0.loss_mask: 0.3095, decode.d0.loss_dice: 0.8199, decode.d1.loss_cls: 0.2204, decode.d1.loss_mask: 0.2921, decode.d1.loss_dice: 0.7681, decode.d2.loss_cls: 0.2097, decode.d2.loss_mask: 0.2904, decode.d2.loss_dice: 0.7548, decode.d3.loss_cls: 0.1922, decode.d3.loss_mask: 0.2880, decode.d3.loss_dice: 0.7472, 
decode.d4.loss_cls: 0.1971, decode.d4.loss_mask: 0.2887, decode.d4.loss_dice: 0.7512, decode.d5.loss_cls: 0.1880, decode.d5.loss_mask: 0.2894, decode.d5.loss_dice: 0.7500, decode.d6.loss_cls: 0.1893, decode.d6.loss_mask: 0.2886, decode.d6.loss_dice: 0.7449, decode.d7.loss_cls: 0.1882, decode.d7.loss_mask: 0.2883, decode.d7.loss_dice: 0.7494, decode.d8.loss_cls: 0.1883, decode.d8.loss_mask: 0.2887, decode.d8.loss_dice: 0.7462, loss: 12.6512 +2022-05-09 21:22:35,127 - mmseg - INFO - Iter [7200/80000] lr: 1.307e-06, eta: 1 day, 15:33:46, time: 1.812, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1914, decode.loss_mask: 0.2849, decode.loss_dice: 0.7790, decode.d0.loss_cls: 0.4112, decode.d0.loss_mask: 0.3034, decode.d0.loss_dice: 0.8539, decode.d1.loss_cls: 0.2155, decode.d1.loss_mask: 0.2892, decode.d1.loss_dice: 0.8050, decode.d2.loss_cls: 0.1979, decode.d2.loss_mask: 0.2864, decode.d2.loss_dice: 0.7953, decode.d3.loss_cls: 0.1941, decode.d3.loss_mask: 0.2857, decode.d3.loss_dice: 0.7858, decode.d4.loss_cls: 0.1874, decode.d4.loss_mask: 0.2855, decode.d4.loss_dice: 0.7846, decode.d5.loss_cls: 0.1899, decode.d5.loss_mask: 0.2854, decode.d5.loss_dice: 0.7836, decode.d6.loss_cls: 0.1894, decode.d6.loss_mask: 0.2853, decode.d6.loss_dice: 0.7795, decode.d7.loss_cls: 0.1891, decode.d7.loss_mask: 0.2842, decode.d7.loss_dice: 0.7802, decode.d8.loss_cls: 0.1836, decode.d8.loss_mask: 0.2840, decode.d8.loss_dice: 0.7833, loss: 12.9536 +2022-05-09 21:24:03,983 - mmseg - INFO - Iter [7250/80000] lr: 1.306e-06, eta: 1 day, 15:30:38, time: 1.777, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1855, decode.loss_mask: 0.2840, decode.loss_dice: 0.7809, decode.d0.loss_cls: 0.3988, decode.d0.loss_mask: 0.3023, decode.d0.loss_dice: 0.8486, decode.d1.loss_cls: 0.2024, decode.d1.loss_mask: 0.2882, decode.d1.loss_dice: 0.8061, decode.d2.loss_cls: 0.1911, decode.d2.loss_mask: 0.2856, decode.d2.loss_dice: 0.7938, decode.d3.loss_cls: 0.1933, decode.d3.loss_mask: 0.2842, 
decode.d3.loss_dice: 0.7847, decode.d4.loss_cls: 0.1861, decode.d4.loss_mask: 0.2845, decode.d4.loss_dice: 0.7833, decode.d5.loss_cls: 0.1895, decode.d5.loss_mask: 0.2847, decode.d5.loss_dice: 0.7818, decode.d6.loss_cls: 0.1850, decode.d6.loss_mask: 0.2845, decode.d6.loss_dice: 0.7796, decode.d7.loss_cls: 0.1909, decode.d7.loss_mask: 0.2856, decode.d7.loss_dice: 0.7821, decode.d8.loss_cls: 0.1898, decode.d8.loss_mask: 0.2841, decode.d8.loss_dice: 0.7800, loss: 12.9010 +2022-05-09 21:25:35,633 - mmseg - INFO - Iter [7300/80000] lr: 1.305e-06, eta: 1 day, 15:27:59, time: 1.833, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1710, decode.loss_mask: 0.2796, decode.loss_dice: 0.7724, decode.d0.loss_cls: 0.4016, decode.d0.loss_mask: 0.2967, decode.d0.loss_dice: 0.8368, decode.d1.loss_cls: 0.2000, decode.d1.loss_mask: 0.2839, decode.d1.loss_dice: 0.7925, decode.d2.loss_cls: 0.1892, decode.d2.loss_mask: 0.2796, decode.d2.loss_dice: 0.7816, decode.d3.loss_cls: 0.1737, decode.d3.loss_mask: 0.2791, decode.d3.loss_dice: 0.7700, decode.d4.loss_cls: 0.1762, decode.d4.loss_mask: 0.2787, decode.d4.loss_dice: 0.7678, decode.d5.loss_cls: 0.1793, decode.d5.loss_mask: 0.2794, decode.d5.loss_dice: 0.7685, decode.d6.loss_cls: 0.1689, decode.d6.loss_mask: 0.2783, decode.d6.loss_dice: 0.7687, decode.d7.loss_cls: 0.1725, decode.d7.loss_mask: 0.2800, decode.d7.loss_dice: 0.7729, decode.d8.loss_cls: 0.1769, decode.d8.loss_mask: 0.2802, decode.d8.loss_dice: 0.7670, loss: 12.6229 +2022-05-09 21:27:06,147 - mmseg - INFO - Iter [7350/80000] lr: 1.304e-06, eta: 1 day, 15:25:11, time: 1.810, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1873, decode.loss_mask: 0.2858, decode.loss_dice: 0.7620, decode.d0.loss_cls: 0.4062, decode.d0.loss_mask: 0.3035, decode.d0.loss_dice: 0.8348, decode.d1.loss_cls: 0.2092, decode.d1.loss_mask: 0.2886, decode.d1.loss_dice: 0.7866, decode.d2.loss_cls: 0.1961, decode.d2.loss_mask: 0.2872, decode.d2.loss_dice: 0.7720, decode.d3.loss_cls: 0.1907, 
decode.d3.loss_mask: 0.2869, decode.d3.loss_dice: 0.7656, decode.d4.loss_cls: 0.1963, decode.d4.loss_mask: 0.2861, decode.d4.loss_dice: 0.7632, decode.d5.loss_cls: 0.1874, decode.d5.loss_mask: 0.2857, decode.d5.loss_dice: 0.7679, decode.d6.loss_cls: 0.1851, decode.d6.loss_mask: 0.2867, decode.d6.loss_dice: 0.7604, decode.d7.loss_cls: 0.1891, decode.d7.loss_mask: 0.2854, decode.d7.loss_dice: 0.7582, decode.d8.loss_cls: 0.1837, decode.d8.loss_mask: 0.2853, decode.d8.loss_dice: 0.7673, loss: 12.7506 +2022-05-09 21:28:34,641 - mmseg - INFO - Iter [7400/80000] lr: 1.303e-06, eta: 1 day, 15:22:03, time: 1.770, data_time: 0.016, memory: 64699, decode.loss_cls: 0.1877, decode.loss_mask: 0.2807, decode.loss_dice: 0.7582, decode.d0.loss_cls: 0.4050, decode.d0.loss_mask: 0.2989, decode.d0.loss_dice: 0.8295, decode.d1.loss_cls: 0.2136, decode.d1.loss_mask: 0.2839, decode.d1.loss_dice: 0.7856, decode.d2.loss_cls: 0.1991, decode.d2.loss_mask: 0.2816, decode.d2.loss_dice: 0.7744, decode.d3.loss_cls: 0.1991, decode.d3.loss_mask: 0.2805, decode.d3.loss_dice: 0.7600, decode.d4.loss_cls: 0.1906, decode.d4.loss_mask: 0.2809, decode.d4.loss_dice: 0.7650, decode.d5.loss_cls: 0.1955, decode.d5.loss_mask: 0.2807, decode.d5.loss_dice: 0.7666, decode.d6.loss_cls: 0.1878, decode.d6.loss_mask: 0.2810, decode.d6.loss_dice: 0.7579, decode.d7.loss_cls: 0.1959, decode.d7.loss_mask: 0.2804, decode.d7.loss_dice: 0.7605, decode.d8.loss_cls: 0.1906, decode.d8.loss_mask: 0.2803, decode.d8.loss_dice: 0.7575, loss: 12.7090 +2022-05-09 21:30:06,604 - mmseg - INFO - Iter [7450/80000] lr: 1.302e-06, eta: 1 day, 15:19:30, time: 1.839, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1825, decode.loss_mask: 0.2977, decode.loss_dice: 0.7838, decode.d0.loss_cls: 0.3935, decode.d0.loss_mask: 0.3195, decode.d0.loss_dice: 0.8417, decode.d1.loss_cls: 0.2177, decode.d1.loss_mask: 0.3040, decode.d1.loss_dice: 0.7990, decode.d2.loss_cls: 0.1988, decode.d2.loss_mask: 0.2998, decode.d2.loss_dice: 0.7850, 
decode.d3.loss_cls: 0.1941, decode.d3.loss_mask: 0.2986, decode.d3.loss_dice: 0.7818, decode.d4.loss_cls: 0.1916, decode.d4.loss_mask: 0.2989, decode.d4.loss_dice: 0.7824, decode.d5.loss_cls: 0.1939, decode.d5.loss_mask: 0.2967, decode.d5.loss_dice: 0.7810, decode.d6.loss_cls: 0.1950, decode.d6.loss_mask: 0.2973, decode.d6.loss_dice: 0.7732, decode.d7.loss_cls: 0.1917, decode.d7.loss_mask: 0.2982, decode.d7.loss_dice: 0.7789, decode.d8.loss_cls: 0.1926, decode.d8.loss_mask: 0.2983, decode.d8.loss_dice: 0.7814, loss: 13.0490 +2022-05-09 21:31:34,848 - mmseg - INFO - Iter [7500/80000] lr: 1.301e-06, eta: 1 day, 15:16:23, time: 1.765, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1887, decode.loss_mask: 0.2899, decode.loss_dice: 0.7657, decode.d0.loss_cls: 0.4007, decode.d0.loss_mask: 0.3063, decode.d0.loss_dice: 0.8433, decode.d1.loss_cls: 0.2106, decode.d1.loss_mask: 0.2926, decode.d1.loss_dice: 0.7936, decode.d2.loss_cls: 0.2013, decode.d2.loss_mask: 0.2909, decode.d2.loss_dice: 0.7792, decode.d3.loss_cls: 0.1960, decode.d3.loss_mask: 0.2896, decode.d3.loss_dice: 0.7687, decode.d4.loss_cls: 0.1967, decode.d4.loss_mask: 0.2896, decode.d4.loss_dice: 0.7675, decode.d5.loss_cls: 0.1952, decode.d5.loss_mask: 0.2902, decode.d5.loss_dice: 0.7701, decode.d6.loss_cls: 0.1953, decode.d6.loss_mask: 0.2892, decode.d6.loss_dice: 0.7692, decode.d7.loss_cls: 0.2002, decode.d7.loss_mask: 0.2899, decode.d7.loss_dice: 0.7676, decode.d8.loss_cls: 0.1944, decode.d8.loss_mask: 0.2895, decode.d8.loss_dice: 0.7672, loss: 12.8888 +2022-05-09 21:33:03,482 - mmseg - INFO - Iter [7550/80000] lr: 1.300e-06, eta: 1 day, 15:13:20, time: 1.772, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1854, decode.loss_mask: 0.2806, decode.loss_dice: 0.7609, decode.d0.loss_cls: 0.3879, decode.d0.loss_mask: 0.2994, decode.d0.loss_dice: 0.8376, decode.d1.loss_cls: 0.2155, decode.d1.loss_mask: 0.2834, decode.d1.loss_dice: 0.7846, decode.d2.loss_cls: 0.1911, decode.d2.loss_mask: 0.2803, 
decode.d2.loss_dice: 0.7711, decode.d3.loss_cls: 0.1888, decode.d3.loss_mask: 0.2807, decode.d3.loss_dice: 0.7625, decode.d4.loss_cls: 0.1883, decode.d4.loss_mask: 0.2799, decode.d4.loss_dice: 0.7611, decode.d5.loss_cls: 0.1924, decode.d5.loss_mask: 0.2789, decode.d5.loss_dice: 0.7665, decode.d6.loss_cls: 0.1934, decode.d6.loss_mask: 0.2804, decode.d6.loss_dice: 0.7604, decode.d7.loss_cls: 0.1851, decode.d7.loss_mask: 0.2804, decode.d7.loss_dice: 0.7630, decode.d8.loss_cls: 0.1941, decode.d8.loss_mask: 0.2791, decode.d8.loss_dice: 0.7612, loss: 12.6740 +2022-05-09 21:34:32,951 - mmseg - INFO - Iter [7600/80000] lr: 1.299e-06, eta: 1 day, 15:10:26, time: 1.789, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1706, decode.loss_mask: 0.2789, decode.loss_dice: 0.7647, decode.d0.loss_cls: 0.3770, decode.d0.loss_mask: 0.2948, decode.d0.loss_dice: 0.8258, decode.d1.loss_cls: 0.1987, decode.d1.loss_mask: 0.2821, decode.d1.loss_dice: 0.7846, decode.d2.loss_cls: 0.1838, decode.d2.loss_mask: 0.2803, decode.d2.loss_dice: 0.7735, decode.d3.loss_cls: 0.1778, decode.d3.loss_mask: 0.2793, decode.d3.loss_dice: 0.7637, decode.d4.loss_cls: 0.1738, decode.d4.loss_mask: 0.2790, decode.d4.loss_dice: 0.7662, decode.d5.loss_cls: 0.1702, decode.d5.loss_mask: 0.2805, decode.d5.loss_dice: 0.7675, decode.d6.loss_cls: 0.1688, decode.d6.loss_mask: 0.2796, decode.d6.loss_dice: 0.7625, decode.d7.loss_cls: 0.1662, decode.d7.loss_mask: 0.2792, decode.d7.loss_dice: 0.7613, decode.d8.loss_cls: 0.1686, decode.d8.loss_mask: 0.2794, decode.d8.loss_dice: 0.7644, loss: 12.5026 +2022-05-09 21:36:05,160 - mmseg - INFO - Iter [7650/80000] lr: 1.299e-06, eta: 1 day, 15:08:00, time: 1.845, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1815, decode.loss_mask: 0.2776, decode.loss_dice: 0.7610, decode.d0.loss_cls: 0.3950, decode.d0.loss_mask: 0.2981, decode.d0.loss_dice: 0.8342, decode.d1.loss_cls: 0.2066, decode.d1.loss_mask: 0.2820, decode.d1.loss_dice: 0.7838, decode.d2.loss_cls: 0.2004, 
decode.d2.loss_mask: 0.2807, decode.d2.loss_dice: 0.7745, decode.d3.loss_cls: 0.1898, decode.d3.loss_mask: 0.2799, decode.d3.loss_dice: 0.7642, decode.d4.loss_cls: 0.1914, decode.d4.loss_mask: 0.2794, decode.d4.loss_dice: 0.7614, decode.d5.loss_cls: 0.1882, decode.d5.loss_mask: 0.2784, decode.d5.loss_dice: 0.7704, decode.d6.loss_cls: 0.1881, decode.d6.loss_mask: 0.2789, decode.d6.loss_dice: 0.7587, decode.d7.loss_cls: 0.1877, decode.d7.loss_mask: 0.2785, decode.d7.loss_dice: 0.7628, decode.d8.loss_cls: 0.1848, decode.d8.loss_mask: 0.2779, decode.d8.loss_dice: 0.7617, loss: 12.6575 +2022-05-09 21:37:35,738 - mmseg - INFO - Iter [7700/80000] lr: 1.298e-06, eta: 1 day, 15:05:19, time: 1.812, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1917, decode.loss_mask: 0.2775, decode.loss_dice: 0.7743, decode.d0.loss_cls: 0.4106, decode.d0.loss_mask: 0.2962, decode.d0.loss_dice: 0.8450, decode.d1.loss_cls: 0.2144, decode.d1.loss_mask: 0.2846, decode.d1.loss_dice: 0.8003, decode.d2.loss_cls: 0.2110, decode.d2.loss_mask: 0.2809, decode.d2.loss_dice: 0.7906, decode.d3.loss_cls: 0.1935, decode.d3.loss_mask: 0.2784, decode.d3.loss_dice: 0.7792, decode.d4.loss_cls: 0.1965, decode.d4.loss_mask: 0.2777, decode.d4.loss_dice: 0.7797, decode.d5.loss_cls: 0.2025, decode.d5.loss_mask: 0.2784, decode.d5.loss_dice: 0.7787, decode.d6.loss_cls: 0.1941, decode.d6.loss_mask: 0.2780, decode.d6.loss_dice: 0.7757, decode.d7.loss_cls: 0.1987, decode.d7.loss_mask: 0.2769, decode.d7.loss_dice: 0.7744, decode.d8.loss_cls: 0.1993, decode.d8.loss_mask: 0.2767, decode.d8.loss_dice: 0.7737, loss: 12.8894 +2022-05-09 21:39:04,737 - mmseg - INFO - Iter [7750/80000] lr: 1.297e-06, eta: 1 day, 15:02:24, time: 1.780, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1728, decode.loss_mask: 0.2878, decode.loss_dice: 0.7647, decode.d0.loss_cls: 0.3990, decode.d0.loss_mask: 0.3104, decode.d0.loss_dice: 0.8264, decode.d1.loss_cls: 0.2019, decode.d1.loss_mask: 0.2919, decode.d1.loss_dice: 0.7853, 
decode.d2.loss_cls: 0.1897, decode.d2.loss_mask: 0.2896, decode.d2.loss_dice: 0.7740, decode.d3.loss_cls: 0.1816, decode.d3.loss_mask: 0.2890, decode.d3.loss_dice: 0.7662, decode.d4.loss_cls: 0.1814, decode.d4.loss_mask: 0.2880, decode.d4.loss_dice: 0.7667, decode.d5.loss_cls: 0.1815, decode.d5.loss_mask: 0.2882, decode.d5.loss_dice: 0.7618, decode.d6.loss_cls: 0.1771, decode.d6.loss_mask: 0.2889, decode.d6.loss_dice: 0.7579, decode.d7.loss_cls: 0.1793, decode.d7.loss_mask: 0.2883, decode.d7.loss_dice: 0.7613, decode.d8.loss_cls: 0.1842, decode.d8.loss_mask: 0.2884, decode.d8.loss_dice: 0.7622, loss: 12.6855 +2022-05-09 21:40:33,237 - mmseg - INFO - Iter [7800/80000] lr: 1.296e-06, eta: 1 day, 14:59:25, time: 1.768, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1868, decode.loss_mask: 0.2896, decode.loss_dice: 0.7575, decode.d0.loss_cls: 0.3979, decode.d0.loss_mask: 0.3089, decode.d0.loss_dice: 0.8300, decode.d1.loss_cls: 0.2028, decode.d1.loss_mask: 0.2938, decode.d1.loss_dice: 0.7884, decode.d2.loss_cls: 0.2071, decode.d2.loss_mask: 0.2917, decode.d2.loss_dice: 0.7651, decode.d3.loss_cls: 0.1994, decode.d3.loss_mask: 0.2909, decode.d3.loss_dice: 0.7565, decode.d4.loss_cls: 0.2006, decode.d4.loss_mask: 0.2899, decode.d4.loss_dice: 0.7600, decode.d5.loss_cls: 0.1932, decode.d5.loss_mask: 0.2895, decode.d5.loss_dice: 0.7593, decode.d6.loss_cls: 0.1945, decode.d6.loss_mask: 0.2891, decode.d6.loss_dice: 0.7571, decode.d7.loss_cls: 0.1934, decode.d7.loss_mask: 0.2887, decode.d7.loss_dice: 0.7559, decode.d8.loss_cls: 0.1966, decode.d8.loss_mask: 0.2881, decode.d8.loss_dice: 0.7576, loss: 12.7799 +2022-05-09 21:42:05,395 - mmseg - INFO - Iter [7850/80000] lr: 1.295e-06, eta: 1 day, 14:57:02, time: 1.845, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1871, decode.loss_mask: 0.2764, decode.loss_dice: 0.7786, decode.d0.loss_cls: 0.3887, decode.d0.loss_mask: 0.2914, decode.d0.loss_dice: 0.8456, decode.d1.loss_cls: 0.2231, decode.d1.loss_mask: 0.2791, 
decode.d1.loss_dice: 0.7982, decode.d2.loss_cls: 0.1994, decode.d2.loss_mask: 0.2781, decode.d2.loss_dice: 0.7905, decode.d3.loss_cls: 0.1915, decode.d3.loss_mask: 0.2783, decode.d3.loss_dice: 0.7792, decode.d4.loss_cls: 0.1936, decode.d4.loss_mask: 0.2769, decode.d4.loss_dice: 0.7821, decode.d5.loss_cls: 0.1927, decode.d5.loss_mask: 0.2777, decode.d5.loss_dice: 0.7796, decode.d6.loss_cls: 0.1903, decode.d6.loss_mask: 0.2770, decode.d6.loss_dice: 0.7741, decode.d7.loss_cls: 0.1872, decode.d7.loss_mask: 0.2764, decode.d7.loss_dice: 0.7756, decode.d8.loss_cls: 0.1899, decode.d8.loss_mask: 0.2762, decode.d8.loss_dice: 0.7811, loss: 12.8157 +2022-05-09 21:43:33,555 - mmseg - INFO - Iter [7900/80000] lr: 1.294e-06, eta: 1 day, 14:54:02, time: 1.763, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1925, decode.loss_mask: 0.2772, decode.loss_dice: 0.7722, decode.d0.loss_cls: 0.4189, decode.d0.loss_mask: 0.2897, decode.d0.loss_dice: 0.8421, decode.d1.loss_cls: 0.2250, decode.d1.loss_mask: 0.2800, decode.d1.loss_dice: 0.7942, decode.d2.loss_cls: 0.2120, decode.d2.loss_mask: 0.2769, decode.d2.loss_dice: 0.7803, decode.d3.loss_cls: 0.2010, decode.d3.loss_mask: 0.2766, decode.d3.loss_dice: 0.7720, decode.d4.loss_cls: 0.2049, decode.d4.loss_mask: 0.2769, decode.d4.loss_dice: 0.7711, decode.d5.loss_cls: 0.2017, decode.d5.loss_mask: 0.2775, decode.d5.loss_dice: 0.7721, decode.d6.loss_cls: 0.1966, decode.d6.loss_mask: 0.2769, decode.d6.loss_dice: 0.7654, decode.d7.loss_cls: 0.1944, decode.d7.loss_mask: 0.2777, decode.d7.loss_dice: 0.7682, decode.d8.loss_cls: 0.1977, decode.d8.loss_mask: 0.2770, decode.d8.loss_dice: 0.7665, loss: 12.8352 +2022-05-09 21:45:00,985 - mmseg - INFO - Iter [7950/80000] lr: 1.293e-06, eta: 1 day, 14:50:58, time: 1.749, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1748, decode.loss_mask: 0.2808, decode.loss_dice: 0.7567, decode.d0.loss_cls: 0.3784, decode.d0.loss_mask: 0.2989, decode.d0.loss_dice: 0.8227, decode.d1.loss_cls: 0.1860, 
decode.d1.loss_mask: 0.2856, decode.d1.loss_dice: 0.7828, decode.d2.loss_cls: 0.1836, decode.d2.loss_mask: 0.2810, decode.d2.loss_dice: 0.7739, decode.d3.loss_cls: 0.1751, decode.d3.loss_mask: 0.2810, decode.d3.loss_dice: 0.7637, decode.d4.loss_cls: 0.1723, decode.d4.loss_mask: 0.2813, decode.d4.loss_dice: 0.7670, decode.d5.loss_cls: 0.1731, decode.d5.loss_mask: 0.2814, decode.d5.loss_dice: 0.7611, decode.d6.loss_cls: 0.1734, decode.d6.loss_mask: 0.2815, decode.d6.loss_dice: 0.7586, decode.d7.loss_cls: 0.1711, decode.d7.loss_mask: 0.2810, decode.d7.loss_dice: 0.7620, decode.d8.loss_cls: 0.1728, decode.d8.loss_mask: 0.2801, decode.d8.loss_dice: 0.7591, loss: 12.5007 +2022-05-09 21:46:32,090 - mmseg - INFO - Saving checkpoint at 8000 iterations +2022-05-09 21:47:01,422 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 21:47:01,425 - mmseg - INFO - Iter [8000/80000] lr: 1.292e-06, eta: 1 day, 14:52:50, time: 2.407, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1934, decode.loss_mask: 0.2810, decode.loss_dice: 0.7835, decode.d0.loss_cls: 0.3968, decode.d0.loss_mask: 0.3019, decode.d0.loss_dice: 0.8490, decode.d1.loss_cls: 0.2083, decode.d1.loss_mask: 0.2869, decode.d1.loss_dice: 0.8078, decode.d2.loss_cls: 0.1962, decode.d2.loss_mask: 0.2848, decode.d2.loss_dice: 0.7914, decode.d3.loss_cls: 0.1945, decode.d3.loss_mask: 0.2839, decode.d3.loss_dice: 0.7826, decode.d4.loss_cls: 0.1995, decode.d4.loss_mask: 0.2813, decode.d4.loss_dice: 0.7834, decode.d5.loss_cls: 0.1885, decode.d5.loss_mask: 0.2825, decode.d5.loss_dice: 0.7880, decode.d6.loss_cls: 0.1931, decode.d6.loss_mask: 0.2799, decode.d6.loss_dice: 0.7810, decode.d7.loss_cls: 0.1886, decode.d7.loss_mask: 0.2799, decode.d7.loss_dice: 0.7810, decode.d8.loss_cls: 0.1883, decode.d8.loss_mask: 0.2817, decode.d8.loss_dice: 0.7780, loss: 12.9169 +2022-05-09 21:48:56,501 - mmseg - INFO - per class results: +2022-05-09 21:48:56,514 - mmseg - INFO - 
++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.72 | 99.16 | +| sidewalk | 88.81 | 95.05 | +| building | 94.58 | 96.95 | +| wall | 73.51 | 83.46 | +| fence | 75.96 | 90.45 | +| pole | 72.34 | 84.1 | +| traffic light | 76.99 | 88.94 | +| traffic sign | 84.33 | 93.22 | +| vegetation | 93.48 | 96.8 | +| terrain | 65.49 | 75.42 | +| sky | 96.01 | 98.41 | +| person | 86.7 | 93.38 | +| rider | 72.43 | 83.17 | +| car | 96.37 | 98.25 | +| truck | 89.19 | 93.82 | +| bus | 92.88 | 95.82 | +| train | 87.24 | 91.45 | +| motorcycle | 76.77 | 88.11 | +| bicycle | 82.51 | 92.08 | ++---------------+-------+-------+ +2022-05-09 21:48:56,515 - mmseg - INFO - Summary: +2022-05-09 21:48:56,515 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.09 | 84.44 | 91.48 | ++-------+-------+-------+ +2022-05-09 21:48:56,519 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_6000.pth was removed +2022-05-09 21:49:26,425 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_8000.pth. +2022-05-09 21:49:26,440 - mmseg - INFO - Best mIoU is 0.8444 at 8000 iter. 
+2022-05-09 21:49:26,452 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 21:49:26,453 - mmseg - INFO - Iter(val) [32] aAcc: 0.9709, mIoU: 0.8444, mAcc: 0.9148, IoU.road: 0.9872, IoU.sidewalk: 0.8881, IoU.building: 0.9458, IoU.wall: 0.7351, IoU.fence: 0.7596, IoU.pole: 0.7234, IoU.traffic light: 0.7699, IoU.traffic sign: 0.8433, IoU.vegetation: 0.9348, IoU.terrain: 0.6549, IoU.sky: 0.9601, IoU.person: 0.8670, IoU.rider: 0.7243, IoU.car: 0.9637, IoU.truck: 0.8919, IoU.bus: 0.9288, IoU.train: 0.8724, IoU.motorcycle: 0.7677, IoU.bicycle: 0.8251, Acc.road: 0.9916, Acc.sidewalk: 0.9505, Acc.building: 0.9695, Acc.wall: 0.8346, Acc.fence: 0.9045, Acc.pole: 0.8410, Acc.traffic light: 0.8894, Acc.traffic sign: 0.9322, Acc.vegetation: 0.9680, Acc.terrain: 0.7542, Acc.sky: 0.9841, Acc.person: 0.9338, Acc.rider: 0.8317, Acc.car: 0.9825, Acc.truck: 0.9382, Acc.bus: 0.9582, Acc.train: 0.9145, Acc.motorcycle: 0.8811, Acc.bicycle: 0.9208 +2022-05-09 21:50:55,931 - mmseg - INFO - Iter [8050/80000] lr: 1.291e-06, eta: 1 day, 15:11:41, time: 4.692, data_time: 2.920, memory: 64699, decode.loss_cls: 0.1784, decode.loss_mask: 0.2775, decode.loss_dice: 0.7609, decode.d0.loss_cls: 0.3925, decode.d0.loss_mask: 0.2960, decode.d0.loss_dice: 0.8342, decode.d1.loss_cls: 0.2020, decode.d1.loss_mask: 0.2825, decode.d1.loss_dice: 0.7822, decode.d2.loss_cls: 0.1892, decode.d2.loss_mask: 0.2805, decode.d2.loss_dice: 0.7720, decode.d3.loss_cls: 0.1789, decode.d3.loss_mask: 0.2800, decode.d3.loss_dice: 0.7616, decode.d4.loss_cls: 0.1830, decode.d4.loss_mask: 0.2800, decode.d4.loss_dice: 0.7633, decode.d5.loss_cls: 0.1830, decode.d5.loss_mask: 0.2798, decode.d5.loss_dice: 0.7630, decode.d6.loss_cls: 0.1791, decode.d6.loss_mask: 0.2798, decode.d6.loss_dice: 0.7598, decode.d7.loss_cls: 0.1764, decode.d7.loss_mask: 0.2786, decode.d7.loss_dice: 0.7620, decode.d8.loss_cls: 0.1759, decode.d8.loss_mask: 0.2793, decode.d8.loss_dice: 0.7636, loss: 12.5752 
+2022-05-09 21:52:25,280 - mmseg - INFO - Iter [8100/80000] lr: 1.290e-06, eta: 1 day, 15:08:45, time: 1.787, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1779, decode.loss_mask: 0.2820, decode.loss_dice: 0.7628, decode.d0.loss_cls: 0.4057, decode.d0.loss_mask: 0.2992, decode.d0.loss_dice: 0.8249, decode.d1.loss_cls: 0.2052, decode.d1.loss_mask: 0.2842, decode.d1.loss_dice: 0.7743, decode.d2.loss_cls: 0.1899, decode.d2.loss_mask: 0.2822, decode.d2.loss_dice: 0.7693, decode.d3.loss_cls: 0.1802, decode.d3.loss_mask: 0.2822, decode.d3.loss_dice: 0.7606, decode.d4.loss_cls: 0.1817, decode.d4.loss_mask: 0.2829, decode.d4.loss_dice: 0.7574, decode.d5.loss_cls: 0.1789, decode.d5.loss_mask: 0.2821, decode.d5.loss_dice: 0.7582, decode.d6.loss_cls: 0.1762, decode.d6.loss_mask: 0.2820, decode.d6.loss_dice: 0.7562, decode.d7.loss_cls: 0.1759, decode.d7.loss_mask: 0.2819, decode.d7.loss_dice: 0.7583, decode.d8.loss_cls: 0.1783, decode.d8.loss_mask: 0.2818, decode.d8.loss_dice: 0.7564, loss: 12.5687 +2022-05-09 21:53:55,203 - mmseg - INFO - Iter [8150/80000] lr: 1.290e-06, eta: 1 day, 15:05:56, time: 1.799, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1796, decode.loss_mask: 0.2730, decode.loss_dice: 0.7545, decode.d0.loss_cls: 0.3926, decode.d0.loss_mask: 0.2914, decode.d0.loss_dice: 0.8298, decode.d1.loss_cls: 0.2058, decode.d1.loss_mask: 0.2763, decode.d1.loss_dice: 0.7837, decode.d2.loss_cls: 0.1957, decode.d2.loss_mask: 0.2746, decode.d2.loss_dice: 0.7714, decode.d3.loss_cls: 0.1896, decode.d3.loss_mask: 0.2733, decode.d3.loss_dice: 0.7665, decode.d4.loss_cls: 0.1838, decode.d4.loss_mask: 0.2742, decode.d4.loss_dice: 0.7631, decode.d5.loss_cls: 0.1832, decode.d5.loss_mask: 0.2732, decode.d5.loss_dice: 0.7640, decode.d6.loss_cls: 0.1811, decode.d6.loss_mask: 0.2740, decode.d6.loss_dice: 0.7582, decode.d7.loss_cls: 0.1811, decode.d7.loss_mask: 0.2739, decode.d7.loss_dice: 0.7595, decode.d8.loss_cls: 0.1811, decode.d8.loss_mask: 0.2744, decode.d8.loss_dice: 
0.7611, loss: 12.5437 +2022-05-09 21:55:27,561 - mmseg - INFO - Iter [8200/80000] lr: 1.289e-06, eta: 1 day, 15:03:29, time: 1.847, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1795, decode.loss_mask: 0.2849, decode.loss_dice: 0.7769, decode.d0.loss_cls: 0.3973, decode.d0.loss_mask: 0.3032, decode.d0.loss_dice: 0.8481, decode.d1.loss_cls: 0.2112, decode.d1.loss_mask: 0.2864, decode.d1.loss_dice: 0.7986, decode.d2.loss_cls: 0.2000, decode.d2.loss_mask: 0.2853, decode.d2.loss_dice: 0.7851, decode.d3.loss_cls: 0.1881, decode.d3.loss_mask: 0.2847, decode.d3.loss_dice: 0.7800, decode.d4.loss_cls: 0.1940, decode.d4.loss_mask: 0.2845, decode.d4.loss_dice: 0.7776, decode.d5.loss_cls: 0.1927, decode.d5.loss_mask: 0.2845, decode.d5.loss_dice: 0.7801, decode.d6.loss_cls: 0.1894, decode.d6.loss_mask: 0.2850, decode.d6.loss_dice: 0.7727, decode.d7.loss_cls: 0.1856, decode.d7.loss_mask: 0.2854, decode.d7.loss_dice: 0.7734, decode.d8.loss_cls: 0.1812, decode.d8.loss_mask: 0.2852, decode.d8.loss_dice: 0.7757, loss: 12.8564 +2022-05-09 21:56:56,761 - mmseg - INFO - Iter [8250/80000] lr: 1.288e-06, eta: 1 day, 15:00:36, time: 1.784, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1688, decode.loss_mask: 0.2734, decode.loss_dice: 0.7555, decode.d0.loss_cls: 0.3939, decode.d0.loss_mask: 0.2896, decode.d0.loss_dice: 0.8286, decode.d1.loss_cls: 0.1817, decode.d1.loss_mask: 0.2769, decode.d1.loss_dice: 0.7770, decode.d2.loss_cls: 0.1786, decode.d2.loss_mask: 0.2736, decode.d2.loss_dice: 0.7651, decode.d3.loss_cls: 0.1703, decode.d3.loss_mask: 0.2737, decode.d3.loss_dice: 0.7606, decode.d4.loss_cls: 0.1746, decode.d4.loss_mask: 0.2737, decode.d4.loss_dice: 0.7594, decode.d5.loss_cls: 0.1775, decode.d5.loss_mask: 0.2728, decode.d5.loss_dice: 0.7619, decode.d6.loss_cls: 0.1721, decode.d6.loss_mask: 0.2723, decode.d6.loss_dice: 0.7527, decode.d7.loss_cls: 0.1684, decode.d7.loss_mask: 0.2734, decode.d7.loss_dice: 0.7583, decode.d8.loss_cls: 0.1656, decode.d8.loss_mask: 0.2734, 
decode.d8.loss_dice: 0.7610, loss: 12.3843 +2022-05-09 21:58:26,123 - mmseg - INFO - Iter [8300/80000] lr: 1.287e-06, eta: 1 day, 14:57:44, time: 1.787, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1827, decode.loss_mask: 0.2925, decode.loss_dice: 0.7721, decode.d0.loss_cls: 0.3866, decode.d0.loss_mask: 0.3122, decode.d0.loss_dice: 0.8420, decode.d1.loss_cls: 0.2212, decode.d1.loss_mask: 0.2959, decode.d1.loss_dice: 0.7903, decode.d2.loss_cls: 0.2114, decode.d2.loss_mask: 0.2934, decode.d2.loss_dice: 0.7863, decode.d3.loss_cls: 0.1972, decode.d3.loss_mask: 0.2903, decode.d3.loss_dice: 0.7710, decode.d4.loss_cls: 0.2007, decode.d4.loss_mask: 0.2907, decode.d4.loss_dice: 0.7686, decode.d5.loss_cls: 0.1860, decode.d5.loss_mask: 0.2908, decode.d5.loss_dice: 0.7675, decode.d6.loss_cls: 0.1870, decode.d6.loss_mask: 0.2913, decode.d6.loss_dice: 0.7772, decode.d7.loss_cls: 0.1881, decode.d7.loss_mask: 0.2907, decode.d7.loss_dice: 0.7729, decode.d8.loss_cls: 0.1980, decode.d8.loss_mask: 0.2915, decode.d8.loss_dice: 0.7682, loss: 12.9146 +2022-05-09 21:59:55,452 - mmseg - INFO - Iter [8350/80000] lr: 1.286e-06, eta: 1 day, 14:54:53, time: 1.786, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1851, decode.loss_mask: 0.2732, decode.loss_dice: 0.7496, decode.d0.loss_cls: 0.4024, decode.d0.loss_mask: 0.2920, decode.d0.loss_dice: 0.8177, decode.d1.loss_cls: 0.2051, decode.d1.loss_mask: 0.2783, decode.d1.loss_dice: 0.7727, decode.d2.loss_cls: 0.1942, decode.d2.loss_mask: 0.2748, decode.d2.loss_dice: 0.7633, decode.d3.loss_cls: 0.1869, decode.d3.loss_mask: 0.2731, decode.d3.loss_dice: 0.7542, decode.d4.loss_cls: 0.1912, decode.d4.loss_mask: 0.2732, decode.d4.loss_dice: 0.7532, decode.d5.loss_cls: 0.1919, decode.d5.loss_mask: 0.2735, decode.d5.loss_dice: 0.7543, decode.d6.loss_cls: 0.1930, decode.d6.loss_mask: 0.2725, decode.d6.loss_dice: 0.7480, decode.d7.loss_cls: 0.1914, decode.d7.loss_mask: 0.2732, decode.d7.loss_dice: 0.7534, decode.d8.loss_cls: 0.1890, 
decode.d8.loss_mask: 0.2731, decode.d8.loss_dice: 0.7563, loss: 12.5096 +2022-05-09 22:01:28,695 - mmseg - INFO - Iter [8400/80000] lr: 1.285e-06, eta: 1 day, 14:52:37, time: 1.865, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1742, decode.loss_mask: 0.2775, decode.loss_dice: 0.7478, decode.d0.loss_cls: 0.3777, decode.d0.loss_mask: 0.2970, decode.d0.loss_dice: 0.8087, decode.d1.loss_cls: 0.1946, decode.d1.loss_mask: 0.2808, decode.d1.loss_dice: 0.7645, decode.d2.loss_cls: 0.1813, decode.d2.loss_mask: 0.2799, decode.d2.loss_dice: 0.7538, decode.d3.loss_cls: 0.1739, decode.d3.loss_mask: 0.2770, decode.d3.loss_dice: 0.7401, decode.d4.loss_cls: 0.1688, decode.d4.loss_mask: 0.2769, decode.d4.loss_dice: 0.7457, decode.d5.loss_cls: 0.1763, decode.d5.loss_mask: 0.2770, decode.d5.loss_dice: 0.7390, decode.d6.loss_cls: 0.1738, decode.d6.loss_mask: 0.2773, decode.d6.loss_dice: 0.7384, decode.d7.loss_cls: 0.1775, decode.d7.loss_mask: 0.2778, decode.d7.loss_dice: 0.7446, decode.d8.loss_cls: 0.1788, decode.d8.loss_mask: 0.2774, decode.d8.loss_dice: 0.7422, loss: 12.3004 +2022-05-09 22:02:57,206 - mmseg - INFO - Iter [8450/80000] lr: 1.284e-06, eta: 1 day, 14:49:42, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1761, decode.loss_mask: 0.2725, decode.loss_dice: 0.7399, decode.d0.loss_cls: 0.3792, decode.d0.loss_mask: 0.2874, decode.d0.loss_dice: 0.8118, decode.d1.loss_cls: 0.2009, decode.d1.loss_mask: 0.2769, decode.d1.loss_dice: 0.7573, decode.d2.loss_cls: 0.1918, decode.d2.loss_mask: 0.2749, decode.d2.loss_dice: 0.7494, decode.d3.loss_cls: 0.1870, decode.d3.loss_mask: 0.2746, decode.d3.loss_dice: 0.7443, decode.d4.loss_cls: 0.1804, decode.d4.loss_mask: 0.2728, decode.d4.loss_dice: 0.7413, decode.d5.loss_cls: 0.1825, decode.d5.loss_mask: 0.2723, decode.d5.loss_dice: 0.7424, decode.d6.loss_cls: 0.1784, decode.d6.loss_mask: 0.2728, decode.d6.loss_dice: 0.7386, decode.d7.loss_cls: 0.1779, decode.d7.loss_mask: 0.2724, decode.d7.loss_dice: 0.7421, 
decode.d8.loss_cls: 0.1742, decode.d8.loss_mask: 0.2730, decode.d8.loss_dice: 0.7415, loss: 12.2867 +2022-05-09 22:04:26,507 - mmseg - INFO - Iter [8500/80000] lr: 1.283e-06, eta: 1 day, 14:46:53, time: 1.786, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1760, decode.loss_mask: 0.2804, decode.loss_dice: 0.7624, decode.d0.loss_cls: 0.3953, decode.d0.loss_mask: 0.2979, decode.d0.loss_dice: 0.8303, decode.d1.loss_cls: 0.2125, decode.d1.loss_mask: 0.2842, decode.d1.loss_dice: 0.7890, decode.d2.loss_cls: 0.1914, decode.d2.loss_mask: 0.2827, decode.d2.loss_dice: 0.7738, decode.d3.loss_cls: 0.1921, decode.d3.loss_mask: 0.2821, decode.d3.loss_dice: 0.7626, decode.d4.loss_cls: 0.1864, decode.d4.loss_mask: 0.2812, decode.d4.loss_dice: 0.7632, decode.d5.loss_cls: 0.1811, decode.d5.loss_mask: 0.2812, decode.d5.loss_dice: 0.7638, decode.d6.loss_cls: 0.1862, decode.d6.loss_mask: 0.2816, decode.d6.loss_dice: 0.7646, decode.d7.loss_cls: 0.1832, decode.d7.loss_mask: 0.2818, decode.d7.loss_dice: 0.7601, decode.d8.loss_cls: 0.1784, decode.d8.loss_mask: 0.2812, decode.d8.loss_dice: 0.7610, loss: 12.6481 +2022-05-09 22:05:55,723 - mmseg - INFO - Iter [8550/80000] lr: 1.282e-06, eta: 1 day, 14:44:05, time: 1.784, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1843, decode.loss_mask: 0.2818, decode.loss_dice: 0.7723, decode.d0.loss_cls: 0.4012, decode.d0.loss_mask: 0.2994, decode.d0.loss_dice: 0.8430, decode.d1.loss_cls: 0.2144, decode.d1.loss_mask: 0.2834, decode.d1.loss_dice: 0.8006, decode.d2.loss_cls: 0.1947, decode.d2.loss_mask: 0.2808, decode.d2.loss_dice: 0.7857, decode.d3.loss_cls: 0.1882, decode.d3.loss_mask: 0.2829, decode.d3.loss_dice: 0.7755, decode.d4.loss_cls: 0.1884, decode.d4.loss_mask: 0.2816, decode.d4.loss_dice: 0.7730, decode.d5.loss_cls: 0.1873, decode.d5.loss_mask: 0.2816, decode.d5.loss_dice: 0.7763, decode.d6.loss_cls: 0.1865, decode.d6.loss_mask: 0.2822, decode.d6.loss_dice: 0.7725, decode.d7.loss_cls: 0.1879, decode.d7.loss_mask: 0.2817, 
decode.d7.loss_dice: 0.7710, decode.d8.loss_cls: 0.1936, decode.d8.loss_mask: 0.2826, decode.d8.loss_dice: 0.7735, loss: 12.8081 +2022-05-09 22:07:27,666 - mmseg - INFO - Iter [8600/80000] lr: 1.281e-06, eta: 1 day, 14:41:41, time: 1.839, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1656, decode.loss_mask: 0.2711, decode.loss_dice: 0.7586, decode.d0.loss_cls: 0.3711, decode.d0.loss_mask: 0.2896, decode.d0.loss_dice: 0.8263, decode.d1.loss_cls: 0.1975, decode.d1.loss_mask: 0.2759, decode.d1.loss_dice: 0.7765, decode.d2.loss_cls: 0.1795, decode.d2.loss_mask: 0.2722, decode.d2.loss_dice: 0.7648, decode.d3.loss_cls: 0.1682, decode.d3.loss_mask: 0.2706, decode.d3.loss_dice: 0.7565, decode.d4.loss_cls: 0.1748, decode.d4.loss_mask: 0.2716, decode.d4.loss_dice: 0.7576, decode.d5.loss_cls: 0.1746, decode.d5.loss_mask: 0.2715, decode.d5.loss_dice: 0.7561, decode.d6.loss_cls: 0.1734, decode.d6.loss_mask: 0.2703, decode.d6.loss_dice: 0.7567, decode.d7.loss_cls: 0.1683, decode.d7.loss_mask: 0.2704, decode.d7.loss_dice: 0.7574, decode.d8.loss_cls: 0.1787, decode.d8.loss_mask: 0.2707, decode.d8.loss_dice: 0.7565, loss: 12.3528 +2022-05-09 22:08:57,383 - mmseg - INFO - Iter [8650/80000] lr: 1.281e-06, eta: 1 day, 14:38:59, time: 1.794, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1706, decode.loss_mask: 0.2700, decode.loss_dice: 0.7645, decode.d0.loss_cls: 0.3863, decode.d0.loss_mask: 0.2861, decode.d0.loss_dice: 0.8286, decode.d1.loss_cls: 0.1897, decode.d1.loss_mask: 0.2734, decode.d1.loss_dice: 0.7848, decode.d2.loss_cls: 0.1784, decode.d2.loss_mask: 0.2708, decode.d2.loss_dice: 0.7716, decode.d3.loss_cls: 0.1819, decode.d3.loss_mask: 0.2713, decode.d3.loss_dice: 0.7656, decode.d4.loss_cls: 0.1770, decode.d4.loss_mask: 0.2707, decode.d4.loss_dice: 0.7648, decode.d5.loss_cls: 0.1747, decode.d5.loss_mask: 0.2700, decode.d5.loss_dice: 0.7656, decode.d6.loss_cls: 0.1742, decode.d6.loss_mask: 0.2706, decode.d6.loss_dice: 0.7647, decode.d7.loss_cls: 0.1814, 
decode.d7.loss_mask: 0.2703, decode.d7.loss_dice: 0.7651, decode.d8.loss_cls: 0.1755, decode.d8.loss_mask: 0.2701, decode.d8.loss_dice: 0.7667, loss: 12.4549 +2022-05-09 22:10:26,577 - mmseg - INFO - Iter [8700/80000] lr: 1.280e-06, eta: 1 day, 14:36:13, time: 1.784, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1759, decode.loss_mask: 0.2775, decode.loss_dice: 0.7444, decode.d0.loss_cls: 0.3879, decode.d0.loss_mask: 0.2942, decode.d0.loss_dice: 0.8159, decode.d1.loss_cls: 0.1933, decode.d1.loss_mask: 0.2806, decode.d1.loss_dice: 0.7675, decode.d2.loss_cls: 0.1811, decode.d2.loss_mask: 0.2769, decode.d2.loss_dice: 0.7540, decode.d3.loss_cls: 0.1773, decode.d3.loss_mask: 0.2778, decode.d3.loss_dice: 0.7438, decode.d4.loss_cls: 0.1715, decode.d4.loss_mask: 0.2772, decode.d4.loss_dice: 0.7469, decode.d5.loss_cls: 0.1743, decode.d5.loss_mask: 0.2770, decode.d5.loss_dice: 0.7478, decode.d6.loss_cls: 0.1753, decode.d6.loss_mask: 0.2765, decode.d6.loss_dice: 0.7407, decode.d7.loss_cls: 0.1729, decode.d7.loss_mask: 0.2780, decode.d7.loss_dice: 0.7472, decode.d8.loss_cls: 0.1690, decode.d8.loss_mask: 0.2774, decode.d8.loss_dice: 0.7458, loss: 12.3257 +2022-05-09 22:11:58,257 - mmseg - INFO - Iter [8750/80000] lr: 1.279e-06, eta: 1 day, 14:33:49, time: 1.834, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1769, decode.loss_mask: 0.2827, decode.loss_dice: 0.7677, decode.d0.loss_cls: 0.3853, decode.d0.loss_mask: 0.3023, decode.d0.loss_dice: 0.8313, decode.d1.loss_cls: 0.2108, decode.d1.loss_mask: 0.2877, decode.d1.loss_dice: 0.7837, decode.d2.loss_cls: 0.1972, decode.d2.loss_mask: 0.2856, decode.d2.loss_dice: 0.7736, decode.d3.loss_cls: 0.1793, decode.d3.loss_mask: 0.2830, decode.d3.loss_dice: 0.7670, decode.d4.loss_cls: 0.1832, decode.d4.loss_mask: 0.2844, decode.d4.loss_dice: 0.7652, decode.d5.loss_cls: 0.1858, decode.d5.loss_mask: 0.2836, decode.d5.loss_dice: 0.7671, decode.d6.loss_cls: 0.1816, decode.d6.loss_mask: 0.2839, decode.d6.loss_dice: 0.7602, 
decode.d7.loss_cls: 0.1830, decode.d7.loss_mask: 0.2839, decode.d7.loss_dice: 0.7633, decode.d8.loss_cls: 0.1752, decode.d8.loss_mask: 0.2837, decode.d8.loss_dice: 0.7660, loss: 12.6644 +2022-05-09 22:13:29,204 - mmseg - INFO - Iter [8800/80000] lr: 1.278e-06, eta: 1 day, 14:31:19, time: 1.819, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1718, decode.loss_mask: 0.2772, decode.loss_dice: 0.7593, decode.d0.loss_cls: 0.3947, decode.d0.loss_mask: 0.2935, decode.d0.loss_dice: 0.8283, decode.d1.loss_cls: 0.2085, decode.d1.loss_mask: 0.2806, decode.d1.loss_dice: 0.7768, decode.d2.loss_cls: 0.1899, decode.d2.loss_mask: 0.2794, decode.d2.loss_dice: 0.7677, decode.d3.loss_cls: 0.1837, decode.d3.loss_mask: 0.2786, decode.d3.loss_dice: 0.7608, decode.d4.loss_cls: 0.1755, decode.d4.loss_mask: 0.2783, decode.d4.loss_dice: 0.7624, decode.d5.loss_cls: 0.1730, decode.d5.loss_mask: 0.2779, decode.d5.loss_dice: 0.7604, decode.d6.loss_cls: 0.1743, decode.d6.loss_mask: 0.2777, decode.d6.loss_dice: 0.7625, decode.d7.loss_cls: 0.1755, decode.d7.loss_mask: 0.2779, decode.d7.loss_dice: 0.7614, decode.d8.loss_cls: 0.1740, decode.d8.loss_mask: 0.2777, decode.d8.loss_dice: 0.7612, loss: 12.5204 +2022-05-09 22:14:59,177 - mmseg - INFO - Iter [8850/80000] lr: 1.277e-06, eta: 1 day, 14:28:42, time: 1.799, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1871, decode.loss_mask: 0.2761, decode.loss_dice: 0.7647, decode.d0.loss_cls: 0.3798, decode.d0.loss_mask: 0.2897, decode.d0.loss_dice: 0.8275, decode.d1.loss_cls: 0.2046, decode.d1.loss_mask: 0.2774, decode.d1.loss_dice: 0.7854, decode.d2.loss_cls: 0.1888, decode.d2.loss_mask: 0.2777, decode.d2.loss_dice: 0.7711, decode.d3.loss_cls: 0.1909, decode.d3.loss_mask: 0.2761, decode.d3.loss_dice: 0.7637, decode.d4.loss_cls: 0.1850, decode.d4.loss_mask: 0.2764, decode.d4.loss_dice: 0.7722, decode.d5.loss_cls: 0.1822, decode.d5.loss_mask: 0.2761, decode.d5.loss_dice: 0.7670, decode.d6.loss_cls: 0.1839, decode.d6.loss_mask: 0.2771, 
decode.d6.loss_dice: 0.7612, decode.d7.loss_cls: 0.1851, decode.d7.loss_mask: 0.2768, decode.d7.loss_dice: 0.7655, decode.d8.loss_cls: 0.1842, decode.d8.loss_mask: 0.2765, decode.d8.loss_dice: 0.7635, loss: 12.5932 +2022-05-09 22:16:28,738 - mmseg - INFO - Iter [8900/80000] lr: 1.276e-06, eta: 1 day, 14:26:02, time: 1.791, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1934, decode.loss_mask: 0.2807, decode.loss_dice: 0.7580, decode.d0.loss_cls: 0.3984, decode.d0.loss_mask: 0.2996, decode.d0.loss_dice: 0.8323, decode.d1.loss_cls: 0.2151, decode.d1.loss_mask: 0.2849, decode.d1.loss_dice: 0.7848, decode.d2.loss_cls: 0.1958, decode.d2.loss_mask: 0.2841, decode.d2.loss_dice: 0.7680, decode.d3.loss_cls: 0.1930, decode.d3.loss_mask: 0.2827, decode.d3.loss_dice: 0.7599, decode.d4.loss_cls: 0.1946, decode.d4.loss_mask: 0.2826, decode.d4.loss_dice: 0.7601, decode.d5.loss_cls: 0.1910, decode.d5.loss_mask: 0.2819, decode.d5.loss_dice: 0.7577, decode.d6.loss_cls: 0.1888, decode.d6.loss_mask: 0.2821, decode.d6.loss_dice: 0.7608, decode.d7.loss_cls: 0.1905, decode.d7.loss_mask: 0.2819, decode.d7.loss_dice: 0.7548, decode.d8.loss_cls: 0.1965, decode.d8.loss_mask: 0.2821, decode.d8.loss_dice: 0.7548, loss: 12.6906 +2022-05-09 22:17:58,690 - mmseg - INFO - Iter [8950/80000] lr: 1.275e-06, eta: 1 day, 14:23:27, time: 1.799, data_time: 0.063, memory: 64699, decode.loss_cls: 0.1756, decode.loss_mask: 0.2749, decode.loss_dice: 0.7663, decode.d0.loss_cls: 0.3814, decode.d0.loss_mask: 0.2937, decode.d0.loss_dice: 0.8274, decode.d1.loss_cls: 0.2066, decode.d1.loss_mask: 0.2783, decode.d1.loss_dice: 0.7846, decode.d2.loss_cls: 0.1945, decode.d2.loss_mask: 0.2752, decode.d2.loss_dice: 0.7728, decode.d3.loss_cls: 0.1741, decode.d3.loss_mask: 0.2762, decode.d3.loss_dice: 0.7674, decode.d4.loss_cls: 0.1832, decode.d4.loss_mask: 0.2755, decode.d4.loss_dice: 0.7657, decode.d5.loss_cls: 0.1804, decode.d5.loss_mask: 0.2764, decode.d5.loss_dice: 0.7665, decode.d6.loss_cls: 0.1684, 
decode.d6.loss_mask: 0.2754, decode.d6.loss_dice: 0.7612, decode.d7.loss_cls: 0.1789, decode.d7.loss_mask: 0.2750, decode.d7.loss_dice: 0.7635, decode.d8.loss_cls: 0.1739, decode.d8.loss_mask: 0.2752, decode.d8.loss_dice: 0.7649, loss: 12.5333 +2022-05-09 22:19:27,725 - mmseg - INFO - Saving checkpoint at 9000 iterations +2022-05-09 22:20:01,736 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 22:20:01,744 - mmseg - INFO - Iter [9000/80000] lr: 1.274e-06, eta: 1 day, 14:25:12, time: 2.459, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1742, decode.loss_mask: 0.2784, decode.loss_dice: 0.7603, decode.d0.loss_cls: 0.3855, decode.d0.loss_mask: 0.2951, decode.d0.loss_dice: 0.8239, decode.d1.loss_cls: 0.1976, decode.d1.loss_mask: 0.2824, decode.d1.loss_dice: 0.7840, decode.d2.loss_cls: 0.1843, decode.d2.loss_mask: 0.2796, decode.d2.loss_dice: 0.7731, decode.d3.loss_cls: 0.1824, decode.d3.loss_mask: 0.2787, decode.d3.loss_dice: 0.7636, decode.d4.loss_cls: 0.1771, decode.d4.loss_mask: 0.2783, decode.d4.loss_dice: 0.7618, decode.d5.loss_cls: 0.1779, decode.d5.loss_mask: 0.2781, decode.d5.loss_dice: 0.7638, decode.d6.loss_cls: 0.1782, decode.d6.loss_mask: 0.2784, decode.d6.loss_dice: 0.7608, decode.d7.loss_cls: 0.1792, decode.d7.loss_mask: 0.2779, decode.d7.loss_dice: 0.7634, decode.d8.loss_cls: 0.1686, decode.d8.loss_mask: 0.2779, decode.d8.loss_dice: 0.7615, loss: 12.5261 +2022-05-09 22:21:57,349 - mmseg - INFO - per class results: +2022-05-09 22:21:57,355 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.72 | 99.22 | +| sidewalk | 89.37 | 94.73 | +| building | 94.48 | 97.09 | +| wall | 67.68 | 83.67 | +| fence | 73.95 | 83.39 | +| pole | 72.28 | 85.79 | +| traffic light | 76.52 | 88.54 | +| traffic sign | 84.62 | 91.65 | +| vegetation | 93.34 | 96.34 | +| terrain | 66.94 | 81.39 | +| sky | 95.87 | 98.48 | +| person | 87.05 | 93.83 | +| rider | 73.83 
| 86.4 | +| car | 96.47 | 98.1 | +| truck | 92.46 | 96.51 | +| bus | 93.61 | 96.53 | +| train | 87.47 | 91.35 | +| motorcycle | 74.96 | 86.74 | +| bicycle | 82.47 | 90.27 | ++---------------+-------+-------+ +2022-05-09 22:21:57,356 - mmseg - INFO - Summary: +2022-05-09 22:21:57,356 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.05 | 84.32 | 91.58 | ++-------+-------+-------+ +2022-05-09 22:21:57,360 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 22:21:57,360 - mmseg - INFO - Iter(val) [32] aAcc: 0.9705, mIoU: 0.8432, mAcc: 0.9158, IoU.road: 0.9872, IoU.sidewalk: 0.8937, IoU.building: 0.9448, IoU.wall: 0.6768, IoU.fence: 0.7395, IoU.pole: 0.7228, IoU.traffic light: 0.7652, IoU.traffic sign: 0.8462, IoU.vegetation: 0.9334, IoU.terrain: 0.6694, IoU.sky: 0.9587, IoU.person: 0.8705, IoU.rider: 0.7383, IoU.car: 0.9647, IoU.truck: 0.9246, IoU.bus: 0.9361, IoU.train: 0.8747, IoU.motorcycle: 0.7496, IoU.bicycle: 0.8247, Acc.road: 0.9922, Acc.sidewalk: 0.9473, Acc.building: 0.9709, Acc.wall: 0.8367, Acc.fence: 0.8339, Acc.pole: 0.8579, Acc.traffic light: 0.8854, Acc.traffic sign: 0.9165, Acc.vegetation: 0.9634, Acc.terrain: 0.8139, Acc.sky: 0.9848, Acc.person: 0.9383, Acc.rider: 0.8640, Acc.car: 0.9810, Acc.truck: 0.9651, Acc.bus: 0.9653, Acc.train: 0.9135, Acc.motorcycle: 0.8674, Acc.bicycle: 0.9027 +2022-05-09 22:23:27,232 - mmseg - INFO - Iter [9050/80000] lr: 1.273e-06, eta: 1 day, 14:37:42, time: 4.111, data_time: 2.331, memory: 64699, decode.loss_cls: 0.1603, decode.loss_mask: 0.2756, decode.loss_dice: 0.7418, decode.d0.loss_cls: 0.3858, decode.d0.loss_mask: 0.2902, decode.d0.loss_dice: 0.8041, decode.d1.loss_cls: 0.1836, decode.d1.loss_mask: 0.2763, decode.d1.loss_dice: 0.7594, decode.d2.loss_cls: 0.1702, decode.d2.loss_mask: 0.2746, decode.d2.loss_dice: 0.7492, decode.d3.loss_cls: 0.1633, decode.d3.loss_mask: 0.2762, decode.d3.loss_dice: 0.7353, decode.d4.loss_cls: 
0.1654, decode.d4.loss_mask: 0.2755, decode.d4.loss_dice: 0.7396, decode.d5.loss_cls: 0.1672, decode.d5.loss_mask: 0.2757, decode.d5.loss_dice: 0.7428, decode.d6.loss_cls: 0.1650, decode.d6.loss_mask: 0.2753, decode.d6.loss_dice: 0.7355, decode.d7.loss_cls: 0.1603, decode.d7.loss_mask: 0.2754, decode.d7.loss_dice: 0.7328, decode.d8.loss_cls: 0.1591, decode.d8.loss_mask: 0.2744, decode.d8.loss_dice: 0.7382, loss: 12.1284 +2022-05-09 22:24:56,686 - mmseg - INFO - Iter [9100/80000] lr: 1.272e-06, eta: 1 day, 14:34:58, time: 1.790, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1905, decode.loss_mask: 0.2694, decode.loss_dice: 0.7485, decode.d0.loss_cls: 0.3951, decode.d0.loss_mask: 0.2866, decode.d0.loss_dice: 0.8155, decode.d1.loss_cls: 0.2140, decode.d1.loss_mask: 0.2738, decode.d1.loss_dice: 0.7732, decode.d2.loss_cls: 0.1993, decode.d2.loss_mask: 0.2711, decode.d2.loss_dice: 0.7583, decode.d3.loss_cls: 0.1852, decode.d3.loss_mask: 0.2705, decode.d3.loss_dice: 0.7475, decode.d4.loss_cls: 0.1954, decode.d4.loss_mask: 0.2704, decode.d4.loss_dice: 0.7511, decode.d5.loss_cls: 0.1927, decode.d5.loss_mask: 0.2704, decode.d5.loss_dice: 0.7510, decode.d6.loss_cls: 0.1852, decode.d6.loss_mask: 0.2695, decode.d6.loss_dice: 0.7446, decode.d7.loss_cls: 0.1831, decode.d7.loss_mask: 0.2694, decode.d7.loss_dice: 0.7482, decode.d8.loss_cls: 0.1809, decode.d8.loss_mask: 0.2697, decode.d8.loss_dice: 0.7471, loss: 12.4271 +2022-05-09 22:26:27,577 - mmseg - INFO - Iter [9150/80000] lr: 1.272e-06, eta: 1 day, 14:32:24, time: 1.815, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1788, decode.loss_mask: 0.2782, decode.loss_dice: 0.7511, decode.d0.loss_cls: 0.3821, decode.d0.loss_mask: 0.2949, decode.d0.loss_dice: 0.8221, decode.d1.loss_cls: 0.2023, decode.d1.loss_mask: 0.2817, decode.d1.loss_dice: 0.7711, decode.d2.loss_cls: 0.1927, decode.d2.loss_mask: 0.2794, decode.d2.loss_dice: 0.7594, decode.d3.loss_cls: 0.1877, decode.d3.loss_mask: 0.2784, decode.d3.loss_dice: 0.7536, 
decode.d4.loss_cls: 0.1823, decode.d4.loss_mask: 0.2775, decode.d4.loss_dice: 0.7571, decode.d5.loss_cls: 0.1868, decode.d5.loss_mask: 0.2778, decode.d5.loss_dice: 0.7582, decode.d6.loss_cls: 0.1850, decode.d6.loss_mask: 0.2790, decode.d6.loss_dice: 0.7525, decode.d7.loss_cls: 0.1772, decode.d7.loss_mask: 0.2790, decode.d7.loss_dice: 0.7492, decode.d8.loss_cls: 0.1794, decode.d8.loss_mask: 0.2784, decode.d8.loss_dice: 0.7547, loss: 12.4877 +2022-05-09 22:27:56,287 - mmseg - INFO - Iter [9200/80000] lr: 1.271e-06, eta: 1 day, 14:29:36, time: 1.777, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1719, decode.loss_mask: 0.2710, decode.loss_dice: 0.7546, decode.d0.loss_cls: 0.3834, decode.d0.loss_mask: 0.2851, decode.d0.loss_dice: 0.8202, decode.d1.loss_cls: 0.1982, decode.d1.loss_mask: 0.2739, decode.d1.loss_dice: 0.7735, decode.d2.loss_cls: 0.1767, decode.d2.loss_mask: 0.2716, decode.d2.loss_dice: 0.7664, decode.d3.loss_cls: 0.1764, decode.d3.loss_mask: 0.2713, decode.d3.loss_dice: 0.7565, decode.d4.loss_cls: 0.1682, decode.d4.loss_mask: 0.2712, decode.d4.loss_dice: 0.7571, decode.d5.loss_cls: 0.1732, decode.d5.loss_mask: 0.2706, decode.d5.loss_dice: 0.7544, decode.d6.loss_cls: 0.1787, decode.d6.loss_mask: 0.2706, decode.d6.loss_dice: 0.7514, decode.d7.loss_cls: 0.1739, decode.d7.loss_mask: 0.2697, decode.d7.loss_dice: 0.7547, decode.d8.loss_cls: 0.1707, decode.d8.loss_mask: 0.2715, decode.d8.loss_dice: 0.7547, loss: 12.3411 +2022-05-09 22:29:25,111 - mmseg - INFO - Iter [9250/80000] lr: 1.270e-06, eta: 1 day, 14:26:50, time: 1.777, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1732, decode.loss_mask: 0.2803, decode.loss_dice: 0.7441, decode.d0.loss_cls: 0.3816, decode.d0.loss_mask: 0.3010, decode.d0.loss_dice: 0.8077, decode.d1.loss_cls: 0.1984, decode.d1.loss_mask: 0.2851, decode.d1.loss_dice: 0.7643, decode.d2.loss_cls: 0.1882, decode.d2.loss_mask: 0.2806, decode.d2.loss_dice: 0.7500, decode.d3.loss_cls: 0.1755, decode.d3.loss_mask: 0.2804, 
decode.d3.loss_dice: 0.7477, decode.d4.loss_cls: 0.1760, decode.d4.loss_mask: 0.2797, decode.d4.loss_dice: 0.7444, decode.d5.loss_cls: 0.1818, decode.d5.loss_mask: 0.2790, decode.d5.loss_dice: 0.7458, decode.d6.loss_cls: 0.1746, decode.d6.loss_mask: 0.2796, decode.d6.loss_dice: 0.7453, decode.d7.loss_cls: 0.1806, decode.d7.loss_mask: 0.2794, decode.d7.loss_dice: 0.7452, decode.d8.loss_cls: 0.1750, decode.d8.loss_mask: 0.2790, decode.d8.loss_dice: 0.7408, loss: 12.3644 +2022-05-09 22:30:53,718 - mmseg - INFO - Iter [9300/80000] lr: 1.269e-06, eta: 1 day, 14:24:02, time: 1.772, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1697, decode.loss_mask: 0.2728, decode.loss_dice: 0.7575, decode.d0.loss_cls: 0.3856, decode.d0.loss_mask: 0.2865, decode.d0.loss_dice: 0.8212, decode.d1.loss_cls: 0.1988, decode.d1.loss_mask: 0.2752, decode.d1.loss_dice: 0.7753, decode.d2.loss_cls: 0.1859, decode.d2.loss_mask: 0.2732, decode.d2.loss_dice: 0.7664, decode.d3.loss_cls: 0.1861, decode.d3.loss_mask: 0.2730, decode.d3.loss_dice: 0.7621, decode.d4.loss_cls: 0.1847, decode.d4.loss_mask: 0.2731, decode.d4.loss_dice: 0.7565, decode.d5.loss_cls: 0.1786, decode.d5.loss_mask: 0.2732, decode.d5.loss_dice: 0.7606, decode.d6.loss_cls: 0.1760, decode.d6.loss_mask: 0.2726, decode.d6.loss_dice: 0.7569, decode.d7.loss_cls: 0.1760, decode.d7.loss_mask: 0.2732, decode.d7.loss_dice: 0.7607, decode.d8.loss_cls: 0.1767, decode.d8.loss_mask: 0.2743, decode.d8.loss_dice: 0.7592, loss: 12.4414 +2022-05-09 22:32:24,952 - mmseg - INFO - Iter [9350/80000] lr: 1.268e-06, eta: 1 day, 14:21:35, time: 1.825, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1672, decode.loss_mask: 0.2712, decode.loss_dice: 0.7416, decode.d0.loss_cls: 0.3821, decode.d0.loss_mask: 0.2877, decode.d0.loss_dice: 0.8007, decode.d1.loss_cls: 0.1867, decode.d1.loss_mask: 0.2760, decode.d1.loss_dice: 0.7601, decode.d2.loss_cls: 0.1879, decode.d2.loss_mask: 0.2739, decode.d2.loss_dice: 0.7495, decode.d3.loss_cls: 0.1786, 
decode.d3.loss_mask: 0.2729, decode.d3.loss_dice: 0.7415, decode.d4.loss_cls: 0.1789, decode.d4.loss_mask: 0.2721, decode.d4.loss_dice: 0.7403, decode.d5.loss_cls: 0.1745, decode.d5.loss_mask: 0.2720, decode.d5.loss_dice: 0.7455, decode.d6.loss_cls: 0.1629, decode.d6.loss_mask: 0.2717, decode.d6.loss_dice: 0.7410, decode.d7.loss_cls: 0.1659, decode.d7.loss_mask: 0.2721, decode.d7.loss_dice: 0.7439, decode.d8.loss_cls: 0.1673, decode.d8.loss_mask: 0.2706, decode.d8.loss_dice: 0.7429, loss: 12.1991 +2022-05-09 22:33:53,691 - mmseg - INFO - Iter [9400/80000] lr: 1.267e-06, eta: 1 day, 14:18:49, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1827, decode.loss_mask: 0.2791, decode.loss_dice: 0.7517, decode.d0.loss_cls: 0.3854, decode.d0.loss_mask: 0.3016, decode.d0.loss_dice: 0.8174, decode.d1.loss_cls: 0.1840, decode.d1.loss_mask: 0.2839, decode.d1.loss_dice: 0.7744, decode.d2.loss_cls: 0.1824, decode.d2.loss_mask: 0.2809, decode.d2.loss_dice: 0.7596, decode.d3.loss_cls: 0.1757, decode.d3.loss_mask: 0.2798, decode.d3.loss_dice: 0.7520, decode.d4.loss_cls: 0.1800, decode.d4.loss_mask: 0.2796, decode.d4.loss_dice: 0.7487, decode.d5.loss_cls: 0.1940, decode.d5.loss_mask: 0.2784, decode.d5.loss_dice: 0.7472, decode.d6.loss_cls: 0.1810, decode.d6.loss_mask: 0.2796, decode.d6.loss_dice: 0.7489, decode.d7.loss_cls: 0.1777, decode.d7.loss_mask: 0.2794, decode.d7.loss_dice: 0.7522, decode.d8.loss_cls: 0.1733, decode.d8.loss_mask: 0.2789, decode.d8.loss_dice: 0.7490, loss: 12.4384 +2022-05-09 22:35:22,495 - mmseg - INFO - Iter [9450/80000] lr: 1.266e-06, eta: 1 day, 14:16:05, time: 1.776, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1803, decode.loss_mask: 0.2751, decode.loss_dice: 0.7514, decode.d0.loss_cls: 0.3873, decode.d0.loss_mask: 0.2921, decode.d0.loss_dice: 0.8163, decode.d1.loss_cls: 0.1994, decode.d1.loss_mask: 0.2804, decode.d1.loss_dice: 0.7723, decode.d2.loss_cls: 0.1874, decode.d2.loss_mask: 0.2760, decode.d2.loss_dice: 0.7620, 
decode.d3.loss_cls: 0.1831, decode.d3.loss_mask: 0.2765, decode.d3.loss_dice: 0.7543, decode.d4.loss_cls: 0.1772, decode.d4.loss_mask: 0.2766, decode.d4.loss_dice: 0.7537, decode.d5.loss_cls: 0.1768, decode.d5.loss_mask: 0.2765, decode.d5.loss_dice: 0.7559, decode.d6.loss_cls: 0.1728, decode.d6.loss_mask: 0.2766, decode.d6.loss_dice: 0.7521, decode.d7.loss_cls: 0.1748, decode.d7.loss_mask: 0.2754, decode.d7.loss_dice: 0.7519, decode.d8.loss_cls: 0.1760, decode.d8.loss_mask: 0.2757, decode.d8.loss_dice: 0.7504, loss: 12.4162 +2022-05-09 22:36:54,060 - mmseg - INFO - Iter [9500/80000] lr: 1.265e-06, eta: 1 day, 14:13:43, time: 1.831, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1547, decode.loss_mask: 0.2722, decode.loss_dice: 0.7503, decode.d0.loss_cls: 0.3760, decode.d0.loss_mask: 0.2931, decode.d0.loss_dice: 0.8095, decode.d1.loss_cls: 0.1938, decode.d1.loss_mask: 0.2775, decode.d1.loss_dice: 0.7681, decode.d2.loss_cls: 0.1804, decode.d2.loss_mask: 0.2747, decode.d2.loss_dice: 0.7594, decode.d3.loss_cls: 0.1706, decode.d3.loss_mask: 0.2742, decode.d3.loss_dice: 0.7502, decode.d4.loss_cls: 0.1649, decode.d4.loss_mask: 0.2751, decode.d4.loss_dice: 0.7533, decode.d5.loss_cls: 0.1687, decode.d5.loss_mask: 0.2735, decode.d5.loss_dice: 0.7479, decode.d6.loss_cls: 0.1593, decode.d6.loss_mask: 0.2731, decode.d6.loss_dice: 0.7477, decode.d7.loss_cls: 0.1602, decode.d7.loss_mask: 0.2743, decode.d7.loss_dice: 0.7502, decode.d8.loss_cls: 0.1567, decode.d8.loss_mask: 0.2741, decode.d8.loss_dice: 0.7528, loss: 12.2365 +2022-05-09 22:38:21,493 - mmseg - INFO - Iter [9550/80000] lr: 1.264e-06, eta: 1 day, 14:10:50, time: 1.749, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1740, decode.loss_mask: 0.2678, decode.loss_dice: 0.7437, decode.d0.loss_cls: 0.3862, decode.d0.loss_mask: 0.2842, decode.d0.loss_dice: 0.8056, decode.d1.loss_cls: 0.1975, decode.d1.loss_mask: 0.2712, decode.d1.loss_dice: 0.7655, decode.d2.loss_cls: 0.1911, decode.d2.loss_mask: 0.2667, 
decode.d2.loss_dice: 0.7599, decode.d3.loss_cls: 0.1819, decode.d3.loss_mask: 0.2679, decode.d3.loss_dice: 0.7498, decode.d4.loss_cls: 0.1875, decode.d4.loss_mask: 0.2674, decode.d4.loss_dice: 0.7500, decode.d5.loss_cls: 0.1817, decode.d5.loss_mask: 0.2691, decode.d5.loss_dice: 0.7452, decode.d6.loss_cls: 0.1794, decode.d6.loss_mask: 0.2679, decode.d6.loss_dice: 0.7431, decode.d7.loss_cls: 0.1792, decode.d7.loss_mask: 0.2670, decode.d7.loss_dice: 0.7431, decode.d8.loss_cls: 0.1865, decode.d8.loss_mask: 0.2664, decode.d8.loss_dice: 0.7456, loss: 12.2920 +2022-05-09 22:39:49,730 - mmseg - INFO - Iter [9600/80000] lr: 1.264e-06, eta: 1 day, 14:08:04, time: 1.764, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1682, decode.loss_mask: 0.2724, decode.loss_dice: 0.7431, decode.d0.loss_cls: 0.3867, decode.d0.loss_mask: 0.2870, decode.d0.loss_dice: 0.8167, decode.d1.loss_cls: 0.1968, decode.d1.loss_mask: 0.2746, decode.d1.loss_dice: 0.7718, decode.d2.loss_cls: 0.1856, decode.d2.loss_mask: 0.2737, decode.d2.loss_dice: 0.7540, decode.d3.loss_cls: 0.1730, decode.d3.loss_mask: 0.2727, decode.d3.loss_dice: 0.7454, decode.d4.loss_cls: 0.1795, decode.d4.loss_mask: 0.2730, decode.d4.loss_dice: 0.7432, decode.d5.loss_cls: 0.1758, decode.d5.loss_mask: 0.2721, decode.d5.loss_dice: 0.7464, decode.d6.loss_cls: 0.1703, decode.d6.loss_mask: 0.2728, decode.d6.loss_dice: 0.7440, decode.d7.loss_cls: 0.1699, decode.d7.loss_mask: 0.2714, decode.d7.loss_dice: 0.7417, decode.d8.loss_cls: 0.1729, decode.d8.loss_mask: 0.2716, decode.d8.loss_dice: 0.7422, loss: 12.2684 +2022-05-09 22:41:17,575 - mmseg - INFO - Iter [9650/80000] lr: 1.263e-06, eta: 1 day, 14:05:16, time: 1.756, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1671, decode.loss_mask: 0.2734, decode.loss_dice: 0.7529, decode.d0.loss_cls: 0.3803, decode.d0.loss_mask: 0.2880, decode.d0.loss_dice: 0.8168, decode.d1.loss_cls: 0.2034, decode.d1.loss_mask: 0.2752, decode.d1.loss_dice: 0.7739, decode.d2.loss_cls: 0.1818, 
decode.d2.loss_mask: 0.2730, decode.d2.loss_dice: 0.7612, decode.d3.loss_cls: 0.1696, decode.d3.loss_mask: 0.2728, decode.d3.loss_dice: 0.7523, decode.d4.loss_cls: 0.1790, decode.d4.loss_mask: 0.2739, decode.d4.loss_dice: 0.7513, decode.d5.loss_cls: 0.1737, decode.d5.loss_mask: 0.2731, decode.d5.loss_dice: 0.7568, decode.d6.loss_cls: 0.1721, decode.d6.loss_mask: 0.2733, decode.d6.loss_dice: 0.7487, decode.d7.loss_cls: 0.1694, decode.d7.loss_mask: 0.2741, decode.d7.loss_dice: 0.7556, decode.d8.loss_cls: 0.1641, decode.d8.loss_mask: 0.2741, decode.d8.loss_dice: 0.7507, loss: 12.3317 +2022-05-09 22:42:49,095 - mmseg - INFO - Iter [9700/80000] lr: 1.262e-06, eta: 1 day, 14:02:55, time: 1.829, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1681, decode.loss_mask: 0.2751, decode.loss_dice: 0.7495, decode.d0.loss_cls: 0.3781, decode.d0.loss_mask: 0.2947, decode.d0.loss_dice: 0.8152, decode.d1.loss_cls: 0.1982, decode.d1.loss_mask: 0.2813, decode.d1.loss_dice: 0.7741, decode.d2.loss_cls: 0.1885, decode.d2.loss_mask: 0.2767, decode.d2.loss_dice: 0.7618, decode.d3.loss_cls: 0.1774, decode.d3.loss_mask: 0.2771, decode.d3.loss_dice: 0.7520, decode.d4.loss_cls: 0.1771, decode.d4.loss_mask: 0.2771, decode.d4.loss_dice: 0.7535, decode.d5.loss_cls: 0.1726, decode.d5.loss_mask: 0.2753, decode.d5.loss_dice: 0.7489, decode.d6.loss_cls: 0.1699, decode.d6.loss_mask: 0.2749, decode.d6.loss_dice: 0.7461, decode.d7.loss_cls: 0.1730, decode.d7.loss_mask: 0.2750, decode.d7.loss_dice: 0.7501, decode.d8.loss_cls: 0.1759, decode.d8.loss_mask: 0.2742, decode.d8.loss_dice: 0.7459, loss: 12.3572 +2022-05-09 22:44:18,670 - mmseg - INFO - Iter [9750/80000] lr: 1.261e-06, eta: 1 day, 14:00:22, time: 1.793, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1515, decode.loss_mask: 0.2623, decode.loss_dice: 0.7357, decode.d0.loss_cls: 0.3749, decode.d0.loss_mask: 0.2794, decode.d0.loss_dice: 0.7969, decode.d1.loss_cls: 0.1884, decode.d1.loss_mask: 0.2659, decode.d1.loss_dice: 0.7557, 
decode.d2.loss_cls: 0.1789, decode.d2.loss_mask: 0.2635, decode.d2.loss_dice: 0.7408, decode.d3.loss_cls: 0.1559, decode.d3.loss_mask: 0.2634, decode.d3.loss_dice: 0.7382, decode.d4.loss_cls: 0.1627, decode.d4.loss_mask: 0.2634, decode.d4.loss_dice: 0.7385, decode.d5.loss_cls: 0.1692, decode.d5.loss_mask: 0.2633, decode.d5.loss_dice: 0.7335, decode.d6.loss_cls: 0.1619, decode.d6.loss_mask: 0.2613, decode.d6.loss_dice: 0.7314, decode.d7.loss_cls: 0.1623, decode.d7.loss_mask: 0.2620, decode.d7.loss_dice: 0.7357, decode.d8.loss_cls: 0.1598, decode.d8.loss_mask: 0.2632, decode.d8.loss_dice: 0.7365, loss: 11.9561 +2022-05-09 22:45:48,258 - mmseg - INFO - Iter [9800/80000] lr: 1.260e-06, eta: 1 day, 13:57:48, time: 1.791, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1581, decode.loss_mask: 0.2714, decode.loss_dice: 0.7530, decode.d0.loss_cls: 0.3791, decode.d0.loss_mask: 0.2917, decode.d0.loss_dice: 0.8147, decode.d1.loss_cls: 0.1795, decode.d1.loss_mask: 0.2749, decode.d1.loss_dice: 0.7689, decode.d2.loss_cls: 0.1654, decode.d2.loss_mask: 0.2719, decode.d2.loss_dice: 0.7616, decode.d3.loss_cls: 0.1606, decode.d3.loss_mask: 0.2718, decode.d3.loss_dice: 0.7571, decode.d4.loss_cls: 0.1594, decode.d4.loss_mask: 0.2713, decode.d4.loss_dice: 0.7517, decode.d5.loss_cls: 0.1591, decode.d5.loss_mask: 0.2714, decode.d5.loss_dice: 0.7550, decode.d6.loss_cls: 0.1579, decode.d6.loss_mask: 0.2717, decode.d6.loss_dice: 0.7525, decode.d7.loss_cls: 0.1582, decode.d7.loss_mask: 0.2725, decode.d7.loss_dice: 0.7528, decode.d8.loss_cls: 0.1612, decode.d8.loss_mask: 0.2722, decode.d8.loss_dice: 0.7573, loss: 12.2037 +2022-05-09 22:47:17,248 - mmseg - INFO - Iter [9850/80000] lr: 1.259e-06, eta: 1 day, 13:55:11, time: 1.779, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1714, decode.loss_mask: 0.2722, decode.loss_dice: 0.7420, decode.d0.loss_cls: 0.3883, decode.d0.loss_mask: 0.2915, decode.d0.loss_dice: 0.8014, decode.d1.loss_cls: 0.1976, decode.d1.loss_mask: 0.2769, 
decode.d1.loss_dice: 0.7653, decode.d2.loss_cls: 0.1947, decode.d2.loss_mask: 0.2734, decode.d2.loss_dice: 0.7504, decode.d3.loss_cls: 0.1864, decode.d3.loss_mask: 0.2728, decode.d3.loss_dice: 0.7425, decode.d4.loss_cls: 0.1754, decode.d4.loss_mask: 0.2721, decode.d4.loss_dice: 0.7464, decode.d5.loss_cls: 0.1843, decode.d5.loss_mask: 0.2716, decode.d5.loss_dice: 0.7475, decode.d6.loss_cls: 0.1724, decode.d6.loss_mask: 0.2716, decode.d6.loss_dice: 0.7397, decode.d7.loss_cls: 0.1757, decode.d7.loss_mask: 0.2726, decode.d7.loss_dice: 0.7438, decode.d8.loss_cls: 0.1797, decode.d8.loss_mask: 0.2727, decode.d8.loss_dice: 0.7389, loss: 12.2910 +2022-05-09 22:48:48,618 - mmseg - INFO - Iter [9900/80000] lr: 1.258e-06, eta: 1 day, 13:52:52, time: 1.828, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1503, decode.loss_mask: 0.2697, decode.loss_dice: 0.7592, decode.d0.loss_cls: 0.3642, decode.d0.loss_mask: 0.2853, decode.d0.loss_dice: 0.8200, decode.d1.loss_cls: 0.1891, decode.d1.loss_mask: 0.2713, decode.d1.loss_dice: 0.7792, decode.d2.loss_cls: 0.1696, decode.d2.loss_mask: 0.2695, decode.d2.loss_dice: 0.7654, decode.d3.loss_cls: 0.1661, decode.d3.loss_mask: 0.2701, decode.d3.loss_dice: 0.7609, decode.d4.loss_cls: 0.1591, decode.d4.loss_mask: 0.2698, decode.d4.loss_dice: 0.7584, decode.d5.loss_cls: 0.1587, decode.d5.loss_mask: 0.2687, decode.d5.loss_dice: 0.7598, decode.d6.loss_cls: 0.1594, decode.d6.loss_mask: 0.2696, decode.d6.loss_dice: 0.7581, decode.d7.loss_cls: 0.1568, decode.d7.loss_mask: 0.2684, decode.d7.loss_dice: 0.7560, decode.d8.loss_cls: 0.1564, decode.d8.loss_mask: 0.2695, decode.d8.loss_dice: 0.7600, loss: 12.2184 +2022-05-09 22:50:17,742 - mmseg - INFO - Iter [9950/80000] lr: 1.257e-06, eta: 1 day, 13:50:18, time: 1.783, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1713, decode.loss_mask: 0.2787, decode.loss_dice: 0.7516, decode.d0.loss_cls: 0.3761, decode.d0.loss_mask: 0.2949, decode.d0.loss_dice: 0.8164, decode.d1.loss_cls: 0.1909, 
decode.d1.loss_mask: 0.2838, decode.d1.loss_dice: 0.7768, decode.d2.loss_cls: 0.1836, decode.d2.loss_mask: 0.2804, decode.d2.loss_dice: 0.7630, decode.d3.loss_cls: 0.1721, decode.d3.loss_mask: 0.2795, decode.d3.loss_dice: 0.7572, decode.d4.loss_cls: 0.1741, decode.d4.loss_mask: 0.2788, decode.d4.loss_dice: 0.7555, decode.d5.loss_cls: 0.1696, decode.d5.loss_mask: 0.2783, decode.d5.loss_dice: 0.7543, decode.d6.loss_cls: 0.1668, decode.d6.loss_mask: 0.2794, decode.d6.loss_dice: 0.7549, decode.d7.loss_cls: 0.1639, decode.d7.loss_mask: 0.2793, decode.d7.loss_dice: 0.7499, decode.d8.loss_cls: 0.1612, decode.d8.loss_mask: 0.2798, decode.d8.loss_dice: 0.7539, loss: 12.3759 +2022-05-09 22:51:47,911 - mmseg - INFO - Saving checkpoint at 10000 iterations +2022-05-09 22:52:17,668 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 22:52:17,671 - mmseg - INFO - Iter [10000/80000] lr: 1.256e-06, eta: 1 day, 13:51:19, time: 2.396, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1661, decode.loss_mask: 0.2746, decode.loss_dice: 0.7339, decode.d0.loss_cls: 0.3964, decode.d0.loss_mask: 0.2932, decode.d0.loss_dice: 0.7949, decode.d1.loss_cls: 0.1964, decode.d1.loss_mask: 0.2771, decode.d1.loss_dice: 0.7488, decode.d2.loss_cls: 0.1833, decode.d2.loss_mask: 0.2756, decode.d2.loss_dice: 0.7412, decode.d3.loss_cls: 0.1829, decode.d3.loss_mask: 0.2748, decode.d3.loss_dice: 0.7363, decode.d4.loss_cls: 0.1787, decode.d4.loss_mask: 0.2740, decode.d4.loss_dice: 0.7381, decode.d5.loss_cls: 0.1757, decode.d5.loss_mask: 0.2752, decode.d5.loss_dice: 0.7411, decode.d6.loss_cls: 0.1724, decode.d6.loss_mask: 0.2751, decode.d6.loss_dice: 0.7380, decode.d7.loss_cls: 0.1688, decode.d7.loss_mask: 0.2743, decode.d7.loss_dice: 0.7362, decode.d8.loss_cls: 0.1782, decode.d8.loss_mask: 0.2751, decode.d8.loss_dice: 0.7366, loss: 12.2128 +2022-05-09 22:54:13,171 - mmseg - INFO - per class results: +2022-05-09 22:54:13,177 - mmseg - INFO - 
++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.69 | 99.21 | +| sidewalk | 89.29 | 94.55 | +| building | 94.56 | 97.11 | +| wall | 69.62 | 79.79 | +| fence | 71.53 | 83.72 | +| pole | 71.97 | 84.85 | +| traffic light | 76.99 | 89.28 | +| traffic sign | 83.45 | 90.86 | +| vegetation | 93.46 | 96.89 | +| terrain | 69.75 | 79.24 | +| sky | 96.01 | 98.34 | +| person | 86.85 | 94.12 | +| rider | 74.17 | 86.74 | +| car | 96.29 | 98.13 | +| truck | 90.34 | 93.29 | +| bus | 93.67 | 96.6 | +| train | 87.92 | 90.67 | +| motorcycle | 74.01 | 88.83 | +| bicycle | 82.12 | 90.75 | ++---------------+-------+-------+ +2022-05-09 22:54:13,178 - mmseg - INFO - Summary: +2022-05-09 22:54:13,178 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.07 | 84.25 | 91.21 | ++-------+-------+-------+ +2022-05-09 22:54:13,182 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 22:54:13,182 - mmseg - INFO - Iter(val) [32] aAcc: 0.9707, mIoU: 0.8425, mAcc: 0.9121, IoU.road: 0.9869, IoU.sidewalk: 0.8929, IoU.building: 0.9456, IoU.wall: 0.6962, IoU.fence: 0.7153, IoU.pole: 0.7197, IoU.traffic light: 0.7699, IoU.traffic sign: 0.8345, IoU.vegetation: 0.9346, IoU.terrain: 0.6975, IoU.sky: 0.9601, IoU.person: 0.8685, IoU.rider: 0.7417, IoU.car: 0.9629, IoU.truck: 0.9034, IoU.bus: 0.9367, IoU.train: 0.8792, IoU.motorcycle: 0.7401, IoU.bicycle: 0.8212, Acc.road: 0.9921, Acc.sidewalk: 0.9455, Acc.building: 0.9711, Acc.wall: 0.7979, Acc.fence: 0.8372, Acc.pole: 0.8485, Acc.traffic light: 0.8928, Acc.traffic sign: 0.9086, Acc.vegetation: 0.9689, Acc.terrain: 0.7924, Acc.sky: 0.9834, Acc.person: 0.9412, Acc.rider: 0.8674, Acc.car: 0.9813, Acc.truck: 0.9329, Acc.bus: 0.9660, Acc.train: 0.9067, Acc.motorcycle: 0.8883, Acc.bicycle: 0.9075 +2022-05-09 22:55:44,458 - mmseg - INFO - Iter [10050/80000] lr: 1.255e-06, eta: 1 day, 14:02:24, time: 4.138, data_time: 
2.375, memory: 64699, decode.loss_cls: 0.1513, decode.loss_mask: 0.2669, decode.loss_dice: 0.7556, decode.d0.loss_cls: 0.3825, decode.d0.loss_mask: 0.2815, decode.d0.loss_dice: 0.8152, decode.d1.loss_cls: 0.1821, decode.d1.loss_mask: 0.2697, decode.d1.loss_dice: 0.7745, decode.d2.loss_cls: 0.1663, decode.d2.loss_mask: 0.2679, decode.d2.loss_dice: 0.7647, decode.d3.loss_cls: 0.1581, decode.d3.loss_mask: 0.2668, decode.d3.loss_dice: 0.7558, decode.d4.loss_cls: 0.1599, decode.d4.loss_mask: 0.2670, decode.d4.loss_dice: 0.7589, decode.d5.loss_cls: 0.1544, decode.d5.loss_mask: 0.2672, decode.d5.loss_dice: 0.7587, decode.d6.loss_cls: 0.1544, decode.d6.loss_mask: 0.2665, decode.d6.loss_dice: 0.7578, decode.d7.loss_cls: 0.1567, decode.d7.loss_mask: 0.2670, decode.d7.loss_dice: 0.7555, decode.d8.loss_cls: 0.1608, decode.d8.loss_mask: 0.2663, decode.d8.loss_dice: 0.7537, loss: 12.1637 +2022-05-09 22:57:12,889 - mmseg - INFO - Iter [10100/80000] lr: 1.255e-06, eta: 1 day, 13:59:40, time: 1.769, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1571, decode.loss_mask: 0.2681, decode.loss_dice: 0.7309, decode.d0.loss_cls: 0.3721, decode.d0.loss_mask: 0.2823, decode.d0.loss_dice: 0.7935, decode.d1.loss_cls: 0.1855, decode.d1.loss_mask: 0.2700, decode.d1.loss_dice: 0.7514, decode.d2.loss_cls: 0.1657, decode.d2.loss_mask: 0.2677, decode.d2.loss_dice: 0.7381, decode.d3.loss_cls: 0.1678, decode.d3.loss_mask: 0.2671, decode.d3.loss_dice: 0.7299, decode.d4.loss_cls: 0.1697, decode.d4.loss_mask: 0.2675, decode.d4.loss_dice: 0.7323, decode.d5.loss_cls: 0.1699, decode.d5.loss_mask: 0.2665, decode.d5.loss_dice: 0.7343, decode.d6.loss_cls: 0.1639, decode.d6.loss_mask: 0.2668, decode.d6.loss_dice: 0.7274, decode.d7.loss_cls: 0.1579, decode.d7.loss_mask: 0.2667, decode.d7.loss_dice: 0.7285, decode.d8.loss_cls: 0.1624, decode.d8.loss_mask: 0.2672, decode.d8.loss_dice: 0.7318, loss: 11.9601 +2022-05-09 22:58:41,494 - mmseg - INFO - Iter [10150/80000] lr: 1.254e-06, eta: 1 day, 13:56:59, time: 
1.772, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1849, decode.loss_mask: 0.2667, decode.loss_dice: 0.7393, decode.d0.loss_cls: 0.3936, decode.d0.loss_mask: 0.2852, decode.d0.loss_dice: 0.8070, decode.d1.loss_cls: 0.2124, decode.d1.loss_mask: 0.2694, decode.d1.loss_dice: 0.7644, decode.d2.loss_cls: 0.2000, decode.d2.loss_mask: 0.2681, decode.d2.loss_dice: 0.7547, decode.d3.loss_cls: 0.1879, decode.d3.loss_mask: 0.2684, decode.d3.loss_dice: 0.7434, decode.d4.loss_cls: 0.1850, decode.d4.loss_mask: 0.2674, decode.d4.loss_dice: 0.7375, decode.d5.loss_cls: 0.1833, decode.d5.loss_mask: 0.2669, decode.d5.loss_dice: 0.7385, decode.d6.loss_cls: 0.1898, decode.d6.loss_mask: 0.2669, decode.d6.loss_dice: 0.7399, decode.d7.loss_cls: 0.1831, decode.d7.loss_mask: 0.2670, decode.d7.loss_dice: 0.7400, decode.d8.loss_cls: 0.1784, decode.d8.loss_mask: 0.2669, decode.d8.loss_dice: 0.7416, loss: 12.2977 +2022-05-09 23:00:09,845 - mmseg - INFO - Iter [10200/80000] lr: 1.253e-06, eta: 1 day, 13:54:17, time: 1.767, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1683, decode.loss_mask: 0.2741, decode.loss_dice: 0.7441, decode.d0.loss_cls: 0.3737, decode.d0.loss_mask: 0.2921, decode.d0.loss_dice: 0.8100, decode.d1.loss_cls: 0.2055, decode.d1.loss_mask: 0.2780, decode.d1.loss_dice: 0.7645, decode.d2.loss_cls: 0.1917, decode.d2.loss_mask: 0.2761, decode.d2.loss_dice: 0.7543, decode.d3.loss_cls: 0.1734, decode.d3.loss_mask: 0.2752, decode.d3.loss_dice: 0.7458, decode.d4.loss_cls: 0.1822, decode.d4.loss_mask: 0.2757, decode.d4.loss_dice: 0.7438, decode.d5.loss_cls: 0.1781, decode.d5.loss_mask: 0.2754, decode.d5.loss_dice: 0.7469, decode.d6.loss_cls: 0.1759, decode.d6.loss_mask: 0.2753, decode.d6.loss_dice: 0.7468, decode.d7.loss_cls: 0.1744, decode.d7.loss_mask: 0.2749, decode.d7.loss_dice: 0.7461, decode.d8.loss_cls: 0.1820, decode.d8.loss_mask: 0.2749, decode.d8.loss_dice: 0.7447, loss: 12.3238 +2022-05-09 23:01:41,583 - mmseg - INFO - Iter [10250/80000] lr: 1.252e-06, eta: 1 
day, 13:51:58, time: 1.834, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1505, decode.loss_mask: 0.2709, decode.loss_dice: 0.7373, decode.d0.loss_cls: 0.3685, decode.d0.loss_mask: 0.2831, decode.d0.loss_dice: 0.7960, decode.d1.loss_cls: 0.1722, decode.d1.loss_mask: 0.2722, decode.d1.loss_dice: 0.7603, decode.d2.loss_cls: 0.1620, decode.d2.loss_mask: 0.2712, decode.d2.loss_dice: 0.7459, decode.d3.loss_cls: 0.1529, decode.d3.loss_mask: 0.2710, decode.d3.loss_dice: 0.7397, decode.d4.loss_cls: 0.1533, decode.d4.loss_mask: 0.2702, decode.d4.loss_dice: 0.7400, decode.d5.loss_cls: 0.1569, decode.d5.loss_mask: 0.2701, decode.d5.loss_dice: 0.7332, decode.d6.loss_cls: 0.1528, decode.d6.loss_mask: 0.2709, decode.d6.loss_dice: 0.7332, decode.d7.loss_cls: 0.1539, decode.d7.loss_mask: 0.2706, decode.d7.loss_dice: 0.7331, decode.d8.loss_cls: 0.1550, decode.d8.loss_mask: 0.2708, decode.d8.loss_dice: 0.7358, loss: 11.9538 +2022-05-09 23:03:10,543 - mmseg - INFO - Iter [10300/80000] lr: 1.251e-06, eta: 1 day, 13:49:21, time: 1.780, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1655, decode.loss_mask: 0.2708, decode.loss_dice: 0.7463, decode.d0.loss_cls: 0.3671, decode.d0.loss_mask: 0.2843, decode.d0.loss_dice: 0.8071, decode.d1.loss_cls: 0.1882, decode.d1.loss_mask: 0.2755, decode.d1.loss_dice: 0.7649, decode.d2.loss_cls: 0.1808, decode.d2.loss_mask: 0.2723, decode.d2.loss_dice: 0.7498, decode.d3.loss_cls: 0.1674, decode.d3.loss_mask: 0.2717, decode.d3.loss_dice: 0.7488, decode.d4.loss_cls: 0.1656, decode.d4.loss_mask: 0.2721, decode.d4.loss_dice: 0.7485, decode.d5.loss_cls: 0.1636, decode.d5.loss_mask: 0.2715, decode.d5.loss_dice: 0.7489, decode.d6.loss_cls: 0.1610, decode.d6.loss_mask: 0.2708, decode.d6.loss_dice: 0.7468, decode.d7.loss_cls: 0.1657, decode.d7.loss_mask: 0.2712, decode.d7.loss_dice: 0.7495, decode.d8.loss_cls: 0.1650, decode.d8.loss_mask: 0.2717, decode.d8.loss_dice: 0.7480, loss: 12.1805 +2022-05-09 23:04:39,126 - mmseg - INFO - Iter [10350/80000] lr: 
1.250e-06, eta: 1 day, 13:46:42, time: 1.772, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1660, decode.loss_mask: 0.2852, decode.loss_dice: 0.7503, decode.d0.loss_cls: 0.3828, decode.d0.loss_mask: 0.3063, decode.d0.loss_dice: 0.8123, decode.d1.loss_cls: 0.1939, decode.d1.loss_mask: 0.2886, decode.d1.loss_dice: 0.7701, decode.d2.loss_cls: 0.1823, decode.d2.loss_mask: 0.2880, decode.d2.loss_dice: 0.7648, decode.d3.loss_cls: 0.1727, decode.d3.loss_mask: 0.2868, decode.d3.loss_dice: 0.7534, decode.d4.loss_cls: 0.1723, decode.d4.loss_mask: 0.2865, decode.d4.loss_dice: 0.7504, decode.d5.loss_cls: 0.1653, decode.d5.loss_mask: 0.2866, decode.d5.loss_dice: 0.7561, decode.d6.loss_cls: 0.1687, decode.d6.loss_mask: 0.2866, decode.d6.loss_dice: 0.7503, decode.d7.loss_cls: 0.1689, decode.d7.loss_mask: 0.2859, decode.d7.loss_dice: 0.7499, decode.d8.loss_cls: 0.1651, decode.d8.loss_mask: 0.2863, decode.d8.loss_dice: 0.7509, loss: 12.4333 +2022-05-09 23:06:08,257 - mmseg - INFO - Iter [10400/80000] lr: 1.249e-06, eta: 1 day, 13:44:07, time: 1.783, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1651, decode.loss_mask: 0.2673, decode.loss_dice: 0.7374, decode.d0.loss_cls: 0.3924, decode.d0.loss_mask: 0.2825, decode.d0.loss_dice: 0.7990, decode.d1.loss_cls: 0.1967, decode.d1.loss_mask: 0.2701, decode.d1.loss_dice: 0.7613, decode.d2.loss_cls: 0.1837, decode.d2.loss_mask: 0.2677, decode.d2.loss_dice: 0.7444, decode.d3.loss_cls: 0.1715, decode.d3.loss_mask: 0.2663, decode.d3.loss_dice: 0.7381, decode.d4.loss_cls: 0.1715, decode.d4.loss_mask: 0.2667, decode.d4.loss_dice: 0.7385, decode.d5.loss_cls: 0.1666, decode.d5.loss_mask: 0.2668, decode.d5.loss_dice: 0.7413, decode.d6.loss_cls: 0.1646, decode.d6.loss_mask: 0.2664, decode.d6.loss_dice: 0.7326, decode.d7.loss_cls: 0.1624, decode.d7.loss_mask: 0.2670, decode.d7.loss_dice: 0.7313, decode.d8.loss_cls: 0.1673, decode.d8.loss_mask: 0.2669, decode.d8.loss_dice: 0.7336, loss: 12.0871 +2022-05-09 23:07:39,714 - mmseg - INFO - Iter 
[10450/80000] lr: 1.248e-06, eta: 1 day, 13:41:48, time: 1.827, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1774, decode.loss_mask: 0.2700, decode.loss_dice: 0.7398, decode.d0.loss_cls: 0.3894, decode.d0.loss_mask: 0.2878, decode.d0.loss_dice: 0.8030, decode.d1.loss_cls: 0.2095, decode.d1.loss_mask: 0.2726, decode.d1.loss_dice: 0.7579, decode.d2.loss_cls: 0.1946, decode.d2.loss_mask: 0.2708, decode.d2.loss_dice: 0.7486, decode.d3.loss_cls: 0.1889, decode.d3.loss_mask: 0.2700, decode.d3.loss_dice: 0.7454, decode.d4.loss_cls: 0.1868, decode.d4.loss_mask: 0.2695, decode.d4.loss_dice: 0.7435, decode.d5.loss_cls: 0.1820, decode.d5.loss_mask: 0.2696, decode.d5.loss_dice: 0.7456, decode.d6.loss_cls: 0.1844, decode.d6.loss_mask: 0.2702, decode.d6.loss_dice: 0.7417, decode.d7.loss_cls: 0.1778, decode.d7.loss_mask: 0.2703, decode.d7.loss_dice: 0.7420, decode.d8.loss_cls: 0.1806, decode.d8.loss_mask: 0.2699, decode.d8.loss_dice: 0.7386, loss: 12.2983 +2022-05-09 23:09:08,968 - mmseg - INFO - Iter [10500/80000] lr: 1.247e-06, eta: 1 day, 13:39:16, time: 1.787, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1613, decode.loss_mask: 0.2576, decode.loss_dice: 0.7384, decode.d0.loss_cls: 0.3753, decode.d0.loss_mask: 0.2741, decode.d0.loss_dice: 0.8088, decode.d1.loss_cls: 0.1892, decode.d1.loss_mask: 0.2602, decode.d1.loss_dice: 0.7597, decode.d2.loss_cls: 0.1764, decode.d2.loss_mask: 0.2577, decode.d2.loss_dice: 0.7527, decode.d3.loss_cls: 0.1757, decode.d3.loss_mask: 0.2575, decode.d3.loss_dice: 0.7394, decode.d4.loss_cls: 0.1757, decode.d4.loss_mask: 0.2573, decode.d4.loss_dice: 0.7471, decode.d5.loss_cls: 0.1719, decode.d5.loss_mask: 0.2577, decode.d5.loss_dice: 0.7405, decode.d6.loss_cls: 0.1698, decode.d6.loss_mask: 0.2590, decode.d6.loss_dice: 0.7370, decode.d7.loss_cls: 0.1642, decode.d7.loss_mask: 0.2579, decode.d7.loss_dice: 0.7422, decode.d8.loss_cls: 0.1645, decode.d8.loss_mask: 0.2580, decode.d8.loss_dice: 0.7399, loss: 12.0268 +2022-05-09 23:10:38,034 - 
mmseg - INFO - Iter [10550/80000] lr: 1.246e-06, eta: 1 day, 13:36:43, time: 1.781, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1690, decode.loss_mask: 0.2737, decode.loss_dice: 0.7492, decode.d0.loss_cls: 0.3823, decode.d0.loss_mask: 0.2953, decode.d0.loss_dice: 0.8105, decode.d1.loss_cls: 0.2025, decode.d1.loss_mask: 0.2796, decode.d1.loss_dice: 0.7716, decode.d2.loss_cls: 0.1915, decode.d2.loss_mask: 0.2759, decode.d2.loss_dice: 0.7601, decode.d3.loss_cls: 0.1804, decode.d3.loss_mask: 0.2747, decode.d3.loss_dice: 0.7546, decode.d4.loss_cls: 0.1765, decode.d4.loss_mask: 0.2750, decode.d4.loss_dice: 0.7535, decode.d5.loss_cls: 0.1832, decode.d5.loss_mask: 0.2735, decode.d5.loss_dice: 0.7538, decode.d6.loss_cls: 0.1762, decode.d6.loss_mask: 0.2739, decode.d6.loss_dice: 0.7511, decode.d7.loss_cls: 0.1672, decode.d7.loss_mask: 0.2755, decode.d7.loss_dice: 0.7527, decode.d8.loss_cls: 0.1736, decode.d8.loss_mask: 0.2736, decode.d8.loss_dice: 0.7491, loss: 12.3793 +2022-05-09 23:12:05,730 - mmseg - INFO - Iter [10600/80000] lr: 1.246e-06, eta: 1 day, 13:34:02, time: 1.754, data_time: 0.016, memory: 64699, decode.loss_cls: 0.1605, decode.loss_mask: 0.2755, decode.loss_dice: 0.7408, decode.d0.loss_cls: 0.3732, decode.d0.loss_mask: 0.2939, decode.d0.loss_dice: 0.8030, decode.d1.loss_cls: 0.1875, decode.d1.loss_mask: 0.2783, decode.d1.loss_dice: 0.7633, decode.d2.loss_cls: 0.1702, decode.d2.loss_mask: 0.2759, decode.d2.loss_dice: 0.7542, decode.d3.loss_cls: 0.1690, decode.d3.loss_mask: 0.2775, decode.d3.loss_dice: 0.7475, decode.d4.loss_cls: 0.1687, decode.d4.loss_mask: 0.2772, decode.d4.loss_dice: 0.7511, decode.d5.loss_cls: 0.1656, decode.d5.loss_mask: 0.2764, decode.d5.loss_dice: 0.7487, decode.d6.loss_cls: 0.1675, decode.d6.loss_mask: 0.2755, decode.d6.loss_dice: 0.7391, decode.d7.loss_cls: 0.1698, decode.d7.loss_mask: 0.2762, decode.d7.loss_dice: 0.7371, decode.d8.loss_cls: 0.1639, decode.d8.loss_mask: 0.2770, decode.d8.loss_dice: 0.7474, loss: 12.2114 
+2022-05-09 23:13:36,866 - mmseg - INFO - Iter [10650/80000] lr: 1.245e-06, eta: 1 day, 13:31:43, time: 1.823, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1700, decode.loss_mask: 0.2728, decode.loss_dice: 0.7370, decode.d0.loss_cls: 0.3834, decode.d0.loss_mask: 0.2926, decode.d0.loss_dice: 0.8091, decode.d1.loss_cls: 0.1961, decode.d1.loss_mask: 0.2802, decode.d1.loss_dice: 0.7632, decode.d2.loss_cls: 0.1879, decode.d2.loss_mask: 0.2748, decode.d2.loss_dice: 0.7497, decode.d3.loss_cls: 0.1773, decode.d3.loss_mask: 0.2735, decode.d3.loss_dice: 0.7401, decode.d4.loss_cls: 0.1751, decode.d4.loss_mask: 0.2742, decode.d4.loss_dice: 0.7417, decode.d5.loss_cls: 0.1686, decode.d5.loss_mask: 0.2726, decode.d5.loss_dice: 0.7411, decode.d6.loss_cls: 0.1715, decode.d6.loss_mask: 0.2726, decode.d6.loss_dice: 0.7398, decode.d7.loss_cls: 0.1739, decode.d7.loss_mask: 0.2728, decode.d7.loss_dice: 0.7379, decode.d8.loss_cls: 0.1670, decode.d8.loss_mask: 0.2729, decode.d8.loss_dice: 0.7396, loss: 12.2291 +2022-05-09 23:15:05,000 - mmseg - INFO - Iter [10700/80000] lr: 1.244e-06, eta: 1 day, 13:29:06, time: 1.763, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1610, decode.loss_mask: 0.2688, decode.loss_dice: 0.7362, decode.d0.loss_cls: 0.3943, decode.d0.loss_mask: 0.2798, decode.d0.loss_dice: 0.7917, decode.d1.loss_cls: 0.1894, decode.d1.loss_mask: 0.2709, decode.d1.loss_dice: 0.7538, decode.d2.loss_cls: 0.1775, decode.d2.loss_mask: 0.2680, decode.d2.loss_dice: 0.7426, decode.d3.loss_cls: 0.1601, decode.d3.loss_mask: 0.2674, decode.d3.loss_dice: 0.7318, decode.d4.loss_cls: 0.1685, decode.d4.loss_mask: 0.2683, decode.d4.loss_dice: 0.7409, decode.d5.loss_cls: 0.1610, decode.d5.loss_mask: 0.2684, decode.d5.loss_dice: 0.7398, decode.d6.loss_cls: 0.1662, decode.d6.loss_mask: 0.2683, decode.d6.loss_dice: 0.7327, decode.d7.loss_cls: 0.1634, decode.d7.loss_mask: 0.2686, decode.d7.loss_dice: 0.7290, decode.d8.loss_cls: 0.1651, decode.d8.loss_mask: 0.2685, decode.d8.loss_dice: 
0.7371, loss: 12.0390 +2022-05-09 23:16:32,993 - mmseg - INFO - Iter [10750/80000] lr: 1.243e-06, eta: 1 day, 13:26:28, time: 1.760, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1702, decode.loss_mask: 0.2672, decode.loss_dice: 0.7361, decode.d0.loss_cls: 0.3918, decode.d0.loss_mask: 0.2860, decode.d0.loss_dice: 0.7991, decode.d1.loss_cls: 0.1956, decode.d1.loss_mask: 0.2704, decode.d1.loss_dice: 0.7528, decode.d2.loss_cls: 0.1841, decode.d2.loss_mask: 0.2703, decode.d2.loss_dice: 0.7434, decode.d3.loss_cls: 0.1844, decode.d3.loss_mask: 0.2678, decode.d3.loss_dice: 0.7325, decode.d4.loss_cls: 0.1816, decode.d4.loss_mask: 0.2684, decode.d4.loss_dice: 0.7410, decode.d5.loss_cls: 0.1738, decode.d5.loss_mask: 0.2687, decode.d5.loss_dice: 0.7351, decode.d6.loss_cls: 0.1729, decode.d6.loss_mask: 0.2672, decode.d6.loss_dice: 0.7357, decode.d7.loss_cls: 0.1813, decode.d7.loss_mask: 0.2679, decode.d7.loss_dice: 0.7334, decode.d8.loss_cls: 0.1802, decode.d8.loss_mask: 0.2682, decode.d8.loss_dice: 0.7326, loss: 12.1596 +2022-05-09 23:18:04,417 - mmseg - INFO - Iter [10800/80000] lr: 1.242e-06, eta: 1 day, 13:24:13, time: 1.828, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1645, decode.loss_mask: 0.2711, decode.loss_dice: 0.7347, decode.d0.loss_cls: 0.3736, decode.d0.loss_mask: 0.2895, decode.d0.loss_dice: 0.7972, decode.d1.loss_cls: 0.1919, decode.d1.loss_mask: 0.2721, decode.d1.loss_dice: 0.7505, decode.d2.loss_cls: 0.1735, decode.d2.loss_mask: 0.2709, decode.d2.loss_dice: 0.7395, decode.d3.loss_cls: 0.1729, decode.d3.loss_mask: 0.2717, decode.d3.loss_dice: 0.7346, decode.d4.loss_cls: 0.1787, decode.d4.loss_mask: 0.2715, decode.d4.loss_dice: 0.7325, decode.d5.loss_cls: 0.1726, decode.d5.loss_mask: 0.2712, decode.d5.loss_dice: 0.7347, decode.d6.loss_cls: 0.1603, decode.d6.loss_mask: 0.2710, decode.d6.loss_dice: 0.7346, decode.d7.loss_cls: 0.1599, decode.d7.loss_mask: 0.2713, decode.d7.loss_dice: 0.7317, decode.d8.loss_cls: 0.1638, decode.d8.loss_mask: 0.2715, 
decode.d8.loss_dice: 0.7340, loss: 12.0673 +2022-05-09 23:19:33,186 - mmseg - INFO - Iter [10850/80000] lr: 1.241e-06, eta: 1 day, 13:21:41, time: 1.775, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1602, decode.loss_mask: 0.2678, decode.loss_dice: 0.7326, decode.d0.loss_cls: 0.3862, decode.d0.loss_mask: 0.2814, decode.d0.loss_dice: 0.7997, decode.d1.loss_cls: 0.1960, decode.d1.loss_mask: 0.2699, decode.d1.loss_dice: 0.7485, decode.d2.loss_cls: 0.1843, decode.d2.loss_mask: 0.2678, decode.d2.loss_dice: 0.7424, decode.d3.loss_cls: 0.1728, decode.d3.loss_mask: 0.2671, decode.d3.loss_dice: 0.7331, decode.d4.loss_cls: 0.1743, decode.d4.loss_mask: 0.2673, decode.d4.loss_dice: 0.7334, decode.d5.loss_cls: 0.1696, decode.d5.loss_mask: 0.2676, decode.d5.loss_dice: 0.7331, decode.d6.loss_cls: 0.1679, decode.d6.loss_mask: 0.2681, decode.d6.loss_dice: 0.7324, decode.d7.loss_cls: 0.1651, decode.d7.loss_mask: 0.2673, decode.d7.loss_dice: 0.7291, decode.d8.loss_cls: 0.1693, decode.d8.loss_mask: 0.2674, decode.d8.loss_dice: 0.7302, loss: 12.0519 +2022-05-09 23:21:01,352 - mmseg - INFO - Iter [10900/80000] lr: 1.240e-06, eta: 1 day, 13:19:06, time: 1.763, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1727, decode.loss_mask: 0.2629, decode.loss_dice: 0.7473, decode.d0.loss_cls: 0.3807, decode.d0.loss_mask: 0.2771, decode.d0.loss_dice: 0.8096, decode.d1.loss_cls: 0.1952, decode.d1.loss_mask: 0.2678, decode.d1.loss_dice: 0.7675, decode.d2.loss_cls: 0.1806, decode.d2.loss_mask: 0.2648, decode.d2.loss_dice: 0.7610, decode.d3.loss_cls: 0.1763, decode.d3.loss_mask: 0.2652, decode.d3.loss_dice: 0.7470, decode.d4.loss_cls: 0.1798, decode.d4.loss_mask: 0.2652, decode.d4.loss_dice: 0.7535, decode.d5.loss_cls: 0.1780, decode.d5.loss_mask: 0.2640, decode.d5.loss_dice: 0.7483, decode.d6.loss_cls: 0.1740, decode.d6.loss_mask: 0.2635, decode.d6.loss_dice: 0.7482, decode.d7.loss_cls: 0.1737, decode.d7.loss_mask: 0.2632, decode.d7.loss_dice: 0.7433, decode.d8.loss_cls: 0.1747, 
decode.d8.loss_mask: 0.2634, decode.d8.loss_dice: 0.7465, loss: 12.2149 +2022-05-09 23:22:29,242 - mmseg - INFO - Iter [10950/80000] lr: 1.239e-06, eta: 1 day, 13:16:30, time: 1.758, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1594, decode.loss_mask: 0.2672, decode.loss_dice: 0.7512, decode.d0.loss_cls: 0.3681, decode.d0.loss_mask: 0.2824, decode.d0.loss_dice: 0.8015, decode.d1.loss_cls: 0.1836, decode.d1.loss_mask: 0.2717, decode.d1.loss_dice: 0.7659, decode.d2.loss_cls: 0.1725, decode.d2.loss_mask: 0.2683, decode.d2.loss_dice: 0.7556, decode.d3.loss_cls: 0.1652, decode.d3.loss_mask: 0.2675, decode.d3.loss_dice: 0.7464, decode.d4.loss_cls: 0.1656, decode.d4.loss_mask: 0.2672, decode.d4.loss_dice: 0.7493, decode.d5.loss_cls: 0.1581, decode.d5.loss_mask: 0.2676, decode.d5.loss_dice: 0.7543, decode.d6.loss_cls: 0.1566, decode.d6.loss_mask: 0.2673, decode.d6.loss_dice: 0.7515, decode.d7.loss_cls: 0.1588, decode.d7.loss_mask: 0.2673, decode.d7.loss_dice: 0.7553, decode.d8.loss_cls: 0.1570, decode.d8.loss_mask: 0.2676, decode.d8.loss_dice: 0.7509, loss: 12.1210 +2022-05-09 23:24:00,116 - mmseg - INFO - Saving checkpoint at 11000 iterations +2022-05-09 23:24:34,400 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 23:24:34,408 - mmseg - INFO - Iter [11000/80000] lr: 1.238e-06, eta: 1 day, 13:17:48, time: 2.501, data_time: 0.063, memory: 64699, decode.loss_cls: 0.1672, decode.loss_mask: 0.2681, decode.loss_dice: 0.7369, decode.d0.loss_cls: 0.4027, decode.d0.loss_mask: 0.2830, decode.d0.loss_dice: 0.7948, decode.d1.loss_cls: 0.1909, decode.d1.loss_mask: 0.2706, decode.d1.loss_dice: 0.7530, decode.d2.loss_cls: 0.1812, decode.d2.loss_mask: 0.2692, decode.d2.loss_dice: 0.7487, decode.d3.loss_cls: 0.1678, decode.d3.loss_mask: 0.2678, decode.d3.loss_dice: 0.7436, decode.d4.loss_cls: 0.1719, decode.d4.loss_mask: 0.2675, decode.d4.loss_dice: 0.7387, decode.d5.loss_cls: 0.1689, decode.d5.loss_mask: 0.2682, decode.d5.loss_dice: 
0.7402, decode.d6.loss_cls: 0.1674, decode.d6.loss_mask: 0.2685, decode.d6.loss_dice: 0.7386, decode.d7.loss_cls: 0.1711, decode.d7.loss_mask: 0.2695, decode.d7.loss_dice: 0.7402, decode.d8.loss_cls: 0.1657, decode.d8.loss_mask: 0.2684, decode.d8.loss_dice: 0.7333, loss: 12.1237 +2022-05-09 23:26:30,010 - mmseg - INFO - per class results: +2022-05-09 23:26:30,014 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.76 | 99.21 | +| sidewalk | 89.67 | 95.4 | +| building | 94.33 | 97.08 | +| wall | 71.8 | 77.52 | +| fence | 73.34 | 83.19 | +| pole | 71.81 | 85.64 | +| traffic light | 77.04 | 89.29 | +| traffic sign | 83.97 | 89.92 | +| vegetation | 93.48 | 96.81 | +| terrain | 68.86 | 81.15 | +| sky | 95.97 | 98.36 | +| person | 87.26 | 92.99 | +| rider | 74.66 | 85.76 | +| car | 96.44 | 98.07 | +| truck | 91.08 | 95.59 | +| bus | 93.56 | 96.5 | +| train | 87.74 | 90.5 | +| motorcycle | 75.29 | 87.92 | +| bicycle | 82.66 | 92.86 | ++---------------+-------+-------+ +2022-05-09 23:26:30,015 - mmseg - INFO - Summary: +2022-05-09 23:26:30,015 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.1 | 84.62 | 91.25 | ++------+-------+-------+ +2022-05-09 23:26:30,016 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_8000.pth was removed +2022-05-09 23:26:59,712 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_11000.pth. +2022-05-09 23:26:59,725 - mmseg - INFO - Best mIoU is 0.8462 at 11000 iter. 
+2022-05-09 23:26:59,736 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 23:26:59,736 - mmseg - INFO - Iter(val) [32] aAcc: 0.9710, mIoU: 0.8462, mAcc: 0.9125, IoU.road: 0.9876, IoU.sidewalk: 0.8967, IoU.building: 0.9433, IoU.wall: 0.7180, IoU.fence: 0.7334, IoU.pole: 0.7181, IoU.traffic light: 0.7704, IoU.traffic sign: 0.8397, IoU.vegetation: 0.9348, IoU.terrain: 0.6886, IoU.sky: 0.9597, IoU.person: 0.8726, IoU.rider: 0.7466, IoU.car: 0.9644, IoU.truck: 0.9108, IoU.bus: 0.9356, IoU.train: 0.8774, IoU.motorcycle: 0.7529, IoU.bicycle: 0.8266, Acc.road: 0.9921, Acc.sidewalk: 0.9540, Acc.building: 0.9708, Acc.wall: 0.7752, Acc.fence: 0.8319, Acc.pole: 0.8564, Acc.traffic light: 0.8929, Acc.traffic sign: 0.8992, Acc.vegetation: 0.9681, Acc.terrain: 0.8115, Acc.sky: 0.9836, Acc.person: 0.9299, Acc.rider: 0.8576, Acc.car: 0.9807, Acc.truck: 0.9559, Acc.bus: 0.9650, Acc.train: 0.9050, Acc.motorcycle: 0.8792, Acc.bicycle: 0.9286 +2022-05-09 23:28:29,226 - mmseg - INFO - Iter [11050/80000] lr: 1.237e-06, eta: 1 day, 13:30:30, time: 4.699, data_time: 2.929, memory: 64699, decode.loss_cls: 0.1484, decode.loss_mask: 0.2671, decode.loss_dice: 0.7314, decode.d0.loss_cls: 0.3809, decode.d0.loss_mask: 0.2817, decode.d0.loss_dice: 0.7860, decode.d1.loss_cls: 0.1691, decode.d1.loss_mask: 0.2713, decode.d1.loss_dice: 0.7514, decode.d2.loss_cls: 0.1582, decode.d2.loss_mask: 0.2688, decode.d2.loss_dice: 0.7374, decode.d3.loss_cls: 0.1522, decode.d3.loss_mask: 0.2677, decode.d3.loss_dice: 0.7292, decode.d4.loss_cls: 0.1527, decode.d4.loss_mask: 0.2676, decode.d4.loss_dice: 0.7270, decode.d5.loss_cls: 0.1503, decode.d5.loss_mask: 0.2679, decode.d5.loss_dice: 0.7314, decode.d6.loss_cls: 0.1451, decode.d6.loss_mask: 0.2668, decode.d6.loss_dice: 0.7279, decode.d7.loss_cls: 0.1484, decode.d7.loss_mask: 0.2671, decode.d7.loss_dice: 0.7299, decode.d8.loss_cls: 0.1532, decode.d8.loss_mask: 0.2670, decode.d8.loss_dice: 0.7248, loss: 11.8279 
+2022-05-09 23:29:58,804 - mmseg - INFO - Iter [11100/80000] lr: 1.237e-06, eta: 1 day, 13:28:00, time: 1.791, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1614, decode.loss_mask: 0.2746, decode.loss_dice: 0.7419, decode.d0.loss_cls: 0.3705, decode.d0.loss_mask: 0.2885, decode.d0.loss_dice: 0.8068, decode.d1.loss_cls: 0.1858, decode.d1.loss_mask: 0.2764, decode.d1.loss_dice: 0.7625, decode.d2.loss_cls: 0.1772, decode.d2.loss_mask: 0.2767, decode.d2.loss_dice: 0.7554, decode.d3.loss_cls: 0.1727, decode.d3.loss_mask: 0.2751, decode.d3.loss_dice: 0.7427, decode.d4.loss_cls: 0.1690, decode.d4.loss_mask: 0.2753, decode.d4.loss_dice: 0.7405, decode.d5.loss_cls: 0.1637, decode.d5.loss_mask: 0.2752, decode.d5.loss_dice: 0.7484, decode.d6.loss_cls: 0.1679, decode.d6.loss_mask: 0.2741, decode.d6.loss_dice: 0.7428, decode.d7.loss_cls: 0.1609, decode.d7.loss_mask: 0.2744, decode.d7.loss_dice: 0.7414, decode.d8.loss_cls: 0.1661, decode.d8.loss_mask: 0.2745, decode.d8.loss_dice: 0.7433, loss: 12.1856 +2022-05-09 23:31:27,307 - mmseg - INFO - Iter [11150/80000] lr: 1.236e-06, eta: 1 day, 13:25:24, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1504, decode.loss_mask: 0.2601, decode.loss_dice: 0.7417, decode.d0.loss_cls: 0.3597, decode.d0.loss_mask: 0.2743, decode.d0.loss_dice: 0.8095, decode.d1.loss_cls: 0.1825, decode.d1.loss_mask: 0.2633, decode.d1.loss_dice: 0.7665, decode.d2.loss_cls: 0.1700, decode.d2.loss_mask: 0.2632, decode.d2.loss_dice: 0.7593, decode.d3.loss_cls: 0.1655, decode.d3.loss_mask: 0.2607, decode.d3.loss_dice: 0.7418, decode.d4.loss_cls: 0.1577, decode.d4.loss_mask: 0.2609, decode.d4.loss_dice: 0.7468, decode.d5.loss_cls: 0.1605, decode.d5.loss_mask: 0.2599, decode.d5.loss_dice: 0.7444, decode.d6.loss_cls: 0.1615, decode.d6.loss_mask: 0.2601, decode.d6.loss_dice: 0.7386, decode.d7.loss_cls: 0.1558, decode.d7.loss_mask: 0.2596, decode.d7.loss_dice: 0.7434, decode.d8.loss_cls: 0.1613, decode.d8.loss_mask: 0.2597, decode.d8.loss_dice: 
0.7413, loss: 11.9799 +2022-05-09 23:32:58,249 - mmseg - INFO - Iter [11200/80000] lr: 1.235e-06, eta: 1 day, 13:23:04, time: 1.819, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1609, decode.loss_mask: 0.2761, decode.loss_dice: 0.7292, decode.d0.loss_cls: 0.3905, decode.d0.loss_mask: 0.2923, decode.d0.loss_dice: 0.7846, decode.d1.loss_cls: 0.1918, decode.d1.loss_mask: 0.2777, decode.d1.loss_dice: 0.7396, decode.d2.loss_cls: 0.1795, decode.d2.loss_mask: 0.2769, decode.d2.loss_dice: 0.7344, decode.d3.loss_cls: 0.1651, decode.d3.loss_mask: 0.2765, decode.d3.loss_dice: 0.7272, decode.d4.loss_cls: 0.1649, decode.d4.loss_mask: 0.2762, decode.d4.loss_dice: 0.7302, decode.d5.loss_cls: 0.1607, decode.d5.loss_mask: 0.2776, decode.d5.loss_dice: 0.7297, decode.d6.loss_cls: 0.1555, decode.d6.loss_mask: 0.2763, decode.d6.loss_dice: 0.7268, decode.d7.loss_cls: 0.1625, decode.d7.loss_mask: 0.2745, decode.d7.loss_dice: 0.7229, decode.d8.loss_cls: 0.1625, decode.d8.loss_mask: 0.2751, decode.d8.loss_dice: 0.7213, loss: 12.0190 +2022-05-09 23:34:26,611 - mmseg - INFO - Iter [11250/80000] lr: 1.234e-06, eta: 1 day, 13:20:28, time: 1.767, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1454, decode.loss_mask: 0.2568, decode.loss_dice: 0.7204, decode.d0.loss_cls: 0.3808, decode.d0.loss_mask: 0.2733, decode.d0.loss_dice: 0.7807, decode.d1.loss_cls: 0.1790, decode.d1.loss_mask: 0.2606, decode.d1.loss_dice: 0.7424, decode.d2.loss_cls: 0.1631, decode.d2.loss_mask: 0.2575, decode.d2.loss_dice: 0.7286, decode.d3.loss_cls: 0.1528, decode.d3.loss_mask: 0.2579, decode.d3.loss_dice: 0.7225, decode.d4.loss_cls: 0.1545, decode.d4.loss_mask: 0.2565, decode.d4.loss_dice: 0.7182, decode.d5.loss_cls: 0.1587, decode.d5.loss_mask: 0.2569, decode.d5.loss_dice: 0.7232, decode.d6.loss_cls: 0.1507, decode.d6.loss_mask: 0.2566, decode.d6.loss_dice: 0.7211, decode.d7.loss_cls: 0.1485, decode.d7.loss_mask: 0.2567, decode.d7.loss_dice: 0.7191, decode.d8.loss_cls: 0.1486, decode.d8.loss_mask: 0.2571, 
decode.d8.loss_dice: 0.7205, loss: 11.6684 +2022-05-09 23:35:55,074 - mmseg - INFO - Iter [11300/80000] lr: 1.233e-06, eta: 1 day, 13:17:54, time: 1.769, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1518, decode.loss_mask: 0.2731, decode.loss_dice: 0.7243, decode.d0.loss_cls: 0.3602, decode.d0.loss_mask: 0.2881, decode.d0.loss_dice: 0.7825, decode.d1.loss_cls: 0.1757, decode.d1.loss_mask: 0.2757, decode.d1.loss_dice: 0.7451, decode.d2.loss_cls: 0.1571, decode.d2.loss_mask: 0.2735, decode.d2.loss_dice: 0.7369, decode.d3.loss_cls: 0.1484, decode.d3.loss_mask: 0.2727, decode.d3.loss_dice: 0.7270, decode.d4.loss_cls: 0.1474, decode.d4.loss_mask: 0.2733, decode.d4.loss_dice: 0.7313, decode.d5.loss_cls: 0.1509, decode.d5.loss_mask: 0.2732, decode.d5.loss_dice: 0.7310, decode.d6.loss_cls: 0.1516, decode.d6.loss_mask: 0.2724, decode.d6.loss_dice: 0.7260, decode.d7.loss_cls: 0.1510, decode.d7.loss_mask: 0.2724, decode.d7.loss_dice: 0.7287, decode.d8.loss_cls: 0.1500, decode.d8.loss_mask: 0.2726, decode.d8.loss_dice: 0.7307, loss: 11.8545 +2022-05-09 23:37:26,074 - mmseg - INFO - Iter [11350/80000] lr: 1.232e-06, eta: 1 day, 13:15:36, time: 1.820, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1655, decode.loss_mask: 0.2703, decode.loss_dice: 0.7476, decode.d0.loss_cls: 0.3699, decode.d0.loss_mask: 0.2876, decode.d0.loss_dice: 0.8024, decode.d1.loss_cls: 0.1762, decode.d1.loss_mask: 0.2737, decode.d1.loss_dice: 0.7624, decode.d2.loss_cls: 0.1704, decode.d2.loss_mask: 0.2723, decode.d2.loss_dice: 0.7467, decode.d3.loss_cls: 0.1675, decode.d3.loss_mask: 0.2702, decode.d3.loss_dice: 0.7461, decode.d4.loss_cls: 0.1676, decode.d4.loss_mask: 0.2712, decode.d4.loss_dice: 0.7470, decode.d5.loss_cls: 0.1566, decode.d5.loss_mask: 0.2706, decode.d5.loss_dice: 0.7493, decode.d6.loss_cls: 0.1626, decode.d6.loss_mask: 0.2714, decode.d6.loss_dice: 0.7440, decode.d7.loss_cls: 0.1563, decode.d7.loss_mask: 0.2711, decode.d7.loss_dice: 0.7427, decode.d8.loss_cls: 0.1610, 
decode.d8.loss_mask: 0.2708, decode.d8.loss_dice: 0.7482, loss: 12.1188 +2022-05-09 23:38:54,581 - mmseg - INFO - Iter [11400/80000] lr: 1.231e-06, eta: 1 day, 13:13:01, time: 1.766, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1597, decode.loss_mask: 0.2703, decode.loss_dice: 0.7253, decode.d0.loss_cls: 0.3771, decode.d0.loss_mask: 0.2850, decode.d0.loss_dice: 0.7945, decode.d1.loss_cls: 0.1883, decode.d1.loss_mask: 0.2728, decode.d1.loss_dice: 0.7466, decode.d2.loss_cls: 0.1751, decode.d2.loss_mask: 0.2716, decode.d2.loss_dice: 0.7380, decode.d3.loss_cls: 0.1647, decode.d3.loss_mask: 0.2712, decode.d3.loss_dice: 0.7324, decode.d4.loss_cls: 0.1628, decode.d4.loss_mask: 0.2708, decode.d4.loss_dice: 0.7333, decode.d5.loss_cls: 0.1653, decode.d5.loss_mask: 0.2706, decode.d5.loss_dice: 0.7278, decode.d6.loss_cls: 0.1643, decode.d6.loss_mask: 0.2702, decode.d6.loss_dice: 0.7303, decode.d7.loss_cls: 0.1625, decode.d7.loss_mask: 0.2699, decode.d7.loss_dice: 0.7214, decode.d8.loss_cls: 0.1633, decode.d8.loss_mask: 0.2701, decode.d8.loss_dice: 0.7232, loss: 11.9788 +2022-05-09 23:40:24,279 - mmseg - INFO - Iter [11450/80000] lr: 1.230e-06, eta: 1 day, 13:10:37, time: 1.798, data_time: 0.025, memory: 64699, decode.loss_cls: 0.1575, decode.loss_mask: 0.2617, decode.loss_dice: 0.7267, decode.d0.loss_cls: 0.3729, decode.d0.loss_mask: 0.2788, decode.d0.loss_dice: 0.7792, decode.d1.loss_cls: 0.1906, decode.d1.loss_mask: 0.2629, decode.d1.loss_dice: 0.7401, decode.d2.loss_cls: 0.1780, decode.d2.loss_mask: 0.2614, decode.d2.loss_dice: 0.7333, decode.d3.loss_cls: 0.1632, decode.d3.loss_mask: 0.2625, decode.d3.loss_dice: 0.7310, decode.d4.loss_cls: 0.1656, decode.d4.loss_mask: 0.2618, decode.d4.loss_dice: 0.7266, decode.d5.loss_cls: 0.1641, decode.d5.loss_mask: 0.2617, decode.d5.loss_dice: 0.7251, decode.d6.loss_cls: 0.1646, decode.d6.loss_mask: 0.2611, decode.d6.loss_dice: 0.7254, decode.d7.loss_cls: 0.1631, decode.d7.loss_mask: 0.2609, decode.d7.loss_dice: 0.7238, 
decode.d8.loss_cls: 0.1611, decode.d8.loss_mask: 0.2609, decode.d8.loss_dice: 0.7271, loss: 11.8527 +2022-05-09 23:41:53,823 - mmseg - INFO - Iter [11500/80000] lr: 1.229e-06, eta: 1 day, 13:08:12, time: 1.791, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1554, decode.loss_mask: 0.2610, decode.loss_dice: 0.7285, decode.d0.loss_cls: 0.3681, decode.d0.loss_mask: 0.2763, decode.d0.loss_dice: 0.7935, decode.d1.loss_cls: 0.1820, decode.d1.loss_mask: 0.2645, decode.d1.loss_dice: 0.7516, decode.d2.loss_cls: 0.1761, decode.d2.loss_mask: 0.2636, decode.d2.loss_dice: 0.7393, decode.d3.loss_cls: 0.1646, decode.d3.loss_mask: 0.2624, decode.d3.loss_dice: 0.7270, decode.d4.loss_cls: 0.1664, decode.d4.loss_mask: 0.2616, decode.d4.loss_dice: 0.7286, decode.d5.loss_cls: 0.1556, decode.d5.loss_mask: 0.2619, decode.d5.loss_dice: 0.7282, decode.d6.loss_cls: 0.1584, decode.d6.loss_mask: 0.2620, decode.d6.loss_dice: 0.7312, decode.d7.loss_cls: 0.1558, decode.d7.loss_mask: 0.2613, decode.d7.loss_dice: 0.7290, decode.d8.loss_cls: 0.1682, decode.d8.loss_mask: 0.2611, decode.d8.loss_dice: 0.7254, loss: 11.8687 +2022-05-09 23:43:25,262 - mmseg - INFO - Iter [11550/80000] lr: 1.229e-06, eta: 1 day, 13:05:57, time: 1.829, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1573, decode.loss_mask: 0.2671, decode.loss_dice: 0.7373, decode.d0.loss_cls: 0.3854, decode.d0.loss_mask: 0.2836, decode.d0.loss_dice: 0.8002, decode.d1.loss_cls: 0.1923, decode.d1.loss_mask: 0.2684, decode.d1.loss_dice: 0.7570, decode.d2.loss_cls: 0.1754, decode.d2.loss_mask: 0.2677, decode.d2.loss_dice: 0.7455, decode.d3.loss_cls: 0.1650, decode.d3.loss_mask: 0.2682, decode.d3.loss_dice: 0.7349, decode.d4.loss_cls: 0.1647, decode.d4.loss_mask: 0.2675, decode.d4.loss_dice: 0.7414, decode.d5.loss_cls: 0.1598, decode.d5.loss_mask: 0.2671, decode.d5.loss_dice: 0.7408, decode.d6.loss_cls: 0.1568, decode.d6.loss_mask: 0.2668, decode.d6.loss_dice: 0.7393, decode.d7.loss_cls: 0.1690, decode.d7.loss_mask: 0.2666, 
decode.d7.loss_dice: 0.7367, decode.d8.loss_cls: 0.1587, decode.d8.loss_mask: 0.2670, decode.d8.loss_dice: 0.7367, loss: 12.0443 +2022-05-09 23:44:53,535 - mmseg - INFO - Iter [11600/80000] lr: 1.228e-06, eta: 1 day, 13:03:25, time: 1.765, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1596, decode.loss_mask: 0.2645, decode.loss_dice: 0.7386, decode.d0.loss_cls: 0.3703, decode.d0.loss_mask: 0.2824, decode.d0.loss_dice: 0.8002, decode.d1.loss_cls: 0.1833, decode.d1.loss_mask: 0.2700, decode.d1.loss_dice: 0.7634, decode.d2.loss_cls: 0.1701, decode.d2.loss_mask: 0.2653, decode.d2.loss_dice: 0.7485, decode.d3.loss_cls: 0.1610, decode.d3.loss_mask: 0.2647, decode.d3.loss_dice: 0.7404, decode.d4.loss_cls: 0.1582, decode.d4.loss_mask: 0.2654, decode.d4.loss_dice: 0.7379, decode.d5.loss_cls: 0.1591, decode.d5.loss_mask: 0.2649, decode.d5.loss_dice: 0.7397, decode.d6.loss_cls: 0.1459, decode.d6.loss_mask: 0.2659, decode.d6.loss_dice: 0.7393, decode.d7.loss_cls: 0.1611, decode.d7.loss_mask: 0.2653, decode.d7.loss_dice: 0.7374, decode.d8.loss_cls: 0.1581, decode.d8.loss_mask: 0.2645, decode.d8.loss_dice: 0.7366, loss: 11.9815 +2022-05-09 23:46:22,035 - mmseg - INFO - Iter [11650/80000] lr: 1.227e-06, eta: 1 day, 13:00:55, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1475, decode.loss_mask: 0.2712, decode.loss_dice: 0.7216, decode.d0.loss_cls: 0.3688, decode.d0.loss_mask: 0.2879, decode.d0.loss_dice: 0.7816, decode.d1.loss_cls: 0.1753, decode.d1.loss_mask: 0.2769, decode.d1.loss_dice: 0.7427, decode.d2.loss_cls: 0.1525, decode.d2.loss_mask: 0.2752, decode.d2.loss_dice: 0.7373, decode.d3.loss_cls: 0.1569, decode.d3.loss_mask: 0.2727, decode.d3.loss_dice: 0.7294, decode.d4.loss_cls: 0.1589, decode.d4.loss_mask: 0.2726, decode.d4.loss_dice: 0.7290, decode.d5.loss_cls: 0.1526, decode.d5.loss_mask: 0.2720, decode.d5.loss_dice: 0.7253, decode.d6.loss_cls: 0.1455, decode.d6.loss_mask: 0.2727, decode.d6.loss_dice: 0.7220, decode.d7.loss_cls: 0.1499, 
decode.d7.loss_mask: 0.2723, decode.d7.loss_dice: 0.7224, decode.d8.loss_cls: 0.1475, decode.d8.loss_mask: 0.2719, decode.d8.loss_dice: 0.7216, loss: 11.8338 +2022-05-09 23:47:50,207 - mmseg - INFO - Iter [11700/80000] lr: 1.226e-06, eta: 1 day, 12:58:23, time: 1.763, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1518, decode.loss_mask: 0.2701, decode.loss_dice: 0.7312, decode.d0.loss_cls: 0.3595, decode.d0.loss_mask: 0.2830, decode.d0.loss_dice: 0.7849, decode.d1.loss_cls: 0.1693, decode.d1.loss_mask: 0.2724, decode.d1.loss_dice: 0.7500, decode.d2.loss_cls: 0.1637, decode.d2.loss_mask: 0.2711, decode.d2.loss_dice: 0.7411, decode.d3.loss_cls: 0.1500, decode.d3.loss_mask: 0.2703, decode.d3.loss_dice: 0.7305, decode.d4.loss_cls: 0.1502, decode.d4.loss_mask: 0.2711, decode.d4.loss_dice: 0.7313, decode.d5.loss_cls: 0.1488, decode.d5.loss_mask: 0.2707, decode.d5.loss_dice: 0.7337, decode.d6.loss_cls: 0.1501, decode.d6.loss_mask: 0.2701, decode.d6.loss_dice: 0.7252, decode.d7.loss_cls: 0.1471, decode.d7.loss_mask: 0.2702, decode.d7.loss_dice: 0.7311, decode.d8.loss_cls: 0.1491, decode.d8.loss_mask: 0.2697, decode.d8.loss_dice: 0.7326, loss: 11.8497 +2022-05-09 23:49:21,334 - mmseg - INFO - Iter [11750/80000] lr: 1.225e-06, eta: 1 day, 12:56:09, time: 1.823, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1489, decode.loss_mask: 0.2692, decode.loss_dice: 0.7320, decode.d0.loss_cls: 0.3654, decode.d0.loss_mask: 0.2857, decode.d0.loss_dice: 0.7950, decode.d1.loss_cls: 0.1706, decode.d1.loss_mask: 0.2730, decode.d1.loss_dice: 0.7551, decode.d2.loss_cls: 0.1628, decode.d2.loss_mask: 0.2711, decode.d2.loss_dice: 0.7475, decode.d3.loss_cls: 0.1573, decode.d3.loss_mask: 0.2703, decode.d3.loss_dice: 0.7348, decode.d4.loss_cls: 0.1501, decode.d4.loss_mask: 0.2700, decode.d4.loss_dice: 0.7326, decode.d5.loss_cls: 0.1625, decode.d5.loss_mask: 0.2707, decode.d5.loss_dice: 0.7343, decode.d6.loss_cls: 0.1533, decode.d6.loss_mask: 0.2696, decode.d6.loss_dice: 0.7339, 
decode.d7.loss_cls: 0.1500, decode.d7.loss_mask: 0.2687, decode.d7.loss_dice: 0.7319, decode.d8.loss_cls: 0.1592, decode.d8.loss_mask: 0.2689, decode.d8.loss_dice: 0.7292, loss: 11.9235 +2022-05-09 23:50:49,634 - mmseg - INFO - Iter [11800/80000] lr: 1.224e-06, eta: 1 day, 12:53:39, time: 1.766, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1450, decode.loss_mask: 0.2697, decode.loss_dice: 0.7262, decode.d0.loss_cls: 0.3832, decode.d0.loss_mask: 0.2840, decode.d0.loss_dice: 0.7836, decode.d1.loss_cls: 0.1740, decode.d1.loss_mask: 0.2721, decode.d1.loss_dice: 0.7469, decode.d2.loss_cls: 0.1650, decode.d2.loss_mask: 0.2710, decode.d2.loss_dice: 0.7357, decode.d3.loss_cls: 0.1451, decode.d3.loss_mask: 0.2720, decode.d3.loss_dice: 0.7303, decode.d4.loss_cls: 0.1578, decode.d4.loss_mask: 0.2716, decode.d4.loss_dice: 0.7301, decode.d5.loss_cls: 0.1568, decode.d5.loss_mask: 0.2708, decode.d5.loss_dice: 0.7309, decode.d6.loss_cls: 0.1449, decode.d6.loss_mask: 0.2692, decode.d6.loss_dice: 0.7264, decode.d7.loss_cls: 0.1492, decode.d7.loss_mask: 0.2692, decode.d7.loss_dice: 0.7257, decode.d8.loss_cls: 0.1400, decode.d8.loss_mask: 0.2699, decode.d8.loss_dice: 0.7245, loss: 11.8409 +2022-05-09 23:52:19,115 - mmseg - INFO - Iter [11850/80000] lr: 1.223e-06, eta: 1 day, 12:51:16, time: 1.789, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1596, decode.loss_mask: 0.2680, decode.loss_dice: 0.7163, decode.d0.loss_cls: 0.3755, decode.d0.loss_mask: 0.2837, decode.d0.loss_dice: 0.7799, decode.d1.loss_cls: 0.1854, decode.d1.loss_mask: 0.2716, decode.d1.loss_dice: 0.7364, decode.d2.loss_cls: 0.1733, decode.d2.loss_mask: 0.2674, decode.d2.loss_dice: 0.7249, decode.d3.loss_cls: 0.1644, decode.d3.loss_mask: 0.2667, decode.d3.loss_dice: 0.7199, decode.d4.loss_cls: 0.1605, decode.d4.loss_mask: 0.2667, decode.d4.loss_dice: 0.7175, decode.d5.loss_cls: 0.1636, decode.d5.loss_mask: 0.2660, decode.d5.loss_dice: 0.7151, decode.d6.loss_cls: 0.1560, decode.d6.loss_mask: 0.2664, 
decode.d6.loss_dice: 0.7187, decode.d7.loss_cls: 0.1623, decode.d7.loss_mask: 0.2666, decode.d7.loss_dice: 0.7137, decode.d8.loss_cls: 0.1578, decode.d8.loss_mask: 0.2676, decode.d8.loss_dice: 0.7159, loss: 11.8073 +2022-05-09 23:53:47,016 - mmseg - INFO - Iter [11900/80000] lr: 1.222e-06, eta: 1 day, 12:48:45, time: 1.758, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1630, decode.loss_mask: 0.2644, decode.loss_dice: 0.7265, decode.d0.loss_cls: 0.3819, decode.d0.loss_mask: 0.2801, decode.d0.loss_dice: 0.7858, decode.d1.loss_cls: 0.1879, decode.d1.loss_mask: 0.2677, decode.d1.loss_dice: 0.7445, decode.d2.loss_cls: 0.1822, decode.d2.loss_mask: 0.2660, decode.d2.loss_dice: 0.7353, decode.d3.loss_cls: 0.1697, decode.d3.loss_mask: 0.2648, decode.d3.loss_dice: 0.7286, decode.d4.loss_cls: 0.1689, decode.d4.loss_mask: 0.2638, decode.d4.loss_dice: 0.7281, decode.d5.loss_cls: 0.1648, decode.d5.loss_mask: 0.2641, decode.d5.loss_dice: 0.7330, decode.d6.loss_cls: 0.1661, decode.d6.loss_mask: 0.2644, decode.d6.loss_dice: 0.7272, decode.d7.loss_cls: 0.1641, decode.d7.loss_mask: 0.2643, decode.d7.loss_dice: 0.7283, decode.d8.loss_cls: 0.1652, decode.d8.loss_mask: 0.2638, decode.d8.loss_dice: 0.7279, loss: 11.9424 +2022-05-09 23:55:18,420 - mmseg - INFO - Iter [11950/80000] lr: 1.221e-06, eta: 1 day, 12:46:34, time: 1.828, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1387, decode.loss_mask: 0.2585, decode.loss_dice: 0.7235, decode.d0.loss_cls: 0.3661, decode.d0.loss_mask: 0.2735, decode.d0.loss_dice: 0.7760, decode.d1.loss_cls: 0.1727, decode.d1.loss_mask: 0.2603, decode.d1.loss_dice: 0.7430, decode.d2.loss_cls: 0.1553, decode.d2.loss_mask: 0.2586, decode.d2.loss_dice: 0.7301, decode.d3.loss_cls: 0.1353, decode.d3.loss_mask: 0.2581, decode.d3.loss_dice: 0.7262, decode.d4.loss_cls: 0.1494, decode.d4.loss_mask: 0.2574, decode.d4.loss_dice: 0.7195, decode.d5.loss_cls: 0.1473, decode.d5.loss_mask: 0.2579, decode.d5.loss_dice: 0.7223, decode.d6.loss_cls: 0.1441, 
decode.d6.loss_mask: 0.2583, decode.d6.loss_dice: 0.7192, decode.d7.loss_cls: 0.1455, decode.d7.loss_mask: 0.2585, decode.d7.loss_dice: 0.7203, decode.d8.loss_cls: 0.1365, decode.d8.loss_mask: 0.2586, decode.d8.loss_dice: 0.7213, loss: 11.5922 +2022-05-09 23:56:47,105 - mmseg - INFO - Saving checkpoint at 12000 iterations +2022-05-09 23:57:17,278 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 23:57:17,287 - mmseg - INFO - Iter [12000/80000] lr: 1.220e-06, eta: 1 day, 12:46:58, time: 2.374, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1574, decode.loss_mask: 0.2670, decode.loss_dice: 0.7382, decode.d0.loss_cls: 0.3678, decode.d0.loss_mask: 0.2819, decode.d0.loss_dice: 0.7997, decode.d1.loss_cls: 0.1820, decode.d1.loss_mask: 0.2717, decode.d1.loss_dice: 0.7573, decode.d2.loss_cls: 0.1671, decode.d2.loss_mask: 0.2695, decode.d2.loss_dice: 0.7429, decode.d3.loss_cls: 0.1628, decode.d3.loss_mask: 0.2686, decode.d3.loss_dice: 0.7366, decode.d4.loss_cls: 0.1546, decode.d4.loss_mask: 0.2681, decode.d4.loss_dice: 0.7397, decode.d5.loss_cls: 0.1582, decode.d5.loss_mask: 0.2682, decode.d5.loss_dice: 0.7390, decode.d6.loss_cls: 0.1616, decode.d6.loss_mask: 0.2681, decode.d6.loss_dice: 0.7356, decode.d7.loss_cls: 0.1661, decode.d7.loss_mask: 0.2678, decode.d7.loss_dice: 0.7372, decode.d8.loss_cls: 0.1606, decode.d8.loss_mask: 0.2681, decode.d8.loss_dice: 0.7362, loss: 11.9996 +2022-05-09 23:59:12,619 - mmseg - INFO - per class results: +2022-05-09 23:59:12,625 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.7 | 99.16 | +| sidewalk | 89.19 | 94.66 | +| building | 94.66 | 97.1 | +| wall | 69.72 | 82.0 | +| fence | 71.56 | 82.86 | +| pole | 72.54 | 85.56 | +| traffic light | 76.62 | 89.54 | +| traffic sign | 84.08 | 92.14 | +| vegetation | 93.4 | 96.55 | +| terrain | 67.6 | 81.12 | +| sky | 95.91 | 98.46 | +| person | 86.93 | 94.39 | +| rider | 74.91 | 
83.93 | +| car | 96.36 | 98.22 | +| truck | 92.04 | 95.78 | +| bus | 93.84 | 96.77 | +| train | 87.55 | 90.92 | +| motorcycle | 76.27 | 88.56 | +| bicycle | 82.33 | 92.75 | ++---------------+-------+-------+ +2022-05-09 23:59:12,626 - mmseg - INFO - Summary: +2022-05-09 23:59:12,626 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | ++-------+-------+------+ +| 97.07 | 84.43 | 91.6 | ++-------+-------+------+ +2022-05-09 23:59:12,630 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-09 23:59:12,631 - mmseg - INFO - Iter(val) [32] aAcc: 0.9707, mIoU: 0.8443, mAcc: 0.9160, IoU.road: 0.9870, IoU.sidewalk: 0.8919, IoU.building: 0.9466, IoU.wall: 0.6972, IoU.fence: 0.7156, IoU.pole: 0.7254, IoU.traffic light: 0.7662, IoU.traffic sign: 0.8408, IoU.vegetation: 0.9340, IoU.terrain: 0.6760, IoU.sky: 0.9591, IoU.person: 0.8693, IoU.rider: 0.7491, IoU.car: 0.9636, IoU.truck: 0.9204, IoU.bus: 0.9384, IoU.train: 0.8755, IoU.motorcycle: 0.7627, IoU.bicycle: 0.8233, Acc.road: 0.9916, Acc.sidewalk: 0.9466, Acc.building: 0.9710, Acc.wall: 0.8200, Acc.fence: 0.8286, Acc.pole: 0.8556, Acc.traffic light: 0.8954, Acc.traffic sign: 0.9214, Acc.vegetation: 0.9655, Acc.terrain: 0.8112, Acc.sky: 0.9846, Acc.person: 0.9439, Acc.rider: 0.8393, Acc.car: 0.9822, Acc.truck: 0.9578, Acc.bus: 0.9677, Acc.train: 0.9092, Acc.motorcycle: 0.8856, Acc.bicycle: 0.9275 +2022-05-10 00:00:44,615 - mmseg - INFO - Iter [12050/80000] lr: 1.220e-06, eta: 1 day, 12:55:41, time: 4.149, data_time: 2.330, memory: 64699, decode.loss_cls: 0.1391, decode.loss_mask: 0.2542, decode.loss_dice: 0.7066, decode.d0.loss_cls: 0.3676, decode.d0.loss_mask: 0.2695, decode.d0.loss_dice: 0.7636, decode.d1.loss_cls: 0.1569, decode.d1.loss_mask: 0.2568, decode.d1.loss_dice: 0.7236, decode.d2.loss_cls: 0.1468, decode.d2.loss_mask: 0.2552, decode.d2.loss_dice: 0.7165, decode.d3.loss_cls: 0.1369, decode.d3.loss_mask: 0.2552, decode.d3.loss_dice: 0.7077, decode.d4.loss_cls: 
0.1396, decode.d4.loss_mask: 0.2559, decode.d4.loss_dice: 0.7075, decode.d5.loss_cls: 0.1394, decode.d5.loss_mask: 0.2556, decode.d5.loss_dice: 0.7055, decode.d6.loss_cls: 0.1345, decode.d6.loss_mask: 0.2548, decode.d6.loss_dice: 0.7085, decode.d7.loss_cls: 0.1359, decode.d7.loss_mask: 0.2536, decode.d7.loss_dice: 0.7071, decode.d8.loss_cls: 0.1399, decode.d8.loss_mask: 0.2538, decode.d8.loss_dice: 0.7101, loss: 11.3580 +2022-05-10 00:02:16,812 - mmseg - INFO - Iter [12100/80000] lr: 1.219e-06, eta: 1 day, 12:53:32, time: 1.843, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1407, decode.loss_mask: 0.2635, decode.loss_dice: 0.6999, decode.d0.loss_cls: 0.3730, decode.d0.loss_mask: 0.2791, decode.d0.loss_dice: 0.7622, decode.d1.loss_cls: 0.1747, decode.d1.loss_mask: 0.2659, decode.d1.loss_dice: 0.7228, decode.d2.loss_cls: 0.1611, decode.d2.loss_mask: 0.2640, decode.d2.loss_dice: 0.7148, decode.d3.loss_cls: 0.1434, decode.d3.loss_mask: 0.2631, decode.d3.loss_dice: 0.7073, decode.d4.loss_cls: 0.1512, decode.d4.loss_mask: 0.2629, decode.d4.loss_dice: 0.7061, decode.d5.loss_cls: 0.1503, decode.d5.loss_mask: 0.2626, decode.d5.loss_dice: 0.7031, decode.d6.loss_cls: 0.1426, decode.d6.loss_mask: 0.2635, decode.d6.loss_dice: 0.7018, decode.d7.loss_cls: 0.1510, decode.d7.loss_mask: 0.2637, decode.d7.loss_dice: 0.7011, decode.d8.loss_cls: 0.1479, decode.d8.loss_mask: 0.2632, decode.d8.loss_dice: 0.6988, loss: 11.5054 +2022-05-10 00:03:45,411 - mmseg - INFO - Iter [12150/80000] lr: 1.218e-06, eta: 1 day, 12:51:03, time: 1.773, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1495, decode.loss_mask: 0.2643, decode.loss_dice: 0.7145, decode.d0.loss_cls: 0.3678, decode.d0.loss_mask: 0.2768, decode.d0.loss_dice: 0.7615, decode.d1.loss_cls: 0.1712, decode.d1.loss_mask: 0.2664, decode.d1.loss_dice: 0.7230, decode.d2.loss_cls: 0.1604, decode.d2.loss_mask: 0.2654, decode.d2.loss_dice: 0.7166, decode.d3.loss_cls: 0.1556, decode.d3.loss_mask: 0.2636, decode.d3.loss_dice: 0.7142, 
decode.d4.loss_cls: 0.1635, decode.d4.loss_mask: 0.2637, decode.d4.loss_dice: 0.7070, decode.d5.loss_cls: 0.1509, decode.d5.loss_mask: 0.2634, decode.d5.loss_dice: 0.7129, decode.d6.loss_cls: 0.1477, decode.d6.loss_mask: 0.2635, decode.d6.loss_dice: 0.7111, decode.d7.loss_cls: 0.1583, decode.d7.loss_mask: 0.2637, decode.d7.loss_dice: 0.7084, decode.d8.loss_cls: 0.1482, decode.d8.loss_mask: 0.2632, decode.d8.loss_dice: 0.7055, loss: 11.6015 +2022-05-10 00:05:14,824 - mmseg - INFO - Iter [12200/80000] lr: 1.217e-06, eta: 1 day, 12:48:39, time: 1.788, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1382, decode.loss_mask: 0.2598, decode.loss_dice: 0.7035, decode.d0.loss_cls: 0.3596, decode.d0.loss_mask: 0.2747, decode.d0.loss_dice: 0.7602, decode.d1.loss_cls: 0.1655, decode.d1.loss_mask: 0.2625, decode.d1.loss_dice: 0.7251, decode.d2.loss_cls: 0.1538, decode.d2.loss_mask: 0.2600, decode.d2.loss_dice: 0.7209, decode.d3.loss_cls: 0.1440, decode.d3.loss_mask: 0.2600, decode.d3.loss_dice: 0.7088, decode.d4.loss_cls: 0.1461, decode.d4.loss_mask: 0.2603, decode.d4.loss_dice: 0.7043, decode.d5.loss_cls: 0.1414, decode.d5.loss_mask: 0.2595, decode.d5.loss_dice: 0.7048, decode.d6.loss_cls: 0.1390, decode.d6.loss_mask: 0.2605, decode.d6.loss_dice: 0.7045, decode.d7.loss_cls: 0.1392, decode.d7.loss_mask: 0.2599, decode.d7.loss_dice: 0.7056, decode.d8.loss_cls: 0.1351, decode.d8.loss_mask: 0.2594, decode.d8.loss_dice: 0.7089, loss: 11.4253 +2022-05-10 00:06:44,230 - mmseg - INFO - Iter [12250/80000] lr: 1.216e-06, eta: 1 day, 12:46:15, time: 1.787, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1466, decode.loss_mask: 0.2591, decode.loss_dice: 0.7193, decode.d0.loss_cls: 0.3621, decode.d0.loss_mask: 0.2765, decode.d0.loss_dice: 0.7849, decode.d1.loss_cls: 0.1781, decode.d1.loss_mask: 0.2612, decode.d1.loss_dice: 0.7387, decode.d2.loss_cls: 0.1665, decode.d2.loss_mask: 0.2616, decode.d2.loss_dice: 0.7331, decode.d3.loss_cls: 0.1553, decode.d3.loss_mask: 0.2589, 
decode.d3.loss_dice: 0.7248, decode.d4.loss_cls: 0.1567, decode.d4.loss_mask: 0.2586, decode.d4.loss_dice: 0.7248, decode.d5.loss_cls: 0.1524, decode.d5.loss_mask: 0.2589, decode.d5.loss_dice: 0.7238, decode.d6.loss_cls: 0.1536, decode.d6.loss_mask: 0.2580, decode.d6.loss_dice: 0.7182, decode.d7.loss_cls: 0.1546, decode.d7.loss_mask: 0.2591, decode.d7.loss_dice: 0.7224, decode.d8.loss_cls: 0.1547, decode.d8.loss_mask: 0.2590, decode.d8.loss_dice: 0.7185, loss: 11.7002 +2022-05-10 00:08:15,871 - mmseg - INFO - Iter [12300/80000] lr: 1.215e-06, eta: 1 day, 12:44:04, time: 1.833, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1564, decode.loss_mask: 0.2601, decode.loss_dice: 0.7295, decode.d0.loss_cls: 0.3653, decode.d0.loss_mask: 0.2767, decode.d0.loss_dice: 0.7914, decode.d1.loss_cls: 0.1805, decode.d1.loss_mask: 0.2645, decode.d1.loss_dice: 0.7447, decode.d2.loss_cls: 0.1707, decode.d2.loss_mask: 0.2622, decode.d2.loss_dice: 0.7402, decode.d3.loss_cls: 0.1622, decode.d3.loss_mask: 0.2607, decode.d3.loss_dice: 0.7310, decode.d4.loss_cls: 0.1655, decode.d4.loss_mask: 0.2606, decode.d4.loss_dice: 0.7331, decode.d5.loss_cls: 0.1575, decode.d5.loss_mask: 0.2609, decode.d5.loss_dice: 0.7312, decode.d6.loss_cls: 0.1563, decode.d6.loss_mask: 0.2604, decode.d6.loss_dice: 0.7269, decode.d7.loss_cls: 0.1638, decode.d7.loss_mask: 0.2597, decode.d7.loss_dice: 0.7301, decode.d8.loss_cls: 0.1563, decode.d8.loss_mask: 0.2602, decode.d8.loss_dice: 0.7294, loss: 11.8479 +2022-05-10 00:09:46,378 - mmseg - INFO - Iter [12350/80000] lr: 1.214e-06, eta: 1 day, 12:41:47, time: 1.811, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1511, decode.loss_mask: 0.2666, decode.loss_dice: 0.7260, decode.d0.loss_cls: 0.3650, decode.d0.loss_mask: 0.2841, decode.d0.loss_dice: 0.7827, decode.d1.loss_cls: 0.1742, decode.d1.loss_mask: 0.2688, decode.d1.loss_dice: 0.7455, decode.d2.loss_cls: 0.1628, decode.d2.loss_mask: 0.2672, decode.d2.loss_dice: 0.7367, decode.d3.loss_cls: 0.1557, 
decode.d3.loss_mask: 0.2662, decode.d3.loss_dice: 0.7262, decode.d4.loss_cls: 0.1515, decode.d4.loss_mask: 0.2670, decode.d4.loss_dice: 0.7301, decode.d5.loss_cls: 0.1499, decode.d5.loss_mask: 0.2660, decode.d5.loss_dice: 0.7315, decode.d6.loss_cls: 0.1546, decode.d6.loss_mask: 0.2665, decode.d6.loss_dice: 0.7241, decode.d7.loss_cls: 0.1512, decode.d7.loss_mask: 0.2668, decode.d7.loss_dice: 0.7234, decode.d8.loss_cls: 0.1548, decode.d8.loss_mask: 0.2660, decode.d8.loss_dice: 0.7251, loss: 11.8073 +2022-05-10 00:11:17,617 - mmseg - INFO - Iter [12400/80000] lr: 1.213e-06, eta: 1 day, 12:39:34, time: 1.822, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1480, decode.loss_mask: 0.2645, decode.loss_dice: 0.7374, decode.d0.loss_cls: 0.3640, decode.d0.loss_mask: 0.2777, decode.d0.loss_dice: 0.7932, decode.d1.loss_cls: 0.1745, decode.d1.loss_mask: 0.2669, decode.d1.loss_dice: 0.7610, decode.d2.loss_cls: 0.1656, decode.d2.loss_mask: 0.2644, decode.d2.loss_dice: 0.7449, decode.d3.loss_cls: 0.1549, decode.d3.loss_mask: 0.2636, decode.d3.loss_dice: 0.7363, decode.d4.loss_cls: 0.1568, decode.d4.loss_mask: 0.2647, decode.d4.loss_dice: 0.7346, decode.d5.loss_cls: 0.1572, decode.d5.loss_mask: 0.2640, decode.d5.loss_dice: 0.7352, decode.d6.loss_cls: 0.1477, decode.d6.loss_mask: 0.2640, decode.d6.loss_dice: 0.7326, decode.d7.loss_cls: 0.1571, decode.d7.loss_mask: 0.2642, decode.d7.loss_dice: 0.7368, decode.d8.loss_cls: 0.1562, decode.d8.loss_mask: 0.2644, decode.d8.loss_dice: 0.7328, loss: 11.8853 +2022-05-10 00:12:47,584 - mmseg - INFO - Iter [12450/80000] lr: 1.212e-06, eta: 1 day, 12:37:15, time: 1.800, data_time: 0.022, memory: 64699, decode.loss_cls: 0.1441, decode.loss_mask: 0.2713, decode.loss_dice: 0.7166, decode.d0.loss_cls: 0.3648, decode.d0.loss_mask: 0.2874, decode.d0.loss_dice: 0.7737, decode.d1.loss_cls: 0.1718, decode.d1.loss_mask: 0.2758, decode.d1.loss_dice: 0.7375, decode.d2.loss_cls: 0.1607, decode.d2.loss_mask: 0.2731, decode.d2.loss_dice: 0.7291, 
decode.d3.loss_cls: 0.1502, decode.d3.loss_mask: 0.2722, decode.d3.loss_dice: 0.7211, decode.d4.loss_cls: 0.1528, decode.d4.loss_mask: 0.2718, decode.d4.loss_dice: 0.7158, decode.d5.loss_cls: 0.1509, decode.d5.loss_mask: 0.2723, decode.d5.loss_dice: 0.7189, decode.d6.loss_cls: 0.1414, decode.d6.loss_mask: 0.2714, decode.d6.loss_dice: 0.7169, decode.d7.loss_cls: 0.1433, decode.d7.loss_mask: 0.2720, decode.d7.loss_dice: 0.7142, decode.d8.loss_cls: 0.1421, decode.d8.loss_mask: 0.2720, decode.d8.loss_dice: 0.7175, loss: 11.7226 +2022-05-10 00:14:19,671 - mmseg - INFO - Iter [12500/80000] lr: 1.211e-06, eta: 1 day, 12:35:08, time: 1.844, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1454, decode.loss_mask: 0.2667, decode.loss_dice: 0.7131, decode.d0.loss_cls: 0.3563, decode.d0.loss_mask: 0.2844, decode.d0.loss_dice: 0.7707, decode.d1.loss_cls: 0.1613, decode.d1.loss_mask: 0.2712, decode.d1.loss_dice: 0.7345, decode.d2.loss_cls: 0.1554, decode.d2.loss_mask: 0.2692, decode.d2.loss_dice: 0.7208, decode.d3.loss_cls: 0.1468, decode.d3.loss_mask: 0.2679, decode.d3.loss_dice: 0.7198, decode.d4.loss_cls: 0.1499, decode.d4.loss_mask: 0.2687, decode.d4.loss_dice: 0.7175, decode.d5.loss_cls: 0.1449, decode.d5.loss_mask: 0.2694, decode.d5.loss_dice: 0.7157, decode.d6.loss_cls: 0.1418, decode.d6.loss_mask: 0.2668, decode.d6.loss_dice: 0.7146, decode.d7.loss_cls: 0.1426, decode.d7.loss_mask: 0.2678, decode.d7.loss_dice: 0.7151, decode.d8.loss_cls: 0.1457, decode.d8.loss_mask: 0.2671, decode.d8.loss_dice: 0.7117, loss: 11.6233 +2022-05-10 00:15:48,051 - mmseg - INFO - Iter [12550/80000] lr: 1.211e-06, eta: 1 day, 12:32:41, time: 1.767, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1498, decode.loss_mask: 0.2642, decode.loss_dice: 0.7361, decode.d0.loss_cls: 0.3554, decode.d0.loss_mask: 0.2792, decode.d0.loss_dice: 0.7882, decode.d1.loss_cls: 0.1767, decode.d1.loss_mask: 0.2672, decode.d1.loss_dice: 0.7472, decode.d2.loss_cls: 0.1612, decode.d2.loss_mask: 0.2655, 
decode.d2.loss_dice: 0.7430, decode.d3.loss_cls: 0.1501, decode.d3.loss_mask: 0.2650, decode.d3.loss_dice: 0.7345, decode.d4.loss_cls: 0.1473, decode.d4.loss_mask: 0.2653, decode.d4.loss_dice: 0.7336, decode.d5.loss_cls: 0.1447, decode.d5.loss_mask: 0.2650, decode.d5.loss_dice: 0.7350, decode.d6.loss_cls: 0.1506, decode.d6.loss_mask: 0.2639, decode.d6.loss_dice: 0.7351, decode.d7.loss_cls: 0.1402, decode.d7.loss_mask: 0.2642, decode.d7.loss_dice: 0.7345, decode.d8.loss_cls: 0.1512, decode.d8.loss_mask: 0.2643, decode.d8.loss_dice: 0.7288, loss: 11.8071 +2022-05-10 00:17:16,826 - mmseg - INFO - Iter [12600/80000] lr: 1.210e-06, eta: 1 day, 12:30:17, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1562, decode.loss_mask: 0.2579, decode.loss_dice: 0.7026, decode.d0.loss_cls: 0.3745, decode.d0.loss_mask: 0.2762, decode.d0.loss_dice: 0.7697, decode.d1.loss_cls: 0.1769, decode.d1.loss_mask: 0.2606, decode.d1.loss_dice: 0.7298, decode.d2.loss_cls: 0.1640, decode.d2.loss_mask: 0.2582, decode.d2.loss_dice: 0.7183, decode.d3.loss_cls: 0.1570, decode.d3.loss_mask: 0.2593, decode.d3.loss_dice: 0.7155, decode.d4.loss_cls: 0.1480, decode.d4.loss_mask: 0.2568, decode.d4.loss_dice: 0.7142, decode.d5.loss_cls: 0.1602, decode.d5.loss_mask: 0.2585, decode.d5.loss_dice: 0.7107, decode.d6.loss_cls: 0.1532, decode.d6.loss_mask: 0.2585, decode.d6.loss_dice: 0.7038, decode.d7.loss_cls: 0.1507, decode.d7.loss_mask: 0.2584, decode.d7.loss_dice: 0.7098, decode.d8.loss_cls: 0.1521, decode.d8.loss_mask: 0.2584, decode.d8.loss_dice: 0.7018, loss: 11.5720 +2022-05-10 00:18:48,269 - mmseg - INFO - Iter [12650/80000] lr: 1.209e-06, eta: 1 day, 12:28:07, time: 1.828, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1429, decode.loss_mask: 0.2687, decode.loss_dice: 0.7089, decode.d0.loss_cls: 0.3591, decode.d0.loss_mask: 0.2798, decode.d0.loss_dice: 0.7673, decode.d1.loss_cls: 0.1679, decode.d1.loss_mask: 0.2701, decode.d1.loss_dice: 0.7266, decode.d2.loss_cls: 0.1508, 
decode.d2.loss_mask: 0.2684, decode.d2.loss_dice: 0.7225, decode.d3.loss_cls: 0.1468, decode.d3.loss_mask: 0.2689, decode.d3.loss_dice: 0.7171, decode.d4.loss_cls: 0.1480, decode.d4.loss_mask: 0.2679, decode.d4.loss_dice: 0.7149, decode.d5.loss_cls: 0.1483, decode.d5.loss_mask: 0.2675, decode.d5.loss_dice: 0.7174, decode.d6.loss_cls: 0.1474, decode.d6.loss_mask: 0.2682, decode.d6.loss_dice: 0.7130, decode.d7.loss_cls: 0.1438, decode.d7.loss_mask: 0.2678, decode.d7.loss_dice: 0.7136, decode.d8.loss_cls: 0.1417, decode.d8.loss_mask: 0.2677, decode.d8.loss_dice: 0.7120, loss: 11.6051 +2022-05-10 00:20:17,934 - mmseg - INFO - Iter [12700/80000] lr: 1.208e-06, eta: 1 day, 12:25:48, time: 1.794, data_time: 0.022, memory: 64699, decode.loss_cls: 0.1603, decode.loss_mask: 0.2583, decode.loss_dice: 0.7258, decode.d0.loss_cls: 0.3651, decode.d0.loss_mask: 0.2754, decode.d0.loss_dice: 0.7849, decode.d1.loss_cls: 0.1801, decode.d1.loss_mask: 0.2607, decode.d1.loss_dice: 0.7442, decode.d2.loss_cls: 0.1749, decode.d2.loss_mask: 0.2582, decode.d2.loss_dice: 0.7323, decode.d3.loss_cls: 0.1634, decode.d3.loss_mask: 0.2583, decode.d3.loss_dice: 0.7322, decode.d4.loss_cls: 0.1592, decode.d4.loss_mask: 0.2599, decode.d4.loss_dice: 0.7306, decode.d5.loss_cls: 0.1612, decode.d5.loss_mask: 0.2588, decode.d5.loss_dice: 0.7265, decode.d6.loss_cls: 0.1677, decode.d6.loss_mask: 0.2583, decode.d6.loss_dice: 0.7235, decode.d7.loss_cls: 0.1638, decode.d7.loss_mask: 0.2586, decode.d7.loss_dice: 0.7215, decode.d8.loss_cls: 0.1621, decode.d8.loss_mask: 0.2581, decode.d8.loss_dice: 0.7254, loss: 11.8091 +2022-05-10 00:21:46,019 - mmseg - INFO - Iter [12750/80000] lr: 1.207e-06, eta: 1 day, 12:23:21, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1364, decode.loss_mask: 0.2645, decode.loss_dice: 0.7213, decode.d0.loss_cls: 0.3527, decode.d0.loss_mask: 0.2788, decode.d0.loss_dice: 0.7781, decode.d1.loss_cls: 0.1605, decode.d1.loss_mask: 0.2677, decode.d1.loss_dice: 0.7435, 
decode.d2.loss_cls: 0.1438, decode.d2.loss_mask: 0.2652, decode.d2.loss_dice: 0.7312, decode.d3.loss_cls: 0.1374, decode.d3.loss_mask: 0.2644, decode.d3.loss_dice: 0.7218, decode.d4.loss_cls: 0.1439, decode.d4.loss_mask: 0.2649, decode.d4.loss_dice: 0.7231, decode.d5.loss_cls: 0.1443, decode.d5.loss_mask: 0.2648, decode.d5.loss_dice: 0.7233, decode.d6.loss_cls: 0.1411, decode.d6.loss_mask: 0.2647, decode.d6.loss_dice: 0.7237, decode.d7.loss_cls: 0.1388, decode.d7.loss_mask: 0.2644, decode.d7.loss_dice: 0.7189, decode.d8.loss_cls: 0.1385, decode.d8.loss_mask: 0.2638, decode.d8.loss_dice: 0.7191, loss: 11.6046 +2022-05-10 00:23:14,458 - mmseg - INFO - Iter [12800/80000] lr: 1.206e-06, eta: 1 day, 12:20:57, time: 1.769, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1351, decode.loss_mask: 0.2621, decode.loss_dice: 0.7219, decode.d0.loss_cls: 0.3519, decode.d0.loss_mask: 0.2767, decode.d0.loss_dice: 0.7707, decode.d1.loss_cls: 0.1626, decode.d1.loss_mask: 0.2668, decode.d1.loss_dice: 0.7372, decode.d2.loss_cls: 0.1568, decode.d2.loss_mask: 0.2639, decode.d2.loss_dice: 0.7272, decode.d3.loss_cls: 0.1440, decode.d3.loss_mask: 0.2627, decode.d3.loss_dice: 0.7261, decode.d4.loss_cls: 0.1395, decode.d4.loss_mask: 0.2619, decode.d4.loss_dice: 0.7208, decode.d5.loss_cls: 0.1380, decode.d5.loss_mask: 0.2618, decode.d5.loss_dice: 0.7293, decode.d6.loss_cls: 0.1350, decode.d6.loss_mask: 0.2618, decode.d6.loss_dice: 0.7256, decode.d7.loss_cls: 0.1381, decode.d7.loss_mask: 0.2610, decode.d7.loss_dice: 0.7203, decode.d8.loss_cls: 0.1384, decode.d8.loss_mask: 0.2611, decode.d8.loss_dice: 0.7233, loss: 11.5814 +2022-05-10 00:24:45,300 - mmseg - INFO - Iter [12850/80000] lr: 1.205e-06, eta: 1 day, 12:18:46, time: 1.817, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1361, decode.loss_mask: 0.2544, decode.loss_dice: 0.7299, decode.d0.loss_cls: 0.3677, decode.d0.loss_mask: 0.2662, decode.d0.loss_dice: 0.7867, decode.d1.loss_cls: 0.1686, decode.d1.loss_mask: 0.2570, 
decode.d1.loss_dice: 0.7414, decode.d2.loss_cls: 0.1552, decode.d2.loss_mask: 0.2547, decode.d2.loss_dice: 0.7324, decode.d3.loss_cls: 0.1501, decode.d3.loss_mask: 0.2541, decode.d3.loss_dice: 0.7287, decode.d4.loss_cls: 0.1415, decode.d4.loss_mask: 0.2538, decode.d4.loss_dice: 0.7281, decode.d5.loss_cls: 0.1466, decode.d5.loss_mask: 0.2538, decode.d5.loss_dice: 0.7286, decode.d6.loss_cls: 0.1427, decode.d6.loss_mask: 0.2534, decode.d6.loss_dice: 0.7266, decode.d7.loss_cls: 0.1429, decode.d7.loss_mask: 0.2541, decode.d7.loss_dice: 0.7329, decode.d8.loss_cls: 0.1424, decode.d8.loss_mask: 0.2547, decode.d8.loss_dice: 0.7281, loss: 11.6135 +2022-05-10 00:26:13,026 - mmseg - INFO - Iter [12900/80000] lr: 1.204e-06, eta: 1 day, 12:16:18, time: 1.754, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1527, decode.loss_mask: 0.2634, decode.loss_dice: 0.7046, decode.d0.loss_cls: 0.3631, decode.d0.loss_mask: 0.2770, decode.d0.loss_dice: 0.7660, decode.d1.loss_cls: 0.1671, decode.d1.loss_mask: 0.2661, decode.d1.loss_dice: 0.7282, decode.d2.loss_cls: 0.1607, decode.d2.loss_mask: 0.2639, decode.d2.loss_dice: 0.7135, decode.d3.loss_cls: 0.1525, decode.d3.loss_mask: 0.2645, decode.d3.loss_dice: 0.7065, decode.d4.loss_cls: 0.1547, decode.d4.loss_mask: 0.2619, decode.d4.loss_dice: 0.7082, decode.d5.loss_cls: 0.1480, decode.d5.loss_mask: 0.2643, decode.d5.loss_dice: 0.7055, decode.d6.loss_cls: 0.1454, decode.d6.loss_mask: 0.2635, decode.d6.loss_dice: 0.7072, decode.d7.loss_cls: 0.1484, decode.d7.loss_mask: 0.2630, decode.d7.loss_dice: 0.7111, decode.d8.loss_cls: 0.1484, decode.d8.loss_mask: 0.2630, decode.d8.loss_dice: 0.7095, loss: 11.5518 +2022-05-10 00:27:42,468 - mmseg - INFO - Iter [12950/80000] lr: 1.203e-06, eta: 1 day, 12:14:00, time: 1.789, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1457, decode.loss_mask: 0.2536, decode.loss_dice: 0.7134, decode.d0.loss_cls: 0.3661, decode.d0.loss_mask: 0.2655, decode.d0.loss_dice: 0.7711, decode.d1.loss_cls: 0.1722, 
decode.d1.loss_mask: 0.2559, decode.d1.loss_dice: 0.7314, decode.d2.loss_cls: 0.1528, decode.d2.loss_mask: 0.2525, decode.d2.loss_dice: 0.7243, decode.d3.loss_cls: 0.1477, decode.d3.loss_mask: 0.2540, decode.d3.loss_dice: 0.7159, decode.d4.loss_cls: 0.1432, decode.d4.loss_mask: 0.2540, decode.d4.loss_dice: 0.7155, decode.d5.loss_cls: 0.1482, decode.d5.loss_mask: 0.2531, decode.d5.loss_dice: 0.7187, decode.d6.loss_cls: 0.1452, decode.d6.loss_mask: 0.2535, decode.d6.loss_dice: 0.7165, decode.d7.loss_cls: 0.1416, decode.d7.loss_mask: 0.2535, decode.d7.loss_dice: 0.7172, decode.d8.loss_cls: 0.1410, decode.d8.loss_mask: 0.2539, decode.d8.loss_dice: 0.7165, loss: 11.4938 +2022-05-10 00:29:11,537 - mmseg - INFO - Saving checkpoint at 13000 iterations +2022-05-10 00:29:44,563 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 00:29:44,605 - mmseg - INFO - Iter [13000/80000] lr: 1.202e-06, eta: 1 day, 12:14:30, time: 2.440, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1434, decode.loss_mask: 0.2509, decode.loss_dice: 0.7022, decode.d0.loss_cls: 0.3510, decode.d0.loss_mask: 0.2628, decode.d0.loss_dice: 0.7558, decode.d1.loss_cls: 0.1708, decode.d1.loss_mask: 0.2524, decode.d1.loss_dice: 0.7163, decode.d2.loss_cls: 0.1518, decode.d2.loss_mask: 0.2520, decode.d2.loss_dice: 0.7075, decode.d3.loss_cls: 0.1500, decode.d3.loss_mask: 0.2507, decode.d3.loss_dice: 0.6999, decode.d4.loss_cls: 0.1455, decode.d4.loss_mask: 0.2506, decode.d4.loss_dice: 0.6977, decode.d5.loss_cls: 0.1487, decode.d5.loss_mask: 0.2509, decode.d5.loss_dice: 0.7021, decode.d6.loss_cls: 0.1400, decode.d6.loss_mask: 0.2506, decode.d6.loss_dice: 0.6982, decode.d7.loss_cls: 0.1408, decode.d7.loss_mask: 0.2513, decode.d7.loss_dice: 0.6983, decode.d8.loss_cls: 0.1432, decode.d8.loss_mask: 0.2512, decode.d8.loss_dice: 0.6967, loss: 11.2835 +2022-05-10 00:31:40,116 - mmseg - INFO - per class results: +2022-05-10 00:31:40,121 - mmseg - INFO - 
++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.68 | 99.24 | +| sidewalk | 89.27 | 94.75 | +| building | 94.59 | 97.18 | +| wall | 67.71 | 81.04 | +| fence | 73.35 | 85.37 | +| pole | 72.13 | 84.67 | +| traffic light | 76.85 | 89.78 | +| traffic sign | 84.57 | 91.19 | +| vegetation | 93.51 | 96.55 | +| terrain | 68.06 | 78.13 | +| sky | 95.94 | 98.53 | +| person | 86.96 | 93.63 | +| rider | 73.78 | 87.27 | +| car | 96.36 | 98.14 | +| truck | 91.99 | 95.74 | +| bus | 93.76 | 96.85 | +| train | 87.67 | 90.74 | +| motorcycle | 76.79 | 88.4 | +| bicycle | 82.8 | 91.48 | ++---------------+-------+-------+ +2022-05-10 00:31:40,121 - mmseg - INFO - Summary: +2022-05-10 00:31:40,121 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.08 | 84.46 | 91.51 | ++-------+-------+-------+ +2022-05-10 00:31:40,124 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 00:31:40,125 - mmseg - INFO - Iter(val) [32] aAcc: 0.9708, mIoU: 0.8446, mAcc: 0.9151, IoU.road: 0.9868, IoU.sidewalk: 0.8927, IoU.building: 0.9459, IoU.wall: 0.6771, IoU.fence: 0.7335, IoU.pole: 0.7213, IoU.traffic light: 0.7685, IoU.traffic sign: 0.8457, IoU.vegetation: 0.9351, IoU.terrain: 0.6806, IoU.sky: 0.9594, IoU.person: 0.8696, IoU.rider: 0.7378, IoU.car: 0.9636, IoU.truck: 0.9199, IoU.bus: 0.9376, IoU.train: 0.8767, IoU.motorcycle: 0.7679, IoU.bicycle: 0.8280, Acc.road: 0.9924, Acc.sidewalk: 0.9475, Acc.building: 0.9718, Acc.wall: 0.8104, Acc.fence: 0.8537, Acc.pole: 0.8467, Acc.traffic light: 0.8978, Acc.traffic sign: 0.9119, Acc.vegetation: 0.9655, Acc.terrain: 0.7813, Acc.sky: 0.9853, Acc.person: 0.9363, Acc.rider: 0.8727, Acc.car: 0.9814, Acc.truck: 0.9574, Acc.bus: 0.9685, Acc.train: 0.9074, Acc.motorcycle: 0.8840, Acc.bicycle: 0.9148 +2022-05-10 00:33:11,551 - mmseg - INFO - Iter [13050/80000] lr: 1.202e-06, eta: 1 day, 12:22:16, time: 4.142, data_time: 
2.379, memory: 64699, decode.loss_cls: 0.1459, decode.loss_mask: 0.2611, decode.loss_dice: 0.7132, decode.d0.loss_cls: 0.3771, decode.d0.loss_mask: 0.2741, decode.d0.loss_dice: 0.7593, decode.d1.loss_cls: 0.1736, decode.d1.loss_mask: 0.2639, decode.d1.loss_dice: 0.7270, decode.d2.loss_cls: 0.1559, decode.d2.loss_mask: 0.2626, decode.d2.loss_dice: 0.7139, decode.d3.loss_cls: 0.1489, decode.d3.loss_mask: 0.2611, decode.d3.loss_dice: 0.7139, decode.d4.loss_cls: 0.1498, decode.d4.loss_mask: 0.2621, decode.d4.loss_dice: 0.7081, decode.d5.loss_cls: 0.1436, decode.d5.loss_mask: 0.2609, decode.d5.loss_dice: 0.7087, decode.d6.loss_cls: 0.1480, decode.d6.loss_mask: 0.2614, decode.d6.loss_dice: 0.7086, decode.d7.loss_cls: 0.1515, decode.d7.loss_mask: 0.2608, decode.d7.loss_dice: 0.7076, decode.d8.loss_cls: 0.1496, decode.d8.loss_mask: 0.2603, decode.d8.loss_dice: 0.7085, loss: 11.5408 +2022-05-10 00:34:39,824 - mmseg - INFO - Iter [13100/80000] lr: 1.201e-06, eta: 1 day, 12:19:49, time: 1.764, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1477, decode.loss_mask: 0.2530, decode.loss_dice: 0.7009, decode.d0.loss_cls: 0.3598, decode.d0.loss_mask: 0.2686, decode.d0.loss_dice: 0.7573, decode.d1.loss_cls: 0.1699, decode.d1.loss_mask: 0.2565, decode.d1.loss_dice: 0.7167, decode.d2.loss_cls: 0.1672, decode.d2.loss_mask: 0.2550, decode.d2.loss_dice: 0.7084, decode.d3.loss_cls: 0.1547, decode.d3.loss_mask: 0.2542, decode.d3.loss_dice: 0.7031, decode.d4.loss_cls: 0.1567, decode.d4.loss_mask: 0.2546, decode.d4.loss_dice: 0.7028, decode.d5.loss_cls: 0.1542, decode.d5.loss_mask: 0.2540, decode.d5.loss_dice: 0.7004, decode.d6.loss_cls: 0.1519, decode.d6.loss_mask: 0.2538, decode.d6.loss_dice: 0.7021, decode.d7.loss_cls: 0.1488, decode.d7.loss_mask: 0.2538, decode.d7.loss_dice: 0.7003, decode.d8.loss_cls: 0.1473, decode.d8.loss_mask: 0.2545, decode.d8.loss_dice: 0.7025, loss: 11.4106 +2022-05-10 00:36:08,049 - mmseg - INFO - Iter [13150/80000] lr: 1.200e-06, eta: 1 day, 12:17:23, time: 
1.766, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1414, decode.loss_mask: 0.2648, decode.loss_dice: 0.7093, decode.d0.loss_cls: 0.3628, decode.d0.loss_mask: 0.2825, decode.d0.loss_dice: 0.7599, decode.d1.loss_cls: 0.1715, decode.d1.loss_mask: 0.2686, decode.d1.loss_dice: 0.7245, decode.d2.loss_cls: 0.1501, decode.d2.loss_mask: 0.2663, decode.d2.loss_dice: 0.7177, decode.d3.loss_cls: 0.1444, decode.d3.loss_mask: 0.2647, decode.d3.loss_dice: 0.7113, decode.d4.loss_cls: 0.1469, decode.d4.loss_mask: 0.2652, decode.d4.loss_dice: 0.7050, decode.d5.loss_cls: 0.1515, decode.d5.loss_mask: 0.2650, decode.d5.loss_dice: 0.7115, decode.d6.loss_cls: 0.1470, decode.d6.loss_mask: 0.2646, decode.d6.loss_dice: 0.7088, decode.d7.loss_cls: 0.1453, decode.d7.loss_mask: 0.2652, decode.d7.loss_dice: 0.7072, decode.d8.loss_cls: 0.1443, decode.d8.loss_mask: 0.2661, decode.d8.loss_dice: 0.7099, loss: 11.5432 +2022-05-10 00:37:36,725 - mmseg - INFO - Iter [13200/80000] lr: 1.199e-06, eta: 1 day, 12:15:00, time: 1.773, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1376, decode.loss_mask: 0.2558, decode.loss_dice: 0.7207, decode.d0.loss_cls: 0.3550, decode.d0.loss_mask: 0.2690, decode.d0.loss_dice: 0.7754, decode.d1.loss_cls: 0.1522, decode.d1.loss_mask: 0.2572, decode.d1.loss_dice: 0.7419, decode.d2.loss_cls: 0.1463, decode.d2.loss_mask: 0.2561, decode.d2.loss_dice: 0.7272, decode.d3.loss_cls: 0.1402, decode.d3.loss_mask: 0.2568, decode.d3.loss_dice: 0.7214, decode.d4.loss_cls: 0.1346, decode.d4.loss_mask: 0.2560, decode.d4.loss_dice: 0.7198, decode.d5.loss_cls: 0.1369, decode.d5.loss_mask: 0.2564, decode.d5.loss_dice: 0.7238, decode.d6.loss_cls: 0.1295, decode.d6.loss_mask: 0.2554, decode.d6.loss_dice: 0.7181, decode.d7.loss_cls: 0.1350, decode.d7.loss_mask: 0.2557, decode.d7.loss_dice: 0.7186, decode.d8.loss_cls: 0.1334, decode.d8.loss_mask: 0.2560, decode.d8.loss_dice: 0.7202, loss: 11.4621 +2022-05-10 00:39:08,505 - mmseg - INFO - Iter [13250/80000] lr: 1.198e-06, eta: 1 
day, 12:12:52, time: 1.835, data_time: 0.070, memory: 64699, decode.loss_cls: 0.1501, decode.loss_mask: 0.2653, decode.loss_dice: 0.7237, decode.d0.loss_cls: 0.3667, decode.d0.loss_mask: 0.2813, decode.d0.loss_dice: 0.7840, decode.d1.loss_cls: 0.1672, decode.d1.loss_mask: 0.2676, decode.d1.loss_dice: 0.7439, decode.d2.loss_cls: 0.1612, decode.d2.loss_mask: 0.2661, decode.d2.loss_dice: 0.7354, decode.d3.loss_cls: 0.1439, decode.d3.loss_mask: 0.2667, decode.d3.loss_dice: 0.7311, decode.d4.loss_cls: 0.1483, decode.d4.loss_mask: 0.2665, decode.d4.loss_dice: 0.7260, decode.d5.loss_cls: 0.1441, decode.d5.loss_mask: 0.2658, decode.d5.loss_dice: 0.7317, decode.d6.loss_cls: 0.1459, decode.d6.loss_mask: 0.2658, decode.d6.loss_dice: 0.7263, decode.d7.loss_cls: 0.1426, decode.d7.loss_mask: 0.2656, decode.d7.loss_dice: 0.7265, decode.d8.loss_cls: 0.1443, decode.d8.loss_mask: 0.2654, decode.d8.loss_dice: 0.7310, loss: 11.7498 +2022-05-10 00:40:37,330 - mmseg - INFO - Iter [13300/80000] lr: 1.197e-06, eta: 1 day, 12:10:30, time: 1.777, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1413, decode.loss_mask: 0.2614, decode.loss_dice: 0.6948, decode.d0.loss_cls: 0.3536, decode.d0.loss_mask: 0.2766, decode.d0.loss_dice: 0.7500, decode.d1.loss_cls: 0.1598, decode.d1.loss_mask: 0.2648, decode.d1.loss_dice: 0.7151, decode.d2.loss_cls: 0.1518, decode.d2.loss_mask: 0.2628, decode.d2.loss_dice: 0.7068, decode.d3.loss_cls: 0.1393, decode.d3.loss_mask: 0.2625, decode.d3.loss_dice: 0.6976, decode.d4.loss_cls: 0.1425, decode.d4.loss_mask: 0.2620, decode.d4.loss_dice: 0.6940, decode.d5.loss_cls: 0.1430, decode.d5.loss_mask: 0.2618, decode.d5.loss_dice: 0.7014, decode.d6.loss_cls: 0.1456, decode.d6.loss_mask: 0.2614, decode.d6.loss_dice: 0.6903, decode.d7.loss_cls: 0.1411, decode.d7.loss_mask: 0.2608, decode.d7.loss_dice: 0.6934, decode.d8.loss_cls: 0.1358, decode.d8.loss_mask: 0.2615, decode.d8.loss_dice: 0.6971, loss: 11.3302 +2022-05-10 00:42:06,578 - mmseg - INFO - Iter [13350/80000] lr: 
1.196e-06, eta: 1 day, 12:08:11, time: 1.785, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1343, decode.loss_mask: 0.2618, decode.loss_dice: 0.7031, decode.d0.loss_cls: 0.3656, decode.d0.loss_mask: 0.2784, decode.d0.loss_dice: 0.7617, decode.d1.loss_cls: 0.1605, decode.d1.loss_mask: 0.2664, decode.d1.loss_dice: 0.7320, decode.d2.loss_cls: 0.1488, decode.d2.loss_mask: 0.2645, decode.d2.loss_dice: 0.7191, decode.d3.loss_cls: 0.1395, decode.d3.loss_mask: 0.2635, decode.d3.loss_dice: 0.7085, decode.d4.loss_cls: 0.1475, decode.d4.loss_mask: 0.2637, decode.d4.loss_dice: 0.7077, decode.d5.loss_cls: 0.1372, decode.d5.loss_mask: 0.2621, decode.d5.loss_dice: 0.7085, decode.d6.loss_cls: 0.1369, decode.d6.loss_mask: 0.2618, decode.d6.loss_dice: 0.7085, decode.d7.loss_cls: 0.1431, decode.d7.loss_mask: 0.2617, decode.d7.loss_dice: 0.7079, decode.d8.loss_cls: 0.1391, decode.d8.loss_mask: 0.2617, decode.d8.loss_dice: 0.7045, loss: 11.4596 +2022-05-10 00:43:39,295 - mmseg - INFO - Iter [13400/80000] lr: 1.195e-06, eta: 1 day, 12:06:09, time: 1.853, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1482, decode.loss_mask: 0.2460, decode.loss_dice: 0.7166, decode.d0.loss_cls: 0.3690, decode.d0.loss_mask: 0.2603, decode.d0.loss_dice: 0.7751, decode.d1.loss_cls: 0.1756, decode.d1.loss_mask: 0.2490, decode.d1.loss_dice: 0.7352, decode.d2.loss_cls: 0.1621, decode.d2.loss_mask: 0.2473, decode.d2.loss_dice: 0.7226, decode.d3.loss_cls: 0.1530, decode.d3.loss_mask: 0.2470, decode.d3.loss_dice: 0.7145, decode.d4.loss_cls: 0.1486, decode.d4.loss_mask: 0.2479, decode.d4.loss_dice: 0.7152, decode.d5.loss_cls: 0.1498, decode.d5.loss_mask: 0.2469, decode.d5.loss_dice: 0.7178, decode.d6.loss_cls: 0.1436, decode.d6.loss_mask: 0.2468, decode.d6.loss_dice: 0.7147, decode.d7.loss_cls: 0.1439, decode.d7.loss_mask: 0.2474, decode.d7.loss_dice: 0.7135, decode.d8.loss_cls: 0.1552, decode.d8.loss_mask: 0.2467, decode.d8.loss_dice: 0.7149, loss: 11.4741 +2022-05-10 00:45:08,356 - mmseg - INFO - Iter 
[13450/80000] lr: 1.194e-06, eta: 1 day, 12:03:49, time: 1.782, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1451, decode.loss_mask: 0.2558, decode.loss_dice: 0.7136, decode.d0.loss_cls: 0.3607, decode.d0.loss_mask: 0.2710, decode.d0.loss_dice: 0.7696, decode.d1.loss_cls: 0.1727, decode.d1.loss_mask: 0.2588, decode.d1.loss_dice: 0.7320, decode.d2.loss_cls: 0.1580, decode.d2.loss_mask: 0.2562, decode.d2.loss_dice: 0.7246, decode.d3.loss_cls: 0.1552, decode.d3.loss_mask: 0.2564, decode.d3.loss_dice: 0.7138, decode.d4.loss_cls: 0.1500, decode.d4.loss_mask: 0.2564, decode.d4.loss_dice: 0.7128, decode.d5.loss_cls: 0.1452, decode.d5.loss_mask: 0.2564, decode.d5.loss_dice: 0.7183, decode.d6.loss_cls: 0.1462, decode.d6.loss_mask: 0.2557, decode.d6.loss_dice: 0.7123, decode.d7.loss_cls: 0.1437, decode.d7.loss_mask: 0.2559, decode.d7.loss_dice: 0.7136, decode.d8.loss_cls: 0.1458, decode.d8.loss_mask: 0.2559, decode.d8.loss_dice: 0.7151, loss: 11.5267 +2022-05-10 00:46:36,975 - mmseg - INFO - Iter [13500/80000] lr: 1.194e-06, eta: 1 day, 12:01:28, time: 1.772, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1430, decode.loss_mask: 0.2609, decode.loss_dice: 0.7180, decode.d0.loss_cls: 0.3476, decode.d0.loss_mask: 0.2775, decode.d0.loss_dice: 0.7750, decode.d1.loss_cls: 0.1720, decode.d1.loss_mask: 0.2649, decode.d1.loss_dice: 0.7282, decode.d2.loss_cls: 0.1547, decode.d2.loss_mask: 0.2626, decode.d2.loss_dice: 0.7223, decode.d3.loss_cls: 0.1435, decode.d3.loss_mask: 0.2615, decode.d3.loss_dice: 0.7171, decode.d4.loss_cls: 0.1439, decode.d4.loss_mask: 0.2606, decode.d4.loss_dice: 0.7177, decode.d5.loss_cls: 0.1433, decode.d5.loss_mask: 0.2613, decode.d5.loss_dice: 0.7224, decode.d6.loss_cls: 0.1396, decode.d6.loss_mask: 0.2608, decode.d6.loss_dice: 0.7165, decode.d7.loss_cls: 0.1429, decode.d7.loss_mask: 0.2605, decode.d7.loss_dice: 0.7101, decode.d8.loss_cls: 0.1389, decode.d8.loss_mask: 0.2603, decode.d8.loss_dice: 0.7151, loss: 11.5429 +2022-05-10 00:48:05,840 - 
mmseg - INFO - Iter [13550/80000] lr: 1.193e-06, eta: 1 day, 11:59:08, time: 1.777, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1408, decode.loss_mask: 0.2666, decode.loss_dice: 0.6873, decode.d0.loss_cls: 0.3658, decode.d0.loss_mask: 0.2820, decode.d0.loss_dice: 0.7439, decode.d1.loss_cls: 0.1678, decode.d1.loss_mask: 0.2695, decode.d1.loss_dice: 0.7056, decode.d2.loss_cls: 0.1553, decode.d2.loss_mask: 0.2685, decode.d2.loss_dice: 0.6957, decode.d3.loss_cls: 0.1390, decode.d3.loss_mask: 0.2676, decode.d3.loss_dice: 0.6956, decode.d4.loss_cls: 0.1380, decode.d4.loss_mask: 0.2660, decode.d4.loss_dice: 0.6923, decode.d5.loss_cls: 0.1380, decode.d5.loss_mask: 0.2666, decode.d5.loss_dice: 0.6942, decode.d6.loss_cls: 0.1381, decode.d6.loss_mask: 0.2662, decode.d6.loss_dice: 0.6891, decode.d7.loss_cls: 0.1421, decode.d7.loss_mask: 0.2669, decode.d7.loss_dice: 0.6907, decode.d8.loss_cls: 0.1356, decode.d8.loss_mask: 0.2661, decode.d8.loss_dice: 0.6928, loss: 11.3339 +2022-05-10 00:49:37,360 - mmseg - INFO - Iter [13600/80000] lr: 1.192e-06, eta: 1 day, 11:57:01, time: 1.830, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1325, decode.loss_mask: 0.2635, decode.loss_dice: 0.7150, decode.d0.loss_cls: 0.3658, decode.d0.loss_mask: 0.2768, decode.d0.loss_dice: 0.7642, decode.d1.loss_cls: 0.1639, decode.d1.loss_mask: 0.2649, decode.d1.loss_dice: 0.7280, decode.d2.loss_cls: 0.1524, decode.d2.loss_mask: 0.2634, decode.d2.loss_dice: 0.7252, decode.d3.loss_cls: 0.1372, decode.d3.loss_mask: 0.2631, decode.d3.loss_dice: 0.7108, decode.d4.loss_cls: 0.1457, decode.d4.loss_mask: 0.2628, decode.d4.loss_dice: 0.7115, decode.d5.loss_cls: 0.1343, decode.d5.loss_mask: 0.2630, decode.d5.loss_dice: 0.7137, decode.d6.loss_cls: 0.1391, decode.d6.loss_mask: 0.2623, decode.d6.loss_dice: 0.7108, decode.d7.loss_cls: 0.1437, decode.d7.loss_mask: 0.2629, decode.d7.loss_dice: 0.7113, decode.d8.loss_cls: 0.1374, decode.d8.loss_mask: 0.2637, decode.d8.loss_dice: 0.7106, loss: 11.4994 
+2022-05-10 00:51:07,362 - mmseg - INFO - Iter [13650/80000] lr: 1.191e-06, eta: 1 day, 11:54:48, time: 1.800, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1381, decode.loss_mask: 0.2525, decode.loss_dice: 0.7113, decode.d0.loss_cls: 0.3568, decode.d0.loss_mask: 0.2669, decode.d0.loss_dice: 0.7628, decode.d1.loss_cls: 0.1655, decode.d1.loss_mask: 0.2562, decode.d1.loss_dice: 0.7272, decode.d2.loss_cls: 0.1520, decode.d2.loss_mask: 0.2541, decode.d2.loss_dice: 0.7154, decode.d3.loss_cls: 0.1558, decode.d3.loss_mask: 0.2528, decode.d3.loss_dice: 0.7124, decode.d4.loss_cls: 0.1507, decode.d4.loss_mask: 0.2533, decode.d4.loss_dice: 0.7117, decode.d5.loss_cls: 0.1458, decode.d5.loss_mask: 0.2520, decode.d5.loss_dice: 0.7112, decode.d6.loss_cls: 0.1447, decode.d6.loss_mask: 0.2519, decode.d6.loss_dice: 0.7057, decode.d7.loss_cls: 0.1459, decode.d7.loss_mask: 0.2526, decode.d7.loss_dice: 0.7073, decode.d8.loss_cls: 0.1430, decode.d8.loss_mask: 0.2528, decode.d8.loss_dice: 0.7042, loss: 11.4126 +2022-05-10 00:52:35,346 - mmseg - INFO - Iter [13700/80000] lr: 1.190e-06, eta: 1 day, 11:52:25, time: 1.760, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1377, decode.loss_mask: 0.2564, decode.loss_dice: 0.7070, decode.d0.loss_cls: 0.3532, decode.d0.loss_mask: 0.2694, decode.d0.loss_dice: 0.7670, decode.d1.loss_cls: 0.1608, decode.d1.loss_mask: 0.2594, decode.d1.loss_dice: 0.7245, decode.d2.loss_cls: 0.1450, decode.d2.loss_mask: 0.2578, decode.d2.loss_dice: 0.7195, decode.d3.loss_cls: 0.1403, decode.d3.loss_mask: 0.2570, decode.d3.loss_dice: 0.7094, decode.d4.loss_cls: 0.1395, decode.d4.loss_mask: 0.2561, decode.d4.loss_dice: 0.7109, decode.d5.loss_cls: 0.1401, decode.d5.loss_mask: 0.2565, decode.d5.loss_dice: 0.7117, decode.d6.loss_cls: 0.1406, decode.d6.loss_mask: 0.2573, decode.d6.loss_dice: 0.7072, decode.d7.loss_cls: 0.1362, decode.d7.loss_mask: 0.2562, decode.d7.loss_dice: 0.7075, decode.d8.loss_cls: 0.1386, decode.d8.loss_mask: 0.2561, decode.d8.loss_dice: 
0.7094, loss: 11.3885 +2022-05-10 00:54:04,040 - mmseg - INFO - Iter [13750/80000] lr: 1.189e-06, eta: 1 day, 11:50:05, time: 1.774, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1386, decode.loss_mask: 0.2621, decode.loss_dice: 0.7133, decode.d0.loss_cls: 0.3630, decode.d0.loss_mask: 0.2755, decode.d0.loss_dice: 0.7740, decode.d1.loss_cls: 0.1637, decode.d1.loss_mask: 0.2660, decode.d1.loss_dice: 0.7415, decode.d2.loss_cls: 0.1543, decode.d2.loss_mask: 0.2646, decode.d2.loss_dice: 0.7264, decode.d3.loss_cls: 0.1394, decode.d3.loss_mask: 0.2628, decode.d3.loss_dice: 0.7179, decode.d4.loss_cls: 0.1406, decode.d4.loss_mask: 0.2626, decode.d4.loss_dice: 0.7159, decode.d5.loss_cls: 0.1443, decode.d5.loss_mask: 0.2637, decode.d5.loss_dice: 0.7177, decode.d6.loss_cls: 0.1464, decode.d6.loss_mask: 0.2623, decode.d6.loss_dice: 0.7137, decode.d7.loss_cls: 0.1417, decode.d7.loss_mask: 0.2625, decode.d7.loss_dice: 0.7164, decode.d8.loss_cls: 0.1451, decode.d8.loss_mask: 0.2620, decode.d8.loss_dice: 0.7146, loss: 11.5730 +2022-05-10 00:55:35,355 - mmseg - INFO - Iter [13800/80000] lr: 1.188e-06, eta: 1 day, 11:47:59, time: 1.826, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1422, decode.loss_mask: 0.2666, decode.loss_dice: 0.7135, decode.d0.loss_cls: 0.3678, decode.d0.loss_mask: 0.2832, decode.d0.loss_dice: 0.7799, decode.d1.loss_cls: 0.1824, decode.d1.loss_mask: 0.2689, decode.d1.loss_dice: 0.7328, decode.d2.loss_cls: 0.1601, decode.d2.loss_mask: 0.2677, decode.d2.loss_dice: 0.7273, decode.d3.loss_cls: 0.1583, decode.d3.loss_mask: 0.2661, decode.d3.loss_dice: 0.7220, decode.d4.loss_cls: 0.1526, decode.d4.loss_mask: 0.2664, decode.d4.loss_dice: 0.7154, decode.d5.loss_cls: 0.1477, decode.d5.loss_mask: 0.2656, decode.d5.loss_dice: 0.7150, decode.d6.loss_cls: 0.1490, decode.d6.loss_mask: 0.2670, decode.d6.loss_dice: 0.7082, decode.d7.loss_cls: 0.1489, decode.d7.loss_mask: 0.2673, decode.d7.loss_dice: 0.7126, decode.d8.loss_cls: 0.1482, decode.d8.loss_mask: 0.2673, 
decode.d8.loss_dice: 0.7150, loss: 11.6847 +2022-05-10 00:57:03,700 - mmseg - INFO - Iter [13850/80000] lr: 1.187e-06, eta: 1 day, 11:45:39, time: 1.767, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1295, decode.loss_mask: 0.2595, decode.loss_dice: 0.6994, decode.d0.loss_cls: 0.3596, decode.d0.loss_mask: 0.2748, decode.d0.loss_dice: 0.7538, decode.d1.loss_cls: 0.1434, decode.d1.loss_mask: 0.2630, decode.d1.loss_dice: 0.7178, decode.d2.loss_cls: 0.1397, decode.d2.loss_mask: 0.2586, decode.d2.loss_dice: 0.7050, decode.d3.loss_cls: 0.1374, decode.d3.loss_mask: 0.2595, decode.d3.loss_dice: 0.7061, decode.d4.loss_cls: 0.1293, decode.d4.loss_mask: 0.2595, decode.d4.loss_dice: 0.7018, decode.d5.loss_cls: 0.1336, decode.d5.loss_mask: 0.2589, decode.d5.loss_dice: 0.7013, decode.d6.loss_cls: 0.1248, decode.d6.loss_mask: 0.2604, decode.d6.loss_dice: 0.6999, decode.d7.loss_cls: 0.1310, decode.d7.loss_mask: 0.2597, decode.d7.loss_dice: 0.7036, decode.d8.loss_cls: 0.1340, decode.d8.loss_mask: 0.2601, decode.d8.loss_dice: 0.7015, loss: 11.2666 +2022-05-10 00:58:31,813 - mmseg - INFO - Iter [13900/80000] lr: 1.186e-06, eta: 1 day, 11:43:17, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1429, decode.loss_mask: 0.2598, decode.loss_dice: 0.7178, decode.d0.loss_cls: 0.3687, decode.d0.loss_mask: 0.2768, decode.d0.loss_dice: 0.7707, decode.d1.loss_cls: 0.1620, decode.d1.loss_mask: 0.2643, decode.d1.loss_dice: 0.7365, decode.d2.loss_cls: 0.1514, decode.d2.loss_mask: 0.2617, decode.d2.loss_dice: 0.7225, decode.d3.loss_cls: 0.1477, decode.d3.loss_mask: 0.2605, decode.d3.loss_dice: 0.7181, decode.d4.loss_cls: 0.1494, decode.d4.loss_mask: 0.2607, decode.d4.loss_dice: 0.7151, decode.d5.loss_cls: 0.1489, decode.d5.loss_mask: 0.2612, decode.d5.loss_dice: 0.7140, decode.d6.loss_cls: 0.1388, decode.d6.loss_mask: 0.2605, decode.d6.loss_dice: 0.7138, decode.d7.loss_cls: 0.1454, decode.d7.loss_mask: 0.2603, decode.d7.loss_dice: 0.7172, decode.d8.loss_cls: 0.1447, 
decode.d8.loss_mask: 0.2602, decode.d8.loss_dice: 0.7140, loss: 11.5654 +2022-05-10 00:59:59,686 - mmseg - INFO - Iter [13950/80000] lr: 1.185e-06, eta: 1 day, 11:40:56, time: 1.758, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1440, decode.loss_mask: 0.2537, decode.loss_dice: 0.7108, decode.d0.loss_cls: 0.3643, decode.d0.loss_mask: 0.2643, decode.d0.loss_dice: 0.7736, decode.d1.loss_cls: 0.1699, decode.d1.loss_mask: 0.2560, decode.d1.loss_dice: 0.7305, decode.d2.loss_cls: 0.1557, decode.d2.loss_mask: 0.2551, decode.d2.loss_dice: 0.7240, decode.d3.loss_cls: 0.1479, decode.d3.loss_mask: 0.2546, decode.d3.loss_dice: 0.7171, decode.d4.loss_cls: 0.1452, decode.d4.loss_mask: 0.2542, decode.d4.loss_dice: 0.7173, decode.d5.loss_cls: 0.1566, decode.d5.loss_mask: 0.2530, decode.d5.loss_dice: 0.7175, decode.d6.loss_cls: 0.1471, decode.d6.loss_mask: 0.2532, decode.d6.loss_dice: 0.7204, decode.d7.loss_cls: 0.1457, decode.d7.loss_mask: 0.2536, decode.d7.loss_dice: 0.7176, decode.d8.loss_cls: 0.1513, decode.d8.loss_mask: 0.2533, decode.d8.loss_dice: 0.7174, loss: 11.5248 +2022-05-10 01:01:31,484 - mmseg - INFO - Saving checkpoint at 14000 iterations +2022-05-10 01:02:04,852 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 01:02:04,861 - mmseg - INFO - Iter [14000/80000] lr: 1.185e-06, eta: 1 day, 11:41:30, time: 2.501, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1536, decode.loss_mask: 0.2621, decode.loss_dice: 0.7076, decode.d0.loss_cls: 0.3669, decode.d0.loss_mask: 0.2767, decode.d0.loss_dice: 0.7588, decode.d1.loss_cls: 0.1747, decode.d1.loss_mask: 0.2647, decode.d1.loss_dice: 0.7208, decode.d2.loss_cls: 0.1683, decode.d2.loss_mask: 0.2631, decode.d2.loss_dice: 0.7155, decode.d3.loss_cls: 0.1606, decode.d3.loss_mask: 0.2616, decode.d3.loss_dice: 0.7077, decode.d4.loss_cls: 0.1566, decode.d4.loss_mask: 0.2623, decode.d4.loss_dice: 0.7061, decode.d5.loss_cls: 0.1569, decode.d5.loss_mask: 0.2616, decode.d5.loss_dice: 
0.7064, decode.d6.loss_cls: 0.1533, decode.d6.loss_mask: 0.2618, decode.d6.loss_dice: 0.7079, decode.d7.loss_cls: 0.1558, decode.d7.loss_mask: 0.2614, decode.d7.loss_dice: 0.7049, decode.d8.loss_cls: 0.1597, decode.d8.loss_mask: 0.2611, decode.d8.loss_dice: 0.7076, loss: 11.5863 +2022-05-10 01:03:59,808 - mmseg - INFO - per class results: +2022-05-10 01:03:59,814 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.36 | 98.98 | +| sidewalk | 86.54 | 93.35 | +| building | 94.52 | 96.93 | +| wall | 71.41 | 80.44 | +| fence | 72.39 | 83.45 | +| pole | 71.89 | 84.49 | +| traffic light | 77.14 | 88.96 | +| traffic sign | 84.48 | 92.22 | +| vegetation | 93.5 | 97.3 | +| terrain | 67.68 | 77.64 | +| sky | 96.06 | 98.0 | +| person | 87.18 | 93.92 | +| rider | 74.97 | 84.77 | +| car | 96.41 | 98.22 | +| truck | 92.03 | 96.08 | +| bus | 93.25 | 96.82 | +| train | 87.41 | 89.67 | +| motorcycle | 75.16 | 88.12 | +| bicycle | 82.65 | 91.68 | ++---------------+-------+-------+ +2022-05-10 01:03:59,814 - mmseg - INFO - Summary: +2022-05-10 01:03:59,815 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.95 | 84.37 | 91.11 | ++-------+-------+-------+ +2022-05-10 01:03:59,819 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 01:03:59,819 - mmseg - INFO - Iter(val) [32] aAcc: 0.9695, mIoU: 0.8437, mAcc: 0.9111, IoU.road: 0.9836, IoU.sidewalk: 0.8654, IoU.building: 0.9452, IoU.wall: 0.7141, IoU.fence: 0.7239, IoU.pole: 0.7189, IoU.traffic light: 0.7714, IoU.traffic sign: 0.8448, IoU.vegetation: 0.9350, IoU.terrain: 0.6768, IoU.sky: 0.9606, IoU.person: 0.8718, IoU.rider: 0.7497, IoU.car: 0.9641, IoU.truck: 0.9203, IoU.bus: 0.9325, IoU.train: 0.8741, IoU.motorcycle: 0.7516, IoU.bicycle: 0.8265, Acc.road: 0.9898, Acc.sidewalk: 0.9335, Acc.building: 0.9693, Acc.wall: 0.8044, Acc.fence: 0.8345, Acc.pole: 0.8449, Acc.traffic 
light: 0.8896, Acc.traffic sign: 0.9222, Acc.vegetation: 0.9730, Acc.terrain: 0.7764, Acc.sky: 0.9800, Acc.person: 0.9392, Acc.rider: 0.8477, Acc.car: 0.9822, Acc.truck: 0.9608, Acc.bus: 0.9682, Acc.train: 0.8967, Acc.motorcycle: 0.8812, Acc.bicycle: 0.9168 +2022-05-10 01:05:29,106 - mmseg - INFO - Iter [14050/80000] lr: 1.184e-06, eta: 1 day, 11:48:15, time: 4.087, data_time: 2.320, memory: 64699, decode.loss_cls: 0.1343, decode.loss_mask: 0.2629, decode.loss_dice: 0.7142, decode.d0.loss_cls: 0.3590, decode.d0.loss_mask: 0.2738, decode.d0.loss_dice: 0.7641, decode.d1.loss_cls: 0.1633, decode.d1.loss_mask: 0.2662, decode.d1.loss_dice: 0.7290, decode.d2.loss_cls: 0.1570, decode.d2.loss_mask: 0.2638, decode.d2.loss_dice: 0.7193, decode.d3.loss_cls: 0.1457, decode.d3.loss_mask: 0.2633, decode.d3.loss_dice: 0.7148, decode.d4.loss_cls: 0.1477, decode.d4.loss_mask: 0.2637, decode.d4.loss_dice: 0.7172, decode.d5.loss_cls: 0.1458, decode.d5.loss_mask: 0.2628, decode.d5.loss_dice: 0.7156, decode.d6.loss_cls: 0.1348, decode.d6.loss_mask: 0.2631, decode.d6.loss_dice: 0.7198, decode.d7.loss_cls: 0.1427, decode.d7.loss_mask: 0.2628, decode.d7.loss_dice: 0.7131, decode.d8.loss_cls: 0.1434, decode.d8.loss_mask: 0.2629, decode.d8.loss_dice: 0.7165, loss: 11.5425 +2022-05-10 01:06:57,913 - mmseg - INFO - Iter [14100/80000] lr: 1.183e-06, eta: 1 day, 11:45:55, time: 1.776, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1327, decode.loss_mask: 0.2597, decode.loss_dice: 0.7004, decode.d0.loss_cls: 0.3616, decode.d0.loss_mask: 0.2731, decode.d0.loss_dice: 0.7521, decode.d1.loss_cls: 0.1577, decode.d1.loss_mask: 0.2637, decode.d1.loss_dice: 0.7200, decode.d2.loss_cls: 0.1505, decode.d2.loss_mask: 0.2613, decode.d2.loss_dice: 0.7099, decode.d3.loss_cls: 0.1425, decode.d3.loss_mask: 0.2618, decode.d3.loss_dice: 0.7075, decode.d4.loss_cls: 0.1423, decode.d4.loss_mask: 0.2610, decode.d4.loss_dice: 0.7052, decode.d5.loss_cls: 0.1368, decode.d5.loss_mask: 0.2599, decode.d5.loss_dice: 
0.7050, decode.d6.loss_cls: 0.1330, decode.d6.loss_mask: 0.2598, decode.d6.loss_dice: 0.7040, decode.d7.loss_cls: 0.1412, decode.d7.loss_mask: 0.2606, decode.d7.loss_dice: 0.7051, decode.d8.loss_cls: 0.1334, decode.d8.loss_mask: 0.2602, decode.d8.loss_dice: 0.7065, loss: 11.3687 +2022-05-10 01:08:29,146 - mmseg - INFO - Iter [14150/80000] lr: 1.182e-06, eta: 1 day, 11:43:48, time: 1.825, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1341, decode.loss_mask: 0.2592, decode.loss_dice: 0.7034, decode.d0.loss_cls: 0.3595, decode.d0.loss_mask: 0.2713, decode.d0.loss_dice: 0.7555, decode.d1.loss_cls: 0.1611, decode.d1.loss_mask: 0.2615, decode.d1.loss_dice: 0.7238, decode.d2.loss_cls: 0.1447, decode.d2.loss_mask: 0.2593, decode.d2.loss_dice: 0.7150, decode.d3.loss_cls: 0.1433, decode.d3.loss_mask: 0.2595, decode.d3.loss_dice: 0.7072, decode.d4.loss_cls: 0.1405, decode.d4.loss_mask: 0.2590, decode.d4.loss_dice: 0.7088, decode.d5.loss_cls: 0.1334, decode.d5.loss_mask: 0.2591, decode.d5.loss_dice: 0.7105, decode.d6.loss_cls: 0.1340, decode.d6.loss_mask: 0.2588, decode.d6.loss_dice: 0.7072, decode.d7.loss_cls: 0.1341, decode.d7.loss_mask: 0.2596, decode.d7.loss_dice: 0.7108, decode.d8.loss_cls: 0.1329, decode.d8.loss_mask: 0.2584, decode.d8.loss_dice: 0.7089, loss: 11.3745 +2022-05-10 01:09:57,513 - mmseg - INFO - Iter [14200/80000] lr: 1.181e-06, eta: 1 day, 11:41:27, time: 1.767, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1279, decode.loss_mask: 0.2571, decode.loss_dice: 0.7114, decode.d0.loss_cls: 0.3558, decode.d0.loss_mask: 0.2691, decode.d0.loss_dice: 0.7629, decode.d1.loss_cls: 0.1451, decode.d1.loss_mask: 0.2597, decode.d1.loss_dice: 0.7258, decode.d2.loss_cls: 0.1396, decode.d2.loss_mask: 0.2574, decode.d2.loss_dice: 0.7193, decode.d3.loss_cls: 0.1347, decode.d3.loss_mask: 0.2571, decode.d3.loss_dice: 0.7085, decode.d4.loss_cls: 0.1330, decode.d4.loss_mask: 0.2569, decode.d4.loss_dice: 0.7100, decode.d5.loss_cls: 0.1311, decode.d5.loss_mask: 0.2571, 
decode.d5.loss_dice: 0.7110, decode.d6.loss_cls: 0.1289, decode.d6.loss_mask: 0.2570, decode.d6.loss_dice: 0.7063, decode.d7.loss_cls: 0.1377, decode.d7.loss_mask: 0.2566, decode.d7.loss_dice: 0.7054, decode.d8.loss_cls: 0.1266, decode.d8.loss_mask: 0.2573, decode.d8.loss_dice: 0.7114, loss: 11.3176 +2022-05-10 01:11:27,076 - mmseg - INFO - Iter [14250/80000] lr: 1.180e-06, eta: 1 day, 11:39:12, time: 1.791, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1343, decode.loss_mask: 0.2604, decode.loss_dice: 0.7017, decode.d0.loss_cls: 0.3569, decode.d0.loss_mask: 0.2746, decode.d0.loss_dice: 0.7604, decode.d1.loss_cls: 0.1666, decode.d1.loss_mask: 0.2636, decode.d1.loss_dice: 0.7181, decode.d2.loss_cls: 0.1573, decode.d2.loss_mask: 0.2611, decode.d2.loss_dice: 0.7077, decode.d3.loss_cls: 0.1405, decode.d3.loss_mask: 0.2607, decode.d3.loss_dice: 0.7004, decode.d4.loss_cls: 0.1501, decode.d4.loss_mask: 0.2602, decode.d4.loss_dice: 0.7011, decode.d5.loss_cls: 0.1459, decode.d5.loss_mask: 0.2595, decode.d5.loss_dice: 0.7018, decode.d6.loss_cls: 0.1376, decode.d6.loss_mask: 0.2603, decode.d6.loss_dice: 0.6997, decode.d7.loss_cls: 0.1379, decode.d7.loss_mask: 0.2601, decode.d7.loss_dice: 0.7018, decode.d8.loss_cls: 0.1429, decode.d8.loss_mask: 0.2603, decode.d8.loss_dice: 0.6951, loss: 11.3787 +2022-05-10 01:12:54,979 - mmseg - INFO - Iter [14300/80000] lr: 1.179e-06, eta: 1 day, 11:36:50, time: 1.758, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1292, decode.loss_mask: 0.2489, decode.loss_dice: 0.7046, decode.d0.loss_cls: 0.3428, decode.d0.loss_mask: 0.2611, decode.d0.loss_dice: 0.7601, decode.d1.loss_cls: 0.1572, decode.d1.loss_mask: 0.2505, decode.d1.loss_dice: 0.7221, decode.d2.loss_cls: 0.1467, decode.d2.loss_mask: 0.2491, decode.d2.loss_dice: 0.7101, decode.d3.loss_cls: 0.1410, decode.d3.loss_mask: 0.2492, decode.d3.loss_dice: 0.7087, decode.d4.loss_cls: 0.1365, decode.d4.loss_mask: 0.2491, decode.d4.loss_dice: 0.7051, decode.d5.loss_cls: 0.1343, 
decode.d5.loss_mask: 0.2493, decode.d5.loss_dice: 0.7091, decode.d6.loss_cls: 0.1418, decode.d6.loss_mask: 0.2491, decode.d6.loss_dice: 0.7037, decode.d7.loss_cls: 0.1319, decode.d7.loss_mask: 0.2489, decode.d7.loss_dice: 0.7039, decode.d8.loss_cls: 0.1293, decode.d8.loss_mask: 0.2488, decode.d8.loss_dice: 0.7021, loss: 11.2243 +2022-05-10 01:14:24,767 - mmseg - INFO - Iter [14350/80000] lr: 1.178e-06, eta: 1 day, 11:34:36, time: 1.796, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1350, decode.loss_mask: 0.2592, decode.loss_dice: 0.7138, decode.d0.loss_cls: 0.3508, decode.d0.loss_mask: 0.2732, decode.d0.loss_dice: 0.7678, decode.d1.loss_cls: 0.1581, decode.d1.loss_mask: 0.2616, decode.d1.loss_dice: 0.7366, decode.d2.loss_cls: 0.1431, decode.d2.loss_mask: 0.2592, decode.d2.loss_dice: 0.7267, decode.d3.loss_cls: 0.1442, decode.d3.loss_mask: 0.2589, decode.d3.loss_dice: 0.7163, decode.d4.loss_cls: 0.1422, decode.d4.loss_mask: 0.2590, decode.d4.loss_dice: 0.7143, decode.d5.loss_cls: 0.1357, decode.d5.loss_mask: 0.2595, decode.d5.loss_dice: 0.7212, decode.d6.loss_cls: 0.1338, decode.d6.loss_mask: 0.2596, decode.d6.loss_dice: 0.7104, decode.d7.loss_cls: 0.1351, decode.d7.loss_mask: 0.2592, decode.d7.loss_dice: 0.7183, decode.d8.loss_cls: 0.1356, decode.d8.loss_mask: 0.2592, decode.d8.loss_dice: 0.7150, loss: 11.4628 +2022-05-10 01:15:52,339 - mmseg - INFO - Iter [14400/80000] lr: 1.177e-06, eta: 1 day, 11:32:13, time: 1.751, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1381, decode.loss_mask: 0.2562, decode.loss_dice: 0.7021, decode.d0.loss_cls: 0.3574, decode.d0.loss_mask: 0.2699, decode.d0.loss_dice: 0.7618, decode.d1.loss_cls: 0.1641, decode.d1.loss_mask: 0.2576, decode.d1.loss_dice: 0.7200, decode.d2.loss_cls: 0.1427, decode.d2.loss_mask: 0.2576, decode.d2.loss_dice: 0.7154, decode.d3.loss_cls: 0.1380, decode.d3.loss_mask: 0.2571, decode.d3.loss_dice: 0.7038, decode.d4.loss_cls: 0.1390, decode.d4.loss_mask: 0.2564, decode.d4.loss_dice: 0.7008, 
decode.d5.loss_cls: 0.1421, decode.d5.loss_mask: 0.2560, decode.d5.loss_dice: 0.7076, decode.d6.loss_cls: 0.1408, decode.d6.loss_mask: 0.2550, decode.d6.loss_dice: 0.7013, decode.d7.loss_cls: 0.1351, decode.d7.loss_mask: 0.2557, decode.d7.loss_dice: 0.7031, decode.d8.loss_cls: 0.1321, decode.d8.loss_mask: 0.2564, decode.d8.loss_dice: 0.7012, loss: 11.3245 +2022-05-10 01:17:21,420 - mmseg - INFO - Iter [14450/80000] lr: 1.176e-06, eta: 1 day, 11:29:58, time: 1.782, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1326, decode.loss_mask: 0.2565, decode.loss_dice: 0.7120, decode.d0.loss_cls: 0.3559, decode.d0.loss_mask: 0.2686, decode.d0.loss_dice: 0.7547, decode.d1.loss_cls: 0.1569, decode.d1.loss_mask: 0.2584, decode.d1.loss_dice: 0.7221, decode.d2.loss_cls: 0.1365, decode.d2.loss_mask: 0.2580, decode.d2.loss_dice: 0.7174, decode.d3.loss_cls: 0.1381, decode.d3.loss_mask: 0.2565, decode.d3.loss_dice: 0.7101, decode.d4.loss_cls: 0.1400, decode.d4.loss_mask: 0.2575, decode.d4.loss_dice: 0.7091, decode.d5.loss_cls: 0.1412, decode.d5.loss_mask: 0.2571, decode.d5.loss_dice: 0.7109, decode.d6.loss_cls: 0.1370, decode.d6.loss_mask: 0.2568, decode.d6.loss_dice: 0.7082, decode.d7.loss_cls: 0.1289, decode.d7.loss_mask: 0.2564, decode.d7.loss_dice: 0.7075, decode.d8.loss_cls: 0.1316, decode.d8.loss_mask: 0.2563, decode.d8.loss_dice: 0.7097, loss: 11.3423 +2022-05-10 01:18:49,685 - mmseg - INFO - Iter [14500/80000] lr: 1.176e-06, eta: 1 day, 11:27:39, time: 1.765, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1293, decode.loss_mask: 0.2446, decode.loss_dice: 0.6969, decode.d0.loss_cls: 0.3534, decode.d0.loss_mask: 0.2589, decode.d0.loss_dice: 0.7482, decode.d1.loss_cls: 0.1617, decode.d1.loss_mask: 0.2461, decode.d1.loss_dice: 0.7105, decode.d2.loss_cls: 0.1461, decode.d2.loss_mask: 0.2454, decode.d2.loss_dice: 0.7014, decode.d3.loss_cls: 0.1362, decode.d3.loss_mask: 0.2450, decode.d3.loss_dice: 0.6964, decode.d4.loss_cls: 0.1316, decode.d4.loss_mask: 0.2453, 
decode.d4.loss_dice: 0.6948, decode.d5.loss_cls: 0.1348, decode.d5.loss_mask: 0.2444, decode.d5.loss_dice: 0.6935, decode.d6.loss_cls: 0.1350, decode.d6.loss_mask: 0.2440, decode.d6.loss_dice: 0.6904, decode.d7.loss_cls: 0.1236, decode.d7.loss_mask: 0.2449, decode.d7.loss_dice: 0.6971, decode.d8.loss_cls: 0.1400, decode.d8.loss_mask: 0.2447, decode.d8.loss_dice: 0.6932, loss: 11.0772 +2022-05-10 01:20:20,990 - mmseg - INFO - Iter [14550/80000] lr: 1.175e-06, eta: 1 day, 11:25:33, time: 1.826, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1395, decode.loss_mask: 0.2541, decode.loss_dice: 0.7120, decode.d0.loss_cls: 0.3503, decode.d0.loss_mask: 0.2669, decode.d0.loss_dice: 0.7683, decode.d1.loss_cls: 0.1621, decode.d1.loss_mask: 0.2569, decode.d1.loss_dice: 0.7305, decode.d2.loss_cls: 0.1491, decode.d2.loss_mask: 0.2555, decode.d2.loss_dice: 0.7186, decode.d3.loss_cls: 0.1377, decode.d3.loss_mask: 0.2544, decode.d3.loss_dice: 0.7109, decode.d4.loss_cls: 0.1390, decode.d4.loss_mask: 0.2543, decode.d4.loss_dice: 0.7113, decode.d5.loss_cls: 0.1370, decode.d5.loss_mask: 0.2545, decode.d5.loss_dice: 0.7114, decode.d6.loss_cls: 0.1342, decode.d6.loss_mask: 0.2542, decode.d6.loss_dice: 0.7098, decode.d7.loss_cls: 0.1354, decode.d7.loss_mask: 0.2542, decode.d7.loss_dice: 0.7038, decode.d8.loss_cls: 0.1343, decode.d8.loss_mask: 0.2542, decode.d8.loss_dice: 0.7077, loss: 11.3621 +2022-05-10 01:21:50,027 - mmseg - INFO - Iter [14600/80000] lr: 1.174e-06, eta: 1 day, 11:23:18, time: 1.781, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1233, decode.loss_mask: 0.2560, decode.loss_dice: 0.7009, decode.d0.loss_cls: 0.3563, decode.d0.loss_mask: 0.2702, decode.d0.loss_dice: 0.7528, decode.d1.loss_cls: 0.1421, decode.d1.loss_mask: 0.2602, decode.d1.loss_dice: 0.7181, decode.d2.loss_cls: 0.1332, decode.d2.loss_mask: 0.2581, decode.d2.loss_dice: 0.7148, decode.d3.loss_cls: 0.1238, decode.d3.loss_mask: 0.2569, decode.d3.loss_dice: 0.7083, decode.d4.loss_cls: 0.1271, 
decode.d4.loss_mask: 0.2569, decode.d4.loss_dice: 0.7043, decode.d5.loss_cls: 0.1293, decode.d5.loss_mask: 0.2572, decode.d5.loss_dice: 0.7027, decode.d6.loss_cls: 0.1207, decode.d6.loss_mask: 0.2569, decode.d6.loss_dice: 0.6977, decode.d7.loss_cls: 0.1257, decode.d7.loss_mask: 0.2572, decode.d7.loss_dice: 0.7026, decode.d8.loss_cls: 0.1219, decode.d8.loss_mask: 0.2570, decode.d8.loss_dice: 0.6999, loss: 11.1925 +2022-05-10 01:23:19,043 - mmseg - INFO - Iter [14650/80000] lr: 1.173e-06, eta: 1 day, 11:21:04, time: 1.780, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1482, decode.loss_mask: 0.2634, decode.loss_dice: 0.6961, decode.d0.loss_cls: 0.3625, decode.d0.loss_mask: 0.2798, decode.d0.loss_dice: 0.7516, decode.d1.loss_cls: 0.1681, decode.d1.loss_mask: 0.2663, decode.d1.loss_dice: 0.7125, decode.d2.loss_cls: 0.1574, decode.d2.loss_mask: 0.2631, decode.d2.loss_dice: 0.7064, decode.d3.loss_cls: 0.1498, decode.d3.loss_mask: 0.2628, decode.d3.loss_dice: 0.7005, decode.d4.loss_cls: 0.1442, decode.d4.loss_mask: 0.2624, decode.d4.loss_dice: 0.7015, decode.d5.loss_cls: 0.1421, decode.d5.loss_mask: 0.2626, decode.d5.loss_dice: 0.7025, decode.d6.loss_cls: 0.1502, decode.d6.loss_mask: 0.2630, decode.d6.loss_dice: 0.6955, decode.d7.loss_cls: 0.1438, decode.d7.loss_mask: 0.2634, decode.d7.loss_dice: 0.7009, decode.d8.loss_cls: 0.1362, decode.d8.loss_mask: 0.2628, decode.d8.loss_dice: 0.7025, loss: 11.4221 +2022-05-10 01:24:50,371 - mmseg - INFO - Iter [14700/80000] lr: 1.172e-06, eta: 1 day, 11:18:59, time: 1.826, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1518, decode.loss_mask: 0.2630, decode.loss_dice: 0.7073, decode.d0.loss_cls: 0.3728, decode.d0.loss_mask: 0.2771, decode.d0.loss_dice: 0.7688, decode.d1.loss_cls: 0.1731, decode.d1.loss_mask: 0.2671, decode.d1.loss_dice: 0.7330, decode.d2.loss_cls: 0.1613, decode.d2.loss_mask: 0.2649, decode.d2.loss_dice: 0.7198, decode.d3.loss_cls: 0.1620, decode.d3.loss_mask: 0.2641, decode.d3.loss_dice: 0.7072, 
decode.d4.loss_cls: 0.1611, decode.d4.loss_mask: 0.2642, decode.d4.loss_dice: 0.7121, decode.d5.loss_cls: 0.1569, decode.d5.loss_mask: 0.2649, decode.d5.loss_dice: 0.7118, decode.d6.loss_cls: 0.1580, decode.d6.loss_mask: 0.2637, decode.d6.loss_dice: 0.7117, decode.d7.loss_cls: 0.1571, decode.d7.loss_mask: 0.2639, decode.d7.loss_dice: 0.7105, decode.d8.loss_cls: 0.1553, decode.d8.loss_mask: 0.2640, decode.d8.loss_dice: 0.7111, loss: 11.6594 +2022-05-10 01:26:19,201 - mmseg - INFO - Iter [14750/80000] lr: 1.171e-06, eta: 1 day, 11:16:44, time: 1.777, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1331, decode.loss_mask: 0.2585, decode.loss_dice: 0.7168, decode.d0.loss_cls: 0.3583, decode.d0.loss_mask: 0.2709, decode.d0.loss_dice: 0.7757, decode.d1.loss_cls: 0.1514, decode.d1.loss_mask: 0.2616, decode.d1.loss_dice: 0.7347, decode.d2.loss_cls: 0.1432, decode.d2.loss_mask: 0.2599, decode.d2.loss_dice: 0.7264, decode.d3.loss_cls: 0.1471, decode.d3.loss_mask: 0.2598, decode.d3.loss_dice: 0.7149, decode.d4.loss_cls: 0.1456, decode.d4.loss_mask: 0.2596, decode.d4.loss_dice: 0.7203, decode.d5.loss_cls: 0.1340, decode.d5.loss_mask: 0.2588, decode.d5.loss_dice: 0.7201, decode.d6.loss_cls: 0.1372, decode.d6.loss_mask: 0.2590, decode.d6.loss_dice: 0.7165, decode.d7.loss_cls: 0.1345, decode.d7.loss_mask: 0.2592, decode.d7.loss_dice: 0.7155, decode.d8.loss_cls: 0.1324, decode.d8.loss_mask: 0.2588, decode.d8.loss_dice: 0.7181, loss: 11.4818 +2022-05-10 01:27:47,527 - mmseg - INFO - Iter [14800/80000] lr: 1.170e-06, eta: 1 day, 11:14:27, time: 1.766, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1368, decode.loss_mask: 0.2629, decode.loss_dice: 0.7023, decode.d0.loss_cls: 0.3510, decode.d0.loss_mask: 0.2743, decode.d0.loss_dice: 0.7617, decode.d1.loss_cls: 0.1632, decode.d1.loss_mask: 0.2657, decode.d1.loss_dice: 0.7208, decode.d2.loss_cls: 0.1497, decode.d2.loss_mask: 0.2635, decode.d2.loss_dice: 0.7123, decode.d3.loss_cls: 0.1383, decode.d3.loss_mask: 0.2633, 
decode.d3.loss_dice: 0.7062, decode.d4.loss_cls: 0.1420, decode.d4.loss_mask: 0.2634, decode.d4.loss_dice: 0.7067, decode.d5.loss_cls: 0.1381, decode.d5.loss_mask: 0.2628, decode.d5.loss_dice: 0.7079, decode.d6.loss_cls: 0.1340, decode.d6.loss_mask: 0.2616, decode.d6.loss_dice: 0.7046, decode.d7.loss_cls: 0.1363, decode.d7.loss_mask: 0.2628, decode.d7.loss_dice: 0.7056, decode.d8.loss_cls: 0.1368, decode.d8.loss_mask: 0.2635, decode.d8.loss_dice: 0.7011, loss: 11.3992 +2022-05-10 01:29:17,404 - mmseg - INFO - Iter [14850/80000] lr: 1.169e-06, eta: 1 day, 11:12:18, time: 1.798, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1350, decode.loss_mask: 0.2557, decode.loss_dice: 0.6903, decode.d0.loss_cls: 0.3405, decode.d0.loss_mask: 0.2670, decode.d0.loss_dice: 0.7463, decode.d1.loss_cls: 0.1600, decode.d1.loss_mask: 0.2577, decode.d1.loss_dice: 0.7075, decode.d2.loss_cls: 0.1528, decode.d2.loss_mask: 0.2553, decode.d2.loss_dice: 0.6971, decode.d3.loss_cls: 0.1421, decode.d3.loss_mask: 0.2546, decode.d3.loss_dice: 0.6947, decode.d4.loss_cls: 0.1450, decode.d4.loss_mask: 0.2560, decode.d4.loss_dice: 0.6937, decode.d5.loss_cls: 0.1442, decode.d5.loss_mask: 0.2556, decode.d5.loss_dice: 0.6939, decode.d6.loss_cls: 0.1452, decode.d6.loss_mask: 0.2569, decode.d6.loss_dice: 0.6902, decode.d7.loss_cls: 0.1405, decode.d7.loss_mask: 0.2562, decode.d7.loss_dice: 0.6896, decode.d8.loss_cls: 0.1336, decode.d8.loss_mask: 0.2565, decode.d8.loss_dice: 0.6924, loss: 11.2060 +2022-05-10 01:30:48,559 - mmseg - INFO - Iter [14900/80000] lr: 1.168e-06, eta: 1 day, 11:10:14, time: 1.823, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1214, decode.loss_mask: 0.2487, decode.loss_dice: 0.6885, decode.d0.loss_cls: 0.3524, decode.d0.loss_mask: 0.2611, decode.d0.loss_dice: 0.7401, decode.d1.loss_cls: 0.1442, decode.d1.loss_mask: 0.2512, decode.d1.loss_dice: 0.7099, decode.d2.loss_cls: 0.1365, decode.d2.loss_mask: 0.2500, decode.d2.loss_dice: 0.7018, decode.d3.loss_cls: 0.1289, 
decode.d3.loss_mask: 0.2502, decode.d3.loss_dice: 0.6921, decode.d4.loss_cls: 0.1321, decode.d4.loss_mask: 0.2504, decode.d4.loss_dice: 0.6928, decode.d5.loss_cls: 0.1315, decode.d5.loss_mask: 0.2492, decode.d5.loss_dice: 0.6898, decode.d6.loss_cls: 0.1283, decode.d6.loss_mask: 0.2496, decode.d6.loss_dice: 0.6921, decode.d7.loss_cls: 0.1284, decode.d7.loss_mask: 0.2495, decode.d7.loss_dice: 0.6920, decode.d8.loss_cls: 0.1291, decode.d8.loss_mask: 0.2493, decode.d8.loss_dice: 0.6903, loss: 11.0313 +2022-05-10 01:32:17,280 - mmseg - INFO - Iter [14950/80000] lr: 1.167e-06, eta: 1 day, 11:07:59, time: 1.774, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1314, decode.loss_mask: 0.2542, decode.loss_dice: 0.6877, decode.d0.loss_cls: 0.3586, decode.d0.loss_mask: 0.2662, decode.d0.loss_dice: 0.7438, decode.d1.loss_cls: 0.1649, decode.d1.loss_mask: 0.2555, decode.d1.loss_dice: 0.7057, decode.d2.loss_cls: 0.1357, decode.d2.loss_mask: 0.2551, decode.d2.loss_dice: 0.6953, decode.d3.loss_cls: 0.1383, decode.d3.loss_mask: 0.2538, decode.d3.loss_dice: 0.6929, decode.d4.loss_cls: 0.1352, decode.d4.loss_mask: 0.2542, decode.d4.loss_dice: 0.6900, decode.d5.loss_cls: 0.1300, decode.d5.loss_mask: 0.2545, decode.d5.loss_dice: 0.6869, decode.d6.loss_cls: 0.1283, decode.d6.loss_mask: 0.2559, decode.d6.loss_dice: 0.6866, decode.d7.loss_cls: 0.1258, decode.d7.loss_mask: 0.2550, decode.d7.loss_dice: 0.6866, decode.d8.loss_cls: 0.1359, decode.d8.loss_mask: 0.2549, decode.d8.loss_dice: 0.6869, loss: 11.1058 +2022-05-10 01:33:46,223 - mmseg - INFO - Saving checkpoint at 15000 iterations +2022-05-10 01:34:16,938 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 01:34:16,947 - mmseg - INFO - Iter [15000/80000] lr: 1.167e-06, eta: 1 day, 11:07:59, time: 2.391, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1292, decode.loss_mask: 0.2516, decode.loss_dice: 0.7086, decode.d0.loss_cls: 0.3636, decode.d0.loss_mask: 0.2667, decode.d0.loss_dice: 
0.7576, decode.d1.loss_cls: 0.1470, decode.d1.loss_mask: 0.2540, decode.d1.loss_dice: 0.7304, decode.d2.loss_cls: 0.1432, decode.d2.loss_mask: 0.2516, decode.d2.loss_dice: 0.7139, decode.d3.loss_cls: 0.1254, decode.d3.loss_mask: 0.2514, decode.d3.loss_dice: 0.7035, decode.d4.loss_cls: 0.1405, decode.d4.loss_mask: 0.2514, decode.d4.loss_dice: 0.7048, decode.d5.loss_cls: 0.1307, decode.d5.loss_mask: 0.2513, decode.d5.loss_dice: 0.7044, decode.d6.loss_cls: 0.1315, decode.d6.loss_mask: 0.2504, decode.d6.loss_dice: 0.7029, decode.d7.loss_cls: 0.1279, decode.d7.loss_mask: 0.2505, decode.d7.loss_dice: 0.7038, decode.d8.loss_cls: 0.1335, decode.d8.loss_mask: 0.2512, decode.d8.loss_dice: 0.7076, loss: 11.2402 +2022-05-10 01:36:12,139 - mmseg - INFO - per class results: +2022-05-10 01:36:12,146 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.63 | 99.31 | +| sidewalk | 88.86 | 93.19 | +| building | 94.48 | 96.76 | +| wall | 67.82 | 80.1 | +| fence | 73.61 | 84.07 | +| pole | 72.39 | 85.27 | +| traffic light | 76.93 | 89.27 | +| traffic sign | 84.97 | 91.92 | +| vegetation | 93.36 | 97.22 | +| terrain | 68.55 | 79.3 | +| sky | 95.99 | 98.31 | +| person | 87.15 | 93.49 | +| rider | 74.76 | 87.26 | +| car | 96.26 | 98.27 | +| truck | 89.55 | 92.05 | +| bus | 94.53 | 96.88 | +| train | 90.12 | 93.12 | +| motorcycle | 76.16 | 88.17 | +| bicycle | 82.4 | 91.51 | ++---------------+-------+-------+ +2022-05-10 01:36:12,146 - mmseg - INFO - Summary: +2022-05-10 01:36:12,146 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.04 | 84.55 | 91.34 | ++-------+-------+-------+ +2022-05-10 01:36:12,150 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 01:36:12,150 - mmseg - INFO - Iter(val) [32] aAcc: 0.9704, mIoU: 0.8455, mAcc: 0.9134, IoU.road: 0.9863, IoU.sidewalk: 0.8886, IoU.building: 0.9448, IoU.wall: 0.6782, 
IoU.fence: 0.7361, IoU.pole: 0.7239, IoU.traffic light: 0.7693, IoU.traffic sign: 0.8497, IoU.vegetation: 0.9336, IoU.terrain: 0.6855, IoU.sky: 0.9599, IoU.person: 0.8715, IoU.rider: 0.7476, IoU.car: 0.9626, IoU.truck: 0.8955, IoU.bus: 0.9453, IoU.train: 0.9012, IoU.motorcycle: 0.7616, IoU.bicycle: 0.8240, Acc.road: 0.9931, Acc.sidewalk: 0.9319, Acc.building: 0.9676, Acc.wall: 0.8010, Acc.fence: 0.8407, Acc.pole: 0.8527, Acc.traffic light: 0.8927, Acc.traffic sign: 0.9192, Acc.vegetation: 0.9722, Acc.terrain: 0.7930, Acc.sky: 0.9831, Acc.person: 0.9349, Acc.rider: 0.8726, Acc.car: 0.9827, Acc.truck: 0.9205, Acc.bus: 0.9688, Acc.train: 0.9312, Acc.motorcycle: 0.8817, Acc.bicycle: 0.9151 +2022-05-10 01:37:40,666 - mmseg - INFO - Iter [15050/80000] lr: 1.166e-06, eta: 1 day, 11:14:01, time: 4.077, data_time: 2.324, memory: 64699, decode.loss_cls: 0.1583, decode.loss_mask: 0.2657, decode.loss_dice: 0.7138, decode.d0.loss_cls: 0.3677, decode.d0.loss_mask: 0.2796, decode.d0.loss_dice: 0.7698, decode.d1.loss_cls: 0.1830, decode.d1.loss_mask: 0.2678, decode.d1.loss_dice: 0.7311, decode.d2.loss_cls: 0.1687, decode.d2.loss_mask: 0.2662, decode.d2.loss_dice: 0.7256, decode.d3.loss_cls: 0.1623, decode.d3.loss_mask: 0.2652, decode.d3.loss_dice: 0.7225, decode.d4.loss_cls: 0.1574, decode.d4.loss_mask: 0.2654, decode.d4.loss_dice: 0.7196, decode.d5.loss_cls: 0.1565, decode.d5.loss_mask: 0.2651, decode.d5.loss_dice: 0.7156, decode.d6.loss_cls: 0.1625, decode.d6.loss_mask: 0.2650, decode.d6.loss_dice: 0.7084, decode.d7.loss_cls: 0.1519, decode.d7.loss_mask: 0.2663, decode.d7.loss_dice: 0.7166, decode.d8.loss_cls: 0.1496, decode.d8.loss_mask: 0.2653, decode.d8.loss_dice: 0.7145, loss: 11.7268 +2022-05-10 01:39:11,632 - mmseg - INFO - Iter [15100/80000] lr: 1.165e-06, eta: 1 day, 11:11:55, time: 1.819, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1342, decode.loss_mask: 0.2557, decode.loss_dice: 0.6958, decode.d0.loss_cls: 0.3568, decode.d0.loss_mask: 0.2702, 
decode.d0.loss_dice: 0.7505, decode.d1.loss_cls: 0.1585, decode.d1.loss_mask: 0.2576, decode.d1.loss_dice: 0.7102, decode.d2.loss_cls: 0.1489, decode.d2.loss_mask: 0.2568, decode.d2.loss_dice: 0.7053, decode.d3.loss_cls: 0.1307, decode.d3.loss_mask: 0.2558, decode.d3.loss_dice: 0.6993, decode.d4.loss_cls: 0.1415, decode.d4.loss_mask: 0.2562, decode.d4.loss_dice: 0.6938, decode.d5.loss_cls: 0.1374, decode.d5.loss_mask: 0.2562, decode.d5.loss_dice: 0.6967, decode.d6.loss_cls: 0.1370, decode.d6.loss_mask: 0.2559, decode.d6.loss_dice: 0.6924, decode.d7.loss_cls: 0.1355, decode.d7.loss_mask: 0.2556, decode.d7.loss_dice: 0.6962, decode.d8.loss_cls: 0.1343, decode.d8.loss_mask: 0.2551, decode.d8.loss_dice: 0.6958, loss: 11.2259 +2022-05-10 01:40:40,367 - mmseg - INFO - Iter [15150/80000] lr: 1.164e-06, eta: 1 day, 11:09:39, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1428, decode.loss_mask: 0.2536, decode.loss_dice: 0.7028, decode.d0.loss_cls: 0.3716, decode.d0.loss_mask: 0.2682, decode.d0.loss_dice: 0.7595, decode.d1.loss_cls: 0.1725, decode.d1.loss_mask: 0.2583, decode.d1.loss_dice: 0.7197, decode.d2.loss_cls: 0.1576, decode.d2.loss_mask: 0.2552, decode.d2.loss_dice: 0.7101, decode.d3.loss_cls: 0.1518, decode.d3.loss_mask: 0.2542, decode.d3.loss_dice: 0.7086, decode.d4.loss_cls: 0.1477, decode.d4.loss_mask: 0.2543, decode.d4.loss_dice: 0.7051, decode.d5.loss_cls: 0.1483, decode.d5.loss_mask: 0.2536, decode.d5.loss_dice: 0.7020, decode.d6.loss_cls: 0.1429, decode.d6.loss_mask: 0.2543, decode.d6.loss_dice: 0.7008, decode.d7.loss_cls: 0.1411, decode.d7.loss_mask: 0.2538, decode.d7.loss_dice: 0.7004, decode.d8.loss_cls: 0.1492, decode.d8.loss_mask: 0.2535, decode.d8.loss_dice: 0.6998, loss: 11.3934 +2022-05-10 01:42:08,725 - mmseg - INFO - Iter [15200/80000] lr: 1.163e-06, eta: 1 day, 11:07:22, time: 1.767, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1307, decode.loss_mask: 0.2494, decode.loss_dice: 0.6938, decode.d0.loss_cls: 0.3488, 
decode.d0.loss_mask: 0.2626, decode.d0.loss_dice: 0.7374, decode.d1.loss_cls: 0.1519, decode.d1.loss_mask: 0.2519, decode.d1.loss_dice: 0.7055, decode.d2.loss_cls: 0.1502, decode.d2.loss_mask: 0.2498, decode.d2.loss_dice: 0.6938, decode.d3.loss_cls: 0.1432, decode.d3.loss_mask: 0.2498, decode.d3.loss_dice: 0.6917, decode.d4.loss_cls: 0.1363, decode.d4.loss_mask: 0.2487, decode.d4.loss_dice: 0.6918, decode.d5.loss_cls: 0.1365, decode.d5.loss_mask: 0.2500, decode.d5.loss_dice: 0.6941, decode.d6.loss_cls: 0.1324, decode.d6.loss_mask: 0.2500, decode.d6.loss_dice: 0.6910, decode.d7.loss_cls: 0.1332, decode.d7.loss_mask: 0.2496, decode.d7.loss_dice: 0.6894, decode.d8.loss_cls: 0.1301, decode.d8.loss_mask: 0.2497, decode.d8.loss_dice: 0.6953, loss: 11.0884 +2022-05-10 01:43:37,356 - mmseg - INFO - Iter [15250/80000] lr: 1.162e-06, eta: 1 day, 11:05:07, time: 1.772, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1344, decode.loss_mask: 0.2518, decode.loss_dice: 0.7091, decode.d0.loss_cls: 0.3499, decode.d0.loss_mask: 0.2668, decode.d0.loss_dice: 0.7607, decode.d1.loss_cls: 0.1639, decode.d1.loss_mask: 0.2545, decode.d1.loss_dice: 0.7294, decode.d2.loss_cls: 0.1509, decode.d2.loss_mask: 0.2528, decode.d2.loss_dice: 0.7147, decode.d3.loss_cls: 0.1377, decode.d3.loss_mask: 0.2523, decode.d3.loss_dice: 0.7076, decode.d4.loss_cls: 0.1420, decode.d4.loss_mask: 0.2529, decode.d4.loss_dice: 0.7108, decode.d5.loss_cls: 0.1406, decode.d5.loss_mask: 0.2525, decode.d5.loss_dice: 0.7124, decode.d6.loss_cls: 0.1417, decode.d6.loss_mask: 0.2532, decode.d6.loss_dice: 0.7121, decode.d7.loss_cls: 0.1390, decode.d7.loss_mask: 0.2515, decode.d7.loss_dice: 0.7067, decode.d8.loss_cls: 0.1334, decode.d8.loss_mask: 0.2522, decode.d8.loss_dice: 0.7072, loss: 11.3445 +2022-05-10 01:45:07,654 - mmseg - INFO - Iter [15300/80000] lr: 1.161e-06, eta: 1 day, 11:02:59, time: 1.806, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1314, decode.loss_mask: 0.2551, decode.loss_dice: 0.7113, 
decode.d0.loss_cls: 0.3488, decode.d0.loss_mask: 0.2682, decode.d0.loss_dice: 0.7641, decode.d1.loss_cls: 0.1451, decode.d1.loss_mask: 0.2582, decode.d1.loss_dice: 0.7312, decode.d2.loss_cls: 0.1356, decode.d2.loss_mask: 0.2566, decode.d2.loss_dice: 0.7185, decode.d3.loss_cls: 0.1356, decode.d3.loss_mask: 0.2558, decode.d3.loss_dice: 0.7130, decode.d4.loss_cls: 0.1357, decode.d4.loss_mask: 0.2552, decode.d4.loss_dice: 0.7111, decode.d5.loss_cls: 0.1338, decode.d5.loss_mask: 0.2556, decode.d5.loss_dice: 0.7068, decode.d6.loss_cls: 0.1341, decode.d6.loss_mask: 0.2553, decode.d6.loss_dice: 0.7089, decode.d7.loss_cls: 0.1304, decode.d7.loss_mask: 0.2546, decode.d7.loss_dice: 0.7088, decode.d8.loss_cls: 0.1306, decode.d8.loss_mask: 0.2542, decode.d8.loss_dice: 0.7090, loss: 11.3124 +2022-05-10 01:46:36,388 - mmseg - INFO - Iter [15350/80000] lr: 1.160e-06, eta: 1 day, 11:00:44, time: 1.774, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1347, decode.loss_mask: 0.2563, decode.loss_dice: 0.6998, decode.d0.loss_cls: 0.3550, decode.d0.loss_mask: 0.2684, decode.d0.loss_dice: 0.7478, decode.d1.loss_cls: 0.1545, decode.d1.loss_mask: 0.2578, decode.d1.loss_dice: 0.7214, decode.d2.loss_cls: 0.1426, decode.d2.loss_mask: 0.2572, decode.d2.loss_dice: 0.7086, decode.d3.loss_cls: 0.1345, decode.d3.loss_mask: 0.2565, decode.d3.loss_dice: 0.7001, decode.d4.loss_cls: 0.1377, decode.d4.loss_mask: 0.2573, decode.d4.loss_dice: 0.7006, decode.d5.loss_cls: 0.1336, decode.d5.loss_mask: 0.2571, decode.d5.loss_dice: 0.7035, decode.d6.loss_cls: 0.1266, decode.d6.loss_mask: 0.2571, decode.d6.loss_dice: 0.6957, decode.d7.loss_cls: 0.1418, decode.d7.loss_mask: 0.2565, decode.d7.loss_dice: 0.7004, decode.d8.loss_cls: 0.1351, decode.d8.loss_mask: 0.2566, decode.d8.loss_dice: 0.7031, loss: 11.2581 +2022-05-10 01:48:05,351 - mmseg - INFO - Iter [15400/80000] lr: 1.159e-06, eta: 1 day, 10:58:31, time: 1.779, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1344, decode.loss_mask: 0.2585, 
decode.loss_dice: 0.6898, decode.d0.loss_cls: 0.3688, decode.d0.loss_mask: 0.2712, decode.d0.loss_dice: 0.7399, decode.d1.loss_cls: 0.1632, decode.d1.loss_mask: 0.2597, decode.d1.loss_dice: 0.7062, decode.d2.loss_cls: 0.1562, decode.d2.loss_mask: 0.2583, decode.d2.loss_dice: 0.6970, decode.d3.loss_cls: 0.1461, decode.d3.loss_mask: 0.2576, decode.d3.loss_dice: 0.6939, decode.d4.loss_cls: 0.1430, decode.d4.loss_mask: 0.2578, decode.d4.loss_dice: 0.6947, decode.d5.loss_cls: 0.1411, decode.d5.loss_mask: 0.2572, decode.d5.loss_dice: 0.6909, decode.d6.loss_cls: 0.1414, decode.d6.loss_mask: 0.2581, decode.d6.loss_dice: 0.6882, decode.d7.loss_cls: 0.1432, decode.d7.loss_mask: 0.2573, decode.d7.loss_dice: 0.6901, decode.d8.loss_cls: 0.1448, decode.d8.loss_mask: 0.2571, decode.d8.loss_dice: 0.6876, loss: 11.2532 +2022-05-10 01:49:38,015 - mmseg - INFO - Iter [15450/80000] lr: 1.159e-06, eta: 1 day, 10:56:33, time: 1.854, data_time: 0.069, memory: 64699, decode.loss_cls: 0.1398, decode.loss_mask: 0.2486, decode.loss_dice: 0.7079, decode.d0.loss_cls: 0.3587, decode.d0.loss_mask: 0.2643, decode.d0.loss_dice: 0.7554, decode.d1.loss_cls: 0.1700, decode.d1.loss_mask: 0.2526, decode.d1.loss_dice: 0.7259, decode.d2.loss_cls: 0.1517, decode.d2.loss_mask: 0.2505, decode.d2.loss_dice: 0.7194, decode.d3.loss_cls: 0.1475, decode.d3.loss_mask: 0.2496, decode.d3.loss_dice: 0.7081, decode.d4.loss_cls: 0.1404, decode.d4.loss_mask: 0.2491, decode.d4.loss_dice: 0.7090, decode.d5.loss_cls: 0.1497, decode.d5.loss_mask: 0.2484, decode.d5.loss_dice: 0.7109, decode.d6.loss_cls: 0.1435, decode.d6.loss_mask: 0.2486, decode.d6.loss_dice: 0.7064, decode.d7.loss_cls: 0.1418, decode.d7.loss_mask: 0.2482, decode.d7.loss_dice: 0.7044, decode.d8.loss_cls: 0.1395, decode.d8.loss_mask: 0.2496, decode.d8.loss_dice: 0.7063, loss: 11.3456 +2022-05-10 01:51:06,580 - mmseg - INFO - Iter [15500/80000] lr: 1.158e-06, eta: 1 day, 10:54:19, time: 1.771, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1381, 
decode.loss_mask: 0.2490, decode.loss_dice: 0.6961, decode.d0.loss_cls: 0.3580, decode.d0.loss_mask: 0.2643, decode.d0.loss_dice: 0.7526, decode.d1.loss_cls: 0.1641, decode.d1.loss_mask: 0.2538, decode.d1.loss_dice: 0.7133, decode.d2.loss_cls: 0.1481, decode.d2.loss_mask: 0.2501, decode.d2.loss_dice: 0.7088, decode.d3.loss_cls: 0.1398, decode.d3.loss_mask: 0.2497, decode.d3.loss_dice: 0.6987, decode.d4.loss_cls: 0.1391, decode.d4.loss_mask: 0.2496, decode.d4.loss_dice: 0.7020, decode.d5.loss_cls: 0.1401, decode.d5.loss_mask: 0.2502, decode.d5.loss_dice: 0.6948, decode.d6.loss_cls: 0.1316, decode.d6.loss_mask: 0.2491, decode.d6.loss_dice: 0.6940, decode.d7.loss_cls: 0.1332, decode.d7.loss_mask: 0.2497, decode.d7.loss_dice: 0.6983, decode.d8.loss_cls: 0.1371, decode.d8.loss_mask: 0.2500, decode.d8.loss_dice: 0.6968, loss: 11.1999 +2022-05-10 01:52:35,371 - mmseg - INFO - Iter [15550/80000] lr: 1.157e-06, eta: 1 day, 10:52:06, time: 1.776, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1308, decode.loss_mask: 0.2520, decode.loss_dice: 0.6873, decode.d0.loss_cls: 0.3488, decode.d0.loss_mask: 0.2662, decode.d0.loss_dice: 0.7435, decode.d1.loss_cls: 0.1532, decode.d1.loss_mask: 0.2548, decode.d1.loss_dice: 0.7092, decode.d2.loss_cls: 0.1386, decode.d2.loss_mask: 0.2540, decode.d2.loss_dice: 0.7021, decode.d3.loss_cls: 0.1285, decode.d3.loss_mask: 0.2533, decode.d3.loss_dice: 0.6908, decode.d4.loss_cls: 0.1307, decode.d4.loss_mask: 0.2524, decode.d4.loss_dice: 0.6875, decode.d5.loss_cls: 0.1310, decode.d5.loss_mask: 0.2520, decode.d5.loss_dice: 0.6927, decode.d6.loss_cls: 0.1274, decode.d6.loss_mask: 0.2527, decode.d6.loss_dice: 0.6944, decode.d7.loss_cls: 0.1297, decode.d7.loss_mask: 0.2523, decode.d7.loss_dice: 0.6909, decode.d8.loss_cls: 0.1267, decode.d8.loss_mask: 0.2522, decode.d8.loss_dice: 0.6909, loss: 11.0765 +2022-05-10 01:54:04,503 - mmseg - INFO - Iter [15600/80000] lr: 1.156e-06, eta: 1 day, 10:49:54, time: 1.782, data_time: 0.020, memory: 64699, 
decode.loss_cls: 0.1249, decode.loss_mask: 0.2403, decode.loss_dice: 0.6902, decode.d0.loss_cls: 0.3412, decode.d0.loss_mask: 0.2535, decode.d0.loss_dice: 0.7386, decode.d1.loss_cls: 0.1491, decode.d1.loss_mask: 0.2429, decode.d1.loss_dice: 0.7069, decode.d2.loss_cls: 0.1315, decode.d2.loss_mask: 0.2402, decode.d2.loss_dice: 0.6982, decode.d3.loss_cls: 0.1317, decode.d3.loss_mask: 0.2410, decode.d3.loss_dice: 0.6924, decode.d4.loss_cls: 0.1347, decode.d4.loss_mask: 0.2416, decode.d4.loss_dice: 0.6934, decode.d5.loss_cls: 0.1310, decode.d5.loss_mask: 0.2415, decode.d5.loss_dice: 0.6886, decode.d6.loss_cls: 0.1328, decode.d6.loss_mask: 0.2407, decode.d6.loss_dice: 0.6873, decode.d7.loss_cls: 0.1320, decode.d7.loss_mask: 0.2408, decode.d7.loss_dice: 0.6907, decode.d8.loss_cls: 0.1339, decode.d8.loss_mask: 0.2401, decode.d8.loss_dice: 0.6898, loss: 10.9415 +2022-05-10 01:55:35,870 - mmseg - INFO - Iter [15650/80000] lr: 1.155e-06, eta: 1 day, 10:47:52, time: 1.827, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1329, decode.loss_mask: 0.2539, decode.loss_dice: 0.6858, decode.d0.loss_cls: 0.3585, decode.d0.loss_mask: 0.2701, decode.d0.loss_dice: 0.7364, decode.d1.loss_cls: 0.1508, decode.d1.loss_mask: 0.2564, decode.d1.loss_dice: 0.7054, decode.d2.loss_cls: 0.1331, decode.d2.loss_mask: 0.2556, decode.d2.loss_dice: 0.6946, decode.d3.loss_cls: 0.1317, decode.d3.loss_mask: 0.2545, decode.d3.loss_dice: 0.6888, decode.d4.loss_cls: 0.1245, decode.d4.loss_mask: 0.2540, decode.d4.loss_dice: 0.6875, decode.d5.loss_cls: 0.1296, decode.d5.loss_mask: 0.2541, decode.d5.loss_dice: 0.6875, decode.d6.loss_cls: 0.1273, decode.d6.loss_mask: 0.2542, decode.d6.loss_dice: 0.6833, decode.d7.loss_cls: 0.1251, decode.d7.loss_mask: 0.2541, decode.d7.loss_dice: 0.6863, decode.d8.loss_cls: 0.1257, decode.d8.loss_mask: 0.2541, decode.d8.loss_dice: 0.6859, loss: 11.0417 +2022-05-10 01:57:05,192 - mmseg - INFO - Iter [15700/80000] lr: 1.154e-06, eta: 1 day, 10:45:42, time: 1.788, data_time: 
0.022, memory: 64699, decode.loss_cls: 0.1432, decode.loss_mask: 0.2506, decode.loss_dice: 0.6973, decode.d0.loss_cls: 0.3449, decode.d0.loss_mask: 0.2627, decode.d0.loss_dice: 0.7564, decode.d1.loss_cls: 0.1593, decode.d1.loss_mask: 0.2535, decode.d1.loss_dice: 0.7214, decode.d2.loss_cls: 0.1556, decode.d2.loss_mask: 0.2524, decode.d2.loss_dice: 0.7051, decode.d3.loss_cls: 0.1449, decode.d3.loss_mask: 0.2524, decode.d3.loss_dice: 0.7022, decode.d4.loss_cls: 0.1506, decode.d4.loss_mask: 0.2526, decode.d4.loss_dice: 0.6986, decode.d5.loss_cls: 0.1389, decode.d5.loss_mask: 0.2518, decode.d5.loss_dice: 0.7059, decode.d6.loss_cls: 0.1392, decode.d6.loss_mask: 0.2518, decode.d6.loss_dice: 0.7002, decode.d7.loss_cls: 0.1363, decode.d7.loss_mask: 0.2515, decode.d7.loss_dice: 0.6995, decode.d8.loss_cls: 0.1417, decode.d8.loss_mask: 0.2510, decode.d8.loss_dice: 0.6981, loss: 11.2696 +2022-05-10 01:58:34,267 - mmseg - INFO - Iter [15750/80000] lr: 1.153e-06, eta: 1 day, 10:43:31, time: 1.781, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1361, decode.loss_mask: 0.2562, decode.loss_dice: 0.6966, decode.d0.loss_cls: 0.3581, decode.d0.loss_mask: 0.2726, decode.d0.loss_dice: 0.7467, decode.d1.loss_cls: 0.1570, decode.d1.loss_mask: 0.2594, decode.d1.loss_dice: 0.7153, decode.d2.loss_cls: 0.1473, decode.d2.loss_mask: 0.2569, decode.d2.loss_dice: 0.7074, decode.d3.loss_cls: 0.1418, decode.d3.loss_mask: 0.2562, decode.d3.loss_dice: 0.6987, decode.d4.loss_cls: 0.1388, decode.d4.loss_mask: 0.2561, decode.d4.loss_dice: 0.6963, decode.d5.loss_cls: 0.1350, decode.d5.loss_mask: 0.2560, decode.d5.loss_dice: 0.6972, decode.d6.loss_cls: 0.1338, decode.d6.loss_mask: 0.2558, decode.d6.loss_dice: 0.6945, decode.d7.loss_cls: 0.1326, decode.d7.loss_mask: 0.2554, decode.d7.loss_dice: 0.6996, decode.d8.loss_cls: 0.1406, decode.d8.loss_mask: 0.2553, decode.d8.loss_dice: 0.6912, loss: 11.2445 +2022-05-10 02:00:04,698 - mmseg - INFO - Iter [15800/80000] lr: 1.152e-06, eta: 1 day, 10:41:26, time: 
1.806, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1165, decode.loss_mask: 0.2560, decode.loss_dice: 0.6872, decode.d0.loss_cls: 0.3434, decode.d0.loss_mask: 0.2684, decode.d0.loss_dice: 0.7380, decode.d1.loss_cls: 0.1371, decode.d1.loss_mask: 0.2598, decode.d1.loss_dice: 0.7013, decode.d2.loss_cls: 0.1262, decode.d2.loss_mask: 0.2576, decode.d2.loss_dice: 0.6903, decode.d3.loss_cls: 0.1236, decode.d3.loss_mask: 0.2574, decode.d3.loss_dice: 0.6871, decode.d4.loss_cls: 0.1248, decode.d4.loss_mask: 0.2579, decode.d4.loss_dice: 0.6902, decode.d5.loss_cls: 0.1190, decode.d5.loss_mask: 0.2572, decode.d5.loss_dice: 0.6874, decode.d6.loss_cls: 0.1223, decode.d6.loss_mask: 0.2563, decode.d6.loss_dice: 0.6875, decode.d7.loss_cls: 0.1242, decode.d7.loss_mask: 0.2564, decode.d7.loss_dice: 0.6873, decode.d8.loss_cls: 0.1276, decode.d8.loss_mask: 0.2559, decode.d8.loss_dice: 0.6850, loss: 10.9885 +2022-05-10 02:01:38,527 - mmseg - INFO - Iter [15850/80000] lr: 1.151e-06, eta: 1 day, 10:39:35, time: 1.878, data_time: 0.072, memory: 64699, decode.loss_cls: 0.1242, decode.loss_mask: 0.2511, decode.loss_dice: 0.6873, decode.d0.loss_cls: 0.3317, decode.d0.loss_mask: 0.2656, decode.d0.loss_dice: 0.7424, decode.d1.loss_cls: 0.1527, decode.d1.loss_mask: 0.2541, decode.d1.loss_dice: 0.7083, decode.d2.loss_cls: 0.1311, decode.d2.loss_mask: 0.2530, decode.d2.loss_dice: 0.6966, decode.d3.loss_cls: 0.1157, decode.d3.loss_mask: 0.2522, decode.d3.loss_dice: 0.6899, decode.d4.loss_cls: 0.1257, decode.d4.loss_mask: 0.2515, decode.d4.loss_dice: 0.6894, decode.d5.loss_cls: 0.1275, decode.d5.loss_mask: 0.2511, decode.d5.loss_dice: 0.6911, decode.d6.loss_cls: 0.1183, decode.d6.loss_mask: 0.2507, decode.d6.loss_dice: 0.6899, decode.d7.loss_cls: 0.1222, decode.d7.loss_mask: 0.2505, decode.d7.loss_dice: 0.6826, decode.d8.loss_cls: 0.1249, decode.d8.loss_mask: 0.2518, decode.d8.loss_dice: 0.6885, loss: 10.9717 +2022-05-10 02:03:08,780 - mmseg - INFO - Iter [15900/80000] lr: 1.150e-06, eta: 1 
day, 10:37:30, time: 1.805, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1378, decode.loss_mask: 0.2496, decode.loss_dice: 0.6911, decode.d0.loss_cls: 0.3660, decode.d0.loss_mask: 0.2645, decode.d0.loss_dice: 0.7408, decode.d1.loss_cls: 0.1558, decode.d1.loss_mask: 0.2524, decode.d1.loss_dice: 0.7071, decode.d2.loss_cls: 0.1507, decode.d2.loss_mask: 0.2495, decode.d2.loss_dice: 0.6919, decode.d3.loss_cls: 0.1381, decode.d3.loss_mask: 0.2495, decode.d3.loss_dice: 0.6908, decode.d4.loss_cls: 0.1449, decode.d4.loss_mask: 0.2495, decode.d4.loss_dice: 0.6874, decode.d5.loss_cls: 0.1400, decode.d5.loss_mask: 0.2491, decode.d5.loss_dice: 0.6880, decode.d6.loss_cls: 0.1396, decode.d6.loss_mask: 0.2497, decode.d6.loss_dice: 0.6884, decode.d7.loss_cls: 0.1408, decode.d7.loss_mask: 0.2497, decode.d7.loss_dice: 0.6850, decode.d8.loss_cls: 0.1408, decode.d8.loss_mask: 0.2496, decode.d8.loss_dice: 0.6845, loss: 11.1225 +2022-05-10 02:04:37,519 - mmseg - INFO - Iter [15950/80000] lr: 1.150e-06, eta: 1 day, 10:35:18, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1407, decode.loss_mask: 0.2562, decode.loss_dice: 0.6945, decode.d0.loss_cls: 0.3663, decode.d0.loss_mask: 0.2717, decode.d0.loss_dice: 0.7435, decode.d1.loss_cls: 0.1611, decode.d1.loss_mask: 0.2578, decode.d1.loss_dice: 0.7080, decode.d2.loss_cls: 0.1513, decode.d2.loss_mask: 0.2574, decode.d2.loss_dice: 0.7008, decode.d3.loss_cls: 0.1422, decode.d3.loss_mask: 0.2553, decode.d3.loss_dice: 0.6907, decode.d4.loss_cls: 0.1429, decode.d4.loss_mask: 0.2560, decode.d4.loss_dice: 0.6933, decode.d5.loss_cls: 0.1417, decode.d5.loss_mask: 0.2563, decode.d5.loss_dice: 0.6922, decode.d6.loss_cls: 0.1383, decode.d6.loss_mask: 0.2561, decode.d6.loss_dice: 0.6925, decode.d7.loss_cls: 0.1465, decode.d7.loss_mask: 0.2558, decode.d7.loss_dice: 0.6886, decode.d8.loss_cls: 0.1366, decode.d8.loss_mask: 0.2565, decode.d8.loss_dice: 0.6889, loss: 11.2395 +2022-05-10 02:06:08,718 - mmseg - INFO - Saving checkpoint at 
16000 iterations +2022-05-10 02:06:41,956 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 02:06:41,965 - mmseg - INFO - Iter [16000/80000] lr: 1.149e-06, eta: 1 day, 10:35:29, time: 2.486, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1227, decode.loss_mask: 0.2602, decode.loss_dice: 0.6960, decode.d0.loss_cls: 0.3498, decode.d0.loss_mask: 0.2753, decode.d0.loss_dice: 0.7470, decode.d1.loss_cls: 0.1502, decode.d1.loss_mask: 0.2635, decode.d1.loss_dice: 0.7143, decode.d2.loss_cls: 0.1421, decode.d2.loss_mask: 0.2623, decode.d2.loss_dice: 0.7038, decode.d3.loss_cls: 0.1444, decode.d3.loss_mask: 0.2614, decode.d3.loss_dice: 0.6993, decode.d4.loss_cls: 0.1351, decode.d4.loss_mask: 0.2615, decode.d4.loss_dice: 0.6985, decode.d5.loss_cls: 0.1334, decode.d5.loss_mask: 0.2619, decode.d5.loss_dice: 0.7007, decode.d6.loss_cls: 0.1255, decode.d6.loss_mask: 0.2602, decode.d6.loss_dice: 0.6962, decode.d7.loss_cls: 0.1190, decode.d7.loss_mask: 0.2605, decode.d7.loss_dice: 0.6996, decode.d8.loss_cls: 0.1310, decode.d8.loss_mask: 0.2605, decode.d8.loss_dice: 0.6969, loss: 11.2329 +2022-05-10 02:08:38,187 - mmseg - INFO - per class results: +2022-05-10 02:08:38,194 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.73 | 99.26 | +| sidewalk | 89.52 | 94.38 | +| building | 94.5 | 97.06 | +| wall | 69.74 | 78.85 | +| fence | 73.92 | 84.83 | +| pole | 72.01 | 85.05 | +| traffic light | 77.11 | 88.92 | +| traffic sign | 84.51 | 91.54 | +| vegetation | 93.43 | 96.69 | +| terrain | 67.23 | 78.5 | +| sky | 95.88 | 98.55 | +| person | 86.99 | 94.27 | +| rider | 74.42 | 84.26 | +| car | 95.7 | 98.42 | +| truck | 89.73 | 92.06 | +| bus | 93.73 | 97.21 | +| train | 87.63 | 91.64 | +| motorcycle | 78.81 | 87.85 | +| bicycle | 82.79 | 91.41 | ++---------------+-------+-------+ +2022-05-10 02:08:38,194 - mmseg - INFO - Summary: +2022-05-10 02:08:38,195 - mmseg - INFO - 
++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.07 | 84.55 | 91.09 | ++-------+-------+-------+ +2022-05-10 02:08:38,199 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 02:08:38,200 - mmseg - INFO - Iter(val) [32] aAcc: 0.9707, mIoU: 0.8455, mAcc: 0.9109, IoU.road: 0.9873, IoU.sidewalk: 0.8952, IoU.building: 0.9450, IoU.wall: 0.6974, IoU.fence: 0.7392, IoU.pole: 0.7201, IoU.traffic light: 0.7711, IoU.traffic sign: 0.8451, IoU.vegetation: 0.9343, IoU.terrain: 0.6723, IoU.sky: 0.9588, IoU.person: 0.8699, IoU.rider: 0.7442, IoU.car: 0.9570, IoU.truck: 0.8973, IoU.bus: 0.9373, IoU.train: 0.8763, IoU.motorcycle: 0.7881, IoU.bicycle: 0.8279, Acc.road: 0.9926, Acc.sidewalk: 0.9438, Acc.building: 0.9706, Acc.wall: 0.7885, Acc.fence: 0.8483, Acc.pole: 0.8505, Acc.traffic light: 0.8892, Acc.traffic sign: 0.9154, Acc.vegetation: 0.9669, Acc.terrain: 0.7850, Acc.sky: 0.9855, Acc.person: 0.9427, Acc.rider: 0.8426, Acc.car: 0.9842, Acc.truck: 0.9206, Acc.bus: 0.9721, Acc.train: 0.9164, Acc.motorcycle: 0.8785, Acc.bicycle: 0.9141 +2022-05-10 02:10:09,040 - mmseg - INFO - Iter [16050/80000] lr: 1.148e-06, eta: 1 day, 10:41:10, time: 4.144, data_time: 2.349, memory: 64699, decode.loss_cls: 0.1249, decode.loss_mask: 0.2523, decode.loss_dice: 0.6970, decode.d0.loss_cls: 0.3446, decode.d0.loss_mask: 0.2653, decode.d0.loss_dice: 0.7499, decode.d1.loss_cls: 0.1547, decode.d1.loss_mask: 0.2545, decode.d1.loss_dice: 0.7092, decode.d2.loss_cls: 0.1391, decode.d2.loss_mask: 0.2535, decode.d2.loss_dice: 0.7086, decode.d3.loss_cls: 0.1376, decode.d3.loss_mask: 0.2533, decode.d3.loss_dice: 0.6975, decode.d4.loss_cls: 0.1357, decode.d4.loss_mask: 0.2539, decode.d4.loss_dice: 0.6981, decode.d5.loss_cls: 0.1317, decode.d5.loss_mask: 0.2537, decode.d5.loss_dice: 0.6955, decode.d6.loss_cls: 0.1353, decode.d6.loss_mask: 0.2529, decode.d6.loss_dice: 0.6972, decode.d7.loss_cls: 0.1271, decode.d7.loss_mask: 0.2530, 
decode.d7.loss_dice: 0.7002, decode.d8.loss_cls: 0.1268, decode.d8.loss_mask: 0.2527, decode.d8.loss_dice: 0.7007, loss: 11.1563 +2022-05-10 02:11:38,321 - mmseg - INFO - Iter [16100/80000] lr: 1.147e-06, eta: 1 day, 10:38:59, time: 1.786, data_time: 0.022, memory: 64699, decode.loss_cls: 0.1272, decode.loss_mask: 0.2518, decode.loss_dice: 0.6995, decode.d0.loss_cls: 0.3519, decode.d0.loss_mask: 0.2649, decode.d0.loss_dice: 0.7504, decode.d1.loss_cls: 0.1542, decode.d1.loss_mask: 0.2549, decode.d1.loss_dice: 0.7119, decode.d2.loss_cls: 0.1358, decode.d2.loss_mask: 0.2539, decode.d2.loss_dice: 0.7089, decode.d3.loss_cls: 0.1296, decode.d3.loss_mask: 0.2518, decode.d3.loss_dice: 0.6975, decode.d4.loss_cls: 0.1308, decode.d4.loss_mask: 0.2522, decode.d4.loss_dice: 0.6995, decode.d5.loss_cls: 0.1338, decode.d5.loss_mask: 0.2515, decode.d5.loss_dice: 0.6993, decode.d6.loss_cls: 0.1190, decode.d6.loss_mask: 0.2517, decode.d6.loss_dice: 0.7016, decode.d7.loss_cls: 0.1266, decode.d7.loss_mask: 0.2517, decode.d7.loss_dice: 0.7003, decode.d8.loss_cls: 0.1252, decode.d8.loss_mask: 0.2514, decode.d8.loss_dice: 0.6998, loss: 11.1384 +2022-05-10 02:13:08,421 - mmseg - INFO - Iter [16150/80000] lr: 1.146e-06, eta: 1 day, 10:36:52, time: 1.802, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1390, decode.loss_mask: 0.2490, decode.loss_dice: 0.6843, decode.d0.loss_cls: 0.3695, decode.d0.loss_mask: 0.2601, decode.d0.loss_dice: 0.7297, decode.d1.loss_cls: 0.1559, decode.d1.loss_mask: 0.2509, decode.d1.loss_dice: 0.6994, decode.d2.loss_cls: 0.1516, decode.d2.loss_mask: 0.2499, decode.d2.loss_dice: 0.6963, decode.d3.loss_cls: 0.1393, decode.d3.loss_mask: 0.2481, decode.d3.loss_dice: 0.6880, decode.d4.loss_cls: 0.1303, decode.d4.loss_mask: 0.2500, decode.d4.loss_dice: 0.6877, decode.d5.loss_cls: 0.1361, decode.d5.loss_mask: 0.2489, decode.d5.loss_dice: 0.6897, decode.d6.loss_cls: 0.1326, decode.d6.loss_mask: 0.2487, decode.d6.loss_dice: 0.6833, decode.d7.loss_cls: 0.1373, 
decode.d7.loss_mask: 0.2498, decode.d7.loss_dice: 0.6857, decode.d8.loss_cls: 0.1368, decode.d8.loss_mask: 0.2494, decode.d8.loss_dice: 0.6907, loss: 11.0676 +2022-05-10 02:14:41,515 - mmseg - INFO - Iter [16200/80000] lr: 1.145e-06, eta: 1 day, 10:34:57, time: 1.862, data_time: 0.070, memory: 64699, decode.loss_cls: 0.1332, decode.loss_mask: 0.2483, decode.loss_dice: 0.7015, decode.d0.loss_cls: 0.3522, decode.d0.loss_mask: 0.2620, decode.d0.loss_dice: 0.7492, decode.d1.loss_cls: 0.1572, decode.d1.loss_mask: 0.2507, decode.d1.loss_dice: 0.7110, decode.d2.loss_cls: 0.1484, decode.d2.loss_mask: 0.2491, decode.d2.loss_dice: 0.7039, decode.d3.loss_cls: 0.1464, decode.d3.loss_mask: 0.2491, decode.d3.loss_dice: 0.6977, decode.d4.loss_cls: 0.1410, decode.d4.loss_mask: 0.2492, decode.d4.loss_dice: 0.6973, decode.d5.loss_cls: 0.1461, decode.d5.loss_mask: 0.2488, decode.d5.loss_dice: 0.6995, decode.d6.loss_cls: 0.1408, decode.d6.loss_mask: 0.2495, decode.d6.loss_dice: 0.6970, decode.d7.loss_cls: 0.1404, decode.d7.loss_mask: 0.2488, decode.d7.loss_dice: 0.6944, decode.d8.loss_cls: 0.1356, decode.d8.loss_mask: 0.2489, decode.d8.loss_dice: 0.7006, loss: 11.1976 +2022-05-10 02:16:10,120 - mmseg - INFO - Iter [16250/80000] lr: 1.144e-06, eta: 1 day, 10:32:44, time: 1.772, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1287, decode.loss_mask: 0.2606, decode.loss_dice: 0.6814, decode.d0.loss_cls: 0.3562, decode.d0.loss_mask: 0.2767, decode.d0.loss_dice: 0.7365, decode.d1.loss_cls: 0.1468, decode.d1.loss_mask: 0.2649, decode.d1.loss_dice: 0.6956, decode.d2.loss_cls: 0.1348, decode.d2.loss_mask: 0.2625, decode.d2.loss_dice: 0.6895, decode.d3.loss_cls: 0.1289, decode.d3.loss_mask: 0.2613, decode.d3.loss_dice: 0.6874, decode.d4.loss_cls: 0.1302, decode.d4.loss_mask: 0.2600, decode.d4.loss_dice: 0.6878, decode.d5.loss_cls: 0.1291, decode.d5.loss_mask: 0.2597, decode.d5.loss_dice: 0.6810, decode.d6.loss_cls: 0.1280, decode.d6.loss_mask: 0.2611, decode.d6.loss_dice: 0.6822, 
decode.d7.loss_cls: 0.1276, decode.d7.loss_mask: 0.2605, decode.d7.loss_dice: 0.6801, decode.d8.loss_cls: 0.1253, decode.d8.loss_mask: 0.2615, decode.d8.loss_dice: 0.6841, loss: 11.0700 +2022-05-10 02:17:38,429 - mmseg - INFO - Iter [16300/80000] lr: 1.143e-06, eta: 1 day, 10:30:30, time: 1.765, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1341, decode.loss_mask: 0.2516, decode.loss_dice: 0.7052, decode.d0.loss_cls: 0.3471, decode.d0.loss_mask: 0.2647, decode.d0.loss_dice: 0.7587, decode.d1.loss_cls: 0.1556, decode.d1.loss_mask: 0.2540, decode.d1.loss_dice: 0.7225, decode.d2.loss_cls: 0.1415, decode.d2.loss_mask: 0.2526, decode.d2.loss_dice: 0.7175, decode.d3.loss_cls: 0.1422, decode.d3.loss_mask: 0.2525, decode.d3.loss_dice: 0.7021, decode.d4.loss_cls: 0.1386, decode.d4.loss_mask: 0.2530, decode.d4.loss_dice: 0.7055, decode.d5.loss_cls: 0.1294, decode.d5.loss_mask: 0.2527, decode.d5.loss_dice: 0.7070, decode.d6.loss_cls: 0.1384, decode.d6.loss_mask: 0.2517, decode.d6.loss_dice: 0.7021, decode.d7.loss_cls: 0.1334, decode.d7.loss_mask: 0.2521, decode.d7.loss_dice: 0.7010, decode.d8.loss_cls: 0.1321, decode.d8.loss_mask: 0.2527, decode.d8.loss_dice: 0.7077, loss: 11.2593 +2022-05-10 02:19:07,434 - mmseg - INFO - Iter [16350/80000] lr: 1.142e-06, eta: 1 day, 10:28:20, time: 1.781, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1243, decode.loss_mask: 0.2498, decode.loss_dice: 0.6904, decode.d0.loss_cls: 0.3435, decode.d0.loss_mask: 0.2632, decode.d0.loss_dice: 0.7382, decode.d1.loss_cls: 0.1519, decode.d1.loss_mask: 0.2523, decode.d1.loss_dice: 0.7062, decode.d2.loss_cls: 0.1384, decode.d2.loss_mask: 0.2516, decode.d2.loss_dice: 0.6999, decode.d3.loss_cls: 0.1354, decode.d3.loss_mask: 0.2500, decode.d3.loss_dice: 0.6889, decode.d4.loss_cls: 0.1319, decode.d4.loss_mask: 0.2501, decode.d4.loss_dice: 0.6915, decode.d5.loss_cls: 0.1293, decode.d5.loss_mask: 0.2500, decode.d5.loss_dice: 0.6934, decode.d6.loss_cls: 0.1305, decode.d6.loss_mask: 0.2500, 
decode.d6.loss_dice: 0.6893, decode.d7.loss_cls: 0.1283, decode.d7.loss_mask: 0.2502, decode.d7.loss_dice: 0.6896, decode.d8.loss_cls: 0.1336, decode.d8.loss_mask: 0.2506, decode.d8.loss_dice: 0.6893, loss: 11.0414 +2022-05-10 02:20:38,020 - mmseg - INFO - Iter [16400/80000] lr: 1.141e-06, eta: 1 day, 10:26:16, time: 1.812, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1318, decode.loss_mask: 0.2447, decode.loss_dice: 0.6817, decode.d0.loss_cls: 0.3411, decode.d0.loss_mask: 0.2562, decode.d0.loss_dice: 0.7347, decode.d1.loss_cls: 0.1481, decode.d1.loss_mask: 0.2474, decode.d1.loss_dice: 0.7034, decode.d2.loss_cls: 0.1385, decode.d2.loss_mask: 0.2452, decode.d2.loss_dice: 0.6945, decode.d3.loss_cls: 0.1375, decode.d3.loss_mask: 0.2451, decode.d3.loss_dice: 0.6889, decode.d4.loss_cls: 0.1302, decode.d4.loss_mask: 0.2453, decode.d4.loss_dice: 0.6900, decode.d5.loss_cls: 0.1346, decode.d5.loss_mask: 0.2444, decode.d5.loss_dice: 0.6925, decode.d6.loss_cls: 0.1333, decode.d6.loss_mask: 0.2446, decode.d6.loss_dice: 0.6882, decode.d7.loss_cls: 0.1228, decode.d7.loss_mask: 0.2450, decode.d7.loss_dice: 0.6852, decode.d8.loss_cls: 0.1347, decode.d8.loss_mask: 0.2443, decode.d8.loss_dice: 0.6841, loss: 10.9578 +2022-05-10 02:22:08,038 - mmseg - INFO - Iter [16450/80000] lr: 1.141e-06, eta: 1 day, 10:24:09, time: 1.800, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1330, decode.loss_mask: 0.2454, decode.loss_dice: 0.6795, decode.d0.loss_cls: 0.3487, decode.d0.loss_mask: 0.2606, decode.d0.loss_dice: 0.7287, decode.d1.loss_cls: 0.1582, decode.d1.loss_mask: 0.2474, decode.d1.loss_dice: 0.6941, decode.d2.loss_cls: 0.1395, decode.d2.loss_mask: 0.2451, decode.d2.loss_dice: 0.6879, decode.d3.loss_cls: 0.1333, decode.d3.loss_mask: 0.2450, decode.d3.loss_dice: 0.6800, decode.d4.loss_cls: 0.1352, decode.d4.loss_mask: 0.2460, decode.d4.loss_dice: 0.6814, decode.d5.loss_cls: 0.1338, decode.d5.loss_mask: 0.2454, decode.d5.loss_dice: 0.6798, decode.d6.loss_cls: 0.1359, 
decode.d6.loss_mask: 0.2447, decode.d6.loss_dice: 0.6749, decode.d7.loss_cls: 0.1331, decode.d7.loss_mask: 0.2447, decode.d7.loss_dice: 0.6767, decode.d8.loss_cls: 0.1388, decode.d8.loss_mask: 0.2451, decode.d8.loss_dice: 0.6824, loss: 10.9242 +2022-05-10 02:23:37,515 - mmseg - INFO - Iter [16500/80000] lr: 1.140e-06, eta: 1 day, 10:22:01, time: 1.789, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1291, decode.loss_mask: 0.2513, decode.loss_dice: 0.6970, decode.d0.loss_cls: 0.3516, decode.d0.loss_mask: 0.2632, decode.d0.loss_dice: 0.7458, decode.d1.loss_cls: 0.1589, decode.d1.loss_mask: 0.2541, decode.d1.loss_dice: 0.7082, decode.d2.loss_cls: 0.1397, decode.d2.loss_mask: 0.2525, decode.d2.loss_dice: 0.7001, decode.d3.loss_cls: 0.1300, decode.d3.loss_mask: 0.2519, decode.d3.loss_dice: 0.6963, decode.d4.loss_cls: 0.1295, decode.d4.loss_mask: 0.2510, decode.d4.loss_dice: 0.6960, decode.d5.loss_cls: 0.1283, decode.d5.loss_mask: 0.2513, decode.d5.loss_dice: 0.6972, decode.d6.loss_cls: 0.1202, decode.d6.loss_mask: 0.2506, decode.d6.loss_dice: 0.6946, decode.d7.loss_cls: 0.1252, decode.d7.loss_mask: 0.2510, decode.d7.loss_dice: 0.6907, decode.d8.loss_cls: 0.1258, decode.d8.loss_mask: 0.2510, decode.d8.loss_dice: 0.6942, loss: 11.0860 +2022-05-10 02:25:06,162 - mmseg - INFO - Iter [16550/80000] lr: 1.139e-06, eta: 1 day, 10:19:50, time: 1.773, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1286, decode.loss_mask: 0.2462, decode.loss_dice: 0.6840, decode.d0.loss_cls: 0.3623, decode.d0.loss_mask: 0.2591, decode.d0.loss_dice: 0.7358, decode.d1.loss_cls: 0.1531, decode.d1.loss_mask: 0.2490, decode.d1.loss_dice: 0.6980, decode.d2.loss_cls: 0.1442, decode.d2.loss_mask: 0.2475, decode.d2.loss_dice: 0.6954, decode.d3.loss_cls: 0.1320, decode.d3.loss_mask: 0.2467, decode.d3.loss_dice: 0.6861, decode.d4.loss_cls: 0.1359, decode.d4.loss_mask: 0.2466, decode.d4.loss_dice: 0.6893, decode.d5.loss_cls: 0.1304, decode.d5.loss_mask: 0.2472, decode.d5.loss_dice: 0.6874, 
decode.d6.loss_cls: 0.1313, decode.d6.loss_mask: 0.2470, decode.d6.loss_dice: 0.6832, decode.d7.loss_cls: 0.1335, decode.d7.loss_mask: 0.2466, decode.d7.loss_dice: 0.6852, decode.d8.loss_cls: 0.1274, decode.d8.loss_mask: 0.2462, decode.d8.loss_dice: 0.6812, loss: 10.9863 +2022-05-10 02:26:36,949 - mmseg - INFO - Iter [16600/80000] lr: 1.138e-06, eta: 1 day, 10:17:48, time: 1.816, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1264, decode.loss_mask: 0.2461, decode.loss_dice: 0.6993, decode.d0.loss_cls: 0.3541, decode.d0.loss_mask: 0.2606, decode.d0.loss_dice: 0.7420, decode.d1.loss_cls: 0.1518, decode.d1.loss_mask: 0.2485, decode.d1.loss_dice: 0.7123, decode.d2.loss_cls: 0.1329, decode.d2.loss_mask: 0.2473, decode.d2.loss_dice: 0.7007, decode.d3.loss_cls: 0.1231, decode.d3.loss_mask: 0.2445, decode.d3.loss_dice: 0.6959, decode.d4.loss_cls: 0.1288, decode.d4.loss_mask: 0.2460, decode.d4.loss_dice: 0.6992, decode.d5.loss_cls: 0.1230, decode.d5.loss_mask: 0.2454, decode.d5.loss_dice: 0.6955, decode.d6.loss_cls: 0.1174, decode.d6.loss_mask: 0.2456, decode.d6.loss_dice: 0.6908, decode.d7.loss_cls: 0.1202, decode.d7.loss_mask: 0.2459, decode.d7.loss_dice: 0.6948, decode.d8.loss_cls: 0.1256, decode.d8.loss_mask: 0.2455, decode.d8.loss_dice: 0.6909, loss: 11.0000 +2022-05-10 02:28:05,898 - mmseg - INFO - Iter [16650/80000] lr: 1.137e-06, eta: 1 day, 10:15:38, time: 1.779, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1261, decode.loss_mask: 0.2518, decode.loss_dice: 0.6656, decode.d0.loss_cls: 0.3574, decode.d0.loss_mask: 0.2637, decode.d0.loss_dice: 0.7127, decode.d1.loss_cls: 0.1469, decode.d1.loss_mask: 0.2527, decode.d1.loss_dice: 0.6779, decode.d2.loss_cls: 0.1326, decode.d2.loss_mask: 0.2520, decode.d2.loss_dice: 0.6737, decode.d3.loss_cls: 0.1303, decode.d3.loss_mask: 0.2516, decode.d3.loss_dice: 0.6621, decode.d4.loss_cls: 0.1281, decode.d4.loss_mask: 0.2512, decode.d4.loss_dice: 0.6644, decode.d5.loss_cls: 0.1308, decode.d5.loss_mask: 0.2517, 
decode.d5.loss_dice: 0.6663, decode.d6.loss_cls: 0.1219, decode.d6.loss_mask: 0.2509, decode.d6.loss_dice: 0.6666, decode.d7.loss_cls: 0.1231, decode.d7.loss_mask: 0.2507, decode.d7.loss_dice: 0.6597, decode.d8.loss_cls: 0.1278, decode.d8.loss_mask: 0.2511, decode.d8.loss_dice: 0.6626, loss: 10.7642 +2022-05-10 02:29:34,292 - mmseg - INFO - Iter [16700/80000] lr: 1.136e-06, eta: 1 day, 10:13:27, time: 1.768, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1412, decode.loss_mask: 0.2465, decode.loss_dice: 0.6935, decode.d0.loss_cls: 0.3486, decode.d0.loss_mask: 0.2616, decode.d0.loss_dice: 0.7455, decode.d1.loss_cls: 0.1643, decode.d1.loss_mask: 0.2495, decode.d1.loss_dice: 0.7120, decode.d2.loss_cls: 0.1507, decode.d2.loss_mask: 0.2481, decode.d2.loss_dice: 0.7061, decode.d3.loss_cls: 0.1503, decode.d3.loss_mask: 0.2471, decode.d3.loss_dice: 0.6935, decode.d4.loss_cls: 0.1478, decode.d4.loss_mask: 0.2471, decode.d4.loss_dice: 0.6934, decode.d5.loss_cls: 0.1403, decode.d5.loss_mask: 0.2476, decode.d5.loss_dice: 0.6953, decode.d6.loss_cls: 0.1410, decode.d6.loss_mask: 0.2483, decode.d6.loss_dice: 0.6964, decode.d7.loss_cls: 0.1463, decode.d7.loss_mask: 0.2470, decode.d7.loss_dice: 0.6895, decode.d8.loss_cls: 0.1436, decode.d8.loss_mask: 0.2472, decode.d8.loss_dice: 0.6960, loss: 11.1852 +2022-05-10 02:31:06,559 - mmseg - INFO - Iter [16750/80000] lr: 1.135e-06, eta: 1 day, 10:11:30, time: 1.845, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1216, decode.loss_mask: 0.2463, decode.loss_dice: 0.6817, decode.d0.loss_cls: 0.3492, decode.d0.loss_mask: 0.2585, decode.d0.loss_dice: 0.7283, decode.d1.loss_cls: 0.1495, decode.d1.loss_mask: 0.2490, decode.d1.loss_dice: 0.6952, decode.d2.loss_cls: 0.1321, decode.d2.loss_mask: 0.2472, decode.d2.loss_dice: 0.6905, decode.d3.loss_cls: 0.1290, decode.d3.loss_mask: 0.2468, decode.d3.loss_dice: 0.6820, decode.d4.loss_cls: 0.1283, decode.d4.loss_mask: 0.2470, decode.d4.loss_dice: 0.6811, decode.d5.loss_cls: 0.1241, 
decode.d5.loss_mask: 0.2483, decode.d5.loss_dice: 0.6836, decode.d6.loss_cls: 0.1233, decode.d6.loss_mask: 0.2465, decode.d6.loss_dice: 0.6762, decode.d7.loss_cls: 0.1153, decode.d7.loss_mask: 0.2467, decode.d7.loss_dice: 0.6811, decode.d8.loss_cls: 0.1193, decode.d8.loss_mask: 0.2469, decode.d8.loss_dice: 0.6800, loss: 10.8546 +2022-05-10 02:32:34,533 - mmseg - INFO - Iter [16800/80000] lr: 1.134e-06, eta: 1 day, 10:09:18, time: 1.759, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1347, decode.loss_mask: 0.2437, decode.loss_dice: 0.6889, decode.d0.loss_cls: 0.3520, decode.d0.loss_mask: 0.2588, decode.d0.loss_dice: 0.7513, decode.d1.loss_cls: 0.1623, decode.d1.loss_mask: 0.2464, decode.d1.loss_dice: 0.7061, decode.d2.loss_cls: 0.1554, decode.d2.loss_mask: 0.2446, decode.d2.loss_dice: 0.6914, decode.d3.loss_cls: 0.1391, decode.d3.loss_mask: 0.2444, decode.d3.loss_dice: 0.6907, decode.d4.loss_cls: 0.1435, decode.d4.loss_mask: 0.2443, decode.d4.loss_dice: 0.6926, decode.d5.loss_cls: 0.1409, decode.d5.loss_mask: 0.2443, decode.d5.loss_dice: 0.6951, decode.d6.loss_cls: 0.1456, decode.d6.loss_mask: 0.2442, decode.d6.loss_dice: 0.6888, decode.d7.loss_cls: 0.1357, decode.d7.loss_mask: 0.2435, decode.d7.loss_dice: 0.6915, decode.d8.loss_cls: 0.1390, decode.d8.loss_mask: 0.2435, decode.d8.loss_dice: 0.6880, loss: 11.0904 +2022-05-10 02:34:02,376 - mmseg - INFO - Iter [16850/80000] lr: 1.133e-06, eta: 1 day, 10:07:05, time: 1.757, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1291, decode.loss_mask: 0.2476, decode.loss_dice: 0.6860, decode.d0.loss_cls: 0.3478, decode.d0.loss_mask: 0.2589, decode.d0.loss_dice: 0.7366, decode.d1.loss_cls: 0.1473, decode.d1.loss_mask: 0.2494, decode.d1.loss_dice: 0.6965, decode.d2.loss_cls: 0.1407, decode.d2.loss_mask: 0.2479, decode.d2.loss_dice: 0.6892, decode.d3.loss_cls: 0.1370, decode.d3.loss_mask: 0.2478, decode.d3.loss_dice: 0.6827, decode.d4.loss_cls: 0.1396, decode.d4.loss_mask: 0.2476, decode.d4.loss_dice: 0.6881, 
decode.d5.loss_cls: 0.1408, decode.d5.loss_mask: 0.2485, decode.d5.loss_dice: 0.6854, decode.d6.loss_cls: 0.1333, decode.d6.loss_mask: 0.2478, decode.d6.loss_dice: 0.6898, decode.d7.loss_cls: 0.1345, decode.d7.loss_mask: 0.2483, decode.d7.loss_dice: 0.6851, decode.d8.loss_cls: 0.1280, decode.d8.loss_mask: 0.2488, decode.d8.loss_dice: 0.6901, loss: 11.0003 +2022-05-10 02:35:30,755 - mmseg - INFO - Iter [16900/80000] lr: 1.133e-06, eta: 1 day, 10:04:55, time: 1.766, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1209, decode.loss_mask: 0.2543, decode.loss_dice: 0.6749, decode.d0.loss_cls: 0.3391, decode.d0.loss_mask: 0.2667, decode.d0.loss_dice: 0.7257, decode.d1.loss_cls: 0.1477, decode.d1.loss_mask: 0.2571, decode.d1.loss_dice: 0.6938, decode.d2.loss_cls: 0.1356, decode.d2.loss_mask: 0.2562, decode.d2.loss_dice: 0.6821, decode.d3.loss_cls: 0.1298, decode.d3.loss_mask: 0.2544, decode.d3.loss_dice: 0.6805, decode.d4.loss_cls: 0.1265, decode.d4.loss_mask: 0.2542, decode.d4.loss_dice: 0.6816, decode.d5.loss_cls: 0.1333, decode.d5.loss_mask: 0.2551, decode.d5.loss_dice: 0.6793, decode.d6.loss_cls: 0.1217, decode.d6.loss_mask: 0.2546, decode.d6.loss_dice: 0.6764, decode.d7.loss_cls: 0.1227, decode.d7.loss_mask: 0.2538, decode.d7.loss_dice: 0.6767, decode.d8.loss_cls: 0.1285, decode.d8.loss_mask: 0.2543, decode.d8.loss_dice: 0.6763, loss: 10.9141 +2022-05-10 02:37:01,989 - mmseg - INFO - Iter [16950/80000] lr: 1.132e-06, eta: 1 day, 10:02:55, time: 1.825, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1290, decode.loss_mask: 0.2432, decode.loss_dice: 0.6769, decode.d0.loss_cls: 0.3417, decode.d0.loss_mask: 0.2559, decode.d0.loss_dice: 0.7252, decode.d1.loss_cls: 0.1547, decode.d1.loss_mask: 0.2457, decode.d1.loss_dice: 0.6897, decode.d2.loss_cls: 0.1470, decode.d2.loss_mask: 0.2448, decode.d2.loss_dice: 0.6915, decode.d3.loss_cls: 0.1283, decode.d3.loss_mask: 0.2430, decode.d3.loss_dice: 0.6825, decode.d4.loss_cls: 0.1325, decode.d4.loss_mask: 0.2436, 
decode.d4.loss_dice: 0.6849, decode.d5.loss_cls: 0.1284, decode.d5.loss_mask: 0.2437, decode.d5.loss_dice: 0.6875, decode.d6.loss_cls: 0.1262, decode.d6.loss_mask: 0.2440, decode.d6.loss_dice: 0.6847, decode.d7.loss_cls: 0.1269, decode.d7.loss_mask: 0.2442, decode.d7.loss_dice: 0.6797, decode.d8.loss_cls: 0.1309, decode.d8.loss_mask: 0.2435, decode.d8.loss_dice: 0.6794, loss: 10.8792 +2022-05-10 02:38:31,058 - mmseg - INFO - Saving checkpoint at 17000 iterations +2022-05-10 02:39:04,590 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 02:39:04,598 - mmseg - INFO - Iter [17000/80000] lr: 1.131e-06, eta: 1 day, 10:02:52, time: 2.450, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1180, decode.loss_mask: 0.2519, decode.loss_dice: 0.6809, decode.d0.loss_cls: 0.3490, decode.d0.loss_mask: 0.2660, decode.d0.loss_dice: 0.7363, decode.d1.loss_cls: 0.1449, decode.d1.loss_mask: 0.2555, decode.d1.loss_dice: 0.7015, decode.d2.loss_cls: 0.1316, decode.d2.loss_mask: 0.2518, decode.d2.loss_dice: 0.6933, decode.d3.loss_cls: 0.1283, decode.d3.loss_mask: 0.2523, decode.d3.loss_dice: 0.6853, decode.d4.loss_cls: 0.1274, decode.d4.loss_mask: 0.2534, decode.d4.loss_dice: 0.6852, decode.d5.loss_cls: 0.1278, decode.d5.loss_mask: 0.2525, decode.d5.loss_dice: 0.6878, decode.d6.loss_cls: 0.1222, decode.d6.loss_mask: 0.2518, decode.d6.loss_dice: 0.6789, decode.d7.loss_cls: 0.1218, decode.d7.loss_mask: 0.2522, decode.d7.loss_dice: 0.6802, decode.d8.loss_cls: 0.1265, decode.d8.loss_mask: 0.2518, decode.d8.loss_dice: 0.6805, loss: 10.9468 +2022-05-10 02:40:59,348 - mmseg - INFO - per class results: +2022-05-10 02:40:59,352 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.66 | 99.33 | +| sidewalk | 89.05 | 93.66 | +| building | 94.62 | 96.88 | +| wall | 74.36 | 81.51 | +| fence | 73.86 | 83.77 | +| pole | 72.01 | 85.83 | +| traffic light | 76.87 | 89.69 | +| traffic sign | 
84.44 | 91.95 | +| vegetation | 93.4 | 96.84 | +| terrain | 67.82 | 78.97 | +| sky | 95.81 | 98.64 | +| person | 86.95 | 94.43 | +| rider | 75.15 | 86.83 | +| car | 95.85 | 98.32 | +| truck | 90.99 | 95.12 | +| bus | 94.28 | 96.65 | +| train | 90.05 | 93.28 | +| motorcycle | 76.45 | 87.72 | +| bicycle | 82.88 | 91.56 | ++---------------+-------+-------+ +2022-05-10 02:40:59,353 - mmseg - INFO - Summary: +2022-05-10 02:40:59,353 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.08 | 84.92 | 91.63 | ++-------+-------+-------+ +2022-05-10 02:40:59,355 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_11000.pth was removed +2022-05-10 02:41:30,432 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_17000.pth. +2022-05-10 02:41:30,448 - mmseg - INFO - Best mIoU is 0.8492 at 17000 iter. +2022-05-10 02:41:30,459 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 02:41:30,460 - mmseg - INFO - Iter(val) [32] aAcc: 0.9708, mIoU: 0.8492, mAcc: 0.9163, IoU.road: 0.9866, IoU.sidewalk: 0.8905, IoU.building: 0.9462, IoU.wall: 0.7436, IoU.fence: 0.7386, IoU.pole: 0.7201, IoU.traffic light: 0.7687, IoU.traffic sign: 0.8444, IoU.vegetation: 0.9340, IoU.terrain: 0.6782, IoU.sky: 0.9581, IoU.person: 0.8695, IoU.rider: 0.7515, IoU.car: 0.9585, IoU.truck: 0.9099, IoU.bus: 0.9428, IoU.train: 0.9005, IoU.motorcycle: 0.7645, IoU.bicycle: 0.8288, Acc.road: 0.9933, Acc.sidewalk: 0.9366, Acc.building: 0.9688, Acc.wall: 0.8151, Acc.fence: 0.8377, Acc.pole: 0.8583, Acc.traffic light: 0.8969, Acc.traffic sign: 0.9195, Acc.vegetation: 0.9684, Acc.terrain: 0.7897, Acc.sky: 0.9864, Acc.person: 0.9443, Acc.rider: 0.8683, Acc.car: 0.9832, Acc.truck: 0.9512, Acc.bus: 0.9665, Acc.train: 0.9328, Acc.motorcycle: 0.8772, Acc.bicycle: 0.9156 +2022-05-10 
02:43:00,588 - mmseg - INFO - Iter [17050/80000] lr: 1.130e-06, eta: 1 day, 10:09:47, time: 4.722, data_time: 2.941, memory: 64699, decode.loss_cls: 0.1264, decode.loss_mask: 0.2497, decode.loss_dice: 0.6890, decode.d0.loss_cls: 0.3605, decode.d0.loss_mask: 0.2611, decode.d0.loss_dice: 0.7403, decode.d1.loss_cls: 0.1550, decode.d1.loss_mask: 0.2508, decode.d1.loss_dice: 0.7024, decode.d2.loss_cls: 0.1440, decode.d2.loss_mask: 0.2490, decode.d2.loss_dice: 0.6925, decode.d3.loss_cls: 0.1408, decode.d3.loss_mask: 0.2496, decode.d3.loss_dice: 0.6876, decode.d4.loss_cls: 0.1315, decode.d4.loss_mask: 0.2493, decode.d4.loss_dice: 0.6900, decode.d5.loss_cls: 0.1367, decode.d5.loss_mask: 0.2490, decode.d5.loss_dice: 0.6914, decode.d6.loss_cls: 0.1262, decode.d6.loss_mask: 0.2481, decode.d6.loss_dice: 0.6912, decode.d7.loss_cls: 0.1291, decode.d7.loss_mask: 0.2486, decode.d7.loss_dice: 0.6891, decode.d8.loss_cls: 0.1384, decode.d8.loss_mask: 0.2490, decode.d8.loss_dice: 0.6883, loss: 11.0548 +2022-05-10 02:44:29,808 - mmseg - INFO - Iter [17100/80000] lr: 1.129e-06, eta: 1 day, 10:07:38, time: 1.785, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1191, decode.loss_mask: 0.2511, decode.loss_dice: 0.6962, decode.d0.loss_cls: 0.3534, decode.d0.loss_mask: 0.2645, decode.d0.loss_dice: 0.7449, decode.d1.loss_cls: 0.1444, decode.d1.loss_mask: 0.2540, decode.d1.loss_dice: 0.7133, decode.d2.loss_cls: 0.1295, decode.d2.loss_mask: 0.2521, decode.d2.loss_dice: 0.7117, decode.d3.loss_cls: 0.1292, decode.d3.loss_mask: 0.2522, decode.d3.loss_dice: 0.7029, decode.d4.loss_cls: 0.1282, decode.d4.loss_mask: 0.2522, decode.d4.loss_dice: 0.6993, decode.d5.loss_cls: 0.1247, decode.d5.loss_mask: 0.2514, decode.d5.loss_dice: 0.6990, decode.d6.loss_cls: 0.1286, decode.d6.loss_mask: 0.2515, decode.d6.loss_dice: 0.6950, decode.d7.loss_cls: 0.1223, decode.d7.loss_mask: 0.2510, decode.d7.loss_dice: 0.7025, decode.d8.loss_cls: 0.1253, decode.d8.loss_mask: 0.2502, decode.d8.loss_dice: 0.7029, loss: 
11.1025 +2022-05-10 02:46:01,532 - mmseg - INFO - Iter [17150/80000] lr: 1.128e-06, eta: 1 day, 10:05:39, time: 1.834, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1153, decode.loss_mask: 0.2444, decode.loss_dice: 0.6843, decode.d0.loss_cls: 0.3378, decode.d0.loss_mask: 0.2549, decode.d0.loss_dice: 0.7307, decode.d1.loss_cls: 0.1433, decode.d1.loss_mask: 0.2457, decode.d1.loss_dice: 0.6963, decode.d2.loss_cls: 0.1352, decode.d2.loss_mask: 0.2455, decode.d2.loss_dice: 0.6895, decode.d3.loss_cls: 0.1171, decode.d3.loss_mask: 0.2445, decode.d3.loss_dice: 0.6844, decode.d4.loss_cls: 0.1264, decode.d4.loss_mask: 0.2448, decode.d4.loss_dice: 0.6825, decode.d5.loss_cls: 0.1209, decode.d5.loss_mask: 0.2443, decode.d5.loss_dice: 0.6852, decode.d6.loss_cls: 0.1204, decode.d6.loss_mask: 0.2451, decode.d6.loss_dice: 0.6819, decode.d7.loss_cls: 0.1172, decode.d7.loss_mask: 0.2450, decode.d7.loss_dice: 0.6796, decode.d8.loss_cls: 0.1131, decode.d8.loss_mask: 0.2443, decode.d8.loss_dice: 0.6804, loss: 10.8002 +2022-05-10 02:47:30,117 - mmseg - INFO - Iter [17200/80000] lr: 1.127e-06, eta: 1 day, 10:03:28, time: 1.772, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1362, decode.loss_mask: 0.2502, decode.loss_dice: 0.6951, decode.d0.loss_cls: 0.3566, decode.d0.loss_mask: 0.2639, decode.d0.loss_dice: 0.7448, decode.d1.loss_cls: 0.1625, decode.d1.loss_mask: 0.2543, decode.d1.loss_dice: 0.7166, decode.d2.loss_cls: 0.1527, decode.d2.loss_mask: 0.2515, decode.d2.loss_dice: 0.7064, decode.d3.loss_cls: 0.1394, decode.d3.loss_mask: 0.2511, decode.d3.loss_dice: 0.6971, decode.d4.loss_cls: 0.1454, decode.d4.loss_mask: 0.2514, decode.d4.loss_dice: 0.7033, decode.d5.loss_cls: 0.1439, decode.d5.loss_mask: 0.2505, decode.d5.loss_dice: 0.7019, decode.d6.loss_cls: 0.1356, decode.d6.loss_mask: 0.2502, decode.d6.loss_dice: 0.6970, decode.d7.loss_cls: 0.1344, decode.d7.loss_mask: 0.2502, decode.d7.loss_dice: 0.6960, decode.d8.loss_cls: 0.1406, decode.d8.loss_mask: 0.2506, 
decode.d8.loss_dice: 0.6966, loss: 11.2261 +2022-05-10 02:48:58,895 - mmseg - INFO - Iter [17250/80000] lr: 1.126e-06, eta: 1 day, 10:01:18, time: 1.775, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1195, decode.loss_mask: 0.2456, decode.loss_dice: 0.6745, decode.d0.loss_cls: 0.3372, decode.d0.loss_mask: 0.2577, decode.d0.loss_dice: 0.7256, decode.d1.loss_cls: 0.1402, decode.d1.loss_mask: 0.2484, decode.d1.loss_dice: 0.6939, decode.d2.loss_cls: 0.1271, decode.d2.loss_mask: 0.2474, decode.d2.loss_dice: 0.6872, decode.d3.loss_cls: 0.1248, decode.d3.loss_mask: 0.2467, decode.d3.loss_dice: 0.6807, decode.d4.loss_cls: 0.1204, decode.d4.loss_mask: 0.2468, decode.d4.loss_dice: 0.6848, decode.d5.loss_cls: 0.1136, decode.d5.loss_mask: 0.2464, decode.d5.loss_dice: 0.6829, decode.d6.loss_cls: 0.1207, decode.d6.loss_mask: 0.2451, decode.d6.loss_dice: 0.6791, decode.d7.loss_cls: 0.1253, decode.d7.loss_mask: 0.2462, decode.d7.loss_dice: 0.6804, decode.d8.loss_cls: 0.1229, decode.d8.loss_mask: 0.2454, decode.d8.loss_dice: 0.6784, loss: 10.7950 +2022-05-10 02:50:29,554 - mmseg - INFO - Iter [17300/80000] lr: 1.125e-06, eta: 1 day, 9:59:16, time: 1.813, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1352, decode.loss_mask: 0.2473, decode.loss_dice: 0.6956, decode.d0.loss_cls: 0.3553, decode.d0.loss_mask: 0.2609, decode.d0.loss_dice: 0.7479, decode.d1.loss_cls: 0.1600, decode.d1.loss_mask: 0.2481, decode.d1.loss_dice: 0.7091, decode.d2.loss_cls: 0.1534, decode.d2.loss_mask: 0.2472, decode.d2.loss_dice: 0.7054, decode.d3.loss_cls: 0.1378, decode.d3.loss_mask: 0.2467, decode.d3.loss_dice: 0.6994, decode.d4.loss_cls: 0.1399, decode.d4.loss_mask: 0.2471, decode.d4.loss_dice: 0.7005, decode.d5.loss_cls: 0.1350, decode.d5.loss_mask: 0.2470, decode.d5.loss_dice: 0.6978, decode.d6.loss_cls: 0.1303, decode.d6.loss_mask: 0.2468, decode.d6.loss_dice: 0.6990, decode.d7.loss_cls: 0.1402, decode.d7.loss_mask: 0.2475, decode.d7.loss_dice: 0.6956, decode.d8.loss_cls: 0.1407, 
decode.d8.loss_mask: 0.2478, decode.d8.loss_dice: 0.6992, loss: 11.1635 +2022-05-10 02:51:58,493 - mmseg - INFO - Iter [17350/80000] lr: 1.124e-06, eta: 1 day, 9:57:07, time: 1.779, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1302, decode.loss_mask: 0.2455, decode.loss_dice: 0.6853, decode.d0.loss_cls: 0.3507, decode.d0.loss_mask: 0.2585, decode.d0.loss_dice: 0.7316, decode.d1.loss_cls: 0.1414, decode.d1.loss_mask: 0.2481, decode.d1.loss_dice: 0.7032, decode.d2.loss_cls: 0.1313, decode.d2.loss_mask: 0.2461, decode.d2.loss_dice: 0.6918, decode.d3.loss_cls: 0.1277, decode.d3.loss_mask: 0.2461, decode.d3.loss_dice: 0.6877, decode.d4.loss_cls: 0.1369, decode.d4.loss_mask: 0.2453, decode.d4.loss_dice: 0.6847, decode.d5.loss_cls: 0.1243, decode.d5.loss_mask: 0.2456, decode.d5.loss_dice: 0.6894, decode.d6.loss_cls: 0.1229, decode.d6.loss_mask: 0.2455, decode.d6.loss_dice: 0.6873, decode.d7.loss_cls: 0.1234, decode.d7.loss_mask: 0.2456, decode.d7.loss_dice: 0.6884, decode.d8.loss_cls: 0.1291, decode.d8.loss_mask: 0.2455, decode.d8.loss_dice: 0.6878, loss: 10.9270 +2022-05-10 02:53:26,837 - mmseg - INFO - Iter [17400/80000] lr: 1.124e-06, eta: 1 day, 9:54:56, time: 1.767, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1227, decode.loss_mask: 0.2479, decode.loss_dice: 0.6621, decode.d0.loss_cls: 0.3401, decode.d0.loss_mask: 0.2615, decode.d0.loss_dice: 0.7110, decode.d1.loss_cls: 0.1393, decode.d1.loss_mask: 0.2523, decode.d1.loss_dice: 0.6788, decode.d2.loss_cls: 0.1217, decode.d2.loss_mask: 0.2495, decode.d2.loss_dice: 0.6677, decode.d3.loss_cls: 0.1188, decode.d3.loss_mask: 0.2497, decode.d3.loss_dice: 0.6635, decode.d4.loss_cls: 0.1181, decode.d4.loss_mask: 0.2487, decode.d4.loss_dice: 0.6610, decode.d5.loss_cls: 0.1164, decode.d5.loss_mask: 0.2475, decode.d5.loss_dice: 0.6592, decode.d6.loss_cls: 0.1189, decode.d6.loss_mask: 0.2477, decode.d6.loss_dice: 0.6613, decode.d7.loss_cls: 0.1171, decode.d7.loss_mask: 0.2478, decode.d7.loss_dice: 0.6626, 
decode.d8.loss_cls: 0.1205, decode.d8.loss_mask: 0.2483, decode.d8.loss_dice: 0.6613, loss: 10.6229 +2022-05-10 02:54:56,339 - mmseg - INFO - Iter [17450/80000] lr: 1.123e-06, eta: 1 day, 9:52:50, time: 1.790, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1320, decode.loss_mask: 0.2530, decode.loss_dice: 0.6813, decode.d0.loss_cls: 0.3436, decode.d0.loss_mask: 0.2678, decode.d0.loss_dice: 0.7303, decode.d1.loss_cls: 0.1407, decode.d1.loss_mask: 0.2557, decode.d1.loss_dice: 0.7024, decode.d2.loss_cls: 0.1388, decode.d2.loss_mask: 0.2534, decode.d2.loss_dice: 0.6922, decode.d3.loss_cls: 0.1219, decode.d3.loss_mask: 0.2531, decode.d3.loss_dice: 0.6843, decode.d4.loss_cls: 0.1259, decode.d4.loss_mask: 0.2530, decode.d4.loss_dice: 0.6845, decode.d5.loss_cls: 0.1305, decode.d5.loss_mask: 0.2530, decode.d5.loss_dice: 0.6830, decode.d6.loss_cls: 0.1254, decode.d6.loss_mask: 0.2529, decode.d6.loss_dice: 0.6800, decode.d7.loss_cls: 0.1262, decode.d7.loss_mask: 0.2522, decode.d7.loss_dice: 0.6783, decode.d8.loss_cls: 0.1218, decode.d8.loss_mask: 0.2523, decode.d8.loss_dice: 0.6814, loss: 10.9510 +2022-05-10 02:56:26,742 - mmseg - INFO - Iter [17500/80000] lr: 1.122e-06, eta: 1 day, 9:50:47, time: 1.808, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1316, decode.loss_mask: 0.2446, decode.loss_dice: 0.6820, decode.d0.loss_cls: 0.3505, decode.d0.loss_mask: 0.2570, decode.d0.loss_dice: 0.7338, decode.d1.loss_cls: 0.1606, decode.d1.loss_mask: 0.2452, decode.d1.loss_dice: 0.6942, decode.d2.loss_cls: 0.1437, decode.d2.loss_mask: 0.2444, decode.d2.loss_dice: 0.6908, decode.d3.loss_cls: 0.1280, decode.d3.loss_mask: 0.2433, decode.d3.loss_dice: 0.6847, decode.d4.loss_cls: 0.1274, decode.d4.loss_mask: 0.2436, decode.d4.loss_dice: 0.6852, decode.d5.loss_cls: 0.1345, decode.d5.loss_mask: 0.2433, decode.d5.loss_dice: 0.6874, decode.d6.loss_cls: 0.1258, decode.d6.loss_mask: 0.2434, decode.d6.loss_dice: 0.6845, decode.d7.loss_cls: 0.1293, decode.d7.loss_mask: 0.2434, 
decode.d7.loss_dice: 0.6802, decode.d8.loss_cls: 0.1259, decode.d8.loss_mask: 0.2446, decode.d8.loss_dice: 0.6826, loss: 10.9156 +2022-05-10 02:57:56,305 - mmseg - INFO - Iter [17550/80000] lr: 1.121e-06, eta: 1 day, 9:48:42, time: 1.791, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1214, decode.loss_mask: 0.2430, decode.loss_dice: 0.6744, decode.d0.loss_cls: 0.3360, decode.d0.loss_mask: 0.2550, decode.d0.loss_dice: 0.7240, decode.d1.loss_cls: 0.1423, decode.d1.loss_mask: 0.2479, decode.d1.loss_dice: 0.6932, decode.d2.loss_cls: 0.1327, decode.d2.loss_mask: 0.2439, decode.d2.loss_dice: 0.6819, decode.d3.loss_cls: 0.1315, decode.d3.loss_mask: 0.2439, decode.d3.loss_dice: 0.6785, decode.d4.loss_cls: 0.1262, decode.d4.loss_mask: 0.2434, decode.d4.loss_dice: 0.6792, decode.d5.loss_cls: 0.1293, decode.d5.loss_mask: 0.2438, decode.d5.loss_dice: 0.6764, decode.d6.loss_cls: 0.1210, decode.d6.loss_mask: 0.2426, decode.d6.loss_dice: 0.6768, decode.d7.loss_cls: 0.1175, decode.d7.loss_mask: 0.2434, decode.d7.loss_dice: 0.6773, decode.d8.loss_cls: 0.1148, decode.d8.loss_mask: 0.2440, decode.d8.loss_dice: 0.6810, loss: 10.7664 +2022-05-10 02:59:25,512 - mmseg - INFO - Iter [17600/80000] lr: 1.120e-06, eta: 1 day, 9:46:35, time: 1.784, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1155, decode.loss_mask: 0.2489, decode.loss_dice: 0.6805, decode.d0.loss_cls: 0.3460, decode.d0.loss_mask: 0.2616, decode.d0.loss_dice: 0.7297, decode.d1.loss_cls: 0.1430, decode.d1.loss_mask: 0.2532, decode.d1.loss_dice: 0.6970, decode.d2.loss_cls: 0.1247, decode.d2.loss_mask: 0.2511, decode.d2.loss_dice: 0.6896, decode.d3.loss_cls: 0.1201, decode.d3.loss_mask: 0.2512, decode.d3.loss_dice: 0.6833, decode.d4.loss_cls: 0.1141, decode.d4.loss_mask: 0.2501, decode.d4.loss_dice: 0.6884, decode.d5.loss_cls: 0.1149, decode.d5.loss_mask: 0.2508, decode.d5.loss_dice: 0.6860, decode.d6.loss_cls: 0.1156, decode.d6.loss_mask: 0.2503, decode.d6.loss_dice: 0.6821, decode.d7.loss_cls: 0.1167, 
decode.d7.loss_mask: 0.2501, decode.d7.loss_dice: 0.6839, decode.d8.loss_cls: 0.1164, decode.d8.loss_mask: 0.2498, decode.d8.loss_dice: 0.6833, loss: 10.8479 +2022-05-10 03:00:52,732 - mmseg - INFO - Iter [17650/80000] lr: 1.119e-06, eta: 1 day, 9:44:21, time: 1.744, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1149, decode.loss_mask: 0.2518, decode.loss_dice: 0.6656, decode.d0.loss_cls: 0.3502, decode.d0.loss_mask: 0.2704, decode.d0.loss_dice: 0.7164, decode.d1.loss_cls: 0.1336, decode.d1.loss_mask: 0.2558, decode.d1.loss_dice: 0.6844, decode.d2.loss_cls: 0.1273, decode.d2.loss_mask: 0.2542, decode.d2.loss_dice: 0.6766, decode.d3.loss_cls: 0.1289, decode.d3.loss_mask: 0.2533, decode.d3.loss_dice: 0.6715, decode.d4.loss_cls: 0.1221, decode.d4.loss_mask: 0.2530, decode.d4.loss_dice: 0.6683, decode.d5.loss_cls: 0.1247, decode.d5.loss_mask: 0.2523, decode.d5.loss_dice: 0.6684, decode.d6.loss_cls: 0.1180, decode.d6.loss_mask: 0.2516, decode.d6.loss_dice: 0.6690, decode.d7.loss_cls: 0.1192, decode.d7.loss_mask: 0.2525, decode.d7.loss_dice: 0.6669, decode.d8.loss_cls: 0.1164, decode.d8.loss_mask: 0.2529, decode.d8.loss_dice: 0.6690, loss: 10.7593 +2022-05-10 03:02:25,588 - mmseg - INFO - Iter [17700/80000] lr: 1.118e-06, eta: 1 day, 9:42:28, time: 1.857, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1214, decode.loss_mask: 0.2511, decode.loss_dice: 0.6720, decode.d0.loss_cls: 0.3360, decode.d0.loss_mask: 0.2662, decode.d0.loss_dice: 0.7272, decode.d1.loss_cls: 0.1518, decode.d1.loss_mask: 0.2518, decode.d1.loss_dice: 0.6911, decode.d2.loss_cls: 0.1344, decode.d2.loss_mask: 0.2510, decode.d2.loss_dice: 0.6818, decode.d3.loss_cls: 0.1285, decode.d3.loss_mask: 0.2503, decode.d3.loss_dice: 0.6703, decode.d4.loss_cls: 0.1293, decode.d4.loss_mask: 0.2504, decode.d4.loss_dice: 0.6733, decode.d5.loss_cls: 0.1268, decode.d5.loss_mask: 0.2503, decode.d5.loss_dice: 0.6711, decode.d6.loss_cls: 0.1261, decode.d6.loss_mask: 0.2504, decode.d6.loss_dice: 0.6701, 
decode.d7.loss_cls: 0.1255, decode.d7.loss_mask: 0.2513, decode.d7.loss_dice: 0.6743, decode.d8.loss_cls: 0.1256, decode.d8.loss_mask: 0.2510, decode.d8.loss_dice: 0.6739, loss: 10.8342 +2022-05-10 03:03:53,701 - mmseg - INFO - Iter [17750/80000] lr: 1.117e-06, eta: 1 day, 9:40:18, time: 1.762, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1065, decode.loss_mask: 0.2362, decode.loss_dice: 0.6693, decode.d0.loss_cls: 0.3310, decode.d0.loss_mask: 0.2471, decode.d0.loss_dice: 0.7120, decode.d1.loss_cls: 0.1305, decode.d1.loss_mask: 0.2378, decode.d1.loss_dice: 0.6804, decode.d2.loss_cls: 0.1194, decode.d2.loss_mask: 0.2374, decode.d2.loss_dice: 0.6727, decode.d3.loss_cls: 0.1125, decode.d3.loss_mask: 0.2366, decode.d3.loss_dice: 0.6688, decode.d4.loss_cls: 0.1084, decode.d4.loss_mask: 0.2361, decode.d4.loss_dice: 0.6692, decode.d5.loss_cls: 0.1124, decode.d5.loss_mask: 0.2360, decode.d5.loss_dice: 0.6688, decode.d6.loss_cls: 0.1093, decode.d6.loss_mask: 0.2364, decode.d6.loss_dice: 0.6658, decode.d7.loss_cls: 0.1164, decode.d7.loss_mask: 0.2355, decode.d7.loss_dice: 0.6630, decode.d8.loss_cls: 0.1122, decode.d8.loss_mask: 0.2363, decode.d8.loss_dice: 0.6669, loss: 10.4711 +2022-05-10 03:05:23,699 - mmseg - INFO - Iter [17800/80000] lr: 1.116e-06, eta: 1 day, 9:38:15, time: 1.800, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1009, decode.loss_mask: 0.2429, decode.loss_dice: 0.6685, decode.d0.loss_cls: 0.3315, decode.d0.loss_mask: 0.2545, decode.d0.loss_dice: 0.7105, decode.d1.loss_cls: 0.1256, decode.d1.loss_mask: 0.2450, decode.d1.loss_dice: 0.6835, decode.d2.loss_cls: 0.1051, decode.d2.loss_mask: 0.2443, decode.d2.loss_dice: 0.6738, decode.d3.loss_cls: 0.1050, decode.d3.loss_mask: 0.2438, decode.d3.loss_dice: 0.6694, decode.d4.loss_cls: 0.1043, decode.d4.loss_mask: 0.2436, decode.d4.loss_dice: 0.6743, decode.d5.loss_cls: 0.1099, decode.d5.loss_mask: 0.2437, decode.d5.loss_dice: 0.6729, decode.d6.loss_cls: 0.1035, decode.d6.loss_mask: 0.2432, 
decode.d6.loss_dice: 0.6692, decode.d7.loss_cls: 0.1068, decode.d7.loss_mask: 0.2433, decode.d7.loss_dice: 0.6698, decode.d8.loss_cls: 0.1043, decode.d8.loss_mask: 0.2431, decode.d8.loss_dice: 0.6678, loss: 10.5038 +2022-05-10 03:06:52,598 - mmseg - INFO - Iter [17850/80000] lr: 1.115e-06, eta: 1 day, 9:36:08, time: 1.778, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1173, decode.loss_mask: 0.2474, decode.loss_dice: 0.6850, decode.d0.loss_cls: 0.3397, decode.d0.loss_mask: 0.2571, decode.d0.loss_dice: 0.7344, decode.d1.loss_cls: 0.1518, decode.d1.loss_mask: 0.2493, decode.d1.loss_dice: 0.7024, decode.d2.loss_cls: 0.1346, decode.d2.loss_mask: 0.2487, decode.d2.loss_dice: 0.6907, decode.d3.loss_cls: 0.1260, decode.d3.loss_mask: 0.2489, decode.d3.loss_dice: 0.6826, decode.d4.loss_cls: 0.1230, decode.d4.loss_mask: 0.2489, decode.d4.loss_dice: 0.6821, decode.d5.loss_cls: 0.1286, decode.d5.loss_mask: 0.2466, decode.d5.loss_dice: 0.6893, decode.d6.loss_cls: 0.1288, decode.d6.loss_mask: 0.2478, decode.d6.loss_dice: 0.6851, decode.d7.loss_cls: 0.1285, decode.d7.loss_mask: 0.2477, decode.d7.loss_dice: 0.6849, decode.d8.loss_cls: 0.1315, decode.d8.loss_mask: 0.2469, decode.d8.loss_dice: 0.6845, loss: 10.9203 +2022-05-10 03:08:23,284 - mmseg - INFO - Iter [17900/80000] lr: 1.115e-06, eta: 1 day, 9:34:08, time: 1.813, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1325, decode.loss_mask: 0.2370, decode.loss_dice: 0.6824, decode.d0.loss_cls: 0.3510, decode.d0.loss_mask: 0.2533, decode.d0.loss_dice: 0.7435, decode.d1.loss_cls: 0.1512, decode.d1.loss_mask: 0.2386, decode.d1.loss_dice: 0.7057, decode.d2.loss_cls: 0.1383, decode.d2.loss_mask: 0.2378, decode.d2.loss_dice: 0.6939, decode.d3.loss_cls: 0.1352, decode.d3.loss_mask: 0.2364, decode.d3.loss_dice: 0.6876, decode.d4.loss_cls: 0.1322, decode.d4.loss_mask: 0.2372, decode.d4.loss_dice: 0.6879, decode.d5.loss_cls: 0.1309, decode.d5.loss_mask: 0.2377, decode.d5.loss_dice: 0.6870, decode.d6.loss_cls: 0.1297, 
decode.d6.loss_mask: 0.2375, decode.d6.loss_dice: 0.6837, decode.d7.loss_cls: 0.1304, decode.d7.loss_mask: 0.2381, decode.d7.loss_dice: 0.6819, decode.d8.loss_cls: 0.1310, decode.d8.loss_mask: 0.2374, decode.d8.loss_dice: 0.6862, loss: 10.8928 +2022-05-10 03:09:51,709 - mmseg - INFO - Iter [17950/80000] lr: 1.114e-06, eta: 1 day, 9:32:00, time: 1.769, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1140, decode.loss_mask: 0.2386, decode.loss_dice: 0.6663, decode.d0.loss_cls: 0.3455, decode.d0.loss_mask: 0.2547, decode.d0.loss_dice: 0.7163, decode.d1.loss_cls: 0.1433, decode.d1.loss_mask: 0.2424, decode.d1.loss_dice: 0.6886, decode.d2.loss_cls: 0.1285, decode.d2.loss_mask: 0.2392, decode.d2.loss_dice: 0.6781, decode.d3.loss_cls: 0.1182, decode.d3.loss_mask: 0.2384, decode.d3.loss_dice: 0.6707, decode.d4.loss_cls: 0.1225, decode.d4.loss_mask: 0.2387, decode.d4.loss_dice: 0.6694, decode.d5.loss_cls: 0.1250, decode.d5.loss_mask: 0.2385, decode.d5.loss_dice: 0.6713, decode.d6.loss_cls: 0.1177, decode.d6.loss_mask: 0.2391, decode.d6.loss_dice: 0.6703, decode.d7.loss_cls: 0.1162, decode.d7.loss_mask: 0.2390, decode.d7.loss_dice: 0.6685, decode.d8.loss_cls: 0.1211, decode.d8.loss_mask: 0.2381, decode.d8.loss_dice: 0.6712, loss: 10.6291 +2022-05-10 03:11:20,126 - mmseg - INFO - Saving checkpoint at 18000 iterations +2022-05-10 03:11:50,966 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 03:11:50,975 - mmseg - INFO - Iter [18000/80000] lr: 1.113e-06, eta: 1 day, 9:31:38, time: 2.383, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1175, decode.loss_mask: 0.2493, decode.loss_dice: 0.6816, decode.d0.loss_cls: 0.3503, decode.d0.loss_mask: 0.2627, decode.d0.loss_dice: 0.7376, decode.d1.loss_cls: 0.1492, decode.d1.loss_mask: 0.2527, decode.d1.loss_dice: 0.7010, decode.d2.loss_cls: 0.1250, decode.d2.loss_mask: 0.2510, decode.d2.loss_dice: 0.6922, decode.d3.loss_cls: 0.1307, decode.d3.loss_mask: 0.2500, decode.d3.loss_dice: 
0.6880, decode.d4.loss_cls: 0.1284, decode.d4.loss_mask: 0.2488, decode.d4.loss_dice: 0.6820, decode.d5.loss_cls: 0.1308, decode.d5.loss_mask: 0.2487, decode.d5.loss_dice: 0.6865, decode.d6.loss_cls: 0.1213, decode.d6.loss_mask: 0.2500, decode.d6.loss_dice: 0.6847, decode.d7.loss_cls: 0.1203, decode.d7.loss_mask: 0.2493, decode.d7.loss_dice: 0.6838, decode.d8.loss_cls: 0.1203, decode.d8.loss_mask: 0.2495, decode.d8.loss_dice: 0.6838, loss: 10.9273 +2022-05-10 03:13:46,428 - mmseg - INFO - per class results: +2022-05-10 03:13:46,432 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.56 | 99.23 | +| sidewalk | 88.31 | 93.22 | +| building | 94.36 | 96.95 | +| wall | 68.69 | 80.53 | +| fence | 77.24 | 85.55 | +| pole | 71.48 | 83.02 | +| traffic light | 77.05 | 88.93 | +| traffic sign | 83.9 | 91.15 | +| vegetation | 93.32 | 96.94 | +| terrain | 67.32 | 79.68 | +| sky | 95.99 | 98.32 | +| person | 86.92 | 94.02 | +| rider | 75.08 | 85.1 | +| car | 96.23 | 98.28 | +| truck | 89.12 | 92.46 | +| bus | 93.63 | 96.57 | +| train | 87.7 | 91.39 | +| motorcycle | 78.03 | 87.85 | +| bicycle | 82.14 | 92.33 | ++---------------+-------+-------+ +2022-05-10 03:13:46,432 - mmseg - INFO - Summary: +2022-05-10 03:13:46,432 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.48 | 91.13 | ++------+-------+-------+ +2022-05-10 03:13:46,436 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 03:13:46,436 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8448, mAcc: 0.9113, IoU.road: 0.9856, IoU.sidewalk: 0.8831, IoU.building: 0.9436, IoU.wall: 0.6869, IoU.fence: 0.7724, IoU.pole: 0.7148, IoU.traffic light: 0.7705, IoU.traffic sign: 0.8390, IoU.vegetation: 0.9332, IoU.terrain: 0.6732, IoU.sky: 0.9599, IoU.person: 0.8692, IoU.rider: 0.7508, IoU.car: 0.9623, IoU.truck: 0.8912, IoU.bus: 0.9363, IoU.train: 0.8770, 
IoU.motorcycle: 0.7803, IoU.bicycle: 0.8214, Acc.road: 0.9923, Acc.sidewalk: 0.9322, Acc.building: 0.9695, Acc.wall: 0.8053, Acc.fence: 0.8555, Acc.pole: 0.8302, Acc.traffic light: 0.8893, Acc.traffic sign: 0.9115, Acc.vegetation: 0.9694, Acc.terrain: 0.7968, Acc.sky: 0.9832, Acc.person: 0.9402, Acc.rider: 0.8510, Acc.car: 0.9828, Acc.truck: 0.9246, Acc.bus: 0.9657, Acc.train: 0.9139, Acc.motorcycle: 0.8785, Acc.bicycle: 0.9233 +2022-05-10 03:15:17,524 - mmseg - INFO - Iter [18050/80000] lr: 1.112e-06, eta: 1 day, 9:36:16, time: 4.133, data_time: 2.379, memory: 64699, decode.loss_cls: 0.1285, decode.loss_mask: 0.2498, decode.loss_dice: 0.6790, decode.d0.loss_cls: 0.3487, decode.d0.loss_mask: 0.2609, decode.d0.loss_dice: 0.7308, decode.d1.loss_cls: 0.1530, decode.d1.loss_mask: 0.2524, decode.d1.loss_dice: 0.6921, decode.d2.loss_cls: 0.1466, decode.d2.loss_mask: 0.2524, decode.d2.loss_dice: 0.6869, decode.d3.loss_cls: 0.1411, decode.d3.loss_mask: 0.2504, decode.d3.loss_dice: 0.6801, decode.d4.loss_cls: 0.1402, decode.d4.loss_mask: 0.2505, decode.d4.loss_dice: 0.6801, decode.d5.loss_cls: 0.1421, decode.d5.loss_mask: 0.2503, decode.d5.loss_dice: 0.6774, decode.d6.loss_cls: 0.1387, decode.d6.loss_mask: 0.2496, decode.d6.loss_dice: 0.6784, decode.d7.loss_cls: 0.1269, decode.d7.loss_mask: 0.2502, decode.d7.loss_dice: 0.6757, decode.d8.loss_cls: 0.1343, decode.d8.loss_mask: 0.2504, decode.d8.loss_dice: 0.6785, loss: 10.9758 +2022-05-10 03:16:46,041 - mmseg - INFO - Iter [18100/80000] lr: 1.111e-06, eta: 1 day, 9:34:07, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1080, decode.loss_mask: 0.2457, decode.loss_dice: 0.6644, decode.d0.loss_cls: 0.3346, decode.d0.loss_mask: 0.2588, decode.d0.loss_dice: 0.7133, decode.d1.loss_cls: 0.1301, decode.d1.loss_mask: 0.2486, decode.d1.loss_dice: 0.6853, decode.d2.loss_cls: 0.1196, decode.d2.loss_mask: 0.2482, decode.d2.loss_dice: 0.6758, decode.d3.loss_cls: 0.1046, decode.d3.loss_mask: 0.2460, decode.d3.loss_dice: 
0.6676, decode.d4.loss_cls: 0.1122, decode.d4.loss_mask: 0.2460, decode.d4.loss_dice: 0.6698, decode.d5.loss_cls: 0.1129, decode.d5.loss_mask: 0.2471, decode.d5.loss_dice: 0.6705, decode.d6.loss_cls: 0.1083, decode.d6.loss_mask: 0.2465, decode.d6.loss_dice: 0.6684, decode.d7.loss_cls: 0.1066, decode.d7.loss_mask: 0.2459, decode.d7.loss_dice: 0.6657, decode.d8.loss_cls: 0.1062, decode.d8.loss_mask: 0.2460, decode.d8.loss_dice: 0.6673, loss: 10.5700 +2022-05-10 03:18:14,662 - mmseg - INFO - Iter [18150/80000] lr: 1.110e-06, eta: 1 day, 9:31:59, time: 1.773, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1256, decode.loss_mask: 0.2441, decode.loss_dice: 0.6831, decode.d0.loss_cls: 0.3469, decode.d0.loss_mask: 0.2556, decode.d0.loss_dice: 0.7312, decode.d1.loss_cls: 0.1453, decode.d1.loss_mask: 0.2460, decode.d1.loss_dice: 0.6943, decode.d2.loss_cls: 0.1336, decode.d2.loss_mask: 0.2433, decode.d2.loss_dice: 0.6867, decode.d3.loss_cls: 0.1275, decode.d3.loss_mask: 0.2439, decode.d3.loss_dice: 0.6788, decode.d4.loss_cls: 0.1286, decode.d4.loss_mask: 0.2438, decode.d4.loss_dice: 0.6829, decode.d5.loss_cls: 0.1245, decode.d5.loss_mask: 0.2445, decode.d5.loss_dice: 0.6839, decode.d6.loss_cls: 0.1243, decode.d6.loss_mask: 0.2438, decode.d6.loss_dice: 0.6793, decode.d7.loss_cls: 0.1264, decode.d7.loss_mask: 0.2439, decode.d7.loss_dice: 0.6838, decode.d8.loss_cls: 0.1237, decode.d8.loss_mask: 0.2442, decode.d8.loss_dice: 0.6833, loss: 10.8468 +2022-05-10 03:19:43,907 - mmseg - INFO - Iter [18200/80000] lr: 1.109e-06, eta: 1 day, 9:29:52, time: 1.783, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1114, decode.loss_mask: 0.2470, decode.loss_dice: 0.6632, decode.d0.loss_cls: 0.3427, decode.d0.loss_mask: 0.2599, decode.d0.loss_dice: 0.7089, decode.d1.loss_cls: 0.1319, decode.d1.loss_mask: 0.2488, decode.d1.loss_dice: 0.6780, decode.d2.loss_cls: 0.1245, decode.d2.loss_mask: 0.2483, decode.d2.loss_dice: 0.6716, decode.d3.loss_cls: 0.1185, decode.d3.loss_mask: 0.2480, 
decode.d3.loss_dice: 0.6654, decode.d4.loss_cls: 0.1192, decode.d4.loss_mask: 0.2481, decode.d4.loss_dice: 0.6687, decode.d5.loss_cls: 0.1104, decode.d5.loss_mask: 0.2475, decode.d5.loss_dice: 0.6600, decode.d6.loss_cls: 0.1142, decode.d6.loss_mask: 0.2476, decode.d6.loss_dice: 0.6560, decode.d7.loss_cls: 0.1157, decode.d7.loss_mask: 0.2475, decode.d7.loss_dice: 0.6619, decode.d8.loss_cls: 0.1185, decode.d8.loss_mask: 0.2466, decode.d8.loss_dice: 0.6586, loss: 10.5885 +2022-05-10 03:21:14,952 - mmseg - INFO - Iter [18250/80000] lr: 1.108e-06, eta: 1 day, 9:27:53, time: 1.823, data_time: 0.071, memory: 64699, decode.loss_cls: 0.1249, decode.loss_mask: 0.2454, decode.loss_dice: 0.6811, decode.d0.loss_cls: 0.3391, decode.d0.loss_mask: 0.2574, decode.d0.loss_dice: 0.7370, decode.d1.loss_cls: 0.1522, decode.d1.loss_mask: 0.2473, decode.d1.loss_dice: 0.6953, decode.d2.loss_cls: 0.1391, decode.d2.loss_mask: 0.2454, decode.d2.loss_dice: 0.6905, decode.d3.loss_cls: 0.1331, decode.d3.loss_mask: 0.2454, decode.d3.loss_dice: 0.6842, decode.d4.loss_cls: 0.1272, decode.d4.loss_mask: 0.2450, decode.d4.loss_dice: 0.6789, decode.d5.loss_cls: 0.1213, decode.d5.loss_mask: 0.2447, decode.d5.loss_dice: 0.6851, decode.d6.loss_cls: 0.1203, decode.d6.loss_mask: 0.2448, decode.d6.loss_dice: 0.6802, decode.d7.loss_cls: 0.1233, decode.d7.loss_mask: 0.2454, decode.d7.loss_dice: 0.6827, decode.d8.loss_cls: 0.1273, decode.d8.loss_mask: 0.2451, decode.d8.loss_dice: 0.6857, loss: 10.8743 +2022-05-10 03:22:42,819 - mmseg - INFO - Iter [18300/80000] lr: 1.107e-06, eta: 1 day, 9:25:43, time: 1.758, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1151, decode.loss_mask: 0.2487, decode.loss_dice: 0.6848, decode.d0.loss_cls: 0.3304, decode.d0.loss_mask: 0.2654, decode.d0.loss_dice: 0.7336, decode.d1.loss_cls: 0.1408, decode.d1.loss_mask: 0.2515, decode.d1.loss_dice: 0.6972, decode.d2.loss_cls: 0.1216, decode.d2.loss_mask: 0.2520, decode.d2.loss_dice: 0.6909, decode.d3.loss_cls: 0.1264, 
decode.d3.loss_mask: 0.2493, decode.d3.loss_dice: 0.6856, decode.d4.loss_cls: 0.1253, decode.d4.loss_mask: 0.2498, decode.d4.loss_dice: 0.6797, decode.d5.loss_cls: 0.1294, decode.d5.loss_mask: 0.2481, decode.d5.loss_dice: 0.6795, decode.d6.loss_cls: 0.1240, decode.d6.loss_mask: 0.2483, decode.d6.loss_dice: 0.6787, decode.d7.loss_cls: 0.1212, decode.d7.loss_mask: 0.2495, decode.d7.loss_dice: 0.6803, decode.d8.loss_cls: 0.1207, decode.d8.loss_mask: 0.2493, decode.d8.loss_dice: 0.6777, loss: 10.8545 +2022-05-10 03:24:11,907 - mmseg - INFO - Iter [18350/80000] lr: 1.106e-06, eta: 1 day, 9:23:37, time: 1.782, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1211, decode.loss_mask: 0.2420, decode.loss_dice: 0.6776, decode.d0.loss_cls: 0.3352, decode.d0.loss_mask: 0.2553, decode.d0.loss_dice: 0.7267, decode.d1.loss_cls: 0.1541, decode.d1.loss_mask: 0.2456, decode.d1.loss_dice: 0.6879, decode.d2.loss_cls: 0.1310, decode.d2.loss_mask: 0.2441, decode.d2.loss_dice: 0.6783, decode.d3.loss_cls: 0.1326, decode.d3.loss_mask: 0.2426, decode.d3.loss_dice: 0.6774, decode.d4.loss_cls: 0.1330, decode.d4.loss_mask: 0.2424, decode.d4.loss_dice: 0.6776, decode.d5.loss_cls: 0.1252, decode.d5.loss_mask: 0.2435, decode.d5.loss_dice: 0.6746, decode.d6.loss_cls: 0.1284, decode.d6.loss_mask: 0.2429, decode.d6.loss_dice: 0.6684, decode.d7.loss_cls: 0.1286, decode.d7.loss_mask: 0.2419, decode.d7.loss_dice: 0.6703, decode.d8.loss_cls: 0.1222, decode.d8.loss_mask: 0.2419, decode.d8.loss_dice: 0.6778, loss: 10.7702 +2022-05-10 03:25:40,216 - mmseg - INFO - Iter [18400/80000] lr: 1.106e-06, eta: 1 day, 9:21:29, time: 1.766, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1184, decode.loss_mask: 0.2447, decode.loss_dice: 0.6714, decode.d0.loss_cls: 0.3485, decode.d0.loss_mask: 0.2567, decode.d0.loss_dice: 0.7192, decode.d1.loss_cls: 0.1480, decode.d1.loss_mask: 0.2473, decode.d1.loss_dice: 0.6883, decode.d2.loss_cls: 0.1343, decode.d2.loss_mask: 0.2443, decode.d2.loss_dice: 0.6784, 
decode.d3.loss_cls: 0.1229, decode.d3.loss_mask: 0.2439, decode.d3.loss_dice: 0.6752, decode.d4.loss_cls: 0.1182, decode.d4.loss_mask: 0.2432, decode.d4.loss_dice: 0.6680, decode.d5.loss_cls: 0.1262, decode.d5.loss_mask: 0.2441, decode.d5.loss_dice: 0.6714, decode.d6.loss_cls: 0.1204, decode.d6.loss_mask: 0.2446, decode.d6.loss_dice: 0.6683, decode.d7.loss_cls: 0.1191, decode.d7.loss_mask: 0.2442, decode.d7.loss_dice: 0.6704, decode.d8.loss_cls: 0.1245, decode.d8.loss_mask: 0.2444, decode.d8.loss_dice: 0.6699, loss: 10.7185 +2022-05-10 03:27:11,584 - mmseg - INFO - Iter [18450/80000] lr: 1.105e-06, eta: 1 day, 9:19:31, time: 1.827, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1296, decode.loss_mask: 0.2521, decode.loss_dice: 0.6777, decode.d0.loss_cls: 0.3454, decode.d0.loss_mask: 0.2669, decode.d0.loss_dice: 0.7369, decode.d1.loss_cls: 0.1505, decode.d1.loss_mask: 0.2547, decode.d1.loss_dice: 0.7009, decode.d2.loss_cls: 0.1417, decode.d2.loss_mask: 0.2540, decode.d2.loss_dice: 0.6889, decode.d3.loss_cls: 0.1309, decode.d3.loss_mask: 0.2530, decode.d3.loss_dice: 0.6807, decode.d4.loss_cls: 0.1406, decode.d4.loss_mask: 0.2524, decode.d4.loss_dice: 0.6809, decode.d5.loss_cls: 0.1331, decode.d5.loss_mask: 0.2531, decode.d5.loss_dice: 0.6805, decode.d6.loss_cls: 0.1280, decode.d6.loss_mask: 0.2520, decode.d6.loss_dice: 0.6763, decode.d7.loss_cls: 0.1315, decode.d7.loss_mask: 0.2522, decode.d7.loss_dice: 0.6837, decode.d8.loss_cls: 0.1272, decode.d8.loss_mask: 0.2516, decode.d8.loss_dice: 0.6830, loss: 10.9901 +2022-05-10 03:28:41,003 - mmseg - INFO - Iter [18500/80000] lr: 1.104e-06, eta: 1 day, 9:17:27, time: 1.788, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1288, decode.loss_mask: 0.2482, decode.loss_dice: 0.6720, decode.d0.loss_cls: 0.3388, decode.d0.loss_mask: 0.2624, decode.d0.loss_dice: 0.7233, decode.d1.loss_cls: 0.1478, decode.d1.loss_mask: 0.2509, decode.d1.loss_dice: 0.6906, decode.d2.loss_cls: 0.1416, decode.d2.loss_mask: 0.2503, 
decode.d2.loss_dice: 0.6836, decode.d3.loss_cls: 0.1354, decode.d3.loss_mask: 0.2489, decode.d3.loss_dice: 0.6783, decode.d4.loss_cls: 0.1336, decode.d4.loss_mask: 0.2493, decode.d4.loss_dice: 0.6771, decode.d5.loss_cls: 0.1287, decode.d5.loss_mask: 0.2490, decode.d5.loss_dice: 0.6774, decode.d6.loss_cls: 0.1272, decode.d6.loss_mask: 0.2486, decode.d6.loss_dice: 0.6729, decode.d7.loss_cls: 0.1310, decode.d7.loss_mask: 0.2485, decode.d7.loss_dice: 0.6718, decode.d8.loss_cls: 0.1282, decode.d8.loss_mask: 0.2484, decode.d8.loss_dice: 0.6758, loss: 10.8683 +2022-05-10 03:30:10,243 - mmseg - INFO - Iter [18550/80000] lr: 1.103e-06, eta: 1 day, 9:15:22, time: 1.785, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1184, decode.loss_mask: 0.2413, decode.loss_dice: 0.6669, decode.d0.loss_cls: 0.3408, decode.d0.loss_mask: 0.2538, decode.d0.loss_dice: 0.7168, decode.d1.loss_cls: 0.1438, decode.d1.loss_mask: 0.2447, decode.d1.loss_dice: 0.6798, decode.d2.loss_cls: 0.1343, decode.d2.loss_mask: 0.2437, decode.d2.loss_dice: 0.6787, decode.d3.loss_cls: 0.1214, decode.d3.loss_mask: 0.2424, decode.d3.loss_dice: 0.6719, decode.d4.loss_cls: 0.1146, decode.d4.loss_mask: 0.2419, decode.d4.loss_dice: 0.6714, decode.d5.loss_cls: 0.1227, decode.d5.loss_mask: 0.2422, decode.d5.loss_dice: 0.6703, decode.d6.loss_cls: 0.1184, decode.d6.loss_mask: 0.2418, decode.d6.loss_dice: 0.6700, decode.d7.loss_cls: 0.1248, decode.d7.loss_mask: 0.2413, decode.d7.loss_dice: 0.6664, decode.d8.loss_cls: 0.1159, decode.d8.loss_mask: 0.2409, decode.d8.loss_dice: 0.6742, loss: 10.6555 +2022-05-10 03:31:39,082 - mmseg - INFO - Iter [18600/80000] lr: 1.102e-06, eta: 1 day, 9:13:16, time: 1.777, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1134, decode.loss_mask: 0.2461, decode.loss_dice: 0.6741, decode.d0.loss_cls: 0.3287, decode.d0.loss_mask: 0.2592, decode.d0.loss_dice: 0.7212, decode.d1.loss_cls: 0.1380, decode.d1.loss_mask: 0.2488, decode.d1.loss_dice: 0.6890, decode.d2.loss_cls: 0.1259, 
decode.d2.loss_mask: 0.2466, decode.d2.loss_dice: 0.6822, decode.d3.loss_cls: 0.1218, decode.d3.loss_mask: 0.2470, decode.d3.loss_dice: 0.6728, decode.d4.loss_cls: 0.1245, decode.d4.loss_mask: 0.2470, decode.d4.loss_dice: 0.6741, decode.d5.loss_cls: 0.1138, decode.d5.loss_mask: 0.2470, decode.d5.loss_dice: 0.6771, decode.d6.loss_cls: 0.1127, decode.d6.loss_mask: 0.2462, decode.d6.loss_dice: 0.6740, decode.d7.loss_cls: 0.1152, decode.d7.loss_mask: 0.2460, decode.d7.loss_dice: 0.6778, decode.d8.loss_cls: 0.1115, decode.d8.loss_mask: 0.2460, decode.d8.loss_dice: 0.6772, loss: 10.7049 +2022-05-10 03:33:10,591 - mmseg - INFO - Iter [18650/80000] lr: 1.101e-06, eta: 1 day, 9:11:20, time: 1.830, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1111, decode.loss_mask: 0.2441, decode.loss_dice: 0.6629, decode.d0.loss_cls: 0.3431, decode.d0.loss_mask: 0.2575, decode.d0.loss_dice: 0.7171, decode.d1.loss_cls: 0.1415, decode.d1.loss_mask: 0.2455, decode.d1.loss_dice: 0.6811, decode.d2.loss_cls: 0.1258, decode.d2.loss_mask: 0.2440, decode.d2.loss_dice: 0.6734, decode.d3.loss_cls: 0.1243, decode.d3.loss_mask: 0.2437, decode.d3.loss_dice: 0.6683, decode.d4.loss_cls: 0.1210, decode.d4.loss_mask: 0.2427, decode.d4.loss_dice: 0.6715, decode.d5.loss_cls: 0.1171, decode.d5.loss_mask: 0.2430, decode.d5.loss_dice: 0.6653, decode.d6.loss_cls: 0.1164, decode.d6.loss_mask: 0.2434, decode.d6.loss_dice: 0.6681, decode.d7.loss_cls: 0.1165, decode.d7.loss_mask: 0.2434, decode.d7.loss_dice: 0.6687, decode.d8.loss_cls: 0.1111, decode.d8.loss_mask: 0.2433, decode.d8.loss_dice: 0.6656, loss: 10.6205 +2022-05-10 03:34:40,271 - mmseg - INFO - Iter [18700/80000] lr: 1.100e-06, eta: 1 day, 9:09:17, time: 1.794, data_time: 0.022, memory: 64699, decode.loss_cls: 0.1239, decode.loss_mask: 0.2399, decode.loss_dice: 0.6801, decode.d0.loss_cls: 0.3382, decode.d0.loss_mask: 0.2502, decode.d0.loss_dice: 0.7292, decode.d1.loss_cls: 0.1439, decode.d1.loss_mask: 0.2426, decode.d1.loss_dice: 0.6966, 
decode.d2.loss_cls: 0.1365, decode.d2.loss_mask: 0.2410, decode.d2.loss_dice: 0.6926, decode.d3.loss_cls: 0.1251, decode.d3.loss_mask: 0.2402, decode.d3.loss_dice: 0.6840, decode.d4.loss_cls: 0.1278, decode.d4.loss_mask: 0.2407, decode.d4.loss_dice: 0.6803, decode.d5.loss_cls: 0.1267, decode.d5.loss_mask: 0.2401, decode.d5.loss_dice: 0.6818, decode.d6.loss_cls: 0.1260, decode.d6.loss_mask: 0.2400, decode.d6.loss_dice: 0.6831, decode.d7.loss_cls: 0.1251, decode.d7.loss_mask: 0.2400, decode.d7.loss_dice: 0.6808, decode.d8.loss_cls: 0.1231, decode.d8.loss_mask: 0.2403, decode.d8.loss_dice: 0.6812, loss: 10.8010 +2022-05-10 03:36:09,530 - mmseg - INFO - Iter [18750/80000] lr: 1.099e-06, eta: 1 day, 9:07:13, time: 1.785, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1034, decode.loss_mask: 0.2409, decode.loss_dice: 0.6676, decode.d0.loss_cls: 0.3424, decode.d0.loss_mask: 0.2527, decode.d0.loss_dice: 0.7161, decode.d1.loss_cls: 0.1374, decode.d1.loss_mask: 0.2432, decode.d1.loss_dice: 0.6859, decode.d2.loss_cls: 0.1179, decode.d2.loss_mask: 0.2420, decode.d2.loss_dice: 0.6777, decode.d3.loss_cls: 0.1115, decode.d3.loss_mask: 0.2416, decode.d3.loss_dice: 0.6718, decode.d4.loss_cls: 0.1070, decode.d4.loss_mask: 0.2419, decode.d4.loss_dice: 0.6693, decode.d5.loss_cls: 0.1079, decode.d5.loss_mask: 0.2417, decode.d5.loss_dice: 0.6740, decode.d6.loss_cls: 0.1096, decode.d6.loss_mask: 0.2414, decode.d6.loss_dice: 0.6677, decode.d7.loss_cls: 0.1056, decode.d7.loss_mask: 0.2406, decode.d7.loss_dice: 0.6721, decode.d8.loss_cls: 0.1029, decode.d8.loss_mask: 0.2403, decode.d8.loss_dice: 0.6675, loss: 10.5414 +2022-05-10 03:37:40,610 - mmseg - INFO - Iter [18800/80000] lr: 1.098e-06, eta: 1 day, 9:05:15, time: 1.822, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1121, decode.loss_mask: 0.2378, decode.loss_dice: 0.6642, decode.d0.loss_cls: 0.3432, decode.d0.loss_mask: 0.2485, decode.d0.loss_dice: 0.7185, decode.d1.loss_cls: 0.1409, decode.d1.loss_mask: 0.2393, 
decode.d1.loss_dice: 0.6794, decode.d2.loss_cls: 0.1223, decode.d2.loss_mask: 0.2384, decode.d2.loss_dice: 0.6731, decode.d3.loss_cls: 0.1160, decode.d3.loss_mask: 0.2377, decode.d3.loss_dice: 0.6662, decode.d4.loss_cls: 0.1210, decode.d4.loss_mask: 0.2380, decode.d4.loss_dice: 0.6645, decode.d5.loss_cls: 0.1226, decode.d5.loss_mask: 0.2384, decode.d5.loss_dice: 0.6675, decode.d6.loss_cls: 0.1172, decode.d6.loss_mask: 0.2370, decode.d6.loss_dice: 0.6588, decode.d7.loss_cls: 0.1143, decode.d7.loss_mask: 0.2378, decode.d7.loss_dice: 0.6629, decode.d8.loss_cls: 0.1158, decode.d8.loss_mask: 0.2380, decode.d8.loss_dice: 0.6639, loss: 10.5354 +2022-05-10 03:39:09,584 - mmseg - INFO - Iter [18850/80000] lr: 1.098e-06, eta: 1 day, 9:03:11, time: 1.779, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1033, decode.loss_mask: 0.2390, decode.loss_dice: 0.6715, decode.d0.loss_cls: 0.3250, decode.d0.loss_mask: 0.2510, decode.d0.loss_dice: 0.7120, decode.d1.loss_cls: 0.1203, decode.d1.loss_mask: 0.2411, decode.d1.loss_dice: 0.6773, decode.d2.loss_cls: 0.1227, decode.d2.loss_mask: 0.2396, decode.d2.loss_dice: 0.6775, decode.d3.loss_cls: 0.1019, decode.d3.loss_mask: 0.2399, decode.d3.loss_dice: 0.6680, decode.d4.loss_cls: 0.1085, decode.d4.loss_mask: 0.2393, decode.d4.loss_dice: 0.6669, decode.d5.loss_cls: 0.1069, decode.d5.loss_mask: 0.2388, decode.d5.loss_dice: 0.6724, decode.d6.loss_cls: 0.1042, decode.d6.loss_mask: 0.2395, decode.d6.loss_dice: 0.6653, decode.d7.loss_cls: 0.1047, decode.d7.loss_mask: 0.2388, decode.d7.loss_dice: 0.6700, decode.d8.loss_cls: 0.1092, decode.d8.loss_mask: 0.2387, decode.d8.loss_dice: 0.6682, loss: 10.4617 +2022-05-10 03:40:39,090 - mmseg - INFO - Iter [18900/80000] lr: 1.097e-06, eta: 1 day, 9:01:09, time: 1.790, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1162, decode.loss_mask: 0.2443, decode.loss_dice: 0.6669, decode.d0.loss_cls: 0.3536, decode.d0.loss_mask: 0.2558, decode.d0.loss_dice: 0.7146, decode.d1.loss_cls: 0.1326, 
decode.d1.loss_mask: 0.2465, decode.d1.loss_dice: 0.6911, decode.d2.loss_cls: 0.1281, decode.d2.loss_mask: 0.2439, decode.d2.loss_dice: 0.6764, decode.d3.loss_cls: 0.1247, decode.d3.loss_mask: 0.2433, decode.d3.loss_dice: 0.6699, decode.d4.loss_cls: 0.1175, decode.d4.loss_mask: 0.2433, decode.d4.loss_dice: 0.6688, decode.d5.loss_cls: 0.1208, decode.d5.loss_mask: 0.2436, decode.d5.loss_dice: 0.6699, decode.d6.loss_cls: 0.1214, decode.d6.loss_mask: 0.2452, decode.d6.loss_dice: 0.6701, decode.d7.loss_cls: 0.1186, decode.d7.loss_mask: 0.2443, decode.d7.loss_dice: 0.6687, decode.d8.loss_cls: 0.1141, decode.d8.loss_mask: 0.2446, decode.d8.loss_dice: 0.6710, loss: 10.6702 +2022-05-10 03:42:07,217 - mmseg - INFO - Iter [18950/80000] lr: 1.096e-06, eta: 1 day, 8:59:02, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1307, decode.loss_mask: 0.2400, decode.loss_dice: 0.6681, decode.d0.loss_cls: 0.3652, decode.d0.loss_mask: 0.2533, decode.d0.loss_dice: 0.7182, decode.d1.loss_cls: 0.1633, decode.d1.loss_mask: 0.2428, decode.d1.loss_dice: 0.6857, decode.d2.loss_cls: 0.1421, decode.d2.loss_mask: 0.2420, decode.d2.loss_dice: 0.6791, decode.d3.loss_cls: 0.1353, decode.d3.loss_mask: 0.2413, decode.d3.loss_dice: 0.6739, decode.d4.loss_cls: 0.1339, decode.d4.loss_mask: 0.2410, decode.d4.loss_dice: 0.6679, decode.d5.loss_cls: 0.1382, decode.d5.loss_mask: 0.2409, decode.d5.loss_dice: 0.6741, decode.d6.loss_cls: 0.1293, decode.d6.loss_mask: 0.2401, decode.d6.loss_dice: 0.6730, decode.d7.loss_cls: 0.1341, decode.d7.loss_mask: 0.2401, decode.d7.loss_dice: 0.6734, decode.d8.loss_cls: 0.1400, decode.d8.loss_mask: 0.2408, decode.d8.loss_dice: 0.6711, loss: 10.8188 +2022-05-10 03:43:37,746 - mmseg - INFO - Saving checkpoint at 19000 iterations +2022-05-10 03:44:07,998 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 03:44:08,006 - mmseg - INFO - Iter [19000/80000] lr: 1.095e-06, eta: 1 day, 8:58:40, time: 2.413, data_time: 0.066, 
memory: 64699, decode.loss_cls: 0.1148, decode.loss_mask: 0.2475, decode.loss_dice: 0.6773, decode.d0.loss_cls: 0.3390, decode.d0.loss_mask: 0.2595, decode.d0.loss_dice: 0.7277, decode.d1.loss_cls: 0.1448, decode.d1.loss_mask: 0.2490, decode.d1.loss_dice: 0.6897, decode.d2.loss_cls: 0.1303, decode.d2.loss_mask: 0.2476, decode.d2.loss_dice: 0.6807, decode.d3.loss_cls: 0.1200, decode.d3.loss_mask: 0.2478, decode.d3.loss_dice: 0.6760, decode.d4.loss_cls: 0.1211, decode.d4.loss_mask: 0.2475, decode.d4.loss_dice: 0.6747, decode.d5.loss_cls: 0.1186, decode.d5.loss_mask: 0.2478, decode.d5.loss_dice: 0.6796, decode.d6.loss_cls: 0.1162, decode.d6.loss_mask: 0.2465, decode.d6.loss_dice: 0.6754, decode.d7.loss_cls: 0.1199, decode.d7.loss_mask: 0.2472, decode.d7.loss_dice: 0.6777, decode.d8.loss_cls: 0.1234, decode.d8.loss_mask: 0.2466, decode.d8.loss_dice: 0.6744, loss: 10.7683 +2022-05-10 03:46:04,058 - mmseg - INFO - per class results: +2022-05-10 03:46:04,064 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.49 | 98.94 | +| sidewalk | 88.17 | 95.09 | +| building | 94.36 | 96.71 | +| wall | 68.15 | 77.93 | +| fence | 77.79 | 87.56 | +| pole | 71.38 | 85.45 | +| traffic light | 77.45 | 87.88 | +| traffic sign | 83.99 | 91.35 | +| vegetation | 93.3 | 96.89 | +| terrain | 66.82 | 80.55 | +| sky | 95.99 | 98.23 | +| person | 86.99 | 94.39 | +| rider | 75.0 | 86.65 | +| car | 96.34 | 98.25 | +| truck | 90.17 | 93.61 | +| bus | 93.83 | 96.71 | +| train | 88.42 | 90.89 | +| motorcycle | 77.74 | 87.42 | +| bicycle | 82.84 | 91.93 | ++---------------+-------+-------+ +2022-05-10 03:46:04,064 - mmseg - INFO - Summary: +2022-05-10 03:46:04,064 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.97 | 84.59 | 91.39 | ++-------+-------+-------+ +2022-05-10 03:46:04,067 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 
03:46:04,067 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8459, mAcc: 0.9139, IoU.road: 0.9849, IoU.sidewalk: 0.8817, IoU.building: 0.9436, IoU.wall: 0.6815, IoU.fence: 0.7779, IoU.pole: 0.7138, IoU.traffic light: 0.7745, IoU.traffic sign: 0.8399, IoU.vegetation: 0.9330, IoU.terrain: 0.6682, IoU.sky: 0.9599, IoU.person: 0.8699, IoU.rider: 0.7500, IoU.car: 0.9634, IoU.truck: 0.9017, IoU.bus: 0.9383, IoU.train: 0.8842, IoU.motorcycle: 0.7774, IoU.bicycle: 0.8284, Acc.road: 0.9894, Acc.sidewalk: 0.9509, Acc.building: 0.9671, Acc.wall: 0.7793, Acc.fence: 0.8756, Acc.pole: 0.8545, Acc.traffic light: 0.8788, Acc.traffic sign: 0.9135, Acc.vegetation: 0.9689, Acc.terrain: 0.8055, Acc.sky: 0.9823, Acc.person: 0.9439, Acc.rider: 0.8665, Acc.car: 0.9825, Acc.truck: 0.9361, Acc.bus: 0.9671, Acc.train: 0.9089, Acc.motorcycle: 0.8742, Acc.bicycle: 0.9193 +2022-05-10 03:47:33,291 - mmseg - INFO - Iter [19050/80000] lr: 1.094e-06, eta: 1 day, 9:02:48, time: 4.108, data_time: 2.343, memory: 64699, decode.loss_cls: 0.1217, decode.loss_mask: 0.2418, decode.loss_dice: 0.6566, decode.d0.loss_cls: 0.3534, decode.d0.loss_mask: 0.2551, decode.d0.loss_dice: 0.7052, decode.d1.loss_cls: 0.1411, decode.d1.loss_mask: 0.2463, decode.d1.loss_dice: 0.6809, decode.d2.loss_cls: 0.1332, decode.d2.loss_mask: 0.2434, decode.d2.loss_dice: 0.6688, decode.d3.loss_cls: 0.1284, decode.d3.loss_mask: 0.2417, decode.d3.loss_dice: 0.6664, decode.d4.loss_cls: 0.1247, decode.d4.loss_mask: 0.2430, decode.d4.loss_dice: 0.6621, decode.d5.loss_cls: 0.1283, decode.d5.loss_mask: 0.2420, decode.d5.loss_dice: 0.6653, decode.d6.loss_cls: 0.1278, decode.d6.loss_mask: 0.2421, decode.d6.loss_dice: 0.6607, decode.d7.loss_cls: 0.1268, decode.d7.loss_mask: 0.2431, decode.d7.loss_dice: 0.6579, decode.d8.loss_cls: 0.1279, decode.d8.loss_mask: 0.2431, decode.d8.loss_dice: 0.6621, loss: 10.6409 +2022-05-10 03:49:00,875 - mmseg - INFO - Iter [19100/80000] lr: 1.093e-06, eta: 1 day, 9:00:39, time: 1.752, data_time: 0.019, 
memory: 64699, decode.loss_cls: 0.1199, decode.loss_mask: 0.2418, decode.loss_dice: 0.6615, decode.d0.loss_cls: 0.3439, decode.d0.loss_mask: 0.2540, decode.d0.loss_dice: 0.7077, decode.d1.loss_cls: 0.1330, decode.d1.loss_mask: 0.2434, decode.d1.loss_dice: 0.6808, decode.d2.loss_cls: 0.1261, decode.d2.loss_mask: 0.2418, decode.d2.loss_dice: 0.6667, decode.d3.loss_cls: 0.1214, decode.d3.loss_mask: 0.2430, decode.d3.loss_dice: 0.6630, decode.d4.loss_cls: 0.1284, decode.d4.loss_mask: 0.2428, decode.d4.loss_dice: 0.6602, decode.d5.loss_cls: 0.1199, decode.d5.loss_mask: 0.2428, decode.d5.loss_dice: 0.6587, decode.d6.loss_cls: 0.1264, decode.d6.loss_mask: 0.2421, decode.d6.loss_dice: 0.6598, decode.d7.loss_cls: 0.1179, decode.d7.loss_mask: 0.2418, decode.d7.loss_dice: 0.6628, decode.d8.loss_cls: 0.1228, decode.d8.loss_mask: 0.2418, decode.d8.loss_dice: 0.6604, loss: 10.5764 +2022-05-10 03:50:29,155 - mmseg - INFO - Iter [19150/80000] lr: 1.092e-06, eta: 1 day, 8:58:32, time: 1.766, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1282, decode.loss_mask: 0.2453, decode.loss_dice: 0.6736, decode.d0.loss_cls: 0.3460, decode.d0.loss_mask: 0.2574, decode.d0.loss_dice: 0.7164, decode.d1.loss_cls: 0.1523, decode.d1.loss_mask: 0.2479, decode.d1.loss_dice: 0.6895, decode.d2.loss_cls: 0.1406, decode.d2.loss_mask: 0.2464, decode.d2.loss_dice: 0.6783, decode.d3.loss_cls: 0.1350, decode.d3.loss_mask: 0.2450, decode.d3.loss_dice: 0.6757, decode.d4.loss_cls: 0.1279, decode.d4.loss_mask: 0.2460, decode.d4.loss_dice: 0.6723, decode.d5.loss_cls: 0.1269, decode.d5.loss_mask: 0.2462, decode.d5.loss_dice: 0.6724, decode.d6.loss_cls: 0.1230, decode.d6.loss_mask: 0.2453, decode.d6.loss_dice: 0.6748, decode.d7.loss_cls: 0.1216, decode.d7.loss_mask: 0.2454, decode.d7.loss_dice: 0.6761, decode.d8.loss_cls: 0.1240, decode.d8.loss_mask: 0.2456, decode.d8.loss_dice: 0.6781, loss: 10.8033 +2022-05-10 03:52:00,522 - mmseg - INFO - Iter [19200/80000] lr: 1.091e-06, eta: 1 day, 8:56:35, time: 1.827, 
data_time: 0.065, memory: 64699, decode.loss_cls: 0.1011, decode.loss_mask: 0.2434, decode.loss_dice: 0.6593, decode.d0.loss_cls: 0.3514, decode.d0.loss_mask: 0.2552, decode.d0.loss_dice: 0.7040, decode.d1.loss_cls: 0.1293, decode.d1.loss_mask: 0.2460, decode.d1.loss_dice: 0.6719, decode.d2.loss_cls: 0.1178, decode.d2.loss_mask: 0.2447, decode.d2.loss_dice: 0.6660, decode.d3.loss_cls: 0.1094, decode.d3.loss_mask: 0.2439, decode.d3.loss_dice: 0.6601, decode.d4.loss_cls: 0.1085, decode.d4.loss_mask: 0.2431, decode.d4.loss_dice: 0.6605, decode.d5.loss_cls: 0.1073, decode.d5.loss_mask: 0.2443, decode.d5.loss_dice: 0.6608, decode.d6.loss_cls: 0.1104, decode.d6.loss_mask: 0.2435, decode.d6.loss_dice: 0.6590, decode.d7.loss_cls: 0.1066, decode.d7.loss_mask: 0.2433, decode.d7.loss_dice: 0.6550, decode.d8.loss_cls: 0.1079, decode.d8.loss_mask: 0.2428, decode.d8.loss_dice: 0.6621, loss: 10.4584 +2022-05-10 03:53:28,044 - mmseg - INFO - Iter [19250/80000] lr: 1.090e-06, eta: 1 day, 8:54:25, time: 1.749, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1075, decode.loss_mask: 0.2444, decode.loss_dice: 0.6797, decode.d0.loss_cls: 0.3223, decode.d0.loss_mask: 0.2544, decode.d0.loss_dice: 0.7132, decode.d1.loss_cls: 0.1300, decode.d1.loss_mask: 0.2476, decode.d1.loss_dice: 0.6870, decode.d2.loss_cls: 0.1095, decode.d2.loss_mask: 0.2454, decode.d2.loss_dice: 0.6813, decode.d3.loss_cls: 0.1103, decode.d3.loss_mask: 0.2443, decode.d3.loss_dice: 0.6777, decode.d4.loss_cls: 0.1010, decode.d4.loss_mask: 0.2449, decode.d4.loss_dice: 0.6805, decode.d5.loss_cls: 0.1074, decode.d5.loss_mask: 0.2443, decode.d5.loss_dice: 0.6815, decode.d6.loss_cls: 0.0963, decode.d6.loss_mask: 0.2455, decode.d6.loss_dice: 0.6728, decode.d7.loss_cls: 0.0998, decode.d7.loss_mask: 0.2448, decode.d7.loss_dice: 0.6797, decode.d8.loss_cls: 0.1016, decode.d8.loss_mask: 0.2450, decode.d8.loss_dice: 0.6803, loss: 10.5800 +2022-05-10 03:54:57,049 - mmseg - INFO - Iter [19300/80000] lr: 1.089e-06, eta: 1 day, 
8:52:21, time: 1.781, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1188, decode.loss_mask: 0.2418, decode.loss_dice: 0.6630, decode.d0.loss_cls: 0.3452, decode.d0.loss_mask: 0.2539, decode.d0.loss_dice: 0.7143, decode.d1.loss_cls: 0.1489, decode.d1.loss_mask: 0.2442, decode.d1.loss_dice: 0.6779, decode.d2.loss_cls: 0.1356, decode.d2.loss_mask: 0.2422, decode.d2.loss_dice: 0.6667, decode.d3.loss_cls: 0.1254, decode.d3.loss_mask: 0.2425, decode.d3.loss_dice: 0.6669, decode.d4.loss_cls: 0.1221, decode.d4.loss_mask: 0.2429, decode.d4.loss_dice: 0.6650, decode.d5.loss_cls: 0.1280, decode.d5.loss_mask: 0.2415, decode.d5.loss_dice: 0.6665, decode.d6.loss_cls: 0.1256, decode.d6.loss_mask: 0.2414, decode.d6.loss_dice: 0.6641, decode.d7.loss_cls: 0.1275, decode.d7.loss_mask: 0.2418, decode.d7.loss_dice: 0.6599, decode.d8.loss_cls: 0.1170, decode.d8.loss_mask: 0.2413, decode.d8.loss_dice: 0.6592, loss: 10.6311 +2022-05-10 03:56:27,220 - mmseg - INFO - Iter [19350/80000] lr: 1.089e-06, eta: 1 day, 8:50:21, time: 1.803, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1091, decode.loss_mask: 0.2390, decode.loss_dice: 0.6735, decode.d0.loss_cls: 0.3355, decode.d0.loss_mask: 0.2517, decode.d0.loss_dice: 0.7215, decode.d1.loss_cls: 0.1358, decode.d1.loss_mask: 0.2418, decode.d1.loss_dice: 0.6902, decode.d2.loss_cls: 0.1183, decode.d2.loss_mask: 0.2401, decode.d2.loss_dice: 0.6846, decode.d3.loss_cls: 0.1159, decode.d3.loss_mask: 0.2402, decode.d3.loss_dice: 0.6762, decode.d4.loss_cls: 0.1148, decode.d4.loss_mask: 0.2403, decode.d4.loss_dice: 0.6740, decode.d5.loss_cls: 0.1185, decode.d5.loss_mask: 0.2394, decode.d5.loss_dice: 0.6794, decode.d6.loss_cls: 0.1129, decode.d6.loss_mask: 0.2399, decode.d6.loss_dice: 0.6749, decode.d7.loss_cls: 0.1101, decode.d7.loss_mask: 0.2396, decode.d7.loss_dice: 0.6761, decode.d8.loss_cls: 0.1102, decode.d8.loss_mask: 0.2394, decode.d8.loss_dice: 0.6770, loss: 10.6197 +2022-05-10 03:57:56,474 - mmseg - INFO - Iter [19400/80000] lr: 
1.088e-06, eta: 1 day, 8:48:18, time: 1.785, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1309, decode.loss_mask: 0.2472, decode.loss_dice: 0.6611, decode.d0.loss_cls: 0.3561, decode.d0.loss_mask: 0.2600, decode.d0.loss_dice: 0.7127, decode.d1.loss_cls: 0.1536, decode.d1.loss_mask: 0.2491, decode.d1.loss_dice: 0.6759, decode.d2.loss_cls: 0.1395, decode.d2.loss_mask: 0.2477, decode.d2.loss_dice: 0.6724, decode.d3.loss_cls: 0.1314, decode.d3.loss_mask: 0.2474, decode.d3.loss_dice: 0.6658, decode.d4.loss_cls: 0.1341, decode.d4.loss_mask: 0.2475, decode.d4.loss_dice: 0.6636, decode.d5.loss_cls: 0.1292, decode.d5.loss_mask: 0.2474, decode.d5.loss_dice: 0.6662, decode.d6.loss_cls: 0.1289, decode.d6.loss_mask: 0.2482, decode.d6.loss_dice: 0.6652, decode.d7.loss_cls: 0.1304, decode.d7.loss_mask: 0.2475, decode.d7.loss_dice: 0.6671, decode.d8.loss_cls: 0.1248, decode.d8.loss_mask: 0.2479, decode.d8.loss_dice: 0.6627, loss: 10.7614 +2022-05-10 03:59:25,590 - mmseg - INFO - Iter [19450/80000] lr: 1.087e-06, eta: 1 day, 8:46:14, time: 1.782, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1152, decode.loss_mask: 0.2392, decode.loss_dice: 0.6653, decode.d0.loss_cls: 0.3502, decode.d0.loss_mask: 0.2546, decode.d0.loss_dice: 0.7142, decode.d1.loss_cls: 0.1471, decode.d1.loss_mask: 0.2440, decode.d1.loss_dice: 0.6808, decode.d2.loss_cls: 0.1386, decode.d2.loss_mask: 0.2414, decode.d2.loss_dice: 0.6715, decode.d3.loss_cls: 0.1250, decode.d3.loss_mask: 0.2399, decode.d3.loss_dice: 0.6666, decode.d4.loss_cls: 0.1260, decode.d4.loss_mask: 0.2406, decode.d4.loss_dice: 0.6650, decode.d5.loss_cls: 0.1246, decode.d5.loss_mask: 0.2411, decode.d5.loss_dice: 0.6648, decode.d6.loss_cls: 0.1233, decode.d6.loss_mask: 0.2391, decode.d6.loss_dice: 0.6693, decode.d7.loss_cls: 0.1247, decode.d7.loss_mask: 0.2398, decode.d7.loss_dice: 0.6674, decode.d8.loss_cls: 0.1142, decode.d8.loss_mask: 0.2396, decode.d8.loss_dice: 0.6645, loss: 10.6374 +2022-05-10 04:00:53,037 - mmseg - INFO - Iter 
[19500/80000] lr: 1.086e-06, eta: 1 day, 8:44:06, time: 1.749, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1188, decode.loss_mask: 0.2453, decode.loss_dice: 0.6639, decode.d0.loss_cls: 0.3423, decode.d0.loss_mask: 0.2545, decode.d0.loss_dice: 0.7080, decode.d1.loss_cls: 0.1380, decode.d1.loss_mask: 0.2456, decode.d1.loss_dice: 0.6757, decode.d2.loss_cls: 0.1266, decode.d2.loss_mask: 0.2457, decode.d2.loss_dice: 0.6653, decode.d3.loss_cls: 0.1170, decode.d3.loss_mask: 0.2443, decode.d3.loss_dice: 0.6619, decode.d4.loss_cls: 0.1196, decode.d4.loss_mask: 0.2453, decode.d4.loss_dice: 0.6607, decode.d5.loss_cls: 0.1202, decode.d5.loss_mask: 0.2465, decode.d5.loss_dice: 0.6668, decode.d6.loss_cls: 0.1145, decode.d6.loss_mask: 0.2443, decode.d6.loss_dice: 0.6646, decode.d7.loss_cls: 0.1204, decode.d7.loss_mask: 0.2454, decode.d7.loss_dice: 0.6625, decode.d8.loss_cls: 0.1201, decode.d8.loss_mask: 0.2454, decode.d8.loss_dice: 0.6628, loss: 10.5919 +2022-05-10 04:02:24,763 - mmseg - INFO - Iter [19550/80000] lr: 1.085e-06, eta: 1 day, 8:42:11, time: 1.834, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1263, decode.loss_mask: 0.2440, decode.loss_dice: 0.6673, decode.d0.loss_cls: 0.3523, decode.d0.loss_mask: 0.2618, decode.d0.loss_dice: 0.7111, decode.d1.loss_cls: 0.1452, decode.d1.loss_mask: 0.2480, decode.d1.loss_dice: 0.6824, decode.d2.loss_cls: 0.1312, decode.d2.loss_mask: 0.2453, decode.d2.loss_dice: 0.6752, decode.d3.loss_cls: 0.1230, decode.d3.loss_mask: 0.2450, decode.d3.loss_dice: 0.6679, decode.d4.loss_cls: 0.1251, decode.d4.loss_mask: 0.2452, decode.d4.loss_dice: 0.6663, decode.d5.loss_cls: 0.1241, decode.d5.loss_mask: 0.2451, decode.d5.loss_dice: 0.6652, decode.d6.loss_cls: 0.1213, decode.d6.loss_mask: 0.2447, decode.d6.loss_dice: 0.6652, decode.d7.loss_cls: 0.1192, decode.d7.loss_mask: 0.2454, decode.d7.loss_dice: 0.6624, decode.d8.loss_cls: 0.1235, decode.d8.loss_mask: 0.2449, decode.d8.loss_dice: 0.6667, loss: 10.6903 +2022-05-10 04:03:54,047 - 
mmseg - INFO - Iter [19600/80000] lr: 1.084e-06, eta: 1 day, 8:40:09, time: 1.786, data_time: 0.022, memory: 64699, decode.loss_cls: 0.1171, decode.loss_mask: 0.2409, decode.loss_dice: 0.6698, decode.d0.loss_cls: 0.3506, decode.d0.loss_mask: 0.2517, decode.d0.loss_dice: 0.7220, decode.d1.loss_cls: 0.1356, decode.d1.loss_mask: 0.2423, decode.d1.loss_dice: 0.6908, decode.d2.loss_cls: 0.1281, decode.d2.loss_mask: 0.2425, decode.d2.loss_dice: 0.6783, decode.d3.loss_cls: 0.1243, decode.d3.loss_mask: 0.2407, decode.d3.loss_dice: 0.6736, decode.d4.loss_cls: 0.1189, decode.d4.loss_mask: 0.2411, decode.d4.loss_dice: 0.6729, decode.d5.loss_cls: 0.1189, decode.d5.loss_mask: 0.2411, decode.d5.loss_dice: 0.6700, decode.d6.loss_cls: 0.1249, decode.d6.loss_mask: 0.2405, decode.d6.loss_dice: 0.6726, decode.d7.loss_cls: 0.1110, decode.d7.loss_mask: 0.2409, decode.d7.loss_dice: 0.6689, decode.d8.loss_cls: 0.1191, decode.d8.loss_mask: 0.2412, decode.d8.loss_dice: 0.6753, loss: 10.6655 +2022-05-10 04:05:22,100 - mmseg - INFO - Iter [19650/80000] lr: 1.083e-06, eta: 1 day, 8:38:03, time: 1.761, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1104, decode.loss_mask: 0.2379, decode.loss_dice: 0.6722, decode.d0.loss_cls: 0.3335, decode.d0.loss_mask: 0.2499, decode.d0.loss_dice: 0.7210, decode.d1.loss_cls: 0.1380, decode.d1.loss_mask: 0.2401, decode.d1.loss_dice: 0.6849, decode.d2.loss_cls: 0.1262, decode.d2.loss_mask: 0.2387, decode.d2.loss_dice: 0.6777, decode.d3.loss_cls: 0.1203, decode.d3.loss_mask: 0.2389, decode.d3.loss_dice: 0.6679, decode.d4.loss_cls: 0.1147, decode.d4.loss_mask: 0.2382, decode.d4.loss_dice: 0.6719, decode.d5.loss_cls: 0.1153, decode.d5.loss_mask: 0.2385, decode.d5.loss_dice: 0.6727, decode.d6.loss_cls: 0.1091, decode.d6.loss_mask: 0.2382, decode.d6.loss_dice: 0.6735, decode.d7.loss_cls: 0.1158, decode.d7.loss_mask: 0.2384, decode.d7.loss_dice: 0.6727, decode.d8.loss_cls: 0.1114, decode.d8.loss_mask: 0.2381, decode.d8.loss_dice: 0.6723, loss: 10.5784 
+2022-05-10 04:06:51,045 - mmseg - INFO - Iter [19700/80000] lr: 1.082e-06, eta: 1 day, 8:36:00, time: 1.779, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1090, decode.loss_mask: 0.2430, decode.loss_dice: 0.6669, decode.d0.loss_cls: 0.3371, decode.d0.loss_mask: 0.2538, decode.d0.loss_dice: 0.7051, decode.d1.loss_cls: 0.1349, decode.d1.loss_mask: 0.2429, decode.d1.loss_dice: 0.6808, decode.d2.loss_cls: 0.1255, decode.d2.loss_mask: 0.2439, decode.d2.loss_dice: 0.6761, decode.d3.loss_cls: 0.1244, decode.d3.loss_mask: 0.2427, decode.d3.loss_dice: 0.6676, decode.d4.loss_cls: 0.1180, decode.d4.loss_mask: 0.2434, decode.d4.loss_dice: 0.6659, decode.d5.loss_cls: 0.1172, decode.d5.loss_mask: 0.2437, decode.d5.loss_dice: 0.6709, decode.d6.loss_cls: 0.1152, decode.d6.loss_mask: 0.2431, decode.d6.loss_dice: 0.6678, decode.d7.loss_cls: 0.1098, decode.d7.loss_mask: 0.2418, decode.d7.loss_dice: 0.6680, decode.d8.loss_cls: 0.1119, decode.d8.loss_mask: 0.2427, decode.d8.loss_dice: 0.6704, loss: 10.5833 +2022-05-10 04:08:22,952 - mmseg - INFO - Iter [19750/80000] lr: 1.081e-06, eta: 1 day, 8:34:06, time: 1.838, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1145, decode.loss_mask: 0.2391, decode.loss_dice: 0.6498, decode.d0.loss_cls: 0.3401, decode.d0.loss_mask: 0.2513, decode.d0.loss_dice: 0.7009, decode.d1.loss_cls: 0.1303, decode.d1.loss_mask: 0.2433, decode.d1.loss_dice: 0.6629, decode.d2.loss_cls: 0.1188, decode.d2.loss_mask: 0.2404, decode.d2.loss_dice: 0.6586, decode.d3.loss_cls: 0.1079, decode.d3.loss_mask: 0.2406, decode.d3.loss_dice: 0.6520, decode.d4.loss_cls: 0.1200, decode.d4.loss_mask: 0.2395, decode.d4.loss_dice: 0.6526, decode.d5.loss_cls: 0.1142, decode.d5.loss_mask: 0.2398, decode.d5.loss_dice: 0.6547, decode.d6.loss_cls: 0.1134, decode.d6.loss_mask: 0.2402, decode.d6.loss_dice: 0.6508, decode.d7.loss_cls: 0.1113, decode.d7.loss_mask: 0.2395, decode.d7.loss_dice: 0.6494, decode.d8.loss_cls: 0.1134, decode.d8.loss_mask: 0.2395, decode.d8.loss_dice: 
0.6542, loss: 10.3832 +2022-05-10 04:09:51,074 - mmseg - INFO - Iter [19800/80000] lr: 1.080e-06, eta: 1 day, 8:32:01, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1225, decode.loss_mask: 0.2462, decode.loss_dice: 0.6715, decode.d0.loss_cls: 0.3387, decode.d0.loss_mask: 0.2588, decode.d0.loss_dice: 0.7253, decode.d1.loss_cls: 0.1422, decode.d1.loss_mask: 0.2487, decode.d1.loss_dice: 0.6896, decode.d2.loss_cls: 0.1320, decode.d2.loss_mask: 0.2468, decode.d2.loss_dice: 0.6826, decode.d3.loss_cls: 0.1298, decode.d3.loss_mask: 0.2464, decode.d3.loss_dice: 0.6678, decode.d4.loss_cls: 0.1252, decode.d4.loss_mask: 0.2462, decode.d4.loss_dice: 0.6755, decode.d5.loss_cls: 0.1291, decode.d5.loss_mask: 0.2461, decode.d5.loss_dice: 0.6769, decode.d6.loss_cls: 0.1280, decode.d6.loss_mask: 0.2470, decode.d6.loss_dice: 0.6734, decode.d7.loss_cls: 0.1284, decode.d7.loss_mask: 0.2466, decode.d7.loss_dice: 0.6737, decode.d8.loss_cls: 0.1275, decode.d8.loss_mask: 0.2473, decode.d8.loss_dice: 0.6748, loss: 10.7946 +2022-05-10 04:11:20,185 - mmseg - INFO - Iter [19850/80000] lr: 1.080e-06, eta: 1 day, 8:29:59, time: 1.782, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1173, decode.loss_mask: 0.2375, decode.loss_dice: 0.6653, decode.d0.loss_cls: 0.3327, decode.d0.loss_mask: 0.2498, decode.d0.loss_dice: 0.7057, decode.d1.loss_cls: 0.1317, decode.d1.loss_mask: 0.2403, decode.d1.loss_dice: 0.6792, decode.d2.loss_cls: 0.1288, decode.d2.loss_mask: 0.2387, decode.d2.loss_dice: 0.6685, decode.d3.loss_cls: 0.1159, decode.d3.loss_mask: 0.2380, decode.d3.loss_dice: 0.6678, decode.d4.loss_cls: 0.1128, decode.d4.loss_mask: 0.2382, decode.d4.loss_dice: 0.6688, decode.d5.loss_cls: 0.1151, decode.d5.loss_mask: 0.2386, decode.d5.loss_dice: 0.6647, decode.d6.loss_cls: 0.1081, decode.d6.loss_mask: 0.2379, decode.d6.loss_dice: 0.6611, decode.d7.loss_cls: 0.1152, decode.d7.loss_mask: 0.2378, decode.d7.loss_dice: 0.6622, decode.d8.loss_cls: 0.1108, decode.d8.loss_mask: 0.2374, 
decode.d8.loss_dice: 0.6657, loss: 10.4913 +2022-05-10 04:12:48,288 - mmseg - INFO - Iter [19900/80000] lr: 1.079e-06, eta: 1 day, 8:27:54, time: 1.762, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1169, decode.loss_mask: 0.2455, decode.loss_dice: 0.6689, decode.d0.loss_cls: 0.3415, decode.d0.loss_mask: 0.2599, decode.d0.loss_dice: 0.7195, decode.d1.loss_cls: 0.1385, decode.d1.loss_mask: 0.2473, decode.d1.loss_dice: 0.6889, decode.d2.loss_cls: 0.1406, decode.d2.loss_mask: 0.2468, decode.d2.loss_dice: 0.6754, decode.d3.loss_cls: 0.1256, decode.d3.loss_mask: 0.2467, decode.d3.loss_dice: 0.6710, decode.d4.loss_cls: 0.1245, decode.d4.loss_mask: 0.2465, decode.d4.loss_dice: 0.6746, decode.d5.loss_cls: 0.1214, decode.d5.loss_mask: 0.2469, decode.d5.loss_dice: 0.6749, decode.d6.loss_cls: 0.1258, decode.d6.loss_mask: 0.2458, decode.d6.loss_dice: 0.6706, decode.d7.loss_cls: 0.1244, decode.d7.loss_mask: 0.2450, decode.d7.loss_dice: 0.6699, decode.d8.loss_cls: 0.1202, decode.d8.loss_mask: 0.2455, decode.d8.loss_dice: 0.6704, loss: 10.7392 +2022-05-10 04:14:19,784 - mmseg - INFO - Iter [19950/80000] lr: 1.078e-06, eta: 1 day, 8:25:59, time: 1.829, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1057, decode.loss_mask: 0.2441, decode.loss_dice: 0.6641, decode.d0.loss_cls: 0.3422, decode.d0.loss_mask: 0.2577, decode.d0.loss_dice: 0.7138, decode.d1.loss_cls: 0.1233, decode.d1.loss_mask: 0.2472, decode.d1.loss_dice: 0.6781, decode.d2.loss_cls: 0.1249, decode.d2.loss_mask: 0.2447, decode.d2.loss_dice: 0.6686, decode.d3.loss_cls: 0.1045, decode.d3.loss_mask: 0.2447, decode.d3.loss_dice: 0.6670, decode.d4.loss_cls: 0.1093, decode.d4.loss_mask: 0.2446, decode.d4.loss_dice: 0.6673, decode.d5.loss_cls: 0.1103, decode.d5.loss_mask: 0.2447, decode.d5.loss_dice: 0.6665, decode.d6.loss_cls: 0.1049, decode.d6.loss_mask: 0.2445, decode.d6.loss_dice: 0.6674, decode.d7.loss_cls: 0.1086, decode.d7.loss_mask: 0.2438, decode.d7.loss_dice: 0.6665, decode.d8.loss_cls: 0.1079, 
decode.d8.loss_mask: 0.2446, decode.d8.loss_dice: 0.6652, loss: 10.5267 +2022-05-10 04:15:48,545 - mmseg - INFO - Saving checkpoint at 20000 iterations +2022-05-10 04:16:20,875 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 04:16:20,883 - mmseg - INFO - Iter [20000/80000] lr: 1.077e-06, eta: 1 day, 8:25:33, time: 2.420, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1288, decode.loss_mask: 0.2396, decode.loss_dice: 0.6563, decode.d0.loss_cls: 0.3350, decode.d0.loss_mask: 0.2486, decode.d0.loss_dice: 0.7094, decode.d1.loss_cls: 0.1388, decode.d1.loss_mask: 0.2410, decode.d1.loss_dice: 0.6692, decode.d2.loss_cls: 0.1318, decode.d2.loss_mask: 0.2408, decode.d2.loss_dice: 0.6635, decode.d3.loss_cls: 0.1286, decode.d3.loss_mask: 0.2395, decode.d3.loss_dice: 0.6555, decode.d4.loss_cls: 0.1228, decode.d4.loss_mask: 0.2395, decode.d4.loss_dice: 0.6584, decode.d5.loss_cls: 0.1203, decode.d5.loss_mask: 0.2396, decode.d5.loss_dice: 0.6597, decode.d6.loss_cls: 0.1259, decode.d6.loss_mask: 0.2393, decode.d6.loss_dice: 0.6576, decode.d7.loss_cls: 0.1272, decode.d7.loss_mask: 0.2396, decode.d7.loss_dice: 0.6569, decode.d8.loss_cls: 0.1295, decode.d8.loss_mask: 0.2396, decode.d8.loss_dice: 0.6607, loss: 10.5432 +2022-05-10 04:18:16,429 - mmseg - INFO - per class results: +2022-05-10 04:18:16,433 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.56 | 99.15 | +| sidewalk | 88.48 | 94.21 | +| building | 94.45 | 97.32 | +| wall | 66.74 | 81.14 | +| fence | 73.72 | 81.58 | +| pole | 71.76 | 82.5 | +| traffic light | 77.45 | 88.06 | +| traffic sign | 83.86 | 90.82 | +| vegetation | 93.45 | 96.51 | +| terrain | 68.35 | 80.36 | +| sky | 95.78 | 98.73 | +| person | 86.97 | 93.42 | +| rider | 74.79 | 86.93 | +| car | 96.31 | 98.35 | +| truck | 92.21 | 96.0 | +| bus | 93.64 | 96.88 | +| train | 87.25 | 89.88 | +| motorcycle | 76.39 | 89.4 | +| bicycle | 83.02 | 91.61 
| ++---------------+-------+-------+ +2022-05-10 04:18:16,434 - mmseg - INFO - Summary: +2022-05-10 04:18:16,434 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | ++-------+-------+------+ +| 97.01 | 84.38 | 91.2 | ++-------+-------+------+ +2022-05-10 04:18:16,437 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 04:18:16,437 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8438, mAcc: 0.9120, IoU.road: 0.9856, IoU.sidewalk: 0.8848, IoU.building: 0.9445, IoU.wall: 0.6674, IoU.fence: 0.7372, IoU.pole: 0.7176, IoU.traffic light: 0.7745, IoU.traffic sign: 0.8386, IoU.vegetation: 0.9345, IoU.terrain: 0.6835, IoU.sky: 0.9578, IoU.person: 0.8697, IoU.rider: 0.7479, IoU.car: 0.9631, IoU.truck: 0.9221, IoU.bus: 0.9364, IoU.train: 0.8725, IoU.motorcycle: 0.7639, IoU.bicycle: 0.8302, Acc.road: 0.9915, Acc.sidewalk: 0.9421, Acc.building: 0.9732, Acc.wall: 0.8114, Acc.fence: 0.8158, Acc.pole: 0.8250, Acc.traffic light: 0.8806, Acc.traffic sign: 0.9082, Acc.vegetation: 0.9651, Acc.terrain: 0.8036, Acc.sky: 0.9873, Acc.person: 0.9342, Acc.rider: 0.8693, Acc.car: 0.9835, Acc.truck: 0.9600, Acc.bus: 0.9688, Acc.train: 0.8988, Acc.motorcycle: 0.8940, Acc.bicycle: 0.9161 +2022-05-10 04:19:44,404 - mmseg - INFO - Iter [20050/80000] lr: 1.076e-06, eta: 1 day, 8:29:14, time: 4.073, data_time: 2.333, memory: 64699, decode.loss_cls: 0.1173, decode.loss_mask: 0.2472, decode.loss_dice: 0.6581, decode.d0.loss_cls: 0.3387, decode.d0.loss_mask: 0.2558, decode.d0.loss_dice: 0.7116, decode.d1.loss_cls: 0.1371, decode.d1.loss_mask: 0.2484, decode.d1.loss_dice: 0.6782, decode.d2.loss_cls: 0.1306, decode.d2.loss_mask: 0.2470, decode.d2.loss_dice: 0.6649, decode.d3.loss_cls: 0.1211, decode.d3.loss_mask: 0.2454, decode.d3.loss_dice: 0.6604, decode.d4.loss_cls: 0.1163, decode.d4.loss_mask: 0.2461, decode.d4.loss_dice: 0.6579, decode.d5.loss_cls: 0.1160, decode.d5.loss_mask: 0.2471, decode.d5.loss_dice: 0.6619, decode.d6.loss_cls: 
0.1182, decode.d6.loss_mask: 0.2463, decode.d6.loss_dice: 0.6542, decode.d7.loss_cls: 0.1201, decode.d7.loss_mask: 0.2461, decode.d7.loss_dice: 0.6602, decode.d8.loss_cls: 0.1203, decode.d8.loss_mask: 0.2468, decode.d8.loss_dice: 0.6643, loss: 10.5836 +2022-05-10 04:21:15,335 - mmseg - INFO - Iter [20100/80000] lr: 1.075e-06, eta: 1 day, 8:27:17, time: 1.819, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1168, decode.loss_mask: 0.2389, decode.loss_dice: 0.6562, decode.d0.loss_cls: 0.3438, decode.d0.loss_mask: 0.2528, decode.d0.loss_dice: 0.7061, decode.d1.loss_cls: 0.1412, decode.d1.loss_mask: 0.2420, decode.d1.loss_dice: 0.6735, decode.d2.loss_cls: 0.1322, decode.d2.loss_mask: 0.2406, decode.d2.loss_dice: 0.6688, decode.d3.loss_cls: 0.1240, decode.d3.loss_mask: 0.2400, decode.d3.loss_dice: 0.6617, decode.d4.loss_cls: 0.1197, decode.d4.loss_mask: 0.2400, decode.d4.loss_dice: 0.6567, decode.d5.loss_cls: 0.1201, decode.d5.loss_mask: 0.2402, decode.d5.loss_dice: 0.6592, decode.d6.loss_cls: 0.1221, decode.d6.loss_mask: 0.2400, decode.d6.loss_dice: 0.6612, decode.d7.loss_cls: 0.1224, decode.d7.loss_mask: 0.2392, decode.d7.loss_dice: 0.6634, decode.d8.loss_cls: 0.1202, decode.d8.loss_mask: 0.2394, decode.d8.loss_dice: 0.6610, loss: 10.5436 +2022-05-10 04:22:44,001 - mmseg - INFO - Iter [20150/80000] lr: 1.074e-06, eta: 1 day, 8:25:13, time: 1.772, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1060, decode.loss_mask: 0.2450, decode.loss_dice: 0.6717, decode.d0.loss_cls: 0.3250, decode.d0.loss_mask: 0.2570, decode.d0.loss_dice: 0.7182, decode.d1.loss_cls: 0.1283, decode.d1.loss_mask: 0.2488, decode.d1.loss_dice: 0.6823, decode.d2.loss_cls: 0.1285, decode.d2.loss_mask: 0.2467, decode.d2.loss_dice: 0.6761, decode.d3.loss_cls: 0.1268, decode.d3.loss_mask: 0.2464, decode.d3.loss_dice: 0.6696, decode.d4.loss_cls: 0.1216, decode.d4.loss_mask: 0.2459, decode.d4.loss_dice: 0.6642, decode.d5.loss_cls: 0.1153, decode.d5.loss_mask: 0.2462, decode.d5.loss_dice: 0.6726, 
decode.d6.loss_cls: 0.1114, decode.d6.loss_mask: 0.2461, decode.d6.loss_dice: 0.6709, decode.d7.loss_cls: 0.1173, decode.d7.loss_mask: 0.2461, decode.d7.loss_dice: 0.6652, decode.d8.loss_cls: 0.1133, decode.d8.loss_mask: 0.2460, decode.d8.loss_dice: 0.6692, loss: 10.6276 +2022-05-10 04:24:11,563 - mmseg - INFO - Iter [20200/80000] lr: 1.073e-06, eta: 1 day, 8:23:06, time: 1.750, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1118, decode.loss_mask: 0.2452, decode.loss_dice: 0.6547, decode.d0.loss_cls: 0.3423, decode.d0.loss_mask: 0.2592, decode.d0.loss_dice: 0.7000, decode.d1.loss_cls: 0.1294, decode.d1.loss_mask: 0.2452, decode.d1.loss_dice: 0.6675, decode.d2.loss_cls: 0.1248, decode.d2.loss_mask: 0.2450, decode.d2.loss_dice: 0.6655, decode.d3.loss_cls: 0.1179, decode.d3.loss_mask: 0.2439, decode.d3.loss_dice: 0.6565, decode.d4.loss_cls: 0.1118, decode.d4.loss_mask: 0.2441, decode.d4.loss_dice: 0.6552, decode.d5.loss_cls: 0.1194, decode.d5.loss_mask: 0.2431, decode.d5.loss_dice: 0.6568, decode.d6.loss_cls: 0.1083, decode.d6.loss_mask: 0.2445, decode.d6.loss_dice: 0.6529, decode.d7.loss_cls: 0.1128, decode.d7.loss_mask: 0.2442, decode.d7.loss_dice: 0.6553, decode.d8.loss_cls: 0.1161, decode.d8.loss_mask: 0.2442, decode.d8.loss_dice: 0.6545, loss: 10.4720 +2022-05-10 04:25:41,110 - mmseg - INFO - Iter [20250/80000] lr: 1.072e-06, eta: 1 day, 8:21:05, time: 1.792, data_time: 0.022, memory: 64699, decode.loss_cls: 0.1143, decode.loss_mask: 0.2375, decode.loss_dice: 0.6414, decode.d0.loss_cls: 0.3291, decode.d0.loss_mask: 0.2489, decode.d0.loss_dice: 0.6947, decode.d1.loss_cls: 0.1325, decode.d1.loss_mask: 0.2390, decode.d1.loss_dice: 0.6596, decode.d2.loss_cls: 0.1279, decode.d2.loss_mask: 0.2377, decode.d2.loss_dice: 0.6481, decode.d3.loss_cls: 0.1127, decode.d3.loss_mask: 0.2372, decode.d3.loss_dice: 0.6470, decode.d4.loss_cls: 0.1121, decode.d4.loss_mask: 0.2387, decode.d4.loss_dice: 0.6467, decode.d5.loss_cls: 0.1210, decode.d5.loss_mask: 0.2367, 
decode.d5.loss_dice: 0.6444, decode.d6.loss_cls: 0.1141, decode.d6.loss_mask: 0.2382, decode.d6.loss_dice: 0.6473, decode.d7.loss_cls: 0.1151, decode.d7.loss_mask: 0.2373, decode.d7.loss_dice: 0.6431, decode.d8.loss_cls: 0.1153, decode.d8.loss_mask: 0.2368, decode.d8.loss_dice: 0.6454, loss: 10.2997 +2022-05-10 04:27:12,562 - mmseg - INFO - Iter [20300/80000] lr: 1.071e-06, eta: 1 day, 8:19:10, time: 1.828, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1242, decode.loss_mask: 0.2329, decode.loss_dice: 0.6679, decode.d0.loss_cls: 0.3406, decode.d0.loss_mask: 0.2438, decode.d0.loss_dice: 0.7131, decode.d1.loss_cls: 0.1451, decode.d1.loss_mask: 0.2352, decode.d1.loss_dice: 0.6827, decode.d2.loss_cls: 0.1331, decode.d2.loss_mask: 0.2333, decode.d2.loss_dice: 0.6768, decode.d3.loss_cls: 0.1271, decode.d3.loss_mask: 0.2329, decode.d3.loss_dice: 0.6696, decode.d4.loss_cls: 0.1318, decode.d4.loss_mask: 0.2330, decode.d4.loss_dice: 0.6712, decode.d5.loss_cls: 0.1296, decode.d5.loss_mask: 0.2338, decode.d5.loss_dice: 0.6729, decode.d6.loss_cls: 0.1204, decode.d6.loss_mask: 0.2325, decode.d6.loss_dice: 0.6717, decode.d7.loss_cls: 0.1213, decode.d7.loss_mask: 0.2321, decode.d7.loss_dice: 0.6682, decode.d8.loss_cls: 0.1224, decode.d8.loss_mask: 0.2325, decode.d8.loss_dice: 0.6684, loss: 10.5999 +2022-05-10 04:28:40,190 - mmseg - INFO - Iter [20350/80000] lr: 1.071e-06, eta: 1 day, 8:17:04, time: 1.754, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1153, decode.loss_mask: 0.2355, decode.loss_dice: 0.6660, decode.d0.loss_cls: 0.3362, decode.d0.loss_mask: 0.2485, decode.d0.loss_dice: 0.7115, decode.d1.loss_cls: 0.1501, decode.d1.loss_mask: 0.2378, decode.d1.loss_dice: 0.6769, decode.d2.loss_cls: 0.1370, decode.d2.loss_mask: 0.2357, decode.d2.loss_dice: 0.6714, decode.d3.loss_cls: 0.1120, decode.d3.loss_mask: 0.2366, decode.d3.loss_dice: 0.6685, decode.d4.loss_cls: 0.1180, decode.d4.loss_mask: 0.2366, decode.d4.loss_dice: 0.6659, decode.d5.loss_cls: 0.1195, 
decode.d5.loss_mask: 0.2358, decode.d5.loss_dice: 0.6635, decode.d6.loss_cls: 0.1185, decode.d6.loss_mask: 0.2355, decode.d6.loss_dice: 0.6625, decode.d7.loss_cls: 0.1252, decode.d7.loss_mask: 0.2358, decode.d7.loss_dice: 0.6631, decode.d8.loss_cls: 0.1157, decode.d8.loss_mask: 0.2354, decode.d8.loss_dice: 0.6607, loss: 10.5306 +2022-05-10 04:30:10,182 - mmseg - INFO - Iter [20400/80000] lr: 1.070e-06, eta: 1 day, 8:15:05, time: 1.800, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1121, decode.loss_mask: 0.2476, decode.loss_dice: 0.6745, decode.d0.loss_cls: 0.3466, decode.d0.loss_mask: 0.2611, decode.d0.loss_dice: 0.7198, decode.d1.loss_cls: 0.1350, decode.d1.loss_mask: 0.2502, decode.d1.loss_dice: 0.6895, decode.d2.loss_cls: 0.1262, decode.d2.loss_mask: 0.2481, decode.d2.loss_dice: 0.6824, decode.d3.loss_cls: 0.1212, decode.d3.loss_mask: 0.2476, decode.d3.loss_dice: 0.6803, decode.d4.loss_cls: 0.1242, decode.d4.loss_mask: 0.2466, decode.d4.loss_dice: 0.6778, decode.d5.loss_cls: 0.1160, decode.d5.loss_mask: 0.2469, decode.d5.loss_dice: 0.6768, decode.d6.loss_cls: 0.1187, decode.d6.loss_mask: 0.2469, decode.d6.loss_dice: 0.6768, decode.d7.loss_cls: 0.1221, decode.d7.loss_mask: 0.2479, decode.d7.loss_dice: 0.6796, decode.d8.loss_cls: 0.1205, decode.d8.loss_mask: 0.2479, decode.d8.loss_dice: 0.6722, loss: 10.7632 +2022-05-10 04:31:38,348 - mmseg - INFO - Iter [20450/80000] lr: 1.069e-06, eta: 1 day, 8:13:00, time: 1.763, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1115, decode.loss_mask: 0.2385, decode.loss_dice: 0.6529, decode.d0.loss_cls: 0.3371, decode.d0.loss_mask: 0.2507, decode.d0.loss_dice: 0.6951, decode.d1.loss_cls: 0.1247, decode.d1.loss_mask: 0.2391, decode.d1.loss_dice: 0.6633, decode.d2.loss_cls: 0.1147, decode.d2.loss_mask: 0.2381, decode.d2.loss_dice: 0.6535, decode.d3.loss_cls: 0.1031, decode.d3.loss_mask: 0.2376, decode.d3.loss_dice: 0.6517, decode.d4.loss_cls: 0.1022, decode.d4.loss_mask: 0.2381, decode.d4.loss_dice: 0.6531, 
decode.d5.loss_cls: 0.1090, decode.d5.loss_mask: 0.2382, decode.d5.loss_dice: 0.6486, decode.d6.loss_cls: 0.1047, decode.d6.loss_mask: 0.2371, decode.d6.loss_dice: 0.6500, decode.d7.loss_cls: 0.1068, decode.d7.loss_mask: 0.2375, decode.d7.loss_dice: 0.6491, decode.d8.loss_cls: 0.1043, decode.d8.loss_mask: 0.2384, decode.d8.loss_dice: 0.6503, loss: 10.2790 +2022-05-10 04:33:09,165 - mmseg - INFO - Iter [20500/80000] lr: 1.068e-06, eta: 1 day, 8:11:04, time: 1.816, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1192, decode.loss_mask: 0.2364, decode.loss_dice: 0.6547, decode.d0.loss_cls: 0.3377, decode.d0.loss_mask: 0.2476, decode.d0.loss_dice: 0.6976, decode.d1.loss_cls: 0.1361, decode.d1.loss_mask: 0.2382, decode.d1.loss_dice: 0.6643, decode.d2.loss_cls: 0.1260, decode.d2.loss_mask: 0.2371, decode.d2.loss_dice: 0.6613, decode.d3.loss_cls: 0.1220, decode.d3.loss_mask: 0.2366, decode.d3.loss_dice: 0.6546, decode.d4.loss_cls: 0.1225, decode.d4.loss_mask: 0.2365, decode.d4.loss_dice: 0.6548, decode.d5.loss_cls: 0.1249, decode.d5.loss_mask: 0.2370, decode.d5.loss_dice: 0.6589, decode.d6.loss_cls: 0.1226, decode.d6.loss_mask: 0.2362, decode.d6.loss_dice: 0.6547, decode.d7.loss_cls: 0.1283, decode.d7.loss_mask: 0.2361, decode.d7.loss_dice: 0.6572, decode.d8.loss_cls: 0.1249, decode.d8.loss_mask: 0.2364, decode.d8.loss_dice: 0.6547, loss: 10.4550 +2022-05-10 04:34:38,567 - mmseg - INFO - Iter [20550/80000] lr: 1.067e-06, eta: 1 day, 8:09:03, time: 1.788, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1213, decode.loss_mask: 0.2373, decode.loss_dice: 0.6572, decode.d0.loss_cls: 0.3358, decode.d0.loss_mask: 0.2484, decode.d0.loss_dice: 0.7021, decode.d1.loss_cls: 0.1353, decode.d1.loss_mask: 0.2396, decode.d1.loss_dice: 0.6718, decode.d2.loss_cls: 0.1250, decode.d2.loss_mask: 0.2382, decode.d2.loss_dice: 0.6631, decode.d3.loss_cls: 0.1276, decode.d3.loss_mask: 0.2369, decode.d3.loss_dice: 0.6551, decode.d4.loss_cls: 0.1228, decode.d4.loss_mask: 0.2368, 
decode.d4.loss_dice: 0.6571, decode.d5.loss_cls: 0.1260, decode.d5.loss_mask: 0.2365, decode.d5.loss_dice: 0.6608, decode.d6.loss_cls: 0.1141, decode.d6.loss_mask: 0.2371, decode.d6.loss_dice: 0.6546, decode.d7.loss_cls: 0.1202, decode.d7.loss_mask: 0.2368, decode.d7.loss_dice: 0.6528, decode.d8.loss_cls: 0.1188, decode.d8.loss_mask: 0.2368, decode.d8.loss_dice: 0.6518, loss: 10.4574 +2022-05-10 04:36:07,882 - mmseg - INFO - Iter [20600/80000] lr: 1.066e-06, eta: 1 day, 8:07:03, time: 1.786, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1083, decode.loss_mask: 0.2392, decode.loss_dice: 0.6554, decode.d0.loss_cls: 0.3416, decode.d0.loss_mask: 0.2494, decode.d0.loss_dice: 0.7008, decode.d1.loss_cls: 0.1287, decode.d1.loss_mask: 0.2421, decode.d1.loss_dice: 0.6671, decode.d2.loss_cls: 0.1227, decode.d2.loss_mask: 0.2411, decode.d2.loss_dice: 0.6623, decode.d3.loss_cls: 0.1135, decode.d3.loss_mask: 0.2409, decode.d3.loss_dice: 0.6561, decode.d4.loss_cls: 0.1153, decode.d4.loss_mask: 0.2403, decode.d4.loss_dice: 0.6533, decode.d5.loss_cls: 0.1190, decode.d5.loss_mask: 0.2407, decode.d5.loss_dice: 0.6562, decode.d6.loss_cls: 0.1122, decode.d6.loss_mask: 0.2405, decode.d6.loss_dice: 0.6580, decode.d7.loss_cls: 0.1193, decode.d7.loss_mask: 0.2396, decode.d7.loss_dice: 0.6567, decode.d8.loss_cls: 0.1145, decode.d8.loss_mask: 0.2394, decode.d8.loss_dice: 0.6536, loss: 10.4278 +2022-05-10 04:37:39,439 - mmseg - INFO - Iter [20650/80000] lr: 1.065e-06, eta: 1 day, 8:05:09, time: 1.831, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1138, decode.loss_mask: 0.2367, decode.loss_dice: 0.6549, decode.d0.loss_cls: 0.3327, decode.d0.loss_mask: 0.2463, decode.d0.loss_dice: 0.6995, decode.d1.loss_cls: 0.1433, decode.d1.loss_mask: 0.2387, decode.d1.loss_dice: 0.6641, decode.d2.loss_cls: 0.1244, decode.d2.loss_mask: 0.2370, decode.d2.loss_dice: 0.6554, decode.d3.loss_cls: 0.1190, decode.d3.loss_mask: 0.2368, decode.d3.loss_dice: 0.6524, decode.d4.loss_cls: 0.1179, 
decode.d4.loss_mask: 0.2374, decode.d4.loss_dice: 0.6518, decode.d5.loss_cls: 0.1159, decode.d5.loss_mask: 0.2368, decode.d5.loss_dice: 0.6507, decode.d6.loss_cls: 0.1214, decode.d6.loss_mask: 0.2366, decode.d6.loss_dice: 0.6525, decode.d7.loss_cls: 0.1152, decode.d7.loss_mask: 0.2370, decode.d7.loss_dice: 0.6515, decode.d8.loss_cls: 0.1193, decode.d8.loss_mask: 0.2364, decode.d8.loss_dice: 0.6503, loss: 10.3857 +2022-05-10 04:39:08,535 - mmseg - INFO - Iter [20700/80000] lr: 1.064e-06, eta: 1 day, 8:03:08, time: 1.782, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1174, decode.loss_mask: 0.2426, decode.loss_dice: 0.6513, decode.d0.loss_cls: 0.3297, decode.d0.loss_mask: 0.2580, decode.d0.loss_dice: 0.7041, decode.d1.loss_cls: 0.1323, decode.d1.loss_mask: 0.2454, decode.d1.loss_dice: 0.6641, decode.d2.loss_cls: 0.1268, decode.d2.loss_mask: 0.2439, decode.d2.loss_dice: 0.6630, decode.d3.loss_cls: 0.1207, decode.d3.loss_mask: 0.2429, decode.d3.loss_dice: 0.6538, decode.d4.loss_cls: 0.1110, decode.d4.loss_mask: 0.2431, decode.d4.loss_dice: 0.6577, decode.d5.loss_cls: 0.1133, decode.d5.loss_mask: 0.2422, decode.d5.loss_dice: 0.6564, decode.d6.loss_cls: 0.1109, decode.d6.loss_mask: 0.2427, decode.d6.loss_dice: 0.6479, decode.d7.loss_cls: 0.1126, decode.d7.loss_mask: 0.2426, decode.d7.loss_dice: 0.6521, decode.d8.loss_cls: 0.1135, decode.d8.loss_mask: 0.2422, decode.d8.loss_dice: 0.6546, loss: 10.4388 +2022-05-10 04:40:37,012 - mmseg - INFO - Iter [20750/80000] lr: 1.063e-06, eta: 1 day, 8:01:06, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1076, decode.loss_mask: 0.2341, decode.loss_dice: 0.6601, decode.d0.loss_cls: 0.3413, decode.d0.loss_mask: 0.2443, decode.d0.loss_dice: 0.7048, decode.d1.loss_cls: 0.1296, decode.d1.loss_mask: 0.2360, decode.d1.loss_dice: 0.6787, decode.d2.loss_cls: 0.1199, decode.d2.loss_mask: 0.2354, decode.d2.loss_dice: 0.6689, decode.d3.loss_cls: 0.1163, decode.d3.loss_mask: 0.2347, decode.d3.loss_dice: 0.6648, 
decode.d4.loss_cls: 0.1144, decode.d4.loss_mask: 0.2351, decode.d4.loss_dice: 0.6631, decode.d5.loss_cls: 0.1133, decode.d5.loss_mask: 0.2354, decode.d5.loss_dice: 0.6640, decode.d6.loss_cls: 0.1138, decode.d6.loss_mask: 0.2354, decode.d6.loss_dice: 0.6613, decode.d7.loss_cls: 0.1136, decode.d7.loss_mask: 0.2356, decode.d7.loss_dice: 0.6619, decode.d8.loss_cls: 0.1054, decode.d8.loss_mask: 0.2350, decode.d8.loss_dice: 0.6600, loss: 10.4239 +2022-05-10 04:42:05,312 - mmseg - INFO - Iter [20800/80000] lr: 1.063e-06, eta: 1 day, 7:59:03, time: 1.766, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1166, decode.loss_mask: 0.2321, decode.loss_dice: 0.6636, decode.d0.loss_cls: 0.3451, decode.d0.loss_mask: 0.2429, decode.d0.loss_dice: 0.7074, decode.d1.loss_cls: 0.1356, decode.d1.loss_mask: 0.2332, decode.d1.loss_dice: 0.6776, decode.d2.loss_cls: 0.1321, decode.d2.loss_mask: 0.2329, decode.d2.loss_dice: 0.6732, decode.d3.loss_cls: 0.1194, decode.d3.loss_mask: 0.2329, decode.d3.loss_dice: 0.6687, decode.d4.loss_cls: 0.1193, decode.d4.loss_mask: 0.2328, decode.d4.loss_dice: 0.6701, decode.d5.loss_cls: 0.1280, decode.d5.loss_mask: 0.2330, decode.d5.loss_dice: 0.6694, decode.d6.loss_cls: 0.1135, decode.d6.loss_mask: 0.2326, decode.d6.loss_dice: 0.6660, decode.d7.loss_cls: 0.1160, decode.d7.loss_mask: 0.2329, decode.d7.loss_dice: 0.6693, decode.d8.loss_cls: 0.1197, decode.d8.loss_mask: 0.2324, decode.d8.loss_dice: 0.6689, loss: 10.5171 +2022-05-10 04:43:36,653 - mmseg - INFO - Iter [20850/80000] lr: 1.062e-06, eta: 1 day, 7:57:09, time: 1.827, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1064, decode.loss_mask: 0.2308, decode.loss_dice: 0.6581, decode.d0.loss_cls: 0.3205, decode.d0.loss_mask: 0.2414, decode.d0.loss_dice: 0.7010, decode.d1.loss_cls: 0.1311, decode.d1.loss_mask: 0.2336, decode.d1.loss_dice: 0.6710, decode.d2.loss_cls: 0.1164, decode.d2.loss_mask: 0.2333, decode.d2.loss_dice: 0.6626, decode.d3.loss_cls: 0.1111, decode.d3.loss_mask: 0.2319, 
decode.d3.loss_dice: 0.6591, decode.d4.loss_cls: 0.1116, decode.d4.loss_mask: 0.2314, decode.d4.loss_dice: 0.6574, decode.d5.loss_cls: 0.1159, decode.d5.loss_mask: 0.2316, decode.d5.loss_dice: 0.6581, decode.d6.loss_cls: 0.1088, decode.d6.loss_mask: 0.2311, decode.d6.loss_dice: 0.6563, decode.d7.loss_cls: 0.1075, decode.d7.loss_mask: 0.2307, decode.d7.loss_dice: 0.6553, decode.d8.loss_cls: 0.1061, decode.d8.loss_mask: 0.2316, decode.d8.loss_dice: 0.6601, loss: 10.3018 +2022-05-10 04:45:05,121 - mmseg - INFO - Iter [20900/80000] lr: 1.061e-06, eta: 1 day, 7:55:07, time: 1.769, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0972, decode.loss_mask: 0.2386, decode.loss_dice: 0.6381, decode.d0.loss_cls: 0.3303, decode.d0.loss_mask: 0.2480, decode.d0.loss_dice: 0.6776, decode.d1.loss_cls: 0.1105, decode.d1.loss_mask: 0.2395, decode.d1.loss_dice: 0.6532, decode.d2.loss_cls: 0.1061, decode.d2.loss_mask: 0.2381, decode.d2.loss_dice: 0.6462, decode.d3.loss_cls: 0.1002, decode.d3.loss_mask: 0.2387, decode.d3.loss_dice: 0.6426, decode.d4.loss_cls: 0.0980, decode.d4.loss_mask: 0.2392, decode.d4.loss_dice: 0.6397, decode.d5.loss_cls: 0.0997, decode.d5.loss_mask: 0.2391, decode.d5.loss_dice: 0.6425, decode.d6.loss_cls: 0.0884, decode.d6.loss_mask: 0.2388, decode.d6.loss_dice: 0.6419, decode.d7.loss_cls: 0.0944, decode.d7.loss_mask: 0.2390, decode.d7.loss_dice: 0.6387, decode.d8.loss_cls: 0.0948, decode.d8.loss_mask: 0.2391, decode.d8.loss_dice: 0.6407, loss: 10.0787 +2022-05-10 04:46:33,172 - mmseg - INFO - Iter [20950/80000] lr: 1.060e-06, eta: 1 day, 7:53:04, time: 1.761, data_time: 0.021, memory: 64699, decode.loss_cls: 0.1089, decode.loss_mask: 0.2324, decode.loss_dice: 0.6715, decode.d0.loss_cls: 0.3281, decode.d0.loss_mask: 0.2436, decode.d0.loss_dice: 0.7205, decode.d1.loss_cls: 0.1333, decode.d1.loss_mask: 0.2340, decode.d1.loss_dice: 0.6912, decode.d2.loss_cls: 0.1297, decode.d2.loss_mask: 0.2324, decode.d2.loss_dice: 0.6820, decode.d3.loss_cls: 0.1174, 
decode.d3.loss_mask: 0.2318, decode.d3.loss_dice: 0.6722, decode.d4.loss_cls: 0.1116, decode.d4.loss_mask: 0.2313, decode.d4.loss_dice: 0.6722, decode.d5.loss_cls: 0.1224, decode.d5.loss_mask: 0.2322, decode.d5.loss_dice: 0.6748, decode.d6.loss_cls: 0.1101, decode.d6.loss_mask: 0.2318, decode.d6.loss_dice: 0.6752, decode.d7.loss_cls: 0.1142, decode.d7.loss_mask: 0.2316, decode.d7.loss_dice: 0.6729, decode.d8.loss_cls: 0.1162, decode.d8.loss_mask: 0.2323, decode.d8.loss_dice: 0.6728, loss: 10.5306 +2022-05-10 04:48:01,796 - mmseg - INFO - Saving checkpoint at 21000 iterations +2022-05-10 04:48:35,425 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 04:48:35,435 - mmseg - INFO - Iter [21000/80000] lr: 1.059e-06, eta: 1 day, 7:52:37, time: 2.443, data_time: 0.022, memory: 64699, decode.loss_cls: 0.0977, decode.loss_mask: 0.2371, decode.loss_dice: 0.6488, decode.d0.loss_cls: 0.3421, decode.d0.loss_mask: 0.2487, decode.d0.loss_dice: 0.6889, decode.d1.loss_cls: 0.1315, decode.d1.loss_mask: 0.2399, decode.d1.loss_dice: 0.6572, decode.d2.loss_cls: 0.1154, decode.d2.loss_mask: 0.2391, decode.d2.loss_dice: 0.6534, decode.d3.loss_cls: 0.1072, decode.d3.loss_mask: 0.2380, decode.d3.loss_dice: 0.6478, decode.d4.loss_cls: 0.1042, decode.d4.loss_mask: 0.2372, decode.d4.loss_dice: 0.6463, decode.d5.loss_cls: 0.1052, decode.d5.loss_mask: 0.2378, decode.d5.loss_dice: 0.6503, decode.d6.loss_cls: 0.0972, decode.d6.loss_mask: 0.2364, decode.d6.loss_dice: 0.6504, decode.d7.loss_cls: 0.1099, decode.d7.loss_mask: 0.2373, decode.d7.loss_dice: 0.6455, decode.d8.loss_cls: 0.1047, decode.d8.loss_mask: 0.2372, decode.d8.loss_dice: 0.6471, loss: 10.2396 +2022-05-10 04:50:30,519 - mmseg - INFO - per class results: +2022-05-10 04:50:30,524 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.54 | 99.25 | +| sidewalk | 88.31 | 93.29 | +| building | 94.44 | 97.01 | +| wall | 66.73 | 
79.91 | +| fence | 74.5 | 85.14 | +| pole | 71.37 | 84.98 | +| traffic light | 77.52 | 88.23 | +| traffic sign | 83.73 | 90.08 | +| vegetation | 93.46 | 96.88 | +| terrain | 69.56 | 77.48 | +| sky | 95.79 | 98.62 | +| person | 86.9 | 93.35 | +| rider | 74.59 | 86.28 | +| car | 96.24 | 98.21 | +| truck | 91.9 | 95.45 | +| bus | 93.42 | 96.29 | +| train | 88.0 | 90.88 | +| motorcycle | 76.2 | 88.24 | +| bicycle | 83.05 | 91.34 | ++---------------+-------+-------+ +2022-05-10 04:50:30,524 - mmseg - INFO - Summary: +2022-05-10 04:50:30,524 - mmseg - INFO - ++------+-------+------+ +| aAcc | mIoU | mAcc | ++------+-------+------+ +| 97.0 | 84.43 | 91.1 | ++------+-------+------+ +2022-05-10 04:50:30,529 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 04:50:30,530 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8443, mAcc: 0.9110, IoU.road: 0.9854, IoU.sidewalk: 0.8831, IoU.building: 0.9444, IoU.wall: 0.6673, IoU.fence: 0.7450, IoU.pole: 0.7137, IoU.traffic light: 0.7752, IoU.traffic sign: 0.8373, IoU.vegetation: 0.9346, IoU.terrain: 0.6956, IoU.sky: 0.9579, IoU.person: 0.8690, IoU.rider: 0.7459, IoU.car: 0.9624, IoU.truck: 0.9190, IoU.bus: 0.9342, IoU.train: 0.8800, IoU.motorcycle: 0.7620, IoU.bicycle: 0.8305, Acc.road: 0.9925, Acc.sidewalk: 0.9329, Acc.building: 0.9701, Acc.wall: 0.7991, Acc.fence: 0.8514, Acc.pole: 0.8498, Acc.traffic light: 0.8823, Acc.traffic sign: 0.9008, Acc.vegetation: 0.9688, Acc.terrain: 0.7748, Acc.sky: 0.9862, Acc.person: 0.9335, Acc.rider: 0.8628, Acc.car: 0.9821, Acc.truck: 0.9545, Acc.bus: 0.9629, Acc.train: 0.9088, Acc.motorcycle: 0.8824, Acc.bicycle: 0.9134 +2022-05-10 04:52:02,260 - mmseg - INFO - Iter [21050/80000] lr: 1.058e-06, eta: 1 day, 7:56:07, time: 4.139, data_time: 2.372, memory: 64699, decode.loss_cls: 0.1020, decode.loss_mask: 0.2399, decode.loss_dice: 0.6389, decode.d0.loss_cls: 0.3319, decode.d0.loss_mask: 0.2535, decode.d0.loss_dice: 0.6822, decode.d1.loss_cls: 0.1258, 
decode.d1.loss_mask: 0.2422, decode.d1.loss_dice: 0.6532, decode.d2.loss_cls: 0.1157, decode.d2.loss_mask: 0.2402, decode.d2.loss_dice: 0.6443, decode.d3.loss_cls: 0.1062, decode.d3.loss_mask: 0.2401, decode.d3.loss_dice: 0.6424, decode.d4.loss_cls: 0.1041, decode.d4.loss_mask: 0.2403, decode.d4.loss_dice: 0.6390, decode.d5.loss_cls: 0.1038, decode.d5.loss_mask: 0.2409, decode.d5.loss_dice: 0.6425, decode.d6.loss_cls: 0.1015, decode.d6.loss_mask: 0.2402, decode.d6.loss_dice: 0.6412, decode.d7.loss_cls: 0.1051, decode.d7.loss_mask: 0.2401, decode.d7.loss_dice: 0.6376, decode.d8.loss_cls: 0.1081, decode.d8.loss_mask: 0.2397, decode.d8.loss_dice: 0.6370, loss: 10.1797 +2022-05-10 04:53:30,570 - mmseg - INFO - Iter [21100/80000] lr: 1.057e-06, eta: 1 day, 7:54:03, time: 1.766, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1095, decode.loss_mask: 0.2349, decode.loss_dice: 0.6542, decode.d0.loss_cls: 0.3387, decode.d0.loss_mask: 0.2460, decode.d0.loss_dice: 0.6966, decode.d1.loss_cls: 0.1368, decode.d1.loss_mask: 0.2368, decode.d1.loss_dice: 0.6751, decode.d2.loss_cls: 0.1328, decode.d2.loss_mask: 0.2362, decode.d2.loss_dice: 0.6655, decode.d3.loss_cls: 0.1262, decode.d3.loss_mask: 0.2359, decode.d3.loss_dice: 0.6609, decode.d4.loss_cls: 0.1210, decode.d4.loss_mask: 0.2356, decode.d4.loss_dice: 0.6604, decode.d5.loss_cls: 0.1248, decode.d5.loss_mask: 0.2353, decode.d5.loss_dice: 0.6603, decode.d6.loss_cls: 0.1166, decode.d6.loss_mask: 0.2352, decode.d6.loss_dice: 0.6560, decode.d7.loss_cls: 0.1218, decode.d7.loss_mask: 0.2356, decode.d7.loss_dice: 0.6612, decode.d8.loss_cls: 0.1180, decode.d8.loss_mask: 0.2359, decode.d8.loss_dice: 0.6542, loss: 10.4581 +2022-05-10 04:54:59,527 - mmseg - INFO - Iter [21150/80000] lr: 1.056e-06, eta: 1 day, 7:52:02, time: 1.779, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1137, decode.loss_mask: 0.2326, decode.loss_dice: 0.6703, decode.d0.loss_cls: 0.3390, decode.d0.loss_mask: 0.2449, decode.d0.loss_dice: 0.7203, 
decode.d1.loss_cls: 0.1441, decode.d1.loss_mask: 0.2346, decode.d1.loss_dice: 0.6847, decode.d2.loss_cls: 0.1353, decode.d2.loss_mask: 0.2329, decode.d2.loss_dice: 0.6798, decode.d3.loss_cls: 0.1261, decode.d3.loss_mask: 0.2323, decode.d3.loss_dice: 0.6686, decode.d4.loss_cls: 0.1277, decode.d4.loss_mask: 0.2327, decode.d4.loss_dice: 0.6735, decode.d5.loss_cls: 0.1241, decode.d5.loss_mask: 0.2324, decode.d5.loss_dice: 0.6746, decode.d6.loss_cls: 0.1198, decode.d6.loss_mask: 0.2318, decode.d6.loss_dice: 0.6733, decode.d7.loss_cls: 0.1227, decode.d7.loss_mask: 0.2318, decode.d7.loss_dice: 0.6734, decode.d8.loss_cls: 0.1198, decode.d8.loss_mask: 0.2322, decode.d8.loss_dice: 0.6739, loss: 10.6030 +2022-05-10 04:56:28,087 - mmseg - INFO - Iter [21200/80000] lr: 1.055e-06, eta: 1 day, 7:50:00, time: 1.771, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1067, decode.loss_mask: 0.2400, decode.loss_dice: 0.6461, decode.d0.loss_cls: 0.3278, decode.d0.loss_mask: 0.2545, decode.d0.loss_dice: 0.6950, decode.d1.loss_cls: 0.1284, decode.d1.loss_mask: 0.2445, decode.d1.loss_dice: 0.6663, decode.d2.loss_cls: 0.1188, decode.d2.loss_mask: 0.2413, decode.d2.loss_dice: 0.6592, decode.d3.loss_cls: 0.1109, decode.d3.loss_mask: 0.2405, decode.d3.loss_dice: 0.6505, decode.d4.loss_cls: 0.1172, decode.d4.loss_mask: 0.2415, decode.d4.loss_dice: 0.6511, decode.d5.loss_cls: 0.1102, decode.d5.loss_mask: 0.2420, decode.d5.loss_dice: 0.6511, decode.d6.loss_cls: 0.1117, decode.d6.loss_mask: 0.2410, decode.d6.loss_dice: 0.6506, decode.d7.loss_cls: 0.1104, decode.d7.loss_mask: 0.2407, decode.d7.loss_dice: 0.6487, decode.d8.loss_cls: 0.1018, decode.d8.loss_mask: 0.2404, decode.d8.loss_dice: 0.6492, loss: 10.3377 +2022-05-10 04:57:58,533 - mmseg - INFO - Iter [21250/80000] lr: 1.054e-06, eta: 1 day, 7:48:03, time: 1.809, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1123, decode.loss_mask: 0.2506, decode.loss_dice: 0.6668, decode.d0.loss_cls: 0.3474, decode.d0.loss_mask: 0.2668, 
decode.d0.loss_dice: 0.7045, decode.d1.loss_cls: 0.1448, decode.d1.loss_mask: 0.2520, decode.d1.loss_dice: 0.6775, decode.d2.loss_cls: 0.1229, decode.d2.loss_mask: 0.2503, decode.d2.loss_dice: 0.6724, decode.d3.loss_cls: 0.1095, decode.d3.loss_mask: 0.2511, decode.d3.loss_dice: 0.6666, decode.d4.loss_cls: 0.1167, decode.d4.loss_mask: 0.2496, decode.d4.loss_dice: 0.6664, decode.d5.loss_cls: 0.1065, decode.d5.loss_mask: 0.2498, decode.d5.loss_dice: 0.6668, decode.d6.loss_cls: 0.1117, decode.d6.loss_mask: 0.2502, decode.d6.loss_dice: 0.6634, decode.d7.loss_cls: 0.1043, decode.d7.loss_mask: 0.2506, decode.d7.loss_dice: 0.6625, decode.d8.loss_cls: 0.1081, decode.d8.loss_mask: 0.2501, decode.d8.loss_dice: 0.6616, loss: 10.6140 +2022-05-10 04:59:28,135 - mmseg - INFO - Iter [21300/80000] lr: 1.054e-06, eta: 1 day, 7:46:04, time: 1.792, data_time: 0.023, memory: 64699, decode.loss_cls: 0.1117, decode.loss_mask: 0.2350, decode.loss_dice: 0.6739, decode.d0.loss_cls: 0.3373, decode.d0.loss_mask: 0.2465, decode.d0.loss_dice: 0.7140, decode.d1.loss_cls: 0.1404, decode.d1.loss_mask: 0.2374, decode.d1.loss_dice: 0.6807, decode.d2.loss_cls: 0.1216, decode.d2.loss_mask: 0.2368, decode.d2.loss_dice: 0.6711, decode.d3.loss_cls: 0.1126, decode.d3.loss_mask: 0.2359, decode.d3.loss_dice: 0.6684, decode.d4.loss_cls: 0.1070, decode.d4.loss_mask: 0.2362, decode.d4.loss_dice: 0.6697, decode.d5.loss_cls: 0.1189, decode.d5.loss_mask: 0.2352, decode.d5.loss_dice: 0.6735, decode.d6.loss_cls: 0.1112, decode.d6.loss_mask: 0.2347, decode.d6.loss_dice: 0.6708, decode.d7.loss_cls: 0.1121, decode.d7.loss_mask: 0.2353, decode.d7.loss_dice: 0.6654, decode.d8.loss_cls: 0.1118, decode.d8.loss_mask: 0.2355, decode.d8.loss_dice: 0.6707, loss: 10.5114 +2022-05-10 05:00:56,512 - mmseg - INFO - Iter [21350/80000] lr: 1.053e-06, eta: 1 day, 7:44:02, time: 1.767, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1079, decode.loss_mask: 0.2363, decode.loss_dice: 0.6541, decode.d0.loss_cls: 0.3343, 
decode.d0.loss_mask: 0.2480, decode.d0.loss_dice: 0.6952, decode.d1.loss_cls: 0.1300, decode.d1.loss_mask: 0.2380, decode.d1.loss_dice: 0.6666, decode.d2.loss_cls: 0.1170, decode.d2.loss_mask: 0.2363, decode.d2.loss_dice: 0.6594, decode.d3.loss_cls: 0.1141, decode.d3.loss_mask: 0.2365, decode.d3.loss_dice: 0.6514, decode.d4.loss_cls: 0.1118, decode.d4.loss_mask: 0.2361, decode.d4.loss_dice: 0.6532, decode.d5.loss_cls: 0.1106, decode.d5.loss_mask: 0.2359, decode.d5.loss_dice: 0.6489, decode.d6.loss_cls: 0.1096, decode.d6.loss_mask: 0.2365, decode.d6.loss_dice: 0.6487, decode.d7.loss_cls: 0.1140, decode.d7.loss_mask: 0.2361, decode.d7.loss_dice: 0.6495, decode.d8.loss_cls: 0.1057, decode.d8.loss_mask: 0.2358, decode.d8.loss_dice: 0.6482, loss: 10.3055 +2022-05-10 05:02:26,873 - mmseg - INFO - Iter [21400/80000] lr: 1.052e-06, eta: 1 day, 7:42:05, time: 1.807, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1164, decode.loss_mask: 0.2413, decode.loss_dice: 0.6592, decode.d0.loss_cls: 0.3320, decode.d0.loss_mask: 0.2536, decode.d0.loss_dice: 0.6994, decode.d1.loss_cls: 0.1358, decode.d1.loss_mask: 0.2443, decode.d1.loss_dice: 0.6715, decode.d2.loss_cls: 0.1314, decode.d2.loss_mask: 0.2428, decode.d2.loss_dice: 0.6608, decode.d3.loss_cls: 0.1122, decode.d3.loss_mask: 0.2426, decode.d3.loss_dice: 0.6628, decode.d4.loss_cls: 0.1157, decode.d4.loss_mask: 0.2419, decode.d4.loss_dice: 0.6633, decode.d5.loss_cls: 0.1077, decode.d5.loss_mask: 0.2412, decode.d5.loss_dice: 0.6591, decode.d6.loss_cls: 0.1103, decode.d6.loss_mask: 0.2424, decode.d6.loss_dice: 0.6585, decode.d7.loss_cls: 0.1107, decode.d7.loss_mask: 0.2418, decode.d7.loss_dice: 0.6593, decode.d8.loss_cls: 0.1147, decode.d8.loss_mask: 0.2420, decode.d8.loss_dice: 0.6587, loss: 10.4735 +2022-05-10 05:03:55,018 - mmseg - INFO - Iter [21450/80000] lr: 1.051e-06, eta: 1 day, 7:40:03, time: 1.763, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1172, decode.loss_mask: 0.2320, decode.loss_dice: 0.6436, 
decode.d0.loss_cls: 0.3432, decode.d0.loss_mask: 0.2439, decode.d0.loss_dice: 0.6929, decode.d1.loss_cls: 0.1439, decode.d1.loss_mask: 0.2345, decode.d1.loss_dice: 0.6536, decode.d2.loss_cls: 0.1231, decode.d2.loss_mask: 0.2335, decode.d2.loss_dice: 0.6522, decode.d3.loss_cls: 0.1223, decode.d3.loss_mask: 0.2320, decode.d3.loss_dice: 0.6470, decode.d4.loss_cls: 0.1204, decode.d4.loss_mask: 0.2326, decode.d4.loss_dice: 0.6423, decode.d5.loss_cls: 0.1225, decode.d5.loss_mask: 0.2314, decode.d5.loss_dice: 0.6424, decode.d6.loss_cls: 0.1182, decode.d6.loss_mask: 0.2315, decode.d6.loss_dice: 0.6445, decode.d7.loss_cls: 0.1161, decode.d7.loss_mask: 0.2317, decode.d7.loss_dice: 0.6437, decode.d8.loss_cls: 0.1180, decode.d8.loss_mask: 0.2316, decode.d8.loss_dice: 0.6439, loss: 10.2859 +2022-05-10 05:05:25,101 - mmseg - INFO - Iter [21500/80000] lr: 1.050e-06, eta: 1 day, 7:38:06, time: 1.801, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1124, decode.loss_mask: 0.2424, decode.loss_dice: 0.6617, decode.d0.loss_cls: 0.3247, decode.d0.loss_mask: 0.2545, decode.d0.loss_dice: 0.7078, decode.d1.loss_cls: 0.1258, decode.d1.loss_mask: 0.2445, decode.d1.loss_dice: 0.6742, decode.d2.loss_cls: 0.1168, decode.d2.loss_mask: 0.2423, decode.d2.loss_dice: 0.6704, decode.d3.loss_cls: 0.1148, decode.d3.loss_mask: 0.2429, decode.d3.loss_dice: 0.6648, decode.d4.loss_cls: 0.1200, decode.d4.loss_mask: 0.2428, decode.d4.loss_dice: 0.6633, decode.d5.loss_cls: 0.1108, decode.d5.loss_mask: 0.2428, decode.d5.loss_dice: 0.6637, decode.d6.loss_cls: 0.1178, decode.d6.loss_mask: 0.2430, decode.d6.loss_dice: 0.6581, decode.d7.loss_cls: 0.1150, decode.d7.loss_mask: 0.2416, decode.d7.loss_dice: 0.6542, decode.d8.loss_cls: 0.1119, decode.d8.loss_mask: 0.2427, decode.d8.loss_dice: 0.6620, loss: 10.4896 +2022-05-10 05:06:53,174 - mmseg - INFO - Iter [21550/80000] lr: 1.049e-06, eta: 1 day, 7:36:03, time: 1.761, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1184, decode.loss_mask: 0.2315, 
decode.loss_dice: 0.6538, decode.d0.loss_cls: 0.3410, decode.d0.loss_mask: 0.2428, decode.d0.loss_dice: 0.7015, decode.d1.loss_cls: 0.1455, decode.d1.loss_mask: 0.2342, decode.d1.loss_dice: 0.6760, decode.d2.loss_cls: 0.1305, decode.d2.loss_mask: 0.2322, decode.d2.loss_dice: 0.6637, decode.d3.loss_cls: 0.1259, decode.d3.loss_mask: 0.2324, decode.d3.loss_dice: 0.6560, decode.d4.loss_cls: 0.1318, decode.d4.loss_mask: 0.2327, decode.d4.loss_dice: 0.6593, decode.d5.loss_cls: 0.1234, decode.d5.loss_mask: 0.2317, decode.d5.loss_dice: 0.6585, decode.d6.loss_cls: 0.1209, decode.d6.loss_mask: 0.2326, decode.d6.loss_dice: 0.6585, decode.d7.loss_cls: 0.1190, decode.d7.loss_mask: 0.2323, decode.d7.loss_dice: 0.6558, decode.d8.loss_cls: 0.1247, decode.d8.loss_mask: 0.2318, decode.d8.loss_dice: 0.6578, loss: 10.4566 +2022-05-10 05:08:24,381 - mmseg - INFO - Iter [21600/80000] lr: 1.048e-06, eta: 1 day, 7:34:09, time: 1.824, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1129, decode.loss_mask: 0.2386, decode.loss_dice: 0.6416, decode.d0.loss_cls: 0.3271, decode.d0.loss_mask: 0.2494, decode.d0.loss_dice: 0.6882, decode.d1.loss_cls: 0.1329, decode.d1.loss_mask: 0.2412, decode.d1.loss_dice: 0.6547, decode.d2.loss_cls: 0.1273, decode.d2.loss_mask: 0.2383, decode.d2.loss_dice: 0.6522, decode.d3.loss_cls: 0.1171, decode.d3.loss_mask: 0.2391, decode.d3.loss_dice: 0.6412, decode.d4.loss_cls: 0.1188, decode.d4.loss_mask: 0.2383, decode.d4.loss_dice: 0.6404, decode.d5.loss_cls: 0.1166, decode.d5.loss_mask: 0.2385, decode.d5.loss_dice: 0.6418, decode.d6.loss_cls: 0.1134, decode.d6.loss_mask: 0.2386, decode.d6.loss_dice: 0.6377, decode.d7.loss_cls: 0.1152, decode.d7.loss_mask: 0.2388, decode.d7.loss_dice: 0.6408, decode.d8.loss_cls: 0.1158, decode.d8.loss_mask: 0.2383, decode.d8.loss_dice: 0.6415, loss: 10.2763 +2022-05-10 05:09:52,362 - mmseg - INFO - Iter [21650/80000] lr: 1.047e-06, eta: 1 day, 7:32:07, time: 1.761, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1038, 
decode.loss_mask: 0.2360, decode.loss_dice: 0.6436, decode.d0.loss_cls: 0.3201, decode.d0.loss_mask: 0.2461, decode.d0.loss_dice: 0.6872, decode.d1.loss_cls: 0.1235, decode.d1.loss_mask: 0.2392, decode.d1.loss_dice: 0.6642, decode.d2.loss_cls: 0.1150, decode.d2.loss_mask: 0.2380, decode.d2.loss_dice: 0.6535, decode.d3.loss_cls: 0.1130, decode.d3.loss_mask: 0.2374, decode.d3.loss_dice: 0.6459, decode.d4.loss_cls: 0.1116, decode.d4.loss_mask: 0.2360, decode.d4.loss_dice: 0.6445, decode.d5.loss_cls: 0.1132, decode.d5.loss_mask: 0.2360, decode.d5.loss_dice: 0.6507, decode.d6.loss_cls: 0.1042, decode.d6.loss_mask: 0.2362, decode.d6.loss_dice: 0.6520, decode.d7.loss_cls: 0.1127, decode.d7.loss_mask: 0.2362, decode.d7.loss_dice: 0.6488, decode.d8.loss_cls: 0.1051, decode.d8.loss_mask: 0.2358, decode.d8.loss_dice: 0.6508, loss: 10.2403 +2022-05-10 05:11:20,098 - mmseg - INFO - Iter [21700/80000] lr: 1.046e-06, eta: 1 day, 7:30:04, time: 1.755, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1200, decode.loss_mask: 0.2338, decode.loss_dice: 0.6633, decode.d0.loss_cls: 0.3437, decode.d0.loss_mask: 0.2458, decode.d0.loss_dice: 0.7090, decode.d1.loss_cls: 0.1464, decode.d1.loss_mask: 0.2351, decode.d1.loss_dice: 0.6821, decode.d2.loss_cls: 0.1333, decode.d2.loss_mask: 0.2333, decode.d2.loss_dice: 0.6705, decode.d3.loss_cls: 0.1313, decode.d3.loss_mask: 0.2337, decode.d3.loss_dice: 0.6648, decode.d4.loss_cls: 0.1272, decode.d4.loss_mask: 0.2334, decode.d4.loss_dice: 0.6707, decode.d5.loss_cls: 0.1305, decode.d5.loss_mask: 0.2335, decode.d5.loss_dice: 0.6651, decode.d6.loss_cls: 0.1248, decode.d6.loss_mask: 0.2337, decode.d6.loss_dice: 0.6659, decode.d7.loss_cls: 0.1311, decode.d7.loss_mask: 0.2338, decode.d7.loss_dice: 0.6625, decode.d8.loss_cls: 0.1213, decode.d8.loss_mask: 0.2344, decode.d8.loss_dice: 0.6619, loss: 10.5762 +2022-05-10 05:12:49,727 - mmseg - INFO - Iter [21750/80000] lr: 1.045e-06, eta: 1 day, 7:28:06, time: 1.793, data_time: 0.020, memory: 64699, 
decode.loss_cls: 0.1226, decode.loss_mask: 0.2391, decode.loss_dice: 0.6689, decode.d0.loss_cls: 0.3430, decode.d0.loss_mask: 0.2512, decode.d0.loss_dice: 0.7090, decode.d1.loss_cls: 0.1335, decode.d1.loss_mask: 0.2419, decode.d1.loss_dice: 0.6816, decode.d2.loss_cls: 0.1316, decode.d2.loss_mask: 0.2407, decode.d2.loss_dice: 0.6707, decode.d3.loss_cls: 0.1229, decode.d3.loss_mask: 0.2398, decode.d3.loss_dice: 0.6709, decode.d4.loss_cls: 0.1255, decode.d4.loss_mask: 0.2394, decode.d4.loss_dice: 0.6677, decode.d5.loss_cls: 0.1163, decode.d5.loss_mask: 0.2397, decode.d5.loss_dice: 0.6657, decode.d6.loss_cls: 0.1171, decode.d6.loss_mask: 0.2402, decode.d6.loss_dice: 0.6631, decode.d7.loss_cls: 0.1234, decode.d7.loss_mask: 0.2395, decode.d7.loss_dice: 0.6638, decode.d8.loss_cls: 0.1211, decode.d8.loss_mask: 0.2392, decode.d8.loss_dice: 0.6664, loss: 10.5956 +2022-05-10 05:14:20,992 - mmseg - INFO - Iter [21800/80000] lr: 1.045e-06, eta: 1 day, 7:26:13, time: 1.825, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0992, decode.loss_mask: 0.2327, decode.loss_dice: 0.6527, decode.d0.loss_cls: 0.3289, decode.d0.loss_mask: 0.2465, decode.d0.loss_dice: 0.6914, decode.d1.loss_cls: 0.1302, decode.d1.loss_mask: 0.2340, decode.d1.loss_dice: 0.6684, decode.d2.loss_cls: 0.1126, decode.d2.loss_mask: 0.2331, decode.d2.loss_dice: 0.6567, decode.d3.loss_cls: 0.1124, decode.d3.loss_mask: 0.2334, decode.d3.loss_dice: 0.6515, decode.d4.loss_cls: 0.1119, decode.d4.loss_mask: 0.2330, decode.d4.loss_dice: 0.6512, decode.d5.loss_cls: 0.1120, decode.d5.loss_mask: 0.2329, decode.d5.loss_dice: 0.6523, decode.d6.loss_cls: 0.1041, decode.d6.loss_mask: 0.2325, decode.d6.loss_dice: 0.6521, decode.d7.loss_cls: 0.1096, decode.d7.loss_mask: 0.2325, decode.d7.loss_dice: 0.6534, decode.d8.loss_cls: 0.1088, decode.d8.loss_mask: 0.2333, decode.d8.loss_dice: 0.6574, loss: 10.2609 +2022-05-10 05:15:49,083 - mmseg - INFO - Iter [21850/80000] lr: 1.044e-06, eta: 1 day, 7:24:11, time: 1.762, data_time: 
0.018, memory: 64699, decode.loss_cls: 0.1025, decode.loss_mask: 0.2386, decode.loss_dice: 0.6465, decode.d0.loss_cls: 0.3424, decode.d0.loss_mask: 0.2486, decode.d0.loss_dice: 0.6954, decode.d1.loss_cls: 0.1331, decode.d1.loss_mask: 0.2396, decode.d1.loss_dice: 0.6623, decode.d2.loss_cls: 0.1241, decode.d2.loss_mask: 0.2399, decode.d2.loss_dice: 0.6567, decode.d3.loss_cls: 0.1181, decode.d3.loss_mask: 0.2390, decode.d3.loss_dice: 0.6539, decode.d4.loss_cls: 0.1121, decode.d4.loss_mask: 0.2382, decode.d4.loss_dice: 0.6530, decode.d5.loss_cls: 0.1084, decode.d5.loss_mask: 0.2384, decode.d5.loss_dice: 0.6510, decode.d6.loss_cls: 0.1076, decode.d6.loss_mask: 0.2387, decode.d6.loss_dice: 0.6492, decode.d7.loss_cls: 0.1086, decode.d7.loss_mask: 0.2385, decode.d7.loss_dice: 0.6534, decode.d8.loss_cls: 0.1066, decode.d8.loss_mask: 0.2388, decode.d8.loss_dice: 0.6544, loss: 10.3377 +2022-05-10 05:17:17,592 - mmseg - INFO - Iter [21900/80000] lr: 1.043e-06, eta: 1 day, 7:22:11, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1044, decode.loss_mask: 0.2395, decode.loss_dice: 0.6620, decode.d0.loss_cls: 0.3221, decode.d0.loss_mask: 0.2494, decode.d0.loss_dice: 0.6976, decode.d1.loss_cls: 0.1334, decode.d1.loss_mask: 0.2408, decode.d1.loss_dice: 0.6715, decode.d2.loss_cls: 0.1172, decode.d2.loss_mask: 0.2393, decode.d2.loss_dice: 0.6637, decode.d3.loss_cls: 0.1042, decode.d3.loss_mask: 0.2393, decode.d3.loss_dice: 0.6643, decode.d4.loss_cls: 0.1095, decode.d4.loss_mask: 0.2398, decode.d4.loss_dice: 0.6595, decode.d5.loss_cls: 0.1072, decode.d5.loss_mask: 0.2388, decode.d5.loss_dice: 0.6593, decode.d6.loss_cls: 0.1090, decode.d6.loss_mask: 0.2383, decode.d6.loss_dice: 0.6564, decode.d7.loss_cls: 0.1090, decode.d7.loss_mask: 0.2390, decode.d7.loss_dice: 0.6615, decode.d8.loss_cls: 0.0978, decode.d8.loss_mask: 0.2399, decode.d8.loss_dice: 0.6610, loss: 10.3747 +2022-05-10 05:18:48,752 - mmseg - INFO - Iter [21950/80000] lr: 1.042e-06, eta: 1 day, 7:20:18, time: 
1.824, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1150, decode.loss_mask: 0.2288, decode.loss_dice: 0.6481, decode.d0.loss_cls: 0.3368, decode.d0.loss_mask: 0.2414, decode.d0.loss_dice: 0.6893, decode.d1.loss_cls: 0.1477, decode.d1.loss_mask: 0.2314, decode.d1.loss_dice: 0.6636, decode.d2.loss_cls: 0.1319, decode.d2.loss_mask: 0.2294, decode.d2.loss_dice: 0.6556, decode.d3.loss_cls: 0.1201, decode.d3.loss_mask: 0.2296, decode.d3.loss_dice: 0.6479, decode.d4.loss_cls: 0.1268, decode.d4.loss_mask: 0.2295, decode.d4.loss_dice: 0.6531, decode.d5.loss_cls: 0.1279, decode.d5.loss_mask: 0.2292, decode.d5.loss_dice: 0.6495, decode.d6.loss_cls: 0.1193, decode.d6.loss_mask: 0.2290, decode.d6.loss_dice: 0.6470, decode.d7.loss_cls: 0.1210, decode.d7.loss_mask: 0.2291, decode.d7.loss_dice: 0.6485, decode.d8.loss_cls: 0.1227, decode.d8.loss_mask: 0.2285, decode.d8.loss_dice: 0.6502, loss: 10.3281 +2022-05-10 05:20:17,308 - mmseg - INFO - Saving checkpoint at 22000 iterations +2022-05-10 05:20:47,158 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 05:20:47,167 - mmseg - INFO - Iter [22000/80000] lr: 1.041e-06, eta: 1 day, 7:19:36, time: 2.366, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1004, decode.loss_mask: 0.2392, decode.loss_dice: 0.6442, decode.d0.loss_cls: 0.3279, decode.d0.loss_mask: 0.2522, decode.d0.loss_dice: 0.6872, decode.d1.loss_cls: 0.1160, decode.d1.loss_mask: 0.2401, decode.d1.loss_dice: 0.6577, decode.d2.loss_cls: 0.1029, decode.d2.loss_mask: 0.2384, decode.d2.loss_dice: 0.6515, decode.d3.loss_cls: 0.1034, decode.d3.loss_mask: 0.2388, decode.d3.loss_dice: 0.6457, decode.d4.loss_cls: 0.0998, decode.d4.loss_mask: 0.2386, decode.d4.loss_dice: 0.6491, decode.d5.loss_cls: 0.0976, decode.d5.loss_mask: 0.2394, decode.d5.loss_dice: 0.6512, decode.d6.loss_cls: 0.0952, decode.d6.loss_mask: 0.2386, decode.d6.loss_dice: 0.6432, decode.d7.loss_cls: 0.0963, decode.d7.loss_mask: 0.2393, decode.d7.loss_dice: 
0.6460, decode.d8.loss_cls: 0.0976, decode.d8.loss_mask: 0.2385, decode.d8.loss_dice: 0.6446, loss: 10.1605 +2022-05-10 05:22:42,448 - mmseg - INFO - per class results: +2022-05-10 05:22:42,452 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.61 | 99.1 | +| sidewalk | 88.52 | 94.79 | +| building | 94.32 | 96.63 | +| wall | 65.84 | 82.12 | +| fence | 73.58 | 82.76 | +| pole | 71.4 | 84.15 | +| traffic light | 77.12 | 88.16 | +| traffic sign | 83.68 | 90.72 | +| vegetation | 93.39 | 97.27 | +| terrain | 66.9 | 74.65 | +| sky | 95.96 | 98.19 | +| person | 86.98 | 93.66 | +| rider | 74.96 | 85.67 | +| car | 96.29 | 98.33 | +| truck | 91.4 | 94.44 | +| bus | 93.81 | 96.95 | +| train | 87.82 | 90.78 | +| motorcycle | 76.45 | 87.05 | +| bicycle | 82.68 | 92.27 | ++---------------+-------+-------+ +2022-05-10 05:22:42,452 - mmseg - INFO - Summary: +2022-05-10 05:22:42,453 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 96.98 | 84.2 | 90.93 | ++-------+------+-------+ +2022-05-10 05:22:42,456 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 05:22:42,456 - mmseg - INFO - Iter(val) [32] aAcc: 0.9698, mIoU: 0.8420, mAcc: 0.9093, IoU.road: 0.9861, IoU.sidewalk: 0.8852, IoU.building: 0.9432, IoU.wall: 0.6584, IoU.fence: 0.7358, IoU.pole: 0.7140, IoU.traffic light: 0.7712, IoU.traffic sign: 0.8368, IoU.vegetation: 0.9339, IoU.terrain: 0.6690, IoU.sky: 0.9596, IoU.person: 0.8698, IoU.rider: 0.7496, IoU.car: 0.9629, IoU.truck: 0.9140, IoU.bus: 0.9381, IoU.train: 0.8782, IoU.motorcycle: 0.7645, IoU.bicycle: 0.8268, Acc.road: 0.9910, Acc.sidewalk: 0.9479, Acc.building: 0.9663, Acc.wall: 0.8212, Acc.fence: 0.8276, Acc.pole: 0.8415, Acc.traffic light: 0.8816, Acc.traffic sign: 0.9072, Acc.vegetation: 0.9727, Acc.terrain: 0.7465, Acc.sky: 0.9819, Acc.person: 0.9366, Acc.rider: 0.8567, Acc.car: 0.9833, Acc.truck: 0.9444, 
Acc.bus: 0.9695, Acc.train: 0.9078, Acc.motorcycle: 0.8705, Acc.bicycle: 0.9227 +2022-05-10 05:24:11,256 - mmseg - INFO - Iter [22050/80000] lr: 1.040e-06, eta: 1 day, 7:22:40, time: 4.084, data_time: 2.326, memory: 64699, decode.loss_cls: 0.1021, decode.loss_mask: 0.2296, decode.loss_dice: 0.6412, decode.d0.loss_cls: 0.3330, decode.d0.loss_mask: 0.2412, decode.d0.loss_dice: 0.6884, decode.d1.loss_cls: 0.1206, decode.d1.loss_mask: 0.2316, decode.d1.loss_dice: 0.6575, decode.d2.loss_cls: 0.1112, decode.d2.loss_mask: 0.2304, decode.d2.loss_dice: 0.6508, decode.d3.loss_cls: 0.1075, decode.d3.loss_mask: 0.2293, decode.d3.loss_dice: 0.6436, decode.d4.loss_cls: 0.1084, decode.d4.loss_mask: 0.2292, decode.d4.loss_dice: 0.6445, decode.d5.loss_cls: 0.1068, decode.d5.loss_mask: 0.2298, decode.d5.loss_dice: 0.6448, decode.d6.loss_cls: 0.1118, decode.d6.loss_mask: 0.2294, decode.d6.loss_dice: 0.6427, decode.d7.loss_cls: 0.1046, decode.d7.loss_mask: 0.2300, decode.d7.loss_dice: 0.6473, decode.d8.loss_cls: 0.1032, decode.d8.loss_mask: 0.2298, decode.d8.loss_dice: 0.6479, loss: 10.1281 +2022-05-10 05:25:39,942 - mmseg - INFO - Iter [22100/80000] lr: 1.039e-06, eta: 1 day, 7:20:40, time: 1.774, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1046, decode.loss_mask: 0.2399, decode.loss_dice: 0.6483, decode.d0.loss_cls: 0.3345, decode.d0.loss_mask: 0.2521, decode.d0.loss_dice: 0.6948, decode.d1.loss_cls: 0.1286, decode.d1.loss_mask: 0.2434, decode.d1.loss_dice: 0.6647, decode.d2.loss_cls: 0.1176, decode.d2.loss_mask: 0.2422, decode.d2.loss_dice: 0.6570, decode.d3.loss_cls: 0.1116, decode.d3.loss_mask: 0.2412, decode.d3.loss_dice: 0.6534, decode.d4.loss_cls: 0.1072, decode.d4.loss_mask: 0.2409, decode.d4.loss_dice: 0.6534, decode.d5.loss_cls: 0.1099, decode.d5.loss_mask: 0.2406, decode.d5.loss_dice: 0.6530, decode.d6.loss_cls: 0.1091, decode.d6.loss_mask: 0.2398, decode.d6.loss_dice: 0.6494, decode.d7.loss_cls: 0.1118, decode.d7.loss_mask: 0.2394, decode.d7.loss_dice: 0.6516, 
decode.d8.loss_cls: 0.1124, decode.d8.loss_mask: 0.2402, decode.d8.loss_dice: 0.6542, loss: 10.3465 +2022-05-10 05:27:10,714 - mmseg - INFO - Iter [22150/80000] lr: 1.038e-06, eta: 1 day, 7:18:45, time: 1.815, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1122, decode.loss_mask: 0.2315, decode.loss_dice: 0.6471, decode.d0.loss_cls: 0.3471, decode.d0.loss_mask: 0.2428, decode.d0.loss_dice: 0.6917, decode.d1.loss_cls: 0.1339, decode.d1.loss_mask: 0.2341, decode.d1.loss_dice: 0.6624, decode.d2.loss_cls: 0.1246, decode.d2.loss_mask: 0.2320, decode.d2.loss_dice: 0.6543, decode.d3.loss_cls: 0.1181, decode.d3.loss_mask: 0.2328, decode.d3.loss_dice: 0.6461, decode.d4.loss_cls: 0.1122, decode.d4.loss_mask: 0.2322, decode.d4.loss_dice: 0.6479, decode.d5.loss_cls: 0.1151, decode.d5.loss_mask: 0.2319, decode.d5.loss_dice: 0.6472, decode.d6.loss_cls: 0.1129, decode.d6.loss_mask: 0.2321, decode.d6.loss_dice: 0.6421, decode.d7.loss_cls: 0.1158, decode.d7.loss_mask: 0.2319, decode.d7.loss_dice: 0.6459, decode.d8.loss_cls: 0.1100, decode.d8.loss_mask: 0.2317, decode.d8.loss_dice: 0.6483, loss: 10.2679 +2022-05-10 05:28:40,005 - mmseg - INFO - Iter [22200/80000] lr: 1.037e-06, eta: 1 day, 7:16:47, time: 1.786, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1102, decode.loss_mask: 0.2320, decode.loss_dice: 0.6392, decode.d0.loss_cls: 0.3242, decode.d0.loss_mask: 0.2414, decode.d0.loss_dice: 0.6801, decode.d1.loss_cls: 0.1333, decode.d1.loss_mask: 0.2328, decode.d1.loss_dice: 0.6569, decode.d2.loss_cls: 0.1245, decode.d2.loss_mask: 0.2313, decode.d2.loss_dice: 0.6445, decode.d3.loss_cls: 0.1199, decode.d3.loss_mask: 0.2313, decode.d3.loss_dice: 0.6439, decode.d4.loss_cls: 0.1124, decode.d4.loss_mask: 0.2320, decode.d4.loss_dice: 0.6432, decode.d5.loss_cls: 0.1151, decode.d5.loss_mask: 0.2316, decode.d5.loss_dice: 0.6409, decode.d6.loss_cls: 0.1085, decode.d6.loss_mask: 0.2321, decode.d6.loss_dice: 0.6425, decode.d7.loss_cls: 0.1088, decode.d7.loss_mask: 0.2311, 
decode.d7.loss_dice: 0.6403, decode.d8.loss_cls: 0.1078, decode.d8.loss_mask: 0.2319, decode.d8.loss_dice: 0.6456, loss: 10.1693 +2022-05-10 05:30:08,521 - mmseg - INFO - Iter [22250/80000] lr: 1.036e-06, eta: 1 day, 7:14:46, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1170, decode.loss_mask: 0.2420, decode.loss_dice: 0.6519, decode.d0.loss_cls: 0.3399, decode.d0.loss_mask: 0.2566, decode.d0.loss_dice: 0.6988, decode.d1.loss_cls: 0.1393, decode.d1.loss_mask: 0.2455, decode.d1.loss_dice: 0.6641, decode.d2.loss_cls: 0.1309, decode.d2.loss_mask: 0.2445, decode.d2.loss_dice: 0.6582, decode.d3.loss_cls: 0.1256, decode.d3.loss_mask: 0.2439, decode.d3.loss_dice: 0.6538, decode.d4.loss_cls: 0.1177, decode.d4.loss_mask: 0.2426, decode.d4.loss_dice: 0.6519, decode.d5.loss_cls: 0.1234, decode.d5.loss_mask: 0.2432, decode.d5.loss_dice: 0.6529, decode.d6.loss_cls: 0.1165, decode.d6.loss_mask: 0.2423, decode.d6.loss_dice: 0.6532, decode.d7.loss_cls: 0.1204, decode.d7.loss_mask: 0.2418, decode.d7.loss_dice: 0.6524, decode.d8.loss_cls: 0.1217, decode.d8.loss_mask: 0.2421, decode.d8.loss_dice: 0.6510, loss: 10.4851 +2022-05-10 05:31:35,569 - mmseg - INFO - Iter [22300/80000] lr: 1.036e-06, eta: 1 day, 7:12:42, time: 1.741, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1133, decode.loss_mask: 0.2360, decode.loss_dice: 0.6572, decode.d0.loss_cls: 0.3304, decode.d0.loss_mask: 0.2475, decode.d0.loss_dice: 0.7037, decode.d1.loss_cls: 0.1356, decode.d1.loss_mask: 0.2379, decode.d1.loss_dice: 0.6719, decode.d2.loss_cls: 0.1248, decode.d2.loss_mask: 0.2371, decode.d2.loss_dice: 0.6658, decode.d3.loss_cls: 0.1248, decode.d3.loss_mask: 0.2363, decode.d3.loss_dice: 0.6629, decode.d4.loss_cls: 0.1167, decode.d4.loss_mask: 0.2361, decode.d4.loss_dice: 0.6605, decode.d5.loss_cls: 0.1146, decode.d5.loss_mask: 0.2364, decode.d5.loss_dice: 0.6546, decode.d6.loss_cls: 0.1187, decode.d6.loss_mask: 0.2358, decode.d6.loss_dice: 0.6543, decode.d7.loss_cls: 0.1173, 
decode.d7.loss_mask: 0.2354, decode.d7.loss_dice: 0.6585, decode.d8.loss_cls: 0.1092, decode.d8.loss_mask: 0.2356, decode.d8.loss_dice: 0.6575, loss: 10.4266 +2022-05-10 05:33:07,302 - mmseg - INFO - Iter [22350/80000] lr: 1.035e-06, eta: 1 day, 7:10:50, time: 1.834, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1097, decode.loss_mask: 0.2299, decode.loss_dice: 0.6458, decode.d0.loss_cls: 0.3261, decode.d0.loss_mask: 0.2421, decode.d0.loss_dice: 0.6902, decode.d1.loss_cls: 0.1300, decode.d1.loss_mask: 0.2327, decode.d1.loss_dice: 0.6621, decode.d2.loss_cls: 0.1248, decode.d2.loss_mask: 0.2310, decode.d2.loss_dice: 0.6581, decode.d3.loss_cls: 0.1163, decode.d3.loss_mask: 0.2307, decode.d3.loss_dice: 0.6494, decode.d4.loss_cls: 0.1227, decode.d4.loss_mask: 0.2304, decode.d4.loss_dice: 0.6512, decode.d5.loss_cls: 0.1154, decode.d5.loss_mask: 0.2297, decode.d5.loss_dice: 0.6516, decode.d6.loss_cls: 0.1202, decode.d6.loss_mask: 0.2304, decode.d6.loss_dice: 0.6512, decode.d7.loss_cls: 0.1123, decode.d7.loss_mask: 0.2307, decode.d7.loss_dice: 0.6515, decode.d8.loss_cls: 0.1108, decode.d8.loss_mask: 0.2307, decode.d8.loss_dice: 0.6524, loss: 10.2701 +2022-05-10 05:34:35,044 - mmseg - INFO - Iter [22400/80000] lr: 1.034e-06, eta: 1 day, 7:08:48, time: 1.756, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1000, decode.loss_mask: 0.2349, decode.loss_dice: 0.6300, decode.d0.loss_cls: 0.3309, decode.d0.loss_mask: 0.2464, decode.d0.loss_dice: 0.6755, decode.d1.loss_cls: 0.1293, decode.d1.loss_mask: 0.2379, decode.d1.loss_dice: 0.6488, decode.d2.loss_cls: 0.1200, decode.d2.loss_mask: 0.2367, decode.d2.loss_dice: 0.6360, decode.d3.loss_cls: 0.1058, decode.d3.loss_mask: 0.2359, decode.d3.loss_dice: 0.6333, decode.d4.loss_cls: 0.1050, decode.d4.loss_mask: 0.2350, decode.d4.loss_dice: 0.6342, decode.d5.loss_cls: 0.1067, decode.d5.loss_mask: 0.2347, decode.d5.loss_dice: 0.6324, decode.d6.loss_cls: 0.1065, decode.d6.loss_mask: 0.2348, decode.d6.loss_dice: 0.6346, 
decode.d7.loss_cls: 0.1097, decode.d7.loss_mask: 0.2348, decode.d7.loss_dice: 0.6300, decode.d8.loss_cls: 0.1034, decode.d8.loss_mask: 0.2345, decode.d8.loss_dice: 0.6331, loss: 10.0707 +2022-05-10 05:36:02,923 - mmseg - INFO - Iter [22450/80000] lr: 1.033e-06, eta: 1 day, 7:06:46, time: 1.758, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1068, decode.loss_mask: 0.2338, decode.loss_dice: 0.6441, decode.d0.loss_cls: 0.3327, decode.d0.loss_mask: 0.2440, decode.d0.loss_dice: 0.6834, decode.d1.loss_cls: 0.1173, decode.d1.loss_mask: 0.2358, decode.d1.loss_dice: 0.6523, decode.d2.loss_cls: 0.1161, decode.d2.loss_mask: 0.2350, decode.d2.loss_dice: 0.6497, decode.d3.loss_cls: 0.1139, decode.d3.loss_mask: 0.2343, decode.d3.loss_dice: 0.6421, decode.d4.loss_cls: 0.1110, decode.d4.loss_mask: 0.2347, decode.d4.loss_dice: 0.6449, decode.d5.loss_cls: 0.1092, decode.d5.loss_mask: 0.2346, decode.d5.loss_dice: 0.6438, decode.d6.loss_cls: 0.1045, decode.d6.loss_mask: 0.2341, decode.d6.loss_dice: 0.6414, decode.d7.loss_cls: 0.1089, decode.d7.loss_mask: 0.2339, decode.d7.loss_dice: 0.6429, decode.d8.loss_cls: 0.1051, decode.d8.loss_mask: 0.2341, decode.d8.loss_dice: 0.6428, loss: 10.1672 +2022-05-10 05:37:30,983 - mmseg - INFO - Iter [22500/80000] lr: 1.032e-06, eta: 1 day, 7:04:45, time: 1.761, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1162, decode.loss_mask: 0.2357, decode.loss_dice: 0.6566, decode.d0.loss_cls: 0.3373, decode.d0.loss_mask: 0.2460, decode.d0.loss_dice: 0.7025, decode.d1.loss_cls: 0.1385, decode.d1.loss_mask: 0.2372, decode.d1.loss_dice: 0.6695, decode.d2.loss_cls: 0.1318, decode.d2.loss_mask: 0.2369, decode.d2.loss_dice: 0.6679, decode.d3.loss_cls: 0.1193, decode.d3.loss_mask: 0.2364, decode.d3.loss_dice: 0.6589, decode.d4.loss_cls: 0.1211, decode.d4.loss_mask: 0.2361, decode.d4.loss_dice: 0.6594, decode.d5.loss_cls: 0.1280, decode.d5.loss_mask: 0.2355, decode.d5.loss_dice: 0.6583, decode.d6.loss_cls: 0.1200, decode.d6.loss_mask: 0.2360, 
decode.d6.loss_dice: 0.6621, decode.d7.loss_cls: 0.1210, decode.d7.loss_mask: 0.2356, decode.d7.loss_dice: 0.6560, decode.d8.loss_cls: 0.1136, decode.d8.loss_mask: 0.2355, decode.d8.loss_dice: 0.6582, loss: 10.4671 +2022-05-10 05:39:00,957 - mmseg - INFO - Iter [22550/80000] lr: 1.031e-06, eta: 1 day, 7:02:49, time: 1.799, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0904, decode.loss_mask: 0.2389, decode.loss_dice: 0.6491, decode.d0.loss_cls: 0.3283, decode.d0.loss_mask: 0.2513, decode.d0.loss_dice: 0.6962, decode.d1.loss_cls: 0.1071, decode.d1.loss_mask: 0.2397, decode.d1.loss_dice: 0.6618, decode.d2.loss_cls: 0.1005, decode.d2.loss_mask: 0.2390, decode.d2.loss_dice: 0.6569, decode.d3.loss_cls: 0.0956, decode.d3.loss_mask: 0.2387, decode.d3.loss_dice: 0.6499, decode.d4.loss_cls: 0.0961, decode.d4.loss_mask: 0.2380, decode.d4.loss_dice: 0.6489, decode.d5.loss_cls: 0.1004, decode.d5.loss_mask: 0.2400, decode.d5.loss_dice: 0.6534, decode.d6.loss_cls: 0.0963, decode.d6.loss_mask: 0.2393, decode.d6.loss_dice: 0.6478, decode.d7.loss_cls: 0.0966, decode.d7.loss_mask: 0.2392, decode.d7.loss_dice: 0.6494, decode.d8.loss_cls: 0.0965, decode.d8.loss_mask: 0.2390, decode.d8.loss_dice: 0.6513, loss: 10.1755 +2022-05-10 05:40:29,705 - mmseg - INFO - Iter [22600/80000] lr: 1.030e-06, eta: 1 day, 7:00:50, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1112, decode.loss_mask: 0.2291, decode.loss_dice: 0.6402, decode.d0.loss_cls: 0.3352, decode.d0.loss_mask: 0.2411, decode.d0.loss_dice: 0.6853, decode.d1.loss_cls: 0.1256, decode.d1.loss_mask: 0.2322, decode.d1.loss_dice: 0.6528, decode.d2.loss_cls: 0.1158, decode.d2.loss_mask: 0.2307, decode.d2.loss_dice: 0.6514, decode.d3.loss_cls: 0.1192, decode.d3.loss_mask: 0.2301, decode.d3.loss_dice: 0.6429, decode.d4.loss_cls: 0.1112, decode.d4.loss_mask: 0.2295, decode.d4.loss_dice: 0.6398, decode.d5.loss_cls: 0.1114, decode.d5.loss_mask: 0.2292, decode.d5.loss_dice: 0.6397, decode.d6.loss_cls: 0.1075, 
decode.d6.loss_mask: 0.2299, decode.d6.loss_dice: 0.6390, decode.d7.loss_cls: 0.1176, decode.d7.loss_mask: 0.2293, decode.d7.loss_dice: 0.6364, decode.d8.loss_cls: 0.1098, decode.d8.loss_mask: 0.2285, decode.d8.loss_dice: 0.6373, loss: 10.1392 +2022-05-10 05:41:58,557 - mmseg - INFO - Iter [22650/80000] lr: 1.029e-06, eta: 1 day, 6:58:52, time: 1.777, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1086, decode.loss_mask: 0.2262, decode.loss_dice: 0.6402, decode.d0.loss_cls: 0.3233, decode.d0.loss_mask: 0.2367, decode.d0.loss_dice: 0.6815, decode.d1.loss_cls: 0.1176, decode.d1.loss_mask: 0.2287, decode.d1.loss_dice: 0.6523, decode.d2.loss_cls: 0.1077, decode.d2.loss_mask: 0.2278, decode.d2.loss_dice: 0.6469, decode.d3.loss_cls: 0.1017, decode.d3.loss_mask: 0.2276, decode.d3.loss_dice: 0.6390, decode.d4.loss_cls: 0.1029, decode.d4.loss_mask: 0.2271, decode.d4.loss_dice: 0.6430, decode.d5.loss_cls: 0.1008, decode.d5.loss_mask: 0.2264, decode.d5.loss_dice: 0.6389, decode.d6.loss_cls: 0.1035, decode.d6.loss_mask: 0.2263, decode.d6.loss_dice: 0.6375, decode.d7.loss_cls: 0.0983, decode.d7.loss_mask: 0.2267, decode.d7.loss_dice: 0.6365, decode.d8.loss_cls: 0.1007, decode.d8.loss_mask: 0.2262, decode.d8.loss_dice: 0.6397, loss: 10.0003 +2022-05-10 05:43:29,945 - mmseg - INFO - Iter [22700/80000] lr: 1.028e-06, eta: 1 day, 6:57:00, time: 1.827, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1029, decode.loss_mask: 0.2261, decode.loss_dice: 0.6266, decode.d0.loss_cls: 0.3242, decode.d0.loss_mask: 0.2380, decode.d0.loss_dice: 0.6689, decode.d1.loss_cls: 0.1223, decode.d1.loss_mask: 0.2273, decode.d1.loss_dice: 0.6426, decode.d2.loss_cls: 0.1202, decode.d2.loss_mask: 0.2268, decode.d2.loss_dice: 0.6311, decode.d3.loss_cls: 0.1139, decode.d3.loss_mask: 0.2263, decode.d3.loss_dice: 0.6273, decode.d4.loss_cls: 0.1156, decode.d4.loss_mask: 0.2265, decode.d4.loss_dice: 0.6296, decode.d5.loss_cls: 0.1146, decode.d5.loss_mask: 0.2266, decode.d5.loss_dice: 0.6282, 
decode.d6.loss_cls: 0.1095, decode.d6.loss_mask: 0.2259, decode.d6.loss_dice: 0.6259, decode.d7.loss_cls: 0.1142, decode.d7.loss_mask: 0.2263, decode.d7.loss_dice: 0.6310, decode.d8.loss_cls: 0.1127, decode.d8.loss_mask: 0.2266, decode.d8.loss_dice: 0.6274, loss: 9.9650 +2022-05-10 05:44:59,248 - mmseg - INFO - Iter [22750/80000] lr: 1.028e-06, eta: 1 day, 6:55:03, time: 1.786, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1065, decode.loss_mask: 0.2376, decode.loss_dice: 0.6383, decode.d0.loss_cls: 0.3396, decode.d0.loss_mask: 0.2496, decode.d0.loss_dice: 0.6761, decode.d1.loss_cls: 0.1223, decode.d1.loss_mask: 0.2410, decode.d1.loss_dice: 0.6520, decode.d2.loss_cls: 0.1144, decode.d2.loss_mask: 0.2396, decode.d2.loss_dice: 0.6464, decode.d3.loss_cls: 0.1084, decode.d3.loss_mask: 0.2388, decode.d3.loss_dice: 0.6426, decode.d4.loss_cls: 0.1108, decode.d4.loss_mask: 0.2392, decode.d4.loss_dice: 0.6394, decode.d5.loss_cls: 0.1060, decode.d5.loss_mask: 0.2383, decode.d5.loss_dice: 0.6392, decode.d6.loss_cls: 0.1076, decode.d6.loss_mask: 0.2390, decode.d6.loss_dice: 0.6378, decode.d7.loss_cls: 0.1026, decode.d7.loss_mask: 0.2380, decode.d7.loss_dice: 0.6407, decode.d8.loss_cls: 0.1070, decode.d8.loss_mask: 0.2378, decode.d8.loss_dice: 0.6380, loss: 10.1748 +2022-05-10 05:46:27,317 - mmseg - INFO - Iter [22800/80000] lr: 1.027e-06, eta: 1 day, 6:53:03, time: 1.762, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1043, decode.loss_mask: 0.2349, decode.loss_dice: 0.6499, decode.d0.loss_cls: 0.3432, decode.d0.loss_mask: 0.2483, decode.d0.loss_dice: 0.6885, decode.d1.loss_cls: 0.1308, decode.d1.loss_mask: 0.2365, decode.d1.loss_dice: 0.6589, decode.d2.loss_cls: 0.1168, decode.d2.loss_mask: 0.2367, decode.d2.loss_dice: 0.6568, decode.d3.loss_cls: 0.1159, decode.d3.loss_mask: 0.2358, decode.d3.loss_dice: 0.6480, decode.d4.loss_cls: 0.1100, decode.d4.loss_mask: 0.2364, decode.d4.loss_dice: 0.6451, decode.d5.loss_cls: 0.1137, decode.d5.loss_mask: 0.2354, 
decode.d5.loss_dice: 0.6493, decode.d6.loss_cls: 0.1087, decode.d6.loss_mask: 0.2348, decode.d6.loss_dice: 0.6452, decode.d7.loss_cls: 0.1018, decode.d7.loss_mask: 0.2352, decode.d7.loss_dice: 0.6460, decode.d8.loss_cls: 0.1055, decode.d8.loss_mask: 0.2356, decode.d8.loss_dice: 0.6435, loss: 10.2516 +2022-05-10 05:47:54,561 - mmseg - INFO - Iter [22850/80000] lr: 1.026e-06, eta: 1 day, 6:51:01, time: 1.745, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1100, decode.loss_mask: 0.2383, decode.loss_dice: 0.6424, decode.d0.loss_cls: 0.3317, decode.d0.loss_mask: 0.2505, decode.d0.loss_dice: 0.6897, decode.d1.loss_cls: 0.1308, decode.d1.loss_mask: 0.2410, decode.d1.loss_dice: 0.6538, decode.d2.loss_cls: 0.1160, decode.d2.loss_mask: 0.2402, decode.d2.loss_dice: 0.6552, decode.d3.loss_cls: 0.1076, decode.d3.loss_mask: 0.2394, decode.d3.loss_dice: 0.6430, decode.d4.loss_cls: 0.1138, decode.d4.loss_mask: 0.2387, decode.d4.loss_dice: 0.6435, decode.d5.loss_cls: 0.1123, decode.d5.loss_mask: 0.2385, decode.d5.loss_dice: 0.6478, decode.d6.loss_cls: 0.1035, decode.d6.loss_mask: 0.2385, decode.d6.loss_dice: 0.6449, decode.d7.loss_cls: 0.1005, decode.d7.loss_mask: 0.2390, decode.d7.loss_dice: 0.6435, decode.d8.loss_cls: 0.1087, decode.d8.loss_mask: 0.2385, decode.d8.loss_dice: 0.6431, loss: 10.2444 +2022-05-10 05:49:24,310 - mmseg - INFO - Iter [22900/80000] lr: 1.025e-06, eta: 1 day, 6:49:05, time: 1.795, data_time: 0.065, memory: 64699, decode.loss_cls: 0.1050, decode.loss_mask: 0.2344, decode.loss_dice: 0.6671, decode.d0.loss_cls: 0.3394, decode.d0.loss_mask: 0.2451, decode.d0.loss_dice: 0.7073, decode.d1.loss_cls: 0.1324, decode.d1.loss_mask: 0.2361, decode.d1.loss_dice: 0.6776, decode.d2.loss_cls: 0.1173, decode.d2.loss_mask: 0.2353, decode.d2.loss_dice: 0.6701, decode.d3.loss_cls: 0.1077, decode.d3.loss_mask: 0.2348, decode.d3.loss_dice: 0.6673, decode.d4.loss_cls: 0.1008, decode.d4.loss_mask: 0.2346, decode.d4.loss_dice: 0.6683, decode.d5.loss_cls: 0.1048, 
decode.d5.loss_mask: 0.2348, decode.d5.loss_dice: 0.6650, decode.d6.loss_cls: 0.1000, decode.d6.loss_mask: 0.2346, decode.d6.loss_dice: 0.6625, decode.d7.loss_cls: 0.1012, decode.d7.loss_mask: 0.2343, decode.d7.loss_dice: 0.6674, decode.d8.loss_cls: 0.1021, decode.d8.loss_mask: 0.2343, decode.d8.loss_dice: 0.6667, loss: 10.3882 +2022-05-10 05:50:52,093 - mmseg - INFO - Iter [22950/80000] lr: 1.024e-06, eta: 1 day, 6:47:04, time: 1.755, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1122, decode.loss_mask: 0.2317, decode.loss_dice: 0.6520, decode.d0.loss_cls: 0.3418, decode.d0.loss_mask: 0.2409, decode.d0.loss_dice: 0.6981, decode.d1.loss_cls: 0.1331, decode.d1.loss_mask: 0.2352, decode.d1.loss_dice: 0.6629, decode.d2.loss_cls: 0.1223, decode.d2.loss_mask: 0.2319, decode.d2.loss_dice: 0.6573, decode.d3.loss_cls: 0.1226, decode.d3.loss_mask: 0.2324, decode.d3.loss_dice: 0.6552, decode.d4.loss_cls: 0.1190, decode.d4.loss_mask: 0.2324, decode.d4.loss_dice: 0.6556, decode.d5.loss_cls: 0.1171, decode.d5.loss_mask: 0.2322, decode.d5.loss_dice: 0.6582, decode.d6.loss_cls: 0.1202, decode.d6.loss_mask: 0.2319, decode.d6.loss_dice: 0.6577, decode.d7.loss_cls: 0.1188, decode.d7.loss_mask: 0.2317, decode.d7.loss_dice: 0.6527, decode.d8.loss_cls: 0.1201, decode.d8.loss_mask: 0.2322, decode.d8.loss_dice: 0.6553, loss: 10.3646 +2022-05-10 05:52:20,199 - mmseg - INFO - Saving checkpoint at 23000 iterations +2022-05-10 05:52:53,137 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 05:52:53,145 - mmseg - INFO - Iter [23000/80000] lr: 1.023e-06, eta: 1 day, 6:46:26, time: 2.419, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1097, decode.loss_mask: 0.2297, decode.loss_dice: 0.6424, decode.d0.loss_cls: 0.3273, decode.d0.loss_mask: 0.2403, decode.d0.loss_dice: 0.6838, decode.d1.loss_cls: 0.1281, decode.d1.loss_mask: 0.2320, decode.d1.loss_dice: 0.6603, decode.d2.loss_cls: 0.1190, decode.d2.loss_mask: 0.2296, decode.d2.loss_dice: 
0.6522, decode.d3.loss_cls: 0.1128, decode.d3.loss_mask: 0.2302, decode.d3.loss_dice: 0.6434, decode.d4.loss_cls: 0.1049, decode.d4.loss_mask: 0.2303, decode.d4.loss_dice: 0.6433, decode.d5.loss_cls: 0.1090, decode.d5.loss_mask: 0.2302, decode.d5.loss_dice: 0.6458, decode.d6.loss_cls: 0.1038, decode.d6.loss_mask: 0.2304, decode.d6.loss_dice: 0.6438, decode.d7.loss_cls: 0.1061, decode.d7.loss_mask: 0.2300, decode.d7.loss_dice: 0.6430, decode.d8.loss_cls: 0.1113, decode.d8.loss_mask: 0.2298, decode.d8.loss_dice: 0.6444, loss: 10.1469 +2022-05-10 05:54:48,227 - mmseg - INFO - per class results: +2022-05-10 05:54:48,232 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.66 | 99.19 | +| sidewalk | 89.04 | 94.42 | +| building | 94.48 | 96.76 | +| wall | 66.46 | 82.63 | +| fence | 73.56 | 82.17 | +| pole | 71.62 | 84.2 | +| traffic light | 77.4 | 87.58 | +| traffic sign | 83.86 | 91.43 | +| vegetation | 93.48 | 97.15 | +| terrain | 68.71 | 77.4 | +| sky | 95.75 | 98.66 | +| person | 86.94 | 94.25 | +| rider | 74.12 | 85.49 | +| car | 96.38 | 98.3 | +| truck | 91.59 | 96.23 | +| bus | 90.86 | 97.19 | +| train | 78.81 | 81.31 | +| motorcycle | 76.84 | 87.19 | +| bicycle | 83.0 | 91.25 | ++---------------+-------+-------+ +2022-05-10 05:54:48,232 - mmseg - INFO - Summary: +2022-05-10 05:54:48,232 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.04 | 83.77 | 90.67 | ++-------+-------+-------+ +2022-05-10 05:54:48,237 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 05:54:48,237 - mmseg - INFO - Iter(val) [32] aAcc: 0.9704, mIoU: 0.8377, mAcc: 0.9067, IoU.road: 0.9866, IoU.sidewalk: 0.8904, IoU.building: 0.9448, IoU.wall: 0.6646, IoU.fence: 0.7356, IoU.pole: 0.7162, IoU.traffic light: 0.7740, IoU.traffic sign: 0.8386, IoU.vegetation: 0.9348, IoU.terrain: 0.6871, IoU.sky: 0.9575, IoU.person: 0.8694, IoU.rider: 
0.7412, IoU.car: 0.9638, IoU.truck: 0.9159, IoU.bus: 0.9086, IoU.train: 0.7881, IoU.motorcycle: 0.7684, IoU.bicycle: 0.8300, Acc.road: 0.9919, Acc.sidewalk: 0.9442, Acc.building: 0.9676, Acc.wall: 0.8263, Acc.fence: 0.8217, Acc.pole: 0.8420, Acc.traffic light: 0.8758, Acc.traffic sign: 0.9143, Acc.vegetation: 0.9715, Acc.terrain: 0.7740, Acc.sky: 0.9866, Acc.person: 0.9425, Acc.rider: 0.8549, Acc.car: 0.9830, Acc.truck: 0.9623, Acc.bus: 0.9719, Acc.train: 0.8131, Acc.motorcycle: 0.8719, Acc.bicycle: 0.9125 +2022-05-10 05:56:17,213 - mmseg - INFO - Iter [23050/80000] lr: 1.022e-06, eta: 1 day, 6:49:13, time: 4.084, data_time: 2.322, memory: 64699, decode.loss_cls: 0.1032, decode.loss_mask: 0.2401, decode.loss_dice: 0.6298, decode.d0.loss_cls: 0.3402, decode.d0.loss_mask: 0.2504, decode.d0.loss_dice: 0.6684, decode.d1.loss_cls: 0.1260, decode.d1.loss_mask: 0.2404, decode.d1.loss_dice: 0.6449, decode.d2.loss_cls: 0.1123, decode.d2.loss_mask: 0.2390, decode.d2.loss_dice: 0.6331, decode.d3.loss_cls: 0.1050, decode.d3.loss_mask: 0.2384, decode.d3.loss_dice: 0.6304, decode.d4.loss_cls: 0.1063, decode.d4.loss_mask: 0.2387, decode.d4.loss_dice: 0.6340, decode.d5.loss_cls: 0.1114, decode.d5.loss_mask: 0.2384, decode.d5.loss_dice: 0.6296, decode.d6.loss_cls: 0.1044, decode.d6.loss_mask: 0.2392, decode.d6.loss_dice: 0.6248, decode.d7.loss_cls: 0.1000, decode.d7.loss_mask: 0.2399, decode.d7.loss_dice: 0.6293, decode.d8.loss_cls: 0.1005, decode.d8.loss_mask: 0.2392, decode.d8.loss_dice: 0.6301, loss: 10.0675 +2022-05-10 05:57:47,797 - mmseg - INFO - Iter [23100/80000] lr: 1.021e-06, eta: 1 day, 6:47:19, time: 1.812, data_time: 0.067, memory: 64699, decode.loss_cls: 0.1142, decode.loss_mask: 0.2295, decode.loss_dice: 0.6467, decode.d0.loss_cls: 0.3357, decode.d0.loss_mask: 0.2414, decode.d0.loss_dice: 0.6929, decode.d1.loss_cls: 0.1307, decode.d1.loss_mask: 0.2335, decode.d1.loss_dice: 0.6602, decode.d2.loss_cls: 0.1226, decode.d2.loss_mask: 0.2316, decode.d2.loss_dice: 0.6551, 
decode.d3.loss_cls: 0.1158, decode.d3.loss_mask: 0.2310, decode.d3.loss_dice: 0.6481, decode.d4.loss_cls: 0.1121, decode.d4.loss_mask: 0.2307, decode.d4.loss_dice: 0.6478, decode.d5.loss_cls: 0.1155, decode.d5.loss_mask: 0.2303, decode.d5.loss_dice: 0.6529, decode.d6.loss_cls: 0.1145, decode.d6.loss_mask: 0.2306, decode.d6.loss_dice: 0.6497, decode.d7.loss_cls: 0.1156, decode.d7.loss_mask: 0.2304, decode.d7.loss_dice: 0.6448, decode.d8.loss_cls: 0.1136, decode.d8.loss_mask: 0.2299, decode.d8.loss_dice: 0.6508, loss: 10.2583 +2022-05-10 05:59:16,164 - mmseg - INFO - Iter [23150/80000] lr: 1.020e-06, eta: 1 day, 6:45:20, time: 1.767, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1142, decode.loss_mask: 0.2249, decode.loss_dice: 0.6364, decode.d0.loss_cls: 0.3277, decode.d0.loss_mask: 0.2337, decode.d0.loss_dice: 0.6829, decode.d1.loss_cls: 0.1307, decode.d1.loss_mask: 0.2258, decode.d1.loss_dice: 0.6515, decode.d2.loss_cls: 0.1196, decode.d2.loss_mask: 0.2249, decode.d2.loss_dice: 0.6475, decode.d3.loss_cls: 0.1109, decode.d3.loss_mask: 0.2247, decode.d3.loss_dice: 0.6369, decode.d4.loss_cls: 0.1160, decode.d4.loss_mask: 0.2239, decode.d4.loss_dice: 0.6405, decode.d5.loss_cls: 0.1111, decode.d5.loss_mask: 0.2245, decode.d5.loss_dice: 0.6394, decode.d6.loss_cls: 0.1066, decode.d6.loss_mask: 0.2250, decode.d6.loss_dice: 0.6434, decode.d7.loss_cls: 0.1169, decode.d7.loss_mask: 0.2250, decode.d7.loss_dice: 0.6399, decode.d8.loss_cls: 0.1098, decode.d8.loss_mask: 0.2245, decode.d8.loss_dice: 0.6418, loss: 10.0806 +2022-05-10 06:00:45,130 - mmseg - INFO - Iter [23200/80000] lr: 1.019e-06, eta: 1 day, 6:43:22, time: 1.779, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1136, decode.loss_mask: 0.2283, decode.loss_dice: 0.6367, decode.d0.loss_cls: 0.3457, decode.d0.loss_mask: 0.2400, decode.d0.loss_dice: 0.6880, decode.d1.loss_cls: 0.1354, decode.d1.loss_mask: 0.2299, decode.d1.loss_dice: 0.6556, decode.d2.loss_cls: 0.1331, decode.d2.loss_mask: 0.2301, 
decode.d2.loss_dice: 0.6471, decode.d3.loss_cls: 0.1188, decode.d3.loss_mask: 0.2290, decode.d3.loss_dice: 0.6402, decode.d4.loss_cls: 0.1186, decode.d4.loss_mask: 0.2287, decode.d4.loss_dice: 0.6422, decode.d5.loss_cls: 0.1113, decode.d5.loss_mask: 0.2295, decode.d5.loss_dice: 0.6440, decode.d6.loss_cls: 0.1206, decode.d6.loss_mask: 0.2281, decode.d6.loss_dice: 0.6416, decode.d7.loss_cls: 0.1122, decode.d7.loss_mask: 0.2288, decode.d7.loss_dice: 0.6406, decode.d8.loss_cls: 0.1137, decode.d8.loss_mask: 0.2287, decode.d8.loss_dice: 0.6432, loss: 10.2032 +2022-05-10 06:02:13,653 - mmseg - INFO - Iter [23250/80000] lr: 1.019e-06, eta: 1 day, 6:41:23, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1072, decode.loss_mask: 0.2335, decode.loss_dice: 0.6624, decode.d0.loss_cls: 0.3295, decode.d0.loss_mask: 0.2445, decode.d0.loss_dice: 0.7009, decode.d1.loss_cls: 0.1226, decode.d1.loss_mask: 0.2378, decode.d1.loss_dice: 0.6775, decode.d2.loss_cls: 0.1209, decode.d2.loss_mask: 0.2343, decode.d2.loss_dice: 0.6682, decode.d3.loss_cls: 0.1090, decode.d3.loss_mask: 0.2340, decode.d3.loss_dice: 0.6632, decode.d4.loss_cls: 0.1110, decode.d4.loss_mask: 0.2335, decode.d4.loss_dice: 0.6650, decode.d5.loss_cls: 0.1035, decode.d5.loss_mask: 0.2343, decode.d5.loss_dice: 0.6606, decode.d6.loss_cls: 0.1031, decode.d6.loss_mask: 0.2351, decode.d6.loss_dice: 0.6620, decode.d7.loss_cls: 0.1101, decode.d7.loss_mask: 0.2339, decode.d7.loss_dice: 0.6643, decode.d8.loss_cls: 0.1037, decode.d8.loss_mask: 0.2332, decode.d8.loss_dice: 0.6626, loss: 10.3612 +2022-05-10 06:03:45,026 - mmseg - INFO - Iter [23300/80000] lr: 1.018e-06, eta: 1 day, 6:39:31, time: 1.827, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1105, decode.loss_mask: 0.2384, decode.loss_dice: 0.6358, decode.d0.loss_cls: 0.3255, decode.d0.loss_mask: 0.2504, decode.d0.loss_dice: 0.6844, decode.d1.loss_cls: 0.1223, decode.d1.loss_mask: 0.2399, decode.d1.loss_dice: 0.6532, decode.d2.loss_cls: 0.1114, 
decode.d2.loss_mask: 0.2390, decode.d2.loss_dice: 0.6454, decode.d3.loss_cls: 0.1019, decode.d3.loss_mask: 0.2389, decode.d3.loss_dice: 0.6364, decode.d4.loss_cls: 0.1094, decode.d4.loss_mask: 0.2386, decode.d4.loss_dice: 0.6342, decode.d5.loss_cls: 0.1087, decode.d5.loss_mask: 0.2386, decode.d5.loss_dice: 0.6396, decode.d6.loss_cls: 0.1062, decode.d6.loss_mask: 0.2373, decode.d6.loss_dice: 0.6375, decode.d7.loss_cls: 0.1060, decode.d7.loss_mask: 0.2385, decode.d7.loss_dice: 0.6371, decode.d8.loss_cls: 0.1093, decode.d8.loss_mask: 0.2381, decode.d8.loss_dice: 0.6350, loss: 10.1472 +2022-05-10 06:05:12,447 - mmseg - INFO - Iter [23350/80000] lr: 1.017e-06, eta: 1 day, 6:37:29, time: 1.748, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0933, decode.loss_mask: 0.2314, decode.loss_dice: 0.6293, decode.d0.loss_cls: 0.3219, decode.d0.loss_mask: 0.2436, decode.d0.loss_dice: 0.6723, decode.d1.loss_cls: 0.1103, decode.d1.loss_mask: 0.2346, decode.d1.loss_dice: 0.6461, decode.d2.loss_cls: 0.1060, decode.d2.loss_mask: 0.2328, decode.d2.loss_dice: 0.6407, decode.d3.loss_cls: 0.0980, decode.d3.loss_mask: 0.2322, decode.d3.loss_dice: 0.6329, decode.d4.loss_cls: 0.0994, decode.d4.loss_mask: 0.2321, decode.d4.loss_dice: 0.6321, decode.d5.loss_cls: 0.0985, decode.d5.loss_mask: 0.2315, decode.d5.loss_dice: 0.6318, decode.d6.loss_cls: 0.0910, decode.d6.loss_mask: 0.2325, decode.d6.loss_dice: 0.6309, decode.d7.loss_cls: 0.0945, decode.d7.loss_mask: 0.2320, decode.d7.loss_dice: 0.6322, decode.d8.loss_cls: 0.0902, decode.d8.loss_mask: 0.2315, decode.d8.loss_dice: 0.6300, loss: 9.9160 +2022-05-10 06:06:42,172 - mmseg - INFO - Iter [23400/80000] lr: 1.016e-06, eta: 1 day, 6:35:34, time: 1.794, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1073, decode.loss_mask: 0.2277, decode.loss_dice: 0.6385, decode.d0.loss_cls: 0.3179, decode.d0.loss_mask: 0.2365, decode.d0.loss_dice: 0.6829, decode.d1.loss_cls: 0.1354, decode.d1.loss_mask: 0.2292, decode.d1.loss_dice: 0.6542, 
decode.d2.loss_cls: 0.1317, decode.d2.loss_mask: 0.2286, decode.d2.loss_dice: 0.6472, decode.d3.loss_cls: 0.1185, decode.d3.loss_mask: 0.2276, decode.d3.loss_dice: 0.6376, decode.d4.loss_cls: 0.1158, decode.d4.loss_mask: 0.2286, decode.d4.loss_dice: 0.6420, decode.d5.loss_cls: 0.1177, decode.d5.loss_mask: 0.2283, decode.d5.loss_dice: 0.6379, decode.d6.loss_cls: 0.1098, decode.d6.loss_mask: 0.2283, decode.d6.loss_dice: 0.6356, decode.d7.loss_cls: 0.1170, decode.d7.loss_mask: 0.2280, decode.d7.loss_dice: 0.6377, decode.d8.loss_cls: 0.1092, decode.d8.loss_mask: 0.2278, decode.d8.loss_dice: 0.6421, loss: 10.1266 +2022-05-10 06:08:12,806 - mmseg - INFO - Iter [23450/80000] lr: 1.015e-06, eta: 1 day, 6:33:40, time: 1.812, data_time: 0.068, memory: 64699, decode.loss_cls: 0.1035, decode.loss_mask: 0.2396, decode.loss_dice: 0.6466, decode.d0.loss_cls: 0.3476, decode.d0.loss_mask: 0.2547, decode.d0.loss_dice: 0.6933, decode.d1.loss_cls: 0.1281, decode.d1.loss_mask: 0.2434, decode.d1.loss_dice: 0.6728, decode.d2.loss_cls: 0.1237, decode.d2.loss_mask: 0.2422, decode.d2.loss_dice: 0.6601, decode.d3.loss_cls: 0.1155, decode.d3.loss_mask: 0.2412, decode.d3.loss_dice: 0.6546, decode.d4.loss_cls: 0.1137, decode.d4.loss_mask: 0.2424, decode.d4.loss_dice: 0.6565, decode.d5.loss_cls: 0.1103, decode.d5.loss_mask: 0.2419, decode.d5.loss_dice: 0.6574, decode.d6.loss_cls: 0.1100, decode.d6.loss_mask: 0.2403, decode.d6.loss_dice: 0.6509, decode.d7.loss_cls: 0.1111, decode.d7.loss_mask: 0.2408, decode.d7.loss_dice: 0.6521, decode.d8.loss_cls: 0.1111, decode.d8.loss_mask: 0.2399, decode.d8.loss_dice: 0.6502, loss: 10.3955 +2022-05-10 06:09:40,958 - mmseg - INFO - Iter [23500/80000] lr: 1.014e-06, eta: 1 day, 6:31:41, time: 1.763, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1041, decode.loss_mask: 0.2306, decode.loss_dice: 0.6401, decode.d0.loss_cls: 0.3448, decode.d0.loss_mask: 0.2427, decode.d0.loss_dice: 0.6812, decode.d1.loss_cls: 0.1205, decode.d1.loss_mask: 0.2327, 
decode.d1.loss_dice: 0.6601, decode.d2.loss_cls: 0.1122, decode.d2.loss_mask: 0.2326, decode.d2.loss_dice: 0.6503, decode.d3.loss_cls: 0.1126, decode.d3.loss_mask: 0.2314, decode.d3.loss_dice: 0.6405, decode.d4.loss_cls: 0.1100, decode.d4.loss_mask: 0.2312, decode.d4.loss_dice: 0.6437, decode.d5.loss_cls: 0.1074, decode.d5.loss_mask: 0.2318, decode.d5.loss_dice: 0.6432, decode.d6.loss_cls: 0.1043, decode.d6.loss_mask: 0.2315, decode.d6.loss_dice: 0.6428, decode.d7.loss_cls: 0.1037, decode.d7.loss_mask: 0.2308, decode.d7.loss_dice: 0.6403, decode.d8.loss_cls: 0.1071, decode.d8.loss_mask: 0.2309, decode.d8.loss_dice: 0.6421, loss: 10.1374 +2022-05-10 06:11:09,626 - mmseg - INFO - Iter [23550/80000] lr: 1.013e-06, eta: 1 day, 6:29:43, time: 1.774, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0950, decode.loss_mask: 0.2334, decode.loss_dice: 0.6347, decode.d0.loss_cls: 0.3169, decode.d0.loss_mask: 0.2451, decode.d0.loss_dice: 0.6751, decode.d1.loss_cls: 0.1177, decode.d1.loss_mask: 0.2356, decode.d1.loss_dice: 0.6470, decode.d2.loss_cls: 0.1076, decode.d2.loss_mask: 0.2347, decode.d2.loss_dice: 0.6404, decode.d3.loss_cls: 0.1070, decode.d3.loss_mask: 0.2343, decode.d3.loss_dice: 0.6376, decode.d4.loss_cls: 0.1055, decode.d4.loss_mask: 0.2338, decode.d4.loss_dice: 0.6368, decode.d5.loss_cls: 0.0970, decode.d5.loss_mask: 0.2343, decode.d5.loss_dice: 0.6415, decode.d6.loss_cls: 0.0919, decode.d6.loss_mask: 0.2337, decode.d6.loss_dice: 0.6402, decode.d7.loss_cls: 0.0992, decode.d7.loss_mask: 0.2338, decode.d7.loss_dice: 0.6337, decode.d8.loss_cls: 0.0979, decode.d8.loss_mask: 0.2341, decode.d8.loss_dice: 0.6367, loss: 10.0122 +2022-05-10 06:12:37,704 - mmseg - INFO - Iter [23600/80000] lr: 1.012e-06, eta: 1 day, 6:27:44, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0964, decode.loss_mask: 0.2308, decode.loss_dice: 0.6393, decode.d0.loss_cls: 0.3279, decode.d0.loss_mask: 0.2415, decode.d0.loss_dice: 0.6817, decode.d1.loss_cls: 0.1257, 
decode.d1.loss_mask: 0.2330, decode.d1.loss_dice: 0.6530, decode.d2.loss_cls: 0.1198, decode.d2.loss_mask: 0.2317, decode.d2.loss_dice: 0.6446, decode.d3.loss_cls: 0.1023, decode.d3.loss_mask: 0.2312, decode.d3.loss_dice: 0.6397, decode.d4.loss_cls: 0.1034, decode.d4.loss_mask: 0.2311, decode.d4.loss_dice: 0.6463, decode.d5.loss_cls: 0.0998, decode.d5.loss_mask: 0.2317, decode.d5.loss_dice: 0.6422, decode.d6.loss_cls: 0.0992, decode.d6.loss_mask: 0.2310, decode.d6.loss_dice: 0.6424, decode.d7.loss_cls: 0.0995, decode.d7.loss_mask: 0.2311, decode.d7.loss_dice: 0.6404, decode.d8.loss_cls: 0.0979, decode.d8.loss_mask: 0.2307, decode.d8.loss_dice: 0.6457, loss: 10.0710 +2022-05-10 06:14:08,336 - mmseg - INFO - Iter [23650/80000] lr: 1.011e-06, eta: 1 day, 6:25:51, time: 1.813, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1075, decode.loss_mask: 0.2363, decode.loss_dice: 0.6340, decode.d0.loss_cls: 0.3356, decode.d0.loss_mask: 0.2474, decode.d0.loss_dice: 0.6796, decode.d1.loss_cls: 0.1263, decode.d1.loss_mask: 0.2385, decode.d1.loss_dice: 0.6539, decode.d2.loss_cls: 0.1236, decode.d2.loss_mask: 0.2371, decode.d2.loss_dice: 0.6424, decode.d3.loss_cls: 0.1156, decode.d3.loss_mask: 0.2360, decode.d3.loss_dice: 0.6408, decode.d4.loss_cls: 0.1163, decode.d4.loss_mask: 0.2366, decode.d4.loss_dice: 0.6370, decode.d5.loss_cls: 0.1134, decode.d5.loss_mask: 0.2368, decode.d5.loss_dice: 0.6382, decode.d6.loss_cls: 0.1102, decode.d6.loss_mask: 0.2366, decode.d6.loss_dice: 0.6373, decode.d7.loss_cls: 0.1098, decode.d7.loss_mask: 0.2365, decode.d7.loss_dice: 0.6394, decode.d8.loss_cls: 0.1150, decode.d8.loss_mask: 0.2366, decode.d8.loss_dice: 0.6365, loss: 10.1908 +2022-05-10 06:15:36,481 - mmseg - INFO - Iter [23700/80000] lr: 1.010e-06, eta: 1 day, 6:23:53, time: 1.763, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0939, decode.loss_mask: 0.2365, decode.loss_dice: 0.6337, decode.d0.loss_cls: 0.3307, decode.d0.loss_mask: 0.2480, decode.d0.loss_dice: 0.6731, 
decode.d1.loss_cls: 0.1131, decode.d1.loss_mask: 0.2385, decode.d1.loss_dice: 0.6471, decode.d2.loss_cls: 0.1037, decode.d2.loss_mask: 0.2374, decode.d2.loss_dice: 0.6440, decode.d3.loss_cls: 0.1015, decode.d3.loss_mask: 0.2365, decode.d3.loss_dice: 0.6377, decode.d4.loss_cls: 0.1020, decode.d4.loss_mask: 0.2371, decode.d4.loss_dice: 0.6345, decode.d5.loss_cls: 0.1006, decode.d5.loss_mask: 0.2372, decode.d5.loss_dice: 0.6350, decode.d6.loss_cls: 0.1025, decode.d6.loss_mask: 0.2367, decode.d6.loss_dice: 0.6315, decode.d7.loss_cls: 0.1009, decode.d7.loss_mask: 0.2365, decode.d7.loss_dice: 0.6338, decode.d8.loss_cls: 0.0967, decode.d8.loss_mask: 0.2370, decode.d8.loss_dice: 0.6345, loss: 10.0319 +2022-05-10 06:17:04,466 - mmseg - INFO - Iter [23750/80000] lr: 1.010e-06, eta: 1 day, 6:21:54, time: 1.760, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1046, decode.loss_mask: 0.2317, decode.loss_dice: 0.6524, decode.d0.loss_cls: 0.3300, decode.d0.loss_mask: 0.2429, decode.d0.loss_dice: 0.6887, decode.d1.loss_cls: 0.1272, decode.d1.loss_mask: 0.2339, decode.d1.loss_dice: 0.6629, decode.d2.loss_cls: 0.1099, decode.d2.loss_mask: 0.2330, decode.d2.loss_dice: 0.6507, decode.d3.loss_cls: 0.1038, decode.d3.loss_mask: 0.2327, decode.d3.loss_dice: 0.6483, decode.d4.loss_cls: 0.1077, decode.d4.loss_mask: 0.2316, decode.d4.loss_dice: 0.6536, decode.d5.loss_cls: 0.1056, decode.d5.loss_mask: 0.2316, decode.d5.loss_dice: 0.6507, decode.d6.loss_cls: 0.1028, decode.d6.loss_mask: 0.2310, decode.d6.loss_dice: 0.6468, decode.d7.loss_cls: 0.1093, decode.d7.loss_mask: 0.2318, decode.d7.loss_dice: 0.6530, decode.d8.loss_cls: 0.1037, decode.d8.loss_mask: 0.2314, decode.d8.loss_dice: 0.6458, loss: 10.1889 +2022-05-10 06:18:32,197 - mmseg - INFO - Iter [23800/80000] lr: 1.009e-06, eta: 1 day, 6:19:54, time: 1.755, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0950, decode.loss_mask: 0.2269, decode.loss_dice: 0.6255, decode.d0.loss_cls: 0.3296, decode.d0.loss_mask: 0.2369, 
decode.d0.loss_dice: 0.6693, decode.d1.loss_cls: 0.1129, decode.d1.loss_mask: 0.2277, decode.d1.loss_dice: 0.6418, decode.d2.loss_cls: 0.1002, decode.d2.loss_mask: 0.2276, decode.d2.loss_dice: 0.6362, decode.d3.loss_cls: 0.0961, decode.d3.loss_mask: 0.2267, decode.d3.loss_dice: 0.6254, decode.d4.loss_cls: 0.1058, decode.d4.loss_mask: 0.2270, decode.d4.loss_dice: 0.6290, decode.d5.loss_cls: 0.0984, decode.d5.loss_mask: 0.2266, decode.d5.loss_dice: 0.6253, decode.d6.loss_cls: 0.0885, decode.d6.loss_mask: 0.2266, decode.d6.loss_dice: 0.6210, decode.d7.loss_cls: 0.0942, decode.d7.loss_mask: 0.2264, decode.d7.loss_dice: 0.6228, decode.d8.loss_cls: 0.0911, decode.d8.loss_mask: 0.2262, decode.d8.loss_dice: 0.6251, loss: 9.8117 +2022-05-10 06:20:02,835 - mmseg - INFO - Iter [23850/80000] lr: 1.008e-06, eta: 1 day, 6:18:01, time: 1.809, data_time: 0.066, memory: 64699, decode.loss_cls: 0.1043, decode.loss_mask: 0.2339, decode.loss_dice: 0.6490, decode.d0.loss_cls: 0.3342, decode.d0.loss_mask: 0.2471, decode.d0.loss_dice: 0.6897, decode.d1.loss_cls: 0.1281, decode.d1.loss_mask: 0.2367, decode.d1.loss_dice: 0.6599, decode.d2.loss_cls: 0.1154, decode.d2.loss_mask: 0.2355, decode.d2.loss_dice: 0.6527, decode.d3.loss_cls: 0.1101, decode.d3.loss_mask: 0.2349, decode.d3.loss_dice: 0.6488, decode.d4.loss_cls: 0.1080, decode.d4.loss_mask: 0.2343, decode.d4.loss_dice: 0.6483, decode.d5.loss_cls: 0.1133, decode.d5.loss_mask: 0.2347, decode.d5.loss_dice: 0.6483, decode.d6.loss_cls: 0.1071, decode.d6.loss_mask: 0.2344, decode.d6.loss_dice: 0.6494, decode.d7.loss_cls: 0.1083, decode.d7.loss_mask: 0.2343, decode.d7.loss_dice: 0.6467, decode.d8.loss_cls: 0.1050, decode.d8.loss_mask: 0.2348, decode.d8.loss_dice: 0.6456, loss: 10.2326 +2022-05-10 06:21:32,070 - mmseg - INFO - Iter [23900/80000] lr: 1.007e-06, eta: 1 day, 6:16:06, time: 1.787, data_time: 0.022, memory: 64699, decode.loss_cls: 0.0885, decode.loss_mask: 0.2285, decode.loss_dice: 0.6228, decode.d0.loss_cls: 0.3239, 
decode.d0.loss_mask: 0.2409, decode.d0.loss_dice: 0.6642, decode.d1.loss_cls: 0.1108, decode.d1.loss_mask: 0.2308, decode.d1.loss_dice: 0.6301, decode.d2.loss_cls: 0.0974, decode.d2.loss_mask: 0.2287, decode.d2.loss_dice: 0.6300, decode.d3.loss_cls: 0.0890, decode.d3.loss_mask: 0.2292, decode.d3.loss_dice: 0.6211, decode.d4.loss_cls: 0.0926, decode.d4.loss_mask: 0.2289, decode.d4.loss_dice: 0.6237, decode.d5.loss_cls: 0.0875, decode.d5.loss_mask: 0.2291, decode.d5.loss_dice: 0.6251, decode.d6.loss_cls: 0.0822, decode.d6.loss_mask: 0.2288, decode.d6.loss_dice: 0.6247, decode.d7.loss_cls: 0.0899, decode.d7.loss_mask: 0.2277, decode.d7.loss_dice: 0.6214, decode.d8.loss_cls: 0.0885, decode.d8.loss_mask: 0.2278, decode.d8.loss_dice: 0.6203, loss: 9.7339 +2022-05-10 06:23:00,551 - mmseg - INFO - Iter [23950/80000] lr: 1.006e-06, eta: 1 day, 6:14:09, time: 1.771, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0927, decode.loss_mask: 0.2316, decode.loss_dice: 0.6376, decode.d0.loss_cls: 0.3181, decode.d0.loss_mask: 0.2434, decode.d0.loss_dice: 0.6746, decode.d1.loss_cls: 0.1177, decode.d1.loss_mask: 0.2348, decode.d1.loss_dice: 0.6498, decode.d2.loss_cls: 0.1034, decode.d2.loss_mask: 0.2333, decode.d2.loss_dice: 0.6448, decode.d3.loss_cls: 0.0975, decode.d3.loss_mask: 0.2335, decode.d3.loss_dice: 0.6420, decode.d4.loss_cls: 0.1003, decode.d4.loss_mask: 0.2334, decode.d4.loss_dice: 0.6388, decode.d5.loss_cls: 0.1002, decode.d5.loss_mask: 0.2330, decode.d5.loss_dice: 0.6381, decode.d6.loss_cls: 0.0946, decode.d6.loss_mask: 0.2334, decode.d6.loss_dice: 0.6390, decode.d7.loss_cls: 0.0948, decode.d7.loss_mask: 0.2329, decode.d7.loss_dice: 0.6385, decode.d8.loss_cls: 0.0919, decode.d8.loss_mask: 0.2323, decode.d8.loss_dice: 0.6421, loss: 9.9982 +2022-05-10 06:24:30,788 - mmseg - INFO - Saving checkpoint at 24000 iterations +2022-05-10 06:25:03,916 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 06:25:03,924 - mmseg - INFO - 
Iter [24000/80000] lr: 1.005e-06, eta: 1 day, 6:13:33, time: 2.465, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1053, decode.loss_mask: 0.2282, decode.loss_dice: 0.6249, decode.d0.loss_cls: 0.3394, decode.d0.loss_mask: 0.2399, decode.d0.loss_dice: 0.6638, decode.d1.loss_cls: 0.1315, decode.d1.loss_mask: 0.2306, decode.d1.loss_dice: 0.6417, decode.d2.loss_cls: 0.1262, decode.d2.loss_mask: 0.2293, decode.d2.loss_dice: 0.6319, decode.d3.loss_cls: 0.1102, decode.d3.loss_mask: 0.2279, decode.d3.loss_dice: 0.6238, decode.d4.loss_cls: 0.1124, decode.d4.loss_mask: 0.2285, decode.d4.loss_dice: 0.6270, decode.d5.loss_cls: 0.1083, decode.d5.loss_mask: 0.2283, decode.d5.loss_dice: 0.6268, decode.d6.loss_cls: 0.1092, decode.d6.loss_mask: 0.2282, decode.d6.loss_dice: 0.6291, decode.d7.loss_cls: 0.1157, decode.d7.loss_mask: 0.2285, decode.d7.loss_dice: 0.6313, decode.d8.loss_cls: 0.1136, decode.d8.loss_mask: 0.2285, decode.d8.loss_dice: 0.6276, loss: 9.9975 +2022-05-10 06:26:59,243 - mmseg - INFO - per class results: +2022-05-10 06:26:59,249 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.48 | 99.02 | +| sidewalk | 88.0 | 94.9 | +| building | 94.48 | 97.25 | +| wall | 69.56 | 80.72 | +| fence | 73.95 | 84.2 | +| pole | 70.94 | 81.79 | +| traffic light | 77.46 | 88.29 | +| traffic sign | 83.37 | 90.13 | +| vegetation | 93.35 | 96.59 | +| terrain | 67.11 | 79.3 | +| sky | 95.82 | 98.54 | +| person | 86.83 | 92.39 | +| rider | 74.22 | 86.52 | +| car | 96.31 | 98.3 | +| truck | 92.03 | 96.19 | +| bus | 93.51 | 96.02 | +| train | 87.9 | 90.87 | +| motorcycle | 77.63 | 87.65 | +| bicycle | 82.85 | 92.3 | ++---------------+-------+-------+ +2022-05-10 06:26:59,250 - mmseg - INFO - Summary: +2022-05-10 06:26:59,250 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | ++-------+-------+------+ +| 96.97 | 84.41 | 91.1 | ++-------+-------+------+ +2022-05-10 06:26:59,254 - mmseg - INFO - Exp 
name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 06:26:59,254 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8441, mAcc: 0.9110, IoU.road: 0.9848, IoU.sidewalk: 0.8800, IoU.building: 0.9448, IoU.wall: 0.6956, IoU.fence: 0.7395, IoU.pole: 0.7094, IoU.traffic light: 0.7746, IoU.traffic sign: 0.8337, IoU.vegetation: 0.9335, IoU.terrain: 0.6711, IoU.sky: 0.9582, IoU.person: 0.8683, IoU.rider: 0.7422, IoU.car: 0.9631, IoU.truck: 0.9203, IoU.bus: 0.9351, IoU.train: 0.8790, IoU.motorcycle: 0.7763, IoU.bicycle: 0.8285, Acc.road: 0.9902, Acc.sidewalk: 0.9490, Acc.building: 0.9725, Acc.wall: 0.8072, Acc.fence: 0.8420, Acc.pole: 0.8179, Acc.traffic light: 0.8829, Acc.traffic sign: 0.9013, Acc.vegetation: 0.9659, Acc.terrain: 0.7930, Acc.sky: 0.9854, Acc.person: 0.9239, Acc.rider: 0.8652, Acc.car: 0.9830, Acc.truck: 0.9619, Acc.bus: 0.9602, Acc.train: 0.9087, Acc.motorcycle: 0.8765, Acc.bicycle: 0.9230 +2022-05-10 06:28:27,656 - mmseg - INFO - Iter [24050/80000] lr: 1.004e-06, eta: 1 day, 6:16:04, time: 4.077, data_time: 2.327, memory: 64699, decode.loss_cls: 0.0815, decode.loss_mask: 0.2280, decode.loss_dice: 0.6353, decode.d0.loss_cls: 0.3278, decode.d0.loss_mask: 0.2363, decode.d0.loss_dice: 0.6716, decode.d1.loss_cls: 0.1113, decode.d1.loss_mask: 0.2290, decode.d1.loss_dice: 0.6470, decode.d2.loss_cls: 0.0933, decode.d2.loss_mask: 0.2285, decode.d2.loss_dice: 0.6413, decode.d3.loss_cls: 0.0901, decode.d3.loss_mask: 0.2280, decode.d3.loss_dice: 0.6346, decode.d4.loss_cls: 0.0877, decode.d4.loss_mask: 0.2276, decode.d4.loss_dice: 0.6309, decode.d5.loss_cls: 0.0891, decode.d5.loss_mask: 0.2278, decode.d5.loss_dice: 0.6346, decode.d6.loss_cls: 0.0844, decode.d6.loss_mask: 0.2276, decode.d6.loss_dice: 0.6353, decode.d7.loss_cls: 0.0895, decode.d7.loss_mask: 0.2278, decode.d7.loss_dice: 0.6363, decode.d8.loss_cls: 0.0829, decode.d8.loss_mask: 0.2279, decode.d8.loss_dice: 0.6343, loss: 9.8272 +2022-05-10 06:29:56,329 - mmseg - INFO - Iter 
[24100/80000] lr: 1.003e-06, eta: 1 day, 6:14:06, time: 1.773, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1060, decode.loss_mask: 0.2357, decode.loss_dice: 0.6548, decode.d0.loss_cls: 0.3183, decode.d0.loss_mask: 0.2479, decode.d0.loss_dice: 0.6945, decode.d1.loss_cls: 0.1226, decode.d1.loss_mask: 0.2384, decode.d1.loss_dice: 0.6676, decode.d2.loss_cls: 0.1115, decode.d2.loss_mask: 0.2372, decode.d2.loss_dice: 0.6621, decode.d3.loss_cls: 0.1088, decode.d3.loss_mask: 0.2365, decode.d3.loss_dice: 0.6555, decode.d4.loss_cls: 0.1133, decode.d4.loss_mask: 0.2360, decode.d4.loss_dice: 0.6597, decode.d5.loss_cls: 0.1109, decode.d5.loss_mask: 0.2359, decode.d5.loss_dice: 0.6598, decode.d6.loss_cls: 0.1091, decode.d6.loss_mask: 0.2363, decode.d6.loss_dice: 0.6596, decode.d7.loss_cls: 0.1124, decode.d7.loss_mask: 0.2364, decode.d7.loss_dice: 0.6519, decode.d8.loss_cls: 0.1101, decode.d8.loss_mask: 0.2361, decode.d8.loss_dice: 0.6551, loss: 10.3197 +2022-05-10 06:31:24,216 - mmseg - INFO - Iter [24150/80000] lr: 1.002e-06, eta: 1 day, 6:12:07, time: 1.758, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1044, decode.loss_mask: 0.2289, decode.loss_dice: 0.6390, decode.d0.loss_cls: 0.3465, decode.d0.loss_mask: 0.2403, decode.d0.loss_dice: 0.6811, decode.d1.loss_cls: 0.1375, decode.d1.loss_mask: 0.2309, decode.d1.loss_dice: 0.6582, decode.d2.loss_cls: 0.1167, decode.d2.loss_mask: 0.2293, decode.d2.loss_dice: 0.6484, decode.d3.loss_cls: 0.1129, decode.d3.loss_mask: 0.2283, decode.d3.loss_dice: 0.6411, decode.d4.loss_cls: 0.1112, decode.d4.loss_mask: 0.2280, decode.d4.loss_dice: 0.6429, decode.d5.loss_cls: 0.1086, decode.d5.loss_mask: 0.2291, decode.d5.loss_dice: 0.6436, decode.d6.loss_cls: 0.1090, decode.d6.loss_mask: 0.2286, decode.d6.loss_dice: 0.6388, decode.d7.loss_cls: 0.1062, decode.d7.loss_mask: 0.2288, decode.d7.loss_dice: 0.6425, decode.d8.loss_cls: 0.1059, decode.d8.loss_mask: 0.2290, decode.d8.loss_dice: 0.6417, loss: 10.1375 +2022-05-10 06:32:55,439 - 
mmseg - INFO - Iter [24200/80000] lr: 1.001e-06, eta: 1 day, 6:10:15, time: 1.824, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0808, decode.loss_mask: 0.2300, decode.loss_dice: 0.6199, decode.d0.loss_cls: 0.3228, decode.d0.loss_mask: 0.2404, decode.d0.loss_dice: 0.6566, decode.d1.loss_cls: 0.1115, decode.d1.loss_mask: 0.2324, decode.d1.loss_dice: 0.6354, decode.d2.loss_cls: 0.0988, decode.d2.loss_mask: 0.2304, decode.d2.loss_dice: 0.6262, decode.d3.loss_cls: 0.0870, decode.d3.loss_mask: 0.2300, decode.d3.loss_dice: 0.6192, decode.d4.loss_cls: 0.0845, decode.d4.loss_mask: 0.2297, decode.d4.loss_dice: 0.6202, decode.d5.loss_cls: 0.0835, decode.d5.loss_mask: 0.2296, decode.d5.loss_dice: 0.6203, decode.d6.loss_cls: 0.0817, decode.d6.loss_mask: 0.2296, decode.d6.loss_dice: 0.6235, decode.d7.loss_cls: 0.0849, decode.d7.loss_mask: 0.2294, decode.d7.loss_dice: 0.6188, decode.d8.loss_cls: 0.0820, decode.d8.loss_mask: 0.2296, decode.d8.loss_dice: 0.6182, loss: 9.6869 +2022-05-10 06:34:24,750 - mmseg - INFO - Iter [24250/80000] lr: 1.001e-06, eta: 1 day, 6:08:20, time: 1.786, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1044, decode.loss_mask: 0.2308, decode.loss_dice: 0.6388, decode.d0.loss_cls: 0.3347, decode.d0.loss_mask: 0.2419, decode.d0.loss_dice: 0.6766, decode.d1.loss_cls: 0.1328, decode.d1.loss_mask: 0.2322, decode.d1.loss_dice: 0.6489, decode.d2.loss_cls: 0.1137, decode.d2.loss_mask: 0.2308, decode.d2.loss_dice: 0.6419, decode.d3.loss_cls: 0.1047, decode.d3.loss_mask: 0.2302, decode.d3.loss_dice: 0.6400, decode.d4.loss_cls: 0.1123, decode.d4.loss_mask: 0.2300, decode.d4.loss_dice: 0.6388, decode.d5.loss_cls: 0.1068, decode.d5.loss_mask: 0.2304, decode.d5.loss_dice: 0.6396, decode.d6.loss_cls: 0.1068, decode.d6.loss_mask: 0.2306, decode.d6.loss_dice: 0.6358, decode.d7.loss_cls: 0.1107, decode.d7.loss_mask: 0.2303, decode.d7.loss_dice: 0.6347, decode.d8.loss_cls: 0.1060, decode.d8.loss_mask: 0.2306, decode.d8.loss_dice: 0.6360, loss: 10.0817 +2022-05-10 
06:35:54,806 - mmseg - INFO - Iter [24300/80000] lr: 9.997e-07, eta: 1 day, 6:06:26, time: 1.801, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0956, decode.loss_mask: 0.2284, decode.loss_dice: 0.6232, decode.d0.loss_cls: 0.3219, decode.d0.loss_mask: 0.2398, decode.d0.loss_dice: 0.6661, decode.d1.loss_cls: 0.1118, decode.d1.loss_mask: 0.2313, decode.d1.loss_dice: 0.6346, decode.d2.loss_cls: 0.1001, decode.d2.loss_mask: 0.2305, decode.d2.loss_dice: 0.6281, decode.d3.loss_cls: 0.0976, decode.d3.loss_mask: 0.2286, decode.d3.loss_dice: 0.6190, decode.d4.loss_cls: 0.0915, decode.d4.loss_mask: 0.2287, decode.d4.loss_dice: 0.6256, decode.d5.loss_cls: 0.0894, decode.d5.loss_mask: 0.2287, decode.d5.loss_dice: 0.6200, decode.d6.loss_cls: 0.0942, decode.d6.loss_mask: 0.2285, decode.d6.loss_dice: 0.6199, decode.d7.loss_cls: 0.0960, decode.d7.loss_mask: 0.2286, decode.d7.loss_dice: 0.6224, decode.d8.loss_cls: 0.0957, decode.d8.loss_mask: 0.2281, decode.d8.loss_dice: 0.6183, loss: 9.7722 +2022-05-10 06:37:25,520 - mmseg - INFO - Iter [24350/80000] lr: 9.988e-07, eta: 1 day, 6:04:33, time: 1.815, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0995, decode.loss_mask: 0.2318, decode.loss_dice: 0.6362, decode.d0.loss_cls: 0.3256, decode.d0.loss_mask: 0.2433, decode.d0.loss_dice: 0.6814, decode.d1.loss_cls: 0.1276, decode.d1.loss_mask: 0.2334, decode.d1.loss_dice: 0.6482, decode.d2.loss_cls: 0.1160, decode.d2.loss_mask: 0.2316, decode.d2.loss_dice: 0.6477, decode.d3.loss_cls: 0.1125, decode.d3.loss_mask: 0.2318, decode.d3.loss_dice: 0.6418, decode.d4.loss_cls: 0.1074, decode.d4.loss_mask: 0.2320, decode.d4.loss_dice: 0.6389, decode.d5.loss_cls: 0.1029, decode.d5.loss_mask: 0.2319, decode.d5.loss_dice: 0.6369, decode.d6.loss_cls: 0.1037, decode.d6.loss_mask: 0.2320, decode.d6.loss_dice: 0.6396, decode.d7.loss_cls: 0.1055, decode.d7.loss_mask: 0.2325, decode.d7.loss_dice: 0.6422, decode.d8.loss_cls: 0.1083, decode.d8.loss_mask: 0.2317, decode.d8.loss_dice: 0.6416, loss: 
10.0955 +2022-05-10 06:38:58,853 - mmseg - INFO - Iter [24400/80000] lr: 9.979e-07, eta: 1 day, 6:02:47, time: 1.867, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0866, decode.loss_mask: 0.2311, decode.loss_dice: 0.6286, decode.d0.loss_cls: 0.3251, decode.d0.loss_mask: 0.2413, decode.d0.loss_dice: 0.6616, decode.d1.loss_cls: 0.1070, decode.d1.loss_mask: 0.2332, decode.d1.loss_dice: 0.6367, decode.d2.loss_cls: 0.0907, decode.d2.loss_mask: 0.2307, decode.d2.loss_dice: 0.6381, decode.d3.loss_cls: 0.0865, decode.d3.loss_mask: 0.2315, decode.d3.loss_dice: 0.6321, decode.d4.loss_cls: 0.0895, decode.d4.loss_mask: 0.2319, decode.d4.loss_dice: 0.6293, decode.d5.loss_cls: 0.0865, decode.d5.loss_mask: 0.2316, decode.d5.loss_dice: 0.6316, decode.d6.loss_cls: 0.0841, decode.d6.loss_mask: 0.2303, decode.d6.loss_dice: 0.6276, decode.d7.loss_cls: 0.0827, decode.d7.loss_mask: 0.2313, decode.d7.loss_dice: 0.6281, decode.d8.loss_cls: 0.0851, decode.d8.loss_mask: 0.2313, decode.d8.loss_dice: 0.6311, loss: 9.7930 +2022-05-10 06:40:29,518 - mmseg - INFO - Iter [24450/80000] lr: 9.970e-07, eta: 1 day, 6:00:55, time: 1.813, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1018, decode.loss_mask: 0.2292, decode.loss_dice: 0.6439, decode.d0.loss_cls: 0.3226, decode.d0.loss_mask: 0.2383, decode.d0.loss_dice: 0.6844, decode.d1.loss_cls: 0.1166, decode.d1.loss_mask: 0.2300, decode.d1.loss_dice: 0.6535, decode.d2.loss_cls: 0.1006, decode.d2.loss_mask: 0.2298, decode.d2.loss_dice: 0.6472, decode.d3.loss_cls: 0.0979, decode.d3.loss_mask: 0.2298, decode.d3.loss_dice: 0.6460, decode.d4.loss_cls: 0.0982, decode.d4.loss_mask: 0.2292, decode.d4.loss_dice: 0.6441, decode.d5.loss_cls: 0.1045, decode.d5.loss_mask: 0.2295, decode.d5.loss_dice: 0.6418, decode.d6.loss_cls: 0.0992, decode.d6.loss_mask: 0.2290, decode.d6.loss_dice: 0.6428, decode.d7.loss_cls: 0.1003, decode.d7.loss_mask: 0.2291, decode.d7.loss_dice: 0.6426, decode.d8.loss_cls: 0.1006, decode.d8.loss_mask: 0.2294, 
decode.d8.loss_dice: 0.6409, loss: 10.0328 +2022-05-10 06:41:59,199 - mmseg - INFO - Iter [24500/80000] lr: 9.961e-07, eta: 1 day, 5:59:00, time: 1.794, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1094, decode.loss_mask: 0.2290, decode.loss_dice: 0.6422, decode.d0.loss_cls: 0.3418, decode.d0.loss_mask: 0.2414, decode.d0.loss_dice: 0.6822, decode.d1.loss_cls: 0.1210, decode.d1.loss_mask: 0.2320, decode.d1.loss_dice: 0.6567, decode.d2.loss_cls: 0.1099, decode.d2.loss_mask: 0.2301, decode.d2.loss_dice: 0.6510, decode.d3.loss_cls: 0.1127, decode.d3.loss_mask: 0.2298, decode.d3.loss_dice: 0.6433, decode.d4.loss_cls: 0.1097, decode.d4.loss_mask: 0.2295, decode.d4.loss_dice: 0.6466, decode.d5.loss_cls: 0.1123, decode.d5.loss_mask: 0.2300, decode.d5.loss_dice: 0.6442, decode.d6.loss_cls: 0.1087, decode.d6.loss_mask: 0.2293, decode.d6.loss_dice: 0.6446, decode.d7.loss_cls: 0.1029, decode.d7.loss_mask: 0.2295, decode.d7.loss_dice: 0.6452, decode.d8.loss_cls: 0.1069, decode.d8.loss_mask: 0.2292, decode.d8.loss_dice: 0.6435, loss: 10.1446 +2022-05-10 06:43:29,840 - mmseg - INFO - Iter [24550/80000] lr: 9.952e-07, eta: 1 day, 5:57:08, time: 1.813, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1062, decode.loss_mask: 0.2260, decode.loss_dice: 0.6287, decode.d0.loss_cls: 0.3451, decode.d0.loss_mask: 0.2351, decode.d0.loss_dice: 0.6699, decode.d1.loss_cls: 0.1162, decode.d1.loss_mask: 0.2278, decode.d1.loss_dice: 0.6450, decode.d2.loss_cls: 0.1177, decode.d2.loss_mask: 0.2264, decode.d2.loss_dice: 0.6367, decode.d3.loss_cls: 0.1075, decode.d3.loss_mask: 0.2262, decode.d3.loss_dice: 0.6329, decode.d4.loss_cls: 0.1097, decode.d4.loss_mask: 0.2254, decode.d4.loss_dice: 0.6310, decode.d5.loss_cls: 0.1109, decode.d5.loss_mask: 0.2261, decode.d5.loss_dice: 0.6324, decode.d6.loss_cls: 0.1121, decode.d6.loss_mask: 0.2265, decode.d6.loss_dice: 0.6282, decode.d7.loss_cls: 0.1099, decode.d7.loss_mask: 0.2260, decode.d7.loss_dice: 0.6290, decode.d8.loss_cls: 0.1104, 
decode.d8.loss_mask: 0.2262, decode.d8.loss_dice: 0.6311, loss: 9.9821 +2022-05-10 06:45:03,060 - mmseg - INFO - Iter [24600/80000] lr: 9.943e-07, eta: 1 day, 5:55:22, time: 1.864, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0960, decode.loss_mask: 0.2267, decode.loss_dice: 0.6308, decode.d0.loss_cls: 0.3277, decode.d0.loss_mask: 0.2381, decode.d0.loss_dice: 0.6718, decode.d1.loss_cls: 0.1138, decode.d1.loss_mask: 0.2296, decode.d1.loss_dice: 0.6475, decode.d2.loss_cls: 0.1096, decode.d2.loss_mask: 0.2282, decode.d2.loss_dice: 0.6369, decode.d3.loss_cls: 0.1034, decode.d3.loss_mask: 0.2278, decode.d3.loss_dice: 0.6330, decode.d4.loss_cls: 0.0992, decode.d4.loss_mask: 0.2278, decode.d4.loss_dice: 0.6339, decode.d5.loss_cls: 0.1076, decode.d5.loss_mask: 0.2275, decode.d5.loss_dice: 0.6323, decode.d6.loss_cls: 0.0966, decode.d6.loss_mask: 0.2277, decode.d6.loss_dice: 0.6334, decode.d7.loss_cls: 0.1002, decode.d7.loss_mask: 0.2273, decode.d7.loss_dice: 0.6310, decode.d8.loss_cls: 0.0980, decode.d8.loss_mask: 0.2270, decode.d8.loss_dice: 0.6324, loss: 9.9229 +2022-05-10 06:46:34,178 - mmseg - INFO - Iter [24650/80000] lr: 9.934e-07, eta: 1 day, 5:53:31, time: 1.821, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1005, decode.loss_mask: 0.2343, decode.loss_dice: 0.6266, decode.d0.loss_cls: 0.3259, decode.d0.loss_mask: 0.2460, decode.d0.loss_dice: 0.6665, decode.d1.loss_cls: 0.1213, decode.d1.loss_mask: 0.2364, decode.d1.loss_dice: 0.6407, decode.d2.loss_cls: 0.1141, decode.d2.loss_mask: 0.2357, decode.d2.loss_dice: 0.6314, decode.d3.loss_cls: 0.1130, decode.d3.loss_mask: 0.2347, decode.d3.loss_dice: 0.6286, decode.d4.loss_cls: 0.1093, decode.d4.loss_mask: 0.2346, decode.d4.loss_dice: 0.6309, decode.d5.loss_cls: 0.1034, decode.d5.loss_mask: 0.2340, decode.d5.loss_dice: 0.6291, decode.d6.loss_cls: 0.1016, decode.d6.loss_mask: 0.2346, decode.d6.loss_dice: 0.6265, decode.d7.loss_cls: 0.1002, decode.d7.loss_mask: 0.2347, decode.d7.loss_dice: 0.6248, 
decode.d8.loss_cls: 0.0992, decode.d8.loss_mask: 0.2339, decode.d8.loss_dice: 0.6284, loss: 9.9809 +2022-05-10 06:48:04,355 - mmseg - INFO - Iter [24700/80000] lr: 9.925e-07, eta: 1 day, 5:51:38, time: 1.804, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1051, decode.loss_mask: 0.2255, decode.loss_dice: 0.6446, decode.d0.loss_cls: 0.3366, decode.d0.loss_mask: 0.2378, decode.d0.loss_dice: 0.6829, decode.d1.loss_cls: 0.1255, decode.d1.loss_mask: 0.2275, decode.d1.loss_dice: 0.6577, decode.d2.loss_cls: 0.1136, decode.d2.loss_mask: 0.2271, decode.d2.loss_dice: 0.6560, decode.d3.loss_cls: 0.1114, decode.d3.loss_mask: 0.2259, decode.d3.loss_dice: 0.6506, decode.d4.loss_cls: 0.1131, decode.d4.loss_mask: 0.2264, decode.d4.loss_dice: 0.6459, decode.d5.loss_cls: 0.1120, decode.d5.loss_mask: 0.2256, decode.d5.loss_dice: 0.6462, decode.d6.loss_cls: 0.1045, decode.d6.loss_mask: 0.2260, decode.d6.loss_dice: 0.6436, decode.d7.loss_cls: 0.1099, decode.d7.loss_mask: 0.2263, decode.d7.loss_dice: 0.6463, decode.d8.loss_cls: 0.1110, decode.d8.loss_mask: 0.2258, decode.d8.loss_dice: 0.6442, loss: 10.1344 +2022-05-10 06:49:37,794 - mmseg - INFO - Iter [24750/80000] lr: 9.916e-07, eta: 1 day, 5:49:52, time: 1.869, data_time: 0.069, memory: 64699, decode.loss_cls: 0.0943, decode.loss_mask: 0.2217, decode.loss_dice: 0.6335, decode.d0.loss_cls: 0.3227, decode.d0.loss_mask: 0.2311, decode.d0.loss_dice: 0.6691, decode.d1.loss_cls: 0.1175, decode.d1.loss_mask: 0.2226, decode.d1.loss_dice: 0.6442, decode.d2.loss_cls: 0.1044, decode.d2.loss_mask: 0.2214, decode.d2.loss_dice: 0.6356, decode.d3.loss_cls: 0.1029, decode.d3.loss_mask: 0.2210, decode.d3.loss_dice: 0.6365, decode.d4.loss_cls: 0.1004, decode.d4.loss_mask: 0.2208, decode.d4.loss_dice: 0.6321, decode.d5.loss_cls: 0.1035, decode.d5.loss_mask: 0.2211, decode.d5.loss_dice: 0.6351, decode.d6.loss_cls: 0.1037, decode.d6.loss_mask: 0.2206, decode.d6.loss_dice: 0.6356, decode.d7.loss_cls: 0.1044, decode.d7.loss_mask: 0.2220, 
decode.d7.loss_dice: 0.6360, decode.d8.loss_cls: 0.1010, decode.d8.loss_mask: 0.2217, decode.d8.loss_dice: 0.6330, loss: 9.8695 +2022-05-10 06:51:08,592 - mmseg - INFO - Iter [24800/80000] lr: 9.907e-07, eta: 1 day, 5:48:01, time: 1.816, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0989, decode.loss_mask: 0.2293, decode.loss_dice: 0.6385, decode.d0.loss_cls: 0.3245, decode.d0.loss_mask: 0.2425, decode.d0.loss_dice: 0.6777, decode.d1.loss_cls: 0.1153, decode.d1.loss_mask: 0.2313, decode.d1.loss_dice: 0.6483, decode.d2.loss_cls: 0.1133, decode.d2.loss_mask: 0.2300, decode.d2.loss_dice: 0.6450, decode.d3.loss_cls: 0.1116, decode.d3.loss_mask: 0.2302, decode.d3.loss_dice: 0.6384, decode.d4.loss_cls: 0.1041, decode.d4.loss_mask: 0.2299, decode.d4.loss_dice: 0.6326, decode.d5.loss_cls: 0.1060, decode.d5.loss_mask: 0.2304, decode.d5.loss_dice: 0.6379, decode.d6.loss_cls: 0.1021, decode.d6.loss_mask: 0.2299, decode.d6.loss_dice: 0.6354, decode.d7.loss_cls: 0.1022, decode.d7.loss_mask: 0.2302, decode.d7.loss_dice: 0.6367, decode.d8.loss_cls: 0.0952, decode.d8.loss_mask: 0.2292, decode.d8.loss_dice: 0.6372, loss: 10.0138 +2022-05-10 06:52:38,743 - mmseg - INFO - Iter [24850/80000] lr: 9.898e-07, eta: 1 day, 5:46:08, time: 1.803, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0998, decode.loss_mask: 0.2333, decode.loss_dice: 0.6373, decode.d0.loss_cls: 0.3238, decode.d0.loss_mask: 0.2448, decode.d0.loss_dice: 0.6786, decode.d1.loss_cls: 0.1213, decode.d1.loss_mask: 0.2337, decode.d1.loss_dice: 0.6436, decode.d2.loss_cls: 0.1133, decode.d2.loss_mask: 0.2320, decode.d2.loss_dice: 0.6381, decode.d3.loss_cls: 0.1141, decode.d3.loss_mask: 0.2319, decode.d3.loss_dice: 0.6375, decode.d4.loss_cls: 0.1139, decode.d4.loss_mask: 0.2325, decode.d4.loss_dice: 0.6304, decode.d5.loss_cls: 0.1096, decode.d5.loss_mask: 0.2328, decode.d5.loss_dice: 0.6345, decode.d6.loss_cls: 0.1044, decode.d6.loss_mask: 0.2319, decode.d6.loss_dice: 0.6330, decode.d7.loss_cls: 0.0991, 
decode.d7.loss_mask: 0.2318, decode.d7.loss_dice: 0.6332, decode.d8.loss_cls: 0.1058, decode.d8.loss_mask: 0.2322, decode.d8.loss_dice: 0.6349, loss: 10.0431 +2022-05-10 06:54:07,514 - mmseg - INFO - Iter [24900/80000] lr: 9.889e-07, eta: 1 day, 5:44:12, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0987, decode.loss_mask: 0.2301, decode.loss_dice: 0.6388, decode.d0.loss_cls: 0.3363, decode.d0.loss_mask: 0.2415, decode.d0.loss_dice: 0.6736, decode.d1.loss_cls: 0.1156, decode.d1.loss_mask: 0.2321, decode.d1.loss_dice: 0.6467, decode.d2.loss_cls: 0.1051, decode.d2.loss_mask: 0.2299, decode.d2.loss_dice: 0.6450, decode.d3.loss_cls: 0.1021, decode.d3.loss_mask: 0.2303, decode.d3.loss_dice: 0.6358, decode.d4.loss_cls: 0.0995, decode.d4.loss_mask: 0.2304, decode.d4.loss_dice: 0.6359, decode.d5.loss_cls: 0.0987, decode.d5.loss_mask: 0.2305, decode.d5.loss_dice: 0.6408, decode.d6.loss_cls: 0.0978, decode.d6.loss_mask: 0.2297, decode.d6.loss_dice: 0.6383, decode.d7.loss_cls: 0.0969, decode.d7.loss_mask: 0.2295, decode.d7.loss_dice: 0.6372, decode.d8.loss_cls: 0.1017, decode.d8.loss_mask: 0.2299, decode.d8.loss_dice: 0.6392, loss: 9.9974 +2022-05-10 06:55:39,685 - mmseg - INFO - Iter [24950/80000] lr: 9.880e-07, eta: 1 day, 5:42:24, time: 1.843, data_time: 0.069, memory: 64699, decode.loss_cls: 0.0964, decode.loss_mask: 0.2316, decode.loss_dice: 0.6336, decode.d0.loss_cls: 0.3343, decode.d0.loss_mask: 0.2441, decode.d0.loss_dice: 0.6739, decode.d1.loss_cls: 0.1207, decode.d1.loss_mask: 0.2343, decode.d1.loss_dice: 0.6475, decode.d2.loss_cls: 0.1098, decode.d2.loss_mask: 0.2338, decode.d2.loss_dice: 0.6392, decode.d3.loss_cls: 0.1015, decode.d3.loss_mask: 0.2324, decode.d3.loss_dice: 0.6336, decode.d4.loss_cls: 0.0996, decode.d4.loss_mask: 0.2319, decode.d4.loss_dice: 0.6321, decode.d5.loss_cls: 0.0955, decode.d5.loss_mask: 0.2321, decode.d5.loss_dice: 0.6370, decode.d6.loss_cls: 0.0893, decode.d6.loss_mask: 0.2325, decode.d6.loss_dice: 0.6339, 
decode.d7.loss_cls: 0.1003, decode.d7.loss_mask: 0.2322, decode.d7.loss_dice: 0.6355, decode.d8.loss_cls: 0.0953, decode.d8.loss_mask: 0.2320, decode.d8.loss_dice: 0.6360, loss: 9.9820 +2022-05-10 06:57:08,720 - mmseg - INFO - Saving checkpoint at 25000 iterations +2022-05-10 06:57:38,445 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 06:57:38,454 - mmseg - INFO - Iter [25000/80000] lr: 9.871e-07, eta: 1 day, 5:41:34, time: 2.372, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0914, decode.loss_mask: 0.2290, decode.loss_dice: 0.6372, decode.d0.loss_cls: 0.3299, decode.d0.loss_mask: 0.2392, decode.d0.loss_dice: 0.6760, decode.d1.loss_cls: 0.1214, decode.d1.loss_mask: 0.2299, decode.d1.loss_dice: 0.6535, decode.d2.loss_cls: 0.1045, decode.d2.loss_mask: 0.2293, decode.d2.loss_dice: 0.6460, decode.d3.loss_cls: 0.0967, decode.d3.loss_mask: 0.2287, decode.d3.loss_dice: 0.6371, decode.d4.loss_cls: 0.1008, decode.d4.loss_mask: 0.2292, decode.d4.loss_dice: 0.6432, decode.d5.loss_cls: 0.1007, decode.d5.loss_mask: 0.2296, decode.d5.loss_dice: 0.6405, decode.d6.loss_cls: 0.0947, decode.d6.loss_mask: 0.2293, decode.d6.loss_dice: 0.6391, decode.d7.loss_cls: 0.0991, decode.d7.loss_mask: 0.2288, decode.d7.loss_dice: 0.6366, decode.d8.loss_cls: 0.0964, decode.d8.loss_mask: 0.2292, decode.d8.loss_dice: 0.6405, loss: 9.9873 +2022-05-10 06:59:33,591 - mmseg - INFO - per class results: +2022-05-10 06:59:33,595 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.64 | 99.22 | +| sidewalk | 88.79 | 94.36 | +| building | 94.27 | 96.6 | +| wall | 64.38 | 76.92 | +| fence | 74.32 | 83.05 | +| pole | 71.82 | 84.98 | +| traffic light | 77.47 | 87.73 | +| traffic sign | 83.66 | 90.8 | +| vegetation | 93.27 | 97.26 | +| terrain | 66.18 | 75.69 | +| sky | 96.0 | 98.18 | +| person | 86.98 | 93.75 | +| rider | 74.13 | 85.2 | +| car | 96.37 | 98.25 | +| truck | 91.7 | 95.03 | 
+| bus | 93.43 | 97.13 | +| train | 87.89 | 91.34 | +| motorcycle | 77.33 | 88.2 | +| bicycle | 82.18 | 92.13 | ++---------------+-------+-------+ +2022-05-10 06:59:33,595 - mmseg - INFO - Summary: +2022-05-10 06:59:33,595 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.98 | 84.15 | 90.83 | ++-------+-------+-------+ +2022-05-10 06:59:33,599 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 06:59:33,600 - mmseg - INFO - Iter(val) [32] aAcc: 0.9698, mIoU: 0.8415, mAcc: 0.9083, IoU.road: 0.9864, IoU.sidewalk: 0.8879, IoU.building: 0.9427, IoU.wall: 0.6438, IoU.fence: 0.7432, IoU.pole: 0.7182, IoU.traffic light: 0.7747, IoU.traffic sign: 0.8366, IoU.vegetation: 0.9327, IoU.terrain: 0.6618, IoU.sky: 0.9600, IoU.person: 0.8698, IoU.rider: 0.7413, IoU.car: 0.9637, IoU.truck: 0.9170, IoU.bus: 0.9343, IoU.train: 0.8789, IoU.motorcycle: 0.7733, IoU.bicycle: 0.8218, Acc.road: 0.9922, Acc.sidewalk: 0.9436, Acc.building: 0.9660, Acc.wall: 0.7692, Acc.fence: 0.8305, Acc.pole: 0.8498, Acc.traffic light: 0.8773, Acc.traffic sign: 0.9080, Acc.vegetation: 0.9726, Acc.terrain: 0.7569, Acc.sky: 0.9818, Acc.person: 0.9375, Acc.rider: 0.8520, Acc.car: 0.9825, Acc.truck: 0.9503, Acc.bus: 0.9713, Acc.train: 0.9134, Acc.motorcycle: 0.8820, Acc.bicycle: 0.9213 +2022-05-10 07:01:03,873 - mmseg - INFO - Iter [25050/80000] lr: 9.862e-07, eta: 1 day, 5:43:55, time: 4.111, data_time: 2.324, memory: 64699, decode.loss_cls: 0.0835, decode.loss_mask: 0.2248, decode.loss_dice: 0.6255, decode.d0.loss_cls: 0.3109, decode.d0.loss_mask: 0.2324, decode.d0.loss_dice: 0.6689, decode.d1.loss_cls: 0.1086, decode.d1.loss_mask: 0.2264, decode.d1.loss_dice: 0.6361, decode.d2.loss_cls: 0.1025, decode.d2.loss_mask: 0.2260, decode.d2.loss_dice: 0.6315, decode.d3.loss_cls: 0.0925, decode.d3.loss_mask: 0.2251, decode.d3.loss_dice: 0.6329, decode.d4.loss_cls: 0.0923, decode.d4.loss_mask: 0.2255, decode.d4.loss_dice: 
0.6275, decode.d5.loss_cls: 0.0921, decode.d5.loss_mask: 0.2247, decode.d5.loss_dice: 0.6287, decode.d6.loss_cls: 0.0934, decode.d6.loss_mask: 0.2249, decode.d6.loss_dice: 0.6240, decode.d7.loss_cls: 0.0885, decode.d7.loss_mask: 0.2250, decode.d7.loss_dice: 0.6210, decode.d8.loss_cls: 0.0890, decode.d8.loss_mask: 0.2246, decode.d8.loss_dice: 0.6267, loss: 9.7357 +2022-05-10 07:02:33,489 - mmseg - INFO - Iter [25100/80000] lr: 9.853e-07, eta: 1 day, 5:42:01, time: 1.792, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1014, decode.loss_mask: 0.2191, decode.loss_dice: 0.6301, decode.d0.loss_cls: 0.3339, decode.d0.loss_mask: 0.2295, decode.d0.loss_dice: 0.6656, decode.d1.loss_cls: 0.1259, decode.d1.loss_mask: 0.2220, decode.d1.loss_dice: 0.6438, decode.d2.loss_cls: 0.1206, decode.d2.loss_mask: 0.2198, decode.d2.loss_dice: 0.6330, decode.d3.loss_cls: 0.1048, decode.d3.loss_mask: 0.2192, decode.d3.loss_dice: 0.6294, decode.d4.loss_cls: 0.1104, decode.d4.loss_mask: 0.2198, decode.d4.loss_dice: 0.6269, decode.d5.loss_cls: 0.1029, decode.d5.loss_mask: 0.2191, decode.d5.loss_dice: 0.6295, decode.d6.loss_cls: 0.1053, decode.d6.loss_mask: 0.2187, decode.d6.loss_dice: 0.6298, decode.d7.loss_cls: 0.1108, decode.d7.loss_mask: 0.2193, decode.d7.loss_dice: 0.6233, decode.d8.loss_cls: 0.1061, decode.d8.loss_mask: 0.2193, decode.d8.loss_dice: 0.6339, loss: 9.8731 +2022-05-10 07:04:05,756 - mmseg - INFO - Iter [25150/80000] lr: 9.844e-07, eta: 1 day, 5:40:12, time: 1.845, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0981, decode.loss_mask: 0.2261, decode.loss_dice: 0.6251, decode.d0.loss_cls: 0.3301, decode.d0.loss_mask: 0.2367, decode.d0.loss_dice: 0.6642, decode.d1.loss_cls: 0.1145, decode.d1.loss_mask: 0.2280, decode.d1.loss_dice: 0.6392, decode.d2.loss_cls: 0.1075, decode.d2.loss_mask: 0.2265, decode.d2.loss_dice: 0.6315, decode.d3.loss_cls: 0.1003, decode.d3.loss_mask: 0.2254, decode.d3.loss_dice: 0.6277, decode.d4.loss_cls: 0.0929, decode.d4.loss_mask: 0.2261, 
decode.d4.loss_dice: 0.6264, decode.d5.loss_cls: 0.1005, decode.d5.loss_mask: 0.2260, decode.d5.loss_dice: 0.6331, decode.d6.loss_cls: 0.0936, decode.d6.loss_mask: 0.2260, decode.d6.loss_dice: 0.6269, decode.d7.loss_cls: 0.0907, decode.d7.loss_mask: 0.2268, decode.d7.loss_dice: 0.6278, decode.d8.loss_cls: 0.0969, decode.d8.loss_mask: 0.2266, decode.d8.loss_dice: 0.6281, loss: 9.8293 +2022-05-10 07:05:37,042 - mmseg - INFO - Iter [25200/80000] lr: 9.835e-07, eta: 1 day, 5:38:21, time: 1.825, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0929, decode.loss_mask: 0.2358, decode.loss_dice: 0.6339, decode.d0.loss_cls: 0.3205, decode.d0.loss_mask: 0.2447, decode.d0.loss_dice: 0.6690, decode.d1.loss_cls: 0.1159, decode.d1.loss_mask: 0.2371, decode.d1.loss_dice: 0.6475, decode.d2.loss_cls: 0.1050, decode.d2.loss_mask: 0.2364, decode.d2.loss_dice: 0.6417, decode.d3.loss_cls: 0.1013, decode.d3.loss_mask: 0.2364, decode.d3.loss_dice: 0.6322, decode.d4.loss_cls: 0.1040, decode.d4.loss_mask: 0.2353, decode.d4.loss_dice: 0.6341, decode.d5.loss_cls: 0.0983, decode.d5.loss_mask: 0.2356, decode.d5.loss_dice: 0.6375, decode.d6.loss_cls: 0.0930, decode.d6.loss_mask: 0.2360, decode.d6.loss_dice: 0.6337, decode.d7.loss_cls: 0.0955, decode.d7.loss_mask: 0.2349, decode.d7.loss_dice: 0.6304, decode.d8.loss_cls: 0.0967, decode.d8.loss_mask: 0.2360, decode.d8.loss_dice: 0.6350, loss: 9.9863 +2022-05-10 07:07:06,960 - mmseg - INFO - Iter [25250/80000] lr: 9.826e-07, eta: 1 day, 5:36:28, time: 1.800, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0997, decode.loss_mask: 0.2292, decode.loss_dice: 0.6278, decode.d0.loss_cls: 0.3312, decode.d0.loss_mask: 0.2394, decode.d0.loss_dice: 0.6687, decode.d1.loss_cls: 0.1063, decode.d1.loss_mask: 0.2301, decode.d1.loss_dice: 0.6461, decode.d2.loss_cls: 0.1119, decode.d2.loss_mask: 0.2283, decode.d2.loss_dice: 0.6381, decode.d3.loss_cls: 0.1091, decode.d3.loss_mask: 0.2287, decode.d3.loss_dice: 0.6306, decode.d4.loss_cls: 0.0987, 
decode.d4.loss_mask: 0.2292, decode.d4.loss_dice: 0.6286, decode.d5.loss_cls: 0.1059, decode.d5.loss_mask: 0.2294, decode.d5.loss_dice: 0.6303, decode.d6.loss_cls: 0.1029, decode.d6.loss_mask: 0.2292, decode.d6.loss_dice: 0.6286, decode.d7.loss_cls: 0.1034, decode.d7.loss_mask: 0.2292, decode.d7.loss_dice: 0.6301, decode.d8.loss_cls: 0.1016, decode.d8.loss_mask: 0.2293, decode.d8.loss_dice: 0.6330, loss: 9.9346 +2022-05-10 07:08:39,522 - mmseg - INFO - Iter [25300/80000] lr: 9.817e-07, eta: 1 day, 5:34:40, time: 1.851, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0872, decode.loss_mask: 0.2321, decode.loss_dice: 0.6307, decode.d0.loss_cls: 0.3198, decode.d0.loss_mask: 0.2443, decode.d0.loss_dice: 0.6711, decode.d1.loss_cls: 0.1027, decode.d1.loss_mask: 0.2358, decode.d1.loss_dice: 0.6464, decode.d2.loss_cls: 0.1033, decode.d2.loss_mask: 0.2336, decode.d2.loss_dice: 0.6394, decode.d3.loss_cls: 0.0933, decode.d3.loss_mask: 0.2331, decode.d3.loss_dice: 0.6320, decode.d4.loss_cls: 0.1008, decode.d4.loss_mask: 0.2327, decode.d4.loss_dice: 0.6295, decode.d5.loss_cls: 0.0930, decode.d5.loss_mask: 0.2333, decode.d5.loss_dice: 0.6326, decode.d6.loss_cls: 0.0889, decode.d6.loss_mask: 0.2328, decode.d6.loss_dice: 0.6337, decode.d7.loss_cls: 0.0884, decode.d7.loss_mask: 0.2326, decode.d7.loss_dice: 0.6332, decode.d8.loss_cls: 0.0912, decode.d8.loss_mask: 0.2328, decode.d8.loss_dice: 0.6283, loss: 9.8883 +2022-05-10 07:10:10,482 - mmseg - INFO - Iter [25350/80000] lr: 9.808e-07, eta: 1 day, 5:32:49, time: 1.819, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1019, decode.loss_mask: 0.2406, decode.loss_dice: 0.6371, decode.d0.loss_cls: 0.3330, decode.d0.loss_mask: 0.2512, decode.d0.loss_dice: 0.6793, decode.d1.loss_cls: 0.1199, decode.d1.loss_mask: 0.2428, decode.d1.loss_dice: 0.6504, decode.d2.loss_cls: 0.1077, decode.d2.loss_mask: 0.2417, decode.d2.loss_dice: 0.6466, decode.d3.loss_cls: 0.1018, decode.d3.loss_mask: 0.2411, decode.d3.loss_dice: 0.6430, 
decode.d4.loss_cls: 0.1044, decode.d4.loss_mask: 0.2412, decode.d4.loss_dice: 0.6358, decode.d5.loss_cls: 0.1004, decode.d5.loss_mask: 0.2406, decode.d5.loss_dice: 0.6389, decode.d6.loss_cls: 0.0996, decode.d6.loss_mask: 0.2402, decode.d6.loss_dice: 0.6400, decode.d7.loss_cls: 0.1006, decode.d7.loss_mask: 0.2401, decode.d7.loss_dice: 0.6400, decode.d8.loss_cls: 0.1025, decode.d8.loss_mask: 0.2405, decode.d8.loss_dice: 0.6397, loss: 10.1425 +2022-05-10 07:11:41,517 - mmseg - INFO - Iter [25400/80000] lr: 9.799e-07, eta: 1 day, 5:30:58, time: 1.821, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1221, decode.loss_mask: 0.2278, decode.loss_dice: 0.6338, decode.d0.loss_cls: 0.3341, decode.d0.loss_mask: 0.2384, decode.d0.loss_dice: 0.6770, decode.d1.loss_cls: 0.1397, decode.d1.loss_mask: 0.2294, decode.d1.loss_dice: 0.6501, decode.d2.loss_cls: 0.1226, decode.d2.loss_mask: 0.2288, decode.d2.loss_dice: 0.6452, decode.d3.loss_cls: 0.1171, decode.d3.loss_mask: 0.2284, decode.d3.loss_dice: 0.6377, decode.d4.loss_cls: 0.1202, decode.d4.loss_mask: 0.2289, decode.d4.loss_dice: 0.6345, decode.d5.loss_cls: 0.1176, decode.d5.loss_mask: 0.2278, decode.d5.loss_dice: 0.6374, decode.d6.loss_cls: 0.1234, decode.d6.loss_mask: 0.2284, decode.d6.loss_dice: 0.6359, decode.d7.loss_cls: 0.1177, decode.d7.loss_mask: 0.2281, decode.d7.loss_dice: 0.6429, decode.d8.loss_cls: 0.1160, decode.d8.loss_mask: 0.2277, decode.d8.loss_dice: 0.6376, loss: 10.1562 +2022-05-10 07:13:11,803 - mmseg - INFO - Iter [25450/80000] lr: 9.791e-07, eta: 1 day, 5:29:06, time: 1.806, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1014, decode.loss_mask: 0.2314, decode.loss_dice: 0.6353, decode.d0.loss_cls: 0.3202, decode.d0.loss_mask: 0.2439, decode.d0.loss_dice: 0.6755, decode.d1.loss_cls: 0.1207, decode.d1.loss_mask: 0.2340, decode.d1.loss_dice: 0.6478, decode.d2.loss_cls: 0.1143, decode.d2.loss_mask: 0.2325, decode.d2.loss_dice: 0.6438, decode.d3.loss_cls: 0.1072, decode.d3.loss_mask: 0.2326, 
decode.d3.loss_dice: 0.6373, decode.d4.loss_cls: 0.1060, decode.d4.loss_mask: 0.2321, decode.d4.loss_dice: 0.6380, decode.d5.loss_cls: 0.0997, decode.d5.loss_mask: 0.2329, decode.d5.loss_dice: 0.6389, decode.d6.loss_cls: 0.1038, decode.d6.loss_mask: 0.2318, decode.d6.loss_dice: 0.6391, decode.d7.loss_cls: 0.1055, decode.d7.loss_mask: 0.2324, decode.d7.loss_dice: 0.6371, decode.d8.loss_cls: 0.0976, decode.d8.loss_mask: 0.2316, decode.d8.loss_dice: 0.6389, loss: 10.0433 +2022-05-10 07:14:44,568 - mmseg - INFO - Iter [25500/80000] lr: 9.782e-07, eta: 1 day, 5:27:19, time: 1.855, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0992, decode.loss_mask: 0.2265, decode.loss_dice: 0.6163, decode.d0.loss_cls: 0.3301, decode.d0.loss_mask: 0.2362, decode.d0.loss_dice: 0.6574, decode.d1.loss_cls: 0.1101, decode.d1.loss_mask: 0.2286, decode.d1.loss_dice: 0.6323, decode.d2.loss_cls: 0.1021, decode.d2.loss_mask: 0.2277, decode.d2.loss_dice: 0.6268, decode.d3.loss_cls: 0.1012, decode.d3.loss_mask: 0.2269, decode.d3.loss_dice: 0.6189, decode.d4.loss_cls: 0.1020, decode.d4.loss_mask: 0.2272, decode.d4.loss_dice: 0.6180, decode.d5.loss_cls: 0.0987, decode.d5.loss_mask: 0.2266, decode.d5.loss_dice: 0.6195, decode.d6.loss_cls: 0.0928, decode.d6.loss_mask: 0.2267, decode.d6.loss_dice: 0.6195, decode.d7.loss_cls: 0.0945, decode.d7.loss_mask: 0.2264, decode.d7.loss_dice: 0.6179, decode.d8.loss_cls: 0.0965, decode.d8.loss_mask: 0.2261, decode.d8.loss_dice: 0.6182, loss: 9.7509 +2022-05-10 07:16:16,161 - mmseg - INFO - Iter [25550/80000] lr: 9.773e-07, eta: 1 day, 5:25:30, time: 1.832, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0903, decode.loss_mask: 0.2341, decode.loss_dice: 0.6284, decode.d0.loss_cls: 0.3304, decode.d0.loss_mask: 0.2470, decode.d0.loss_dice: 0.6607, decode.d1.loss_cls: 0.1080, decode.d1.loss_mask: 0.2363, decode.d1.loss_dice: 0.6362, decode.d2.loss_cls: 0.1024, decode.d2.loss_mask: 0.2344, decode.d2.loss_dice: 0.6367, decode.d3.loss_cls: 0.0935, 
decode.d3.loss_mask: 0.2344, decode.d3.loss_dice: 0.6265, decode.d4.loss_cls: 0.0896, decode.d4.loss_mask: 0.2340, decode.d4.loss_dice: 0.6290, decode.d5.loss_cls: 0.0913, decode.d5.loss_mask: 0.2344, decode.d5.loss_dice: 0.6288, decode.d6.loss_cls: 0.0903, decode.d6.loss_mask: 0.2340, decode.d6.loss_dice: 0.6236, decode.d7.loss_cls: 0.0898, decode.d7.loss_mask: 0.2341, decode.d7.loss_dice: 0.6267, decode.d8.loss_cls: 0.0969, decode.d8.loss_mask: 0.2342, decode.d8.loss_dice: 0.6251, loss: 9.8610 +2022-05-10 07:17:46,645 - mmseg - INFO - Iter [25600/80000] lr: 9.764e-07, eta: 1 day, 5:23:38, time: 1.809, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0941, decode.loss_mask: 0.2229, decode.loss_dice: 0.6343, decode.d0.loss_cls: 0.3108, decode.d0.loss_mask: 0.2337, decode.d0.loss_dice: 0.6680, decode.d1.loss_cls: 0.1096, decode.d1.loss_mask: 0.2249, decode.d1.loss_dice: 0.6425, decode.d2.loss_cls: 0.1087, decode.d2.loss_mask: 0.2232, decode.d2.loss_dice: 0.6365, decode.d3.loss_cls: 0.0938, decode.d3.loss_mask: 0.2234, decode.d3.loss_dice: 0.6324, decode.d4.loss_cls: 0.1028, decode.d4.loss_mask: 0.2242, decode.d4.loss_dice: 0.6310, decode.d5.loss_cls: 0.0986, decode.d5.loss_mask: 0.2230, decode.d5.loss_dice: 0.6333, decode.d6.loss_cls: 0.0944, decode.d6.loss_mask: 0.2228, decode.d6.loss_dice: 0.6332, decode.d7.loss_cls: 0.0870, decode.d7.loss_mask: 0.2228, decode.d7.loss_dice: 0.6341, decode.d8.loss_cls: 0.0915, decode.d8.loss_mask: 0.2237, decode.d8.loss_dice: 0.6327, loss: 9.8139 +2022-05-10 07:19:17,375 - mmseg - INFO - Iter [25650/80000] lr: 9.755e-07, eta: 1 day, 5:21:47, time: 1.815, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0954, decode.loss_mask: 0.2265, decode.loss_dice: 0.6054, decode.d0.loss_cls: 0.3212, decode.d0.loss_mask: 0.2392, decode.d0.loss_dice: 0.6495, decode.d1.loss_cls: 0.1207, decode.d1.loss_mask: 0.2302, decode.d1.loss_dice: 0.6228, decode.d2.loss_cls: 0.1095, decode.d2.loss_mask: 0.2297, decode.d2.loss_dice: 0.6192, 
decode.d3.loss_cls: 0.0941, decode.d3.loss_mask: 0.2281, decode.d3.loss_dice: 0.6106, decode.d4.loss_cls: 0.0924, decode.d4.loss_mask: 0.2274, decode.d4.loss_dice: 0.6151, decode.d5.loss_cls: 0.0994, decode.d5.loss_mask: 0.2276, decode.d5.loss_dice: 0.6132, decode.d6.loss_cls: 0.0902, decode.d6.loss_mask: 0.2268, decode.d6.loss_dice: 0.6108, decode.d7.loss_cls: 0.0959, decode.d7.loss_mask: 0.2270, decode.d7.loss_dice: 0.6130, decode.d8.loss_cls: 0.0941, decode.d8.loss_mask: 0.2266, decode.d8.loss_dice: 0.6105, loss: 9.6725 +2022-05-10 07:20:48,803 - mmseg - INFO - Iter [25700/80000] lr: 9.746e-07, eta: 1 day, 5:19:57, time: 1.826, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0993, decode.loss_mask: 0.2283, decode.loss_dice: 0.6233, decode.d0.loss_cls: 0.3249, decode.d0.loss_mask: 0.2394, decode.d0.loss_dice: 0.6629, decode.d1.loss_cls: 0.1223, decode.d1.loss_mask: 0.2312, decode.d1.loss_dice: 0.6322, decode.d2.loss_cls: 0.1109, decode.d2.loss_mask: 0.2298, decode.d2.loss_dice: 0.6259, decode.d3.loss_cls: 0.1114, decode.d3.loss_mask: 0.2283, decode.d3.loss_dice: 0.6236, decode.d4.loss_cls: 0.1122, decode.d4.loss_mask: 0.2281, decode.d4.loss_dice: 0.6224, decode.d5.loss_cls: 0.1107, decode.d5.loss_mask: 0.2299, decode.d5.loss_dice: 0.6237, decode.d6.loss_cls: 0.1020, decode.d6.loss_mask: 0.2276, decode.d6.loss_dice: 0.6192, decode.d7.loss_cls: 0.1062, decode.d7.loss_mask: 0.2276, decode.d7.loss_dice: 0.6203, decode.d8.loss_cls: 0.1069, decode.d8.loss_mask: 0.2282, decode.d8.loss_dice: 0.6219, loss: 9.8805 +2022-05-10 07:22:19,794 - mmseg - INFO - Iter [25750/80000] lr: 9.737e-07, eta: 1 day, 5:18:07, time: 1.823, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0926, decode.loss_mask: 0.2329, decode.loss_dice: 0.6225, decode.d0.loss_cls: 0.3267, decode.d0.loss_mask: 0.2445, decode.d0.loss_dice: 0.6655, decode.d1.loss_cls: 0.1125, decode.d1.loss_mask: 0.2353, decode.d1.loss_dice: 0.6334, decode.d2.loss_cls: 0.1042, decode.d2.loss_mask: 0.2345, 
decode.d2.loss_dice: 0.6314, decode.d3.loss_cls: 0.0917, decode.d3.loss_mask: 0.2334, decode.d3.loss_dice: 0.6218, decode.d4.loss_cls: 0.0962, decode.d4.loss_mask: 0.2338, decode.d4.loss_dice: 0.6243, decode.d5.loss_cls: 0.0959, decode.d5.loss_mask: 0.2340, decode.d5.loss_dice: 0.6220, decode.d6.loss_cls: 0.0964, decode.d6.loss_mask: 0.2334, decode.d6.loss_dice: 0.6251, decode.d7.loss_cls: 0.0987, decode.d7.loss_mask: 0.2338, decode.d7.loss_dice: 0.6237, decode.d8.loss_cls: 0.0989, decode.d8.loss_mask: 0.2326, decode.d8.loss_dice: 0.6202, loss: 9.8521 +2022-05-10 07:23:49,858 - mmseg - INFO - Iter [25800/80000] lr: 9.728e-07, eta: 1 day, 5:16:15, time: 1.801, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0817, decode.loss_mask: 0.2246, decode.loss_dice: 0.6273, decode.d0.loss_cls: 0.3096, decode.d0.loss_mask: 0.2344, decode.d0.loss_dice: 0.6624, decode.d1.loss_cls: 0.1054, decode.d1.loss_mask: 0.2265, decode.d1.loss_dice: 0.6358, decode.d2.loss_cls: 0.1025, decode.d2.loss_mask: 0.2257, decode.d2.loss_dice: 0.6323, decode.d3.loss_cls: 0.0861, decode.d3.loss_mask: 0.2247, decode.d3.loss_dice: 0.6274, decode.d4.loss_cls: 0.0804, decode.d4.loss_mask: 0.2247, decode.d4.loss_dice: 0.6302, decode.d5.loss_cls: 0.0903, decode.d5.loss_mask: 0.2245, decode.d5.loss_dice: 0.6272, decode.d6.loss_cls: 0.0903, decode.d6.loss_mask: 0.2249, decode.d6.loss_dice: 0.6265, decode.d7.loss_cls: 0.0855, decode.d7.loss_mask: 0.2253, decode.d7.loss_dice: 0.6271, decode.d8.loss_cls: 0.0832, decode.d8.loss_mask: 0.2251, decode.d8.loss_dice: 0.6228, loss: 9.6942 +2022-05-10 07:25:20,500 - mmseg - INFO - Iter [25850/80000] lr: 9.719e-07, eta: 1 day, 5:14:24, time: 1.813, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0945, decode.loss_mask: 0.2288, decode.loss_dice: 0.6204, decode.d0.loss_cls: 0.3278, decode.d0.loss_mask: 0.2401, decode.d0.loss_dice: 0.6633, decode.d1.loss_cls: 0.1093, decode.d1.loss_mask: 0.2312, decode.d1.loss_dice: 0.6370, decode.d2.loss_cls: 0.1013, 
decode.d2.loss_mask: 0.2296, decode.d2.loss_dice: 0.6276, decode.d3.loss_cls: 0.0984, decode.d3.loss_mask: 0.2298, decode.d3.loss_dice: 0.6221, decode.d4.loss_cls: 0.0983, decode.d4.loss_mask: 0.2294, decode.d4.loss_dice: 0.6245, decode.d5.loss_cls: 0.0930, decode.d5.loss_mask: 0.2294, decode.d5.loss_dice: 0.6246, decode.d6.loss_cls: 0.0928, decode.d6.loss_mask: 0.2292, decode.d6.loss_dice: 0.6233, decode.d7.loss_cls: 0.0959, decode.d7.loss_mask: 0.2293, decode.d7.loss_dice: 0.6214, decode.d8.loss_cls: 0.0957, decode.d8.loss_mask: 0.2290, decode.d8.loss_dice: 0.6197, loss: 9.7967 +2022-05-10 07:26:52,998 - mmseg - INFO - Iter [25900/80000] lr: 9.710e-07, eta: 1 day, 5:12:37, time: 1.850, data_time: 0.064, memory: 64699, decode.loss_cls: 0.0825, decode.loss_mask: 0.2286, decode.loss_dice: 0.6140, decode.d0.loss_cls: 0.3175, decode.d0.loss_mask: 0.2412, decode.d0.loss_dice: 0.6515, decode.d1.loss_cls: 0.1064, decode.d1.loss_mask: 0.2309, decode.d1.loss_dice: 0.6271, decode.d2.loss_cls: 0.0914, decode.d2.loss_mask: 0.2293, decode.d2.loss_dice: 0.6219, decode.d3.loss_cls: 0.0891, decode.d3.loss_mask: 0.2301, decode.d3.loss_dice: 0.6116, decode.d4.loss_cls: 0.0881, decode.d4.loss_mask: 0.2290, decode.d4.loss_dice: 0.6118, decode.d5.loss_cls: 0.0917, decode.d5.loss_mask: 0.2289, decode.d5.loss_dice: 0.6136, decode.d6.loss_cls: 0.0787, decode.d6.loss_mask: 0.2286, decode.d6.loss_dice: 0.6113, decode.d7.loss_cls: 0.0880, decode.d7.loss_mask: 0.2291, decode.d7.loss_dice: 0.6132, decode.d8.loss_cls: 0.0828, decode.d8.loss_mask: 0.2288, decode.d8.loss_dice: 0.6121, loss: 9.6086 +2022-05-10 07:28:22,725 - mmseg - INFO - Iter [25950/80000] lr: 9.701e-07, eta: 1 day, 5:10:44, time: 1.795, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1019, decode.loss_mask: 0.2277, decode.loss_dice: 0.6158, decode.d0.loss_cls: 0.3314, decode.d0.loss_mask: 0.2416, decode.d0.loss_dice: 0.6582, decode.d1.loss_cls: 0.1234, decode.d1.loss_mask: 0.2313, decode.d1.loss_dice: 0.6335, 
decode.d2.loss_cls: 0.1116, decode.d2.loss_mask: 0.2296, decode.d2.loss_dice: 0.6270, decode.d3.loss_cls: 0.0971, decode.d3.loss_mask: 0.2290, decode.d3.loss_dice: 0.6225, decode.d4.loss_cls: 0.0953, decode.d4.loss_mask: 0.2285, decode.d4.loss_dice: 0.6196, decode.d5.loss_cls: 0.1064, decode.d5.loss_mask: 0.2283, decode.d5.loss_dice: 0.6209, decode.d6.loss_cls: 0.1043, decode.d6.loss_mask: 0.2285, decode.d6.loss_dice: 0.6200, decode.d7.loss_cls: 0.1103, decode.d7.loss_mask: 0.2288, decode.d7.loss_dice: 0.6175, decode.d8.loss_cls: 0.1056, decode.d8.loss_mask: 0.2283, decode.d8.loss_dice: 0.6213, loss: 9.8456 +2022-05-10 07:29:53,025 - mmseg - INFO - Saving checkpoint at 26000 iterations +2022-05-10 07:30:26,249 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 07:30:26,257 - mmseg - INFO - Iter [26000/80000] lr: 9.692e-07, eta: 1 day, 5:10:01, time: 2.468, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0844, decode.loss_mask: 0.2236, decode.loss_dice: 0.6233, decode.d0.loss_cls: 0.3108, decode.d0.loss_mask: 0.2343, decode.d0.loss_dice: 0.6639, decode.d1.loss_cls: 0.0965, decode.d1.loss_mask: 0.2263, decode.d1.loss_dice: 0.6410, decode.d2.loss_cls: 0.0955, decode.d2.loss_mask: 0.2255, decode.d2.loss_dice: 0.6357, decode.d3.loss_cls: 0.0881, decode.d3.loss_mask: 0.2243, decode.d3.loss_dice: 0.6313, decode.d4.loss_cls: 0.0912, decode.d4.loss_mask: 0.2238, decode.d4.loss_dice: 0.6241, decode.d5.loss_cls: 0.0874, decode.d5.loss_mask: 0.2240, decode.d5.loss_dice: 0.6342, decode.d6.loss_cls: 0.0842, decode.d6.loss_mask: 0.2232, decode.d6.loss_dice: 0.6286, decode.d7.loss_cls: 0.0831, decode.d7.loss_mask: 0.2228, decode.d7.loss_dice: 0.6310, decode.d8.loss_cls: 0.0846, decode.d8.loss_mask: 0.2234, decode.d8.loss_dice: 0.6307, loss: 9.7007 +2022-05-10 07:32:21,688 - mmseg - INFO - per class results: +2022-05-10 07:32:21,700 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | 
++---------------+-------+-------+ +| road | 98.64 | 99.19 | +| sidewalk | 88.8 | 94.26 | +| building | 94.42 | 96.87 | +| wall | 66.87 | 81.46 | +| fence | 74.39 | 83.45 | +| pole | 71.44 | 82.83 | +| traffic light | 76.84 | 89.63 | +| traffic sign | 83.78 | 90.96 | +| vegetation | 93.43 | 97.01 | +| terrain | 69.08 | 80.95 | +| sky | 95.9 | 98.47 | +| person | 86.93 | 93.28 | +| rider | 74.24 | 86.39 | +| car | 96.26 | 98.31 | +| truck | 92.09 | 95.27 | +| bus | 93.21 | 96.85 | +| train | 87.41 | 90.13 | +| motorcycle | 76.59 | 87.19 | +| bicycle | 83.08 | 91.08 | ++---------------+-------+-------+ +2022-05-10 07:32:21,701 - mmseg - INFO - Summary: +2022-05-10 07:32:21,701 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.03 | 84.39 | 91.24 | ++-------+-------+-------+ +2022-05-10 07:32:21,705 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 07:32:21,705 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8439, mAcc: 0.9124, IoU.road: 0.9864, IoU.sidewalk: 0.8880, IoU.building: 0.9442, IoU.wall: 0.6687, IoU.fence: 0.7439, IoU.pole: 0.7144, IoU.traffic light: 0.7684, IoU.traffic sign: 0.8378, IoU.vegetation: 0.9343, IoU.terrain: 0.6908, IoU.sky: 0.9590, IoU.person: 0.8693, IoU.rider: 0.7424, IoU.car: 0.9626, IoU.truck: 0.9209, IoU.bus: 0.9321, IoU.train: 0.8741, IoU.motorcycle: 0.7659, IoU.bicycle: 0.8308, Acc.road: 0.9919, Acc.sidewalk: 0.9426, Acc.building: 0.9687, Acc.wall: 0.8146, Acc.fence: 0.8345, Acc.pole: 0.8283, Acc.traffic light: 0.8963, Acc.traffic sign: 0.9096, Acc.vegetation: 0.9701, Acc.terrain: 0.8095, Acc.sky: 0.9847, Acc.person: 0.9328, Acc.rider: 0.8639, Acc.car: 0.9831, Acc.truck: 0.9527, Acc.bus: 0.9685, Acc.train: 0.9013, Acc.motorcycle: 0.8719, Acc.bicycle: 0.9108 +2022-05-10 07:33:54,516 - mmseg - INFO - Iter [26050/80000] lr: 9.683e-07, eta: 1 day, 5:12:14, time: 4.168, data_time: 2.375, memory: 64699, decode.loss_cls: 0.0963, 
decode.loss_mask: 0.2227, decode.loss_dice: 0.6159, decode.d0.loss_cls: 0.3425, decode.d0.loss_mask: 0.2333, decode.d0.loss_dice: 0.6561, decode.d1.loss_cls: 0.1196, decode.d1.loss_mask: 0.2246, decode.d1.loss_dice: 0.6270, decode.d2.loss_cls: 0.1123, decode.d2.loss_mask: 0.2227, decode.d2.loss_dice: 0.6233, decode.d3.loss_cls: 0.1043, decode.d3.loss_mask: 0.2229, decode.d3.loss_dice: 0.6119, decode.d4.loss_cls: 0.0998, decode.d4.loss_mask: 0.2224, decode.d4.loss_dice: 0.6159, decode.d5.loss_cls: 0.1058, decode.d5.loss_mask: 0.2219, decode.d5.loss_dice: 0.6148, decode.d6.loss_cls: 0.1005, decode.d6.loss_mask: 0.2218, decode.d6.loss_dice: 0.6109, decode.d7.loss_cls: 0.1033, decode.d7.loss_mask: 0.2224, decode.d7.loss_dice: 0.6164, decode.d8.loss_cls: 0.1019, decode.d8.loss_mask: 0.2217, decode.d8.loss_dice: 0.6160, loss: 9.7308 +2022-05-10 07:35:25,324 - mmseg - INFO - Iter [26100/80000] lr: 9.674e-07, eta: 1 day, 5:10:23, time: 1.816, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1018, decode.loss_mask: 0.2263, decode.loss_dice: 0.6389, decode.d0.loss_cls: 0.3375, decode.d0.loss_mask: 0.2391, decode.d0.loss_dice: 0.6738, decode.d1.loss_cls: 0.1235, decode.d1.loss_mask: 0.2287, decode.d1.loss_dice: 0.6548, decode.d2.loss_cls: 0.1211, decode.d2.loss_mask: 0.2271, decode.d2.loss_dice: 0.6511, decode.d3.loss_cls: 0.1039, decode.d3.loss_mask: 0.2272, decode.d3.loss_dice: 0.6404, decode.d4.loss_cls: 0.1070, decode.d4.loss_mask: 0.2265, decode.d4.loss_dice: 0.6403, decode.d5.loss_cls: 0.1030, decode.d5.loss_mask: 0.2270, decode.d5.loss_dice: 0.6411, decode.d6.loss_cls: 0.1071, decode.d6.loss_mask: 0.2271, decode.d6.loss_dice: 0.6365, decode.d7.loss_cls: 0.1081, decode.d7.loss_mask: 0.2273, decode.d7.loss_dice: 0.6402, decode.d8.loss_cls: 0.1061, decode.d8.loss_mask: 0.2267, decode.d8.loss_dice: 0.6366, loss: 10.0559 +2022-05-10 07:36:54,878 - mmseg - INFO - Iter [26150/80000] lr: 9.665e-07, eta: 1 day, 5:08:30, time: 1.791, data_time: 0.019, memory: 64699, 
decode.loss_cls: 0.0914, decode.loss_mask: 0.2261, decode.loss_dice: 0.6183, decode.d0.loss_cls: 0.3203, decode.d0.loss_mask: 0.2378, decode.d0.loss_dice: 0.6561, decode.d1.loss_cls: 0.1076, decode.d1.loss_mask: 0.2287, decode.d1.loss_dice: 0.6328, decode.d2.loss_cls: 0.1030, decode.d2.loss_mask: 0.2281, decode.d2.loss_dice: 0.6244, decode.d3.loss_cls: 0.0938, decode.d3.loss_mask: 0.2275, decode.d3.loss_dice: 0.6232, decode.d4.loss_cls: 0.0910, decode.d4.loss_mask: 0.2266, decode.d4.loss_dice: 0.6196, decode.d5.loss_cls: 0.0938, decode.d5.loss_mask: 0.2272, decode.d5.loss_dice: 0.6195, decode.d6.loss_cls: 0.0873, decode.d6.loss_mask: 0.2263, decode.d6.loss_dice: 0.6200, decode.d7.loss_cls: 0.0883, decode.d7.loss_mask: 0.2269, decode.d7.loss_dice: 0.6177, decode.d8.loss_cls: 0.0937, decode.d8.loss_mask: 0.2269, decode.d8.loss_dice: 0.6208, loss: 9.7048 +2022-05-10 07:38:25,064 - mmseg - INFO - Iter [26200/80000] lr: 9.656e-07, eta: 1 day, 5:06:37, time: 1.804, data_time: 0.020, memory: 64699, decode.loss_cls: 0.1072, decode.loss_mask: 0.2316, decode.loss_dice: 0.6312, decode.d0.loss_cls: 0.3477, decode.d0.loss_mask: 0.2445, decode.d0.loss_dice: 0.6753, decode.d1.loss_cls: 0.1253, decode.d1.loss_mask: 0.2344, decode.d1.loss_dice: 0.6504, decode.d2.loss_cls: 0.1194, decode.d2.loss_mask: 0.2327, decode.d2.loss_dice: 0.6407, decode.d3.loss_cls: 0.1109, decode.d3.loss_mask: 0.2328, decode.d3.loss_dice: 0.6357, decode.d4.loss_cls: 0.1169, decode.d4.loss_mask: 0.2327, decode.d4.loss_dice: 0.6308, decode.d5.loss_cls: 0.1138, decode.d5.loss_mask: 0.2327, decode.d5.loss_dice: 0.6345, decode.d6.loss_cls: 0.1120, decode.d6.loss_mask: 0.2316, decode.d6.loss_dice: 0.6314, decode.d7.loss_cls: 0.1110, decode.d7.loss_mask: 0.2312, decode.d7.loss_dice: 0.6333, decode.d8.loss_cls: 0.1010, decode.d8.loss_mask: 0.2314, decode.d8.loss_dice: 0.6351, loss: 10.0993 +2022-05-10 07:39:56,348 - mmseg - INFO - Iter [26250/80000] lr: 9.647e-07, eta: 1 day, 5:04:47, time: 1.826, data_time: 0.064, 
memory: 64699, decode.loss_cls: 0.0878, decode.loss_mask: 0.2277, decode.loss_dice: 0.6246, decode.d0.loss_cls: 0.3300, decode.d0.loss_mask: 0.2373, decode.d0.loss_dice: 0.6613, decode.d1.loss_cls: 0.1137, decode.d1.loss_mask: 0.2293, decode.d1.loss_dice: 0.6297, decode.d2.loss_cls: 0.1080, decode.d2.loss_mask: 0.2282, decode.d2.loss_dice: 0.6281, decode.d3.loss_cls: 0.0995, decode.d3.loss_mask: 0.2280, decode.d3.loss_dice: 0.6207, decode.d4.loss_cls: 0.0983, decode.d4.loss_mask: 0.2275, decode.d4.loss_dice: 0.6218, decode.d5.loss_cls: 0.0970, decode.d5.loss_mask: 0.2274, decode.d5.loss_dice: 0.6244, decode.d6.loss_cls: 0.0974, decode.d6.loss_mask: 0.2280, decode.d6.loss_dice: 0.6206, decode.d7.loss_cls: 0.0962, decode.d7.loss_mask: 0.2277, decode.d7.loss_dice: 0.6258, decode.d8.loss_cls: 0.1022, decode.d8.loss_mask: 0.2275, decode.d8.loss_dice: 0.6208, loss: 9.7966 +2022-05-10 07:41:26,226 - mmseg - INFO - Iter [26300/80000] lr: 9.638e-07, eta: 1 day, 5:02:55, time: 1.798, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1010, decode.loss_mask: 0.2266, decode.loss_dice: 0.6211, decode.d0.loss_cls: 0.3258, decode.d0.loss_mask: 0.2362, decode.d0.loss_dice: 0.6556, decode.d1.loss_cls: 0.1296, decode.d1.loss_mask: 0.2277, decode.d1.loss_dice: 0.6316, decode.d2.loss_cls: 0.1123, decode.d2.loss_mask: 0.2269, decode.d2.loss_dice: 0.6239, decode.d3.loss_cls: 0.1100, decode.d3.loss_mask: 0.2259, decode.d3.loss_dice: 0.6225, decode.d4.loss_cls: 0.1092, decode.d4.loss_mask: 0.2257, decode.d4.loss_dice: 0.6171, decode.d5.loss_cls: 0.1042, decode.d5.loss_mask: 0.2256, decode.d5.loss_dice: 0.6212, decode.d6.loss_cls: 0.1025, decode.d6.loss_mask: 0.2263, decode.d6.loss_dice: 0.6199, decode.d7.loss_cls: 0.1073, decode.d7.loss_mask: 0.2265, decode.d7.loss_dice: 0.6211, decode.d8.loss_cls: 0.1004, decode.d8.loss_mask: 0.2261, decode.d8.loss_dice: 0.6145, loss: 9.8243 +2022-05-10 07:42:56,657 - mmseg - INFO - Iter [26350/80000] lr: 9.629e-07, eta: 1 day, 5:01:03, time: 1.808, 
data_time: 0.019, memory: 64699, decode.loss_cls: 0.0940, decode.loss_mask: 0.2342, decode.loss_dice: 0.6162, decode.d0.loss_cls: 0.3252, decode.d0.loss_mask: 0.2446, decode.d0.loss_dice: 0.6477, decode.d1.loss_cls: 0.1148, decode.d1.loss_mask: 0.2376, decode.d1.loss_dice: 0.6237, decode.d2.loss_cls: 0.1077, decode.d2.loss_mask: 0.2355, decode.d2.loss_dice: 0.6222, decode.d3.loss_cls: 0.1022, decode.d3.loss_mask: 0.2341, decode.d3.loss_dice: 0.6187, decode.d4.loss_cls: 0.1049, decode.d4.loss_mask: 0.2349, decode.d4.loss_dice: 0.6168, decode.d5.loss_cls: 0.0986, decode.d5.loss_mask: 0.2344, decode.d5.loss_dice: 0.6201, decode.d6.loss_cls: 0.0951, decode.d6.loss_mask: 0.2343, decode.d6.loss_dice: 0.6166, decode.d7.loss_cls: 0.0976, decode.d7.loss_mask: 0.2342, decode.d7.loss_dice: 0.6164, decode.d8.loss_cls: 0.0964, decode.d8.loss_mask: 0.2341, decode.d8.loss_dice: 0.6160, loss: 9.8090 +2022-05-10 07:44:27,250 - mmseg - INFO - Iter [26400/80000] lr: 9.620e-07, eta: 1 day, 4:59:12, time: 1.812, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0944, decode.loss_mask: 0.2251, decode.loss_dice: 0.6289, decode.d0.loss_cls: 0.3228, decode.d0.loss_mask: 0.2348, decode.d0.loss_dice: 0.6638, decode.d1.loss_cls: 0.1181, decode.d1.loss_mask: 0.2281, decode.d1.loss_dice: 0.6450, decode.d2.loss_cls: 0.1004, decode.d2.loss_mask: 0.2263, decode.d2.loss_dice: 0.6362, decode.d3.loss_cls: 0.0958, decode.d3.loss_mask: 0.2257, decode.d3.loss_dice: 0.6317, decode.d4.loss_cls: 0.0955, decode.d4.loss_mask: 0.2260, decode.d4.loss_dice: 0.6323, decode.d5.loss_cls: 0.0969, decode.d5.loss_mask: 0.2251, decode.d5.loss_dice: 0.6285, decode.d6.loss_cls: 0.0899, decode.d6.loss_mask: 0.2249, decode.d6.loss_dice: 0.6257, decode.d7.loss_cls: 0.0900, decode.d7.loss_mask: 0.2252, decode.d7.loss_dice: 0.6239, decode.d8.loss_cls: 0.0929, decode.d8.loss_mask: 0.2253, decode.d8.loss_dice: 0.6297, loss: 9.8089 +2022-05-10 07:46:00,577 - mmseg - INFO - Iter [26450/80000] lr: 9.611e-07, eta: 1 day, 
4:57:27, time: 1.866, data_time: 0.064, memory: 64699, decode.loss_cls: 0.1053, decode.loss_mask: 0.2308, decode.loss_dice: 0.6184, decode.d0.loss_cls: 0.3374, decode.d0.loss_mask: 0.2462, decode.d0.loss_dice: 0.6670, decode.d1.loss_cls: 0.1233, decode.d1.loss_mask: 0.2327, decode.d1.loss_dice: 0.6353, decode.d2.loss_cls: 0.1173, decode.d2.loss_mask: 0.2322, decode.d2.loss_dice: 0.6304, decode.d3.loss_cls: 0.1091, decode.d3.loss_mask: 0.2308, decode.d3.loss_dice: 0.6287, decode.d4.loss_cls: 0.1099, decode.d4.loss_mask: 0.2308, decode.d4.loss_dice: 0.6231, decode.d5.loss_cls: 0.1044, decode.d5.loss_mask: 0.2303, decode.d5.loss_dice: 0.6252, decode.d6.loss_cls: 0.1041, decode.d6.loss_mask: 0.2308, decode.d6.loss_dice: 0.6227, decode.d7.loss_cls: 0.1044, decode.d7.loss_mask: 0.2300, decode.d7.loss_dice: 0.6196, decode.d8.loss_cls: 0.1001, decode.d8.loss_mask: 0.2311, decode.d8.loss_dice: 0.6231, loss: 9.9347 +2022-05-10 07:47:30,052 - mmseg - INFO - Iter [26500/80000] lr: 9.602e-07, eta: 1 day, 4:55:33, time: 1.790, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0996, decode.loss_mask: 0.2264, decode.loss_dice: 0.6025, decode.d0.loss_cls: 0.3265, decode.d0.loss_mask: 0.2394, decode.d0.loss_dice: 0.6482, decode.d1.loss_cls: 0.1162, decode.d1.loss_mask: 0.2280, decode.d1.loss_dice: 0.6190, decode.d2.loss_cls: 0.1054, decode.d2.loss_mask: 0.2271, decode.d2.loss_dice: 0.6132, decode.d3.loss_cls: 0.0998, decode.d3.loss_mask: 0.2267, decode.d3.loss_dice: 0.6049, decode.d4.loss_cls: 0.0976, decode.d4.loss_mask: 0.2268, decode.d4.loss_dice: 0.6050, decode.d5.loss_cls: 0.1003, decode.d5.loss_mask: 0.2271, decode.d5.loss_dice: 0.6078, decode.d6.loss_cls: 0.0977, decode.d6.loss_mask: 0.2267, decode.d6.loss_dice: 0.6069, decode.d7.loss_cls: 0.0983, decode.d7.loss_mask: 0.2267, decode.d7.loss_dice: 0.6053, decode.d8.loss_cls: 0.0958, decode.d8.loss_mask: 0.2268, decode.d8.loss_dice: 0.6050, loss: 9.6367 +2022-05-10 07:49:00,394 - mmseg - INFO - Iter [26550/80000] lr: 
9.593e-07, eta: 1 day, 4:53:42, time: 1.807, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0991, decode.loss_mask: 0.2248, decode.loss_dice: 0.6300, decode.d0.loss_cls: 0.3317, decode.d0.loss_mask: 0.2367, decode.d0.loss_dice: 0.6762, decode.d1.loss_cls: 0.1342, decode.d1.loss_mask: 0.2282, decode.d1.loss_dice: 0.6431, decode.d2.loss_cls: 0.1154, decode.d2.loss_mask: 0.2255, decode.d2.loss_dice: 0.6388, decode.d3.loss_cls: 0.1031, decode.d3.loss_mask: 0.2257, decode.d3.loss_dice: 0.6308, decode.d4.loss_cls: 0.1052, decode.d4.loss_mask: 0.2252, decode.d4.loss_dice: 0.6286, decode.d5.loss_cls: 0.1063, decode.d5.loss_mask: 0.2253, decode.d5.loss_dice: 0.6297, decode.d6.loss_cls: 0.1014, decode.d6.loss_mask: 0.2248, decode.d6.loss_dice: 0.6284, decode.d7.loss_cls: 0.1040, decode.d7.loss_mask: 0.2248, decode.d7.loss_dice: 0.6306, decode.d8.loss_cls: 0.1000, decode.d8.loss_mask: 0.2253, decode.d8.loss_dice: 0.6327, loss: 9.9358 +2022-05-10 07:50:34,213 - mmseg - INFO - Iter [26600/80000] lr: 9.584e-07, eta: 1 day, 4:51:58, time: 1.876, data_time: 0.068, memory: 64699, decode.loss_cls: 0.0937, decode.loss_mask: 0.2220, decode.loss_dice: 0.6205, decode.d0.loss_cls: 0.3372, decode.d0.loss_mask: 0.2325, decode.d0.loss_dice: 0.6579, decode.d1.loss_cls: 0.1126, decode.d1.loss_mask: 0.2249, decode.d1.loss_dice: 0.6339, decode.d2.loss_cls: 0.1103, decode.d2.loss_mask: 0.2224, decode.d2.loss_dice: 0.6237, decode.d3.loss_cls: 0.1012, decode.d3.loss_mask: 0.2218, decode.d3.loss_dice: 0.6205, decode.d4.loss_cls: 0.0995, decode.d4.loss_mask: 0.2210, decode.d4.loss_dice: 0.6195, decode.d5.loss_cls: 0.0985, decode.d5.loss_mask: 0.2212, decode.d5.loss_dice: 0.6164, decode.d6.loss_cls: 0.1002, decode.d6.loss_mask: 0.2206, decode.d6.loss_dice: 0.6186, decode.d7.loss_cls: 0.1011, decode.d7.loss_mask: 0.2212, decode.d7.loss_dice: 0.6192, decode.d8.loss_cls: 0.0980, decode.d8.loss_mask: 0.2211, decode.d8.loss_dice: 0.6174, loss: 9.7288 +2022-05-10 07:52:06,090 - mmseg - INFO - Iter 
[26650/80000] lr: 9.575e-07, eta: 1 day, 4:50:10, time: 1.838, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0887, decode.loss_mask: 0.2282, decode.loss_dice: 0.6234, decode.d0.loss_cls: 0.3271, decode.d0.loss_mask: 0.2409, decode.d0.loss_dice: 0.6602, decode.d1.loss_cls: 0.1160, decode.d1.loss_mask: 0.2311, decode.d1.loss_dice: 0.6360, decode.d2.loss_cls: 0.0950, decode.d2.loss_mask: 0.2295, decode.d2.loss_dice: 0.6319, decode.d3.loss_cls: 0.0977, decode.d3.loss_mask: 0.2280, decode.d3.loss_dice: 0.6283, decode.d4.loss_cls: 0.0896, decode.d4.loss_mask: 0.2284, decode.d4.loss_dice: 0.6242, decode.d5.loss_cls: 0.0965, decode.d5.loss_mask: 0.2284, decode.d5.loss_dice: 0.6264, decode.d6.loss_cls: 0.0951, decode.d6.loss_mask: 0.2286, decode.d6.loss_dice: 0.6226, decode.d7.loss_cls: 0.0860, decode.d7.loss_mask: 0.2283, decode.d7.loss_dice: 0.6242, decode.d8.loss_cls: 0.0916, decode.d8.loss_mask: 0.2286, decode.d8.loss_dice: 0.6220, loss: 9.7826 +2022-05-10 07:53:35,965 - mmseg - INFO - Iter [26700/80000] lr: 9.566e-07, eta: 1 day, 4:48:17, time: 1.798, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0987, decode.loss_mask: 0.2225, decode.loss_dice: 0.6242, decode.d0.loss_cls: 0.3224, decode.d0.loss_mask: 0.2343, decode.d0.loss_dice: 0.6674, decode.d1.loss_cls: 0.1151, decode.d1.loss_mask: 0.2246, decode.d1.loss_dice: 0.6412, decode.d2.loss_cls: 0.1020, decode.d2.loss_mask: 0.2240, decode.d2.loss_dice: 0.6301, decode.d3.loss_cls: 0.0978, decode.d3.loss_mask: 0.2228, decode.d3.loss_dice: 0.6276, decode.d4.loss_cls: 0.0967, decode.d4.loss_mask: 0.2225, decode.d4.loss_dice: 0.6273, decode.d5.loss_cls: 0.1003, decode.d5.loss_mask: 0.2230, decode.d5.loss_dice: 0.6274, decode.d6.loss_cls: 0.0953, decode.d6.loss_mask: 0.2224, decode.d6.loss_dice: 0.6274, decode.d7.loss_cls: 0.0999, decode.d7.loss_mask: 0.2227, decode.d7.loss_dice: 0.6276, decode.d8.loss_cls: 0.0958, decode.d8.loss_mask: 0.2217, decode.d8.loss_dice: 0.6242, loss: 9.7891 +2022-05-10 07:55:06,618 - mmseg 
- INFO - Iter [26750/80000] lr: 9.557e-07, eta: 1 day, 4:46:27, time: 1.813, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0930, decode.loss_mask: 0.2316, decode.loss_dice: 0.6154, decode.d0.loss_cls: 0.3206, decode.d0.loss_mask: 0.2452, decode.d0.loss_dice: 0.6516, decode.d1.loss_cls: 0.1053, decode.d1.loss_mask: 0.2344, decode.d1.loss_dice: 0.6274, decode.d2.loss_cls: 0.0963, decode.d2.loss_mask: 0.2330, decode.d2.loss_dice: 0.6202, decode.d3.loss_cls: 0.1007, decode.d3.loss_mask: 0.2326, decode.d3.loss_dice: 0.6163, decode.d4.loss_cls: 0.0967, decode.d4.loss_mask: 0.2330, decode.d4.loss_dice: 0.6172, decode.d5.loss_cls: 0.0942, decode.d5.loss_mask: 0.2319, decode.d5.loss_dice: 0.6162, decode.d6.loss_cls: 0.0936, decode.d6.loss_mask: 0.2318, decode.d6.loss_dice: 0.6179, decode.d7.loss_cls: 0.0932, decode.d7.loss_mask: 0.2315, decode.d7.loss_dice: 0.6193, decode.d8.loss_cls: 0.0924, decode.d8.loss_mask: 0.2322, decode.d8.loss_dice: 0.6157, loss: 9.7404 +2022-05-10 07:56:39,796 - mmseg - INFO - Iter [26800/80000] lr: 9.548e-07, eta: 1 day, 4:44:42, time: 1.864, data_time: 0.069, memory: 64699, decode.loss_cls: 0.1041, decode.loss_mask: 0.2172, decode.loss_dice: 0.6276, decode.d0.loss_cls: 0.3182, decode.d0.loss_mask: 0.2250, decode.d0.loss_dice: 0.6703, decode.d1.loss_cls: 0.1220, decode.d1.loss_mask: 0.2193, decode.d1.loss_dice: 0.6363, decode.d2.loss_cls: 0.1174, decode.d2.loss_mask: 0.2176, decode.d2.loss_dice: 0.6325, decode.d3.loss_cls: 0.1109, decode.d3.loss_mask: 0.2181, decode.d3.loss_dice: 0.6258, decode.d4.loss_cls: 0.1020, decode.d4.loss_mask: 0.2172, decode.d4.loss_dice: 0.6290, decode.d5.loss_cls: 0.1081, decode.d5.loss_mask: 0.2177, decode.d5.loss_dice: 0.6294, decode.d6.loss_cls: 0.1029, decode.d6.loss_mask: 0.2177, decode.d6.loss_dice: 0.6293, decode.d7.loss_cls: 0.1047, decode.d7.loss_mask: 0.2178, decode.d7.loss_dice: 0.6244, decode.d8.loss_cls: 0.1082, decode.d8.loss_mask: 0.2180, decode.d8.loss_dice: 0.6254, loss: 9.8140 +2022-05-10 
07:58:10,928 - mmseg - INFO - Iter [26850/80000] lr: 9.539e-07, eta: 1 day, 4:42:52, time: 1.822, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0973, decode.loss_mask: 0.2244, decode.loss_dice: 0.6199, decode.d0.loss_cls: 0.3251, decode.d0.loss_mask: 0.2337, decode.d0.loss_dice: 0.6524, decode.d1.loss_cls: 0.1253, decode.d1.loss_mask: 0.2260, decode.d1.loss_dice: 0.6237, decode.d2.loss_cls: 0.1055, decode.d2.loss_mask: 0.2249, decode.d2.loss_dice: 0.6195, decode.d3.loss_cls: 0.1065, decode.d3.loss_mask: 0.2249, decode.d3.loss_dice: 0.6108, decode.d4.loss_cls: 0.0962, decode.d4.loss_mask: 0.2244, decode.d4.loss_dice: 0.6149, decode.d5.loss_cls: 0.1055, decode.d5.loss_mask: 0.2242, decode.d5.loss_dice: 0.6171, decode.d6.loss_cls: 0.0966, decode.d6.loss_mask: 0.2241, decode.d6.loss_dice: 0.6149, decode.d7.loss_cls: 0.0951, decode.d7.loss_mask: 0.2245, decode.d7.loss_dice: 0.6142, decode.d8.loss_cls: 0.0988, decode.d8.loss_mask: 0.2241, decode.d8.loss_dice: 0.6163, loss: 9.7111 +2022-05-10 07:59:42,122 - mmseg - INFO - Iter [26900/80000] lr: 9.530e-07, eta: 1 day, 4:41:03, time: 1.824, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0845, decode.loss_mask: 0.2222, decode.loss_dice: 0.6229, decode.d0.loss_cls: 0.3294, decode.d0.loss_mask: 0.2335, decode.d0.loss_dice: 0.6595, decode.d1.loss_cls: 0.1137, decode.d1.loss_mask: 0.2251, decode.d1.loss_dice: 0.6352, decode.d2.loss_cls: 0.1041, decode.d2.loss_mask: 0.2226, decode.d2.loss_dice: 0.6260, decode.d3.loss_cls: 0.1009, decode.d3.loss_mask: 0.2220, decode.d3.loss_dice: 0.6248, decode.d4.loss_cls: 0.0896, decode.d4.loss_mask: 0.2226, decode.d4.loss_dice: 0.6230, decode.d5.loss_cls: 0.0909, decode.d5.loss_mask: 0.2233, decode.d5.loss_dice: 0.6274, decode.d6.loss_cls: 0.0867, decode.d6.loss_mask: 0.2221, decode.d6.loss_dice: 0.6218, decode.d7.loss_cls: 0.0929, decode.d7.loss_mask: 0.2220, decode.d7.loss_dice: 0.6274, decode.d8.loss_cls: 0.0853, decode.d8.loss_mask: 0.2222, decode.d8.loss_dice: 0.6234, loss: 
9.7069 +2022-05-10 08:01:12,785 - mmseg - INFO - Iter [26950/80000] lr: 9.521e-07, eta: 1 day, 4:39:13, time: 1.812, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0937, decode.loss_mask: 0.2239, decode.loss_dice: 0.6232, decode.d0.loss_cls: 0.3214, decode.d0.loss_mask: 0.2400, decode.d0.loss_dice: 0.6623, decode.d1.loss_cls: 0.1158, decode.d1.loss_mask: 0.2257, decode.d1.loss_dice: 0.6383, decode.d2.loss_cls: 0.1038, decode.d2.loss_mask: 0.2242, decode.d2.loss_dice: 0.6314, decode.d3.loss_cls: 0.0997, decode.d3.loss_mask: 0.2234, decode.d3.loss_dice: 0.6259, decode.d4.loss_cls: 0.0984, decode.d4.loss_mask: 0.2237, decode.d4.loss_dice: 0.6239, decode.d5.loss_cls: 0.0972, decode.d5.loss_mask: 0.2250, decode.d5.loss_dice: 0.6253, decode.d6.loss_cls: 0.0908, decode.d6.loss_mask: 0.2244, decode.d6.loss_dice: 0.6208, decode.d7.loss_cls: 0.0955, decode.d7.loss_mask: 0.2242, decode.d7.loss_dice: 0.6215, decode.d8.loss_cls: 0.1006, decode.d8.loss_mask: 0.2252, decode.d8.loss_dice: 0.6220, loss: 9.7711 +2022-05-10 08:02:44,842 - mmseg - INFO - Saving checkpoint at 27000 iterations +2022-05-10 08:03:17,820 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 08:03:17,829 - mmseg - INFO - Iter [27000/80000] lr: 9.512e-07, eta: 1 day, 4:38:30, time: 2.499, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0940, decode.loss_mask: 0.2247, decode.loss_dice: 0.6225, decode.d0.loss_cls: 0.3204, decode.d0.loss_mask: 0.2331, decode.d0.loss_dice: 0.6574, decode.d1.loss_cls: 0.1228, decode.d1.loss_mask: 0.2263, decode.d1.loss_dice: 0.6364, decode.d2.loss_cls: 0.1021, decode.d2.loss_mask: 0.2259, decode.d2.loss_dice: 0.6228, decode.d3.loss_cls: 0.1008, decode.d3.loss_mask: 0.2242, decode.d3.loss_dice: 0.6240, decode.d4.loss_cls: 0.0954, decode.d4.loss_mask: 0.2252, decode.d4.loss_dice: 0.6218, decode.d5.loss_cls: 0.0979, decode.d5.loss_mask: 0.2254, decode.d5.loss_dice: 0.6198, decode.d6.loss_cls: 0.0987, decode.d6.loss_mask: 0.2240, 
decode.d6.loss_dice: 0.6216, decode.d7.loss_cls: 0.1003, decode.d7.loss_mask: 0.2237, decode.d7.loss_dice: 0.6241, decode.d8.loss_cls: 0.0997, decode.d8.loss_mask: 0.2241, decode.d8.loss_dice: 0.6241, loss: 9.7634 +2022-05-10 08:05:13,051 - mmseg - INFO - per class results: +2022-05-10 08:05:13,058 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.59 | 99.14 | +| sidewalk | 88.68 | 94.74 | +| building | 94.41 | 97.33 | +| wall | 67.47 | 76.24 | +| fence | 73.77 | 79.5 | +| pole | 71.77 | 83.2 | +| traffic light | 77.34 | 87.67 | +| traffic sign | 83.94 | 90.71 | +| vegetation | 93.48 | 96.81 | +| terrain | 68.7 | 79.73 | +| sky | 95.82 | 98.52 | +| person | 87.15 | 93.34 | +| rider | 74.86 | 86.72 | +| car | 96.34 | 98.07 | +| truck | 80.19 | 95.44 | +| bus | 93.45 | 96.87 | +| train | 87.41 | 89.88 | +| motorcycle | 77.03 | 88.17 | +| bicycle | 82.67 | 92.0 | ++---------------+-------+-------+ +2022-05-10 08:05:13,058 - mmseg - INFO - Summary: +2022-05-10 08:05:13,058 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 83.85 | 90.74 | ++-------+-------+-------+ +2022-05-10 08:05:13,062 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 08:05:13,062 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8385, mAcc: 0.9074, IoU.road: 0.9859, IoU.sidewalk: 0.8868, IoU.building: 0.9441, IoU.wall: 0.6747, IoU.fence: 0.7377, IoU.pole: 0.7177, IoU.traffic light: 0.7734, IoU.traffic sign: 0.8394, IoU.vegetation: 0.9348, IoU.terrain: 0.6870, IoU.sky: 0.9582, IoU.person: 0.8715, IoU.rider: 0.7486, IoU.car: 0.9634, IoU.truck: 0.8019, IoU.bus: 0.9345, IoU.train: 0.8741, IoU.motorcycle: 0.7703, IoU.bicycle: 0.8267, Acc.road: 0.9914, Acc.sidewalk: 0.9474, Acc.building: 0.9733, Acc.wall: 0.7624, Acc.fence: 0.7950, Acc.pole: 0.8320, Acc.traffic light: 0.8767, Acc.traffic sign: 0.9071, Acc.vegetation: 0.9681, 
Acc.terrain: 0.7973, Acc.sky: 0.9852, Acc.person: 0.9334, Acc.rider: 0.8672, Acc.car: 0.9807, Acc.truck: 0.9544, Acc.bus: 0.9687, Acc.train: 0.8988, Acc.motorcycle: 0.8817, Acc.bicycle: 0.9200 +2022-05-10 08:06:43,026 - mmseg - INFO - Iter [27050/80000] lr: 9.503e-07, eta: 1 day, 4:40:24, time: 4.105, data_time: 2.326, memory: 64699, decode.loss_cls: 0.0987, decode.loss_mask: 0.2271, decode.loss_dice: 0.6274, decode.d0.loss_cls: 0.3304, decode.d0.loss_mask: 0.2392, decode.d0.loss_dice: 0.6644, decode.d1.loss_cls: 0.1244, decode.d1.loss_mask: 0.2295, decode.d1.loss_dice: 0.6394, decode.d2.loss_cls: 0.1170, decode.d2.loss_mask: 0.2278, decode.d2.loss_dice: 0.6299, decode.d3.loss_cls: 0.1067, decode.d3.loss_mask: 0.2273, decode.d3.loss_dice: 0.6250, decode.d4.loss_cls: 0.1020, decode.d4.loss_mask: 0.2276, decode.d4.loss_dice: 0.6309, decode.d5.loss_cls: 0.1043, decode.d5.loss_mask: 0.2272, decode.d5.loss_dice: 0.6286, decode.d6.loss_cls: 0.0959, decode.d6.loss_mask: 0.2270, decode.d6.loss_dice: 0.6260, decode.d7.loss_cls: 0.1015, decode.d7.loss_mask: 0.2277, decode.d7.loss_dice: 0.6251, decode.d8.loss_cls: 0.1050, decode.d8.loss_mask: 0.2274, decode.d8.loss_dice: 0.6230, loss: 9.8936 +2022-05-10 08:08:13,601 - mmseg - INFO - Iter [27100/80000] lr: 9.494e-07, eta: 1 day, 4:38:33, time: 1.812, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0800, decode.loss_mask: 0.2242, decode.loss_dice: 0.6159, decode.d0.loss_cls: 0.3166, decode.d0.loss_mask: 0.2348, decode.d0.loss_dice: 0.6514, decode.d1.loss_cls: 0.1011, decode.d1.loss_mask: 0.2255, decode.d1.loss_dice: 0.6276, decode.d2.loss_cls: 0.0959, decode.d2.loss_mask: 0.2243, decode.d2.loss_dice: 0.6203, decode.d3.loss_cls: 0.0864, decode.d3.loss_mask: 0.2247, decode.d3.loss_dice: 0.6173, decode.d4.loss_cls: 0.0879, decode.d4.loss_mask: 0.2235, decode.d4.loss_dice: 0.6178, decode.d5.loss_cls: 0.0857, decode.d5.loss_mask: 0.2232, decode.d5.loss_dice: 0.6130, decode.d6.loss_cls: 0.0826, decode.d6.loss_mask: 0.2235, 
decode.d6.loss_dice: 0.6092, decode.d7.loss_cls: 0.0852, decode.d7.loss_mask: 0.2229, decode.d7.loss_dice: 0.6134, decode.d8.loss_cls: 0.0890, decode.d8.loss_mask: 0.2230, decode.d8.loss_dice: 0.6095, loss: 9.5554 +2022-05-10 08:09:43,016 - mmseg - INFO - Iter [27150/80000] lr: 9.485e-07, eta: 1 day, 4:36:40, time: 1.789, data_time: 0.023, memory: 64699, decode.loss_cls: 0.0957, decode.loss_mask: 0.2200, decode.loss_dice: 0.6222, decode.d0.loss_cls: 0.3410, decode.d0.loss_mask: 0.2291, decode.d0.loss_dice: 0.6603, decode.d1.loss_cls: 0.1166, decode.d1.loss_mask: 0.2220, decode.d1.loss_dice: 0.6328, decode.d2.loss_cls: 0.1054, decode.d2.loss_mask: 0.2213, decode.d2.loss_dice: 0.6294, decode.d3.loss_cls: 0.0994, decode.d3.loss_mask: 0.2205, decode.d3.loss_dice: 0.6218, decode.d4.loss_cls: 0.1068, decode.d4.loss_mask: 0.2203, decode.d4.loss_dice: 0.6234, decode.d5.loss_cls: 0.1037, decode.d5.loss_mask: 0.2203, decode.d5.loss_dice: 0.6206, decode.d6.loss_cls: 0.1028, decode.d6.loss_mask: 0.2203, decode.d6.loss_dice: 0.6187, decode.d7.loss_cls: 0.1014, decode.d7.loss_mask: 0.2203, decode.d7.loss_dice: 0.6213, decode.d8.loss_cls: 0.0939, decode.d8.loss_mask: 0.2209, decode.d8.loss_dice: 0.6246, loss: 9.7569 +2022-05-10 08:11:16,650 - mmseg - INFO - Iter [27200/80000] lr: 9.476e-07, eta: 1 day, 4:34:55, time: 1.873, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0947, decode.loss_mask: 0.2197, decode.loss_dice: 0.6087, decode.d0.loss_cls: 0.3313, decode.d0.loss_mask: 0.2306, decode.d0.loss_dice: 0.6449, decode.d1.loss_cls: 0.1235, decode.d1.loss_mask: 0.2208, decode.d1.loss_dice: 0.6159, decode.d2.loss_cls: 0.1120, decode.d2.loss_mask: 0.2199, decode.d2.loss_dice: 0.6140, decode.d3.loss_cls: 0.1030, decode.d3.loss_mask: 0.2198, decode.d3.loss_dice: 0.6053, decode.d4.loss_cls: 0.1020, decode.d4.loss_mask: 0.2202, decode.d4.loss_dice: 0.6087, decode.d5.loss_cls: 0.1008, decode.d5.loss_mask: 0.2199, decode.d5.loss_dice: 0.6047, decode.d6.loss_cls: 0.0949, 
decode.d6.loss_mask: 0.2199, decode.d6.loss_dice: 0.6050, decode.d7.loss_cls: 0.1026, decode.d7.loss_mask: 0.2201, decode.d7.loss_dice: 0.6064, decode.d8.loss_cls: 0.1001, decode.d8.loss_mask: 0.2199, decode.d8.loss_dice: 0.6112, loss: 9.6007 +2022-05-10 08:12:46,837 - mmseg - INFO - Iter [27250/80000] lr: 9.467e-07, eta: 1 day, 4:33:04, time: 1.804, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0968, decode.loss_mask: 0.2244, decode.loss_dice: 0.6271, decode.d0.loss_cls: 0.3320, decode.d0.loss_mask: 0.2332, decode.d0.loss_dice: 0.6613, decode.d1.loss_cls: 0.1180, decode.d1.loss_mask: 0.2255, decode.d1.loss_dice: 0.6358, decode.d2.loss_cls: 0.1014, decode.d2.loss_mask: 0.2249, decode.d2.loss_dice: 0.6278, decode.d3.loss_cls: 0.0993, decode.d3.loss_mask: 0.2251, decode.d3.loss_dice: 0.6280, decode.d4.loss_cls: 0.1012, decode.d4.loss_mask: 0.2256, decode.d4.loss_dice: 0.6259, decode.d5.loss_cls: 0.1056, decode.d5.loss_mask: 0.2253, decode.d5.loss_dice: 0.6294, decode.d6.loss_cls: 0.0957, decode.d6.loss_mask: 0.2245, decode.d6.loss_dice: 0.6261, decode.d7.loss_cls: 0.0948, decode.d7.loss_mask: 0.2243, decode.d7.loss_dice: 0.6236, decode.d8.loss_cls: 0.1032, decode.d8.loss_mask: 0.2248, decode.d8.loss_dice: 0.6249, loss: 9.8158 +2022-05-10 08:14:17,516 - mmseg - INFO - Iter [27300/80000] lr: 9.458e-07, eta: 1 day, 4:31:13, time: 1.813, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0873, decode.loss_mask: 0.2306, decode.loss_dice: 0.6199, decode.d0.loss_cls: 0.3251, decode.d0.loss_mask: 0.2418, decode.d0.loss_dice: 0.6497, decode.d1.loss_cls: 0.1078, decode.d1.loss_mask: 0.2325, decode.d1.loss_dice: 0.6321, decode.d2.loss_cls: 0.1019, decode.d2.loss_mask: 0.2311, decode.d2.loss_dice: 0.6248, decode.d3.loss_cls: 0.0958, decode.d3.loss_mask: 0.2308, decode.d3.loss_dice: 0.6240, decode.d4.loss_cls: 0.0907, decode.d4.loss_mask: 0.2308, decode.d4.loss_dice: 0.6217, decode.d5.loss_cls: 0.1003, decode.d5.loss_mask: 0.2311, decode.d5.loss_dice: 0.6214, 
decode.d6.loss_cls: 0.0891, decode.d6.loss_mask: 0.2311, decode.d6.loss_dice: 0.6199, decode.d7.loss_cls: 0.0906, decode.d7.loss_mask: 0.2312, decode.d7.loss_dice: 0.6228, decode.d8.loss_cls: 0.0870, decode.d8.loss_mask: 0.2310, decode.d8.loss_dice: 0.6239, loss: 9.7577 +2022-05-10 08:15:50,721 - mmseg - INFO - Iter [27350/80000] lr: 9.450e-07, eta: 1 day, 4:29:28, time: 1.864, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0954, decode.loss_mask: 0.2247, decode.loss_dice: 0.6066, decode.d0.loss_cls: 0.3289, decode.d0.loss_mask: 0.2371, decode.d0.loss_dice: 0.6494, decode.d1.loss_cls: 0.1118, decode.d1.loss_mask: 0.2263, decode.d1.loss_dice: 0.6190, decode.d2.loss_cls: 0.1079, decode.d2.loss_mask: 0.2247, decode.d2.loss_dice: 0.6121, decode.d3.loss_cls: 0.0939, decode.d3.loss_mask: 0.2254, decode.d3.loss_dice: 0.6076, decode.d4.loss_cls: 0.0959, decode.d4.loss_mask: 0.2255, decode.d4.loss_dice: 0.6066, decode.d5.loss_cls: 0.0997, decode.d5.loss_mask: 0.2250, decode.d5.loss_dice: 0.6090, decode.d6.loss_cls: 0.1007, decode.d6.loss_mask: 0.2248, decode.d6.loss_dice: 0.6084, decode.d7.loss_cls: 0.0970, decode.d7.loss_mask: 0.2248, decode.d7.loss_dice: 0.6056, decode.d8.loss_cls: 0.0934, decode.d8.loss_mask: 0.2248, decode.d8.loss_dice: 0.6087, loss: 9.6207 +2022-05-10 08:17:20,247 - mmseg - INFO - Iter [27400/80000] lr: 9.441e-07, eta: 1 day, 4:27:35, time: 1.790, data_time: 0.018, memory: 64699, decode.loss_cls: 0.1077, decode.loss_mask: 0.2306, decode.loss_dice: 0.6224, decode.d0.loss_cls: 0.3227, decode.d0.loss_mask: 0.2410, decode.d0.loss_dice: 0.6602, decode.d1.loss_cls: 0.1234, decode.d1.loss_mask: 0.2319, decode.d1.loss_dice: 0.6296, decode.d2.loss_cls: 0.1160, decode.d2.loss_mask: 0.2314, decode.d2.loss_dice: 0.6260, decode.d3.loss_cls: 0.1115, decode.d3.loss_mask: 0.2306, decode.d3.loss_dice: 0.6236, decode.d4.loss_cls: 0.1076, decode.d4.loss_mask: 0.2305, decode.d4.loss_dice: 0.6237, decode.d5.loss_cls: 0.1078, decode.d5.loss_mask: 0.2302, 
decode.d5.loss_dice: 0.6179, decode.d6.loss_cls: 0.1063, decode.d6.loss_mask: 0.2302, decode.d6.loss_dice: 0.6196, decode.d7.loss_cls: 0.1071, decode.d7.loss_mask: 0.2302, decode.d7.loss_dice: 0.6218, decode.d8.loss_cls: 0.1036, decode.d8.loss_mask: 0.2298, decode.d8.loss_dice: 0.6189, loss: 9.8936 +2022-05-10 08:18:50,274 - mmseg - INFO - Iter [27450/80000] lr: 9.432e-07, eta: 1 day, 4:25:44, time: 1.801, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0983, decode.loss_mask: 0.2244, decode.loss_dice: 0.6134, decode.d0.loss_cls: 0.3309, decode.d0.loss_mask: 0.2367, decode.d0.loss_dice: 0.6486, decode.d1.loss_cls: 0.1145, decode.d1.loss_mask: 0.2270, decode.d1.loss_dice: 0.6260, decode.d2.loss_cls: 0.1061, decode.d2.loss_mask: 0.2260, decode.d2.loss_dice: 0.6210, decode.d3.loss_cls: 0.1008, decode.d3.loss_mask: 0.2259, decode.d3.loss_dice: 0.6163, decode.d4.loss_cls: 0.1030, decode.d4.loss_mask: 0.2254, decode.d4.loss_dice: 0.6205, decode.d5.loss_cls: 0.1012, decode.d5.loss_mask: 0.2252, decode.d5.loss_dice: 0.6148, decode.d6.loss_cls: 0.0961, decode.d6.loss_mask: 0.2248, decode.d6.loss_dice: 0.6141, decode.d7.loss_cls: 0.0945, decode.d7.loss_mask: 0.2243, decode.d7.loss_dice: 0.6146, decode.d8.loss_cls: 0.0995, decode.d8.loss_mask: 0.2241, decode.d8.loss_dice: 0.6139, loss: 9.7118 +2022-05-10 08:20:21,707 - mmseg - INFO - Iter [27500/80000] lr: 9.423e-07, eta: 1 day, 4:23:55, time: 1.829, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0909, decode.loss_mask: 0.2295, decode.loss_dice: 0.6182, decode.d0.loss_cls: 0.3014, decode.d0.loss_mask: 0.2412, decode.d0.loss_dice: 0.6554, decode.d1.loss_cls: 0.1047, decode.d1.loss_mask: 0.2306, decode.d1.loss_dice: 0.6287, decode.d2.loss_cls: 0.0959, decode.d2.loss_mask: 0.2297, decode.d2.loss_dice: 0.6282, decode.d3.loss_cls: 0.0956, decode.d3.loss_mask: 0.2291, decode.d3.loss_dice: 0.6191, decode.d4.loss_cls: 0.0918, decode.d4.loss_mask: 0.2288, decode.d4.loss_dice: 0.6187, decode.d5.loss_cls: 0.0891, 
decode.d5.loss_mask: 0.2293, decode.d5.loss_dice: 0.6232, decode.d6.loss_cls: 0.0874, decode.d6.loss_mask: 0.2290, decode.d6.loss_dice: 0.6186, decode.d7.loss_cls: 0.0925, decode.d7.loss_mask: 0.2298, decode.d7.loss_dice: 0.6245, decode.d8.loss_cls: 0.0915, decode.d8.loss_mask: 0.2290, decode.d8.loss_dice: 0.6197, loss: 9.7014 +2022-05-10 08:21:55,442 - mmseg - INFO - Iter [27550/80000] lr: 9.414e-07, eta: 1 day, 4:22:11, time: 1.873, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0857, decode.loss_mask: 0.2286, decode.loss_dice: 0.6204, decode.d0.loss_cls: 0.3210, decode.d0.loss_mask: 0.2394, decode.d0.loss_dice: 0.6551, decode.d1.loss_cls: 0.1116, decode.d1.loss_mask: 0.2309, decode.d1.loss_dice: 0.6340, decode.d2.loss_cls: 0.1012, decode.d2.loss_mask: 0.2293, decode.d2.loss_dice: 0.6273, decode.d3.loss_cls: 0.0930, decode.d3.loss_mask: 0.2291, decode.d3.loss_dice: 0.6227, decode.d4.loss_cls: 0.0886, decode.d4.loss_mask: 0.2293, decode.d4.loss_dice: 0.6278, decode.d5.loss_cls: 0.0908, decode.d5.loss_mask: 0.2288, decode.d5.loss_dice: 0.6255, decode.d6.loss_cls: 0.0865, decode.d6.loss_mask: 0.2291, decode.d6.loss_dice: 0.6203, decode.d7.loss_cls: 0.0875, decode.d7.loss_mask: 0.2285, decode.d7.loss_dice: 0.6233, decode.d8.loss_cls: 0.0845, decode.d8.loss_mask: 0.2292, decode.d8.loss_dice: 0.6227, loss: 9.7316 +2022-05-10 08:23:25,769 - mmseg - INFO - Iter [27600/80000] lr: 9.405e-07, eta: 1 day, 4:20:20, time: 1.808, data_time: 0.023, memory: 64699, decode.loss_cls: 0.0862, decode.loss_mask: 0.2315, decode.loss_dice: 0.6290, decode.d0.loss_cls: 0.3252, decode.d0.loss_mask: 0.2428, decode.d0.loss_dice: 0.6653, decode.d1.loss_cls: 0.1112, decode.d1.loss_mask: 0.2332, decode.d1.loss_dice: 0.6407, decode.d2.loss_cls: 0.1047, decode.d2.loss_mask: 0.2317, decode.d2.loss_dice: 0.6317, decode.d3.loss_cls: 0.0953, decode.d3.loss_mask: 0.2320, decode.d3.loss_dice: 0.6267, decode.d4.loss_cls: 0.0990, decode.d4.loss_mask: 0.2321, decode.d4.loss_dice: 0.6300, 
decode.d5.loss_cls: 0.0964, decode.d5.loss_mask: 0.2309, decode.d5.loss_dice: 0.6285, decode.d6.loss_cls: 0.0885, decode.d6.loss_mask: 0.2313, decode.d6.loss_dice: 0.6307, decode.d7.loss_cls: 0.0880, decode.d7.loss_mask: 0.2319, decode.d7.loss_dice: 0.6299, decode.d8.loss_cls: 0.0940, decode.d8.loss_mask: 0.2320, decode.d8.loss_dice: 0.6276, loss: 9.8581 +2022-05-10 08:24:55,341 - mmseg - INFO - Iter [27650/80000] lr: 9.396e-07, eta: 1 day, 4:18:28, time: 1.790, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0859, decode.loss_mask: 0.2232, decode.loss_dice: 0.6239, decode.d0.loss_cls: 0.3230, decode.d0.loss_mask: 0.2345, decode.d0.loss_dice: 0.6613, decode.d1.loss_cls: 0.1072, decode.d1.loss_mask: 0.2255, decode.d1.loss_dice: 0.6375, decode.d2.loss_cls: 0.0949, decode.d2.loss_mask: 0.2250, decode.d2.loss_dice: 0.6269, decode.d3.loss_cls: 0.0922, decode.d3.loss_mask: 0.2237, decode.d3.loss_dice: 0.6266, decode.d4.loss_cls: 0.0893, decode.d4.loss_mask: 0.2238, decode.d4.loss_dice: 0.6212, decode.d5.loss_cls: 0.0870, decode.d5.loss_mask: 0.2238, decode.d5.loss_dice: 0.6222, decode.d6.loss_cls: 0.0851, decode.d6.loss_mask: 0.2236, decode.d6.loss_dice: 0.6262, decode.d7.loss_cls: 0.0869, decode.d7.loss_mask: 0.2234, decode.d7.loss_dice: 0.6229, decode.d8.loss_cls: 0.0879, decode.d8.loss_mask: 0.2229, decode.d8.loss_dice: 0.6226, loss: 9.6802 +2022-05-10 08:26:26,181 - mmseg - INFO - Iter [27700/80000] lr: 9.387e-07, eta: 1 day, 4:16:38, time: 1.817, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0950, decode.loss_mask: 0.2190, decode.loss_dice: 0.6024, decode.d0.loss_cls: 0.3131, decode.d0.loss_mask: 0.2284, decode.d0.loss_dice: 0.6310, decode.d1.loss_cls: 0.1140, decode.d1.loss_mask: 0.2204, decode.d1.loss_dice: 0.6083, decode.d2.loss_cls: 0.1020, decode.d2.loss_mask: 0.2198, decode.d2.loss_dice: 0.6054, decode.d3.loss_cls: 0.0972, decode.d3.loss_mask: 0.2197, decode.d3.loss_dice: 0.5992, decode.d4.loss_cls: 0.0990, decode.d4.loss_mask: 0.2201, 
decode.d4.loss_dice: 0.6048, decode.d5.loss_cls: 0.0909, decode.d5.loss_mask: 0.2199, decode.d5.loss_dice: 0.6016, decode.d6.loss_cls: 0.0914, decode.d6.loss_mask: 0.2192, decode.d6.loss_dice: 0.6023, decode.d7.loss_cls: 0.0944, decode.d7.loss_mask: 0.2195, decode.d7.loss_dice: 0.5999, decode.d8.loss_cls: 0.0924, decode.d8.loss_mask: 0.2185, decode.d8.loss_dice: 0.5988, loss: 9.4476 +2022-05-10 08:27:58,955 - mmseg - INFO - Iter [27750/80000] lr: 9.378e-07, eta: 1 day, 4:14:52, time: 1.856, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0977, decode.loss_mask: 0.2215, decode.loss_dice: 0.6062, decode.d0.loss_cls: 0.3239, decode.d0.loss_mask: 0.2299, decode.d0.loss_dice: 0.6344, decode.d1.loss_cls: 0.1089, decode.d1.loss_mask: 0.2231, decode.d1.loss_dice: 0.6147, decode.d2.loss_cls: 0.1017, decode.d2.loss_mask: 0.2220, decode.d2.loss_dice: 0.6097, decode.d3.loss_cls: 0.0945, decode.d3.loss_mask: 0.2219, decode.d3.loss_dice: 0.6025, decode.d4.loss_cls: 0.0998, decode.d4.loss_mask: 0.2216, decode.d4.loss_dice: 0.6059, decode.d5.loss_cls: 0.1031, decode.d5.loss_mask: 0.2215, decode.d5.loss_dice: 0.6073, decode.d6.loss_cls: 0.0963, decode.d6.loss_mask: 0.2223, decode.d6.loss_dice: 0.6050, decode.d7.loss_cls: 0.0965, decode.d7.loss_mask: 0.2215, decode.d7.loss_dice: 0.6036, decode.d8.loss_cls: 0.1000, decode.d8.loss_mask: 0.2213, decode.d8.loss_dice: 0.5994, loss: 9.5378 +2022-05-10 08:29:29,534 - mmseg - INFO - Iter [27800/80000] lr: 9.369e-07, eta: 1 day, 4:13:02, time: 1.811, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0809, decode.loss_mask: 0.2226, decode.loss_dice: 0.6060, decode.d0.loss_cls: 0.3214, decode.d0.loss_mask: 0.2332, decode.d0.loss_dice: 0.6399, decode.d1.loss_cls: 0.1018, decode.d1.loss_mask: 0.2256, decode.d1.loss_dice: 0.6199, decode.d2.loss_cls: 0.0905, decode.d2.loss_mask: 0.2238, decode.d2.loss_dice: 0.6172, decode.d3.loss_cls: 0.0893, decode.d3.loss_mask: 0.2239, decode.d3.loss_dice: 0.6076, decode.d4.loss_cls: 0.0905, 
decode.d4.loss_mask: 0.2235, decode.d4.loss_dice: 0.6073, decode.d5.loss_cls: 0.0911, decode.d5.loss_mask: 0.2238, decode.d5.loss_dice: 0.6165, decode.d6.loss_cls: 0.0869, decode.d6.loss_mask: 0.2234, decode.d6.loss_dice: 0.6057, decode.d7.loss_cls: 0.0879, decode.d7.loss_mask: 0.2233, decode.d7.loss_dice: 0.6071, decode.d8.loss_cls: 0.0837, decode.d8.loss_mask: 0.2230, decode.d8.loss_dice: 0.6035, loss: 9.5010 +2022-05-10 08:30:59,476 - mmseg - INFO - Iter [27850/80000] lr: 9.360e-07, eta: 1 day, 4:11:11, time: 1.799, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0975, decode.loss_mask: 0.2260, decode.loss_dice: 0.6279, decode.d0.loss_cls: 0.3273, decode.d0.loss_mask: 0.2378, decode.d0.loss_dice: 0.6553, decode.d1.loss_cls: 0.1195, decode.d1.loss_mask: 0.2265, decode.d1.loss_dice: 0.6363, decode.d2.loss_cls: 0.1128, decode.d2.loss_mask: 0.2266, decode.d2.loss_dice: 0.6306, decode.d3.loss_cls: 0.1044, decode.d3.loss_mask: 0.2263, decode.d3.loss_dice: 0.6288, decode.d4.loss_cls: 0.1010, decode.d4.loss_mask: 0.2260, decode.d4.loss_dice: 0.6303, decode.d5.loss_cls: 0.1064, decode.d5.loss_mask: 0.2253, decode.d5.loss_dice: 0.6310, decode.d6.loss_cls: 0.0983, decode.d6.loss_mask: 0.2260, decode.d6.loss_dice: 0.6273, decode.d7.loss_cls: 0.1051, decode.d7.loss_mask: 0.2265, decode.d7.loss_dice: 0.6274, decode.d8.loss_cls: 0.1026, decode.d8.loss_mask: 0.2268, decode.d8.loss_dice: 0.6305, loss: 9.8741 +2022-05-10 08:32:29,598 - mmseg - INFO - Iter [27900/80000] lr: 9.351e-07, eta: 1 day, 4:09:21, time: 1.802, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0820, decode.loss_mask: 0.2231, decode.loss_dice: 0.6077, decode.d0.loss_cls: 0.3261, decode.d0.loss_mask: 0.2328, decode.d0.loss_dice: 0.6427, decode.d1.loss_cls: 0.1038, decode.d1.loss_mask: 0.2244, decode.d1.loss_dice: 0.6168, decode.d2.loss_cls: 0.1018, decode.d2.loss_mask: 0.2239, decode.d2.loss_dice: 0.6101, decode.d3.loss_cls: 0.0924, decode.d3.loss_mask: 0.2241, decode.d3.loss_dice: 0.6092, 
decode.d4.loss_cls: 0.0881, decode.d4.loss_mask: 0.2235, decode.d4.loss_dice: 0.6065, decode.d5.loss_cls: 0.0857, decode.d5.loss_mask: 0.2234, decode.d5.loss_dice: 0.6058, decode.d6.loss_cls: 0.0795, decode.d6.loss_mask: 0.2235, decode.d6.loss_dice: 0.6079, decode.d7.loss_cls: 0.0822, decode.d7.loss_mask: 0.2234, decode.d7.loss_dice: 0.6085, decode.d8.loss_cls: 0.0828, decode.d8.loss_mask: 0.2228, decode.d8.loss_dice: 0.6056, loss: 9.4902 +2022-05-10 08:34:02,282 - mmseg - INFO - Iter [27950/80000] lr: 9.342e-07, eta: 1 day, 4:07:35, time: 1.854, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0801, decode.loss_mask: 0.2255, decode.loss_dice: 0.6067, decode.d0.loss_cls: 0.3209, decode.d0.loss_mask: 0.2360, decode.d0.loss_dice: 0.6400, decode.d1.loss_cls: 0.1013, decode.d1.loss_mask: 0.2281, decode.d1.loss_dice: 0.6205, decode.d2.loss_cls: 0.0923, decode.d2.loss_mask: 0.2275, decode.d2.loss_dice: 0.6185, decode.d3.loss_cls: 0.0837, decode.d3.loss_mask: 0.2260, decode.d3.loss_dice: 0.6133, decode.d4.loss_cls: 0.0914, decode.d4.loss_mask: 0.2257, decode.d4.loss_dice: 0.6111, decode.d5.loss_cls: 0.0811, decode.d5.loss_mask: 0.2268, decode.d5.loss_dice: 0.6089, decode.d6.loss_cls: 0.0902, decode.d6.loss_mask: 0.2259, decode.d6.loss_dice: 0.6085, decode.d7.loss_cls: 0.0849, decode.d7.loss_mask: 0.2259, decode.d7.loss_dice: 0.6110, decode.d8.loss_cls: 0.0861, decode.d8.loss_mask: 0.2252, decode.d8.loss_dice: 0.6109, loss: 9.5339 +2022-05-10 08:35:31,062 - mmseg - INFO - Saving checkpoint at 28000 iterations +2022-05-10 08:36:04,169 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 08:36:04,172 - mmseg - INFO - Iter [28000/80000] lr: 9.333e-07, eta: 1 day, 4:06:43, time: 2.436, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0889, decode.loss_mask: 0.2249, decode.loss_dice: 0.6059, decode.d0.loss_cls: 0.3172, decode.d0.loss_mask: 0.2331, decode.d0.loss_dice: 0.6447, decode.d1.loss_cls: 0.1176, decode.d1.loss_mask: 
0.2264, decode.d1.loss_dice: 0.6196, decode.d2.loss_cls: 0.1064, decode.d2.loss_mask: 0.2250, decode.d2.loss_dice: 0.6143, decode.d3.loss_cls: 0.0962, decode.d3.loss_mask: 0.2250, decode.d3.loss_dice: 0.6060, decode.d4.loss_cls: 0.1013, decode.d4.loss_mask: 0.2260, decode.d4.loss_dice: 0.6120, decode.d5.loss_cls: 0.0969, decode.d5.loss_mask: 0.2249, decode.d5.loss_dice: 0.6112, decode.d6.loss_cls: 0.0879, decode.d6.loss_mask: 0.2249, decode.d6.loss_dice: 0.6068, decode.d7.loss_cls: 0.0964, decode.d7.loss_mask: 0.2252, decode.d7.loss_dice: 0.6072, decode.d8.loss_cls: 0.0869, decode.d8.loss_mask: 0.2251, decode.d8.loss_dice: 0.6037, loss: 9.5874 +2022-05-10 08:37:59,487 - mmseg - INFO - per class results: +2022-05-10 08:37:59,493 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.65 | 99.3 | +| sidewalk | 88.98 | 93.81 | +| building | 94.36 | 97.02 | +| wall | 67.77 | 78.32 | +| fence | 75.29 | 82.69 | +| pole | 71.58 | 84.4 | +| traffic light | 77.35 | 88.96 | +| traffic sign | 84.16 | 90.33 | +| vegetation | 93.37 | 96.83 | +| terrain | 67.57 | 77.47 | +| sky | 95.85 | 98.42 | +| person | 86.94 | 94.35 | +| rider | 74.62 | 86.92 | +| car | 96.29 | 98.29 | +| truck | 82.33 | 95.01 | +| bus | 91.55 | 96.98 | +| train | 80.06 | 81.95 | +| motorcycle | 76.16 | 87.94 | +| bicycle | 82.85 | 91.25 | ++---------------+-------+-------+ +2022-05-10 08:37:59,494 - mmseg - INFO - Summary: +2022-05-10 08:37:59,494 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 83.46 | 90.54 | ++-------+-------+-------+ +2022-05-10 08:37:59,498 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 08:37:59,498 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8346, mAcc: 0.9054, IoU.road: 0.9865, IoU.sidewalk: 0.8898, IoU.building: 0.9436, IoU.wall: 0.6777, IoU.fence: 0.7529, IoU.pole: 0.7158, IoU.traffic light: 0.7735, 
IoU.traffic sign: 0.8416, IoU.vegetation: 0.9337, IoU.terrain: 0.6757, IoU.sky: 0.9585, IoU.person: 0.8694, IoU.rider: 0.7462, IoU.car: 0.9629, IoU.truck: 0.8233, IoU.bus: 0.9155, IoU.train: 0.8006, IoU.motorcycle: 0.7616, IoU.bicycle: 0.8285, Acc.road: 0.9930, Acc.sidewalk: 0.9381, Acc.building: 0.9702, Acc.wall: 0.7832, Acc.fence: 0.8269, Acc.pole: 0.8440, Acc.traffic light: 0.8896, Acc.traffic sign: 0.9033, Acc.vegetation: 0.9683, Acc.terrain: 0.7747, Acc.sky: 0.9842, Acc.person: 0.9435, Acc.rider: 0.8692, Acc.car: 0.9829, Acc.truck: 0.9501, Acc.bus: 0.9698, Acc.train: 0.8195, Acc.motorcycle: 0.8794, Acc.bicycle: 0.9125 +2022-05-10 08:39:29,827 - mmseg - INFO - Iter [28050/80000] lr: 9.324e-07, eta: 1 day, 4:08:27, time: 4.115, data_time: 2.325, memory: 64699, decode.loss_cls: 0.0941, decode.loss_mask: 0.2259, decode.loss_dice: 0.6147, decode.d0.loss_cls: 0.3320, decode.d0.loss_mask: 0.2366, decode.d0.loss_dice: 0.6489, decode.d1.loss_cls: 0.1189, decode.d1.loss_mask: 0.2269, decode.d1.loss_dice: 0.6227, decode.d2.loss_cls: 0.1073, decode.d2.loss_mask: 0.2262, decode.d2.loss_dice: 0.6177, decode.d3.loss_cls: 0.1022, decode.d3.loss_mask: 0.2263, decode.d3.loss_dice: 0.6209, decode.d4.loss_cls: 0.0965, decode.d4.loss_mask: 0.2263, decode.d4.loss_dice: 0.6140, decode.d5.loss_cls: 0.0990, decode.d5.loss_mask: 0.2254, decode.d5.loss_dice: 0.6172, decode.d6.loss_cls: 0.0904, decode.d6.loss_mask: 0.2267, decode.d6.loss_dice: 0.6120, decode.d7.loss_cls: 0.0867, decode.d7.loss_mask: 0.2258, decode.d7.loss_dice: 0.6130, decode.d8.loss_cls: 0.0988, decode.d8.loss_mask: 0.2252, decode.d8.loss_dice: 0.6125, loss: 9.6908 +2022-05-10 08:41:01,313 - mmseg - INFO - Iter [28100/80000] lr: 9.315e-07, eta: 1 day, 4:06:38, time: 1.830, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0942, decode.loss_mask: 0.2205, decode.loss_dice: 0.6224, decode.d0.loss_cls: 0.3208, decode.d0.loss_mask: 0.2296, decode.d0.loss_dice: 0.6560, decode.d1.loss_cls: 0.1104, decode.d1.loss_mask: 
0.2209, decode.d1.loss_dice: 0.6352, decode.d2.loss_cls: 0.0999, decode.d2.loss_mask: 0.2216, decode.d2.loss_dice: 0.6299, decode.d3.loss_cls: 0.0924, decode.d3.loss_mask: 0.2207, decode.d3.loss_dice: 0.6279, decode.d4.loss_cls: 0.0902, decode.d4.loss_mask: 0.2205, decode.d4.loss_dice: 0.6257, decode.d5.loss_cls: 0.0890, decode.d5.loss_mask: 0.2197, decode.d5.loss_dice: 0.6272, decode.d6.loss_cls: 0.0907, decode.d6.loss_mask: 0.2204, decode.d6.loss_dice: 0.6257, decode.d7.loss_cls: 0.0917, decode.d7.loss_mask: 0.2200, decode.d7.loss_dice: 0.6242, decode.d8.loss_cls: 0.0849, decode.d8.loss_mask: 0.2198, decode.d8.loss_dice: 0.6246, loss: 9.6766 +2022-05-10 08:42:30,132 - mmseg - INFO - Iter [28150/80000] lr: 9.306e-07, eta: 1 day, 4:04:45, time: 1.776, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0919, decode.loss_mask: 0.2259, decode.loss_dice: 0.6190, decode.d0.loss_cls: 0.3245, decode.d0.loss_mask: 0.2360, decode.d0.loss_dice: 0.6523, decode.d1.loss_cls: 0.1188, decode.d1.loss_mask: 0.2266, decode.d1.loss_dice: 0.6295, decode.d2.loss_cls: 0.1016, decode.d2.loss_mask: 0.2261, decode.d2.loss_dice: 0.6196, decode.d3.loss_cls: 0.1058, decode.d3.loss_mask: 0.2264, decode.d3.loss_dice: 0.6163, decode.d4.loss_cls: 0.1056, decode.d4.loss_mask: 0.2257, decode.d4.loss_dice: 0.6186, decode.d5.loss_cls: 0.0967, decode.d5.loss_mask: 0.2261, decode.d5.loss_dice: 0.6144, decode.d6.loss_cls: 0.0987, decode.d6.loss_mask: 0.2265, decode.d6.loss_dice: 0.6203, decode.d7.loss_cls: 0.0939, decode.d7.loss_mask: 0.2260, decode.d7.loss_dice: 0.6214, decode.d8.loss_cls: 0.0952, decode.d8.loss_mask: 0.2261, decode.d8.loss_dice: 0.6116, loss: 9.7271 +2022-05-10 08:43:58,720 - mmseg - INFO - Iter [28200/80000] lr: 9.297e-07, eta: 1 day, 4:02:51, time: 1.772, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0828, decode.loss_mask: 0.2282, decode.loss_dice: 0.6169, decode.d0.loss_cls: 0.3081, decode.d0.loss_mask: 0.2377, decode.d0.loss_dice: 0.6492, decode.d1.loss_cls: 0.1059, 
decode.d1.loss_mask: 0.2286, decode.d1.loss_dice: 0.6252, decode.d2.loss_cls: 0.0955, decode.d2.loss_mask: 0.2280, decode.d2.loss_dice: 0.6234, decode.d3.loss_cls: 0.0863, decode.d3.loss_mask: 0.2279, decode.d3.loss_dice: 0.6162, decode.d4.loss_cls: 0.0858, decode.d4.loss_mask: 0.2276, decode.d4.loss_dice: 0.6192, decode.d5.loss_cls: 0.0861, decode.d5.loss_mask: 0.2280, decode.d5.loss_dice: 0.6183, decode.d6.loss_cls: 0.0877, decode.d6.loss_mask: 0.2271, decode.d6.loss_dice: 0.6171, decode.d7.loss_cls: 0.0828, decode.d7.loss_mask: 0.2273, decode.d7.loss_dice: 0.6186, decode.d8.loss_cls: 0.0844, decode.d8.loss_mask: 0.2274, decode.d8.loss_dice: 0.6174, loss: 9.6148 +2022-05-10 08:45:29,152 - mmseg - INFO - Iter [28250/80000] lr: 9.288e-07, eta: 1 day, 4:01:00, time: 1.809, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0840, decode.loss_mask: 0.2253, decode.loss_dice: 0.6174, decode.d0.loss_cls: 0.3233, decode.d0.loss_mask: 0.2373, decode.d0.loss_dice: 0.6570, decode.d1.loss_cls: 0.1097, decode.d1.loss_mask: 0.2277, decode.d1.loss_dice: 0.6283, decode.d2.loss_cls: 0.0940, decode.d2.loss_mask: 0.2267, decode.d2.loss_dice: 0.6244, decode.d3.loss_cls: 0.0968, decode.d3.loss_mask: 0.2259, decode.d3.loss_dice: 0.6211, decode.d4.loss_cls: 0.0933, decode.d4.loss_mask: 0.2255, decode.d4.loss_dice: 0.6180, decode.d5.loss_cls: 0.0925, decode.d5.loss_mask: 0.2252, decode.d5.loss_dice: 0.6225, decode.d6.loss_cls: 0.0902, decode.d6.loss_mask: 0.2243, decode.d6.loss_dice: 0.6140, decode.d7.loss_cls: 0.0881, decode.d7.loss_mask: 0.2250, decode.d7.loss_dice: 0.6196, decode.d8.loss_cls: 0.0844, decode.d8.loss_mask: 0.2251, decode.d8.loss_dice: 0.6196, loss: 9.6662 +2022-05-10 08:47:01,897 - mmseg - INFO - Iter [28300/80000] lr: 9.279e-07, eta: 1 day, 3:59:14, time: 1.854, data_time: 0.064, memory: 64699, decode.loss_cls: 0.0947, decode.loss_mask: 0.2257, decode.loss_dice: 0.6214, decode.d0.loss_cls: 0.3224, decode.d0.loss_mask: 0.2372, decode.d0.loss_dice: 0.6627, 
decode.d1.loss_cls: 0.1108, decode.d1.loss_mask: 0.2284, decode.d1.loss_dice: 0.6354, decode.d2.loss_cls: 0.0901, decode.d2.loss_mask: 0.2269, decode.d2.loss_dice: 0.6298, decode.d3.loss_cls: 0.0943, decode.d3.loss_mask: 0.2261, decode.d3.loss_dice: 0.6246, decode.d4.loss_cls: 0.0955, decode.d4.loss_mask: 0.2257, decode.d4.loss_dice: 0.6248, decode.d5.loss_cls: 0.0953, decode.d5.loss_mask: 0.2250, decode.d5.loss_dice: 0.6213, decode.d6.loss_cls: 0.0945, decode.d6.loss_mask: 0.2260, decode.d6.loss_dice: 0.6205, decode.d7.loss_cls: 0.0888, decode.d7.loss_mask: 0.2257, decode.d7.loss_dice: 0.6184, decode.d8.loss_cls: 0.0860, decode.d8.loss_mask: 0.2251, decode.d8.loss_dice: 0.6219, loss: 9.7248 +2022-05-10 08:48:31,748 - mmseg - INFO - Iter [28350/80000] lr: 9.270e-07, eta: 1 day, 3:57:23, time: 1.798, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0914, decode.loss_mask: 0.2228, decode.loss_dice: 0.6088, decode.d0.loss_cls: 0.3268, decode.d0.loss_mask: 0.2334, decode.d0.loss_dice: 0.6501, decode.d1.loss_cls: 0.1101, decode.d1.loss_mask: 0.2239, decode.d1.loss_dice: 0.6235, decode.d2.loss_cls: 0.0976, decode.d2.loss_mask: 0.2223, decode.d2.loss_dice: 0.6158, decode.d3.loss_cls: 0.0921, decode.d3.loss_mask: 0.2227, decode.d3.loss_dice: 0.6090, decode.d4.loss_cls: 0.1029, decode.d4.loss_mask: 0.2222, decode.d4.loss_dice: 0.6152, decode.d5.loss_cls: 0.0862, decode.d5.loss_mask: 0.2221, decode.d5.loss_dice: 0.6141, decode.d6.loss_cls: 0.0846, decode.d6.loss_mask: 0.2218, decode.d6.loss_dice: 0.6086, decode.d7.loss_cls: 0.0888, decode.d7.loss_mask: 0.2217, decode.d7.loss_dice: 0.6142, decode.d8.loss_cls: 0.0900, decode.d8.loss_mask: 0.2223, decode.d8.loss_dice: 0.6103, loss: 9.5754 +2022-05-10 08:50:01,605 - mmseg - INFO - Iter [28400/80000] lr: 9.261e-07, eta: 1 day, 3:55:32, time: 1.797, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0895, decode.loss_mask: 0.2229, decode.loss_dice: 0.6222, decode.d0.loss_cls: 0.3209, decode.d0.loss_mask: 0.2346, 
decode.d0.loss_dice: 0.6511, decode.d1.loss_cls: 0.0993, decode.d1.loss_mask: 0.2263, decode.d1.loss_dice: 0.6344, decode.d2.loss_cls: 0.1002, decode.d2.loss_mask: 0.2253, decode.d2.loss_dice: 0.6288, decode.d3.loss_cls: 0.0868, decode.d3.loss_mask: 0.2241, decode.d3.loss_dice: 0.6206, decode.d4.loss_cls: 0.0863, decode.d4.loss_mask: 0.2249, decode.d4.loss_dice: 0.6226, decode.d5.loss_cls: 0.0915, decode.d5.loss_mask: 0.2236, decode.d5.loss_dice: 0.6217, decode.d6.loss_cls: 0.0904, decode.d6.loss_mask: 0.2231, decode.d6.loss_dice: 0.6220, decode.d7.loss_cls: 0.0874, decode.d7.loss_mask: 0.2234, decode.d7.loss_dice: 0.6230, decode.d8.loss_cls: 0.0973, decode.d8.loss_mask: 0.2228, decode.d8.loss_dice: 0.6190, loss: 9.6662 +2022-05-10 08:51:31,502 - mmseg - INFO - Iter [28450/80000] lr: 9.252e-07, eta: 1 day, 3:53:41, time: 1.798, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0850, decode.loss_mask: 0.2230, decode.loss_dice: 0.6097, decode.d0.loss_cls: 0.3118, decode.d0.loss_mask: 0.2326, decode.d0.loss_dice: 0.6446, decode.d1.loss_cls: 0.1056, decode.d1.loss_mask: 0.2246, decode.d1.loss_dice: 0.6230, decode.d2.loss_cls: 0.0889, decode.d2.loss_mask: 0.2244, decode.d2.loss_dice: 0.6174, decode.d3.loss_cls: 0.0792, decode.d3.loss_mask: 0.2231, decode.d3.loss_dice: 0.6101, decode.d4.loss_cls: 0.0832, decode.d4.loss_mask: 0.2233, decode.d4.loss_dice: 0.6133, decode.d5.loss_cls: 0.0880, decode.d5.loss_mask: 0.2237, decode.d5.loss_dice: 0.6126, decode.d6.loss_cls: 0.0853, decode.d6.loss_mask: 0.2236, decode.d6.loss_dice: 0.6093, decode.d7.loss_cls: 0.0770, decode.d7.loss_mask: 0.2234, decode.d7.loss_dice: 0.6116, decode.d8.loss_cls: 0.0833, decode.d8.loss_mask: 0.2231, decode.d8.loss_dice: 0.6102, loss: 9.4937 +2022-05-10 08:53:02,795 - mmseg - INFO - Iter [28500/80000] lr: 9.243e-07, eta: 1 day, 3:51:52, time: 1.826, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0836, decode.loss_mask: 0.2290, decode.loss_dice: 0.6090, decode.d0.loss_cls: 0.3170, 
decode.d0.loss_mask: 0.2385, decode.d0.loss_dice: 0.6416, decode.d1.loss_cls: 0.0995, decode.d1.loss_mask: 0.2306, decode.d1.loss_dice: 0.6193, decode.d2.loss_cls: 0.0947, decode.d2.loss_mask: 0.2290, decode.d2.loss_dice: 0.6130, decode.d3.loss_cls: 0.0839, decode.d3.loss_mask: 0.2291, decode.d3.loss_dice: 0.6090, decode.d4.loss_cls: 0.0833, decode.d4.loss_mask: 0.2293, decode.d4.loss_dice: 0.6121, decode.d5.loss_cls: 0.0840, decode.d5.loss_mask: 0.2292, decode.d5.loss_dice: 0.6097, decode.d6.loss_cls: 0.0857, decode.d6.loss_mask: 0.2292, decode.d6.loss_dice: 0.6103, decode.d7.loss_cls: 0.0789, decode.d7.loss_mask: 0.2289, decode.d7.loss_dice: 0.6114, decode.d8.loss_cls: 0.0821, decode.d8.loss_mask: 0.2288, decode.d8.loss_dice: 0.6096, loss: 9.5392 +2022-05-10 08:54:32,923 - mmseg - INFO - Iter [28550/80000] lr: 9.234e-07, eta: 1 day, 3:50:02, time: 1.803, data_time: 0.017, memory: 64699, decode.loss_cls: 0.1010, decode.loss_mask: 0.2218, decode.loss_dice: 0.6163, decode.d0.loss_cls: 0.3371, decode.d0.loss_mask: 0.2347, decode.d0.loss_dice: 0.6628, decode.d1.loss_cls: 0.1161, decode.d1.loss_mask: 0.2246, decode.d1.loss_dice: 0.6311, decode.d2.loss_cls: 0.1035, decode.d2.loss_mask: 0.2237, decode.d2.loss_dice: 0.6229, decode.d3.loss_cls: 0.1014, decode.d3.loss_mask: 0.2230, decode.d3.loss_dice: 0.6243, decode.d4.loss_cls: 0.1013, decode.d4.loss_mask: 0.2224, decode.d4.loss_dice: 0.6194, decode.d5.loss_cls: 0.1020, decode.d5.loss_mask: 0.2220, decode.d5.loss_dice: 0.6199, decode.d6.loss_cls: 0.0950, decode.d6.loss_mask: 0.2220, decode.d6.loss_dice: 0.6185, decode.d7.loss_cls: 0.0962, decode.d7.loss_mask: 0.2209, decode.d7.loss_dice: 0.6174, decode.d8.loss_cls: 0.1010, decode.d8.loss_mask: 0.2219, decode.d8.loss_dice: 0.6175, loss: 9.7419 +2022-05-10 08:56:02,122 - mmseg - INFO - Iter [28600/80000] lr: 9.225e-07, eta: 1 day, 3:48:10, time: 1.784, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0872, decode.loss_mask: 0.2165, decode.loss_dice: 0.6075, 
decode.d0.loss_cls: 0.3151, decode.d0.loss_mask: 0.2258, decode.d0.loss_dice: 0.6433, decode.d1.loss_cls: 0.1089, decode.d1.loss_mask: 0.2181, decode.d1.loss_dice: 0.6202, decode.d2.loss_cls: 0.1001, decode.d2.loss_mask: 0.2175, decode.d2.loss_dice: 0.6088, decode.d3.loss_cls: 0.0922, decode.d3.loss_mask: 0.2162, decode.d3.loss_dice: 0.6062, decode.d4.loss_cls: 0.0911, decode.d4.loss_mask: 0.2174, decode.d4.loss_dice: 0.6082, decode.d5.loss_cls: 0.0924, decode.d5.loss_mask: 0.2167, decode.d5.loss_dice: 0.6031, decode.d6.loss_cls: 0.0893, decode.d6.loss_mask: 0.2164, decode.d6.loss_dice: 0.6093, decode.d7.loss_cls: 0.0907, decode.d7.loss_mask: 0.2169, decode.d7.loss_dice: 0.6058, decode.d8.loss_cls: 0.0875, decode.d8.loss_mask: 0.2163, decode.d8.loss_dice: 0.6052, loss: 9.4502 +2022-05-10 08:57:33,432 - mmseg - INFO - Iter [28650/80000] lr: 9.216e-07, eta: 1 day, 3:46:22, time: 1.826, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0848, decode.loss_mask: 0.2181, decode.loss_dice: 0.6212, decode.d0.loss_cls: 0.3166, decode.d0.loss_mask: 0.2269, decode.d0.loss_dice: 0.6562, decode.d1.loss_cls: 0.0971, decode.d1.loss_mask: 0.2204, decode.d1.loss_dice: 0.6334, decode.d2.loss_cls: 0.0932, decode.d2.loss_mask: 0.2190, decode.d2.loss_dice: 0.6275, decode.d3.loss_cls: 0.0865, decode.d3.loss_mask: 0.2191, decode.d3.loss_dice: 0.6207, decode.d4.loss_cls: 0.0944, decode.d4.loss_mask: 0.2187, decode.d4.loss_dice: 0.6250, decode.d5.loss_cls: 0.0902, decode.d5.loss_mask: 0.2188, decode.d5.loss_dice: 0.6247, decode.d6.loss_cls: 0.0835, decode.d6.loss_mask: 0.2181, decode.d6.loss_dice: 0.6221, decode.d7.loss_cls: 0.0874, decode.d7.loss_mask: 0.2182, decode.d7.loss_dice: 0.6203, decode.d8.loss_cls: 0.0866, decode.d8.loss_mask: 0.2181, decode.d8.loss_dice: 0.6226, loss: 9.5894 +2022-05-10 08:59:02,680 - mmseg - INFO - Iter [28700/80000] lr: 9.207e-07, eta: 1 day, 3:44:30, time: 1.785, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0922, decode.loss_mask: 0.2274, 
decode.loss_dice: 0.6165, decode.d0.loss_cls: 0.3303, decode.d0.loss_mask: 0.2407, decode.d0.loss_dice: 0.6535, decode.d1.loss_cls: 0.1088, decode.d1.loss_mask: 0.2312, decode.d1.loss_dice: 0.6333, decode.d2.loss_cls: 0.0981, decode.d2.loss_mask: 0.2285, decode.d2.loss_dice: 0.6275, decode.d3.loss_cls: 0.1027, decode.d3.loss_mask: 0.2270, decode.d3.loss_dice: 0.6151, decode.d4.loss_cls: 0.0944, decode.d4.loss_mask: 0.2268, decode.d4.loss_dice: 0.6167, decode.d5.loss_cls: 0.0979, decode.d5.loss_mask: 0.2263, decode.d5.loss_dice: 0.6193, decode.d6.loss_cls: 0.0876, decode.d6.loss_mask: 0.2268, decode.d6.loss_dice: 0.6171, decode.d7.loss_cls: 0.0942, decode.d7.loss_mask: 0.2265, decode.d7.loss_dice: 0.6150, decode.d8.loss_cls: 0.0931, decode.d8.loss_mask: 0.2272, decode.d8.loss_dice: 0.6179, loss: 9.7195 +2022-05-10 09:00:32,123 - mmseg - INFO - Iter [28750/80000] lr: 9.198e-07, eta: 1 day, 3:42:38, time: 1.789, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0989, decode.loss_mask: 0.2245, decode.loss_dice: 0.6199, decode.d0.loss_cls: 0.3118, decode.d0.loss_mask: 0.2326, decode.d0.loss_dice: 0.6545, decode.d1.loss_cls: 0.1195, decode.d1.loss_mask: 0.2261, decode.d1.loss_dice: 0.6311, decode.d2.loss_cls: 0.1113, decode.d2.loss_mask: 0.2241, decode.d2.loss_dice: 0.6270, decode.d3.loss_cls: 0.1058, decode.d3.loss_mask: 0.2243, decode.d3.loss_dice: 0.6241, decode.d4.loss_cls: 0.1016, decode.d4.loss_mask: 0.2245, decode.d4.loss_dice: 0.6196, decode.d5.loss_cls: 0.1032, decode.d5.loss_mask: 0.2248, decode.d5.loss_dice: 0.6235, decode.d6.loss_cls: 0.0935, decode.d6.loss_mask: 0.2241, decode.d6.loss_dice: 0.6182, decode.d7.loss_cls: 0.1000, decode.d7.loss_mask: 0.2244, decode.d7.loss_dice: 0.6243, decode.d8.loss_cls: 0.1013, decode.d8.loss_mask: 0.2244, decode.d8.loss_dice: 0.6219, loss: 9.7648 +2022-05-10 09:02:01,175 - mmseg - INFO - Iter [28800/80000] lr: 9.189e-07, eta: 1 day, 3:40:46, time: 1.781, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0873, 
decode.loss_mask: 0.2152, decode.loss_dice: 0.6020, decode.d0.loss_cls: 0.3296, decode.d0.loss_mask: 0.2242, decode.d0.loss_dice: 0.6392, decode.d1.loss_cls: 0.1033, decode.d1.loss_mask: 0.2165, decode.d1.loss_dice: 0.6154, decode.d2.loss_cls: 0.0901, decode.d2.loss_mask: 0.2156, decode.d2.loss_dice: 0.6084, decode.d3.loss_cls: 0.0875, decode.d3.loss_mask: 0.2147, decode.d3.loss_dice: 0.6037, decode.d4.loss_cls: 0.0877, decode.d4.loss_mask: 0.2147, decode.d4.loss_dice: 0.6043, decode.d5.loss_cls: 0.0843, decode.d5.loss_mask: 0.2158, decode.d5.loss_dice: 0.6082, decode.d6.loss_cls: 0.0816, decode.d6.loss_mask: 0.2152, decode.d6.loss_dice: 0.6036, decode.d7.loss_cls: 0.0836, decode.d7.loss_mask: 0.2155, decode.d7.loss_dice: 0.6018, decode.d8.loss_cls: 0.0825, decode.d8.loss_mask: 0.2161, decode.d8.loss_dice: 0.6034, loss: 9.3710 +2022-05-10 09:03:32,879 - mmseg - INFO - Iter [28850/80000] lr: 9.180e-07, eta: 1 day, 3:38:59, time: 1.834, data_time: 0.064, memory: 64699, decode.loss_cls: 0.0981, decode.loss_mask: 0.2298, decode.loss_dice: 0.6116, decode.d0.loss_cls: 0.3236, decode.d0.loss_mask: 0.2407, decode.d0.loss_dice: 0.6463, decode.d1.loss_cls: 0.1172, decode.d1.loss_mask: 0.2323, decode.d1.loss_dice: 0.6225, decode.d2.loss_cls: 0.1075, decode.d2.loss_mask: 0.2313, decode.d2.loss_dice: 0.6156, decode.d3.loss_cls: 0.1072, decode.d3.loss_mask: 0.2300, decode.d3.loss_dice: 0.6153, decode.d4.loss_cls: 0.1032, decode.d4.loss_mask: 0.2298, decode.d4.loss_dice: 0.6131, decode.d5.loss_cls: 0.1055, decode.d5.loss_mask: 0.2299, decode.d5.loss_dice: 0.6136, decode.d6.loss_cls: 0.1005, decode.d6.loss_mask: 0.2305, decode.d6.loss_dice: 0.6119, decode.d7.loss_cls: 0.1023, decode.d7.loss_mask: 0.2298, decode.d7.loss_dice: 0.6126, decode.d8.loss_cls: 0.0964, decode.d8.loss_mask: 0.2306, decode.d8.loss_dice: 0.6162, loss: 9.7548 +2022-05-10 09:05:01,889 - mmseg - INFO - Iter [28900/80000] lr: 9.171e-07, eta: 1 day, 3:37:07, time: 1.780, data_time: 0.020, memory: 64699, 
decode.loss_cls: 0.0887, decode.loss_mask: 0.2239, decode.loss_dice: 0.6090, decode.d0.loss_cls: 0.3156, decode.d0.loss_mask: 0.2347, decode.d0.loss_dice: 0.6459, decode.d1.loss_cls: 0.1129, decode.d1.loss_mask: 0.2255, decode.d1.loss_dice: 0.6201, decode.d2.loss_cls: 0.0960, decode.d2.loss_mask: 0.2253, decode.d2.loss_dice: 0.6174, decode.d3.loss_cls: 0.0954, decode.d3.loss_mask: 0.2239, decode.d3.loss_dice: 0.6112, decode.d4.loss_cls: 0.0918, decode.d4.loss_mask: 0.2239, decode.d4.loss_dice: 0.6135, decode.d5.loss_cls: 0.0967, decode.d5.loss_mask: 0.2238, decode.d5.loss_dice: 0.6105, decode.d6.loss_cls: 0.0879, decode.d6.loss_mask: 0.2244, decode.d6.loss_dice: 0.6088, decode.d7.loss_cls: 0.0920, decode.d7.loss_mask: 0.2240, decode.d7.loss_dice: 0.6113, decode.d8.loss_cls: 0.0879, decode.d8.loss_mask: 0.2236, decode.d8.loss_dice: 0.6084, loss: 9.5740 +2022-05-10 09:06:31,018 - mmseg - INFO - Iter [28950/80000] lr: 9.162e-07, eta: 1 day, 3:35:15, time: 1.783, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0882, decode.loss_mask: 0.2226, decode.loss_dice: 0.6225, decode.d0.loss_cls: 0.3256, decode.d0.loss_mask: 0.2327, decode.d0.loss_dice: 0.6618, decode.d1.loss_cls: 0.1013, decode.d1.loss_mask: 0.2246, decode.d1.loss_dice: 0.6341, decode.d2.loss_cls: 0.0981, decode.d2.loss_mask: 0.2236, decode.d2.loss_dice: 0.6276, decode.d3.loss_cls: 0.0899, decode.d3.loss_mask: 0.2232, decode.d3.loss_dice: 0.6219, decode.d4.loss_cls: 0.0943, decode.d4.loss_mask: 0.2227, decode.d4.loss_dice: 0.6234, decode.d5.loss_cls: 0.0906, decode.d5.loss_mask: 0.2228, decode.d5.loss_dice: 0.6236, decode.d6.loss_cls: 0.0889, decode.d6.loss_mask: 0.2226, decode.d6.loss_dice: 0.6243, decode.d7.loss_cls: 0.0890, decode.d7.loss_mask: 0.2222, decode.d7.loss_dice: 0.6231, decode.d8.loss_cls: 0.0877, decode.d8.loss_mask: 0.2220, decode.d8.loss_dice: 0.6219, loss: 9.6765 +2022-05-10 09:07:59,963 - mmseg - INFO - Saving checkpoint at 29000 iterations +2022-05-10 09:08:34,186 - mmseg - INFO - Exp 
name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 09:08:34,194 - mmseg - INFO - Iter [29000/80000] lr: 9.153e-07, eta: 1 day, 3:34:24, time: 2.461, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0963, decode.loss_mask: 0.2221, decode.loss_dice: 0.6014, decode.d0.loss_cls: 0.3256, decode.d0.loss_mask: 0.2315, decode.d0.loss_dice: 0.6402, decode.d1.loss_cls: 0.1122, decode.d1.loss_mask: 0.2228, decode.d1.loss_dice: 0.6149, decode.d2.loss_cls: 0.1012, decode.d2.loss_mask: 0.2225, decode.d2.loss_dice: 0.6111, decode.d3.loss_cls: 0.0939, decode.d3.loss_mask: 0.2215, decode.d3.loss_dice: 0.6022, decode.d4.loss_cls: 0.0951, decode.d4.loss_mask: 0.2214, decode.d4.loss_dice: 0.6028, decode.d5.loss_cls: 0.0991, decode.d5.loss_mask: 0.2216, decode.d5.loss_dice: 0.6052, decode.d6.loss_cls: 0.1031, decode.d6.loss_mask: 0.2217, decode.d6.loss_dice: 0.6022, decode.d7.loss_cls: 0.1019, decode.d7.loss_mask: 0.2218, decode.d7.loss_dice: 0.6047, decode.d8.loss_cls: 0.0948, decode.d8.loss_mask: 0.2216, decode.d8.loss_dice: 0.6025, loss: 9.5388 +2022-05-10 09:10:29,732 - mmseg - INFO - per class results: +2022-05-10 09:10:29,743 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.58 | 99.27 | +| sidewalk | 88.63 | 93.51 | +| building | 94.35 | 96.9 | +| wall | 67.7 | 81.11 | +| fence | 75.18 | 84.76 | +| pole | 70.8 | 82.75 | +| traffic light | 77.3 | 87.67 | +| traffic sign | 83.54 | 90.09 | +| vegetation | 93.38 | 97.01 | +| terrain | 68.62 | 77.84 | +| sky | 95.83 | 98.38 | +| person | 86.84 | 93.86 | +| rider | 74.39 | 84.1 | +| car | 96.22 | 98.33 | +| truck | 91.46 | 94.56 | +| bus | 93.78 | 96.66 | +| train | 88.36 | 90.66 | +| motorcycle | 78.1 | 87.02 | +| bicycle | 82.65 | 92.28 | ++---------------+-------+-------+ +2022-05-10 09:10:29,744 - mmseg - INFO - Summary: +2022-05-10 09:10:29,744 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | 
++------+-------+-------+ +| 97.0 | 84.51 | 90.88 | ++------+-------+-------+ +2022-05-10 09:10:29,747 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 09:10:29,747 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8451, mAcc: 0.9088, IoU.road: 0.9858, IoU.sidewalk: 0.8863, IoU.building: 0.9435, IoU.wall: 0.6770, IoU.fence: 0.7518, IoU.pole: 0.7080, IoU.traffic light: 0.7730, IoU.traffic sign: 0.8354, IoU.vegetation: 0.9338, IoU.terrain: 0.6862, IoU.sky: 0.9583, IoU.person: 0.8684, IoU.rider: 0.7439, IoU.car: 0.9622, IoU.truck: 0.9146, IoU.bus: 0.9378, IoU.train: 0.8836, IoU.motorcycle: 0.7810, IoU.bicycle: 0.8265, Acc.road: 0.9927, Acc.sidewalk: 0.9351, Acc.building: 0.9690, Acc.wall: 0.8111, Acc.fence: 0.8476, Acc.pole: 0.8275, Acc.traffic light: 0.8767, Acc.traffic sign: 0.9009, Acc.vegetation: 0.9701, Acc.terrain: 0.7784, Acc.sky: 0.9838, Acc.person: 0.9386, Acc.rider: 0.8410, Acc.car: 0.9833, Acc.truck: 0.9456, Acc.bus: 0.9666, Acc.train: 0.9066, Acc.motorcycle: 0.8702, Acc.bicycle: 0.9228 +2022-05-10 09:12:02,906 - mmseg - INFO - Iter [29050/80000] lr: 9.144e-07, eta: 1 day, 3:36:02, time: 4.177, data_time: 2.378, memory: 64699, decode.loss_cls: 0.0942, decode.loss_mask: 0.2247, decode.loss_dice: 0.6144, decode.d0.loss_cls: 0.3166, decode.d0.loss_mask: 0.2372, decode.d0.loss_dice: 0.6498, decode.d1.loss_cls: 0.1123, decode.d1.loss_mask: 0.2281, decode.d1.loss_dice: 0.6281, decode.d2.loss_cls: 0.1094, decode.d2.loss_mask: 0.2262, decode.d2.loss_dice: 0.6180, decode.d3.loss_cls: 0.0918, decode.d3.loss_mask: 0.2266, decode.d3.loss_dice: 0.6120, decode.d4.loss_cls: 0.0955, decode.d4.loss_mask: 0.2265, decode.d4.loss_dice: 0.6112, decode.d5.loss_cls: 0.0977, decode.d5.loss_mask: 0.2265, decode.d5.loss_dice: 0.6128, decode.d6.loss_cls: 0.0930, decode.d6.loss_mask: 0.2256, decode.d6.loss_dice: 0.6135, decode.d7.loss_cls: 0.0907, decode.d7.loss_mask: 0.2252, decode.d7.loss_dice: 0.6130, decode.d8.loss_cls: 0.0951, 
decode.d8.loss_mask: 0.2255, decode.d8.loss_dice: 0.6116, loss: 9.6526 +2022-05-10 09:13:31,404 - mmseg - INFO - Iter [29100/80000] lr: 9.135e-07, eta: 1 day, 3:34:09, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0834, decode.loss_mask: 0.2261, decode.loss_dice: 0.6049, decode.d0.loss_cls: 0.3141, decode.d0.loss_mask: 0.2347, decode.d0.loss_dice: 0.6405, decode.d1.loss_cls: 0.0935, decode.d1.loss_mask: 0.2272, decode.d1.loss_dice: 0.6228, decode.d2.loss_cls: 0.0889, decode.d2.loss_mask: 0.2268, decode.d2.loss_dice: 0.6150, decode.d3.loss_cls: 0.0853, decode.d3.loss_mask: 0.2266, decode.d3.loss_dice: 0.6098, decode.d4.loss_cls: 0.0888, decode.d4.loss_mask: 0.2266, decode.d4.loss_dice: 0.6098, decode.d5.loss_cls: 0.0870, decode.d5.loss_mask: 0.2262, decode.d5.loss_dice: 0.6115, decode.d6.loss_cls: 0.0780, decode.d6.loss_mask: 0.2262, decode.d6.loss_dice: 0.6095, decode.d7.loss_cls: 0.0807, decode.d7.loss_mask: 0.2260, decode.d7.loss_dice: 0.6055, decode.d8.loss_cls: 0.0802, decode.d8.loss_mask: 0.2261, decode.d8.loss_dice: 0.6069, loss: 9.4889 +2022-05-10 09:15:01,853 - mmseg - INFO - Iter [29150/80000] lr: 9.126e-07, eta: 1 day, 3:32:19, time: 1.809, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0904, decode.loss_mask: 0.2249, decode.loss_dice: 0.6087, decode.d0.loss_cls: 0.3247, decode.d0.loss_mask: 0.2341, decode.d0.loss_dice: 0.6464, decode.d1.loss_cls: 0.1198, decode.d1.loss_mask: 0.2263, decode.d1.loss_dice: 0.6176, decode.d2.loss_cls: 0.0931, decode.d2.loss_mask: 0.2253, decode.d2.loss_dice: 0.6136, decode.d3.loss_cls: 0.0922, decode.d3.loss_mask: 0.2250, decode.d3.loss_dice: 0.6053, decode.d4.loss_cls: 0.0967, decode.d4.loss_mask: 0.2243, decode.d4.loss_dice: 0.6073, decode.d5.loss_cls: 0.0930, decode.d5.loss_mask: 0.2242, decode.d5.loss_dice: 0.6055, decode.d6.loss_cls: 0.0969, decode.d6.loss_mask: 0.2243, decode.d6.loss_dice: 0.6027, decode.d7.loss_cls: 0.0857, decode.d7.loss_mask: 0.2247, decode.d7.loss_dice: 0.6073, 
decode.d8.loss_cls: 0.0904, decode.d8.loss_mask: 0.2254, decode.d8.loss_dice: 0.6101, loss: 9.5656 +2022-05-10 09:16:32,579 - mmseg - INFO - Iter [29200/80000] lr: 9.117e-07, eta: 1 day, 3:30:29, time: 1.814, data_time: 0.017, memory: 64699, decode.loss_cls: 0.0870, decode.loss_mask: 0.2208, decode.loss_dice: 0.6109, decode.d0.loss_cls: 0.3359, decode.d0.loss_mask: 0.2313, decode.d0.loss_dice: 0.6467, decode.d1.loss_cls: 0.1098, decode.d1.loss_mask: 0.2232, decode.d1.loss_dice: 0.6260, decode.d2.loss_cls: 0.1047, decode.d2.loss_mask: 0.2225, decode.d2.loss_dice: 0.6183, decode.d3.loss_cls: 0.0974, decode.d3.loss_mask: 0.2216, decode.d3.loss_dice: 0.6138, decode.d4.loss_cls: 0.0956, decode.d4.loss_mask: 0.2208, decode.d4.loss_dice: 0.6095, decode.d5.loss_cls: 0.0885, decode.d5.loss_mask: 0.2215, decode.d5.loss_dice: 0.6137, decode.d6.loss_cls: 0.0869, decode.d6.loss_mask: 0.2215, decode.d6.loss_dice: 0.6106, decode.d7.loss_cls: 0.0854, decode.d7.loss_mask: 0.2206, decode.d7.loss_dice: 0.6152, decode.d8.loss_cls: 0.0865, decode.d8.loss_mask: 0.2201, decode.d8.loss_dice: 0.6084, loss: 9.5748 +2022-05-10 09:18:05,493 - mmseg - INFO - Iter [29250/80000] lr: 9.109e-07, eta: 1 day, 3:28:44, time: 1.859, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0893, decode.loss_mask: 0.2207, decode.loss_dice: 0.5971, decode.d0.loss_cls: 0.3296, decode.d0.loss_mask: 0.2323, decode.d0.loss_dice: 0.6425, decode.d1.loss_cls: 0.1093, decode.d1.loss_mask: 0.2219, decode.d1.loss_dice: 0.6096, decode.d2.loss_cls: 0.1019, decode.d2.loss_mask: 0.2208, decode.d2.loss_dice: 0.6052, decode.d3.loss_cls: 0.0935, decode.d3.loss_mask: 0.2193, decode.d3.loss_dice: 0.6069, decode.d4.loss_cls: 0.0984, decode.d4.loss_mask: 0.2207, decode.d4.loss_dice: 0.6011, decode.d5.loss_cls: 0.0891, decode.d5.loss_mask: 0.2209, decode.d5.loss_dice: 0.6041, decode.d6.loss_cls: 0.0934, decode.d6.loss_mask: 0.2211, decode.d6.loss_dice: 0.5998, decode.d7.loss_cls: 0.0889, decode.d7.loss_mask: 0.2208, 
decode.d7.loss_dice: 0.5979, decode.d8.loss_cls: 0.0954, decode.d8.loss_mask: 0.2207, decode.d8.loss_dice: 0.6004, loss: 9.4727 +2022-05-10 09:19:33,985 - mmseg - INFO - Iter [29300/80000] lr: 9.100e-07, eta: 1 day, 3:26:51, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0860, decode.loss_mask: 0.2295, decode.loss_dice: 0.6104, decode.d0.loss_cls: 0.3256, decode.d0.loss_mask: 0.2399, decode.d0.loss_dice: 0.6426, decode.d1.loss_cls: 0.1008, decode.d1.loss_mask: 0.2316, decode.d1.loss_dice: 0.6226, decode.d2.loss_cls: 0.0963, decode.d2.loss_mask: 0.2308, decode.d2.loss_dice: 0.6154, decode.d3.loss_cls: 0.0905, decode.d3.loss_mask: 0.2302, decode.d3.loss_dice: 0.6078, decode.d4.loss_cls: 0.0833, decode.d4.loss_mask: 0.2304, decode.d4.loss_dice: 0.6082, decode.d5.loss_cls: 0.0820, decode.d5.loss_mask: 0.2299, decode.d5.loss_dice: 0.6160, decode.d6.loss_cls: 0.0869, decode.d6.loss_mask: 0.2292, decode.d6.loss_dice: 0.6113, decode.d7.loss_cls: 0.0858, decode.d7.loss_mask: 0.2294, decode.d7.loss_dice: 0.6075, decode.d8.loss_cls: 0.0848, decode.d8.loss_mask: 0.2297, decode.d8.loss_dice: 0.6066, loss: 9.5808 +2022-05-10 09:21:03,998 - mmseg - INFO - Iter [29350/80000] lr: 9.091e-07, eta: 1 day, 3:25:01, time: 1.800, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0817, decode.loss_mask: 0.2216, decode.loss_dice: 0.6135, decode.d0.loss_cls: 0.3236, decode.d0.loss_mask: 0.2309, decode.d0.loss_dice: 0.6393, decode.d1.loss_cls: 0.1085, decode.d1.loss_mask: 0.2220, decode.d1.loss_dice: 0.6258, decode.d2.loss_cls: 0.0919, decode.d2.loss_mask: 0.2222, decode.d2.loss_dice: 0.6140, decode.d3.loss_cls: 0.0891, decode.d3.loss_mask: 0.2217, decode.d3.loss_dice: 0.6086, decode.d4.loss_cls: 0.0922, decode.d4.loss_mask: 0.2216, decode.d4.loss_dice: 0.6090, decode.d5.loss_cls: 0.0902, decode.d5.loss_mask: 0.2219, decode.d5.loss_dice: 0.6095, decode.d6.loss_cls: 0.0825, decode.d6.loss_mask: 0.2213, decode.d6.loss_dice: 0.6106, decode.d7.loss_cls: 0.0878, 
decode.d7.loss_mask: 0.2218, decode.d7.loss_dice: 0.6091, decode.d8.loss_cls: 0.0903, decode.d8.loss_mask: 0.2222, decode.d8.loss_dice: 0.6104, loss: 9.5147 +2022-05-10 09:22:36,408 - mmseg - INFO - Iter [29400/80000] lr: 9.082e-07, eta: 1 day, 3:23:15, time: 1.848, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0936, decode.loss_mask: 0.2235, decode.loss_dice: 0.6073, decode.d0.loss_cls: 0.3174, decode.d0.loss_mask: 0.2357, decode.d0.loss_dice: 0.6437, decode.d1.loss_cls: 0.1170, decode.d1.loss_mask: 0.2259, decode.d1.loss_dice: 0.6205, decode.d2.loss_cls: 0.1040, decode.d2.loss_mask: 0.2250, decode.d2.loss_dice: 0.6118, decode.d3.loss_cls: 0.0987, decode.d3.loss_mask: 0.2237, decode.d3.loss_dice: 0.6131, decode.d4.loss_cls: 0.1019, decode.d4.loss_mask: 0.2240, decode.d4.loss_dice: 0.6080, decode.d5.loss_cls: 0.1013, decode.d5.loss_mask: 0.2243, decode.d5.loss_dice: 0.6063, decode.d6.loss_cls: 0.0978, decode.d6.loss_mask: 0.2242, decode.d6.loss_dice: 0.6097, decode.d7.loss_cls: 0.0959, decode.d7.loss_mask: 0.2238, decode.d7.loss_dice: 0.6077, decode.d8.loss_cls: 0.0963, decode.d8.loss_mask: 0.2231, decode.d8.loss_dice: 0.6071, loss: 9.6124 +2022-05-10 09:24:06,572 - mmseg - INFO - Iter [29450/80000] lr: 9.073e-07, eta: 1 day, 3:21:25, time: 1.803, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0832, decode.loss_mask: 0.2205, decode.loss_dice: 0.6113, decode.d0.loss_cls: 0.3266, decode.d0.loss_mask: 0.2312, decode.d0.loss_dice: 0.6449, decode.d1.loss_cls: 0.1107, decode.d1.loss_mask: 0.2224, decode.d1.loss_dice: 0.6208, decode.d2.loss_cls: 0.0996, decode.d2.loss_mask: 0.2218, decode.d2.loss_dice: 0.6209, decode.d3.loss_cls: 0.0915, decode.d3.loss_mask: 0.2212, decode.d3.loss_dice: 0.6132, decode.d4.loss_cls: 0.0887, decode.d4.loss_mask: 0.2208, decode.d4.loss_dice: 0.6101, decode.d5.loss_cls: 0.0883, decode.d5.loss_mask: 0.2215, decode.d5.loss_dice: 0.6147, decode.d6.loss_cls: 0.0890, decode.d6.loss_mask: 0.2210, decode.d6.loss_dice: 0.6120, 
decode.d7.loss_cls: 0.0877, decode.d7.loss_mask: 0.2207, decode.d7.loss_dice: 0.6147, decode.d8.loss_cls: 0.0900, decode.d8.loss_mask: 0.2206, decode.d8.loss_dice: 0.6094, loss: 9.5488 +2022-05-10 09:25:37,686 - mmseg - INFO - Iter [29500/80000] lr: 9.064e-07, eta: 1 day, 3:19:37, time: 1.822, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0791, decode.loss_mask: 0.2187, decode.loss_dice: 0.5966, decode.d0.loss_cls: 0.3143, decode.d0.loss_mask: 0.2278, decode.d0.loss_dice: 0.6251, decode.d1.loss_cls: 0.1093, decode.d1.loss_mask: 0.2194, decode.d1.loss_dice: 0.6059, decode.d2.loss_cls: 0.0950, decode.d2.loss_mask: 0.2196, decode.d2.loss_dice: 0.6030, decode.d3.loss_cls: 0.0923, decode.d3.loss_mask: 0.2204, decode.d3.loss_dice: 0.5947, decode.d4.loss_cls: 0.0820, decode.d4.loss_mask: 0.2205, decode.d4.loss_dice: 0.5961, decode.d5.loss_cls: 0.0806, decode.d5.loss_mask: 0.2198, decode.d5.loss_dice: 0.6000, decode.d6.loss_cls: 0.0824, decode.d6.loss_mask: 0.2189, decode.d6.loss_dice: 0.5929, decode.d7.loss_cls: 0.0847, decode.d7.loss_mask: 0.2191, decode.d7.loss_dice: 0.5960, decode.d8.loss_cls: 0.0758, decode.d8.loss_mask: 0.2192, decode.d8.loss_dice: 0.5975, loss: 9.3067 +2022-05-10 09:27:07,112 - mmseg - INFO - Iter [29550/80000] lr: 9.055e-07, eta: 1 day, 3:17:46, time: 1.789, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0916, decode.loss_mask: 0.2216, decode.loss_dice: 0.6032, decode.d0.loss_cls: 0.3246, decode.d0.loss_mask: 0.2315, decode.d0.loss_dice: 0.6444, decode.d1.loss_cls: 0.1126, decode.d1.loss_mask: 0.2234, decode.d1.loss_dice: 0.6170, decode.d2.loss_cls: 0.1009, decode.d2.loss_mask: 0.2223, decode.d2.loss_dice: 0.6127, decode.d3.loss_cls: 0.0951, decode.d3.loss_mask: 0.2224, decode.d3.loss_dice: 0.6063, decode.d4.loss_cls: 0.0924, decode.d4.loss_mask: 0.2220, decode.d4.loss_dice: 0.6086, decode.d5.loss_cls: 0.0954, decode.d5.loss_mask: 0.2210, decode.d5.loss_dice: 0.6077, decode.d6.loss_cls: 0.0887, decode.d6.loss_mask: 0.2214, 
decode.d6.loss_dice: 0.6034, decode.d7.loss_cls: 0.0908, decode.d7.loss_mask: 0.2208, decode.d7.loss_dice: 0.6038, decode.d8.loss_cls: 0.0911, decode.d8.loss_mask: 0.2210, decode.d8.loss_dice: 0.6054, loss: 9.5232 +2022-05-10 09:28:38,493 - mmseg - INFO - Iter [29600/80000] lr: 9.046e-07, eta: 1 day, 3:15:58, time: 1.827, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0867, decode.loss_mask: 0.2199, decode.loss_dice: 0.6050, decode.d0.loss_cls: 0.3091, decode.d0.loss_mask: 0.2294, decode.d0.loss_dice: 0.6440, decode.d1.loss_cls: 0.1054, decode.d1.loss_mask: 0.2217, decode.d1.loss_dice: 0.6134, decode.d2.loss_cls: 0.0988, decode.d2.loss_mask: 0.2209, decode.d2.loss_dice: 0.6126, decode.d3.loss_cls: 0.0843, decode.d3.loss_mask: 0.2201, decode.d3.loss_dice: 0.6082, decode.d4.loss_cls: 0.0871, decode.d4.loss_mask: 0.2203, decode.d4.loss_dice: 0.6098, decode.d5.loss_cls: 0.0797, decode.d5.loss_mask: 0.2208, decode.d5.loss_dice: 0.6076, decode.d6.loss_cls: 0.0872, decode.d6.loss_mask: 0.2197, decode.d6.loss_dice: 0.6082, decode.d7.loss_cls: 0.0838, decode.d7.loss_mask: 0.2202, decode.d7.loss_dice: 0.6031, decode.d8.loss_cls: 0.0891, decode.d8.loss_mask: 0.2201, decode.d8.loss_dice: 0.6051, loss: 9.4413 +2022-05-10 09:30:08,692 - mmseg - INFO - Iter [29650/80000] lr: 9.037e-07, eta: 1 day, 3:14:08, time: 1.805, data_time: 0.022, memory: 64699, decode.loss_cls: 0.0965, decode.loss_mask: 0.2170, decode.loss_dice: 0.6138, decode.d0.loss_cls: 0.3267, decode.d0.loss_mask: 0.2262, decode.d0.loss_dice: 0.6550, decode.d1.loss_cls: 0.1183, decode.d1.loss_mask: 0.2191, decode.d1.loss_dice: 0.6249, decode.d2.loss_cls: 0.1035, decode.d2.loss_mask: 0.2181, decode.d2.loss_dice: 0.6227, decode.d3.loss_cls: 0.0988, decode.d3.loss_mask: 0.2164, decode.d3.loss_dice: 0.6133, decode.d4.loss_cls: 0.0976, decode.d4.loss_mask: 0.2172, decode.d4.loss_dice: 0.6194, decode.d5.loss_cls: 0.0953, decode.d5.loss_mask: 0.2172, decode.d5.loss_dice: 0.6162, decode.d6.loss_cls: 0.0882, 
decode.d6.loss_mask: 0.2168, decode.d6.loss_dice: 0.6130, decode.d7.loss_cls: 0.0967, decode.d7.loss_mask: 0.2169, decode.d7.loss_dice: 0.6101, decode.d8.loss_cls: 0.0919, decode.d8.loss_mask: 0.2168, decode.d8.loss_dice: 0.6156, loss: 9.5988 +2022-05-10 09:31:37,927 - mmseg - INFO - Iter [29700/80000] lr: 9.028e-07, eta: 1 day, 3:12:17, time: 1.785, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0876, decode.loss_mask: 0.2239, decode.loss_dice: 0.6138, decode.d0.loss_cls: 0.3180, decode.d0.loss_mask: 0.2331, decode.d0.loss_dice: 0.6477, decode.d1.loss_cls: 0.1083, decode.d1.loss_mask: 0.2255, decode.d1.loss_dice: 0.6226, decode.d2.loss_cls: 0.0947, decode.d2.loss_mask: 0.2242, decode.d2.loss_dice: 0.6200, decode.d3.loss_cls: 0.0989, decode.d3.loss_mask: 0.2239, decode.d3.loss_dice: 0.6143, decode.d4.loss_cls: 0.0991, decode.d4.loss_mask: 0.2245, decode.d4.loss_dice: 0.6165, decode.d5.loss_cls: 0.0907, decode.d5.loss_mask: 0.2244, decode.d5.loss_dice: 0.6176, decode.d6.loss_cls: 0.0927, decode.d6.loss_mask: 0.2240, decode.d6.loss_dice: 0.6133, decode.d7.loss_cls: 0.0866, decode.d7.loss_mask: 0.2245, decode.d7.loss_dice: 0.6156, decode.d8.loss_cls: 0.0889, decode.d8.loss_mask: 0.2244, decode.d8.loss_dice: 0.6129, loss: 9.6123 +2022-05-10 09:33:07,097 - mmseg - INFO - Iter [29750/80000] lr: 9.019e-07, eta: 1 day, 3:10:26, time: 1.783, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0815, decode.loss_mask: 0.2222, decode.loss_dice: 0.6090, decode.d0.loss_cls: 0.3153, decode.d0.loss_mask: 0.2329, decode.d0.loss_dice: 0.6449, decode.d1.loss_cls: 0.1071, decode.d1.loss_mask: 0.2249, decode.d1.loss_dice: 0.6184, decode.d2.loss_cls: 0.0974, decode.d2.loss_mask: 0.2237, decode.d2.loss_dice: 0.6128, decode.d3.loss_cls: 0.0919, decode.d3.loss_mask: 0.2228, decode.d3.loss_dice: 0.6089, decode.d4.loss_cls: 0.0927, decode.d4.loss_mask: 0.2230, decode.d4.loss_dice: 0.6081, decode.d5.loss_cls: 0.0885, decode.d5.loss_mask: 0.2221, decode.d5.loss_dice: 0.6079, 
decode.d6.loss_cls: 0.0866, decode.d6.loss_mask: 0.2225, decode.d6.loss_dice: 0.6099, decode.d7.loss_cls: 0.0845, decode.d7.loss_mask: 0.2220, decode.d7.loss_dice: 0.6070, decode.d8.loss_cls: 0.0835, decode.d8.loss_mask: 0.2223, decode.d8.loss_dice: 0.6078, loss: 9.5022 +2022-05-10 09:34:40,119 - mmseg - INFO - Iter [29800/80000] lr: 9.010e-07, eta: 1 day, 3:08:41, time: 1.860, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0743, decode.loss_mask: 0.2168, decode.loss_dice: 0.6058, decode.d0.loss_cls: 0.3173, decode.d0.loss_mask: 0.2284, decode.d0.loss_dice: 0.6362, decode.d1.loss_cls: 0.1007, decode.d1.loss_mask: 0.2207, decode.d1.loss_dice: 0.6144, decode.d2.loss_cls: 0.0859, decode.d2.loss_mask: 0.2190, decode.d2.loss_dice: 0.6088, decode.d3.loss_cls: 0.0759, decode.d3.loss_mask: 0.2173, decode.d3.loss_dice: 0.6091, decode.d4.loss_cls: 0.0799, decode.d4.loss_mask: 0.2178, decode.d4.loss_dice: 0.6088, decode.d5.loss_cls: 0.0854, decode.d5.loss_mask: 0.2181, decode.d5.loss_dice: 0.6079, decode.d6.loss_cls: 0.0766, decode.d6.loss_mask: 0.2181, decode.d6.loss_dice: 0.6059, decode.d7.loss_cls: 0.0777, decode.d7.loss_mask: 0.2174, decode.d7.loss_dice: 0.6044, decode.d8.loss_cls: 0.0751, decode.d8.loss_mask: 0.2173, decode.d8.loss_dice: 0.6075, loss: 9.3485 +2022-05-10 09:36:09,241 - mmseg - INFO - Iter [29850/80000] lr: 9.001e-07, eta: 1 day, 3:06:50, time: 1.783, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0868, decode.loss_mask: 0.2161, decode.loss_dice: 0.6038, decode.d0.loss_cls: 0.3220, decode.d0.loss_mask: 0.2256, decode.d0.loss_dice: 0.6393, decode.d1.loss_cls: 0.1064, decode.d1.loss_mask: 0.2175, decode.d1.loss_dice: 0.6192, decode.d2.loss_cls: 0.0913, decode.d2.loss_mask: 0.2168, decode.d2.loss_dice: 0.6093, decode.d3.loss_cls: 0.0892, decode.d3.loss_mask: 0.2161, decode.d3.loss_dice: 0.6030, decode.d4.loss_cls: 0.0888, decode.d4.loss_mask: 0.2157, decode.d4.loss_dice: 0.6040, decode.d5.loss_cls: 0.0875, decode.d5.loss_mask: 0.2164, 
decode.d5.loss_dice: 0.6081, decode.d6.loss_cls: 0.0856, decode.d6.loss_mask: 0.2154, decode.d6.loss_dice: 0.6016, decode.d7.loss_cls: 0.0875, decode.d7.loss_mask: 0.2155, decode.d7.loss_dice: 0.6063, decode.d8.loss_cls: 0.0868, decode.d8.loss_mask: 0.2155, decode.d8.loss_dice: 0.6060, loss: 9.4031 +2022-05-10 09:37:39,152 - mmseg - INFO - Iter [29900/80000] lr: 8.992e-07, eta: 1 day, 3:05:01, time: 1.798, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0841, decode.loss_mask: 0.2280, decode.loss_dice: 0.6129, decode.d0.loss_cls: 0.3193, decode.d0.loss_mask: 0.2369, decode.d0.loss_dice: 0.6533, decode.d1.loss_cls: 0.1015, decode.d1.loss_mask: 0.2298, decode.d1.loss_dice: 0.6280, decode.d2.loss_cls: 0.0996, decode.d2.loss_mask: 0.2295, decode.d2.loss_dice: 0.6210, decode.d3.loss_cls: 0.0889, decode.d3.loss_mask: 0.2291, decode.d3.loss_dice: 0.6169, decode.d4.loss_cls: 0.0870, decode.d4.loss_mask: 0.2286, decode.d4.loss_dice: 0.6149, decode.d5.loss_cls: 0.0930, decode.d5.loss_mask: 0.2287, decode.d5.loss_dice: 0.6161, decode.d6.loss_cls: 0.0874, decode.d6.loss_mask: 0.2284, decode.d6.loss_dice: 0.6112, decode.d7.loss_cls: 0.0866, decode.d7.loss_mask: 0.2294, decode.d7.loss_dice: 0.6142, decode.d8.loss_cls: 0.0804, decode.d8.loss_mask: 0.2285, decode.d8.loss_dice: 0.6103, loss: 9.6235 +2022-05-10 09:39:11,870 - mmseg - INFO - Iter [29950/80000] lr: 8.983e-07, eta: 1 day, 3:03:15, time: 1.853, data_time: 0.069, memory: 64699, decode.loss_cls: 0.0883, decode.loss_mask: 0.2246, decode.loss_dice: 0.6109, decode.d0.loss_cls: 0.3253, decode.d0.loss_mask: 0.2355, decode.d0.loss_dice: 0.6457, decode.d1.loss_cls: 0.0986, decode.d1.loss_mask: 0.2260, decode.d1.loss_dice: 0.6240, decode.d2.loss_cls: 0.0936, decode.d2.loss_mask: 0.2241, decode.d2.loss_dice: 0.6139, decode.d3.loss_cls: 0.0906, decode.d3.loss_mask: 0.2224, decode.d3.loss_dice: 0.6128, decode.d4.loss_cls: 0.0861, decode.d4.loss_mask: 0.2233, decode.d4.loss_dice: 0.6127, decode.d5.loss_cls: 0.0917, 
decode.d5.loss_mask: 0.2237, decode.d5.loss_dice: 0.6135, decode.d6.loss_cls: 0.0783, decode.d6.loss_mask: 0.2235, decode.d6.loss_dice: 0.6095, decode.d7.loss_cls: 0.0833, decode.d7.loss_mask: 0.2241, decode.d7.loss_dice: 0.6089, decode.d8.loss_cls: 0.0842, decode.d8.loss_mask: 0.2240, decode.d8.loss_dice: 0.6075, loss: 9.5308 +2022-05-10 09:40:39,960 - mmseg - INFO - Saving checkpoint at 30000 iterations +2022-05-10 09:41:13,277 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 09:41:13,287 - mmseg - INFO - Iter [30000/80000] lr: 8.974e-07, eta: 1 day, 3:02:18, time: 2.428, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0736, decode.loss_mask: 0.2186, decode.loss_dice: 0.6027, decode.d0.loss_cls: 0.3109, decode.d0.loss_mask: 0.2286, decode.d0.loss_dice: 0.6358, decode.d1.loss_cls: 0.0852, decode.d1.loss_mask: 0.2210, decode.d1.loss_dice: 0.6164, decode.d2.loss_cls: 0.0823, decode.d2.loss_mask: 0.2200, decode.d2.loss_dice: 0.6099, decode.d3.loss_cls: 0.0822, decode.d3.loss_mask: 0.2196, decode.d3.loss_dice: 0.6033, decode.d4.loss_cls: 0.0811, decode.d4.loss_mask: 0.2193, decode.d4.loss_dice: 0.6104, decode.d5.loss_cls: 0.0760, decode.d5.loss_mask: 0.2189, decode.d5.loss_dice: 0.6100, decode.d6.loss_cls: 0.0811, decode.d6.loss_mask: 0.2187, decode.d6.loss_dice: 0.6041, decode.d7.loss_cls: 0.0804, decode.d7.loss_mask: 0.2186, decode.d7.loss_dice: 0.6052, decode.d8.loss_cls: 0.0792, decode.d8.loss_mask: 0.2189, decode.d8.loss_dice: 0.6059, loss: 9.3379 +2022-05-10 09:43:08,556 - mmseg - INFO - per class results: +2022-05-10 09:43:08,561 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.61 | 99.22 | +| sidewalk | 88.69 | 94.08 | +| building | 94.38 | 96.91 | +| wall | 65.97 | 82.36 | +| fence | 74.29 | 81.68 | +| pole | 71.67 | 83.8 | +| traffic light | 77.31 | 88.19 | +| traffic sign | 83.78 | 90.47 | +| vegetation | 93.3 | 96.84 | +| terrain | 
67.71 | 78.83 | +| sky | 95.84 | 98.43 | +| person | 86.8 | 94.1 | +| rider | 74.45 | 83.07 | +| car | 96.26 | 98.3 | +| truck | 91.47 | 94.45 | +| bus | 93.62 | 96.77 | +| train | 87.8 | 91.06 | +| motorcycle | 76.31 | 88.34 | +| bicycle | 82.94 | 91.08 | ++---------------+-------+-------+ +2022-05-10 09:43:08,561 - mmseg - INFO - Summary: +2022-05-10 09:43:08,561 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.99 | 84.27 | 90.95 | ++-------+-------+-------+ +2022-05-10 09:43:08,566 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 09:43:08,566 - mmseg - INFO - Iter(val) [32] aAcc: 0.9699, mIoU: 0.8427, mAcc: 0.9095, IoU.road: 0.9861, IoU.sidewalk: 0.8869, IoU.building: 0.9438, IoU.wall: 0.6597, IoU.fence: 0.7429, IoU.pole: 0.7167, IoU.traffic light: 0.7731, IoU.traffic sign: 0.8378, IoU.vegetation: 0.9330, IoU.terrain: 0.6771, IoU.sky: 0.9584, IoU.person: 0.8680, IoU.rider: 0.7445, IoU.car: 0.9626, IoU.truck: 0.9147, IoU.bus: 0.9362, IoU.train: 0.8780, IoU.motorcycle: 0.7631, IoU.bicycle: 0.8294, Acc.road: 0.9922, Acc.sidewalk: 0.9408, Acc.building: 0.9691, Acc.wall: 0.8236, Acc.fence: 0.8168, Acc.pole: 0.8380, Acc.traffic light: 0.8819, Acc.traffic sign: 0.9047, Acc.vegetation: 0.9684, Acc.terrain: 0.7883, Acc.sky: 0.9843, Acc.person: 0.9410, Acc.rider: 0.8307, Acc.car: 0.9830, Acc.truck: 0.9445, Acc.bus: 0.9677, Acc.train: 0.9106, Acc.motorcycle: 0.8834, Acc.bicycle: 0.9108 +2022-05-10 09:44:38,236 - mmseg - INFO - Iter [30050/80000] lr: 8.965e-07, eta: 1 day, 3:03:40, time: 4.101, data_time: 2.326, memory: 64699, decode.loss_cls: 0.0806, decode.loss_mask: 0.2174, decode.loss_dice: 0.6015, decode.d0.loss_cls: 0.3179, decode.d0.loss_mask: 0.2284, decode.d0.loss_dice: 0.6369, decode.d1.loss_cls: 0.1076, decode.d1.loss_mask: 0.2203, decode.d1.loss_dice: 0.6198, decode.d2.loss_cls: 0.0932, decode.d2.loss_mask: 0.2195, decode.d2.loss_dice: 0.6125, decode.d3.loss_cls: 
0.0919, decode.d3.loss_mask: 0.2186, decode.d3.loss_dice: 0.6033, decode.d4.loss_cls: 0.0877, decode.d4.loss_mask: 0.2188, decode.d4.loss_dice: 0.6018, decode.d5.loss_cls: 0.0914, decode.d5.loss_mask: 0.2189, decode.d5.loss_dice: 0.6013, decode.d6.loss_cls: 0.0853, decode.d6.loss_mask: 0.2184, decode.d6.loss_dice: 0.6059, decode.d7.loss_cls: 0.0837, decode.d7.loss_mask: 0.2187, decode.d7.loss_dice: 0.6055, decode.d8.loss_cls: 0.0873, decode.d8.loss_mask: 0.2177, decode.d8.loss_dice: 0.6000, loss: 9.4118 +2022-05-10 09:46:09,500 - mmseg - INFO - Iter [30100/80000] lr: 8.956e-07, eta: 1 day, 3:01:52, time: 1.824, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0913, decode.loss_mask: 0.2210, decode.loss_dice: 0.5901, decode.d0.loss_cls: 0.3194, decode.d0.loss_mask: 0.2299, decode.d0.loss_dice: 0.6253, decode.d1.loss_cls: 0.1107, decode.d1.loss_mask: 0.2215, decode.d1.loss_dice: 0.6024, decode.d2.loss_cls: 0.1023, decode.d2.loss_mask: 0.2209, decode.d2.loss_dice: 0.6029, decode.d3.loss_cls: 0.0916, decode.d3.loss_mask: 0.2204, decode.d3.loss_dice: 0.5928, decode.d4.loss_cls: 0.0918, decode.d4.loss_mask: 0.2215, decode.d4.loss_dice: 0.5929, decode.d5.loss_cls: 0.0949, decode.d5.loss_mask: 0.2219, decode.d5.loss_dice: 0.5878, decode.d6.loss_cls: 0.0927, decode.d6.loss_mask: 0.2213, decode.d6.loss_dice: 0.5868, decode.d7.loss_cls: 0.0934, decode.d7.loss_mask: 0.2212, decode.d7.loss_dice: 0.5895, decode.d8.loss_cls: 0.0900, decode.d8.loss_mask: 0.2215, decode.d8.loss_dice: 0.5927, loss: 9.3626 +2022-05-10 09:47:41,366 - mmseg - INFO - Iter [30150/80000] lr: 8.947e-07, eta: 1 day, 3:00:05, time: 1.837, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0942, decode.loss_mask: 0.2214, decode.loss_dice: 0.6051, decode.d0.loss_cls: 0.3236, decode.d0.loss_mask: 0.2334, decode.d0.loss_dice: 0.6433, decode.d1.loss_cls: 0.1135, decode.d1.loss_mask: 0.2234, decode.d1.loss_dice: 0.6192, decode.d2.loss_cls: 0.0972, decode.d2.loss_mask: 0.2221, decode.d2.loss_dice: 0.6092, 
decode.d3.loss_cls: 0.0959, decode.d3.loss_mask: 0.2207, decode.d3.loss_dice: 0.6103, decode.d4.loss_cls: 0.0972, decode.d4.loss_mask: 0.2206, decode.d4.loss_dice: 0.6085, decode.d5.loss_cls: 0.0958, decode.d5.loss_mask: 0.2218, decode.d5.loss_dice: 0.6103, decode.d6.loss_cls: 0.0986, decode.d6.loss_mask: 0.2210, decode.d6.loss_dice: 0.6026, decode.d7.loss_cls: 0.0977, decode.d7.loss_mask: 0.2209, decode.d7.loss_dice: 0.6087, decode.d8.loss_cls: 0.0917, decode.d8.loss_mask: 0.2210, decode.d8.loss_dice: 0.6105, loss: 9.5595 +2022-05-10 09:49:09,465 - mmseg - INFO - Iter [30200/80000] lr: 8.938e-07, eta: 1 day, 2:58:12, time: 1.763, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0854, decode.loss_mask: 0.2199, decode.loss_dice: 0.6154, decode.d0.loss_cls: 0.3101, decode.d0.loss_mask: 0.2291, decode.d0.loss_dice: 0.6484, decode.d1.loss_cls: 0.1060, decode.d1.loss_mask: 0.2216, decode.d1.loss_dice: 0.6222, decode.d2.loss_cls: 0.0988, decode.d2.loss_mask: 0.2207, decode.d2.loss_dice: 0.6156, decode.d3.loss_cls: 0.0951, decode.d3.loss_mask: 0.2192, decode.d3.loss_dice: 0.6083, decode.d4.loss_cls: 0.0875, decode.d4.loss_mask: 0.2187, decode.d4.loss_dice: 0.6135, decode.d5.loss_cls: 0.0821, decode.d5.loss_mask: 0.2189, decode.d5.loss_dice: 0.6129, decode.d6.loss_cls: 0.0851, decode.d6.loss_mask: 0.2193, decode.d6.loss_dice: 0.6090, decode.d7.loss_cls: 0.0874, decode.d7.loss_mask: 0.2193, decode.d7.loss_dice: 0.6083, decode.d8.loss_cls: 0.0815, decode.d8.loss_mask: 0.2195, decode.d8.loss_dice: 0.6102, loss: 9.4891 +2022-05-10 09:50:38,601 - mmseg - INFO - Iter [30250/80000] lr: 8.929e-07, eta: 1 day, 2:56:21, time: 1.783, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0951, decode.loss_mask: 0.2196, decode.loss_dice: 0.6005, decode.d0.loss_cls: 0.3279, decode.d0.loss_mask: 0.2285, decode.d0.loss_dice: 0.6403, decode.d1.loss_cls: 0.1077, decode.d1.loss_mask: 0.2217, decode.d1.loss_dice: 0.6176, decode.d2.loss_cls: 0.0983, decode.d2.loss_mask: 0.2211, 
decode.d2.loss_dice: 0.6071, decode.d3.loss_cls: 0.0976, decode.d3.loss_mask: 0.2204, decode.d3.loss_dice: 0.6070, decode.d4.loss_cls: 0.0964, decode.d4.loss_mask: 0.2198, decode.d4.loss_dice: 0.6044, decode.d5.loss_cls: 0.0990, decode.d5.loss_mask: 0.2205, decode.d5.loss_dice: 0.6040, decode.d6.loss_cls: 0.1007, decode.d6.loss_mask: 0.2196, decode.d6.loss_dice: 0.6004, decode.d7.loss_cls: 0.0972, decode.d7.loss_mask: 0.2191, decode.d7.loss_dice: 0.6045, decode.d8.loss_cls: 0.0927, decode.d8.loss_mask: 0.2194, decode.d8.loss_dice: 0.6017, loss: 9.5099 +2022-05-10 09:52:08,993 - mmseg - INFO - Iter [30300/80000] lr: 8.920e-07, eta: 1 day, 2:54:32, time: 1.808, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0831, decode.loss_mask: 0.2193, decode.loss_dice: 0.5976, decode.d0.loss_cls: 0.3281, decode.d0.loss_mask: 0.2291, decode.d0.loss_dice: 0.6298, decode.d1.loss_cls: 0.1017, decode.d1.loss_mask: 0.2210, decode.d1.loss_dice: 0.6071, decode.d2.loss_cls: 0.0937, decode.d2.loss_mask: 0.2198, decode.d2.loss_dice: 0.6036, decode.d3.loss_cls: 0.0848, decode.d3.loss_mask: 0.2195, decode.d3.loss_dice: 0.5994, decode.d4.loss_cls: 0.0859, decode.d4.loss_mask: 0.2199, decode.d4.loss_dice: 0.5983, decode.d5.loss_cls: 0.0876, decode.d5.loss_mask: 0.2201, decode.d5.loss_dice: 0.6010, decode.d6.loss_cls: 0.0821, decode.d6.loss_mask: 0.2195, decode.d6.loss_dice: 0.5976, decode.d7.loss_cls: 0.0828, decode.d7.loss_mask: 0.2199, decode.d7.loss_dice: 0.5949, decode.d8.loss_cls: 0.0833, decode.d8.loss_mask: 0.2196, decode.d8.loss_dice: 0.5962, loss: 9.3464 +2022-05-10 09:53:41,857 - mmseg - INFO - Iter [30350/80000] lr: 8.911e-07, eta: 1 day, 2:52:47, time: 1.857, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0900, decode.loss_mask: 0.2230, decode.loss_dice: 0.6210, decode.d0.loss_cls: 0.3323, decode.d0.loss_mask: 0.2330, decode.d0.loss_dice: 0.6639, decode.d1.loss_cls: 0.1192, decode.d1.loss_mask: 0.2260, decode.d1.loss_dice: 0.6339, decode.d2.loss_cls: 0.1119, 
decode.d2.loss_mask: 0.2251, decode.d2.loss_dice: 0.6271, decode.d3.loss_cls: 0.1038, decode.d3.loss_mask: 0.2246, decode.d3.loss_dice: 0.6227, decode.d4.loss_cls: 0.0949, decode.d4.loss_mask: 0.2239, decode.d4.loss_dice: 0.6249, decode.d5.loss_cls: 0.0966, decode.d5.loss_mask: 0.2227, decode.d5.loss_dice: 0.6237, decode.d6.loss_cls: 0.0966, decode.d6.loss_mask: 0.2231, decode.d6.loss_dice: 0.6217, decode.d7.loss_cls: 0.0887, decode.d7.loss_mask: 0.2236, decode.d7.loss_dice: 0.6238, decode.d8.loss_cls: 0.0966, decode.d8.loss_mask: 0.2233, decode.d8.loss_dice: 0.6216, loss: 9.7632 +2022-05-10 09:55:10,894 - mmseg - INFO - Iter [30400/80000] lr: 8.902e-07, eta: 1 day, 2:50:56, time: 1.781, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0929, decode.loss_mask: 0.2242, decode.loss_dice: 0.6137, decode.d0.loss_cls: 0.3272, decode.d0.loss_mask: 0.2345, decode.d0.loss_dice: 0.6468, decode.d1.loss_cls: 0.1058, decode.d1.loss_mask: 0.2259, decode.d1.loss_dice: 0.6190, decode.d2.loss_cls: 0.0994, decode.d2.loss_mask: 0.2255, decode.d2.loss_dice: 0.6183, decode.d3.loss_cls: 0.0999, decode.d3.loss_mask: 0.2248, decode.d3.loss_dice: 0.6172, decode.d4.loss_cls: 0.0956, decode.d4.loss_mask: 0.2251, decode.d4.loss_dice: 0.6129, decode.d5.loss_cls: 0.0911, decode.d5.loss_mask: 0.2242, decode.d5.loss_dice: 0.6147, decode.d6.loss_cls: 0.0907, decode.d6.loss_mask: 0.2256, decode.d6.loss_dice: 0.6115, decode.d7.loss_cls: 0.0883, decode.d7.loss_mask: 0.2256, decode.d7.loss_dice: 0.6156, decode.d8.loss_cls: 0.0992, decode.d8.loss_mask: 0.2247, decode.d8.loss_dice: 0.6125, loss: 9.6324 +2022-05-10 09:56:40,667 - mmseg - INFO - Iter [30450/80000] lr: 8.893e-07, eta: 1 day, 2:49:06, time: 1.795, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0925, decode.loss_mask: 0.2216, decode.loss_dice: 0.5883, decode.d0.loss_cls: 0.3239, decode.d0.loss_mask: 0.2315, decode.d0.loss_dice: 0.6294, decode.d1.loss_cls: 0.1107, decode.d1.loss_mask: 0.2237, decode.d1.loss_dice: 0.6013, 
decode.d2.loss_cls: 0.0946, decode.d2.loss_mask: 0.2231, decode.d2.loss_dice: 0.5941, decode.d3.loss_cls: 0.0949, decode.d3.loss_mask: 0.2218, decode.d3.loss_dice: 0.5917, decode.d4.loss_cls: 0.0951, decode.d4.loss_mask: 0.2224, decode.d4.loss_dice: 0.5906, decode.d5.loss_cls: 0.0909, decode.d5.loss_mask: 0.2223, decode.d5.loss_dice: 0.5927, decode.d6.loss_cls: 0.0934, decode.d6.loss_mask: 0.2220, decode.d6.loss_dice: 0.5867, decode.d7.loss_cls: 0.0915, decode.d7.loss_mask: 0.2220, decode.d7.loss_dice: 0.5905, decode.d8.loss_cls: 0.0908, decode.d8.loss_mask: 0.2228, decode.d8.loss_dice: 0.5867, loss: 9.3636 +2022-05-10 09:58:10,800 - mmseg - INFO - Iter [30500/80000] lr: 8.884e-07, eta: 1 day, 2:47:17, time: 1.803, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0841, decode.loss_mask: 0.2136, decode.loss_dice: 0.6110, decode.d0.loss_cls: 0.3270, decode.d0.loss_mask: 0.2257, decode.d0.loss_dice: 0.6457, decode.d1.loss_cls: 0.1031, decode.d1.loss_mask: 0.2162, decode.d1.loss_dice: 0.6191, decode.d2.loss_cls: 0.1013, decode.d2.loss_mask: 0.2151, decode.d2.loss_dice: 0.6161, decode.d3.loss_cls: 0.0894, decode.d3.loss_mask: 0.2142, decode.d3.loss_dice: 0.6093, decode.d4.loss_cls: 0.0841, decode.d4.loss_mask: 0.2140, decode.d4.loss_dice: 0.6078, decode.d5.loss_cls: 0.0892, decode.d5.loss_mask: 0.2143, decode.d5.loss_dice: 0.6080, decode.d6.loss_cls: 0.0866, decode.d6.loss_mask: 0.2146, decode.d6.loss_dice: 0.6110, decode.d7.loss_cls: 0.0797, decode.d7.loss_mask: 0.2136, decode.d7.loss_dice: 0.6067, decode.d8.loss_cls: 0.0866, decode.d8.loss_mask: 0.2133, decode.d8.loss_dice: 0.6054, loss: 9.4258 +2022-05-10 09:59:43,335 - mmseg - INFO - Iter [30550/80000] lr: 8.875e-07, eta: 1 day, 2:45:31, time: 1.851, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0874, decode.loss_mask: 0.2235, decode.loss_dice: 0.6051, decode.d0.loss_cls: 0.3165, decode.d0.loss_mask: 0.2349, decode.d0.loss_dice: 0.6396, decode.d1.loss_cls: 0.1133, decode.d1.loss_mask: 0.2246, 
decode.d1.loss_dice: 0.6179, decode.d2.loss_cls: 0.0992, decode.d2.loss_mask: 0.2245, decode.d2.loss_dice: 0.6096, decode.d3.loss_cls: 0.0943, decode.d3.loss_mask: 0.2235, decode.d3.loss_dice: 0.6071, decode.d4.loss_cls: 0.0925, decode.d4.loss_mask: 0.2240, decode.d4.loss_dice: 0.6052, decode.d5.loss_cls: 0.0876, decode.d5.loss_mask: 0.2243, decode.d5.loss_dice: 0.6078, decode.d6.loss_cls: 0.0959, decode.d6.loss_mask: 0.2235, decode.d6.loss_dice: 0.6007, decode.d7.loss_cls: 0.0883, decode.d7.loss_mask: 0.2234, decode.d7.loss_dice: 0.6021, decode.d8.loss_cls: 0.0825, decode.d8.loss_mask: 0.2239, decode.d8.loss_dice: 0.6027, loss: 9.5053 +2022-05-10 10:01:12,793 - mmseg - INFO - Iter [30600/80000] lr: 8.866e-07, eta: 1 day, 2:43:41, time: 1.789, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0782, decode.loss_mask: 0.2187, decode.loss_dice: 0.6100, decode.d0.loss_cls: 0.3097, decode.d0.loss_mask: 0.2288, decode.d0.loss_dice: 0.6447, decode.d1.loss_cls: 0.1041, decode.d1.loss_mask: 0.2204, decode.d1.loss_dice: 0.6211, decode.d2.loss_cls: 0.0881, decode.d2.loss_mask: 0.2198, decode.d2.loss_dice: 0.6147, decode.d3.loss_cls: 0.0827, decode.d3.loss_mask: 0.2199, decode.d3.loss_dice: 0.6128, decode.d4.loss_cls: 0.0832, decode.d4.loss_mask: 0.2199, decode.d4.loss_dice: 0.6119, decode.d5.loss_cls: 0.0806, decode.d5.loss_mask: 0.2202, decode.d5.loss_dice: 0.6105, decode.d6.loss_cls: 0.0784, decode.d6.loss_mask: 0.2194, decode.d6.loss_dice: 0.6072, decode.d7.loss_cls: 0.0760, decode.d7.loss_mask: 0.2191, decode.d7.loss_dice: 0.6089, decode.d8.loss_cls: 0.0744, decode.d8.loss_mask: 0.2190, decode.d8.loss_dice: 0.6126, loss: 9.4149 +2022-05-10 10:02:41,658 - mmseg - INFO - Iter [30650/80000] lr: 8.857e-07, eta: 1 day, 2:41:50, time: 1.777, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0796, decode.loss_mask: 0.2197, decode.loss_dice: 0.5909, decode.d0.loss_cls: 0.3217, decode.d0.loss_mask: 0.2293, decode.d0.loss_dice: 0.6244, decode.d1.loss_cls: 0.1037, 
decode.d1.loss_mask: 0.2220, decode.d1.loss_dice: 0.6028, decode.d2.loss_cls: 0.0943, decode.d2.loss_mask: 0.2209, decode.d2.loss_dice: 0.5956, decode.d3.loss_cls: 0.0895, decode.d3.loss_mask: 0.2195, decode.d3.loss_dice: 0.5864, decode.d4.loss_cls: 0.0876, decode.d4.loss_mask: 0.2202, decode.d4.loss_dice: 0.5911, decode.d5.loss_cls: 0.0821, decode.d5.loss_mask: 0.2197, decode.d5.loss_dice: 0.5936, decode.d6.loss_cls: 0.0845, decode.d6.loss_mask: 0.2197, decode.d6.loss_dice: 0.5892, decode.d7.loss_cls: 0.0865, decode.d7.loss_mask: 0.2191, decode.d7.loss_dice: 0.5877, decode.d8.loss_cls: 0.0845, decode.d8.loss_mask: 0.2194, decode.d8.loss_dice: 0.5949, loss: 9.2801 +2022-05-10 10:04:13,358 - mmseg - INFO - Iter [30700/80000] lr: 8.848e-07, eta: 1 day, 2:40:03, time: 1.833, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0932, decode.loss_mask: 0.2206, decode.loss_dice: 0.6023, decode.d0.loss_cls: 0.3187, decode.d0.loss_mask: 0.2314, decode.d0.loss_dice: 0.6384, decode.d1.loss_cls: 0.1136, decode.d1.loss_mask: 0.2238, decode.d1.loss_dice: 0.6153, decode.d2.loss_cls: 0.0925, decode.d2.loss_mask: 0.2221, decode.d2.loss_dice: 0.6099, decode.d3.loss_cls: 0.0940, decode.d3.loss_mask: 0.2207, decode.d3.loss_dice: 0.6045, decode.d4.loss_cls: 0.0892, decode.d4.loss_mask: 0.2211, decode.d4.loss_dice: 0.6061, decode.d5.loss_cls: 0.0900, decode.d5.loss_mask: 0.2210, decode.d5.loss_dice: 0.6033, decode.d6.loss_cls: 0.0846, decode.d6.loss_mask: 0.2209, decode.d6.loss_dice: 0.6024, decode.d7.loss_cls: 0.0857, decode.d7.loss_mask: 0.2212, decode.d7.loss_dice: 0.5999, decode.d8.loss_cls: 0.0879, decode.d8.loss_mask: 0.2205, decode.d8.loss_dice: 0.6037, loss: 9.4585 +2022-05-10 10:05:43,446 - mmseg - INFO - Iter [30750/80000] lr: 8.839e-07, eta: 1 day, 2:38:14, time: 1.801, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0846, decode.loss_mask: 0.2231, decode.loss_dice: 0.6061, decode.d0.loss_cls: 0.3154, decode.d0.loss_mask: 0.2343, decode.d0.loss_dice: 0.6402, 
decode.d1.loss_cls: 0.1038, decode.d1.loss_mask: 0.2242, decode.d1.loss_dice: 0.6197, decode.d2.loss_cls: 0.1013, decode.d2.loss_mask: 0.2232, decode.d2.loss_dice: 0.6131, decode.d3.loss_cls: 0.0883, decode.d3.loss_mask: 0.2241, decode.d3.loss_dice: 0.6083, decode.d4.loss_cls: 0.0926, decode.d4.loss_mask: 0.2244, decode.d4.loss_dice: 0.6055, decode.d5.loss_cls: 0.0916, decode.d5.loss_mask: 0.2239, decode.d5.loss_dice: 0.6078, decode.d6.loss_cls: 0.0900, decode.d6.loss_mask: 0.2244, decode.d6.loss_dice: 0.6045, decode.d7.loss_cls: 0.0890, decode.d7.loss_mask: 0.2243, decode.d7.loss_dice: 0.6052, decode.d8.loss_cls: 0.0909, decode.d8.loss_mask: 0.2239, decode.d8.loss_dice: 0.6058, loss: 9.5134 +2022-05-10 10:07:12,704 - mmseg - INFO - Iter [30800/80000] lr: 8.830e-07, eta: 1 day, 2:36:24, time: 1.787, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0796, decode.loss_mask: 0.2153, decode.loss_dice: 0.5948, decode.d0.loss_cls: 0.3142, decode.d0.loss_mask: 0.2259, decode.d0.loss_dice: 0.6282, decode.d1.loss_cls: 0.1060, decode.d1.loss_mask: 0.2175, decode.d1.loss_dice: 0.6087, decode.d2.loss_cls: 0.0896, decode.d2.loss_mask: 0.2165, decode.d2.loss_dice: 0.6010, decode.d3.loss_cls: 0.0850, decode.d3.loss_mask: 0.2159, decode.d3.loss_dice: 0.5988, decode.d4.loss_cls: 0.0826, decode.d4.loss_mask: 0.2164, decode.d4.loss_dice: 0.5992, decode.d5.loss_cls: 0.0896, decode.d5.loss_mask: 0.2156, decode.d5.loss_dice: 0.5998, decode.d6.loss_cls: 0.0880, decode.d6.loss_mask: 0.2156, decode.d6.loss_dice: 0.5968, decode.d7.loss_cls: 0.0858, decode.d7.loss_mask: 0.2159, decode.d7.loss_dice: 0.5939, decode.d8.loss_cls: 0.0802, decode.d8.loss_mask: 0.2158, decode.d8.loss_dice: 0.5977, loss: 9.2901 +2022-05-10 10:08:41,384 - mmseg - INFO - Iter [30850/80000] lr: 8.821e-07, eta: 1 day, 2:34:33, time: 1.774, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0844, decode.loss_mask: 0.2190, decode.loss_dice: 0.5955, decode.d0.loss_cls: 0.3279, decode.d0.loss_mask: 0.2269, 
decode.d0.loss_dice: 0.6293, decode.d1.loss_cls: 0.1047, decode.d1.loss_mask: 0.2210, decode.d1.loss_dice: 0.6036, decode.d2.loss_cls: 0.0848, decode.d2.loss_mask: 0.2195, decode.d2.loss_dice: 0.5986, decode.d3.loss_cls: 0.0859, decode.d3.loss_mask: 0.2183, decode.d3.loss_dice: 0.5945, decode.d4.loss_cls: 0.0844, decode.d4.loss_mask: 0.2182, decode.d4.loss_dice: 0.5951, decode.d5.loss_cls: 0.0805, decode.d5.loss_mask: 0.2184, decode.d5.loss_dice: 0.5961, decode.d6.loss_cls: 0.0804, decode.d6.loss_mask: 0.2191, decode.d6.loss_dice: 0.5913, decode.d7.loss_cls: 0.0811, decode.d7.loss_mask: 0.2185, decode.d7.loss_dice: 0.5958, decode.d8.loss_cls: 0.0822, decode.d8.loss_mask: 0.2175, decode.d8.loss_dice: 0.5941, loss: 9.2868 +2022-05-10 10:10:12,669 - mmseg - INFO - Iter [30900/80000] lr: 8.812e-07, eta: 1 day, 2:32:46, time: 1.826, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0740, decode.loss_mask: 0.2218, decode.loss_dice: 0.5968, decode.d0.loss_cls: 0.3035, decode.d0.loss_mask: 0.2307, decode.d0.loss_dice: 0.6290, decode.d1.loss_cls: 0.0957, decode.d1.loss_mask: 0.2226, decode.d1.loss_dice: 0.6050, decode.d2.loss_cls: 0.0821, decode.d2.loss_mask: 0.2222, decode.d2.loss_dice: 0.6024, decode.d3.loss_cls: 0.0792, decode.d3.loss_mask: 0.2219, decode.d3.loss_dice: 0.6004, decode.d4.loss_cls: 0.0853, decode.d4.loss_mask: 0.2219, decode.d4.loss_dice: 0.5998, decode.d5.loss_cls: 0.0805, decode.d5.loss_mask: 0.2217, decode.d5.loss_dice: 0.5940, decode.d6.loss_cls: 0.0765, decode.d6.loss_mask: 0.2219, decode.d6.loss_dice: 0.5984, decode.d7.loss_cls: 0.0784, decode.d7.loss_mask: 0.2222, decode.d7.loss_dice: 0.5976, decode.d8.loss_cls: 0.0700, decode.d8.loss_mask: 0.2219, decode.d8.loss_dice: 0.5967, loss: 9.2738 +2022-05-10 10:11:43,880 - mmseg - INFO - Iter [30950/80000] lr: 8.803e-07, eta: 1 day, 2:30:59, time: 1.824, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0804, decode.loss_mask: 0.2258, decode.loss_dice: 0.5927, decode.d0.loss_cls: 0.3227, 
decode.d0.loss_mask: 0.2354, decode.d0.loss_dice: 0.6315, decode.d1.loss_cls: 0.0926, decode.d1.loss_mask: 0.2284, decode.d1.loss_dice: 0.6058, decode.d2.loss_cls: 0.0896, decode.d2.loss_mask: 0.2271, decode.d2.loss_dice: 0.5999, decode.d3.loss_cls: 0.0834, decode.d3.loss_mask: 0.2262, decode.d3.loss_dice: 0.5972, decode.d4.loss_cls: 0.0875, decode.d4.loss_mask: 0.2259, decode.d4.loss_dice: 0.5923, decode.d5.loss_cls: 0.0878, decode.d5.loss_mask: 0.2259, decode.d5.loss_dice: 0.6016, decode.d6.loss_cls: 0.0876, decode.d6.loss_mask: 0.2251, decode.d6.loss_dice: 0.5949, decode.d7.loss_cls: 0.0861, decode.d7.loss_mask: 0.2256, decode.d7.loss_dice: 0.5974, decode.d8.loss_cls: 0.0871, decode.d8.loss_mask: 0.2254, decode.d8.loss_dice: 0.5954, loss: 9.3841 +2022-05-10 10:13:12,963 - mmseg - INFO - Saving checkpoint at 31000 iterations +2022-05-10 10:13:45,924 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 10:13:45,932 - mmseg - INFO - Iter [31000/80000] lr: 8.794e-07, eta: 1 day, 2:30:01, time: 2.439, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0772, decode.loss_mask: 0.2111, decode.loss_dice: 0.5897, decode.d0.loss_cls: 0.3211, decode.d0.loss_mask: 0.2200, decode.d0.loss_dice: 0.6258, decode.d1.loss_cls: 0.1038, decode.d1.loss_mask: 0.2136, decode.d1.loss_dice: 0.6024, decode.d2.loss_cls: 0.0900, decode.d2.loss_mask: 0.2129, decode.d2.loss_dice: 0.5954, decode.d3.loss_cls: 0.0842, decode.d3.loss_mask: 0.2119, decode.d3.loss_dice: 0.5935, decode.d4.loss_cls: 0.0875, decode.d4.loss_mask: 0.2121, decode.d4.loss_dice: 0.5895, decode.d5.loss_cls: 0.0869, decode.d5.loss_mask: 0.2123, decode.d5.loss_dice: 0.5876, decode.d6.loss_cls: 0.0829, decode.d6.loss_mask: 0.2123, decode.d6.loss_dice: 0.5920, decode.d7.loss_cls: 0.0801, decode.d7.loss_mask: 0.2119, decode.d7.loss_dice: 0.5892, decode.d8.loss_cls: 0.0845, decode.d8.loss_mask: 0.2110, decode.d8.loss_dice: 0.5895, loss: 9.1820 +2022-05-10 10:15:41,656 - mmseg - INFO - per 
class results: +2022-05-10 10:15:41,661 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.57 | 99.28 | +| sidewalk | 88.44 | 93.47 | +| building | 94.45 | 97.24 | +| wall | 68.3 | 82.46 | +| fence | 74.28 | 82.69 | +| pole | 71.28 | 82.95 | +| traffic light | 77.32 | 87.77 | +| traffic sign | 84.02 | 90.42 | +| vegetation | 93.3 | 96.65 | +| terrain | 68.29 | 78.56 | +| sky | 95.76 | 98.67 | +| person | 86.88 | 92.71 | +| rider | 74.05 | 87.12 | +| car | 96.28 | 98.04 | +| truck | 91.54 | 94.9 | +| bus | 93.93 | 96.74 | +| train | 88.33 | 90.79 | +| motorcycle | 78.3 | 86.37 | +| bicycle | 82.76 | 91.92 | ++---------------+-------+-------+ +2022-05-10 10:15:41,661 - mmseg - INFO - Summary: +2022-05-10 10:15:41,661 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.53 | 90.99 | ++------+-------+-------+ +2022-05-10 10:15:41,665 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 10:15:41,666 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8453, mAcc: 0.9099, IoU.road: 0.9857, IoU.sidewalk: 0.8844, IoU.building: 0.9445, IoU.wall: 0.6830, IoU.fence: 0.7428, IoU.pole: 0.7128, IoU.traffic light: 0.7732, IoU.traffic sign: 0.8402, IoU.vegetation: 0.9330, IoU.terrain: 0.6829, IoU.sky: 0.9576, IoU.person: 0.8688, IoU.rider: 0.7405, IoU.car: 0.9628, IoU.truck: 0.9154, IoU.bus: 0.9393, IoU.train: 0.8833, IoU.motorcycle: 0.7830, IoU.bicycle: 0.8276, Acc.road: 0.9928, Acc.sidewalk: 0.9347, Acc.building: 0.9724, Acc.wall: 0.8246, Acc.fence: 0.8269, Acc.pole: 0.8295, Acc.traffic light: 0.8777, Acc.traffic sign: 0.9042, Acc.vegetation: 0.9665, Acc.terrain: 0.7856, Acc.sky: 0.9867, Acc.person: 0.9271, Acc.rider: 0.8712, Acc.car: 0.9804, Acc.truck: 0.9490, Acc.bus: 0.9674, Acc.train: 0.9079, Acc.motorcycle: 0.8637, Acc.bicycle: 0.9192 +2022-05-10 10:17:10,491 - mmseg - INFO - Iter [31050/80000] lr: 8.785e-07, 
eta: 1 day, 2:31:13, time: 4.093, data_time: 2.335, memory: 64699, decode.loss_cls: 0.0947, decode.loss_mask: 0.2163, decode.loss_dice: 0.5968, decode.d0.loss_cls: 0.3212, decode.d0.loss_mask: 0.2255, decode.d0.loss_dice: 0.6406, decode.d1.loss_cls: 0.1119, decode.d1.loss_mask: 0.2185, decode.d1.loss_dice: 0.6116, decode.d2.loss_cls: 0.1007, decode.d2.loss_mask: 0.2169, decode.d2.loss_dice: 0.6009, decode.d3.loss_cls: 0.0972, decode.d3.loss_mask: 0.2163, decode.d3.loss_dice: 0.5988, decode.d4.loss_cls: 0.0967, decode.d4.loss_mask: 0.2164, decode.d4.loss_dice: 0.5983, decode.d5.loss_cls: 0.0956, decode.d5.loss_mask: 0.2162, decode.d5.loss_dice: 0.5973, decode.d6.loss_cls: 0.0892, decode.d6.loss_mask: 0.2166, decode.d6.loss_dice: 0.6022, decode.d7.loss_cls: 0.0967, decode.d7.loss_mask: 0.2167, decode.d7.loss_dice: 0.5982, decode.d8.loss_cls: 0.0969, decode.d8.loss_mask: 0.2168, decode.d8.loss_dice: 0.6002, loss: 9.4217 +2022-05-10 10:18:42,682 - mmseg - INFO - Iter [31100/80000] lr: 8.776e-07, eta: 1 day, 2:29:27, time: 1.844, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0775, decode.loss_mask: 0.2159, decode.loss_dice: 0.6022, decode.d0.loss_cls: 0.3201, decode.d0.loss_mask: 0.2258, decode.d0.loss_dice: 0.6395, decode.d1.loss_cls: 0.1037, decode.d1.loss_mask: 0.2185, decode.d1.loss_dice: 0.6105, decode.d2.loss_cls: 0.0962, decode.d2.loss_mask: 0.2157, decode.d2.loss_dice: 0.6073, decode.d3.loss_cls: 0.0826, decode.d3.loss_mask: 0.2170, decode.d3.loss_dice: 0.5943, decode.d4.loss_cls: 0.0904, decode.d4.loss_mask: 0.2161, decode.d4.loss_dice: 0.5988, decode.d5.loss_cls: 0.0859, decode.d5.loss_mask: 0.2158, decode.d5.loss_dice: 0.6029, decode.d6.loss_cls: 0.0817, decode.d6.loss_mask: 0.2148, decode.d6.loss_dice: 0.5983, decode.d7.loss_cls: 0.0808, decode.d7.loss_mask: 0.2149, decode.d7.loss_dice: 0.6008, decode.d8.loss_cls: 0.0868, decode.d8.loss_mask: 0.2144, decode.d8.loss_dice: 0.5964, loss: 9.3258 +2022-05-10 10:20:12,008 - mmseg - INFO - Iter [31150/80000] 
lr: 8.768e-07, eta: 1 day, 2:27:36, time: 1.786, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0895, decode.loss_mask: 0.2175, decode.loss_dice: 0.5954, decode.d0.loss_cls: 0.3328, decode.d0.loss_mask: 0.2273, decode.d0.loss_dice: 0.6328, decode.d1.loss_cls: 0.1100, decode.d1.loss_mask: 0.2193, decode.d1.loss_dice: 0.6138, decode.d2.loss_cls: 0.1030, decode.d2.loss_mask: 0.2187, decode.d2.loss_dice: 0.6044, decode.d3.loss_cls: 0.0966, decode.d3.loss_mask: 0.2177, decode.d3.loss_dice: 0.6009, decode.d4.loss_cls: 0.0949, decode.d4.loss_mask: 0.2180, decode.d4.loss_dice: 0.5989, decode.d5.loss_cls: 0.0870, decode.d5.loss_mask: 0.2178, decode.d5.loss_dice: 0.6050, decode.d6.loss_cls: 0.0884, decode.d6.loss_mask: 0.2182, decode.d6.loss_dice: 0.5995, decode.d7.loss_cls: 0.0916, decode.d7.loss_mask: 0.2176, decode.d7.loss_dice: 0.6014, decode.d8.loss_cls: 0.0857, decode.d8.loss_mask: 0.2175, decode.d8.loss_dice: 0.5979, loss: 9.4190 +2022-05-10 10:21:40,548 - mmseg - INFO - Iter [31200/80000] lr: 8.759e-07, eta: 1 day, 2:25:45, time: 1.771, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0924, decode.loss_mask: 0.2165, decode.loss_dice: 0.6012, decode.d0.loss_cls: 0.3309, decode.d0.loss_mask: 0.2245, decode.d0.loss_dice: 0.6342, decode.d1.loss_cls: 0.1125, decode.d1.loss_mask: 0.2189, decode.d1.loss_dice: 0.6134, decode.d2.loss_cls: 0.0998, decode.d2.loss_mask: 0.2178, decode.d2.loss_dice: 0.6119, decode.d3.loss_cls: 0.0963, decode.d3.loss_mask: 0.2166, decode.d3.loss_dice: 0.6034, decode.d4.loss_cls: 0.0925, decode.d4.loss_mask: 0.2164, decode.d4.loss_dice: 0.6006, decode.d5.loss_cls: 0.0960, decode.d5.loss_mask: 0.2163, decode.d5.loss_dice: 0.6025, decode.d6.loss_cls: 0.0957, decode.d6.loss_mask: 0.2168, decode.d6.loss_dice: 0.6057, decode.d7.loss_cls: 0.0919, decode.d7.loss_mask: 0.2160, decode.d7.loss_dice: 0.6048, decode.d8.loss_cls: 0.0959, decode.d8.loss_mask: 0.2165, decode.d8.loss_dice: 0.6017, loss: 9.4595 +2022-05-10 10:23:11,077 - mmseg - INFO - Iter 
[31250/80000] lr: 8.750e-07, eta: 1 day, 2:23:57, time: 1.811, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0937, decode.loss_mask: 0.2212, decode.loss_dice: 0.6049, decode.d0.loss_cls: 0.3204, decode.d0.loss_mask: 0.2315, decode.d0.loss_dice: 0.6434, decode.d1.loss_cls: 0.1020, decode.d1.loss_mask: 0.2226, decode.d1.loss_dice: 0.6140, decode.d2.loss_cls: 0.0990, decode.d2.loss_mask: 0.2216, decode.d2.loss_dice: 0.6102, decode.d3.loss_cls: 0.0872, decode.d3.loss_mask: 0.2214, decode.d3.loss_dice: 0.6076, decode.d4.loss_cls: 0.0937, decode.d4.loss_mask: 0.2206, decode.d4.loss_dice: 0.6089, decode.d5.loss_cls: 0.0875, decode.d5.loss_mask: 0.2202, decode.d5.loss_dice: 0.6045, decode.d6.loss_cls: 0.0897, decode.d6.loss_mask: 0.2209, decode.d6.loss_dice: 0.6071, decode.d7.loss_cls: 0.0918, decode.d7.loss_mask: 0.2207, decode.d7.loss_dice: 0.6051, decode.d8.loss_cls: 0.0874, decode.d8.loss_mask: 0.2207, decode.d8.loss_dice: 0.6042, loss: 9.4836 +2022-05-10 10:24:38,496 - mmseg - INFO - Iter [31300/80000] lr: 8.741e-07, eta: 1 day, 2:22:03, time: 1.748, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0938, decode.loss_mask: 0.2234, decode.loss_dice: 0.6034, decode.d0.loss_cls: 0.3410, decode.d0.loss_mask: 0.2329, decode.d0.loss_dice: 0.6394, decode.d1.loss_cls: 0.1059, decode.d1.loss_mask: 0.2252, decode.d1.loss_dice: 0.6188, decode.d2.loss_cls: 0.0960, decode.d2.loss_mask: 0.2236, decode.d2.loss_dice: 0.6102, decode.d3.loss_cls: 0.0876, decode.d3.loss_mask: 0.2230, decode.d3.loss_dice: 0.6049, decode.d4.loss_cls: 0.0875, decode.d4.loss_mask: 0.2240, decode.d4.loss_dice: 0.6038, decode.d5.loss_cls: 0.0875, decode.d5.loss_mask: 0.2244, decode.d5.loss_dice: 0.6035, decode.d6.loss_cls: 0.0933, decode.d6.loss_mask: 0.2234, decode.d6.loss_dice: 0.6028, decode.d7.loss_cls: 0.0872, decode.d7.loss_mask: 0.2240, decode.d7.loss_dice: 0.6046, decode.d8.loss_cls: 0.0896, decode.d8.loss_mask: 0.2237, decode.d8.loss_dice: 0.6059, loss: 9.5143 +2022-05-10 10:26:06,723 - mmseg 
- INFO - Iter [31350/80000] lr: 8.732e-07, eta: 1 day, 2:20:12, time: 1.764, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0735, decode.loss_mask: 0.2156, decode.loss_dice: 0.5983, decode.d0.loss_cls: 0.3129, decode.d0.loss_mask: 0.2249, decode.d0.loss_dice: 0.6334, decode.d1.loss_cls: 0.0959, decode.d1.loss_mask: 0.2183, decode.d1.loss_dice: 0.6161, decode.d2.loss_cls: 0.0851, decode.d2.loss_mask: 0.2165, decode.d2.loss_dice: 0.6105, decode.d3.loss_cls: 0.0755, decode.d3.loss_mask: 0.2162, decode.d3.loss_dice: 0.6015, decode.d4.loss_cls: 0.0823, decode.d4.loss_mask: 0.2162, decode.d4.loss_dice: 0.6013, decode.d5.loss_cls: 0.0806, decode.d5.loss_mask: 0.2161, decode.d5.loss_dice: 0.6060, decode.d6.loss_cls: 0.0790, decode.d6.loss_mask: 0.2158, decode.d6.loss_dice: 0.6029, decode.d7.loss_cls: 0.0829, decode.d7.loss_mask: 0.2154, decode.d7.loss_dice: 0.6020, decode.d8.loss_cls: 0.0763, decode.d8.loss_mask: 0.2156, decode.d8.loss_dice: 0.6032, loss: 9.2897 +2022-05-10 10:27:34,685 - mmseg - INFO - Iter [31400/80000] lr: 8.723e-07, eta: 1 day, 2:18:19, time: 1.760, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0877, decode.loss_mask: 0.2226, decode.loss_dice: 0.6032, decode.d0.loss_cls: 0.3143, decode.d0.loss_mask: 0.2327, decode.d0.loss_dice: 0.6446, decode.d1.loss_cls: 0.1076, decode.d1.loss_mask: 0.2251, decode.d1.loss_dice: 0.6212, decode.d2.loss_cls: 0.1010, decode.d2.loss_mask: 0.2240, decode.d2.loss_dice: 0.6099, decode.d3.loss_cls: 0.0937, decode.d3.loss_mask: 0.2220, decode.d3.loss_dice: 0.6057, decode.d4.loss_cls: 0.0922, decode.d4.loss_mask: 0.2232, decode.d4.loss_dice: 0.6041, decode.d5.loss_cls: 0.0897, decode.d5.loss_mask: 0.2235, decode.d5.loss_dice: 0.6046, decode.d6.loss_cls: 0.0873, decode.d6.loss_mask: 0.2223, decode.d6.loss_dice: 0.6020, decode.d7.loss_cls: 0.0862, decode.d7.loss_mask: 0.2228, decode.d7.loss_dice: 0.6068, decode.d8.loss_cls: 0.0857, decode.d8.loss_mask: 0.2232, decode.d8.loss_dice: 0.6019, loss: 9.4905 +2022-05-10 
10:29:06,129 - mmseg - INFO - Iter [31450/80000] lr: 8.714e-07, eta: 1 day, 2:16:33, time: 1.829, data_time: 0.064, memory: 64699, decode.loss_cls: 0.0785, decode.loss_mask: 0.2167, decode.loss_dice: 0.5891, decode.d0.loss_cls: 0.3281, decode.d0.loss_mask: 0.2261, decode.d0.loss_dice: 0.6250, decode.d1.loss_cls: 0.1047, decode.d1.loss_mask: 0.2186, decode.d1.loss_dice: 0.6035, decode.d2.loss_cls: 0.0907, decode.d2.loss_mask: 0.2180, decode.d2.loss_dice: 0.5915, decode.d3.loss_cls: 0.0871, decode.d3.loss_mask: 0.2176, decode.d3.loss_dice: 0.5882, decode.d4.loss_cls: 0.0905, decode.d4.loss_mask: 0.2171, decode.d4.loss_dice: 0.5913, decode.d5.loss_cls: 0.0906, decode.d5.loss_mask: 0.2171, decode.d5.loss_dice: 0.5923, decode.d6.loss_cls: 0.0832, decode.d6.loss_mask: 0.2166, decode.d6.loss_dice: 0.5873, decode.d7.loss_cls: 0.0833, decode.d7.loss_mask: 0.2162, decode.d7.loss_dice: 0.5914, decode.d8.loss_cls: 0.0851, decode.d8.loss_mask: 0.2164, decode.d8.loss_dice: 0.5933, loss: 9.2552 +2022-05-10 10:30:34,394 - mmseg - INFO - Iter [31500/80000] lr: 8.705e-07, eta: 1 day, 2:14:41, time: 1.766, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0781, decode.loss_mask: 0.2154, decode.loss_dice: 0.5928, decode.d0.loss_cls: 0.3127, decode.d0.loss_mask: 0.2245, decode.d0.loss_dice: 0.6328, decode.d1.loss_cls: 0.1136, decode.d1.loss_mask: 0.2173, decode.d1.loss_dice: 0.6071, decode.d2.loss_cls: 0.0895, decode.d2.loss_mask: 0.2162, decode.d2.loss_dice: 0.6015, decode.d3.loss_cls: 0.0786, decode.d3.loss_mask: 0.2159, decode.d3.loss_dice: 0.5912, decode.d4.loss_cls: 0.0847, decode.d4.loss_mask: 0.2158, decode.d4.loss_dice: 0.5947, decode.d5.loss_cls: 0.0872, decode.d5.loss_mask: 0.2168, decode.d5.loss_dice: 0.5953, decode.d6.loss_cls: 0.0770, decode.d6.loss_mask: 0.2161, decode.d6.loss_dice: 0.5962, decode.d7.loss_cls: 0.0794, decode.d7.loss_mask: 0.2156, decode.d7.loss_dice: 0.5929, decode.d8.loss_cls: 0.0825, decode.d8.loss_mask: 0.2160, decode.d8.loss_dice: 0.5903, loss: 
9.2478 +2022-05-10 10:32:02,204 - mmseg - INFO - Iter [31550/80000] lr: 8.696e-07, eta: 1 day, 2:12:49, time: 1.756, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0842, decode.loss_mask: 0.2225, decode.loss_dice: 0.6103, decode.d0.loss_cls: 0.3235, decode.d0.loss_mask: 0.2343, decode.d0.loss_dice: 0.6438, decode.d1.loss_cls: 0.1110, decode.d1.loss_mask: 0.2252, decode.d1.loss_dice: 0.6181, decode.d2.loss_cls: 0.0977, decode.d2.loss_mask: 0.2241, decode.d2.loss_dice: 0.6144, decode.d3.loss_cls: 0.0911, decode.d3.loss_mask: 0.2244, decode.d3.loss_dice: 0.6123, decode.d4.loss_cls: 0.0927, decode.d4.loss_mask: 0.2238, decode.d4.loss_dice: 0.6101, decode.d5.loss_cls: 0.0915, decode.d5.loss_mask: 0.2234, decode.d5.loss_dice: 0.6105, decode.d6.loss_cls: 0.0895, decode.d6.loss_mask: 0.2232, decode.d6.loss_dice: 0.6120, decode.d7.loss_cls: 0.1011, decode.d7.loss_mask: 0.2216, decode.d7.loss_dice: 0.6107, decode.d8.loss_cls: 0.0940, decode.d8.loss_mask: 0.2225, decode.d8.loss_dice: 0.6126, loss: 9.5762 +2022-05-10 10:33:30,097 - mmseg - INFO - Iter [31600/80000] lr: 8.687e-07, eta: 1 day, 2:10:57, time: 1.758, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0898, decode.loss_mask: 0.2189, decode.loss_dice: 0.6065, decode.d0.loss_cls: 0.3155, decode.d0.loss_mask: 0.2277, decode.d0.loss_dice: 0.6493, decode.d1.loss_cls: 0.1116, decode.d1.loss_mask: 0.2203, decode.d1.loss_dice: 0.6211, decode.d2.loss_cls: 0.0976, decode.d2.loss_mask: 0.2199, decode.d2.loss_dice: 0.6151, decode.d3.loss_cls: 0.0911, decode.d3.loss_mask: 0.2198, decode.d3.loss_dice: 0.6127, decode.d4.loss_cls: 0.0959, decode.d4.loss_mask: 0.2202, decode.d4.loss_dice: 0.6147, decode.d5.loss_cls: 0.0971, decode.d5.loss_mask: 0.2194, decode.d5.loss_dice: 0.6124, decode.d6.loss_cls: 0.0921, decode.d6.loss_mask: 0.2197, decode.d6.loss_dice: 0.6121, decode.d7.loss_cls: 0.0901, decode.d7.loss_mask: 0.2191, decode.d7.loss_dice: 0.6082, decode.d8.loss_cls: 0.0896, decode.d8.loss_mask: 0.2193, decode.d8.loss_dice: 
0.6101, loss: 9.5368 +2022-05-10 10:35:00,165 - mmseg - INFO - Iter [31650/80000] lr: 8.678e-07, eta: 1 day, 2:09:09, time: 1.801, data_time: 0.068, memory: 64699, decode.loss_cls: 0.0842, decode.loss_mask: 0.2203, decode.loss_dice: 0.5997, decode.d0.loss_cls: 0.3223, decode.d0.loss_mask: 0.2308, decode.d0.loss_dice: 0.6337, decode.d1.loss_cls: 0.1050, decode.d1.loss_mask: 0.2207, decode.d1.loss_dice: 0.6164, decode.d2.loss_cls: 0.0952, decode.d2.loss_mask: 0.2195, decode.d2.loss_dice: 0.6035, decode.d3.loss_cls: 0.0942, decode.d3.loss_mask: 0.2198, decode.d3.loss_dice: 0.6045, decode.d4.loss_cls: 0.0961, decode.d4.loss_mask: 0.2196, decode.d4.loss_dice: 0.6051, decode.d5.loss_cls: 0.0923, decode.d5.loss_mask: 0.2201, decode.d5.loss_dice: 0.6033, decode.d6.loss_cls: 0.0868, decode.d6.loss_mask: 0.2202, decode.d6.loss_dice: 0.6043, decode.d7.loss_cls: 0.0881, decode.d7.loss_mask: 0.2203, decode.d7.loss_dice: 0.5983, decode.d8.loss_cls: 0.0904, decode.d8.loss_mask: 0.2209, decode.d8.loss_dice: 0.6047, loss: 9.4405 +2022-05-10 10:36:29,078 - mmseg - INFO - Iter [31700/80000] lr: 8.669e-07, eta: 1 day, 2:07:18, time: 1.778, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0744, decode.loss_mask: 0.2229, decode.loss_dice: 0.5929, decode.d0.loss_cls: 0.3204, decode.d0.loss_mask: 0.2313, decode.d0.loss_dice: 0.6230, decode.d1.loss_cls: 0.0981, decode.d1.loss_mask: 0.2240, decode.d1.loss_dice: 0.6051, decode.d2.loss_cls: 0.0920, decode.d2.loss_mask: 0.2238, decode.d2.loss_dice: 0.5992, decode.d3.loss_cls: 0.0784, decode.d3.loss_mask: 0.2232, decode.d3.loss_dice: 0.5948, decode.d4.loss_cls: 0.0818, decode.d4.loss_mask: 0.2231, decode.d4.loss_dice: 0.5976, decode.d5.loss_cls: 0.0858, decode.d5.loss_mask: 0.2235, decode.d5.loss_dice: 0.5977, decode.d6.loss_cls: 0.0778, decode.d6.loss_mask: 0.2229, decode.d6.loss_dice: 0.5920, decode.d7.loss_cls: 0.0863, decode.d7.loss_mask: 0.2235, decode.d7.loss_dice: 0.5975, decode.d8.loss_cls: 0.0823, decode.d8.loss_mask: 0.2232, 
decode.d8.loss_dice: 0.5961, loss: 9.3146 +2022-05-10 10:37:56,773 - mmseg - INFO - Iter [31750/80000] lr: 8.660e-07, eta: 1 day, 2:05:26, time: 1.754, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0774, decode.loss_mask: 0.2149, decode.loss_dice: 0.5918, decode.d0.loss_cls: 0.3140, decode.d0.loss_mask: 0.2249, decode.d0.loss_dice: 0.6255, decode.d1.loss_cls: 0.1031, decode.d1.loss_mask: 0.2172, decode.d1.loss_dice: 0.6033, decode.d2.loss_cls: 0.0904, decode.d2.loss_mask: 0.2156, decode.d2.loss_dice: 0.5984, decode.d3.loss_cls: 0.0873, decode.d3.loss_mask: 0.2158, decode.d3.loss_dice: 0.5939, decode.d4.loss_cls: 0.0835, decode.d4.loss_mask: 0.2157, decode.d4.loss_dice: 0.5917, decode.d5.loss_cls: 0.0812, decode.d5.loss_mask: 0.2155, decode.d5.loss_dice: 0.5931, decode.d6.loss_cls: 0.0855, decode.d6.loss_mask: 0.2153, decode.d6.loss_dice: 0.5900, decode.d7.loss_cls: 0.0854, decode.d7.loss_mask: 0.2150, decode.d7.loss_dice: 0.5913, decode.d8.loss_cls: 0.0819, decode.d8.loss_mask: 0.2147, decode.d8.loss_dice: 0.5946, loss: 9.2280 +2022-05-10 10:39:25,415 - mmseg - INFO - Iter [31800/80000] lr: 8.651e-07, eta: 1 day, 2:03:36, time: 1.773, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0797, decode.loss_mask: 0.2198, decode.loss_dice: 0.6105, decode.d0.loss_cls: 0.3177, decode.d0.loss_mask: 0.2311, decode.d0.loss_dice: 0.6393, decode.d1.loss_cls: 0.0957, decode.d1.loss_mask: 0.2210, decode.d1.loss_dice: 0.6196, decode.d2.loss_cls: 0.0855, decode.d2.loss_mask: 0.2201, decode.d2.loss_dice: 0.6148, decode.d3.loss_cls: 0.0802, decode.d3.loss_mask: 0.2200, decode.d3.loss_dice: 0.6073, decode.d4.loss_cls: 0.0864, decode.d4.loss_mask: 0.2204, decode.d4.loss_dice: 0.6082, decode.d5.loss_cls: 0.0845, decode.d5.loss_mask: 0.2204, decode.d5.loss_dice: 0.6080, decode.d6.loss_cls: 0.0822, decode.d6.loss_mask: 0.2207, decode.d6.loss_dice: 0.6089, decode.d7.loss_cls: 0.0821, decode.d7.loss_mask: 0.2196, decode.d7.loss_dice: 0.6080, decode.d8.loss_cls: 0.0794, 
decode.d8.loss_mask: 0.2201, decode.d8.loss_dice: 0.6069, loss: 9.4179 +2022-05-10 10:40:55,149 - mmseg - INFO - Iter [31850/80000] lr: 8.642e-07, eta: 1 day, 2:01:47, time: 1.795, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0853, decode.loss_mask: 0.2151, decode.loss_dice: 0.5882, decode.d0.loss_cls: 0.3331, decode.d0.loss_mask: 0.2270, decode.d0.loss_dice: 0.6300, decode.d1.loss_cls: 0.1145, decode.d1.loss_mask: 0.2179, decode.d1.loss_dice: 0.6090, decode.d2.loss_cls: 0.0949, decode.d2.loss_mask: 0.2170, decode.d2.loss_dice: 0.6038, decode.d3.loss_cls: 0.0938, decode.d3.loss_mask: 0.2157, decode.d3.loss_dice: 0.5917, decode.d4.loss_cls: 0.0959, decode.d4.loss_mask: 0.2161, decode.d4.loss_dice: 0.5926, decode.d5.loss_cls: 0.0939, decode.d5.loss_mask: 0.2161, decode.d5.loss_dice: 0.5989, decode.d6.loss_cls: 0.0886, decode.d6.loss_mask: 0.2162, decode.d6.loss_dice: 0.5951, decode.d7.loss_cls: 0.0917, decode.d7.loss_mask: 0.2153, decode.d7.loss_dice: 0.5965, decode.d8.loss_cls: 0.0879, decode.d8.loss_mask: 0.2152, decode.d8.loss_dice: 0.5939, loss: 9.3511 +2022-05-10 10:42:22,738 - mmseg - INFO - Iter [31900/80000] lr: 8.633e-07, eta: 1 day, 1:59:55, time: 1.752, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0806, decode.loss_mask: 0.2181, decode.loss_dice: 0.6054, decode.d0.loss_cls: 0.3135, decode.d0.loss_mask: 0.2273, decode.d0.loss_dice: 0.6441, decode.d1.loss_cls: 0.1074, decode.d1.loss_mask: 0.2202, decode.d1.loss_dice: 0.6207, decode.d2.loss_cls: 0.0951, decode.d2.loss_mask: 0.2194, decode.d2.loss_dice: 0.6106, decode.d3.loss_cls: 0.0955, decode.d3.loss_mask: 0.2190, decode.d3.loss_dice: 0.6032, decode.d4.loss_cls: 0.0952, decode.d4.loss_mask: 0.2181, decode.d4.loss_dice: 0.6031, decode.d5.loss_cls: 0.0920, decode.d5.loss_mask: 0.2185, decode.d5.loss_dice: 0.6086, decode.d6.loss_cls: 0.0851, decode.d6.loss_mask: 0.2180, decode.d6.loss_dice: 0.6074, decode.d7.loss_cls: 0.0857, decode.d7.loss_mask: 0.2175, decode.d7.loss_dice: 0.6047, 
decode.d8.loss_cls: 0.0845, decode.d8.loss_mask: 0.2179, decode.d8.loss_dice: 0.6027, loss: 9.4394 +2022-05-10 10:43:51,261 - mmseg - INFO - Iter [31950/80000] lr: 8.624e-07, eta: 1 day, 1:58:05, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0863, decode.loss_mask: 0.2161, decode.loss_dice: 0.5960, decode.d0.loss_cls: 0.3269, decode.d0.loss_mask: 0.2255, decode.d0.loss_dice: 0.6341, decode.d1.loss_cls: 0.1008, decode.d1.loss_mask: 0.2177, decode.d1.loss_dice: 0.6090, decode.d2.loss_cls: 0.0996, decode.d2.loss_mask: 0.2160, decode.d2.loss_dice: 0.6015, decode.d3.loss_cls: 0.0898, decode.d3.loss_mask: 0.2159, decode.d3.loss_dice: 0.5964, decode.d4.loss_cls: 0.0928, decode.d4.loss_mask: 0.2159, decode.d4.loss_dice: 0.5969, decode.d5.loss_cls: 0.0985, decode.d5.loss_mask: 0.2159, decode.d5.loss_dice: 0.5979, decode.d6.loss_cls: 0.0940, decode.d6.loss_mask: 0.2159, decode.d6.loss_dice: 0.5964, decode.d7.loss_cls: 0.0918, decode.d7.loss_mask: 0.2157, decode.d7.loss_dice: 0.5972, decode.d8.loss_cls: 0.0840, decode.d8.loss_mask: 0.2156, decode.d8.loss_dice: 0.5990, loss: 9.3591 +2022-05-10 10:45:20,707 - mmseg - INFO - Saving checkpoint at 32000 iterations +2022-05-10 10:45:49,917 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 10:45:49,926 - mmseg - INFO - Iter [32000/80000] lr: 8.615e-07, eta: 1 day, 1:56:59, time: 2.371, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0779, decode.loss_mask: 0.2146, decode.loss_dice: 0.5858, decode.d0.loss_cls: 0.3050, decode.d0.loss_mask: 0.2242, decode.d0.loss_dice: 0.6192, decode.d1.loss_cls: 0.1018, decode.d1.loss_mask: 0.2174, decode.d1.loss_dice: 0.6057, decode.d2.loss_cls: 0.0922, decode.d2.loss_mask: 0.2156, decode.d2.loss_dice: 0.5990, decode.d3.loss_cls: 0.0849, decode.d3.loss_mask: 0.2150, decode.d3.loss_dice: 0.5879, decode.d4.loss_cls: 0.0795, decode.d4.loss_mask: 0.2150, decode.d4.loss_dice: 0.5939, decode.d5.loss_cls: 0.0757, decode.d5.loss_mask: 
0.2151, decode.d5.loss_dice: 0.5958, decode.d6.loss_cls: 0.0786, decode.d6.loss_mask: 0.2152, decode.d6.loss_dice: 0.5877, decode.d7.loss_cls: 0.0794, decode.d7.loss_mask: 0.2154, decode.d7.loss_dice: 0.5932, decode.d8.loss_cls: 0.0796, decode.d8.loss_mask: 0.2150, decode.d8.loss_dice: 0.5883, loss: 9.1738 +2022-05-10 10:47:45,298 - mmseg - INFO - per class results: +2022-05-10 10:47:45,302 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.7 | 99.22 | +| sidewalk | 89.47 | 94.89 | +| building | 94.33 | 96.95 | +| wall | 68.0 | 78.97 | +| fence | 74.32 | 82.38 | +| pole | 71.14 | 83.28 | +| traffic light | 77.15 | 88.96 | +| traffic sign | 84.22 | 91.01 | +| vegetation | 93.31 | 96.86 | +| terrain | 67.56 | 77.76 | +| sky | 95.9 | 98.39 | +| person | 87.04 | 94.08 | +| rider | 75.0 | 85.07 | +| car | 96.27 | 98.29 | +| truck | 82.18 | 95.19 | +| bus | 93.84 | 96.8 | +| train | 88.2 | 91.35 | +| motorcycle | 76.4 | 90.17 | +| bicycle | 82.77 | 91.7 | ++---------------+-------+-------+ +2022-05-10 10:47:45,303 - mmseg - INFO - Summary: +2022-05-10 10:47:45,303 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.03 | 83.99 | 91.12 | ++-------+-------+-------+ +2022-05-10 10:47:45,306 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 10:47:45,306 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8399, mAcc: 0.9112, IoU.road: 0.9870, IoU.sidewalk: 0.8947, IoU.building: 0.9433, IoU.wall: 0.6800, IoU.fence: 0.7432, IoU.pole: 0.7114, IoU.traffic light: 0.7715, IoU.traffic sign: 0.8422, IoU.vegetation: 0.9331, IoU.terrain: 0.6756, IoU.sky: 0.9590, IoU.person: 0.8704, IoU.rider: 0.7500, IoU.car: 0.9627, IoU.truck: 0.8218, IoU.bus: 0.9384, IoU.train: 0.8820, IoU.motorcycle: 0.7640, IoU.bicycle: 0.8277, Acc.road: 0.9922, Acc.sidewalk: 0.9489, Acc.building: 0.9695, Acc.wall: 0.7897, Acc.fence: 0.8238, Acc.pole: 
0.8328, Acc.traffic light: 0.8896, Acc.traffic sign: 0.9101, Acc.vegetation: 0.9686, Acc.terrain: 0.7776, Acc.sky: 0.9839, Acc.person: 0.9408, Acc.rider: 0.8507, Acc.car: 0.9829, Acc.truck: 0.9519, Acc.bus: 0.9680, Acc.train: 0.9135, Acc.motorcycle: 0.9017, Acc.bicycle: 0.9170 +2022-05-10 10:49:14,124 - mmseg - INFO - Iter [32050/80000] lr: 8.606e-07, eta: 1 day, 1:58:02, time: 4.087, data_time: 2.327, memory: 64699, decode.loss_cls: 0.0838, decode.loss_mask: 0.2183, decode.loss_dice: 0.5836, decode.d0.loss_cls: 0.3228, decode.d0.loss_mask: 0.2281, decode.d0.loss_dice: 0.6245, decode.d1.loss_cls: 0.1076, decode.d1.loss_mask: 0.2215, decode.d1.loss_dice: 0.5954, decode.d2.loss_cls: 0.0920, decode.d2.loss_mask: 0.2191, decode.d2.loss_dice: 0.5949, decode.d3.loss_cls: 0.0927, decode.d3.loss_mask: 0.2195, decode.d3.loss_dice: 0.5886, decode.d4.loss_cls: 0.0919, decode.d4.loss_mask: 0.2184, decode.d4.loss_dice: 0.5853, decode.d5.loss_cls: 0.0881, decode.d5.loss_mask: 0.2184, decode.d5.loss_dice: 0.5898, decode.d6.loss_cls: 0.0890, decode.d6.loss_mask: 0.2191, decode.d6.loss_dice: 0.5873, decode.d7.loss_cls: 0.0927, decode.d7.loss_mask: 0.2182, decode.d7.loss_dice: 0.5835, decode.d8.loss_cls: 0.0897, decode.d8.loss_mask: 0.2182, decode.d8.loss_dice: 0.5859, loss: 9.2677 +2022-05-10 10:50:42,288 - mmseg - INFO - Iter [32100/80000] lr: 8.597e-07, eta: 1 day, 1:56:11, time: 1.763, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0784, decode.loss_mask: 0.2181, decode.loss_dice: 0.6014, decode.d0.loss_cls: 0.3209, decode.d0.loss_mask: 0.2312, decode.d0.loss_dice: 0.6385, decode.d1.loss_cls: 0.0998, decode.d1.loss_mask: 0.2207, decode.d1.loss_dice: 0.6083, decode.d2.loss_cls: 0.0936, decode.d2.loss_mask: 0.2195, decode.d2.loss_dice: 0.6106, decode.d3.loss_cls: 0.0864, decode.d3.loss_mask: 0.2195, decode.d3.loss_dice: 0.6048, decode.d4.loss_cls: 0.0881, decode.d4.loss_mask: 0.2187, decode.d4.loss_dice: 0.6028, decode.d5.loss_cls: 0.0842, decode.d5.loss_mask: 0.2180, 
decode.d5.loss_dice: 0.6055, decode.d6.loss_cls: 0.0809, decode.d6.loss_mask: 0.2180, decode.d6.loss_dice: 0.5981, decode.d7.loss_cls: 0.0845, decode.d7.loss_mask: 0.2183, decode.d7.loss_dice: 0.6019, decode.d8.loss_cls: 0.0851, decode.d8.loss_mask: 0.2184, decode.d8.loss_dice: 0.5998, loss: 9.3741 +2022-05-10 10:52:11,067 - mmseg - INFO - Iter [32150/80000] lr: 8.588e-07, eta: 1 day, 1:54:20, time: 1.776, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0811, decode.loss_mask: 0.2163, decode.loss_dice: 0.5928, decode.d0.loss_cls: 0.3183, decode.d0.loss_mask: 0.2262, decode.d0.loss_dice: 0.6321, decode.d1.loss_cls: 0.1070, decode.d1.loss_mask: 0.2186, decode.d1.loss_dice: 0.6085, decode.d2.loss_cls: 0.0904, decode.d2.loss_mask: 0.2178, decode.d2.loss_dice: 0.6055, decode.d3.loss_cls: 0.0845, decode.d3.loss_mask: 0.2175, decode.d3.loss_dice: 0.5947, decode.d4.loss_cls: 0.0866, decode.d4.loss_mask: 0.2170, decode.d4.loss_dice: 0.5959, decode.d5.loss_cls: 0.0846, decode.d5.loss_mask: 0.2164, decode.d5.loss_dice: 0.5974, decode.d6.loss_cls: 0.0842, decode.d6.loss_mask: 0.2164, decode.d6.loss_dice: 0.5965, decode.d7.loss_cls: 0.0875, decode.d7.loss_mask: 0.2165, decode.d7.loss_dice: 0.5969, decode.d8.loss_cls: 0.0885, decode.d8.loss_mask: 0.2163, decode.d8.loss_dice: 0.5968, loss: 9.3090 +2022-05-10 10:53:40,712 - mmseg - INFO - Iter [32200/80000] lr: 8.579e-07, eta: 1 day, 1:52:31, time: 1.793, data_time: 0.064, memory: 64699, decode.loss_cls: 0.0836, decode.loss_mask: 0.2168, decode.loss_dice: 0.5972, decode.d0.loss_cls: 0.3134, decode.d0.loss_mask: 0.2261, decode.d0.loss_dice: 0.6241, decode.d1.loss_cls: 0.0924, decode.d1.loss_mask: 0.2178, decode.d1.loss_dice: 0.6021, decode.d2.loss_cls: 0.0901, decode.d2.loss_mask: 0.2164, decode.d2.loss_dice: 0.6007, decode.d3.loss_cls: 0.0876, decode.d3.loss_mask: 0.2166, decode.d3.loss_dice: 0.5972, decode.d4.loss_cls: 0.0864, decode.d4.loss_mask: 0.2165, decode.d4.loss_dice: 0.5896, decode.d5.loss_cls: 0.0841, 
decode.d5.loss_mask: 0.2168, decode.d5.loss_dice: 0.5934, decode.d6.loss_cls: 0.0834, decode.d6.loss_mask: 0.2163, decode.d6.loss_dice: 0.5900, decode.d7.loss_cls: 0.0764, decode.d7.loss_mask: 0.2165, decode.d7.loss_dice: 0.5935, decode.d8.loss_cls: 0.0841, decode.d8.loss_mask: 0.2165, decode.d8.loss_dice: 0.5961, loss: 9.2417 +2022-05-10 10:55:08,739 - mmseg - INFO - Iter [32250/80000] lr: 8.570e-07, eta: 1 day, 1:50:40, time: 1.760, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0782, decode.loss_mask: 0.2173, decode.loss_dice: 0.5961, decode.d0.loss_cls: 0.3165, decode.d0.loss_mask: 0.2270, decode.d0.loss_dice: 0.6239, decode.d1.loss_cls: 0.0895, decode.d1.loss_mask: 0.2185, decode.d1.loss_dice: 0.6081, decode.d2.loss_cls: 0.0851, decode.d2.loss_mask: 0.2186, decode.d2.loss_dice: 0.6023, decode.d3.loss_cls: 0.0847, decode.d3.loss_mask: 0.2176, decode.d3.loss_dice: 0.5958, decode.d4.loss_cls: 0.0798, decode.d4.loss_mask: 0.2184, decode.d4.loss_dice: 0.5955, decode.d5.loss_cls: 0.0759, decode.d5.loss_mask: 0.2176, decode.d5.loss_dice: 0.5963, decode.d6.loss_cls: 0.0710, decode.d6.loss_mask: 0.2167, decode.d6.loss_dice: 0.5944, decode.d7.loss_cls: 0.0748, decode.d7.loss_mask: 0.2164, decode.d7.loss_dice: 0.5967, decode.d8.loss_cls: 0.0751, decode.d8.loss_mask: 0.2167, decode.d8.loss_dice: 0.5987, loss: 9.2227 +2022-05-10 10:56:36,572 - mmseg - INFO - Iter [32300/80000] lr: 8.561e-07, eta: 1 day, 1:48:48, time: 1.757, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0742, decode.loss_mask: 0.2120, decode.loss_dice: 0.5784, decode.d0.loss_cls: 0.3027, decode.d0.loss_mask: 0.2212, decode.d0.loss_dice: 0.6081, decode.d1.loss_cls: 0.0867, decode.d1.loss_mask: 0.2130, decode.d1.loss_dice: 0.5888, decode.d2.loss_cls: 0.0850, decode.d2.loss_mask: 0.2123, decode.d2.loss_dice: 0.5840, decode.d3.loss_cls: 0.0820, decode.d3.loss_mask: 0.2119, decode.d3.loss_dice: 0.5805, decode.d4.loss_cls: 0.0811, decode.d4.loss_mask: 0.2114, decode.d4.loss_dice: 0.5789, 
decode.d5.loss_cls: 0.0741, decode.d5.loss_mask: 0.2119, decode.d5.loss_dice: 0.5800, decode.d6.loss_cls: 0.0707, decode.d6.loss_mask: 0.2116, decode.d6.loss_dice: 0.5772, decode.d7.loss_cls: 0.0746, decode.d7.loss_mask: 0.2119, decode.d7.loss_dice: 0.5793, decode.d8.loss_cls: 0.0722, decode.d8.loss_mask: 0.2116, decode.d8.loss_dice: 0.5800, loss: 8.9672 +2022-05-10 10:58:04,603 - mmseg - INFO - Iter [32350/80000] lr: 8.552e-07, eta: 1 day, 1:46:57, time: 1.761, data_time: 0.019, memory: 64699, decode.loss_cls: 0.1022, decode.loss_mask: 0.2172, decode.loss_dice: 0.6054, decode.d0.loss_cls: 0.3273, decode.d0.loss_mask: 0.2264, decode.d0.loss_dice: 0.6415, decode.d1.loss_cls: 0.1330, decode.d1.loss_mask: 0.2189, decode.d1.loss_dice: 0.6202, decode.d2.loss_cls: 0.1238, decode.d2.loss_mask: 0.2178, decode.d2.loss_dice: 0.6147, decode.d3.loss_cls: 0.1066, decode.d3.loss_mask: 0.2173, decode.d3.loss_dice: 0.6109, decode.d4.loss_cls: 0.1088, decode.d4.loss_mask: 0.2168, decode.d4.loss_dice: 0.6085, decode.d5.loss_cls: 0.1039, decode.d5.loss_mask: 0.2163, decode.d5.loss_dice: 0.6119, decode.d6.loss_cls: 0.0989, decode.d6.loss_mask: 0.2163, decode.d6.loss_dice: 0.6081, decode.d7.loss_cls: 0.1044, decode.d7.loss_mask: 0.2164, decode.d7.loss_dice: 0.6027, decode.d8.loss_cls: 0.1067, decode.d8.loss_mask: 0.2172, decode.d8.loss_dice: 0.6037, loss: 9.6237 +2022-05-10 10:59:35,248 - mmseg - INFO - Iter [32400/80000] lr: 8.543e-07, eta: 1 day, 1:45:10, time: 1.813, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0761, decode.loss_mask: 0.2182, decode.loss_dice: 0.5814, decode.d0.loss_cls: 0.3055, decode.d0.loss_mask: 0.2279, decode.d0.loss_dice: 0.6130, decode.d1.loss_cls: 0.0923, decode.d1.loss_mask: 0.2213, decode.d1.loss_dice: 0.5900, decode.d2.loss_cls: 0.0853, decode.d2.loss_mask: 0.2198, decode.d2.loss_dice: 0.5860, decode.d3.loss_cls: 0.0841, decode.d3.loss_mask: 0.2190, decode.d3.loss_dice: 0.5806, decode.d4.loss_cls: 0.0766, decode.d4.loss_mask: 0.2183, 
decode.d4.loss_dice: 0.5806, decode.d5.loss_cls: 0.0783, decode.d5.loss_mask: 0.2192, decode.d5.loss_dice: 0.5816, decode.d6.loss_cls: 0.0772, decode.d6.loss_mask: 0.2188, decode.d6.loss_dice: 0.5847, decode.d7.loss_cls: 0.0818, decode.d7.loss_mask: 0.2187, decode.d7.loss_dice: 0.5815, decode.d8.loss_cls: 0.0756, decode.d8.loss_mask: 0.2181, decode.d8.loss_dice: 0.5771, loss: 9.0888 +2022-05-10 11:01:03,083 - mmseg - INFO - Iter [32450/80000] lr: 8.534e-07, eta: 1 day, 1:43:18, time: 1.757, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0931, decode.loss_mask: 0.2169, decode.loss_dice: 0.5904, decode.d0.loss_cls: 0.3137, decode.d0.loss_mask: 0.2263, decode.d0.loss_dice: 0.6172, decode.d1.loss_cls: 0.1030, decode.d1.loss_mask: 0.2193, decode.d1.loss_dice: 0.5978, decode.d2.loss_cls: 0.0994, decode.d2.loss_mask: 0.2178, decode.d2.loss_dice: 0.5926, decode.d3.loss_cls: 0.0945, decode.d3.loss_mask: 0.2170, decode.d3.loss_dice: 0.5909, decode.d4.loss_cls: 0.0971, decode.d4.loss_mask: 0.2170, decode.d4.loss_dice: 0.5856, decode.d5.loss_cls: 0.0945, decode.d5.loss_mask: 0.2174, decode.d5.loss_dice: 0.5916, decode.d6.loss_cls: 0.0953, decode.d6.loss_mask: 0.2176, decode.d6.loss_dice: 0.5913, decode.d7.loss_cls: 0.0862, decode.d7.loss_mask: 0.2173, decode.d7.loss_dice: 0.5871, decode.d8.loss_cls: 0.0958, decode.d8.loss_mask: 0.2175, decode.d8.loss_dice: 0.5915, loss: 9.2925 +2022-05-10 11:02:31,354 - mmseg - INFO - Iter [32500/80000] lr: 8.525e-07, eta: 1 day, 1:41:28, time: 1.765, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0794, decode.loss_mask: 0.2147, decode.loss_dice: 0.5848, decode.d0.loss_cls: 0.3088, decode.d0.loss_mask: 0.2249, decode.d0.loss_dice: 0.6116, decode.d1.loss_cls: 0.0964, decode.d1.loss_mask: 0.2160, decode.d1.loss_dice: 0.5934, decode.d2.loss_cls: 0.0855, decode.d2.loss_mask: 0.2153, decode.d2.loss_dice: 0.5895, decode.d3.loss_cls: 0.0840, decode.d3.loss_mask: 0.2143, decode.d3.loss_dice: 0.5826, decode.d4.loss_cls: 0.0850, 
decode.d4.loss_mask: 0.2145, decode.d4.loss_dice: 0.5815, decode.d5.loss_cls: 0.0854, decode.d5.loss_mask: 0.2153, decode.d5.loss_dice: 0.5839, decode.d6.loss_cls: 0.0777, decode.d6.loss_mask: 0.2142, decode.d6.loss_dice: 0.5838, decode.d7.loss_cls: 0.0796, decode.d7.loss_mask: 0.2141, decode.d7.loss_dice: 0.5840, decode.d8.loss_cls: 0.0788, decode.d8.loss_mask: 0.2147, decode.d8.loss_dice: 0.5807, loss: 9.0946 +2022-05-10 11:03:59,528 - mmseg - INFO - Iter [32550/80000] lr: 8.516e-07, eta: 1 day, 1:39:37, time: 1.763, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0758, decode.loss_mask: 0.2171, decode.loss_dice: 0.5928, decode.d0.loss_cls: 0.3092, decode.d0.loss_mask: 0.2277, decode.d0.loss_dice: 0.6307, decode.d1.loss_cls: 0.0978, decode.d1.loss_mask: 0.2188, decode.d1.loss_dice: 0.6068, decode.d2.loss_cls: 0.0926, decode.d2.loss_mask: 0.2188, decode.d2.loss_dice: 0.6001, decode.d3.loss_cls: 0.0902, decode.d3.loss_mask: 0.2177, decode.d3.loss_dice: 0.5955, decode.d4.loss_cls: 0.0776, decode.d4.loss_mask: 0.2170, decode.d4.loss_dice: 0.5894, decode.d5.loss_cls: 0.0800, decode.d5.loss_mask: 0.2160, decode.d5.loss_dice: 0.5972, decode.d6.loss_cls: 0.0832, decode.d6.loss_mask: 0.2156, decode.d6.loss_dice: 0.5904, decode.d7.loss_cls: 0.0794, decode.d7.loss_mask: 0.2169, decode.d7.loss_dice: 0.5933, decode.d8.loss_cls: 0.0750, decode.d8.loss_mask: 0.2171, decode.d8.loss_dice: 0.5931, loss: 9.2327 +2022-05-10 11:05:30,051 - mmseg - INFO - Iter [32600/80000] lr: 8.507e-07, eta: 1 day, 1:37:50, time: 1.810, data_time: 0.064, memory: 64699, decode.loss_cls: 0.0793, decode.loss_mask: 0.2121, decode.loss_dice: 0.5840, decode.d0.loss_cls: 0.3225, decode.d0.loss_mask: 0.2224, decode.d0.loss_dice: 0.6262, decode.d1.loss_cls: 0.1075, decode.d1.loss_mask: 0.2130, decode.d1.loss_dice: 0.5928, decode.d2.loss_cls: 0.0908, decode.d2.loss_mask: 0.2129, decode.d2.loss_dice: 0.5898, decode.d3.loss_cls: 0.0891, decode.d3.loss_mask: 0.2126, decode.d3.loss_dice: 0.5906, 
decode.d4.loss_cls: 0.0904, decode.d4.loss_mask: 0.2123, decode.d4.loss_dice: 0.5872, decode.d5.loss_cls: 0.0852, decode.d5.loss_mask: 0.2121, decode.d5.loss_dice: 0.5867, decode.d6.loss_cls: 0.0823, decode.d6.loss_mask: 0.2119, decode.d6.loss_dice: 0.5818, decode.d7.loss_cls: 0.0824, decode.d7.loss_mask: 0.2124, decode.d7.loss_dice: 0.5854, decode.d8.loss_cls: 0.0898, decode.d8.loss_mask: 0.2114, decode.d8.loss_dice: 0.5859, loss: 9.1628 +2022-05-10 11:06:57,612 - mmseg - INFO - Iter [32650/80000] lr: 8.498e-07, eta: 1 day, 1:35:58, time: 1.752, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0735, decode.loss_mask: 0.2126, decode.loss_dice: 0.5988, decode.d0.loss_cls: 0.3205, decode.d0.loss_mask: 0.2220, decode.d0.loss_dice: 0.6254, decode.d1.loss_cls: 0.1005, decode.d1.loss_mask: 0.2146, decode.d1.loss_dice: 0.6003, decode.d2.loss_cls: 0.0836, decode.d2.loss_mask: 0.2139, decode.d2.loss_dice: 0.5996, decode.d3.loss_cls: 0.0834, decode.d3.loss_mask: 0.2126, decode.d3.loss_dice: 0.5916, decode.d4.loss_cls: 0.0793, decode.d4.loss_mask: 0.2129, decode.d4.loss_dice: 0.5959, decode.d5.loss_cls: 0.0773, decode.d5.loss_mask: 0.2132, decode.d5.loss_dice: 0.5988, decode.d6.loss_cls: 0.0745, decode.d6.loss_mask: 0.2132, decode.d6.loss_dice: 0.5882, decode.d7.loss_cls: 0.0810, decode.d7.loss_mask: 0.2124, decode.d7.loss_dice: 0.5880, decode.d8.loss_cls: 0.0770, decode.d8.loss_mask: 0.2125, decode.d8.loss_dice: 0.5910, loss: 9.1682 +2022-05-10 11:08:25,792 - mmseg - INFO - Iter [32700/80000] lr: 8.489e-07, eta: 1 day, 1:34:08, time: 1.763, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0876, decode.loss_mask: 0.2155, decode.loss_dice: 0.6000, decode.d0.loss_cls: 0.3214, decode.d0.loss_mask: 0.2268, decode.d0.loss_dice: 0.6295, decode.d1.loss_cls: 0.1060, decode.d1.loss_mask: 0.2192, decode.d1.loss_dice: 0.6063, decode.d2.loss_cls: 0.1017, decode.d2.loss_mask: 0.2168, decode.d2.loss_dice: 0.6013, decode.d3.loss_cls: 0.0868, decode.d3.loss_mask: 0.2167, 
decode.d3.loss_dice: 0.5951, decode.d4.loss_cls: 0.0889, decode.d4.loss_mask: 0.2168, decode.d4.loss_dice: 0.6025, decode.d5.loss_cls: 0.0870, decode.d5.loss_mask: 0.2165, decode.d5.loss_dice: 0.5980, decode.d6.loss_cls: 0.0869, decode.d6.loss_mask: 0.2158, decode.d6.loss_dice: 0.5965, decode.d7.loss_cls: 0.0895, decode.d7.loss_mask: 0.2157, decode.d7.loss_dice: 0.6029, decode.d8.loss_cls: 0.0846, decode.d8.loss_mask: 0.2155, decode.d8.loss_dice: 0.5970, loss: 9.3451 +2022-05-10 11:09:55,962 - mmseg - INFO - Iter [32750/80000] lr: 8.480e-07, eta: 1 day, 1:32:20, time: 1.803, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0756, decode.loss_mask: 0.2201, decode.loss_dice: 0.6015, decode.d0.loss_cls: 0.3117, decode.d0.loss_mask: 0.2293, decode.d0.loss_dice: 0.6338, decode.d1.loss_cls: 0.0862, decode.d1.loss_mask: 0.2230, decode.d1.loss_dice: 0.6144, decode.d2.loss_cls: 0.0760, decode.d2.loss_mask: 0.2210, decode.d2.loss_dice: 0.6067, decode.d3.loss_cls: 0.0736, decode.d3.loss_mask: 0.2215, decode.d3.loss_dice: 0.6041, decode.d4.loss_cls: 0.0783, decode.d4.loss_mask: 0.2203, decode.d4.loss_dice: 0.6046, decode.d5.loss_cls: 0.0718, decode.d5.loss_mask: 0.2204, decode.d5.loss_dice: 0.6055, decode.d6.loss_cls: 0.0673, decode.d6.loss_mask: 0.2204, decode.d6.loss_dice: 0.6051, decode.d7.loss_cls: 0.0697, decode.d7.loss_mask: 0.2207, decode.d7.loss_dice: 0.6065, decode.d8.loss_cls: 0.0727, decode.d8.loss_mask: 0.2210, decode.d8.loss_dice: 0.6048, loss: 9.2879 +2022-05-10 11:11:24,352 - mmseg - INFO - Iter [32800/80000] lr: 8.471e-07, eta: 1 day, 1:30:30, time: 1.768, data_time: 0.017, memory: 64699, decode.loss_cls: 0.0786, decode.loss_mask: 0.2168, decode.loss_dice: 0.5814, decode.d0.loss_cls: 0.3104, decode.d0.loss_mask: 0.2274, decode.d0.loss_dice: 0.6100, decode.d1.loss_cls: 0.0998, decode.d1.loss_mask: 0.2191, decode.d1.loss_dice: 0.5967, decode.d2.loss_cls: 0.0906, decode.d2.loss_mask: 0.2176, decode.d2.loss_dice: 0.5916, decode.d3.loss_cls: 0.0785, 
decode.d3.loss_mask: 0.2171, decode.d3.loss_dice: 0.5830, decode.d4.loss_cls: 0.0840, decode.d4.loss_mask: 0.2173, decode.d4.loss_dice: 0.5849, decode.d5.loss_cls: 0.0824, decode.d5.loss_mask: 0.2174, decode.d5.loss_dice: 0.5889, decode.d6.loss_cls: 0.0809, decode.d6.loss_mask: 0.2169, decode.d6.loss_dice: 0.5801, decode.d7.loss_cls: 0.0798, decode.d7.loss_mask: 0.2168, decode.d7.loss_dice: 0.5826, decode.d8.loss_cls: 0.0790, decode.d8.loss_mask: 0.2167, decode.d8.loss_dice: 0.5820, loss: 9.1281 +2022-05-10 11:12:52,321 - mmseg - INFO - Iter [32850/80000] lr: 8.462e-07, eta: 1 day, 1:28:39, time: 1.759, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0787, decode.loss_mask: 0.2152, decode.loss_dice: 0.5897, decode.d0.loss_cls: 0.3235, decode.d0.loss_mask: 0.2229, decode.d0.loss_dice: 0.6249, decode.d1.loss_cls: 0.0947, decode.d1.loss_mask: 0.2170, decode.d1.loss_dice: 0.6014, decode.d2.loss_cls: 0.0848, decode.d2.loss_mask: 0.2165, decode.d2.loss_dice: 0.5990, decode.d3.loss_cls: 0.0810, decode.d3.loss_mask: 0.2154, decode.d3.loss_dice: 0.5949, decode.d4.loss_cls: 0.0814, decode.d4.loss_mask: 0.2154, decode.d4.loss_dice: 0.5943, decode.d5.loss_cls: 0.0811, decode.d5.loss_mask: 0.2153, decode.d5.loss_dice: 0.5935, decode.d6.loss_cls: 0.0767, decode.d6.loss_mask: 0.2151, decode.d6.loss_dice: 0.5915, decode.d7.loss_cls: 0.0728, decode.d7.loss_mask: 0.2154, decode.d7.loss_dice: 0.5956, decode.d8.loss_cls: 0.0823, decode.d8.loss_mask: 0.2148, decode.d8.loss_dice: 0.5932, loss: 9.1979 +2022-05-10 11:14:19,528 - mmseg - INFO - Iter [32900/80000] lr: 8.453e-07, eta: 1 day, 1:26:48, time: 1.744, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0891, decode.loss_mask: 0.2157, decode.loss_dice: 0.5964, decode.d0.loss_cls: 0.3185, decode.d0.loss_mask: 0.2259, decode.d0.loss_dice: 0.6243, decode.d1.loss_cls: 0.0996, decode.d1.loss_mask: 0.2169, decode.d1.loss_dice: 0.6070, decode.d2.loss_cls: 0.1012, decode.d2.loss_mask: 0.2163, decode.d2.loss_dice: 0.5962, 
decode.d3.loss_cls: 0.0915, decode.d3.loss_mask: 0.2152, decode.d3.loss_dice: 0.5946, decode.d4.loss_cls: 0.0953, decode.d4.loss_mask: 0.2157, decode.d4.loss_dice: 0.5937, decode.d5.loss_cls: 0.0931, decode.d5.loss_mask: 0.2156, decode.d5.loss_dice: 0.5954, decode.d6.loss_cls: 0.0907, decode.d6.loss_mask: 0.2156, decode.d6.loss_dice: 0.5934, decode.d7.loss_cls: 0.0909, decode.d7.loss_mask: 0.2153, decode.d7.loss_dice: 0.5936, decode.d8.loss_cls: 0.0882, decode.d8.loss_mask: 0.2158, decode.d8.loss_dice: 0.5916, loss: 9.3124 +2022-05-10 11:15:50,483 - mmseg - INFO - Iter [32950/80000] lr: 8.444e-07, eta: 1 day, 1:25:01, time: 1.820, data_time: 0.068, memory: 64699, decode.loss_cls: 0.0891, decode.loss_mask: 0.2148, decode.loss_dice: 0.5929, decode.d0.loss_cls: 0.3192, decode.d0.loss_mask: 0.2236, decode.d0.loss_dice: 0.6188, decode.d1.loss_cls: 0.1099, decode.d1.loss_mask: 0.2166, decode.d1.loss_dice: 0.6033, decode.d2.loss_cls: 0.0972, decode.d2.loss_mask: 0.2161, decode.d2.loss_dice: 0.5975, decode.d3.loss_cls: 0.0905, decode.d3.loss_mask: 0.2147, decode.d3.loss_dice: 0.5941, decode.d4.loss_cls: 0.0905, decode.d4.loss_mask: 0.2142, decode.d4.loss_dice: 0.5903, decode.d5.loss_cls: 0.0877, decode.d5.loss_mask: 0.2150, decode.d5.loss_dice: 0.5904, decode.d6.loss_cls: 0.0867, decode.d6.loss_mask: 0.2150, decode.d6.loss_dice: 0.5897, decode.d7.loss_cls: 0.0850, decode.d7.loss_mask: 0.2144, decode.d7.loss_dice: 0.5929, decode.d8.loss_cls: 0.0833, decode.d8.loss_mask: 0.2144, decode.d8.loss_dice: 0.5915, loss: 9.2593 +2022-05-10 11:17:18,009 - mmseg - INFO - Saving checkpoint at 33000 iterations +2022-05-10 11:17:51,499 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 11:17:51,507 - mmseg - INFO - Iter [33000/80000] lr: 8.435e-07, eta: 1 day, 1:23:58, time: 2.418, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0825, decode.loss_mask: 0.2180, decode.loss_dice: 0.5949, decode.d0.loss_cls: 0.3121, decode.d0.loss_mask: 
0.2267, decode.d0.loss_dice: 0.6255, decode.d1.loss_cls: 0.0985, decode.d1.loss_mask: 0.2200, decode.d1.loss_dice: 0.6095, decode.d2.loss_cls: 0.0975, decode.d2.loss_mask: 0.2188, decode.d2.loss_dice: 0.6038, decode.d3.loss_cls: 0.0863, decode.d3.loss_mask: 0.2176, decode.d3.loss_dice: 0.5978, decode.d4.loss_cls: 0.0819, decode.d4.loss_mask: 0.2178, decode.d4.loss_dice: 0.5996, decode.d5.loss_cls: 0.0840, decode.d5.loss_mask: 0.2172, decode.d5.loss_dice: 0.5946, decode.d6.loss_cls: 0.0899, decode.d6.loss_mask: 0.2175, decode.d6.loss_dice: 0.5935, decode.d7.loss_cls: 0.0865, decode.d7.loss_mask: 0.2175, decode.d7.loss_dice: 0.5954, decode.d8.loss_cls: 0.0850, decode.d8.loss_mask: 0.2169, decode.d8.loss_dice: 0.5940, loss: 9.3006 +2022-05-10 11:19:46,412 - mmseg - INFO - per class results: +2022-05-10 11:19:46,416 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.3 | +| sidewalk | 88.71 | 93.45 | +| building | 94.43 | 97.22 | +| wall | 67.79 | 80.96 | +| fence | 74.16 | 83.15 | +| pole | 71.71 | 83.48 | +| traffic light | 77.03 | 89.11 | +| traffic sign | 83.9 | 90.5 | +| vegetation | 93.38 | 96.67 | +| terrain | 67.72 | 77.02 | +| sky | 95.81 | 98.59 | +| person | 86.81 | 94.03 | +| rider | 74.51 | 85.72 | +| car | 96.31 | 98.36 | +| truck | 91.87 | 94.42 | +| bus | 93.87 | 96.84 | +| train | 88.05 | 90.59 | +| motorcycle | 77.24 | 88.29 | +| bicycle | 82.85 | 91.14 | ++---------------+-------+-------+ +2022-05-10 11:19:46,417 - mmseg - INFO - Summary: +2022-05-10 11:19:46,417 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.03 | 84.46 | 90.99 | ++-------+-------+-------+ +2022-05-10 11:19:46,421 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 11:19:46,421 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8446, mAcc: 0.9099, IoU.road: 0.9862, IoU.sidewalk: 0.8871, IoU.building: 0.9443, 
IoU.wall: 0.6779, IoU.fence: 0.7416, IoU.pole: 0.7171, IoU.traffic light: 0.7703, IoU.traffic sign: 0.8390, IoU.vegetation: 0.9338, IoU.terrain: 0.6772, IoU.sky: 0.9581, IoU.person: 0.8681, IoU.rider: 0.7451, IoU.car: 0.9631, IoU.truck: 0.9187, IoU.bus: 0.9387, IoU.train: 0.8805, IoU.motorcycle: 0.7724, IoU.bicycle: 0.8285, Acc.road: 0.9930, Acc.sidewalk: 0.9345, Acc.building: 0.9722, Acc.wall: 0.8096, Acc.fence: 0.8315, Acc.pole: 0.8348, Acc.traffic light: 0.8911, Acc.traffic sign: 0.9050, Acc.vegetation: 0.9667, Acc.terrain: 0.7702, Acc.sky: 0.9859, Acc.person: 0.9403, Acc.rider: 0.8572, Acc.car: 0.9836, Acc.truck: 0.9442, Acc.bus: 0.9684, Acc.train: 0.9059, Acc.motorcycle: 0.8829, Acc.bicycle: 0.9114 +2022-05-10 11:21:14,819 - mmseg - INFO - Iter [33050/80000] lr: 8.427e-07, eta: 1 day, 1:24:51, time: 4.068, data_time: 2.318, memory: 64699, decode.loss_cls: 0.0780, decode.loss_mask: 0.2132, decode.loss_dice: 0.5909, decode.d0.loss_cls: 0.3112, decode.d0.loss_mask: 0.2214, decode.d0.loss_dice: 0.6246, decode.d1.loss_cls: 0.0931, decode.d1.loss_mask: 0.2152, decode.d1.loss_dice: 0.6042, decode.d2.loss_cls: 0.0878, decode.d2.loss_mask: 0.2145, decode.d2.loss_dice: 0.5962, decode.d3.loss_cls: 0.0881, decode.d3.loss_mask: 0.2137, decode.d3.loss_dice: 0.5939, decode.d4.loss_cls: 0.0793, decode.d4.loss_mask: 0.2132, decode.d4.loss_dice: 0.5932, decode.d5.loss_cls: 0.0873, decode.d5.loss_mask: 0.2132, decode.d5.loss_dice: 0.5936, decode.d6.loss_cls: 0.0780, decode.d6.loss_mask: 0.2130, decode.d6.loss_dice: 0.5916, decode.d7.loss_cls: 0.0875, decode.d7.loss_mask: 0.2127, decode.d7.loss_dice: 0.5916, decode.d8.loss_cls: 0.0797, decode.d8.loss_mask: 0.2129, decode.d8.loss_dice: 0.5914, loss: 9.1841 +2022-05-10 11:22:42,640 - mmseg - INFO - Iter [33100/80000] lr: 8.418e-07, eta: 1 day, 1:23:00, time: 1.757, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0901, decode.loss_mask: 0.2172, decode.loss_dice: 0.5917, decode.d0.loss_cls: 0.3182, decode.d0.loss_mask: 0.2267, 
decode.d0.loss_dice: 0.6245, decode.d1.loss_cls: 0.1057, decode.d1.loss_mask: 0.2189, decode.d1.loss_dice: 0.6050, decode.d2.loss_cls: 0.0979, decode.d2.loss_mask: 0.2169, decode.d2.loss_dice: 0.5963, decode.d3.loss_cls: 0.0928, decode.d3.loss_mask: 0.2172, decode.d3.loss_dice: 0.5972, decode.d4.loss_cls: 0.0906, decode.d4.loss_mask: 0.2167, decode.d4.loss_dice: 0.5941, decode.d5.loss_cls: 0.0872, decode.d5.loss_mask: 0.2172, decode.d5.loss_dice: 0.5949, decode.d6.loss_cls: 0.0883, decode.d6.loss_mask: 0.2176, decode.d6.loss_dice: 0.5899, decode.d7.loss_cls: 0.0875, decode.d7.loss_mask: 0.2172, decode.d7.loss_dice: 0.5881, decode.d8.loss_cls: 0.0894, decode.d8.loss_mask: 0.2170, decode.d8.loss_dice: 0.5911, loss: 9.3032 +2022-05-10 11:24:13,095 - mmseg - INFO - Iter [33150/80000] lr: 8.409e-07, eta: 1 day, 1:21:13, time: 1.809, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0844, decode.loss_mask: 0.2148, decode.loss_dice: 0.5825, decode.d0.loss_cls: 0.3211, decode.d0.loss_mask: 0.2236, decode.d0.loss_dice: 0.6186, decode.d1.loss_cls: 0.0953, decode.d1.loss_mask: 0.2173, decode.d1.loss_dice: 0.5958, decode.d2.loss_cls: 0.0928, decode.d2.loss_mask: 0.2155, decode.d2.loss_dice: 0.5937, decode.d3.loss_cls: 0.0859, decode.d3.loss_mask: 0.2154, decode.d3.loss_dice: 0.5829, decode.d4.loss_cls: 0.0875, decode.d4.loss_mask: 0.2154, decode.d4.loss_dice: 0.5878, decode.d5.loss_cls: 0.0866, decode.d5.loss_mask: 0.2151, decode.d5.loss_dice: 0.5918, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2148, decode.d6.loss_dice: 0.5855, decode.d7.loss_cls: 0.0854, decode.d7.loss_mask: 0.2144, decode.d7.loss_dice: 0.5875, decode.d8.loss_cls: 0.0764, decode.d8.loss_mask: 0.2142, decode.d8.loss_dice: 0.5874, loss: 9.1665 +2022-05-10 11:25:41,359 - mmseg - INFO - Iter [33200/80000] lr: 8.400e-07, eta: 1 day, 1:19:23, time: 1.765, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0738, decode.loss_mask: 0.2185, decode.loss_dice: 0.5792, decode.d0.loss_cls: 0.3106, 
decode.d0.loss_mask: 0.2290, decode.d0.loss_dice: 0.6106, decode.d1.loss_cls: 0.0941, decode.d1.loss_mask: 0.2204, decode.d1.loss_dice: 0.5908, decode.d2.loss_cls: 0.0819, decode.d2.loss_mask: 0.2201, decode.d2.loss_dice: 0.5879, decode.d3.loss_cls: 0.0758, decode.d3.loss_mask: 0.2194, decode.d3.loss_dice: 0.5780, decode.d4.loss_cls: 0.0763, decode.d4.loss_mask: 0.2191, decode.d4.loss_dice: 0.5800, decode.d5.loss_cls: 0.0789, decode.d5.loss_mask: 0.2191, decode.d5.loss_dice: 0.5799, decode.d6.loss_cls: 0.0734, decode.d6.loss_mask: 0.2188, decode.d6.loss_dice: 0.5819, decode.d7.loss_cls: 0.0703, decode.d7.loss_mask: 0.2186, decode.d7.loss_dice: 0.5765, decode.d8.loss_cls: 0.0710, decode.d8.loss_mask: 0.2186, decode.d8.loss_dice: 0.5801, loss: 9.0527 +2022-05-10 11:27:09,854 - mmseg - INFO - Iter [33250/80000] lr: 8.391e-07, eta: 1 day, 1:17:33, time: 1.770, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0822, decode.loss_mask: 0.2183, decode.loss_dice: 0.5888, decode.d0.loss_cls: 0.3304, decode.d0.loss_mask: 0.2290, decode.d0.loss_dice: 0.6223, decode.d1.loss_cls: 0.0988, decode.d1.loss_mask: 0.2205, decode.d1.loss_dice: 0.6079, decode.d2.loss_cls: 0.0937, decode.d2.loss_mask: 0.2190, decode.d2.loss_dice: 0.6014, decode.d3.loss_cls: 0.0862, decode.d3.loss_mask: 0.2186, decode.d3.loss_dice: 0.5950, decode.d4.loss_cls: 0.0851, decode.d4.loss_mask: 0.2184, decode.d4.loss_dice: 0.5912, decode.d5.loss_cls: 0.0852, decode.d5.loss_mask: 0.2190, decode.d5.loss_dice: 0.5964, decode.d6.loss_cls: 0.0802, decode.d6.loss_mask: 0.2178, decode.d6.loss_dice: 0.5892, decode.d7.loss_cls: 0.0801, decode.d7.loss_mask: 0.2179, decode.d7.loss_dice: 0.5912, decode.d8.loss_cls: 0.0818, decode.d8.loss_mask: 0.2186, decode.d8.loss_dice: 0.5920, loss: 9.2761 +2022-05-10 11:28:39,756 - mmseg - INFO - Iter [33300/80000] lr: 8.382e-07, eta: 1 day, 1:15:45, time: 1.798, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0849, decode.loss_mask: 0.2152, decode.loss_dice: 0.5975, 
decode.d0.loss_cls: 0.3143, decode.d0.loss_mask: 0.2244, decode.d0.loss_dice: 0.6287, decode.d1.loss_cls: 0.1146, decode.d1.loss_mask: 0.2169, decode.d1.loss_dice: 0.6086, decode.d2.loss_cls: 0.0944, decode.d2.loss_mask: 0.2147, decode.d2.loss_dice: 0.6009, decode.d3.loss_cls: 0.0821, decode.d3.loss_mask: 0.2148, decode.d3.loss_dice: 0.5991, decode.d4.loss_cls: 0.0773, decode.d4.loss_mask: 0.2153, decode.d4.loss_dice: 0.5988, decode.d5.loss_cls: 0.0810, decode.d5.loss_mask: 0.2152, decode.d5.loss_dice: 0.5958, decode.d6.loss_cls: 0.0829, decode.d6.loss_mask: 0.2147, decode.d6.loss_dice: 0.5951, decode.d7.loss_cls: 0.0762, decode.d7.loss_mask: 0.2146, decode.d7.loss_dice: 0.5915, decode.d8.loss_cls: 0.0833, decode.d8.loss_mask: 0.2148, decode.d8.loss_dice: 0.5958, loss: 9.2637 +2022-05-10 11:30:06,981 - mmseg - INFO - Iter [33350/80000] lr: 8.373e-07, eta: 1 day, 1:13:53, time: 1.745, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0800, decode.loss_mask: 0.2185, decode.loss_dice: 0.5975, decode.d0.loss_cls: 0.3176, decode.d0.loss_mask: 0.2291, decode.d0.loss_dice: 0.6279, decode.d1.loss_cls: 0.1055, decode.d1.loss_mask: 0.2194, decode.d1.loss_dice: 0.6069, decode.d2.loss_cls: 0.0904, decode.d2.loss_mask: 0.2189, decode.d2.loss_dice: 0.6032, decode.d3.loss_cls: 0.0867, decode.d3.loss_mask: 0.2189, decode.d3.loss_dice: 0.5966, decode.d4.loss_cls: 0.0864, decode.d4.loss_mask: 0.2186, decode.d4.loss_dice: 0.5946, decode.d5.loss_cls: 0.0854, decode.d5.loss_mask: 0.2188, decode.d5.loss_dice: 0.5950, decode.d6.loss_cls: 0.0851, decode.d6.loss_mask: 0.2182, decode.d6.loss_dice: 0.5930, decode.d7.loss_cls: 0.0806, decode.d7.loss_mask: 0.2187, decode.d7.loss_dice: 0.5975, decode.d8.loss_cls: 0.0874, decode.d8.loss_mask: 0.2184, decode.d8.loss_dice: 0.5926, loss: 9.3075 +2022-05-10 11:31:35,656 - mmseg - INFO - Iter [33400/80000] lr: 8.364e-07, eta: 1 day, 1:12:04, time: 1.773, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0755, decode.loss_mask: 0.2107, 
decode.loss_dice: 0.5734, decode.d0.loss_cls: 0.3147, decode.d0.loss_mask: 0.2204, decode.d0.loss_dice: 0.6076, decode.d1.loss_cls: 0.0947, decode.d1.loss_mask: 0.2127, decode.d1.loss_dice: 0.5888, decode.d2.loss_cls: 0.0816, decode.d2.loss_mask: 0.2115, decode.d2.loss_dice: 0.5822, decode.d3.loss_cls: 0.0795, decode.d3.loss_mask: 0.2115, decode.d3.loss_dice: 0.5775, decode.d4.loss_cls: 0.0784, decode.d4.loss_mask: 0.2117, decode.d4.loss_dice: 0.5844, decode.d5.loss_cls: 0.0731, decode.d5.loss_mask: 0.2109, decode.d5.loss_dice: 0.5763, decode.d6.loss_cls: 0.0756, decode.d6.loss_mask: 0.2108, decode.d6.loss_dice: 0.5767, decode.d7.loss_cls: 0.0758, decode.d7.loss_mask: 0.2106, decode.d7.loss_dice: 0.5774, decode.d8.loss_cls: 0.0730, decode.d8.loss_mask: 0.2102, decode.d8.loss_dice: 0.5755, loss: 8.9626 +2022-05-10 11:33:04,357 - mmseg - INFO - Iter [33450/80000] lr: 8.355e-07, eta: 1 day, 1:10:15, time: 1.774, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0817, decode.loss_mask: 0.2222, decode.loss_dice: 0.5926, decode.d0.loss_cls: 0.3058, decode.d0.loss_mask: 0.2343, decode.d0.loss_dice: 0.6217, decode.d1.loss_cls: 0.0985, decode.d1.loss_mask: 0.2243, decode.d1.loss_dice: 0.6008, decode.d2.loss_cls: 0.0948, decode.d2.loss_mask: 0.2230, decode.d2.loss_dice: 0.5941, decode.d3.loss_cls: 0.0919, decode.d3.loss_mask: 0.2219, decode.d3.loss_dice: 0.5933, decode.d4.loss_cls: 0.0872, decode.d4.loss_mask: 0.2228, decode.d4.loss_dice: 0.5935, decode.d5.loss_cls: 0.0812, decode.d5.loss_mask: 0.2242, decode.d5.loss_dice: 0.5909, decode.d6.loss_cls: 0.0857, decode.d6.loss_mask: 0.2232, decode.d6.loss_dice: 0.5885, decode.d7.loss_cls: 0.0872, decode.d7.loss_mask: 0.2223, decode.d7.loss_dice: 0.5870, decode.d8.loss_cls: 0.0840, decode.d8.loss_mask: 0.2215, decode.d8.loss_dice: 0.5909, loss: 9.2912 +2022-05-10 11:34:34,249 - mmseg - INFO - Iter [33500/80000] lr: 8.346e-07, eta: 1 day, 1:08:27, time: 1.798, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0800, 
decode.loss_mask: 0.2154, decode.loss_dice: 0.5835, decode.d0.loss_cls: 0.3274, decode.d0.loss_mask: 0.2267, decode.d0.loss_dice: 0.6140, decode.d1.loss_cls: 0.1056, decode.d1.loss_mask: 0.2169, decode.d1.loss_dice: 0.5965, decode.d2.loss_cls: 0.0914, decode.d2.loss_mask: 0.2170, decode.d2.loss_dice: 0.5881, decode.d3.loss_cls: 0.0865, decode.d3.loss_mask: 0.2158, decode.d3.loss_dice: 0.5837, decode.d4.loss_cls: 0.0766, decode.d4.loss_mask: 0.2156, decode.d4.loss_dice: 0.5809, decode.d5.loss_cls: 0.0837, decode.d5.loss_mask: 0.2157, decode.d5.loss_dice: 0.5826, decode.d6.loss_cls: 0.0845, decode.d6.loss_mask: 0.2155, decode.d6.loss_dice: 0.5856, decode.d7.loss_cls: 0.0751, decode.d7.loss_mask: 0.2152, decode.d7.loss_dice: 0.5834, decode.d8.loss_cls: 0.0747, decode.d8.loss_mask: 0.2152, decode.d8.loss_dice: 0.5835, loss: 9.1361 +2022-05-10 11:36:02,200 - mmseg - INFO - Iter [33550/80000] lr: 8.337e-07, eta: 1 day, 1:06:36, time: 1.758, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0754, decode.loss_mask: 0.2095, decode.loss_dice: 0.5944, decode.d0.loss_cls: 0.3206, decode.d0.loss_mask: 0.2179, decode.d0.loss_dice: 0.6259, decode.d1.loss_cls: 0.0976, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.6039, decode.d2.loss_cls: 0.0884, decode.d2.loss_mask: 0.2093, decode.d2.loss_dice: 0.6013, decode.d3.loss_cls: 0.0757, decode.d3.loss_mask: 0.2092, decode.d3.loss_dice: 0.6006, decode.d4.loss_cls: 0.0821, decode.d4.loss_mask: 0.2090, decode.d4.loss_dice: 0.5970, decode.d5.loss_cls: 0.0798, decode.d5.loss_mask: 0.2100, decode.d5.loss_dice: 0.5973, decode.d6.loss_cls: 0.0798, decode.d6.loss_mask: 0.2098, decode.d6.loss_dice: 0.5937, decode.d7.loss_cls: 0.0787, decode.d7.loss_mask: 0.2097, decode.d7.loss_dice: 0.5930, decode.d8.loss_cls: 0.0809, decode.d8.loss_mask: 0.2094, decode.d8.loss_dice: 0.5917, loss: 9.1625 +2022-05-10 11:37:30,533 - mmseg - INFO - Iter [33600/80000] lr: 8.328e-07, eta: 1 day, 1:04:47, time: 1.768, data_time: 0.019, memory: 64699, 
decode.loss_cls: 0.0815, decode.loss_mask: 0.2174, decode.loss_dice: 0.5843, decode.d0.loss_cls: 0.3161, decode.d0.loss_mask: 0.2287, decode.d0.loss_dice: 0.6090, decode.d1.loss_cls: 0.0998, decode.d1.loss_mask: 0.2197, decode.d1.loss_dice: 0.5904, decode.d2.loss_cls: 0.0855, decode.d2.loss_mask: 0.2180, decode.d2.loss_dice: 0.5798, decode.d3.loss_cls: 0.0814, decode.d3.loss_mask: 0.2182, decode.d3.loss_dice: 0.5784, decode.d4.loss_cls: 0.0807, decode.d4.loss_mask: 0.2172, decode.d4.loss_dice: 0.5798, decode.d5.loss_cls: 0.0780, decode.d5.loss_mask: 0.2178, decode.d5.loss_dice: 0.5825, decode.d6.loss_cls: 0.0810, decode.d6.loss_mask: 0.2172, decode.d6.loss_dice: 0.5816, decode.d7.loss_cls: 0.0819, decode.d7.loss_mask: 0.2171, decode.d7.loss_dice: 0.5808, decode.d8.loss_cls: 0.0786, decode.d8.loss_mask: 0.2173, decode.d8.loss_dice: 0.5805, loss: 9.1006 +2022-05-10 11:38:57,900 - mmseg - INFO - Iter [33650/80000] lr: 8.319e-07, eta: 1 day, 1:02:56, time: 1.747, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0804, decode.loss_mask: 0.2212, decode.loss_dice: 0.5767, decode.d0.loss_cls: 0.3094, decode.d0.loss_mask: 0.2323, decode.d0.loss_dice: 0.6083, decode.d1.loss_cls: 0.0947, decode.d1.loss_mask: 0.2253, decode.d1.loss_dice: 0.5867, decode.d2.loss_cls: 0.0898, decode.d2.loss_mask: 0.2237, decode.d2.loss_dice: 0.5832, decode.d3.loss_cls: 0.0786, decode.d3.loss_mask: 0.2224, decode.d3.loss_dice: 0.5778, decode.d4.loss_cls: 0.0776, decode.d4.loss_mask: 0.2220, decode.d4.loss_dice: 0.5804, decode.d5.loss_cls: 0.0858, decode.d5.loss_mask: 0.2210, decode.d5.loss_dice: 0.5820, decode.d6.loss_cls: 0.0845, decode.d6.loss_mask: 0.2220, decode.d6.loss_dice: 0.5756, decode.d7.loss_cls: 0.0784, decode.d7.loss_mask: 0.2224, decode.d7.loss_dice: 0.5767, decode.d8.loss_cls: 0.0818, decode.d8.loss_mask: 0.2220, decode.d8.loss_dice: 0.5786, loss: 9.1212 +2022-05-10 11:40:28,943 - mmseg - INFO - Iter [33700/80000] lr: 8.310e-07, eta: 1 day, 1:01:10, time: 1.821, data_time: 0.065, 
memory: 64699, decode.loss_cls: 0.0846, decode.loss_mask: 0.2204, decode.loss_dice: 0.5911, decode.d0.loss_cls: 0.3269, decode.d0.loss_mask: 0.2305, decode.d0.loss_dice: 0.6299, decode.d1.loss_cls: 0.1020, decode.d1.loss_mask: 0.2218, decode.d1.loss_dice: 0.6059, decode.d2.loss_cls: 0.0989, decode.d2.loss_mask: 0.2212, decode.d2.loss_dice: 0.5982, decode.d3.loss_cls: 0.0917, decode.d3.loss_mask: 0.2209, decode.d3.loss_dice: 0.5955, decode.d4.loss_cls: 0.0890, decode.d4.loss_mask: 0.2207, decode.d4.loss_dice: 0.5944, decode.d5.loss_cls: 0.0966, decode.d5.loss_mask: 0.2204, decode.d5.loss_dice: 0.5958, decode.d6.loss_cls: 0.0880, decode.d6.loss_mask: 0.2208, decode.d6.loss_dice: 0.5959, decode.d7.loss_cls: 0.0900, decode.d7.loss_mask: 0.2205, decode.d7.loss_dice: 0.5913, decode.d8.loss_cls: 0.0904, decode.d8.loss_mask: 0.2201, decode.d8.loss_dice: 0.5919, loss: 9.3651 +2022-05-10 11:41:56,636 - mmseg - INFO - Iter [33750/80000] lr: 8.301e-07, eta: 1 day, 0:59:20, time: 1.754, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0793, decode.loss_mask: 0.2180, decode.loss_dice: 0.5980, decode.d0.loss_cls: 0.3073, decode.d0.loss_mask: 0.2291, decode.d0.loss_dice: 0.6324, decode.d1.loss_cls: 0.1044, decode.d1.loss_mask: 0.2206, decode.d1.loss_dice: 0.6080, decode.d2.loss_cls: 0.0886, decode.d2.loss_mask: 0.2186, decode.d2.loss_dice: 0.5977, decode.d3.loss_cls: 0.0841, decode.d3.loss_mask: 0.2185, decode.d3.loss_dice: 0.5921, decode.d4.loss_cls: 0.0820, decode.d4.loss_mask: 0.2185, decode.d4.loss_dice: 0.5959, decode.d5.loss_cls: 0.0843, decode.d5.loss_mask: 0.2179, decode.d5.loss_dice: 0.5985, decode.d6.loss_cls: 0.0844, decode.d6.loss_mask: 0.2174, decode.d6.loss_dice: 0.5968, decode.d7.loss_cls: 0.0831, decode.d7.loss_mask: 0.2180, decode.d7.loss_dice: 0.5977, decode.d8.loss_cls: 0.0848, decode.d8.loss_mask: 0.2176, decode.d8.loss_dice: 0.5976, loss: 9.2912 +2022-05-10 11:43:24,483 - mmseg - INFO - Iter [33800/80000] lr: 8.292e-07, eta: 1 day, 0:57:30, time: 1.756, 
data_time: 0.018, memory: 64699, decode.loss_cls: 0.0814, decode.loss_mask: 0.2138, decode.loss_dice: 0.5754, decode.d0.loss_cls: 0.3087, decode.d0.loss_mask: 0.2228, decode.d0.loss_dice: 0.6123, decode.d1.loss_cls: 0.0993, decode.d1.loss_mask: 0.2147, decode.d1.loss_dice: 0.5884, decode.d2.loss_cls: 0.0910, decode.d2.loss_mask: 0.2144, decode.d2.loss_dice: 0.5797, decode.d3.loss_cls: 0.0889, decode.d3.loss_mask: 0.2142, decode.d3.loss_dice: 0.5793, decode.d4.loss_cls: 0.0880, decode.d4.loss_mask: 0.2143, decode.d4.loss_dice: 0.5805, decode.d5.loss_cls: 0.0817, decode.d5.loss_mask: 0.2140, decode.d5.loss_dice: 0.5771, decode.d6.loss_cls: 0.0804, decode.d6.loss_mask: 0.2141, decode.d6.loss_dice: 0.5769, decode.d7.loss_cls: 0.0874, decode.d7.loss_mask: 0.2145, decode.d7.loss_dice: 0.5724, decode.d8.loss_cls: 0.0813, decode.d8.loss_mask: 0.2143, decode.d8.loss_dice: 0.5776, loss: 9.0588 +2022-05-10 11:44:51,871 - mmseg - INFO - Iter [33850/80000] lr: 8.283e-07, eta: 1 day, 0:55:39, time: 1.748, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0888, decode.loss_mask: 0.2178, decode.loss_dice: 0.5917, decode.d0.loss_cls: 0.3120, decode.d0.loss_mask: 0.2247, decode.d0.loss_dice: 0.6274, decode.d1.loss_cls: 0.1078, decode.d1.loss_mask: 0.2179, decode.d1.loss_dice: 0.6061, decode.d2.loss_cls: 0.0909, decode.d2.loss_mask: 0.2173, decode.d2.loss_dice: 0.5971, decode.d3.loss_cls: 0.0867, decode.d3.loss_mask: 0.2165, decode.d3.loss_dice: 0.5931, decode.d4.loss_cls: 0.0914, decode.d4.loss_mask: 0.2169, decode.d4.loss_dice: 0.5937, decode.d5.loss_cls: 0.0898, decode.d5.loss_mask: 0.2178, decode.d5.loss_dice: 0.5938, decode.d6.loss_cls: 0.0871, decode.d6.loss_mask: 0.2188, decode.d6.loss_dice: 0.5906, decode.d7.loss_cls: 0.0837, decode.d7.loss_mask: 0.2175, decode.d7.loss_dice: 0.5882, decode.d8.loss_cls: 0.0902, decode.d8.loss_mask: 0.2175, decode.d8.loss_dice: 0.5909, loss: 9.2837 +2022-05-10 11:46:22,269 - mmseg - INFO - Iter [33900/80000] lr: 8.274e-07, eta: 1 day, 
0:53:52, time: 1.808, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0808, decode.loss_mask: 0.2149, decode.loss_dice: 0.5838, decode.d0.loss_cls: 0.3241, decode.d0.loss_mask: 0.2253, decode.d0.loss_dice: 0.6181, decode.d1.loss_cls: 0.1025, decode.d1.loss_mask: 0.2164, decode.d1.loss_dice: 0.5948, decode.d2.loss_cls: 0.0895, decode.d2.loss_mask: 0.2159, decode.d2.loss_dice: 0.5905, decode.d3.loss_cls: 0.0886, decode.d3.loss_mask: 0.2153, decode.d3.loss_dice: 0.5834, decode.d4.loss_cls: 0.0842, decode.d4.loss_mask: 0.2151, decode.d4.loss_dice: 0.5881, decode.d5.loss_cls: 0.0821, decode.d5.loss_mask: 0.2150, decode.d5.loss_dice: 0.5869, decode.d6.loss_cls: 0.0862, decode.d6.loss_mask: 0.2152, decode.d6.loss_dice: 0.5866, decode.d7.loss_cls: 0.0859, decode.d7.loss_mask: 0.2144, decode.d7.loss_dice: 0.5837, decode.d8.loss_cls: 0.0880, decode.d8.loss_mask: 0.2148, decode.d8.loss_dice: 0.5867, loss: 9.1771 +2022-05-10 11:47:49,959 - mmseg - INFO - Iter [33950/80000] lr: 8.265e-07, eta: 1 day, 0:52:02, time: 1.754, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0769, decode.loss_mask: 0.2075, decode.loss_dice: 0.5853, decode.d0.loss_cls: 0.3124, decode.d0.loss_mask: 0.2162, decode.d0.loss_dice: 0.6243, decode.d1.loss_cls: 0.0945, decode.d1.loss_mask: 0.2096, decode.d1.loss_dice: 0.5997, decode.d2.loss_cls: 0.0939, decode.d2.loss_mask: 0.2077, decode.d2.loss_dice: 0.6005, decode.d3.loss_cls: 0.0883, decode.d3.loss_mask: 0.2085, decode.d3.loss_dice: 0.5913, decode.d4.loss_cls: 0.0799, decode.d4.loss_mask: 0.2079, decode.d4.loss_dice: 0.5903, decode.d5.loss_cls: 0.0809, decode.d5.loss_mask: 0.2087, decode.d5.loss_dice: 0.5942, decode.d6.loss_cls: 0.0758, decode.d6.loss_mask: 0.2085, decode.d6.loss_dice: 0.5945, decode.d7.loss_cls: 0.0784, decode.d7.loss_mask: 0.2085, decode.d7.loss_dice: 0.5918, decode.d8.loss_cls: 0.0793, decode.d8.loss_mask: 0.2082, decode.d8.loss_dice: 0.5913, loss: 9.1147 +2022-05-10 11:49:18,234 - mmseg - INFO - Saving checkpoint at 34000 
iterations +2022-05-10 11:49:48,228 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 11:49:48,236 - mmseg - INFO - Iter [34000/80000] lr: 8.256e-07, eta: 1 day, 0:50:53, time: 2.363, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0949, decode.loss_mask: 0.2169, decode.loss_dice: 0.5933, decode.d0.loss_cls: 0.3222, decode.d0.loss_mask: 0.2274, decode.d0.loss_dice: 0.6345, decode.d1.loss_cls: 0.1111, decode.d1.loss_mask: 0.2185, decode.d1.loss_dice: 0.6089, decode.d2.loss_cls: 0.1071, decode.d2.loss_mask: 0.2179, decode.d2.loss_dice: 0.6024, decode.d3.loss_cls: 0.1041, decode.d3.loss_mask: 0.2175, decode.d3.loss_dice: 0.5976, decode.d4.loss_cls: 0.0972, decode.d4.loss_mask: 0.2173, decode.d4.loss_dice: 0.5964, decode.d5.loss_cls: 0.1004, decode.d5.loss_mask: 0.2175, decode.d5.loss_dice: 0.5981, decode.d6.loss_cls: 0.0964, decode.d6.loss_mask: 0.2175, decode.d6.loss_dice: 0.5976, decode.d7.loss_cls: 0.0947, decode.d7.loss_mask: 0.2176, decode.d7.loss_dice: 0.6003, decode.d8.loss_cls: 0.0948, decode.d8.loss_mask: 0.2170, decode.d8.loss_dice: 0.5961, loss: 9.4331 +2022-05-10 11:51:44,383 - mmseg - INFO - per class results: +2022-05-10 11:51:44,389 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.58 | 99.17 | +| sidewalk | 88.7 | 94.43 | +| building | 94.3 | 96.72 | +| wall | 67.24 | 77.25 | +| fence | 77.58 | 88.0 | +| pole | 71.32 | 85.28 | +| traffic light | 77.28 | 88.99 | +| traffic sign | 84.24 | 91.09 | +| vegetation | 93.34 | 96.92 | +| terrain | 67.06 | 78.35 | +| sky | 95.89 | 98.25 | +| person | 87.05 | 93.72 | +| rider | 74.14 | 87.92 | +| car | 96.21 | 98.28 | +| truck | 89.52 | 92.6 | +| bus | 93.8 | 96.59 | +| train | 88.22 | 91.17 | +| motorcycle | 77.65 | 87.08 | +| bicycle | 83.01 | 91.59 | ++---------------+-------+-------+ +2022-05-10 11:51:44,389 - mmseg - INFO - Summary: +2022-05-10 11:51:44,390 - mmseg - INFO - 
++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.48 | 91.23 | ++------+-------+-------+ +2022-05-10 11:51:44,394 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 11:51:44,394 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8448, mAcc: 0.9123, IoU.road: 0.9858, IoU.sidewalk: 0.8870, IoU.building: 0.9430, IoU.wall: 0.6724, IoU.fence: 0.7758, IoU.pole: 0.7132, IoU.traffic light: 0.7728, IoU.traffic sign: 0.8424, IoU.vegetation: 0.9334, IoU.terrain: 0.6706, IoU.sky: 0.9589, IoU.person: 0.8705, IoU.rider: 0.7414, IoU.car: 0.9621, IoU.truck: 0.8952, IoU.bus: 0.9380, IoU.train: 0.8822, IoU.motorcycle: 0.7765, IoU.bicycle: 0.8301, Acc.road: 0.9917, Acc.sidewalk: 0.9443, Acc.building: 0.9672, Acc.wall: 0.7725, Acc.fence: 0.8800, Acc.pole: 0.8528, Acc.traffic light: 0.8899, Acc.traffic sign: 0.9109, Acc.vegetation: 0.9692, Acc.terrain: 0.7835, Acc.sky: 0.9825, Acc.person: 0.9372, Acc.rider: 0.8792, Acc.car: 0.9828, Acc.truck: 0.9260, Acc.bus: 0.9659, Acc.train: 0.9117, Acc.motorcycle: 0.8708, Acc.bicycle: 0.9159 +2022-05-10 11:53:15,796 - mmseg - INFO - Iter [34050/80000] lr: 8.247e-07, eta: 1 day, 0:51:45, time: 4.153, data_time: 2.391, memory: 64699, decode.loss_cls: 0.0727, decode.loss_mask: 0.2084, decode.loss_dice: 0.5840, decode.d0.loss_cls: 0.3214, decode.d0.loss_mask: 0.2172, decode.d0.loss_dice: 0.6203, decode.d1.loss_cls: 0.0900, decode.d1.loss_mask: 0.2106, decode.d1.loss_dice: 0.5981, decode.d2.loss_cls: 0.0914, decode.d2.loss_mask: 0.2095, decode.d2.loss_dice: 0.5914, decode.d3.loss_cls: 0.0798, decode.d3.loss_mask: 0.2086, decode.d3.loss_dice: 0.5823, decode.d4.loss_cls: 0.0818, decode.d4.loss_mask: 0.2087, decode.d4.loss_dice: 0.5866, decode.d5.loss_cls: 0.0862, decode.d5.loss_mask: 0.2083, decode.d5.loss_dice: 0.5871, decode.d6.loss_cls: 0.0794, decode.d6.loss_mask: 0.2085, decode.d6.loss_dice: 0.5846, decode.d7.loss_cls: 0.0788, decode.d7.loss_mask: 0.2091, 
decode.d7.loss_dice: 0.5840, decode.d8.loss_cls: 0.0795, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5859, loss: 9.0630 +2022-05-10 11:54:44,480 - mmseg - INFO - Iter [34100/80000] lr: 8.238e-07, eta: 1 day, 0:49:56, time: 1.774, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0846, decode.loss_mask: 0.2164, decode.loss_dice: 0.5948, decode.d0.loss_cls: 0.3100, decode.d0.loss_mask: 0.2260, decode.d0.loss_dice: 0.6315, decode.d1.loss_cls: 0.1086, decode.d1.loss_mask: 0.2191, decode.d1.loss_dice: 0.6066, decode.d2.loss_cls: 0.0941, decode.d2.loss_mask: 0.2185, decode.d2.loss_dice: 0.6040, decode.d3.loss_cls: 0.0877, decode.d3.loss_mask: 0.2173, decode.d3.loss_dice: 0.5961, decode.d4.loss_cls: 0.0917, decode.d4.loss_mask: 0.2171, decode.d4.loss_dice: 0.5977, decode.d5.loss_cls: 0.0874, decode.d5.loss_mask: 0.2166, decode.d5.loss_dice: 0.5946, decode.d6.loss_cls: 0.0862, decode.d6.loss_mask: 0.2166, decode.d6.loss_dice: 0.5970, decode.d7.loss_cls: 0.0822, decode.d7.loss_mask: 0.2163, decode.d7.loss_dice: 0.5984, decode.d8.loss_cls: 0.0888, decode.d8.loss_mask: 0.2167, decode.d8.loss_dice: 0.5928, loss: 9.3155 +2022-05-10 11:56:13,931 - mmseg - INFO - Iter [34150/80000] lr: 8.229e-07, eta: 1 day, 0:48:08, time: 1.788, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0796, decode.loss_mask: 0.2122, decode.loss_dice: 0.5838, decode.d0.loss_cls: 0.3079, decode.d0.loss_mask: 0.2206, decode.d0.loss_dice: 0.6141, decode.d1.loss_cls: 0.0913, decode.d1.loss_mask: 0.2136, decode.d1.loss_dice: 0.5894, decode.d2.loss_cls: 0.0821, decode.d2.loss_mask: 0.2131, decode.d2.loss_dice: 0.5889, decode.d3.loss_cls: 0.0829, decode.d3.loss_mask: 0.2129, decode.d3.loss_dice: 0.5853, decode.d4.loss_cls: 0.0857, decode.d4.loss_mask: 0.2124, decode.d4.loss_dice: 0.5818, decode.d5.loss_cls: 0.0791, decode.d5.loss_mask: 0.2127, decode.d5.loss_dice: 0.5830, decode.d6.loss_cls: 0.0814, decode.d6.loss_mask: 0.2121, decode.d6.loss_dice: 0.5869, decode.d7.loss_cls: 0.0794, 
decode.d7.loss_mask: 0.2120, decode.d7.loss_dice: 0.5846, decode.d8.loss_cls: 0.0841, decode.d8.loss_mask: 0.2126, decode.d8.loss_dice: 0.5810, loss: 9.0666 +2022-05-10 11:57:42,343 - mmseg - INFO - Iter [34200/80000] lr: 8.220e-07, eta: 1 day, 0:46:19, time: 1.769, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0781, decode.loss_mask: 0.2231, decode.loss_dice: 0.5783, decode.d0.loss_cls: 0.3089, decode.d0.loss_mask: 0.2325, decode.d0.loss_dice: 0.6127, decode.d1.loss_cls: 0.1029, decode.d1.loss_mask: 0.2237, decode.d1.loss_dice: 0.5912, decode.d2.loss_cls: 0.0805, decode.d2.loss_mask: 0.2237, decode.d2.loss_dice: 0.5873, decode.d3.loss_cls: 0.0783, decode.d3.loss_mask: 0.2228, decode.d3.loss_dice: 0.5845, decode.d4.loss_cls: 0.0781, decode.d4.loss_mask: 0.2229, decode.d4.loss_dice: 0.5816, decode.d5.loss_cls: 0.0814, decode.d5.loss_mask: 0.2232, decode.d5.loss_dice: 0.5850, decode.d6.loss_cls: 0.0798, decode.d6.loss_mask: 0.2235, decode.d6.loss_dice: 0.5837, decode.d7.loss_cls: 0.0772, decode.d7.loss_mask: 0.2230, decode.d7.loss_dice: 0.5854, decode.d8.loss_cls: 0.0771, decode.d8.loss_mask: 0.2236, decode.d8.loss_dice: 0.5865, loss: 9.1607 +2022-05-10 11:59:12,400 - mmseg - INFO - Iter [34250/80000] lr: 8.211e-07, eta: 1 day, 0:44:32, time: 1.801, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0667, decode.loss_mask: 0.2110, decode.loss_dice: 0.5955, decode.d0.loss_cls: 0.3022, decode.d0.loss_mask: 0.2210, decode.d0.loss_dice: 0.6280, decode.d1.loss_cls: 0.0831, decode.d1.loss_mask: 0.2134, decode.d1.loss_dice: 0.6020, decode.d2.loss_cls: 0.0780, decode.d2.loss_mask: 0.2123, decode.d2.loss_dice: 0.6031, decode.d3.loss_cls: 0.0663, decode.d3.loss_mask: 0.2112, decode.d3.loss_dice: 0.6000, decode.d4.loss_cls: 0.0714, decode.d4.loss_mask: 0.2109, decode.d4.loss_dice: 0.5949, decode.d5.loss_cls: 0.0684, decode.d5.loss_mask: 0.2107, decode.d5.loss_dice: 0.5953, decode.d6.loss_cls: 0.0607, decode.d6.loss_mask: 0.2108, decode.d6.loss_dice: 0.5945, 
decode.d7.loss_cls: 0.0653, decode.d7.loss_mask: 0.2107, decode.d7.loss_dice: 0.5957, decode.d8.loss_cls: 0.0670, decode.d8.loss_mask: 0.2111, decode.d8.loss_dice: 0.5940, loss: 9.0550 +2022-05-10 12:00:41,930 - mmseg - INFO - Iter [34300/80000] lr: 8.202e-07, eta: 1 day, 0:42:44, time: 1.791, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0834, decode.loss_mask: 0.2105, decode.loss_dice: 0.5829, decode.d0.loss_cls: 0.3132, decode.d0.loss_mask: 0.2205, decode.d0.loss_dice: 0.6220, decode.d1.loss_cls: 0.1003, decode.d1.loss_mask: 0.2145, decode.d1.loss_dice: 0.5960, decode.d2.loss_cls: 0.0924, decode.d2.loss_mask: 0.2133, decode.d2.loss_dice: 0.5925, decode.d3.loss_cls: 0.0814, decode.d3.loss_mask: 0.2118, decode.d3.loss_dice: 0.5867, decode.d4.loss_cls: 0.0852, decode.d4.loss_mask: 0.2121, decode.d4.loss_dice: 0.5841, decode.d5.loss_cls: 0.0819, decode.d5.loss_mask: 0.2127, decode.d5.loss_dice: 0.5858, decode.d6.loss_cls: 0.0826, decode.d6.loss_mask: 0.2109, decode.d6.loss_dice: 0.5825, decode.d7.loss_cls: 0.0829, decode.d7.loss_mask: 0.2111, decode.d7.loss_dice: 0.5853, decode.d8.loss_cls: 0.0827, decode.d8.loss_mask: 0.2119, decode.d8.loss_dice: 0.5856, loss: 9.1183 +2022-05-10 12:02:09,531 - mmseg - INFO - Iter [34350/80000] lr: 8.193e-07, eta: 1 day, 0:40:54, time: 1.752, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0720, decode.loss_mask: 0.2162, decode.loss_dice: 0.5829, decode.d0.loss_cls: 0.3202, decode.d0.loss_mask: 0.2251, decode.d0.loss_dice: 0.6159, decode.d1.loss_cls: 0.0933, decode.d1.loss_mask: 0.2184, decode.d1.loss_dice: 0.5905, decode.d2.loss_cls: 0.0885, decode.d2.loss_mask: 0.2171, decode.d2.loss_dice: 0.5897, decode.d3.loss_cls: 0.0841, decode.d3.loss_mask: 0.2166, decode.d3.loss_dice: 0.5851, decode.d4.loss_cls: 0.0815, decode.d4.loss_mask: 0.2174, decode.d4.loss_dice: 0.5877, decode.d5.loss_cls: 0.0815, decode.d5.loss_mask: 0.2176, decode.d5.loss_dice: 0.5845, decode.d6.loss_cls: 0.0820, decode.d6.loss_mask: 0.2172, 
decode.d6.loss_dice: 0.5838, decode.d7.loss_cls: 0.0767, decode.d7.loss_mask: 0.2171, decode.d7.loss_dice: 0.5885, decode.d8.loss_cls: 0.0759, decode.d8.loss_mask: 0.2167, decode.d8.loss_dice: 0.5888, loss: 9.1328 +2022-05-10 12:03:37,640 - mmseg - INFO - Iter [34400/80000] lr: 8.184e-07, eta: 1 day, 0:39:04, time: 1.762, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0802, decode.loss_mask: 0.2191, decode.loss_dice: 0.5832, decode.d0.loss_cls: 0.3134, decode.d0.loss_mask: 0.2298, decode.d0.loss_dice: 0.6145, decode.d1.loss_cls: 0.0927, decode.d1.loss_mask: 0.2216, decode.d1.loss_dice: 0.5944, decode.d2.loss_cls: 0.0903, decode.d2.loss_mask: 0.2197, decode.d2.loss_dice: 0.5890, decode.d3.loss_cls: 0.0801, decode.d3.loss_mask: 0.2198, decode.d3.loss_dice: 0.5826, decode.d4.loss_cls: 0.0746, decode.d4.loss_mask: 0.2203, decode.d4.loss_dice: 0.5826, decode.d5.loss_cls: 0.0809, decode.d5.loss_mask: 0.2198, decode.d5.loss_dice: 0.5891, decode.d6.loss_cls: 0.0863, decode.d6.loss_mask: 0.2198, decode.d6.loss_dice: 0.5810, decode.d7.loss_cls: 0.0812, decode.d7.loss_mask: 0.2196, decode.d7.loss_dice: 0.5838, decode.d8.loss_cls: 0.0820, decode.d8.loss_mask: 0.2193, decode.d8.loss_dice: 0.5842, loss: 9.1550 +2022-05-10 12:05:07,939 - mmseg - INFO - Iter [34450/80000] lr: 8.175e-07, eta: 1 day, 0:37:17, time: 1.806, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0676, decode.loss_mask: 0.2045, decode.loss_dice: 0.5807, decode.d0.loss_cls: 0.3222, decode.d0.loss_mask: 0.2133, decode.d0.loss_dice: 0.6044, decode.d1.loss_cls: 0.0863, decode.d1.loss_mask: 0.2061, decode.d1.loss_dice: 0.5878, decode.d2.loss_cls: 0.0806, decode.d2.loss_mask: 0.2046, decode.d2.loss_dice: 0.5869, decode.d3.loss_cls: 0.0698, decode.d3.loss_mask: 0.2046, decode.d3.loss_dice: 0.5762, decode.d4.loss_cls: 0.0755, decode.d4.loss_mask: 0.2046, decode.d4.loss_dice: 0.5815, decode.d5.loss_cls: 0.0791, decode.d5.loss_mask: 0.2046, decode.d5.loss_dice: 0.5806, decode.d6.loss_cls: 0.0726, 
decode.d6.loss_mask: 0.2048, decode.d6.loss_dice: 0.5787, decode.d7.loss_cls: 0.0782, decode.d7.loss_mask: 0.2050, decode.d7.loss_dice: 0.5802, decode.d8.loss_cls: 0.0696, decode.d8.loss_mask: 0.2045, decode.d8.loss_dice: 0.5784, loss: 8.8934 +2022-05-10 12:06:36,097 - mmseg - INFO - Iter [34500/80000] lr: 8.166e-07, eta: 1 day, 0:35:28, time: 1.760, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0808, decode.loss_mask: 0.2119, decode.loss_dice: 0.5786, decode.d0.loss_cls: 0.3170, decode.d0.loss_mask: 0.2220, decode.d0.loss_dice: 0.6104, decode.d1.loss_cls: 0.1008, decode.d1.loss_mask: 0.2130, decode.d1.loss_dice: 0.5877, decode.d2.loss_cls: 0.0966, decode.d2.loss_mask: 0.2122, decode.d2.loss_dice: 0.5840, decode.d3.loss_cls: 0.0896, decode.d3.loss_mask: 0.2126, decode.d3.loss_dice: 0.5762, decode.d4.loss_cls: 0.0813, decode.d4.loss_mask: 0.2122, decode.d4.loss_dice: 0.5800, decode.d5.loss_cls: 0.0848, decode.d5.loss_mask: 0.2123, decode.d5.loss_dice: 0.5782, decode.d6.loss_cls: 0.0826, decode.d6.loss_mask: 0.2120, decode.d6.loss_dice: 0.5813, decode.d7.loss_cls: 0.0790, decode.d7.loss_mask: 0.2121, decode.d7.loss_dice: 0.5788, decode.d8.loss_cls: 0.0867, decode.d8.loss_mask: 0.2122, decode.d8.loss_dice: 0.5781, loss: 9.0650 +2022-05-10 12:08:03,263 - mmseg - INFO - Iter [34550/80000] lr: 8.157e-07, eta: 1 day, 0:33:37, time: 1.746, data_time: 0.021, memory: 64699, decode.loss_cls: 0.0767, decode.loss_mask: 0.2105, decode.loss_dice: 0.5763, decode.d0.loss_cls: 0.3237, decode.d0.loss_mask: 0.2211, decode.d0.loss_dice: 0.6116, decode.d1.loss_cls: 0.1001, decode.d1.loss_mask: 0.2137, decode.d1.loss_dice: 0.5903, decode.d2.loss_cls: 0.0885, decode.d2.loss_mask: 0.2123, decode.d2.loss_dice: 0.5869, decode.d3.loss_cls: 0.0832, decode.d3.loss_mask: 0.2125, decode.d3.loss_dice: 0.5831, decode.d4.loss_cls: 0.0891, decode.d4.loss_mask: 0.2115, decode.d4.loss_dice: 0.5835, decode.d5.loss_cls: 0.0821, decode.d5.loss_mask: 0.2107, decode.d5.loss_dice: 0.5812, 
decode.d6.loss_cls: 0.0778, decode.d6.loss_mask: 0.2113, decode.d6.loss_dice: 0.5766, decode.d7.loss_cls: 0.0838, decode.d7.loss_mask: 0.2106, decode.d7.loss_dice: 0.5798, decode.d8.loss_cls: 0.0801, decode.d8.loss_mask: 0.2107, decode.d8.loss_dice: 0.5781, loss: 9.0574 +2022-05-10 12:09:34,200 - mmseg - INFO - Iter [34600/80000] lr: 8.148e-07, eta: 1 day, 0:31:52, time: 1.819, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0844, decode.loss_mask: 0.2126, decode.loss_dice: 0.5946, decode.d0.loss_cls: 0.3160, decode.d0.loss_mask: 0.2223, decode.d0.loss_dice: 0.6245, decode.d1.loss_cls: 0.1005, decode.d1.loss_mask: 0.2133, decode.d1.loss_dice: 0.6046, decode.d2.loss_cls: 0.0976, decode.d2.loss_mask: 0.2127, decode.d2.loss_dice: 0.5973, decode.d3.loss_cls: 0.0859, decode.d3.loss_mask: 0.2123, decode.d3.loss_dice: 0.5934, decode.d4.loss_cls: 0.0884, decode.d4.loss_mask: 0.2126, decode.d4.loss_dice: 0.5959, decode.d5.loss_cls: 0.0859, decode.d5.loss_mask: 0.2133, decode.d5.loss_dice: 0.5969, decode.d6.loss_cls: 0.0790, decode.d6.loss_mask: 0.2128, decode.d6.loss_dice: 0.5926, decode.d7.loss_cls: 0.0887, decode.d7.loss_mask: 0.2123, decode.d7.loss_dice: 0.5931, decode.d8.loss_cls: 0.0800, decode.d8.loss_mask: 0.2126, decode.d8.loss_dice: 0.5964, loss: 9.2326 +2022-05-10 12:11:02,935 - mmseg - INFO - Iter [34650/80000] lr: 8.139e-07, eta: 1 day, 0:30:03, time: 1.775, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0662, decode.loss_mask: 0.2106, decode.loss_dice: 0.5751, decode.d0.loss_cls: 0.2982, decode.d0.loss_mask: 0.2194, decode.d0.loss_dice: 0.6027, decode.d1.loss_cls: 0.0846, decode.d1.loss_mask: 0.2126, decode.d1.loss_dice: 0.5847, decode.d2.loss_cls: 0.0754, decode.d2.loss_mask: 0.2109, decode.d2.loss_dice: 0.5781, decode.d3.loss_cls: 0.0716, decode.d3.loss_mask: 0.2114, decode.d3.loss_dice: 0.5722, decode.d4.loss_cls: 0.0780, decode.d4.loss_mask: 0.2104, decode.d4.loss_dice: 0.5693, decode.d5.loss_cls: 0.0769, decode.d5.loss_mask: 0.2109, 
decode.d5.loss_dice: 0.5719, decode.d6.loss_cls: 0.0721, decode.d6.loss_mask: 0.2106, decode.d6.loss_dice: 0.5712, decode.d7.loss_cls: 0.0708, decode.d7.loss_mask: 0.2106, decode.d7.loss_dice: 0.5717, decode.d8.loss_cls: 0.0679, decode.d8.loss_mask: 0.2108, decode.d8.loss_dice: 0.5720, loss: 8.8489 +2022-05-10 12:12:31,919 - mmseg - INFO - Iter [34700/80000] lr: 8.130e-07, eta: 1 day, 0:28:15, time: 1.780, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0826, decode.loss_mask: 0.2136, decode.loss_dice: 0.5896, decode.d0.loss_cls: 0.3174, decode.d0.loss_mask: 0.2240, decode.d0.loss_dice: 0.6192, decode.d1.loss_cls: 0.0979, decode.d1.loss_mask: 0.2150, decode.d1.loss_dice: 0.5979, decode.d2.loss_cls: 0.0778, decode.d2.loss_mask: 0.2147, decode.d2.loss_dice: 0.5926, decode.d3.loss_cls: 0.0863, decode.d3.loss_mask: 0.2139, decode.d3.loss_dice: 0.5894, decode.d4.loss_cls: 0.0816, decode.d4.loss_mask: 0.2141, decode.d4.loss_dice: 0.5920, decode.d5.loss_cls: 0.0837, decode.d5.loss_mask: 0.2141, decode.d5.loss_dice: 0.5912, decode.d6.loss_cls: 0.0795, decode.d6.loss_mask: 0.2142, decode.d6.loss_dice: 0.5879, decode.d7.loss_cls: 0.0843, decode.d7.loss_mask: 0.2139, decode.d7.loss_dice: 0.5880, decode.d8.loss_cls: 0.0860, decode.d8.loss_mask: 0.2142, decode.d8.loss_dice: 0.5871, loss: 9.1637 +2022-05-10 12:14:00,479 - mmseg - INFO - Iter [34750/80000] lr: 8.121e-07, eta: 1 day, 0:26:27, time: 1.771, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0825, decode.loss_mask: 0.2227, decode.loss_dice: 0.5940, decode.d0.loss_cls: 0.3164, decode.d0.loss_mask: 0.2330, decode.d0.loss_dice: 0.6241, decode.d1.loss_cls: 0.1052, decode.d1.loss_mask: 0.2237, decode.d1.loss_dice: 0.6044, decode.d2.loss_cls: 0.0962, decode.d2.loss_mask: 0.2240, decode.d2.loss_dice: 0.5995, decode.d3.loss_cls: 0.0886, decode.d3.loss_mask: 0.2230, decode.d3.loss_dice: 0.5948, decode.d4.loss_cls: 0.0907, decode.d4.loss_mask: 0.2233, decode.d4.loss_dice: 0.5943, decode.d5.loss_cls: 0.0869, 
decode.d5.loss_mask: 0.2235, decode.d5.loss_dice: 0.5981, decode.d6.loss_cls: 0.0854, decode.d6.loss_mask: 0.2230, decode.d6.loss_dice: 0.5920, decode.d7.loss_cls: 0.0880, decode.d7.loss_mask: 0.2229, decode.d7.loss_dice: 0.5915, decode.d8.loss_cls: 0.0832, decode.d8.loss_mask: 0.2225, decode.d8.loss_dice: 0.5924, loss: 9.3497 +2022-05-10 12:15:31,099 - mmseg - INFO - Iter [34800/80000] lr: 8.112e-07, eta: 1 day, 0:24:41, time: 1.812, data_time: 0.068, memory: 64699, decode.loss_cls: 0.0899, decode.loss_mask: 0.2102, decode.loss_dice: 0.5965, decode.d0.loss_cls: 0.3178, decode.d0.loss_mask: 0.2191, decode.d0.loss_dice: 0.6271, decode.d1.loss_cls: 0.1033, decode.d1.loss_mask: 0.2121, decode.d1.loss_dice: 0.6043, decode.d2.loss_cls: 0.0987, decode.d2.loss_mask: 0.2113, decode.d2.loss_dice: 0.5963, decode.d3.loss_cls: 0.0932, decode.d3.loss_mask: 0.2110, decode.d3.loss_dice: 0.5972, decode.d4.loss_cls: 0.0943, decode.d4.loss_mask: 0.2108, decode.d4.loss_dice: 0.5968, decode.d5.loss_cls: 0.0861, decode.d5.loss_mask: 0.2104, decode.d5.loss_dice: 0.5935, decode.d6.loss_cls: 0.0864, decode.d6.loss_mask: 0.2100, decode.d6.loss_dice: 0.5931, decode.d7.loss_cls: 0.0899, decode.d7.loss_mask: 0.2095, decode.d7.loss_dice: 0.6016, decode.d8.loss_cls: 0.0879, decode.d8.loss_mask: 0.2099, decode.d8.loss_dice: 0.5968, loss: 9.2652 +2022-05-10 12:16:58,885 - mmseg - INFO - Iter [34850/80000] lr: 8.103e-07, eta: 1 day, 0:22:52, time: 1.756, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0707, decode.loss_mask: 0.2117, decode.loss_dice: 0.5987, decode.d0.loss_cls: 0.3002, decode.d0.loss_mask: 0.2233, decode.d0.loss_dice: 0.6330, decode.d1.loss_cls: 0.0908, decode.d1.loss_mask: 0.2131, decode.d1.loss_dice: 0.6102, decode.d2.loss_cls: 0.0779, decode.d2.loss_mask: 0.2129, decode.d2.loss_dice: 0.6083, decode.d3.loss_cls: 0.0765, decode.d3.loss_mask: 0.2123, decode.d3.loss_dice: 0.6008, decode.d4.loss_cls: 0.0778, decode.d4.loss_mask: 0.2119, decode.d4.loss_dice: 0.5972, 
decode.d5.loss_cls: 0.0751, decode.d5.loss_mask: 0.2114, decode.d5.loss_dice: 0.5995, decode.d6.loss_cls: 0.0746, decode.d6.loss_mask: 0.2117, decode.d6.loss_dice: 0.5983, decode.d7.loss_cls: 0.0785, decode.d7.loss_mask: 0.2121, decode.d7.loss_dice: 0.5994, decode.d8.loss_cls: 0.0740, decode.d8.loss_mask: 0.2116, decode.d8.loss_dice: 0.5981, loss: 9.1714 +2022-05-10 12:18:26,737 - mmseg - INFO - Iter [34900/80000] lr: 8.094e-07, eta: 1 day, 0:21:02, time: 1.757, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0791, decode.loss_mask: 0.2108, decode.loss_dice: 0.5883, decode.d0.loss_cls: 0.3167, decode.d0.loss_mask: 0.2209, decode.d0.loss_dice: 0.6138, decode.d1.loss_cls: 0.1060, decode.d1.loss_mask: 0.2132, decode.d1.loss_dice: 0.5967, decode.d2.loss_cls: 0.0886, decode.d2.loss_mask: 0.2118, decode.d2.loss_dice: 0.5930, decode.d3.loss_cls: 0.0769, decode.d3.loss_mask: 0.2107, decode.d3.loss_dice: 0.5905, decode.d4.loss_cls: 0.0824, decode.d4.loss_mask: 0.2108, decode.d4.loss_dice: 0.5891, decode.d5.loss_cls: 0.0843, decode.d5.loss_mask: 0.2106, decode.d5.loss_dice: 0.5870, decode.d6.loss_cls: 0.0744, decode.d6.loss_mask: 0.2105, decode.d6.loss_dice: 0.5849, decode.d7.loss_cls: 0.0811, decode.d7.loss_mask: 0.2106, decode.d7.loss_dice: 0.5883, decode.d8.loss_cls: 0.0829, decode.d8.loss_mask: 0.2103, decode.d8.loss_dice: 0.5894, loss: 9.1137 +2022-05-10 12:19:54,701 - mmseg - INFO - Iter [34950/80000] lr: 8.086e-07, eta: 1 day, 0:19:13, time: 1.760, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0678, decode.loss_mask: 0.2108, decode.loss_dice: 0.5644, decode.d0.loss_cls: 0.3074, decode.d0.loss_mask: 0.2213, decode.d0.loss_dice: 0.5932, decode.d1.loss_cls: 0.0896, decode.d1.loss_mask: 0.2126, decode.d1.loss_dice: 0.5763, decode.d2.loss_cls: 0.0753, decode.d2.loss_mask: 0.2117, decode.d2.loss_dice: 0.5722, decode.d3.loss_cls: 0.0641, decode.d3.loss_mask: 0.2113, decode.d3.loss_dice: 0.5668, decode.d4.loss_cls: 0.0705, decode.d4.loss_mask: 0.2112, 
decode.d4.loss_dice: 0.5691, decode.d5.loss_cls: 0.0643, decode.d5.loss_mask: 0.2113, decode.d5.loss_dice: 0.5678, decode.d6.loss_cls: 0.0659, decode.d6.loss_mask: 0.2106, decode.d6.loss_dice: 0.5674, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.2113, decode.d7.loss_dice: 0.5668, decode.d8.loss_cls: 0.0642, decode.d8.loss_mask: 0.2105, decode.d8.loss_dice: 0.5709, loss: 8.7754 +2022-05-10 12:21:23,812 - mmseg - INFO - Saving checkpoint at 35000 iterations +2022-05-10 12:21:57,436 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 12:21:57,445 - mmseg - INFO - Iter [35000/80000] lr: 8.077e-07, eta: 1 day, 0:18:09, time: 2.452, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0802, decode.loss_mask: 0.2172, decode.loss_dice: 0.5862, decode.d0.loss_cls: 0.3396, decode.d0.loss_mask: 0.2249, decode.d0.loss_dice: 0.6162, decode.d1.loss_cls: 0.0920, decode.d1.loss_mask: 0.2181, decode.d1.loss_dice: 0.5968, decode.d2.loss_cls: 0.0827, decode.d2.loss_mask: 0.2171, decode.d2.loss_dice: 0.5949, decode.d3.loss_cls: 0.0788, decode.d3.loss_mask: 0.2165, decode.d3.loss_dice: 0.5909, decode.d4.loss_cls: 0.0815, decode.d4.loss_mask: 0.2166, decode.d4.loss_dice: 0.5895, decode.d5.loss_cls: 0.0816, decode.d5.loss_mask: 0.2171, decode.d5.loss_dice: 0.5910, decode.d6.loss_cls: 0.0809, decode.d6.loss_mask: 0.2162, decode.d6.loss_dice: 0.5887, decode.d7.loss_cls: 0.0811, decode.d7.loss_mask: 0.2164, decode.d7.loss_dice: 0.5889, decode.d8.loss_cls: 0.0788, decode.d8.loss_mask: 0.2166, decode.d8.loss_dice: 0.5934, loss: 9.1908 +2022-05-10 12:23:53,522 - mmseg - INFO - per class results: +2022-05-10 12:23:53,526 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.57 | 99.25 | +| sidewalk | 88.54 | 93.64 | +| building | 94.38 | 96.93 | +| wall | 65.19 | 81.59 | +| fence | 74.43 | 81.67 | +| pole | 71.52 | 83.71 | +| traffic light | 77.3 | 88.21 | +| traffic sign | 84.12 
| 90.78 | +| vegetation | 93.43 | 96.99 | +| terrain | 69.66 | 78.12 | +| sky | 95.75 | 98.52 | +| person | 86.92 | 93.78 | +| rider | 74.84 | 85.3 | +| car | 96.23 | 98.29 | +| truck | 91.57 | 94.53 | +| bus | 93.81 | 96.39 | +| train | 88.15 | 91.24 | +| motorcycle | 77.98 | 87.95 | +| bicycle | 83.12 | 91.4 | ++---------------+-------+-------+ +2022-05-10 12:23:53,526 - mmseg - INFO - Summary: +2022-05-10 12:23:53,527 - mmseg - INFO - ++------+------+-------+ +| aAcc | mIoU | mAcc | ++------+------+-------+ +| 97.0 | 84.5 | 90.96 | ++------+------+-------+ +2022-05-10 12:23:53,530 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 12:23:53,531 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8450, mAcc: 0.9096, IoU.road: 0.9857, IoU.sidewalk: 0.8854, IoU.building: 0.9438, IoU.wall: 0.6519, IoU.fence: 0.7443, IoU.pole: 0.7152, IoU.traffic light: 0.7730, IoU.traffic sign: 0.8412, IoU.vegetation: 0.9343, IoU.terrain: 0.6966, IoU.sky: 0.9575, IoU.person: 0.8692, IoU.rider: 0.7484, IoU.car: 0.9623, IoU.truck: 0.9157, IoU.bus: 0.9381, IoU.train: 0.8815, IoU.motorcycle: 0.7798, IoU.bicycle: 0.8312, Acc.road: 0.9925, Acc.sidewalk: 0.9364, Acc.building: 0.9693, Acc.wall: 0.8159, Acc.fence: 0.8167, Acc.pole: 0.8371, Acc.traffic light: 0.8821, Acc.traffic sign: 0.9078, Acc.vegetation: 0.9699, Acc.terrain: 0.7812, Acc.sky: 0.9852, Acc.person: 0.9378, Acc.rider: 0.8530, Acc.car: 0.9829, Acc.truck: 0.9453, Acc.bus: 0.9639, Acc.train: 0.9124, Acc.motorcycle: 0.8795, Acc.bicycle: 0.9140 +2022-05-10 12:25:21,813 - mmseg - INFO - Iter [35050/80000] lr: 8.068e-07, eta: 1 day, 0:18:49, time: 4.090, data_time: 2.343, memory: 64699, decode.loss_cls: 0.0639, decode.loss_mask: 0.2064, decode.loss_dice: 0.5741, decode.d0.loss_cls: 0.3138, decode.d0.loss_mask: 0.2152, decode.d0.loss_dice: 0.6074, decode.d1.loss_cls: 0.0861, decode.d1.loss_mask: 0.2089, decode.d1.loss_dice: 0.5868, decode.d2.loss_cls: 0.0754, decode.d2.loss_mask: 0.2076, 
decode.d2.loss_dice: 0.5808, decode.d3.loss_cls: 0.0700, decode.d3.loss_mask: 0.2070, decode.d3.loss_dice: 0.5771, decode.d4.loss_cls: 0.0773, decode.d4.loss_mask: 0.2071, decode.d4.loss_dice: 0.5803, decode.d5.loss_cls: 0.0729, decode.d5.loss_mask: 0.2066, decode.d5.loss_dice: 0.5777, decode.d6.loss_cls: 0.0684, decode.d6.loss_mask: 0.2076, decode.d6.loss_dice: 0.5741, decode.d7.loss_cls: 0.0679, decode.d7.loss_mask: 0.2064, decode.d7.loss_dice: 0.5728, decode.d8.loss_cls: 0.0732, decode.d8.loss_mask: 0.2057, decode.d8.loss_dice: 0.5748, loss: 8.8534 +2022-05-10 12:26:51,234 - mmseg - INFO - Iter [35100/80000] lr: 8.059e-07, eta: 1 day, 0:17:02, time: 1.788, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0732, decode.loss_mask: 0.2150, decode.loss_dice: 0.5971, decode.d0.loss_cls: 0.3063, decode.d0.loss_mask: 0.2249, decode.d0.loss_dice: 0.6312, decode.d1.loss_cls: 0.0919, decode.d1.loss_mask: 0.2170, decode.d1.loss_dice: 0.6092, decode.d2.loss_cls: 0.0879, decode.d2.loss_mask: 0.2160, decode.d2.loss_dice: 0.6076, decode.d3.loss_cls: 0.0777, decode.d3.loss_mask: 0.2155, decode.d3.loss_dice: 0.6056, decode.d4.loss_cls: 0.0824, decode.d4.loss_mask: 0.2150, decode.d4.loss_dice: 0.6031, decode.d5.loss_cls: 0.0827, decode.d5.loss_mask: 0.2152, decode.d5.loss_dice: 0.6009, decode.d6.loss_cls: 0.0778, decode.d6.loss_mask: 0.2153, decode.d6.loss_dice: 0.6032, decode.d7.loss_cls: 0.0769, decode.d7.loss_mask: 0.2154, decode.d7.loss_dice: 0.5989, decode.d8.loss_cls: 0.0823, decode.d8.loss_mask: 0.2147, decode.d8.loss_dice: 0.6022, loss: 9.2620 +2022-05-10 12:28:20,040 - mmseg - INFO - Iter [35150/80000] lr: 8.050e-07, eta: 1 day, 0:15:13, time: 1.776, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0825, decode.loss_mask: 0.2114, decode.loss_dice: 0.5870, decode.d0.loss_cls: 0.3147, decode.d0.loss_mask: 0.2212, decode.d0.loss_dice: 0.6139, decode.d1.loss_cls: 0.0901, decode.d1.loss_mask: 0.2126, decode.d1.loss_dice: 0.5928, decode.d2.loss_cls: 0.0828, 
decode.d2.loss_mask: 0.2120, decode.d2.loss_dice: 0.5944, decode.d3.loss_cls: 0.0814, decode.d3.loss_mask: 0.2117, decode.d3.loss_dice: 0.5840, decode.d4.loss_cls: 0.0833, decode.d4.loss_mask: 0.2113, decode.d4.loss_dice: 0.5853, decode.d5.loss_cls: 0.0820, decode.d5.loss_mask: 0.2120, decode.d5.loss_dice: 0.5866, decode.d6.loss_cls: 0.0817, decode.d6.loss_mask: 0.2120, decode.d6.loss_dice: 0.5854, decode.d7.loss_cls: 0.0776, decode.d7.loss_mask: 0.2123, decode.d7.loss_dice: 0.5887, decode.d8.loss_cls: 0.0785, decode.d8.loss_mask: 0.2122, decode.d8.loss_dice: 0.5859, loss: 9.0876 +2022-05-10 12:29:50,655 - mmseg - INFO - Iter [35200/80000] lr: 8.041e-07, eta: 1 day, 0:13:28, time: 1.812, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0778, decode.loss_mask: 0.2153, decode.loss_dice: 0.5779, decode.d0.loss_cls: 0.3313, decode.d0.loss_mask: 0.2244, decode.d0.loss_dice: 0.6110, decode.d1.loss_cls: 0.0951, decode.d1.loss_mask: 0.2170, decode.d1.loss_dice: 0.5874, decode.d2.loss_cls: 0.0879, decode.d2.loss_mask: 0.2166, decode.d2.loss_dice: 0.5830, decode.d3.loss_cls: 0.0822, decode.d3.loss_mask: 0.2159, decode.d3.loss_dice: 0.5754, decode.d4.loss_cls: 0.0823, decode.d4.loss_mask: 0.2153, decode.d4.loss_dice: 0.5821, decode.d5.loss_cls: 0.0809, decode.d5.loss_mask: 0.2156, decode.d5.loss_dice: 0.5806, decode.d6.loss_cls: 0.0749, decode.d6.loss_mask: 0.2154, decode.d6.loss_dice: 0.5773, decode.d7.loss_cls: 0.0781, decode.d7.loss_mask: 0.2160, decode.d7.loss_dice: 0.5787, decode.d8.loss_cls: 0.0776, decode.d8.loss_mask: 0.2153, decode.d8.loss_dice: 0.5784, loss: 9.0669 +2022-05-10 12:31:18,818 - mmseg - INFO - Iter [35250/80000] lr: 8.032e-07, eta: 1 day, 0:11:39, time: 1.763, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0748, decode.loss_mask: 0.2134, decode.loss_dice: 0.5885, decode.d0.loss_cls: 0.3186, decode.d0.loss_mask: 0.2218, decode.d0.loss_dice: 0.6205, decode.d1.loss_cls: 0.0916, decode.d1.loss_mask: 0.2155, decode.d1.loss_dice: 0.5995, 
decode.d2.loss_cls: 0.0954, decode.d2.loss_mask: 0.2146, decode.d2.loss_dice: 0.5936, decode.d3.loss_cls: 0.0804, decode.d3.loss_mask: 0.2145, decode.d3.loss_dice: 0.5868, decode.d4.loss_cls: 0.0783, decode.d4.loss_mask: 0.2139, decode.d4.loss_dice: 0.5883, decode.d5.loss_cls: 0.0859, decode.d5.loss_mask: 0.2136, decode.d5.loss_dice: 0.5899, decode.d6.loss_cls: 0.0809, decode.d6.loss_mask: 0.2141, decode.d6.loss_dice: 0.5898, decode.d7.loss_cls: 0.0807, decode.d7.loss_mask: 0.2136, decode.d7.loss_dice: 0.5874, decode.d8.loss_cls: 0.0781, decode.d8.loss_mask: 0.2140, decode.d8.loss_dice: 0.5900, loss: 9.1481 +2022-05-10 12:32:46,697 - mmseg - INFO - Iter [35300/80000] lr: 8.023e-07, eta: 1 day, 0:09:49, time: 1.758, data_time: 0.017, memory: 64699, decode.loss_cls: 0.0694, decode.loss_mask: 0.2180, decode.loss_dice: 0.5832, decode.d0.loss_cls: 0.3173, decode.d0.loss_mask: 0.2264, decode.d0.loss_dice: 0.6109, decode.d1.loss_cls: 0.0851, decode.d1.loss_mask: 0.2196, decode.d1.loss_dice: 0.5925, decode.d2.loss_cls: 0.0733, decode.d2.loss_mask: 0.2181, decode.d2.loss_dice: 0.5929, decode.d3.loss_cls: 0.0729, decode.d3.loss_mask: 0.2187, decode.d3.loss_dice: 0.5868, decode.d4.loss_cls: 0.0704, decode.d4.loss_mask: 0.2178, decode.d4.loss_dice: 0.5852, decode.d5.loss_cls: 0.0703, decode.d5.loss_mask: 0.2181, decode.d5.loss_dice: 0.5829, decode.d6.loss_cls: 0.0735, decode.d6.loss_mask: 0.2181, decode.d6.loss_dice: 0.5814, decode.d7.loss_cls: 0.0707, decode.d7.loss_mask: 0.2186, decode.d7.loss_dice: 0.5823, decode.d8.loss_cls: 0.0684, decode.d8.loss_mask: 0.2184, decode.d8.loss_dice: 0.5817, loss: 9.0429 +2022-05-10 12:34:17,436 - mmseg - INFO - Iter [35350/80000] lr: 8.014e-07, eta: 1 day, 0:08:04, time: 1.814, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0718, decode.loss_mask: 0.2061, decode.loss_dice: 0.5768, decode.d0.loss_cls: 0.3020, decode.d0.loss_mask: 0.2135, decode.d0.loss_dice: 0.6050, decode.d1.loss_cls: 0.0888, decode.d1.loss_mask: 0.2075, 
decode.d1.loss_dice: 0.5885, decode.d2.loss_cls: 0.0815, decode.d2.loss_mask: 0.2068, decode.d2.loss_dice: 0.5840, decode.d3.loss_cls: 0.0757, decode.d3.loss_mask: 0.2064, decode.d3.loss_dice: 0.5786, decode.d4.loss_cls: 0.0777, decode.d4.loss_mask: 0.2064, decode.d4.loss_dice: 0.5790, decode.d5.loss_cls: 0.0744, decode.d5.loss_mask: 0.2070, decode.d5.loss_dice: 0.5806, decode.d6.loss_cls: 0.0752, decode.d6.loss_mask: 0.2060, decode.d6.loss_dice: 0.5790, decode.d7.loss_cls: 0.0718, decode.d7.loss_mask: 0.2063, decode.d7.loss_dice: 0.5770, decode.d8.loss_cls: 0.0733, decode.d8.loss_mask: 0.2062, decode.d8.loss_dice: 0.5769, loss: 8.8897 +2022-05-10 12:35:46,140 - mmseg - INFO - Iter [35400/80000] lr: 8.005e-07, eta: 1 day, 0:06:16, time: 1.775, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0720, decode.loss_mask: 0.2111, decode.loss_dice: 0.5882, decode.d0.loss_cls: 0.3125, decode.d0.loss_mask: 0.2195, decode.d0.loss_dice: 0.6216, decode.d1.loss_cls: 0.0964, decode.d1.loss_mask: 0.2134, decode.d1.loss_dice: 0.5960, decode.d2.loss_cls: 0.0883, decode.d2.loss_mask: 0.2125, decode.d2.loss_dice: 0.5915, decode.d3.loss_cls: 0.0844, decode.d3.loss_mask: 0.2118, decode.d3.loss_dice: 0.5904, decode.d4.loss_cls: 0.0894, decode.d4.loss_mask: 0.2115, decode.d4.loss_dice: 0.5898, decode.d5.loss_cls: 0.0816, decode.d5.loss_mask: 0.2117, decode.d5.loss_dice: 0.5919, decode.d6.loss_cls: 0.0747, decode.d6.loss_mask: 0.2109, decode.d6.loss_dice: 0.5871, decode.d7.loss_cls: 0.0789, decode.d7.loss_mask: 0.2113, decode.d7.loss_dice: 0.5864, decode.d8.loss_cls: 0.0706, decode.d8.loss_mask: 0.2109, decode.d8.loss_dice: 0.5869, loss: 9.1031 +2022-05-10 12:37:14,532 - mmseg - INFO - Iter [35450/80000] lr: 7.996e-07, eta: 1 day, 0:04:27, time: 1.768, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0783, decode.loss_mask: 0.2121, decode.loss_dice: 0.5859, decode.d0.loss_cls: 0.3129, decode.d0.loss_mask: 0.2224, decode.d0.loss_dice: 0.6109, decode.d1.loss_cls: 0.0980, 
decode.d1.loss_mask: 0.2154, decode.d1.loss_dice: 0.5950, decode.d2.loss_cls: 0.0836, decode.d2.loss_mask: 0.2130, decode.d2.loss_dice: 0.5883, decode.d3.loss_cls: 0.0781, decode.d3.loss_mask: 0.2124, decode.d3.loss_dice: 0.5829, decode.d4.loss_cls: 0.0790, decode.d4.loss_mask: 0.2134, decode.d4.loss_dice: 0.5864, decode.d5.loss_cls: 0.0776, decode.d5.loss_mask: 0.2130, decode.d5.loss_dice: 0.5876, decode.d6.loss_cls: 0.0746, decode.d6.loss_mask: 0.2125, decode.d6.loss_dice: 0.5833, decode.d7.loss_cls: 0.0767, decode.d7.loss_mask: 0.2123, decode.d7.loss_dice: 0.5813, decode.d8.loss_cls: 0.0733, decode.d8.loss_mask: 0.2128, decode.d8.loss_dice: 0.5817, loss: 9.0548 +2022-05-10 12:38:43,439 - mmseg - INFO - Iter [35500/80000] lr: 7.987e-07, eta: 1 day, 0:02:39, time: 1.778, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0821, decode.loss_mask: 0.2120, decode.loss_dice: 0.5751, decode.d0.loss_cls: 0.3165, decode.d0.loss_mask: 0.2223, decode.d0.loss_dice: 0.6114, decode.d1.loss_cls: 0.0935, decode.d1.loss_mask: 0.2144, decode.d1.loss_dice: 0.5895, decode.d2.loss_cls: 0.0884, decode.d2.loss_mask: 0.2130, decode.d2.loss_dice: 0.5853, decode.d3.loss_cls: 0.0813, decode.d3.loss_mask: 0.2123, decode.d3.loss_dice: 0.5775, decode.d4.loss_cls: 0.0864, decode.d4.loss_mask: 0.2126, decode.d4.loss_dice: 0.5773, decode.d5.loss_cls: 0.0819, decode.d5.loss_mask: 0.2124, decode.d5.loss_dice: 0.5821, decode.d6.loss_cls: 0.0793, decode.d6.loss_mask: 0.2127, decode.d6.loss_dice: 0.5780, decode.d7.loss_cls: 0.0851, decode.d7.loss_mask: 0.2125, decode.d7.loss_dice: 0.5781, decode.d8.loss_cls: 0.0731, decode.d8.loss_mask: 0.2126, decode.d8.loss_dice: 0.5789, loss: 9.0375 +2022-05-10 12:40:14,166 - mmseg - INFO - Iter [35550/80000] lr: 7.978e-07, eta: 1 day, 0:00:54, time: 1.815, data_time: 0.065, memory: 64699, decode.loss_cls: 0.0788, decode.loss_mask: 0.2146, decode.loss_dice: 0.5905, decode.d0.loss_cls: 0.3156, decode.d0.loss_mask: 0.2252, decode.d0.loss_dice: 0.6277, 
decode.d1.loss_cls: 0.1008, decode.d1.loss_mask: 0.2168, decode.d1.loss_dice: 0.6029, decode.d2.loss_cls: 0.0900, decode.d2.loss_mask: 0.2155, decode.d2.loss_dice: 0.5950, decode.d3.loss_cls: 0.0907, decode.d3.loss_mask: 0.2148, decode.d3.loss_dice: 0.5903, decode.d4.loss_cls: 0.0820, decode.d4.loss_mask: 0.2148, decode.d4.loss_dice: 0.5927, decode.d5.loss_cls: 0.0904, decode.d5.loss_mask: 0.2146, decode.d5.loss_dice: 0.5892, decode.d6.loss_cls: 0.0831, decode.d6.loss_mask: 0.2139, decode.d6.loss_dice: 0.5863, decode.d7.loss_cls: 0.0824, decode.d7.loss_mask: 0.2135, decode.d7.loss_dice: 0.5893, decode.d8.loss_cls: 0.0780, decode.d8.loss_mask: 0.2147, decode.d8.loss_dice: 0.5895, loss: 9.2033 +2022-05-10 12:41:42,207 - mmseg - INFO - Iter [35600/80000] lr: 7.969e-07, eta: 23:59:05, time: 1.761, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0773, decode.loss_mask: 0.2109, decode.loss_dice: 0.5857, decode.d0.loss_cls: 0.3097, decode.d0.loss_mask: 0.2200, decode.d0.loss_dice: 0.6131, decode.d1.loss_cls: 0.1029, decode.d1.loss_mask: 0.2121, decode.d1.loss_dice: 0.5906, decode.d2.loss_cls: 0.0913, decode.d2.loss_mask: 0.2117, decode.d2.loss_dice: 0.5856, decode.d3.loss_cls: 0.0872, decode.d3.loss_mask: 0.2114, decode.d3.loss_dice: 0.5818, decode.d4.loss_cls: 0.0887, decode.d4.loss_mask: 0.2105, decode.d4.loss_dice: 0.5788, decode.d5.loss_cls: 0.0885, decode.d5.loss_mask: 0.2103, decode.d5.loss_dice: 0.5801, decode.d6.loss_cls: 0.0804, decode.d6.loss_mask: 0.2109, decode.d6.loss_dice: 0.5826, decode.d7.loss_cls: 0.0809, decode.d7.loss_mask: 0.2106, decode.d7.loss_dice: 0.5833, decode.d8.loss_cls: 0.0830, decode.d8.loss_mask: 0.2105, decode.d8.loss_dice: 0.5766, loss: 9.0671 +2022-05-10 12:43:11,133 - mmseg - INFO - Iter [35650/80000] lr: 7.960e-07, eta: 23:57:18, time: 1.778, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0731, decode.loss_mask: 0.2100, decode.loss_dice: 0.5837, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.2169, decode.d0.loss_dice: 
0.6129, decode.d1.loss_cls: 0.0915, decode.d1.loss_mask: 0.2111, decode.d1.loss_dice: 0.5933, decode.d2.loss_cls: 0.0864, decode.d2.loss_mask: 0.2098, decode.d2.loss_dice: 0.5871, decode.d3.loss_cls: 0.0772, decode.d3.loss_mask: 0.2094, decode.d3.loss_dice: 0.5809, decode.d4.loss_cls: 0.0774, decode.d4.loss_mask: 0.2098, decode.d4.loss_dice: 0.5766, decode.d5.loss_cls: 0.0795, decode.d5.loss_mask: 0.2089, decode.d5.loss_dice: 0.5819, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2097, decode.d6.loss_dice: 0.5823, decode.d7.loss_cls: 0.0738, decode.d7.loss_mask: 0.2099, decode.d7.loss_dice: 0.5829, decode.d8.loss_cls: 0.0745, decode.d8.loss_mask: 0.2095, decode.d8.loss_dice: 0.5837, loss: 8.9864 +2022-05-10 12:44:39,753 - mmseg - INFO - Iter [35700/80000] lr: 7.951e-07, eta: 23:55:30, time: 1.770, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0832, decode.loss_mask: 0.2088, decode.loss_dice: 0.5823, decode.d0.loss_cls: 0.3038, decode.d0.loss_mask: 0.2193, decode.d0.loss_dice: 0.6134, decode.d1.loss_cls: 0.0990, decode.d1.loss_mask: 0.2109, decode.d1.loss_dice: 0.5954, decode.d2.loss_cls: 0.0904, decode.d2.loss_mask: 0.2097, decode.d2.loss_dice: 0.5855, decode.d3.loss_cls: 0.0866, decode.d3.loss_mask: 0.2094, decode.d3.loss_dice: 0.5838, decode.d4.loss_cls: 0.0854, decode.d4.loss_mask: 0.2092, decode.d4.loss_dice: 0.5846, decode.d5.loss_cls: 0.0827, decode.d5.loss_mask: 0.2092, decode.d5.loss_dice: 0.5856, decode.d6.loss_cls: 0.0844, decode.d6.loss_mask: 0.2097, decode.d6.loss_dice: 0.5839, decode.d7.loss_cls: 0.0804, decode.d7.loss_mask: 0.2095, decode.d7.loss_dice: 0.5831, decode.d8.loss_cls: 0.0854, decode.d8.loss_mask: 0.2090, decode.d8.loss_dice: 0.5858, loss: 9.0692 +2022-05-10 12:46:10,381 - mmseg - INFO - Iter [35750/80000] lr: 7.942e-07, eta: 23:53:44, time: 1.815, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0794, decode.loss_mask: 0.2117, decode.loss_dice: 0.5750, decode.d0.loss_cls: 0.3184, decode.d0.loss_mask: 0.2206, 
decode.d0.loss_dice: 0.5994, decode.d1.loss_cls: 0.1039, decode.d1.loss_mask: 0.2145, decode.d1.loss_dice: 0.5827, decode.d2.loss_cls: 0.0877, decode.d2.loss_mask: 0.2132, decode.d2.loss_dice: 0.5761, decode.d3.loss_cls: 0.0897, decode.d3.loss_mask: 0.2130, decode.d3.loss_dice: 0.5748, decode.d4.loss_cls: 0.0869, decode.d4.loss_mask: 0.2117, decode.d4.loss_dice: 0.5782, decode.d5.loss_cls: 0.0835, decode.d5.loss_mask: 0.2134, decode.d5.loss_dice: 0.5778, decode.d6.loss_cls: 0.0798, decode.d6.loss_mask: 0.2123, decode.d6.loss_dice: 0.5709, decode.d7.loss_cls: 0.0805, decode.d7.loss_mask: 0.2118, decode.d7.loss_dice: 0.5742, decode.d8.loss_cls: 0.0765, decode.d8.loss_mask: 0.2124, decode.d8.loss_dice: 0.5759, loss: 9.0059 +2022-05-10 12:47:38,454 - mmseg - INFO - Iter [35800/80000] lr: 7.933e-07, eta: 23:51:56, time: 1.762, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0830, decode.loss_mask: 0.2154, decode.loss_dice: 0.5952, decode.d0.loss_cls: 0.3185, decode.d0.loss_mask: 0.2246, decode.d0.loss_dice: 0.6278, decode.d1.loss_cls: 0.1007, decode.d1.loss_mask: 0.2169, decode.d1.loss_dice: 0.6068, decode.d2.loss_cls: 0.0907, decode.d2.loss_mask: 0.2156, decode.d2.loss_dice: 0.6036, decode.d3.loss_cls: 0.0833, decode.d3.loss_mask: 0.2161, decode.d3.loss_dice: 0.6008, decode.d4.loss_cls: 0.0956, decode.d4.loss_mask: 0.2162, decode.d4.loss_dice: 0.5980, decode.d5.loss_cls: 0.0886, decode.d5.loss_mask: 0.2159, decode.d5.loss_dice: 0.5990, decode.d6.loss_cls: 0.0857, decode.d6.loss_mask: 0.2150, decode.d6.loss_dice: 0.5982, decode.d7.loss_cls: 0.0840, decode.d7.loss_mask: 0.2155, decode.d7.loss_dice: 0.5986, decode.d8.loss_cls: 0.0804, decode.d8.loss_mask: 0.2158, decode.d8.loss_dice: 0.5992, loss: 9.3048 +2022-05-10 12:49:07,136 - mmseg - INFO - Iter [35850/80000] lr: 7.924e-07, eta: 23:50:08, time: 1.774, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0774, decode.loss_mask: 0.2172, decode.loss_dice: 0.5729, decode.d0.loss_cls: 0.3205, decode.d0.loss_mask: 
0.2268, decode.d0.loss_dice: 0.5999, decode.d1.loss_cls: 0.1000, decode.d1.loss_mask: 0.2203, decode.d1.loss_dice: 0.5862, decode.d2.loss_cls: 0.0827, decode.d2.loss_mask: 0.2181, decode.d2.loss_dice: 0.5785, decode.d3.loss_cls: 0.0803, decode.d3.loss_mask: 0.2176, decode.d3.loss_dice: 0.5739, decode.d4.loss_cls: 0.0851, decode.d4.loss_mask: 0.2175, decode.d4.loss_dice: 0.5713, decode.d5.loss_cls: 0.0810, decode.d5.loss_mask: 0.2178, decode.d5.loss_dice: 0.5735, decode.d6.loss_cls: 0.0822, decode.d6.loss_mask: 0.2175, decode.d6.loss_dice: 0.5705, decode.d7.loss_cls: 0.0780, decode.d7.loss_mask: 0.2173, decode.d7.loss_dice: 0.5708, decode.d8.loss_cls: 0.0794, decode.d8.loss_mask: 0.2172, decode.d8.loss_dice: 0.5731, loss: 9.0246 +2022-05-10 12:50:38,053 - mmseg - INFO - Iter [35900/80000] lr: 7.915e-07, eta: 23:48:23, time: 1.818, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0827, decode.loss_mask: 0.2121, decode.loss_dice: 0.5899, decode.d0.loss_cls: 0.3203, decode.d0.loss_mask: 0.2192, decode.d0.loss_dice: 0.6220, decode.d1.loss_cls: 0.0932, decode.d1.loss_mask: 0.2129, decode.d1.loss_dice: 0.5987, decode.d2.loss_cls: 0.0849, decode.d2.loss_mask: 0.2126, decode.d2.loss_dice: 0.5929, decode.d3.loss_cls: 0.0814, decode.d3.loss_mask: 0.2121, decode.d3.loss_dice: 0.5892, decode.d4.loss_cls: 0.0804, decode.d4.loss_mask: 0.2123, decode.d4.loss_dice: 0.5891, decode.d5.loss_cls: 0.0793, decode.d5.loss_mask: 0.2132, decode.d5.loss_dice: 0.5871, decode.d6.loss_cls: 0.0741, decode.d6.loss_mask: 0.2124, decode.d6.loss_dice: 0.5886, decode.d7.loss_cls: 0.0738, decode.d7.loss_mask: 0.2126, decode.d7.loss_dice: 0.5893, decode.d8.loss_cls: 0.0776, decode.d8.loss_mask: 0.2123, decode.d8.loss_dice: 0.5898, loss: 9.1159 +2022-05-10 12:52:06,109 - mmseg - INFO - Iter [35950/80000] lr: 7.906e-07, eta: 23:46:35, time: 1.761, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0706, decode.loss_mask: 0.2093, decode.loss_dice: 0.5792, decode.d0.loss_cls: 0.3007, 
decode.d0.loss_mask: 0.2182, decode.d0.loss_dice: 0.6082, decode.d1.loss_cls: 0.0861, decode.d1.loss_mask: 0.2117, decode.d1.loss_dice: 0.5908, decode.d2.loss_cls: 0.0831, decode.d2.loss_mask: 0.2106, decode.d2.loss_dice: 0.5824, decode.d3.loss_cls: 0.0789, decode.d3.loss_mask: 0.2093, decode.d3.loss_dice: 0.5812, decode.d4.loss_cls: 0.0739, decode.d4.loss_mask: 0.2097, decode.d4.loss_dice: 0.5828, decode.d5.loss_cls: 0.0750, decode.d5.loss_mask: 0.2101, decode.d5.loss_dice: 0.5834, decode.d6.loss_cls: 0.0755, decode.d6.loss_mask: 0.2095, decode.d6.loss_dice: 0.5816, decode.d7.loss_cls: 0.0722, decode.d7.loss_mask: 0.2093, decode.d7.loss_dice: 0.5798, decode.d8.loss_cls: 0.0716, decode.d8.loss_mask: 0.2096, decode.d8.loss_dice: 0.5783, loss: 8.9427 +2022-05-10 12:53:34,129 - mmseg - INFO - Saving checkpoint at 36000 iterations +2022-05-10 12:54:07,178 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 12:54:07,187 - mmseg - INFO - Iter [36000/80000] lr: 7.897e-07, eta: 23:45:27, time: 2.418, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0737, decode.loss_mask: 0.2143, decode.loss_dice: 0.5822, decode.d0.loss_cls: 0.3051, decode.d0.loss_mask: 0.2251, decode.d0.loss_dice: 0.6089, decode.d1.loss_cls: 0.0994, decode.d1.loss_mask: 0.2154, decode.d1.loss_dice: 0.5939, decode.d2.loss_cls: 0.0836, decode.d2.loss_mask: 0.2153, decode.d2.loss_dice: 0.5860, decode.d3.loss_cls: 0.0762, decode.d3.loss_mask: 0.2145, decode.d3.loss_dice: 0.5865, decode.d4.loss_cls: 0.0740, decode.d4.loss_mask: 0.2146, decode.d4.loss_dice: 0.5804, decode.d5.loss_cls: 0.0737, decode.d5.loss_mask: 0.2156, decode.d5.loss_dice: 0.5841, decode.d6.loss_cls: 0.0735, decode.d6.loss_mask: 0.2144, decode.d6.loss_dice: 0.5830, decode.d7.loss_cls: 0.0770, decode.d7.loss_mask: 0.2147, decode.d7.loss_dice: 0.5843, decode.d8.loss_cls: 0.0790, decode.d8.loss_mask: 0.2148, decode.d8.loss_dice: 0.5850, loss: 9.0486 +2022-05-10 12:56:02,453 - mmseg - INFO - per class 
results: +2022-05-10 12:56:02,460 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.54 | 99.25 | +| sidewalk | 88.22 | 93.38 | +| building | 94.38 | 96.81 | +| wall | 66.23 | 80.17 | +| fence | 74.2 | 81.99 | +| pole | 71.79 | 83.38 | +| traffic light | 77.05 | 89.23 | +| traffic sign | 84.03 | 90.36 | +| vegetation | 93.36 | 97.09 | +| terrain | 67.91 | 80.31 | +| sky | 95.93 | 98.28 | +| person | 86.91 | 94.04 | +| rider | 74.39 | 83.62 | +| car | 96.32 | 98.31 | +| truck | 91.28 | 94.69 | +| bus | 93.95 | 96.81 | +| train | 88.24 | 90.95 | +| motorcycle | 76.13 | 88.02 | +| bicycle | 82.94 | 91.93 | ++---------------+-------+-------+ +2022-05-10 12:56:02,461 - mmseg - INFO - Summary: +2022-05-10 12:56:02,461 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 96.98 | 84.3 | 90.98 | ++-------+------+-------+ +2022-05-10 12:56:02,464 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 12:56:02,465 - mmseg - INFO - Iter(val) [32] aAcc: 0.9698, mIoU: 0.8430, mAcc: 0.9098, IoU.road: 0.9854, IoU.sidewalk: 0.8822, IoU.building: 0.9438, IoU.wall: 0.6623, IoU.fence: 0.7420, IoU.pole: 0.7179, IoU.traffic light: 0.7705, IoU.traffic sign: 0.8403, IoU.vegetation: 0.9336, IoU.terrain: 0.6791, IoU.sky: 0.9593, IoU.person: 0.8691, IoU.rider: 0.7439, IoU.car: 0.9632, IoU.truck: 0.9128, IoU.bus: 0.9395, IoU.train: 0.8824, IoU.motorcycle: 0.7613, IoU.bicycle: 0.8294, Acc.road: 0.9925, Acc.sidewalk: 0.9338, Acc.building: 0.9681, Acc.wall: 0.8017, Acc.fence: 0.8199, Acc.pole: 0.8338, Acc.traffic light: 0.8923, Acc.traffic sign: 0.9036, Acc.vegetation: 0.9709, Acc.terrain: 0.8031, Acc.sky: 0.9828, Acc.person: 0.9404, Acc.rider: 0.8362, Acc.car: 0.9831, Acc.truck: 0.9469, Acc.bus: 0.9681, Acc.train: 0.9095, Acc.motorcycle: 0.8802, Acc.bicycle: 0.9193 +2022-05-10 12:57:30,817 - mmseg - INFO - Iter [36050/80000] lr: 7.888e-07, 
eta: 23:46:00, time: 4.075, data_time: 2.328, memory: 64699, decode.loss_cls: 0.0939, decode.loss_mask: 0.2129, decode.loss_dice: 0.5836, decode.d0.loss_cls: 0.3152, decode.d0.loss_mask: 0.2215, decode.d0.loss_dice: 0.6220, decode.d1.loss_cls: 0.1099, decode.d1.loss_mask: 0.2154, decode.d1.loss_dice: 0.6009, decode.d2.loss_cls: 0.1042, decode.d2.loss_mask: 0.2148, decode.d2.loss_dice: 0.5918, decode.d3.loss_cls: 0.0948, decode.d3.loss_mask: 0.2143, decode.d3.loss_dice: 0.5841, decode.d4.loss_cls: 0.1010, decode.d4.loss_mask: 0.2131, decode.d4.loss_dice: 0.5884, decode.d5.loss_cls: 0.0980, decode.d5.loss_mask: 0.2139, decode.d5.loss_dice: 0.5871, decode.d6.loss_cls: 0.0934, decode.d6.loss_mask: 0.2138, decode.d6.loss_dice: 0.5856, decode.d7.loss_cls: 0.0890, decode.d7.loss_mask: 0.2135, decode.d7.loss_dice: 0.5866, decode.d8.loss_cls: 0.0929, decode.d8.loss_mask: 0.2138, decode.d8.loss_dice: 0.5872, loss: 9.2568 +2022-05-10 12:59:02,150 - mmseg - INFO - Iter [36100/80000] lr: 7.879e-07, eta: 23:44:15, time: 1.828, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0763, decode.loss_mask: 0.2072, decode.loss_dice: 0.5863, decode.d0.loss_cls: 0.3154, decode.d0.loss_mask: 0.2168, decode.d0.loss_dice: 0.6137, decode.d1.loss_cls: 0.0983, decode.d1.loss_mask: 0.2075, decode.d1.loss_dice: 0.5937, decode.d2.loss_cls: 0.0868, decode.d2.loss_mask: 0.2072, decode.d2.loss_dice: 0.5910, decode.d3.loss_cls: 0.0890, decode.d3.loss_mask: 0.2069, decode.d3.loss_dice: 0.5847, decode.d4.loss_cls: 0.0813, decode.d4.loss_mask: 0.2064, decode.d4.loss_dice: 0.5842, decode.d5.loss_cls: 0.0794, decode.d5.loss_mask: 0.2071, decode.d5.loss_dice: 0.5813, decode.d6.loss_cls: 0.0819, decode.d6.loss_mask: 0.2071, decode.d6.loss_dice: 0.5847, decode.d7.loss_cls: 0.0822, decode.d7.loss_mask: 0.2074, decode.d7.loss_dice: 0.5841, decode.d8.loss_cls: 0.0786, decode.d8.loss_mask: 0.2071, decode.d8.loss_dice: 0.5862, loss: 9.0395 +2022-05-10 13:00:30,291 - mmseg - INFO - Iter [36150/80000] lr: 
7.870e-07, eta: 23:42:26, time: 1.762, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0656, decode.loss_mask: 0.2105, decode.loss_dice: 0.5886, decode.d0.loss_cls: 0.2939, decode.d0.loss_mask: 0.2181, decode.d0.loss_dice: 0.6177, decode.d1.loss_cls: 0.0844, decode.d1.loss_mask: 0.2115, decode.d1.loss_dice: 0.5956, decode.d2.loss_cls: 0.0816, decode.d2.loss_mask: 0.2101, decode.d2.loss_dice: 0.5913, decode.d3.loss_cls: 0.0793, decode.d3.loss_mask: 0.2105, decode.d3.loss_dice: 0.5830, decode.d4.loss_cls: 0.0809, decode.d4.loss_mask: 0.2107, decode.d4.loss_dice: 0.5847, decode.d5.loss_cls: 0.0732, decode.d5.loss_mask: 0.2097, decode.d5.loss_dice: 0.5859, decode.d6.loss_cls: 0.0728, decode.d6.loss_mask: 0.2101, decode.d6.loss_dice: 0.5846, decode.d7.loss_cls: 0.0730, decode.d7.loss_mask: 0.2102, decode.d7.loss_dice: 0.5855, decode.d8.loss_cls: 0.0721, decode.d8.loss_mask: 0.2103, decode.d8.loss_dice: 0.5857, loss: 8.9910 +2022-05-10 13:01:58,422 - mmseg - INFO - Iter [36200/80000] lr: 7.861e-07, eta: 23:40:38, time: 1.763, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0782, decode.loss_mask: 0.2103, decode.loss_dice: 0.5721, decode.d0.loss_cls: 0.3255, decode.d0.loss_mask: 0.2188, decode.d0.loss_dice: 0.5992, decode.d1.loss_cls: 0.1022, decode.d1.loss_mask: 0.2125, decode.d1.loss_dice: 0.5878, decode.d2.loss_cls: 0.0843, decode.d2.loss_mask: 0.2114, decode.d2.loss_dice: 0.5812, decode.d3.loss_cls: 0.0818, decode.d3.loss_mask: 0.2109, decode.d3.loss_dice: 0.5712, decode.d4.loss_cls: 0.0826, decode.d4.loss_mask: 0.2110, decode.d4.loss_dice: 0.5742, decode.d5.loss_cls: 0.0792, decode.d5.loss_mask: 0.2112, decode.d5.loss_dice: 0.5757, decode.d6.loss_cls: 0.0846, decode.d6.loss_mask: 0.2106, decode.d6.loss_dice: 0.5775, decode.d7.loss_cls: 0.0812, decode.d7.loss_mask: 0.2100, decode.d7.loss_dice: 0.5748, decode.d8.loss_cls: 0.0800, decode.d8.loss_mask: 0.2102, decode.d8.loss_dice: 0.5744, loss: 8.9845 +2022-05-10 13:03:25,240 - mmseg - INFO - Iter [36250/80000] 
lr: 7.852e-07, eta: 23:38:48, time: 1.736, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0872, decode.loss_mask: 0.2174, decode.loss_dice: 0.5854, decode.d0.loss_cls: 0.3139, decode.d0.loss_mask: 0.2281, decode.d0.loss_dice: 0.6224, decode.d1.loss_cls: 0.1105, decode.d1.loss_mask: 0.2202, decode.d1.loss_dice: 0.6002, decode.d2.loss_cls: 0.0950, decode.d2.loss_mask: 0.2185, decode.d2.loss_dice: 0.5917, decode.d3.loss_cls: 0.0865, decode.d3.loss_mask: 0.2187, decode.d3.loss_dice: 0.5879, decode.d4.loss_cls: 0.0916, decode.d4.loss_mask: 0.2178, decode.d4.loss_dice: 0.5849, decode.d5.loss_cls: 0.0850, decode.d5.loss_mask: 0.2181, decode.d5.loss_dice: 0.5822, decode.d6.loss_cls: 0.0869, decode.d6.loss_mask: 0.2177, decode.d6.loss_dice: 0.5840, decode.d7.loss_cls: 0.0894, decode.d7.loss_mask: 0.2168, decode.d7.loss_dice: 0.5866, decode.d8.loss_cls: 0.0858, decode.d8.loss_mask: 0.2170, decode.d8.loss_dice: 0.5807, loss: 9.2281 +2022-05-10 13:04:56,092 - mmseg - INFO - Iter [36300/80000] lr: 7.843e-07, eta: 23:37:03, time: 1.817, data_time: 0.066, memory: 64699, decode.loss_cls: 0.0758, decode.loss_mask: 0.2116, decode.loss_dice: 0.5863, decode.d0.loss_cls: 0.3172, decode.d0.loss_mask: 0.2209, decode.d0.loss_dice: 0.6270, decode.d1.loss_cls: 0.1051, decode.d1.loss_mask: 0.2126, decode.d1.loss_dice: 0.6032, decode.d2.loss_cls: 0.0874, decode.d2.loss_mask: 0.2117, decode.d2.loss_dice: 0.5977, decode.d3.loss_cls: 0.0767, decode.d3.loss_mask: 0.2115, decode.d3.loss_dice: 0.5913, decode.d4.loss_cls: 0.0830, decode.d4.loss_mask: 0.2115, decode.d4.loss_dice: 0.5892, decode.d5.loss_cls: 0.0776, decode.d5.loss_mask: 0.2118, decode.d5.loss_dice: 0.5921, decode.d6.loss_cls: 0.0772, decode.d6.loss_mask: 0.2114, decode.d6.loss_dice: 0.5883, decode.d7.loss_cls: 0.0790, decode.d7.loss_mask: 0.2114, decode.d7.loss_dice: 0.5914, decode.d8.loss_cls: 0.0756, decode.d8.loss_mask: 0.2115, decode.d8.loss_dice: 0.5902, loss: 9.1374 +2022-05-10 13:06:23,964 - mmseg - INFO - Iter 
[36350/80000] lr: 7.834e-07, eta: 23:35:14, time: 1.757, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0754, decode.loss_mask: 0.2056, decode.loss_dice: 0.5654, decode.d0.loss_cls: 0.3027, decode.d0.loss_mask: 0.2143, decode.d0.loss_dice: 0.5994, decode.d1.loss_cls: 0.1000, decode.d1.loss_mask: 0.2074, decode.d1.loss_dice: 0.5792, decode.d2.loss_cls: 0.0859, decode.d2.loss_mask: 0.2069, decode.d2.loss_dice: 0.5748, decode.d3.loss_cls: 0.0800, decode.d3.loss_mask: 0.2062, decode.d3.loss_dice: 0.5700, decode.d4.loss_cls: 0.0792, decode.d4.loss_mask: 0.2063, decode.d4.loss_dice: 0.5653, decode.d5.loss_cls: 0.0815, decode.d5.loss_mask: 0.2064, decode.d5.loss_dice: 0.5680, decode.d6.loss_cls: 0.0763, decode.d6.loss_mask: 0.2061, decode.d6.loss_dice: 0.5698, decode.d7.loss_cls: 0.0785, decode.d7.loss_mask: 0.2060, decode.d7.loss_dice: 0.5709, decode.d8.loss_cls: 0.0797, decode.d8.loss_mask: 0.2063, decode.d8.loss_dice: 0.5705, loss: 8.8441 +2022-05-10 13:07:51,322 - mmseg - INFO - Iter [36400/80000] lr: 7.825e-07, eta: 23:33:25, time: 1.747, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0747, decode.loss_mask: 0.2078, decode.loss_dice: 0.5978, decode.d0.loss_cls: 0.3143, decode.d0.loss_mask: 0.2153, decode.d0.loss_dice: 0.6337, decode.d1.loss_cls: 0.0924, decode.d1.loss_mask: 0.2092, decode.d1.loss_dice: 0.6085, decode.d2.loss_cls: 0.0830, decode.d2.loss_mask: 0.2087, decode.d2.loss_dice: 0.6018, decode.d3.loss_cls: 0.0839, decode.d3.loss_mask: 0.2078, decode.d3.loss_dice: 0.5989, decode.d4.loss_cls: 0.0809, decode.d4.loss_mask: 0.2081, decode.d4.loss_dice: 0.5994, decode.d5.loss_cls: 0.0781, decode.d5.loss_mask: 0.2075, decode.d5.loss_dice: 0.5975, decode.d6.loss_cls: 0.0722, decode.d6.loss_mask: 0.2070, decode.d6.loss_dice: 0.5972, decode.d7.loss_cls: 0.0800, decode.d7.loss_mask: 0.2077, decode.d7.loss_dice: 0.5991, decode.d8.loss_cls: 0.0805, decode.d8.loss_mask: 0.2077, decode.d8.loss_dice: 0.5953, loss: 9.1558 +2022-05-10 13:09:19,580 - mmseg - INFO - 
Iter [36450/80000] lr: 7.816e-07, eta: 23:31:37, time: 1.765, data_time: 0.019, memory: 64699, decode.loss_cls: 0.0749, decode.loss_mask: 0.2102, decode.loss_dice: 0.5627, decode.d0.loss_cls: 0.3127, decode.d0.loss_mask: 0.2185, decode.d0.loss_dice: 0.5946, decode.d1.loss_cls: 0.0979, decode.d1.loss_mask: 0.2125, decode.d1.loss_dice: 0.5770, decode.d2.loss_cls: 0.0836, decode.d2.loss_mask: 0.2111, decode.d2.loss_dice: 0.5701, decode.d3.loss_cls: 0.0795, decode.d3.loss_mask: 0.2103, decode.d3.loss_dice: 0.5674, decode.d4.loss_cls: 0.0826, decode.d4.loss_mask: 0.2102, decode.d4.loss_dice: 0.5615, decode.d5.loss_cls: 0.0833, decode.d5.loss_mask: 0.2104, decode.d5.loss_dice: 0.5628, decode.d6.loss_cls: 0.0805, decode.d6.loss_mask: 0.2109, decode.d6.loss_dice: 0.5612, decode.d7.loss_cls: 0.0799, decode.d7.loss_mask: 0.2111, decode.d7.loss_dice: 0.5662, decode.d8.loss_cls: 0.0723, decode.d8.loss_mask: 0.2105, decode.d8.loss_dice: 0.5638, loss: 8.8503 +2022-05-10 13:10:49,986 - mmseg - INFO - Iter [36500/80000] lr: 7.807e-07, eta: 23:29:52, time: 1.808, data_time: 0.067, memory: 64699, decode.loss_cls: 0.0804, decode.loss_mask: 0.2057, decode.loss_dice: 0.5935, decode.d0.loss_cls: 0.3181, decode.d0.loss_mask: 0.2141, decode.d0.loss_dice: 0.6206, decode.d1.loss_cls: 0.0976, decode.d1.loss_mask: 0.2075, decode.d1.loss_dice: 0.6003, decode.d2.loss_cls: 0.0868, decode.d2.loss_mask: 0.2069, decode.d2.loss_dice: 0.5948, decode.d3.loss_cls: 0.0817, decode.d3.loss_mask: 0.2057, decode.d3.loss_dice: 0.5954, decode.d4.loss_cls: 0.0809, decode.d4.loss_mask: 0.2057, decode.d4.loss_dice: 0.5939, decode.d5.loss_cls: 0.0816, decode.d5.loss_mask: 0.2064, decode.d5.loss_dice: 0.5936, decode.d6.loss_cls: 0.0736, decode.d6.loss_mask: 0.2058, decode.d6.loss_dice: 0.5934, decode.d7.loss_cls: 0.0843, decode.d7.loss_mask: 0.2056, decode.d7.loss_dice: 0.5947, decode.d8.loss_cls: 0.0818, decode.d8.loss_mask: 0.2055, decode.d8.loss_dice: 0.5943, loss: 9.1103 +2022-05-10 13:12:17,845 - mmseg - INFO 
- Iter [36550/80000] lr: 7.798e-07, eta: 23:28:04, time: 1.757, data_time: 0.018, memory: 64699, decode.loss_cls: 0.0737, decode.loss_mask: 0.2122, decode.loss_dice: 0.5758, decode.d0.loss_cls: 0.3176, decode.d0.loss_mask: 0.2208, decode.d0.loss_dice: 0.6034, decode.d1.loss_cls: 0.0937, decode.d1.loss_mask: 0.2138, decode.d1.loss_dice: 0.5814, decode.d2.loss_cls: 0.0805, decode.d2.loss_mask: 0.2128, decode.d2.loss_dice: 0.5785, decode.d3.loss_cls: 0.0760, decode.d3.loss_mask: 0.2126, decode.d3.loss_dice: 0.5753, decode.d4.loss_cls: 0.0778, decode.d4.loss_mask: 0.2123, decode.d4.loss_dice: 0.5777, decode.d5.loss_cls: 0.0778, decode.d5.loss_mask: 0.2115, decode.d5.loss_dice: 0.5760, decode.d6.loss_cls: 0.0762, decode.d6.loss_mask: 0.2126, decode.d6.loss_dice: 0.5745, decode.d7.loss_cls: 0.0778, decode.d7.loss_mask: 0.2124, decode.d7.loss_dice: 0.5748, decode.d8.loss_cls: 0.0765, decode.d8.loss_mask: 0.2121, decode.d8.loss_dice: 0.5742, loss: 8.9523 +2022-05-10 13:13:46,245 - mmseg - INFO - Iter [36600/80000] lr: 7.789e-07, eta: 23:26:16, time: 1.768, data_time: 0.020, memory: 64699, decode.loss_cls: 0.0828, decode.loss_mask: 0.2125, decode.loss_dice: 0.5798, decode.d0.loss_cls: 0.3072, decode.d0.loss_mask: 0.2231, decode.d0.loss_dice: 0.6101, decode.d1.loss_cls: 0.0973, decode.d1.loss_mask: 0.2134, decode.d1.loss_dice: 0.5907, decode.d2.loss_cls: 0.0910, decode.d2.loss_mask: 0.2133, decode.d2.loss_dice: 0.5826, decode.d3.loss_cls: 0.0854, decode.d3.loss_mask: 0.2130, decode.d3.loss_dice: 0.5796, decode.d4.loss_cls: 0.0824, decode.d4.loss_mask: 0.2134, decode.d4.loss_dice: 0.5821, decode.d5.loss_cls: 0.0842, decode.d5.loss_mask: 0.2129, decode.d5.loss_dice: 0.5799, decode.d6.loss_cls: 0.0859, decode.d6.loss_mask: 0.2126, decode.d6.loss_dice: 0.5807, decode.d7.loss_cls: 0.0844, decode.d7.loss_mask: 0.2133, decode.d7.loss_dice: 0.5817, decode.d8.loss_cls: 0.0817, decode.d8.loss_mask: 0.2130, decode.d8.loss_dice: 0.5799, loss: 9.0702 +2022-05-10 13:21:40,285 - mmseg - 
INFO - Environment info: +------------------------------------------------------------ +sys.platform: linux +Python: 3.7.11 (default, Jul 27 2021, 14:32:16) [GCC 7.5.0] +CUDA available: True +GPU 0,1,2,3,4,5,6,7: A100-SXM-80GB +CUDA_HOME: /mnt/lustre/share/cuda-11.1 +NVCC: Build cuda_11.1.TC455_06.29069683_0 +GCC: gcc (GCC) 5.4.0 +PyTorch: 1.9.0+cu111 +PyTorch compiling details: PyTorch built with: + - GCC 7.3 + - C++ Version: 201402 + - Intel(R) Math Kernel Library Version 2020.0.0 Product Build 20191122 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v2.1.2 (Git Hash 98be7e8afa711dc9b66c8ff3504129cb82013cdb) + - OpenMP 201511 (a.k.a. OpenMP 4.5) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.1 + - NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86 + - CuDNN 8.0.5 + - Magma 2.5.2 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.1, CUDNN_VERSION=8.0.5, CXX_COMPILER=/opt/rh/devtoolset-7/root/usr/bin/c++, CXX_FLAGS= -Wno-deprecated -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -fopenmp -DNDEBUG -DUSE_KINETO -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wno-narrowing -Wall -Wextra -Werror=return-type -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-sign-compare -Wno-unused-parameter -Wno-unused-variable -Wno-unused-function -Wno-unused-result -Wno-unused-local-typedefs -Wno-strict-overflow -Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math 
-Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=1.9.0, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, + +TorchVision: 0.10.0+cu111 +OpenCV: 4.5.5 +MMCV: 1.4.2 +MMCV Compiler: GCC 7.3 +MMCV CUDA Compiler: 11.1 +MMSegmentation: 0.20.2+ +------------------------------------------------------------ + +2022-05-10 13:21:40,286 - mmseg - INFO - Distributed training: True +2022-05-10 13:21:40,863 - mmseg - INFO - Config: +num_things_classes = 8 +num_stuff_classes = 11 +num_classes = 19 +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + type='EncoderDecoderMask2Former', + pretrained='pretrained/beit_large_patch16_224_pt22k_ft22k.pth', + backbone=dict( + type='BEiTAdapter', + patch_size=16, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + qkv_bias=True, + use_abs_pos_emb=False, + use_rel_pos_bias=True, + img_size=896, + init_values=1e-06, + drop_path_rate=0.3, + conv_inplane=64, + n_points=4, + deform_num_heads=16, + interact_with_ffn=True, + interact_ffn_ratio=0.25, + interact_deform_ratio=0.5, + extract_with_ffn=True, + extract_ffn_ratio=0.25, + extract_deform_ratio=0.5, + num_extract_block=2, + add_vit_feature=True, + interact_indexes=[[0, 5], [6, 11], [12, 17], [18, 23]]), + decode_head=dict( + type='Mask2FormerHead', + in_channels=[1024, 1024, 1024, 1024], + feat_channels=1024, + out_channels=1024, + in_index=[0, 1, 2, 3], + num_things_classes=8, + num_stuff_classes=11, + num_queries=100, + num_transformer_feat_level=3, + pixel_decoder=dict( + type='MSDeformAttnPixelDecoder', + num_outs=3, + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU'), + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', + 
embed_dims=1024, + num_heads=32, + num_levels=3, + num_points=4, + im2col_step=64, + dropout=0.0, + batch_first=False, + norm_cfg=None, + init_cfg=None), + ffn_cfgs=dict( + type='FFN', + embed_dims=1024, + feedforward_channels=4096, + num_fcs=2, + ffn_drop=0.0, + act_cfg=dict(type='ReLU', inplace=True)), + operation_order=('self_attn', 'norm', 'ffn', 'norm')), + init_cfg=None), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=512, normalize=True), + init_cfg=None), + enforce_decoder_input_project=False, + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=512, normalize=True), + transformer_decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=9, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=1024, + num_heads=32, + attn_drop=0.0, + proj_drop=0.0, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=1024, + feedforward_channels=4096, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.0, + dropout_layer=None, + add_identity=True), + feedforward_channels=4096, + operation_order=('cross_attn', 'norm', 'self_attn', 'norm', + 'ffn', 'norm')), + init_cfg=None), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=2.0, + reduction='mean', + class_weight=[ + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.1 + ]), + loss_mask=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + reduction='mean', + loss_weight=5.0), + loss_dice=dict( + type='DiceLoss', + use_sigmoid=True, + activate=True, + reduction='mean', + naive_dice=True, + eps=1.0, + loss_weight=5.0)), + train_cfg=dict( + num_points=12544, + oversample_ratio=3.0, + importance_sample_ratio=0.75, + assigner=dict( + type='MaskHungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=2.0), + mask_cost=dict( + type='CrossEntropyLossCost', 
weight=5.0, use_sigmoid=True), + dice_cost=dict( + type='DiceCost', weight=5.0, pred_act=True, eps=1.0)), + sampler=dict(type='MaskPseudoSampler')), + test_cfg=dict( + panoptic_on=True, + semantic_on=False, + instance_on=True, + max_per_image=100, + iou_thr=0.8, + filter_low_score=True, + mode='slide', + crop_size=(896, 896), + stride=(512, 512)), + init_cfg=None) +find_unused_parameters = True +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +crop_size = (896, 896) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='Resize', img_scale=(2048, 1024), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(896, 896), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(896, 896), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/train', + ann_dir='gtFine/train', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict( + type='Resize', img_scale=(2048, 1024), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', 
crop_size=(896, 896), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(896, 896), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) + ]), + val=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/val', + ann_dir='gtFine/val', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ]), + test=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/val', + ann_dir='gtFine/val', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ])) +log_config = dict( + interval=50, hooks=[dict(type='TextLoggerHook', by_epoch=False)]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = 'work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/latest.pth' +workflow = [('train', 1)] +cudnn_benchmark = True +optimizer = dict( + type='AdamW', + lr=2e-05, + betas=(0.9, 0.999), + weight_decay=0.05, + constructor='LayerDecayOptimizerConstructor', + paramwise_cfg=dict(num_layers=24, 
layer_decay_rate=0.9)) +optimizer_config = dict() +lr_config = dict( + policy='poly', + warmup='linear', + warmup_iters=1500, + warmup_ratio=1e-06, + power=1.0, + min_lr=0.0, + by_epoch=False) +runner = dict(type='IterBasedRunner', max_iters=80000) +checkpoint_config = dict(by_epoch=False, interval=1000, max_keep_ckpts=1) +evaluation = dict( + interval=1000, metric='mIoU', pre_eval=True, save_best='mIoU') +work_dir = './work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss' +gpu_ids = range(0, 16) +auto_resume = False + +2022-05-10 13:21:48,831 - mmseg - INFO - Set random seed to 923664152, deterministic: False +2022-05-10 13:22:28,222 - mmseg - WARNING - The model and loaded state dict do not match exactly + +unexpected key in source state_dict: fc_norm.weight, fc_norm.bias, head.weight, head.bias + +missing keys in source state_dict: blocks.0.attn.relative_position_index, blocks.1.attn.relative_position_index, blocks.2.attn.relative_position_index, blocks.3.attn.relative_position_index, blocks.4.attn.relative_position_index, blocks.5.attn.relative_position_index, blocks.6.attn.relative_position_index, blocks.7.attn.relative_position_index, blocks.8.attn.relative_position_index, blocks.9.attn.relative_position_index, blocks.10.attn.relative_position_index, blocks.11.attn.relative_position_index, blocks.12.attn.relative_position_index, blocks.13.attn.relative_position_index, blocks.14.attn.relative_position_index, blocks.15.attn.relative_position_index, blocks.16.attn.relative_position_index, blocks.17.attn.relative_position_index, blocks.18.attn.relative_position_index, blocks.19.attn.relative_position_index, blocks.20.attn.relative_position_index, blocks.21.attn.relative_position_index, blocks.22.attn.relative_position_index, blocks.23.attn.relative_position_index + +Name of parameter - Initialization information + +backbone.cls_token - torch.Size([1, 1, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.level_embed - torch.Size([3, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.weight - torch.Size([1024, 3, 16, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.weight - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the 
same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.2.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former 
+ +backbone.blocks.3.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.3.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.weight - 
torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.bias - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.7.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.8.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.8.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.weight - torch.Size([1024, 1024]): 
+The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm1.weight - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.14.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.15.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.q_bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.gamma_2 - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc2.weight - 
torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm2.bias - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.proj.weight - torch.Size([1024, 1024]): +The value is the 
same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.20.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.21.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.22.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.23.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.0.weight - torch.Size([64, 3, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.1.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.1.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.3.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.4.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.4.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.6.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.conv_branch.stem.7.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.7.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.0.weight - torch.Size([128, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.1.weight - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.1.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.0.weight - torch.Size([256, 128, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.0.weight - torch.Size([256, 256, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc1.weight - torch.Size([1024, 64, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.conv_branch.fc2.weight - torch.Size([1024, 128, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc3.weight - torch.Size([1024, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc3.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc4.weight - torch.Size([1024, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.0.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value 
is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.0.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.1.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.2.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.attention_weights.bias - torch.Size([192]): +The value is 
the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn_norm.bias - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.value_proj.bias - 
torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.query_norm.bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc2.weight - 
torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.weight - torch.Size([1024, 1024, 2, 2]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.conv_seg.weight - torch.Size([19, 1024, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.bias - torch.Size([19]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.0.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.1.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.2.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.weight - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.bias - 
torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in 
Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.level_encoding.weight - torch.Size([3, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.conv.weight - 
torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.lateral_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.conv.weight - torch.Size([1024, 1024, 3, 3]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.output_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.mask_feature.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.mask_feature.bias - torch.Size([1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_weight - 
torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.weight - 
torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_weight - torch.Size([3072, 
1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.weight - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.weight - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before 
and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_embed.weight - torch.Size([100, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_feat.weight - torch.Size([100, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.level_embed.weight - torch.Size([3, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.weight - torch.Size([20, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.bias - torch.Size([20]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former +2022-05-10 13:23:01,759 - mmseg - INFO - EncoderDecoderMask2Former( + (backbone): BEiTAdapter( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 1024, kernel_size=(16, 16), stride=(16, 16)) + ) + (pos_drop): Dropout(p=0.0, 
inplace=False) + (blocks): ModuleList( + (0): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): Identity() + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (1): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.013043479062616825) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (2): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.02608695812523365) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (3): Block( + (norm1): LayerNorm((1024,), 
eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.03913043811917305) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (4): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0521739162504673) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (5): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.06521739810705185) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (6): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): 
Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0782608762383461) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (7): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.09130435436964035) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (8): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1043478325009346) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (9): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + 
(attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.11739131063222885) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (10): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1304347962141037) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (11): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.14347827434539795) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (12): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): 
Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1565217524766922) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (13): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.16956523060798645) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (14): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1826087087392807) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (15): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + 
(proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.19565218687057495) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (16): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2086956650018692) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (17): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.22173914313316345) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (18): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): 
DropPath(p=0.2347826212644577) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (19): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.24782609939575195) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (20): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.260869562625885) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (21): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.27391305565834045) + (norm2): LayerNorm((1024,), 
eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (22): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2869565188884735) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (23): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.30000001192092896) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + (conv_branch): ConvBranch( + (stem): Sequential( + (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + (6): 
Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (7): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (8): ReLU(inplace=True) + (9): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) + ) + (conv2): Sequential( + (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv3): Sequential( + (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv4): Sequential( + (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (fc1): Conv2d(64, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc2): Conv2d(128, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc4): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (interact_blocks): Sequential( + (0): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): 
Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (1): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): 
Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (2): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (3): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + 
(output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + ) + (extract_blocks): Sequential( + (0): ExtractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + (1): ExtractBlock( + (extract): 
ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + ) + (up): ConvTranspose2d(1024, 1024, kernel_size=(2, 2), stride=(2, 2)) + (norm1): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm2): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm3): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm4): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + ) + (decode_head): Mask2FormerHead( + input_transform=multiple_select, ignore_index=255, align_corners=False + (loss_decode): CrossEntropyLoss(avg_non_ignore=False) + (conv_seg): Conv2d(1024, 19, kernel_size=(1, 1), stride=(1, 1)) + (dropout): Dropout2d(p=0.1, inplace=False) + (pixel_decoder): MSDeformAttnPixelDecoder( + (input_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + (1): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + (2): 
ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + ) + (encoder): DetrTransformerEncoder( + (layers): ModuleList( + (0): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + 
(norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): 
LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, 
elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + ) + (postional_encoding): SinePositionalEncoding(num_feats=512, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (level_encoding): Embedding(3, 1024) + (lateral_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + ) + (output_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + (activate): ReLU(inplace=True) + ) + ) + (mask_feature): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (transformer_decoder): DetrTransformerDecoder( + (layers): ModuleList( + (0): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): 
DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) 
+ ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + 
(layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (6): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): 
MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (7): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (8): DetrTransformerDecoderLayer( + (attentions): ModuleList( + 
(0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + (post_norm): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + (decoder_input_projs): ModuleList( + (0): Identity() + (1): Identity() + (2): Identity() + ) + (decoder_positional_encoding): SinePositionalEncoding(num_feats=512, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (query_embed): Embedding(100, 1024) + (query_feat): Embedding(100, 1024) + (level_embed): Embedding(3, 1024) + (cls_embed): Linear(in_features=1024, out_features=20, bias=True) + (mask_embed): Sequential( + (0): Linear(in_features=1024, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Linear(in_features=1024, out_features=1024, bias=True) + (3): ReLU(inplace=True) + (4): Linear(in_features=1024, out_features=1024, bias=True) + ) + (loss_cls): CrossEntropyLoss(avg_non_ignore=False) + (loss_mask): CrossEntropyLoss(avg_non_ignore=False) + (loss_dice): DiceLoss() + ) +) +2022-05-10 13:23:02,229 - 
mmseg - INFO - Loaded 2975 images +2022-05-10 13:23:04,598 - mmseg - INFO - Loaded 500 images +2022-05-10 13:23:04,599 - mmseg - INFO - load checkpoint from local path: work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/latest.pth +2022-05-10 13:23:47,647 - mmseg - INFO - resumed from epoch: 194, iter 35999 +2022-05-10 13:23:47,657 - mmseg - INFO - Start running, host: chenzhe.vendor@SH-IDC1-10-140-1-141, work_dir: /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss +2022-05-10 13:23:47,658 - mmseg - INFO - Hooks will be executed in the following order: +before_run: +(VERY_HIGH ) PolyLrUpdaterHook +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_epoch: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_iter: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook + -------------------- +after_train_iter: +(ABOVE_NORMAL) OptimizerHook +(NORMAL ) CheckpointHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +after_train_epoch: +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_epoch: +(LOW ) IterTimerHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_epoch: +(VERY_LOW ) TextLoggerHook + -------------------- +after_run: +(VERY_LOW ) TextLoggerHook + -------------------- +2022-05-10 13:23:47,660 - mmseg - INFO - workflow: [('train', 1)], max: 80000 iters +2022-05-10 13:23:47,661 - mmseg - INFO - Checkpoints will be saved to /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss by HardDiskBackend. 
+2022-05-10 13:24:41,222 - mmseg - INFO - Saving checkpoint at 36000 iterations +2022-05-10 13:25:18,727 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 13:25:18,738 - mmseg - INFO - Iter [36000/80000] lr: 7.897e-07, eta: 1476 days, 20:29:41, time: 58.000, data_time: 0.272, memory: 69053, decode.loss_cls: 0.0288, decode.loss_mask: 0.2188, decode.loss_dice: 0.5372, decode.d0.loss_cls: 0.2775, decode.d0.loss_mask: 0.2358, decode.d0.loss_dice: 0.5580, decode.d1.loss_cls: 0.0812, decode.d1.loss_mask: 0.2180, decode.d1.loss_dice: 0.5242, decode.d2.loss_cls: 0.0781, decode.d2.loss_mask: 0.2188, decode.d2.loss_dice: 0.5282, decode.d3.loss_cls: 0.0272, decode.d3.loss_mask: 0.2145, decode.d3.loss_dice: 0.5340, decode.d4.loss_cls: 0.0281, decode.d4.loss_mask: 0.2158, decode.d4.loss_dice: 0.5170, decode.d5.loss_cls: 0.0306, decode.d5.loss_mask: 0.2156, decode.d5.loss_dice: 0.5162, decode.d6.loss_cls: 0.0289, decode.d6.loss_mask: 0.2181, decode.d6.loss_dice: 0.5320, decode.d7.loss_cls: 0.0281, decode.d7.loss_mask: 0.2172, decode.d7.loss_dice: 0.5250, decode.d8.loss_cls: 0.0294, decode.d8.loss_mask: 0.2167, decode.d8.loss_dice: 0.5340, loss: 8.1330 +2022-05-10 13:27:29,268 - mmseg - INFO - per class results: +2022-05-10 13:27:29,279 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.58 | 99.24 | +| sidewalk | 88.46 | 93.74 | +| building | 94.39 | 96.81 | +| wall | 66.19 | 80.01 | +| fence | 74.21 | 81.87 | +| pole | 71.78 | 82.85 | +| traffic light | 76.99 | 89.42 | +| traffic sign | 84.02 | 90.45 | +| vegetation | 93.36 | 97.12 | +| terrain | 67.86 | 80.02 | +| sky | 95.93 | 98.3 | +| person | 86.88 | 94.11 | +| rider | 74.36 | 83.47 | +| car | 96.33 | 98.33 | +| truck | 91.3 | 94.62 | +| bus | 93.95 | 96.79 | +| train | 88.24 | 90.88 | +| motorcycle | 76.14 | 87.93 | +| bicycle | 82.95 | 92.09 | ++---------------+-------+-------+ +2022-05-10 13:27:29,281 - 
mmseg - INFO - Summary: +2022-05-10 13:27:29,281 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.31 | 90.95 | ++------+-------+-------+ +2022-05-10 13:27:59,527 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_36000.pth. +2022-05-10 13:27:59,539 - mmseg - INFO - Best mIoU is 0.8431 at 36000 iter. +2022-05-10 13:27:59,548 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 13:27:59,548 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8431, mAcc: 0.9095, IoU.road: 0.9858, IoU.sidewalk: 0.8846, IoU.building: 0.9439, IoU.wall: 0.6619, IoU.fence: 0.7421, IoU.pole: 0.7178, IoU.traffic light: 0.7699, IoU.traffic sign: 0.8402, IoU.vegetation: 0.9336, IoU.terrain: 0.6786, IoU.sky: 0.9593, IoU.person: 0.8688, IoU.rider: 0.7436, IoU.car: 0.9633, IoU.truck: 0.9130, IoU.bus: 0.9395, IoU.train: 0.8824, IoU.motorcycle: 0.7614, IoU.bicycle: 0.8295, Acc.road: 0.9924, Acc.sidewalk: 0.9374, Acc.building: 0.9681, Acc.wall: 0.8001, Acc.fence: 0.8187, Acc.pole: 0.8285, Acc.traffic light: 0.8942, Acc.traffic sign: 0.9045, Acc.vegetation: 0.9712, Acc.terrain: 0.8002, Acc.sky: 0.9830, Acc.person: 0.9411, Acc.rider: 0.8347, Acc.car: 0.9833, Acc.truck: 0.9462, Acc.bus: 0.9679, Acc.train: 0.9088, Acc.motorcycle: 0.8793, Acc.bicycle: 0.9209 +2022-05-10 13:29:30,626 - mmseg - INFO - Iter [36050/80000] lr: 7.888e-07, eta: 31 days, 10:31:57, time: 5.041, data_time: 3.240, memory: 69053, decode.loss_cls: 0.0765, decode.loss_mask: 0.2171, decode.loss_dice: 0.5897, decode.d0.loss_cls: 0.3119, decode.d0.loss_mask: 0.2267, decode.d0.loss_dice: 0.6244, decode.d1.loss_cls: 0.0951, decode.d1.loss_mask: 0.2188, decode.d1.loss_dice: 0.5982, decode.d2.loss_cls: 0.0898, decode.d2.loss_mask: 0.2174, decode.d2.loss_dice: 0.5892, decode.d3.loss_cls: 0.0868, decode.d3.loss_mask: 0.2175, decode.d3.loss_dice: 0.5884, decode.d4.loss_cls: 0.0793, decode.d4.loss_mask: 0.2171, decode.d4.loss_dice: 
0.5851, decode.d5.loss_cls: 0.0841, decode.d5.loss_mask: 0.2170, decode.d5.loss_dice: 0.5882, decode.d6.loss_cls: 0.0798, decode.d6.loss_mask: 0.2169, decode.d6.loss_dice: 0.5873, decode.d7.loss_cls: 0.0746, decode.d7.loss_mask: 0.2171, decode.d7.loss_dice: 0.5870, decode.d8.loss_cls: 0.0788, decode.d8.loss_mask: 0.2171, decode.d8.loss_dice: 0.5867, loss: 9.1633 +2022-05-10 13:31:00,990 - mmseg - INFO - Iter [36100/80000] lr: 7.879e-07, eta: 16 days, 7:28:43, time: 1.807, data_time: 0.021, memory: 69053, decode.loss_cls: 0.0688, decode.loss_mask: 0.2120, decode.loss_dice: 0.5858, decode.d0.loss_cls: 0.3060, decode.d0.loss_mask: 0.2222, decode.d0.loss_dice: 0.6245, decode.d1.loss_cls: 0.0900, decode.d1.loss_mask: 0.2138, decode.d1.loss_dice: 0.5980, decode.d2.loss_cls: 0.0832, decode.d2.loss_mask: 0.2128, decode.d2.loss_dice: 0.5916, decode.d3.loss_cls: 0.0776, decode.d3.loss_mask: 0.2121, decode.d3.loss_dice: 0.5912, decode.d4.loss_cls: 0.0771, decode.d4.loss_mask: 0.2118, decode.d4.loss_dice: 0.5916, decode.d5.loss_cls: 0.0733, decode.d5.loss_mask: 0.2122, decode.d5.loss_dice: 0.5871, decode.d6.loss_cls: 0.0764, decode.d6.loss_mask: 0.2123, decode.d6.loss_dice: 0.5877, decode.d7.loss_cls: 0.0776, decode.d7.loss_mask: 0.2122, decode.d7.loss_dice: 0.5885, decode.d8.loss_cls: 0.0726, decode.d8.loss_mask: 0.2120, decode.d8.loss_dice: 0.5879, loss: 9.0700 +2022-05-10 13:32:29,734 - mmseg - INFO - Iter [36150/80000] lr: 7.870e-07, eta: 11 days, 4:42:24, time: 1.774, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0829, decode.loss_mask: 0.2079, decode.loss_dice: 0.5910, decode.d0.loss_cls: 0.3200, decode.d0.loss_mask: 0.2150, decode.d0.loss_dice: 0.6219, decode.d1.loss_cls: 0.1014, decode.d1.loss_mask: 0.2094, decode.d1.loss_dice: 0.5977, decode.d2.loss_cls: 0.0990, decode.d2.loss_mask: 0.2090, decode.d2.loss_dice: 0.5906, decode.d3.loss_cls: 0.0889, decode.d3.loss_mask: 0.2092, decode.d3.loss_dice: 0.5880, decode.d4.loss_cls: 0.0937, decode.d4.loss_mask: 0.2084, 
decode.d4.loss_dice: 0.5916, decode.d5.loss_cls: 0.0854, decode.d5.loss_mask: 0.2086, decode.d5.loss_dice: 0.5909, decode.d6.loss_cls: 0.0844, decode.d6.loss_mask: 0.2083, decode.d6.loss_dice: 0.5900, decode.d7.loss_cls: 0.0867, decode.d7.loss_mask: 0.2085, decode.d7.loss_dice: 0.5877, decode.d8.loss_cls: 0.0862, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5885, loss: 9.1595 +2022-05-10 13:34:01,280 - mmseg - INFO - Iter [36200/80000] lr: 7.861e-07, eta: 8 days, 15:10:39, time: 1.832, data_time: 0.070, memory: 69053, decode.loss_cls: 0.0816, decode.loss_mask: 0.2112, decode.loss_dice: 0.5838, decode.d0.loss_cls: 0.3137, decode.d0.loss_mask: 0.2190, decode.d0.loss_dice: 0.6101, decode.d1.loss_cls: 0.0949, decode.d1.loss_mask: 0.2131, decode.d1.loss_dice: 0.5939, decode.d2.loss_cls: 0.0886, decode.d2.loss_mask: 0.2129, decode.d2.loss_dice: 0.5889, decode.d3.loss_cls: 0.0868, decode.d3.loss_mask: 0.2121, decode.d3.loss_dice: 0.5811, decode.d4.loss_cls: 0.0791, decode.d4.loss_mask: 0.2121, decode.d4.loss_dice: 0.5818, decode.d5.loss_cls: 0.0867, decode.d5.loss_mask: 0.2105, decode.d5.loss_dice: 0.5847, decode.d6.loss_cls: 0.0794, decode.d6.loss_mask: 0.2109, decode.d6.loss_dice: 0.5794, decode.d7.loss_cls: 0.0790, decode.d7.loss_mask: 0.2098, decode.d7.loss_dice: 0.5793, decode.d8.loss_cls: 0.0731, decode.d8.loss_mask: 0.2113, decode.d8.loss_dice: 0.5831, loss: 9.0522 +2022-05-10 13:35:31,141 - mmseg - INFO - Iter [36250/80000] lr: 7.852e-07, eta: 7 days, 2:04:08, time: 1.797, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0841, decode.loss_mask: 0.2136, decode.loss_dice: 0.5696, decode.d0.loss_cls: 0.3037, decode.d0.loss_mask: 0.2246, decode.d0.loss_dice: 0.6003, decode.d1.loss_cls: 0.0997, decode.d1.loss_mask: 0.2161, decode.d1.loss_dice: 0.5746, decode.d2.loss_cls: 0.0898, decode.d2.loss_mask: 0.2140, decode.d2.loss_dice: 0.5751, decode.d3.loss_cls: 0.0863, decode.d3.loss_mask: 0.2131, decode.d3.loss_dice: 0.5716, decode.d4.loss_cls: 0.0824, 
decode.d4.loss_mask: 0.2139, decode.d4.loss_dice: 0.5675, decode.d5.loss_cls: 0.0837, decode.d5.loss_mask: 0.2140, decode.d5.loss_dice: 0.5731, decode.d6.loss_cls: 0.0782, decode.d6.loss_mask: 0.2135, decode.d6.loss_dice: 0.5705, decode.d7.loss_cls: 0.0753, decode.d7.loss_mask: 0.2137, decode.d7.loss_dice: 0.5671, decode.d8.loss_cls: 0.0734, decode.d8.loss_mask: 0.2133, decode.d8.loss_dice: 0.5669, loss: 8.9429 +2022-05-10 13:37:00,484 - mmseg - INFO - Iter [36300/80000] lr: 7.843e-07, eta: 6 days, 1:15:35, time: 1.787, data_time: 0.023, memory: 69053, decode.loss_cls: 0.0760, decode.loss_mask: 0.2148, decode.loss_dice: 0.5817, decode.d0.loss_cls: 0.3085, decode.d0.loss_mask: 0.2255, decode.d0.loss_dice: 0.6135, decode.d1.loss_cls: 0.0962, decode.d1.loss_mask: 0.2162, decode.d1.loss_dice: 0.5985, decode.d2.loss_cls: 0.0974, decode.d2.loss_mask: 0.2157, decode.d2.loss_dice: 0.5870, decode.d3.loss_cls: 0.0829, decode.d3.loss_mask: 0.2154, decode.d3.loss_dice: 0.5846, decode.d4.loss_cls: 0.0808, decode.d4.loss_mask: 0.2159, decode.d4.loss_dice: 0.5843, decode.d5.loss_cls: 0.0793, decode.d5.loss_mask: 0.2165, decode.d5.loss_dice: 0.5856, decode.d6.loss_cls: 0.0834, decode.d6.loss_mask: 0.2156, decode.d6.loss_dice: 0.5834, decode.d7.loss_cls: 0.0765, decode.d7.loss_mask: 0.2154, decode.d7.loss_dice: 0.5817, decode.d8.loss_cls: 0.0756, decode.d8.loss_mask: 0.2152, decode.d8.loss_dice: 0.5851, loss: 9.1081 +2022-05-10 13:38:30,203 - mmseg - INFO - Iter [36350/80000] lr: 7.834e-07, eta: 5 days, 7:31:08, time: 1.791, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0749, decode.loss_mask: 0.2116, decode.loss_dice: 0.5858, decode.d0.loss_cls: 0.3098, decode.d0.loss_mask: 0.2222, decode.d0.loss_dice: 0.6207, decode.d1.loss_cls: 0.0937, decode.d1.loss_mask: 0.2142, decode.d1.loss_dice: 0.6017, decode.d2.loss_cls: 0.0907, decode.d2.loss_mask: 0.2144, decode.d2.loss_dice: 0.5927, decode.d3.loss_cls: 0.0856, decode.d3.loss_mask: 0.2130, decode.d3.loss_dice: 0.5865, 
decode.d4.loss_cls: 0.0830, decode.d4.loss_mask: 0.2126, decode.d4.loss_dice: 0.5903, decode.d5.loss_cls: 0.0810, decode.d5.loss_mask: 0.2117, decode.d5.loss_dice: 0.5876, decode.d6.loss_cls: 0.0781, decode.d6.loss_mask: 0.2125, decode.d6.loss_dice: 0.5872, decode.d7.loss_cls: 0.0799, decode.d7.loss_mask: 0.2121, decode.d7.loss_dice: 0.5882, decode.d8.loss_cls: 0.0807, decode.d8.loss_mask: 0.2126, decode.d8.loss_dice: 0.5897, loss: 9.1247 +2022-05-10 13:40:03,622 - mmseg - INFO - Iter [36400/80000] lr: 7.825e-07, eta: 4 days, 18:19:00, time: 1.871, data_time: 0.071, memory: 69053, decode.loss_cls: 0.0783, decode.loss_mask: 0.2144, decode.loss_dice: 0.5662, decode.d0.loss_cls: 0.3234, decode.d0.loss_mask: 0.2238, decode.d0.loss_dice: 0.5956, decode.d1.loss_cls: 0.1034, decode.d1.loss_mask: 0.2158, decode.d1.loss_dice: 0.5711, decode.d2.loss_cls: 0.0883, decode.d2.loss_mask: 0.2146, decode.d2.loss_dice: 0.5711, decode.d3.loss_cls: 0.0827, decode.d3.loss_mask: 0.2142, decode.d3.loss_dice: 0.5689, decode.d4.loss_cls: 0.0860, decode.d4.loss_mask: 0.2145, decode.d4.loss_dice: 0.5648, decode.d5.loss_cls: 0.0834, decode.d5.loss_mask: 0.2141, decode.d5.loss_dice: 0.5692, decode.d6.loss_cls: 0.0758, decode.d6.loss_mask: 0.2139, decode.d6.loss_dice: 0.5660, decode.d7.loss_cls: 0.0793, decode.d7.loss_mask: 0.2149, decode.d7.loss_dice: 0.5671, decode.d8.loss_cls: 0.0770, decode.d8.loss_mask: 0.2144, decode.d8.loss_dice: 0.5680, loss: 8.9401 +2022-05-10 13:41:33,504 - mmseg - INFO - Iter [36450/80000] lr: 7.816e-07, eta: 4 days, 7:56:14, time: 1.798, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0809, decode.loss_mask: 0.2112, decode.loss_dice: 0.5860, decode.d0.loss_cls: 0.3071, decode.d0.loss_mask: 0.2215, decode.d0.loss_dice: 0.6229, decode.d1.loss_cls: 0.1101, decode.d1.loss_mask: 0.2145, decode.d1.loss_dice: 0.6004, decode.d2.loss_cls: 0.0949, decode.d2.loss_mask: 0.2131, decode.d2.loss_dice: 0.6007, decode.d3.loss_cls: 0.0894, decode.d3.loss_mask: 0.2120, 
decode.d3.loss_dice: 0.5917, decode.d4.loss_cls: 0.0941, decode.d4.loss_mask: 0.2124, decode.d4.loss_dice: 0.5885, decode.d5.loss_cls: 0.0924, decode.d5.loss_mask: 0.2122, decode.d5.loss_dice: 0.5933, decode.d6.loss_cls: 0.0826, decode.d6.loss_mask: 0.2118, decode.d6.loss_dice: 0.5863, decode.d7.loss_cls: 0.0819, decode.d7.loss_mask: 0.2118, decode.d7.loss_dice: 0.5925, decode.d8.loss_cls: 0.0771, decode.d8.loss_mask: 0.2116, decode.d8.loss_dice: 0.5907, loss: 9.1955 +2022-05-10 13:43:03,420 - mmseg - INFO - Iter [36500/80000] lr: 7.807e-07, eta: 3 days, 23:37:32, time: 1.798, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0871, decode.loss_mask: 0.2116, decode.loss_dice: 0.5839, decode.d0.loss_cls: 0.3289, decode.d0.loss_mask: 0.2221, decode.d0.loss_dice: 0.6160, decode.d1.loss_cls: 0.1128, decode.d1.loss_mask: 0.2128, decode.d1.loss_dice: 0.5900, decode.d2.loss_cls: 0.0936, decode.d2.loss_mask: 0.2128, decode.d2.loss_dice: 0.5924, decode.d3.loss_cls: 0.0946, decode.d3.loss_mask: 0.2123, decode.d3.loss_dice: 0.5811, decode.d4.loss_cls: 0.0957, decode.d4.loss_mask: 0.2128, decode.d4.loss_dice: 0.5843, decode.d5.loss_cls: 0.0876, decode.d5.loss_mask: 0.2119, decode.d5.loss_dice: 0.5817, decode.d6.loss_cls: 0.0904, decode.d6.loss_mask: 0.2105, decode.d6.loss_dice: 0.5848, decode.d7.loss_cls: 0.0893, decode.d7.loss_mask: 0.2121, decode.d7.loss_dice: 0.5809, decode.d8.loss_cls: 0.0906, decode.d8.loss_mask: 0.2119, decode.d8.loss_dice: 0.5808, loss: 9.1772 +2022-05-10 13:44:33,370 - mmseg - INFO - Iter [36550/80000] lr: 7.798e-07, eta: 3 days, 16:49:06, time: 1.799, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0763, decode.loss_mask: 0.2082, decode.loss_dice: 0.5822, decode.d0.loss_cls: 0.3147, decode.d0.loss_mask: 0.2177, decode.d0.loss_dice: 0.6187, decode.d1.loss_cls: 0.1018, decode.d1.loss_mask: 0.2103, decode.d1.loss_dice: 0.5899, decode.d2.loss_cls: 0.0841, decode.d2.loss_mask: 0.2092, decode.d2.loss_dice: 0.5867, decode.d3.loss_cls: 0.0795, 
decode.d3.loss_mask: 0.2089, decode.d3.loss_dice: 0.5869, decode.d4.loss_cls: 0.0790, decode.d4.loss_mask: 0.2084, decode.d4.loss_dice: 0.5845, decode.d5.loss_cls: 0.0808, decode.d5.loss_mask: 0.2086, decode.d5.loss_dice: 0.5820, decode.d6.loss_cls: 0.0786, decode.d6.loss_mask: 0.2082, decode.d6.loss_dice: 0.5843, decode.d7.loss_cls: 0.0765, decode.d7.loss_mask: 0.2082, decode.d7.loss_dice: 0.5815, decode.d8.loss_cls: 0.0798, decode.d8.loss_mask: 0.2082, decode.d8.loss_dice: 0.5840, loss: 9.0277 +2022-05-10 13:46:04,903 - mmseg - INFO - Iter [36600/80000] lr: 7.789e-07, eta: 3 days, 11:10:17, time: 1.831, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0919, decode.loss_mask: 0.2088, decode.loss_dice: 0.5720, decode.d0.loss_cls: 0.3287, decode.d0.loss_mask: 0.2170, decode.d0.loss_dice: 0.6024, decode.d1.loss_cls: 0.1145, decode.d1.loss_mask: 0.2102, decode.d1.loss_dice: 0.5786, decode.d2.loss_cls: 0.0959, decode.d2.loss_mask: 0.2102, decode.d2.loss_dice: 0.5749, decode.d3.loss_cls: 0.0971, decode.d3.loss_mask: 0.2093, decode.d3.loss_dice: 0.5776, decode.d4.loss_cls: 0.0938, decode.d4.loss_mask: 0.2092, decode.d4.loss_dice: 0.5751, decode.d5.loss_cls: 0.0866, decode.d5.loss_mask: 0.2095, decode.d5.loss_dice: 0.5665, decode.d6.loss_cls: 0.0856, decode.d6.loss_mask: 0.2092, decode.d6.loss_dice: 0.5737, decode.d7.loss_cls: 0.0902, decode.d7.loss_mask: 0.2089, decode.d7.loss_dice: 0.5730, decode.d8.loss_cls: 0.0890, decode.d8.loss_mask: 0.2088, decode.d8.loss_dice: 0.5759, loss: 9.0440 +2022-05-10 13:47:35,127 - mmseg - INFO - Iter [36650/80000] lr: 7.780e-07, eta: 3 days, 6:21:50, time: 1.805, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0685, decode.loss_mask: 0.2092, decode.loss_dice: 0.5741, decode.d0.loss_cls: 0.3033, decode.d0.loss_mask: 0.2160, decode.d0.loss_dice: 0.6055, decode.d1.loss_cls: 0.0934, decode.d1.loss_mask: 0.2105, decode.d1.loss_dice: 0.5854, decode.d2.loss_cls: 0.0783, decode.d2.loss_mask: 0.2099, decode.d2.loss_dice: 0.5790, 
decode.d3.loss_cls: 0.0711, decode.d3.loss_mask: 0.2089, decode.d3.loss_dice: 0.5739, decode.d4.loss_cls: 0.0666, decode.d4.loss_mask: 0.2094, decode.d4.loss_dice: 0.5777, decode.d5.loss_cls: 0.0745, decode.d5.loss_mask: 0.2089, decode.d5.loss_dice: 0.5774, decode.d6.loss_cls: 0.0719, decode.d6.loss_mask: 0.2095, decode.d6.loss_dice: 0.5738, decode.d7.loss_cls: 0.0767, decode.d7.loss_mask: 0.2095, decode.d7.loss_dice: 0.5744, decode.d8.loss_cls: 0.0708, decode.d8.loss_mask: 0.2092, decode.d8.loss_dice: 0.5749, loss: 8.8725 +2022-05-10 13:49:06,459 - mmseg - INFO - Iter [36700/80000] lr: 7.771e-07, eta: 3 days, 2:15:28, time: 1.827, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0756, decode.loss_mask: 0.2089, decode.loss_dice: 0.5769, decode.d0.loss_cls: 0.3082, decode.d0.loss_mask: 0.2174, decode.d0.loss_dice: 0.6079, decode.d1.loss_cls: 0.0921, decode.d1.loss_mask: 0.2097, decode.d1.loss_dice: 0.5871, decode.d2.loss_cls: 0.0887, decode.d2.loss_mask: 0.2097, decode.d2.loss_dice: 0.5763, decode.d3.loss_cls: 0.0856, decode.d3.loss_mask: 0.2096, decode.d3.loss_dice: 0.5792, decode.d4.loss_cls: 0.0900, decode.d4.loss_mask: 0.2094, decode.d4.loss_dice: 0.5741, decode.d5.loss_cls: 0.0851, decode.d5.loss_mask: 0.2095, decode.d5.loss_dice: 0.5810, decode.d6.loss_cls: 0.0810, decode.d6.loss_mask: 0.2091, decode.d6.loss_dice: 0.5738, decode.d7.loss_cls: 0.0819, decode.d7.loss_mask: 0.2092, decode.d7.loss_dice: 0.5762, decode.d8.loss_cls: 0.0807, decode.d8.loss_mask: 0.2095, decode.d8.loss_dice: 0.5735, loss: 8.9769 +2022-05-10 13:50:39,634 - mmseg - INFO - Iter [36750/80000] lr: 7.762e-07, eta: 2 days, 22:43:27, time: 1.863, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0711, decode.loss_mask: 0.2115, decode.loss_dice: 0.5768, decode.d0.loss_cls: 0.3202, decode.d0.loss_mask: 0.2207, decode.d0.loss_dice: 0.6068, decode.d1.loss_cls: 0.0986, decode.d1.loss_mask: 0.2138, decode.d1.loss_dice: 0.5847, decode.d2.loss_cls: 0.0813, decode.d2.loss_mask: 0.2127, 
decode.d2.loss_dice: 0.5855, decode.d3.loss_cls: 0.0738, decode.d3.loss_mask: 0.2113, decode.d3.loss_dice: 0.5797, decode.d4.loss_cls: 0.0776, decode.d4.loss_mask: 0.2115, decode.d4.loss_dice: 0.5797, decode.d5.loss_cls: 0.0787, decode.d5.loss_mask: 0.2120, decode.d5.loss_dice: 0.5856, decode.d6.loss_cls: 0.0751, decode.d6.loss_mask: 0.2114, decode.d6.loss_dice: 0.5779, decode.d7.loss_cls: 0.0766, decode.d7.loss_mask: 0.2121, decode.d7.loss_dice: 0.5762, decode.d8.loss_cls: 0.0750, decode.d8.loss_mask: 0.2120, decode.d8.loss_dice: 0.5783, loss: 8.9881 +2022-05-10 13:52:10,492 - mmseg - INFO - Iter [36800/80000] lr: 7.753e-07, eta: 2 days, 19:35:38, time: 1.817, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0808, decode.loss_mask: 0.2093, decode.loss_dice: 0.5774, decode.d0.loss_cls: 0.3142, decode.d0.loss_mask: 0.2158, decode.d0.loss_dice: 0.6126, decode.d1.loss_cls: 0.1072, decode.d1.loss_mask: 0.2101, decode.d1.loss_dice: 0.5928, decode.d2.loss_cls: 0.0901, decode.d2.loss_mask: 0.2091, decode.d2.loss_dice: 0.5853, decode.d3.loss_cls: 0.0905, decode.d3.loss_mask: 0.2091, decode.d3.loss_dice: 0.5792, decode.d4.loss_cls: 0.0855, decode.d4.loss_mask: 0.2091, decode.d4.loss_dice: 0.5795, decode.d5.loss_cls: 0.0821, decode.d5.loss_mask: 0.2090, decode.d5.loss_dice: 0.5765, decode.d6.loss_cls: 0.0847, decode.d6.loss_mask: 0.2081, decode.d6.loss_dice: 0.5757, decode.d7.loss_cls: 0.0850, decode.d7.loss_mask: 0.2085, decode.d7.loss_dice: 0.5766, decode.d8.loss_cls: 0.0791, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5762, loss: 9.0280 +2022-05-10 13:53:41,752 - mmseg - INFO - Iter [36850/80000] lr: 7.745e-07, eta: 2 days, 16:50:01, time: 1.824, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0776, decode.loss_mask: 0.2081, decode.loss_dice: 0.5811, decode.d0.loss_cls: 0.3221, decode.d0.loss_mask: 0.2151, decode.d0.loss_dice: 0.6056, decode.d1.loss_cls: 0.0980, decode.d1.loss_mask: 0.2094, decode.d1.loss_dice: 0.5866, decode.d2.loss_cls: 0.0889, 
decode.d2.loss_mask: 0.2092, decode.d2.loss_dice: 0.5809, decode.d3.loss_cls: 0.0831, decode.d3.loss_mask: 0.2091, decode.d3.loss_dice: 0.5790, decode.d4.loss_cls: 0.0776, decode.d4.loss_mask: 0.2094, decode.d4.loss_dice: 0.5806, decode.d5.loss_cls: 0.0797, decode.d5.loss_mask: 0.2090, decode.d5.loss_dice: 0.5826, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2083, decode.d6.loss_dice: 0.5791, decode.d7.loss_cls: 0.0783, decode.d7.loss_mask: 0.2080, decode.d7.loss_dice: 0.5770, decode.d8.loss_cls: 0.0774, decode.d8.loss_mask: 0.2075, decode.d8.loss_dice: 0.5814, loss: 8.9869 +2022-05-10 13:55:12,890 - mmseg - INFO - Iter [36900/80000] lr: 7.736e-07, eta: 2 days, 14:22:35, time: 1.824, data_time: 0.022, memory: 69053, decode.loss_cls: 0.0762, decode.loss_mask: 0.2108, decode.loss_dice: 0.5797, decode.d0.loss_cls: 0.3132, decode.d0.loss_mask: 0.2185, decode.d0.loss_dice: 0.6089, decode.d1.loss_cls: 0.0894, decode.d1.loss_mask: 0.2116, decode.d1.loss_dice: 0.5916, decode.d2.loss_cls: 0.0849, decode.d2.loss_mask: 0.2109, decode.d2.loss_dice: 0.5840, decode.d3.loss_cls: 0.0812, decode.d3.loss_mask: 0.2103, decode.d3.loss_dice: 0.5808, decode.d4.loss_cls: 0.0794, decode.d4.loss_mask: 0.2107, decode.d4.loss_dice: 0.5783, decode.d5.loss_cls: 0.0794, decode.d5.loss_mask: 0.2107, decode.d5.loss_dice: 0.5778, decode.d6.loss_cls: 0.0833, decode.d6.loss_mask: 0.2105, decode.d6.loss_dice: 0.5822, decode.d7.loss_cls: 0.0800, decode.d7.loss_mask: 0.2107, decode.d7.loss_dice: 0.5813, decode.d8.loss_cls: 0.0760, decode.d8.loss_mask: 0.2107, decode.d8.loss_dice: 0.5786, loss: 9.0017 +2022-05-10 13:56:43,919 - mmseg - INFO - Iter [36950/80000] lr: 7.727e-07, eta: 2 days, 12:10:22, time: 1.821, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0728, decode.loss_mask: 0.2043, decode.loss_dice: 0.5755, decode.d0.loss_cls: 0.3195, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.6071, decode.d1.loss_cls: 0.1042, decode.d1.loss_mask: 0.2061, decode.d1.loss_dice: 0.5871, 
decode.d2.loss_cls: 0.0820, decode.d2.loss_mask: 0.2047, decode.d2.loss_dice: 0.5806, decode.d3.loss_cls: 0.0824, decode.d3.loss_mask: 0.2038, decode.d3.loss_dice: 0.5759, decode.d4.loss_cls: 0.0807, decode.d4.loss_mask: 0.2045, decode.d4.loss_dice: 0.5767, decode.d5.loss_cls: 0.0808, decode.d5.loss_mask: 0.2046, decode.d5.loss_dice: 0.5743, decode.d6.loss_cls: 0.0738, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5779, decode.d7.loss_cls: 0.0745, decode.d7.loss_mask: 0.2046, decode.d7.loss_dice: 0.5802, decode.d8.loss_cls: 0.0805, decode.d8.loss_mask: 0.2039, decode.d8.loss_dice: 0.5757, loss: 8.9164 +2022-05-10 13:58:14,587 - mmseg - INFO - Saving checkpoint at 37000 iterations +2022-05-10 13:58:49,897 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 13:58:49,907 - mmseg - INFO - Iter [37000/80000] lr: 7.718e-07, eta: 2 days, 10:36:08, time: 2.517, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0752, decode.loss_mask: 0.1977, decode.loss_dice: 0.5751, decode.d0.loss_cls: 0.3078, decode.d0.loss_mask: 0.2052, decode.d0.loss_dice: 0.6016, decode.d1.loss_cls: 0.0860, decode.d1.loss_mask: 0.1994, decode.d1.loss_dice: 0.5819, decode.d2.loss_cls: 0.0837, decode.d2.loss_mask: 0.1984, decode.d2.loss_dice: 0.5768, decode.d3.loss_cls: 0.0778, decode.d3.loss_mask: 0.1982, decode.d3.loss_dice: 0.5747, decode.d4.loss_cls: 0.0738, decode.d4.loss_mask: 0.1974, decode.d4.loss_dice: 0.5709, decode.d5.loss_cls: 0.0775, decode.d5.loss_mask: 0.1977, decode.d5.loss_dice: 0.5752, decode.d6.loss_cls: 0.0725, decode.d6.loss_mask: 0.1977, decode.d6.loss_dice: 0.5739, decode.d7.loss_cls: 0.0712, decode.d7.loss_mask: 0.1978, decode.d7.loss_dice: 0.5743, decode.d8.loss_cls: 0.0760, decode.d8.loss_mask: 0.1974, decode.d8.loss_dice: 0.5710, loss: 8.7636 +2022-05-10 14:00:45,485 - mmseg - INFO - per class results: +2022-05-10 14:00:45,490 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | 
++---------------+-------+-------+ +| road | 98.62 | 99.22 | +| sidewalk | 88.71 | 93.82 | +| building | 94.34 | 97.11 | +| wall | 69.78 | 81.26 | +| fence | 78.24 | 87.84 | +| pole | 71.33 | 83.36 | +| traffic light | 76.98 | 87.53 | +| traffic sign | 84.13 | 90.36 | +| vegetation | 93.23 | 96.69 | +| terrain | 67.12 | 78.52 | +| sky | 95.88 | 98.3 | +| person | 86.88 | 93.64 | +| rider | 74.5 | 83.62 | +| car | 96.2 | 98.26 | +| truck | 91.34 | 94.4 | +| bus | 93.59 | 96.77 | +| train | 88.01 | 91.09 | +| motorcycle | 77.61 | 87.61 | +| bicycle | 82.83 | 91.92 | ++---------------+-------+-------+ +2022-05-10 14:00:45,490 - mmseg - INFO - Summary: +2022-05-10 14:00:45,490 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 97.02 | 84.7 | 91.12 | ++-------+------+-------+ +2022-05-10 14:00:45,493 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_36000.pth was removed +2022-05-10 14:01:19,102 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_37000.pth. +2022-05-10 14:01:19,113 - mmseg - INFO - Best mIoU is 0.8470 at 37000 iter. 
+2022-05-10 14:01:19,124 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 14:01:19,124 - mmseg - INFO - Iter(val) [32] aAcc: 0.9702, mIoU: 0.8470, mAcc: 0.9112, IoU.road: 0.9862, IoU.sidewalk: 0.8871, IoU.building: 0.9434, IoU.wall: 0.6978, IoU.fence: 0.7824, IoU.pole: 0.7133, IoU.traffic light: 0.7698, IoU.traffic sign: 0.8413, IoU.vegetation: 0.9323, IoU.terrain: 0.6712, IoU.sky: 0.9588, IoU.person: 0.8688, IoU.rider: 0.7450, IoU.car: 0.9620, IoU.truck: 0.9134, IoU.bus: 0.9359, IoU.train: 0.8801, IoU.motorcycle: 0.7761, IoU.bicycle: 0.8283, Acc.road: 0.9922, Acc.sidewalk: 0.9382, Acc.building: 0.9711, Acc.wall: 0.8126, Acc.fence: 0.8784, Acc.pole: 0.8336, Acc.traffic light: 0.8753, Acc.traffic sign: 0.9036, Acc.vegetation: 0.9669, Acc.terrain: 0.7852, Acc.sky: 0.9830, Acc.person: 0.9364, Acc.rider: 0.8362, Acc.car: 0.9826, Acc.truck: 0.9440, Acc.bus: 0.9677, Acc.train: 0.9109, Acc.motorcycle: 0.8761, Acc.bicycle: 0.9192 +2022-05-10 14:02:48,922 - mmseg - INFO - Iter [37050/80000] lr: 7.709e-07, eta: 2 days, 10:27:52, time: 4.783, data_time: 3.007, memory: 69053, decode.loss_cls: 0.0758, decode.loss_mask: 0.2114, decode.loss_dice: 0.5985, decode.d0.loss_cls: 0.3130, decode.d0.loss_mask: 0.2204, decode.d0.loss_dice: 0.6261, decode.d1.loss_cls: 0.0997, decode.d1.loss_mask: 0.2132, decode.d1.loss_dice: 0.6104, decode.d2.loss_cls: 0.0940, decode.d2.loss_mask: 0.2124, decode.d2.loss_dice: 0.6035, decode.d3.loss_cls: 0.0880, decode.d3.loss_mask: 0.2120, decode.d3.loss_dice: 0.5979, decode.d4.loss_cls: 0.0901, decode.d4.loss_mask: 0.2119, decode.d4.loss_dice: 0.5972, decode.d5.loss_cls: 0.0806, decode.d5.loss_mask: 0.2122, decode.d5.loss_dice: 0.5967, decode.d6.loss_cls: 0.0781, decode.d6.loss_mask: 0.2114, decode.d6.loss_dice: 0.5972, decode.d7.loss_cls: 0.0849, decode.d7.loss_mask: 0.2120, decode.d7.loss_dice: 0.5959, decode.d8.loss_cls: 0.0780, decode.d8.loss_mask: 0.2122, decode.d8.loss_dice: 0.5969, loss: 9.2315 
+2022-05-10 14:04:18,908 - mmseg - INFO - Iter [37100/80000] lr: 7.700e-07, eta: 2 days, 8:43:06, time: 1.800, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0773, decode.loss_mask: 0.2108, decode.loss_dice: 0.5821, decode.d0.loss_cls: 0.3025, decode.d0.loss_mask: 0.2202, decode.d0.loss_dice: 0.6117, decode.d1.loss_cls: 0.0987, decode.d1.loss_mask: 0.2128, decode.d1.loss_dice: 0.5939, decode.d2.loss_cls: 0.0842, decode.d2.loss_mask: 0.2124, decode.d2.loss_dice: 0.5879, decode.d3.loss_cls: 0.0771, decode.d3.loss_mask: 0.2119, decode.d3.loss_dice: 0.5846, decode.d4.loss_cls: 0.0769, decode.d4.loss_mask: 0.2114, decode.d4.loss_dice: 0.5895, decode.d5.loss_cls: 0.0762, decode.d5.loss_mask: 0.2113, decode.d5.loss_dice: 0.5864, decode.d6.loss_cls: 0.0735, decode.d6.loss_mask: 0.2110, decode.d6.loss_dice: 0.5824, decode.d7.loss_cls: 0.0751, decode.d7.loss_mask: 0.2105, decode.d7.loss_dice: 0.5846, decode.d8.loss_cls: 0.0773, decode.d8.loss_mask: 0.2108, decode.d8.loss_dice: 0.5824, loss: 9.0275 +2022-05-10 14:05:51,972 - mmseg - INFO - Iter [37150/80000] lr: 7.691e-07, eta: 2 days, 7:09:13, time: 1.861, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0823, decode.loss_mask: 0.2067, decode.loss_dice: 0.5584, decode.d0.loss_cls: 0.3139, decode.d0.loss_mask: 0.2173, decode.d0.loss_dice: 0.5886, decode.d1.loss_cls: 0.0948, decode.d1.loss_mask: 0.2079, decode.d1.loss_dice: 0.5701, decode.d2.loss_cls: 0.0895, decode.d2.loss_mask: 0.2068, decode.d2.loss_dice: 0.5664, decode.d3.loss_cls: 0.0722, decode.d3.loss_mask: 0.2069, decode.d3.loss_dice: 0.5615, decode.d4.loss_cls: 0.0815, decode.d4.loss_mask: 0.2075, decode.d4.loss_dice: 0.5611, decode.d5.loss_cls: 0.0819, decode.d5.loss_mask: 0.2074, decode.d5.loss_dice: 0.5617, decode.d6.loss_cls: 0.0790, decode.d6.loss_mask: 0.2069, decode.d6.loss_dice: 0.5584, decode.d7.loss_cls: 0.0779, decode.d7.loss_mask: 0.2065, decode.d7.loss_dice: 0.5577, decode.d8.loss_cls: 0.0830, decode.d8.loss_mask: 0.2065, decode.d8.loss_dice: 
0.5561, loss: 8.7763 +2022-05-10 14:07:21,903 - mmseg - INFO - Iter [37200/80000] lr: 7.682e-07, eta: 2 days, 5:41:10, time: 1.799, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0798, decode.loss_mask: 0.2049, decode.loss_dice: 0.5810, decode.d0.loss_cls: 0.3186, decode.d0.loss_mask: 0.2117, decode.d0.loss_dice: 0.6067, decode.d1.loss_cls: 0.0975, decode.d1.loss_mask: 0.2063, decode.d1.loss_dice: 0.5897, decode.d2.loss_cls: 0.0854, decode.d2.loss_mask: 0.2049, decode.d2.loss_dice: 0.5828, decode.d3.loss_cls: 0.0778, decode.d3.loss_mask: 0.2047, decode.d3.loss_dice: 0.5828, decode.d4.loss_cls: 0.0774, decode.d4.loss_mask: 0.2045, decode.d4.loss_dice: 0.5804, decode.d5.loss_cls: 0.0787, decode.d5.loss_mask: 0.2044, decode.d5.loss_dice: 0.5824, decode.d6.loss_cls: 0.0744, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5781, decode.d7.loss_cls: 0.0760, decode.d7.loss_mask: 0.2049, decode.d7.loss_dice: 0.5758, decode.d8.loss_cls: 0.0755, decode.d8.loss_mask: 0.2047, decode.d8.loss_dice: 0.5793, loss: 8.9357 +2022-05-10 14:08:50,665 - mmseg - INFO - Iter [37250/80000] lr: 7.673e-07, eta: 2 days, 4:19:22, time: 1.775, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0692, decode.loss_mask: 0.2078, decode.loss_dice: 0.5729, decode.d0.loss_cls: 0.3046, decode.d0.loss_mask: 0.2169, decode.d0.loss_dice: 0.5971, decode.d1.loss_cls: 0.0945, decode.d1.loss_mask: 0.2103, decode.d1.loss_dice: 0.5771, decode.d2.loss_cls: 0.0749, decode.d2.loss_mask: 0.2084, decode.d2.loss_dice: 0.5739, decode.d3.loss_cls: 0.0677, decode.d3.loss_mask: 0.2081, decode.d3.loss_dice: 0.5722, decode.d4.loss_cls: 0.0729, decode.d4.loss_mask: 0.2077, decode.d4.loss_dice: 0.5743, decode.d5.loss_cls: 0.0686, decode.d5.loss_mask: 0.2076, decode.d5.loss_dice: 0.5725, decode.d6.loss_cls: 0.0711, decode.d6.loss_mask: 0.2077, decode.d6.loss_dice: 0.5764, decode.d7.loss_cls: 0.0672, decode.d7.loss_mask: 0.2081, decode.d7.loss_dice: 0.5732, decode.d8.loss_cls: 0.0645, decode.d8.loss_mask: 0.2080, 
decode.d8.loss_dice: 0.5743, loss: 8.8100 +2022-05-10 14:10:19,195 - mmseg - INFO - Iter [37300/80000] lr: 7.664e-07, eta: 2 days, 3:03:36, time: 1.770, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0687, decode.loss_mask: 0.2061, decode.loss_dice: 0.5708, decode.d0.loss_cls: 0.3057, decode.d0.loss_mask: 0.2166, decode.d0.loss_dice: 0.6016, decode.d1.loss_cls: 0.0806, decode.d1.loss_mask: 0.2070, decode.d1.loss_dice: 0.5799, decode.d2.loss_cls: 0.0806, decode.d2.loss_mask: 0.2069, decode.d2.loss_dice: 0.5760, decode.d3.loss_cls: 0.0721, decode.d3.loss_mask: 0.2067, decode.d3.loss_dice: 0.5680, decode.d4.loss_cls: 0.0673, decode.d4.loss_mask: 0.2075, decode.d4.loss_dice: 0.5692, decode.d5.loss_cls: 0.0679, decode.d5.loss_mask: 0.2072, decode.d5.loss_dice: 0.5654, decode.d6.loss_cls: 0.0694, decode.d6.loss_mask: 0.2058, decode.d6.loss_dice: 0.5700, decode.d7.loss_cls: 0.0716, decode.d7.loss_mask: 0.2059, decode.d7.loss_dice: 0.5709, decode.d8.loss_cls: 0.0743, decode.d8.loss_mask: 0.2060, decode.d8.loss_dice: 0.5699, loss: 8.7755 +2022-05-10 14:11:51,061 - mmseg - INFO - Iter [37350/80000] lr: 7.655e-07, eta: 2 days, 1:55:06, time: 1.837, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0760, decode.loss_mask: 0.2070, decode.loss_dice: 0.5822, decode.d0.loss_cls: 0.3197, decode.d0.loss_mask: 0.2165, decode.d0.loss_dice: 0.6146, decode.d1.loss_cls: 0.0967, decode.d1.loss_mask: 0.2092, decode.d1.loss_dice: 0.5920, decode.d2.loss_cls: 0.0932, decode.d2.loss_mask: 0.2081, decode.d2.loss_dice: 0.5870, decode.d3.loss_cls: 0.0818, decode.d3.loss_mask: 0.2079, decode.d3.loss_dice: 0.5841, decode.d4.loss_cls: 0.0787, decode.d4.loss_mask: 0.2075, decode.d4.loss_dice: 0.5872, decode.d5.loss_cls: 0.0817, decode.d5.loss_mask: 0.2085, decode.d5.loss_dice: 0.5869, decode.d6.loss_cls: 0.0777, decode.d6.loss_mask: 0.2071, decode.d6.loss_dice: 0.5857, decode.d7.loss_cls: 0.0751, decode.d7.loss_mask: 0.2072, decode.d7.loss_dice: 0.5844, decode.d8.loss_cls: 0.0726, 
decode.d8.loss_mask: 0.2073, decode.d8.loss_dice: 0.5825, loss: 9.0260 +2022-05-10 14:13:20,925 - mmseg - INFO - Iter [37400/80000] lr: 7.646e-07, eta: 2 days, 0:50:22, time: 1.797, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0797, decode.loss_mask: 0.2132, decode.loss_dice: 0.5944, decode.d0.loss_cls: 0.3164, decode.d0.loss_mask: 0.2216, decode.d0.loss_dice: 0.6296, decode.d1.loss_cls: 0.1071, decode.d1.loss_mask: 0.2149, decode.d1.loss_dice: 0.6043, decode.d2.loss_cls: 0.0947, decode.d2.loss_mask: 0.2141, decode.d2.loss_dice: 0.6090, decode.d3.loss_cls: 0.0860, decode.d3.loss_mask: 0.2138, decode.d3.loss_dice: 0.5985, decode.d4.loss_cls: 0.0820, decode.d4.loss_mask: 0.2134, decode.d4.loss_dice: 0.5963, decode.d5.loss_cls: 0.0842, decode.d5.loss_mask: 0.2132, decode.d5.loss_dice: 0.6018, decode.d6.loss_cls: 0.0782, decode.d6.loss_mask: 0.2135, decode.d6.loss_dice: 0.5967, decode.d7.loss_cls: 0.0730, decode.d7.loss_mask: 0.2138, decode.d7.loss_dice: 0.5950, decode.d8.loss_cls: 0.0876, decode.d8.loss_mask: 0.2138, decode.d8.loss_dice: 0.5976, loss: 9.2574 +2022-05-10 14:14:49,132 - mmseg - INFO - Iter [37450/80000] lr: 7.637e-07, eta: 1 day, 23:49:11, time: 1.764, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0757, decode.loss_mask: 0.2139, decode.loss_dice: 0.5690, decode.d0.loss_cls: 0.3112, decode.d0.loss_mask: 0.2231, decode.d0.loss_dice: 0.5991, decode.d1.loss_cls: 0.0956, decode.d1.loss_mask: 0.2170, decode.d1.loss_dice: 0.5805, decode.d2.loss_cls: 0.0854, decode.d2.loss_mask: 0.2150, decode.d2.loss_dice: 0.5759, decode.d3.loss_cls: 0.0866, decode.d3.loss_mask: 0.2136, decode.d3.loss_dice: 0.5712, decode.d4.loss_cls: 0.0818, decode.d4.loss_mask: 0.2139, decode.d4.loss_dice: 0.5696, decode.d5.loss_cls: 0.0812, decode.d5.loss_mask: 0.2138, decode.d5.loss_dice: 0.5732, decode.d6.loss_cls: 0.0831, decode.d6.loss_mask: 0.2143, decode.d6.loss_dice: 0.5683, decode.d7.loss_cls: 0.0795, decode.d7.loss_mask: 0.2144, decode.d7.loss_dice: 0.5685, 
decode.d8.loss_cls: 0.0810, decode.d8.loss_mask: 0.2147, decode.d8.loss_dice: 0.5711, loss: 8.9611 +2022-05-10 14:16:22,475 - mmseg - INFO - Iter [37500/80000] lr: 7.628e-07, eta: 1 day, 22:54:23, time: 1.867, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0688, decode.loss_mask: 0.2070, decode.loss_dice: 0.5787, decode.d0.loss_cls: 0.2928, decode.d0.loss_mask: 0.2163, decode.d0.loss_dice: 0.6085, decode.d1.loss_cls: 0.0879, decode.d1.loss_mask: 0.2091, decode.d1.loss_dice: 0.5866, decode.d2.loss_cls: 0.0841, decode.d2.loss_mask: 0.2075, decode.d2.loss_dice: 0.5847, decode.d3.loss_cls: 0.0832, decode.d3.loss_mask: 0.2074, decode.d3.loss_dice: 0.5800, decode.d4.loss_cls: 0.0773, decode.d4.loss_mask: 0.2083, decode.d4.loss_dice: 0.5781, decode.d5.loss_cls: 0.0752, decode.d5.loss_mask: 0.2075, decode.d5.loss_dice: 0.5764, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2074, decode.d6.loss_dice: 0.5775, decode.d7.loss_cls: 0.0755, decode.d7.loss_mask: 0.2071, decode.d7.loss_dice: 0.5809, decode.d8.loss_cls: 0.0714, decode.d8.loss_mask: 0.2077, decode.d8.loss_dice: 0.5820, loss: 8.9122 +2022-05-10 14:17:51,847 - mmseg - INFO - Iter [37550/80000] lr: 7.619e-07, eta: 1 day, 22:01:14, time: 1.788, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0858, decode.loss_mask: 0.2090, decode.loss_dice: 0.5602, decode.d0.loss_cls: 0.3124, decode.d0.loss_mask: 0.2185, decode.d0.loss_dice: 0.6010, decode.d1.loss_cls: 0.1051, decode.d1.loss_mask: 0.2111, decode.d1.loss_dice: 0.5743, decode.d2.loss_cls: 0.0914, decode.d2.loss_mask: 0.2096, decode.d2.loss_dice: 0.5721, decode.d3.loss_cls: 0.0865, decode.d3.loss_mask: 0.2088, decode.d3.loss_dice: 0.5667, decode.d4.loss_cls: 0.0863, decode.d4.loss_mask: 0.2089, decode.d4.loss_dice: 0.5688, decode.d5.loss_cls: 0.0854, decode.d5.loss_mask: 0.2083, decode.d5.loss_dice: 0.5667, decode.d6.loss_cls: 0.0834, decode.d6.loss_mask: 0.2081, decode.d6.loss_dice: 0.5681, decode.d7.loss_cls: 0.0820, decode.d7.loss_mask: 0.2081, 
decode.d7.loss_dice: 0.5647, decode.d8.loss_cls: 0.0805, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5659, loss: 8.9063 +2022-05-10 14:19:22,739 - mmseg - INFO - Iter [37600/80000] lr: 7.610e-07, eta: 1 day, 21:11:57, time: 1.818, data_time: 0.021, memory: 69053, decode.loss_cls: 0.0764, decode.loss_mask: 0.2072, decode.loss_dice: 0.5708, decode.d0.loss_cls: 0.3174, decode.d0.loss_mask: 0.2166, decode.d0.loss_dice: 0.6073, decode.d1.loss_cls: 0.0972, decode.d1.loss_mask: 0.2094, decode.d1.loss_dice: 0.5831, decode.d2.loss_cls: 0.0831, decode.d2.loss_mask: 0.2086, decode.d2.loss_dice: 0.5776, decode.d3.loss_cls: 0.0850, decode.d3.loss_mask: 0.2076, decode.d3.loss_dice: 0.5762, decode.d4.loss_cls: 0.0810, decode.d4.loss_mask: 0.2081, decode.d4.loss_dice: 0.5771, decode.d5.loss_cls: 0.0773, decode.d5.loss_mask: 0.2076, decode.d5.loss_dice: 0.5724, decode.d6.loss_cls: 0.0816, decode.d6.loss_mask: 0.2080, decode.d6.loss_dice: 0.5713, decode.d7.loss_cls: 0.0836, decode.d7.loss_mask: 0.2077, decode.d7.loss_dice: 0.5726, decode.d8.loss_cls: 0.0772, decode.d8.loss_mask: 0.2076, decode.d8.loss_dice: 0.5711, loss: 8.9279 +2022-05-10 14:20:51,671 - mmseg - INFO - Iter [37650/80000] lr: 7.601e-07, eta: 1 day, 20:24:45, time: 1.779, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0704, decode.loss_mask: 0.2112, decode.loss_dice: 0.5752, decode.d0.loss_cls: 0.3135, decode.d0.loss_mask: 0.2218, decode.d0.loss_dice: 0.6037, decode.d1.loss_cls: 0.0962, decode.d1.loss_mask: 0.2148, decode.d1.loss_dice: 0.5822, decode.d2.loss_cls: 0.0906, decode.d2.loss_mask: 0.2127, decode.d2.loss_dice: 0.5784, decode.d3.loss_cls: 0.0709, decode.d3.loss_mask: 0.2122, decode.d3.loss_dice: 0.5727, decode.d4.loss_cls: 0.0734, decode.d4.loss_mask: 0.2116, decode.d4.loss_dice: 0.5776, decode.d5.loss_cls: 0.0799, decode.d5.loss_mask: 0.2118, decode.d5.loss_dice: 0.5809, decode.d6.loss_cls: 0.0749, decode.d6.loss_mask: 0.2118, decode.d6.loss_dice: 0.5792, decode.d7.loss_cls: 0.0749, 
decode.d7.loss_mask: 0.2112, decode.d7.loss_dice: 0.5724, decode.d8.loss_cls: 0.0719, decode.d8.loss_mask: 0.2120, decode.d8.loss_dice: 0.5740, loss: 8.9437 +2022-05-10 14:22:26,482 - mmseg - INFO - Iter [37700/80000] lr: 7.592e-07, eta: 1 day, 19:42:38, time: 1.895, data_time: 0.070, memory: 69053, decode.loss_cls: 0.0701, decode.loss_mask: 0.2034, decode.loss_dice: 0.5741, decode.d0.loss_cls: 0.3091, decode.d0.loss_mask: 0.2125, decode.d0.loss_dice: 0.6055, decode.d1.loss_cls: 0.0938, decode.d1.loss_mask: 0.2048, decode.d1.loss_dice: 0.5837, decode.d2.loss_cls: 0.0790, decode.d2.loss_mask: 0.2031, decode.d2.loss_dice: 0.5822, decode.d3.loss_cls: 0.0706, decode.d3.loss_mask: 0.2034, decode.d3.loss_dice: 0.5773, decode.d4.loss_cls: 0.0775, decode.d4.loss_mask: 0.2036, decode.d4.loss_dice: 0.5735, decode.d5.loss_cls: 0.0786, decode.d5.loss_mask: 0.2037, decode.d5.loss_dice: 0.5769, decode.d6.loss_cls: 0.0726, decode.d6.loss_mask: 0.2037, decode.d6.loss_dice: 0.5739, decode.d7.loss_cls: 0.0680, decode.d7.loss_mask: 0.2032, decode.d7.loss_dice: 0.5694, decode.d8.loss_cls: 0.0727, decode.d8.loss_mask: 0.2037, decode.d8.loss_dice: 0.5737, loss: 8.8273 +2022-05-10 14:23:57,031 - mmseg - INFO - Iter [37750/80000] lr: 7.583e-07, eta: 1 day, 19:01:10, time: 1.812, data_time: 0.021, memory: 69053, decode.loss_cls: 0.0708, decode.loss_mask: 0.2106, decode.loss_dice: 0.5763, decode.d0.loss_cls: 0.3145, decode.d0.loss_mask: 0.2197, decode.d0.loss_dice: 0.6142, decode.d1.loss_cls: 0.0876, decode.d1.loss_mask: 0.2121, decode.d1.loss_dice: 0.5892, decode.d2.loss_cls: 0.0857, decode.d2.loss_mask: 0.2105, decode.d2.loss_dice: 0.5849, decode.d3.loss_cls: 0.0768, decode.d3.loss_mask: 0.2112, decode.d3.loss_dice: 0.5803, decode.d4.loss_cls: 0.0777, decode.d4.loss_mask: 0.2106, decode.d4.loss_dice: 0.5788, decode.d5.loss_cls: 0.0790, decode.d5.loss_mask: 0.2112, decode.d5.loss_dice: 0.5830, decode.d6.loss_cls: 0.0707, decode.d6.loss_mask: 0.2108, decode.d6.loss_dice: 0.5765, 
decode.d7.loss_cls: 0.0715, decode.d7.loss_mask: 0.2107, decode.d7.loss_dice: 0.5761, decode.d8.loss_cls: 0.0755, decode.d8.loss_mask: 0.2111, decode.d8.loss_dice: 0.5759, loss: 8.9635 +2022-05-10 14:25:25,632 - mmseg - INFO - Iter [37800/80000] lr: 7.574e-07, eta: 1 day, 18:21:08, time: 1.772, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0635, decode.loss_mask: 0.2070, decode.loss_dice: 0.5772, decode.d0.loss_cls: 0.2985, decode.d0.loss_mask: 0.2163, decode.d0.loss_dice: 0.6100, decode.d1.loss_cls: 0.0906, decode.d1.loss_mask: 0.2089, decode.d1.loss_dice: 0.5866, decode.d2.loss_cls: 0.0764, decode.d2.loss_mask: 0.2081, decode.d2.loss_dice: 0.5812, decode.d3.loss_cls: 0.0638, decode.d3.loss_mask: 0.2074, decode.d3.loss_dice: 0.5749, decode.d4.loss_cls: 0.0646, decode.d4.loss_mask: 0.2071, decode.d4.loss_dice: 0.5752, decode.d5.loss_cls: 0.0686, decode.d5.loss_mask: 0.2069, decode.d5.loss_dice: 0.5769, decode.d6.loss_cls: 0.0664, decode.d6.loss_mask: 0.2075, decode.d6.loss_dice: 0.5751, decode.d7.loss_cls: 0.0658, decode.d7.loss_mask: 0.2074, decode.d7.loss_dice: 0.5801, decode.d8.loss_cls: 0.0655, decode.d8.loss_mask: 0.2072, decode.d8.loss_dice: 0.5742, loss: 8.8188 +2022-05-10 14:26:54,030 - mmseg - INFO - Iter [37850/80000] lr: 7.565e-07, eta: 1 day, 17:43:07, time: 1.768, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0717, decode.loss_mask: 0.2115, decode.loss_dice: 0.5681, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.2220, decode.d0.loss_dice: 0.5997, decode.d1.loss_cls: 0.0910, decode.d1.loss_mask: 0.2142, decode.d1.loss_dice: 0.5783, decode.d2.loss_cls: 0.0878, decode.d2.loss_mask: 0.2131, decode.d2.loss_dice: 0.5723, decode.d3.loss_cls: 0.0771, decode.d3.loss_mask: 0.2126, decode.d3.loss_dice: 0.5689, decode.d4.loss_cls: 0.0766, decode.d4.loss_mask: 0.2127, decode.d4.loss_dice: 0.5728, decode.d5.loss_cls: 0.0757, decode.d5.loss_mask: 0.2122, decode.d5.loss_dice: 0.5705, decode.d6.loss_cls: 0.0705, decode.d6.loss_mask: 0.2115, 
decode.d6.loss_dice: 0.5699, decode.d7.loss_cls: 0.0754, decode.d7.loss_mask: 0.2122, decode.d7.loss_dice: 0.5691, decode.d8.loss_cls: 0.0740, decode.d8.loss_mask: 0.2122, decode.d8.loss_dice: 0.5684, loss: 8.8725 +2022-05-10 14:28:27,333 - mmseg - INFO - Iter [37900/80000] lr: 7.556e-07, eta: 1 day, 17:08:49, time: 1.866, data_time: 0.069, memory: 69053, decode.loss_cls: 0.0673, decode.loss_mask: 0.2054, decode.loss_dice: 0.5592, decode.d0.loss_cls: 0.3184, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.5879, decode.d1.loss_cls: 0.0855, decode.d1.loss_mask: 0.2057, decode.d1.loss_dice: 0.5716, decode.d2.loss_cls: 0.0771, decode.d2.loss_mask: 0.2051, decode.d2.loss_dice: 0.5655, decode.d3.loss_cls: 0.0661, decode.d3.loss_mask: 0.2051, decode.d3.loss_dice: 0.5636, decode.d4.loss_cls: 0.0712, decode.d4.loss_mask: 0.2056, decode.d4.loss_dice: 0.5604, decode.d5.loss_cls: 0.0714, decode.d5.loss_mask: 0.2053, decode.d5.loss_dice: 0.5635, decode.d6.loss_cls: 0.0671, decode.d6.loss_mask: 0.2053, decode.d6.loss_dice: 0.5618, decode.d7.loss_cls: 0.0765, decode.d7.loss_mask: 0.2053, decode.d7.loss_dice: 0.5694, decode.d8.loss_cls: 0.0719, decode.d8.loss_mask: 0.2055, decode.d8.loss_dice: 0.5635, loss: 8.7004 +2022-05-10 14:29:57,534 - mmseg - INFO - Iter [37950/80000] lr: 7.547e-07, eta: 1 day, 16:35:05, time: 1.803, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0870, decode.loss_mask: 0.2080, decode.loss_dice: 0.5739, decode.d0.loss_cls: 0.3181, decode.d0.loss_mask: 0.2164, decode.d0.loss_dice: 0.6050, decode.d1.loss_cls: 0.1054, decode.d1.loss_mask: 0.2095, decode.d1.loss_dice: 0.5843, decode.d2.loss_cls: 0.0922, decode.d2.loss_mask: 0.2085, decode.d2.loss_dice: 0.5767, decode.d3.loss_cls: 0.0967, decode.d3.loss_mask: 0.2084, decode.d3.loss_dice: 0.5764, decode.d4.loss_cls: 0.0910, decode.d4.loss_mask: 0.2088, decode.d4.loss_dice: 0.5781, decode.d5.loss_cls: 0.0890, decode.d5.loss_mask: 0.2092, decode.d5.loss_dice: 0.5747, decode.d6.loss_cls: 0.0919, 
decode.d6.loss_mask: 0.2079, decode.d6.loss_dice: 0.5733, decode.d7.loss_cls: 0.0921, decode.d7.loss_mask: 0.2083, decode.d7.loss_dice: 0.5743, decode.d8.loss_cls: 0.0904, decode.d8.loss_mask: 0.2084, decode.d8.loss_dice: 0.5750, loss: 9.0390 +2022-05-10 14:31:27,056 - mmseg - INFO - Saving checkpoint at 38000 iterations +2022-05-10 14:31:58,081 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 14:31:58,093 - mmseg - INFO - Iter [38000/80000] lr: 7.538e-07, eta: 1 day, 16:13:32, time: 2.408, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0666, decode.loss_mask: 0.2088, decode.loss_dice: 0.5610, decode.d0.loss_cls: 0.3043, decode.d0.loss_mask: 0.2174, decode.d0.loss_dice: 0.5929, decode.d1.loss_cls: 0.0831, decode.d1.loss_mask: 0.2100, decode.d1.loss_dice: 0.5705, decode.d2.loss_cls: 0.0737, decode.d2.loss_mask: 0.2099, decode.d2.loss_dice: 0.5680, decode.d3.loss_cls: 0.0640, decode.d3.loss_mask: 0.2100, decode.d3.loss_dice: 0.5673, decode.d4.loss_cls: 0.0637, decode.d4.loss_mask: 0.2094, decode.d4.loss_dice: 0.5647, decode.d5.loss_cls: 0.0703, decode.d5.loss_mask: 0.2091, decode.d5.loss_dice: 0.5672, decode.d6.loss_cls: 0.0631, decode.d6.loss_mask: 0.2087, decode.d6.loss_dice: 0.5619, decode.d7.loss_cls: 0.0685, decode.d7.loss_mask: 0.2096, decode.d7.loss_dice: 0.5633, decode.d8.loss_cls: 0.0645, decode.d8.loss_mask: 0.2090, decode.d8.loss_dice: 0.5611, loss: 8.7014 +2022-05-10 14:33:53,594 - mmseg - INFO - per class results: +2022-05-10 14:33:53,599 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.33 | 99.01 | +| sidewalk | 87.01 | 93.69 | +| building | 94.36 | 96.75 | +| wall | 69.32 | 80.26 | +| fence | 78.86 | 86.67 | +| pole | 71.25 | 84.03 | +| traffic light | 76.74 | 89.4 | +| traffic sign | 84.01 | 90.96 | +| vegetation | 93.32 | 97.0 | +| terrain | 68.43 | 78.92 | +| sky | 95.7 | 98.65 | +| person | 86.72 | 93.93 | +| rider | 74.18 | 
84.77 | +| car | 96.23 | 98.17 | +| truck | 91.81 | 94.44 | +| bus | 93.79 | 96.86 | +| train | 88.1 | 91.03 | +| motorcycle | 77.61 | 88.56 | +| bicycle | 82.77 | 91.58 | ++---------------+-------+-------+ +2022-05-10 14:33:53,599 - mmseg - INFO - Summary: +2022-05-10 14:33:53,600 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | ++-------+-------+------+ +| 96.92 | 84.66 | 91.3 | ++-------+-------+------+ +2022-05-10 14:33:53,603 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 14:33:53,603 - mmseg - INFO - Iter(val) [32] aAcc: 0.9692, mIoU: 0.8466, mAcc: 0.9130, IoU.road: 0.9833, IoU.sidewalk: 0.8701, IoU.building: 0.9436, IoU.wall: 0.6932, IoU.fence: 0.7886, IoU.pole: 0.7125, IoU.traffic light: 0.7674, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9332, IoU.terrain: 0.6843, IoU.sky: 0.9570, IoU.person: 0.8672, IoU.rider: 0.7418, IoU.car: 0.9623, IoU.truck: 0.9181, IoU.bus: 0.9379, IoU.train: 0.8810, IoU.motorcycle: 0.7761, IoU.bicycle: 0.8277, Acc.road: 0.9901, Acc.sidewalk: 0.9369, Acc.building: 0.9675, Acc.wall: 0.8026, Acc.fence: 0.8667, Acc.pole: 0.8403, Acc.traffic light: 0.8940, Acc.traffic sign: 0.9096, Acc.vegetation: 0.9700, Acc.terrain: 0.7892, Acc.sky: 0.9865, Acc.person: 0.9393, Acc.rider: 0.8477, Acc.car: 0.9817, Acc.truck: 0.9444, Acc.bus: 0.9686, Acc.train: 0.9103, Acc.motorcycle: 0.8856, Acc.bicycle: 0.9158 +2022-05-10 14:35:25,115 - mmseg - INFO - Iter [38050/80000] lr: 7.529e-07, eta: 1 day, 16:22:32, time: 4.144, data_time: 2.384, memory: 69053, decode.loss_cls: 0.0784, decode.loss_mask: 0.2122, decode.loss_dice: 0.5623, decode.d0.loss_cls: 0.3087, decode.d0.loss_mask: 0.2207, decode.d0.loss_dice: 0.5918, decode.d1.loss_cls: 0.0966, decode.d1.loss_mask: 0.2131, decode.d1.loss_dice: 0.5735, decode.d2.loss_cls: 0.0801, decode.d2.loss_mask: 0.2123, decode.d2.loss_dice: 0.5690, decode.d3.loss_cls: 0.0761, decode.d3.loss_mask: 0.2119, decode.d3.loss_dice: 0.5627, decode.d4.loss_cls: 
0.0705, decode.d4.loss_mask: 0.2121, decode.d4.loss_dice: 0.5660, decode.d5.loss_cls: 0.0733, decode.d5.loss_mask: 0.2119, decode.d5.loss_dice: 0.5683, decode.d6.loss_cls: 0.0721, decode.d6.loss_mask: 0.2113, decode.d6.loss_dice: 0.5626, decode.d7.loss_cls: 0.0735, decode.d7.loss_mask: 0.2115, decode.d7.loss_dice: 0.5641, decode.d8.loss_cls: 0.0696, decode.d8.loss_mask: 0.2119, decode.d8.loss_dice: 0.5640, loss: 8.8123 +2022-05-10 14:36:54,584 - mmseg - INFO - Iter [38100/80000] lr: 7.520e-07, eta: 1 day, 15:51:48, time: 1.789, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0753, decode.loss_mask: 0.2052, decode.loss_dice: 0.5663, decode.d0.loss_cls: 0.3100, decode.d0.loss_mask: 0.2127, decode.d0.loss_dice: 0.5949, decode.d1.loss_cls: 0.0879, decode.d1.loss_mask: 0.2069, decode.d1.loss_dice: 0.5746, decode.d2.loss_cls: 0.0810, decode.d2.loss_mask: 0.2069, decode.d2.loss_dice: 0.5709, decode.d3.loss_cls: 0.0735, decode.d3.loss_mask: 0.2061, decode.d3.loss_dice: 0.5660, decode.d4.loss_cls: 0.0819, decode.d4.loss_mask: 0.2058, decode.d4.loss_dice: 0.5685, decode.d5.loss_cls: 0.0707, decode.d5.loss_mask: 0.2057, decode.d5.loss_dice: 0.5670, decode.d6.loss_cls: 0.0714, decode.d6.loss_mask: 0.2061, decode.d6.loss_dice: 0.5646, decode.d7.loss_cls: 0.0736, decode.d7.loss_mask: 0.2057, decode.d7.loss_dice: 0.5691, decode.d8.loss_cls: 0.0710, decode.d8.loss_mask: 0.2055, decode.d8.loss_dice: 0.5648, loss: 8.7698 +2022-05-10 14:38:23,486 - mmseg - INFO - Iter [38150/80000] lr: 7.511e-07, eta: 1 day, 15:22:14, time: 1.778, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0720, decode.loss_mask: 0.2080, decode.loss_dice: 0.5504, decode.d0.loss_cls: 0.3073, decode.d0.loss_mask: 0.2184, decode.d0.loss_dice: 0.5796, decode.d1.loss_cls: 0.0888, decode.d1.loss_mask: 0.2091, decode.d1.loss_dice: 0.5602, decode.d2.loss_cls: 0.0743, decode.d2.loss_mask: 0.2088, decode.d2.loss_dice: 0.5570, decode.d3.loss_cls: 0.0757, decode.d3.loss_mask: 0.2082, decode.d3.loss_dice: 0.5535, 
decode.d4.loss_cls: 0.0715, decode.d4.loss_mask: 0.2083, decode.d4.loss_dice: 0.5544, decode.d5.loss_cls: 0.0769, decode.d5.loss_mask: 0.2079, decode.d5.loss_dice: 0.5547, decode.d6.loss_cls: 0.0676, decode.d6.loss_mask: 0.2077, decode.d6.loss_dice: 0.5517, decode.d7.loss_cls: 0.0725, decode.d7.loss_mask: 0.2080, decode.d7.loss_dice: 0.5535, decode.d8.loss_cls: 0.0750, decode.d8.loss_mask: 0.2078, decode.d8.loss_dice: 0.5491, loss: 8.6380 +2022-05-10 14:39:54,418 - mmseg - INFO - Iter [38200/80000] lr: 7.502e-07, eta: 1 day, 14:54:36, time: 1.818, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0730, decode.loss_mask: 0.2069, decode.loss_dice: 0.5663, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2149, decode.d0.loss_dice: 0.6012, decode.d1.loss_cls: 0.0911, decode.d1.loss_mask: 0.2083, decode.d1.loss_dice: 0.5764, decode.d2.loss_cls: 0.0858, decode.d2.loss_mask: 0.2083, decode.d2.loss_dice: 0.5711, decode.d3.loss_cls: 0.0791, decode.d3.loss_mask: 0.2076, decode.d3.loss_dice: 0.5668, decode.d4.loss_cls: 0.0714, decode.d4.loss_mask: 0.2080, decode.d4.loss_dice: 0.5703, decode.d5.loss_cls: 0.0764, decode.d5.loss_mask: 0.2081, decode.d5.loss_dice: 0.5713, decode.d6.loss_cls: 0.0734, decode.d6.loss_mask: 0.2073, decode.d6.loss_dice: 0.5694, decode.d7.loss_cls: 0.0791, decode.d7.loss_mask: 0.2076, decode.d7.loss_dice: 0.5652, decode.d8.loss_cls: 0.0744, decode.d8.loss_mask: 0.2078, decode.d8.loss_dice: 0.5677, loss: 8.8207 +2022-05-10 14:41:28,528 - mmseg - INFO - Iter [38250/80000] lr: 7.493e-07, eta: 1 day, 14:29:06, time: 1.882, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0787, decode.loss_mask: 0.2057, decode.loss_dice: 0.5798, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.2140, decode.d0.loss_dice: 0.6104, decode.d1.loss_cls: 0.0861, decode.d1.loss_mask: 0.2068, decode.d1.loss_dice: 0.5946, decode.d2.loss_cls: 0.0770, decode.d2.loss_mask: 0.2061, decode.d2.loss_dice: 0.5881, decode.d3.loss_cls: 0.0781, decode.d3.loss_mask: 0.2053, 
decode.d3.loss_dice: 0.5855, decode.d4.loss_cls: 0.0767, decode.d4.loss_mask: 0.2055, decode.d4.loss_dice: 0.5792, decode.d5.loss_cls: 0.0765, decode.d5.loss_mask: 0.2062, decode.d5.loss_dice: 0.5852, decode.d6.loss_cls: 0.0724, decode.d6.loss_mask: 0.2061, decode.d6.loss_dice: 0.5834, decode.d7.loss_cls: 0.0783, decode.d7.loss_mask: 0.2058, decode.d7.loss_dice: 0.5858, decode.d8.loss_cls: 0.0759, decode.d8.loss_mask: 0.2058, decode.d8.loss_dice: 0.5791, loss: 8.9445 +2022-05-10 14:42:59,976 - mmseg - INFO - Iter [38300/80000] lr: 7.484e-07, eta: 1 day, 14:03:50, time: 1.829, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0685, decode.loss_mask: 0.2075, decode.loss_dice: 0.5694, decode.d0.loss_cls: 0.3164, decode.d0.loss_mask: 0.2157, decode.d0.loss_dice: 0.5949, decode.d1.loss_cls: 0.0817, decode.d1.loss_mask: 0.2097, decode.d1.loss_dice: 0.5744, decode.d2.loss_cls: 0.0703, decode.d2.loss_mask: 0.2090, decode.d2.loss_dice: 0.5713, decode.d3.loss_cls: 0.0750, decode.d3.loss_mask: 0.2085, decode.d3.loss_dice: 0.5676, decode.d4.loss_cls: 0.0715, decode.d4.loss_mask: 0.2087, decode.d4.loss_dice: 0.5717, decode.d5.loss_cls: 0.0750, decode.d5.loss_mask: 0.2078, decode.d5.loss_dice: 0.5709, decode.d6.loss_cls: 0.0702, decode.d6.loss_mask: 0.2077, decode.d6.loss_dice: 0.5684, decode.d7.loss_cls: 0.0675, decode.d7.loss_mask: 0.2074, decode.d7.loss_dice: 0.5677, decode.d8.loss_cls: 0.0666, decode.d8.loss_mask: 0.2078, decode.d8.loss_dice: 0.5676, loss: 8.7764 +2022-05-10 14:44:29,727 - mmseg - INFO - Iter [38350/80000] lr: 7.475e-07, eta: 1 day, 13:39:05, time: 1.795, data_time: 0.021, memory: 69053, decode.loss_cls: 0.0850, decode.loss_mask: 0.2031, decode.loss_dice: 0.5757, decode.d0.loss_cls: 0.3158, decode.d0.loss_mask: 0.2112, decode.d0.loss_dice: 0.6141, decode.d1.loss_cls: 0.1089, decode.d1.loss_mask: 0.2057, decode.d1.loss_dice: 0.5912, decode.d2.loss_cls: 0.0890, decode.d2.loss_mask: 0.2048, decode.d2.loss_dice: 0.5887, decode.d3.loss_cls: 0.0859, 
decode.d3.loss_mask: 0.2040, decode.d3.loss_dice: 0.5816, decode.d4.loss_cls: 0.0801, decode.d4.loss_mask: 0.2033, decode.d4.loss_dice: 0.5796, decode.d5.loss_cls: 0.0766, decode.d5.loss_mask: 0.2037, decode.d5.loss_dice: 0.5840, decode.d6.loss_cls: 0.0818, decode.d6.loss_mask: 0.2041, decode.d6.loss_dice: 0.5833, decode.d7.loss_cls: 0.0789, decode.d7.loss_mask: 0.2039, decode.d7.loss_dice: 0.5809, decode.d8.loss_cls: 0.0773, decode.d8.loss_mask: 0.2038, decode.d8.loss_dice: 0.5798, loss: 8.9856 +2022-05-10 14:45:58,758 - mmseg - INFO - Iter [38400/80000] lr: 7.466e-07, eta: 1 day, 13:15:06, time: 1.781, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0737, decode.loss_mask: 0.2066, decode.loss_dice: 0.5514, decode.d0.loss_cls: 0.3138, decode.d0.loss_mask: 0.2134, decode.d0.loss_dice: 0.5801, decode.d1.loss_cls: 0.0866, decode.d1.loss_mask: 0.2074, decode.d1.loss_dice: 0.5615, decode.d2.loss_cls: 0.0709, decode.d2.loss_mask: 0.2074, decode.d2.loss_dice: 0.5595, decode.d3.loss_cls: 0.0719, decode.d3.loss_mask: 0.2064, decode.d3.loss_dice: 0.5549, decode.d4.loss_cls: 0.0695, decode.d4.loss_mask: 0.2068, decode.d4.loss_dice: 0.5503, decode.d5.loss_cls: 0.0710, decode.d5.loss_mask: 0.2065, decode.d5.loss_dice: 0.5553, decode.d6.loss_cls: 0.0733, decode.d6.loss_mask: 0.2062, decode.d6.loss_dice: 0.5517, decode.d7.loss_cls: 0.0740, decode.d7.loss_mask: 0.2059, decode.d7.loss_dice: 0.5497, decode.d8.loss_cls: 0.0687, decode.d8.loss_mask: 0.2061, decode.d8.loss_dice: 0.5513, loss: 8.6120 +2022-05-10 14:47:30,622 - mmseg - INFO - Iter [38450/80000] lr: 7.457e-07, eta: 1 day, 12:52:49, time: 1.837, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0843, decode.loss_mask: 0.2009, decode.loss_dice: 0.5671, decode.d0.loss_cls: 0.3070, decode.d0.loss_mask: 0.2076, decode.d0.loss_dice: 0.6040, decode.d1.loss_cls: 0.1008, decode.d1.loss_mask: 0.2024, decode.d1.loss_dice: 0.5799, decode.d2.loss_cls: 0.0852, decode.d2.loss_mask: 0.2014, decode.d2.loss_dice: 0.5759, 
decode.d3.loss_cls: 0.0852, decode.d3.loss_mask: 0.2013, decode.d3.loss_dice: 0.5724, decode.d4.loss_cls: 0.0839, decode.d4.loss_mask: 0.2017, decode.d4.loss_dice: 0.5738, decode.d5.loss_cls: 0.0837, decode.d5.loss_mask: 0.2010, decode.d5.loss_dice: 0.5760, decode.d6.loss_cls: 0.0777, decode.d6.loss_mask: 0.2012, decode.d6.loss_dice: 0.5685, decode.d7.loss_cls: 0.0781, decode.d7.loss_mask: 0.2005, decode.d7.loss_dice: 0.5688, decode.d8.loss_cls: 0.0815, decode.d8.loss_mask: 0.2004, decode.d8.loss_dice: 0.5715, loss: 8.8435 +2022-05-10 14:49:01,601 - mmseg - INFO - Iter [38500/80000] lr: 7.448e-07, eta: 1 day, 12:31:08, time: 1.819, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0710, decode.loss_mask: 0.2044, decode.loss_dice: 0.5843, decode.d0.loss_cls: 0.3055, decode.d0.loss_mask: 0.2126, decode.d0.loss_dice: 0.6093, decode.d1.loss_cls: 0.0975, decode.d1.loss_mask: 0.2066, decode.d1.loss_dice: 0.5879, decode.d2.loss_cls: 0.0813, decode.d2.loss_mask: 0.2046, decode.d2.loss_dice: 0.5850, decode.d3.loss_cls: 0.0725, decode.d3.loss_mask: 0.2039, decode.d3.loss_dice: 0.5848, decode.d4.loss_cls: 0.0762, decode.d4.loss_mask: 0.2051, decode.d4.loss_dice: 0.5858, decode.d5.loss_cls: 0.0831, decode.d5.loss_mask: 0.2046, decode.d5.loss_dice: 0.5840, decode.d6.loss_cls: 0.0700, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5805, decode.d7.loss_cls: 0.0714, decode.d7.loss_mask: 0.2039, decode.d7.loss_dice: 0.5802, decode.d8.loss_cls: 0.0748, decode.d8.loss_mask: 0.2044, decode.d8.loss_dice: 0.5818, loss: 8.9212 +2022-05-10 14:50:31,142 - mmseg - INFO - Iter [38550/80000] lr: 7.439e-07, eta: 1 day, 12:09:51, time: 1.791, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0717, decode.loss_mask: 0.2089, decode.loss_dice: 0.5660, decode.d0.loss_cls: 0.3172, decode.d0.loss_mask: 0.2212, decode.d0.loss_dice: 0.5968, decode.d1.loss_cls: 0.0924, decode.d1.loss_mask: 0.2106, decode.d1.loss_dice: 0.5744, decode.d2.loss_cls: 0.0787, decode.d2.loss_mask: 0.2103, 
decode.d2.loss_dice: 0.5689, decode.d3.loss_cls: 0.0730, decode.d3.loss_mask: 0.2099, decode.d3.loss_dice: 0.5667, decode.d4.loss_cls: 0.0753, decode.d4.loss_mask: 0.2099, decode.d4.loss_dice: 0.5651, decode.d5.loss_cls: 0.0715, decode.d5.loss_mask: 0.2089, decode.d5.loss_dice: 0.5673, decode.d6.loss_cls: 0.0728, decode.d6.loss_mask: 0.2089, decode.d6.loss_dice: 0.5679, decode.d7.loss_cls: 0.0778, decode.d7.loss_mask: 0.2088, decode.d7.loss_dice: 0.5624, decode.d8.loss_cls: 0.0711, decode.d8.loss_mask: 0.2092, decode.d8.loss_dice: 0.5659, loss: 8.8093 +2022-05-10 14:52:01,729 - mmseg - INFO - Iter [38600/80000] lr: 7.430e-07, eta: 1 day, 11:49:36, time: 1.812, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0673, decode.loss_mask: 0.2092, decode.loss_dice: 0.5658, decode.d0.loss_cls: 0.3125, decode.d0.loss_mask: 0.2200, decode.d0.loss_dice: 0.5921, decode.d1.loss_cls: 0.0888, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.5721, decode.d2.loss_cls: 0.0734, decode.d2.loss_mask: 0.2104, decode.d2.loss_dice: 0.5731, decode.d3.loss_cls: 0.0717, decode.d3.loss_mask: 0.2096, decode.d3.loss_dice: 0.5671, decode.d4.loss_cls: 0.0722, decode.d4.loss_mask: 0.2094, decode.d4.loss_dice: 0.5653, decode.d5.loss_cls: 0.0694, decode.d5.loss_mask: 0.2096, decode.d5.loss_dice: 0.5665, decode.d6.loss_cls: 0.0674, decode.d6.loss_mask: 0.2091, decode.d6.loss_dice: 0.5639, decode.d7.loss_cls: 0.0673, decode.d7.loss_mask: 0.2094, decode.d7.loss_dice: 0.5671, decode.d8.loss_cls: 0.0743, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5673, loss: 8.7709 +2022-05-10 14:53:33,146 - mmseg - INFO - Iter [38650/80000] lr: 7.421e-07, eta: 1 day, 11:30:16, time: 1.828, data_time: 0.070, memory: 69053, decode.loss_cls: 0.0752, decode.loss_mask: 0.2067, decode.loss_dice: 0.5707, decode.d0.loss_cls: 0.3173, decode.d0.loss_mask: 0.2152, decode.d0.loss_dice: 0.5988, decode.d1.loss_cls: 0.0874, decode.d1.loss_mask: 0.2079, decode.d1.loss_dice: 0.5782, decode.d2.loss_cls: 0.0823, 
decode.d2.loss_mask: 0.2067, decode.d2.loss_dice: 0.5757, decode.d3.loss_cls: 0.0779, decode.d3.loss_mask: 0.2069, decode.d3.loss_dice: 0.5690, decode.d4.loss_cls: 0.0749, decode.d4.loss_mask: 0.2073, decode.d4.loss_dice: 0.5720, decode.d5.loss_cls: 0.0717, decode.d5.loss_mask: 0.2069, decode.d5.loss_dice: 0.5749, decode.d6.loss_cls: 0.0726, decode.d6.loss_mask: 0.2071, decode.d6.loss_dice: 0.5691, decode.d7.loss_cls: 0.0753, decode.d7.loss_mask: 0.2067, decode.d7.loss_dice: 0.5699, decode.d8.loss_cls: 0.0731, decode.d8.loss_mask: 0.2065, decode.d8.loss_dice: 0.5699, loss: 8.8340 +2022-05-10 14:55:03,208 - mmseg - INFO - Iter [38700/80000] lr: 7.412e-07, eta: 1 day, 11:11:16, time: 1.801, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0729, decode.loss_mask: 0.2086, decode.loss_dice: 0.5844, decode.d0.loss_cls: 0.3077, decode.d0.loss_mask: 0.2164, decode.d0.loss_dice: 0.6157, decode.d1.loss_cls: 0.0856, decode.d1.loss_mask: 0.2097, decode.d1.loss_dice: 0.5952, decode.d2.loss_cls: 0.0795, decode.d2.loss_mask: 0.2092, decode.d2.loss_dice: 0.5937, decode.d3.loss_cls: 0.0757, decode.d3.loss_mask: 0.2091, decode.d3.loss_dice: 0.5868, decode.d4.loss_cls: 0.0781, decode.d4.loss_mask: 0.2089, decode.d4.loss_dice: 0.5862, decode.d5.loss_cls: 0.0735, decode.d5.loss_mask: 0.2091, decode.d5.loss_dice: 0.5849, decode.d6.loss_cls: 0.0701, decode.d6.loss_mask: 0.2087, decode.d6.loss_dice: 0.5843, decode.d7.loss_cls: 0.0685, decode.d7.loss_mask: 0.2083, decode.d7.loss_dice: 0.5868, decode.d8.loss_cls: 0.0664, decode.d8.loss_mask: 0.2085, decode.d8.loss_dice: 0.5876, loss: 8.9802 +2022-05-10 14:56:33,036 - mmseg - INFO - Iter [38750/80000] lr: 7.404e-07, eta: 1 day, 10:52:50, time: 1.797, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0716, decode.loss_mask: 0.2120, decode.loss_dice: 0.5671, decode.d0.loss_cls: 0.3137, decode.d0.loss_mask: 0.2230, decode.d0.loss_dice: 0.5953, decode.d1.loss_cls: 0.0935, decode.d1.loss_mask: 0.2152, decode.d1.loss_dice: 0.5727, 
decode.d2.loss_cls: 0.0827, decode.d2.loss_mask: 0.2144, decode.d2.loss_dice: 0.5717, decode.d3.loss_cls: 0.0785, decode.d3.loss_mask: 0.2126, decode.d3.loss_dice: 0.5694, decode.d4.loss_cls: 0.0790, decode.d4.loss_mask: 0.2124, decode.d4.loss_dice: 0.5690, decode.d5.loss_cls: 0.0848, decode.d5.loss_mask: 0.2120, decode.d5.loss_dice: 0.5682, decode.d6.loss_cls: 0.0739, decode.d6.loss_mask: 0.2122, decode.d6.loss_dice: 0.5670, decode.d7.loss_cls: 0.0746, decode.d7.loss_mask: 0.2118, decode.d7.loss_dice: 0.5663, decode.d8.loss_cls: 0.0813, decode.d8.loss_mask: 0.2119, decode.d8.loss_dice: 0.5686, loss: 8.8863 +2022-05-10 14:58:05,147 - mmseg - INFO - Iter [38800/80000] lr: 7.395e-07, eta: 1 day, 10:35:33, time: 1.842, data_time: 0.069, memory: 69053, decode.loss_cls: 0.0729, decode.loss_mask: 0.2034, decode.loss_dice: 0.5741, decode.d0.loss_cls: 0.3102, decode.d0.loss_mask: 0.2135, decode.d0.loss_dice: 0.6054, decode.d1.loss_cls: 0.0849, decode.d1.loss_mask: 0.2042, decode.d1.loss_dice: 0.5867, decode.d2.loss_cls: 0.0829, decode.d2.loss_mask: 0.2046, decode.d2.loss_dice: 0.5853, decode.d3.loss_cls: 0.0795, decode.d3.loss_mask: 0.2037, decode.d3.loss_dice: 0.5748, decode.d4.loss_cls: 0.0806, decode.d4.loss_mask: 0.2036, decode.d4.loss_dice: 0.5738, decode.d5.loss_cls: 0.0780, decode.d5.loss_mask: 0.2035, decode.d5.loss_dice: 0.5755, decode.d6.loss_cls: 0.0733, decode.d6.loss_mask: 0.2035, decode.d6.loss_dice: 0.5721, decode.d7.loss_cls: 0.0722, decode.d7.loss_mask: 0.2036, decode.d7.loss_dice: 0.5735, decode.d8.loss_cls: 0.0695, decode.d8.loss_mask: 0.2040, decode.d8.loss_dice: 0.5747, loss: 8.8474 +2022-05-10 14:59:34,283 - mmseg - INFO - Iter [38850/80000] lr: 7.386e-07, eta: 1 day, 10:18:07, time: 1.783, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0740, decode.loss_mask: 0.2022, decode.loss_dice: 0.5619, decode.d0.loss_cls: 0.2962, decode.d0.loss_mask: 0.2115, decode.d0.loss_dice: 0.5886, decode.d1.loss_cls: 0.0982, decode.d1.loss_mask: 0.2043, 
decode.d1.loss_dice: 0.5718, decode.d2.loss_cls: 0.0879, decode.d2.loss_mask: 0.2032, decode.d2.loss_dice: 0.5679, decode.d3.loss_cls: 0.0795, decode.d3.loss_mask: 0.2028, decode.d3.loss_dice: 0.5666, decode.d4.loss_cls: 0.0727, decode.d4.loss_mask: 0.2024, decode.d4.loss_dice: 0.5638, decode.d5.loss_cls: 0.0745, decode.d5.loss_mask: 0.2025, decode.d5.loss_dice: 0.5639, decode.d6.loss_cls: 0.0750, decode.d6.loss_mask: 0.2023, decode.d6.loss_dice: 0.5625, decode.d7.loss_cls: 0.0703, decode.d7.loss_mask: 0.2022, decode.d7.loss_dice: 0.5626, decode.d8.loss_cls: 0.0721, decode.d8.loss_mask: 0.2022, decode.d8.loss_dice: 0.5649, loss: 8.7106 +2022-05-10 15:01:03,642 - mmseg - INFO - Iter [38900/80000] lr: 7.377e-07, eta: 1 day, 10:01:18, time: 1.787, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0664, decode.loss_mask: 0.2059, decode.loss_dice: 0.5708, decode.d0.loss_cls: 0.2984, decode.d0.loss_mask: 0.2153, decode.d0.loss_dice: 0.5973, decode.d1.loss_cls: 0.0890, decode.d1.loss_mask: 0.2086, decode.d1.loss_dice: 0.5812, decode.d2.loss_cls: 0.0861, decode.d2.loss_mask: 0.2073, decode.d2.loss_dice: 0.5746, decode.d3.loss_cls: 0.0749, decode.d3.loss_mask: 0.2072, decode.d3.loss_dice: 0.5714, decode.d4.loss_cls: 0.0731, decode.d4.loss_mask: 0.2070, decode.d4.loss_dice: 0.5698, decode.d5.loss_cls: 0.0731, decode.d5.loss_mask: 0.2072, decode.d5.loss_dice: 0.5721, decode.d6.loss_cls: 0.0708, decode.d6.loss_mask: 0.2071, decode.d6.loss_dice: 0.5675, decode.d7.loss_cls: 0.0745, decode.d7.loss_mask: 0.2069, decode.d7.loss_dice: 0.5729, decode.d8.loss_cls: 0.0708, decode.d8.loss_mask: 0.2070, decode.d8.loss_dice: 0.5707, loss: 8.8050 +2022-05-10 15:02:34,894 - mmseg - INFO - Iter [38950/80000] lr: 7.368e-07, eta: 1 day, 9:45:25, time: 1.825, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0656, decode.loss_mask: 0.2142, decode.loss_dice: 0.5725, decode.d0.loss_cls: 0.3182, decode.d0.loss_mask: 0.2231, decode.d0.loss_dice: 0.6007, decode.d1.loss_cls: 0.0929, 
decode.d1.loss_mask: 0.2161, decode.d1.loss_dice: 0.5814, decode.d2.loss_cls: 0.0894, decode.d2.loss_mask: 0.2147, decode.d2.loss_dice: 0.5779, decode.d3.loss_cls: 0.0734, decode.d3.loss_mask: 0.2138, decode.d3.loss_dice: 0.5712, decode.d4.loss_cls: 0.0744, decode.d4.loss_mask: 0.2137, decode.d4.loss_dice: 0.5703, decode.d5.loss_cls: 0.0748, decode.d5.loss_mask: 0.2140, decode.d5.loss_dice: 0.5744, decode.d6.loss_cls: 0.0704, decode.d6.loss_mask: 0.2146, decode.d6.loss_dice: 0.5718, decode.d7.loss_cls: 0.0799, decode.d7.loss_mask: 0.2142, decode.d7.loss_dice: 0.5724, decode.d8.loss_cls: 0.0765, decode.d8.loss_mask: 0.2145, decode.d8.loss_dice: 0.5717, loss: 8.9328 +2022-05-10 15:04:05,785 - mmseg - INFO - Saving checkpoint at 39000 iterations +2022-05-10 15:04:37,548 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 15:04:37,560 - mmseg - INFO - Iter [39000/80000] lr: 7.359e-07, eta: 1 day, 9:37:09, time: 2.450, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0725, decode.loss_mask: 0.2015, decode.loss_dice: 0.5597, decode.d0.loss_cls: 0.3119, decode.d0.loss_mask: 0.2103, decode.d0.loss_dice: 0.5880, decode.d1.loss_cls: 0.0987, decode.d1.loss_mask: 0.2045, decode.d1.loss_dice: 0.5716, decode.d2.loss_cls: 0.0805, decode.d2.loss_mask: 0.2028, decode.d2.loss_dice: 0.5668, decode.d3.loss_cls: 0.0791, decode.d3.loss_mask: 0.2029, decode.d3.loss_dice: 0.5651, decode.d4.loss_cls: 0.0754, decode.d4.loss_mask: 0.2018, decode.d4.loss_dice: 0.5606, decode.d5.loss_cls: 0.0759, decode.d5.loss_mask: 0.2022, decode.d5.loss_dice: 0.5632, decode.d6.loss_cls: 0.0743, decode.d6.loss_mask: 0.2029, decode.d6.loss_dice: 0.5656, decode.d7.loss_cls: 0.0745, decode.d7.loss_mask: 0.2026, decode.d7.loss_dice: 0.5619, decode.d8.loss_cls: 0.0703, decode.d8.loss_mask: 0.2017, decode.d8.loss_dice: 0.5611, loss: 8.7099 +2022-05-10 15:06:32,927 - mmseg - INFO - per class results: +2022-05-10 15:06:32,932 - mmseg - INFO - 
++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.29 | 98.79 | +| sidewalk | 87.0 | 94.69 | +| building | 94.34 | 96.99 | +| wall | 65.37 | 79.98 | +| fence | 73.66 | 79.79 | +| pole | 71.72 | 85.17 | +| traffic light | 77.16 | 89.31 | +| traffic sign | 84.08 | 90.99 | +| vegetation | 93.38 | 96.78 | +| terrain | 68.48 | 79.09 | +| sky | 95.75 | 98.53 | +| person | 86.89 | 93.42 | +| rider | 74.73 | 86.04 | +| car | 96.25 | 98.32 | +| truck | 91.86 | 94.52 | +| bus | 93.62 | 96.81 | +| train | 88.29 | 90.43 | +| motorcycle | 76.87 | 89.39 | +| bicycle | 82.87 | 92.18 | ++---------------+-------+-------+ +2022-05-10 15:06:32,932 - mmseg - INFO - Summary: +2022-05-10 15:06:32,932 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.87 | 84.24 | 91.12 | ++-------+-------+-------+ +2022-05-10 15:06:32,936 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 15:06:32,937 - mmseg - INFO - Iter(val) [32] aAcc: 0.9687, mIoU: 0.8424, mAcc: 0.9112, IoU.road: 0.9829, IoU.sidewalk: 0.8700, IoU.building: 0.9434, IoU.wall: 0.6537, IoU.fence: 0.7366, IoU.pole: 0.7172, IoU.traffic light: 0.7716, IoU.traffic sign: 0.8408, IoU.vegetation: 0.9338, IoU.terrain: 0.6848, IoU.sky: 0.9575, IoU.person: 0.8689, IoU.rider: 0.7473, IoU.car: 0.9625, IoU.truck: 0.9186, IoU.bus: 0.9362, IoU.train: 0.8829, IoU.motorcycle: 0.7687, IoU.bicycle: 0.8287, Acc.road: 0.9879, Acc.sidewalk: 0.9469, Acc.building: 0.9699, Acc.wall: 0.7998, Acc.fence: 0.7979, Acc.pole: 0.8517, Acc.traffic light: 0.8931, Acc.traffic sign: 0.9099, Acc.vegetation: 0.9678, Acc.terrain: 0.7909, Acc.sky: 0.9853, Acc.person: 0.9342, Acc.rider: 0.8604, Acc.car: 0.9832, Acc.truck: 0.9452, Acc.bus: 0.9681, Acc.train: 0.9043, Acc.motorcycle: 0.8939, Acc.bicycle: 0.9218 +2022-05-10 15:08:03,122 - mmseg - INFO - Iter [39050/80000] lr: 7.350e-07, eta: 1 day, 9:47:41, time: 4.114, data_time: 
2.330, memory: 69053, decode.loss_cls: 0.0895, decode.loss_mask: 0.2092, decode.loss_dice: 0.5675, decode.d0.loss_cls: 0.3206, decode.d0.loss_mask: 0.2196, decode.d0.loss_dice: 0.6033, decode.d1.loss_cls: 0.1061, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.5788, decode.d2.loss_cls: 0.1015, decode.d2.loss_mask: 0.2103, decode.d2.loss_dice: 0.5738, decode.d3.loss_cls: 0.0906, decode.d3.loss_mask: 0.2095, decode.d3.loss_dice: 0.5690, decode.d4.loss_cls: 0.0938, decode.d4.loss_mask: 0.2086, decode.d4.loss_dice: 0.5715, decode.d5.loss_cls: 0.0865, decode.d5.loss_mask: 0.2092, decode.d5.loss_dice: 0.5717, decode.d6.loss_cls: 0.0922, decode.d6.loss_mask: 0.2091, decode.d6.loss_dice: 0.5708, decode.d7.loss_cls: 0.0928, decode.d7.loss_mask: 0.2092, decode.d7.loss_dice: 0.5716, decode.d8.loss_cls: 0.0835, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5695, loss: 9.0085 +2022-05-10 15:09:32,294 - mmseg - INFO - Iter [39100/80000] lr: 7.341e-07, eta: 1 day, 9:32:09, time: 1.783, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0700, decode.loss_mask: 0.2006, decode.loss_dice: 0.5607, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.2084, decode.d0.loss_dice: 0.5888, decode.d1.loss_cls: 0.0802, decode.d1.loss_mask: 0.2027, decode.d1.loss_dice: 0.5685, decode.d2.loss_cls: 0.0768, decode.d2.loss_mask: 0.2015, decode.d2.loss_dice: 0.5688, decode.d3.loss_cls: 0.0750, decode.d3.loss_mask: 0.2011, decode.d3.loss_dice: 0.5634, decode.d4.loss_cls: 0.0702, decode.d4.loss_mask: 0.2011, decode.d4.loss_dice: 0.5654, decode.d5.loss_cls: 0.0728, decode.d5.loss_mask: 0.2010, decode.d5.loss_dice: 0.5617, decode.d6.loss_cls: 0.0665, decode.d6.loss_mask: 0.2005, decode.d6.loss_dice: 0.5642, decode.d7.loss_cls: 0.0655, decode.d7.loss_mask: 0.2004, decode.d7.loss_dice: 0.5612, decode.d8.loss_cls: 0.0720, decode.d8.loss_mask: 0.2008, decode.d8.loss_dice: 0.5603, loss: 8.6276 +2022-05-10 15:11:01,001 - mmseg - INFO - Iter [39150/80000] lr: 7.332e-07, eta: 1 day, 9:16:58, time: 
1.774, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0663, decode.loss_mask: 0.2108, decode.loss_dice: 0.5773, decode.d0.loss_cls: 0.3046, decode.d0.loss_mask: 0.2191, decode.d0.loss_dice: 0.6023, decode.d1.loss_cls: 0.0841, decode.d1.loss_mask: 0.2117, decode.d1.loss_dice: 0.5891, decode.d2.loss_cls: 0.0768, decode.d2.loss_mask: 0.2117, decode.d2.loss_dice: 0.5824, decode.d3.loss_cls: 0.0792, decode.d3.loss_mask: 0.2115, decode.d3.loss_dice: 0.5802, decode.d4.loss_cls: 0.0770, decode.d4.loss_mask: 0.2108, decode.d4.loss_dice: 0.5778, decode.d5.loss_cls: 0.0746, decode.d5.loss_mask: 0.2113, decode.d5.loss_dice: 0.5795, decode.d6.loss_cls: 0.0703, decode.d6.loss_mask: 0.2107, decode.d6.loss_dice: 0.5765, decode.d7.loss_cls: 0.0670, decode.d7.loss_mask: 0.2107, decode.d7.loss_dice: 0.5786, decode.d8.loss_cls: 0.0757, decode.d8.loss_mask: 0.2106, decode.d8.loss_dice: 0.5742, loss: 8.9125 +2022-05-10 15:12:33,795 - mmseg - INFO - Iter [39200/80000] lr: 7.323e-07, eta: 1 day, 9:03:05, time: 1.856, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0803, decode.loss_mask: 0.2053, decode.loss_dice: 0.5877, decode.d0.loss_cls: 0.3091, decode.d0.loss_mask: 0.2139, decode.d0.loss_dice: 0.6176, decode.d1.loss_cls: 0.1012, decode.d1.loss_mask: 0.2069, decode.d1.loss_dice: 0.5972, decode.d2.loss_cls: 0.0932, decode.d2.loss_mask: 0.2054, decode.d2.loss_dice: 0.5916, decode.d3.loss_cls: 0.0869, decode.d3.loss_mask: 0.2052, decode.d3.loss_dice: 0.5893, decode.d4.loss_cls: 0.0878, decode.d4.loss_mask: 0.2048, decode.d4.loss_dice: 0.5877, decode.d5.loss_cls: 0.0974, decode.d5.loss_mask: 0.2052, decode.d5.loss_dice: 0.5915, decode.d6.loss_cls: 0.0796, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5861, decode.d7.loss_cls: 0.0815, decode.d7.loss_mask: 0.2046, decode.d7.loss_dice: 0.5881, decode.d8.loss_cls: 0.0806, decode.d8.loss_mask: 0.2049, decode.d8.loss_dice: 0.5908, loss: 9.0859 +2022-05-10 15:14:03,360 - mmseg - INFO - Iter [39250/80000] lr: 7.314e-07, eta: 1 day, 
8:48:54, time: 1.791, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0620, decode.loss_mask: 0.2066, decode.loss_dice: 0.5560, decode.d0.loss_cls: 0.3003, decode.d0.loss_mask: 0.2152, decode.d0.loss_dice: 0.5843, decode.d1.loss_cls: 0.0873, decode.d1.loss_mask: 0.2084, decode.d1.loss_dice: 0.5722, decode.d2.loss_cls: 0.0748, decode.d2.loss_mask: 0.2065, decode.d2.loss_dice: 0.5666, decode.d3.loss_cls: 0.0661, decode.d3.loss_mask: 0.2068, decode.d3.loss_dice: 0.5610, decode.d4.loss_cls: 0.0745, decode.d4.loss_mask: 0.2072, decode.d4.loss_dice: 0.5588, decode.d5.loss_cls: 0.0719, decode.d5.loss_mask: 0.2078, decode.d5.loss_dice: 0.5586, decode.d6.loss_cls: 0.0661, decode.d6.loss_mask: 0.2077, decode.d6.loss_dice: 0.5549, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.2072, decode.d7.loss_dice: 0.5595, decode.d8.loss_cls: 0.0636, decode.d8.loss_mask: 0.2071, decode.d8.loss_dice: 0.5600, loss: 8.6480 +2022-05-10 15:15:33,679 - mmseg - INFO - Iter [39300/80000] lr: 7.305e-07, eta: 1 day, 8:35:16, time: 1.806, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0775, decode.loss_mask: 0.2078, decode.loss_dice: 0.5715, decode.d0.loss_cls: 0.3099, decode.d0.loss_mask: 0.2170, decode.d0.loss_dice: 0.6007, decode.d1.loss_cls: 0.0934, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.5870, decode.d2.loss_cls: 0.0817, decode.d2.loss_mask: 0.2098, decode.d2.loss_dice: 0.5747, decode.d3.loss_cls: 0.0802, decode.d3.loss_mask: 0.2079, decode.d3.loss_dice: 0.5751, decode.d4.loss_cls: 0.0807, decode.d4.loss_mask: 0.2082, decode.d4.loss_dice: 0.5719, decode.d5.loss_cls: 0.0838, decode.d5.loss_mask: 0.2076, decode.d5.loss_dice: 0.5712, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2078, decode.d6.loss_dice: 0.5760, decode.d7.loss_cls: 0.0790, decode.d7.loss_mask: 0.2084, decode.d7.loss_dice: 0.5752, decode.d8.loss_cls: 0.0744, decode.d8.loss_mask: 0.2080, decode.d8.loss_dice: 0.5689, loss: 8.9032 +2022-05-10 15:17:05,190 - mmseg - INFO - Iter [39350/80000] lr: 
7.296e-07, eta: 1 day, 8:22:13, time: 1.830, data_time: 0.069, memory: 69053, decode.loss_cls: 0.0829, decode.loss_mask: 0.2087, decode.loss_dice: 0.5685, decode.d0.loss_cls: 0.3155, decode.d0.loss_mask: 0.2190, decode.d0.loss_dice: 0.6081, decode.d1.loss_cls: 0.0981, decode.d1.loss_mask: 0.2105, decode.d1.loss_dice: 0.5851, decode.d2.loss_cls: 0.0839, decode.d2.loss_mask: 0.2098, decode.d2.loss_dice: 0.5779, decode.d3.loss_cls: 0.0799, decode.d3.loss_mask: 0.2096, decode.d3.loss_dice: 0.5711, decode.d4.loss_cls: 0.0810, decode.d4.loss_mask: 0.2097, decode.d4.loss_dice: 0.5699, decode.d5.loss_cls: 0.0773, decode.d5.loss_mask: 0.2089, decode.d5.loss_dice: 0.5696, decode.d6.loss_cls: 0.0729, decode.d6.loss_mask: 0.2094, decode.d6.loss_dice: 0.5723, decode.d7.loss_cls: 0.0783, decode.d7.loss_mask: 0.2093, decode.d7.loss_dice: 0.5676, decode.d8.loss_cls: 0.0775, decode.d8.loss_mask: 0.2093, decode.d8.loss_dice: 0.5722, loss: 8.9136 +2022-05-10 15:18:35,237 - mmseg - INFO - Iter [39400/80000] lr: 7.287e-07, eta: 1 day, 8:09:14, time: 1.801, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0747, decode.loss_mask: 0.2022, decode.loss_dice: 0.5766, decode.d0.loss_cls: 0.3102, decode.d0.loss_mask: 0.2112, decode.d0.loss_dice: 0.6089, decode.d1.loss_cls: 0.0880, decode.d1.loss_mask: 0.2040, decode.d1.loss_dice: 0.5864, decode.d2.loss_cls: 0.0770, decode.d2.loss_mask: 0.2029, decode.d2.loss_dice: 0.5833, decode.d3.loss_cls: 0.0710, decode.d3.loss_mask: 0.2027, decode.d3.loss_dice: 0.5793, decode.d4.loss_cls: 0.0734, decode.d4.loss_mask: 0.2028, decode.d4.loss_dice: 0.5769, decode.d5.loss_cls: 0.0731, decode.d5.loss_mask: 0.2028, decode.d5.loss_dice: 0.5784, decode.d6.loss_cls: 0.0650, decode.d6.loss_mask: 0.2023, decode.d6.loss_dice: 0.5769, decode.d7.loss_cls: 0.0723, decode.d7.loss_mask: 0.2023, decode.d7.loss_dice: 0.5762, decode.d8.loss_cls: 0.0753, decode.d8.loss_mask: 0.2022, decode.d8.loss_dice: 0.5750, loss: 8.8332 +2022-05-10 15:20:04,990 - mmseg - INFO - Iter 
[39450/80000] lr: 7.278e-07, eta: 1 day, 7:56:31, time: 1.795, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0739, decode.loss_mask: 0.2090, decode.loss_dice: 0.5627, decode.d0.loss_cls: 0.3121, decode.d0.loss_mask: 0.2179, decode.d0.loss_dice: 0.5982, decode.d1.loss_cls: 0.1025, decode.d1.loss_mask: 0.2110, decode.d1.loss_dice: 0.5757, decode.d2.loss_cls: 0.0869, decode.d2.loss_mask: 0.2106, decode.d2.loss_dice: 0.5689, decode.d3.loss_cls: 0.0822, decode.d3.loss_mask: 0.2097, decode.d3.loss_dice: 0.5663, decode.d4.loss_cls: 0.0816, decode.d4.loss_mask: 0.2100, decode.d4.loss_dice: 0.5635, decode.d5.loss_cls: 0.0748, decode.d5.loss_mask: 0.2095, decode.d5.loss_dice: 0.5659, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2096, decode.d6.loss_dice: 0.5632, decode.d7.loss_cls: 0.0818, decode.d7.loss_mask: 0.2095, decode.d7.loss_dice: 0.5637, decode.d8.loss_cls: 0.0772, decode.d8.loss_mask: 0.2093, decode.d8.loss_dice: 0.5618, loss: 8.8462 +2022-05-10 15:21:36,547 - mmseg - INFO - Iter [39500/80000] lr: 7.269e-07, eta: 1 day, 7:44:28, time: 1.831, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0728, decode.loss_mask: 0.2065, decode.loss_dice: 0.5596, decode.d0.loss_cls: 0.3122, decode.d0.loss_mask: 0.2173, decode.d0.loss_dice: 0.5946, decode.d1.loss_cls: 0.0914, decode.d1.loss_mask: 0.2094, decode.d1.loss_dice: 0.5687, decode.d2.loss_cls: 0.0794, decode.d2.loss_mask: 0.2077, decode.d2.loss_dice: 0.5632, decode.d3.loss_cls: 0.0808, decode.d3.loss_mask: 0.2065, decode.d3.loss_dice: 0.5616, decode.d4.loss_cls: 0.0779, decode.d4.loss_mask: 0.2069, decode.d4.loss_dice: 0.5600, decode.d5.loss_cls: 0.0750, decode.d5.loss_mask: 0.2068, decode.d5.loss_dice: 0.5603, decode.d6.loss_cls: 0.0793, decode.d6.loss_mask: 0.2068, decode.d6.loss_dice: 0.5605, decode.d7.loss_cls: 0.0720, decode.d7.loss_mask: 0.2068, decode.d7.loss_dice: 0.5604, decode.d8.loss_cls: 0.0694, decode.d8.loss_mask: 0.2062, decode.d8.loss_dice: 0.5602, loss: 8.7402 +2022-05-10 15:23:08,231 - mmseg 
- INFO - Iter [39550/80000] lr: 7.260e-07, eta: 1 day, 7:32:44, time: 1.834, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0657, decode.loss_mask: 0.2060, decode.loss_dice: 0.5627, decode.d0.loss_cls: 0.3022, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.5946, decode.d1.loss_cls: 0.0883, decode.d1.loss_mask: 0.2068, decode.d1.loss_dice: 0.5750, decode.d2.loss_cls: 0.0730, decode.d2.loss_mask: 0.2060, decode.d2.loss_dice: 0.5691, decode.d3.loss_cls: 0.0764, decode.d3.loss_mask: 0.2060, decode.d3.loss_dice: 0.5678, decode.d4.loss_cls: 0.0721, decode.d4.loss_mask: 0.2066, decode.d4.loss_dice: 0.5677, decode.d5.loss_cls: 0.0677, decode.d5.loss_mask: 0.2067, decode.d5.loss_dice: 0.5681, decode.d6.loss_cls: 0.0667, decode.d6.loss_mask: 0.2066, decode.d6.loss_dice: 0.5638, decode.d7.loss_cls: 0.0657, decode.d7.loss_mask: 0.2067, decode.d7.loss_dice: 0.5630, decode.d8.loss_cls: 0.0623, decode.d8.loss_mask: 0.2061, decode.d8.loss_dice: 0.5631, loss: 8.7056 +2022-05-10 15:24:36,130 - mmseg - INFO - Iter [39600/80000] lr: 7.251e-07, eta: 1 day, 7:20:35, time: 1.758, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0699, decode.loss_mask: 0.2101, decode.loss_dice: 0.5576, decode.d0.loss_cls: 0.3111, decode.d0.loss_mask: 0.2183, decode.d0.loss_dice: 0.5799, decode.d1.loss_cls: 0.0818, decode.d1.loss_mask: 0.2126, decode.d1.loss_dice: 0.5649, decode.d2.loss_cls: 0.0747, decode.d2.loss_mask: 0.2117, decode.d2.loss_dice: 0.5602, decode.d3.loss_cls: 0.0774, decode.d3.loss_mask: 0.2115, decode.d3.loss_dice: 0.5532, decode.d4.loss_cls: 0.0755, decode.d4.loss_mask: 0.2109, decode.d4.loss_dice: 0.5555, decode.d5.loss_cls: 0.0808, decode.d5.loss_mask: 0.2103, decode.d5.loss_dice: 0.5550, decode.d6.loss_cls: 0.0768, decode.d6.loss_mask: 0.2105, decode.d6.loss_dice: 0.5539, decode.d7.loss_cls: 0.0749, decode.d7.loss_mask: 0.2103, decode.d7.loss_dice: 0.5535, decode.d8.loss_cls: 0.0762, decode.d8.loss_mask: 0.2100, decode.d8.loss_dice: 0.5568, loss: 8.7060 +2022-05-10 
15:26:05,631 - mmseg - INFO - Iter [39650/80000] lr: 7.242e-07, eta: 1 day, 7:09:01, time: 1.790, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0674, decode.loss_mask: 0.2046, decode.loss_dice: 0.5603, decode.d0.loss_cls: 0.2962, decode.d0.loss_mask: 0.2133, decode.d0.loss_dice: 0.5907, decode.d1.loss_cls: 0.0776, decode.d1.loss_mask: 0.2062, decode.d1.loss_dice: 0.5737, decode.d2.loss_cls: 0.0755, decode.d2.loss_mask: 0.2057, decode.d2.loss_dice: 0.5633, decode.d3.loss_cls: 0.0643, decode.d3.loss_mask: 0.2040, decode.d3.loss_dice: 0.5586, decode.d4.loss_cls: 0.0655, decode.d4.loss_mask: 0.2038, decode.d4.loss_dice: 0.5576, decode.d5.loss_cls: 0.0650, decode.d5.loss_mask: 0.2043, decode.d5.loss_dice: 0.5635, decode.d6.loss_cls: 0.0598, decode.d6.loss_mask: 0.2047, decode.d6.loss_dice: 0.5621, decode.d7.loss_cls: 0.0629, decode.d7.loss_mask: 0.2046, decode.d7.loss_dice: 0.5601, decode.d8.loss_cls: 0.0611, decode.d8.loss_mask: 0.2053, decode.d8.loss_dice: 0.5653, loss: 8.6066 +2022-05-10 15:27:34,722 - mmseg - INFO - Iter [39700/80000] lr: 7.233e-07, eta: 1 day, 6:57:39, time: 1.782, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0704, decode.loss_mask: 0.1984, decode.loss_dice: 0.5554, decode.d0.loss_cls: 0.3037, decode.d0.loss_mask: 0.2069, decode.d0.loss_dice: 0.5839, decode.d1.loss_cls: 0.0926, decode.d1.loss_mask: 0.2000, decode.d1.loss_dice: 0.5704, decode.d2.loss_cls: 0.0862, decode.d2.loss_mask: 0.1992, decode.d2.loss_dice: 0.5662, decode.d3.loss_cls: 0.0800, decode.d3.loss_mask: 0.1982, decode.d3.loss_dice: 0.5583, decode.d4.loss_cls: 0.0822, decode.d4.loss_mask: 0.1985, decode.d4.loss_dice: 0.5581, decode.d5.loss_cls: 0.0775, decode.d5.loss_mask: 0.1985, decode.d5.loss_dice: 0.5646, decode.d6.loss_cls: 0.0755, decode.d6.loss_mask: 0.1984, decode.d6.loss_dice: 0.5601, decode.d7.loss_cls: 0.0769, decode.d7.loss_mask: 0.1983, decode.d7.loss_dice: 0.5572, decode.d8.loss_cls: 0.0749, decode.d8.loss_mask: 0.1990, decode.d8.loss_dice: 0.5597, loss: 
8.6491 +2022-05-10 15:29:06,810 - mmseg - INFO - Iter [39750/80000] lr: 7.224e-07, eta: 1 day, 6:47:05, time: 1.842, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0650, decode.loss_mask: 0.2091, decode.loss_dice: 0.5558, decode.d0.loss_cls: 0.3165, decode.d0.loss_mask: 0.2188, decode.d0.loss_dice: 0.5828, decode.d1.loss_cls: 0.0778, decode.d1.loss_mask: 0.2102, decode.d1.loss_dice: 0.5648, decode.d2.loss_cls: 0.0778, decode.d2.loss_mask: 0.2106, decode.d2.loss_dice: 0.5654, decode.d3.loss_cls: 0.0723, decode.d3.loss_mask: 0.2099, decode.d3.loss_dice: 0.5573, decode.d4.loss_cls: 0.0647, decode.d4.loss_mask: 0.2103, decode.d4.loss_dice: 0.5552, decode.d5.loss_cls: 0.0729, decode.d5.loss_mask: 0.2099, decode.d5.loss_dice: 0.5566, decode.d6.loss_cls: 0.0653, decode.d6.loss_mask: 0.2096, decode.d6.loss_dice: 0.5509, decode.d7.loss_cls: 0.0635, decode.d7.loss_mask: 0.2094, decode.d7.loss_dice: 0.5554, decode.d8.loss_cls: 0.0654, decode.d8.loss_mask: 0.2092, decode.d8.loss_dice: 0.5548, loss: 8.6471 +2022-05-10 15:30:35,151 - mmseg - INFO - Iter [39800/80000] lr: 7.215e-07, eta: 1 day, 6:36:06, time: 1.767, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0688, decode.loss_mask: 0.2046, decode.loss_dice: 0.5674, decode.d0.loss_cls: 0.3092, decode.d0.loss_mask: 0.2122, decode.d0.loss_dice: 0.5964, decode.d1.loss_cls: 0.0926, decode.d1.loss_mask: 0.2064, decode.d1.loss_dice: 0.5758, decode.d2.loss_cls: 0.0781, decode.d2.loss_mask: 0.2048, decode.d2.loss_dice: 0.5723, decode.d3.loss_cls: 0.0675, decode.d3.loss_mask: 0.2048, decode.d3.loss_dice: 0.5736, decode.d4.loss_cls: 0.0692, decode.d4.loss_mask: 0.2042, decode.d4.loss_dice: 0.5700, decode.d5.loss_cls: 0.0690, decode.d5.loss_mask: 0.2042, decode.d5.loss_dice: 0.5733, decode.d6.loss_cls: 0.0653, decode.d6.loss_mask: 0.2048, decode.d6.loss_dice: 0.5701, decode.d7.loss_cls: 0.0639, decode.d7.loss_mask: 0.2045, decode.d7.loss_dice: 0.5680, decode.d8.loss_cls: 0.0669, decode.d8.loss_mask: 0.2045, decode.d8.loss_dice: 
0.5705, loss: 8.7428 +2022-05-10 15:32:05,688 - mmseg - INFO - Iter [39850/80000] lr: 7.206e-07, eta: 1 day, 6:25:44, time: 1.811, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0719, decode.loss_mask: 0.2102, decode.loss_dice: 0.5678, decode.d0.loss_cls: 0.3144, decode.d0.loss_mask: 0.2175, decode.d0.loss_dice: 0.5960, decode.d1.loss_cls: 0.0886, decode.d1.loss_mask: 0.2111, decode.d1.loss_dice: 0.5840, decode.d2.loss_cls: 0.0837, decode.d2.loss_mask: 0.2107, decode.d2.loss_dice: 0.5735, decode.d3.loss_cls: 0.0797, decode.d3.loss_mask: 0.2112, decode.d3.loss_dice: 0.5670, decode.d4.loss_cls: 0.0757, decode.d4.loss_mask: 0.2110, decode.d4.loss_dice: 0.5696, decode.d5.loss_cls: 0.0761, decode.d5.loss_mask: 0.2106, decode.d5.loss_dice: 0.5704, decode.d6.loss_cls: 0.0796, decode.d6.loss_mask: 0.2106, decode.d6.loss_dice: 0.5711, decode.d7.loss_cls: 0.0736, decode.d7.loss_mask: 0.2101, decode.d7.loss_dice: 0.5694, decode.d8.loss_cls: 0.0754, decode.d8.loss_mask: 0.2096, decode.d8.loss_dice: 0.5696, loss: 8.8695 +2022-05-10 15:33:35,050 - mmseg - INFO - Iter [39900/80000] lr: 7.197e-07, eta: 1 day, 6:15:24, time: 1.787, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0641, decode.loss_mask: 0.1994, decode.loss_dice: 0.5511, decode.d0.loss_cls: 0.3081, decode.d0.loss_mask: 0.2075, decode.d0.loss_dice: 0.5769, decode.d1.loss_cls: 0.0869, decode.d1.loss_mask: 0.2011, decode.d1.loss_dice: 0.5576, decode.d2.loss_cls: 0.0728, decode.d2.loss_mask: 0.2000, decode.d2.loss_dice: 0.5570, decode.d3.loss_cls: 0.0645, decode.d3.loss_mask: 0.2001, decode.d3.loss_dice: 0.5539, decode.d4.loss_cls: 0.0651, decode.d4.loss_mask: 0.1999, decode.d4.loss_dice: 0.5509, decode.d5.loss_cls: 0.0633, decode.d5.loss_mask: 0.1996, decode.d5.loss_dice: 0.5522, decode.d6.loss_cls: 0.0643, decode.d6.loss_mask: 0.1996, decode.d6.loss_dice: 0.5504, decode.d7.loss_cls: 0.0650, decode.d7.loss_mask: 0.1998, decode.d7.loss_dice: 0.5514, decode.d8.loss_cls: 0.0690, decode.d8.loss_mask: 0.1999, 
decode.d8.loss_dice: 0.5484, loss: 8.4799 +2022-05-10 15:35:06,927 - mmseg - INFO - Iter [39950/80000] lr: 7.188e-07, eta: 1 day, 6:05:43, time: 1.837, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0727, decode.loss_mask: 0.2062, decode.loss_dice: 0.5711, decode.d0.loss_cls: 0.3040, decode.d0.loss_mask: 0.2152, decode.d0.loss_dice: 0.5957, decode.d1.loss_cls: 0.1010, decode.d1.loss_mask: 0.2079, decode.d1.loss_dice: 0.5847, decode.d2.loss_cls: 0.0877, decode.d2.loss_mask: 0.2066, decode.d2.loss_dice: 0.5796, decode.d3.loss_cls: 0.0765, decode.d3.loss_mask: 0.2064, decode.d3.loss_dice: 0.5711, decode.d4.loss_cls: 0.0774, decode.d4.loss_mask: 0.2061, decode.d4.loss_dice: 0.5694, decode.d5.loss_cls: 0.0820, decode.d5.loss_mask: 0.2063, decode.d5.loss_dice: 0.5726, decode.d6.loss_cls: 0.0737, decode.d6.loss_mask: 0.2057, decode.d6.loss_dice: 0.5737, decode.d7.loss_cls: 0.0752, decode.d7.loss_mask: 0.2062, decode.d7.loss_dice: 0.5724, decode.d8.loss_cls: 0.0782, decode.d8.loss_mask: 0.2064, decode.d8.loss_dice: 0.5709, loss: 8.8626 +2022-05-10 15:36:35,553 - mmseg - INFO - Saving checkpoint at 40000 iterations +2022-05-10 15:37:07,084 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 15:37:07,097 - mmseg - INFO - Iter [40000/80000] lr: 7.179e-07, eta: 1 day, 6:00:55, time: 2.400, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0729, decode.loss_mask: 0.2132, decode.loss_dice: 0.5579, decode.d0.loss_cls: 0.3133, decode.d0.loss_mask: 0.2249, decode.d0.loss_dice: 0.5956, decode.d1.loss_cls: 0.0905, decode.d1.loss_mask: 0.2158, decode.d1.loss_dice: 0.5765, decode.d2.loss_cls: 0.0859, decode.d2.loss_mask: 0.2138, decode.d2.loss_dice: 0.5660, decode.d3.loss_cls: 0.0738, decode.d3.loss_mask: 0.2145, decode.d3.loss_dice: 0.5648, decode.d4.loss_cls: 0.0768, decode.d4.loss_mask: 0.2146, decode.d4.loss_dice: 0.5627, decode.d5.loss_cls: 0.0774, decode.d5.loss_mask: 0.2135, decode.d5.loss_dice: 0.5599, decode.d6.loss_cls: 
0.0739, decode.d6.loss_mask: 0.2141, decode.d6.loss_dice: 0.5595, decode.d7.loss_cls: 0.0744, decode.d7.loss_mask: 0.2143, decode.d7.loss_dice: 0.5631, decode.d8.loss_cls: 0.0705, decode.d8.loss_mask: 0.2140, decode.d8.loss_dice: 0.5601, loss: 8.8283 +2022-05-10 15:39:03,917 - mmseg - INFO - per class results: +2022-05-10 15:39:03,923 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.55 | 99.1 | +| sidewalk | 88.43 | 94.43 | +| building | 94.23 | 96.84 | +| wall | 68.67 | 81.65 | +| fence | 74.35 | 82.86 | +| pole | 71.32 | 82.95 | +| traffic light | 77.18 | 87.39 | +| traffic sign | 83.83 | 90.47 | +| vegetation | 93.37 | 97.06 | +| terrain | 68.77 | 76.47 | +| sky | 95.75 | 98.55 | +| person | 86.85 | 93.34 | +| rider | 74.86 | 85.87 | +| car | 96.22 | 98.32 | +| truck | 81.65 | 94.93 | +| bus | 93.22 | 96.79 | +| train | 87.97 | 90.55 | +| motorcycle | 76.96 | 87.0 | +| bicycle | 82.72 | 91.68 | ++---------------+-------+-------+ +2022-05-10 15:39:03,923 - mmseg - INFO - Summary: +2022-05-10 15:39:03,923 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.96 | 83.94 | 90.85 | ++-------+-------+-------+ +2022-05-10 15:39:03,927 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 15:39:03,927 - mmseg - INFO - Iter(val) [32] aAcc: 0.9696, mIoU: 0.8394, mAcc: 0.9085, IoU.road: 0.9855, IoU.sidewalk: 0.8843, IoU.building: 0.9423, IoU.wall: 0.6867, IoU.fence: 0.7435, IoU.pole: 0.7132, IoU.traffic light: 0.7718, IoU.traffic sign: 0.8383, IoU.vegetation: 0.9337, IoU.terrain: 0.6877, IoU.sky: 0.9575, IoU.person: 0.8685, IoU.rider: 0.7486, IoU.car: 0.9622, IoU.truck: 0.8165, IoU.bus: 0.9322, IoU.train: 0.8797, IoU.motorcycle: 0.7696, IoU.bicycle: 0.8272, Acc.road: 0.9910, Acc.sidewalk: 0.9443, Acc.building: 0.9684, Acc.wall: 0.8165, Acc.fence: 0.8286, Acc.pole: 0.8295, Acc.traffic light: 0.8739, Acc.traffic 
sign: 0.9047, Acc.vegetation: 0.9706, Acc.terrain: 0.7647, Acc.sky: 0.9855, Acc.person: 0.9334, Acc.rider: 0.8587, Acc.car: 0.9832, Acc.truck: 0.9493, Acc.bus: 0.9679, Acc.train: 0.9055, Acc.motorcycle: 0.8700, Acc.bicycle: 0.9168 +2022-05-10 15:40:33,869 - mmseg - INFO - Iter [40050/80000] lr: 7.170e-07, eta: 1 day, 6:10:29, time: 4.139, data_time: 2.359, memory: 69053, decode.loss_cls: 0.0733, decode.loss_mask: 0.2043, decode.loss_dice: 0.5595, decode.d0.loss_cls: 0.3144, decode.d0.loss_mask: 0.2127, decode.d0.loss_dice: 0.5895, decode.d1.loss_cls: 0.0958, decode.d1.loss_mask: 0.2064, decode.d1.loss_dice: 0.5701, decode.d2.loss_cls: 0.0874, decode.d2.loss_mask: 0.2050, decode.d2.loss_dice: 0.5619, decode.d3.loss_cls: 0.0748, decode.d3.loss_mask: 0.2042, decode.d3.loss_dice: 0.5610, decode.d4.loss_cls: 0.0776, decode.d4.loss_mask: 0.2042, decode.d4.loss_dice: 0.5578, decode.d5.loss_cls: 0.0746, decode.d5.loss_mask: 0.2045, decode.d5.loss_dice: 0.5593, decode.d6.loss_cls: 0.0743, decode.d6.loss_mask: 0.2046, decode.d6.loss_dice: 0.5592, decode.d7.loss_cls: 0.0710, decode.d7.loss_mask: 0.2048, decode.d7.loss_dice: 0.5587, decode.d8.loss_cls: 0.0756, decode.d8.loss_mask: 0.2044, decode.d8.loss_dice: 0.5601, loss: 8.7109 +2022-05-10 15:42:07,037 - mmseg - INFO - Iter [40100/80000] lr: 7.161e-07, eta: 1 day, 6:01:16, time: 1.863, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0648, decode.loss_mask: 0.2080, decode.loss_dice: 0.5558, decode.d0.loss_cls: 0.3140, decode.d0.loss_mask: 0.2176, decode.d0.loss_dice: 0.5869, decode.d1.loss_cls: 0.0899, decode.d1.loss_mask: 0.2099, decode.d1.loss_dice: 0.5686, decode.d2.loss_cls: 0.0741, decode.d2.loss_mask: 0.2087, decode.d2.loss_dice: 0.5629, decode.d3.loss_cls: 0.0663, decode.d3.loss_mask: 0.2084, decode.d3.loss_dice: 0.5594, decode.d4.loss_cls: 0.0758, decode.d4.loss_mask: 0.2089, decode.d4.loss_dice: 0.5609, decode.d5.loss_cls: 0.0659, decode.d5.loss_mask: 0.2089, decode.d5.loss_dice: 0.5599, decode.d6.loss_cls: 
0.0696, decode.d6.loss_mask: 0.2085, decode.d6.loss_dice: 0.5577, decode.d7.loss_cls: 0.0649, decode.d7.loss_mask: 0.2082, decode.d7.loss_dice: 0.5564, decode.d8.loss_cls: 0.0643, decode.d8.loss_mask: 0.2077, decode.d8.loss_dice: 0.5539, loss: 8.6668 +2022-05-10 15:43:35,906 - mmseg - INFO - Iter [40150/80000] lr: 7.152e-07, eta: 1 day, 5:51:34, time: 1.777, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0679, decode.loss_mask: 0.2062, decode.loss_dice: 0.5465, decode.d0.loss_cls: 0.3070, decode.d0.loss_mask: 0.2162, decode.d0.loss_dice: 0.5754, decode.d1.loss_cls: 0.0869, decode.d1.loss_mask: 0.2081, decode.d1.loss_dice: 0.5565, decode.d2.loss_cls: 0.0763, decode.d2.loss_mask: 0.2082, decode.d2.loss_dice: 0.5540, decode.d3.loss_cls: 0.0685, decode.d3.loss_mask: 0.2077, decode.d3.loss_dice: 0.5524, decode.d4.loss_cls: 0.0682, decode.d4.loss_mask: 0.2079, decode.d4.loss_dice: 0.5503, decode.d5.loss_cls: 0.0662, decode.d5.loss_mask: 0.2079, decode.d5.loss_dice: 0.5530, decode.d6.loss_cls: 0.0685, decode.d6.loss_mask: 0.2076, decode.d6.loss_dice: 0.5519, decode.d7.loss_cls: 0.0657, decode.d7.loss_mask: 0.2079, decode.d7.loss_dice: 0.5510, decode.d8.loss_cls: 0.0643, decode.d8.loss_mask: 0.2077, decode.d8.loss_dice: 0.5517, loss: 8.5678 +2022-05-10 15:45:07,108 - mmseg - INFO - Iter [40200/80000] lr: 7.143e-07, eta: 1 day, 5:42:25, time: 1.824, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0625, decode.loss_mask: 0.2076, decode.loss_dice: 0.5580, decode.d0.loss_cls: 0.3083, decode.d0.loss_mask: 0.2161, decode.d0.loss_dice: 0.5783, decode.d1.loss_cls: 0.0824, decode.d1.loss_mask: 0.2098, decode.d1.loss_dice: 0.5668, decode.d2.loss_cls: 0.0705, decode.d2.loss_mask: 0.2092, decode.d2.loss_dice: 0.5641, decode.d3.loss_cls: 0.0652, decode.d3.loss_mask: 0.2076, decode.d3.loss_dice: 0.5586, decode.d4.loss_cls: 0.0688, decode.d4.loss_mask: 0.2074, decode.d4.loss_dice: 0.5558, decode.d5.loss_cls: 0.0671, decode.d5.loss_mask: 0.2084, decode.d5.loss_dice: 0.5595, 
decode.d6.loss_cls: 0.0661, decode.d6.loss_mask: 0.2072, decode.d6.loss_dice: 0.5529, decode.d7.loss_cls: 0.0674, decode.d7.loss_mask: 0.2072, decode.d7.loss_dice: 0.5584, decode.d8.loss_cls: 0.0626, decode.d8.loss_mask: 0.2079, decode.d8.loss_dice: 0.5574, loss: 8.6188 +2022-05-10 15:46:36,512 - mmseg - INFO - Iter [40250/80000] lr: 7.134e-07, eta: 1 day, 5:33:11, time: 1.788, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0764, decode.loss_mask: 0.1984, decode.loss_dice: 0.5653, decode.d0.loss_cls: 0.3110, decode.d0.loss_mask: 0.2063, decode.d0.loss_dice: 0.5972, decode.d1.loss_cls: 0.0966, decode.d1.loss_mask: 0.1996, decode.d1.loss_dice: 0.5784, decode.d2.loss_cls: 0.0888, decode.d2.loss_mask: 0.1981, decode.d2.loss_dice: 0.5738, decode.d3.loss_cls: 0.0848, decode.d3.loss_mask: 0.1978, decode.d3.loss_dice: 0.5666, decode.d4.loss_cls: 0.0764, decode.d4.loss_mask: 0.1980, decode.d4.loss_dice: 0.5653, decode.d5.loss_cls: 0.0825, decode.d5.loss_mask: 0.1984, decode.d5.loss_dice: 0.5692, decode.d6.loss_cls: 0.0787, decode.d6.loss_mask: 0.1982, decode.d6.loss_dice: 0.5642, decode.d7.loss_cls: 0.0789, decode.d7.loss_mask: 0.1981, decode.d7.loss_dice: 0.5682, decode.d8.loss_cls: 0.0807, decode.d8.loss_mask: 0.1981, decode.d8.loss_dice: 0.5649, loss: 8.7591 +2022-05-10 15:48:09,791 - mmseg - INFO - Iter [40300/80000] lr: 7.125e-07, eta: 1 day, 5:24:42, time: 1.865, data_time: 0.063, memory: 69053, decode.loss_cls: 0.0765, decode.loss_mask: 0.2068, decode.loss_dice: 0.5738, decode.d0.loss_cls: 0.3192, decode.d0.loss_mask: 0.2154, decode.d0.loss_dice: 0.6047, decode.d1.loss_cls: 0.0947, decode.d1.loss_mask: 0.2090, decode.d1.loss_dice: 0.5800, decode.d2.loss_cls: 0.0908, decode.d2.loss_mask: 0.2081, decode.d2.loss_dice: 0.5714, decode.d3.loss_cls: 0.0865, decode.d3.loss_mask: 0.2078, decode.d3.loss_dice: 0.5714, decode.d4.loss_cls: 0.0822, decode.d4.loss_mask: 0.2080, decode.d4.loss_dice: 0.5711, decode.d5.loss_cls: 0.0804, decode.d5.loss_mask: 0.2081, 
decode.d5.loss_dice: 0.5729, decode.d6.loss_cls: 0.0864, decode.d6.loss_mask: 0.2072, decode.d6.loss_dice: 0.5712, decode.d7.loss_cls: 0.0820, decode.d7.loss_mask: 0.2068, decode.d7.loss_dice: 0.5737, decode.d8.loss_cls: 0.0781, decode.d8.loss_mask: 0.2072, decode.d8.loss_dice: 0.5741, loss: 8.9254 +2022-05-10 15:49:40,370 - mmseg - INFO - Iter [40350/80000] lr: 7.116e-07, eta: 1 day, 5:15:59, time: 1.811, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0678, decode.loss_mask: 0.2043, decode.loss_dice: 0.5639, decode.d0.loss_cls: 0.3149, decode.d0.loss_mask: 0.2150, decode.d0.loss_dice: 0.5925, decode.d1.loss_cls: 0.0951, decode.d1.loss_mask: 0.2070, decode.d1.loss_dice: 0.5687, decode.d2.loss_cls: 0.0818, decode.d2.loss_mask: 0.2071, decode.d2.loss_dice: 0.5663, decode.d3.loss_cls: 0.0751, decode.d3.loss_mask: 0.2058, decode.d3.loss_dice: 0.5638, decode.d4.loss_cls: 0.0713, decode.d4.loss_mask: 0.2051, decode.d4.loss_dice: 0.5625, decode.d5.loss_cls: 0.0715, decode.d5.loss_mask: 0.2060, decode.d5.loss_dice: 0.5657, decode.d6.loss_cls: 0.0707, decode.d6.loss_mask: 0.2052, decode.d6.loss_dice: 0.5660, decode.d7.loss_cls: 0.0752, decode.d7.loss_mask: 0.2052, decode.d7.loss_dice: 0.5627, decode.d8.loss_cls: 0.0708, decode.d8.loss_mask: 0.2055, decode.d8.loss_dice: 0.5611, loss: 8.7336 +2022-05-10 15:51:09,799 - mmseg - INFO - Iter [40400/80000] lr: 7.107e-07, eta: 1 day, 5:07:16, time: 1.789, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0777, decode.loss_mask: 0.2097, decode.loss_dice: 0.5751, decode.d0.loss_cls: 0.3167, decode.d0.loss_mask: 0.2189, decode.d0.loss_dice: 0.6066, decode.d1.loss_cls: 0.0978, decode.d1.loss_mask: 0.2119, decode.d1.loss_dice: 0.5871, decode.d2.loss_cls: 0.0975, decode.d2.loss_mask: 0.2111, decode.d2.loss_dice: 0.5809, decode.d3.loss_cls: 0.0861, decode.d3.loss_mask: 0.2108, decode.d3.loss_dice: 0.5755, decode.d4.loss_cls: 0.0851, decode.d4.loss_mask: 0.2096, decode.d4.loss_dice: 0.5759, decode.d5.loss_cls: 0.0874, 
decode.d5.loss_mask: 0.2099, decode.d5.loss_dice: 0.5768, decode.d6.loss_cls: 0.0797, decode.d6.loss_mask: 0.2099, decode.d6.loss_dice: 0.5772, decode.d7.loss_cls: 0.0830, decode.d7.loss_mask: 0.2105, decode.d7.loss_dice: 0.5769, decode.d8.loss_cls: 0.0808, decode.d8.loss_mask: 0.2101, decode.d8.loss_dice: 0.5745, loss: 9.0109 +2022-05-10 15:52:38,972 - mmseg - INFO - Iter [40450/80000] lr: 7.098e-07, eta: 1 day, 4:58:39, time: 1.784, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0832, decode.loss_mask: 0.2061, decode.loss_dice: 0.5680, decode.d0.loss_cls: 0.3204, decode.d0.loss_mask: 0.2164, decode.d0.loss_dice: 0.5989, decode.d1.loss_cls: 0.1023, decode.d1.loss_mask: 0.2092, decode.d1.loss_dice: 0.5849, decode.d2.loss_cls: 0.0979, decode.d2.loss_mask: 0.2085, decode.d2.loss_dice: 0.5738, decode.d3.loss_cls: 0.0948, decode.d3.loss_mask: 0.2070, decode.d3.loss_dice: 0.5716, decode.d4.loss_cls: 0.0897, decode.d4.loss_mask: 0.2072, decode.d4.loss_dice: 0.5741, decode.d5.loss_cls: 0.0869, decode.d5.loss_mask: 0.2069, decode.d5.loss_dice: 0.5727, decode.d6.loss_cls: 0.0887, decode.d6.loss_mask: 0.2066, decode.d6.loss_dice: 0.5699, decode.d7.loss_cls: 0.0833, decode.d7.loss_mask: 0.2060, decode.d7.loss_dice: 0.5695, decode.d8.loss_cls: 0.0863, decode.d8.loss_mask: 0.2064, decode.d8.loss_dice: 0.5700, loss: 8.9672 +2022-05-10 15:54:11,147 - mmseg - INFO - Iter [40500/80000] lr: 7.089e-07, eta: 1 day, 4:50:39, time: 1.843, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0772, decode.loss_mask: 0.2042, decode.loss_dice: 0.5472, decode.d0.loss_cls: 0.3163, decode.d0.loss_mask: 0.2125, decode.d0.loss_dice: 0.5812, decode.d1.loss_cls: 0.0982, decode.d1.loss_mask: 0.2055, decode.d1.loss_dice: 0.5572, decode.d2.loss_cls: 0.0909, decode.d2.loss_mask: 0.2049, decode.d2.loss_dice: 0.5551, decode.d3.loss_cls: 0.0816, decode.d3.loss_mask: 0.2045, decode.d3.loss_dice: 0.5502, decode.d4.loss_cls: 0.0786, decode.d4.loss_mask: 0.2047, decode.d4.loss_dice: 0.5526, 
decode.d5.loss_cls: 0.0805, decode.d5.loss_mask: 0.2048, decode.d5.loss_dice: 0.5513, decode.d6.loss_cls: 0.0798, decode.d6.loss_mask: 0.2041, decode.d6.loss_dice: 0.5523, decode.d7.loss_cls: 0.0779, decode.d7.loss_mask: 0.2043, decode.d7.loss_dice: 0.5504, decode.d8.loss_cls: 0.0798, decode.d8.loss_mask: 0.2050, decode.d8.loss_dice: 0.5494, loss: 8.6621 +2022-05-10 15:55:40,657 - mmseg - INFO - Iter [40550/80000] lr: 7.080e-07, eta: 1 day, 4:42:24, time: 1.790, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0582, decode.loss_mask: 0.2045, decode.loss_dice: 0.5626, decode.d0.loss_cls: 0.3049, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.5914, decode.d1.loss_cls: 0.0844, decode.d1.loss_mask: 0.2056, decode.d1.loss_dice: 0.5704, decode.d2.loss_cls: 0.0718, decode.d2.loss_mask: 0.2050, decode.d2.loss_dice: 0.5679, decode.d3.loss_cls: 0.0591, decode.d3.loss_mask: 0.2046, decode.d3.loss_dice: 0.5599, decode.d4.loss_cls: 0.0610, decode.d4.loss_mask: 0.2045, decode.d4.loss_dice: 0.5647, decode.d5.loss_cls: 0.0599, decode.d5.loss_mask: 0.2053, decode.d5.loss_dice: 0.5646, decode.d6.loss_cls: 0.0576, decode.d6.loss_mask: 0.2049, decode.d6.loss_dice: 0.5627, decode.d7.loss_cls: 0.0606, decode.d7.loss_mask: 0.2049, decode.d7.loss_dice: 0.5649, decode.d8.loss_cls: 0.0585, decode.d8.loss_mask: 0.2046, decode.d8.loss_dice: 0.5615, loss: 8.6038 +2022-05-10 15:57:08,913 - mmseg - INFO - Iter [40600/80000] lr: 7.071e-07, eta: 1 day, 4:34:07, time: 1.765, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0620, decode.loss_mask: 0.2007, decode.loss_dice: 0.5713, decode.d0.loss_cls: 0.3058, decode.d0.loss_mask: 0.2088, decode.d0.loss_dice: 0.5913, decode.d1.loss_cls: 0.0892, decode.d1.loss_mask: 0.2030, decode.d1.loss_dice: 0.5785, decode.d2.loss_cls: 0.0793, decode.d2.loss_mask: 0.2011, decode.d2.loss_dice: 0.5759, decode.d3.loss_cls: 0.0744, decode.d3.loss_mask: 0.2003, decode.d3.loss_dice: 0.5679, decode.d4.loss_cls: 0.0676, decode.d4.loss_mask: 0.2006, 
decode.d4.loss_dice: 0.5680, decode.d5.loss_cls: 0.0721, decode.d5.loss_mask: 0.1998, decode.d5.loss_dice: 0.5687, decode.d6.loss_cls: 0.0640, decode.d6.loss_mask: 0.2002, decode.d6.loss_dice: 0.5683, decode.d7.loss_cls: 0.0746, decode.d7.loss_mask: 0.2003, decode.d7.loss_dice: 0.5666, decode.d8.loss_cls: 0.0669, decode.d8.loss_mask: 0.2004, decode.d8.loss_dice: 0.5647, loss: 8.6921 +2022-05-10 15:58:40,697 - mmseg - INFO - Iter [40650/80000] lr: 7.063e-07, eta: 1 day, 4:26:29, time: 1.836, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0741, decode.loss_mask: 0.2080, decode.loss_dice: 0.5591, decode.d0.loss_cls: 0.2984, decode.d0.loss_mask: 0.2159, decode.d0.loss_dice: 0.5829, decode.d1.loss_cls: 0.0900, decode.d1.loss_mask: 0.2112, decode.d1.loss_dice: 0.5746, decode.d2.loss_cls: 0.0823, decode.d2.loss_mask: 0.2086, decode.d2.loss_dice: 0.5634, decode.d3.loss_cls: 0.0780, decode.d3.loss_mask: 0.2082, decode.d3.loss_dice: 0.5611, decode.d4.loss_cls: 0.0793, decode.d4.loss_mask: 0.2078, decode.d4.loss_dice: 0.5623, decode.d5.loss_cls: 0.0820, decode.d5.loss_mask: 0.2075, decode.d5.loss_dice: 0.5579, decode.d6.loss_cls: 0.0803, decode.d6.loss_mask: 0.2073, decode.d6.loss_dice: 0.5535, decode.d7.loss_cls: 0.0773, decode.d7.loss_mask: 0.2078, decode.d7.loss_dice: 0.5554, decode.d8.loss_cls: 0.0785, decode.d8.loss_mask: 0.2074, decode.d8.loss_dice: 0.5559, loss: 8.7363 +2022-05-10 16:00:10,598 - mmseg - INFO - Iter [40700/80000] lr: 7.054e-07, eta: 1 day, 4:18:43, time: 1.798, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0748, decode.loss_mask: 0.2048, decode.loss_dice: 0.5623, decode.d0.loss_cls: 0.3143, decode.d0.loss_mask: 0.2137, decode.d0.loss_dice: 0.5898, decode.d1.loss_cls: 0.0954, decode.d1.loss_mask: 0.2070, decode.d1.loss_dice: 0.5704, decode.d2.loss_cls: 0.0799, decode.d2.loss_mask: 0.2059, decode.d2.loss_dice: 0.5665, decode.d3.loss_cls: 0.0795, decode.d3.loss_mask: 0.2059, decode.d3.loss_dice: 0.5625, decode.d4.loss_cls: 0.0763, 
decode.d4.loss_mask: 0.2057, decode.d4.loss_dice: 0.5597, decode.d5.loss_cls: 0.0746, decode.d5.loss_mask: 0.2055, decode.d5.loss_dice: 0.5622, decode.d6.loss_cls: 0.0759, decode.d6.loss_mask: 0.2052, decode.d6.loss_dice: 0.5586, decode.d7.loss_cls: 0.0738, decode.d7.loss_mask: 0.2055, decode.d7.loss_dice: 0.5586, decode.d8.loss_cls: 0.0748, decode.d8.loss_mask: 0.2056, decode.d8.loss_dice: 0.5652, loss: 8.7398 +2022-05-10 16:01:40,295 - mmseg - INFO - Iter [40750/80000] lr: 7.045e-07, eta: 1 day, 4:11:03, time: 1.794, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0693, decode.loss_mask: 0.2065, decode.loss_dice: 0.5572, decode.d0.loss_cls: 0.3054, decode.d0.loss_mask: 0.2176, decode.d0.loss_dice: 0.5826, decode.d1.loss_cls: 0.0871, decode.d1.loss_mask: 0.2095, decode.d1.loss_dice: 0.5633, decode.d2.loss_cls: 0.0765, decode.d2.loss_mask: 0.2071, decode.d2.loss_dice: 0.5609, decode.d3.loss_cls: 0.0740, decode.d3.loss_mask: 0.2071, decode.d3.loss_dice: 0.5588, decode.d4.loss_cls: 0.0701, decode.d4.loss_mask: 0.2068, decode.d4.loss_dice: 0.5525, decode.d5.loss_cls: 0.0664, decode.d5.loss_mask: 0.2067, decode.d5.loss_dice: 0.5588, decode.d6.loss_cls: 0.0580, decode.d6.loss_mask: 0.2068, decode.d6.loss_dice: 0.5597, decode.d7.loss_cls: 0.0623, decode.d7.loss_mask: 0.2066, decode.d7.loss_dice: 0.5584, decode.d8.loss_cls: 0.0603, decode.d8.loss_mask: 0.2064, decode.d8.loss_dice: 0.5564, loss: 8.6192 +2022-05-10 16:03:11,037 - mmseg - INFO - Iter [40800/80000] lr: 7.036e-07, eta: 1 day, 4:03:39, time: 1.815, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0761, decode.loss_mask: 0.2075, decode.loss_dice: 0.5546, decode.d0.loss_cls: 0.2983, decode.d0.loss_mask: 0.2183, decode.d0.loss_dice: 0.5884, decode.d1.loss_cls: 0.0994, decode.d1.loss_mask: 0.2092, decode.d1.loss_dice: 0.5682, decode.d2.loss_cls: 0.0915, decode.d2.loss_mask: 0.2087, decode.d2.loss_dice: 0.5596, decode.d3.loss_cls: 0.0815, decode.d3.loss_mask: 0.2078, decode.d3.loss_dice: 0.5560, 
decode.d4.loss_cls: 0.0829, decode.d4.loss_mask: 0.2084, decode.d4.loss_dice: 0.5573, decode.d5.loss_cls: 0.0820, decode.d5.loss_mask: 0.2076, decode.d5.loss_dice: 0.5570, decode.d6.loss_cls: 0.0797, decode.d6.loss_mask: 0.2080, decode.d6.loss_dice: 0.5545, decode.d7.loss_cls: 0.0806, decode.d7.loss_mask: 0.2081, decode.d7.loss_dice: 0.5521, decode.d8.loss_cls: 0.0785, decode.d8.loss_mask: 0.2074, decode.d8.loss_dice: 0.5549, loss: 8.7444 +2022-05-10 16:04:45,230 - mmseg - INFO - Iter [40850/80000] lr: 7.027e-07, eta: 1 day, 3:56:50, time: 1.883, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0648, decode.loss_mask: 0.2072, decode.loss_dice: 0.5533, decode.d0.loss_cls: 0.3067, decode.d0.loss_mask: 0.2142, decode.d0.loss_dice: 0.5812, decode.d1.loss_cls: 0.0866, decode.d1.loss_mask: 0.2082, decode.d1.loss_dice: 0.5610, decode.d2.loss_cls: 0.0778, decode.d2.loss_mask: 0.2080, decode.d2.loss_dice: 0.5573, decode.d3.loss_cls: 0.0728, decode.d3.loss_mask: 0.2069, decode.d3.loss_dice: 0.5517, decode.d4.loss_cls: 0.0772, decode.d4.loss_mask: 0.2068, decode.d4.loss_dice: 0.5543, decode.d5.loss_cls: 0.0742, decode.d5.loss_mask: 0.2071, decode.d5.loss_dice: 0.5563, decode.d6.loss_cls: 0.0700, decode.d6.loss_mask: 0.2064, decode.d6.loss_dice: 0.5538, decode.d7.loss_cls: 0.0686, decode.d7.loss_mask: 0.2058, decode.d7.loss_dice: 0.5518, decode.d8.loss_cls: 0.0692, decode.d8.loss_mask: 0.2067, decode.d8.loss_dice: 0.5528, loss: 8.6186 +2022-05-10 16:06:14,770 - mmseg - INFO - Iter [40900/80000] lr: 7.018e-07, eta: 1 day, 3:49:31, time: 1.791, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0620, decode.loss_mask: 0.1977, decode.loss_dice: 0.5461, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.2050, decode.d0.loss_dice: 0.5757, decode.d1.loss_cls: 0.0738, decode.d1.loss_mask: 0.1992, decode.d1.loss_dice: 0.5536, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1986, decode.d2.loss_dice: 0.5507, decode.d3.loss_cls: 0.0633, decode.d3.loss_mask: 0.1979, 
decode.d3.loss_dice: 0.5501, decode.d4.loss_cls: 0.0649, decode.d4.loss_mask: 0.1973, decode.d4.loss_dice: 0.5494, decode.d5.loss_cls: 0.0616, decode.d5.loss_mask: 0.1980, decode.d5.loss_dice: 0.5482, decode.d6.loss_cls: 0.0545, decode.d6.loss_mask: 0.1985, decode.d6.loss_dice: 0.5478, decode.d7.loss_cls: 0.0603, decode.d7.loss_mask: 0.1981, decode.d7.loss_dice: 0.5470, decode.d8.loss_cls: 0.0610, decode.d8.loss_mask: 0.1981, decode.d8.loss_dice: 0.5496, loss: 8.3746 +2022-05-10 16:07:44,912 - mmseg - INFO - Iter [40950/80000] lr: 7.009e-07, eta: 1 day, 3:42:24, time: 1.803, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0774, decode.loss_mask: 0.2046, decode.loss_dice: 0.5762, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.2127, decode.d0.loss_dice: 0.6091, decode.d1.loss_cls: 0.1009, decode.d1.loss_mask: 0.2056, decode.d1.loss_dice: 0.5872, decode.d2.loss_cls: 0.0939, decode.d2.loss_mask: 0.2046, decode.d2.loss_dice: 0.5838, decode.d3.loss_cls: 0.0786, decode.d3.loss_mask: 0.2048, decode.d3.loss_dice: 0.5842, decode.d4.loss_cls: 0.0837, decode.d4.loss_mask: 0.2050, decode.d4.loss_dice: 0.5785, decode.d5.loss_cls: 0.0859, decode.d5.loss_mask: 0.2053, decode.d5.loss_dice: 0.5816, decode.d6.loss_cls: 0.0822, decode.d6.loss_mask: 0.2047, decode.d6.loss_dice: 0.5757, decode.d7.loss_cls: 0.0784, decode.d7.loss_mask: 0.2050, decode.d7.loss_dice: 0.5825, decode.d8.loss_cls: 0.0781, decode.d8.loss_mask: 0.2045, decode.d8.loss_dice: 0.5783, loss: 8.9588 +2022-05-10 16:09:15,226 - mmseg - INFO - Saving checkpoint at 41000 iterations +2022-05-10 16:09:47,556 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 16:09:47,568 - mmseg - INFO - Iter [41000/80000] lr: 7.000e-07, eta: 1 day, 3:39:35, time: 2.450, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0678, decode.loss_mask: 0.2081, decode.loss_dice: 0.5541, decode.d0.loss_cls: 0.2976, decode.d0.loss_mask: 0.2179, decode.d0.loss_dice: 0.5865, decode.d1.loss_cls: 
0.0859, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.5666, decode.d2.loss_cls: 0.0724, decode.d2.loss_mask: 0.2098, decode.d2.loss_dice: 0.5614, decode.d3.loss_cls: 0.0798, decode.d3.loss_mask: 0.2089, decode.d3.loss_dice: 0.5572, decode.d4.loss_cls: 0.0734, decode.d4.loss_mask: 0.2090, decode.d4.loss_dice: 0.5584, decode.d5.loss_cls: 0.0748, decode.d5.loss_mask: 0.2093, decode.d5.loss_dice: 0.5592, decode.d6.loss_cls: 0.0702, decode.d6.loss_mask: 0.2094, decode.d6.loss_dice: 0.5581, decode.d7.loss_cls: 0.0729, decode.d7.loss_mask: 0.2089, decode.d7.loss_dice: 0.5546, decode.d8.loss_cls: 0.0715, decode.d8.loss_mask: 0.2087, decode.d8.loss_dice: 0.5546, loss: 8.6782 +2022-05-10 16:11:43,099 - mmseg - INFO - per class results: +2022-05-10 16:11:43,106 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.6 | 99.26 | +| sidewalk | 88.7 | 94.11 | +| building | 94.3 | 97.01 | +| wall | 67.51 | 80.78 | +| fence | 77.93 | 88.55 | +| pole | 71.45 | 83.28 | +| traffic light | 77.36 | 87.92 | +| traffic sign | 83.87 | 90.14 | +| vegetation | 93.52 | 96.91 | +| terrain | 68.03 | 74.91 | +| sky | 95.82 | 98.41 | +| person | 86.85 | 93.0 | +| rider | 74.53 | 86.86 | +| car | 96.27 | 98.26 | +| truck | 91.97 | 94.98 | +| bus | 93.31 | 96.23 | +| train | 88.11 | 90.67 | +| motorcycle | 77.64 | 87.86 | +| bicycle | 82.86 | 91.48 | ++---------------+-------+-------+ +2022-05-10 16:11:43,107 - mmseg - INFO - Summary: +2022-05-10 16:11:43,107 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.04 | 84.66 | 91.08 | ++-------+-------+-------+ +2022-05-10 16:11:43,110 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 16:11:43,110 - mmseg - INFO - Iter(val) [32] aAcc: 0.9704, mIoU: 0.8466, mAcc: 0.9108, IoU.road: 0.9860, IoU.sidewalk: 0.8870, IoU.building: 0.9430, IoU.wall: 0.6751, IoU.fence: 0.7793, IoU.pole: 0.7145, 
IoU.traffic light: 0.7736, IoU.traffic sign: 0.8387, IoU.vegetation: 0.9352, IoU.terrain: 0.6803, IoU.sky: 0.9582, IoU.person: 0.8685, IoU.rider: 0.7453, IoU.car: 0.9627, IoU.truck: 0.9197, IoU.bus: 0.9331, IoU.train: 0.8811, IoU.motorcycle: 0.7764, IoU.bicycle: 0.8286, Acc.road: 0.9926, Acc.sidewalk: 0.9411, Acc.building: 0.9701, Acc.wall: 0.8078, Acc.fence: 0.8855, Acc.pole: 0.8328, Acc.traffic light: 0.8792, Acc.traffic sign: 0.9014, Acc.vegetation: 0.9691, Acc.terrain: 0.7491, Acc.sky: 0.9841, Acc.person: 0.9300, Acc.rider: 0.8686, Acc.car: 0.9826, Acc.truck: 0.9498, Acc.bus: 0.9623, Acc.train: 0.9067, Acc.motorcycle: 0.8786, Acc.bicycle: 0.9148 +2022-05-10 16:13:15,269 - mmseg - INFO - Iter [41050/80000] lr: 6.991e-07, eta: 1 day, 3:47:44, time: 4.152, data_time: 2.379, memory: 69053, decode.loss_cls: 0.0688, decode.loss_mask: 0.2072, decode.loss_dice: 0.5552, decode.d0.loss_cls: 0.3128, decode.d0.loss_mask: 0.2162, decode.d0.loss_dice: 0.5818, decode.d1.loss_cls: 0.0896, decode.d1.loss_mask: 0.2089, decode.d1.loss_dice: 0.5654, decode.d2.loss_cls: 0.0747, decode.d2.loss_mask: 0.2086, decode.d2.loss_dice: 0.5571, decode.d3.loss_cls: 0.0737, decode.d3.loss_mask: 0.2081, decode.d3.loss_dice: 0.5572, decode.d4.loss_cls: 0.0698, decode.d4.loss_mask: 0.2076, decode.d4.loss_dice: 0.5579, decode.d5.loss_cls: 0.0734, decode.d5.loss_mask: 0.2077, decode.d5.loss_dice: 0.5549, decode.d6.loss_cls: 0.0675, decode.d6.loss_mask: 0.2073, decode.d6.loss_dice: 0.5542, decode.d7.loss_cls: 0.0672, decode.d7.loss_mask: 0.2072, decode.d7.loss_dice: 0.5552, decode.d8.loss_cls: 0.0706, decode.d8.loss_mask: 0.2074, decode.d8.loss_dice: 0.5548, loss: 8.6480 +2022-05-10 16:14:45,078 - mmseg - INFO - Iter [41100/80000] lr: 6.982e-07, eta: 1 day, 3:40:42, time: 1.799, data_time: 0.023, memory: 69053, decode.loss_cls: 0.0656, decode.loss_mask: 0.2128, decode.loss_dice: 0.5636, decode.d0.loss_cls: 0.3031, decode.d0.loss_mask: 0.2213, decode.d0.loss_dice: 0.5916, decode.d1.loss_cls: 0.0821, 
decode.d1.loss_mask: 0.2146, decode.d1.loss_dice: 0.5722, decode.d2.loss_cls: 0.0708, decode.d2.loss_mask: 0.2145, decode.d2.loss_dice: 0.5653, decode.d3.loss_cls: 0.0614, decode.d3.loss_mask: 0.2134, decode.d3.loss_dice: 0.5648, decode.d4.loss_cls: 0.0650, decode.d4.loss_mask: 0.2129, decode.d4.loss_dice: 0.5628, decode.d5.loss_cls: 0.0669, decode.d5.loss_mask: 0.2131, decode.d5.loss_dice: 0.5612, decode.d6.loss_cls: 0.0717, decode.d6.loss_mask: 0.2131, decode.d6.loss_dice: 0.5675, decode.d7.loss_cls: 0.0620, decode.d7.loss_mask: 0.2137, decode.d7.loss_dice: 0.5653, decode.d8.loss_cls: 0.0663, decode.d8.loss_mask: 0.2138, decode.d8.loss_dice: 0.5626, loss: 8.7349 +2022-05-10 16:16:14,269 - mmseg - INFO - Iter [41150/80000] lr: 6.973e-07, eta: 1 day, 3:33:42, time: 1.787, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0701, decode.loss_mask: 0.2039, decode.loss_dice: 0.5632, decode.d0.loss_cls: 0.3249, decode.d0.loss_mask: 0.2118, decode.d0.loss_dice: 0.5927, decode.d1.loss_cls: 0.0889, decode.d1.loss_mask: 0.2052, decode.d1.loss_dice: 0.5717, decode.d2.loss_cls: 0.0790, decode.d2.loss_mask: 0.2040, decode.d2.loss_dice: 0.5707, decode.d3.loss_cls: 0.0709, decode.d3.loss_mask: 0.2037, decode.d3.loss_dice: 0.5629, decode.d4.loss_cls: 0.0801, decode.d4.loss_mask: 0.2038, decode.d4.loss_dice: 0.5642, decode.d5.loss_cls: 0.0754, decode.d5.loss_mask: 0.2041, decode.d5.loss_dice: 0.5671, decode.d6.loss_cls: 0.0687, decode.d6.loss_mask: 0.2042, decode.d6.loss_dice: 0.5625, decode.d7.loss_cls: 0.0689, decode.d7.loss_mask: 0.2040, decode.d7.loss_dice: 0.5641, decode.d8.loss_cls: 0.0678, decode.d8.loss_mask: 0.2040, decode.d8.loss_dice: 0.5627, loss: 8.7253 +2022-05-10 16:17:42,488 - mmseg - INFO - Iter [41200/80000] lr: 6.964e-07, eta: 1 day, 3:26:39, time: 1.764, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0782, decode.loss_mask: 0.2091, decode.loss_dice: 0.5705, decode.d0.loss_cls: 0.3150, decode.d0.loss_mask: 0.2178, decode.d0.loss_dice: 0.5990, 
decode.d1.loss_cls: 0.0978, decode.d1.loss_mask: 0.2109, decode.d1.loss_dice: 0.5783, decode.d2.loss_cls: 0.0860, decode.d2.loss_mask: 0.2089, decode.d2.loss_dice: 0.5778, decode.d3.loss_cls: 0.0817, decode.d3.loss_mask: 0.2094, decode.d3.loss_dice: 0.5698, decode.d4.loss_cls: 0.0876, decode.d4.loss_mask: 0.2093, decode.d4.loss_dice: 0.5670, decode.d5.loss_cls: 0.0870, decode.d5.loss_mask: 0.2092, decode.d5.loss_dice: 0.5730, decode.d6.loss_cls: 0.0808, decode.d6.loss_mask: 0.2083, decode.d6.loss_dice: 0.5642, decode.d7.loss_cls: 0.0795, decode.d7.loss_mask: 0.2092, decode.d7.loss_dice: 0.5689, decode.d8.loss_cls: 0.0792, decode.d8.loss_mask: 0.2084, decode.d8.loss_dice: 0.5712, loss: 8.9130 +2022-05-10 16:19:13,694 - mmseg - INFO - Iter [41250/80000] lr: 6.955e-07, eta: 1 day, 3:20:06, time: 1.824, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0722, decode.loss_mask: 0.2054, decode.loss_dice: 0.5594, decode.d0.loss_cls: 0.3017, decode.d0.loss_mask: 0.2142, decode.d0.loss_dice: 0.5893, decode.d1.loss_cls: 0.0890, decode.d1.loss_mask: 0.2069, decode.d1.loss_dice: 0.5681, decode.d2.loss_cls: 0.0851, decode.d2.loss_mask: 0.2055, decode.d2.loss_dice: 0.5697, decode.d3.loss_cls: 0.0851, decode.d3.loss_mask: 0.2049, decode.d3.loss_dice: 0.5617, decode.d4.loss_cls: 0.0814, decode.d4.loss_mask: 0.2053, decode.d4.loss_dice: 0.5627, decode.d5.loss_cls: 0.0794, decode.d5.loss_mask: 0.2049, decode.d5.loss_dice: 0.5579, decode.d6.loss_cls: 0.0790, decode.d6.loss_mask: 0.2053, decode.d6.loss_dice: 0.5607, decode.d7.loss_cls: 0.0811, decode.d7.loss_mask: 0.2056, decode.d7.loss_dice: 0.5593, decode.d8.loss_cls: 0.0749, decode.d8.loss_mask: 0.2052, decode.d8.loss_dice: 0.5602, loss: 8.7413 +2022-05-10 16:20:43,045 - mmseg - INFO - Iter [41300/80000] lr: 6.946e-07, eta: 1 day, 3:13:24, time: 1.787, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0778, decode.loss_mask: 0.2020, decode.loss_dice: 0.5674, decode.d0.loss_cls: 0.3148, decode.d0.loss_mask: 0.2094, 
decode.d0.loss_dice: 0.5955, decode.d1.loss_cls: 0.0850, decode.d1.loss_mask: 0.2035, decode.d1.loss_dice: 0.5808, decode.d2.loss_cls: 0.0756, decode.d2.loss_mask: 0.2027, decode.d2.loss_dice: 0.5769, decode.d3.loss_cls: 0.0702, decode.d3.loss_mask: 0.2028, decode.d3.loss_dice: 0.5723, decode.d4.loss_cls: 0.0762, decode.d4.loss_mask: 0.2029, decode.d4.loss_dice: 0.5729, decode.d5.loss_cls: 0.0736, decode.d5.loss_mask: 0.2025, decode.d5.loss_dice: 0.5725, decode.d6.loss_cls: 0.0728, decode.d6.loss_mask: 0.2023, decode.d6.loss_dice: 0.5661, decode.d7.loss_cls: 0.0705, decode.d7.loss_mask: 0.2025, decode.d7.loss_dice: 0.5677, decode.d8.loss_cls: 0.0754, decode.d8.loss_mask: 0.2021, decode.d8.loss_dice: 0.5708, loss: 8.7674 +2022-05-10 16:22:13,140 - mmseg - INFO - Iter [41350/80000] lr: 6.937e-07, eta: 1 day, 3:06:54, time: 1.802, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0755, decode.loss_mask: 0.2068, decode.loss_dice: 0.5624, decode.d0.loss_cls: 0.3165, decode.d0.loss_mask: 0.2187, decode.d0.loss_dice: 0.5951, decode.d1.loss_cls: 0.1063, decode.d1.loss_mask: 0.2088, decode.d1.loss_dice: 0.5747, decode.d2.loss_cls: 0.1013, decode.d2.loss_mask: 0.2085, decode.d2.loss_dice: 0.5672, decode.d3.loss_cls: 0.0899, decode.d3.loss_mask: 0.2069, decode.d3.loss_dice: 0.5639, decode.d4.loss_cls: 0.0884, decode.d4.loss_mask: 0.2069, decode.d4.loss_dice: 0.5653, decode.d5.loss_cls: 0.0866, decode.d5.loss_mask: 0.2074, decode.d5.loss_dice: 0.5609, decode.d6.loss_cls: 0.0805, decode.d6.loss_mask: 0.2065, decode.d6.loss_dice: 0.5630, decode.d7.loss_cls: 0.0835, decode.d7.loss_mask: 0.2067, decode.d7.loss_dice: 0.5652, decode.d8.loss_cls: 0.0783, decode.d8.loss_mask: 0.2069, decode.d8.loss_dice: 0.5657, loss: 8.8743 +2022-05-10 16:23:44,184 - mmseg - INFO - Iter [41400/80000] lr: 6.928e-07, eta: 1 day, 3:00:35, time: 1.821, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0682, decode.loss_mask: 0.2080, decode.loss_dice: 0.5474, decode.d0.loss_cls: 0.3279, 
decode.d0.loss_mask: 0.2163, decode.d0.loss_dice: 0.5750, decode.d1.loss_cls: 0.0888, decode.d1.loss_mask: 0.2092, decode.d1.loss_dice: 0.5560, decode.d2.loss_cls: 0.0781, decode.d2.loss_mask: 0.2090, decode.d2.loss_dice: 0.5535, decode.d3.loss_cls: 0.0689, decode.d3.loss_mask: 0.2089, decode.d3.loss_dice: 0.5551, decode.d4.loss_cls: 0.0724, decode.d4.loss_mask: 0.2088, decode.d4.loss_dice: 0.5547, decode.d5.loss_cls: 0.0709, decode.d5.loss_mask: 0.2085, decode.d5.loss_dice: 0.5484, decode.d6.loss_cls: 0.0696, decode.d6.loss_mask: 0.2083, decode.d6.loss_dice: 0.5482, decode.d7.loss_cls: 0.0717, decode.d7.loss_mask: 0.2085, decode.d7.loss_dice: 0.5482, decode.d8.loss_cls: 0.0680, decode.d8.loss_mask: 0.2085, decode.d8.loss_dice: 0.5522, loss: 8.6170 +2022-05-10 16:25:13,487 - mmseg - INFO - Iter [41450/80000] lr: 6.919e-07, eta: 1 day, 2:54:10, time: 1.786, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0713, decode.loss_mask: 0.1971, decode.loss_dice: 0.5525, decode.d0.loss_cls: 0.3036, decode.d0.loss_mask: 0.2053, decode.d0.loss_dice: 0.5832, decode.d1.loss_cls: 0.0869, decode.d1.loss_mask: 0.1990, decode.d1.loss_dice: 0.5659, decode.d2.loss_cls: 0.0816, decode.d2.loss_mask: 0.1976, decode.d2.loss_dice: 0.5606, decode.d3.loss_cls: 0.0770, decode.d3.loss_mask: 0.1965, decode.d3.loss_dice: 0.5508, decode.d4.loss_cls: 0.0768, decode.d4.loss_mask: 0.1967, decode.d4.loss_dice: 0.5526, decode.d5.loss_cls: 0.0773, decode.d5.loss_mask: 0.1966, decode.d5.loss_dice: 0.5554, decode.d6.loss_cls: 0.0702, decode.d6.loss_mask: 0.1969, decode.d6.loss_dice: 0.5543, decode.d7.loss_cls: 0.0685, decode.d7.loss_mask: 0.1970, decode.d7.loss_dice: 0.5542, decode.d8.loss_cls: 0.0679, decode.d8.loss_mask: 0.1969, decode.d8.loss_dice: 0.5570, loss: 8.5471 +2022-05-10 16:26:42,709 - mmseg - INFO - Iter [41500/80000] lr: 6.910e-07, eta: 1 day, 2:47:50, time: 1.784, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0734, decode.loss_mask: 0.2053, decode.loss_dice: 0.5547, 
decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2149, decode.d0.loss_dice: 0.5925, decode.d1.loss_cls: 0.0863, decode.d1.loss_mask: 0.2073, decode.d1.loss_dice: 0.5753, decode.d2.loss_cls: 0.0762, decode.d2.loss_mask: 0.2070, decode.d2.loss_dice: 0.5698, decode.d3.loss_cls: 0.0737, decode.d3.loss_mask: 0.2067, decode.d3.loss_dice: 0.5579, decode.d4.loss_cls: 0.0770, decode.d4.loss_mask: 0.2061, decode.d4.loss_dice: 0.5643, decode.d5.loss_cls: 0.0722, decode.d5.loss_mask: 0.2065, decode.d5.loss_dice: 0.5642, decode.d6.loss_cls: 0.0707, decode.d6.loss_mask: 0.2062, decode.d6.loss_dice: 0.5594, decode.d7.loss_cls: 0.0691, decode.d7.loss_mask: 0.2059, decode.d7.loss_dice: 0.5614, decode.d8.loss_cls: 0.0724, decode.d8.loss_mask: 0.2058, decode.d8.loss_dice: 0.5606, loss: 8.7094 +2022-05-10 16:28:12,381 - mmseg - INFO - Iter [41550/80000] lr: 6.901e-07, eta: 1 day, 2:41:38, time: 1.793, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0774, decode.loss_mask: 0.2074, decode.loss_dice: 0.5626, decode.d0.loss_cls: 0.3105, decode.d0.loss_mask: 0.2167, decode.d0.loss_dice: 0.5933, decode.d1.loss_cls: 0.1049, decode.d1.loss_mask: 0.2092, decode.d1.loss_dice: 0.5733, decode.d2.loss_cls: 0.0902, decode.d2.loss_mask: 0.2081, decode.d2.loss_dice: 0.5705, decode.d3.loss_cls: 0.0866, decode.d3.loss_mask: 0.2076, decode.d3.loss_dice: 0.5651, decode.d4.loss_cls: 0.0843, decode.d4.loss_mask: 0.2077, decode.d4.loss_dice: 0.5656, decode.d5.loss_cls: 0.0836, decode.d5.loss_mask: 0.2069, decode.d5.loss_dice: 0.5652, decode.d6.loss_cls: 0.0782, decode.d6.loss_mask: 0.2072, decode.d6.loss_dice: 0.5615, decode.d7.loss_cls: 0.0757, decode.d7.loss_mask: 0.2072, decode.d7.loss_dice: 0.5664, decode.d8.loss_cls: 0.0851, decode.d8.loss_mask: 0.2070, decode.d8.loss_dice: 0.5644, loss: 8.8493 +2022-05-10 16:29:43,658 - mmseg - INFO - Iter [41600/80000] lr: 6.892e-07, eta: 1 day, 2:35:42, time: 1.825, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0670, decode.loss_mask: 0.2005, 
decode.loss_dice: 0.5563, decode.d0.loss_cls: 0.3049, decode.d0.loss_mask: 0.2080, decode.d0.loss_dice: 0.5866, decode.d1.loss_cls: 0.0832, decode.d1.loss_mask: 0.2020, decode.d1.loss_dice: 0.5695, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.2013, decode.d2.loss_dice: 0.5618, decode.d3.loss_cls: 0.0678, decode.d3.loss_mask: 0.2012, decode.d3.loss_dice: 0.5575, decode.d4.loss_cls: 0.0707, decode.d4.loss_mask: 0.2009, decode.d4.loss_dice: 0.5552, decode.d5.loss_cls: 0.0677, decode.d5.loss_mask: 0.2010, decode.d5.loss_dice: 0.5572, decode.d6.loss_cls: 0.0720, decode.d6.loss_mask: 0.2010, decode.d6.loss_dice: 0.5554, decode.d7.loss_cls: 0.0632, decode.d7.loss_mask: 0.2008, decode.d7.loss_dice: 0.5571, decode.d8.loss_cls: 0.0750, decode.d8.loss_mask: 0.2007, decode.d8.loss_dice: 0.5577, loss: 8.5749 +2022-05-10 16:31:12,945 - mmseg - INFO - Iter [41650/80000] lr: 6.883e-07, eta: 1 day, 2:29:37, time: 1.786, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0696, decode.loss_mask: 0.2048, decode.loss_dice: 0.5518, decode.d0.loss_cls: 0.3128, decode.d0.loss_mask: 0.2133, decode.d0.loss_dice: 0.5789, decode.d1.loss_cls: 0.0882, decode.d1.loss_mask: 0.2071, decode.d1.loss_dice: 0.5621, decode.d2.loss_cls: 0.0758, decode.d2.loss_mask: 0.2063, decode.d2.loss_dice: 0.5549, decode.d3.loss_cls: 0.0806, decode.d3.loss_mask: 0.2055, decode.d3.loss_dice: 0.5515, decode.d4.loss_cls: 0.0758, decode.d4.loss_mask: 0.2057, decode.d4.loss_dice: 0.5532, decode.d5.loss_cls: 0.0713, decode.d5.loss_mask: 0.2056, decode.d5.loss_dice: 0.5525, decode.d6.loss_cls: 0.0672, decode.d6.loss_mask: 0.2057, decode.d6.loss_dice: 0.5536, decode.d7.loss_cls: 0.0743, decode.d7.loss_mask: 0.2047, decode.d7.loss_dice: 0.5545, decode.d8.loss_cls: 0.0720, decode.d8.loss_mask: 0.2057, decode.d8.loss_dice: 0.5526, loss: 8.6175 +2022-05-10 16:32:42,819 - mmseg - INFO - Iter [41700/80000] lr: 6.874e-07, eta: 1 day, 2:23:41, time: 1.797, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0737, 
decode.loss_mask: 0.2016, decode.loss_dice: 0.5606, decode.d0.loss_cls: 0.3120, decode.d0.loss_mask: 0.2118, decode.d0.loss_dice: 0.5921, decode.d1.loss_cls: 0.0898, decode.d1.loss_mask: 0.2032, decode.d1.loss_dice: 0.5759, decode.d2.loss_cls: 0.0811, decode.d2.loss_mask: 0.2026, decode.d2.loss_dice: 0.5720, decode.d3.loss_cls: 0.0751, decode.d3.loss_mask: 0.2021, decode.d3.loss_dice: 0.5643, decode.d4.loss_cls: 0.0737, decode.d4.loss_mask: 0.2016, decode.d4.loss_dice: 0.5624, decode.d5.loss_cls: 0.0725, decode.d5.loss_mask: 0.2020, decode.d5.loss_dice: 0.5694, decode.d6.loss_cls: 0.0682, decode.d6.loss_mask: 0.2015, decode.d6.loss_dice: 0.5627, decode.d7.loss_cls: 0.0703, decode.d7.loss_mask: 0.2016, decode.d7.loss_dice: 0.5593, decode.d8.loss_cls: 0.0679, decode.d8.loss_mask: 0.2018, decode.d8.loss_dice: 0.5642, loss: 8.6971 +2022-05-10 16:34:13,303 - mmseg - INFO - Iter [41750/80000] lr: 6.865e-07, eta: 1 day, 2:17:54, time: 1.810, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0692, decode.loss_mask: 0.2030, decode.loss_dice: 0.5512, decode.d0.loss_cls: 0.3130, decode.d0.loss_mask: 0.2128, decode.d0.loss_dice: 0.5827, decode.d1.loss_cls: 0.0895, decode.d1.loss_mask: 0.2044, decode.d1.loss_dice: 0.5647, decode.d2.loss_cls: 0.0767, decode.d2.loss_mask: 0.2038, decode.d2.loss_dice: 0.5569, decode.d3.loss_cls: 0.0773, decode.d3.loss_mask: 0.2029, decode.d3.loss_dice: 0.5549, decode.d4.loss_cls: 0.0715, decode.d4.loss_mask: 0.2035, decode.d4.loss_dice: 0.5578, decode.d5.loss_cls: 0.0703, decode.d5.loss_mask: 0.2037, decode.d5.loss_dice: 0.5566, decode.d6.loss_cls: 0.0706, decode.d6.loss_mask: 0.2042, decode.d6.loss_dice: 0.5552, decode.d7.loss_cls: 0.0699, decode.d7.loss_mask: 0.2038, decode.d7.loss_dice: 0.5543, decode.d8.loss_cls: 0.0652, decode.d8.loss_mask: 0.2032, decode.d8.loss_dice: 0.5585, loss: 8.6113 +2022-05-10 16:35:45,176 - mmseg - INFO - Iter [41800/80000] lr: 6.856e-07, eta: 1 day, 2:12:20, time: 1.837, data_time: 0.064, memory: 69053, 
decode.loss_cls: 0.0657, decode.loss_mask: 0.2050, decode.loss_dice: 0.5479, decode.d0.loss_cls: 0.3129, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.5833, decode.d1.loss_cls: 0.0776, decode.d1.loss_mask: 0.2073, decode.d1.loss_dice: 0.5593, decode.d2.loss_cls: 0.0785, decode.d2.loss_mask: 0.2051, decode.d2.loss_dice: 0.5538, decode.d3.loss_cls: 0.0761, decode.d3.loss_mask: 0.2051, decode.d3.loss_dice: 0.5474, decode.d4.loss_cls: 0.0671, decode.d4.loss_mask: 0.2051, decode.d4.loss_dice: 0.5479, decode.d5.loss_cls: 0.0673, decode.d5.loss_mask: 0.2050, decode.d5.loss_dice: 0.5496, decode.d6.loss_cls: 0.0686, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5433, decode.d7.loss_cls: 0.0703, decode.d7.loss_mask: 0.2048, decode.d7.loss_dice: 0.5516, decode.d8.loss_cls: 0.0656, decode.d8.loss_mask: 0.2048, decode.d8.loss_dice: 0.5445, loss: 8.5382 +2022-05-10 16:37:15,500 - mmseg - INFO - Iter [41850/80000] lr: 6.847e-07, eta: 1 day, 2:06:41, time: 1.807, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0671, decode.loss_mask: 0.2046, decode.loss_dice: 0.5686, decode.d0.loss_cls: 0.3047, decode.d0.loss_mask: 0.2142, decode.d0.loss_dice: 0.5925, decode.d1.loss_cls: 0.0914, decode.d1.loss_mask: 0.2064, decode.d1.loss_dice: 0.5799, decode.d2.loss_cls: 0.0798, decode.d2.loss_mask: 0.2054, decode.d2.loss_dice: 0.5719, decode.d3.loss_cls: 0.0798, decode.d3.loss_mask: 0.2049, decode.d3.loss_dice: 0.5664, decode.d4.loss_cls: 0.0744, decode.d4.loss_mask: 0.2049, decode.d4.loss_dice: 0.5687, decode.d5.loss_cls: 0.0664, decode.d5.loss_mask: 0.2052, decode.d5.loss_dice: 0.5703, decode.d6.loss_cls: 0.0713, decode.d6.loss_mask: 0.2049, decode.d6.loss_dice: 0.5639, decode.d7.loss_cls: 0.0690, decode.d7.loss_mask: 0.2051, decode.d7.loss_dice: 0.5671, decode.d8.loss_cls: 0.0667, decode.d8.loss_mask: 0.2052, decode.d8.loss_dice: 0.5657, loss: 8.7462 +2022-05-10 16:38:43,525 - mmseg - INFO - Iter [41900/80000] lr: 6.838e-07, eta: 1 day, 2:00:50, time: 1.761, data_time: 0.018, 
memory: 69053, decode.loss_cls: 0.0785, decode.loss_mask: 0.2124, decode.loss_dice: 0.5571, decode.d0.loss_cls: 0.3198, decode.d0.loss_mask: 0.2225, decode.d0.loss_dice: 0.5916, decode.d1.loss_cls: 0.0932, decode.d1.loss_mask: 0.2150, decode.d1.loss_dice: 0.5695, decode.d2.loss_cls: 0.0927, decode.d2.loss_mask: 0.2122, decode.d2.loss_dice: 0.5623, decode.d3.loss_cls: 0.0820, decode.d3.loss_mask: 0.2124, decode.d3.loss_dice: 0.5601, decode.d4.loss_cls: 0.0795, decode.d4.loss_mask: 0.2127, decode.d4.loss_dice: 0.5601, decode.d5.loss_cls: 0.0791, decode.d5.loss_mask: 0.2132, decode.d5.loss_dice: 0.5614, decode.d6.loss_cls: 0.0771, decode.d6.loss_mask: 0.2123, decode.d6.loss_dice: 0.5574, decode.d7.loss_cls: 0.0817, decode.d7.loss_mask: 0.2122, decode.d7.loss_dice: 0.5581, decode.d8.loss_cls: 0.0744, decode.d8.loss_mask: 0.2123, decode.d8.loss_dice: 0.5589, loss: 8.8318 +2022-05-10 16:40:14,024 - mmseg - INFO - Iter [41950/80000] lr: 6.829e-07, eta: 1 day, 1:55:20, time: 1.810, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0794, decode.loss_mask: 0.2010, decode.loss_dice: 0.5723, decode.d0.loss_cls: 0.3205, decode.d0.loss_mask: 0.2102, decode.d0.loss_dice: 0.6032, decode.d1.loss_cls: 0.1013, decode.d1.loss_mask: 0.2024, decode.d1.loss_dice: 0.5867, decode.d2.loss_cls: 0.0945, decode.d2.loss_mask: 0.2027, decode.d2.loss_dice: 0.5779, decode.d3.loss_cls: 0.0842, decode.d3.loss_mask: 0.2017, decode.d3.loss_dice: 0.5728, decode.d4.loss_cls: 0.0822, decode.d4.loss_mask: 0.2014, decode.d4.loss_dice: 0.5750, decode.d5.loss_cls: 0.0869, decode.d5.loss_mask: 0.2007, decode.d5.loss_dice: 0.5744, decode.d6.loss_cls: 0.0845, decode.d6.loss_mask: 0.2009, decode.d6.loss_dice: 0.5708, decode.d7.loss_cls: 0.0852, decode.d7.loss_mask: 0.2009, decode.d7.loss_dice: 0.5723, decode.d8.loss_cls: 0.0867, decode.d8.loss_mask: 0.2012, decode.d8.loss_dice: 0.5722, loss: 8.9059 +2022-05-10 16:41:45,025 - mmseg - INFO - Saving checkpoint at 42000 iterations +2022-05-10 16:42:17,935 - mmseg 
- INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 16:42:17,944 - mmseg - INFO - Iter [42000/80000] lr: 6.820e-07, eta: 1 day, 1:53:25, time: 2.476, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0637, decode.loss_mask: 0.2011, decode.loss_dice: 0.5531, decode.d0.loss_cls: 0.3085, decode.d0.loss_mask: 0.2085, decode.d0.loss_dice: 0.5770, decode.d1.loss_cls: 0.0912, decode.d1.loss_mask: 0.2020, decode.d1.loss_dice: 0.5578, decode.d2.loss_cls: 0.0775, decode.d2.loss_mask: 0.2020, decode.d2.loss_dice: 0.5574, decode.d3.loss_cls: 0.0712, decode.d3.loss_mask: 0.2013, decode.d3.loss_dice: 0.5497, decode.d4.loss_cls: 0.0654, decode.d4.loss_mask: 0.2016, decode.d4.loss_dice: 0.5543, decode.d5.loss_cls: 0.0705, decode.d5.loss_mask: 0.2017, decode.d5.loss_dice: 0.5528, decode.d6.loss_cls: 0.0745, decode.d6.loss_mask: 0.2011, decode.d6.loss_dice: 0.5524, decode.d7.loss_cls: 0.0713, decode.d7.loss_mask: 0.2008, decode.d7.loss_dice: 0.5528, decode.d8.loss_cls: 0.0672, decode.d8.loss_mask: 0.2006, decode.d8.loss_dice: 0.5510, loss: 8.5399 +2022-05-10 16:44:13,048 - mmseg - INFO - per class results: +2022-05-10 16:44:13,055 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.51 | 99.24 | +| sidewalk | 88.19 | 93.31 | +| building | 94.29 | 96.89 | +| wall | 66.78 | 78.11 | +| fence | 78.33 | 87.25 | +| pole | 71.26 | 83.34 | +| traffic light | 77.27 | 88.87 | +| traffic sign | 83.83 | 90.54 | +| vegetation | 93.39 | 97.08 | +| terrain | 68.67 | 77.86 | +| sky | 95.92 | 98.31 | +| person | 86.91 | 93.56 | +| rider | 74.56 | 86.84 | +| car | 96.22 | 98.25 | +| truck | 91.44 | 94.71 | +| bus | 93.82 | 96.58 | +| train | 88.5 | 91.05 | +| motorcycle | 75.46 | 87.93 | +| bicycle | 82.71 | 91.16 | ++---------------+-------+-------+ +2022-05-10 16:44:13,055 - mmseg - INFO - Summary: +2022-05-10 16:44:13,056 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | 
++-------+-------+------+ +| 96.99 | 84.53 | 91.1 | ++-------+-------+------+ +2022-05-10 16:44:13,059 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 16:44:13,059 - mmseg - INFO - Iter(val) [32] aAcc: 0.9699, mIoU: 0.8453, mAcc: 0.9110, IoU.road: 0.9851, IoU.sidewalk: 0.8819, IoU.building: 0.9429, IoU.wall: 0.6678, IoU.fence: 0.7833, IoU.pole: 0.7126, IoU.traffic light: 0.7727, IoU.traffic sign: 0.8383, IoU.vegetation: 0.9339, IoU.terrain: 0.6867, IoU.sky: 0.9592, IoU.person: 0.8691, IoU.rider: 0.7456, IoU.car: 0.9622, IoU.truck: 0.9144, IoU.bus: 0.9382, IoU.train: 0.8850, IoU.motorcycle: 0.7546, IoU.bicycle: 0.8271, Acc.road: 0.9924, Acc.sidewalk: 0.9331, Acc.building: 0.9689, Acc.wall: 0.7811, Acc.fence: 0.8725, Acc.pole: 0.8334, Acc.traffic light: 0.8887, Acc.traffic sign: 0.9054, Acc.vegetation: 0.9708, Acc.terrain: 0.7786, Acc.sky: 0.9831, Acc.person: 0.9356, Acc.rider: 0.8684, Acc.car: 0.9825, Acc.truck: 0.9471, Acc.bus: 0.9658, Acc.train: 0.9105, Acc.motorcycle: 0.8793, Acc.bicycle: 0.9116 +2022-05-10 16:45:43,320 - mmseg - INFO - Iter [42050/80000] lr: 6.811e-07, eta: 1 day, 2:00:02, time: 4.110, data_time: 2.321, memory: 69053, decode.loss_cls: 0.0715, decode.loss_mask: 0.2086, decode.loss_dice: 0.5581, decode.d0.loss_cls: 0.3076, decode.d0.loss_mask: 0.2160, decode.d0.loss_dice: 0.5886, decode.d1.loss_cls: 0.0856, decode.d1.loss_mask: 0.2097, decode.d1.loss_dice: 0.5690, decode.d2.loss_cls: 0.0821, decode.d2.loss_mask: 0.2092, decode.d2.loss_dice: 0.5697, decode.d3.loss_cls: 0.0712, decode.d3.loss_mask: 0.2090, decode.d3.loss_dice: 0.5628, decode.d4.loss_cls: 0.0732, decode.d4.loss_mask: 0.2085, decode.d4.loss_dice: 0.5641, decode.d5.loss_cls: 0.0750, decode.d5.loss_mask: 0.2082, decode.d5.loss_dice: 0.5620, decode.d6.loss_cls: 0.0742, decode.d6.loss_mask: 0.2082, decode.d6.loss_dice: 0.5582, decode.d7.loss_cls: 0.0731, decode.d7.loss_mask: 0.2083, decode.d7.loss_dice: 0.5591, decode.d8.loss_cls: 0.0700, 
decode.d8.loss_mask: 0.2079, decode.d8.loss_dice: 0.5586, loss: 8.7270 +2022-05-10 16:47:13,701 - mmseg - INFO - Iter [42100/80000] lr: 6.802e-07, eta: 1 day, 1:54:34, time: 1.807, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0738, decode.loss_mask: 0.2099, decode.loss_dice: 0.5669, decode.d0.loss_cls: 0.3314, decode.d0.loss_mask: 0.2196, decode.d0.loss_dice: 0.5960, decode.d1.loss_cls: 0.1024, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.5828, decode.d2.loss_cls: 0.0873, decode.d2.loss_mask: 0.2105, decode.d2.loss_dice: 0.5739, decode.d3.loss_cls: 0.0862, decode.d3.loss_mask: 0.2099, decode.d3.loss_dice: 0.5711, decode.d4.loss_cls: 0.0848, decode.d4.loss_mask: 0.2101, decode.d4.loss_dice: 0.5684, decode.d5.loss_cls: 0.0812, decode.d5.loss_mask: 0.2100, decode.d5.loss_dice: 0.5698, decode.d6.loss_cls: 0.0836, decode.d6.loss_mask: 0.2097, decode.d6.loss_dice: 0.5662, decode.d7.loss_cls: 0.0816, decode.d7.loss_mask: 0.2097, decode.d7.loss_dice: 0.5667, decode.d8.loss_cls: 0.0785, decode.d8.loss_mask: 0.2094, decode.d8.loss_dice: 0.5683, loss: 8.9303 +2022-05-10 16:48:46,331 - mmseg - INFO - Iter [42150/80000] lr: 6.793e-07, eta: 1 day, 1:49:24, time: 1.853, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0708, decode.loss_mask: 0.1978, decode.loss_dice: 0.5552, decode.d0.loss_cls: 0.3050, decode.d0.loss_mask: 0.2055, decode.d0.loss_dice: 0.5834, decode.d1.loss_cls: 0.0968, decode.d1.loss_mask: 0.1994, decode.d1.loss_dice: 0.5635, decode.d2.loss_cls: 0.0838, decode.d2.loss_mask: 0.1984, decode.d2.loss_dice: 0.5628, decode.d3.loss_cls: 0.0676, decode.d3.loss_mask: 0.1985, decode.d3.loss_dice: 0.5570, decode.d4.loss_cls: 0.0679, decode.d4.loss_mask: 0.1983, decode.d4.loss_dice: 0.5535, decode.d5.loss_cls: 0.0709, decode.d5.loss_mask: 0.1982, decode.d5.loss_dice: 0.5569, decode.d6.loss_cls: 0.0700, decode.d6.loss_mask: 0.1975, decode.d6.loss_dice: 0.5576, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.1973, decode.d7.loss_dice: 0.5524, 
decode.d8.loss_cls: 0.0691, decode.d8.loss_mask: 0.1977, decode.d8.loss_dice: 0.5554, loss: 8.5570 +2022-05-10 16:50:15,011 - mmseg - INFO - Iter [42200/80000] lr: 6.784e-07, eta: 1 day, 1:43:53, time: 1.774, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0717, decode.loss_mask: 0.2072, decode.loss_dice: 0.5548, decode.d0.loss_cls: 0.3164, decode.d0.loss_mask: 0.2168, decode.d0.loss_dice: 0.5856, decode.d1.loss_cls: 0.0980, decode.d1.loss_mask: 0.2094, decode.d1.loss_dice: 0.5620, decode.d2.loss_cls: 0.0917, decode.d2.loss_mask: 0.2081, decode.d2.loss_dice: 0.5613, decode.d3.loss_cls: 0.0786, decode.d3.loss_mask: 0.2077, decode.d3.loss_dice: 0.5572, decode.d4.loss_cls: 0.0762, decode.d4.loss_mask: 0.2082, decode.d4.loss_dice: 0.5580, decode.d5.loss_cls: 0.0738, decode.d5.loss_mask: 0.2085, decode.d5.loss_dice: 0.5608, decode.d6.loss_cls: 0.0760, decode.d6.loss_mask: 0.2076, decode.d6.loss_dice: 0.5535, decode.d7.loss_cls: 0.0749, decode.d7.loss_mask: 0.2083, decode.d7.loss_dice: 0.5585, decode.d8.loss_cls: 0.0732, decode.d8.loss_mask: 0.2080, decode.d8.loss_dice: 0.5569, loss: 8.7290 +2022-05-10 16:51:43,980 - mmseg - INFO - Iter [42250/80000] lr: 6.775e-07, eta: 1 day, 1:38:28, time: 1.779, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0708, decode.loss_mask: 0.2093, decode.loss_dice: 0.5556, decode.d0.loss_cls: 0.3025, decode.d0.loss_mask: 0.2182, decode.d0.loss_dice: 0.5839, decode.d1.loss_cls: 0.0875, decode.d1.loss_mask: 0.2108, decode.d1.loss_dice: 0.5680, decode.d2.loss_cls: 0.0770, decode.d2.loss_mask: 0.2102, decode.d2.loss_dice: 0.5622, decode.d3.loss_cls: 0.0718, decode.d3.loss_mask: 0.2098, decode.d3.loss_dice: 0.5563, decode.d4.loss_cls: 0.0704, decode.d4.loss_mask: 0.2103, decode.d4.loss_dice: 0.5593, decode.d5.loss_cls: 0.0751, decode.d5.loss_mask: 0.2103, decode.d5.loss_dice: 0.5609, decode.d6.loss_cls: 0.0687, decode.d6.loss_mask: 0.2092, decode.d6.loss_dice: 0.5594, decode.d7.loss_cls: 0.0677, decode.d7.loss_mask: 0.2098, 
decode.d7.loss_dice: 0.5587, decode.d8.loss_cls: 0.0635, decode.d8.loss_mask: 0.2096, decode.d8.loss_dice: 0.5620, loss: 8.6888 +2022-05-10 16:53:12,311 - mmseg - INFO - Iter [42300/80000] lr: 6.766e-07, eta: 1 day, 1:33:02, time: 1.766, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0651, decode.loss_mask: 0.2025, decode.loss_dice: 0.5592, decode.d0.loss_cls: 0.3168, decode.d0.loss_mask: 0.2115, decode.d0.loss_dice: 0.5880, decode.d1.loss_cls: 0.0963, decode.d1.loss_mask: 0.2049, decode.d1.loss_dice: 0.5707, decode.d2.loss_cls: 0.0895, decode.d2.loss_mask: 0.2040, decode.d2.loss_dice: 0.5685, decode.d3.loss_cls: 0.0774, decode.d3.loss_mask: 0.2032, decode.d3.loss_dice: 0.5616, decode.d4.loss_cls: 0.0762, decode.d4.loss_mask: 0.2026, decode.d4.loss_dice: 0.5610, decode.d5.loss_cls: 0.0772, decode.d5.loss_mask: 0.2031, decode.d5.loss_dice: 0.5674, decode.d6.loss_cls: 0.0733, decode.d6.loss_mask: 0.2032, decode.d6.loss_dice: 0.5634, decode.d7.loss_cls: 0.0722, decode.d7.loss_mask: 0.2032, decode.d7.loss_dice: 0.5620, decode.d8.loss_cls: 0.0711, decode.d8.loss_mask: 0.2031, decode.d8.loss_dice: 0.5607, loss: 8.7187 +2022-05-10 16:54:43,300 - mmseg - INFO - Iter [42350/80000] lr: 6.757e-07, eta: 1 day, 1:27:57, time: 1.820, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0755, decode.loss_mask: 0.2063, decode.loss_dice: 0.5666, decode.d0.loss_cls: 0.3128, decode.d0.loss_mask: 0.2155, decode.d0.loss_dice: 0.5993, decode.d1.loss_cls: 0.0898, decode.d1.loss_mask: 0.2083, decode.d1.loss_dice: 0.5760, decode.d2.loss_cls: 0.0806, decode.d2.loss_mask: 0.2072, decode.d2.loss_dice: 0.5736, decode.d3.loss_cls: 0.0812, decode.d3.loss_mask: 0.2075, decode.d3.loss_dice: 0.5677, decode.d4.loss_cls: 0.0798, decode.d4.loss_mask: 0.2069, decode.d4.loss_dice: 0.5689, decode.d5.loss_cls: 0.0791, decode.d5.loss_mask: 0.2069, decode.d5.loss_dice: 0.5688, decode.d6.loss_cls: 0.0738, decode.d6.loss_mask: 0.2066, decode.d6.loss_dice: 0.5666, decode.d7.loss_cls: 0.0727, 
decode.d7.loss_mask: 0.2072, decode.d7.loss_dice: 0.5713, decode.d8.loss_cls: 0.0782, decode.d8.loss_mask: 0.2067, decode.d8.loss_dice: 0.5724, loss: 8.8337 +2022-05-10 16:56:11,985 - mmseg - INFO - Iter [42400/80000] lr: 6.748e-07, eta: 1 day, 1:22:41, time: 1.773, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0613, decode.loss_mask: 0.1962, decode.loss_dice: 0.5521, decode.d0.loss_cls: 0.2979, decode.d0.loss_mask: 0.2045, decode.d0.loss_dice: 0.5847, decode.d1.loss_cls: 0.0736, decode.d1.loss_mask: 0.1991, decode.d1.loss_dice: 0.5632, decode.d2.loss_cls: 0.0679, decode.d2.loss_mask: 0.1971, decode.d2.loss_dice: 0.5607, decode.d3.loss_cls: 0.0656, decode.d3.loss_mask: 0.1968, decode.d3.loss_dice: 0.5514, decode.d4.loss_cls: 0.0623, decode.d4.loss_mask: 0.1975, decode.d4.loss_dice: 0.5570, decode.d5.loss_cls: 0.0631, decode.d5.loss_mask: 0.1972, decode.d5.loss_dice: 0.5560, decode.d6.loss_cls: 0.0679, decode.d6.loss_mask: 0.1969, decode.d6.loss_dice: 0.5560, decode.d7.loss_cls: 0.0593, decode.d7.loss_mask: 0.1967, decode.d7.loss_dice: 0.5540, decode.d8.loss_cls: 0.0608, decode.d8.loss_mask: 0.1962, decode.d8.loss_dice: 0.5513, loss: 8.4443 +2022-05-10 16:57:41,756 - mmseg - INFO - Iter [42450/80000] lr: 6.739e-07, eta: 1 day, 1:17:34, time: 1.796, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0717, decode.loss_mask: 0.2081, decode.loss_dice: 0.5568, decode.d0.loss_cls: 0.3074, decode.d0.loss_mask: 0.2181, decode.d0.loss_dice: 0.5830, decode.d1.loss_cls: 0.0870, decode.d1.loss_mask: 0.2094, decode.d1.loss_dice: 0.5612, decode.d2.loss_cls: 0.0769, decode.d2.loss_mask: 0.2084, decode.d2.loss_dice: 0.5644, decode.d3.loss_cls: 0.0711, decode.d3.loss_mask: 0.2084, decode.d3.loss_dice: 0.5575, decode.d4.loss_cls: 0.0649, decode.d4.loss_mask: 0.2084, decode.d4.loss_dice: 0.5572, decode.d5.loss_cls: 0.0748, decode.d5.loss_mask: 0.2080, decode.d5.loss_dice: 0.5576, decode.d6.loss_cls: 0.0700, decode.d6.loss_mask: 0.2081, decode.d6.loss_dice: 0.5560, 
decode.d7.loss_cls: 0.0717, decode.d7.loss_mask: 0.2075, decode.d7.loss_dice: 0.5572, decode.d8.loss_cls: 0.0696, decode.d8.loss_mask: 0.2074, decode.d8.loss_dice: 0.5551, loss: 8.6627 +2022-05-10 16:59:10,185 - mmseg - INFO - Iter [42500/80000] lr: 6.730e-07, eta: 1 day, 1:12:24, time: 1.768, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0679, decode.loss_mask: 0.1993, decode.loss_dice: 0.5500, decode.d0.loss_cls: 0.3070, decode.d0.loss_mask: 0.2075, decode.d0.loss_dice: 0.5804, decode.d1.loss_cls: 0.0872, decode.d1.loss_mask: 0.2007, decode.d1.loss_dice: 0.5589, decode.d2.loss_cls: 0.0805, decode.d2.loss_mask: 0.1998, decode.d2.loss_dice: 0.5514, decode.d3.loss_cls: 0.0712, decode.d3.loss_mask: 0.1998, decode.d3.loss_dice: 0.5533, decode.d4.loss_cls: 0.0684, decode.d4.loss_mask: 0.2000, decode.d4.loss_dice: 0.5508, decode.d5.loss_cls: 0.0681, decode.d5.loss_mask: 0.1993, decode.d5.loss_dice: 0.5529, decode.d6.loss_cls: 0.0725, decode.d6.loss_mask: 0.1996, decode.d6.loss_dice: 0.5538, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.1990, decode.d7.loss_dice: 0.5529, decode.d8.loss_cls: 0.0688, decode.d8.loss_mask: 0.1993, decode.d8.loss_dice: 0.5522, loss: 8.5212 +2022-05-10 17:00:43,710 - mmseg - INFO - Iter [42550/80000] lr: 6.721e-07, eta: 1 day, 1:07:46, time: 1.871, data_time: 0.068, memory: 69053, decode.loss_cls: 0.0752, decode.loss_mask: 0.2034, decode.loss_dice: 0.5374, decode.d0.loss_cls: 0.3108, decode.d0.loss_mask: 0.2107, decode.d0.loss_dice: 0.5695, decode.d1.loss_cls: 0.0876, decode.d1.loss_mask: 0.2048, decode.d1.loss_dice: 0.5526, decode.d2.loss_cls: 0.0824, decode.d2.loss_mask: 0.2043, decode.d2.loss_dice: 0.5485, decode.d3.loss_cls: 0.0705, decode.d3.loss_mask: 0.2035, decode.d3.loss_dice: 0.5429, decode.d4.loss_cls: 0.0740, decode.d4.loss_mask: 0.2035, decode.d4.loss_dice: 0.5397, decode.d5.loss_cls: 0.0745, decode.d5.loss_mask: 0.2039, decode.d5.loss_dice: 0.5425, decode.d6.loss_cls: 0.0735, decode.d6.loss_mask: 0.2034, 
decode.d6.loss_dice: 0.5428, decode.d7.loss_cls: 0.0697, decode.d7.loss_mask: 0.2034, decode.d7.loss_dice: 0.5406, decode.d8.loss_cls: 0.0703, decode.d8.loss_mask: 0.2033, decode.d8.loss_dice: 0.5431, loss: 8.4923 +2022-05-10 17:02:13,440 - mmseg - INFO - Iter [42600/80000] lr: 6.713e-07, eta: 1 day, 1:02:49, time: 1.795, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0749, decode.loss_mask: 0.2042, decode.loss_dice: 0.5623, decode.d0.loss_cls: 0.3162, decode.d0.loss_mask: 0.2128, decode.d0.loss_dice: 0.5981, decode.d1.loss_cls: 0.0885, decode.d1.loss_mask: 0.2056, decode.d1.loss_dice: 0.5741, decode.d2.loss_cls: 0.0801, decode.d2.loss_mask: 0.2054, decode.d2.loss_dice: 0.5669, decode.d3.loss_cls: 0.0755, decode.d3.loss_mask: 0.2049, decode.d3.loss_dice: 0.5682, decode.d4.loss_cls: 0.0799, decode.d4.loss_mask: 0.2052, decode.d4.loss_dice: 0.5604, decode.d5.loss_cls: 0.0825, decode.d5.loss_mask: 0.2042, decode.d5.loss_dice: 0.5643, decode.d6.loss_cls: 0.0780, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5685, decode.d7.loss_cls: 0.0724, decode.d7.loss_mask: 0.2045, decode.d7.loss_dice: 0.5616, decode.d8.loss_cls: 0.0757, decode.d8.loss_mask: 0.2042, decode.d8.loss_dice: 0.5629, loss: 8.7662 +2022-05-10 17:03:43,684 - mmseg - INFO - Iter [42650/80000] lr: 6.704e-07, eta: 1 day, 0:57:58, time: 1.805, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0661, decode.loss_mask: 0.2001, decode.loss_dice: 0.5649, decode.d0.loss_cls: 0.3050, decode.d0.loss_mask: 0.2088, decode.d0.loss_dice: 0.5906, decode.d1.loss_cls: 0.0985, decode.d1.loss_mask: 0.2024, decode.d1.loss_dice: 0.5693, decode.d2.loss_cls: 0.0834, decode.d2.loss_mask: 0.2016, decode.d2.loss_dice: 0.5723, decode.d3.loss_cls: 0.0705, decode.d3.loss_mask: 0.2009, decode.d3.loss_dice: 0.5626, decode.d4.loss_cls: 0.0698, decode.d4.loss_mask: 0.2009, decode.d4.loss_dice: 0.5626, decode.d5.loss_cls: 0.0702, decode.d5.loss_mask: 0.2002, decode.d5.loss_dice: 0.5654, decode.d6.loss_cls: 0.0683, 
decode.d6.loss_mask: 0.2008, decode.d6.loss_dice: 0.5621, decode.d7.loss_cls: 0.0671, decode.d7.loss_mask: 0.2007, decode.d7.loss_dice: 0.5660, decode.d8.loss_cls: 0.0691, decode.d8.loss_mask: 0.2002, decode.d8.loss_dice: 0.5649, loss: 8.6651 +2022-05-10 17:05:15,569 - mmseg - INFO - Iter [42700/80000] lr: 6.695e-07, eta: 1 day, 0:53:20, time: 1.838, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0689, decode.loss_mask: 0.1970, decode.loss_dice: 0.5637, decode.d0.loss_cls: 0.3009, decode.d0.loss_mask: 0.2039, decode.d0.loss_dice: 0.5851, decode.d1.loss_cls: 0.0898, decode.d1.loss_mask: 0.1978, decode.d1.loss_dice: 0.5700, decode.d2.loss_cls: 0.0790, decode.d2.loss_mask: 0.1972, decode.d2.loss_dice: 0.5671, decode.d3.loss_cls: 0.0738, decode.d3.loss_mask: 0.1963, decode.d3.loss_dice: 0.5624, decode.d4.loss_cls: 0.0675, decode.d4.loss_mask: 0.1964, decode.d4.loss_dice: 0.5652, decode.d5.loss_cls: 0.0687, decode.d5.loss_mask: 0.1962, decode.d5.loss_dice: 0.5645, decode.d6.loss_cls: 0.0717, decode.d6.loss_mask: 0.1959, decode.d6.loss_dice: 0.5640, decode.d7.loss_cls: 0.0681, decode.d7.loss_mask: 0.1963, decode.d7.loss_dice: 0.5668, decode.d8.loss_cls: 0.0714, decode.d8.loss_mask: 0.1969, decode.d8.loss_dice: 0.5669, loss: 8.6095 +2022-05-10 17:06:46,701 - mmseg - INFO - Iter [42750/80000] lr: 6.686e-07, eta: 1 day, 0:48:40, time: 1.823, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0661, decode.loss_mask: 0.2067, decode.loss_dice: 0.5533, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.2154, decode.d0.loss_dice: 0.5827, decode.d1.loss_cls: 0.0792, decode.d1.loss_mask: 0.2095, decode.d1.loss_dice: 0.5660, decode.d2.loss_cls: 0.0741, decode.d2.loss_mask: 0.2076, decode.d2.loss_dice: 0.5580, decode.d3.loss_cls: 0.0656, decode.d3.loss_mask: 0.2067, decode.d3.loss_dice: 0.5556, decode.d4.loss_cls: 0.0716, decode.d4.loss_mask: 0.2066, decode.d4.loss_dice: 0.5578, decode.d5.loss_cls: 0.0630, decode.d5.loss_mask: 0.2074, decode.d5.loss_dice: 0.5571, 
decode.d6.loss_cls: 0.0649, decode.d6.loss_mask: 0.2061, decode.d6.loss_dice: 0.5523, decode.d7.loss_cls: 0.0698, decode.d7.loss_mask: 0.2064, decode.d7.loss_dice: 0.5561, decode.d8.loss_cls: 0.0660, decode.d8.loss_mask: 0.2066, decode.d8.loss_dice: 0.5548, loss: 8.5990 +2022-05-10 17:08:15,556 - mmseg - INFO - Iter [42800/80000] lr: 6.677e-07, eta: 1 day, 0:43:50, time: 1.777, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0562, decode.loss_mask: 0.2040, decode.loss_dice: 0.5714, decode.d0.loss_cls: 0.3042, decode.d0.loss_mask: 0.2124, decode.d0.loss_dice: 0.5966, decode.d1.loss_cls: 0.0881, decode.d1.loss_mask: 0.2057, decode.d1.loss_dice: 0.5771, decode.d2.loss_cls: 0.0718, decode.d2.loss_mask: 0.2053, decode.d2.loss_dice: 0.5765, decode.d3.loss_cls: 0.0596, decode.d3.loss_mask: 0.2039, decode.d3.loss_dice: 0.5693, decode.d4.loss_cls: 0.0600, decode.d4.loss_mask: 0.2041, decode.d4.loss_dice: 0.5731, decode.d5.loss_cls: 0.0678, decode.d5.loss_mask: 0.2039, decode.d5.loss_dice: 0.5682, decode.d6.loss_cls: 0.0626, decode.d6.loss_mask: 0.2030, decode.d6.loss_dice: 0.5684, decode.d7.loss_cls: 0.0628, decode.d7.loss_mask: 0.2031, decode.d7.loss_dice: 0.5667, decode.d8.loss_cls: 0.0639, decode.d8.loss_mask: 0.2033, decode.d8.loss_dice: 0.5646, loss: 8.6778 +2022-05-10 17:09:44,496 - mmseg - INFO - Iter [42850/80000] lr: 6.668e-07, eta: 1 day, 0:39:04, time: 1.779, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0576, decode.loss_mask: 0.2048, decode.loss_dice: 0.5499, decode.d0.loss_cls: 0.3158, decode.d0.loss_mask: 0.2137, decode.d0.loss_dice: 0.5737, decode.d1.loss_cls: 0.0756, decode.d1.loss_mask: 0.2063, decode.d1.loss_dice: 0.5609, decode.d2.loss_cls: 0.0753, decode.d2.loss_mask: 0.2052, decode.d2.loss_dice: 0.5580, decode.d3.loss_cls: 0.0650, decode.d3.loss_mask: 0.2045, decode.d3.loss_dice: 0.5480, decode.d4.loss_cls: 0.0655, decode.d4.loss_mask: 0.2048, decode.d4.loss_dice: 0.5533, decode.d5.loss_cls: 0.0591, decode.d5.loss_mask: 0.2044, 
decode.d5.loss_dice: 0.5499, decode.d6.loss_cls: 0.0632, decode.d6.loss_mask: 0.2045, decode.d6.loss_dice: 0.5476, decode.d7.loss_cls: 0.0620, decode.d7.loss_mask: 0.2048, decode.d7.loss_dice: 0.5514, decode.d8.loss_cls: 0.0614, decode.d8.loss_mask: 0.2050, decode.d8.loss_dice: 0.5484, loss: 8.4995 +2022-05-10 17:11:16,380 - mmseg - INFO - Iter [42900/80000] lr: 6.659e-07, eta: 1 day, 0:34:36, time: 1.838, data_time: 0.063, memory: 69053, decode.loss_cls: 0.0660, decode.loss_mask: 0.2022, decode.loss_dice: 0.5505, decode.d0.loss_cls: 0.3019, decode.d0.loss_mask: 0.2107, decode.d0.loss_dice: 0.5822, decode.d1.loss_cls: 0.0904, decode.d1.loss_mask: 0.2031, decode.d1.loss_dice: 0.5649, decode.d2.loss_cls: 0.0784, decode.d2.loss_mask: 0.2020, decode.d2.loss_dice: 0.5567, decode.d3.loss_cls: 0.0666, decode.d3.loss_mask: 0.2025, decode.d3.loss_dice: 0.5578, decode.d4.loss_cls: 0.0731, decode.d4.loss_mask: 0.2024, decode.d4.loss_dice: 0.5532, decode.d5.loss_cls: 0.0692, decode.d5.loss_mask: 0.2017, decode.d5.loss_dice: 0.5517, decode.d6.loss_cls: 0.0648, decode.d6.loss_mask: 0.2016, decode.d6.loss_dice: 0.5548, decode.d7.loss_cls: 0.0670, decode.d7.loss_mask: 0.2017, decode.d7.loss_dice: 0.5515, decode.d8.loss_cls: 0.0677, decode.d8.loss_mask: 0.2013, decode.d8.loss_dice: 0.5546, loss: 8.5522 +2022-05-10 17:12:46,247 - mmseg - INFO - Iter [42950/80000] lr: 6.650e-07, eta: 1 day, 0:30:00, time: 1.797, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0621, decode.loss_mask: 0.1969, decode.loss_dice: 0.5563, decode.d0.loss_cls: 0.3072, decode.d0.loss_mask: 0.2058, decode.d0.loss_dice: 0.5906, decode.d1.loss_cls: 0.0830, decode.d1.loss_mask: 0.1994, decode.d1.loss_dice: 0.5702, decode.d2.loss_cls: 0.0759, decode.d2.loss_mask: 0.1984, decode.d2.loss_dice: 0.5656, decode.d3.loss_cls: 0.0691, decode.d3.loss_mask: 0.1976, decode.d3.loss_dice: 0.5552, decode.d4.loss_cls: 0.0723, decode.d4.loss_mask: 0.1983, decode.d4.loss_dice: 0.5598, decode.d5.loss_cls: 0.0715, 
decode.d5.loss_mask: 0.1981, decode.d5.loss_dice: 0.5611, decode.d6.loss_cls: 0.0622, decode.d6.loss_mask: 0.1973, decode.d6.loss_dice: 0.5534, decode.d7.loss_cls: 0.0679, decode.d7.loss_mask: 0.1979, decode.d7.loss_dice: 0.5603, decode.d8.loss_cls: 0.0678, decode.d8.loss_mask: 0.1976, decode.d8.loss_dice: 0.5624, loss: 8.5610 +2022-05-10 17:14:15,140 - mmseg - INFO - Saving checkpoint at 43000 iterations +2022-05-10 17:14:49,184 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 17:14:49,196 - mmseg - INFO - Iter [43000/80000] lr: 6.641e-07, eta: 1 day, 0:28:21, time: 2.456, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0739, decode.loss_mask: 0.2013, decode.loss_dice: 0.5468, decode.d0.loss_cls: 0.3194, decode.d0.loss_mask: 0.2098, decode.d0.loss_dice: 0.5758, decode.d1.loss_cls: 0.0988, decode.d1.loss_mask: 0.2026, decode.d1.loss_dice: 0.5574, decode.d2.loss_cls: 0.0909, decode.d2.loss_mask: 0.2019, decode.d2.loss_dice: 0.5555, decode.d3.loss_cls: 0.0784, decode.d3.loss_mask: 0.2023, decode.d3.loss_dice: 0.5501, decode.d4.loss_cls: 0.0775, decode.d4.loss_mask: 0.2021, decode.d4.loss_dice: 0.5513, decode.d5.loss_cls: 0.0794, decode.d5.loss_mask: 0.2023, decode.d5.loss_dice: 0.5506, decode.d6.loss_cls: 0.0832, decode.d6.loss_mask: 0.2018, decode.d6.loss_dice: 0.5502, decode.d7.loss_cls: 0.0798, decode.d7.loss_mask: 0.2020, decode.d7.loss_dice: 0.5437, decode.d8.loss_cls: 0.0787, decode.d8.loss_mask: 0.2016, decode.d8.loss_dice: 0.5504, loss: 8.6193 +2022-05-10 17:16:45,300 - mmseg - INFO - per class results: +2022-05-10 17:16:45,309 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.46 | 99.0 | +| sidewalk | 87.88 | 94.92 | +| building | 94.3 | 96.87 | +| wall | 69.06 | 80.94 | +| fence | 73.25 | 80.42 | +| pole | 71.53 | 84.02 | +| traffic light | 77.23 | 88.28 | +| traffic sign | 84.04 | 90.59 | +| vegetation | 93.35 | 96.89 | +| terrain | 
67.25 | 77.39 | +| sky | 95.93 | 98.35 | +| person | 86.83 | 93.85 | +| rider | 74.21 | 84.47 | +| car | 96.26 | 98.25 | +| truck | 80.0 | 95.17 | +| bus | 93.71 | 96.5 | +| train | 88.33 | 91.15 | +| motorcycle | 76.73 | 87.45 | +| bicycle | 83.0 | 91.82 | ++---------------+-------+-------+ +2022-05-10 17:16:45,309 - mmseg - INFO - Summary: +2022-05-10 17:16:45,310 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.93 | 83.76 | 90.86 | ++-------+-------+-------+ +2022-05-10 17:16:45,314 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 17:16:45,315 - mmseg - INFO - Iter(val) [32] aAcc: 0.9693, mIoU: 0.8376, mAcc: 0.9086, IoU.road: 0.9846, IoU.sidewalk: 0.8788, IoU.building: 0.9430, IoU.wall: 0.6906, IoU.fence: 0.7325, IoU.pole: 0.7153, IoU.traffic light: 0.7723, IoU.traffic sign: 0.8404, IoU.vegetation: 0.9335, IoU.terrain: 0.6725, IoU.sky: 0.9593, IoU.person: 0.8683, IoU.rider: 0.7421, IoU.car: 0.9626, IoU.truck: 0.8000, IoU.bus: 0.9371, IoU.train: 0.8833, IoU.motorcycle: 0.7673, IoU.bicycle: 0.8300, Acc.road: 0.9900, Acc.sidewalk: 0.9492, Acc.building: 0.9687, Acc.wall: 0.8094, Acc.fence: 0.8042, Acc.pole: 0.8402, Acc.traffic light: 0.8828, Acc.traffic sign: 0.9059, Acc.vegetation: 0.9689, Acc.terrain: 0.7739, Acc.sky: 0.9835, Acc.person: 0.9385, Acc.rider: 0.8447, Acc.car: 0.9825, Acc.truck: 0.9517, Acc.bus: 0.9650, Acc.train: 0.9115, Acc.motorcycle: 0.8745, Acc.bicycle: 0.9182 +2022-05-10 17:18:15,237 - mmseg - INFO - Iter [43050/80000] lr: 6.632e-07, eta: 1 day, 0:33:59, time: 4.124, data_time: 2.342, memory: 69053, decode.loss_cls: 0.0819, decode.loss_mask: 0.2057, decode.loss_dice: 0.5603, decode.d0.loss_cls: 0.3347, decode.d0.loss_mask: 0.2149, decode.d0.loss_dice: 0.5954, decode.d1.loss_cls: 0.1033, decode.d1.loss_mask: 0.2077, decode.d1.loss_dice: 0.5711, decode.d2.loss_cls: 0.0953, decode.d2.loss_mask: 0.2069, decode.d2.loss_dice: 0.5708, decode.d3.loss_cls: 
0.0866, decode.d3.loss_mask: 0.2072, decode.d3.loss_dice: 0.5651, decode.d4.loss_cls: 0.0879, decode.d4.loss_mask: 0.2068, decode.d4.loss_dice: 0.5655, decode.d5.loss_cls: 0.0895, decode.d5.loss_mask: 0.2063, decode.d5.loss_dice: 0.5615, decode.d6.loss_cls: 0.0871, decode.d6.loss_mask: 0.2060, decode.d6.loss_dice: 0.5608, decode.d7.loss_cls: 0.0874, decode.d7.loss_mask: 0.2059, decode.d7.loss_dice: 0.5611, decode.d8.loss_cls: 0.0814, decode.d8.loss_mask: 0.2060, decode.d8.loss_dice: 0.5636, loss: 8.8835 +2022-05-10 17:19:45,490 - mmseg - INFO - Iter [43100/80000] lr: 6.623e-07, eta: 1 day, 0:29:26, time: 1.805, data_time: 0.062, memory: 69053, decode.loss_cls: 0.0572, decode.loss_mask: 0.1998, decode.loss_dice: 0.5465, decode.d0.loss_cls: 0.3060, decode.d0.loss_mask: 0.2061, decode.d0.loss_dice: 0.5782, decode.d1.loss_cls: 0.0769, decode.d1.loss_mask: 0.2010, decode.d1.loss_dice: 0.5556, decode.d2.loss_cls: 0.0672, decode.d2.loss_mask: 0.2000, decode.d2.loss_dice: 0.5547, decode.d3.loss_cls: 0.0574, decode.d3.loss_mask: 0.2000, decode.d3.loss_dice: 0.5482, decode.d4.loss_cls: 0.0592, decode.d4.loss_mask: 0.1998, decode.d4.loss_dice: 0.5492, decode.d5.loss_cls: 0.0644, decode.d5.loss_mask: 0.1995, decode.d5.loss_dice: 0.5470, decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.1993, decode.d6.loss_dice: 0.5451, decode.d7.loss_cls: 0.0586, decode.d7.loss_mask: 0.1993, decode.d7.loss_dice: 0.5488, decode.d8.loss_cls: 0.0571, decode.d8.loss_mask: 0.1993, decode.d8.loss_dice: 0.5488, loss: 8.3899 +2022-05-10 17:21:14,332 - mmseg - INFO - Iter [43150/80000] lr: 6.614e-07, eta: 1 day, 0:24:49, time: 1.777, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0768, decode.loss_mask: 0.1997, decode.loss_dice: 0.5569, decode.d0.loss_cls: 0.3179, decode.d0.loss_mask: 0.2069, decode.d0.loss_dice: 0.5914, decode.d1.loss_cls: 0.0949, decode.d1.loss_mask: 0.2001, decode.d1.loss_dice: 0.5682, decode.d2.loss_cls: 0.0793, decode.d2.loss_mask: 0.1997, decode.d2.loss_dice: 0.5646, 
decode.d3.loss_cls: 0.0828, decode.d3.loss_mask: 0.1992, decode.d3.loss_dice: 0.5619, decode.d4.loss_cls: 0.0765, decode.d4.loss_mask: 0.1982, decode.d4.loss_dice: 0.5594, decode.d5.loss_cls: 0.0769, decode.d5.loss_mask: 0.1987, decode.d5.loss_dice: 0.5582, decode.d6.loss_cls: 0.0731, decode.d6.loss_mask: 0.1989, decode.d6.loss_dice: 0.5542, decode.d7.loss_cls: 0.0749, decode.d7.loss_mask: 0.1989, decode.d7.loss_dice: 0.5566, decode.d8.loss_cls: 0.0750, decode.d8.loss_mask: 0.1991, decode.d8.loss_dice: 0.5577, loss: 8.6566 +2022-05-10 17:22:43,584 - mmseg - INFO - Iter [43200/80000] lr: 6.605e-07, eta: 1 day, 0:20:17, time: 1.785, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0678, decode.loss_mask: 0.2009, decode.loss_dice: 0.5606, decode.d0.loss_cls: 0.3076, decode.d0.loss_mask: 0.2109, decode.d0.loss_dice: 0.5929, decode.d1.loss_cls: 0.0950, decode.d1.loss_mask: 0.2034, decode.d1.loss_dice: 0.5690, decode.d2.loss_cls: 0.0789, decode.d2.loss_mask: 0.2016, decode.d2.loss_dice: 0.5647, decode.d3.loss_cls: 0.0733, decode.d3.loss_mask: 0.2011, decode.d3.loss_dice: 0.5621, decode.d4.loss_cls: 0.0663, decode.d4.loss_mask: 0.2019, decode.d4.loss_dice: 0.5580, decode.d5.loss_cls: 0.0664, decode.d5.loss_mask: 0.2022, decode.d5.loss_dice: 0.5619, decode.d6.loss_cls: 0.0750, decode.d6.loss_mask: 0.2011, decode.d6.loss_dice: 0.5573, decode.d7.loss_cls: 0.0723, decode.d7.loss_mask: 0.2014, decode.d7.loss_dice: 0.5589, decode.d8.loss_cls: 0.0689, decode.d8.loss_mask: 0.2013, decode.d8.loss_dice: 0.5608, loss: 8.6437 +2022-05-10 17:24:12,502 - mmseg - INFO - Iter [43250/80000] lr: 6.596e-07, eta: 1 day, 0:15:45, time: 1.778, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0627, decode.loss_mask: 0.1984, decode.loss_dice: 0.5537, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 0.2066, decode.d0.loss_dice: 0.5872, decode.d1.loss_cls: 0.0870, decode.d1.loss_mask: 0.2005, decode.d1.loss_dice: 0.5667, decode.d2.loss_cls: 0.0721, decode.d2.loss_mask: 0.1994, 
decode.d2.loss_dice: 0.5623, decode.d3.loss_cls: 0.0696, decode.d3.loss_mask: 0.1994, decode.d3.loss_dice: 0.5536, decode.d4.loss_cls: 0.0650, decode.d4.loss_mask: 0.1995, decode.d4.loss_dice: 0.5570, decode.d5.loss_cls: 0.0615, decode.d5.loss_mask: 0.1996, decode.d5.loss_dice: 0.5520, decode.d6.loss_cls: 0.0658, decode.d6.loss_mask: 0.1989, decode.d6.loss_dice: 0.5535, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.1985, decode.d7.loss_dice: 0.5530, decode.d8.loss_cls: 0.0584, decode.d8.loss_mask: 0.1987, decode.d8.loss_dice: 0.5537, loss: 8.4900 +2022-05-10 17:25:44,245 - mmseg - INFO - Iter [43300/80000] lr: 6.587e-07, eta: 1 day, 0:11:30, time: 1.835, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0726, decode.loss_mask: 0.1986, decode.loss_dice: 0.5516, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.2064, decode.d0.loss_dice: 0.5771, decode.d1.loss_cls: 0.0902, decode.d1.loss_mask: 0.2009, decode.d1.loss_dice: 0.5597, decode.d2.loss_cls: 0.0730, decode.d2.loss_mask: 0.1996, decode.d2.loss_dice: 0.5551, decode.d3.loss_cls: 0.0729, decode.d3.loss_mask: 0.2000, decode.d3.loss_dice: 0.5524, decode.d4.loss_cls: 0.0704, decode.d4.loss_mask: 0.1994, decode.d4.loss_dice: 0.5491, decode.d5.loss_cls: 0.0714, decode.d5.loss_mask: 0.1991, decode.d5.loss_dice: 0.5541, decode.d6.loss_cls: 0.0706, decode.d6.loss_mask: 0.1988, decode.d6.loss_dice: 0.5482, decode.d7.loss_cls: 0.0678, decode.d7.loss_mask: 0.1993, decode.d7.loss_dice: 0.5511, decode.d8.loss_cls: 0.0676, decode.d8.loss_mask: 0.1991, decode.d8.loss_dice: 0.5522, loss: 8.5146 +2022-05-10 17:27:12,802 - mmseg - INFO - Iter [43350/80000] lr: 6.578e-07, eta: 1 day, 0:07:01, time: 1.769, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0662, decode.loss_mask: 0.1975, decode.loss_dice: 0.5509, decode.d0.loss_cls: 0.3014, decode.d0.loss_mask: 0.2044, decode.d0.loss_dice: 0.5776, decode.d1.loss_cls: 0.0885, decode.d1.loss_mask: 0.1998, decode.d1.loss_dice: 0.5618, decode.d2.loss_cls: 0.0817, 
decode.d2.loss_mask: 0.1989, decode.d2.loss_dice: 0.5582, decode.d3.loss_cls: 0.0718, decode.d3.loss_mask: 0.1990, decode.d3.loss_dice: 0.5529, decode.d4.loss_cls: 0.0689, decode.d4.loss_mask: 0.1989, decode.d4.loss_dice: 0.5527, decode.d5.loss_cls: 0.0694, decode.d5.loss_mask: 0.1989, decode.d5.loss_dice: 0.5500, decode.d6.loss_cls: 0.0661, decode.d6.loss_mask: 0.1988, decode.d6.loss_dice: 0.5527, decode.d7.loss_cls: 0.0639, decode.d7.loss_mask: 0.1984, decode.d7.loss_dice: 0.5508, decode.d8.loss_cls: 0.0676, decode.d8.loss_mask: 0.1981, decode.d8.loss_dice: 0.5506, loss: 8.4964 +2022-05-10 17:28:42,643 - mmseg - INFO - Iter [43400/80000] lr: 6.569e-07, eta: 1 day, 0:02:41, time: 1.797, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0694, decode.loss_mask: 0.2038, decode.loss_dice: 0.5701, decode.d0.loss_cls: 0.3162, decode.d0.loss_mask: 0.2115, decode.d0.loss_dice: 0.5989, decode.d1.loss_cls: 0.0931, decode.d1.loss_mask: 0.2042, decode.d1.loss_dice: 0.5783, decode.d2.loss_cls: 0.0784, decode.d2.loss_mask: 0.2033, decode.d2.loss_dice: 0.5762, decode.d3.loss_cls: 0.0734, decode.d3.loss_mask: 0.2036, decode.d3.loss_dice: 0.5733, decode.d4.loss_cls: 0.0712, decode.d4.loss_mask: 0.2036, decode.d4.loss_dice: 0.5746, decode.d5.loss_cls: 0.0712, decode.d5.loss_mask: 0.2036, decode.d5.loss_dice: 0.5715, decode.d6.loss_cls: 0.0660, decode.d6.loss_mask: 0.2039, decode.d6.loss_dice: 0.5720, decode.d7.loss_cls: 0.0693, decode.d7.loss_mask: 0.2036, decode.d7.loss_dice: 0.5713, decode.d8.loss_cls: 0.0731, decode.d8.loss_mask: 0.2032, decode.d8.loss_dice: 0.5700, loss: 8.7818 +2022-05-10 17:30:14,343 - mmseg - INFO - Iter [43450/80000] lr: 6.560e-07, eta: 23:58:32, time: 1.835, data_time: 0.068, memory: 69053, decode.loss_cls: 0.0702, decode.loss_mask: 0.2061, decode.loss_dice: 0.5460, decode.d0.loss_cls: 0.3078, decode.d0.loss_mask: 0.2143, decode.d0.loss_dice: 0.5751, decode.d1.loss_cls: 0.0920, decode.d1.loss_mask: 0.2074, decode.d1.loss_dice: 0.5565, decode.d2.loss_cls: 
0.0758, decode.d2.loss_mask: 0.2063, decode.d2.loss_dice: 0.5505, decode.d3.loss_cls: 0.0735, decode.d3.loss_mask: 0.2067, decode.d3.loss_dice: 0.5456, decode.d4.loss_cls: 0.0691, decode.d4.loss_mask: 0.2067, decode.d4.loss_dice: 0.5501, decode.d5.loss_cls: 0.0687, decode.d5.loss_mask: 0.2064, decode.d5.loss_dice: 0.5478, decode.d6.loss_cls: 0.0641, decode.d6.loss_mask: 0.2064, decode.d6.loss_dice: 0.5467, decode.d7.loss_cls: 0.0620, decode.d7.loss_mask: 0.2064, decode.d7.loss_dice: 0.5459, decode.d8.loss_cls: 0.0646, decode.d8.loss_mask: 0.2063, decode.d8.loss_dice: 0.5478, loss: 8.5325 +2022-05-10 17:31:42,007 - mmseg - INFO - Iter [43500/80000] lr: 6.551e-07, eta: 23:54:07, time: 1.754, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0619, decode.loss_mask: 0.2012, decode.loss_dice: 0.5576, decode.d0.loss_cls: 0.3078, decode.d0.loss_mask: 0.2099, decode.d0.loss_dice: 0.5845, decode.d1.loss_cls: 0.0753, decode.d1.loss_mask: 0.2034, decode.d1.loss_dice: 0.5669, decode.d2.loss_cls: 0.0757, decode.d2.loss_mask: 0.2024, decode.d2.loss_dice: 0.5625, decode.d3.loss_cls: 0.0676, decode.d3.loss_mask: 0.2018, decode.d3.loss_dice: 0.5617, decode.d4.loss_cls: 0.0712, decode.d4.loss_mask: 0.2017, decode.d4.loss_dice: 0.5616, decode.d5.loss_cls: 0.0686, decode.d5.loss_mask: 0.2018, decode.d5.loss_dice: 0.5550, decode.d6.loss_cls: 0.0650, decode.d6.loss_mask: 0.2013, decode.d6.loss_dice: 0.5552, decode.d7.loss_cls: 0.0638, decode.d7.loss_mask: 0.2017, decode.d7.loss_dice: 0.5585, decode.d8.loss_cls: 0.0622, decode.d8.loss_mask: 0.2019, decode.d8.loss_dice: 0.5607, loss: 8.5704 +2022-05-10 17:33:10,335 - mmseg - INFO - Iter [43550/80000] lr: 6.542e-07, eta: 23:49:46, time: 1.766, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0715, decode.loss_mask: 0.1986, decode.loss_dice: 0.5502, decode.d0.loss_cls: 0.3163, decode.d0.loss_mask: 0.2084, decode.d0.loss_dice: 0.5722, decode.d1.loss_cls: 0.0769, decode.d1.loss_mask: 0.2014, decode.d1.loss_dice: 0.5591, 
decode.d2.loss_cls: 0.0812, decode.d2.loss_mask: 0.1993, decode.d2.loss_dice: 0.5590, decode.d3.loss_cls: 0.0705, decode.d3.loss_mask: 0.1988, decode.d3.loss_dice: 0.5465, decode.d4.loss_cls: 0.0704, decode.d4.loss_mask: 0.1993, decode.d4.loss_dice: 0.5496, decode.d5.loss_cls: 0.0706, decode.d5.loss_mask: 0.1994, decode.d5.loss_dice: 0.5498, decode.d6.loss_cls: 0.0663, decode.d6.loss_mask: 0.1991, decode.d6.loss_dice: 0.5460, decode.d7.loss_cls: 0.0718, decode.d7.loss_mask: 0.1992, decode.d7.loss_dice: 0.5492, decode.d8.loss_cls: 0.0730, decode.d8.loss_mask: 0.1990, decode.d8.loss_dice: 0.5467, loss: 8.4991 +2022-05-10 17:34:40,668 - mmseg - INFO - Iter [43600/80000] lr: 6.533e-07, eta: 23:45:37, time: 1.807, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0696, decode.loss_mask: 0.2023, decode.loss_dice: 0.5565, decode.d0.loss_cls: 0.3015, decode.d0.loss_mask: 0.2116, decode.d0.loss_dice: 0.5794, decode.d1.loss_cls: 0.0836, decode.d1.loss_mask: 0.2039, decode.d1.loss_dice: 0.5650, decode.d2.loss_cls: 0.0742, decode.d2.loss_mask: 0.2033, decode.d2.loss_dice: 0.5646, decode.d3.loss_cls: 0.0693, decode.d3.loss_mask: 0.2031, decode.d3.loss_dice: 0.5592, decode.d4.loss_cls: 0.0727, decode.d4.loss_mask: 0.2018, decode.d4.loss_dice: 0.5547, decode.d5.loss_cls: 0.0740, decode.d5.loss_mask: 0.2030, decode.d5.loss_dice: 0.5627, decode.d6.loss_cls: 0.0641, decode.d6.loss_mask: 0.2023, decode.d6.loss_dice: 0.5580, decode.d7.loss_cls: 0.0621, decode.d7.loss_mask: 0.2021, decode.d7.loss_dice: 0.5541, decode.d8.loss_cls: 0.0641, decode.d8.loss_mask: 0.2018, decode.d8.loss_dice: 0.5549, loss: 8.5795 +2022-05-10 17:36:13,057 - mmseg - INFO - Iter [43650/80000] lr: 6.524e-07, eta: 23:41:40, time: 1.845, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0683, decode.loss_mask: 0.2033, decode.loss_dice: 0.5470, decode.d0.loss_cls: 0.3030, decode.d0.loss_mask: 0.2119, decode.d0.loss_dice: 0.5806, decode.d1.loss_cls: 0.0853, decode.d1.loss_mask: 0.2048, decode.d1.loss_dice: 
0.5607, decode.d2.loss_cls: 0.0783, decode.d2.loss_mask: 0.2036, decode.d2.loss_dice: 0.5536, decode.d3.loss_cls: 0.0693, decode.d3.loss_mask: 0.2037, decode.d3.loss_dice: 0.5515, decode.d4.loss_cls: 0.0690, decode.d4.loss_mask: 0.2034, decode.d4.loss_dice: 0.5499, decode.d5.loss_cls: 0.0685, decode.d5.loss_mask: 0.2039, decode.d5.loss_dice: 0.5517, decode.d6.loss_cls: 0.0685, decode.d6.loss_mask: 0.2031, decode.d6.loss_dice: 0.5494, decode.d7.loss_cls: 0.0641, decode.d7.loss_mask: 0.2030, decode.d7.loss_dice: 0.5540, decode.d8.loss_cls: 0.0660, decode.d8.loss_mask: 0.2028, decode.d8.loss_dice: 0.5474, loss: 8.5295 +2022-05-10 17:37:41,964 - mmseg - INFO - Iter [43700/80000] lr: 6.515e-07, eta: 23:37:29, time: 1.781, data_time: 0.020, memory: 69053, decode.loss_cls: 0.0763, decode.loss_mask: 0.2023, decode.loss_dice: 0.5550, decode.d0.loss_cls: 0.3165, decode.d0.loss_mask: 0.2087, decode.d0.loss_dice: 0.5857, decode.d1.loss_cls: 0.0817, decode.d1.loss_mask: 0.2032, decode.d1.loss_dice: 0.5647, decode.d2.loss_cls: 0.0793, decode.d2.loss_mask: 0.2023, decode.d2.loss_dice: 0.5617, decode.d3.loss_cls: 0.0751, decode.d3.loss_mask: 0.2016, decode.d3.loss_dice: 0.5577, decode.d4.loss_cls: 0.0799, decode.d4.loss_mask: 0.2025, decode.d4.loss_dice: 0.5610, decode.d5.loss_cls: 0.0792, decode.d5.loss_mask: 0.2028, decode.d5.loss_dice: 0.5636, decode.d6.loss_cls: 0.0705, decode.d6.loss_mask: 0.2024, decode.d6.loss_dice: 0.5572, decode.d7.loss_cls: 0.0709, decode.d7.loss_mask: 0.2022, decode.d7.loss_dice: 0.5591, decode.d8.loss_cls: 0.0678, decode.d8.loss_mask: 0.2021, decode.d8.loss_dice: 0.5612, loss: 8.6542 +2022-05-10 17:39:10,258 - mmseg - INFO - Iter [43750/80000] lr: 6.506e-07, eta: 23:33:17, time: 1.766, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0682, decode.loss_mask: 0.2028, decode.loss_dice: 0.5691, decode.d0.loss_cls: 0.3075, decode.d0.loss_mask: 0.2114, decode.d0.loss_dice: 0.5964, decode.d1.loss_cls: 0.0950, decode.d1.loss_mask: 0.2055, 
decode.d1.loss_dice: 0.5741, decode.d2.loss_cls: 0.0857, decode.d2.loss_mask: 0.2045, decode.d2.loss_dice: 0.5722, decode.d3.loss_cls: 0.0746, decode.d3.loss_mask: 0.2039, decode.d3.loss_dice: 0.5674, decode.d4.loss_cls: 0.0778, decode.d4.loss_mask: 0.2024, decode.d4.loss_dice: 0.5620, decode.d5.loss_cls: 0.0771, decode.d5.loss_mask: 0.2021, decode.d5.loss_dice: 0.5652, decode.d6.loss_cls: 0.0754, decode.d6.loss_mask: 0.2024, decode.d6.loss_dice: 0.5662, decode.d7.loss_cls: 0.0737, decode.d7.loss_mask: 0.2023, decode.d7.loss_dice: 0.5634, decode.d8.loss_cls: 0.0686, decode.d8.loss_mask: 0.2025, decode.d8.loss_dice: 0.5628, loss: 8.7425 +2022-05-10 17:40:40,536 - mmseg - INFO - Iter [43800/80000] lr: 6.497e-07, eta: 23:29:16, time: 1.806, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0663, decode.loss_mask: 0.2034, decode.loss_dice: 0.5482, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.2123, decode.d0.loss_dice: 0.5800, decode.d1.loss_cls: 0.0822, decode.d1.loss_mask: 0.2053, decode.d1.loss_dice: 0.5595, decode.d2.loss_cls: 0.0731, decode.d2.loss_mask: 0.2044, decode.d2.loss_dice: 0.5565, decode.d3.loss_cls: 0.0657, decode.d3.loss_mask: 0.2027, decode.d3.loss_dice: 0.5479, decode.d4.loss_cls: 0.0640, decode.d4.loss_mask: 0.2032, decode.d4.loss_dice: 0.5489, decode.d5.loss_cls: 0.0657, decode.d5.loss_mask: 0.2034, decode.d5.loss_dice: 0.5476, decode.d6.loss_cls: 0.0637, decode.d6.loss_mask: 0.2035, decode.d6.loss_dice: 0.5466, decode.d7.loss_cls: 0.0713, decode.d7.loss_mask: 0.2039, decode.d7.loss_dice: 0.5502, decode.d8.loss_cls: 0.0644, decode.d8.loss_mask: 0.2033, decode.d8.loss_dice: 0.5469, loss: 8.5002 +2022-05-10 17:42:13,569 - mmseg - INFO - Iter [43850/80000] lr: 6.488e-07, eta: 23:25:29, time: 1.856, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0673, decode.loss_mask: 0.2043, decode.loss_dice: 0.5534, decode.d0.loss_cls: 0.3092, decode.d0.loss_mask: 0.2124, decode.d0.loss_dice: 0.5832, decode.d1.loss_cls: 0.0834, decode.d1.loss_mask: 
0.2051, decode.d1.loss_dice: 0.5600, decode.d2.loss_cls: 0.0766, decode.d2.loss_mask: 0.2048, decode.d2.loss_dice: 0.5561, decode.d3.loss_cls: 0.0732, decode.d3.loss_mask: 0.2038, decode.d3.loss_dice: 0.5546, decode.d4.loss_cls: 0.0764, decode.d4.loss_mask: 0.2044, decode.d4.loss_dice: 0.5564, decode.d5.loss_cls: 0.0746, decode.d5.loss_mask: 0.2044, decode.d5.loss_dice: 0.5531, decode.d6.loss_cls: 0.0694, decode.d6.loss_mask: 0.2038, decode.d6.loss_dice: 0.5530, decode.d7.loss_cls: 0.0708, decode.d7.loss_mask: 0.2041, decode.d7.loss_dice: 0.5527, decode.d8.loss_cls: 0.0662, decode.d8.loss_mask: 0.2039, decode.d8.loss_dice: 0.5511, loss: 8.5921 +2022-05-10 17:43:45,377 - mmseg - INFO - Iter [43900/80000] lr: 6.479e-07, eta: 23:21:40, time: 1.840, data_time: 0.022, memory: 69053, decode.loss_cls: 0.0719, decode.loss_mask: 0.2058, decode.loss_dice: 0.5488, decode.d0.loss_cls: 0.3152, decode.d0.loss_mask: 0.2157, decode.d0.loss_dice: 0.5789, decode.d1.loss_cls: 0.0868, decode.d1.loss_mask: 0.2077, decode.d1.loss_dice: 0.5612, decode.d2.loss_cls: 0.0752, decode.d2.loss_mask: 0.2074, decode.d2.loss_dice: 0.5573, decode.d3.loss_cls: 0.0721, decode.d3.loss_mask: 0.2064, decode.d3.loss_dice: 0.5517, decode.d4.loss_cls: 0.0688, decode.d4.loss_mask: 0.2067, decode.d4.loss_dice: 0.5503, decode.d5.loss_cls: 0.0714, decode.d5.loss_mask: 0.2068, decode.d5.loss_dice: 0.5489, decode.d6.loss_cls: 0.0644, decode.d6.loss_mask: 0.2060, decode.d6.loss_dice: 0.5510, decode.d7.loss_cls: 0.0650, decode.d7.loss_mask: 0.2062, decode.d7.loss_dice: 0.5479, decode.d8.loss_cls: 0.0673, decode.d8.loss_mask: 0.2061, decode.d8.loss_dice: 0.5467, loss: 8.5755 +2022-05-10 17:45:14,487 - mmseg - INFO - Iter [43950/80000] lr: 6.470e-07, eta: 23:17:39, time: 1.781, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0661, decode.loss_mask: 0.2039, decode.loss_dice: 0.5462, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.2123, decode.d0.loss_dice: 0.5691, decode.d1.loss_cls: 0.0827, 
decode.d1.loss_mask: 0.2049, decode.d1.loss_dice: 0.5582, decode.d2.loss_cls: 0.0729, decode.d2.loss_mask: 0.2051, decode.d2.loss_dice: 0.5567, decode.d3.loss_cls: 0.0697, decode.d3.loss_mask: 0.2049, decode.d3.loss_dice: 0.5468, decode.d4.loss_cls: 0.0693, decode.d4.loss_mask: 0.2044, decode.d4.loss_dice: 0.5500, decode.d5.loss_cls: 0.0689, decode.d5.loss_mask: 0.2042, decode.d5.loss_dice: 0.5506, decode.d6.loss_cls: 0.0703, decode.d6.loss_mask: 0.2041, decode.d6.loss_dice: 0.5478, decode.d7.loss_cls: 0.0649, decode.d7.loss_mask: 0.2041, decode.d7.loss_dice: 0.5519, decode.d8.loss_cls: 0.0629, decode.d8.loss_mask: 0.2045, decode.d8.loss_dice: 0.5486, loss: 8.5124 +2022-05-10 17:46:47,501 - mmseg - INFO - Saving checkpoint at 44000 iterations +2022-05-10 17:47:18,762 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 17:47:18,772 - mmseg - INFO - Iter [44000/80000] lr: 6.461e-07, eta: 23:16:18, time: 2.484, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0716, decode.loss_mask: 0.2018, decode.loss_dice: 0.5603, decode.d0.loss_cls: 0.3043, decode.d0.loss_mask: 0.2098, decode.d0.loss_dice: 0.5920, decode.d1.loss_cls: 0.0931, decode.d1.loss_mask: 0.2041, decode.d1.loss_dice: 0.5724, decode.d2.loss_cls: 0.0823, decode.d2.loss_mask: 0.2030, decode.d2.loss_dice: 0.5685, decode.d3.loss_cls: 0.0713, decode.d3.loss_mask: 0.2025, decode.d3.loss_dice: 0.5622, decode.d4.loss_cls: 0.0750, decode.d4.loss_mask: 0.2024, decode.d4.loss_dice: 0.5633, decode.d5.loss_cls: 0.0710, decode.d5.loss_mask: 0.2027, decode.d5.loss_dice: 0.5613, decode.d6.loss_cls: 0.0683, decode.d6.loss_mask: 0.2023, decode.d6.loss_dice: 0.5583, decode.d7.loss_cls: 0.0720, decode.d7.loss_mask: 0.2027, decode.d7.loss_dice: 0.5595, decode.d8.loss_cls: 0.0705, decode.d8.loss_mask: 0.2021, decode.d8.loss_dice: 0.5613, loss: 8.6721 +2022-05-10 17:49:14,529 - mmseg - INFO - per class results: +2022-05-10 17:49:14,540 - mmseg - INFO - ++---------------+-------+-------+ 
+| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.45 | 99.06 | +| sidewalk | 87.67 | 94.14 | +| building | 94.27 | 97.06 | +| wall | 68.02 | 78.8 | +| fence | 72.79 | 80.83 | +| pole | 71.32 | 83.19 | +| traffic light | 77.23 | 88.53 | +| traffic sign | 83.89 | 90.91 | +| vegetation | 93.41 | 96.89 | +| terrain | 67.91 | 75.85 | +| sky | 95.91 | 98.23 | +| person | 86.75 | 93.65 | +| rider | 74.41 | 86.28 | +| car | 96.17 | 98.43 | +| truck | 80.47 | 95.14 | +| bus | 93.8 | 96.82 | +| train | 88.1 | 91.32 | +| motorcycle | 78.09 | 88.83 | +| bicycle | 82.64 | 90.79 | ++---------------+-------+-------+ +2022-05-10 17:49:14,540 - mmseg - INFO - Summary: +2022-05-10 17:49:14,540 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.91 | 83.75 | 90.78 | ++-------+-------+-------+ +2022-05-10 17:49:14,544 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 17:49:14,544 - mmseg - INFO - Iter(val) [32] aAcc: 0.9691, mIoU: 0.8375, mAcc: 0.9078, IoU.road: 0.9845, IoU.sidewalk: 0.8767, IoU.building: 0.9427, IoU.wall: 0.6802, IoU.fence: 0.7279, IoU.pole: 0.7132, IoU.traffic light: 0.7723, IoU.traffic sign: 0.8389, IoU.vegetation: 0.9341, IoU.terrain: 0.6791, IoU.sky: 0.9591, IoU.person: 0.8675, IoU.rider: 0.7441, IoU.car: 0.9617, IoU.truck: 0.8047, IoU.bus: 0.9380, IoU.train: 0.8810, IoU.motorcycle: 0.7809, IoU.bicycle: 0.8264, Acc.road: 0.9906, Acc.sidewalk: 0.9414, Acc.building: 0.9706, Acc.wall: 0.7880, Acc.fence: 0.8083, Acc.pole: 0.8319, Acc.traffic light: 0.8853, Acc.traffic sign: 0.9091, Acc.vegetation: 0.9689, Acc.terrain: 0.7585, Acc.sky: 0.9823, Acc.person: 0.9365, Acc.rider: 0.8628, Acc.car: 0.9843, Acc.truck: 0.9514, Acc.bus: 0.9682, Acc.train: 0.9132, Acc.motorcycle: 0.8883, Acc.bicycle: 0.9079 +2022-05-10 17:50:43,696 - mmseg - INFO - Iter [44050/80000] lr: 6.452e-07, eta: 23:20:58, time: 4.100, data_time: 2.334, memory: 69053, decode.loss_cls: 0.0730, 
decode.loss_mask: 0.2067, decode.loss_dice: 0.5543, decode.d0.loss_cls: 0.3060, decode.d0.loss_mask: 0.2153, decode.d0.loss_dice: 0.5845, decode.d1.loss_cls: 0.0942, decode.d1.loss_mask: 0.2072, decode.d1.loss_dice: 0.5637, decode.d2.loss_cls: 0.0812, decode.d2.loss_mask: 0.2074, decode.d2.loss_dice: 0.5574, decode.d3.loss_cls: 0.0801, decode.d3.loss_mask: 0.2073, decode.d3.loss_dice: 0.5518, decode.d4.loss_cls: 0.0797, decode.d4.loss_mask: 0.2064, decode.d4.loss_dice: 0.5575, decode.d5.loss_cls: 0.0761, decode.d5.loss_mask: 0.2068, decode.d5.loss_dice: 0.5568, decode.d6.loss_cls: 0.0814, decode.d6.loss_mask: 0.2069, decode.d6.loss_dice: 0.5547, decode.d7.loss_cls: 0.0698, decode.d7.loss_mask: 0.2063, decode.d7.loss_dice: 0.5530, decode.d8.loss_cls: 0.0768, decode.d8.loss_mask: 0.2060, decode.d8.loss_dice: 0.5548, loss: 8.6832 +2022-05-10 17:52:13,555 - mmseg - INFO - Iter [44100/80000] lr: 6.443e-07, eta: 23:17:01, time: 1.799, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0687, decode.loss_mask: 0.2068, decode.loss_dice: 0.5614, decode.d0.loss_cls: 0.3026, decode.d0.loss_mask: 0.2158, decode.d0.loss_dice: 0.5855, decode.d1.loss_cls: 0.0840, decode.d1.loss_mask: 0.2088, decode.d1.loss_dice: 0.5678, decode.d2.loss_cls: 0.0782, decode.d2.loss_mask: 0.2080, decode.d2.loss_dice: 0.5610, decode.d3.loss_cls: 0.0692, decode.d3.loss_mask: 0.2079, decode.d3.loss_dice: 0.5601, decode.d4.loss_cls: 0.0686, decode.d4.loss_mask: 0.2070, decode.d4.loss_dice: 0.5608, decode.d5.loss_cls: 0.0664, decode.d5.loss_mask: 0.2075, decode.d5.loss_dice: 0.5608, decode.d6.loss_cls: 0.0642, decode.d6.loss_mask: 0.2070, decode.d6.loss_dice: 0.5580, decode.d7.loss_cls: 0.0667, decode.d7.loss_mask: 0.2069, decode.d7.loss_dice: 0.5588, decode.d8.loss_cls: 0.0696, decode.d8.loss_mask: 0.2071, decode.d8.loss_dice: 0.5601, loss: 8.6552 +2022-05-10 17:53:43,189 - mmseg - INFO - Iter [44150/80000] lr: 6.434e-07, eta: 23:13:05, time: 1.793, data_time: 0.018, memory: 69053, decode.loss_cls: 
0.0680, decode.loss_mask: 0.1967, decode.loss_dice: 0.5427, decode.d0.loss_cls: 0.3034, decode.d0.loss_mask: 0.2043, decode.d0.loss_dice: 0.5744, decode.d1.loss_cls: 0.0896, decode.d1.loss_mask: 0.1986, decode.d1.loss_dice: 0.5517, decode.d2.loss_cls: 0.0876, decode.d2.loss_mask: 0.1981, decode.d2.loss_dice: 0.5462, decode.d3.loss_cls: 0.0741, decode.d3.loss_mask: 0.1977, decode.d3.loss_dice: 0.5433, decode.d4.loss_cls: 0.0765, decode.d4.loss_mask: 0.1975, decode.d4.loss_dice: 0.5422, decode.d5.loss_cls: 0.0702, decode.d5.loss_mask: 0.1973, decode.d5.loss_dice: 0.5440, decode.d6.loss_cls: 0.0765, decode.d6.loss_mask: 0.1977, decode.d6.loss_dice: 0.5389, decode.d7.loss_cls: 0.0721, decode.d7.loss_mask: 0.1977, decode.d7.loss_dice: 0.5435, decode.d8.loss_cls: 0.0682, decode.d8.loss_mask: 0.1974, decode.d8.loss_dice: 0.5437, loss: 8.4399 +2022-05-10 17:55:15,887 - mmseg - INFO - Iter [44200/80000] lr: 6.425e-07, eta: 23:09:25, time: 1.854, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0660, decode.loss_mask: 0.2068, decode.loss_dice: 0.5500, decode.d0.loss_cls: 0.2969, decode.d0.loss_mask: 0.2176, decode.d0.loss_dice: 0.5821, decode.d1.loss_cls: 0.0852, decode.d1.loss_mask: 0.2097, decode.d1.loss_dice: 0.5610, decode.d2.loss_cls: 0.0751, decode.d2.loss_mask: 0.2087, decode.d2.loss_dice: 0.5537, decode.d3.loss_cls: 0.0658, decode.d3.loss_mask: 0.2079, decode.d3.loss_dice: 0.5530, decode.d4.loss_cls: 0.0687, decode.d4.loss_mask: 0.2074, decode.d4.loss_dice: 0.5574, decode.d5.loss_cls: 0.0695, decode.d5.loss_mask: 0.2079, decode.d5.loss_dice: 0.5564, decode.d6.loss_cls: 0.0610, decode.d6.loss_mask: 0.2074, decode.d6.loss_dice: 0.5524, decode.d7.loss_cls: 0.0638, decode.d7.loss_mask: 0.2071, decode.d7.loss_dice: 0.5542, decode.d8.loss_cls: 0.0703, decode.d8.loss_mask: 0.2068, decode.d8.loss_dice: 0.5523, loss: 8.5822 +2022-05-10 17:56:45,676 - mmseg - INFO - Iter [44250/80000] lr: 6.416e-07, eta: 23:05:33, time: 1.795, data_time: 0.017, memory: 69053, 
decode.loss_cls: 0.0719, decode.loss_mask: 0.1972, decode.loss_dice: 0.5457, decode.d0.loss_cls: 0.3189, decode.d0.loss_mask: 0.2049, decode.d0.loss_dice: 0.5701, decode.d1.loss_cls: 0.0915, decode.d1.loss_mask: 0.1979, decode.d1.loss_dice: 0.5552, decode.d2.loss_cls: 0.0838, decode.d2.loss_mask: 0.1968, decode.d2.loss_dice: 0.5546, decode.d3.loss_cls: 0.0733, decode.d3.loss_mask: 0.1963, decode.d3.loss_dice: 0.5438, decode.d4.loss_cls: 0.0730, decode.d4.loss_mask: 0.1970, decode.d4.loss_dice: 0.5506, decode.d5.loss_cls: 0.0769, decode.d5.loss_mask: 0.1973, decode.d5.loss_dice: 0.5470, decode.d6.loss_cls: 0.0716, decode.d6.loss_mask: 0.1970, decode.d6.loss_dice: 0.5467, decode.d7.loss_cls: 0.0718, decode.d7.loss_mask: 0.1966, decode.d7.loss_dice: 0.5469, decode.d8.loss_cls: 0.0722, decode.d8.loss_mask: 0.1971, decode.d8.loss_dice: 0.5474, loss: 8.4910 +2022-05-10 17:58:15,543 - mmseg - INFO - Iter [44300/80000] lr: 6.407e-07, eta: 23:01:43, time: 1.797, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0596, decode.loss_mask: 0.2038, decode.loss_dice: 0.5505, decode.d0.loss_cls: 0.2931, decode.d0.loss_mask: 0.2124, decode.d0.loss_dice: 0.5764, decode.d1.loss_cls: 0.0739, decode.d1.loss_mask: 0.2050, decode.d1.loss_dice: 0.5569, decode.d2.loss_cls: 0.0608, decode.d2.loss_mask: 0.2046, decode.d2.loss_dice: 0.5567, decode.d3.loss_cls: 0.0569, decode.d3.loss_mask: 0.2038, decode.d3.loss_dice: 0.5533, decode.d4.loss_cls: 0.0590, decode.d4.loss_mask: 0.2036, decode.d4.loss_dice: 0.5523, decode.d5.loss_cls: 0.0610, decode.d5.loss_mask: 0.2040, decode.d5.loss_dice: 0.5495, decode.d6.loss_cls: 0.0562, decode.d6.loss_mask: 0.2042, decode.d6.loss_dice: 0.5510, decode.d7.loss_cls: 0.0598, decode.d7.loss_mask: 0.2037, decode.d7.loss_dice: 0.5512, decode.d8.loss_cls: 0.0597, decode.d8.loss_mask: 0.2040, decode.d8.loss_dice: 0.5506, loss: 8.4377 +2022-05-10 17:59:44,173 - mmseg - INFO - Iter [44350/80000] lr: 6.398e-07, eta: 22:57:49, time: 1.774, data_time: 0.018, memory: 
69053, decode.loss_cls: 0.0584, decode.loss_mask: 0.1955, decode.loss_dice: 0.5459, decode.d0.loss_cls: 0.3031, decode.d0.loss_mask: 0.2025, decode.d0.loss_dice: 0.5721, decode.d1.loss_cls: 0.0748, decode.d1.loss_mask: 0.1965, decode.d1.loss_dice: 0.5494, decode.d2.loss_cls: 0.0727, decode.d2.loss_mask: 0.1952, decode.d2.loss_dice: 0.5494, decode.d3.loss_cls: 0.0626, decode.d3.loss_mask: 0.1949, decode.d3.loss_dice: 0.5482, decode.d4.loss_cls: 0.0676, decode.d4.loss_mask: 0.1949, decode.d4.loss_dice: 0.5439, decode.d5.loss_cls: 0.0652, decode.d5.loss_mask: 0.1952, decode.d5.loss_dice: 0.5479, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1952, decode.d6.loss_dice: 0.5427, decode.d7.loss_cls: 0.0655, decode.d7.loss_mask: 0.1951, decode.d7.loss_dice: 0.5415, decode.d8.loss_cls: 0.0615, decode.d8.loss_mask: 0.1953, decode.d8.loss_dice: 0.5443, loss: 8.3350 +2022-05-10 18:01:18,333 - mmseg - INFO - Iter [44400/80000] lr: 6.389e-07, eta: 22:54:21, time: 1.883, data_time: 0.063, memory: 69053, decode.loss_cls: 0.0644, decode.loss_mask: 0.1951, decode.loss_dice: 0.5430, decode.d0.loss_cls: 0.3115, decode.d0.loss_mask: 0.2017, decode.d0.loss_dice: 0.5687, decode.d1.loss_cls: 0.0843, decode.d1.loss_mask: 0.1971, decode.d1.loss_dice: 0.5471, decode.d2.loss_cls: 0.0754, decode.d2.loss_mask: 0.1954, decode.d2.loss_dice: 0.5447, decode.d3.loss_cls: 0.0709, decode.d3.loss_mask: 0.1943, decode.d3.loss_dice: 0.5422, decode.d4.loss_cls: 0.0675, decode.d4.loss_mask: 0.1951, decode.d4.loss_dice: 0.5447, decode.d5.loss_cls: 0.0706, decode.d5.loss_mask: 0.1945, decode.d5.loss_dice: 0.5438, decode.d6.loss_cls: 0.0742, decode.d6.loss_mask: 0.1945, decode.d6.loss_dice: 0.5419, decode.d7.loss_cls: 0.0670, decode.d7.loss_mask: 0.1949, decode.d7.loss_dice: 0.5394, decode.d8.loss_cls: 0.0703, decode.d8.loss_mask: 0.1948, decode.d8.loss_dice: 0.5387, loss: 8.3678 +2022-05-10 18:02:47,973 - mmseg - INFO - Iter [44450/80000] lr: 6.380e-07, eta: 22:50:35, time: 1.793, data_time: 0.018, 
memory: 69053, decode.loss_cls: 0.0695, decode.loss_mask: 0.1943, decode.loss_dice: 0.5339, decode.d0.loss_cls: 0.3041, decode.d0.loss_mask: 0.2014, decode.d0.loss_dice: 0.5714, decode.d1.loss_cls: 0.0919, decode.d1.loss_mask: 0.1964, decode.d1.loss_dice: 0.5483, decode.d2.loss_cls: 0.0803, decode.d2.loss_mask: 0.1956, decode.d2.loss_dice: 0.5398, decode.d3.loss_cls: 0.0778, decode.d3.loss_mask: 0.1946, decode.d3.loss_dice: 0.5344, decode.d4.loss_cls: 0.0687, decode.d4.loss_mask: 0.1951, decode.d4.loss_dice: 0.5380, decode.d5.loss_cls: 0.0677, decode.d5.loss_mask: 0.1943, decode.d5.loss_dice: 0.5377, decode.d6.loss_cls: 0.0716, decode.d6.loss_mask: 0.1948, decode.d6.loss_dice: 0.5383, decode.d7.loss_cls: 0.0667, decode.d7.loss_mask: 0.1948, decode.d7.loss_dice: 0.5362, decode.d8.loss_cls: 0.0703, decode.d8.loss_mask: 0.1942, decode.d8.loss_dice: 0.5388, loss: 8.3409 +2022-05-10 18:04:19,216 - mmseg - INFO - Iter [44500/80000] lr: 6.372e-07, eta: 22:46:58, time: 1.825, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0583, decode.loss_mask: 0.1976, decode.loss_dice: 0.5504, decode.d0.loss_cls: 0.3042, decode.d0.loss_mask: 0.2052, decode.d0.loss_dice: 0.5800, decode.d1.loss_cls: 0.0811, decode.d1.loss_mask: 0.1997, decode.d1.loss_dice: 0.5638, decode.d2.loss_cls: 0.0730, decode.d2.loss_mask: 0.1978, decode.d2.loss_dice: 0.5577, decode.d3.loss_cls: 0.0686, decode.d3.loss_mask: 0.1978, decode.d3.loss_dice: 0.5534, decode.d4.loss_cls: 0.0700, decode.d4.loss_mask: 0.1981, decode.d4.loss_dice: 0.5575, decode.d5.loss_cls: 0.0658, decode.d5.loss_mask: 0.1984, decode.d5.loss_dice: 0.5497, decode.d6.loss_cls: 0.0640, decode.d6.loss_mask: 0.1982, decode.d6.loss_dice: 0.5557, decode.d7.loss_cls: 0.0677, decode.d7.loss_mask: 0.1972, decode.d7.loss_dice: 0.5543, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1974, decode.d8.loss_dice: 0.5570, loss: 8.4785 +2022-05-10 18:05:49,550 - mmseg - INFO - Iter [44550/80000] lr: 6.363e-07, eta: 22:43:18, time: 1.807, data_time: 
0.018, memory: 69053, decode.loss_cls: 0.0747, decode.loss_mask: 0.1977, decode.loss_dice: 0.5620, decode.d0.loss_cls: 0.3204, decode.d0.loss_mask: 0.2051, decode.d0.loss_dice: 0.5922, decode.d1.loss_cls: 0.0893, decode.d1.loss_mask: 0.1996, decode.d1.loss_dice: 0.5681, decode.d2.loss_cls: 0.0812, decode.d2.loss_mask: 0.1978, decode.d2.loss_dice: 0.5640, decode.d3.loss_cls: 0.0784, decode.d3.loss_mask: 0.1975, decode.d3.loss_dice: 0.5614, decode.d4.loss_cls: 0.0762, decode.d4.loss_mask: 0.1983, decode.d4.loss_dice: 0.5663, decode.d5.loss_cls: 0.0723, decode.d5.loss_mask: 0.1980, decode.d5.loss_dice: 0.5630, decode.d6.loss_cls: 0.0720, decode.d6.loss_mask: 0.1980, decode.d6.loss_dice: 0.5633, decode.d7.loss_cls: 0.0722, decode.d7.loss_mask: 0.1974, decode.d7.loss_dice: 0.5613, decode.d8.loss_cls: 0.0691, decode.d8.loss_mask: 0.1975, decode.d8.loss_dice: 0.5662, loss: 8.6603 +2022-05-10 18:07:22,693 - mmseg - INFO - Iter [44600/80000] lr: 6.354e-07, eta: 22:39:51, time: 1.863, data_time: 0.062, memory: 69053, decode.loss_cls: 0.0641, decode.loss_mask: 0.1989, decode.loss_dice: 0.5358, decode.d0.loss_cls: 0.3106, decode.d0.loss_mask: 0.2087, decode.d0.loss_dice: 0.5647, decode.d1.loss_cls: 0.0883, decode.d1.loss_mask: 0.2001, decode.d1.loss_dice: 0.5466, decode.d2.loss_cls: 0.0751, decode.d2.loss_mask: 0.1997, decode.d2.loss_dice: 0.5393, decode.d3.loss_cls: 0.0704, decode.d3.loss_mask: 0.1996, decode.d3.loss_dice: 0.5378, decode.d4.loss_cls: 0.0710, decode.d4.loss_mask: 0.1992, decode.d4.loss_dice: 0.5361, decode.d5.loss_cls: 0.0713, decode.d5.loss_mask: 0.1994, decode.d5.loss_dice: 0.5374, decode.d6.loss_cls: 0.0688, decode.d6.loss_mask: 0.1988, decode.d6.loss_dice: 0.5353, decode.d7.loss_cls: 0.0672, decode.d7.loss_mask: 0.1991, decode.d7.loss_dice: 0.5398, decode.d8.loss_cls: 0.0731, decode.d8.loss_mask: 0.1993, decode.d8.loss_dice: 0.5361, loss: 8.3717 +2022-05-10 18:08:53,955 - mmseg - INFO - Iter [44650/80000] lr: 6.345e-07, eta: 22:36:18, time: 1.825, 
data_time: 0.019, memory: 69053, decode.loss_cls: 0.0662, decode.loss_mask: 0.1972, decode.loss_dice: 0.5562, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.2059, decode.d0.loss_dice: 0.5859, decode.d1.loss_cls: 0.0868, decode.d1.loss_mask: 0.1984, decode.d1.loss_dice: 0.5642, decode.d2.loss_cls: 0.0755, decode.d2.loss_mask: 0.1973, decode.d2.loss_dice: 0.5595, decode.d3.loss_cls: 0.0691, decode.d3.loss_mask: 0.1974, decode.d3.loss_dice: 0.5588, decode.d4.loss_cls: 0.0687, decode.d4.loss_mask: 0.1977, decode.d4.loss_dice: 0.5599, decode.d5.loss_cls: 0.0688, decode.d5.loss_mask: 0.1977, decode.d5.loss_dice: 0.5575, decode.d6.loss_cls: 0.0702, decode.d6.loss_mask: 0.1973, decode.d6.loss_dice: 0.5527, decode.d7.loss_cls: 0.0706, decode.d7.loss_mask: 0.1981, decode.d7.loss_dice: 0.5570, decode.d8.loss_cls: 0.0683, decode.d8.loss_mask: 0.1973, decode.d8.loss_dice: 0.5528, loss: 8.5393 +2022-05-10 18:10:24,028 - mmseg - INFO - Iter [44700/80000] lr: 6.336e-07, eta: 22:32:41, time: 1.801, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0735, decode.loss_mask: 0.2025, decode.loss_dice: 0.5542, decode.d0.loss_cls: 0.3102, decode.d0.loss_mask: 0.2117, decode.d0.loss_dice: 0.5808, decode.d1.loss_cls: 0.0913, decode.d1.loss_mask: 0.2044, decode.d1.loss_dice: 0.5632, decode.d2.loss_cls: 0.0811, decode.d2.loss_mask: 0.2040, decode.d2.loss_dice: 0.5574, decode.d3.loss_cls: 0.0834, decode.d3.loss_mask: 0.2037, decode.d3.loss_dice: 0.5575, decode.d4.loss_cls: 0.0786, decode.d4.loss_mask: 0.2028, decode.d4.loss_dice: 0.5564, decode.d5.loss_cls: 0.0790, decode.d5.loss_mask: 0.2026, decode.d5.loss_dice: 0.5567, decode.d6.loss_cls: 0.0693, decode.d6.loss_mask: 0.2026, decode.d6.loss_dice: 0.5569, decode.d7.loss_cls: 0.0731, decode.d7.loss_mask: 0.2025, decode.d7.loss_dice: 0.5534, decode.d8.loss_cls: 0.0720, decode.d8.loss_mask: 0.2029, decode.d8.loss_dice: 0.5503, loss: 8.6380 +2022-05-10 18:11:56,854 - mmseg - INFO - Iter [44750/80000] lr: 6.327e-07, eta: 22:29:17, time: 
1.856, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0648, decode.loss_mask: 0.2078, decode.loss_dice: 0.5390, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.2154, decode.d0.loss_dice: 0.5715, decode.d1.loss_cls: 0.0796, decode.d1.loss_mask: 0.2086, decode.d1.loss_dice: 0.5517, decode.d2.loss_cls: 0.0797, decode.d2.loss_mask: 0.2073, decode.d2.loss_dice: 0.5458, decode.d3.loss_cls: 0.0710, decode.d3.loss_mask: 0.2073, decode.d3.loss_dice: 0.5422, decode.d4.loss_cls: 0.0716, decode.d4.loss_mask: 0.2070, decode.d4.loss_dice: 0.5421, decode.d5.loss_cls: 0.0703, decode.d5.loss_mask: 0.2070, decode.d5.loss_dice: 0.5420, decode.d6.loss_cls: 0.0681, decode.d6.loss_mask: 0.2063, decode.d6.loss_dice: 0.5433, decode.d7.loss_cls: 0.0680, decode.d7.loss_mask: 0.2068, decode.d7.loss_dice: 0.5379, decode.d8.loss_cls: 0.0687, decode.d8.loss_mask: 0.2074, decode.d8.loss_dice: 0.5391, loss: 8.4832 +2022-05-10 18:13:27,992 - mmseg - INFO - Iter [44800/80000] lr: 6.318e-07, eta: 22:25:47, time: 1.823, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0508, decode.loss_mask: 0.2060, decode.loss_dice: 0.5456, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.2150, decode.d0.loss_dice: 0.5712, decode.d1.loss_cls: 0.0693, decode.d1.loss_mask: 0.2088, decode.d1.loss_dice: 0.5523, decode.d2.loss_cls: 0.0601, decode.d2.loss_mask: 0.2078, decode.d2.loss_dice: 0.5510, decode.d3.loss_cls: 0.0529, decode.d3.loss_mask: 0.2076, decode.d3.loss_dice: 0.5485, decode.d4.loss_cls: 0.0542, decode.d4.loss_mask: 0.2065, decode.d4.loss_dice: 0.5469, decode.d5.loss_cls: 0.0571, decode.d5.loss_mask: 0.2065, decode.d5.loss_dice: 0.5461, decode.d6.loss_cls: 0.0529, decode.d6.loss_mask: 0.2069, decode.d6.loss_dice: 0.5481, decode.d7.loss_cls: 0.0553, decode.d7.loss_mask: 0.2063, decode.d7.loss_dice: 0.5447, decode.d8.loss_cls: 0.0537, decode.d8.loss_mask: 0.2067, decode.d8.loss_dice: 0.5465, loss: 8.3834 +2022-05-10 18:14:58,532 - mmseg - INFO - Iter [44850/80000] lr: 6.309e-07, eta: 22:22:17, 
time: 1.811, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0649, decode.loss_mask: 0.1908, decode.loss_dice: 0.5452, decode.d0.loss_cls: 0.3067, decode.d0.loss_mask: 0.1975, decode.d0.loss_dice: 0.5657, decode.d1.loss_cls: 0.0822, decode.d1.loss_mask: 0.1923, decode.d1.loss_dice: 0.5493, decode.d2.loss_cls: 0.0688, decode.d2.loss_mask: 0.1913, decode.d2.loss_dice: 0.5487, decode.d3.loss_cls: 0.0670, decode.d3.loss_mask: 0.1914, decode.d3.loss_dice: 0.5422, decode.d4.loss_cls: 0.0660, decode.d4.loss_mask: 0.1919, decode.d4.loss_dice: 0.5440, decode.d5.loss_cls: 0.0613, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5415, decode.d6.loss_cls: 0.0609, decode.d6.loss_mask: 0.1908, decode.d6.loss_dice: 0.5426, decode.d7.loss_cls: 0.0624, decode.d7.loss_mask: 0.1913, decode.d7.loss_dice: 0.5415, decode.d8.loss_cls: 0.0648, decode.d8.loss_mask: 0.1909, decode.d8.loss_dice: 0.5410, loss: 8.2864 +2022-05-10 18:16:27,333 - mmseg - INFO - Iter [44900/80000] lr: 6.300e-07, eta: 22:18:40, time: 1.776, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0589, decode.loss_mask: 0.1960, decode.loss_dice: 0.5433, decode.d0.loss_cls: 0.3044, decode.d0.loss_mask: 0.2035, decode.d0.loss_dice: 0.5647, decode.d1.loss_cls: 0.0744, decode.d1.loss_mask: 0.1972, decode.d1.loss_dice: 0.5498, decode.d2.loss_cls: 0.0710, decode.d2.loss_mask: 0.1962, decode.d2.loss_dice: 0.5481, decode.d3.loss_cls: 0.0653, decode.d3.loss_mask: 0.1958, decode.d3.loss_dice: 0.5430, decode.d4.loss_cls: 0.0631, decode.d4.loss_mask: 0.1958, decode.d4.loss_dice: 0.5402, decode.d5.loss_cls: 0.0650, decode.d5.loss_mask: 0.1958, decode.d5.loss_dice: 0.5443, decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1954, decode.d6.loss_dice: 0.5401, decode.d7.loss_cls: 0.0625, decode.d7.loss_mask: 0.1958, decode.d7.loss_dice: 0.5416, decode.d8.loss_cls: 0.0612, decode.d8.loss_mask: 0.1954, decode.d8.loss_dice: 0.5397, loss: 8.3079 +2022-05-10 18:17:59,255 - mmseg - INFO - Iter [44950/80000] lr: 6.291e-07, eta: 
22:15:18, time: 1.839, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0647, decode.loss_mask: 0.2049, decode.loss_dice: 0.5414, decode.d0.loss_cls: 0.3035, decode.d0.loss_mask: 0.2143, decode.d0.loss_dice: 0.5679, decode.d1.loss_cls: 0.0807, decode.d1.loss_mask: 0.2058, decode.d1.loss_dice: 0.5504, decode.d2.loss_cls: 0.0768, decode.d2.loss_mask: 0.2049, decode.d2.loss_dice: 0.5483, decode.d3.loss_cls: 0.0738, decode.d3.loss_mask: 0.2049, decode.d3.loss_dice: 0.5413, decode.d4.loss_cls: 0.0728, decode.d4.loss_mask: 0.2053, decode.d4.loss_dice: 0.5381, decode.d5.loss_cls: 0.0690, decode.d5.loss_mask: 0.2056, decode.d5.loss_dice: 0.5444, decode.d6.loss_cls: 0.0680, decode.d6.loss_mask: 0.2056, decode.d6.loss_dice: 0.5411, decode.d7.loss_cls: 0.0704, decode.d7.loss_mask: 0.2051, decode.d7.loss_dice: 0.5436, decode.d8.loss_cls: 0.0686, decode.d8.loss_mask: 0.2047, decode.d8.loss_dice: 0.5397, loss: 8.4655 +2022-05-10 18:19:28,929 - mmseg - INFO - Saving checkpoint at 45000 iterations +2022-05-10 18:20:02,451 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 18:20:02,461 - mmseg - INFO - Iter [45000/80000] lr: 6.282e-07, eta: 22:13:58, time: 2.461, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0691, decode.loss_mask: 0.2026, decode.loss_dice: 0.5540, decode.d0.loss_cls: 0.2974, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.5817, decode.d1.loss_cls: 0.0783, decode.d1.loss_mask: 0.2052, decode.d1.loss_dice: 0.5631, decode.d2.loss_cls: 0.0771, decode.d2.loss_mask: 0.2038, decode.d2.loss_dice: 0.5573, decode.d3.loss_cls: 0.0690, decode.d3.loss_mask: 0.2040, decode.d3.loss_dice: 0.5571, decode.d4.loss_cls: 0.0758, decode.d4.loss_mask: 0.2036, decode.d4.loss_dice: 0.5576, decode.d5.loss_cls: 0.0673, decode.d5.loss_mask: 0.2032, decode.d5.loss_dice: 0.5529, decode.d6.loss_cls: 0.0671, decode.d6.loss_mask: 0.2027, decode.d6.loss_dice: 0.5576, decode.d7.loss_cls: 0.0728, decode.d7.loss_mask: 0.2023, 
decode.d7.loss_dice: 0.5551, decode.d8.loss_cls: 0.0702, decode.d8.loss_mask: 0.2024, decode.d8.loss_dice: 0.5545, loss: 8.5781 +2022-05-10 18:21:58,534 - mmseg - INFO - per class results: +2022-05-10 18:21:58,542 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.61 | 99.24 | +| sidewalk | 88.61 | 93.49 | +| building | 94.36 | 96.93 | +| wall | 70.09 | 81.79 | +| fence | 74.66 | 82.74 | +| pole | 71.16 | 85.2 | +| traffic light | 77.25 | 87.61 | +| traffic sign | 84.16 | 90.54 | +| vegetation | 93.38 | 96.85 | +| terrain | 68.53 | 76.94 | +| sky | 95.76 | 98.5 | +| person | 86.64 | 94.54 | +| rider | 74.11 | 85.24 | +| car | 96.17 | 98.25 | +| truck | 81.9 | 95.41 | +| bus | 93.63 | 96.91 | +| train | 87.89 | 91.08 | +| motorcycle | 77.92 | 87.99 | +| bicycle | 82.72 | 91.37 | ++---------------+-------+-------+ +2022-05-10 18:21:58,543 - mmseg - INFO - Summary: +2022-05-10 18:21:58,543 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.08 | 91.09 | ++------+-------+-------+ +2022-05-10 18:21:58,546 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 18:21:58,546 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8408, mAcc: 0.9109, IoU.road: 0.9861, IoU.sidewalk: 0.8861, IoU.building: 0.9436, IoU.wall: 0.7009, IoU.fence: 0.7466, IoU.pole: 0.7116, IoU.traffic light: 0.7725, IoU.traffic sign: 0.8416, IoU.vegetation: 0.9338, IoU.terrain: 0.6853, IoU.sky: 0.9576, IoU.person: 0.8664, IoU.rider: 0.7411, IoU.car: 0.9617, IoU.truck: 0.8190, IoU.bus: 0.9363, IoU.train: 0.8789, IoU.motorcycle: 0.7792, IoU.bicycle: 0.8272, Acc.road: 0.9924, Acc.sidewalk: 0.9349, Acc.building: 0.9693, Acc.wall: 0.8179, Acc.fence: 0.8274, Acc.pole: 0.8520, Acc.traffic light: 0.8761, Acc.traffic sign: 0.9054, Acc.vegetation: 0.9685, Acc.terrain: 0.7694, Acc.sky: 0.9850, Acc.person: 0.9454, Acc.rider: 0.8524, Acc.car: 0.9825, 
Acc.truck: 0.9541, Acc.bus: 0.9691, Acc.train: 0.9108, Acc.motorcycle: 0.8799, Acc.bicycle: 0.9137 +2022-05-10 18:23:30,704 - mmseg - INFO - Iter [45050/80000] lr: 6.273e-07, eta: 22:18:07, time: 4.168, data_time: 2.340, memory: 69053, decode.loss_cls: 0.0693, decode.loss_mask: 0.1951, decode.loss_dice: 0.5334, decode.d0.loss_cls: 0.3134, decode.d0.loss_mask: 0.2047, decode.d0.loss_dice: 0.5615, decode.d1.loss_cls: 0.0843, decode.d1.loss_mask: 0.1963, decode.d1.loss_dice: 0.5423, decode.d2.loss_cls: 0.0762, decode.d2.loss_mask: 0.1960, decode.d2.loss_dice: 0.5368, decode.d3.loss_cls: 0.0675, decode.d3.loss_mask: 0.1959, decode.d3.loss_dice: 0.5374, decode.d4.loss_cls: 0.0746, decode.d4.loss_mask: 0.1959, decode.d4.loss_dice: 0.5355, decode.d5.loss_cls: 0.0692, decode.d5.loss_mask: 0.1956, decode.d5.loss_dice: 0.5338, decode.d6.loss_cls: 0.0678, decode.d6.loss_mask: 0.1953, decode.d6.loss_dice: 0.5328, decode.d7.loss_cls: 0.0729, decode.d7.loss_mask: 0.1958, decode.d7.loss_dice: 0.5374, decode.d8.loss_cls: 0.0671, decode.d8.loss_mask: 0.1957, decode.d8.loss_dice: 0.5358, loss: 8.3152 +2022-05-10 18:25:00,796 - mmseg - INFO - Iter [45100/80000] lr: 6.264e-07, eta: 22:14:37, time: 1.802, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0710, decode.loss_mask: 0.2003, decode.loss_dice: 0.5434, decode.d0.loss_cls: 0.3085, decode.d0.loss_mask: 0.2086, decode.d0.loss_dice: 0.5716, decode.d1.loss_cls: 0.0892, decode.d1.loss_mask: 0.2016, decode.d1.loss_dice: 0.5556, decode.d2.loss_cls: 0.0856, decode.d2.loss_mask: 0.2012, decode.d2.loss_dice: 0.5493, decode.d3.loss_cls: 0.0723, decode.d3.loss_mask: 0.2008, decode.d3.loss_dice: 0.5466, decode.d4.loss_cls: 0.0766, decode.d4.loss_mask: 0.2009, decode.d4.loss_dice: 0.5448, decode.d5.loss_cls: 0.0820, decode.d5.loss_mask: 0.2011, decode.d5.loss_dice: 0.5466, decode.d6.loss_cls: 0.0718, decode.d6.loss_mask: 0.2002, decode.d6.loss_dice: 0.5410, decode.d7.loss_cls: 0.0686, decode.d7.loss_mask: 0.2006, decode.d7.loss_dice: 
0.5439, decode.d8.loss_cls: 0.0751, decode.d8.loss_mask: 0.2005, decode.d8.loss_dice: 0.5470, loss: 8.5062 +2022-05-10 18:26:34,673 - mmseg - INFO - Iter [45150/80000] lr: 6.255e-07, eta: 22:11:23, time: 1.877, data_time: 0.068, memory: 69053, decode.loss_cls: 0.0732, decode.loss_mask: 0.2014, decode.loss_dice: 0.5528, decode.d0.loss_cls: 0.3122, decode.d0.loss_mask: 0.2112, decode.d0.loss_dice: 0.5769, decode.d1.loss_cls: 0.0896, decode.d1.loss_mask: 0.2046, decode.d1.loss_dice: 0.5622, decode.d2.loss_cls: 0.0822, decode.d2.loss_mask: 0.2020, decode.d2.loss_dice: 0.5550, decode.d3.loss_cls: 0.0775, decode.d3.loss_mask: 0.2012, decode.d3.loss_dice: 0.5547, decode.d4.loss_cls: 0.0762, decode.d4.loss_mask: 0.2013, decode.d4.loss_dice: 0.5521, decode.d5.loss_cls: 0.0760, decode.d5.loss_mask: 0.2011, decode.d5.loss_dice: 0.5506, decode.d6.loss_cls: 0.0700, decode.d6.loss_mask: 0.2012, decode.d6.loss_dice: 0.5527, decode.d7.loss_cls: 0.0754, decode.d7.loss_mask: 0.2015, decode.d7.loss_dice: 0.5523, decode.d8.loss_cls: 0.0706, decode.d8.loss_mask: 0.2010, decode.d8.loss_dice: 0.5533, loss: 8.5919 +2022-05-10 18:28:05,196 - mmseg - INFO - Iter [45200/80000] lr: 6.246e-07, eta: 22:07:57, time: 1.810, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0663, decode.loss_mask: 0.2032, decode.loss_dice: 0.5527, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.2102, decode.d0.loss_dice: 0.5826, decode.d1.loss_cls: 0.0861, decode.d1.loss_mask: 0.2040, decode.d1.loss_dice: 0.5623, decode.d2.loss_cls: 0.0698, decode.d2.loss_mask: 0.2035, decode.d2.loss_dice: 0.5520, decode.d3.loss_cls: 0.0666, decode.d3.loss_mask: 0.2030, decode.d3.loss_dice: 0.5480, decode.d4.loss_cls: 0.0682, decode.d4.loss_mask: 0.2029, decode.d4.loss_dice: 0.5463, decode.d5.loss_cls: 0.0670, decode.d5.loss_mask: 0.2028, decode.d5.loss_dice: 0.5529, decode.d6.loss_cls: 0.0621, decode.d6.loss_mask: 0.2022, decode.d6.loss_dice: 0.5500, decode.d7.loss_cls: 0.0692, decode.d7.loss_mask: 0.2028, 
decode.d7.loss_dice: 0.5519, decode.d8.loss_cls: 0.0676, decode.d8.loss_mask: 0.2024, decode.d8.loss_dice: 0.5533, loss: 8.5146 +2022-05-10 18:29:35,802 - mmseg - INFO - Iter [45250/80000] lr: 6.237e-07, eta: 22:04:33, time: 1.812, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0505, decode.loss_mask: 0.1991, decode.loss_dice: 0.5354, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.2066, decode.d0.loss_dice: 0.5590, decode.d1.loss_cls: 0.0730, decode.d1.loss_mask: 0.2004, decode.d1.loss_dice: 0.5464, decode.d2.loss_cls: 0.0679, decode.d2.loss_mask: 0.2004, decode.d2.loss_dice: 0.5434, decode.d3.loss_cls: 0.0601, decode.d3.loss_mask: 0.1997, decode.d3.loss_dice: 0.5379, decode.d4.loss_cls: 0.0600, decode.d4.loss_mask: 0.2000, decode.d4.loss_dice: 0.5384, decode.d5.loss_cls: 0.0546, decode.d5.loss_mask: 0.2001, decode.d5.loss_dice: 0.5366, decode.d6.loss_cls: 0.0559, decode.d6.loss_mask: 0.1999, decode.d6.loss_dice: 0.5384, decode.d7.loss_cls: 0.0502, decode.d7.loss_mask: 0.2000, decode.d7.loss_dice: 0.5395, decode.d8.loss_cls: 0.0565, decode.d8.loss_mask: 0.1996, decode.d8.loss_dice: 0.5384, loss: 8.2467 +2022-05-10 18:31:07,199 - mmseg - INFO - Iter [45300/80000] lr: 6.228e-07, eta: 22:01:13, time: 1.828, data_time: 0.062, memory: 69053, decode.loss_cls: 0.0696, decode.loss_mask: 0.1957, decode.loss_dice: 0.5468, decode.d0.loss_cls: 0.3081, decode.d0.loss_mask: 0.2030, decode.d0.loss_dice: 0.5750, decode.d1.loss_cls: 0.0913, decode.d1.loss_mask: 0.1967, decode.d1.loss_dice: 0.5540, decode.d2.loss_cls: 0.0705, decode.d2.loss_mask: 0.1959, decode.d2.loss_dice: 0.5469, decode.d3.loss_cls: 0.0715, decode.d3.loss_mask: 0.1959, decode.d3.loss_dice: 0.5445, decode.d4.loss_cls: 0.0698, decode.d4.loss_mask: 0.1962, decode.d4.loss_dice: 0.5498, decode.d5.loss_cls: 0.0737, decode.d5.loss_mask: 0.1959, decode.d5.loss_dice: 0.5461, decode.d6.loss_cls: 0.0693, decode.d6.loss_mask: 0.1956, decode.d6.loss_dice: 0.5453, decode.d7.loss_cls: 0.0649, decode.d7.loss_mask: 
0.1953, decode.d7.loss_dice: 0.5488, decode.d8.loss_cls: 0.0662, decode.d8.loss_mask: 0.1954, decode.d8.loss_dice: 0.5471, loss: 8.4249 +2022-05-10 18:32:36,289 - mmseg - INFO - Iter [45350/80000] lr: 6.219e-07, eta: 21:57:45, time: 1.782, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0613, decode.loss_mask: 0.2026, decode.loss_dice: 0.5389, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.2129, decode.d0.loss_dice: 0.5626, decode.d1.loss_cls: 0.0825, decode.d1.loss_mask: 0.2050, decode.d1.loss_dice: 0.5485, decode.d2.loss_cls: 0.0692, decode.d2.loss_mask: 0.2041, decode.d2.loss_dice: 0.5416, decode.d3.loss_cls: 0.0653, decode.d3.loss_mask: 0.2040, decode.d3.loss_dice: 0.5393, decode.d4.loss_cls: 0.0659, decode.d4.loss_mask: 0.2041, decode.d4.loss_dice: 0.5379, decode.d5.loss_cls: 0.0720, decode.d5.loss_mask: 0.2038, decode.d5.loss_dice: 0.5418, decode.d6.loss_cls: 0.0611, decode.d6.loss_mask: 0.2032, decode.d6.loss_dice: 0.5368, decode.d7.loss_cls: 0.0604, decode.d7.loss_mask: 0.2036, decode.d7.loss_dice: 0.5377, decode.d8.loss_cls: 0.0614, decode.d8.loss_mask: 0.2023, decode.d8.loss_dice: 0.5353, loss: 8.3713 +2022-05-10 18:34:05,769 - mmseg - INFO - Iter [45400/80000] lr: 6.210e-07, eta: 21:54:20, time: 1.789, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0620, decode.loss_mask: 0.2027, decode.loss_dice: 0.5515, decode.d0.loss_cls: 0.3003, decode.d0.loss_mask: 0.2117, decode.d0.loss_dice: 0.5784, decode.d1.loss_cls: 0.0765, decode.d1.loss_mask: 0.2043, decode.d1.loss_dice: 0.5642, decode.d2.loss_cls: 0.0684, decode.d2.loss_mask: 0.2038, decode.d2.loss_dice: 0.5544, decode.d3.loss_cls: 0.0716, decode.d3.loss_mask: 0.2028, decode.d3.loss_dice: 0.5503, decode.d4.loss_cls: 0.0596, decode.d4.loss_mask: 0.2029, decode.d4.loss_dice: 0.5522, decode.d5.loss_cls: 0.0587, decode.d5.loss_mask: 0.2023, decode.d5.loss_dice: 0.5533, decode.d6.loss_cls: 0.0637, decode.d6.loss_mask: 0.2019, decode.d6.loss_dice: 0.5531, decode.d7.loss_cls: 0.0618, 
decode.d7.loss_mask: 0.2027, decode.d7.loss_dice: 0.5496, decode.d8.loss_cls: 0.0574, decode.d8.loss_mask: 0.2028, decode.d8.loss_dice: 0.5482, loss: 8.4729 +2022-05-10 18:35:35,429 - mmseg - INFO - Iter [45450/80000] lr: 6.201e-07, eta: 21:50:58, time: 1.794, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0560, decode.loss_mask: 0.1982, decode.loss_dice: 0.5276, decode.d0.loss_cls: 0.3008, decode.d0.loss_mask: 0.2071, decode.d0.loss_dice: 0.5551, decode.d1.loss_cls: 0.0761, decode.d1.loss_mask: 0.2000, decode.d1.loss_dice: 0.5391, decode.d2.loss_cls: 0.0699, decode.d2.loss_mask: 0.1998, decode.d2.loss_dice: 0.5314, decode.d3.loss_cls: 0.0571, decode.d3.loss_mask: 0.1998, decode.d3.loss_dice: 0.5304, decode.d4.loss_cls: 0.0621, decode.d4.loss_mask: 0.2000, decode.d4.loss_dice: 0.5303, decode.d5.loss_cls: 0.0605, decode.d5.loss_mask: 0.1992, decode.d5.loss_dice: 0.5326, decode.d6.loss_cls: 0.0575, decode.d6.loss_mask: 0.1992, decode.d6.loss_dice: 0.5257, decode.d7.loss_cls: 0.0539, decode.d7.loss_mask: 0.1992, decode.d7.loss_dice: 0.5314, decode.d8.loss_cls: 0.0588, decode.d8.loss_mask: 0.1988, decode.d8.loss_dice: 0.5290, loss: 8.1866 +2022-05-10 18:37:06,766 - mmseg - INFO - Iter [45500/80000] lr: 6.192e-07, eta: 21:47:42, time: 1.827, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0618, decode.loss_mask: 0.2010, decode.loss_dice: 0.5472, decode.d0.loss_cls: 0.3013, decode.d0.loss_mask: 0.2098, decode.d0.loss_dice: 0.5756, decode.d1.loss_cls: 0.0775, decode.d1.loss_mask: 0.2033, decode.d1.loss_dice: 0.5628, decode.d2.loss_cls: 0.0666, decode.d2.loss_mask: 0.2024, decode.d2.loss_dice: 0.5541, decode.d3.loss_cls: 0.0654, decode.d3.loss_mask: 0.2018, decode.d3.loss_dice: 0.5521, decode.d4.loss_cls: 0.0683, decode.d4.loss_mask: 0.2017, decode.d4.loss_dice: 0.5526, decode.d5.loss_cls: 0.0585, decode.d5.loss_mask: 0.2016, decode.d5.loss_dice: 0.5569, decode.d6.loss_cls: 0.0589, decode.d6.loss_mask: 0.2011, decode.d6.loss_dice: 0.5463, decode.d7.loss_cls: 
0.0615, decode.d7.loss_mask: 0.2019, decode.d7.loss_dice: 0.5498, decode.d8.loss_cls: 0.0556, decode.d8.loss_mask: 0.2011, decode.d8.loss_dice: 0.5512, loss: 8.4498 +2022-05-10 18:38:36,830 - mmseg - INFO - Iter [45550/80000] lr: 6.183e-07, eta: 21:44:23, time: 1.801, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0641, decode.loss_mask: 0.1978, decode.loss_dice: 0.5392, decode.d0.loss_cls: 0.3068, decode.d0.loss_mask: 0.2039, decode.d0.loss_dice: 0.5662, decode.d1.loss_cls: 0.0926, decode.d1.loss_mask: 0.1993, decode.d1.loss_dice: 0.5491, decode.d2.loss_cls: 0.0814, decode.d2.loss_mask: 0.1982, decode.d2.loss_dice: 0.5382, decode.d3.loss_cls: 0.0751, decode.d3.loss_mask: 0.1978, decode.d3.loss_dice: 0.5382, decode.d4.loss_cls: 0.0691, decode.d4.loss_mask: 0.1978, decode.d4.loss_dice: 0.5427, decode.d5.loss_cls: 0.0731, decode.d5.loss_mask: 0.1980, decode.d5.loss_dice: 0.5418, decode.d6.loss_cls: 0.0703, decode.d6.loss_mask: 0.1980, decode.d6.loss_dice: 0.5363, decode.d7.loss_cls: 0.0656, decode.d7.loss_mask: 0.1985, decode.d7.loss_dice: 0.5415, decode.d8.loss_cls: 0.0669, decode.d8.loss_mask: 0.1977, decode.d8.loss_dice: 0.5361, loss: 8.3815 +2022-05-10 18:40:07,684 - mmseg - INFO - Iter [45600/80000] lr: 6.174e-07, eta: 21:41:08, time: 1.817, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0755, decode.loss_mask: 0.2023, decode.loss_dice: 0.5469, decode.d0.loss_cls: 0.3171, decode.d0.loss_mask: 0.2111, decode.d0.loss_dice: 0.5766, decode.d1.loss_cls: 0.0938, decode.d1.loss_mask: 0.2038, decode.d1.loss_dice: 0.5558, decode.d2.loss_cls: 0.0814, decode.d2.loss_mask: 0.2023, decode.d2.loss_dice: 0.5577, decode.d3.loss_cls: 0.0890, decode.d3.loss_mask: 0.2021, decode.d3.loss_dice: 0.5490, decode.d4.loss_cls: 0.0823, decode.d4.loss_mask: 0.2022, decode.d4.loss_dice: 0.5488, decode.d5.loss_cls: 0.0812, decode.d5.loss_mask: 0.2019, decode.d5.loss_dice: 0.5521, decode.d6.loss_cls: 0.0778, decode.d6.loss_mask: 0.2016, decode.d6.loss_dice: 0.5499, 
decode.d7.loss_cls: 0.0772, decode.d7.loss_mask: 0.2015, decode.d7.loss_dice: 0.5485, decode.d8.loss_cls: 0.0741, decode.d8.loss_mask: 0.2024, decode.d8.loss_dice: 0.5502, loss: 8.6162 +2022-05-10 18:41:38,191 - mmseg - INFO - Iter [45650/80000] lr: 6.165e-07, eta: 21:37:53, time: 1.810, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0558, decode.loss_mask: 0.1993, decode.loss_dice: 0.5442, decode.d0.loss_cls: 0.2924, decode.d0.loss_mask: 0.2075, decode.d0.loss_dice: 0.5700, decode.d1.loss_cls: 0.0759, decode.d1.loss_mask: 0.2001, decode.d1.loss_dice: 0.5545, decode.d2.loss_cls: 0.0659, decode.d2.loss_mask: 0.1991, decode.d2.loss_dice: 0.5533, decode.d3.loss_cls: 0.0615, decode.d3.loss_mask: 0.1993, decode.d3.loss_dice: 0.5461, decode.d4.loss_cls: 0.0607, decode.d4.loss_mask: 0.1995, decode.d4.loss_dice: 0.5501, decode.d5.loss_cls: 0.0613, decode.d5.loss_mask: 0.1987, decode.d5.loss_dice: 0.5420, decode.d6.loss_cls: 0.0612, decode.d6.loss_mask: 0.1992, decode.d6.loss_dice: 0.5467, decode.d7.loss_cls: 0.0550, decode.d7.loss_mask: 0.1991, decode.d7.loss_dice: 0.5456, decode.d8.loss_cls: 0.0538, decode.d8.loss_mask: 0.1990, decode.d8.loss_dice: 0.5409, loss: 8.3376 +2022-05-10 18:43:12,574 - mmseg - INFO - Iter [45700/80000] lr: 6.156e-07, eta: 21:34:53, time: 1.888, data_time: 0.067, memory: 69053, decode.loss_cls: 0.0629, decode.loss_mask: 0.2009, decode.loss_dice: 0.5534, decode.d0.loss_cls: 0.2904, decode.d0.loss_mask: 0.2102, decode.d0.loss_dice: 0.5793, decode.d1.loss_cls: 0.0816, decode.d1.loss_mask: 0.2018, decode.d1.loss_dice: 0.5590, decode.d2.loss_cls: 0.0689, decode.d2.loss_mask: 0.2013, decode.d2.loss_dice: 0.5568, decode.d3.loss_cls: 0.0682, decode.d3.loss_mask: 0.2014, decode.d3.loss_dice: 0.5520, decode.d4.loss_cls: 0.0625, decode.d4.loss_mask: 0.2018, decode.d4.loss_dice: 0.5518, decode.d5.loss_cls: 0.0680, decode.d5.loss_mask: 0.2015, decode.d5.loss_dice: 0.5520, decode.d6.loss_cls: 0.0611, decode.d6.loss_mask: 0.2017, decode.d6.loss_dice: 
0.5522, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.2016, decode.d7.loss_dice: 0.5530, decode.d8.loss_cls: 0.0610, decode.d8.loss_mask: 0.2009, decode.d8.loss_dice: 0.5498, loss: 8.4680 +2022-05-10 18:44:43,345 - mmseg - INFO - Iter [45750/80000] lr: 6.147e-07, eta: 21:31:40, time: 1.815, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0754, decode.loss_mask: 0.1967, decode.loss_dice: 0.5384, decode.d0.loss_cls: 0.3137, decode.d0.loss_mask: 0.2083, decode.d0.loss_dice: 0.5640, decode.d1.loss_cls: 0.0949, decode.d1.loss_mask: 0.1979, decode.d1.loss_dice: 0.5475, decode.d2.loss_cls: 0.0822, decode.d2.loss_mask: 0.1970, decode.d2.loss_dice: 0.5426, decode.d3.loss_cls: 0.0850, decode.d3.loss_mask: 0.1970, decode.d3.loss_dice: 0.5409, decode.d4.loss_cls: 0.0827, decode.d4.loss_mask: 0.1972, decode.d4.loss_dice: 0.5381, decode.d5.loss_cls: 0.0766, decode.d5.loss_mask: 0.1975, decode.d5.loss_dice: 0.5410, decode.d6.loss_cls: 0.0717, decode.d6.loss_mask: 0.1968, decode.d6.loss_dice: 0.5347, decode.d7.loss_cls: 0.0805, decode.d7.loss_mask: 0.1968, decode.d7.loss_dice: 0.5383, decode.d8.loss_cls: 0.0795, decode.d8.loss_mask: 0.1962, decode.d8.loss_dice: 0.5332, loss: 8.4423 +2022-05-10 18:46:12,506 - mmseg - INFO - Iter [45800/80000] lr: 6.138e-07, eta: 21:28:24, time: 1.783, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0648, decode.loss_mask: 0.2001, decode.loss_dice: 0.5432, decode.d0.loss_cls: 0.3038, decode.d0.loss_mask: 0.2091, decode.d0.loss_dice: 0.5681, decode.d1.loss_cls: 0.0750, decode.d1.loss_mask: 0.2020, decode.d1.loss_dice: 0.5498, decode.d2.loss_cls: 0.0641, decode.d2.loss_mask: 0.2011, decode.d2.loss_dice: 0.5501, decode.d3.loss_cls: 0.0626, decode.d3.loss_mask: 0.2007, decode.d3.loss_dice: 0.5413, decode.d4.loss_cls: 0.0630, decode.d4.loss_mask: 0.2013, decode.d4.loss_dice: 0.5461, decode.d5.loss_cls: 0.0653, decode.d5.loss_mask: 0.2011, decode.d5.loss_dice: 0.5437, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.2007, 
decode.d6.loss_dice: 0.5422, decode.d7.loss_cls: 0.0614, decode.d7.loss_mask: 0.2007, decode.d7.loss_dice: 0.5411, decode.d8.loss_cls: 0.0658, decode.d8.loss_mask: 0.2006, decode.d8.loss_dice: 0.5416, loss: 8.3691 +2022-05-10 18:47:43,360 - mmseg - INFO - Iter [45850/80000] lr: 6.129e-07, eta: 21:25:14, time: 1.817, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0608, decode.loss_mask: 0.2012, decode.loss_dice: 0.5369, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2089, decode.d0.loss_dice: 0.5601, decode.d1.loss_cls: 0.0758, decode.d1.loss_mask: 0.2021, decode.d1.loss_dice: 0.5449, decode.d2.loss_cls: 0.0728, decode.d2.loss_mask: 0.2019, decode.d2.loss_dice: 0.5419, decode.d3.loss_cls: 0.0646, decode.d3.loss_mask: 0.2020, decode.d3.loss_dice: 0.5393, decode.d4.loss_cls: 0.0653, decode.d4.loss_mask: 0.2020, decode.d4.loss_dice: 0.5375, decode.d5.loss_cls: 0.0631, decode.d5.loss_mask: 0.2018, decode.d5.loss_dice: 0.5349, decode.d6.loss_cls: 0.0678, decode.d6.loss_mask: 0.2015, decode.d6.loss_dice: 0.5320, decode.d7.loss_cls: 0.0614, decode.d7.loss_mask: 0.2014, decode.d7.loss_dice: 0.5331, decode.d8.loss_cls: 0.0633, decode.d8.loss_mask: 0.2013, decode.d8.loss_dice: 0.5335, loss: 8.3196 +2022-05-10 18:49:16,343 - mmseg - INFO - Iter [45900/80000] lr: 6.120e-07, eta: 21:22:12, time: 1.860, data_time: 0.062, memory: 69053, decode.loss_cls: 0.0570, decode.loss_mask: 0.1997, decode.loss_dice: 0.5521, decode.d0.loss_cls: 0.3013, decode.d0.loss_mask: 0.2070, decode.d0.loss_dice: 0.5753, decode.d1.loss_cls: 0.0757, decode.d1.loss_mask: 0.2017, decode.d1.loss_dice: 0.5537, decode.d2.loss_cls: 0.0694, decode.d2.loss_mask: 0.2012, decode.d2.loss_dice: 0.5560, decode.d3.loss_cls: 0.0630, decode.d3.loss_mask: 0.2004, decode.d3.loss_dice: 0.5479, decode.d4.loss_cls: 0.0610, decode.d4.loss_mask: 0.2001, decode.d4.loss_dice: 0.5523, decode.d5.loss_cls: 0.0578, decode.d5.loss_mask: 0.2002, decode.d5.loss_dice: 0.5531, decode.d6.loss_cls: 0.0622, decode.d6.loss_mask: 
0.1998, decode.d6.loss_dice: 0.5485, decode.d7.loss_cls: 0.0591, decode.d7.loss_mask: 0.1999, decode.d7.loss_dice: 0.5491, decode.d8.loss_cls: 0.0639, decode.d8.loss_mask: 0.1994, decode.d8.loss_dice: 0.5488, loss: 8.4164 +2022-05-10 18:50:48,361 - mmseg - INFO - Iter [45950/80000] lr: 6.111e-07, eta: 21:19:08, time: 1.841, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0637, decode.loss_mask: 0.1990, decode.loss_dice: 0.5427, decode.d0.loss_cls: 0.3048, decode.d0.loss_mask: 0.2076, decode.d0.loss_dice: 0.5699, decode.d1.loss_cls: 0.0811, decode.d1.loss_mask: 0.2001, decode.d1.loss_dice: 0.5539, decode.d2.loss_cls: 0.0694, decode.d2.loss_mask: 0.1995, decode.d2.loss_dice: 0.5508, decode.d3.loss_cls: 0.0680, decode.d3.loss_mask: 0.1993, decode.d3.loss_dice: 0.5455, decode.d4.loss_cls: 0.0701, decode.d4.loss_mask: 0.1995, decode.d4.loss_dice: 0.5462, decode.d5.loss_cls: 0.0724, decode.d5.loss_mask: 0.1993, decode.d5.loss_dice: 0.5460, decode.d6.loss_cls: 0.0658, decode.d6.loss_mask: 0.1991, decode.d6.loss_dice: 0.5424, decode.d7.loss_cls: 0.0627, decode.d7.loss_mask: 0.1994, decode.d7.loss_dice: 0.5439, decode.d8.loss_cls: 0.0685, decode.d8.loss_mask: 0.1990, decode.d8.loss_dice: 0.5444, loss: 8.4139 +2022-05-10 18:52:18,741 - mmseg - INFO - Saving checkpoint at 46000 iterations +2022-05-10 18:52:50,152 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 18:52:50,171 - mmseg - INFO - Iter [46000/80000] lr: 6.102e-07, eta: 21:17:46, time: 2.432, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0744, decode.loss_mask: 0.2033, decode.loss_dice: 0.5591, decode.d0.loss_cls: 0.3078, decode.d0.loss_mask: 0.2108, decode.d0.loss_dice: 0.5878, decode.d1.loss_cls: 0.1022, decode.d1.loss_mask: 0.2042, decode.d1.loss_dice: 0.5696, decode.d2.loss_cls: 0.0903, decode.d2.loss_mask: 0.2037, decode.d2.loss_dice: 0.5662, decode.d3.loss_cls: 0.0886, decode.d3.loss_mask: 0.2038, decode.d3.loss_dice: 0.5605, decode.d4.loss_cls: 0.0827, 
decode.d4.loss_mask: 0.2033, decode.d4.loss_dice: 0.5644, decode.d5.loss_cls: 0.0854, decode.d5.loss_mask: 0.2031, decode.d5.loss_dice: 0.5642, decode.d6.loss_cls: 0.0804, decode.d6.loss_mask: 0.2029, decode.d6.loss_dice: 0.5618, decode.d7.loss_cls: 0.0796, decode.d7.loss_mask: 0.2022, decode.d7.loss_dice: 0.5600, decode.d8.loss_cls: 0.0751, decode.d8.loss_mask: 0.2034, decode.d8.loss_dice: 0.5580, loss: 8.7588 +2022-05-10 18:54:46,006 - mmseg - INFO - per class results: +2022-05-10 18:54:46,019 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.51 | 99.18 | +| sidewalk | 88.59 | 93.6 | +| building | 94.37 | 97.05 | +| wall | 68.54 | 79.14 | +| fence | 73.55 | 81.18 | +| pole | 71.31 | 83.85 | +| traffic light | 77.23 | 87.5 | +| traffic sign | 83.84 | 90.13 | +| vegetation | 93.31 | 96.93 | +| terrain | 68.27 | 76.66 | +| sky | 95.73 | 98.63 | +| person | 86.8 | 93.7 | +| rider | 74.72 | 85.51 | +| car | 96.11 | 98.36 | +| truck | 81.24 | 95.0 | +| bus | 93.79 | 97.0 | +| train | 87.27 | 91.19 | +| motorcycle | 78.42 | 88.75 | +| bicycle | 82.97 | 91.67 | ++---------------+-------+-------+ +2022-05-10 18:54:46,020 - mmseg - INFO - Summary: +2022-05-10 18:54:46,020 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.97 | 83.92 | 90.79 | ++-------+-------+-------+ +2022-05-10 18:54:46,023 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 18:54:46,023 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8392, mAcc: 0.9079, IoU.road: 0.9851, IoU.sidewalk: 0.8859, IoU.building: 0.9437, IoU.wall: 0.6854, IoU.fence: 0.7355, IoU.pole: 0.7131, IoU.traffic light: 0.7723, IoU.traffic sign: 0.8384, IoU.vegetation: 0.9331, IoU.terrain: 0.6827, IoU.sky: 0.9573, IoU.person: 0.8680, IoU.rider: 0.7472, IoU.car: 0.9611, IoU.truck: 0.8124, IoU.bus: 0.9379, IoU.train: 0.8727, IoU.motorcycle: 0.7842, IoU.bicycle: 0.8297, 
Acc.road: 0.9918, Acc.sidewalk: 0.9360, Acc.building: 0.9705, Acc.wall: 0.7914, Acc.fence: 0.8118, Acc.pole: 0.8385, Acc.traffic light: 0.8750, Acc.traffic sign: 0.9013, Acc.vegetation: 0.9693, Acc.terrain: 0.7666, Acc.sky: 0.9863, Acc.person: 0.9370, Acc.rider: 0.8551, Acc.car: 0.9836, Acc.truck: 0.9500, Acc.bus: 0.9700, Acc.train: 0.9119, Acc.motorcycle: 0.8875, Acc.bicycle: 0.9167 +2022-05-10 18:56:19,693 - mmseg - INFO - Iter [46050/80000] lr: 6.093e-07, eta: 21:21:21, time: 4.194, data_time: 2.392, memory: 69053, decode.loss_cls: 0.0676, decode.loss_mask: 0.2032, decode.loss_dice: 0.5497, decode.d0.loss_cls: 0.3058, decode.d0.loss_mask: 0.2132, decode.d0.loss_dice: 0.5791, decode.d1.loss_cls: 0.0920, decode.d1.loss_mask: 0.2051, decode.d1.loss_dice: 0.5640, decode.d2.loss_cls: 0.0721, decode.d2.loss_mask: 0.2046, decode.d2.loss_dice: 0.5571, decode.d3.loss_cls: 0.0706, decode.d3.loss_mask: 0.2037, decode.d3.loss_dice: 0.5545, decode.d4.loss_cls: 0.0657, decode.d4.loss_mask: 0.2031, decode.d4.loss_dice: 0.5544, decode.d5.loss_cls: 0.0640, decode.d5.loss_mask: 0.2037, decode.d5.loss_dice: 0.5553, decode.d6.loss_cls: 0.0648, decode.d6.loss_mask: 0.2038, decode.d6.loss_dice: 0.5503, decode.d7.loss_cls: 0.0656, decode.d7.loss_mask: 0.2037, decode.d7.loss_dice: 0.5489, decode.d8.loss_cls: 0.0641, decode.d8.loss_mask: 0.2033, decode.d8.loss_dice: 0.5504, loss: 8.5437 +2022-05-10 18:57:49,484 - mmseg - INFO - Iter [46100/80000] lr: 6.084e-07, eta: 21:18:09, time: 1.796, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0676, decode.loss_mask: 0.1983, decode.loss_dice: 0.5488, decode.d0.loss_cls: 0.3159, decode.d0.loss_mask: 0.2074, decode.d0.loss_dice: 0.5797, decode.d1.loss_cls: 0.0827, decode.d1.loss_mask: 0.1999, decode.d1.loss_dice: 0.5626, decode.d2.loss_cls: 0.0760, decode.d2.loss_mask: 0.1996, decode.d2.loss_dice: 0.5554, decode.d3.loss_cls: 0.0735, decode.d3.loss_mask: 0.1986, decode.d3.loss_dice: 0.5518, decode.d4.loss_cls: 0.0712, decode.d4.loss_mask: 
0.1989, decode.d4.loss_dice: 0.5527, decode.d5.loss_cls: 0.0716, decode.d5.loss_mask: 0.1991, decode.d5.loss_dice: 0.5517, decode.d6.loss_cls: 0.0663, decode.d6.loss_mask: 0.1991, decode.d6.loss_dice: 0.5493, decode.d7.loss_cls: 0.0720, decode.d7.loss_mask: 0.1984, decode.d7.loss_dice: 0.5485, decode.d8.loss_cls: 0.0660, decode.d8.loss_mask: 0.1985, decode.d8.loss_dice: 0.5494, loss: 8.5105 +2022-05-10 18:59:19,546 - mmseg - INFO - Iter [46150/80000] lr: 6.075e-07, eta: 21:14:59, time: 1.801, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0651, decode.loss_mask: 0.2045, decode.loss_dice: 0.5458, decode.d0.loss_cls: 0.3109, decode.d0.loss_mask: 0.2126, decode.d0.loss_dice: 0.5718, decode.d1.loss_cls: 0.0807, decode.d1.loss_mask: 0.2055, decode.d1.loss_dice: 0.5562, decode.d2.loss_cls: 0.0817, decode.d2.loss_mask: 0.2055, decode.d2.loss_dice: 0.5539, decode.d3.loss_cls: 0.0651, decode.d3.loss_mask: 0.2048, decode.d3.loss_dice: 0.5502, decode.d4.loss_cls: 0.0763, decode.d4.loss_mask: 0.2052, decode.d4.loss_dice: 0.5499, decode.d5.loss_cls: 0.0729, decode.d5.loss_mask: 0.2050, decode.d5.loss_dice: 0.5522, decode.d6.loss_cls: 0.0674, decode.d6.loss_mask: 0.2049, decode.d6.loss_dice: 0.5502, decode.d7.loss_cls: 0.0713, decode.d7.loss_mask: 0.2048, decode.d7.loss_dice: 0.5489, decode.d8.loss_cls: 0.0633, decode.d8.loss_mask: 0.2049, decode.d8.loss_dice: 0.5503, loss: 8.5418 +2022-05-10 19:00:49,954 - mmseg - INFO - Iter [46200/80000] lr: 6.066e-07, eta: 21:11:51, time: 1.808, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0667, decode.loss_mask: 0.1969, decode.loss_dice: 0.5585, decode.d0.loss_cls: 0.2976, decode.d0.loss_mask: 0.2048, decode.d0.loss_dice: 0.5856, decode.d1.loss_cls: 0.0856, decode.d1.loss_mask: 0.1992, decode.d1.loss_dice: 0.5685, decode.d2.loss_cls: 0.0750, decode.d2.loss_mask: 0.1989, decode.d2.loss_dice: 0.5657, decode.d3.loss_cls: 0.0684, decode.d3.loss_mask: 0.1979, decode.d3.loss_dice: 0.5587, decode.d4.loss_cls: 0.0719, 
decode.d4.loss_mask: 0.1982, decode.d4.loss_dice: 0.5539, decode.d5.loss_cls: 0.0690, decode.d5.loss_mask: 0.1977, decode.d5.loss_dice: 0.5583, decode.d6.loss_cls: 0.0694, decode.d6.loss_mask: 0.1974, decode.d6.loss_dice: 0.5551, decode.d7.loss_cls: 0.0699, decode.d7.loss_mask: 0.1978, decode.d7.loss_dice: 0.5578, decode.d8.loss_cls: 0.0670, decode.d8.loss_mask: 0.1970, decode.d8.loss_dice: 0.5575, loss: 8.5458 +2022-05-10 19:02:23,590 - mmseg - INFO - Iter [46250/80000] lr: 6.057e-07, eta: 21:08:55, time: 1.873, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0724, decode.loss_mask: 0.1981, decode.loss_dice: 0.5364, decode.d0.loss_cls: 0.3115, decode.d0.loss_mask: 0.2080, decode.d0.loss_dice: 0.5669, decode.d1.loss_cls: 0.0925, decode.d1.loss_mask: 0.1999, decode.d1.loss_dice: 0.5449, decode.d2.loss_cls: 0.0850, decode.d2.loss_mask: 0.1991, decode.d2.loss_dice: 0.5402, decode.d3.loss_cls: 0.0837, decode.d3.loss_mask: 0.1988, decode.d3.loss_dice: 0.5386, decode.d4.loss_cls: 0.0796, decode.d4.loss_mask: 0.1987, decode.d4.loss_dice: 0.5384, decode.d5.loss_cls: 0.0702, decode.d5.loss_mask: 0.1982, decode.d5.loss_dice: 0.5432, decode.d6.loss_cls: 0.0704, decode.d6.loss_mask: 0.1984, decode.d6.loss_dice: 0.5387, decode.d7.loss_cls: 0.0729, decode.d7.loss_mask: 0.1985, decode.d7.loss_dice: 0.5359, decode.d8.loss_cls: 0.0725, decode.d8.loss_mask: 0.1987, decode.d8.loss_dice: 0.5396, loss: 8.4295 +2022-05-10 19:03:55,013 - mmseg - INFO - Iter [46300/80000] lr: 6.048e-07, eta: 21:05:52, time: 1.828, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0605, decode.loss_mask: 0.1969, decode.loss_dice: 0.5395, decode.d0.loss_cls: 0.3012, decode.d0.loss_mask: 0.2045, decode.d0.loss_dice: 0.5647, decode.d1.loss_cls: 0.0802, decode.d1.loss_mask: 0.1993, decode.d1.loss_dice: 0.5513, decode.d2.loss_cls: 0.0742, decode.d2.loss_mask: 0.1981, decode.d2.loss_dice: 0.5462, decode.d3.loss_cls: 0.0675, decode.d3.loss_mask: 0.1976, decode.d3.loss_dice: 0.5428, decode.d4.loss_cls: 
0.0714, decode.d4.loss_mask: 0.1974, decode.d4.loss_dice: 0.5397, decode.d5.loss_cls: 0.0643, decode.d5.loss_mask: 0.1975, decode.d5.loss_dice: 0.5420, decode.d6.loss_cls: 0.0637, decode.d6.loss_mask: 0.1977, decode.d6.loss_dice: 0.5392, decode.d7.loss_cls: 0.0649, decode.d7.loss_mask: 0.1978, decode.d7.loss_dice: 0.5414, decode.d8.loss_cls: 0.0707, decode.d8.loss_mask: 0.1969, decode.d8.loss_dice: 0.5390, loss: 8.3483 +2022-05-10 19:05:26,528 - mmseg - INFO - Iter [46350/80000] lr: 6.039e-07, eta: 21:02:50, time: 1.830, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0661, decode.loss_mask: 0.1975, decode.loss_dice: 0.5515, decode.d0.loss_cls: 0.3132, decode.d0.loss_mask: 0.2061, decode.d0.loss_dice: 0.5831, decode.d1.loss_cls: 0.0915, decode.d1.loss_mask: 0.1987, decode.d1.loss_dice: 0.5586, decode.d2.loss_cls: 0.0820, decode.d2.loss_mask: 0.1985, decode.d2.loss_dice: 0.5540, decode.d3.loss_cls: 0.0725, decode.d3.loss_mask: 0.1980, decode.d3.loss_dice: 0.5523, decode.d4.loss_cls: 0.0708, decode.d4.loss_mask: 0.1985, decode.d4.loss_dice: 0.5502, decode.d5.loss_cls: 0.0777, decode.d5.loss_mask: 0.1973, decode.d5.loss_dice: 0.5496, decode.d6.loss_cls: 0.0699, decode.d6.loss_mask: 0.1972, decode.d6.loss_dice: 0.5495, decode.d7.loss_cls: 0.0728, decode.d7.loss_mask: 0.1973, decode.d7.loss_dice: 0.5502, decode.d8.loss_cls: 0.0704, decode.d8.loss_mask: 0.1976, decode.d8.loss_dice: 0.5498, loss: 8.5223 +2022-05-10 19:06:56,236 - mmseg - INFO - Iter [46400/80000] lr: 6.031e-07, eta: 20:59:44, time: 1.794, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0638, decode.loss_mask: 0.1903, decode.loss_dice: 0.5395, decode.d0.loss_cls: 0.3024, decode.d0.loss_mask: 0.1963, decode.d0.loss_dice: 0.5664, decode.d1.loss_cls: 0.0885, decode.d1.loss_mask: 0.1921, decode.d1.loss_dice: 0.5466, decode.d2.loss_cls: 0.0792, decode.d2.loss_mask: 0.1914, decode.d2.loss_dice: 0.5464, decode.d3.loss_cls: 0.0765, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5420, 
decode.d4.loss_cls: 0.0640, decode.d4.loss_mask: 0.1913, decode.d4.loss_dice: 0.5446, decode.d5.loss_cls: 0.0757, decode.d5.loss_mask: 0.1906, decode.d5.loss_dice: 0.5421, decode.d6.loss_cls: 0.0651, decode.d6.loss_mask: 0.1911, decode.d6.loss_dice: 0.5491, decode.d7.loss_cls: 0.0645, decode.d7.loss_mask: 0.1903, decode.d7.loss_dice: 0.5412, decode.d8.loss_cls: 0.0676, decode.d8.loss_mask: 0.1905, decode.d8.loss_dice: 0.5398, loss: 8.3196 +2022-05-10 19:08:30,400 - mmseg - INFO - Iter [46450/80000] lr: 6.022e-07, eta: 20:56:53, time: 1.883, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0643, decode.loss_mask: 0.2035, decode.loss_dice: 0.5313, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.2133, decode.d0.loss_dice: 0.5594, decode.d1.loss_cls: 0.0801, decode.d1.loss_mask: 0.2046, decode.d1.loss_dice: 0.5434, decode.d2.loss_cls: 0.0693, decode.d2.loss_mask: 0.2039, decode.d2.loss_dice: 0.5396, decode.d3.loss_cls: 0.0691, decode.d3.loss_mask: 0.2040, decode.d3.loss_dice: 0.5359, decode.d4.loss_cls: 0.0622, decode.d4.loss_mask: 0.2037, decode.d4.loss_dice: 0.5328, decode.d5.loss_cls: 0.0694, decode.d5.loss_mask: 0.2047, decode.d5.loss_dice: 0.5379, decode.d6.loss_cls: 0.0600, decode.d6.loss_mask: 0.2043, decode.d6.loss_dice: 0.5367, decode.d7.loss_cls: 0.0637, decode.d7.loss_mask: 0.2042, decode.d7.loss_dice: 0.5371, decode.d8.loss_cls: 0.0678, decode.d8.loss_mask: 0.2040, decode.d8.loss_dice: 0.5331, loss: 8.3402 +2022-05-10 19:10:00,392 - mmseg - INFO - Iter [46500/80000] lr: 6.013e-07, eta: 20:53:49, time: 1.800, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0670, decode.loss_mask: 0.1974, decode.loss_dice: 0.5296, decode.d0.loss_cls: 0.3038, decode.d0.loss_mask: 0.2054, decode.d0.loss_dice: 0.5590, decode.d1.loss_cls: 0.0995, decode.d1.loss_mask: 0.1990, decode.d1.loss_dice: 0.5426, decode.d2.loss_cls: 0.0839, decode.d2.loss_mask: 0.1987, decode.d2.loss_dice: 0.5434, decode.d3.loss_cls: 0.0708, decode.d3.loss_mask: 0.1983, decode.d3.loss_dice: 
0.5316, decode.d4.loss_cls: 0.0707, decode.d4.loss_mask: 0.1980, decode.d4.loss_dice: 0.5316, decode.d5.loss_cls: 0.0680, decode.d5.loss_mask: 0.1973, decode.d5.loss_dice: 0.5331, decode.d6.loss_cls: 0.0688, decode.d6.loss_mask: 0.1974, decode.d6.loss_dice: 0.5320, decode.d7.loss_cls: 0.0723, decode.d7.loss_mask: 0.1977, decode.d7.loss_dice: 0.5327, decode.d8.loss_cls: 0.0666, decode.d8.loss_mask: 0.1976, decode.d8.loss_dice: 0.5309, loss: 8.3249 +2022-05-10 19:11:31,186 - mmseg - INFO - Iter [46550/80000] lr: 6.004e-07, eta: 20:50:48, time: 1.815, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0649, decode.loss_mask: 0.2013, decode.loss_dice: 0.5464, decode.d0.loss_cls: 0.3127, decode.d0.loss_mask: 0.2107, decode.d0.loss_dice: 0.5701, decode.d1.loss_cls: 0.0920, decode.d1.loss_mask: 0.2039, decode.d1.loss_dice: 0.5520, decode.d2.loss_cls: 0.0832, decode.d2.loss_mask: 0.2025, decode.d2.loss_dice: 0.5496, decode.d3.loss_cls: 0.0748, decode.d3.loss_mask: 0.2018, decode.d3.loss_dice: 0.5438, decode.d4.loss_cls: 0.0766, decode.d4.loss_mask: 0.2015, decode.d4.loss_dice: 0.5459, decode.d5.loss_cls: 0.0740, decode.d5.loss_mask: 0.2019, decode.d5.loss_dice: 0.5461, decode.d6.loss_cls: 0.0710, decode.d6.loss_mask: 0.2009, decode.d6.loss_dice: 0.5427, decode.d7.loss_cls: 0.0626, decode.d7.loss_mask: 0.2017, decode.d7.loss_dice: 0.5457, decode.d8.loss_cls: 0.0673, decode.d8.loss_mask: 0.2016, decode.d8.loss_dice: 0.5463, loss: 8.4953 +2022-05-10 19:13:01,986 - mmseg - INFO - Iter [46600/80000] lr: 5.995e-07, eta: 20:47:48, time: 1.814, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0578, decode.loss_mask: 0.2010, decode.loss_dice: 0.5484, decode.d0.loss_cls: 0.2956, decode.d0.loss_mask: 0.2095, decode.d0.loss_dice: 0.5693, decode.d1.loss_cls: 0.0789, decode.d1.loss_mask: 0.2021, decode.d1.loss_dice: 0.5557, decode.d2.loss_cls: 0.0671, decode.d2.loss_mask: 0.2010, decode.d2.loss_dice: 0.5513, decode.d3.loss_cls: 0.0635, decode.d3.loss_mask: 0.2011, 
decode.d3.loss_dice: 0.5438, decode.d4.loss_cls: 0.0677, decode.d4.loss_mask: 0.2015, decode.d4.loss_dice: 0.5473, decode.d5.loss_cls: 0.0620, decode.d5.loss_mask: 0.2012, decode.d5.loss_dice: 0.5479, decode.d6.loss_cls: 0.0704, decode.d6.loss_mask: 0.2015, decode.d6.loss_dice: 0.5466, decode.d7.loss_cls: 0.0625, decode.d7.loss_mask: 0.2017, decode.d7.loss_dice: 0.5479, decode.d8.loss_cls: 0.0606, decode.d8.loss_mask: 0.2010, decode.d8.loss_dice: 0.5450, loss: 8.4110 +2022-05-10 19:14:35,370 - mmseg - INFO - Iter [46650/80000] lr: 5.986e-07, eta: 20:44:58, time: 1.870, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0743, decode.loss_mask: 0.2022, decode.loss_dice: 0.5508, decode.d0.loss_cls: 0.3099, decode.d0.loss_mask: 0.2109, decode.d0.loss_dice: 0.5782, decode.d1.loss_cls: 0.0906, decode.d1.loss_mask: 0.2047, decode.d1.loss_dice: 0.5666, decode.d2.loss_cls: 0.0829, decode.d2.loss_mask: 0.2025, decode.d2.loss_dice: 0.5548, decode.d3.loss_cls: 0.0793, decode.d3.loss_mask: 0.2032, decode.d3.loss_dice: 0.5585, decode.d4.loss_cls: 0.0784, decode.d4.loss_mask: 0.2033, decode.d4.loss_dice: 0.5504, decode.d5.loss_cls: 0.0813, decode.d5.loss_mask: 0.2029, decode.d5.loss_dice: 0.5553, decode.d6.loss_cls: 0.0722, decode.d6.loss_mask: 0.2032, decode.d6.loss_dice: 0.5513, decode.d7.loss_cls: 0.0788, decode.d7.loss_mask: 0.2022, decode.d7.loss_dice: 0.5500, decode.d8.loss_cls: 0.0727, decode.d8.loss_mask: 0.2026, decode.d8.loss_dice: 0.5494, loss: 8.6234 +2022-05-10 19:16:06,143 - mmseg - INFO - Iter [46700/80000] lr: 5.977e-07, eta: 20:42:00, time: 1.815, data_time: 0.015, memory: 69053, decode.loss_cls: 0.0693, decode.loss_mask: 0.2014, decode.loss_dice: 0.5584, decode.d0.loss_cls: 0.3133, decode.d0.loss_mask: 0.2090, decode.d0.loss_dice: 0.5872, decode.d1.loss_cls: 0.0929, decode.d1.loss_mask: 0.2028, decode.d1.loss_dice: 0.5692, decode.d2.loss_cls: 0.0768, decode.d2.loss_mask: 0.2021, decode.d2.loss_dice: 0.5666, decode.d3.loss_cls: 0.0766, decode.d3.loss_mask: 
0.2017, decode.d3.loss_dice: 0.5628, decode.d4.loss_cls: 0.0772, decode.d4.loss_mask: 0.2016, decode.d4.loss_dice: 0.5613, decode.d5.loss_cls: 0.0747, decode.d5.loss_mask: 0.2020, decode.d5.loss_dice: 0.5603, decode.d6.loss_cls: 0.0752, decode.d6.loss_mask: 0.2016, decode.d6.loss_dice: 0.5599, decode.d7.loss_cls: 0.0710, decode.d7.loss_mask: 0.2013, decode.d7.loss_dice: 0.5596, decode.d8.loss_cls: 0.0708, decode.d8.loss_mask: 0.2011, decode.d8.loss_dice: 0.5604, loss: 8.6682 +2022-05-10 19:17:36,968 - mmseg - INFO - Iter [46750/80000] lr: 5.968e-07, eta: 20:39:03, time: 1.816, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0641, decode.loss_mask: 0.2005, decode.loss_dice: 0.5451, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.2101, decode.d0.loss_dice: 0.5764, decode.d1.loss_cls: 0.0828, decode.d1.loss_mask: 0.2029, decode.d1.loss_dice: 0.5562, decode.d2.loss_cls: 0.0713, decode.d2.loss_mask: 0.2013, decode.d2.loss_dice: 0.5514, decode.d3.loss_cls: 0.0726, decode.d3.loss_mask: 0.2007, decode.d3.loss_dice: 0.5488, decode.d4.loss_cls: 0.0704, decode.d4.loss_mask: 0.2005, decode.d4.loss_dice: 0.5473, decode.d5.loss_cls: 0.0654, decode.d5.loss_mask: 0.2011, decode.d5.loss_dice: 0.5507, decode.d6.loss_cls: 0.0710, decode.d6.loss_mask: 0.2005, decode.d6.loss_dice: 0.5461, decode.d7.loss_cls: 0.0652, decode.d7.loss_mask: 0.2000, decode.d7.loss_dice: 0.5438, decode.d8.loss_cls: 0.0648, decode.d8.loss_mask: 0.2000, decode.d8.loss_dice: 0.5451, loss: 8.4541 +2022-05-10 19:19:10,084 - mmseg - INFO - Iter [46800/80000] lr: 5.959e-07, eta: 20:36:14, time: 1.863, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0623, decode.loss_mask: 0.1946, decode.loss_dice: 0.5561, decode.d0.loss_cls: 0.2902, decode.d0.loss_mask: 0.2022, decode.d0.loss_dice: 0.5798, decode.d1.loss_cls: 0.0720, decode.d1.loss_mask: 0.1960, decode.d1.loss_dice: 0.5643, decode.d2.loss_cls: 0.0695, decode.d2.loss_mask: 0.1940, decode.d2.loss_dice: 0.5582, decode.d3.loss_cls: 0.0658, 
decode.d3.loss_mask: 0.1940, decode.d3.loss_dice: 0.5537, decode.d4.loss_cls: 0.0682, decode.d4.loss_mask: 0.1938, decode.d4.loss_dice: 0.5545, decode.d5.loss_cls: 0.0640, decode.d5.loss_mask: 0.1938, decode.d5.loss_dice: 0.5521, decode.d6.loss_cls: 0.0638, decode.d6.loss_mask: 0.1933, decode.d6.loss_dice: 0.5525, decode.d7.loss_cls: 0.0648, decode.d7.loss_mask: 0.1937, decode.d7.loss_dice: 0.5538, decode.d8.loss_cls: 0.0670, decode.d8.loss_mask: 0.1938, decode.d8.loss_dice: 0.5541, loss: 8.4159 +2022-05-10 19:20:41,085 - mmseg - INFO - Iter [46850/80000] lr: 5.950e-07, eta: 20:33:19, time: 1.820, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0653, decode.loss_mask: 0.1937, decode.loss_dice: 0.5369, decode.d0.loss_cls: 0.2969, decode.d0.loss_mask: 0.2011, decode.d0.loss_dice: 0.5623, decode.d1.loss_cls: 0.0833, decode.d1.loss_mask: 0.1945, decode.d1.loss_dice: 0.5458, decode.d2.loss_cls: 0.0736, decode.d2.loss_mask: 0.1938, decode.d2.loss_dice: 0.5427, decode.d3.loss_cls: 0.0696, decode.d3.loss_mask: 0.1941, decode.d3.loss_dice: 0.5391, decode.d4.loss_cls: 0.0661, decode.d4.loss_mask: 0.1937, decode.d4.loss_dice: 0.5353, decode.d5.loss_cls: 0.0693, decode.d5.loss_mask: 0.1941, decode.d5.loss_dice: 0.5390, decode.d6.loss_cls: 0.0739, decode.d6.loss_mask: 0.1938, decode.d6.loss_dice: 0.5329, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.1935, decode.d7.loss_dice: 0.5346, decode.d8.loss_cls: 0.0666, decode.d8.loss_mask: 0.1942, decode.d8.loss_dice: 0.5394, loss: 8.2878 +2022-05-10 19:22:11,902 - mmseg - INFO - Iter [46900/80000] lr: 5.941e-07, eta: 20:30:24, time: 1.816, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0635, decode.loss_mask: 0.1950, decode.loss_dice: 0.5369, decode.d0.loss_cls: 0.3050, decode.d0.loss_mask: 0.2029, decode.d0.loss_dice: 0.5619, decode.d1.loss_cls: 0.0823, decode.d1.loss_mask: 0.1964, decode.d1.loss_dice: 0.5487, decode.d2.loss_cls: 0.0688, decode.d2.loss_mask: 0.1960, decode.d2.loss_dice: 0.5408, decode.d3.loss_cls: 
0.0628, decode.d3.loss_mask: 0.1954, decode.d3.loss_dice: 0.5371, decode.d4.loss_cls: 0.0624, decode.d4.loss_mask: 0.1947, decode.d4.loss_dice: 0.5389, decode.d5.loss_cls: 0.0687, decode.d5.loss_mask: 0.1949, decode.d5.loss_dice: 0.5378, decode.d6.loss_cls: 0.0625, decode.d6.loss_mask: 0.1947, decode.d6.loss_dice: 0.5358, decode.d7.loss_cls: 0.0641, decode.d7.loss_mask: 0.1947, decode.d7.loss_dice: 0.5373, decode.d8.loss_cls: 0.0666, decode.d8.loss_mask: 0.1950, decode.d8.loss_dice: 0.5429, loss: 8.2845 +2022-05-10 19:23:43,504 - mmseg - INFO - Iter [46950/80000] lr: 5.932e-07, eta: 20:27:32, time: 1.831, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0636, decode.loss_mask: 0.1953, decode.loss_dice: 0.5437, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2027, decode.d0.loss_dice: 0.5701, decode.d1.loss_cls: 0.0819, decode.d1.loss_mask: 0.1965, decode.d1.loss_dice: 0.5465, decode.d2.loss_cls: 0.0725, decode.d2.loss_mask: 0.1954, decode.d2.loss_dice: 0.5439, decode.d3.loss_cls: 0.0700, decode.d3.loss_mask: 0.1950, decode.d3.loss_dice: 0.5426, decode.d4.loss_cls: 0.0652, decode.d4.loss_mask: 0.1951, decode.d4.loss_dice: 0.5391, decode.d5.loss_cls: 0.0663, decode.d5.loss_mask: 0.1951, decode.d5.loss_dice: 0.5409, decode.d6.loss_cls: 0.0582, decode.d6.loss_mask: 0.1951, decode.d6.loss_dice: 0.5414, decode.d7.loss_cls: 0.0665, decode.d7.loss_mask: 0.1951, decode.d7.loss_dice: 0.5380, decode.d8.loss_cls: 0.0589, decode.d8.loss_mask: 0.1953, decode.d8.loss_dice: 0.5379, loss: 8.3145 +2022-05-10 19:25:15,519 - mmseg - INFO - Saving checkpoint at 47000 iterations +2022-05-10 19:25:49,260 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 19:25:49,269 - mmseg - INFO - Iter [47000/80000] lr: 5.923e-07, eta: 20:26:23, time: 2.513, data_time: 0.065, memory: 69053, decode.loss_cls: 0.0586, decode.loss_mask: 0.1969, decode.loss_dice: 0.5405, decode.d0.loss_cls: 0.3122, decode.d0.loss_mask: 0.2059, decode.d0.loss_dice: 0.5619, 
decode.d1.loss_cls: 0.0857, decode.d1.loss_mask: 0.1992, decode.d1.loss_dice: 0.5520, decode.d2.loss_cls: 0.0758, decode.d2.loss_mask: 0.1979, decode.d2.loss_dice: 0.5453, decode.d3.loss_cls: 0.0696, decode.d3.loss_mask: 0.1972, decode.d3.loss_dice: 0.5395, decode.d4.loss_cls: 0.0654, decode.d4.loss_mask: 0.1974, decode.d4.loss_dice: 0.5416, decode.d5.loss_cls: 0.0692, decode.d5.loss_mask: 0.1968, decode.d5.loss_dice: 0.5370, decode.d6.loss_cls: 0.0606, decode.d6.loss_mask: 0.1966, decode.d6.loss_dice: 0.5400, decode.d7.loss_cls: 0.0660, decode.d7.loss_mask: 0.1967, decode.d7.loss_dice: 0.5426, decode.d8.loss_cls: 0.0615, decode.d8.loss_mask: 0.1970, decode.d8.loss_dice: 0.5412, loss: 8.3476 +2022-05-10 19:27:45,161 - mmseg - INFO - per class results: +2022-05-10 19:27:45,169 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.55 | 99.29 | +| sidewalk | 88.39 | 93.71 | +| building | 94.38 | 96.96 | +| wall | 68.56 | 79.21 | +| fence | 77.95 | 87.32 | +| pole | 71.67 | 83.31 | +| traffic light | 77.32 | 87.22 | +| traffic sign | 84.09 | 90.26 | +| vegetation | 93.38 | 96.99 | +| terrain | 68.03 | 76.51 | +| sky | 95.86 | 98.4 | +| person | 86.82 | 93.63 | +| rider | 74.35 | 84.49 | +| car | 96.24 | 98.13 | +| truck | 91.88 | 94.6 | +| bus | 93.72 | 97.05 | +| train | 87.92 | 91.47 | +| motorcycle | 78.34 | 88.28 | +| bicycle | 82.86 | 92.49 | ++---------------+-------+-------+ +2022-05-10 19:27:45,169 - mmseg - INFO - Summary: +2022-05-10 19:27:45,170 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.02 | 84.75 | 91.02 | ++-------+-------+-------+ +2022-05-10 19:27:45,172 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_37000.pth was removed +2022-05-10 19:28:16,279 - mmseg - INFO - Now best checkpoint is saved 
as best_mIoU_iter_47000.pth. +2022-05-10 19:28:16,300 - mmseg - INFO - Best mIoU is 0.8475 at 47000 iter. +2022-05-10 19:28:16,313 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 19:28:16,314 - mmseg - INFO - Iter(val) [32] aAcc: 0.9702, mIoU: 0.8475, mAcc: 0.9102, IoU.road: 0.9855, IoU.sidewalk: 0.8839, IoU.building: 0.9438, IoU.wall: 0.6856, IoU.fence: 0.7795, IoU.pole: 0.7167, IoU.traffic light: 0.7732, IoU.traffic sign: 0.8409, IoU.vegetation: 0.9338, IoU.terrain: 0.6803, IoU.sky: 0.9586, IoU.person: 0.8682, IoU.rider: 0.7435, IoU.car: 0.9624, IoU.truck: 0.9188, IoU.bus: 0.9372, IoU.train: 0.8792, IoU.motorcycle: 0.7834, IoU.bicycle: 0.8286, Acc.road: 0.9929, Acc.sidewalk: 0.9371, Acc.building: 0.9696, Acc.wall: 0.7921, Acc.fence: 0.8732, Acc.pole: 0.8331, Acc.traffic light: 0.8722, Acc.traffic sign: 0.9026, Acc.vegetation: 0.9699, Acc.terrain: 0.7651, Acc.sky: 0.9840, Acc.person: 0.9363, Acc.rider: 0.8449, Acc.car: 0.9813, Acc.truck: 0.9460, Acc.bus: 0.9705, Acc.train: 0.9147, Acc.motorcycle: 0.8828, Acc.bicycle: 0.9249 +2022-05-10 19:29:47,794 - mmseg - INFO - Iter [47050/80000] lr: 5.914e-07, eta: 20:30:51, time: 4.774, data_time: 2.962, memory: 69053, decode.loss_cls: 0.0637, decode.loss_mask: 0.1941, decode.loss_dice: 0.5408, decode.d0.loss_cls: 0.3076, decode.d0.loss_mask: 0.2034, decode.d0.loss_dice: 0.5681, decode.d1.loss_cls: 0.0877, decode.d1.loss_mask: 0.1948, decode.d1.loss_dice: 0.5550, decode.d2.loss_cls: 0.0712, decode.d2.loss_mask: 0.1949, decode.d2.loss_dice: 0.5426, decode.d3.loss_cls: 0.0691, decode.d3.loss_mask: 0.1937, decode.d3.loss_dice: 0.5429, decode.d4.loss_cls: 0.0685, decode.d4.loss_mask: 0.1942, decode.d4.loss_dice: 0.5435, decode.d5.loss_cls: 0.0687, decode.d5.loss_mask: 0.1947, decode.d5.loss_dice: 0.5426, decode.d6.loss_cls: 0.0640, decode.d6.loss_mask: 0.1933, decode.d6.loss_dice: 0.5409, decode.d7.loss_cls: 0.0746, decode.d7.loss_mask: 0.1935, decode.d7.loss_dice: 0.5441, 
decode.d8.loss_cls: 0.0633, decode.d8.loss_mask: 0.1941, decode.d8.loss_dice: 0.5368, loss: 8.3463 +2022-05-10 19:31:19,267 - mmseg - INFO - Iter [47100/80000] lr: 5.905e-07, eta: 20:27:58, time: 1.829, data_time: 0.018, memory: 69053, decode.loss_cls: 0.0654, decode.loss_mask: 0.1995, decode.loss_dice: 0.5519, decode.d0.loss_cls: 0.3117, decode.d0.loss_mask: 0.2062, decode.d0.loss_dice: 0.5761, decode.d1.loss_cls: 0.0814, decode.d1.loss_mask: 0.2006, decode.d1.loss_dice: 0.5597, decode.d2.loss_cls: 0.0714, decode.d2.loss_mask: 0.2003, decode.d2.loss_dice: 0.5614, decode.d3.loss_cls: 0.0624, decode.d3.loss_mask: 0.1996, decode.d3.loss_dice: 0.5555, decode.d4.loss_cls: 0.0696, decode.d4.loss_mask: 0.1995, decode.d4.loss_dice: 0.5553, decode.d5.loss_cls: 0.0684, decode.d5.loss_mask: 0.1994, decode.d5.loss_dice: 0.5500, decode.d6.loss_cls: 0.0608, decode.d6.loss_mask: 0.1993, decode.d6.loss_dice: 0.5538, decode.d7.loss_cls: 0.0632, decode.d7.loss_mask: 0.2001, decode.d7.loss_dice: 0.5530, decode.d8.loss_cls: 0.0696, decode.d8.loss_mask: 0.1993, decode.d8.loss_dice: 0.5542, loss: 8.4987 +2022-05-10 19:32:48,608 - mmseg - INFO - Iter [47150/80000] lr: 5.896e-07, eta: 20:25:00, time: 1.787, data_time: 0.019, memory: 69053, decode.loss_cls: 0.0615, decode.loss_mask: 0.1911, decode.loss_dice: 0.5450, decode.d0.loss_cls: 0.3016, decode.d0.loss_mask: 0.1998, decode.d0.loss_dice: 0.5710, decode.d1.loss_cls: 0.0820, decode.d1.loss_mask: 0.1925, decode.d1.loss_dice: 0.5528, decode.d2.loss_cls: 0.0724, decode.d2.loss_mask: 0.1918, decode.d2.loss_dice: 0.5480, decode.d3.loss_cls: 0.0665, decode.d3.loss_mask: 0.1913, decode.d3.loss_dice: 0.5501, decode.d4.loss_cls: 0.0675, decode.d4.loss_mask: 0.1910, decode.d4.loss_dice: 0.5481, decode.d5.loss_cls: 0.0706, decode.d5.loss_mask: 0.1910, decode.d5.loss_dice: 0.5473, decode.d6.loss_cls: 0.0679, decode.d6.loss_mask: 0.1910, decode.d6.loss_dice: 0.5493, decode.d7.loss_cls: 0.0619, decode.d7.loss_mask: 0.1911, decode.d7.loss_dice: 
0.5431, decode.d8.loss_cls: 0.0681, decode.d8.loss_mask: 0.1907, decode.d8.loss_dice: 0.5465, loss: 8.3424 +2022-05-10 19:34:21,856 - mmseg - INFO - Iter [47200/80000] lr: 5.887e-07, eta: 20:22:13, time: 1.865, data_time: 0.064, memory: 69053, decode.loss_cls: 0.0746, decode.loss_mask: 0.2022, decode.loss_dice: 0.5398, decode.d0.loss_cls: 0.3040, decode.d0.loss_mask: 0.2109, decode.d0.loss_dice: 0.5724, decode.d1.loss_cls: 0.0941, decode.d1.loss_mask: 0.2050, decode.d1.loss_dice: 0.5514, decode.d2.loss_cls: 0.0818, decode.d2.loss_mask: 0.2036, decode.d2.loss_dice: 0.5439, decode.d3.loss_cls: 0.0842, decode.d3.loss_mask: 0.2031, decode.d3.loss_dice: 0.5413, decode.d4.loss_cls: 0.0776, decode.d4.loss_mask: 0.2032, decode.d4.loss_dice: 0.5425, decode.d5.loss_cls: 0.0761, decode.d5.loss_mask: 0.2034, decode.d5.loss_dice: 0.5419, decode.d6.loss_cls: 0.0796, decode.d6.loss_mask: 0.2027, decode.d6.loss_dice: 0.5395, decode.d7.loss_cls: 0.0727, decode.d7.loss_mask: 0.2025, decode.d7.loss_dice: 0.5404, decode.d8.loss_cls: 0.0704, decode.d8.loss_mask: 0.2028, decode.d8.loss_dice: 0.5452, loss: 8.5129 +2022-05-10 19:35:52,884 - mmseg - INFO - Iter [47250/80000] lr: 5.878e-07, eta: 20:19:21, time: 1.820, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0617, decode.loss_mask: 0.1975, decode.loss_dice: 0.5460, decode.d0.loss_cls: 0.3169, decode.d0.loss_mask: 0.2068, decode.d0.loss_dice: 0.5745, decode.d1.loss_cls: 0.0960, decode.d1.loss_mask: 0.1984, decode.d1.loss_dice: 0.5539, decode.d2.loss_cls: 0.0755, decode.d2.loss_mask: 0.1981, decode.d2.loss_dice: 0.5490, decode.d3.loss_cls: 0.0639, decode.d3.loss_mask: 0.1975, decode.d3.loss_dice: 0.5434, decode.d4.loss_cls: 0.0734, decode.d4.loss_mask: 0.1967, decode.d4.loss_dice: 0.5429, decode.d5.loss_cls: 0.0691, decode.d5.loss_mask: 0.1976, decode.d5.loss_dice: 0.5470, decode.d6.loss_cls: 0.0693, decode.d6.loss_mask: 0.1974, decode.d6.loss_dice: 0.5461, decode.d7.loss_cls: 0.0665, decode.d7.loss_mask: 0.1974, 
decode.d7.loss_dice: 0.5456, decode.d8.loss_cls: 0.0682, decode.d8.loss_mask: 0.1971, decode.d8.loss_dice: 0.5471, loss: 8.4405 +2022-05-10 19:37:22,375 - mmseg - INFO - Iter [47300/80000] lr: 5.869e-07, eta: 20:16:25, time: 1.790, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0696, decode.loss_mask: 0.1989, decode.loss_dice: 0.5458, decode.d0.loss_cls: 0.3004, decode.d0.loss_mask: 0.2083, decode.d0.loss_dice: 0.5766, decode.d1.loss_cls: 0.0883, decode.d1.loss_mask: 0.2010, decode.d1.loss_dice: 0.5516, decode.d2.loss_cls: 0.0844, decode.d2.loss_mask: 0.2004, decode.d2.loss_dice: 0.5528, decode.d3.loss_cls: 0.0741, decode.d3.loss_mask: 0.2000, decode.d3.loss_dice: 0.5497, decode.d4.loss_cls: 0.0694, decode.d4.loss_mask: 0.1998, decode.d4.loss_dice: 0.5512, decode.d5.loss_cls: 0.0756, decode.d5.loss_mask: 0.1999, decode.d5.loss_dice: 0.5513, decode.d6.loss_cls: 0.0723, decode.d6.loss_mask: 0.1993, decode.d6.loss_dice: 0.5491, decode.d7.loss_cls: 0.0656, decode.d7.loss_mask: 0.1993, decode.d7.loss_dice: 0.5471, decode.d8.loss_cls: 0.0698, decode.d8.loss_mask: 0.1993, decode.d8.loss_dice: 0.5465, loss: 8.4972 +2022-05-10 19:38:56,106 - mmseg - INFO - Iter [47350/80000] lr: 5.860e-07, eta: 20:13:42, time: 1.874, data_time: 0.063, memory: 69053, decode.loss_cls: 0.0543, decode.loss_mask: 0.1925, decode.loss_dice: 0.5488, decode.d0.loss_cls: 0.2985, decode.d0.loss_mask: 0.1987, decode.d0.loss_dice: 0.5809, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1938, decode.d1.loss_dice: 0.5576, decode.d2.loss_cls: 0.0641, decode.d2.loss_mask: 0.1927, decode.d2.loss_dice: 0.5561, decode.d3.loss_cls: 0.0599, decode.d3.loss_mask: 0.1927, decode.d3.loss_dice: 0.5499, decode.d4.loss_cls: 0.0570, decode.d4.loss_mask: 0.1925, decode.d4.loss_dice: 0.5515, decode.d5.loss_cls: 0.0592, decode.d5.loss_mask: 0.1930, decode.d5.loss_dice: 0.5547, decode.d6.loss_cls: 0.0627, decode.d6.loss_mask: 0.1925, decode.d6.loss_dice: 0.5523, decode.d7.loss_cls: 0.0518, decode.d7.loss_mask: 
0.1923, decode.d7.loss_dice: 0.5506, decode.d8.loss_cls: 0.0611, decode.d8.loss_mask: 0.1918, decode.d8.loss_dice: 0.5502, loss: 8.3266 +2022-05-10 19:40:26,197 - mmseg - INFO - Iter [47400/80000] lr: 5.851e-07, eta: 20:10:49, time: 1.802, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0565, decode.loss_mask: 0.2012, decode.loss_dice: 0.5380, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2101, decode.d0.loss_dice: 0.5617, decode.d1.loss_cls: 0.0793, decode.d1.loss_mask: 0.2027, decode.d1.loss_dice: 0.5475, decode.d2.loss_cls: 0.0678, decode.d2.loss_mask: 0.2019, decode.d2.loss_dice: 0.5454, decode.d3.loss_cls: 0.0593, decode.d3.loss_mask: 0.2019, decode.d3.loss_dice: 0.5399, decode.d4.loss_cls: 0.0595, decode.d4.loss_mask: 0.2013, decode.d4.loss_dice: 0.5404, decode.d5.loss_cls: 0.0552, decode.d5.loss_mask: 0.2020, decode.d5.loss_dice: 0.5417, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.2020, decode.d6.loss_dice: 0.5376, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.2018, decode.d7.loss_dice: 0.5417, decode.d8.loss_cls: 0.0566, decode.d8.loss_mask: 0.2012, decode.d8.loss_dice: 0.5369, loss: 8.3033 +2022-05-10 19:41:55,318 - mmseg - INFO - Iter [47450/80000] lr: 5.842e-07, eta: 20:07:54, time: 1.782, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0643, decode.loss_mask: 0.1983, decode.loss_dice: 0.5508, decode.d0.loss_cls: 0.2990, decode.d0.loss_mask: 0.2071, decode.d0.loss_dice: 0.5756, decode.d1.loss_cls: 0.0810, decode.d1.loss_mask: 0.2000, decode.d1.loss_dice: 0.5572, decode.d2.loss_cls: 0.0717, decode.d2.loss_mask: 0.2002, decode.d2.loss_dice: 0.5548, decode.d3.loss_cls: 0.0653, decode.d3.loss_mask: 0.1996, decode.d3.loss_dice: 0.5500, decode.d4.loss_cls: 0.0644, decode.d4.loss_mask: 0.1994, decode.d4.loss_dice: 0.5519, decode.d5.loss_cls: 0.0619, decode.d5.loss_mask: 0.1992, decode.d5.loss_dice: 0.5504, decode.d6.loss_cls: 0.0640, decode.d6.loss_mask: 0.1989, decode.d6.loss_dice: 0.5462, decode.d7.loss_cls: 0.0682, 
decode.d7.loss_mask: 0.1990, decode.d7.loss_dice: 0.5497, decode.d8.loss_cls: 0.0637, decode.d8.loss_mask: 0.1988, decode.d8.loss_dice: 0.5498, loss: 8.4405 +2022-05-10 19:43:25,155 - mmseg - INFO - Iter [47500/80000] lr: 5.833e-07, eta: 20:05:02, time: 1.796, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0606, decode.loss_mask: 0.1972, decode.loss_dice: 0.5408, decode.d0.loss_cls: 0.3068, decode.d0.loss_mask: 0.2040, decode.d0.loss_dice: 0.5680, decode.d1.loss_cls: 0.0818, decode.d1.loss_mask: 0.1987, decode.d1.loss_dice: 0.5525, decode.d2.loss_cls: 0.0662, decode.d2.loss_mask: 0.1984, decode.d2.loss_dice: 0.5494, decode.d3.loss_cls: 0.0636, decode.d3.loss_mask: 0.1979, decode.d3.loss_dice: 0.5438, decode.d4.loss_cls: 0.0670, decode.d4.loss_mask: 0.1970, decode.d4.loss_dice: 0.5436, decode.d5.loss_cls: 0.0635, decode.d5.loss_mask: 0.1971, decode.d5.loss_dice: 0.5454, decode.d6.loss_cls: 0.0591, decode.d6.loss_mask: 0.1971, decode.d6.loss_dice: 0.5424, decode.d7.loss_cls: 0.0589, decode.d7.loss_mask: 0.1972, decode.d7.loss_dice: 0.5393, decode.d8.loss_cls: 0.0600, decode.d8.loss_mask: 0.1970, decode.d8.loss_dice: 0.5413, loss: 8.3355 +2022-05-10 19:44:57,456 - mmseg - INFO - Iter [47550/80000] lr: 5.824e-07, eta: 20:02:18, time: 1.847, data_time: 0.066, memory: 69053, decode.loss_cls: 0.0644, decode.loss_mask: 0.1978, decode.loss_dice: 0.5429, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2066, decode.d0.loss_dice: 0.5706, decode.d1.loss_cls: 0.0784, decode.d1.loss_mask: 0.1992, decode.d1.loss_dice: 0.5523, decode.d2.loss_cls: 0.0739, decode.d2.loss_mask: 0.1985, decode.d2.loss_dice: 0.5553, decode.d3.loss_cls: 0.0691, decode.d3.loss_mask: 0.1975, decode.d3.loss_dice: 0.5492, decode.d4.loss_cls: 0.0671, decode.d4.loss_mask: 0.1976, decode.d4.loss_dice: 0.5455, decode.d5.loss_cls: 0.0676, decode.d5.loss_mask: 0.1977, decode.d5.loss_dice: 0.5484, decode.d6.loss_cls: 0.0659, decode.d6.loss_mask: 0.1979, decode.d6.loss_dice: 0.5451, decode.d7.loss_cls: 
0.0661, decode.d7.loss_mask: 0.1975, decode.d7.loss_dice: 0.5439, decode.d8.loss_cls: 0.0662, decode.d8.loss_mask: 0.1980, decode.d8.loss_dice: 0.5461, loss: 8.4131 +2022-05-10 19:46:27,673 - mmseg - INFO - Iter [47600/80000] lr: 5.815e-07, eta: 19:59:28, time: 1.804, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0588, decode.loss_mask: 0.1944, decode.loss_dice: 0.5327, decode.d0.loss_cls: 0.3024, decode.d0.loss_mask: 0.2017, decode.d0.loss_dice: 0.5624, decode.d1.loss_cls: 0.0724, decode.d1.loss_mask: 0.1964, decode.d1.loss_dice: 0.5424, decode.d2.loss_cls: 0.0643, decode.d2.loss_mask: 0.1956, decode.d2.loss_dice: 0.5392, decode.d3.loss_cls: 0.0639, decode.d3.loss_mask: 0.1946, decode.d3.loss_dice: 0.5375, decode.d4.loss_cls: 0.0596, decode.d4.loss_mask: 0.1945, decode.d4.loss_dice: 0.5374, decode.d5.loss_cls: 0.0635, decode.d5.loss_mask: 0.1952, decode.d5.loss_dice: 0.5418, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1945, decode.d6.loss_dice: 0.5342, decode.d7.loss_cls: 0.0651, decode.d7.loss_mask: 0.1947, decode.d7.loss_dice: 0.5369, decode.d8.loss_cls: 0.0596, decode.d8.loss_mask: 0.1943, decode.d8.loss_dice: 0.5332, loss: 8.2214 +2022-05-10 19:47:57,683 - mmseg - INFO - Iter [47650/80000] lr: 5.806e-07, eta: 19:56:39, time: 1.800, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0652, decode.loss_mask: 0.1970, decode.loss_dice: 0.5488, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.2068, decode.d0.loss_dice: 0.5776, decode.d1.loss_cls: 0.0859, decode.d1.loss_mask: 0.2001, decode.d1.loss_dice: 0.5554, decode.d2.loss_cls: 0.0755, decode.d2.loss_mask: 0.1981, decode.d2.loss_dice: 0.5513, decode.d3.loss_cls: 0.0648, decode.d3.loss_mask: 0.1981, decode.d3.loss_dice: 0.5510, decode.d4.loss_cls: 0.0723, decode.d4.loss_mask: 0.1977, decode.d4.loss_dice: 0.5543, decode.d5.loss_cls: 0.0687, decode.d5.loss_mask: 0.1984, decode.d5.loss_dice: 0.5500, decode.d6.loss_cls: 0.0666, decode.d6.loss_mask: 0.1980, decode.d6.loss_dice: 0.5496, 
decode.d7.loss_cls: 0.0704, decode.d7.loss_mask: 0.1973, decode.d7.loss_dice: 0.5471, decode.d8.loss_cls: 0.0704, decode.d8.loss_mask: 0.1973, decode.d8.loss_dice: 0.5472, loss: 8.4675 +2022-05-10 19:49:27,544 - mmseg - INFO - Iter [47700/80000] lr: 5.797e-07, eta: 19:53:49, time: 1.797, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0603, decode.loss_mask: 0.1975, decode.loss_dice: 0.5334, decode.d0.loss_cls: 0.3039, decode.d0.loss_mask: 0.2055, decode.d0.loss_dice: 0.5656, decode.d1.loss_cls: 0.0758, decode.d1.loss_mask: 0.1988, decode.d1.loss_dice: 0.5443, decode.d2.loss_cls: 0.0690, decode.d2.loss_mask: 0.1977, decode.d2.loss_dice: 0.5439, decode.d3.loss_cls: 0.0625, decode.d3.loss_mask: 0.1972, decode.d3.loss_dice: 0.5360, decode.d4.loss_cls: 0.0651, decode.d4.loss_mask: 0.1977, decode.d4.loss_dice: 0.5387, decode.d5.loss_cls: 0.0657, decode.d5.loss_mask: 0.1973, decode.d5.loss_dice: 0.5384, decode.d6.loss_cls: 0.0605, decode.d6.loss_mask: 0.1978, decode.d6.loss_dice: 0.5367, decode.d7.loss_cls: 0.0620, decode.d7.loss_mask: 0.1975, decode.d7.loss_dice: 0.5361, decode.d8.loss_cls: 0.0607, decode.d8.loss_mask: 0.1971, decode.d8.loss_dice: 0.5387, loss: 8.2812 +2022-05-10 19:51:01,128 - mmseg - INFO - Iter [47750/80000] lr: 5.788e-07, eta: 19:51:11, time: 1.872, data_time: 0.062, memory: 69053, decode.loss_cls: 0.0661, decode.loss_mask: 0.1951, decode.loss_dice: 0.5461, decode.d0.loss_cls: 0.2874, decode.d0.loss_mask: 0.2007, decode.d0.loss_dice: 0.5701, decode.d1.loss_cls: 0.0869, decode.d1.loss_mask: 0.1963, decode.d1.loss_dice: 0.5524, decode.d2.loss_cls: 0.0711, decode.d2.loss_mask: 0.1951, decode.d2.loss_dice: 0.5497, decode.d3.loss_cls: 0.0683, decode.d3.loss_mask: 0.1953, decode.d3.loss_dice: 0.5469, decode.d4.loss_cls: 0.0736, decode.d4.loss_mask: 0.1952, decode.d4.loss_dice: 0.5444, decode.d5.loss_cls: 0.0651, decode.d5.loss_mask: 0.1952, decode.d5.loss_dice: 0.5470, decode.d6.loss_cls: 0.0636, decode.d6.loss_mask: 0.1953, decode.d6.loss_dice: 
0.5473, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.1952, decode.d7.loss_dice: 0.5440, decode.d8.loss_cls: 0.0616, decode.d8.loss_mask: 0.1951, decode.d8.loss_dice: 0.5449, loss: 8.3639 +2022-05-10 19:52:31,907 - mmseg - INFO - Iter [47800/80000] lr: 5.779e-07, eta: 19:48:26, time: 1.816, data_time: 0.017, memory: 69053, decode.loss_cls: 0.0614, decode.loss_mask: 0.1923, decode.loss_dice: 0.5254, decode.d0.loss_cls: 0.3208, decode.d0.loss_mask: 0.1995, decode.d0.loss_dice: 0.5492, decode.d1.loss_cls: 0.0744, decode.d1.loss_mask: 0.1939, decode.d1.loss_dice: 0.5354, decode.d2.loss_cls: 0.0704, decode.d2.loss_mask: 0.1924, decode.d2.loss_dice: 0.5307, decode.d3.loss_cls: 0.0593, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5251, decode.d4.loss_cls: 0.0593, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5221, decode.d5.loss_cls: 0.0615, decode.d5.loss_mask: 0.1924, decode.d5.loss_dice: 0.5271, decode.d6.loss_cls: 0.0605, decode.d6.loss_mask: 0.1917, decode.d6.loss_dice: 0.5205, decode.d7.loss_cls: 0.0606, decode.d7.loss_mask: 0.1925, decode.d7.loss_dice: 0.5255, decode.d8.loss_cls: 0.0580, decode.d8.loss_mask: 0.1926, decode.d8.loss_dice: 0.5240, loss: 8.1033 +2022-05-10 19:54:01,039 - mmseg - INFO - Iter [47850/80000] lr: 5.770e-07, eta: 19:45:36, time: 1.783, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0720, decode.loss_mask: 0.1985, decode.loss_dice: 0.5494, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.2074, decode.d0.loss_dice: 0.5758, decode.d1.loss_cls: 0.0875, decode.d1.loss_mask: 0.2006, decode.d1.loss_dice: 0.5543, decode.d2.loss_cls: 0.0759, decode.d2.loss_mask: 0.2001, decode.d2.loss_dice: 0.5526, decode.d3.loss_cls: 0.0722, decode.d3.loss_mask: 0.1993, decode.d3.loss_dice: 0.5477, decode.d4.loss_cls: 0.0756, decode.d4.loss_mask: 0.1985, decode.d4.loss_dice: 0.5463, decode.d5.loss_cls: 0.0700, decode.d5.loss_mask: 0.1986, decode.d5.loss_dice: 0.5518, decode.d6.loss_cls: 0.0701, decode.d6.loss_mask: 0.1982, 
decode.d6.loss_dice: 0.5478, decode.d7.loss_cls: 0.0748, decode.d7.loss_mask: 0.1985, decode.d7.loss_dice: 0.5506, decode.d8.loss_cls: 0.0703, decode.d8.loss_mask: 0.1983, decode.d8.loss_dice: 0.5525, loss: 8.4982 +2022-05-10 19:55:31,445 - mmseg - INFO - Iter [47900/80000] lr: 5.761e-07, eta: 19:42:51, time: 1.808, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0650, decode.loss_mask: 0.1929, decode.loss_dice: 0.5434, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 0.2011, decode.d0.loss_dice: 0.5719, decode.d1.loss_cls: 0.0700, decode.d1.loss_mask: 0.1944, decode.d1.loss_dice: 0.5525, decode.d2.loss_cls: 0.0664, decode.d2.loss_mask: 0.1930, decode.d2.loss_dice: 0.5474, decode.d3.loss_cls: 0.0662, decode.d3.loss_mask: 0.1932, decode.d3.loss_dice: 0.5431, decode.d4.loss_cls: 0.0603, decode.d4.loss_mask: 0.1932, decode.d4.loss_dice: 0.5436, decode.d5.loss_cls: 0.0628, decode.d5.loss_mask: 0.1937, decode.d5.loss_dice: 0.5457, decode.d6.loss_cls: 0.0668, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5466, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1930, decode.d7.loss_dice: 0.5438, decode.d8.loss_cls: 0.0603, decode.d8.loss_mask: 0.1924, decode.d8.loss_dice: 0.5413, loss: 8.2895 +2022-05-10 19:57:02,294 - mmseg - INFO - Iter [47950/80000] lr: 5.752e-07, eta: 19:40:08, time: 1.817, data_time: 0.063, memory: 69053, decode.loss_cls: 0.0553, decode.loss_mask: 0.2001, decode.loss_dice: 0.5351, decode.d0.loss_cls: 0.3014, decode.d0.loss_mask: 0.2084, decode.d0.loss_dice: 0.5590, decode.d1.loss_cls: 0.0757, decode.d1.loss_mask: 0.2010, decode.d1.loss_dice: 0.5464, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.2008, decode.d2.loss_dice: 0.5431, decode.d3.loss_cls: 0.0647, decode.d3.loss_mask: 0.2004, decode.d3.loss_dice: 0.5386, decode.d4.loss_cls: 0.0571, decode.d4.loss_mask: 0.2000, decode.d4.loss_dice: 0.5374, decode.d5.loss_cls: 0.0569, decode.d5.loss_mask: 0.2003, decode.d5.loss_dice: 0.5390, decode.d6.loss_cls: 0.0548, decode.d6.loss_mask: 
0.2003, decode.d6.loss_dice: 0.5345, decode.d7.loss_cls: 0.0607, decode.d7.loss_mask: 0.1998, decode.d7.loss_dice: 0.5356, decode.d8.loss_cls: 0.0583, decode.d8.loss_mask: 0.1999, decode.d8.loss_dice: 0.5377, loss: 8.2628 +2022-05-10 19:58:32,800 - mmseg - INFO - Saving checkpoint at 48000 iterations +2022-05-10 19:59:04,497 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 19:59:04,507 - mmseg - INFO - Iter [48000/80000] lr: 5.743e-07, eta: 19:38:48, time: 2.441, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0604, decode.loss_mask: 0.1960, decode.loss_dice: 0.5320, decode.d0.loss_cls: 0.3027, decode.d0.loss_mask: 0.2043, decode.d0.loss_dice: 0.5587, decode.d1.loss_cls: 0.0783, decode.d1.loss_mask: 0.1969, decode.d1.loss_dice: 0.5386, decode.d2.loss_cls: 0.0735, decode.d2.loss_mask: 0.1965, decode.d2.loss_dice: 0.5385, decode.d3.loss_cls: 0.0668, decode.d3.loss_mask: 0.1965, decode.d3.loss_dice: 0.5330, decode.d4.loss_cls: 0.0653, decode.d4.loss_mask: 0.1963, decode.d4.loss_dice: 0.5367, decode.d5.loss_cls: 0.0643, decode.d5.loss_mask: 0.1962, decode.d5.loss_dice: 0.5361, decode.d6.loss_cls: 0.0659, decode.d6.loss_mask: 0.1955, decode.d6.loss_dice: 0.5340, decode.d7.loss_cls: 0.0606, decode.d7.loss_mask: 0.1956, decode.d7.loss_dice: 0.5322, decode.d8.loss_cls: 0.0594, decode.d8.loss_mask: 0.1957, decode.d8.loss_dice: 0.5325, loss: 8.2391 +2022-05-10 20:01:00,255 - mmseg - INFO - per class results: +2022-05-10 20:01:00,267 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.56 | 99.17 | +| sidewalk | 88.24 | 94.07 | +| building | 94.36 | 97.02 | +| wall | 68.73 | 79.45 | +| fence | 73.97 | 81.83 | +| pole | 71.42 | 84.8 | +| traffic light | 76.95 | 87.53 | +| traffic sign | 84.02 | 90.47 | +| vegetation | 93.43 | 97.0 | +| terrain | 68.58 | 75.48 | +| sky | 95.89 | 98.3 | +| person | 87.04 | 93.44 | +| rider | 74.67 | 84.83 | +| car | 96.15 | 
98.24 | +| truck | 81.81 | 94.9 | +| bus | 93.95 | 96.89 | +| train | 88.16 | 91.27 | +| motorcycle | 78.0 | 87.67 | +| bicycle | 82.87 | 91.94 | ++---------------+-------+-------+ +2022-05-10 20:01:00,268 - mmseg - INFO - Summary: +2022-05-10 20:01:00,268 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.99 | 84.04 | 90.75 | ++-------+-------+-------+ +2022-05-10 20:01:00,272 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 20:01:00,272 - mmseg - INFO - Iter(val) [32] aAcc: 0.9699, mIoU: 0.8404, mAcc: 0.9075, IoU.road: 0.9856, IoU.sidewalk: 0.8824, IoU.building: 0.9436, IoU.wall: 0.6873, IoU.fence: 0.7397, IoU.pole: 0.7142, IoU.traffic light: 0.7695, IoU.traffic sign: 0.8402, IoU.vegetation: 0.9343, IoU.terrain: 0.6858, IoU.sky: 0.9589, IoU.person: 0.8704, IoU.rider: 0.7467, IoU.car: 0.9615, IoU.truck: 0.8181, IoU.bus: 0.9395, IoU.train: 0.8816, IoU.motorcycle: 0.7800, IoU.bicycle: 0.8287, Acc.road: 0.9917, Acc.sidewalk: 0.9407, Acc.building: 0.9702, Acc.wall: 0.7945, Acc.fence: 0.8183, Acc.pole: 0.8480, Acc.traffic light: 0.8753, Acc.traffic sign: 0.9047, Acc.vegetation: 0.9700, Acc.terrain: 0.7548, Acc.sky: 0.9830, Acc.person: 0.9344, Acc.rider: 0.8483, Acc.car: 0.9824, Acc.truck: 0.9490, Acc.bus: 0.9689, Acc.train: 0.9127, Acc.motorcycle: 0.8767, Acc.bicycle: 0.9194 +2022-05-10 20:02:32,192 - mmseg - INFO - Iter [48050/80000] lr: 5.734e-07, eta: 19:41:16, time: 4.156, data_time: 2.335, memory: 69053, decode.loss_cls: 0.0599, decode.loss_mask: 0.1917, decode.loss_dice: 0.5428, decode.d0.loss_cls: 0.3054, decode.d0.loss_mask: 0.1985, decode.d0.loss_dice: 0.5676, decode.d1.loss_cls: 0.0840, decode.d1.loss_mask: 0.1935, decode.d1.loss_dice: 0.5518, decode.d2.loss_cls: 0.0722, decode.d2.loss_mask: 0.1917, decode.d2.loss_dice: 0.5488, decode.d3.loss_cls: 0.0692, decode.d3.loss_mask: 0.1911, decode.d3.loss_dice: 0.5453, decode.d4.loss_cls: 0.0729, decode.d4.loss_mask: 
0.1915, decode.d4.loss_dice: 0.5459, decode.d5.loss_cls: 0.0688, decode.d5.loss_mask: 0.1914, decode.d5.loss_dice: 0.5445, decode.d6.loss_cls: 0.0623, decode.d6.loss_mask: 0.1909, decode.d6.loss_dice: 0.5433, decode.d7.loss_cls: 0.0665, decode.d7.loss_mask: 0.1912, decode.d7.loss_dice: 0.5452, decode.d8.loss_cls: 0.0641, decode.d8.loss_mask: 0.1914, decode.d8.loss_dice: 0.5405, loss: 8.3238 +2022-05-10 20:04:04,591 - mmseg - INFO - Iter [48100/80000] lr: 5.725e-07, eta: 19:38:36, time: 1.848, data_time: 0.062, memory: 69053, decode.loss_cls: 0.0615, decode.loss_mask: 0.1958, decode.loss_dice: 0.5332, decode.d0.loss_cls: 0.3048, decode.d0.loss_mask: 0.2037, decode.d0.loss_dice: 0.5564, decode.d1.loss_cls: 0.0793, decode.d1.loss_mask: 0.1974, decode.d1.loss_dice: 0.5393, decode.d2.loss_cls: 0.0661, decode.d2.loss_mask: 0.1969, decode.d2.loss_dice: 0.5381, decode.d3.loss_cls: 0.0684, decode.d3.loss_mask: 0.1964, decode.d3.loss_dice: 0.5376, decode.d4.loss_cls: 0.0643, decode.d4.loss_mask: 0.1967, decode.d4.loss_dice: 0.5384, decode.d5.loss_cls: 0.0649, decode.d5.loss_mask: 0.1965, decode.d5.loss_dice: 0.5368, decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1962, decode.d6.loss_dice: 0.5394, decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.1961, decode.d7.loss_dice: 0.5347, decode.d8.loss_cls: 0.0628, decode.d8.loss_mask: 0.1961, decode.d8.loss_dice: 0.5331, loss: 8.2543 +2022-05-10 20:05:35,647 - mmseg - INFO - Iter [48150/80000] lr: 5.716e-07, eta: 19:35:53, time: 1.821, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0603, decode.loss_mask: 0.1956, decode.loss_dice: 0.5324, decode.d0.loss_cls: 0.3024, decode.d0.loss_mask: 0.2027, decode.d0.loss_dice: 0.5510, decode.d1.loss_cls: 0.0677, decode.d1.loss_mask: 0.1969, decode.d1.loss_dice: 0.5410, decode.d2.loss_cls: 0.0694, decode.d2.loss_mask: 0.1973, decode.d2.loss_dice: 0.5362, decode.d3.loss_cls: 0.0647, decode.d3.loss_mask: 0.1963, decode.d3.loss_dice: 0.5316, decode.d4.loss_cls: 0.0590, 
decode.d4.loss_mask: 0.1961, decode.d4.loss_dice: 0.5326, decode.d5.loss_cls: 0.0640, decode.d5.loss_mask: 0.1961, decode.d5.loss_dice: 0.5337, decode.d6.loss_cls: 0.0575, decode.d6.loss_mask: 0.1954, decode.d6.loss_dice: 0.5302, decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.1957, decode.d7.loss_dice: 0.5319, decode.d8.loss_cls: 0.0639, decode.d8.loss_mask: 0.1962, decode.d8.loss_dice: 0.5280, loss: 8.1906 +2022-05-10 20:07:05,540 - mmseg - INFO - Iter [48200/80000] lr: 5.707e-07, eta: 19:33:08, time: 1.798, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0665, decode.loss_mask: 0.1928, decode.loss_dice: 0.5442, decode.d0.loss_cls: 0.3029, decode.d0.loss_mask: 0.2007, decode.d0.loss_dice: 0.5706, decode.d1.loss_cls: 0.0843, decode.d1.loss_mask: 0.1938, decode.d1.loss_dice: 0.5522, decode.d2.loss_cls: 0.0775, decode.d2.loss_mask: 0.1937, decode.d2.loss_dice: 0.5466, decode.d3.loss_cls: 0.0710, decode.d3.loss_mask: 0.1931, decode.d3.loss_dice: 0.5423, decode.d4.loss_cls: 0.0751, decode.d4.loss_mask: 0.1924, decode.d4.loss_dice: 0.5443, decode.d5.loss_cls: 0.0658, decode.d5.loss_mask: 0.1928, decode.d5.loss_dice: 0.5438, decode.d6.loss_cls: 0.0650, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5436, decode.d7.loss_cls: 0.0634, decode.d7.loss_mask: 0.1929, decode.d7.loss_dice: 0.5394, decode.d8.loss_cls: 0.0658, decode.d8.loss_mask: 0.1927, decode.d8.loss_dice: 0.5421, loss: 8.3442 +2022-05-10 20:08:35,444 - mmseg - INFO - Iter [48250/80000] lr: 5.698e-07, eta: 19:30:24, time: 1.798, data_time: 0.016, memory: 69053, decode.loss_cls: 0.0586, decode.loss_mask: 0.1973, decode.loss_dice: 0.5407, decode.d0.loss_cls: 0.2956, decode.d0.loss_mask: 0.2058, decode.d0.loss_dice: 0.5638, decode.d1.loss_cls: 0.0678, decode.d1.loss_mask: 0.1989, decode.d1.loss_dice: 0.5468, decode.d2.loss_cls: 0.0648, decode.d2.loss_mask: 0.1983, decode.d2.loss_dice: 0.5422, decode.d3.loss_cls: 0.0638, decode.d3.loss_mask: 0.1970, decode.d3.loss_dice: 0.5404, decode.d4.loss_cls: 
0.0623, decode.d4.loss_mask: 0.1973, decode.d4.loss_dice: 0.5418, decode.d5.loss_cls: 0.0564, decode.d5.loss_mask: 0.1970, decode.d5.loss_dice: 0.5399, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1973, decode.d6.loss_dice: 0.5380, decode.d7.loss_cls: 0.0585, decode.d7.loss_mask: 0.1972, decode.d7.loss_dice: 0.5371, decode.d8.loss_cls: 0.0576, decode.d8.loss_mask: 0.1971, decode.d8.loss_dice: 0.5385, loss: 8.2524 +2022-05-10 20:11:09,436 - mmseg - INFO - Environment info: +------------------------------------------------------------ +sys.platform: linux +Python: 3.7.11 (default, Jul 27 2021, 14:32:16) [GCC 7.5.0] +CUDA available: True +GPU 0,1,2,3,4,5,6,7: A100-SXM-80GB +CUDA_HOME: /mnt/lustre/share/cuda-11.1 +NVCC: Build cuda_11.1.TC455_06.29069683_0 +GCC: gcc (GCC) 5.4.0 +PyTorch: 1.9.0+cu111 +PyTorch compiling details: PyTorch built with: + - GCC 7.3 + - C++ Version: 201402 + - Intel(R) Math Kernel Library Version 2020.0.0 Product Build 20191122 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v2.1.2 (Git Hash 98be7e8afa711dc9b66c8ff3504129cb82013cdb) + - OpenMP 201511 (a.k.a. 
OpenMP 4.5) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.1 + - NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86 + - CuDNN 8.0.5 + - Magma 2.5.2 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.1, CUDNN_VERSION=8.0.5, CXX_COMPILER=/opt/rh/devtoolset-7/root/usr/bin/c++, CXX_FLAGS= -Wno-deprecated -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -fopenmp -DNDEBUG -DUSE_KINETO -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wno-narrowing -Wall -Wextra -Werror=return-type -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-sign-compare -Wno-unused-parameter -Wno-unused-variable -Wno-unused-function -Wno-unused-result -Wno-unused-local-typedefs -Wno-strict-overflow -Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=1.9.0, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, + +TorchVision: 0.10.0+cu111 +OpenCV: 4.5.5 +MMCV: 1.4.2 +MMCV Compiler: GCC 7.3 +MMCV CUDA Compiler: 11.1 +MMSegmentation: 0.20.2+ +------------------------------------------------------------ + +2022-05-10 20:11:09,437 - mmseg - INFO - Distributed training: True +2022-05-10 20:11:10,012 - mmseg - INFO - Config: +num_things_classes = 8 +num_stuff_classes = 11 
+num_classes = 19 +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + type='EncoderDecoderMask2Former', + pretrained='pretrained/beit_large_patch16_224_pt22k_ft22k.pth', + backbone=dict( + type='BEiTAdapter', + patch_size=16, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + qkv_bias=True, + use_abs_pos_emb=False, + use_rel_pos_bias=True, + img_size=896, + init_values=1e-06, + drop_path_rate=0.3, + conv_inplane=64, + n_points=4, + deform_num_heads=16, + interact_with_ffn=True, + interact_ffn_ratio=0.25, + interact_deform_ratio=0.5, + extract_with_ffn=True, + extract_ffn_ratio=0.25, + extract_deform_ratio=0.5, + num_extract_block=2, + add_vit_feature=True, + interact_indexes=[[0, 5], [6, 11], [12, 17], [18, 23]]), + decode_head=dict( + type='Mask2FormerHead', + in_channels=[1024, 1024, 1024, 1024], + feat_channels=1024, + out_channels=1024, + in_index=[0, 1, 2, 3], + num_things_classes=8, + num_stuff_classes=11, + num_queries=100, + num_transformer_feat_level=3, + pixel_decoder=dict( + type='MSDeformAttnPixelDecoder', + num_outs=3, + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU'), + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', + embed_dims=1024, + num_heads=32, + num_levels=3, + num_points=4, + im2col_step=64, + dropout=0.0, + batch_first=False, + norm_cfg=None, + init_cfg=None), + ffn_cfgs=dict( + type='FFN', + embed_dims=1024, + feedforward_channels=4096, + num_fcs=2, + ffn_drop=0.0, + act_cfg=dict(type='ReLU', inplace=True)), + operation_order=('self_attn', 'norm', 'ffn', 'norm')), + init_cfg=None), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=512, normalize=True), + init_cfg=None), + enforce_decoder_input_project=False, + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=512, normalize=True), + transformer_decoder=dict( + 
type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=9, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=1024, + num_heads=32, + attn_drop=0.0, + proj_drop=0.0, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=1024, + feedforward_channels=4096, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.0, + dropout_layer=None, + add_identity=True), + feedforward_channels=4096, + operation_order=('cross_attn', 'norm', 'self_attn', 'norm', + 'ffn', 'norm')), + init_cfg=None), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=2.0, + reduction='mean', + class_weight=[ + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.1 + ]), + loss_mask=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + reduction='mean', + loss_weight=5.0), + loss_dice=dict( + type='DiceLoss', + use_sigmoid=True, + activate=True, + reduction='mean', + naive_dice=True, + eps=1.0, + loss_weight=5.0)), + train_cfg=dict( + num_points=12544, + oversample_ratio=3.0, + importance_sample_ratio=0.75, + assigner=dict( + type='MaskHungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=2.0), + mask_cost=dict( + type='CrossEntropyLossCost', weight=5.0, use_sigmoid=True), + dice_cost=dict( + type='DiceCost', weight=5.0, pred_act=True, eps=1.0)), + sampler=dict(type='MaskPseudoSampler')), + test_cfg=dict( + panoptic_on=True, + semantic_on=False, + instance_on=True, + max_per_image=100, + iou_thr=0.8, + filter_low_score=True, + mode='slide', + crop_size=(896, 896), + stride=(512, 512)), + init_cfg=None) +find_unused_parameters = True +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +crop_size = (896, 896) +train_pipeline = [ + dict(type='LoadImageFromFile'), + 
dict(type='LoadAnnotations'), + dict(type='Resize', img_scale=(2048, 1024), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(896, 896), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(896, 896), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/train', + ann_dir='gtFine/train', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict( + type='Resize', img_scale=(2048, 1024), ratio_range=(0.5, 2.0)), + dict(type='RandomCrop', crop_size=(896, 896), cat_max_ratio=0.75), + dict(type='RandomFlip', prob=0.5), + dict(type='PhotoMetricDistortion'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='Pad', size=(896, 896), pad_val=0, seg_pad_val=255), + dict(type='ToMask'), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_semantic_seg', 'gt_masks', 'gt_labels']) + ]), + val=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/val', + ann_dir='gtFine/val', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + 
img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ]), + test=dict( + type='CityscapesDataset', + data_root='data/cityscapes/', + img_dir='leftImg8bit/val', + ann_dir='gtFine/val', + pipeline=[ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) + ])) +log_config = dict( + interval=50, hooks=[dict(type='TextLoggerHook', by_epoch=False)]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = 'work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/latest.pth' +workflow = [('train', 1)] +cudnn_benchmark = True +optimizer = dict( + type='AdamW', + lr=2e-05, + betas=(0.9, 0.999), + weight_decay=0.05, + constructor='LayerDecayOptimizerConstructor', + paramwise_cfg=dict(num_layers=24, layer_decay_rate=0.9)) +optimizer_config = dict() +lr_config = dict( + policy='poly', + warmup='linear', + warmup_iters=1500, + warmup_ratio=1e-06, + power=1.0, + min_lr=0.0, + by_epoch=False) +runner = dict(type='IterBasedRunner', max_iters=80000) +checkpoint_config = dict(by_epoch=False, interval=1000, max_keep_ckpts=1) +evaluation = dict( + interval=1000, metric='mIoU', pre_eval=True, save_best='mIoU') +work_dir = './work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss' +gpu_ids = range(0, 16) +auto_resume = False + +2022-05-10 20:11:17,998 - mmseg - INFO - Set random seed to 1300920293, deterministic: False +2022-05-10 20:11:57,021 - mmseg - 
WARNING - The model and loaded state dict do not match exactly + +unexpected key in source state_dict: fc_norm.weight, fc_norm.bias, head.weight, head.bias + +missing keys in source state_dict: blocks.0.attn.relative_position_index, blocks.1.attn.relative_position_index, blocks.2.attn.relative_position_index, blocks.3.attn.relative_position_index, blocks.4.attn.relative_position_index, blocks.5.attn.relative_position_index, blocks.6.attn.relative_position_index, blocks.7.attn.relative_position_index, blocks.8.attn.relative_position_index, blocks.9.attn.relative_position_index, blocks.10.attn.relative_position_index, blocks.11.attn.relative_position_index, blocks.12.attn.relative_position_index, blocks.13.attn.relative_position_index, blocks.14.attn.relative_position_index, blocks.15.attn.relative_position_index, blocks.16.attn.relative_position_index, blocks.17.attn.relative_position_index, blocks.18.attn.relative_position_index, blocks.19.attn.relative_position_index, blocks.20.attn.relative_position_index, blocks.21.attn.relative_position_index, blocks.22.attn.relative_position_index, blocks.23.attn.relative_position_index + +Name of parameter - Initialization information + +backbone.cls_token - torch.Size([1, 1, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.level_embed - torch.Size([3, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.weight - torch.Size([1024, 3, 16, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.patch_embed.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.gamma_2 - torch.Size([1024]): +The value is the 
same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and 
after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.0.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.1.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.2.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.2.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.relative_position_bias_table - torch.Size([12324, 
16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.3.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm1.bias - torch.Size([1024]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.4.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_1 - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.5.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.6.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.6.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.weight - 
torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.7.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.8.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.9.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.9.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.10.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.10.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.11.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.11.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.12.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.12.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.q_bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.13.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.gamma_2 - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc2.weight - 
torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.14.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.norm2.bias - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.15.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.proj.weight - torch.Size([1024, 1024]): +The value is the 
same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.16.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.17.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.17.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.18.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.18.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.19.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.19.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.blocks.20.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.20.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.blocks.21.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.21.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.gamma_2 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.q_bias - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc2.weight - torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.22.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.gamma_1 - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.gamma_2 - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.q_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.v_bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.relative_position_bias_table - torch.Size([12324, 16]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.qkv.weight - torch.Size([3072, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.proj.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.attn.proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc1.weight - torch.Size([4096, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc1.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc2.weight - 
torch.Size([1024, 4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.blocks.23.mlp.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.0.weight - torch.Size([64, 3, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.1.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.1.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.3.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.4.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.4.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.6.weight - torch.Size([64, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.7.weight - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.stem.7.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.0.weight - torch.Size([128, 64, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.1.weight - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv2.1.bias - 
torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.0.weight - torch.Size([256, 128, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv3.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.0.weight - torch.Size([256, 256, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.1.weight - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.conv4.1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc1.weight - torch.Size([1024, 64, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc2.weight - torch.Size([1024, 128, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc3.weight - torch.Size([1024, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc3.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc4.weight - 
torch.Size([1024, 256, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.conv_branch.fc4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.value_proj.weight - 
torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.0.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.query_norm.weight 
- torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and 
after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.1.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.1.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.2.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.query_norm.weight - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.2.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.gamma - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.interact_blocks.3.insert.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.sampling_offsets.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.sampling_offsets.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.attention_weights.weight - torch.Size([192, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.attention_weights.bias - torch.Size([192]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.insert.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc1.bias - torch.Size([256]): +The value 
is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.interact_blocks.3.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.sampling_offsets.bias - 
torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+backbone.extract_blocks.0.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.0.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.query_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.query_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.feat_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.feat_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.sampling_offsets.weight - torch.Size([128, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.sampling_offsets.bias - torch.Size([128]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.attention_weights.weight - torch.Size([64, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.attention_weights.bias - torch.Size([64]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.value_proj.weight - torch.Size([512, 1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.value_proj.bias - torch.Size([512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.output_proj.weight - torch.Size([1024, 512]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.extract.attn.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc1.weight - torch.Size([256, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc1.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.dwconv.dwconv.weight - torch.Size([256, 1, 3, 3]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.dwconv.dwconv.bias - torch.Size([256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc2.weight - torch.Size([1024, 256]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn.fc2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.extract_blocks.1.ffn_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.up.weight - torch.Size([1024, 1024, 2, 2]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +backbone.up.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm3.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +backbone.norm4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.weight - torch.Size([19, 1024, 1, 1]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.conv_seg.bias - torch.Size([19]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.0.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.1.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.1.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.input_convs.2.conv.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.input_convs.2.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.weight 
- torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.0.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.0.bias - torch.Size([1024]): +The value is the same 
before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.0.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.weight - 
torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.1.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.1.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.attention_weights.bias - 
torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.2.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.2.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the 
same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.3.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.3.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.weight - 
torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.4.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.4.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.weight - torch.Size([768, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.sampling_offsets.bias - torch.Size([768]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.weight - torch.Size([384, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.attention_weights.bias - torch.Size([384]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.value_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.attentions.0.output_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by 
user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.encoder.layers.5.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.encoder.layers.5.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.level_encoding.weight - torch.Size([3, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.conv.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.lateral_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.lateral_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.conv.weight - torch.Size([1024, 1024, 3, 3]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.output_convs.0.gn.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.output_convs.0.gn.bias - torch.Size([1024]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.pixel_decoder.mask_feature.weight - torch.Size([1024, 1024, 1, 1]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.pixel_decoder.mask_feature.bias - torch.Size([1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.0.0.bias - 
torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.0.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.0.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in 
Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.1.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + 
+decode_head.transformer_decoder.layers.1.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.1.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.2.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.2.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.2.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_weight - torch.Size([3072, 
1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.3.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of 
EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.3.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.4.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.4.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.weight - torch.Size([1024]): +The 
value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.4.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling 
`init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.5.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.5.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.6.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.6.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.weight - 
torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.6.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before 
and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.7.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.7.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + 
+decode_head.transformer_decoder.layers.8.attentions.0.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.0.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_weight - torch.Size([3072, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.in_proj_bias - torch.Size([3072]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.weight - torch.Size([1024, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.attentions.1.attn.out_proj.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.weight - torch.Size([4096, 1024]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.0.0.bias - torch.Size([4096]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.weight - torch.Size([1024, 4096]): +Initialized by user-defined `init_weights` in Mask2FormerHead + +decode_head.transformer_decoder.layers.8.ffns.0.layers.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.weight 
- torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.1.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.layers.8.norms.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.weight - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.transformer_decoder.post_norm.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_embed.weight - torch.Size([100, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.query_feat.weight - torch.Size([100, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.level_embed.weight - torch.Size([3, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.weight - torch.Size([20, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.cls_embed.bias - torch.Size([20]): +The value is the same before and after 
calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.0.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.2.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.weight - torch.Size([1024, 1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former + +decode_head.mask_embed.4.bias - torch.Size([1024]): +The value is the same before and after calling `init_weights` of EncoderDecoderMask2Former +2022-05-10 20:12:29,174 - mmseg - INFO - EncoderDecoderMask2Former( + (backbone): BEiTAdapter( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 1024, kernel_size=(16, 16), stride=(16, 16)) + ) + (pos_drop): Dropout(p=0.0, inplace=False) + (blocks): ModuleList( + (0): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): Identity() + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (1): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, 
bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.013043479062616825) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (2): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.02608695812523365) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (3): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.03913043811917305) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (4): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): 
Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0521739162504673) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (5): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.06521739810705185) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (6): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.0782608762383461) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (7): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): 
Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.09130435436964035) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (8): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1043478325009346) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (9): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.11739131063222885) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (10): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): 
DropPath(p=0.1304347962141037) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (11): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.14347827434539795) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (12): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1565217524766922) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (13): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.16956523060798645) + (norm2): LayerNorm((1024,), 
eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (14): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.1826087087392807) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (15): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.19565218687057495) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (16): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2086956650018692) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): 
Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (17): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.22173914313316345) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (18): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2347826212644577) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (19): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.24782609939575195) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): 
GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (20): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.260869562625885) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (21): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.27391305565834045) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (22): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.2869565188884735) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, 
bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + (23): Block( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): Attention( + (qkv): Linear(in_features=1024, out_features=3072, bias=False) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (drop_path): DropPath(p=0.30000001192092896) + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU() + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + (conv_branch): ConvBranch( + (stem): Sequential( + (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + (6): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (7): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (8): ReLU(inplace=True) + (9): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) + ) + (conv2): Sequential( + (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv3): Sequential( + (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (conv4): Sequential( + (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 
1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + ) + (fc1): Conv2d(64, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc2): Conv2d(128, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1)) + (fc4): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (interact_blocks): Sequential( + (0): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (1): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), 
eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (2): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): 
Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + (3): InteractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (insert): InsertLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=384, bias=True) + (attention_weights): Linear(in_features=1024, out_features=192, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): 
Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): DropPath() + ) + ) + (extract_blocks): Sequential( + (0): ExtractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (drop_path): Identity() + ) + (1): ExtractBlock( + (extract): ExtractLayer( + (query_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (feat_norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MSDeformAttn( + (sampling_offsets): Linear(in_features=1024, out_features=128, bias=True) + (attention_weights): Linear(in_features=1024, out_features=64, bias=True) + (value_proj): Linear(in_features=1024, out_features=512, bias=True) + (output_proj): Linear(in_features=512, out_features=1024, bias=True) + ) + ) + (ffn): ConvFFN( + (fc1): Linear(in_features=1024, out_features=256, bias=True) + (dwconv): DWConv( + (dwconv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=256) + ) + (act): GELU() + (fc2): Linear(in_features=256, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ffn_norm): LayerNorm((1024,), eps=1e-06, 
elementwise_affine=True) + (drop_path): Identity() + ) + ) + (up): ConvTranspose2d(1024, 1024, kernel_size=(2, 2), stride=(2, 2)) + (norm1): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm2): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm3): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (norm4): SyncBatchNorm(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + ) + (decode_head): Mask2FormerHead( + input_transform=multiple_select, ignore_index=255, align_corners=False + (loss_decode): CrossEntropyLoss(avg_non_ignore=False) + (conv_seg): Conv2d(1024, 19, kernel_size=(1, 1), stride=(1, 1)) + (dropout): Dropout2d(p=0.1, inplace=False) + (pixel_decoder): MSDeformAttnPixelDecoder( + (input_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + (1): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + (2): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + ) + (encoder): DetrTransformerEncoder( + (layers): ModuleList( + (0): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): 
Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): 
Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, 
bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): BaseTransformerLayer( + (attentions): ModuleList( + (0): MultiScaleDeformableAttention( + (dropout): Dropout(p=0.0, inplace=False) + (sampling_offsets): Linear(in_features=1024, out_features=768, bias=True) + (attention_weights): Linear(in_features=1024, out_features=384, bias=True) + (value_proj): Linear(in_features=1024, out_features=1024, bias=True) + (output_proj): Linear(in_features=1024, out_features=1024, bias=True) + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + ) + (postional_encoding): SinePositionalEncoding(num_feats=512, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (level_encoding): Embedding(3, 1024) + (lateral_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + ) + ) + (output_convs): ModuleList( + (0): ConvModule( + (conv): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (gn): GroupNorm(32, 1024, eps=1e-05, affine=True) + (activate): ReLU(inplace=True) + ) + ) + (mask_feature): Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (transformer_decoder): DetrTransformerDecoder( + (layers): ModuleList( + (0): 
DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (1): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) 
+ ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (2): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (3): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + 
(layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (4): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (5): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): 
MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (6): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (7): DetrTransformerDecoderLayer( + (attentions): ModuleList( + 
(0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + (8): DetrTransformerDecoderLayer( + (attentions): ModuleList( + (0): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + (1): MultiheadAttention( + (attn): MultiheadAttention( + (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True) + ) + (proj_drop): Dropout(p=0.0, inplace=False) + (dropout_layer): Identity() + ) + ) + (ffns): ModuleList( + (0): FFN( + (activate): ReLU(inplace=True) + (layers): Sequential( + (0): Sequential( + (0): Linear(in_features=1024, out_features=4096, bias=True) + (1): ReLU(inplace=True) + (2): Dropout(p=0.0, inplace=False) + ) + (1): Linear(in_features=4096, out_features=1024, bias=True) + (2): Dropout(p=0.0, inplace=False) + ) + (dropout_layer): Identity() + ) + ) + (norms): 
ModuleList( + (0): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + (2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + ) + ) + (post_norm): LayerNorm((1024,), eps=1e-05, elementwise_affine=True) + ) + (decoder_input_projs): ModuleList( + (0): Identity() + (1): Identity() + (2): Identity() + ) + (decoder_positional_encoding): SinePositionalEncoding(num_feats=512, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) + (query_embed): Embedding(100, 1024) + (query_feat): Embedding(100, 1024) + (level_embed): Embedding(3, 1024) + (cls_embed): Linear(in_features=1024, out_features=20, bias=True) + (mask_embed): Sequential( + (0): Linear(in_features=1024, out_features=1024, bias=True) + (1): ReLU(inplace=True) + (2): Linear(in_features=1024, out_features=1024, bias=True) + (3): ReLU(inplace=True) + (4): Linear(in_features=1024, out_features=1024, bias=True) + ) + (loss_cls): CrossEntropyLoss(avg_non_ignore=False) + (loss_mask): CrossEntropyLoss(avg_non_ignore=False) + (loss_dice): DiceLoss() + ) +) +2022-05-10 20:12:29,467 - mmseg - INFO - Loaded 2975 images +2022-05-10 20:12:31,432 - mmseg - INFO - Loaded 500 images +2022-05-10 20:12:31,433 - mmseg - INFO - load checkpoint from local path: work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/latest.pth +2022-05-10 20:12:51,910 - mmseg - INFO - resumed from epoch: 65, iter 47999 +2022-05-10 20:12:51,914 - mmseg - INFO - Start running, host: chenzhe.vendor@SH-IDC1-10-140-1-143, work_dir: /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss +2022-05-10 20:12:51,915 - mmseg - INFO - Hooks will be executed in the following order: +before_run: +(VERY_HIGH ) PolyLrUpdaterHook +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_epoch: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook 
+(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_train_iter: +(VERY_HIGH ) PolyLrUpdaterHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook + -------------------- +after_train_iter: +(ABOVE_NORMAL) OptimizerHook +(NORMAL ) CheckpointHook +(LOW ) IterTimerHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +after_train_epoch: +(NORMAL ) CheckpointHook +(LOW ) DistEvalHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_epoch: +(LOW ) IterTimerHook +(VERY_LOW ) TextLoggerHook + -------------------- +before_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_iter: +(LOW ) IterTimerHook + -------------------- +after_val_epoch: +(VERY_LOW ) TextLoggerHook + -------------------- +after_run: +(VERY_LOW ) TextLoggerHook + -------------------- +2022-05-10 20:12:51,915 - mmseg - INFO - workflow: [('train', 1)], max: 80000 iters +2022-05-10 20:12:51,915 - mmseg - INFO - Checkpoints will be saved to /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss by HardDiskBackend. 
+2022-05-10 20:14:13,197 - mmseg - INFO - Saving checkpoint at 48000 iterations +2022-05-10 20:14:45,706 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 20:14:45,722 - mmseg - INFO - Iter [48000/80000] lr: 5.743e-07, eta: 1645 days, 8:18:32, time: 88.849, data_time: 0.272, memory: 69063, decode.loss_cls: 0.0601, decode.loss_mask: 0.1909, decode.loss_dice: 0.5791, decode.d0.loss_cls: 0.2432, decode.d0.loss_mask: 0.1929, decode.d0.loss_dice: 0.5802, decode.d1.loss_cls: 0.0326, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5846, decode.d2.loss_cls: 0.0565, decode.d2.loss_mask: 0.1931, decode.d2.loss_dice: 0.5885, decode.d3.loss_cls: 0.0232, decode.d3.loss_mask: 0.1941, decode.d3.loss_dice: 0.5940, decode.d4.loss_cls: 0.0210, decode.d4.loss_mask: 0.1921, decode.d4.loss_dice: 0.5801, decode.d5.loss_cls: 0.0202, decode.d5.loss_mask: 0.1947, decode.d5.loss_dice: 0.5821, decode.d6.loss_cls: 0.0200, decode.d6.loss_mask: 0.1907, decode.d6.loss_dice: 0.5621, decode.d7.loss_cls: 0.0832, decode.d7.loss_mask: 0.1938, decode.d7.loss_dice: 0.5605, decode.d8.loss_cls: 0.0199, decode.d8.loss_mask: 0.1900, decode.d8.loss_dice: 0.5631, loss: 8.2784 +2022-05-10 20:16:55,308 - mmseg - INFO - per class results: +2022-05-10 20:16:55,313 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.55 | 99.16 | +| sidewalk | 88.18 | 94.08 | +| building | 94.37 | 97.04 | +| wall | 68.73 | 79.45 | +| fence | 73.99 | 81.88 | +| pole | 71.44 | 84.73 | +| traffic light | 76.96 | 87.44 | +| traffic sign | 84.01 | 90.52 | +| vegetation | 93.43 | 96.98 | +| terrain | 68.45 | 75.31 | +| sky | 95.89 | 98.3 | +| person | 87.03 | 93.46 | +| rider | 74.65 | 84.62 | +| car | 96.14 | 98.27 | +| truck | 81.81 | 94.87 | +| bus | 93.95 | 96.91 | +| train | 88.15 | 91.26 | +| motorcycle | 78.01 | 87.71 | +| bicycle | 82.88 | 91.95 | ++---------------+-------+-------+ +2022-05-10 20:16:55,313 - 
mmseg - INFO - Summary: +2022-05-10 20:16:55,313 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.98 | 84.03 | 90.73 | ++-------+-------+-------+ +2022-05-10 20:17:29,950 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_48000.pth. +2022-05-10 20:17:29,962 - mmseg - INFO - Best mIoU is 0.8403 at 48000 iter. +2022-05-10 20:17:29,971 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 20:17:29,972 - mmseg - INFO - Iter(val) [32] aAcc: 0.9698, mIoU: 0.8403, mAcc: 0.9073, IoU.road: 0.9855, IoU.sidewalk: 0.8818, IoU.building: 0.9437, IoU.wall: 0.6873, IoU.fence: 0.7399, IoU.pole: 0.7144, IoU.traffic light: 0.7696, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9343, IoU.terrain: 0.6845, IoU.sky: 0.9589, IoU.person: 0.8703, IoU.rider: 0.7465, IoU.car: 0.9614, IoU.truck: 0.8181, IoU.bus: 0.9395, IoU.train: 0.8815, IoU.motorcycle: 0.7801, IoU.bicycle: 0.8288, Acc.road: 0.9916, Acc.sidewalk: 0.9408, Acc.building: 0.9704, Acc.wall: 0.7945, Acc.fence: 0.8188, Acc.pole: 0.8473, Acc.traffic light: 0.8744, Acc.traffic sign: 0.9052, Acc.vegetation: 0.9698, Acc.terrain: 0.7531, Acc.sky: 0.9830, Acc.person: 0.9346, Acc.rider: 0.8462, Acc.car: 0.9827, Acc.truck: 0.9487, Acc.bus: 0.9691, Acc.train: 0.9126, Acc.motorcycle: 0.8771, Acc.bicycle: 0.9195 +2022-05-10 20:19:01,949 - mmseg - INFO - Iter [48050/80000] lr: 5.734e-07, eta: 34 days, 1:40:42, time: 5.127, data_time: 3.307, memory: 69063, decode.loss_cls: 0.0632, decode.loss_mask: 0.1942, decode.loss_dice: 0.5326, decode.d0.loss_cls: 0.3133, decode.d0.loss_mask: 0.2011, decode.d0.loss_dice: 0.5635, decode.d1.loss_cls: 0.0816, decode.d1.loss_mask: 0.1952, decode.d1.loss_dice: 0.5443, decode.d2.loss_cls: 0.0750, decode.d2.loss_mask: 0.1949, decode.d2.loss_dice: 0.5388, decode.d3.loss_cls: 0.0663, decode.d3.loss_mask: 0.1942, decode.d3.loss_dice: 0.5362, decode.d4.loss_cls: 0.0718, decode.d4.loss_mask: 0.1941, 
decode.d4.loss_dice: 0.5399, decode.d5.loss_cls: 0.0700, decode.d5.loss_mask: 0.1942, decode.d5.loss_dice: 0.5365, decode.d6.loss_cls: 0.0683, decode.d6.loss_mask: 0.1936, decode.d6.loss_dice: 0.5336, decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.1942, decode.d7.loss_dice: 0.5366, decode.d8.loss_cls: 0.0689, decode.d8.loss_mask: 0.1942, decode.d8.loss_dice: 0.5416, loss: 8.2966 +2022-05-10 20:20:32,837 - mmseg - INFO - Iter [48100/80000] lr: 5.725e-07, eta: 17 days, 12:13:07, time: 1.819, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0651, decode.loss_mask: 0.1994, decode.loss_dice: 0.5390, decode.d0.loss_cls: 0.3023, decode.d0.loss_mask: 0.2081, decode.d0.loss_dice: 0.5631, decode.d1.loss_cls: 0.0835, decode.d1.loss_mask: 0.2017, decode.d1.loss_dice: 0.5473, decode.d2.loss_cls: 0.0746, decode.d2.loss_mask: 0.1999, decode.d2.loss_dice: 0.5433, decode.d3.loss_cls: 0.0744, decode.d3.loss_mask: 0.1997, decode.d3.loss_dice: 0.5374, decode.d4.loss_cls: 0.0665, decode.d4.loss_mask: 0.1996, decode.d4.loss_dice: 0.5398, decode.d5.loss_cls: 0.0689, decode.d5.loss_mask: 0.1995, decode.d5.loss_dice: 0.5359, decode.d6.loss_cls: 0.0634, decode.d6.loss_mask: 0.1995, decode.d6.loss_dice: 0.5412, decode.d7.loss_cls: 0.0662, decode.d7.loss_mask: 0.1993, decode.d7.loss_dice: 0.5336, decode.d8.loss_cls: 0.0649, decode.d8.loss_mask: 0.1994, decode.d8.loss_dice: 0.5390, loss: 8.3554 +2022-05-10 20:22:03,582 - mmseg - INFO - Iter [48150/80000] lr: 5.716e-07, eta: 11 days, 21:56:55, time: 1.815, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0521, decode.loss_mask: 0.1949, decode.loss_dice: 0.5243, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.2027, decode.d0.loss_dice: 0.5543, decode.d1.loss_cls: 0.0735, decode.d1.loss_mask: 0.1967, decode.d1.loss_dice: 0.5415, decode.d2.loss_cls: 0.0660, decode.d2.loss_mask: 0.1954, decode.d2.loss_dice: 0.5366, decode.d3.loss_cls: 0.0623, decode.d3.loss_mask: 0.1949, decode.d3.loss_dice: 0.5315, decode.d4.loss_cls: 0.0634, 
decode.d4.loss_mask: 0.1953, decode.d4.loss_dice: 0.5339, decode.d5.loss_cls: 0.0609, decode.d5.loss_mask: 0.1951, decode.d5.loss_dice: 0.5305, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1947, decode.d6.loss_dice: 0.5318, decode.d7.loss_cls: 0.0604, decode.d7.loss_mask: 0.1952, decode.d7.loss_dice: 0.5337, decode.d8.loss_cls: 0.0538, decode.d8.loss_mask: 0.1946, decode.d8.loss_dice: 0.5260, loss: 8.1528 +2022-05-10 20:23:37,009 - mmseg - INFO - Iter [48200/80000] lr: 5.707e-07, eta: 9 days, 2:35:11, time: 1.869, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0616, decode.loss_mask: 0.1949, decode.loss_dice: 0.5264, decode.d0.loss_cls: 0.3113, decode.d0.loss_mask: 0.2043, decode.d0.loss_dice: 0.5570, decode.d1.loss_cls: 0.0797, decode.d1.loss_mask: 0.1967, decode.d1.loss_dice: 0.5396, decode.d2.loss_cls: 0.0684, decode.d2.loss_mask: 0.1955, decode.d2.loss_dice: 0.5357, decode.d3.loss_cls: 0.0679, decode.d3.loss_mask: 0.1962, decode.d3.loss_dice: 0.5318, decode.d4.loss_cls: 0.0663, decode.d4.loss_mask: 0.1959, decode.d4.loss_dice: 0.5313, decode.d5.loss_cls: 0.0690, decode.d5.loss_mask: 0.1957, decode.d5.loss_dice: 0.5366, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1953, decode.d6.loss_dice: 0.5293, decode.d7.loss_cls: 0.0673, decode.d7.loss_mask: 0.1958, decode.d7.loss_dice: 0.5321, decode.d8.loss_cls: 0.0625, decode.d8.loss_mask: 0.1953, decode.d8.loss_dice: 0.5316, loss: 8.2290 +2022-05-10 20:25:08,300 - mmseg - INFO - Iter [48250/80000] lr: 5.698e-07, eta: 7 days, 9:58:22, time: 1.824, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0647, decode.loss_mask: 0.1956, decode.loss_dice: 0.5415, decode.d0.loss_cls: 0.2979, decode.d0.loss_mask: 0.2033, decode.d0.loss_dice: 0.5628, decode.d1.loss_cls: 0.0831, decode.d1.loss_mask: 0.1962, decode.d1.loss_dice: 0.5500, decode.d2.loss_cls: 0.0761, decode.d2.loss_mask: 0.1948, decode.d2.loss_dice: 0.5477, decode.d3.loss_cls: 0.0704, decode.d3.loss_mask: 0.1957, decode.d3.loss_dice: 0.5412, 
decode.d4.loss_cls: 0.0775, decode.d4.loss_mask: 0.1951, decode.d4.loss_dice: 0.5404, decode.d5.loss_cls: 0.0678, decode.d5.loss_mask: 0.1951, decode.d5.loss_dice: 0.5437, decode.d6.loss_cls: 0.0693, decode.d6.loss_mask: 0.1954, decode.d6.loss_dice: 0.5416, decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.1952, decode.d7.loss_dice: 0.5423, decode.d8.loss_cls: 0.0641, decode.d8.loss_mask: 0.1952, decode.d8.loss_dice: 0.5433, loss: 8.3514 +2022-05-10 20:26:38,814 - mmseg - INFO - Iter [48300/80000] lr: 5.690e-07, eta: 6 days, 6:49:32, time: 1.812, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0655, decode.loss_mask: 0.1957, decode.loss_dice: 0.5377, decode.d0.loss_cls: 0.3097, decode.d0.loss_mask: 0.2026, decode.d0.loss_dice: 0.5603, decode.d1.loss_cls: 0.0858, decode.d1.loss_mask: 0.1978, decode.d1.loss_dice: 0.5433, decode.d2.loss_cls: 0.0838, decode.d2.loss_mask: 0.1967, decode.d2.loss_dice: 0.5391, decode.d3.loss_cls: 0.0735, decode.d3.loss_mask: 0.1969, decode.d3.loss_dice: 0.5363, decode.d4.loss_cls: 0.0708, decode.d4.loss_mask: 0.1962, decode.d4.loss_dice: 0.5367, decode.d5.loss_cls: 0.0758, decode.d5.loss_mask: 0.1958, decode.d5.loss_dice: 0.5400, decode.d6.loss_cls: 0.0653, decode.d6.loss_mask: 0.1966, decode.d6.loss_dice: 0.5369, decode.d7.loss_cls: 0.0662, decode.d7.loss_mask: 0.1963, decode.d7.loss_dice: 0.5360, decode.d8.loss_cls: 0.0717, decode.d8.loss_mask: 0.1962, decode.d8.loss_dice: 0.5327, loss: 8.3379 +2022-05-10 20:28:08,345 - mmseg - INFO - Iter [48350/80000] lr: 5.681e-07, eta: 5 days, 11:22:42, time: 1.790, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0583, decode.loss_mask: 0.2028, decode.loss_dice: 0.5316, decode.d0.loss_cls: 0.3104, decode.d0.loss_mask: 0.2129, decode.d0.loss_dice: 0.5543, decode.d1.loss_cls: 0.0719, decode.d1.loss_mask: 0.2047, decode.d1.loss_dice: 0.5375, decode.d2.loss_cls: 0.0686, decode.d2.loss_mask: 0.2033, decode.d2.loss_dice: 0.5377, decode.d3.loss_cls: 0.0675, decode.d3.loss_mask: 0.2036, 
decode.d3.loss_dice: 0.5346, decode.d4.loss_cls: 0.0697, decode.d4.loss_mask: 0.2030, decode.d4.loss_dice: 0.5344, decode.d5.loss_cls: 0.0641, decode.d5.loss_mask: 0.2030, decode.d5.loss_dice: 0.5325, decode.d6.loss_cls: 0.0648, decode.d6.loss_mask: 0.2030, decode.d6.loss_dice: 0.5334, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.2024, decode.d7.loss_dice: 0.5287, decode.d8.loss_cls: 0.0653, decode.d8.loss_mask: 0.2026, decode.d8.loss_dice: 0.5303, loss: 8.2962 +2022-05-10 20:29:43,276 - mmseg - INFO - Iter [48400/80000] lr: 5.672e-07, eta: 4 days, 20:53:37, time: 1.899, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1956, decode.loss_dice: 0.5339, decode.d0.loss_cls: 0.2985, decode.d0.loss_mask: 0.2044, decode.d0.loss_dice: 0.5588, decode.d1.loss_cls: 0.0771, decode.d1.loss_mask: 0.1971, decode.d1.loss_dice: 0.5453, decode.d2.loss_cls: 0.0742, decode.d2.loss_mask: 0.1964, decode.d2.loss_dice: 0.5386, decode.d3.loss_cls: 0.0603, decode.d3.loss_mask: 0.1962, decode.d3.loss_dice: 0.5304, decode.d4.loss_cls: 0.0652, decode.d4.loss_mask: 0.1959, decode.d4.loss_dice: 0.5311, decode.d5.loss_cls: 0.0650, decode.d5.loss_mask: 0.1964, decode.d5.loss_dice: 0.5359, decode.d6.loss_cls: 0.0577, decode.d6.loss_mask: 0.1961, decode.d6.loss_dice: 0.5319, decode.d7.loss_cls: 0.0628, decode.d7.loss_mask: 0.1959, decode.d7.loss_dice: 0.5292, decode.d8.loss_cls: 0.0584, decode.d8.loss_mask: 0.1957, decode.d8.loss_dice: 0.5291, loss: 8.2100 +2022-05-10 20:31:13,284 - mmseg - INFO - Iter [48450/80000] lr: 5.663e-07, eta: 4 days, 9:31:08, time: 1.800, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0585, decode.loss_mask: 0.1911, decode.loss_dice: 0.5431, decode.d0.loss_cls: 0.3030, decode.d0.loss_mask: 0.2000, decode.d0.loss_dice: 0.5665, decode.d1.loss_cls: 0.0749, decode.d1.loss_mask: 0.1926, decode.d1.loss_dice: 0.5482, decode.d2.loss_cls: 0.0673, decode.d2.loss_mask: 0.1920, decode.d2.loss_dice: 0.5442, decode.d3.loss_cls: 0.0653, 
decode.d3.loss_mask: 0.1915, decode.d3.loss_dice: 0.5422, decode.d4.loss_cls: 0.0646, decode.d4.loss_mask: 0.1918, decode.d4.loss_dice: 0.5411, decode.d5.loss_cls: 0.0564, decode.d5.loss_mask: 0.1918, decode.d5.loss_dice: 0.5467, decode.d6.loss_cls: 0.0609, decode.d6.loss_mask: 0.1912, decode.d6.loss_dice: 0.5448, decode.d7.loss_cls: 0.0580, decode.d7.loss_mask: 0.1916, decode.d7.loss_dice: 0.5463, decode.d8.loss_cls: 0.0584, decode.d8.loss_mask: 0.1919, decode.d8.loss_dice: 0.5427, loss: 8.2584 +2022-05-10 20:32:45,030 - mmseg - INFO - Iter [48500/80000] lr: 5.654e-07, eta: 4 days, 0:26:24, time: 1.835, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0580, decode.loss_mask: 0.1989, decode.loss_dice: 0.5420, decode.d0.loss_cls: 0.2992, decode.d0.loss_mask: 0.2068, decode.d0.loss_dice: 0.5607, decode.d1.loss_cls: 0.0758, decode.d1.loss_mask: 0.2000, decode.d1.loss_dice: 0.5514, decode.d2.loss_cls: 0.0699, decode.d2.loss_mask: 0.1998, decode.d2.loss_dice: 0.5479, decode.d3.loss_cls: 0.0649, decode.d3.loss_mask: 0.1994, decode.d3.loss_dice: 0.5433, decode.d4.loss_cls: 0.0644, decode.d4.loss_mask: 0.1990, decode.d4.loss_dice: 0.5431, decode.d5.loss_cls: 0.0630, decode.d5.loss_mask: 0.1987, decode.d5.loss_dice: 0.5457, decode.d6.loss_cls: 0.0651, decode.d6.loss_mask: 0.1987, decode.d6.loss_dice: 0.5459, decode.d7.loss_cls: 0.0608, decode.d7.loss_mask: 0.1988, decode.d7.loss_dice: 0.5402, decode.d8.loss_cls: 0.0595, decode.d8.loss_mask: 0.1987, decode.d8.loss_dice: 0.5392, loss: 8.3390 +2022-05-10 20:34:16,020 - mmseg - INFO - Iter [48550/80000] lr: 5.645e-07, eta: 3 days, 16:59:31, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0755, decode.loss_mask: 0.2001, decode.loss_dice: 0.5472, decode.d0.loss_cls: 0.3099, decode.d0.loss_mask: 0.2076, decode.d0.loss_dice: 0.5774, decode.d1.loss_cls: 0.0913, decode.d1.loss_mask: 0.2022, decode.d1.loss_dice: 0.5573, decode.d2.loss_cls: 0.0846, decode.d2.loss_mask: 0.2016, decode.d2.loss_dice: 0.5507, 
decode.d3.loss_cls: 0.0730, decode.d3.loss_mask: 0.2004, decode.d3.loss_dice: 0.5498, decode.d4.loss_cls: 0.0819, decode.d4.loss_mask: 0.1997, decode.d4.loss_dice: 0.5459, decode.d5.loss_cls: 0.0799, decode.d5.loss_mask: 0.2002, decode.d5.loss_dice: 0.5510, decode.d6.loss_cls: 0.0712, decode.d6.loss_mask: 0.2002, decode.d6.loss_dice: 0.5464, decode.d7.loss_cls: 0.0725, decode.d7.loss_mask: 0.1996, decode.d7.loss_dice: 0.5446, decode.d8.loss_cls: 0.0751, decode.d8.loss_mask: 0.1999, decode.d8.loss_dice: 0.5467, loss: 8.5433 +2022-05-10 20:35:50,212 - mmseg - INFO - Iter [48600/80000] lr: 5.636e-07, eta: 3 days, 10:49:30, time: 1.883, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0674, decode.loss_mask: 0.1968, decode.loss_dice: 0.5315, decode.d0.loss_cls: 0.3088, decode.d0.loss_mask: 0.2066, decode.d0.loss_dice: 0.5636, decode.d1.loss_cls: 0.0823, decode.d1.loss_mask: 0.1987, decode.d1.loss_dice: 0.5430, decode.d2.loss_cls: 0.0765, decode.d2.loss_mask: 0.1981, decode.d2.loss_dice: 0.5372, decode.d3.loss_cls: 0.0739, decode.d3.loss_mask: 0.1972, decode.d3.loss_dice: 0.5367, decode.d4.loss_cls: 0.0712, decode.d4.loss_mask: 0.1973, decode.d4.loss_dice: 0.5324, decode.d5.loss_cls: 0.0721, decode.d5.loss_mask: 0.1977, decode.d5.loss_dice: 0.5343, decode.d6.loss_cls: 0.0692, decode.d6.loss_mask: 0.1970, decode.d6.loss_dice: 0.5295, decode.d7.loss_cls: 0.0642, decode.d7.loss_mask: 0.1974, decode.d7.loss_dice: 0.5362, decode.d8.loss_cls: 0.0635, decode.d8.loss_mask: 0.1974, decode.d8.loss_dice: 0.5315, loss: 8.3096 +2022-05-10 20:37:21,987 - mmseg - INFO - Iter [48650/80000] lr: 5.627e-07, eta: 3 days, 5:34:12, time: 1.836, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0718, decode.loss_mask: 0.1909, decode.loss_dice: 0.5395, decode.d0.loss_cls: 0.2988, decode.d0.loss_mask: 0.1991, decode.d0.loss_dice: 0.5716, decode.d1.loss_cls: 0.0880, decode.d1.loss_mask: 0.1928, decode.d1.loss_dice: 0.5554, decode.d2.loss_cls: 0.0805, decode.d2.loss_mask: 0.1919, 
decode.d2.loss_dice: 0.5486, decode.d3.loss_cls: 0.0733, decode.d3.loss_mask: 0.1917, decode.d3.loss_dice: 0.5418, decode.d4.loss_cls: 0.0720, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5455, decode.d5.loss_cls: 0.0696, decode.d5.loss_mask: 0.1919, decode.d5.loss_dice: 0.5432, decode.d6.loss_cls: 0.0700, decode.d6.loss_mask: 0.1913, decode.d6.loss_dice: 0.5435, decode.d7.loss_cls: 0.0759, decode.d7.loss_mask: 0.1916, decode.d7.loss_dice: 0.5387, decode.d8.loss_cls: 0.0699, decode.d8.loss_mask: 0.1912, decode.d8.loss_dice: 0.5444, loss: 8.3668 +2022-05-10 20:38:52,909 - mmseg - INFO - Iter [48700/80000] lr: 5.618e-07, eta: 3 days, 1:03:00, time: 1.818, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0705, decode.loss_mask: 0.1922, decode.loss_dice: 0.5402, decode.d0.loss_cls: 0.3043, decode.d0.loss_mask: 0.2017, decode.d0.loss_dice: 0.5657, decode.d1.loss_cls: 0.0839, decode.d1.loss_mask: 0.1928, decode.d1.loss_dice: 0.5501, decode.d2.loss_cls: 0.0806, decode.d2.loss_mask: 0.1925, decode.d2.loss_dice: 0.5516, decode.d3.loss_cls: 0.0715, decode.d3.loss_mask: 0.1924, decode.d3.loss_dice: 0.5415, decode.d4.loss_cls: 0.0732, decode.d4.loss_mask: 0.1928, decode.d4.loss_dice: 0.5427, decode.d5.loss_cls: 0.0738, decode.d5.loss_mask: 0.1924, decode.d5.loss_dice: 0.5418, decode.d6.loss_cls: 0.0642, decode.d6.loss_mask: 0.1923, decode.d6.loss_dice: 0.5422, decode.d7.loss_cls: 0.0639, decode.d7.loss_mask: 0.1921, decode.d7.loss_dice: 0.5421, decode.d8.loss_cls: 0.0657, decode.d8.loss_mask: 0.1923, decode.d8.loss_dice: 0.5381, loss: 8.3413 +2022-05-10 20:40:25,961 - mmseg - INFO - Iter [48750/80000] lr: 5.609e-07, eta: 2 days, 21:09:11, time: 1.861, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0633, decode.loss_mask: 0.1975, decode.loss_dice: 0.5424, decode.d0.loss_cls: 0.2901, decode.d0.loss_mask: 0.2036, decode.d0.loss_dice: 0.5656, decode.d1.loss_cls: 0.0894, decode.d1.loss_mask: 0.1985, decode.d1.loss_dice: 0.5485, decode.d2.loss_cls: 0.0772, 
decode.d2.loss_mask: 0.1982, decode.d2.loss_dice: 0.5412, decode.d3.loss_cls: 0.0642, decode.d3.loss_mask: 0.1976, decode.d3.loss_dice: 0.5411, decode.d4.loss_cls: 0.0668, decode.d4.loss_mask: 0.1978, decode.d4.loss_dice: 0.5390, decode.d5.loss_cls: 0.0722, decode.d5.loss_mask: 0.1982, decode.d5.loss_dice: 0.5420, decode.d6.loss_cls: 0.0641, decode.d6.loss_mask: 0.1975, decode.d6.loss_dice: 0.5381, decode.d7.loss_cls: 0.0654, decode.d7.loss_mask: 0.1973, decode.d7.loss_dice: 0.5382, decode.d8.loss_cls: 0.0674, decode.d8.loss_mask: 0.1976, decode.d8.loss_dice: 0.5383, loss: 8.3379 +2022-05-10 20:41:56,795 - mmseg - INFO - Iter [48800/80000] lr: 5.600e-07, eta: 2 days, 17:42:56, time: 1.817, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0591, decode.loss_mask: 0.1939, decode.loss_dice: 0.5249, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.2018, decode.d0.loss_dice: 0.5458, decode.d1.loss_cls: 0.0742, decode.d1.loss_mask: 0.1960, decode.d1.loss_dice: 0.5334, decode.d2.loss_cls: 0.0684, decode.d2.loss_mask: 0.1958, decode.d2.loss_dice: 0.5302, decode.d3.loss_cls: 0.0588, decode.d3.loss_mask: 0.1945, decode.d3.loss_dice: 0.5299, decode.d4.loss_cls: 0.0599, decode.d4.loss_mask: 0.1942, decode.d4.loss_dice: 0.5257, decode.d5.loss_cls: 0.0607, decode.d5.loss_mask: 0.1948, decode.d5.loss_dice: 0.5269, decode.d6.loss_cls: 0.0576, decode.d6.loss_mask: 0.1943, decode.d6.loss_dice: 0.5267, decode.d7.loss_cls: 0.0638, decode.d7.loss_mask: 0.1942, decode.d7.loss_dice: 0.5277, decode.d8.loss_cls: 0.0589, decode.d8.loss_mask: 0.1943, decode.d8.loss_dice: 0.5257, loss: 8.1097 +2022-05-10 20:43:26,805 - mmseg - INFO - Iter [48850/80000] lr: 5.591e-07, eta: 2 days, 14:40:14, time: 1.800, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0592, decode.loss_mask: 0.1973, decode.loss_dice: 0.5280, decode.d0.loss_cls: 0.3089, decode.d0.loss_mask: 0.2038, decode.d0.loss_dice: 0.5556, decode.d1.loss_cls: 0.0771, decode.d1.loss_mask: 0.1990, decode.d1.loss_dice: 0.5402, 
decode.d2.loss_cls: 0.0648, decode.d2.loss_mask: 0.1982, decode.d2.loss_dice: 0.5380, decode.d3.loss_cls: 0.0589, decode.d3.loss_mask: 0.1972, decode.d3.loss_dice: 0.5367, decode.d4.loss_cls: 0.0612, decode.d4.loss_mask: 0.1973, decode.d4.loss_dice: 0.5398, decode.d5.loss_cls: 0.0678, decode.d5.loss_mask: 0.1976, decode.d5.loss_dice: 0.5347, decode.d6.loss_cls: 0.0588, decode.d6.loss_mask: 0.1972, decode.d6.loss_dice: 0.5317, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.1977, decode.d7.loss_dice: 0.5347, decode.d8.loss_cls: 0.0608, decode.d8.loss_mask: 0.1973, decode.d8.loss_dice: 0.5349, loss: 8.2353 +2022-05-10 20:44:58,758 - mmseg - INFO - Iter [48900/80000] lr: 5.582e-07, eta: 2 days, 11:58:45, time: 1.839, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0570, decode.loss_mask: 0.1934, decode.loss_dice: 0.5448, decode.d0.loss_cls: 0.2998, decode.d0.loss_mask: 0.2004, decode.d0.loss_dice: 0.5708, decode.d1.loss_cls: 0.0810, decode.d1.loss_mask: 0.1941, decode.d1.loss_dice: 0.5554, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.1934, decode.d2.loss_dice: 0.5510, decode.d3.loss_cls: 0.0634, decode.d3.loss_mask: 0.1931, decode.d3.loss_dice: 0.5483, decode.d4.loss_cls: 0.0651, decode.d4.loss_mask: 0.1939, decode.d4.loss_dice: 0.5460, decode.d5.loss_cls: 0.0654, decode.d5.loss_mask: 0.1939, decode.d5.loss_dice: 0.5448, decode.d6.loss_cls: 0.0585, decode.d6.loss_mask: 0.1938, decode.d6.loss_dice: 0.5431, decode.d7.loss_cls: 0.0629, decode.d7.loss_mask: 0.1936, decode.d7.loss_dice: 0.5433, decode.d8.loss_cls: 0.0606, decode.d8.loss_mask: 0.1935, decode.d8.loss_dice: 0.5465, loss: 8.3226 +2022-05-10 20:46:32,212 - mmseg - INFO - Iter [48950/80000] lr: 5.573e-07, eta: 2 days, 9:34:55, time: 1.869, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1925, decode.loss_dice: 0.5322, decode.d0.loss_cls: 0.3068, decode.d0.loss_mask: 0.2003, decode.d0.loss_dice: 0.5524, decode.d1.loss_cls: 0.0689, decode.d1.loss_mask: 0.1955, 
decode.d1.loss_dice: 0.5400, decode.d2.loss_cls: 0.0633, decode.d2.loss_mask: 0.1939, decode.d2.loss_dice: 0.5390, decode.d3.loss_cls: 0.0648, decode.d3.loss_mask: 0.1932, decode.d3.loss_dice: 0.5357, decode.d4.loss_cls: 0.0625, decode.d4.loss_mask: 0.1936, decode.d4.loss_dice: 0.5350, decode.d5.loss_cls: 0.0571, decode.d5.loss_mask: 0.1938, decode.d5.loss_dice: 0.5346, decode.d6.loss_cls: 0.0610, decode.d6.loss_mask: 0.1937, decode.d6.loss_dice: 0.5323, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1937, decode.d7.loss_dice: 0.5330, decode.d8.loss_cls: 0.0636, decode.d8.loss_mask: 0.1934, decode.d8.loss_dice: 0.5352, loss: 8.1763 +2022-05-10 20:48:02,837 - mmseg - INFO - Saving checkpoint at 49000 iterations +2022-05-10 20:48:36,277 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 20:48:36,287 - mmseg - INFO - Iter [49000/80000] lr: 5.564e-07, eta: 2 days, 7:41:02, time: 2.479, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0552, decode.loss_mask: 0.1915, decode.loss_dice: 0.5309, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.1972, decode.d0.loss_dice: 0.5595, decode.d1.loss_cls: 0.0809, decode.d1.loss_mask: 0.1929, decode.d1.loss_dice: 0.5376, decode.d2.loss_cls: 0.0686, decode.d2.loss_mask: 0.1919, decode.d2.loss_dice: 0.5350, decode.d3.loss_cls: 0.0685, decode.d3.loss_mask: 0.1920, decode.d3.loss_dice: 0.5372, decode.d4.loss_cls: 0.0583, decode.d4.loss_mask: 0.1921, decode.d4.loss_dice: 0.5343, decode.d5.loss_cls: 0.0635, decode.d5.loss_mask: 0.1920, decode.d5.loss_dice: 0.5318, decode.d6.loss_cls: 0.0637, decode.d6.loss_mask: 0.1920, decode.d6.loss_dice: 0.5320, decode.d7.loss_cls: 0.0638, decode.d7.loss_mask: 0.1917, decode.d7.loss_dice: 0.5289, decode.d8.loss_cls: 0.0641, decode.d8.loss_mask: 0.1913, decode.d8.loss_dice: 0.5350, loss: 8.1713 +2022-05-10 20:50:32,383 - mmseg - INFO - per class results: +2022-05-10 20:50:32,391 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc 
| ++---------------+-------+-------+ +| road | 98.5 | 99.08 | +| sidewalk | 87.99 | 94.02 | +| building | 94.37 | 96.96 | +| wall | 68.7 | 78.07 | +| fence | 77.91 | 87.0 | +| pole | 71.0 | 83.26 | +| traffic light | 77.0 | 88.41 | +| traffic sign | 83.78 | 90.57 | +| vegetation | 93.26 | 96.9 | +| terrain | 67.65 | 78.76 | +| sky | 95.78 | 98.44 | +| person | 86.79 | 93.77 | +| rider | 74.17 | 87.07 | +| car | 96.17 | 98.32 | +| truck | 92.1 | 94.96 | +| bus | 93.81 | 96.81 | +| train | 88.03 | 90.75 | +| motorcycle | 78.01 | 88.38 | +| bicycle | 82.6 | 90.94 | ++---------------+-------+-------+ +2022-05-10 20:50:32,391 - mmseg - INFO - Summary: +2022-05-10 20:50:32,392 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.97 | 84.61 | 91.18 | ++-------+-------+-------+ +2022-05-10 20:50:32,393 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_48000.pth was removed +2022-05-10 20:51:06,066 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_49000.pth. +2022-05-10 20:51:06,079 - mmseg - INFO - Best mIoU is 0.8461 at 49000 iter. 
+2022-05-10 20:51:06,088 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 20:51:06,089 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8461, mAcc: 0.9118, IoU.road: 0.9850, IoU.sidewalk: 0.8799, IoU.building: 0.9437, IoU.wall: 0.6870, IoU.fence: 0.7791, IoU.pole: 0.7100, IoU.traffic light: 0.7700, IoU.traffic sign: 0.8378, IoU.vegetation: 0.9326, IoU.terrain: 0.6765, IoU.sky: 0.9578, IoU.person: 0.8679, IoU.rider: 0.7417, IoU.car: 0.9617, IoU.truck: 0.9210, IoU.bus: 0.9381, IoU.train: 0.8803, IoU.motorcycle: 0.7801, IoU.bicycle: 0.8260, Acc.road: 0.9908, Acc.sidewalk: 0.9402, Acc.building: 0.9696, Acc.wall: 0.7807, Acc.fence: 0.8700, Acc.pole: 0.8326, Acc.traffic light: 0.8841, Acc.traffic sign: 0.9057, Acc.vegetation: 0.9690, Acc.terrain: 0.7876, Acc.sky: 0.9844, Acc.person: 0.9377, Acc.rider: 0.8707, Acc.car: 0.9832, Acc.truck: 0.9496, Acc.bus: 0.9681, Acc.train: 0.9075, Acc.motorcycle: 0.8838, Acc.bicycle: 0.9094 +2022-05-10 20:52:39,173 - mmseg - INFO - Iter [49050/80000] lr: 5.555e-07, eta: 2 days, 6:56:14, time: 4.860, data_time: 3.017, memory: 69063, decode.loss_cls: 0.0690, decode.loss_mask: 0.1937, decode.loss_dice: 0.5451, decode.d0.loss_cls: 0.3100, decode.d0.loss_mask: 0.2012, decode.d0.loss_dice: 0.5772, decode.d1.loss_cls: 0.0835, decode.d1.loss_mask: 0.1956, decode.d1.loss_dice: 0.5539, decode.d2.loss_cls: 0.0717, decode.d2.loss_mask: 0.1945, decode.d2.loss_dice: 0.5527, decode.d3.loss_cls: 0.0682, decode.d3.loss_mask: 0.1939, decode.d3.loss_dice: 0.5486, decode.d4.loss_cls: 0.0711, decode.d4.loss_mask: 0.1940, decode.d4.loss_dice: 0.5487, decode.d5.loss_cls: 0.0731, decode.d5.loss_mask: 0.1940, decode.d5.loss_dice: 0.5483, decode.d6.loss_cls: 0.0606, decode.d6.loss_mask: 0.1946, decode.d6.loss_dice: 0.5478, decode.d7.loss_cls: 0.0682, decode.d7.loss_mask: 0.1940, decode.d7.loss_dice: 0.5467, decode.d8.loss_cls: 0.0685, decode.d8.loss_mask: 0.1940, decode.d8.loss_dice: 0.5474, loss: 8.4098 
+2022-05-10 20:54:09,804 - mmseg - INFO - Iter [49100/80000] lr: 5.546e-07, eta: 2 days, 5:03:51, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0748, decode.loss_mask: 0.1997, decode.loss_dice: 0.5327, decode.d0.loss_cls: 0.3096, decode.d0.loss_mask: 0.2090, decode.d0.loss_dice: 0.5631, decode.d1.loss_cls: 0.0941, decode.d1.loss_mask: 0.2022, decode.d1.loss_dice: 0.5451, decode.d2.loss_cls: 0.0887, decode.d2.loss_mask: 0.2009, decode.d2.loss_dice: 0.5401, decode.d3.loss_cls: 0.0792, decode.d3.loss_mask: 0.2000, decode.d3.loss_dice: 0.5368, decode.d4.loss_cls: 0.0769, decode.d4.loss_mask: 0.2006, decode.d4.loss_dice: 0.5322, decode.d5.loss_cls: 0.0754, decode.d5.loss_mask: 0.2003, decode.d5.loss_dice: 0.5368, decode.d6.loss_cls: 0.0764, decode.d6.loss_mask: 0.1999, decode.d6.loss_dice: 0.5379, decode.d7.loss_cls: 0.0786, decode.d7.loss_mask: 0.2002, decode.d7.loss_dice: 0.5401, decode.d8.loss_cls: 0.0768, decode.d8.loss_mask: 0.1996, decode.d8.loss_dice: 0.5326, loss: 8.4404 +2022-05-10 20:55:44,090 - mmseg - INFO - Iter [49150/80000] lr: 5.537e-07, eta: 2 days, 3:22:43, time: 1.886, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0590, decode.loss_mask: 0.1912, decode.loss_dice: 0.5322, decode.d0.loss_cls: 0.2979, decode.d0.loss_mask: 0.1982, decode.d0.loss_dice: 0.5562, decode.d1.loss_cls: 0.0804, decode.d1.loss_mask: 0.1928, decode.d1.loss_dice: 0.5438, decode.d2.loss_cls: 0.0665, decode.d2.loss_mask: 0.1921, decode.d2.loss_dice: 0.5411, decode.d3.loss_cls: 0.0629, decode.d3.loss_mask: 0.1919, decode.d3.loss_dice: 0.5398, decode.d4.loss_cls: 0.0592, decode.d4.loss_mask: 0.1914, decode.d4.loss_dice: 0.5356, decode.d5.loss_cls: 0.0588, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5391, decode.d6.loss_cls: 0.0611, decode.d6.loss_mask: 0.1920, decode.d6.loss_dice: 0.5375, decode.d7.loss_cls: 0.0562, decode.d7.loss_mask: 0.1917, decode.d7.loss_dice: 0.5344, decode.d8.loss_cls: 0.0579, decode.d8.loss_mask: 0.1914, decode.d8.loss_dice: 
0.5343, loss: 8.1778 +2022-05-10 20:57:13,677 - mmseg - INFO - Iter [49200/80000] lr: 5.528e-07, eta: 2 days, 1:47:53, time: 1.792, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0703, decode.loss_mask: 0.1929, decode.loss_dice: 0.5429, decode.d0.loss_cls: 0.2981, decode.d0.loss_mask: 0.2001, decode.d0.loss_dice: 0.5642, decode.d1.loss_cls: 0.0843, decode.d1.loss_mask: 0.1943, decode.d1.loss_dice: 0.5473, decode.d2.loss_cls: 0.0764, decode.d2.loss_mask: 0.1935, decode.d2.loss_dice: 0.5490, decode.d3.loss_cls: 0.0704, decode.d3.loss_mask: 0.1936, decode.d3.loss_dice: 0.5428, decode.d4.loss_cls: 0.0674, decode.d4.loss_mask: 0.1932, decode.d4.loss_dice: 0.5383, decode.d5.loss_cls: 0.0700, decode.d5.loss_mask: 0.1928, decode.d5.loss_dice: 0.5406, decode.d6.loss_cls: 0.0662, decode.d6.loss_mask: 0.1929, decode.d6.loss_dice: 0.5381, decode.d7.loss_cls: 0.0607, decode.d7.loss_mask: 0.1930, decode.d7.loss_dice: 0.5402, decode.d8.loss_cls: 0.0633, decode.d8.loss_mask: 0.1927, decode.d8.loss_dice: 0.5384, loss: 8.3080 +2022-05-10 20:58:44,313 - mmseg - INFO - Iter [49250/80000] lr: 5.519e-07, eta: 2 days, 0:20:57, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0687, decode.loss_mask: 0.1981, decode.loss_dice: 0.5404, decode.d0.loss_cls: 0.3112, decode.d0.loss_mask: 0.2056, decode.d0.loss_dice: 0.5655, decode.d1.loss_cls: 0.0868, decode.d1.loss_mask: 0.1992, decode.d1.loss_dice: 0.5453, decode.d2.loss_cls: 0.0771, decode.d2.loss_mask: 0.1990, decode.d2.loss_dice: 0.5468, decode.d3.loss_cls: 0.0727, decode.d3.loss_mask: 0.1980, decode.d3.loss_dice: 0.5421, decode.d4.loss_cls: 0.0784, decode.d4.loss_mask: 0.1977, decode.d4.loss_dice: 0.5448, decode.d5.loss_cls: 0.0694, decode.d5.loss_mask: 0.1978, decode.d5.loss_dice: 0.5395, decode.d6.loss_cls: 0.0701, decode.d6.loss_mask: 0.1982, decode.d6.loss_dice: 0.5368, decode.d7.loss_cls: 0.0698, decode.d7.loss_mask: 0.1982, decode.d7.loss_dice: 0.5391, decode.d8.loss_cls: 0.0695, decode.d8.loss_mask: 0.1982, 
decode.d8.loss_dice: 0.5417, loss: 8.4057 +2022-05-10 21:00:15,925 - mmseg - INFO - Iter [49300/80000] lr: 5.510e-07, eta: 1 day, 23:00:56, time: 1.831, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0666, decode.loss_mask: 0.1945, decode.loss_dice: 0.5312, decode.d0.loss_cls: 0.3087, decode.d0.loss_mask: 0.2035, decode.d0.loss_dice: 0.5563, decode.d1.loss_cls: 0.0832, decode.d1.loss_mask: 0.1968, decode.d1.loss_dice: 0.5394, decode.d2.loss_cls: 0.0752, decode.d2.loss_mask: 0.1962, decode.d2.loss_dice: 0.5357, decode.d3.loss_cls: 0.0666, decode.d3.loss_mask: 0.1950, decode.d3.loss_dice: 0.5283, decode.d4.loss_cls: 0.0678, decode.d4.loss_mask: 0.1948, decode.d4.loss_dice: 0.5311, decode.d5.loss_cls: 0.0711, decode.d5.loss_mask: 0.1949, decode.d5.loss_dice: 0.5320, decode.d6.loss_cls: 0.0640, decode.d6.loss_mask: 0.1944, decode.d6.loss_dice: 0.5280, decode.d7.loss_cls: 0.0700, decode.d7.loss_mask: 0.1940, decode.d7.loss_dice: 0.5286, decode.d8.loss_cls: 0.0668, decode.d8.loss_mask: 0.1942, decode.d8.loss_dice: 0.5301, loss: 8.2389 +2022-05-10 21:01:48,251 - mmseg - INFO - Iter [49350/80000] lr: 5.501e-07, eta: 1 day, 21:47:02, time: 1.847, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0715, decode.loss_mask: 0.1987, decode.loss_dice: 0.5418, decode.d0.loss_cls: 0.3127, decode.d0.loss_mask: 0.2090, decode.d0.loss_dice: 0.5645, decode.d1.loss_cls: 0.0913, decode.d1.loss_mask: 0.1994, decode.d1.loss_dice: 0.5472, decode.d2.loss_cls: 0.0811, decode.d2.loss_mask: 0.1993, decode.d2.loss_dice: 0.5440, decode.d3.loss_cls: 0.0753, decode.d3.loss_mask: 0.1992, decode.d3.loss_dice: 0.5437, decode.d4.loss_cls: 0.0718, decode.d4.loss_mask: 0.1993, decode.d4.loss_dice: 0.5406, decode.d5.loss_cls: 0.0742, decode.d5.loss_mask: 0.1998, decode.d5.loss_dice: 0.5393, decode.d6.loss_cls: 0.0732, decode.d6.loss_mask: 0.1988, decode.d6.loss_dice: 0.5383, decode.d7.loss_cls: 0.0684, decode.d7.loss_mask: 0.1993, decode.d7.loss_dice: 0.5431, decode.d8.loss_cls: 0.0721, 
decode.d8.loss_mask: 0.1987, decode.d8.loss_dice: 0.5418, loss: 8.4373 +2022-05-10 21:03:18,059 - mmseg - INFO - Iter [49400/80000] lr: 5.492e-07, eta: 1 day, 20:37:22, time: 1.796, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0615, decode.loss_mask: 0.1938, decode.loss_dice: 0.5414, decode.d0.loss_cls: 0.2988, decode.d0.loss_mask: 0.2018, decode.d0.loss_dice: 0.5680, decode.d1.loss_cls: 0.0872, decode.d1.loss_mask: 0.1955, decode.d1.loss_dice: 0.5514, decode.d2.loss_cls: 0.0738, decode.d2.loss_mask: 0.1946, decode.d2.loss_dice: 0.5432, decode.d3.loss_cls: 0.0646, decode.d3.loss_mask: 0.1940, decode.d3.loss_dice: 0.5433, decode.d4.loss_cls: 0.0667, decode.d4.loss_mask: 0.1948, decode.d4.loss_dice: 0.5413, decode.d5.loss_cls: 0.0678, decode.d5.loss_mask: 0.1940, decode.d5.loss_dice: 0.5429, decode.d6.loss_cls: 0.0666, decode.d6.loss_mask: 0.1940, decode.d6.loss_dice: 0.5390, decode.d7.loss_cls: 0.0607, decode.d7.loss_mask: 0.1939, decode.d7.loss_dice: 0.5401, decode.d8.loss_cls: 0.0650, decode.d8.loss_mask: 0.1940, decode.d8.loss_dice: 0.5420, loss: 8.3158 +2022-05-10 21:04:49,083 - mmseg - INFO - Iter [49450/80000] lr: 5.483e-07, eta: 1 day, 19:32:49, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0524, decode.loss_mask: 0.1992, decode.loss_dice: 0.5204, decode.d0.loss_cls: 0.2939, decode.d0.loss_mask: 0.2069, decode.d0.loss_dice: 0.5436, decode.d1.loss_cls: 0.0734, decode.d1.loss_mask: 0.2004, decode.d1.loss_dice: 0.5262, decode.d2.loss_cls: 0.0648, decode.d2.loss_mask: 0.1997, decode.d2.loss_dice: 0.5212, decode.d3.loss_cls: 0.0531, decode.d3.loss_mask: 0.1991, decode.d3.loss_dice: 0.5210, decode.d4.loss_cls: 0.0528, decode.d4.loss_mask: 0.1994, decode.d4.loss_dice: 0.5214, decode.d5.loss_cls: 0.0530, decode.d5.loss_mask: 0.1992, decode.d5.loss_dice: 0.5212, decode.d6.loss_cls: 0.0584, decode.d6.loss_mask: 0.1992, decode.d6.loss_dice: 0.5194, decode.d7.loss_cls: 0.0569, decode.d7.loss_mask: 0.1991, decode.d7.loss_dice: 0.5178, 
decode.d8.loss_cls: 0.0586, decode.d8.loss_mask: 0.1990, decode.d8.loss_dice: 0.5194, loss: 8.0500 +2022-05-10 21:06:21,996 - mmseg - INFO - Iter [49500/80000] lr: 5.474e-07, eta: 1 day, 18:33:06, time: 1.857, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0664, decode.loss_mask: 0.1960, decode.loss_dice: 0.5250, decode.d0.loss_cls: 0.3077, decode.d0.loss_mask: 0.2064, decode.d0.loss_dice: 0.5536, decode.d1.loss_cls: 0.0900, decode.d1.loss_mask: 0.1985, decode.d1.loss_dice: 0.5345, decode.d2.loss_cls: 0.0742, decode.d2.loss_mask: 0.1965, decode.d2.loss_dice: 0.5257, decode.d3.loss_cls: 0.0695, decode.d3.loss_mask: 0.1969, decode.d3.loss_dice: 0.5272, decode.d4.loss_cls: 0.0705, decode.d4.loss_mask: 0.1968, decode.d4.loss_dice: 0.5265, decode.d5.loss_cls: 0.0689, decode.d5.loss_mask: 0.1970, decode.d5.loss_dice: 0.5264, decode.d6.loss_cls: 0.0655, decode.d6.loss_mask: 0.1965, decode.d6.loss_dice: 0.5254, decode.d7.loss_cls: 0.0663, decode.d7.loss_mask: 0.1969, decode.d7.loss_dice: 0.5269, decode.d8.loss_cls: 0.0693, decode.d8.loss_mask: 0.1963, decode.d8.loss_dice: 0.5255, loss: 8.2229 +2022-05-10 21:07:51,187 - mmseg - INFO - Iter [49550/80000] lr: 5.465e-07, eta: 1 day, 17:35:56, time: 1.785, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0643, decode.loss_mask: 0.1903, decode.loss_dice: 0.5418, decode.d0.loss_cls: 0.2995, decode.d0.loss_mask: 0.1986, decode.d0.loss_dice: 0.5681, decode.d1.loss_cls: 0.0899, decode.d1.loss_mask: 0.1919, decode.d1.loss_dice: 0.5516, decode.d2.loss_cls: 0.0767, decode.d2.loss_mask: 0.1901, decode.d2.loss_dice: 0.5472, decode.d3.loss_cls: 0.0717, decode.d3.loss_mask: 0.1907, decode.d3.loss_dice: 0.5430, decode.d4.loss_cls: 0.0787, decode.d4.loss_mask: 0.1903, decode.d4.loss_dice: 0.5395, decode.d5.loss_cls: 0.0783, decode.d5.loss_mask: 0.1904, decode.d5.loss_dice: 0.5394, decode.d6.loss_cls: 0.0705, decode.d6.loss_mask: 0.1904, decode.d6.loss_dice: 0.5422, decode.d7.loss_cls: 0.0669, decode.d7.loss_mask: 0.1904, 
decode.d7.loss_dice: 0.5385, decode.d8.loss_cls: 0.0733, decode.d8.loss_mask: 0.1898, decode.d8.loss_dice: 0.5405, loss: 8.3346 +2022-05-10 21:09:21,346 - mmseg - INFO - Iter [49600/80000] lr: 5.456e-07, eta: 1 day, 16:42:33, time: 1.803, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0604, decode.loss_mask: 0.1964, decode.loss_dice: 0.5389, decode.d0.loss_cls: 0.3016, decode.d0.loss_mask: 0.2059, decode.d0.loss_dice: 0.5612, decode.d1.loss_cls: 0.0840, decode.d1.loss_mask: 0.1978, decode.d1.loss_dice: 0.5435, decode.d2.loss_cls: 0.0692, decode.d2.loss_mask: 0.1970, decode.d2.loss_dice: 0.5410, decode.d3.loss_cls: 0.0613, decode.d3.loss_mask: 0.1971, decode.d3.loss_dice: 0.5381, decode.d4.loss_cls: 0.0626, decode.d4.loss_mask: 0.1970, decode.d4.loss_dice: 0.5347, decode.d5.loss_cls: 0.0621, decode.d5.loss_mask: 0.1964, decode.d5.loss_dice: 0.5367, decode.d6.loss_cls: 0.0604, decode.d6.loss_mask: 0.1966, decode.d6.loss_dice: 0.5398, decode.d7.loss_cls: 0.0578, decode.d7.loss_mask: 0.1965, decode.d7.loss_dice: 0.5366, decode.d8.loss_cls: 0.0583, decode.d8.loss_mask: 0.1965, decode.d8.loss_dice: 0.5358, loss: 8.2611 +2022-05-10 21:10:52,482 - mmseg - INFO - Iter [49650/80000] lr: 5.447e-07, eta: 1 day, 15:52:36, time: 1.823, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0586, decode.loss_mask: 0.1965, decode.loss_dice: 0.5324, decode.d0.loss_cls: 0.3012, decode.d0.loss_mask: 0.2054, decode.d0.loss_dice: 0.5559, decode.d1.loss_cls: 0.0749, decode.d1.loss_mask: 0.1977, decode.d1.loss_dice: 0.5392, decode.d2.loss_cls: 0.0656, decode.d2.loss_mask: 0.1970, decode.d2.loss_dice: 0.5401, decode.d3.loss_cls: 0.0664, decode.d3.loss_mask: 0.1970, decode.d3.loss_dice: 0.5360, decode.d4.loss_cls: 0.0627, decode.d4.loss_mask: 0.1969, decode.d4.loss_dice: 0.5325, decode.d5.loss_cls: 0.0625, decode.d5.loss_mask: 0.1969, decode.d5.loss_dice: 0.5324, decode.d6.loss_cls: 0.0567, decode.d6.loss_mask: 0.1962, decode.d6.loss_dice: 0.5348, decode.d7.loss_cls: 0.0619, 
decode.d7.loss_mask: 0.1965, decode.d7.loss_dice: 0.5324, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1959, decode.d8.loss_dice: 0.5343, loss: 8.2114 +2022-05-10 21:12:25,804 - mmseg - INFO - Iter [49700/80000] lr: 5.438e-07, eta: 1 day, 15:06:09, time: 1.866, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0605, decode.loss_mask: 0.1901, decode.loss_dice: 0.5359, decode.d0.loss_cls: 0.3046, decode.d0.loss_mask: 0.1964, decode.d0.loss_dice: 0.5577, decode.d1.loss_cls: 0.0781, decode.d1.loss_mask: 0.1915, decode.d1.loss_dice: 0.5426, decode.d2.loss_cls: 0.0642, decode.d2.loss_mask: 0.1908, decode.d2.loss_dice: 0.5364, decode.d3.loss_cls: 0.0644, decode.d3.loss_mask: 0.1906, decode.d3.loss_dice: 0.5340, decode.d4.loss_cls: 0.0610, decode.d4.loss_mask: 0.1904, decode.d4.loss_dice: 0.5311, decode.d5.loss_cls: 0.0574, decode.d5.loss_mask: 0.1910, decode.d5.loss_dice: 0.5362, decode.d6.loss_cls: 0.0582, decode.d6.loss_mask: 0.1909, decode.d6.loss_dice: 0.5340, decode.d7.loss_cls: 0.0627, decode.d7.loss_mask: 0.1909, decode.d7.loss_dice: 0.5349, decode.d8.loss_cls: 0.0620, decode.d8.loss_mask: 0.1903, decode.d8.loss_dice: 0.5353, loss: 8.1640 +2022-05-10 21:13:57,217 - mmseg - INFO - Iter [49750/80000] lr: 5.429e-07, eta: 1 day, 14:21:43, time: 1.828, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0625, decode.loss_mask: 0.2011, decode.loss_dice: 0.5204, decode.d0.loss_cls: 0.2988, decode.d0.loss_mask: 0.2103, decode.d0.loss_dice: 0.5505, decode.d1.loss_cls: 0.0795, decode.d1.loss_mask: 0.2021, decode.d1.loss_dice: 0.5344, decode.d2.loss_cls: 0.0698, decode.d2.loss_mask: 0.2017, decode.d2.loss_dice: 0.5248, decode.d3.loss_cls: 0.0658, decode.d3.loss_mask: 0.2009, decode.d3.loss_dice: 0.5224, decode.d4.loss_cls: 0.0633, decode.d4.loss_mask: 0.2018, decode.d4.loss_dice: 0.5215, decode.d5.loss_cls: 0.0675, decode.d5.loss_mask: 0.2013, decode.d5.loss_dice: 0.5215, decode.d6.loss_cls: 0.0630, decode.d6.loss_mask: 0.2014, decode.d6.loss_dice: 0.5222, 
decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.2013, decode.d7.loss_dice: 0.5213, decode.d8.loss_cls: 0.0661, decode.d8.loss_mask: 0.2013, decode.d8.loss_dice: 0.5263, loss: 8.1893 +2022-05-10 21:15:28,102 - mmseg - INFO - Iter [49800/80000] lr: 5.420e-07, eta: 1 day, 13:39:30, time: 1.817, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0632, decode.loss_mask: 0.1993, decode.loss_dice: 0.5424, decode.d0.loss_cls: 0.3088, decode.d0.loss_mask: 0.2089, decode.d0.loss_dice: 0.5701, decode.d1.loss_cls: 0.0784, decode.d1.loss_mask: 0.2015, decode.d1.loss_dice: 0.5491, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.2009, decode.d2.loss_dice: 0.5434, decode.d3.loss_cls: 0.0674, decode.d3.loss_mask: 0.2003, decode.d3.loss_dice: 0.5428, decode.d4.loss_cls: 0.0668, decode.d4.loss_mask: 0.1998, decode.d4.loss_dice: 0.5452, decode.d5.loss_cls: 0.0650, decode.d5.loss_mask: 0.2005, decode.d5.loss_dice: 0.5444, decode.d6.loss_cls: 0.0640, decode.d6.loss_mask: 0.1999, decode.d6.loss_dice: 0.5374, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.1998, decode.d7.loss_dice: 0.5404, decode.d8.loss_cls: 0.0617, decode.d8.loss_mask: 0.1994, decode.d8.loss_dice: 0.5401, loss: 8.3735 +2022-05-10 21:16:58,735 - mmseg - INFO - Iter [49850/80000] lr: 5.411e-07, eta: 1 day, 12:59:27, time: 1.814, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0640, decode.loss_mask: 0.1947, decode.loss_dice: 0.5290, decode.d0.loss_cls: 0.3014, decode.d0.loss_mask: 0.2010, decode.d0.loss_dice: 0.5551, decode.d1.loss_cls: 0.0833, decode.d1.loss_mask: 0.1952, decode.d1.loss_dice: 0.5399, decode.d2.loss_cls: 0.0739, decode.d2.loss_mask: 0.1942, decode.d2.loss_dice: 0.5328, decode.d3.loss_cls: 0.0599, decode.d3.loss_mask: 0.1946, decode.d3.loss_dice: 0.5291, decode.d4.loss_cls: 0.0577, decode.d4.loss_mask: 0.1941, decode.d4.loss_dice: 0.5332, decode.d5.loss_cls: 0.0622, decode.d5.loss_mask: 0.1941, decode.d5.loss_dice: 0.5295, decode.d6.loss_cls: 0.0615, decode.d6.loss_mask: 0.1944, 
decode.d6.loss_dice: 0.5301, decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.1936, decode.d7.loss_dice: 0.5302, decode.d8.loss_cls: 0.0596, decode.d8.loss_mask: 0.1941, decode.d8.loss_dice: 0.5281, loss: 8.1750 +2022-05-10 21:18:30,840 - mmseg - INFO - Iter [49900/80000] lr: 5.402e-07, eta: 1 day, 12:21:48, time: 1.842, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0589, decode.loss_mask: 0.1881, decode.loss_dice: 0.5408, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1966, decode.d0.loss_dice: 0.5673, decode.d1.loss_cls: 0.0778, decode.d1.loss_mask: 0.1900, decode.d1.loss_dice: 0.5498, decode.d2.loss_cls: 0.0749, decode.d2.loss_mask: 0.1892, decode.d2.loss_dice: 0.5470, decode.d3.loss_cls: 0.0656, decode.d3.loss_mask: 0.1886, decode.d3.loss_dice: 0.5443, decode.d4.loss_cls: 0.0617, decode.d4.loss_mask: 0.1880, decode.d4.loss_dice: 0.5459, decode.d5.loss_cls: 0.0649, decode.d5.loss_mask: 0.1884, decode.d5.loss_dice: 0.5373, decode.d6.loss_cls: 0.0596, decode.d6.loss_mask: 0.1884, decode.d6.loss_dice: 0.5392, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.1876, decode.d7.loss_dice: 0.5367, decode.d8.loss_cls: 0.0661, decode.d8.loss_mask: 0.1881, decode.d8.loss_dice: 0.5379, loss: 8.2236 +2022-05-10 21:20:01,705 - mmseg - INFO - Iter [49950/80000] lr: 5.393e-07, eta: 1 day, 11:45:40, time: 1.817, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0540, decode.loss_mask: 0.1905, decode.loss_dice: 0.5317, decode.d0.loss_cls: 0.2987, decode.d0.loss_mask: 0.1968, decode.d0.loss_dice: 0.5580, decode.d1.loss_cls: 0.0764, decode.d1.loss_mask: 0.1918, decode.d1.loss_dice: 0.5443, decode.d2.loss_cls: 0.0668, decode.d2.loss_mask: 0.1918, decode.d2.loss_dice: 0.5346, decode.d3.loss_cls: 0.0590, decode.d3.loss_mask: 0.1910, decode.d3.loss_dice: 0.5325, decode.d4.loss_cls: 0.0602, decode.d4.loss_mask: 0.1908, decode.d4.loss_dice: 0.5346, decode.d5.loss_cls: 0.0593, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5342, decode.d6.loss_cls: 0.0586, 
decode.d6.loss_mask: 0.1913, decode.d6.loss_dice: 0.5327, decode.d7.loss_cls: 0.0639, decode.d7.loss_mask: 0.1910, decode.d7.loss_dice: 0.5329, decode.d8.loss_cls: 0.0606, decode.d8.loss_mask: 0.1909, decode.d8.loss_dice: 0.5315, loss: 8.1416 +2022-05-10 21:21:31,506 - mmseg - INFO - Saving checkpoint at 50000 iterations +2022-05-10 21:22:05,930 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 21:22:05,934 - mmseg - INFO - Iter [50000/80000] lr: 5.384e-07, eta: 1 day, 11:19:35, time: 2.482, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0598, decode.loss_mask: 0.1949, decode.loss_dice: 0.5337, decode.d0.loss_cls: 0.3052, decode.d0.loss_mask: 0.2045, decode.d0.loss_dice: 0.5555, decode.d1.loss_cls: 0.0713, decode.d1.loss_mask: 0.1970, decode.d1.loss_dice: 0.5416, decode.d2.loss_cls: 0.0630, decode.d2.loss_mask: 0.1963, decode.d2.loss_dice: 0.5387, decode.d3.loss_cls: 0.0567, decode.d3.loss_mask: 0.1957, decode.d3.loss_dice: 0.5377, decode.d4.loss_cls: 0.0560, decode.d4.loss_mask: 0.1955, decode.d4.loss_dice: 0.5349, decode.d5.loss_cls: 0.0595, decode.d5.loss_mask: 0.1952, decode.d5.loss_dice: 0.5331, decode.d6.loss_cls: 0.0582, decode.d6.loss_mask: 0.1956, decode.d6.loss_dice: 0.5330, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.1949, decode.d7.loss_dice: 0.5320, decode.d8.loss_cls: 0.0564, decode.d8.loss_mask: 0.1951, decode.d8.loss_dice: 0.5331, loss: 8.1786 +2022-05-10 21:24:02,066 - mmseg - INFO - per class results: +2022-05-10 21:24:02,070 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.49 | 99.26 | +| sidewalk | 87.79 | 93.03 | +| building | 94.21 | 96.96 | +| wall | 70.66 | 81.01 | +| fence | 74.33 | 81.93 | +| pole | 71.34 | 83.04 | +| traffic light | 77.13 | 87.97 | +| traffic sign | 83.91 | 90.81 | +| vegetation | 93.38 | 97.04 | +| terrain | 68.93 | 76.79 | +| sky | 95.68 | 98.55 | +| person | 86.8 | 93.85 | +| rider | 74.42 
| 85.38 | +| car | 96.16 | 98.27 | +| truck | 92.6 | 95.13 | +| bus | 93.45 | 96.52 | +| train | 87.82 | 90.32 | +| motorcycle | 78.68 | 87.54 | +| bicycle | 82.81 | 91.31 | ++---------------+-------+-------+ +2022-05-10 21:24:02,070 - mmseg - INFO - Summary: +2022-05-10 21:24:02,071 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.97 | 84.66 | 90.77 | ++-------+-------+-------+ +2022-05-10 21:24:02,072 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_49000.pth was removed +2022-05-10 21:24:33,008 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_50000.pth. +2022-05-10 21:24:33,020 - mmseg - INFO - Best mIoU is 0.8466 at 50000 iter. +2022-05-10 21:24:33,030 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 21:24:33,030 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8466, mAcc: 0.9077, IoU.road: 0.9849, IoU.sidewalk: 0.8779, IoU.building: 0.9421, IoU.wall: 0.7066, IoU.fence: 0.7433, IoU.pole: 0.7134, IoU.traffic light: 0.7713, IoU.traffic sign: 0.8391, IoU.vegetation: 0.9338, IoU.terrain: 0.6893, IoU.sky: 0.9568, IoU.person: 0.8680, IoU.rider: 0.7442, IoU.car: 0.9616, IoU.truck: 0.9260, IoU.bus: 0.9345, IoU.train: 0.8782, IoU.motorcycle: 0.7868, IoU.bicycle: 0.8281, Acc.road: 0.9926, Acc.sidewalk: 0.9303, Acc.building: 0.9696, Acc.wall: 0.8101, Acc.fence: 0.8193, Acc.pole: 0.8304, Acc.traffic light: 0.8797, Acc.traffic sign: 0.9081, Acc.vegetation: 0.9704, Acc.terrain: 0.7679, Acc.sky: 0.9855, Acc.person: 0.9385, Acc.rider: 0.8538, Acc.car: 0.9827, Acc.truck: 0.9513, Acc.bus: 0.9652, Acc.train: 0.9032, Acc.motorcycle: 0.8754, Acc.bicycle: 0.9131 +2022-05-10 21:26:07,172 - mmseg - INFO - Iter [50050/80000] lr: 5.375e-07, eta: 1 day, 11:23:13, time: 4.827, data_time: 3.012, memory: 69063, decode.loss_cls: 0.0517, 
decode.loss_mask: 0.2014, decode.loss_dice: 0.5363, decode.d0.loss_cls: 0.2878, decode.d0.loss_mask: 0.2100, decode.d0.loss_dice: 0.5589, decode.d1.loss_cls: 0.0695, decode.d1.loss_mask: 0.2022, decode.d1.loss_dice: 0.5433, decode.d2.loss_cls: 0.0692, decode.d2.loss_mask: 0.2016, decode.d2.loss_dice: 0.5362, decode.d3.loss_cls: 0.0589, decode.d3.loss_mask: 0.2014, decode.d3.loss_dice: 0.5346, decode.d4.loss_cls: 0.0598, decode.d4.loss_mask: 0.2014, decode.d4.loss_dice: 0.5329, decode.d5.loss_cls: 0.0644, decode.d5.loss_mask: 0.2018, decode.d5.loss_dice: 0.5343, decode.d6.loss_cls: 0.0564, decode.d6.loss_mask: 0.2015, decode.d6.loss_dice: 0.5323, decode.d7.loss_cls: 0.0537, decode.d7.loss_mask: 0.2013, decode.d7.loss_dice: 0.5365, decode.d8.loss_cls: 0.0578, decode.d8.loss_mask: 0.2011, decode.d8.loss_dice: 0.5333, loss: 8.2315 +2022-05-10 21:27:37,558 - mmseg - INFO - Iter [50100/80000] lr: 5.366e-07, eta: 1 day, 10:50:40, time: 1.808, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0550, decode.loss_mask: 0.1886, decode.loss_dice: 0.5300, decode.d0.loss_cls: 0.3051, decode.d0.loss_mask: 0.1955, decode.d0.loss_dice: 0.5583, decode.d1.loss_cls: 0.0747, decode.d1.loss_mask: 0.1899, decode.d1.loss_dice: 0.5444, decode.d2.loss_cls: 0.0674, decode.d2.loss_mask: 0.1880, decode.d2.loss_dice: 0.5336, decode.d3.loss_cls: 0.0593, decode.d3.loss_mask: 0.1876, decode.d3.loss_dice: 0.5304, decode.d4.loss_cls: 0.0605, decode.d4.loss_mask: 0.1882, decode.d4.loss_dice: 0.5299, decode.d5.loss_cls: 0.0634, decode.d5.loss_mask: 0.1884, decode.d5.loss_dice: 0.5315, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1881, decode.d6.loss_dice: 0.5274, decode.d7.loss_cls: 0.0531, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.5288, decode.d8.loss_cls: 0.0583, decode.d8.loss_mask: 0.1888, decode.d8.loss_dice: 0.5338, loss: 8.0962 +2022-05-10 21:29:08,977 - mmseg - INFO - Iter [50150/80000] lr: 5.357e-07, eta: 1 day, 10:19:46, time: 1.826, data_time: 0.018, memory: 69063, 
decode.loss_cls: 0.0581, decode.loss_mask: 0.1920, decode.loss_dice: 0.5237, decode.d0.loss_cls: 0.3078, decode.d0.loss_mask: 0.1997, decode.d0.loss_dice: 0.5515, decode.d1.loss_cls: 0.0852, decode.d1.loss_mask: 0.1933, decode.d1.loss_dice: 0.5338, decode.d2.loss_cls: 0.0688, decode.d2.loss_mask: 0.1923, decode.d2.loss_dice: 0.5295, decode.d3.loss_cls: 0.0629, decode.d3.loss_mask: 0.1920, decode.d3.loss_dice: 0.5264, decode.d4.loss_cls: 0.0654, decode.d4.loss_mask: 0.1920, decode.d4.loss_dice: 0.5258, decode.d5.loss_cls: 0.0604, decode.d5.loss_mask: 0.1923, decode.d5.loss_dice: 0.5259, decode.d6.loss_cls: 0.0633, decode.d6.loss_mask: 0.1918, decode.d6.loss_dice: 0.5244, decode.d7.loss_cls: 0.0590, decode.d7.loss_mask: 0.1918, decode.d7.loss_dice: 0.5247, decode.d8.loss_cls: 0.0585, decode.d8.loss_mask: 0.1921, decode.d8.loss_dice: 0.5257, loss: 8.1101 +2022-05-10 21:30:40,665 - mmseg - INFO - Iter [50200/80000] lr: 5.349e-07, eta: 1 day, 9:50:19, time: 1.835, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0625, decode.loss_mask: 0.1951, decode.loss_dice: 0.5361, decode.d0.loss_cls: 0.2966, decode.d0.loss_mask: 0.2032, decode.d0.loss_dice: 0.5614, decode.d1.loss_cls: 0.0742, decode.d1.loss_mask: 0.1967, decode.d1.loss_dice: 0.5390, decode.d2.loss_cls: 0.0723, decode.d2.loss_mask: 0.1960, decode.d2.loss_dice: 0.5416, decode.d3.loss_cls: 0.0646, decode.d3.loss_mask: 0.1949, decode.d3.loss_dice: 0.5377, decode.d4.loss_cls: 0.0620, decode.d4.loss_mask: 0.1955, decode.d4.loss_dice: 0.5401, decode.d5.loss_cls: 0.0694, decode.d5.loss_mask: 0.1955, decode.d5.loss_dice: 0.5398, decode.d6.loss_cls: 0.0629, decode.d6.loss_mask: 0.1954, decode.d6.loss_dice: 0.5297, decode.d7.loss_cls: 0.0625, decode.d7.loss_mask: 0.1953, decode.d7.loss_dice: 0.5349, decode.d8.loss_cls: 0.0597, decode.d8.loss_mask: 0.1951, decode.d8.loss_dice: 0.5357, loss: 8.2453 +2022-05-10 21:32:13,812 - mmseg - INFO - Iter [50250/80000] lr: 5.340e-07, eta: 1 day, 9:22:24, time: 1.863, data_time: 0.064, 
memory: 69063, decode.loss_cls: 0.0549, decode.loss_mask: 0.1965, decode.loss_dice: 0.5304, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.2036, decode.d0.loss_dice: 0.5531, decode.d1.loss_cls: 0.0757, decode.d1.loss_mask: 0.1976, decode.d1.loss_dice: 0.5385, decode.d2.loss_cls: 0.0688, decode.d2.loss_mask: 0.1972, decode.d2.loss_dice: 0.5319, decode.d3.loss_cls: 0.0564, decode.d3.loss_mask: 0.1975, decode.d3.loss_dice: 0.5324, decode.d4.loss_cls: 0.0570, decode.d4.loss_mask: 0.1970, decode.d4.loss_dice: 0.5318, decode.d5.loss_cls: 0.0547, decode.d5.loss_mask: 0.1969, decode.d5.loss_dice: 0.5323, decode.d6.loss_cls: 0.0571, decode.d6.loss_mask: 0.1967, decode.d6.loss_dice: 0.5311, decode.d7.loss_cls: 0.0552, decode.d7.loss_mask: 0.1968, decode.d7.loss_dice: 0.5350, decode.d8.loss_cls: 0.0560, decode.d8.loss_mask: 0.1964, decode.d8.loss_dice: 0.5311, loss: 8.1658 +2022-05-10 21:33:43,451 - mmseg - INFO - Iter [50300/80000] lr: 5.331e-07, eta: 1 day, 8:54:53, time: 1.793, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0596, decode.loss_mask: 0.2014, decode.loss_dice: 0.5351, decode.d0.loss_cls: 0.3035, decode.d0.loss_mask: 0.2096, decode.d0.loss_dice: 0.5575, decode.d1.loss_cls: 0.0770, decode.d1.loss_mask: 0.2028, decode.d1.loss_dice: 0.5457, decode.d2.loss_cls: 0.0725, decode.d2.loss_mask: 0.2023, decode.d2.loss_dice: 0.5391, decode.d3.loss_cls: 0.0655, decode.d3.loss_mask: 0.2014, decode.d3.loss_dice: 0.5369, decode.d4.loss_cls: 0.0664, decode.d4.loss_mask: 0.2019, decode.d4.loss_dice: 0.5404, decode.d5.loss_cls: 0.0644, decode.d5.loss_mask: 0.2015, decode.d5.loss_dice: 0.5367, decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.2014, decode.d6.loss_dice: 0.5391, decode.d7.loss_cls: 0.0639, decode.d7.loss_mask: 0.2013, decode.d7.loss_dice: 0.5375, decode.d8.loss_cls: 0.0635, decode.d8.loss_mask: 0.2010, decode.d8.loss_dice: 0.5382, loss: 8.3270 +2022-05-10 21:35:14,002 - mmseg - INFO - Iter [50350/80000] lr: 5.322e-07, eta: 1 day, 8:28:40, time: 1.811, 
data_time: 0.017, memory: 69063, decode.loss_cls: 0.0613, decode.loss_mask: 0.1947, decode.loss_dice: 0.5365, decode.d0.loss_cls: 0.2993, decode.d0.loss_mask: 0.2034, decode.d0.loss_dice: 0.5610, decode.d1.loss_cls: 0.0821, decode.d1.loss_mask: 0.1960, decode.d1.loss_dice: 0.5458, decode.d2.loss_cls: 0.0766, decode.d2.loss_mask: 0.1953, decode.d2.loss_dice: 0.5344, decode.d3.loss_cls: 0.0635, decode.d3.loss_mask: 0.1948, decode.d3.loss_dice: 0.5312, decode.d4.loss_cls: 0.0659, decode.d4.loss_mask: 0.1957, decode.d4.loss_dice: 0.5357, decode.d5.loss_cls: 0.0699, decode.d5.loss_mask: 0.1953, decode.d5.loss_dice: 0.5325, decode.d6.loss_cls: 0.0659, decode.d6.loss_mask: 0.1944, decode.d6.loss_dice: 0.5325, decode.d7.loss_cls: 0.0627, decode.d7.loss_mask: 0.1942, decode.d7.loss_dice: 0.5377, decode.d8.loss_cls: 0.0565, decode.d8.loss_mask: 0.1946, decode.d8.loss_dice: 0.5342, loss: 8.2434 +2022-05-10 21:36:44,694 - mmseg - INFO - Iter [50400/80000] lr: 5.313e-07, eta: 1 day, 8:03:29, time: 1.813, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0716, decode.loss_mask: 0.1986, decode.loss_dice: 0.5325, decode.d0.loss_cls: 0.2957, decode.d0.loss_mask: 0.2070, decode.d0.loss_dice: 0.5604, decode.d1.loss_cls: 0.0886, decode.d1.loss_mask: 0.2001, decode.d1.loss_dice: 0.5428, decode.d2.loss_cls: 0.0799, decode.d2.loss_mask: 0.1995, decode.d2.loss_dice: 0.5358, decode.d3.loss_cls: 0.0711, decode.d3.loss_mask: 0.1991, decode.d3.loss_dice: 0.5275, decode.d4.loss_cls: 0.0733, decode.d4.loss_mask: 0.1995, decode.d4.loss_dice: 0.5339, decode.d5.loss_cls: 0.0700, decode.d5.loss_mask: 0.1997, decode.d5.loss_dice: 0.5337, decode.d6.loss_cls: 0.0740, decode.d6.loss_mask: 0.1986, decode.d6.loss_dice: 0.5314, decode.d7.loss_cls: 0.0677, decode.d7.loss_mask: 0.1995, decode.d7.loss_dice: 0.5345, decode.d8.loss_cls: 0.0709, decode.d8.loss_mask: 0.1991, decode.d8.loss_dice: 0.5333, loss: 8.3295 +2022-05-10 21:38:18,039 - mmseg - INFO - Iter [50450/80000] lr: 5.304e-07, eta: 1 day, 
7:39:50, time: 1.868, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0619, decode.loss_mask: 0.1856, decode.loss_dice: 0.5242, decode.d0.loss_cls: 0.2942, decode.d0.loss_mask: 0.1919, decode.d0.loss_dice: 0.5517, decode.d1.loss_cls: 0.0791, decode.d1.loss_mask: 0.1864, decode.d1.loss_dice: 0.5383, decode.d2.loss_cls: 0.0706, decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.5335, decode.d3.loss_cls: 0.0654, decode.d3.loss_mask: 0.1857, decode.d3.loss_dice: 0.5300, decode.d4.loss_cls: 0.0644, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5261, decode.d5.loss_cls: 0.0694, decode.d5.loss_mask: 0.1860, decode.d5.loss_dice: 0.5315, decode.d6.loss_cls: 0.0625, decode.d6.loss_mask: 0.1858, decode.d6.loss_dice: 0.5305, decode.d7.loss_cls: 0.0617, decode.d7.loss_mask: 0.1856, decode.d7.loss_dice: 0.5291, decode.d8.loss_cls: 0.0626, decode.d8.loss_mask: 0.1852, decode.d8.loss_dice: 0.5290, loss: 8.0798 +2022-05-10 21:39:49,034 - mmseg - INFO - Iter [50500/80000] lr: 5.295e-07, eta: 1 day, 7:16:35, time: 1.820, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0606, decode.loss_mask: 0.1943, decode.loss_dice: 0.5368, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.2022, decode.d0.loss_dice: 0.5596, decode.d1.loss_cls: 0.0872, decode.d1.loss_mask: 0.1952, decode.d1.loss_dice: 0.5491, decode.d2.loss_cls: 0.0739, decode.d2.loss_mask: 0.1950, decode.d2.loss_dice: 0.5417, decode.d3.loss_cls: 0.0666, decode.d3.loss_mask: 0.1942, decode.d3.loss_dice: 0.5312, decode.d4.loss_cls: 0.0674, decode.d4.loss_mask: 0.1943, decode.d4.loss_dice: 0.5385, decode.d5.loss_cls: 0.0678, decode.d5.loss_mask: 0.1946, decode.d5.loss_dice: 0.5372, decode.d6.loss_cls: 0.0649, decode.d6.loss_mask: 0.1947, decode.d6.loss_dice: 0.5367, decode.d7.loss_cls: 0.0677, decode.d7.loss_mask: 0.1941, decode.d7.loss_dice: 0.5386, decode.d8.loss_cls: 0.0654, decode.d8.loss_mask: 0.1939, decode.d8.loss_dice: 0.5347, loss: 8.2840 +2022-05-10 21:41:18,611 - mmseg - INFO - Iter [50550/80000] lr: 
5.286e-07, eta: 1 day, 6:53:55, time: 1.792, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0633, decode.loss_mask: 0.1908, decode.loss_dice: 0.5272, decode.d0.loss_cls: 0.3055, decode.d0.loss_mask: 0.1981, decode.d0.loss_dice: 0.5510, decode.d1.loss_cls: 0.0726, decode.d1.loss_mask: 0.1923, decode.d1.loss_dice: 0.5364, decode.d2.loss_cls: 0.0721, decode.d2.loss_mask: 0.1916, decode.d2.loss_dice: 0.5345, decode.d3.loss_cls: 0.0670, decode.d3.loss_mask: 0.1921, decode.d3.loss_dice: 0.5291, decode.d4.loss_cls: 0.0695, decode.d4.loss_mask: 0.1914, decode.d4.loss_dice: 0.5282, decode.d5.loss_cls: 0.0668, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5245, decode.d6.loss_cls: 0.0662, decode.d6.loss_mask: 0.1911, decode.d6.loss_dice: 0.5217, decode.d7.loss_cls: 0.0655, decode.d7.loss_mask: 0.1917, decode.d7.loss_dice: 0.5250, decode.d8.loss_cls: 0.0644, decode.d8.loss_mask: 0.1908, decode.d8.loss_dice: 0.5278, loss: 8.1395 +2022-05-10 21:42:50,063 - mmseg - INFO - Iter [50600/80000] lr: 5.277e-07, eta: 1 day, 6:32:25, time: 1.829, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0671, decode.loss_mask: 0.1943, decode.loss_dice: 0.5271, decode.d0.loss_cls: 0.3216, decode.d0.loss_mask: 0.2037, decode.d0.loss_dice: 0.5545, decode.d1.loss_cls: 0.0877, decode.d1.loss_mask: 0.1967, decode.d1.loss_dice: 0.5405, decode.d2.loss_cls: 0.0719, decode.d2.loss_mask: 0.1960, decode.d2.loss_dice: 0.5344, decode.d3.loss_cls: 0.0704, decode.d3.loss_mask: 0.1959, decode.d3.loss_dice: 0.5352, decode.d4.loss_cls: 0.0744, decode.d4.loss_mask: 0.1961, decode.d4.loss_dice: 0.5341, decode.d5.loss_cls: 0.0716, decode.d5.loss_mask: 0.1948, decode.d5.loss_dice: 0.5342, decode.d6.loss_cls: 0.0711, decode.d6.loss_mask: 0.1948, decode.d6.loss_dice: 0.5297, decode.d7.loss_cls: 0.0658, decode.d7.loss_mask: 0.1952, decode.d7.loss_dice: 0.5310, decode.d8.loss_cls: 0.0730, decode.d8.loss_mask: 0.1947, decode.d8.loss_dice: 0.5297, loss: 8.2872 +2022-05-10 21:44:23,086 - mmseg - INFO - Iter 
[50650/80000] lr: 5.268e-07, eta: 1 day, 6:11:58, time: 1.861, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0576, decode.loss_mask: 0.1902, decode.loss_dice: 0.5293, decode.d0.loss_cls: 0.2982, decode.d0.loss_mask: 0.1969, decode.d0.loss_dice: 0.5579, decode.d1.loss_cls: 0.0808, decode.d1.loss_mask: 0.1919, decode.d1.loss_dice: 0.5412, decode.d2.loss_cls: 0.0605, decode.d2.loss_mask: 0.1909, decode.d2.loss_dice: 0.5383, decode.d3.loss_cls: 0.0577, decode.d3.loss_mask: 0.1906, decode.d3.loss_dice: 0.5343, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1902, decode.d4.loss_dice: 0.5349, decode.d5.loss_cls: 0.0589, decode.d5.loss_mask: 0.1907, decode.d5.loss_dice: 0.5370, decode.d6.loss_cls: 0.0641, decode.d6.loss_mask: 0.1907, decode.d6.loss_dice: 0.5337, decode.d7.loss_cls: 0.0607, decode.d7.loss_mask: 0.1901, decode.d7.loss_dice: 0.5349, decode.d8.loss_cls: 0.0543, decode.d8.loss_mask: 0.1898, decode.d8.loss_dice: 0.5300, loss: 8.1353 +2022-05-10 21:45:53,320 - mmseg - INFO - Iter [50700/80000] lr: 5.259e-07, eta: 1 day, 5:51:43, time: 1.805, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0663, decode.loss_mask: 0.1976, decode.loss_dice: 0.5393, decode.d0.loss_cls: 0.3131, decode.d0.loss_mask: 0.2050, decode.d0.loss_dice: 0.5656, decode.d1.loss_cls: 0.0920, decode.d1.loss_mask: 0.1986, decode.d1.loss_dice: 0.5474, decode.d2.loss_cls: 0.0778, decode.d2.loss_mask: 0.1975, decode.d2.loss_dice: 0.5419, decode.d3.loss_cls: 0.0744, decode.d3.loss_mask: 0.1967, decode.d3.loss_dice: 0.5461, decode.d4.loss_cls: 0.0774, decode.d4.loss_mask: 0.1968, decode.d4.loss_dice: 0.5419, decode.d5.loss_cls: 0.0764, decode.d5.loss_mask: 0.1973, decode.d5.loss_dice: 0.5419, decode.d6.loss_cls: 0.0689, decode.d6.loss_mask: 0.1972, decode.d6.loss_dice: 0.5410, decode.d7.loss_cls: 0.0680, decode.d7.loss_mask: 0.1971, decode.d7.loss_dice: 0.5420, decode.d8.loss_cls: 0.0669, decode.d8.loss_mask: 0.1976, decode.d8.loss_dice: 0.5403, loss: 8.4101 +2022-05-10 21:47:23,577 - mmseg 
- INFO - Iter [50750/80000] lr: 5.250e-07, eta: 1 day, 5:32:08, time: 1.805, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0630, decode.loss_mask: 0.1976, decode.loss_dice: 0.5170, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 0.2051, decode.d0.loss_dice: 0.5432, decode.d1.loss_cls: 0.0776, decode.d1.loss_mask: 0.1988, decode.d1.loss_dice: 0.5279, decode.d2.loss_cls: 0.0668, decode.d2.loss_mask: 0.1982, decode.d2.loss_dice: 0.5239, decode.d3.loss_cls: 0.0641, decode.d3.loss_mask: 0.1972, decode.d3.loss_dice: 0.5175, decode.d4.loss_cls: 0.0599, decode.d4.loss_mask: 0.1979, decode.d4.loss_dice: 0.5157, decode.d5.loss_cls: 0.0576, decode.d5.loss_mask: 0.1984, decode.d5.loss_dice: 0.5190, decode.d6.loss_cls: 0.0588, decode.d6.loss_mask: 0.1974, decode.d6.loss_dice: 0.5165, decode.d7.loss_cls: 0.0585, decode.d7.loss_mask: 0.1975, decode.d7.loss_dice: 0.5177, decode.d8.loss_cls: 0.0565, decode.d8.loss_mask: 0.1973, decode.d8.loss_dice: 0.5141, loss: 8.0572 +2022-05-10 21:48:57,330 - mmseg - INFO - Iter [50800/80000] lr: 5.241e-07, eta: 1 day, 5:13:49, time: 1.875, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0551, decode.loss_mask: 0.1970, decode.loss_dice: 0.5218, decode.d0.loss_cls: 0.2933, decode.d0.loss_mask: 0.2034, decode.d0.loss_dice: 0.5486, decode.d1.loss_cls: 0.0675, decode.d1.loss_mask: 0.1975, decode.d1.loss_dice: 0.5312, decode.d2.loss_cls: 0.0648, decode.d2.loss_mask: 0.1972, decode.d2.loss_dice: 0.5259, decode.d3.loss_cls: 0.0552, decode.d3.loss_mask: 0.1970, decode.d3.loss_dice: 0.5234, decode.d4.loss_cls: 0.0597, decode.d4.loss_mask: 0.1964, decode.d4.loss_dice: 0.5248, decode.d5.loss_cls: 0.0537, decode.d5.loss_mask: 0.1968, decode.d5.loss_dice: 0.5242, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1967, decode.d6.loss_dice: 0.5251, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1965, decode.d7.loss_dice: 0.5230, decode.d8.loss_cls: 0.0481, decode.d8.loss_mask: 0.1968, decode.d8.loss_dice: 0.5223, loss: 8.0461 +2022-05-10 
21:50:27,158 - mmseg - INFO - Iter [50850/80000] lr: 5.232e-07, eta: 1 day, 4:55:25, time: 1.797, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0659, decode.loss_mask: 0.1945, decode.loss_dice: 0.5299, decode.d0.loss_cls: 0.3054, decode.d0.loss_mask: 0.2020, decode.d0.loss_dice: 0.5581, decode.d1.loss_cls: 0.0785, decode.d1.loss_mask: 0.1971, decode.d1.loss_dice: 0.5401, decode.d2.loss_cls: 0.0700, decode.d2.loss_mask: 0.1963, decode.d2.loss_dice: 0.5321, decode.d3.loss_cls: 0.0656, decode.d3.loss_mask: 0.1956, decode.d3.loss_dice: 0.5271, decode.d4.loss_cls: 0.0635, decode.d4.loss_mask: 0.1955, decode.d4.loss_dice: 0.5287, decode.d5.loss_cls: 0.0635, decode.d5.loss_mask: 0.1954, decode.d5.loss_dice: 0.5323, decode.d6.loss_cls: 0.0652, decode.d6.loss_mask: 0.1954, decode.d6.loss_dice: 0.5267, decode.d7.loss_cls: 0.0603, decode.d7.loss_mask: 0.1955, decode.d7.loss_dice: 0.5308, decode.d8.loss_cls: 0.0662, decode.d8.loss_mask: 0.1948, decode.d8.loss_dice: 0.5289, loss: 8.2010 +2022-05-10 21:51:56,193 - mmseg - INFO - Iter [50900/80000] lr: 5.223e-07, eta: 1 day, 4:37:27, time: 1.778, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0680, decode.loss_mask: 0.1958, decode.loss_dice: 0.5374, decode.d0.loss_cls: 0.3074, decode.d0.loss_mask: 0.2039, decode.d0.loss_dice: 0.5654, decode.d1.loss_cls: 0.0888, decode.d1.loss_mask: 0.1978, decode.d1.loss_dice: 0.5486, decode.d2.loss_cls: 0.0797, decode.d2.loss_mask: 0.1961, decode.d2.loss_dice: 0.5425, decode.d3.loss_cls: 0.0766, decode.d3.loss_mask: 0.1971, decode.d3.loss_dice: 0.5415, decode.d4.loss_cls: 0.0705, decode.d4.loss_mask: 0.1957, decode.d4.loss_dice: 0.5392, decode.d5.loss_cls: 0.0698, decode.d5.loss_mask: 0.1957, decode.d5.loss_dice: 0.5411, decode.d6.loss_cls: 0.0656, decode.d6.loss_mask: 0.1958, decode.d6.loss_dice: 0.5347, decode.d7.loss_cls: 0.0724, decode.d7.loss_mask: 0.1956, decode.d7.loss_dice: 0.5384, decode.d8.loss_cls: 0.0731, decode.d8.loss_mask: 0.1958, decode.d8.loss_dice: 0.5345, loss: 
8.3646 +2022-05-10 21:53:27,899 - mmseg - INFO - Iter [50950/80000] lr: 5.214e-07, eta: 1 day, 4:20:31, time: 1.836, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1964, decode.loss_dice: 0.5331, decode.d0.loss_cls: 0.2954, decode.d0.loss_mask: 0.2037, decode.d0.loss_dice: 0.5564, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1986, decode.d1.loss_dice: 0.5392, decode.d2.loss_cls: 0.0664, decode.d2.loss_mask: 0.1971, decode.d2.loss_dice: 0.5342, decode.d3.loss_cls: 0.0628, decode.d3.loss_mask: 0.1969, decode.d3.loss_dice: 0.5334, decode.d4.loss_cls: 0.0642, decode.d4.loss_mask: 0.1967, decode.d4.loss_dice: 0.5319, decode.d5.loss_cls: 0.0602, decode.d5.loss_mask: 0.1967, decode.d5.loss_dice: 0.5324, decode.d6.loss_cls: 0.0601, decode.d6.loss_mask: 0.1971, decode.d6.loss_dice: 0.5325, decode.d7.loss_cls: 0.0591, decode.d7.loss_mask: 0.1972, decode.d7.loss_dice: 0.5315, decode.d8.loss_cls: 0.0607, decode.d8.loss_mask: 0.1972, decode.d8.loss_dice: 0.5345, loss: 8.1933 +2022-05-10 21:55:01,553 - mmseg - INFO - Saving checkpoint at 51000 iterations +2022-05-10 21:55:35,046 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 21:55:35,055 - mmseg - INFO - Iter [51000/80000] lr: 5.205e-07, eta: 1 day, 4:09:46, time: 2.541, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0596, decode.loss_mask: 0.1922, decode.loss_dice: 0.5332, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.1994, decode.d0.loss_dice: 0.5578, decode.d1.loss_cls: 0.0734, decode.d1.loss_mask: 0.1946, decode.d1.loss_dice: 0.5378, decode.d2.loss_cls: 0.0650, decode.d2.loss_mask: 0.1938, decode.d2.loss_dice: 0.5363, decode.d3.loss_cls: 0.0590, decode.d3.loss_mask: 0.1938, decode.d3.loss_dice: 0.5359, decode.d4.loss_cls: 0.0605, decode.d4.loss_mask: 0.1932, decode.d4.loss_dice: 0.5329, decode.d5.loss_cls: 0.0598, decode.d5.loss_mask: 0.1931, decode.d5.loss_dice: 0.5334, decode.d6.loss_cls: 0.0611, decode.d6.loss_mask: 0.1929, 
decode.d6.loss_dice: 0.5338, decode.d7.loss_cls: 0.0559, decode.d7.loss_mask: 0.1928, decode.d7.loss_dice: 0.5301, decode.d8.loss_cls: 0.0623, decode.d8.loss_mask: 0.1927, decode.d8.loss_dice: 0.5309, loss: 8.1559 +2022-05-10 21:57:32,335 - mmseg - INFO - per class results: +2022-05-10 21:57:32,339 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.67 | 99.29 | +| sidewalk | 88.89 | 94.21 | +| building | 94.33 | 97.08 | +| wall | 69.6 | 80.38 | +| fence | 74.42 | 81.7 | +| pole | 71.52 | 84.6 | +| traffic light | 77.27 | 87.93 | +| traffic sign | 84.19 | 90.67 | +| vegetation | 93.4 | 97.0 | +| terrain | 67.81 | 74.17 | +| sky | 95.81 | 98.45 | +| person | 86.81 | 93.58 | +| rider | 74.01 | 85.46 | +| car | 96.21 | 98.25 | +| truck | 91.97 | 94.4 | +| bus | 93.79 | 96.56 | +| train | 88.01 | 90.79 | +| motorcycle | 77.55 | 88.69 | +| bicycle | 82.61 | 91.0 | ++---------------+-------+-------+ +2022-05-10 21:57:32,339 - mmseg - INFO - Summary: +2022-05-10 21:57:32,339 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.04 | 84.57 | 90.75 | ++-------+-------+-------+ +2022-05-10 21:57:32,343 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 21:57:32,343 - mmseg - INFO - Iter(val) [32] aAcc: 0.9704, mIoU: 0.8457, mAcc: 0.9075, IoU.road: 0.9867, IoU.sidewalk: 0.8889, IoU.building: 0.9433, IoU.wall: 0.6960, IoU.fence: 0.7442, IoU.pole: 0.7152, IoU.traffic light: 0.7727, IoU.traffic sign: 0.8419, IoU.vegetation: 0.9340, IoU.terrain: 0.6781, IoU.sky: 0.9581, IoU.person: 0.8681, IoU.rider: 0.7401, IoU.car: 0.9621, IoU.truck: 0.9197, IoU.bus: 0.9379, IoU.train: 0.8801, IoU.motorcycle: 0.7755, IoU.bicycle: 0.8261, Acc.road: 0.9929, Acc.sidewalk: 0.9421, Acc.building: 0.9708, Acc.wall: 0.8038, Acc.fence: 0.8170, Acc.pole: 0.8460, Acc.traffic light: 0.8793, Acc.traffic sign: 0.9067, Acc.vegetation: 0.9700, 
Acc.terrain: 0.7417, Acc.sky: 0.9845, Acc.person: 0.9358, Acc.rider: 0.8546, Acc.car: 0.9825, Acc.truck: 0.9440, Acc.bus: 0.9656, Acc.train: 0.9079, Acc.motorcycle: 0.8869, Acc.bicycle: 0.9100 +2022-05-10 21:59:03,604 - mmseg - INFO - Iter [51050/80000] lr: 5.196e-07, eta: 1 day, 4:12:12, time: 4.173, data_time: 2.364, memory: 69063, decode.loss_cls: 0.0631, decode.loss_mask: 0.1910, decode.loss_dice: 0.5343, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.1978, decode.d0.loss_dice: 0.5595, decode.d1.loss_cls: 0.0842, decode.d1.loss_mask: 0.1932, decode.d1.loss_dice: 0.5444, decode.d2.loss_cls: 0.0708, decode.d2.loss_mask: 0.1912, decode.d2.loss_dice: 0.5383, decode.d3.loss_cls: 0.0677, decode.d3.loss_mask: 0.1911, decode.d3.loss_dice: 0.5350, decode.d4.loss_cls: 0.0727, decode.d4.loss_mask: 0.1913, decode.d4.loss_dice: 0.5360, decode.d5.loss_cls: 0.0606, decode.d5.loss_mask: 0.1916, decode.d5.loss_dice: 0.5353, decode.d6.loss_cls: 0.0609, decode.d6.loss_mask: 0.1909, decode.d6.loss_dice: 0.5340, decode.d7.loss_cls: 0.0631, decode.d7.loss_mask: 0.1907, decode.d7.loss_dice: 0.5357, decode.d8.loss_cls: 0.0654, decode.d8.loss_mask: 0.1907, decode.d8.loss_dice: 0.5348, loss: 8.2141 +2022-05-10 22:00:33,511 - mmseg - INFO - Iter [51100/80000] lr: 5.187e-07, eta: 1 day, 3:56:00, time: 1.798, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0564, decode.loss_mask: 0.1939, decode.loss_dice: 0.5285, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.2013, decode.d0.loss_dice: 0.5504, decode.d1.loss_cls: 0.0784, decode.d1.loss_mask: 0.1958, decode.d1.loss_dice: 0.5342, decode.d2.loss_cls: 0.0668, decode.d2.loss_mask: 0.1948, decode.d2.loss_dice: 0.5300, decode.d3.loss_cls: 0.0627, decode.d3.loss_mask: 0.1947, decode.d3.loss_dice: 0.5286, decode.d4.loss_cls: 0.0608, decode.d4.loss_mask: 0.1950, decode.d4.loss_dice: 0.5264, decode.d5.loss_cls: 0.0638, decode.d5.loss_mask: 0.1941, decode.d5.loss_dice: 0.5283, decode.d6.loss_cls: 0.0557, decode.d6.loss_mask: 0.1939, 
decode.d6.loss_dice: 0.5262, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1941, decode.d7.loss_dice: 0.5271, decode.d8.loss_cls: 0.0551, decode.d8.loss_mask: 0.1940, decode.d8.loss_dice: 0.5303, loss: 8.1170 +2022-05-10 22:02:03,136 - mmseg - INFO - Iter [51150/80000] lr: 5.178e-07, eta: 1 day, 3:40:14, time: 1.793, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0564, decode.loss_mask: 0.1926, decode.loss_dice: 0.5343, decode.d0.loss_cls: 0.3021, decode.d0.loss_mask: 0.2001, decode.d0.loss_dice: 0.5628, decode.d1.loss_cls: 0.0801, decode.d1.loss_mask: 0.1949, decode.d1.loss_dice: 0.5426, decode.d2.loss_cls: 0.0693, decode.d2.loss_mask: 0.1939, decode.d2.loss_dice: 0.5389, decode.d3.loss_cls: 0.0624, decode.d3.loss_mask: 0.1930, decode.d3.loss_dice: 0.5365, decode.d4.loss_cls: 0.0665, decode.d4.loss_mask: 0.1929, decode.d4.loss_dice: 0.5339, decode.d5.loss_cls: 0.0628, decode.d5.loss_mask: 0.1931, decode.d5.loss_dice: 0.5355, decode.d6.loss_cls: 0.0573, decode.d6.loss_mask: 0.1929, decode.d6.loss_dice: 0.5361, decode.d7.loss_cls: 0.0597, decode.d7.loss_mask: 0.1930, decode.d7.loss_dice: 0.5332, decode.d8.loss_cls: 0.0578, decode.d8.loss_mask: 0.1924, decode.d8.loss_dice: 0.5312, loss: 8.1981 +2022-05-10 22:03:35,736 - mmseg - INFO - Iter [51200/80000] lr: 5.169e-07, eta: 1 day, 3:25:21, time: 1.852, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0586, decode.loss_mask: 0.1912, decode.loss_dice: 0.5368, decode.d0.loss_cls: 0.2982, decode.d0.loss_mask: 0.1994, decode.d0.loss_dice: 0.5563, decode.d1.loss_cls: 0.0727, decode.d1.loss_mask: 0.1937, decode.d1.loss_dice: 0.5418, decode.d2.loss_cls: 0.0630, decode.d2.loss_mask: 0.1923, decode.d2.loss_dice: 0.5360, decode.d3.loss_cls: 0.0630, decode.d3.loss_mask: 0.1915, decode.d3.loss_dice: 0.5345, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1920, decode.d4.loss_dice: 0.5309, decode.d5.loss_cls: 0.0604, decode.d5.loss_mask: 0.1917, decode.d5.loss_dice: 0.5339, decode.d6.loss_cls: 0.0586, 
decode.d6.loss_mask: 0.1912, decode.d6.loss_dice: 0.5343, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.1912, decode.d7.loss_dice: 0.5315, decode.d8.loss_cls: 0.0623, decode.d8.loss_mask: 0.1917, decode.d8.loss_dice: 0.5363, loss: 8.1576 +2022-05-10 22:05:04,339 - mmseg - INFO - Iter [51250/80000] lr: 5.160e-07, eta: 1 day, 3:10:18, time: 1.772, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0673, decode.loss_mask: 0.1916, decode.loss_dice: 0.5270, decode.d0.loss_cls: 0.2964, decode.d0.loss_mask: 0.1997, decode.d0.loss_dice: 0.5500, decode.d1.loss_cls: 0.0802, decode.d1.loss_mask: 0.1944, decode.d1.loss_dice: 0.5301, decode.d2.loss_cls: 0.0696, decode.d2.loss_mask: 0.1930, decode.d2.loss_dice: 0.5296, decode.d3.loss_cls: 0.0688, decode.d3.loss_mask: 0.1924, decode.d3.loss_dice: 0.5280, decode.d4.loss_cls: 0.0660, decode.d4.loss_mask: 0.1923, decode.d4.loss_dice: 0.5254, decode.d5.loss_cls: 0.0670, decode.d5.loss_mask: 0.1920, decode.d5.loss_dice: 0.5279, decode.d6.loss_cls: 0.0666, decode.d6.loss_mask: 0.1915, decode.d6.loss_dice: 0.5252, decode.d7.loss_cls: 0.0635, decode.d7.loss_mask: 0.1921, decode.d7.loss_dice: 0.5246, decode.d8.loss_cls: 0.0676, decode.d8.loss_mask: 0.1921, decode.d8.loss_dice: 0.5268, loss: 8.1384 +2022-05-10 22:06:35,735 - mmseg - INFO - Iter [51300/80000] lr: 5.151e-07, eta: 1 day, 2:56:03, time: 1.828, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0625, decode.loss_mask: 0.1888, decode.loss_dice: 0.5380, decode.d0.loss_cls: 0.2975, decode.d0.loss_mask: 0.1971, decode.d0.loss_dice: 0.5672, decode.d1.loss_cls: 0.0777, decode.d1.loss_mask: 0.1900, decode.d1.loss_dice: 0.5492, decode.d2.loss_cls: 0.0719, decode.d2.loss_mask: 0.1896, decode.d2.loss_dice: 0.5480, decode.d3.loss_cls: 0.0690, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5434, decode.d4.loss_cls: 0.0618, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5402, decode.d5.loss_cls: 0.0643, decode.d5.loss_mask: 0.1891, decode.d5.loss_dice: 0.5410, 
decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1890, decode.d6.loss_dice: 0.5434, decode.d7.loss_cls: 0.0676, decode.d7.loss_mask: 0.1892, decode.d7.loss_dice: 0.5407, decode.d8.loss_cls: 0.0664, decode.d8.loss_mask: 0.1891, decode.d8.loss_dice: 0.5405, loss: 8.2490 +2022-05-10 22:08:07,834 - mmseg - INFO - Iter [51350/80000] lr: 5.142e-07, eta: 1 day, 2:42:17, time: 1.841, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0577, decode.loss_mask: 0.1910, decode.loss_dice: 0.5301, decode.d0.loss_cls: 0.3001, decode.d0.loss_mask: 0.2001, decode.d0.loss_dice: 0.5513, decode.d1.loss_cls: 0.0703, decode.d1.loss_mask: 0.1924, decode.d1.loss_dice: 0.5370, decode.d2.loss_cls: 0.0671, decode.d2.loss_mask: 0.1915, decode.d2.loss_dice: 0.5293, decode.d3.loss_cls: 0.0573, decode.d3.loss_mask: 0.1915, decode.d3.loss_dice: 0.5276, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1909, decode.d4.loss_dice: 0.5297, decode.d5.loss_cls: 0.0665, decode.d5.loss_mask: 0.1910, decode.d5.loss_dice: 0.5322, decode.d6.loss_cls: 0.0561, decode.d6.loss_mask: 0.1911, decode.d6.loss_dice: 0.5279, decode.d7.loss_cls: 0.0568, decode.d7.loss_mask: 0.1907, decode.d7.loss_dice: 0.5254, decode.d8.loss_cls: 0.0567, decode.d8.loss_mask: 0.1912, decode.d8.loss_dice: 0.5287, loss: 8.0909 +2022-05-10 22:09:37,696 - mmseg - INFO - Iter [51400/80000] lr: 5.133e-07, eta: 1 day, 2:28:33, time: 1.794, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0652, decode.loss_mask: 0.1892, decode.loss_dice: 0.5288, decode.d0.loss_cls: 0.3094, decode.d0.loss_mask: 0.1958, decode.d0.loss_dice: 0.5522, decode.d1.loss_cls: 0.0780, decode.d1.loss_mask: 0.1896, decode.d1.loss_dice: 0.5373, decode.d2.loss_cls: 0.0694, decode.d2.loss_mask: 0.1904, decode.d2.loss_dice: 0.5363, decode.d3.loss_cls: 0.0647, decode.d3.loss_mask: 0.1900, decode.d3.loss_dice: 0.5284, decode.d4.loss_cls: 0.0685, decode.d4.loss_mask: 0.1901, decode.d4.loss_dice: 0.5323, decode.d5.loss_cls: 0.0589, decode.d5.loss_mask: 0.1897, 
decode.d5.loss_dice: 0.5331, decode.d6.loss_cls: 0.0583, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.5295, decode.d7.loss_cls: 0.0593, decode.d7.loss_mask: 0.1900, decode.d7.loss_dice: 0.5289, decode.d8.loss_cls: 0.0573, decode.d8.loss_mask: 0.1894, decode.d8.loss_dice: 0.5299, loss: 8.1295 +2022-05-10 22:11:07,719 - mmseg - INFO - Iter [51450/80000] lr: 5.124e-07, eta: 1 day, 2:15:13, time: 1.801, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1927, decode.loss_dice: 0.5216, decode.d0.loss_cls: 0.3107, decode.d0.loss_mask: 0.2002, decode.d0.loss_dice: 0.5432, decode.d1.loss_cls: 0.0785, decode.d1.loss_mask: 0.1946, decode.d1.loss_dice: 0.5313, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1941, decode.d2.loss_dice: 0.5269, decode.d3.loss_cls: 0.0632, decode.d3.loss_mask: 0.1934, decode.d3.loss_dice: 0.5200, decode.d4.loss_cls: 0.0579, decode.d4.loss_mask: 0.1927, decode.d4.loss_dice: 0.5232, decode.d5.loss_cls: 0.0538, decode.d5.loss_mask: 0.1933, decode.d5.loss_dice: 0.5225, decode.d6.loss_cls: 0.0552, decode.d6.loss_mask: 0.1935, decode.d6.loss_dice: 0.5251, decode.d7.loss_cls: 0.0560, decode.d7.loss_mask: 0.1934, decode.d7.loss_dice: 0.5210, decode.d8.loss_cls: 0.0541, decode.d8.loss_mask: 0.1927, decode.d8.loss_dice: 0.5256, loss: 8.0442 +2022-05-10 22:12:37,354 - mmseg - INFO - Iter [51500/80000] lr: 5.115e-07, eta: 1 day, 2:02:11, time: 1.795, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0675, decode.loss_mask: 0.1933, decode.loss_dice: 0.5275, decode.d0.loss_cls: 0.2913, decode.d0.loss_mask: 0.2003, decode.d0.loss_dice: 0.5494, decode.d1.loss_cls: 0.0867, decode.d1.loss_mask: 0.1955, decode.d1.loss_dice: 0.5378, decode.d2.loss_cls: 0.0738, decode.d2.loss_mask: 0.1941, decode.d2.loss_dice: 0.5321, decode.d3.loss_cls: 0.0687, decode.d3.loss_mask: 0.1948, decode.d3.loss_dice: 0.5285, decode.d4.loss_cls: 0.0715, decode.d4.loss_mask: 0.1944, decode.d4.loss_dice: 0.5256, decode.d5.loss_cls: 0.0716, 
decode.d5.loss_mask: 0.1939, decode.d5.loss_dice: 0.5300, decode.d6.loss_cls: 0.0680, decode.d6.loss_mask: 0.1938, decode.d6.loss_dice: 0.5265, decode.d7.loss_cls: 0.0679, decode.d7.loss_mask: 0.1937, decode.d7.loss_dice: 0.5272, decode.d8.loss_cls: 0.0611, decode.d8.loss_mask: 0.1936, decode.d8.loss_dice: 0.5268, loss: 8.1868 +2022-05-10 22:14:09,903 - mmseg - INFO - Iter [51550/80000] lr: 5.106e-07, eta: 1 day, 1:49:50, time: 1.851, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0654, decode.loss_mask: 0.1934, decode.loss_dice: 0.5375, decode.d0.loss_cls: 0.3045, decode.d0.loss_mask: 0.2004, decode.d0.loss_dice: 0.5660, decode.d1.loss_cls: 0.0795, decode.d1.loss_mask: 0.1950, decode.d1.loss_dice: 0.5507, decode.d2.loss_cls: 0.0719, decode.d2.loss_mask: 0.1947, decode.d2.loss_dice: 0.5491, decode.d3.loss_cls: 0.0700, decode.d3.loss_mask: 0.1941, decode.d3.loss_dice: 0.5409, decode.d4.loss_cls: 0.0691, decode.d4.loss_mask: 0.1940, decode.d4.loss_dice: 0.5421, decode.d5.loss_cls: 0.0647, decode.d5.loss_mask: 0.1935, decode.d5.loss_dice: 0.5406, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1935, decode.d6.loss_dice: 0.5413, decode.d7.loss_cls: 0.0612, decode.d7.loss_mask: 0.1932, decode.d7.loss_dice: 0.5370, decode.d8.loss_cls: 0.0621, decode.d8.loss_mask: 0.1935, decode.d8.loss_dice: 0.5407, loss: 8.2978 +2022-05-10 22:15:40,784 - mmseg - INFO - Iter [51600/80000] lr: 5.097e-07, eta: 1 day, 1:37:35, time: 1.818, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0593, decode.loss_mask: 0.1907, decode.loss_dice: 0.5165, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 0.1986, decode.d0.loss_dice: 0.5446, decode.d1.loss_cls: 0.0729, decode.d1.loss_mask: 0.1921, decode.d1.loss_dice: 0.5300, decode.d2.loss_cls: 0.0700, decode.d2.loss_mask: 0.1913, decode.d2.loss_dice: 0.5221, decode.d3.loss_cls: 0.0596, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5222, decode.d4.loss_cls: 0.0602, decode.d4.loss_mask: 0.1910, decode.d4.loss_dice: 0.5195, 
decode.d5.loss_cls: 0.0659, decode.d5.loss_mask: 0.1911, decode.d5.loss_dice: 0.5218, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.1908, decode.d6.loss_dice: 0.5208, decode.d7.loss_cls: 0.0591, decode.d7.loss_mask: 0.1906, decode.d7.loss_dice: 0.5215, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1910, decode.d8.loss_dice: 0.5169, loss: 8.0077 +2022-05-10 22:17:10,867 - mmseg - INFO - Iter [51650/80000] lr: 5.088e-07, eta: 1 day, 1:25:30, time: 1.802, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1952, decode.loss_dice: 0.5277, decode.d0.loss_cls: 0.3018, decode.d0.loss_mask: 0.2031, decode.d0.loss_dice: 0.5534, decode.d1.loss_cls: 0.0810, decode.d1.loss_mask: 0.1979, decode.d1.loss_dice: 0.5330, decode.d2.loss_cls: 0.0664, decode.d2.loss_mask: 0.1965, decode.d2.loss_dice: 0.5372, decode.d3.loss_cls: 0.0614, decode.d3.loss_mask: 0.1952, decode.d3.loss_dice: 0.5276, decode.d4.loss_cls: 0.0586, decode.d4.loss_mask: 0.1959, decode.d4.loss_dice: 0.5294, decode.d5.loss_cls: 0.0594, decode.d5.loss_mask: 0.1958, decode.d5.loss_dice: 0.5279, decode.d6.loss_cls: 0.0592, decode.d6.loss_mask: 0.1955, decode.d6.loss_dice: 0.5296, decode.d7.loss_cls: 0.0624, decode.d7.loss_mask: 0.1955, decode.d7.loss_dice: 0.5309, decode.d8.loss_cls: 0.0562, decode.d8.loss_mask: 0.1958, decode.d8.loss_dice: 0.5257, loss: 8.1519 +2022-05-10 22:18:41,035 - mmseg - INFO - Iter [51700/80000] lr: 5.079e-07, eta: 1 day, 1:13:44, time: 1.803, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0652, decode.loss_mask: 0.1908, decode.loss_dice: 0.5327, decode.d0.loss_cls: 0.2992, decode.d0.loss_mask: 0.1999, decode.d0.loss_dice: 0.5524, decode.d1.loss_cls: 0.0849, decode.d1.loss_mask: 0.1935, decode.d1.loss_dice: 0.5391, decode.d2.loss_cls: 0.0779, decode.d2.loss_mask: 0.1912, decode.d2.loss_dice: 0.5347, decode.d3.loss_cls: 0.0718, decode.d3.loss_mask: 0.1905, decode.d3.loss_dice: 0.5314, decode.d4.loss_cls: 0.0760, decode.d4.loss_mask: 0.1907, 
decode.d4.loss_dice: 0.5324, decode.d5.loss_cls: 0.0714, decode.d5.loss_mask: 0.1906, decode.d5.loss_dice: 0.5288, decode.d6.loss_cls: 0.0658, decode.d6.loss_mask: 0.1907, decode.d6.loss_dice: 0.5265, decode.d7.loss_cls: 0.0634, decode.d7.loss_mask: 0.1914, decode.d7.loss_dice: 0.5304, decode.d8.loss_cls: 0.0678, decode.d8.loss_mask: 0.1914, decode.d8.loss_dice: 0.5279, loss: 8.2004 +2022-05-10 22:20:14,148 - mmseg - INFO - Iter [51750/80000] lr: 5.070e-07, eta: 1 day, 1:02:36, time: 1.862, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0601, decode.loss_mask: 0.1919, decode.loss_dice: 0.5146, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1994, decode.d0.loss_dice: 0.5380, decode.d1.loss_cls: 0.0672, decode.d1.loss_mask: 0.1940, decode.d1.loss_dice: 0.5218, decode.d2.loss_cls: 0.0689, decode.d2.loss_mask: 0.1928, decode.d2.loss_dice: 0.5197, decode.d3.loss_cls: 0.0568, decode.d3.loss_mask: 0.1929, decode.d3.loss_dice: 0.5178, decode.d4.loss_cls: 0.0604, decode.d4.loss_mask: 0.1925, decode.d4.loss_dice: 0.5147, decode.d5.loss_cls: 0.0582, decode.d5.loss_mask: 0.1924, decode.d5.loss_dice: 0.5149, decode.d6.loss_cls: 0.0590, decode.d6.loss_mask: 0.1925, decode.d6.loss_dice: 0.5152, decode.d7.loss_cls: 0.0526, decode.d7.loss_mask: 0.1929, decode.d7.loss_dice: 0.5153, decode.d8.loss_cls: 0.0580, decode.d8.loss_mask: 0.1926, decode.d8.loss_dice: 0.5147, loss: 7.9623 +2022-05-10 22:21:43,743 - mmseg - INFO - Iter [51800/80000] lr: 5.061e-07, eta: 1 day, 0:51:17, time: 1.791, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0633, decode.loss_mask: 0.1933, decode.loss_dice: 0.5379, decode.d0.loss_cls: 0.3019, decode.d0.loss_mask: 0.2008, decode.d0.loss_dice: 0.5615, decode.d1.loss_cls: 0.0803, decode.d1.loss_mask: 0.1952, decode.d1.loss_dice: 0.5477, decode.d2.loss_cls: 0.0714, decode.d2.loss_mask: 0.1940, decode.d2.loss_dice: 0.5437, decode.d3.loss_cls: 0.0638, decode.d3.loss_mask: 0.1943, decode.d3.loss_dice: 0.5362, decode.d4.loss_cls: 0.0663, 
decode.d4.loss_mask: 0.1940, decode.d4.loss_dice: 0.5395, decode.d5.loss_cls: 0.0645, decode.d5.loss_mask: 0.1940, decode.d5.loss_dice: 0.5409, decode.d6.loss_cls: 0.0656, decode.d6.loss_mask: 0.1942, decode.d6.loss_dice: 0.5414, decode.d7.loss_cls: 0.0614, decode.d7.loss_mask: 0.1939, decode.d7.loss_dice: 0.5390, decode.d8.loss_cls: 0.0607, decode.d8.loss_mask: 0.1936, decode.d8.loss_dice: 0.5372, loss: 8.2714 +2022-05-10 22:23:13,744 - mmseg - INFO - Iter [51850/80000] lr: 5.052e-07, eta: 1 day, 0:40:17, time: 1.800, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0658, decode.loss_mask: 0.1913, decode.loss_dice: 0.5267, decode.d0.loss_cls: 0.3018, decode.d0.loss_mask: 0.1982, decode.d0.loss_dice: 0.5541, decode.d1.loss_cls: 0.0887, decode.d1.loss_mask: 0.1935, decode.d1.loss_dice: 0.5374, decode.d2.loss_cls: 0.0750, decode.d2.loss_mask: 0.1921, decode.d2.loss_dice: 0.5321, decode.d3.loss_cls: 0.0699, decode.d3.loss_mask: 0.1910, decode.d3.loss_dice: 0.5292, decode.d4.loss_cls: 0.0716, decode.d4.loss_mask: 0.1919, decode.d4.loss_dice: 0.5325, decode.d5.loss_cls: 0.0709, decode.d5.loss_mask: 0.1916, decode.d5.loss_dice: 0.5283, decode.d6.loss_cls: 0.0685, decode.d6.loss_mask: 0.1915, decode.d6.loss_dice: 0.5238, decode.d7.loss_cls: 0.0691, decode.d7.loss_mask: 0.1915, decode.d7.loss_dice: 0.5270, decode.d8.loss_cls: 0.0665, decode.d8.loss_mask: 0.1917, decode.d8.loss_dice: 0.5285, loss: 8.1915 +2022-05-10 22:24:43,978 - mmseg - INFO - Iter [51900/80000] lr: 5.043e-07, eta: 1 day, 0:29:33, time: 1.805, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0667, decode.loss_mask: 0.1921, decode.loss_dice: 0.5202, decode.d0.loss_cls: 0.3047, decode.d0.loss_mask: 0.1998, decode.d0.loss_dice: 0.5472, decode.d1.loss_cls: 0.0775, decode.d1.loss_mask: 0.1952, decode.d1.loss_dice: 0.5293, decode.d2.loss_cls: 0.0773, decode.d2.loss_mask: 0.1940, decode.d2.loss_dice: 0.5263, decode.d3.loss_cls: 0.0716, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5215, 
decode.d4.loss_cls: 0.0722, decode.d4.loss_mask: 0.1934, decode.d4.loss_dice: 0.5222, decode.d5.loss_cls: 0.0669, decode.d5.loss_mask: 0.1933, decode.d5.loss_dice: 0.5218, decode.d6.loss_cls: 0.0683, decode.d6.loss_mask: 0.1930, decode.d6.loss_dice: 0.5225, decode.d7.loss_cls: 0.0674, decode.d7.loss_mask: 0.1930, decode.d7.loss_dice: 0.5199, decode.d8.loss_cls: 0.0667, decode.d8.loss_mask: 0.1926, decode.d8.loss_dice: 0.5178, loss: 8.1268 +2022-05-10 22:26:18,056 - mmseg - INFO - Iter [51950/80000] lr: 5.034e-07, eta: 1 day, 0:19:30, time: 1.882, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0671, decode.loss_mask: 0.1952, decode.loss_dice: 0.5480, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.2033, decode.d0.loss_dice: 0.5699, decode.d1.loss_cls: 0.0855, decode.d1.loss_mask: 0.1967, decode.d1.loss_dice: 0.5522, decode.d2.loss_cls: 0.0725, decode.d2.loss_mask: 0.1956, decode.d2.loss_dice: 0.5509, decode.d3.loss_cls: 0.0762, decode.d3.loss_mask: 0.1951, decode.d3.loss_dice: 0.5452, decode.d4.loss_cls: 0.0701, decode.d4.loss_mask: 0.1948, decode.d4.loss_dice: 0.5468, decode.d5.loss_cls: 0.0698, decode.d5.loss_mask: 0.1954, decode.d5.loss_dice: 0.5453, decode.d6.loss_cls: 0.0647, decode.d6.loss_mask: 0.1952, decode.d6.loss_dice: 0.5457, decode.d7.loss_cls: 0.0723, decode.d7.loss_mask: 0.1951, decode.d7.loss_dice: 0.5459, decode.d8.loss_cls: 0.0695, decode.d8.loss_mask: 0.1951, decode.d8.loss_dice: 0.5475, loss: 8.4096 +2022-05-10 22:27:48,224 - mmseg - INFO - Saving checkpoint at 52000 iterations +2022-05-10 22:28:26,502 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 22:28:26,510 - mmseg - INFO - Iter [52000/80000] lr: 5.025e-07, eta: 1 day, 0:13:40, time: 2.567, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0644, decode.loss_mask: 0.1925, decode.loss_dice: 0.5257, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.1993, decode.d0.loss_dice: 0.5512, decode.d1.loss_cls: 0.0772, decode.d1.loss_mask: 
0.1938, decode.d1.loss_dice: 0.5356, decode.d2.loss_cls: 0.0636, decode.d2.loss_mask: 0.1933, decode.d2.loss_dice: 0.5308, decode.d3.loss_cls: 0.0616, decode.d3.loss_mask: 0.1931, decode.d3.loss_dice: 0.5291, decode.d4.loss_cls: 0.0628, decode.d4.loss_mask: 0.1929, decode.d4.loss_dice: 0.5308, decode.d5.loss_cls: 0.0595, decode.d5.loss_mask: 0.1927, decode.d5.loss_dice: 0.5278, decode.d6.loss_cls: 0.0659, decode.d6.loss_mask: 0.1926, decode.d6.loss_dice: 0.5279, decode.d7.loss_cls: 0.0617, decode.d7.loss_mask: 0.1925, decode.d7.loss_dice: 0.5283, decode.d8.loss_cls: 0.0698, decode.d8.loss_mask: 0.1924, decode.d8.loss_dice: 0.5256, loss: 8.1408 +2022-05-10 22:30:22,614 - mmseg - INFO - per class results: +2022-05-10 22:30:22,622 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.29 | +| sidewalk | 88.67 | 93.75 | +| building | 94.41 | 97.2 | +| wall | 70.25 | 79.97 | +| fence | 73.92 | 81.45 | +| pole | 71.33 | 83.65 | +| traffic light | 77.21 | 87.6 | +| traffic sign | 84.0 | 90.11 | +| vegetation | 93.47 | 96.88 | +| terrain | 69.2 | 77.96 | +| sky | 95.75 | 98.49 | +| person | 86.74 | 93.9 | +| rider | 74.31 | 83.15 | +| car | 96.17 | 98.21 | +| truck | 81.99 | 94.43 | +| bus | 93.67 | 96.38 | +| train | 87.88 | 90.65 | +| motorcycle | 77.53 | 88.16 | +| bicycle | 82.76 | 91.02 | ++---------------+-------+-------+ +2022-05-10 22:30:22,622 - mmseg - INFO - Summary: +2022-05-10 22:30:22,622 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 97.03 | 84.1 | 90.64 | ++-------+------+-------+ +2022-05-10 22:30:22,626 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 22:30:22,626 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8410, mAcc: 0.9064, IoU.road: 0.9862, IoU.sidewalk: 0.8867, IoU.building: 0.9441, IoU.wall: 0.7025, IoU.fence: 0.7392, IoU.pole: 0.7133, IoU.traffic light: 0.7721, 
IoU.traffic sign: 0.8400, IoU.vegetation: 0.9347, IoU.terrain: 0.6920, IoU.sky: 0.9575, IoU.person: 0.8674, IoU.rider: 0.7431, IoU.car: 0.9617, IoU.truck: 0.8199, IoU.bus: 0.9367, IoU.train: 0.8788, IoU.motorcycle: 0.7753, IoU.bicycle: 0.8276, Acc.road: 0.9929, Acc.sidewalk: 0.9375, Acc.building: 0.9720, Acc.wall: 0.7997, Acc.fence: 0.8145, Acc.pole: 0.8365, Acc.traffic light: 0.8760, Acc.traffic sign: 0.9011, Acc.vegetation: 0.9688, Acc.terrain: 0.7796, Acc.sky: 0.9849, Acc.person: 0.9390, Acc.rider: 0.8315, Acc.car: 0.9821, Acc.truck: 0.9443, Acc.bus: 0.9638, Acc.train: 0.9065, Acc.motorcycle: 0.8816, Acc.bicycle: 0.9102 +2022-05-10 22:31:53,309 - mmseg - INFO - Iter [52050/80000] lr: 5.016e-07, eta: 1 day, 0:16:57, time: 4.138, data_time: 2.341, memory: 69063, decode.loss_cls: 0.0651, decode.loss_mask: 0.1919, decode.loss_dice: 0.5203, decode.d0.loss_cls: 0.3031, decode.d0.loss_mask: 0.1999, decode.d0.loss_dice: 0.5521, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1937, decode.d1.loss_dice: 0.5321, decode.d2.loss_cls: 0.0677, decode.d2.loss_mask: 0.1925, decode.d2.loss_dice: 0.5254, decode.d3.loss_cls: 0.0679, decode.d3.loss_mask: 0.1926, decode.d3.loss_dice: 0.5234, decode.d4.loss_cls: 0.0656, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5251, decode.d5.loss_cls: 0.0656, decode.d5.loss_mask: 0.1924, decode.d5.loss_dice: 0.5225, decode.d6.loss_cls: 0.0662, decode.d6.loss_mask: 0.1926, decode.d6.loss_dice: 0.5215, decode.d7.loss_cls: 0.0652, decode.d7.loss_mask: 0.1922, decode.d7.loss_dice: 0.5217, decode.d8.loss_cls: 0.0614, decode.d8.loss_mask: 0.1923, decode.d8.loss_dice: 0.5258, loss: 8.1030 +2022-05-10 22:33:26,252 - mmseg - INFO - Iter [52100/80000] lr: 5.008e-07, eta: 1 day, 0:07:09, time: 1.859, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0607, decode.loss_mask: 0.1912, decode.loss_dice: 0.5143, decode.d0.loss_cls: 0.2961, decode.d0.loss_mask: 0.1989, decode.d0.loss_dice: 0.5429, decode.d1.loss_cls: 0.0776, decode.d1.loss_mask: 
0.1929, decode.d1.loss_dice: 0.5262, decode.d2.loss_cls: 0.0708, decode.d2.loss_mask: 0.1919, decode.d2.loss_dice: 0.5258, decode.d3.loss_cls: 0.0618, decode.d3.loss_mask: 0.1916, decode.d3.loss_dice: 0.5233, decode.d4.loss_cls: 0.0667, decode.d4.loss_mask: 0.1911, decode.d4.loss_dice: 0.5186, decode.d5.loss_cls: 0.0603, decode.d5.loss_mask: 0.1918, decode.d5.loss_dice: 0.5188, decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1917, decode.d6.loss_dice: 0.5194, decode.d7.loss_cls: 0.0591, decode.d7.loss_mask: 0.1913, decode.d7.loss_dice: 0.5227, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1914, decode.d8.loss_dice: 0.5172, loss: 8.0254 +2022-05-10 22:34:57,854 - mmseg - INFO - Iter [52150/80000] lr: 4.999e-07, eta: 23:57:24, time: 1.832, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0573, decode.loss_mask: 0.1929, decode.loss_dice: 0.5376, decode.d0.loss_cls: 0.2963, decode.d0.loss_mask: 0.2017, decode.d0.loss_dice: 0.5612, decode.d1.loss_cls: 0.0695, decode.d1.loss_mask: 0.1955, decode.d1.loss_dice: 0.5508, decode.d2.loss_cls: 0.0616, decode.d2.loss_mask: 0.1946, decode.d2.loss_dice: 0.5441, decode.d3.loss_cls: 0.0723, decode.d3.loss_mask: 0.1934, decode.d3.loss_dice: 0.5424, decode.d4.loss_cls: 0.0605, decode.d4.loss_mask: 0.1940, decode.d4.loss_dice: 0.5394, decode.d5.loss_cls: 0.0603, decode.d5.loss_mask: 0.1942, decode.d5.loss_dice: 0.5420, decode.d6.loss_cls: 0.0559, decode.d6.loss_mask: 0.1929, decode.d6.loss_dice: 0.5443, decode.d7.loss_cls: 0.0611, decode.d7.loss_mask: 0.1933, decode.d7.loss_dice: 0.5423, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1933, decode.d8.loss_dice: 0.5434, loss: 8.2432 +2022-05-10 22:36:29,205 - mmseg - INFO - Iter [52200/80000] lr: 4.990e-07, eta: 23:47:49, time: 1.827, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0594, decode.loss_mask: 0.1920, decode.loss_dice: 0.5461, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.1997, decode.d0.loss_dice: 0.5718, decode.d1.loss_cls: 0.0750, 
decode.d1.loss_mask: 0.1938, decode.d1.loss_dice: 0.5539, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.1925, decode.d2.loss_dice: 0.5505, decode.d3.loss_cls: 0.0691, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5485, decode.d4.loss_cls: 0.0680, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5488, decode.d5.loss_cls: 0.0598, decode.d5.loss_mask: 0.1925, decode.d5.loss_dice: 0.5464, decode.d6.loss_cls: 0.0616, decode.d6.loss_mask: 0.1923, decode.d6.loss_dice: 0.5444, decode.d7.loss_cls: 0.0678, decode.d7.loss_mask: 0.1919, decode.d7.loss_dice: 0.5431, decode.d8.loss_cls: 0.0597, decode.d8.loss_mask: 0.1920, decode.d8.loss_dice: 0.5463, loss: 8.3298 +2022-05-10 22:37:59,064 - mmseg - INFO - Iter [52250/80000] lr: 4.981e-07, eta: 23:38:16, time: 1.797, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0591, decode.loss_mask: 0.1909, decode.loss_dice: 0.5221, decode.d0.loss_cls: 0.3041, decode.d0.loss_mask: 0.1999, decode.d0.loss_dice: 0.5421, decode.d1.loss_cls: 0.0687, decode.d1.loss_mask: 0.1926, decode.d1.loss_dice: 0.5291, decode.d2.loss_cls: 0.0675, decode.d2.loss_mask: 0.1912, decode.d2.loss_dice: 0.5250, decode.d3.loss_cls: 0.0646, decode.d3.loss_mask: 0.1910, decode.d3.loss_dice: 0.5183, decode.d4.loss_cls: 0.0601, decode.d4.loss_mask: 0.1910, decode.d4.loss_dice: 0.5209, decode.d5.loss_cls: 0.0591, decode.d5.loss_mask: 0.1907, decode.d5.loss_dice: 0.5231, decode.d6.loss_cls: 0.0638, decode.d6.loss_mask: 0.1906, decode.d6.loss_dice: 0.5184, decode.d7.loss_cls: 0.0587, decode.d7.loss_mask: 0.1909, decode.d7.loss_dice: 0.5208, decode.d8.loss_cls: 0.0568, decode.d8.loss_mask: 0.1907, decode.d8.loss_dice: 0.5197, loss: 8.0211 +2022-05-10 22:39:34,271 - mmseg - INFO - Iter [52300/80000] lr: 4.972e-07, eta: 23:29:28, time: 1.904, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0560, decode.loss_mask: 0.1937, decode.loss_dice: 0.5380, decode.d0.loss_cls: 0.2937, decode.d0.loss_mask: 0.2017, decode.d0.loss_dice: 0.5614, decode.d1.loss_cls: 
0.0696, decode.d1.loss_mask: 0.1959, decode.d1.loss_dice: 0.5453, decode.d2.loss_cls: 0.0601, decode.d2.loss_mask: 0.1942, decode.d2.loss_dice: 0.5387, decode.d3.loss_cls: 0.0589, decode.d3.loss_mask: 0.1933, decode.d3.loss_dice: 0.5390, decode.d4.loss_cls: 0.0552, decode.d4.loss_mask: 0.1938, decode.d4.loss_dice: 0.5379, decode.d5.loss_cls: 0.0520, decode.d5.loss_mask: 0.1932, decode.d5.loss_dice: 0.5392, decode.d6.loss_cls: 0.0529, decode.d6.loss_mask: 0.1938, decode.d6.loss_dice: 0.5362, decode.d7.loss_cls: 0.0541, decode.d7.loss_mask: 0.1939, decode.d7.loss_dice: 0.5412, decode.d8.loss_cls: 0.0529, decode.d8.loss_mask: 0.1936, decode.d8.loss_dice: 0.5384, loss: 8.1679 +2022-05-10 22:41:05,659 - mmseg - INFO - Iter [52350/80000] lr: 4.963e-07, eta: 23:20:26, time: 1.828, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0643, decode.loss_mask: 0.1924, decode.loss_dice: 0.5232, decode.d0.loss_cls: 0.3089, decode.d0.loss_mask: 0.1993, decode.d0.loss_dice: 0.5520, decode.d1.loss_cls: 0.0746, decode.d1.loss_mask: 0.1936, decode.d1.loss_dice: 0.5386, decode.d2.loss_cls: 0.0657, decode.d2.loss_mask: 0.1930, decode.d2.loss_dice: 0.5263, decode.d3.loss_cls: 0.0601, decode.d3.loss_mask: 0.1924, decode.d3.loss_dice: 0.5249, decode.d4.loss_cls: 0.0632, decode.d4.loss_mask: 0.1928, decode.d4.loss_dice: 0.5265, decode.d5.loss_cls: 0.0597, decode.d5.loss_mask: 0.1919, decode.d5.loss_dice: 0.5242, decode.d6.loss_cls: 0.0574, decode.d6.loss_mask: 0.1924, decode.d6.loss_dice: 0.5214, decode.d7.loss_cls: 0.0603, decode.d7.loss_mask: 0.1924, decode.d7.loss_dice: 0.5241, decode.d8.loss_cls: 0.0568, decode.d8.loss_mask: 0.1925, decode.d8.loss_dice: 0.5255, loss: 8.0906 +2022-05-10 22:42:35,795 - mmseg - INFO - Iter [52400/80000] lr: 4.954e-07, eta: 23:11:26, time: 1.803, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0584, decode.loss_mask: 0.1949, decode.loss_dice: 0.5416, decode.d0.loss_cls: 0.3069, decode.d0.loss_mask: 0.2028, decode.d0.loss_dice: 0.5666, 
decode.d1.loss_cls: 0.0819, decode.d1.loss_mask: 0.1972, decode.d1.loss_dice: 0.5487, decode.d2.loss_cls: 0.0625, decode.d2.loss_mask: 0.1967, decode.d2.loss_dice: 0.5458, decode.d3.loss_cls: 0.0575, decode.d3.loss_mask: 0.1959, decode.d3.loss_dice: 0.5441, decode.d4.loss_cls: 0.0576, decode.d4.loss_mask: 0.1959, decode.d4.loss_dice: 0.5438, decode.d5.loss_cls: 0.0619, decode.d5.loss_mask: 0.1957, decode.d5.loss_dice: 0.5431, decode.d6.loss_cls: 0.0580, decode.d6.loss_mask: 0.1956, decode.d6.loss_dice: 0.5416, decode.d7.loss_cls: 0.0584, decode.d7.loss_mask: 0.1955, decode.d7.loss_dice: 0.5439, decode.d8.loss_cls: 0.0607, decode.d8.loss_mask: 0.1956, decode.d8.loss_dice: 0.5438, loss: 8.2925 +2022-05-10 22:44:05,000 - mmseg - INFO - Iter [52450/80000] lr: 4.945e-07, eta: 23:02:31, time: 1.784, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0569, decode.loss_mask: 0.1889, decode.loss_dice: 0.5314, decode.d0.loss_cls: 0.3033, decode.d0.loss_mask: 0.1970, decode.d0.loss_dice: 0.5619, decode.d1.loss_cls: 0.0808, decode.d1.loss_mask: 0.1914, decode.d1.loss_dice: 0.5382, decode.d2.loss_cls: 0.0720, decode.d2.loss_mask: 0.1897, decode.d2.loss_dice: 0.5357, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1896, decode.d3.loss_dice: 0.5302, decode.d4.loss_cls: 0.0578, decode.d4.loss_mask: 0.1891, decode.d4.loss_dice: 0.5314, decode.d5.loss_cls: 0.0636, decode.d5.loss_mask: 0.1891, decode.d5.loss_dice: 0.5323, decode.d6.loss_cls: 0.0571, decode.d6.loss_mask: 0.1893, decode.d6.loss_dice: 0.5287, decode.d7.loss_cls: 0.0636, decode.d7.loss_mask: 0.1891, decode.d7.loss_dice: 0.5322, decode.d8.loss_cls: 0.0574, decode.d8.loss_mask: 0.1889, decode.d8.loss_dice: 0.5265, loss: 8.1205 +2022-05-10 22:45:37,472 - mmseg - INFO - Iter [52500/80000] lr: 4.936e-07, eta: 22:54:06, time: 1.849, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0604, decode.loss_mask: 0.1901, decode.loss_dice: 0.5129, decode.d0.loss_cls: 0.2871, decode.d0.loss_mask: 0.1975, decode.d0.loss_dice: 
0.5378, decode.d1.loss_cls: 0.0714, decode.d1.loss_mask: 0.1921, decode.d1.loss_dice: 0.5243, decode.d2.loss_cls: 0.0631, decode.d2.loss_mask: 0.1911, decode.d2.loss_dice: 0.5182, decode.d3.loss_cls: 0.0593, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5170, decode.d4.loss_cls: 0.0595, decode.d4.loss_mask: 0.1907, decode.d4.loss_dice: 0.5135, decode.d5.loss_cls: 0.0630, decode.d5.loss_mask: 0.1905, decode.d5.loss_dice: 0.5149, decode.d6.loss_cls: 0.0609, decode.d6.loss_mask: 0.1902, decode.d6.loss_dice: 0.5137, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.1903, decode.d7.loss_dice: 0.5126, decode.d8.loss_cls: 0.0588, decode.d8.loss_mask: 0.1899, decode.d8.loss_dice: 0.5146, loss: 7.9373 +2022-05-10 22:47:07,316 - mmseg - INFO - Iter [52550/80000] lr: 4.927e-07, eta: 22:45:34, time: 1.797, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0640, decode.loss_mask: 0.1918, decode.loss_dice: 0.5294, decode.d0.loss_cls: 0.3065, decode.d0.loss_mask: 0.1997, decode.d0.loss_dice: 0.5532, decode.d1.loss_cls: 0.0841, decode.d1.loss_mask: 0.1934, decode.d1.loss_dice: 0.5382, decode.d2.loss_cls: 0.0762, decode.d2.loss_mask: 0.1920, decode.d2.loss_dice: 0.5345, decode.d3.loss_cls: 0.0718, decode.d3.loss_mask: 0.1924, decode.d3.loss_dice: 0.5294, decode.d4.loss_cls: 0.0769, decode.d4.loss_mask: 0.1923, decode.d4.loss_dice: 0.5315, decode.d5.loss_cls: 0.0667, decode.d5.loss_mask: 0.1926, decode.d5.loss_dice: 0.5286, decode.d6.loss_cls: 0.0616, decode.d6.loss_mask: 0.1921, decode.d6.loss_dice: 0.5291, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.1920, decode.d7.loss_dice: 0.5307, decode.d8.loss_cls: 0.0679, decode.d8.loss_mask: 0.1915, decode.d8.loss_dice: 0.5314, loss: 8.2008 +2022-05-10 22:48:37,219 - mmseg - INFO - Iter [52600/80000] lr: 4.918e-07, eta: 22:37:11, time: 1.798, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0546, decode.loss_mask: 0.1885, decode.loss_dice: 0.5316, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1957, 
decode.d0.loss_dice: 0.5573, decode.d1.loss_cls: 0.0700, decode.d1.loss_mask: 0.1909, decode.d1.loss_dice: 0.5414, decode.d2.loss_cls: 0.0608, decode.d2.loss_mask: 0.1897, decode.d2.loss_dice: 0.5409, decode.d3.loss_cls: 0.0575, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5340, decode.d4.loss_cls: 0.0513, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5325, decode.d5.loss_cls: 0.0574, decode.d5.loss_mask: 0.1893, decode.d5.loss_dice: 0.5311, decode.d6.loss_cls: 0.0530, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.5348, decode.d7.loss_cls: 0.0584, decode.d7.loss_mask: 0.1888, decode.d7.loss_dice: 0.5327, decode.d8.loss_cls: 0.0545, decode.d8.loss_mask: 0.1887, decode.d8.loss_dice: 0.5343, loss: 8.0834 +2022-05-10 22:50:09,915 - mmseg - INFO - Iter [52650/80000] lr: 4.909e-07, eta: 22:29:14, time: 1.854, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0635, decode.loss_mask: 0.1939, decode.loss_dice: 0.5361, decode.d0.loss_cls: 0.3005, decode.d0.loss_mask: 0.2027, decode.d0.loss_dice: 0.5624, decode.d1.loss_cls: 0.0759, decode.d1.loss_mask: 0.1958, decode.d1.loss_dice: 0.5484, decode.d2.loss_cls: 0.0736, decode.d2.loss_mask: 0.1950, decode.d2.loss_dice: 0.5424, decode.d3.loss_cls: 0.0659, decode.d3.loss_mask: 0.1947, decode.d3.loss_dice: 0.5388, decode.d4.loss_cls: 0.0665, decode.d4.loss_mask: 0.1946, decode.d4.loss_dice: 0.5378, decode.d5.loss_cls: 0.0660, decode.d5.loss_mask: 0.1942, decode.d5.loss_dice: 0.5384, decode.d6.loss_cls: 0.0624, decode.d6.loss_mask: 0.1942, decode.d6.loss_dice: 0.5379, decode.d7.loss_cls: 0.0606, decode.d7.loss_mask: 0.1937, decode.d7.loss_dice: 0.5347, decode.d8.loss_cls: 0.0630, decode.d8.loss_mask: 0.1943, decode.d8.loss_dice: 0.5324, loss: 8.2603 +2022-05-10 22:51:39,690 - mmseg - INFO - Iter [52700/80000] lr: 4.900e-07, eta: 22:21:08, time: 1.795, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0511, decode.loss_mask: 0.1891, decode.loss_dice: 0.5303, decode.d0.loss_cls: 0.3051, decode.d0.loss_mask: 
0.1964, decode.d0.loss_dice: 0.5594, decode.d1.loss_cls: 0.0651, decode.d1.loss_mask: 0.1911, decode.d1.loss_dice: 0.5443, decode.d2.loss_cls: 0.0681, decode.d2.loss_mask: 0.1911, decode.d2.loss_dice: 0.5417, decode.d3.loss_cls: 0.0672, decode.d3.loss_mask: 0.1901, decode.d3.loss_dice: 0.5330, decode.d4.loss_cls: 0.0600, decode.d4.loss_mask: 0.1901, decode.d4.loss_dice: 0.5342, decode.d5.loss_cls: 0.0617, decode.d5.loss_mask: 0.1897, decode.d5.loss_dice: 0.5337, decode.d6.loss_cls: 0.0584, decode.d6.loss_mask: 0.1895, decode.d6.loss_dice: 0.5349, decode.d7.loss_cls: 0.0589, decode.d7.loss_mask: 0.1894, decode.d7.loss_dice: 0.5341, decode.d8.loss_cls: 0.0547, decode.d8.loss_mask: 0.1891, decode.d8.loss_dice: 0.5306, loss: 8.1321 +2022-05-10 22:53:10,176 - mmseg - INFO - Iter [52750/80000] lr: 4.891e-07, eta: 22:13:14, time: 1.810, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0641, decode.loss_mask: 0.1896, decode.loss_dice: 0.5223, decode.d0.loss_cls: 0.3008, decode.d0.loss_mask: 0.1984, decode.d0.loss_dice: 0.5493, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1903, decode.d1.loss_dice: 0.5337, decode.d2.loss_cls: 0.0663, decode.d2.loss_mask: 0.1900, decode.d2.loss_dice: 0.5314, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1897, decode.d3.loss_dice: 0.5231, decode.d4.loss_cls: 0.0633, decode.d4.loss_mask: 0.1894, decode.d4.loss_dice: 0.5227, decode.d5.loss_cls: 0.0617, decode.d5.loss_mask: 0.1900, decode.d5.loss_dice: 0.5264, decode.d6.loss_cls: 0.0607, decode.d6.loss_mask: 0.1900, decode.d6.loss_dice: 0.5220, decode.d7.loss_cls: 0.0664, decode.d7.loss_mask: 0.1899, decode.d7.loss_dice: 0.5265, decode.d8.loss_cls: 0.0566, decode.d8.loss_mask: 0.1898, decode.d8.loss_dice: 0.5231, loss: 8.0558 +2022-05-10 22:54:41,021 - mmseg - INFO - Iter [52800/80000] lr: 4.882e-07, eta: 22:05:30, time: 1.815, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0544, decode.loss_mask: 0.1884, decode.loss_dice: 0.5175, decode.d0.loss_cls: 0.2894, 
decode.d0.loss_mask: 0.1951, decode.d0.loss_dice: 0.5443, decode.d1.loss_cls: 0.0688, decode.d1.loss_mask: 0.1898, decode.d1.loss_dice: 0.5338, decode.d2.loss_cls: 0.0637, decode.d2.loss_mask: 0.1894, decode.d2.loss_dice: 0.5260, decode.d3.loss_cls: 0.0572, decode.d3.loss_mask: 0.1889, decode.d3.loss_dice: 0.5213, decode.d4.loss_cls: 0.0543, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5193, decode.d5.loss_cls: 0.0652, decode.d5.loss_mask: 0.1893, decode.d5.loss_dice: 0.5244, decode.d6.loss_cls: 0.0555, decode.d6.loss_mask: 0.1888, decode.d6.loss_dice: 0.5195, decode.d7.loss_cls: 0.0599, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.5242, decode.d8.loss_cls: 0.0587, decode.d8.loss_mask: 0.1884, decode.d8.loss_dice: 0.5218, loss: 7.9749 +2022-05-10 22:56:14,652 - mmseg - INFO - Iter [52850/80000] lr: 4.873e-07, eta: 21:58:10, time: 1.874, data_time: 0.070, memory: 69063, decode.loss_cls: 0.0541, decode.loss_mask: 0.1932, decode.loss_dice: 0.5238, decode.d0.loss_cls: 0.2941, decode.d0.loss_mask: 0.2010, decode.d0.loss_dice: 0.5480, decode.d1.loss_cls: 0.0681, decode.d1.loss_mask: 0.1953, decode.d1.loss_dice: 0.5310, decode.d2.loss_cls: 0.0640, decode.d2.loss_mask: 0.1937, decode.d2.loss_dice: 0.5277, decode.d3.loss_cls: 0.0594, decode.d3.loss_mask: 0.1937, decode.d3.loss_dice: 0.5237, decode.d4.loss_cls: 0.0557, decode.d4.loss_mask: 0.1933, decode.d4.loss_dice: 0.5252, decode.d5.loss_cls: 0.0588, decode.d5.loss_mask: 0.1935, decode.d5.loss_dice: 0.5240, decode.d6.loss_cls: 0.0562, decode.d6.loss_mask: 0.1932, decode.d6.loss_dice: 0.5251, decode.d7.loss_cls: 0.0585, decode.d7.loss_mask: 0.1928, decode.d7.loss_dice: 0.5208, decode.d8.loss_cls: 0.0625, decode.d8.loss_mask: 0.1931, decode.d8.loss_dice: 0.5225, loss: 8.0458 +2022-05-10 22:57:44,471 - mmseg - INFO - Iter [52900/80000] lr: 4.864e-07, eta: 21:50:35, time: 1.796, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0619, decode.loss_mask: 0.1922, decode.loss_dice: 0.5288, decode.d0.loss_cls: 
0.3033, decode.d0.loss_mask: 0.1995, decode.d0.loss_dice: 0.5527, decode.d1.loss_cls: 0.0861, decode.d1.loss_mask: 0.1935, decode.d1.loss_dice: 0.5368, decode.d2.loss_cls: 0.0678, decode.d2.loss_mask: 0.1937, decode.d2.loss_dice: 0.5338, decode.d3.loss_cls: 0.0666, decode.d3.loss_mask: 0.1927, decode.d3.loss_dice: 0.5300, decode.d4.loss_cls: 0.0677, decode.d4.loss_mask: 0.1919, decode.d4.loss_dice: 0.5254, decode.d5.loss_cls: 0.0657, decode.d5.loss_mask: 0.1925, decode.d5.loss_dice: 0.5279, decode.d6.loss_cls: 0.0650, decode.d6.loss_mask: 0.1922, decode.d6.loss_dice: 0.5298, decode.d7.loss_cls: 0.0653, decode.d7.loss_mask: 0.1924, decode.d7.loss_dice: 0.5240, decode.d8.loss_cls: 0.0658, decode.d8.loss_mask: 0.1924, decode.d8.loss_dice: 0.5243, loss: 8.1616 +2022-05-10 22:59:14,774 - mmseg - INFO - Iter [52950/80000] lr: 4.855e-07, eta: 21:43:11, time: 1.806, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0582, decode.loss_mask: 0.1886, decode.loss_dice: 0.5301, decode.d0.loss_cls: 0.2916, decode.d0.loss_mask: 0.1972, decode.d0.loss_dice: 0.5551, decode.d1.loss_cls: 0.0708, decode.d1.loss_mask: 0.1908, decode.d1.loss_dice: 0.5401, decode.d2.loss_cls: 0.0641, decode.d2.loss_mask: 0.1898, decode.d2.loss_dice: 0.5351, decode.d3.loss_cls: 0.0637, decode.d3.loss_mask: 0.1888, decode.d3.loss_dice: 0.5317, decode.d4.loss_cls: 0.0618, decode.d4.loss_mask: 0.1887, decode.d4.loss_dice: 0.5349, decode.d5.loss_cls: 0.0647, decode.d5.loss_mask: 0.1894, decode.d5.loss_dice: 0.5302, decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1886, decode.d6.loss_dice: 0.5281, decode.d7.loss_cls: 0.0540, decode.d7.loss_mask: 0.1888, decode.d7.loss_dice: 0.5299, decode.d8.loss_cls: 0.0557, decode.d8.loss_mask: 0.1888, decode.d8.loss_dice: 0.5295, loss: 8.0874 +2022-05-10 23:00:45,644 - mmseg - INFO - Saving checkpoint at 53000 iterations +2022-05-10 23:01:16,999 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 23:01:17,001 - mmseg - INFO - 
Iter [53000/80000] lr: 4.846e-07, eta: 21:38:46, time: 2.442, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0613, decode.loss_mask: 0.1910, decode.loss_dice: 0.5163, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 0.1974, decode.d0.loss_dice: 0.5441, decode.d1.loss_cls: 0.0723, decode.d1.loss_mask: 0.1914, decode.d1.loss_dice: 0.5201, decode.d2.loss_cls: 0.0637, decode.d2.loss_mask: 0.1905, decode.d2.loss_dice: 0.5174, decode.d3.loss_cls: 0.0689, decode.d3.loss_mask: 0.1905, decode.d3.loss_dice: 0.5151, decode.d4.loss_cls: 0.0623, decode.d4.loss_mask: 0.1909, decode.d4.loss_dice: 0.5129, decode.d5.loss_cls: 0.0644, decode.d5.loss_mask: 0.1908, decode.d5.loss_dice: 0.5134, decode.d6.loss_cls: 0.0645, decode.d6.loss_mask: 0.1909, decode.d6.loss_dice: 0.5138, decode.d7.loss_cls: 0.0652, decode.d7.loss_mask: 0.1913, decode.d7.loss_dice: 0.5134, decode.d8.loss_cls: 0.0631, decode.d8.loss_mask: 0.1909, decode.d8.loss_dice: 0.5106, loss: 7.9746 +2022-05-10 23:03:14,709 - mmseg - INFO - per class results: +2022-05-10 23:03:14,715 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.24 | +| sidewalk | 88.85 | 94.14 | +| building | 94.43 | 97.1 | +| wall | 70.35 | 79.9 | +| fence | 74.02 | 81.85 | +| pole | 71.6 | 84.04 | +| traffic light | 77.07 | 86.6 | +| traffic sign | 83.98 | 90.49 | +| vegetation | 93.38 | 96.97 | +| terrain | 68.55 | 76.98 | +| sky | 95.84 | 98.41 | +| person | 86.79 | 93.47 | +| rider | 74.36 | 83.12 | +| car | 96.21 | 98.24 | +| truck | 80.81 | 94.41 | +| bus | 93.8 | 96.61 | +| train | 88.06 | 91.02 | +| motorcycle | 77.62 | 88.7 | +| bicycle | 82.72 | 91.67 | ++---------------+-------+-------+ +2022-05-10 23:03:14,715 - mmseg - INFO - Summary: +2022-05-10 23:03:14,715 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.03 | 84.06 | 90.68 | ++-------+-------+-------+ +2022-05-10 23:03:14,719 - mmseg - INFO - Exp 
name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 23:03:14,719 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8406, mAcc: 0.9068, IoU.road: 0.9862, IoU.sidewalk: 0.8885, IoU.building: 0.9443, IoU.wall: 0.7035, IoU.fence: 0.7402, IoU.pole: 0.7160, IoU.traffic light: 0.7707, IoU.traffic sign: 0.8398, IoU.vegetation: 0.9338, IoU.terrain: 0.6855, IoU.sky: 0.9584, IoU.person: 0.8679, IoU.rider: 0.7436, IoU.car: 0.9621, IoU.truck: 0.8081, IoU.bus: 0.9380, IoU.train: 0.8806, IoU.motorcycle: 0.7762, IoU.bicycle: 0.8272, Acc.road: 0.9924, Acc.sidewalk: 0.9414, Acc.building: 0.9710, Acc.wall: 0.7990, Acc.fence: 0.8185, Acc.pole: 0.8404, Acc.traffic light: 0.8660, Acc.traffic sign: 0.9049, Acc.vegetation: 0.9697, Acc.terrain: 0.7698, Acc.sky: 0.9841, Acc.person: 0.9347, Acc.rider: 0.8312, Acc.car: 0.9824, Acc.truck: 0.9441, Acc.bus: 0.9661, Acc.train: 0.9102, Acc.motorcycle: 0.8870, Acc.bicycle: 0.9167 +2022-05-10 23:04:47,982 - mmseg - INFO - Iter [53050/80000] lr: 4.837e-07, eta: 21:42:17, time: 4.221, data_time: 2.420, memory: 69063, decode.loss_cls: 0.0555, decode.loss_mask: 0.1946, decode.loss_dice: 0.5233, decode.d0.loss_cls: 0.3086, decode.d0.loss_mask: 0.2013, decode.d0.loss_dice: 0.5357, decode.d1.loss_cls: 0.0648, decode.d1.loss_mask: 0.1975, decode.d1.loss_dice: 0.5270, decode.d2.loss_cls: 0.0612, decode.d2.loss_mask: 0.1962, decode.d2.loss_dice: 0.5287, decode.d3.loss_cls: 0.0513, decode.d3.loss_mask: 0.1948, decode.d3.loss_dice: 0.5207, decode.d4.loss_cls: 0.0529, decode.d4.loss_mask: 0.1954, decode.d4.loss_dice: 0.5214, decode.d5.loss_cls: 0.0519, decode.d5.loss_mask: 0.1951, decode.d5.loss_dice: 0.5259, decode.d6.loss_cls: 0.0539, decode.d6.loss_mask: 0.1946, decode.d6.loss_dice: 0.5194, decode.d7.loss_cls: 0.0535, decode.d7.loss_mask: 0.1948, decode.d7.loss_dice: 0.5211, decode.d8.loss_cls: 0.0504, decode.d8.loss_mask: 0.1953, decode.d8.loss_dice: 0.5187, loss: 8.0055 +2022-05-10 23:06:18,982 - mmseg - INFO - Iter [53100/80000] 
lr: 4.828e-07, eta: 21:35:08, time: 1.820, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0548, decode.loss_mask: 0.1953, decode.loss_dice: 0.5274, decode.d0.loss_cls: 0.3044, decode.d0.loss_mask: 0.2024, decode.d0.loss_dice: 0.5489, decode.d1.loss_cls: 0.0780, decode.d1.loss_mask: 0.1968, decode.d1.loss_dice: 0.5353, decode.d2.loss_cls: 0.0659, decode.d2.loss_mask: 0.1963, decode.d2.loss_dice: 0.5323, decode.d3.loss_cls: 0.0630, decode.d3.loss_mask: 0.1958, decode.d3.loss_dice: 0.5325, decode.d4.loss_cls: 0.0562, decode.d4.loss_mask: 0.1954, decode.d4.loss_dice: 0.5307, decode.d5.loss_cls: 0.0547, decode.d5.loss_mask: 0.1960, decode.d5.loss_dice: 0.5327, decode.d6.loss_cls: 0.0580, decode.d6.loss_mask: 0.1957, decode.d6.loss_dice: 0.5294, decode.d7.loss_cls: 0.0548, decode.d7.loss_mask: 0.1954, decode.d7.loss_dice: 0.5287, decode.d8.loss_cls: 0.0577, decode.d8.loss_mask: 0.1953, decode.d8.loss_dice: 0.5253, loss: 8.1350 +2022-05-10 23:07:50,052 - mmseg - INFO - Iter [53150/80000] lr: 4.819e-07, eta: 21:28:05, time: 1.822, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0556, decode.loss_mask: 0.1923, decode.loss_dice: 0.5228, decode.d0.loss_cls: 0.3052, decode.d0.loss_mask: 0.2004, decode.d0.loss_dice: 0.5471, decode.d1.loss_cls: 0.0712, decode.d1.loss_mask: 0.1945, decode.d1.loss_dice: 0.5345, decode.d2.loss_cls: 0.0638, decode.d2.loss_mask: 0.1937, decode.d2.loss_dice: 0.5306, decode.d3.loss_cls: 0.0543, decode.d3.loss_mask: 0.1930, decode.d3.loss_dice: 0.5285, decode.d4.loss_cls: 0.0560, decode.d4.loss_mask: 0.1927, decode.d4.loss_dice: 0.5273, decode.d5.loss_cls: 0.0609, decode.d5.loss_mask: 0.1929, decode.d5.loss_dice: 0.5274, decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1929, decode.d6.loss_dice: 0.5251, decode.d7.loss_cls: 0.0546, decode.d7.loss_mask: 0.1927, decode.d7.loss_dice: 0.5248, decode.d8.loss_cls: 0.0524, decode.d8.loss_mask: 0.1919, decode.d8.loss_dice: 0.5244, loss: 8.0621 +2022-05-10 23:09:20,901 - mmseg - INFO - Iter 
[53200/80000] lr: 4.810e-07, eta: 21:21:08, time: 1.817, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0615, decode.loss_mask: 0.1910, decode.loss_dice: 0.5156, decode.d0.loss_cls: 0.3057, decode.d0.loss_mask: 0.1986, decode.d0.loss_dice: 0.5466, decode.d1.loss_cls: 0.0770, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5265, decode.d2.loss_cls: 0.0668, decode.d2.loss_mask: 0.1916, decode.d2.loss_dice: 0.5216, decode.d3.loss_cls: 0.0590, decode.d3.loss_mask: 0.1914, decode.d3.loss_dice: 0.5212, decode.d4.loss_cls: 0.0562, decode.d4.loss_mask: 0.1917, decode.d4.loss_dice: 0.5141, decode.d5.loss_cls: 0.0595, decode.d5.loss_mask: 0.1908, decode.d5.loss_dice: 0.5197, decode.d6.loss_cls: 0.0566, decode.d6.loss_mask: 0.1908, decode.d6.loss_dice: 0.5176, decode.d7.loss_cls: 0.0588, decode.d7.loss_mask: 0.1907, decode.d7.loss_dice: 0.5188, decode.d8.loss_cls: 0.0610, decode.d8.loss_mask: 0.1908, decode.d8.loss_dice: 0.5174, loss: 8.0003 +2022-05-10 23:10:53,162 - mmseg - INFO - Iter [53250/80000] lr: 4.801e-07, eta: 21:14:24, time: 1.845, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0627, decode.loss_mask: 0.1925, decode.loss_dice: 0.5277, decode.d0.loss_cls: 0.3006, decode.d0.loss_mask: 0.1988, decode.d0.loss_dice: 0.5482, decode.d1.loss_cls: 0.0746, decode.d1.loss_mask: 0.1937, decode.d1.loss_dice: 0.5323, decode.d2.loss_cls: 0.0692, decode.d2.loss_mask: 0.1928, decode.d2.loss_dice: 0.5338, decode.d3.loss_cls: 0.0668, decode.d3.loss_mask: 0.1930, decode.d3.loss_dice: 0.5250, decode.d4.loss_cls: 0.0660, decode.d4.loss_mask: 0.1926, decode.d4.loss_dice: 0.5281, decode.d5.loss_cls: 0.0593, decode.d5.loss_mask: 0.1920, decode.d5.loss_dice: 0.5303, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1925, decode.d6.loss_dice: 0.5283, decode.d7.loss_cls: 0.0593, decode.d7.loss_mask: 0.1922, decode.d7.loss_dice: 0.5255, decode.d8.loss_cls: 0.0614, decode.d8.loss_mask: 0.1924, decode.d8.loss_dice: 0.5275, loss: 8.1186 +2022-05-10 23:12:25,676 - mmseg - INFO - 
Iter [53300/80000] lr: 4.792e-07, eta: 21:07:47, time: 1.850, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0599, decode.loss_mask: 0.1950, decode.loss_dice: 0.5319, decode.d0.loss_cls: 0.3145, decode.d0.loss_mask: 0.2027, decode.d0.loss_dice: 0.5585, decode.d1.loss_cls: 0.0823, decode.d1.loss_mask: 0.1959, decode.d1.loss_dice: 0.5371, decode.d2.loss_cls: 0.0706, decode.d2.loss_mask: 0.1957, decode.d2.loss_dice: 0.5358, decode.d3.loss_cls: 0.0660, decode.d3.loss_mask: 0.1959, decode.d3.loss_dice: 0.5337, decode.d4.loss_cls: 0.0654, decode.d4.loss_mask: 0.1958, decode.d4.loss_dice: 0.5332, decode.d5.loss_cls: 0.0666, decode.d5.loss_mask: 0.1956, decode.d5.loss_dice: 0.5336, decode.d6.loss_cls: 0.0635, decode.d6.loss_mask: 0.1953, decode.d6.loss_dice: 0.5295, decode.d7.loss_cls: 0.0643, decode.d7.loss_mask: 0.1947, decode.d7.loss_dice: 0.5304, decode.d8.loss_cls: 0.0596, decode.d8.loss_mask: 0.1952, decode.d8.loss_dice: 0.5326, loss: 8.2307 +2022-05-10 23:13:55,918 - mmseg - INFO - Iter [53350/80000] lr: 4.783e-07, eta: 21:01:05, time: 1.805, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0589, decode.loss_mask: 0.1895, decode.loss_dice: 0.5284, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.1963, decode.d0.loss_dice: 0.5501, decode.d1.loss_cls: 0.0709, decode.d1.loss_mask: 0.1915, decode.d1.loss_dice: 0.5358, decode.d2.loss_cls: 0.0629, decode.d2.loss_mask: 0.1902, decode.d2.loss_dice: 0.5337, decode.d3.loss_cls: 0.0600, decode.d3.loss_mask: 0.1906, decode.d3.loss_dice: 0.5298, decode.d4.loss_cls: 0.0552, decode.d4.loss_mask: 0.1903, decode.d4.loss_dice: 0.5296, decode.d5.loss_cls: 0.0561, decode.d5.loss_mask: 0.1904, decode.d5.loss_dice: 0.5275, decode.d6.loss_cls: 0.0606, decode.d6.loss_mask: 0.1901, decode.d6.loss_dice: 0.5284, decode.d7.loss_cls: 0.0581, decode.d7.loss_mask: 0.1899, decode.d7.loss_dice: 0.5272, decode.d8.loss_cls: 0.0543, decode.d8.loss_mask: 0.1895, decode.d8.loss_dice: 0.5272, loss: 8.0590 +2022-05-10 23:15:30,627 - mmseg - INFO 
- Iter [53400/80000] lr: 4.774e-07, eta: 20:54:50, time: 1.894, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0537, decode.loss_mask: 0.1944, decode.loss_dice: 0.5169, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.2023, decode.d0.loss_dice: 0.5404, decode.d1.loss_cls: 0.0764, decode.d1.loss_mask: 0.1958, decode.d1.loss_dice: 0.5281, decode.d2.loss_cls: 0.0693, decode.d2.loss_mask: 0.1943, decode.d2.loss_dice: 0.5209, decode.d3.loss_cls: 0.0583, decode.d3.loss_mask: 0.1944, decode.d3.loss_dice: 0.5188, decode.d4.loss_cls: 0.0634, decode.d4.loss_mask: 0.1945, decode.d4.loss_dice: 0.5187, decode.d5.loss_cls: 0.0595, decode.d5.loss_mask: 0.1951, decode.d5.loss_dice: 0.5203, decode.d6.loss_cls: 0.0604, decode.d6.loss_mask: 0.1938, decode.d6.loss_dice: 0.5150, decode.d7.loss_cls: 0.0604, decode.d7.loss_mask: 0.1946, decode.d7.loss_dice: 0.5186, decode.d8.loss_cls: 0.0593, decode.d8.loss_mask: 0.1943, decode.d8.loss_dice: 0.5150, loss: 8.0238 +2022-05-10 23:17:01,972 - mmseg - INFO - Iter [53450/80000] lr: 4.765e-07, eta: 20:48:24, time: 1.827, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0464, decode.loss_mask: 0.1927, decode.loss_dice: 0.5134, decode.d0.loss_cls: 0.2861, decode.d0.loss_mask: 0.1996, decode.d0.loss_dice: 0.5378, decode.d1.loss_cls: 0.0590, decode.d1.loss_mask: 0.1946, decode.d1.loss_dice: 0.5213, decode.d2.loss_cls: 0.0565, decode.d2.loss_mask: 0.1937, decode.d2.loss_dice: 0.5184, decode.d3.loss_cls: 0.0486, decode.d3.loss_mask: 0.1933, decode.d3.loss_dice: 0.5130, decode.d4.loss_cls: 0.0508, decode.d4.loss_mask: 0.1935, decode.d4.loss_dice: 0.5185, decode.d5.loss_cls: 0.0498, decode.d5.loss_mask: 0.1934, decode.d5.loss_dice: 0.5165, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1932, decode.d6.loss_dice: 0.5186, decode.d7.loss_cls: 0.0517, decode.d7.loss_mask: 0.1929, decode.d7.loss_dice: 0.5165, decode.d8.loss_cls: 0.0472, decode.d8.loss_mask: 0.1929, decode.d8.loss_dice: 0.5141, loss: 7.8723 +2022-05-10 23:18:32,946 - mmseg - 
INFO - Iter [53500/80000] lr: 4.756e-07, eta: 20:42:02, time: 1.819, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0576, decode.loss_mask: 0.1937, decode.loss_dice: 0.5218, decode.d0.loss_cls: 0.3009, decode.d0.loss_mask: 0.2021, decode.d0.loss_dice: 0.5419, decode.d1.loss_cls: 0.0754, decode.d1.loss_mask: 0.1951, decode.d1.loss_dice: 0.5263, decode.d2.loss_cls: 0.0655, decode.d2.loss_mask: 0.1950, decode.d2.loss_dice: 0.5240, decode.d3.loss_cls: 0.0589, decode.d3.loss_mask: 0.1943, decode.d3.loss_dice: 0.5209, decode.d4.loss_cls: 0.0576, decode.d4.loss_mask: 0.1941, decode.d4.loss_dice: 0.5193, decode.d5.loss_cls: 0.0564, decode.d5.loss_mask: 0.1943, decode.d5.loss_dice: 0.5218, decode.d6.loss_cls: 0.0575, decode.d6.loss_mask: 0.1943, decode.d6.loss_dice: 0.5167, decode.d7.loss_cls: 0.0540, decode.d7.loss_mask: 0.1943, decode.d7.loss_dice: 0.5179, decode.d8.loss_cls: 0.0566, decode.d8.loss_mask: 0.1939, decode.d8.loss_dice: 0.5180, loss: 8.0204 +2022-05-10 23:20:03,711 - mmseg - INFO - Iter [53550/80000] lr: 4.747e-07, eta: 20:35:43, time: 1.815, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0534, decode.loss_mask: 0.1911, decode.loss_dice: 0.5132, decode.d0.loss_cls: 0.2981, decode.d0.loss_mask: 0.1990, decode.d0.loss_dice: 0.5394, decode.d1.loss_cls: 0.0763, decode.d1.loss_mask: 0.1928, decode.d1.loss_dice: 0.5259, decode.d2.loss_cls: 0.0651, decode.d2.loss_mask: 0.1919, decode.d2.loss_dice: 0.5162, decode.d3.loss_cls: 0.0598, decode.d3.loss_mask: 0.1914, decode.d3.loss_dice: 0.5159, decode.d4.loss_cls: 0.0548, decode.d4.loss_mask: 0.1911, decode.d4.loss_dice: 0.5158, decode.d5.loss_cls: 0.0537, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5165, decode.d6.loss_cls: 0.0537, decode.d6.loss_mask: 0.1910, decode.d6.loss_dice: 0.5144, decode.d7.loss_cls: 0.0552, decode.d7.loss_mask: 0.1912, decode.d7.loss_dice: 0.5168, decode.d8.loss_cls: 0.0529, decode.d8.loss_mask: 0.1915, decode.d8.loss_dice: 0.5176, loss: 7.9369 +2022-05-10 23:21:37,302 - mmseg 
- INFO - Iter [53600/80000] lr: 4.738e-07, eta: 20:29:44, time: 1.872, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0588, decode.loss_mask: 0.1930, decode.loss_dice: 0.5258, decode.d0.loss_cls: 0.3038, decode.d0.loss_mask: 0.2017, decode.d0.loss_dice: 0.5438, decode.d1.loss_cls: 0.0710, decode.d1.loss_mask: 0.1951, decode.d1.loss_dice: 0.5315, decode.d2.loss_cls: 0.0655, decode.d2.loss_mask: 0.1941, decode.d2.loss_dice: 0.5277, decode.d3.loss_cls: 0.0608, decode.d3.loss_mask: 0.1938, decode.d3.loss_dice: 0.5227, decode.d4.loss_cls: 0.0619, decode.d4.loss_mask: 0.1935, decode.d4.loss_dice: 0.5226, decode.d5.loss_cls: 0.0614, decode.d5.loss_mask: 0.1931, decode.d5.loss_dice: 0.5278, decode.d6.loss_cls: 0.0610, decode.d6.loss_mask: 0.1932, decode.d6.loss_dice: 0.5238, decode.d7.loss_cls: 0.0605, decode.d7.loss_mask: 0.1931, decode.d7.loss_dice: 0.5241, decode.d8.loss_cls: 0.0585, decode.d8.loss_mask: 0.1933, decode.d8.loss_dice: 0.5232, loss: 8.0799 +2022-05-10 23:23:07,864 - mmseg - INFO - Iter [53650/80000] lr: 4.729e-07, eta: 20:23:35, time: 1.811, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0693, decode.loss_mask: 0.1880, decode.loss_dice: 0.5299, decode.d0.loss_cls: 0.3036, decode.d0.loss_mask: 0.1960, decode.d0.loss_dice: 0.5589, decode.d1.loss_cls: 0.0898, decode.d1.loss_mask: 0.1891, decode.d1.loss_dice: 0.5466, decode.d2.loss_cls: 0.0728, decode.d2.loss_mask: 0.1886, decode.d2.loss_dice: 0.5385, decode.d3.loss_cls: 0.0680, decode.d3.loss_mask: 0.1886, decode.d3.loss_dice: 0.5363, decode.d4.loss_cls: 0.0683, decode.d4.loss_mask: 0.1880, decode.d4.loss_dice: 0.5323, decode.d5.loss_cls: 0.0716, decode.d5.loss_mask: 0.1879, decode.d5.loss_dice: 0.5301, decode.d6.loss_cls: 0.0681, decode.d6.loss_mask: 0.1879, decode.d6.loss_dice: 0.5300, decode.d7.loss_cls: 0.0676, decode.d7.loss_mask: 0.1879, decode.d7.loss_dice: 0.5278, decode.d8.loss_cls: 0.0657, decode.d8.loss_mask: 0.1884, decode.d8.loss_dice: 0.5301, loss: 8.1957 +2022-05-10 23:24:38,960 - 
mmseg - INFO - Iter [53700/80000] lr: 4.720e-07, eta: 20:17:33, time: 1.822, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0565, decode.loss_mask: 0.1930, decode.loss_dice: 0.5245, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 0.2012, decode.d0.loss_dice: 0.5477, decode.d1.loss_cls: 0.0708, decode.d1.loss_mask: 0.1939, decode.d1.loss_dice: 0.5339, decode.d2.loss_cls: 0.0611, decode.d2.loss_mask: 0.1939, decode.d2.loss_dice: 0.5281, decode.d3.loss_cls: 0.0598, decode.d3.loss_mask: 0.1934, decode.d3.loss_dice: 0.5232, decode.d4.loss_cls: 0.0584, decode.d4.loss_mask: 0.1929, decode.d4.loss_dice: 0.5251, decode.d5.loss_cls: 0.0625, decode.d5.loss_mask: 0.1931, decode.d5.loss_dice: 0.5237, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1929, decode.d6.loss_dice: 0.5248, decode.d7.loss_cls: 0.0584, decode.d7.loss_mask: 0.1928, decode.d7.loss_dice: 0.5247, decode.d8.loss_cls: 0.0594, decode.d8.loss_mask: 0.1930, decode.d8.loss_dice: 0.5226, loss: 8.0528 +2022-05-10 23:26:10,446 - mmseg - INFO - Iter [53750/80000] lr: 4.711e-07, eta: 20:11:38, time: 1.829, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0569, decode.loss_mask: 0.1920, decode.loss_dice: 0.5106, decode.d0.loss_cls: 0.3068, decode.d0.loss_mask: 0.1995, decode.d0.loss_dice: 0.5353, decode.d1.loss_cls: 0.0767, decode.d1.loss_mask: 0.1930, decode.d1.loss_dice: 0.5225, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.1921, decode.d2.loss_dice: 0.5145, decode.d3.loss_cls: 0.0661, decode.d3.loss_mask: 0.1912, decode.d3.loss_dice: 0.5102, decode.d4.loss_cls: 0.0607, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5132, decode.d5.loss_cls: 0.0641, decode.d5.loss_mask: 0.1920, decode.d5.loss_dice: 0.5141, decode.d6.loss_cls: 0.0604, decode.d6.loss_mask: 0.1918, decode.d6.loss_dice: 0.5091, decode.d7.loss_cls: 0.0611, decode.d7.loss_mask: 0.1917, decode.d7.loss_dice: 0.5118, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1923, decode.d8.loss_dice: 0.5142, loss: 7.9627 +2022-05-10 23:27:43,216 
- mmseg - INFO - Iter [53800/80000] lr: 4.702e-07, eta: 20:05:53, time: 1.856, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0639, decode.loss_mask: 0.1951, decode.loss_dice: 0.5247, decode.d0.loss_cls: 0.2971, decode.d0.loss_mask: 0.2030, decode.d0.loss_dice: 0.5482, decode.d1.loss_cls: 0.0848, decode.d1.loss_mask: 0.1972, decode.d1.loss_dice: 0.5366, decode.d2.loss_cls: 0.0720, decode.d2.loss_mask: 0.1955, decode.d2.loss_dice: 0.5277, decode.d3.loss_cls: 0.0706, decode.d3.loss_mask: 0.1950, decode.d3.loss_dice: 0.5223, decode.d4.loss_cls: 0.0719, decode.d4.loss_mask: 0.1953, decode.d4.loss_dice: 0.5231, decode.d5.loss_cls: 0.0635, decode.d5.loss_mask: 0.1952, decode.d5.loss_dice: 0.5230, decode.d6.loss_cls: 0.0626, decode.d6.loss_mask: 0.1948, decode.d6.loss_dice: 0.5228, decode.d7.loss_cls: 0.0622, decode.d7.loss_mask: 0.1951, decode.d7.loss_dice: 0.5208, decode.d8.loss_cls: 0.0652, decode.d8.loss_mask: 0.1948, decode.d8.loss_dice: 0.5206, loss: 8.1446 +2022-05-10 23:29:12,514 - mmseg - INFO - Iter [53850/80000] lr: 4.693e-07, eta: 19:59:57, time: 1.786, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0596, decode.loss_mask: 0.1945, decode.loss_dice: 0.5268, decode.d0.loss_cls: 0.3047, decode.d0.loss_mask: 0.2032, decode.d0.loss_dice: 0.5452, decode.d1.loss_cls: 0.0868, decode.d1.loss_mask: 0.1954, decode.d1.loss_dice: 0.5293, decode.d2.loss_cls: 0.0707, decode.d2.loss_mask: 0.1946, decode.d2.loss_dice: 0.5236, decode.d3.loss_cls: 0.0661, decode.d3.loss_mask: 0.1937, decode.d3.loss_dice: 0.5245, decode.d4.loss_cls: 0.0657, decode.d4.loss_mask: 0.1957, decode.d4.loss_dice: 0.5254, decode.d5.loss_cls: 0.0665, decode.d5.loss_mask: 0.1946, decode.d5.loss_dice: 0.5258, decode.d6.loss_cls: 0.0685, decode.d6.loss_mask: 0.1949, decode.d6.loss_dice: 0.5237, decode.d7.loss_cls: 0.0626, decode.d7.loss_mask: 0.1949, decode.d7.loss_dice: 0.5218, decode.d8.loss_cls: 0.0620, decode.d8.loss_mask: 0.1948, decode.d8.loss_dice: 0.5226, loss: 8.1380 +2022-05-10 
23:30:43,137 - mmseg - INFO - Iter [53900/80000] lr: 4.684e-07, eta: 19:54:11, time: 1.812, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0591, decode.loss_mask: 0.1894, decode.loss_dice: 0.5205, decode.d0.loss_cls: 0.3019, decode.d0.loss_mask: 0.1968, decode.d0.loss_dice: 0.5472, decode.d1.loss_cls: 0.0702, decode.d1.loss_mask: 0.1909, decode.d1.loss_dice: 0.5301, decode.d2.loss_cls: 0.0618, decode.d2.loss_mask: 0.1901, decode.d2.loss_dice: 0.5281, decode.d3.loss_cls: 0.0637, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5234, decode.d4.loss_cls: 0.0638, decode.d4.loss_mask: 0.1898, decode.d4.loss_dice: 0.5266, decode.d5.loss_cls: 0.0585, decode.d5.loss_mask: 0.1895, decode.d5.loss_dice: 0.5254, decode.d6.loss_cls: 0.0546, decode.d6.loss_mask: 0.1895, decode.d6.loss_dice: 0.5236, decode.d7.loss_cls: 0.0516, decode.d7.loss_mask: 0.1898, decode.d7.loss_dice: 0.5189, decode.d8.loss_cls: 0.0539, decode.d8.loss_mask: 0.1894, decode.d8.loss_dice: 0.5220, loss: 8.0091 +2022-05-10 23:32:14,324 - mmseg - INFO - Iter [53950/80000] lr: 4.675e-07, eta: 19:48:32, time: 1.823, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0632, decode.loss_mask: 0.1857, decode.loss_dice: 0.5260, decode.d0.loss_cls: 0.2986, decode.d0.loss_mask: 0.1932, decode.d0.loss_dice: 0.5575, decode.d1.loss_cls: 0.0886, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5369, decode.d2.loss_cls: 0.0723, decode.d2.loss_mask: 0.1866, decode.d2.loss_dice: 0.5346, decode.d3.loss_cls: 0.0664, decode.d3.loss_mask: 0.1864, decode.d3.loss_dice: 0.5285, decode.d4.loss_cls: 0.0644, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5328, decode.d5.loss_cls: 0.0685, decode.d5.loss_mask: 0.1856, decode.d5.loss_dice: 0.5280, decode.d6.loss_cls: 0.0673, decode.d6.loss_mask: 0.1856, decode.d6.loss_dice: 0.5275, decode.d7.loss_cls: 0.0652, decode.d7.loss_mask: 0.1858, decode.d7.loss_dice: 0.5271, decode.d8.loss_cls: 0.0667, decode.d8.loss_mask: 0.1858, decode.d8.loss_dice: 0.5317, loss: 8.1196 
+2022-05-10 23:33:45,868 - mmseg - INFO - Saving checkpoint at 54000 iterations +2022-05-10 23:34:22,636 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 23:34:22,643 - mmseg - INFO - Iter [54000/80000] lr: 4.667e-07, eta: 19:45:38, time: 2.564, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0564, decode.loss_mask: 0.1988, decode.loss_dice: 0.5230, decode.d0.loss_cls: 0.3096, decode.d0.loss_mask: 0.2080, decode.d0.loss_dice: 0.5462, decode.d1.loss_cls: 0.0764, decode.d1.loss_mask: 0.2005, decode.d1.loss_dice: 0.5366, decode.d2.loss_cls: 0.0704, decode.d2.loss_mask: 0.1993, decode.d2.loss_dice: 0.5284, decode.d3.loss_cls: 0.0674, decode.d3.loss_mask: 0.1995, decode.d3.loss_dice: 0.5278, decode.d4.loss_cls: 0.0632, decode.d4.loss_mask: 0.1989, decode.d4.loss_dice: 0.5279, decode.d5.loss_cls: 0.0606, decode.d5.loss_mask: 0.1984, decode.d5.loss_dice: 0.5229, decode.d6.loss_cls: 0.0592, decode.d6.loss_mask: 0.1980, decode.d6.loss_dice: 0.5243, decode.d7.loss_cls: 0.0594, decode.d7.loss_mask: 0.1981, decode.d7.loss_dice: 0.5266, decode.d8.loss_cls: 0.0628, decode.d8.loss_mask: 0.1984, decode.d8.loss_dice: 0.5239, loss: 8.1710 +2022-05-10 23:36:18,175 - mmseg - INFO - per class results: +2022-05-10 23:36:18,185 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.59 | 99.27 | +| sidewalk | 88.57 | 93.76 | +| building | 94.36 | 97.17 | +| wall | 69.18 | 79.71 | +| fence | 75.37 | 82.97 | +| pole | 71.61 | 84.22 | +| traffic light | 77.39 | 87.39 | +| traffic sign | 83.75 | 90.56 | +| vegetation | 93.49 | 96.83 | +| terrain | 68.43 | 75.97 | +| sky | 95.9 | 98.35 | +| person | 86.82 | 93.65 | +| rider | 74.13 | 85.82 | +| car | 96.09 | 98.34 | +| truck | 82.67 | 94.51 | +| bus | 93.72 | 96.56 | +| train | 88.16 | 90.94 | +| motorcycle | 77.4 | 87.67 | +| bicycle | 82.8 | 90.91 | ++---------------+-------+-------+ +2022-05-10 23:36:18,186 - mmseg - INFO - 
Summary: +2022-05-10 23:36:18,186 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.02 | 84.13 | 90.77 | ++-------+-------+-------+ +2022-05-10 23:36:18,190 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-10 23:36:18,190 - mmseg - INFO - Iter(val) [32] aAcc: 0.9702, mIoU: 0.8413, mAcc: 0.9077, IoU.road: 0.9859, IoU.sidewalk: 0.8857, IoU.building: 0.9436, IoU.wall: 0.6918, IoU.fence: 0.7537, IoU.pole: 0.7161, IoU.traffic light: 0.7739, IoU.traffic sign: 0.8375, IoU.vegetation: 0.9349, IoU.terrain: 0.6843, IoU.sky: 0.9590, IoU.person: 0.8682, IoU.rider: 0.7413, IoU.car: 0.9609, IoU.truck: 0.8267, IoU.bus: 0.9372, IoU.train: 0.8816, IoU.motorcycle: 0.7740, IoU.bicycle: 0.8280, Acc.road: 0.9927, Acc.sidewalk: 0.9376, Acc.building: 0.9717, Acc.wall: 0.7971, Acc.fence: 0.8297, Acc.pole: 0.8422, Acc.traffic light: 0.8739, Acc.traffic sign: 0.9056, Acc.vegetation: 0.9683, Acc.terrain: 0.7597, Acc.sky: 0.9835, Acc.person: 0.9365, Acc.rider: 0.8582, Acc.car: 0.9834, Acc.truck: 0.9451, Acc.bus: 0.9656, Acc.train: 0.9094, Acc.motorcycle: 0.8767, Acc.bicycle: 0.9091 +2022-05-10 23:37:49,163 - mmseg - INFO - Iter [54050/80000] lr: 4.658e-07, eta: 19:48:20, time: 4.133, data_time: 2.331, memory: 69063, decode.loss_cls: 0.0560, decode.loss_mask: 0.1905, decode.loss_dice: 0.5326, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1972, decode.d0.loss_dice: 0.5652, decode.d1.loss_cls: 0.0744, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5466, decode.d2.loss_cls: 0.0656, decode.d2.loss_mask: 0.1910, decode.d2.loss_dice: 0.5421, decode.d3.loss_cls: 0.0575, decode.d3.loss_mask: 0.1900, decode.d3.loss_dice: 0.5339, decode.d4.loss_cls: 0.0603, decode.d4.loss_mask: 0.1908, decode.d4.loss_dice: 0.5382, decode.d5.loss_cls: 0.0612, decode.d5.loss_mask: 0.1908, decode.d5.loss_dice: 0.5403, decode.d6.loss_cls: 0.0535, decode.d6.loss_mask: 0.1905, decode.d6.loss_dice: 0.5361, 
decode.d7.loss_cls: 0.0598, decode.d7.loss_mask: 0.1909, decode.d7.loss_dice: 0.5384, decode.d8.loss_cls: 0.0561, decode.d8.loss_mask: 0.1907, decode.d8.loss_dice: 0.5387, loss: 8.1660 +2022-05-10 23:39:20,326 - mmseg - INFO - Iter [54100/80000] lr: 4.649e-07, eta: 19:42:47, time: 1.823, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0708, decode.loss_mask: 0.1878, decode.loss_dice: 0.5267, decode.d0.loss_cls: 0.3045, decode.d0.loss_mask: 0.1949, decode.d0.loss_dice: 0.5486, decode.d1.loss_cls: 0.0829, decode.d1.loss_mask: 0.1887, decode.d1.loss_dice: 0.5318, decode.d2.loss_cls: 0.0803, decode.d2.loss_mask: 0.1880, decode.d2.loss_dice: 0.5253, decode.d3.loss_cls: 0.0795, decode.d3.loss_mask: 0.1884, decode.d3.loss_dice: 0.5221, decode.d4.loss_cls: 0.0751, decode.d4.loss_mask: 0.1888, decode.d4.loss_dice: 0.5263, decode.d5.loss_cls: 0.0724, decode.d5.loss_mask: 0.1884, decode.d5.loss_dice: 0.5240, decode.d6.loss_cls: 0.0698, decode.d6.loss_mask: 0.1878, decode.d6.loss_dice: 0.5230, decode.d7.loss_cls: 0.0693, decode.d7.loss_mask: 0.1886, decode.d7.loss_dice: 0.5225, decode.d8.loss_cls: 0.0706, decode.d8.loss_mask: 0.1878, decode.d8.loss_dice: 0.5206, loss: 8.1352 +2022-05-10 23:40:52,642 - mmseg - INFO - Iter [54150/80000] lr: 4.640e-07, eta: 19:37:22, time: 1.846, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0602, decode.loss_mask: 0.1904, decode.loss_dice: 0.5355, decode.d0.loss_cls: 0.3049, decode.d0.loss_mask: 0.1980, decode.d0.loss_dice: 0.5596, decode.d1.loss_cls: 0.0910, decode.d1.loss_mask: 0.1923, decode.d1.loss_dice: 0.5429, decode.d2.loss_cls: 0.0749, decode.d2.loss_mask: 0.1916, decode.d2.loss_dice: 0.5388, decode.d3.loss_cls: 0.0665, decode.d3.loss_mask: 0.1914, decode.d3.loss_dice: 0.5366, decode.d4.loss_cls: 0.0681, decode.d4.loss_mask: 0.1912, decode.d4.loss_dice: 0.5385, decode.d5.loss_cls: 0.0693, decode.d5.loss_mask: 0.1915, decode.d5.loss_dice: 0.5349, decode.d6.loss_cls: 0.0654, decode.d6.loss_mask: 0.1910, decode.d6.loss_dice: 
0.5321, decode.d7.loss_cls: 0.0644, decode.d7.loss_mask: 0.1908, decode.d7.loss_dice: 0.5347, decode.d8.loss_cls: 0.0671, decode.d8.loss_mask: 0.1905, decode.d8.loss_dice: 0.5366, loss: 8.2405 +2022-05-10 23:42:20,986 - mmseg - INFO - Iter [54200/80000] lr: 4.631e-07, eta: 19:31:44, time: 1.767, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0623, decode.loss_mask: 0.1917, decode.loss_dice: 0.5161, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1998, decode.d0.loss_dice: 0.5385, decode.d1.loss_cls: 0.0776, decode.d1.loss_mask: 0.1934, decode.d1.loss_dice: 0.5302, decode.d2.loss_cls: 0.0692, decode.d2.loss_mask: 0.1927, decode.d2.loss_dice: 0.5210, decode.d3.loss_cls: 0.0648, decode.d3.loss_mask: 0.1930, decode.d3.loss_dice: 0.5200, decode.d4.loss_cls: 0.0605, decode.d4.loss_mask: 0.1924, decode.d4.loss_dice: 0.5221, decode.d5.loss_cls: 0.0620, decode.d5.loss_mask: 0.1921, decode.d5.loss_dice: 0.5225, decode.d6.loss_cls: 0.0518, decode.d6.loss_mask: 0.1917, decode.d6.loss_dice: 0.5185, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1920, decode.d7.loss_dice: 0.5227, decode.d8.loss_cls: 0.0604, decode.d8.loss_mask: 0.1915, decode.d8.loss_dice: 0.5204, loss: 8.0252 +2022-05-10 23:43:50,859 - mmseg - INFO - Iter [54250/80000] lr: 4.622e-07, eta: 19:26:17, time: 1.797, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1972, decode.loss_dice: 0.5264, decode.d0.loss_cls: 0.2935, decode.d0.loss_mask: 0.2043, decode.d0.loss_dice: 0.5519, decode.d1.loss_cls: 0.0644, decode.d1.loss_mask: 0.1975, decode.d1.loss_dice: 0.5372, decode.d2.loss_cls: 0.0661, decode.d2.loss_mask: 0.1970, decode.d2.loss_dice: 0.5323, decode.d3.loss_cls: 0.0642, decode.d3.loss_mask: 0.1971, decode.d3.loss_dice: 0.5295, decode.d4.loss_cls: 0.0635, decode.d4.loss_mask: 0.1972, decode.d4.loss_dice: 0.5317, decode.d5.loss_cls: 0.0634, decode.d5.loss_mask: 0.1968, decode.d5.loss_dice: 0.5283, decode.d6.loss_cls: 0.0646, decode.d6.loss_mask: 0.1971, 
decode.d6.loss_dice: 0.5304, decode.d7.loss_cls: 0.0600, decode.d7.loss_mask: 0.1972, decode.d7.loss_dice: 0.5262, decode.d8.loss_cls: 0.0532, decode.d8.loss_mask: 0.1970, decode.d8.loss_dice: 0.5270, loss: 8.1488 +2022-05-10 23:45:21,554 - mmseg - INFO - Iter [54300/80000] lr: 4.613e-07, eta: 19:20:57, time: 1.813, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0615, decode.loss_mask: 0.1874, decode.loss_dice: 0.5193, decode.d0.loss_cls: 0.2941, decode.d0.loss_mask: 0.1964, decode.d0.loss_dice: 0.5478, decode.d1.loss_cls: 0.0829, decode.d1.loss_mask: 0.1891, decode.d1.loss_dice: 0.5304, decode.d2.loss_cls: 0.0680, decode.d2.loss_mask: 0.1876, decode.d2.loss_dice: 0.5290, decode.d3.loss_cls: 0.0672, decode.d3.loss_mask: 0.1871, decode.d3.loss_dice: 0.5193, decode.d4.loss_cls: 0.0648, decode.d4.loss_mask: 0.1877, decode.d4.loss_dice: 0.5210, decode.d5.loss_cls: 0.0589, decode.d5.loss_mask: 0.1873, decode.d5.loss_dice: 0.5248, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.1872, decode.d6.loss_dice: 0.5224, decode.d7.loss_cls: 0.0597, decode.d7.loss_mask: 0.1873, decode.d7.loss_dice: 0.5188, decode.d8.loss_cls: 0.0602, decode.d8.loss_mask: 0.1874, decode.d8.loss_dice: 0.5236, loss: 8.0167 +2022-05-10 23:46:53,287 - mmseg - INFO - Iter [54350/80000] lr: 4.604e-07, eta: 19:15:44, time: 1.835, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0542, decode.loss_mask: 0.1928, decode.loss_dice: 0.5256, decode.d0.loss_cls: 0.2975, decode.d0.loss_mask: 0.2010, decode.d0.loss_dice: 0.5458, decode.d1.loss_cls: 0.0754, decode.d1.loss_mask: 0.1953, decode.d1.loss_dice: 0.5322, decode.d2.loss_cls: 0.0654, decode.d2.loss_mask: 0.1942, decode.d2.loss_dice: 0.5273, decode.d3.loss_cls: 0.0549, decode.d3.loss_mask: 0.1934, decode.d3.loss_dice: 0.5288, decode.d4.loss_cls: 0.0603, decode.d4.loss_mask: 0.1943, decode.d4.loss_dice: 0.5258, decode.d5.loss_cls: 0.0633, decode.d5.loss_mask: 0.1938, decode.d5.loss_dice: 0.5239, decode.d6.loss_cls: 0.0600, decode.d6.loss_mask: 
0.1924, decode.d6.loss_dice: 0.5229, decode.d7.loss_cls: 0.0569, decode.d7.loss_mask: 0.1929, decode.d7.loss_dice: 0.5249, decode.d8.loss_cls: 0.0530, decode.d8.loss_mask: 0.1930, decode.d8.loss_dice: 0.5231, loss: 8.0644 +2022-05-10 23:48:22,757 - mmseg - INFO - Iter [54400/80000] lr: 4.595e-07, eta: 19:10:26, time: 1.789, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0511, decode.loss_mask: 0.1893, decode.loss_dice: 0.5135, decode.d0.loss_cls: 0.2873, decode.d0.loss_mask: 0.1976, decode.d0.loss_dice: 0.5325, decode.d1.loss_cls: 0.0701, decode.d1.loss_mask: 0.1903, decode.d1.loss_dice: 0.5221, decode.d2.loss_cls: 0.0589, decode.d2.loss_mask: 0.1895, decode.d2.loss_dice: 0.5164, decode.d3.loss_cls: 0.0601, decode.d3.loss_mask: 0.1879, decode.d3.loss_dice: 0.5156, decode.d4.loss_cls: 0.0583, decode.d4.loss_mask: 0.1881, decode.d4.loss_dice: 0.5151, decode.d5.loss_cls: 0.0545, decode.d5.loss_mask: 0.1889, decode.d5.loss_dice: 0.5177, decode.d6.loss_cls: 0.0520, decode.d6.loss_mask: 0.1887, decode.d6.loss_dice: 0.5132, decode.d7.loss_cls: 0.0488, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.5156, decode.d8.loss_cls: 0.0482, decode.d8.loss_mask: 0.1888, decode.d8.loss_dice: 0.5121, loss: 7.8609 +2022-05-10 23:49:53,411 - mmseg - INFO - Iter [54450/80000] lr: 4.586e-07, eta: 19:05:17, time: 1.813, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0528, decode.loss_mask: 0.1884, decode.loss_dice: 0.5212, decode.d0.loss_cls: 0.3001, decode.d0.loss_mask: 0.1969, decode.d0.loss_dice: 0.5425, decode.d1.loss_cls: 0.0675, decode.d1.loss_mask: 0.1902, decode.d1.loss_dice: 0.5265, decode.d2.loss_cls: 0.0605, decode.d2.loss_mask: 0.1896, decode.d2.loss_dice: 0.5204, decode.d3.loss_cls: 0.0502, decode.d3.loss_mask: 0.1882, decode.d3.loss_dice: 0.5194, decode.d4.loss_cls: 0.0529, decode.d4.loss_mask: 0.1893, decode.d4.loss_dice: 0.5186, decode.d5.loss_cls: 0.0543, decode.d5.loss_mask: 0.1887, decode.d5.loss_dice: 0.5214, decode.d6.loss_cls: 0.0559, 
decode.d6.loss_mask: 0.1887, decode.d6.loss_dice: 0.5223, decode.d7.loss_cls: 0.0532, decode.d7.loss_mask: 0.1888, decode.d7.loss_dice: 0.5199, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1887, decode.d8.loss_dice: 0.5199, loss: 7.9291 +2022-05-10 23:51:23,622 - mmseg - INFO - Iter [54500/80000] lr: 4.577e-07, eta: 19:00:09, time: 1.804, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0541, decode.loss_mask: 0.1926, decode.loss_dice: 0.5206, decode.d0.loss_cls: 0.2979, decode.d0.loss_mask: 0.2008, decode.d0.loss_dice: 0.5458, decode.d1.loss_cls: 0.0733, decode.d1.loss_mask: 0.1940, decode.d1.loss_dice: 0.5272, decode.d2.loss_cls: 0.0647, decode.d2.loss_mask: 0.1938, decode.d2.loss_dice: 0.5239, decode.d3.loss_cls: 0.0552, decode.d3.loss_mask: 0.1926, decode.d3.loss_dice: 0.5237, decode.d4.loss_cls: 0.0557, decode.d4.loss_mask: 0.1925, decode.d4.loss_dice: 0.5226, decode.d5.loss_cls: 0.0572, decode.d5.loss_mask: 0.1930, decode.d5.loss_dice: 0.5227, decode.d6.loss_cls: 0.0557, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5190, decode.d7.loss_cls: 0.0528, decode.d7.loss_mask: 0.1931, decode.d7.loss_dice: 0.5184, decode.d8.loss_cls: 0.0560, decode.d8.loss_mask: 0.1929, decode.d8.loss_dice: 0.5200, loss: 8.0048 +2022-05-10 23:52:56,536 - mmseg - INFO - Iter [54550/80000] lr: 4.568e-07, eta: 18:55:14, time: 1.858, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0618, decode.loss_mask: 0.1944, decode.loss_dice: 0.5255, decode.d0.loss_cls: 0.3173, decode.d0.loss_mask: 0.2051, decode.d0.loss_dice: 0.5511, decode.d1.loss_cls: 0.0786, decode.d1.loss_mask: 0.1969, decode.d1.loss_dice: 0.5375, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.1962, decode.d2.loss_dice: 0.5342, decode.d3.loss_cls: 0.0698, decode.d3.loss_mask: 0.1954, decode.d3.loss_dice: 0.5311, decode.d4.loss_cls: 0.0679, decode.d4.loss_mask: 0.1951, decode.d4.loss_dice: 0.5286, decode.d5.loss_cls: 0.0704, decode.d5.loss_mask: 0.1956, decode.d5.loss_dice: 0.5313, decode.d6.loss_cls: 
0.0610, decode.d6.loss_mask: 0.1950, decode.d6.loss_dice: 0.5277, decode.d7.loss_cls: 0.0665, decode.d7.loss_mask: 0.1952, decode.d7.loss_dice: 0.5297, decode.d8.loss_cls: 0.0613, decode.d8.loss_mask: 0.1949, decode.d8.loss_dice: 0.5319, loss: 8.2184 +2022-05-10 23:54:26,959 - mmseg - INFO - Iter [54600/80000] lr: 4.559e-07, eta: 18:50:14, time: 1.808, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0611, decode.loss_mask: 0.1962, decode.loss_dice: 0.5243, decode.d0.loss_cls: 0.3025, decode.d0.loss_mask: 0.2049, decode.d0.loss_dice: 0.5510, decode.d1.loss_cls: 0.0872, decode.d1.loss_mask: 0.1987, decode.d1.loss_dice: 0.5365, decode.d2.loss_cls: 0.0692, decode.d2.loss_mask: 0.1972, decode.d2.loss_dice: 0.5276, decode.d3.loss_cls: 0.0692, decode.d3.loss_mask: 0.1972, decode.d3.loss_dice: 0.5271, decode.d4.loss_cls: 0.0670, decode.d4.loss_mask: 0.1971, decode.d4.loss_dice: 0.5250, decode.d5.loss_cls: 0.0702, decode.d5.loss_mask: 0.1971, decode.d5.loss_dice: 0.5251, decode.d6.loss_cls: 0.0631, decode.d6.loss_mask: 0.1962, decode.d6.loss_dice: 0.5225, decode.d7.loss_cls: 0.0623, decode.d7.loss_mask: 0.1964, decode.d7.loss_dice: 0.5254, decode.d8.loss_cls: 0.0607, decode.d8.loss_mask: 0.1962, decode.d8.loss_dice: 0.5255, loss: 8.1800 +2022-05-10 23:55:56,542 - mmseg - INFO - Iter [54650/80000] lr: 4.550e-07, eta: 18:45:13, time: 1.792, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0541, decode.loss_mask: 0.1923, decode.loss_dice: 0.5237, decode.d0.loss_cls: 0.2981, decode.d0.loss_mask: 0.2005, decode.d0.loss_dice: 0.5515, decode.d1.loss_cls: 0.0777, decode.d1.loss_mask: 0.1947, decode.d1.loss_dice: 0.5362, decode.d2.loss_cls: 0.0639, decode.d2.loss_mask: 0.1931, decode.d2.loss_dice: 0.5292, decode.d3.loss_cls: 0.0621, decode.d3.loss_mask: 0.1923, decode.d3.loss_dice: 0.5239, decode.d4.loss_cls: 0.0597, decode.d4.loss_mask: 0.1929, decode.d4.loss_dice: 0.5268, decode.d5.loss_cls: 0.0659, decode.d5.loss_mask: 0.1927, decode.d5.loss_dice: 0.5231, 
decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.1930, decode.d6.loss_dice: 0.5236, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1924, decode.d7.loss_dice: 0.5232, decode.d8.loss_cls: 0.0574, decode.d8.loss_mask: 0.1923, decode.d8.loss_dice: 0.5227, loss: 8.0770 +2022-05-10 23:57:30,036 - mmseg - INFO - Iter [54700/80000] lr: 4.541e-07, eta: 18:40:30, time: 1.870, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0564, decode.loss_mask: 0.1958, decode.loss_dice: 0.5299, decode.d0.loss_cls: 0.2996, decode.d0.loss_mask: 0.2031, decode.d0.loss_dice: 0.5537, decode.d1.loss_cls: 0.0778, decode.d1.loss_mask: 0.1978, decode.d1.loss_dice: 0.5386, decode.d2.loss_cls: 0.0695, decode.d2.loss_mask: 0.1965, decode.d2.loss_dice: 0.5296, decode.d3.loss_cls: 0.0624, decode.d3.loss_mask: 0.1956, decode.d3.loss_dice: 0.5338, decode.d4.loss_cls: 0.0565, decode.d4.loss_mask: 0.1960, decode.d4.loss_dice: 0.5297, decode.d5.loss_cls: 0.0601, decode.d5.loss_mask: 0.1957, decode.d5.loss_dice: 0.5283, decode.d6.loss_cls: 0.0577, decode.d6.loss_mask: 0.1956, decode.d6.loss_dice: 0.5319, decode.d7.loss_cls: 0.0539, decode.d7.loss_mask: 0.1955, decode.d7.loss_dice: 0.5291, decode.d8.loss_cls: 0.0503, decode.d8.loss_mask: 0.1955, decode.d8.loss_dice: 0.5283, loss: 8.1441 +2022-05-10 23:59:02,473 - mmseg - INFO - Iter [54750/80000] lr: 4.532e-07, eta: 18:35:46, time: 1.849, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1880, decode.loss_dice: 0.5069, decode.d0.loss_cls: 0.2855, decode.d0.loss_mask: 0.1958, decode.d0.loss_dice: 0.5267, decode.d1.loss_cls: 0.0676, decode.d1.loss_mask: 0.1895, decode.d1.loss_dice: 0.5153, decode.d2.loss_cls: 0.0560, decode.d2.loss_mask: 0.1884, decode.d2.loss_dice: 0.5107, decode.d3.loss_cls: 0.0592, decode.d3.loss_mask: 0.1881, decode.d3.loss_dice: 0.5098, decode.d4.loss_cls: 0.0532, decode.d4.loss_mask: 0.1887, decode.d4.loss_dice: 0.5089, decode.d5.loss_cls: 0.0594, decode.d5.loss_mask: 0.1886, decode.d5.loss_dice: 
0.5073, decode.d6.loss_cls: 0.0523, decode.d6.loss_mask: 0.1886, decode.d6.loss_dice: 0.5058, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.5043, decode.d8.loss_cls: 0.0495, decode.d8.loss_mask: 0.1883, decode.d8.loss_dice: 0.5028, loss: 7.7906 +2022-05-11 00:00:33,537 - mmseg - INFO - Iter [54800/80000] lr: 4.523e-07, eta: 18:30:59, time: 1.821, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0570, decode.loss_mask: 0.1925, decode.loss_dice: 0.5262, decode.d0.loss_cls: 0.2991, decode.d0.loss_mask: 0.2011, decode.d0.loss_dice: 0.5541, decode.d1.loss_cls: 0.0688, decode.d1.loss_mask: 0.1956, decode.d1.loss_dice: 0.5361, decode.d2.loss_cls: 0.0638, decode.d2.loss_mask: 0.1940, decode.d2.loss_dice: 0.5308, decode.d3.loss_cls: 0.0567, decode.d3.loss_mask: 0.1944, decode.d3.loss_dice: 0.5285, decode.d4.loss_cls: 0.0578, decode.d4.loss_mask: 0.1939, decode.d4.loss_dice: 0.5279, decode.d5.loss_cls: 0.0582, decode.d5.loss_mask: 0.1932, decode.d5.loss_dice: 0.5308, decode.d6.loss_cls: 0.0572, decode.d6.loss_mask: 0.1924, decode.d6.loss_dice: 0.5287, decode.d7.loss_cls: 0.0561, decode.d7.loss_mask: 0.1931, decode.d7.loss_dice: 0.5273, decode.d8.loss_cls: 0.0553, decode.d8.loss_mask: 0.1925, decode.d8.loss_dice: 0.5296, loss: 8.0925 +2022-05-11 00:02:04,120 - mmseg - INFO - Iter [54850/80000] lr: 4.514e-07, eta: 18:26:14, time: 1.812, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0520, decode.loss_mask: 0.1900, decode.loss_dice: 0.5190, decode.d0.loss_cls: 0.3044, decode.d0.loss_mask: 0.1987, decode.d0.loss_dice: 0.5425, decode.d1.loss_cls: 0.0761, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5288, decode.d2.loss_cls: 0.0612, decode.d2.loss_mask: 0.1906, decode.d2.loss_dice: 0.5245, decode.d3.loss_cls: 0.0528, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5230, decode.d4.loss_cls: 0.0570, decode.d4.loss_mask: 0.1910, decode.d4.loss_dice: 0.5185, decode.d5.loss_cls: 0.0536, decode.d5.loss_mask: 0.1908, 
decode.d5.loss_dice: 0.5220, decode.d6.loss_cls: 0.0513, decode.d6.loss_mask: 0.1912, decode.d6.loss_dice: 0.5201, decode.d7.loss_cls: 0.0530, decode.d7.loss_mask: 0.1910, decode.d7.loss_dice: 0.5232, decode.d8.loss_cls: 0.0524, decode.d8.loss_mask: 0.1903, decode.d8.loss_dice: 0.5211, loss: 7.9724 +2022-05-11 00:03:37,034 - mmseg - INFO - Iter [54900/80000] lr: 4.505e-07, eta: 18:21:40, time: 1.858, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0589, decode.loss_mask: 0.1877, decode.loss_dice: 0.5084, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.1963, decode.d0.loss_dice: 0.5371, decode.d1.loss_cls: 0.0741, decode.d1.loss_mask: 0.1893, decode.d1.loss_dice: 0.5206, decode.d2.loss_cls: 0.0644, decode.d2.loss_mask: 0.1882, decode.d2.loss_dice: 0.5175, decode.d3.loss_cls: 0.0650, decode.d3.loss_mask: 0.1883, decode.d3.loss_dice: 0.5139, decode.d4.loss_cls: 0.0650, decode.d4.loss_mask: 0.1887, decode.d4.loss_dice: 0.5115, decode.d5.loss_cls: 0.0569, decode.d5.loss_mask: 0.1881, decode.d5.loss_dice: 0.5123, decode.d6.loss_cls: 0.0583, decode.d6.loss_mask: 0.1881, decode.d6.loss_dice: 0.5110, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1880, decode.d7.loss_dice: 0.5132, decode.d8.loss_cls: 0.0616, decode.d8.loss_mask: 0.1880, decode.d8.loss_dice: 0.5100, loss: 7.9060 +2022-05-11 00:05:07,931 - mmseg - INFO - Iter [54950/80000] lr: 4.496e-07, eta: 18:17:01, time: 1.818, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0523, decode.loss_mask: 0.1886, decode.loss_dice: 0.5160, decode.d0.loss_cls: 0.2955, decode.d0.loss_mask: 0.1960, decode.d0.loss_dice: 0.5432, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1903, decode.d1.loss_dice: 0.5244, decode.d2.loss_cls: 0.0593, decode.d2.loss_mask: 0.1897, decode.d2.loss_dice: 0.5197, decode.d3.loss_cls: 0.0553, decode.d3.loss_mask: 0.1885, decode.d3.loss_dice: 0.5203, decode.d4.loss_cls: 0.0528, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5176, decode.d5.loss_cls: 0.0528, decode.d5.loss_mask: 
0.1892, decode.d5.loss_dice: 0.5179, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1886, decode.d6.loss_dice: 0.5173, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.5173, decode.d8.loss_cls: 0.0527, decode.d8.loss_mask: 0.1888, decode.d8.loss_dice: 0.5164, loss: 7.9078 +2022-05-11 00:06:38,968 - mmseg - INFO - Saving checkpoint at 55000 iterations +2022-05-11 00:07:11,295 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 00:07:11,303 - mmseg - INFO - Iter [55000/80000] lr: 4.487e-07, eta: 18:14:21, time: 2.465, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0621, decode.loss_mask: 0.1942, decode.loss_dice: 0.5180, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.2009, decode.d0.loss_dice: 0.5401, decode.d1.loss_cls: 0.0764, decode.d1.loss_mask: 0.1946, decode.d1.loss_dice: 0.5233, decode.d2.loss_cls: 0.0700, decode.d2.loss_mask: 0.1948, decode.d2.loss_dice: 0.5207, decode.d3.loss_cls: 0.0673, decode.d3.loss_mask: 0.1941, decode.d3.loss_dice: 0.5157, decode.d4.loss_cls: 0.0701, decode.d4.loss_mask: 0.1938, decode.d4.loss_dice: 0.5162, decode.d5.loss_cls: 0.0663, decode.d5.loss_mask: 0.1940, decode.d5.loss_dice: 0.5206, decode.d6.loss_cls: 0.0647, decode.d6.loss_mask: 0.1939, decode.d6.loss_dice: 0.5152, decode.d7.loss_cls: 0.0665, decode.d7.loss_mask: 0.1944, decode.d7.loss_dice: 0.5197, decode.d8.loss_cls: 0.0628, decode.d8.loss_mask: 0.1942, decode.d8.loss_dice: 0.5154, loss: 8.0576 +2022-05-11 00:09:06,467 - mmseg - INFO - per class results: +2022-05-11 00:09:06,472 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.54 | 99.12 | +| sidewalk | 88.47 | 94.25 | +| building | 94.36 | 97.08 | +| wall | 68.62 | 79.86 | +| fence | 72.65 | 80.32 | +| pole | 71.72 | 83.43 | +| traffic light | 77.24 | 87.89 | +| traffic sign | 84.01 | 90.83 | +| vegetation | 93.44 | 96.93 | +| terrain | 69.23 | 78.14 | +| sky | 
95.82 | 98.49 | +| person | 86.83 | 94.13 | +| rider | 74.5 | 84.87 | +| car | 96.17 | 98.22 | +| truck | 80.77 | 95.77 | +| bus | 93.57 | 96.63 | +| train | 88.1 | 90.8 | +| motorcycle | 77.25 | 88.39 | +| bicycle | 82.63 | 91.44 | ++---------------+-------+-------+ +2022-05-11 00:09:06,472 - mmseg - INFO - Summary: +2022-05-11 00:09:06,472 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.98 | 83.89 | 90.87 | ++-------+-------+-------+ +2022-05-11 00:09:06,476 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 00:09:06,476 - mmseg - INFO - Iter(val) [32] aAcc: 0.9698, mIoU: 0.8389, mAcc: 0.9087, IoU.road: 0.9854, IoU.sidewalk: 0.8847, IoU.building: 0.9436, IoU.wall: 0.6862, IoU.fence: 0.7265, IoU.pole: 0.7172, IoU.traffic light: 0.7724, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9344, IoU.terrain: 0.6923, IoU.sky: 0.9582, IoU.person: 0.8683, IoU.rider: 0.7450, IoU.car: 0.9617, IoU.truck: 0.8077, IoU.bus: 0.9357, IoU.train: 0.8810, IoU.motorcycle: 0.7725, IoU.bicycle: 0.8263, Acc.road: 0.9912, Acc.sidewalk: 0.9425, Acc.building: 0.9708, Acc.wall: 0.7986, Acc.fence: 0.8032, Acc.pole: 0.8343, Acc.traffic light: 0.8789, Acc.traffic sign: 0.9083, Acc.vegetation: 0.9693, Acc.terrain: 0.7814, Acc.sky: 0.9849, Acc.person: 0.9413, Acc.rider: 0.8487, Acc.car: 0.9822, Acc.truck: 0.9577, Acc.bus: 0.9663, Acc.train: 0.9080, Acc.motorcycle: 0.8839, Acc.bicycle: 0.9144 +2022-05-11 00:10:37,639 - mmseg - INFO - Iter [55050/80000] lr: 4.478e-07, eta: 18:16:36, time: 4.129, data_time: 2.322, memory: 69063, decode.loss_cls: 0.0646, decode.loss_mask: 0.1917, decode.loss_dice: 0.5335, decode.d0.loss_cls: 0.3184, decode.d0.loss_mask: 0.1994, decode.d0.loss_dice: 0.5578, decode.d1.loss_cls: 0.0854, decode.d1.loss_mask: 0.1931, decode.d1.loss_dice: 0.5377, decode.d2.loss_cls: 0.0771, decode.d2.loss_mask: 0.1919, decode.d2.loss_dice: 0.5356, decode.d3.loss_cls: 0.0719, decode.d3.loss_mask: 
0.1925, decode.d3.loss_dice: 0.5310, decode.d4.loss_cls: 0.0727, decode.d4.loss_mask: 0.1923, decode.d4.loss_dice: 0.5324, decode.d5.loss_cls: 0.0716, decode.d5.loss_mask: 0.1919, decode.d5.loss_dice: 0.5357, decode.d6.loss_cls: 0.0638, decode.d6.loss_mask: 0.1921, decode.d6.loss_dice: 0.5341, decode.d7.loss_cls: 0.0705, decode.d7.loss_mask: 0.1924, decode.d7.loss_dice: 0.5354, decode.d8.loss_cls: 0.0661, decode.d8.loss_mask: 0.1917, decode.d8.loss_dice: 0.5321, loss: 8.2561 +2022-05-11 00:12:09,648 - mmseg - INFO - Iter [55100/80000] lr: 4.469e-07, eta: 18:12:04, time: 1.839, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0551, decode.loss_mask: 0.1872, decode.loss_dice: 0.5174, decode.d0.loss_cls: 0.2931, decode.d0.loss_mask: 0.1935, decode.d0.loss_dice: 0.5441, decode.d1.loss_cls: 0.0733, decode.d1.loss_mask: 0.1881, decode.d1.loss_dice: 0.5217, decode.d2.loss_cls: 0.0694, decode.d2.loss_mask: 0.1875, decode.d2.loss_dice: 0.5227, decode.d3.loss_cls: 0.0654, decode.d3.loss_mask: 0.1877, decode.d3.loss_dice: 0.5177, decode.d4.loss_cls: 0.0566, decode.d4.loss_mask: 0.1872, decode.d4.loss_dice: 0.5195, decode.d5.loss_cls: 0.0536, decode.d5.loss_mask: 0.1875, decode.d5.loss_dice: 0.5186, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1871, decode.d6.loss_dice: 0.5186, decode.d7.loss_cls: 0.0653, decode.d7.loss_mask: 0.1872, decode.d7.loss_dice: 0.5204, decode.d8.loss_cls: 0.0571, decode.d8.loss_mask: 0.1867, decode.d8.loss_dice: 0.5182, loss: 7.9468 +2022-05-11 00:13:40,927 - mmseg - INFO - Iter [55150/80000] lr: 4.460e-07, eta: 18:07:32, time: 1.826, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0557, decode.loss_mask: 0.1893, decode.loss_dice: 0.5253, decode.d0.loss_cls: 0.3072, decode.d0.loss_mask: 0.1952, decode.d0.loss_dice: 0.5507, decode.d1.loss_cls: 0.0749, decode.d1.loss_mask: 0.1907, decode.d1.loss_dice: 0.5349, decode.d2.loss_cls: 0.0513, decode.d2.loss_mask: 0.1896, decode.d2.loss_dice: 0.5294, decode.d3.loss_cls: 0.0566, 
decode.d3.loss_mask: 0.1889, decode.d3.loss_dice: 0.5259, decode.d4.loss_cls: 0.0590, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5290, decode.d5.loss_cls: 0.0555, decode.d5.loss_mask: 0.1895, decode.d5.loss_dice: 0.5269, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.5279, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.1896, decode.d7.loss_dice: 0.5271, decode.d8.loss_cls: 0.0497, decode.d8.loss_mask: 0.1894, decode.d8.loss_dice: 0.5242, loss: 8.0223 +2022-05-11 00:15:10,212 - mmseg - INFO - Iter [55200/80000] lr: 4.451e-07, eta: 18:02:56, time: 1.785, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0500, decode.loss_mask: 0.1873, decode.loss_dice: 0.5153, decode.d0.loss_cls: 0.2912, decode.d0.loss_mask: 0.1936, decode.d0.loss_dice: 0.5425, decode.d1.loss_cls: 0.0721, decode.d1.loss_mask: 0.1881, decode.d1.loss_dice: 0.5246, decode.d2.loss_cls: 0.0645, decode.d2.loss_mask: 0.1874, decode.d2.loss_dice: 0.5212, decode.d3.loss_cls: 0.0553, decode.d3.loss_mask: 0.1872, decode.d3.loss_dice: 0.5176, decode.d4.loss_cls: 0.0530, decode.d4.loss_mask: 0.1873, decode.d4.loss_dice: 0.5201, decode.d5.loss_cls: 0.0608, decode.d5.loss_mask: 0.1880, decode.d5.loss_dice: 0.5191, decode.d6.loss_cls: 0.0552, decode.d6.loss_mask: 0.1876, decode.d6.loss_dice: 0.5171, decode.d7.loss_cls: 0.0508, decode.d7.loss_mask: 0.1876, decode.d7.loss_dice: 0.5218, decode.d8.loss_cls: 0.0532, decode.d8.loss_mask: 0.1872, decode.d8.loss_dice: 0.5176, loss: 7.9039 +2022-05-11 00:16:39,397 - mmseg - INFO - Iter [55250/80000] lr: 4.442e-07, eta: 17:58:23, time: 1.784, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0548, decode.loss_mask: 0.1989, decode.loss_dice: 0.5275, decode.d0.loss_cls: 0.2936, decode.d0.loss_mask: 0.2072, decode.d0.loss_dice: 0.5500, decode.d1.loss_cls: 0.0813, decode.d1.loss_mask: 0.2003, decode.d1.loss_dice: 0.5326, decode.d2.loss_cls: 0.0611, decode.d2.loss_mask: 0.1995, decode.d2.loss_dice: 0.5332, decode.d3.loss_cls: 
0.0554, decode.d3.loss_mask: 0.2001, decode.d3.loss_dice: 0.5285, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1993, decode.d4.loss_dice: 0.5269, decode.d5.loss_cls: 0.0595, decode.d5.loss_mask: 0.1994, decode.d5.loss_dice: 0.5325, decode.d6.loss_cls: 0.0578, decode.d6.loss_mask: 0.1989, decode.d6.loss_dice: 0.5308, decode.d7.loss_cls: 0.0643, decode.d7.loss_mask: 0.1991, decode.d7.loss_dice: 0.5329, decode.d8.loss_cls: 0.0583, decode.d8.loss_mask: 0.1990, decode.d8.loss_dice: 0.5305, loss: 8.1721 +2022-05-11 00:18:12,394 - mmseg - INFO - Iter [55300/80000] lr: 4.433e-07, eta: 17:54:04, time: 1.860, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0594, decode.loss_mask: 0.1901, decode.loss_dice: 0.5211, decode.d0.loss_cls: 0.2992, decode.d0.loss_mask: 0.1975, decode.d0.loss_dice: 0.5495, decode.d1.loss_cls: 0.0832, decode.d1.loss_mask: 0.1911, decode.d1.loss_dice: 0.5279, decode.d2.loss_cls: 0.0656, decode.d2.loss_mask: 0.1909, decode.d2.loss_dice: 0.5278, decode.d3.loss_cls: 0.0662, decode.d3.loss_mask: 0.1907, decode.d3.loss_dice: 0.5244, decode.d4.loss_cls: 0.0651, decode.d4.loss_mask: 0.1893, decode.d4.loss_dice: 0.5260, decode.d5.loss_cls: 0.0668, decode.d5.loss_mask: 0.1897, decode.d5.loss_dice: 0.5221, decode.d6.loss_cls: 0.0589, decode.d6.loss_mask: 0.1895, decode.d6.loss_dice: 0.5180, decode.d7.loss_cls: 0.0615, decode.d7.loss_mask: 0.1899, decode.d7.loss_dice: 0.5222, decode.d8.loss_cls: 0.0601, decode.d8.loss_mask: 0.1894, decode.d8.loss_dice: 0.5232, loss: 8.0563 +2022-05-11 00:19:42,925 - mmseg - INFO - Iter [55350/80000] lr: 4.424e-07, eta: 17:49:40, time: 1.811, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0640, decode.loss_mask: 0.1874, decode.loss_dice: 0.5218, decode.d0.loss_cls: 0.3001, decode.d0.loss_mask: 0.1956, decode.d0.loss_dice: 0.5481, decode.d1.loss_cls: 0.0775, decode.d1.loss_mask: 0.1894, decode.d1.loss_dice: 0.5301, decode.d2.loss_cls: 0.0724, decode.d2.loss_mask: 0.1883, decode.d2.loss_dice: 0.5235, 
decode.d3.loss_cls: 0.0696, decode.d3.loss_mask: 0.1880, decode.d3.loss_dice: 0.5229, decode.d4.loss_cls: 0.0660, decode.d4.loss_mask: 0.1877, decode.d4.loss_dice: 0.5236, decode.d5.loss_cls: 0.0629, decode.d5.loss_mask: 0.1883, decode.d5.loss_dice: 0.5243, decode.d6.loss_cls: 0.0623, decode.d6.loss_mask: 0.1881, decode.d6.loss_dice: 0.5209, decode.d7.loss_cls: 0.0634, decode.d7.loss_mask: 0.1875, decode.d7.loss_dice: 0.5216, decode.d8.loss_cls: 0.0615, decode.d8.loss_mask: 0.1881, decode.d8.loss_dice: 0.5213, loss: 8.0460 +2022-05-11 00:21:12,820 - mmseg - INFO - Iter [55400/80000] lr: 4.415e-07, eta: 17:45:16, time: 1.798, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0632, decode.loss_mask: 0.1915, decode.loss_dice: 0.5315, decode.d0.loss_cls: 0.3031, decode.d0.loss_mask: 0.1979, decode.d0.loss_dice: 0.5507, decode.d1.loss_cls: 0.0780, decode.d1.loss_mask: 0.1924, decode.d1.loss_dice: 0.5382, decode.d2.loss_cls: 0.0664, decode.d2.loss_mask: 0.1915, decode.d2.loss_dice: 0.5332, decode.d3.loss_cls: 0.0677, decode.d3.loss_mask: 0.1916, decode.d3.loss_dice: 0.5336, decode.d4.loss_cls: 0.0687, decode.d4.loss_mask: 0.1914, decode.d4.loss_dice: 0.5314, decode.d5.loss_cls: 0.0678, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5316, decode.d6.loss_cls: 0.0602, decode.d6.loss_mask: 0.1916, decode.d6.loss_dice: 0.5339, decode.d7.loss_cls: 0.0662, decode.d7.loss_mask: 0.1916, decode.d7.loss_dice: 0.5326, decode.d8.loss_cls: 0.0683, decode.d8.loss_mask: 0.1918, decode.d8.loss_dice: 0.5290, loss: 8.1779 +2022-05-11 00:22:45,929 - mmseg - INFO - Iter [55450/80000] lr: 4.406e-07, eta: 17:41:05, time: 1.862, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0557, decode.loss_mask: 0.1889, decode.loss_dice: 0.5115, decode.d0.loss_cls: 0.2931, decode.d0.loss_mask: 0.1974, decode.d0.loss_dice: 0.5292, decode.d1.loss_cls: 0.0687, decode.d1.loss_mask: 0.1913, decode.d1.loss_dice: 0.5146, decode.d2.loss_cls: 0.0634, decode.d2.loss_mask: 0.1905, decode.d2.loss_dice: 
0.5142, decode.d3.loss_cls: 0.0655, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5133, decode.d4.loss_cls: 0.0614, decode.d4.loss_mask: 0.1893, decode.d4.loss_dice: 0.5134, decode.d5.loss_cls: 0.0575, decode.d5.loss_mask: 0.1887, decode.d5.loss_dice: 0.5104, decode.d6.loss_cls: 0.0568, decode.d6.loss_mask: 0.1890, decode.d6.loss_dice: 0.5088, decode.d7.loss_cls: 0.0574, decode.d7.loss_mask: 0.1890, decode.d7.loss_dice: 0.5129, decode.d8.loss_cls: 0.0570, decode.d8.loss_mask: 0.1889, decode.d8.loss_dice: 0.5120, loss: 7.8791 +2022-05-11 00:24:14,249 - mmseg - INFO - Iter [55500/80000] lr: 4.397e-07, eta: 17:36:40, time: 1.767, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0544, decode.loss_mask: 0.1920, decode.loss_dice: 0.5113, decode.d0.loss_cls: 0.2922, decode.d0.loss_mask: 0.1996, decode.d0.loss_dice: 0.5382, decode.d1.loss_cls: 0.0603, decode.d1.loss_mask: 0.1933, decode.d1.loss_dice: 0.5205, decode.d2.loss_cls: 0.0553, decode.d2.loss_mask: 0.1923, decode.d2.loss_dice: 0.5164, decode.d3.loss_cls: 0.0530, decode.d3.loss_mask: 0.1920, decode.d3.loss_dice: 0.5139, decode.d4.loss_cls: 0.0495, decode.d4.loss_mask: 0.1927, decode.d4.loss_dice: 0.5138, decode.d5.loss_cls: 0.0528, decode.d5.loss_mask: 0.1927, decode.d5.loss_dice: 0.5145, decode.d6.loss_cls: 0.0557, decode.d6.loss_mask: 0.1925, decode.d6.loss_dice: 0.5110, decode.d7.loss_cls: 0.0524, decode.d7.loss_mask: 0.1926, decode.d7.loss_dice: 0.5177, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1922, decode.d8.loss_dice: 0.5144, loss: 7.8814 +2022-05-11 00:25:44,420 - mmseg - INFO - Iter [55550/80000] lr: 4.388e-07, eta: 17:32:24, time: 1.803, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0608, decode.loss_mask: 0.1835, decode.loss_dice: 0.5156, decode.d0.loss_cls: 0.2945, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5359, decode.d1.loss_cls: 0.0805, decode.d1.loss_mask: 0.1853, decode.d1.loss_dice: 0.5238, decode.d2.loss_cls: 0.0651, decode.d2.loss_mask: 0.1849, 
decode.d2.loss_dice: 0.5215, decode.d3.loss_cls: 0.0654, decode.d3.loss_mask: 0.1843, decode.d3.loss_dice: 0.5169, decode.d4.loss_cls: 0.0680, decode.d4.loss_mask: 0.1844, decode.d4.loss_dice: 0.5169, decode.d5.loss_cls: 0.0615, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.5152, decode.d6.loss_cls: 0.0601, decode.d6.loss_mask: 0.1840, decode.d6.loss_dice: 0.5168, decode.d7.loss_cls: 0.0591, decode.d7.loss_mask: 0.1844, decode.d7.loss_dice: 0.5181, decode.d8.loss_cls: 0.0593, decode.d8.loss_mask: 0.1844, decode.d8.loss_dice: 0.5145, loss: 7.9186 +2022-05-11 00:27:14,063 - mmseg - INFO - Iter [55600/80000] lr: 4.379e-07, eta: 17:28:08, time: 1.793, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0610, decode.loss_mask: 0.1869, decode.loss_dice: 0.5167, decode.d0.loss_cls: 0.3015, decode.d0.loss_mask: 0.1925, decode.d0.loss_dice: 0.5361, decode.d1.loss_cls: 0.0730, decode.d1.loss_mask: 0.1883, decode.d1.loss_dice: 0.5231, decode.d2.loss_cls: 0.0635, decode.d2.loss_mask: 0.1878, decode.d2.loss_dice: 0.5184, decode.d3.loss_cls: 0.0572, decode.d3.loss_mask: 0.1873, decode.d3.loss_dice: 0.5176, decode.d4.loss_cls: 0.0596, decode.d4.loss_mask: 0.1875, decode.d4.loss_dice: 0.5141, decode.d5.loss_cls: 0.0607, decode.d5.loss_mask: 0.1872, decode.d5.loss_dice: 0.5161, decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1870, decode.d6.loss_dice: 0.5156, decode.d7.loss_cls: 0.0588, decode.d7.loss_mask: 0.1874, decode.d7.loss_dice: 0.5154, decode.d8.loss_cls: 0.0601, decode.d8.loss_mask: 0.1869, decode.d8.loss_dice: 0.5179, loss: 7.9239 +2022-05-11 00:28:45,311 - mmseg - INFO - Iter [55650/80000] lr: 4.370e-07, eta: 17:23:59, time: 1.825, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0562, decode.loss_mask: 0.1919, decode.loss_dice: 0.5209, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.1993, decode.d0.loss_dice: 0.5460, decode.d1.loss_cls: 0.0686, decode.d1.loss_mask: 0.1929, decode.d1.loss_dice: 0.5279, decode.d2.loss_cls: 0.0598, decode.d2.loss_mask: 
0.1929, decode.d2.loss_dice: 0.5239, decode.d3.loss_cls: 0.0507, decode.d3.loss_mask: 0.1921, decode.d3.loss_dice: 0.5224, decode.d4.loss_cls: 0.0488, decode.d4.loss_mask: 0.1927, decode.d4.loss_dice: 0.5235, decode.d5.loss_cls: 0.0510, decode.d5.loss_mask: 0.1922, decode.d5.loss_dice: 0.5212, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1922, decode.d6.loss_dice: 0.5186, decode.d7.loss_cls: 0.0525, decode.d7.loss_mask: 0.1921, decode.d7.loss_dice: 0.5200, decode.d8.loss_cls: 0.0525, decode.d8.loss_mask: 0.1918, decode.d8.loss_dice: 0.5198, loss: 7.9677 +2022-05-11 00:30:15,661 - mmseg - INFO - Iter [55700/80000] lr: 4.361e-07, eta: 17:19:50, time: 1.806, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0532, decode.loss_mask: 0.1874, decode.loss_dice: 0.5118, decode.d0.loss_cls: 0.2936, decode.d0.loss_mask: 0.1944, decode.d0.loss_dice: 0.5401, decode.d1.loss_cls: 0.0682, decode.d1.loss_mask: 0.1883, decode.d1.loss_dice: 0.5215, decode.d2.loss_cls: 0.0575, decode.d2.loss_mask: 0.1880, decode.d2.loss_dice: 0.5192, decode.d3.loss_cls: 0.0599, decode.d3.loss_mask: 0.1881, decode.d3.loss_dice: 0.5184, decode.d4.loss_cls: 0.0540, decode.d4.loss_mask: 0.1878, decode.d4.loss_dice: 0.5174, decode.d5.loss_cls: 0.0510, decode.d5.loss_mask: 0.1883, decode.d5.loss_dice: 0.5167, decode.d6.loss_cls: 0.0500, decode.d6.loss_mask: 0.1878, decode.d6.loss_dice: 0.5140, decode.d7.loss_cls: 0.0512, decode.d7.loss_mask: 0.1875, decode.d7.loss_dice: 0.5172, decode.d8.loss_cls: 0.0517, decode.d8.loss_mask: 0.1874, decode.d8.loss_dice: 0.5157, loss: 7.8673 +2022-05-11 00:31:45,882 - mmseg - INFO - Iter [55750/80000] lr: 4.352e-07, eta: 17:15:42, time: 1.805, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0591, decode.loss_mask: 0.1928, decode.loss_dice: 0.5272, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.2026, decode.d0.loss_dice: 0.5508, decode.d1.loss_cls: 0.0735, decode.d1.loss_mask: 0.1952, decode.d1.loss_dice: 0.5310, decode.d2.loss_cls: 0.0674, 
decode.d2.loss_mask: 0.1951, decode.d2.loss_dice: 0.5328, decode.d3.loss_cls: 0.0645, decode.d3.loss_mask: 0.1942, decode.d3.loss_dice: 0.5296, decode.d4.loss_cls: 0.0647, decode.d4.loss_mask: 0.1941, decode.d4.loss_dice: 0.5241, decode.d5.loss_cls: 0.0655, decode.d5.loss_mask: 0.1937, decode.d5.loss_dice: 0.5278, decode.d6.loss_cls: 0.0644, decode.d6.loss_mask: 0.1935, decode.d6.loss_dice: 0.5296, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.1935, decode.d7.loss_dice: 0.5264, decode.d8.loss_cls: 0.0582, decode.d8.loss_mask: 0.1931, decode.d8.loss_dice: 0.5268, loss: 8.1228 +2022-05-11 00:33:15,158 - mmseg - INFO - Iter [55800/80000] lr: 4.343e-07, eta: 17:11:33, time: 1.786, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0513, decode.loss_mask: 0.1958, decode.loss_dice: 0.5204, decode.d0.loss_cls: 0.2948, decode.d0.loss_mask: 0.2066, decode.d0.loss_dice: 0.5451, decode.d1.loss_cls: 0.0622, decode.d1.loss_mask: 0.1972, decode.d1.loss_dice: 0.5291, decode.d2.loss_cls: 0.0580, decode.d2.loss_mask: 0.1966, decode.d2.loss_dice: 0.5254, decode.d3.loss_cls: 0.0552, decode.d3.loss_mask: 0.1965, decode.d3.loss_dice: 0.5214, decode.d4.loss_cls: 0.0523, decode.d4.loss_mask: 0.1965, decode.d4.loss_dice: 0.5212, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1957, decode.d5.loss_dice: 0.5221, decode.d6.loss_cls: 0.0529, decode.d6.loss_mask: 0.1962, decode.d6.loss_dice: 0.5207, decode.d7.loss_cls: 0.0536, decode.d7.loss_mask: 0.1950, decode.d7.loss_dice: 0.5229, decode.d8.loss_cls: 0.0496, decode.d8.loss_mask: 0.1954, decode.d8.loss_dice: 0.5209, loss: 8.0035 +2022-05-11 00:34:48,266 - mmseg - INFO - Iter [55850/80000] lr: 4.334e-07, eta: 17:07:38, time: 1.862, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0603, decode.loss_mask: 0.1858, decode.loss_dice: 0.5156, decode.d0.loss_cls: 0.2963, decode.d0.loss_mask: 0.1926, decode.d0.loss_dice: 0.5431, decode.d1.loss_cls: 0.0744, decode.d1.loss_mask: 0.1872, decode.d1.loss_dice: 0.5279, decode.d2.loss_cls: 
0.0600, decode.d2.loss_mask: 0.1865, decode.d2.loss_dice: 0.5211, decode.d3.loss_cls: 0.0700, decode.d3.loss_mask: 0.1866, decode.d3.loss_dice: 0.5179, decode.d4.loss_cls: 0.0621, decode.d4.loss_mask: 0.1869, decode.d4.loss_dice: 0.5179, decode.d5.loss_cls: 0.0636, decode.d5.loss_mask: 0.1865, decode.d5.loss_dice: 0.5189, decode.d6.loss_cls: 0.0619, decode.d6.loss_mask: 0.1867, decode.d6.loss_dice: 0.5192, decode.d7.loss_cls: 0.0623, decode.d7.loss_mask: 0.1863, decode.d7.loss_dice: 0.5174, decode.d8.loss_cls: 0.0568, decode.d8.loss_mask: 0.1867, decode.d8.loss_dice: 0.5163, loss: 7.9546 +2022-05-11 00:36:20,101 - mmseg - INFO - Iter [55900/80000] lr: 4.326e-07, eta: 17:03:42, time: 1.837, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0526, decode.loss_mask: 0.1879, decode.loss_dice: 0.5113, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.1946, decode.d0.loss_dice: 0.5369, decode.d1.loss_cls: 0.0714, decode.d1.loss_mask: 0.1888, decode.d1.loss_dice: 0.5215, decode.d2.loss_cls: 0.0559, decode.d2.loss_mask: 0.1886, decode.d2.loss_dice: 0.5178, decode.d3.loss_cls: 0.0549, decode.d3.loss_mask: 0.1886, decode.d3.loss_dice: 0.5118, decode.d4.loss_cls: 0.0581, decode.d4.loss_mask: 0.1880, decode.d4.loss_dice: 0.5116, decode.d5.loss_cls: 0.0548, decode.d5.loss_mask: 0.1888, decode.d5.loss_dice: 0.5111, decode.d6.loss_cls: 0.0534, decode.d6.loss_mask: 0.1882, decode.d6.loss_dice: 0.5107, decode.d7.loss_cls: 0.0525, decode.d7.loss_mask: 0.1881, decode.d7.loss_dice: 0.5132, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1874, decode.d8.loss_dice: 0.5129, loss: 7.8524 +2022-05-11 00:37:50,088 - mmseg - INFO - Iter [55950/80000] lr: 4.317e-07, eta: 16:59:41, time: 1.800, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0558, decode.loss_mask: 0.1883, decode.loss_dice: 0.5202, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.1951, decode.d0.loss_dice: 0.5400, decode.d1.loss_cls: 0.0687, decode.d1.loss_mask: 0.1898, decode.d1.loss_dice: 0.5279, 
decode.d2.loss_cls: 0.0587, decode.d2.loss_mask: 0.1895, decode.d2.loss_dice: 0.5246, decode.d3.loss_cls: 0.0614, decode.d3.loss_mask: 0.1889, decode.d3.loss_dice: 0.5215, decode.d4.loss_cls: 0.0548, decode.d4.loss_mask: 0.1889, decode.d4.loss_dice: 0.5204, decode.d5.loss_cls: 0.0572, decode.d5.loss_mask: 0.1888, decode.d5.loss_dice: 0.5189, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1886, decode.d6.loss_dice: 0.5199, decode.d7.loss_cls: 0.0563, decode.d7.loss_mask: 0.1888, decode.d7.loss_dice: 0.5214, decode.d8.loss_cls: 0.0581, decode.d8.loss_mask: 0.1884, decode.d8.loss_dice: 0.5218, loss: 7.9533 +2022-05-11 00:39:21,995 - mmseg - INFO - Saving checkpoint at 56000 iterations +2022-05-11 00:39:54,272 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 00:39:54,284 - mmseg - INFO - Iter [56000/80000] lr: 4.308e-07, eta: 16:57:24, time: 2.481, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0565, decode.loss_mask: 0.1921, decode.loss_dice: 0.5324, decode.d0.loss_cls: 0.3006, decode.d0.loss_mask: 0.1981, decode.d0.loss_dice: 0.5621, decode.d1.loss_cls: 0.0797, decode.d1.loss_mask: 0.1930, decode.d1.loss_dice: 0.5475, decode.d2.loss_cls: 0.0727, decode.d2.loss_mask: 0.1933, decode.d2.loss_dice: 0.5414, decode.d3.loss_cls: 0.0661, decode.d3.loss_mask: 0.1927, decode.d3.loss_dice: 0.5381, decode.d4.loss_cls: 0.0679, decode.d4.loss_mask: 0.1918, decode.d4.loss_dice: 0.5363, decode.d5.loss_cls: 0.0691, decode.d5.loss_mask: 0.1923, decode.d5.loss_dice: 0.5410, decode.d6.loss_cls: 0.0667, decode.d6.loss_mask: 0.1914, decode.d6.loss_dice: 0.5357, decode.d7.loss_cls: 0.0643, decode.d7.loss_mask: 0.1917, decode.d7.loss_dice: 0.5362, decode.d8.loss_cls: 0.0618, decode.d8.loss_mask: 0.1925, decode.d8.loss_dice: 0.5352, loss: 8.2403 +2022-05-11 00:41:49,669 - mmseg - INFO - per class results: +2022-05-11 00:41:49,675 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ 
+| road | 98.51 | 99.14 | +| sidewalk | 88.04 | 93.86 | +| building | 94.31 | 96.88 | +| wall | 68.26 | 77.87 | +| fence | 73.5 | 80.98 | +| pole | 71.01 | 83.96 | +| traffic light | 77.31 | 87.72 | +| traffic sign | 83.73 | 90.69 | +| vegetation | 93.3 | 97.02 | +| terrain | 67.49 | 78.33 | +| sky | 95.83 | 98.35 | +| person | 86.88 | 93.9 | +| rider | 74.18 | 83.83 | +| car | 96.22 | 98.33 | +| truck | 82.11 | 94.84 | +| bus | 93.63 | 96.56 | +| train | 87.93 | 90.62 | +| motorcycle | 77.49 | 87.9 | +| bicycle | 82.82 | 91.36 | ++---------------+-------+-------+ +2022-05-11 00:41:49,675 - mmseg - INFO - Summary: +2022-05-11 00:41:49,675 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.93 | 83.82 | 90.64 | ++-------+-------+-------+ +2022-05-11 00:41:49,681 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 00:41:49,681 - mmseg - INFO - Iter(val) [32] aAcc: 0.9693, mIoU: 0.8382, mAcc: 0.9064, IoU.road: 0.9851, IoU.sidewalk: 0.8804, IoU.building: 0.9431, IoU.wall: 0.6826, IoU.fence: 0.7350, IoU.pole: 0.7101, IoU.traffic light: 0.7731, IoU.traffic sign: 0.8373, IoU.vegetation: 0.9330, IoU.terrain: 0.6749, IoU.sky: 0.9583, IoU.person: 0.8688, IoU.rider: 0.7418, IoU.car: 0.9622, IoU.truck: 0.8211, IoU.bus: 0.9363, IoU.train: 0.8793, IoU.motorcycle: 0.7749, IoU.bicycle: 0.8282, Acc.road: 0.9914, Acc.sidewalk: 0.9386, Acc.building: 0.9688, Acc.wall: 0.7787, Acc.fence: 0.8098, Acc.pole: 0.8396, Acc.traffic light: 0.8772, Acc.traffic sign: 0.9069, Acc.vegetation: 0.9702, Acc.terrain: 0.7833, Acc.sky: 0.9835, Acc.person: 0.9390, Acc.rider: 0.8383, Acc.car: 0.9833, Acc.truck: 0.9484, Acc.bus: 0.9656, Acc.train: 0.9062, Acc.motorcycle: 0.8790, Acc.bicycle: 0.9136 +2022-05-11 00:43:20,762 - mmseg - INFO - Iter [56050/80000] lr: 4.299e-07, eta: 16:59:13, time: 4.132, data_time: 2.328, memory: 69063, decode.loss_cls: 0.0481, decode.loss_mask: 0.1881, decode.loss_dice: 0.5087, 
decode.d0.loss_cls: 0.3004, decode.d0.loss_mask: 0.1948, decode.d0.loss_dice: 0.5319, decode.d1.loss_cls: 0.0561, decode.d1.loss_mask: 0.1887, decode.d1.loss_dice: 0.5218, decode.d2.loss_cls: 0.0495, decode.d2.loss_mask: 0.1887, decode.d2.loss_dice: 0.5194, decode.d3.loss_cls: 0.0468, decode.d3.loss_mask: 0.1890, decode.d3.loss_dice: 0.5149, decode.d4.loss_cls: 0.0530, decode.d4.loss_mask: 0.1887, decode.d4.loss_dice: 0.5136, decode.d5.loss_cls: 0.0473, decode.d5.loss_mask: 0.1883, decode.d5.loss_dice: 0.5131, decode.d6.loss_cls: 0.0478, decode.d6.loss_mask: 0.1882, decode.d6.loss_dice: 0.5107, decode.d7.loss_cls: 0.0479, decode.d7.loss_mask: 0.1882, decode.d7.loss_dice: 0.5151, decode.d8.loss_cls: 0.0450, decode.d8.loss_mask: 0.1883, decode.d8.loss_dice: 0.5134, loss: 7.7955 +2022-05-11 00:44:53,382 - mmseg - INFO - Iter [56100/80000] lr: 4.290e-07, eta: 16:55:22, time: 1.852, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0557, decode.loss_mask: 0.1908, decode.loss_dice: 0.5140, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.1989, decode.d0.loss_dice: 0.5401, decode.d1.loss_cls: 0.0711, decode.d1.loss_mask: 0.1923, decode.d1.loss_dice: 0.5235, decode.d2.loss_cls: 0.0660, decode.d2.loss_mask: 0.1910, decode.d2.loss_dice: 0.5179, decode.d3.loss_cls: 0.0592, decode.d3.loss_mask: 0.1914, decode.d3.loss_dice: 0.5194, decode.d4.loss_cls: 0.0611, decode.d4.loss_mask: 0.1914, decode.d4.loss_dice: 0.5161, decode.d5.loss_cls: 0.0653, decode.d5.loss_mask: 0.1913, decode.d5.loss_dice: 0.5203, decode.d6.loss_cls: 0.0605, decode.d6.loss_mask: 0.1911, decode.d6.loss_dice: 0.5158, decode.d7.loss_cls: 0.0622, decode.d7.loss_mask: 0.1914, decode.d7.loss_dice: 0.5154, decode.d8.loss_cls: 0.0599, decode.d8.loss_mask: 0.1912, decode.d8.loss_dice: 0.5178, loss: 7.9885 +2022-05-11 00:46:24,160 - mmseg - INFO - Iter [56150/80000] lr: 4.281e-07, eta: 16:51:28, time: 1.815, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0650, decode.loss_mask: 0.1906, decode.loss_dice: 
0.5125, decode.d0.loss_cls: 0.3033, decode.d0.loss_mask: 0.1984, decode.d0.loss_dice: 0.5349, decode.d1.loss_cls: 0.0784, decode.d1.loss_mask: 0.1940, decode.d1.loss_dice: 0.5204, decode.d2.loss_cls: 0.0764, decode.d2.loss_mask: 0.1925, decode.d2.loss_dice: 0.5178, decode.d3.loss_cls: 0.0716, decode.d3.loss_mask: 0.1916, decode.d3.loss_dice: 0.5139, decode.d4.loss_cls: 0.0713, decode.d4.loss_mask: 0.1905, decode.d4.loss_dice: 0.5110, decode.d5.loss_cls: 0.0709, decode.d5.loss_mask: 0.1912, decode.d5.loss_dice: 0.5158, decode.d6.loss_cls: 0.0642, decode.d6.loss_mask: 0.1907, decode.d6.loss_dice: 0.5113, decode.d7.loss_cls: 0.0612, decode.d7.loss_mask: 0.1905, decode.d7.loss_dice: 0.5109, decode.d8.loss_cls: 0.0657, decode.d8.loss_mask: 0.1910, decode.d8.loss_dice: 0.5131, loss: 8.0108 +2022-05-11 00:47:55,830 - mmseg - INFO - Iter [56200/80000] lr: 4.272e-07, eta: 16:47:37, time: 1.833, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0533, decode.loss_mask: 0.1939, decode.loss_dice: 0.5144, decode.d0.loss_cls: 0.2947, decode.d0.loss_mask: 0.2045, decode.d0.loss_dice: 0.5413, decode.d1.loss_cls: 0.0659, decode.d1.loss_mask: 0.1963, decode.d1.loss_dice: 0.5254, decode.d2.loss_cls: 0.0625, decode.d2.loss_mask: 0.1953, decode.d2.loss_dice: 0.5196, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1953, decode.d3.loss_dice: 0.5195, decode.d4.loss_cls: 0.0525, decode.d4.loss_mask: 0.1952, decode.d4.loss_dice: 0.5224, decode.d5.loss_cls: 0.0629, decode.d5.loss_mask: 0.1947, decode.d5.loss_dice: 0.5182, decode.d6.loss_cls: 0.0563, decode.d6.loss_mask: 0.1947, decode.d6.loss_dice: 0.5183, decode.d7.loss_cls: 0.0605, decode.d7.loss_mask: 0.1945, decode.d7.loss_dice: 0.5195, decode.d8.loss_cls: 0.0564, decode.d8.loss_mask: 0.1945, decode.d8.loss_dice: 0.5127, loss: 7.9930 +2022-05-11 00:49:26,942 - mmseg - INFO - Iter [56250/80000] lr: 4.263e-07, eta: 16:43:47, time: 1.821, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0585, decode.loss_mask: 0.1848, 
decode.loss_dice: 0.5191, decode.d0.loss_cls: 0.2955, decode.d0.loss_mask: 0.1916, decode.d0.loss_dice: 0.5464, decode.d1.loss_cls: 0.0698, decode.d1.loss_mask: 0.1856, decode.d1.loss_dice: 0.5278, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.5242, decode.d3.loss_cls: 0.0630, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.5249, decode.d4.loss_cls: 0.0624, decode.d4.loss_mask: 0.1847, decode.d4.loss_dice: 0.5251, decode.d5.loss_cls: 0.0537, decode.d5.loss_mask: 0.1848, decode.d5.loss_dice: 0.5245, decode.d6.loss_cls: 0.0560, decode.d6.loss_mask: 0.1840, decode.d6.loss_dice: 0.5213, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1841, decode.d7.loss_dice: 0.5223, decode.d8.loss_cls: 0.0545, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.5209, loss: 7.9408 +2022-05-11 00:50:58,054 - mmseg - INFO - Iter [56300/80000] lr: 4.254e-07, eta: 16:39:58, time: 1.823, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0653, decode.loss_mask: 0.1901, decode.loss_dice: 0.5219, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.1983, decode.d0.loss_dice: 0.5450, decode.d1.loss_cls: 0.0834, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5290, decode.d2.loss_cls: 0.0730, decode.d2.loss_mask: 0.1912, decode.d2.loss_dice: 0.5277, decode.d3.loss_cls: 0.0657, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5234, decode.d4.loss_cls: 0.0677, decode.d4.loss_mask: 0.1906, decode.d4.loss_dice: 0.5264, decode.d5.loss_cls: 0.0647, decode.d5.loss_mask: 0.1898, decode.d5.loss_dice: 0.5233, decode.d6.loss_cls: 0.0678, decode.d6.loss_mask: 0.1905, decode.d6.loss_dice: 0.5200, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1909, decode.d7.loss_dice: 0.5249, decode.d8.loss_cls: 0.0615, decode.d8.loss_mask: 0.1900, decode.d8.loss_dice: 0.5197, loss: 8.0909 +2022-05-11 00:52:27,591 - mmseg - INFO - Iter [56350/80000] lr: 4.245e-07, eta: 16:36:07, time: 1.791, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 
0.1889, decode.loss_dice: 0.5233, decode.d0.loss_cls: 0.2912, decode.d0.loss_mask: 0.1958, decode.d0.loss_dice: 0.5468, decode.d1.loss_cls: 0.0767, decode.d1.loss_mask: 0.1900, decode.d1.loss_dice: 0.5284, decode.d2.loss_cls: 0.0612, decode.d2.loss_mask: 0.1894, decode.d2.loss_dice: 0.5273, decode.d3.loss_cls: 0.0564, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5244, decode.d4.loss_cls: 0.0593, decode.d4.loss_mask: 0.1891, decode.d4.loss_dice: 0.5252, decode.d5.loss_cls: 0.0563, decode.d5.loss_mask: 0.1894, decode.d5.loss_dice: 0.5198, decode.d6.loss_cls: 0.0566, decode.d6.loss_mask: 0.1890, decode.d6.loss_dice: 0.5201, decode.d7.loss_cls: 0.0522, decode.d7.loss_mask: 0.1887, decode.d7.loss_dice: 0.5226, decode.d8.loss_cls: 0.0565, decode.d8.loss_mask: 0.1886, decode.d8.loss_dice: 0.5220, loss: 7.9772 +2022-05-11 00:54:01,881 - mmseg - INFO - Iter [56400/80000] lr: 4.236e-07, eta: 16:32:30, time: 1.886, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0495, decode.loss_mask: 0.1839, decode.loss_dice: 0.4928, decode.d0.loss_cls: 0.2976, decode.d0.loss_mask: 0.1912, decode.d0.loss_dice: 0.5176, decode.d1.loss_cls: 0.0650, decode.d1.loss_mask: 0.1856, decode.d1.loss_dice: 0.5052, decode.d2.loss_cls: 0.0563, decode.d2.loss_mask: 0.1846, decode.d2.loss_dice: 0.4957, decode.d3.loss_cls: 0.0540, decode.d3.loss_mask: 0.1844, decode.d3.loss_dice: 0.4971, decode.d4.loss_cls: 0.0509, decode.d4.loss_mask: 0.1846, decode.d4.loss_dice: 0.4936, decode.d5.loss_cls: 0.0492, decode.d5.loss_mask: 0.1847, decode.d5.loss_dice: 0.4966, decode.d6.loss_cls: 0.0518, decode.d6.loss_mask: 0.1845, decode.d6.loss_dice: 0.4964, decode.d7.loss_cls: 0.0482, decode.d7.loss_mask: 0.1844, decode.d7.loss_dice: 0.4952, decode.d8.loss_cls: 0.0499, decode.d8.loss_mask: 0.1842, decode.d8.loss_dice: 0.4950, loss: 7.6095 +2022-05-11 00:55:31,980 - mmseg - INFO - Iter [56450/80000] lr: 4.227e-07, eta: 16:28:43, time: 1.802, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0577, 
decode.loss_mask: 0.1865, decode.loss_dice: 0.5138, decode.d0.loss_cls: 0.2971, decode.d0.loss_mask: 0.1940, decode.d0.loss_dice: 0.5410, decode.d1.loss_cls: 0.0787, decode.d1.loss_mask: 0.1887, decode.d1.loss_dice: 0.5187, decode.d2.loss_cls: 0.0659, decode.d2.loss_mask: 0.1877, decode.d2.loss_dice: 0.5127, decode.d3.loss_cls: 0.0594, decode.d3.loss_mask: 0.1875, decode.d3.loss_dice: 0.5104, decode.d4.loss_cls: 0.0594, decode.d4.loss_mask: 0.1872, decode.d4.loss_dice: 0.5139, decode.d5.loss_cls: 0.0565, decode.d5.loss_mask: 0.1871, decode.d5.loss_dice: 0.5110, decode.d6.loss_cls: 0.0575, decode.d6.loss_mask: 0.1873, decode.d6.loss_dice: 0.5134, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1872, decode.d7.loss_dice: 0.5108, decode.d8.loss_cls: 0.0544, decode.d8.loss_mask: 0.1868, decode.d8.loss_dice: 0.5120, loss: 7.8808 +2022-05-11 00:57:02,459 - mmseg - INFO - Iter [56500/80000] lr: 4.218e-07, eta: 16:24:59, time: 1.810, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0613, decode.loss_mask: 0.1898, decode.loss_dice: 0.5197, decode.d0.loss_cls: 0.3076, decode.d0.loss_mask: 0.1981, decode.d0.loss_dice: 0.5442, decode.d1.loss_cls: 0.0774, decode.d1.loss_mask: 0.1906, decode.d1.loss_dice: 0.5270, decode.d2.loss_cls: 0.0659, decode.d2.loss_mask: 0.1914, decode.d2.loss_dice: 0.5252, decode.d3.loss_cls: 0.0612, decode.d3.loss_mask: 0.1905, decode.d3.loss_dice: 0.5220, decode.d4.loss_cls: 0.0601, decode.d4.loss_mask: 0.1898, decode.d4.loss_dice: 0.5188, decode.d5.loss_cls: 0.0586, decode.d5.loss_mask: 0.1897, decode.d5.loss_dice: 0.5181, decode.d6.loss_cls: 0.0632, decode.d6.loss_mask: 0.1901, decode.d6.loss_dice: 0.5210, decode.d7.loss_cls: 0.0615, decode.d7.loss_mask: 0.1895, decode.d7.loss_dice: 0.5177, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1898, decode.d8.loss_dice: 0.5185, loss: 8.0132 +2022-05-11 00:58:32,351 - mmseg - INFO - Iter [56550/80000] lr: 4.209e-07, eta: 16:21:15, time: 1.798, data_time: 0.018, memory: 69063, decode.loss_cls: 
0.0563, decode.loss_mask: 0.1933, decode.loss_dice: 0.5232, decode.d0.loss_cls: 0.3058, decode.d0.loss_mask: 0.2011, decode.d0.loss_dice: 0.5483, decode.d1.loss_cls: 0.0780, decode.d1.loss_mask: 0.1938, decode.d1.loss_dice: 0.5343, decode.d2.loss_cls: 0.0624, decode.d2.loss_mask: 0.1944, decode.d2.loss_dice: 0.5302, decode.d3.loss_cls: 0.0601, decode.d3.loss_mask: 0.1934, decode.d3.loss_dice: 0.5236, decode.d4.loss_cls: 0.0644, decode.d4.loss_mask: 0.1942, decode.d4.loss_dice: 0.5272, decode.d5.loss_cls: 0.0639, decode.d5.loss_mask: 0.1940, decode.d5.loss_dice: 0.5207, decode.d6.loss_cls: 0.0585, decode.d6.loss_mask: 0.1938, decode.d6.loss_dice: 0.5229, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1936, decode.d7.loss_dice: 0.5224, decode.d8.loss_cls: 0.0572, decode.d8.loss_mask: 0.1932, decode.d8.loss_dice: 0.5234, loss: 8.0880 +2022-05-11 01:00:04,798 - mmseg - INFO - Iter [56600/80000] lr: 4.200e-07, eta: 16:17:40, time: 1.849, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0485, decode.loss_mask: 0.1856, decode.loss_dice: 0.5111, decode.d0.loss_cls: 0.2961, decode.d0.loss_mask: 0.1924, decode.d0.loss_dice: 0.5336, decode.d1.loss_cls: 0.0675, decode.d1.loss_mask: 0.1864, decode.d1.loss_dice: 0.5197, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1860, decode.d2.loss_dice: 0.5172, decode.d3.loss_cls: 0.0588, decode.d3.loss_mask: 0.1854, decode.d3.loss_dice: 0.5159, decode.d4.loss_cls: 0.0540, decode.d4.loss_mask: 0.1855, decode.d4.loss_dice: 0.5148, decode.d5.loss_cls: 0.0535, decode.d5.loss_mask: 0.1857, decode.d5.loss_dice: 0.5148, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.1855, decode.d6.loss_dice: 0.5133, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1857, decode.d7.loss_dice: 0.5175, decode.d8.loss_cls: 0.0503, decode.d8.loss_mask: 0.1855, decode.d8.loss_dice: 0.5115, loss: 7.8378 +2022-05-11 01:01:36,329 - mmseg - INFO - Iter [56650/80000] lr: 4.191e-07, eta: 16:14:03, time: 1.831, data_time: 0.019, memory: 69063, 
decode.loss_cls: 0.0501, decode.loss_mask: 0.1903, decode.loss_dice: 0.5160, decode.d0.loss_cls: 0.2837, decode.d0.loss_mask: 0.1972, decode.d0.loss_dice: 0.5322, decode.d1.loss_cls: 0.0650, decode.d1.loss_mask: 0.1917, decode.d1.loss_dice: 0.5175, decode.d2.loss_cls: 0.0576, decode.d2.loss_mask: 0.1911, decode.d2.loss_dice: 0.5168, decode.d3.loss_cls: 0.0559, decode.d3.loss_mask: 0.1900, decode.d3.loss_dice: 0.5141, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1903, decode.d4.loss_dice: 0.5149, decode.d5.loss_cls: 0.0582, decode.d5.loss_mask: 0.1904, decode.d5.loss_dice: 0.5124, decode.d6.loss_cls: 0.0516, decode.d6.loss_mask: 0.1903, decode.d6.loss_dice: 0.5112, decode.d7.loss_cls: 0.0512, decode.d7.loss_mask: 0.1908, decode.d7.loss_dice: 0.5175, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1902, decode.d8.loss_dice: 0.5134, loss: 7.8624 +2022-05-11 01:03:07,377 - mmseg - INFO - Iter [56700/80000] lr: 4.182e-07, eta: 16:10:27, time: 1.821, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0508, decode.loss_mask: 0.1898, decode.loss_dice: 0.5110, decode.d0.loss_cls: 0.2836, decode.d0.loss_mask: 0.1976, decode.d0.loss_dice: 0.5345, decode.d1.loss_cls: 0.0721, decode.d1.loss_mask: 0.1920, decode.d1.loss_dice: 0.5239, decode.d2.loss_cls: 0.0555, decode.d2.loss_mask: 0.1903, decode.d2.loss_dice: 0.5207, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1904, decode.d3.loss_dice: 0.5149, decode.d4.loss_cls: 0.0530, decode.d4.loss_mask: 0.1904, decode.d4.loss_dice: 0.5136, decode.d5.loss_cls: 0.0543, decode.d5.loss_mask: 0.1903, decode.d5.loss_dice: 0.5153, decode.d6.loss_cls: 0.0487, decode.d6.loss_mask: 0.1898, decode.d6.loss_dice: 0.5127, decode.d7.loss_cls: 0.0483, decode.d7.loss_mask: 0.1902, decode.d7.loss_dice: 0.5140, decode.d8.loss_cls: 0.0450, decode.d8.loss_mask: 0.1900, decode.d8.loss_dice: 0.5152, loss: 7.8501 +2022-05-11 01:04:40,277 - mmseg - INFO - Iter [56750/80000] lr: 4.173e-07, eta: 16:06:57, time: 1.858, data_time: 0.064, memory: 
69063, decode.loss_cls: 0.0507, decode.loss_mask: 0.1914, decode.loss_dice: 0.5028, decode.d0.loss_cls: 0.2917, decode.d0.loss_mask: 0.1996, decode.d0.loss_dice: 0.5319, decode.d1.loss_cls: 0.0652, decode.d1.loss_mask: 0.1932, decode.d1.loss_dice: 0.5143, decode.d2.loss_cls: 0.0498, decode.d2.loss_mask: 0.1920, decode.d2.loss_dice: 0.5102, decode.d3.loss_cls: 0.0459, decode.d3.loss_mask: 0.1915, decode.d3.loss_dice: 0.5099, decode.d4.loss_cls: 0.0507, decode.d4.loss_mask: 0.1916, decode.d4.loss_dice: 0.5081, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1918, decode.d5.loss_dice: 0.5074, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1917, decode.d6.loss_dice: 0.5060, decode.d7.loss_cls: 0.0464, decode.d7.loss_mask: 0.1920, decode.d7.loss_dice: 0.5069, decode.d8.loss_cls: 0.0513, decode.d8.loss_mask: 0.1916, decode.d8.loss_dice: 0.5084, loss: 7.7865 +2022-05-11 01:06:09,493 - mmseg - INFO - Iter [56800/80000] lr: 4.164e-07, eta: 16:03:18, time: 1.784, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0517, decode.loss_mask: 0.1947, decode.loss_dice: 0.5008, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.2020, decode.d0.loss_dice: 0.5269, decode.d1.loss_cls: 0.0724, decode.d1.loss_mask: 0.1959, decode.d1.loss_dice: 0.5072, decode.d2.loss_cls: 0.0621, decode.d2.loss_mask: 0.1954, decode.d2.loss_dice: 0.5065, decode.d3.loss_cls: 0.0615, decode.d3.loss_mask: 0.1953, decode.d3.loss_dice: 0.5037, decode.d4.loss_cls: 0.0607, decode.d4.loss_mask: 0.1950, decode.d4.loss_dice: 0.5046, decode.d5.loss_cls: 0.0552, decode.d5.loss_mask: 0.1952, decode.d5.loss_dice: 0.5037, decode.d6.loss_cls: 0.0585, decode.d6.loss_mask: 0.1949, decode.d6.loss_dice: 0.5016, decode.d7.loss_cls: 0.0552, decode.d7.loss_mask: 0.1947, decode.d7.loss_dice: 0.5043, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1945, decode.d8.loss_dice: 0.5036, loss: 7.8476 +2022-05-11 01:07:38,838 - mmseg - INFO - Iter [56850/80000] lr: 4.155e-07, eta: 15:59:41, time: 1.787, data_time: 0.018, 
memory: 69063, decode.loss_cls: 0.0618, decode.loss_mask: 0.1878, decode.loss_dice: 0.5102, decode.d0.loss_cls: 0.3051, decode.d0.loss_mask: 0.1959, decode.d0.loss_dice: 0.5375, decode.d1.loss_cls: 0.0805, decode.d1.loss_mask: 0.1898, decode.d1.loss_dice: 0.5181, decode.d2.loss_cls: 0.0657, decode.d2.loss_mask: 0.1898, decode.d2.loss_dice: 0.5131, decode.d3.loss_cls: 0.0677, decode.d3.loss_mask: 0.1890, decode.d3.loss_dice: 0.5098, decode.d4.loss_cls: 0.0706, decode.d4.loss_mask: 0.1889, decode.d4.loss_dice: 0.5077, decode.d5.loss_cls: 0.0618, decode.d5.loss_mask: 0.1882, decode.d5.loss_dice: 0.5079, decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.1880, decode.d6.loss_dice: 0.5037, decode.d7.loss_cls: 0.0637, decode.d7.loss_mask: 0.1881, decode.d7.loss_dice: 0.5098, decode.d8.loss_cls: 0.0611, decode.d8.loss_mask: 0.1875, decode.d8.loss_dice: 0.5071, loss: 7.9153 +2022-05-11 01:09:07,466 - mmseg - INFO - Iter [56900/80000] lr: 4.146e-07, eta: 15:56:04, time: 1.773, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0609, decode.loss_mask: 0.1847, decode.loss_dice: 0.5141, decode.d0.loss_cls: 0.2992, decode.d0.loss_mask: 0.1927, decode.d0.loss_dice: 0.5458, decode.d1.loss_cls: 0.0778, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5252, decode.d2.loss_cls: 0.0683, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.5219, decode.d3.loss_cls: 0.0636, decode.d3.loss_mask: 0.1860, decode.d3.loss_dice: 0.5143, decode.d4.loss_cls: 0.0659, decode.d4.loss_mask: 0.1863, decode.d4.loss_dice: 0.5166, decode.d5.loss_cls: 0.0655, decode.d5.loss_mask: 0.1854, decode.d5.loss_dice: 0.5166, decode.d6.loss_cls: 0.0653, decode.d6.loss_mask: 0.1853, decode.d6.loss_dice: 0.5148, decode.d7.loss_cls: 0.0623, decode.d7.loss_mask: 0.1855, decode.d7.loss_dice: 0.5171, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1849, decode.d8.loss_dice: 0.5151, loss: 7.9535 +2022-05-11 01:10:39,317 - mmseg - INFO - Iter [56950/80000] lr: 4.137e-07, eta: 15:52:37, time: 1.837, data_time: 
0.065, memory: 69063, decode.loss_cls: 0.0581, decode.loss_mask: 0.1848, decode.loss_dice: 0.5183, decode.d0.loss_cls: 0.3056, decode.d0.loss_mask: 0.1933, decode.d0.loss_dice: 0.5457, decode.d1.loss_cls: 0.0733, decode.d1.loss_mask: 0.1865, decode.d1.loss_dice: 0.5300, decode.d2.loss_cls: 0.0729, decode.d2.loss_mask: 0.1858, decode.d2.loss_dice: 0.5302, decode.d3.loss_cls: 0.0613, decode.d3.loss_mask: 0.1857, decode.d3.loss_dice: 0.5223, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1860, decode.d4.loss_dice: 0.5219, decode.d5.loss_cls: 0.0608, decode.d5.loss_mask: 0.1854, decode.d5.loss_dice: 0.5210, decode.d6.loss_cls: 0.0669, decode.d6.loss_mask: 0.1849, decode.d6.loss_dice: 0.5203, decode.d7.loss_cls: 0.0606, decode.d7.loss_mask: 0.1852, decode.d7.loss_dice: 0.5229, decode.d8.loss_cls: 0.0606, decode.d8.loss_mask: 0.1851, decode.d8.loss_dice: 0.5228, loss: 7.9999 +2022-05-11 01:12:09,038 - mmseg - INFO - Saving checkpoint at 57000 iterations +2022-05-11 01:12:40,915 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 01:12:40,923 - mmseg - INFO - Iter [57000/80000] lr: 4.128e-07, eta: 15:50:27, time: 2.430, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0578, decode.loss_mask: 0.1921, decode.loss_dice: 0.5232, decode.d0.loss_cls: 0.2858, decode.d0.loss_mask: 0.2010, decode.d0.loss_dice: 0.5464, decode.d1.loss_cls: 0.0760, decode.d1.loss_mask: 0.1947, decode.d1.loss_dice: 0.5335, decode.d2.loss_cls: 0.0695, decode.d2.loss_mask: 0.1931, decode.d2.loss_dice: 0.5308, decode.d3.loss_cls: 0.0560, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5233, decode.d4.loss_cls: 0.0584, decode.d4.loss_mask: 0.1929, decode.d4.loss_dice: 0.5287, decode.d5.loss_cls: 0.0591, decode.d5.loss_mask: 0.1923, decode.d5.loss_dice: 0.5216, decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5264, decode.d7.loss_cls: 0.0594, decode.d7.loss_mask: 0.1926, decode.d7.loss_dice: 0.5242, decode.d8.loss_cls: 
0.0544, decode.d8.loss_mask: 0.1923, decode.d8.loss_dice: 0.5234, loss: 8.0539 +2022-05-11 01:14:38,222 - mmseg - INFO - per class results: +2022-05-11 01:14:38,233 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.54 | 99.18 | +| sidewalk | 88.34 | 93.85 | +| building | 94.37 | 97.01 | +| wall | 70.57 | 80.91 | +| fence | 74.28 | 81.29 | +| pole | 71.42 | 83.62 | +| traffic light | 77.06 | 87.36 | +| traffic sign | 83.93 | 90.5 | +| vegetation | 93.33 | 96.97 | +| terrain | 67.86 | 76.07 | +| sky | 95.66 | 98.63 | +| person | 86.71 | 94.35 | +| rider | 74.28 | 84.79 | +| car | 96.2 | 98.27 | +| truck | 81.78 | 94.64 | +| bus | 93.47 | 96.62 | +| train | 87.86 | 90.43 | +| motorcycle | 77.85 | 87.24 | +| bicycle | 82.81 | 91.48 | ++---------------+-------+-------+ +2022-05-11 01:14:38,233 - mmseg - INFO - Summary: +2022-05-11 01:14:38,233 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.98 | 84.02 | 90.69 | ++-------+-------+-------+ +2022-05-11 01:14:38,236 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 01:14:38,236 - mmseg - INFO - Iter(val) [32] aAcc: 0.9698, mIoU: 0.8402, mAcc: 0.9069, IoU.road: 0.9854, IoU.sidewalk: 0.8834, IoU.building: 0.9437, IoU.wall: 0.7057, IoU.fence: 0.7428, IoU.pole: 0.7142, IoU.traffic light: 0.7706, IoU.traffic sign: 0.8393, IoU.vegetation: 0.9333, IoU.terrain: 0.6786, IoU.sky: 0.9566, IoU.person: 0.8671, IoU.rider: 0.7428, IoU.car: 0.9620, IoU.truck: 0.8178, IoU.bus: 0.9347, IoU.train: 0.8786, IoU.motorcycle: 0.7785, IoU.bicycle: 0.8281, Acc.road: 0.9918, Acc.sidewalk: 0.9385, Acc.building: 0.9701, Acc.wall: 0.8091, Acc.fence: 0.8129, Acc.pole: 0.8362, Acc.traffic light: 0.8736, Acc.traffic sign: 0.9050, Acc.vegetation: 0.9697, Acc.terrain: 0.7607, Acc.sky: 0.9863, Acc.person: 0.9435, Acc.rider: 0.8479, Acc.car: 0.9827, Acc.truck: 0.9464, Acc.bus: 0.9662, 
Acc.train: 0.9043, Acc.motorcycle: 0.8724, Acc.bicycle: 0.9148 +2022-05-11 01:16:07,619 - mmseg - INFO - Iter [57050/80000] lr: 4.119e-07, eta: 15:51:53, time: 4.136, data_time: 2.365, memory: 69063, decode.loss_cls: 0.0526, decode.loss_mask: 0.1889, decode.loss_dice: 0.5131, decode.d0.loss_cls: 0.2884, decode.d0.loss_mask: 0.1976, decode.d0.loss_dice: 0.5371, decode.d1.loss_cls: 0.0684, decode.d1.loss_mask: 0.1912, decode.d1.loss_dice: 0.5234, decode.d2.loss_cls: 0.0562, decode.d2.loss_mask: 0.1906, decode.d2.loss_dice: 0.5161, decode.d3.loss_cls: 0.0549, decode.d3.loss_mask: 0.1898, decode.d3.loss_dice: 0.5138, decode.d4.loss_cls: 0.0583, decode.d4.loss_mask: 0.1899, decode.d4.loss_dice: 0.5086, decode.d5.loss_cls: 0.0548, decode.d5.loss_mask: 0.1894, decode.d5.loss_dice: 0.5128, decode.d6.loss_cls: 0.0556, decode.d6.loss_mask: 0.1895, decode.d6.loss_dice: 0.5156, decode.d7.loss_cls: 0.0540, decode.d7.loss_mask: 0.1890, decode.d7.loss_dice: 0.5104, decode.d8.loss_cls: 0.0531, decode.d8.loss_mask: 0.1893, decode.d8.loss_dice: 0.5122, loss: 7.8646 +2022-05-11 01:17:37,445 - mmseg - INFO - Iter [57100/80000] lr: 4.110e-07, eta: 15:48:21, time: 1.797, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0532, decode.loss_mask: 0.1836, decode.loss_dice: 0.5224, decode.d0.loss_cls: 0.2955, decode.d0.loss_mask: 0.1895, decode.d0.loss_dice: 0.5442, decode.d1.loss_cls: 0.0734, decode.d1.loss_mask: 0.1854, decode.d1.loss_dice: 0.5273, decode.d2.loss_cls: 0.0744, decode.d2.loss_mask: 0.1842, decode.d2.loss_dice: 0.5243, decode.d3.loss_cls: 0.0593, decode.d3.loss_mask: 0.1835, decode.d3.loss_dice: 0.5196, decode.d4.loss_cls: 0.0626, decode.d4.loss_mask: 0.1835, decode.d4.loss_dice: 0.5257, decode.d5.loss_cls: 0.0600, decode.d5.loss_mask: 0.1835, decode.d5.loss_dice: 0.5200, decode.d6.loss_cls: 0.0583, decode.d6.loss_mask: 0.1835, decode.d6.loss_dice: 0.5204, decode.d7.loss_cls: 0.0575, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.5174, decode.d8.loss_cls: 0.0570, 
decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.5188, loss: 7.9346 +2022-05-11 01:19:09,399 - mmseg - INFO - Iter [57150/80000] lr: 4.101e-07, eta: 15:44:56, time: 1.840, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0673, decode.loss_mask: 0.1933, decode.loss_dice: 0.5214, decode.d0.loss_cls: 0.3114, decode.d0.loss_mask: 0.2000, decode.d0.loss_dice: 0.5417, decode.d1.loss_cls: 0.0848, decode.d1.loss_mask: 0.1946, decode.d1.loss_dice: 0.5283, decode.d2.loss_cls: 0.0772, decode.d2.loss_mask: 0.1928, decode.d2.loss_dice: 0.5264, decode.d3.loss_cls: 0.0745, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5221, decode.d4.loss_cls: 0.0729, decode.d4.loss_mask: 0.1928, decode.d4.loss_dice: 0.5216, decode.d5.loss_cls: 0.0731, decode.d5.loss_mask: 0.1933, decode.d5.loss_dice: 0.5212, decode.d6.loss_cls: 0.0679, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5207, decode.d7.loss_cls: 0.0659, decode.d7.loss_mask: 0.1932, decode.d7.loss_dice: 0.5217, decode.d8.loss_cls: 0.0697, decode.d8.loss_mask: 0.1932, decode.d8.loss_dice: 0.5212, loss: 8.1498 +2022-05-11 01:20:38,993 - mmseg - INFO - Iter [57200/80000] lr: 4.092e-07, eta: 15:41:27, time: 1.792, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0556, decode.loss_mask: 0.1913, decode.loss_dice: 0.5160, decode.d0.loss_cls: 0.3190, decode.d0.loss_mask: 0.1994, decode.d0.loss_dice: 0.5448, decode.d1.loss_cls: 0.0798, decode.d1.loss_mask: 0.1931, decode.d1.loss_dice: 0.5300, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 0.1926, decode.d2.loss_dice: 0.5216, decode.d3.loss_cls: 0.0660, decode.d3.loss_mask: 0.1912, decode.d3.loss_dice: 0.5235, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1916, decode.d4.loss_dice: 0.5211, decode.d5.loss_cls: 0.0593, decode.d5.loss_mask: 0.1921, decode.d5.loss_dice: 0.5210, decode.d6.loss_cls: 0.0565, decode.d6.loss_mask: 0.1910, decode.d6.loss_dice: 0.5175, decode.d7.loss_cls: 0.0551, decode.d7.loss_mask: 0.1916, decode.d7.loss_dice: 0.5140, decode.d8.loss_cls: 
0.0564, decode.d8.loss_mask: 0.1920, decode.d8.loss_dice: 0.5194, loss: 8.0235 +2022-05-11 01:22:08,989 - mmseg - INFO - Iter [57250/80000] lr: 4.083e-07, eta: 15:38:00, time: 1.800, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0499, decode.loss_mask: 0.1905, decode.loss_dice: 0.5078, decode.d0.loss_cls: 0.2879, decode.d0.loss_mask: 0.1971, decode.d0.loss_dice: 0.5292, decode.d1.loss_cls: 0.0612, decode.d1.loss_mask: 0.1921, decode.d1.loss_dice: 0.5130, decode.d2.loss_cls: 0.0549, decode.d2.loss_mask: 0.1914, decode.d2.loss_dice: 0.5160, decode.d3.loss_cls: 0.0577, decode.d3.loss_mask: 0.1914, decode.d3.loss_dice: 0.5109, decode.d4.loss_cls: 0.0523, decode.d4.loss_mask: 0.1915, decode.d4.loss_dice: 0.5085, decode.d5.loss_cls: 0.0539, decode.d5.loss_mask: 0.1911, decode.d5.loss_dice: 0.5110, decode.d6.loss_cls: 0.0556, decode.d6.loss_mask: 0.1910, decode.d6.loss_dice: 0.5123, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1911, decode.d7.loss_dice: 0.5083, decode.d8.loss_cls: 0.0481, decode.d8.loss_mask: 0.1908, decode.d8.loss_dice: 0.5067, loss: 7.8176 +2022-05-11 01:23:40,750 - mmseg - INFO - Iter [57300/80000] lr: 4.074e-07, eta: 15:34:38, time: 1.835, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0569, decode.loss_mask: 0.1864, decode.loss_dice: 0.5155, decode.d0.loss_cls: 0.3030, decode.d0.loss_mask: 0.1918, decode.d0.loss_dice: 0.5414, decode.d1.loss_cls: 0.0790, decode.d1.loss_mask: 0.1873, decode.d1.loss_dice: 0.5217, decode.d2.loss_cls: 0.0637, decode.d2.loss_mask: 0.1867, decode.d2.loss_dice: 0.5231, decode.d3.loss_cls: 0.0601, decode.d3.loss_mask: 0.1870, decode.d3.loss_dice: 0.5176, decode.d4.loss_cls: 0.0587, decode.d4.loss_mask: 0.1869, decode.d4.loss_dice: 0.5167, decode.d5.loss_cls: 0.0598, decode.d5.loss_mask: 0.1865, decode.d5.loss_dice: 0.5126, decode.d6.loss_cls: 0.0579, decode.d6.loss_mask: 0.1873, decode.d6.loss_dice: 0.5174, decode.d7.loss_cls: 0.0533, decode.d7.loss_mask: 0.1865, decode.d7.loss_dice: 0.5122, 
decode.d8.loss_cls: 0.0579, decode.d8.loss_mask: 0.1862, decode.d8.loss_dice: 0.5126, loss: 7.9137 +2022-05-11 01:25:10,726 - mmseg - INFO - Iter [57350/80000] lr: 4.065e-07, eta: 15:31:13, time: 1.800, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0528, decode.loss_mask: 0.1860, decode.loss_dice: 0.5088, decode.d0.loss_cls: 0.2921, decode.d0.loss_mask: 0.1925, decode.d0.loss_dice: 0.5339, decode.d1.loss_cls: 0.0707, decode.d1.loss_mask: 0.1875, decode.d1.loss_dice: 0.5158, decode.d2.loss_cls: 0.0584, decode.d2.loss_mask: 0.1870, decode.d2.loss_dice: 0.5144, decode.d3.loss_cls: 0.0592, decode.d3.loss_mask: 0.1863, decode.d3.loss_dice: 0.5109, decode.d4.loss_cls: 0.0583, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 0.5097, decode.d5.loss_cls: 0.0599, decode.d5.loss_mask: 0.1864, decode.d5.loss_dice: 0.5100, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1864, decode.d6.loss_dice: 0.5124, decode.d7.loss_cls: 0.0508, decode.d7.loss_mask: 0.1857, decode.d7.loss_dice: 0.5089, decode.d8.loss_cls: 0.0498, decode.d8.loss_mask: 0.1859, decode.d8.loss_dice: 0.5079, loss: 7.8027 +2022-05-11 01:26:40,206 - mmseg - INFO - Iter [57400/80000] lr: 4.056e-07, eta: 15:27:48, time: 1.790, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0630, decode.loss_mask: 0.1946, decode.loss_dice: 0.5262, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.2031, decode.d0.loss_dice: 0.5553, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1957, decode.d1.loss_dice: 0.5349, decode.d2.loss_cls: 0.0715, decode.d2.loss_mask: 0.1954, decode.d2.loss_dice: 0.5288, decode.d3.loss_cls: 0.0647, decode.d3.loss_mask: 0.1946, decode.d3.loss_dice: 0.5282, decode.d4.loss_cls: 0.0568, decode.d4.loss_mask: 0.1945, decode.d4.loss_dice: 0.5265, decode.d5.loss_cls: 0.0671, decode.d5.loss_mask: 0.1950, decode.d5.loss_dice: 0.5274, decode.d6.loss_cls: 0.0605, decode.d6.loss_mask: 0.1941, decode.d6.loss_dice: 0.5260, decode.d7.loss_cls: 0.0599, decode.d7.loss_mask: 0.1943, decode.d7.loss_dice: 
0.5260, decode.d8.loss_cls: 0.0589, decode.d8.loss_mask: 0.1943, decode.d8.loss_dice: 0.5263, loss: 8.1433 +2022-05-11 01:28:11,774 - mmseg - INFO - Iter [57450/80000] lr: 4.047e-07, eta: 15:24:30, time: 1.831, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0519, decode.loss_mask: 0.1902, decode.loss_dice: 0.5085, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.1963, decode.d0.loss_dice: 0.5364, decode.d1.loss_cls: 0.0722, decode.d1.loss_mask: 0.1907, decode.d1.loss_dice: 0.5145, decode.d2.loss_cls: 0.0589, decode.d2.loss_mask: 0.1907, decode.d2.loss_dice: 0.5141, decode.d3.loss_cls: 0.0535, decode.d3.loss_mask: 0.1906, decode.d3.loss_dice: 0.5084, decode.d4.loss_cls: 0.0534, decode.d4.loss_mask: 0.1898, decode.d4.loss_dice: 0.5100, decode.d5.loss_cls: 0.0591, decode.d5.loss_mask: 0.1901, decode.d5.loss_dice: 0.5098, decode.d6.loss_cls: 0.0541, decode.d6.loss_mask: 0.1899, decode.d6.loss_dice: 0.5060, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1901, decode.d7.loss_dice: 0.5091, decode.d8.loss_cls: 0.0517, decode.d8.loss_mask: 0.1904, decode.d8.loss_dice: 0.5071, loss: 7.8342 +2022-05-11 01:29:45,251 - mmseg - INFO - Iter [57500/80000] lr: 4.038e-07, eta: 15:21:17, time: 1.870, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0428, decode.loss_mask: 0.1881, decode.loss_dice: 0.5144, decode.d0.loss_cls: 0.2910, decode.d0.loss_mask: 0.1946, decode.d0.loss_dice: 0.5330, decode.d1.loss_cls: 0.0642, decode.d1.loss_mask: 0.1894, decode.d1.loss_dice: 0.5157, decode.d2.loss_cls: 0.0605, decode.d2.loss_mask: 0.1887, decode.d2.loss_dice: 0.5111, decode.d3.loss_cls: 0.0533, decode.d3.loss_mask: 0.1889, decode.d3.loss_dice: 0.5093, decode.d4.loss_cls: 0.0511, decode.d4.loss_mask: 0.1885, decode.d4.loss_dice: 0.5127, decode.d5.loss_cls: 0.0486, decode.d5.loss_mask: 0.1880, decode.d5.loss_dice: 0.5091, decode.d6.loss_cls: 0.0474, decode.d6.loss_mask: 0.1882, decode.d6.loss_dice: 0.5069, decode.d7.loss_cls: 0.0444, decode.d7.loss_mask: 0.1881, 
decode.d7.loss_dice: 0.5088, decode.d8.loss_cls: 0.0462, decode.d8.loss_mask: 0.1882, decode.d8.loss_dice: 0.5094, loss: 7.7708 +2022-05-11 01:31:13,510 - mmseg - INFO - Iter [57550/80000] lr: 4.029e-07, eta: 15:17:53, time: 1.765, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0570, decode.loss_mask: 0.1820, decode.loss_dice: 0.5144, decode.d0.loss_cls: 0.3054, decode.d0.loss_mask: 0.1903, decode.d0.loss_dice: 0.5420, decode.d1.loss_cls: 0.0758, decode.d1.loss_mask: 0.1832, decode.d1.loss_dice: 0.5209, decode.d2.loss_cls: 0.0632, decode.d2.loss_mask: 0.1829, decode.d2.loss_dice: 0.5215, decode.d3.loss_cls: 0.0705, decode.d3.loss_mask: 0.1821, decode.d3.loss_dice: 0.5216, decode.d4.loss_cls: 0.0654, decode.d4.loss_mask: 0.1818, decode.d4.loss_dice: 0.5159, decode.d5.loss_cls: 0.0641, decode.d5.loss_mask: 0.1824, decode.d5.loss_dice: 0.5156, decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.5170, decode.d7.loss_cls: 0.0634, decode.d7.loss_mask: 0.1818, decode.d7.loss_dice: 0.5160, decode.d8.loss_cls: 0.0639, decode.d8.loss_mask: 0.1821, decode.d8.loss_dice: 0.5124, loss: 7.9154 +2022-05-11 01:32:43,600 - mmseg - INFO - Iter [57600/80000] lr: 4.020e-07, eta: 15:14:34, time: 1.802, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0596, decode.loss_mask: 0.1842, decode.loss_dice: 0.5115, decode.d0.loss_cls: 0.2986, decode.d0.loss_mask: 0.1904, decode.d0.loss_dice: 0.5431, decode.d1.loss_cls: 0.0832, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5210, decode.d2.loss_cls: 0.0727, decode.d2.loss_mask: 0.1846, decode.d2.loss_dice: 0.5163, decode.d3.loss_cls: 0.0692, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.5185, decode.d4.loss_cls: 0.0656, decode.d4.loss_mask: 0.1842, decode.d4.loss_dice: 0.5148, decode.d5.loss_cls: 0.0660, decode.d5.loss_mask: 0.1850, decode.d5.loss_dice: 0.5159, decode.d6.loss_cls: 0.0626, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.5145, decode.d7.loss_cls: 0.0655, decode.d7.loss_mask: 
0.1846, decode.d7.loss_dice: 0.5137, decode.d8.loss_cls: 0.0593, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.5118, loss: 7.9354 +2022-05-11 01:34:12,757 - mmseg - INFO - Iter [57650/80000] lr: 4.011e-07, eta: 15:11:15, time: 1.783, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0560, decode.loss_mask: 0.1926, decode.loss_dice: 0.4969, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.2005, decode.d0.loss_dice: 0.5218, decode.d1.loss_cls: 0.0704, decode.d1.loss_mask: 0.1932, decode.d1.loss_dice: 0.5111, decode.d2.loss_cls: 0.0679, decode.d2.loss_mask: 0.1925, decode.d2.loss_dice: 0.5045, decode.d3.loss_cls: 0.0632, decode.d3.loss_mask: 0.1923, decode.d3.loss_dice: 0.5024, decode.d4.loss_cls: 0.0651, decode.d4.loss_mask: 0.1921, decode.d4.loss_dice: 0.5025, decode.d5.loss_cls: 0.0606, decode.d5.loss_mask: 0.1920, decode.d5.loss_dice: 0.5004, decode.d6.loss_cls: 0.0617, decode.d6.loss_mask: 0.1927, decode.d6.loss_dice: 0.4980, decode.d7.loss_cls: 0.0666, decode.d7.loss_mask: 0.1927, decode.d7.loss_dice: 0.5033, decode.d8.loss_cls: 0.0582, decode.d8.loss_mask: 0.1921, decode.d8.loss_dice: 0.5010, loss: 7.8473 +2022-05-11 01:35:45,439 - mmseg - INFO - Iter [57700/80000] lr: 4.002e-07, eta: 15:08:04, time: 1.854, data_time: 0.068, memory: 69063, decode.loss_cls: 0.0552, decode.loss_mask: 0.1897, decode.loss_dice: 0.5167, decode.d0.loss_cls: 0.3060, decode.d0.loss_mask: 0.1970, decode.d0.loss_dice: 0.5420, decode.d1.loss_cls: 0.0775, decode.d1.loss_mask: 0.1904, decode.d1.loss_dice: 0.5242, decode.d2.loss_cls: 0.0700, decode.d2.loss_mask: 0.1900, decode.d2.loss_dice: 0.5227, decode.d3.loss_cls: 0.0639, decode.d3.loss_mask: 0.1897, decode.d3.loss_dice: 0.5168, decode.d4.loss_cls: 0.0666, decode.d4.loss_mask: 0.1901, decode.d4.loss_dice: 0.5188, decode.d5.loss_cls: 0.0604, decode.d5.loss_mask: 0.1896, decode.d5.loss_dice: 0.5184, decode.d6.loss_cls: 0.0593, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.5168, decode.d7.loss_cls: 0.0596, 
decode.d7.loss_mask: 0.1897, decode.d7.loss_dice: 0.5142, decode.d8.loss_cls: 0.0630, decode.d8.loss_mask: 0.1893, decode.d8.loss_dice: 0.5173, loss: 7.9948 +2022-05-11 01:37:16,151 - mmseg - INFO - Iter [57750/80000] lr: 3.993e-07, eta: 15:04:50, time: 1.814, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0732, decode.loss_mask: 0.1897, decode.loss_dice: 0.5069, decode.d0.loss_cls: 0.3062, decode.d0.loss_mask: 0.1986, decode.d0.loss_dice: 0.5324, decode.d1.loss_cls: 0.0863, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5182, decode.d2.loss_cls: 0.0765, decode.d2.loss_mask: 0.1911, decode.d2.loss_dice: 0.5113, decode.d3.loss_cls: 0.0707, decode.d3.loss_mask: 0.1902, decode.d3.loss_dice: 0.5105, decode.d4.loss_cls: 0.0719, decode.d4.loss_mask: 0.1900, decode.d4.loss_dice: 0.5100, decode.d5.loss_cls: 0.0754, decode.d5.loss_mask: 0.1900, decode.d5.loss_dice: 0.5097, decode.d6.loss_cls: 0.0696, decode.d6.loss_mask: 0.1891, decode.d6.loss_dice: 0.5050, decode.d7.loss_cls: 0.0723, decode.d7.loss_mask: 0.1897, decode.d7.loss_dice: 0.5073, decode.d8.loss_cls: 0.0679, decode.d8.loss_mask: 0.1895, decode.d8.loss_dice: 0.5073, loss: 7.9978 +2022-05-11 01:38:44,874 - mmseg - INFO - Iter [57800/80000] lr: 3.985e-07, eta: 15:01:33, time: 1.774, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0528, decode.loss_mask: 0.1868, decode.loss_dice: 0.5176, decode.d0.loss_cls: 0.2872, decode.d0.loss_mask: 0.1927, decode.d0.loss_dice: 0.5412, decode.d1.loss_cls: 0.0661, decode.d1.loss_mask: 0.1878, decode.d1.loss_dice: 0.5245, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1868, decode.d2.loss_dice: 0.5209, decode.d3.loss_cls: 0.0603, decode.d3.loss_mask: 0.1867, decode.d3.loss_dice: 0.5214, decode.d4.loss_cls: 0.0565, decode.d4.loss_mask: 0.1864, decode.d4.loss_dice: 0.5200, decode.d5.loss_cls: 0.0620, decode.d5.loss_mask: 0.1865, decode.d5.loss_dice: 0.5215, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1860, decode.d6.loss_dice: 0.5201, decode.d7.loss_cls: 
0.0529, decode.d7.loss_mask: 0.1867, decode.d7.loss_dice: 0.5216, decode.d8.loss_cls: 0.0554, decode.d8.loss_mask: 0.1865, decode.d8.loss_dice: 0.5187, loss: 7.9067 +2022-05-11 01:40:16,176 - mmseg - INFO - Iter [57850/80000] lr: 3.976e-07, eta: 14:58:22, time: 1.826, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0590, decode.loss_mask: 0.1921, decode.loss_dice: 0.5054, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.1995, decode.d0.loss_dice: 0.5299, decode.d1.loss_cls: 0.0723, decode.d1.loss_mask: 0.1936, decode.d1.loss_dice: 0.5123, decode.d2.loss_cls: 0.0651, decode.d2.loss_mask: 0.1926, decode.d2.loss_dice: 0.5146, decode.d3.loss_cls: 0.0634, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5091, decode.d4.loss_cls: 0.0643, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5104, decode.d5.loss_cls: 0.0580, decode.d5.loss_mask: 0.1922, decode.d5.loss_dice: 0.5065, decode.d6.loss_cls: 0.0552, decode.d6.loss_mask: 0.1923, decode.d6.loss_dice: 0.5099, decode.d7.loss_cls: 0.0573, decode.d7.loss_mask: 0.1927, decode.d7.loss_dice: 0.5071, decode.d8.loss_cls: 0.0559, decode.d8.loss_mask: 0.1925, decode.d8.loss_dice: 0.5077, loss: 7.8937 +2022-05-11 01:41:48,556 - mmseg - INFO - Iter [57900/80000] lr: 3.967e-07, eta: 14:55:15, time: 1.847, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0642, decode.loss_mask: 0.1839, decode.loss_dice: 0.5156, decode.d0.loss_cls: 0.2997, decode.d0.loss_mask: 0.1909, decode.d0.loss_dice: 0.5454, decode.d1.loss_cls: 0.0780, decode.d1.loss_mask: 0.1853, decode.d1.loss_dice: 0.5265, decode.d2.loss_cls: 0.0776, decode.d2.loss_mask: 0.1850, decode.d2.loss_dice: 0.5239, decode.d3.loss_cls: 0.0700, decode.d3.loss_mask: 0.1844, decode.d3.loss_dice: 0.5245, decode.d4.loss_cls: 0.0714, decode.d4.loss_mask: 0.1837, decode.d4.loss_dice: 0.5174, decode.d5.loss_cls: 0.0700, decode.d5.loss_mask: 0.1846, decode.d5.loss_dice: 0.5168, decode.d6.loss_cls: 0.0666, decode.d6.loss_mask: 0.1840, decode.d6.loss_dice: 0.5161, 
decode.d7.loss_cls: 0.0637, decode.d7.loss_mask: 0.1841, decode.d7.loss_dice: 0.5180, decode.d8.loss_cls: 0.0634, decode.d8.loss_mask: 0.1839, decode.d8.loss_dice: 0.5194, loss: 7.9980 +2022-05-11 01:43:18,198 - mmseg - INFO - Iter [57950/80000] lr: 3.958e-07, eta: 14:52:03, time: 1.790, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0529, decode.loss_mask: 0.1908, decode.loss_dice: 0.5100, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.1985, decode.d0.loss_dice: 0.5283, decode.d1.loss_cls: 0.0702, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.5173, decode.d2.loss_cls: 0.0631, decode.d2.loss_mask: 0.1914, decode.d2.loss_dice: 0.5139, decode.d3.loss_cls: 0.0588, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5132, decode.d4.loss_cls: 0.0560, decode.d4.loss_mask: 0.1908, decode.d4.loss_dice: 0.5136, decode.d5.loss_cls: 0.0596, decode.d5.loss_mask: 0.1908, decode.d5.loss_dice: 0.5119, decode.d6.loss_cls: 0.0576, decode.d6.loss_mask: 0.1906, decode.d6.loss_dice: 0.5111, decode.d7.loss_cls: 0.0544, decode.d7.loss_mask: 0.1907, decode.d7.loss_dice: 0.5106, decode.d8.loss_cls: 0.0587, decode.d8.loss_mask: 0.1904, decode.d8.loss_dice: 0.5097, loss: 7.8841 +2022-05-11 01:44:46,951 - mmseg - INFO - Saving checkpoint at 58000 iterations +2022-05-11 01:45:18,483 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 01:45:18,491 - mmseg - INFO - Iter [58000/80000] lr: 3.949e-07, eta: 14:49:59, time: 2.407, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0545, decode.loss_mask: 0.1879, decode.loss_dice: 0.5093, decode.d0.loss_cls: 0.3012, decode.d0.loss_mask: 0.1949, decode.d0.loss_dice: 0.5350, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1897, decode.d1.loss_dice: 0.5133, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 0.1887, decode.d2.loss_dice: 0.5122, decode.d3.loss_cls: 0.0605, decode.d3.loss_mask: 0.1883, decode.d3.loss_dice: 0.5140, decode.d4.loss_cls: 0.0563, decode.d4.loss_mask: 0.1880, 
decode.d4.loss_dice: 0.5094, decode.d5.loss_cls: 0.0560, decode.d5.loss_mask: 0.1882, decode.d5.loss_dice: 0.5094, decode.d6.loss_cls: 0.0548, decode.d6.loss_mask: 0.1877, decode.d6.loss_dice: 0.5146, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1878, decode.d7.loss_dice: 0.5092, decode.d8.loss_cls: 0.0553, decode.d8.loss_mask: 0.1880, decode.d8.loss_dice: 0.5076, loss: 7.8451 +2022-05-11 01:47:14,303 - mmseg - INFO - per class results: +2022-05-11 01:47:14,310 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.6 | 99.23 | +| sidewalk | 88.31 | 93.9 | +| building | 94.35 | 96.94 | +| wall | 69.76 | 80.59 | +| fence | 74.05 | 81.86 | +| pole | 70.93 | 83.45 | +| traffic light | 77.09 | 88.23 | +| traffic sign | 84.01 | 90.87 | +| vegetation | 93.32 | 97.06 | +| terrain | 65.28 | 72.38 | +| sky | 95.74 | 98.51 | +| person | 86.78 | 93.67 | +| rider | 74.48 | 85.2 | +| car | 96.17 | 98.25 | +| truck | 81.57 | 94.38 | +| bus | 93.68 | 96.57 | +| train | 87.97 | 91.06 | +| motorcycle | 77.73 | 86.63 | +| bicycle | 82.88 | 91.82 | ++---------------+-------+-------+ +2022-05-11 01:47:14,310 - mmseg - INFO - Summary: +2022-05-11 01:47:14,311 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.96 | 83.83 | 90.56 | ++-------+-------+-------+ +2022-05-11 01:47:14,314 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 01:47:14,314 - mmseg - INFO - Iter(val) [32] aAcc: 0.9696, mIoU: 0.8383, mAcc: 0.9056, IoU.road: 0.9860, IoU.sidewalk: 0.8831, IoU.building: 0.9435, IoU.wall: 0.6976, IoU.fence: 0.7405, IoU.pole: 0.7093, IoU.traffic light: 0.7709, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9332, IoU.terrain: 0.6528, IoU.sky: 0.9574, IoU.person: 0.8678, IoU.rider: 0.7448, IoU.car: 0.9617, IoU.truck: 0.8157, IoU.bus: 0.9368, IoU.train: 0.8797, IoU.motorcycle: 0.7773, IoU.bicycle: 0.8288, Acc.road: 0.9923, 
Acc.sidewalk: 0.9390, Acc.building: 0.9694, Acc.wall: 0.8059, Acc.fence: 0.8186, Acc.pole: 0.8345, Acc.traffic light: 0.8823, Acc.traffic sign: 0.9087, Acc.vegetation: 0.9706, Acc.terrain: 0.7238, Acc.sky: 0.9851, Acc.person: 0.9367, Acc.rider: 0.8520, Acc.car: 0.9825, Acc.truck: 0.9438, Acc.bus: 0.9657, Acc.train: 0.9106, Acc.motorcycle: 0.8663, Acc.bicycle: 0.9182 +2022-05-11 01:48:48,345 - mmseg - INFO - Iter [58050/80000] lr: 3.940e-07, eta: 14:51:11, time: 4.199, data_time: 2.385, memory: 69063, decode.loss_cls: 0.0478, decode.loss_mask: 0.1857, decode.loss_dice: 0.5102, decode.d0.loss_cls: 0.2854, decode.d0.loss_mask: 0.1931, decode.d0.loss_dice: 0.5313, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1866, decode.d1.loss_dice: 0.5170, decode.d2.loss_cls: 0.0567, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.5129, decode.d3.loss_cls: 0.0520, decode.d3.loss_mask: 0.1858, decode.d3.loss_dice: 0.5105, decode.d4.loss_cls: 0.0489, decode.d4.loss_mask: 0.1861, decode.d4.loss_dice: 0.5111, decode.d5.loss_cls: 0.0542, decode.d5.loss_mask: 0.1860, decode.d5.loss_dice: 0.5105, decode.d6.loss_cls: 0.0550, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.5120, decode.d7.loss_cls: 0.0572, decode.d7.loss_mask: 0.1856, decode.d7.loss_dice: 0.5115, decode.d8.loss_cls: 0.0471, decode.d8.loss_mask: 0.1856, decode.d8.loss_dice: 0.5102, loss: 7.7813 +2022-05-11 01:50:19,360 - mmseg - INFO - Iter [58100/80000] lr: 3.931e-07, eta: 14:48:03, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0476, decode.loss_mask: 0.1851, decode.loss_dice: 0.5129, decode.d0.loss_cls: 0.2884, decode.d0.loss_mask: 0.1924, decode.d0.loss_dice: 0.5321, decode.d1.loss_cls: 0.0609, decode.d1.loss_mask: 0.1862, decode.d1.loss_dice: 0.5197, decode.d2.loss_cls: 0.0513, decode.d2.loss_mask: 0.1852, decode.d2.loss_dice: 0.5130, decode.d3.loss_cls: 0.0550, decode.d3.loss_mask: 0.1851, decode.d3.loss_dice: 0.5136, decode.d4.loss_cls: 0.0521, decode.d4.loss_mask: 0.1850, 
decode.d4.loss_dice: 0.5110, decode.d5.loss_cls: 0.0490, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.5144, decode.d6.loss_cls: 0.0488, decode.d6.loss_mask: 0.1847, decode.d6.loss_dice: 0.5117, decode.d7.loss_cls: 0.0524, decode.d7.loss_mask: 0.1847, decode.d7.loss_dice: 0.5122, decode.d8.loss_cls: 0.0462, decode.d8.loss_mask: 0.1848, decode.d8.loss_dice: 0.5107, loss: 7.7615 +2022-05-11 01:51:49,507 - mmseg - INFO - Iter [58150/80000] lr: 3.922e-07, eta: 14:44:53, time: 1.804, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0680, decode.loss_mask: 0.1920, decode.loss_dice: 0.5245, decode.d0.loss_cls: 0.2908, decode.d0.loss_mask: 0.2003, decode.d0.loss_dice: 0.5536, decode.d1.loss_cls: 0.0807, decode.d1.loss_mask: 0.1937, decode.d1.loss_dice: 0.5284, decode.d2.loss_cls: 0.0712, decode.d2.loss_mask: 0.1933, decode.d2.loss_dice: 0.5285, decode.d3.loss_cls: 0.0619, decode.d3.loss_mask: 0.1926, decode.d3.loss_dice: 0.5243, decode.d4.loss_cls: 0.0634, decode.d4.loss_mask: 0.1927, decode.d4.loss_dice: 0.5220, decode.d5.loss_cls: 0.0605, decode.d5.loss_mask: 0.1922, decode.d5.loss_dice: 0.5243, decode.d6.loss_cls: 0.0617, decode.d6.loss_mask: 0.1921, decode.d6.loss_dice: 0.5242, decode.d7.loss_cls: 0.0638, decode.d7.loss_mask: 0.1920, decode.d7.loss_dice: 0.5222, decode.d8.loss_cls: 0.0687, decode.d8.loss_mask: 0.1921, decode.d8.loss_dice: 0.5256, loss: 8.1015 +2022-05-11 01:53:20,100 - mmseg - INFO - Iter [58200/80000] lr: 3.913e-07, eta: 14:41:46, time: 1.812, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0647, decode.loss_mask: 0.1872, decode.loss_dice: 0.5116, decode.d0.loss_cls: 0.3036, decode.d0.loss_mask: 0.1955, decode.d0.loss_dice: 0.5449, decode.d1.loss_cls: 0.0827, decode.d1.loss_mask: 0.1891, decode.d1.loss_dice: 0.5251, decode.d2.loss_cls: 0.0767, decode.d2.loss_mask: 0.1879, decode.d2.loss_dice: 0.5174, decode.d3.loss_cls: 0.0674, decode.d3.loss_mask: 0.1878, decode.d3.loss_dice: 0.5118, decode.d4.loss_cls: 0.0655, decode.d4.loss_mask: 
0.1875, decode.d4.loss_dice: 0.5131, decode.d5.loss_cls: 0.0624, decode.d5.loss_mask: 0.1876, decode.d5.loss_dice: 0.5119, decode.d6.loss_cls: 0.0655, decode.d6.loss_mask: 0.1874, decode.d6.loss_dice: 0.5153, decode.d7.loss_cls: 0.0688, decode.d7.loss_mask: 0.1875, decode.d7.loss_dice: 0.5135, decode.d8.loss_cls: 0.0636, decode.d8.loss_mask: 0.1877, decode.d8.loss_dice: 0.5158, loss: 7.9864 +2022-05-11 01:54:53,021 - mmseg - INFO - Iter [58250/80000] lr: 3.904e-07, eta: 14:38:44, time: 1.859, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0562, decode.loss_mask: 0.1882, decode.loss_dice: 0.5077, decode.d0.loss_cls: 0.2933, decode.d0.loss_mask: 0.1956, decode.d0.loss_dice: 0.5352, decode.d1.loss_cls: 0.0666, decode.d1.loss_mask: 0.1901, decode.d1.loss_dice: 0.5175, decode.d2.loss_cls: 0.0626, decode.d2.loss_mask: 0.1887, decode.d2.loss_dice: 0.5101, decode.d3.loss_cls: 0.0580, decode.d3.loss_mask: 0.1888, decode.d3.loss_dice: 0.5091, decode.d4.loss_cls: 0.0560, decode.d4.loss_mask: 0.1888, decode.d4.loss_dice: 0.5094, decode.d5.loss_cls: 0.0597, decode.d5.loss_mask: 0.1885, decode.d5.loss_dice: 0.5129, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1877, decode.d6.loss_dice: 0.5104, decode.d7.loss_cls: 0.0577, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.5073, decode.d8.loss_cls: 0.0536, decode.d8.loss_mask: 0.1882, decode.d8.loss_dice: 0.5077, loss: 7.8394 +2022-05-11 01:56:23,178 - mmseg - INFO - Iter [58300/80000] lr: 3.895e-07, eta: 14:35:37, time: 1.803, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0560, decode.loss_mask: 0.1834, decode.loss_dice: 0.5217, decode.d0.loss_cls: 0.2935, decode.d0.loss_mask: 0.1908, decode.d0.loss_dice: 0.5466, decode.d1.loss_cls: 0.0684, decode.d1.loss_mask: 0.1854, decode.d1.loss_dice: 0.5296, decode.d2.loss_cls: 0.0597, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.5264, decode.d3.loss_cls: 0.0575, decode.d3.loss_mask: 0.1839, decode.d3.loss_dice: 0.5212, decode.d4.loss_cls: 0.0578, 
decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.5234, decode.d5.loss_cls: 0.0598, decode.d5.loss_mask: 0.1841, decode.d5.loss_dice: 0.5242, decode.d6.loss_cls: 0.0568, decode.d6.loss_mask: 0.1839, decode.d6.loss_dice: 0.5210, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.1840, decode.d7.loss_dice: 0.5249, decode.d8.loss_cls: 0.0542, decode.d8.loss_mask: 0.1837, decode.d8.loss_dice: 0.5209, loss: 7.9300 +2022-05-11 01:57:53,109 - mmseg - INFO - Iter [58350/80000] lr: 3.886e-07, eta: 14:32:31, time: 1.799, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0548, decode.loss_mask: 0.1847, decode.loss_dice: 0.5139, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.1918, decode.d0.loss_dice: 0.5460, decode.d1.loss_cls: 0.0608, decode.d1.loss_mask: 0.1866, decode.d1.loss_dice: 0.5273, decode.d2.loss_cls: 0.0570, decode.d2.loss_mask: 0.1861, decode.d2.loss_dice: 0.5243, decode.d3.loss_cls: 0.0548, decode.d3.loss_mask: 0.1858, decode.d3.loss_dice: 0.5176, decode.d4.loss_cls: 0.0552, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5175, decode.d5.loss_cls: 0.0514, decode.d5.loss_mask: 0.1863, decode.d5.loss_dice: 0.5152, decode.d6.loss_cls: 0.0512, decode.d6.loss_mask: 0.1852, decode.d6.loss_dice: 0.5159, decode.d7.loss_cls: 0.0533, decode.d7.loss_mask: 0.1853, decode.d7.loss_dice: 0.5182, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.5163, loss: 7.8641 +2022-05-11 01:59:22,346 - mmseg - INFO - Iter [58400/80000] lr: 3.877e-07, eta: 14:29:25, time: 1.785, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0520, decode.loss_mask: 0.1900, decode.loss_dice: 0.5124, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 0.1974, decode.d0.loss_dice: 0.5376, decode.d1.loss_cls: 0.0696, decode.d1.loss_mask: 0.1920, decode.d1.loss_dice: 0.5230, decode.d2.loss_cls: 0.0627, decode.d2.loss_mask: 0.1917, decode.d2.loss_dice: 0.5198, decode.d3.loss_cls: 0.0580, decode.d3.loss_mask: 0.1916, decode.d3.loss_dice: 0.5147, decode.d4.loss_cls: 
0.0605, decode.d4.loss_mask: 0.1902, decode.d4.loss_dice: 0.5127, decode.d5.loss_cls: 0.0564, decode.d5.loss_mask: 0.1896, decode.d5.loss_dice: 0.5179, decode.d6.loss_cls: 0.0570, decode.d6.loss_mask: 0.1895, decode.d6.loss_dice: 0.5129, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1901, decode.d7.loss_dice: 0.5160, decode.d8.loss_cls: 0.0525, decode.d8.loss_mask: 0.1898, decode.d8.loss_dice: 0.5133, loss: 7.9072 +2022-05-11 02:00:56,997 - mmseg - INFO - Iter [58450/80000] lr: 3.868e-07, eta: 14:26:30, time: 1.893, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0540, decode.loss_mask: 0.1900, decode.loss_dice: 0.5040, decode.d0.loss_cls: 0.2971, decode.d0.loss_mask: 0.1978, decode.d0.loss_dice: 0.5215, decode.d1.loss_cls: 0.0779, decode.d1.loss_mask: 0.1907, decode.d1.loss_dice: 0.5087, decode.d2.loss_cls: 0.0640, decode.d2.loss_mask: 0.1897, decode.d2.loss_dice: 0.5064, decode.d3.loss_cls: 0.0599, decode.d3.loss_mask: 0.1896, decode.d3.loss_dice: 0.5036, decode.d4.loss_cls: 0.0601, decode.d4.loss_mask: 0.1893, decode.d4.loss_dice: 0.5040, decode.d5.loss_cls: 0.0637, decode.d5.loss_mask: 0.1893, decode.d5.loss_dice: 0.5052, decode.d6.loss_cls: 0.0637, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.4999, decode.d7.loss_cls: 0.0621, decode.d7.loss_mask: 0.1901, decode.d7.loss_dice: 0.5042, decode.d8.loss_cls: 0.0549, decode.d8.loss_mask: 0.1900, decode.d8.loss_dice: 0.5002, loss: 7.8211 +2022-05-11 02:02:26,229 - mmseg - INFO - Iter [58500/80000] lr: 3.859e-07, eta: 14:23:25, time: 1.785, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0565, decode.loss_mask: 0.1800, decode.loss_dice: 0.5035, decode.d0.loss_cls: 0.3002, decode.d0.loss_mask: 0.1857, decode.d0.loss_dice: 0.5293, decode.d1.loss_cls: 0.0718, decode.d1.loss_mask: 0.1814, decode.d1.loss_dice: 0.5135, decode.d2.loss_cls: 0.0653, decode.d2.loss_mask: 0.1812, decode.d2.loss_dice: 0.5098, decode.d3.loss_cls: 0.0626, decode.d3.loss_mask: 0.1805, decode.d3.loss_dice: 0.5058, 
decode.d4.loss_cls: 0.0604, decode.d4.loss_mask: 0.1808, decode.d4.loss_dice: 0.5029, decode.d5.loss_cls: 0.0597, decode.d5.loss_mask: 0.1808, decode.d5.loss_dice: 0.5037, decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.5022, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1803, decode.d7.loss_dice: 0.5030, decode.d8.loss_cls: 0.0587, decode.d8.loss_mask: 0.1799, decode.d8.loss_dice: 0.5045, loss: 7.7429 +2022-05-11 02:03:56,819 - mmseg - INFO - Iter [58550/80000] lr: 3.850e-07, eta: 14:20:24, time: 1.812, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0534, decode.loss_mask: 0.1860, decode.loss_dice: 0.5072, decode.d0.loss_cls: 0.2887, decode.d0.loss_mask: 0.1910, decode.d0.loss_dice: 0.5295, decode.d1.loss_cls: 0.0674, decode.d1.loss_mask: 0.1872, decode.d1.loss_dice: 0.5151, decode.d2.loss_cls: 0.0579, decode.d2.loss_mask: 0.1867, decode.d2.loss_dice: 0.5114, decode.d3.loss_cls: 0.0532, decode.d3.loss_mask: 0.1868, decode.d3.loss_dice: 0.5101, decode.d4.loss_cls: 0.0547, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 0.5086, decode.d5.loss_cls: 0.0495, decode.d5.loss_mask: 0.1865, decode.d5.loss_dice: 0.5100, decode.d6.loss_cls: 0.0516, decode.d6.loss_mask: 0.1861, decode.d6.loss_dice: 0.5097, decode.d7.loss_cls: 0.0524, decode.d7.loss_mask: 0.1859, decode.d7.loss_dice: 0.5104, decode.d8.loss_cls: 0.0512, decode.d8.loss_mask: 0.1857, decode.d8.loss_dice: 0.5112, loss: 7.7712 +2022-05-11 02:05:25,745 - mmseg - INFO - Iter [58600/80000] lr: 3.841e-07, eta: 14:17:20, time: 1.778, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0668, decode.loss_mask: 0.1907, decode.loss_dice: 0.5219, decode.d0.loss_cls: 0.2954, decode.d0.loss_mask: 0.1974, decode.d0.loss_dice: 0.5401, decode.d1.loss_cls: 0.0719, decode.d1.loss_mask: 0.1922, decode.d1.loss_dice: 0.5267, decode.d2.loss_cls: 0.0682, decode.d2.loss_mask: 0.1909, decode.d2.loss_dice: 0.5242, decode.d3.loss_cls: 0.0698, decode.d3.loss_mask: 0.1907, decode.d3.loss_dice: 
0.5197, decode.d4.loss_cls: 0.0633, decode.d4.loss_mask: 0.1903, decode.d4.loss_dice: 0.5199, decode.d5.loss_cls: 0.0689, decode.d5.loss_mask: 0.1902, decode.d5.loss_dice: 0.5212, decode.d6.loss_cls: 0.0583, decode.d6.loss_mask: 0.1906, decode.d6.loss_dice: 0.5205, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.1902, decode.d7.loss_dice: 0.5201, decode.d8.loss_cls: 0.0559, decode.d8.loss_mask: 0.1905, decode.d8.loss_dice: 0.5219, loss: 8.0293 +2022-05-11 02:06:58,701 - mmseg - INFO - Iter [58650/80000] lr: 3.832e-07, eta: 14:14:25, time: 1.859, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0593, decode.loss_mask: 0.1897, decode.loss_dice: 0.5147, decode.d0.loss_cls: 0.2995, decode.d0.loss_mask: 0.1964, decode.d0.loss_dice: 0.5379, decode.d1.loss_cls: 0.0705, decode.d1.loss_mask: 0.1911, decode.d1.loss_dice: 0.5236, decode.d2.loss_cls: 0.0596, decode.d2.loss_mask: 0.1904, decode.d2.loss_dice: 0.5232, decode.d3.loss_cls: 0.0581, decode.d3.loss_mask: 0.1899, decode.d3.loss_dice: 0.5177, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1899, decode.d4.loss_dice: 0.5196, decode.d5.loss_cls: 0.0514, decode.d5.loss_mask: 0.1899, decode.d5.loss_dice: 0.5170, decode.d6.loss_cls: 0.0564, decode.d6.loss_mask: 0.1899, decode.d6.loss_dice: 0.5166, decode.d7.loss_cls: 0.0529, decode.d7.loss_mask: 0.1899, decode.d7.loss_dice: 0.5150, decode.d8.loss_cls: 0.0548, decode.d8.loss_mask: 0.1896, decode.d8.loss_dice: 0.5148, loss: 7.9310 +2022-05-11 02:08:30,248 - mmseg - INFO - Iter [58700/80000] lr: 3.823e-07, eta: 14:11:29, time: 1.831, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0519, decode.loss_mask: 0.1896, decode.loss_dice: 0.5062, decode.d0.loss_cls: 0.2917, decode.d0.loss_mask: 0.1964, decode.d0.loss_dice: 0.5369, decode.d1.loss_cls: 0.0699, decode.d1.loss_mask: 0.1906, decode.d1.loss_dice: 0.5199, decode.d2.loss_cls: 0.0587, decode.d2.loss_mask: 0.1906, decode.d2.loss_dice: 0.5180, decode.d3.loss_cls: 0.0530, decode.d3.loss_mask: 0.1896, 
decode.d3.loss_dice: 0.5087, decode.d4.loss_cls: 0.0508, decode.d4.loss_mask: 0.1897, decode.d4.loss_dice: 0.5092, decode.d5.loss_cls: 0.0563, decode.d5.loss_mask: 0.1898, decode.d5.loss_dice: 0.5076, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.5065, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1900, decode.d7.loss_dice: 0.5113, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1898, decode.d8.loss_dice: 0.5072, loss: 7.8258 +2022-05-11 02:10:01,117 - mmseg - INFO - Iter [58750/80000] lr: 3.814e-07, eta: 14:08:31, time: 1.812, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0560, decode.loss_mask: 0.1849, decode.loss_dice: 0.5134, decode.d0.loss_cls: 0.2948, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5362, decode.d1.loss_cls: 0.0798, decode.d1.loss_mask: 0.1857, decode.d1.loss_dice: 0.5226, decode.d2.loss_cls: 0.0689, decode.d2.loss_mask: 0.1857, decode.d2.loss_dice: 0.5163, decode.d3.loss_cls: 0.0629, decode.d3.loss_mask: 0.1852, decode.d3.loss_dice: 0.5152, decode.d4.loss_cls: 0.0676, decode.d4.loss_mask: 0.1854, decode.d4.loss_dice: 0.5132, decode.d5.loss_cls: 0.0573, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.5152, decode.d6.loss_cls: 0.0590, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.5149, decode.d7.loss_cls: 0.0591, decode.d7.loss_mask: 0.1853, decode.d7.loss_dice: 0.5155, decode.d8.loss_cls: 0.0565, decode.d8.loss_mask: 0.1850, decode.d8.loss_dice: 0.5145, loss: 7.8971 +2022-05-11 02:11:33,835 - mmseg - INFO - Iter [58800/80000] lr: 3.805e-07, eta: 14:05:38, time: 1.859, data_time: 0.069, memory: 69063, decode.loss_cls: 0.0581, decode.loss_mask: 0.1858, decode.loss_dice: 0.5094, decode.d0.loss_cls: 0.2969, decode.d0.loss_mask: 0.1930, decode.d0.loss_dice: 0.5307, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1866, decode.d1.loss_dice: 0.5165, decode.d2.loss_cls: 0.0654, decode.d2.loss_mask: 0.1861, decode.d2.loss_dice: 0.5109, decode.d3.loss_cls: 0.0590, decode.d3.loss_mask: 
0.1853, decode.d3.loss_dice: 0.5075, decode.d4.loss_cls: 0.0611, decode.d4.loss_mask: 0.1852, decode.d4.loss_dice: 0.5063, decode.d5.loss_cls: 0.0596, decode.d5.loss_mask: 0.1855, decode.d5.loss_dice: 0.5040, decode.d6.loss_cls: 0.0590, decode.d6.loss_mask: 0.1857, decode.d6.loss_dice: 0.5048, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1857, decode.d7.loss_dice: 0.5029, decode.d8.loss_cls: 0.0600, decode.d8.loss_mask: 0.1855, decode.d8.loss_dice: 0.5068, loss: 7.8164 +2022-05-11 02:13:05,058 - mmseg - INFO - Iter [58850/80000] lr: 3.796e-07, eta: 14:02:43, time: 1.824, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0495, decode.loss_mask: 0.1854, decode.loss_dice: 0.5134, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.1906, decode.d0.loss_dice: 0.5311, decode.d1.loss_cls: 0.0602, decode.d1.loss_mask: 0.1866, decode.d1.loss_dice: 0.5202, decode.d2.loss_cls: 0.0558, decode.d2.loss_mask: 0.1860, decode.d2.loss_dice: 0.5182, decode.d3.loss_cls: 0.0565, decode.d3.loss_mask: 0.1852, decode.d3.loss_dice: 0.5142, decode.d4.loss_cls: 0.0529, decode.d4.loss_mask: 0.1851, decode.d4.loss_dice: 0.5128, decode.d5.loss_cls: 0.0533, decode.d5.loss_mask: 0.1853, decode.d5.loss_dice: 0.5145, decode.d6.loss_cls: 0.0561, decode.d6.loss_mask: 0.1855, decode.d6.loss_dice: 0.5137, decode.d7.loss_cls: 0.0491, decode.d7.loss_mask: 0.1856, decode.d7.loss_dice: 0.5152, decode.d8.loss_cls: 0.0457, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.5126, loss: 7.8025 +2022-05-11 02:14:35,233 - mmseg - INFO - Iter [58900/80000] lr: 3.787e-07, eta: 13:59:47, time: 1.803, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0476, decode.loss_mask: 0.1930, decode.loss_dice: 0.5128, decode.d0.loss_cls: 0.2963, decode.d0.loss_mask: 0.1998, decode.d0.loss_dice: 0.5390, decode.d1.loss_cls: 0.0678, decode.d1.loss_mask: 0.1944, decode.d1.loss_dice: 0.5215, decode.d2.loss_cls: 0.0587, decode.d2.loss_mask: 0.1944, decode.d2.loss_dice: 0.5165, decode.d3.loss_cls: 0.0524, 
decode.d3.loss_mask: 0.1940, decode.d3.loss_dice: 0.5138, decode.d4.loss_cls: 0.0550, decode.d4.loss_mask: 0.1932, decode.d4.loss_dice: 0.5171, decode.d5.loss_cls: 0.0497, decode.d5.loss_mask: 0.1925, decode.d5.loss_dice: 0.5147, decode.d6.loss_cls: 0.0542, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5107, decode.d7.loss_cls: 0.0477, decode.d7.loss_mask: 0.1928, decode.d7.loss_dice: 0.5109, decode.d8.loss_cls: 0.0498, decode.d8.loss_mask: 0.1930, decode.d8.loss_dice: 0.5142, loss: 7.8905 +2022-05-11 02:16:06,349 - mmseg - INFO - Iter [58950/80000] lr: 3.778e-07, eta: 13:56:53, time: 1.822, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0545, decode.loss_mask: 0.1870, decode.loss_dice: 0.5129, decode.d0.loss_cls: 0.3029, decode.d0.loss_mask: 0.1957, decode.d0.loss_dice: 0.5378, decode.d1.loss_cls: 0.0740, decode.d1.loss_mask: 0.1890, decode.d1.loss_dice: 0.5182, decode.d2.loss_cls: 0.0685, decode.d2.loss_mask: 0.1874, decode.d2.loss_dice: 0.5141, decode.d3.loss_cls: 0.0609, decode.d3.loss_mask: 0.1875, decode.d3.loss_dice: 0.5159, decode.d4.loss_cls: 0.0561, decode.d4.loss_mask: 0.1872, decode.d4.loss_dice: 0.5091, decode.d5.loss_cls: 0.0539, decode.d5.loss_mask: 0.1870, decode.d5.loss_dice: 0.5107, decode.d6.loss_cls: 0.0551, decode.d6.loss_mask: 0.1870, decode.d6.loss_dice: 0.5100, decode.d7.loss_cls: 0.0594, decode.d7.loss_mask: 0.1865, decode.d7.loss_dice: 0.5105, decode.d8.loss_cls: 0.0547, decode.d8.loss_mask: 0.1866, decode.d8.loss_dice: 0.5080, loss: 7.8681 +2022-05-11 02:17:39,156 - mmseg - INFO - Saving checkpoint at 59000 iterations +2022-05-11 02:18:09,389 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 02:18:09,397 - mmseg - INFO - Iter [59000/80000] lr: 3.769e-07, eta: 13:55:00, time: 2.457, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0527, decode.loss_mask: 0.1852, decode.loss_dice: 0.5031, decode.d0.loss_cls: 0.3033, decode.d0.loss_mask: 0.1927, decode.d0.loss_dice: 0.5332, 
decode.d1.loss_cls: 0.0756, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5122, decode.d2.loss_cls: 0.0663, decode.d2.loss_mask: 0.1860, decode.d2.loss_dice: 0.5091, decode.d3.loss_cls: 0.0521, decode.d3.loss_mask: 0.1856, decode.d3.loss_dice: 0.5034, decode.d4.loss_cls: 0.0548, decode.d4.loss_mask: 0.1857, decode.d4.loss_dice: 0.5047, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1853, decode.d5.loss_dice: 0.5056, decode.d6.loss_cls: 0.0555, decode.d6.loss_mask: 0.1855, decode.d6.loss_dice: 0.5049, decode.d7.loss_cls: 0.0601, decode.d7.loss_mask: 0.1851, decode.d7.loss_dice: 0.5076, decode.d8.loss_cls: 0.0480, decode.d8.loss_mask: 0.1854, decode.d8.loss_dice: 0.5033, loss: 7.7719 +2022-05-11 02:20:05,758 - mmseg - INFO - per class results: +2022-05-11 02:20:05,762 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.64 | 99.26 | +| sidewalk | 88.98 | 94.07 | +| building | 94.32 | 97.12 | +| wall | 67.37 | 81.06 | +| fence | 74.36 | 81.83 | +| pole | 71.4 | 83.84 | +| traffic light | 77.1 | 86.37 | +| traffic sign | 84.01 | 90.53 | +| vegetation | 93.29 | 96.65 | +| terrain | 68.19 | 78.4 | +| sky | 95.66 | 98.57 | +| person | 86.73 | 93.77 | +| rider | 74.0 | 84.3 | +| car | 96.17 | 98.19 | +| truck | 91.68 | 94.19 | +| bus | 93.62 | 96.49 | +| train | 88.11 | 90.68 | +| motorcycle | 76.84 | 87.15 | +| bicycle | 82.71 | 91.9 | ++---------------+-------+-------+ +2022-05-11 02:20:05,762 - mmseg - INFO - Summary: +2022-05-11 02:20:05,762 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.38 | 90.76 | ++------+-------+-------+ +2022-05-11 02:20:05,765 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 02:20:05,766 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8438, mAcc: 0.9076, IoU.road: 0.9864, IoU.sidewalk: 0.8898, IoU.building: 0.9432, IoU.wall: 0.6737, IoU.fence: 0.7436, 
IoU.pole: 0.7140, IoU.traffic light: 0.7710, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9329, IoU.terrain: 0.6819, IoU.sky: 0.9566, IoU.person: 0.8673, IoU.rider: 0.7400, IoU.car: 0.9617, IoU.truck: 0.9168, IoU.bus: 0.9362, IoU.train: 0.8811, IoU.motorcycle: 0.7684, IoU.bicycle: 0.8271, Acc.road: 0.9926, Acc.sidewalk: 0.9407, Acc.building: 0.9712, Acc.wall: 0.8106, Acc.fence: 0.8183, Acc.pole: 0.8384, Acc.traffic light: 0.8637, Acc.traffic sign: 0.9053, Acc.vegetation: 0.9665, Acc.terrain: 0.7840, Acc.sky: 0.9857, Acc.person: 0.9377, Acc.rider: 0.8430, Acc.car: 0.9819, Acc.truck: 0.9419, Acc.bus: 0.9649, Acc.train: 0.9068, Acc.motorcycle: 0.8715, Acc.bicycle: 0.9190 +2022-05-11 02:21:36,841 - mmseg - INFO - Iter [59050/80000] lr: 3.760e-07, eta: 13:55:49, time: 4.152, data_time: 2.348, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1860, decode.loss_dice: 0.5071, decode.d0.loss_cls: 0.3061, decode.d0.loss_mask: 0.1931, decode.d0.loss_dice: 0.5335, decode.d1.loss_cls: 0.0654, decode.d1.loss_mask: 0.1868, decode.d1.loss_dice: 0.5178, decode.d2.loss_cls: 0.0586, decode.d2.loss_mask: 0.1856, decode.d2.loss_dice: 0.5150, decode.d3.loss_cls: 0.0598, decode.d3.loss_mask: 0.1864, decode.d3.loss_dice: 0.5103, decode.d4.loss_cls: 0.0577, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 0.5075, decode.d5.loss_cls: 0.0501, decode.d5.loss_mask: 0.1859, decode.d5.loss_dice: 0.5096, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1862, decode.d6.loss_dice: 0.5075, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1861, decode.d7.loss_dice: 0.5082, decode.d8.loss_cls: 0.0535, decode.d8.loss_mask: 0.1858, decode.d8.loss_dice: 0.5090, loss: 7.8074 +2022-05-11 02:23:07,783 - mmseg - INFO - Iter [59100/80000] lr: 3.751e-07, eta: 13:52:55, time: 1.820, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0540, decode.loss_mask: 0.1898, decode.loss_dice: 0.5142, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 0.1972, decode.d0.loss_dice: 0.5343, decode.d1.loss_cls: 
0.0720, decode.d1.loss_mask: 0.1910, decode.d1.loss_dice: 0.5197, decode.d2.loss_cls: 0.0622, decode.d2.loss_mask: 0.1904, decode.d2.loss_dice: 0.5186, decode.d3.loss_cls: 0.0558, decode.d3.loss_mask: 0.1898, decode.d3.loss_dice: 0.5152, decode.d4.loss_cls: 0.0576, decode.d4.loss_mask: 0.1900, decode.d4.loss_dice: 0.5102, decode.d5.loss_cls: 0.0556, decode.d5.loss_mask: 0.1899, decode.d5.loss_dice: 0.5113, decode.d6.loss_cls: 0.0601, decode.d6.loss_mask: 0.1894, decode.d6.loss_dice: 0.5124, decode.d7.loss_cls: 0.0575, decode.d7.loss_mask: 0.1894, decode.d7.loss_dice: 0.5129, decode.d8.loss_cls: 0.0581, decode.d8.loss_mask: 0.1894, decode.d8.loss_dice: 0.5128, loss: 7.8971 +2022-05-11 02:24:38,893 - mmseg - INFO - Iter [59150/80000] lr: 3.742e-07, eta: 13:50:02, time: 1.822, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0512, decode.loss_mask: 0.1847, decode.loss_dice: 0.5033, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5257, decode.d1.loss_cls: 0.0650, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5142, decode.d2.loss_cls: 0.0598, decode.d2.loss_mask: 0.1852, decode.d2.loss_dice: 0.5116, decode.d3.loss_cls: 0.0532, decode.d3.loss_mask: 0.1850, decode.d3.loss_dice: 0.5054, decode.d4.loss_cls: 0.0518, decode.d4.loss_mask: 0.1847, decode.d4.loss_dice: 0.5046, decode.d5.loss_cls: 0.0554, decode.d5.loss_mask: 0.1845, decode.d5.loss_dice: 0.5019, decode.d6.loss_cls: 0.0574, decode.d6.loss_mask: 0.1844, decode.d6.loss_dice: 0.5063, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1844, decode.d7.loss_dice: 0.5048, decode.d8.loss_cls: 0.0566, decode.d8.loss_mask: 0.1845, decode.d8.loss_dice: 0.5039, loss: 7.7320 +2022-05-11 02:26:11,029 - mmseg - INFO - Iter [59200/80000] lr: 3.733e-07, eta: 13:47:12, time: 1.843, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0548, decode.loss_mask: 0.1898, decode.loss_dice: 0.5023, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.1989, decode.d0.loss_dice: 0.5310, 
decode.d1.loss_cls: 0.0724, decode.d1.loss_mask: 0.1920, decode.d1.loss_dice: 0.5112, decode.d2.loss_cls: 0.0596, decode.d2.loss_mask: 0.1908, decode.d2.loss_dice: 0.5053, decode.d3.loss_cls: 0.0598, decode.d3.loss_mask: 0.1901, decode.d3.loss_dice: 0.5057, decode.d4.loss_cls: 0.0598, decode.d4.loss_mask: 0.1897, decode.d4.loss_dice: 0.5042, decode.d5.loss_cls: 0.0618, decode.d5.loss_mask: 0.1898, decode.d5.loss_dice: 0.5030, decode.d6.loss_cls: 0.0596, decode.d6.loss_mask: 0.1899, decode.d6.loss_dice: 0.5035, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.1900, decode.d7.loss_dice: 0.5016, decode.d8.loss_cls: 0.0528, decode.d8.loss_mask: 0.1899, decode.d8.loss_dice: 0.5005, loss: 7.8160 +2022-05-11 02:27:41,760 - mmseg - INFO - Iter [59250/80000] lr: 3.724e-07, eta: 13:44:20, time: 1.815, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0554, decode.loss_mask: 0.1851, decode.loss_dice: 0.5118, decode.d0.loss_cls: 0.3031, decode.d0.loss_mask: 0.1920, decode.d0.loss_dice: 0.5372, decode.d1.loss_cls: 0.0855, decode.d1.loss_mask: 0.1870, decode.d1.loss_dice: 0.5210, decode.d2.loss_cls: 0.0682, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.5170, decode.d3.loss_cls: 0.0599, decode.d3.loss_mask: 0.1852, decode.d3.loss_dice: 0.5143, decode.d4.loss_cls: 0.0635, decode.d4.loss_mask: 0.1853, decode.d4.loss_dice: 0.5153, decode.d5.loss_cls: 0.0632, decode.d5.loss_mask: 0.1854, decode.d5.loss_dice: 0.5168, decode.d6.loss_cls: 0.0570, decode.d6.loss_mask: 0.1855, decode.d6.loss_dice: 0.5108, decode.d7.loss_cls: 0.0593, decode.d7.loss_mask: 0.1854, decode.d7.loss_dice: 0.5139, decode.d8.loss_cls: 0.0620, decode.d8.loss_mask: 0.1855, decode.d8.loss_dice: 0.5120, loss: 7.9100 +2022-05-11 02:29:12,828 - mmseg - INFO - Iter [59300/80000] lr: 3.715e-07, eta: 13:41:29, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0474, decode.loss_mask: 0.1869, decode.loss_dice: 0.5073, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.1965, decode.d0.loss_dice: 
0.5288, decode.d1.loss_cls: 0.0628, decode.d1.loss_mask: 0.1892, decode.d1.loss_dice: 0.5126, decode.d2.loss_cls: 0.0542, decode.d2.loss_mask: 0.1885, decode.d2.loss_dice: 0.5072, decode.d3.loss_cls: 0.0557, decode.d3.loss_mask: 0.1885, decode.d3.loss_dice: 0.5060, decode.d4.loss_cls: 0.0493, decode.d4.loss_mask: 0.1878, decode.d4.loss_dice: 0.5047, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1876, decode.d5.loss_dice: 0.5054, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1872, decode.d6.loss_dice: 0.5060, decode.d7.loss_cls: 0.0551, decode.d7.loss_mask: 0.1872, decode.d7.loss_dice: 0.5039, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1872, decode.d8.loss_dice: 0.5022, loss: 7.7419 +2022-05-11 02:30:45,070 - mmseg - INFO - Iter [59350/80000] lr: 3.706e-07, eta: 13:38:41, time: 1.846, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0601, decode.loss_mask: 0.1859, decode.loss_dice: 0.5181, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1943, decode.d0.loss_dice: 0.5428, decode.d1.loss_cls: 0.0727, decode.d1.loss_mask: 0.1874, decode.d1.loss_dice: 0.5279, decode.d2.loss_cls: 0.0652, decode.d2.loss_mask: 0.1871, decode.d2.loss_dice: 0.5202, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1863, decode.d3.loss_dice: 0.5163, decode.d4.loss_cls: 0.0585, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 0.5188, decode.d5.loss_cls: 0.0559, decode.d5.loss_mask: 0.1863, decode.d5.loss_dice: 0.5192, decode.d6.loss_cls: 0.0557, decode.d6.loss_mask: 0.1858, decode.d6.loss_dice: 0.5132, decode.d7.loss_cls: 0.0607, decode.d7.loss_mask: 0.1861, decode.d7.loss_dice: 0.5181, decode.d8.loss_cls: 0.0610, decode.d8.loss_mask: 0.1865, decode.d8.loss_dice: 0.5182, loss: 7.9298 +2022-05-11 02:32:17,209 - mmseg - INFO - Iter [59400/80000] lr: 3.697e-07, eta: 13:35:54, time: 1.843, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0526, decode.loss_mask: 0.1895, decode.loss_dice: 0.5128, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.1974, 
decode.d0.loss_dice: 0.5344, decode.d1.loss_cls: 0.0682, decode.d1.loss_mask: 0.1909, decode.d1.loss_dice: 0.5173, decode.d2.loss_cls: 0.0604, decode.d2.loss_mask: 0.1909, decode.d2.loss_dice: 0.5164, decode.d3.loss_cls: 0.0545, decode.d3.loss_mask: 0.1900, decode.d3.loss_dice: 0.5104, decode.d4.loss_cls: 0.0541, decode.d4.loss_mask: 0.1900, decode.d4.loss_dice: 0.5088, decode.d5.loss_cls: 0.0485, decode.d5.loss_mask: 0.1900, decode.d5.loss_dice: 0.5104, decode.d6.loss_cls: 0.0490, decode.d6.loss_mask: 0.1898, decode.d6.loss_dice: 0.5101, decode.d7.loss_cls: 0.0482, decode.d7.loss_mask: 0.1898, decode.d7.loss_dice: 0.5101, decode.d8.loss_cls: 0.0488, decode.d8.loss_mask: 0.1900, decode.d8.loss_dice: 0.5111, loss: 7.8313 +2022-05-11 02:33:46,943 - mmseg - INFO - Iter [59450/80000] lr: 3.688e-07, eta: 13:33:03, time: 1.795, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0529, decode.loss_mask: 0.1895, decode.loss_dice: 0.5147, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.1979, decode.d0.loss_dice: 0.5393, decode.d1.loss_cls: 0.0689, decode.d1.loss_mask: 0.1928, decode.d1.loss_dice: 0.5238, decode.d2.loss_cls: 0.0572, decode.d2.loss_mask: 0.1911, decode.d2.loss_dice: 0.5190, decode.d3.loss_cls: 0.0550, decode.d3.loss_mask: 0.1903, decode.d3.loss_dice: 0.5148, decode.d4.loss_cls: 0.0567, decode.d4.loss_mask: 0.1901, decode.d4.loss_dice: 0.5152, decode.d5.loss_cls: 0.0549, decode.d5.loss_mask: 0.1903, decode.d5.loss_dice: 0.5163, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1897, decode.d6.loss_dice: 0.5102, decode.d7.loss_cls: 0.0549, decode.d7.loss_mask: 0.1897, decode.d7.loss_dice: 0.5135, decode.d8.loss_cls: 0.0527, decode.d8.loss_mask: 0.1896, decode.d8.loss_dice: 0.5116, loss: 7.9006 +2022-05-11 02:35:18,529 - mmseg - INFO - Iter [59500/80000] lr: 3.679e-07, eta: 13:30:16, time: 1.832, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0478, decode.loss_mask: 0.1858, decode.loss_dice: 0.5053, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 
0.1922, decode.d0.loss_dice: 0.5346, decode.d1.loss_cls: 0.0737, decode.d1.loss_mask: 0.1868, decode.d1.loss_dice: 0.5181, decode.d2.loss_cls: 0.0647, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.5115, decode.d3.loss_cls: 0.0566, decode.d3.loss_mask: 0.1862, decode.d3.loss_dice: 0.5129, decode.d4.loss_cls: 0.0570, decode.d4.loss_mask: 0.1863, decode.d4.loss_dice: 0.5111, decode.d5.loss_cls: 0.0573, decode.d5.loss_mask: 0.1856, decode.d5.loss_dice: 0.5113, decode.d6.loss_cls: 0.0591, decode.d6.loss_mask: 0.1857, decode.d6.loss_dice: 0.5080, decode.d7.loss_cls: 0.0535, decode.d7.loss_mask: 0.1861, decode.d7.loss_dice: 0.5081, decode.d8.loss_cls: 0.0573, decode.d8.loss_mask: 0.1860, decode.d8.loss_dice: 0.5079, loss: 7.8153 +2022-05-11 02:36:51,181 - mmseg - INFO - Iter [59550/80000] lr: 3.670e-07, eta: 13:27:32, time: 1.853, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0453, decode.loss_mask: 0.1862, decode.loss_dice: 0.5119, decode.d0.loss_cls: 0.2953, decode.d0.loss_mask: 0.1933, decode.d0.loss_dice: 0.5340, decode.d1.loss_cls: 0.0517, decode.d1.loss_mask: 0.1878, decode.d1.loss_dice: 0.5166, decode.d2.loss_cls: 0.0552, decode.d2.loss_mask: 0.1872, decode.d2.loss_dice: 0.5132, decode.d3.loss_cls: 0.0453, decode.d3.loss_mask: 0.1869, decode.d3.loss_dice: 0.5112, decode.d4.loss_cls: 0.0423, decode.d4.loss_mask: 0.1869, decode.d4.loss_dice: 0.5117, decode.d5.loss_cls: 0.0415, decode.d5.loss_mask: 0.1869, decode.d5.loss_dice: 0.5115, decode.d6.loss_cls: 0.0420, decode.d6.loss_mask: 0.1868, decode.d6.loss_dice: 0.5110, decode.d7.loss_cls: 0.0470, decode.d7.loss_mask: 0.1866, decode.d7.loss_dice: 0.5105, decode.d8.loss_cls: 0.0433, decode.d8.loss_mask: 0.1864, decode.d8.loss_dice: 0.5112, loss: 7.7266 +2022-05-11 02:38:22,320 - mmseg - INFO - Iter [59600/80000] lr: 3.661e-07, eta: 13:24:45, time: 1.823, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0529, decode.loss_mask: 0.1814, decode.loss_dice: 0.5149, decode.d0.loss_cls: 0.2932, 
decode.d0.loss_mask: 0.1875, decode.d0.loss_dice: 0.5392, decode.d1.loss_cls: 0.0685, decode.d1.loss_mask: 0.1814, decode.d1.loss_dice: 0.5263, decode.d2.loss_cls: 0.0657, decode.d2.loss_mask: 0.1816, decode.d2.loss_dice: 0.5207, decode.d3.loss_cls: 0.0594, decode.d3.loss_mask: 0.1818, decode.d3.loss_dice: 0.5188, decode.d4.loss_cls: 0.0602, decode.d4.loss_mask: 0.1819, decode.d4.loss_dice: 0.5158, decode.d5.loss_cls: 0.0563, decode.d5.loss_mask: 0.1817, decode.d5.loss_dice: 0.5146, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1816, decode.d6.loss_dice: 0.5125, decode.d7.loss_cls: 0.0554, decode.d7.loss_mask: 0.1810, decode.d7.loss_dice: 0.5100, decode.d8.loss_cls: 0.0523, decode.d8.loss_mask: 0.1809, decode.d8.loss_dice: 0.5179, loss: 7.8278 +2022-05-11 02:39:50,652 - mmseg - INFO - Iter [59650/80000] lr: 3.652e-07, eta: 13:21:54, time: 1.767, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0423, decode.loss_mask: 0.1870, decode.loss_dice: 0.5131, decode.d0.loss_cls: 0.3035, decode.d0.loss_mask: 0.1942, decode.d0.loss_dice: 0.5342, decode.d1.loss_cls: 0.0623, decode.d1.loss_mask: 0.1886, decode.d1.loss_dice: 0.5206, decode.d2.loss_cls: 0.0622, decode.d2.loss_mask: 0.1881, decode.d2.loss_dice: 0.5166, decode.d3.loss_cls: 0.0512, decode.d3.loss_mask: 0.1875, decode.d3.loss_dice: 0.5127, decode.d4.loss_cls: 0.0517, decode.d4.loss_mask: 0.1880, decode.d4.loss_dice: 0.5125, decode.d5.loss_cls: 0.0492, decode.d5.loss_mask: 0.1876, decode.d5.loss_dice: 0.5115, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1876, decode.d6.loss_dice: 0.5081, decode.d7.loss_cls: 0.0430, decode.d7.loss_mask: 0.1875, decode.d7.loss_dice: 0.5097, decode.d8.loss_cls: 0.0516, decode.d8.loss_mask: 0.1867, decode.d8.loss_dice: 0.5136, loss: 7.8019 +2022-05-11 02:41:20,350 - mmseg - INFO - Iter [59700/80000] lr: 3.644e-07, eta: 13:19:07, time: 1.794, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0569, decode.loss_mask: 0.1806, decode.loss_dice: 0.5098, decode.d0.loss_cls: 
0.2939, decode.d0.loss_mask: 0.1870, decode.d0.loss_dice: 0.5319, decode.d1.loss_cls: 0.0704, decode.d1.loss_mask: 0.1820, decode.d1.loss_dice: 0.5191, decode.d2.loss_cls: 0.0651, decode.d2.loss_mask: 0.1807, decode.d2.loss_dice: 0.5121, decode.d3.loss_cls: 0.0588, decode.d3.loss_mask: 0.1809, decode.d3.loss_dice: 0.5098, decode.d4.loss_cls: 0.0607, decode.d4.loss_mask: 0.1808, decode.d4.loss_dice: 0.5071, decode.d5.loss_cls: 0.0621, decode.d5.loss_mask: 0.1808, decode.d5.loss_dice: 0.5102, decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1804, decode.d6.loss_dice: 0.5124, decode.d7.loss_cls: 0.0575, decode.d7.loss_mask: 0.1809, decode.d7.loss_dice: 0.5099, decode.d8.loss_cls: 0.0585, decode.d8.loss_mask: 0.1803, decode.d8.loss_dice: 0.5107, loss: 7.7916 +2022-05-11 02:42:53,388 - mmseg - INFO - Iter [59750/80000] lr: 3.635e-07, eta: 13:16:25, time: 1.861, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0558, decode.loss_mask: 0.1878, decode.loss_dice: 0.5144, decode.d0.loss_cls: 0.2860, decode.d0.loss_mask: 0.1942, decode.d0.loss_dice: 0.5402, decode.d1.loss_cls: 0.0676, decode.d1.loss_mask: 0.1888, decode.d1.loss_dice: 0.5256, decode.d2.loss_cls: 0.0608, decode.d2.loss_mask: 0.1880, decode.d2.loss_dice: 0.5187, decode.d3.loss_cls: 0.0563, decode.d3.loss_mask: 0.1885, decode.d3.loss_dice: 0.5146, decode.d4.loss_cls: 0.0578, decode.d4.loss_mask: 0.1880, decode.d4.loss_dice: 0.5150, decode.d5.loss_cls: 0.0591, decode.d5.loss_mask: 0.1883, decode.d5.loss_dice: 0.5188, decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.1876, decode.d6.loss_dice: 0.5183, decode.d7.loss_cls: 0.0555, decode.d7.loss_mask: 0.1875, decode.d7.loss_dice: 0.5144, decode.d8.loss_cls: 0.0573, decode.d8.loss_mask: 0.1879, decode.d8.loss_dice: 0.5173, loss: 7.8999 +2022-05-11 02:44:23,336 - mmseg - INFO - Iter [59800/80000] lr: 3.626e-07, eta: 13:13:39, time: 1.799, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0505, decode.loss_mask: 0.1845, decode.loss_dice: 0.5012, 
decode.d0.loss_cls: 0.3061, decode.d0.loss_mask: 0.1908, decode.d0.loss_dice: 0.5305, decode.d1.loss_cls: 0.0573, decode.d1.loss_mask: 0.1864, decode.d1.loss_dice: 0.5136, decode.d2.loss_cls: 0.0548, decode.d2.loss_mask: 0.1858, decode.d2.loss_dice: 0.5103, decode.d3.loss_cls: 0.0554, decode.d3.loss_mask: 0.1852, decode.d3.loss_dice: 0.5032, decode.d4.loss_cls: 0.0474, decode.d4.loss_mask: 0.1853, decode.d4.loss_dice: 0.5055, decode.d5.loss_cls: 0.0624, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.5091, decode.d6.loss_cls: 0.0471, decode.d6.loss_mask: 0.1852, decode.d6.loss_dice: 0.5061, decode.d7.loss_cls: 0.0531, decode.d7.loss_mask: 0.1852, decode.d7.loss_dice: 0.5082, decode.d8.loss_cls: 0.0531, decode.d8.loss_mask: 0.1849, decode.d8.loss_dice: 0.5051, loss: 7.7384 +2022-05-11 02:45:53,530 - mmseg - INFO - Iter [59850/80000] lr: 3.617e-07, eta: 13:10:54, time: 1.804, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0509, decode.loss_mask: 0.1821, decode.loss_dice: 0.5124, decode.d0.loss_cls: 0.2873, decode.d0.loss_mask: 0.1889, decode.d0.loss_dice: 0.5327, decode.d1.loss_cls: 0.0746, decode.d1.loss_mask: 0.1833, decode.d1.loss_dice: 0.5196, decode.d2.loss_cls: 0.0630, decode.d2.loss_mask: 0.1832, decode.d2.loss_dice: 0.5127, decode.d3.loss_cls: 0.0547, decode.d3.loss_mask: 0.1829, decode.d3.loss_dice: 0.5133, decode.d4.loss_cls: 0.0551, decode.d4.loss_mask: 0.1827, decode.d4.loss_dice: 0.5148, decode.d5.loss_cls: 0.0579, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.5143, decode.d6.loss_cls: 0.0497, decode.d6.loss_mask: 0.1819, decode.d6.loss_dice: 0.5126, decode.d7.loss_cls: 0.0528, decode.d7.loss_mask: 0.1813, decode.d7.loss_dice: 0.5101, decode.d8.loss_cls: 0.0561, decode.d8.loss_mask: 0.1816, decode.d8.loss_dice: 0.5095, loss: 7.7840 +2022-05-11 02:47:24,466 - mmseg - INFO - Iter [59900/80000] lr: 3.608e-07, eta: 13:08:11, time: 1.816, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0445, decode.loss_mask: 0.1901, decode.loss_dice: 
0.5130, decode.d0.loss_cls: 0.2940, decode.d0.loss_mask: 0.1969, decode.d0.loss_dice: 0.5384, decode.d1.loss_cls: 0.0662, decode.d1.loss_mask: 0.1912, decode.d1.loss_dice: 0.5221, decode.d2.loss_cls: 0.0570, decode.d2.loss_mask: 0.1910, decode.d2.loss_dice: 0.5193, decode.d3.loss_cls: 0.0533, decode.d3.loss_mask: 0.1906, decode.d3.loss_dice: 0.5154, decode.d4.loss_cls: 0.0516, decode.d4.loss_mask: 0.1905, decode.d4.loss_dice: 0.5170, decode.d5.loss_cls: 0.0477, decode.d5.loss_mask: 0.1907, decode.d5.loss_dice: 0.5156, decode.d6.loss_cls: 0.0495, decode.d6.loss_mask: 0.1902, decode.d6.loss_dice: 0.5120, decode.d7.loss_cls: 0.0500, decode.d7.loss_mask: 0.1906, decode.d7.loss_dice: 0.5127, decode.d8.loss_cls: 0.0487, decode.d8.loss_mask: 0.1903, decode.d8.loss_dice: 0.5132, loss: 7.8532 +2022-05-11 02:48:57,459 - mmseg - INFO - Iter [59950/80000] lr: 3.599e-07, eta: 13:05:32, time: 1.862, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0584, decode.loss_mask: 0.1837, decode.loss_dice: 0.4992, decode.d0.loss_cls: 0.3048, decode.d0.loss_mask: 0.1893, decode.d0.loss_dice: 0.5297, decode.d1.loss_cls: 0.0741, decode.d1.loss_mask: 0.1851, decode.d1.loss_dice: 0.5095, decode.d2.loss_cls: 0.0722, decode.d2.loss_mask: 0.1848, decode.d2.loss_dice: 0.5088, decode.d3.loss_cls: 0.0631, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.5020, decode.d4.loss_cls: 0.0624, decode.d4.loss_mask: 0.1847, decode.d4.loss_dice: 0.5010, decode.d5.loss_cls: 0.0637, decode.d5.loss_mask: 0.1841, decode.d5.loss_dice: 0.5012, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1842, decode.d6.loss_dice: 0.5022, decode.d7.loss_cls: 0.0538, decode.d7.loss_mask: 0.1841, decode.d7.loss_dice: 0.5024, decode.d8.loss_cls: 0.0573, decode.d8.loss_mask: 0.1840, decode.d8.loss_dice: 0.5022, loss: 7.7746 +2022-05-11 02:50:27,492 - mmseg - INFO - Saving checkpoint at 60000 iterations +2022-05-11 02:50:57,992 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 
02:50:57,994 - mmseg - INFO - Iter [60000/80000] lr: 3.590e-07, eta: 13:03:40, time: 2.409, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0567, decode.loss_mask: 0.1850, decode.loss_dice: 0.5149, decode.d0.loss_cls: 0.3044, decode.d0.loss_mask: 0.1927, decode.d0.loss_dice: 0.5409, decode.d1.loss_cls: 0.0774, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5214, decode.d2.loss_cls: 0.0642, decode.d2.loss_mask: 0.1862, decode.d2.loss_dice: 0.5149, decode.d3.loss_cls: 0.0609, decode.d3.loss_mask: 0.1855, decode.d3.loss_dice: 0.5150, decode.d4.loss_cls: 0.0560, decode.d4.loss_mask: 0.1854, decode.d4.loss_dice: 0.5132, decode.d5.loss_cls: 0.0561, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.5148, decode.d6.loss_cls: 0.0574, decode.d6.loss_mask: 0.1851, decode.d6.loss_dice: 0.5117, decode.d7.loss_cls: 0.0567, decode.d7.loss_mask: 0.1848, decode.d7.loss_dice: 0.5124, decode.d8.loss_cls: 0.0557, decode.d8.loss_mask: 0.1850, decode.d8.loss_dice: 0.5125, loss: 7.8790 +2022-05-11 02:52:52,962 - mmseg - INFO - per class results: +2022-05-11 02:52:52,969 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.54 | 99.24 | +| sidewalk | 88.32 | 93.62 | +| building | 94.45 | 96.99 | +| wall | 70.66 | 80.06 | +| fence | 74.61 | 82.55 | +| pole | 71.45 | 83.68 | +| traffic light | 77.36 | 87.64 | +| traffic sign | 84.12 | 90.43 | +| vegetation | 93.4 | 97.0 | +| terrain | 68.87 | 78.07 | +| sky | 95.74 | 98.53 | +| person | 86.97 | 93.51 | +| rider | 74.69 | 84.71 | +| car | 96.23 | 98.3 | +| truck | 82.6 | 95.03 | +| bus | 93.61 | 96.44 | +| train | 88.06 | 90.86 | +| motorcycle | 76.87 | 88.19 | +| bicycle | 82.73 | 92.2 | ++---------------+-------+-------+ +2022-05-11 02:52:52,970 - mmseg - INFO - Summary: +2022-05-11 02:52:52,970 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | ++-------+-------+------+ +| 97.01 | 84.17 | 90.9 | ++-------+-------+------+ +2022-05-11 
02:52:52,973 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 02:52:52,973 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8417, mAcc: 0.9090, IoU.road: 0.9854, IoU.sidewalk: 0.8832, IoU.building: 0.9445, IoU.wall: 0.7066, IoU.fence: 0.7461, IoU.pole: 0.7145, IoU.traffic light: 0.7736, IoU.traffic sign: 0.8412, IoU.vegetation: 0.9340, IoU.terrain: 0.6887, IoU.sky: 0.9574, IoU.person: 0.8697, IoU.rider: 0.7469, IoU.car: 0.9623, IoU.truck: 0.8260, IoU.bus: 0.9361, IoU.train: 0.8806, IoU.motorcycle: 0.7687, IoU.bicycle: 0.8273, Acc.road: 0.9924, Acc.sidewalk: 0.9362, Acc.building: 0.9699, Acc.wall: 0.8006, Acc.fence: 0.8255, Acc.pole: 0.8368, Acc.traffic light: 0.8764, Acc.traffic sign: 0.9043, Acc.vegetation: 0.9700, Acc.terrain: 0.7807, Acc.sky: 0.9853, Acc.person: 0.9351, Acc.rider: 0.8471, Acc.car: 0.9830, Acc.truck: 0.9503, Acc.bus: 0.9644, Acc.train: 0.9086, Acc.motorcycle: 0.8819, Acc.bicycle: 0.9220 +2022-05-11 02:54:21,515 - mmseg - INFO - Iter [60050/80000] lr: 3.581e-07, eta: 13:04:05, time: 4.072, data_time: 2.318, memory: 69063, decode.loss_cls: 0.0504, decode.loss_mask: 0.1832, decode.loss_dice: 0.5071, decode.d0.loss_cls: 0.2879, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5271, decode.d1.loss_cls: 0.0646, decode.d1.loss_mask: 0.1853, decode.d1.loss_dice: 0.5138, decode.d2.loss_cls: 0.0564, decode.d2.loss_mask: 0.1842, decode.d2.loss_dice: 0.5116, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1838, decode.d3.loss_dice: 0.5058, decode.d4.loss_cls: 0.0542, decode.d4.loss_mask: 0.1835, decode.d4.loss_dice: 0.5063, decode.d5.loss_cls: 0.0515, decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.5075, decode.d6.loss_cls: 0.0515, decode.d6.loss_mask: 0.1831, decode.d6.loss_dice: 0.5045, decode.d7.loss_cls: 0.0471, decode.d7.loss_mask: 0.1834, decode.d7.loss_dice: 0.5041, decode.d8.loss_cls: 0.0517, decode.d8.loss_mask: 0.1832, decode.d8.loss_dice: 0.5048, loss: 7.7037 +2022-05-11 02:55:54,546 - 
mmseg - INFO - Iter [60100/80000] lr: 3.572e-07, eta: 13:01:26, time: 1.861, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0576, decode.loss_mask: 0.1887, decode.loss_dice: 0.5136, decode.d0.loss_cls: 0.3009, decode.d0.loss_mask: 0.1960, decode.d0.loss_dice: 0.5331, decode.d1.loss_cls: 0.0771, decode.d1.loss_mask: 0.1904, decode.d1.loss_dice: 0.5186, decode.d2.loss_cls: 0.0682, decode.d2.loss_mask: 0.1891, decode.d2.loss_dice: 0.5144, decode.d3.loss_cls: 0.0644, decode.d3.loss_mask: 0.1886, decode.d3.loss_dice: 0.5088, decode.d4.loss_cls: 0.0623, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5124, decode.d5.loss_cls: 0.0560, decode.d5.loss_mask: 0.1886, decode.d5.loss_dice: 0.5117, decode.d6.loss_cls: 0.0652, decode.d6.loss_mask: 0.1885, decode.d6.loss_dice: 0.5103, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1892, decode.d7.loss_dice: 0.5111, decode.d8.loss_cls: 0.0625, decode.d8.loss_mask: 0.1885, decode.d8.loss_dice: 0.5112, loss: 7.9149 +2022-05-11 02:57:25,481 - mmseg - INFO - Iter [60150/80000] lr: 3.563e-07, eta: 12:58:44, time: 1.816, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0530, decode.loss_mask: 0.1907, decode.loss_dice: 0.5189, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.1987, decode.d0.loss_dice: 0.5417, decode.d1.loss_cls: 0.0804, decode.d1.loss_mask: 0.1922, decode.d1.loss_dice: 0.5280, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1910, decode.d2.loss_dice: 0.5205, decode.d3.loss_cls: 0.0636, decode.d3.loss_mask: 0.1908, decode.d3.loss_dice: 0.5176, decode.d4.loss_cls: 0.0599, decode.d4.loss_mask: 0.1912, decode.d4.loss_dice: 0.5235, decode.d5.loss_cls: 0.0526, decode.d5.loss_mask: 0.1910, decode.d5.loss_dice: 0.5213, decode.d6.loss_cls: 0.0521, decode.d6.loss_mask: 0.1909, decode.d6.loss_dice: 0.5163, decode.d7.loss_cls: 0.0507, decode.d7.loss_mask: 0.1910, decode.d7.loss_dice: 0.5200, decode.d8.loss_cls: 0.0501, decode.d8.loss_mask: 0.1909, decode.d8.loss_dice: 0.5191, loss: 7.9742 +2022-05-11 02:58:55,619 
- mmseg - INFO - Iter [60200/80000] lr: 3.554e-07, eta: 12:56:02, time: 1.804, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0482, decode.loss_mask: 0.1841, decode.loss_dice: 0.4989, decode.d0.loss_cls: 0.2916, decode.d0.loss_mask: 0.1920, decode.d0.loss_dice: 0.5250, decode.d1.loss_cls: 0.0683, decode.d1.loss_mask: 0.1856, decode.d1.loss_dice: 0.5104, decode.d2.loss_cls: 0.0538, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.4999, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.5004, decode.d4.loss_cls: 0.0554, decode.d4.loss_mask: 0.1843, decode.d4.loss_dice: 0.4993, decode.d5.loss_cls: 0.0500, decode.d5.loss_mask: 0.1844, decode.d5.loss_dice: 0.4993, decode.d6.loss_cls: 0.0492, decode.d6.loss_mask: 0.1842, decode.d6.loss_dice: 0.4964, decode.d7.loss_cls: 0.0487, decode.d7.loss_mask: 0.1841, decode.d7.loss_dice: 0.4979, decode.d8.loss_cls: 0.0455, decode.d8.loss_mask: 0.1837, decode.d8.loss_dice: 0.4999, loss: 7.6422 +2022-05-11 03:00:24,831 - mmseg - INFO - Iter [60250/80000] lr: 3.545e-07, eta: 12:53:18, time: 1.786, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0524, decode.loss_mask: 0.1900, decode.loss_dice: 0.5033, decode.d0.loss_cls: 0.2969, decode.d0.loss_mask: 0.1971, decode.d0.loss_dice: 0.5265, decode.d1.loss_cls: 0.0750, decode.d1.loss_mask: 0.1921, decode.d1.loss_dice: 0.5138, decode.d2.loss_cls: 0.0670, decode.d2.loss_mask: 0.1916, decode.d2.loss_dice: 0.5094, decode.d3.loss_cls: 0.0550, decode.d3.loss_mask: 0.1909, decode.d3.loss_dice: 0.5072, decode.d4.loss_cls: 0.0547, decode.d4.loss_mask: 0.1911, decode.d4.loss_dice: 0.5077, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1909, decode.d5.loss_dice: 0.5062, decode.d6.loss_cls: 0.0530, decode.d6.loss_mask: 0.1903, decode.d6.loss_dice: 0.5057, decode.d7.loss_cls: 0.0500, decode.d7.loss_mask: 0.1900, decode.d7.loss_dice: 0.5032, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1897, decode.d8.loss_dice: 0.4996, loss: 7.8036 +2022-05-11 
03:01:57,144 - mmseg - INFO - Iter [60300/80000] lr: 3.536e-07, eta: 12:50:41, time: 1.846, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0528, decode.loss_mask: 0.1869, decode.loss_dice: 0.5159, decode.d0.loss_cls: 0.2859, decode.d0.loss_mask: 0.1943, decode.d0.loss_dice: 0.5345, decode.d1.loss_cls: 0.0685, decode.d1.loss_mask: 0.1880, decode.d1.loss_dice: 0.5195, decode.d2.loss_cls: 0.0593, decode.d2.loss_mask: 0.1874, decode.d2.loss_dice: 0.5174, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1871, decode.d3.loss_dice: 0.5156, decode.d4.loss_cls: 0.0598, decode.d4.loss_mask: 0.1867, decode.d4.loss_dice: 0.5182, decode.d5.loss_cls: 0.0538, decode.d5.loss_mask: 0.1867, decode.d5.loss_dice: 0.5147, decode.d6.loss_cls: 0.0583, decode.d6.loss_mask: 0.1868, decode.d6.loss_dice: 0.5121, decode.d7.loss_cls: 0.0479, decode.d7.loss_mask: 0.1869, decode.d7.loss_dice: 0.5127, decode.d8.loss_cls: 0.0518, decode.d8.loss_mask: 0.1865, decode.d8.loss_dice: 0.5125, loss: 7.8410 +2022-05-11 03:03:26,639 - mmseg - INFO - Iter [60350/80000] lr: 3.527e-07, eta: 12:47:59, time: 1.790, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0536, decode.loss_mask: 0.1919, decode.loss_dice: 0.5152, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.2002, decode.d0.loss_dice: 0.5393, decode.d1.loss_cls: 0.0652, decode.d1.loss_mask: 0.1934, decode.d1.loss_dice: 0.5299, decode.d2.loss_cls: 0.0683, decode.d2.loss_mask: 0.1930, decode.d2.loss_dice: 0.5219, decode.d3.loss_cls: 0.0552, decode.d3.loss_mask: 0.1922, decode.d3.loss_dice: 0.5180, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1919, decode.d4.loss_dice: 0.5185, decode.d5.loss_cls: 0.0559, decode.d5.loss_mask: 0.1915, decode.d5.loss_dice: 0.5156, decode.d6.loss_cls: 0.0530, decode.d6.loss_mask: 0.1922, decode.d6.loss_dice: 0.5151, decode.d7.loss_cls: 0.0561, decode.d7.loss_mask: 0.1918, decode.d7.loss_dice: 0.5157, decode.d8.loss_cls: 0.0576, decode.d8.loss_mask: 0.1920, decode.d8.loss_dice: 0.5131, loss: 7.9547 
+2022-05-11 03:04:57,302 - mmseg - INFO - Iter [60400/80000] lr: 3.518e-07, eta: 12:45:20, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0608, decode.loss_mask: 0.1835, decode.loss_dice: 0.5123, decode.d0.loss_cls: 0.2988, decode.d0.loss_mask: 0.1904, decode.d0.loss_dice: 0.5350, decode.d1.loss_cls: 0.0749, decode.d1.loss_mask: 0.1848, decode.d1.loss_dice: 0.5171, decode.d2.loss_cls: 0.0637, decode.d2.loss_mask: 0.1842, decode.d2.loss_dice: 0.5183, decode.d3.loss_cls: 0.0659, decode.d3.loss_mask: 0.1835, decode.d3.loss_dice: 0.5116, decode.d4.loss_cls: 0.0656, decode.d4.loss_mask: 0.1839, decode.d4.loss_dice: 0.5121, decode.d5.loss_cls: 0.0621, decode.d5.loss_mask: 0.1836, decode.d5.loss_dice: 0.5085, decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.5100, decode.d7.loss_cls: 0.0683, decode.d7.loss_mask: 0.1837, decode.d7.loss_dice: 0.5152, decode.d8.loss_cls: 0.0651, decode.d8.loss_mask: 0.1839, decode.d8.loss_dice: 0.5125, loss: 7.8835 +2022-05-11 03:06:27,025 - mmseg - INFO - Iter [60450/80000] lr: 3.509e-07, eta: 12:42:39, time: 1.794, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0530, decode.loss_mask: 0.1848, decode.loss_dice: 0.5052, decode.d0.loss_cls: 0.2969, decode.d0.loss_mask: 0.1907, decode.d0.loss_dice: 0.5268, decode.d1.loss_cls: 0.0720, decode.d1.loss_mask: 0.1862, decode.d1.loss_dice: 0.5083, decode.d2.loss_cls: 0.0586, decode.d2.loss_mask: 0.1856, decode.d2.loss_dice: 0.5068, decode.d3.loss_cls: 0.0579, decode.d3.loss_mask: 0.1849, decode.d3.loss_dice: 0.5049, decode.d4.loss_cls: 0.0577, decode.d4.loss_mask: 0.1852, decode.d4.loss_dice: 0.5045, decode.d5.loss_cls: 0.0558, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.5023, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1847, decode.d6.loss_dice: 0.5045, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1849, decode.d7.loss_dice: 0.5035, decode.d8.loss_cls: 0.0528, decode.d8.loss_mask: 0.1849, decode.d8.loss_dice: 0.5018, loss: 
7.7411 +2022-05-11 03:07:59,500 - mmseg - INFO - Iter [60500/80000] lr: 3.500e-07, eta: 12:40:04, time: 1.850, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0595, decode.loss_mask: 0.1851, decode.loss_dice: 0.5120, decode.d0.loss_cls: 0.2974, decode.d0.loss_mask: 0.1932, decode.d0.loss_dice: 0.5394, decode.d1.loss_cls: 0.0753, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5189, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1860, decode.d2.loss_dice: 0.5160, decode.d3.loss_cls: 0.0619, decode.d3.loss_mask: 0.1851, decode.d3.loss_dice: 0.5112, decode.d4.loss_cls: 0.0594, decode.d4.loss_mask: 0.1860, decode.d4.loss_dice: 0.5130, decode.d5.loss_cls: 0.0642, decode.d5.loss_mask: 0.1857, decode.d5.loss_dice: 0.5152, decode.d6.loss_cls: 0.0617, decode.d6.loss_mask: 0.1853, decode.d6.loss_dice: 0.5124, decode.d7.loss_cls: 0.0643, decode.d7.loss_mask: 0.1854, decode.d7.loss_dice: 0.5110, decode.d8.loss_cls: 0.0646, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.5135, loss: 7.8956 +2022-05-11 03:09:28,922 - mmseg - INFO - Iter [60550/80000] lr: 3.491e-07, eta: 12:37:25, time: 1.788, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0555, decode.loss_mask: 0.1828, decode.loss_dice: 0.5073, decode.d0.loss_cls: 0.2925, decode.d0.loss_mask: 0.1891, decode.d0.loss_dice: 0.5310, decode.d1.loss_cls: 0.0735, decode.d1.loss_mask: 0.1834, decode.d1.loss_dice: 0.5177, decode.d2.loss_cls: 0.0640, decode.d2.loss_mask: 0.1835, decode.d2.loss_dice: 0.5166, decode.d3.loss_cls: 0.0624, decode.d3.loss_mask: 0.1831, decode.d3.loss_dice: 0.5147, decode.d4.loss_cls: 0.0661, decode.d4.loss_mask: 0.1832, decode.d4.loss_dice: 0.5090, decode.d5.loss_cls: 0.0583, decode.d5.loss_mask: 0.1829, decode.d5.loss_dice: 0.5092, decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.5099, decode.d7.loss_cls: 0.0626, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.5092, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1831, decode.d8.loss_dice: 0.5096, 
loss: 7.8218 +2022-05-11 03:10:58,989 - mmseg - INFO - Iter [60600/80000] lr: 3.482e-07, eta: 12:34:46, time: 1.801, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0465, decode.loss_mask: 0.1875, decode.loss_dice: 0.5045, decode.d0.loss_cls: 0.2899, decode.d0.loss_mask: 0.1951, decode.d0.loss_dice: 0.5256, decode.d1.loss_cls: 0.0672, decode.d1.loss_mask: 0.1892, decode.d1.loss_dice: 0.5051, decode.d2.loss_cls: 0.0544, decode.d2.loss_mask: 0.1885, decode.d2.loss_dice: 0.5068, decode.d3.loss_cls: 0.0492, decode.d3.loss_mask: 0.1878, decode.d3.loss_dice: 0.5016, decode.d4.loss_cls: 0.0495, decode.d4.loss_mask: 0.1876, decode.d4.loss_dice: 0.5024, decode.d5.loss_cls: 0.0474, decode.d5.loss_mask: 0.1879, decode.d5.loss_dice: 0.5040, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1879, decode.d6.loss_dice: 0.5027, decode.d7.loss_cls: 0.0458, decode.d7.loss_mask: 0.1877, decode.d7.loss_dice: 0.5027, decode.d8.loss_cls: 0.0482, decode.d8.loss_mask: 0.1877, decode.d8.loss_dice: 0.5038, loss: 7.6941 +2022-05-11 03:12:31,659 - mmseg - INFO - Iter [60650/80000] lr: 3.473e-07, eta: 12:32:13, time: 1.853, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0593, decode.loss_mask: 0.1823, decode.loss_dice: 0.5146, decode.d0.loss_cls: 0.2988, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5317, decode.d1.loss_cls: 0.0801, decode.d1.loss_mask: 0.1842, decode.d1.loss_dice: 0.5237, decode.d2.loss_cls: 0.0714, decode.d2.loss_mask: 0.1822, decode.d2.loss_dice: 0.5105, decode.d3.loss_cls: 0.0618, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.5117, decode.d4.loss_cls: 0.0622, decode.d4.loss_mask: 0.1822, decode.d4.loss_dice: 0.5109, decode.d5.loss_cls: 0.0630, decode.d5.loss_mask: 0.1816, decode.d5.loss_dice: 0.5099, decode.d6.loss_cls: 0.0591, decode.d6.loss_mask: 0.1823, decode.d6.loss_dice: 0.5116, decode.d7.loss_cls: 0.0575, decode.d7.loss_mask: 0.1826, decode.d7.loss_dice: 0.5134, decode.d8.loss_cls: 0.0600, decode.d8.loss_mask: 0.1821, decode.d8.loss_dice: 
0.5138, loss: 7.8560 +2022-05-11 03:14:01,226 - mmseg - INFO - Iter [60700/80000] lr: 3.464e-07, eta: 12:29:35, time: 1.792, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0596, decode.loss_mask: 0.1857, decode.loss_dice: 0.5141, decode.d0.loss_cls: 0.2951, decode.d0.loss_mask: 0.1917, decode.d0.loss_dice: 0.5348, decode.d1.loss_cls: 0.0774, decode.d1.loss_mask: 0.1870, decode.d1.loss_dice: 0.5160, decode.d2.loss_cls: 0.0643, decode.d2.loss_mask: 0.1871, decode.d2.loss_dice: 0.5220, decode.d3.loss_cls: 0.0624, decode.d3.loss_mask: 0.1858, decode.d3.loss_dice: 0.5166, decode.d4.loss_cls: 0.0629, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5125, decode.d5.loss_cls: 0.0571, decode.d5.loss_mask: 0.1863, decode.d5.loss_dice: 0.5140, decode.d6.loss_cls: 0.0577, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.5158, decode.d7.loss_cls: 0.0608, decode.d7.loss_mask: 0.1859, decode.d7.loss_dice: 0.5121, decode.d8.loss_cls: 0.0577, decode.d8.loss_mask: 0.1857, decode.d8.loss_dice: 0.5138, loss: 7.8937 +2022-05-11 03:15:30,973 - mmseg - INFO - Iter [60750/80000] lr: 3.455e-07, eta: 12:26:58, time: 1.795, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0499, decode.loss_mask: 0.1887, decode.loss_dice: 0.5180, decode.d0.loss_cls: 0.3023, decode.d0.loss_mask: 0.1955, decode.d0.loss_dice: 0.5394, decode.d1.loss_cls: 0.0709, decode.d1.loss_mask: 0.1897, decode.d1.loss_dice: 0.5272, decode.d2.loss_cls: 0.0614, decode.d2.loss_mask: 0.1888, decode.d2.loss_dice: 0.5234, decode.d3.loss_cls: 0.0536, decode.d3.loss_mask: 0.1888, decode.d3.loss_dice: 0.5172, decode.d4.loss_cls: 0.0537, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.5206, decode.d5.loss_cls: 0.0549, decode.d5.loss_mask: 0.1884, decode.d5.loss_dice: 0.5174, decode.d6.loss_cls: 0.0494, decode.d6.loss_mask: 0.1884, decode.d6.loss_dice: 0.5196, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1883, decode.d7.loss_dice: 0.5201, decode.d8.loss_cls: 0.0497, decode.d8.loss_mask: 0.1887, 
decode.d8.loss_dice: 0.5205, loss: 7.9202 +2022-05-11 03:17:00,276 - mmseg - INFO - Iter [60800/80000] lr: 3.446e-07, eta: 12:24:21, time: 1.786, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0530, decode.loss_mask: 0.1923, decode.loss_dice: 0.5219, decode.d0.loss_cls: 0.2964, decode.d0.loss_mask: 0.1987, decode.d0.loss_dice: 0.5449, decode.d1.loss_cls: 0.0722, decode.d1.loss_mask: 0.1933, decode.d1.loss_dice: 0.5286, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 0.1938, decode.d2.loss_dice: 0.5265, decode.d3.loss_cls: 0.0660, decode.d3.loss_mask: 0.1925, decode.d3.loss_dice: 0.5228, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1922, decode.d4.loss_dice: 0.5168, decode.d5.loss_cls: 0.0608, decode.d5.loss_mask: 0.1925, decode.d5.loss_dice: 0.5171, decode.d6.loss_cls: 0.0560, decode.d6.loss_mask: 0.1926, decode.d6.loss_dice: 0.5169, decode.d7.loss_cls: 0.0581, decode.d7.loss_mask: 0.1924, decode.d7.loss_dice: 0.5199, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1924, decode.d8.loss_dice: 0.5240, loss: 8.0173 +2022-05-11 03:18:33,574 - mmseg - INFO - Iter [60850/80000] lr: 3.437e-07, eta: 12:21:51, time: 1.866, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0470, decode.loss_mask: 0.1830, decode.loss_dice: 0.5047, decode.d0.loss_cls: 0.2914, decode.d0.loss_mask: 0.1909, decode.d0.loss_dice: 0.5320, decode.d1.loss_cls: 0.0746, decode.d1.loss_mask: 0.1847, decode.d1.loss_dice: 0.5149, decode.d2.loss_cls: 0.0638, decode.d2.loss_mask: 0.1845, decode.d2.loss_dice: 0.5106, decode.d3.loss_cls: 0.0543, decode.d3.loss_mask: 0.1836, decode.d3.loss_dice: 0.5025, decode.d4.loss_cls: 0.0538, decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.5022, decode.d5.loss_cls: 0.0519, decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.5003, decode.d6.loss_cls: 0.0498, decode.d6.loss_mask: 0.1832, decode.d6.loss_dice: 0.5037, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1832, decode.d7.loss_dice: 0.5033, decode.d8.loss_cls: 0.0539, decode.d8.loss_mask: 
0.1830, decode.d8.loss_dice: 0.4994, loss: 7.7117 +2022-05-11 03:20:05,834 - mmseg - INFO - Iter [60900/80000] lr: 3.428e-07, eta: 12:19:19, time: 1.845, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1887, decode.loss_dice: 0.5023, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.1948, decode.d0.loss_dice: 0.5243, decode.d1.loss_cls: 0.0667, decode.d1.loss_mask: 0.1909, decode.d1.loss_dice: 0.5126, decode.d2.loss_cls: 0.0688, decode.d2.loss_mask: 0.1895, decode.d2.loss_dice: 0.5073, decode.d3.loss_cls: 0.0628, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5040, decode.d4.loss_cls: 0.0600, decode.d4.loss_mask: 0.1891, decode.d4.loss_dice: 0.5044, decode.d5.loss_cls: 0.0646, decode.d5.loss_mask: 0.1891, decode.d5.loss_dice: 0.5029, decode.d6.loss_cls: 0.0573, decode.d6.loss_mask: 0.1888, decode.d6.loss_dice: 0.5017, decode.d7.loss_cls: 0.0563, decode.d7.loss_mask: 0.1890, decode.d7.loss_dice: 0.4984, decode.d8.loss_cls: 0.0576, decode.d8.loss_mask: 0.1887, decode.d8.loss_dice: 0.5043, loss: 7.8074 +2022-05-11 03:21:36,893 - mmseg - INFO - Iter [60950/80000] lr: 3.419e-07, eta: 12:16:46, time: 1.821, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0525, decode.loss_mask: 0.1885, decode.loss_dice: 0.5020, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1959, decode.d0.loss_dice: 0.5266, decode.d1.loss_cls: 0.0623, decode.d1.loss_mask: 0.1895, decode.d1.loss_dice: 0.5117, decode.d2.loss_cls: 0.0540, decode.d2.loss_mask: 0.1885, decode.d2.loss_dice: 0.5081, decode.d3.loss_cls: 0.0518, decode.d3.loss_mask: 0.1884, decode.d3.loss_dice: 0.5042, decode.d4.loss_cls: 0.0525, decode.d4.loss_mask: 0.1889, decode.d4.loss_dice: 0.5059, decode.d5.loss_cls: 0.0535, decode.d5.loss_mask: 0.1894, decode.d5.loss_dice: 0.5062, decode.d6.loss_cls: 0.0521, decode.d6.loss_mask: 0.1889, decode.d6.loss_dice: 0.5026, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1890, decode.d7.loss_dice: 0.5060, decode.d8.loss_cls: 0.0485, 
decode.d8.loss_mask: 0.1885, decode.d8.loss_dice: 0.5025, loss: 7.7471 +2022-05-11 03:23:07,951 - mmseg - INFO - Saving checkpoint at 61000 iterations +2022-05-11 03:23:39,840 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 03:23:39,848 - mmseg - INFO - Iter [61000/80000] lr: 3.410e-07, eta: 12:15:00, time: 2.457, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0571, decode.loss_mask: 0.1821, decode.loss_dice: 0.4988, decode.d0.loss_cls: 0.2900, decode.d0.loss_mask: 0.1885, decode.d0.loss_dice: 0.5164, decode.d1.loss_cls: 0.0716, decode.d1.loss_mask: 0.1835, decode.d1.loss_dice: 0.5004, decode.d2.loss_cls: 0.0644, decode.d2.loss_mask: 0.1828, decode.d2.loss_dice: 0.5000, decode.d3.loss_cls: 0.0585, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.4974, decode.d4.loss_cls: 0.0571, decode.d4.loss_mask: 0.1832, decode.d4.loss_dice: 0.4942, decode.d5.loss_cls: 0.0585, decode.d5.loss_mask: 0.1829, decode.d5.loss_dice: 0.4943, decode.d6.loss_cls: 0.0605, decode.d6.loss_mask: 0.1825, decode.d6.loss_dice: 0.4958, decode.d7.loss_cls: 0.0570, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.4938, decode.d8.loss_cls: 0.0560, decode.d8.loss_mask: 0.1822, decode.d8.loss_dice: 0.4942, loss: 7.6491 +2022-05-11 03:25:35,566 - mmseg - INFO - per class results: +2022-05-11 03:25:35,571 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.57 | 99.18 | +| sidewalk | 88.35 | 94.0 | +| building | 94.43 | 97.1 | +| wall | 70.3 | 80.36 | +| fence | 74.57 | 81.47 | +| pole | 71.54 | 84.19 | +| traffic light | 77.33 | 88.19 | +| traffic sign | 84.19 | 90.42 | +| vegetation | 93.43 | 96.83 | +| terrain | 69.09 | 78.58 | +| sky | 95.8 | 98.47 | +| person | 86.98 | 93.99 | +| rider | 74.43 | 84.99 | +| car | 96.21 | 98.34 | +| truck | 82.5 | 95.1 | +| bus | 93.69 | 96.73 | +| train | 87.92 | 90.71 | +| motorcycle | 77.36 | 87.57 | +| bicycle | 82.79 | 91.79 | 
++---------------+-------+-------+ +2022-05-11 03:25:35,571 - mmseg - INFO - Summary: +2022-05-11 03:25:35,571 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 84.18 | 90.95 | ++-------+-------+-------+ +2022-05-11 03:25:35,574 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 03:25:35,574 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8418, mAcc: 0.9095, IoU.road: 0.9857, IoU.sidewalk: 0.8835, IoU.building: 0.9443, IoU.wall: 0.7030, IoU.fence: 0.7457, IoU.pole: 0.7154, IoU.traffic light: 0.7733, IoU.traffic sign: 0.8419, IoU.vegetation: 0.9343, IoU.terrain: 0.6909, IoU.sky: 0.9580, IoU.person: 0.8698, IoU.rider: 0.7443, IoU.car: 0.9621, IoU.truck: 0.8250, IoU.bus: 0.9369, IoU.train: 0.8792, IoU.motorcycle: 0.7736, IoU.bicycle: 0.8279, Acc.road: 0.9918, Acc.sidewalk: 0.9400, Acc.building: 0.9710, Acc.wall: 0.8036, Acc.fence: 0.8147, Acc.pole: 0.8419, Acc.traffic light: 0.8819, Acc.traffic sign: 0.9042, Acc.vegetation: 0.9683, Acc.terrain: 0.7858, Acc.sky: 0.9847, Acc.person: 0.9399, Acc.rider: 0.8499, Acc.car: 0.9834, Acc.truck: 0.9510, Acc.bus: 0.9673, Acc.train: 0.9071, Acc.motorcycle: 0.8757, Acc.bicycle: 0.9179 +2022-05-11 03:27:10,053 - mmseg - INFO - Iter [61050/80000] lr: 3.401e-07, eta: 12:15:21, time: 4.206, data_time: 2.384, memory: 69063, decode.loss_cls: 0.0532, decode.loss_mask: 0.1854, decode.loss_dice: 0.5026, decode.d0.loss_cls: 0.2934, decode.d0.loss_mask: 0.1922, decode.d0.loss_dice: 0.5294, decode.d1.loss_cls: 0.0694, decode.d1.loss_mask: 0.1869, decode.d1.loss_dice: 0.5111, decode.d2.loss_cls: 0.0608, decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.5071, decode.d3.loss_cls: 0.0597, decode.d3.loss_mask: 0.1850, decode.d3.loss_dice: 0.5021, decode.d4.loss_cls: 0.0557, decode.d4.loss_mask: 0.1854, decode.d4.loss_dice: 0.5055, decode.d5.loss_cls: 0.0602, decode.d5.loss_mask: 0.1853, decode.d5.loss_dice: 0.5032, decode.d6.loss_cls: 0.0595, 
decode.d6.loss_mask: 0.1848, decode.d6.loss_dice: 0.4994, decode.d7.loss_cls: 0.0586, decode.d7.loss_mask: 0.1853, decode.d7.loss_dice: 0.4992, decode.d8.loss_cls: 0.0524, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.5006, loss: 7.7445 +2022-05-11 03:28:40,406 - mmseg - INFO - Iter [61100/80000] lr: 3.392e-07, eta: 12:12:47, time: 1.807, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0477, decode.loss_mask: 0.1859, decode.loss_dice: 0.5072, decode.d0.loss_cls: 0.2920, decode.d0.loss_mask: 0.1936, decode.d0.loss_dice: 0.5312, decode.d1.loss_cls: 0.0703, decode.d1.loss_mask: 0.1893, decode.d1.loss_dice: 0.5214, decode.d2.loss_cls: 0.0639, decode.d2.loss_mask: 0.1875, decode.d2.loss_dice: 0.5151, decode.d3.loss_cls: 0.0536, decode.d3.loss_mask: 0.1867, decode.d3.loss_dice: 0.5129, decode.d4.loss_cls: 0.0567, decode.d4.loss_mask: 0.1867, decode.d4.loss_dice: 0.5126, decode.d5.loss_cls: 0.0549, decode.d5.loss_mask: 0.1864, decode.d5.loss_dice: 0.5132, decode.d6.loss_cls: 0.0509, decode.d6.loss_mask: 0.1864, decode.d6.loss_dice: 0.5100, decode.d7.loss_cls: 0.0510, decode.d7.loss_mask: 0.1866, decode.d7.loss_dice: 0.5106, decode.d8.loss_cls: 0.0525, decode.d8.loss_mask: 0.1859, decode.d8.loss_dice: 0.5103, loss: 7.8131 +2022-05-11 03:30:10,907 - mmseg - INFO - Iter [61150/80000] lr: 3.383e-07, eta: 12:10:13, time: 1.810, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0501, decode.loss_mask: 0.1838, decode.loss_dice: 0.5063, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 0.1918, decode.d0.loss_dice: 0.5199, decode.d1.loss_cls: 0.0625, decode.d1.loss_mask: 0.1859, decode.d1.loss_dice: 0.5118, decode.d2.loss_cls: 0.0561, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.5063, decode.d3.loss_cls: 0.0485, decode.d3.loss_mask: 0.1835, decode.d3.loss_dice: 0.5007, decode.d4.loss_cls: 0.0532, decode.d4.loss_mask: 0.1831, decode.d4.loss_dice: 0.5016, decode.d5.loss_cls: 0.0474, decode.d5.loss_mask: 0.1837, decode.d5.loss_dice: 0.5049, decode.d6.loss_cls: 
0.0442, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.5014, decode.d7.loss_cls: 0.0477, decode.d7.loss_mask: 0.1835, decode.d7.loss_dice: 0.4988, decode.d8.loss_cls: 0.0495, decode.d8.loss_mask: 0.1829, decode.d8.loss_dice: 0.5021, loss: 7.6515 +2022-05-11 03:31:39,860 - mmseg - INFO - Iter [61200/80000] lr: 3.374e-07, eta: 12:07:38, time: 1.779, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0544, decode.loss_mask: 0.1846, decode.loss_dice: 0.5132, decode.d0.loss_cls: 0.2936, decode.d0.loss_mask: 0.1908, decode.d0.loss_dice: 0.5353, decode.d1.loss_cls: 0.0680, decode.d1.loss_mask: 0.1861, decode.d1.loss_dice: 0.5199, decode.d2.loss_cls: 0.0656, decode.d2.loss_mask: 0.1855, decode.d2.loss_dice: 0.5219, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1853, decode.d3.loss_dice: 0.5130, decode.d4.loss_cls: 0.0600, decode.d4.loss_mask: 0.1852, decode.d4.loss_dice: 0.5137, decode.d5.loss_cls: 0.0551, decode.d5.loss_mask: 0.1855, decode.d5.loss_dice: 0.5162, decode.d6.loss_cls: 0.0565, decode.d6.loss_mask: 0.1848, decode.d6.loss_dice: 0.5141, decode.d7.loss_cls: 0.0646, decode.d7.loss_mask: 0.1850, decode.d7.loss_dice: 0.5142, decode.d8.loss_cls: 0.0564, decode.d8.loss_mask: 0.1851, decode.d8.loss_dice: 0.5118, loss: 7.8631 +2022-05-11 03:33:11,906 - mmseg - INFO - Iter [61250/80000] lr: 3.365e-07, eta: 12:05:08, time: 1.841, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0659, decode.loss_mask: 0.1937, decode.loss_dice: 0.5132, decode.d0.loss_cls: 0.3018, decode.d0.loss_mask: 0.2024, decode.d0.loss_dice: 0.5430, decode.d1.loss_cls: 0.0754, decode.d1.loss_mask: 0.1959, decode.d1.loss_dice: 0.5276, decode.d2.loss_cls: 0.0671, decode.d2.loss_mask: 0.1945, decode.d2.loss_dice: 0.5215, decode.d3.loss_cls: 0.0630, decode.d3.loss_mask: 0.1942, decode.d3.loss_dice: 0.5175, decode.d4.loss_cls: 0.0654, decode.d4.loss_mask: 0.1934, decode.d4.loss_dice: 0.5162, decode.d5.loss_cls: 0.0639, decode.d5.loss_mask: 0.1934, decode.d5.loss_dice: 0.5229, 
decode.d6.loss_cls: 0.0603, decode.d6.loss_mask: 0.1928, decode.d6.loss_dice: 0.5161, decode.d7.loss_cls: 0.0586, decode.d7.loss_mask: 0.1931, decode.d7.loss_dice: 0.5169, decode.d8.loss_cls: 0.0609, decode.d8.loss_mask: 0.1934, decode.d8.loss_dice: 0.5184, loss: 8.0424 +2022-05-11 03:34:42,571 - mmseg - INFO - Iter [61300/80000] lr: 3.356e-07, eta: 12:02:36, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0577, decode.loss_mask: 0.1868, decode.loss_dice: 0.5027, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 0.1940, decode.d0.loss_dice: 0.5252, decode.d1.loss_cls: 0.0752, decode.d1.loss_mask: 0.1878, decode.d1.loss_dice: 0.5127, decode.d2.loss_cls: 0.0681, decode.d2.loss_mask: 0.1868, decode.d2.loss_dice: 0.5101, decode.d3.loss_cls: 0.0623, decode.d3.loss_mask: 0.1859, decode.d3.loss_dice: 0.5063, decode.d4.loss_cls: 0.0602, decode.d4.loss_mask: 0.1865, decode.d4.loss_dice: 0.5057, decode.d5.loss_cls: 0.0605, decode.d5.loss_mask: 0.1867, decode.d5.loss_dice: 0.5029, decode.d6.loss_cls: 0.0555, decode.d6.loss_mask: 0.1867, decode.d6.loss_dice: 0.5050, decode.d7.loss_cls: 0.0592, decode.d7.loss_mask: 0.1868, decode.d7.loss_dice: 0.5058, decode.d8.loss_cls: 0.0593, decode.d8.loss_mask: 0.1864, decode.d8.loss_dice: 0.5015, loss: 7.8069 +2022-05-11 03:36:12,873 - mmseg - INFO - Iter [61350/80000] lr: 3.347e-07, eta: 12:00:05, time: 1.806, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0516, decode.loss_mask: 0.1803, decode.loss_dice: 0.4965, decode.d0.loss_cls: 0.2948, decode.d0.loss_mask: 0.1867, decode.d0.loss_dice: 0.5159, decode.d1.loss_cls: 0.0724, decode.d1.loss_mask: 0.1815, decode.d1.loss_dice: 0.5081, decode.d2.loss_cls: 0.0629, decode.d2.loss_mask: 0.1809, decode.d2.loss_dice: 0.5025, decode.d3.loss_cls: 0.0591, decode.d3.loss_mask: 0.1803, decode.d3.loss_dice: 0.4963, decode.d4.loss_cls: 0.0519, decode.d4.loss_mask: 0.1811, decode.d4.loss_dice: 0.4980, decode.d5.loss_cls: 0.0521, decode.d5.loss_mask: 0.1811, decode.d5.loss_dice: 
0.4996, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.4931, decode.d7.loss_cls: 0.0476, decode.d7.loss_mask: 0.1805, decode.d7.loss_dice: 0.4956, decode.d8.loss_cls: 0.0510, decode.d8.loss_mask: 0.1804, decode.d8.loss_dice: 0.4999, loss: 7.6144 +2022-05-11 03:37:45,885 - mmseg - INFO - Iter [61400/80000] lr: 3.338e-07, eta: 11:57:37, time: 1.860, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0529, decode.loss_mask: 0.1849, decode.loss_dice: 0.5031, decode.d0.loss_cls: 0.2968, decode.d0.loss_mask: 0.1929, decode.d0.loss_dice: 0.5277, decode.d1.loss_cls: 0.0714, decode.d1.loss_mask: 0.1864, decode.d1.loss_dice: 0.5125, decode.d2.loss_cls: 0.0652, decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.5082, decode.d3.loss_cls: 0.0571, decode.d3.loss_mask: 0.1856, decode.d3.loss_dice: 0.5094, decode.d4.loss_cls: 0.0519, decode.d4.loss_mask: 0.1857, decode.d4.loss_dice: 0.5050, decode.d5.loss_cls: 0.0584, decode.d5.loss_mask: 0.1855, decode.d5.loss_dice: 0.5081, decode.d6.loss_cls: 0.0527, decode.d6.loss_mask: 0.1852, decode.d6.loss_dice: 0.5060, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.1851, decode.d7.loss_dice: 0.5077, decode.d8.loss_cls: 0.0563, decode.d8.loss_mask: 0.1851, decode.d8.loss_dice: 0.5009, loss: 7.7685 +2022-05-11 03:39:16,657 - mmseg - INFO - Iter [61450/80000] lr: 3.329e-07, eta: 11:55:07, time: 1.815, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0548, decode.loss_mask: 0.1836, decode.loss_dice: 0.5123, decode.d0.loss_cls: 0.2948, decode.d0.loss_mask: 0.1914, decode.d0.loss_dice: 0.5400, decode.d1.loss_cls: 0.0771, decode.d1.loss_mask: 0.1856, decode.d1.loss_dice: 0.5212, decode.d2.loss_cls: 0.0724, decode.d2.loss_mask: 0.1848, decode.d2.loss_dice: 0.5211, decode.d3.loss_cls: 0.0602, decode.d3.loss_mask: 0.1843, decode.d3.loss_dice: 0.5166, decode.d4.loss_cls: 0.0586, decode.d4.loss_mask: 0.1843, decode.d4.loss_dice: 0.5174, decode.d5.loss_cls: 0.0606, decode.d5.loss_mask: 0.1845, 
decode.d5.loss_dice: 0.5151, decode.d6.loss_cls: 0.0563, decode.d6.loss_mask: 0.1840, decode.d6.loss_dice: 0.5125, decode.d7.loss_cls: 0.0582, decode.d7.loss_mask: 0.1841, decode.d7.loss_dice: 0.5163, decode.d8.loss_cls: 0.0565, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.5113, loss: 7.8841 +2022-05-11 03:40:47,656 - mmseg - INFO - Iter [61500/80000] lr: 3.320e-07, eta: 11:52:38, time: 1.820, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0527, decode.loss_mask: 0.1869, decode.loss_dice: 0.5018, decode.d0.loss_cls: 0.2946, decode.d0.loss_mask: 0.1945, decode.d0.loss_dice: 0.5252, decode.d1.loss_cls: 0.0772, decode.d1.loss_mask: 0.1882, decode.d1.loss_dice: 0.5097, decode.d2.loss_cls: 0.0668, decode.d2.loss_mask: 0.1877, decode.d2.loss_dice: 0.5086, decode.d3.loss_cls: 0.0577, decode.d3.loss_mask: 0.1869, decode.d3.loss_dice: 0.5036, decode.d4.loss_cls: 0.0524, decode.d4.loss_mask: 0.1872, decode.d4.loss_dice: 0.5029, decode.d5.loss_cls: 0.0584, decode.d5.loss_mask: 0.1878, decode.d5.loss_dice: 0.5078, decode.d6.loss_cls: 0.0547, decode.d6.loss_mask: 0.1873, decode.d6.loss_dice: 0.5104, decode.d7.loss_cls: 0.0559, decode.d7.loss_mask: 0.1875, decode.d7.loss_dice: 0.5035, decode.d8.loss_cls: 0.0597, decode.d8.loss_mask: 0.1874, decode.d8.loss_dice: 0.5034, loss: 7.7884 +2022-05-11 03:42:18,871 - mmseg - INFO - Iter [61550/80000] lr: 3.311e-07, eta: 11:50:09, time: 1.824, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0541, decode.loss_mask: 0.1832, decode.loss_dice: 0.5060, decode.d0.loss_cls: 0.2963, decode.d0.loss_mask: 0.1934, decode.d0.loss_dice: 0.5360, decode.d1.loss_cls: 0.0728, decode.d1.loss_mask: 0.1851, decode.d1.loss_dice: 0.5148, decode.d2.loss_cls: 0.0735, decode.d2.loss_mask: 0.1841, decode.d2.loss_dice: 0.5116, decode.d3.loss_cls: 0.0624, decode.d3.loss_mask: 0.1838, decode.d3.loss_dice: 0.5091, decode.d4.loss_cls: 0.0590, decode.d4.loss_mask: 0.1837, decode.d4.loss_dice: 0.5040, decode.d5.loss_cls: 0.0581, decode.d5.loss_mask: 
0.1839, decode.d5.loss_dice: 0.5088, decode.d6.loss_cls: 0.0555, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.5087, decode.d7.loss_cls: 0.0582, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.5070, decode.d8.loss_cls: 0.0557, decode.d8.loss_mask: 0.1832, decode.d8.loss_dice: 0.5052, loss: 7.8037 +2022-05-11 03:43:52,060 - mmseg - INFO - Iter [61600/80000] lr: 3.303e-07, eta: 11:47:43, time: 1.864, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0470, decode.loss_mask: 0.1792, decode.loss_dice: 0.5034, decode.d0.loss_cls: 0.3002, decode.d0.loss_mask: 0.1849, decode.d0.loss_dice: 0.5282, decode.d1.loss_cls: 0.0575, decode.d1.loss_mask: 0.1812, decode.d1.loss_dice: 0.5116, decode.d2.loss_cls: 0.0666, decode.d2.loss_mask: 0.1802, decode.d2.loss_dice: 0.5104, decode.d3.loss_cls: 0.0533, decode.d3.loss_mask: 0.1800, decode.d3.loss_dice: 0.5053, decode.d4.loss_cls: 0.0546, decode.d4.loss_mask: 0.1796, decode.d4.loss_dice: 0.5076, decode.d5.loss_cls: 0.0566, decode.d5.loss_mask: 0.1795, decode.d5.loss_dice: 0.5065, decode.d6.loss_cls: 0.0564, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.5052, decode.d7.loss_cls: 0.0540, decode.d7.loss_mask: 0.1791, decode.d7.loss_dice: 0.5054, decode.d8.loss_cls: 0.0518, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 0.5070, loss: 7.6912 +2022-05-11 03:45:22,837 - mmseg - INFO - Iter [61650/80000] lr: 3.294e-07, eta: 11:45:15, time: 1.816, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0438, decode.loss_mask: 0.1869, decode.loss_dice: 0.4923, decode.d0.loss_cls: 0.2913, decode.d0.loss_mask: 0.1939, decode.d0.loss_dice: 0.5134, decode.d1.loss_cls: 0.0569, decode.d1.loss_mask: 0.1891, decode.d1.loss_dice: 0.4993, decode.d2.loss_cls: 0.0493, decode.d2.loss_mask: 0.1881, decode.d2.loss_dice: 0.4937, decode.d3.loss_cls: 0.0407, decode.d3.loss_mask: 0.1871, decode.d3.loss_dice: 0.4929, decode.d4.loss_cls: 0.0446, decode.d4.loss_mask: 0.1870, decode.d4.loss_dice: 0.4968, decode.d5.loss_cls: 0.0462, 
decode.d5.loss_mask: 0.1869, decode.d5.loss_dice: 0.4931, decode.d6.loss_cls: 0.0460, decode.d6.loss_mask: 0.1870, decode.d6.loss_dice: 0.4951, decode.d7.loss_cls: 0.0420, decode.d7.loss_mask: 0.1869, decode.d7.loss_dice: 0.4915, decode.d8.loss_cls: 0.0407, decode.d8.loss_mask: 0.1869, decode.d8.loss_dice: 0.4943, loss: 7.5436 +2022-05-11 03:46:52,695 - mmseg - INFO - Iter [61700/80000] lr: 3.285e-07, eta: 11:42:46, time: 1.797, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1840, decode.loss_dice: 0.5020, decode.d0.loss_cls: 0.2890, decode.d0.loss_mask: 0.1920, decode.d0.loss_dice: 0.5292, decode.d1.loss_cls: 0.0719, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.5161, decode.d2.loss_cls: 0.0594, decode.d2.loss_mask: 0.1848, decode.d2.loss_dice: 0.5091, decode.d3.loss_cls: 0.0588, decode.d3.loss_mask: 0.1840, decode.d3.loss_dice: 0.5065, decode.d4.loss_cls: 0.0549, decode.d4.loss_mask: 0.1844, decode.d4.loss_dice: 0.5081, decode.d5.loss_cls: 0.0512, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.5056, decode.d6.loss_cls: 0.0515, decode.d6.loss_mask: 0.1842, decode.d6.loss_dice: 0.5083, decode.d7.loss_cls: 0.0550, decode.d7.loss_mask: 0.1845, decode.d7.loss_dice: 0.5019, decode.d8.loss_cls: 0.0514, decode.d8.loss_mask: 0.1847, decode.d8.loss_dice: 0.5056, loss: 7.7382 +2022-05-11 03:48:21,481 - mmseg - INFO - Iter [61750/80000] lr: 3.276e-07, eta: 11:40:15, time: 1.775, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0546, decode.loss_mask: 0.1875, decode.loss_dice: 0.5017, decode.d0.loss_cls: 0.2935, decode.d0.loss_mask: 0.1958, decode.d0.loss_dice: 0.5261, decode.d1.loss_cls: 0.0652, decode.d1.loss_mask: 0.1896, decode.d1.loss_dice: 0.5141, decode.d2.loss_cls: 0.0608, decode.d2.loss_mask: 0.1890, decode.d2.loss_dice: 0.5072, decode.d3.loss_cls: 0.0534, decode.d3.loss_mask: 0.1880, decode.d3.loss_dice: 0.5063, decode.d4.loss_cls: 0.0553, decode.d4.loss_mask: 0.1881, decode.d4.loss_dice: 0.5091, decode.d5.loss_cls: 
0.0570, decode.d5.loss_mask: 0.1880, decode.d5.loss_dice: 0.5088, decode.d6.loss_cls: 0.0597, decode.d6.loss_mask: 0.1873, decode.d6.loss_dice: 0.5048, decode.d7.loss_cls: 0.0540, decode.d7.loss_mask: 0.1877, decode.d7.loss_dice: 0.5026, decode.d8.loss_cls: 0.0515, decode.d8.loss_mask: 0.1879, decode.d8.loss_dice: 0.5059, loss: 7.7802 +2022-05-11 03:49:52,742 - mmseg - INFO - Iter [61800/80000] lr: 3.267e-07, eta: 11:37:49, time: 1.825, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0538, decode.loss_mask: 0.1859, decode.loss_dice: 0.5021, decode.d0.loss_cls: 0.2911, decode.d0.loss_mask: 0.1930, decode.d0.loss_dice: 0.5265, decode.d1.loss_cls: 0.0708, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5124, decode.d2.loss_cls: 0.0644, decode.d2.loss_mask: 0.1862, decode.d2.loss_dice: 0.5143, decode.d3.loss_cls: 0.0595, decode.d3.loss_mask: 0.1861, decode.d3.loss_dice: 0.5059, decode.d4.loss_cls: 0.0581, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5044, decode.d5.loss_cls: 0.0580, decode.d5.loss_mask: 0.1859, decode.d5.loss_dice: 0.5023, decode.d6.loss_cls: 0.0513, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.5050, decode.d7.loss_cls: 0.0574, decode.d7.loss_mask: 0.1853, decode.d7.loss_dice: 0.5006, decode.d8.loss_cls: 0.0574, decode.d8.loss_mask: 0.1858, decode.d8.loss_dice: 0.5022, loss: 7.7646 +2022-05-11 03:51:22,589 - mmseg - INFO - Iter [61850/80000] lr: 3.258e-07, eta: 11:35:21, time: 1.797, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0463, decode.loss_mask: 0.1838, decode.loss_dice: 0.4980, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1905, decode.d0.loss_dice: 0.5173, decode.d1.loss_cls: 0.0555, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5055, decode.d2.loss_cls: 0.0551, decode.d2.loss_mask: 0.1846, decode.d2.loss_dice: 0.5065, decode.d3.loss_cls: 0.0515, decode.d3.loss_mask: 0.1850, decode.d3.loss_dice: 0.4986, decode.d4.loss_cls: 0.0548, decode.d4.loss_mask: 0.1843, decode.d4.loss_dice: 0.5026, 
decode.d5.loss_cls: 0.0492, decode.d5.loss_mask: 0.1843, decode.d5.loss_dice: 0.5004, decode.d6.loss_cls: 0.0480, decode.d6.loss_mask: 0.1842, decode.d6.loss_dice: 0.5015, decode.d7.loss_cls: 0.0536, decode.d7.loss_mask: 0.1838, decode.d7.loss_dice: 0.4987, decode.d8.loss_cls: 0.0538, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4967, loss: 7.6389 +2022-05-11 03:52:52,676 - mmseg - INFO - Iter [61900/80000] lr: 3.249e-07, eta: 11:32:53, time: 1.802, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0457, decode.loss_mask: 0.1883, decode.loss_dice: 0.4994, decode.d0.loss_cls: 0.2934, decode.d0.loss_mask: 0.1970, decode.d0.loss_dice: 0.5215, decode.d1.loss_cls: 0.0698, decode.d1.loss_mask: 0.1899, decode.d1.loss_dice: 0.5077, decode.d2.loss_cls: 0.0588, decode.d2.loss_mask: 0.1891, decode.d2.loss_dice: 0.5052, decode.d3.loss_cls: 0.0565, decode.d3.loss_mask: 0.1885, decode.d3.loss_dice: 0.4999, decode.d4.loss_cls: 0.0497, decode.d4.loss_mask: 0.1891, decode.d4.loss_dice: 0.5012, decode.d5.loss_cls: 0.0515, decode.d5.loss_mask: 0.1887, decode.d5.loss_dice: 0.5013, decode.d6.loss_cls: 0.0504, decode.d6.loss_mask: 0.1886, decode.d6.loss_dice: 0.5003, decode.d7.loss_cls: 0.0515, decode.d7.loss_mask: 0.1887, decode.d7.loss_dice: 0.4981, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1885, decode.d8.loss_dice: 0.5009, loss: 7.7081 +2022-05-11 03:54:25,585 - mmseg - INFO - Iter [61950/80000] lr: 3.240e-07, eta: 11:30:30, time: 1.858, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0435, decode.loss_mask: 0.1855, decode.loss_dice: 0.5052, decode.d0.loss_cls: 0.2946, decode.d0.loss_mask: 0.1924, decode.d0.loss_dice: 0.5226, decode.d1.loss_cls: 0.0583, decode.d1.loss_mask: 0.1869, decode.d1.loss_dice: 0.5150, decode.d2.loss_cls: 0.0546, decode.d2.loss_mask: 0.1864, decode.d2.loss_dice: 0.5095, decode.d3.loss_cls: 0.0497, decode.d3.loss_mask: 0.1862, decode.d3.loss_dice: 0.5030, decode.d4.loss_cls: 0.0466, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 
0.5049, decode.d5.loss_cls: 0.0457, decode.d5.loss_mask: 0.1865, decode.d5.loss_dice: 0.5064, decode.d6.loss_cls: 0.0497, decode.d6.loss_mask: 0.1864, decode.d6.loss_dice: 0.5040, decode.d7.loss_cls: 0.0450, decode.d7.loss_mask: 0.1862, decode.d7.loss_dice: 0.5032, decode.d8.loss_cls: 0.0508, decode.d8.loss_mask: 0.1857, decode.d8.loss_dice: 0.5039, loss: 7.6844 +2022-05-11 03:55:54,946 - mmseg - INFO - Saving checkpoint at 62000 iterations +2022-05-11 03:56:24,905 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 03:56:24,908 - mmseg - INFO - Iter [62000/80000] lr: 3.231e-07, eta: 11:28:41, time: 2.384, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0495, decode.loss_mask: 0.1826, decode.loss_dice: 0.4979, decode.d0.loss_cls: 0.2872, decode.d0.loss_mask: 0.1874, decode.d0.loss_dice: 0.5229, decode.d1.loss_cls: 0.0655, decode.d1.loss_mask: 0.1835, decode.d1.loss_dice: 0.5084, decode.d2.loss_cls: 0.0588, decode.d2.loss_mask: 0.1828, decode.d2.loss_dice: 0.5052, decode.d3.loss_cls: 0.0537, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.4980, decode.d4.loss_cls: 0.0545, decode.d4.loss_mask: 0.1828, decode.d4.loss_dice: 0.4970, decode.d5.loss_cls: 0.0506, decode.d5.loss_mask: 0.1828, decode.d5.loss_dice: 0.4995, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1828, decode.d6.loss_dice: 0.4973, decode.d7.loss_cls: 0.0517, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.5002, decode.d8.loss_cls: 0.0524, decode.d8.loss_mask: 0.1830, decode.d8.loss_dice: 0.4996, loss: 7.6344 +2022-05-11 03:58:20,634 - mmseg - INFO - per class results: +2022-05-11 03:58:20,639 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.53 | 99.18 | +| sidewalk | 88.18 | 93.83 | +| building | 94.38 | 97.03 | +| wall | 69.72 | 80.01 | +| fence | 74.27 | 81.19 | +| pole | 71.56 | 83.57 | +| traffic light | 77.22 | 88.24 | +| traffic sign | 83.85 | 90.01 | +| vegetation | 
93.26 | 96.84 | +| terrain | 66.82 | 78.4 | +| sky | 95.77 | 98.4 | +| person | 86.88 | 93.49 | +| rider | 74.33 | 85.36 | +| car | 95.65 | 98.34 | +| truck | 92.3 | 94.75 | +| bus | 93.57 | 96.55 | +| train | 87.63 | 90.41 | +| motorcycle | 77.55 | 87.06 | +| bicycle | 82.74 | 91.5 | ++---------------+-------+-------+ +2022-05-11 03:58:20,639 - mmseg - INFO - Summary: +2022-05-11 03:58:20,639 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.96 | 84.43 | 90.75 | ++-------+-------+-------+ +2022-05-11 03:58:20,642 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 03:58:20,642 - mmseg - INFO - Iter(val) [32] aAcc: 0.9696, mIoU: 0.8443, mAcc: 0.9075, IoU.road: 0.9853, IoU.sidewalk: 0.8818, IoU.building: 0.9438, IoU.wall: 0.6972, IoU.fence: 0.7427, IoU.pole: 0.7156, IoU.traffic light: 0.7722, IoU.traffic sign: 0.8385, IoU.vegetation: 0.9326, IoU.terrain: 0.6682, IoU.sky: 0.9577, IoU.person: 0.8688, IoU.rider: 0.7433, IoU.car: 0.9565, IoU.truck: 0.9230, IoU.bus: 0.9357, IoU.train: 0.8763, IoU.motorcycle: 0.7755, IoU.bicycle: 0.8274, Acc.road: 0.9918, Acc.sidewalk: 0.9383, Acc.building: 0.9703, Acc.wall: 0.8001, Acc.fence: 0.8119, Acc.pole: 0.8357, Acc.traffic light: 0.8824, Acc.traffic sign: 0.9001, Acc.vegetation: 0.9684, Acc.terrain: 0.7840, Acc.sky: 0.9840, Acc.person: 0.9349, Acc.rider: 0.8536, Acc.car: 0.9834, Acc.truck: 0.9475, Acc.bus: 0.9655, Acc.train: 0.9041, Acc.motorcycle: 0.8706, Acc.bicycle: 0.9150 +2022-05-11 03:59:50,564 - mmseg - INFO - Iter [62050/80000] lr: 3.222e-07, eta: 11:28:43, time: 4.116, data_time: 2.333, memory: 69063, decode.loss_cls: 0.0544, decode.loss_mask: 0.1832, decode.loss_dice: 0.5136, decode.d0.loss_cls: 0.2960, decode.d0.loss_mask: 0.1897, decode.d0.loss_dice: 0.5424, decode.d1.loss_cls: 0.0702, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.5283, decode.d2.loss_cls: 0.0632, decode.d2.loss_mask: 0.1837, decode.d2.loss_dice: 0.5151, 
decode.d3.loss_cls: 0.0637, decode.d3.loss_mask: 0.1837, decode.d3.loss_dice: 0.5173, decode.d4.loss_cls: 0.0580, decode.d4.loss_mask: 0.1832, decode.d4.loss_dice: 0.5138, decode.d5.loss_cls: 0.0556, decode.d5.loss_mask: 0.1835, decode.d5.loss_dice: 0.5152, decode.d6.loss_cls: 0.0534, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.5156, decode.d7.loss_cls: 0.0604, decode.d7.loss_mask: 0.1835, decode.d7.loss_dice: 0.5173, decode.d8.loss_cls: 0.0624, decode.d8.loss_mask: 0.1832, decode.d8.loss_dice: 0.5184, loss: 7.8760 +2022-05-11 04:01:21,685 - mmseg - INFO - Iter [62100/80000] lr: 3.213e-07, eta: 11:26:17, time: 1.822, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0528, decode.loss_mask: 0.1843, decode.loss_dice: 0.5067, decode.d0.loss_cls: 0.2953, decode.d0.loss_mask: 0.1910, decode.d0.loss_dice: 0.5330, decode.d1.loss_cls: 0.0728, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5130, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1850, decode.d2.loss_dice: 0.5134, decode.d3.loss_cls: 0.0560, decode.d3.loss_mask: 0.1845, decode.d3.loss_dice: 0.5088, decode.d4.loss_cls: 0.0607, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.5094, decode.d5.loss_cls: 0.0543, decode.d5.loss_mask: 0.1847, decode.d5.loss_dice: 0.5063, decode.d6.loss_cls: 0.0509, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.5062, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1843, decode.d7.loss_dice: 0.5049, decode.d8.loss_cls: 0.0499, decode.d8.loss_mask: 0.1843, decode.d8.loss_dice: 0.5079, loss: 7.7703 +2022-05-11 04:02:54,829 - mmseg - INFO - Iter [62150/80000] lr: 3.204e-07, eta: 11:23:54, time: 1.863, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0456, decode.loss_mask: 0.1835, decode.loss_dice: 0.5067, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1892, decode.d0.loss_dice: 0.5235, decode.d1.loss_cls: 0.0627, decode.d1.loss_mask: 0.1850, decode.d1.loss_dice: 0.5119, decode.d2.loss_cls: 0.0574, decode.d2.loss_mask: 0.1835, decode.d2.loss_dice: 
0.5069, decode.d3.loss_cls: 0.0489, decode.d3.loss_mask: 0.1839, decode.d3.loss_dice: 0.5000, decode.d4.loss_cls: 0.0483, decode.d4.loss_mask: 0.1832, decode.d4.loss_dice: 0.5033, decode.d5.loss_cls: 0.0439, decode.d5.loss_mask: 0.1831, decode.d5.loss_dice: 0.5005, decode.d6.loss_cls: 0.0431, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.4994, decode.d7.loss_cls: 0.0452, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.4967, decode.d8.loss_cls: 0.0451, decode.d8.loss_mask: 0.1831, decode.d8.loss_dice: 0.5009, loss: 7.6319 +2022-05-11 04:04:24,202 - mmseg - INFO - Iter [62200/80000] lr: 3.195e-07, eta: 11:21:27, time: 1.787, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0517, decode.loss_mask: 0.1855, decode.loss_dice: 0.5113, decode.d0.loss_cls: 0.2884, decode.d0.loss_mask: 0.1927, decode.d0.loss_dice: 0.5327, decode.d1.loss_cls: 0.0749, decode.d1.loss_mask: 0.1868, decode.d1.loss_dice: 0.5201, decode.d2.loss_cls: 0.0679, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.5141, decode.d3.loss_cls: 0.0571, decode.d3.loss_mask: 0.1861, decode.d3.loss_dice: 0.5136, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5134, decode.d5.loss_cls: 0.0555, decode.d5.loss_mask: 0.1856, decode.d5.loss_dice: 0.5108, decode.d6.loss_cls: 0.0552, decode.d6.loss_mask: 0.1857, decode.d6.loss_dice: 0.5117, decode.d7.loss_cls: 0.0598, decode.d7.loss_mask: 0.1856, decode.d7.loss_dice: 0.5106, decode.d8.loss_cls: 0.0529, decode.d8.loss_mask: 0.1856, decode.d8.loss_dice: 0.5115, loss: 7.8380 +2022-05-11 04:05:53,976 - mmseg - INFO - Iter [62250/80000] lr: 3.186e-07, eta: 11:19:01, time: 1.796, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0534, decode.loss_mask: 0.1773, decode.loss_dice: 0.4964, decode.d0.loss_cls: 0.2936, decode.d0.loss_mask: 0.1838, decode.d0.loss_dice: 0.5209, decode.d1.loss_cls: 0.0734, decode.d1.loss_mask: 0.1771, decode.d1.loss_dice: 0.5076, decode.d2.loss_cls: 0.0632, decode.d2.loss_mask: 0.1771, 
decode.d2.loss_dice: 0.5019, decode.d3.loss_cls: 0.0574, decode.d3.loss_mask: 0.1768, decode.d3.loss_dice: 0.4992, decode.d4.loss_cls: 0.0563, decode.d4.loss_mask: 0.1768, decode.d4.loss_dice: 0.4983, decode.d5.loss_cls: 0.0568, decode.d5.loss_mask: 0.1770, decode.d5.loss_dice: 0.4977, decode.d6.loss_cls: 0.0516, decode.d6.loss_mask: 0.1766, decode.d6.loss_dice: 0.4946, decode.d7.loss_cls: 0.0514, decode.d7.loss_mask: 0.1769, decode.d7.loss_dice: 0.4981, decode.d8.loss_cls: 0.0547, decode.d8.loss_mask: 0.1764, decode.d8.loss_dice: 0.4945, loss: 7.5970 +2022-05-11 04:07:25,317 - mmseg - INFO - Iter [62300/80000] lr: 3.177e-07, eta: 11:16:38, time: 1.827, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0662, decode.loss_mask: 0.1822, decode.loss_dice: 0.5079, decode.d0.loss_cls: 0.3093, decode.d0.loss_mask: 0.1895, decode.d0.loss_dice: 0.5363, decode.d1.loss_cls: 0.0887, decode.d1.loss_mask: 0.1841, decode.d1.loss_dice: 0.5183, decode.d2.loss_cls: 0.0758, decode.d2.loss_mask: 0.1833, decode.d2.loss_dice: 0.5139, decode.d3.loss_cls: 0.0695, decode.d3.loss_mask: 0.1826, decode.d3.loss_dice: 0.5124, decode.d4.loss_cls: 0.0703, decode.d4.loss_mask: 0.1821, decode.d4.loss_dice: 0.5148, decode.d5.loss_cls: 0.0671, decode.d5.loss_mask: 0.1825, decode.d5.loss_dice: 0.5084, decode.d6.loss_cls: 0.0670, decode.d6.loss_mask: 0.1818, decode.d6.loss_dice: 0.5106, decode.d7.loss_cls: 0.0742, decode.d7.loss_mask: 0.1819, decode.d7.loss_dice: 0.5094, decode.d8.loss_cls: 0.0697, decode.d8.loss_mask: 0.1822, decode.d8.loss_dice: 0.5108, loss: 7.9328 +2022-05-11 04:08:57,070 - mmseg - INFO - Iter [62350/80000] lr: 3.168e-07, eta: 11:14:15, time: 1.834, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0500, decode.loss_mask: 0.1898, decode.loss_dice: 0.5038, decode.d0.loss_cls: 0.2892, decode.d0.loss_mask: 0.1957, decode.d0.loss_dice: 0.5296, decode.d1.loss_cls: 0.0663, decode.d1.loss_mask: 0.1912, decode.d1.loss_dice: 0.5136, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 
0.1905, decode.d2.loss_dice: 0.5097, decode.d3.loss_cls: 0.0568, decode.d3.loss_mask: 0.1905, decode.d3.loss_dice: 0.5054, decode.d4.loss_cls: 0.0576, decode.d4.loss_mask: 0.1906, decode.d4.loss_dice: 0.5067, decode.d5.loss_cls: 0.0602, decode.d5.loss_mask: 0.1908, decode.d5.loss_dice: 0.5059, decode.d6.loss_cls: 0.0530, decode.d6.loss_mask: 0.1902, decode.d6.loss_dice: 0.5074, decode.d7.loss_cls: 0.0553, decode.d7.loss_mask: 0.1903, decode.d7.loss_dice: 0.5094, decode.d8.loss_cls: 0.0515, decode.d8.loss_mask: 0.1904, decode.d8.loss_dice: 0.5069, loss: 7.8103 +2022-05-11 04:10:28,850 - mmseg - INFO - Iter [62400/80000] lr: 3.159e-07, eta: 11:11:52, time: 1.836, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0569, decode.loss_mask: 0.1878, decode.loss_dice: 0.5111, decode.d0.loss_cls: 0.3030, decode.d0.loss_mask: 0.1974, decode.d0.loss_dice: 0.5373, decode.d1.loss_cls: 0.0687, decode.d1.loss_mask: 0.1896, decode.d1.loss_dice: 0.5222, decode.d2.loss_cls: 0.0592, decode.d2.loss_mask: 0.1885, decode.d2.loss_dice: 0.5203, decode.d3.loss_cls: 0.0562, decode.d3.loss_mask: 0.1882, decode.d3.loss_dice: 0.5142, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1882, decode.d4.loss_dice: 0.5116, decode.d5.loss_cls: 0.0534, decode.d5.loss_mask: 0.1884, decode.d5.loss_dice: 0.5147, decode.d6.loss_cls: 0.0526, decode.d6.loss_mask: 0.1878, decode.d6.loss_dice: 0.5137, decode.d7.loss_cls: 0.0511, decode.d7.loss_mask: 0.1879, decode.d7.loss_dice: 0.5111, decode.d8.loss_cls: 0.0557, decode.d8.loss_mask: 0.1876, decode.d8.loss_dice: 0.5116, loss: 7.8716 +2022-05-11 04:11:58,305 - mmseg - INFO - Iter [62450/80000] lr: 3.150e-07, eta: 11:09:27, time: 1.789, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0525, decode.loss_mask: 0.1865, decode.loss_dice: 0.4953, decode.d0.loss_cls: 0.2985, decode.d0.loss_mask: 0.1917, decode.d0.loss_dice: 0.5184, decode.d1.loss_cls: 0.0683, decode.d1.loss_mask: 0.1876, decode.d1.loss_dice: 0.5071, decode.d2.loss_cls: 0.0634, 
decode.d2.loss_mask: 0.1871, decode.d2.loss_dice: 0.5013, decode.d3.loss_cls: 0.0508, decode.d3.loss_mask: 0.1866, decode.d3.loss_dice: 0.4952, decode.d4.loss_cls: 0.0530, decode.d4.loss_mask: 0.1872, decode.d4.loss_dice: 0.4999, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1866, decode.d5.loss_dice: 0.4983, decode.d6.loss_cls: 0.0515, decode.d6.loss_mask: 0.1865, decode.d6.loss_dice: 0.4981, decode.d7.loss_cls: 0.0510, decode.d7.loss_mask: 0.1865, decode.d7.loss_dice: 0.4972, decode.d8.loss_cls: 0.0492, decode.d8.loss_mask: 0.1861, decode.d8.loss_dice: 0.4985, loss: 7.6727 +2022-05-11 04:13:29,394 - mmseg - INFO - Iter [62500/80000] lr: 3.141e-07, eta: 11:07:05, time: 1.822, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0568, decode.loss_mask: 0.1793, decode.loss_dice: 0.4914, decode.d0.loss_cls: 0.3053, decode.d0.loss_mask: 0.1872, decode.d0.loss_dice: 0.5133, decode.d1.loss_cls: 0.0726, decode.d1.loss_mask: 0.1798, decode.d1.loss_dice: 0.4995, decode.d2.loss_cls: 0.0641, decode.d2.loss_mask: 0.1804, decode.d2.loss_dice: 0.4984, decode.d3.loss_cls: 0.0584, decode.d3.loss_mask: 0.1793, decode.d3.loss_dice: 0.4908, decode.d4.loss_cls: 0.0575, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.4954, decode.d5.loss_cls: 0.0539, decode.d5.loss_mask: 0.1795, decode.d5.loss_dice: 0.4920, decode.d6.loss_cls: 0.0556, decode.d6.loss_mask: 0.1789, decode.d6.loss_dice: 0.4910, decode.d7.loss_cls: 0.0605, decode.d7.loss_mask: 0.1791, decode.d7.loss_dice: 0.4943, decode.d8.loss_cls: 0.0563, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4902, loss: 7.5990 +2022-05-11 04:15:01,958 - mmseg - INFO - Iter [62550/80000] lr: 3.132e-07, eta: 11:04:44, time: 1.851, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0556, decode.loss_mask: 0.1837, decode.loss_dice: 0.4958, decode.d0.loss_cls: 0.2944, decode.d0.loss_mask: 0.1919, decode.d0.loss_dice: 0.5246, decode.d1.loss_cls: 0.0734, decode.d1.loss_mask: 0.1862, decode.d1.loss_dice: 0.5086, decode.d2.loss_cls: 
0.0662, decode.d2.loss_mask: 0.1856, decode.d2.loss_dice: 0.5022, decode.d3.loss_cls: 0.0676, decode.d3.loss_mask: 0.1840, decode.d3.loss_dice: 0.4989, decode.d4.loss_cls: 0.0630, decode.d4.loss_mask: 0.1840, decode.d4.loss_dice: 0.5010, decode.d5.loss_cls: 0.0619, decode.d5.loss_mask: 0.1840, decode.d5.loss_dice: 0.4978, decode.d6.loss_cls: 0.0620, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4983, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1840, decode.d7.loss_dice: 0.4995, decode.d8.loss_cls: 0.0593, decode.d8.loss_mask: 0.1836, decode.d8.loss_dice: 0.4961, loss: 7.7349 +2022-05-11 04:16:31,720 - mmseg - INFO - Iter [62600/80000] lr: 3.123e-07, eta: 11:02:21, time: 1.795, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0552, decode.loss_mask: 0.1781, decode.loss_dice: 0.5000, decode.d0.loss_cls: 0.2942, decode.d0.loss_mask: 0.1860, decode.d0.loss_dice: 0.5256, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1809, decode.d1.loss_dice: 0.5085, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1804, decode.d2.loss_dice: 0.5023, decode.d3.loss_cls: 0.0629, decode.d3.loss_mask: 0.1798, decode.d3.loss_dice: 0.5043, decode.d4.loss_cls: 0.0581, decode.d4.loss_mask: 0.1790, decode.d4.loss_dice: 0.4984, decode.d5.loss_cls: 0.0532, decode.d5.loss_mask: 0.1793, decode.d5.loss_dice: 0.4987, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1787, decode.d6.loss_dice: 0.4985, decode.d7.loss_cls: 0.0569, decode.d7.loss_mask: 0.1784, decode.d7.loss_dice: 0.4975, decode.d8.loss_cls: 0.0576, decode.d8.loss_mask: 0.1784, decode.d8.loss_dice: 0.4971, loss: 7.6537 +2022-05-11 04:18:01,938 - mmseg - INFO - Iter [62650/80000] lr: 3.114e-07, eta: 10:59:58, time: 1.804, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0504, decode.loss_mask: 0.1822, decode.loss_dice: 0.5068, decode.d0.loss_cls: 0.3050, decode.d0.loss_mask: 0.1892, decode.d0.loss_dice: 0.5291, decode.d1.loss_cls: 0.0735, decode.d1.loss_mask: 0.1838, decode.d1.loss_dice: 0.5141, 
decode.d2.loss_cls: 0.0642, decode.d2.loss_mask: 0.1832, decode.d2.loss_dice: 0.5108, decode.d3.loss_cls: 0.0555, decode.d3.loss_mask: 0.1824, decode.d3.loss_dice: 0.5058, decode.d4.loss_cls: 0.0599, decode.d4.loss_mask: 0.1826, decode.d4.loss_dice: 0.5058, decode.d5.loss_cls: 0.0535, decode.d5.loss_mask: 0.1824, decode.d5.loss_dice: 0.5041, decode.d6.loss_cls: 0.0539, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.5091, decode.d7.loss_cls: 0.0609, decode.d7.loss_mask: 0.1822, decode.d7.loss_dice: 0.5084, decode.d8.loss_cls: 0.0579, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.5081, loss: 7.7699 +2022-05-11 04:19:34,885 - mmseg - INFO - Iter [62700/80000] lr: 3.105e-07, eta: 10:57:39, time: 1.859, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1839, decode.loss_dice: 0.5052, decode.d0.loss_cls: 0.2932, decode.d0.loss_mask: 0.1902, decode.d0.loss_dice: 0.5280, decode.d1.loss_cls: 0.0636, decode.d1.loss_mask: 0.1847, decode.d1.loss_dice: 0.5123, decode.d2.loss_cls: 0.0577, decode.d2.loss_mask: 0.1842, decode.d2.loss_dice: 0.5125, decode.d3.loss_cls: 0.0521, decode.d3.loss_mask: 0.1838, decode.d3.loss_dice: 0.5070, decode.d4.loss_cls: 0.0506, decode.d4.loss_mask: 0.1840, decode.d4.loss_dice: 0.5046, decode.d5.loss_cls: 0.0487, decode.d5.loss_mask: 0.1842, decode.d5.loss_dice: 0.5044, decode.d6.loss_cls: 0.0488, decode.d6.loss_mask: 0.1837, decode.d6.loss_dice: 0.5070, decode.d7.loss_cls: 0.0483, decode.d7.loss_mask: 0.1844, decode.d7.loss_dice: 0.5067, decode.d8.loss_cls: 0.0450, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.5038, loss: 7.6977 +2022-05-11 04:21:04,564 - mmseg - INFO - Iter [62750/80000] lr: 3.096e-07, eta: 10:55:16, time: 1.794, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0424, decode.loss_mask: 0.1844, decode.loss_dice: 0.4990, decode.d0.loss_cls: 0.2900, decode.d0.loss_mask: 0.1916, decode.d0.loss_dice: 0.5191, decode.d1.loss_cls: 0.0627, decode.d1.loss_mask: 0.1858, decode.d1.loss_dice: 
0.5074, decode.d2.loss_cls: 0.0472, decode.d2.loss_mask: 0.1846, decode.d2.loss_dice: 0.5057, decode.d3.loss_cls: 0.0492, decode.d3.loss_mask: 0.1844, decode.d3.loss_dice: 0.5018, decode.d4.loss_cls: 0.0467, decode.d4.loss_mask: 0.1847, decode.d4.loss_dice: 0.4994, decode.d5.loss_cls: 0.0432, decode.d5.loss_mask: 0.1847, decode.d5.loss_dice: 0.5020, decode.d6.loss_cls: 0.0436, decode.d6.loss_mask: 0.1842, decode.d6.loss_dice: 0.5005, decode.d7.loss_cls: 0.0442, decode.d7.loss_mask: 0.1848, decode.d7.loss_dice: 0.5031, decode.d8.loss_cls: 0.0430, decode.d8.loss_mask: 0.1849, decode.d8.loss_dice: 0.5019, loss: 7.6061 +2022-05-11 04:22:34,943 - mmseg - INFO - Iter [62800/80000] lr: 3.087e-07, eta: 10:52:55, time: 1.808, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0610, decode.loss_mask: 0.1815, decode.loss_dice: 0.5082, decode.d0.loss_cls: 0.3126, decode.d0.loss_mask: 0.1909, decode.d0.loss_dice: 0.5346, decode.d1.loss_cls: 0.0836, decode.d1.loss_mask: 0.1836, decode.d1.loss_dice: 0.5207, decode.d2.loss_cls: 0.0727, decode.d2.loss_mask: 0.1828, decode.d2.loss_dice: 0.5150, decode.d3.loss_cls: 0.0667, decode.d3.loss_mask: 0.1826, decode.d3.loss_dice: 0.5155, decode.d4.loss_cls: 0.0679, decode.d4.loss_mask: 0.1824, decode.d4.loss_dice: 0.5098, decode.d5.loss_cls: 0.0695, decode.d5.loss_mask: 0.1820, decode.d5.loss_dice: 0.5129, decode.d6.loss_cls: 0.0680, decode.d6.loss_mask: 0.1818, decode.d6.loss_dice: 0.5154, decode.d7.loss_cls: 0.0670, decode.d7.loss_mask: 0.1822, decode.d7.loss_dice: 0.5145, decode.d8.loss_cls: 0.0672, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.5121, loss: 7.9264 +2022-05-11 04:24:04,575 - mmseg - INFO - Iter [62850/80000] lr: 3.078e-07, eta: 10:50:33, time: 1.793, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0532, decode.loss_mask: 0.1831, decode.loss_dice: 0.5002, decode.d0.loss_cls: 0.2995, decode.d0.loss_mask: 0.1931, decode.d0.loss_dice: 0.5268, decode.d1.loss_cls: 0.0709, decode.d1.loss_mask: 0.1858, 
decode.d1.loss_dice: 0.5122, decode.d2.loss_cls: 0.0651, decode.d2.loss_mask: 0.1839, decode.d2.loss_dice: 0.5095, decode.d3.loss_cls: 0.0614, decode.d3.loss_mask: 0.1833, decode.d3.loss_dice: 0.5017, decode.d4.loss_cls: 0.0614, decode.d4.loss_mask: 0.1835, decode.d4.loss_dice: 0.5025, decode.d5.loss_cls: 0.0581, decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.5038, decode.d6.loss_cls: 0.0586, decode.d6.loss_mask: 0.1831, decode.d6.loss_dice: 0.5031, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.5023, decode.d8.loss_cls: 0.0556, decode.d8.loss_mask: 0.1831, decode.d8.loss_dice: 0.5008, loss: 7.7465 +2022-05-11 04:25:37,700 - mmseg - INFO - Iter [62900/80000] lr: 3.069e-07, eta: 10:48:16, time: 1.862, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0554, decode.loss_mask: 0.1845, decode.loss_dice: 0.4947, decode.d0.loss_cls: 0.3009, decode.d0.loss_mask: 0.1915, decode.d0.loss_dice: 0.5174, decode.d1.loss_cls: 0.0692, decode.d1.loss_mask: 0.1856, decode.d1.loss_dice: 0.5049, decode.d2.loss_cls: 0.0614, decode.d2.loss_mask: 0.1850, decode.d2.loss_dice: 0.5010, decode.d3.loss_cls: 0.0587, decode.d3.loss_mask: 0.1848, decode.d3.loss_dice: 0.4946, decode.d4.loss_cls: 0.0595, decode.d4.loss_mask: 0.1854, decode.d4.loss_dice: 0.4981, decode.d5.loss_cls: 0.0594, decode.d5.loss_mask: 0.1844, decode.d5.loss_dice: 0.4940, decode.d6.loss_cls: 0.0551, decode.d6.loss_mask: 0.1845, decode.d6.loss_dice: 0.4921, decode.d7.loss_cls: 0.0572, decode.d7.loss_mask: 0.1844, decode.d7.loss_dice: 0.4932, decode.d8.loss_cls: 0.0506, decode.d8.loss_mask: 0.1843, decode.d8.loss_dice: 0.4937, loss: 7.6655 +2022-05-11 04:27:09,172 - mmseg - INFO - Iter [62950/80000] lr: 3.060e-07, eta: 10:45:56, time: 1.829, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0558, decode.loss_mask: 0.1831, decode.loss_dice: 0.4958, decode.d0.loss_cls: 0.2916, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5195, decode.d1.loss_cls: 0.0741, decode.d1.loss_mask: 
0.1841, decode.d1.loss_dice: 0.5047, decode.d2.loss_cls: 0.0645, decode.d2.loss_mask: 0.1830, decode.d2.loss_dice: 0.4999, decode.d3.loss_cls: 0.0583, decode.d3.loss_mask: 0.1834, decode.d3.loss_dice: 0.4973, decode.d4.loss_cls: 0.0565, decode.d4.loss_mask: 0.1838, decode.d4.loss_dice: 0.4956, decode.d5.loss_cls: 0.0540, decode.d5.loss_mask: 0.1832, decode.d5.loss_dice: 0.4956, decode.d6.loss_cls: 0.0569, decode.d6.loss_mask: 0.1835, decode.d6.loss_dice: 0.4997, decode.d7.loss_cls: 0.0556, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.4986, decode.d8.loss_cls: 0.0531, decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.4977, loss: 7.6636 +2022-05-11 04:28:38,227 - mmseg - INFO - Saving checkpoint at 63000 iterations +2022-05-11 04:29:07,757 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 04:29:07,760 - mmseg - INFO - Iter [63000/80000] lr: 3.051e-07, eta: 10:44:08, time: 2.370, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0516, decode.loss_mask: 0.1864, decode.loss_dice: 0.5004, decode.d0.loss_cls: 0.2968, decode.d0.loss_mask: 0.1942, decode.d0.loss_dice: 0.5266, decode.d1.loss_cls: 0.0639, decode.d1.loss_mask: 0.1886, decode.d1.loss_dice: 0.5125, decode.d2.loss_cls: 0.0638, decode.d2.loss_mask: 0.1868, decode.d2.loss_dice: 0.5060, decode.d3.loss_cls: 0.0553, decode.d3.loss_mask: 0.1871, decode.d3.loss_dice: 0.5035, decode.d4.loss_cls: 0.0539, decode.d4.loss_mask: 0.1867, decode.d4.loss_dice: 0.5016, decode.d5.loss_cls: 0.0592, decode.d5.loss_mask: 0.1858, decode.d5.loss_dice: 0.4995, decode.d6.loss_cls: 0.0547, decode.d6.loss_mask: 0.1864, decode.d6.loss_dice: 0.5050, decode.d7.loss_cls: 0.0557, decode.d7.loss_mask: 0.1864, decode.d7.loss_dice: 0.5071, decode.d8.loss_cls: 0.0528, decode.d8.loss_mask: 0.1856, decode.d8.loss_dice: 0.5001, loss: 7.7439 +2022-05-11 04:31:03,015 - mmseg - INFO - per class results: +2022-05-11 04:31:03,020 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc 
| ++---------------+-------+-------+ +| road | 98.54 | 99.19 | +| sidewalk | 88.12 | 93.64 | +| building | 94.38 | 97.02 | +| wall | 69.99 | 79.26 | +| fence | 74.34 | 81.37 | +| pole | 71.05 | 83.03 | +| traffic light | 77.23 | 88.41 | +| traffic sign | 84.01 | 90.83 | +| vegetation | 93.27 | 96.95 | +| terrain | 68.45 | 78.75 | +| sky | 95.78 | 98.45 | +| person | 86.83 | 93.99 | +| rider | 74.47 | 85.18 | +| car | 96.21 | 98.28 | +| truck | 82.44 | 95.07 | +| bus | 93.49 | 96.43 | +| train | 87.88 | 90.65 | +| motorcycle | 76.93 | 88.46 | +| bicycle | 82.75 | 91.22 | ++---------------+-------+-------+ +2022-05-11 04:31:03,021 - mmseg - INFO - Summary: +2022-05-11 04:31:03,021 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.97 | 84.01 | 90.85 | ++-------+-------+-------+ +2022-05-11 04:31:03,024 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 04:31:03,024 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8401, mAcc: 0.9085, IoU.road: 0.9854, IoU.sidewalk: 0.8812, IoU.building: 0.9438, IoU.wall: 0.6999, IoU.fence: 0.7434, IoU.pole: 0.7105, IoU.traffic light: 0.7723, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9327, IoU.terrain: 0.6845, IoU.sky: 0.9578, IoU.person: 0.8683, IoU.rider: 0.7447, IoU.car: 0.9621, IoU.truck: 0.8244, IoU.bus: 0.9349, IoU.train: 0.8788, IoU.motorcycle: 0.7693, IoU.bicycle: 0.8275, Acc.road: 0.9919, Acc.sidewalk: 0.9364, Acc.building: 0.9702, Acc.wall: 0.7926, Acc.fence: 0.8137, Acc.pole: 0.8303, Acc.traffic light: 0.8841, Acc.traffic sign: 0.9083, Acc.vegetation: 0.9695, Acc.terrain: 0.7875, Acc.sky: 0.9845, Acc.person: 0.9399, Acc.rider: 0.8518, Acc.car: 0.9828, Acc.truck: 0.9507, Acc.bus: 0.9643, Acc.train: 0.9065, Acc.motorcycle: 0.8846, Acc.bicycle: 0.9122 +2022-05-11 04:32:34,035 - mmseg - INFO - Iter [63050/80000] lr: 3.042e-07, eta: 10:43:59, time: 4.128, data_time: 2.324, memory: 69063, decode.loss_cls: 0.0440, decode.loss_mask: 
0.1872, decode.loss_dice: 0.5066, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.1960, decode.d0.loss_dice: 0.5344, decode.d1.loss_cls: 0.0593, decode.d1.loss_mask: 0.1894, decode.d1.loss_dice: 0.5119, decode.d2.loss_cls: 0.0525, decode.d2.loss_mask: 0.1883, decode.d2.loss_dice: 0.5100, decode.d3.loss_cls: 0.0491, decode.d3.loss_mask: 0.1879, decode.d3.loss_dice: 0.5074, decode.d4.loss_cls: 0.0489, decode.d4.loss_mask: 0.1881, decode.d4.loss_dice: 0.5109, decode.d5.loss_cls: 0.0503, decode.d5.loss_mask: 0.1873, decode.d5.loss_dice: 0.5089, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1879, decode.d6.loss_dice: 0.5096, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1876, decode.d7.loss_dice: 0.5073, decode.d8.loss_cls: 0.0482, decode.d8.loss_mask: 0.1875, decode.d8.loss_dice: 0.5057, loss: 7.7498 +2022-05-11 04:34:06,449 - mmseg - INFO - Iter [63100/80000] lr: 3.033e-07, eta: 10:41:41, time: 1.848, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0519, decode.loss_mask: 0.1858, decode.loss_dice: 0.5001, decode.d0.loss_cls: 0.2957, decode.d0.loss_mask: 0.1924, decode.d0.loss_dice: 0.5204, decode.d1.loss_cls: 0.0699, decode.d1.loss_mask: 0.1869, decode.d1.loss_dice: 0.5076, decode.d2.loss_cls: 0.0611, decode.d2.loss_mask: 0.1861, decode.d2.loss_dice: 0.5025, decode.d3.loss_cls: 0.0581, decode.d3.loss_mask: 0.1861, decode.d3.loss_dice: 0.5011, decode.d4.loss_cls: 0.0573, decode.d4.loss_mask: 0.1861, decode.d4.loss_dice: 0.5039, decode.d5.loss_cls: 0.0553, decode.d5.loss_mask: 0.1859, decode.d5.loss_dice: 0.4963, decode.d6.loss_cls: 0.0574, decode.d6.loss_mask: 0.1856, decode.d6.loss_dice: 0.4974, decode.d7.loss_cls: 0.0550, decode.d7.loss_mask: 0.1855, decode.d7.loss_dice: 0.4995, decode.d8.loss_cls: 0.0568, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.4981, loss: 7.7111 +2022-05-11 04:35:37,076 - mmseg - INFO - Iter [63150/80000] lr: 3.024e-07, eta: 10:39:21, time: 1.812, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0491, 
decode.loss_mask: 0.1864, decode.loss_dice: 0.4988, decode.d0.loss_cls: 0.2948, decode.d0.loss_mask: 0.1945, decode.d0.loss_dice: 0.5188, decode.d1.loss_cls: 0.0639, decode.d1.loss_mask: 0.1883, decode.d1.loss_dice: 0.5075, decode.d2.loss_cls: 0.0598, decode.d2.loss_mask: 0.1872, decode.d2.loss_dice: 0.5052, decode.d3.loss_cls: 0.0507, decode.d3.loss_mask: 0.1871, decode.d3.loss_dice: 0.5031, decode.d4.loss_cls: 0.0603, decode.d4.loss_mask: 0.1867, decode.d4.loss_dice: 0.4998, decode.d5.loss_cls: 0.0548, decode.d5.loss_mask: 0.1866, decode.d5.loss_dice: 0.4956, decode.d6.loss_cls: 0.0506, decode.d6.loss_mask: 0.1867, decode.d6.loss_dice: 0.5005, decode.d7.loss_cls: 0.0502, decode.d7.loss_mask: 0.1866, decode.d7.loss_dice: 0.4996, decode.d8.loss_cls: 0.0485, decode.d8.loss_mask: 0.1863, decode.d8.loss_dice: 0.5001, loss: 7.6880 +2022-05-11 04:37:07,738 - mmseg - INFO - Iter [63200/80000] lr: 3.015e-07, eta: 10:37:02, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0559, decode.loss_mask: 0.1891, decode.loss_dice: 0.5018, decode.d0.loss_cls: 0.2933, decode.d0.loss_mask: 0.1965, decode.d0.loss_dice: 0.5175, decode.d1.loss_cls: 0.0687, decode.d1.loss_mask: 0.1907, decode.d1.loss_dice: 0.5069, decode.d2.loss_cls: 0.0617, decode.d2.loss_mask: 0.1897, decode.d2.loss_dice: 0.5014, decode.d3.loss_cls: 0.0570, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.5008, decode.d4.loss_cls: 0.0585, decode.d4.loss_mask: 0.1893, decode.d4.loss_dice: 0.5012, decode.d5.loss_cls: 0.0565, decode.d5.loss_mask: 0.1892, decode.d5.loss_dice: 0.5018, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1888, decode.d6.loss_dice: 0.4976, decode.d7.loss_cls: 0.0528, decode.d7.loss_mask: 0.1887, decode.d7.loss_dice: 0.4991, decode.d8.loss_cls: 0.0534, decode.d8.loss_mask: 0.1886, decode.d8.loss_dice: 0.4995, loss: 7.7360 +2022-05-11 04:38:38,931 - mmseg - INFO - Iter [63250/80000] lr: 3.006e-07, eta: 10:34:43, time: 1.824, data_time: 0.018, memory: 69063, decode.loss_cls: 
0.0513, decode.loss_mask: 0.1826, decode.loss_dice: 0.5139, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5355, decode.d1.loss_cls: 0.0680, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.5216, decode.d2.loss_cls: 0.0590, decode.d2.loss_mask: 0.1840, decode.d2.loss_dice: 0.5157, decode.d3.loss_cls: 0.0506, decode.d3.loss_mask: 0.1825, decode.d3.loss_dice: 0.5121, decode.d4.loss_cls: 0.0530, decode.d4.loss_mask: 0.1832, decode.d4.loss_dice: 0.5142, decode.d5.loss_cls: 0.0471, decode.d5.loss_mask: 0.1827, decode.d5.loss_dice: 0.5115, decode.d6.loss_cls: 0.0508, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.5122, decode.d7.loss_cls: 0.0417, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.5135, decode.d8.loss_cls: 0.0481, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.5154, loss: 7.7704 +2022-05-11 04:40:13,072 - mmseg - INFO - Iter [63300/80000] lr: 2.997e-07, eta: 10:32:28, time: 1.883, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0452, decode.loss_mask: 0.1830, decode.loss_dice: 0.5043, decode.d0.loss_cls: 0.2902, decode.d0.loss_mask: 0.1902, decode.d0.loss_dice: 0.5282, decode.d1.loss_cls: 0.0594, decode.d1.loss_mask: 0.1845, decode.d1.loss_dice: 0.5162, decode.d2.loss_cls: 0.0538, decode.d2.loss_mask: 0.1837, decode.d2.loss_dice: 0.5130, decode.d3.loss_cls: 0.0501, decode.d3.loss_mask: 0.1833, decode.d3.loss_dice: 0.5104, decode.d4.loss_cls: 0.0487, decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.5045, decode.d5.loss_cls: 0.0506, decode.d5.loss_mask: 0.1832, decode.d5.loss_dice: 0.5064, decode.d6.loss_cls: 0.0512, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.5064, decode.d7.loss_cls: 0.0486, decode.d7.loss_mask: 0.1830, decode.d7.loss_dice: 0.5061, decode.d8.loss_cls: 0.0524, decode.d8.loss_mask: 0.1829, decode.d8.loss_dice: 0.5072, loss: 7.6936 +2022-05-11 04:41:43,062 - mmseg - INFO - Iter [63350/80000] lr: 2.988e-07, eta: 10:30:09, time: 1.800, data_time: 0.017, memory: 69063, 
decode.loss_cls: 0.0542, decode.loss_mask: 0.1872, decode.loss_dice: 0.4962, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.1938, decode.d0.loss_dice: 0.5189, decode.d1.loss_cls: 0.0658, decode.d1.loss_mask: 0.1885, decode.d1.loss_dice: 0.5089, decode.d2.loss_cls: 0.0538, decode.d2.loss_mask: 0.1876, decode.d2.loss_dice: 0.5018, decode.d3.loss_cls: 0.0476, decode.d3.loss_mask: 0.1876, decode.d3.loss_dice: 0.4972, decode.d4.loss_cls: 0.0552, decode.d4.loss_mask: 0.1877, decode.d4.loss_dice: 0.4998, decode.d5.loss_cls: 0.0491, decode.d5.loss_mask: 0.1881, decode.d5.loss_dice: 0.5004, decode.d6.loss_cls: 0.0478, decode.d6.loss_mask: 0.1878, decode.d6.loss_dice: 0.4977, decode.d7.loss_cls: 0.0519, decode.d7.loss_mask: 0.1872, decode.d7.loss_dice: 0.4998, decode.d8.loss_cls: 0.0481, decode.d8.loss_mask: 0.1877, decode.d8.loss_dice: 0.4996, loss: 7.6749 +2022-05-11 04:43:13,304 - mmseg - INFO - Iter [63400/80000] lr: 2.979e-07, eta: 10:27:50, time: 1.805, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0472, decode.loss_mask: 0.1848, decode.loss_dice: 0.4944, decode.d0.loss_cls: 0.2976, decode.d0.loss_mask: 0.1934, decode.d0.loss_dice: 0.5166, decode.d1.loss_cls: 0.0683, decode.d1.loss_mask: 0.1863, decode.d1.loss_dice: 0.5016, decode.d2.loss_cls: 0.0578, decode.d2.loss_mask: 0.1857, decode.d2.loss_dice: 0.5003, decode.d3.loss_cls: 0.0535, decode.d3.loss_mask: 0.1855, decode.d3.loss_dice: 0.4964, decode.d4.loss_cls: 0.0497, decode.d4.loss_mask: 0.1860, decode.d4.loss_dice: 0.4951, decode.d5.loss_cls: 0.0507, decode.d5.loss_mask: 0.1860, decode.d5.loss_dice: 0.4951, decode.d6.loss_cls: 0.0471, decode.d6.loss_mask: 0.1850, decode.d6.loss_dice: 0.4956, decode.d7.loss_cls: 0.0488, decode.d7.loss_mask: 0.1847, decode.d7.loss_dice: 0.4991, decode.d8.loss_cls: 0.0526, decode.d8.loss_mask: 0.1848, decode.d8.loss_dice: 0.5001, loss: 7.6296 +2022-05-11 04:44:44,346 - mmseg - INFO - Iter [63450/80000] lr: 2.970e-07, eta: 10:25:33, time: 1.821, data_time: 0.065, memory: 
69063, decode.loss_cls: 0.0511, decode.loss_mask: 0.1839, decode.loss_dice: 0.5036, decode.d0.loss_cls: 0.2935, decode.d0.loss_mask: 0.1898, decode.d0.loss_dice: 0.5241, decode.d1.loss_cls: 0.0683, decode.d1.loss_mask: 0.1853, decode.d1.loss_dice: 0.5070, decode.d2.loss_cls: 0.0626, decode.d2.loss_mask: 0.1851, decode.d2.loss_dice: 0.5044, decode.d3.loss_cls: 0.0628, decode.d3.loss_mask: 0.1843, decode.d3.loss_dice: 0.5018, decode.d4.loss_cls: 0.0487, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.5056, decode.d5.loss_cls: 0.0521, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.5045, decode.d6.loss_cls: 0.0569, decode.d6.loss_mask: 0.1845, decode.d6.loss_dice: 0.5047, decode.d7.loss_cls: 0.0554, decode.d7.loss_mask: 0.1839, decode.d7.loss_dice: 0.5049, decode.d8.loss_cls: 0.0538, decode.d8.loss_mask: 0.1842, decode.d8.loss_dice: 0.5048, loss: 7.7203 +2022-05-11 04:46:16,019 - mmseg - INFO - Iter [63500/80000] lr: 2.962e-07, eta: 10:23:16, time: 1.833, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0548, decode.loss_mask: 0.1793, decode.loss_dice: 0.4962, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 0.1856, decode.d0.loss_dice: 0.5170, decode.d1.loss_cls: 0.0655, decode.d1.loss_mask: 0.1813, decode.d1.loss_dice: 0.5042, decode.d2.loss_cls: 0.0600, decode.d2.loss_mask: 0.1803, decode.d2.loss_dice: 0.5054, decode.d3.loss_cls: 0.0545, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.4999, decode.d4.loss_cls: 0.0493, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.4958, decode.d5.loss_cls: 0.0518, decode.d5.loss_mask: 0.1800, decode.d5.loss_dice: 0.5009, decode.d6.loss_cls: 0.0526, decode.d6.loss_mask: 0.1796, decode.d6.loss_dice: 0.4989, decode.d7.loss_cls: 0.0524, decode.d7.loss_mask: 0.1796, decode.d7.loss_dice: 0.4994, decode.d8.loss_cls: 0.0511, decode.d8.loss_mask: 0.1796, decode.d8.loss_dice: 0.4970, loss: 7.6001 +2022-05-11 04:47:46,642 - mmseg - INFO - Iter [63550/80000] lr: 2.953e-07, eta: 10:20:59, time: 1.811, data_time: 0.017, 
memory: 69063, decode.loss_cls: 0.0515, decode.loss_mask: 0.1841, decode.loss_dice: 0.5004, decode.d0.loss_cls: 0.2902, decode.d0.loss_mask: 0.1921, decode.d0.loss_dice: 0.5237, decode.d1.loss_cls: 0.0685, decode.d1.loss_mask: 0.1858, decode.d1.loss_dice: 0.5062, decode.d2.loss_cls: 0.0630, decode.d2.loss_mask: 0.1851, decode.d2.loss_dice: 0.5088, decode.d3.loss_cls: 0.0640, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.5008, decode.d4.loss_cls: 0.0587, decode.d4.loss_mask: 0.1846, decode.d4.loss_dice: 0.5071, decode.d5.loss_cls: 0.0580, decode.d5.loss_mask: 0.1843, decode.d5.loss_dice: 0.5038, decode.d6.loss_cls: 0.0599, decode.d6.loss_mask: 0.1843, decode.d6.loss_dice: 0.5014, decode.d7.loss_cls: 0.0528, decode.d7.loss_mask: 0.1838, decode.d7.loss_dice: 0.5018, decode.d8.loss_cls: 0.0516, decode.d8.loss_mask: 0.1838, decode.d8.loss_dice: 0.5019, loss: 7.7265 +2022-05-11 04:49:15,805 - mmseg - INFO - Iter [63600/80000] lr: 2.944e-07, eta: 10:18:40, time: 1.783, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0449, decode.loss_mask: 0.1834, decode.loss_dice: 0.5048, decode.d0.loss_cls: 0.2938, decode.d0.loss_mask: 0.1891, decode.d0.loss_dice: 0.5244, decode.d1.loss_cls: 0.0614, decode.d1.loss_mask: 0.1844, decode.d1.loss_dice: 0.5118, decode.d2.loss_cls: 0.0556, decode.d2.loss_mask: 0.1834, decode.d2.loss_dice: 0.5106, decode.d3.loss_cls: 0.0452, decode.d3.loss_mask: 0.1833, decode.d3.loss_dice: 0.5058, decode.d4.loss_cls: 0.0461, decode.d4.loss_mask: 0.1831, decode.d4.loss_dice: 0.5059, decode.d5.loss_cls: 0.0458, decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.5042, decode.d6.loss_cls: 0.0461, decode.d6.loss_mask: 0.1830, decode.d6.loss_dice: 0.5026, decode.d7.loss_cls: 0.0473, decode.d7.loss_mask: 0.1830, decode.d7.loss_dice: 0.5032, decode.d8.loss_cls: 0.0448, decode.d8.loss_mask: 0.1828, decode.d8.loss_dice: 0.4991, loss: 7.6423 +2022-05-11 04:50:47,870 - mmseg - INFO - Iter [63650/80000] lr: 2.935e-07, eta: 10:16:25, time: 1.843, data_time: 
0.065, memory: 69063, decode.loss_cls: 0.0511, decode.loss_mask: 0.1830, decode.loss_dice: 0.5066, decode.d0.loss_cls: 0.2954, decode.d0.loss_mask: 0.1889, decode.d0.loss_dice: 0.5274, decode.d1.loss_cls: 0.0644, decode.d1.loss_mask: 0.1838, decode.d1.loss_dice: 0.5147, decode.d2.loss_cls: 0.0628, decode.d2.loss_mask: 0.1837, decode.d2.loss_dice: 0.5129, decode.d3.loss_cls: 0.0524, decode.d3.loss_mask: 0.1833, decode.d3.loss_dice: 0.5109, decode.d4.loss_cls: 0.0533, decode.d4.loss_mask: 0.1834, decode.d4.loss_dice: 0.5118, decode.d5.loss_cls: 0.0553, decode.d5.loss_mask: 0.1837, decode.d5.loss_dice: 0.5103, decode.d6.loss_cls: 0.0517, decode.d6.loss_mask: 0.1837, decode.d6.loss_dice: 0.5091, decode.d7.loss_cls: 0.0523, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.5083, decode.d8.loss_cls: 0.0496, decode.d8.loss_mask: 0.1832, decode.d8.loss_dice: 0.5104, loss: 7.7511 +2022-05-11 04:52:18,290 - mmseg - INFO - Iter [63700/80000] lr: 2.926e-07, eta: 10:14:08, time: 1.809, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0593, decode.loss_mask: 0.1810, decode.loss_dice: 0.5029, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.1872, decode.d0.loss_dice: 0.5296, decode.d1.loss_cls: 0.0672, decode.d1.loss_mask: 0.1824, decode.d1.loss_dice: 0.5058, decode.d2.loss_cls: 0.0646, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.5044, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1814, decode.d3.loss_dice: 0.5024, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1817, decode.d4.loss_dice: 0.5068, decode.d5.loss_cls: 0.0604, decode.d5.loss_mask: 0.1819, decode.d5.loss_dice: 0.5037, decode.d6.loss_cls: 0.0626, decode.d6.loss_mask: 0.1813, decode.d6.loss_dice: 0.5028, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1814, decode.d7.loss_dice: 0.5017, decode.d8.loss_cls: 0.0573, decode.d8.loss_mask: 0.1815, decode.d8.loss_dice: 0.5021, loss: 7.7285 +2022-05-11 04:53:49,475 - mmseg - INFO - Iter [63750/80000] lr: 2.917e-07, eta: 10:11:53, time: 1.824, 
data_time: 0.019, memory: 69063, decode.loss_cls: 0.0462, decode.loss_mask: 0.1801, decode.loss_dice: 0.4949, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.1863, decode.d0.loss_dice: 0.5130, decode.d1.loss_cls: 0.0650, decode.d1.loss_mask: 0.1815, decode.d1.loss_dice: 0.5017, decode.d2.loss_cls: 0.0535, decode.d2.loss_mask: 0.1806, decode.d2.loss_dice: 0.4991, decode.d3.loss_cls: 0.0513, decode.d3.loss_mask: 0.1810, decode.d3.loss_dice: 0.4975, decode.d4.loss_cls: 0.0526, decode.d4.loss_mask: 0.1805, decode.d4.loss_dice: 0.4978, decode.d5.loss_cls: 0.0497, decode.d5.loss_mask: 0.1804, decode.d5.loss_dice: 0.4981, decode.d6.loss_cls: 0.0468, decode.d6.loss_mask: 0.1803, decode.d6.loss_dice: 0.4962, decode.d7.loss_cls: 0.0488, decode.d7.loss_mask: 0.1803, decode.d7.loss_dice: 0.4972, decode.d8.loss_cls: 0.0483, decode.d8.loss_mask: 0.1804, decode.d8.loss_dice: 0.4976, loss: 7.5634 +2022-05-11 04:55:20,410 - mmseg - INFO - Iter [63800/80000] lr: 2.908e-07, eta: 10:09:37, time: 1.819, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0439, decode.loss_mask: 0.1873, decode.loss_dice: 0.4964, decode.d0.loss_cls: 0.2882, decode.d0.loss_mask: 0.1949, decode.d0.loss_dice: 0.5160, decode.d1.loss_cls: 0.0584, decode.d1.loss_mask: 0.1876, decode.d1.loss_dice: 0.5052, decode.d2.loss_cls: 0.0550, decode.d2.loss_mask: 0.1871, decode.d2.loss_dice: 0.5002, decode.d3.loss_cls: 0.0500, decode.d3.loss_mask: 0.1876, decode.d3.loss_dice: 0.4990, decode.d4.loss_cls: 0.0488, decode.d4.loss_mask: 0.1874, decode.d4.loss_dice: 0.4956, decode.d5.loss_cls: 0.0465, decode.d5.loss_mask: 0.1880, decode.d5.loss_dice: 0.4990, decode.d6.loss_cls: 0.0459, decode.d6.loss_mask: 0.1875, decode.d6.loss_dice: 0.4976, decode.d7.loss_cls: 0.0504, decode.d7.loss_mask: 0.1873, decode.d7.loss_dice: 0.4961, decode.d8.loss_cls: 0.0476, decode.d8.loss_mask: 0.1873, decode.d8.loss_dice: 0.4984, loss: 7.6202 +2022-05-11 04:56:52,953 - mmseg - INFO - Iter [63850/80000] lr: 2.899e-07, eta: 10:07:24, time: 
1.851, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0475, decode.loss_mask: 0.1756, decode.loss_dice: 0.4850, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 0.1824, decode.d0.loss_dice: 0.5107, decode.d1.loss_cls: 0.0619, decode.d1.loss_mask: 0.1772, decode.d1.loss_dice: 0.4918, decode.d2.loss_cls: 0.0560, decode.d2.loss_mask: 0.1769, decode.d2.loss_dice: 0.4889, decode.d3.loss_cls: 0.0554, decode.d3.loss_mask: 0.1763, decode.d3.loss_dice: 0.4844, decode.d4.loss_cls: 0.0488, decode.d4.loss_mask: 0.1763, decode.d4.loss_dice: 0.4822, decode.d5.loss_cls: 0.0524, decode.d5.loss_mask: 0.1761, decode.d5.loss_dice: 0.4849, decode.d6.loss_cls: 0.0432, decode.d6.loss_mask: 0.1757, decode.d6.loss_dice: 0.4863, decode.d7.loss_cls: 0.0494, decode.d7.loss_mask: 0.1757, decode.d7.loss_dice: 0.4870, decode.d8.loss_cls: 0.0467, decode.d8.loss_mask: 0.1757, decode.d8.loss_dice: 0.4837, loss: 7.4067 +2022-05-11 04:58:24,044 - mmseg - INFO - Iter [63900/80000] lr: 2.890e-07, eta: 10:05:09, time: 1.822, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0464, decode.loss_mask: 0.1804, decode.loss_dice: 0.4890, decode.d0.loss_cls: 0.2869, decode.d0.loss_mask: 0.1876, decode.d0.loss_dice: 0.5177, decode.d1.loss_cls: 0.0638, decode.d1.loss_mask: 0.1818, decode.d1.loss_dice: 0.5005, decode.d2.loss_cls: 0.0542, decode.d2.loss_mask: 0.1809, decode.d2.loss_dice: 0.4933, decode.d3.loss_cls: 0.0475, decode.d3.loss_mask: 0.1811, decode.d3.loss_dice: 0.4907, decode.d4.loss_cls: 0.0519, decode.d4.loss_mask: 0.1812, decode.d4.loss_dice: 0.4930, decode.d5.loss_cls: 0.0500, decode.d5.loss_mask: 0.1808, decode.d5.loss_dice: 0.4924, decode.d6.loss_cls: 0.0565, decode.d6.loss_mask: 0.1807, decode.d6.loss_dice: 0.4937, decode.d7.loss_cls: 0.0512, decode.d7.loss_mask: 0.1806, decode.d7.loss_dice: 0.4885, decode.d8.loss_cls: 0.0507, decode.d8.loss_mask: 0.1808, decode.d8.loss_dice: 0.4905, loss: 7.5241 +2022-05-11 04:59:53,795 - mmseg - INFO - Iter [63950/80000] lr: 2.881e-07, eta: 10:02:53, 
time: 1.795, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0552, decode.loss_mask: 0.1822, decode.loss_dice: 0.5059, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.1886, decode.d0.loss_dice: 0.5271, decode.d1.loss_cls: 0.0761, decode.d1.loss_mask: 0.1836, decode.d1.loss_dice: 0.5116, decode.d2.loss_cls: 0.0557, decode.d2.loss_mask: 0.1829, decode.d2.loss_dice: 0.5110, decode.d3.loss_cls: 0.0569, decode.d3.loss_mask: 0.1829, decode.d3.loss_dice: 0.5044, decode.d4.loss_cls: 0.0617, decode.d4.loss_mask: 0.1830, decode.d4.loss_dice: 0.5057, decode.d5.loss_cls: 0.0566, decode.d5.loss_mask: 0.1828, decode.d5.loss_dice: 0.5068, decode.d6.loss_cls: 0.0549, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.5036, decode.d7.loss_cls: 0.0571, decode.d7.loss_mask: 0.1827, decode.d7.loss_dice: 0.5045, decode.d8.loss_cls: 0.0582, decode.d8.loss_mask: 0.1824, decode.d8.loss_dice: 0.5077, loss: 7.7503 +2022-05-11 05:01:27,693 - mmseg - INFO - Saving checkpoint at 64000 iterations +2022-05-11 05:02:00,279 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 05:02:00,282 - mmseg - INFO - Iter [64000/80000] lr: 2.872e-07, eta: 10:01:14, time: 2.528, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0581, decode.loss_mask: 0.1868, decode.loss_dice: 0.5013, decode.d0.loss_cls: 0.2982, decode.d0.loss_mask: 0.1935, decode.d0.loss_dice: 0.5242, decode.d1.loss_cls: 0.0781, decode.d1.loss_mask: 0.1887, decode.d1.loss_dice: 0.5112, decode.d2.loss_cls: 0.0650, decode.d2.loss_mask: 0.1874, decode.d2.loss_dice: 0.5046, decode.d3.loss_cls: 0.0589, decode.d3.loss_mask: 0.1874, decode.d3.loss_dice: 0.5022, decode.d4.loss_cls: 0.0595, decode.d4.loss_mask: 0.1871, decode.d4.loss_dice: 0.5059, decode.d5.loss_cls: 0.0600, decode.d5.loss_mask: 0.1876, decode.d5.loss_dice: 0.5051, decode.d6.loss_cls: 0.0537, decode.d6.loss_mask: 0.1871, decode.d6.loss_dice: 0.5019, decode.d7.loss_cls: 0.0572, decode.d7.loss_mask: 0.1869, decode.d7.loss_dice: 0.5045, 
decode.d8.loss_cls: 0.0527, decode.d8.loss_mask: 0.1872, decode.d8.loss_dice: 0.5008, loss: 7.7826 +2022-05-11 05:03:56,969 - mmseg - INFO - per class results: +2022-05-11 05:03:56,977 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.59 | 99.28 | +| sidewalk | 88.51 | 93.77 | +| building | 94.39 | 97.1 | +| wall | 69.92 | 79.93 | +| fence | 78.96 | 86.59 | +| pole | 71.22 | 83.72 | +| traffic light | 77.12 | 88.55 | +| traffic sign | 84.15 | 90.84 | +| vegetation | 93.38 | 96.72 | +| terrain | 68.24 | 77.84 | +| sky | 95.79 | 98.45 | +| person | 86.89 | 93.94 | +| rider | 74.53 | 84.89 | +| car | 96.22 | 98.35 | +| truck | 92.1 | 94.78 | +| bus | 93.71 | 96.6 | +| train | 88.06 | 90.91 | +| motorcycle | 77.74 | 87.05 | +| bicycle | 82.81 | 91.36 | ++---------------+-------+-------+ +2022-05-11 05:03:56,977 - mmseg - INFO - Summary: +2022-05-11 05:03:56,978 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.04 | 84.86 | 91.09 | ++-------+-------+-------+ +2022-05-11 05:03:57,019 - mmseg - INFO - The previous best checkpoint /mnt/lustre/chenzhe.vendor/workspace/ViT-Adapter/segmentation/work_dirs/mask2former_beit_adapter_large_896_80k_cityscapes_ss/best_mIoU_iter_50000.pth was removed +2022-05-11 05:04:28,977 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_64000.pth. +2022-05-11 05:04:28,988 - mmseg - INFO - Best mIoU is 0.8486 at 64000 iter. 
+2022-05-11 05:04:28,997 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 05:04:28,998 - mmseg - INFO - Iter(val) [32] aAcc: 0.9704, mIoU: 0.8486, mAcc: 0.9109, IoU.road: 0.9859, IoU.sidewalk: 0.8851, IoU.building: 0.9439, IoU.wall: 0.6992, IoU.fence: 0.7896, IoU.pole: 0.7122, IoU.traffic light: 0.7712, IoU.traffic sign: 0.8415, IoU.vegetation: 0.9338, IoU.terrain: 0.6824, IoU.sky: 0.9579, IoU.person: 0.8689, IoU.rider: 0.7453, IoU.car: 0.9622, IoU.truck: 0.9210, IoU.bus: 0.9371, IoU.train: 0.8806, IoU.motorcycle: 0.7774, IoU.bicycle: 0.8281, Acc.road: 0.9928, Acc.sidewalk: 0.9377, Acc.building: 0.9710, Acc.wall: 0.7993, Acc.fence: 0.8659, Acc.pole: 0.8372, Acc.traffic light: 0.8855, Acc.traffic sign: 0.9084, Acc.vegetation: 0.9672, Acc.terrain: 0.7784, Acc.sky: 0.9845, Acc.person: 0.9394, Acc.rider: 0.8489, Acc.car: 0.9835, Acc.truck: 0.9478, Acc.bus: 0.9660, Acc.train: 0.9091, Acc.motorcycle: 0.8705, Acc.bicycle: 0.9136 +2022-05-11 05:06:00,866 - mmseg - INFO - Iter [64050/80000] lr: 2.863e-07, eta: 10:01:28, time: 4.814, data_time: 2.994, memory: 69063, decode.loss_cls: 0.0490, decode.loss_mask: 0.1834, decode.loss_dice: 0.4985, decode.d0.loss_cls: 0.2954, decode.d0.loss_mask: 0.1891, decode.d0.loss_dice: 0.5192, decode.d1.loss_cls: 0.0600, decode.d1.loss_mask: 0.1848, decode.d1.loss_dice: 0.5010, decode.d2.loss_cls: 0.0524, decode.d2.loss_mask: 0.1839, decode.d2.loss_dice: 0.5024, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1839, decode.d3.loss_dice: 0.4959, decode.d4.loss_cls: 0.0528, decode.d4.loss_mask: 0.1840, decode.d4.loss_dice: 0.4976, decode.d5.loss_cls: 0.0482, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.4969, decode.d6.loss_cls: 0.0505, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4981, decode.d7.loss_cls: 0.0481, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.4947, decode.d8.loss_cls: 0.0513, decode.d8.loss_mask: 0.1836, decode.d8.loss_dice: 0.4957, loss: 7.6019 +2022-05-11 
05:07:30,847 - mmseg - INFO - Iter [64100/80000] lr: 2.854e-07, eta: 9:59:12, time: 1.800, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0475, decode.loss_mask: 0.1851, decode.loss_dice: 0.4967, decode.d0.loss_cls: 0.2849, decode.d0.loss_mask: 0.1920, decode.d0.loss_dice: 0.5169, decode.d1.loss_cls: 0.0640, decode.d1.loss_mask: 0.1872, decode.d1.loss_dice: 0.5001, decode.d2.loss_cls: 0.0529, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.4953, decode.d3.loss_cls: 0.0475, decode.d3.loss_mask: 0.1856, decode.d3.loss_dice: 0.4945, decode.d4.loss_cls: 0.0495, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.4940, decode.d5.loss_cls: 0.0496, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.5001, decode.d6.loss_cls: 0.0481, decode.d6.loss_mask: 0.1849, decode.d6.loss_dice: 0.4944, decode.d7.loss_cls: 0.0532, decode.d7.loss_mask: 0.1851, decode.d7.loss_dice: 0.4989, decode.d8.loss_cls: 0.0470, decode.d8.loss_mask: 0.1855, decode.d8.loss_dice: 0.4965, loss: 7.5937 +2022-05-11 05:09:00,973 - mmseg - INFO - Iter [64150/80000] lr: 2.845e-07, eta: 9:56:57, time: 1.802, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0455, decode.loss_mask: 0.1757, decode.loss_dice: 0.4880, decode.d0.loss_cls: 0.2860, decode.d0.loss_mask: 0.1819, decode.d0.loss_dice: 0.5091, decode.d1.loss_cls: 0.0546, decode.d1.loss_mask: 0.1767, decode.d1.loss_dice: 0.4960, decode.d2.loss_cls: 0.0527, decode.d2.loss_mask: 0.1760, decode.d2.loss_dice: 0.4946, decode.d3.loss_cls: 0.0485, decode.d3.loss_mask: 0.1757, decode.d3.loss_dice: 0.4884, decode.d4.loss_cls: 0.0485, decode.d4.loss_mask: 0.1755, decode.d4.loss_dice: 0.4926, decode.d5.loss_cls: 0.0476, decode.d5.loss_mask: 0.1757, decode.d5.loss_dice: 0.4915, decode.d6.loss_cls: 0.0489, decode.d6.loss_mask: 0.1756, decode.d6.loss_dice: 0.4900, decode.d7.loss_cls: 0.0455, decode.d7.loss_mask: 0.1754, decode.d7.loss_dice: 0.4905, decode.d8.loss_cls: 0.0488, decode.d8.loss_mask: 0.1756, decode.d8.loss_dice: 0.4880, loss: 7.4191 +2022-05-11 
05:10:34,123 - mmseg - INFO - Iter [64200/80000] lr: 2.836e-07, eta: 9:54:44, time: 1.863, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0396, decode.loss_mask: 0.1876, decode.loss_dice: 0.5006, decode.d0.loss_cls: 0.2900, decode.d0.loss_mask: 0.1950, decode.d0.loss_dice: 0.5171, decode.d1.loss_cls: 0.0556, decode.d1.loss_mask: 0.1890, decode.d1.loss_dice: 0.5077, decode.d2.loss_cls: 0.0488, decode.d2.loss_mask: 0.1886, decode.d2.loss_dice: 0.5011, decode.d3.loss_cls: 0.0481, decode.d3.loss_mask: 0.1882, decode.d3.loss_dice: 0.5014, decode.d4.loss_cls: 0.0457, decode.d4.loss_mask: 0.1876, decode.d4.loss_dice: 0.4986, decode.d5.loss_cls: 0.0452, decode.d5.loss_mask: 0.1878, decode.d5.loss_dice: 0.5002, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1878, decode.d6.loss_dice: 0.4985, decode.d7.loss_cls: 0.0421, decode.d7.loss_mask: 0.1877, decode.d7.loss_dice: 0.4988, decode.d8.loss_cls: 0.0437, decode.d8.loss_mask: 0.1878, decode.d8.loss_dice: 0.4987, loss: 7.6169 +2022-05-11 05:12:06,399 - mmseg - INFO - Iter [64250/80000] lr: 2.827e-07, eta: 9:52:31, time: 1.846, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0498, decode.loss_mask: 0.1855, decode.loss_dice: 0.4975, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1929, decode.d0.loss_dice: 0.5189, decode.d1.loss_cls: 0.0649, decode.d1.loss_mask: 0.1870, decode.d1.loss_dice: 0.5031, decode.d2.loss_cls: 0.0545, decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.4996, decode.d3.loss_cls: 0.0528, decode.d3.loss_mask: 0.1853, decode.d3.loss_dice: 0.4939, decode.d4.loss_cls: 0.0568, decode.d4.loss_mask: 0.1855, decode.d4.loss_dice: 0.4992, decode.d5.loss_cls: 0.0527, decode.d5.loss_mask: 0.1854, decode.d5.loss_dice: 0.4998, decode.d6.loss_cls: 0.0504, decode.d6.loss_mask: 0.1851, decode.d6.loss_dice: 0.4954, decode.d7.loss_cls: 0.0497, decode.d7.loss_mask: 0.1851, decode.d7.loss_dice: 0.4968, decode.d8.loss_cls: 0.0531, decode.d8.loss_mask: 0.1851, decode.d8.loss_dice: 0.4939, loss: 7.6464 +2022-05-11 
05:13:36,170 - mmseg - INFO - Iter [64300/80000] lr: 2.818e-07, eta: 9:50:16, time: 1.794, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0513, decode.loss_mask: 0.1837, decode.loss_dice: 0.5025, decode.d0.loss_cls: 0.3140, decode.d0.loss_mask: 0.1898, decode.d0.loss_dice: 0.5242, decode.d1.loss_cls: 0.0708, decode.d1.loss_mask: 0.1858, decode.d1.loss_dice: 0.5122, decode.d2.loss_cls: 0.0581, decode.d2.loss_mask: 0.1838, decode.d2.loss_dice: 0.5101, decode.d3.loss_cls: 0.0590, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.5046, decode.d4.loss_cls: 0.0536, decode.d4.loss_mask: 0.1839, decode.d4.loss_dice: 0.5051, decode.d5.loss_cls: 0.0535, decode.d5.loss_mask: 0.1839, decode.d5.loss_dice: 0.5056, decode.d6.loss_cls: 0.0513, decode.d6.loss_mask: 0.1840, decode.d6.loss_dice: 0.5057, decode.d7.loss_cls: 0.0462, decode.d7.loss_mask: 0.1841, decode.d7.loss_dice: 0.5043, decode.d8.loss_cls: 0.0534, decode.d8.loss_mask: 0.1842, decode.d8.loss_dice: 0.5008, loss: 7.7320 +2022-05-11 05:15:07,325 - mmseg - INFO - Iter [64350/80000] lr: 2.809e-07, eta: 9:48:03, time: 1.824, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0513, decode.loss_mask: 0.1859, decode.loss_dice: 0.5038, decode.d0.loss_cls: 0.2824, decode.d0.loss_mask: 0.1944, decode.d0.loss_dice: 0.5278, decode.d1.loss_cls: 0.0634, decode.d1.loss_mask: 0.1878, decode.d1.loss_dice: 0.5104, decode.d2.loss_cls: 0.0584, decode.d2.loss_mask: 0.1867, decode.d2.loss_dice: 0.5090, decode.d3.loss_cls: 0.0518, decode.d3.loss_mask: 0.1869, decode.d3.loss_dice: 0.5047, decode.d4.loss_cls: 0.0503, decode.d4.loss_mask: 0.1864, decode.d4.loss_dice: 0.5040, decode.d5.loss_cls: 0.0463, decode.d5.loss_mask: 0.1861, decode.d5.loss_dice: 0.5045, decode.d6.loss_cls: 0.0504, decode.d6.loss_mask: 0.1860, decode.d6.loss_dice: 0.5021, decode.d7.loss_cls: 0.0482, decode.d7.loss_mask: 0.1861, decode.d7.loss_dice: 0.5053, decode.d8.loss_cls: 0.0482, decode.d8.loss_mask: 0.1863, decode.d8.loss_dice: 0.5034, loss: 7.6982 +2022-05-11 
05:16:40,806 - mmseg - INFO - Iter [64400/80000] lr: 2.800e-07, eta: 9:45:52, time: 1.870, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0586, decode.loss_mask: 0.1833, decode.loss_dice: 0.4985, decode.d0.loss_cls: 0.2914, decode.d0.loss_mask: 0.1909, decode.d0.loss_dice: 0.5272, decode.d1.loss_cls: 0.0723, decode.d1.loss_mask: 0.1844, decode.d1.loss_dice: 0.5046, decode.d2.loss_cls: 0.0666, decode.d2.loss_mask: 0.1838, decode.d2.loss_dice: 0.5029, decode.d3.loss_cls: 0.0610, decode.d3.loss_mask: 0.1840, decode.d3.loss_dice: 0.5019, decode.d4.loss_cls: 0.0613, decode.d4.loss_mask: 0.1841, decode.d4.loss_dice: 0.5023, decode.d5.loss_cls: 0.0577, decode.d5.loss_mask: 0.1842, decode.d5.loss_dice: 0.5017, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1832, decode.d6.loss_dice: 0.5010, decode.d7.loss_cls: 0.0613, decode.d7.loss_mask: 0.1834, decode.d7.loss_dice: 0.5008, decode.d8.loss_cls: 0.0619, decode.d8.loss_mask: 0.1837, decode.d8.loss_dice: 0.4955, loss: 7.7329 +2022-05-11 05:18:11,796 - mmseg - INFO - Iter [64450/80000] lr: 2.791e-07, eta: 9:43:39, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0582, decode.loss_mask: 0.1839, decode.loss_dice: 0.5124, decode.d0.loss_cls: 0.3043, decode.d0.loss_mask: 0.1918, decode.d0.loss_dice: 0.5375, decode.d1.loss_cls: 0.0721, decode.d1.loss_mask: 0.1862, decode.d1.loss_dice: 0.5192, decode.d2.loss_cls: 0.0671, decode.d2.loss_mask: 0.1853, decode.d2.loss_dice: 0.5146, decode.d3.loss_cls: 0.0614, decode.d3.loss_mask: 0.1849, decode.d3.loss_dice: 0.5133, decode.d4.loss_cls: 0.0577, decode.d4.loss_mask: 0.1844, decode.d4.loss_dice: 0.5105, decode.d5.loss_cls: 0.0612, decode.d5.loss_mask: 0.1844, decode.d5.loss_dice: 0.5116, decode.d6.loss_cls: 0.0648, decode.d6.loss_mask: 0.1842, decode.d6.loss_dice: 0.5076, decode.d7.loss_cls: 0.0622, decode.d7.loss_mask: 0.1840, decode.d7.loss_dice: 0.5102, decode.d8.loss_cls: 0.0622, decode.d8.loss_mask: 0.1839, decode.d8.loss_dice: 0.5099, loss: 7.8709 +2022-05-11 
05:19:42,801 - mmseg - INFO - Iter [64500/80000] lr: 2.782e-07, eta: 9:41:26, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1865, decode.loss_dice: 0.5080, decode.d0.loss_cls: 0.2952, decode.d0.loss_mask: 0.1951, decode.d0.loss_dice: 0.5392, decode.d1.loss_cls: 0.0754, decode.d1.loss_mask: 0.1878, decode.d1.loss_dice: 0.5209, decode.d2.loss_cls: 0.0669, decode.d2.loss_mask: 0.1873, decode.d2.loss_dice: 0.5165, decode.d3.loss_cls: 0.0669, decode.d3.loss_mask: 0.1861, decode.d3.loss_dice: 0.5112, decode.d4.loss_cls: 0.0604, decode.d4.loss_mask: 0.1859, decode.d4.loss_dice: 0.5100, decode.d5.loss_cls: 0.0555, decode.d5.loss_mask: 0.1864, decode.d5.loss_dice: 0.5142, decode.d6.loss_cls: 0.0556, decode.d6.loss_mask: 0.1856, decode.d6.loss_dice: 0.5101, decode.d7.loss_cls: 0.0566, decode.d7.loss_mask: 0.1862, decode.d7.loss_dice: 0.5164, decode.d8.loss_cls: 0.0597, decode.d8.loss_mask: 0.1866, decode.d8.loss_dice: 0.5100, loss: 7.8755 +2022-05-11 05:21:13,770 - mmseg - INFO - Iter [64550/80000] lr: 2.773e-07, eta: 9:39:13, time: 1.818, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0497, decode.loss_mask: 0.1893, decode.loss_dice: 0.5088, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.1984, decode.d0.loss_dice: 0.5285, decode.d1.loss_cls: 0.0749, decode.d1.loss_mask: 0.1913, decode.d1.loss_dice: 0.5136, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1902, decode.d2.loss_dice: 0.5167, decode.d3.loss_cls: 0.0587, decode.d3.loss_mask: 0.1899, decode.d3.loss_dice: 0.5120, decode.d4.loss_cls: 0.0585, decode.d4.loss_mask: 0.1901, decode.d4.loss_dice: 0.5133, decode.d5.loss_cls: 0.0574, decode.d5.loss_mask: 0.1894, decode.d5.loss_dice: 0.5162, decode.d6.loss_cls: 0.0536, decode.d6.loss_mask: 0.1896, decode.d6.loss_dice: 0.5118, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1900, decode.d7.loss_dice: 0.5123, decode.d8.loss_cls: 0.0510, decode.d8.loss_mask: 0.1897, decode.d8.loss_dice: 0.5076, loss: 7.8755 +2022-05-11 
05:22:46,689 - mmseg - INFO - Iter [64600/80000] lr: 2.764e-07, eta: 9:37:02, time: 1.859, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0552, decode.loss_mask: 0.1841, decode.loss_dice: 0.4894, decode.d0.loss_cls: 0.3094, decode.d0.loss_mask: 0.1920, decode.d0.loss_dice: 0.5149, decode.d1.loss_cls: 0.0655, decode.d1.loss_mask: 0.1854, decode.d1.loss_dice: 0.4999, decode.d2.loss_cls: 0.0593, decode.d2.loss_mask: 0.1846, decode.d2.loss_dice: 0.4942, decode.d3.loss_cls: 0.0527, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.4888, decode.d4.loss_cls: 0.0536, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.4901, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1846, decode.d5.loss_dice: 0.4905, decode.d6.loss_cls: 0.0538, decode.d6.loss_mask: 0.1847, decode.d6.loss_dice: 0.4881, decode.d7.loss_cls: 0.0533, decode.d7.loss_mask: 0.1846, decode.d7.loss_dice: 0.4898, decode.d8.loss_cls: 0.0534, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.4913, loss: 7.5985 +2022-05-11 05:24:16,101 - mmseg - INFO - Iter [64650/80000] lr: 2.755e-07, eta: 9:34:49, time: 1.788, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0503, decode.loss_mask: 0.1785, decode.loss_dice: 0.4903, decode.d0.loss_cls: 0.3209, decode.d0.loss_mask: 0.1856, decode.d0.loss_dice: 0.5115, decode.d1.loss_cls: 0.0710, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.4958, decode.d2.loss_cls: 0.0581, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4891, decode.d3.loss_cls: 0.0582, decode.d3.loss_mask: 0.1784, decode.d3.loss_dice: 0.4915, decode.d4.loss_cls: 0.0579, decode.d4.loss_mask: 0.1791, decode.d4.loss_dice: 0.4908, decode.d5.loss_cls: 0.0500, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4889, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1789, decode.d6.loss_dice: 0.4933, decode.d7.loss_cls: 0.0509, decode.d7.loss_mask: 0.1790, decode.d7.loss_dice: 0.4895, decode.d8.loss_cls: 0.0525, decode.d8.loss_mask: 0.1788, decode.d8.loss_dice: 0.4886, loss: 7.5461 +2022-05-11 
05:25:45,437 - mmseg - INFO - Iter [64700/80000] lr: 2.746e-07, eta: 9:32:35, time: 1.787, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0558, decode.loss_mask: 0.1830, decode.loss_dice: 0.5118, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1902, decode.d0.loss_dice: 0.5312, decode.d1.loss_cls: 0.0775, decode.d1.loss_mask: 0.1840, decode.d1.loss_dice: 0.5152, decode.d2.loss_cls: 0.0675, decode.d2.loss_mask: 0.1840, decode.d2.loss_dice: 0.5127, decode.d3.loss_cls: 0.0598, decode.d3.loss_mask: 0.1833, decode.d3.loss_dice: 0.5093, decode.d4.loss_cls: 0.0667, decode.d4.loss_mask: 0.1837, decode.d4.loss_dice: 0.5108, decode.d5.loss_cls: 0.0615, decode.d5.loss_mask: 0.1835, decode.d5.loss_dice: 0.5079, decode.d6.loss_cls: 0.0610, decode.d6.loss_mask: 0.1830, decode.d6.loss_dice: 0.5098, decode.d7.loss_cls: 0.0628, decode.d7.loss_mask: 0.1834, decode.d7.loss_dice: 0.5113, decode.d8.loss_cls: 0.0622, decode.d8.loss_mask: 0.1831, decode.d8.loss_dice: 0.5067, loss: 7.8433 +2022-05-11 05:27:19,151 - mmseg - INFO - Iter [64750/80000] lr: 2.737e-07, eta: 9:30:26, time: 1.874, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0541, decode.loss_mask: 0.1807, decode.loss_dice: 0.5062, decode.d0.loss_cls: 0.2836, decode.d0.loss_mask: 0.1876, decode.d0.loss_dice: 0.5319, decode.d1.loss_cls: 0.0674, decode.d1.loss_mask: 0.1826, decode.d1.loss_dice: 0.5185, decode.d2.loss_cls: 0.0602, decode.d2.loss_mask: 0.1819, decode.d2.loss_dice: 0.5140, decode.d3.loss_cls: 0.0560, decode.d3.loss_mask: 0.1812, decode.d3.loss_dice: 0.5108, decode.d4.loss_cls: 0.0564, decode.d4.loss_mask: 0.1813, decode.d4.loss_dice: 0.5106, decode.d5.loss_cls: 0.0516, decode.d5.loss_mask: 0.1810, decode.d5.loss_dice: 0.5112, decode.d6.loss_cls: 0.0518, decode.d6.loss_mask: 0.1805, decode.d6.loss_dice: 0.5150, decode.d7.loss_cls: 0.0536, decode.d7.loss_mask: 0.1808, decode.d7.loss_dice: 0.5100, decode.d8.loss_cls: 0.0528, decode.d8.loss_mask: 0.1806, decode.d8.loss_dice: 0.5082, loss: 7.7422 +2022-05-11 
05:28:50,136 - mmseg - INFO - Iter [64800/80000] lr: 2.728e-07, eta: 9:28:15, time: 1.819, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0632, decode.loss_mask: 0.1834, decode.loss_dice: 0.5081, decode.d0.loss_cls: 0.3023, decode.d0.loss_mask: 0.1898, decode.d0.loss_dice: 0.5291, decode.d1.loss_cls: 0.0746, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5161, decode.d2.loss_cls: 0.0685, decode.d2.loss_mask: 0.1849, decode.d2.loss_dice: 0.5153, decode.d3.loss_cls: 0.0650, decode.d3.loss_mask: 0.1838, decode.d3.loss_dice: 0.5050, decode.d4.loss_cls: 0.0645, decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.5051, decode.d5.loss_cls: 0.0614, decode.d5.loss_mask: 0.1837, decode.d5.loss_dice: 0.5060, decode.d6.loss_cls: 0.0633, decode.d6.loss_mask: 0.1832, decode.d6.loss_dice: 0.5027, decode.d7.loss_cls: 0.0543, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.5073, decode.d8.loss_cls: 0.0596, decode.d8.loss_mask: 0.1834, decode.d8.loss_dice: 0.5056, loss: 7.8219 +2022-05-11 05:30:19,650 - mmseg - INFO - Iter [64850/80000] lr: 2.719e-07, eta: 9:26:02, time: 1.791, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0518, decode.loss_mask: 0.1849, decode.loss_dice: 0.5069, decode.d0.loss_cls: 0.2924, decode.d0.loss_mask: 0.1905, decode.d0.loss_dice: 0.5309, decode.d1.loss_cls: 0.0720, decode.d1.loss_mask: 0.1861, decode.d1.loss_dice: 0.5144, decode.d2.loss_cls: 0.0669, decode.d2.loss_mask: 0.1853, decode.d2.loss_dice: 0.5086, decode.d3.loss_cls: 0.0567, decode.d3.loss_mask: 0.1850, decode.d3.loss_dice: 0.5063, decode.d4.loss_cls: 0.0592, decode.d4.loss_mask: 0.1851, decode.d4.loss_dice: 0.5065, decode.d5.loss_cls: 0.0553, decode.d5.loss_mask: 0.1853, decode.d5.loss_dice: 0.5086, decode.d6.loss_cls: 0.0536, decode.d6.loss_mask: 0.1845, decode.d6.loss_dice: 0.5041, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1846, decode.d7.loss_dice: 0.5046, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1846, decode.d8.loss_dice: 0.5041, loss: 7.7762 +2022-05-11 
05:31:50,749 - mmseg - INFO - Iter [64900/80000] lr: 2.710e-07, eta: 9:23:51, time: 1.821, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0483, decode.loss_mask: 0.1812, decode.loss_dice: 0.4991, decode.d0.loss_cls: 0.2853, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5220, decode.d1.loss_cls: 0.0680, decode.d1.loss_mask: 0.1825, decode.d1.loss_dice: 0.5056, decode.d2.loss_cls: 0.0616, decode.d2.loss_mask: 0.1819, decode.d2.loss_dice: 0.5045, decode.d3.loss_cls: 0.0511, decode.d3.loss_mask: 0.1813, decode.d3.loss_dice: 0.4992, decode.d4.loss_cls: 0.0525, decode.d4.loss_mask: 0.1811, decode.d4.loss_dice: 0.5007, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1813, decode.d5.loss_dice: 0.5004, decode.d6.loss_cls: 0.0503, decode.d6.loss_mask: 0.1813, decode.d6.loss_dice: 0.4950, decode.d7.loss_cls: 0.0467, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4950, decode.d8.loss_cls: 0.0514, decode.d8.loss_mask: 0.1814, decode.d8.loss_dice: 0.4992, loss: 7.6102 +2022-05-11 05:33:23,704 - mmseg - INFO - Iter [64950/80000] lr: 2.701e-07, eta: 9:21:43, time: 1.860, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0487, decode.loss_mask: 0.1817, decode.loss_dice: 0.4969, decode.d0.loss_cls: 0.2975, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5183, decode.d1.loss_cls: 0.0697, decode.d1.loss_mask: 0.1838, decode.d1.loss_dice: 0.5020, decode.d2.loss_cls: 0.0588, decode.d2.loss_mask: 0.1832, decode.d2.loss_dice: 0.5007, decode.d3.loss_cls: 0.0515, decode.d3.loss_mask: 0.1826, decode.d3.loss_dice: 0.4976, decode.d4.loss_cls: 0.0478, decode.d4.loss_mask: 0.1828, decode.d4.loss_dice: 0.4994, decode.d5.loss_cls: 0.0518, decode.d5.loss_mask: 0.1830, decode.d5.loss_dice: 0.4947, decode.d6.loss_cls: 0.0471, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.4937, decode.d7.loss_cls: 0.0485, decode.d7.loss_mask: 0.1824, decode.d7.loss_dice: 0.4922, decode.d8.loss_cls: 0.0500, decode.d8.loss_mask: 0.1821, decode.d8.loss_dice: 0.4939, loss: 7.5936 +2022-05-11 
05:34:53,099 - mmseg - INFO - Saving checkpoint at 65000 iterations +2022-05-11 05:35:25,510 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 05:35:25,518 - mmseg - INFO - Iter [65000/80000] lr: 2.692e-07, eta: 9:19:59, time: 2.434, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0462, decode.loss_mask: 0.1828, decode.loss_dice: 0.5011, decode.d0.loss_cls: 0.2865, decode.d0.loss_mask: 0.1885, decode.d0.loss_dice: 0.5275, decode.d1.loss_cls: 0.0594, decode.d1.loss_mask: 0.1836, decode.d1.loss_dice: 0.5095, decode.d2.loss_cls: 0.0581, decode.d2.loss_mask: 0.1827, decode.d2.loss_dice: 0.5093, decode.d3.loss_cls: 0.0542, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.5036, decode.d4.loss_cls: 0.0512, decode.d4.loss_mask: 0.1827, decode.d4.loss_dice: 0.5068, decode.d5.loss_cls: 0.0478, decode.d5.loss_mask: 0.1830, decode.d5.loss_dice: 0.5066, decode.d6.loss_cls: 0.0523, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.5024, decode.d7.loss_cls: 0.0522, decode.d7.loss_mask: 0.1830, decode.d7.loss_dice: 0.5038, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1828, decode.d8.loss_dice: 0.5055, loss: 7.6704 +2022-05-11 05:37:20,988 - mmseg - INFO - per class results: +2022-05-11 05:37:21,000 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.25 | +| sidewalk | 88.61 | 93.93 | +| building | 94.16 | 97.1 | +| wall | 68.69 | 80.01 | +| fence | 74.42 | 81.31 | +| pole | 71.39 | 83.55 | +| traffic light | 76.94 | 87.14 | +| traffic sign | 84.22 | 90.39 | +| vegetation | 93.35 | 96.84 | +| terrain | 68.17 | 77.73 | +| sky | 95.85 | 98.26 | +| person | 86.76 | 93.75 | +| rider | 74.11 | 84.87 | +| car | 96.17 | 98.29 | +| truck | 92.09 | 94.64 | +| bus | 93.51 | 96.44 | +| train | 87.86 | 90.52 | +| motorcycle | 77.62 | 87.19 | +| bicycle | 82.68 | 91.27 | ++---------------+-------+-------+ +2022-05-11 05:37:21,000 - mmseg - INFO - Summary: 
+2022-05-11 05:37:21,000 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.99 | 84.48 | 90.66 | ++-------+-------+-------+ +2022-05-11 05:37:21,005 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 05:37:21,005 - mmseg - INFO - Iter(val) [32] aAcc: 0.9699, mIoU: 0.8448, mAcc: 0.9066, IoU.road: 0.9862, IoU.sidewalk: 0.8861, IoU.building: 0.9416, IoU.wall: 0.6869, IoU.fence: 0.7442, IoU.pole: 0.7139, IoU.traffic light: 0.7694, IoU.traffic sign: 0.8422, IoU.vegetation: 0.9335, IoU.terrain: 0.6817, IoU.sky: 0.9585, IoU.person: 0.8676, IoU.rider: 0.7411, IoU.car: 0.9617, IoU.truck: 0.9209, IoU.bus: 0.9351, IoU.train: 0.8786, IoU.motorcycle: 0.7762, IoU.bicycle: 0.8268, Acc.road: 0.9925, Acc.sidewalk: 0.9393, Acc.building: 0.9710, Acc.wall: 0.8001, Acc.fence: 0.8131, Acc.pole: 0.8355, Acc.traffic light: 0.8714, Acc.traffic sign: 0.9039, Acc.vegetation: 0.9684, Acc.terrain: 0.7773, Acc.sky: 0.9826, Acc.person: 0.9375, Acc.rider: 0.8487, Acc.car: 0.9829, Acc.truck: 0.9464, Acc.bus: 0.9644, Acc.train: 0.9052, Acc.motorcycle: 0.8719, Acc.bicycle: 0.9127 +2022-05-11 05:38:51,896 - mmseg - INFO - Iter [65050/80000] lr: 2.683e-07, eta: 9:19:30, time: 4.130, data_time: 2.330, memory: 69063, decode.loss_cls: 0.0501, decode.loss_mask: 0.1836, decode.loss_dice: 0.5027, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.1893, decode.d0.loss_dice: 0.5269, decode.d1.loss_cls: 0.0690, decode.d1.loss_mask: 0.1850, decode.d1.loss_dice: 0.5138, decode.d2.loss_cls: 0.0577, decode.d2.loss_mask: 0.1846, decode.d2.loss_dice: 0.5102, decode.d3.loss_cls: 0.0568, decode.d3.loss_mask: 0.1839, decode.d3.loss_dice: 0.5079, decode.d4.loss_cls: 0.0561, decode.d4.loss_mask: 0.1835, decode.d4.loss_dice: 0.5058, decode.d5.loss_cls: 0.0524, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.5072, decode.d6.loss_cls: 0.0541, decode.d6.loss_mask: 0.1841, decode.d6.loss_dice: 0.5035, decode.d7.loss_cls: 0.0510, 
decode.d7.loss_mask: 0.1839, decode.d7.loss_dice: 0.5078, decode.d8.loss_cls: 0.0516, decode.d8.loss_mask: 0.1834, decode.d8.loss_dice: 0.5047, loss: 7.7317 +2022-05-11 05:40:23,078 - mmseg - INFO - Iter [65100/80000] lr: 2.674e-07, eta: 9:17:19, time: 1.824, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1862, decode.loss_dice: 0.4936, decode.d0.loss_cls: 0.3069, decode.d0.loss_mask: 0.1925, decode.d0.loss_dice: 0.5135, decode.d1.loss_cls: 0.0675, decode.d1.loss_mask: 0.1870, decode.d1.loss_dice: 0.5058, decode.d2.loss_cls: 0.0604, decode.d2.loss_mask: 0.1870, decode.d2.loss_dice: 0.5004, decode.d3.loss_cls: 0.0611, decode.d3.loss_mask: 0.1866, decode.d3.loss_dice: 0.4988, decode.d4.loss_cls: 0.0536, decode.d4.loss_mask: 0.1867, decode.d4.loss_dice: 0.5001, decode.d5.loss_cls: 0.0556, decode.d5.loss_mask: 0.1868, decode.d5.loss_dice: 0.4979, decode.d6.loss_cls: 0.0564, decode.d6.loss_mask: 0.1867, decode.d6.loss_dice: 0.4937, decode.d7.loss_cls: 0.0583, decode.d7.loss_mask: 0.1864, decode.d7.loss_dice: 0.4915, decode.d8.loss_cls: 0.0563, decode.d8.loss_mask: 0.1866, decode.d8.loss_dice: 0.4946, loss: 7.6916 +2022-05-11 05:41:54,770 - mmseg - INFO - Iter [65150/80000] lr: 2.665e-07, eta: 9:15:09, time: 1.834, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0527, decode.loss_mask: 0.1855, decode.loss_dice: 0.4979, decode.d0.loss_cls: 0.2893, decode.d0.loss_mask: 0.1930, decode.d0.loss_dice: 0.5245, decode.d1.loss_cls: 0.0828, decode.d1.loss_mask: 0.1877, decode.d1.loss_dice: 0.5080, decode.d2.loss_cls: 0.0655, decode.d2.loss_mask: 0.1873, decode.d2.loss_dice: 0.5055, decode.d3.loss_cls: 0.0643, decode.d3.loss_mask: 0.1865, decode.d3.loss_dice: 0.5021, decode.d4.loss_cls: 0.0611, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 0.5003, decode.d5.loss_cls: 0.0621, decode.d5.loss_mask: 0.1864, decode.d5.loss_dice: 0.4987, decode.d6.loss_cls: 0.0546, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.4997, decode.d7.loss_cls: 0.0581, 
decode.d7.loss_mask: 0.1861, decode.d7.loss_dice: 0.5019, decode.d8.loss_cls: 0.0529, decode.d8.loss_mask: 0.1861, decode.d8.loss_dice: 0.5040, loss: 7.7567 +2022-05-11 05:43:25,129 - mmseg - INFO - Iter [65200/80000] lr: 2.656e-07, eta: 9:12:58, time: 1.807, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0600, decode.loss_mask: 0.1841, decode.loss_dice: 0.5018, decode.d0.loss_cls: 0.3024, decode.d0.loss_mask: 0.1903, decode.d0.loss_dice: 0.5257, decode.d1.loss_cls: 0.0730, decode.d1.loss_mask: 0.1859, decode.d1.loss_dice: 0.5110, decode.d2.loss_cls: 0.0626, decode.d2.loss_mask: 0.1850, decode.d2.loss_dice: 0.5063, decode.d3.loss_cls: 0.0583, decode.d3.loss_mask: 0.1843, decode.d3.loss_dice: 0.5022, decode.d4.loss_cls: 0.0581, decode.d4.loss_mask: 0.1841, decode.d4.loss_dice: 0.5021, decode.d5.loss_cls: 0.0583, decode.d5.loss_mask: 0.1843, decode.d5.loss_dice: 0.5035, decode.d6.loss_cls: 0.0550, decode.d6.loss_mask: 0.1841, decode.d6.loss_dice: 0.5012, decode.d7.loss_cls: 0.0563, decode.d7.loss_mask: 0.1839, decode.d7.loss_dice: 0.5006, decode.d8.loss_cls: 0.0610, decode.d8.loss_mask: 0.1843, decode.d8.loss_dice: 0.5047, loss: 7.7545 +2022-05-11 05:44:55,326 - mmseg - INFO - Iter [65250/80000] lr: 2.647e-07, eta: 9:10:48, time: 1.803, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0521, decode.loss_mask: 0.1855, decode.loss_dice: 0.5060, decode.d0.loss_cls: 0.2996, decode.d0.loss_mask: 0.1942, decode.d0.loss_dice: 0.5321, decode.d1.loss_cls: 0.0670, decode.d1.loss_mask: 0.1880, decode.d1.loss_dice: 0.5162, decode.d2.loss_cls: 0.0601, decode.d2.loss_mask: 0.1866, decode.d2.loss_dice: 0.5084, decode.d3.loss_cls: 0.0564, decode.d3.loss_mask: 0.1861, decode.d3.loss_dice: 0.5051, decode.d4.loss_cls: 0.0556, decode.d4.loss_mask: 0.1860, decode.d4.loss_dice: 0.5081, decode.d5.loss_cls: 0.0520, decode.d5.loss_mask: 0.1863, decode.d5.loss_dice: 0.5044, decode.d6.loss_cls: 0.0526, decode.d6.loss_mask: 0.1858, decode.d6.loss_dice: 0.5046, decode.d7.loss_cls: 0.0569, 
decode.d7.loss_mask: 0.1863, decode.d7.loss_dice: 0.5091, decode.d8.loss_cls: 0.0535, decode.d8.loss_mask: 0.1857, decode.d8.loss_dice: 0.5097, loss: 7.7803 +2022-05-11 05:46:28,412 - mmseg - INFO - Iter [65300/80000] lr: 2.638e-07, eta: 9:08:39, time: 1.862, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0522, decode.loss_mask: 0.1869, decode.loss_dice: 0.5066, decode.d0.loss_cls: 0.2987, decode.d0.loss_mask: 0.1956, decode.d0.loss_dice: 0.5292, decode.d1.loss_cls: 0.0674, decode.d1.loss_mask: 0.1888, decode.d1.loss_dice: 0.5169, decode.d2.loss_cls: 0.0576, decode.d2.loss_mask: 0.1882, decode.d2.loss_dice: 0.5141, decode.d3.loss_cls: 0.0565, decode.d3.loss_mask: 0.1877, decode.d3.loss_dice: 0.5125, decode.d4.loss_cls: 0.0571, decode.d4.loss_mask: 0.1877, decode.d4.loss_dice: 0.5070, decode.d5.loss_cls: 0.0528, decode.d5.loss_mask: 0.1877, decode.d5.loss_dice: 0.5097, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1872, decode.d6.loss_dice: 0.5070, decode.d7.loss_cls: 0.0519, decode.d7.loss_mask: 0.1873, decode.d7.loss_dice: 0.5076, decode.d8.loss_cls: 0.0545, decode.d8.loss_mask: 0.1873, decode.d8.loss_dice: 0.5091, loss: 7.8024 +2022-05-11 05:47:58,447 - mmseg - INFO - Iter [65350/80000] lr: 2.629e-07, eta: 9:06:29, time: 1.801, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0463, decode.loss_mask: 0.1770, decode.loss_dice: 0.4986, decode.d0.loss_cls: 0.2938, decode.d0.loss_mask: 0.1832, decode.d0.loss_dice: 0.5162, decode.d1.loss_cls: 0.0616, decode.d1.loss_mask: 0.1784, decode.d1.loss_dice: 0.5038, decode.d2.loss_cls: 0.0519, decode.d2.loss_mask: 0.1774, decode.d2.loss_dice: 0.5028, decode.d3.loss_cls: 0.0522, decode.d3.loss_mask: 0.1772, decode.d3.loss_dice: 0.4984, decode.d4.loss_cls: 0.0489, decode.d4.loss_mask: 0.1777, decode.d4.loss_dice: 0.4973, decode.d5.loss_cls: 0.0539, decode.d5.loss_mask: 0.1772, decode.d5.loss_dice: 0.4963, decode.d6.loss_cls: 0.0465, decode.d6.loss_mask: 0.1775, decode.d6.loss_dice: 0.4969, decode.d7.loss_cls: 0.0509, 
decode.d7.loss_mask: 0.1776, decode.d7.loss_dice: 0.4968, decode.d8.loss_cls: 0.0479, decode.d8.loss_mask: 0.1775, decode.d8.loss_dice: 0.4972, loss: 7.5386 +2022-05-11 05:49:27,532 - mmseg - INFO - Iter [65400/80000] lr: 2.621e-07, eta: 9:04:18, time: 1.782, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0540, decode.loss_mask: 0.1835, decode.loss_dice: 0.5030, decode.d0.loss_cls: 0.2997, decode.d0.loss_mask: 0.1898, decode.d0.loss_dice: 0.5250, decode.d1.loss_cls: 0.0669, decode.d1.loss_mask: 0.1847, decode.d1.loss_dice: 0.5090, decode.d2.loss_cls: 0.0642, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.5074, decode.d3.loss_cls: 0.0547, decode.d3.loss_mask: 0.1844, decode.d3.loss_dice: 0.5055, decode.d4.loss_cls: 0.0588, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.5031, decode.d5.loss_cls: 0.0555, decode.d5.loss_mask: 0.1839, decode.d5.loss_dice: 0.5010, decode.d6.loss_cls: 0.0549, decode.d6.loss_mask: 0.1838, decode.d6.loss_dice: 0.5043, decode.d7.loss_cls: 0.0572, decode.d7.loss_mask: 0.1842, decode.d7.loss_dice: 0.5019, decode.d8.loss_cls: 0.0539, decode.d8.loss_mask: 0.1843, decode.d8.loss_dice: 0.5037, loss: 7.7317 +2022-05-11 05:50:56,421 - mmseg - INFO - Iter [65450/80000] lr: 2.612e-07, eta: 9:02:07, time: 1.777, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0500, decode.loss_mask: 0.1839, decode.loss_dice: 0.4992, decode.d0.loss_cls: 0.2944, decode.d0.loss_mask: 0.1913, decode.d0.loss_dice: 0.5167, decode.d1.loss_cls: 0.0574, decode.d1.loss_mask: 0.1853, decode.d1.loss_dice: 0.4991, decode.d2.loss_cls: 0.0546, decode.d2.loss_mask: 0.1843, decode.d2.loss_dice: 0.4997, decode.d3.loss_cls: 0.0504, decode.d3.loss_mask: 0.1843, decode.d3.loss_dice: 0.4930, decode.d4.loss_cls: 0.0570, decode.d4.loss_mask: 0.1846, decode.d4.loss_dice: 0.4975, decode.d5.loss_cls: 0.0547, decode.d5.loss_mask: 0.1848, decode.d5.loss_dice: 0.4995, decode.d6.loss_cls: 0.0542, decode.d6.loss_mask: 0.1849, decode.d6.loss_dice: 0.4994, decode.d7.loss_cls: 0.0497, 
decode.d7.loss_mask: 0.1848, decode.d7.loss_dice: 0.4935, decode.d8.loss_cls: 0.0542, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.4923, loss: 7.6185 +2022-05-11 05:52:28,722 - mmseg - INFO - Iter [65500/80000] lr: 2.603e-07, eta: 8:59:59, time: 1.847, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0539, decode.loss_mask: 0.1805, decode.loss_dice: 0.4953, decode.d0.loss_cls: 0.3037, decode.d0.loss_mask: 0.1889, decode.d0.loss_dice: 0.5225, decode.d1.loss_cls: 0.0684, decode.d1.loss_mask: 0.1819, decode.d1.loss_dice: 0.5080, decode.d2.loss_cls: 0.0632, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.5032, decode.d3.loss_cls: 0.0581, decode.d3.loss_mask: 0.1807, decode.d3.loss_dice: 0.4963, decode.d4.loss_cls: 0.0626, decode.d4.loss_mask: 0.1809, decode.d4.loss_dice: 0.4990, decode.d5.loss_cls: 0.0557, decode.d5.loss_mask: 0.1809, decode.d5.loss_dice: 0.4985, decode.d6.loss_cls: 0.0604, decode.d6.loss_mask: 0.1811, decode.d6.loss_dice: 0.4985, decode.d7.loss_cls: 0.0551, decode.d7.loss_mask: 0.1809, decode.d7.loss_dice: 0.4986, decode.d8.loss_cls: 0.0553, decode.d8.loss_mask: 0.1809, decode.d8.loss_dice: 0.4993, loss: 7.6739 +2022-05-11 05:53:57,839 - mmseg - INFO - Iter [65550/80000] lr: 2.594e-07, eta: 8:57:49, time: 1.782, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0542, decode.loss_mask: 0.1830, decode.loss_dice: 0.5002, decode.d0.loss_cls: 0.3027, decode.d0.loss_mask: 0.1892, decode.d0.loss_dice: 0.5251, decode.d1.loss_cls: 0.0617, decode.d1.loss_mask: 0.1841, decode.d1.loss_dice: 0.5096, decode.d2.loss_cls: 0.0633, decode.d2.loss_mask: 0.1834, decode.d2.loss_dice: 0.5059, decode.d3.loss_cls: 0.0620, decode.d3.loss_mask: 0.1835, decode.d3.loss_dice: 0.5056, decode.d4.loss_cls: 0.0602, decode.d4.loss_mask: 0.1834, decode.d4.loss_dice: 0.5032, decode.d5.loss_cls: 0.0550, decode.d5.loss_mask: 0.1832, decode.d5.loss_dice: 0.5055, decode.d6.loss_cls: 0.0560, decode.d6.loss_mask: 0.1830, decode.d6.loss_dice: 0.5030, decode.d7.loss_cls: 0.0544, 
decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.5060, decode.d8.loss_cls: 0.0552, decode.d8.loss_mask: 0.1827, decode.d8.loss_dice: 0.5047, loss: 7.7318 +2022-05-11 05:55:27,516 - mmseg - INFO - Iter [65600/80000] lr: 2.585e-07, eta: 8:55:39, time: 1.794, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0517, decode.loss_mask: 0.1833, decode.loss_dice: 0.4976, decode.d0.loss_cls: 0.2875, decode.d0.loss_mask: 0.1889, decode.d0.loss_dice: 0.5200, decode.d1.loss_cls: 0.0578, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.5057, decode.d2.loss_cls: 0.0640, decode.d2.loss_mask: 0.1841, decode.d2.loss_dice: 0.5040, decode.d3.loss_cls: 0.0596, decode.d3.loss_mask: 0.1834, decode.d3.loss_dice: 0.5011, decode.d4.loss_cls: 0.0605, decode.d4.loss_mask: 0.1833, decode.d4.loss_dice: 0.4996, decode.d5.loss_cls: 0.0578, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.4989, decode.d6.loss_cls: 0.0566, decode.d6.loss_mask: 0.1831, decode.d6.loss_dice: 0.5016, decode.d7.loss_cls: 0.0605, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.5041, decode.d8.loss_cls: 0.0614, decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.5011, loss: 7.6925 +2022-05-11 05:56:59,382 - mmseg - INFO - Iter [65650/80000] lr: 2.576e-07, eta: 8:53:31, time: 1.837, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0584, decode.loss_mask: 0.1851, decode.loss_dice: 0.5078, decode.d0.loss_cls: 0.3060, decode.d0.loss_mask: 0.1939, decode.d0.loss_dice: 0.5286, decode.d1.loss_cls: 0.0786, decode.d1.loss_mask: 0.1870, decode.d1.loss_dice: 0.5162, decode.d2.loss_cls: 0.0643, decode.d2.loss_mask: 0.1854, decode.d2.loss_dice: 0.5146, decode.d3.loss_cls: 0.0635, decode.d3.loss_mask: 0.1851, decode.d3.loss_dice: 0.5106, decode.d4.loss_cls: 0.0648, decode.d4.loss_mask: 0.1850, decode.d4.loss_dice: 0.5096, decode.d5.loss_cls: 0.0608, decode.d5.loss_mask: 0.1851, decode.d5.loss_dice: 0.5081, decode.d6.loss_cls: 0.0592, decode.d6.loss_mask: 0.1850, decode.d6.loss_dice: 0.5041, decode.d7.loss_cls: 0.0597, 
decode.d7.loss_mask: 0.1853, decode.d7.loss_dice: 0.5100, decode.d8.loss_cls: 0.0604, decode.d8.loss_mask: 0.1846, decode.d8.loss_dice: 0.5081, loss: 7.8548 +2022-05-11 05:58:32,984 - mmseg - INFO - Iter [65700/80000] lr: 2.567e-07, eta: 8:51:25, time: 1.868, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0628, decode.loss_mask: 0.1831, decode.loss_dice: 0.5057, decode.d0.loss_cls: 0.3023, decode.d0.loss_mask: 0.1892, decode.d0.loss_dice: 0.5292, decode.d1.loss_cls: 0.0733, decode.d1.loss_mask: 0.1845, decode.d1.loss_dice: 0.5116, decode.d2.loss_cls: 0.0666, decode.d2.loss_mask: 0.1837, decode.d2.loss_dice: 0.5104, decode.d3.loss_cls: 0.0633, decode.d3.loss_mask: 0.1840, decode.d3.loss_dice: 0.5037, decode.d4.loss_cls: 0.0639, decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.5040, decode.d5.loss_cls: 0.0637, decode.d5.loss_mask: 0.1835, decode.d5.loss_dice: 0.5050, decode.d6.loss_cls: 0.0566, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.5051, decode.d7.loss_cls: 0.0619, decode.d7.loss_mask: 0.1832, decode.d7.loss_dice: 0.5014, decode.d8.loss_cls: 0.0568, decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.5040, loss: 7.7925 +2022-05-11 06:00:05,792 - mmseg - INFO - Iter [65750/80000] lr: 2.558e-07, eta: 8:49:19, time: 1.860, data_time: 0.021, memory: 69063, decode.loss_cls: 0.0496, decode.loss_mask: 0.1888, decode.loss_dice: 0.5048, decode.d0.loss_cls: 0.2839, decode.d0.loss_mask: 0.1972, decode.d0.loss_dice: 0.5247, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1906, decode.d1.loss_dice: 0.5111, decode.d2.loss_cls: 0.0547, decode.d2.loss_mask: 0.1905, decode.d2.loss_dice: 0.5088, decode.d3.loss_cls: 0.0510, decode.d3.loss_mask: 0.1900, decode.d3.loss_dice: 0.5050, decode.d4.loss_cls: 0.0492, decode.d4.loss_mask: 0.1897, decode.d4.loss_dice: 0.5002, decode.d5.loss_cls: 0.0497, decode.d5.loss_mask: 0.1901, decode.d5.loss_dice: 0.5039, decode.d6.loss_cls: 0.0497, decode.d6.loss_mask: 0.1893, decode.d6.loss_dice: 0.5048, decode.d7.loss_cls: 0.0494, 
decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.4987, decode.d8.loss_cls: 0.0512, decode.d8.loss_mask: 0.1888, decode.d8.loss_dice: 0.5035, loss: 7.7163 +2022-05-11 06:01:35,865 - mmseg - INFO - Iter [65800/80000] lr: 2.549e-07, eta: 8:47:10, time: 1.801, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0498, decode.loss_mask: 0.1839, decode.loss_dice: 0.4882, decode.d0.loss_cls: 0.2909, decode.d0.loss_mask: 0.1904, decode.d0.loss_dice: 0.5150, decode.d1.loss_cls: 0.0705, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5003, decode.d2.loss_cls: 0.0615, decode.d2.loss_mask: 0.1848, decode.d2.loss_dice: 0.4957, decode.d3.loss_cls: 0.0535, decode.d3.loss_mask: 0.1837, decode.d3.loss_dice: 0.4904, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1838, decode.d4.loss_dice: 0.4928, decode.d5.loss_cls: 0.0544, decode.d5.loss_mask: 0.1839, decode.d5.loss_dice: 0.4926, decode.d6.loss_cls: 0.0433, decode.d6.loss_mask: 0.1835, decode.d6.loss_dice: 0.4910, decode.d7.loss_cls: 0.0573, decode.d7.loss_mask: 0.1848, decode.d7.loss_dice: 0.4917, decode.d8.loss_cls: 0.0515, decode.d8.loss_mask: 0.1845, decode.d8.loss_dice: 0.4921, loss: 7.5870 +2022-05-11 06:03:06,824 - mmseg - INFO - Iter [65850/80000] lr: 2.540e-07, eta: 8:45:03, time: 1.819, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0452, decode.loss_mask: 0.1820, decode.loss_dice: 0.4951, decode.d0.loss_cls: 0.3017, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5166, decode.d1.loss_cls: 0.0747, decode.d1.loss_mask: 0.1838, decode.d1.loss_dice: 0.5010, decode.d2.loss_cls: 0.0660, decode.d2.loss_mask: 0.1834, decode.d2.loss_dice: 0.5014, decode.d3.loss_cls: 0.0510, decode.d3.loss_mask: 0.1828, decode.d3.loss_dice: 0.4961, decode.d4.loss_cls: 0.0538, decode.d4.loss_mask: 0.1824, decode.d4.loss_dice: 0.4952, decode.d5.loss_cls: 0.0515, decode.d5.loss_mask: 0.1829, decode.d5.loss_dice: 0.4931, decode.d6.loss_cls: 0.0513, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.4895, decode.d7.loss_cls: 0.0523, 
decode.d7.loss_mask: 0.1823, decode.d7.loss_dice: 0.4942, decode.d8.loss_cls: 0.0549, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.4919, loss: 7.6085 +2022-05-11 06:04:41,277 - mmseg - INFO - Iter [65900/80000] lr: 2.531e-07, eta: 8:42:58, time: 1.889, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1785, decode.loss_dice: 0.5032, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.1857, decode.d0.loss_dice: 0.5243, decode.d1.loss_cls: 0.0668, decode.d1.loss_mask: 0.1806, decode.d1.loss_dice: 0.5099, decode.d2.loss_cls: 0.0605, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.5063, decode.d3.loss_cls: 0.0532, decode.d3.loss_mask: 0.1790, decode.d3.loss_dice: 0.5012, decode.d4.loss_cls: 0.0557, decode.d4.loss_mask: 0.1790, decode.d4.loss_dice: 0.5039, decode.d5.loss_cls: 0.0534, decode.d5.loss_mask: 0.1793, decode.d5.loss_dice: 0.5033, decode.d6.loss_cls: 0.0548, decode.d6.loss_mask: 0.1790, decode.d6.loss_dice: 0.5028, decode.d7.loss_cls: 0.0569, decode.d7.loss_mask: 0.1789, decode.d7.loss_dice: 0.5055, decode.d8.loss_cls: 0.0494, decode.d8.loss_mask: 0.1788, decode.d8.loss_dice: 0.5042, loss: 7.6614 +2022-05-11 06:06:11,005 - mmseg - INFO - Iter [65950/80000] lr: 2.522e-07, eta: 8:40:50, time: 1.795, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0449, decode.loss_mask: 0.1839, decode.loss_dice: 0.4903, decode.d0.loss_cls: 0.2880, decode.d0.loss_mask: 0.1897, decode.d0.loss_dice: 0.5091, decode.d1.loss_cls: 0.0614, decode.d1.loss_mask: 0.1853, decode.d1.loss_dice: 0.4988, decode.d2.loss_cls: 0.0517, decode.d2.loss_mask: 0.1843, decode.d2.loss_dice: 0.4961, decode.d3.loss_cls: 0.0515, decode.d3.loss_mask: 0.1833, decode.d3.loss_dice: 0.4898, decode.d4.loss_cls: 0.0501, decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.4924, decode.d5.loss_cls: 0.0466, decode.d5.loss_mask: 0.1840, decode.d5.loss_dice: 0.4882, decode.d6.loss_cls: 0.0476, decode.d6.loss_mask: 0.1838, decode.d6.loss_dice: 0.4887, decode.d7.loss_cls: 0.0481, 
decode.d7.loss_mask: 0.1839, decode.d7.loss_dice: 0.4900, decode.d8.loss_cls: 0.0510, decode.d8.loss_mask: 0.1838, decode.d8.loss_dice: 0.4892, loss: 7.5192 +2022-05-11 06:07:40,285 - mmseg - INFO - Saving checkpoint at 66000 iterations +2022-05-11 06:08:12,329 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 06:08:12,337 - mmseg - INFO - Iter [66000/80000] lr: 2.513e-07, eta: 8:39:07, time: 2.424, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0457, decode.loss_mask: 0.1788, decode.loss_dice: 0.4910, decode.d0.loss_cls: 0.2876, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5124, decode.d1.loss_cls: 0.0594, decode.d1.loss_mask: 0.1809, decode.d1.loss_dice: 0.4936, decode.d2.loss_cls: 0.0544, decode.d2.loss_mask: 0.1798, decode.d2.loss_dice: 0.4955, decode.d3.loss_cls: 0.0464, decode.d3.loss_mask: 0.1789, decode.d3.loss_dice: 0.4954, decode.d4.loss_cls: 0.0493, decode.d4.loss_mask: 0.1791, decode.d4.loss_dice: 0.4926, decode.d5.loss_cls: 0.0504, decode.d5.loss_mask: 0.1792, decode.d5.loss_dice: 0.4902, decode.d6.loss_cls: 0.0497, decode.d6.loss_mask: 0.1795, decode.d6.loss_dice: 0.4930, decode.d7.loss_cls: 0.0491, decode.d7.loss_mask: 0.1792, decode.d7.loss_dice: 0.4914, decode.d8.loss_cls: 0.0457, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4914, loss: 7.4834 +2022-05-11 06:10:07,576 - mmseg - INFO - per class results: +2022-05-11 06:10:07,582 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.63 | 99.28 | +| sidewalk | 88.73 | 93.62 | +| building | 94.35 | 96.95 | +| wall | 69.92 | 79.91 | +| fence | 74.46 | 81.01 | +| pole | 71.14 | 84.16 | +| traffic light | 76.97 | 88.05 | +| traffic sign | 84.03 | 90.61 | +| vegetation | 93.33 | 96.96 | +| terrain | 68.26 | 78.1 | +| sky | 95.74 | 98.43 | +| person | 86.73 | 93.74 | +| rider | 74.4 | 85.44 | +| car | 96.15 | 98.32 | +| truck | 82.53 | 94.88 | +| bus | 93.52 | 96.7 | +| train 
| 87.7 | 90.45 | +| motorcycle | 77.86 | 86.02 | +| bicycle | 82.68 | 91.75 | ++---------------+-------+-------+ +2022-05-11 06:10:07,582 - mmseg - INFO - Summary: +2022-05-11 06:10:07,583 - mmseg - INFO - ++------+-------+-------+ +| aAcc | mIoU | mAcc | ++------+-------+-------+ +| 97.0 | 84.06 | 90.76 | ++------+-------+-------+ +2022-05-11 06:10:07,585 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 06:10:07,586 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8406, mAcc: 0.9076, IoU.road: 0.9863, IoU.sidewalk: 0.8873, IoU.building: 0.9435, IoU.wall: 0.6992, IoU.fence: 0.7446, IoU.pole: 0.7114, IoU.traffic light: 0.7697, IoU.traffic sign: 0.8403, IoU.vegetation: 0.9333, IoU.terrain: 0.6826, IoU.sky: 0.9574, IoU.person: 0.8673, IoU.rider: 0.7440, IoU.car: 0.9615, IoU.truck: 0.8253, IoU.bus: 0.9352, IoU.train: 0.8770, IoU.motorcycle: 0.7786, IoU.bicycle: 0.8268, Acc.road: 0.9928, Acc.sidewalk: 0.9362, Acc.building: 0.9695, Acc.wall: 0.7991, Acc.fence: 0.8101, Acc.pole: 0.8416, Acc.traffic light: 0.8805, Acc.traffic sign: 0.9061, Acc.vegetation: 0.9696, Acc.terrain: 0.7810, Acc.sky: 0.9843, Acc.person: 0.9374, Acc.rider: 0.8544, Acc.car: 0.9832, Acc.truck: 0.9488, Acc.bus: 0.9670, Acc.train: 0.9045, Acc.motorcycle: 0.8602, Acc.bicycle: 0.9175 +2022-05-11 06:11:41,052 - mmseg - INFO - Iter [66050/80000] lr: 2.504e-07, eta: 8:38:31, time: 4.177, data_time: 2.370, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1795, decode.loss_dice: 0.4794, decode.d0.loss_cls: 0.2996, decode.d0.loss_mask: 0.1856, decode.d0.loss_dice: 0.5024, decode.d1.loss_cls: 0.0616, decode.d1.loss_mask: 0.1814, decode.d1.loss_dice: 0.4892, decode.d2.loss_cls: 0.0576, decode.d2.loss_mask: 0.1804, decode.d2.loss_dice: 0.4811, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1797, decode.d3.loss_dice: 0.4850, decode.d4.loss_cls: 0.0489, decode.d4.loss_mask: 0.1797, decode.d4.loss_dice: 0.4843, decode.d5.loss_cls: 0.0463, 
decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4832, decode.d6.loss_cls: 0.0507, decode.d6.loss_mask: 0.1797, decode.d6.loss_dice: 0.4844, decode.d7.loss_cls: 0.0551, decode.d7.loss_mask: 0.1798, decode.d7.loss_dice: 0.4836, decode.d8.loss_cls: 0.0500, decode.d8.loss_mask: 0.1796, decode.d8.loss_dice: 0.4786, loss: 7.4292 +2022-05-11 06:13:10,638 - mmseg - INFO - Iter [66100/80000] lr: 2.495e-07, eta: 8:36:23, time: 1.792, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0508, decode.loss_mask: 0.1801, decode.loss_dice: 0.4875, decode.d0.loss_cls: 0.2886, decode.d0.loss_mask: 0.1868, decode.d0.loss_dice: 0.5045, decode.d1.loss_cls: 0.0546, decode.d1.loss_mask: 0.1822, decode.d1.loss_dice: 0.4975, decode.d2.loss_cls: 0.0553, decode.d2.loss_mask: 0.1811, decode.d2.loss_dice: 0.4909, decode.d3.loss_cls: 0.0520, decode.d3.loss_mask: 0.1801, decode.d3.loss_dice: 0.4851, decode.d4.loss_cls: 0.0549, decode.d4.loss_mask: 0.1805, decode.d4.loss_dice: 0.4910, decode.d5.loss_cls: 0.0470, decode.d5.loss_mask: 0.1804, decode.d5.loss_dice: 0.4908, decode.d6.loss_cls: 0.0490, decode.d6.loss_mask: 0.1808, decode.d6.loss_dice: 0.4876, decode.d7.loss_cls: 0.0492, decode.d7.loss_mask: 0.1804, decode.d7.loss_dice: 0.4861, decode.d8.loss_cls: 0.0465, decode.d8.loss_mask: 0.1805, decode.d8.loss_dice: 0.4888, loss: 7.4706 +2022-05-11 06:14:41,317 - mmseg - INFO - Iter [66150/80000] lr: 2.486e-07, eta: 8:34:15, time: 1.814, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0507, decode.loss_mask: 0.1793, decode.loss_dice: 0.4989, decode.d0.loss_cls: 0.2890, decode.d0.loss_mask: 0.1850, decode.d0.loss_dice: 0.5189, decode.d1.loss_cls: 0.0707, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.5050, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 0.1801, decode.d2.loss_dice: 0.5057, decode.d3.loss_cls: 0.0532, decode.d3.loss_mask: 0.1790, decode.d3.loss_dice: 0.5007, decode.d4.loss_cls: 0.0563, decode.d4.loss_mask: 0.1791, decode.d4.loss_dice: 0.4982, decode.d5.loss_cls: 0.0557, 
decode.d5.loss_mask: 0.1797, decode.d5.loss_dice: 0.5001, decode.d6.loss_cls: 0.0585, decode.d6.loss_mask: 0.1791, decode.d6.loss_dice: 0.5019, decode.d7.loss_cls: 0.0544, decode.d7.loss_mask: 0.1791, decode.d7.loss_dice: 0.4985, decode.d8.loss_cls: 0.0537, decode.d8.loss_mask: 0.1789, decode.d8.loss_dice: 0.4999, loss: 7.6314 +2022-05-11 06:16:11,813 - mmseg - INFO - Iter [66200/80000] lr: 2.477e-07, eta: 8:32:08, time: 1.810, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0508, decode.loss_mask: 0.1832, decode.loss_dice: 0.5003, decode.d0.loss_cls: 0.2896, decode.d0.loss_mask: 0.1904, decode.d0.loss_dice: 0.5265, decode.d1.loss_cls: 0.0696, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.5095, decode.d2.loss_cls: 0.0609, decode.d2.loss_mask: 0.1847, decode.d2.loss_dice: 0.5040, decode.d3.loss_cls: 0.0555, decode.d3.loss_mask: 0.1849, decode.d3.loss_dice: 0.5026, decode.d4.loss_cls: 0.0561, decode.d4.loss_mask: 0.1838, decode.d4.loss_dice: 0.4987, decode.d5.loss_cls: 0.0536, decode.d5.loss_mask: 0.1843, decode.d5.loss_dice: 0.5028, decode.d6.loss_cls: 0.0508, decode.d6.loss_mask: 0.1835, decode.d6.loss_dice: 0.5006, decode.d7.loss_cls: 0.0502, decode.d7.loss_mask: 0.1837, decode.d7.loss_dice: 0.5015, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.5047, loss: 7.6843 +2022-05-11 06:17:44,047 - mmseg - INFO - Iter [66250/80000] lr: 2.468e-07, eta: 8:30:02, time: 1.845, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0525, decode.loss_mask: 0.1862, decode.loss_dice: 0.4958, decode.d0.loss_cls: 0.2999, decode.d0.loss_mask: 0.1950, decode.d0.loss_dice: 0.5185, decode.d1.loss_cls: 0.0643, decode.d1.loss_mask: 0.1879, decode.d1.loss_dice: 0.5019, decode.d2.loss_cls: 0.0613, decode.d2.loss_mask: 0.1877, decode.d2.loss_dice: 0.4989, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1870, decode.d3.loss_dice: 0.5014, decode.d4.loss_cls: 0.0532, decode.d4.loss_mask: 0.1864, decode.d4.loss_dice: 0.4958, decode.d5.loss_cls: 0.0539, 
decode.d5.loss_mask: 0.1869, decode.d5.loss_dice: 0.4943, decode.d6.loss_cls: 0.0516, decode.d6.loss_mask: 0.1872, decode.d6.loss_dice: 0.4985, decode.d7.loss_cls: 0.0487, decode.d7.loss_mask: 0.1870, decode.d7.loss_dice: 0.4973, decode.d8.loss_cls: 0.0508, decode.d8.loss_mask: 0.1867, decode.d8.loss_dice: 0.4944, loss: 7.6635 +2022-05-11 06:19:13,460 - mmseg - INFO - Iter [66300/80000] lr: 2.459e-07, eta: 8:27:55, time: 1.788, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0529, decode.loss_mask: 0.1791, decode.loss_dice: 0.5037, decode.d0.loss_cls: 0.2977, decode.d0.loss_mask: 0.1845, decode.d0.loss_dice: 0.5245, decode.d1.loss_cls: 0.0725, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.5094, decode.d2.loss_cls: 0.0626, decode.d2.loss_mask: 0.1801, decode.d2.loss_dice: 0.5072, decode.d3.loss_cls: 0.0574, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.5052, decode.d4.loss_cls: 0.0655, decode.d4.loss_mask: 0.1797, decode.d4.loss_dice: 0.5016, decode.d5.loss_cls: 0.0593, decode.d5.loss_mask: 0.1798, decode.d5.loss_dice: 0.5040, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1798, decode.d6.loss_dice: 0.4994, decode.d7.loss_cls: 0.0579, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.5011, decode.d8.loss_cls: 0.0527, decode.d8.loss_mask: 0.1792, decode.d8.loss_dice: 0.5010, loss: 7.6908 +2022-05-11 06:20:44,393 - mmseg - INFO - Iter [66350/80000] lr: 2.450e-07, eta: 8:25:48, time: 1.819, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0411, decode.loss_mask: 0.1849, decode.loss_dice: 0.4923, decode.d0.loss_cls: 0.2939, decode.d0.loss_mask: 0.1920, decode.d0.loss_dice: 0.5148, decode.d1.loss_cls: 0.0563, decode.d1.loss_mask: 0.1870, decode.d1.loss_dice: 0.4993, decode.d2.loss_cls: 0.0493, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.4959, decode.d3.loss_cls: 0.0471, decode.d3.loss_mask: 0.1854, decode.d3.loss_dice: 0.4938, decode.d4.loss_cls: 0.0442, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.4902, decode.d5.loss_cls: 0.0395, 
decode.d5.loss_mask: 0.1850, decode.d5.loss_dice: 0.4957, decode.d6.loss_cls: 0.0406, decode.d6.loss_mask: 0.1848, decode.d6.loss_dice: 0.4948, decode.d7.loss_cls: 0.0466, decode.d7.loss_mask: 0.1850, decode.d7.loss_dice: 0.4934, decode.d8.loss_cls: 0.0464, decode.d8.loss_mask: 0.1844, decode.d8.loss_dice: 0.4949, loss: 7.5299 +2022-05-11 06:22:14,398 - mmseg - INFO - Iter [66400/80000] lr: 2.441e-07, eta: 8:23:41, time: 1.800, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0477, decode.loss_mask: 0.1816, decode.loss_dice: 0.4845, decode.d0.loss_cls: 0.2884, decode.d0.loss_mask: 0.1876, decode.d0.loss_dice: 0.5030, decode.d1.loss_cls: 0.0681, decode.d1.loss_mask: 0.1826, decode.d1.loss_dice: 0.4944, decode.d2.loss_cls: 0.0553, decode.d2.loss_mask: 0.1823, decode.d2.loss_dice: 0.4895, decode.d3.loss_cls: 0.0532, decode.d3.loss_mask: 0.1811, decode.d3.loss_dice: 0.4878, decode.d4.loss_cls: 0.0501, decode.d4.loss_mask: 0.1811, decode.d4.loss_dice: 0.4883, decode.d5.loss_cls: 0.0489, decode.d5.loss_mask: 0.1814, decode.d5.loss_dice: 0.4901, decode.d6.loss_cls: 0.0462, decode.d6.loss_mask: 0.1808, decode.d6.loss_dice: 0.4848, decode.d7.loss_cls: 0.0499, decode.d7.loss_mask: 0.1814, decode.d7.loss_dice: 0.4886, decode.d8.loss_cls: 0.0537, decode.d8.loss_mask: 0.1817, decode.d8.loss_dice: 0.4891, loss: 7.4833 +2022-05-11 06:23:46,936 - mmseg - INFO - Iter [66450/80000] lr: 2.432e-07, eta: 8:21:37, time: 1.851, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0521, decode.loss_mask: 0.1816, decode.loss_dice: 0.4924, decode.d0.loss_cls: 0.2986, decode.d0.loss_mask: 0.1871, decode.d0.loss_dice: 0.5171, decode.d1.loss_cls: 0.0663, decode.d1.loss_mask: 0.1831, decode.d1.loss_dice: 0.5001, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1829, decode.d2.loss_dice: 0.4958, decode.d3.loss_cls: 0.0582, decode.d3.loss_mask: 0.1822, decode.d3.loss_dice: 0.4950, decode.d4.loss_cls: 0.0543, decode.d4.loss_mask: 0.1822, decode.d4.loss_dice: 0.4913, decode.d5.loss_cls: 0.0594, 
decode.d5.loss_mask: 0.1819, decode.d5.loss_dice: 0.4922, decode.d6.loss_cls: 0.0542, decode.d6.loss_mask: 0.1818, decode.d6.loss_dice: 0.4933, decode.d7.loss_cls: 0.0530, decode.d7.loss_mask: 0.1818, decode.d7.loss_dice: 0.4902, decode.d8.loss_cls: 0.0531, decode.d8.loss_mask: 0.1821, decode.d8.loss_dice: 0.4940, loss: 7.5980 +2022-05-11 06:25:16,306 - mmseg - INFO - Iter [66500/80000] lr: 2.423e-07, eta: 8:19:30, time: 1.787, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0454, decode.loss_mask: 0.1832, decode.loss_dice: 0.4864, decode.d0.loss_cls: 0.2861, decode.d0.loss_mask: 0.1893, decode.d0.loss_dice: 0.5077, decode.d1.loss_cls: 0.0557, decode.d1.loss_mask: 0.1847, decode.d1.loss_dice: 0.4970, decode.d2.loss_cls: 0.0509, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.4928, decode.d3.loss_cls: 0.0477, decode.d3.loss_mask: 0.1842, decode.d3.loss_dice: 0.4913, decode.d4.loss_cls: 0.0473, decode.d4.loss_mask: 0.1843, decode.d4.loss_dice: 0.4883, decode.d5.loss_cls: 0.0480, decode.d5.loss_mask: 0.1843, decode.d5.loss_dice: 0.4869, decode.d6.loss_cls: 0.0447, decode.d6.loss_mask: 0.1839, decode.d6.loss_dice: 0.4894, decode.d7.loss_cls: 0.0507, decode.d7.loss_mask: 0.1840, decode.d7.loss_dice: 0.4918, decode.d8.loss_cls: 0.0450, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4867, loss: 7.4854 +2022-05-11 06:26:45,525 - mmseg - INFO - Iter [66550/80000] lr: 2.414e-07, eta: 8:17:23, time: 1.785, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0493, decode.loss_mask: 0.1853, decode.loss_dice: 0.4928, decode.d0.loss_cls: 0.2959, decode.d0.loss_mask: 0.1922, decode.d0.loss_dice: 0.5136, decode.d1.loss_cls: 0.0576, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5024, decode.d2.loss_cls: 0.0570, decode.d2.loss_mask: 0.1860, decode.d2.loss_dice: 0.4961, decode.d3.loss_cls: 0.0538, decode.d3.loss_mask: 0.1858, decode.d3.loss_dice: 0.4983, decode.d4.loss_cls: 0.0549, decode.d4.loss_mask: 0.1858, decode.d4.loss_dice: 0.4920, decode.d5.loss_cls: 0.0555, 
decode.d5.loss_mask: 0.1857, decode.d5.loss_dice: 0.4960, decode.d6.loss_cls: 0.0517, decode.d6.loss_mask: 0.1855, decode.d6.loss_dice: 0.4953, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1855, decode.d7.loss_dice: 0.4947, decode.d8.loss_cls: 0.0563, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.4918, loss: 7.6259 +2022-05-11 06:28:17,258 - mmseg - INFO - Iter [66600/80000] lr: 2.405e-07, eta: 8:15:18, time: 1.835, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0521, decode.loss_mask: 0.1843, decode.loss_dice: 0.4953, decode.d0.loss_cls: 0.2869, decode.d0.loss_mask: 0.1914, decode.d0.loss_dice: 0.5175, decode.d1.loss_cls: 0.0640, decode.d1.loss_mask: 0.1871, decode.d1.loss_dice: 0.5059, decode.d2.loss_cls: 0.0552, decode.d2.loss_mask: 0.1852, decode.d2.loss_dice: 0.4977, decode.d3.loss_cls: 0.0535, decode.d3.loss_mask: 0.1846, decode.d3.loss_dice: 0.4948, decode.d4.loss_cls: 0.0553, decode.d4.loss_mask: 0.1855, decode.d4.loss_dice: 0.4963, decode.d5.loss_cls: 0.0515, decode.d5.loss_mask: 0.1850, decode.d5.loss_dice: 0.4960, decode.d6.loss_cls: 0.0446, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.4969, decode.d7.loss_cls: 0.0500, decode.d7.loss_mask: 0.1845, decode.d7.loss_dice: 0.4963, decode.d8.loss_cls: 0.0516, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.4953, loss: 7.6133 +2022-05-11 06:29:47,893 - mmseg - INFO - Iter [66650/80000] lr: 2.396e-07, eta: 8:13:13, time: 1.813, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0464, decode.loss_mask: 0.1815, decode.loss_dice: 0.4930, decode.d0.loss_cls: 0.2984, decode.d0.loss_mask: 0.1868, decode.d0.loss_dice: 0.5151, decode.d1.loss_cls: 0.0615, decode.d1.loss_mask: 0.1826, decode.d1.loss_dice: 0.4990, decode.d2.loss_cls: 0.0541, decode.d2.loss_mask: 0.1822, decode.d2.loss_dice: 0.4975, decode.d3.loss_cls: 0.0480, decode.d3.loss_mask: 0.1815, decode.d3.loss_dice: 0.4937, decode.d4.loss_cls: 0.0470, decode.d4.loss_mask: 0.1815, decode.d4.loss_dice: 0.4935, decode.d5.loss_cls: 0.0504, 
decode.d5.loss_mask: 0.1815, decode.d5.loss_dice: 0.4929, decode.d6.loss_cls: 0.0481, decode.d6.loss_mask: 0.1820, decode.d6.loss_dice: 0.4935, decode.d7.loss_cls: 0.0451, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4909, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1817, decode.d8.loss_dice: 0.4953, loss: 7.5387 +2022-05-11 06:31:17,503 - mmseg - INFO - Iter [66700/80000] lr: 2.387e-07, eta: 8:11:07, time: 1.792, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0494, decode.loss_mask: 0.1766, decode.loss_dice: 0.4872, decode.d0.loss_cls: 0.2929, decode.d0.loss_mask: 0.1829, decode.d0.loss_dice: 0.5112, decode.d1.loss_cls: 0.0716, decode.d1.loss_mask: 0.1777, decode.d1.loss_dice: 0.4967, decode.d2.loss_cls: 0.0649, decode.d2.loss_mask: 0.1772, decode.d2.loss_dice: 0.4929, decode.d3.loss_cls: 0.0564, decode.d3.loss_mask: 0.1766, decode.d3.loss_dice: 0.4882, decode.d4.loss_cls: 0.0611, decode.d4.loss_mask: 0.1769, decode.d4.loss_dice: 0.4916, decode.d5.loss_cls: 0.0549, decode.d5.loss_mask: 0.1769, decode.d5.loss_dice: 0.4893, decode.d6.loss_cls: 0.0491, decode.d6.loss_mask: 0.1768, decode.d6.loss_dice: 0.4929, decode.d7.loss_cls: 0.0509, decode.d7.loss_mask: 0.1769, decode.d7.loss_dice: 0.4908, decode.d8.loss_cls: 0.0517, decode.d8.loss_mask: 0.1763, decode.d8.loss_dice: 0.4920, loss: 7.5103 +2022-05-11 06:32:46,313 - mmseg - INFO - Iter [66750/80000] lr: 2.378e-07, eta: 8:09:01, time: 1.775, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0500, decode.loss_mask: 0.1880, decode.loss_dice: 0.4955, decode.d0.loss_cls: 0.2948, decode.d0.loss_mask: 0.1952, decode.d0.loss_dice: 0.5160, decode.d1.loss_cls: 0.0666, decode.d1.loss_mask: 0.1907, decode.d1.loss_dice: 0.5023, decode.d2.loss_cls: 0.0561, decode.d2.loss_mask: 0.1897, decode.d2.loss_dice: 0.4988, decode.d3.loss_cls: 0.0567, decode.d3.loss_mask: 0.1892, decode.d3.loss_dice: 0.4983, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1892, decode.d4.loss_dice: 0.4967, decode.d5.loss_cls: 0.0486, 
decode.d5.loss_mask: 0.1891, decode.d5.loss_dice: 0.4960, decode.d6.loss_cls: 0.0485, decode.d6.loss_mask: 0.1882, decode.d6.loss_dice: 0.4968, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1885, decode.d7.loss_dice: 0.4959, decode.d8.loss_cls: 0.0475, decode.d8.loss_mask: 0.1883, decode.d8.loss_dice: 0.4942, loss: 7.6609 +2022-05-11 06:34:19,218 - mmseg - INFO - Iter [66800/80000] lr: 2.369e-07, eta: 8:06:57, time: 1.860, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0573, decode.loss_mask: 0.1848, decode.loss_dice: 0.5001, decode.d0.loss_cls: 0.2942, decode.d0.loss_mask: 0.1911, decode.d0.loss_dice: 0.5244, decode.d1.loss_cls: 0.0830, decode.d1.loss_mask: 0.1860, decode.d1.loss_dice: 0.5085, decode.d2.loss_cls: 0.0635, decode.d2.loss_mask: 0.1855, decode.d2.loss_dice: 0.5054, decode.d3.loss_cls: 0.0649, decode.d3.loss_mask: 0.1849, decode.d3.loss_dice: 0.5051, decode.d4.loss_cls: 0.0634, decode.d4.loss_mask: 0.1848, decode.d4.loss_dice: 0.5037, decode.d5.loss_cls: 0.0618, decode.d5.loss_mask: 0.1851, decode.d5.loss_dice: 0.5056, decode.d6.loss_cls: 0.0657, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.5049, decode.d7.loss_cls: 0.0625, decode.d7.loss_mask: 0.1847, decode.d7.loss_dice: 0.5040, decode.d8.loss_cls: 0.0593, decode.d8.loss_mask: 0.1848, decode.d8.loss_dice: 0.5054, loss: 7.7990 +2022-05-11 06:35:48,341 - mmseg - INFO - Iter [66850/80000] lr: 2.360e-07, eta: 8:04:52, time: 1.783, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0463, decode.loss_mask: 0.1837, decode.loss_dice: 0.4866, decode.d0.loss_cls: 0.2916, decode.d0.loss_mask: 0.1894, decode.d0.loss_dice: 0.5021, decode.d1.loss_cls: 0.0548, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.4930, decode.d2.loss_cls: 0.0537, decode.d2.loss_mask: 0.1840, decode.d2.loss_dice: 0.4893, decode.d3.loss_cls: 0.0494, decode.d3.loss_mask: 0.1836, decode.d3.loss_dice: 0.4889, decode.d4.loss_cls: 0.0464, decode.d4.loss_mask: 0.1837, decode.d4.loss_dice: 0.4841, decode.d5.loss_cls: 0.0462, 
decode.d5.loss_mask: 0.1839, decode.d5.loss_dice: 0.4890, decode.d6.loss_cls: 0.0471, decode.d6.loss_mask: 0.1832, decode.d6.loss_dice: 0.4851, decode.d7.loss_cls: 0.0460, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.4877, decode.d8.loss_cls: 0.0468, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4856, loss: 7.4632 +2022-05-11 06:37:18,694 - mmseg - INFO - Iter [66900/80000] lr: 2.351e-07, eta: 8:02:47, time: 1.807, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0442, decode.loss_mask: 0.1848, decode.loss_dice: 0.4892, decode.d0.loss_cls: 0.2998, decode.d0.loss_mask: 0.1913, decode.d0.loss_dice: 0.5108, decode.d1.loss_cls: 0.0652, decode.d1.loss_mask: 0.1867, decode.d1.loss_dice: 0.4978, decode.d2.loss_cls: 0.0566, decode.d2.loss_mask: 0.1856, decode.d2.loss_dice: 0.4949, decode.d3.loss_cls: 0.0547, decode.d3.loss_mask: 0.1853, decode.d3.loss_dice: 0.4913, decode.d4.loss_cls: 0.0579, decode.d4.loss_mask: 0.1852, decode.d4.loss_dice: 0.4900, decode.d5.loss_cls: 0.0527, decode.d5.loss_mask: 0.1849, decode.d5.loss_dice: 0.4914, decode.d6.loss_cls: 0.0555, decode.d6.loss_mask: 0.1849, decode.d6.loss_dice: 0.4895, decode.d7.loss_cls: 0.0458, decode.d7.loss_mask: 0.1849, decode.d7.loss_dice: 0.4879, decode.d8.loss_cls: 0.0509, decode.d8.loss_mask: 0.1847, decode.d8.loss_dice: 0.4883, loss: 7.5724 +2022-05-11 06:38:47,005 - mmseg - INFO - Iter [66950/80000] lr: 2.342e-07, eta: 8:00:41, time: 1.766, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0427, decode.loss_mask: 0.1828, decode.loss_dice: 0.4845, decode.d0.loss_cls: 0.2994, decode.d0.loss_mask: 0.1898, decode.d0.loss_dice: 0.5080, decode.d1.loss_cls: 0.0544, decode.d1.loss_mask: 0.1854, decode.d1.loss_dice: 0.4934, decode.d2.loss_cls: 0.0557, decode.d2.loss_mask: 0.1839, decode.d2.loss_dice: 0.4929, decode.d3.loss_cls: 0.0446, decode.d3.loss_mask: 0.1836, decode.d3.loss_dice: 0.4880, decode.d4.loss_cls: 0.0459, decode.d4.loss_mask: 0.1842, decode.d4.loss_dice: 0.4883, decode.d5.loss_cls: 0.0440, 
decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.4873, decode.d6.loss_cls: 0.0472, decode.d6.loss_mask: 0.1830, decode.d6.loss_dice: 0.4859, decode.d7.loss_cls: 0.0411, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.4871, decode.d8.loss_cls: 0.0384, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.4874, loss: 7.4580 +2022-05-11 06:40:20,026 - mmseg - INFO - Saving checkpoint at 67000 iterations +2022-05-11 06:40:54,032 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 06:40:54,039 - mmseg - INFO - Iter [67000/80000] lr: 2.333e-07, eta: 7:59:02, time: 2.538, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0433, decode.loss_mask: 0.1859, decode.loss_dice: 0.4888, decode.d0.loss_cls: 0.2870, decode.d0.loss_mask: 0.1930, decode.d0.loss_dice: 0.5117, decode.d1.loss_cls: 0.0625, decode.d1.loss_mask: 0.1874, decode.d1.loss_dice: 0.4968, decode.d2.loss_cls: 0.0549, decode.d2.loss_mask: 0.1862, decode.d2.loss_dice: 0.4976, decode.d3.loss_cls: 0.0501, decode.d3.loss_mask: 0.1862, decode.d3.loss_dice: 0.4940, decode.d4.loss_cls: 0.0498, decode.d4.loss_mask: 0.1862, decode.d4.loss_dice: 0.4898, decode.d5.loss_cls: 0.0488, decode.d5.loss_mask: 0.1865, decode.d5.loss_dice: 0.4929, decode.d6.loss_cls: 0.0484, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.4925, decode.d7.loss_cls: 0.0488, decode.d7.loss_mask: 0.1856, decode.d7.loss_dice: 0.4905, decode.d8.loss_cls: 0.0467, decode.d8.loss_mask: 0.1858, decode.d8.loss_dice: 0.4891, loss: 7.5528 +2022-05-11 06:42:49,765 - mmseg - INFO - per class results: +2022-05-11 06:42:49,774 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.64 | 99.26 | +| sidewalk | 88.86 | 93.95 | +| building | 94.21 | 97.04 | +| wall | 69.48 | 79.95 | +| fence | 74.39 | 81.3 | +| pole | 71.34 | 84.02 | +| traffic light | 76.96 | 86.66 | +| traffic sign | 84.04 | 89.98 | +| vegetation | 93.4 | 96.94 | +| terrain | 68.83 | 
77.82 | +| sky | 95.78 | 98.43 | +| person | 86.76 | 93.75 | +| rider | 74.25 | 85.19 | +| car | 96.15 | 98.28 | +| truck | 92.17 | 94.62 | +| bus | 93.53 | 96.66 | +| train | 87.92 | 90.71 | +| motorcycle | 77.45 | 87.19 | +| bicycle | 82.67 | 91.69 | ++---------------+-------+-------+ +2022-05-11 06:42:49,774 - mmseg - INFO - Summary: +2022-05-11 06:42:49,774 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.02 | 84.57 | 90.71 | ++-------+-------+-------+ +2022-05-11 06:42:49,777 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 06:42:49,777 - mmseg - INFO - Iter(val) [32] aAcc: 0.9702, mIoU: 0.8457, mAcc: 0.9071, IoU.road: 0.9864, IoU.sidewalk: 0.8886, IoU.building: 0.9421, IoU.wall: 0.6948, IoU.fence: 0.7439, IoU.pole: 0.7134, IoU.traffic light: 0.7696, IoU.traffic sign: 0.8404, IoU.vegetation: 0.9340, IoU.terrain: 0.6883, IoU.sky: 0.9578, IoU.person: 0.8676, IoU.rider: 0.7425, IoU.car: 0.9615, IoU.truck: 0.9217, IoU.bus: 0.9353, IoU.train: 0.8792, IoU.motorcycle: 0.7745, IoU.bicycle: 0.8267, Acc.road: 0.9926, Acc.sidewalk: 0.9395, Acc.building: 0.9704, Acc.wall: 0.7995, Acc.fence: 0.8130, Acc.pole: 0.8402, Acc.traffic light: 0.8666, Acc.traffic sign: 0.8998, Acc.vegetation: 0.9694, Acc.terrain: 0.7782, Acc.sky: 0.9843, Acc.person: 0.9375, Acc.rider: 0.8519, Acc.car: 0.9828, Acc.truck: 0.9462, Acc.bus: 0.9666, Acc.train: 0.9071, Acc.motorcycle: 0.8719, Acc.bicycle: 0.9169 +2022-05-11 06:44:18,302 - mmseg - INFO - Iter [67050/80000] lr: 2.324e-07, eta: 7:58:15, time: 4.088, data_time: 2.333, memory: 69063, decode.loss_cls: 0.0534, decode.loss_mask: 0.1756, decode.loss_dice: 0.4919, decode.d0.loss_cls: 0.3024, decode.d0.loss_mask: 0.1819, decode.d0.loss_dice: 0.5166, decode.d1.loss_cls: 0.0759, decode.d1.loss_mask: 0.1774, decode.d1.loss_dice: 0.5036, decode.d2.loss_cls: 0.0627, decode.d2.loss_mask: 0.1761, decode.d2.loss_dice: 0.4952, decode.d3.loss_cls: 0.0578, 
decode.d3.loss_mask: 0.1758, decode.d3.loss_dice: 0.4954, decode.d4.loss_cls: 0.0552, decode.d4.loss_mask: 0.1759, decode.d4.loss_dice: 0.4880, decode.d5.loss_cls: 0.0537, decode.d5.loss_mask: 0.1763, decode.d5.loss_dice: 0.4985, decode.d6.loss_cls: 0.0551, decode.d6.loss_mask: 0.1754, decode.d6.loss_dice: 0.4911, decode.d7.loss_cls: 0.0562, decode.d7.loss_mask: 0.1756, decode.d7.loss_dice: 0.4945, decode.d8.loss_cls: 0.0524, decode.d8.loss_mask: 0.1755, decode.d8.loss_dice: 0.4933, loss: 7.5585 +2022-05-11 06:45:47,258 - mmseg - INFO - Iter [67100/80000] lr: 2.315e-07, eta: 7:56:09, time: 1.776, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0494, decode.loss_mask: 0.1820, decode.loss_dice: 0.5074, decode.d0.loss_cls: 0.2972, decode.d0.loss_mask: 0.1888, decode.d0.loss_dice: 0.5277, decode.d1.loss_cls: 0.0705, decode.d1.loss_mask: 0.1831, decode.d1.loss_dice: 0.5150, decode.d2.loss_cls: 0.0586, decode.d2.loss_mask: 0.1827, decode.d2.loss_dice: 0.5077, decode.d3.loss_cls: 0.0558, decode.d3.loss_mask: 0.1823, decode.d3.loss_dice: 0.5075, decode.d4.loss_cls: 0.0581, decode.d4.loss_mask: 0.1825, decode.d4.loss_dice: 0.5080, decode.d5.loss_cls: 0.0543, decode.d5.loss_mask: 0.1825, decode.d5.loss_dice: 0.5054, decode.d6.loss_cls: 0.0505, decode.d6.loss_mask: 0.1824, decode.d6.loss_dice: 0.5033, decode.d7.loss_cls: 0.0548, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.5048, decode.d8.loss_cls: 0.0544, decode.d8.loss_mask: 0.1824, decode.d8.loss_dice: 0.5048, loss: 7.7267 +2022-05-11 06:47:17,713 - mmseg - INFO - Iter [67150/80000] lr: 2.306e-07, eta: 7:54:05, time: 1.811, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0460, decode.loss_mask: 0.1818, decode.loss_dice: 0.4931, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 0.1891, decode.d0.loss_dice: 0.5175, decode.d1.loss_cls: 0.0561, decode.d1.loss_mask: 0.1835, decode.d1.loss_dice: 0.4988, decode.d2.loss_cls: 0.0488, decode.d2.loss_mask: 0.1825, decode.d2.loss_dice: 0.4959, decode.d3.loss_cls: 0.0510, 
decode.d3.loss_mask: 0.1821, decode.d3.loss_dice: 0.4925, decode.d4.loss_cls: 0.0460, decode.d4.loss_mask: 0.1820, decode.d4.loss_dice: 0.4936, decode.d5.loss_cls: 0.0500, decode.d5.loss_mask: 0.1818, decode.d5.loss_dice: 0.4924, decode.d6.loss_cls: 0.0431, decode.d6.loss_mask: 0.1824, decode.d6.loss_dice: 0.4924, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1819, decode.d7.loss_dice: 0.4901, decode.d8.loss_cls: 0.0497, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.4953, loss: 7.5212 +2022-05-11 06:48:51,560 - mmseg - INFO - Iter [67200/80000] lr: 2.297e-07, eta: 7:52:03, time: 1.878, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0417, decode.loss_mask: 0.1807, decode.loss_dice: 0.4884, decode.d0.loss_cls: 0.2963, decode.d0.loss_mask: 0.1871, decode.d0.loss_dice: 0.5061, decode.d1.loss_cls: 0.0566, decode.d1.loss_mask: 0.1824, decode.d1.loss_dice: 0.4946, decode.d2.loss_cls: 0.0518, decode.d2.loss_mask: 0.1817, decode.d2.loss_dice: 0.4930, decode.d3.loss_cls: 0.0489, decode.d3.loss_mask: 0.1815, decode.d3.loss_dice: 0.4885, decode.d4.loss_cls: 0.0432, decode.d4.loss_mask: 0.1819, decode.d4.loss_dice: 0.4876, decode.d5.loss_cls: 0.0463, decode.d5.loss_mask: 0.1815, decode.d5.loss_dice: 0.4915, decode.d6.loss_cls: 0.0476, decode.d6.loss_mask: 0.1812, decode.d6.loss_dice: 0.4863, decode.d7.loss_cls: 0.0456, decode.d7.loss_mask: 0.1813, decode.d7.loss_dice: 0.4908, decode.d8.loss_cls: 0.0483, decode.d8.loss_mask: 0.1810, decode.d8.loss_dice: 0.4877, loss: 7.4607 +2022-05-11 06:50:21,935 - mmseg - INFO - Iter [67250/80000] lr: 2.288e-07, eta: 7:49:59, time: 1.807, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0607, decode.loss_mask: 0.1891, decode.loss_dice: 0.4958, decode.d0.loss_cls: 0.2979, decode.d0.loss_mask: 0.1968, decode.d0.loss_dice: 0.5209, decode.d1.loss_cls: 0.0798, decode.d1.loss_mask: 0.1910, decode.d1.loss_dice: 0.5033, decode.d2.loss_cls: 0.0757, decode.d2.loss_mask: 0.1898, decode.d2.loss_dice: 0.5012, decode.d3.loss_cls: 0.0648, 
decode.d3.loss_mask: 0.1901, decode.d3.loss_dice: 0.5010, decode.d4.loss_cls: 0.0667, decode.d4.loss_mask: 0.1898, decode.d4.loss_dice: 0.4944, decode.d5.loss_cls: 0.0643, decode.d5.loss_mask: 0.1898, decode.d5.loss_dice: 0.4984, decode.d6.loss_cls: 0.0619, decode.d6.loss_mask: 0.1893, decode.d6.loss_dice: 0.4971, decode.d7.loss_cls: 0.0590, decode.d7.loss_mask: 0.1893, decode.d7.loss_dice: 0.4987, decode.d8.loss_cls: 0.0636, decode.d8.loss_mask: 0.1892, decode.d8.loss_dice: 0.4944, loss: 7.8036 +2022-05-11 06:51:52,473 - mmseg - INFO - Iter [67300/80000] lr: 2.280e-07, eta: 7:47:55, time: 1.810, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0461, decode.loss_mask: 0.1797, decode.loss_dice: 0.4943, decode.d0.loss_cls: 0.2989, decode.d0.loss_mask: 0.1857, decode.d0.loss_dice: 0.5171, decode.d1.loss_cls: 0.0610, decode.d1.loss_mask: 0.1806, decode.d1.loss_dice: 0.5025, decode.d2.loss_cls: 0.0541, decode.d2.loss_mask: 0.1803, decode.d2.loss_dice: 0.4990, decode.d3.loss_cls: 0.0517, decode.d3.loss_mask: 0.1802, decode.d3.loss_dice: 0.4969, decode.d4.loss_cls: 0.0491, decode.d4.loss_mask: 0.1804, decode.d4.loss_dice: 0.4942, decode.d5.loss_cls: 0.0443, decode.d5.loss_mask: 0.1804, decode.d5.loss_dice: 0.4948, decode.d6.loss_cls: 0.0487, decode.d6.loss_mask: 0.1798, decode.d6.loss_dice: 0.4939, decode.d7.loss_cls: 0.0433, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4942, decode.d8.loss_cls: 0.0476, decode.d8.loss_mask: 0.1801, decode.d8.loss_dice: 0.4975, loss: 7.5359 +2022-05-11 06:53:24,675 - mmseg - INFO - Iter [67350/80000] lr: 2.271e-07, eta: 7:45:53, time: 1.845, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0444, decode.loss_mask: 0.1837, decode.loss_dice: 0.4875, decode.d0.loss_cls: 0.2870, decode.d0.loss_mask: 0.1903, decode.d0.loss_dice: 0.5119, decode.d1.loss_cls: 0.0550, decode.d1.loss_mask: 0.1854, decode.d1.loss_dice: 0.4973, decode.d2.loss_cls: 0.0563, decode.d2.loss_mask: 0.1851, decode.d2.loss_dice: 0.4964, decode.d3.loss_cls: 0.0480, 
decode.d3.loss_mask: 0.1845, decode.d3.loss_dice: 0.4953, decode.d4.loss_cls: 0.0475, decode.d4.loss_mask: 0.1841, decode.d4.loss_dice: 0.4931, decode.d5.loss_cls: 0.0484, decode.d5.loss_mask: 0.1837, decode.d5.loss_dice: 0.4912, decode.d6.loss_cls: 0.0457, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4912, decode.d7.loss_cls: 0.0476, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.4913, decode.d8.loss_cls: 0.0505, decode.d8.loss_mask: 0.1838, decode.d8.loss_dice: 0.4918, loss: 7.5249 +2022-05-11 06:54:53,847 - mmseg - INFO - Iter [67400/80000] lr: 2.262e-07, eta: 7:43:49, time: 1.783, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0439, decode.loss_mask: 0.1849, decode.loss_dice: 0.4965, decode.d0.loss_cls: 0.2861, decode.d0.loss_mask: 0.1910, decode.d0.loss_dice: 0.5116, decode.d1.loss_cls: 0.0525, decode.d1.loss_mask: 0.1859, decode.d1.loss_dice: 0.5016, decode.d2.loss_cls: 0.0452, decode.d2.loss_mask: 0.1856, decode.d2.loss_dice: 0.4965, decode.d3.loss_cls: 0.0475, decode.d3.loss_mask: 0.1850, decode.d3.loss_dice: 0.4914, decode.d4.loss_cls: 0.0431, decode.d4.loss_mask: 0.1852, decode.d4.loss_dice: 0.4950, decode.d5.loss_cls: 0.0406, decode.d5.loss_mask: 0.1848, decode.d5.loss_dice: 0.4908, decode.d6.loss_cls: 0.0446, decode.d6.loss_mask: 0.1847, decode.d6.loss_dice: 0.4931, decode.d7.loss_cls: 0.0460, decode.d7.loss_mask: 0.1850, decode.d7.loss_dice: 0.4926, decode.d8.loss_cls: 0.0451, decode.d8.loss_mask: 0.1850, decode.d8.loss_dice: 0.4951, loss: 7.5157 +2022-05-11 06:56:23,027 - mmseg - INFO - Iter [67450/80000] lr: 2.253e-07, eta: 7:41:44, time: 1.784, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0483, decode.loss_mask: 0.1859, decode.loss_dice: 0.4911, decode.d0.loss_cls: 0.2836, decode.d0.loss_mask: 0.1933, decode.d0.loss_dice: 0.5163, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1865, decode.d1.loss_dice: 0.5016, decode.d2.loss_cls: 0.0549, decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.4999, decode.d3.loss_cls: 0.0441, 
decode.d3.loss_mask: 0.1859, decode.d3.loss_dice: 0.4971, decode.d4.loss_cls: 0.0490, decode.d4.loss_mask: 0.1858, decode.d4.loss_dice: 0.4979, decode.d5.loss_cls: 0.0499, decode.d5.loss_mask: 0.1859, decode.d5.loss_dice: 0.5000, decode.d6.loss_cls: 0.0401, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.4964, decode.d7.loss_cls: 0.0448, decode.d7.loss_mask: 0.1857, decode.d7.loss_dice: 0.4995, decode.d8.loss_cls: 0.0476, decode.d8.loss_mask: 0.1858, decode.d8.loss_dice: 0.4936, loss: 7.5811 +2022-05-11 06:57:51,631 - mmseg - INFO - Iter [67500/80000] lr: 2.244e-07, eta: 7:39:40, time: 1.772, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0566, decode.loss_mask: 0.1819, decode.loss_dice: 0.5014, decode.d0.loss_cls: 0.2992, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5302, decode.d1.loss_cls: 0.0726, decode.d1.loss_mask: 0.1835, decode.d1.loss_dice: 0.5110, decode.d2.loss_cls: 0.0701, decode.d2.loss_mask: 0.1827, decode.d2.loss_dice: 0.5084, decode.d3.loss_cls: 0.0651, decode.d3.loss_mask: 0.1822, decode.d3.loss_dice: 0.5039, decode.d4.loss_cls: 0.0612, decode.d4.loss_mask: 0.1823, decode.d4.loss_dice: 0.5069, decode.d5.loss_cls: 0.0593, decode.d5.loss_mask: 0.1820, decode.d5.loss_dice: 0.5046, decode.d6.loss_cls: 0.0565, decode.d6.loss_mask: 0.1820, decode.d6.loss_dice: 0.5071, decode.d7.loss_cls: 0.0608, decode.d7.loss_mask: 0.1816, decode.d7.loss_dice: 0.5047, decode.d8.loss_cls: 0.0574, decode.d8.loss_mask: 0.1820, decode.d8.loss_dice: 0.5028, loss: 7.7688 +2022-05-11 06:59:22,679 - mmseg - INFO - Iter [67550/80000] lr: 2.235e-07, eta: 7:37:38, time: 1.821, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0474, decode.loss_mask: 0.1808, decode.loss_dice: 0.4927, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1884, decode.d0.loss_dice: 0.5156, decode.d1.loss_cls: 0.0615, decode.d1.loss_mask: 0.1837, decode.d1.loss_dice: 0.4980, decode.d2.loss_cls: 0.0564, decode.d2.loss_mask: 0.1824, decode.d2.loss_dice: 0.4977, decode.d3.loss_cls: 0.0544, 
decode.d3.loss_mask: 0.1816, decode.d3.loss_dice: 0.4939, decode.d4.loss_cls: 0.0573, decode.d4.loss_mask: 0.1817, decode.d4.loss_dice: 0.4937, decode.d5.loss_cls: 0.0481, decode.d5.loss_mask: 0.1814, decode.d5.loss_dice: 0.4943, decode.d6.loss_cls: 0.0563, decode.d6.loss_mask: 0.1809, decode.d6.loss_dice: 0.4931, decode.d7.loss_cls: 0.0531, decode.d7.loss_mask: 0.1811, decode.d7.loss_dice: 0.4954, decode.d8.loss_cls: 0.0533, decode.d8.loss_mask: 0.1807, decode.d8.loss_dice: 0.4940, loss: 7.5749 +2022-05-11 07:00:52,373 - mmseg - INFO - Iter [67600/80000] lr: 2.226e-07, eta: 7:35:34, time: 1.794, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0481, decode.loss_mask: 0.1826, decode.loss_dice: 0.4980, decode.d0.loss_cls: 0.2902, decode.d0.loss_mask: 0.1897, decode.d0.loss_dice: 0.5216, decode.d1.loss_cls: 0.0568, decode.d1.loss_mask: 0.1845, decode.d1.loss_dice: 0.5043, decode.d2.loss_cls: 0.0551, decode.d2.loss_mask: 0.1831, decode.d2.loss_dice: 0.5055, decode.d3.loss_cls: 0.0501, decode.d3.loss_mask: 0.1829, decode.d3.loss_dice: 0.5003, decode.d4.loss_cls: 0.0529, decode.d4.loss_mask: 0.1830, decode.d4.loss_dice: 0.4999, decode.d5.loss_cls: 0.0384, decode.d5.loss_mask: 0.1832, decode.d5.loss_dice: 0.4980, decode.d6.loss_cls: 0.0429, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.5011, decode.d7.loss_cls: 0.0475, decode.d7.loss_mask: 0.1826, decode.d7.loss_dice: 0.4995, decode.d8.loss_cls: 0.0417, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.4974, loss: 7.5861 +2022-05-11 07:02:22,922 - mmseg - INFO - Iter [67650/80000] lr: 2.217e-07, eta: 7:33:32, time: 1.811, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0543, decode.loss_mask: 0.1805, decode.loss_dice: 0.4920, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 0.1865, decode.d0.loss_dice: 0.5132, decode.d1.loss_cls: 0.0618, decode.d1.loss_mask: 0.1823, decode.d1.loss_dice: 0.4991, decode.d2.loss_cls: 0.0571, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.4940, decode.d3.loss_cls: 0.0555, 
decode.d3.loss_mask: 0.1816, decode.d3.loss_dice: 0.4930, decode.d4.loss_cls: 0.0550, decode.d4.loss_mask: 0.1812, decode.d4.loss_dice: 0.4919, decode.d5.loss_cls: 0.0536, decode.d5.loss_mask: 0.1811, decode.d5.loss_dice: 0.4949, decode.d6.loss_cls: 0.0522, decode.d6.loss_mask: 0.1808, decode.d6.loss_dice: 0.4930, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.1805, decode.d7.loss_dice: 0.4950, decode.d8.loss_cls: 0.0558, decode.d8.loss_mask: 0.1803, decode.d8.loss_dice: 0.4929, loss: 7.5719 +2022-05-11 07:03:52,765 - mmseg - INFO - Iter [67700/80000] lr: 2.208e-07, eta: 7:31:29, time: 1.797, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0481, decode.loss_mask: 0.1810, decode.loss_dice: 0.4875, decode.d0.loss_cls: 0.2929, decode.d0.loss_mask: 0.1873, decode.d0.loss_dice: 0.5110, decode.d1.loss_cls: 0.0590, decode.d1.loss_mask: 0.1825, decode.d1.loss_dice: 0.4934, decode.d2.loss_cls: 0.0595, decode.d2.loss_mask: 0.1825, decode.d2.loss_dice: 0.4956, decode.d3.loss_cls: 0.0494, decode.d3.loss_mask: 0.1815, decode.d3.loss_dice: 0.4931, decode.d4.loss_cls: 0.0479, decode.d4.loss_mask: 0.1811, decode.d4.loss_dice: 0.4884, decode.d5.loss_cls: 0.0489, decode.d5.loss_mask: 0.1811, decode.d5.loss_dice: 0.4878, decode.d6.loss_cls: 0.0421, decode.d6.loss_mask: 0.1811, decode.d6.loss_dice: 0.4883, decode.d7.loss_cls: 0.0487, decode.d7.loss_mask: 0.1812, decode.d7.loss_dice: 0.4879, decode.d8.loss_cls: 0.0475, decode.d8.loss_mask: 0.1810, decode.d8.loss_dice: 0.4863, loss: 7.4834 +2022-05-11 07:05:24,966 - mmseg - INFO - Iter [67750/80000] lr: 2.199e-07, eta: 7:29:28, time: 1.844, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0525, decode.loss_mask: 0.1832, decode.loss_dice: 0.4940, decode.d0.loss_cls: 0.2994, decode.d0.loss_mask: 0.1890, decode.d0.loss_dice: 0.5162, decode.d1.loss_cls: 0.0703, decode.d1.loss_mask: 0.1836, decode.d1.loss_dice: 0.5012, decode.d2.loss_cls: 0.0694, decode.d2.loss_mask: 0.1834, decode.d2.loss_dice: 0.4998, decode.d3.loss_cls: 0.0617, 
decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4948, decode.d4.loss_cls: 0.0625, decode.d4.loss_mask: 0.1826, decode.d4.loss_dice: 0.4942, decode.d5.loss_cls: 0.0622, decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.4931, decode.d6.loss_cls: 0.0577, decode.d6.loss_mask: 0.1825, decode.d6.loss_dice: 0.4943, decode.d7.loss_cls: 0.0577, decode.d7.loss_mask: 0.1827, decode.d7.loss_dice: 0.4928, decode.d8.loss_cls: 0.0584, decode.d8.loss_mask: 0.1828, decode.d8.loss_dice: 0.4939, loss: 7.6622 +2022-05-11 07:06:54,683 - mmseg - INFO - Iter [67800/80000] lr: 2.190e-07, eta: 7:27:25, time: 1.793, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0445, decode.loss_mask: 0.1825, decode.loss_dice: 0.4891, decode.d0.loss_cls: 0.2924, decode.d0.loss_mask: 0.1893, decode.d0.loss_dice: 0.5136, decode.d1.loss_cls: 0.0610, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.4984, decode.d2.loss_cls: 0.0487, decode.d2.loss_mask: 0.1824, decode.d2.loss_dice: 0.4934, decode.d3.loss_cls: 0.0531, decode.d3.loss_mask: 0.1826, decode.d3.loss_dice: 0.4919, decode.d4.loss_cls: 0.0546, decode.d4.loss_mask: 0.1826, decode.d4.loss_dice: 0.4927, decode.d5.loss_cls: 0.0503, decode.d5.loss_mask: 0.1825, decode.d5.loss_dice: 0.4915, decode.d6.loss_cls: 0.0523, decode.d6.loss_mask: 0.1823, decode.d6.loss_dice: 0.4895, decode.d7.loss_cls: 0.0489, decode.d7.loss_mask: 0.1823, decode.d7.loss_dice: 0.4886, decode.d8.loss_cls: 0.0448, decode.d8.loss_mask: 0.1818, decode.d8.loss_dice: 0.4884, loss: 7.5206 +2022-05-11 07:08:24,165 - mmseg - INFO - Iter [67850/80000] lr: 2.181e-07, eta: 7:25:22, time: 1.790, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0511, decode.loss_mask: 0.1792, decode.loss_dice: 0.4971, decode.d0.loss_cls: 0.3012, decode.d0.loss_mask: 0.1861, decode.d0.loss_dice: 0.5147, decode.d1.loss_cls: 0.0666, decode.d1.loss_mask: 0.1809, decode.d1.loss_dice: 0.4993, decode.d2.loss_cls: 0.0593, decode.d2.loss_mask: 0.1802, decode.d2.loss_dice: 0.4996, decode.d3.loss_cls: 0.0569, 
decode.d3.loss_mask: 0.1797, decode.d3.loss_dice: 0.4955, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1800, decode.d4.loss_dice: 0.4976, decode.d5.loss_cls: 0.0566, decode.d5.loss_mask: 0.1799, decode.d5.loss_dice: 0.4957, decode.d6.loss_cls: 0.0538, decode.d6.loss_mask: 0.1797, decode.d6.loss_dice: 0.4938, decode.d7.loss_cls: 0.0516, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4968, decode.d8.loss_cls: 0.0567, decode.d8.loss_mask: 0.1792, decode.d8.loss_dice: 0.4975, loss: 7.6013 +2022-05-11 07:09:53,615 - mmseg - INFO - Iter [67900/80000] lr: 2.172e-07, eta: 7:23:20, time: 1.789, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0424, decode.loss_mask: 0.1770, decode.loss_dice: 0.4856, decode.d0.loss_cls: 0.2921, decode.d0.loss_mask: 0.1822, decode.d0.loss_dice: 0.5093, decode.d1.loss_cls: 0.0643, decode.d1.loss_mask: 0.1780, decode.d1.loss_dice: 0.4967, decode.d2.loss_cls: 0.0490, decode.d2.loss_mask: 0.1780, decode.d2.loss_dice: 0.4924, decode.d3.loss_cls: 0.0471, decode.d3.loss_mask: 0.1772, decode.d3.loss_dice: 0.4889, decode.d4.loss_cls: 0.0491, decode.d4.loss_mask: 0.1775, decode.d4.loss_dice: 0.4900, decode.d5.loss_cls: 0.0458, decode.d5.loss_mask: 0.1774, decode.d5.loss_dice: 0.4903, decode.d6.loss_cls: 0.0436, decode.d6.loss_mask: 0.1771, decode.d6.loss_dice: 0.4904, decode.d7.loss_cls: 0.0470, decode.d7.loss_mask: 0.1768, decode.d7.loss_dice: 0.4892, decode.d8.loss_cls: 0.0504, decode.d8.loss_mask: 0.1770, decode.d8.loss_dice: 0.4901, loss: 7.4318 +2022-05-11 07:11:27,485 - mmseg - INFO - Iter [67950/80000] lr: 2.163e-07, eta: 7:21:20, time: 1.877, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0600, decode.loss_mask: 0.1812, decode.loss_dice: 0.4954, decode.d0.loss_cls: 0.3106, decode.d0.loss_mask: 0.1889, decode.d0.loss_dice: 0.5194, decode.d1.loss_cls: 0.0751, decode.d1.loss_mask: 0.1823, decode.d1.loss_dice: 0.5009, decode.d2.loss_cls: 0.0708, decode.d2.loss_mask: 0.1820, decode.d2.loss_dice: 0.4968, decode.d3.loss_cls: 0.0669, 
decode.d3.loss_mask: 0.1813, decode.d3.loss_dice: 0.4951, decode.d4.loss_cls: 0.0655, decode.d4.loss_mask: 0.1809, decode.d4.loss_dice: 0.4950, decode.d5.loss_cls: 0.0675, decode.d5.loss_mask: 0.1814, decode.d5.loss_dice: 0.4944, decode.d6.loss_cls: 0.0612, decode.d6.loss_mask: 0.1811, decode.d6.loss_dice: 0.4908, decode.d7.loss_cls: 0.0581, decode.d7.loss_mask: 0.1815, decode.d7.loss_dice: 0.4923, decode.d8.loss_cls: 0.0539, decode.d8.loss_mask: 0.1808, decode.d8.loss_dice: 0.4925, loss: 7.6835 +2022-05-11 07:12:57,180 - mmseg - INFO - Saving checkpoint at 68000 iterations +2022-05-11 07:13:30,851 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 07:13:30,859 - mmseg - INFO - Iter [68000/80000] lr: 2.154e-07, eta: 7:19:38, time: 2.465, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0509, decode.loss_mask: 0.1811, decode.loss_dice: 0.5054, decode.d0.loss_cls: 0.2906, decode.d0.loss_mask: 0.1871, decode.d0.loss_dice: 0.5248, decode.d1.loss_cls: 0.0676, decode.d1.loss_mask: 0.1824, decode.d1.loss_dice: 0.5052, decode.d2.loss_cls: 0.0641, decode.d2.loss_mask: 0.1818, decode.d2.loss_dice: 0.5052, decode.d3.loss_cls: 0.0513, decode.d3.loss_mask: 0.1814, decode.d3.loss_dice: 0.5071, decode.d4.loss_cls: 0.0545, decode.d4.loss_mask: 0.1810, decode.d4.loss_dice: 0.5063, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1807, decode.d5.loss_dice: 0.5017, decode.d6.loss_cls: 0.0536, decode.d6.loss_mask: 0.1809, decode.d6.loss_dice: 0.5016, decode.d7.loss_cls: 0.0523, decode.d7.loss_mask: 0.1807, decode.d7.loss_dice: 0.5034, decode.d8.loss_cls: 0.0538, decode.d8.loss_mask: 0.1803, decode.d8.loss_dice: 0.5052, loss: 7.6748 +2022-05-11 07:15:27,554 - mmseg - INFO - per class results: +2022-05-11 07:15:27,560 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.56 | 99.24 | +| sidewalk | 88.29 | 93.3 | +| building | 94.35 | 96.95 | +| wall | 70.02 | 81.23 | +| 
fence | 73.67 | 80.45 | +| pole | 71.31 | 83.7 | +| traffic light | 77.1 | 87.91 | +| traffic sign | 84.12 | 90.53 | +| vegetation | 93.33 | 97.03 | +| terrain | 68.24 | 77.88 | +| sky | 95.78 | 98.33 | +| person | 86.64 | 94.09 | +| rider | 74.16 | 85.14 | +| car | 96.14 | 98.24 | +| truck | 80.92 | 95.17 | +| bus | 93.57 | 96.76 | +| train | 87.85 | 90.62 | +| motorcycle | 77.91 | 87.52 | +| bicycle | 82.77 | 91.21 | ++---------------+-------+-------+ +2022-05-11 07:15:27,561 - mmseg - INFO - Summary: +2022-05-11 07:15:27,561 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.97 | 83.93 | 90.81 | ++-------+-------+-------+ +2022-05-11 07:15:27,564 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 07:15:27,564 - mmseg - INFO - Iter(val) [32] aAcc: 0.9697, mIoU: 0.8393, mAcc: 0.9081, IoU.road: 0.9856, IoU.sidewalk: 0.8829, IoU.building: 0.9435, IoU.wall: 0.7002, IoU.fence: 0.7367, IoU.pole: 0.7131, IoU.traffic light: 0.7710, IoU.traffic sign: 0.8412, IoU.vegetation: 0.9333, IoU.terrain: 0.6824, IoU.sky: 0.9578, IoU.person: 0.8664, IoU.rider: 0.7416, IoU.car: 0.9614, IoU.truck: 0.8092, IoU.bus: 0.9357, IoU.train: 0.8785, IoU.motorcycle: 0.7791, IoU.bicycle: 0.8277, Acc.road: 0.9924, Acc.sidewalk: 0.9330, Acc.building: 0.9695, Acc.wall: 0.8123, Acc.fence: 0.8045, Acc.pole: 0.8370, Acc.traffic light: 0.8791, Acc.traffic sign: 0.9053, Acc.vegetation: 0.9703, Acc.terrain: 0.7788, Acc.sky: 0.9833, Acc.person: 0.9409, Acc.rider: 0.8514, Acc.car: 0.9824, Acc.truck: 0.9517, Acc.bus: 0.9676, Acc.train: 0.9062, Acc.motorcycle: 0.8752, Acc.bicycle: 0.9121 +2022-05-11 07:16:57,261 - mmseg - INFO - Iter [68050/80000] lr: 2.145e-07, eta: 7:18:46, time: 4.130, data_time: 2.354, memory: 69063, decode.loss_cls: 0.0480, decode.loss_mask: 0.1833, decode.loss_dice: 0.4855, decode.d0.loss_cls: 0.2953, decode.d0.loss_mask: 0.1902, decode.d0.loss_dice: 0.5082, decode.d1.loss_cls: 0.0625, 
decode.d1.loss_mask: 0.1847, decode.d1.loss_dice: 0.4946, decode.d2.loss_cls: 0.0498, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.4934, decode.d3.loss_cls: 0.0528, decode.d3.loss_mask: 0.1838, decode.d3.loss_dice: 0.4894, decode.d4.loss_cls: 0.0500, decode.d4.loss_mask: 0.1841, decode.d4.loss_dice: 0.4882, decode.d5.loss_cls: 0.0528, decode.d5.loss_mask: 0.1846, decode.d5.loss_dice: 0.4894, decode.d6.loss_cls: 0.0474, decode.d6.loss_mask: 0.1841, decode.d6.loss_dice: 0.4904, decode.d7.loss_cls: 0.0484, decode.d7.loss_mask: 0.1838, decode.d7.loss_dice: 0.4847, decode.d8.loss_cls: 0.0487, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4879, loss: 7.5138 +2022-05-11 07:18:30,237 - mmseg - INFO - Iter [68100/80000] lr: 2.136e-07, eta: 7:16:46, time: 1.857, data_time: 0.068, memory: 69063, decode.loss_cls: 0.0584, decode.loss_mask: 0.1791, decode.loss_dice: 0.4974, decode.d0.loss_cls: 0.2918, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5216, decode.d1.loss_cls: 0.0705, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.5046, decode.d2.loss_cls: 0.0644, decode.d2.loss_mask: 0.1799, decode.d2.loss_dice: 0.5034, decode.d3.loss_cls: 0.0588, decode.d3.loss_mask: 0.1793, decode.d3.loss_dice: 0.5003, decode.d4.loss_cls: 0.0565, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.5015, decode.d5.loss_cls: 0.0587, decode.d5.loss_mask: 0.1795, decode.d5.loss_dice: 0.4983, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1792, decode.d6.loss_dice: 0.5020, decode.d7.loss_cls: 0.0515, decode.d7.loss_mask: 0.1788, decode.d7.loss_dice: 0.4963, decode.d8.loss_cls: 0.0563, decode.d8.loss_mask: 0.1789, decode.d8.loss_dice: 0.5017, loss: 7.6481 +2022-05-11 07:19:59,295 - mmseg - INFO - Iter [68150/80000] lr: 2.127e-07, eta: 7:14:43, time: 1.783, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0399, decode.loss_mask: 0.1811, decode.loss_dice: 0.4899, decode.d0.loss_cls: 0.2909, decode.d0.loss_mask: 0.1875, decode.d0.loss_dice: 0.5180, decode.d1.loss_cls: 0.0562, 
decode.d1.loss_mask: 0.1828, decode.d1.loss_dice: 0.4991, decode.d2.loss_cls: 0.0502, decode.d2.loss_mask: 0.1819, decode.d2.loss_dice: 0.4955, decode.d3.loss_cls: 0.0465, decode.d3.loss_mask: 0.1812, decode.d3.loss_dice: 0.4898, decode.d4.loss_cls: 0.0431, decode.d4.loss_mask: 0.1814, decode.d4.loss_dice: 0.4915, decode.d5.loss_cls: 0.0415, decode.d5.loss_mask: 0.1813, decode.d5.loss_dice: 0.4893, decode.d6.loss_cls: 0.0432, decode.d6.loss_mask: 0.1810, decode.d6.loss_dice: 0.4938, decode.d7.loss_cls: 0.0461, decode.d7.loss_mask: 0.1809, decode.d7.loss_dice: 0.4916, decode.d8.loss_cls: 0.0363, decode.d8.loss_mask: 0.1813, decode.d8.loss_dice: 0.4919, loss: 7.4643 +2022-05-11 07:21:30,751 - mmseg - INFO - Iter [68200/80000] lr: 2.118e-07, eta: 7:12:42, time: 1.829, data_time: 0.023, memory: 69063, decode.loss_cls: 0.0515, decode.loss_mask: 0.1790, decode.loss_dice: 0.4939, decode.d0.loss_cls: 0.2855, decode.d0.loss_mask: 0.1849, decode.d0.loss_dice: 0.5146, decode.d1.loss_cls: 0.0646, decode.d1.loss_mask: 0.1802, decode.d1.loss_dice: 0.5012, decode.d2.loss_cls: 0.0621, decode.d2.loss_mask: 0.1792, decode.d2.loss_dice: 0.5008, decode.d3.loss_cls: 0.0534, decode.d3.loss_mask: 0.1791, decode.d3.loss_dice: 0.4985, decode.d4.loss_cls: 0.0527, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.4970, decode.d5.loss_cls: 0.0550, decode.d5.loss_mask: 0.1790, decode.d5.loss_dice: 0.4970, decode.d6.loss_cls: 0.0469, decode.d6.loss_mask: 0.1783, decode.d6.loss_dice: 0.4927, decode.d7.loss_cls: 0.0491, decode.d7.loss_mask: 0.1784, decode.d7.loss_dice: 0.4907, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1785, decode.d8.loss_dice: 0.4938, loss: 7.5486 +2022-05-11 07:23:01,982 - mmseg - INFO - Iter [68250/80000] lr: 2.109e-07, eta: 7:10:41, time: 1.825, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0489, decode.loss_mask: 0.1856, decode.loss_dice: 0.4923, decode.d0.loss_cls: 0.3046, decode.d0.loss_mask: 0.1929, decode.d0.loss_dice: 0.5187, decode.d1.loss_cls: 0.0762, 
decode.d1.loss_mask: 0.1865, decode.d1.loss_dice: 0.4994, decode.d2.loss_cls: 0.0618, decode.d2.loss_mask: 0.1863, decode.d2.loss_dice: 0.5008, decode.d3.loss_cls: 0.0546, decode.d3.loss_mask: 0.1858, decode.d3.loss_dice: 0.4947, decode.d4.loss_cls: 0.0540, decode.d4.loss_mask: 0.1858, decode.d4.loss_dice: 0.4977, decode.d5.loss_cls: 0.0544, decode.d5.loss_mask: 0.1859, decode.d5.loss_dice: 0.4932, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1858, decode.d6.loss_dice: 0.4966, decode.d7.loss_cls: 0.0518, decode.d7.loss_mask: 0.1854, decode.d7.loss_dice: 0.4929, decode.d8.loss_cls: 0.0513, decode.d8.loss_mask: 0.1851, decode.d8.loss_dice: 0.4941, loss: 7.6556 +2022-05-11 07:24:36,322 - mmseg - INFO - Iter [68300/80000] lr: 2.100e-07, eta: 7:08:42, time: 1.887, data_time: 0.068, memory: 69063, decode.loss_cls: 0.0469, decode.loss_mask: 0.1796, decode.loss_dice: 0.4989, decode.d0.loss_cls: 0.2917, decode.d0.loss_mask: 0.1865, decode.d0.loss_dice: 0.5223, decode.d1.loss_cls: 0.0631, decode.d1.loss_mask: 0.1807, decode.d1.loss_dice: 0.5057, decode.d2.loss_cls: 0.0638, decode.d2.loss_mask: 0.1803, decode.d2.loss_dice: 0.5019, decode.d3.loss_cls: 0.0590, decode.d3.loss_mask: 0.1799, decode.d3.loss_dice: 0.5007, decode.d4.loss_cls: 0.0536, decode.d4.loss_mask: 0.1797, decode.d4.loss_dice: 0.4979, decode.d5.loss_cls: 0.0534, decode.d5.loss_mask: 0.1800, decode.d5.loss_dice: 0.4973, decode.d6.loss_cls: 0.0550, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.5013, decode.d7.loss_cls: 0.0559, decode.d7.loss_mask: 0.1800, decode.d7.loss_dice: 0.4996, decode.d8.loss_cls: 0.0496, decode.d8.loss_mask: 0.1795, decode.d8.loss_dice: 0.4966, loss: 7.6204 +2022-05-11 07:26:05,184 - mmseg - INFO - Iter [68350/80000] lr: 2.091e-07, eta: 7:06:40, time: 1.778, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0449, decode.loss_mask: 0.1833, decode.loss_dice: 0.4877, decode.d0.loss_cls: 0.2837, decode.d0.loss_mask: 0.1901, decode.d0.loss_dice: 0.5097, decode.d1.loss_cls: 0.0569, 
decode.d1.loss_mask: 0.1844, decode.d1.loss_dice: 0.4921, decode.d2.loss_cls: 0.0516, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.4897, decode.d3.loss_cls: 0.0456, decode.d3.loss_mask: 0.1842, decode.d3.loss_dice: 0.4890, decode.d4.loss_cls: 0.0465, decode.d4.loss_mask: 0.1839, decode.d4.loss_dice: 0.4894, decode.d5.loss_cls: 0.0470, decode.d5.loss_mask: 0.1844, decode.d5.loss_dice: 0.4893, decode.d6.loss_cls: 0.0462, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4904, decode.d7.loss_cls: 0.0467, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.4861, decode.d8.loss_cls: 0.0472, decode.d8.loss_mask: 0.1837, decode.d8.loss_dice: 0.4867, loss: 7.4719 +2022-05-11 07:27:34,982 - mmseg - INFO - Iter [68400/80000] lr: 2.082e-07, eta: 7:04:39, time: 1.796, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0480, decode.loss_mask: 0.1831, decode.loss_dice: 0.4993, decode.d0.loss_cls: 0.2884, decode.d0.loss_mask: 0.1905, decode.d0.loss_dice: 0.5210, decode.d1.loss_cls: 0.0587, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.5050, decode.d2.loss_cls: 0.0467, decode.d2.loss_mask: 0.1849, decode.d2.loss_dice: 0.5026, decode.d3.loss_cls: 0.0508, decode.d3.loss_mask: 0.1844, decode.d3.loss_dice: 0.5022, decode.d4.loss_cls: 0.0472, decode.d4.loss_mask: 0.1837, decode.d4.loss_dice: 0.5002, decode.d5.loss_cls: 0.0451, decode.d5.loss_mask: 0.1841, decode.d5.loss_dice: 0.5050, decode.d6.loss_cls: 0.0450, decode.d6.loss_mask: 0.1839, decode.d6.loss_dice: 0.4996, decode.d7.loss_cls: 0.0465, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.4984, decode.d8.loss_cls: 0.0431, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4965, loss: 7.5957 +2022-05-11 07:29:05,255 - mmseg - INFO - Iter [68450/80000] lr: 2.073e-07, eta: 7:02:38, time: 1.805, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0572, decode.loss_mask: 0.1814, decode.loss_dice: 0.4940, decode.d0.loss_cls: 0.2943, decode.d0.loss_mask: 0.1888, decode.d0.loss_dice: 0.5218, decode.d1.loss_cls: 0.0714, 
decode.d1.loss_mask: 0.1822, decode.d1.loss_dice: 0.5008, decode.d2.loss_cls: 0.0611, decode.d2.loss_mask: 0.1819, decode.d2.loss_dice: 0.5005, decode.d3.loss_cls: 0.0595, decode.d3.loss_mask: 0.1813, decode.d3.loss_dice: 0.4956, decode.d4.loss_cls: 0.0616, decode.d4.loss_mask: 0.1815, decode.d4.loss_dice: 0.4965, decode.d5.loss_cls: 0.0542, decode.d5.loss_mask: 0.1813, decode.d5.loss_dice: 0.4938, decode.d6.loss_cls: 0.0594, decode.d6.loss_mask: 0.1813, decode.d6.loss_dice: 0.4964, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4955, decode.d8.loss_cls: 0.0618, decode.d8.loss_mask: 0.1813, decode.d8.loss_dice: 0.4933, loss: 7.6459 +2022-05-11 07:30:38,088 - mmseg - INFO - Iter [68500/80000] lr: 2.064e-07, eta: 7:00:39, time: 1.857, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0555, decode.loss_mask: 0.1815, decode.loss_dice: 0.4897, decode.d0.loss_cls: 0.3066, decode.d0.loss_mask: 0.1890, decode.d0.loss_dice: 0.5123, decode.d1.loss_cls: 0.0650, decode.d1.loss_mask: 0.1830, decode.d1.loss_dice: 0.5001, decode.d2.loss_cls: 0.0563, decode.d2.loss_mask: 0.1831, decode.d2.loss_dice: 0.4938, decode.d3.loss_cls: 0.0538, decode.d3.loss_mask: 0.1828, decode.d3.loss_dice: 0.4919, decode.d4.loss_cls: 0.0515, decode.d4.loss_mask: 0.1821, decode.d4.loss_dice: 0.4916, decode.d5.loss_cls: 0.0552, decode.d5.loss_mask: 0.1822, decode.d5.loss_dice: 0.4898, decode.d6.loss_cls: 0.0548, decode.d6.loss_mask: 0.1822, decode.d6.loss_dice: 0.4896, decode.d7.loss_cls: 0.0562, decode.d7.loss_mask: 0.1816, decode.d7.loss_dice: 0.4936, decode.d8.loss_cls: 0.0512, decode.d8.loss_mask: 0.1818, decode.d8.loss_dice: 0.4901, loss: 7.5779 +2022-05-11 07:32:09,264 - mmseg - INFO - Iter [68550/80000] lr: 2.055e-07, eta: 6:58:39, time: 1.823, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0524, decode.loss_mask: 0.1826, decode.loss_dice: 0.5006, decode.d0.loss_cls: 0.2910, decode.d0.loss_mask: 0.1913, decode.d0.loss_dice: 0.5251, decode.d1.loss_cls: 0.0691, 
decode.d1.loss_mask: 0.1851, decode.d1.loss_dice: 0.5100, decode.d2.loss_cls: 0.0626, decode.d2.loss_mask: 0.1841, decode.d2.loss_dice: 0.5050, decode.d3.loss_cls: 0.0609, decode.d3.loss_mask: 0.1834, decode.d3.loss_dice: 0.5050, decode.d4.loss_cls: 0.0544, decode.d4.loss_mask: 0.1833, decode.d4.loss_dice: 0.5039, decode.d5.loss_cls: 0.0520, decode.d5.loss_mask: 0.1832, decode.d5.loss_dice: 0.4991, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.5018, decode.d7.loss_cls: 0.0524, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.5013, decode.d8.loss_cls: 0.0523, decode.d8.loss_mask: 0.1825, decode.d8.loss_dice: 0.4978, loss: 7.6918 +2022-05-11 07:33:40,279 - mmseg - INFO - Iter [68600/80000] lr: 2.046e-07, eta: 6:56:39, time: 1.820, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0477, decode.loss_mask: 0.1797, decode.loss_dice: 0.4877, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1858, decode.d0.loss_dice: 0.5170, decode.d1.loss_cls: 0.0658, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.4964, decode.d2.loss_cls: 0.0569, decode.d2.loss_mask: 0.1797, decode.d2.loss_dice: 0.4921, decode.d3.loss_cls: 0.0561, decode.d3.loss_mask: 0.1797, decode.d3.loss_dice: 0.4897, decode.d4.loss_cls: 0.0516, decode.d4.loss_mask: 0.1794, decode.d4.loss_dice: 0.4905, decode.d5.loss_cls: 0.0518, decode.d5.loss_mask: 0.1798, decode.d5.loss_dice: 0.4876, decode.d6.loss_cls: 0.0527, decode.d6.loss_mask: 0.1795, decode.d6.loss_dice: 0.4891, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4887, decode.d8.loss_cls: 0.0494, decode.d8.loss_mask: 0.1792, decode.d8.loss_dice: 0.4911, loss: 7.5146 +2022-05-11 07:35:13,762 - mmseg - INFO - Iter [68650/80000] lr: 2.037e-07, eta: 6:54:40, time: 1.870, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0496, decode.loss_mask: 0.1792, decode.loss_dice: 0.4877, decode.d0.loss_cls: 0.2945, decode.d0.loss_mask: 0.1859, decode.d0.loss_dice: 0.5057, decode.d1.loss_cls: 0.0709, 
decode.d1.loss_mask: 0.1805, decode.d1.loss_dice: 0.4950, decode.d2.loss_cls: 0.0563, decode.d2.loss_mask: 0.1801, decode.d2.loss_dice: 0.4902, decode.d3.loss_cls: 0.0548, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.4902, decode.d4.loss_cls: 0.0501, decode.d4.loss_mask: 0.1797, decode.d4.loss_dice: 0.4883, decode.d5.loss_cls: 0.0540, decode.d5.loss_mask: 0.1794, decode.d5.loss_dice: 0.4889, decode.d6.loss_cls: 0.0504, decode.d6.loss_mask: 0.1791, decode.d6.loss_dice: 0.4831, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1797, decode.d7.loss_dice: 0.4870, decode.d8.loss_cls: 0.0512, decode.d8.loss_mask: 0.1795, decode.d8.loss_dice: 0.4849, loss: 7.4851 +2022-05-11 07:36:44,654 - mmseg - INFO - Iter [68700/80000] lr: 2.028e-07, eta: 6:52:41, time: 1.818, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0386, decode.loss_mask: 0.1823, decode.loss_dice: 0.4886, decode.d0.loss_cls: 0.2886, decode.d0.loss_mask: 0.1894, decode.d0.loss_dice: 0.5093, decode.d1.loss_cls: 0.0598, decode.d1.loss_mask: 0.1836, decode.d1.loss_dice: 0.4986, decode.d2.loss_cls: 0.0487, decode.d2.loss_mask: 0.1836, decode.d2.loss_dice: 0.4983, decode.d3.loss_cls: 0.0435, decode.d3.loss_mask: 0.1824, decode.d3.loss_dice: 0.4913, decode.d4.loss_cls: 0.0445, decode.d4.loss_mask: 0.1829, decode.d4.loss_dice: 0.4915, decode.d5.loss_cls: 0.0418, decode.d5.loss_mask: 0.1825, decode.d5.loss_dice: 0.4900, decode.d6.loss_cls: 0.0421, decode.d6.loss_mask: 0.1825, decode.d6.loss_dice: 0.4896, decode.d7.loss_cls: 0.0397, decode.d7.loss_mask: 0.1825, decode.d7.loss_dice: 0.4902, decode.d8.loss_cls: 0.0443, decode.d8.loss_mask: 0.1825, decode.d8.loss_dice: 0.4891, loss: 7.4622 +2022-05-11 07:38:15,030 - mmseg - INFO - Iter [68750/80000] lr: 2.019e-07, eta: 6:50:41, time: 1.808, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0387, decode.loss_mask: 0.1835, decode.loss_dice: 0.4869, decode.d0.loss_cls: 0.3023, decode.d0.loss_mask: 0.1889, decode.d0.loss_dice: 0.5097, decode.d1.loss_cls: 0.0591, 
decode.d1.loss_mask: 0.1840, decode.d1.loss_dice: 0.4925, decode.d2.loss_cls: 0.0511, decode.d2.loss_mask: 0.1831, decode.d2.loss_dice: 0.4870, decode.d3.loss_cls: 0.0486, decode.d3.loss_mask: 0.1837, decode.d3.loss_dice: 0.4864, decode.d4.loss_cls: 0.0494, decode.d4.loss_mask: 0.1833, decode.d4.loss_dice: 0.4880, decode.d5.loss_cls: 0.0476, decode.d5.loss_mask: 0.1833, decode.d5.loss_dice: 0.4881, decode.d6.loss_cls: 0.0470, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.4876, decode.d7.loss_cls: 0.0429, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.4868, decode.d8.loss_cls: 0.0462, decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.4851, loss: 7.4712 +2022-05-11 07:39:46,197 - mmseg - INFO - Iter [68800/80000] lr: 2.010e-07, eta: 6:48:41, time: 1.823, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0588, decode.loss_mask: 0.1795, decode.loss_dice: 0.4954, decode.d0.loss_cls: 0.3026, decode.d0.loss_mask: 0.1861, decode.d0.loss_dice: 0.5193, decode.d1.loss_cls: 0.0696, decode.d1.loss_mask: 0.1811, decode.d1.loss_dice: 0.5040, decode.d2.loss_cls: 0.0659, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.5029, decode.d3.loss_cls: 0.0569, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.4985, decode.d4.loss_cls: 0.0597, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.4963, decode.d5.loss_cls: 0.0585, decode.d5.loss_mask: 0.1799, decode.d5.loss_dice: 0.4988, decode.d6.loss_cls: 0.0571, decode.d6.loss_mask: 0.1793, decode.d6.loss_dice: 0.4963, decode.d7.loss_cls: 0.0573, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.5023, decode.d8.loss_cls: 0.0616, decode.d8.loss_mask: 0.1792, decode.d8.loss_dice: 0.4941, loss: 7.6597 +2022-05-11 07:41:19,168 - mmseg - INFO - Iter [68850/80000] lr: 2.001e-07, eta: 6:46:43, time: 1.859, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0542, decode.loss_mask: 0.1751, decode.loss_dice: 0.4838, decode.d0.loss_cls: 0.3038, decode.d0.loss_mask: 0.1825, decode.d0.loss_dice: 0.5085, decode.d1.loss_cls: 0.0715, 
decode.d1.loss_mask: 0.1765, decode.d1.loss_dice: 0.4941, decode.d2.loss_cls: 0.0605, decode.d2.loss_mask: 0.1760, decode.d2.loss_dice: 0.4907, decode.d3.loss_cls: 0.0606, decode.d3.loss_mask: 0.1756, decode.d3.loss_dice: 0.4899, decode.d4.loss_cls: 0.0547, decode.d4.loss_mask: 0.1755, decode.d4.loss_dice: 0.4904, decode.d5.loss_cls: 0.0554, decode.d5.loss_mask: 0.1752, decode.d5.loss_dice: 0.4870, decode.d6.loss_cls: 0.0566, decode.d6.loss_mask: 0.1751, decode.d6.loss_dice: 0.4861, decode.d7.loss_cls: 0.0559, decode.d7.loss_mask: 0.1753, decode.d7.loss_dice: 0.4856, decode.d8.loss_cls: 0.0546, decode.d8.loss_mask: 0.1752, decode.d8.loss_dice: 0.4840, loss: 7.4897 +2022-05-11 07:42:49,792 - mmseg - INFO - Iter [68900/80000] lr: 1.992e-07, eta: 6:44:43, time: 1.812, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0491, decode.loss_mask: 0.1809, decode.loss_dice: 0.4905, decode.d0.loss_cls: 0.2941, decode.d0.loss_mask: 0.1880, decode.d0.loss_dice: 0.5138, decode.d1.loss_cls: 0.0605, decode.d1.loss_mask: 0.1826, decode.d1.loss_dice: 0.4988, decode.d2.loss_cls: 0.0557, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.4920, decode.d3.loss_cls: 0.0539, decode.d3.loss_mask: 0.1811, decode.d3.loss_dice: 0.4923, decode.d4.loss_cls: 0.0581, decode.d4.loss_mask: 0.1808, decode.d4.loss_dice: 0.4928, decode.d5.loss_cls: 0.0542, decode.d5.loss_mask: 0.1808, decode.d5.loss_dice: 0.4948, decode.d6.loss_cls: 0.0495, decode.d6.loss_mask: 0.1807, decode.d6.loss_dice: 0.4903, decode.d7.loss_cls: 0.0543, decode.d7.loss_mask: 0.1806, decode.d7.loss_dice: 0.4934, decode.d8.loss_cls: 0.0533, decode.d8.loss_mask: 0.1806, decode.d8.loss_dice: 0.4900, loss: 7.5491 +2022-05-11 07:44:19,897 - mmseg - INFO - Iter [68950/80000] lr: 1.983e-07, eta: 6:42:44, time: 1.802, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0522, decode.loss_mask: 0.1830, decode.loss_dice: 0.4978, decode.d0.loss_cls: 0.2852, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5205, decode.d1.loss_cls: 0.0681, 
decode.d1.loss_mask: 0.1840, decode.d1.loss_dice: 0.5085, decode.d2.loss_cls: 0.0561, decode.d2.loss_mask: 0.1838, decode.d2.loss_dice: 0.5030, decode.d3.loss_cls: 0.0500, decode.d3.loss_mask: 0.1837, decode.d3.loss_dice: 0.5009, decode.d4.loss_cls: 0.0522, decode.d4.loss_mask: 0.1836, decode.d4.loss_dice: 0.5001, decode.d5.loss_cls: 0.0519, decode.d5.loss_mask: 0.1837, decode.d5.loss_dice: 0.5015, decode.d6.loss_cls: 0.0488, decode.d6.loss_mask: 0.1825, decode.d6.loss_dice: 0.4982, decode.d7.loss_cls: 0.0574, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.4997, decode.d8.loss_cls: 0.0494, decode.d8.loss_mask: 0.1829, decode.d8.loss_dice: 0.4989, loss: 7.6405 +2022-05-11 07:45:50,151 - mmseg - INFO - Saving checkpoint at 69000 iterations +2022-05-11 07:46:23,932 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 07:46:23,941 - mmseg - INFO - Iter [69000/80000] lr: 1.974e-07, eta: 6:41:02, time: 2.479, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0543, decode.loss_mask: 0.1776, decode.loss_dice: 0.4921, decode.d0.loss_cls: 0.3072, decode.d0.loss_mask: 0.1841, decode.d0.loss_dice: 0.5205, decode.d1.loss_cls: 0.0761, decode.d1.loss_mask: 0.1789, decode.d1.loss_dice: 0.5004, decode.d2.loss_cls: 0.0685, decode.d2.loss_mask: 0.1782, decode.d2.loss_dice: 0.4969, decode.d3.loss_cls: 0.0687, decode.d3.loss_mask: 0.1781, decode.d3.loss_dice: 0.4980, decode.d4.loss_cls: 0.0615, decode.d4.loss_mask: 0.1782, decode.d4.loss_dice: 0.4956, decode.d5.loss_cls: 0.0577, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4987, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1778, decode.d6.loss_dice: 0.4946, decode.d7.loss_cls: 0.0581, decode.d7.loss_mask: 0.1778, decode.d7.loss_dice: 0.4964, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1781, decode.d8.loss_dice: 0.4948, loss: 7.6455 +2022-05-11 07:48:20,100 - mmseg - INFO - per class results: +2022-05-11 07:48:20,105 - mmseg - INFO - ++---------------+-------+-------+ 
+| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.2 | +| sidewalk | 88.59 | 94.12 | +| building | 94.41 | 97.07 | +| wall | 69.69 | 80.0 | +| fence | 73.88 | 80.83 | +| pole | 71.2 | 83.92 | +| traffic light | 77.05 | 86.53 | +| traffic sign | 84.04 | 90.39 | +| vegetation | 93.28 | 96.81 | +| terrain | 67.73 | 77.85 | +| sky | 95.68 | 98.53 | +| person | 86.68 | 93.71 | +| rider | 74.14 | 84.94 | +| car | 96.16 | 98.3 | +| truck | 82.16 | 94.82 | +| bus | 93.55 | 96.68 | +| train | 87.82 | 90.43 | +| motorcycle | 77.69 | 86.58 | +| bicycle | 82.6 | 91.92 | ++---------------+-------+-------+ +2022-05-11 07:48:20,105 - mmseg - INFO - Summary: +2022-05-11 07:48:20,106 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 96.99 | 83.95 | 90.67 | ++-------+-------+-------+ +2022-05-11 07:48:20,109 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 07:48:20,109 - mmseg - INFO - Iter(val) [32] aAcc: 0.9699, mIoU: 0.8395, mAcc: 0.9067, IoU.road: 0.9862, IoU.sidewalk: 0.8859, IoU.building: 0.9441, IoU.wall: 0.6969, IoU.fence: 0.7388, IoU.pole: 0.7120, IoU.traffic light: 0.7705, IoU.traffic sign: 0.8404, IoU.vegetation: 0.9328, IoU.terrain: 0.6773, IoU.sky: 0.9568, IoU.person: 0.8668, IoU.rider: 0.7414, IoU.car: 0.9616, IoU.truck: 0.8216, IoU.bus: 0.9355, IoU.train: 0.8782, IoU.motorcycle: 0.7769, IoU.bicycle: 0.8260, Acc.road: 0.9920, Acc.sidewalk: 0.9412, Acc.building: 0.9707, Acc.wall: 0.8000, Acc.fence: 0.8083, Acc.pole: 0.8392, Acc.traffic light: 0.8653, Acc.traffic sign: 0.9039, Acc.vegetation: 0.9681, Acc.terrain: 0.7785, Acc.sky: 0.9853, Acc.person: 0.9371, Acc.rider: 0.8494, Acc.car: 0.9830, Acc.truck: 0.9482, Acc.bus: 0.9668, Acc.train: 0.9043, Acc.motorcycle: 0.8658, Acc.bicycle: 0.9192 +2022-05-11 07:49:52,415 - mmseg - INFO - Iter [69050/80000] lr: 1.965e-07, eta: 6:40:04, time: 4.172, data_time: 2.389, memory: 69063, decode.loss_cls: 0.0527, 
decode.loss_mask: 0.1771, decode.loss_dice: 0.4865, decode.d0.loss_cls: 0.2918, decode.d0.loss_mask: 0.1838, decode.d0.loss_dice: 0.5115, decode.d1.loss_cls: 0.0703, decode.d1.loss_mask: 0.1780, decode.d1.loss_dice: 0.4938, decode.d2.loss_cls: 0.0602, decode.d2.loss_mask: 0.1774, decode.d2.loss_dice: 0.4938, decode.d3.loss_cls: 0.0539, decode.d3.loss_mask: 0.1775, decode.d3.loss_dice: 0.4876, decode.d4.loss_cls: 0.0516, decode.d4.loss_mask: 0.1777, decode.d4.loss_dice: 0.4897, decode.d5.loss_cls: 0.0562, decode.d5.loss_mask: 0.1774, decode.d5.loss_dice: 0.4932, decode.d6.loss_cls: 0.0536, decode.d6.loss_mask: 0.1773, decode.d6.loss_dice: 0.4862, decode.d7.loss_cls: 0.0570, decode.d7.loss_mask: 0.1773, decode.d7.loss_dice: 0.4906, decode.d8.loss_cls: 0.0535, decode.d8.loss_mask: 0.1773, decode.d8.loss_dice: 0.4950, loss: 7.5096 +2022-05-11 07:51:23,910 - mmseg - INFO - Iter [69100/80000] lr: 1.956e-07, eta: 6:38:05, time: 1.829, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0383, decode.loss_mask: 0.1824, decode.loss_dice: 0.4967, decode.d0.loss_cls: 0.2929, decode.d0.loss_mask: 0.1893, decode.d0.loss_dice: 0.5141, decode.d1.loss_cls: 0.0560, decode.d1.loss_mask: 0.1842, decode.d1.loss_dice: 0.5034, decode.d2.loss_cls: 0.0513, decode.d2.loss_mask: 0.1831, decode.d2.loss_dice: 0.4990, decode.d3.loss_cls: 0.0402, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4992, decode.d4.loss_cls: 0.0442, decode.d4.loss_mask: 0.1830, decode.d4.loss_dice: 0.4972, decode.d5.loss_cls: 0.0436, decode.d5.loss_mask: 0.1827, decode.d5.loss_dice: 0.4975, decode.d6.loss_cls: 0.0407, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.4973, decode.d7.loss_cls: 0.0428, decode.d7.loss_mask: 0.1822, decode.d7.loss_dice: 0.4956, decode.d8.loss_cls: 0.0442, decode.d8.loss_mask: 0.1823, decode.d8.loss_dice: 0.4959, loss: 7.5250 +2022-05-11 07:52:54,916 - mmseg - INFO - Iter [69150/80000] lr: 1.947e-07, eta: 6:36:06, time: 1.821, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0532, 
decode.loss_mask: 0.1731, decode.loss_dice: 0.4898, decode.d0.loss_cls: 0.2855, decode.d0.loss_mask: 0.1791, decode.d0.loss_dice: 0.5111, decode.d1.loss_cls: 0.0629, decode.d1.loss_mask: 0.1747, decode.d1.loss_dice: 0.4963, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1741, decode.d2.loss_dice: 0.4950, decode.d3.loss_cls: 0.0591, decode.d3.loss_mask: 0.1738, decode.d3.loss_dice: 0.4887, decode.d4.loss_cls: 0.0493, decode.d4.loss_mask: 0.1738, decode.d4.loss_dice: 0.4895, decode.d5.loss_cls: 0.0496, decode.d5.loss_mask: 0.1736, decode.d5.loss_dice: 0.4913, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1738, decode.d6.loss_dice: 0.4887, decode.d7.loss_cls: 0.0571, decode.d7.loss_mask: 0.1738, decode.d7.loss_dice: 0.4905, decode.d8.loss_cls: 0.0477, decode.d8.loss_mask: 0.1734, decode.d8.loss_dice: 0.4900, loss: 7.4502 +2022-05-11 07:54:25,166 - mmseg - INFO - Iter [69200/80000] lr: 1.939e-07, eta: 6:34:07, time: 1.805, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0546, decode.loss_mask: 0.1820, decode.loss_dice: 0.4961, decode.d0.loss_cls: 0.2968, decode.d0.loss_mask: 0.1895, decode.d0.loss_dice: 0.5162, decode.d1.loss_cls: 0.0657, decode.d1.loss_mask: 0.1833, decode.d1.loss_dice: 0.5014, decode.d2.loss_cls: 0.0628, decode.d2.loss_mask: 0.1827, decode.d2.loss_dice: 0.4976, decode.d3.loss_cls: 0.0607, decode.d3.loss_mask: 0.1824, decode.d3.loss_dice: 0.4962, decode.d4.loss_cls: 0.0557, decode.d4.loss_mask: 0.1820, decode.d4.loss_dice: 0.5005, decode.d5.loss_cls: 0.0586, decode.d5.loss_mask: 0.1819, decode.d5.loss_dice: 0.4979, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.4988, decode.d7.loss_cls: 0.0599, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4974, decode.d8.loss_cls: 0.0531, decode.d8.loss_mask: 0.1817, decode.d8.loss_dice: 0.4953, loss: 7.6530 +2022-05-11 07:55:57,932 - mmseg - INFO - Iter [69250/80000] lr: 1.930e-07, eta: 6:32:09, time: 1.855, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0451, 
decode.loss_mask: 0.1821, decode.loss_dice: 0.4856, decode.d0.loss_cls: 0.3014, decode.d0.loss_mask: 0.1904, decode.d0.loss_dice: 0.5104, decode.d1.loss_cls: 0.0617, decode.d1.loss_mask: 0.1841, decode.d1.loss_dice: 0.4950, decode.d2.loss_cls: 0.0586, decode.d2.loss_mask: 0.1834, decode.d2.loss_dice: 0.4903, decode.d3.loss_cls: 0.0510, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4880, decode.d4.loss_cls: 0.0454, decode.d4.loss_mask: 0.1831, decode.d4.loss_dice: 0.4863, decode.d5.loss_cls: 0.0471, decode.d5.loss_mask: 0.1825, decode.d5.loss_dice: 0.4854, decode.d6.loss_cls: 0.0421, decode.d6.loss_mask: 0.1825, decode.d6.loss_dice: 0.4831, decode.d7.loss_cls: 0.0508, decode.d7.loss_mask: 0.1826, decode.d7.loss_dice: 0.4889, decode.d8.loss_cls: 0.0445, decode.d8.loss_mask: 0.1823, decode.d8.loss_dice: 0.4878, loss: 7.4845 +2022-05-11 07:57:28,928 - mmseg - INFO - Iter [69300/80000] lr: 1.921e-07, eta: 6:30:10, time: 1.819, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0520, decode.loss_mask: 0.1808, decode.loss_dice: 0.4978, decode.d0.loss_cls: 0.2983, decode.d0.loss_mask: 0.1885, decode.d0.loss_dice: 0.5175, decode.d1.loss_cls: 0.0724, decode.d1.loss_mask: 0.1823, decode.d1.loss_dice: 0.4951, decode.d2.loss_cls: 0.0716, decode.d2.loss_mask: 0.1822, decode.d2.loss_dice: 0.5005, decode.d3.loss_cls: 0.0629, decode.d3.loss_mask: 0.1815, decode.d3.loss_dice: 0.4948, decode.d4.loss_cls: 0.0585, decode.d4.loss_mask: 0.1814, decode.d4.loss_dice: 0.4966, decode.d5.loss_cls: 0.0583, decode.d5.loss_mask: 0.1812, decode.d5.loss_dice: 0.4923, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.1816, decode.d6.loss_dice: 0.4968, decode.d7.loss_cls: 0.0581, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4948, decode.d8.loss_cls: 0.0578, decode.d8.loss_mask: 0.1812, decode.d8.loss_dice: 0.4956, loss: 7.6525 +2022-05-11 07:58:58,940 - mmseg - INFO - Iter [69350/80000] lr: 1.912e-07, eta: 6:28:11, time: 1.800, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0583, 
decode.loss_mask: 0.1769, decode.loss_dice: 0.4910, decode.d0.loss_cls: 0.2842, decode.d0.loss_mask: 0.1824, decode.d0.loss_dice: 0.5145, decode.d1.loss_cls: 0.0700, decode.d1.loss_mask: 0.1782, decode.d1.loss_dice: 0.5014, decode.d2.loss_cls: 0.0590, decode.d2.loss_mask: 0.1776, decode.d2.loss_dice: 0.4938, decode.d3.loss_cls: 0.0544, decode.d3.loss_mask: 0.1773, decode.d3.loss_dice: 0.4940, decode.d4.loss_cls: 0.0572, decode.d4.loss_mask: 0.1769, decode.d4.loss_dice: 0.4935, decode.d5.loss_cls: 0.0533, decode.d5.loss_mask: 0.1772, decode.d5.loss_dice: 0.4958, decode.d6.loss_cls: 0.0544, decode.d6.loss_mask: 0.1767, decode.d6.loss_dice: 0.4937, decode.d7.loss_cls: 0.0509, decode.d7.loss_mask: 0.1771, decode.d7.loss_dice: 0.4925, decode.d8.loss_cls: 0.0553, decode.d8.loss_mask: 0.1767, decode.d8.loss_dice: 0.4911, loss: 7.5350 +2022-05-11 08:00:32,707 - mmseg - INFO - Iter [69400/80000] lr: 1.903e-07, eta: 6:26:14, time: 1.876, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0556, decode.loss_mask: 0.1816, decode.loss_dice: 0.4926, decode.d0.loss_cls: 0.3050, decode.d0.loss_mask: 0.1886, decode.d0.loss_dice: 0.5159, decode.d1.loss_cls: 0.0650, decode.d1.loss_mask: 0.1829, decode.d1.loss_dice: 0.5003, decode.d2.loss_cls: 0.0590, decode.d2.loss_mask: 0.1823, decode.d2.loss_dice: 0.4982, decode.d3.loss_cls: 0.0565, decode.d3.loss_mask: 0.1822, decode.d3.loss_dice: 0.4919, decode.d4.loss_cls: 0.0577, decode.d4.loss_mask: 0.1816, decode.d4.loss_dice: 0.4969, decode.d5.loss_cls: 0.0574, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4977, decode.d6.loss_cls: 0.0565, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.4919, decode.d7.loss_cls: 0.0544, decode.d7.loss_mask: 0.1820, decode.d7.loss_dice: 0.4956, decode.d8.loss_cls: 0.0520, decode.d8.loss_mask: 0.1816, decode.d8.loss_dice: 0.4943, loss: 7.6214 +2022-05-11 08:02:04,101 - mmseg - INFO - Iter [69450/80000] lr: 1.894e-07, eta: 6:24:16, time: 1.828, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0482, 
decode.loss_mask: 0.1787, decode.loss_dice: 0.4903, decode.d0.loss_cls: 0.2919, decode.d0.loss_mask: 0.1854, decode.d0.loss_dice: 0.5114, decode.d1.loss_cls: 0.0564, decode.d1.loss_mask: 0.1801, decode.d1.loss_dice: 0.4978, decode.d2.loss_cls: 0.0526, decode.d2.loss_mask: 0.1796, decode.d2.loss_dice: 0.4962, decode.d3.loss_cls: 0.0495, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.4966, decode.d4.loss_cls: 0.0531, decode.d4.loss_mask: 0.1796, decode.d4.loss_dice: 0.4931, decode.d5.loss_cls: 0.0532, decode.d5.loss_mask: 0.1792, decode.d5.loss_dice: 0.4926, decode.d6.loss_cls: 0.0532, decode.d6.loss_mask: 0.1790, decode.d6.loss_dice: 0.4929, decode.d7.loss_cls: 0.0462, decode.d7.loss_mask: 0.1791, decode.d7.loss_dice: 0.4916, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4912, loss: 7.5061 +2022-05-11 08:03:33,034 - mmseg - INFO - Iter [69500/80000] lr: 1.885e-07, eta: 6:22:17, time: 1.779, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1848, decode.loss_dice: 0.5057, decode.d0.loss_cls: 0.2940, decode.d0.loss_mask: 0.1911, decode.d0.loss_dice: 0.5213, decode.d1.loss_cls: 0.0628, decode.d1.loss_mask: 0.1866, decode.d1.loss_dice: 0.5120, decode.d2.loss_cls: 0.0625, decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.5054, decode.d3.loss_cls: 0.0557, decode.d3.loss_mask: 0.1852, decode.d3.loss_dice: 0.5029, decode.d4.loss_cls: 0.0572, decode.d4.loss_mask: 0.1856, decode.d4.loss_dice: 0.5071, decode.d5.loss_cls: 0.0493, decode.d5.loss_mask: 0.1853, decode.d5.loss_dice: 0.5051, decode.d6.loss_cls: 0.0527, decode.d6.loss_mask: 0.1848, decode.d6.loss_dice: 0.5028, decode.d7.loss_cls: 0.0543, decode.d7.loss_mask: 0.1851, decode.d7.loss_dice: 0.4999, decode.d8.loss_cls: 0.0533, decode.d8.loss_mask: 0.1846, decode.d8.loss_dice: 0.5009, loss: 7.7169 +2022-05-11 08:05:04,161 - mmseg - INFO - Iter [69550/80000] lr: 1.876e-07, eta: 6:20:19, time: 1.822, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0504, 
decode.loss_mask: 0.1820, decode.loss_dice: 0.4924, decode.d0.loss_cls: 0.2975, decode.d0.loss_mask: 0.1884, decode.d0.loss_dice: 0.5048, decode.d1.loss_cls: 0.0664, decode.d1.loss_mask: 0.1837, decode.d1.loss_dice: 0.4945, decode.d2.loss_cls: 0.0573, decode.d2.loss_mask: 0.1825, decode.d2.loss_dice: 0.4913, decode.d3.loss_cls: 0.0543, decode.d3.loss_mask: 0.1823, decode.d3.loss_dice: 0.4898, decode.d4.loss_cls: 0.0546, decode.d4.loss_mask: 0.1822, decode.d4.loss_dice: 0.4937, decode.d5.loss_cls: 0.0560, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4900, decode.d6.loss_cls: 0.0536, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.4917, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1820, decode.d7.loss_dice: 0.4890, decode.d8.loss_cls: 0.0481, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.4889, loss: 7.5431 +2022-05-11 08:06:37,392 - mmseg - INFO - Iter [69600/80000] lr: 1.867e-07, eta: 6:18:22, time: 1.864, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0417, decode.loss_mask: 0.1781, decode.loss_dice: 0.4952, decode.d0.loss_cls: 0.2866, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5116, decode.d1.loss_cls: 0.0562, decode.d1.loss_mask: 0.1789, decode.d1.loss_dice: 0.5008, decode.d2.loss_cls: 0.0526, decode.d2.loss_mask: 0.1780, decode.d2.loss_dice: 0.4974, decode.d3.loss_cls: 0.0457, decode.d3.loss_mask: 0.1784, decode.d3.loss_dice: 0.4931, decode.d4.loss_cls: 0.0446, decode.d4.loss_mask: 0.1782, decode.d4.loss_dice: 0.4943, decode.d5.loss_cls: 0.0465, decode.d5.loss_mask: 0.1780, decode.d5.loss_dice: 0.4941, decode.d6.loss_cls: 0.0396, decode.d6.loss_mask: 0.1780, decode.d6.loss_dice: 0.4949, decode.d7.loss_cls: 0.0454, decode.d7.loss_mask: 0.1780, decode.d7.loss_dice: 0.4939, decode.d8.loss_cls: 0.0405, decode.d8.loss_mask: 0.1780, decode.d8.loss_dice: 0.4941, loss: 7.4571 +2022-05-11 08:08:08,060 - mmseg - INFO - Iter [69650/80000] lr: 1.858e-07, eta: 6:16:24, time: 1.814, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0457, 
decode.loss_mask: 0.1787, decode.loss_dice: 0.4958, decode.d0.loss_cls: 0.2974, decode.d0.loss_mask: 0.1857, decode.d0.loss_dice: 0.5169, decode.d1.loss_cls: 0.0673, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.5076, decode.d2.loss_cls: 0.0573, decode.d2.loss_mask: 0.1791, decode.d2.loss_dice: 0.4969, decode.d3.loss_cls: 0.0485, decode.d3.loss_mask: 0.1789, decode.d3.loss_dice: 0.4958, decode.d4.loss_cls: 0.0559, decode.d4.loss_mask: 0.1792, decode.d4.loss_dice: 0.4964, decode.d5.loss_cls: 0.0532, decode.d5.loss_mask: 0.1790, decode.d5.loss_dice: 0.4958, decode.d6.loss_cls: 0.0476, decode.d6.loss_mask: 0.1785, decode.d6.loss_dice: 0.4966, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1788, decode.d7.loss_dice: 0.4938, decode.d8.loss_cls: 0.0468, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4965, loss: 7.5652 +2022-05-11 08:09:37,147 - mmseg - INFO - Iter [69700/80000] lr: 1.849e-07, eta: 6:14:25, time: 1.782, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0454, decode.loss_mask: 0.1815, decode.loss_dice: 0.4967, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1878, decode.d0.loss_dice: 0.5171, decode.d1.loss_cls: 0.0639, decode.d1.loss_mask: 0.1828, decode.d1.loss_dice: 0.5029, decode.d2.loss_cls: 0.0526, decode.d2.loss_mask: 0.1817, decode.d2.loss_dice: 0.5019, decode.d3.loss_cls: 0.0532, decode.d3.loss_mask: 0.1816, decode.d3.loss_dice: 0.4988, decode.d4.loss_cls: 0.0526, decode.d4.loss_mask: 0.1817, decode.d4.loss_dice: 0.4984, decode.d5.loss_cls: 0.0493, decode.d5.loss_mask: 0.1817, decode.d5.loss_dice: 0.4987, decode.d6.loss_cls: 0.0474, decode.d6.loss_mask: 0.1816, decode.d6.loss_dice: 0.4955, decode.d7.loss_cls: 0.0524, decode.d7.loss_mask: 0.1819, decode.d7.loss_dice: 0.5000, decode.d8.loss_cls: 0.0520, decode.d8.loss_mask: 0.1818, decode.d8.loss_dice: 0.4938, loss: 7.5923 +2022-05-11 08:11:07,690 - mmseg - INFO - Iter [69750/80000] lr: 1.840e-07, eta: 6:12:28, time: 1.811, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0421, 
decode.loss_mask: 0.1799, decode.loss_dice: 0.4825, decode.d0.loss_cls: 0.2843, decode.d0.loss_mask: 0.1866, decode.d0.loss_dice: 0.5064, decode.d1.loss_cls: 0.0616, decode.d1.loss_mask: 0.1813, decode.d1.loss_dice: 0.4917, decode.d2.loss_cls: 0.0515, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.4894, decode.d3.loss_cls: 0.0487, decode.d3.loss_mask: 0.1803, decode.d3.loss_dice: 0.4850, decode.d4.loss_cls: 0.0525, decode.d4.loss_mask: 0.1801, decode.d4.loss_dice: 0.4867, decode.d5.loss_cls: 0.0468, decode.d5.loss_mask: 0.1805, decode.d5.loss_dice: 0.4854, decode.d6.loss_cls: 0.0453, decode.d6.loss_mask: 0.1805, decode.d6.loss_dice: 0.4863, decode.d7.loss_cls: 0.0506, decode.d7.loss_mask: 0.1802, decode.d7.loss_dice: 0.4811, decode.d8.loss_cls: 0.0476, decode.d8.loss_mask: 0.1804, decode.d8.loss_dice: 0.4858, loss: 7.4214 +2022-05-11 08:12:39,732 - mmseg - INFO - Iter [69800/80000] lr: 1.831e-07, eta: 6:10:31, time: 1.841, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0511, decode.loss_mask: 0.1757, decode.loss_dice: 0.4884, decode.d0.loss_cls: 0.2887, decode.d0.loss_mask: 0.1815, decode.d0.loss_dice: 0.5057, decode.d1.loss_cls: 0.0525, decode.d1.loss_mask: 0.1773, decode.d1.loss_dice: 0.4973, decode.d2.loss_cls: 0.0480, decode.d2.loss_mask: 0.1765, decode.d2.loss_dice: 0.4907, decode.d3.loss_cls: 0.0474, decode.d3.loss_mask: 0.1762, decode.d3.loss_dice: 0.4824, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1760, decode.d4.loss_dice: 0.4874, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1762, decode.d5.loss_dice: 0.4851, decode.d6.loss_cls: 0.0451, decode.d6.loss_mask: 0.1761, decode.d6.loss_dice: 0.4851, decode.d7.loss_cls: 0.0487, decode.d7.loss_mask: 0.1757, decode.d7.loss_dice: 0.4865, decode.d8.loss_cls: 0.0453, decode.d8.loss_mask: 0.1754, decode.d8.loss_dice: 0.4874, loss: 7.3915 +2022-05-11 08:14:09,271 - mmseg - INFO - Iter [69850/80000] lr: 1.822e-07, eta: 6:08:33, time: 1.791, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0456, 
decode.loss_mask: 0.1770, decode.loss_dice: 0.4727, decode.d0.loss_cls: 0.2939, decode.d0.loss_mask: 0.1834, decode.d0.loss_dice: 0.4920, decode.d1.loss_cls: 0.0535, decode.d1.loss_mask: 0.1774, decode.d1.loss_dice: 0.4783, decode.d2.loss_cls: 0.0498, decode.d2.loss_mask: 0.1775, decode.d2.loss_dice: 0.4715, decode.d3.loss_cls: 0.0479, decode.d3.loss_mask: 0.1767, decode.d3.loss_dice: 0.4705, decode.d4.loss_cls: 0.0460, decode.d4.loss_mask: 0.1772, decode.d4.loss_dice: 0.4731, decode.d5.loss_cls: 0.0445, decode.d5.loss_mask: 0.1773, decode.d5.loss_dice: 0.4729, decode.d6.loss_cls: 0.0443, decode.d6.loss_mask: 0.1770, decode.d6.loss_dice: 0.4682, decode.d7.loss_cls: 0.0458, decode.d7.loss_mask: 0.1770, decode.d7.loss_dice: 0.4742, decode.d8.loss_cls: 0.0478, decode.d8.loss_mask: 0.1770, decode.d8.loss_dice: 0.4711, loss: 7.2410 +2022-05-11 08:15:39,671 - mmseg - INFO - Iter [69900/80000] lr: 1.813e-07, eta: 6:06:35, time: 1.808, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0507, decode.loss_mask: 0.1820, decode.loss_dice: 0.4984, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5189, decode.d1.loss_cls: 0.0685, decode.d1.loss_mask: 0.1836, decode.d1.loss_dice: 0.5075, decode.d2.loss_cls: 0.0550, decode.d2.loss_mask: 0.1831, decode.d2.loss_dice: 0.5015, decode.d3.loss_cls: 0.0544, decode.d3.loss_mask: 0.1828, decode.d3.loss_dice: 0.5014, decode.d4.loss_cls: 0.0564, decode.d4.loss_mask: 0.1829, decode.d4.loss_dice: 0.4997, decode.d5.loss_cls: 0.0517, decode.d5.loss_mask: 0.1828, decode.d5.loss_dice: 0.5001, decode.d6.loss_cls: 0.0499, decode.d6.loss_mask: 0.1824, decode.d6.loss_dice: 0.4974, decode.d7.loss_cls: 0.0468, decode.d7.loss_mask: 0.1822, decode.d7.loss_dice: 0.4953, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1820, decode.d8.loss_dice: 0.4967, loss: 7.6381 +2022-05-11 08:17:13,039 - mmseg - INFO - Iter [69950/80000] lr: 1.804e-07, eta: 6:04:39, time: 1.867, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0447, 
decode.loss_mask: 0.1858, decode.loss_dice: 0.4925, decode.d0.loss_cls: 0.2859, decode.d0.loss_mask: 0.1928, decode.d0.loss_dice: 0.5182, decode.d1.loss_cls: 0.0627, decode.d1.loss_mask: 0.1868, decode.d1.loss_dice: 0.5020, decode.d2.loss_cls: 0.0541, decode.d2.loss_mask: 0.1861, decode.d2.loss_dice: 0.4971, decode.d3.loss_cls: 0.0458, decode.d3.loss_mask: 0.1862, decode.d3.loss_dice: 0.4943, decode.d4.loss_cls: 0.0465, decode.d4.loss_mask: 0.1861, decode.d4.loss_dice: 0.4946, decode.d5.loss_cls: 0.0445, decode.d5.loss_mask: 0.1860, decode.d5.loss_dice: 0.4936, decode.d6.loss_cls: 0.0436, decode.d6.loss_mask: 0.1859, decode.d6.loss_dice: 0.4954, decode.d7.loss_cls: 0.0421, decode.d7.loss_mask: 0.1860, decode.d7.loss_dice: 0.4943, decode.d8.loss_cls: 0.0410, decode.d8.loss_mask: 0.1857, decode.d8.loss_dice: 0.4893, loss: 7.5492 +2022-05-11 08:18:43,622 - mmseg - INFO - Saving checkpoint at 70000 iterations +2022-05-11 08:19:13,261 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 08:19:13,270 - mmseg - INFO - Iter [70000/80000] lr: 1.795e-07, eta: 6:02:56, time: 2.402, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0488, decode.loss_mask: 0.1775, decode.loss_dice: 0.4862, decode.d0.loss_cls: 0.2827, decode.d0.loss_mask: 0.1827, decode.d0.loss_dice: 0.5081, decode.d1.loss_cls: 0.0652, decode.d1.loss_mask: 0.1783, decode.d1.loss_dice: 0.4957, decode.d2.loss_cls: 0.0609, decode.d2.loss_mask: 0.1779, decode.d2.loss_dice: 0.4918, decode.d3.loss_cls: 0.0483, decode.d3.loss_mask: 0.1776, decode.d3.loss_dice: 0.4877, decode.d4.loss_cls: 0.0495, decode.d4.loss_mask: 0.1783, decode.d4.loss_dice: 0.4864, decode.d5.loss_cls: 0.0497, decode.d5.loss_mask: 0.1778, decode.d5.loss_dice: 0.4886, decode.d6.loss_cls: 0.0492, decode.d6.loss_mask: 0.1778, decode.d6.loss_dice: 0.4870, decode.d7.loss_cls: 0.0539, decode.d7.loss_mask: 0.1778, decode.d7.loss_dice: 0.4892, decode.d8.loss_cls: 0.0505, decode.d8.loss_mask: 0.1778, 
decode.d8.loss_dice: 0.4882, loss: 7.4511 +2022-05-11 08:21:08,723 - mmseg - INFO - per class results: +2022-05-11 08:21:08,727 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.21 | +| sidewalk | 88.72 | 94.2 | +| building | 94.37 | 97.01 | +| wall | 69.86 | 80.11 | +| fence | 74.62 | 81.63 | +| pole | 71.16 | 83.81 | +| traffic light | 77.14 | 87.03 | +| traffic sign | 84.1 | 90.2 | +| vegetation | 93.35 | 96.96 | +| terrain | 69.34 | 77.99 | +| sky | 95.73 | 98.48 | +| person | 86.78 | 93.71 | +| rider | 74.3 | 85.39 | +| car | 96.16 | 98.3 | +| truck | 82.21 | 94.93 | +| bus | 93.49 | 96.35 | +| train | 87.94 | 90.64 | +| motorcycle | 77.44 | 87.49 | +| bicycle | 82.7 | 91.24 | ++---------------+-------+-------+ +2022-05-11 08:21:08,728 - mmseg - INFO - Summary: +2022-05-11 08:21:08,728 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 84.11 | 90.77 | ++-------+-------+-------+ +2022-05-11 08:21:08,732 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 08:21:08,732 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8411, mAcc: 0.9077, IoU.road: 0.9862, IoU.sidewalk: 0.8872, IoU.building: 0.9437, IoU.wall: 0.6986, IoU.fence: 0.7462, IoU.pole: 0.7116, IoU.traffic light: 0.7714, IoU.traffic sign: 0.8410, IoU.vegetation: 0.9335, IoU.terrain: 0.6934, IoU.sky: 0.9573, IoU.person: 0.8678, IoU.rider: 0.7430, IoU.car: 0.9616, IoU.truck: 0.8221, IoU.bus: 0.9349, IoU.train: 0.8794, IoU.motorcycle: 0.7744, IoU.bicycle: 0.8270, Acc.road: 0.9921, Acc.sidewalk: 0.9420, Acc.building: 0.9701, Acc.wall: 0.8011, Acc.fence: 0.8163, Acc.pole: 0.8381, Acc.traffic light: 0.8703, Acc.traffic sign: 0.9020, Acc.vegetation: 0.9696, Acc.terrain: 0.7799, Acc.sky: 0.9848, Acc.person: 0.9371, Acc.rider: 0.8539, Acc.car: 0.9830, Acc.truck: 0.9493, Acc.bus: 0.9635, Acc.train: 0.9064, Acc.motorcycle: 0.8749, 
Acc.bicycle: 0.9124 +2022-05-11 08:22:38,968 - mmseg - INFO - Iter [70050/80000] lr: 1.786e-07, eta: 6:01:50, time: 4.116, data_time: 2.328, memory: 69063, decode.loss_cls: 0.0417, decode.loss_mask: 0.1803, decode.loss_dice: 0.4844, decode.d0.loss_cls: 0.2846, decode.d0.loss_mask: 0.1862, decode.d0.loss_dice: 0.5051, decode.d1.loss_cls: 0.0564, decode.d1.loss_mask: 0.1816, decode.d1.loss_dice: 0.4935, decode.d2.loss_cls: 0.0538, decode.d2.loss_mask: 0.1812, decode.d2.loss_dice: 0.4902, decode.d3.loss_cls: 0.0456, decode.d3.loss_mask: 0.1811, decode.d3.loss_dice: 0.4871, decode.d4.loss_cls: 0.0487, decode.d4.loss_mask: 0.1809, decode.d4.loss_dice: 0.4877, decode.d5.loss_cls: 0.0486, decode.d5.loss_mask: 0.1811, decode.d5.loss_dice: 0.4861, decode.d6.loss_cls: 0.0455, decode.d6.loss_mask: 0.1810, decode.d6.loss_dice: 0.4859, decode.d7.loss_cls: 0.0458, decode.d7.loss_mask: 0.1808, decode.d7.loss_dice: 0.4866, decode.d8.loss_cls: 0.0469, decode.d8.loss_mask: 0.1807, decode.d8.loss_dice: 0.4853, loss: 7.4243 +2022-05-11 08:24:10,579 - mmseg - INFO - Iter [70100/80000] lr: 1.777e-07, eta: 5:59:53, time: 1.832, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0540, decode.loss_mask: 0.1846, decode.loss_dice: 0.5034, decode.d0.loss_cls: 0.2921, decode.d0.loss_mask: 0.1911, decode.d0.loss_dice: 0.5280, decode.d1.loss_cls: 0.0738, decode.d1.loss_mask: 0.1855, decode.d1.loss_dice: 0.5108, decode.d2.loss_cls: 0.0639, decode.d2.loss_mask: 0.1852, decode.d2.loss_dice: 0.5087, decode.d3.loss_cls: 0.0605, decode.d3.loss_mask: 0.1851, decode.d3.loss_dice: 0.5044, decode.d4.loss_cls: 0.0571, decode.d4.loss_mask: 0.1853, decode.d4.loss_dice: 0.5060, decode.d5.loss_cls: 0.0494, decode.d5.loss_mask: 0.1849, decode.d5.loss_dice: 0.5041, decode.d6.loss_cls: 0.0571, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.5024, decode.d7.loss_cls: 0.0502, decode.d7.loss_mask: 0.1846, decode.d7.loss_dice: 0.5022, decode.d8.loss_cls: 0.0535, decode.d8.loss_mask: 0.1847, decode.d8.loss_dice: 
0.5034, loss: 7.7404 +2022-05-11 08:25:43,387 - mmseg - INFO - Iter [70150/80000] lr: 1.768e-07, eta: 5:57:57, time: 1.856, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0529, decode.loss_mask: 0.1787, decode.loss_dice: 0.4952, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.1844, decode.d0.loss_dice: 0.5145, decode.d1.loss_cls: 0.0695, decode.d1.loss_mask: 0.1796, decode.d1.loss_dice: 0.4985, decode.d2.loss_cls: 0.0646, decode.d2.loss_mask: 0.1792, decode.d2.loss_dice: 0.4962, decode.d3.loss_cls: 0.0561, decode.d3.loss_mask: 0.1789, decode.d3.loss_dice: 0.4924, decode.d4.loss_cls: 0.0586, decode.d4.loss_mask: 0.1786, decode.d4.loss_dice: 0.4908, decode.d5.loss_cls: 0.0576, decode.d5.loss_mask: 0.1779, decode.d5.loss_dice: 0.4894, decode.d6.loss_cls: 0.0546, decode.d6.loss_mask: 0.1781, decode.d6.loss_dice: 0.4928, decode.d7.loss_cls: 0.0574, decode.d7.loss_mask: 0.1782, decode.d7.loss_dice: 0.4894, decode.d8.loss_cls: 0.0523, decode.d8.loss_mask: 0.1786, decode.d8.loss_dice: 0.4912, loss: 7.5728 +2022-05-11 08:27:14,859 - mmseg - INFO - Iter [70200/80000] lr: 1.759e-07, eta: 5:56:00, time: 1.829, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0550, decode.loss_mask: 0.1768, decode.loss_dice: 0.4978, decode.d0.loss_cls: 0.2889, decode.d0.loss_mask: 0.1830, decode.d0.loss_dice: 0.5188, decode.d1.loss_cls: 0.0719, decode.d1.loss_mask: 0.1787, decode.d1.loss_dice: 0.5033, decode.d2.loss_cls: 0.0635, decode.d2.loss_mask: 0.1781, decode.d2.loss_dice: 0.5008, decode.d3.loss_cls: 0.0553, decode.d3.loss_mask: 0.1777, decode.d3.loss_dice: 0.5021, decode.d4.loss_cls: 0.0595, decode.d4.loss_mask: 0.1777, decode.d4.loss_dice: 0.5006, decode.d5.loss_cls: 0.0550, decode.d5.loss_mask: 0.1778, decode.d5.loss_dice: 0.4982, decode.d6.loss_cls: 0.0573, decode.d6.loss_mask: 0.1776, decode.d6.loss_dice: 0.5004, decode.d7.loss_cls: 0.0546, decode.d7.loss_mask: 0.1772, decode.d7.loss_dice: 0.4998, decode.d8.loss_cls: 0.0495, decode.d8.loss_mask: 0.1771, decode.d8.loss_dice: 
0.4973, loss: 7.6111 +2022-05-11 08:28:45,421 - mmseg - INFO - Iter [70250/80000] lr: 1.750e-07, eta: 5:54:03, time: 1.811, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0544, decode.loss_mask: 0.1781, decode.loss_dice: 0.4905, decode.d0.loss_cls: 0.2984, decode.d0.loss_mask: 0.1839, decode.d0.loss_dice: 0.5107, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1801, decode.d1.loss_dice: 0.4985, decode.d2.loss_cls: 0.0623, decode.d2.loss_mask: 0.1791, decode.d2.loss_dice: 0.4994, decode.d3.loss_cls: 0.0609, decode.d3.loss_mask: 0.1785, decode.d3.loss_dice: 0.4946, decode.d4.loss_cls: 0.0571, decode.d4.loss_mask: 0.1782, decode.d4.loss_dice: 0.4952, decode.d5.loss_cls: 0.0564, decode.d5.loss_mask: 0.1784, decode.d5.loss_dice: 0.4940, decode.d6.loss_cls: 0.0609, decode.d6.loss_mask: 0.1779, decode.d6.loss_dice: 0.4910, decode.d7.loss_cls: 0.0521, decode.d7.loss_mask: 0.1780, decode.d7.loss_dice: 0.4904, decode.d8.loss_cls: 0.0544, decode.d8.loss_mask: 0.1786, decode.d8.loss_dice: 0.4990, loss: 7.5840 +2022-05-11 08:30:15,495 - mmseg - INFO - Iter [70300/80000] lr: 1.741e-07, eta: 5:52:06, time: 1.801, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0468, decode.loss_mask: 0.1797, decode.loss_dice: 0.5010, decode.d0.loss_cls: 0.2943, decode.d0.loss_mask: 0.1867, decode.d0.loss_dice: 0.5217, decode.d1.loss_cls: 0.0558, decode.d1.loss_mask: 0.1821, decode.d1.loss_dice: 0.5058, decode.d2.loss_cls: 0.0567, decode.d2.loss_mask: 0.1810, decode.d2.loss_dice: 0.5033, decode.d3.loss_cls: 0.0528, decode.d3.loss_mask: 0.1804, decode.d3.loss_dice: 0.5012, decode.d4.loss_cls: 0.0456, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.5007, decode.d5.loss_cls: 0.0475, decode.d5.loss_mask: 0.1797, decode.d5.loss_dice: 0.4992, decode.d6.loss_cls: 0.0459, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.5024, decode.d7.loss_cls: 0.0448, decode.d7.loss_mask: 0.1797, decode.d7.loss_dice: 0.4970, decode.d8.loss_cls: 0.0434, decode.d8.loss_mask: 0.1800, decode.d8.loss_dice: 
0.4992, loss: 7.5742 +2022-05-11 08:31:48,028 - mmseg - INFO - Iter [70350/80000] lr: 1.732e-07, eta: 5:50:10, time: 1.850, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0545, decode.loss_mask: 0.1828, decode.loss_dice: 0.5107, decode.d0.loss_cls: 0.2984, decode.d0.loss_mask: 0.1901, decode.d0.loss_dice: 0.5355, decode.d1.loss_cls: 0.0731, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.5186, decode.d2.loss_cls: 0.0682, decode.d2.loss_mask: 0.1833, decode.d2.loss_dice: 0.5149, decode.d3.loss_cls: 0.0605, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.5103, decode.d4.loss_cls: 0.0627, decode.d4.loss_mask: 0.1826, decode.d4.loss_dice: 0.5145, decode.d5.loss_cls: 0.0617, decode.d5.loss_mask: 0.1828, decode.d5.loss_dice: 0.5138, decode.d6.loss_cls: 0.0529, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.5107, decode.d7.loss_cls: 0.0623, decode.d7.loss_mask: 0.1825, decode.d7.loss_dice: 0.5082, decode.d8.loss_cls: 0.0570, decode.d8.loss_mask: 0.1824, decode.d8.loss_dice: 0.5111, loss: 7.8365 +2022-05-11 08:33:17,896 - mmseg - INFO - Iter [70400/80000] lr: 1.723e-07, eta: 5:48:13, time: 1.797, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0473, decode.loss_mask: 0.1895, decode.loss_dice: 0.4911, decode.d0.loss_cls: 0.2927, decode.d0.loss_mask: 0.1975, decode.d0.loss_dice: 0.5117, decode.d1.loss_cls: 0.0660, decode.d1.loss_mask: 0.1916, decode.d1.loss_dice: 0.4997, decode.d2.loss_cls: 0.0545, decode.d2.loss_mask: 0.1904, decode.d2.loss_dice: 0.4990, decode.d3.loss_cls: 0.0476, decode.d3.loss_mask: 0.1897, decode.d3.loss_dice: 0.4915, decode.d4.loss_cls: 0.0485, decode.d4.loss_mask: 0.1899, decode.d4.loss_dice: 0.4906, decode.d5.loss_cls: 0.0557, decode.d5.loss_mask: 0.1896, decode.d5.loss_dice: 0.4892, decode.d6.loss_cls: 0.0491, decode.d6.loss_mask: 0.1896, decode.d6.loss_dice: 0.4915, decode.d7.loss_cls: 0.0480, decode.d7.loss_mask: 0.1896, decode.d7.loss_dice: 0.4938, decode.d8.loss_cls: 0.0514, decode.d8.loss_mask: 0.1897, decode.d8.loss_dice: 
0.4939, loss: 7.6200 +2022-05-11 08:34:49,090 - mmseg - INFO - Iter [70450/80000] lr: 1.714e-07, eta: 5:46:17, time: 1.824, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0480, decode.loss_mask: 0.1734, decode.loss_dice: 0.4770, decode.d0.loss_cls: 0.2993, decode.d0.loss_mask: 0.1784, decode.d0.loss_dice: 0.5003, decode.d1.loss_cls: 0.0615, decode.d1.loss_mask: 0.1734, decode.d1.loss_dice: 0.4885, decode.d2.loss_cls: 0.0601, decode.d2.loss_mask: 0.1732, decode.d2.loss_dice: 0.4847, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1733, decode.d3.loss_dice: 0.4789, decode.d4.loss_cls: 0.0516, decode.d4.loss_mask: 0.1737, decode.d4.loss_dice: 0.4803, decode.d5.loss_cls: 0.0536, decode.d5.loss_mask: 0.1731, decode.d5.loss_dice: 0.4791, decode.d6.loss_cls: 0.0499, decode.d6.loss_mask: 0.1735, decode.d6.loss_dice: 0.4840, decode.d7.loss_cls: 0.0523, decode.d7.loss_mask: 0.1736, decode.d7.loss_dice: 0.4805, decode.d8.loss_cls: 0.0507, decode.d8.loss_mask: 0.1736, decode.d8.loss_dice: 0.4787, loss: 7.3506 +2022-05-11 08:36:18,501 - mmseg - INFO - Iter [70500/80000] lr: 1.705e-07, eta: 5:44:20, time: 1.788, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0489, decode.loss_mask: 0.1775, decode.loss_dice: 0.4887, decode.d0.loss_cls: 0.2966, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5111, decode.d1.loss_cls: 0.0744, decode.d1.loss_mask: 0.1791, decode.d1.loss_dice: 0.4928, decode.d2.loss_cls: 0.0597, decode.d2.loss_mask: 0.1785, decode.d2.loss_dice: 0.4884, decode.d3.loss_cls: 0.0551, decode.d3.loss_mask: 0.1786, decode.d3.loss_dice: 0.4861, decode.d4.loss_cls: 0.0516, decode.d4.loss_mask: 0.1774, decode.d4.loss_dice: 0.4894, decode.d5.loss_cls: 0.0506, decode.d5.loss_mask: 0.1778, decode.d5.loss_dice: 0.4867, decode.d6.loss_cls: 0.0504, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4883, decode.d7.loss_cls: 0.0471, decode.d7.loss_mask: 0.1777, decode.d7.loss_dice: 0.4870, decode.d8.loss_cls: 0.0490, decode.d8.loss_mask: 0.1778, decode.d8.loss_dice: 
0.4846, loss: 7.4735 +2022-05-11 08:37:51,541 - mmseg - INFO - Iter [70550/80000] lr: 1.696e-07, eta: 5:42:25, time: 1.861, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0477, decode.loss_mask: 0.1845, decode.loss_dice: 0.5099, decode.d0.loss_cls: 0.2945, decode.d0.loss_mask: 0.1901, decode.d0.loss_dice: 0.5253, decode.d1.loss_cls: 0.0604, decode.d1.loss_mask: 0.1859, decode.d1.loss_dice: 0.5170, decode.d2.loss_cls: 0.0506, decode.d2.loss_mask: 0.1854, decode.d2.loss_dice: 0.5126, decode.d3.loss_cls: 0.0521, decode.d3.loss_mask: 0.1852, decode.d3.loss_dice: 0.5082, decode.d4.loss_cls: 0.0510, decode.d4.loss_mask: 0.1851, decode.d4.loss_dice: 0.5102, decode.d5.loss_cls: 0.0509, decode.d5.loss_mask: 0.1848, decode.d5.loss_dice: 0.5077, decode.d6.loss_cls: 0.0438, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.5100, decode.d7.loss_cls: 0.0487, decode.d7.loss_mask: 0.1844, decode.d7.loss_dice: 0.5074, decode.d8.loss_cls: 0.0484, decode.d8.loss_mask: 0.1841, decode.d8.loss_dice: 0.5052, loss: 7.7155 +2022-05-11 08:39:21,325 - mmseg - INFO - Iter [70600/80000] lr: 1.687e-07, eta: 5:40:28, time: 1.795, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0470, decode.loss_mask: 0.1828, decode.loss_dice: 0.4840, decode.d0.loss_cls: 0.2900, decode.d0.loss_mask: 0.1899, decode.d0.loss_dice: 0.5062, decode.d1.loss_cls: 0.0622, decode.d1.loss_mask: 0.1841, decode.d1.loss_dice: 0.4928, decode.d2.loss_cls: 0.0546, decode.d2.loss_mask: 0.1836, decode.d2.loss_dice: 0.4911, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1831, decode.d3.loss_dice: 0.4926, decode.d4.loss_cls: 0.0484, decode.d4.loss_mask: 0.1828, decode.d4.loss_dice: 0.4864, decode.d5.loss_cls: 0.0537, decode.d5.loss_mask: 0.1829, decode.d5.loss_dice: 0.4922, decode.d6.loss_cls: 0.0480, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.4885, decode.d7.loss_cls: 0.0497, decode.d7.loss_mask: 0.1830, decode.d7.loss_dice: 0.4870, decode.d8.loss_cls: 0.0510, decode.d8.loss_mask: 0.1830, decode.d8.loss_dice: 
0.4862, loss: 7.5015 +2022-05-11 08:40:50,953 - mmseg - INFO - Iter [70650/80000] lr: 1.678e-07, eta: 5:38:32, time: 1.793, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0471, decode.loss_mask: 0.1809, decode.loss_dice: 0.4772, decode.d0.loss_cls: 0.2867, decode.d0.loss_mask: 0.1881, decode.d0.loss_dice: 0.5013, decode.d1.loss_cls: 0.0662, decode.d1.loss_mask: 0.1815, decode.d1.loss_dice: 0.4855, decode.d2.loss_cls: 0.0599, decode.d2.loss_mask: 0.1816, decode.d2.loss_dice: 0.4874, decode.d3.loss_cls: 0.0525, decode.d3.loss_mask: 0.1807, decode.d3.loss_dice: 0.4788, decode.d4.loss_cls: 0.0541, decode.d4.loss_mask: 0.1808, decode.d4.loss_dice: 0.4785, decode.d5.loss_cls: 0.0601, decode.d5.loss_mask: 0.1814, decode.d5.loss_dice: 0.4794, decode.d6.loss_cls: 0.0516, decode.d6.loss_mask: 0.1811, decode.d6.loss_dice: 0.4813, decode.d7.loss_cls: 0.0534, decode.d7.loss_mask: 0.1815, decode.d7.loss_dice: 0.4787, decode.d8.loss_cls: 0.0536, decode.d8.loss_mask: 0.1811, decode.d8.loss_dice: 0.4787, loss: 7.4309 +2022-05-11 08:42:24,135 - mmseg - INFO - Iter [70700/80000] lr: 1.669e-07, eta: 5:36:37, time: 1.863, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0554, decode.loss_mask: 0.1788, decode.loss_dice: 0.4933, decode.d0.loss_cls: 0.3023, decode.d0.loss_mask: 0.1859, decode.d0.loss_dice: 0.5132, decode.d1.loss_cls: 0.0746, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.4994, decode.d2.loss_cls: 0.0643, decode.d2.loss_mask: 0.1801, decode.d2.loss_dice: 0.4998, decode.d3.loss_cls: 0.0615, decode.d3.loss_mask: 0.1791, decode.d3.loss_dice: 0.4902, decode.d4.loss_cls: 0.0553, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.4878, decode.d5.loss_cls: 0.0579, decode.d5.loss_mask: 0.1790, decode.d5.loss_dice: 0.4912, decode.d6.loss_cls: 0.0595, decode.d6.loss_mask: 0.1791, decode.d6.loss_dice: 0.4962, decode.d7.loss_cls: 0.0523, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.4956, decode.d8.loss_cls: 0.0554, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 
0.4959, loss: 7.6009 +2022-05-11 08:43:54,623 - mmseg - INFO - Iter [70750/80000] lr: 1.660e-07, eta: 5:34:41, time: 1.810, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0478, decode.loss_mask: 0.1847, decode.loss_dice: 0.4989, decode.d0.loss_cls: 0.2997, decode.d0.loss_mask: 0.1916, decode.d0.loss_dice: 0.5206, decode.d1.loss_cls: 0.0625, decode.d1.loss_mask: 0.1859, decode.d1.loss_dice: 0.5086, decode.d2.loss_cls: 0.0537, decode.d2.loss_mask: 0.1865, decode.d2.loss_dice: 0.5053, decode.d3.loss_cls: 0.0477, decode.d3.loss_mask: 0.1854, decode.d3.loss_dice: 0.5025, decode.d4.loss_cls: 0.0451, decode.d4.loss_mask: 0.1847, decode.d4.loss_dice: 0.4988, decode.d5.loss_cls: 0.0510, decode.d5.loss_mask: 0.1850, decode.d5.loss_dice: 0.5042, decode.d6.loss_cls: 0.0471, decode.d6.loss_mask: 0.1851, decode.d6.loss_dice: 0.4998, decode.d7.loss_cls: 0.0458, decode.d7.loss_mask: 0.1849, decode.d7.loss_dice: 0.5021, decode.d8.loss_cls: 0.0526, decode.d8.loss_mask: 0.1848, decode.d8.loss_dice: 0.5041, loss: 7.6566 +2022-05-11 08:45:23,987 - mmseg - INFO - Iter [70800/80000] lr: 1.651e-07, eta: 5:32:44, time: 1.787, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0537, decode.loss_mask: 0.1819, decode.loss_dice: 0.4860, decode.d0.loss_cls: 0.2986, decode.d0.loss_mask: 0.1913, decode.d0.loss_dice: 0.5146, decode.d1.loss_cls: 0.0684, decode.d1.loss_mask: 0.1844, decode.d1.loss_dice: 0.4965, decode.d2.loss_cls: 0.0639, decode.d2.loss_mask: 0.1823, decode.d2.loss_dice: 0.4939, decode.d3.loss_cls: 0.0597, decode.d3.loss_mask: 0.1824, decode.d3.loss_dice: 0.4888, decode.d4.loss_cls: 0.0563, decode.d4.loss_mask: 0.1821, decode.d4.loss_dice: 0.4865, decode.d5.loss_cls: 0.0502, decode.d5.loss_mask: 0.1826, decode.d5.loss_dice: 0.4888, decode.d6.loss_cls: 0.0481, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.4887, decode.d7.loss_cls: 0.0512, decode.d7.loss_mask: 0.1818, decode.d7.loss_dice: 0.4909, decode.d8.loss_cls: 0.0494, decode.d8.loss_mask: 0.1817, decode.d8.loss_dice: 
0.4868, loss: 7.5536 +2022-05-11 08:46:55,193 - mmseg - INFO - Iter [70850/80000] lr: 1.642e-07, eta: 5:30:49, time: 1.824, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0553, decode.loss_mask: 0.1827, decode.loss_dice: 0.4958, decode.d0.loss_cls: 0.2961, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5174, decode.d1.loss_cls: 0.0695, decode.d1.loss_mask: 0.1838, decode.d1.loss_dice: 0.5014, decode.d2.loss_cls: 0.0625, decode.d2.loss_mask: 0.1835, decode.d2.loss_dice: 0.4952, decode.d3.loss_cls: 0.0552, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4942, decode.d4.loss_cls: 0.0538, decode.d4.loss_mask: 0.1830, decode.d4.loss_dice: 0.4977, decode.d5.loss_cls: 0.0535, decode.d5.loss_mask: 0.1830, decode.d5.loss_dice: 0.4929, decode.d6.loss_cls: 0.0551, decode.d6.loss_mask: 0.1828, decode.d6.loss_dice: 0.4994, decode.d7.loss_cls: 0.0519, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.4983, decode.d8.loss_cls: 0.0552, decode.d8.loss_mask: 0.1828, decode.d8.loss_dice: 0.5000, loss: 7.6379 +2022-05-11 08:48:28,318 - mmseg - INFO - Iter [70900/80000] lr: 1.633e-07, eta: 5:28:54, time: 1.863, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0401, decode.loss_mask: 0.1855, decode.loss_dice: 0.5015, decode.d0.loss_cls: 0.2953, decode.d0.loss_mask: 0.1924, decode.d0.loss_dice: 0.5233, decode.d1.loss_cls: 0.0529, decode.d1.loss_mask: 0.1874, decode.d1.loss_dice: 0.5093, decode.d2.loss_cls: 0.0482, decode.d2.loss_mask: 0.1856, decode.d2.loss_dice: 0.5053, decode.d3.loss_cls: 0.0484, decode.d3.loss_mask: 0.1850, decode.d3.loss_dice: 0.5025, decode.d4.loss_cls: 0.0422, decode.d4.loss_mask: 0.1857, decode.d4.loss_dice: 0.5035, decode.d5.loss_cls: 0.0428, decode.d5.loss_mask: 0.1854, decode.d5.loss_dice: 0.5058, decode.d6.loss_cls: 0.0470, decode.d6.loss_mask: 0.1855, decode.d6.loss_dice: 0.5019, decode.d7.loss_cls: 0.0365, decode.d7.loss_mask: 0.1854, decode.d7.loss_dice: 0.5016, decode.d8.loss_cls: 0.0429, decode.d8.loss_mask: 0.1857, decode.d8.loss_dice: 
0.5029, loss: 7.6174 +2022-05-11 08:49:59,358 - mmseg - INFO - Iter [70950/80000] lr: 1.624e-07, eta: 5:26:59, time: 1.816, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0505, decode.loss_mask: 0.1828, decode.loss_dice: 0.4868, decode.d0.loss_cls: 0.3019, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5075, decode.d1.loss_cls: 0.0699, decode.d1.loss_mask: 0.1840, decode.d1.loss_dice: 0.4923, decode.d2.loss_cls: 0.0557, decode.d2.loss_mask: 0.1830, decode.d2.loss_dice: 0.4912, decode.d3.loss_cls: 0.0562, decode.d3.loss_mask: 0.1831, decode.d3.loss_dice: 0.4874, decode.d4.loss_cls: 0.0583, decode.d4.loss_mask: 0.1833, decode.d4.loss_dice: 0.4928, decode.d5.loss_cls: 0.0567, decode.d5.loss_mask: 0.1832, decode.d5.loss_dice: 0.4887, decode.d6.loss_cls: 0.0562, decode.d6.loss_mask: 0.1828, decode.d6.loss_dice: 0.4860, decode.d7.loss_cls: 0.0509, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.4874, decode.d8.loss_cls: 0.0522, decode.d8.loss_mask: 0.1827, decode.d8.loss_dice: 0.4889, loss: 7.5538 +2022-05-11 08:51:29,926 - mmseg - INFO - Saving checkpoint at 71000 iterations +2022-05-11 08:51:59,797 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 08:51:59,805 - mmseg - INFO - Iter [71000/80000] lr: 1.615e-07, eta: 5:25:15, time: 2.411, data_time: 0.023, memory: 69063, decode.loss_cls: 0.0577, decode.loss_mask: 0.1755, decode.loss_dice: 0.4984, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1811, decode.d0.loss_dice: 0.5193, decode.d1.loss_cls: 0.0756, decode.d1.loss_mask: 0.1770, decode.d1.loss_dice: 0.5029, decode.d2.loss_cls: 0.0648, decode.d2.loss_mask: 0.1762, decode.d2.loss_dice: 0.5025, decode.d3.loss_cls: 0.0642, decode.d3.loss_mask: 0.1758, decode.d3.loss_dice: 0.4968, decode.d4.loss_cls: 0.0583, decode.d4.loss_mask: 0.1761, decode.d4.loss_dice: 0.4956, decode.d5.loss_cls: 0.0584, decode.d5.loss_mask: 0.1759, decode.d5.loss_dice: 0.4996, decode.d6.loss_cls: 0.0535, decode.d6.loss_mask: 0.1759, 
decode.d6.loss_dice: 0.4969, decode.d7.loss_cls: 0.0549, decode.d7.loss_mask: 0.1756, decode.d7.loss_dice: 0.4951, decode.d8.loss_cls: 0.0560, decode.d8.loss_mask: 0.1756, decode.d8.loss_dice: 0.4977, loss: 7.6089 +2022-05-11 08:53:55,721 - mmseg - INFO - per class results: +2022-05-11 08:53:55,729 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.65 | 99.24 | +| sidewalk | 88.87 | 94.33 | +| building | 94.33 | 96.96 | +| wall | 69.56 | 80.43 | +| fence | 74.25 | 80.99 | +| pole | 71.06 | 83.61 | +| traffic light | 77.04 | 87.74 | +| traffic sign | 84.08 | 90.51 | +| vegetation | 93.33 | 96.84 | +| terrain | 68.0 | 78.08 | +| sky | 95.7 | 98.5 | +| person | 86.79 | 93.69 | +| rider | 74.37 | 84.8 | +| car | 96.16 | 98.32 | +| truck | 81.7 | 94.33 | +| bus | 93.6 | 96.6 | +| train | 87.91 | 90.73 | +| motorcycle | 77.77 | 87.34 | +| bicycle | 82.66 | 92.17 | ++---------------+-------+-------+ +2022-05-11 08:53:55,729 - mmseg - INFO - Summary: +2022-05-11 08:53:55,729 - mmseg - INFO - ++------+-------+------+ +| aAcc | mIoU | mAcc | ++------+-------+------+ +| 97.0 | 83.99 | 90.8 | ++------+-------+------+ +2022-05-11 08:53:55,734 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 08:53:55,735 - mmseg - INFO - Iter(val) [32] aAcc: 0.9700, mIoU: 0.8399, mAcc: 0.9080, IoU.road: 0.9865, IoU.sidewalk: 0.8887, IoU.building: 0.9433, IoU.wall: 0.6956, IoU.fence: 0.7425, IoU.pole: 0.7106, IoU.traffic light: 0.7704, IoU.traffic sign: 0.8408, IoU.vegetation: 0.9333, IoU.terrain: 0.6800, IoU.sky: 0.9570, IoU.person: 0.8679, IoU.rider: 0.7437, IoU.car: 0.9616, IoU.truck: 0.8170, IoU.bus: 0.9360, IoU.train: 0.8791, IoU.motorcycle: 0.7777, IoU.bicycle: 0.8266, Acc.road: 0.9924, Acc.sidewalk: 0.9433, Acc.building: 0.9696, Acc.wall: 0.8043, Acc.fence: 0.8099, Acc.pole: 0.8361, Acc.traffic light: 0.8774, Acc.traffic sign: 0.9051, Acc.vegetation: 0.9684, Acc.terrain: 
0.7808, Acc.sky: 0.9850, Acc.person: 0.9369, Acc.rider: 0.8480, Acc.car: 0.9832, Acc.truck: 0.9433, Acc.bus: 0.9660, Acc.train: 0.9073, Acc.motorcycle: 0.8734, Acc.bicycle: 0.9217 +2022-05-11 08:55:25,792 - mmseg - INFO - Iter [71050/80000] lr: 1.606e-07, eta: 5:24:05, time: 4.122, data_time: 2.339, memory: 69063, decode.loss_cls: 0.0411, decode.loss_mask: 0.1804, decode.loss_dice: 0.4870, decode.d0.loss_cls: 0.2889, decode.d0.loss_mask: 0.1860, decode.d0.loss_dice: 0.5028, decode.d1.loss_cls: 0.0560, decode.d1.loss_mask: 0.1810, decode.d1.loss_dice: 0.4890, decode.d2.loss_cls: 0.0484, decode.d2.loss_mask: 0.1803, decode.d2.loss_dice: 0.4855, decode.d3.loss_cls: 0.0450, decode.d3.loss_mask: 0.1803, decode.d3.loss_dice: 0.4873, decode.d4.loss_cls: 0.0450, decode.d4.loss_mask: 0.1804, decode.d4.loss_dice: 0.4852, decode.d5.loss_cls: 0.0439, decode.d5.loss_mask: 0.1799, decode.d5.loss_dice: 0.4836, decode.d6.loss_cls: 0.0459, decode.d6.loss_mask: 0.1805, decode.d6.loss_dice: 0.4853, decode.d7.loss_cls: 0.0443, decode.d7.loss_mask: 0.1801, decode.d7.loss_dice: 0.4821, decode.d8.loss_cls: 0.0449, decode.d8.loss_mask: 0.1801, decode.d8.loss_dice: 0.4829, loss: 7.3831 +2022-05-11 08:56:59,566 - mmseg - INFO - Iter [71100/80000] lr: 1.598e-07, eta: 5:22:10, time: 1.876, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0625, decode.loss_mask: 0.1819, decode.loss_dice: 0.4965, decode.d0.loss_cls: 0.3049, decode.d0.loss_mask: 0.1899, decode.d0.loss_dice: 0.5210, decode.d1.loss_cls: 0.0772, decode.d1.loss_mask: 0.1837, decode.d1.loss_dice: 0.5040, decode.d2.loss_cls: 0.0642, decode.d2.loss_mask: 0.1825, decode.d2.loss_dice: 0.4973, decode.d3.loss_cls: 0.0622, decode.d3.loss_mask: 0.1823, decode.d3.loss_dice: 0.4925, decode.d4.loss_cls: 0.0605, decode.d4.loss_mask: 0.1822, decode.d4.loss_dice: 0.4939, decode.d5.loss_cls: 0.0604, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4935, decode.d6.loss_cls: 0.0567, decode.d6.loss_mask: 0.1817, decode.d6.loss_dice: 0.4909, 
decode.d7.loss_cls: 0.0555, decode.d7.loss_mask: 0.1818, decode.d7.loss_dice: 0.4942, decode.d8.loss_cls: 0.0653, decode.d8.loss_mask: 0.1816, decode.d8.loss_dice: 0.4936, loss: 7.6761 +2022-05-11 08:58:28,247 - mmseg - INFO - Iter [71150/80000] lr: 1.589e-07, eta: 5:20:14, time: 1.774, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1789, decode.loss_dice: 0.4876, decode.d0.loss_cls: 0.2958, decode.d0.loss_mask: 0.1855, decode.d0.loss_dice: 0.5146, decode.d1.loss_cls: 0.0692, decode.d1.loss_mask: 0.1805, decode.d1.loss_dice: 0.5024, decode.d2.loss_cls: 0.0616, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4959, decode.d3.loss_cls: 0.0576, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.4897, decode.d4.loss_cls: 0.0561, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.4903, decode.d5.loss_cls: 0.0550, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4922, decode.d6.loss_cls: 0.0509, decode.d6.loss_mask: 0.1791, decode.d6.loss_dice: 0.4891, decode.d7.loss_cls: 0.0479, decode.d7.loss_mask: 0.1790, decode.d7.loss_dice: 0.4911, decode.d8.loss_cls: 0.0491, decode.d8.loss_mask: 0.1791, decode.d8.loss_dice: 0.4923, loss: 7.5397 +2022-05-11 08:59:57,358 - mmseg - INFO - Iter [71200/80000] lr: 1.580e-07, eta: 5:18:18, time: 1.782, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0454, decode.loss_mask: 0.1787, decode.loss_dice: 0.4853, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 0.1845, decode.d0.loss_dice: 0.5031, decode.d1.loss_cls: 0.0670, decode.d1.loss_mask: 0.1803, decode.d1.loss_dice: 0.4963, decode.d2.loss_cls: 0.0530, decode.d2.loss_mask: 0.1788, decode.d2.loss_dice: 0.4878, decode.d3.loss_cls: 0.0515, decode.d3.loss_mask: 0.1789, decode.d3.loss_dice: 0.4863, decode.d4.loss_cls: 0.0484, decode.d4.loss_mask: 0.1791, decode.d4.loss_dice: 0.4877, decode.d5.loss_cls: 0.0469, decode.d5.loss_mask: 0.1789, decode.d5.loss_dice: 0.4880, decode.d6.loss_cls: 0.0497, decode.d6.loss_mask: 0.1786, decode.d6.loss_dice: 0.4884, 
decode.d7.loss_cls: 0.0471, decode.d7.loss_mask: 0.1783, decode.d7.loss_dice: 0.4860, decode.d8.loss_cls: 0.0452, decode.d8.loss_mask: 0.1785, decode.d8.loss_dice: 0.4879, loss: 7.4385 +2022-05-11 09:01:30,871 - mmseg - INFO - Iter [71250/80000] lr: 1.571e-07, eta: 5:16:24, time: 1.870, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0457, decode.loss_mask: 0.1822, decode.loss_dice: 0.4978, decode.d0.loss_cls: 0.2924, decode.d0.loss_mask: 0.1890, decode.d0.loss_dice: 0.5177, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1835, decode.d1.loss_dice: 0.5020, decode.d2.loss_cls: 0.0546, decode.d2.loss_mask: 0.1824, decode.d2.loss_dice: 0.5004, decode.d3.loss_cls: 0.0449, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.4980, decode.d4.loss_cls: 0.0483, decode.d4.loss_mask: 0.1826, decode.d4.loss_dice: 0.4974, decode.d5.loss_cls: 0.0547, decode.d5.loss_mask: 0.1822, decode.d5.loss_dice: 0.4980, decode.d6.loss_cls: 0.0460, decode.d6.loss_mask: 0.1823, decode.d6.loss_dice: 0.5012, decode.d7.loss_cls: 0.0457, decode.d7.loss_mask: 0.1818, decode.d7.loss_dice: 0.4990, decode.d8.loss_cls: 0.0452, decode.d8.loss_mask: 0.1818, decode.d8.loss_dice: 0.4966, loss: 7.5750 +2022-05-11 09:03:00,722 - mmseg - INFO - Iter [71300/80000] lr: 1.562e-07, eta: 5:14:29, time: 1.795, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0471, decode.loss_mask: 0.1776, decode.loss_dice: 0.4883, decode.d0.loss_cls: 0.2839, decode.d0.loss_mask: 0.1830, decode.d0.loss_dice: 0.5078, decode.d1.loss_cls: 0.0522, decode.d1.loss_mask: 0.1784, decode.d1.loss_dice: 0.4915, decode.d2.loss_cls: 0.0457, decode.d2.loss_mask: 0.1774, decode.d2.loss_dice: 0.4911, decode.d3.loss_cls: 0.0448, decode.d3.loss_mask: 0.1774, decode.d3.loss_dice: 0.4892, decode.d4.loss_cls: 0.0486, decode.d4.loss_mask: 0.1775, decode.d4.loss_dice: 0.4880, decode.d5.loss_cls: 0.0435, decode.d5.loss_mask: 0.1775, decode.d5.loss_dice: 0.4878, decode.d6.loss_cls: 0.0419, decode.d6.loss_mask: 0.1771, decode.d6.loss_dice: 0.4848, 
decode.d7.loss_cls: 0.0481, decode.d7.loss_mask: 0.1777, decode.d7.loss_dice: 0.4891, decode.d8.loss_cls: 0.0533, decode.d8.loss_mask: 0.1778, decode.d8.loss_dice: 0.4887, loss: 7.3968 +2022-05-11 09:04:30,963 - mmseg - INFO - Iter [71350/80000] lr: 1.553e-07, eta: 5:12:34, time: 1.807, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0421, decode.loss_mask: 0.1832, decode.loss_dice: 0.4935, decode.d0.loss_cls: 0.2907, decode.d0.loss_mask: 0.1890, decode.d0.loss_dice: 0.5157, decode.d1.loss_cls: 0.0593, decode.d1.loss_mask: 0.1847, decode.d1.loss_dice: 0.5016, decode.d2.loss_cls: 0.0533, decode.d2.loss_mask: 0.1839, decode.d2.loss_dice: 0.4977, decode.d3.loss_cls: 0.0459, decode.d3.loss_mask: 0.1837, decode.d3.loss_dice: 0.4935, decode.d4.loss_cls: 0.0490, decode.d4.loss_mask: 0.1843, decode.d4.loss_dice: 0.4976, decode.d5.loss_cls: 0.0458, decode.d5.loss_mask: 0.1834, decode.d5.loss_dice: 0.4955, decode.d6.loss_cls: 0.0471, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4953, decode.d7.loss_cls: 0.0418, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.4925, decode.d8.loss_cls: 0.0443, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4944, loss: 7.5396 +2022-05-11 09:06:01,883 - mmseg - INFO - Iter [71400/80000] lr: 1.544e-07, eta: 5:10:39, time: 1.819, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0439, decode.loss_mask: 0.1842, decode.loss_dice: 0.4807, decode.d0.loss_cls: 0.2916, decode.d0.loss_mask: 0.1897, decode.d0.loss_dice: 0.5095, decode.d1.loss_cls: 0.0600, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.4912, decode.d2.loss_cls: 0.0531, decode.d2.loss_mask: 0.1844, decode.d2.loss_dice: 0.4902, decode.d3.loss_cls: 0.0568, decode.d3.loss_mask: 0.1837, decode.d3.loss_dice: 0.4891, decode.d4.loss_cls: 0.0487, decode.d4.loss_mask: 0.1839, decode.d4.loss_dice: 0.4856, decode.d5.loss_cls: 0.0471, decode.d5.loss_mask: 0.1841, decode.d5.loss_dice: 0.4849, decode.d6.loss_cls: 0.0474, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4851, 
decode.d7.loss_cls: 0.0490, decode.d7.loss_mask: 0.1834, decode.d7.loss_dice: 0.4816, decode.d8.loss_cls: 0.0471, decode.d8.loss_mask: 0.1839, decode.d8.loss_dice: 0.4830, loss: 7.4713 +2022-05-11 09:07:35,818 - mmseg - INFO - Iter [71450/80000] lr: 1.535e-07, eta: 5:08:45, time: 1.879, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0519, decode.loss_mask: 0.1818, decode.loss_dice: 0.4954, decode.d0.loss_cls: 0.2894, decode.d0.loss_mask: 0.1871, decode.d0.loss_dice: 0.5197, decode.d1.loss_cls: 0.0583, decode.d1.loss_mask: 0.1827, decode.d1.loss_dice: 0.5062, decode.d2.loss_cls: 0.0571, decode.d2.loss_mask: 0.1821, decode.d2.loss_dice: 0.4994, decode.d3.loss_cls: 0.0554, decode.d3.loss_mask: 0.1816, decode.d3.loss_dice: 0.4995, decode.d4.loss_cls: 0.0549, decode.d4.loss_mask: 0.1820, decode.d4.loss_dice: 0.4959, decode.d5.loss_cls: 0.0540, decode.d5.loss_mask: 0.1817, decode.d5.loss_dice: 0.4970, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1817, decode.d6.loss_dice: 0.5002, decode.d7.loss_cls: 0.0544, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4969, decode.d8.loss_cls: 0.0528, decode.d8.loss_mask: 0.1823, decode.d8.loss_dice: 0.4976, loss: 7.6131 +2022-05-11 09:09:07,104 - mmseg - INFO - Iter [71500/80000] lr: 1.526e-07, eta: 5:06:51, time: 1.826, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0525, decode.loss_mask: 0.1750, decode.loss_dice: 0.4875, decode.d0.loss_cls: 0.3067, decode.d0.loss_mask: 0.1809, decode.d0.loss_dice: 0.5129, decode.d1.loss_cls: 0.0626, decode.d1.loss_mask: 0.1761, decode.d1.loss_dice: 0.4971, decode.d2.loss_cls: 0.0604, decode.d2.loss_mask: 0.1752, decode.d2.loss_dice: 0.4918, decode.d3.loss_cls: 0.0555, decode.d3.loss_mask: 0.1750, decode.d3.loss_dice: 0.4903, decode.d4.loss_cls: 0.0530, decode.d4.loss_mask: 0.1749, decode.d4.loss_dice: 0.4897, decode.d5.loss_cls: 0.0498, decode.d5.loss_mask: 0.1748, decode.d5.loss_dice: 0.4927, decode.d6.loss_cls: 0.0580, decode.d6.loss_mask: 0.1748, decode.d6.loss_dice: 0.4950, 
decode.d7.loss_cls: 0.0504, decode.d7.loss_mask: 0.1752, decode.d7.loss_dice: 0.4890, decode.d8.loss_cls: 0.0520, decode.d8.loss_mask: 0.1749, decode.d8.loss_dice: 0.4923, loss: 7.4960 +2022-05-11 09:10:36,180 - mmseg - INFO - Iter [71550/80000] lr: 1.517e-07, eta: 5:04:55, time: 1.781, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0425, decode.loss_mask: 0.1860, decode.loss_dice: 0.4863, decode.d0.loss_cls: 0.2933, decode.d0.loss_mask: 0.1933, decode.d0.loss_dice: 0.5017, decode.d1.loss_cls: 0.0625, decode.d1.loss_mask: 0.1873, decode.d1.loss_dice: 0.4878, decode.d2.loss_cls: 0.0511, decode.d2.loss_mask: 0.1865, decode.d2.loss_dice: 0.4878, decode.d3.loss_cls: 0.0497, decode.d3.loss_mask: 0.1857, decode.d3.loss_dice: 0.4836, decode.d4.loss_cls: 0.0459, decode.d4.loss_mask: 0.1858, decode.d4.loss_dice: 0.4880, decode.d5.loss_cls: 0.0465, decode.d5.loss_mask: 0.1859, decode.d5.loss_dice: 0.4837, decode.d6.loss_cls: 0.0425, decode.d6.loss_mask: 0.1861, decode.d6.loss_dice: 0.4853, decode.d7.loss_cls: 0.0409, decode.d7.loss_mask: 0.1859, decode.d7.loss_dice: 0.4841, decode.d8.loss_cls: 0.0434, decode.d8.loss_mask: 0.1860, decode.d8.loss_dice: 0.4831, loss: 7.4583 +2022-05-11 09:12:06,804 - mmseg - INFO - Iter [71600/80000] lr: 1.508e-07, eta: 5:03:01, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0343, decode.loss_mask: 0.1782, decode.loss_dice: 0.4804, decode.d0.loss_cls: 0.2906, decode.d0.loss_mask: 0.1842, decode.d0.loss_dice: 0.4965, decode.d1.loss_cls: 0.0517, decode.d1.loss_mask: 0.1791, decode.d1.loss_dice: 0.4853, decode.d2.loss_cls: 0.0407, decode.d2.loss_mask: 0.1788, decode.d2.loss_dice: 0.4837, decode.d3.loss_cls: 0.0425, decode.d3.loss_mask: 0.1780, decode.d3.loss_dice: 0.4783, decode.d4.loss_cls: 0.0412, decode.d4.loss_mask: 0.1785, decode.d4.loss_dice: 0.4755, decode.d5.loss_cls: 0.0393, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4771, decode.d6.loss_cls: 0.0379, decode.d6.loss_mask: 0.1783, decode.d6.loss_dice: 0.4784, 
decode.d7.loss_cls: 0.0373, decode.d7.loss_mask: 0.1782, decode.d7.loss_dice: 0.4773, decode.d8.loss_cls: 0.0378, decode.d8.loss_mask: 0.1777, decode.d8.loss_dice: 0.4750, loss: 7.2499 +2022-05-11 09:13:40,046 - mmseg - INFO - Iter [71650/80000] lr: 1.499e-07, eta: 5:01:07, time: 1.865, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0469, decode.loss_mask: 0.1853, decode.loss_dice: 0.4892, decode.d0.loss_cls: 0.2842, decode.d0.loss_mask: 0.1922, decode.d0.loss_dice: 0.5065, decode.d1.loss_cls: 0.0601, decode.d1.loss_mask: 0.1873, decode.d1.loss_dice: 0.4936, decode.d2.loss_cls: 0.0579, decode.d2.loss_mask: 0.1860, decode.d2.loss_dice: 0.4926, decode.d3.loss_cls: 0.0485, decode.d3.loss_mask: 0.1856, decode.d3.loss_dice: 0.4869, decode.d4.loss_cls: 0.0507, decode.d4.loss_mask: 0.1854, decode.d4.loss_dice: 0.4876, decode.d5.loss_cls: 0.0473, decode.d5.loss_mask: 0.1851, decode.d5.loss_dice: 0.4884, decode.d6.loss_cls: 0.0446, decode.d6.loss_mask: 0.1851, decode.d6.loss_dice: 0.4866, decode.d7.loss_cls: 0.0414, decode.d7.loss_mask: 0.1850, decode.d7.loss_dice: 0.4827, decode.d8.loss_cls: 0.0475, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.4891, loss: 7.4945 +2022-05-11 09:15:09,134 - mmseg - INFO - Iter [71700/80000] lr: 1.490e-07, eta: 4:59:12, time: 1.782, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0500, decode.loss_mask: 0.1807, decode.loss_dice: 0.4871, decode.d0.loss_cls: 0.2944, decode.d0.loss_mask: 0.1884, decode.d0.loss_dice: 0.5157, decode.d1.loss_cls: 0.0759, decode.d1.loss_mask: 0.1819, decode.d1.loss_dice: 0.4952, decode.d2.loss_cls: 0.0598, decode.d2.loss_mask: 0.1814, decode.d2.loss_dice: 0.4923, decode.d3.loss_cls: 0.0558, decode.d3.loss_mask: 0.1818, decode.d3.loss_dice: 0.4896, decode.d4.loss_cls: 0.0520, decode.d4.loss_mask: 0.1815, decode.d4.loss_dice: 0.4895, decode.d5.loss_cls: 0.0553, decode.d5.loss_mask: 0.1818, decode.d5.loss_dice: 0.4916, decode.d6.loss_cls: 0.0557, decode.d6.loss_mask: 0.1809, decode.d6.loss_dice: 0.4868, 
decode.d7.loss_cls: 0.0509, decode.d7.loss_mask: 0.1810, decode.d7.loss_dice: 0.4910, decode.d8.loss_cls: 0.0513, decode.d8.loss_mask: 0.1806, decode.d8.loss_dice: 0.4864, loss: 7.5466 +2022-05-11 09:16:39,120 - mmseg - INFO - Iter [71750/80000] lr: 1.481e-07, eta: 4:57:18, time: 1.800, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0486, decode.loss_mask: 0.1786, decode.loss_dice: 0.4855, decode.d0.loss_cls: 0.2921, decode.d0.loss_mask: 0.1835, decode.d0.loss_dice: 0.5099, decode.d1.loss_cls: 0.0647, decode.d1.loss_mask: 0.1791, decode.d1.loss_dice: 0.4928, decode.d2.loss_cls: 0.0613, decode.d2.loss_mask: 0.1786, decode.d2.loss_dice: 0.4871, decode.d3.loss_cls: 0.0499, decode.d3.loss_mask: 0.1787, decode.d3.loss_dice: 0.4863, decode.d4.loss_cls: 0.0546, decode.d4.loss_mask: 0.1784, decode.d4.loss_dice: 0.4855, decode.d5.loss_cls: 0.0523, decode.d5.loss_mask: 0.1786, decode.d5.loss_dice: 0.4853, decode.d6.loss_cls: 0.0475, decode.d6.loss_mask: 0.1781, decode.d6.loss_dice: 0.4867, decode.d7.loss_cls: 0.0493, decode.d7.loss_mask: 0.1783, decode.d7.loss_dice: 0.4837, decode.d8.loss_cls: 0.0469, decode.d8.loss_mask: 0.1782, decode.d8.loss_dice: 0.4861, loss: 7.4464 +2022-05-11 09:18:07,791 - mmseg - INFO - Iter [71800/80000] lr: 1.472e-07, eta: 4:55:23, time: 1.773, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0400, decode.loss_mask: 0.1741, decode.loss_dice: 0.4817, decode.d0.loss_cls: 0.2915, decode.d0.loss_mask: 0.1808, decode.d0.loss_dice: 0.4980, decode.d1.loss_cls: 0.0507, decode.d1.loss_mask: 0.1752, decode.d1.loss_dice: 0.4852, decode.d2.loss_cls: 0.0422, decode.d2.loss_mask: 0.1739, decode.d2.loss_dice: 0.4797, decode.d3.loss_cls: 0.0385, decode.d3.loss_mask: 0.1737, decode.d3.loss_dice: 0.4806, decode.d4.loss_cls: 0.0386, decode.d4.loss_mask: 0.1743, decode.d4.loss_dice: 0.4786, decode.d5.loss_cls: 0.0384, decode.d5.loss_mask: 0.1741, decode.d5.loss_dice: 0.4811, decode.d6.loss_cls: 0.0385, decode.d6.loss_mask: 0.1742, decode.d6.loss_dice: 0.4789, 
decode.d7.loss_cls: 0.0424, decode.d7.loss_mask: 0.1739, decode.d7.loss_dice: 0.4765, decode.d8.loss_cls: 0.0423, decode.d8.loss_mask: 0.1738, decode.d8.loss_dice: 0.4771, loss: 7.2285 +2022-05-11 09:19:39,873 - mmseg - INFO - Iter [71850/80000] lr: 1.463e-07, eta: 4:53:30, time: 1.842, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0474, decode.loss_mask: 0.1805, decode.loss_dice: 0.4930, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1865, decode.d0.loss_dice: 0.5076, decode.d1.loss_cls: 0.0732, decode.d1.loss_mask: 0.1816, decode.d1.loss_dice: 0.4968, decode.d2.loss_cls: 0.0523, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.4919, decode.d3.loss_cls: 0.0559, decode.d3.loss_mask: 0.1810, decode.d3.loss_dice: 0.4922, decode.d4.loss_cls: 0.0543, decode.d4.loss_mask: 0.1809, decode.d4.loss_dice: 0.4932, decode.d5.loss_cls: 0.0492, decode.d5.loss_mask: 0.1812, decode.d5.loss_dice: 0.4923, decode.d6.loss_cls: 0.0507, decode.d6.loss_mask: 0.1810, decode.d6.loss_dice: 0.4913, decode.d7.loss_cls: 0.0477, decode.d7.loss_mask: 0.1804, decode.d7.loss_dice: 0.4885, decode.d8.loss_cls: 0.0499, decode.d8.loss_mask: 0.1799, decode.d8.loss_dice: 0.4916, loss: 7.5312 +2022-05-11 09:21:10,041 - mmseg - INFO - Iter [71900/80000] lr: 1.454e-07, eta: 4:51:36, time: 1.803, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0522, decode.loss_mask: 0.1791, decode.loss_dice: 0.4940, decode.d0.loss_cls: 0.3026, decode.d0.loss_mask: 0.1858, decode.d0.loss_dice: 0.5146, decode.d1.loss_cls: 0.0812, decode.d1.loss_mask: 0.1804, decode.d1.loss_dice: 0.4987, decode.d2.loss_cls: 0.0625, decode.d2.loss_mask: 0.1798, decode.d2.loss_dice: 0.4973, decode.d3.loss_cls: 0.0563, decode.d3.loss_mask: 0.1796, decode.d3.loss_dice: 0.4905, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1796, decode.d4.loss_dice: 0.4914, decode.d5.loss_cls: 0.0539, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4945, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1794, decode.d6.loss_dice: 0.4932, 
decode.d7.loss_cls: 0.0558, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4921, decode.d8.loss_cls: 0.0564, decode.d8.loss_mask: 0.1793, decode.d8.loss_dice: 0.4933, loss: 7.5971 +2022-05-11 09:22:41,399 - mmseg - INFO - Iter [71950/80000] lr: 1.445e-07, eta: 4:49:42, time: 1.827, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0512, decode.loss_mask: 0.1778, decode.loss_dice: 0.4896, decode.d0.loss_cls: 0.2945, decode.d0.loss_mask: 0.1838, decode.d0.loss_dice: 0.5092, decode.d1.loss_cls: 0.0633, decode.d1.loss_mask: 0.1790, decode.d1.loss_dice: 0.4989, decode.d2.loss_cls: 0.0578, decode.d2.loss_mask: 0.1785, decode.d2.loss_dice: 0.4963, decode.d3.loss_cls: 0.0514, decode.d3.loss_mask: 0.1785, decode.d3.loss_dice: 0.4928, decode.d4.loss_cls: 0.0511, decode.d4.loss_mask: 0.1783, decode.d4.loss_dice: 0.4918, decode.d5.loss_cls: 0.0490, decode.d5.loss_mask: 0.1782, decode.d5.loss_dice: 0.4914, decode.d6.loss_cls: 0.0502, decode.d6.loss_mask: 0.1778, decode.d6.loss_dice: 0.4904, decode.d7.loss_cls: 0.0538, decode.d7.loss_mask: 0.1778, decode.d7.loss_dice: 0.4916, decode.d8.loss_cls: 0.0516, decode.d8.loss_mask: 0.1781, decode.d8.loss_dice: 0.4904, loss: 7.5040 +2022-05-11 09:24:12,732 - mmseg - INFO - Saving checkpoint at 72000 iterations +2022-05-11 09:24:42,050 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 09:24:42,058 - mmseg - INFO - Iter [72000/80000] lr: 1.436e-07, eta: 4:47:58, time: 2.411, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0437, decode.loss_mask: 0.1712, decode.loss_dice: 0.4781, decode.d0.loss_cls: 0.2941, decode.d0.loss_mask: 0.1777, decode.d0.loss_dice: 0.5002, decode.d1.loss_cls: 0.0519, decode.d1.loss_mask: 0.1723, decode.d1.loss_dice: 0.4830, decode.d2.loss_cls: 0.0528, decode.d2.loss_mask: 0.1723, decode.d2.loss_dice: 0.4811, decode.d3.loss_cls: 0.0454, decode.d3.loss_mask: 0.1714, decode.d3.loss_dice: 0.4779, decode.d4.loss_cls: 0.0429, decode.d4.loss_mask: 0.1715, 
decode.d4.loss_dice: 0.4762, decode.d5.loss_cls: 0.0425, decode.d5.loss_mask: 0.1712, decode.d5.loss_dice: 0.4768, decode.d6.loss_cls: 0.0412, decode.d6.loss_mask: 0.1714, decode.d6.loss_dice: 0.4749, decode.d7.loss_cls: 0.0418, decode.d7.loss_mask: 0.1717, decode.d7.loss_dice: 0.4758, decode.d8.loss_cls: 0.0432, decode.d8.loss_mask: 0.1712, decode.d8.loss_dice: 0.4776, loss: 7.2231 +2022-05-11 09:26:37,400 - mmseg - INFO - per class results: +2022-05-11 09:26:37,409 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.64 | 99.25 | +| sidewalk | 88.86 | 94.05 | +| building | 94.36 | 97.0 | +| wall | 66.8 | 80.26 | +| fence | 74.45 | 81.27 | +| pole | 71.36 | 84.26 | +| traffic light | 77.02 | 87.65 | +| traffic sign | 84.06 | 90.88 | +| vegetation | 93.4 | 96.87 | +| terrain | 68.7 | 77.76 | +| sky | 95.71 | 98.53 | +| person | 86.63 | 93.96 | +| rider | 74.09 | 85.01 | +| car | 96.15 | 98.3 | +| truck | 91.85 | 94.39 | +| bus | 93.5 | 96.65 | +| train | 87.91 | 90.98 | +| motorcycle | 76.95 | 87.13 | +| bicycle | 82.73 | 91.42 | ++---------------+-------+-------+ +2022-05-11 09:26:37,409 - mmseg - INFO - Summary: +2022-05-11 09:26:37,409 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 84.38 | 90.82 | ++-------+-------+-------+ +2022-05-11 09:26:37,412 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 09:26:37,413 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8438, mAcc: 0.9082, IoU.road: 0.9864, IoU.sidewalk: 0.8886, IoU.building: 0.9436, IoU.wall: 0.6680, IoU.fence: 0.7445, IoU.pole: 0.7136, IoU.traffic light: 0.7702, IoU.traffic sign: 0.8406, IoU.vegetation: 0.9340, IoU.terrain: 0.6870, IoU.sky: 0.9571, IoU.person: 0.8663, IoU.rider: 0.7409, IoU.car: 0.9615, IoU.truck: 0.9185, IoU.bus: 0.9350, IoU.train: 0.8791, IoU.motorcycle: 0.7695, IoU.bicycle: 0.8273, Acc.road: 0.9925, 
Acc.sidewalk: 0.9405, Acc.building: 0.9700, Acc.wall: 0.8026, Acc.fence: 0.8127, Acc.pole: 0.8426, Acc.traffic light: 0.8765, Acc.traffic sign: 0.9088, Acc.vegetation: 0.9687, Acc.terrain: 0.7776, Acc.sky: 0.9853, Acc.person: 0.9396, Acc.rider: 0.8501, Acc.car: 0.9830, Acc.truck: 0.9439, Acc.bus: 0.9665, Acc.train: 0.9098, Acc.motorcycle: 0.8713, Acc.bicycle: 0.9142 +2022-05-11 09:28:07,254 - mmseg - INFO - Iter [72050/80000] lr: 1.427e-07, eta: 4:46:42, time: 4.106, data_time: 2.327, memory: 69063, decode.loss_cls: 0.0485, decode.loss_mask: 0.1765, decode.loss_dice: 0.4825, decode.d0.loss_cls: 0.2962, decode.d0.loss_mask: 0.1820, decode.d0.loss_dice: 0.5013, decode.d1.loss_cls: 0.0680, decode.d1.loss_mask: 0.1777, decode.d1.loss_dice: 0.4915, decode.d2.loss_cls: 0.0567, decode.d2.loss_mask: 0.1767, decode.d2.loss_dice: 0.4804, decode.d3.loss_cls: 0.0544, decode.d3.loss_mask: 0.1764, decode.d3.loss_dice: 0.4822, decode.d4.loss_cls: 0.0571, decode.d4.loss_mask: 0.1769, decode.d4.loss_dice: 0.4883, decode.d5.loss_cls: 0.0553, decode.d5.loss_mask: 0.1766, decode.d5.loss_dice: 0.4842, decode.d6.loss_cls: 0.0507, decode.d6.loss_mask: 0.1765, decode.d6.loss_dice: 0.4830, decode.d7.loss_cls: 0.0457, decode.d7.loss_mask: 0.1769, decode.d7.loss_dice: 0.4827, decode.d8.loss_cls: 0.0487, decode.d8.loss_mask: 0.1763, decode.d8.loss_dice: 0.4813, loss: 7.4110 +2022-05-11 09:29:37,670 - mmseg - INFO - Iter [72100/80000] lr: 1.418e-07, eta: 4:44:48, time: 1.807, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0453, decode.loss_mask: 0.1810, decode.loss_dice: 0.4898, decode.d0.loss_cls: 0.3012, decode.d0.loss_mask: 0.1877, decode.d0.loss_dice: 0.5160, decode.d1.loss_cls: 0.0593, decode.d1.loss_mask: 0.1824, decode.d1.loss_dice: 0.5004, decode.d2.loss_cls: 0.0494, decode.d2.loss_mask: 0.1812, decode.d2.loss_dice: 0.4974, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1811, decode.d3.loss_dice: 0.4943, decode.d4.loss_cls: 0.0459, decode.d4.loss_mask: 0.1812, 
decode.d4.loss_dice: 0.4959, decode.d5.loss_cls: 0.0513, decode.d5.loss_mask: 0.1812, decode.d5.loss_dice: 0.4936, decode.d6.loss_cls: 0.0498, decode.d6.loss_mask: 0.1811, decode.d6.loss_dice: 0.4921, decode.d7.loss_cls: 0.0428, decode.d7.loss_mask: 0.1811, decode.d7.loss_dice: 0.4913, decode.d8.loss_cls: 0.0502, decode.d8.loss_mask: 0.1805, decode.d8.loss_dice: 0.4941, loss: 7.5292 +2022-05-11 09:31:09,753 - mmseg - INFO - Iter [72150/80000] lr: 1.409e-07, eta: 4:42:55, time: 1.843, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0563, decode.loss_mask: 0.1808, decode.loss_dice: 0.4925, decode.d0.loss_cls: 0.2971, decode.d0.loss_mask: 0.1878, decode.d0.loss_dice: 0.5113, decode.d1.loss_cls: 0.0729, decode.d1.loss_mask: 0.1823, decode.d1.loss_dice: 0.4983, decode.d2.loss_cls: 0.0559, decode.d2.loss_mask: 0.1816, decode.d2.loss_dice: 0.4955, decode.d3.loss_cls: 0.0609, decode.d3.loss_mask: 0.1815, decode.d3.loss_dice: 0.4911, decode.d4.loss_cls: 0.0580, decode.d4.loss_mask: 0.1812, decode.d4.loss_dice: 0.4908, decode.d5.loss_cls: 0.0573, decode.d5.loss_mask: 0.1811, decode.d5.loss_dice: 0.4873, decode.d6.loss_cls: 0.0548, decode.d6.loss_mask: 0.1809, decode.d6.loss_dice: 0.4903, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1807, decode.d7.loss_dice: 0.4892, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1808, decode.d8.loss_dice: 0.4886, loss: 7.5784 +2022-05-11 09:32:41,350 - mmseg - INFO - Iter [72200/80000] lr: 1.400e-07, eta: 4:41:01, time: 1.832, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0479, decode.loss_mask: 0.1789, decode.loss_dice: 0.4903, decode.d0.loss_cls: 0.2945, decode.d0.loss_mask: 0.1848, decode.d0.loss_dice: 0.5072, decode.d1.loss_cls: 0.0614, decode.d1.loss_mask: 0.1805, decode.d1.loss_dice: 0.4972, decode.d2.loss_cls: 0.0556, decode.d2.loss_mask: 0.1797, decode.d2.loss_dice: 0.4869, decode.d3.loss_cls: 0.0497, decode.d3.loss_mask: 0.1796, decode.d3.loss_dice: 0.4863, decode.d4.loss_cls: 0.0501, decode.d4.loss_mask: 0.1793, 
decode.d4.loss_dice: 0.4863, decode.d5.loss_cls: 0.0473, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4885, decode.d6.loss_cls: 0.0430, decode.d6.loss_mask: 0.1792, decode.d6.loss_dice: 0.4863, decode.d7.loss_cls: 0.0436, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.4869, decode.d8.loss_cls: 0.0479, decode.d8.loss_mask: 0.1791, decode.d8.loss_dice: 0.4858, loss: 7.4426 +2022-05-11 09:34:11,691 - mmseg - INFO - Iter [72250/80000] lr: 1.391e-07, eta: 4:39:08, time: 1.807, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0478, decode.loss_mask: 0.1779, decode.loss_dice: 0.4895, decode.d0.loss_cls: 0.2909, decode.d0.loss_mask: 0.1836, decode.d0.loss_dice: 0.5101, decode.d1.loss_cls: 0.0616, decode.d1.loss_mask: 0.1791, decode.d1.loss_dice: 0.4952, decode.d2.loss_cls: 0.0535, decode.d2.loss_mask: 0.1787, decode.d2.loss_dice: 0.4955, decode.d3.loss_cls: 0.0512, decode.d3.loss_mask: 0.1783, decode.d3.loss_dice: 0.4918, decode.d4.loss_cls: 0.0493, decode.d4.loss_mask: 0.1783, decode.d4.loss_dice: 0.4924, decode.d5.loss_cls: 0.0476, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4878, decode.d6.loss_cls: 0.0530, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4917, decode.d7.loss_cls: 0.0486, decode.d7.loss_mask: 0.1776, decode.d7.loss_dice: 0.4890, decode.d8.loss_cls: 0.0479, decode.d8.loss_mask: 0.1780, decode.d8.loss_dice: 0.4912, loss: 7.4728 +2022-05-11 09:35:40,865 - mmseg - INFO - Iter [72300/80000] lr: 1.382e-07, eta: 4:37:14, time: 1.784, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0515, decode.loss_mask: 0.1803, decode.loss_dice: 0.4904, decode.d0.loss_cls: 0.3148, decode.d0.loss_mask: 0.1881, decode.d0.loss_dice: 0.5125, decode.d1.loss_cls: 0.0678, decode.d1.loss_mask: 0.1824, decode.d1.loss_dice: 0.4996, decode.d2.loss_cls: 0.0617, decode.d2.loss_mask: 0.1812, decode.d2.loss_dice: 0.4957, decode.d3.loss_cls: 0.0549, decode.d3.loss_mask: 0.1804, decode.d3.loss_dice: 0.4934, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1811, 
decode.d4.loss_dice: 0.4889, decode.d5.loss_cls: 0.0552, decode.d5.loss_mask: 0.1809, decode.d5.loss_dice: 0.4946, decode.d6.loss_cls: 0.0549, decode.d6.loss_mask: 0.1807, decode.d6.loss_dice: 0.4911, decode.d7.loss_cls: 0.0538, decode.d7.loss_mask: 0.1803, decode.d7.loss_dice: 0.4901, decode.d8.loss_cls: 0.0483, decode.d8.loss_mask: 0.1804, decode.d8.loss_dice: 0.4934, loss: 7.5842 +2022-05-11 09:37:12,356 - mmseg - INFO - Iter [72350/80000] lr: 1.373e-07, eta: 4:35:20, time: 1.830, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0554, decode.loss_mask: 0.1738, decode.loss_dice: 0.4854, decode.d0.loss_cls: 0.2839, decode.d0.loss_mask: 0.1796, decode.d0.loss_dice: 0.5052, decode.d1.loss_cls: 0.0621, decode.d1.loss_mask: 0.1755, decode.d1.loss_dice: 0.4953, decode.d2.loss_cls: 0.0558, decode.d2.loss_mask: 0.1747, decode.d2.loss_dice: 0.4895, decode.d3.loss_cls: 0.0524, decode.d3.loss_mask: 0.1740, decode.d3.loss_dice: 0.4874, decode.d4.loss_cls: 0.0473, decode.d4.loss_mask: 0.1743, decode.d4.loss_dice: 0.4889, decode.d5.loss_cls: 0.0516, decode.d5.loss_mask: 0.1746, decode.d5.loss_dice: 0.4872, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1740, decode.d6.loss_dice: 0.4856, decode.d7.loss_cls: 0.0500, decode.d7.loss_mask: 0.1738, decode.d7.loss_dice: 0.4873, decode.d8.loss_cls: 0.0467, decode.d8.loss_mask: 0.1741, decode.d8.loss_dice: 0.4900, loss: 7.4065 +2022-05-11 09:38:44,447 - mmseg - INFO - Iter [72400/80000] lr: 1.364e-07, eta: 4:33:28, time: 1.842, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0495, decode.loss_mask: 0.1800, decode.loss_dice: 0.4804, decode.d0.loss_cls: 0.3043, decode.d0.loss_mask: 0.1864, decode.d0.loss_dice: 0.5041, decode.d1.loss_cls: 0.0661, decode.d1.loss_mask: 0.1813, decode.d1.loss_dice: 0.4915, decode.d2.loss_cls: 0.0591, decode.d2.loss_mask: 0.1810, decode.d2.loss_dice: 0.4883, decode.d3.loss_cls: 0.0548, decode.d3.loss_mask: 0.1803, decode.d3.loss_dice: 0.4860, decode.d4.loss_cls: 0.0479, decode.d4.loss_mask: 0.1803, 
decode.d4.loss_dice: 0.4860, decode.d5.loss_cls: 0.0514, decode.d5.loss_mask: 0.1805, decode.d5.loss_dice: 0.4871, decode.d6.loss_cls: 0.0513, decode.d6.loss_mask: 0.1800, decode.d6.loss_dice: 0.4848, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1800, decode.d7.loss_dice: 0.4823, decode.d8.loss_cls: 0.0532, decode.d8.loss_mask: 0.1803, decode.d8.loss_dice: 0.4805, loss: 7.4682 +2022-05-11 09:40:13,957 - mmseg - INFO - Iter [72450/80000] lr: 1.355e-07, eta: 4:31:34, time: 1.789, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0524, decode.loss_mask: 0.1805, decode.loss_dice: 0.5021, decode.d0.loss_cls: 0.2888, decode.d0.loss_mask: 0.1867, decode.d0.loss_dice: 0.5222, decode.d1.loss_cls: 0.0672, decode.d1.loss_mask: 0.1834, decode.d1.loss_dice: 0.5079, decode.d2.loss_cls: 0.0585, decode.d2.loss_mask: 0.1823, decode.d2.loss_dice: 0.5101, decode.d3.loss_cls: 0.0542, decode.d3.loss_mask: 0.1818, decode.d3.loss_dice: 0.5060, decode.d4.loss_cls: 0.0492, decode.d4.loss_mask: 0.1818, decode.d4.loss_dice: 0.5037, decode.d5.loss_cls: 0.0543, decode.d5.loss_mask: 0.1812, decode.d5.loss_dice: 0.5063, decode.d6.loss_cls: 0.0505, decode.d6.loss_mask: 0.1816, decode.d6.loss_dice: 0.5028, decode.d7.loss_cls: 0.0486, decode.d7.loss_mask: 0.1806, decode.d7.loss_dice: 0.5029, decode.d8.loss_cls: 0.0504, decode.d8.loss_mask: 0.1808, decode.d8.loss_dice: 0.5057, loss: 7.6646 +2022-05-11 09:41:44,349 - mmseg - INFO - Iter [72500/80000] lr: 1.346e-07, eta: 4:29:41, time: 1.808, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0394, decode.loss_mask: 0.1825, decode.loss_dice: 0.4932, decode.d0.loss_cls: 0.2906, decode.d0.loss_mask: 0.1893, decode.d0.loss_dice: 0.5117, decode.d1.loss_cls: 0.0593, decode.d1.loss_mask: 0.1841, decode.d1.loss_dice: 0.4997, decode.d2.loss_cls: 0.0505, decode.d2.loss_mask: 0.1835, decode.d2.loss_dice: 0.5010, decode.d3.loss_cls: 0.0442, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4983, decode.d4.loss_cls: 0.0422, decode.d4.loss_mask: 0.1826, 
decode.d4.loss_dice: 0.4941, decode.d5.loss_cls: 0.0420, decode.d5.loss_mask: 0.1831, decode.d5.loss_dice: 0.4935, decode.d6.loss_cls: 0.0419, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.4938, decode.d7.loss_cls: 0.0456, decode.d7.loss_mask: 0.1823, decode.d7.loss_dice: 0.4912, decode.d8.loss_cls: 0.0431, decode.d8.loss_mask: 0.1823, decode.d8.loss_dice: 0.4948, loss: 7.5053 +2022-05-11 09:43:16,534 - mmseg - INFO - Iter [72550/80000] lr: 1.337e-07, eta: 4:27:48, time: 1.844, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0485, decode.loss_mask: 0.1761, decode.loss_dice: 0.4749, decode.d0.loss_cls: 0.2952, decode.d0.loss_mask: 0.1824, decode.d0.loss_dice: 0.4922, decode.d1.loss_cls: 0.0668, decode.d1.loss_mask: 0.1773, decode.d1.loss_dice: 0.4817, decode.d2.loss_cls: 0.0585, decode.d2.loss_mask: 0.1767, decode.d2.loss_dice: 0.4794, decode.d3.loss_cls: 0.0527, decode.d3.loss_mask: 0.1763, decode.d3.loss_dice: 0.4795, decode.d4.loss_cls: 0.0577, decode.d4.loss_mask: 0.1761, decode.d4.loss_dice: 0.4737, decode.d5.loss_cls: 0.0502, decode.d5.loss_mask: 0.1767, decode.d5.loss_dice: 0.4759, decode.d6.loss_cls: 0.0481, decode.d6.loss_mask: 0.1760, decode.d6.loss_dice: 0.4744, decode.d7.loss_cls: 0.0496, decode.d7.loss_mask: 0.1761, decode.d7.loss_dice: 0.4761, decode.d8.loss_cls: 0.0582, decode.d8.loss_mask: 0.1762, decode.d8.loss_dice: 0.4749, loss: 7.3386 +2022-05-11 09:44:49,328 - mmseg - INFO - Iter [72600/80000] lr: 1.328e-07, eta: 4:25:56, time: 1.856, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0483, decode.loss_mask: 0.1812, decode.loss_dice: 0.4822, decode.d0.loss_cls: 0.3018, decode.d0.loss_mask: 0.1888, decode.d0.loss_dice: 0.5059, decode.d1.loss_cls: 0.0674, decode.d1.loss_mask: 0.1822, decode.d1.loss_dice: 0.4910, decode.d2.loss_cls: 0.0629, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.4848, decode.d3.loss_cls: 0.0579, decode.d3.loss_mask: 0.1805, decode.d3.loss_dice: 0.4825, decode.d4.loss_cls: 0.0535, decode.d4.loss_mask: 0.1809, 
decode.d4.loss_dice: 0.4891, decode.d5.loss_cls: 0.0530, decode.d5.loss_mask: 0.1812, decode.d5.loss_dice: 0.4839, decode.d6.loss_cls: 0.0549, decode.d6.loss_mask: 0.1807, decode.d6.loss_dice: 0.4809, decode.d7.loss_cls: 0.0541, decode.d7.loss_mask: 0.1812, decode.d7.loss_dice: 0.4810, decode.d8.loss_cls: 0.0511, decode.d8.loss_mask: 0.1813, decode.d8.loss_dice: 0.4796, loss: 7.4849 +2022-05-11 09:46:20,877 - mmseg - INFO - Iter [72650/80000] lr: 1.319e-07, eta: 4:24:03, time: 1.831, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0565, decode.loss_mask: 0.1781, decode.loss_dice: 0.4753, decode.d0.loss_cls: 0.3046, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5012, decode.d1.loss_cls: 0.0653, decode.d1.loss_mask: 0.1791, decode.d1.loss_dice: 0.4845, decode.d2.loss_cls: 0.0617, decode.d2.loss_mask: 0.1789, decode.d2.loss_dice: 0.4831, decode.d3.loss_cls: 0.0602, decode.d3.loss_mask: 0.1778, decode.d3.loss_dice: 0.4772, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1783, decode.d4.loss_dice: 0.4761, decode.d5.loss_cls: 0.0572, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4779, decode.d6.loss_cls: 0.0520, decode.d6.loss_mask: 0.1784, decode.d6.loss_dice: 0.4763, decode.d7.loss_cls: 0.0563, decode.d7.loss_mask: 0.1784, decode.d7.loss_dice: 0.4788, decode.d8.loss_cls: 0.0530, decode.d8.loss_mask: 0.1782, decode.d8.loss_dice: 0.4746, loss: 7.4176 +2022-05-11 09:47:52,544 - mmseg - INFO - Iter [72700/80000] lr: 1.310e-07, eta: 4:22:10, time: 1.833, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0395, decode.loss_mask: 0.1765, decode.loss_dice: 0.4849, decode.d0.loss_cls: 0.2851, decode.d0.loss_mask: 0.1829, decode.d0.loss_dice: 0.5054, decode.d1.loss_cls: 0.0537, decode.d1.loss_mask: 0.1780, decode.d1.loss_dice: 0.4905, decode.d2.loss_cls: 0.0507, decode.d2.loss_mask: 0.1777, decode.d2.loss_dice: 0.4918, decode.d3.loss_cls: 0.0443, decode.d3.loss_mask: 0.1771, decode.d3.loss_dice: 0.4865, decode.d4.loss_cls: 0.0434, decode.d4.loss_mask: 0.1770, 
decode.d4.loss_dice: 0.4872, decode.d5.loss_cls: 0.0439, decode.d5.loss_mask: 0.1772, decode.d5.loss_dice: 0.4872, decode.d6.loss_cls: 0.0440, decode.d6.loss_mask: 0.1766, decode.d6.loss_dice: 0.4837, decode.d7.loss_cls: 0.0404, decode.d7.loss_mask: 0.1765, decode.d7.loss_dice: 0.4858, decode.d8.loss_cls: 0.0450, decode.d8.loss_mask: 0.1765, decode.d8.loss_dice: 0.4846, loss: 7.3537 +2022-05-11 09:49:26,606 - mmseg - INFO - Iter [72750/80000] lr: 1.301e-07, eta: 4:20:19, time: 1.881, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0467, decode.loss_mask: 0.1793, decode.loss_dice: 0.4954, decode.d0.loss_cls: 0.2923, decode.d0.loss_mask: 0.1859, decode.d0.loss_dice: 0.5103, decode.d1.loss_cls: 0.0644, decode.d1.loss_mask: 0.1812, decode.d1.loss_dice: 0.4992, decode.d2.loss_cls: 0.0576, decode.d2.loss_mask: 0.1808, decode.d2.loss_dice: 0.4971, decode.d3.loss_cls: 0.0510, decode.d3.loss_mask: 0.1804, decode.d3.loss_dice: 0.4926, decode.d4.loss_cls: 0.0514, decode.d4.loss_mask: 0.1801, decode.d4.loss_dice: 0.4935, decode.d5.loss_cls: 0.0499, decode.d5.loss_mask: 0.1800, decode.d5.loss_dice: 0.4919, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1797, decode.d6.loss_dice: 0.4926, decode.d7.loss_cls: 0.0469, decode.d7.loss_mask: 0.1797, decode.d7.loss_dice: 0.4900, decode.d8.loss_cls: 0.0551, decode.d8.loss_mask: 0.1798, decode.d8.loss_dice: 0.4914, loss: 7.5271 +2022-05-11 09:50:56,362 - mmseg - INFO - Iter [72800/80000] lr: 1.292e-07, eta: 4:18:26, time: 1.795, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0515, decode.loss_mask: 0.1807, decode.loss_dice: 0.4876, decode.d0.loss_cls: 0.3021, decode.d0.loss_mask: 0.1880, decode.d0.loss_dice: 0.5090, decode.d1.loss_cls: 0.0653, decode.d1.loss_mask: 0.1820, decode.d1.loss_dice: 0.4955, decode.d2.loss_cls: 0.0590, decode.d2.loss_mask: 0.1814, decode.d2.loss_dice: 0.4922, decode.d3.loss_cls: 0.0511, decode.d3.loss_mask: 0.1808, decode.d3.loss_dice: 0.4890, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1809, 
decode.d4.loss_dice: 0.4870, decode.d5.loss_cls: 0.0504, decode.d5.loss_mask: 0.1810, decode.d5.loss_dice: 0.4896, decode.d6.loss_cls: 0.0503, decode.d6.loss_mask: 0.1805, decode.d6.loss_dice: 0.4863, decode.d7.loss_cls: 0.0512, decode.d7.loss_mask: 0.1811, decode.d7.loss_dice: 0.4907, decode.d8.loss_cls: 0.0513, decode.d8.loss_mask: 0.1806, decode.d8.loss_dice: 0.4851, loss: 7.5116 +2022-05-11 09:52:25,400 - mmseg - INFO - Iter [72850/80000] lr: 1.283e-07, eta: 4:16:33, time: 1.781, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0579, decode.loss_mask: 0.1811, decode.loss_dice: 0.5102, decode.d0.loss_cls: 0.2938, decode.d0.loss_mask: 0.1876, decode.d0.loss_dice: 0.5273, decode.d1.loss_cls: 0.0811, decode.d1.loss_mask: 0.1829, decode.d1.loss_dice: 0.5124, decode.d2.loss_cls: 0.0747, decode.d2.loss_mask: 0.1816, decode.d2.loss_dice: 0.5119, decode.d3.loss_cls: 0.0706, decode.d3.loss_mask: 0.1812, decode.d3.loss_dice: 0.5108, decode.d4.loss_cls: 0.0739, decode.d4.loss_mask: 0.1812, decode.d4.loss_dice: 0.5089, decode.d5.loss_cls: 0.0637, decode.d5.loss_mask: 0.1811, decode.d5.loss_dice: 0.5037, decode.d6.loss_cls: 0.0587, decode.d6.loss_mask: 0.1808, decode.d6.loss_dice: 0.5063, decode.d7.loss_cls: 0.0648, decode.d7.loss_mask: 0.1811, decode.d7.loss_dice: 0.5090, decode.d8.loss_cls: 0.0641, decode.d8.loss_mask: 0.1811, decode.d8.loss_dice: 0.5085, loss: 7.8321 +2022-05-11 09:53:55,659 - mmseg - INFO - Iter [72900/80000] lr: 1.274e-07, eta: 4:14:40, time: 1.803, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0495, decode.loss_mask: 0.1748, decode.loss_dice: 0.4854, decode.d0.loss_cls: 0.2937, decode.d0.loss_mask: 0.1797, decode.d0.loss_dice: 0.5037, decode.d1.loss_cls: 0.0678, decode.d1.loss_mask: 0.1758, decode.d1.loss_dice: 0.4954, decode.d2.loss_cls: 0.0616, decode.d2.loss_mask: 0.1754, decode.d2.loss_dice: 0.4887, decode.d3.loss_cls: 0.0546, decode.d3.loss_mask: 0.1753, decode.d3.loss_dice: 0.4868, decode.d4.loss_cls: 0.0519, decode.d4.loss_mask: 0.1752, 
decode.d4.loss_dice: 0.4895, decode.d5.loss_cls: 0.0516, decode.d5.loss_mask: 0.1751, decode.d5.loss_dice: 0.4862, decode.d6.loss_cls: 0.0512, decode.d6.loss_mask: 0.1747, decode.d6.loss_dice: 0.4831, decode.d7.loss_cls: 0.0458, decode.d7.loss_mask: 0.1748, decode.d7.loss_dice: 0.4846, decode.d8.loss_cls: 0.0497, decode.d8.loss_mask: 0.1755, decode.d8.loss_dice: 0.4889, loss: 7.4262 +2022-05-11 09:55:27,628 - mmseg - INFO - Iter [72950/80000] lr: 1.265e-07, eta: 4:12:48, time: 1.842, data_time: 0.068, memory: 69063, decode.loss_cls: 0.0555, decode.loss_mask: 0.1780, decode.loss_dice: 0.4937, decode.d0.loss_cls: 0.2956, decode.d0.loss_mask: 0.1841, decode.d0.loss_dice: 0.5233, decode.d1.loss_cls: 0.0712, decode.d1.loss_mask: 0.1799, decode.d1.loss_dice: 0.5038, decode.d2.loss_cls: 0.0591, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4996, decode.d3.loss_cls: 0.0583, decode.d3.loss_mask: 0.1791, decode.d3.loss_dice: 0.4945, decode.d4.loss_cls: 0.0600, decode.d4.loss_mask: 0.1794, decode.d4.loss_dice: 0.5010, decode.d5.loss_cls: 0.0551, decode.d5.loss_mask: 0.1789, decode.d5.loss_dice: 0.5004, decode.d6.loss_cls: 0.0532, decode.d6.loss_mask: 0.1786, decode.d6.loss_dice: 0.4949, decode.d7.loss_cls: 0.0581, decode.d7.loss_mask: 0.1785, decode.d7.loss_dice: 0.4945, decode.d8.loss_cls: 0.0535, decode.d8.loss_mask: 0.1781, decode.d8.loss_dice: 0.4961, loss: 7.6154 +2022-05-11 09:56:58,330 - mmseg - INFO - Saving checkpoint at 73000 iterations +2022-05-11 09:57:27,620 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 09:57:27,622 - mmseg - INFO - Iter [73000/80000] lr: 1.257e-07, eta: 4:11:04, time: 2.398, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0549, decode.loss_mask: 0.1847, decode.loss_dice: 0.4950, decode.d0.loss_cls: 0.2814, decode.d0.loss_mask: 0.1927, decode.d0.loss_dice: 0.5146, decode.d1.loss_cls: 0.0665, decode.d1.loss_mask: 0.1859, decode.d1.loss_dice: 0.4986, decode.d2.loss_cls: 0.0544, 
decode.d2.loss_mask: 0.1859, decode.d2.loss_dice: 0.4971, decode.d3.loss_cls: 0.0540, decode.d3.loss_mask: 0.1856, decode.d3.loss_dice: 0.4973, decode.d4.loss_cls: 0.0506, decode.d4.loss_mask: 0.1852, decode.d4.loss_dice: 0.4967, decode.d5.loss_cls: 0.0489, decode.d5.loss_mask: 0.1850, decode.d5.loss_dice: 0.4953, decode.d6.loss_cls: 0.0435, decode.d6.loss_mask: 0.1856, decode.d6.loss_dice: 0.4955, decode.d7.loss_cls: 0.0523, decode.d7.loss_mask: 0.1854, decode.d7.loss_dice: 0.4941, decode.d8.loss_cls: 0.0525, decode.d8.loss_mask: 0.1853, decode.d8.loss_dice: 0.4952, loss: 7.5998 +2022-05-11 09:59:23,600 - mmseg - INFO - per class results: +2022-05-11 09:59:23,605 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.61 | 99.25 | +| sidewalk | 88.64 | 93.96 | +| building | 94.42 | 97.02 | +| wall | 67.31 | 79.48 | +| fence | 74.44 | 81.64 | +| pole | 71.23 | 83.73 | +| traffic light | 76.9 | 86.83 | +| traffic sign | 84.12 | 90.22 | +| vegetation | 93.37 | 96.99 | +| terrain | 68.51 | 77.63 | +| sky | 95.76 | 98.46 | +| person | 86.74 | 93.86 | +| rider | 74.09 | 85.09 | +| car | 96.18 | 98.3 | +| truck | 91.71 | 94.2 | +| bus | 93.53 | 96.37 | +| train | 87.9 | 90.67 | +| motorcycle | 77.38 | 87.63 | +| bicycle | 82.79 | 91.76 | ++---------------+-------+-------+ +2022-05-11 09:59:23,606 - mmseg - INFO - Summary: +2022-05-11 09:59:23,606 - mmseg - INFO - ++-------+------+-------+ +| aAcc | mIoU | mAcc | ++-------+------+-------+ +| 97.01 | 84.4 | 90.69 | ++-------+------+-------+ +2022-05-11 09:59:23,608 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 09:59:23,608 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8440, mAcc: 0.9069, IoU.road: 0.9861, IoU.sidewalk: 0.8864, IoU.building: 0.9442, IoU.wall: 0.6731, IoU.fence: 0.7444, IoU.pole: 0.7123, IoU.traffic light: 0.7690, IoU.traffic sign: 0.8412, IoU.vegetation: 0.9337, IoU.terrain: 0.6851, 
IoU.sky: 0.9576, IoU.person: 0.8674, IoU.rider: 0.7409, IoU.car: 0.9618, IoU.truck: 0.9171, IoU.bus: 0.9353, IoU.train: 0.8790, IoU.motorcycle: 0.7738, IoU.bicycle: 0.8279, Acc.road: 0.9925, Acc.sidewalk: 0.9396, Acc.building: 0.9702, Acc.wall: 0.7948, Acc.fence: 0.8164, Acc.pole: 0.8373, Acc.traffic light: 0.8683, Acc.traffic sign: 0.9022, Acc.vegetation: 0.9699, Acc.terrain: 0.7763, Acc.sky: 0.9846, Acc.person: 0.9386, Acc.rider: 0.8509, Acc.car: 0.9830, Acc.truck: 0.9420, Acc.bus: 0.9637, Acc.train: 0.9067, Acc.motorcycle: 0.8763, Acc.bicycle: 0.9176 +2022-05-11 10:00:53,048 - mmseg - INFO - Iter [73050/80000] lr: 1.248e-07, eta: 4:09:44, time: 4.111, data_time: 2.338, memory: 69063, decode.loss_cls: 0.0440, decode.loss_mask: 0.1780, decode.loss_dice: 0.4769, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.1850, decode.d0.loss_dice: 0.4997, decode.d1.loss_cls: 0.0592, decode.d1.loss_mask: 0.1796, decode.d1.loss_dice: 0.4839, decode.d2.loss_cls: 0.0574, decode.d2.loss_mask: 0.1793, decode.d2.loss_dice: 0.4841, decode.d3.loss_cls: 0.0521, decode.d3.loss_mask: 0.1786, decode.d3.loss_dice: 0.4781, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1784, decode.d4.loss_dice: 0.4781, decode.d5.loss_cls: 0.0513, decode.d5.loss_mask: 0.1782, decode.d5.loss_dice: 0.4793, decode.d6.loss_cls: 0.0456, decode.d6.loss_mask: 0.1782, decode.d6.loss_dice: 0.4801, decode.d7.loss_cls: 0.0478, decode.d7.loss_mask: 0.1780, decode.d7.loss_dice: 0.4793, decode.d8.loss_cls: 0.0490, decode.d8.loss_mask: 0.1784, decode.d8.loss_dice: 0.4790, loss: 7.3650 +2022-05-11 10:02:24,513 - mmseg - INFO - Iter [73100/80000] lr: 1.239e-07, eta: 4:07:51, time: 1.829, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0490, decode.loss_mask: 0.1757, decode.loss_dice: 0.4919, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.1829, decode.d0.loss_dice: 0.5099, decode.d1.loss_cls: 0.0629, decode.d1.loss_mask: 0.1772, decode.d1.loss_dice: 0.4970, decode.d2.loss_cls: 0.0515, decode.d2.loss_mask: 
0.1765, decode.d2.loss_dice: 0.4957, decode.d3.loss_cls: 0.0503, decode.d3.loss_mask: 0.1760, decode.d3.loss_dice: 0.4918, decode.d4.loss_cls: 0.0481, decode.d4.loss_mask: 0.1762, decode.d4.loss_dice: 0.4860, decode.d5.loss_cls: 0.0493, decode.d5.loss_mask: 0.1760, decode.d5.loss_dice: 0.4944, decode.d6.loss_cls: 0.0476, decode.d6.loss_mask: 0.1758, decode.d6.loss_dice: 0.4887, decode.d7.loss_cls: 0.0457, decode.d7.loss_mask: 0.1757, decode.d7.loss_dice: 0.4899, decode.d8.loss_cls: 0.0429, decode.d8.loss_mask: 0.1755, decode.d8.loss_dice: 0.4857, loss: 7.4429 +2022-05-11 10:03:58,406 - mmseg - INFO - Iter [73150/80000] lr: 1.230e-07, eta: 4:06:00, time: 1.877, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0449, decode.loss_mask: 0.1773, decode.loss_dice: 0.4831, decode.d0.loss_cls: 0.3018, decode.d0.loss_mask: 0.1838, decode.d0.loss_dice: 0.5044, decode.d1.loss_cls: 0.0642, decode.d1.loss_mask: 0.1787, decode.d1.loss_dice: 0.4925, decode.d2.loss_cls: 0.0540, decode.d2.loss_mask: 0.1780, decode.d2.loss_dice: 0.4859, decode.d3.loss_cls: 0.0591, decode.d3.loss_mask: 0.1769, decode.d3.loss_dice: 0.4836, decode.d4.loss_cls: 0.0536, decode.d4.loss_mask: 0.1774, decode.d4.loss_dice: 0.4808, decode.d5.loss_cls: 0.0498, decode.d5.loss_mask: 0.1774, decode.d5.loss_dice: 0.4793, decode.d6.loss_cls: 0.0540, decode.d6.loss_mask: 0.1771, decode.d6.loss_dice: 0.4819, decode.d7.loss_cls: 0.0497, decode.d7.loss_mask: 0.1769, decode.d7.loss_dice: 0.4814, decode.d8.loss_cls: 0.0545, decode.d8.loss_mask: 0.1770, decode.d8.loss_dice: 0.4825, loss: 7.4214 +2022-05-11 10:05:29,952 - mmseg - INFO - Iter [73200/80000] lr: 1.221e-07, eta: 4:04:08, time: 1.832, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0467, decode.loss_mask: 0.1795, decode.loss_dice: 0.4894, decode.d0.loss_cls: 0.2850, decode.d0.loss_mask: 0.1859, decode.d0.loss_dice: 0.5066, decode.d1.loss_cls: 0.0617, decode.d1.loss_mask: 0.1807, decode.d1.loss_dice: 0.4932, decode.d2.loss_cls: 0.0514, decode.d2.loss_mask: 
0.1802, decode.d2.loss_dice: 0.4934, decode.d3.loss_cls: 0.0519, decode.d3.loss_mask: 0.1799, decode.d3.loss_dice: 0.4888, decode.d4.loss_cls: 0.0494, decode.d4.loss_mask: 0.1799, decode.d4.loss_dice: 0.4910, decode.d5.loss_cls: 0.0477, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4885, decode.d6.loss_cls: 0.0479, decode.d6.loss_mask: 0.1796, decode.d6.loss_dice: 0.4873, decode.d7.loss_cls: 0.0479, decode.d7.loss_mask: 0.1798, decode.d7.loss_dice: 0.4878, decode.d8.loss_cls: 0.0460, decode.d8.loss_mask: 0.1797, decode.d8.loss_dice: 0.4871, loss: 7.4537 +2022-05-11 10:07:01,509 - mmseg - INFO - Iter [73250/80000] lr: 1.212e-07, eta: 4:02:16, time: 1.831, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0430, decode.loss_mask: 0.1846, decode.loss_dice: 0.4902, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.1916, decode.d0.loss_dice: 0.5139, decode.d1.loss_cls: 0.0580, decode.d1.loss_mask: 0.1854, decode.d1.loss_dice: 0.5010, decode.d2.loss_cls: 0.0551, decode.d2.loss_mask: 0.1848, decode.d2.loss_dice: 0.4978, decode.d3.loss_cls: 0.0475, decode.d3.loss_mask: 0.1853, decode.d3.loss_dice: 0.4936, decode.d4.loss_cls: 0.0518, decode.d4.loss_mask: 0.1843, decode.d4.loss_dice: 0.4931, decode.d5.loss_cls: 0.0436, decode.d5.loss_mask: 0.1847, decode.d5.loss_dice: 0.4944, decode.d6.loss_cls: 0.0440, decode.d6.loss_mask: 0.1846, decode.d6.loss_dice: 0.4910, decode.d7.loss_cls: 0.0441, decode.d7.loss_mask: 0.1847, decode.d7.loss_dice: 0.4922, decode.d8.loss_cls: 0.0410, decode.d8.loss_mask: 0.1850, decode.d8.loss_dice: 0.4922, loss: 7.5451 +2022-05-11 10:08:34,445 - mmseg - INFO - Iter [73300/80000] lr: 1.203e-07, eta: 4:00:24, time: 1.859, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0432, decode.loss_mask: 0.1827, decode.loss_dice: 0.4882, decode.d0.loss_cls: 0.2946, decode.d0.loss_mask: 0.1901, decode.d0.loss_dice: 0.5111, decode.d1.loss_cls: 0.0556, decode.d1.loss_mask: 0.1844, decode.d1.loss_dice: 0.4975, decode.d2.loss_cls: 0.0507, decode.d2.loss_mask: 
0.1830, decode.d2.loss_dice: 0.4969, decode.d3.loss_cls: 0.0446, decode.d3.loss_mask: 0.1826, decode.d3.loss_dice: 0.4898, decode.d4.loss_cls: 0.0435, decode.d4.loss_mask: 0.1827, decode.d4.loss_dice: 0.4896, decode.d5.loss_cls: 0.0381, decode.d5.loss_mask: 0.1830, decode.d5.loss_dice: 0.4888, decode.d6.loss_cls: 0.0410, decode.d6.loss_mask: 0.1830, decode.d6.loss_dice: 0.4893, decode.d7.loss_cls: 0.0399, decode.d7.loss_mask: 0.1826, decode.d7.loss_dice: 0.4895, decode.d8.loss_cls: 0.0427, decode.d8.loss_mask: 0.1831, decode.d8.loss_dice: 0.4921, loss: 7.4638 +2022-05-11 10:10:06,108 - mmseg - INFO - Iter [73350/80000] lr: 1.194e-07, eta: 3:58:32, time: 1.833, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0473, decode.loss_mask: 0.1802, decode.loss_dice: 0.4825, decode.d0.loss_cls: 0.2878, decode.d0.loss_mask: 0.1856, decode.d0.loss_dice: 0.5053, decode.d1.loss_cls: 0.0549, decode.d1.loss_mask: 0.1816, decode.d1.loss_dice: 0.4893, decode.d2.loss_cls: 0.0499, decode.d2.loss_mask: 0.1810, decode.d2.loss_dice: 0.4885, decode.d3.loss_cls: 0.0516, decode.d3.loss_mask: 0.1811, decode.d3.loss_dice: 0.4847, decode.d4.loss_cls: 0.0412, decode.d4.loss_mask: 0.1810, decode.d4.loss_dice: 0.4825, decode.d5.loss_cls: 0.0478, decode.d5.loss_mask: 0.1807, decode.d5.loss_dice: 0.4828, decode.d6.loss_cls: 0.0466, decode.d6.loss_mask: 0.1805, decode.d6.loss_dice: 0.4853, decode.d7.loss_cls: 0.0477, decode.d7.loss_mask: 0.1805, decode.d7.loss_dice: 0.4799, decode.d8.loss_cls: 0.0503, decode.d8.loss_mask: 0.1805, decode.d8.loss_dice: 0.4821, loss: 7.4009 +2022-05-11 10:11:36,061 - mmseg - INFO - Iter [73400/80000] lr: 1.185e-07, eta: 3:56:40, time: 1.799, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0480, decode.loss_mask: 0.1822, decode.loss_dice: 0.4957, decode.d0.loss_cls: 0.3030, decode.d0.loss_mask: 0.1903, decode.d0.loss_dice: 0.5196, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1839, decode.d1.loss_dice: 0.5027, decode.d2.loss_cls: 0.0628, decode.d2.loss_mask: 
0.1832, decode.d2.loss_dice: 0.4985, decode.d3.loss_cls: 0.0575, decode.d3.loss_mask: 0.1828, decode.d3.loss_dice: 0.4974, decode.d4.loss_cls: 0.0549, decode.d4.loss_mask: 0.1827, decode.d4.loss_dice: 0.4942, decode.d5.loss_cls: 0.0593, decode.d5.loss_mask: 0.1827, decode.d5.loss_dice: 0.4975, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.5002, decode.d7.loss_cls: 0.0561, decode.d7.loss_mask: 0.1823, decode.d7.loss_dice: 0.4980, decode.d8.loss_cls: 0.0509, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.4970, loss: 7.6572 +2022-05-11 10:13:07,858 - mmseg - INFO - Iter [73450/80000] lr: 1.176e-07, eta: 3:54:48, time: 1.835, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0440, decode.loss_mask: 0.1797, decode.loss_dice: 0.4794, decode.d0.loss_cls: 0.2905, decode.d0.loss_mask: 0.1860, decode.d0.loss_dice: 0.5059, decode.d1.loss_cls: 0.0545, decode.d1.loss_mask: 0.1814, decode.d1.loss_dice: 0.4928, decode.d2.loss_cls: 0.0485, decode.d2.loss_mask: 0.1800, decode.d2.loss_dice: 0.4900, decode.d3.loss_cls: 0.0452, decode.d3.loss_mask: 0.1800, decode.d3.loss_dice: 0.4861, decode.d4.loss_cls: 0.0524, decode.d4.loss_mask: 0.1800, decode.d4.loss_dice: 0.4856, decode.d5.loss_cls: 0.0502, decode.d5.loss_mask: 0.1800, decode.d5.loss_dice: 0.4869, decode.d6.loss_cls: 0.0451, decode.d6.loss_mask: 0.1800, decode.d6.loss_dice: 0.4813, decode.d7.loss_cls: 0.0486, decode.d7.loss_mask: 0.1804, decode.d7.loss_dice: 0.4894, decode.d8.loss_cls: 0.0480, decode.d8.loss_mask: 0.1799, decode.d8.loss_dice: 0.4878, loss: 7.4196 +2022-05-11 10:14:39,607 - mmseg - INFO - Iter [73500/80000] lr: 1.167e-07, eta: 3:52:57, time: 1.836, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0551, decode.loss_mask: 0.1743, decode.loss_dice: 0.4950, decode.d0.loss_cls: 0.3083, decode.d0.loss_mask: 0.1804, decode.d0.loss_dice: 0.5115, decode.d1.loss_cls: 0.0706, decode.d1.loss_mask: 0.1757, decode.d1.loss_dice: 0.5014, decode.d2.loss_cls: 0.0633, decode.d2.loss_mask: 
0.1752, decode.d2.loss_dice: 0.4961, decode.d3.loss_cls: 0.0579, decode.d3.loss_mask: 0.1750, decode.d3.loss_dice: 0.4974, decode.d4.loss_cls: 0.0598, decode.d4.loss_mask: 0.1750, decode.d4.loss_dice: 0.4964, decode.d5.loss_cls: 0.0568, decode.d5.loss_mask: 0.1750, decode.d5.loss_dice: 0.4958, decode.d6.loss_cls: 0.0543, decode.d6.loss_mask: 0.1746, decode.d6.loss_dice: 0.4938, decode.d7.loss_cls: 0.0556, decode.d7.loss_mask: 0.1745, decode.d7.loss_dice: 0.4955, decode.d8.loss_cls: 0.0551, decode.d8.loss_mask: 0.1744, decode.d8.loss_dice: 0.4935, loss: 7.5672 +2022-05-11 10:16:09,328 - mmseg - INFO - Iter [73550/80000] lr: 1.158e-07, eta: 3:51:05, time: 1.794, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0479, decode.loss_mask: 0.1829, decode.loss_dice: 0.4853, decode.d0.loss_cls: 0.2970, decode.d0.loss_mask: 0.1897, decode.d0.loss_dice: 0.5006, decode.d1.loss_cls: 0.0580, decode.d1.loss_mask: 0.1843, decode.d1.loss_dice: 0.4877, decode.d2.loss_cls: 0.0542, decode.d2.loss_mask: 0.1833, decode.d2.loss_dice: 0.4901, decode.d3.loss_cls: 0.0518, decode.d3.loss_mask: 0.1834, decode.d3.loss_dice: 0.4850, decode.d4.loss_cls: 0.0474, decode.d4.loss_mask: 0.1832, decode.d4.loss_dice: 0.4881, decode.d5.loss_cls: 0.0504, decode.d5.loss_mask: 0.1827, decode.d5.loss_dice: 0.4814, decode.d6.loss_cls: 0.0501, decode.d6.loss_mask: 0.1830, decode.d6.loss_dice: 0.4872, decode.d7.loss_cls: 0.0480, decode.d7.loss_mask: 0.1827, decode.d7.loss_dice: 0.4847, decode.d8.loss_cls: 0.0504, decode.d8.loss_mask: 0.1827, decode.d8.loss_dice: 0.4840, loss: 7.4674 +2022-05-11 10:17:40,690 - mmseg - INFO - Iter [73600/80000] lr: 1.149e-07, eta: 3:49:13, time: 1.827, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0448, decode.loss_mask: 0.1835, decode.loss_dice: 0.4838, decode.d0.loss_cls: 0.2852, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.5054, decode.d1.loss_cls: 0.0606, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.4916, decode.d2.loss_cls: 0.0473, decode.d2.loss_mask: 
0.1836, decode.d2.loss_dice: 0.4877, decode.d3.loss_cls: 0.0467, decode.d3.loss_mask: 0.1838, decode.d3.loss_dice: 0.4858, decode.d4.loss_cls: 0.0465, decode.d4.loss_mask: 0.1834, decode.d4.loss_dice: 0.4866, decode.d5.loss_cls: 0.0401, decode.d5.loss_mask: 0.1833, decode.d5.loss_dice: 0.4842, decode.d6.loss_cls: 0.0421, decode.d6.loss_mask: 0.1837, decode.d6.loss_dice: 0.4852, decode.d7.loss_cls: 0.0439, decode.d7.loss_mask: 0.1833, decode.d7.loss_dice: 0.4903, decode.d8.loss_cls: 0.0437, decode.d8.loss_mask: 0.1831, decode.d8.loss_dice: 0.4855, loss: 7.4298 +2022-05-11 10:19:11,196 - mmseg - INFO - Iter [73650/80000] lr: 1.140e-07, eta: 3:47:22, time: 1.810, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0470, decode.loss_mask: 0.1814, decode.loss_dice: 0.4785, decode.d0.loss_cls: 0.2934, decode.d0.loss_mask: 0.1879, decode.d0.loss_dice: 0.5008, decode.d1.loss_cls: 0.0636, decode.d1.loss_mask: 0.1829, decode.d1.loss_dice: 0.4870, decode.d2.loss_cls: 0.0567, decode.d2.loss_mask: 0.1815, decode.d2.loss_dice: 0.4828, decode.d3.loss_cls: 0.0481, decode.d3.loss_mask: 0.1820, decode.d3.loss_dice: 0.4786, decode.d4.loss_cls: 0.0461, decode.d4.loss_mask: 0.1821, decode.d4.loss_dice: 0.4782, decode.d5.loss_cls: 0.0441, decode.d5.loss_mask: 0.1819, decode.d5.loss_dice: 0.4814, decode.d6.loss_cls: 0.0457, decode.d6.loss_mask: 0.1820, decode.d6.loss_dice: 0.4805, decode.d7.loss_cls: 0.0503, decode.d7.loss_mask: 0.1815, decode.d7.loss_dice: 0.4827, decode.d8.loss_cls: 0.0449, decode.d8.loss_mask: 0.1816, decode.d8.loss_dice: 0.4799, loss: 7.3952 +2022-05-11 10:20:44,350 - mmseg - INFO - Iter [73700/80000] lr: 1.131e-07, eta: 3:45:31, time: 1.863, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0492, decode.loss_mask: 0.1777, decode.loss_dice: 0.4795, decode.d0.loss_cls: 0.2925, decode.d0.loss_mask: 0.1848, decode.d0.loss_dice: 0.5088, decode.d1.loss_cls: 0.0643, decode.d1.loss_mask: 0.1796, decode.d1.loss_dice: 0.4905, decode.d2.loss_cls: 0.0612, decode.d2.loss_mask: 
0.1789, decode.d2.loss_dice: 0.4882, decode.d3.loss_cls: 0.0500, decode.d3.loss_mask: 0.1784, decode.d3.loss_dice: 0.4838, decode.d4.loss_cls: 0.0518, decode.d4.loss_mask: 0.1780, decode.d4.loss_dice: 0.4839, decode.d5.loss_cls: 0.0526, decode.d5.loss_mask: 0.1783, decode.d5.loss_dice: 0.4832, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1784, decode.d6.loss_dice: 0.4832, decode.d7.loss_cls: 0.0484, decode.d7.loss_mask: 0.1776, decode.d7.loss_dice: 0.4780, decode.d8.loss_cls: 0.0491, decode.d8.loss_mask: 0.1780, decode.d8.loss_dice: 0.4823, loss: 7.4255 +2022-05-11 10:22:14,793 - mmseg - INFO - Iter [73750/80000] lr: 1.122e-07, eta: 3:43:39, time: 1.809, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0429, decode.loss_mask: 0.1798, decode.loss_dice: 0.4795, decode.d0.loss_cls: 0.2961, decode.d0.loss_mask: 0.1864, decode.d0.loss_dice: 0.5019, decode.d1.loss_cls: 0.0598, decode.d1.loss_mask: 0.1804, decode.d1.loss_dice: 0.4875, decode.d2.loss_cls: 0.0529, decode.d2.loss_mask: 0.1803, decode.d2.loss_dice: 0.4848, decode.d3.loss_cls: 0.0475, decode.d3.loss_mask: 0.1800, decode.d3.loss_dice: 0.4836, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.4787, decode.d5.loss_cls: 0.0458, decode.d5.loss_mask: 0.1798, decode.d5.loss_dice: 0.4820, decode.d6.loss_cls: 0.0452, decode.d6.loss_mask: 0.1797, decode.d6.loss_dice: 0.4788, decode.d7.loss_cls: 0.0473, decode.d7.loss_mask: 0.1798, decode.d7.loss_dice: 0.4792, decode.d8.loss_cls: 0.0438, decode.d8.loss_mask: 0.1796, decode.d8.loss_dice: 0.4804, loss: 7.3734 +2022-05-11 10:23:45,330 - mmseg - INFO - Iter [73800/80000] lr: 1.113e-07, eta: 3:41:48, time: 1.811, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1786, decode.loss_dice: 0.4913, decode.d0.loss_cls: 0.2911, decode.d0.loss_mask: 0.1846, decode.d0.loss_dice: 0.5101, decode.d1.loss_cls: 0.0671, decode.d1.loss_mask: 0.1800, decode.d1.loss_dice: 0.4948, decode.d2.loss_cls: 0.0523, decode.d2.loss_mask: 
0.1796, decode.d2.loss_dice: 0.4915, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1792, decode.d3.loss_dice: 0.4903, decode.d4.loss_cls: 0.0484, decode.d4.loss_mask: 0.1790, decode.d4.loss_dice: 0.4895, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1791, decode.d5.loss_dice: 0.4902, decode.d6.loss_cls: 0.0474, decode.d6.loss_mask: 0.1790, decode.d6.loss_dice: 0.4872, decode.d7.loss_cls: 0.0461, decode.d7.loss_mask: 0.1789, decode.d7.loss_dice: 0.4845, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1784, decode.d8.loss_dice: 0.4835, loss: 7.4672 +2022-05-11 10:25:15,793 - mmseg - INFO - Iter [73850/80000] lr: 1.104e-07, eta: 3:39:56, time: 1.809, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0509, decode.loss_mask: 0.1789, decode.loss_dice: 0.4855, decode.d0.loss_cls: 0.3019, decode.d0.loss_mask: 0.1864, decode.d0.loss_dice: 0.5057, decode.d1.loss_cls: 0.0664, decode.d1.loss_mask: 0.1805, decode.d1.loss_dice: 0.4956, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1798, decode.d2.loss_dice: 0.4891, decode.d3.loss_cls: 0.0591, decode.d3.loss_mask: 0.1792, decode.d3.loss_dice: 0.4885, decode.d4.loss_cls: 0.0561, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.4856, decode.d5.loss_cls: 0.0568, decode.d5.loss_mask: 0.1797, decode.d5.loss_dice: 0.4878, decode.d6.loss_cls: 0.0545, decode.d6.loss_mask: 0.1793, decode.d6.loss_dice: 0.4850, decode.d7.loss_cls: 0.0495, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4902, decode.d8.loss_cls: 0.0550, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4851, loss: 7.5105 +2022-05-11 10:26:50,404 - mmseg - INFO - Iter [73900/80000] lr: 1.095e-07, eta: 3:38:06, time: 1.892, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0477, decode.loss_mask: 0.1782, decode.loss_dice: 0.4844, decode.d0.loss_cls: 0.2816, decode.d0.loss_mask: 0.1854, decode.d0.loss_dice: 0.5044, decode.d1.loss_cls: 0.0685, decode.d1.loss_mask: 0.1796, decode.d1.loss_dice: 0.4885, decode.d2.loss_cls: 0.0522, decode.d2.loss_mask: 
0.1790, decode.d2.loss_dice: 0.4853, decode.d3.loss_cls: 0.0564, decode.d3.loss_mask: 0.1787, decode.d3.loss_dice: 0.4820, decode.d4.loss_cls: 0.0526, decode.d4.loss_mask: 0.1786, decode.d4.loss_dice: 0.4807, decode.d5.loss_cls: 0.0561, decode.d5.loss_mask: 0.1788, decode.d5.loss_dice: 0.4828, decode.d6.loss_cls: 0.0525, decode.d6.loss_mask: 0.1785, decode.d6.loss_dice: 0.4820, decode.d7.loss_cls: 0.0533, decode.d7.loss_mask: 0.1786, decode.d7.loss_dice: 0.4814, decode.d8.loss_cls: 0.0490, decode.d8.loss_mask: 0.1782, decode.d8.loss_dice: 0.4763, loss: 7.4115 +2022-05-11 10:28:21,154 - mmseg - INFO - Iter [73950/80000] lr: 1.086e-07, eta: 3:36:15, time: 1.815, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0560, decode.loss_mask: 0.1826, decode.loss_dice: 0.5013, decode.d0.loss_cls: 0.2925, decode.d0.loss_mask: 0.1894, decode.d0.loss_dice: 0.5244, decode.d1.loss_cls: 0.0811, decode.d1.loss_mask: 0.1841, decode.d1.loss_dice: 0.5090, decode.d2.loss_cls: 0.0679, decode.d2.loss_mask: 0.1841, decode.d2.loss_dice: 0.5069, decode.d3.loss_cls: 0.0677, decode.d3.loss_mask: 0.1827, decode.d3.loss_dice: 0.5023, decode.d4.loss_cls: 0.0628, decode.d4.loss_mask: 0.1826, decode.d4.loss_dice: 0.5005, decode.d5.loss_cls: 0.0600, decode.d5.loss_mask: 0.1831, decode.d5.loss_dice: 0.5032, decode.d6.loss_cls: 0.0613, decode.d6.loss_mask: 0.1827, decode.d6.loss_dice: 0.5008, decode.d7.loss_cls: 0.0610, decode.d7.loss_mask: 0.1827, decode.d7.loss_dice: 0.5036, decode.d8.loss_cls: 0.0615, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.5017, loss: 7.7622 +2022-05-11 10:29:52,040 - mmseg - INFO - Saving checkpoint at 74000 iterations +2022-05-11 10:30:25,524 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 10:30:25,533 - mmseg - INFO - Iter [74000/80000] lr: 1.077e-07, eta: 3:34:32, time: 2.485, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0493, decode.loss_mask: 0.1739, decode.loss_dice: 0.4940, decode.d0.loss_cls: 0.2908, 
decode.d0.loss_mask: 0.1792, decode.d0.loss_dice: 0.5194, decode.d1.loss_cls: 0.0608, decode.d1.loss_mask: 0.1755, decode.d1.loss_dice: 0.5030, decode.d2.loss_cls: 0.0522, decode.d2.loss_mask: 0.1747, decode.d2.loss_dice: 0.5012, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1742, decode.d3.loss_dice: 0.4961, decode.d4.loss_cls: 0.0503, decode.d4.loss_mask: 0.1741, decode.d4.loss_dice: 0.4965, decode.d5.loss_cls: 0.0504, decode.d5.loss_mask: 0.1740, decode.d5.loss_dice: 0.4946, decode.d6.loss_cls: 0.0468, decode.d6.loss_mask: 0.1744, decode.d6.loss_dice: 0.4968, decode.d7.loss_cls: 0.0455, decode.d7.loss_mask: 0.1739, decode.d7.loss_dice: 0.4934, decode.d8.loss_cls: 0.0430, decode.d8.loss_mask: 0.1739, decode.d8.loss_dice: 0.4981, loss: 7.4805 +2022-05-11 10:32:21,203 - mmseg - INFO - per class results: +2022-05-11 10:32:21,214 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.66 | 99.26 | +| sidewalk | 88.9 | 94.22 | +| building | 94.4 | 97.16 | +| wall | 67.06 | 79.56 | +| fence | 74.28 | 80.77 | +| pole | 71.25 | 83.47 | +| traffic light | 76.91 | 87.04 | +| traffic sign | 84.17 | 90.35 | +| vegetation | 93.4 | 96.9 | +| terrain | 68.43 | 77.16 | +| sky | 95.83 | 98.4 | +| person | 86.71 | 93.86 | +| rider | 74.49 | 85.33 | +| car | 96.15 | 98.28 | +| truck | 91.92 | 94.34 | +| bus | 93.61 | 96.41 | +| train | 87.9 | 90.71 | +| motorcycle | 77.06 | 86.8 | +| bicycle | 82.79 | 91.63 | ++---------------+-------+-------+ +2022-05-11 10:32:21,214 - mmseg - INFO - Summary: +2022-05-11 10:32:21,215 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.03 | 84.42 | 90.61 | ++-------+-------+-------+ +2022-05-11 10:32:21,218 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 10:32:21,219 - mmseg - INFO - Iter(val) [32] aAcc: 0.9703, mIoU: 0.8442, mAcc: 0.9061, IoU.road: 0.9866, IoU.sidewalk: 0.8890, 
IoU.building: 0.9440, IoU.wall: 0.6706, IoU.fence: 0.7428, IoU.pole: 0.7125, IoU.traffic light: 0.7691, IoU.traffic sign: 0.8417, IoU.vegetation: 0.9340, IoU.terrain: 0.6843, IoU.sky: 0.9583, IoU.person: 0.8671, IoU.rider: 0.7449, IoU.car: 0.9615, IoU.truck: 0.9192, IoU.bus: 0.9361, IoU.train: 0.8790, IoU.motorcycle: 0.7706, IoU.bicycle: 0.8279, Acc.road: 0.9926, Acc.sidewalk: 0.9422, Acc.building: 0.9716, Acc.wall: 0.7956, Acc.fence: 0.8077, Acc.pole: 0.8347, Acc.traffic light: 0.8704, Acc.traffic sign: 0.9035, Acc.vegetation: 0.9690, Acc.terrain: 0.7716, Acc.sky: 0.9840, Acc.person: 0.9386, Acc.rider: 0.8533, Acc.car: 0.9828, Acc.truck: 0.9434, Acc.bus: 0.9641, Acc.train: 0.9071, Acc.motorcycle: 0.8680, Acc.bicycle: 0.9163 +2022-05-11 10:33:54,455 - mmseg - INFO - Iter [74050/80000] lr: 1.068e-07, eta: 3:33:08, time: 4.181, data_time: 2.381, memory: 69063, decode.loss_cls: 0.0578, decode.loss_mask: 0.1734, decode.loss_dice: 0.4827, decode.d0.loss_cls: 0.2994, decode.d0.loss_mask: 0.1794, decode.d0.loss_dice: 0.5108, decode.d1.loss_cls: 0.0689, decode.d1.loss_mask: 0.1752, decode.d1.loss_dice: 0.4919, decode.d2.loss_cls: 0.0685, decode.d2.loss_mask: 0.1757, decode.d2.loss_dice: 0.4937, decode.d3.loss_cls: 0.0613, decode.d3.loss_mask: 0.1740, decode.d3.loss_dice: 0.4847, decode.d4.loss_cls: 0.0618, decode.d4.loss_mask: 0.1741, decode.d4.loss_dice: 0.4846, decode.d5.loss_cls: 0.0658, decode.d5.loss_mask: 0.1743, decode.d5.loss_dice: 0.4892, decode.d6.loss_cls: 0.0606, decode.d6.loss_mask: 0.1736, decode.d6.loss_dice: 0.4850, decode.d7.loss_cls: 0.0604, decode.d7.loss_mask: 0.1740, decode.d7.loss_dice: 0.4850, decode.d8.loss_cls: 0.0627, decode.d8.loss_mask: 0.1738, decode.d8.loss_dice: 0.4876, loss: 7.5099 +2022-05-11 10:35:25,090 - mmseg - INFO - Iter [74100/80000] lr: 1.059e-07, eta: 3:31:16, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0544, decode.loss_mask: 0.1782, decode.loss_dice: 0.4929, decode.d0.loss_cls: 0.3029, decode.d0.loss_mask: 
0.1838, decode.d0.loss_dice: 0.5159, decode.d1.loss_cls: 0.0694, decode.d1.loss_mask: 0.1794, decode.d1.loss_dice: 0.4958, decode.d2.loss_cls: 0.0650, decode.d2.loss_mask: 0.1790, decode.d2.loss_dice: 0.4991, decode.d3.loss_cls: 0.0567, decode.d3.loss_mask: 0.1792, decode.d3.loss_dice: 0.4957, decode.d4.loss_cls: 0.0597, decode.d4.loss_mask: 0.1788, decode.d4.loss_dice: 0.4940, decode.d5.loss_cls: 0.0559, decode.d5.loss_mask: 0.1790, decode.d5.loss_dice: 0.4945, decode.d6.loss_cls: 0.0588, decode.d6.loss_mask: 0.1785, decode.d6.loss_dice: 0.4938, decode.d7.loss_cls: 0.0544, decode.d7.loss_mask: 0.1786, decode.d7.loss_dice: 0.4951, decode.d8.loss_cls: 0.0519, decode.d8.loss_mask: 0.1785, decode.d8.loss_dice: 0.4963, loss: 7.5952 +2022-05-11 10:36:56,694 - mmseg - INFO - Iter [74150/80000] lr: 1.050e-07, eta: 3:29:25, time: 1.832, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0439, decode.loss_mask: 0.1801, decode.loss_dice: 0.4889, decode.d0.loss_cls: 0.2901, decode.d0.loss_mask: 0.1858, decode.d0.loss_dice: 0.5070, decode.d1.loss_cls: 0.0603, decode.d1.loss_mask: 0.1811, decode.d1.loss_dice: 0.4920, decode.d2.loss_cls: 0.0566, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.4901, decode.d3.loss_cls: 0.0513, decode.d3.loss_mask: 0.1805, decode.d3.loss_dice: 0.4912, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1802, decode.d4.loss_dice: 0.4851, decode.d5.loss_cls: 0.0438, decode.d5.loss_mask: 0.1806, decode.d5.loss_dice: 0.4861, decode.d6.loss_cls: 0.0489, decode.d6.loss_mask: 0.1802, decode.d6.loss_dice: 0.4873, decode.d7.loss_cls: 0.0461, decode.d7.loss_mask: 0.1802, decode.d7.loss_dice: 0.4878, decode.d8.loss_cls: 0.0483, decode.d8.loss_mask: 0.1800, decode.d8.loss_dice: 0.4858, loss: 7.4503 +2022-05-11 10:38:27,042 - mmseg - INFO - Iter [74200/80000] lr: 1.041e-07, eta: 3:27:34, time: 1.807, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0430, decode.loss_mask: 0.1788, decode.loss_dice: 0.4902, decode.d0.loss_cls: 0.2898, decode.d0.loss_mask: 
0.1849, decode.d0.loss_dice: 0.5112, decode.d1.loss_cls: 0.0553, decode.d1.loss_mask: 0.1803, decode.d1.loss_dice: 0.5012, decode.d2.loss_cls: 0.0490, decode.d2.loss_mask: 0.1792, decode.d2.loss_dice: 0.4940, decode.d3.loss_cls: 0.0499, decode.d3.loss_mask: 0.1791, decode.d3.loss_dice: 0.4948, decode.d4.loss_cls: 0.0405, decode.d4.loss_mask: 0.1790, decode.d4.loss_dice: 0.4918, decode.d5.loss_cls: 0.0468, decode.d5.loss_mask: 0.1794, decode.d5.loss_dice: 0.4944, decode.d6.loss_cls: 0.0420, decode.d6.loss_mask: 0.1793, decode.d6.loss_dice: 0.4944, decode.d7.loss_cls: 0.0456, decode.d7.loss_mask: 0.1788, decode.d7.loss_dice: 0.4896, decode.d8.loss_cls: 0.0433, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4917, loss: 7.4562 +2022-05-11 10:40:01,889 - mmseg - INFO - Iter [74250/80000] lr: 1.032e-07, eta: 3:25:44, time: 1.897, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0454, decode.loss_mask: 0.1770, decode.loss_dice: 0.4883, decode.d0.loss_cls: 0.2999, decode.d0.loss_mask: 0.1835, decode.d0.loss_dice: 0.5107, decode.d1.loss_cls: 0.0604, decode.d1.loss_mask: 0.1774, decode.d1.loss_dice: 0.4958, decode.d2.loss_cls: 0.0528, decode.d2.loss_mask: 0.1776, decode.d2.loss_dice: 0.4925, decode.d3.loss_cls: 0.0520, decode.d3.loss_mask: 0.1772, decode.d3.loss_dice: 0.4910, decode.d4.loss_cls: 0.0507, decode.d4.loss_mask: 0.1770, decode.d4.loss_dice: 0.4879, decode.d5.loss_cls: 0.0481, decode.d5.loss_mask: 0.1769, decode.d5.loss_dice: 0.4905, decode.d6.loss_cls: 0.0469, decode.d6.loss_mask: 0.1767, decode.d6.loss_dice: 0.4898, decode.d7.loss_cls: 0.0448, decode.d7.loss_mask: 0.1767, decode.d7.loss_dice: 0.4891, decode.d8.loss_cls: 0.0458, decode.d8.loss_mask: 0.1771, decode.d8.loss_dice: 0.4877, loss: 7.4474 +2022-05-11 10:41:32,587 - mmseg - INFO - Iter [74300/80000] lr: 1.023e-07, eta: 3:23:53, time: 1.814, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0496, decode.loss_mask: 0.1823, decode.loss_dice: 0.4860, decode.d0.loss_cls: 0.2965, decode.d0.loss_mask: 
0.1893, decode.d0.loss_dice: 0.5104, decode.d1.loss_cls: 0.0577, decode.d1.loss_mask: 0.1834, decode.d1.loss_dice: 0.4954, decode.d2.loss_cls: 0.0569, decode.d2.loss_mask: 0.1835, decode.d2.loss_dice: 0.4922, decode.d3.loss_cls: 0.0502, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4904, decode.d4.loss_cls: 0.0518, decode.d4.loss_mask: 0.1831, decode.d4.loss_dice: 0.4911, decode.d5.loss_cls: 0.0498, decode.d5.loss_mask: 0.1827, decode.d5.loss_dice: 0.4889, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1828, decode.d6.loss_dice: 0.4916, decode.d7.loss_cls: 0.0480, decode.d7.loss_mask: 0.1824, decode.d7.loss_dice: 0.4905, decode.d8.loss_cls: 0.0471, decode.d8.loss_mask: 0.1828, decode.d8.loss_dice: 0.4934, loss: 7.5226 +2022-05-11 10:43:05,084 - mmseg - INFO - Iter [74350/80000] lr: 1.014e-07, eta: 3:22:03, time: 1.850, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0488, decode.loss_mask: 0.1768, decode.loss_dice: 0.4873, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 0.1826, decode.d0.loss_dice: 0.5107, decode.d1.loss_cls: 0.0598, decode.d1.loss_mask: 0.1779, decode.d1.loss_dice: 0.4941, decode.d2.loss_cls: 0.0520, decode.d2.loss_mask: 0.1776, decode.d2.loss_dice: 0.4890, decode.d3.loss_cls: 0.0484, decode.d3.loss_mask: 0.1774, decode.d3.loss_dice: 0.4863, decode.d4.loss_cls: 0.0500, decode.d4.loss_mask: 0.1770, decode.d4.loss_dice: 0.4856, decode.d5.loss_cls: 0.0488, decode.d5.loss_mask: 0.1766, decode.d5.loss_dice: 0.4882, decode.d6.loss_cls: 0.0534, decode.d6.loss_mask: 0.1768, decode.d6.loss_dice: 0.4865, decode.d7.loss_cls: 0.0462, decode.d7.loss_mask: 0.1767, decode.d7.loss_dice: 0.4834, decode.d8.loss_cls: 0.0505, decode.d8.loss_mask: 0.1771, decode.d8.loss_dice: 0.4903, loss: 7.4255 +2022-05-11 10:44:36,076 - mmseg - INFO - Iter [74400/80000] lr: 1.005e-07, eta: 3:20:12, time: 1.820, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0482, decode.loss_mask: 0.1816, decode.loss_dice: 0.4877, decode.d0.loss_cls: 0.2799, decode.d0.loss_mask: 
0.1885, decode.d0.loss_dice: 0.5117, decode.d1.loss_cls: 0.0608, decode.d1.loss_mask: 0.1829, decode.d1.loss_dice: 0.4950, decode.d2.loss_cls: 0.0593, decode.d2.loss_mask: 0.1822, decode.d2.loss_dice: 0.4873, decode.d3.loss_cls: 0.0572, decode.d3.loss_mask: 0.1822, decode.d3.loss_dice: 0.4888, decode.d4.loss_cls: 0.0486, decode.d4.loss_mask: 0.1815, decode.d4.loss_dice: 0.4882, decode.d5.loss_cls: 0.0502, decode.d5.loss_mask: 0.1817, decode.d5.loss_dice: 0.4874, decode.d6.loss_cls: 0.0469, decode.d6.loss_mask: 0.1813, decode.d6.loss_dice: 0.4847, decode.d7.loss_cls: 0.0499, decode.d7.loss_mask: 0.1810, decode.d7.loss_dice: 0.4868, decode.d8.loss_cls: 0.0499, decode.d8.loss_mask: 0.1813, decode.d8.loss_dice: 0.4881, loss: 7.4808 +2022-05-11 10:46:08,848 - mmseg - INFO - Iter [74450/80000] lr: 9.963e-08, eta: 3:18:22, time: 1.855, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0533, decode.loss_mask: 0.1773, decode.loss_dice: 0.4936, decode.d0.loss_cls: 0.2985, decode.d0.loss_mask: 0.1844, decode.d0.loss_dice: 0.5214, decode.d1.loss_cls: 0.0738, decode.d1.loss_mask: 0.1790, decode.d1.loss_dice: 0.5030, decode.d2.loss_cls: 0.0613, decode.d2.loss_mask: 0.1780, decode.d2.loss_dice: 0.5052, decode.d3.loss_cls: 0.0592, decode.d3.loss_mask: 0.1776, decode.d3.loss_dice: 0.4973, decode.d4.loss_cls: 0.0604, decode.d4.loss_mask: 0.1774, decode.d4.loss_dice: 0.4990, decode.d5.loss_cls: 0.0510, decode.d5.loss_mask: 0.1776, decode.d5.loss_dice: 0.5004, decode.d6.loss_cls: 0.0581, decode.d6.loss_mask: 0.1774, decode.d6.loss_dice: 0.4950, decode.d7.loss_cls: 0.0526, decode.d7.loss_mask: 0.1772, decode.d7.loss_dice: 0.4987, decode.d8.loss_cls: 0.0560, decode.d8.loss_mask: 0.1776, decode.d8.loss_dice: 0.4967, loss: 7.6180 +2022-05-11 10:47:38,899 - mmseg - INFO - Iter [74500/80000] lr: 9.873e-08, eta: 3:16:31, time: 1.801, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0520, decode.loss_mask: 0.1822, decode.loss_dice: 0.4836, decode.d0.loss_cls: 0.3037, decode.d0.loss_mask: 
0.1890, decode.d0.loss_dice: 0.5090, decode.d1.loss_cls: 0.0699, decode.d1.loss_mask: 0.1843, decode.d1.loss_dice: 0.4959, decode.d2.loss_cls: 0.0612, decode.d2.loss_mask: 0.1829, decode.d2.loss_dice: 0.4907, decode.d3.loss_cls: 0.0512, decode.d3.loss_mask: 0.1825, decode.d3.loss_dice: 0.4896, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1825, decode.d4.loss_dice: 0.4884, decode.d5.loss_cls: 0.0519, decode.d5.loss_mask: 0.1828, decode.d5.loss_dice: 0.4884, decode.d6.loss_cls: 0.0467, decode.d6.loss_mask: 0.1821, decode.d6.loss_dice: 0.4873, decode.d7.loss_cls: 0.0517, decode.d7.loss_mask: 0.1820, decode.d7.loss_dice: 0.4861, decode.d8.loss_cls: 0.0490, decode.d8.loss_mask: 0.1822, decode.d8.loss_dice: 0.4855, loss: 7.5251 +2022-05-11 10:49:08,602 - mmseg - INFO - Iter [74550/80000] lr: 9.783e-08, eta: 3:14:40, time: 1.794, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0445, decode.loss_mask: 0.1795, decode.loss_dice: 0.4805, decode.d0.loss_cls: 0.2929, decode.d0.loss_mask: 0.1855, decode.d0.loss_dice: 0.4979, decode.d1.loss_cls: 0.0659, decode.d1.loss_mask: 0.1805, decode.d1.loss_dice: 0.4831, decode.d2.loss_cls: 0.0586, decode.d2.loss_mask: 0.1800, decode.d2.loss_dice: 0.4810, decode.d3.loss_cls: 0.0484, decode.d3.loss_mask: 0.1798, decode.d3.loss_dice: 0.4829, decode.d4.loss_cls: 0.0484, decode.d4.loss_mask: 0.1796, decode.d4.loss_dice: 0.4812, decode.d5.loss_cls: 0.0466, decode.d5.loss_mask: 0.1795, decode.d5.loss_dice: 0.4838, decode.d6.loss_cls: 0.0468, decode.d6.loss_mask: 0.1796, decode.d6.loss_dice: 0.4817, decode.d7.loss_cls: 0.0472, decode.d7.loss_mask: 0.1799, decode.d7.loss_dice: 0.4790, decode.d8.loss_cls: 0.0464, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4824, loss: 7.3826 +2022-05-11 10:50:40,502 - mmseg - INFO - Iter [74600/80000] lr: 9.693e-08, eta: 3:12:50, time: 1.838, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0402, decode.loss_mask: 0.1792, decode.loss_dice: 0.4807, decode.d0.loss_cls: 0.2952, decode.d0.loss_mask: 
0.1853, decode.d0.loss_dice: 0.4988, decode.d1.loss_cls: 0.0582, decode.d1.loss_mask: 0.1803, decode.d1.loss_dice: 0.4874, decode.d2.loss_cls: 0.0500, decode.d2.loss_mask: 0.1799, decode.d2.loss_dice: 0.4820, decode.d3.loss_cls: 0.0454, decode.d3.loss_mask: 0.1794, decode.d3.loss_dice: 0.4818, decode.d4.loss_cls: 0.0416, decode.d4.loss_mask: 0.1794, decode.d4.loss_dice: 0.4827, decode.d5.loss_cls: 0.0456, decode.d5.loss_mask: 0.1794, decode.d5.loss_dice: 0.4795, decode.d6.loss_cls: 0.0401, decode.d6.loss_mask: 0.1794, decode.d6.loss_dice: 0.4812, decode.d7.loss_cls: 0.0416, decode.d7.loss_mask: 0.1794, decode.d7.loss_dice: 0.4776, decode.d8.loss_cls: 0.0417, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 0.4785, loss: 7.3306 +2022-05-11 10:52:10,115 - mmseg - INFO - Iter [74650/80000] lr: 9.604e-08, eta: 3:10:59, time: 1.793, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0438, decode.loss_mask: 0.1740, decode.loss_dice: 0.4954, decode.d0.loss_cls: 0.2891, decode.d0.loss_mask: 0.1805, decode.d0.loss_dice: 0.5137, decode.d1.loss_cls: 0.0560, decode.d1.loss_mask: 0.1753, decode.d1.loss_dice: 0.5024, decode.d2.loss_cls: 0.0551, decode.d2.loss_mask: 0.1746, decode.d2.loss_dice: 0.4995, decode.d3.loss_cls: 0.0498, decode.d3.loss_mask: 0.1740, decode.d3.loss_dice: 0.4933, decode.d4.loss_cls: 0.0429, decode.d4.loss_mask: 0.1739, decode.d4.loss_dice: 0.4945, decode.d5.loss_cls: 0.0459, decode.d5.loss_mask: 0.1739, decode.d5.loss_dice: 0.4962, decode.d6.loss_cls: 0.0404, decode.d6.loss_mask: 0.1742, decode.d6.loss_dice: 0.4949, decode.d7.loss_cls: 0.0480, decode.d7.loss_mask: 0.1742, decode.d7.loss_dice: 0.4979, decode.d8.loss_cls: 0.0443, decode.d8.loss_mask: 0.1735, decode.d8.loss_dice: 0.4978, loss: 7.4491 +2022-05-11 10:53:39,214 - mmseg - INFO - Iter [74700/80000] lr: 9.514e-08, eta: 3:09:08, time: 1.782, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0460, decode.loss_mask: 0.1791, decode.loss_dice: 0.4911, decode.d0.loss_cls: 0.2869, decode.d0.loss_mask: 
0.1855, decode.d0.loss_dice: 0.5145, decode.d1.loss_cls: 0.0648, decode.d1.loss_mask: 0.1803, decode.d1.loss_dice: 0.4997, decode.d2.loss_cls: 0.0565, decode.d2.loss_mask: 0.1798, decode.d2.loss_dice: 0.4982, decode.d3.loss_cls: 0.0469, decode.d3.loss_mask: 0.1796, decode.d3.loss_dice: 0.4930, decode.d4.loss_cls: 0.0474, decode.d4.loss_mask: 0.1792, decode.d4.loss_dice: 0.4934, decode.d5.loss_cls: 0.0509, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4912, decode.d6.loss_cls: 0.0442, decode.d6.loss_mask: 0.1792, decode.d6.loss_dice: 0.4900, decode.d7.loss_cls: 0.0476, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.4906, decode.d8.loss_cls: 0.0495, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 0.4901, loss: 7.4934 +2022-05-11 10:55:08,719 - mmseg - INFO - Iter [74750/80000] lr: 9.424e-08, eta: 3:07:18, time: 1.790, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0552, decode.loss_mask: 0.1793, decode.loss_dice: 0.4788, decode.d0.loss_cls: 0.2967, decode.d0.loss_mask: 0.1864, decode.d0.loss_dice: 0.5015, decode.d1.loss_cls: 0.0635, decode.d1.loss_mask: 0.1805, decode.d1.loss_dice: 0.4854, decode.d2.loss_cls: 0.0583, decode.d2.loss_mask: 0.1802, decode.d2.loss_dice: 0.4824, decode.d3.loss_cls: 0.0543, decode.d3.loss_mask: 0.1797, decode.d3.loss_dice: 0.4809, decode.d4.loss_cls: 0.0508, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.4794, decode.d5.loss_cls: 0.0482, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4818, decode.d6.loss_cls: 0.0505, decode.d6.loss_mask: 0.1797, decode.d6.loss_dice: 0.4794, decode.d7.loss_cls: 0.0572, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.4770, decode.d8.loss_cls: 0.0537, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 0.4779, loss: 7.4163 +2022-05-11 10:56:41,226 - mmseg - INFO - Iter [74800/80000] lr: 9.334e-08, eta: 3:05:28, time: 1.848, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0545, decode.loss_mask: 0.1817, decode.loss_dice: 0.4822, decode.d0.loss_cls: 0.3068, decode.d0.loss_mask: 
0.1886, decode.d0.loss_dice: 0.5041, decode.d1.loss_cls: 0.0714, decode.d1.loss_mask: 0.1832, decode.d1.loss_dice: 0.4915, decode.d2.loss_cls: 0.0664, decode.d2.loss_mask: 0.1823, decode.d2.loss_dice: 0.4867, decode.d3.loss_cls: 0.0614, decode.d3.loss_mask: 0.1820, decode.d3.loss_dice: 0.4831, decode.d4.loss_cls: 0.0566, decode.d4.loss_mask: 0.1821, decode.d4.loss_dice: 0.4872, decode.d5.loss_cls: 0.0576, decode.d5.loss_mask: 0.1815, decode.d5.loss_dice: 0.4830, decode.d6.loss_cls: 0.0542, decode.d6.loss_mask: 0.1814, decode.d6.loss_dice: 0.4819, decode.d7.loss_cls: 0.0608, decode.d7.loss_mask: 0.1815, decode.d7.loss_dice: 0.4803, decode.d8.loss_cls: 0.0563, decode.d8.loss_mask: 0.1817, decode.d8.loss_dice: 0.4876, loss: 7.5397 +2022-05-11 10:58:11,899 - mmseg - INFO - Iter [74850/80000] lr: 9.245e-08, eta: 3:03:38, time: 1.816, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0487, decode.loss_mask: 0.1759, decode.loss_dice: 0.4850, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1816, decode.d0.loss_dice: 0.5032, decode.d1.loss_cls: 0.0673, decode.d1.loss_mask: 0.1767, decode.d1.loss_dice: 0.4911, decode.d2.loss_cls: 0.0546, decode.d2.loss_mask: 0.1767, decode.d2.loss_dice: 0.4884, decode.d3.loss_cls: 0.0618, decode.d3.loss_mask: 0.1758, decode.d3.loss_dice: 0.4815, decode.d4.loss_cls: 0.0558, decode.d4.loss_mask: 0.1759, decode.d4.loss_dice: 0.4861, decode.d5.loss_cls: 0.0518, decode.d5.loss_mask: 0.1763, decode.d5.loss_dice: 0.4892, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1755, decode.d6.loss_dice: 0.4852, decode.d7.loss_cls: 0.0517, decode.d7.loss_mask: 0.1758, decode.d7.loss_dice: 0.4860, decode.d8.loss_cls: 0.0517, decode.d8.loss_mask: 0.1756, decode.d8.loss_dice: 0.4834, loss: 7.4372 +2022-05-11 10:59:40,530 - mmseg - INFO - Iter [74900/80000] lr: 9.155e-08, eta: 3:01:48, time: 1.773, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0430, decode.loss_mask: 0.1776, decode.loss_dice: 0.4775, decode.d0.loss_cls: 0.2895, decode.d0.loss_mask: 
0.1835, decode.d0.loss_dice: 0.5020, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1785, decode.d1.loss_dice: 0.4866, decode.d2.loss_cls: 0.0531, decode.d2.loss_mask: 0.1782, decode.d2.loss_dice: 0.4837, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1780, decode.d3.loss_dice: 0.4782, decode.d4.loss_cls: 0.0483, decode.d4.loss_mask: 0.1781, decode.d4.loss_dice: 0.4822, decode.d5.loss_cls: 0.0423, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4809, decode.d6.loss_cls: 0.0443, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4785, decode.d7.loss_cls: 0.0492, decode.d7.loss_mask: 0.1777, decode.d7.loss_dice: 0.4790, decode.d8.loss_cls: 0.0433, decode.d8.loss_mask: 0.1776, decode.d8.loss_dice: 0.4821, loss: 7.3403 +2022-05-11 11:01:09,410 - mmseg - INFO - Iter [74950/80000] lr: 9.065e-08, eta: 2:59:57, time: 1.778, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1728, decode.loss_dice: 0.4835, decode.d0.loss_cls: 0.2907, decode.d0.loss_mask: 0.1793, decode.d0.loss_dice: 0.5074, decode.d1.loss_cls: 0.0724, decode.d1.loss_mask: 0.1743, decode.d1.loss_dice: 0.4913, decode.d2.loss_cls: 0.0611, decode.d2.loss_mask: 0.1733, decode.d2.loss_dice: 0.4913, decode.d3.loss_cls: 0.0543, decode.d3.loss_mask: 0.1736, decode.d3.loss_dice: 0.4854, decode.d4.loss_cls: 0.0626, decode.d4.loss_mask: 0.1733, decode.d4.loss_dice: 0.4858, decode.d5.loss_cls: 0.0568, decode.d5.loss_mask: 0.1733, decode.d5.loss_dice: 0.4852, decode.d6.loss_cls: 0.0531, decode.d6.loss_mask: 0.1732, decode.d6.loss_dice: 0.4840, decode.d7.loss_cls: 0.0547, decode.d7.loss_mask: 0.1734, decode.d7.loss_dice: 0.4831, decode.d8.loss_cls: 0.0571, decode.d8.loss_mask: 0.1730, decode.d8.loss_dice: 0.4838, loss: 7.4361 +2022-05-11 11:02:42,142 - mmseg - INFO - Saving checkpoint at 75000 iterations +2022-05-11 11:03:16,237 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 11:03:16,245 - mmseg - INFO - Iter [75000/80000] lr: 
8.976e-08, eta: 2:58:14, time: 2.534, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0400, decode.loss_mask: 0.1814, decode.loss_dice: 0.4912, decode.d0.loss_cls: 0.2883, decode.d0.loss_mask: 0.1875, decode.d0.loss_dice: 0.5079, decode.d1.loss_cls: 0.0567, decode.d1.loss_mask: 0.1829, decode.d1.loss_dice: 0.4940, decode.d2.loss_cls: 0.0527, decode.d2.loss_mask: 0.1818, decode.d2.loss_dice: 0.4924, decode.d3.loss_cls: 0.0495, decode.d3.loss_mask: 0.1824, decode.d3.loss_dice: 0.4906, decode.d4.loss_cls: 0.0465, decode.d4.loss_mask: 0.1820, decode.d4.loss_dice: 0.4886, decode.d5.loss_cls: 0.0407, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4892, decode.d6.loss_cls: 0.0420, decode.d6.loss_mask: 0.1820, decode.d6.loss_dice: 0.4902, decode.d7.loss_cls: 0.0470, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4905, decode.d8.loss_cls: 0.0436, decode.d8.loss_mask: 0.1815, decode.d8.loss_dice: 0.4938, loss: 7.4605 +2022-05-11 11:05:11,190 - mmseg - INFO - per class results: +2022-05-11 11:05:11,197 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.63 | 99.25 | +| sidewalk | 88.78 | 94.05 | +| building | 94.37 | 97.2 | +| wall | 67.6 | 79.72 | +| fence | 74.07 | 80.89 | +| pole | 71.14 | 83.18 | +| traffic light | 76.93 | 86.72 | +| traffic sign | 84.01 | 90.07 | +| vegetation | 93.33 | 96.84 | +| terrain | 68.39 | 77.3 | +| sky | 95.76 | 98.52 | +| person | 86.78 | 93.7 | +| rider | 74.56 | 85.39 | +| car | 96.14 | 98.26 | +| truck | 92.19 | 94.65 | +| bus | 93.52 | 96.52 | +| train | 87.92 | 90.58 | +| motorcycle | 77.16 | 87.16 | +| bicycle | 82.74 | 91.29 | ++---------------+-------+-------+ +2022-05-11 11:05:11,198 - mmseg - INFO - Summary: +2022-05-11 11:05:11,198 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 84.42 | 90.59 | ++-------+-------+-------+ +2022-05-11 11:05:11,201 - mmseg - INFO - Exp name: 
mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 11:05:11,201 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8442, mAcc: 0.9059, IoU.road: 0.9863, IoU.sidewalk: 0.8878, IoU.building: 0.9437, IoU.wall: 0.6760, IoU.fence: 0.7407, IoU.pole: 0.7114, IoU.traffic light: 0.7693, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9333, IoU.terrain: 0.6839, IoU.sky: 0.9576, IoU.person: 0.8678, IoU.rider: 0.7456, IoU.car: 0.9614, IoU.truck: 0.9219, IoU.bus: 0.9352, IoU.train: 0.8792, IoU.motorcycle: 0.7716, IoU.bicycle: 0.8274, Acc.road: 0.9925, Acc.sidewalk: 0.9405, Acc.building: 0.9720, Acc.wall: 0.7972, Acc.fence: 0.8089, Acc.pole: 0.8318, Acc.traffic light: 0.8672, Acc.traffic sign: 0.9007, Acc.vegetation: 0.9684, Acc.terrain: 0.7730, Acc.sky: 0.9852, Acc.person: 0.9370, Acc.rider: 0.8539, Acc.car: 0.9826, Acc.truck: 0.9465, Acc.bus: 0.9652, Acc.train: 0.9058, Acc.motorcycle: 0.8716, Acc.bicycle: 0.9129 +2022-05-11 11:06:41,480 - mmseg - INFO - Iter [75050/80000] lr: 8.886e-08, eta: 2:56:45, time: 4.107, data_time: 2.318, memory: 69063, decode.loss_cls: 0.0424, decode.loss_mask: 0.1795, decode.loss_dice: 0.4811, decode.d0.loss_cls: 0.2934, decode.d0.loss_mask: 0.1860, decode.d0.loss_dice: 0.5003, decode.d1.loss_cls: 0.0620, decode.d1.loss_mask: 0.1816, decode.d1.loss_dice: 0.4884, decode.d2.loss_cls: 0.0479, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.4860, decode.d3.loss_cls: 0.0506, decode.d3.loss_mask: 0.1805, decode.d3.loss_dice: 0.4828, decode.d4.loss_cls: 0.0425, decode.d4.loss_mask: 0.1804, decode.d4.loss_dice: 0.4813, decode.d5.loss_cls: 0.0465, decode.d5.loss_mask: 0.1802, decode.d5.loss_dice: 0.4794, decode.d6.loss_cls: 0.0446, decode.d6.loss_mask: 0.1799, decode.d6.loss_dice: 0.4807, decode.d7.loss_cls: 0.0438, decode.d7.loss_mask: 0.1800, decode.d7.loss_dice: 0.4802, decode.d8.loss_cls: 0.0426, decode.d8.loss_mask: 0.1797, decode.d8.loss_dice: 0.4820, loss: 7.3670 +2022-05-11 11:08:12,336 - mmseg - INFO - Iter [75100/80000] lr: 
8.796e-08, eta: 2:54:55, time: 1.814, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0531, decode.loss_mask: 0.1779, decode.loss_dice: 0.4776, decode.d0.loss_cls: 0.2815, decode.d0.loss_mask: 0.1830, decode.d0.loss_dice: 0.4977, decode.d1.loss_cls: 0.0615, decode.d1.loss_mask: 0.1790, decode.d1.loss_dice: 0.4871, decode.d2.loss_cls: 0.0552, decode.d2.loss_mask: 0.1784, decode.d2.loss_dice: 0.4827, decode.d3.loss_cls: 0.0604, decode.d3.loss_mask: 0.1785, decode.d3.loss_dice: 0.4786, decode.d4.loss_cls: 0.0495, decode.d4.loss_mask: 0.1775, decode.d4.loss_dice: 0.4811, decode.d5.loss_cls: 0.0540, decode.d5.loss_mask: 0.1779, decode.d5.loss_dice: 0.4829, decode.d6.loss_cls: 0.0570, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4834, decode.d7.loss_cls: 0.0507, decode.d7.loss_mask: 0.1780, decode.d7.loss_dice: 0.4794, decode.d8.loss_cls: 0.0555, decode.d8.loss_mask: 0.1778, decode.d8.loss_dice: 0.4799, loss: 7.3946 +2022-05-11 11:09:41,299 - mmseg - INFO - Iter [75150/80000] lr: 8.706e-08, eta: 2:53:05, time: 1.783, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0403, decode.loss_mask: 0.1838, decode.loss_dice: 0.4834, decode.d0.loss_cls: 0.2883, decode.d0.loss_mask: 0.1892, decode.d0.loss_dice: 0.5034, decode.d1.loss_cls: 0.0573, decode.d1.loss_mask: 0.1842, decode.d1.loss_dice: 0.4905, decode.d2.loss_cls: 0.0468, decode.d2.loss_mask: 0.1843, decode.d2.loss_dice: 0.4887, decode.d3.loss_cls: 0.0473, decode.d3.loss_mask: 0.1842, decode.d3.loss_dice: 0.4851, decode.d4.loss_cls: 0.0440, decode.d4.loss_mask: 0.1838, decode.d4.loss_dice: 0.4827, decode.d5.loss_cls: 0.0444, decode.d5.loss_mask: 0.1839, decode.d5.loss_dice: 0.4858, decode.d6.loss_cls: 0.0419, decode.d6.loss_mask: 0.1837, decode.d6.loss_dice: 0.4857, decode.d7.loss_cls: 0.0411, decode.d7.loss_mask: 0.1837, decode.d7.loss_dice: 0.4847, decode.d8.loss_cls: 0.0476, decode.d8.loss_mask: 0.1833, decode.d8.loss_dice: 0.4857, loss: 7.4189 +2022-05-11 11:11:13,940 - mmseg - INFO - Iter [75200/80000] lr: 
8.617e-08, eta: 2:51:15, time: 1.852, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0553, decode.loss_mask: 0.1747, decode.loss_dice: 0.4845, decode.d0.loss_cls: 0.3022, decode.d0.loss_mask: 0.1799, decode.d0.loss_dice: 0.5065, decode.d1.loss_cls: 0.0701, decode.d1.loss_mask: 0.1759, decode.d1.loss_dice: 0.4926, decode.d2.loss_cls: 0.0623, decode.d2.loss_mask: 0.1753, decode.d2.loss_dice: 0.4937, decode.d3.loss_cls: 0.0565, decode.d3.loss_mask: 0.1751, decode.d3.loss_dice: 0.4846, decode.d4.loss_cls: 0.0551, decode.d4.loss_mask: 0.1748, decode.d4.loss_dice: 0.4846, decode.d5.loss_cls: 0.0573, decode.d5.loss_mask: 0.1750, decode.d5.loss_dice: 0.4896, decode.d6.loss_cls: 0.0537, decode.d6.loss_mask: 0.1749, decode.d6.loss_dice: 0.4879, decode.d7.loss_cls: 0.0549, decode.d7.loss_mask: 0.1748, decode.d7.loss_dice: 0.4856, decode.d8.loss_cls: 0.0559, decode.d8.loss_mask: 0.1751, decode.d8.loss_dice: 0.4869, loss: 7.4753 +2022-05-11 11:12:43,822 - mmseg - INFO - Iter [75250/80000] lr: 8.527e-08, eta: 2:49:25, time: 1.798, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0426, decode.loss_mask: 0.1821, decode.loss_dice: 0.4759, decode.d0.loss_cls: 0.2972, decode.d0.loss_mask: 0.1884, decode.d0.loss_dice: 0.4988, decode.d1.loss_cls: 0.0574, decode.d1.loss_mask: 0.1829, decode.d1.loss_dice: 0.4854, decode.d2.loss_cls: 0.0480, decode.d2.loss_mask: 0.1828, decode.d2.loss_dice: 0.4827, decode.d3.loss_cls: 0.0442, decode.d3.loss_mask: 0.1825, decode.d3.loss_dice: 0.4775, decode.d4.loss_cls: 0.0468, decode.d4.loss_mask: 0.1824, decode.d4.loss_dice: 0.4779, decode.d5.loss_cls: 0.0418, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4766, decode.d6.loss_cls: 0.0452, decode.d6.loss_mask: 0.1819, decode.d6.loss_dice: 0.4761, decode.d7.loss_cls: 0.0418, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4776, decode.d8.loss_cls: 0.0382, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.4774, loss: 7.3376 +2022-05-11 11:14:14,894 - mmseg - INFO - Iter [75300/80000] lr: 
8.437e-08, eta: 2:47:35, time: 1.822, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0390, decode.loss_mask: 0.1766, decode.loss_dice: 0.4883, decode.d0.loss_cls: 0.2845, decode.d0.loss_mask: 0.1825, decode.d0.loss_dice: 0.5092, decode.d1.loss_cls: 0.0465, decode.d1.loss_mask: 0.1786, decode.d1.loss_dice: 0.4934, decode.d2.loss_cls: 0.0473, decode.d2.loss_mask: 0.1777, decode.d2.loss_dice: 0.4942, decode.d3.loss_cls: 0.0393, decode.d3.loss_mask: 0.1772, decode.d3.loss_dice: 0.4862, decode.d4.loss_cls: 0.0391, decode.d4.loss_mask: 0.1771, decode.d4.loss_dice: 0.4877, decode.d5.loss_cls: 0.0398, decode.d5.loss_mask: 0.1766, decode.d5.loss_dice: 0.4894, decode.d6.loss_cls: 0.0404, decode.d6.loss_mask: 0.1766, decode.d6.loss_dice: 0.4925, decode.d7.loss_cls: 0.0347, decode.d7.loss_mask: 0.1766, decode.d7.loss_dice: 0.4899, decode.d8.loss_cls: 0.0412, decode.d8.loss_mask: 0.1765, decode.d8.loss_dice: 0.4861, loss: 7.3446 +2022-05-11 11:15:48,467 - mmseg - INFO - Iter [75350/80000] lr: 8.347e-08, eta: 2:45:46, time: 1.871, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0510, decode.loss_mask: 0.1766, decode.loss_dice: 0.4897, decode.d0.loss_cls: 0.2926, decode.d0.loss_mask: 0.1833, decode.d0.loss_dice: 0.5115, decode.d1.loss_cls: 0.0733, decode.d1.loss_mask: 0.1780, decode.d1.loss_dice: 0.4998, decode.d2.loss_cls: 0.0620, decode.d2.loss_mask: 0.1777, decode.d2.loss_dice: 0.4976, decode.d3.loss_cls: 0.0600, decode.d3.loss_mask: 0.1776, decode.d3.loss_dice: 0.4936, decode.d4.loss_cls: 0.0528, decode.d4.loss_mask: 0.1776, decode.d4.loss_dice: 0.4948, decode.d5.loss_cls: 0.0507, decode.d5.loss_mask: 0.1775, decode.d5.loss_dice: 0.4905, decode.d6.loss_cls: 0.0528, decode.d6.loss_mask: 0.1772, decode.d6.loss_dice: 0.4903, decode.d7.loss_cls: 0.0528, decode.d7.loss_mask: 0.1774, decode.d7.loss_dice: 0.4899, decode.d8.loss_cls: 0.0536, decode.d8.loss_mask: 0.1769, decode.d8.loss_dice: 0.4878, loss: 7.5266 +2022-05-11 11:17:19,342 - mmseg - INFO - Iter [75400/80000] lr: 
8.258e-08, eta: 2:43:56, time: 1.818, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0505, decode.loss_mask: 0.1771, decode.loss_dice: 0.4858, decode.d0.loss_cls: 0.3064, decode.d0.loss_mask: 0.1832, decode.d0.loss_dice: 0.5024, decode.d1.loss_cls: 0.0641, decode.d1.loss_mask: 0.1788, decode.d1.loss_dice: 0.4891, decode.d2.loss_cls: 0.0604, decode.d2.loss_mask: 0.1785, decode.d2.loss_dice: 0.4862, decode.d3.loss_cls: 0.0518, decode.d3.loss_mask: 0.1782, decode.d3.loss_dice: 0.4831, decode.d4.loss_cls: 0.0513, decode.d4.loss_mask: 0.1777, decode.d4.loss_dice: 0.4790, decode.d5.loss_cls: 0.0561, decode.d5.loss_mask: 0.1777, decode.d5.loss_dice: 0.4836, decode.d6.loss_cls: 0.0516, decode.d6.loss_mask: 0.1774, decode.d6.loss_dice: 0.4835, decode.d7.loss_cls: 0.0558, decode.d7.loss_mask: 0.1776, decode.d7.loss_dice: 0.4808, decode.d8.loss_cls: 0.0561, decode.d8.loss_mask: 0.1775, decode.d8.loss_dice: 0.4846, loss: 7.4460 +2022-05-11 11:18:50,057 - mmseg - INFO - Iter [75450/80000] lr: 8.168e-08, eta: 2:42:07, time: 1.814, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0456, decode.loss_mask: 0.1821, decode.loss_dice: 0.4786, decode.d0.loss_cls: 0.2976, decode.d0.loss_mask: 0.1880, decode.d0.loss_dice: 0.4973, decode.d1.loss_cls: 0.0638, decode.d1.loss_mask: 0.1834, decode.d1.loss_dice: 0.4887, decode.d2.loss_cls: 0.0558, decode.d2.loss_mask: 0.1831, decode.d2.loss_dice: 0.4854, decode.d3.loss_cls: 0.0491, decode.d3.loss_mask: 0.1826, decode.d3.loss_dice: 0.4839, decode.d4.loss_cls: 0.0503, decode.d4.loss_mask: 0.1819, decode.d4.loss_dice: 0.4806, decode.d5.loss_cls: 0.0435, decode.d5.loss_mask: 0.1823, decode.d5.loss_dice: 0.4794, decode.d6.loss_cls: 0.0453, decode.d6.loss_mask: 0.1820, decode.d6.loss_dice: 0.4843, decode.d7.loss_cls: 0.0423, decode.d7.loss_mask: 0.1821, decode.d7.loss_dice: 0.4808, decode.d8.loss_cls: 0.0429, decode.d8.loss_mask: 0.1821, decode.d8.loss_dice: 0.4817, loss: 7.4064 +2022-05-11 11:20:20,085 - mmseg - INFO - Iter [75500/80000] lr: 
8.078e-08, eta: 2:40:17, time: 1.800, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0504, decode.loss_mask: 0.1764, decode.loss_dice: 0.4725, decode.d0.loss_cls: 0.2897, decode.d0.loss_mask: 0.1820, decode.d0.loss_dice: 0.4879, decode.d1.loss_cls: 0.0599, decode.d1.loss_mask: 0.1776, decode.d1.loss_dice: 0.4811, decode.d2.loss_cls: 0.0562, decode.d2.loss_mask: 0.1770, decode.d2.loss_dice: 0.4748, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1773, decode.d3.loss_dice: 0.4786, decode.d4.loss_cls: 0.0501, decode.d4.loss_mask: 0.1770, decode.d4.loss_dice: 0.4753, decode.d5.loss_cls: 0.0495, decode.d5.loss_mask: 0.1766, decode.d5.loss_dice: 0.4746, decode.d6.loss_cls: 0.0479, decode.d6.loss_mask: 0.1761, decode.d6.loss_dice: 0.4738, decode.d7.loss_cls: 0.0496, decode.d7.loss_mask: 0.1764, decode.d7.loss_dice: 0.4734, decode.d8.loss_cls: 0.0514, decode.d8.loss_mask: 0.1763, decode.d8.loss_dice: 0.4749, loss: 7.2969 +2022-05-11 11:21:52,166 - mmseg - INFO - Iter [75550/80000] lr: 7.988e-08, eta: 2:38:28, time: 1.842, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0469, decode.loss_mask: 0.1850, decode.loss_dice: 0.4882, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.1914, decode.d0.loss_dice: 0.5104, decode.d1.loss_cls: 0.0604, decode.d1.loss_mask: 0.1862, decode.d1.loss_dice: 0.4937, decode.d2.loss_cls: 0.0541, decode.d2.loss_mask: 0.1857, decode.d2.loss_dice: 0.4897, decode.d3.loss_cls: 0.0543, decode.d3.loss_mask: 0.1855, decode.d3.loss_dice: 0.4931, decode.d4.loss_cls: 0.0448, decode.d4.loss_mask: 0.1849, decode.d4.loss_dice: 0.4903, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1852, decode.d5.loss_dice: 0.4926, decode.d6.loss_cls: 0.0551, decode.d6.loss_mask: 0.1851, decode.d6.loss_dice: 0.4887, decode.d7.loss_cls: 0.0490, decode.d7.loss_mask: 0.1851, decode.d7.loss_dice: 0.4909, decode.d8.loss_cls: 0.0464, decode.d8.loss_mask: 0.1849, decode.d8.loss_dice: 0.4875, loss: 7.5508 +2022-05-11 11:23:21,253 - mmseg - INFO - Iter [75600/80000] lr: 
7.899e-08, eta: 2:36:38, time: 1.782, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0490, decode.loss_mask: 0.1817, decode.loss_dice: 0.4960, decode.d0.loss_cls: 0.2907, decode.d0.loss_mask: 0.1877, decode.d0.loss_dice: 0.5190, decode.d1.loss_cls: 0.0676, decode.d1.loss_mask: 0.1835, decode.d1.loss_dice: 0.5045, decode.d2.loss_cls: 0.0557, decode.d2.loss_mask: 0.1828, decode.d2.loss_dice: 0.5021, decode.d3.loss_cls: 0.0529, decode.d3.loss_mask: 0.1819, decode.d3.loss_dice: 0.4960, decode.d4.loss_cls: 0.0508, decode.d4.loss_mask: 0.1821, decode.d4.loss_dice: 0.4943, decode.d5.loss_cls: 0.0489, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4981, decode.d6.loss_cls: 0.0462, decode.d6.loss_mask: 0.1818, decode.d6.loss_dice: 0.5004, decode.d7.loss_cls: 0.0463, decode.d7.loss_mask: 0.1817, decode.d7.loss_dice: 0.4964, decode.d8.loss_cls: 0.0501, decode.d8.loss_mask: 0.1816, decode.d8.loss_dice: 0.4957, loss: 7.5874 +2022-05-11 11:24:52,387 - mmseg - INFO - Iter [75650/80000] lr: 7.809e-08, eta: 2:34:49, time: 1.823, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0499, decode.loss_mask: 0.1818, decode.loss_dice: 0.4813, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1869, decode.d0.loss_dice: 0.5012, decode.d1.loss_cls: 0.0638, decode.d1.loss_mask: 0.1831, decode.d1.loss_dice: 0.4914, decode.d2.loss_cls: 0.0530, decode.d2.loss_mask: 0.1826, decode.d2.loss_dice: 0.4842, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1817, decode.d3.loss_dice: 0.4831, decode.d4.loss_cls: 0.0495, decode.d4.loss_mask: 0.1820, decode.d4.loss_dice: 0.4854, decode.d5.loss_cls: 0.0505, decode.d5.loss_mask: 0.1820, decode.d5.loss_dice: 0.4808, decode.d6.loss_cls: 0.0445, decode.d6.loss_mask: 0.1815, decode.d6.loss_dice: 0.4848, decode.d7.loss_cls: 0.0448, decode.d7.loss_mask: 0.1816, decode.d7.loss_dice: 0.4791, decode.d8.loss_cls: 0.0484, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.4786, loss: 7.4278 +2022-05-11 11:26:22,696 - mmseg - INFO - Iter [75700/80000] lr: 
7.719e-08, eta: 2:33:00, time: 1.806, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0404, decode.loss_mask: 0.1712, decode.loss_dice: 0.4751, decode.d0.loss_cls: 0.2853, decode.d0.loss_mask: 0.1772, decode.d0.loss_dice: 0.4952, decode.d1.loss_cls: 0.0525, decode.d1.loss_mask: 0.1721, decode.d1.loss_dice: 0.4836, decode.d2.loss_cls: 0.0490, decode.d2.loss_mask: 0.1718, decode.d2.loss_dice: 0.4798, decode.d3.loss_cls: 0.0438, decode.d3.loss_mask: 0.1715, decode.d3.loss_dice: 0.4762, decode.d4.loss_cls: 0.0445, decode.d4.loss_mask: 0.1714, decode.d4.loss_dice: 0.4736, decode.d5.loss_cls: 0.0466, decode.d5.loss_mask: 0.1716, decode.d5.loss_dice: 0.4785, decode.d6.loss_cls: 0.0440, decode.d6.loss_mask: 0.1716, decode.d6.loss_dice: 0.4762, decode.d7.loss_cls: 0.0484, decode.d7.loss_mask: 0.1713, decode.d7.loss_dice: 0.4765, decode.d8.loss_cls: 0.0466, decode.d8.loss_mask: 0.1713, decode.d8.loss_dice: 0.4772, loss: 7.2140 +2022-05-11 11:27:54,404 - mmseg - INFO - Iter [75750/80000] lr: 7.629e-08, eta: 2:31:11, time: 1.832, data_time: 0.066, memory: 69063, decode.loss_cls: 0.0409, decode.loss_mask: 0.1760, decode.loss_dice: 0.4773, decode.d0.loss_cls: 0.2852, decode.d0.loss_mask: 0.1813, decode.d0.loss_dice: 0.4984, decode.d1.loss_cls: 0.0550, decode.d1.loss_mask: 0.1774, decode.d1.loss_dice: 0.4866, decode.d2.loss_cls: 0.0458, decode.d2.loss_mask: 0.1766, decode.d2.loss_dice: 0.4819, decode.d3.loss_cls: 0.0480, decode.d3.loss_mask: 0.1763, decode.d3.loss_dice: 0.4788, decode.d4.loss_cls: 0.0404, decode.d4.loss_mask: 0.1761, decode.d4.loss_dice: 0.4797, decode.d5.loss_cls: 0.0421, decode.d5.loss_mask: 0.1764, decode.d5.loss_dice: 0.4802, decode.d6.loss_cls: 0.0379, decode.d6.loss_mask: 0.1759, decode.d6.loss_dice: 0.4806, decode.d7.loss_cls: 0.0491, decode.d7.loss_mask: 0.1761, decode.d7.loss_dice: 0.4781, decode.d8.loss_cls: 0.0415, decode.d8.loss_mask: 0.1758, decode.d8.loss_dice: 0.4800, loss: 7.2754 +2022-05-11 11:29:24,196 - mmseg - INFO - Iter [75800/80000] lr: 
7.540e-08, eta: 2:29:21, time: 1.796, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0504, decode.loss_mask: 0.1781, decode.loss_dice: 0.4731, decode.d0.loss_cls: 0.2869, decode.d0.loss_mask: 0.1837, decode.d0.loss_dice: 0.4902, decode.d1.loss_cls: 0.0599, decode.d1.loss_mask: 0.1796, decode.d1.loss_dice: 0.4838, decode.d2.loss_cls: 0.0551, decode.d2.loss_mask: 0.1791, decode.d2.loss_dice: 0.4791, decode.d3.loss_cls: 0.0480, decode.d3.loss_mask: 0.1782, decode.d3.loss_dice: 0.4755, decode.d4.loss_cls: 0.0511, decode.d4.loss_mask: 0.1781, decode.d4.loss_dice: 0.4736, decode.d5.loss_cls: 0.0485, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4747, decode.d6.loss_cls: 0.0439, decode.d6.loss_mask: 0.1775, decode.d6.loss_dice: 0.4737, decode.d7.loss_cls: 0.0498, decode.d7.loss_mask: 0.1778, decode.d7.loss_dice: 0.4747, decode.d8.loss_cls: 0.0469, decode.d8.loss_mask: 0.1779, decode.d8.loss_dice: 0.4742, loss: 7.3011 +2022-05-11 11:30:53,314 - mmseg - INFO - Iter [75850/80000] lr: 7.450e-08, eta: 2:27:32, time: 1.784, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0431, decode.loss_mask: 0.1768, decode.loss_dice: 0.4768, decode.d0.loss_cls: 0.2946, decode.d0.loss_mask: 0.1832, decode.d0.loss_dice: 0.5009, decode.d1.loss_cls: 0.0567, decode.d1.loss_mask: 0.1781, decode.d1.loss_dice: 0.4857, decode.d2.loss_cls: 0.0520, decode.d2.loss_mask: 0.1777, decode.d2.loss_dice: 0.4847, decode.d3.loss_cls: 0.0481, decode.d3.loss_mask: 0.1768, decode.d3.loss_dice: 0.4804, decode.d4.loss_cls: 0.0485, decode.d4.loss_mask: 0.1770, decode.d4.loss_dice: 0.4810, decode.d5.loss_cls: 0.0438, decode.d5.loss_mask: 0.1774, decode.d5.loss_dice: 0.4842, decode.d6.loss_cls: 0.0424, decode.d6.loss_mask: 0.1766, decode.d6.loss_dice: 0.4793, decode.d7.loss_cls: 0.0436, decode.d7.loss_mask: 0.1767, decode.d7.loss_dice: 0.4807, decode.d8.loss_cls: 0.0454, decode.d8.loss_mask: 0.1766, decode.d8.loss_dice: 0.4808, loss: 7.3298 +2022-05-11 11:32:25,156 - mmseg - INFO - Iter [75900/80000] lr: 
7.360e-08, eta: 2:25:43, time: 1.836, data_time: 0.061, memory: 69063, decode.loss_cls: 0.0502, decode.loss_mask: 0.1812, decode.loss_dice: 0.4837, decode.d0.loss_cls: 0.2924, decode.d0.loss_mask: 0.1872, decode.d0.loss_dice: 0.5026, decode.d1.loss_cls: 0.0576, decode.d1.loss_mask: 0.1828, decode.d1.loss_dice: 0.4881, decode.d2.loss_cls: 0.0569, decode.d2.loss_mask: 0.1822, decode.d2.loss_dice: 0.4886, decode.d3.loss_cls: 0.0507, decode.d3.loss_mask: 0.1815, decode.d3.loss_dice: 0.4850, decode.d4.loss_cls: 0.0485, decode.d4.loss_mask: 0.1816, decode.d4.loss_dice: 0.4850, decode.d5.loss_cls: 0.0513, decode.d5.loss_mask: 0.1821, decode.d5.loss_dice: 0.4800, decode.d6.loss_cls: 0.0466, decode.d6.loss_mask: 0.1814, decode.d6.loss_dice: 0.4835, decode.d7.loss_cls: 0.0483, decode.d7.loss_mask: 0.1812, decode.d7.loss_dice: 0.4832, decode.d8.loss_cls: 0.0481, decode.d8.loss_mask: 0.1810, decode.d8.loss_dice: 0.4834, loss: 7.4359 +2022-05-11 11:33:55,544 - mmseg - INFO - Iter [75950/80000] lr: 7.271e-08, eta: 2:23:54, time: 1.808, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0453, decode.loss_mask: 0.1825, decode.loss_dice: 0.4801, decode.d0.loss_cls: 0.2953, decode.d0.loss_mask: 0.1895, decode.d0.loss_dice: 0.5020, decode.d1.loss_cls: 0.0616, decode.d1.loss_mask: 0.1848, decode.d1.loss_dice: 0.4887, decode.d2.loss_cls: 0.0577, decode.d2.loss_mask: 0.1837, decode.d2.loss_dice: 0.4848, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1830, decode.d3.loss_dice: 0.4834, decode.d4.loss_cls: 0.0500, decode.d4.loss_mask: 0.1828, decode.d4.loss_dice: 0.4816, decode.d5.loss_cls: 0.0531, decode.d5.loss_mask: 0.1835, decode.d5.loss_dice: 0.4818, decode.d6.loss_cls: 0.0431, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.4814, decode.d7.loss_cls: 0.0506, decode.d7.loss_mask: 0.1824, decode.d7.loss_dice: 0.4801, decode.d8.loss_cls: 0.0434, decode.d8.loss_mask: 0.1825, decode.d8.loss_dice: 0.4802, loss: 7.4322 +2022-05-11 11:35:25,509 - mmseg - INFO - Saving checkpoint at 
76000 iterations +2022-05-11 11:35:55,230 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 11:35:55,239 - mmseg - INFO - Iter [76000/80000] lr: 7.181e-08, eta: 2:22:09, time: 2.391, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0474, decode.loss_mask: 0.1781, decode.loss_dice: 0.4832, decode.d0.loss_cls: 0.2927, decode.d0.loss_mask: 0.1841, decode.d0.loss_dice: 0.5038, decode.d1.loss_cls: 0.0570, decode.d1.loss_mask: 0.1801, decode.d1.loss_dice: 0.4900, decode.d2.loss_cls: 0.0519, decode.d2.loss_mask: 0.1788, decode.d2.loss_dice: 0.4874, decode.d3.loss_cls: 0.0522, decode.d3.loss_mask: 0.1786, decode.d3.loss_dice: 0.4840, decode.d4.loss_cls: 0.0489, decode.d4.loss_mask: 0.1784, decode.d4.loss_dice: 0.4820, decode.d5.loss_cls: 0.0452, decode.d5.loss_mask: 0.1791, decode.d5.loss_dice: 0.4841, decode.d6.loss_cls: 0.0447, decode.d6.loss_mask: 0.1792, decode.d6.loss_dice: 0.4822, decode.d7.loss_cls: 0.0461, decode.d7.loss_mask: 0.1785, decode.d7.loss_dice: 0.4842, decode.d8.loss_cls: 0.0457, decode.d8.loss_mask: 0.1781, decode.d8.loss_dice: 0.4836, loss: 7.3891 +2022-05-11 11:37:51,225 - mmseg - INFO - per class results: +2022-05-11 11:37:51,231 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.61 | 99.26 | +| sidewalk | 88.65 | 93.89 | +| building | 94.41 | 97.1 | +| wall | 70.4 | 79.92 | +| fence | 74.56 | 81.79 | +| pole | 71.18 | 83.83 | +| traffic light | 77.01 | 86.82 | +| traffic sign | 84.09 | 90.36 | +| vegetation | 93.37 | 96.87 | +| terrain | 68.48 | 77.89 | +| sky | 95.79 | 98.43 | +| person | 86.77 | 93.69 | +| rider | 74.38 | 85.35 | +| car | 96.16 | 98.27 | +| truck | 82.62 | 94.77 | +| bus | 93.55 | 96.56 | +| train | 87.95 | 90.87 | +| motorcycle | 77.48 | 87.27 | +| bicycle | 82.67 | 91.52 | ++---------------+-------+-------+ +2022-05-11 11:37:51,231 - mmseg - INFO - Summary: +2022-05-11 11:37:51,231 - mmseg - INFO - 
++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 84.11 | 90.76 | ++-------+-------+-------+ +2022-05-11 11:37:51,235 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 11:37:51,235 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8411, mAcc: 0.9076, IoU.road: 0.9861, IoU.sidewalk: 0.8865, IoU.building: 0.9441, IoU.wall: 0.7040, IoU.fence: 0.7456, IoU.pole: 0.7118, IoU.traffic light: 0.7701, IoU.traffic sign: 0.8409, IoU.vegetation: 0.9337, IoU.terrain: 0.6848, IoU.sky: 0.9579, IoU.person: 0.8677, IoU.rider: 0.7438, IoU.car: 0.9616, IoU.truck: 0.8262, IoU.bus: 0.9355, IoU.train: 0.8795, IoU.motorcycle: 0.7748, IoU.bicycle: 0.8267, Acc.road: 0.9926, Acc.sidewalk: 0.9389, Acc.building: 0.9710, Acc.wall: 0.7992, Acc.fence: 0.8179, Acc.pole: 0.8383, Acc.traffic light: 0.8682, Acc.traffic sign: 0.9036, Acc.vegetation: 0.9687, Acc.terrain: 0.7789, Acc.sky: 0.9843, Acc.person: 0.9369, Acc.rider: 0.8535, Acc.car: 0.9827, Acc.truck: 0.9477, Acc.bus: 0.9656, Acc.train: 0.9087, Acc.motorcycle: 0.8727, Acc.bicycle: 0.9152 +2022-05-11 11:39:22,651 - mmseg - INFO - Iter [76050/80000] lr: 7.091e-08, eta: 2:20:37, time: 4.151, data_time: 2.338, memory: 69063, decode.loss_cls: 0.0493, decode.loss_mask: 0.1796, decode.loss_dice: 0.4824, decode.d0.loss_cls: 0.2879, decode.d0.loss_mask: 0.1868, decode.d0.loss_dice: 0.5032, decode.d1.loss_cls: 0.0589, decode.d1.loss_mask: 0.1809, decode.d1.loss_dice: 0.4912, decode.d2.loss_cls: 0.0572, decode.d2.loss_mask: 0.1804, decode.d2.loss_dice: 0.4885, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1799, decode.d3.loss_dice: 0.4848, decode.d4.loss_cls: 0.0570, decode.d4.loss_mask: 0.1798, decode.d4.loss_dice: 0.4846, decode.d5.loss_cls: 0.0503, decode.d5.loss_mask: 0.1797, decode.d5.loss_dice: 0.4867, decode.d6.loss_cls: 0.0458, decode.d6.loss_mask: 0.1796, decode.d6.loss_dice: 0.4835, decode.d7.loss_cls: 0.0451, decode.d7.loss_mask: 0.1794, 
decode.d7.loss_dice: 0.4874, decode.d8.loss_cls: 0.0487, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4867, loss: 7.4372 +2022-05-11 11:40:54,762 - mmseg - INFO - Iter [76100/80000] lr: 7.001e-08, eta: 2:18:48, time: 1.842, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0437, decode.loss_mask: 0.1786, decode.loss_dice: 0.4816, decode.d0.loss_cls: 0.3012, decode.d0.loss_mask: 0.1853, decode.d0.loss_dice: 0.4990, decode.d1.loss_cls: 0.0647, decode.d1.loss_mask: 0.1811, decode.d1.loss_dice: 0.4839, decode.d2.loss_cls: 0.0528, decode.d2.loss_mask: 0.1804, decode.d2.loss_dice: 0.4859, decode.d3.loss_cls: 0.0509, decode.d3.loss_mask: 0.1799, decode.d3.loss_dice: 0.4840, decode.d4.loss_cls: 0.0536, decode.d4.loss_mask: 0.1796, decode.d4.loss_dice: 0.4809, decode.d5.loss_cls: 0.0537, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4815, decode.d6.loss_cls: 0.0481, decode.d6.loss_mask: 0.1793, decode.d6.loss_dice: 0.4818, decode.d7.loss_cls: 0.0440, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.4796, decode.d8.loss_cls: 0.0434, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 0.4805, loss: 7.3971 +2022-05-11 11:42:26,393 - mmseg - INFO - Iter [76150/80000] lr: 6.912e-08, eta: 2:16:59, time: 1.831, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0515, decode.loss_mask: 0.1789, decode.loss_dice: 0.4740, decode.d0.loss_cls: 0.3002, decode.d0.loss_mask: 0.1865, decode.d0.loss_dice: 0.4945, decode.d1.loss_cls: 0.0583, decode.d1.loss_mask: 0.1806, decode.d1.loss_dice: 0.4822, decode.d2.loss_cls: 0.0517, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4774, decode.d3.loss_cls: 0.0560, decode.d3.loss_mask: 0.1789, decode.d3.loss_dice: 0.4763, decode.d4.loss_cls: 0.0492, decode.d4.loss_mask: 0.1785, decode.d4.loss_dice: 0.4765, decode.d5.loss_cls: 0.0544, decode.d5.loss_mask: 0.1794, decode.d5.loss_dice: 0.4778, decode.d6.loss_cls: 0.0522, decode.d6.loss_mask: 0.1782, decode.d6.loss_dice: 0.4765, decode.d7.loss_cls: 0.0493, decode.d7.loss_mask: 0.1788, 
decode.d7.loss_dice: 0.4755, decode.d8.loss_cls: 0.0438, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4740, loss: 7.3493 +2022-05-11 11:43:56,803 - mmseg - INFO - Iter [76200/80000] lr: 6.822e-08, eta: 2:15:10, time: 1.809, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0516, decode.loss_mask: 0.1803, decode.loss_dice: 0.4904, decode.d0.loss_cls: 0.2886, decode.d0.loss_mask: 0.1863, decode.d0.loss_dice: 0.5123, decode.d1.loss_cls: 0.0667, decode.d1.loss_mask: 0.1812, decode.d1.loss_dice: 0.4940, decode.d2.loss_cls: 0.0571, decode.d2.loss_mask: 0.1810, decode.d2.loss_dice: 0.4971, decode.d3.loss_cls: 0.0530, decode.d3.loss_mask: 0.1808, decode.d3.loss_dice: 0.4923, decode.d4.loss_cls: 0.0485, decode.d4.loss_mask: 0.1804, decode.d4.loss_dice: 0.4898, decode.d5.loss_cls: 0.0505, decode.d5.loss_mask: 0.1803, decode.d5.loss_dice: 0.4900, decode.d6.loss_cls: 0.0542, decode.d6.loss_mask: 0.1804, decode.d6.loss_dice: 0.4900, decode.d7.loss_cls: 0.0531, decode.d7.loss_mask: 0.1806, decode.d7.loss_dice: 0.4896, decode.d8.loss_cls: 0.0552, decode.d8.loss_mask: 0.1801, decode.d8.loss_dice: 0.4880, loss: 7.5236 +2022-05-11 11:45:27,890 - mmseg - INFO - Iter [76250/80000] lr: 6.732e-08, eta: 2:13:22, time: 1.822, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0397, decode.loss_mask: 0.1773, decode.loss_dice: 0.4834, decode.d0.loss_cls: 0.2857, decode.d0.loss_mask: 0.1837, decode.d0.loss_dice: 0.5037, decode.d1.loss_cls: 0.0507, decode.d1.loss_mask: 0.1782, decode.d1.loss_dice: 0.4869, decode.d2.loss_cls: 0.0497, decode.d2.loss_mask: 0.1780, decode.d2.loss_dice: 0.4885, decode.d3.loss_cls: 0.0499, decode.d3.loss_mask: 0.1778, decode.d3.loss_dice: 0.4861, decode.d4.loss_cls: 0.0438, decode.d4.loss_mask: 0.1774, decode.d4.loss_dice: 0.4860, decode.d5.loss_cls: 0.0450, decode.d5.loss_mask: 0.1777, decode.d5.loss_dice: 0.4840, decode.d6.loss_cls: 0.0438, decode.d6.loss_mask: 0.1772, decode.d6.loss_dice: 0.4849, decode.d7.loss_cls: 0.0376, decode.d7.loss_mask: 0.1771, 
decode.d7.loss_dice: 0.4822, decode.d8.loss_cls: 0.0418, decode.d8.loss_mask: 0.1771, decode.d8.loss_dice: 0.4838, loss: 7.3382 +2022-05-11 11:47:01,197 - mmseg - INFO - Iter [76300/80000] lr: 6.642e-08, eta: 2:11:33, time: 1.865, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0464, decode.loss_mask: 0.1817, decode.loss_dice: 0.4891, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.1880, decode.d0.loss_dice: 0.5174, decode.d1.loss_cls: 0.0608, decode.d1.loss_mask: 0.1828, decode.d1.loss_dice: 0.4990, decode.d2.loss_cls: 0.0573, decode.d2.loss_mask: 0.1822, decode.d2.loss_dice: 0.4934, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1814, decode.d3.loss_dice: 0.4871, decode.d4.loss_cls: 0.0491, decode.d4.loss_mask: 0.1820, decode.d4.loss_dice: 0.4886, decode.d5.loss_cls: 0.0543, decode.d5.loss_mask: 0.1817, decode.d5.loss_dice: 0.4941, decode.d6.loss_cls: 0.0496, decode.d6.loss_mask: 0.1814, decode.d6.loss_dice: 0.4900, decode.d7.loss_cls: 0.0491, decode.d7.loss_mask: 0.1815, decode.d7.loss_dice: 0.4923, decode.d8.loss_cls: 0.0486, decode.d8.loss_mask: 0.1814, decode.d8.loss_dice: 0.4906, loss: 7.5309 +2022-05-11 11:48:32,068 - mmseg - INFO - Iter [76350/80000] lr: 6.553e-08, eta: 2:09:44, time: 1.818, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0608, decode.loss_mask: 0.1778, decode.loss_dice: 0.4946, decode.d0.loss_cls: 0.3138, decode.d0.loss_mask: 0.1854, decode.d0.loss_dice: 0.5157, decode.d1.loss_cls: 0.0817, decode.d1.loss_mask: 0.1790, decode.d1.loss_dice: 0.4982, decode.d2.loss_cls: 0.0708, decode.d2.loss_mask: 0.1786, decode.d2.loss_dice: 0.4995, decode.d3.loss_cls: 0.0658, decode.d3.loss_mask: 0.1780, decode.d3.loss_dice: 0.4928, decode.d4.loss_cls: 0.0698, decode.d4.loss_mask: 0.1781, decode.d4.loss_dice: 0.4924, decode.d5.loss_cls: 0.0606, decode.d5.loss_mask: 0.1780, decode.d5.loss_dice: 0.4959, decode.d6.loss_cls: 0.0683, decode.d6.loss_mask: 0.1775, decode.d6.loss_dice: 0.4932, decode.d7.loss_cls: 0.0617, decode.d7.loss_mask: 0.1781, 
decode.d7.loss_dice: 0.4945, decode.d8.loss_cls: 0.0598, decode.d8.loss_mask: 0.1776, decode.d8.loss_dice: 0.4935, loss: 7.6713 +2022-05-11 11:50:03,045 - mmseg - INFO - Iter [76400/80000] lr: 6.463e-08, eta: 2:07:56, time: 1.819, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0489, decode.loss_mask: 0.1729, decode.loss_dice: 0.4851, decode.d0.loss_cls: 0.3004, decode.d0.loss_mask: 0.1790, decode.d0.loss_dice: 0.5082, decode.d1.loss_cls: 0.0628, decode.d1.loss_mask: 0.1743, decode.d1.loss_dice: 0.4950, decode.d2.loss_cls: 0.0601, decode.d2.loss_mask: 0.1739, decode.d2.loss_dice: 0.4943, decode.d3.loss_cls: 0.0469, decode.d3.loss_mask: 0.1733, decode.d3.loss_dice: 0.4853, decode.d4.loss_cls: 0.0460, decode.d4.loss_mask: 0.1733, decode.d4.loss_dice: 0.4881, decode.d5.loss_cls: 0.0471, decode.d5.loss_mask: 0.1730, decode.d5.loss_dice: 0.4882, decode.d6.loss_cls: 0.0453, decode.d6.loss_mask: 0.1730, decode.d6.loss_dice: 0.4852, decode.d7.loss_cls: 0.0469, decode.d7.loss_mask: 0.1730, decode.d7.loss_dice: 0.4879, decode.d8.loss_cls: 0.0476, decode.d8.loss_mask: 0.1726, decode.d8.loss_dice: 0.4874, loss: 7.3951 +2022-05-11 11:51:34,075 - mmseg - INFO - Iter [76450/80000] lr: 6.373e-08, eta: 2:06:07, time: 1.821, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0493, decode.loss_mask: 0.1784, decode.loss_dice: 0.4886, decode.d0.loss_cls: 0.2909, decode.d0.loss_mask: 0.1846, decode.d0.loss_dice: 0.5075, decode.d1.loss_cls: 0.0739, decode.d1.loss_mask: 0.1793, decode.d1.loss_dice: 0.4911, decode.d2.loss_cls: 0.0548, decode.d2.loss_mask: 0.1792, decode.d2.loss_dice: 0.4934, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1788, decode.d3.loss_dice: 0.4890, decode.d4.loss_cls: 0.0502, decode.d4.loss_mask: 0.1790, decode.d4.loss_dice: 0.4853, decode.d5.loss_cls: 0.0549, decode.d5.loss_mask: 0.1784, decode.d5.loss_dice: 0.4850, decode.d6.loss_cls: 0.0492, decode.d6.loss_mask: 0.1785, decode.d6.loss_dice: 0.4883, decode.d7.loss_cls: 0.0496, decode.d7.loss_mask: 0.1779, 
decode.d7.loss_dice: 0.4911, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1779, decode.d8.loss_dice: 0.4832, loss: 7.4667 +2022-05-11 11:53:07,134 - mmseg - INFO - Iter [76500/80000] lr: 6.283e-08, eta: 2:04:19, time: 1.860, data_time: 0.061, memory: 69063, decode.loss_cls: 0.0513, decode.loss_mask: 0.1808, decode.loss_dice: 0.4826, decode.d0.loss_cls: 0.2932, decode.d0.loss_mask: 0.1872, decode.d0.loss_dice: 0.4994, decode.d1.loss_cls: 0.0687, decode.d1.loss_mask: 0.1815, decode.d1.loss_dice: 0.4838, decode.d2.loss_cls: 0.0606, decode.d2.loss_mask: 0.1813, decode.d2.loss_dice: 0.4818, decode.d3.loss_cls: 0.0519, decode.d3.loss_mask: 0.1809, decode.d3.loss_dice: 0.4780, decode.d4.loss_cls: 0.0492, decode.d4.loss_mask: 0.1810, decode.d4.loss_dice: 0.4799, decode.d5.loss_cls: 0.0510, decode.d5.loss_mask: 0.1807, decode.d5.loss_dice: 0.4833, decode.d6.loss_cls: 0.0512, decode.d6.loss_mask: 0.1805, decode.d6.loss_dice: 0.4831, decode.d7.loss_cls: 0.0523, decode.d7.loss_mask: 0.1806, decode.d7.loss_dice: 0.4796, decode.d8.loss_cls: 0.0478, decode.d8.loss_mask: 0.1804, decode.d8.loss_dice: 0.4799, loss: 7.4236 +2022-05-11 11:54:37,347 - mmseg - INFO - Iter [76550/80000] lr: 6.194e-08, eta: 2:02:30, time: 1.805, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0488, decode.loss_mask: 0.1799, decode.loss_dice: 0.4795, decode.d0.loss_cls: 0.2922, decode.d0.loss_mask: 0.1860, decode.d0.loss_dice: 0.5008, decode.d1.loss_cls: 0.0595, decode.d1.loss_mask: 0.1811, decode.d1.loss_dice: 0.4880, decode.d2.loss_cls: 0.0565, decode.d2.loss_mask: 0.1809, decode.d2.loss_dice: 0.4886, decode.d3.loss_cls: 0.0479, decode.d3.loss_mask: 0.1804, decode.d3.loss_dice: 0.4829, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1806, decode.d4.loss_dice: 0.4847, decode.d5.loss_cls: 0.0473, decode.d5.loss_mask: 0.1801, decode.d5.loss_dice: 0.4790, decode.d6.loss_cls: 0.0435, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.4780, decode.d7.loss_cls: 0.0467, decode.d7.loss_mask: 0.1799, 
decode.d7.loss_dice: 0.4788, decode.d8.loss_cls: 0.0445, decode.d8.loss_mask: 0.1801, decode.d8.loss_dice: 0.4796, loss: 7.3863 +2022-05-11 11:56:09,582 - mmseg - INFO - Iter [76600/80000] lr: 6.104e-08, eta: 2:00:42, time: 1.845, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0410, decode.loss_mask: 0.1798, decode.loss_dice: 0.4830, decode.d0.loss_cls: 0.2879, decode.d0.loss_mask: 0.1864, decode.d0.loss_dice: 0.5056, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1817, decode.d1.loss_dice: 0.4924, decode.d2.loss_cls: 0.0558, decode.d2.loss_mask: 0.1809, decode.d2.loss_dice: 0.4911, decode.d3.loss_cls: 0.0527, decode.d3.loss_mask: 0.1807, decode.d3.loss_dice: 0.4873, decode.d4.loss_cls: 0.0490, decode.d4.loss_mask: 0.1807, decode.d4.loss_dice: 0.4875, decode.d5.loss_cls: 0.0456, decode.d5.loss_mask: 0.1802, decode.d5.loss_dice: 0.4896, decode.d6.loss_cls: 0.0486, decode.d6.loss_mask: 0.1803, decode.d6.loss_dice: 0.4876, decode.d7.loss_cls: 0.0438, decode.d7.loss_mask: 0.1803, decode.d7.loss_dice: 0.4855, decode.d8.loss_cls: 0.0465, decode.d8.loss_mask: 0.1802, decode.d8.loss_dice: 0.4847, loss: 7.4353 +2022-05-11 11:57:42,431 - mmseg - INFO - Iter [76650/80000] lr: 6.014e-08, eta: 1:58:54, time: 1.856, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0495, decode.loss_mask: 0.1763, decode.loss_dice: 0.4814, decode.d0.loss_cls: 0.2885, decode.d0.loss_mask: 0.1823, decode.d0.loss_dice: 0.5074, decode.d1.loss_cls: 0.0681, decode.d1.loss_mask: 0.1774, decode.d1.loss_dice: 0.4915, decode.d2.loss_cls: 0.0529, decode.d2.loss_mask: 0.1769, decode.d2.loss_dice: 0.4890, decode.d3.loss_cls: 0.0489, decode.d3.loss_mask: 0.1764, decode.d3.loss_dice: 0.4860, decode.d4.loss_cls: 0.0462, decode.d4.loss_mask: 0.1764, decode.d4.loss_dice: 0.4802, decode.d5.loss_cls: 0.0487, decode.d5.loss_mask: 0.1768, decode.d5.loss_dice: 0.4819, decode.d6.loss_cls: 0.0451, decode.d6.loss_mask: 0.1764, decode.d6.loss_dice: 0.4809, decode.d7.loss_cls: 0.0521, decode.d7.loss_mask: 0.1760, 
decode.d7.loss_dice: 0.4809, decode.d8.loss_cls: 0.0507, decode.d8.loss_mask: 0.1762, decode.d8.loss_dice: 0.4829, loss: 7.3836 +2022-05-11 11:59:12,816 - mmseg - INFO - Iter [76700/80000] lr: 5.924e-08, eta: 1:57:06, time: 1.808, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0372, decode.loss_mask: 0.1740, decode.loss_dice: 0.4705, decode.d0.loss_cls: 0.2901, decode.d0.loss_mask: 0.1798, decode.d0.loss_dice: 0.4942, decode.d1.loss_cls: 0.0524, decode.d1.loss_mask: 0.1748, decode.d1.loss_dice: 0.4743, decode.d2.loss_cls: 0.0470, decode.d2.loss_mask: 0.1743, decode.d2.loss_dice: 0.4753, decode.d3.loss_cls: 0.0454, decode.d3.loss_mask: 0.1741, decode.d3.loss_dice: 0.4709, decode.d4.loss_cls: 0.0378, decode.d4.loss_mask: 0.1742, decode.d4.loss_dice: 0.4732, decode.d5.loss_cls: 0.0385, decode.d5.loss_mask: 0.1742, decode.d5.loss_dice: 0.4737, decode.d6.loss_cls: 0.0378, decode.d6.loss_mask: 0.1743, decode.d6.loss_dice: 0.4730, decode.d7.loss_cls: 0.0367, decode.d7.loss_mask: 0.1739, decode.d7.loss_dice: 0.4718, decode.d8.loss_cls: 0.0378, decode.d8.loss_mask: 0.1740, decode.d8.loss_dice: 0.4734, loss: 7.1588 +2022-05-11 12:00:45,524 - mmseg - INFO - Iter [76750/80000] lr: 5.835e-08, eta: 1:55:18, time: 1.855, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0513, decode.loss_mask: 0.1844, decode.loss_dice: 0.4955, decode.d0.loss_cls: 0.2920, decode.d0.loss_mask: 0.1918, decode.d0.loss_dice: 0.5177, decode.d1.loss_cls: 0.0675, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.4998, decode.d2.loss_cls: 0.0587, decode.d2.loss_mask: 0.1837, decode.d2.loss_dice: 0.4954, decode.d3.loss_cls: 0.0503, decode.d3.loss_mask: 0.1835, decode.d3.loss_dice: 0.4972, decode.d4.loss_cls: 0.0494, decode.d4.loss_mask: 0.1838, decode.d4.loss_dice: 0.4912, decode.d5.loss_cls: 0.0480, decode.d5.loss_mask: 0.1840, decode.d5.loss_dice: 0.4932, decode.d6.loss_cls: 0.0501, decode.d6.loss_mask: 0.1840, decode.d6.loss_dice: 0.4913, decode.d7.loss_cls: 0.0518, decode.d7.loss_mask: 0.1841, 
decode.d7.loss_dice: 0.4932, decode.d8.loss_cls: 0.0479, decode.d8.loss_mask: 0.1840, decode.d8.loss_dice: 0.4918, loss: 7.5819 +2022-05-11 12:02:18,294 - mmseg - INFO - Iter [76800/80000] lr: 5.745e-08, eta: 1:53:30, time: 1.854, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0479, decode.loss_mask: 0.1745, decode.loss_dice: 0.4786, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.1811, decode.d0.loss_dice: 0.5013, decode.d1.loss_cls: 0.0695, decode.d1.loss_mask: 0.1757, decode.d1.loss_dice: 0.4849, decode.d2.loss_cls: 0.0636, decode.d2.loss_mask: 0.1756, decode.d2.loss_dice: 0.4845, decode.d3.loss_cls: 0.0515, decode.d3.loss_mask: 0.1754, decode.d3.loss_dice: 0.4805, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1753, decode.d4.loss_dice: 0.4796, decode.d5.loss_cls: 0.0598, decode.d5.loss_mask: 0.1749, decode.d5.loss_dice: 0.4798, decode.d6.loss_cls: 0.0538, decode.d6.loss_mask: 0.1749, decode.d6.loss_dice: 0.4812, decode.d7.loss_cls: 0.0567, decode.d7.loss_mask: 0.1746, decode.d7.loss_dice: 0.4770, decode.d8.loss_cls: 0.0512, decode.d8.loss_mask: 0.1748, decode.d8.loss_dice: 0.4813, loss: 7.3958 +2022-05-11 12:03:50,112 - mmseg - INFO - Iter [76850/80000] lr: 5.655e-08, eta: 1:51:42, time: 1.838, data_time: 0.067, memory: 69063, decode.loss_cls: 0.0450, decode.loss_mask: 0.1738, decode.loss_dice: 0.4792, decode.d0.loss_cls: 0.2947, decode.d0.loss_mask: 0.1797, decode.d0.loss_dice: 0.4944, decode.d1.loss_cls: 0.0572, decode.d1.loss_mask: 0.1744, decode.d1.loss_dice: 0.4813, decode.d2.loss_cls: 0.0540, decode.d2.loss_mask: 0.1744, decode.d2.loss_dice: 0.4841, decode.d3.loss_cls: 0.0467, decode.d3.loss_mask: 0.1735, decode.d3.loss_dice: 0.4785, decode.d4.loss_cls: 0.0474, decode.d4.loss_mask: 0.1736, decode.d4.loss_dice: 0.4785, decode.d5.loss_cls: 0.0452, decode.d5.loss_mask: 0.1737, decode.d5.loss_dice: 0.4781, decode.d6.loss_cls: 0.0493, decode.d6.loss_mask: 0.1736, decode.d6.loss_dice: 0.4787, decode.d7.loss_cls: 0.0446, decode.d7.loss_mask: 0.1735, 
decode.d7.loss_dice: 0.4768, decode.d8.loss_cls: 0.0455, decode.d8.loss_mask: 0.1736, decode.d8.loss_dice: 0.4777, loss: 7.2809 +2022-05-11 12:05:20,594 - mmseg - INFO - Iter [76900/80000] lr: 5.566e-08, eta: 1:49:54, time: 1.810, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0458, decode.loss_mask: 0.1769, decode.loss_dice: 0.4816, decode.d0.loss_cls: 0.2860, decode.d0.loss_mask: 0.1832, decode.d0.loss_dice: 0.5002, decode.d1.loss_cls: 0.0530, decode.d1.loss_mask: 0.1783, decode.d1.loss_dice: 0.4892, decode.d2.loss_cls: 0.0556, decode.d2.loss_mask: 0.1776, decode.d2.loss_dice: 0.4856, decode.d3.loss_cls: 0.0489, decode.d3.loss_mask: 0.1769, decode.d3.loss_dice: 0.4833, decode.d4.loss_cls: 0.0515, decode.d4.loss_mask: 0.1767, decode.d4.loss_dice: 0.4829, decode.d5.loss_cls: 0.0491, decode.d5.loss_mask: 0.1768, decode.d5.loss_dice: 0.4822, decode.d6.loss_cls: 0.0495, decode.d6.loss_mask: 0.1768, decode.d6.loss_dice: 0.4823, decode.d7.loss_cls: 0.0501, decode.d7.loss_mask: 0.1766, decode.d7.loss_dice: 0.4844, decode.d8.loss_cls: 0.0469, decode.d8.loss_mask: 0.1770, decode.d8.loss_dice: 0.4802, loss: 7.3651 +2022-05-11 12:06:52,167 - mmseg - INFO - Iter [76950/80000] lr: 5.476e-08, eta: 1:48:06, time: 1.832, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0460, decode.loss_mask: 0.1740, decode.loss_dice: 0.4772, decode.d0.loss_cls: 0.2993, decode.d0.loss_mask: 0.1804, decode.d0.loss_dice: 0.5013, decode.d1.loss_cls: 0.0574, decode.d1.loss_mask: 0.1758, decode.d1.loss_dice: 0.4875, decode.d2.loss_cls: 0.0513, decode.d2.loss_mask: 0.1753, decode.d2.loss_dice: 0.4829, decode.d3.loss_cls: 0.0519, decode.d3.loss_mask: 0.1747, decode.d3.loss_dice: 0.4770, decode.d4.loss_cls: 0.0478, decode.d4.loss_mask: 0.1745, decode.d4.loss_dice: 0.4765, decode.d5.loss_cls: 0.0490, decode.d5.loss_mask: 0.1745, decode.d5.loss_dice: 0.4790, decode.d6.loss_cls: 0.0427, decode.d6.loss_mask: 0.1742, decode.d6.loss_dice: 0.4769, decode.d7.loss_cls: 0.0471, decode.d7.loss_mask: 0.1742, 
decode.d7.loss_dice: 0.4758, decode.d8.loss_cls: 0.0456, decode.d8.loss_mask: 0.1742, decode.d8.loss_dice: 0.4745, loss: 7.2985 +2022-05-11 12:08:23,075 - mmseg - INFO - Saving checkpoint at 77000 iterations +2022-05-11 12:08:56,847 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 12:08:56,854 - mmseg - INFO - Iter [77000/80000] lr: 5.386e-08, eta: 1:46:21, time: 2.492, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0448, decode.loss_mask: 0.1822, decode.loss_dice: 0.4848, decode.d0.loss_cls: 0.2815, decode.d0.loss_mask: 0.1898, decode.d0.loss_dice: 0.5098, decode.d1.loss_cls: 0.0653, decode.d1.loss_mask: 0.1843, decode.d1.loss_dice: 0.4879, decode.d2.loss_cls: 0.0537, decode.d2.loss_mask: 0.1832, decode.d2.loss_dice: 0.4894, decode.d3.loss_cls: 0.0481, decode.d3.loss_mask: 0.1828, decode.d3.loss_dice: 0.4854, decode.d4.loss_cls: 0.0526, decode.d4.loss_mask: 0.1828, decode.d4.loss_dice: 0.4853, decode.d5.loss_cls: 0.0470, decode.d5.loss_mask: 0.1826, decode.d5.loss_dice: 0.4898, decode.d6.loss_cls: 0.0530, decode.d6.loss_mask: 0.1824, decode.d6.loss_dice: 0.4822, decode.d7.loss_cls: 0.0504, decode.d7.loss_mask: 0.1823, decode.d7.loss_dice: 0.4842, decode.d8.loss_cls: 0.0492, decode.d8.loss_mask: 0.1823, decode.d8.loss_dice: 0.4837, loss: 7.4629 +2022-05-11 12:10:52,320 - mmseg - INFO - per class results: +2022-05-11 12:10:52,326 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.62 | 99.25 | +| sidewalk | 88.69 | 94.05 | +| building | 94.38 | 97.03 | +| wall | 67.03 | 80.1 | +| fence | 74.29 | 80.98 | +| pole | 71.15 | 83.73 | +| traffic light | 77.1 | 87.52 | +| traffic sign | 84.01 | 90.52 | +| vegetation | 93.37 | 96.95 | +| terrain | 68.35 | 77.08 | +| sky | 95.77 | 98.46 | +| person | 86.69 | 93.85 | +| rider | 74.06 | 84.7 | +| car | 96.17 | 98.24 | +| truck | 91.97 | 94.48 | +| bus | 93.54 | 96.49 | +| train | 87.89 | 90.7 | +| 
motorcycle | 77.8 | 87.29 | +| bicycle | 82.67 | 91.82 | ++---------------+-------+-------+ +2022-05-11 12:10:52,326 - mmseg - INFO - Summary: +2022-05-11 12:10:52,326 - mmseg - INFO - ++-------+------+------+ +| aAcc | mIoU | mAcc | ++-------+------+------+ +| 97.01 | 84.4 | 90.7 | ++-------+------+------+ +2022-05-11 12:10:52,329 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 12:10:52,329 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8440, mAcc: 0.9070, IoU.road: 0.9862, IoU.sidewalk: 0.8869, IoU.building: 0.9438, IoU.wall: 0.6703, IoU.fence: 0.7429, IoU.pole: 0.7115, IoU.traffic light: 0.7710, IoU.traffic sign: 0.8401, IoU.vegetation: 0.9337, IoU.terrain: 0.6835, IoU.sky: 0.9577, IoU.person: 0.8669, IoU.rider: 0.7406, IoU.car: 0.9617, IoU.truck: 0.9197, IoU.bus: 0.9354, IoU.train: 0.8789, IoU.motorcycle: 0.7780, IoU.bicycle: 0.8267, Acc.road: 0.9925, Acc.sidewalk: 0.9405, Acc.building: 0.9703, Acc.wall: 0.8010, Acc.fence: 0.8098, Acc.pole: 0.8373, Acc.traffic light: 0.8752, Acc.traffic sign: 0.9052, Acc.vegetation: 0.9695, Acc.terrain: 0.7708, Acc.sky: 0.9846, Acc.person: 0.9385, Acc.rider: 0.8470, Acc.car: 0.9824, Acc.truck: 0.9448, Acc.bus: 0.9649, Acc.train: 0.9070, Acc.motorcycle: 0.8729, Acc.bicycle: 0.9182 +2022-05-11 12:12:27,087 - mmseg - INFO - Iter [77050/80000] lr: 5.296e-08, eta: 1:44:45, time: 4.207, data_time: 2.372, memory: 69063, decode.loss_cls: 0.0541, decode.loss_mask: 0.1823, decode.loss_dice: 0.4834, decode.d0.loss_cls: 0.2962, decode.d0.loss_mask: 0.1904, decode.d0.loss_dice: 0.5094, decode.d1.loss_cls: 0.0780, decode.d1.loss_mask: 0.1846, decode.d1.loss_dice: 0.5009, decode.d2.loss_cls: 0.0645, decode.d2.loss_mask: 0.1839, decode.d2.loss_dice: 0.4938, decode.d3.loss_cls: 0.0601, decode.d3.loss_mask: 0.1834, decode.d3.loss_dice: 0.4946, decode.d4.loss_cls: 0.0585, decode.d4.loss_mask: 0.1831, decode.d4.loss_dice: 0.4877, decode.d5.loss_cls: 0.0647, decode.d5.loss_mask: 0.1826, 
decode.d5.loss_dice: 0.4924, decode.d6.loss_cls: 0.0568, decode.d6.loss_mask: 0.1829, decode.d6.loss_dice: 0.4914, decode.d7.loss_cls: 0.0556, decode.d7.loss_mask: 0.1828, decode.d7.loss_dice: 0.4907, decode.d8.loss_cls: 0.0564, decode.d8.loss_mask: 0.1826, decode.d8.loss_dice: 0.4885, loss: 7.6163 +2022-05-11 12:13:57,791 - mmseg - INFO - Iter [77100/80000] lr: 5.207e-08, eta: 1:42:57, time: 1.814, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0525, decode.loss_mask: 0.1786, decode.loss_dice: 0.4896, decode.d0.loss_cls: 0.2988, decode.d0.loss_mask: 0.1855, decode.d0.loss_dice: 0.5104, decode.d1.loss_cls: 0.0704, decode.d1.loss_mask: 0.1803, decode.d1.loss_dice: 0.4983, decode.d2.loss_cls: 0.0616, decode.d2.loss_mask: 0.1801, decode.d2.loss_dice: 0.4975, decode.d3.loss_cls: 0.0594, decode.d3.loss_mask: 0.1792, decode.d3.loss_dice: 0.4894, decode.d4.loss_cls: 0.0537, decode.d4.loss_mask: 0.1789, decode.d4.loss_dice: 0.4911, decode.d5.loss_cls: 0.0494, decode.d5.loss_mask: 0.1788, decode.d5.loss_dice: 0.4901, decode.d6.loss_cls: 0.0467, decode.d6.loss_mask: 0.1786, decode.d6.loss_dice: 0.4865, decode.d7.loss_cls: 0.0528, decode.d7.loss_mask: 0.1784, decode.d7.loss_dice: 0.4880, decode.d8.loss_cls: 0.0510, decode.d8.loss_mask: 0.1786, decode.d8.loss_dice: 0.4871, loss: 7.5212 +2022-05-11 12:15:27,979 - mmseg - INFO - Iter [77150/80000] lr: 5.117e-08, eta: 1:41:09, time: 1.804, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0537, decode.loss_mask: 0.1744, decode.loss_dice: 0.4820, decode.d0.loss_cls: 0.3028, decode.d0.loss_mask: 0.1801, decode.d0.loss_dice: 0.5073, decode.d1.loss_cls: 0.0713, decode.d1.loss_mask: 0.1758, decode.d1.loss_dice: 0.4944, decode.d2.loss_cls: 0.0636, decode.d2.loss_mask: 0.1752, decode.d2.loss_dice: 0.4909, decode.d3.loss_cls: 0.0548, decode.d3.loss_mask: 0.1749, decode.d3.loss_dice: 0.4867, decode.d4.loss_cls: 0.0580, decode.d4.loss_mask: 0.1745, decode.d4.loss_dice: 0.4833, decode.d5.loss_cls: 0.0521, decode.d5.loss_mask: 0.1749, 
decode.d5.loss_dice: 0.4882, decode.d6.loss_cls: 0.0539, decode.d6.loss_mask: 0.1746, decode.d6.loss_dice: 0.4858, decode.d7.loss_cls: 0.0557, decode.d7.loss_mask: 0.1748, decode.d7.loss_dice: 0.4849, decode.d8.loss_cls: 0.0564, decode.d8.loss_mask: 0.1745, decode.d8.loss_dice: 0.4863, loss: 7.4658 +2022-05-11 12:16:58,011 - mmseg - INFO - Iter [77200/80000] lr: 5.027e-08, eta: 1:39:21, time: 1.801, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0453, decode.loss_mask: 0.1777, decode.loss_dice: 0.4882, decode.d0.loss_cls: 0.3030, decode.d0.loss_mask: 0.1833, decode.d0.loss_dice: 0.5113, decode.d1.loss_cls: 0.0666, decode.d1.loss_mask: 0.1788, decode.d1.loss_dice: 0.4963, decode.d2.loss_cls: 0.0607, decode.d2.loss_mask: 0.1783, decode.d2.loss_dice: 0.4927, decode.d3.loss_cls: 0.0562, decode.d3.loss_mask: 0.1778, decode.d3.loss_dice: 0.4932, decode.d4.loss_cls: 0.0525, decode.d4.loss_mask: 0.1782, decode.d4.loss_dice: 0.4921, decode.d5.loss_cls: 0.0507, decode.d5.loss_mask: 0.1776, decode.d5.loss_dice: 0.4915, decode.d6.loss_cls: 0.0487, decode.d6.loss_mask: 0.1778, decode.d6.loss_dice: 0.4919, decode.d7.loss_cls: 0.0475, decode.d7.loss_mask: 0.1776, decode.d7.loss_dice: 0.4871, decode.d8.loss_cls: 0.0489, decode.d8.loss_mask: 0.1779, decode.d8.loss_dice: 0.4906, loss: 7.5001 +2022-05-11 12:18:30,999 - mmseg - INFO - Iter [77250/80000] lr: 4.937e-08, eta: 1:37:33, time: 1.860, data_time: 0.061, memory: 69063, decode.loss_cls: 0.0446, decode.loss_mask: 0.1784, decode.loss_dice: 0.4747, decode.d0.loss_cls: 0.2913, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.4952, decode.d1.loss_cls: 0.0585, decode.d1.loss_mask: 0.1801, decode.d1.loss_dice: 0.4840, decode.d2.loss_cls: 0.0569, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4805, decode.d3.loss_cls: 0.0476, decode.d3.loss_mask: 0.1788, decode.d3.loss_dice: 0.4750, decode.d4.loss_cls: 0.0406, decode.d4.loss_mask: 0.1786, decode.d4.loss_dice: 0.4750, decode.d5.loss_cls: 0.0432, decode.d5.loss_mask: 0.1787, 
decode.d5.loss_dice: 0.4761, decode.d6.loss_cls: 0.0491, decode.d6.loss_mask: 0.1787, decode.d6.loss_dice: 0.4741, decode.d7.loss_cls: 0.0455, decode.d7.loss_mask: 0.1785, decode.d7.loss_dice: 0.4771, decode.d8.loss_cls: 0.0409, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4773, loss: 7.3015 +2022-05-11 12:20:05,507 - mmseg - INFO - Iter [77300/80000] lr: 4.848e-08, eta: 1:35:46, time: 1.890, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0411, decode.loss_mask: 0.1786, decode.loss_dice: 0.4767, decode.d0.loss_cls: 0.2960, decode.d0.loss_mask: 0.1851, decode.d0.loss_dice: 0.4967, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1804, decode.d1.loss_dice: 0.4851, decode.d2.loss_cls: 0.0506, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4809, decode.d3.loss_cls: 0.0446, decode.d3.loss_mask: 0.1794, decode.d3.loss_dice: 0.4802, decode.d4.loss_cls: 0.0433, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.4818, decode.d5.loss_cls: 0.0419, decode.d5.loss_mask: 0.1788, decode.d5.loss_dice: 0.4810, decode.d6.loss_cls: 0.0401, decode.d6.loss_mask: 0.1789, decode.d6.loss_dice: 0.4798, decode.d7.loss_cls: 0.0379, decode.d7.loss_mask: 0.1787, decode.d7.loss_dice: 0.4754, decode.d8.loss_cls: 0.0449, decode.d8.loss_mask: 0.1789, decode.d8.loss_dice: 0.4803, loss: 7.3154 +2022-05-11 12:21:36,399 - mmseg - INFO - Iter [77350/80000] lr: 4.758e-08, eta: 1:33:58, time: 1.818, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0480, decode.loss_mask: 0.1767, decode.loss_dice: 0.4770, decode.d0.loss_cls: 0.3017, decode.d0.loss_mask: 0.1834, decode.d0.loss_dice: 0.4962, decode.d1.loss_cls: 0.0600, decode.d1.loss_mask: 0.1781, decode.d1.loss_dice: 0.4833, decode.d2.loss_cls: 0.0570, decode.d2.loss_mask: 0.1775, decode.d2.loss_dice: 0.4810, decode.d3.loss_cls: 0.0503, decode.d3.loss_mask: 0.1774, decode.d3.loss_dice: 0.4790, decode.d4.loss_cls: 0.0531, decode.d4.loss_mask: 0.1769, decode.d4.loss_dice: 0.4805, decode.d5.loss_cls: 0.0469, decode.d5.loss_mask: 0.1771, 
decode.d5.loss_dice: 0.4796, decode.d6.loss_cls: 0.0481, decode.d6.loss_mask: 0.1768, decode.d6.loss_dice: 0.4801, decode.d7.loss_cls: 0.0501, decode.d7.loss_mask: 0.1769, decode.d7.loss_dice: 0.4809, decode.d8.loss_cls: 0.0456, decode.d8.loss_mask: 0.1768, decode.d8.loss_dice: 0.4781, loss: 7.3541 +2022-05-11 12:23:08,050 - mmseg - INFO - Iter [77400/80000] lr: 4.668e-08, eta: 1:32:10, time: 1.833, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0414, decode.loss_mask: 0.1779, decode.loss_dice: 0.4815, decode.d0.loss_cls: 0.2946, decode.d0.loss_mask: 0.1839, decode.d0.loss_dice: 0.5011, decode.d1.loss_cls: 0.0572, decode.d1.loss_mask: 0.1787, decode.d1.loss_dice: 0.4889, decode.d2.loss_cls: 0.0524, decode.d2.loss_mask: 0.1783, decode.d2.loss_dice: 0.4832, decode.d3.loss_cls: 0.0463, decode.d3.loss_mask: 0.1780, decode.d3.loss_dice: 0.4750, decode.d4.loss_cls: 0.0460, decode.d4.loss_mask: 0.1783, decode.d4.loss_dice: 0.4793, decode.d5.loss_cls: 0.0480, decode.d5.loss_mask: 0.1784, decode.d5.loss_dice: 0.4814, decode.d6.loss_cls: 0.0476, decode.d6.loss_mask: 0.1780, decode.d6.loss_dice: 0.4785, decode.d7.loss_cls: 0.0463, decode.d7.loss_mask: 0.1779, decode.d7.loss_dice: 0.4802, decode.d8.loss_cls: 0.0465, decode.d8.loss_mask: 0.1777, decode.d8.loss_dice: 0.4807, loss: 7.3433 +2022-05-11 12:24:38,202 - mmseg - INFO - Iter [77450/80000] lr: 4.578e-08, eta: 1:30:23, time: 1.803, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0369, decode.loss_mask: 0.1794, decode.loss_dice: 0.4819, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1862, decode.d0.loss_dice: 0.4994, decode.d1.loss_cls: 0.0555, decode.d1.loss_mask: 0.1813, decode.d1.loss_dice: 0.4906, decode.d2.loss_cls: 0.0445, decode.d2.loss_mask: 0.1806, decode.d2.loss_dice: 0.4869, decode.d3.loss_cls: 0.0423, decode.d3.loss_mask: 0.1802, decode.d3.loss_dice: 0.4791, decode.d4.loss_cls: 0.0371, decode.d4.loss_mask: 0.1801, decode.d4.loss_dice: 0.4828, decode.d5.loss_cls: 0.0398, decode.d5.loss_mask: 0.1795, 
decode.d5.loss_dice: 0.4793, decode.d6.loss_cls: 0.0352, decode.d6.loss_mask: 0.1798, decode.d6.loss_dice: 0.4786, decode.d7.loss_cls: 0.0389, decode.d7.loss_mask: 0.1796, decode.d7.loss_dice: 0.4813, decode.d8.loss_cls: 0.0387, decode.d8.loss_mask: 0.1795, decode.d8.loss_dice: 0.4812, loss: 7.3143 +2022-05-11 12:26:08,831 - mmseg - INFO - Iter [77500/80000] lr: 4.489e-08, eta: 1:28:35, time: 1.813, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0416, decode.loss_mask: 0.1758, decode.loss_dice: 0.4681, decode.d0.loss_cls: 0.2905, decode.d0.loss_mask: 0.1818, decode.d0.loss_dice: 0.4916, decode.d1.loss_cls: 0.0553, decode.d1.loss_mask: 0.1768, decode.d1.loss_dice: 0.4762, decode.d2.loss_cls: 0.0555, decode.d2.loss_mask: 0.1766, decode.d2.loss_dice: 0.4738, decode.d3.loss_cls: 0.0437, decode.d3.loss_mask: 0.1766, decode.d3.loss_dice: 0.4705, decode.d4.loss_cls: 0.0438, decode.d4.loss_mask: 0.1761, decode.d4.loss_dice: 0.4685, decode.d5.loss_cls: 0.0479, decode.d5.loss_mask: 0.1758, decode.d5.loss_dice: 0.4687, decode.d6.loss_cls: 0.0400, decode.d6.loss_mask: 0.1757, decode.d6.loss_dice: 0.4684, decode.d7.loss_cls: 0.0417, decode.d7.loss_mask: 0.1761, decode.d7.loss_dice: 0.4664, decode.d8.loss_cls: 0.0439, decode.d8.loss_mask: 0.1758, decode.d8.loss_dice: 0.4667, loss: 7.1898 +2022-05-11 12:27:39,836 - mmseg - INFO - Iter [77550/80000] lr: 4.399e-08, eta: 1:26:47, time: 1.818, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0514, decode.loss_mask: 0.1757, decode.loss_dice: 0.4859, decode.d0.loss_cls: 0.3009, decode.d0.loss_mask: 0.1836, decode.d0.loss_dice: 0.5105, decode.d1.loss_cls: 0.0704, decode.d1.loss_mask: 0.1773, decode.d1.loss_dice: 0.4904, decode.d2.loss_cls: 0.0577, decode.d2.loss_mask: 0.1767, decode.d2.loss_dice: 0.4880, decode.d3.loss_cls: 0.0647, decode.d3.loss_mask: 0.1759, decode.d3.loss_dice: 0.4852, decode.d4.loss_cls: 0.0607, decode.d4.loss_mask: 0.1756, decode.d4.loss_dice: 0.4857, decode.d5.loss_cls: 0.0618, decode.d5.loss_mask: 0.1758, 
decode.d5.loss_dice: 0.4858, decode.d6.loss_cls: 0.0599, decode.d6.loss_mask: 0.1754, decode.d6.loss_dice: 0.4856, decode.d7.loss_cls: 0.0584, decode.d7.loss_mask: 0.1756, decode.d7.loss_dice: 0.4842, decode.d8.loss_cls: 0.0539, decode.d8.loss_mask: 0.1757, decode.d8.loss_dice: 0.4833, loss: 7.4917 +2022-05-11 12:29:11,683 - mmseg - INFO - Iter [77600/80000] lr: 4.309e-08, eta: 1:25:00, time: 1.839, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0395, decode.loss_mask: 0.1814, decode.loss_dice: 0.4786, decode.d0.loss_cls: 0.2968, decode.d0.loss_mask: 0.1867, decode.d0.loss_dice: 0.4926, decode.d1.loss_cls: 0.0532, decode.d1.loss_mask: 0.1823, decode.d1.loss_dice: 0.4874, decode.d2.loss_cls: 0.0468, decode.d2.loss_mask: 0.1812, decode.d2.loss_dice: 0.4824, decode.d3.loss_cls: 0.0428, decode.d3.loss_mask: 0.1810, decode.d3.loss_dice: 0.4820, decode.d4.loss_cls: 0.0466, decode.d4.loss_mask: 0.1812, decode.d4.loss_dice: 0.4828, decode.d5.loss_cls: 0.0429, decode.d5.loss_mask: 0.1810, decode.d5.loss_dice: 0.4803, decode.d6.loss_cls: 0.0428, decode.d6.loss_mask: 0.1809, decode.d6.loss_dice: 0.4798, decode.d7.loss_cls: 0.0415, decode.d7.loss_mask: 0.1812, decode.d7.loss_dice: 0.4840, decode.d8.loss_cls: 0.0424, decode.d8.loss_mask: 0.1812, decode.d8.loss_dice: 0.4802, loss: 7.3434 +2022-05-11 12:30:42,797 - mmseg - INFO - Iter [77650/80000] lr: 4.219e-08, eta: 1:23:12, time: 1.822, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0516, decode.loss_mask: 0.1771, decode.loss_dice: 0.4813, decode.d0.loss_cls: 0.3072, decode.d0.loss_mask: 0.1833, decode.d0.loss_dice: 0.5090, decode.d1.loss_cls: 0.0604, decode.d1.loss_mask: 0.1787, decode.d1.loss_dice: 0.4885, decode.d2.loss_cls: 0.0528, decode.d2.loss_mask: 0.1776, decode.d2.loss_dice: 0.4870, decode.d3.loss_cls: 0.0489, decode.d3.loss_mask: 0.1771, decode.d3.loss_dice: 0.4813, decode.d4.loss_cls: 0.0470, decode.d4.loss_mask: 0.1771, decode.d4.loss_dice: 0.4815, decode.d5.loss_cls: 0.0547, decode.d5.loss_mask: 0.1771, 
decode.d5.loss_dice: 0.4855, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1771, decode.d6.loss_dice: 0.4831, decode.d7.loss_cls: 0.0499, decode.d7.loss_mask: 0.1769, decode.d7.loss_dice: 0.4833, decode.d8.loss_cls: 0.0506, decode.d8.loss_mask: 0.1768, decode.d8.loss_dice: 0.4819, loss: 7.4125 +2022-05-11 12:32:12,591 - mmseg - INFO - Iter [77700/80000] lr: 4.130e-08, eta: 1:21:25, time: 1.796, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0483, decode.loss_mask: 0.1759, decode.loss_dice: 0.4853, decode.d0.loss_cls: 0.3007, decode.d0.loss_mask: 0.1825, decode.d0.loss_dice: 0.5030, decode.d1.loss_cls: 0.0576, decode.d1.loss_mask: 0.1773, decode.d1.loss_dice: 0.4940, decode.d2.loss_cls: 0.0589, decode.d2.loss_mask: 0.1768, decode.d2.loss_dice: 0.4865, decode.d3.loss_cls: 0.0529, decode.d3.loss_mask: 0.1766, decode.d3.loss_dice: 0.4845, decode.d4.loss_cls: 0.0522, decode.d4.loss_mask: 0.1763, decode.d4.loss_dice: 0.4871, decode.d5.loss_cls: 0.0444, decode.d5.loss_mask: 0.1766, decode.d5.loss_dice: 0.4872, decode.d6.loss_cls: 0.0485, decode.d6.loss_mask: 0.1765, decode.d6.loss_dice: 0.4848, decode.d7.loss_cls: 0.0447, decode.d7.loss_mask: 0.1762, decode.d7.loss_dice: 0.4848, decode.d8.loss_cls: 0.0469, decode.d8.loss_mask: 0.1760, decode.d8.loss_dice: 0.4860, loss: 7.4092 +2022-05-11 12:33:42,735 - mmseg - INFO - Iter [77750/80000] lr: 4.040e-08, eta: 1:19:38, time: 1.803, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0522, decode.loss_mask: 0.1794, decode.loss_dice: 0.4833, decode.d0.loss_cls: 0.2909, decode.d0.loss_mask: 0.1861, decode.d0.loss_dice: 0.5048, decode.d1.loss_cls: 0.0692, decode.d1.loss_mask: 0.1813, decode.d1.loss_dice: 0.4902, decode.d2.loss_cls: 0.0533, decode.d2.loss_mask: 0.1801, decode.d2.loss_dice: 0.4884, decode.d3.loss_cls: 0.0517, decode.d3.loss_mask: 0.1801, decode.d3.loss_dice: 0.4836, decode.d4.loss_cls: 0.0507, decode.d4.loss_mask: 0.1794, decode.d4.loss_dice: 0.4804, decode.d5.loss_cls: 0.0574, decode.d5.loss_mask: 0.1793, 
decode.d5.loss_dice: 0.4853, decode.d6.loss_cls: 0.0546, decode.d6.loss_mask: 0.1793, decode.d6.loss_dice: 0.4820, decode.d7.loss_cls: 0.0564, decode.d7.loss_mask: 0.1796, decode.d7.loss_dice: 0.4825, decode.d8.loss_cls: 0.0510, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4827, loss: 7.4545 +2022-05-11 12:35:16,401 - mmseg - INFO - Iter [77800/80000] lr: 3.950e-08, eta: 1:17:50, time: 1.873, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0606, decode.loss_mask: 0.1776, decode.loss_dice: 0.4837, decode.d0.loss_cls: 0.3025, decode.d0.loss_mask: 0.1843, decode.d0.loss_dice: 0.5060, decode.d1.loss_cls: 0.0771, decode.d1.loss_mask: 0.1788, decode.d1.loss_dice: 0.4944, decode.d2.loss_cls: 0.0684, decode.d2.loss_mask: 0.1784, decode.d2.loss_dice: 0.4917, decode.d3.loss_cls: 0.0604, decode.d3.loss_mask: 0.1782, decode.d3.loss_dice: 0.4881, decode.d4.loss_cls: 0.0589, decode.d4.loss_mask: 0.1782, decode.d4.loss_dice: 0.4884, decode.d5.loss_cls: 0.0550, decode.d5.loss_mask: 0.1777, decode.d5.loss_dice: 0.4886, decode.d6.loss_cls: 0.0590, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4847, decode.d7.loss_cls: 0.0579, decode.d7.loss_mask: 0.1778, decode.d7.loss_dice: 0.4865, decode.d8.loss_cls: 0.0570, decode.d8.loss_mask: 0.1775, decode.d8.loss_dice: 0.4846, loss: 7.5395 +2022-05-11 12:36:44,966 - mmseg - INFO - Iter [77850/80000] lr: 3.860e-08, eta: 1:16:03, time: 1.772, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0586, decode.loss_mask: 0.1767, decode.loss_dice: 0.4842, decode.d0.loss_cls: 0.3069, decode.d0.loss_mask: 0.1830, decode.d0.loss_dice: 0.5063, decode.d1.loss_cls: 0.0789, decode.d1.loss_mask: 0.1780, decode.d1.loss_dice: 0.4907, decode.d2.loss_cls: 0.0703, decode.d2.loss_mask: 0.1774, decode.d2.loss_dice: 0.4876, decode.d3.loss_cls: 0.0646, decode.d3.loss_mask: 0.1774, decode.d3.loss_dice: 0.4851, decode.d4.loss_cls: 0.0591, decode.d4.loss_mask: 0.1776, decode.d4.loss_dice: 0.4904, decode.d5.loss_cls: 0.0586, decode.d5.loss_mask: 0.1772, 
decode.d5.loss_dice: 0.4903, decode.d6.loss_cls: 0.0615, decode.d6.loss_mask: 0.1770, decode.d6.loss_dice: 0.4808, decode.d7.loss_cls: 0.0569, decode.d7.loss_mask: 0.1770, decode.d7.loss_dice: 0.4823, decode.d8.loss_cls: 0.0576, decode.d8.loss_mask: 0.1771, decode.d8.loss_dice: 0.4820, loss: 7.5314 +2022-05-11 12:38:14,353 - mmseg - INFO - Iter [77900/80000] lr: 3.771e-08, eta: 1:14:16, time: 1.788, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0498, decode.loss_mask: 0.1747, decode.loss_dice: 0.4870, decode.d0.loss_cls: 0.2996, decode.d0.loss_mask: 0.1809, decode.d0.loss_dice: 0.5048, decode.d1.loss_cls: 0.0708, decode.d1.loss_mask: 0.1757, decode.d1.loss_dice: 0.4929, decode.d2.loss_cls: 0.0609, decode.d2.loss_mask: 0.1752, decode.d2.loss_dice: 0.4862, decode.d3.loss_cls: 0.0552, decode.d3.loss_mask: 0.1750, decode.d3.loss_dice: 0.4847, decode.d4.loss_cls: 0.0539, decode.d4.loss_mask: 0.1747, decode.d4.loss_dice: 0.4865, decode.d5.loss_cls: 0.0505, decode.d5.loss_mask: 0.1744, decode.d5.loss_dice: 0.4854, decode.d6.loss_cls: 0.0510, decode.d6.loss_mask: 0.1745, decode.d6.loss_dice: 0.4850, decode.d7.loss_cls: 0.0525, decode.d7.loss_mask: 0.1749, decode.d7.loss_dice: 0.4842, decode.d8.loss_cls: 0.0518, decode.d8.loss_mask: 0.1750, decode.d8.loss_dice: 0.4872, loss: 7.4350 +2022-05-11 12:39:47,006 - mmseg - INFO - Iter [77950/80000] lr: 3.681e-08, eta: 1:12:29, time: 1.853, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0434, decode.loss_mask: 0.1760, decode.loss_dice: 0.4831, decode.d0.loss_cls: 0.2971, decode.d0.loss_mask: 0.1827, decode.d0.loss_dice: 0.5058, decode.d1.loss_cls: 0.0695, decode.d1.loss_mask: 0.1772, decode.d1.loss_dice: 0.4895, decode.d2.loss_cls: 0.0585, decode.d2.loss_mask: 0.1767, decode.d2.loss_dice: 0.4854, decode.d3.loss_cls: 0.0526, decode.d3.loss_mask: 0.1763, decode.d3.loss_dice: 0.4834, decode.d4.loss_cls: 0.0526, decode.d4.loss_mask: 0.1763, decode.d4.loss_dice: 0.4815, decode.d5.loss_cls: 0.0504, decode.d5.loss_mask: 0.1764, 
decode.d5.loss_dice: 0.4794, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1759, decode.d6.loss_dice: 0.4810, decode.d7.loss_cls: 0.0485, decode.d7.loss_mask: 0.1763, decode.d7.loss_dice: 0.4837, decode.d8.loss_cls: 0.0487, decode.d8.loss_mask: 0.1759, decode.d8.loss_dice: 0.4799, loss: 7.3917 +2022-05-11 12:41:17,316 - mmseg - INFO - Saving checkpoint at 78000 iterations +2022-05-11 12:41:49,628 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 12:41:49,638 - mmseg - INFO - Iter [78000/80000] lr: 3.591e-08, eta: 1:10:44, time: 2.449, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0526, decode.loss_mask: 0.1775, decode.loss_dice: 0.4827, decode.d0.loss_cls: 0.2946, decode.d0.loss_mask: 0.1835, decode.d0.loss_dice: 0.5053, decode.d1.loss_cls: 0.0628, decode.d1.loss_mask: 0.1788, decode.d1.loss_dice: 0.4896, decode.d2.loss_cls: 0.0547, decode.d2.loss_mask: 0.1786, decode.d2.loss_dice: 0.4862, decode.d3.loss_cls: 0.0571, decode.d3.loss_mask: 0.1781, decode.d3.loss_dice: 0.4831, decode.d4.loss_cls: 0.0582, decode.d4.loss_mask: 0.1778, decode.d4.loss_dice: 0.4839, decode.d5.loss_cls: 0.0536, decode.d5.loss_mask: 0.1780, decode.d5.loss_dice: 0.4851, decode.d6.loss_cls: 0.0529, decode.d6.loss_mask: 0.1782, decode.d6.loss_dice: 0.4849, decode.d7.loss_cls: 0.0486, decode.d7.loss_mask: 0.1776, decode.d7.loss_dice: 0.4837, decode.d8.loss_cls: 0.0545, decode.d8.loss_mask: 0.1778, decode.d8.loss_dice: 0.4840, loss: 7.4442 +2022-05-11 12:43:45,298 - mmseg - INFO - per class results: +2022-05-11 12:43:45,303 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.63 | 99.25 | +| sidewalk | 88.74 | 94.0 | +| building | 94.4 | 97.05 | +| wall | 69.73 | 78.94 | +| fence | 74.34 | 81.7 | +| pole | 71.15 | 83.88 | +| traffic light | 77.13 | 87.29 | +| traffic sign | 84.04 | 90.58 | +| vegetation | 93.34 | 96.95 | +| terrain | 68.41 | 76.99 | +| sky | 95.78 | 98.43 
| +| person | 86.73 | 93.82 | +| rider | 74.47 | 85.53 | +| car | 96.17 | 98.28 | +| truck | 82.59 | 94.53 | +| bus | 93.54 | 96.46 | +| train | 87.87 | 90.7 | +| motorcycle | 78.0 | 87.12 | +| bicycle | 82.7 | 91.69 | ++---------------+-------+-------+ +2022-05-11 12:43:45,303 - mmseg - INFO - Summary: +2022-05-11 12:43:45,304 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.01 | 84.09 | 90.69 | ++-------+-------+-------+ +2022-05-11 12:43:45,307 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 12:43:45,307 - mmseg - INFO - Iter(val) [32] aAcc: 0.9701, mIoU: 0.8409, mAcc: 0.9069, IoU.road: 0.9863, IoU.sidewalk: 0.8874, IoU.building: 0.9440, IoU.wall: 0.6973, IoU.fence: 0.7434, IoU.pole: 0.7115, IoU.traffic light: 0.7713, IoU.traffic sign: 0.8404, IoU.vegetation: 0.9334, IoU.terrain: 0.6841, IoU.sky: 0.9578, IoU.person: 0.8673, IoU.rider: 0.7447, IoU.car: 0.9617, IoU.truck: 0.8259, IoU.bus: 0.9354, IoU.train: 0.8787, IoU.motorcycle: 0.7800, IoU.bicycle: 0.8270, Acc.road: 0.9925, Acc.sidewalk: 0.9400, Acc.building: 0.9705, Acc.wall: 0.7894, Acc.fence: 0.8170, Acc.pole: 0.8388, Acc.traffic light: 0.8729, Acc.traffic sign: 0.9058, Acc.vegetation: 0.9695, Acc.terrain: 0.7699, Acc.sky: 0.9843, Acc.person: 0.9382, Acc.rider: 0.8553, Acc.car: 0.9828, Acc.truck: 0.9453, Acc.bus: 0.9646, Acc.train: 0.9070, Acc.motorcycle: 0.8712, Acc.bicycle: 0.9169 +2022-05-11 12:45:15,664 - mmseg - INFO - Iter [78050/80000] lr: 3.502e-08, eta: 1:09:04, time: 4.124, data_time: 2.333, memory: 69063, decode.loss_cls: 0.0411, decode.loss_mask: 0.1775, decode.loss_dice: 0.4886, decode.d0.loss_cls: 0.2803, decode.d0.loss_mask: 0.1843, decode.d0.loss_dice: 0.5080, decode.d1.loss_cls: 0.0613, decode.d1.loss_mask: 0.1791, decode.d1.loss_dice: 0.4912, decode.d2.loss_cls: 0.0502, decode.d2.loss_mask: 0.1783, decode.d2.loss_dice: 0.4927, decode.d3.loss_cls: 0.0442, decode.d3.loss_mask: 0.1779, 
decode.d3.loss_dice: 0.4890, decode.d4.loss_cls: 0.0505, decode.d4.loss_mask: 0.1776, decode.d4.loss_dice: 0.4917, decode.d5.loss_cls: 0.0422, decode.d5.loss_mask: 0.1778, decode.d5.loss_dice: 0.4881, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1775, decode.d6.loss_dice: 0.4886, decode.d7.loss_cls: 0.0430, decode.d7.loss_mask: 0.1775, decode.d7.loss_dice: 0.4863, decode.d8.loss_cls: 0.0472, decode.d8.loss_mask: 0.1775, decode.d8.loss_dice: 0.4861, loss: 7.4036 +2022-05-11 12:46:46,637 - mmseg - INFO - Iter [78100/80000] lr: 3.412e-08, eta: 1:07:17, time: 1.819, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0516, decode.loss_mask: 0.1803, decode.loss_dice: 0.4807, decode.d0.loss_cls: 0.2870, decode.d0.loss_mask: 0.1867, decode.d0.loss_dice: 0.5006, decode.d1.loss_cls: 0.0652, decode.d1.loss_mask: 0.1819, decode.d1.loss_dice: 0.4860, decode.d2.loss_cls: 0.0552, decode.d2.loss_mask: 0.1808, decode.d2.loss_dice: 0.4811, decode.d3.loss_cls: 0.0494, decode.d3.loss_mask: 0.1803, decode.d3.loss_dice: 0.4790, decode.d4.loss_cls: 0.0515, decode.d4.loss_mask: 0.1801, decode.d4.loss_dice: 0.4807, decode.d5.loss_cls: 0.0542, decode.d5.loss_mask: 0.1799, decode.d5.loss_dice: 0.4780, decode.d6.loss_cls: 0.0527, decode.d6.loss_mask: 0.1801, decode.d6.loss_dice: 0.4793, decode.d7.loss_cls: 0.0515, decode.d7.loss_mask: 0.1805, decode.d7.loss_dice: 0.4785, decode.d8.loss_cls: 0.0491, decode.d8.loss_mask: 0.1806, decode.d8.loss_dice: 0.4804, loss: 7.4026 +2022-05-11 12:48:20,058 - mmseg - INFO - Iter [78150/80000] lr: 3.322e-08, eta: 1:05:30, time: 1.869, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0487, decode.loss_mask: 0.1792, decode.loss_dice: 0.4860, decode.d0.loss_cls: 0.2950, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5105, decode.d1.loss_cls: 0.0670, decode.d1.loss_mask: 0.1809, decode.d1.loss_dice: 0.4966, decode.d2.loss_cls: 0.0579, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.4883, decode.d3.loss_cls: 0.0510, decode.d3.loss_mask: 0.1799, 
decode.d3.loss_dice: 0.4877, decode.d4.loss_cls: 0.0520, decode.d4.loss_mask: 0.1794, decode.d4.loss_dice: 0.4878, decode.d5.loss_cls: 0.0523, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4887, decode.d6.loss_cls: 0.0515, decode.d6.loss_mask: 0.1792, decode.d6.loss_dice: 0.4870, decode.d7.loss_cls: 0.0483, decode.d7.loss_mask: 0.1789, decode.d7.loss_dice: 0.4862, decode.d8.loss_cls: 0.0475, decode.d8.loss_mask: 0.1790, decode.d8.loss_dice: 0.4886, loss: 7.4799 +2022-05-11 12:49:49,986 - mmseg - INFO - Iter [78200/80000] lr: 3.232e-08, eta: 1:03:43, time: 1.799, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0506, decode.loss_mask: 0.1831, decode.loss_dice: 0.4786, decode.d0.loss_cls: 0.2862, decode.d0.loss_mask: 0.1899, decode.d0.loss_dice: 0.4974, decode.d1.loss_cls: 0.0645, decode.d1.loss_mask: 0.1845, decode.d1.loss_dice: 0.4895, decode.d2.loss_cls: 0.0552, decode.d2.loss_mask: 0.1841, decode.d2.loss_dice: 0.4832, decode.d3.loss_cls: 0.0577, decode.d3.loss_mask: 0.1836, decode.d3.loss_dice: 0.4836, decode.d4.loss_cls: 0.0553, decode.d4.loss_mask: 0.1834, decode.d4.loss_dice: 0.4822, decode.d5.loss_cls: 0.0482, decode.d5.loss_mask: 0.1828, decode.d5.loss_dice: 0.4796, decode.d6.loss_cls: 0.0500, decode.d6.loss_mask: 0.1826, decode.d6.loss_dice: 0.4780, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1831, decode.d7.loss_dice: 0.4834, decode.d8.loss_cls: 0.0526, decode.d8.loss_mask: 0.1829, decode.d8.loss_dice: 0.4795, loss: 7.4458 +2022-05-11 12:51:21,140 - mmseg - INFO - Iter [78250/80000] lr: 3.143e-08, eta: 1:01:56, time: 1.823, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0421, decode.loss_mask: 0.1754, decode.loss_dice: 0.4786, decode.d0.loss_cls: 0.2866, decode.d0.loss_mask: 0.1809, decode.d0.loss_dice: 0.4976, decode.d1.loss_cls: 0.0533, decode.d1.loss_mask: 0.1761, decode.d1.loss_dice: 0.4855, decode.d2.loss_cls: 0.0511, decode.d2.loss_mask: 0.1759, decode.d2.loss_dice: 0.4836, decode.d3.loss_cls: 0.0442, decode.d3.loss_mask: 0.1756, 
decode.d3.loss_dice: 0.4846, decode.d4.loss_cls: 0.0526, decode.d4.loss_mask: 0.1753, decode.d4.loss_dice: 0.4827, decode.d5.loss_cls: 0.0496, decode.d5.loss_mask: 0.1752, decode.d5.loss_dice: 0.4810, decode.d6.loss_cls: 0.0482, decode.d6.loss_mask: 0.1751, decode.d6.loss_dice: 0.4797, decode.d7.loss_cls: 0.0476, decode.d7.loss_mask: 0.1755, decode.d7.loss_dice: 0.4823, decode.d8.loss_cls: 0.0437, decode.d8.loss_mask: 0.1755, decode.d8.loss_dice: 0.4828, loss: 7.3177 +2022-05-11 12:52:51,616 - mmseg - INFO - Iter [78300/80000] lr: 3.053e-08, eta: 1:00:09, time: 1.810, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0485, decode.loss_mask: 0.1758, decode.loss_dice: 0.4809, decode.d0.loss_cls: 0.2990, decode.d0.loss_mask: 0.1820, decode.d0.loss_dice: 0.5017, decode.d1.loss_cls: 0.0617, decode.d1.loss_mask: 0.1771, decode.d1.loss_dice: 0.4872, decode.d2.loss_cls: 0.0568, decode.d2.loss_mask: 0.1770, decode.d2.loss_dice: 0.4838, decode.d3.loss_cls: 0.0533, decode.d3.loss_mask: 0.1760, decode.d3.loss_dice: 0.4831, decode.d4.loss_cls: 0.0497, decode.d4.loss_mask: 0.1759, decode.d4.loss_dice: 0.4802, decode.d5.loss_cls: 0.0477, decode.d5.loss_mask: 0.1762, decode.d5.loss_dice: 0.4854, decode.d6.loss_cls: 0.0498, decode.d6.loss_mask: 0.1757, decode.d6.loss_dice: 0.4798, decode.d7.loss_cls: 0.0478, decode.d7.loss_mask: 0.1758, decode.d7.loss_dice: 0.4798, decode.d8.loss_cls: 0.0452, decode.d8.loss_mask: 0.1756, decode.d8.loss_dice: 0.4811, loss: 7.3697 +2022-05-11 12:54:24,713 - mmseg - INFO - Iter [78350/80000] lr: 2.963e-08, eta: 0:58:22, time: 1.862, data_time: 0.068, memory: 69063, decode.loss_cls: 0.0468, decode.loss_mask: 0.1798, decode.loss_dice: 0.4964, decode.d0.loss_cls: 0.2945, decode.d0.loss_mask: 0.1861, decode.d0.loss_dice: 0.5178, decode.d1.loss_cls: 0.0640, decode.d1.loss_mask: 0.1812, decode.d1.loss_dice: 0.5051, decode.d2.loss_cls: 0.0588, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.5010, decode.d3.loss_cls: 0.0556, decode.d3.loss_mask: 0.1798, 
decode.d3.loss_dice: 0.4997, decode.d4.loss_cls: 0.0540, decode.d4.loss_mask: 0.1801, decode.d4.loss_dice: 0.5008, decode.d5.loss_cls: 0.0473, decode.d5.loss_mask: 0.1799, decode.d5.loss_dice: 0.4971, decode.d6.loss_cls: 0.0507, decode.d6.loss_mask: 0.1796, decode.d6.loss_dice: 0.4967, decode.d7.loss_cls: 0.0519, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4980, decode.d8.loss_cls: 0.0528, decode.d8.loss_mask: 0.1795, decode.d8.loss_dice: 0.4947, loss: 7.5897 +2022-05-11 12:55:55,726 - mmseg - INFO - Iter [78400/80000] lr: 2.873e-08, eta: 0:56:35, time: 1.820, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0382, decode.loss_mask: 0.1754, decode.loss_dice: 0.4833, decode.d0.loss_cls: 0.2855, decode.d0.loss_mask: 0.1804, decode.d0.loss_dice: 0.5089, decode.d1.loss_cls: 0.0571, decode.d1.loss_mask: 0.1763, decode.d1.loss_dice: 0.4913, decode.d2.loss_cls: 0.0482, decode.d2.loss_mask: 0.1760, decode.d2.loss_dice: 0.4897, decode.d3.loss_cls: 0.0473, decode.d3.loss_mask: 0.1755, decode.d3.loss_dice: 0.4872, decode.d4.loss_cls: 0.0456, decode.d4.loss_mask: 0.1753, decode.d4.loss_dice: 0.4836, decode.d5.loss_cls: 0.0469, decode.d5.loss_mask: 0.1758, decode.d5.loss_dice: 0.4919, decode.d6.loss_cls: 0.0461, decode.d6.loss_mask: 0.1753, decode.d6.loss_dice: 0.4851, decode.d7.loss_cls: 0.0397, decode.d7.loss_mask: 0.1753, decode.d7.loss_dice: 0.4870, decode.d8.loss_cls: 0.0442, decode.d8.loss_mask: 0.1753, decode.d8.loss_dice: 0.4851, loss: 7.3523 +2022-05-11 12:57:26,761 - mmseg - INFO - Iter [78450/80000] lr: 2.784e-08, eta: 0:54:48, time: 1.821, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0404, decode.loss_mask: 0.1786, decode.loss_dice: 0.4680, decode.d0.loss_cls: 0.2903, decode.d0.loss_mask: 0.1846, decode.d0.loss_dice: 0.4944, decode.d1.loss_cls: 0.0575, decode.d1.loss_mask: 0.1798, decode.d1.loss_dice: 0.4794, decode.d2.loss_cls: 0.0490, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4772, decode.d3.loss_cls: 0.0429, decode.d3.loss_mask: 0.1789, 
decode.d3.loss_dice: 0.4723, decode.d4.loss_cls: 0.0450, decode.d4.loss_mask: 0.1793, decode.d4.loss_dice: 0.4739, decode.d5.loss_cls: 0.0459, decode.d5.loss_mask: 0.1791, decode.d5.loss_dice: 0.4733, decode.d6.loss_cls: 0.0437, decode.d6.loss_mask: 0.1790, decode.d6.loss_dice: 0.4727, decode.d7.loss_cls: 0.0399, decode.d7.loss_mask: 0.1792, decode.d7.loss_dice: 0.4710, decode.d8.loss_cls: 0.0407, decode.d8.loss_mask: 0.1786, decode.d8.loss_dice: 0.4706, loss: 7.2446 +2022-05-11 12:58:56,252 - mmseg - INFO - Iter [78500/80000] lr: 2.694e-08, eta: 0:53:01, time: 1.790, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0459, decode.loss_mask: 0.1793, decode.loss_dice: 0.4784, decode.d0.loss_cls: 0.3003, decode.d0.loss_mask: 0.1857, decode.d0.loss_dice: 0.4996, decode.d1.loss_cls: 0.0591, decode.d1.loss_mask: 0.1816, decode.d1.loss_dice: 0.4878, decode.d2.loss_cls: 0.0455, decode.d2.loss_mask: 0.1805, decode.d2.loss_dice: 0.4844, decode.d3.loss_cls: 0.0475, decode.d3.loss_mask: 0.1799, decode.d3.loss_dice: 0.4784, decode.d4.loss_cls: 0.0395, decode.d4.loss_mask: 0.1800, decode.d4.loss_dice: 0.4779, decode.d5.loss_cls: 0.0440, decode.d5.loss_mask: 0.1796, decode.d5.loss_dice: 0.4807, decode.d6.loss_cls: 0.0428, decode.d6.loss_mask: 0.1798, decode.d6.loss_dice: 0.4797, decode.d7.loss_cls: 0.0444, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4792, decode.d8.loss_cls: 0.0393, decode.d8.loss_mask: 0.1795, decode.d8.loss_dice: 0.4751, loss: 7.3348 +2022-05-11 13:00:29,501 - mmseg - INFO - Iter [78550/80000] lr: 2.604e-08, eta: 0:51:14, time: 1.865, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0488, decode.loss_mask: 0.1838, decode.loss_dice: 0.4787, decode.d0.loss_cls: 0.3024, decode.d0.loss_mask: 0.1909, decode.d0.loss_dice: 0.4944, decode.d1.loss_cls: 0.0677, decode.d1.loss_mask: 0.1852, decode.d1.loss_dice: 0.4868, decode.d2.loss_cls: 0.0550, decode.d2.loss_mask: 0.1848, decode.d2.loss_dice: 0.4824, decode.d3.loss_cls: 0.0525, decode.d3.loss_mask: 0.1844, 
decode.d3.loss_dice: 0.4756, decode.d4.loss_cls: 0.0564, decode.d4.loss_mask: 0.1845, decode.d4.loss_dice: 0.4812, decode.d5.loss_cls: 0.0485, decode.d5.loss_mask: 0.1838, decode.d5.loss_dice: 0.4799, decode.d6.loss_cls: 0.0552, decode.d6.loss_mask: 0.1833, decode.d6.loss_dice: 0.4825, decode.d7.loss_cls: 0.0519, decode.d7.loss_mask: 0.1835, decode.d7.loss_dice: 0.4834, decode.d8.loss_cls: 0.0468, decode.d8.loss_mask: 0.1835, decode.d8.loss_dice: 0.4794, loss: 7.4570 +2022-05-11 13:01:59,977 - mmseg - INFO - Iter [78600/80000] lr: 2.514e-08, eta: 0:49:28, time: 1.809, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0384, decode.loss_mask: 0.1737, decode.loss_dice: 0.4772, decode.d0.loss_cls: 0.2940, decode.d0.loss_mask: 0.1798, decode.d0.loss_dice: 0.4999, decode.d1.loss_cls: 0.0570, decode.d1.loss_mask: 0.1749, decode.d1.loss_dice: 0.4864, decode.d2.loss_cls: 0.0526, decode.d2.loss_mask: 0.1746, decode.d2.loss_dice: 0.4828, decode.d3.loss_cls: 0.0449, decode.d3.loss_mask: 0.1741, decode.d3.loss_dice: 0.4803, decode.d4.loss_cls: 0.0431, decode.d4.loss_mask: 0.1742, decode.d4.loss_dice: 0.4782, decode.d5.loss_cls: 0.0401, decode.d5.loss_mask: 0.1736, decode.d5.loss_dice: 0.4779, decode.d6.loss_cls: 0.0427, decode.d6.loss_mask: 0.1736, decode.d6.loss_dice: 0.4770, decode.d7.loss_cls: 0.0442, decode.d7.loss_mask: 0.1739, decode.d7.loss_dice: 0.4765, decode.d8.loss_cls: 0.0396, decode.d8.loss_mask: 0.1737, decode.d8.loss_dice: 0.4791, loss: 7.2581 +2022-05-11 13:03:30,854 - mmseg - INFO - Iter [78650/80000] lr: 2.425e-08, eta: 0:47:41, time: 1.818, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0478, decode.loss_mask: 0.1780, decode.loss_dice: 0.4848, decode.d0.loss_cls: 0.2899, decode.d0.loss_mask: 0.1849, decode.d0.loss_dice: 0.5088, decode.d1.loss_cls: 0.0599, decode.d1.loss_mask: 0.1799, decode.d1.loss_dice: 0.4935, decode.d2.loss_cls: 0.0587, decode.d2.loss_mask: 0.1787, decode.d2.loss_dice: 0.4924, decode.d3.loss_cls: 0.0505, decode.d3.loss_mask: 0.1783, 
decode.d3.loss_dice: 0.4881, decode.d4.loss_cls: 0.0469, decode.d4.loss_mask: 0.1784, decode.d4.loss_dice: 0.4875, decode.d5.loss_cls: 0.0486, decode.d5.loss_mask: 0.1783, decode.d5.loss_dice: 0.4861, decode.d6.loss_cls: 0.0448, decode.d6.loss_mask: 0.1782, decode.d6.loss_dice: 0.4879, decode.d7.loss_cls: 0.0427, decode.d7.loss_mask: 0.1780, decode.d7.loss_dice: 0.4835, decode.d8.loss_cls: 0.0431, decode.d8.loss_mask: 0.1781, decode.d8.loss_dice: 0.4840, loss: 7.4204 +2022-05-11 13:05:03,818 - mmseg - INFO - Iter [78700/80000] lr: 2.335e-08, eta: 0:45:54, time: 1.859, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0473, decode.loss_mask: 0.1799, decode.loss_dice: 0.4942, decode.d0.loss_cls: 0.2776, decode.d0.loss_mask: 0.1862, decode.d0.loss_dice: 0.5160, decode.d1.loss_cls: 0.0617, decode.d1.loss_mask: 0.1818, decode.d1.loss_dice: 0.5038, decode.d2.loss_cls: 0.0538, decode.d2.loss_mask: 0.1808, decode.d2.loss_dice: 0.4979, decode.d3.loss_cls: 0.0487, decode.d3.loss_mask: 0.1806, decode.d3.loss_dice: 0.4961, decode.d4.loss_cls: 0.0452, decode.d4.loss_mask: 0.1806, decode.d4.loss_dice: 0.4952, decode.d5.loss_cls: 0.0430, decode.d5.loss_mask: 0.1805, decode.d5.loss_dice: 0.4965, decode.d6.loss_cls: 0.0456, decode.d6.loss_mask: 0.1803, decode.d6.loss_dice: 0.4925, decode.d7.loss_cls: 0.0470, decode.d7.loss_mask: 0.1804, decode.d7.loss_dice: 0.4912, decode.d8.loss_cls: 0.0461, decode.d8.loss_mask: 0.1803, decode.d8.loss_dice: 0.4950, loss: 7.5056 +2022-05-11 13:06:33,528 - mmseg - INFO - Iter [78750/80000] lr: 2.245e-08, eta: 0:44:08, time: 1.794, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0523, decode.loss_mask: 0.1732, decode.loss_dice: 0.4830, decode.d0.loss_cls: 0.2973, decode.d0.loss_mask: 0.1802, decode.d0.loss_dice: 0.5072, decode.d1.loss_cls: 0.0644, decode.d1.loss_mask: 0.1751, decode.d1.loss_dice: 0.4933, decode.d2.loss_cls: 0.0532, decode.d2.loss_mask: 0.1740, decode.d2.loss_dice: 0.4907, decode.d3.loss_cls: 0.0490, decode.d3.loss_mask: 0.1743, 
decode.d3.loss_dice: 0.4855, decode.d4.loss_cls: 0.0547, decode.d4.loss_mask: 0.1737, decode.d4.loss_dice: 0.4865, decode.d5.loss_cls: 0.0542, decode.d5.loss_mask: 0.1733, decode.d5.loss_dice: 0.4827, decode.d6.loss_cls: 0.0494, decode.d6.loss_mask: 0.1739, decode.d6.loss_dice: 0.4847, decode.d7.loss_cls: 0.0545, decode.d7.loss_mask: 0.1731, decode.d7.loss_dice: 0.4809, decode.d8.loss_cls: 0.0515, decode.d8.loss_mask: 0.1731, decode.d8.loss_dice: 0.4801, loss: 7.3989 +2022-05-11 13:08:04,060 - mmseg - INFO - Iter [78800/80000] lr: 2.155e-08, eta: 0:42:21, time: 1.810, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0479, decode.loss_mask: 0.1790, decode.loss_dice: 0.4858, decode.d0.loss_cls: 0.3091, decode.d0.loss_mask: 0.1855, decode.d0.loss_dice: 0.5065, decode.d1.loss_cls: 0.0647, decode.d1.loss_mask: 0.1804, decode.d1.loss_dice: 0.4891, decode.d2.loss_cls: 0.0598, decode.d2.loss_mask: 0.1795, decode.d2.loss_dice: 0.4899, decode.d3.loss_cls: 0.0535, decode.d3.loss_mask: 0.1792, decode.d3.loss_dice: 0.4851, decode.d4.loss_cls: 0.0533, decode.d4.loss_mask: 0.1792, decode.d4.loss_dice: 0.4853, decode.d5.loss_cls: 0.0495, decode.d5.loss_mask: 0.1791, decode.d5.loss_dice: 0.4836, decode.d6.loss_cls: 0.0503, decode.d6.loss_mask: 0.1790, decode.d6.loss_dice: 0.4856, decode.d7.loss_cls: 0.0505, decode.d7.loss_mask: 0.1792, decode.d7.loss_dice: 0.4809, decode.d8.loss_cls: 0.0494, decode.d8.loss_mask: 0.1791, decode.d8.loss_dice: 0.4845, loss: 7.4635 +2022-05-11 13:09:34,769 - mmseg - INFO - Iter [78850/80000] lr: 2.066e-08, eta: 0:40:35, time: 1.816, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0492, decode.loss_mask: 0.1797, decode.loss_dice: 0.4821, decode.d0.loss_cls: 0.2982, decode.d0.loss_mask: 0.1857, decode.d0.loss_dice: 0.5044, decode.d1.loss_cls: 0.0656, decode.d1.loss_mask: 0.1809, decode.d1.loss_dice: 0.4938, decode.d2.loss_cls: 0.0537, decode.d2.loss_mask: 0.1807, decode.d2.loss_dice: 0.4929, decode.d3.loss_cls: 0.0515, decode.d3.loss_mask: 0.1801, 
decode.d3.loss_dice: 0.4871, decode.d4.loss_cls: 0.0522, decode.d4.loss_mask: 0.1802, decode.d4.loss_dice: 0.4864, decode.d5.loss_cls: 0.0483, decode.d5.loss_mask: 0.1800, decode.d5.loss_dice: 0.4851, decode.d6.loss_cls: 0.0506, decode.d6.loss_mask: 0.1798, decode.d6.loss_dice: 0.4846, decode.d7.loss_cls: 0.0489, decode.d7.loss_mask: 0.1798, decode.d7.loss_dice: 0.4843, decode.d8.loss_cls: 0.0511, decode.d8.loss_mask: 0.1801, decode.d8.loss_dice: 0.4854, loss: 7.4625 +2022-05-11 13:11:06,858 - mmseg - INFO - Iter [78900/80000] lr: 1.976e-08, eta: 0:38:48, time: 1.842, data_time: 0.065, memory: 69063, decode.loss_cls: 0.0363, decode.loss_mask: 0.1719, decode.loss_dice: 0.4777, decode.d0.loss_cls: 0.2804, decode.d0.loss_mask: 0.1772, decode.d0.loss_dice: 0.4911, decode.d1.loss_cls: 0.0540, decode.d1.loss_mask: 0.1734, decode.d1.loss_dice: 0.4817, decode.d2.loss_cls: 0.0484, decode.d2.loss_mask: 0.1727, decode.d2.loss_dice: 0.4794, decode.d3.loss_cls: 0.0400, decode.d3.loss_mask: 0.1730, decode.d3.loss_dice: 0.4734, decode.d4.loss_cls: 0.0397, decode.d4.loss_mask: 0.1729, decode.d4.loss_dice: 0.4758, decode.d5.loss_cls: 0.0428, decode.d5.loss_mask: 0.1728, decode.d5.loss_dice: 0.4769, decode.d6.loss_cls: 0.0386, decode.d6.loss_mask: 0.1725, decode.d6.loss_dice: 0.4731, decode.d7.loss_cls: 0.0417, decode.d7.loss_mask: 0.1723, decode.d7.loss_dice: 0.4742, decode.d8.loss_cls: 0.0361, decode.d8.loss_mask: 0.1724, decode.d8.loss_dice: 0.4764, loss: 7.1688 +2022-05-11 13:12:36,746 - mmseg - INFO - Iter [78950/80000] lr: 1.886e-08, eta: 0:37:02, time: 1.797, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0448, decode.loss_mask: 0.1825, decode.loss_dice: 0.4883, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1891, decode.d0.loss_dice: 0.5111, decode.d1.loss_cls: 0.0579, decode.d1.loss_mask: 0.1839, decode.d1.loss_dice: 0.4970, decode.d2.loss_cls: 0.0516, decode.d2.loss_mask: 0.1832, decode.d2.loss_dice: 0.4938, decode.d3.loss_cls: 0.0487, decode.d3.loss_mask: 0.1827, 
decode.d3.loss_dice: 0.4906, decode.d4.loss_cls: 0.0476, decode.d4.loss_mask: 0.1827, decode.d4.loss_dice: 0.4944, decode.d5.loss_cls: 0.0460, decode.d5.loss_mask: 0.1829, decode.d5.loss_dice: 0.4909, decode.d6.loss_cls: 0.0445, decode.d6.loss_mask: 0.1822, decode.d6.loss_dice: 0.4888, decode.d7.loss_cls: 0.0424, decode.d7.loss_mask: 0.1825, decode.d7.loss_dice: 0.4901, decode.d8.loss_cls: 0.0410, decode.d8.loss_mask: 0.1824, decode.d8.loss_dice: 0.4904, loss: 7.4917 +2022-05-11 13:14:07,537 - mmseg - INFO - Saving checkpoint at 79000 iterations +2022-05-11 13:14:39,632 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 13:14:39,641 - mmseg - INFO - Iter [79000/80000] lr: 1.797e-08, eta: 0:35:17, time: 2.455, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0517, decode.loss_mask: 0.1834, decode.loss_dice: 0.4814, decode.d0.loss_cls: 0.2980, decode.d0.loss_mask: 0.1900, decode.d0.loss_dice: 0.4964, decode.d1.loss_cls: 0.0640, decode.d1.loss_mask: 0.1848, decode.d1.loss_dice: 0.4833, decode.d2.loss_cls: 0.0539, decode.d2.loss_mask: 0.1843, decode.d2.loss_dice: 0.4835, decode.d3.loss_cls: 0.0523, decode.d3.loss_mask: 0.1836, decode.d3.loss_dice: 0.4788, decode.d4.loss_cls: 0.0540, decode.d4.loss_mask: 0.1838, decode.d4.loss_dice: 0.4795, decode.d5.loss_cls: 0.0548, decode.d5.loss_mask: 0.1836, decode.d5.loss_dice: 0.4796, decode.d6.loss_cls: 0.0498, decode.d6.loss_mask: 0.1836, decode.d6.loss_dice: 0.4810, decode.d7.loss_cls: 0.0498, decode.d7.loss_mask: 0.1836, decode.d7.loss_dice: 0.4771, decode.d8.loss_cls: 0.0447, decode.d8.loss_mask: 0.1834, decode.d8.loss_dice: 0.4786, loss: 7.4364 +2022-05-11 13:16:35,134 - mmseg - INFO - per class results: +2022-05-11 13:16:35,140 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | ++---------------+-------+-------+ +| road | 98.63 | 99.25 | +| sidewalk | 88.77 | 93.97 | +| building | 94.39 | 97.03 | +| wall | 67.18 | 79.93 | +| fence | 74.35 | 81.3 | +| 
pole | 71.16 | 83.68 | +| traffic light | 77.11 | 87.23 | +| traffic sign | 84.08 | 90.5 | +| vegetation | 93.35 | 96.99 | +| terrain | 68.7 | 77.43 | +| sky | 95.76 | 98.45 | +| person | 86.73 | 93.9 | +| rider | 74.11 | 85.02 | +| car | 96.17 | 98.28 | +| truck | 92.09 | 94.58 | +| bus | 93.56 | 96.56 | +| train | 87.88 | 90.68 | +| motorcycle | 77.8 | 87.23 | +| bicycle | 82.75 | 91.6 | ++---------------+-------+-------+ +2022-05-11 13:16:35,141 - mmseg - INFO - Summary: +2022-05-11 13:16:35,141 - mmseg - INFO - ++-------+-------+-------+ +| aAcc | mIoU | mAcc | ++-------+-------+-------+ +| 97.02 | 84.45 | 90.72 | ++-------+-------+-------+ +2022-05-11 13:16:35,144 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 13:16:35,144 - mmseg - INFO - Iter(val) [32] aAcc: 0.9702, mIoU: 0.8445, mAcc: 0.9072, IoU.road: 0.9863, IoU.sidewalk: 0.8877, IoU.building: 0.9439, IoU.wall: 0.6718, IoU.fence: 0.7435, IoU.pole: 0.7116, IoU.traffic light: 0.7711, IoU.traffic sign: 0.8408, IoU.vegetation: 0.9335, IoU.terrain: 0.6870, IoU.sky: 0.9576, IoU.person: 0.8673, IoU.rider: 0.7411, IoU.car: 0.9617, IoU.truck: 0.9209, IoU.bus: 0.9356, IoU.train: 0.8788, IoU.motorcycle: 0.7780, IoU.bicycle: 0.8275, Acc.road: 0.9925, Acc.sidewalk: 0.9397, Acc.building: 0.9703, Acc.wall: 0.7993, Acc.fence: 0.8130, Acc.pole: 0.8368, Acc.traffic light: 0.8723, Acc.traffic sign: 0.9050, Acc.vegetation: 0.9699, Acc.terrain: 0.7743, Acc.sky: 0.9845, Acc.person: 0.9390, Acc.rider: 0.8502, Acc.car: 0.9828, Acc.truck: 0.9458, Acc.bus: 0.9656, Acc.train: 0.9068, Acc.motorcycle: 0.8723, Acc.bicycle: 0.9160 +2022-05-11 13:18:06,048 - mmseg - INFO - Iter [79050/80000] lr: 1.707e-08, eta: 0:33:34, time: 4.131, data_time: 2.329, memory: 69063, decode.loss_cls: 0.0556, decode.loss_mask: 0.1754, decode.loss_dice: 0.4935, decode.d0.loss_cls: 0.2906, decode.d0.loss_mask: 0.1808, decode.d0.loss_dice: 0.5146, decode.d1.loss_cls: 0.0678, decode.d1.loss_mask: 0.1766, 
decode.d1.loss_dice: 0.5001, decode.d2.loss_cls: 0.0621, decode.d2.loss_mask: 0.1764, decode.d2.loss_dice: 0.4983, decode.d3.loss_cls: 0.0583, decode.d3.loss_mask: 0.1760, decode.d3.loss_dice: 0.4904, decode.d4.loss_cls: 0.0564, decode.d4.loss_mask: 0.1758, decode.d4.loss_dice: 0.4911, decode.d5.loss_cls: 0.0549, decode.d5.loss_mask: 0.1758, decode.d5.loss_dice: 0.4932, decode.d6.loss_cls: 0.0573, decode.d6.loss_mask: 0.1756, decode.d6.loss_dice: 0.4922, decode.d7.loss_cls: 0.0565, decode.d7.loss_mask: 0.1753, decode.d7.loss_dice: 0.4886, decode.d8.loss_cls: 0.0551, decode.d8.loss_mask: 0.1753, decode.d8.loss_dice: 0.4937, loss: 7.5335 +2022-05-11 13:19:38,896 - mmseg - INFO - Iter [79100/80000] lr: 1.617e-08, eta: 0:31:48, time: 1.857, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0567, decode.loss_mask: 0.1760, decode.loss_dice: 0.4818, decode.d0.loss_cls: 0.2888, decode.d0.loss_mask: 0.1818, decode.d0.loss_dice: 0.5043, decode.d1.loss_cls: 0.0675, decode.d1.loss_mask: 0.1773, decode.d1.loss_dice: 0.4863, decode.d2.loss_cls: 0.0655, decode.d2.loss_mask: 0.1772, decode.d2.loss_dice: 0.4886, decode.d3.loss_cls: 0.0608, decode.d3.loss_mask: 0.1771, decode.d3.loss_dice: 0.4840, decode.d4.loss_cls: 0.0574, decode.d4.loss_mask: 0.1768, decode.d4.loss_dice: 0.4826, decode.d5.loss_cls: 0.0530, decode.d5.loss_mask: 0.1767, decode.d5.loss_dice: 0.4823, decode.d6.loss_cls: 0.0543, decode.d6.loss_mask: 0.1766, decode.d6.loss_dice: 0.4797, decode.d7.loss_cls: 0.0540, decode.d7.loss_mask: 0.1765, decode.d7.loss_dice: 0.4820, decode.d8.loss_cls: 0.0540, decode.d8.loss_mask: 0.1762, decode.d8.loss_dice: 0.4828, loss: 7.4386 +2022-05-11 13:21:08,294 - mmseg - INFO - Iter [79150/80000] lr: 1.527e-08, eta: 0:30:01, time: 1.788, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0519, decode.loss_mask: 0.1815, decode.loss_dice: 0.4845, decode.d0.loss_cls: 0.2892, decode.d0.loss_mask: 0.1887, decode.d0.loss_dice: 0.5053, decode.d1.loss_cls: 0.0586, decode.d1.loss_mask: 0.1837, 
decode.d1.loss_dice: 0.4910, decode.d2.loss_cls: 0.0549, decode.d2.loss_mask: 0.1825, decode.d2.loss_dice: 0.4917, decode.d3.loss_cls: 0.0424, decode.d3.loss_mask: 0.1823, decode.d3.loss_dice: 0.4866, decode.d4.loss_cls: 0.0442, decode.d4.loss_mask: 0.1825, decode.d4.loss_dice: 0.4879, decode.d5.loss_cls: 0.0451, decode.d5.loss_mask: 0.1823, decode.d5.loss_dice: 0.4879, decode.d6.loss_cls: 0.0440, decode.d6.loss_mask: 0.1823, decode.d6.loss_dice: 0.4872, decode.d7.loss_cls: 0.0437, decode.d7.loss_mask: 0.1819, decode.d7.loss_dice: 0.4876, decode.d8.loss_cls: 0.0420, decode.d8.loss_mask: 0.1819, decode.d8.loss_dice: 0.4832, loss: 7.4385 +2022-05-11 13:22:36,683 - mmseg - INFO - Iter [79200/80000] lr: 1.438e-08, eta: 0:28:15, time: 1.768, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0559, decode.loss_mask: 0.1777, decode.loss_dice: 0.4848, decode.d0.loss_cls: 0.3043, decode.d0.loss_mask: 0.1848, decode.d0.loss_dice: 0.5122, decode.d1.loss_cls: 0.0700, decode.d1.loss_mask: 0.1802, decode.d1.loss_dice: 0.4976, decode.d2.loss_cls: 0.0612, decode.d2.loss_mask: 0.1791, decode.d2.loss_dice: 0.4914, decode.d3.loss_cls: 0.0557, decode.d3.loss_mask: 0.1785, decode.d3.loss_dice: 0.4894, decode.d4.loss_cls: 0.0531, decode.d4.loss_mask: 0.1782, decode.d4.loss_dice: 0.4845, decode.d5.loss_cls: 0.0582, decode.d5.loss_mask: 0.1783, decode.d5.loss_dice: 0.4863, decode.d6.loss_cls: 0.0652, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4865, decode.d7.loss_cls: 0.0536, decode.d7.loss_mask: 0.1778, decode.d7.loss_dice: 0.4871, decode.d8.loss_cls: 0.0592, decode.d8.loss_mask: 0.1778, decode.d8.loss_dice: 0.4884, loss: 7.5347 +2022-05-11 13:24:08,600 - mmseg - INFO - Iter [79250/80000] lr: 1.348e-08, eta: 0:26:28, time: 1.838, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0509, decode.loss_mask: 0.1798, decode.loss_dice: 0.4842, decode.d0.loss_cls: 0.2969, decode.d0.loss_mask: 0.1866, decode.d0.loss_dice: 0.5109, decode.d1.loss_cls: 0.0693, decode.d1.loss_mask: 0.1819, 
decode.d1.loss_dice: 0.4952, decode.d2.loss_cls: 0.0586, decode.d2.loss_mask: 0.1811, decode.d2.loss_dice: 0.4906, decode.d3.loss_cls: 0.0480, decode.d3.loss_mask: 0.1807, decode.d3.loss_dice: 0.4844, decode.d4.loss_cls: 0.0503, decode.d4.loss_mask: 0.1808, decode.d4.loss_dice: 0.4855, decode.d5.loss_cls: 0.0524, decode.d5.loss_mask: 0.1805, decode.d5.loss_dice: 0.4858, decode.d6.loss_cls: 0.0509, decode.d6.loss_mask: 0.1799, decode.d6.loss_dice: 0.4866, decode.d7.loss_cls: 0.0511, decode.d7.loss_mask: 0.1802, decode.d7.loss_dice: 0.4872, decode.d8.loss_cls: 0.0509, decode.d8.loss_mask: 0.1798, decode.d8.loss_dice: 0.4847, loss: 7.4856 +2022-05-11 13:25:37,879 - mmseg - INFO - Iter [79300/80000] lr: 1.258e-08, eta: 0:24:42, time: 1.786, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0579, decode.loss_mask: 0.1753, decode.loss_dice: 0.4897, decode.d0.loss_cls: 0.3059, decode.d0.loss_mask: 0.1821, decode.d0.loss_dice: 0.5141, decode.d1.loss_cls: 0.0698, decode.d1.loss_mask: 0.1770, decode.d1.loss_dice: 0.5002, decode.d2.loss_cls: 0.0590, decode.d2.loss_mask: 0.1760, decode.d2.loss_dice: 0.4940, decode.d3.loss_cls: 0.0638, decode.d3.loss_mask: 0.1756, decode.d3.loss_dice: 0.4897, decode.d4.loss_cls: 0.0574, decode.d4.loss_mask: 0.1756, decode.d4.loss_dice: 0.4915, decode.d5.loss_cls: 0.0569, decode.d5.loss_mask: 0.1754, decode.d5.loss_dice: 0.4915, decode.d6.loss_cls: 0.0554, decode.d6.loss_mask: 0.1756, decode.d6.loss_dice: 0.4904, decode.d7.loss_cls: 0.0553, decode.d7.loss_mask: 0.1756, decode.d7.loss_dice: 0.4928, decode.d8.loss_cls: 0.0558, decode.d8.loss_mask: 0.1753, decode.d8.loss_dice: 0.4913, loss: 7.5461 +2022-05-11 13:27:08,987 - mmseg - INFO - Iter [79350/80000] lr: 1.168e-08, eta: 0:22:56, time: 1.822, data_time: 0.019, memory: 69063, decode.loss_cls: 0.0462, decode.loss_mask: 0.1794, decode.loss_dice: 0.4891, decode.d0.loss_cls: 0.2918, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5088, decode.d1.loss_cls: 0.0701, decode.d1.loss_mask: 0.1807, 
decode.d1.loss_dice: 0.4965, decode.d2.loss_cls: 0.0546, decode.d2.loss_mask: 0.1800, decode.d2.loss_dice: 0.4940, decode.d3.loss_cls: 0.0519, decode.d3.loss_mask: 0.1798, decode.d3.loss_dice: 0.4913, decode.d4.loss_cls: 0.0483, decode.d4.loss_mask: 0.1797, decode.d4.loss_dice: 0.4914, decode.d5.loss_cls: 0.0471, decode.d5.loss_mask: 0.1801, decode.d5.loss_dice: 0.4919, decode.d6.loss_cls: 0.0459, decode.d6.loss_mask: 0.1794, decode.d6.loss_dice: 0.4885, decode.d7.loss_cls: 0.0474, decode.d7.loss_mask: 0.1797, decode.d7.loss_dice: 0.4920, decode.d8.loss_cls: 0.0482, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4912, loss: 7.4893 +2022-05-11 13:28:39,058 - mmseg - INFO - Iter [79400/80000] lr: 1.079e-08, eta: 0:21:10, time: 1.801, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0582, decode.loss_mask: 0.1774, decode.loss_dice: 0.4848, decode.d0.loss_cls: 0.2999, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5138, decode.d1.loss_cls: 0.0691, decode.d1.loss_mask: 0.1792, decode.d1.loss_dice: 0.4951, decode.d2.loss_cls: 0.0628, decode.d2.loss_mask: 0.1782, decode.d2.loss_dice: 0.4947, decode.d3.loss_cls: 0.0610, decode.d3.loss_mask: 0.1780, decode.d3.loss_dice: 0.4929, decode.d4.loss_cls: 0.0635, decode.d4.loss_mask: 0.1778, decode.d4.loss_dice: 0.4883, decode.d5.loss_cls: 0.0585, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4893, decode.d6.loss_cls: 0.0570, decode.d6.loss_mask: 0.1777, decode.d6.loss_dice: 0.4875, decode.d7.loss_cls: 0.0634, decode.d7.loss_mask: 0.1775, decode.d7.loss_dice: 0.4876, decode.d8.loss_cls: 0.0590, decode.d8.loss_mask: 0.1775, decode.d8.loss_dice: 0.4858, loss: 7.5583 +2022-05-11 13:30:10,802 - mmseg - INFO - Iter [79450/80000] lr: 9.889e-09, eta: 0:19:24, time: 1.835, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0424, decode.loss_mask: 0.1797, decode.loss_dice: 0.4618, decode.d0.loss_cls: 0.2912, decode.d0.loss_mask: 0.1865, decode.d0.loss_dice: 0.4814, decode.d1.loss_cls: 0.0623, decode.d1.loss_mask: 0.1807, 
decode.d1.loss_dice: 0.4709, decode.d2.loss_cls: 0.0549, decode.d2.loss_mask: 0.1803, decode.d2.loss_dice: 0.4669, decode.d3.loss_cls: 0.0428, decode.d3.loss_mask: 0.1804, decode.d3.loss_dice: 0.4665, decode.d4.loss_cls: 0.0417, decode.d4.loss_mask: 0.1802, decode.d4.loss_dice: 0.4625, decode.d5.loss_cls: 0.0419, decode.d5.loss_mask: 0.1797, decode.d5.loss_dice: 0.4618, decode.d6.loss_cls: 0.0427, decode.d6.loss_mask: 0.1796, decode.d6.loss_dice: 0.4615, decode.d7.loss_cls: 0.0424, decode.d7.loss_mask: 0.1795, decode.d7.loss_dice: 0.4626, decode.d8.loss_cls: 0.0473, decode.d8.loss_mask: 0.1794, decode.d8.loss_dice: 0.4612, loss: 7.1726 +2022-05-11 13:31:41,018 - mmseg - INFO - Iter [79500/80000] lr: 8.992e-09, eta: 0:17:38, time: 1.804, data_time: 0.018, memory: 69063, decode.loss_cls: 0.0485, decode.loss_mask: 0.1743, decode.loss_dice: 0.4892, decode.d0.loss_cls: 0.2882, decode.d0.loss_mask: 0.1805, decode.d0.loss_dice: 0.5088, decode.d1.loss_cls: 0.0643, decode.d1.loss_mask: 0.1755, decode.d1.loss_dice: 0.4925, decode.d2.loss_cls: 0.0567, decode.d2.loss_mask: 0.1754, decode.d2.loss_dice: 0.4954, decode.d3.loss_cls: 0.0572, decode.d3.loss_mask: 0.1746, decode.d3.loss_dice: 0.4881, decode.d4.loss_cls: 0.0541, decode.d4.loss_mask: 0.1744, decode.d4.loss_dice: 0.4872, decode.d5.loss_cls: 0.0529, decode.d5.loss_mask: 0.1746, decode.d5.loss_dice: 0.4861, decode.d6.loss_cls: 0.0527, decode.d6.loss_mask: 0.1742, decode.d6.loss_dice: 0.4872, decode.d7.loss_cls: 0.0475, decode.d7.loss_mask: 0.1747, decode.d7.loss_dice: 0.4889, decode.d8.loss_cls: 0.0491, decode.d8.loss_mask: 0.1745, decode.d8.loss_dice: 0.4875, loss: 7.4347 +2022-05-11 13:33:11,557 - mmseg - INFO - Iter [79550/80000] lr: 8.094e-09, eta: 0:15:51, time: 1.811, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0439, decode.loss_mask: 0.1788, decode.loss_dice: 0.4883, decode.d0.loss_cls: 0.2825, decode.d0.loss_mask: 0.1841, decode.d0.loss_dice: 0.5060, decode.d1.loss_cls: 0.0617, decode.d1.loss_mask: 0.1801, 
decode.d1.loss_dice: 0.4960, decode.d2.loss_cls: 0.0567, decode.d2.loss_mask: 0.1796, decode.d2.loss_dice: 0.4895, decode.d3.loss_cls: 0.0489, decode.d3.loss_mask: 0.1793, decode.d3.loss_dice: 0.4875, decode.d4.loss_cls: 0.0457, decode.d4.loss_mask: 0.1796, decode.d4.loss_dice: 0.4871, decode.d5.loss_cls: 0.0449, decode.d5.loss_mask: 0.1791, decode.d5.loss_dice: 0.4871, decode.d6.loss_cls: 0.0448, decode.d6.loss_mask: 0.1788, decode.d6.loss_dice: 0.4876, decode.d7.loss_cls: 0.0420, decode.d7.loss_mask: 0.1793, decode.d7.loss_dice: 0.4881, decode.d8.loss_cls: 0.0407, decode.d8.loss_mask: 0.1785, decode.d8.loss_dice: 0.4874, loss: 7.4137 +2022-05-11 13:34:41,223 - mmseg - INFO - Iter [79600/80000] lr: 7.197e-09, eta: 0:14:06, time: 1.793, data_time: 0.016, memory: 69063, decode.loss_cls: 0.0436, decode.loss_mask: 0.1786, decode.loss_dice: 0.4822, decode.d0.loss_cls: 0.3000, decode.d0.loss_mask: 0.1844, decode.d0.loss_dice: 0.5029, decode.d1.loss_cls: 0.0561, decode.d1.loss_mask: 0.1795, decode.d1.loss_dice: 0.4896, decode.d2.loss_cls: 0.0571, decode.d2.loss_mask: 0.1800, decode.d2.loss_dice: 0.4894, decode.d3.loss_cls: 0.0492, decode.d3.loss_mask: 0.1795, decode.d3.loss_dice: 0.4833, decode.d4.loss_cls: 0.0467, decode.d4.loss_mask: 0.1795, decode.d4.loss_dice: 0.4834, decode.d5.loss_cls: 0.0441, decode.d5.loss_mask: 0.1793, decode.d5.loss_dice: 0.4834, decode.d6.loss_cls: 0.0465, decode.d6.loss_mask: 0.1795, decode.d6.loss_dice: 0.4827, decode.d7.loss_cls: 0.0438, decode.d7.loss_mask: 0.1791, decode.d7.loss_dice: 0.4818, decode.d8.loss_cls: 0.0432, decode.d8.loss_mask: 0.1789, decode.d8.loss_dice: 0.4839, loss: 7.3910 +2022-05-11 13:36:14,992 - mmseg - INFO - Iter [79650/80000] lr: 6.300e-09, eta: 0:12:20, time: 1.876, data_time: 0.063, memory: 69063, decode.loss_cls: 0.0429, decode.loss_mask: 0.1758, decode.loss_dice: 0.4811, decode.d0.loss_cls: 0.2978, decode.d0.loss_mask: 0.1824, decode.d0.loss_dice: 0.5073, decode.d1.loss_cls: 0.0587, decode.d1.loss_mask: 0.1777, 
decode.d1.loss_dice: 0.4918, decode.d2.loss_cls: 0.0460, decode.d2.loss_mask: 0.1768, decode.d2.loss_dice: 0.4871, decode.d3.loss_cls: 0.0483, decode.d3.loss_mask: 0.1762, decode.d3.loss_dice: 0.4891, decode.d4.loss_cls: 0.0431, decode.d4.loss_mask: 0.1766, decode.d4.loss_dice: 0.4868, decode.d5.loss_cls: 0.0454, decode.d5.loss_mask: 0.1764, decode.d5.loss_dice: 0.4876, decode.d6.loss_cls: 0.0419, decode.d6.loss_mask: 0.1762, decode.d6.loss_dice: 0.4870, decode.d7.loss_cls: 0.0459, decode.d7.loss_mask: 0.1758, decode.d7.loss_dice: 0.4837, decode.d8.loss_cls: 0.0473, decode.d8.loss_mask: 0.1759, decode.d8.loss_dice: 0.4841, loss: 7.3725 +2022-05-11 13:37:45,692 - mmseg - INFO - Iter [79700/80000] lr: 5.402e-09, eta: 0:10:34, time: 1.814, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0473, decode.loss_mask: 0.1781, decode.loss_dice: 0.4908, decode.d0.loss_cls: 0.2938, decode.d0.loss_mask: 0.1848, decode.d0.loss_dice: 0.5175, decode.d1.loss_cls: 0.0634, decode.d1.loss_mask: 0.1801, decode.d1.loss_dice: 0.5016, decode.d2.loss_cls: 0.0528, decode.d2.loss_mask: 0.1793, decode.d2.loss_dice: 0.4995, decode.d3.loss_cls: 0.0480, decode.d3.loss_mask: 0.1787, decode.d3.loss_dice: 0.4967, decode.d4.loss_cls: 0.0511, decode.d4.loss_mask: 0.1787, decode.d4.loss_dice: 0.4980, decode.d5.loss_cls: 0.0504, decode.d5.loss_mask: 0.1786, decode.d5.loss_dice: 0.4959, decode.d6.loss_cls: 0.0502, decode.d6.loss_mask: 0.1780, decode.d6.loss_dice: 0.4948, decode.d7.loss_cls: 0.0518, decode.d7.loss_mask: 0.1783, decode.d7.loss_dice: 0.4928, decode.d8.loss_cls: 0.0506, decode.d8.loss_mask: 0.1782, decode.d8.loss_dice: 0.4938, loss: 7.5339 +2022-05-11 13:39:16,066 - mmseg - INFO - Iter [79750/80000] lr: 4.505e-09, eta: 0:08:48, time: 1.807, data_time: 0.020, memory: 69063, decode.loss_cls: 0.0491, decode.loss_mask: 0.1765, decode.loss_dice: 0.4851, decode.d0.loss_cls: 0.2966, decode.d0.loss_mask: 0.1827, decode.d0.loss_dice: 0.5075, decode.d1.loss_cls: 0.0715, decode.d1.loss_mask: 0.1773, 
decode.d1.loss_dice: 0.4858, decode.d2.loss_cls: 0.0581, decode.d2.loss_mask: 0.1769, decode.d2.loss_dice: 0.4856, decode.d3.loss_cls: 0.0579, decode.d3.loss_mask: 0.1766, decode.d3.loss_dice: 0.4823, decode.d4.loss_cls: 0.0487, decode.d4.loss_mask: 0.1767, decode.d4.loss_dice: 0.4862, decode.d5.loss_cls: 0.0535, decode.d5.loss_mask: 0.1768, decode.d5.loss_dice: 0.4862, decode.d6.loss_cls: 0.0518, decode.d6.loss_mask: 0.1770, decode.d6.loss_dice: 0.4892, decode.d7.loss_cls: 0.0491, decode.d7.loss_mask: 0.1767, decode.d7.loss_dice: 0.4831, decode.d8.loss_cls: 0.0479, decode.d8.loss_mask: 0.1766, decode.d8.loss_dice: 0.4829, loss: 7.4320 +2022-05-11 13:40:47,675 - mmseg - INFO - Iter [79800/80000] lr: 3.607e-09, eta: 0:07:02, time: 1.832, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0468, decode.loss_mask: 0.1780, decode.loss_dice: 0.4955, decode.d0.loss_cls: 0.2913, decode.d0.loss_mask: 0.1847, decode.d0.loss_dice: 0.5160, decode.d1.loss_cls: 0.0598, decode.d1.loss_mask: 0.1797, decode.d1.loss_dice: 0.5006, decode.d2.loss_cls: 0.0569, decode.d2.loss_mask: 0.1789, decode.d2.loss_dice: 0.4990, decode.d3.loss_cls: 0.0549, decode.d3.loss_mask: 0.1791, decode.d3.loss_dice: 0.4955, decode.d4.loss_cls: 0.0542, decode.d4.loss_mask: 0.1788, decode.d4.loss_dice: 0.4966, decode.d5.loss_cls: 0.0475, decode.d5.loss_mask: 0.1781, decode.d5.loss_dice: 0.4946, decode.d6.loss_cls: 0.0450, decode.d6.loss_mask: 0.1784, decode.d6.loss_dice: 0.4921, decode.d7.loss_cls: 0.0497, decode.d7.loss_mask: 0.1783, decode.d7.loss_dice: 0.4925, decode.d8.loss_cls: 0.0423, decode.d8.loss_mask: 0.1787, decode.d8.loss_dice: 0.4935, loss: 7.5170 +2022-05-11 13:42:21,050 - mmseg - INFO - Iter [79850/80000] lr: 2.710e-09, eta: 0:05:16, time: 1.868, data_time: 0.064, memory: 69063, decode.loss_cls: 0.0365, decode.loss_mask: 0.1722, decode.loss_dice: 0.4758, decode.d0.loss_cls: 0.3019, decode.d0.loss_mask: 0.1776, decode.d0.loss_dice: 0.4937, decode.d1.loss_cls: 0.0493, decode.d1.loss_mask: 0.1734, 
decode.d1.loss_dice: 0.4841, decode.d2.loss_cls: 0.0421, decode.d2.loss_mask: 0.1727, decode.d2.loss_dice: 0.4771, decode.d3.loss_cls: 0.0404, decode.d3.loss_mask: 0.1725, decode.d3.loss_dice: 0.4763, decode.d4.loss_cls: 0.0392, decode.d4.loss_mask: 0.1725, decode.d4.loss_dice: 0.4754, decode.d5.loss_cls: 0.0392, decode.d5.loss_mask: 0.1723, decode.d5.loss_dice: 0.4771, decode.d6.loss_cls: 0.0372, decode.d6.loss_mask: 0.1725, decode.d6.loss_dice: 0.4789, decode.d7.loss_cls: 0.0408, decode.d7.loss_mask: 0.1723, decode.d7.loss_dice: 0.4737, decode.d8.loss_cls: 0.0373, decode.d8.loss_mask: 0.1721, decode.d8.loss_dice: 0.4741, loss: 7.1800 +2022-05-11 13:43:52,299 - mmseg - INFO - Iter [79900/80000] lr: 1.813e-09, eta: 0:03:31, time: 1.825, data_time: 0.015, memory: 69063, decode.loss_cls: 0.0408, decode.loss_mask: 0.1811, decode.loss_dice: 0.4848, decode.d0.loss_cls: 0.2886, decode.d0.loss_mask: 0.1880, decode.d0.loss_dice: 0.5086, decode.d1.loss_cls: 0.0572, decode.d1.loss_mask: 0.1827, decode.d1.loss_dice: 0.4917, decode.d2.loss_cls: 0.0509, decode.d2.loss_mask: 0.1820, decode.d2.loss_dice: 0.4887, decode.d3.loss_cls: 0.0421, decode.d3.loss_mask: 0.1818, decode.d3.loss_dice: 0.4869, decode.d4.loss_cls: 0.0442, decode.d4.loss_mask: 0.1817, decode.d4.loss_dice: 0.4872, decode.d5.loss_cls: 0.0415, decode.d5.loss_mask: 0.1819, decode.d5.loss_dice: 0.4879, decode.d6.loss_cls: 0.0439, decode.d6.loss_mask: 0.1817, decode.d6.loss_dice: 0.4877, decode.d7.loss_cls: 0.0407, decode.d7.loss_mask: 0.1820, decode.d7.loss_dice: 0.4882, decode.d8.loss_cls: 0.0351, decode.d8.loss_mask: 0.1818, decode.d8.loss_dice: 0.4872, loss: 7.4087 +2022-05-11 13:45:21,312 - mmseg - INFO - Iter [79950/80000] lr: 9.153e-10, eta: 0:01:45, time: 1.781, data_time: 0.017, memory: 69063, decode.loss_cls: 0.0479, decode.loss_mask: 0.1797, decode.loss_dice: 0.4909, decode.d0.loss_cls: 0.2937, decode.d0.loss_mask: 0.1870, decode.d0.loss_dice: 0.5113, decode.d1.loss_cls: 0.0632, decode.d1.loss_mask: 0.1816, 
decode.d1.loss_dice: 0.4987, decode.d2.loss_cls: 0.0545, decode.d2.loss_mask: 0.1806, decode.d2.loss_dice: 0.4991, decode.d3.loss_cls: 0.0531, decode.d3.loss_mask: 0.1802, decode.d3.loss_dice: 0.4942, decode.d4.loss_cls: 0.0494, decode.d4.loss_mask: 0.1800, decode.d4.loss_dice: 0.4902, decode.d5.loss_cls: 0.0530, decode.d5.loss_mask: 0.1800, decode.d5.loss_dice: 0.4929, decode.d6.loss_cls: 0.0520, decode.d6.loss_mask: 0.1797, decode.d6.loss_dice: 0.4914, decode.d7.loss_cls: 0.0510, decode.d7.loss_mask: 0.1796, decode.d7.loss_dice: 0.4912, decode.d8.loss_cls: 0.0466, decode.d8.loss_mask: 0.1792, decode.d8.loss_dice: 0.4908, loss: 7.5228 +2022-05-11 13:46:53,779 - mmseg - INFO - Saving checkpoint at 80000 iterations +2022-05-11 13:47:28,897 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 13:47:28,907 - mmseg - INFO - Iter [80000/80000] lr: 1.795e-11, eta: 0:00:00, time: 2.549, data_time: 0.062, memory: 69063, decode.loss_cls: 0.0426, decode.loss_mask: 0.1750, decode.loss_dice: 0.4782, decode.d0.loss_cls: 0.2900, decode.d0.loss_mask: 0.1805, decode.d0.loss_dice: 0.4964, decode.d1.loss_cls: 0.0596, decode.d1.loss_mask: 0.1763, decode.d1.loss_dice: 0.4844, decode.d2.loss_cls: 0.0539, decode.d2.loss_mask: 0.1757, decode.d2.loss_dice: 0.4802, decode.d3.loss_cls: 0.0476, decode.d3.loss_mask: 0.1756, decode.d3.loss_dice: 0.4766, decode.d4.loss_cls: 0.0461, decode.d4.loss_mask: 0.1752, decode.d4.loss_dice: 0.4768, decode.d5.loss_cls: 0.0471, decode.d5.loss_mask: 0.1752, decode.d5.loss_dice: 0.4812, decode.d6.loss_cls: 0.0548, decode.d6.loss_mask: 0.1751, decode.d6.loss_dice: 0.4813, decode.d7.loss_cls: 0.0428, decode.d7.loss_mask: 0.1752, decode.d7.loss_dice: 0.4751, decode.d8.loss_cls: 0.0455, decode.d8.loss_mask: 0.1753, decode.d8.loss_dice: 0.4777, loss: 7.2971 +2022-05-11 13:49:24,651 - mmseg - INFO - per class results: +2022-05-11 13:49:24,655 - mmseg - INFO - ++---------------+-------+-------+ +| Class | IoU | Acc | 
++---------------+-------+-------+ +| road | 98.63 | 99.24 | +| sidewalk | 88.78 | 94.09 | +| building | 94.39 | 97.07 | +| wall | 67.53 | 79.87 | +| fence | 74.25 | 81.22 | +| pole | 71.22 | 83.77 | +| traffic light | 77.11 | 87.37 | +| traffic sign | 84.13 | 90.58 | +| vegetation | 93.36 | 96.95 | +| terrain | 68.47 | 77.14 | +| sky | 95.77 | 98.43 | +| person | 86.73 | 93.82 | +| rider | 74.15 | 84.88 | +| car | 96.16 | 98.29 | +| truck | 92.08 | 94.61 | +| bus | 93.57 | 96.51 | +| train | 87.86 | 90.67 | +| motorcycle | 77.75 | 87.33 | +| bicycle | 82.77 | 91.53 | ++---------------+-------+-------+ +2022-05-11 13:49:24,655 - mmseg - INFO - Summary: +2022-05-11 13:49:24,655 - mmseg - INFO - ++-------+-------+------+ +| aAcc | mIoU | mAcc | ++-------+-------+------+ +| 97.02 | 84.46 | 90.7 | ++-------+-------+------+ +2022-05-11 13:49:24,657 - mmseg - INFO - Exp name: mask2former_beit_adapter_large_896_80k_cityscapes_ss.py +2022-05-11 13:49:24,657 - mmseg - INFO - Iter(val) [32] aAcc: 0.9702, mIoU: 0.8446, mAcc: 0.9070, IoU.road: 0.9863, IoU.sidewalk: 0.8878, IoU.building: 0.9439, IoU.wall: 0.6753, IoU.fence: 0.7425, IoU.pole: 0.7122, IoU.traffic light: 0.7711, IoU.traffic sign: 0.8413, IoU.vegetation: 0.9336, IoU.terrain: 0.6847, IoU.sky: 0.9577, IoU.person: 0.8673, IoU.rider: 0.7415, IoU.car: 0.9616, IoU.truck: 0.9208, IoU.bus: 0.9357, IoU.train: 0.8786, IoU.motorcycle: 0.7775, IoU.bicycle: 0.8277, Acc.road: 0.9924, Acc.sidewalk: 0.9409, Acc.building: 0.9707, Acc.wall: 0.7987, Acc.fence: 0.8122, Acc.pole: 0.8377, Acc.traffic light: 0.8737, Acc.traffic sign: 0.9058, Acc.vegetation: 0.9695, Acc.terrain: 0.7714, Acc.sky: 0.9843, Acc.person: 0.9382, Acc.rider: 0.8488, Acc.car: 0.9829, Acc.truck: 0.9461, Acc.bus: 0.9651, Acc.train: 0.9067, Acc.motorcycle: 0.8733, Acc.bicycle: 0.9153