2023-02-11 19:08:57,978 - mmseg - INFO - Multi-processing start method is `None`
2023-02-11 19:08:57,979 - mmseg - INFO - OpenCV num_threads is `112`
2023-02-11 19:08:57,979 - mmseg - INFO - OMP num threads is 1
2023-02-11 19:08:58,023 - mmseg - INFO - Environment info:
------------------------------------------------------------
sys.platform: linux
Python: 3.7.13 (default, Mar 29 2022, 02:18:16) [GCC 7.5.0]
CUDA available: True
GPU 0,1,2,3,4,5,6,7: A100-SXM-80GB
CUDA_HOME: /usr/local/cuda
NVCC: Build cuda_11.2.r11.2/compiler.29618528_0
GCC: gcc (GCC) 5.4.0
PyTorch: 1.9.0+cu111
PyTorch compiling details: PyTorch built with:
  - GCC 7.3
  - C++ Version: 201402
  - Intel(R) Math Kernel Library Version 2020.0.0 Product Build 20191122 for Intel(R) 64 architecture applications
  - Intel(R) MKL-DNN v2.1.2 (Git Hash 98be7e8afa711dc9b66c8ff3504129cb82013cdb)
  - OpenMP 201511 (a.k.a. OpenMP 4.5)
  - NNPACK is enabled
  - CPU capability usage: AVX2
  - CUDA Runtime 11.1
  - NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86
  - CuDNN 8.0.5
  - Magma 2.5.2
  - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.1, CUDNN_VERSION=8.0.5, CXX_COMPILER=/opt/rh/devtoolset-7/root/usr/bin/c++, CXX_FLAGS= -Wno-deprecated -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -fopenmp -DNDEBUG -DUSE_KINETO -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wno-narrowing -Wall -Wextra -Werror=return-type -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-sign-compare -Wno-unused-parameter -Wno-unused-variable -Wno-unused-function -Wno-unused-result -Wno-unused-local-typedefs -Wno-strict-overflow -Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=1.9.0, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON,
TorchVision: 0.10.0+cu111
OpenCV: 4.6.0
MMCV: 1.4.2
MMCV Compiler: GCC 7.3
MMCV CUDA Compiler: 11.1
MMSegmentation: 0.29.0+
------------------------------------------------------------
2023-02-11 19:08:58,023 - mmseg - INFO - Distributed training: True
2023-02-11 19:08:58,508 - mmseg - INFO - Config:
dataset_type = 'ADE20KDataset'
data_root = 'data/ade/ADEChallengeData2016'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
crop_size = (512, 512)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', reduce_zero_label=True),
    dict(type='Resize', img_scale=(2048, 512), ratio_range=(0.5, 2.0)),
    dict(type='RandomCrop', crop_size=(512, 512), cat_max_ratio=0.75),
    dict(type='RandomFlip', prob=0.5),
    dict(type='PhotoMetricDistortion'),
    dict(type='Normalize', mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True),
    dict(type='Pad', size=(512, 512), pad_val=0, seg_pad_val=255),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_semantic_seg'])
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(2048, 512),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img'])
        ])
]
data = dict(
    samples_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type='ADE20KDataset',
        data_root='data/ade/ADEChallengeData2016',
        img_dir='images/training',
        ann_dir='annotations/training',
        pipeline=[
            dict(type='LoadImageFromFile'),
            dict(type='LoadAnnotations', reduce_zero_label=True),
            dict(type='Resize', img_scale=(2048, 512), ratio_range=(0.5, 2.0)),
            dict(type='RandomCrop', crop_size=(512, 512), cat_max_ratio=0.75),
            dict(type='RandomFlip', prob=0.5),
            dict(type='PhotoMetricDistortion'),
            dict(type='Normalize', mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True),
            dict(type='Pad', size=(512, 512), pad_val=0, seg_pad_val=255),
            dict(type='DefaultFormatBundle'),
            dict(type='Collect', keys=['img', 'gt_semantic_seg'])
        ]),
    val=dict(
        type='ADE20KDataset',
        data_root='data/ade/ADEChallengeData2016',
        img_dir='images/validation',
        ann_dir='annotations/validation',
        pipeline=[
            dict(type='LoadImageFromFile'),
            dict(
                type='MultiScaleFlipAug',
                img_scale=(2048, 512),
                flip=False,
                transforms=[
                    dict(type='Resize', keep_ratio=True),
                    dict(type='RandomFlip'),
                    dict(type='Normalize', mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True),
                    dict(type='ImageToTensor', keys=['img']),
                    dict(type='Collect', keys=['img'])
                ])
        ]),
    test=dict(
        type='ADE20KDataset',
        data_root='data/ade/ADEChallengeData2016',
        img_dir='images/validation',
        ann_dir='annotations/validation',
        pipeline=[
            dict(type='LoadImageFromFile'),
            dict(
                type='MultiScaleFlipAug',
                img_scale=(2048, 512),
                flip=False,
                transforms=[
                    dict(type='Resize', keep_ratio=True),
                    dict(type='RandomFlip'),
                    dict(type='Normalize', mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True),
                    dict(type='ImageToTensor', keys=['img']),
                    dict(type='Collect', keys=['img'])
                ])
        ]))
log_config = dict(
    interval=50, hooks=[dict(type='TextLoggerHook', by_epoch=False)])
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
cudnn_benchmark = True
optimizer = dict(
    type='AdamW',
    lr=6e-05,
    betas=(0.9, 0.999),
    weight_decay=0.01,
    paramwise_cfg=dict(
        custom_keys=dict(
            pos_block=dict(decay_mult=0.0), norm=dict(decay_mult=0.0))))
optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2))
lr_config = dict(
    policy='poly',
    warmup='linear',
    warmup_iters=1500,
    warmup_ratio=1e-06,
    power=1.0,
    min_lr=0.0,
    by_epoch=False)
runner = dict(type='IterBasedRunner', max_iters=160000)
checkpoint_config = dict(by_epoch=False, interval=1000, max_keep_ckpts=1)
evaluation = dict(
    interval=16000, metric='mIoU', pre_eval=True, save_best='mIoU')
checkpoint_file = 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/swin/swin_tiny_patch4_window7_224_20220317-1cdeb081.pth'
norm_cfg = dict(type='SyncBN', requires_grad=True)
backbone_norm_cfg = dict(type='LN', requires_grad=True)
model = dict(
    type='DiffSegV20',
    bit_scale=0.01,
    pretrained=None,
    backbone=dict(
        type='SwinTransformer',
        init_cfg=dict(
            type='Pretrained',
            checkpoint='https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/swin/swin_tiny_patch4_window7_224_20220317-1cdeb081.pth'),
        pretrain_img_size=224,
        in_channels=3,
        embed_dims=96,
        patch_size=4,
        window_size=7,
        mlp_ratio=4,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        strides=(4, 2, 2, 2),
        out_indices=(0, 1, 2, 3),
        qkv_bias=True,
        qk_scale=None,
        patch_norm=True,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        drop_path_rate=0.3,
        use_abs_pos_embed=False,
        act_cfg=dict(type='GELU'),
        norm_cfg=dict(type='LN', requires_grad=True)),
    neck=[
        dict(
            type='FPN',
            in_channels=[96, 192, 384, 768],
            out_channels=256,
            act_cfg=None,
            norm_cfg=dict(type='GN', num_groups=32),
            num_outs=4),
        dict(
            type='MultiStageMerging',
            in_channels=[256, 256, 256, 256],
            out_channels=256,
            kernel_size=1,
            norm_cfg=dict(type='GN', num_groups=32),
            act_cfg=None)
    ],
    auxiliary_head=dict(
        type='FCNHead',
        in_channels=256,
        in_index=0,
        channels=256,
        num_convs=1,
        concat_input=False,
        dropout_ratio=0.1,
        num_classes=150,
        norm_cfg=dict(type='SyncBN', requires_grad=True),
        align_corners=False,
        loss_decode=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.4)),
    decode_head=dict(
        type='DeformableHeadWithTime',
        in_channels=[256],
        channels=256,
        in_index=[0],
        dropout_ratio=0.0,
        num_classes=150,
        norm_cfg=dict(type='SyncBN', requires_grad=True),
        align_corners=False,
        num_feature_levels=1,
        encoder=dict(
            type='DetrTransformerEncoder',
            num_layers=6,
            transformerlayers=dict(
                type='BaseTransformerLayer',
                use_time_mlp=True,
                attn_cfgs=dict(
                    type='MultiScaleDeformableAttention',
                    embed_dims=256,
                    num_levels=1,
                    num_heads=8,
                    dropout=0.0),
                ffn_cfgs=dict(
                    type='FFN',
                    embed_dims=256,
                    feedforward_channels=1024,
                    ffn_drop=0.0,
                    act_cfg=dict(type='GELU')),
                operation_order=('self_attn', 'norm', 'ffn', 'norm'))),
        positional_encoding=dict(
            type='SinePositionalEncoding', num_feats=128, normalize=True, offset=-0.5),
        loss_decode=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
    train_cfg=dict(),
    test_cfg=dict(mode='whole'))
work_dir = './work_dirs/diffseg_swin_t_2x8_512x512_160k_ade20k_v20'
gpu_ids = range(0, 8)
auto_resume = True
2023-02-11 19:09:05,290 - mmseg - INFO - Set random seed to 88117976, deterministic: True
2023-02-11 19:09:05,608 - mmseg - INFO - load checkpoint from http path: https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/swin/swin_tiny_patch4_window7_224_20220317-1cdeb081.pth
2023-02-11 19:09:05,690 - mmseg - WARNING - The model and loaded state dict do not match exactly

unexpected key in source state_dict: norm.weight, norm.bias, stages.0.blocks.1.attn_mask, stages.1.blocks.1.attn_mask, stages.2.blocks.1.attn_mask, stages.2.blocks.3.attn_mask, stages.2.blocks.5.attn_mask

missing keys in source state_dict: norm0.weight, norm0.bias, norm1.weight, norm1.bias, norm2.weight, norm2.bias, norm3.weight, norm3.bias

2023-02-11 19:09:05,703 - mmseg - INFO - initialize FPN with init_cfg {'type': 'Xavier', 'layer': 'Conv2d', 'distribution': 'uniform'}
2023-02-11 19:09:05,718 - mmseg - INFO - initialize MultiStageMerging with init_cfg {'type': 'Xavier', 'layer': 'Conv2d', 'distribution': 'uniform'}
2023-02-11 19:09:05,765 - mmseg - INFO - initialize FCNHead with init_cfg {'type': 'Normal', 'std': 0.01, 'override': {'name': 'conv_seg'}}
Name of parameter - Initialization information
backbone.patch_embed.projection.weight - torch.Size([96, 3, 4, 4]): Initialized by user-defined `init_weights` in SwinTransformer
backbone.patch_embed.projection.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer
backbone.patch_embed.norm.weight - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer
backbone.patch_embed.norm.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer
backbone.stages.0.blocks.0.norm1.weight - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.norm1.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.attn.w_msa.relative_position_bias_table - torch.Size([169, 3]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.attn.w_msa.qkv.weight - torch.Size([288, 96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.attn.w_msa.qkv.bias - torch.Size([288]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.attn.w_msa.proj.weight - torch.Size([96, 96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.attn.w_msa.proj.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.norm2.weight - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.norm2.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.ffn.layers.0.0.weight - torch.Size([384, 96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.ffn.layers.0.0.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.ffn.layers.1.weight - torch.Size([96, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.0.ffn.layers.1.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.norm1.weight - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.norm1.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.attn.w_msa.relative_position_bias_table - torch.Size([169, 3]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.attn.w_msa.qkv.weight - torch.Size([288, 96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.attn.w_msa.qkv.bias - torch.Size([288]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.attn.w_msa.proj.weight - torch.Size([96, 96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.attn.w_msa.proj.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.norm2.weight - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.norm2.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.ffn.layers.0.0.weight - torch.Size([384, 96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.ffn.layers.0.0.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.ffn.layers.1.weight - torch.Size([96, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.blocks.1.ffn.layers.1.bias - torch.Size([96]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.downsample.norm.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.downsample.norm.bias 
- torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.0.downsample.reduction.weight - torch.Size([192, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.norm1.weight - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.norm1.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.attn.w_msa.relative_position_bias_table - torch.Size([169, 6]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.attn.w_msa.qkv.weight - torch.Size([576, 192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.attn.w_msa.qkv.bias - torch.Size([576]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.attn.w_msa.proj.weight - torch.Size([192, 192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.attn.w_msa.proj.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.norm2.weight - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.norm2.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.ffn.layers.0.0.weight - torch.Size([768, 192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.ffn.layers.0.0.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.ffn.layers.1.weight - torch.Size([192, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.0.ffn.layers.1.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.norm1.weight - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.norm1.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.attn.w_msa.relative_position_bias_table - torch.Size([169, 6]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.attn.w_msa.qkv.weight - torch.Size([576, 192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.attn.w_msa.qkv.bias - torch.Size([576]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.attn.w_msa.proj.weight - torch.Size([192, 192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.attn.w_msa.proj.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.norm2.weight - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.norm2.bias - torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.ffn.layers.0.0.weight - torch.Size([768, 192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.ffn.layers.0.0.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.ffn.layers.1.weight - torch.Size([192, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.blocks.1.ffn.layers.1.bias - 
torch.Size([192]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.downsample.norm.weight - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.downsample.norm.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.1.downsample.reduction.weight - torch.Size([384, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.norm1.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.norm1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.attn.w_msa.relative_position_bias_table - torch.Size([169, 12]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.attn.w_msa.qkv.weight - torch.Size([1152, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.attn.w_msa.qkv.bias - torch.Size([1152]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.attn.w_msa.proj.weight - torch.Size([384, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.attn.w_msa.proj.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.norm2.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.norm2.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.ffn.layers.0.0.weight - torch.Size([1536, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.ffn.layers.0.0.bias - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.ffn.layers.1.weight - torch.Size([384, 1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.0.ffn.layers.1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.norm1.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.norm1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.attn.w_msa.relative_position_bias_table - torch.Size([169, 12]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.attn.w_msa.qkv.weight - torch.Size([1152, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.attn.w_msa.qkv.bias - torch.Size([1152]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.attn.w_msa.proj.weight - torch.Size([384, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.attn.w_msa.proj.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.norm2.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.norm2.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.ffn.layers.0.0.weight - torch.Size([1536, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.ffn.layers.0.0.bias - 
torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.ffn.layers.1.weight - torch.Size([384, 1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.1.ffn.layers.1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.norm1.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.norm1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.attn.w_msa.relative_position_bias_table - torch.Size([169, 12]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.attn.w_msa.qkv.weight - torch.Size([1152, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.attn.w_msa.qkv.bias - torch.Size([1152]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.attn.w_msa.proj.weight - torch.Size([384, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.attn.w_msa.proj.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.norm2.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.norm2.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.ffn.layers.0.0.weight - torch.Size([1536, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.ffn.layers.0.0.bias - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.ffn.layers.1.weight - torch.Size([384, 1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.2.ffn.layers.1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.norm1.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.norm1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.attn.w_msa.relative_position_bias_table - torch.Size([169, 12]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.attn.w_msa.qkv.weight - torch.Size([1152, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.attn.w_msa.qkv.bias - torch.Size([1152]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.attn.w_msa.proj.weight - torch.Size([384, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.attn.w_msa.proj.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.norm2.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.norm2.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.ffn.layers.0.0.weight - torch.Size([1536, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.ffn.layers.0.0.bias - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.ffn.layers.1.weight 
- torch.Size([384, 1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.3.ffn.layers.1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.norm1.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.norm1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.attn.w_msa.relative_position_bias_table - torch.Size([169, 12]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.attn.w_msa.qkv.weight - torch.Size([1152, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.attn.w_msa.qkv.bias - torch.Size([1152]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.attn.w_msa.proj.weight - torch.Size([384, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.attn.w_msa.proj.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.norm2.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.norm2.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.ffn.layers.0.0.weight - torch.Size([1536, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.ffn.layers.0.0.bias - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.ffn.layers.1.weight - torch.Size([384, 1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.4.ffn.layers.1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.norm1.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.norm1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.attn.w_msa.relative_position_bias_table - torch.Size([169, 12]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.attn.w_msa.qkv.weight - torch.Size([1152, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.attn.w_msa.qkv.bias - torch.Size([1152]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.attn.w_msa.proj.weight - torch.Size([384, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.attn.w_msa.proj.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.norm2.weight - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.norm2.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.ffn.layers.0.0.weight - torch.Size([1536, 384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.ffn.layers.0.0.bias - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.blocks.5.ffn.layers.1.weight - torch.Size([384, 1536]): Initialized by user-defined `init_weights` in SwinTransformer 
backbone.stages.2.blocks.5.ffn.layers.1.bias - torch.Size([384]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.downsample.norm.weight - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.downsample.norm.bias - torch.Size([1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.2.downsample.reduction.weight - torch.Size([768, 1536]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.norm1.weight - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.norm1.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.attn.w_msa.relative_position_bias_table - torch.Size([169, 24]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.attn.w_msa.qkv.weight - torch.Size([2304, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.attn.w_msa.qkv.bias - torch.Size([2304]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.attn.w_msa.proj.weight - torch.Size([768, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.attn.w_msa.proj.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.norm2.weight - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.norm2.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.ffn.layers.0.0.weight - torch.Size([3072, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.ffn.layers.0.0.bias - torch.Size([3072]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.ffn.layers.1.weight - torch.Size([768, 3072]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.0.ffn.layers.1.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.norm1.weight - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.norm1.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.attn.w_msa.relative_position_bias_table - torch.Size([169, 24]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.attn.w_msa.qkv.weight - torch.Size([2304, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.attn.w_msa.qkv.bias - torch.Size([2304]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.attn.w_msa.proj.weight - torch.Size([768, 768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.attn.w_msa.proj.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.norm2.weight - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.norm2.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.ffn.layers.0.0.weight - torch.Size([3072, 768]): Initialized by user-defined `init_weights` in SwinTransformer 
backbone.stages.3.blocks.1.ffn.layers.0.0.bias - torch.Size([3072]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.ffn.layers.1.weight - torch.Size([768, 3072]): Initialized by user-defined `init_weights` in SwinTransformer backbone.stages.3.blocks.1.ffn.layers.1.bias - torch.Size([768]): Initialized by user-defined `init_weights` in SwinTransformer backbone.norm0.weight - torch.Size([96]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm0.bias - torch.Size([96]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm1.weight - torch.Size([192]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm1.bias - torch.Size([192]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm2.weight - torch.Size([384]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm2.bias - torch.Size([384]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm3.weight - torch.Size([768]): The value is the same before and after calling `init_weights` of DiffSegV20 backbone.norm3.bias - torch.Size([768]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.0.conv.weight - torch.Size([256, 96, 1, 1]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.lateral_convs.0.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.0.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.1.conv.weight - torch.Size([256, 192, 1, 1]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.lateral_convs.1.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.1.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.2.conv.weight - torch.Size([256, 384, 1, 1]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.lateral_convs.2.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.2.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.3.conv.weight - torch.Size([256, 768, 1, 1]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.lateral_convs.3.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.lateral_convs.3.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.0.conv.weight - torch.Size([256, 256, 3, 3]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.fpn_convs.0.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.0.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.1.conv.weight - torch.Size([256, 256, 3, 3]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.fpn_convs.1.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.1.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of 
DiffSegV20 neck.0.fpn_convs.2.conv.weight - torch.Size([256, 256, 3, 3]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.fpn_convs.2.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.2.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.3.conv.weight - torch.Size([256, 256, 3, 3]): XavierInit: gain=1, distribution=uniform, bias=0 neck.0.fpn_convs.3.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.0.fpn_convs.3.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.1.down.conv.weight - torch.Size([256, 1024, 1, 1]): Initialized by user-defined `init_weights` in ConvModule neck.1.down.gn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 neck.1.down.gn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.conv_seg.weight - torch.Size([150, 256, 1, 1]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.conv_seg.bias - torch.Size([150]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.attentions.0.sampling_offsets.weight - torch.Size([64, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.attentions.0.sampling_offsets.bias - torch.Size([64]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.attentions.0.attention_weights.weight - torch.Size([32, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.attentions.0.attention_weights.bias - torch.Size([32]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.attentions.0.value_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.0.attentions.0.value_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.attentions.0.output_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.0.attentions.0.output_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.time_mlp.1.weight - torch.Size([512, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.0.time_mlp.1.bias - torch.Size([512]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.0.ffns.0.layers.0.0.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.ffns.0.layers.1.weight - torch.Size([256, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.0.ffns.0.layers.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.norms.0.weight - 
torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.norms.0.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.norms.1.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.0.norms.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.attentions.0.sampling_offsets.weight - torch.Size([64, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.attentions.0.sampling_offsets.bias - torch.Size([64]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.attentions.0.attention_weights.weight - torch.Size([32, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.attentions.0.attention_weights.bias - torch.Size([32]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.attentions.0.value_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.1.attentions.0.value_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.attentions.0.output_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.1.attentions.0.output_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.time_mlp.1.weight - torch.Size([512, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.1.time_mlp.1.bias - torch.Size([512]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.1.ffns.0.layers.0.0.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.ffns.0.layers.1.weight - torch.Size([256, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.1.ffns.0.layers.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.norms.0.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.norms.0.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.norms.1.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.1.norms.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.attentions.0.sampling_offsets.weight - torch.Size([64, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.attentions.0.sampling_offsets.bias - torch.Size([64]): The value is the same before and after calling `init_weights` of 
DiffSegV20 decode_head.encoder.layers.2.attentions.0.attention_weights.weight - torch.Size([32, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.attentions.0.attention_weights.bias - torch.Size([32]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.attentions.0.value_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.2.attentions.0.value_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.attentions.0.output_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.2.attentions.0.output_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.time_mlp.1.weight - torch.Size([512, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.2.time_mlp.1.bias - torch.Size([512]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.2.ffns.0.layers.0.0.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.ffns.0.layers.1.weight - torch.Size([256, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.2.ffns.0.layers.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.norms.0.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.norms.0.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.norms.1.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.2.norms.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.attentions.0.sampling_offsets.weight - torch.Size([64, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.attentions.0.sampling_offsets.bias - torch.Size([64]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.attentions.0.attention_weights.weight - torch.Size([32, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.attentions.0.attention_weights.bias - torch.Size([32]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.attentions.0.value_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.3.attentions.0.value_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.attentions.0.output_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime 
decode_head.encoder.layers.3.attentions.0.output_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.time_mlp.1.weight - torch.Size([512, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.3.time_mlp.1.bias - torch.Size([512]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.3.ffns.0.layers.0.0.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.ffns.0.layers.1.weight - torch.Size([256, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.3.ffns.0.layers.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.norms.0.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.norms.0.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.norms.1.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.3.norms.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.attentions.0.sampling_offsets.weight - torch.Size([64, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.attentions.0.sampling_offsets.bias - torch.Size([64]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.attentions.0.attention_weights.weight - torch.Size([32, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.attentions.0.attention_weights.bias - torch.Size([32]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.attentions.0.value_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.4.attentions.0.value_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.attentions.0.output_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.4.attentions.0.output_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.time_mlp.1.weight - torch.Size([512, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.4.time_mlp.1.bias - torch.Size([512]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.4.ffns.0.layers.0.0.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.ffns.0.layers.1.weight - torch.Size([256, 1024]): Initialized by 
user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.4.ffns.0.layers.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.norms.0.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.norms.0.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.norms.1.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.4.norms.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.attentions.0.sampling_offsets.weight - torch.Size([64, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.attentions.0.sampling_offsets.bias - torch.Size([64]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.attentions.0.attention_weights.weight - torch.Size([32, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.attentions.0.attention_weights.bias - torch.Size([32]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.attentions.0.value_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.5.attentions.0.value_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.attentions.0.output_proj.weight - torch.Size([256, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.5.attentions.0.output_proj.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.time_mlp.1.weight - torch.Size([512, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.5.time_mlp.1.bias - torch.Size([512]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.ffns.0.layers.0.0.weight - torch.Size([1024, 256]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.5.ffns.0.layers.0.0.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.ffns.0.layers.1.weight - torch.Size([256, 1024]): Initialized by user-defined `init_weights` in DeformableHeadWithTime decode_head.encoder.layers.5.ffns.0.layers.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.norms.0.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.norms.0.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.norms.1.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 decode_head.encoder.layers.5.norms.1.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 auxiliary_head.conv_seg.weight - torch.Size([150, 256, 1, 1]): NormalInit: 
mean=0, std=0.01, bias=0 auxiliary_head.conv_seg.bias - torch.Size([150]): NormalInit: mean=0, std=0.01, bias=0 auxiliary_head.convs.0.conv.weight - torch.Size([256, 256, 3, 3]): The value is the same before and after calling `init_weights` of DiffSegV20 auxiliary_head.convs.0.bn.weight - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 auxiliary_head.convs.0.bn.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 embedding_table.weight - torch.Size([151, 256]): The value is the same before and after calling `init_weights` of DiffSegV20 transform.conv.weight - torch.Size([256, 512, 1, 1]): Initialized by user-defined `init_weights` in ConvModule transform.conv.bias - torch.Size([256]): The value is the same before and after calling `init_weights` of DiffSegV20 time_mlp.0.weights - torch.Size([8]): The value is the same before and after calling `init_weights` of DiffSegV20 time_mlp.1.weight - torch.Size([1024, 17]): The value is the same before and after calling `init_weights` of DiffSegV20 time_mlp.1.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 time_mlp.3.weight - torch.Size([1024, 1024]): The value is the same before and after calling `init_weights` of DiffSegV20 time_mlp.3.bias - torch.Size([1024]): The value is the same before and after calling `init_weights` of DiffSegV20 2023-02-11 19:09:05,770 - mmseg - INFO - DiffSegV20( (backbone): SwinTransformer( (patch_embed): PatchEmbed( (adap_padding): AdaptivePadding() (projection): Conv2d(3, 96, kernel_size=(4, 4), stride=(4, 4)) (norm): LayerNorm((96,), eps=1e-05, elementwise_affine=True) ) (drop_after_pos): Dropout(p=0.0, inplace=False) (stages): ModuleList( (0): SwinBlockSequence( (blocks): ModuleList( (0): SwinBlock( (norm1): LayerNorm((96,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=96, out_features=288, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=96, out_features=96, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((96,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=96, out_features=384, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=384, out_features=96, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (1): SwinBlock( (norm1): LayerNorm((96,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=96, out_features=288, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=96, out_features=96, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((96,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=96, out_features=384, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=384, out_features=96, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) ) (downsample): PatchMerging( (adap_padding): AdaptivePadding() (sampler): Unfold(kernel_size=(2, 2), dilation=(1, 1), padding=(0, 0), stride=(2, 2)) (norm): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (reduction): 
Linear(in_features=384, out_features=192, bias=False) ) ) (1): SwinBlockSequence( (blocks): ModuleList( (0): SwinBlock( (norm1): LayerNorm((192,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=192, out_features=576, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=192, out_features=192, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((192,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=192, out_features=768, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=768, out_features=192, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (1): SwinBlock( (norm1): LayerNorm((192,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=192, out_features=576, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=192, out_features=192, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((192,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=192, out_features=768, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=768, out_features=192, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) ) (downsample): PatchMerging( (adap_padding): AdaptivePadding() (sampler): Unfold(kernel_size=(2, 2), dilation=(1, 1), padding=(0, 0), stride=(2, 2)) (norm): LayerNorm((768,), eps=1e-05, elementwise_affine=True) (reduction): Linear(in_features=768, out_features=384, bias=False) ) ) (2): SwinBlockSequence( (blocks): ModuleList( (0): SwinBlock( (norm1): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=384, out_features=1152, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=384, out_features=384, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=384, out_features=1536, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1536, out_features=384, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (1): SwinBlock( (norm1): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=384, out_features=1152, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=384, out_features=384, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=384, out_features=1536, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1536, out_features=384, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (2): SwinBlock( (norm1): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( 
(qkv): Linear(in_features=384, out_features=1152, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=384, out_features=384, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=384, out_features=1536, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1536, out_features=384, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (3): SwinBlock( (norm1): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=384, out_features=1152, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=384, out_features=384, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=384, out_features=1536, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1536, out_features=384, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (4): SwinBlock( (norm1): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=384, out_features=1152, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=384, out_features=384, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=384, out_features=1536, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1536, out_features=384, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (5): SwinBlock( (norm1): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=384, out_features=1152, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=384, out_features=384, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=384, out_features=1536, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1536, out_features=384, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) ) (downsample): PatchMerging( (adap_padding): AdaptivePadding() (sampler): Unfold(kernel_size=(2, 2), dilation=(1, 1), padding=(0, 0), stride=(2, 2)) (norm): LayerNorm((1536,), eps=1e-05, elementwise_affine=True) (reduction): Linear(in_features=1536, out_features=768, bias=False) ) ) (3): SwinBlockSequence( (blocks): ModuleList( (0): SwinBlock( (norm1): LayerNorm((768,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=768, out_features=2304, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=768, out_features=768, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): 
Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((768,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=768, out_features=3072, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=3072, out_features=768, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) (1): SwinBlock( (norm1): LayerNorm((768,), eps=1e-05, elementwise_affine=True) (attn): ShiftWindowMSA( (w_msa): WindowMSA( (qkv): Linear(in_features=768, out_features=2304, bias=True) (attn_drop): Dropout(p=0.0, inplace=False) (proj): Linear(in_features=768, out_features=768, bias=True) (proj_drop): Dropout(p=0.0, inplace=False) (softmax): Softmax(dim=-1) ) (drop): DropPath() ) (norm2): LayerNorm((768,), eps=1e-05, elementwise_affine=True) (ffn): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=768, out_features=3072, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=3072, out_features=768, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): DropPath() ) ) ) ) ) (norm0): LayerNorm((96,), eps=1e-05, elementwise_affine=True) (norm1): LayerNorm((192,), eps=1e-05, elementwise_affine=True) (norm2): LayerNorm((384,), eps=1e-05, elementwise_affine=True) (norm3): LayerNorm((768,), eps=1e-05, elementwise_affine=True) ) init_cfg={'type': 'Pretrained', 'checkpoint': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/swin/swin_tiny_patch4_window7_224_20220317-1cdeb081.pth'} (neck): Sequential( (0): FPN( (lateral_convs): ModuleList( (0): ConvModule( (conv): Conv2d(96, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) (1): ConvModule( (conv): Conv2d(192, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) (2): ConvModule( (conv): Conv2d(384, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) (3): ConvModule( (conv): Conv2d(768, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) ) (fpn_convs): ModuleList( (0): ConvModule( (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) (1): ConvModule( (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) (2): ConvModule( (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) (3): ConvModule( (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) ) ) init_cfg={'type': 'Xavier', 'layer': 'Conv2d', 'distribution': 'uniform'} (1): MultiStageMerging( (down): ConvModule( (conv): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (gn): GroupNorm(32, 256, eps=1e-05, affine=True) ) ) init_cfg={'type': 'Xavier', 'layer': 'Conv2d', 'distribution': 'uniform'} ) (decode_head): DeformableHeadWithTime( input_transform=multiple_select, ignore_index=255, align_corners=False (loss_decode): CrossEntropyLoss(avg_non_ignore=False) (conv_seg): Conv2d(256, 150, kernel_size=(1, 1), stride=(1, 1)) (encoder): DetrTransformerEncoder( (layers): ModuleList( (0): BaseTransformerLayer( (attentions): ModuleList( (0): MultiScaleDeformableAttention( 
(dropout): Dropout(p=0.0, inplace=False) (sampling_offsets): Linear(in_features=256, out_features=64, bias=True) (attention_weights): Linear(in_features=256, out_features=32, bias=True) (value_proj): Linear(in_features=256, out_features=256, bias=True) (output_proj): Linear(in_features=256, out_features=256, bias=True) ) ) (time_mlp): Sequential( (0): SiLU() (1): Linear(in_features=1024, out_features=512, bias=True) ) (ffns): ModuleList( (0): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=256, out_features=1024, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1024, out_features=256, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): Identity() ) ) (norms): ModuleList( (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) ) ) (1): BaseTransformerLayer( (attentions): ModuleList( (0): MultiScaleDeformableAttention( (dropout): Dropout(p=0.0, inplace=False) (sampling_offsets): Linear(in_features=256, out_features=64, bias=True) (attention_weights): Linear(in_features=256, out_features=32, bias=True) (value_proj): Linear(in_features=256, out_features=256, bias=True) (output_proj): Linear(in_features=256, out_features=256, bias=True) ) ) (time_mlp): Sequential( (0): SiLU() (1): Linear(in_features=1024, out_features=512, bias=True) ) (ffns): ModuleList( (0): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=256, out_features=1024, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1024, out_features=256, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): Identity() ) ) (norms): ModuleList( (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) ) ) (2): BaseTransformerLayer( (attentions): ModuleList( (0): MultiScaleDeformableAttention( (dropout): Dropout(p=0.0, inplace=False) (sampling_offsets): Linear(in_features=256, out_features=64, bias=True) (attention_weights): Linear(in_features=256, out_features=32, bias=True) (value_proj): Linear(in_features=256, out_features=256, bias=True) (output_proj): Linear(in_features=256, out_features=256, bias=True) ) ) (time_mlp): Sequential( (0): SiLU() (1): Linear(in_features=1024, out_features=512, bias=True) ) (ffns): ModuleList( (0): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=256, out_features=1024, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1024, out_features=256, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): Identity() ) ) (norms): ModuleList( (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) ) ) (3): BaseTransformerLayer( (attentions): ModuleList( (0): MultiScaleDeformableAttention( (dropout): Dropout(p=0.0, inplace=False) (sampling_offsets): Linear(in_features=256, out_features=64, bias=True) (attention_weights): Linear(in_features=256, out_features=32, bias=True) (value_proj): Linear(in_features=256, out_features=256, bias=True) (output_proj): Linear(in_features=256, out_features=256, bias=True) ) ) (time_mlp): Sequential( (0): SiLU() (1): Linear(in_features=1024, out_features=512, bias=True) ) (ffns): ModuleList( (0): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=256, out_features=1024, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) 
(1): Linear(in_features=1024, out_features=256, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): Identity() ) ) (norms): ModuleList( (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) ) ) (4): BaseTransformerLayer( (attentions): ModuleList( (0): MultiScaleDeformableAttention( (dropout): Dropout(p=0.0, inplace=False) (sampling_offsets): Linear(in_features=256, out_features=64, bias=True) (attention_weights): Linear(in_features=256, out_features=32, bias=True) (value_proj): Linear(in_features=256, out_features=256, bias=True) (output_proj): Linear(in_features=256, out_features=256, bias=True) ) ) (time_mlp): Sequential( (0): SiLU() (1): Linear(in_features=1024, out_features=512, bias=True) ) (ffns): ModuleList( (0): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=256, out_features=1024, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1024, out_features=256, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): Identity() ) ) (norms): ModuleList( (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) ) ) (5): BaseTransformerLayer( (attentions): ModuleList( (0): MultiScaleDeformableAttention( (dropout): Dropout(p=0.0, inplace=False) (sampling_offsets): Linear(in_features=256, out_features=64, bias=True) (attention_weights): Linear(in_features=256, out_features=32, bias=True) (value_proj): Linear(in_features=256, out_features=256, bias=True) (output_proj): Linear(in_features=256, out_features=256, bias=True) ) ) (time_mlp): Sequential( (0): SiLU() (1): Linear(in_features=1024, out_features=512, bias=True) ) (ffns): ModuleList( (0): FFN( (activate): GELU() (layers): Sequential( (0): Sequential( (0): Linear(in_features=256, out_features=1024, bias=True) (1): GELU() (2): Dropout(p=0.0, inplace=False) ) (1): Linear(in_features=1024, out_features=256, bias=True) (2): Dropout(p=0.0, inplace=False) ) (dropout_layer): Identity() ) ) (norms): ModuleList( (0): LayerNorm((256,), eps=1e-05, elementwise_affine=True) (1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) ) ) ) ) (positional_encoding): SinePositionalEncoding(num_feats=128, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06) ) init_cfg={'type': 'Normal', 'std': 0.01, 'override': {'name': 'conv_seg'}} (auxiliary_head): FCNHead( input_transform=None, ignore_index=255, align_corners=False (loss_decode): CrossEntropyLoss(avg_non_ignore=False) (conv_seg): Conv2d(256, 150, kernel_size=(1, 1), stride=(1, 1)) (dropout): Dropout2d(p=0.1, inplace=False) (convs): Sequential( (0): ConvModule( (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (activate): ReLU(inplace=True) ) ) ) init_cfg={'type': 'Normal', 'std': 0.01, 'override': {'name': 'conv_seg'}} (embedding_table): Embedding(151, 256) (transform): ConvModule( (conv): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1)) ) (time_mlp): Sequential( (0): LearnedSinusoidalPosEmb() (1): Linear(in_features=17, out_features=1024, bias=True) (2): GELU() (3): Linear(in_features=1024, out_features=1024, bias=True) ) ) 2023-02-11 19:09:05,776 - mmseg - INFO - Model size:135.39 2023-02-11 19:09:06,081 - mmseg - INFO - Loaded 20210 images 2023-02-11 19:09:06,752 - mmseg - INFO - Loaded 2000 images 2023-02-11 19:09:06,765 - mmseg - INFO - 
load checkpoint from local path: ./work_dirs/diffseg_swin_t_2x8_512x512_160k_ade20k_v20/latest.pth 2023-02-11 19:09:07,095 - mmseg - INFO - resumed from epoch: 1, iter 15999 2023-02-11 19:09:07,096 - mmseg - INFO - Hooks will be executed in the following order: before_run: (VERY_HIGH ) PolyLrUpdaterHook (NORMAL ) CheckpointHook (LOW ) DistEvalHook (VERY_LOW ) TextLoggerHook -------------------- before_train_epoch: (VERY_HIGH ) PolyLrUpdaterHook (LOW ) IterTimerHook (LOW ) DistEvalHook (VERY_LOW ) TextLoggerHook -------------------- before_train_iter: (VERY_HIGH ) PolyLrUpdaterHook (LOW ) IterTimerHook (LOW ) DistEvalHook -------------------- after_train_iter: (ABOVE_NORMAL) OptimizerHook (NORMAL ) CheckpointHook (LOW ) IterTimerHook (LOW ) DistEvalHook (VERY_LOW ) TextLoggerHook -------------------- after_train_epoch: (NORMAL ) CheckpointHook (LOW ) DistEvalHook (VERY_LOW ) TextLoggerHook -------------------- before_val_epoch: (LOW ) IterTimerHook (VERY_LOW ) TextLoggerHook -------------------- before_val_iter: (LOW ) IterTimerHook -------------------- after_val_iter: (LOW ) IterTimerHook -------------------- after_val_epoch: (VERY_LOW ) TextLoggerHook -------------------- after_run: (VERY_LOW ) TextLoggerHook -------------------- 2023-02-11 19:09:07,097 - mmseg - INFO - workflow: [('train', 1)], max: 160000 iters 2023-02-11 19:09:14,990 - mmseg - INFO - Saving checkpoint at 16000 iterations 2023-02-11 19:09:15,833 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:09:15,833 - mmseg - INFO - Iter [16000/160000] lr: 5.400e-05, eta: 163 days, 19:13:17, time: 1.966, data_time: 0.070, memory: 7585, decode.loss_ce: 0.8096, decode.acc_seg: 76.7131, aux.loss_ce: 0.3078, aux.acc_seg: 73.6436, loss: 1.1175, grad_norm: 21.5561 2023-02-11 19:09:33,614 - mmseg - INFO - per class results: 2023-02-11 19:09:33,620 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 69.46 | 85.32 | | building | 77.68 | 85.69 | | sky | 93.09 | 96.35 | | floor | 76.4 | 86.98 | | tree | 64.82 | 90.73 | | ceiling | 77.11 | 88.33 | | road | 76.33 | 82.38 | | bed | 80.47 | 92.54 | | windowpane | 52.53 | 70.41 | | grass | 56.45 | 81.59 | | cabinet | 38.25 | 42.04 | | sidewalk | 58.68 | 81.37 | | person | 72.54 | 83.01 | | earth | 15.46 | 16.97 | | door | 14.53 | 15.1 | | table | 42.64 | 66.03 | | mountain | 52.41 | 82.22 | | plant | 26.5 | 28.59 | | curtain | 62.68 | 80.52 | | chair | 39.5 | 72.37 | | car | 78.67 | 84.58 | | water | 45.48 | 82.24 | | painting | 57.93 | 84.44 | | sofa | 48.42 | 71.88 | | shelf | 18.64 | 20.91 | | house | 37.92 | 50.11 | | sea | 58.15 | 85.8 | | mirror | 40.62 | 47.19 | | rug | 48.83 | 56.58 | | field | 20.23 | 54.42 | | armchair | 16.23 | 20.82 | | seat | 42.03 | 63.76 | | fence | 19.51 | 21.3 | | desk | 13.43 | 17.93 | | rock | 33.93 | 45.3 | | wardrobe | 35.93 | 43.01 | | lamp | 45.17 | 62.45 | | bathtub | 62.14 | 80.81 | | railing | 26.82 | 46.92 | | cushion | 38.69 | 53.19 | | base | 1.04 | 1.04 | | box | 10.89 | 12.7 | | column | 23.02 | 24.29 | | signboard | 26.49 | 34.15 | | chest of drawers | 23.35 | 71.87 | | counter | 11.83 | 15.86 | | sand | 30.16 | 51.12 | | sink | 48.2 | 51.32 | | skyscraper | 51.03 | 91.0 | | fireplace | 48.06 | 68.76 | | refrigerator | 49.93 | 74.74 | | grandstand | 36.04 | 56.43 | | path | 15.67 | 31.16 | | stairs | 28.61 | 47.15 | | runway | 68.01 | 90.47 | | case | 41.56 | 60.54 | | pool table | 87.06 | 92.32 | | pillow | 45.24 | 62.51 | | screen door 
| 2.19 | 2.2 | | stairway | 27.96 | 33.31 | | river | 1.22 | 1.27 | | bridge | 41.44 | 56.9 | | bookcase | 17.78 | 54.73 | | blind | 12.5 | 12.95 | | coffee table | 44.86 | 74.56 | | toilet | 79.43 | 83.2 | | flower | 23.59 | 65.07 | | book | 22.36 | 76.16 | | hill | 0.0 | 0.0 | | bench | 36.38 | 44.06 | | countertop | 35.64 | 47.13 | | stove | 43.09 | 51.78 | | palm | 43.52 | 56.52 | | kitchen island | 10.31 | 73.18 | | computer | 57.38 | 78.26 | | swivel chair | 29.29 | 37.22 | | boat | 36.11 | 38.66 | | bar | 13.09 | 26.6 | | arcade machine | 11.53 | 13.38 | | hovel | 2.62 | 2.72 | | bus | 78.07 | 84.72 | | towel | 44.45 | 50.63 | | light | 33.34 | 52.61 | | truck | 25.03 | 50.26 | | tower | 35.53 | 64.91 | | chandelier | 36.56 | 82.55 | | awning | 21.79 | 31.87 | | streetlight | 12.65 | 33.78 | | booth | 20.17 | 23.39 | | television receiver | 27.27 | 80.42 | | airplane | 35.89 | 66.69 | | dirt track | 2.76 | 8.97 | | apparel | 19.85 | 31.25 | | pole | 1.08 | 1.1 | | land | 11.99 | 12.39 | | bannister | 0.0 | 0.0 | | escalator | 9.15 | 10.68 | | ottoman | 28.99 | 49.46 | | bottle | 33.92 | 61.65 | | buffet | 22.71 | 81.98 | | poster | 0.75 | 0.79 | | stage | 0.93 | 1.13 | | van | 26.94 | 28.43 | | ship | 20.58 | 23.16 | | fountain | 0.01 | 0.01 | | conveyer belt | 46.65 | 55.14 | | canopy | 0.24 | 0.28 | | washer | 58.68 | 60.23 | | plaything | 8.26 | 9.87 | | swimming pool | 31.8 | 36.78 | | stool | 1.74 | 1.75 | | barrel | 30.28 | 63.83 | | basket | 17.44 | 29.3 | | waterfall | 37.49 | 58.39 | | tent | 90.19 | 97.8 | | bag | 0.64 | 0.64 | | minibike | 48.53 | 65.07 | | cradle | 61.26 | 93.12 | | oven | 0.0 | 0.0 | | ball | 41.01 | 62.42 | | food | 48.03 | 75.22 | | step | 0.44 | 0.44 | | tank | 23.77 | 25.99 | | trade name | 11.82 | 12.16 | | microwave | 13.35 | 14.69 | | pot | 14.95 | 15.92 | | animal | 57.31 | 60.49 | | bicycle | 43.07 | 62.22 | | lake | 0.0 | 0.0 | | dishwasher | 23.11 | 64.59 | | screen | 37.24 | 86.89 | | blanket | 0.04 | 0.04 | | sculpture | 29.95 | 35.91 | | hood | 21.82 | 26.34 | | sconce | 1.64 | 1.69 | | vase | 14.87 | 33.01 | | traffic light | 14.52 | 38.02 | | tray | 1.2 | 1.26 | | ashcan | 17.09 | 26.8 | | fan | 29.99 | 63.5 | | pier | 20.97 | 71.33 | | crt screen | 0.0 | 0.0 | | plate | 31.65 | 50.21 | | monitor | 0.0 | 0.0 | | bulletin board | 21.74 | 22.7 | | shower | 0.0 | 0.0 | | radiator | 42.72 | 64.33 | | glass | 0.0 | 0.0 | | clock | 6.07 | 6.53 | | flag | 18.93 | 20.63 | +---------------------+-------+-------+ 2023-02-11 19:09:33,620 - mmseg - INFO - Summary: 2023-02-11 19:09:33,620 - mmseg - INFO - +-------+-------+-------+ | aAcc | mIoU | mAcc | +-------+-------+-------+ | 75.37 | 31.64 | 45.59 | +-------+-------+-------+ 2023-02-11 19:09:34,411 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_16000.pth. 2023-02-11 19:09:34,412 - mmseg - INFO - Best mIoU is 0.3164 at 16000 iter. 
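The Summary block above (aAcc / mIoU / mAcc) is an aggregate of the per-class table that precedes it. As a reading aid, the sketch below shows the usual way these three values are derived from per-class pixel counts accumulated over the 2000 validation images; the function and array names are illustrative, not mmseg's own identifiers.

import numpy as np

def summarize(area_intersect, area_union, area_label):
    # Each argument is a 1-D array with one entry per class, accumulated
    # over the whole validation set:
    #   area_intersect - pixels where prediction and ground truth agree
    #   area_union     - pixels covered by prediction or ground truth
    #   area_label     - ground-truth pixels of that class
    iou = area_intersect / area_union   # per-class IoU column
    acc = area_intersect / area_label   # per-class Acc column
    return {
        'aAcc': area_intersect.sum() / area_label.sum(),  # overall pixel accuracy
        'mIoU': np.nanmean(iou),  # NaN entries (classes absent from the split) are skipped
        'mAcc': np.nanmean(acc),
    }

Note that the checkpoint hook reports the same metric as a fraction, so "Best mIoU is 0.3164 at 16000 iter" corresponds to the 31.64 shown in the table, where values are percentages.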
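For the time_mlp printed in the model summary above (LearnedSinusoidalPosEmb -> Linear(17, 1024) -> GELU -> Linear(1024, 1024)): the embedding module itself is not expanded in the log, so the snippet below is only a reconstruction under the common learned-sinusoidal formulation, chosen so the shapes line up with what was logged (a learned frequency vector of size 8 gives 2*8 sinusoidal features plus the raw timestep, hence the 17-dimensional input of the first Linear).

import math
import torch
import torch.nn as nn

class LearnedSinusoidalPosEmb(nn.Module):
    # Assumed formulation; consistent with `time_mlp.0.weights - torch.Size([8])`
    # and `Linear(in_features=17, out_features=1024)` in the log above.
    def __init__(self, dim=16):
        super().__init__()
        assert dim % 2 == 0
        self.weights = nn.Parameter(torch.randn(dim // 2))  # 8 learned frequencies

    def forward(self, t):
        t = t[:, None].float()                                   # (B, 1) timesteps
        freqs = t * self.weights[None, :] * 2 * math.pi          # (B, 8)
        fourier = torch.cat([freqs.sin(), freqs.cos()], dim=-1)  # (B, 16)
        return torch.cat([t, fourier], dim=-1)                   # (B, 17)

# Wiring as printed in the model summary:
time_mlp = nn.Sequential(
    LearnedSinusoidalPosEmb(16),
    nn.Linear(17, 1024),
    nn.GELU(),
    nn.Linear(1024, 1024),
)
print(time_mlp(torch.randint(0, 1000, (2,))).shape)  # torch.Size([2, 1024])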
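In the training entries that follow, the logged loss is the sum of the per-head terms (decode.loss_ce from the deformable decode head and aux.loss_ce from the auxiliary FCN head), each already scaled by whatever loss weight is configured; since the printed numbers are 50-iteration averages rounded to four decimals, the sum matches only up to rounding. A quick check against the entry at Iter [16050/160000]:

# Values copied from the log entry at Iter [16050/160000]
decode_loss_ce = 0.6460  # decode head cross-entropy (weight-scaled, 50-iter average)
aux_loss_ce = 0.3051     # auxiliary FCN head cross-entropy
print(round(decode_loss_ce + aux_loss_ce, 4))  # 0.9511, vs. the logged loss of 0.9510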
2023-02-11 19:09:34,412 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:09:34,412 - mmseg - INFO - Iter(val) [250] aAcc: 0.7537, mIoU: 0.3164, mAcc: 0.4559, IoU.wall: 0.6946, IoU.building: 0.7768, IoU.sky: 0.9309, IoU.floor: 0.7640, IoU.tree: 0.6482, IoU.ceiling: 0.7711, IoU.road: 0.7633, IoU.bed : 0.8047, IoU.windowpane: 0.5253, IoU.grass: 0.5645, IoU.cabinet: 0.3825, IoU.sidewalk: 0.5868, IoU.person: 0.7254, IoU.earth: 0.1546, IoU.door: 0.1453, IoU.table: 0.4264, IoU.mountain: 0.5241, IoU.plant: 0.2650, IoU.curtain: 0.6268, IoU.chair: 0.3950, IoU.car: 0.7867, IoU.water: 0.4548, IoU.painting: 0.5793, IoU.sofa: 0.4842, IoU.shelf: 0.1864, IoU.house: 0.3792, IoU.sea: 0.5815, IoU.mirror: 0.4062, IoU.rug: 0.4883, IoU.field: 0.2023, IoU.armchair: 0.1623, IoU.seat: 0.4203, IoU.fence: 0.1951, IoU.desk: 0.1343, IoU.rock: 0.3393, IoU.wardrobe: 0.3593, IoU.lamp: 0.4517, IoU.bathtub: 0.6214, IoU.railing: 0.2682, IoU.cushion: 0.3869, IoU.base: 0.0104, IoU.box: 0.1089, IoU.column: 0.2302, IoU.signboard: 0.2649, IoU.chest of drawers: 0.2335, IoU.counter: 0.1183, IoU.sand: 0.3016, IoU.sink: 0.4820, IoU.skyscraper: 0.5103, IoU.fireplace: 0.4806, IoU.refrigerator: 0.4993, IoU.grandstand: 0.3604, IoU.path: 0.1567, IoU.stairs: 0.2861, IoU.runway: 0.6801, IoU.case: 0.4156, IoU.pool table: 0.8706, IoU.pillow: 0.4524, IoU.screen door: 0.0219, IoU.stairway: 0.2796, IoU.river: 0.0122, IoU.bridge: 0.4144, IoU.bookcase: 0.1778, IoU.blind: 0.1250, IoU.coffee table: 0.4486, IoU.toilet: 0.7943, IoU.flower: 0.2359, IoU.book: 0.2236, IoU.hill: 0.0000, IoU.bench: 0.3638, IoU.countertop: 0.3564, IoU.stove: 0.4309, IoU.palm: 0.4352, IoU.kitchen island: 0.1031, IoU.computer: 0.5738, IoU.swivel chair: 0.2929, IoU.boat: 0.3611, IoU.bar: 0.1309, IoU.arcade machine: 0.1153, IoU.hovel: 0.0262, IoU.bus: 0.7807, IoU.towel: 0.4445, IoU.light: 0.3334, IoU.truck: 0.2503, IoU.tower: 0.3553, IoU.chandelier: 0.3656, IoU.awning: 0.2179, IoU.streetlight: 0.1265, IoU.booth: 0.2017, IoU.television receiver: 0.2727, IoU.airplane: 0.3589, IoU.dirt track: 0.0276, IoU.apparel: 0.1985, IoU.pole: 0.0108, IoU.land: 0.1199, IoU.bannister: 0.0000, IoU.escalator: 0.0915, IoU.ottoman: 0.2899, IoU.bottle: 0.3392, IoU.buffet: 0.2271, IoU.poster: 0.0075, IoU.stage: 0.0093, IoU.van: 0.2694, IoU.ship: 0.2058, IoU.fountain: 0.0001, IoU.conveyer belt: 0.4665, IoU.canopy: 0.0024, IoU.washer: 0.5868, IoU.plaything: 0.0826, IoU.swimming pool: 0.3180, IoU.stool: 0.0174, IoU.barrel: 0.3028, IoU.basket: 0.1744, IoU.waterfall: 0.3749, IoU.tent: 0.9019, IoU.bag: 0.0064, IoU.minibike: 0.4853, IoU.cradle: 0.6126, IoU.oven: 0.0000, IoU.ball: 0.4101, IoU.food: 0.4803, IoU.step: 0.0044, IoU.tank: 0.2377, IoU.trade name: 0.1182, IoU.microwave: 0.1335, IoU.pot: 0.1495, IoU.animal: 0.5731, IoU.bicycle: 0.4307, IoU.lake: 0.0000, IoU.dishwasher: 0.2311, IoU.screen: 0.3724, IoU.blanket: 0.0004, IoU.sculpture: 0.2995, IoU.hood: 0.2182, IoU.sconce: 0.0164, IoU.vase: 0.1487, IoU.traffic light: 0.1452, IoU.tray: 0.0120, IoU.ashcan: 0.1709, IoU.fan: 0.2999, IoU.pier: 0.2097, IoU.crt screen: 0.0000, IoU.plate: 0.3165, IoU.monitor: 0.0000, IoU.bulletin board: 0.2174, IoU.shower: 0.0000, IoU.radiator: 0.4272, IoU.glass: 0.0000, IoU.clock: 0.0607, IoU.flag: 0.1893, Acc.wall: 0.8532, Acc.building: 0.8569, Acc.sky: 0.9635, Acc.floor: 0.8698, Acc.tree: 0.9073, Acc.ceiling: 0.8833, Acc.road: 0.8238, Acc.bed : 0.9254, Acc.windowpane: 0.7041, Acc.grass: 0.8159, Acc.cabinet: 0.4204, Acc.sidewalk: 0.8137, Acc.person: 0.8301, Acc.earth: 0.1697, Acc.door: 
0.1510, Acc.table: 0.6603, Acc.mountain: 0.8222, Acc.plant: 0.2859, Acc.curtain: 0.8052, Acc.chair: 0.7237, Acc.car: 0.8458, Acc.water: 0.8224, Acc.painting: 0.8444, Acc.sofa: 0.7188, Acc.shelf: 0.2091, Acc.house: 0.5011, Acc.sea: 0.8580, Acc.mirror: 0.4719, Acc.rug: 0.5658, Acc.field: 0.5442, Acc.armchair: 0.2082, Acc.seat: 0.6376, Acc.fence: 0.2130, Acc.desk: 0.1793, Acc.rock: 0.4530, Acc.wardrobe: 0.4301, Acc.lamp: 0.6245, Acc.bathtub: 0.8081, Acc.railing: 0.4692, Acc.cushion: 0.5319, Acc.base: 0.0104, Acc.box: 0.1270, Acc.column: 0.2429, Acc.signboard: 0.3415, Acc.chest of drawers: 0.7187, Acc.counter: 0.1586, Acc.sand: 0.5112, Acc.sink: 0.5132, Acc.skyscraper: 0.9100, Acc.fireplace: 0.6876, Acc.refrigerator: 0.7474, Acc.grandstand: 0.5643, Acc.path: 0.3116, Acc.stairs: 0.4715, Acc.runway: 0.9047, Acc.case: 0.6054, Acc.pool table: 0.9232, Acc.pillow: 0.6251, Acc.screen door: 0.0220, Acc.stairway: 0.3331, Acc.river: 0.0127, Acc.bridge: 0.5690, Acc.bookcase: 0.5473, Acc.blind: 0.1295, Acc.coffee table: 0.7456, Acc.toilet: 0.8320, Acc.flower: 0.6507, Acc.book: 0.7616, Acc.hill: 0.0000, Acc.bench: 0.4406, Acc.countertop: 0.4713, Acc.stove: 0.5178, Acc.palm: 0.5652, Acc.kitchen island: 0.7318, Acc.computer: 0.7826, Acc.swivel chair: 0.3722, Acc.boat: 0.3866, Acc.bar: 0.2660, Acc.arcade machine: 0.1338, Acc.hovel: 0.0272, Acc.bus: 0.8472, Acc.towel: 0.5063, Acc.light: 0.5261, Acc.truck: 0.5026, Acc.tower: 0.6491, Acc.chandelier: 0.8255, Acc.awning: 0.3187, Acc.streetlight: 0.3378, Acc.booth: 0.2339, Acc.television receiver: 0.8042, Acc.airplane: 0.6669, Acc.dirt track: 0.0897, Acc.apparel: 0.3125, Acc.pole: 0.0110, Acc.land: 0.1239, Acc.bannister: 0.0000, Acc.escalator: 0.1068, Acc.ottoman: 0.4946, Acc.bottle: 0.6165, Acc.buffet: 0.8198, Acc.poster: 0.0079, Acc.stage: 0.0113, Acc.van: 0.2843, Acc.ship: 0.2316, Acc.fountain: 0.0001, Acc.conveyer belt: 0.5514, Acc.canopy: 0.0028, Acc.washer: 0.6023, Acc.plaything: 0.0987, Acc.swimming pool: 0.3678, Acc.stool: 0.0175, Acc.barrel: 0.6383, Acc.basket: 0.2930, Acc.waterfall: 0.5839, Acc.tent: 0.9780, Acc.bag: 0.0064, Acc.minibike: 0.6507, Acc.cradle: 0.9312, Acc.oven: 0.0000, Acc.ball: 0.6242, Acc.food: 0.7522, Acc.step: 0.0044, Acc.tank: 0.2599, Acc.trade name: 0.1216, Acc.microwave: 0.1469, Acc.pot: 0.1592, Acc.animal: 0.6049, Acc.bicycle: 0.6222, Acc.lake: 0.0000, Acc.dishwasher: 0.6459, Acc.screen: 0.8689, Acc.blanket: 0.0004, Acc.sculpture: 0.3591, Acc.hood: 0.2634, Acc.sconce: 0.0169, Acc.vase: 0.3301, Acc.traffic light: 0.3802, Acc.tray: 0.0126, Acc.ashcan: 0.2680, Acc.fan: 0.6350, Acc.pier: 0.7133, Acc.crt screen: 0.0000, Acc.plate: 0.5021, Acc.monitor: 0.0000, Acc.bulletin board: 0.2270, Acc.shower: 0.0000, Acc.radiator: 0.6433, Acc.glass: 0.0000, Acc.clock: 0.0653, Acc.flag: 0.2063 2023-02-11 19:09:44,714 - mmseg - INFO - Iter [16050/160000] lr: 5.398e-05, eta: 4 days, 3:41:56, time: 0.578, data_time: 0.376, memory: 7748, decode.loss_ce: 0.6460, decode.acc_seg: 76.3935, aux.loss_ce: 0.3051, aux.acc_seg: 73.0418, loss: 0.9510, grad_norm: 7.5155 2023-02-11 19:09:54,565 - mmseg - INFO - Iter [16100/160000] lr: 5.396e-05, eta: 2 days, 6:13:28, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6499, decode.acc_seg: 76.4138, aux.loss_ce: 0.3069, aux.acc_seg: 72.9016, loss: 0.9567, grad_norm: 8.5968 2023-02-11 19:10:04,464 - mmseg - INFO - Iter [16150/160000] lr: 5.394e-05, eta: 1 day, 14:52:34, time: 0.198, data_time: 0.003, memory: 7748, decode.loss_ce: 0.6721, decode.acc_seg: 75.9219, aux.loss_ce: 0.3174, aux.acc_seg: 72.2587, 
loss: 0.9895, grad_norm: 7.2444 2023-02-11 19:10:14,367 - mmseg - INFO - Iter [16200/160000] lr: 5.393e-05, eta: 1 day, 7:09:47, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6663, decode.acc_seg: 76.0571, aux.loss_ce: 0.3216, aux.acc_seg: 72.1466, loss: 0.9879, grad_norm: 8.0002 2023-02-11 19:10:24,000 - mmseg - INFO - Iter [16250/160000] lr: 5.391e-05, eta: 1 day, 2:28:45, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6394, decode.acc_seg: 76.5290, aux.loss_ce: 0.3032, aux.acc_seg: 72.8109, loss: 0.9427, grad_norm: 7.7145 2023-02-11 19:10:34,212 - mmseg - INFO - Iter [16300/160000] lr: 5.389e-05, eta: 23:25:27, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6381, decode.acc_seg: 77.1623, aux.loss_ce: 0.3050, aux.acc_seg: 73.2053, loss: 0.9431, grad_norm: 6.7610 2023-02-11 19:10:44,085 - mmseg - INFO - Iter [16350/160000] lr: 5.387e-05, eta: 21:12:19, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6380, decode.acc_seg: 76.9988, aux.loss_ce: 0.3047, aux.acc_seg: 72.8703, loss: 0.9427, grad_norm: 6.9682 2023-02-11 19:10:54,538 - mmseg - INFO - Iter [16400/160000] lr: 5.385e-05, eta: 19:35:32, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6681, decode.acc_seg: 75.5528, aux.loss_ce: 0.3182, aux.acc_seg: 71.8990, loss: 0.9862, grad_norm: 7.5236 2023-02-11 19:11:04,565 - mmseg - INFO - Iter [16450/160000] lr: 5.383e-05, eta: 18:18:09, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6425, decode.acc_seg: 76.6421, aux.loss_ce: 0.3078, aux.acc_seg: 72.1924, loss: 0.9503, grad_norm: 7.0980 2023-02-11 19:11:14,849 - mmseg - INFO - Iter [16500/160000] lr: 5.381e-05, eta: 17:17:18, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6112, decode.acc_seg: 77.8580, aux.loss_ce: 0.2999, aux.acc_seg: 73.7524, loss: 0.9110, grad_norm: 6.8611 2023-02-11 19:11:24,851 - mmseg - INFO - Iter [16550/160000] lr: 5.379e-05, eta: 16:26:14, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6379, decode.acc_seg: 76.8769, aux.loss_ce: 0.3041, aux.acc_seg: 73.3450, loss: 0.9420, grad_norm: 9.3947 2023-02-11 19:11:34,680 - mmseg - INFO - Iter [16600/160000] lr: 5.378e-05, eta: 15:42:53, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6349, decode.acc_seg: 76.8340, aux.loss_ce: 0.3023, aux.acc_seg: 73.1561, loss: 0.9372, grad_norm: 6.9906 2023-02-11 19:11:44,981 - mmseg - INFO - Iter [16650/160000] lr: 5.376e-05, eta: 15:07:57, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6308, decode.acc_seg: 76.6636, aux.loss_ce: 0.3008, aux.acc_seg: 73.1229, loss: 0.9316, grad_norm: 7.4062 2023-02-11 19:11:54,655 - mmseg - INFO - Iter [16700/160000] lr: 5.374e-05, eta: 14:35:56, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6882, decode.acc_seg: 75.8882, aux.loss_ce: 0.3281, aux.acc_seg: 71.6664, loss: 1.0163, grad_norm: 8.2001 2023-02-11 19:12:04,631 - mmseg - INFO - Iter [16750/160000] lr: 5.372e-05, eta: 14:09:03, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6452, decode.acc_seg: 76.7812, aux.loss_ce: 0.3081, aux.acc_seg: 72.8899, loss: 0.9534, grad_norm: 6.8524 2023-02-11 19:12:15,009 - mmseg - INFO - Iter [16800/160000] lr: 5.370e-05, eta: 13:46:41, time: 0.208, data_time: 0.003, memory: 7748, decode.loss_ce: 0.6719, decode.acc_seg: 76.2892, aux.loss_ce: 0.3225, aux.acc_seg: 71.7810, loss: 0.9943, grad_norm: 8.1069 2023-02-11 19:12:25,029 - mmseg - INFO - Iter [16850/160000] lr: 5.368e-05, eta: 13:25:56, time: 0.200, data_time: 0.004, memory: 
7748, decode.loss_ce: 0.6622, decode.acc_seg: 76.3446, aux.loss_ce: 0.3207, aux.acc_seg: 71.8895, loss: 0.9829, grad_norm: 8.0384 2023-02-11 19:12:35,054 - mmseg - INFO - Iter [16900/160000] lr: 5.366e-05, eta: 13:07:29, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6329, decode.acc_seg: 77.3532, aux.loss_ce: 0.3016, aux.acc_seg: 73.2856, loss: 0.9346, grad_norm: 6.8453 2023-02-11 19:12:45,729 - mmseg - INFO - Iter [16950/160000] lr: 5.364e-05, eta: 12:52:35, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.6396, decode.acc_seg: 76.1552, aux.loss_ce: 0.3035, aux.acc_seg: 72.8840, loss: 0.9431, grad_norm: 7.5689 2023-02-11 19:12:56,950 - mmseg - INFO - Saving checkpoint at 17000 iterations 2023-02-11 19:12:57,666 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:12:57,666 - mmseg - INFO - Iter [17000/160000] lr: 5.363e-05, eta: 12:42:09, time: 0.239, data_time: 0.005, memory: 7748, decode.loss_ce: 0.6307, decode.acc_seg: 77.3327, aux.loss_ce: 0.3023, aux.acc_seg: 73.2005, loss: 0.9330, grad_norm: 6.8988 2023-02-11 19:13:07,798 - mmseg - INFO - Iter [17050/160000] lr: 5.361e-05, eta: 12:28:36, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6341, decode.acc_seg: 77.4148, aux.loss_ce: 0.3060, aux.acc_seg: 73.2938, loss: 0.9401, grad_norm: 7.2472 2023-02-11 19:13:17,502 - mmseg - INFO - Iter [17100/160000] lr: 5.359e-05, eta: 12:15:21, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6289, decode.acc_seg: 77.4945, aux.loss_ce: 0.2999, aux.acc_seg: 73.2634, loss: 0.9288, grad_norm: 6.9944 2023-02-11 19:13:27,724 - mmseg - INFO - Iter [17150/160000] lr: 5.357e-05, eta: 12:04:18, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6513, decode.acc_seg: 76.9977, aux.loss_ce: 0.3164, aux.acc_seg: 72.6201, loss: 0.9678, grad_norm: 7.3533 2023-02-11 19:13:37,835 - mmseg - INFO - Iter [17200/160000] lr: 5.355e-05, eta: 11:53:57, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6371, decode.acc_seg: 77.0102, aux.loss_ce: 0.3100, aux.acc_seg: 72.6692, loss: 0.9472, grad_norm: 7.8890 2023-02-11 19:13:47,762 - mmseg - INFO - Iter [17250/160000] lr: 5.353e-05, eta: 11:44:03, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6802, decode.acc_seg: 76.1839, aux.loss_ce: 0.3205, aux.acc_seg: 72.0318, loss: 1.0007, grad_norm: 8.2504 2023-02-11 19:13:59,650 - mmseg - INFO - Iter [17300/160000] lr: 5.351e-05, eta: 11:38:29, time: 0.238, data_time: 0.047, memory: 7748, decode.loss_ce: 0.6089, decode.acc_seg: 78.3392, aux.loss_ce: 0.2956, aux.acc_seg: 74.5018, loss: 0.9045, grad_norm: 7.0343 2023-02-11 19:14:09,337 - mmseg - INFO - Iter [17350/160000] lr: 5.349e-05, eta: 11:29:26, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6466, decode.acc_seg: 76.4942, aux.loss_ce: 0.3110, aux.acc_seg: 72.2502, loss: 0.9576, grad_norm: 7.8447 2023-02-11 19:14:19,653 - mmseg - INFO - Iter [17400/160000] lr: 5.348e-05, eta: 11:22:06, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6097, decode.acc_seg: 77.7860, aux.loss_ce: 0.2927, aux.acc_seg: 73.4567, loss: 0.9024, grad_norm: 6.4105 2023-02-11 19:14:30,312 - mmseg - INFO - Iter [17450/160000] lr: 5.346e-05, eta: 11:15:47, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6360, decode.acc_seg: 77.0502, aux.loss_ce: 0.3048, aux.acc_seg: 73.1672, loss: 0.9408, grad_norm: 8.1984 2023-02-11 19:14:40,130 - mmseg - INFO - Iter [17500/160000] lr: 5.344e-05, eta: 11:08:34, time: 0.196, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.6729, decode.acc_seg: 76.2970, aux.loss_ce: 0.3312, aux.acc_seg: 71.6282, loss: 1.0041, grad_norm: 7.8338 2023-02-11 19:14:51,057 - mmseg - INFO - Iter [17550/160000] lr: 5.342e-05, eta: 11:03:31, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6444, decode.acc_seg: 76.8733, aux.loss_ce: 0.3105, aux.acc_seg: 73.0246, loss: 0.9550, grad_norm: 7.8612 2023-02-11 19:15:01,026 - mmseg - INFO - Iter [17600/160000] lr: 5.340e-05, eta: 10:57:23, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6618, decode.acc_seg: 75.4238, aux.loss_ce: 0.3118, aux.acc_seg: 71.7819, loss: 0.9736, grad_norm: 7.9818 2023-02-11 19:15:10,960 - mmseg - INFO - Iter [17650/160000] lr: 5.338e-05, eta: 10:51:32, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6487, decode.acc_seg: 76.7444, aux.loss_ce: 0.3048, aux.acc_seg: 73.2329, loss: 0.9536, grad_norm: 7.4266 2023-02-11 19:15:21,617 - mmseg - INFO - Iter [17700/160000] lr: 5.336e-05, eta: 10:46:59, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5979, decode.acc_seg: 78.0707, aux.loss_ce: 0.2931, aux.acc_seg: 73.7763, loss: 0.8910, grad_norm: 6.9365 2023-02-11 19:15:31,474 - mmseg - INFO - Iter [17750/160000] lr: 5.334e-05, eta: 10:41:40, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6266, decode.acc_seg: 77.0122, aux.loss_ce: 0.3009, aux.acc_seg: 73.1069, loss: 0.9275, grad_norm: 7.6764 2023-02-11 19:15:41,363 - mmseg - INFO - Iter [17800/160000] lr: 5.333e-05, eta: 10:36:38, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6030, decode.acc_seg: 77.7391, aux.loss_ce: 0.2946, aux.acc_seg: 73.5147, loss: 0.8976, grad_norm: 7.8925 2023-02-11 19:15:51,505 - mmseg - INFO - Iter [17850/160000] lr: 5.331e-05, eta: 10:32:12, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6124, decode.acc_seg: 77.4793, aux.loss_ce: 0.2998, aux.acc_seg: 73.2598, loss: 0.9122, grad_norm: 7.9056 2023-02-11 19:16:01,323 - mmseg - INFO - Iter [17900/160000] lr: 5.329e-05, eta: 10:27:36, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6352, decode.acc_seg: 77.2703, aux.loss_ce: 0.3060, aux.acc_seg: 72.7318, loss: 0.9412, grad_norm: 8.0380 2023-02-11 19:16:11,276 - mmseg - INFO - Iter [17950/160000] lr: 5.327e-05, eta: 10:23:22, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6315, decode.acc_seg: 76.9533, aux.loss_ce: 0.3102, aux.acc_seg: 72.3593, loss: 0.9417, grad_norm: 8.2122 2023-02-11 19:16:21,309 - mmseg - INFO - Saving checkpoint at 18000 iterations 2023-02-11 19:16:22,004 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:16:22,004 - mmseg - INFO - Iter [18000/160000] lr: 5.325e-05, eta: 10:20:16, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6389, decode.acc_seg: 76.8357, aux.loss_ce: 0.3032, aux.acc_seg: 73.2277, loss: 0.9421, grad_norm: 7.2192 2023-02-11 19:16:32,095 - mmseg - INFO - Iter [18050/160000] lr: 5.323e-05, eta: 10:16:34, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6186, decode.acc_seg: 77.9409, aux.loss_ce: 0.3042, aux.acc_seg: 73.7147, loss: 0.9228, grad_norm: 6.6738 2023-02-11 19:16:42,330 - mmseg - INFO - Iter [18100/160000] lr: 5.321e-05, eta: 10:13:11, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5706, decode.acc_seg: 78.6022, aux.loss_ce: 0.2809, aux.acc_seg: 74.4108, loss: 0.8515, grad_norm: 5.4902 2023-02-11 19:16:52,611 - mmseg - INFO - Iter [18150/160000] lr: 5.319e-05, eta: 10:10:01, time: 0.206, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.6720, decode.acc_seg: 76.7195, aux.loss_ce: 0.3226, aux.acc_seg: 72.2046, loss: 0.9946, grad_norm: 7.6140 2023-02-11 19:17:02,497 - mmseg - INFO - Iter [18200/160000] lr: 5.318e-05, eta: 10:06:35, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6253, decode.acc_seg: 77.1455, aux.loss_ce: 0.3108, aux.acc_seg: 72.1775, loss: 0.9361, grad_norm: 7.2665 2023-02-11 19:17:12,791 - mmseg - INFO - Iter [18250/160000] lr: 5.316e-05, eta: 10:03:43, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6150, decode.acc_seg: 77.5245, aux.loss_ce: 0.3027, aux.acc_seg: 73.1659, loss: 0.9176, grad_norm: 7.8868 2023-02-11 19:17:22,683 - mmseg - INFO - Iter [18300/160000] lr: 5.314e-05, eta: 10:00:32, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6240, decode.acc_seg: 77.0620, aux.loss_ce: 0.3017, aux.acc_seg: 72.6774, loss: 0.9257, grad_norm: 7.6858 2023-02-11 19:17:32,591 - mmseg - INFO - Iter [18350/160000] lr: 5.312e-05, eta: 9:57:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6156, decode.acc_seg: 78.1860, aux.loss_ce: 0.2946, aux.acc_seg: 74.3572, loss: 0.9102, grad_norm: 6.1639 2023-02-11 19:17:42,571 - mmseg - INFO - Iter [18400/160000] lr: 5.310e-05, eta: 9:54:40, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6102, decode.acc_seg: 78.4601, aux.loss_ce: 0.2906, aux.acc_seg: 74.8514, loss: 0.9008, grad_norm: 7.9407 2023-02-11 19:17:52,732 - mmseg - INFO - Iter [18450/160000] lr: 5.308e-05, eta: 9:52:06, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6554, decode.acc_seg: 76.1991, aux.loss_ce: 0.3179, aux.acc_seg: 71.2716, loss: 0.9732, grad_norm: 7.6321 2023-02-11 19:18:02,932 - mmseg - INFO - Iter [18500/160000] lr: 5.306e-05, eta: 9:49:41, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6319, decode.acc_seg: 77.5252, aux.loss_ce: 0.3093, aux.acc_seg: 72.7976, loss: 0.9411, grad_norm: 7.8966 2023-02-11 19:18:14,842 - mmseg - INFO - Iter [18550/160000] lr: 5.304e-05, eta: 9:48:56, time: 0.238, data_time: 0.048, memory: 7748, decode.loss_ce: 0.5939, decode.acc_seg: 78.5077, aux.loss_ce: 0.2898, aux.acc_seg: 74.3487, loss: 0.8837, grad_norm: 6.5441 2023-02-11 19:18:24,893 - mmseg - INFO - Iter [18600/160000] lr: 5.303e-05, eta: 9:46:31, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6055, decode.acc_seg: 78.1249, aux.loss_ce: 0.2962, aux.acc_seg: 73.9175, loss: 0.9017, grad_norm: 7.1398 2023-02-11 19:18:35,112 - mmseg - INFO - Iter [18650/160000] lr: 5.301e-05, eta: 9:44:19, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6018, decode.acc_seg: 77.9809, aux.loss_ce: 0.3029, aux.acc_seg: 73.2595, loss: 0.9047, grad_norm: 6.4013 2023-02-11 19:18:45,452 - mmseg - INFO - Iter [18700/160000] lr: 5.299e-05, eta: 9:42:19, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6102, decode.acc_seg: 78.0714, aux.loss_ce: 0.2995, aux.acc_seg: 73.6275, loss: 0.9096, grad_norm: 8.3976 2023-02-11 19:18:55,685 - mmseg - INFO - Iter [18750/160000] lr: 5.297e-05, eta: 9:40:17, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6002, decode.acc_seg: 78.6370, aux.loss_ce: 0.2915, aux.acc_seg: 73.9149, loss: 0.8916, grad_norm: 6.8211 2023-02-11 19:19:06,469 - mmseg - INFO - Iter [18800/160000] lr: 5.295e-05, eta: 9:38:46, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6294, decode.acc_seg: 77.4108, aux.loss_ce: 0.3054, aux.acc_seg: 73.5907, loss: 0.9348, grad_norm: 6.8805 2023-02-11 19:19:16,738 - mmseg - INFO 
- Iter [18850/160000] lr: 5.293e-05, eta: 9:36:55, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.6022, decode.acc_seg: 77.5259, aux.loss_ce: 0.2921, aux.acc_seg: 73.6297, loss: 0.8943, grad_norm: 6.5247 2023-02-11 19:19:26,834 - mmseg - INFO - Iter [18900/160000] lr: 5.291e-05, eta: 9:34:57, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6060, decode.acc_seg: 77.9033, aux.loss_ce: 0.2952, aux.acc_seg: 74.0592, loss: 0.9012, grad_norm: 7.5156 2023-02-11 19:19:36,921 - mmseg - INFO - Iter [18950/160000] lr: 5.289e-05, eta: 9:33:01, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6255, decode.acc_seg: 77.3172, aux.loss_ce: 0.3000, aux.acc_seg: 73.2132, loss: 0.9255, grad_norm: 8.1054 2023-02-11 19:19:46,814 - mmseg - INFO - Saving checkpoint at 19000 iterations 2023-02-11 19:19:47,502 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:19:47,502 - mmseg - INFO - Iter [19000/160000] lr: 5.288e-05, eta: 9:31:35, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6405, decode.acc_seg: 77.0622, aux.loss_ce: 0.3073, aux.acc_seg: 72.8784, loss: 0.9478, grad_norm: 7.8165 2023-02-11 19:19:57,330 - mmseg - INFO - Iter [19050/160000] lr: 5.286e-05, eta: 9:29:35, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5743, decode.acc_seg: 78.9237, aux.loss_ce: 0.2848, aux.acc_seg: 74.3440, loss: 0.8591, grad_norm: 6.5606 2023-02-11 19:20:07,450 - mmseg - INFO - Iter [19100/160000] lr: 5.284e-05, eta: 9:27:52, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6171, decode.acc_seg: 77.5636, aux.loss_ce: 0.2968, aux.acc_seg: 73.2083, loss: 0.9139, grad_norm: 7.2369 2023-02-11 19:20:17,212 - mmseg - INFO - Iter [19150/160000] lr: 5.282e-05, eta: 9:25:55, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6169, decode.acc_seg: 77.1463, aux.loss_ce: 0.3029, aux.acc_seg: 72.9549, loss: 0.9199, grad_norm: 6.9599 2023-02-11 19:20:27,599 - mmseg - INFO - Iter [19200/160000] lr: 5.280e-05, eta: 9:24:30, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6150, decode.acc_seg: 77.6902, aux.loss_ce: 0.3033, aux.acc_seg: 73.2881, loss: 0.9183, grad_norm: 7.5636 2023-02-11 19:20:37,979 - mmseg - INFO - Iter [19250/160000] lr: 5.278e-05, eta: 9:23:06, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6014, decode.acc_seg: 77.6905, aux.loss_ce: 0.2914, aux.acc_seg: 73.5760, loss: 0.8928, grad_norm: 6.8703 2023-02-11 19:20:47,894 - mmseg - INFO - Iter [19300/160000] lr: 5.276e-05, eta: 9:21:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6046, decode.acc_seg: 77.9023, aux.loss_ce: 0.2945, aux.acc_seg: 73.6375, loss: 0.8991, grad_norm: 7.3328 2023-02-11 19:20:58,292 - mmseg - INFO - Iter [19350/160000] lr: 5.274e-05, eta: 9:20:08, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5994, decode.acc_seg: 77.8364, aux.loss_ce: 0.2913, aux.acc_seg: 73.0054, loss: 0.8907, grad_norm: 7.0830 2023-02-11 19:21:08,042 - mmseg - INFO - Iter [19400/160000] lr: 5.273e-05, eta: 9:18:25, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6083, decode.acc_seg: 77.6485, aux.loss_ce: 0.3009, aux.acc_seg: 73.3795, loss: 0.9092, grad_norm: 7.0837 2023-02-11 19:21:18,115 - mmseg - INFO - Iter [19450/160000] lr: 5.271e-05, eta: 9:16:58, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6033, decode.acc_seg: 77.9459, aux.loss_ce: 0.2952, aux.acc_seg: 73.8842, loss: 0.8984, grad_norm: 7.3193 2023-02-11 19:21:28,271 - mmseg - INFO - Iter 
[19500/160000] lr: 5.269e-05, eta: 9:15:37, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6000, decode.acc_seg: 77.7178, aux.loss_ce: 0.2920, aux.acc_seg: 73.5155, loss: 0.8921, grad_norm: 7.1859 2023-02-11 19:21:38,028 - mmseg - INFO - Iter [19550/160000] lr: 5.267e-05, eta: 9:14:01, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6340, decode.acc_seg: 77.5540, aux.loss_ce: 0.3025, aux.acc_seg: 73.4355, loss: 0.9366, grad_norm: 7.7407 2023-02-11 19:21:47,896 - mmseg - INFO - Iter [19600/160000] lr: 5.265e-05, eta: 9:12:33, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5745, decode.acc_seg: 78.4491, aux.loss_ce: 0.2805, aux.acc_seg: 74.0161, loss: 0.8550, grad_norm: 7.9413 2023-02-11 19:21:57,988 - mmseg - INFO - Iter [19650/160000] lr: 5.263e-05, eta: 9:11:15, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6098, decode.acc_seg: 77.9812, aux.loss_ce: 0.2932, aux.acc_seg: 73.7529, loss: 0.9030, grad_norm: 6.8676 2023-02-11 19:22:08,276 - mmseg - INFO - Iter [19700/160000] lr: 5.261e-05, eta: 9:10:07, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6033, decode.acc_seg: 77.9790, aux.loss_ce: 0.2914, aux.acc_seg: 73.6734, loss: 0.8946, grad_norm: 6.9431 2023-02-11 19:22:18,578 - mmseg - INFO - Iter [19750/160000] lr: 5.259e-05, eta: 9:09:00, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6114, decode.acc_seg: 77.8718, aux.loss_ce: 0.3069, aux.acc_seg: 72.6848, loss: 0.9182, grad_norm: 7.7937 2023-02-11 19:22:31,230 - mmseg - INFO - Iter [19800/160000] lr: 5.258e-05, eta: 9:09:22, time: 0.253, data_time: 0.046, memory: 7748, decode.loss_ce: 0.5882, decode.acc_seg: 78.1934, aux.loss_ce: 0.2840, aux.acc_seg: 74.0939, loss: 0.8722, grad_norm: 7.4310 2023-02-11 19:22:41,217 - mmseg - INFO - Iter [19850/160000] lr: 5.256e-05, eta: 9:08:06, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5627, decode.acc_seg: 78.9857, aux.loss_ce: 0.2898, aux.acc_seg: 73.6219, loss: 0.8525, grad_norm: 6.4849 2023-02-11 19:22:51,491 - mmseg - INFO - Iter [19900/160000] lr: 5.254e-05, eta: 9:07:01, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5800, decode.acc_seg: 78.5132, aux.loss_ce: 0.2881, aux.acc_seg: 74.1131, loss: 0.8682, grad_norm: 6.4769 2023-02-11 19:23:02,516 - mmseg - INFO - Iter [19950/160000] lr: 5.252e-05, eta: 9:06:25, time: 0.221, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5950, decode.acc_seg: 77.9887, aux.loss_ce: 0.2846, aux.acc_seg: 74.2495, loss: 0.8796, grad_norm: 8.0935 2023-02-11 19:23:12,718 - mmseg - INFO - Saving checkpoint at 20000 iterations 2023-02-11 19:23:13,397 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:23:13,397 - mmseg - INFO - Iter [20000/160000] lr: 5.250e-05, eta: 9:05:45, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6116, decode.acc_seg: 77.5640, aux.loss_ce: 0.3001, aux.acc_seg: 73.0021, loss: 0.9117, grad_norm: 7.0256 2023-02-11 19:23:23,127 - mmseg - INFO - Iter [20050/160000] lr: 5.248e-05, eta: 9:04:25, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6033, decode.acc_seg: 78.5133, aux.loss_ce: 0.2957, aux.acc_seg: 73.9671, loss: 0.8990, grad_norm: 7.0440 2023-02-11 19:23:33,136 - mmseg - INFO - Iter [20100/160000] lr: 5.246e-05, eta: 9:03:17, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6117, decode.acc_seg: 78.2057, aux.loss_ce: 0.3066, aux.acc_seg: 73.3380, loss: 0.9184, grad_norm: 7.1605 2023-02-11 19:23:43,559 - mmseg - INFO - Iter 
[20150/160000] lr: 5.244e-05, eta: 9:02:24, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5643, decode.acc_seg: 79.7557, aux.loss_ce: 0.2830, aux.acc_seg: 75.1307, loss: 0.8473, grad_norm: 6.3157 2023-02-11 19:23:53,652 - mmseg - INFO - Iter [20200/160000] lr: 5.243e-05, eta: 9:01:21, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5768, decode.acc_seg: 78.3930, aux.loss_ce: 0.2831, aux.acc_seg: 74.1148, loss: 0.8599, grad_norm: 7.1468 2023-02-11 19:24:04,007 - mmseg - INFO - Iter [20250/160000] lr: 5.241e-05, eta: 9:00:28, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6134, decode.acc_seg: 77.5248, aux.loss_ce: 0.2982, aux.acc_seg: 73.7307, loss: 0.9116, grad_norm: 7.9287 2023-02-11 19:24:14,446 - mmseg - INFO - Iter [20300/160000] lr: 5.239e-05, eta: 8:59:39, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5952, decode.acc_seg: 78.1898, aux.loss_ce: 0.2883, aux.acc_seg: 74.1612, loss: 0.8835, grad_norm: 6.9841 2023-02-11 19:24:24,545 - mmseg - INFO - Iter [20350/160000] lr: 5.237e-05, eta: 8:58:39, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5732, decode.acc_seg: 79.0658, aux.loss_ce: 0.2854, aux.acc_seg: 74.6668, loss: 0.8586, grad_norm: 6.3402 2023-02-11 19:24:34,511 - mmseg - INFO - Iter [20400/160000] lr: 5.235e-05, eta: 8:57:37, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5957, decode.acc_seg: 78.0652, aux.loss_ce: 0.2915, aux.acc_seg: 73.5553, loss: 0.8872, grad_norm: 6.9138 2023-02-11 19:24:45,360 - mmseg - INFO - Iter [20450/160000] lr: 5.233e-05, eta: 8:57:03, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6409, decode.acc_seg: 77.6139, aux.loss_ce: 0.3127, aux.acc_seg: 72.6545, loss: 0.9536, grad_norm: 7.4404 2023-02-11 19:24:56,004 - mmseg - INFO - Iter [20500/160000] lr: 5.231e-05, eta: 8:56:23, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6132, decode.acc_seg: 77.6821, aux.loss_ce: 0.2963, aux.acc_seg: 73.5321, loss: 0.9095, grad_norm: 7.4831 2023-02-11 19:25:06,122 - mmseg - INFO - Iter [20550/160000] lr: 5.229e-05, eta: 8:55:29, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5675, decode.acc_seg: 79.2498, aux.loss_ce: 0.2819, aux.acc_seg: 74.1263, loss: 0.8494, grad_norm: 6.9215 2023-02-11 19:25:15,854 - mmseg - INFO - Iter [20600/160000] lr: 5.228e-05, eta: 8:54:22, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5995, decode.acc_seg: 78.5828, aux.loss_ce: 0.2956, aux.acc_seg: 74.1710, loss: 0.8951, grad_norm: 7.0883 2023-02-11 19:25:25,846 - mmseg - INFO - Iter [20650/160000] lr: 5.226e-05, eta: 8:53:26, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5948, decode.acc_seg: 78.7664, aux.loss_ce: 0.2986, aux.acc_seg: 73.5962, loss: 0.8935, grad_norm: 6.4250 2023-02-11 19:25:35,849 - mmseg - INFO - Iter [20700/160000] lr: 5.224e-05, eta: 8:52:31, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5892, decode.acc_seg: 78.7963, aux.loss_ce: 0.2900, aux.acc_seg: 74.3161, loss: 0.8792, grad_norm: 6.3005 2023-02-11 19:25:45,868 - mmseg - INFO - Iter [20750/160000] lr: 5.222e-05, eta: 8:51:37, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6139, decode.acc_seg: 77.3065, aux.loss_ce: 0.2990, aux.acc_seg: 72.3650, loss: 0.9130, grad_norm: 7.4010 2023-02-11 19:25:56,442 - mmseg - INFO - Iter [20800/160000] lr: 5.220e-05, eta: 8:51:00, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5670, decode.acc_seg: 79.0938, aux.loss_ce: 0.2872, aux.acc_seg: 74.3299, loss: 
0.8543, grad_norm: 7.0998 2023-02-11 19:26:06,603 - mmseg - INFO - Iter [20850/160000] lr: 5.218e-05, eta: 8:50:12, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6101, decode.acc_seg: 78.4781, aux.loss_ce: 0.2958, aux.acc_seg: 74.1554, loss: 0.9058, grad_norm: 6.9332 2023-02-11 19:26:16,590 - mmseg - INFO - Iter [20900/160000] lr: 5.216e-05, eta: 8:49:19, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5812, decode.acc_seg: 78.8513, aux.loss_ce: 0.2884, aux.acc_seg: 74.2085, loss: 0.8696, grad_norm: 7.5585 2023-02-11 19:26:26,800 - mmseg - INFO - Iter [20950/160000] lr: 5.214e-05, eta: 8:48:34, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6042, decode.acc_seg: 77.3436, aux.loss_ce: 0.2990, aux.acc_seg: 72.5854, loss: 0.9032, grad_norm: 7.3870 2023-02-11 19:26:36,861 - mmseg - INFO - Saving checkpoint at 21000 iterations 2023-02-11 19:26:37,553 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:26:37,554 - mmseg - INFO - Iter [21000/160000] lr: 5.213e-05, eta: 8:48:04, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5988, decode.acc_seg: 78.3457, aux.loss_ce: 0.2973, aux.acc_seg: 73.5135, loss: 0.8960, grad_norm: 7.9171 2023-02-11 19:26:47,996 - mmseg - INFO - Iter [21050/160000] lr: 5.211e-05, eta: 8:47:27, time: 0.209, data_time: 0.003, memory: 7748, decode.loss_ce: 0.6125, decode.acc_seg: 78.5398, aux.loss_ce: 0.3078, aux.acc_seg: 73.5603, loss: 0.9203, grad_norm: 6.8525 2023-02-11 19:27:00,436 - mmseg - INFO - Iter [21100/160000] lr: 5.209e-05, eta: 8:47:44, time: 0.249, data_time: 0.047, memory: 7748, decode.loss_ce: 0.5870, decode.acc_seg: 78.2713, aux.loss_ce: 0.2952, aux.acc_seg: 73.2061, loss: 0.8822, grad_norm: 7.4432 2023-02-11 19:27:10,748 - mmseg - INFO - Iter [21150/160000] lr: 5.207e-05, eta: 8:47:03, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6090, decode.acc_seg: 78.4059, aux.loss_ce: 0.2966, aux.acc_seg: 73.6071, loss: 0.9056, grad_norm: 7.9466 2023-02-11 19:27:20,695 - mmseg - INFO - Iter [21200/160000] lr: 5.205e-05, eta: 8:46:13, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5557, decode.acc_seg: 79.7343, aux.loss_ce: 0.2799, aux.acc_seg: 74.4836, loss: 0.8356, grad_norm: 7.7211 2023-02-11 19:27:30,637 - mmseg - INFO - Iter [21250/160000] lr: 5.203e-05, eta: 8:45:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5760, decode.acc_seg: 78.8786, aux.loss_ce: 0.2853, aux.acc_seg: 73.9427, loss: 0.8613, grad_norm: 6.1369 2023-02-11 19:27:41,212 - mmseg - INFO - Iter [21300/160000] lr: 5.201e-05, eta: 8:44:52, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5523, decode.acc_seg: 79.3911, aux.loss_ce: 0.2769, aux.acc_seg: 74.3726, loss: 0.8292, grad_norm: 6.5727 2023-02-11 19:27:51,570 - mmseg - INFO - Iter [21350/160000] lr: 5.199e-05, eta: 8:44:15, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5388, decode.acc_seg: 80.2082, aux.loss_ce: 0.2720, aux.acc_seg: 75.2672, loss: 0.8109, grad_norm: 6.5506 2023-02-11 19:28:01,363 - mmseg - INFO - Iter [21400/160000] lr: 5.198e-05, eta: 8:43:24, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5651, decode.acc_seg: 79.3834, aux.loss_ce: 0.2790, aux.acc_seg: 75.2727, loss: 0.8441, grad_norm: 6.8431 2023-02-11 19:28:11,278 - mmseg - INFO - Iter [21450/160000] lr: 5.196e-05, eta: 8:42:36, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5419, decode.acc_seg: 79.1603, aux.loss_ce: 0.2729, aux.acc_seg: 74.9293, loss: 0.8148, 
grad_norm: 6.3995 2023-02-11 19:28:22,073 - mmseg - INFO - Iter [21500/160000] lr: 5.194e-05, eta: 8:42:12, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5848, decode.acc_seg: 78.7367, aux.loss_ce: 0.2893, aux.acc_seg: 73.7774, loss: 0.8741, grad_norm: 6.7223 2023-02-11 19:28:31,823 - mmseg - INFO - Iter [21550/160000] lr: 5.192e-05, eta: 8:41:22, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5522, decode.acc_seg: 79.2829, aux.loss_ce: 0.2736, aux.acc_seg: 75.3195, loss: 0.8258, grad_norm: 6.6486 2023-02-11 19:28:41,917 - mmseg - INFO - Iter [21600/160000] lr: 5.190e-05, eta: 8:40:41, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5567, decode.acc_seg: 79.6504, aux.loss_ce: 0.2817, aux.acc_seg: 74.3847, loss: 0.8384, grad_norm: 7.0221 2023-02-11 19:28:52,455 - mmseg - INFO - Iter [21650/160000] lr: 5.188e-05, eta: 8:40:11, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5986, decode.acc_seg: 78.2736, aux.loss_ce: 0.2952, aux.acc_seg: 73.5574, loss: 0.8938, grad_norm: 6.9239 2023-02-11 19:29:02,386 - mmseg - INFO - Iter [21700/160000] lr: 5.186e-05, eta: 8:39:27, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5754, decode.acc_seg: 79.1910, aux.loss_ce: 0.2837, aux.acc_seg: 74.4046, loss: 0.8591, grad_norm: 6.8359 2023-02-11 19:29:12,373 - mmseg - INFO - Iter [21750/160000] lr: 5.184e-05, eta: 8:38:45, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6021, decode.acc_seg: 77.8876, aux.loss_ce: 0.3015, aux.acc_seg: 73.0622, loss: 0.9036, grad_norm: 7.1894 2023-02-11 19:29:22,843 - mmseg - INFO - Iter [21800/160000] lr: 5.183e-05, eta: 8:38:15, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5845, decode.acc_seg: 78.6256, aux.loss_ce: 0.2940, aux.acc_seg: 73.8162, loss: 0.8785, grad_norm: 6.5468 2023-02-11 19:29:33,471 - mmseg - INFO - Iter [21850/160000] lr: 5.181e-05, eta: 8:37:49, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5651, decode.acc_seg: 79.3673, aux.loss_ce: 0.2820, aux.acc_seg: 74.7769, loss: 0.8471, grad_norm: 7.8567 2023-02-11 19:29:43,912 - mmseg - INFO - Iter [21900/160000] lr: 5.179e-05, eta: 8:37:19, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5687, decode.acc_seg: 78.8042, aux.loss_ce: 0.2840, aux.acc_seg: 73.7316, loss: 0.8527, grad_norm: 7.0541 2023-02-11 19:29:53,937 - mmseg - INFO - Iter [21950/160000] lr: 5.177e-05, eta: 8:36:39, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5972, decode.acc_seg: 78.8822, aux.loss_ce: 0.2887, aux.acc_seg: 74.6038, loss: 0.8859, grad_norm: 6.9219 2023-02-11 19:30:04,256 - mmseg - INFO - Saving checkpoint at 22000 iterations 2023-02-11 19:30:04,976 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:30:04,976 - mmseg - INFO - Iter [22000/160000] lr: 5.175e-05, eta: 8:36:24, time: 0.221, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5755, decode.acc_seg: 78.5638, aux.loss_ce: 0.2810, aux.acc_seg: 74.0253, loss: 0.8564, grad_norm: 6.7686 2023-02-11 19:30:14,995 - mmseg - INFO - Iter [22050/160000] lr: 5.173e-05, eta: 8:35:46, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5587, decode.acc_seg: 79.6040, aux.loss_ce: 0.2832, aux.acc_seg: 74.7373, loss: 0.8419, grad_norm: 7.2057 2023-02-11 19:30:25,109 - mmseg - INFO - Iter [22100/160000] lr: 5.171e-05, eta: 8:35:09, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5931, decode.acc_seg: 78.5811, aux.loss_ce: 0.2936, aux.acc_seg: 73.8377, loss: 0.8867, grad_norm: 
6.7572 2023-02-11 19:30:34,856 - mmseg - INFO - Iter [22150/160000] lr: 5.169e-05, eta: 8:34:26, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5928, decode.acc_seg: 78.4621, aux.loss_ce: 0.2979, aux.acc_seg: 73.2471, loss: 0.8907, grad_norm: 6.7509 2023-02-11 19:30:45,454 - mmseg - INFO - Iter [22200/160000] lr: 5.168e-05, eta: 8:34:01, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5789, decode.acc_seg: 78.7470, aux.loss_ce: 0.2863, aux.acc_seg: 74.3697, loss: 0.8652, grad_norm: 6.2794 2023-02-11 19:30:55,746 - mmseg - INFO - Iter [22250/160000] lr: 5.166e-05, eta: 8:33:30, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5816, decode.acc_seg: 78.7584, aux.loss_ce: 0.2888, aux.acc_seg: 74.4167, loss: 0.8704, grad_norm: 7.7179 2023-02-11 19:31:06,127 - mmseg - INFO - Iter [22300/160000] lr: 5.164e-05, eta: 8:33:01, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.6122, decode.acc_seg: 77.8860, aux.loss_ce: 0.3064, aux.acc_seg: 73.0255, loss: 0.9186, grad_norm: 7.5107 2023-02-11 19:31:18,112 - mmseg - INFO - Iter [22350/160000] lr: 5.162e-05, eta: 8:33:08, time: 0.240, data_time: 0.047, memory: 7748, decode.loss_ce: 0.5628, decode.acc_seg: 79.4883, aux.loss_ce: 0.2928, aux.acc_seg: 73.8725, loss: 0.8556, grad_norm: 8.7115 2023-02-11 19:31:27,992 - mmseg - INFO - Iter [22400/160000] lr: 5.160e-05, eta: 8:32:28, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5520, decode.acc_seg: 78.8537, aux.loss_ce: 0.2805, aux.acc_seg: 73.9225, loss: 0.8325, grad_norm: 6.6830 2023-02-11 19:31:37,988 - mmseg - INFO - Iter [22450/160000] lr: 5.158e-05, eta: 8:31:52, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5542, decode.acc_seg: 79.8463, aux.loss_ce: 0.2794, aux.acc_seg: 74.7094, loss: 0.8336, grad_norm: 6.8464 2023-02-11 19:31:48,410 - mmseg - INFO - Iter [22500/160000] lr: 5.156e-05, eta: 8:31:25, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5521, decode.acc_seg: 80.2720, aux.loss_ce: 0.2822, aux.acc_seg: 74.9476, loss: 0.8342, grad_norm: 6.7575 2023-02-11 19:31:59,121 - mmseg - INFO - Iter [22550/160000] lr: 5.154e-05, eta: 8:31:05, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5402, decode.acc_seg: 79.5187, aux.loss_ce: 0.2752, aux.acc_seg: 74.3793, loss: 0.8153, grad_norm: 6.6215 2023-02-11 19:32:08,846 - mmseg - INFO - Iter [22600/160000] lr: 5.153e-05, eta: 8:30:24, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5785, decode.acc_seg: 79.0517, aux.loss_ce: 0.2891, aux.acc_seg: 74.0193, loss: 0.8676, grad_norm: 6.2982 2023-02-11 19:32:18,800 - mmseg - INFO - Iter [22650/160000] lr: 5.151e-05, eta: 8:29:48, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5520, decode.acc_seg: 79.9309, aux.loss_ce: 0.2779, aux.acc_seg: 75.0725, loss: 0.8299, grad_norm: 7.6993 2023-02-11 19:32:29,012 - mmseg - INFO - Iter [22700/160000] lr: 5.149e-05, eta: 8:29:18, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5503, decode.acc_seg: 79.6498, aux.loss_ce: 0.2776, aux.acc_seg: 74.5283, loss: 0.8279, grad_norm: 7.4900 2023-02-11 19:32:39,072 - mmseg - INFO - Iter [22750/160000] lr: 5.147e-05, eta: 8:28:45, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5541, decode.acc_seg: 79.4057, aux.loss_ce: 0.2845, aux.acc_seg: 74.3978, loss: 0.8385, grad_norm: 7.5794 2023-02-11 19:32:48,880 - mmseg - INFO - Iter [22800/160000] lr: 5.145e-05, eta: 8:28:08, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5928, decode.acc_seg: 
78.2257, aux.loss_ce: 0.3030, aux.acc_seg: 72.4277, loss: 0.8958, grad_norm: 7.6305 2023-02-11 19:32:58,984 - mmseg - INFO - Iter [22850/160000] lr: 5.143e-05, eta: 8:27:36, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5574, decode.acc_seg: 79.5946, aux.loss_ce: 0.2849, aux.acc_seg: 74.4509, loss: 0.8423, grad_norm: 6.2900 2023-02-11 19:33:09,127 - mmseg - INFO - Iter [22900/160000] lr: 5.141e-05, eta: 8:27:06, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5709, decode.acc_seg: 78.8536, aux.loss_ce: 0.2832, aux.acc_seg: 74.1716, loss: 0.8542, grad_norm: 7.0983 2023-02-11 19:33:18,983 - mmseg - INFO - Iter [22950/160000] lr: 5.139e-05, eta: 8:26:31, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5314, decode.acc_seg: 80.1099, aux.loss_ce: 0.2682, aux.acc_seg: 75.3289, loss: 0.7995, grad_norm: 6.5333 2023-02-11 19:33:28,936 - mmseg - INFO - Saving checkpoint at 23000 iterations 2023-02-11 19:33:29,611 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:33:29,611 - mmseg - INFO - Iter [23000/160000] lr: 5.138e-05, eta: 8:26:10, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5669, decode.acc_seg: 79.4736, aux.loss_ce: 0.2850, aux.acc_seg: 74.1742, loss: 0.8519, grad_norm: 6.6941 2023-02-11 19:33:39,375 - mmseg - INFO - Iter [23050/160000] lr: 5.136e-05, eta: 8:25:34, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5440, decode.acc_seg: 79.9566, aux.loss_ce: 0.2792, aux.acc_seg: 74.9771, loss: 0.8232, grad_norm: 6.8392 2023-02-11 19:33:49,230 - mmseg - INFO - Iter [23100/160000] lr: 5.134e-05, eta: 8:24:59, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5926, decode.acc_seg: 79.0233, aux.loss_ce: 0.2910, aux.acc_seg: 74.2832, loss: 0.8836, grad_norm: 7.6568 2023-02-11 19:33:59,518 - mmseg - INFO - Iter [23150/160000] lr: 5.132e-05, eta: 8:24:33, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5654, decode.acc_seg: 79.5361, aux.loss_ce: 0.2864, aux.acc_seg: 74.2027, loss: 0.8518, grad_norm: 7.0843 2023-02-11 19:34:09,214 - mmseg - INFO - Iter [23200/160000] lr: 5.130e-05, eta: 8:23:56, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5622, decode.acc_seg: 79.2997, aux.loss_ce: 0.2854, aux.acc_seg: 74.2850, loss: 0.8477, grad_norm: 6.9802 2023-02-11 19:34:19,004 - mmseg - INFO - Iter [23250/160000] lr: 5.128e-05, eta: 8:23:21, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5679, decode.acc_seg: 79.7750, aux.loss_ce: 0.2880, aux.acc_seg: 74.6344, loss: 0.8559, grad_norm: 6.5578 2023-02-11 19:34:29,281 - mmseg - INFO - Iter [23300/160000] lr: 5.126e-05, eta: 8:22:56, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5707, decode.acc_seg: 78.3658, aux.loss_ce: 0.2922, aux.acc_seg: 72.7522, loss: 0.8629, grad_norm: 7.3801 2023-02-11 19:34:39,748 - mmseg - INFO - Iter [23350/160000] lr: 5.124e-05, eta: 8:22:34, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5647, decode.acc_seg: 79.4490, aux.loss_ce: 0.2864, aux.acc_seg: 73.7952, loss: 0.8511, grad_norm: 6.6339 2023-02-11 19:34:49,758 - mmseg - INFO - Iter [23400/160000] lr: 5.123e-05, eta: 8:22:04, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5703, decode.acc_seg: 79.7535, aux.loss_ce: 0.2834, aux.acc_seg: 75.1607, loss: 0.8537, grad_norm: 7.3060 2023-02-11 19:34:59,753 - mmseg - INFO - Iter [23450/160000] lr: 5.121e-05, eta: 8:21:34, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5874, decode.acc_seg: 78.8198, 
aux.loss_ce: 0.3031, aux.acc_seg: 72.9479, loss: 0.8904, grad_norm: 7.8619 2023-02-11 19:35:09,894 - mmseg - INFO - Iter [23500/160000] lr: 5.119e-05, eta: 8:21:07, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5627, decode.acc_seg: 79.3604, aux.loss_ce: 0.2871, aux.acc_seg: 74.1037, loss: 0.8498, grad_norm: 6.9633 2023-02-11 19:35:19,730 - mmseg - INFO - Iter [23550/160000] lr: 5.117e-05, eta: 8:20:35, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5635, decode.acc_seg: 79.3496, aux.loss_ce: 0.2806, aux.acc_seg: 74.5706, loss: 0.8441, grad_norm: 6.9217 2023-02-11 19:35:32,022 - mmseg - INFO - Iter [23600/160000] lr: 5.115e-05, eta: 8:20:47, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.5490, decode.acc_seg: 79.5621, aux.loss_ce: 0.2851, aux.acc_seg: 73.6925, loss: 0.8341, grad_norm: 6.8154 2023-02-11 19:35:41,822 - mmseg - INFO - Iter [23650/160000] lr: 5.113e-05, eta: 8:20:15, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5688, decode.acc_seg: 78.9850, aux.loss_ce: 0.2904, aux.acc_seg: 73.6904, loss: 0.8591, grad_norm: 7.6570 2023-02-11 19:35:52,036 - mmseg - INFO - Iter [23700/160000] lr: 5.111e-05, eta: 8:19:49, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5486, decode.acc_seg: 79.7841, aux.loss_ce: 0.2857, aux.acc_seg: 74.4602, loss: 0.8343, grad_norm: 7.9013 2023-02-11 19:36:02,044 - mmseg - INFO - Iter [23750/160000] lr: 5.109e-05, eta: 8:19:21, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5806, decode.acc_seg: 78.7737, aux.loss_ce: 0.2967, aux.acc_seg: 73.5073, loss: 0.8773, grad_norm: 6.9452 2023-02-11 19:36:12,641 - mmseg - INFO - Iter [23800/160000] lr: 5.108e-05, eta: 8:19:03, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5138, decode.acc_seg: 81.1539, aux.loss_ce: 0.2602, aux.acc_seg: 76.8288, loss: 0.7740, grad_norm: 7.2981 2023-02-11 19:36:22,720 - mmseg - INFO - Iter [23850/160000] lr: 5.106e-05, eta: 8:18:36, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5499, decode.acc_seg: 80.3656, aux.loss_ce: 0.2881, aux.acc_seg: 74.6727, loss: 0.8379, grad_norm: 6.5412 2023-02-11 19:36:32,675 - mmseg - INFO - Iter [23900/160000] lr: 5.104e-05, eta: 8:18:07, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5279, decode.acc_seg: 80.5919, aux.loss_ce: 0.2744, aux.acc_seg: 74.8340, loss: 0.8023, grad_norm: 7.8500 2023-02-11 19:36:43,016 - mmseg - INFO - Iter [23950/160000] lr: 5.102e-05, eta: 8:17:46, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5345, decode.acc_seg: 80.1508, aux.loss_ce: 0.2787, aux.acc_seg: 74.8007, loss: 0.8132, grad_norm: 6.7697 2023-02-11 19:36:52,985 - mmseg - INFO - Saving checkpoint at 24000 iterations 2023-02-11 19:36:53,763 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:36:53,764 - mmseg - INFO - Iter [24000/160000] lr: 5.100e-05, eta: 8:17:31, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5312, decode.acc_seg: 80.4533, aux.loss_ce: 0.2771, aux.acc_seg: 75.1388, loss: 0.8083, grad_norm: 6.4323 2023-02-11 19:37:03,468 - mmseg - INFO - Iter [24050/160000] lr: 5.098e-05, eta: 8:16:58, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5624, decode.acc_seg: 79.4284, aux.loss_ce: 0.2869, aux.acc_seg: 74.3161, loss: 0.8494, grad_norm: 7.2158 2023-02-11 19:37:13,292 - mmseg - INFO - Iter [24100/160000] lr: 5.096e-05, eta: 8:16:28, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5583, decode.acc_seg: 79.9628, 
aux.loss_ce: 0.2764, aux.acc_seg: 75.0500, loss: 0.8346, grad_norm: 7.6073 2023-02-11 19:37:23,315 - mmseg - INFO - Iter [24150/160000] lr: 5.094e-05, eta: 8:16:01, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5455, decode.acc_seg: 79.4161, aux.loss_ce: 0.2777, aux.acc_seg: 74.3887, loss: 0.8232, grad_norm: 6.3396 2023-02-11 19:37:33,994 - mmseg - INFO - Iter [24200/160000] lr: 5.093e-05, eta: 8:15:46, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5549, decode.acc_seg: 79.9109, aux.loss_ce: 0.2757, aux.acc_seg: 75.3557, loss: 0.8306, grad_norm: 7.2142 2023-02-11 19:37:43,909 - mmseg - INFO - Iter [24250/160000] lr: 5.091e-05, eta: 8:15:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5308, decode.acc_seg: 80.1435, aux.loss_ce: 0.2650, aux.acc_seg: 75.7881, loss: 0.7959, grad_norm: 6.2336 2023-02-11 19:37:54,415 - mmseg - INFO - Iter [24300/160000] lr: 5.089e-05, eta: 8:14:59, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5458, decode.acc_seg: 79.6403, aux.loss_ce: 0.2767, aux.acc_seg: 74.7117, loss: 0.8225, grad_norm: 6.5409 2023-02-11 19:38:04,435 - mmseg - INFO - Iter [24350/160000] lr: 5.087e-05, eta: 8:14:34, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5453, decode.acc_seg: 80.1822, aux.loss_ce: 0.2783, aux.acc_seg: 75.3826, loss: 0.8236, grad_norm: 6.8694 2023-02-11 19:38:14,664 - mmseg - INFO - Iter [24400/160000] lr: 5.085e-05, eta: 8:14:12, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5260, decode.acc_seg: 80.3563, aux.loss_ce: 0.2710, aux.acc_seg: 75.1909, loss: 0.7971, grad_norm: 7.7247 2023-02-11 19:38:24,589 - mmseg - INFO - Iter [24450/160000] lr: 5.083e-05, eta: 8:13:44, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5440, decode.acc_seg: 79.4915, aux.loss_ce: 0.2851, aux.acc_seg: 73.9321, loss: 0.8290, grad_norm: 6.9167 2023-02-11 19:38:34,720 - mmseg - INFO - Iter [24500/160000] lr: 5.081e-05, eta: 8:13:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5969, decode.acc_seg: 78.5955, aux.loss_ce: 0.2959, aux.acc_seg: 74.0499, loss: 0.8927, grad_norm: 8.3237 2023-02-11 19:38:45,004 - mmseg - INFO - Iter [24550/160000] lr: 5.079e-05, eta: 8:13:00, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5728, decode.acc_seg: 78.7601, aux.loss_ce: 0.2935, aux.acc_seg: 73.0305, loss: 0.8663, grad_norm: 7.2636 2023-02-11 19:38:55,030 - mmseg - INFO - Iter [24600/160000] lr: 5.078e-05, eta: 8:12:35, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5106, decode.acc_seg: 80.7605, aux.loss_ce: 0.2677, aux.acc_seg: 75.4145, loss: 0.7783, grad_norm: 6.0680 2023-02-11 19:39:05,132 - mmseg - INFO - Iter [24650/160000] lr: 5.076e-05, eta: 8:12:11, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5423, decode.acc_seg: 79.7497, aux.loss_ce: 0.2823, aux.acc_seg: 74.5093, loss: 0.8246, grad_norm: 6.5601 2023-02-11 19:39:15,051 - mmseg - INFO - Iter [24700/160000] lr: 5.074e-05, eta: 8:11:45, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5427, decode.acc_seg: 79.6832, aux.loss_ce: 0.2769, aux.acc_seg: 74.4680, loss: 0.8196, grad_norm: 6.9743 2023-02-11 19:39:24,842 - mmseg - INFO - Iter [24750/160000] lr: 5.072e-05, eta: 8:11:17, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5471, decode.acc_seg: 78.8902, aux.loss_ce: 0.2780, aux.acc_seg: 73.7062, loss: 0.8251, grad_norm: 6.3508 2023-02-11 19:39:35,292 - mmseg - INFO - Iter [24800/160000] lr: 5.070e-05, eta: 8:10:59, time: 0.209, 
data_time: 0.004, memory: 7748, decode.loss_ce: 0.5463, decode.acc_seg: 80.1673, aux.loss_ce: 0.2814, aux.acc_seg: 74.4451, loss: 0.8277, grad_norm: 7.1823 2023-02-11 19:39:48,214 - mmseg - INFO - Iter [24850/160000] lr: 5.068e-05, eta: 8:11:19, time: 0.258, data_time: 0.046, memory: 7748, decode.loss_ce: 0.5380, decode.acc_seg: 80.2700, aux.loss_ce: 0.2744, aux.acc_seg: 74.9120, loss: 0.8125, grad_norm: 6.9308 2023-02-11 19:39:58,218 - mmseg - INFO - Iter [24900/160000] lr: 5.066e-05, eta: 8:10:54, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5148, decode.acc_seg: 81.1004, aux.loss_ce: 0.2675, aux.acc_seg: 75.8536, loss: 0.7824, grad_norm: 6.7383 2023-02-11 19:40:08,187 - mmseg - INFO - Iter [24950/160000] lr: 5.064e-05, eta: 8:10:29, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5188, decode.acc_seg: 80.5131, aux.loss_ce: 0.2699, aux.acc_seg: 74.7747, loss: 0.7887, grad_norm: 7.8908 2023-02-11 19:40:18,277 - mmseg - INFO - Saving checkpoint at 25000 iterations 2023-02-11 19:40:18,970 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:40:18,971 - mmseg - INFO - Iter [25000/160000] lr: 5.063e-05, eta: 8:10:17, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5493, decode.acc_seg: 80.2600, aux.loss_ce: 0.2950, aux.acc_seg: 73.5690, loss: 0.8443, grad_norm: 7.4894 2023-02-11 19:40:29,180 - mmseg - INFO - Iter [25050/160000] lr: 5.061e-05, eta: 8:09:56, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5274, decode.acc_seg: 80.8189, aux.loss_ce: 0.2718, aux.acc_seg: 75.3571, loss: 0.7991, grad_norm: 6.9491 2023-02-11 19:40:39,399 - mmseg - INFO - Iter [25100/160000] lr: 5.059e-05, eta: 8:09:35, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5430, decode.acc_seg: 80.5531, aux.loss_ce: 0.2856, aux.acc_seg: 74.6932, loss: 0.8287, grad_norm: 9.9734 2023-02-11 19:40:50,167 - mmseg - INFO - Iter [25150/160000] lr: 5.057e-05, eta: 8:09:22, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5653, decode.acc_seg: 79.7210, aux.loss_ce: 0.2896, aux.acc_seg: 73.9124, loss: 0.8549, grad_norm: 7.9913 2023-02-11 19:41:01,127 - mmseg - INFO - Iter [25200/160000] lr: 5.055e-05, eta: 8:09:12, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5218, decode.acc_seg: 81.0030, aux.loss_ce: 0.2684, aux.acc_seg: 75.9008, loss: 0.7902, grad_norm: 6.8529 2023-02-11 19:41:11,554 - mmseg - INFO - Iter [25250/160000] lr: 5.053e-05, eta: 8:08:55, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5192, decode.acc_seg: 80.3326, aux.loss_ce: 0.2752, aux.acc_seg: 74.9305, loss: 0.7945, grad_norm: 7.3330 2023-02-11 19:41:21,883 - mmseg - INFO - Iter [25300/160000] lr: 5.051e-05, eta: 8:08:36, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5372, decode.acc_seg: 79.8127, aux.loss_ce: 0.2877, aux.acc_seg: 73.7570, loss: 0.8249, grad_norm: 7.7339 2023-02-11 19:41:31,912 - mmseg - INFO - Iter [25350/160000] lr: 5.049e-05, eta: 8:08:13, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5180, decode.acc_seg: 81.2003, aux.loss_ce: 0.2789, aux.acc_seg: 74.8844, loss: 0.7969, grad_norm: 6.5739 2023-02-11 19:41:41,669 - mmseg - INFO - Iter [25400/160000] lr: 5.048e-05, eta: 8:07:46, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5203, decode.acc_seg: 80.6506, aux.loss_ce: 0.2717, aux.acc_seg: 75.2224, loss: 0.7920, grad_norm: 6.5071 2023-02-11 19:41:51,557 - mmseg - INFO - Iter [25450/160000] lr: 5.046e-05, eta: 8:07:21, time: 0.198, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.5289, decode.acc_seg: 80.8650, aux.loss_ce: 0.2696, aux.acc_seg: 75.5109, loss: 0.7985, grad_norm: 6.6895 2023-02-11 19:42:01,966 - mmseg - INFO - Iter [25500/160000] lr: 5.044e-05, eta: 8:07:03, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5235, decode.acc_seg: 81.1735, aux.loss_ce: 0.2746, aux.acc_seg: 75.1988, loss: 0.7981, grad_norm: 8.1933 2023-02-11 19:42:12,492 - mmseg - INFO - Iter [25550/160000] lr: 5.042e-05, eta: 8:06:48, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5127, decode.acc_seg: 81.3955, aux.loss_ce: 0.2670, aux.acc_seg: 75.9189, loss: 0.7797, grad_norm: 6.4348 2023-02-11 19:42:22,807 - mmseg - INFO - Iter [25600/160000] lr: 5.040e-05, eta: 8:06:29, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5266, decode.acc_seg: 80.5013, aux.loss_ce: 0.2763, aux.acc_seg: 74.6432, loss: 0.8029, grad_norm: 6.7571 2023-02-11 19:42:32,873 - mmseg - INFO - Iter [25650/160000] lr: 5.038e-05, eta: 8:06:07, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5697, decode.acc_seg: 79.8317, aux.loss_ce: 0.2870, aux.acc_seg: 74.5012, loss: 0.8567, grad_norm: 7.2156 2023-02-11 19:42:43,065 - mmseg - INFO - Iter [25700/160000] lr: 5.036e-05, eta: 8:05:47, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5283, decode.acc_seg: 79.7262, aux.loss_ce: 0.2768, aux.acc_seg: 74.3338, loss: 0.8051, grad_norm: 6.4796 2023-02-11 19:42:53,144 - mmseg - INFO - Iter [25750/160000] lr: 5.034e-05, eta: 8:05:26, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5076, decode.acc_seg: 80.6285, aux.loss_ce: 0.2609, aux.acc_seg: 75.5764, loss: 0.7685, grad_norm: 7.1651 2023-02-11 19:43:03,249 - mmseg - INFO - Iter [25800/160000] lr: 5.033e-05, eta: 8:05:05, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5285, decode.acc_seg: 80.7175, aux.loss_ce: 0.2790, aux.acc_seg: 75.0196, loss: 0.8074, grad_norm: 7.4790 2023-02-11 19:43:13,368 - mmseg - INFO - Iter [25850/160000] lr: 5.031e-05, eta: 8:04:44, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5618, decode.acc_seg: 79.4667, aux.loss_ce: 0.2926, aux.acc_seg: 73.4008, loss: 0.8544, grad_norm: 7.1394 2023-02-11 19:43:23,505 - mmseg - INFO - Iter [25900/160000] lr: 5.029e-05, eta: 8:04:24, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5627, decode.acc_seg: 79.6928, aux.loss_ce: 0.2894, aux.acc_seg: 74.3020, loss: 0.8520, grad_norm: 7.5452 2023-02-11 19:43:33,295 - mmseg - INFO - Iter [25950/160000] lr: 5.027e-05, eta: 8:03:59, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5419, decode.acc_seg: 80.0975, aux.loss_ce: 0.2755, aux.acc_seg: 75.1410, loss: 0.8175, grad_norm: 6.7378 2023-02-11 19:43:43,152 - mmseg - INFO - Saving checkpoint at 26000 iterations 2023-02-11 19:43:43,841 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:43:43,841 - mmseg - INFO - Iter [26000/160000] lr: 5.025e-05, eta: 8:03:44, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5381, decode.acc_seg: 80.6047, aux.loss_ce: 0.2813, aux.acc_seg: 74.8920, loss: 0.8194, grad_norm: 6.9436 2023-02-11 19:43:53,758 - mmseg - INFO - Iter [26050/160000] lr: 5.023e-05, eta: 8:03:21, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5319, decode.acc_seg: 80.1929, aux.loss_ce: 0.2753, aux.acc_seg: 74.7246, loss: 0.8072, grad_norm: 6.4084 2023-02-11 19:44:04,109 - mmseg - INFO - Iter [26100/160000] lr: 5.021e-05, eta: 8:03:04, time: 0.207, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.5560, decode.acc_seg: 80.1550, aux.loss_ce: 0.2898, aux.acc_seg: 74.4173, loss: 0.8459, grad_norm: 6.5359 2023-02-11 19:44:16,141 - mmseg - INFO - Iter [26150/160000] lr: 5.019e-05, eta: 8:03:09, time: 0.241, data_time: 0.048, memory: 7748, decode.loss_ce: 0.5178, decode.acc_seg: 80.8777, aux.loss_ce: 0.2770, aux.acc_seg: 74.5004, loss: 0.7948, grad_norm: 6.6269 2023-02-11 19:44:26,300 - mmseg - INFO - Iter [26200/160000] lr: 5.018e-05, eta: 8:02:50, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5055, decode.acc_seg: 80.8649, aux.loss_ce: 0.2646, aux.acc_seg: 75.4913, loss: 0.7701, grad_norm: 7.1845 2023-02-11 19:44:36,082 - mmseg - INFO - Iter [26250/160000] lr: 5.016e-05, eta: 8:02:25, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5398, decode.acc_seg: 80.8513, aux.loss_ce: 0.2859, aux.acc_seg: 74.6925, loss: 0.8256, grad_norm: 7.2011 2023-02-11 19:44:46,220 - mmseg - INFO - Iter [26300/160000] lr: 5.014e-05, eta: 8:02:05, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5360, decode.acc_seg: 79.9741, aux.loss_ce: 0.2864, aux.acc_seg: 73.6417, loss: 0.8224, grad_norm: 7.3003 2023-02-11 19:44:56,122 - mmseg - INFO - Iter [26350/160000] lr: 5.012e-05, eta: 8:01:43, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5179, decode.acc_seg: 80.9652, aux.loss_ce: 0.2819, aux.acc_seg: 74.7171, loss: 0.7999, grad_norm: 7.5127 2023-02-11 19:45:06,054 - mmseg - INFO - Iter [26400/160000] lr: 5.010e-05, eta: 8:01:21, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5390, decode.acc_seg: 81.3114, aux.loss_ce: 0.2923, aux.acc_seg: 74.5650, loss: 0.8312, grad_norm: 6.8453 2023-02-11 19:45:16,094 - mmseg - INFO - Iter [26450/160000] lr: 5.008e-05, eta: 8:01:00, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5016, decode.acc_seg: 81.4341, aux.loss_ce: 0.2625, aux.acc_seg: 76.1672, loss: 0.7640, grad_norm: 6.2631 2023-02-11 19:45:26,326 - mmseg - INFO - Iter [26500/160000] lr: 5.006e-05, eta: 8:00:42, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5161, decode.acc_seg: 81.2102, aux.loss_ce: 0.2690, aux.acc_seg: 75.5148, loss: 0.7851, grad_norm: 6.8595 2023-02-11 19:45:36,376 - mmseg - INFO - Iter [26550/160000] lr: 5.004e-05, eta: 8:00:22, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5048, decode.acc_seg: 81.5743, aux.loss_ce: 0.2711, aux.acc_seg: 75.6147, loss: 0.7759, grad_norm: 6.8444 2023-02-11 19:45:46,408 - mmseg - INFO - Iter [26600/160000] lr: 5.003e-05, eta: 8:00:01, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5250, decode.acc_seg: 81.2599, aux.loss_ce: 0.2769, aux.acc_seg: 75.3525, loss: 0.8019, grad_norm: 6.3048 2023-02-11 19:45:56,855 - mmseg - INFO - Iter [26650/160000] lr: 5.001e-05, eta: 7:59:46, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4881, decode.acc_seg: 81.5218, aux.loss_ce: 0.2615, aux.acc_seg: 75.6043, loss: 0.7496, grad_norm: 6.4386 2023-02-11 19:46:06,533 - mmseg - INFO - Iter [26700/160000] lr: 4.999e-05, eta: 7:59:21, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5308, decode.acc_seg: 80.9953, aux.loss_ce: 0.2763, aux.acc_seg: 75.6557, loss: 0.8071, grad_norm: 6.9767 2023-02-11 19:46:16,450 - mmseg - INFO - Iter [26750/160000] lr: 4.997e-05, eta: 7:59:00, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5220, decode.acc_seg: 80.8235, aux.loss_ce: 0.2729, aux.acc_seg: 75.6652, loss: 0.7949, grad_norm: 7.1205 2023-02-11 19:46:26,746 - mmseg - INFO - Iter 
[26800/160000] lr: 4.995e-05, eta: 7:58:43, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5324, decode.acc_seg: 80.7184, aux.loss_ce: 0.2777, aux.acc_seg: 75.2336, loss: 0.8101, grad_norm: 6.9636 2023-02-11 19:46:36,851 - mmseg - INFO - Iter [26850/160000] lr: 4.993e-05, eta: 7:58:24, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5016, decode.acc_seg: 80.9253, aux.loss_ce: 0.2695, aux.acc_seg: 74.9720, loss: 0.7711, grad_norm: 7.4154 2023-02-11 19:46:47,031 - mmseg - INFO - Iter [26900/160000] lr: 4.991e-05, eta: 7:58:05, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4839, decode.acc_seg: 82.3247, aux.loss_ce: 0.2583, aux.acc_seg: 76.2490, loss: 0.7422, grad_norm: 6.5791 2023-02-11 19:46:57,609 - mmseg - INFO - Iter [26950/160000] lr: 4.989e-05, eta: 7:57:53, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5460, decode.acc_seg: 80.0848, aux.loss_ce: 0.2892, aux.acc_seg: 73.8873, loss: 0.8353, grad_norm: 7.5640 2023-02-11 19:47:07,772 - mmseg - INFO - Saving checkpoint at 27000 iterations 2023-02-11 19:47:08,454 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:47:08,454 - mmseg - INFO - Iter [27000/160000] lr: 4.988e-05, eta: 7:57:43, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5330, decode.acc_seg: 80.3766, aux.loss_ce: 0.2763, aux.acc_seg: 74.4066, loss: 0.8093, grad_norm: 7.5386 2023-02-11 19:47:18,516 - mmseg - INFO - Iter [27050/160000] lr: 4.986e-05, eta: 7:57:23, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5372, decode.acc_seg: 80.7285, aux.loss_ce: 0.2794, aux.acc_seg: 75.1575, loss: 0.8166, grad_norm: 7.3371 2023-02-11 19:47:28,613 - mmseg - INFO - Iter [27100/160000] lr: 4.984e-05, eta: 7:57:04, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5025, decode.acc_seg: 81.3758, aux.loss_ce: 0.2730, aux.acc_seg: 75.4531, loss: 0.7755, grad_norm: 6.5210 2023-02-11 19:47:38,855 - mmseg - INFO - Iter [27150/160000] lr: 4.982e-05, eta: 7:56:47, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4893, decode.acc_seg: 82.0429, aux.loss_ce: 0.2623, aux.acc_seg: 75.9716, loss: 0.7517, grad_norm: 7.5373 2023-02-11 19:47:49,728 - mmseg - INFO - Iter [27200/160000] lr: 4.980e-05, eta: 7:56:38, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5195, decode.acc_seg: 81.1515, aux.loss_ce: 0.2708, aux.acc_seg: 75.6500, loss: 0.7903, grad_norm: 6.7154 2023-02-11 19:47:59,537 - mmseg - INFO - Iter [27250/160000] lr: 4.978e-05, eta: 7:56:16, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5256, decode.acc_seg: 80.9784, aux.loss_ce: 0.2855, aux.acc_seg: 74.6659, loss: 0.8111, grad_norm: 8.4471 2023-02-11 19:48:09,668 - mmseg - INFO - Iter [27300/160000] lr: 4.976e-05, eta: 7:55:58, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4975, decode.acc_seg: 81.5783, aux.loss_ce: 0.2631, aux.acc_seg: 75.8123, loss: 0.7606, grad_norm: 6.2945 2023-02-11 19:48:19,817 - mmseg - INFO - Iter [27350/160000] lr: 4.974e-05, eta: 7:55:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5219, decode.acc_seg: 79.9410, aux.loss_ce: 0.2721, aux.acc_seg: 74.0833, loss: 0.7940, grad_norm: 7.1744 2023-02-11 19:48:31,696 - mmseg - INFO - Iter [27400/160000] lr: 4.973e-05, eta: 7:55:42, time: 0.238, data_time: 0.047, memory: 7748, decode.loss_ce: 0.5097, decode.acc_seg: 81.6633, aux.loss_ce: 0.2722, aux.acc_seg: 75.5288, loss: 0.7820, grad_norm: 6.8241 2023-02-11 19:48:41,787 - mmseg - INFO - Iter 
[27450/160000] lr: 4.971e-05, eta: 7:55:23, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5029, decode.acc_seg: 81.8040, aux.loss_ce: 0.2703, aux.acc_seg: 75.5462, loss: 0.7732, grad_norm: 6.7447 2023-02-11 19:48:51,978 - mmseg - INFO - Iter [27500/160000] lr: 4.969e-05, eta: 7:55:06, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4953, decode.acc_seg: 81.3653, aux.loss_ce: 0.2655, aux.acc_seg: 75.5739, loss: 0.7609, grad_norm: 6.0632 2023-02-11 19:49:01,762 - mmseg - INFO - Iter [27550/160000] lr: 4.967e-05, eta: 7:54:44, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4952, decode.acc_seg: 82.2079, aux.loss_ce: 0.2656, aux.acc_seg: 76.3957, loss: 0.7608, grad_norm: 6.6589 2023-02-11 19:49:11,951 - mmseg - INFO - Iter [27600/160000] lr: 4.965e-05, eta: 7:54:27, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5054, decode.acc_seg: 81.2410, aux.loss_ce: 0.2662, aux.acc_seg: 75.1853, loss: 0.7716, grad_norm: 6.8093 2023-02-11 19:49:22,418 - mmseg - INFO - Iter [27650/160000] lr: 4.963e-05, eta: 7:54:13, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5300, decode.acc_seg: 80.5930, aux.loss_ce: 0.2816, aux.acc_seg: 74.7440, loss: 0.8116, grad_norm: 7.8619 2023-02-11 19:49:32,246 - mmseg - INFO - Iter [27700/160000] lr: 4.961e-05, eta: 7:53:52, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5089, decode.acc_seg: 81.3789, aux.loss_ce: 0.2754, aux.acc_seg: 75.3269, loss: 0.7843, grad_norm: 7.1681 2023-02-11 19:49:42,150 - mmseg - INFO - Iter [27750/160000] lr: 4.959e-05, eta: 7:53:32, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5093, decode.acc_seg: 81.2004, aux.loss_ce: 0.2640, aux.acc_seg: 76.0536, loss: 0.7733, grad_norm: 6.1717 2023-02-11 19:49:52,124 - mmseg - INFO - Iter [27800/160000] lr: 4.958e-05, eta: 7:53:12, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4971, decode.acc_seg: 81.3820, aux.loss_ce: 0.2652, aux.acc_seg: 75.4301, loss: 0.7623, grad_norm: 6.6812 2023-02-11 19:50:01,999 - mmseg - INFO - Iter [27850/160000] lr: 4.956e-05, eta: 7:52:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4993, decode.acc_seg: 81.2702, aux.loss_ce: 0.2694, aux.acc_seg: 75.2951, loss: 0.7687, grad_norm: 7.1686 2023-02-11 19:50:11,817 - mmseg - INFO - Iter [27900/160000] lr: 4.954e-05, eta: 7:52:31, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4954, decode.acc_seg: 81.0300, aux.loss_ce: 0.2661, aux.acc_seg: 74.9924, loss: 0.7615, grad_norm: 5.9988 2023-02-11 19:50:21,582 - mmseg - INFO - Iter [27950/160000] lr: 4.952e-05, eta: 7:52:10, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5072, decode.acc_seg: 80.8697, aux.loss_ce: 0.2785, aux.acc_seg: 74.3247, loss: 0.7858, grad_norm: 6.6969 2023-02-11 19:50:31,566 - mmseg - INFO - Saving checkpoint at 28000 iterations 2023-02-11 19:50:32,250 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:50:32,250 - mmseg - INFO - Iter [28000/160000] lr: 4.950e-05, eta: 7:51:58, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5293, decode.acc_seg: 80.7967, aux.loss_ce: 0.2791, aux.acc_seg: 75.2995, loss: 0.8085, grad_norm: 8.0196 2023-02-11 19:50:42,049 - mmseg - INFO - Iter [28050/160000] lr: 4.948e-05, eta: 7:51:37, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5157, decode.acc_seg: 80.3814, aux.loss_ce: 0.2787, aux.acc_seg: 74.5294, loss: 0.7944, grad_norm: 7.2563 2023-02-11 19:50:51,948 - mmseg - INFO - Iter 
[28100/160000] lr: 4.946e-05, eta: 7:51:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5054, decode.acc_seg: 81.8201, aux.loss_ce: 0.2693, aux.acc_seg: 75.6017, loss: 0.7747, grad_norm: 6.1235 2023-02-11 19:51:01,681 - mmseg - INFO - Iter [28150/160000] lr: 4.944e-05, eta: 7:50:56, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5278, decode.acc_seg: 80.3417, aux.loss_ce: 0.2741, aux.acc_seg: 75.0408, loss: 0.8018, grad_norm: 7.3172 2023-02-11 19:51:11,802 - mmseg - INFO - Iter [28200/160000] lr: 4.943e-05, eta: 7:50:39, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4751, decode.acc_seg: 82.7202, aux.loss_ce: 0.2558, aux.acc_seg: 76.7965, loss: 0.7309, grad_norm: 6.4641 2023-02-11 19:51:21,950 - mmseg - INFO - Iter [28250/160000] lr: 4.941e-05, eta: 7:50:22, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5151, decode.acc_seg: 81.3270, aux.loss_ce: 0.2712, aux.acc_seg: 75.1680, loss: 0.7863, grad_norm: 7.4796 2023-02-11 19:51:31,918 - mmseg - INFO - Iter [28300/160000] lr: 4.939e-05, eta: 7:50:04, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4980, decode.acc_seg: 81.8919, aux.loss_ce: 0.2721, aux.acc_seg: 75.1316, loss: 0.7701, grad_norm: 6.5074 2023-02-11 19:51:42,349 - mmseg - INFO - Iter [28350/160000] lr: 4.937e-05, eta: 7:49:50, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5087, decode.acc_seg: 80.9832, aux.loss_ce: 0.2664, aux.acc_seg: 75.5739, loss: 0.7750, grad_norm: 7.5044 2023-02-11 19:51:52,188 - mmseg - INFO - Iter [28400/160000] lr: 4.935e-05, eta: 7:49:30, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5148, decode.acc_seg: 81.2898, aux.loss_ce: 0.2719, aux.acc_seg: 75.8639, loss: 0.7867, grad_norm: 7.7848 2023-02-11 19:52:02,468 - mmseg - INFO - Iter [28450/160000] lr: 4.933e-05, eta: 7:49:15, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5025, decode.acc_seg: 80.9903, aux.loss_ce: 0.2744, aux.acc_seg: 74.6056, loss: 0.7769, grad_norm: 7.0743 2023-02-11 19:52:12,263 - mmseg - INFO - Iter [28500/160000] lr: 4.931e-05, eta: 7:48:55, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5345, decode.acc_seg: 80.4400, aux.loss_ce: 0.2924, aux.acc_seg: 73.3308, loss: 0.8269, grad_norm: 6.9713 2023-02-11 19:52:22,655 - mmseg - INFO - Iter [28550/160000] lr: 4.929e-05, eta: 7:48:41, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5093, decode.acc_seg: 81.3380, aux.loss_ce: 0.2707, aux.acc_seg: 75.8613, loss: 0.7800, grad_norm: 6.7086 2023-02-11 19:52:32,500 - mmseg - INFO - Iter [28600/160000] lr: 4.928e-05, eta: 7:48:21, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4792, decode.acc_seg: 82.7545, aux.loss_ce: 0.2625, aux.acc_seg: 76.6574, loss: 0.7418, grad_norm: 7.4355 2023-02-11 19:52:44,489 - mmseg - INFO - Iter [28650/160000] lr: 4.926e-05, eta: 7:48:24, time: 0.240, data_time: 0.048, memory: 7748, decode.loss_ce: 0.5099, decode.acc_seg: 81.9168, aux.loss_ce: 0.2775, aux.acc_seg: 75.3698, loss: 0.7873, grad_norm: 7.0612 2023-02-11 19:52:54,932 - mmseg - INFO - Iter [28700/160000] lr: 4.924e-05, eta: 7:48:11, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4878, decode.acc_seg: 82.3203, aux.loss_ce: 0.2675, aux.acc_seg: 75.9625, loss: 0.7553, grad_norm: 5.9833 2023-02-11 19:53:04,721 - mmseg - INFO - Iter [28750/160000] lr: 4.922e-05, eta: 7:47:50, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5146, decode.acc_seg: 81.0790, aux.loss_ce: 0.2781, aux.acc_seg: 74.6444, loss: 
0.7927, grad_norm: 6.5483 2023-02-11 19:53:14,802 - mmseg - INFO - Iter [28800/160000] lr: 4.920e-05, eta: 7:47:33, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4663, decode.acc_seg: 82.9531, aux.loss_ce: 0.2494, aux.acc_seg: 77.4529, loss: 0.7157, grad_norm: 5.8669 2023-02-11 19:53:24,924 - mmseg - INFO - Iter [28850/160000] lr: 4.918e-05, eta: 7:47:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4801, decode.acc_seg: 82.3060, aux.loss_ce: 0.2677, aux.acc_seg: 76.1748, loss: 0.7478, grad_norm: 6.3415 2023-02-11 19:53:35,398 - mmseg - INFO - Iter [28900/160000] lr: 4.916e-05, eta: 7:47:04, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5047, decode.acc_seg: 80.6397, aux.loss_ce: 0.2697, aux.acc_seg: 74.6960, loss: 0.7744, grad_norm: 6.3958 2023-02-11 19:53:46,055 - mmseg - INFO - Iter [28950/160000] lr: 4.914e-05, eta: 7:46:53, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5017, decode.acc_seg: 80.9487, aux.loss_ce: 0.2667, aux.acc_seg: 75.4029, loss: 0.7684, grad_norm: 6.5966 2023-02-11 19:53:55,955 - mmseg - INFO - Saving checkpoint at 29000 iterations 2023-02-11 19:53:56,648 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:53:56,648 - mmseg - INFO - Iter [29000/160000] lr: 4.913e-05, eta: 7:46:41, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5057, decode.acc_seg: 81.1840, aux.loss_ce: 0.2717, aux.acc_seg: 75.1311, loss: 0.7775, grad_norm: 7.7537 2023-02-11 19:54:06,717 - mmseg - INFO - Iter [29050/160000] lr: 4.911e-05, eta: 7:46:25, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4779, decode.acc_seg: 82.2496, aux.loss_ce: 0.2614, aux.acc_seg: 76.0497, loss: 0.7393, grad_norm: 7.6177 2023-02-11 19:54:17,061 - mmseg - INFO - Iter [29100/160000] lr: 4.909e-05, eta: 7:46:10, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4901, decode.acc_seg: 81.6554, aux.loss_ce: 0.2602, aux.acc_seg: 76.0258, loss: 0.7503, grad_norm: 6.7154 2023-02-11 19:54:26,844 - mmseg - INFO - Iter [29150/160000] lr: 4.907e-05, eta: 7:45:51, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4948, decode.acc_seg: 81.3297, aux.loss_ce: 0.2704, aux.acc_seg: 74.9381, loss: 0.7652, grad_norm: 6.8422 2023-02-11 19:54:37,080 - mmseg - INFO - Iter [29200/160000] lr: 4.905e-05, eta: 7:45:36, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4863, decode.acc_seg: 82.2853, aux.loss_ce: 0.2662, aux.acc_seg: 76.0302, loss: 0.7525, grad_norm: 6.5621 2023-02-11 19:54:46,845 - mmseg - INFO - Iter [29250/160000] lr: 4.903e-05, eta: 7:45:16, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4821, decode.acc_seg: 81.9383, aux.loss_ce: 0.2626, aux.acc_seg: 76.1734, loss: 0.7448, grad_norm: 7.5926 2023-02-11 19:54:57,279 - mmseg - INFO - Iter [29300/160000] lr: 4.901e-05, eta: 7:45:03, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5347, decode.acc_seg: 80.0500, aux.loss_ce: 0.2902, aux.acc_seg: 73.9043, loss: 0.8249, grad_norm: 7.3572 2023-02-11 19:55:07,199 - mmseg - INFO - Iter [29350/160000] lr: 4.899e-05, eta: 7:44:45, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4992, decode.acc_seg: 81.5765, aux.loss_ce: 0.2755, aux.acc_seg: 75.1432, loss: 0.7747, grad_norm: 6.4290 2023-02-11 19:55:17,010 - mmseg - INFO - Iter [29400/160000] lr: 4.898e-05, eta: 7:44:26, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4723, decode.acc_seg: 82.2003, aux.loss_ce: 0.2597, aux.acc_seg: 75.5099, loss: 0.7320, 
grad_norm: 6.1867 2023-02-11 19:55:26,837 - mmseg - INFO - Iter [29450/160000] lr: 4.896e-05, eta: 7:44:07, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4978, decode.acc_seg: 81.6385, aux.loss_ce: 0.2719, aux.acc_seg: 75.8262, loss: 0.7697, grad_norm: 6.4045 2023-02-11 19:55:36,970 - mmseg - INFO - Iter [29500/160000] lr: 4.894e-05, eta: 7:43:51, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4923, decode.acc_seg: 81.7773, aux.loss_ce: 0.2690, aux.acc_seg: 75.8956, loss: 0.7612, grad_norm: 7.0270 2023-02-11 19:55:47,748 - mmseg - INFO - Iter [29550/160000] lr: 4.892e-05, eta: 7:43:42, time: 0.215, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4849, decode.acc_seg: 81.7635, aux.loss_ce: 0.2690, aux.acc_seg: 75.2708, loss: 0.7539, grad_norm: 6.4684 2023-02-11 19:55:57,714 - mmseg - INFO - Iter [29600/160000] lr: 4.890e-05, eta: 7:43:24, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4939, decode.acc_seg: 81.9832, aux.loss_ce: 0.2629, aux.acc_seg: 76.2725, loss: 0.7568, grad_norm: 5.6451 2023-02-11 19:56:08,162 - mmseg - INFO - Iter [29650/160000] lr: 4.888e-05, eta: 7:43:12, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5054, decode.acc_seg: 81.7354, aux.loss_ce: 0.2723, aux.acc_seg: 75.4098, loss: 0.7777, grad_norm: 5.9354 2023-02-11 19:56:18,100 - mmseg - INFO - Iter [29700/160000] lr: 4.886e-05, eta: 7:42:54, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5114, decode.acc_seg: 81.5653, aux.loss_ce: 0.2826, aux.acc_seg: 74.6023, loss: 0.7939, grad_norm: 6.3525 2023-02-11 19:56:28,420 - mmseg - INFO - Iter [29750/160000] lr: 4.884e-05, eta: 7:42:40, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4975, decode.acc_seg: 81.2224, aux.loss_ce: 0.2729, aux.acc_seg: 74.6997, loss: 0.7703, grad_norm: 6.8626 2023-02-11 19:56:38,477 - mmseg - INFO - Iter [29800/160000] lr: 4.883e-05, eta: 7:42:24, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4934, decode.acc_seg: 81.6206, aux.loss_ce: 0.2652, aux.acc_seg: 75.7117, loss: 0.7585, grad_norm: 6.2400 2023-02-11 19:56:48,516 - mmseg - INFO - Iter [29850/160000] lr: 4.881e-05, eta: 7:42:07, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5071, decode.acc_seg: 81.5703, aux.loss_ce: 0.2758, aux.acc_seg: 74.9463, loss: 0.7829, grad_norm: 7.0674 2023-02-11 19:57:00,910 - mmseg - INFO - Iter [29900/160000] lr: 4.879e-05, eta: 7:42:13, time: 0.248, data_time: 0.047, memory: 7748, decode.loss_ce: 0.4947, decode.acc_seg: 82.0375, aux.loss_ce: 0.2697, aux.acc_seg: 75.6521, loss: 0.7644, grad_norm: 6.7390 2023-02-11 19:57:11,152 - mmseg - INFO - Iter [29950/160000] lr: 4.877e-05, eta: 7:41:58, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4449, decode.acc_seg: 83.2593, aux.loss_ce: 0.2517, aux.acc_seg: 76.8234, loss: 0.6966, grad_norm: 5.9263 2023-02-11 19:57:21,689 - mmseg - INFO - Saving checkpoint at 30000 iterations 2023-02-11 19:57:22,372 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 19:57:22,372 - mmseg - INFO - Iter [30000/160000] lr: 4.875e-05, eta: 7:41:53, time: 0.225, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4891, decode.acc_seg: 82.4842, aux.loss_ce: 0.2670, aux.acc_seg: 76.1500, loss: 0.7561, grad_norm: 6.0887 2023-02-11 19:57:32,122 - mmseg - INFO - Iter [30050/160000] lr: 4.873e-05, eta: 7:41:34, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4588, decode.acc_seg: 82.9994, aux.loss_ce: 0.2582, aux.acc_seg: 76.1869, loss: 0.7170, grad_norm: 
6.4221 2023-02-11 19:57:41,927 - mmseg - INFO - Iter [30100/160000] lr: 4.871e-05, eta: 7:41:16, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4799, decode.acc_seg: 82.1472, aux.loss_ce: 0.2678, aux.acc_seg: 75.4004, loss: 0.7478, grad_norm: 6.8992 2023-02-11 19:57:52,199 - mmseg - INFO - Iter [30150/160000] lr: 4.869e-05, eta: 7:41:01, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5130, decode.acc_seg: 81.9836, aux.loss_ce: 0.2778, aux.acc_seg: 75.8999, loss: 0.7908, grad_norm: 7.3565 2023-02-11 19:58:02,193 - mmseg - INFO - Iter [30200/160000] lr: 4.868e-05, eta: 7:40:45, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4566, decode.acc_seg: 82.8807, aux.loss_ce: 0.2611, aux.acc_seg: 76.1867, loss: 0.7177, grad_norm: 6.2634 2023-02-11 19:58:12,117 - mmseg - INFO - Iter [30250/160000] lr: 4.866e-05, eta: 7:40:27, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4752, decode.acc_seg: 82.2551, aux.loss_ce: 0.2618, aux.acc_seg: 75.7798, loss: 0.7370, grad_norm: 6.3610 2023-02-11 19:58:21,950 - mmseg - INFO - Iter [30300/160000] lr: 4.864e-05, eta: 7:40:09, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4803, decode.acc_seg: 81.8421, aux.loss_ce: 0.2674, aux.acc_seg: 75.3298, loss: 0.7477, grad_norm: 6.3866 2023-02-11 19:58:31,928 - mmseg - INFO - Iter [30350/160000] lr: 4.862e-05, eta: 7:39:53, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5122, decode.acc_seg: 81.7397, aux.loss_ce: 0.2731, aux.acc_seg: 75.5035, loss: 0.7853, grad_norm: 6.5726 2023-02-11 19:58:41,686 - mmseg - INFO - Iter [30400/160000] lr: 4.860e-05, eta: 7:39:34, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4693, decode.acc_seg: 82.6880, aux.loss_ce: 0.2563, aux.acc_seg: 76.1697, loss: 0.7256, grad_norm: 5.6311 2023-02-11 19:58:51,686 - mmseg - INFO - Iter [30450/160000] lr: 4.858e-05, eta: 7:39:18, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4786, decode.acc_seg: 82.1254, aux.loss_ce: 0.2633, aux.acc_seg: 75.5944, loss: 0.7419, grad_norm: 6.5994 2023-02-11 19:59:01,853 - mmseg - INFO - Iter [30500/160000] lr: 4.856e-05, eta: 7:39:03, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5069, decode.acc_seg: 81.6152, aux.loss_ce: 0.2781, aux.acc_seg: 75.0036, loss: 0.7850, grad_norm: 6.8661 2023-02-11 19:59:11,661 - mmseg - INFO - Iter [30550/160000] lr: 4.854e-05, eta: 7:38:45, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5089, decode.acc_seg: 81.3783, aux.loss_ce: 0.2733, aux.acc_seg: 75.4899, loss: 0.7822, grad_norm: 6.6100 2023-02-11 19:59:22,258 - mmseg - INFO - Iter [30600/160000] lr: 4.853e-05, eta: 7:38:34, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4689, decode.acc_seg: 82.3071, aux.loss_ce: 0.2647, aux.acc_seg: 75.4052, loss: 0.7336, grad_norm: 6.7211 2023-02-11 19:59:31,994 - mmseg - INFO - Iter [30650/160000] lr: 4.851e-05, eta: 7:38:15, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4884, decode.acc_seg: 82.2404, aux.loss_ce: 0.2619, aux.acc_seg: 76.2277, loss: 0.7503, grad_norm: 6.2526 2023-02-11 19:59:41,962 - mmseg - INFO - Iter [30700/160000] lr: 4.849e-05, eta: 7:37:59, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4561, decode.acc_seg: 83.2596, aux.loss_ce: 0.2608, aux.acc_seg: 76.6024, loss: 0.7168, grad_norm: 6.8770 2023-02-11 19:59:52,067 - mmseg - INFO - Iter [30750/160000] lr: 4.847e-05, eta: 7:37:44, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4839, decode.acc_seg: 
82.3054, aux.loss_ce: 0.2515, aux.acc_seg: 77.4210, loss: 0.7354, grad_norm: 6.0392 2023-02-11 20:00:02,606 - mmseg - INFO - Iter [30800/160000] lr: 4.845e-05, eta: 7:37:32, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4844, decode.acc_seg: 82.3469, aux.loss_ce: 0.2633, aux.acc_seg: 76.2804, loss: 0.7477, grad_norm: 7.5536 2023-02-11 20:00:12,511 - mmseg - INFO - Iter [30850/160000] lr: 4.843e-05, eta: 7:37:15, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4497, decode.acc_seg: 83.1266, aux.loss_ce: 0.2496, aux.acc_seg: 76.9293, loss: 0.6993, grad_norm: 8.9473 2023-02-11 20:00:22,369 - mmseg - INFO - Iter [30900/160000] lr: 4.841e-05, eta: 7:36:58, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.5094, decode.acc_seg: 81.5681, aux.loss_ce: 0.2781, aux.acc_seg: 74.9441, loss: 0.7876, grad_norm: 6.8407 2023-02-11 20:00:32,850 - mmseg - INFO - Iter [30950/160000] lr: 4.839e-05, eta: 7:36:47, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4644, decode.acc_seg: 83.2539, aux.loss_ce: 0.2545, aux.acc_seg: 77.0042, loss: 0.7189, grad_norm: 7.6114 2023-02-11 20:00:42,760 - mmseg - INFO - Saving checkpoint at 31000 iterations 2023-02-11 20:00:43,464 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:00:43,464 - mmseg - INFO - Iter [31000/160000] lr: 4.838e-05, eta: 7:36:36, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4586, decode.acc_seg: 82.3544, aux.loss_ce: 0.2546, aux.acc_seg: 76.2958, loss: 0.7131, grad_norm: 7.1694 2023-02-11 20:00:53,586 - mmseg - INFO - Iter [31050/160000] lr: 4.836e-05, eta: 7:36:21, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4889, decode.acc_seg: 82.2764, aux.loss_ce: 0.2619, aux.acc_seg: 76.1329, loss: 0.7508, grad_norm: 6.5751 2023-02-11 20:01:03,648 - mmseg - INFO - Iter [31100/160000] lr: 4.834e-05, eta: 7:36:06, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5184, decode.acc_seg: 81.1343, aux.loss_ce: 0.2835, aux.acc_seg: 74.5051, loss: 0.8020, grad_norm: 6.9684 2023-02-11 20:01:13,266 - mmseg - INFO - Iter [31150/160000] lr: 4.832e-05, eta: 7:35:47, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5083, decode.acc_seg: 81.6506, aux.loss_ce: 0.2782, aux.acc_seg: 75.2824, loss: 0.7865, grad_norm: 6.6039 2023-02-11 20:01:25,228 - mmseg - INFO - Iter [31200/160000] lr: 4.830e-05, eta: 7:35:47, time: 0.239, data_time: 0.047, memory: 7748, decode.loss_ce: 0.4818, decode.acc_seg: 81.3913, aux.loss_ce: 0.2668, aux.acc_seg: 74.7178, loss: 0.7486, grad_norm: 6.4302 2023-02-11 20:01:35,155 - mmseg - INFO - Iter [31250/160000] lr: 4.828e-05, eta: 7:35:31, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4637, decode.acc_seg: 82.5348, aux.loss_ce: 0.2584, aux.acc_seg: 76.3734, loss: 0.7221, grad_norm: 6.4647 2023-02-11 20:01:45,067 - mmseg - INFO - Iter [31300/160000] lr: 4.826e-05, eta: 7:35:14, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4434, decode.acc_seg: 83.5650, aux.loss_ce: 0.2489, aux.acc_seg: 76.8496, loss: 0.6923, grad_norm: 5.8286 2023-02-11 20:01:54,814 - mmseg - INFO - Iter [31350/160000] lr: 4.824e-05, eta: 7:34:57, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4645, decode.acc_seg: 82.9307, aux.loss_ce: 0.2585, aux.acc_seg: 76.5365, loss: 0.7230, grad_norm: 6.0722 2023-02-11 20:02:04,780 - mmseg - INFO - Iter [31400/160000] lr: 4.823e-05, eta: 7:34:41, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4577, decode.acc_seg: 83.0542, 
aux.loss_ce: 0.2567, aux.acc_seg: 76.2930, loss: 0.7144, grad_norm: 5.9589 2023-02-11 20:02:14,669 - mmseg - INFO - Iter [31450/160000] lr: 4.821e-05, eta: 7:34:24, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4726, decode.acc_seg: 82.8359, aux.loss_ce: 0.2674, aux.acc_seg: 75.9213, loss: 0.7400, grad_norm: 5.5877 2023-02-11 20:02:24,474 - mmseg - INFO - Iter [31500/160000] lr: 4.819e-05, eta: 7:34:07, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5056, decode.acc_seg: 81.4996, aux.loss_ce: 0.2721, aux.acc_seg: 75.1141, loss: 0.7778, grad_norm: 6.7292 2023-02-11 20:02:34,634 - mmseg - INFO - Iter [31550/160000] lr: 4.817e-05, eta: 7:33:53, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4722, decode.acc_seg: 81.8953, aux.loss_ce: 0.2573, aux.acc_seg: 76.3842, loss: 0.7295, grad_norm: 8.4058 2023-02-11 20:02:44,683 - mmseg - INFO - Iter [31600/160000] lr: 4.815e-05, eta: 7:33:37, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4535, decode.acc_seg: 83.0114, aux.loss_ce: 0.2520, aux.acc_seg: 76.7415, loss: 0.7055, grad_norm: 6.7167 2023-02-11 20:02:55,156 - mmseg - INFO - Iter [31650/160000] lr: 4.813e-05, eta: 7:33:26, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4875, decode.acc_seg: 81.6191, aux.loss_ce: 0.2711, aux.acc_seg: 75.3477, loss: 0.7586, grad_norm: 6.2645 2023-02-11 20:03:04,979 - mmseg - INFO - Iter [31700/160000] lr: 4.811e-05, eta: 7:33:09, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4808, decode.acc_seg: 82.2902, aux.loss_ce: 0.2729, aux.acc_seg: 74.9200, loss: 0.7537, grad_norm: 7.7223 2023-02-11 20:03:14,980 - mmseg - INFO - Iter [31750/160000] lr: 4.809e-05, eta: 7:32:53, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4711, decode.acc_seg: 82.4315, aux.loss_ce: 0.2613, aux.acc_seg: 75.7037, loss: 0.7324, grad_norm: 6.2038 2023-02-11 20:03:24,949 - mmseg - INFO - Iter [31800/160000] lr: 4.808e-05, eta: 7:32:38, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4502, decode.acc_seg: 83.0964, aux.loss_ce: 0.2610, aux.acc_seg: 75.9693, loss: 0.7112, grad_norm: 6.4143 2023-02-11 20:03:34,900 - mmseg - INFO - Iter [31850/160000] lr: 4.806e-05, eta: 7:32:22, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4672, decode.acc_seg: 82.3214, aux.loss_ce: 0.2573, aux.acc_seg: 75.7778, loss: 0.7246, grad_norm: 6.4303 2023-02-11 20:03:44,946 - mmseg - INFO - Iter [31900/160000] lr: 4.804e-05, eta: 7:32:07, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4822, decode.acc_seg: 81.3902, aux.loss_ce: 0.2713, aux.acc_seg: 74.2670, loss: 0.7535, grad_norm: 6.7063 2023-02-11 20:03:54,887 - mmseg - INFO - Iter [31950/160000] lr: 4.802e-05, eta: 7:31:51, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4823, decode.acc_seg: 82.2203, aux.loss_ce: 0.2700, aux.acc_seg: 75.1675, loss: 0.7522, grad_norm: 7.1743 2023-02-11 20:04:04,817 - mmseg - INFO - Saving checkpoint at 32000 iterations 2023-02-11 20:04:05,519 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:04:05,519 - mmseg - INFO - Iter [32000/160000] lr: 4.800e-05, eta: 7:31:41, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4958, decode.acc_seg: 81.5335, aux.loss_ce: 0.2715, aux.acc_seg: 75.2467, loss: 0.7673, grad_norm: 7.1795 2023-02-11 20:04:17,117 - mmseg - INFO - per class results: 2023-02-11 20:04:17,123 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | 
+---------------------+-------+-------+ | wall | 71.04 | 80.3 | | building | 78.97 | 93.21 | | sky | 92.74 | 94.3 | | floor | 76.31 | 89.51 | | tree | 68.76 | 91.28 | | ceiling | 77.68 | 93.54 | | road | 76.25 | 92.0 | | bed | 79.9 | 95.46 | | windowpane | 55.39 | 77.36 | | grass | 55.75 | 59.38 | | cabinet | 49.1 | 69.97 | | sidewalk | 56.32 | 67.72 | | person | 75.07 | 88.81 | | earth | 25.48 | 38.77 | | door | 37.03 | 62.95 | | table | 46.64 | 69.09 | | mountain | 49.8 | 77.23 | | plant | 45.73 | 65.74 | | curtain | 65.99 | 80.42 | | chair | 46.23 | 74.04 | | car | 79.54 | 88.31 | | water | 46.91 | 85.84 | | painting | 64.2 | 74.14 | | sofa | 48.15 | 54.93 | | shelf | 35.99 | 46.22 | | house | 8.92 | 9.46 | | sea | 38.77 | 47.33 | | mirror | 47.17 | 54.67 | | rug | 49.58 | 53.71 | | field | 25.92 | 47.09 | | armchair | 31.84 | 61.03 | | seat | 50.89 | 66.56 | | fence | 20.52 | 23.19 | | desk | 23.75 | 28.28 | | rock | 21.03 | 24.63 | | wardrobe | 42.95 | 74.56 | | lamp | 52.99 | 65.19 | | bathtub | 60.82 | 64.32 | | railing | 20.88 | 22.73 | | cushion | 37.97 | 41.5 | | base | 22.61 | 62.85 | | box | 8.66 | 9.14 | | column | 39.22 | 42.5 | | signboard | 20.29 | 21.71 | | chest of drawers | 32.43 | 60.04 | | counter | 13.61 | 14.2 | | sand | 38.01 | 41.77 | | sink | 58.23 | 69.88 | | skyscraper | 43.61 | 47.69 | | fireplace | 45.61 | 47.64 | | refrigerator | 62.01 | 77.44 | | grandstand | 37.94 | 70.02 | | path | 9.59 | 12.25 | | stairs | 26.19 | 28.64 | | runway | 62.08 | 79.42 | | case | 48.29 | 62.59 | | pool table | 89.28 | 91.58 | | pillow | 41.52 | 53.77 | | screen door | 56.94 | 73.27 | | stairway | 32.03 | 40.92 | | river | 16.88 | 25.02 | | bridge | 42.5 | 73.71 | | bookcase | 32.68 | 51.74 | | blind | 28.3 | 37.38 | | coffee table | 52.56 | 64.96 | | toilet | 71.67 | 91.29 | | flower | 31.56 | 36.9 | | book | 40.09 | 52.53 | | hill | 3.06 | 5.22 | | bench | 36.79 | 40.56 | | countertop | 39.59 | 50.91 | | stove | 56.08 | 59.5 | | palm | 25.11 | 26.75 | | kitchen island | 21.42 | 48.39 | | computer | 57.71 | 65.71 | | swivel chair | 14.03 | 14.56 | | boat | 47.89 | 57.38 | | bar | 21.49 | 24.35 | | arcade machine | 36.76 | 37.81 | | hovel | 48.36 | 52.17 | | bus | 80.72 | 82.27 | | towel | 51.62 | 67.45 | | light | 37.5 | 46.58 | | truck | 20.51 | 25.89 | | tower | 1.49 | 1.49 | | chandelier | 59.58 | 77.5 | | awning | 23.06 | 26.01 | | streetlight | 15.82 | 25.92 | | booth | 27.66 | 27.75 | | television receiver | 54.3 | 58.3 | | airplane | 52.89 | 55.62 | | dirt track | 3.37 | 53.54 | | apparel | 19.46 | 26.45 | | pole | 11.96 | 18.52 | | land | 10.62 | 14.7 | | bannister | 8.16 | 12.63 | | escalator | 27.73 | 44.42 | | ottoman | 22.57 | 25.94 | | bottle | 22.28 | 23.77 | | buffet | 35.22 | 55.75 | | poster | 19.6 | 23.05 | | stage | 5.36 | 7.12 | | van | 29.59 | 32.2 | | ship | 20.69 | 22.96 | | fountain | 0.08 | 0.08 | | conveyer belt | 49.64 | 62.95 | | canopy | 1.67 | 1.76 | | washer | 65.61 | 68.28 | | plaything | 22.43 | 33.33 | | swimming pool | 36.87 | 47.41 | | stool | 13.69 | 32.46 | | barrel | 36.05 | 55.52 | | basket | 18.75 | 25.31 | | waterfall | 32.71 | 34.91 | | tent | 89.08 | 98.17 | | bag | 8.96 | 10.23 | | minibike | 50.22 | 63.98 | | cradle | 67.74 | 84.41 | | oven | 21.48 | 50.07 | | ball | 40.45 | 60.61 | | food | 38.44 | 44.97 | | step | 2.03 | 2.1 | | tank | 15.3 | 15.31 | | trade name | 0.06 | 0.06 | | microwave | 32.61 | 33.95 | | pot | 18.06 | 19.11 | | animal | 52.06 | 55.12 | | bicycle | 42.65 | 58.45 | | lake | 0.05 | 0.06 | | dishwasher | 53.15 | 
57.17 | | screen | 37.89 | 38.87 | | blanket | 0.0 | 0.0 | | sculpture | 13.43 | 13.86 | | hood | 38.06 | 46.78 | | sconce | 21.39 | 25.15 | | vase | 18.84 | 22.67 | | traffic light | 15.35 | 21.06 | | tray | 0.0 | 0.0 | | ashcan | 26.11 | 34.1 | | fan | 47.03 | 58.8 | | pier | 36.24 | 69.91 | | crt screen | 5.99 | 25.0 | | plate | 37.39 | 56.02 | | monitor | 2.55 | 2.9 | | bulletin board | 39.13 | 58.54 | | shower | 0.0 | 0.0 | | radiator | 40.27 | 44.03 | | glass | 1.89 | 1.91 | | clock | 8.35 | 9.17 | | flag | 25.62 | 26.34 | +---------------------+-------+-------+ 2023-02-11 20:04:17,124 - mmseg - INFO - Summary: 2023-02-11 20:04:17,124 - mmseg - INFO - +-------+-------+------+ | aAcc | mIoU | mAcc | +-------+-------+------+ | 77.46 | 36.26 | 46.3 | +-------+-------+------+ 2023-02-11 20:04:17,770 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_32000.pth. 2023-02-11 20:04:17,770 - mmseg - INFO - Best mIoU is 0.3626 at 32000 iter. 2023-02-11 20:04:17,770 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:04:17,771 - mmseg - INFO - Iter(val) [250] aAcc: 0.7746, mIoU: 0.3626, mAcc: 0.4630, IoU.wall: 0.7104, IoU.building: 0.7897, IoU.sky: 0.9274, IoU.floor: 0.7631, IoU.tree: 0.6876, IoU.ceiling: 0.7768, IoU.road: 0.7625, IoU.bed : 0.7990, IoU.windowpane: 0.5539, IoU.grass: 0.5575, IoU.cabinet: 0.4910, IoU.sidewalk: 0.5632, IoU.person: 0.7507, IoU.earth: 0.2548, IoU.door: 0.3703, IoU.table: 0.4664, IoU.mountain: 0.4980, IoU.plant: 0.4573, IoU.curtain: 0.6599, IoU.chair: 0.4623, IoU.car: 0.7954, IoU.water: 0.4691, IoU.painting: 0.6420, IoU.sofa: 0.4815, IoU.shelf: 0.3599, IoU.house: 0.0892, IoU.sea: 0.3877, IoU.mirror: 0.4717, IoU.rug: 0.4958, IoU.field: 0.2592, IoU.armchair: 0.3184, IoU.seat: 0.5089, IoU.fence: 0.2052, IoU.desk: 0.2375, IoU.rock: 0.2103, IoU.wardrobe: 0.4295, IoU.lamp: 0.5299, IoU.bathtub: 0.6082, IoU.railing: 0.2088, IoU.cushion: 0.3797, IoU.base: 0.2261, IoU.box: 0.0866, IoU.column: 0.3922, IoU.signboard: 0.2029, IoU.chest of drawers: 0.3243, IoU.counter: 0.1361, IoU.sand: 0.3801, IoU.sink: 0.5823, IoU.skyscraper: 0.4361, IoU.fireplace: 0.4561, IoU.refrigerator: 0.6201, IoU.grandstand: 0.3794, IoU.path: 0.0959, IoU.stairs: 0.2619, IoU.runway: 0.6208, IoU.case: 0.4829, IoU.pool table: 0.8928, IoU.pillow: 0.4152, IoU.screen door: 0.5694, IoU.stairway: 0.3203, IoU.river: 0.1688, IoU.bridge: 0.4250, IoU.bookcase: 0.3268, IoU.blind: 0.2830, IoU.coffee table: 0.5256, IoU.toilet: 0.7167, IoU.flower: 0.3156, IoU.book: 0.4009, IoU.hill: 0.0306, IoU.bench: 0.3679, IoU.countertop: 0.3959, IoU.stove: 0.5608, IoU.palm: 0.2511, IoU.kitchen island: 0.2142, IoU.computer: 0.5771, IoU.swivel chair: 0.1403, IoU.boat: 0.4789, IoU.bar: 0.2149, IoU.arcade machine: 0.3676, IoU.hovel: 0.4836, IoU.bus: 0.8072, IoU.towel: 0.5162, IoU.light: 0.3750, IoU.truck: 0.2051, IoU.tower: 0.0149, IoU.chandelier: 0.5958, IoU.awning: 0.2306, IoU.streetlight: 0.1582, IoU.booth: 0.2766, IoU.television receiver: 0.5430, IoU.airplane: 0.5289, IoU.dirt track: 0.0337, IoU.apparel: 0.1946, IoU.pole: 0.1196, IoU.land: 0.1062, IoU.bannister: 0.0816, IoU.escalator: 0.2773, IoU.ottoman: 0.2257, IoU.bottle: 0.2228, IoU.buffet: 0.3522, IoU.poster: 0.1960, IoU.stage: 0.0536, IoU.van: 0.2959, IoU.ship: 0.2069, IoU.fountain: 0.0008, IoU.conveyer belt: 0.4964, IoU.canopy: 0.0167, IoU.washer: 0.6561, IoU.plaything: 0.2243, IoU.swimming pool: 0.3687, IoU.stool: 0.1369, IoU.barrel: 0.3605, IoU.basket: 0.1875, IoU.waterfall: 0.3271, IoU.tent: 0.8908, IoU.bag: 0.0896, 
IoU.minibike: 0.5022, IoU.cradle: 0.6774, IoU.oven: 0.2148, IoU.ball: 0.4045, IoU.food: 0.3844, IoU.step: 0.0203, IoU.tank: 0.1530, IoU.trade name: 0.0006, IoU.microwave: 0.3261, IoU.pot: 0.1806, IoU.animal: 0.5206, IoU.bicycle: 0.4265, IoU.lake: 0.0005, IoU.dishwasher: 0.5315, IoU.screen: 0.3789, IoU.blanket: 0.0000, IoU.sculpture: 0.1343, IoU.hood: 0.3806, IoU.sconce: 0.2139, IoU.vase: 0.1884, IoU.traffic light: 0.1535, IoU.tray: 0.0000, IoU.ashcan: 0.2611, IoU.fan: 0.4703, IoU.pier: 0.3624, IoU.crt screen: 0.0599, IoU.plate: 0.3739, IoU.monitor: 0.0255, IoU.bulletin board: 0.3913, IoU.shower: 0.0000, IoU.radiator: 0.4027, IoU.glass: 0.0189, IoU.clock: 0.0835, IoU.flag: 0.2562, Acc.wall: 0.8030, Acc.building: 0.9321, Acc.sky: 0.9430, Acc.floor: 0.8951, Acc.tree: 0.9128, Acc.ceiling: 0.9354, Acc.road: 0.9200, Acc.bed : 0.9546, Acc.windowpane: 0.7736, Acc.grass: 0.5938, Acc.cabinet: 0.6997, Acc.sidewalk: 0.6772, Acc.person: 0.8881, Acc.earth: 0.3877, Acc.door: 0.6295, Acc.table: 0.6909, Acc.mountain: 0.7723, Acc.plant: 0.6574, Acc.curtain: 0.8042, Acc.chair: 0.7404, Acc.car: 0.8831, Acc.water: 0.8584, Acc.painting: 0.7414, Acc.sofa: 0.5493, Acc.shelf: 0.4622, Acc.house: 0.0946, Acc.sea: 0.4733, Acc.mirror: 0.5467, Acc.rug: 0.5371, Acc.field: 0.4709, Acc.armchair: 0.6103, Acc.seat: 0.6656, Acc.fence: 0.2319, Acc.desk: 0.2828, Acc.rock: 0.2463, Acc.wardrobe: 0.7456, Acc.lamp: 0.6519, Acc.bathtub: 0.6432, Acc.railing: 0.2273, Acc.cushion: 0.4150, Acc.base: 0.6285, Acc.box: 0.0914, Acc.column: 0.4250, Acc.signboard: 0.2171, Acc.chest of drawers: 0.6004, Acc.counter: 0.1420, Acc.sand: 0.4177, Acc.sink: 0.6988, Acc.skyscraper: 0.4769, Acc.fireplace: 0.4764, Acc.refrigerator: 0.7744, Acc.grandstand: 0.7002, Acc.path: 0.1225, Acc.stairs: 0.2864, Acc.runway: 0.7942, Acc.case: 0.6259, Acc.pool table: 0.9158, Acc.pillow: 0.5377, Acc.screen door: 0.7327, Acc.stairway: 0.4092, Acc.river: 0.2502, Acc.bridge: 0.7371, Acc.bookcase: 0.5174, Acc.blind: 0.3738, Acc.coffee table: 0.6496, Acc.toilet: 0.9129, Acc.flower: 0.3690, Acc.book: 0.5253, Acc.hill: 0.0522, Acc.bench: 0.4056, Acc.countertop: 0.5091, Acc.stove: 0.5950, Acc.palm: 0.2675, Acc.kitchen island: 0.4839, Acc.computer: 0.6571, Acc.swivel chair: 0.1456, Acc.boat: 0.5738, Acc.bar: 0.2435, Acc.arcade machine: 0.3781, Acc.hovel: 0.5217, Acc.bus: 0.8227, Acc.towel: 0.6745, Acc.light: 0.4658, Acc.truck: 0.2589, Acc.tower: 0.0149, Acc.chandelier: 0.7750, Acc.awning: 0.2601, Acc.streetlight: 0.2592, Acc.booth: 0.2775, Acc.television receiver: 0.5830, Acc.airplane: 0.5562, Acc.dirt track: 0.5354, Acc.apparel: 0.2645, Acc.pole: 0.1852, Acc.land: 0.1470, Acc.bannister: 0.1263, Acc.escalator: 0.4442, Acc.ottoman: 0.2594, Acc.bottle: 0.2377, Acc.buffet: 0.5575, Acc.poster: 0.2305, Acc.stage: 0.0712, Acc.van: 0.3220, Acc.ship: 0.2296, Acc.fountain: 0.0008, Acc.conveyer belt: 0.6295, Acc.canopy: 0.0176, Acc.washer: 0.6828, Acc.plaything: 0.3333, Acc.swimming pool: 0.4741, Acc.stool: 0.3246, Acc.barrel: 0.5552, Acc.basket: 0.2531, Acc.waterfall: 0.3491, Acc.tent: 0.9817, Acc.bag: 0.1023, Acc.minibike: 0.6398, Acc.cradle: 0.8441, Acc.oven: 0.5007, Acc.ball: 0.6061, Acc.food: 0.4497, Acc.step: 0.0210, Acc.tank: 0.1531, Acc.trade name: 0.0006, Acc.microwave: 0.3395, Acc.pot: 0.1911, Acc.animal: 0.5512, Acc.bicycle: 0.5845, Acc.lake: 0.0006, Acc.dishwasher: 0.5717, Acc.screen: 0.3887, Acc.blanket: 0.0000, Acc.sculpture: 0.1386, Acc.hood: 0.4678, Acc.sconce: 0.2515, Acc.vase: 0.2267, Acc.traffic light: 0.2106, Acc.tray: 0.0000, Acc.ashcan: 0.3410, Acc.fan: 0.5880, 
Acc.pier: 0.6991, Acc.crt screen: 0.2500, Acc.plate: 0.5602, Acc.monitor: 0.0290, Acc.bulletin board: 0.5854, Acc.shower: 0.0000, Acc.radiator: 0.4403, Acc.glass: 0.0191, Acc.clock: 0.0917, Acc.flag: 0.2634 2023-02-11 20:04:27,973 - mmseg - INFO - Iter [32050/160000] lr: 4.798e-05, eta: 7:33:05, time: 0.449, data_time: 0.249, memory: 7748, decode.loss_ce: 0.4451, decode.acc_seg: 83.5544, aux.loss_ce: 0.2555, aux.acc_seg: 76.7125, loss: 0.7007, grad_norm: 6.1477 2023-02-11 20:04:38,142 - mmseg - INFO - Iter [32100/160000] lr: 4.796e-05, eta: 7:32:51, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4844, decode.acc_seg: 82.3382, aux.loss_ce: 0.2685, aux.acc_seg: 75.5264, loss: 0.7529, grad_norm: 6.5781 2023-02-11 20:04:48,290 - mmseg - INFO - Iter [32150/160000] lr: 4.794e-05, eta: 7:32:36, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.5010, decode.acc_seg: 82.4977, aux.loss_ce: 0.2718, aux.acc_seg: 75.7058, loss: 0.7727, grad_norm: 6.8632 2023-02-11 20:04:58,225 - mmseg - INFO - Iter [32200/160000] lr: 4.793e-05, eta: 7:32:20, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4730, decode.acc_seg: 82.0811, aux.loss_ce: 0.2670, aux.acc_seg: 75.2928, loss: 0.7401, grad_norm: 6.7825 2023-02-11 20:05:07,976 - mmseg - INFO - Iter [32250/160000] lr: 4.791e-05, eta: 7:32:03, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4420, decode.acc_seg: 82.6246, aux.loss_ce: 0.2548, aux.acc_seg: 76.0298, loss: 0.6968, grad_norm: 6.2808 2023-02-11 20:05:17,605 - mmseg - INFO - Iter [32300/160000] lr: 4.789e-05, eta: 7:31:45, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4487, decode.acc_seg: 83.4799, aux.loss_ce: 0.2457, aux.acc_seg: 77.0722, loss: 0.6944, grad_norm: 5.9834 2023-02-11 20:05:27,519 - mmseg - INFO - Iter [32350/160000] lr: 4.787e-05, eta: 7:31:28, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4654, decode.acc_seg: 83.2604, aux.loss_ce: 0.2609, aux.acc_seg: 76.6141, loss: 0.7263, grad_norm: 5.7966 2023-02-11 20:05:37,668 - mmseg - INFO - Iter [32400/160000] lr: 4.785e-05, eta: 7:31:14, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4916, decode.acc_seg: 82.2565, aux.loss_ce: 0.2680, aux.acc_seg: 75.7047, loss: 0.7596, grad_norm: 6.3226 2023-02-11 20:05:49,632 - mmseg - INFO - Iter [32450/160000] lr: 4.783e-05, eta: 7:31:14, time: 0.240, data_time: 0.046, memory: 7748, decode.loss_ce: 0.4850, decode.acc_seg: 82.3038, aux.loss_ce: 0.2651, aux.acc_seg: 76.1577, loss: 0.7502, grad_norm: 6.9170 2023-02-11 20:05:59,665 - mmseg - INFO - Iter [32500/160000] lr: 4.781e-05, eta: 7:30:59, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4452, decode.acc_seg: 83.4621, aux.loss_ce: 0.2594, aux.acc_seg: 76.1255, loss: 0.7046, grad_norm: 5.6248 2023-02-11 20:06:09,448 - mmseg - INFO - Iter [32550/160000] lr: 4.779e-05, eta: 7:30:42, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4723, decode.acc_seg: 82.3926, aux.loss_ce: 0.2634, aux.acc_seg: 75.7334, loss: 0.7357, grad_norm: 6.7864 2023-02-11 20:06:19,676 - mmseg - INFO - Iter [32600/160000] lr: 4.778e-05, eta: 7:30:28, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4295, decode.acc_seg: 84.2942, aux.loss_ce: 0.2415, aux.acc_seg: 78.0111, loss: 0.6710, grad_norm: 5.4015 2023-02-11 20:06:29,909 - mmseg - INFO - Iter [32650/160000] lr: 4.776e-05, eta: 7:30:15, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4334, decode.acc_seg: 83.5364, aux.loss_ce: 0.2448, aux.acc_seg: 77.1530, loss: 
0.6782, grad_norm: 5.9621 2023-02-11 20:06:39,855 - mmseg - INFO - Iter [32700/160000] lr: 4.774e-05, eta: 7:29:59, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4677, decode.acc_seg: 82.6254, aux.loss_ce: 0.2634, aux.acc_seg: 75.5788, loss: 0.7312, grad_norm: 7.5223 2023-02-11 20:06:50,172 - mmseg - INFO - Iter [32750/160000] lr: 4.772e-05, eta: 7:29:47, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4692, decode.acc_seg: 82.7706, aux.loss_ce: 0.2556, aux.acc_seg: 76.9786, loss: 0.7248, grad_norm: 6.2135 2023-02-11 20:07:00,311 - mmseg - INFO - Iter [32800/160000] lr: 4.770e-05, eta: 7:29:32, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4695, decode.acc_seg: 82.7422, aux.loss_ce: 0.2617, aux.acc_seg: 75.7781, loss: 0.7312, grad_norm: 6.7955 2023-02-11 20:07:10,162 - mmseg - INFO - Iter [32850/160000] lr: 4.768e-05, eta: 7:29:16, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4652, decode.acc_seg: 82.6446, aux.loss_ce: 0.2537, aux.acc_seg: 76.3471, loss: 0.7189, grad_norm: 6.7220 2023-02-11 20:07:20,361 - mmseg - INFO - Iter [32900/160000] lr: 4.766e-05, eta: 7:29:03, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4720, decode.acc_seg: 83.0369, aux.loss_ce: 0.2589, aux.acc_seg: 76.5380, loss: 0.7309, grad_norm: 5.8000 2023-02-11 20:07:30,729 - mmseg - INFO - Iter [32950/160000] lr: 4.764e-05, eta: 7:28:50, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4796, decode.acc_seg: 81.7400, aux.loss_ce: 0.2655, aux.acc_seg: 75.6285, loss: 0.7451, grad_norm: 6.0048 2023-02-11 20:07:40,896 - mmseg - INFO - Saving checkpoint at 33000 iterations 2023-02-11 20:07:41,596 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:07:41,596 - mmseg - INFO - Iter [33000/160000] lr: 4.763e-05, eta: 7:28:42, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4816, decode.acc_seg: 81.9197, aux.loss_ce: 0.2592, aux.acc_seg: 76.2745, loss: 0.7409, grad_norm: 6.1461 2023-02-11 20:07:51,726 - mmseg - INFO - Iter [33050/160000] lr: 4.761e-05, eta: 7:28:28, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4656, decode.acc_seg: 82.7398, aux.loss_ce: 0.2629, aux.acc_seg: 75.7713, loss: 0.7285, grad_norm: 6.5261 2023-02-11 20:08:02,559 - mmseg - INFO - Iter [33100/160000] lr: 4.759e-05, eta: 7:28:19, time: 0.216, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4624, decode.acc_seg: 82.8130, aux.loss_ce: 0.2603, aux.acc_seg: 76.7569, loss: 0.7227, grad_norm: 6.3960 2023-02-11 20:08:13,265 - mmseg - INFO - Iter [33150/160000] lr: 4.757e-05, eta: 7:28:09, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4897, decode.acc_seg: 82.6650, aux.loss_ce: 0.2695, aux.acc_seg: 76.1583, loss: 0.7592, grad_norm: 6.1375 2023-02-11 20:08:23,180 - mmseg - INFO - Iter [33200/160000] lr: 4.755e-05, eta: 7:27:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4467, decode.acc_seg: 82.8588, aux.loss_ce: 0.2470, aux.acc_seg: 76.5432, loss: 0.6937, grad_norm: 5.7818 2023-02-11 20:08:33,233 - mmseg - INFO - Iter [33250/160000] lr: 4.753e-05, eta: 7:27:39, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4652, decode.acc_seg: 83.1441, aux.loss_ce: 0.2588, aux.acc_seg: 76.3786, loss: 0.7240, grad_norm: 6.9041 2023-02-11 20:08:43,310 - mmseg - INFO - Iter [33300/160000] lr: 4.751e-05, eta: 7:27:24, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4505, decode.acc_seg: 82.9189, aux.loss_ce: 0.2477, aux.acc_seg: 77.0945, loss: 0.6982, 
grad_norm: 6.0918 2023-02-11 20:08:53,369 - mmseg - INFO - Iter [33350/160000] lr: 4.749e-05, eta: 7:27:10, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4570, decode.acc_seg: 83.0217, aux.loss_ce: 0.2595, aux.acc_seg: 76.3870, loss: 0.7164, grad_norm: 6.6182 2023-02-11 20:09:03,523 - mmseg - INFO - Iter [33400/160000] lr: 4.748e-05, eta: 7:26:56, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4495, decode.acc_seg: 83.2039, aux.loss_ce: 0.2522, aux.acc_seg: 76.4786, loss: 0.7017, grad_norm: 6.3972 2023-02-11 20:09:13,602 - mmseg - INFO - Iter [33450/160000] lr: 4.746e-05, eta: 7:26:41, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4779, decode.acc_seg: 82.1153, aux.loss_ce: 0.2694, aux.acc_seg: 74.9688, loss: 0.7473, grad_norm: 6.9338 2023-02-11 20:09:23,583 - mmseg - INFO - Iter [33500/160000] lr: 4.744e-05, eta: 7:26:27, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4961, decode.acc_seg: 82.3220, aux.loss_ce: 0.2778, aux.acc_seg: 75.5608, loss: 0.7739, grad_norm: 7.0403 2023-02-11 20:09:34,148 - mmseg - INFO - Iter [33550/160000] lr: 4.742e-05, eta: 7:26:16, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4551, decode.acc_seg: 82.8094, aux.loss_ce: 0.2609, aux.acc_seg: 75.8404, loss: 0.7160, grad_norm: 7.0001 2023-02-11 20:09:44,641 - mmseg - INFO - Iter [33600/160000] lr: 4.740e-05, eta: 7:26:05, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4773, decode.acc_seg: 82.1459, aux.loss_ce: 0.2678, aux.acc_seg: 75.2662, loss: 0.7452, grad_norm: 6.4720 2023-02-11 20:09:54,875 - mmseg - INFO - Iter [33650/160000] lr: 4.738e-05, eta: 7:25:51, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4849, decode.acc_seg: 82.1669, aux.loss_ce: 0.2727, aux.acc_seg: 75.3847, loss: 0.7576, grad_norm: 6.4232 2023-02-11 20:10:06,831 - mmseg - INFO - Iter [33700/160000] lr: 4.736e-05, eta: 7:25:51, time: 0.240, data_time: 0.047, memory: 7748, decode.loss_ce: 0.4528, decode.acc_seg: 83.2973, aux.loss_ce: 0.2541, aux.acc_seg: 76.7774, loss: 0.7070, grad_norm: 6.3387 2023-02-11 20:10:16,688 - mmseg - INFO - Iter [33750/160000] lr: 4.734e-05, eta: 7:25:35, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4625, decode.acc_seg: 82.8647, aux.loss_ce: 0.2633, aux.acc_seg: 75.6278, loss: 0.7258, grad_norm: 6.2631 2023-02-11 20:10:26,627 - mmseg - INFO - Iter [33800/160000] lr: 4.733e-05, eta: 7:25:20, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4586, decode.acc_seg: 82.7634, aux.loss_ce: 0.2663, aux.acc_seg: 75.8201, loss: 0.7248, grad_norm: 6.3109 2023-02-11 20:10:36,421 - mmseg - INFO - Iter [33850/160000] lr: 4.731e-05, eta: 7:25:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4307, decode.acc_seg: 83.8165, aux.loss_ce: 0.2481, aux.acc_seg: 76.9443, loss: 0.6788, grad_norm: 6.2928 2023-02-11 20:10:46,582 - mmseg - INFO - Iter [33900/160000] lr: 4.729e-05, eta: 7:24:50, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4434, decode.acc_seg: 83.4244, aux.loss_ce: 0.2504, aux.acc_seg: 76.8094, loss: 0.6938, grad_norm: 5.7279 2023-02-11 20:10:56,598 - mmseg - INFO - Iter [33950/160000] lr: 4.727e-05, eta: 7:24:35, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4532, decode.acc_seg: 83.5437, aux.loss_ce: 0.2572, aux.acc_seg: 76.8641, loss: 0.7104, grad_norm: 6.9623 2023-02-11 20:11:06,878 - mmseg - INFO - Saving checkpoint at 34000 iterations 2023-02-11 20:11:07,574 - mmseg - INFO - Exp name: 
diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:11:07,574 - mmseg - INFO - Iter [34000/160000] lr: 4.725e-05, eta: 7:24:28, time: 0.220, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4554, decode.acc_seg: 83.5060, aux.loss_ce: 0.2565, aux.acc_seg: 76.8165, loss: 0.7119, grad_norm: 6.5327 2023-02-11 20:11:17,742 - mmseg - INFO - Iter [34050/160000] lr: 4.723e-05, eta: 7:24:14, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4376, decode.acc_seg: 83.7264, aux.loss_ce: 0.2500, aux.acc_seg: 76.9893, loss: 0.6876, grad_norm: 5.9903 2023-02-11 20:11:27,527 - mmseg - INFO - Iter [34100/160000] lr: 4.721e-05, eta: 7:23:58, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4392, decode.acc_seg: 83.9497, aux.loss_ce: 0.2495, aux.acc_seg: 77.3145, loss: 0.6887, grad_norm: 6.3792 2023-02-11 20:11:37,545 - mmseg - INFO - Iter [34150/160000] lr: 4.719e-05, eta: 7:23:43, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4699, decode.acc_seg: 83.3648, aux.loss_ce: 0.2674, aux.acc_seg: 76.0319, loss: 0.7372, grad_norm: 6.2196 2023-02-11 20:11:47,553 - mmseg - INFO - Iter [34200/160000] lr: 4.718e-05, eta: 7:23:29, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4490, decode.acc_seg: 83.5177, aux.loss_ce: 0.2554, aux.acc_seg: 76.9350, loss: 0.7043, grad_norm: 6.1321 2023-02-11 20:11:58,147 - mmseg - INFO - Iter [34250/160000] lr: 4.716e-05, eta: 7:23:18, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4306, decode.acc_seg: 83.6869, aux.loss_ce: 0.2404, aux.acc_seg: 77.5432, loss: 0.6710, grad_norm: 5.7585 2023-02-11 20:12:08,067 - mmseg - INFO - Iter [34300/160000] lr: 4.714e-05, eta: 7:23:03, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4444, decode.acc_seg: 83.2206, aux.loss_ce: 0.2530, aux.acc_seg: 76.7158, loss: 0.6974, grad_norm: 6.0611 2023-02-11 20:12:17,772 - mmseg - INFO - Iter [34350/160000] lr: 4.712e-05, eta: 7:22:47, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4305, decode.acc_seg: 83.9094, aux.loss_ce: 0.2433, aux.acc_seg: 77.4297, loss: 0.6738, grad_norm: 5.8122 2023-02-11 20:12:28,120 - mmseg - INFO - Iter [34400/160000] lr: 4.710e-05, eta: 7:22:35, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4694, decode.acc_seg: 82.8381, aux.loss_ce: 0.2721, aux.acc_seg: 75.2218, loss: 0.7415, grad_norm: 7.0572 2023-02-11 20:12:38,402 - mmseg - INFO - Iter [34450/160000] lr: 4.708e-05, eta: 7:22:22, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4497, decode.acc_seg: 82.9303, aux.loss_ce: 0.2516, aux.acc_seg: 76.4878, loss: 0.7013, grad_norm: 6.1716 2023-02-11 20:12:48,455 - mmseg - INFO - Iter [34500/160000] lr: 4.706e-05, eta: 7:22:08, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4707, decode.acc_seg: 82.7500, aux.loss_ce: 0.2597, aux.acc_seg: 76.3088, loss: 0.7304, grad_norm: 6.7790 2023-02-11 20:12:58,682 - mmseg - INFO - Iter [34550/160000] lr: 4.704e-05, eta: 7:21:55, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4448, decode.acc_seg: 83.3741, aux.loss_ce: 0.2444, aux.acc_seg: 77.8142, loss: 0.6892, grad_norm: 5.3099 2023-02-11 20:13:08,328 - mmseg - INFO - Iter [34600/160000] lr: 4.703e-05, eta: 7:21:38, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4479, decode.acc_seg: 83.5306, aux.loss_ce: 0.2531, aux.acc_seg: 76.7682, loss: 0.7010, grad_norm: 5.9719 2023-02-11 20:13:18,122 - mmseg - INFO - Iter [34650/160000] lr: 4.701e-05, eta: 7:21:23, time: 0.196, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.4538, decode.acc_seg: 83.6594, aux.loss_ce: 0.2516, aux.acc_seg: 77.8109, loss: 0.7054, grad_norm: 6.7336 2023-02-11 20:13:28,319 - mmseg - INFO - Iter [34700/160000] lr: 4.699e-05, eta: 7:21:10, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4349, decode.acc_seg: 84.0285, aux.loss_ce: 0.2452, aux.acc_seg: 77.7507, loss: 0.6801, grad_norm: 6.0065 2023-02-11 20:13:38,141 - mmseg - INFO - Iter [34750/160000] lr: 4.697e-05, eta: 7:20:54, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4301, decode.acc_seg: 83.9442, aux.loss_ce: 0.2520, aux.acc_seg: 76.0657, loss: 0.6821, grad_norm: 6.1231 2023-02-11 20:13:48,125 - mmseg - INFO - Iter [34800/160000] lr: 4.695e-05, eta: 7:20:40, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4633, decode.acc_seg: 82.0806, aux.loss_ce: 0.2629, aux.acc_seg: 75.8838, loss: 0.7262, grad_norm: 7.4409 2023-02-11 20:13:58,354 - mmseg - INFO - Iter [34850/160000] lr: 4.693e-05, eta: 7:20:27, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4431, decode.acc_seg: 83.2600, aux.loss_ce: 0.2523, aux.acc_seg: 76.2755, loss: 0.6954, grad_norm: 6.6750 2023-02-11 20:14:08,212 - mmseg - INFO - Iter [34900/160000] lr: 4.691e-05, eta: 7:20:12, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4314, decode.acc_seg: 84.0567, aux.loss_ce: 0.2510, aux.acc_seg: 76.3931, loss: 0.6824, grad_norm: 6.0452 2023-02-11 20:14:20,163 - mmseg - INFO - Iter [34950/160000] lr: 4.689e-05, eta: 7:20:10, time: 0.239, data_time: 0.046, memory: 7748, decode.loss_ce: 0.4318, decode.acc_seg: 83.9740, aux.loss_ce: 0.2476, aux.acc_seg: 77.1510, loss: 0.6794, grad_norm: 5.6652 2023-02-11 20:14:30,998 - mmseg - INFO - Saving checkpoint at 35000 iterations 2023-02-11 20:14:31,677 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:14:31,677 - mmseg - INFO - Iter [35000/160000] lr: 4.688e-05, eta: 7:20:06, time: 0.230, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4303, decode.acc_seg: 83.8082, aux.loss_ce: 0.2540, aux.acc_seg: 76.3999, loss: 0.6843, grad_norm: 6.1007 2023-02-11 20:14:41,913 - mmseg - INFO - Iter [35050/160000] lr: 4.686e-05, eta: 7:19:53, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4105, decode.acc_seg: 84.5145, aux.loss_ce: 0.2334, aux.acc_seg: 78.0330, loss: 0.6440, grad_norm: 6.4883 2023-02-11 20:14:51,698 - mmseg - INFO - Iter [35100/160000] lr: 4.684e-05, eta: 7:19:38, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4295, decode.acc_seg: 84.3477, aux.loss_ce: 0.2498, aux.acc_seg: 77.5731, loss: 0.6793, grad_norm: 6.5607 2023-02-11 20:15:01,715 - mmseg - INFO - Iter [35150/160000] lr: 4.682e-05, eta: 7:19:24, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4089, decode.acc_seg: 84.2302, aux.loss_ce: 0.2360, aux.acc_seg: 77.6454, loss: 0.6450, grad_norm: 5.4088 2023-02-11 20:15:11,720 - mmseg - INFO - Iter [35200/160000] lr: 4.680e-05, eta: 7:19:10, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4450, decode.acc_seg: 83.4441, aux.loss_ce: 0.2486, aux.acc_seg: 77.4122, loss: 0.6936, grad_norm: 6.4981 2023-02-11 20:15:21,861 - mmseg - INFO - Iter [35250/160000] lr: 4.678e-05, eta: 7:18:56, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4548, decode.acc_seg: 83.6807, aux.loss_ce: 0.2704, aux.acc_seg: 75.6478, loss: 0.7252, grad_norm: 7.3130 2023-02-11 20:15:31,819 - mmseg - INFO - Iter [35300/160000] lr: 4.676e-05, eta: 7:18:42, time: 0.200, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.4577, decode.acc_seg: 83.2025, aux.loss_ce: 0.2663, aux.acc_seg: 75.7287, loss: 0.7240, grad_norm: 6.5496 2023-02-11 20:15:41,449 - mmseg - INFO - Iter [35350/160000] lr: 4.674e-05, eta: 7:18:25, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4495, decode.acc_seg: 83.4422, aux.loss_ce: 0.2589, aux.acc_seg: 76.1662, loss: 0.7084, grad_norm: 7.0164 2023-02-11 20:15:51,737 - mmseg - INFO - Iter [35400/160000] lr: 4.673e-05, eta: 7:18:13, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4263, decode.acc_seg: 83.8560, aux.loss_ce: 0.2402, aux.acc_seg: 78.0332, loss: 0.6665, grad_norm: 6.7776 2023-02-11 20:16:01,970 - mmseg - INFO - Iter [35450/160000] lr: 4.671e-05, eta: 7:18:01, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4703, decode.acc_seg: 82.2818, aux.loss_ce: 0.2613, aux.acc_seg: 76.2030, loss: 0.7316, grad_norm: 7.0631 2023-02-11 20:16:12,019 - mmseg - INFO - Iter [35500/160000] lr: 4.669e-05, eta: 7:17:47, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4883, decode.acc_seg: 82.2103, aux.loss_ce: 0.2760, aux.acc_seg: 74.8720, loss: 0.7643, grad_norm: 7.7053 2023-02-11 20:16:22,124 - mmseg - INFO - Iter [35550/160000] lr: 4.667e-05, eta: 7:17:33, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4161, decode.acc_seg: 84.3529, aux.loss_ce: 0.2460, aux.acc_seg: 77.0781, loss: 0.6621, grad_norm: 5.7804 2023-02-11 20:16:32,414 - mmseg - INFO - Iter [35600/160000] lr: 4.665e-05, eta: 7:17:21, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4572, decode.acc_seg: 82.9910, aux.loss_ce: 0.2570, aux.acc_seg: 76.1331, loss: 0.7142, grad_norm: 6.0176 2023-02-11 20:16:42,425 - mmseg - INFO - Iter [35650/160000] lr: 4.663e-05, eta: 7:17:07, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4201, decode.acc_seg: 84.1065, aux.loss_ce: 0.2445, aux.acc_seg: 77.4328, loss: 0.6646, grad_norm: 5.6702 2023-02-11 20:16:52,668 - mmseg - INFO - Iter [35700/160000] lr: 4.661e-05, eta: 7:16:55, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4731, decode.acc_seg: 82.5361, aux.loss_ce: 0.2658, aux.acc_seg: 75.9431, loss: 0.7389, grad_norm: 7.1600 2023-02-11 20:17:02,855 - mmseg - INFO - Iter [35750/160000] lr: 4.659e-05, eta: 7:16:42, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4247, decode.acc_seg: 84.0920, aux.loss_ce: 0.2438, aux.acc_seg: 77.3601, loss: 0.6686, grad_norm: 6.1655 2023-02-11 20:17:13,241 - mmseg - INFO - Iter [35800/160000] lr: 4.658e-05, eta: 7:16:31, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4607, decode.acc_seg: 83.0491, aux.loss_ce: 0.2593, aux.acc_seg: 76.2649, loss: 0.7200, grad_norm: 6.1215 2023-02-11 20:17:23,341 - mmseg - INFO - Iter [35850/160000] lr: 4.656e-05, eta: 7:16:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4463, decode.acc_seg: 83.2363, aux.loss_ce: 0.2546, aux.acc_seg: 76.6447, loss: 0.7010, grad_norm: 6.6906 2023-02-11 20:17:34,143 - mmseg - INFO - Iter [35900/160000] lr: 4.654e-05, eta: 7:16:08, time: 0.216, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4257, decode.acc_seg: 83.8265, aux.loss_ce: 0.2476, aux.acc_seg: 77.1730, loss: 0.6732, grad_norm: 6.2629 2023-02-11 20:17:44,408 - mmseg - INFO - Iter [35950/160000] lr: 4.652e-05, eta: 7:15:56, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4585, decode.acc_seg: 82.9272, aux.loss_ce: 0.2608, aux.acc_seg: 75.8436, loss: 0.7193, grad_norm: 7.1493 2023-02-11 20:17:54,336 - mmseg - INFO - Saving checkpoint at 
36000 iterations 2023-02-11 20:17:55,075 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:17:55,075 - mmseg - INFO - Iter [36000/160000] lr: 4.650e-05, eta: 7:15:46, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4457, decode.acc_seg: 83.3269, aux.loss_ce: 0.2556, aux.acc_seg: 76.4075, loss: 0.7013, grad_norm: 5.6904 2023-02-11 20:18:05,025 - mmseg - INFO - Iter [36050/160000] lr: 4.648e-05, eta: 7:15:32, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4216, decode.acc_seg: 83.9112, aux.loss_ce: 0.2443, aux.acc_seg: 77.1660, loss: 0.6659, grad_norm: 5.7867 2023-02-11 20:18:15,248 - mmseg - INFO - Iter [36100/160000] lr: 4.646e-05, eta: 7:15:19, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4375, decode.acc_seg: 83.6791, aux.loss_ce: 0.2491, aux.acc_seg: 76.9450, loss: 0.6866, grad_norm: 6.0104 2023-02-11 20:18:25,580 - mmseg - INFO - Iter [36150/160000] lr: 4.644e-05, eta: 7:15:08, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4215, decode.acc_seg: 84.5287, aux.loss_ce: 0.2422, aux.acc_seg: 78.1477, loss: 0.6636, grad_norm: 6.3965 2023-02-11 20:18:35,849 - mmseg - INFO - Iter [36200/160000] lr: 4.643e-05, eta: 7:14:55, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4355, decode.acc_seg: 83.9041, aux.loss_ce: 0.2526, aux.acc_seg: 76.5357, loss: 0.6881, grad_norm: 5.8981 2023-02-11 20:18:48,655 - mmseg - INFO - Iter [36250/160000] lr: 4.641e-05, eta: 7:14:59, time: 0.256, data_time: 0.048, memory: 7748, decode.loss_ce: 0.4371, decode.acc_seg: 83.5255, aux.loss_ce: 0.2480, aux.acc_seg: 77.3170, loss: 0.6851, grad_norm: 5.5403 2023-02-11 20:18:58,875 - mmseg - INFO - Iter [36300/160000] lr: 4.639e-05, eta: 7:14:46, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4621, decode.acc_seg: 82.7588, aux.loss_ce: 0.2672, aux.acc_seg: 75.2041, loss: 0.7293, grad_norm: 7.9260 2023-02-11 20:19:09,196 - mmseg - INFO - Iter [36350/160000] lr: 4.637e-05, eta: 7:14:34, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4469, decode.acc_seg: 82.9092, aux.loss_ce: 0.2562, aux.acc_seg: 75.8902, loss: 0.7031, grad_norm: 6.0574 2023-02-11 20:19:19,305 - mmseg - INFO - Iter [36400/160000] lr: 4.635e-05, eta: 7:14:21, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4504, decode.acc_seg: 83.2218, aux.loss_ce: 0.2570, aux.acc_seg: 76.3834, loss: 0.7074, grad_norm: 6.5438 2023-02-11 20:19:29,320 - mmseg - INFO - Iter [36450/160000] lr: 4.633e-05, eta: 7:14:07, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4217, decode.acc_seg: 83.9252, aux.loss_ce: 0.2417, aux.acc_seg: 77.5859, loss: 0.6633, grad_norm: 7.0421 2023-02-11 20:19:39,751 - mmseg - INFO - Iter [36500/160000] lr: 4.631e-05, eta: 7:13:56, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4348, decode.acc_seg: 83.5224, aux.loss_ce: 0.2572, aux.acc_seg: 76.1672, loss: 0.6919, grad_norm: 6.8399 2023-02-11 20:19:49,980 - mmseg - INFO - Iter [36550/160000] lr: 4.629e-05, eta: 7:13:44, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4382, decode.acc_seg: 83.6212, aux.loss_ce: 0.2562, aux.acc_seg: 76.5051, loss: 0.6944, grad_norm: 6.0600 2023-02-11 20:20:00,210 - mmseg - INFO - Iter [36600/160000] lr: 4.628e-05, eta: 7:13:31, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4377, decode.acc_seg: 83.7398, aux.loss_ce: 0.2579, aux.acc_seg: 76.3614, loss: 0.6956, grad_norm: 5.8882 2023-02-11 20:20:10,295 - mmseg - INFO - Iter [36650/160000] lr: 
4.626e-05, eta: 7:13:18, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4263, decode.acc_seg: 83.9250, aux.loss_ce: 0.2428, aux.acc_seg: 77.4703, loss: 0.6691, grad_norm: 5.6589 2023-02-11 20:20:20,671 - mmseg - INFO - Iter [36700/160000] lr: 4.624e-05, eta: 7:13:07, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4662, decode.acc_seg: 83.2060, aux.loss_ce: 0.2641, aux.acc_seg: 76.3983, loss: 0.7303, grad_norm: 7.3551 2023-02-11 20:20:30,639 - mmseg - INFO - Iter [36750/160000] lr: 4.622e-05, eta: 7:12:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4444, decode.acc_seg: 83.8373, aux.loss_ce: 0.2563, aux.acc_seg: 76.5278, loss: 0.7006, grad_norm: 6.6378 2023-02-11 20:20:40,558 - mmseg - INFO - Iter [36800/160000] lr: 4.620e-05, eta: 7:12:39, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4256, decode.acc_seg: 84.2122, aux.loss_ce: 0.2423, aux.acc_seg: 77.4873, loss: 0.6680, grad_norm: 5.7277 2023-02-11 20:20:50,279 - mmseg - INFO - Iter [36850/160000] lr: 4.618e-05, eta: 7:12:23, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4096, decode.acc_seg: 84.4863, aux.loss_ce: 0.2397, aux.acc_seg: 77.1047, loss: 0.6492, grad_norm: 6.6007 2023-02-11 20:21:00,163 - mmseg - INFO - Iter [36900/160000] lr: 4.616e-05, eta: 7:12:09, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4149, decode.acc_seg: 84.3549, aux.loss_ce: 0.2377, aux.acc_seg: 77.3447, loss: 0.6526, grad_norm: 5.8111 2023-02-11 20:21:10,072 - mmseg - INFO - Iter [36950/160000] lr: 4.614e-05, eta: 7:11:55, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4006, decode.acc_seg: 84.5656, aux.loss_ce: 0.2404, aux.acc_seg: 76.9218, loss: 0.6410, grad_norm: 5.7669 2023-02-11 20:21:20,737 - mmseg - INFO - Saving checkpoint at 37000 iterations 2023-02-11 20:21:21,417 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:21:21,417 - mmseg - INFO - Iter [37000/160000] lr: 4.613e-05, eta: 7:11:49, time: 0.227, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4464, decode.acc_seg: 83.1138, aux.loss_ce: 0.2527, aux.acc_seg: 76.2901, loss: 0.6992, grad_norm: 6.4971 2023-02-11 20:21:31,168 - mmseg - INFO - Iter [37050/160000] lr: 4.611e-05, eta: 7:11:34, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4516, decode.acc_seg: 83.3026, aux.loss_ce: 0.2608, aux.acc_seg: 75.8833, loss: 0.7123, grad_norm: 6.7265 2023-02-11 20:21:41,007 - mmseg - INFO - Iter [37100/160000] lr: 4.609e-05, eta: 7:11:19, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4194, decode.acc_seg: 84.3637, aux.loss_ce: 0.2373, aux.acc_seg: 77.7463, loss: 0.6567, grad_norm: 6.0968 2023-02-11 20:21:50,883 - mmseg - INFO - Iter [37150/160000] lr: 4.607e-05, eta: 7:11:05, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4584, decode.acc_seg: 82.5462, aux.loss_ce: 0.2605, aux.acc_seg: 75.6768, loss: 0.7189, grad_norm: 12.0957 2023-02-11 20:22:00,685 - mmseg - INFO - Iter [37200/160000] lr: 4.605e-05, eta: 7:10:50, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4344, decode.acc_seg: 84.1484, aux.loss_ce: 0.2499, aux.acc_seg: 77.1843, loss: 0.6844, grad_norm: 5.5866 2023-02-11 20:22:10,454 - mmseg - INFO - Iter [37250/160000] lr: 4.603e-05, eta: 7:10:35, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4380, decode.acc_seg: 84.2390, aux.loss_ce: 0.2517, aux.acc_seg: 76.8217, loss: 0.6897, grad_norm: 6.5852 2023-02-11 20:22:21,374 - mmseg - INFO - Iter [37300/160000] lr: 4.601e-05, 
eta: 7:10:27, time: 0.219, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4378, decode.acc_seg: 84.0154, aux.loss_ce: 0.2478, aux.acc_seg: 77.2778, loss: 0.6856, grad_norm: 5.8821 2023-02-11 20:22:31,110 - mmseg - INFO - Iter [37350/160000] lr: 4.599e-05, eta: 7:10:12, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4060, decode.acc_seg: 84.4128, aux.loss_ce: 0.2367, aux.acc_seg: 77.6322, loss: 0.6427, grad_norm: 6.8133 2023-02-11 20:22:41,154 - mmseg - INFO - Iter [37400/160000] lr: 4.598e-05, eta: 7:09:59, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4394, decode.acc_seg: 83.5049, aux.loss_ce: 0.2472, aux.acc_seg: 76.8540, loss: 0.6866, grad_norm: 5.6337 2023-02-11 20:22:51,251 - mmseg - INFO - Iter [37450/160000] lr: 4.596e-05, eta: 7:09:46, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4318, decode.acc_seg: 84.0786, aux.loss_ce: 0.2515, aux.acc_seg: 76.6841, loss: 0.6834, grad_norm: 6.4547 2023-02-11 20:23:03,417 - mmseg - INFO - Iter [37500/160000] lr: 4.594e-05, eta: 7:09:45, time: 0.243, data_time: 0.046, memory: 7748, decode.loss_ce: 0.4136, decode.acc_seg: 84.9258, aux.loss_ce: 0.2415, aux.acc_seg: 77.8682, loss: 0.6551, grad_norm: 5.6662 2023-02-11 20:23:13,231 - mmseg - INFO - Iter [37550/160000] lr: 4.592e-05, eta: 7:09:30, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4050, decode.acc_seg: 84.9239, aux.loss_ce: 0.2387, aux.acc_seg: 77.6838, loss: 0.6437, grad_norm: 5.4910 2023-02-11 20:23:23,496 - mmseg - INFO - Iter [37600/160000] lr: 4.590e-05, eta: 7:09:18, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4349, decode.acc_seg: 83.5670, aux.loss_ce: 0.2528, aux.acc_seg: 76.8608, loss: 0.6877, grad_norm: 6.0243 2023-02-11 20:23:33,767 - mmseg - INFO - Iter [37650/160000] lr: 4.588e-05, eta: 7:09:06, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4226, decode.acc_seg: 84.3344, aux.loss_ce: 0.2461, aux.acc_seg: 77.4819, loss: 0.6687, grad_norm: 5.1665 2023-02-11 20:23:44,007 - mmseg - INFO - Iter [37700/160000] lr: 4.586e-05, eta: 7:08:54, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4435, decode.acc_seg: 83.7953, aux.loss_ce: 0.2539, aux.acc_seg: 76.9702, loss: 0.6974, grad_norm: 5.7745 2023-02-11 20:23:53,807 - mmseg - INFO - Iter [37750/160000] lr: 4.584e-05, eta: 7:08:39, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4009, decode.acc_seg: 84.8189, aux.loss_ce: 0.2390, aux.acc_seg: 76.9193, loss: 0.6399, grad_norm: 5.7249 2023-02-11 20:24:04,311 - mmseg - INFO - Iter [37800/160000] lr: 4.583e-05, eta: 7:08:29, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4488, decode.acc_seg: 83.7302, aux.loss_ce: 0.2605, aux.acc_seg: 76.1493, loss: 0.7093, grad_norm: 6.2144 2023-02-11 20:24:14,674 - mmseg - INFO - Iter [37850/160000] lr: 4.581e-05, eta: 7:08:17, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4268, decode.acc_seg: 84.2028, aux.loss_ce: 0.2571, aux.acc_seg: 76.5150, loss: 0.6839, grad_norm: 6.8089 2023-02-11 20:24:24,981 - mmseg - INFO - Iter [37900/160000] lr: 4.579e-05, eta: 7:08:06, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4172, decode.acc_seg: 84.1700, aux.loss_ce: 0.2418, aux.acc_seg: 77.2036, loss: 0.6589, grad_norm: 5.5891 2023-02-11 20:24:34,980 - mmseg - INFO - Iter [37950/160000] lr: 4.577e-05, eta: 7:07:52, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4255, decode.acc_seg: 84.3357, aux.loss_ce: 0.2523, aux.acc_seg: 76.6728, loss: 0.6778, grad_norm: 6.1992 
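The validation block above (iteration 32000) reports one IoU/Acc pair per ADE20K class plus the aAcc/mIoU/mAcc summary (77.46 / 36.26 / 46.30), and the evaluation hook then keeps best_mIoU_iter_32000.pth because mIoU improved. As a minimal sketch of where those numbers come from (standard confusion-matrix definitions, not mmseg's actual pre_eval code path): aAcc is overall pixel accuracy, per-class IoU is TP / (TP + FP + FN), the per-class Acc column is the recall TP / (TP + FN), and mIoU/mAcc are unweighted means over classes, which is why near-zero classes such as fountain, trade name and lake pull mIoU far below aAcc.

    import numpy as np

    def summarize(conf):
        """aAcc/mIoU/mAcc from a confusion matrix where conf[i, j] counts
        pixels with ground-truth class i predicted as class j."""
        conf = conf.astype(float)
        tp = np.diag(conf)
        fp = conf.sum(axis=0) - tp        # predicted as the class, but wrong
        fn = conf.sum(axis=1) - tp        # ground truth of the class, but missed
        with np.errstate(invalid="ignore"):
            iou = tp / (tp + fp + fn)     # per-class IoU column
            acc = tp / (tp + fn)          # per-class Acc column (recall)
        return {
            "aAcc": 100.0 * tp.sum() / conf.sum(),
            "mIoU": 100.0 * np.nanmean(iou),   # unweighted mean over the 150 classes
            "mAcc": 100.0 * np.nanmean(acc),
        }

Averaging the logged per-class IoU column the same way should reproduce the 36.26 summary value up to rounding.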
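For the per-iteration lines, the logged loss is simply the sum of the two logged cross-entropy terms (decode head plus auxiliary head; e.g. 0.4255 + 0.2523 = 0.6778 at iteration 37950), the lr column follows the poly schedule once the linear warmup is over, and eta is roughly the remaining iterations times a smoothed per-iteration time. A small sanity-check sketch, assuming the schedule constants from this experiment's config (base_lr = 6e-05, power = 1.0, min_lr = 0.0, max_iters = 160000); the helper name poly_lr is ours, not an mmcv API:

    # Poly decay as applied after warmup:
    # lr = (base_lr - min_lr) * (1 - iter / max_iters) ** power + min_lr
    base_lr, power, min_lr, max_iters = 6e-05, 1.0, 0.0, 160_000

    def poly_lr(it):
        return (base_lr - min_lr) * (1 - it / max_iters) ** power + min_lr

    print(f"{poly_lr(32050):.3e}")    # 4.798e-05, matches the iter 32050 line
    print(f"{poly_lr(40000):.3e}")    # 4.500e-05, matches the iter 40000 line

    # Total loss is the sum of the logged decode and aux CE terms (iter 37950).
    print(round(0.4255 + 0.2523, 4))  # 0.6778

    # ETA ~ remaining iterations x smoothed time per iteration:
    # at iter 37950, 122050 iters x ~0.21 s/iter vs the logged eta of 7:07:52.
    print(f"{(160_000 - 37_950) * 0.21 / 3600:.1f} h")  # ~7.1 h

The checkpoints written every 1000 iterations between evaluations follow the same pattern throughout the rest of the log below.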
2023-02-11 20:24:45,223 - mmseg - INFO - Saving checkpoint at 38000 iterations 2023-02-11 20:24:45,916 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:24:45,916 - mmseg - INFO - Iter [38000/160000] lr: 4.575e-05, eta: 7:07:44, time: 0.219, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4356, decode.acc_seg: 83.3536, aux.loss_ce: 0.2482, aux.acc_seg: 77.1472, loss: 0.6838, grad_norm: 6.0405 2023-02-11 20:24:56,100 - mmseg - INFO - Iter [38050/160000] lr: 4.573e-05, eta: 7:07:32, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4235, decode.acc_seg: 84.2616, aux.loss_ce: 0.2476, aux.acc_seg: 77.3356, loss: 0.6711, grad_norm: 6.3439 2023-02-11 20:25:06,278 - mmseg - INFO - Iter [38100/160000] lr: 4.571e-05, eta: 7:07:19, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4303, decode.acc_seg: 83.5419, aux.loss_ce: 0.2505, aux.acc_seg: 76.2048, loss: 0.6808, grad_norm: 7.1343 2023-02-11 20:25:16,626 - mmseg - INFO - Iter [38150/160000] lr: 4.569e-05, eta: 7:07:08, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4487, decode.acc_seg: 82.9273, aux.loss_ce: 0.2604, aux.acc_seg: 75.7330, loss: 0.7091, grad_norm: 7.0439 2023-02-11 20:25:26,643 - mmseg - INFO - Iter [38200/160000] lr: 4.568e-05, eta: 7:06:55, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4457, decode.acc_seg: 83.8928, aux.loss_ce: 0.2561, aux.acc_seg: 76.6727, loss: 0.7018, grad_norm: 6.1044 2023-02-11 20:25:36,476 - mmseg - INFO - Iter [38250/160000] lr: 4.566e-05, eta: 7:06:40, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4011, decode.acc_seg: 84.6747, aux.loss_ce: 0.2371, aux.acc_seg: 77.4894, loss: 0.6382, grad_norm: 6.1738 2023-02-11 20:25:46,982 - mmseg - INFO - Iter [38300/160000] lr: 4.564e-05, eta: 7:06:30, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4583, decode.acc_seg: 82.6992, aux.loss_ce: 0.2606, aux.acc_seg: 75.7289, loss: 0.7189, grad_norm: 6.4401 2023-02-11 20:25:57,320 - mmseg - INFO - Iter [38350/160000] lr: 4.562e-05, eta: 7:06:18, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4266, decode.acc_seg: 84.4447, aux.loss_ce: 0.2534, aux.acc_seg: 77.4160, loss: 0.6800, grad_norm: 5.9396 2023-02-11 20:26:07,170 - mmseg - INFO - Iter [38400/160000] lr: 4.560e-05, eta: 7:06:04, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4252, decode.acc_seg: 84.0309, aux.loss_ce: 0.2514, aux.acc_seg: 76.8220, loss: 0.6766, grad_norm: 6.4235 2023-02-11 20:26:17,095 - mmseg - INFO - Iter [38450/160000] lr: 4.558e-05, eta: 7:05:51, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4149, decode.acc_seg: 84.5778, aux.loss_ce: 0.2398, aux.acc_seg: 77.6238, loss: 0.6546, grad_norm: 5.9511 2023-02-11 20:26:26,952 - mmseg - INFO - Iter [38500/160000] lr: 4.556e-05, eta: 7:05:36, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4264, decode.acc_seg: 83.7489, aux.loss_ce: 0.2384, aux.acc_seg: 77.4283, loss: 0.6648, grad_norm: 6.2482 2023-02-11 20:26:36,925 - mmseg - INFO - Iter [38550/160000] lr: 4.554e-05, eta: 7:05:23, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4301, decode.acc_seg: 84.1957, aux.loss_ce: 0.2499, aux.acc_seg: 77.5091, loss: 0.6800, grad_norm: 6.1315 2023-02-11 20:26:47,005 - mmseg - INFO - Iter [38600/160000] lr: 4.553e-05, eta: 7:05:10, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4172, decode.acc_seg: 84.5946, aux.loss_ce: 0.2411, aux.acc_seg: 77.4400, loss: 0.6583, grad_norm: 5.5371 2023-02-11 
20:26:57,203 - mmseg - INFO - Iter [38650/160000] lr: 4.551e-05, eta: 7:04:58, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4316, decode.acc_seg: 83.6740, aux.loss_ce: 0.2481, aux.acc_seg: 76.7788, loss: 0.6797, grad_norm: 6.6707 2023-02-11 20:27:07,335 - mmseg - INFO - Iter [38700/160000] lr: 4.549e-05, eta: 7:04:46, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4255, decode.acc_seg: 83.8372, aux.loss_ce: 0.2576, aux.acc_seg: 75.8598, loss: 0.6831, grad_norm: 6.6450 2023-02-11 20:27:19,373 - mmseg - INFO - Iter [38750/160000] lr: 4.547e-05, eta: 7:04:43, time: 0.241, data_time: 0.048, memory: 7748, decode.loss_ce: 0.3995, decode.acc_seg: 85.2303, aux.loss_ce: 0.2358, aux.acc_seg: 78.3157, loss: 0.6353, grad_norm: 5.9095 2023-02-11 20:27:29,560 - mmseg - INFO - Iter [38800/160000] lr: 4.545e-05, eta: 7:04:31, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4122, decode.acc_seg: 84.9105, aux.loss_ce: 0.2486, aux.acc_seg: 77.1191, loss: 0.6607, grad_norm: 5.6763 2023-02-11 20:27:39,552 - mmseg - INFO - Iter [38850/160000] lr: 4.543e-05, eta: 7:04:18, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4524, decode.acc_seg: 83.3325, aux.loss_ce: 0.2581, aux.acc_seg: 76.4937, loss: 0.7104, grad_norm: 6.7657 2023-02-11 20:27:49,537 - mmseg - INFO - Iter [38900/160000] lr: 4.541e-05, eta: 7:04:05, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4073, decode.acc_seg: 84.9838, aux.loss_ce: 0.2376, aux.acc_seg: 78.4317, loss: 0.6449, grad_norm: 5.2744 2023-02-11 20:27:59,631 - mmseg - INFO - Iter [38950/160000] lr: 4.539e-05, eta: 7:03:52, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3973, decode.acc_seg: 84.6215, aux.loss_ce: 0.2341, aux.acc_seg: 77.8623, loss: 0.6314, grad_norm: 6.5584 2023-02-11 20:28:09,809 - mmseg - INFO - Saving checkpoint at 39000 iterations 2023-02-11 20:28:10,506 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:28:10,506 - mmseg - INFO - Iter [39000/160000] lr: 4.538e-05, eta: 7:03:43, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4111, decode.acc_seg: 84.6935, aux.loss_ce: 0.2472, aux.acc_seg: 77.1695, loss: 0.6582, grad_norm: 5.9900 2023-02-11 20:28:20,757 - mmseg - INFO - Iter [39050/160000] lr: 4.536e-05, eta: 7:03:31, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4278, decode.acc_seg: 84.2069, aux.loss_ce: 0.2440, aux.acc_seg: 77.5882, loss: 0.6718, grad_norm: 5.9917 2023-02-11 20:28:30,690 - mmseg - INFO - Iter [39100/160000] lr: 4.534e-05, eta: 7:03:18, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4029, decode.acc_seg: 84.5113, aux.loss_ce: 0.2373, aux.acc_seg: 77.4371, loss: 0.6402, grad_norm: 7.1238 2023-02-11 20:28:40,809 - mmseg - INFO - Iter [39150/160000] lr: 4.532e-05, eta: 7:03:05, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4479, decode.acc_seg: 83.3766, aux.loss_ce: 0.2546, aux.acc_seg: 76.7098, loss: 0.7025, grad_norm: 6.4678 2023-02-11 20:28:50,725 - mmseg - INFO - Iter [39200/160000] lr: 4.530e-05, eta: 7:02:52, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4442, decode.acc_seg: 83.6463, aux.loss_ce: 0.2618, aux.acc_seg: 76.1629, loss: 0.7060, grad_norm: 6.2678 2023-02-11 20:29:00,712 - mmseg - INFO - Iter [39250/160000] lr: 4.528e-05, eta: 7:02:39, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4258, decode.acc_seg: 84.3298, aux.loss_ce: 0.2455, aux.acc_seg: 77.3295, loss: 0.6712, grad_norm: 7.0642 2023-02-11 
20:29:10,684 - mmseg - INFO - Iter [39300/160000] lr: 4.526e-05, eta: 7:02:25, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4211, decode.acc_seg: 84.2060, aux.loss_ce: 0.2539, aux.acc_seg: 75.9676, loss: 0.6750, grad_norm: 5.7184 2023-02-11 20:29:20,658 - mmseg - INFO - Iter [39350/160000] lr: 4.524e-05, eta: 7:02:12, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4234, decode.acc_seg: 84.2327, aux.loss_ce: 0.2508, aux.acc_seg: 76.7270, loss: 0.6743, grad_norm: 6.4622 2023-02-11 20:29:30,592 - mmseg - INFO - Iter [39400/160000] lr: 4.523e-05, eta: 7:01:59, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4132, decode.acc_seg: 84.1898, aux.loss_ce: 0.2348, aux.acc_seg: 77.9824, loss: 0.6480, grad_norm: 6.3251 2023-02-11 20:29:40,669 - mmseg - INFO - Iter [39450/160000] lr: 4.521e-05, eta: 7:01:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4077, decode.acc_seg: 84.4966, aux.loss_ce: 0.2338, aux.acc_seg: 78.2143, loss: 0.6415, grad_norm: 5.8431 2023-02-11 20:29:50,627 - mmseg - INFO - Iter [39500/160000] lr: 4.519e-05, eta: 7:01:33, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4265, decode.acc_seg: 84.0442, aux.loss_ce: 0.2592, aux.acc_seg: 76.4309, loss: 0.6857, grad_norm: 6.2289 2023-02-11 20:30:00,542 - mmseg - INFO - Iter [39550/160000] lr: 4.517e-05, eta: 7:01:19, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4131, decode.acc_seg: 84.5898, aux.loss_ce: 0.2408, aux.acc_seg: 77.9272, loss: 0.6539, grad_norm: 6.1889 2023-02-11 20:30:10,587 - mmseg - INFO - Iter [39600/160000] lr: 4.515e-05, eta: 7:01:07, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4032, decode.acc_seg: 84.9896, aux.loss_ce: 0.2421, aux.acc_seg: 77.2829, loss: 0.6453, grad_norm: 6.1362 2023-02-11 20:30:20,307 - mmseg - INFO - Iter [39650/160000] lr: 4.513e-05, eta: 7:00:52, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4136, decode.acc_seg: 84.6630, aux.loss_ce: 0.2401, aux.acc_seg: 78.2866, loss: 0.6537, grad_norm: 5.8509 2023-02-11 20:30:30,516 - mmseg - INFO - Iter [39700/160000] lr: 4.511e-05, eta: 7:00:40, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4310, decode.acc_seg: 83.6284, aux.loss_ce: 0.2439, aux.acc_seg: 77.4413, loss: 0.6750, grad_norm: 6.0627 2023-02-11 20:30:40,259 - mmseg - INFO - Iter [39750/160000] lr: 4.509e-05, eta: 7:00:26, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4205, decode.acc_seg: 84.8264, aux.loss_ce: 0.2460, aux.acc_seg: 77.9099, loss: 0.6665, grad_norm: 6.6907 2023-02-11 20:30:50,428 - mmseg - INFO - Iter [39800/160000] lr: 4.508e-05, eta: 7:00:14, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4062, decode.acc_seg: 84.5676, aux.loss_ce: 0.2424, aux.acc_seg: 77.2090, loss: 0.6486, grad_norm: 6.0138 2023-02-11 20:31:01,051 - mmseg - INFO - Iter [39850/160000] lr: 4.506e-05, eta: 7:00:04, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4170, decode.acc_seg: 84.3396, aux.loss_ce: 0.2459, aux.acc_seg: 77.6290, loss: 0.6629, grad_norm: 6.2585 2023-02-11 20:31:10,852 - mmseg - INFO - Iter [39900/160000] lr: 4.504e-05, eta: 6:59:50, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3908, decode.acc_seg: 85.2718, aux.loss_ce: 0.2317, aux.acc_seg: 78.1025, loss: 0.6225, grad_norm: 5.7372 2023-02-11 20:31:20,892 - mmseg - INFO - Iter [39950/160000] lr: 4.502e-05, eta: 6:59:37, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3976, decode.acc_seg: 84.7559, aux.loss_ce: 
0.2392, aux.acc_seg: 77.3258, loss: 0.6368, grad_norm: 6.0418 2023-02-11 20:31:33,053 - mmseg - INFO - Saving checkpoint at 40000 iterations 2023-02-11 20:31:33,753 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:31:33,753 - mmseg - INFO - Iter [40000/160000] lr: 4.500e-05, eta: 6:59:39, time: 0.257, data_time: 0.047, memory: 7748, decode.loss_ce: 0.4344, decode.acc_seg: 83.9563, aux.loss_ce: 0.2553, aux.acc_seg: 76.6765, loss: 0.6898, grad_norm: 6.7390 2023-02-11 20:31:43,796 - mmseg - INFO - Iter [40050/160000] lr: 4.498e-05, eta: 6:59:26, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4000, decode.acc_seg: 84.6481, aux.loss_ce: 0.2414, aux.acc_seg: 77.1728, loss: 0.6415, grad_norm: 5.9605 2023-02-11 20:31:53,692 - mmseg - INFO - Iter [40100/160000] lr: 4.496e-05, eta: 6:59:13, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4244, decode.acc_seg: 84.3995, aux.loss_ce: 0.2441, aux.acc_seg: 77.8116, loss: 0.6685, grad_norm: 5.8534 2023-02-11 20:32:03,821 - mmseg - INFO - Iter [40150/160000] lr: 4.494e-05, eta: 6:59:00, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4012, decode.acc_seg: 84.6653, aux.loss_ce: 0.2361, aux.acc_seg: 77.5473, loss: 0.6373, grad_norm: 5.6553 2023-02-11 20:32:13,737 - mmseg - INFO - Iter [40200/160000] lr: 4.493e-05, eta: 6:58:47, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4281, decode.acc_seg: 84.3835, aux.loss_ce: 0.2509, aux.acc_seg: 77.1866, loss: 0.6790, grad_norm: 7.4396 2023-02-11 20:32:23,637 - mmseg - INFO - Iter [40250/160000] lr: 4.491e-05, eta: 6:58:34, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4121, decode.acc_seg: 84.2031, aux.loss_ce: 0.2475, aux.acc_seg: 76.8908, loss: 0.6596, grad_norm: 6.1617 2023-02-11 20:32:33,828 - mmseg - INFO - Iter [40300/160000] lr: 4.489e-05, eta: 6:58:22, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4038, decode.acc_seg: 84.7407, aux.loss_ce: 0.2407, aux.acc_seg: 77.9858, loss: 0.6445, grad_norm: 6.4504 2023-02-11 20:32:44,210 - mmseg - INFO - Iter [40350/160000] lr: 4.487e-05, eta: 6:58:11, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4131, decode.acc_seg: 84.1809, aux.loss_ce: 0.2425, aux.acc_seg: 77.5318, loss: 0.6556, grad_norm: 5.7594 2023-02-11 20:32:54,242 - mmseg - INFO - Iter [40400/160000] lr: 4.485e-05, eta: 6:57:58, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4176, decode.acc_seg: 84.1631, aux.loss_ce: 0.2504, aux.acc_seg: 76.3078, loss: 0.6681, grad_norm: 7.0924 2023-02-11 20:33:04,484 - mmseg - INFO - Iter [40450/160000] lr: 4.483e-05, eta: 6:57:46, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4147, decode.acc_seg: 84.4553, aux.loss_ce: 0.2421, aux.acc_seg: 77.6496, loss: 0.6568, grad_norm: 6.1284 2023-02-11 20:33:14,377 - mmseg - INFO - Iter [40500/160000] lr: 4.481e-05, eta: 6:57:33, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3993, decode.acc_seg: 85.1665, aux.loss_ce: 0.2390, aux.acc_seg: 77.6535, loss: 0.6383, grad_norm: 5.8781 2023-02-11 20:33:24,584 - mmseg - INFO - Iter [40550/160000] lr: 4.479e-05, eta: 6:57:21, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4480, decode.acc_seg: 83.4994, aux.loss_ce: 0.2579, aux.acc_seg: 76.3734, loss: 0.7059, grad_norm: 6.9380 2023-02-11 20:33:34,368 - mmseg - INFO - Iter [40600/160000] lr: 4.478e-05, eta: 6:57:07, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4070, decode.acc_seg: 84.6079, aux.loss_ce: 0.2389, 
aux.acc_seg: 77.3498, loss: 0.6459, grad_norm: 5.4757 2023-02-11 20:33:44,324 - mmseg - INFO - Iter [40650/160000] lr: 4.476e-05, eta: 6:56:54, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4140, decode.acc_seg: 84.1865, aux.loss_ce: 0.2491, aux.acc_seg: 76.7143, loss: 0.6631, grad_norm: 6.4376 2023-02-11 20:33:54,633 - mmseg - INFO - Iter [40700/160000] lr: 4.474e-05, eta: 6:56:43, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4183, decode.acc_seg: 84.5713, aux.loss_ce: 0.2463, aux.acc_seg: 77.6612, loss: 0.6646, grad_norm: 6.1293 2023-02-11 20:34:04,807 - mmseg - INFO - Iter [40750/160000] lr: 4.472e-05, eta: 6:56:31, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4215, decode.acc_seg: 84.4519, aux.loss_ce: 0.2529, aux.acc_seg: 76.9430, loss: 0.6745, grad_norm: 5.9537 2023-02-11 20:34:14,704 - mmseg - INFO - Iter [40800/160000] lr: 4.470e-05, eta: 6:56:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4170, decode.acc_seg: 84.9154, aux.loss_ce: 0.2561, aux.acc_seg: 76.9709, loss: 0.6730, grad_norm: 6.4477 2023-02-11 20:34:24,719 - mmseg - INFO - Iter [40850/160000] lr: 4.468e-05, eta: 6:56:05, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4127, decode.acc_seg: 84.6114, aux.loss_ce: 0.2509, aux.acc_seg: 76.6890, loss: 0.6636, grad_norm: 6.0708 2023-02-11 20:34:34,755 - mmseg - INFO - Iter [40900/160000] lr: 4.466e-05, eta: 6:55:52, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4057, decode.acc_seg: 85.4633, aux.loss_ce: 0.2517, aux.acc_seg: 77.0390, loss: 0.6574, grad_norm: 6.2873 2023-02-11 20:34:44,856 - mmseg - INFO - Iter [40950/160000] lr: 4.464e-05, eta: 6:55:40, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4230, decode.acc_seg: 84.2651, aux.loss_ce: 0.2465, aux.acc_seg: 76.9531, loss: 0.6695, grad_norm: 6.8624 2023-02-11 20:34:55,091 - mmseg - INFO - Saving checkpoint at 41000 iterations 2023-02-11 20:34:55,807 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:34:55,807 - mmseg - INFO - Iter [41000/160000] lr: 4.463e-05, eta: 6:55:32, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4127, decode.acc_seg: 84.7403, aux.loss_ce: 0.2433, aux.acc_seg: 77.5181, loss: 0.6560, grad_norm: 7.3745 2023-02-11 20:35:05,817 - mmseg - INFO - Iter [41050/160000] lr: 4.461e-05, eta: 6:55:19, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4137, decode.acc_seg: 84.5694, aux.loss_ce: 0.2455, aux.acc_seg: 77.6670, loss: 0.6591, grad_norm: 6.3415 2023-02-11 20:35:16,036 - mmseg - INFO - Iter [41100/160000] lr: 4.459e-05, eta: 6:55:08, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4103, decode.acc_seg: 84.2500, aux.loss_ce: 0.2439, aux.acc_seg: 77.1038, loss: 0.6542, grad_norm: 5.9045 2023-02-11 20:35:25,927 - mmseg - INFO - Iter [41150/160000] lr: 4.457e-05, eta: 6:54:54, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4131, decode.acc_seg: 84.5464, aux.loss_ce: 0.2381, aux.acc_seg: 78.0735, loss: 0.6512, grad_norm: 5.4492 2023-02-11 20:35:36,190 - mmseg - INFO - Iter [41200/160000] lr: 4.455e-05, eta: 6:54:43, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4115, decode.acc_seg: 84.6254, aux.loss_ce: 0.2472, aux.acc_seg: 77.6366, loss: 0.6586, grad_norm: 6.6817 2023-02-11 20:35:46,381 - mmseg - INFO - Iter [41250/160000] lr: 4.453e-05, eta: 6:54:31, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4211, decode.acc_seg: 84.5225, aux.loss_ce: 0.2546, 
aux.acc_seg: 76.8934, loss: 0.6757, grad_norm: 6.5934 2023-02-11 20:35:58,696 - mmseg - INFO - Iter [41300/160000] lr: 4.451e-05, eta: 6:54:29, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3712, decode.acc_seg: 86.3351, aux.loss_ce: 0.2370, aux.acc_seg: 78.2557, loss: 0.6082, grad_norm: 5.0929 2023-02-11 20:36:08,630 - mmseg - INFO - Iter [41350/160000] lr: 4.449e-05, eta: 6:54:16, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3965, decode.acc_seg: 85.5365, aux.loss_ce: 0.2377, aux.acc_seg: 78.0917, loss: 0.6342, grad_norm: 5.2999 2023-02-11 20:36:18,451 - mmseg - INFO - Iter [41400/160000] lr: 4.448e-05, eta: 6:54:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4163, decode.acc_seg: 84.3355, aux.loss_ce: 0.2421, aux.acc_seg: 77.7922, loss: 0.6584, grad_norm: 8.9688 2023-02-11 20:36:28,508 - mmseg - INFO - Iter [41450/160000] lr: 4.446e-05, eta: 6:53:50, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4149, decode.acc_seg: 84.5696, aux.loss_ce: 0.2462, aux.acc_seg: 77.3768, loss: 0.6611, grad_norm: 6.2958 2023-02-11 20:36:38,481 - mmseg - INFO - Iter [41500/160000] lr: 4.444e-05, eta: 6:53:38, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3919, decode.acc_seg: 85.4659, aux.loss_ce: 0.2401, aux.acc_seg: 78.1675, loss: 0.6320, grad_norm: 5.5259 2023-02-11 20:36:48,573 - mmseg - INFO - Iter [41550/160000] lr: 4.442e-05, eta: 6:53:25, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3899, decode.acc_seg: 85.7858, aux.loss_ce: 0.2278, aux.acc_seg: 78.7623, loss: 0.6177, grad_norm: 5.3518 2023-02-11 20:36:58,649 - mmseg - INFO - Iter [41600/160000] lr: 4.440e-05, eta: 6:53:13, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4024, decode.acc_seg: 85.0945, aux.loss_ce: 0.2365, aux.acc_seg: 78.1281, loss: 0.6388, grad_norm: 5.4228 2023-02-11 20:37:09,090 - mmseg - INFO - Iter [41650/160000] lr: 4.438e-05, eta: 6:53:02, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4112, decode.acc_seg: 84.2769, aux.loss_ce: 0.2404, aux.acc_seg: 77.3804, loss: 0.6517, grad_norm: 5.9352 2023-02-11 20:37:19,440 - mmseg - INFO - Iter [41700/160000] lr: 4.436e-05, eta: 6:52:51, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4123, decode.acc_seg: 84.2290, aux.loss_ce: 0.2383, aux.acc_seg: 77.8735, loss: 0.6506, grad_norm: 6.9608 2023-02-11 20:37:29,760 - mmseg - INFO - Iter [41750/160000] lr: 4.434e-05, eta: 6:52:40, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3995, decode.acc_seg: 85.1349, aux.loss_ce: 0.2378, aux.acc_seg: 77.8826, loss: 0.6373, grad_norm: 5.2233 2023-02-11 20:37:39,645 - mmseg - INFO - Iter [41800/160000] lr: 4.433e-05, eta: 6:52:27, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3892, decode.acc_seg: 84.9240, aux.loss_ce: 0.2362, aux.acc_seg: 77.1478, loss: 0.6254, grad_norm: 5.8904 2023-02-11 20:37:49,389 - mmseg - INFO - Iter [41850/160000] lr: 4.431e-05, eta: 6:52:13, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3879, decode.acc_seg: 85.4335, aux.loss_ce: 0.2416, aux.acc_seg: 77.4512, loss: 0.6295, grad_norm: 6.0389 2023-02-11 20:37:59,527 - mmseg - INFO - Iter [41900/160000] lr: 4.429e-05, eta: 6:52:01, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4213, decode.acc_seg: 84.4216, aux.loss_ce: 0.2503, aux.acc_seg: 76.6485, loss: 0.6716, grad_norm: 5.8841 2023-02-11 20:38:09,514 - mmseg - INFO - Iter [41950/160000] lr: 4.427e-05, eta: 6:51:49, time: 0.200, data_time: 0.004, memory: 
7748, decode.loss_ce: 0.4038, decode.acc_seg: 84.9341, aux.loss_ce: 0.2435, aux.acc_seg: 77.6821, loss: 0.6473, grad_norm: 6.5860 2023-02-11 20:38:19,875 - mmseg - INFO - Saving checkpoint at 42000 iterations 2023-02-11 20:38:20,566 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:38:20,567 - mmseg - INFO - Iter [42000/160000] lr: 4.425e-05, eta: 6:51:41, time: 0.221, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3902, decode.acc_seg: 85.5785, aux.loss_ce: 0.2366, aux.acc_seg: 78.1079, loss: 0.6268, grad_norm: 5.9117 2023-02-11 20:38:31,070 - mmseg - INFO - Iter [42050/160000] lr: 4.423e-05, eta: 6:51:31, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4001, decode.acc_seg: 84.5287, aux.loss_ce: 0.2315, aux.acc_seg: 78.0246, loss: 0.6315, grad_norm: 6.9701 2023-02-11 20:38:41,472 - mmseg - INFO - Iter [42100/160000] lr: 4.421e-05, eta: 6:51:20, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4026, decode.acc_seg: 85.5015, aux.loss_ce: 0.2421, aux.acc_seg: 77.6778, loss: 0.6447, grad_norm: 5.8613 2023-02-11 20:38:51,656 - mmseg - INFO - Iter [42150/160000] lr: 4.419e-05, eta: 6:51:08, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3984, decode.acc_seg: 85.1357, aux.loss_ce: 0.2408, aux.acc_seg: 77.6270, loss: 0.6392, grad_norm: 5.6962 2023-02-11 20:39:01,927 - mmseg - INFO - Iter [42200/160000] lr: 4.418e-05, eta: 6:50:57, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3912, decode.acc_seg: 85.1491, aux.loss_ce: 0.2447, aux.acc_seg: 77.3779, loss: 0.6358, grad_norm: 5.8971 2023-02-11 20:39:11,612 - mmseg - INFO - Iter [42250/160000] lr: 4.416e-05, eta: 6:50:43, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4004, decode.acc_seg: 84.9599, aux.loss_ce: 0.2399, aux.acc_seg: 77.2938, loss: 0.6403, grad_norm: 5.6450 2023-02-11 20:39:21,523 - mmseg - INFO - Iter [42300/160000] lr: 4.414e-05, eta: 6:50:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4043, decode.acc_seg: 85.0220, aux.loss_ce: 0.2430, aux.acc_seg: 77.5718, loss: 0.6473, grad_norm: 5.7576 2023-02-11 20:39:31,481 - mmseg - INFO - Iter [42350/160000] lr: 4.412e-05, eta: 6:50:17, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4029, decode.acc_seg: 85.5087, aux.loss_ce: 0.2435, aux.acc_seg: 77.9412, loss: 0.6464, grad_norm: 6.2615 2023-02-11 20:39:41,522 - mmseg - INFO - Iter [42400/160000] lr: 4.410e-05, eta: 6:50:05, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4143, decode.acc_seg: 84.5149, aux.loss_ce: 0.2550, aux.acc_seg: 76.1773, loss: 0.6693, grad_norm: 6.4200 2023-02-11 20:39:51,503 - mmseg - INFO - Iter [42450/160000] lr: 4.408e-05, eta: 6:49:52, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4125, decode.acc_seg: 84.7560, aux.loss_ce: 0.2436, aux.acc_seg: 77.4936, loss: 0.6561, grad_norm: 6.3618 2023-02-11 20:40:01,621 - mmseg - INFO - Iter [42500/160000] lr: 4.406e-05, eta: 6:49:40, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4202, decode.acc_seg: 84.7252, aux.loss_ce: 0.2576, aux.acc_seg: 76.7626, loss: 0.6778, grad_norm: 6.5299 2023-02-11 20:40:14,030 - mmseg - INFO - Iter [42550/160000] lr: 4.404e-05, eta: 6:49:38, time: 0.249, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3947, decode.acc_seg: 85.0828, aux.loss_ce: 0.2420, aux.acc_seg: 77.5219, loss: 0.6367, grad_norm: 5.7221 2023-02-11 20:40:24,043 - mmseg - INFO - Iter [42600/160000] lr: 4.403e-05, eta: 6:49:26, time: 0.200, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.3921, decode.acc_seg: 85.4499, aux.loss_ce: 0.2425, aux.acc_seg: 78.0914, loss: 0.6346, grad_norm: 5.8145 2023-02-11 20:40:33,790 - mmseg - INFO - Iter [42650/160000] lr: 4.401e-05, eta: 6:49:12, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4113, decode.acc_seg: 84.2445, aux.loss_ce: 0.2433, aux.acc_seg: 76.7826, loss: 0.6547, grad_norm: 6.2626 2023-02-11 20:40:43,649 - mmseg - INFO - Iter [42700/160000] lr: 4.399e-05, eta: 6:48:59, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3962, decode.acc_seg: 85.5545, aux.loss_ce: 0.2421, aux.acc_seg: 78.1634, loss: 0.6383, grad_norm: 5.6110 2023-02-11 20:40:53,840 - mmseg - INFO - Iter [42750/160000] lr: 4.397e-05, eta: 6:48:47, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3895, decode.acc_seg: 85.8429, aux.loss_ce: 0.2347, aux.acc_seg: 78.1474, loss: 0.6242, grad_norm: 6.0436 2023-02-11 20:41:04,112 - mmseg - INFO - Iter [42800/160000] lr: 4.395e-05, eta: 6:48:36, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3875, decode.acc_seg: 85.6415, aux.loss_ce: 0.2418, aux.acc_seg: 78.1550, loss: 0.6293, grad_norm: 5.8994 2023-02-11 20:41:14,425 - mmseg - INFO - Iter [42850/160000] lr: 4.393e-05, eta: 6:48:25, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4021, decode.acc_seg: 84.9769, aux.loss_ce: 0.2395, aux.acc_seg: 77.9328, loss: 0.6416, grad_norm: 5.4603 2023-02-11 20:41:24,805 - mmseg - INFO - Iter [42900/160000] lr: 4.391e-05, eta: 6:48:14, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3973, decode.acc_seg: 84.6867, aux.loss_ce: 0.2353, aux.acc_seg: 77.4745, loss: 0.6326, grad_norm: 6.3120 2023-02-11 20:41:34,647 - mmseg - INFO - Iter [42950/160000] lr: 4.389e-05, eta: 6:48:01, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3884, decode.acc_seg: 85.2131, aux.loss_ce: 0.2316, aux.acc_seg: 78.1834, loss: 0.6200, grad_norm: 6.1685 2023-02-11 20:41:44,745 - mmseg - INFO - Saving checkpoint at 43000 iterations 2023-02-11 20:41:45,431 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:41:45,431 - mmseg - INFO - Iter [43000/160000] lr: 4.388e-05, eta: 6:47:52, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4064, decode.acc_seg: 84.7199, aux.loss_ce: 0.2429, aux.acc_seg: 77.1030, loss: 0.6492, grad_norm: 6.0801 2023-02-11 20:41:55,437 - mmseg - INFO - Iter [43050/160000] lr: 4.386e-05, eta: 6:47:40, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3697, decode.acc_seg: 86.0277, aux.loss_ce: 0.2299, aux.acc_seg: 78.6570, loss: 0.5996, grad_norm: 5.1228 2023-02-11 20:42:05,741 - mmseg - INFO - Iter [43100/160000] lr: 4.384e-05, eta: 6:47:29, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4201, decode.acc_seg: 84.7096, aux.loss_ce: 0.2429, aux.acc_seg: 77.8930, loss: 0.6629, grad_norm: 5.3959 2023-02-11 20:42:15,678 - mmseg - INFO - Iter [43150/160000] lr: 4.382e-05, eta: 6:47:16, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3919, decode.acc_seg: 85.0823, aux.loss_ce: 0.2358, aux.acc_seg: 77.7606, loss: 0.6277, grad_norm: 6.6814 2023-02-11 20:42:25,789 - mmseg - INFO - Iter [43200/160000] lr: 4.380e-05, eta: 6:47:04, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4038, decode.acc_seg: 84.8223, aux.loss_ce: 0.2434, aux.acc_seg: 77.6484, loss: 0.6472, grad_norm: 5.8885 2023-02-11 20:42:35,861 - mmseg - INFO - Iter [43250/160000] lr: 4.378e-05, eta: 6:46:52, time: 0.201, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.3875, decode.acc_seg: 85.4758, aux.loss_ce: 0.2366, aux.acc_seg: 78.2707, loss: 0.6241, grad_norm: 5.9069 2023-02-11 20:42:46,179 - mmseg - INFO - Iter [43300/160000] lr: 4.376e-05, eta: 6:46:41, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4042, decode.acc_seg: 85.1984, aux.loss_ce: 0.2522, aux.acc_seg: 77.3081, loss: 0.6564, grad_norm: 6.8398 2023-02-11 20:42:57,134 - mmseg - INFO - Iter [43350/160000] lr: 4.374e-05, eta: 6:46:32, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3956, decode.acc_seg: 85.3298, aux.loss_ce: 0.2466, aux.acc_seg: 77.4708, loss: 0.6421, grad_norm: 7.1099 2023-02-11 20:43:06,878 - mmseg - INFO - Iter [43400/160000] lr: 4.373e-05, eta: 6:46:19, time: 0.195, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4016, decode.acc_seg: 85.1644, aux.loss_ce: 0.2453, aux.acc_seg: 77.3081, loss: 0.6468, grad_norm: 5.8343 2023-02-11 20:43:16,731 - mmseg - INFO - Iter [43450/160000] lr: 4.371e-05, eta: 6:46:06, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4028, decode.acc_seg: 84.6302, aux.loss_ce: 0.2389, aux.acc_seg: 77.7483, loss: 0.6417, grad_norm: 6.2828 2023-02-11 20:43:26,653 - mmseg - INFO - Iter [43500/160000] lr: 4.369e-05, eta: 6:45:53, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4166, decode.acc_seg: 84.6460, aux.loss_ce: 0.2415, aux.acc_seg: 77.6258, loss: 0.6580, grad_norm: 6.1296 2023-02-11 20:43:36,555 - mmseg - INFO - Iter [43550/160000] lr: 4.367e-05, eta: 6:45:41, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3750, decode.acc_seg: 85.7456, aux.loss_ce: 0.2338, aux.acc_seg: 77.9932, loss: 0.6088, grad_norm: 5.6100 2023-02-11 20:43:46,216 - mmseg - INFO - Iter [43600/160000] lr: 4.365e-05, eta: 6:45:27, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3856, decode.acc_seg: 85.7287, aux.loss_ce: 0.2320, aux.acc_seg: 78.3234, loss: 0.6177, grad_norm: 6.1153 2023-02-11 20:43:56,842 - mmseg - INFO - Iter [43650/160000] lr: 4.363e-05, eta: 6:45:17, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4140, decode.acc_seg: 84.7101, aux.loss_ce: 0.2429, aux.acc_seg: 77.5172, loss: 0.6569, grad_norm: 6.4410 2023-02-11 20:44:07,043 - mmseg - INFO - Iter [43700/160000] lr: 4.361e-05, eta: 6:45:06, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4010, decode.acc_seg: 84.3901, aux.loss_ce: 0.2365, aux.acc_seg: 77.4361, loss: 0.6376, grad_norm: 5.7668 2023-02-11 20:44:17,038 - mmseg - INFO - Iter [43750/160000] lr: 4.359e-05, eta: 6:44:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4007, decode.acc_seg: 84.7175, aux.loss_ce: 0.2374, aux.acc_seg: 77.6499, loss: 0.6381, grad_norm: 6.2080 2023-02-11 20:44:29,139 - mmseg - INFO - Iter [43800/160000] lr: 4.358e-05, eta: 6:44:50, time: 0.242, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3656, decode.acc_seg: 86.2442, aux.loss_ce: 0.2308, aux.acc_seg: 77.9824, loss: 0.5964, grad_norm: 5.6041 2023-02-11 20:44:39,522 - mmseg - INFO - Iter [43850/160000] lr: 4.356e-05, eta: 6:44:39, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3920, decode.acc_seg: 85.2821, aux.loss_ce: 0.2439, aux.acc_seg: 77.6713, loss: 0.6359, grad_norm: 5.6736 2023-02-11 20:44:49,994 - mmseg - INFO - Iter [43900/160000] lr: 4.354e-05, eta: 6:44:29, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3819, decode.acc_seg: 85.5602, aux.loss_ce: 0.2331, aux.acc_seg: 78.1994, loss: 0.6150, grad_norm: 6.1060 2023-02-11 20:44:59,925 - mmseg - INFO - Iter [43950/160000] 
lr: 4.352e-05, eta: 6:44:16, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4051, decode.acc_seg: 85.2526, aux.loss_ce: 0.2452, aux.acc_seg: 77.3627, loss: 0.6504, grad_norm: 5.8594 2023-02-11 20:45:09,877 - mmseg - INFO - Saving checkpoint at 44000 iterations 2023-02-11 20:45:10,580 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:45:10,580 - mmseg - INFO - Iter [44000/160000] lr: 4.350e-05, eta: 6:44:06, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3605, decode.acc_seg: 86.2647, aux.loss_ce: 0.2255, aux.acc_seg: 78.7073, loss: 0.5860, grad_norm: 6.1453 2023-02-11 20:45:21,007 - mmseg - INFO - Iter [44050/160000] lr: 4.348e-05, eta: 6:43:56, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3997, decode.acc_seg: 84.9043, aux.loss_ce: 0.2298, aux.acc_seg: 78.1076, loss: 0.6295, grad_norm: 6.1843 2023-02-11 20:45:31,496 - mmseg - INFO - Iter [44100/160000] lr: 4.346e-05, eta: 6:43:45, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3771, decode.acc_seg: 85.4538, aux.loss_ce: 0.2357, aux.acc_seg: 77.8470, loss: 0.6128, grad_norm: 5.5522 2023-02-11 20:45:41,433 - mmseg - INFO - Iter [44150/160000] lr: 4.344e-05, eta: 6:43:33, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4001, decode.acc_seg: 85.7063, aux.loss_ce: 0.2475, aux.acc_seg: 77.5855, loss: 0.6477, grad_norm: 5.8744 2023-02-11 20:45:51,218 - mmseg - INFO - Iter [44200/160000] lr: 4.343e-05, eta: 6:43:20, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3889, decode.acc_seg: 85.2798, aux.loss_ce: 0.2344, aux.acc_seg: 77.9132, loss: 0.6232, grad_norm: 5.2795 2023-02-11 20:46:01,432 - mmseg - INFO - Iter [44250/160000] lr: 4.341e-05, eta: 6:43:08, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3944, decode.acc_seg: 85.1964, aux.loss_ce: 0.2406, aux.acc_seg: 77.9445, loss: 0.6350, grad_norm: 7.0690 2023-02-11 20:46:11,997 - mmseg - INFO - Iter [44300/160000] lr: 4.339e-05, eta: 6:42:58, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4004, decode.acc_seg: 85.4815, aux.loss_ce: 0.2477, aux.acc_seg: 77.8760, loss: 0.6482, grad_norm: 5.7952 2023-02-11 20:46:22,055 - mmseg - INFO - Iter [44350/160000] lr: 4.337e-05, eta: 6:42:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3965, decode.acc_seg: 85.5338, aux.loss_ce: 0.2416, aux.acc_seg: 77.8840, loss: 0.6382, grad_norm: 6.0281 2023-02-11 20:46:31,918 - mmseg - INFO - Iter [44400/160000] lr: 4.335e-05, eta: 6:42:33, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3706, decode.acc_seg: 86.0848, aux.loss_ce: 0.2269, aux.acc_seg: 78.7097, loss: 0.5975, grad_norm: 5.6273 2023-02-11 20:46:42,633 - mmseg - INFO - Iter [44450/160000] lr: 4.333e-05, eta: 6:42:24, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4001, decode.acc_seg: 84.7378, aux.loss_ce: 0.2401, aux.acc_seg: 77.2332, loss: 0.6402, grad_norm: 5.0387 2023-02-11 20:46:52,517 - mmseg - INFO - Iter [44500/160000] lr: 4.331e-05, eta: 6:42:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3774, decode.acc_seg: 85.9930, aux.loss_ce: 0.2286, aux.acc_seg: 78.6836, loss: 0.6060, grad_norm: 5.3052 2023-02-11 20:47:02,382 - mmseg - INFO - Iter [44550/160000] lr: 4.329e-05, eta: 6:41:59, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3905, decode.acc_seg: 85.7465, aux.loss_ce: 0.2341, aux.acc_seg: 78.5755, loss: 0.6246, grad_norm: 6.9371 2023-02-11 20:47:12,436 - mmseg - INFO - Iter [44600/160000] lr: 
4.328e-05, eta: 6:41:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3886, decode.acc_seg: 85.4422, aux.loss_ce: 0.2405, aux.acc_seg: 78.0114, loss: 0.6292, grad_norm: 6.3813 2023-02-11 20:47:22,690 - mmseg - INFO - Iter [44650/160000] lr: 4.326e-05, eta: 6:41:35, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3829, decode.acc_seg: 85.8078, aux.loss_ce: 0.2313, aux.acc_seg: 78.3612, loss: 0.6142, grad_norm: 5.8716 2023-02-11 20:47:32,439 - mmseg - INFO - Iter [44700/160000] lr: 4.324e-05, eta: 6:41:22, time: 0.195, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3742, decode.acc_seg: 86.1470, aux.loss_ce: 0.2320, aux.acc_seg: 78.2099, loss: 0.6062, grad_norm: 6.2308 2023-02-11 20:47:42,872 - mmseg - INFO - Iter [44750/160000] lr: 4.322e-05, eta: 6:41:12, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.4013, decode.acc_seg: 85.0637, aux.loss_ce: 0.2433, aux.acc_seg: 77.1122, loss: 0.6446, grad_norm: 6.6568 2023-02-11 20:47:53,013 - mmseg - INFO - Iter [44800/160000] lr: 4.320e-05, eta: 6:41:00, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3884, decode.acc_seg: 85.1559, aux.loss_ce: 0.2350, aux.acc_seg: 78.4389, loss: 0.6234, grad_norm: 6.4354 2023-02-11 20:48:03,339 - mmseg - INFO - Iter [44850/160000] lr: 4.318e-05, eta: 6:40:49, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3856, decode.acc_seg: 85.7377, aux.loss_ce: 0.2313, aux.acc_seg: 78.5351, loss: 0.6169, grad_norm: 7.0211 2023-02-11 20:48:13,257 - mmseg - INFO - Iter [44900/160000] lr: 4.316e-05, eta: 6:40:37, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3858, decode.acc_seg: 84.6594, aux.loss_ce: 0.2291, aux.acc_seg: 77.6440, loss: 0.6149, grad_norm: 6.2018 2023-02-11 20:48:23,514 - mmseg - INFO - Iter [44950/160000] lr: 4.314e-05, eta: 6:40:25, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3791, decode.acc_seg: 85.7321, aux.loss_ce: 0.2385, aux.acc_seg: 77.4490, loss: 0.6176, grad_norm: 6.3514 2023-02-11 20:48:33,303 - mmseg - INFO - Saving checkpoint at 45000 iterations 2023-02-11 20:48:34,012 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:48:34,012 - mmseg - INFO - Iter [45000/160000] lr: 4.313e-05, eta: 6:40:15, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3914, decode.acc_seg: 85.3458, aux.loss_ce: 0.2364, aux.acc_seg: 77.9801, loss: 0.6278, grad_norm: 6.1227 2023-02-11 20:48:45,989 - mmseg - INFO - Iter [45050/160000] lr: 4.311e-05, eta: 6:40:11, time: 0.240, data_time: 0.047, memory: 7748, decode.loss_ce: 0.4091, decode.acc_seg: 84.9092, aux.loss_ce: 0.2447, aux.acc_seg: 77.3249, loss: 0.6538, grad_norm: 6.4571 2023-02-11 20:48:56,048 - mmseg - INFO - Iter [45100/160000] lr: 4.309e-05, eta: 6:39:59, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3873, decode.acc_seg: 85.7980, aux.loss_ce: 0.2373, aux.acc_seg: 78.3591, loss: 0.6246, grad_norm: 5.6795 2023-02-11 20:49:06,173 - mmseg - INFO - Iter [45150/160000] lr: 4.307e-05, eta: 6:39:47, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3828, decode.acc_seg: 85.3489, aux.loss_ce: 0.2361, aux.acc_seg: 78.1172, loss: 0.6189, grad_norm: 6.1362 2023-02-11 20:49:16,154 - mmseg - INFO - Iter [45200/160000] lr: 4.305e-05, eta: 6:39:35, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3792, decode.acc_seg: 85.9494, aux.loss_ce: 0.2311, aux.acc_seg: 78.3216, loss: 0.6103, grad_norm: 6.5002 2023-02-11 20:49:26,928 - mmseg - INFO - Iter [45250/160000] lr: 4.303e-05, 
eta: 6:39:26, time: 0.215, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3837, decode.acc_seg: 85.5988, aux.loss_ce: 0.2368, aux.acc_seg: 78.1029, loss: 0.6205, grad_norm: 5.7643 2023-02-11 20:49:37,098 - mmseg - INFO - Iter [45300/160000] lr: 4.301e-05, eta: 6:39:14, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3897, decode.acc_seg: 85.5146, aux.loss_ce: 0.2326, aux.acc_seg: 78.7089, loss: 0.6223, grad_norm: 5.8432 2023-02-11 20:49:46,859 - mmseg - INFO - Iter [45350/160000] lr: 4.299e-05, eta: 6:39:01, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3733, decode.acc_seg: 86.1169, aux.loss_ce: 0.2348, aux.acc_seg: 78.4099, loss: 0.6080, grad_norm: 7.0071 2023-02-11 20:49:56,640 - mmseg - INFO - Iter [45400/160000] lr: 4.298e-05, eta: 6:38:48, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3785, decode.acc_seg: 85.9074, aux.loss_ce: 0.2362, aux.acc_seg: 77.8086, loss: 0.6147, grad_norm: 6.5730 2023-02-11 20:50:06,748 - mmseg - INFO - Iter [45450/160000] lr: 4.296e-05, eta: 6:38:36, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4010, decode.acc_seg: 85.0035, aux.loss_ce: 0.2453, aux.acc_seg: 77.8510, loss: 0.6463, grad_norm: 5.6956 2023-02-11 20:50:16,809 - mmseg - INFO - Iter [45500/160000] lr: 4.294e-05, eta: 6:38:24, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3619, decode.acc_seg: 86.4240, aux.loss_ce: 0.2304, aux.acc_seg: 78.8669, loss: 0.5923, grad_norm: 5.4539 2023-02-11 20:50:27,252 - mmseg - INFO - Iter [45550/160000] lr: 4.292e-05, eta: 6:38:14, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3696, decode.acc_seg: 86.3099, aux.loss_ce: 0.2300, aux.acc_seg: 78.6405, loss: 0.5996, grad_norm: 5.5272 2023-02-11 20:50:37,457 - mmseg - INFO - Iter [45600/160000] lr: 4.290e-05, eta: 6:38:03, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4003, decode.acc_seg: 85.1617, aux.loss_ce: 0.2372, aux.acc_seg: 78.1876, loss: 0.6374, grad_norm: 6.1451 2023-02-11 20:50:47,659 - mmseg - INFO - Iter [45650/160000] lr: 4.288e-05, eta: 6:37:51, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3704, decode.acc_seg: 85.9112, aux.loss_ce: 0.2260, aux.acc_seg: 78.7618, loss: 0.5964, grad_norm: 5.8961 2023-02-11 20:50:57,422 - mmseg - INFO - Iter [45700/160000] lr: 4.286e-05, eta: 6:37:38, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4052, decode.acc_seg: 85.2311, aux.loss_ce: 0.2483, aux.acc_seg: 77.0903, loss: 0.6535, grad_norm: 6.7174 2023-02-11 20:51:07,347 - mmseg - INFO - Iter [45750/160000] lr: 4.284e-05, eta: 6:37:26, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3684, decode.acc_seg: 85.9015, aux.loss_ce: 0.2252, aux.acc_seg: 78.5461, loss: 0.5936, grad_norm: 6.7157 2023-02-11 20:51:17,327 - mmseg - INFO - Iter [45800/160000] lr: 4.283e-05, eta: 6:37:14, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3833, decode.acc_seg: 86.0728, aux.loss_ce: 0.2293, aux.acc_seg: 79.1135, loss: 0.6126, grad_norm: 7.3660 2023-02-11 20:51:27,316 - mmseg - INFO - Iter [45850/160000] lr: 4.281e-05, eta: 6:37:02, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3723, decode.acc_seg: 85.7456, aux.loss_ce: 0.2306, aux.acc_seg: 78.6123, loss: 0.6029, grad_norm: 5.1735 2023-02-11 20:51:37,288 - mmseg - INFO - Iter [45900/160000] lr: 4.279e-05, eta: 6:36:49, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3950, decode.acc_seg: 84.8434, aux.loss_ce: 0.2405, aux.acc_seg: 77.4382, loss: 0.6355, grad_norm: 6.4180 
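The per-iteration lines above can be sanity-checked offline: the logged loss is the sum of the decode and auxiliary cross-entropy terms, and the logged lr is consistent with a linear ("poly" with power 1.0) decay from roughly 6e-05 over the 160000-iteration run. A minimal sketch in Python, using the Iter [45900/160000] values from the line above (the schedule form and the base learning rate are assumptions inferred from the logged lr values, not taken from this log itself):

    # Sketch: reproduce the logged quantities for Iter [45900/160000].
    # decode/aux losses are copied from the log line above; the poly/linear
    # schedule and base_lr ~= 6e-05 are assumptions inferred from the lr values.
    decode_loss_ce, aux_loss_ce = 0.3950, 0.2405
    total_loss = decode_loss_ce + aux_loss_ce            # -> 0.6355, matches "loss: 0.6355"

    base_lr, max_iters, power = 6e-05, 160000, 1.0
    cur_iter = 45900
    lr = base_lr * (1 - cur_iter / max_iters) ** power   # -> ~4.279e-05, matches "lr: 4.279e-05"
    print(f"loss={total_loss:.4f}, lr={lr:.3e}")
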
2023-02-11 20:51:47,357 - mmseg - INFO - Iter [45950/160000] lr: 4.277e-05, eta: 6:36:37, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3609, decode.acc_seg: 85.7593, aux.loss_ce: 0.2276, aux.acc_seg: 78.0640, loss: 0.5885, grad_norm: 5.4861 2023-02-11 20:51:57,447 - mmseg - INFO - Saving checkpoint at 46000 iterations 2023-02-11 20:51:58,138 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:51:58,138 - mmseg - INFO - Iter [46000/160000] lr: 4.275e-05, eta: 6:36:28, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3935, decode.acc_seg: 85.2650, aux.loss_ce: 0.2460, aux.acc_seg: 77.7205, loss: 0.6395, grad_norm: 5.9494 2023-02-11 20:52:08,032 - mmseg - INFO - Iter [46050/160000] lr: 4.273e-05, eta: 6:36:16, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.4074, decode.acc_seg: 85.0118, aux.loss_ce: 0.2480, aux.acc_seg: 76.8575, loss: 0.6554, grad_norm: 6.1437 2023-02-11 20:52:18,077 - mmseg - INFO - Iter [46100/160000] lr: 4.271e-05, eta: 6:36:04, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3791, decode.acc_seg: 85.4512, aux.loss_ce: 0.2274, aux.acc_seg: 78.9536, loss: 0.6065, grad_norm: 5.1603 2023-02-11 20:52:28,112 - mmseg - INFO - Iter [46150/160000] lr: 4.269e-05, eta: 6:35:52, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3931, decode.acc_seg: 85.4975, aux.loss_ce: 0.2397, aux.acc_seg: 78.1232, loss: 0.6328, grad_norm: 6.4517 2023-02-11 20:52:38,338 - mmseg - INFO - Iter [46200/160000] lr: 4.268e-05, eta: 6:35:41, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3677, decode.acc_seg: 86.5309, aux.loss_ce: 0.2252, aux.acc_seg: 79.0472, loss: 0.5929, grad_norm: 6.7681 2023-02-11 20:52:48,487 - mmseg - INFO - Iter [46250/160000] lr: 4.266e-05, eta: 6:35:29, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3756, decode.acc_seg: 85.9745, aux.loss_ce: 0.2360, aux.acc_seg: 78.2436, loss: 0.6116, grad_norm: 5.8464 2023-02-11 20:52:58,475 - mmseg - INFO - Iter [46300/160000] lr: 4.264e-05, eta: 6:35:17, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3914, decode.acc_seg: 85.4228, aux.loss_ce: 0.2440, aux.acc_seg: 77.1268, loss: 0.6354, grad_norm: 6.1988 2023-02-11 20:53:10,743 - mmseg - INFO - Iter [46350/160000] lr: 4.262e-05, eta: 6:35:14, time: 0.245, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3716, decode.acc_seg: 86.2560, aux.loss_ce: 0.2277, aux.acc_seg: 78.5771, loss: 0.5993, grad_norm: 5.2952 2023-02-11 20:53:21,227 - mmseg - INFO - Iter [46400/160000] lr: 4.260e-05, eta: 6:35:03, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3967, decode.acc_seg: 85.4833, aux.loss_ce: 0.2447, aux.acc_seg: 77.8565, loss: 0.6414, grad_norm: 7.2543 2023-02-11 20:53:31,056 - mmseg - INFO - Iter [46450/160000] lr: 4.258e-05, eta: 6:34:51, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3710, decode.acc_seg: 85.7333, aux.loss_ce: 0.2285, aux.acc_seg: 78.0775, loss: 0.5994, grad_norm: 8.4031 2023-02-11 20:53:40,807 - mmseg - INFO - Iter [46500/160000] lr: 4.256e-05, eta: 6:34:38, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3562, decode.acc_seg: 86.4309, aux.loss_ce: 0.2164, aux.acc_seg: 79.6058, loss: 0.5726, grad_norm: 6.1357 2023-02-11 20:53:50,738 - mmseg - INFO - Iter [46550/160000] lr: 4.254e-05, eta: 6:34:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3730, decode.acc_seg: 85.6499, aux.loss_ce: 0.2294, aux.acc_seg: 78.5658, loss: 0.6024, grad_norm: 6.2681 2023-02-11 
20:54:01,095 - mmseg - INFO - Iter [46600/160000] lr: 4.253e-05, eta: 6:34:15, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3755, decode.acc_seg: 85.7057, aux.loss_ce: 0.2303, aux.acc_seg: 78.3890, loss: 0.6058, grad_norm: 6.0350 2023-02-11 20:54:12,168 - mmseg - INFO - Iter [46650/160000] lr: 4.251e-05, eta: 6:34:07, time: 0.222, data_time: 0.006, memory: 7748, decode.loss_ce: 0.4013, decode.acc_seg: 84.7842, aux.loss_ce: 0.2451, aux.acc_seg: 77.3601, loss: 0.6463, grad_norm: 6.5144 2023-02-11 20:54:21,911 - mmseg - INFO - Iter [46700/160000] lr: 4.249e-05, eta: 6:33:54, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3728, decode.acc_seg: 85.8079, aux.loss_ce: 0.2313, aux.acc_seg: 78.3601, loss: 0.6041, grad_norm: 6.0082 2023-02-11 20:54:31,761 - mmseg - INFO - Iter [46750/160000] lr: 4.247e-05, eta: 6:33:41, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3737, decode.acc_seg: 85.3182, aux.loss_ce: 0.2274, aux.acc_seg: 78.2152, loss: 0.6011, grad_norm: 5.2383 2023-02-11 20:54:42,014 - mmseg - INFO - Iter [46800/160000] lr: 4.245e-05, eta: 6:33:30, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3710, decode.acc_seg: 85.9997, aux.loss_ce: 0.2388, aux.acc_seg: 77.6120, loss: 0.6098, grad_norm: 6.4389 2023-02-11 20:54:52,357 - mmseg - INFO - Iter [46850/160000] lr: 4.243e-05, eta: 6:33:19, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3824, decode.acc_seg: 85.3426, aux.loss_ce: 0.2316, aux.acc_seg: 78.0397, loss: 0.6140, grad_norm: 6.0971 2023-02-11 20:55:02,773 - mmseg - INFO - Iter [46900/160000] lr: 4.241e-05, eta: 6:33:09, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3905, decode.acc_seg: 85.0901, aux.loss_ce: 0.2337, aux.acc_seg: 77.8503, loss: 0.6242, grad_norm: 6.5292 2023-02-11 20:55:13,092 - mmseg - INFO - Iter [46950/160000] lr: 4.239e-05, eta: 6:32:58, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3928, decode.acc_seg: 85.1644, aux.loss_ce: 0.2424, aux.acc_seg: 77.3329, loss: 0.6352, grad_norm: 5.9405 2023-02-11 20:55:23,669 - mmseg - INFO - Saving checkpoint at 47000 iterations 2023-02-11 20:55:24,373 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:55:24,373 - mmseg - INFO - Iter [47000/160000] lr: 4.238e-05, eta: 6:32:51, time: 0.226, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3773, decode.acc_seg: 85.8620, aux.loss_ce: 0.2404, aux.acc_seg: 77.5348, loss: 0.6177, grad_norm: 7.0758 2023-02-11 20:55:34,468 - mmseg - INFO - Iter [47050/160000] lr: 4.236e-05, eta: 6:32:39, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3706, decode.acc_seg: 86.1319, aux.loss_ce: 0.2340, aux.acc_seg: 78.1139, loss: 0.6046, grad_norm: 6.5254 2023-02-11 20:55:44,992 - mmseg - INFO - Iter [47100/160000] lr: 4.234e-05, eta: 6:32:29, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3813, decode.acc_seg: 85.5976, aux.loss_ce: 0.2317, aux.acc_seg: 78.1242, loss: 0.6130, grad_norm: 6.5898 2023-02-11 20:55:55,538 - mmseg - INFO - Iter [47150/160000] lr: 4.232e-05, eta: 6:32:19, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3784, decode.acc_seg: 86.3309, aux.loss_ce: 0.2323, aux.acc_seg: 78.8119, loss: 0.6107, grad_norm: 5.8176 2023-02-11 20:56:05,819 - mmseg - INFO - Iter [47200/160000] lr: 4.230e-05, eta: 6:32:08, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3810, decode.acc_seg: 85.8114, aux.loss_ce: 0.2382, aux.acc_seg: 77.7657, loss: 0.6193, grad_norm: 5.9152 2023-02-11 
20:56:15,713 - mmseg - INFO - Iter [47250/160000] lr: 4.228e-05, eta: 6:31:56, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3677, decode.acc_seg: 86.0159, aux.loss_ce: 0.2271, aux.acc_seg: 78.9131, loss: 0.5948, grad_norm: 5.3383 2023-02-11 20:56:25,385 - mmseg - INFO - Iter [47300/160000] lr: 4.226e-05, eta: 6:31:43, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3720, decode.acc_seg: 85.9324, aux.loss_ce: 0.2299, aux.acc_seg: 78.1759, loss: 0.6020, grad_norm: 5.3389 2023-02-11 20:56:35,277 - mmseg - INFO - Iter [47350/160000] lr: 4.224e-05, eta: 6:31:30, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3802, decode.acc_seg: 85.4978, aux.loss_ce: 0.2269, aux.acc_seg: 78.7143, loss: 0.6072, grad_norm: 5.7022 2023-02-11 20:56:45,604 - mmseg - INFO - Iter [47400/160000] lr: 4.223e-05, eta: 6:31:19, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3694, decode.acc_seg: 86.1467, aux.loss_ce: 0.2330, aux.acc_seg: 78.6238, loss: 0.6025, grad_norm: 6.2465 2023-02-11 20:56:55,634 - mmseg - INFO - Iter [47450/160000] lr: 4.221e-05, eta: 6:31:08, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3935, decode.acc_seg: 85.1539, aux.loss_ce: 0.2373, aux.acc_seg: 78.0910, loss: 0.6308, grad_norm: 6.3838 2023-02-11 20:57:05,406 - mmseg - INFO - Iter [47500/160000] lr: 4.219e-05, eta: 6:30:55, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3689, decode.acc_seg: 85.7179, aux.loss_ce: 0.2277, aux.acc_seg: 78.4505, loss: 0.5966, grad_norm: 5.9781 2023-02-11 20:57:15,210 - mmseg - INFO - Iter [47550/160000] lr: 4.217e-05, eta: 6:30:42, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3767, decode.acc_seg: 85.6008, aux.loss_ce: 0.2285, aux.acc_seg: 78.4464, loss: 0.6052, grad_norm: 5.8499 2023-02-11 20:57:27,323 - mmseg - INFO - Iter [47600/160000] lr: 4.215e-05, eta: 6:30:38, time: 0.242, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3779, decode.acc_seg: 85.6925, aux.loss_ce: 0.2371, aux.acc_seg: 77.5030, loss: 0.6150, grad_norm: 5.7133 2023-02-11 20:57:37,379 - mmseg - INFO - Iter [47650/160000] lr: 4.213e-05, eta: 6:30:26, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3602, decode.acc_seg: 86.4941, aux.loss_ce: 0.2252, aux.acc_seg: 79.1561, loss: 0.5854, grad_norm: 6.1230 2023-02-11 20:57:47,423 - mmseg - INFO - Iter [47700/160000] lr: 4.211e-05, eta: 6:30:14, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3679, decode.acc_seg: 86.1064, aux.loss_ce: 0.2343, aux.acc_seg: 78.1306, loss: 0.6022, grad_norm: 6.8605 2023-02-11 20:57:58,007 - mmseg - INFO - Iter [47750/160000] lr: 4.209e-05, eta: 6:30:04, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3800, decode.acc_seg: 85.5994, aux.loss_ce: 0.2265, aux.acc_seg: 78.9503, loss: 0.6065, grad_norm: 5.3768 2023-02-11 20:58:08,104 - mmseg - INFO - Iter [47800/160000] lr: 4.208e-05, eta: 6:29:53, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3401, decode.acc_seg: 87.0305, aux.loss_ce: 0.2154, aux.acc_seg: 79.4784, loss: 0.5555, grad_norm: 5.2075 2023-02-11 20:58:18,386 - mmseg - INFO - Iter [47850/160000] lr: 4.206e-05, eta: 6:29:42, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3468, decode.acc_seg: 86.8708, aux.loss_ce: 0.2322, aux.acc_seg: 78.3906, loss: 0.5790, grad_norm: 5.9121 2023-02-11 20:58:28,655 - mmseg - INFO - Iter [47900/160000] lr: 4.204e-05, eta: 6:29:31, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3482, decode.acc_seg: 86.4261, aux.loss_ce: 
0.2205, aux.acc_seg: 78.8313, loss: 0.5687, grad_norm: 6.1722 2023-02-11 20:58:38,510 - mmseg - INFO - Iter [47950/160000] lr: 4.202e-05, eta: 6:29:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3981, decode.acc_seg: 85.4360, aux.loss_ce: 0.2433, aux.acc_seg: 78.0853, loss: 0.6414, grad_norm: 6.5004 2023-02-11 20:58:48,561 - mmseg - INFO - Saving checkpoint at 48000 iterations 2023-02-11 20:58:49,278 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:58:49,278 - mmseg - INFO - Iter [48000/160000] lr: 4.200e-05, eta: 6:29:09, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3881, decode.acc_seg: 85.5916, aux.loss_ce: 0.2351, aux.acc_seg: 77.9788, loss: 0.6233, grad_norm: 5.5775 2023-02-11 20:59:01,149 - mmseg - INFO - per class results:
2023-02-11 20:59:01,155 - mmseg - INFO -
+---------------------+-------+-------+
| Class | IoU | Acc |
+---------------------+-------+-------+
| wall | 71.12 | 79.93 |
| building | 80.83 | 94.22 |
| sky | 93.5 | 96.12 |
| floor | 77.86 | 90.05 |
| tree | 71.05 | 79.0 |
| ceiling | 80.99 | 91.64 |
| road | 79.43 | 90.32 |
| bed | 83.45 | 94.82 |
| windowpane | 56.28 | 70.64 |
| grass | 66.92 | 87.01 |
| cabinet | 52.82 | 68.12 |
| sidewalk | 58.72 | 68.45 |
| person | 76.85 | 86.8 |
| earth | 29.81 | 40.07 |
| door | 40.88 | 68.56 |
| table | 47.22 | 64.95 |
| mountain | 53.18 | 73.43 |
| plant | 48.41 | 65.28 |
| curtain | 60.89 | 90.82 |
| chair | 45.48 | 55.27 |
| car | 79.93 | 88.71 |
| water | 51.82 | 89.75 |
| painting | 57.64 | 88.51 |
| sofa | 50.3 | 61.44 |
| shelf | 35.39 | 65.84 |
| house | 49.86 | 59.74 |
| sea | 51.08 | 60.09 |
| mirror | 48.08 | 55.3 |
| rug | 54.77 | 62.35 |
| field | 25.53 | 34.49 |
| armchair | 33.03 | 56.09 |
| seat | 56.47 | 81.53 |
| fence | 29.01 | 41.43 |
| desk | 34.37 | 42.65 |
| rock | 40.88 | 72.13 |
| wardrobe | 46.09 | 59.65 |
| lamp | 52.69 | 67.55 |
| bathtub | 64.53 | 72.18 |
| railing | 32.5 | 43.32 |
| cushion | 47.82 | 65.47 |
| base | 18.66 | 28.6 |
| box | 12.08 | 13.58 |
| column | 38.22 | 50.5 |
| signboard | 29.69 | 42.62 |
| chest of drawers | 32.3 | 62.7 |
| counter | 29.64 | 46.14 |
| sand | 31.18 | 38.09 |
| sink | 48.55 | 84.62 |
| skyscraper | 48.3 | 56.78 |
| fireplace | 60.65 | 73.11 |
| refrigerator | 56.15 | 82.66 |
| grandstand | 35.74 | 62.61 |
| path | 19.24 | 30.89 |
| stairs | 31.66 | 38.85 |
| runway | 63.39 | 77.95 |
| case | 41.3 | 50.93 |
| pool table | 91.12 | 95.81 |
| pillow | 49.58 | 67.91 |
| screen door | 38.99 | 78.89 |
| stairway | 29.47 | 34.81 |
| river | 4.94 | 5.36 |
| bridge | 47.76 | 67.35 |
| bookcase | 33.66 | 65.41 |
| blind | 44.54 | 68.0 |
| coffee table | 39.53 | 86.64 |
| toilet | 75.98 | 93.09 |
| flower | 34.98 | 56.18 |
| book | 36.03 | 44.02 |
| hill | 4.83 | 7.68 |
| bench | 42.27 | 45.44 |
| countertop | 46.72 | 53.55 |
| stove | 64.68 | 82.35 |
| palm | 41.26 | 85.87 |
| kitchen island | 28.71 | 56.19 |
| computer | 58.83 | 67.27 |
| swivel chair | 32.89 | 50.4 |
| boat | 45.28 | 52.27 |
| bar | 21.12 | 27.48 |
| arcade machine | 67.87 | 73.58 |
| hovel | 32.9 | 41.56 |
| bus | 82.22 | 92.39 |
| towel | 49.83 | 72.03 |
| light | 46.67 | 60.75 |
| truck | 21.43 | 24.52 |
| tower | 3.1 | 3.16 |
| chandelier | 53.58 | 89.1 |
| awning | 26.37 | 32.77 |
| streetlight | 17.76 | 22.16 |
| booth | 7.48 | 7.48 |
| television receiver | 58.2 | 80.92 |
| airplane | 54.73 | 62.86 |
| dirt track | 1.0 | 5.16 |
| apparel | 13.33 | 18.55 |
| pole | 9.93 | 11.44 |
| land | 0.0 | 0.0 |
| bannister | 8.24 | 22.56 |
| escalator | 24.76 | 35.68 |
| ottoman | 41.57 | 54.24 |
| bottle | 8.92 | 10.06 |
| buffet | 39.43 | 54.11 |
| poster | 4.43 | 4.68 |
| stage | 11.28 | 24.58 |
| van | 39.12 | 49.76 |
| ship | 26.57 | 34.59 |
| fountain | 9.4 | 11.25 |
| conveyer belt | 55.02 | 83.23 |
| canopy | 0.04 | 0.05 |
| washer | 67.53 | 71.08 |
| plaything | 17.0 | 21.89 |
| swimming pool | 52.53 | 67.72 |
| stool | 31.23 | 54.92 |
| barrel | 20.34 | 64.54 |
| basket | 23.02 | 27.13 |
| waterfall | 53.69 | 87.66 |
| tent | 65.12 | 98.59 |
| bag | 20.6 | 25.66 |
| minibike | 49.78 | 59.69 |
| cradle | 74.9 | 86.88 |
| oven | 18.5 | 29.97 |
| ball | 43.65 | 61.47 |
| food | 21.01 | 21.61 |
| step | 0.04 | 0.04 |
| tank | 25.05 | 25.14 |
| trade name | 20.04 | 21.72 |
| microwave | 48.46 | 61.52 |
| pot | 33.17 | 37.1 |
| animal | 53.39 | 58.72 |
| bicycle | 42.97 | 75.12 |
| lake | 2.2 | 2.67 |
| dishwasher | 55.74 | 78.66 |
| screen | 75.03 | 85.81 |
| blanket | 8.91 | 11.6 |
| sculpture | 28.68 | 54.05 |
| hood | 39.01 | 74.65 |
| sconce | 30.14 | 44.09 |
| vase | 26.56 | 37.15 |
| traffic light | 23.68 | 42.98 |
| tray | 1.22 | 1.53 |
| ashcan | 29.22 | 48.48 |
| fan | 46.47 | 67.06 |
| pier | 46.26 | 49.72 |
| crt screen | 8.7 | 27.18 |
| plate | 37.8 | 63.56 |
| monitor | 0.0 | 0.0 |
| bulletin board | 42.58 | 62.05 |
| shower | 0.0 | 0.0 |
| radiator | 43.28 | 59.7 |
| glass | 7.12 | 8.01 |
| clock | 9.22 | 11.37 |
| flag | 27.32 | 28.67 |
+---------------------+-------+-------+
2023-02-11 20:59:01,156 - mmseg - INFO - Summary:
2023-02-11 20:59:01,156 - mmseg - INFO -
+-------+-------+-------+
| aAcc | mIoU | mAcc |
+-------+-------+-------+
| 79.06 | 39.42 | 53.05 |
+-------+-------+-------+
2023-02-11 20:59:01,828 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_48000.pth.
2023-02-11 20:59:01,828 - mmseg - INFO - Best mIoU is 0.3942 at 48000 iter.
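For reference, the summary row follows from the per-class table above: aAcc is overall pixel accuracy, while mIoU and mAcc are unweighted means of the per-class IoU and Acc columns, which is why the many near-zero tail classes pull mIoU (39.42) well below aAcc (79.06); the evaluation hook then keeps the checkpoint with the highest mIoU, here best_mIoU_iter_48000.pth. A minimal sketch of that reduction (the per_class dict is a hypothetical stand-in holding only a few of the 150 rows, so its means will not reproduce 39.42 / 53.05):

    # Sketch: how the summary metrics are reduced from the per-class table.
    # Only a handful of rows are copied here for illustration.
    per_class = {
        "wall": (71.12, 79.93),
        "building": (80.83, 94.22),
        "sky": (93.5, 96.12),
        "tower": (3.1, 3.16),
        "land": (0.0, 0.0),
    }
    mIoU = sum(iou for iou, _ in per_class.values()) / len(per_class)
    mAcc = sum(acc for _, acc in per_class.values()) / len(per_class)
    print(f"mIoU={mIoU:.2f}  mAcc={mAcc:.2f}")
    # Averaging over all 150 classes gives the logged mIoU 39.42 and mAcc 53.05.
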
2023-02-11 20:59:01,828 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 20:59:01,829 - mmseg - INFO - Iter(val) [250] aAcc: 0.7906, mIoU: 0.3942, mAcc: 0.5305, IoU.wall: 0.7112, IoU.building: 0.8083, IoU.sky: 0.9350, IoU.floor: 0.7786, IoU.tree: 0.7105, IoU.ceiling: 0.8099, IoU.road: 0.7943, IoU.bed : 0.8345, IoU.windowpane: 0.5628, IoU.grass: 0.6692, IoU.cabinet: 0.5282, IoU.sidewalk: 0.5872, IoU.person: 0.7685, IoU.earth: 0.2981, IoU.door: 0.4088, IoU.table: 0.4722, IoU.mountain: 0.5318, IoU.plant: 0.4841, IoU.curtain: 0.6089, IoU.chair: 0.4548, IoU.car: 0.7993, IoU.water: 0.5182, IoU.painting: 0.5764, IoU.sofa: 0.5030, IoU.shelf: 0.3539, IoU.house: 0.4986, IoU.sea: 0.5108, IoU.mirror: 0.4808, IoU.rug: 0.5477, IoU.field: 0.2553, IoU.armchair: 0.3303, IoU.seat: 0.5647, IoU.fence: 0.2901, IoU.desk: 0.3437, IoU.rock: 0.4088, IoU.wardrobe: 0.4609, IoU.lamp: 0.5269, IoU.bathtub: 0.6453, IoU.railing: 0.3250, IoU.cushion: 0.4782, IoU.base: 0.1866, IoU.box: 0.1208, IoU.column: 0.3822, IoU.signboard: 0.2969, IoU.chest of drawers: 0.3230, IoU.counter: 0.2964, IoU.sand: 0.3118, IoU.sink: 0.4855, IoU.skyscraper: 0.4830, IoU.fireplace: 0.6065, IoU.refrigerator: 0.5615, IoU.grandstand: 0.3574, IoU.path: 0.1924, IoU.stairs: 0.3166, IoU.runway: 0.6339, IoU.case: 0.4130, IoU.pool table: 0.9112, IoU.pillow: 0.4958, IoU.screen door: 0.3899, IoU.stairway: 0.2947, IoU.river: 0.0494, IoU.bridge: 0.4776, IoU.bookcase: 0.3366, IoU.blind: 0.4454, IoU.coffee table: 0.3953, IoU.toilet: 0.7598, IoU.flower: 0.3498, IoU.book: 0.3603, IoU.hill: 0.0483, IoU.bench: 0.4227, IoU.countertop: 0.4672, IoU.stove: 0.6468, IoU.palm: 0.4126, IoU.kitchen island: 0.2871, IoU.computer: 0.5883, IoU.swivel chair: 0.3289, IoU.boat: 0.4528, IoU.bar: 0.2112, IoU.arcade machine: 0.6787, IoU.hovel: 0.3290, IoU.bus: 0.8222, IoU.towel: 0.4983, IoU.light: 0.4667, IoU.truck: 0.2143, IoU.tower: 0.0310, IoU.chandelier: 0.5358, IoU.awning: 0.2637, IoU.streetlight: 0.1776, IoU.booth: 0.0748, IoU.television receiver: 0.5820, IoU.airplane: 0.5473, IoU.dirt track: 0.0100, IoU.apparel: 0.1333, IoU.pole: 0.0993, IoU.land: 0.0000, IoU.bannister: 0.0824, IoU.escalator: 0.2476, IoU.ottoman: 0.4157, IoU.bottle: 0.0892, IoU.buffet: 0.3943, IoU.poster: 0.0443, IoU.stage: 0.1128, IoU.van: 0.3912, IoU.ship: 0.2657, IoU.fountain: 0.0940, IoU.conveyer belt: 0.5502, IoU.canopy: 0.0004, IoU.washer: 0.6753, IoU.plaything: 0.1700, IoU.swimming pool: 0.5253, IoU.stool: 0.3123, IoU.barrel: 0.2034, IoU.basket: 0.2302, IoU.waterfall: 0.5369, IoU.tent: 0.6512, IoU.bag: 0.2060, IoU.minibike: 0.4978, IoU.cradle: 0.7490, IoU.oven: 0.1850, IoU.ball: 0.4365, IoU.food: 0.2101, IoU.step: 0.0004, IoU.tank: 0.2505, IoU.trade name: 0.2004, IoU.microwave: 0.4846, IoU.pot: 0.3317, IoU.animal: 0.5339, IoU.bicycle: 0.4297, IoU.lake: 0.0220, IoU.dishwasher: 0.5574, IoU.screen: 0.7503, IoU.blanket: 0.0891, IoU.sculpture: 0.2868, IoU.hood: 0.3901, IoU.sconce: 0.3014, IoU.vase: 0.2656, IoU.traffic light: 0.2368, IoU.tray: 0.0122, IoU.ashcan: 0.2922, IoU.fan: 0.4647, IoU.pier: 0.4626, IoU.crt screen: 0.0870, IoU.plate: 0.3780, IoU.monitor: 0.0000, IoU.bulletin board: 0.4258, IoU.shower: 0.0000, IoU.radiator: 0.4328, IoU.glass: 0.0712, IoU.clock: 0.0922, IoU.flag: 0.2732, Acc.wall: 0.7993, Acc.building: 0.9422, Acc.sky: 0.9612, Acc.floor: 0.9005, Acc.tree: 0.7900, Acc.ceiling: 0.9164, Acc.road: 0.9032, Acc.bed : 0.9482, Acc.windowpane: 0.7064, Acc.grass: 0.8701, Acc.cabinet: 0.6812, Acc.sidewalk: 0.6845, Acc.person: 0.8680, Acc.earth: 0.4007, Acc.door: 
0.6856, Acc.table: 0.6495, Acc.mountain: 0.7343, Acc.plant: 0.6528, Acc.curtain: 0.9082, Acc.chair: 0.5527, Acc.car: 0.8871, Acc.water: 0.8975, Acc.painting: 0.8851, Acc.sofa: 0.6144, Acc.shelf: 0.6584, Acc.house: 0.5974, Acc.sea: 0.6009, Acc.mirror: 0.5530, Acc.rug: 0.6235, Acc.field: 0.3449, Acc.armchair: 0.5609, Acc.seat: 0.8153, Acc.fence: 0.4143, Acc.desk: 0.4265, Acc.rock: 0.7213, Acc.wardrobe: 0.5965, Acc.lamp: 0.6755, Acc.bathtub: 0.7218, Acc.railing: 0.4332, Acc.cushion: 0.6547, Acc.base: 0.2860, Acc.box: 0.1358, Acc.column: 0.5050, Acc.signboard: 0.4262, Acc.chest of drawers: 0.6270, Acc.counter: 0.4614, Acc.sand: 0.3809, Acc.sink: 0.8462, Acc.skyscraper: 0.5678, Acc.fireplace: 0.7311, Acc.refrigerator: 0.8266, Acc.grandstand: 0.6261, Acc.path: 0.3089, Acc.stairs: 0.3885, Acc.runway: 0.7795, Acc.case: 0.5093, Acc.pool table: 0.9581, Acc.pillow: 0.6791, Acc.screen door: 0.7889, Acc.stairway: 0.3481, Acc.river: 0.0536, Acc.bridge: 0.6735, Acc.bookcase: 0.6541, Acc.blind: 0.6800, Acc.coffee table: 0.8664, Acc.toilet: 0.9309, Acc.flower: 0.5618, Acc.book: 0.4402, Acc.hill: 0.0768, Acc.bench: 0.4544, Acc.countertop: 0.5355, Acc.stove: 0.8235, Acc.palm: 0.8587, Acc.kitchen island: 0.5619, Acc.computer: 0.6727, Acc.swivel chair: 0.5040, Acc.boat: 0.5227, Acc.bar: 0.2748, Acc.arcade machine: 0.7358, Acc.hovel: 0.4156, Acc.bus: 0.9239, Acc.towel: 0.7203, Acc.light: 0.6075, Acc.truck: 0.2452, Acc.tower: 0.0316, Acc.chandelier: 0.8910, Acc.awning: 0.3277, Acc.streetlight: 0.2216, Acc.booth: 0.0748, Acc.television receiver: 0.8092, Acc.airplane: 0.6286, Acc.dirt track: 0.0516, Acc.apparel: 0.1855, Acc.pole: 0.1144, Acc.land: 0.0000, Acc.bannister: 0.2256, Acc.escalator: 0.3568, Acc.ottoman: 0.5424, Acc.bottle: 0.1006, Acc.buffet: 0.5411, Acc.poster: 0.0468, Acc.stage: 0.2458, Acc.van: 0.4976, Acc.ship: 0.3459, Acc.fountain: 0.1125, Acc.conveyer belt: 0.8323, Acc.canopy: 0.0005, Acc.washer: 0.7108, Acc.plaything: 0.2189, Acc.swimming pool: 0.6772, Acc.stool: 0.5492, Acc.barrel: 0.6454, Acc.basket: 0.2713, Acc.waterfall: 0.8766, Acc.tent: 0.9859, Acc.bag: 0.2566, Acc.minibike: 0.5969, Acc.cradle: 0.8688, Acc.oven: 0.2997, Acc.ball: 0.6147, Acc.food: 0.2161, Acc.step: 0.0004, Acc.tank: 0.2514, Acc.trade name: 0.2172, Acc.microwave: 0.6152, Acc.pot: 0.3710, Acc.animal: 0.5872, Acc.bicycle: 0.7512, Acc.lake: 0.0267, Acc.dishwasher: 0.7866, Acc.screen: 0.8581, Acc.blanket: 0.1160, Acc.sculpture: 0.5405, Acc.hood: 0.7465, Acc.sconce: 0.4409, Acc.vase: 0.3715, Acc.traffic light: 0.4298, Acc.tray: 0.0153, Acc.ashcan: 0.4848, Acc.fan: 0.6706, Acc.pier: 0.4972, Acc.crt screen: 0.2718, Acc.plate: 0.6356, Acc.monitor: 0.0000, Acc.bulletin board: 0.6205, Acc.shower: 0.0000, Acc.radiator: 0.5970, Acc.glass: 0.0801, Acc.clock: 0.1137, Acc.flag: 0.2867 2023-02-11 20:59:11,697 - mmseg - INFO - Iter [48050/160000] lr: 4.198e-05, eta: 6:29:41, time: 0.448, data_time: 0.255, memory: 7748, decode.loss_ce: 0.3563, decode.acc_seg: 86.3962, aux.loss_ce: 0.2254, aux.acc_seg: 78.8837, loss: 0.5817, grad_norm: 5.6434 2023-02-11 20:59:21,815 - mmseg - INFO - Iter [48100/160000] lr: 4.196e-05, eta: 6:29:29, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3691, decode.acc_seg: 86.0581, aux.loss_ce: 0.2361, aux.acc_seg: 77.5607, loss: 0.6052, grad_norm: 6.3454 2023-02-11 20:59:32,355 - mmseg - INFO - Iter [48150/160000] lr: 4.194e-05, eta: 6:29:19, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3830, decode.acc_seg: 85.6873, aux.loss_ce: 0.2348, aux.acc_seg: 78.0490, loss: 0.6178, grad_norm: 
5.4436 2023-02-11 20:59:42,154 - mmseg - INFO - Iter [48200/160000] lr: 4.193e-05, eta: 6:29:06, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3588, decode.acc_seg: 86.2709, aux.loss_ce: 0.2284, aux.acc_seg: 78.0965, loss: 0.5872, grad_norm: 5.9508 2023-02-11 20:59:52,140 - mmseg - INFO - Iter [48250/160000] lr: 4.191e-05, eta: 6:28:54, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3820, decode.acc_seg: 85.7780, aux.loss_ce: 0.2337, aux.acc_seg: 78.3988, loss: 0.6156, grad_norm: 5.9955 2023-02-11 21:00:01,995 - mmseg - INFO - Iter [48300/160000] lr: 4.189e-05, eta: 6:28:42, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3540, decode.acc_seg: 86.7111, aux.loss_ce: 0.2250, aux.acc_seg: 78.6351, loss: 0.5789, grad_norm: 8.0546 2023-02-11 21:00:11,848 - mmseg - INFO - Iter [48350/160000] lr: 4.187e-05, eta: 6:28:29, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3946, decode.acc_seg: 85.5098, aux.loss_ce: 0.2429, aux.acc_seg: 77.2705, loss: 0.6375, grad_norm: 6.6129 2023-02-11 21:00:21,727 - mmseg - INFO - Iter [48400/160000] lr: 4.185e-05, eta: 6:28:17, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3756, decode.acc_seg: 85.9227, aux.loss_ce: 0.2332, aux.acc_seg: 78.4485, loss: 0.6088, grad_norm: 6.0996 2023-02-11 21:00:31,834 - mmseg - INFO - Iter [48450/160000] lr: 4.183e-05, eta: 6:28:05, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3643, decode.acc_seg: 86.0496, aux.loss_ce: 0.2264, aux.acc_seg: 78.8784, loss: 0.5907, grad_norm: 5.9396 2023-02-11 21:00:41,663 - mmseg - INFO - Iter [48500/160000] lr: 4.181e-05, eta: 6:27:53, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3964, decode.acc_seg: 84.8888, aux.loss_ce: 0.2483, aux.acc_seg: 76.8967, loss: 0.6447, grad_norm: 6.6113 2023-02-11 21:00:51,571 - mmseg - INFO - Iter [48550/160000] lr: 4.179e-05, eta: 6:27:41, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3821, decode.acc_seg: 86.0055, aux.loss_ce: 0.2388, aux.acc_seg: 78.4490, loss: 0.6208, grad_norm: 7.0272 2023-02-11 21:01:02,029 - mmseg - INFO - Iter [48600/160000] lr: 4.178e-05, eta: 6:27:30, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3525, decode.acc_seg: 86.4610, aux.loss_ce: 0.2198, aux.acc_seg: 78.9783, loss: 0.5723, grad_norm: 5.6470 2023-02-11 21:01:11,912 - mmseg - INFO - Iter [48650/160000] lr: 4.176e-05, eta: 6:27:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3773, decode.acc_seg: 85.5155, aux.loss_ce: 0.2349, aux.acc_seg: 77.8560, loss: 0.6121, grad_norm: 5.9968 2023-02-11 21:01:22,298 - mmseg - INFO - Iter [48700/160000] lr: 4.174e-05, eta: 6:27:07, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3812, decode.acc_seg: 85.8711, aux.loss_ce: 0.2401, aux.acc_seg: 78.3035, loss: 0.6212, grad_norm: 6.2665 2023-02-11 21:01:32,329 - mmseg - INFO - Iter [48750/160000] lr: 4.172e-05, eta: 6:26:56, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3559, decode.acc_seg: 86.7905, aux.loss_ce: 0.2331, aux.acc_seg: 78.5016, loss: 0.5890, grad_norm: 5.5120 2023-02-11 21:01:42,429 - mmseg - INFO - Iter [48800/160000] lr: 4.170e-05, eta: 6:26:44, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3768, decode.acc_seg: 85.7947, aux.loss_ce: 0.2313, aux.acc_seg: 78.3286, loss: 0.6081, grad_norm: 5.8180 2023-02-11 21:01:54,631 - mmseg - INFO - Iter [48850/160000] lr: 4.168e-05, eta: 6:26:40, time: 0.244, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3623, decode.acc_seg: 
85.8952, aux.loss_ce: 0.2356, aux.acc_seg: 77.6640, loss: 0.5979, grad_norm: 6.1239 2023-02-11 21:02:04,707 - mmseg - INFO - Iter [48900/160000] lr: 4.166e-05, eta: 6:26:28, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3852, decode.acc_seg: 85.3761, aux.loss_ce: 0.2399, aux.acc_seg: 77.5634, loss: 0.6250, grad_norm: 6.2494 2023-02-11 21:02:14,765 - mmseg - INFO - Iter [48950/160000] lr: 4.164e-05, eta: 6:26:16, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3435, decode.acc_seg: 87.0426, aux.loss_ce: 0.2135, aux.acc_seg: 80.0081, loss: 0.5570, grad_norm: 5.5650 2023-02-11 21:02:24,735 - mmseg - INFO - Saving checkpoint at 49000 iterations 2023-02-11 21:02:25,420 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:02:25,420 - mmseg - INFO - Iter [49000/160000] lr: 4.163e-05, eta: 6:26:07, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3717, decode.acc_seg: 86.3301, aux.loss_ce: 0.2227, aux.acc_seg: 79.4044, loss: 0.5944, grad_norm: 5.9882 2023-02-11 21:02:35,479 - mmseg - INFO - Iter [49050/160000] lr: 4.161e-05, eta: 6:25:55, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3426, decode.acc_seg: 87.3275, aux.loss_ce: 0.2179, aux.acc_seg: 79.6178, loss: 0.5605, grad_norm: 5.5315 2023-02-11 21:02:45,625 - mmseg - INFO - Iter [49100/160000] lr: 4.159e-05, eta: 6:25:43, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3855, decode.acc_seg: 85.4995, aux.loss_ce: 0.2332, aux.acc_seg: 78.2163, loss: 0.6188, grad_norm: 6.4717 2023-02-11 21:02:55,532 - mmseg - INFO - Iter [49150/160000] lr: 4.157e-05, eta: 6:25:31, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3454, decode.acc_seg: 86.6747, aux.loss_ce: 0.2236, aux.acc_seg: 78.5817, loss: 0.5690, grad_norm: 5.6868 2023-02-11 21:03:05,349 - mmseg - INFO - Iter [49200/160000] lr: 4.155e-05, eta: 6:25:19, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3854, decode.acc_seg: 85.4943, aux.loss_ce: 0.2462, aux.acc_seg: 77.0424, loss: 0.6316, grad_norm: 5.5931 2023-02-11 21:03:15,744 - mmseg - INFO - Iter [49250/160000] lr: 4.153e-05, eta: 6:25:08, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3730, decode.acc_seg: 85.7863, aux.loss_ce: 0.2244, aux.acc_seg: 78.4967, loss: 0.5974, grad_norm: 5.7657 2023-02-11 21:03:26,098 - mmseg - INFO - Iter [49300/160000] lr: 4.151e-05, eta: 6:24:57, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3873, decode.acc_seg: 85.8176, aux.loss_ce: 0.2473, aux.acc_seg: 77.3524, loss: 0.6346, grad_norm: 5.7516 2023-02-11 21:03:36,045 - mmseg - INFO - Iter [49350/160000] lr: 4.149e-05, eta: 6:24:45, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3602, decode.acc_seg: 86.1471, aux.loss_ce: 0.2242, aux.acc_seg: 78.7434, loss: 0.5844, grad_norm: 5.2668 2023-02-11 21:03:46,111 - mmseg - INFO - Iter [49400/160000] lr: 4.148e-05, eta: 6:24:34, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3635, decode.acc_seg: 85.9100, aux.loss_ce: 0.2270, aux.acc_seg: 78.1573, loss: 0.5905, grad_norm: 5.4798 2023-02-11 21:03:56,281 - mmseg - INFO - Iter [49450/160000] lr: 4.146e-05, eta: 6:24:22, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3431, decode.acc_seg: 87.0173, aux.loss_ce: 0.2220, aux.acc_seg: 78.8356, loss: 0.5651, grad_norm: 5.5414 2023-02-11 21:04:06,079 - mmseg - INFO - Iter [49500/160000] lr: 4.144e-05, eta: 6:24:10, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3638, decode.acc_seg: 86.4366, 
aux.loss_ce: 0.2296, aux.acc_seg: 78.3544, loss: 0.5934, grad_norm: 5.6882 2023-02-11 21:04:16,720 - mmseg - INFO - Iter [49550/160000] lr: 4.142e-05, eta: 6:24:00, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3663, decode.acc_seg: 86.1444, aux.loss_ce: 0.2329, aux.acc_seg: 78.4790, loss: 0.5991, grad_norm: 6.1214 2023-02-11 21:04:26,634 - mmseg - INFO - Iter [49600/160000] lr: 4.140e-05, eta: 6:23:48, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3675, decode.acc_seg: 85.8290, aux.loss_ce: 0.2256, aux.acc_seg: 78.8039, loss: 0.5931, grad_norm: 5.1887 2023-02-11 21:04:36,512 - mmseg - INFO - Iter [49650/160000] lr: 4.138e-05, eta: 6:23:36, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3851, decode.acc_seg: 85.4881, aux.loss_ce: 0.2407, aux.acc_seg: 77.6991, loss: 0.6258, grad_norm: 6.3700 2023-02-11 21:04:46,481 - mmseg - INFO - Iter [49700/160000] lr: 4.136e-05, eta: 6:23:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3520, decode.acc_seg: 86.7984, aux.loss_ce: 0.2256, aux.acc_seg: 78.7111, loss: 0.5775, grad_norm: 5.1747 2023-02-11 21:04:56,294 - mmseg - INFO - Iter [49750/160000] lr: 4.134e-05, eta: 6:23:12, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3555, decode.acc_seg: 86.6996, aux.loss_ce: 0.2217, aux.acc_seg: 78.8334, loss: 0.5773, grad_norm: 5.6258 2023-02-11 21:05:06,382 - mmseg - INFO - Iter [49800/160000] lr: 4.133e-05, eta: 6:23:00, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3710, decode.acc_seg: 86.2123, aux.loss_ce: 0.2350, aux.acc_seg: 78.3237, loss: 0.6060, grad_norm: 5.4577 2023-02-11 21:05:16,628 - mmseg - INFO - Iter [49850/160000] lr: 4.131e-05, eta: 6:22:49, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3661, decode.acc_seg: 86.3868, aux.loss_ce: 0.2383, aux.acc_seg: 77.3151, loss: 0.6044, grad_norm: 7.2492 2023-02-11 21:05:26,588 - mmseg - INFO - Iter [49900/160000] lr: 4.129e-05, eta: 6:22:37, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3517, decode.acc_seg: 86.9791, aux.loss_ce: 0.2289, aux.acc_seg: 78.5661, loss: 0.5805, grad_norm: 6.6207 2023-02-11 21:05:36,366 - mmseg - INFO - Iter [49950/160000] lr: 4.127e-05, eta: 6:22:24, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3536, decode.acc_seg: 86.2516, aux.loss_ce: 0.2256, aux.acc_seg: 78.2712, loss: 0.5792, grad_norm: 6.0844 2023-02-11 21:05:46,146 - mmseg - INFO - Saving checkpoint at 50000 iterations 2023-02-11 21:05:46,824 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:05:46,824 - mmseg - INFO - Iter [50000/160000] lr: 4.125e-05, eta: 6:22:14, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3572, decode.acc_seg: 86.2950, aux.loss_ce: 0.2199, aux.acc_seg: 79.3134, loss: 0.5771, grad_norm: 5.6314 2023-02-11 21:05:56,879 - mmseg - INFO - Iter [50050/160000] lr: 4.123e-05, eta: 6:22:03, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3944, decode.acc_seg: 85.3095, aux.loss_ce: 0.2407, aux.acc_seg: 78.0935, loss: 0.6352, grad_norm: 5.7082 2023-02-11 21:06:06,916 - mmseg - INFO - Iter [50100/160000] lr: 4.121e-05, eta: 6:21:51, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3572, decode.acc_seg: 86.7913, aux.loss_ce: 0.2310, aux.acc_seg: 78.6181, loss: 0.5882, grad_norm: 5.4554 2023-02-11 21:06:20,271 - mmseg - INFO - Iter [50150/160000] lr: 4.119e-05, eta: 6:21:50, time: 0.267, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3537, decode.acc_seg: 86.3664, 
aux.loss_ce: 0.2152, aux.acc_seg: 79.4216, loss: 0.5689, grad_norm: 5.6532 2023-02-11 21:06:30,283 - mmseg - INFO - Iter [50200/160000] lr: 4.118e-05, eta: 6:21:38, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3864, decode.acc_seg: 85.2058, aux.loss_ce: 0.2375, aux.acc_seg: 77.7262, loss: 0.6238, grad_norm: 6.0064 2023-02-11 21:06:40,224 - mmseg - INFO - Iter [50250/160000] lr: 4.116e-05, eta: 6:21:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3471, decode.acc_seg: 86.6225, aux.loss_ce: 0.2226, aux.acc_seg: 78.8190, loss: 0.5698, grad_norm: 5.4686 2023-02-11 21:06:50,290 - mmseg - INFO - Iter [50300/160000] lr: 4.114e-05, eta: 6:21:15, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3520, decode.acc_seg: 86.9560, aux.loss_ce: 0.2247, aux.acc_seg: 79.1033, loss: 0.5766, grad_norm: 6.0701 2023-02-11 21:07:00,424 - mmseg - INFO - Iter [50350/160000] lr: 4.112e-05, eta: 6:21:03, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3537, decode.acc_seg: 86.7795, aux.loss_ce: 0.2281, aux.acc_seg: 78.9349, loss: 0.5819, grad_norm: 5.0863 2023-02-11 21:07:11,137 - mmseg - INFO - Iter [50400/160000] lr: 4.110e-05, eta: 6:20:54, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3484, decode.acc_seg: 87.1050, aux.loss_ce: 0.2211, aux.acc_seg: 79.2872, loss: 0.5694, grad_norm: 5.2948 2023-02-11 21:07:21,925 - mmseg - INFO - Iter [50450/160000] lr: 4.108e-05, eta: 6:20:44, time: 0.216, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3557, decode.acc_seg: 86.6373, aux.loss_ce: 0.2170, aux.acc_seg: 79.4952, loss: 0.5727, grad_norm: 4.5735 2023-02-11 21:07:32,115 - mmseg - INFO - Iter [50500/160000] lr: 4.106e-05, eta: 6:20:33, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3698, decode.acc_seg: 86.8492, aux.loss_ce: 0.2304, aux.acc_seg: 79.1813, loss: 0.6001, grad_norm: 5.2908 2023-02-11 21:07:42,886 - mmseg - INFO - Iter [50550/160000] lr: 4.104e-05, eta: 6:20:24, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3509, decode.acc_seg: 86.7307, aux.loss_ce: 0.2234, aux.acc_seg: 78.4737, loss: 0.5743, grad_norm: 6.2408 2023-02-11 21:07:52,895 - mmseg - INFO - Iter [50600/160000] lr: 4.103e-05, eta: 6:20:12, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3515, decode.acc_seg: 86.4845, aux.loss_ce: 0.2220, aux.acc_seg: 79.4269, loss: 0.5735, grad_norm: 6.0809 2023-02-11 21:08:03,040 - mmseg - INFO - Iter [50650/160000] lr: 4.101e-05, eta: 6:20:01, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3902, decode.acc_seg: 85.5487, aux.loss_ce: 0.2433, aux.acc_seg: 78.0202, loss: 0.6335, grad_norm: 6.6701 2023-02-11 21:08:12,654 - mmseg - INFO - Iter [50700/160000] lr: 4.099e-05, eta: 6:19:48, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3667, decode.acc_seg: 86.0153, aux.loss_ce: 0.2317, aux.acc_seg: 78.5475, loss: 0.5985, grad_norm: 7.3666 2023-02-11 21:08:22,571 - mmseg - INFO - Iter [50750/160000] lr: 4.097e-05, eta: 6:19:36, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3628, decode.acc_seg: 86.3427, aux.loss_ce: 0.2319, aux.acc_seg: 78.2802, loss: 0.5947, grad_norm: 5.8796 2023-02-11 21:08:32,593 - mmseg - INFO - Iter [50800/160000] lr: 4.095e-05, eta: 6:19:24, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3602, decode.acc_seg: 86.2813, aux.loss_ce: 0.2286, aux.acc_seg: 78.1047, loss: 0.5888, grad_norm: 5.7981 2023-02-11 21:08:42,796 - mmseg - INFO - Iter [50850/160000] lr: 4.093e-05, eta: 6:19:13, time: 0.205, 
data_time: 0.005, memory: 7748, decode.loss_ce: 0.3579, decode.acc_seg: 86.4312, aux.loss_ce: 0.2241, aux.acc_seg: 78.9850, loss: 0.5820, grad_norm: 6.7849 2023-02-11 21:08:52,734 - mmseg - INFO - Iter [50900/160000] lr: 4.091e-05, eta: 6:19:01, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3473, decode.acc_seg: 86.6355, aux.loss_ce: 0.2259, aux.acc_seg: 78.9178, loss: 0.5732, grad_norm: 5.8232 2023-02-11 21:09:02,649 - mmseg - INFO - Iter [50950/160000] lr: 4.089e-05, eta: 6:18:49, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3489, decode.acc_seg: 86.2309, aux.loss_ce: 0.2235, aux.acc_seg: 78.5258, loss: 0.5724, grad_norm: 5.3023 2023-02-11 21:09:12,568 - mmseg - INFO - Saving checkpoint at 51000 iterations 2023-02-11 21:09:13,254 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:09:13,254 - mmseg - INFO - Iter [51000/160000] lr: 4.088e-05, eta: 6:18:39, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3303, decode.acc_seg: 86.7689, aux.loss_ce: 0.2129, aux.acc_seg: 79.1893, loss: 0.5432, grad_norm: 6.9131 2023-02-11 21:09:23,369 - mmseg - INFO - Iter [51050/160000] lr: 4.086e-05, eta: 6:18:28, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3375, decode.acc_seg: 87.0598, aux.loss_ce: 0.2183, aux.acc_seg: 79.4345, loss: 0.5558, grad_norm: 5.4141 2023-02-11 21:09:33,550 - mmseg - INFO - Iter [51100/160000] lr: 4.084e-05, eta: 6:18:17, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3575, decode.acc_seg: 86.7364, aux.loss_ce: 0.2252, aux.acc_seg: 79.2297, loss: 0.5828, grad_norm: 5.2780 2023-02-11 21:09:43,457 - mmseg - INFO - Iter [51150/160000] lr: 4.082e-05, eta: 6:18:05, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3632, decode.acc_seg: 85.8361, aux.loss_ce: 0.2345, aux.acc_seg: 77.6549, loss: 0.5977, grad_norm: 6.3914 2023-02-11 21:09:53,393 - mmseg - INFO - Iter [51200/160000] lr: 4.080e-05, eta: 6:17:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3741, decode.acc_seg: 85.9274, aux.loss_ce: 0.2297, aux.acc_seg: 78.2170, loss: 0.6038, grad_norm: 6.3219 2023-02-11 21:10:03,296 - mmseg - INFO - Iter [51250/160000] lr: 4.078e-05, eta: 6:17:41, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3763, decode.acc_seg: 85.6581, aux.loss_ce: 0.2295, aux.acc_seg: 77.8714, loss: 0.6059, grad_norm: 6.5122 2023-02-11 21:10:13,373 - mmseg - INFO - Iter [51300/160000] lr: 4.076e-05, eta: 6:17:29, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3461, decode.acc_seg: 86.7653, aux.loss_ce: 0.2197, aux.acc_seg: 79.2161, loss: 0.5658, grad_norm: 6.9371 2023-02-11 21:10:23,681 - mmseg - INFO - Iter [51350/160000] lr: 4.074e-05, eta: 6:17:18, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3966, decode.acc_seg: 85.2875, aux.loss_ce: 0.2543, aux.acc_seg: 76.8875, loss: 0.6509, grad_norm: 5.8517 2023-02-11 21:10:35,994 - mmseg - INFO - Iter [51400/160000] lr: 4.073e-05, eta: 6:17:14, time: 0.246, data_time: 0.048, memory: 7748, decode.loss_ce: 0.3667, decode.acc_seg: 86.1743, aux.loss_ce: 0.2378, aux.acc_seg: 77.9957, loss: 0.6045, grad_norm: 6.1872 2023-02-11 21:10:46,655 - mmseg - INFO - Iter [51450/160000] lr: 4.071e-05, eta: 6:17:04, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3428, decode.acc_seg: 86.9300, aux.loss_ce: 0.2176, aux.acc_seg: 79.5244, loss: 0.5604, grad_norm: 6.1722 2023-02-11 21:10:56,834 - mmseg - INFO - Iter [51500/160000] lr: 4.069e-05, eta: 6:16:53, time: 0.204, data_time: 
0.005, memory: 7748, decode.loss_ce: 0.3540, decode.acc_seg: 86.7152, aux.loss_ce: 0.2268, aux.acc_seg: 79.0539, loss: 0.5808, grad_norm: 6.9921 2023-02-11 21:11:06,686 - mmseg - INFO - Iter [51550/160000] lr: 4.067e-05, eta: 6:16:41, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3359, decode.acc_seg: 87.0255, aux.loss_ce: 0.2230, aux.acc_seg: 78.6946, loss: 0.5589, grad_norm: 5.8728 2023-02-11 21:11:16,783 - mmseg - INFO - Iter [51600/160000] lr: 4.065e-05, eta: 6:16:29, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3531, decode.acc_seg: 86.5364, aux.loss_ce: 0.2182, aux.acc_seg: 79.0266, loss: 0.5713, grad_norm: 5.2393 2023-02-11 21:11:27,420 - mmseg - INFO - Iter [51650/160000] lr: 4.063e-05, eta: 6:16:20, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3334, decode.acc_seg: 87.0841, aux.loss_ce: 0.2166, aux.acc_seg: 79.5056, loss: 0.5500, grad_norm: 5.1745 2023-02-11 21:11:37,484 - mmseg - INFO - Iter [51700/160000] lr: 4.061e-05, eta: 6:16:08, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3686, decode.acc_seg: 86.2147, aux.loss_ce: 0.2327, aux.acc_seg: 78.7184, loss: 0.6013, grad_norm: 6.4612 2023-02-11 21:11:47,421 - mmseg - INFO - Iter [51750/160000] lr: 4.059e-05, eta: 6:15:56, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3576, decode.acc_seg: 86.4963, aux.loss_ce: 0.2260, aux.acc_seg: 78.7699, loss: 0.5836, grad_norm: 6.0120 2023-02-11 21:11:57,259 - mmseg - INFO - Iter [51800/160000] lr: 4.058e-05, eta: 6:15:44, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3606, decode.acc_seg: 86.2560, aux.loss_ce: 0.2268, aux.acc_seg: 78.8844, loss: 0.5874, grad_norm: 6.8220 2023-02-11 21:12:07,374 - mmseg - INFO - Iter [51850/160000] lr: 4.056e-05, eta: 6:15:33, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3535, decode.acc_seg: 86.1322, aux.loss_ce: 0.2229, aux.acc_seg: 78.3674, loss: 0.5764, grad_norm: 5.3653 2023-02-11 21:12:17,560 - mmseg - INFO - Iter [51900/160000] lr: 4.054e-05, eta: 6:15:22, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3533, decode.acc_seg: 86.8885, aux.loss_ce: 0.2263, aux.acc_seg: 78.7025, loss: 0.5796, grad_norm: 5.6175 2023-02-11 21:12:28,744 - mmseg - INFO - Iter [51950/160000] lr: 4.052e-05, eta: 6:15:14, time: 0.224, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3732, decode.acc_seg: 85.9930, aux.loss_ce: 0.2281, aux.acc_seg: 78.7659, loss: 0.6012, grad_norm: 6.0932 2023-02-11 21:12:38,741 - mmseg - INFO - Saving checkpoint at 52000 iterations 2023-02-11 21:12:39,429 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:12:39,429 - mmseg - INFO - Iter [52000/160000] lr: 4.050e-05, eta: 6:15:04, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3690, decode.acc_seg: 86.0951, aux.loss_ce: 0.2305, aux.acc_seg: 78.2826, loss: 0.5995, grad_norm: 6.6233 2023-02-11 21:12:49,271 - mmseg - INFO - Iter [52050/160000] lr: 4.048e-05, eta: 6:14:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3438, decode.acc_seg: 86.7384, aux.loss_ce: 0.2191, aux.acc_seg: 79.5130, loss: 0.5629, grad_norm: 4.7527 2023-02-11 21:12:59,708 - mmseg - INFO - Iter [52100/160000] lr: 4.046e-05, eta: 6:14:41, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3585, decode.acc_seg: 86.5525, aux.loss_ce: 0.2320, aux.acc_seg: 78.3620, loss: 0.5905, grad_norm: 5.4397 2023-02-11 21:13:09,644 - mmseg - INFO - Iter [52150/160000] lr: 4.044e-05, eta: 6:14:30, time: 0.199, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.3607, decode.acc_seg: 86.2259, aux.loss_ce: 0.2343, aux.acc_seg: 77.9113, loss: 0.5951, grad_norm: 6.1525 2023-02-11 21:13:19,840 - mmseg - INFO - Iter [52200/160000] lr: 4.043e-05, eta: 6:14:19, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3639, decode.acc_seg: 86.3624, aux.loss_ce: 0.2284, aux.acc_seg: 78.8900, loss: 0.5923, grad_norm: 6.8751 2023-02-11 21:13:30,358 - mmseg - INFO - Iter [52250/160000] lr: 4.041e-05, eta: 6:14:08, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3658, decode.acc_seg: 86.1072, aux.loss_ce: 0.2323, aux.acc_seg: 78.1153, loss: 0.5981, grad_norm: 5.6179 2023-02-11 21:13:40,342 - mmseg - INFO - Iter [52300/160000] lr: 4.039e-05, eta: 6:13:57, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3311, decode.acc_seg: 87.1639, aux.loss_ce: 0.2146, aux.acc_seg: 79.3211, loss: 0.5458, grad_norm: 5.8306 2023-02-11 21:13:50,592 - mmseg - INFO - Iter [52350/160000] lr: 4.037e-05, eta: 6:13:46, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3409, decode.acc_seg: 86.7949, aux.loss_ce: 0.2174, aux.acc_seg: 79.2097, loss: 0.5583, grad_norm: 5.3040 2023-02-11 21:14:00,585 - mmseg - INFO - Iter [52400/160000] lr: 4.035e-05, eta: 6:13:34, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3491, decode.acc_seg: 86.6242, aux.loss_ce: 0.2204, aux.acc_seg: 79.0895, loss: 0.5695, grad_norm: 5.3748 2023-02-11 21:14:10,655 - mmseg - INFO - Iter [52450/160000] lr: 4.033e-05, eta: 6:13:23, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3695, decode.acc_seg: 85.7893, aux.loss_ce: 0.2371, aux.acc_seg: 77.7832, loss: 0.6067, grad_norm: 5.9598 2023-02-11 21:14:20,529 - mmseg - INFO - Iter [52500/160000] lr: 4.031e-05, eta: 6:13:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3780, decode.acc_seg: 85.8554, aux.loss_ce: 0.2380, aux.acc_seg: 78.1585, loss: 0.6160, grad_norm: 5.8819 2023-02-11 21:14:30,778 - mmseg - INFO - Iter [52550/160000] lr: 4.029e-05, eta: 6:13:00, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3562, decode.acc_seg: 86.6856, aux.loss_ce: 0.2356, aux.acc_seg: 77.6391, loss: 0.5917, grad_norm: 5.7799 2023-02-11 21:14:40,542 - mmseg - INFO - Iter [52600/160000] lr: 4.028e-05, eta: 6:12:47, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3405, decode.acc_seg: 86.9689, aux.loss_ce: 0.2195, aux.acc_seg: 79.2741, loss: 0.5600, grad_norm: 5.3217 2023-02-11 21:14:52,663 - mmseg - INFO - Iter [52650/160000] lr: 4.026e-05, eta: 6:12:42, time: 0.242, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3296, decode.acc_seg: 87.2769, aux.loss_ce: 0.2081, aux.acc_seg: 79.9925, loss: 0.5378, grad_norm: 5.1839 2023-02-11 21:15:02,463 - mmseg - INFO - Iter [52700/160000] lr: 4.024e-05, eta: 6:12:30, time: 0.196, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3456, decode.acc_seg: 86.7368, aux.loss_ce: 0.2270, aux.acc_seg: 78.4473, loss: 0.5726, grad_norm: 7.2432 2023-02-11 21:15:12,156 - mmseg - INFO - Iter [52750/160000] lr: 4.022e-05, eta: 6:12:17, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3381, decode.acc_seg: 86.6248, aux.loss_ce: 0.2212, aux.acc_seg: 78.6550, loss: 0.5593, grad_norm: 5.7675 2023-02-11 21:15:22,202 - mmseg - INFO - Iter [52800/160000] lr: 4.020e-05, eta: 6:12:06, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3482, decode.acc_seg: 86.6004, aux.loss_ce: 0.2156, aux.acc_seg: 79.5778, loss: 0.5638, grad_norm: 5.9156 2023-02-11 21:15:32,642 - mmseg - INFO - Iter 
[52850/160000] lr: 4.018e-05, eta: 6:11:55, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3495, decode.acc_seg: 86.8154, aux.loss_ce: 0.2291, aux.acc_seg: 78.5808, loss: 0.5786, grad_norm: 6.0183 2023-02-11 21:15:42,911 - mmseg - INFO - Iter [52900/160000] lr: 4.016e-05, eta: 6:11:45, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3591, decode.acc_seg: 86.3839, aux.loss_ce: 0.2342, aux.acc_seg: 78.0806, loss: 0.5933, grad_norm: 5.8257 2023-02-11 21:15:53,080 - mmseg - INFO - Iter [52950/160000] lr: 4.014e-05, eta: 6:11:34, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3302, decode.acc_seg: 87.3900, aux.loss_ce: 0.2177, aux.acc_seg: 79.3732, loss: 0.5480, grad_norm: 5.3849 2023-02-11 21:16:03,242 - mmseg - INFO - Saving checkpoint at 53000 iterations 2023-02-11 21:16:03,937 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:16:03,938 - mmseg - INFO - Iter [53000/160000] lr: 4.013e-05, eta: 6:11:24, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3547, decode.acc_seg: 87.1449, aux.loss_ce: 0.2201, aux.acc_seg: 79.5633, loss: 0.5748, grad_norm: 5.6315 2023-02-11 21:16:14,033 - mmseg - INFO - Iter [53050/160000] lr: 4.011e-05, eta: 6:11:13, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3525, decode.acc_seg: 86.2439, aux.loss_ce: 0.2269, aux.acc_seg: 78.3511, loss: 0.5794, grad_norm: 4.8066 2023-02-11 21:16:24,386 - mmseg - INFO - Iter [53100/160000] lr: 4.009e-05, eta: 6:11:02, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3619, decode.acc_seg: 86.4197, aux.loss_ce: 0.2274, aux.acc_seg: 78.3511, loss: 0.5892, grad_norm: 5.2905 2023-02-11 21:16:34,093 - mmseg - INFO - Iter [53150/160000] lr: 4.007e-05, eta: 6:10:50, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3407, decode.acc_seg: 86.9686, aux.loss_ce: 0.2198, aux.acc_seg: 79.0459, loss: 0.5604, grad_norm: 5.3186 2023-02-11 21:16:44,028 - mmseg - INFO - Iter [53200/160000] lr: 4.005e-05, eta: 6:10:38, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3595, decode.acc_seg: 86.3150, aux.loss_ce: 0.2227, aux.acc_seg: 78.9744, loss: 0.5822, grad_norm: 5.8723 2023-02-11 21:16:54,284 - mmseg - INFO - Iter [53250/160000] lr: 4.003e-05, eta: 6:10:27, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3421, decode.acc_seg: 86.9920, aux.loss_ce: 0.2187, aux.acc_seg: 79.4824, loss: 0.5608, grad_norm: 6.3341 2023-02-11 21:17:04,056 - mmseg - INFO - Iter [53300/160000] lr: 4.001e-05, eta: 6:10:15, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3554, decode.acc_seg: 86.4903, aux.loss_ce: 0.2210, aux.acc_seg: 79.0049, loss: 0.5764, grad_norm: 5.2254 2023-02-11 21:17:13,934 - mmseg - INFO - Iter [53350/160000] lr: 3.999e-05, eta: 6:10:03, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3501, decode.acc_seg: 87.0684, aux.loss_ce: 0.2226, aux.acc_seg: 79.4504, loss: 0.5727, grad_norm: 6.5004 2023-02-11 21:17:23,767 - mmseg - INFO - Iter [53400/160000] lr: 3.998e-05, eta: 6:09:51, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3334, decode.acc_seg: 87.5689, aux.loss_ce: 0.2116, aux.acc_seg: 80.1539, loss: 0.5450, grad_norm: 5.6912 2023-02-11 21:17:34,607 - mmseg - INFO - Iter [53450/160000] lr: 3.996e-05, eta: 6:09:42, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3638, decode.acc_seg: 85.8813, aux.loss_ce: 0.2226, aux.acc_seg: 78.7287, loss: 0.5864, grad_norm: 5.6322 2023-02-11 21:17:44,467 - mmseg - INFO - Iter 
[53500/160000] lr: 3.994e-05, eta: 6:09:30, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3465, decode.acc_seg: 87.0859, aux.loss_ce: 0.2289, aux.acc_seg: 78.9005, loss: 0.5754, grad_norm: 5.7797 2023-02-11 21:17:54,307 - mmseg - INFO - Iter [53550/160000] lr: 3.992e-05, eta: 6:09:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3619, decode.acc_seg: 86.1392, aux.loss_ce: 0.2287, aux.acc_seg: 78.1174, loss: 0.5905, grad_norm: 6.3963 2023-02-11 21:18:04,530 - mmseg - INFO - Iter [53600/160000] lr: 3.990e-05, eta: 6:09:07, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3509, decode.acc_seg: 86.6298, aux.loss_ce: 0.2214, aux.acc_seg: 78.9116, loss: 0.5723, grad_norm: 5.8934 2023-02-11 21:18:15,400 - mmseg - INFO - Iter [53650/160000] lr: 3.988e-05, eta: 6:08:58, time: 0.217, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3677, decode.acc_seg: 86.1402, aux.loss_ce: 0.2353, aux.acc_seg: 78.4617, loss: 0.6030, grad_norm: 6.1174 2023-02-11 21:18:25,715 - mmseg - INFO - Iter [53700/160000] lr: 3.986e-05, eta: 6:08:47, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3625, decode.acc_seg: 86.7412, aux.loss_ce: 0.2295, aux.acc_seg: 78.6006, loss: 0.5920, grad_norm: 5.3688 2023-02-11 21:18:35,965 - mmseg - INFO - Iter [53750/160000] lr: 3.984e-05, eta: 6:08:36, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3622, decode.acc_seg: 86.5469, aux.loss_ce: 0.2284, aux.acc_seg: 78.8744, loss: 0.5906, grad_norm: 6.3207 2023-02-11 21:18:46,074 - mmseg - INFO - Iter [53800/160000] lr: 3.983e-05, eta: 6:08:25, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3658, decode.acc_seg: 86.2050, aux.loss_ce: 0.2252, aux.acc_seg: 78.8818, loss: 0.5910, grad_norm: 5.7590 2023-02-11 21:18:55,995 - mmseg - INFO - Iter [53850/160000] lr: 3.981e-05, eta: 6:08:13, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3610, decode.acc_seg: 86.4244, aux.loss_ce: 0.2240, aux.acc_seg: 79.0717, loss: 0.5850, grad_norm: 6.4970 2023-02-11 21:19:08,198 - mmseg - INFO - Iter [53900/160000] lr: 3.979e-05, eta: 6:08:08, time: 0.244, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3268, decode.acc_seg: 87.3394, aux.loss_ce: 0.2192, aux.acc_seg: 78.8141, loss: 0.5460, grad_norm: 5.7542 2023-02-11 21:19:18,029 - mmseg - INFO - Iter [53950/160000] lr: 3.977e-05, eta: 6:07:56, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3420, decode.acc_seg: 86.7771, aux.loss_ce: 0.2219, aux.acc_seg: 79.2872, loss: 0.5639, grad_norm: 5.2952 2023-02-11 21:19:27,793 - mmseg - INFO - Saving checkpoint at 54000 iterations 2023-02-11 21:19:28,483 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:19:28,484 - mmseg - INFO - Iter [54000/160000] lr: 3.975e-05, eta: 6:07:46, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3417, decode.acc_seg: 86.6096, aux.loss_ce: 0.2217, aux.acc_seg: 78.8789, loss: 0.5635, grad_norm: 5.6238 2023-02-11 21:19:38,216 - mmseg - INFO - Iter [54050/160000] lr: 3.973e-05, eta: 6:07:33, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3465, decode.acc_seg: 87.0117, aux.loss_ce: 0.2243, aux.acc_seg: 78.7424, loss: 0.5709, grad_norm: 5.8250 2023-02-11 21:19:48,488 - mmseg - INFO - Iter [54100/160000] lr: 3.971e-05, eta: 6:07:23, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3367, decode.acc_seg: 86.8077, aux.loss_ce: 0.2088, aux.acc_seg: 79.8397, loss: 0.5455, grad_norm: 5.2927 2023-02-11 21:19:58,449 - mmseg - INFO - Iter 
[54150/160000] lr: 3.969e-05, eta: 6:07:11, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3615, decode.acc_seg: 86.2654, aux.loss_ce: 0.2283, aux.acc_seg: 78.4024, loss: 0.5899, grad_norm: 6.1242 2023-02-11 21:20:08,862 - mmseg - INFO - Iter [54200/160000] lr: 3.968e-05, eta: 6:07:01, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3611, decode.acc_seg: 86.4512, aux.loss_ce: 0.2298, aux.acc_seg: 78.5495, loss: 0.5908, grad_norm: 5.8178 2023-02-11 21:20:18,727 - mmseg - INFO - Iter [54250/160000] lr: 3.966e-05, eta: 6:06:49, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3520, decode.acc_seg: 86.9588, aux.loss_ce: 0.2253, aux.acc_seg: 79.2594, loss: 0.5773, grad_norm: 5.3512 2023-02-11 21:20:28,916 - mmseg - INFO - Iter [54300/160000] lr: 3.964e-05, eta: 6:06:38, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3472, decode.acc_seg: 86.8001, aux.loss_ce: 0.2257, aux.acc_seg: 78.7725, loss: 0.5729, grad_norm: 5.4944 2023-02-11 21:20:39,211 - mmseg - INFO - Iter [54350/160000] lr: 3.962e-05, eta: 6:06:27, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3246, decode.acc_seg: 87.3735, aux.loss_ce: 0.2020, aux.acc_seg: 80.7795, loss: 0.5266, grad_norm: 4.7049 2023-02-11 21:20:49,034 - mmseg - INFO - Iter [54400/160000] lr: 3.960e-05, eta: 6:06:15, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3496, decode.acc_seg: 86.7503, aux.loss_ce: 0.2241, aux.acc_seg: 78.6173, loss: 0.5737, grad_norm: 5.8013 2023-02-11 21:20:59,325 - mmseg - INFO - Iter [54450/160000] lr: 3.958e-05, eta: 6:06:04, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3433, decode.acc_seg: 86.8715, aux.loss_ce: 0.2156, aux.acc_seg: 79.4992, loss: 0.5589, grad_norm: 5.4512 2023-02-11 21:21:09,315 - mmseg - INFO - Iter [54500/160000] lr: 3.956e-05, eta: 6:05:53, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3592, decode.acc_seg: 86.3548, aux.loss_ce: 0.2249, aux.acc_seg: 78.7571, loss: 0.5841, grad_norm: 5.8535 2023-02-11 21:21:19,353 - mmseg - INFO - Iter [54550/160000] lr: 3.954e-05, eta: 6:05:41, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3457, decode.acc_seg: 87.1014, aux.loss_ce: 0.2292, aux.acc_seg: 78.9072, loss: 0.5749, grad_norm: 5.9683 2023-02-11 21:21:29,823 - mmseg - INFO - Iter [54600/160000] lr: 3.953e-05, eta: 6:05:31, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3314, decode.acc_seg: 87.1977, aux.loss_ce: 0.2142, aux.acc_seg: 79.4696, loss: 0.5457, grad_norm: 6.1363 2023-02-11 21:21:40,224 - mmseg - INFO - Iter [54650/160000] lr: 3.951e-05, eta: 6:05:21, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3429, decode.acc_seg: 86.9932, aux.loss_ce: 0.2185, aux.acc_seg: 79.0170, loss: 0.5614, grad_norm: 5.3447 2023-02-11 21:21:50,306 - mmseg - INFO - Iter [54700/160000] lr: 3.949e-05, eta: 6:05:09, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3559, decode.acc_seg: 86.4155, aux.loss_ce: 0.2265, aux.acc_seg: 78.9561, loss: 0.5824, grad_norm: 5.2254 2023-02-11 21:22:00,154 - mmseg - INFO - Iter [54750/160000] lr: 3.947e-05, eta: 6:04:57, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3482, decode.acc_seg: 86.8356, aux.loss_ce: 0.2298, aux.acc_seg: 77.9235, loss: 0.5780, grad_norm: 5.1783 2023-02-11 21:22:10,808 - mmseg - INFO - Iter [54800/160000] lr: 3.945e-05, eta: 6:04:48, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3305, decode.acc_seg: 87.0621, aux.loss_ce: 0.2164, aux.acc_seg: 79.0317, loss: 
0.5468, grad_norm: 5.0531 2023-02-11 21:22:20,690 - mmseg - INFO - Iter [54850/160000] lr: 3.943e-05, eta: 6:04:36, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3484, decode.acc_seg: 86.9922, aux.loss_ce: 0.2156, aux.acc_seg: 79.5780, loss: 0.5640, grad_norm: 6.1235 2023-02-11 21:22:30,742 - mmseg - INFO - Iter [54900/160000] lr: 3.941e-05, eta: 6:04:25, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3568, decode.acc_seg: 86.2577, aux.loss_ce: 0.2165, aux.acc_seg: 79.6256, loss: 0.5734, grad_norm: 5.6031 2023-02-11 21:22:40,988 - mmseg - INFO - Iter [54950/160000] lr: 3.939e-05, eta: 6:04:14, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3517, decode.acc_seg: 86.7183, aux.loss_ce: 0.2253, aux.acc_seg: 78.9480, loss: 0.5770, grad_norm: 5.8950 2023-02-11 21:22:51,149 - mmseg - INFO - Saving checkpoint at 55000 iterations 2023-02-11 21:22:51,830 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:22:51,830 - mmseg - INFO - Iter [55000/160000] lr: 3.938e-05, eta: 6:04:05, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3442, decode.acc_seg: 87.2751, aux.loss_ce: 0.2252, aux.acc_seg: 79.2705, loss: 0.5694, grad_norm: 6.1043 2023-02-11 21:23:01,569 - mmseg - INFO - Iter [55050/160000] lr: 3.936e-05, eta: 6:03:52, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3514, decode.acc_seg: 86.9800, aux.loss_ce: 0.2285, aux.acc_seg: 79.0339, loss: 0.5798, grad_norm: 6.0277 2023-02-11 21:23:11,581 - mmseg - INFO - Iter [55100/160000] lr: 3.934e-05, eta: 6:03:41, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3603, decode.acc_seg: 86.3780, aux.loss_ce: 0.2361, aux.acc_seg: 77.9836, loss: 0.5964, grad_norm: 5.5406 2023-02-11 21:23:21,503 - mmseg - INFO - Iter [55150/160000] lr: 3.932e-05, eta: 6:03:29, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3424, decode.acc_seg: 87.3145, aux.loss_ce: 0.2289, aux.acc_seg: 78.2412, loss: 0.5713, grad_norm: 5.3751 2023-02-11 21:23:33,748 - mmseg - INFO - Iter [55200/160000] lr: 3.930e-05, eta: 6:03:24, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3209, decode.acc_seg: 87.5753, aux.loss_ce: 0.2156, aux.acc_seg: 79.5095, loss: 0.5365, grad_norm: 5.8943 2023-02-11 21:23:43,944 - mmseg - INFO - Iter [55250/160000] lr: 3.928e-05, eta: 6:03:13, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3499, decode.acc_seg: 86.6088, aux.loss_ce: 0.2194, aux.acc_seg: 79.5591, loss: 0.5692, grad_norm: 5.5856 2023-02-11 21:23:54,104 - mmseg - INFO - Iter [55300/160000] lr: 3.926e-05, eta: 6:03:02, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3422, decode.acc_seg: 87.3049, aux.loss_ce: 0.2240, aux.acc_seg: 79.2247, loss: 0.5662, grad_norm: 5.9271 2023-02-11 21:24:03,967 - mmseg - INFO - Iter [55350/160000] lr: 3.924e-05, eta: 6:02:50, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3632, decode.acc_seg: 86.3307, aux.loss_ce: 0.2310, aux.acc_seg: 78.4645, loss: 0.5943, grad_norm: 6.1093 2023-02-11 21:24:13,901 - mmseg - INFO - Iter [55400/160000] lr: 3.923e-05, eta: 6:02:38, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3314, decode.acc_seg: 87.5694, aux.loss_ce: 0.2104, aux.acc_seg: 80.0648, loss: 0.5418, grad_norm: 5.4647 2023-02-11 21:24:23,972 - mmseg - INFO - Iter [55450/160000] lr: 3.921e-05, eta: 6:02:27, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3399, decode.acc_seg: 87.3639, aux.loss_ce: 0.2279, aux.acc_seg: 79.1483, loss: 0.5678, 
grad_norm: 6.1546 2023-02-11 21:24:33,607 - mmseg - INFO - Iter [55500/160000] lr: 3.919e-05, eta: 6:02:15, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3463, decode.acc_seg: 86.8450, aux.loss_ce: 0.2294, aux.acc_seg: 78.7337, loss: 0.5756, grad_norm: 5.3766 2023-02-11 21:24:44,363 - mmseg - INFO - Iter [55550/160000] lr: 3.917e-05, eta: 6:02:05, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3459, decode.acc_seg: 86.9830, aux.loss_ce: 0.2179, aux.acc_seg: 79.3698, loss: 0.5638, grad_norm: 6.1744 2023-02-11 21:24:54,553 - mmseg - INFO - Iter [55600/160000] lr: 3.915e-05, eta: 6:01:54, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3367, decode.acc_seg: 87.5232, aux.loss_ce: 0.2187, aux.acc_seg: 79.6053, loss: 0.5553, grad_norm: 5.6017 2023-02-11 21:25:04,253 - mmseg - INFO - Iter [55650/160000] lr: 3.913e-05, eta: 6:01:42, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3563, decode.acc_seg: 87.0556, aux.loss_ce: 0.2320, aux.acc_seg: 78.7288, loss: 0.5883, grad_norm: 7.4195 2023-02-11 21:25:14,315 - mmseg - INFO - Iter [55700/160000] lr: 3.911e-05, eta: 6:01:31, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3418, decode.acc_seg: 87.0907, aux.loss_ce: 0.2227, aux.acc_seg: 79.1650, loss: 0.5645, grad_norm: 4.8501 2023-02-11 21:25:24,186 - mmseg - INFO - Iter [55750/160000] lr: 3.909e-05, eta: 6:01:19, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3456, decode.acc_seg: 86.9300, aux.loss_ce: 0.2295, aux.acc_seg: 78.6706, loss: 0.5751, grad_norm: 6.4787 2023-02-11 21:25:34,116 - mmseg - INFO - Iter [55800/160000] lr: 3.908e-05, eta: 6:01:07, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3346, decode.acc_seg: 87.4088, aux.loss_ce: 0.2115, aux.acc_seg: 79.9129, loss: 0.5461, grad_norm: 5.1607 2023-02-11 21:25:43,963 - mmseg - INFO - Iter [55850/160000] lr: 3.906e-05, eta: 6:00:55, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3061, decode.acc_seg: 88.0853, aux.loss_ce: 0.2052, aux.acc_seg: 80.2088, loss: 0.5113, grad_norm: 5.9566 2023-02-11 21:25:54,260 - mmseg - INFO - Iter [55900/160000] lr: 3.904e-05, eta: 6:00:45, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3339, decode.acc_seg: 87.4471, aux.loss_ce: 0.2170, aux.acc_seg: 79.6621, loss: 0.5509, grad_norm: 5.7040 2023-02-11 21:26:03,975 - mmseg - INFO - Iter [55950/160000] lr: 3.902e-05, eta: 6:00:33, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3395, decode.acc_seg: 87.0220, aux.loss_ce: 0.2143, aux.acc_seg: 79.8441, loss: 0.5538, grad_norm: 5.6084 2023-02-11 21:26:14,001 - mmseg - INFO - Saving checkpoint at 56000 iterations 2023-02-11 21:26:14,686 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:26:14,686 - mmseg - INFO - Iter [56000/160000] lr: 3.900e-05, eta: 6:00:23, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3338, decode.acc_seg: 87.2450, aux.loss_ce: 0.2172, aux.acc_seg: 79.6461, loss: 0.5510, grad_norm: 7.2367 2023-02-11 21:26:24,811 - mmseg - INFO - Iter [56050/160000] lr: 3.898e-05, eta: 6:00:12, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3457, decode.acc_seg: 86.7205, aux.loss_ce: 0.2173, aux.acc_seg: 79.1755, loss: 0.5630, grad_norm: 5.6650 2023-02-11 21:26:34,585 - mmseg - INFO - Iter [56100/160000] lr: 3.896e-05, eta: 6:00:00, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3237, decode.acc_seg: 87.7349, aux.loss_ce: 0.2143, aux.acc_seg: 79.8683, loss: 0.5380, grad_norm: 
5.4603 2023-02-11 21:26:44,603 - mmseg - INFO - Iter [56150/160000] lr: 3.894e-05, eta: 5:59:48, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3460, decode.acc_seg: 87.2588, aux.loss_ce: 0.2251, aux.acc_seg: 79.3882, loss: 0.5711, grad_norm: 5.8004 2023-02-11 21:26:54,551 - mmseg - INFO - Iter [56200/160000] lr: 3.893e-05, eta: 5:59:37, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3329, decode.acc_seg: 87.2472, aux.loss_ce: 0.2068, aux.acc_seg: 79.9821, loss: 0.5396, grad_norm: 5.2822 2023-02-11 21:27:04,353 - mmseg - INFO - Iter [56250/160000] lr: 3.891e-05, eta: 5:59:25, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3428, decode.acc_seg: 86.8577, aux.loss_ce: 0.2203, aux.acc_seg: 79.5608, loss: 0.5631, grad_norm: 5.5019 2023-02-11 21:27:14,349 - mmseg - INFO - Iter [56300/160000] lr: 3.889e-05, eta: 5:59:14, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3635, decode.acc_seg: 86.5770, aux.loss_ce: 0.2267, aux.acc_seg: 79.0423, loss: 0.5901, grad_norm: 5.9192 2023-02-11 21:27:25,027 - mmseg - INFO - Iter [56350/160000] lr: 3.887e-05, eta: 5:59:04, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3300, decode.acc_seg: 87.2493, aux.loss_ce: 0.2075, aux.acc_seg: 80.1301, loss: 0.5375, grad_norm: 5.9103 2023-02-11 21:27:35,151 - mmseg - INFO - Iter [56400/160000] lr: 3.885e-05, eta: 5:58:53, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3319, decode.acc_seg: 87.3705, aux.loss_ce: 0.2178, aux.acc_seg: 79.4319, loss: 0.5497, grad_norm: 6.0905 2023-02-11 21:27:47,403 - mmseg - INFO - Iter [56450/160000] lr: 3.883e-05, eta: 5:58:47, time: 0.245, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3616, decode.acc_seg: 86.4294, aux.loss_ce: 0.2244, aux.acc_seg: 78.9344, loss: 0.5860, grad_norm: 5.6877 2023-02-11 21:27:57,753 - mmseg - INFO - Iter [56500/160000] lr: 3.881e-05, eta: 5:58:37, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3309, decode.acc_seg: 87.0294, aux.loss_ce: 0.2159, aux.acc_seg: 78.9526, loss: 0.5469, grad_norm: 5.6273 2023-02-11 21:28:08,268 - mmseg - INFO - Iter [56550/160000] lr: 3.879e-05, eta: 5:58:27, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3400, decode.acc_seg: 87.1581, aux.loss_ce: 0.2191, aux.acc_seg: 79.3790, loss: 0.5592, grad_norm: 5.5930 2023-02-11 21:28:18,554 - mmseg - INFO - Iter [56600/160000] lr: 3.878e-05, eta: 5:58:16, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3443, decode.acc_seg: 87.1698, aux.loss_ce: 0.2142, aux.acc_seg: 80.0225, loss: 0.5585, grad_norm: 5.2725 2023-02-11 21:28:28,885 - mmseg - INFO - Iter [56650/160000] lr: 3.876e-05, eta: 5:58:05, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3282, decode.acc_seg: 87.6172, aux.loss_ce: 0.2185, aux.acc_seg: 79.4442, loss: 0.5467, grad_norm: 5.4410 2023-02-11 21:28:39,041 - mmseg - INFO - Iter [56700/160000] lr: 3.874e-05, eta: 5:57:54, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3228, decode.acc_seg: 87.3269, aux.loss_ce: 0.2137, aux.acc_seg: 79.7391, loss: 0.5365, grad_norm: 5.2202 2023-02-11 21:28:48,939 - mmseg - INFO - Iter [56750/160000] lr: 3.872e-05, eta: 5:57:43, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3202, decode.acc_seg: 87.7043, aux.loss_ce: 0.2129, aux.acc_seg: 79.9888, loss: 0.5330, grad_norm: 6.4399 2023-02-11 21:28:59,056 - mmseg - INFO - Iter [56800/160000] lr: 3.870e-05, eta: 5:57:32, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3363, decode.acc_seg: 
87.1449, aux.loss_ce: 0.2199, aux.acc_seg: 79.2016, loss: 0.5562, grad_norm: 5.0161 2023-02-11 21:29:08,934 - mmseg - INFO - Iter [56850/160000] lr: 3.868e-05, eta: 5:57:20, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3337, decode.acc_seg: 87.6893, aux.loss_ce: 0.2154, aux.acc_seg: 80.1522, loss: 0.5491, grad_norm: 5.4538 2023-02-11 21:29:18,778 - mmseg - INFO - Iter [56900/160000] lr: 3.866e-05, eta: 5:57:08, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3350, decode.acc_seg: 87.3282, aux.loss_ce: 0.2143, aux.acc_seg: 80.0207, loss: 0.5493, grad_norm: 5.6346 2023-02-11 21:29:28,690 - mmseg - INFO - Iter [56950/160000] lr: 3.864e-05, eta: 5:56:57, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3445, decode.acc_seg: 86.7990, aux.loss_ce: 0.2179, aux.acc_seg: 79.6389, loss: 0.5624, grad_norm: 5.9969 2023-02-11 21:29:38,520 - mmseg - INFO - Saving checkpoint at 57000 iterations 2023-02-11 21:29:39,203 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:29:39,203 - mmseg - INFO - Iter [57000/160000] lr: 3.863e-05, eta: 5:56:47, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3437, decode.acc_seg: 87.0951, aux.loss_ce: 0.2246, aux.acc_seg: 78.8772, loss: 0.5683, grad_norm: 5.9328 2023-02-11 21:29:48,970 - mmseg - INFO - Iter [57050/160000] lr: 3.861e-05, eta: 5:56:35, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3305, decode.acc_seg: 86.9568, aux.loss_ce: 0.2179, aux.acc_seg: 79.1719, loss: 0.5484, grad_norm: 5.6992 2023-02-11 21:29:58,940 - mmseg - INFO - Iter [57100/160000] lr: 3.859e-05, eta: 5:56:23, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3377, decode.acc_seg: 87.2138, aux.loss_ce: 0.2206, aux.acc_seg: 79.0593, loss: 0.5582, grad_norm: 5.9954 2023-02-11 21:30:09,072 - mmseg - INFO - Iter [57150/160000] lr: 3.857e-05, eta: 5:56:12, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3134, decode.acc_seg: 88.0825, aux.loss_ce: 0.2048, aux.acc_seg: 80.8349, loss: 0.5182, grad_norm: 5.3545 2023-02-11 21:30:19,063 - mmseg - INFO - Iter [57200/160000] lr: 3.855e-05, eta: 5:56:01, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3381, decode.acc_seg: 87.6299, aux.loss_ce: 0.2249, aux.acc_seg: 79.1050, loss: 0.5630, grad_norm: 5.7365 2023-02-11 21:30:28,983 - mmseg - INFO - Iter [57250/160000] lr: 3.853e-05, eta: 5:55:49, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3394, decode.acc_seg: 87.0592, aux.loss_ce: 0.2211, aux.acc_seg: 79.2254, loss: 0.5606, grad_norm: 5.3046 2023-02-11 21:30:39,109 - mmseg - INFO - Iter [57300/160000] lr: 3.851e-05, eta: 5:55:38, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3160, decode.acc_seg: 88.2289, aux.loss_ce: 0.2135, aux.acc_seg: 80.0132, loss: 0.5295, grad_norm: 5.8098 2023-02-11 21:30:48,947 - mmseg - INFO - Iter [57350/160000] lr: 3.849e-05, eta: 5:55:26, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3393, decode.acc_seg: 87.0683, aux.loss_ce: 0.2233, aux.acc_seg: 78.8330, loss: 0.5627, grad_norm: 5.9516 2023-02-11 21:30:59,624 - mmseg - INFO - Iter [57400/160000] lr: 3.848e-05, eta: 5:55:17, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3326, decode.acc_seg: 87.0807, aux.loss_ce: 0.2162, aux.acc_seg: 79.2865, loss: 0.5487, grad_norm: 5.9367 2023-02-11 21:31:09,482 - mmseg - INFO - Iter [57450/160000] lr: 3.846e-05, eta: 5:55:05, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3417, decode.acc_seg: 87.1527, 
aux.loss_ce: 0.2249, aux.acc_seg: 79.3394, loss: 0.5666, grad_norm: 6.2383 2023-02-11 21:31:19,560 - mmseg - INFO - Iter [57500/160000] lr: 3.844e-05, eta: 5:54:54, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3623, decode.acc_seg: 86.4918, aux.loss_ce: 0.2277, aux.acc_seg: 79.0522, loss: 0.5900, grad_norm: 5.7994 2023-02-11 21:31:29,486 - mmseg - INFO - Iter [57550/160000] lr: 3.842e-05, eta: 5:54:42, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3456, decode.acc_seg: 86.9740, aux.loss_ce: 0.2272, aux.acc_seg: 78.3052, loss: 0.5728, grad_norm: 6.4121 2023-02-11 21:31:39,561 - mmseg - INFO - Iter [57600/160000] lr: 3.840e-05, eta: 5:54:31, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3393, decode.acc_seg: 87.0856, aux.loss_ce: 0.2219, aux.acc_seg: 78.7983, loss: 0.5612, grad_norm: 5.9870 2023-02-11 21:31:49,447 - mmseg - INFO - Iter [57650/160000] lr: 3.838e-05, eta: 5:54:20, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3279, decode.acc_seg: 87.3485, aux.loss_ce: 0.2135, aux.acc_seg: 79.7078, loss: 0.5414, grad_norm: 5.5774 2023-02-11 21:32:01,760 - mmseg - INFO - Iter [57700/160000] lr: 3.836e-05, eta: 5:54:14, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3256, decode.acc_seg: 87.3572, aux.loss_ce: 0.2130, aux.acc_seg: 79.7110, loss: 0.5385, grad_norm: 5.4229 2023-02-11 21:32:11,527 - mmseg - INFO - Iter [57750/160000] lr: 3.834e-05, eta: 5:54:02, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3293, decode.acc_seg: 87.1815, aux.loss_ce: 0.2169, aux.acc_seg: 79.3277, loss: 0.5462, grad_norm: 5.9012 2023-02-11 21:32:21,501 - mmseg - INFO - Iter [57800/160000] lr: 3.833e-05, eta: 5:53:51, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3269, decode.acc_seg: 87.1534, aux.loss_ce: 0.2173, aux.acc_seg: 78.7431, loss: 0.5441, grad_norm: 4.8931 2023-02-11 21:32:31,673 - mmseg - INFO - Iter [57850/160000] lr: 3.831e-05, eta: 5:53:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3317, decode.acc_seg: 87.2826, aux.loss_ce: 0.2172, aux.acc_seg: 79.8472, loss: 0.5489, grad_norm: 5.7063 2023-02-11 21:32:41,735 - mmseg - INFO - Iter [57900/160000] lr: 3.829e-05, eta: 5:53:28, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3258, decode.acc_seg: 88.0211, aux.loss_ce: 0.2157, aux.acc_seg: 80.3842, loss: 0.5415, grad_norm: 5.3235 2023-02-11 21:32:51,859 - mmseg - INFO - Iter [57950/160000] lr: 3.827e-05, eta: 5:53:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3363, decode.acc_seg: 87.2798, aux.loss_ce: 0.2224, aux.acc_seg: 78.5048, loss: 0.5587, grad_norm: 5.8592 2023-02-11 21:33:01,994 - mmseg - INFO - Saving checkpoint at 58000 iterations 2023-02-11 21:33:02,688 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:33:02,688 - mmseg - INFO - Iter [58000/160000] lr: 3.825e-05, eta: 5:53:08, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3359, decode.acc_seg: 87.0364, aux.loss_ce: 0.2239, aux.acc_seg: 78.7308, loss: 0.5598, grad_norm: 5.4282 2023-02-11 21:33:12,790 - mmseg - INFO - Iter [58050/160000] lr: 3.823e-05, eta: 5:52:57, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3592, decode.acc_seg: 87.0663, aux.loss_ce: 0.2246, aux.acc_seg: 79.8665, loss: 0.5838, grad_norm: 6.2948 2023-02-11 21:33:22,835 - mmseg - INFO - Iter [58100/160000] lr: 3.821e-05, eta: 5:52:46, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3220, decode.acc_seg: 87.7962, 
aux.loss_ce: 0.2120, aux.acc_seg: 79.6562, loss: 0.5341, grad_norm: 5.4795 2023-02-11 21:33:32,782 - mmseg - INFO - Iter [58150/160000] lr: 3.819e-05, eta: 5:52:34, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3361, decode.acc_seg: 87.3083, aux.loss_ce: 0.2271, aux.acc_seg: 78.5073, loss: 0.5632, grad_norm: 6.0276 2023-02-11 21:33:43,412 - mmseg - INFO - Iter [58200/160000] lr: 3.818e-05, eta: 5:52:25, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3360, decode.acc_seg: 87.1011, aux.loss_ce: 0.2148, aux.acc_seg: 79.6561, loss: 0.5509, grad_norm: 6.1148 2023-02-11 21:33:54,009 - mmseg - INFO - Iter [58250/160000] lr: 3.816e-05, eta: 5:52:15, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3348, decode.acc_seg: 87.4215, aux.loss_ce: 0.2122, aux.acc_seg: 80.1956, loss: 0.5469, grad_norm: 5.6668 2023-02-11 21:34:03,791 - mmseg - INFO - Iter [58300/160000] lr: 3.814e-05, eta: 5:52:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3307, decode.acc_seg: 87.2103, aux.loss_ce: 0.2143, aux.acc_seg: 79.3229, loss: 0.5449, grad_norm: 5.3728 2023-02-11 21:34:13,624 - mmseg - INFO - Iter [58350/160000] lr: 3.812e-05, eta: 5:51:51, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3217, decode.acc_seg: 87.6638, aux.loss_ce: 0.2161, aux.acc_seg: 79.0774, loss: 0.5377, grad_norm: 5.1067 2023-02-11 21:34:23,444 - mmseg - INFO - Iter [58400/160000] lr: 3.810e-05, eta: 5:51:39, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3512, decode.acc_seg: 86.6820, aux.loss_ce: 0.2225, aux.acc_seg: 79.2481, loss: 0.5737, grad_norm: 5.8843 2023-02-11 21:34:33,256 - mmseg - INFO - Iter [58450/160000] lr: 3.808e-05, eta: 5:51:28, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3395, decode.acc_seg: 86.9388, aux.loss_ce: 0.2136, aux.acc_seg: 79.6413, loss: 0.5531, grad_norm: 8.9606 2023-02-11 21:34:43,616 - mmseg - INFO - Iter [58500/160000] lr: 3.806e-05, eta: 5:51:17, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3329, decode.acc_seg: 86.9903, aux.loss_ce: 0.2241, aux.acc_seg: 78.8503, loss: 0.5570, grad_norm: 5.2581 2023-02-11 21:34:53,961 - mmseg - INFO - Iter [58550/160000] lr: 3.804e-05, eta: 5:51:07, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3272, decode.acc_seg: 87.6191, aux.loss_ce: 0.2078, aux.acc_seg: 80.1759, loss: 0.5350, grad_norm: 6.9500 2023-02-11 21:35:03,865 - mmseg - INFO - Iter [58600/160000] lr: 3.803e-05, eta: 5:50:55, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3308, decode.acc_seg: 87.1113, aux.loss_ce: 0.2130, aux.acc_seg: 79.8155, loss: 0.5437, grad_norm: 5.8752 2023-02-11 21:35:13,930 - mmseg - INFO - Iter [58650/160000] lr: 3.801e-05, eta: 5:50:44, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3248, decode.acc_seg: 87.6277, aux.loss_ce: 0.2137, aux.acc_seg: 79.7806, loss: 0.5385, grad_norm: 5.9453 2023-02-11 21:35:23,937 - mmseg - INFO - Iter [58700/160000] lr: 3.799e-05, eta: 5:50:33, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3293, decode.acc_seg: 87.4722, aux.loss_ce: 0.2127, aux.acc_seg: 80.3980, loss: 0.5420, grad_norm: 5.5513 2023-02-11 21:35:33,999 - mmseg - INFO - Iter [58750/160000] lr: 3.797e-05, eta: 5:50:22, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3246, decode.acc_seg: 87.6694, aux.loss_ce: 0.2168, aux.acc_seg: 80.0121, loss: 0.5414, grad_norm: 5.3731 2023-02-11 21:35:43,849 - mmseg - INFO - Iter [58800/160000] lr: 3.795e-05, eta: 5:50:10, time: 0.197, 
data_time: 0.004, memory: 7748, decode.loss_ce: 0.3415, decode.acc_seg: 87.1549, aux.loss_ce: 0.2200, aux.acc_seg: 79.5416, loss: 0.5616, grad_norm: 5.4547 2023-02-11 21:35:53,779 - mmseg - INFO - Iter [58850/160000] lr: 3.793e-05, eta: 5:49:59, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3247, decode.acc_seg: 88.0918, aux.loss_ce: 0.2096, aux.acc_seg: 80.5203, loss: 0.5343, grad_norm: 5.4270 2023-02-11 21:36:04,135 - mmseg - INFO - Iter [58900/160000] lr: 3.791e-05, eta: 5:49:48, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3330, decode.acc_seg: 87.1150, aux.loss_ce: 0.2123, aux.acc_seg: 79.9116, loss: 0.5453, grad_norm: 5.6853 2023-02-11 21:36:16,191 - mmseg - INFO - Iter [58950/160000] lr: 3.789e-05, eta: 5:49:42, time: 0.241, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3322, decode.acc_seg: 87.1980, aux.loss_ce: 0.2128, aux.acc_seg: 79.6469, loss: 0.5450, grad_norm: 4.9276 2023-02-11 21:36:26,327 - mmseg - INFO - Saving checkpoint at 59000 iterations 2023-02-11 21:36:27,017 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:36:27,017 - mmseg - INFO - Iter [59000/160000] lr: 3.788e-05, eta: 5:49:32, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3307, decode.acc_seg: 87.5078, aux.loss_ce: 0.2195, aux.acc_seg: 79.4514, loss: 0.5502, grad_norm: 5.9032 2023-02-11 21:36:36,719 - mmseg - INFO - Iter [59050/160000] lr: 3.786e-05, eta: 5:49:20, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3373, decode.acc_seg: 87.3163, aux.loss_ce: 0.2131, aux.acc_seg: 80.1760, loss: 0.5503, grad_norm: 5.9911 2023-02-11 21:36:46,438 - mmseg - INFO - Iter [59100/160000] lr: 3.784e-05, eta: 5:49:08, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3459, decode.acc_seg: 87.0917, aux.loss_ce: 0.2200, aux.acc_seg: 79.6916, loss: 0.5659, grad_norm: 6.0912 2023-02-11 21:36:56,322 - mmseg - INFO - Iter [59150/160000] lr: 3.782e-05, eta: 5:48:57, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3214, decode.acc_seg: 87.7625, aux.loss_ce: 0.2115, aux.acc_seg: 80.5058, loss: 0.5329, grad_norm: 5.4384 2023-02-11 21:37:06,458 - mmseg - INFO - Iter [59200/160000] lr: 3.780e-05, eta: 5:48:46, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3108, decode.acc_seg: 87.7309, aux.loss_ce: 0.1987, aux.acc_seg: 80.4885, loss: 0.5095, grad_norm: 4.8306 2023-02-11 21:37:16,229 - mmseg - INFO - Iter [59250/160000] lr: 3.778e-05, eta: 5:48:34, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3391, decode.acc_seg: 87.1716, aux.loss_ce: 0.2192, aux.acc_seg: 79.7627, loss: 0.5583, grad_norm: 5.7303 2023-02-11 21:37:26,496 - mmseg - INFO - Iter [59300/160000] lr: 3.776e-05, eta: 5:48:24, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3244, decode.acc_seg: 87.2692, aux.loss_ce: 0.2090, aux.acc_seg: 79.9201, loss: 0.5335, grad_norm: 5.4232 2023-02-11 21:37:36,815 - mmseg - INFO - Iter [59350/160000] lr: 3.774e-05, eta: 5:48:13, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3168, decode.acc_seg: 88.0792, aux.loss_ce: 0.2105, aux.acc_seg: 79.9006, loss: 0.5272, grad_norm: 5.3781 2023-02-11 21:37:47,167 - mmseg - INFO - Iter [59400/160000] lr: 3.773e-05, eta: 5:48:03, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3394, decode.acc_seg: 87.2225, aux.loss_ce: 0.2294, aux.acc_seg: 78.4179, loss: 0.5688, grad_norm: 5.6655 2023-02-11 21:37:56,980 - mmseg - INFO - Iter [59450/160000] lr: 3.771e-05, eta: 5:47:51, time: 0.196, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.3342, decode.acc_seg: 86.8771, aux.loss_ce: 0.2092, aux.acc_seg: 79.5955, loss: 0.5433, grad_norm: 5.6251 2023-02-11 21:38:07,146 - mmseg - INFO - Iter [59500/160000] lr: 3.769e-05, eta: 5:47:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3367, decode.acc_seg: 87.0466, aux.loss_ce: 0.2138, aux.acc_seg: 79.8947, loss: 0.5505, grad_norm: 7.3977 2023-02-11 21:38:17,153 - mmseg - INFO - Iter [59550/160000] lr: 3.767e-05, eta: 5:47:29, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3153, decode.acc_seg: 87.7878, aux.loss_ce: 0.2088, aux.acc_seg: 79.8027, loss: 0.5241, grad_norm: 6.0029 2023-02-11 21:38:27,373 - mmseg - INFO - Iter [59600/160000] lr: 3.765e-05, eta: 5:47:18, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3111, decode.acc_seg: 88.2208, aux.loss_ce: 0.2115, aux.acc_seg: 79.9035, loss: 0.5226, grad_norm: 4.7249 2023-02-11 21:38:37,401 - mmseg - INFO - Iter [59650/160000] lr: 3.763e-05, eta: 5:47:07, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3207, decode.acc_seg: 87.8271, aux.loss_ce: 0.2083, aux.acc_seg: 80.3051, loss: 0.5290, grad_norm: 5.3127 2023-02-11 21:38:47,434 - mmseg - INFO - Iter [59700/160000] lr: 3.761e-05, eta: 5:46:56, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3378, decode.acc_seg: 87.4016, aux.loss_ce: 0.2224, aux.acc_seg: 79.3339, loss: 0.5601, grad_norm: 5.4947 2023-02-11 21:38:57,405 - mmseg - INFO - Iter [59750/160000] lr: 3.759e-05, eta: 5:46:44, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3361, decode.acc_seg: 87.5694, aux.loss_ce: 0.2253, aux.acc_seg: 79.5435, loss: 0.5614, grad_norm: 5.6939 2023-02-11 21:39:07,426 - mmseg - INFO - Iter [59800/160000] lr: 3.758e-05, eta: 5:46:33, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3352, decode.acc_seg: 87.5697, aux.loss_ce: 0.2149, aux.acc_seg: 80.1050, loss: 0.5501, grad_norm: 5.2586 2023-02-11 21:39:17,251 - mmseg - INFO - Iter [59850/160000] lr: 3.756e-05, eta: 5:46:22, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3254, decode.acc_seg: 87.5001, aux.loss_ce: 0.2251, aux.acc_seg: 78.9869, loss: 0.5505, grad_norm: 5.4569 2023-02-11 21:39:27,957 - mmseg - INFO - Iter [59900/160000] lr: 3.754e-05, eta: 5:46:12, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3213, decode.acc_seg: 87.5003, aux.loss_ce: 0.2128, aux.acc_seg: 79.3103, loss: 0.5341, grad_norm: 5.5746 2023-02-11 21:39:38,089 - mmseg - INFO - Iter [59950/160000] lr: 3.752e-05, eta: 5:46:01, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3164, decode.acc_seg: 87.8813, aux.loss_ce: 0.2113, aux.acc_seg: 79.8828, loss: 0.5278, grad_norm: 5.7459 2023-02-11 21:39:48,027 - mmseg - INFO - Saving checkpoint at 60000 iterations 2023-02-11 21:39:48,706 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:39:48,707 - mmseg - INFO - Iter [60000/160000] lr: 3.750e-05, eta: 5:45:51, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3312, decode.acc_seg: 87.2178, aux.loss_ce: 0.2175, aux.acc_seg: 79.3496, loss: 0.5487, grad_norm: 5.7934 2023-02-11 21:39:58,356 - mmseg - INFO - Iter [60050/160000] lr: 3.748e-05, eta: 5:45:39, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3368, decode.acc_seg: 87.3950, aux.loss_ce: 0.2256, aux.acc_seg: 79.2915, loss: 0.5624, grad_norm: 6.3105 2023-02-11 21:40:08,373 - mmseg - INFO - Iter [60100/160000] lr: 3.746e-05, eta: 5:45:28, time: 0.200, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.3179, decode.acc_seg: 87.9098, aux.loss_ce: 0.2163, aux.acc_seg: 79.7279, loss: 0.5342, grad_norm: 5.9822 2023-02-11 21:40:18,298 - mmseg - INFO - Iter [60150/160000] lr: 3.744e-05, eta: 5:45:17, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3090, decode.acc_seg: 87.9794, aux.loss_ce: 0.2129, aux.acc_seg: 79.4416, loss: 0.5219, grad_norm: 4.7351 2023-02-11 21:40:28,174 - mmseg - INFO - Iter [60200/160000] lr: 3.743e-05, eta: 5:45:05, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3356, decode.acc_seg: 87.1213, aux.loss_ce: 0.2264, aux.acc_seg: 78.1933, loss: 0.5620, grad_norm: 6.2315 2023-02-11 21:40:40,387 - mmseg - INFO - Iter [60250/160000] lr: 3.741e-05, eta: 5:44:59, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3366, decode.acc_seg: 87.1815, aux.loss_ce: 0.2079, aux.acc_seg: 80.4140, loss: 0.5446, grad_norm: 5.2983 2023-02-11 21:40:50,490 - mmseg - INFO - Iter [60300/160000] lr: 3.739e-05, eta: 5:44:48, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3045, decode.acc_seg: 88.3695, aux.loss_ce: 0.2104, aux.acc_seg: 79.9368, loss: 0.5149, grad_norm: 5.0095 2023-02-11 21:41:00,742 - mmseg - INFO - Iter [60350/160000] lr: 3.737e-05, eta: 5:44:37, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3239, decode.acc_seg: 87.9700, aux.loss_ce: 0.2144, aux.acc_seg: 80.2067, loss: 0.5383, grad_norm: 5.7365 2023-02-11 21:41:10,659 - mmseg - INFO - Iter [60400/160000] lr: 3.735e-05, eta: 5:44:26, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3244, decode.acc_seg: 87.6936, aux.loss_ce: 0.2094, aux.acc_seg: 80.3859, loss: 0.5338, grad_norm: 4.9669 2023-02-11 21:41:20,529 - mmseg - INFO - Iter [60450/160000] lr: 3.733e-05, eta: 5:44:14, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3226, decode.acc_seg: 87.4294, aux.loss_ce: 0.2201, aux.acc_seg: 79.0154, loss: 0.5427, grad_norm: 5.3675 2023-02-11 21:41:30,593 - mmseg - INFO - Iter [60500/160000] lr: 3.731e-05, eta: 5:44:03, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3311, decode.acc_seg: 87.7493, aux.loss_ce: 0.2128, aux.acc_seg: 80.7360, loss: 0.5439, grad_norm: 4.9317 2023-02-11 21:41:41,190 - mmseg - INFO - Iter [60550/160000] lr: 3.729e-05, eta: 5:43:53, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3323, decode.acc_seg: 87.3019, aux.loss_ce: 0.2144, aux.acc_seg: 79.4452, loss: 0.5467, grad_norm: 5.8215 2023-02-11 21:41:51,196 - mmseg - INFO - Iter [60600/160000] lr: 3.728e-05, eta: 5:43:42, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3178, decode.acc_seg: 87.7437, aux.loss_ce: 0.2118, aux.acc_seg: 79.9118, loss: 0.5296, grad_norm: 6.1335 2023-02-11 21:42:01,989 - mmseg - INFO - Iter [60650/160000] lr: 3.726e-05, eta: 5:43:33, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3240, decode.acc_seg: 87.7557, aux.loss_ce: 0.2201, aux.acc_seg: 79.4622, loss: 0.5441, grad_norm: 5.4879 2023-02-11 21:42:12,252 - mmseg - INFO - Iter [60700/160000] lr: 3.724e-05, eta: 5:43:22, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3195, decode.acc_seg: 87.7590, aux.loss_ce: 0.2074, aux.acc_seg: 80.2756, loss: 0.5269, grad_norm: 5.5059 2023-02-11 21:42:22,379 - mmseg - INFO - Iter [60750/160000] lr: 3.722e-05, eta: 5:43:11, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3219, decode.acc_seg: 87.7469, aux.loss_ce: 0.2102, aux.acc_seg: 80.0812, loss: 0.5321, grad_norm: 5.6094 2023-02-11 21:42:32,327 - mmseg - INFO - Iter 
[60800/160000] lr: 3.720e-05, eta: 5:43:00, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3169, decode.acc_seg: 87.6175, aux.loss_ce: 0.2137, aux.acc_seg: 79.8416, loss: 0.5306, grad_norm: 5.5983 2023-02-11 21:42:42,211 - mmseg - INFO - Iter [60850/160000] lr: 3.718e-05, eta: 5:42:48, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3175, decode.acc_seg: 88.0106, aux.loss_ce: 0.2102, aux.acc_seg: 80.3907, loss: 0.5277, grad_norm: 5.4999 2023-02-11 21:42:52,117 - mmseg - INFO - Iter [60900/160000] lr: 3.716e-05, eta: 5:42:37, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3433, decode.acc_seg: 87.2221, aux.loss_ce: 0.2222, aux.acc_seg: 79.3909, loss: 0.5655, grad_norm: 5.6030 2023-02-11 21:43:02,067 - mmseg - INFO - Iter [60950/160000] lr: 3.714e-05, eta: 5:42:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3329, decode.acc_seg: 86.9802, aux.loss_ce: 0.2105, aux.acc_seg: 80.1175, loss: 0.5434, grad_norm: 5.9553 2023-02-11 21:43:12,320 - mmseg - INFO - Saving checkpoint at 61000 iterations 2023-02-11 21:43:12,995 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:43:12,995 - mmseg - INFO - Iter [61000/160000] lr: 3.713e-05, eta: 5:42:17, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3220, decode.acc_seg: 87.6813, aux.loss_ce: 0.2213, aux.acc_seg: 79.2018, loss: 0.5433, grad_norm: 6.8406 2023-02-11 21:43:23,026 - mmseg - INFO - Iter [61050/160000] lr: 3.711e-05, eta: 5:42:05, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3356, decode.acc_seg: 87.0557, aux.loss_ce: 0.2206, aux.acc_seg: 79.2608, loss: 0.5562, grad_norm: 5.6632 2023-02-11 21:43:33,194 - mmseg - INFO - Iter [61100/160000] lr: 3.709e-05, eta: 5:41:55, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3157, decode.acc_seg: 88.1991, aux.loss_ce: 0.2101, aux.acc_seg: 80.3070, loss: 0.5258, grad_norm: 5.3973 2023-02-11 21:43:43,282 - mmseg - INFO - Iter [61150/160000] lr: 3.707e-05, eta: 5:41:44, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3148, decode.acc_seg: 87.9928, aux.loss_ce: 0.2181, aux.acc_seg: 79.6127, loss: 0.5329, grad_norm: 5.7967 2023-02-11 21:43:54,090 - mmseg - INFO - Iter [61200/160000] lr: 3.705e-05, eta: 5:41:34, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3223, decode.acc_seg: 87.9918, aux.loss_ce: 0.2009, aux.acc_seg: 81.0507, loss: 0.5232, grad_norm: 5.5897 2023-02-11 21:44:04,099 - mmseg - INFO - Iter [61250/160000] lr: 3.703e-05, eta: 5:41:23, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3287, decode.acc_seg: 87.5595, aux.loss_ce: 0.2193, aux.acc_seg: 79.1397, loss: 0.5480, grad_norm: 6.2508 2023-02-11 21:44:13,876 - mmseg - INFO - Iter [61300/160000] lr: 3.701e-05, eta: 5:41:11, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3120, decode.acc_seg: 88.0424, aux.loss_ce: 0.2133, aux.acc_seg: 79.7560, loss: 0.5253, grad_norm: 5.1414 2023-02-11 21:44:24,050 - mmseg - INFO - Iter [61350/160000] lr: 3.699e-05, eta: 5:41:01, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3248, decode.acc_seg: 87.7068, aux.loss_ce: 0.2216, aux.acc_seg: 79.1294, loss: 0.5464, grad_norm: 6.7179 2023-02-11 21:44:34,136 - mmseg - INFO - Iter [61400/160000] lr: 3.698e-05, eta: 5:40:50, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3424, decode.acc_seg: 87.3227, aux.loss_ce: 0.2279, aux.acc_seg: 78.8743, loss: 0.5703, grad_norm: 5.5335 2023-02-11 21:44:44,544 - mmseg - INFO - Iter 
[61450/160000] lr: 3.696e-05, eta: 5:40:39, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3394, decode.acc_seg: 87.2385, aux.loss_ce: 0.2201, aux.acc_seg: 79.2420, loss: 0.5596, grad_norm: 6.2081 2023-02-11 21:44:56,948 - mmseg - INFO - Iter [61500/160000] lr: 3.694e-05, eta: 5:40:33, time: 0.248, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3228, decode.acc_seg: 87.8040, aux.loss_ce: 0.2146, aux.acc_seg: 79.8598, loss: 0.5374, grad_norm: 5.3789 2023-02-11 21:45:07,279 - mmseg - INFO - Iter [61550/160000] lr: 3.692e-05, eta: 5:40:23, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3266, decode.acc_seg: 87.6248, aux.loss_ce: 0.2194, aux.acc_seg: 79.3945, loss: 0.5460, grad_norm: 5.1310 2023-02-11 21:45:17,208 - mmseg - INFO - Iter [61600/160000] lr: 3.690e-05, eta: 5:40:12, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3176, decode.acc_seg: 87.5597, aux.loss_ce: 0.2086, aux.acc_seg: 80.0030, loss: 0.5263, grad_norm: 6.4255 2023-02-11 21:45:27,437 - mmseg - INFO - Iter [61650/160000] lr: 3.688e-05, eta: 5:40:01, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3130, decode.acc_seg: 88.2254, aux.loss_ce: 0.2117, aux.acc_seg: 80.4433, loss: 0.5247, grad_norm: 4.7468 2023-02-11 21:45:37,656 - mmseg - INFO - Iter [61700/160000] lr: 3.686e-05, eta: 5:39:50, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3136, decode.acc_seg: 88.0344, aux.loss_ce: 0.2142, aux.acc_seg: 79.7979, loss: 0.5279, grad_norm: 5.6726 2023-02-11 21:45:47,852 - mmseg - INFO - Iter [61750/160000] lr: 3.684e-05, eta: 5:39:39, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3235, decode.acc_seg: 87.7233, aux.loss_ce: 0.2143, aux.acc_seg: 79.7046, loss: 0.5378, grad_norm: 5.0110 2023-02-11 21:45:57,737 - mmseg - INFO - Iter [61800/160000] lr: 3.683e-05, eta: 5:39:28, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3249, decode.acc_seg: 87.5985, aux.loss_ce: 0.2100, aux.acc_seg: 79.9104, loss: 0.5349, grad_norm: 6.4369 2023-02-11 21:46:07,825 - mmseg - INFO - Iter [61850/160000] lr: 3.681e-05, eta: 5:39:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3434, decode.acc_seg: 87.0459, aux.loss_ce: 0.2246, aux.acc_seg: 78.9516, loss: 0.5680, grad_norm: 5.4510 2023-02-11 21:46:18,054 - mmseg - INFO - Iter [61900/160000] lr: 3.679e-05, eta: 5:39:06, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3416, decode.acc_seg: 87.1532, aux.loss_ce: 0.2182, aux.acc_seg: 80.0465, loss: 0.5598, grad_norm: 5.4738 2023-02-11 21:46:27,964 - mmseg - INFO - Iter [61950/160000] lr: 3.677e-05, eta: 5:38:55, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3110, decode.acc_seg: 87.9412, aux.loss_ce: 0.2077, aux.acc_seg: 80.4445, loss: 0.5187, grad_norm: 5.1070 2023-02-11 21:46:37,675 - mmseg - INFO - Saving checkpoint at 62000 iterations 2023-02-11 21:46:38,357 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:46:38,357 - mmseg - INFO - Iter [62000/160000] lr: 3.675e-05, eta: 5:38:45, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3202, decode.acc_seg: 88.1107, aux.loss_ce: 0.2032, aux.acc_seg: 80.9925, loss: 0.5234, grad_norm: 5.1174 2023-02-11 21:46:48,241 - mmseg - INFO - Iter [62050/160000] lr: 3.673e-05, eta: 5:38:33, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3121, decode.acc_seg: 87.7916, aux.loss_ce: 0.2083, aux.acc_seg: 80.0365, loss: 0.5204, grad_norm: 5.7646 2023-02-11 21:46:58,358 - mmseg - INFO - Iter 
[62100/160000] lr: 3.671e-05, eta: 5:38:22, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3208, decode.acc_seg: 87.9711, aux.loss_ce: 0.2137, aux.acc_seg: 79.9841, loss: 0.5345, grad_norm: 5.1174 2023-02-11 21:47:08,401 - mmseg - INFO - Iter [62150/160000] lr: 3.669e-05, eta: 5:38:11, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3192, decode.acc_seg: 87.7776, aux.loss_ce: 0.2106, aux.acc_seg: 79.5577, loss: 0.5298, grad_norm: 5.5682 2023-02-11 21:47:18,694 - mmseg - INFO - Iter [62200/160000] lr: 3.668e-05, eta: 5:38:01, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2928, decode.acc_seg: 88.7767, aux.loss_ce: 0.1995, aux.acc_seg: 81.5634, loss: 0.4924, grad_norm: 5.3719 2023-02-11 21:47:28,791 - mmseg - INFO - Iter [62250/160000] lr: 3.666e-05, eta: 5:37:50, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3169, decode.acc_seg: 88.1014, aux.loss_ce: 0.2108, aux.acc_seg: 80.2260, loss: 0.5277, grad_norm: 5.9445 2023-02-11 21:47:38,584 - mmseg - INFO - Iter [62300/160000] lr: 3.664e-05, eta: 5:37:38, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3258, decode.acc_seg: 87.5008, aux.loss_ce: 0.2226, aux.acc_seg: 79.0043, loss: 0.5483, grad_norm: 6.0472 2023-02-11 21:47:48,335 - mmseg - INFO - Iter [62350/160000] lr: 3.662e-05, eta: 5:37:27, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3396, decode.acc_seg: 86.7465, aux.loss_ce: 0.2183, aux.acc_seg: 79.1446, loss: 0.5578, grad_norm: 6.3384 2023-02-11 21:47:58,323 - mmseg - INFO - Iter [62400/160000] lr: 3.660e-05, eta: 5:37:15, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3261, decode.acc_seg: 87.2796, aux.loss_ce: 0.2062, aux.acc_seg: 80.2303, loss: 0.5324, grad_norm: 4.5958 2023-02-11 21:48:08,323 - mmseg - INFO - Iter [62450/160000] lr: 3.658e-05, eta: 5:37:04, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3133, decode.acc_seg: 88.3367, aux.loss_ce: 0.2181, aux.acc_seg: 79.6936, loss: 0.5314, grad_norm: 4.8873 2023-02-11 21:48:18,017 - mmseg - INFO - Iter [62500/160000] lr: 3.656e-05, eta: 5:36:52, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3238, decode.acc_seg: 87.8966, aux.loss_ce: 0.2132, aux.acc_seg: 80.5174, loss: 0.5370, grad_norm: 5.4820 2023-02-11 21:48:27,814 - mmseg - INFO - Iter [62550/160000] lr: 3.654e-05, eta: 5:36:41, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3326, decode.acc_seg: 87.5550, aux.loss_ce: 0.2178, aux.acc_seg: 79.5607, loss: 0.5503, grad_norm: 5.9147 2023-02-11 21:48:38,073 - mmseg - INFO - Iter [62600/160000] lr: 3.653e-05, eta: 5:36:30, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3279, decode.acc_seg: 87.6093, aux.loss_ce: 0.2097, aux.acc_seg: 79.9386, loss: 0.5376, grad_norm: 6.1712 2023-02-11 21:48:47,828 - mmseg - INFO - Iter [62650/160000] lr: 3.651e-05, eta: 5:36:19, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3301, decode.acc_seg: 87.3910, aux.loss_ce: 0.2208, aux.acc_seg: 79.5199, loss: 0.5509, grad_norm: 6.1392 2023-02-11 21:48:58,553 - mmseg - INFO - Iter [62700/160000] lr: 3.649e-05, eta: 5:36:09, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3152, decode.acc_seg: 88.0415, aux.loss_ce: 0.2092, aux.acc_seg: 80.1515, loss: 0.5243, grad_norm: 6.2222 2023-02-11 21:49:11,075 - mmseg - INFO - Iter [62750/160000] lr: 3.647e-05, eta: 5:36:03, time: 0.250, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3195, decode.acc_seg: 87.8415, aux.loss_ce: 0.2113, aux.acc_seg: 80.1088, loss: 
0.5308, grad_norm: 5.6656 2023-02-11 21:49:20,933 - mmseg - INFO - Iter [62800/160000] lr: 3.645e-05, eta: 5:35:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3153, decode.acc_seg: 87.9247, aux.loss_ce: 0.2122, aux.acc_seg: 79.8911, loss: 0.5275, grad_norm: 5.4053 2023-02-11 21:49:30,965 - mmseg - INFO - Iter [62850/160000] lr: 3.643e-05, eta: 5:35:41, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2995, decode.acc_seg: 88.4570, aux.loss_ce: 0.2043, aux.acc_seg: 80.4443, loss: 0.5038, grad_norm: 5.1247 2023-02-11 21:49:40,911 - mmseg - INFO - Iter [62900/160000] lr: 3.641e-05, eta: 5:35:29, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3232, decode.acc_seg: 87.5554, aux.loss_ce: 0.2147, aux.acc_seg: 79.5496, loss: 0.5378, grad_norm: 5.6315 2023-02-11 21:49:50,947 - mmseg - INFO - Iter [62950/160000] lr: 3.639e-05, eta: 5:35:18, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2933, decode.acc_seg: 88.7414, aux.loss_ce: 0.1943, aux.acc_seg: 81.2213, loss: 0.4876, grad_norm: 5.3345 2023-02-11 21:50:00,892 - mmseg - INFO - Saving checkpoint at 63000 iterations 2023-02-11 21:50:01,568 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:50:01,568 - mmseg - INFO - Iter [63000/160000] lr: 3.638e-05, eta: 5:35:09, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3224, decode.acc_seg: 87.7658, aux.loss_ce: 0.2138, aux.acc_seg: 79.3437, loss: 0.5362, grad_norm: 5.6739 2023-02-11 21:50:11,232 - mmseg - INFO - Iter [63050/160000] lr: 3.636e-05, eta: 5:34:57, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3146, decode.acc_seg: 88.1964, aux.loss_ce: 0.2142, aux.acc_seg: 80.0056, loss: 0.5288, grad_norm: 4.5270 2023-02-11 21:50:21,109 - mmseg - INFO - Iter [63100/160000] lr: 3.634e-05, eta: 5:34:45, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2853, decode.acc_seg: 89.2080, aux.loss_ce: 0.2099, aux.acc_seg: 80.2067, loss: 0.4952, grad_norm: 4.6082 2023-02-11 21:50:31,106 - mmseg - INFO - Iter [63150/160000] lr: 3.632e-05, eta: 5:34:34, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3159, decode.acc_seg: 88.0754, aux.loss_ce: 0.2081, aux.acc_seg: 80.3615, loss: 0.5241, grad_norm: 5.0406 2023-02-11 21:50:40,649 - mmseg - INFO - Iter [63200/160000] lr: 3.630e-05, eta: 5:34:22, time: 0.191, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3013, decode.acc_seg: 88.5481, aux.loss_ce: 0.2114, aux.acc_seg: 80.3815, loss: 0.5127, grad_norm: 5.0992 2023-02-11 21:50:50,724 - mmseg - INFO - Iter [63250/160000] lr: 3.628e-05, eta: 5:34:11, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3229, decode.acc_seg: 87.5556, aux.loss_ce: 0.2177, aux.acc_seg: 79.6838, loss: 0.5406, grad_norm: 5.4147 2023-02-11 21:51:00,511 - mmseg - INFO - Iter [63300/160000] lr: 3.626e-05, eta: 5:34:00, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3101, decode.acc_seg: 87.6286, aux.loss_ce: 0.2060, aux.acc_seg: 79.9008, loss: 0.5161, grad_norm: 4.8675 2023-02-11 21:51:10,847 - mmseg - INFO - Iter [63350/160000] lr: 3.624e-05, eta: 5:33:49, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3154, decode.acc_seg: 87.7340, aux.loss_ce: 0.2124, aux.acc_seg: 79.9655, loss: 0.5278, grad_norm: 5.6808 2023-02-11 21:51:20,846 - mmseg - INFO - Iter [63400/160000] lr: 3.623e-05, eta: 5:33:38, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3145, decode.acc_seg: 87.8022, aux.loss_ce: 0.2027, aux.acc_seg: 80.5083, loss: 0.5172, 
grad_norm: 5.2023 2023-02-11 21:51:31,309 - mmseg - INFO - Iter [63450/160000] lr: 3.621e-05, eta: 5:33:28, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3270, decode.acc_seg: 87.6317, aux.loss_ce: 0.2208, aux.acc_seg: 79.0200, loss: 0.5478, grad_norm: 5.9333 2023-02-11 21:51:41,153 - mmseg - INFO - Iter [63500/160000] lr: 3.619e-05, eta: 5:33:17, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2840, decode.acc_seg: 88.6235, aux.loss_ce: 0.1964, aux.acc_seg: 81.0069, loss: 0.4803, grad_norm: 4.1427 2023-02-11 21:51:51,264 - mmseg - INFO - Iter [63550/160000] lr: 3.617e-05, eta: 5:33:06, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3258, decode.acc_seg: 87.4469, aux.loss_ce: 0.2123, aux.acc_seg: 79.5730, loss: 0.5381, grad_norm: 5.9419 2023-02-11 21:52:01,474 - mmseg - INFO - Iter [63600/160000] lr: 3.615e-05, eta: 5:32:55, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3064, decode.acc_seg: 88.0392, aux.loss_ce: 0.2034, aux.acc_seg: 80.1304, loss: 0.5097, grad_norm: 5.1099 2023-02-11 21:52:11,447 - mmseg - INFO - Iter [63650/160000] lr: 3.613e-05, eta: 5:32:44, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3138, decode.acc_seg: 88.0949, aux.loss_ce: 0.2053, aux.acc_seg: 80.4593, loss: 0.5191, grad_norm: 5.0389 2023-02-11 21:52:21,857 - mmseg - INFO - Iter [63700/160000] lr: 3.611e-05, eta: 5:32:34, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3179, decode.acc_seg: 87.8905, aux.loss_ce: 0.2192, aux.acc_seg: 79.4240, loss: 0.5371, grad_norm: 5.2826 2023-02-11 21:52:31,603 - mmseg - INFO - Iter [63750/160000] lr: 3.609e-05, eta: 5:32:22, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3106, decode.acc_seg: 87.9629, aux.loss_ce: 0.2072, aux.acc_seg: 80.2183, loss: 0.5177, grad_norm: 5.1563 2023-02-11 21:52:41,493 - mmseg - INFO - Iter [63800/160000] lr: 3.608e-05, eta: 5:32:11, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3303, decode.acc_seg: 87.5531, aux.loss_ce: 0.2128, aux.acc_seg: 80.4384, loss: 0.5432, grad_norm: 5.9637 2023-02-11 21:52:51,320 - mmseg - INFO - Iter [63850/160000] lr: 3.606e-05, eta: 5:31:59, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2978, decode.acc_seg: 88.6789, aux.loss_ce: 0.2054, aux.acc_seg: 80.6581, loss: 0.5032, grad_norm: 4.8353 2023-02-11 21:53:01,346 - mmseg - INFO - Iter [63900/160000] lr: 3.604e-05, eta: 5:31:48, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3058, decode.acc_seg: 88.2332, aux.loss_ce: 0.2078, aux.acc_seg: 80.0044, loss: 0.5136, grad_norm: 6.4952 2023-02-11 21:53:11,344 - mmseg - INFO - Iter [63950/160000] lr: 3.602e-05, eta: 5:31:37, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3191, decode.acc_seg: 87.4109, aux.loss_ce: 0.2103, aux.acc_seg: 79.2608, loss: 0.5294, grad_norm: 5.0312 2023-02-11 21:53:23,726 - mmseg - INFO - Saving checkpoint at 64000 iterations 2023-02-11 21:53:24,417 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:53:24,418 - mmseg - INFO - Iter [64000/160000] lr: 3.600e-05, eta: 5:31:32, time: 0.261, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3242, decode.acc_seg: 87.3448, aux.loss_ce: 0.2203, aux.acc_seg: 78.8601, loss: 0.5445, grad_norm: 7.0737 2023-02-11 21:53:35,982 - mmseg - INFO - per class results: 2023-02-11 21:53:35,988 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 73.48 | 83.95 | | building | 80.0 | 90.8 
| | sky | 93.57 | 96.09 | | floor | 78.75 | 88.15 | | tree | 71.86 | 90.68 | | ceiling | 79.93 | 90.11 | | road | 80.94 | 87.75 | | bed | 85.67 | 93.76 | | windowpane | 57.79 | 77.05 | | grass | 67.99 | 80.3 | | cabinet | 54.16 | 73.9 | | sidewalk | 59.8 | 74.68 | | person | 75.6 | 92.62 | | earth | 32.88 | 47.16 | | door | 36.33 | 44.26 | | table | 52.05 | 70.73 | | mountain | 58.48 | 75.22 | | plant | 47.58 | 65.6 | | curtain | 68.33 | 84.25 | | chair | 49.39 | 66.32 | | car | 78.39 | 93.96 | | water | 55.77 | 75.18 | | painting | 63.59 | 83.6 | | sofa | 52.82 | 61.28 | | shelf | 34.2 | 46.68 | | house | 30.84 | 33.44 | | sea | 52.58 | 69.97 | | mirror | 55.53 | 68.23 | | rug | 53.96 | 60.43 | | field | 32.87 | 45.52 | | armchair | 34.74 | 65.91 | | seat | 60.04 | 76.23 | | fence | 39.23 | 55.84 | | desk | 42.56 | 53.93 | | rock | 36.72 | 70.85 | | wardrobe | 44.91 | 61.29 | | lamp | 52.39 | 74.54 | | bathtub | 72.68 | 78.71 | | railing | 32.05 | 44.69 | | cushion | 49.39 | 74.35 | | base | 32.41 | 52.51 | | box | 20.49 | 28.71 | | column | 36.94 | 53.19 | | signboard | 29.1 | 52.65 | | chest of drawers | 40.14 | 61.15 | | counter | 20.8 | 25.71 | | sand | 41.64 | 45.09 | | sink | 66.91 | 76.94 | | skyscraper | 42.36 | 45.22 | | fireplace | 65.32 | 80.17 | | refrigerator | 66.69 | 75.53 | | grandstand | 31.3 | 74.79 | | path | 15.74 | 19.51 | | stairs | 32.16 | 43.73 | | runway | 66.47 | 80.26 | | case | 45.94 | 58.97 | | pool table | 92.46 | 96.31 | | pillow | 40.67 | 45.62 | | screen door | 63.66 | 84.87 | | stairway | 28.05 | 46.82 | | river | 11.83 | 17.59 | | bridge | 54.68 | 68.78 | | bookcase | 32.51 | 63.78 | | blind | 28.8 | 31.17 | | coffee table | 46.82 | 80.84 | | toilet | 81.28 | 89.68 | | flower | 33.75 | 45.72 | | book | 41.04 | 58.65 | | hill | 2.05 | 2.28 | | bench | 42.12 | 44.82 | | countertop | 51.47 | 58.8 | | stove | 65.6 | 79.81 | | palm | 47.76 | 75.61 | | kitchen island | 21.8 | 28.31 | | computer | 51.16 | 65.37 | | swivel chair | 42.95 | 53.91 | | boat | 41.27 | 51.2 | | bar | 27.2 | 40.14 | | arcade machine | 61.77 | 79.31 | | hovel | 27.04 | 34.48 | | bus | 78.71 | 95.09 | | towel | 51.96 | 66.21 | | light | 48.6 | 67.76 | | truck | 33.12 | 45.06 | | tower | 37.11 | 61.84 | | chandelier | 54.39 | 85.69 | | awning | 21.57 | 24.59 | | streetlight | 19.46 | 27.38 | | booth | 30.66 | 43.36 | | television receiver | 63.21 | 72.99 | | airplane | 50.92 | 62.75 | | dirt track | 11.31 | 19.3 | | apparel | 22.1 | 55.84 | | pole | 14.13 | 20.04 | | land | 0.37 | 0.43 | | bannister | 11.44 | 14.82 | | escalator | 27.67 | 47.46 | | ottoman | 37.1 | 63.74 | | bottle | 28.73 | 40.96 | | buffet | 25.79 | 26.65 | | poster | 20.86 | 48.18 | | stage | 12.55 | 24.84 | | van | 19.14 | 21.89 | | ship | 50.77 | 92.46 | | fountain | 40.81 | 43.79 | | conveyer belt | 54.65 | 89.12 | | canopy | 9.42 | 11.31 | | washer | 61.89 | 72.6 | | plaything | 23.0 | 40.51 | | swimming pool | 57.3 | 64.12 | | stool | 29.91 | 35.18 | | barrel | 5.29 | 65.01 | | basket | 22.37 | 31.69 | | waterfall | 45.21 | 52.25 | | tent | 76.59 | 97.97 | | bag | 14.23 | 22.59 | | minibike | 63.39 | 75.66 | | cradle | 80.18 | 91.39 | | oven | 7.61 | 11.5 | | ball | 42.2 | 58.86 | | food | 56.3 | 68.55 | | step | 1.0 | 1.07 | | tank | 23.13 | 23.98 | | trade name | 23.92 | 30.48 | | microwave | 40.66 | 46.07 | | pot | 34.52 | 46.52 | | animal | 53.54 | 59.26 | | bicycle | 55.04 | 72.12 | | lake | 50.46 | 58.44 | | dishwasher | 60.28 | 71.93 | | screen | 54.71 | 92.62 | | blanket | 13.15 | 19.86 | | sculpture | 
26.7 | 83.8 | | hood | 45.52 | 47.19 | | sconce | 24.11 | 28.05 | | vase | 27.4 | 52.84 | | traffic light | 26.52 | 43.93 | | tray | 3.25 | 4.53 | | ashcan | 29.36 | 36.24 | | fan | 37.33 | 79.72 | | pier | 27.53 | 47.99 | | crt screen | 3.0 | 18.92 | | plate | 33.52 | 41.89 | | monitor | 4.84 | 6.14 | | bulletin board | 30.75 | 58.1 | | shower | 0.0 | 0.0 | | radiator | 35.64 | 37.7 | | glass | 5.5 | 5.87 | | clock | 15.9 | 32.15 | | flag | 30.5 | 38.91 | +---------------------+-------+-------+ 2023-02-11 21:53:35,988 - mmseg - INFO - Summary: 2023-02-11 21:53:35,989 - mmseg - INFO - +-------+-------+-------+ | aAcc | mIoU | mAcc | +-------+-------+-------+ | 79.96 | 41.68 | 55.56 | +-------+-------+-------+ 2023-02-11 21:53:36,647 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_64000.pth. 2023-02-11 21:53:36,647 - mmseg - INFO - Best mIoU is 0.4168 at 64000 iter. 2023-02-11 21:53:36,648 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:53:36,648 - mmseg - INFO - Iter(val) [250] aAcc: 0.7996, mIoU: 0.4168, mAcc: 0.5556, IoU.wall: 0.7348, IoU.building: 0.8000, IoU.sky: 0.9357, IoU.floor: 0.7875, IoU.tree: 0.7186, IoU.ceiling: 0.7993, IoU.road: 0.8094, IoU.bed : 0.8567, IoU.windowpane: 0.5779, IoU.grass: 0.6799, IoU.cabinet: 0.5416, IoU.sidewalk: 0.5980, IoU.person: 0.7560, IoU.earth: 0.3288, IoU.door: 0.3633, IoU.table: 0.5205, IoU.mountain: 0.5848, IoU.plant: 0.4758, IoU.curtain: 0.6833, IoU.chair: 0.4939, IoU.car: 0.7839, IoU.water: 0.5577, IoU.painting: 0.6359, IoU.sofa: 0.5282, IoU.shelf: 0.3420, IoU.house: 0.3084, IoU.sea: 0.5258, IoU.mirror: 0.5553, IoU.rug: 0.5396, IoU.field: 0.3287, IoU.armchair: 0.3474, IoU.seat: 0.6004, IoU.fence: 0.3923, IoU.desk: 0.4256, IoU.rock: 0.3672, IoU.wardrobe: 0.4491, IoU.lamp: 0.5239, IoU.bathtub: 0.7268, IoU.railing: 0.3205, IoU.cushion: 0.4939, IoU.base: 0.3241, IoU.box: 0.2049, IoU.column: 0.3694, IoU.signboard: 0.2910, IoU.chest of drawers: 0.4014, IoU.counter: 0.2080, IoU.sand: 0.4164, IoU.sink: 0.6691, IoU.skyscraper: 0.4236, IoU.fireplace: 0.6532, IoU.refrigerator: 0.6669, IoU.grandstand: 0.3130, IoU.path: 0.1574, IoU.stairs: 0.3216, IoU.runway: 0.6647, IoU.case: 0.4594, IoU.pool table: 0.9246, IoU.pillow: 0.4067, IoU.screen door: 0.6366, IoU.stairway: 0.2805, IoU.river: 0.1183, IoU.bridge: 0.5468, IoU.bookcase: 0.3251, IoU.blind: 0.2880, IoU.coffee table: 0.4682, IoU.toilet: 0.8128, IoU.flower: 0.3375, IoU.book: 0.4104, IoU.hill: 0.0205, IoU.bench: 0.4212, IoU.countertop: 0.5147, IoU.stove: 0.6560, IoU.palm: 0.4776, IoU.kitchen island: 0.2180, IoU.computer: 0.5116, IoU.swivel chair: 0.4295, IoU.boat: 0.4127, IoU.bar: 0.2720, IoU.arcade machine: 0.6177, IoU.hovel: 0.2704, IoU.bus: 0.7871, IoU.towel: 0.5196, IoU.light: 0.4860, IoU.truck: 0.3312, IoU.tower: 0.3711, IoU.chandelier: 0.5439, IoU.awning: 0.2157, IoU.streetlight: 0.1946, IoU.booth: 0.3066, IoU.television receiver: 0.6321, IoU.airplane: 0.5092, IoU.dirt track: 0.1131, IoU.apparel: 0.2210, IoU.pole: 0.1413, IoU.land: 0.0037, IoU.bannister: 0.1144, IoU.escalator: 0.2767, IoU.ottoman: 0.3710, IoU.bottle: 0.2873, IoU.buffet: 0.2579, IoU.poster: 0.2086, IoU.stage: 0.1255, IoU.van: 0.1914, IoU.ship: 0.5077, IoU.fountain: 0.4081, IoU.conveyer belt: 0.5465, IoU.canopy: 0.0942, IoU.washer: 0.6189, IoU.plaything: 0.2300, IoU.swimming pool: 0.5730, IoU.stool: 0.2991, IoU.barrel: 0.0529, IoU.basket: 0.2237, IoU.waterfall: 0.4521, IoU.tent: 0.7659, IoU.bag: 0.1423, IoU.minibike: 0.6339, IoU.cradle: 0.8018, IoU.oven: 0.0761, IoU.ball: 0.4220, 
IoU.food: 0.5630, IoU.step: 0.0100, IoU.tank: 0.2313, IoU.trade name: 0.2392, IoU.microwave: 0.4066, IoU.pot: 0.3452, IoU.animal: 0.5354, IoU.bicycle: 0.5504, IoU.lake: 0.5046, IoU.dishwasher: 0.6028, IoU.screen: 0.5471, IoU.blanket: 0.1315, IoU.sculpture: 0.2670, IoU.hood: 0.4552, IoU.sconce: 0.2411, IoU.vase: 0.2740, IoU.traffic light: 0.2652, IoU.tray: 0.0325, IoU.ashcan: 0.2936, IoU.fan: 0.3733, IoU.pier: 0.2753, IoU.crt screen: 0.0300, IoU.plate: 0.3352, IoU.monitor: 0.0484, IoU.bulletin board: 0.3075, IoU.shower: 0.0000, IoU.radiator: 0.3564, IoU.glass: 0.0550, IoU.clock: 0.1590, IoU.flag: 0.3050, Acc.wall: 0.8395, Acc.building: 0.9080, Acc.sky: 0.9609, Acc.floor: 0.8815, Acc.tree: 0.9068, Acc.ceiling: 0.9011, Acc.road: 0.8775, Acc.bed : 0.9376, Acc.windowpane: 0.7705, Acc.grass: 0.8030, Acc.cabinet: 0.7390, Acc.sidewalk: 0.7468, Acc.person: 0.9262, Acc.earth: 0.4716, Acc.door: 0.4426, Acc.table: 0.7073, Acc.mountain: 0.7522, Acc.plant: 0.6560, Acc.curtain: 0.8425, Acc.chair: 0.6632, Acc.car: 0.9396, Acc.water: 0.7518, Acc.painting: 0.8360, Acc.sofa: 0.6128, Acc.shelf: 0.4668, Acc.house: 0.3344, Acc.sea: 0.6997, Acc.mirror: 0.6823, Acc.rug: 0.6043, Acc.field: 0.4552, Acc.armchair: 0.6591, Acc.seat: 0.7623, Acc.fence: 0.5584, Acc.desk: 0.5393, Acc.rock: 0.7085, Acc.wardrobe: 0.6129, Acc.lamp: 0.7454, Acc.bathtub: 0.7871, Acc.railing: 0.4469, Acc.cushion: 0.7435, Acc.base: 0.5251, Acc.box: 0.2871, Acc.column: 0.5319, Acc.signboard: 0.5265, Acc.chest of drawers: 0.6115, Acc.counter: 0.2571, Acc.sand: 0.4509, Acc.sink: 0.7694, Acc.skyscraper: 0.4522, Acc.fireplace: 0.8017, Acc.refrigerator: 0.7553, Acc.grandstand: 0.7479, Acc.path: 0.1951, Acc.stairs: 0.4373, Acc.runway: 0.8026, Acc.case: 0.5897, Acc.pool table: 0.9631, Acc.pillow: 0.4562, Acc.screen door: 0.8487, Acc.stairway: 0.4682, Acc.river: 0.1759, Acc.bridge: 0.6878, Acc.bookcase: 0.6378, Acc.blind: 0.3117, Acc.coffee table: 0.8084, Acc.toilet: 0.8968, Acc.flower: 0.4572, Acc.book: 0.5865, Acc.hill: 0.0228, Acc.bench: 0.4482, Acc.countertop: 0.5880, Acc.stove: 0.7981, Acc.palm: 0.7561, Acc.kitchen island: 0.2831, Acc.computer: 0.6537, Acc.swivel chair: 0.5391, Acc.boat: 0.5120, Acc.bar: 0.4014, Acc.arcade machine: 0.7931, Acc.hovel: 0.3448, Acc.bus: 0.9509, Acc.towel: 0.6621, Acc.light: 0.6776, Acc.truck: 0.4506, Acc.tower: 0.6184, Acc.chandelier: 0.8569, Acc.awning: 0.2459, Acc.streetlight: 0.2738, Acc.booth: 0.4336, Acc.television receiver: 0.7299, Acc.airplane: 0.6275, Acc.dirt track: 0.1930, Acc.apparel: 0.5584, Acc.pole: 0.2004, Acc.land: 0.0043, Acc.bannister: 0.1482, Acc.escalator: 0.4746, Acc.ottoman: 0.6374, Acc.bottle: 0.4096, Acc.buffet: 0.2665, Acc.poster: 0.4818, Acc.stage: 0.2484, Acc.van: 0.2189, Acc.ship: 0.9246, Acc.fountain: 0.4379, Acc.conveyer belt: 0.8912, Acc.canopy: 0.1131, Acc.washer: 0.7260, Acc.plaything: 0.4051, Acc.swimming pool: 0.6412, Acc.stool: 0.3518, Acc.barrel: 0.6501, Acc.basket: 0.3169, Acc.waterfall: 0.5225, Acc.tent: 0.9797, Acc.bag: 0.2259, Acc.minibike: 0.7566, Acc.cradle: 0.9139, Acc.oven: 0.1150, Acc.ball: 0.5886, Acc.food: 0.6855, Acc.step: 0.0107, Acc.tank: 0.2398, Acc.trade name: 0.3048, Acc.microwave: 0.4607, Acc.pot: 0.4652, Acc.animal: 0.5926, Acc.bicycle: 0.7212, Acc.lake: 0.5844, Acc.dishwasher: 0.7193, Acc.screen: 0.9262, Acc.blanket: 0.1986, Acc.sculpture: 0.8380, Acc.hood: 0.4719, Acc.sconce: 0.2805, Acc.vase: 0.5284, Acc.traffic light: 0.4393, Acc.tray: 0.0453, Acc.ashcan: 0.3624, Acc.fan: 0.7972, Acc.pier: 0.4799, Acc.crt screen: 0.1892, Acc.plate: 0.4189, Acc.monitor: 
0.0614, Acc.bulletin board: 0.5810, Acc.shower: 0.0000, Acc.radiator: 0.3770, Acc.glass: 0.0587, Acc.clock: 0.3215, Acc.flag: 0.3891 2023-02-11 21:53:47,198 - mmseg - INFO - Iter [64050/160000] lr: 3.598e-05, eta: 5:31:47, time: 0.456, data_time: 0.249, memory: 7748, decode.loss_ce: 0.3019, decode.acc_seg: 88.4189, aux.loss_ce: 0.2003, aux.acc_seg: 80.7281, loss: 0.5023, grad_norm: 4.8367 2023-02-11 21:53:56,886 - mmseg - INFO - Iter [64100/160000] lr: 3.596e-05, eta: 5:31:35, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3097, decode.acc_seg: 87.6600, aux.loss_ce: 0.2090, aux.acc_seg: 79.5154, loss: 0.5186, grad_norm: 5.0950 2023-02-11 21:54:07,094 - mmseg - INFO - Iter [64150/160000] lr: 3.594e-05, eta: 5:31:24, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3141, decode.acc_seg: 87.8317, aux.loss_ce: 0.2129, aux.acc_seg: 79.7294, loss: 0.5270, grad_norm: 5.6640 2023-02-11 21:54:17,384 - mmseg - INFO - Iter [64200/160000] lr: 3.593e-05, eta: 5:31:14, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3154, decode.acc_seg: 88.5727, aux.loss_ce: 0.2124, aux.acc_seg: 80.5686, loss: 0.5277, grad_norm: 6.2594 2023-02-11 21:54:27,371 - mmseg - INFO - Iter [64250/160000] lr: 3.591e-05, eta: 5:31:03, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3219, decode.acc_seg: 87.8015, aux.loss_ce: 0.2071, aux.acc_seg: 80.1456, loss: 0.5290, grad_norm: 5.0341 2023-02-11 21:54:37,355 - mmseg - INFO - Iter [64300/160000] lr: 3.589e-05, eta: 5:30:52, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2984, decode.acc_seg: 88.8212, aux.loss_ce: 0.2070, aux.acc_seg: 80.8123, loss: 0.5054, grad_norm: 4.8971 2023-02-11 21:54:47,282 - mmseg - INFO - Iter [64350/160000] lr: 3.587e-05, eta: 5:30:40, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3003, decode.acc_seg: 88.0453, aux.loss_ce: 0.2109, aux.acc_seg: 79.5271, loss: 0.5112, grad_norm: 5.8171 2023-02-11 21:54:57,743 - mmseg - INFO - Iter [64400/160000] lr: 3.585e-05, eta: 5:30:30, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2929, decode.acc_seg: 88.4156, aux.loss_ce: 0.1971, aux.acc_seg: 80.9038, loss: 0.4900, grad_norm: 4.4203 2023-02-11 21:55:08,113 - mmseg - INFO - Iter [64450/160000] lr: 3.583e-05, eta: 5:30:20, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3148, decode.acc_seg: 87.7419, aux.loss_ce: 0.2132, aux.acc_seg: 79.2254, loss: 0.5280, grad_norm: 5.0347 2023-02-11 21:55:18,632 - mmseg - INFO - Iter [64500/160000] lr: 3.581e-05, eta: 5:30:10, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3029, decode.acc_seg: 88.2257, aux.loss_ce: 0.2017, aux.acc_seg: 80.9092, loss: 0.5046, grad_norm: 4.8799 2023-02-11 21:55:28,820 - mmseg - INFO - Iter [64550/160000] lr: 3.579e-05, eta: 5:29:59, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3132, decode.acc_seg: 88.1357, aux.loss_ce: 0.2046, aux.acc_seg: 80.6955, loss: 0.5178, grad_norm: 5.1672 2023-02-11 21:55:38,521 - mmseg - INFO - Iter [64600/160000] lr: 3.578e-05, eta: 5:29:47, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3069, decode.acc_seg: 87.9139, aux.loss_ce: 0.2113, aux.acc_seg: 79.7237, loss: 0.5183, grad_norm: 5.3641 2023-02-11 21:55:48,526 - mmseg - INFO - Iter [64650/160000] lr: 3.576e-05, eta: 5:29:36, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3124, decode.acc_seg: 87.9217, aux.loss_ce: 0.2168, aux.acc_seg: 79.6473, loss: 0.5292, grad_norm: 6.1644 2023-02-11 21:55:58,249 - mmseg - INFO - Iter 
[64700/160000] lr: 3.574e-05, eta: 5:29:24, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3122, decode.acc_seg: 87.5018, aux.loss_ce: 0.2029, aux.acc_seg: 80.1127, loss: 0.5151, grad_norm: 4.8451 2023-02-11 21:56:08,081 - mmseg - INFO - Iter [64750/160000] lr: 3.572e-05, eta: 5:29:13, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3231, decode.acc_seg: 87.9063, aux.loss_ce: 0.2076, aux.acc_seg: 80.3082, loss: 0.5306, grad_norm: 5.7019 2023-02-11 21:56:17,730 - mmseg - INFO - Iter [64800/160000] lr: 3.570e-05, eta: 5:29:01, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3182, decode.acc_seg: 87.9381, aux.loss_ce: 0.2155, aux.acc_seg: 79.7464, loss: 0.5337, grad_norm: 4.9440 2023-02-11 21:56:27,919 - mmseg - INFO - Iter [64850/160000] lr: 3.568e-05, eta: 5:28:51, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3080, decode.acc_seg: 88.3621, aux.loss_ce: 0.2162, aux.acc_seg: 79.8228, loss: 0.5242, grad_norm: 4.8536 2023-02-11 21:56:38,349 - mmseg - INFO - Iter [64900/160000] lr: 3.566e-05, eta: 5:28:40, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3237, decode.acc_seg: 87.5352, aux.loss_ce: 0.2082, aux.acc_seg: 80.6017, loss: 0.5319, grad_norm: 5.4596 2023-02-11 21:56:48,504 - mmseg - INFO - Iter [64950/160000] lr: 3.564e-05, eta: 5:28:30, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3352, decode.acc_seg: 87.2432, aux.loss_ce: 0.2178, aux.acc_seg: 79.3526, loss: 0.5530, grad_norm: 5.2317 2023-02-11 21:56:58,504 - mmseg - INFO - Saving checkpoint at 65000 iterations 2023-02-11 21:56:59,239 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 21:56:59,239 - mmseg - INFO - Iter [65000/160000] lr: 3.563e-05, eta: 5:28:20, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3312, decode.acc_seg: 87.5971, aux.loss_ce: 0.2202, aux.acc_seg: 79.5291, loss: 0.5514, grad_norm: 5.7867 2023-02-11 21:57:09,291 - mmseg - INFO - Iter [65050/160000] lr: 3.561e-05, eta: 5:28:09, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3161, decode.acc_seg: 88.1255, aux.loss_ce: 0.2114, aux.acc_seg: 80.2289, loss: 0.5275, grad_norm: 5.4030 2023-02-11 21:57:19,207 - mmseg - INFO - Iter [65100/160000] lr: 3.559e-05, eta: 5:27:58, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3085, decode.acc_seg: 88.1176, aux.loss_ce: 0.2105, aux.acc_seg: 79.8762, loss: 0.5190, grad_norm: 5.4548 2023-02-11 21:57:29,378 - mmseg - INFO - Iter [65150/160000] lr: 3.557e-05, eta: 5:27:47, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2961, decode.acc_seg: 88.8568, aux.loss_ce: 0.2052, aux.acc_seg: 80.9213, loss: 0.5013, grad_norm: 6.0536 2023-02-11 21:57:39,544 - mmseg - INFO - Iter [65200/160000] lr: 3.555e-05, eta: 5:27:36, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3151, decode.acc_seg: 88.1004, aux.loss_ce: 0.2208, aux.acc_seg: 79.2338, loss: 0.5359, grad_norm: 4.9202 2023-02-11 21:57:49,518 - mmseg - INFO - Iter [65250/160000] lr: 3.553e-05, eta: 5:27:25, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3026, decode.acc_seg: 88.6557, aux.loss_ce: 0.2031, aux.acc_seg: 81.1466, loss: 0.5057, grad_norm: 4.8888 2023-02-11 21:58:01,964 - mmseg - INFO - Iter [65300/160000] lr: 3.551e-05, eta: 5:27:19, time: 0.249, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3051, decode.acc_seg: 88.4107, aux.loss_ce: 0.2086, aux.acc_seg: 80.5294, loss: 0.5137, grad_norm: 5.3088 2023-02-11 21:58:11,762 - mmseg - INFO - Iter 
[65350/160000] lr: 3.549e-05, eta: 5:27:07, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2973, decode.acc_seg: 88.5120, aux.loss_ce: 0.2022, aux.acc_seg: 80.2216, loss: 0.4995, grad_norm: 4.7561 2023-02-11 21:58:21,466 - mmseg - INFO - Iter [65400/160000] lr: 3.548e-05, eta: 5:26:56, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3049, decode.acc_seg: 88.2613, aux.loss_ce: 0.2058, aux.acc_seg: 80.4897, loss: 0.5106, grad_norm: 5.6088 2023-02-11 21:58:31,355 - mmseg - INFO - Iter [65450/160000] lr: 3.546e-05, eta: 5:26:44, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2982, decode.acc_seg: 88.7690, aux.loss_ce: 0.2081, aux.acc_seg: 80.6933, loss: 0.5063, grad_norm: 5.2459 2023-02-11 21:58:41,573 - mmseg - INFO - Iter [65500/160000] lr: 3.544e-05, eta: 5:26:34, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3070, decode.acc_seg: 88.2201, aux.loss_ce: 0.2099, aux.acc_seg: 79.6450, loss: 0.5168, grad_norm: 5.4668 2023-02-11 21:58:52,253 - mmseg - INFO - Iter [65550/160000] lr: 3.542e-05, eta: 5:26:24, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2959, decode.acc_seg: 88.3208, aux.loss_ce: 0.2037, aux.acc_seg: 80.1241, loss: 0.4996, grad_norm: 5.0605 2023-02-11 21:59:02,745 - mmseg - INFO - Iter [65600/160000] lr: 3.540e-05, eta: 5:26:14, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3098, decode.acc_seg: 88.0244, aux.loss_ce: 0.2055, aux.acc_seg: 80.5199, loss: 0.5153, grad_norm: 5.4657 2023-02-11 21:59:13,018 - mmseg - INFO - Iter [65650/160000] lr: 3.538e-05, eta: 5:26:03, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3151, decode.acc_seg: 87.7367, aux.loss_ce: 0.2056, aux.acc_seg: 79.9725, loss: 0.5208, grad_norm: 6.3611 2023-02-11 21:59:22,935 - mmseg - INFO - Iter [65700/160000] lr: 3.536e-05, eta: 5:25:52, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3074, decode.acc_seg: 88.2307, aux.loss_ce: 0.2020, aux.acc_seg: 80.8590, loss: 0.5094, grad_norm: 5.2313 2023-02-11 21:59:33,052 - mmseg - INFO - Iter [65750/160000] lr: 3.534e-05, eta: 5:25:41, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3253, decode.acc_seg: 87.5857, aux.loss_ce: 0.2191, aux.acc_seg: 79.5612, loss: 0.5444, grad_norm: 5.5902 2023-02-11 21:59:42,621 - mmseg - INFO - Iter [65800/160000] lr: 3.533e-05, eta: 5:25:29, time: 0.191, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3021, decode.acc_seg: 88.2065, aux.loss_ce: 0.2043, aux.acc_seg: 80.3185, loss: 0.5064, grad_norm: 5.3077 2023-02-11 21:59:52,518 - mmseg - INFO - Iter [65850/160000] lr: 3.531e-05, eta: 5:25:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3026, decode.acc_seg: 88.1420, aux.loss_ce: 0.1992, aux.acc_seg: 81.0536, loss: 0.5018, grad_norm: 5.5210 2023-02-11 22:00:02,350 - mmseg - INFO - Iter [65900/160000] lr: 3.529e-05, eta: 5:25:07, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3054, decode.acc_seg: 88.3450, aux.loss_ce: 0.2030, aux.acc_seg: 80.7424, loss: 0.5084, grad_norm: 5.9151 2023-02-11 22:00:12,458 - mmseg - INFO - Iter [65950/160000] lr: 3.527e-05, eta: 5:24:56, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3100, decode.acc_seg: 88.3088, aux.loss_ce: 0.2040, aux.acc_seg: 80.7588, loss: 0.5140, grad_norm: 4.6540 2023-02-11 22:00:22,756 - mmseg - INFO - Saving checkpoint at 66000 iterations 2023-02-11 22:00:23,425 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:00:23,425 - mmseg - INFO - Iter 
[66000/160000] lr: 3.525e-05, eta: 5:24:46, time: 0.220, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3112, decode.acc_seg: 88.2373, aux.loss_ce: 0.2067, aux.acc_seg: 80.5011, loss: 0.5179, grad_norm: 4.8607 2023-02-11 22:00:33,289 - mmseg - INFO - Iter [66050/160000] lr: 3.523e-05, eta: 5:24:35, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3071, decode.acc_seg: 87.7040, aux.loss_ce: 0.2157, aux.acc_seg: 78.8123, loss: 0.5228, grad_norm: 5.4761 2023-02-11 22:00:43,502 - mmseg - INFO - Iter [66100/160000] lr: 3.521e-05, eta: 5:24:25, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3158, decode.acc_seg: 88.3052, aux.loss_ce: 0.2149, aux.acc_seg: 79.7892, loss: 0.5307, grad_norm: 5.0131 2023-02-11 22:00:53,827 - mmseg - INFO - Iter [66150/160000] lr: 3.519e-05, eta: 5:24:14, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2896, decode.acc_seg: 88.6631, aux.loss_ce: 0.2018, aux.acc_seg: 80.7921, loss: 0.4914, grad_norm: 4.7150 2023-02-11 22:01:04,099 - mmseg - INFO - Iter [66200/160000] lr: 3.518e-05, eta: 5:24:04, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3231, decode.acc_seg: 87.7198, aux.loss_ce: 0.2166, aux.acc_seg: 79.8347, loss: 0.5397, grad_norm: 5.5987 2023-02-11 22:01:14,410 - mmseg - INFO - Iter [66250/160000] lr: 3.516e-05, eta: 5:23:53, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3230, decode.acc_seg: 87.6060, aux.loss_ce: 0.2131, aux.acc_seg: 79.4953, loss: 0.5360, grad_norm: 5.2729 2023-02-11 22:01:24,664 - mmseg - INFO - Iter [66300/160000] lr: 3.514e-05, eta: 5:23:42, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3091, decode.acc_seg: 88.1798, aux.loss_ce: 0.2102, aux.acc_seg: 80.2326, loss: 0.5193, grad_norm: 6.2442 2023-02-11 22:01:34,814 - mmseg - INFO - Iter [66350/160000] lr: 3.512e-05, eta: 5:23:32, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3161, decode.acc_seg: 88.1222, aux.loss_ce: 0.2126, aux.acc_seg: 79.9250, loss: 0.5287, grad_norm: 5.9609 2023-02-11 22:01:45,258 - mmseg - INFO - Iter [66400/160000] lr: 3.510e-05, eta: 5:23:22, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3162, decode.acc_seg: 88.2933, aux.loss_ce: 0.2125, aux.acc_seg: 79.7816, loss: 0.5287, grad_norm: 5.6842 2023-02-11 22:01:55,550 - mmseg - INFO - Iter [66450/160000] lr: 3.508e-05, eta: 5:23:11, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3220, decode.acc_seg: 87.4000, aux.loss_ce: 0.2118, aux.acc_seg: 79.6054, loss: 0.5338, grad_norm: 5.2657 2023-02-11 22:02:05,597 - mmseg - INFO - Iter [66500/160000] lr: 3.506e-05, eta: 5:23:00, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3175, decode.acc_seg: 87.6223, aux.loss_ce: 0.2129, aux.acc_seg: 79.3377, loss: 0.5304, grad_norm: 5.9877 2023-02-11 22:02:17,524 - mmseg - INFO - Iter [66550/160000] lr: 3.504e-05, eta: 5:22:53, time: 0.238, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2744, decode.acc_seg: 89.0832, aux.loss_ce: 0.1865, aux.acc_seg: 81.6759, loss: 0.4610, grad_norm: 4.4465 2023-02-11 22:02:27,804 - mmseg - INFO - Iter [66600/160000] lr: 3.503e-05, eta: 5:22:42, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3096, decode.acc_seg: 87.8832, aux.loss_ce: 0.2052, aux.acc_seg: 80.6685, loss: 0.5148, grad_norm: 5.5865 2023-02-11 22:02:37,831 - mmseg - INFO - Iter [66650/160000] lr: 3.501e-05, eta: 5:22:31, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2817, decode.acc_seg: 89.0352, aux.loss_ce: 0.1941, aux.acc_seg: 81.1580, loss: 
0.4758, grad_norm: 4.3022 2023-02-11 22:02:47,598 - mmseg - INFO - Iter [66700/160000] lr: 3.499e-05, eta: 5:22:20, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3099, decode.acc_seg: 88.1736, aux.loss_ce: 0.2064, aux.acc_seg: 80.4003, loss: 0.5162, grad_norm: 4.9441 2023-02-11 22:02:57,655 - mmseg - INFO - Iter [66750/160000] lr: 3.497e-05, eta: 5:22:09, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2948, decode.acc_seg: 88.8956, aux.loss_ce: 0.2011, aux.acc_seg: 81.2102, loss: 0.4960, grad_norm: 9.2154 2023-02-11 22:03:07,795 - mmseg - INFO - Iter [66800/160000] lr: 3.495e-05, eta: 5:21:58, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3144, decode.acc_seg: 88.0071, aux.loss_ce: 0.2113, aux.acc_seg: 80.3051, loss: 0.5257, grad_norm: 6.5229 2023-02-11 22:03:18,324 - mmseg - INFO - Iter [66850/160000] lr: 3.493e-05, eta: 5:21:48, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3110, decode.acc_seg: 87.8569, aux.loss_ce: 0.2099, aux.acc_seg: 79.8893, loss: 0.5208, grad_norm: 5.2580 2023-02-11 22:03:28,654 - mmseg - INFO - Iter [66900/160000] lr: 3.491e-05, eta: 5:21:37, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3017, decode.acc_seg: 88.3151, aux.loss_ce: 0.1976, aux.acc_seg: 81.2092, loss: 0.4994, grad_norm: 5.5029 2023-02-11 22:03:38,554 - mmseg - INFO - Iter [66950/160000] lr: 3.489e-05, eta: 5:21:26, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3015, decode.acc_seg: 88.0289, aux.loss_ce: 0.2049, aux.acc_seg: 80.2126, loss: 0.5064, grad_norm: 4.7275 2023-02-11 22:03:48,239 - mmseg - INFO - Saving checkpoint at 67000 iterations 2023-02-11 22:03:48,921 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:03:48,922 - mmseg - INFO - Iter [67000/160000] lr: 3.488e-05, eta: 5:21:16, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2818, decode.acc_seg: 89.2793, aux.loss_ce: 0.1963, aux.acc_seg: 81.3340, loss: 0.4781, grad_norm: 4.6231 2023-02-11 22:03:58,804 - mmseg - INFO - Iter [67050/160000] lr: 3.486e-05, eta: 5:21:05, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3027, decode.acc_seg: 88.2945, aux.loss_ce: 0.2102, aux.acc_seg: 80.1638, loss: 0.5129, grad_norm: 5.3093 2023-02-11 22:04:08,701 - mmseg - INFO - Iter [67100/160000] lr: 3.484e-05, eta: 5:20:53, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2986, decode.acc_seg: 88.3271, aux.loss_ce: 0.2024, aux.acc_seg: 80.6124, loss: 0.5010, grad_norm: 4.8212 2023-02-11 22:04:18,444 - mmseg - INFO - Iter [67150/160000] lr: 3.482e-05, eta: 5:20:42, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2940, decode.acc_seg: 88.5330, aux.loss_ce: 0.2050, aux.acc_seg: 80.2181, loss: 0.4990, grad_norm: 4.3825 2023-02-11 22:04:28,432 - mmseg - INFO - Iter [67200/160000] lr: 3.480e-05, eta: 5:20:31, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3005, decode.acc_seg: 88.5565, aux.loss_ce: 0.2037, aux.acc_seg: 80.4933, loss: 0.5042, grad_norm: 5.1720 2023-02-11 22:04:39,057 - mmseg - INFO - Iter [67250/160000] lr: 3.478e-05, eta: 5:20:21, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3078, decode.acc_seg: 87.8426, aux.loss_ce: 0.2041, aux.acc_seg: 80.1720, loss: 0.5119, grad_norm: 5.4853 2023-02-11 22:04:48,903 - mmseg - INFO - Iter [67300/160000] lr: 3.476e-05, eta: 5:20:10, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3201, decode.acc_seg: 87.9066, aux.loss_ce: 0.2173, aux.acc_seg: 79.2440, loss: 0.5374, 
grad_norm: 5.5705 2023-02-11 22:04:58,680 - mmseg - INFO - Iter [67350/160000] lr: 3.474e-05, eta: 5:19:58, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3135, decode.acc_seg: 88.0094, aux.loss_ce: 0.2038, aux.acc_seg: 80.4952, loss: 0.5174, grad_norm: 5.5948 2023-02-11 22:05:08,554 - mmseg - INFO - Iter [67400/160000] lr: 3.473e-05, eta: 5:19:47, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2947, decode.acc_seg: 88.5641, aux.loss_ce: 0.2112, aux.acc_seg: 79.7420, loss: 0.5059, grad_norm: 5.1108 2023-02-11 22:05:18,534 - mmseg - INFO - Iter [67450/160000] lr: 3.471e-05, eta: 5:19:36, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3065, decode.acc_seg: 88.2150, aux.loss_ce: 0.2090, aux.acc_seg: 80.1747, loss: 0.5155, grad_norm: 4.7711 2023-02-11 22:05:28,440 - mmseg - INFO - Iter [67500/160000] lr: 3.469e-05, eta: 5:19:25, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3084, decode.acc_seg: 87.9947, aux.loss_ce: 0.2024, aux.acc_seg: 80.3976, loss: 0.5108, grad_norm: 5.2509 2023-02-11 22:05:38,391 - mmseg - INFO - Iter [67550/160000] lr: 3.467e-05, eta: 5:19:14, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3228, decode.acc_seg: 87.3047, aux.loss_ce: 0.2155, aux.acc_seg: 79.2692, loss: 0.5383, grad_norm: 5.1941 2023-02-11 22:05:48,822 - mmseg - INFO - Iter [67600/160000] lr: 3.465e-05, eta: 5:19:04, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2951, decode.acc_seg: 88.6130, aux.loss_ce: 0.2102, aux.acc_seg: 80.0382, loss: 0.5053, grad_norm: 5.5761 2023-02-11 22:05:59,105 - mmseg - INFO - Iter [67650/160000] lr: 3.463e-05, eta: 5:18:53, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2933, decode.acc_seg: 89.0456, aux.loss_ce: 0.2045, aux.acc_seg: 80.9243, loss: 0.4979, grad_norm: 4.9927 2023-02-11 22:06:08,795 - mmseg - INFO - Iter [67700/160000] lr: 3.461e-05, eta: 5:18:41, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3121, decode.acc_seg: 88.1546, aux.loss_ce: 0.2041, aux.acc_seg: 80.8760, loss: 0.5162, grad_norm: 5.1140 2023-02-11 22:06:18,969 - mmseg - INFO - Iter [67750/160000] lr: 3.459e-05, eta: 5:18:31, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3072, decode.acc_seg: 88.5165, aux.loss_ce: 0.2097, aux.acc_seg: 80.1290, loss: 0.5169, grad_norm: 5.5498 2023-02-11 22:06:30,908 - mmseg - INFO - Iter [67800/160000] lr: 3.458e-05, eta: 5:18:23, time: 0.239, data_time: 0.046, memory: 7748, decode.loss_ce: 0.3030, decode.acc_seg: 88.3724, aux.loss_ce: 0.2101, aux.acc_seg: 79.8378, loss: 0.5130, grad_norm: 5.6531 2023-02-11 22:06:40,977 - mmseg - INFO - Iter [67850/160000] lr: 3.456e-05, eta: 5:18:12, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2769, decode.acc_seg: 89.1814, aux.loss_ce: 0.1943, aux.acc_seg: 81.3941, loss: 0.4713, grad_norm: 4.5687 2023-02-11 22:06:50,749 - mmseg - INFO - Iter [67900/160000] lr: 3.454e-05, eta: 5:18:01, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3015, decode.acc_seg: 88.4553, aux.loss_ce: 0.2068, aux.acc_seg: 80.4144, loss: 0.5082, grad_norm: 5.6709 2023-02-11 22:07:00,711 - mmseg - INFO - Iter [67950/160000] lr: 3.452e-05, eta: 5:17:50, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3033, decode.acc_seg: 88.3908, aux.loss_ce: 0.2065, aux.acc_seg: 80.4261, loss: 0.5097, grad_norm: 5.0683 2023-02-11 22:07:11,333 - mmseg - INFO - Saving checkpoint at 68000 iterations 2023-02-11 22:07:12,011 - mmseg - INFO - Exp name: 
diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:07:12,012 - mmseg - INFO - Iter [68000/160000] lr: 3.450e-05, eta: 5:17:41, time: 0.226, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3016, decode.acc_seg: 88.2449, aux.loss_ce: 0.2119, aux.acc_seg: 80.0846, loss: 0.5135, grad_norm: 6.0871 2023-02-11 22:07:22,203 - mmseg - INFO - Iter [68050/160000] lr: 3.448e-05, eta: 5:17:31, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2972, decode.acc_seg: 88.9381, aux.loss_ce: 0.2039, aux.acc_seg: 80.7283, loss: 0.5011, grad_norm: 5.2491 2023-02-11 22:07:31,935 - mmseg - INFO - Iter [68100/160000] lr: 3.446e-05, eta: 5:17:19, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2943, decode.acc_seg: 88.7054, aux.loss_ce: 0.2004, aux.acc_seg: 80.7901, loss: 0.4947, grad_norm: 5.0588 2023-02-11 22:07:42,406 - mmseg - INFO - Iter [68150/160000] lr: 3.444e-05, eta: 5:17:09, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2944, decode.acc_seg: 88.7418, aux.loss_ce: 0.2063, aux.acc_seg: 80.5771, loss: 0.5007, grad_norm: 5.0260 2023-02-11 22:07:52,819 - mmseg - INFO - Iter [68200/160000] lr: 3.443e-05, eta: 5:16:59, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2945, decode.acc_seg: 88.2057, aux.loss_ce: 0.2034, aux.acc_seg: 80.0668, loss: 0.4979, grad_norm: 5.9891 2023-02-11 22:08:03,160 - mmseg - INFO - Iter [68250/160000] lr: 3.441e-05, eta: 5:16:48, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3107, decode.acc_seg: 87.8268, aux.loss_ce: 0.2076, aux.acc_seg: 79.9070, loss: 0.5183, grad_norm: 5.5053 2023-02-11 22:08:13,103 - mmseg - INFO - Iter [68300/160000] lr: 3.439e-05, eta: 5:16:37, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3011, decode.acc_seg: 88.5574, aux.loss_ce: 0.2016, aux.acc_seg: 80.8318, loss: 0.5026, grad_norm: 5.3218 2023-02-11 22:08:23,275 - mmseg - INFO - Iter [68350/160000] lr: 3.437e-05, eta: 5:16:26, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3174, decode.acc_seg: 87.8945, aux.loss_ce: 0.2101, aux.acc_seg: 79.9982, loss: 0.5275, grad_norm: 5.5690 2023-02-11 22:08:33,455 - mmseg - INFO - Iter [68400/160000] lr: 3.435e-05, eta: 5:16:16, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2946, decode.acc_seg: 88.3649, aux.loss_ce: 0.1974, aux.acc_seg: 80.7632, loss: 0.4920, grad_norm: 5.2006 2023-02-11 22:08:43,832 - mmseg - INFO - Iter [68450/160000] lr: 3.433e-05, eta: 5:16:05, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2964, decode.acc_seg: 88.8551, aux.loss_ce: 0.2050, aux.acc_seg: 80.6785, loss: 0.5014, grad_norm: 5.0497 2023-02-11 22:08:53,847 - mmseg - INFO - Iter [68500/160000] lr: 3.431e-05, eta: 5:15:55, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2859, decode.acc_seg: 88.9162, aux.loss_ce: 0.2010, aux.acc_seg: 80.9881, loss: 0.4870, grad_norm: 5.0025 2023-02-11 22:09:03,631 - mmseg - INFO - Iter [68550/160000] lr: 3.429e-05, eta: 5:15:43, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2986, decode.acc_seg: 88.4719, aux.loss_ce: 0.2074, aux.acc_seg: 79.7847, loss: 0.5060, grad_norm: 5.7894 2023-02-11 22:09:13,787 - mmseg - INFO - Iter [68600/160000] lr: 3.428e-05, eta: 5:15:32, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3116, decode.acc_seg: 88.0464, aux.loss_ce: 0.2096, aux.acc_seg: 80.0218, loss: 0.5212, grad_norm: 4.8660 2023-02-11 22:09:24,661 - mmseg - INFO - Iter [68650/160000] lr: 3.426e-05, eta: 5:15:23, time: 0.217, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.2847, decode.acc_seg: 88.8214, aux.loss_ce: 0.1942, aux.acc_seg: 81.4422, loss: 0.4789, grad_norm: 4.0475 2023-02-11 22:09:35,163 - mmseg - INFO - Iter [68700/160000] lr: 3.424e-05, eta: 5:15:13, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3225, decode.acc_seg: 87.4072, aux.loss_ce: 0.2110, aux.acc_seg: 79.4929, loss: 0.5336, grad_norm: 4.8793 2023-02-11 22:09:45,070 - mmseg - INFO - Iter [68750/160000] lr: 3.422e-05, eta: 5:15:02, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3019, decode.acc_seg: 88.3894, aux.loss_ce: 0.2039, aux.acc_seg: 80.8523, loss: 0.5058, grad_norm: 5.0580 2023-02-11 22:09:55,375 - mmseg - INFO - Iter [68800/160000] lr: 3.420e-05, eta: 5:14:51, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2891, decode.acc_seg: 88.7827, aux.loss_ce: 0.1969, aux.acc_seg: 81.2002, loss: 0.4860, grad_norm: 5.0926 2023-02-11 22:10:05,297 - mmseg - INFO - Iter [68850/160000] lr: 3.418e-05, eta: 5:14:40, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3227, decode.acc_seg: 87.6255, aux.loss_ce: 0.2171, aux.acc_seg: 79.1338, loss: 0.5398, grad_norm: 5.9592 2023-02-11 22:10:15,367 - mmseg - INFO - Iter [68900/160000] lr: 3.416e-05, eta: 5:14:29, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3095, decode.acc_seg: 87.9333, aux.loss_ce: 0.2123, aux.acc_seg: 79.8043, loss: 0.5218, grad_norm: 5.7372 2023-02-11 22:10:25,172 - mmseg - INFO - Iter [68950/160000] lr: 3.414e-05, eta: 5:14:18, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3240, decode.acc_seg: 87.8228, aux.loss_ce: 0.2144, aux.acc_seg: 80.2271, loss: 0.5384, grad_norm: 5.0465 2023-02-11 22:10:35,369 - mmseg - INFO - Saving checkpoint at 69000 iterations 2023-02-11 22:10:36,059 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:10:36,059 - mmseg - INFO - Iter [69000/160000] lr: 3.413e-05, eta: 5:14:09, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3109, decode.acc_seg: 88.3176, aux.loss_ce: 0.2136, aux.acc_seg: 80.1954, loss: 0.5245, grad_norm: 5.2043 2023-02-11 22:10:48,119 - mmseg - INFO - Iter [69050/160000] lr: 3.411e-05, eta: 5:14:01, time: 0.241, data_time: 0.047, memory: 7748, decode.loss_ce: 0.3035, decode.acc_seg: 88.3983, aux.loss_ce: 0.2081, aux.acc_seg: 80.4894, loss: 0.5116, grad_norm: 5.5564 2023-02-11 22:10:58,085 - mmseg - INFO - Iter [69100/160000] lr: 3.409e-05, eta: 5:13:50, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2873, decode.acc_seg: 88.6949, aux.loss_ce: 0.2069, aux.acc_seg: 80.4120, loss: 0.4942, grad_norm: 5.4753 2023-02-11 22:11:08,143 - mmseg - INFO - Iter [69150/160000] lr: 3.407e-05, eta: 5:13:39, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3039, decode.acc_seg: 88.5296, aux.loss_ce: 0.2094, aux.acc_seg: 80.6807, loss: 0.5133, grad_norm: 5.4971 2023-02-11 22:11:18,308 - mmseg - INFO - Iter [69200/160000] lr: 3.405e-05, eta: 5:13:29, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2832, decode.acc_seg: 89.0808, aux.loss_ce: 0.1972, aux.acc_seg: 81.0708, loss: 0.4804, grad_norm: 4.7678 2023-02-11 22:11:28,520 - mmseg - INFO - Iter [69250/160000] lr: 3.403e-05, eta: 5:13:18, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2923, decode.acc_seg: 89.1220, aux.loss_ce: 0.2024, aux.acc_seg: 81.3834, loss: 0.4947, grad_norm: 4.7448 2023-02-11 22:11:38,856 - mmseg - INFO - Iter [69300/160000] lr: 3.401e-05, eta: 5:13:08, time: 0.207, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.3108, decode.acc_seg: 87.9973, aux.loss_ce: 0.2136, aux.acc_seg: 80.4981, loss: 0.5245, grad_norm: 6.3894 2023-02-11 22:11:48,794 - mmseg - INFO - Iter [69350/160000] lr: 3.399e-05, eta: 5:12:57, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2970, decode.acc_seg: 88.5675, aux.loss_ce: 0.1973, aux.acc_seg: 81.3211, loss: 0.4943, grad_norm: 4.7778 2023-02-11 22:11:58,754 - mmseg - INFO - Iter [69400/160000] lr: 3.398e-05, eta: 5:12:45, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3174, decode.acc_seg: 87.8453, aux.loss_ce: 0.2073, aux.acc_seg: 80.6443, loss: 0.5247, grad_norm: 5.5975 2023-02-11 22:12:08,725 - mmseg - INFO - Iter [69450/160000] lr: 3.396e-05, eta: 5:12:34, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2911, decode.acc_seg: 88.7098, aux.loss_ce: 0.2079, aux.acc_seg: 80.2313, loss: 0.4989, grad_norm: 5.2366 2023-02-11 22:12:18,979 - mmseg - INFO - Iter [69500/160000] lr: 3.394e-05, eta: 5:12:24, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2921, decode.acc_seg: 88.7638, aux.loss_ce: 0.1958, aux.acc_seg: 80.9572, loss: 0.4879, grad_norm: 5.4982 2023-02-11 22:12:29,231 - mmseg - INFO - Iter [69550/160000] lr: 3.392e-05, eta: 5:12:13, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3009, decode.acc_seg: 88.4642, aux.loss_ce: 0.1994, aux.acc_seg: 80.6385, loss: 0.5003, grad_norm: 5.0910 2023-02-11 22:12:39,514 - mmseg - INFO - Iter [69600/160000] lr: 3.390e-05, eta: 5:12:03, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3048, decode.acc_seg: 88.2403, aux.loss_ce: 0.2107, aux.acc_seg: 79.5451, loss: 0.5155, grad_norm: 4.7363 2023-02-11 22:12:49,269 - mmseg - INFO - Iter [69650/160000] lr: 3.388e-05, eta: 5:11:52, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3100, decode.acc_seg: 87.9549, aux.loss_ce: 0.2067, aux.acc_seg: 80.0154, loss: 0.5167, grad_norm: 5.3700 2023-02-11 22:12:59,302 - mmseg - INFO - Iter [69700/160000] lr: 3.386e-05, eta: 5:11:41, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3028, decode.acc_seg: 88.5591, aux.loss_ce: 0.2073, aux.acc_seg: 80.6019, loss: 0.5101, grad_norm: 4.9605 2023-02-11 22:13:09,366 - mmseg - INFO - Iter [69750/160000] lr: 3.384e-05, eta: 5:11:30, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3103, decode.acc_seg: 87.9229, aux.loss_ce: 0.2124, aux.acc_seg: 79.5597, loss: 0.5228, grad_norm: 5.4599 2023-02-11 22:13:19,082 - mmseg - INFO - Iter [69800/160000] lr: 3.383e-05, eta: 5:11:18, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3046, decode.acc_seg: 88.0209, aux.loss_ce: 0.2070, aux.acc_seg: 80.3454, loss: 0.5116, grad_norm: 5.2022 2023-02-11 22:13:29,072 - mmseg - INFO - Iter [69850/160000] lr: 3.381e-05, eta: 5:11:07, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3049, decode.acc_seg: 88.3577, aux.loss_ce: 0.2012, aux.acc_seg: 80.9036, loss: 0.5061, grad_norm: 5.5181 2023-02-11 22:13:39,094 - mmseg - INFO - Iter [69900/160000] lr: 3.379e-05, eta: 5:10:57, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2880, decode.acc_seg: 88.9599, aux.loss_ce: 0.2005, aux.acc_seg: 80.8653, loss: 0.4885, grad_norm: 5.0281 2023-02-11 22:13:48,989 - mmseg - INFO - Iter [69950/160000] lr: 3.377e-05, eta: 5:10:45, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3127, decode.acc_seg: 88.0181, aux.loss_ce: 0.2119, aux.acc_seg: 79.9563, loss: 0.5246, grad_norm: 5.0661 2023-02-11 22:13:59,252 - mmseg - INFO - Saving checkpoint at 
70000 iterations 2023-02-11 22:13:59,925 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:13:59,925 - mmseg - INFO - Iter [70000/160000] lr: 3.375e-05, eta: 5:10:36, time: 0.219, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2855, decode.acc_seg: 88.5614, aux.loss_ce: 0.1964, aux.acc_seg: 80.4832, loss: 0.4819, grad_norm: 5.1156 2023-02-11 22:14:09,993 - mmseg - INFO - Iter [70050/160000] lr: 3.373e-05, eta: 5:10:25, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2870, decode.acc_seg: 88.9084, aux.loss_ce: 0.2021, aux.acc_seg: 80.4849, loss: 0.4891, grad_norm: 5.3022 2023-02-11 22:14:19,847 - mmseg - INFO - Iter [70100/160000] lr: 3.371e-05, eta: 5:10:14, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2876, decode.acc_seg: 88.6386, aux.loss_ce: 0.2001, aux.acc_seg: 80.7375, loss: 0.4877, grad_norm: 4.6428 2023-02-11 22:14:29,868 - mmseg - INFO - Iter [70150/160000] lr: 3.369e-05, eta: 5:10:03, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2884, decode.acc_seg: 88.1746, aux.loss_ce: 0.1919, aux.acc_seg: 80.7911, loss: 0.4803, grad_norm: 5.1336 2023-02-11 22:14:39,727 - mmseg - INFO - Iter [70200/160000] lr: 3.368e-05, eta: 5:09:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2767, decode.acc_seg: 88.9814, aux.loss_ce: 0.1989, aux.acc_seg: 80.7201, loss: 0.4756, grad_norm: 4.9560 2023-02-11 22:14:49,795 - mmseg - INFO - Iter [70250/160000] lr: 3.366e-05, eta: 5:09:41, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3245, decode.acc_seg: 87.7726, aux.loss_ce: 0.2218, aux.acc_seg: 79.4683, loss: 0.5463, grad_norm: 6.0626 2023-02-11 22:14:59,547 - mmseg - INFO - Iter [70300/160000] lr: 3.364e-05, eta: 5:09:30, time: 0.196, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2915, decode.acc_seg: 88.6891, aux.loss_ce: 0.2063, aux.acc_seg: 80.3563, loss: 0.4978, grad_norm: 5.2663 2023-02-11 22:15:11,719 - mmseg - INFO - Iter [70350/160000] lr: 3.362e-05, eta: 5:09:22, time: 0.243, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2816, decode.acc_seg: 89.1275, aux.loss_ce: 0.1970, aux.acc_seg: 80.8391, loss: 0.4786, grad_norm: 4.8604 2023-02-11 22:15:21,984 - mmseg - INFO - Iter [70400/160000] lr: 3.360e-05, eta: 5:09:12, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2821, decode.acc_seg: 88.8717, aux.loss_ce: 0.1941, aux.acc_seg: 81.1599, loss: 0.4762, grad_norm: 5.4638 2023-02-11 22:15:32,070 - mmseg - INFO - Iter [70450/160000] lr: 3.358e-05, eta: 5:09:01, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3051, decode.acc_seg: 88.2210, aux.loss_ce: 0.2086, aux.acc_seg: 80.4875, loss: 0.5137, grad_norm: 5.5342 2023-02-11 22:15:42,156 - mmseg - INFO - Iter [70500/160000] lr: 3.356e-05, eta: 5:08:50, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2829, decode.acc_seg: 88.9128, aux.loss_ce: 0.2006, aux.acc_seg: 80.2247, loss: 0.4835, grad_norm: 5.0967 2023-02-11 22:15:52,492 - mmseg - INFO - Iter [70550/160000] lr: 3.354e-05, eta: 5:08:40, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2836, decode.acc_seg: 89.0734, aux.loss_ce: 0.1944, aux.acc_seg: 81.5995, loss: 0.4780, grad_norm: 5.9928 2023-02-11 22:16:02,897 - mmseg - INFO - Iter [70600/160000] lr: 3.353e-05, eta: 5:08:30, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2845, decode.acc_seg: 89.0332, aux.loss_ce: 0.1948, aux.acc_seg: 81.2056, loss: 0.4793, grad_norm: 5.0518 2023-02-11 22:16:12,780 - mmseg - INFO - Iter [70650/160000] lr: 
3.351e-05, eta: 5:08:19, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2954, decode.acc_seg: 88.5339, aux.loss_ce: 0.2029, aux.acc_seg: 80.3743, loss: 0.4983, grad_norm: 4.8815 2023-02-11 22:16:23,252 - mmseg - INFO - Iter [70700/160000] lr: 3.349e-05, eta: 5:08:08, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2882, decode.acc_seg: 89.2904, aux.loss_ce: 0.2015, aux.acc_seg: 81.2418, loss: 0.4896, grad_norm: 5.0223 2023-02-11 22:16:33,125 - mmseg - INFO - Iter [70750/160000] lr: 3.347e-05, eta: 5:07:57, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3039, decode.acc_seg: 88.1828, aux.loss_ce: 0.2017, aux.acc_seg: 80.3270, loss: 0.5056, grad_norm: 5.5703 2023-02-11 22:16:42,962 - mmseg - INFO - Iter [70800/160000] lr: 3.345e-05, eta: 5:07:46, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2800, decode.acc_seg: 88.9021, aux.loss_ce: 0.1907, aux.acc_seg: 81.4819, loss: 0.4707, grad_norm: 4.4138 2023-02-11 22:16:52,573 - mmseg - INFO - Iter [70850/160000] lr: 3.343e-05, eta: 5:07:35, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3033, decode.acc_seg: 88.6626, aux.loss_ce: 0.2120, aux.acc_seg: 79.7881, loss: 0.5153, grad_norm: 5.4623 2023-02-11 22:17:02,832 - mmseg - INFO - Iter [70900/160000] lr: 3.341e-05, eta: 5:07:24, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2765, decode.acc_seg: 89.2979, aux.loss_ce: 0.1930, aux.acc_seg: 81.1561, loss: 0.4695, grad_norm: 4.5567 2023-02-11 22:17:12,658 - mmseg - INFO - Iter [70950/160000] lr: 3.339e-05, eta: 5:07:13, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3021, decode.acc_seg: 88.5158, aux.loss_ce: 0.2042, aux.acc_seg: 80.8366, loss: 0.5063, grad_norm: 5.6891 2023-02-11 22:17:22,619 - mmseg - INFO - Saving checkpoint at 71000 iterations 2023-02-11 22:17:23,292 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:17:23,292 - mmseg - INFO - Iter [71000/160000] lr: 3.338e-05, eta: 5:07:03, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3141, decode.acc_seg: 87.7750, aux.loss_ce: 0.2097, aux.acc_seg: 80.1625, loss: 0.5238, grad_norm: 6.3476 2023-02-11 22:17:33,008 - mmseg - INFO - Iter [71050/160000] lr: 3.336e-05, eta: 5:06:52, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2865, decode.acc_seg: 88.9489, aux.loss_ce: 0.1963, aux.acc_seg: 81.3441, loss: 0.4828, grad_norm: 4.9843 2023-02-11 22:17:43,369 - mmseg - INFO - Iter [71100/160000] lr: 3.334e-05, eta: 5:06:41, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3059, decode.acc_seg: 87.9096, aux.loss_ce: 0.2110, aux.acc_seg: 79.9218, loss: 0.5169, grad_norm: 5.6989 2023-02-11 22:17:53,288 - mmseg - INFO - Iter [71150/160000] lr: 3.332e-05, eta: 5:06:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3169, decode.acc_seg: 88.1159, aux.loss_ce: 0.2156, aux.acc_seg: 79.8442, loss: 0.5325, grad_norm: 5.2266 2023-02-11 22:18:03,225 - mmseg - INFO - Iter [71200/160000] lr: 3.330e-05, eta: 5:06:19, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2988, decode.acc_seg: 88.5464, aux.loss_ce: 0.2111, aux.acc_seg: 79.8640, loss: 0.5099, grad_norm: 6.9692 2023-02-11 22:18:13,254 - mmseg - INFO - Iter [71250/160000] lr: 3.328e-05, eta: 5:06:08, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3073, decode.acc_seg: 88.0384, aux.loss_ce: 0.2034, aux.acc_seg: 80.5891, loss: 0.5107, grad_norm: 5.9410 2023-02-11 22:18:23,471 - mmseg - INFO - Iter [71300/160000] lr: 3.326e-05, 
eta: 5:05:58, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2910, decode.acc_seg: 88.6629, aux.loss_ce: 0.1939, aux.acc_seg: 81.1128, loss: 0.4849, grad_norm: 5.5904 2023-02-11 22:18:33,082 - mmseg - INFO - Iter [71350/160000] lr: 3.324e-05, eta: 5:05:46, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2827, decode.acc_seg: 89.4028, aux.loss_ce: 0.1950, aux.acc_seg: 81.6008, loss: 0.4777, grad_norm: 4.5098 2023-02-11 22:18:42,826 - mmseg - INFO - Iter [71400/160000] lr: 3.323e-05, eta: 5:05:35, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3108, decode.acc_seg: 88.0367, aux.loss_ce: 0.2158, aux.acc_seg: 79.4635, loss: 0.5267, grad_norm: 5.2827 2023-02-11 22:18:53,006 - mmseg - INFO - Iter [71450/160000] lr: 3.321e-05, eta: 5:05:24, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3116, decode.acc_seg: 88.2690, aux.loss_ce: 0.2142, aux.acc_seg: 79.8594, loss: 0.5258, grad_norm: 5.0415 2023-02-11 22:19:03,088 - mmseg - INFO - Iter [71500/160000] lr: 3.319e-05, eta: 5:05:13, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2750, decode.acc_seg: 88.9103, aux.loss_ce: 0.1977, aux.acc_seg: 80.4093, loss: 0.4727, grad_norm: 5.1534 2023-02-11 22:19:13,755 - mmseg - INFO - Iter [71550/160000] lr: 3.317e-05, eta: 5:05:04, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2805, decode.acc_seg: 88.9644, aux.loss_ce: 0.1962, aux.acc_seg: 81.0935, loss: 0.4768, grad_norm: 5.0264 2023-02-11 22:19:26,033 - mmseg - INFO - Iter [71600/160000] lr: 3.315e-05, eta: 5:04:56, time: 0.246, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2952, decode.acc_seg: 88.7578, aux.loss_ce: 0.2060, aux.acc_seg: 80.9853, loss: 0.5012, grad_norm: 5.2138 2023-02-11 22:19:36,232 - mmseg - INFO - Iter [71650/160000] lr: 3.313e-05, eta: 5:04:46, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2712, decode.acc_seg: 89.3358, aux.loss_ce: 0.1993, aux.acc_seg: 80.9206, loss: 0.4704, grad_norm: 4.6715 2023-02-11 22:19:46,742 - mmseg - INFO - Iter [71700/160000] lr: 3.311e-05, eta: 5:04:36, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2945, decode.acc_seg: 88.5574, aux.loss_ce: 0.2003, aux.acc_seg: 80.6410, loss: 0.4948, grad_norm: 5.1186 2023-02-11 22:19:56,734 - mmseg - INFO - Iter [71750/160000] lr: 3.309e-05, eta: 5:04:25, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3001, decode.acc_seg: 88.1493, aux.loss_ce: 0.2070, aux.acc_seg: 79.9561, loss: 0.5071, grad_norm: 5.6834 2023-02-11 22:20:07,055 - mmseg - INFO - Iter [71800/160000] lr: 3.308e-05, eta: 5:04:14, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2842, decode.acc_seg: 89.0605, aux.loss_ce: 0.2001, aux.acc_seg: 80.9919, loss: 0.4843, grad_norm: 6.4753 2023-02-11 22:20:17,659 - mmseg - INFO - Iter [71850/160000] lr: 3.306e-05, eta: 5:04:04, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2814, decode.acc_seg: 89.1642, aux.loss_ce: 0.1945, aux.acc_seg: 81.4972, loss: 0.4760, grad_norm: 4.8517 2023-02-11 22:20:27,409 - mmseg - INFO - Iter [71900/160000] lr: 3.304e-05, eta: 5:03:53, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2843, decode.acc_seg: 89.0198, aux.loss_ce: 0.1988, aux.acc_seg: 81.2797, loss: 0.4831, grad_norm: 4.6131 2023-02-11 22:20:37,225 - mmseg - INFO - Iter [71950/160000] lr: 3.302e-05, eta: 5:03:42, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3063, decode.acc_seg: 88.1887, aux.loss_ce: 0.1995, aux.acc_seg: 80.8295, loss: 0.5058, grad_norm: 5.5176 
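The per-iteration entries above are internally consistent: the reported loss is the sum of decode.loss_ce and aux.loss_ce, and the lr column follows a linear decay of a 6e-05 base rate over the 160000-iteration budget (6e-05 x (1 - 64000/160000) = 3.6e-05, the value logged at iteration 64000, and 6e-05 x (1 - 72000/160000) = 3.3e-05 at iteration 72000). The sketch below checks both relations against values copied from the entries above; the schedule parameters (base_lr, min_lr, power, max_iters) are inferred from the logged lr values rather than read out of the run, and the helper name poly_lr is illustrative, not anything from mmseg or mmcv.

# Minimal consistency check against values copied from the log entries above.
# Assumed schedule, mirroring the common poly formula
#   lr(i) = (base_lr - min_lr) * (1 - i / max_iters) ** power + min_lr
base_lr, min_lr, power, max_iters = 6e-05, 0.0, 1.0, 160000

def poly_lr(cur_iter: int) -> float:
    return (base_lr - min_lr) * (1 - cur_iter / max_iters) ** power + min_lr

assert abs(poly_lr(64000) - 3.600e-05) < 1e-09  # "Iter [64000/160000] lr: 3.600e-05"
assert abs(poly_lr(72000) - 3.300e-05) < 1e-09  # "Iter [72000/160000] lr: 3.300e-05"

# The total loss is the sum of the decode-head and auxiliary-head CE terms,
# e.g. the iteration-65000 entry: 0.3312 + 0.2202 = 0.5514.
assert abs(0.3312 + 0.2202 - 0.5514) < 1e-06

For pulling these numbers out of the raw text (plotting loss curves or tabulating the periodic evaluations), a small regex-based reader is enough. The patterns below simply mirror the per-iteration and Iter(val) lines visible in this log; parse_log, TRAIN_RE and VAL_RE are hypothetical names for illustration, not mmseg API.

import re

# Matches the per-iteration training entries shown above; it scans the text as
# a whole, so it does not matter how the entries are wrapped across lines.
TRAIN_RE = re.compile(
    r"Iter \[(\d+)/\d+\] lr: (\S+), eta: \S+, time: [\d.]+, data_time: [\d.]+, "
    r"memory: \d+, decode\.loss_ce: ([\d.]+), decode\.acc_seg: [\d.]+, "
    r"aux\.loss_ce: ([\d.]+), aux\.acc_seg: [\d.]+, loss: ([\d.]+), grad_norm: ([\d.]+)"
)
# Matches the "Iter(val) [...] aAcc: ..., mIoU: ..., mAcc: ..." evaluation summary.
VAL_RE = re.compile(r"Iter\(val\) \[\d+\] aAcc: ([\d.]+), mIoU: ([\d.]+), mAcc: ([\d.]+)")

def parse_log(text: str):
    """Return (train_records, val_records) scraped from an mmseg text log."""
    train = [
        {"iter": int(it), "lr": float(lr), "decode_loss": float(d),
         "aux_loss": float(a), "loss": float(l), "grad_norm": float(g)}
        for it, lr, d, a, l, g in TRAIN_RE.findall(text)
    ]
    val = [
        {"aAcc": float(aacc), "mIoU": float(miou), "mAcc": float(macc)}
        for aacc, miou, macc in VAL_RE.findall(text)
    ]
    return train, val

Applied to this excerpt, the validation list would hold the single summary produced at iteration 64000 (aAcc 0.7996, mIoU 0.4168, mAcc 0.5556), the same evaluation after which best_mIoU_iter_64000.pth was saved.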
2023-02-11 22:20:47,156 - mmseg - INFO - Saving checkpoint at 72000 iterations 2023-02-11 22:20:47,840 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:20:47,840 - mmseg - INFO - Iter [72000/160000] lr: 3.300e-05, eta: 5:03:32, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3077, decode.acc_seg: 88.1697, aux.loss_ce: 0.2029, aux.acc_seg: 80.3950, loss: 0.5106, grad_norm: 4.6479 2023-02-11 22:20:57,829 - mmseg - INFO - Iter [72050/160000] lr: 3.298e-05, eta: 5:03:21, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2900, decode.acc_seg: 88.6770, aux.loss_ce: 0.2014, aux.acc_seg: 80.6270, loss: 0.4914, grad_norm: 4.8473 2023-02-11 22:21:07,813 - mmseg - INFO - Iter [72100/160000] lr: 3.296e-05, eta: 5:03:10, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3067, decode.acc_seg: 88.1302, aux.loss_ce: 0.2112, aux.acc_seg: 80.3114, loss: 0.5179, grad_norm: 6.1450 2023-02-11 22:21:17,560 - mmseg - INFO - Iter [72150/160000] lr: 3.294e-05, eta: 5:02:59, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2972, decode.acc_seg: 88.6130, aux.loss_ce: 0.2047, aux.acc_seg: 80.5665, loss: 0.5019, grad_norm: 4.9981 2023-02-11 22:21:27,679 - mmseg - INFO - Iter [72200/160000] lr: 3.293e-05, eta: 5:02:48, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2808, decode.acc_seg: 89.2197, aux.loss_ce: 0.1944, aux.acc_seg: 81.5903, loss: 0.4753, grad_norm: 4.8315 2023-02-11 22:21:37,459 - mmseg - INFO - Iter [72250/160000] lr: 3.291e-05, eta: 5:02:37, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2923, decode.acc_seg: 88.6328, aux.loss_ce: 0.2049, aux.acc_seg: 80.3920, loss: 0.4972, grad_norm: 4.9783 2023-02-11 22:21:47,157 - mmseg - INFO - Iter [72300/160000] lr: 3.289e-05, eta: 5:02:26, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2958, decode.acc_seg: 88.3118, aux.loss_ce: 0.1999, aux.acc_seg: 80.7922, loss: 0.4957, grad_norm: 6.1866 2023-02-11 22:21:57,030 - mmseg - INFO - Iter [72350/160000] lr: 3.287e-05, eta: 5:02:15, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2679, decode.acc_seg: 89.6821, aux.loss_ce: 0.1990, aux.acc_seg: 80.9170, loss: 0.4669, grad_norm: 4.3814 2023-02-11 22:22:06,944 - mmseg - INFO - Iter [72400/160000] lr: 3.285e-05, eta: 5:02:04, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3043, decode.acc_seg: 88.6331, aux.loss_ce: 0.2056, aux.acc_seg: 81.1972, loss: 0.5099, grad_norm: 6.3919 2023-02-11 22:22:16,821 - mmseg - INFO - Iter [72450/160000] lr: 3.283e-05, eta: 5:01:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2794, decode.acc_seg: 89.1717, aux.loss_ce: 0.1953, aux.acc_seg: 81.5435, loss: 0.4747, grad_norm: 5.9608 2023-02-11 22:22:26,955 - mmseg - INFO - Iter [72500/160000] lr: 3.281e-05, eta: 5:01:42, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2954, decode.acc_seg: 88.6314, aux.loss_ce: 0.2013, aux.acc_seg: 80.6884, loss: 0.4967, grad_norm: 4.7372 2023-02-11 22:22:37,118 - mmseg - INFO - Iter [72550/160000] lr: 3.279e-05, eta: 5:01:31, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2734, decode.acc_seg: 89.2771, aux.loss_ce: 0.1920, aux.acc_seg: 81.3358, loss: 0.4654, grad_norm: 4.5163 2023-02-11 22:22:47,137 - mmseg - INFO - Iter [72600/160000] lr: 3.278e-05, eta: 5:01:20, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3067, decode.acc_seg: 88.1334, aux.loss_ce: 0.2038, aux.acc_seg: 80.5917, loss: 0.5106, grad_norm: 5.4942 2023-02-11 
22:22:56,829 - mmseg - INFO - Iter [72650/160000] lr: 3.276e-05, eta: 5:01:09, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2904, decode.acc_seg: 88.7416, aux.loss_ce: 0.2051, aux.acc_seg: 80.4418, loss: 0.4955, grad_norm: 4.8326 2023-02-11 22:23:06,679 - mmseg - INFO - Iter [72700/160000] lr: 3.274e-05, eta: 5:00:58, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2795, decode.acc_seg: 89.1946, aux.loss_ce: 0.2000, aux.acc_seg: 81.1220, loss: 0.4795, grad_norm: 4.7499 2023-02-11 22:23:16,830 - mmseg - INFO - Iter [72750/160000] lr: 3.272e-05, eta: 5:00:47, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2975, decode.acc_seg: 88.6063, aux.loss_ce: 0.2023, aux.acc_seg: 80.9262, loss: 0.4998, grad_norm: 5.1364 2023-02-11 22:23:26,515 - mmseg - INFO - Iter [72800/160000] lr: 3.270e-05, eta: 5:00:36, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2891, decode.acc_seg: 88.7930, aux.loss_ce: 0.1959, aux.acc_seg: 81.1807, loss: 0.4850, grad_norm: 5.3215 2023-02-11 22:23:38,697 - mmseg - INFO - Iter [72850/160000] lr: 3.268e-05, eta: 5:00:28, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2914, decode.acc_seg: 88.9679, aux.loss_ce: 0.2090, aux.acc_seg: 80.2765, loss: 0.5004, grad_norm: 4.6775 2023-02-11 22:23:48,725 - mmseg - INFO - Iter [72900/160000] lr: 3.266e-05, eta: 5:00:18, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2938, decode.acc_seg: 88.8801, aux.loss_ce: 0.1975, aux.acc_seg: 81.6244, loss: 0.4913, grad_norm: 5.3191 2023-02-11 22:23:58,787 - mmseg - INFO - Iter [72950/160000] lr: 3.264e-05, eta: 5:00:07, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2930, decode.acc_seg: 89.0924, aux.loss_ce: 0.2066, aux.acc_seg: 80.6881, loss: 0.4996, grad_norm: 5.2469 2023-02-11 22:24:08,956 - mmseg - INFO - Saving checkpoint at 73000 iterations 2023-02-11 22:24:09,653 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:24:09,653 - mmseg - INFO - Iter [73000/160000] lr: 3.263e-05, eta: 4:59:57, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2865, decode.acc_seg: 88.7587, aux.loss_ce: 0.1914, aux.acc_seg: 81.1949, loss: 0.4779, grad_norm: 4.7540 2023-02-11 22:24:19,682 - mmseg - INFO - Iter [73050/160000] lr: 3.261e-05, eta: 4:59:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2810, decode.acc_seg: 89.2782, aux.loss_ce: 0.1971, aux.acc_seg: 81.1080, loss: 0.4781, grad_norm: 4.9193 2023-02-11 22:24:29,403 - mmseg - INFO - Iter [73100/160000] lr: 3.259e-05, eta: 4:59:35, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2882, decode.acc_seg: 88.7621, aux.loss_ce: 0.2090, aux.acc_seg: 79.9265, loss: 0.4972, grad_norm: 5.3001 2023-02-11 22:24:39,353 - mmseg - INFO - Iter [73150/160000] lr: 3.257e-05, eta: 4:59:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2847, decode.acc_seg: 88.8985, aux.loss_ce: 0.1925, aux.acc_seg: 81.5430, loss: 0.4772, grad_norm: 5.0578 2023-02-11 22:24:49,420 - mmseg - INFO - Iter [73200/160000] lr: 3.255e-05, eta: 4:59:13, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2807, decode.acc_seg: 89.0364, aux.loss_ce: 0.1893, aux.acc_seg: 81.3154, loss: 0.4699, grad_norm: 4.9640 2023-02-11 22:24:59,707 - mmseg - INFO - Iter [73250/160000] lr: 3.253e-05, eta: 4:59:03, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2762, decode.acc_seg: 89.0521, aux.loss_ce: 0.1898, aux.acc_seg: 81.8062, loss: 0.4661, grad_norm: 4.3938 2023-02-11 
22:25:10,435 - mmseg - INFO - Iter [73300/160000] lr: 3.251e-05, eta: 4:58:53, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2782, decode.acc_seg: 89.5414, aux.loss_ce: 0.1956, aux.acc_seg: 81.3200, loss: 0.4738, grad_norm: 4.7541 2023-02-11 22:25:20,724 - mmseg - INFO - Iter [73350/160000] lr: 3.249e-05, eta: 4:58:43, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2888, decode.acc_seg: 89.0693, aux.loss_ce: 0.1977, aux.acc_seg: 81.4644, loss: 0.4864, grad_norm: 5.1707 2023-02-11 22:25:30,903 - mmseg - INFO - Iter [73400/160000] lr: 3.248e-05, eta: 4:58:32, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3004, decode.acc_seg: 88.6859, aux.loss_ce: 0.2098, aux.acc_seg: 80.2730, loss: 0.5101, grad_norm: 7.6108 2023-02-11 22:25:41,169 - mmseg - INFO - Iter [73450/160000] lr: 3.246e-05, eta: 4:58:22, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2870, decode.acc_seg: 89.0448, aux.loss_ce: 0.1957, aux.acc_seg: 81.8014, loss: 0.4828, grad_norm: 4.7171 2023-02-11 22:25:51,115 - mmseg - INFO - Iter [73500/160000] lr: 3.244e-05, eta: 4:58:11, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2763, decode.acc_seg: 89.4025, aux.loss_ce: 0.1979, aux.acc_seg: 81.0295, loss: 0.4742, grad_norm: 5.4809 2023-02-11 22:26:00,979 - mmseg - INFO - Iter [73550/160000] lr: 3.242e-05, eta: 4:58:00, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2809, decode.acc_seg: 88.8876, aux.loss_ce: 0.1978, aux.acc_seg: 80.6959, loss: 0.4787, grad_norm: 5.3382 2023-02-11 22:26:10,630 - mmseg - INFO - Iter [73600/160000] lr: 3.240e-05, eta: 4:57:48, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2747, decode.acc_seg: 88.9996, aux.loss_ce: 0.1945, aux.acc_seg: 80.6876, loss: 0.4692, grad_norm: 5.0994 2023-02-11 22:26:20,416 - mmseg - INFO - Iter [73650/160000] lr: 3.238e-05, eta: 4:57:37, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2884, decode.acc_seg: 88.8666, aux.loss_ce: 0.2037, aux.acc_seg: 80.5187, loss: 0.4921, grad_norm: 5.1432 2023-02-11 22:26:30,391 - mmseg - INFO - Iter [73700/160000] lr: 3.236e-05, eta: 4:57:26, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2827, decode.acc_seg: 89.2194, aux.loss_ce: 0.1945, aux.acc_seg: 81.4156, loss: 0.4773, grad_norm: 4.9092 2023-02-11 22:26:40,199 - mmseg - INFO - Iter [73750/160000] lr: 3.234e-05, eta: 4:57:15, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3032, decode.acc_seg: 88.2736, aux.loss_ce: 0.2064, aux.acc_seg: 80.0514, loss: 0.5097, grad_norm: 4.8419 2023-02-11 22:26:50,042 - mmseg - INFO - Iter [73800/160000] lr: 3.233e-05, eta: 4:57:04, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2821, decode.acc_seg: 89.0808, aux.loss_ce: 0.2003, aux.acc_seg: 80.8785, loss: 0.4825, grad_norm: 5.3792 2023-02-11 22:27:00,259 - mmseg - INFO - Iter [73850/160000] lr: 3.231e-05, eta: 4:56:54, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2801, decode.acc_seg: 89.2207, aux.loss_ce: 0.2030, aux.acc_seg: 80.6680, loss: 0.4831, grad_norm: 7.1337 2023-02-11 22:27:10,905 - mmseg - INFO - Iter [73900/160000] lr: 3.229e-05, eta: 4:56:44, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2839, decode.acc_seg: 89.1946, aux.loss_ce: 0.2025, aux.acc_seg: 81.0674, loss: 0.4865, grad_norm: 4.8315 2023-02-11 22:27:20,673 - mmseg - INFO - Iter [73950/160000] lr: 3.227e-05, eta: 4:56:33, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2824, decode.acc_seg: 88.8523, aux.loss_ce: 
0.1977, aux.acc_seg: 80.9024, loss: 0.4800, grad_norm: 4.3557 2023-02-11 22:27:30,535 - mmseg - INFO - Saving checkpoint at 74000 iterations 2023-02-11 22:27:31,209 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:27:31,209 - mmseg - INFO - Iter [74000/160000] lr: 3.225e-05, eta: 4:56:22, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2767, decode.acc_seg: 89.2825, aux.loss_ce: 0.1986, aux.acc_seg: 80.7446, loss: 0.4753, grad_norm: 4.8930 2023-02-11 22:27:41,281 - mmseg - INFO - Iter [74050/160000] lr: 3.223e-05, eta: 4:56:12, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2918, decode.acc_seg: 88.9039, aux.loss_ce: 0.2036, aux.acc_seg: 80.8586, loss: 0.4954, grad_norm: 5.5748 2023-02-11 22:27:53,635 - mmseg - INFO - Iter [74100/160000] lr: 3.221e-05, eta: 4:56:04, time: 0.247, data_time: 0.048, memory: 7748, decode.loss_ce: 0.3094, decode.acc_seg: 87.7517, aux.loss_ce: 0.2072, aux.acc_seg: 80.0013, loss: 0.5166, grad_norm: 6.1644 2023-02-11 22:28:03,388 - mmseg - INFO - Iter [74150/160000] lr: 3.219e-05, eta: 4:55:53, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2847, decode.acc_seg: 89.4234, aux.loss_ce: 0.1983, aux.acc_seg: 81.4673, loss: 0.4831, grad_norm: 4.9132 2023-02-11 22:28:13,523 - mmseg - INFO - Iter [74200/160000] lr: 3.218e-05, eta: 4:55:43, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2787, decode.acc_seg: 89.3468, aux.loss_ce: 0.1937, aux.acc_seg: 81.5193, loss: 0.4724, grad_norm: 4.8299 2023-02-11 22:28:23,610 - mmseg - INFO - Iter [74250/160000] lr: 3.216e-05, eta: 4:55:32, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2821, decode.acc_seg: 88.8552, aux.loss_ce: 0.1956, aux.acc_seg: 80.9687, loss: 0.4777, grad_norm: 5.0403 2023-02-11 22:28:33,813 - mmseg - INFO - Iter [74300/160000] lr: 3.214e-05, eta: 4:55:21, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2864, decode.acc_seg: 88.9154, aux.loss_ce: 0.2000, aux.acc_seg: 80.7400, loss: 0.4863, grad_norm: 4.8483 2023-02-11 22:28:44,020 - mmseg - INFO - Iter [74350/160000] lr: 3.212e-05, eta: 4:55:11, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2745, decode.acc_seg: 89.5087, aux.loss_ce: 0.1891, aux.acc_seg: 82.0470, loss: 0.4636, grad_norm: 4.9613 2023-02-11 22:28:54,262 - mmseg - INFO - Iter [74400/160000] lr: 3.210e-05, eta: 4:55:00, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2848, decode.acc_seg: 88.8797, aux.loss_ce: 0.1961, aux.acc_seg: 80.8816, loss: 0.4809, grad_norm: 5.2038 2023-02-11 22:29:04,062 - mmseg - INFO - Iter [74450/160000] lr: 3.208e-05, eta: 4:54:49, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2779, decode.acc_seg: 89.3567, aux.loss_ce: 0.1941, aux.acc_seg: 81.4011, loss: 0.4721, grad_norm: 5.8706 2023-02-11 22:29:13,723 - mmseg - INFO - Iter [74500/160000] lr: 3.206e-05, eta: 4:54:38, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2817, decode.acc_seg: 89.3490, aux.loss_ce: 0.1950, aux.acc_seg: 81.5649, loss: 0.4767, grad_norm: 5.1463 2023-02-11 22:29:23,492 - mmseg - INFO - Iter [74550/160000] lr: 3.204e-05, eta: 4:54:27, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2841, decode.acc_seg: 88.9298, aux.loss_ce: 0.1989, aux.acc_seg: 80.8887, loss: 0.4830, grad_norm: 4.8200 2023-02-11 22:29:33,814 - mmseg - INFO - Iter [74600/160000] lr: 3.203e-05, eta: 4:54:16, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2970, decode.acc_seg: 88.6197, aux.loss_ce: 0.1988, 
aux.acc_seg: 81.0888, loss: 0.4959, grad_norm: 5.4756 2023-02-11 22:29:43,639 - mmseg - INFO - Iter [74650/160000] lr: 3.201e-05, eta: 4:54:05, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2844, decode.acc_seg: 88.9983, aux.loss_ce: 0.1930, aux.acc_seg: 81.4452, loss: 0.4774, grad_norm: 4.7573 2023-02-11 22:29:53,526 - mmseg - INFO - Iter [74700/160000] lr: 3.199e-05, eta: 4:53:54, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2832, decode.acc_seg: 89.2667, aux.loss_ce: 0.1965, aux.acc_seg: 81.7589, loss: 0.4797, grad_norm: 5.1852 2023-02-11 22:30:03,639 - mmseg - INFO - Iter [74750/160000] lr: 3.197e-05, eta: 4:53:44, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2856, decode.acc_seg: 88.8081, aux.loss_ce: 0.1996, aux.acc_seg: 81.0254, loss: 0.4852, grad_norm: 5.2286 2023-02-11 22:30:13,623 - mmseg - INFO - Iter [74800/160000] lr: 3.195e-05, eta: 4:53:33, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2756, decode.acc_seg: 89.2376, aux.loss_ce: 0.1956, aux.acc_seg: 81.6404, loss: 0.4713, grad_norm: 4.8445 2023-02-11 22:30:23,551 - mmseg - INFO - Iter [74850/160000] lr: 3.193e-05, eta: 4:53:22, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2798, decode.acc_seg: 89.4565, aux.loss_ce: 0.2016, aux.acc_seg: 80.9894, loss: 0.4813, grad_norm: 4.8723 2023-02-11 22:30:33,906 - mmseg - INFO - Iter [74900/160000] lr: 3.191e-05, eta: 4:53:11, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2985, decode.acc_seg: 88.5340, aux.loss_ce: 0.2035, aux.acc_seg: 80.7390, loss: 0.5021, grad_norm: 5.4382 2023-02-11 22:30:44,107 - mmseg - INFO - Iter [74950/160000] lr: 3.189e-05, eta: 4:53:01, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3045, decode.acc_seg: 88.1290, aux.loss_ce: 0.2043, aux.acc_seg: 80.5790, loss: 0.5088, grad_norm: 5.3845 2023-02-11 22:30:53,928 - mmseg - INFO - Saving checkpoint at 75000 iterations 2023-02-11 22:30:54,621 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:30:54,621 - mmseg - INFO - Iter [75000/160000] lr: 3.188e-05, eta: 4:52:51, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2719, decode.acc_seg: 89.5304, aux.loss_ce: 0.1921, aux.acc_seg: 81.6370, loss: 0.4640, grad_norm: 5.5428 2023-02-11 22:31:04,368 - mmseg - INFO - Iter [75050/160000] lr: 3.186e-05, eta: 4:52:40, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2786, decode.acc_seg: 89.5920, aux.loss_ce: 0.2067, aux.acc_seg: 80.6991, loss: 0.4853, grad_norm: 5.2017 2023-02-11 22:31:14,266 - mmseg - INFO - Iter [75100/160000] lr: 3.184e-05, eta: 4:52:29, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2779, decode.acc_seg: 89.4526, aux.loss_ce: 0.1928, aux.acc_seg: 81.7163, loss: 0.4707, grad_norm: 5.3378 2023-02-11 22:31:24,143 - mmseg - INFO - Iter [75150/160000] lr: 3.182e-05, eta: 4:52:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2917, decode.acc_seg: 88.7768, aux.loss_ce: 0.2032, aux.acc_seg: 80.7164, loss: 0.4950, grad_norm: 5.3367 2023-02-11 22:31:33,947 - mmseg - INFO - Iter [75200/160000] lr: 3.180e-05, eta: 4:52:07, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2798, decode.acc_seg: 89.1080, aux.loss_ce: 0.1940, aux.acc_seg: 81.4746, loss: 0.4738, grad_norm: 5.9092 2023-02-11 22:31:43,835 - mmseg - INFO - Iter [75250/160000] lr: 3.178e-05, eta: 4:51:56, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2894, decode.acc_seg: 88.7530, aux.loss_ce: 0.1990, 
aux.acc_seg: 81.1371, loss: 0.4884, grad_norm: 5.1435 2023-02-11 22:31:53,751 - mmseg - INFO - Iter [75300/160000] lr: 3.176e-05, eta: 4:51:45, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2916, decode.acc_seg: 88.4850, aux.loss_ce: 0.1988, aux.acc_seg: 80.7429, loss: 0.4904, grad_norm: 5.4273 2023-02-11 22:32:03,882 - mmseg - INFO - Iter [75350/160000] lr: 3.174e-05, eta: 4:51:34, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2819, decode.acc_seg: 89.1061, aux.loss_ce: 0.1979, aux.acc_seg: 81.0553, loss: 0.4799, grad_norm: 4.5917 2023-02-11 22:32:16,035 - mmseg - INFO - Iter [75400/160000] lr: 3.173e-05, eta: 4:51:26, time: 0.243, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2852, decode.acc_seg: 89.0400, aux.loss_ce: 0.1913, aux.acc_seg: 81.4899, loss: 0.4766, grad_norm: 4.9521 2023-02-11 22:32:26,357 - mmseg - INFO - Iter [75450/160000] lr: 3.171e-05, eta: 4:51:16, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2844, decode.acc_seg: 89.0371, aux.loss_ce: 0.2007, aux.acc_seg: 80.7202, loss: 0.4850, grad_norm: 5.1688 2023-02-11 22:32:35,988 - mmseg - INFO - Iter [75500/160000] lr: 3.169e-05, eta: 4:51:05, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2858, decode.acc_seg: 89.3332, aux.loss_ce: 0.1991, aux.acc_seg: 81.0940, loss: 0.4850, grad_norm: 6.3126 2023-02-11 22:32:45,854 - mmseg - INFO - Iter [75550/160000] lr: 3.167e-05, eta: 4:50:54, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2862, decode.acc_seg: 88.6742, aux.loss_ce: 0.1937, aux.acc_seg: 81.0486, loss: 0.4800, grad_norm: 5.0167 2023-02-11 22:32:56,008 - mmseg - INFO - Iter [75600/160000] lr: 3.165e-05, eta: 4:50:43, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3028, decode.acc_seg: 88.4414, aux.loss_ce: 0.2082, aux.acc_seg: 80.3465, loss: 0.5110, grad_norm: 4.6029 2023-02-11 22:33:06,259 - mmseg - INFO - Iter [75650/160000] lr: 3.163e-05, eta: 4:50:33, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2704, decode.acc_seg: 89.8021, aux.loss_ce: 0.1932, aux.acc_seg: 81.7163, loss: 0.4636, grad_norm: 4.4519 2023-02-11 22:33:16,321 - mmseg - INFO - Iter [75700/160000] lr: 3.161e-05, eta: 4:50:22, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2736, decode.acc_seg: 89.3579, aux.loss_ce: 0.1939, aux.acc_seg: 81.0805, loss: 0.4675, grad_norm: 4.6638 2023-02-11 22:33:26,402 - mmseg - INFO - Iter [75750/160000] lr: 3.159e-05, eta: 4:50:11, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2778, decode.acc_seg: 89.1980, aux.loss_ce: 0.1894, aux.acc_seg: 81.7669, loss: 0.4672, grad_norm: 4.7116 2023-02-11 22:33:36,680 - mmseg - INFO - Iter [75800/160000] lr: 3.158e-05, eta: 4:50:01, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2692, decode.acc_seg: 89.3409, aux.loss_ce: 0.1890, aux.acc_seg: 81.5577, loss: 0.4582, grad_norm: 5.4643 2023-02-11 22:33:46,436 - mmseg - INFO - Iter [75850/160000] lr: 3.156e-05, eta: 4:49:50, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2931, decode.acc_seg: 88.9617, aux.loss_ce: 0.2008, aux.acc_seg: 80.9591, loss: 0.4938, grad_norm: 5.7279 2023-02-11 22:33:56,491 - mmseg - INFO - Iter [75900/160000] lr: 3.154e-05, eta: 4:49:39, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2555, decode.acc_seg: 90.0107, aux.loss_ce: 0.1828, aux.acc_seg: 82.2517, loss: 0.4383, grad_norm: 4.3712 2023-02-11 22:34:06,582 - mmseg - INFO - Iter [75950/160000] lr: 3.152e-05, eta: 4:49:28, time: 0.202, data_time: 0.004, memory: 
7748, decode.loss_ce: 0.2888, decode.acc_seg: 89.0338, aux.loss_ce: 0.2025, aux.acc_seg: 80.7346, loss: 0.4914, grad_norm: 6.0986 2023-02-11 22:34:16,836 - mmseg - INFO - Saving checkpoint at 76000 iterations 2023-02-11 22:34:17,507 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:34:17,507 - mmseg - INFO - Iter [76000/160000] lr: 3.150e-05, eta: 4:49:19, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2859, decode.acc_seg: 88.9896, aux.loss_ce: 0.2021, aux.acc_seg: 80.7625, loss: 0.4880, grad_norm: 4.7874 2023-02-11 22:34:27,699 - mmseg - INFO - Iter [76050/160000] lr: 3.148e-05, eta: 4:49:08, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3082, decode.acc_seg: 88.1175, aux.loss_ce: 0.2051, aux.acc_seg: 80.3859, loss: 0.5134, grad_norm: 5.9842 2023-02-11 22:34:37,533 - mmseg - INFO - Iter [76100/160000] lr: 3.146e-05, eta: 4:48:57, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2796, decode.acc_seg: 88.8714, aux.loss_ce: 0.1950, aux.acc_seg: 81.1526, loss: 0.4746, grad_norm: 4.0701 2023-02-11 22:34:47,847 - mmseg - INFO - Iter [76150/160000] lr: 3.144e-05, eta: 4:48:47, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2810, decode.acc_seg: 89.1964, aux.loss_ce: 0.1985, aux.acc_seg: 81.1197, loss: 0.4794, grad_norm: 4.5465 2023-02-11 22:34:57,832 - mmseg - INFO - Iter [76200/160000] lr: 3.143e-05, eta: 4:48:36, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2894, decode.acc_seg: 89.0945, aux.loss_ce: 0.1975, aux.acc_seg: 81.4620, loss: 0.4869, grad_norm: 5.7502 2023-02-11 22:35:08,151 - mmseg - INFO - Iter [76250/160000] lr: 3.141e-05, eta: 4:48:26, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2741, decode.acc_seg: 89.2241, aux.loss_ce: 0.1947, aux.acc_seg: 81.2052, loss: 0.4688, grad_norm: 4.7234 2023-02-11 22:35:18,842 - mmseg - INFO - Iter [76300/160000] lr: 3.139e-05, eta: 4:48:16, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2890, decode.acc_seg: 88.7867, aux.loss_ce: 0.1956, aux.acc_seg: 81.1862, loss: 0.4846, grad_norm: 4.9208 2023-02-11 22:35:28,651 - mmseg - INFO - Iter [76350/160000] lr: 3.137e-05, eta: 4:48:05, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3030, decode.acc_seg: 88.4312, aux.loss_ce: 0.2124, aux.acc_seg: 80.4850, loss: 0.5154, grad_norm: 6.2009 2023-02-11 22:35:38,906 - mmseg - INFO - Iter [76400/160000] lr: 3.135e-05, eta: 4:47:54, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2878, decode.acc_seg: 88.9581, aux.loss_ce: 0.1991, aux.acc_seg: 81.0073, loss: 0.4869, grad_norm: 5.9942 2023-02-11 22:35:48,892 - mmseg - INFO - Iter [76450/160000] lr: 3.133e-05, eta: 4:47:43, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2782, decode.acc_seg: 89.0492, aux.loss_ce: 0.1926, aux.acc_seg: 81.2136, loss: 0.4708, grad_norm: 4.8879 2023-02-11 22:35:59,306 - mmseg - INFO - Iter [76500/160000] lr: 3.131e-05, eta: 4:47:33, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2914, decode.acc_seg: 88.8806, aux.loss_ce: 0.2012, aux.acc_seg: 80.6938, loss: 0.4926, grad_norm: 5.5865 2023-02-11 22:36:09,293 - mmseg - INFO - Iter [76550/160000] lr: 3.129e-05, eta: 4:47:22, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2938, decode.acc_seg: 88.6649, aux.loss_ce: 0.1986, aux.acc_seg: 80.8002, loss: 0.4924, grad_norm: 5.3140 2023-02-11 22:36:19,087 - mmseg - INFO - Iter [76600/160000] lr: 3.128e-05, eta: 4:47:11, time: 0.196, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.2746, decode.acc_seg: 89.5680, aux.loss_ce: 0.1993, aux.acc_seg: 80.6280, loss: 0.4739, grad_norm: 5.0952 2023-02-11 22:36:31,273 - mmseg - INFO - Iter [76650/160000] lr: 3.126e-05, eta: 4:47:04, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2800, decode.acc_seg: 88.9812, aux.loss_ce: 0.2005, aux.acc_seg: 80.6643, loss: 0.4805, grad_norm: 5.1091 2023-02-11 22:36:40,914 - mmseg - INFO - Iter [76700/160000] lr: 3.124e-05, eta: 4:46:52, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2853, decode.acc_seg: 89.2022, aux.loss_ce: 0.1972, aux.acc_seg: 81.7026, loss: 0.4825, grad_norm: 5.2190 2023-02-11 22:36:51,105 - mmseg - INFO - Iter [76750/160000] lr: 3.122e-05, eta: 4:46:42, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2803, decode.acc_seg: 88.9871, aux.loss_ce: 0.1922, aux.acc_seg: 81.2240, loss: 0.4725, grad_norm: 5.0784 2023-02-11 22:37:01,247 - mmseg - INFO - Iter [76800/160000] lr: 3.120e-05, eta: 4:46:31, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2776, decode.acc_seg: 89.5419, aux.loss_ce: 0.1984, aux.acc_seg: 81.3933, loss: 0.4760, grad_norm: 5.2777 2023-02-11 22:37:11,390 - mmseg - INFO - Iter [76850/160000] lr: 3.118e-05, eta: 4:46:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2869, decode.acc_seg: 88.8208, aux.loss_ce: 0.1904, aux.acc_seg: 81.9966, loss: 0.4773, grad_norm: 4.8289 2023-02-11 22:37:21,265 - mmseg - INFO - Iter [76900/160000] lr: 3.116e-05, eta: 4:46:10, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2715, decode.acc_seg: 89.5479, aux.loss_ce: 0.1856, aux.acc_seg: 81.8935, loss: 0.4572, grad_norm: 4.6790 2023-02-11 22:37:32,036 - mmseg - INFO - Iter [76950/160000] lr: 3.114e-05, eta: 4:46:00, time: 0.215, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2821, decode.acc_seg: 89.3604, aux.loss_ce: 0.1933, aux.acc_seg: 81.8933, loss: 0.4754, grad_norm: 5.7430 2023-02-11 22:37:42,055 - mmseg - INFO - Saving checkpoint at 77000 iterations 2023-02-11 22:37:42,739 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:37:42,739 - mmseg - INFO - Iter [77000/160000] lr: 3.113e-05, eta: 4:45:50, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2952, decode.acc_seg: 88.5093, aux.loss_ce: 0.1974, aux.acc_seg: 80.8428, loss: 0.4926, grad_norm: 5.4199 2023-02-11 22:37:53,332 - mmseg - INFO - Iter [77050/160000] lr: 3.111e-05, eta: 4:45:40, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2851, decode.acc_seg: 89.0606, aux.loss_ce: 0.1962, aux.acc_seg: 81.2134, loss: 0.4813, grad_norm: 4.7857 2023-02-11 22:38:03,676 - mmseg - INFO - Iter [77100/160000] lr: 3.109e-05, eta: 4:45:30, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.3015, decode.acc_seg: 88.5043, aux.loss_ce: 0.2040, aux.acc_seg: 80.5171, loss: 0.5056, grad_norm: 6.3399 2023-02-11 22:38:14,223 - mmseg - INFO - Iter [77150/160000] lr: 3.107e-05, eta: 4:45:20, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2894, decode.acc_seg: 89.0724, aux.loss_ce: 0.1975, aux.acc_seg: 81.5762, loss: 0.4869, grad_norm: 5.2895 2023-02-11 22:38:24,012 - mmseg - INFO - Iter [77200/160000] lr: 3.105e-05, eta: 4:45:09, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2700, decode.acc_seg: 89.8003, aux.loss_ce: 0.1932, aux.acc_seg: 81.7342, loss: 0.4631, grad_norm: 6.0113 2023-02-11 22:38:34,283 - mmseg - INFO - Iter [77250/160000] lr: 3.103e-05, eta: 4:44:58, time: 0.205, data_time: 0.005, memory: 7748, 
decode.loss_ce: 0.2718, decode.acc_seg: 89.6156, aux.loss_ce: 0.1994, aux.acc_seg: 81.1251, loss: 0.4712, grad_norm: 4.9493 2023-02-11 22:38:44,142 - mmseg - INFO - Iter [77300/160000] lr: 3.101e-05, eta: 4:44:47, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2878, decode.acc_seg: 88.5869, aux.loss_ce: 0.1967, aux.acc_seg: 81.2633, loss: 0.4845, grad_norm: 4.7704 2023-02-11 22:38:53,933 - mmseg - INFO - Iter [77350/160000] lr: 3.099e-05, eta: 4:44:36, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2883, decode.acc_seg: 89.0360, aux.loss_ce: 0.1982, aux.acc_seg: 81.1844, loss: 0.4865, grad_norm: 5.2918 2023-02-11 22:39:03,840 - mmseg - INFO - Iter [77400/160000] lr: 3.098e-05, eta: 4:44:25, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2791, decode.acc_seg: 89.5678, aux.loss_ce: 0.2043, aux.acc_seg: 81.0569, loss: 0.4834, grad_norm: 4.7643 2023-02-11 22:39:13,681 - mmseg - INFO - Iter [77450/160000] lr: 3.096e-05, eta: 4:44:14, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2808, decode.acc_seg: 88.9014, aux.loss_ce: 0.1926, aux.acc_seg: 81.1781, loss: 0.4734, grad_norm: 4.6257 2023-02-11 22:39:23,466 - mmseg - INFO - Iter [77500/160000] lr: 3.094e-05, eta: 4:44:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2712, decode.acc_seg: 89.3106, aux.loss_ce: 0.1988, aux.acc_seg: 81.1002, loss: 0.4700, grad_norm: 5.0702 2023-02-11 22:39:33,262 - mmseg - INFO - Iter [77550/160000] lr: 3.092e-05, eta: 4:43:52, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2941, decode.acc_seg: 88.8110, aux.loss_ce: 0.2025, aux.acc_seg: 80.6769, loss: 0.4966, grad_norm: 6.4514 2023-02-11 22:39:43,008 - mmseg - INFO - Iter [77600/160000] lr: 3.090e-05, eta: 4:43:41, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2770, decode.acc_seg: 89.0848, aux.loss_ce: 0.1886, aux.acc_seg: 81.8420, loss: 0.4656, grad_norm: 4.3557 2023-02-11 22:39:53,110 - mmseg - INFO - Iter [77650/160000] lr: 3.088e-05, eta: 4:43:31, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3076, decode.acc_seg: 88.6980, aux.loss_ce: 0.2154, aux.acc_seg: 79.9032, loss: 0.5230, grad_norm: 5.2349 2023-02-11 22:40:03,114 - mmseg - INFO - Iter [77700/160000] lr: 3.086e-05, eta: 4:43:20, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2637, decode.acc_seg: 89.8388, aux.loss_ce: 0.1945, aux.acc_seg: 81.3384, loss: 0.4582, grad_norm: 4.1504 2023-02-11 22:40:13,454 - mmseg - INFO - Iter [77750/160000] lr: 3.084e-05, eta: 4:43:09, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2920, decode.acc_seg: 88.3974, aux.loss_ce: 0.2079, aux.acc_seg: 79.6125, loss: 0.4999, grad_norm: 5.1437 2023-02-11 22:40:23,649 - mmseg - INFO - Iter [77800/160000] lr: 3.083e-05, eta: 4:42:59, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2766, decode.acc_seg: 89.3244, aux.loss_ce: 0.1973, aux.acc_seg: 81.5521, loss: 0.4739, grad_norm: 5.0261 2023-02-11 22:40:33,920 - mmseg - INFO - Iter [77850/160000] lr: 3.081e-05, eta: 4:42:49, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2848, decode.acc_seg: 89.0054, aux.loss_ce: 0.1915, aux.acc_seg: 81.5401, loss: 0.4763, grad_norm: 5.4182 2023-02-11 22:40:46,298 - mmseg - INFO - Iter [77900/160000] lr: 3.079e-05, eta: 4:42:41, time: 0.248, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2885, decode.acc_seg: 88.6750, aux.loss_ce: 0.2042, aux.acc_seg: 80.7106, loss: 0.4928, grad_norm: 6.5912 2023-02-11 22:40:56,722 - mmseg - INFO - Iter [77950/160000] 
lr: 3.077e-05, eta: 4:42:31, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2842, decode.acc_seg: 88.9955, aux.loss_ce: 0.1950, aux.acc_seg: 81.3692, loss: 0.4793, grad_norm: 5.4938 2023-02-11 22:41:06,848 - mmseg - INFO - Saving checkpoint at 78000 iterations 2023-02-11 22:41:07,518 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:41:07,518 - mmseg - INFO - Iter [78000/160000] lr: 3.075e-05, eta: 4:42:21, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2643, decode.acc_seg: 89.4215, aux.loss_ce: 0.1812, aux.acc_seg: 82.4696, loss: 0.4455, grad_norm: 4.5367 2023-02-11 22:41:17,233 - mmseg - INFO - Iter [78050/160000] lr: 3.073e-05, eta: 4:42:10, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2878, decode.acc_seg: 89.2151, aux.loss_ce: 0.2071, aux.acc_seg: 80.8315, loss: 0.4949, grad_norm: 4.7615 2023-02-11 22:41:27,468 - mmseg - INFO - Iter [78100/160000] lr: 3.071e-05, eta: 4:41:59, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2934, decode.acc_seg: 88.9462, aux.loss_ce: 0.2013, aux.acc_seg: 80.8796, loss: 0.4946, grad_norm: 5.5369 2023-02-11 22:41:37,395 - mmseg - INFO - Iter [78150/160000] lr: 3.069e-05, eta: 4:41:49, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2779, decode.acc_seg: 89.2851, aux.loss_ce: 0.1915, aux.acc_seg: 81.6320, loss: 0.4694, grad_norm: 4.8299 2023-02-11 22:41:47,618 - mmseg - INFO - Iter [78200/160000] lr: 3.068e-05, eta: 4:41:38, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2655, decode.acc_seg: 89.6860, aux.loss_ce: 0.1921, aux.acc_seg: 81.2300, loss: 0.4576, grad_norm: 5.4070 2023-02-11 22:41:57,662 - mmseg - INFO - Iter [78250/160000] lr: 3.066e-05, eta: 4:41:27, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2869, decode.acc_seg: 89.1678, aux.loss_ce: 0.2091, aux.acc_seg: 80.4467, loss: 0.4960, grad_norm: 5.0400 2023-02-11 22:42:07,586 - mmseg - INFO - Iter [78300/160000] lr: 3.064e-05, eta: 4:41:17, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2861, decode.acc_seg: 88.6945, aux.loss_ce: 0.1967, aux.acc_seg: 81.1121, loss: 0.4829, grad_norm: 5.2099 2023-02-11 22:42:17,649 - mmseg - INFO - Iter [78350/160000] lr: 3.062e-05, eta: 4:41:06, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2614, decode.acc_seg: 90.0415, aux.loss_ce: 0.1823, aux.acc_seg: 82.7100, loss: 0.4437, grad_norm: 4.4933 2023-02-11 22:42:27,573 - mmseg - INFO - Iter [78400/160000] lr: 3.060e-05, eta: 4:40:55, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2652, decode.acc_seg: 89.4166, aux.loss_ce: 0.1823, aux.acc_seg: 82.1605, loss: 0.4475, grad_norm: 4.9931 2023-02-11 22:42:37,552 - mmseg - INFO - Iter [78450/160000] lr: 3.058e-05, eta: 4:40:44, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2820, decode.acc_seg: 89.2037, aux.loss_ce: 0.1954, aux.acc_seg: 81.4969, loss: 0.4774, grad_norm: 4.7755 2023-02-11 22:42:47,290 - mmseg - INFO - Iter [78500/160000] lr: 3.056e-05, eta: 4:40:33, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2870, decode.acc_seg: 89.0320, aux.loss_ce: 0.1892, aux.acc_seg: 81.8996, loss: 0.4762, grad_norm: 5.5154 2023-02-11 22:42:57,007 - mmseg - INFO - Iter [78550/160000] lr: 3.054e-05, eta: 4:40:22, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2587, decode.acc_seg: 89.9067, aux.loss_ce: 0.1854, aux.acc_seg: 82.2006, loss: 0.4441, grad_norm: 4.6182 2023-02-11 22:43:06,858 - mmseg - INFO - Iter [78600/160000] lr: 
3.053e-05, eta: 4:40:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2905, decode.acc_seg: 88.6968, aux.loss_ce: 0.2012, aux.acc_seg: 80.7088, loss: 0.4917, grad_norm: 5.1469 2023-02-11 22:43:16,753 - mmseg - INFO - Iter [78650/160000] lr: 3.051e-05, eta: 4:40:00, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.3015, decode.acc_seg: 88.4623, aux.loss_ce: 0.2107, aux.acc_seg: 80.2411, loss: 0.5122, grad_norm: 5.9610 2023-02-11 22:43:26,596 - mmseg - INFO - Iter [78700/160000] lr: 3.049e-05, eta: 4:39:49, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2788, decode.acc_seg: 89.0848, aux.loss_ce: 0.1925, aux.acc_seg: 81.3170, loss: 0.4713, grad_norm: 4.6326 2023-02-11 22:43:36,451 - mmseg - INFO - Iter [78750/160000] lr: 3.047e-05, eta: 4:39:38, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2894, decode.acc_seg: 89.2742, aux.loss_ce: 0.2039, aux.acc_seg: 81.1387, loss: 0.4934, grad_norm: 5.5161 2023-02-11 22:43:46,238 - mmseg - INFO - Iter [78800/160000] lr: 3.045e-05, eta: 4:39:27, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2771, decode.acc_seg: 89.0668, aux.loss_ce: 0.1923, aux.acc_seg: 81.1159, loss: 0.4694, grad_norm: 4.4416 2023-02-11 22:43:56,114 - mmseg - INFO - Iter [78850/160000] lr: 3.043e-05, eta: 4:39:16, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2867, decode.acc_seg: 88.9604, aux.loss_ce: 0.1971, aux.acc_seg: 81.3412, loss: 0.4838, grad_norm: 5.7530 2023-02-11 22:44:05,855 - mmseg - INFO - Iter [78900/160000] lr: 3.041e-05, eta: 4:39:05, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2707, decode.acc_seg: 89.2799, aux.loss_ce: 0.1980, aux.acc_seg: 80.5822, loss: 0.4687, grad_norm: 4.9945 2023-02-11 22:44:16,139 - mmseg - INFO - Iter [78950/160000] lr: 3.039e-05, eta: 4:38:55, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2726, decode.acc_seg: 89.6366, aux.loss_ce: 0.1941, aux.acc_seg: 81.4881, loss: 0.4667, grad_norm: 5.1279 2023-02-11 22:44:25,867 - mmseg - INFO - Saving checkpoint at 79000 iterations 2023-02-11 22:44:26,542 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:44:26,542 - mmseg - INFO - Iter [79000/160000] lr: 3.038e-05, eta: 4:38:45, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2714, decode.acc_seg: 89.4715, aux.loss_ce: 0.1990, aux.acc_seg: 80.9187, loss: 0.4704, grad_norm: 5.4102 2023-02-11 22:44:36,925 - mmseg - INFO - Iter [79050/160000] lr: 3.036e-05, eta: 4:38:34, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2684, decode.acc_seg: 89.5213, aux.loss_ce: 0.1889, aux.acc_seg: 81.8848, loss: 0.4572, grad_norm: 6.1653 2023-02-11 22:44:47,150 - mmseg - INFO - Iter [79100/160000] lr: 3.034e-05, eta: 4:38:24, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2625, decode.acc_seg: 89.6408, aux.loss_ce: 0.1946, aux.acc_seg: 81.3286, loss: 0.4570, grad_norm: 4.3659 2023-02-11 22:44:59,416 - mmseg - INFO - Iter [79150/160000] lr: 3.032e-05, eta: 4:38:16, time: 0.245, data_time: 0.045, memory: 7748, decode.loss_ce: 0.2719, decode.acc_seg: 89.5148, aux.loss_ce: 0.1912, aux.acc_seg: 81.6826, loss: 0.4631, grad_norm: 4.8989 2023-02-11 22:45:09,898 - mmseg - INFO - Iter [79200/160000] lr: 3.030e-05, eta: 4:38:06, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2883, decode.acc_seg: 89.1786, aux.loss_ce: 0.2021, aux.acc_seg: 81.2070, loss: 0.4904, grad_norm: 5.3205 2023-02-11 22:45:20,283 - mmseg - INFO - Iter [79250/160000] lr: 3.028e-05, 
eta: 4:37:56, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2578, decode.acc_seg: 89.8587, aux.loss_ce: 0.1894, aux.acc_seg: 82.0421, loss: 0.4472, grad_norm: 4.4332 2023-02-11 22:45:30,309 - mmseg - INFO - Iter [79300/160000] lr: 3.026e-05, eta: 4:37:45, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2621, decode.acc_seg: 89.9218, aux.loss_ce: 0.1908, aux.acc_seg: 81.8143, loss: 0.4529, grad_norm: 4.6968 2023-02-11 22:45:40,113 - mmseg - INFO - Iter [79350/160000] lr: 3.024e-05, eta: 4:37:34, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2620, decode.acc_seg: 89.6894, aux.loss_ce: 0.1904, aux.acc_seg: 81.6534, loss: 0.4524, grad_norm: 5.3656 2023-02-11 22:45:50,205 - mmseg - INFO - Iter [79400/160000] lr: 3.023e-05, eta: 4:37:23, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2710, decode.acc_seg: 89.4311, aux.loss_ce: 0.1896, aux.acc_seg: 81.7709, loss: 0.4606, grad_norm: 5.8157 2023-02-11 22:46:00,150 - mmseg - INFO - Iter [79450/160000] lr: 3.021e-05, eta: 4:37:13, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2598, decode.acc_seg: 90.0357, aux.loss_ce: 0.1852, aux.acc_seg: 82.2511, loss: 0.4450, grad_norm: 4.3413 2023-02-11 22:46:10,583 - mmseg - INFO - Iter [79500/160000] lr: 3.019e-05, eta: 4:37:02, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2834, decode.acc_seg: 88.9331, aux.loss_ce: 0.1920, aux.acc_seg: 81.5534, loss: 0.4754, grad_norm: 5.0973 2023-02-11 22:46:20,444 - mmseg - INFO - Iter [79550/160000] lr: 3.017e-05, eta: 4:36:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2920, decode.acc_seg: 88.6151, aux.loss_ce: 0.1949, aux.acc_seg: 81.6876, loss: 0.4869, grad_norm: 5.6017 2023-02-11 22:46:30,708 - mmseg - INFO - Iter [79600/160000] lr: 3.015e-05, eta: 4:36:41, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2757, decode.acc_seg: 88.9912, aux.loss_ce: 0.1887, aux.acc_seg: 81.8358, loss: 0.4644, grad_norm: 4.8938 2023-02-11 22:46:40,824 - mmseg - INFO - Iter [79650/160000] lr: 3.013e-05, eta: 4:36:31, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2643, decode.acc_seg: 89.8551, aux.loss_ce: 0.1854, aux.acc_seg: 81.8546, loss: 0.4497, grad_norm: 4.9265 2023-02-11 22:46:50,908 - mmseg - INFO - Iter [79700/160000] lr: 3.011e-05, eta: 4:36:20, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2650, decode.acc_seg: 89.3835, aux.loss_ce: 0.1896, aux.acc_seg: 81.5543, loss: 0.4547, grad_norm: 5.3094 2023-02-11 22:47:00,679 - mmseg - INFO - Iter [79750/160000] lr: 3.009e-05, eta: 4:36:09, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2511, decode.acc_seg: 90.1047, aux.loss_ce: 0.1767, aux.acc_seg: 82.6359, loss: 0.4279, grad_norm: 4.6333 2023-02-11 22:47:11,157 - mmseg - INFO - Iter [79800/160000] lr: 3.008e-05, eta: 4:35:59, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2635, decode.acc_seg: 89.6574, aux.loss_ce: 0.1932, aux.acc_seg: 81.2963, loss: 0.4567, grad_norm: 5.3340 2023-02-11 22:47:21,497 - mmseg - INFO - Iter [79850/160000] lr: 3.006e-05, eta: 4:35:48, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2843, decode.acc_seg: 88.8615, aux.loss_ce: 0.1969, aux.acc_seg: 80.8656, loss: 0.4811, grad_norm: 5.0363 2023-02-11 22:47:31,467 - mmseg - INFO - Iter [79900/160000] lr: 3.004e-05, eta: 4:35:38, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2714, decode.acc_seg: 89.7803, aux.loss_ce: 0.1957, aux.acc_seg: 81.0266, loss: 0.4671, grad_norm: 4.5552 
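The lr column decays linearly with the iteration count (3.338e-05 at iteration 71000, 3.150e-05 at 76000, 3.000e-05 at 80000 of 160000), which is consistent with a poly schedule with power 1, lr(t) = base_lr * (1 - t / max_iters)^power, and an implied base_lr of 6e-05. A quick sanity check under that assumption; base_lr and power are inferred from the logged values here, not read from this run's config:

```python
def poly_lr(iteration, base_lr=6e-05, max_iters=160000, power=1.0):
    """Poly LR schedule; base_lr and power are inferred from the logged lr values (an assumption)."""
    return base_lr * (1 - iteration / max_iters) ** power

for it in (71000, 76000, 80000):
    print(it, poly_lr(it))  # ~3.3375e-05, 3.15e-05, 3e-05 -> logged as 3.338e-05, 3.150e-05, 3.000e-05
```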
2023-02-11 22:47:42,249 - mmseg - INFO - Iter [79950/160000] lr: 3.002e-05, eta: 4:35:28, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2835, decode.acc_seg: 89.2273, aux.loss_ce: 0.1928, aux.acc_seg: 81.6769, loss: 0.4763, grad_norm: 5.5594
2023-02-11 22:47:52,353 - mmseg - INFO - Saving checkpoint at 80000 iterations
2023-02-11 22:47:53,048 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py
2023-02-11 22:47:53,048 - mmseg - INFO - Iter [80000/160000] lr: 3.000e-05, eta: 4:35:18, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2773, decode.acc_seg: 89.1244, aux.loss_ce: 0.1942, aux.acc_seg: 81.2737, loss: 0.4715, grad_norm: 6.2327
2023-02-11 22:48:04,463 - mmseg - INFO - per class results:
2023-02-11 22:48:04,469 - mmseg - INFO -
+---------------------+-------+-------+
| Class | IoU | Acc |
+---------------------+-------+-------+
| wall | 74.0 | 86.84 |
| building | 81.31 | 88.4 |
| sky | 93.4 | 97.82 |
| floor | 77.09 | 89.55 |
| tree | 72.34 | 87.51 |
| ceiling | 81.94 | 90.18 |
| road | 81.01 | 89.97 |
| bed | 84.98 | 95.23 |
| windowpane | 57.85 | 70.76 |
| grass | 65.52 | 87.62 |
| cabinet | 54.82 | 61.31 |
| sidewalk | 60.38 | 78.77 |
| person | 75.36 | 91.59 |
| earth | 34.27 | 45.94 |
| door | 42.88 | 68.3 |
| table | 53.49 | 78.79 |
| mountain | 55.97 | 70.51 |
| plant | 50.7 | 63.68 |
| curtain | 67.21 | 87.08 |
| chair | 51.49 | 67.5 |
| car | 81.85 | 90.8 |
| water | 52.45 | 73.61 |
| painting | 67.17 | 83.17 |
| sofa | 57.86 | 70.5 |
| shelf | 36.42 | 46.77 |
| house | 48.17 | 64.75 |
| sea | 54.26 | 79.96 |
| mirror | 51.64 | 63.24 |
| rug | 45.79 | 48.53 |
| field | 23.84 | 27.43 |
| armchair | 35.71 | 52.83 |
| seat | 58.64 | 76.84 |
| fence | 28.81 | 34.79 |
| desk | 40.19 | 66.52 |
| rock | 33.45 | 47.18 |
| wardrobe | 44.39 | 65.3 |
| lamp | 55.95 | 67.45 |
| bathtub | 73.86 | 79.08 |
| railing | 31.51 | 48.07 |
| cushion | 51.13 | 64.33 |
| base | 31.24 | 59.19 |
| box | 13.42 | 14.79 |
| column | 38.59 | 45.86 |
| signboard | 33.69 | 43.07 |
| chest of drawers | 42.17 | 60.02 |
| counter | 27.99 | 45.59 |
| sand | 39.44 | 49.17 |
| sink | 63.89 | 81.45 |
| skyscraper | 61.55 | 85.17 |
| fireplace | 61.41 | 80.31 |
| refrigerator | 70.66 | 90.59 |
| grandstand | 30.83 | 58.37 |
| path | 14.86 | 23.74 |
| stairs | 27.82 | 39.66 |
| runway | 68.45 | 89.05 |
| case | 47.58 | 60.23 |
| pool table | 90.49 | 92.57 |
| pillow | 49.96 | 62.78 |
| screen door | 55.81 | 84.93 |
| stairway | 30.35 | 39.89 |
| river | 11.96 | 13.76 |
| bridge | 53.93 | 61.58 |
| bookcase | 26.87 | 39.68 |
| blind | 37.56 | 43.84 |
| coffee table | 56.29 | 63.44 |
| toilet | 77.97 | 91.27 |
| flower | 38.99 | 57.2 |
| book | 42.95 | 65.3 |
| hill | 2.98 | 5.63 |
| bench | 43.19 | 47.82 |
| countertop | 45.34 | 67.61 |
| stove | 65.33 | 74.31 |
| palm | 46.11 | 80.73 |
| kitchen island | 30.96 | 60.46 |
| computer | 61.81 | 79.03 |
| swivel chair | 36.07 | 42.24 |
| boat | 38.53 | 43.75 |
| bar | 27.72 | 33.39 |
| arcade machine | 42.51 | 43.77 |
| hovel | 30.02 | 37.18 |
| bus | 82.01 | 88.47 |
| towel | 55.76 | 75.23 |
| light | 43.93 | 47.36 |
| truck | 32.18 | 46.56 |
| tower | 34.76 | 50.97 |
| chandelier | 61.28 | 79.26 |
| awning | 22.24 | 35.64 |
| streetlight | 19.92 | 25.26 |
| booth | 23.67 | 24.15 |
| television receiver | 63.07 | 69.04 |
| airplane | 52.91 | 63.12 |
| dirt track | 13.99 | 26.99 |
| apparel | 30.36 | 56.03 |
| pole | 16.68 | 20.49 |
| land | 0.1 | 0.12 |
| bannister | 8.42 | 10.13 |
| escalator | 24.58 | 35.13 |
| ottoman | 37.29 | 49.35 |
| bottle | 31.18 | 42.34 |
| buffet | 28.84 | 32.92 |
| poster | 19.22 | 21.37 |
| stage | 16.41 | 39.65 |
| van | 36.94 | 43.59 |
| ship | 25.2 | 34.75 |
| fountain | 23.39 | 29.74 |
| conveyer belt | 67.64 | 77.87 |
| canopy | 8.35 | 14.66 |
| washer | 67.03 | 68.25 |
| plaything | 17.42 | 22.44 |
| swimming pool | 58.79 | 74.87 |
| stool | 35.31 | 49.54 |
| barrel | 7.11 | 64.99 |
| basket | 24.01 | 30.22 |
| waterfall | 51.76 | 64.74 |
| tent | 65.24 | 98.67 |
| bag | 13.91 | 16.91 |
| minibike | 50.11 | 61.71 |
| cradle | 76.26 | 90.25 |
| oven | 29.0 | 44.62 |
| ball | 43.3 | 60.09 |
| food | 48.51 | 60.13 |
| step | 4.23 | 5.21 |
| tank | 30.59 | 32.37 |
| trade name | 24.11 | 27.15 |
| microwave | 40.79 | 45.85 |
| pot | 39.71 | 43.76 |
| animal | 53.23 | 55.98 |
| bicycle | 43.76 | 74.08 |
| lake | 57.96 | 59.11 |
| dishwasher | 53.24 | 71.75 |
| screen | 46.55 | 73.11 |
| blanket | 5.47 | 6.83 |
| sculpture | 37.42 | 52.04 |
| hood | 54.09 | 57.87 |
| sconce | 36.47 | 47.69 |
| vase | 30.61 | 45.79 |
| traffic light | 25.29 | 39.67 |
| tray | 1.98 | 3.53 |
| ashcan | 32.98 | 57.1 |
| fan | 45.54 | 52.35 |
| pier | 57.27 | 81.21 |
| crt screen | 5.87 | 19.39 |
| plate | 45.85 | 62.96 |
| monitor | 4.02 | 4.16 |
| bulletin board | 53.43 | 64.5 |
| shower | 0.0 | 0.0 |
| radiator | 54.25 | 65.65 |
| glass | 5.98 | 6.33 |
| clock | 15.73 | 17.04 |
| flag | 49.77 | 61.5 |
+---------------------+-------+-------+
2023-02-11 22:48:04,469 - mmseg - INFO - Summary:
2023-02-11 22:48:04,470 - mmseg - INFO -
+-------+-------+-------+
| aAcc | mIoU | mAcc |
+-------+-------+-------+
| 80.52 | 42.87 | 55.26 |
+-------+-------+-------+
2023-02-11 22:48:05,131 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_80000.pth.
2023-02-11 22:48:05,131 - mmseg - INFO - Best mIoU is 0.4287 at 80000 iter.
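mIoU and mAcc in the Summary block are, up to rounding, the unweighted means of the IoU and Acc columns of the per-class table above; aAcc is overall pixel accuracy and cannot be re-derived from the table alone. A minimal sketch of that check, assuming the per-class table has been copied on its own into a text file (the file name is a placeholder):

```python
def parse_per_class_table(text):
    """Parse '| class | IoU | Acc |' rows; border and header lines are skipped."""
    rows = []
    for line in text.splitlines():
        parts = [p.strip() for p in line.strip().strip("|").split("|")]
        if len(parts) != 3:
            continue  # border lines like +----+----+----+
        try:
            rows.append((parts[0], float(parts[1]), float(parts[2])))
        except ValueError:
            continue  # the header row "| Class | IoU | Acc |"
    return rows

# table_text = open("per_class_results.txt").read()   # placeholder: the table saved to a file
# rows = parse_per_class_table(table_text)
# miou = sum(iou for _, iou, _ in rows) / len(rows)    # ~42.87, the Summary mIoU (up to rounding)
# macc = sum(acc for _, _, acc in rows) / len(rows)    # ~55.26, the Summary mAcc
```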
2023-02-11 22:48:05,131 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:48:05,131 - mmseg - INFO - Iter(val) [250] aAcc: 0.8052, mIoU: 0.4287, mAcc: 0.5526, IoU.wall: 0.7400, IoU.building: 0.8131, IoU.sky: 0.9340, IoU.floor: 0.7709, IoU.tree: 0.7234, IoU.ceiling: 0.8194, IoU.road: 0.8101, IoU.bed : 0.8498, IoU.windowpane: 0.5785, IoU.grass: 0.6552, IoU.cabinet: 0.5482, IoU.sidewalk: 0.6038, IoU.person: 0.7536, IoU.earth: 0.3427, IoU.door: 0.4288, IoU.table: 0.5349, IoU.mountain: 0.5597, IoU.plant: 0.5070, IoU.curtain: 0.6721, IoU.chair: 0.5149, IoU.car: 0.8185, IoU.water: 0.5245, IoU.painting: 0.6717, IoU.sofa: 0.5786, IoU.shelf: 0.3642, IoU.house: 0.4817, IoU.sea: 0.5426, IoU.mirror: 0.5164, IoU.rug: 0.4579, IoU.field: 0.2384, IoU.armchair: 0.3571, IoU.seat: 0.5864, IoU.fence: 0.2881, IoU.desk: 0.4019, IoU.rock: 0.3345, IoU.wardrobe: 0.4439, IoU.lamp: 0.5595, IoU.bathtub: 0.7386, IoU.railing: 0.3151, IoU.cushion: 0.5113, IoU.base: 0.3124, IoU.box: 0.1342, IoU.column: 0.3859, IoU.signboard: 0.3369, IoU.chest of drawers: 0.4217, IoU.counter: 0.2799, IoU.sand: 0.3944, IoU.sink: 0.6389, IoU.skyscraper: 0.6155, IoU.fireplace: 0.6141, IoU.refrigerator: 0.7066, IoU.grandstand: 0.3083, IoU.path: 0.1486, IoU.stairs: 0.2782, IoU.runway: 0.6845, IoU.case: 0.4758, IoU.pool table: 0.9049, IoU.pillow: 0.4996, IoU.screen door: 0.5581, IoU.stairway: 0.3035, IoU.river: 0.1196, IoU.bridge: 0.5393, IoU.bookcase: 0.2687, IoU.blind: 0.3756, IoU.coffee table: 0.5629, IoU.toilet: 0.7797, IoU.flower: 0.3899, IoU.book: 0.4295, IoU.hill: 0.0298, IoU.bench: 0.4319, IoU.countertop: 0.4534, IoU.stove: 0.6533, IoU.palm: 0.4611, IoU.kitchen island: 0.3096, IoU.computer: 0.6181, IoU.swivel chair: 0.3607, IoU.boat: 0.3853, IoU.bar: 0.2772, IoU.arcade machine: 0.4251, IoU.hovel: 0.3002, IoU.bus: 0.8201, IoU.towel: 0.5576, IoU.light: 0.4393, IoU.truck: 0.3218, IoU.tower: 0.3476, IoU.chandelier: 0.6128, IoU.awning: 0.2224, IoU.streetlight: 0.1992, IoU.booth: 0.2367, IoU.television receiver: 0.6307, IoU.airplane: 0.5291, IoU.dirt track: 0.1399, IoU.apparel: 0.3036, IoU.pole: 0.1668, IoU.land: 0.0010, IoU.bannister: 0.0842, IoU.escalator: 0.2458, IoU.ottoman: 0.3729, IoU.bottle: 0.3118, IoU.buffet: 0.2884, IoU.poster: 0.1922, IoU.stage: 0.1641, IoU.van: 0.3694, IoU.ship: 0.2520, IoU.fountain: 0.2339, IoU.conveyer belt: 0.6764, IoU.canopy: 0.0835, IoU.washer: 0.6703, IoU.plaything: 0.1742, IoU.swimming pool: 0.5879, IoU.stool: 0.3531, IoU.barrel: 0.0711, IoU.basket: 0.2401, IoU.waterfall: 0.5176, IoU.tent: 0.6524, IoU.bag: 0.1391, IoU.minibike: 0.5011, IoU.cradle: 0.7626, IoU.oven: 0.2900, IoU.ball: 0.4330, IoU.food: 0.4851, IoU.step: 0.0423, IoU.tank: 0.3059, IoU.trade name: 0.2411, IoU.microwave: 0.4079, IoU.pot: 0.3971, IoU.animal: 0.5323, IoU.bicycle: 0.4376, IoU.lake: 0.5796, IoU.dishwasher: 0.5324, IoU.screen: 0.4655, IoU.blanket: 0.0547, IoU.sculpture: 0.3742, IoU.hood: 0.5409, IoU.sconce: 0.3647, IoU.vase: 0.3061, IoU.traffic light: 0.2529, IoU.tray: 0.0198, IoU.ashcan: 0.3298, IoU.fan: 0.4554, IoU.pier: 0.5727, IoU.crt screen: 0.0587, IoU.plate: 0.4585, IoU.monitor: 0.0402, IoU.bulletin board: 0.5343, IoU.shower: 0.0000, IoU.radiator: 0.5425, IoU.glass: 0.0598, IoU.clock: 0.1573, IoU.flag: 0.4977, Acc.wall: 0.8684, Acc.building: 0.8840, Acc.sky: 0.9782, Acc.floor: 0.8955, Acc.tree: 0.8751, Acc.ceiling: 0.9018, Acc.road: 0.8997, Acc.bed : 0.9523, Acc.windowpane: 0.7076, Acc.grass: 0.8762, Acc.cabinet: 0.6131, Acc.sidewalk: 0.7877, Acc.person: 0.9159, Acc.earth: 0.4594, Acc.door: 
0.6830, Acc.table: 0.7879, Acc.mountain: 0.7051, Acc.plant: 0.6368, Acc.curtain: 0.8708, Acc.chair: 0.6750, Acc.car: 0.9080, Acc.water: 0.7361, Acc.painting: 0.8317, Acc.sofa: 0.7050, Acc.shelf: 0.4677, Acc.house: 0.6475, Acc.sea: 0.7996, Acc.mirror: 0.6324, Acc.rug: 0.4853, Acc.field: 0.2743, Acc.armchair: 0.5283, Acc.seat: 0.7684, Acc.fence: 0.3479, Acc.desk: 0.6652, Acc.rock: 0.4718, Acc.wardrobe: 0.6530, Acc.lamp: 0.6745, Acc.bathtub: 0.7908, Acc.railing: 0.4807, Acc.cushion: 0.6433, Acc.base: 0.5919, Acc.box: 0.1479, Acc.column: 0.4586, Acc.signboard: 0.4307, Acc.chest of drawers: 0.6002, Acc.counter: 0.4559, Acc.sand: 0.4917, Acc.sink: 0.8145, Acc.skyscraper: 0.8517, Acc.fireplace: 0.8031, Acc.refrigerator: 0.9059, Acc.grandstand: 0.5837, Acc.path: 0.2374, Acc.stairs: 0.3966, Acc.runway: 0.8905, Acc.case: 0.6023, Acc.pool table: 0.9257, Acc.pillow: 0.6278, Acc.screen door: 0.8493, Acc.stairway: 0.3989, Acc.river: 0.1376, Acc.bridge: 0.6158, Acc.bookcase: 0.3968, Acc.blind: 0.4384, Acc.coffee table: 0.6344, Acc.toilet: 0.9127, Acc.flower: 0.5720, Acc.book: 0.6530, Acc.hill: 0.0563, Acc.bench: 0.4782, Acc.countertop: 0.6761, Acc.stove: 0.7431, Acc.palm: 0.8073, Acc.kitchen island: 0.6046, Acc.computer: 0.7903, Acc.swivel chair: 0.4224, Acc.boat: 0.4375, Acc.bar: 0.3339, Acc.arcade machine: 0.4377, Acc.hovel: 0.3718, Acc.bus: 0.8847, Acc.towel: 0.7523, Acc.light: 0.4736, Acc.truck: 0.4656, Acc.tower: 0.5097, Acc.chandelier: 0.7926, Acc.awning: 0.3564, Acc.streetlight: 0.2526, Acc.booth: 0.2415, Acc.television receiver: 0.6904, Acc.airplane: 0.6312, Acc.dirt track: 0.2699, Acc.apparel: 0.5603, Acc.pole: 0.2049, Acc.land: 0.0012, Acc.bannister: 0.1013, Acc.escalator: 0.3513, Acc.ottoman: 0.4935, Acc.bottle: 0.4234, Acc.buffet: 0.3292, Acc.poster: 0.2137, Acc.stage: 0.3965, Acc.van: 0.4359, Acc.ship: 0.3475, Acc.fountain: 0.2974, Acc.conveyer belt: 0.7787, Acc.canopy: 0.1466, Acc.washer: 0.6825, Acc.plaything: 0.2244, Acc.swimming pool: 0.7487, Acc.stool: 0.4954, Acc.barrel: 0.6499, Acc.basket: 0.3022, Acc.waterfall: 0.6474, Acc.tent: 0.9867, Acc.bag: 0.1691, Acc.minibike: 0.6171, Acc.cradle: 0.9025, Acc.oven: 0.4462, Acc.ball: 0.6009, Acc.food: 0.6013, Acc.step: 0.0521, Acc.tank: 0.3237, Acc.trade name: 0.2715, Acc.microwave: 0.4585, Acc.pot: 0.4376, Acc.animal: 0.5598, Acc.bicycle: 0.7408, Acc.lake: 0.5911, Acc.dishwasher: 0.7175, Acc.screen: 0.7311, Acc.blanket: 0.0683, Acc.sculpture: 0.5204, Acc.hood: 0.5787, Acc.sconce: 0.4769, Acc.vase: 0.4579, Acc.traffic light: 0.3967, Acc.tray: 0.0353, Acc.ashcan: 0.5710, Acc.fan: 0.5235, Acc.pier: 0.8121, Acc.crt screen: 0.1939, Acc.plate: 0.6296, Acc.monitor: 0.0416, Acc.bulletin board: 0.6450, Acc.shower: 0.0000, Acc.radiator: 0.6565, Acc.glass: 0.0633, Acc.clock: 0.1704, Acc.flag: 0.6150 2023-02-11 22:48:15,424 - mmseg - INFO - Iter [80050/160000] lr: 2.998e-05, eta: 4:35:23, time: 0.448, data_time: 0.246, memory: 7748, decode.loss_ce: 0.2807, decode.acc_seg: 89.4707, aux.loss_ce: 0.1942, aux.acc_seg: 82.0336, loss: 0.4749, grad_norm: 5.5428 2023-02-11 22:48:25,968 - mmseg - INFO - Iter [80100/160000] lr: 2.996e-05, eta: 4:35:13, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2803, decode.acc_seg: 89.1816, aux.loss_ce: 0.1989, aux.acc_seg: 81.1420, loss: 0.4792, grad_norm: 4.9845 2023-02-11 22:48:35,867 - mmseg - INFO - Iter [80150/160000] lr: 2.994e-05, eta: 4:35:02, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2835, decode.acc_seg: 88.9493, aux.loss_ce: 0.1969, aux.acc_seg: 80.9538, loss: 0.4805, grad_norm: 
6.6848 2023-02-11 22:48:45,712 - mmseg - INFO - Iter [80200/160000] lr: 2.993e-05, eta: 4:34:51, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2666, decode.acc_seg: 89.5573, aux.loss_ce: 0.1923, aux.acc_seg: 81.7234, loss: 0.4588, grad_norm: 4.4878 2023-02-11 22:48:55,587 - mmseg - INFO - Iter [80250/160000] lr: 2.991e-05, eta: 4:34:40, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2553, decode.acc_seg: 90.4435, aux.loss_ce: 0.1873, aux.acc_seg: 82.5789, loss: 0.4426, grad_norm: 4.6031 2023-02-11 22:49:05,749 - mmseg - INFO - Iter [80300/160000] lr: 2.989e-05, eta: 4:34:30, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2735, decode.acc_seg: 89.5284, aux.loss_ce: 0.1842, aux.acc_seg: 82.7087, loss: 0.4577, grad_norm: 5.4995 2023-02-11 22:49:16,098 - mmseg - INFO - Iter [80350/160000] lr: 2.987e-05, eta: 4:34:19, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2862, decode.acc_seg: 89.1769, aux.loss_ce: 0.2020, aux.acc_seg: 81.1091, loss: 0.4882, grad_norm: 6.5943 2023-02-11 22:49:25,991 - mmseg - INFO - Iter [80400/160000] lr: 2.985e-05, eta: 4:34:08, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2908, decode.acc_seg: 88.7050, aux.loss_ce: 0.1955, aux.acc_seg: 81.2130, loss: 0.4863, grad_norm: 5.5942 2023-02-11 22:49:37,942 - mmseg - INFO - Iter [80450/160000] lr: 2.983e-05, eta: 4:34:00, time: 0.239, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2660, decode.acc_seg: 89.6136, aux.loss_ce: 0.1861, aux.acc_seg: 82.1594, loss: 0.4521, grad_norm: 5.5087 2023-02-11 22:49:48,167 - mmseg - INFO - Iter [80500/160000] lr: 2.981e-05, eta: 4:33:50, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2863, decode.acc_seg: 89.2003, aux.loss_ce: 0.1966, aux.acc_seg: 81.2910, loss: 0.4829, grad_norm: 5.7375 2023-02-11 22:49:58,075 - mmseg - INFO - Iter [80550/160000] lr: 2.979e-05, eta: 4:33:39, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2737, decode.acc_seg: 89.1565, aux.loss_ce: 0.1922, aux.acc_seg: 81.3469, loss: 0.4660, grad_norm: 4.3414 2023-02-11 22:50:07,796 - mmseg - INFO - Iter [80600/160000] lr: 2.978e-05, eta: 4:33:28, time: 0.195, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2817, decode.acc_seg: 89.1333, aux.loss_ce: 0.1891, aux.acc_seg: 81.7493, loss: 0.4708, grad_norm: 5.4682 2023-02-11 22:50:17,755 - mmseg - INFO - Iter [80650/160000] lr: 2.976e-05, eta: 4:33:17, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2850, decode.acc_seg: 89.1679, aux.loss_ce: 0.1993, aux.acc_seg: 81.3514, loss: 0.4843, grad_norm: 5.8973 2023-02-11 22:50:27,731 - mmseg - INFO - Iter [80700/160000] lr: 2.974e-05, eta: 4:33:06, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2898, decode.acc_seg: 88.7105, aux.loss_ce: 0.1976, aux.acc_seg: 80.9987, loss: 0.4874, grad_norm: 5.0656 2023-02-11 22:50:37,882 - mmseg - INFO - Iter [80750/160000] lr: 2.972e-05, eta: 4:32:56, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2740, decode.acc_seg: 89.2333, aux.loss_ce: 0.1949, aux.acc_seg: 81.4905, loss: 0.4689, grad_norm: 5.7952 2023-02-11 22:50:48,188 - mmseg - INFO - Iter [80800/160000] lr: 2.970e-05, eta: 4:32:45, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2717, decode.acc_seg: 89.4870, aux.loss_ce: 0.1958, aux.acc_seg: 81.3980, loss: 0.4674, grad_norm: 5.1334 2023-02-11 22:50:58,466 - mmseg - INFO - Iter [80850/160000] lr: 2.968e-05, eta: 4:32:35, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2656, decode.acc_seg: 
89.8288, aux.loss_ce: 0.1888, aux.acc_seg: 81.6679, loss: 0.4544, grad_norm: 4.8630 2023-02-11 22:51:08,473 - mmseg - INFO - Iter [80900/160000] lr: 2.966e-05, eta: 4:32:24, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2531, decode.acc_seg: 90.0624, aux.loss_ce: 0.1817, aux.acc_seg: 82.1995, loss: 0.4348, grad_norm: 4.5422 2023-02-11 22:51:18,725 - mmseg - INFO - Iter [80950/160000] lr: 2.964e-05, eta: 4:32:14, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2720, decode.acc_seg: 89.2459, aux.loss_ce: 0.1904, aux.acc_seg: 81.1630, loss: 0.4623, grad_norm: 5.0192 2023-02-11 22:51:28,526 - mmseg - INFO - Saving checkpoint at 81000 iterations 2023-02-11 22:51:29,201 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:51:29,202 - mmseg - INFO - Iter [81000/160000] lr: 2.963e-05, eta: 4:32:03, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2725, decode.acc_seg: 89.4656, aux.loss_ce: 0.1886, aux.acc_seg: 81.8436, loss: 0.4611, grad_norm: 4.9218 2023-02-11 22:51:39,357 - mmseg - INFO - Iter [81050/160000] lr: 2.961e-05, eta: 4:31:53, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2632, decode.acc_seg: 89.5667, aux.loss_ce: 0.1891, aux.acc_seg: 81.8020, loss: 0.4523, grad_norm: 4.8570 2023-02-11 22:51:49,420 - mmseg - INFO - Iter [81100/160000] lr: 2.959e-05, eta: 4:31:42, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2762, decode.acc_seg: 89.0526, aux.loss_ce: 0.1883, aux.acc_seg: 81.4633, loss: 0.4646, grad_norm: 5.2347 2023-02-11 22:51:59,975 - mmseg - INFO - Iter [81150/160000] lr: 2.957e-05, eta: 4:31:32, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2776, decode.acc_seg: 89.1523, aux.loss_ce: 0.1962, aux.acc_seg: 81.3458, loss: 0.4738, grad_norm: 6.0939 2023-02-11 22:52:10,023 - mmseg - INFO - Iter [81200/160000] lr: 2.955e-05, eta: 4:31:22, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2772, decode.acc_seg: 89.3492, aux.loss_ce: 0.1921, aux.acc_seg: 81.4746, loss: 0.4693, grad_norm: 5.1180 2023-02-11 22:52:20,531 - mmseg - INFO - Iter [81250/160000] lr: 2.953e-05, eta: 4:31:11, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2611, decode.acc_seg: 89.7821, aux.loss_ce: 0.1863, aux.acc_seg: 82.1101, loss: 0.4474, grad_norm: 4.5079 2023-02-11 22:52:31,163 - mmseg - INFO - Iter [81300/160000] lr: 2.951e-05, eta: 4:31:01, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2705, decode.acc_seg: 89.3179, aux.loss_ce: 0.1936, aux.acc_seg: 81.3273, loss: 0.4641, grad_norm: 5.2331 2023-02-11 22:52:41,142 - mmseg - INFO - Iter [81350/160000] lr: 2.949e-05, eta: 4:30:51, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2703, decode.acc_seg: 89.8233, aux.loss_ce: 0.1922, aux.acc_seg: 82.2458, loss: 0.4625, grad_norm: 4.8706 2023-02-11 22:52:51,183 - mmseg - INFO - Iter [81400/160000] lr: 2.948e-05, eta: 4:30:40, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2723, decode.acc_seg: 89.2609, aux.loss_ce: 0.1943, aux.acc_seg: 81.3887, loss: 0.4666, grad_norm: 5.0824 2023-02-11 22:53:01,032 - mmseg - INFO - Iter [81450/160000] lr: 2.946e-05, eta: 4:30:29, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2729, decode.acc_seg: 89.3554, aux.loss_ce: 0.1940, aux.acc_seg: 81.4337, loss: 0.4668, grad_norm: 4.8357 2023-02-11 22:53:10,908 - mmseg - INFO - Iter [81500/160000] lr: 2.944e-05, eta: 4:30:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2909, decode.acc_seg: 88.8480, 
aux.loss_ce: 0.2013, aux.acc_seg: 81.0745, loss: 0.4921, grad_norm: 5.6906 2023-02-11 22:53:21,256 - mmseg - INFO - Iter [81550/160000] lr: 2.942e-05, eta: 4:30:08, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2730, decode.acc_seg: 89.5583, aux.loss_ce: 0.1895, aux.acc_seg: 82.1628, loss: 0.4625, grad_norm: 5.6855 2023-02-11 22:53:30,943 - mmseg - INFO - Iter [81600/160000] lr: 2.940e-05, eta: 4:29:57, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2680, decode.acc_seg: 89.7217, aux.loss_ce: 0.1966, aux.acc_seg: 81.1478, loss: 0.4647, grad_norm: 4.6691 2023-02-11 22:53:40,866 - mmseg - INFO - Iter [81650/160000] lr: 2.938e-05, eta: 4:29:46, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2811, decode.acc_seg: 89.0121, aux.loss_ce: 0.1920, aux.acc_seg: 81.5434, loss: 0.4731, grad_norm: 6.0341 2023-02-11 22:53:52,808 - mmseg - INFO - Iter [81700/160000] lr: 2.936e-05, eta: 4:29:38, time: 0.239, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2596, decode.acc_seg: 90.0583, aux.loss_ce: 0.1894, aux.acc_seg: 81.7038, loss: 0.4489, grad_norm: 4.9776 2023-02-11 22:54:02,782 - mmseg - INFO - Iter [81750/160000] lr: 2.934e-05, eta: 4:29:27, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2703, decode.acc_seg: 89.4814, aux.loss_ce: 0.1962, aux.acc_seg: 81.2746, loss: 0.4664, grad_norm: 4.5196 2023-02-11 22:54:12,582 - mmseg - INFO - Iter [81800/160000] lr: 2.933e-05, eta: 4:29:16, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2763, decode.acc_seg: 88.9256, aux.loss_ce: 0.1962, aux.acc_seg: 80.8680, loss: 0.4725, grad_norm: 5.5493 2023-02-11 22:54:22,715 - mmseg - INFO - Iter [81850/160000] lr: 2.931e-05, eta: 4:29:05, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2801, decode.acc_seg: 88.8886, aux.loss_ce: 0.2005, aux.acc_seg: 80.6348, loss: 0.4806, grad_norm: 5.7105 2023-02-11 22:54:33,457 - mmseg - INFO - Iter [81900/160000] lr: 2.929e-05, eta: 4:28:55, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2705, decode.acc_seg: 89.4622, aux.loss_ce: 0.2033, aux.acc_seg: 80.3206, loss: 0.4738, grad_norm: 5.2068 2023-02-11 22:54:43,301 - mmseg - INFO - Iter [81950/160000] lr: 2.927e-05, eta: 4:28:45, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2741, decode.acc_seg: 89.3923, aux.loss_ce: 0.1880, aux.acc_seg: 81.9678, loss: 0.4621, grad_norm: 5.7026 2023-02-11 22:54:53,160 - mmseg - INFO - Saving checkpoint at 82000 iterations 2023-02-11 22:54:53,831 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:54:53,832 - mmseg - INFO - Iter [82000/160000] lr: 2.925e-05, eta: 4:28:34, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2630, decode.acc_seg: 89.9057, aux.loss_ce: 0.1940, aux.acc_seg: 81.6645, loss: 0.4570, grad_norm: 5.0698 2023-02-11 22:55:04,027 - mmseg - INFO - Iter [82050/160000] lr: 2.923e-05, eta: 4:28:24, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2566, decode.acc_seg: 90.2146, aux.loss_ce: 0.1877, aux.acc_seg: 81.8250, loss: 0.4443, grad_norm: 4.6722 2023-02-11 22:55:13,857 - mmseg - INFO - Iter [82100/160000] lr: 2.921e-05, eta: 4:28:13, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2585, decode.acc_seg: 89.9095, aux.loss_ce: 0.1857, aux.acc_seg: 81.9694, loss: 0.4442, grad_norm: 4.4055 2023-02-11 22:55:23,920 - mmseg - INFO - Iter [82150/160000] lr: 2.919e-05, eta: 4:28:02, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2679, decode.acc_seg: 89.7313, 
aux.loss_ce: 0.1893, aux.acc_seg: 81.8906, loss: 0.4572, grad_norm: 4.8281 2023-02-11 22:55:34,272 - mmseg - INFO - Iter [82200/160000] lr: 2.918e-05, eta: 4:27:52, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2529, decode.acc_seg: 90.0901, aux.loss_ce: 0.1780, aux.acc_seg: 82.6771, loss: 0.4310, grad_norm: 5.3767 2023-02-11 22:55:44,188 - mmseg - INFO - Iter [82250/160000] lr: 2.916e-05, eta: 4:27:41, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2654, decode.acc_seg: 89.7564, aux.loss_ce: 0.1961, aux.acc_seg: 81.2008, loss: 0.4614, grad_norm: 4.9639 2023-02-11 22:55:54,855 - mmseg - INFO - Iter [82300/160000] lr: 2.914e-05, eta: 4:27:31, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2741, decode.acc_seg: 89.4177, aux.loss_ce: 0.1936, aux.acc_seg: 81.6160, loss: 0.4678, grad_norm: 5.4126 2023-02-11 22:56:05,038 - mmseg - INFO - Iter [82350/160000] lr: 2.912e-05, eta: 4:27:21, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2957, decode.acc_seg: 88.6454, aux.loss_ce: 0.1983, aux.acc_seg: 80.9688, loss: 0.4940, grad_norm: 5.3815 2023-02-11 22:56:14,903 - mmseg - INFO - Iter [82400/160000] lr: 2.910e-05, eta: 4:27:10, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2522, decode.acc_seg: 90.2541, aux.loss_ce: 0.1843, aux.acc_seg: 81.9905, loss: 0.4365, grad_norm: 4.8988 2023-02-11 22:56:24,767 - mmseg - INFO - Iter [82450/160000] lr: 2.908e-05, eta: 4:26:59, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2537, decode.acc_seg: 89.9579, aux.loss_ce: 0.1856, aux.acc_seg: 82.2964, loss: 0.4393, grad_norm: 5.3765 2023-02-11 22:56:34,523 - mmseg - INFO - Iter [82500/160000] lr: 2.906e-05, eta: 4:26:48, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2548, decode.acc_seg: 89.8046, aux.loss_ce: 0.1890, aux.acc_seg: 81.5975, loss: 0.4439, grad_norm: 5.4521 2023-02-11 22:56:44,532 - mmseg - INFO - Iter [82550/160000] lr: 2.904e-05, eta: 4:26:37, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2677, decode.acc_seg: 89.3938, aux.loss_ce: 0.1917, aux.acc_seg: 81.5966, loss: 0.4595, grad_norm: 4.8761 2023-02-11 22:56:54,547 - mmseg - INFO - Iter [82600/160000] lr: 2.903e-05, eta: 4:26:27, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2722, decode.acc_seg: 89.3011, aux.loss_ce: 0.1884, aux.acc_seg: 81.9330, loss: 0.4607, grad_norm: 4.5540 2023-02-11 22:57:04,183 - mmseg - INFO - Iter [82650/160000] lr: 2.901e-05, eta: 4:26:16, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2566, decode.acc_seg: 89.8668, aux.loss_ce: 0.1922, aux.acc_seg: 81.5819, loss: 0.4488, grad_norm: 4.6849 2023-02-11 22:57:14,401 - mmseg - INFO - Iter [82700/160000] lr: 2.899e-05, eta: 4:26:05, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2603, decode.acc_seg: 89.7034, aux.loss_ce: 0.1917, aux.acc_seg: 81.2714, loss: 0.4520, grad_norm: 5.2785 2023-02-11 22:57:24,614 - mmseg - INFO - Iter [82750/160000] lr: 2.897e-05, eta: 4:25:55, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2715, decode.acc_seg: 89.6562, aux.loss_ce: 0.1862, aux.acc_seg: 82.4320, loss: 0.4577, grad_norm: 4.3067 2023-02-11 22:57:35,141 - mmseg - INFO - Iter [82800/160000] lr: 2.895e-05, eta: 4:25:45, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2722, decode.acc_seg: 89.6315, aux.loss_ce: 0.1948, aux.acc_seg: 81.8358, loss: 0.4669, grad_norm: 4.9567 2023-02-11 22:57:44,931 - mmseg - INFO - Iter [82850/160000] lr: 2.893e-05, eta: 4:25:34, time: 0.196, 
data_time: 0.004, memory: 7748, decode.loss_ce: 0.2709, decode.acc_seg: 89.1506, aux.loss_ce: 0.1820, aux.acc_seg: 81.8645, loss: 0.4530, grad_norm: 5.9313 2023-02-11 22:57:55,068 - mmseg - INFO - Iter [82900/160000] lr: 2.891e-05, eta: 4:25:23, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2625, decode.acc_seg: 89.5773, aux.loss_ce: 0.1873, aux.acc_seg: 81.8945, loss: 0.4497, grad_norm: 4.7078 2023-02-11 22:58:07,259 - mmseg - INFO - Iter [82950/160000] lr: 2.889e-05, eta: 4:25:15, time: 0.244, data_time: 0.045, memory: 7748, decode.loss_ce: 0.2705, decode.acc_seg: 89.5351, aux.loss_ce: 0.1897, aux.acc_seg: 81.2812, loss: 0.4602, grad_norm: 5.7157 2023-02-11 22:58:17,250 - mmseg - INFO - Saving checkpoint at 83000 iterations 2023-02-11 22:58:17,935 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 22:58:17,935 - mmseg - INFO - Iter [83000/160000] lr: 2.888e-05, eta: 4:25:05, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2696, decode.acc_seg: 89.1974, aux.loss_ce: 0.1913, aux.acc_seg: 81.2446, loss: 0.4608, grad_norm: 4.7886 2023-02-11 22:58:27,789 - mmseg - INFO - Iter [83050/160000] lr: 2.886e-05, eta: 4:24:54, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2666, decode.acc_seg: 89.7871, aux.loss_ce: 0.1886, aux.acc_seg: 81.9684, loss: 0.4552, grad_norm: 4.6783 2023-02-11 22:58:37,425 - mmseg - INFO - Iter [83100/160000] lr: 2.884e-05, eta: 4:24:43, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2437, decode.acc_seg: 90.5600, aux.loss_ce: 0.1820, aux.acc_seg: 82.8458, loss: 0.4257, grad_norm: 4.6156 2023-02-11 22:58:47,288 - mmseg - INFO - Iter [83150/160000] lr: 2.882e-05, eta: 4:24:32, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2770, decode.acc_seg: 89.3021, aux.loss_ce: 0.1925, aux.acc_seg: 81.2826, loss: 0.4695, grad_norm: 5.4541 2023-02-11 22:58:57,318 - mmseg - INFO - Iter [83200/160000] lr: 2.880e-05, eta: 4:24:21, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2655, decode.acc_seg: 89.5575, aux.loss_ce: 0.1878, aux.acc_seg: 81.6186, loss: 0.4533, grad_norm: 5.0703 2023-02-11 22:59:07,273 - mmseg - INFO - Iter [83250/160000] lr: 2.878e-05, eta: 4:24:11, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2624, decode.acc_seg: 89.5602, aux.loss_ce: 0.1804, aux.acc_seg: 82.1979, loss: 0.4428, grad_norm: 4.3964 2023-02-11 22:59:17,200 - mmseg - INFO - Iter [83300/160000] lr: 2.876e-05, eta: 4:24:00, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2669, decode.acc_seg: 89.4343, aux.loss_ce: 0.1844, aux.acc_seg: 82.0344, loss: 0.4513, grad_norm: 5.5651 2023-02-11 22:59:27,358 - mmseg - INFO - Iter [83350/160000] lr: 2.874e-05, eta: 4:23:49, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2459, decode.acc_seg: 90.2063, aux.loss_ce: 0.1789, aux.acc_seg: 82.6277, loss: 0.4248, grad_norm: 4.2346 2023-02-11 22:59:37,496 - mmseg - INFO - Iter [83400/160000] lr: 2.873e-05, eta: 4:23:39, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2607, decode.acc_seg: 89.5939, aux.loss_ce: 0.1832, aux.acc_seg: 81.8479, loss: 0.4440, grad_norm: 5.1169 2023-02-11 22:59:47,816 - mmseg - INFO - Iter [83450/160000] lr: 2.871e-05, eta: 4:23:29, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2692, decode.acc_seg: 89.5550, aux.loss_ce: 0.1889, aux.acc_seg: 81.8343, loss: 0.4581, grad_norm: 4.7788 2023-02-11 22:59:57,636 - mmseg - INFO - Iter [83500/160000] lr: 2.869e-05, eta: 4:23:18, time: 0.196, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.2481, decode.acc_seg: 90.2929, aux.loss_ce: 0.1774, aux.acc_seg: 82.4798, loss: 0.4255, grad_norm: 4.1352 2023-02-11 23:00:08,002 - mmseg - INFO - Iter [83550/160000] lr: 2.867e-05, eta: 4:23:07, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2602, decode.acc_seg: 89.7922, aux.loss_ce: 0.1822, aux.acc_seg: 82.4675, loss: 0.4424, grad_norm: 4.5159 2023-02-11 23:00:18,022 - mmseg - INFO - Iter [83600/160000] lr: 2.865e-05, eta: 4:22:57, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2657, decode.acc_seg: 89.7961, aux.loss_ce: 0.1859, aux.acc_seg: 82.3061, loss: 0.4516, grad_norm: 4.1843 2023-02-11 23:00:28,065 - mmseg - INFO - Iter [83650/160000] lr: 2.863e-05, eta: 4:22:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2681, decode.acc_seg: 89.4956, aux.loss_ce: 0.1903, aux.acc_seg: 81.5887, loss: 0.4584, grad_norm: 4.4661 2023-02-11 23:00:38,126 - mmseg - INFO - Iter [83700/160000] lr: 2.861e-05, eta: 4:22:35, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2715, decode.acc_seg: 89.5237, aux.loss_ce: 0.1927, aux.acc_seg: 81.6661, loss: 0.4642, grad_norm: 6.1109 2023-02-11 23:00:48,320 - mmseg - INFO - Iter [83750/160000] lr: 2.859e-05, eta: 4:22:25, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2668, decode.acc_seg: 89.5229, aux.loss_ce: 0.1897, aux.acc_seg: 81.4774, loss: 0.4565, grad_norm: 4.8953 2023-02-11 23:00:58,388 - mmseg - INFO - Iter [83800/160000] lr: 2.858e-05, eta: 4:22:14, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2718, decode.acc_seg: 89.6452, aux.loss_ce: 0.1966, aux.acc_seg: 81.7956, loss: 0.4684, grad_norm: 5.1800 2023-02-11 23:01:08,463 - mmseg - INFO - Iter [83850/160000] lr: 2.856e-05, eta: 4:22:04, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2529, decode.acc_seg: 89.7351, aux.loss_ce: 0.1816, aux.acc_seg: 81.7779, loss: 0.4345, grad_norm: 5.2058 2023-02-11 23:01:18,755 - mmseg - INFO - Iter [83900/160000] lr: 2.854e-05, eta: 4:21:53, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2700, decode.acc_seg: 89.6234, aux.loss_ce: 0.1904, aux.acc_seg: 82.1165, loss: 0.4604, grad_norm: 4.6185 2023-02-11 23:01:28,730 - mmseg - INFO - Iter [83950/160000] lr: 2.852e-05, eta: 4:21:43, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2620, decode.acc_seg: 90.0298, aux.loss_ce: 0.1863, aux.acc_seg: 82.1780, loss: 0.4484, grad_norm: 5.1224 2023-02-11 23:01:38,953 - mmseg - INFO - Saving checkpoint at 84000 iterations 2023-02-11 23:01:39,626 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:01:39,626 - mmseg - INFO - Iter [84000/160000] lr: 2.850e-05, eta: 4:21:33, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2559, decode.acc_seg: 89.9854, aux.loss_ce: 0.1768, aux.acc_seg: 83.0586, loss: 0.4327, grad_norm: 4.4139 2023-02-11 23:01:49,431 - mmseg - INFO - Iter [84050/160000] lr: 2.848e-05, eta: 4:21:22, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2545, decode.acc_seg: 90.1079, aux.loss_ce: 0.1814, aux.acc_seg: 82.4792, loss: 0.4359, grad_norm: 4.4622 2023-02-11 23:01:59,308 - mmseg - INFO - Iter [84100/160000] lr: 2.846e-05, eta: 4:21:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2665, decode.acc_seg: 89.8723, aux.loss_ce: 0.1914, aux.acc_seg: 82.0065, loss: 0.4579, grad_norm: 5.0742 2023-02-11 23:02:09,738 - mmseg - INFO - Iter [84150/160000] lr: 2.844e-05, eta: 4:21:01, time: 0.209, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.2723, decode.acc_seg: 89.1081, aux.loss_ce: 0.1920, aux.acc_seg: 81.1411, loss: 0.4643, grad_norm: 6.0472 2023-02-11 23:02:19,428 - mmseg - INFO - Iter [84200/160000] lr: 2.843e-05, eta: 4:20:50, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2677, decode.acc_seg: 89.4273, aux.loss_ce: 0.1917, aux.acc_seg: 81.4668, loss: 0.4594, grad_norm: 5.1870 2023-02-11 23:02:31,282 - mmseg - INFO - Iter [84250/160000] lr: 2.841e-05, eta: 4:20:41, time: 0.237, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2733, decode.acc_seg: 89.1676, aux.loss_ce: 0.1977, aux.acc_seg: 81.4366, loss: 0.4710, grad_norm: 5.4228 2023-02-11 23:02:41,166 - mmseg - INFO - Iter [84300/160000] lr: 2.839e-05, eta: 4:20:31, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2661, decode.acc_seg: 89.4931, aux.loss_ce: 0.1934, aux.acc_seg: 81.5783, loss: 0.4595, grad_norm: 4.5690 2023-02-11 23:02:51,197 - mmseg - INFO - Iter [84350/160000] lr: 2.837e-05, eta: 4:20:20, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2721, decode.acc_seg: 89.5061, aux.loss_ce: 0.1899, aux.acc_seg: 82.0803, loss: 0.4620, grad_norm: 5.5015 2023-02-11 23:03:01,348 - mmseg - INFO - Iter [84400/160000] lr: 2.835e-05, eta: 4:20:09, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2601, decode.acc_seg: 89.7135, aux.loss_ce: 0.1848, aux.acc_seg: 82.3238, loss: 0.4450, grad_norm: 4.9014 2023-02-11 23:03:11,840 - mmseg - INFO - Iter [84450/160000] lr: 2.833e-05, eta: 4:19:59, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2664, decode.acc_seg: 89.7181, aux.loss_ce: 0.1933, aux.acc_seg: 81.5782, loss: 0.4597, grad_norm: 5.5592 2023-02-11 23:03:21,895 - mmseg - INFO - Iter [84500/160000] lr: 2.831e-05, eta: 4:19:49, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2715, decode.acc_seg: 89.2890, aux.loss_ce: 0.1942, aux.acc_seg: 81.1889, loss: 0.4657, grad_norm: 5.4894 2023-02-11 23:03:31,768 - mmseg - INFO - Iter [84550/160000] lr: 2.829e-05, eta: 4:19:38, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2532, decode.acc_seg: 89.5591, aux.loss_ce: 0.1878, aux.acc_seg: 81.5490, loss: 0.4410, grad_norm: 5.1168 2023-02-11 23:03:41,524 - mmseg - INFO - Iter [84600/160000] lr: 2.828e-05, eta: 4:19:27, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2683, decode.acc_seg: 89.2997, aux.loss_ce: 0.1900, aux.acc_seg: 81.3990, loss: 0.4583, grad_norm: 4.2987 2023-02-11 23:03:52,128 - mmseg - INFO - Iter [84650/160000] lr: 2.826e-05, eta: 4:19:17, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2426, decode.acc_seg: 90.1240, aux.loss_ce: 0.1769, aux.acc_seg: 82.3774, loss: 0.4195, grad_norm: 4.1179 2023-02-11 23:04:02,009 - mmseg - INFO - Iter [84700/160000] lr: 2.824e-05, eta: 4:19:06, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2581, decode.acc_seg: 90.0401, aux.loss_ce: 0.1875, aux.acc_seg: 82.0643, loss: 0.4456, grad_norm: 4.2070 2023-02-11 23:04:12,316 - mmseg - INFO - Iter [84750/160000] lr: 2.822e-05, eta: 4:18:56, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2524, decode.acc_seg: 90.3375, aux.loss_ce: 0.1766, aux.acc_seg: 83.3101, loss: 0.4290, grad_norm: 4.5853 2023-02-11 23:04:22,536 - mmseg - INFO - Iter [84800/160000] lr: 2.820e-05, eta: 4:18:45, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2605, decode.acc_seg: 89.9619, aux.loss_ce: 0.1928, aux.acc_seg: 81.7712, loss: 0.4533, grad_norm: 5.1534 2023-02-11 23:04:32,970 - mmseg - INFO - Iter 
[84850/160000] lr: 2.818e-05, eta: 4:18:35, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2653, decode.acc_seg: 89.5060, aux.loss_ce: 0.1903, aux.acc_seg: 81.7429, loss: 0.4556, grad_norm: 5.2512 2023-02-11 23:04:43,275 - mmseg - INFO - Iter [84900/160000] lr: 2.816e-05, eta: 4:18:25, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2659, decode.acc_seg: 89.4867, aux.loss_ce: 0.1833, aux.acc_seg: 82.3135, loss: 0.4492, grad_norm: 4.7321 2023-02-11 23:04:53,462 - mmseg - INFO - Iter [84950/160000] lr: 2.814e-05, eta: 4:18:14, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2712, decode.acc_seg: 89.0608, aux.loss_ce: 0.1905, aux.acc_seg: 81.6934, loss: 0.4617, grad_norm: 4.7713 2023-02-11 23:05:03,183 - mmseg - INFO - Saving checkpoint at 85000 iterations 2023-02-11 23:05:03,879 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:05:03,879 - mmseg - INFO - Iter [85000/160000] lr: 2.813e-05, eta: 4:18:04, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2711, decode.acc_seg: 89.2415, aux.loss_ce: 0.1832, aux.acc_seg: 82.0888, loss: 0.4543, grad_norm: 4.8003 2023-02-11 23:05:13,823 - mmseg - INFO - Iter [85050/160000] lr: 2.811e-05, eta: 4:17:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2566, decode.acc_seg: 89.9673, aux.loss_ce: 0.1880, aux.acc_seg: 82.1865, loss: 0.4446, grad_norm: 5.4330 2023-02-11 23:05:23,622 - mmseg - INFO - Iter [85100/160000] lr: 2.809e-05, eta: 4:17:42, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2746, decode.acc_seg: 89.4697, aux.loss_ce: 0.1947, aux.acc_seg: 81.1641, loss: 0.4692, grad_norm: 4.9388 2023-02-11 23:05:33,832 - mmseg - INFO - Iter [85150/160000] lr: 2.807e-05, eta: 4:17:32, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2552, decode.acc_seg: 89.9263, aux.loss_ce: 0.1891, aux.acc_seg: 81.8600, loss: 0.4443, grad_norm: 5.4095 2023-02-11 23:05:44,064 - mmseg - INFO - Iter [85200/160000] lr: 2.805e-05, eta: 4:17:22, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2459, decode.acc_seg: 90.6720, aux.loss_ce: 0.1851, aux.acc_seg: 82.3163, loss: 0.4311, grad_norm: 4.4699 2023-02-11 23:05:53,987 - mmseg - INFO - Iter [85250/160000] lr: 2.803e-05, eta: 4:17:11, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2745, decode.acc_seg: 89.3795, aux.loss_ce: 0.1931, aux.acc_seg: 81.0994, loss: 0.4676, grad_norm: 5.3891 2023-02-11 23:06:03,863 - mmseg - INFO - Iter [85300/160000] lr: 2.801e-05, eta: 4:17:00, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2723, decode.acc_seg: 89.5463, aux.loss_ce: 0.1930, aux.acc_seg: 81.5713, loss: 0.4653, grad_norm: 5.0555 2023-02-11 23:06:14,318 - mmseg - INFO - Iter [85350/160000] lr: 2.799e-05, eta: 4:16:50, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2724, decode.acc_seg: 89.4676, aux.loss_ce: 0.1880, aux.acc_seg: 82.1174, loss: 0.4605, grad_norm: 5.0886 2023-02-11 23:06:24,371 - mmseg - INFO - Iter [85400/160000] lr: 2.798e-05, eta: 4:16:39, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2501, decode.acc_seg: 90.2613, aux.loss_ce: 0.1834, aux.acc_seg: 82.4751, loss: 0.4335, grad_norm: 5.1250 2023-02-11 23:06:34,921 - mmseg - INFO - Iter [85450/160000] lr: 2.796e-05, eta: 4:16:29, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2695, decode.acc_seg: 89.4986, aux.loss_ce: 0.1942, aux.acc_seg: 81.3008, loss: 0.4636, grad_norm: 4.6270 2023-02-11 23:06:47,280 - mmseg - INFO - Iter 
[85500/160000] lr: 2.794e-05, eta: 4:16:21, time: 0.247, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2611, decode.acc_seg: 89.8614, aux.loss_ce: 0.1887, aux.acc_seg: 81.9320, loss: 0.4498, grad_norm: 4.3898 2023-02-11 23:06:57,434 - mmseg - INFO - Iter [85550/160000] lr: 2.792e-05, eta: 4:16:10, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2514, decode.acc_seg: 90.2973, aux.loss_ce: 0.1778, aux.acc_seg: 82.9150, loss: 0.4292, grad_norm: 4.2202 2023-02-11 23:07:07,928 - mmseg - INFO - Iter [85600/160000] lr: 2.790e-05, eta: 4:16:00, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2556, decode.acc_seg: 90.0521, aux.loss_ce: 0.1843, aux.acc_seg: 82.4585, loss: 0.4400, grad_norm: 4.2447 2023-02-11 23:07:17,822 - mmseg - INFO - Iter [85650/160000] lr: 2.788e-05, eta: 4:15:50, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2603, decode.acc_seg: 90.1157, aux.loss_ce: 0.1850, aux.acc_seg: 82.5272, loss: 0.4452, grad_norm: 5.6444 2023-02-11 23:07:27,950 - mmseg - INFO - Iter [85700/160000] lr: 2.786e-05, eta: 4:15:39, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2624, decode.acc_seg: 89.9575, aux.loss_ce: 0.1877, aux.acc_seg: 81.9493, loss: 0.4501, grad_norm: 4.9973 2023-02-11 23:07:37,777 - mmseg - INFO - Iter [85750/160000] lr: 2.784e-05, eta: 4:15:28, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2381, decode.acc_seg: 90.8398, aux.loss_ce: 0.1815, aux.acc_seg: 82.8018, loss: 0.4196, grad_norm: 4.3295 2023-02-11 23:07:47,546 - mmseg - INFO - Iter [85800/160000] lr: 2.783e-05, eta: 4:15:17, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2558, decode.acc_seg: 90.0500, aux.loss_ce: 0.1831, aux.acc_seg: 82.4627, loss: 0.4389, grad_norm: 4.7666 2023-02-11 23:07:57,998 - mmseg - INFO - Iter [85850/160000] lr: 2.781e-05, eta: 4:15:07, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2751, decode.acc_seg: 89.2870, aux.loss_ce: 0.1887, aux.acc_seg: 81.9933, loss: 0.4639, grad_norm: 4.9267 2023-02-11 23:08:08,538 - mmseg - INFO - Iter [85900/160000] lr: 2.779e-05, eta: 4:14:57, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2520, decode.acc_seg: 89.8831, aux.loss_ce: 0.1886, aux.acc_seg: 81.8172, loss: 0.4405, grad_norm: 4.8058 2023-02-11 23:08:18,402 - mmseg - INFO - Iter [85950/160000] lr: 2.777e-05, eta: 4:14:46, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2583, decode.acc_seg: 89.8780, aux.loss_ce: 0.1863, aux.acc_seg: 82.2608, loss: 0.4446, grad_norm: 6.9700 2023-02-11 23:08:28,647 - mmseg - INFO - Saving checkpoint at 86000 iterations 2023-02-11 23:08:29,337 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:08:29,337 - mmseg - INFO - Iter [86000/160000] lr: 2.775e-05, eta: 4:14:37, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2678, decode.acc_seg: 89.4215, aux.loss_ce: 0.1888, aux.acc_seg: 81.8981, loss: 0.4566, grad_norm: 5.2703 2023-02-11 23:08:39,263 - mmseg - INFO - Iter [86050/160000] lr: 2.773e-05, eta: 4:14:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2617, decode.acc_seg: 89.8208, aux.loss_ce: 0.1981, aux.acc_seg: 81.2027, loss: 0.4599, grad_norm: 5.3653 2023-02-11 23:08:49,145 - mmseg - INFO - Iter [86100/160000] lr: 2.771e-05, eta: 4:14:15, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2632, decode.acc_seg: 89.4891, aux.loss_ce: 0.1901, aux.acc_seg: 81.5405, loss: 0.4534, grad_norm: 5.6948 2023-02-11 23:08:59,575 - mmseg - INFO - Iter 
[86150/160000] lr: 2.769e-05, eta: 4:14:05, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2659, decode.acc_seg: 89.6952, aux.loss_ce: 0.1896, aux.acc_seg: 82.0284, loss: 0.4555, grad_norm: 5.5290 2023-02-11 23:09:09,648 - mmseg - INFO - Iter [86200/160000] lr: 2.768e-05, eta: 4:13:54, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2559, decode.acc_seg: 90.1024, aux.loss_ce: 0.1791, aux.acc_seg: 82.8739, loss: 0.4350, grad_norm: 4.0313 2023-02-11 23:09:19,492 - mmseg - INFO - Iter [86250/160000] lr: 2.766e-05, eta: 4:13:43, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2769, decode.acc_seg: 89.1260, aux.loss_ce: 0.1878, aux.acc_seg: 82.1551, loss: 0.4647, grad_norm: 4.7416 2023-02-11 23:09:29,835 - mmseg - INFO - Iter [86300/160000] lr: 2.764e-05, eta: 4:13:33, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2592, decode.acc_seg: 89.8909, aux.loss_ce: 0.1933, aux.acc_seg: 81.7616, loss: 0.4525, grad_norm: 4.5270 2023-02-11 23:09:39,534 - mmseg - INFO - Iter [86350/160000] lr: 2.762e-05, eta: 4:13:22, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2563, decode.acc_seg: 89.5532, aux.loss_ce: 0.1855, aux.acc_seg: 81.6700, loss: 0.4418, grad_norm: 4.5144 2023-02-11 23:09:49,864 - mmseg - INFO - Iter [86400/160000] lr: 2.760e-05, eta: 4:13:12, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2671, decode.acc_seg: 89.6194, aux.loss_ce: 0.1893, aux.acc_seg: 81.8116, loss: 0.4565, grad_norm: 4.2499 2023-02-11 23:10:00,107 - mmseg - INFO - Iter [86450/160000] lr: 2.758e-05, eta: 4:13:01, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2570, decode.acc_seg: 89.8316, aux.loss_ce: 0.1777, aux.acc_seg: 82.4012, loss: 0.4347, grad_norm: 5.2569 2023-02-11 23:10:10,074 - mmseg - INFO - Iter [86500/160000] lr: 2.756e-05, eta: 4:12:51, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2698, decode.acc_seg: 89.4614, aux.loss_ce: 0.1907, aux.acc_seg: 81.9060, loss: 0.4605, grad_norm: 4.5455 2023-02-11 23:10:20,119 - mmseg - INFO - Iter [86550/160000] lr: 2.754e-05, eta: 4:12:40, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2546, decode.acc_seg: 89.8406, aux.loss_ce: 0.1850, aux.acc_seg: 81.8350, loss: 0.4397, grad_norm: 4.0108 2023-02-11 23:10:29,964 - mmseg - INFO - Iter [86600/160000] lr: 2.753e-05, eta: 4:12:29, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2501, decode.acc_seg: 90.1964, aux.loss_ce: 0.1815, aux.acc_seg: 82.5213, loss: 0.4316, grad_norm: 4.7383 2023-02-11 23:10:39,792 - mmseg - INFO - Iter [86650/160000] lr: 2.751e-05, eta: 4:12:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2720, decode.acc_seg: 89.5658, aux.loss_ce: 0.1974, aux.acc_seg: 80.8212, loss: 0.4694, grad_norm: 4.9956 2023-02-11 23:10:50,325 - mmseg - INFO - Iter [86700/160000] lr: 2.749e-05, eta: 4:12:08, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2968, decode.acc_seg: 88.6697, aux.loss_ce: 0.2078, aux.acc_seg: 80.4891, loss: 0.5047, grad_norm: 6.4584 2023-02-11 23:11:02,548 - mmseg - INFO - Iter [86750/160000] lr: 2.747e-05, eta: 4:12:00, time: 0.244, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2482, decode.acc_seg: 90.1459, aux.loss_ce: 0.1810, aux.acc_seg: 82.6123, loss: 0.4292, grad_norm: 5.1417 2023-02-11 23:11:12,695 - mmseg - INFO - Iter [86800/160000] lr: 2.745e-05, eta: 4:11:50, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2514, decode.acc_seg: 89.8907, aux.loss_ce: 0.1787, aux.acc_seg: 82.4694, loss: 
0.4301, grad_norm: 4.5197 2023-02-11 23:11:23,039 - mmseg - INFO - Iter [86850/160000] lr: 2.743e-05, eta: 4:11:39, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2627, decode.acc_seg: 89.5967, aux.loss_ce: 0.1873, aux.acc_seg: 81.8071, loss: 0.4500, grad_norm: 4.3915 2023-02-11 23:11:32,803 - mmseg - INFO - Iter [86900/160000] lr: 2.741e-05, eta: 4:11:28, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2514, decode.acc_seg: 90.1765, aux.loss_ce: 0.1734, aux.acc_seg: 83.2106, loss: 0.4248, grad_norm: 4.9333 2023-02-11 23:11:42,604 - mmseg - INFO - Iter [86950/160000] lr: 2.739e-05, eta: 4:11:17, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2611, decode.acc_seg: 89.8221, aux.loss_ce: 0.1871, aux.acc_seg: 81.9956, loss: 0.4482, grad_norm: 5.0169 2023-02-11 23:11:52,546 - mmseg - INFO - Saving checkpoint at 87000 iterations 2023-02-11 23:11:53,233 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:11:53,233 - mmseg - INFO - Iter [87000/160000] lr: 2.738e-05, eta: 4:11:07, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2657, decode.acc_seg: 89.6668, aux.loss_ce: 0.1858, aux.acc_seg: 81.9475, loss: 0.4515, grad_norm: 4.9252 2023-02-11 23:12:03,278 - mmseg - INFO - Iter [87050/160000] lr: 2.736e-05, eta: 4:10:57, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2502, decode.acc_seg: 89.9958, aux.loss_ce: 0.1768, aux.acc_seg: 82.9456, loss: 0.4270, grad_norm: 3.7840 2023-02-11 23:12:13,166 - mmseg - INFO - Iter [87100/160000] lr: 2.734e-05, eta: 4:10:46, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2536, decode.acc_seg: 89.9858, aux.loss_ce: 0.1776, aux.acc_seg: 82.3689, loss: 0.4312, grad_norm: 4.7224 2023-02-11 23:12:23,191 - mmseg - INFO - Iter [87150/160000] lr: 2.732e-05, eta: 4:10:35, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2378, decode.acc_seg: 90.7499, aux.loss_ce: 0.1718, aux.acc_seg: 83.0867, loss: 0.4096, grad_norm: 3.8275 2023-02-11 23:12:33,182 - mmseg - INFO - Iter [87200/160000] lr: 2.730e-05, eta: 4:10:25, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2618, decode.acc_seg: 89.8858, aux.loss_ce: 0.1833, aux.acc_seg: 82.3634, loss: 0.4451, grad_norm: 4.2200 2023-02-11 23:12:43,088 - mmseg - INFO - Iter [87250/160000] lr: 2.728e-05, eta: 4:10:14, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2707, decode.acc_seg: 89.6230, aux.loss_ce: 0.2002, aux.acc_seg: 81.1599, loss: 0.4709, grad_norm: 5.0647 2023-02-11 23:12:53,031 - mmseg - INFO - Iter [87300/160000] lr: 2.726e-05, eta: 4:10:03, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2552, decode.acc_seg: 89.8351, aux.loss_ce: 0.1818, aux.acc_seg: 81.8590, loss: 0.4370, grad_norm: 4.5080 2023-02-11 23:13:03,305 - mmseg - INFO - Iter [87350/160000] lr: 2.724e-05, eta: 4:09:53, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2612, decode.acc_seg: 89.7495, aux.loss_ce: 0.1853, aux.acc_seg: 82.2642, loss: 0.4465, grad_norm: 4.7992 2023-02-11 23:13:13,364 - mmseg - INFO - Iter [87400/160000] lr: 2.723e-05, eta: 4:09:42, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2659, decode.acc_seg: 89.6746, aux.loss_ce: 0.1923, aux.acc_seg: 81.7655, loss: 0.4581, grad_norm: 4.2660 2023-02-11 23:13:23,379 - mmseg - INFO - Iter [87450/160000] lr: 2.721e-05, eta: 4:09:32, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2580, decode.acc_seg: 89.9980, aux.loss_ce: 0.1864, aux.acc_seg: 82.3035, loss: 0.4444, 
grad_norm: 4.5835 2023-02-11 23:13:33,379 - mmseg - INFO - Iter [87500/160000] lr: 2.719e-05, eta: 4:09:21, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2767, decode.acc_seg: 89.5356, aux.loss_ce: 0.1966, aux.acc_seg: 81.3118, loss: 0.4733, grad_norm: 5.3683 2023-02-11 23:13:43,324 - mmseg - INFO - Iter [87550/160000] lr: 2.717e-05, eta: 4:09:10, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2459, decode.acc_seg: 90.0136, aux.loss_ce: 0.1738, aux.acc_seg: 82.6705, loss: 0.4196, grad_norm: 4.0591 2023-02-11 23:13:53,507 - mmseg - INFO - Iter [87600/160000] lr: 2.715e-05, eta: 4:09:00, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2691, decode.acc_seg: 89.4754, aux.loss_ce: 0.1826, aux.acc_seg: 82.3403, loss: 0.4517, grad_norm: 5.3159 2023-02-11 23:14:03,322 - mmseg - INFO - Iter [87650/160000] lr: 2.713e-05, eta: 4:08:49, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2558, decode.acc_seg: 90.3471, aux.loss_ce: 0.1919, aux.acc_seg: 81.7385, loss: 0.4478, grad_norm: 4.6473 2023-02-11 23:14:13,129 - mmseg - INFO - Iter [87700/160000] lr: 2.711e-05, eta: 4:08:38, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2525, decode.acc_seg: 89.9445, aux.loss_ce: 0.1740, aux.acc_seg: 83.1740, loss: 0.4264, grad_norm: 4.4620 2023-02-11 23:14:22,961 - mmseg - INFO - Iter [87750/160000] lr: 2.709e-05, eta: 4:08:28, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2637, decode.acc_seg: 89.6649, aux.loss_ce: 0.1842, aux.acc_seg: 82.0277, loss: 0.4479, grad_norm: 5.9058 2023-02-11 23:14:33,385 - mmseg - INFO - Iter [87800/160000] lr: 2.708e-05, eta: 4:08:17, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2636, decode.acc_seg: 89.7845, aux.loss_ce: 0.1882, aux.acc_seg: 82.1228, loss: 0.4518, grad_norm: 4.8438 2023-02-11 23:14:43,335 - mmseg - INFO - Iter [87850/160000] lr: 2.706e-05, eta: 4:08:07, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2408, decode.acc_seg: 90.6283, aux.loss_ce: 0.1763, aux.acc_seg: 83.2744, loss: 0.4171, grad_norm: 4.4803 2023-02-11 23:14:53,208 - mmseg - INFO - Iter [87900/160000] lr: 2.704e-05, eta: 4:07:56, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2567, decode.acc_seg: 89.6851, aux.loss_ce: 0.1816, aux.acc_seg: 81.8080, loss: 0.4383, grad_norm: 4.5702 2023-02-11 23:15:02,996 - mmseg - INFO - Iter [87950/160000] lr: 2.702e-05, eta: 4:07:45, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2485, decode.acc_seg: 90.2423, aux.loss_ce: 0.1806, aux.acc_seg: 82.4778, loss: 0.4292, grad_norm: 4.4909 2023-02-11 23:15:15,485 - mmseg - INFO - Saving checkpoint at 88000 iterations 2023-02-11 23:15:16,157 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:15:16,157 - mmseg - INFO - Iter [88000/160000] lr: 2.700e-05, eta: 4:07:38, time: 0.264, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2733, decode.acc_seg: 89.4809, aux.loss_ce: 0.1970, aux.acc_seg: 81.0033, loss: 0.4703, grad_norm: 4.7775 2023-02-11 23:15:26,332 - mmseg - INFO - Iter [88050/160000] lr: 2.698e-05, eta: 4:07:27, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2507, decode.acc_seg: 90.2217, aux.loss_ce: 0.1821, aux.acc_seg: 82.4812, loss: 0.4328, grad_norm: 4.1908 2023-02-11 23:15:36,178 - mmseg - INFO - Iter [88100/160000] lr: 2.696e-05, eta: 4:07:16, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2563, decode.acc_seg: 90.0371, aux.loss_ce: 0.1793, aux.acc_seg: 82.6840, loss: 0.4357, grad_norm: 
4.7555 2023-02-11 23:15:46,106 - mmseg - INFO - Iter [88150/160000] lr: 2.694e-05, eta: 4:07:06, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2715, decode.acc_seg: 89.5419, aux.loss_ce: 0.1939, aux.acc_seg: 81.6386, loss: 0.4654, grad_norm: 5.0238 2023-02-11 23:15:56,282 - mmseg - INFO - Iter [88200/160000] lr: 2.693e-05, eta: 4:06:55, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2476, decode.acc_seg: 90.3861, aux.loss_ce: 0.1811, aux.acc_seg: 82.6408, loss: 0.4286, grad_norm: 4.3148 2023-02-11 23:16:06,284 - mmseg - INFO - Iter [88250/160000] lr: 2.691e-05, eta: 4:06:45, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2536, decode.acc_seg: 90.1636, aux.loss_ce: 0.1770, aux.acc_seg: 82.9143, loss: 0.4306, grad_norm: 5.5773 2023-02-11 23:16:16,282 - mmseg - INFO - Iter [88300/160000] lr: 2.689e-05, eta: 4:06:34, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2665, decode.acc_seg: 89.4910, aux.loss_ce: 0.1924, aux.acc_seg: 81.0552, loss: 0.4589, grad_norm: 5.1436 2023-02-11 23:16:26,090 - mmseg - INFO - Iter [88350/160000] lr: 2.687e-05, eta: 4:06:23, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2540, decode.acc_seg: 90.0066, aux.loss_ce: 0.1894, aux.acc_seg: 81.9462, loss: 0.4434, grad_norm: 4.5016 2023-02-11 23:16:35,896 - mmseg - INFO - Iter [88400/160000] lr: 2.685e-05, eta: 4:06:12, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2576, decode.acc_seg: 89.8629, aux.loss_ce: 0.1779, aux.acc_seg: 82.7003, loss: 0.4355, grad_norm: 4.6895 2023-02-11 23:16:45,949 - mmseg - INFO - Iter [88450/160000] lr: 2.683e-05, eta: 4:06:02, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2677, decode.acc_seg: 89.3987, aux.loss_ce: 0.1915, aux.acc_seg: 81.3693, loss: 0.4592, grad_norm: 6.9853 2023-02-11 23:16:56,912 - mmseg - INFO - Iter [88500/160000] lr: 2.681e-05, eta: 4:05:52, time: 0.219, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2744, decode.acc_seg: 89.5369, aux.loss_ce: 0.1980, aux.acc_seg: 81.3570, loss: 0.4724, grad_norm: 5.1478 2023-02-11 23:17:07,175 - mmseg - INFO - Iter [88550/160000] lr: 2.679e-05, eta: 4:05:42, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2523, decode.acc_seg: 90.0313, aux.loss_ce: 0.1818, aux.acc_seg: 82.6492, loss: 0.4341, grad_norm: 4.6332 2023-02-11 23:17:17,789 - mmseg - INFO - Iter [88600/160000] lr: 2.678e-05, eta: 4:05:32, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2601, decode.acc_seg: 89.8912, aux.loss_ce: 0.1789, aux.acc_seg: 82.6955, loss: 0.4390, grad_norm: 4.4723 2023-02-11 23:17:27,538 - mmseg - INFO - Iter [88650/160000] lr: 2.676e-05, eta: 4:05:21, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2473, decode.acc_seg: 90.3608, aux.loss_ce: 0.1823, aux.acc_seg: 82.2635, loss: 0.4297, grad_norm: 4.8750 2023-02-11 23:17:37,787 - mmseg - INFO - Iter [88700/160000] lr: 2.674e-05, eta: 4:05:10, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2710, decode.acc_seg: 89.3215, aux.loss_ce: 0.1883, aux.acc_seg: 81.5072, loss: 0.4594, grad_norm: 5.0562 2023-02-11 23:17:47,617 - mmseg - INFO - Iter [88750/160000] lr: 2.672e-05, eta: 4:05:00, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2454, decode.acc_seg: 89.9253, aux.loss_ce: 0.1727, aux.acc_seg: 83.1351, loss: 0.4180, grad_norm: 5.1330 2023-02-11 23:17:57,810 - mmseg - INFO - Iter [88800/160000] lr: 2.670e-05, eta: 4:04:49, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2646, decode.acc_seg: 
89.6167, aux.loss_ce: 0.1868, aux.acc_seg: 82.0038, loss: 0.4514, grad_norm: 4.6760 2023-02-11 23:18:07,972 - mmseg - INFO - Iter [88850/160000] lr: 2.668e-05, eta: 4:04:39, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2557, decode.acc_seg: 89.8982, aux.loss_ce: 0.1840, aux.acc_seg: 82.2118, loss: 0.4397, grad_norm: 4.7767 2023-02-11 23:18:17,697 - mmseg - INFO - Iter [88900/160000] lr: 2.666e-05, eta: 4:04:28, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2593, decode.acc_seg: 89.4273, aux.loss_ce: 0.1850, aux.acc_seg: 81.6489, loss: 0.4443, grad_norm: 4.8486 2023-02-11 23:18:27,877 - mmseg - INFO - Iter [88950/160000] lr: 2.664e-05, eta: 4:04:17, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2514, decode.acc_seg: 89.8483, aux.loss_ce: 0.1856, aux.acc_seg: 81.7883, loss: 0.4369, grad_norm: 5.2291 2023-02-11 23:18:37,833 - mmseg - INFO - Saving checkpoint at 89000 iterations 2023-02-11 23:18:38,507 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:18:38,507 - mmseg - INFO - Iter [89000/160000] lr: 2.663e-05, eta: 4:04:07, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2591, decode.acc_seg: 90.1068, aux.loss_ce: 0.1869, aux.acc_seg: 82.3945, loss: 0.4460, grad_norm: 4.4835 2023-02-11 23:18:49,042 - mmseg - INFO - Iter [89050/160000] lr: 2.661e-05, eta: 4:03:57, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2509, decode.acc_seg: 90.2276, aux.loss_ce: 0.1851, aux.acc_seg: 82.0503, loss: 0.4360, grad_norm: 4.6542 2023-02-11 23:18:59,483 - mmseg - INFO - Iter [89100/160000] lr: 2.659e-05, eta: 4:03:47, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2677, decode.acc_seg: 89.9812, aux.loss_ce: 0.1848, aux.acc_seg: 82.5872, loss: 0.4525, grad_norm: 5.0209 2023-02-11 23:19:09,897 - mmseg - INFO - Iter [89150/160000] lr: 2.657e-05, eta: 4:03:37, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2399, decode.acc_seg: 90.5409, aux.loss_ce: 0.1738, aux.acc_seg: 83.1703, loss: 0.4136, grad_norm: 4.7704 2023-02-11 23:19:19,553 - mmseg - INFO - Iter [89200/160000] lr: 2.655e-05, eta: 4:03:26, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2417, decode.acc_seg: 90.3985, aux.loss_ce: 0.1811, aux.acc_seg: 82.4397, loss: 0.4228, grad_norm: 5.1492 2023-02-11 23:19:29,408 - mmseg - INFO - Iter [89250/160000] lr: 2.653e-05, eta: 4:03:15, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2614, decode.acc_seg: 89.8377, aux.loss_ce: 0.1862, aux.acc_seg: 82.1094, loss: 0.4476, grad_norm: 4.5706 2023-02-11 23:19:41,810 - mmseg - INFO - Iter [89300/160000] lr: 2.651e-05, eta: 4:03:07, time: 0.248, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2541, decode.acc_seg: 90.2337, aux.loss_ce: 0.1824, aux.acc_seg: 82.6532, loss: 0.4365, grad_norm: 4.7870 2023-02-11 23:19:51,987 - mmseg - INFO - Iter [89350/160000] lr: 2.649e-05, eta: 4:02:56, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2541, decode.acc_seg: 89.7912, aux.loss_ce: 0.1792, aux.acc_seg: 82.3825, loss: 0.4333, grad_norm: 4.7089 2023-02-11 23:20:02,495 - mmseg - INFO - Iter [89400/160000] lr: 2.648e-05, eta: 4:02:46, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2443, decode.acc_seg: 90.5333, aux.loss_ce: 0.1827, aux.acc_seg: 82.4009, loss: 0.4270, grad_norm: 4.2899 2023-02-11 23:20:12,553 - mmseg - INFO - Iter [89450/160000] lr: 2.646e-05, eta: 4:02:36, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2659, decode.acc_seg: 89.7812, 
aux.loss_ce: 0.1920, aux.acc_seg: 81.8722, loss: 0.4579, grad_norm: 4.7656 2023-02-11 23:20:23,003 - mmseg - INFO - Iter [89500/160000] lr: 2.644e-05, eta: 4:02:25, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2443, decode.acc_seg: 90.2463, aux.loss_ce: 0.1744, aux.acc_seg: 83.0376, loss: 0.4187, grad_norm: 4.5044 2023-02-11 23:20:33,133 - mmseg - INFO - Iter [89550/160000] lr: 2.642e-05, eta: 4:02:15, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2599, decode.acc_seg: 90.0514, aux.loss_ce: 0.1889, aux.acc_seg: 82.0520, loss: 0.4488, grad_norm: 4.7348 2023-02-11 23:20:43,164 - mmseg - INFO - Iter [89600/160000] lr: 2.640e-05, eta: 4:02:04, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2340, decode.acc_seg: 90.9770, aux.loss_ce: 0.1645, aux.acc_seg: 83.9260, loss: 0.3984, grad_norm: 3.8974 2023-02-11 23:20:52,926 - mmseg - INFO - Iter [89650/160000] lr: 2.638e-05, eta: 4:01:53, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2489, decode.acc_seg: 90.2638, aux.loss_ce: 0.1850, aux.acc_seg: 81.9637, loss: 0.4339, grad_norm: 4.7881 2023-02-11 23:21:02,816 - mmseg - INFO - Iter [89700/160000] lr: 2.636e-05, eta: 4:01:43, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2488, decode.acc_seg: 90.1649, aux.loss_ce: 0.1777, aux.acc_seg: 82.9059, loss: 0.4265, grad_norm: 4.5019 2023-02-11 23:21:12,712 - mmseg - INFO - Iter [89750/160000] lr: 2.634e-05, eta: 4:01:32, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2640, decode.acc_seg: 89.6668, aux.loss_ce: 0.1925, aux.acc_seg: 81.0718, loss: 0.4565, grad_norm: 4.5282 2023-02-11 23:21:23,437 - mmseg - INFO - Iter [89800/160000] lr: 2.633e-05, eta: 4:01:22, time: 0.215, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2783, decode.acc_seg: 89.6192, aux.loss_ce: 0.1898, aux.acc_seg: 81.8749, loss: 0.4682, grad_norm: 5.6435 2023-02-11 23:21:33,586 - mmseg - INFO - Iter [89850/160000] lr: 2.631e-05, eta: 4:01:12, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2578, decode.acc_seg: 89.8485, aux.loss_ce: 0.1885, aux.acc_seg: 81.5208, loss: 0.4463, grad_norm: 4.5655 2023-02-11 23:21:43,601 - mmseg - INFO - Iter [89900/160000] lr: 2.629e-05, eta: 4:01:01, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2556, decode.acc_seg: 90.0542, aux.loss_ce: 0.1909, aux.acc_seg: 81.1360, loss: 0.4464, grad_norm: 5.2274 2023-02-11 23:21:53,362 - mmseg - INFO - Iter [89950/160000] lr: 2.627e-05, eta: 4:00:50, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2600, decode.acc_seg: 89.9060, aux.loss_ce: 0.1894, aux.acc_seg: 81.4340, loss: 0.4494, grad_norm: 5.2307 2023-02-11 23:22:03,551 - mmseg - INFO - Saving checkpoint at 90000 iterations 2023-02-11 23:22:04,283 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:22:04,284 - mmseg - INFO - Iter [90000/160000] lr: 2.625e-05, eta: 4:00:40, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2759, decode.acc_seg: 89.6630, aux.loss_ce: 0.2030, aux.acc_seg: 81.2545, loss: 0.4789, grad_norm: 4.5223 2023-02-11 23:22:14,067 - mmseg - INFO - Iter [90050/160000] lr: 2.623e-05, eta: 4:00:30, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2658, decode.acc_seg: 89.5052, aux.loss_ce: 0.1912, aux.acc_seg: 81.7397, loss: 0.4569, grad_norm: 4.5246 2023-02-11 23:22:24,281 - mmseg - INFO - Iter [90100/160000] lr: 2.621e-05, eta: 4:00:19, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2342, decode.acc_seg: 90.8445, 
aux.loss_ce: 0.1785, aux.acc_seg: 83.0958, loss: 0.4127, grad_norm: 4.5512 2023-02-11 23:22:34,814 - mmseg - INFO - Iter [90150/160000] lr: 2.619e-05, eta: 4:00:09, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2331, decode.acc_seg: 91.0277, aux.loss_ce: 0.1735, aux.acc_seg: 83.5036, loss: 0.4067, grad_norm: 5.3325 2023-02-11 23:22:44,501 - mmseg - INFO - Iter [90200/160000] lr: 2.618e-05, eta: 3:59:58, time: 0.194, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2587, decode.acc_seg: 89.7941, aux.loss_ce: 0.1819, aux.acc_seg: 82.2852, loss: 0.4406, grad_norm: 4.9180 2023-02-11 23:22:55,023 - mmseg - INFO - Iter [90250/160000] lr: 2.616e-05, eta: 3:59:48, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2557, decode.acc_seg: 89.9654, aux.loss_ce: 0.1866, aux.acc_seg: 82.0768, loss: 0.4422, grad_norm: 5.4025 2023-02-11 23:23:05,223 - mmseg - INFO - Iter [90300/160000] lr: 2.614e-05, eta: 3:59:38, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2379, decode.acc_seg: 90.6646, aux.loss_ce: 0.1780, aux.acc_seg: 82.6700, loss: 0.4160, grad_norm: 4.0706 2023-02-11 23:23:15,491 - mmseg - INFO - Iter [90350/160000] lr: 2.612e-05, eta: 3:59:27, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2495, decode.acc_seg: 90.1172, aux.loss_ce: 0.1861, aux.acc_seg: 81.7289, loss: 0.4356, grad_norm: 5.1623 2023-02-11 23:23:25,577 - mmseg - INFO - Iter [90400/160000] lr: 2.610e-05, eta: 3:59:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2666, decode.acc_seg: 89.5812, aux.loss_ce: 0.1956, aux.acc_seg: 80.8406, loss: 0.4622, grad_norm: 5.1571 2023-02-11 23:23:35,942 - mmseg - INFO - Iter [90450/160000] lr: 2.608e-05, eta: 3:59:07, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2432, decode.acc_seg: 90.4225, aux.loss_ce: 0.1801, aux.acc_seg: 82.4510, loss: 0.4233, grad_norm: 4.8673 2023-02-11 23:23:45,682 - mmseg - INFO - Iter [90500/160000] lr: 2.606e-05, eta: 3:58:56, time: 0.195, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2480, decode.acc_seg: 90.3418, aux.loss_ce: 0.1848, aux.acc_seg: 82.2305, loss: 0.4329, grad_norm: 5.1572 2023-02-11 23:23:57,792 - mmseg - INFO - Iter [90550/160000] lr: 2.604e-05, eta: 3:58:47, time: 0.242, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2420, decode.acc_seg: 90.6814, aux.loss_ce: 0.1876, aux.acc_seg: 82.1536, loss: 0.4296, grad_norm: 6.0080 2023-02-11 23:24:07,537 - mmseg - INFO - Iter [90600/160000] lr: 2.603e-05, eta: 3:58:36, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2585, decode.acc_seg: 89.9719, aux.loss_ce: 0.1852, aux.acc_seg: 82.5311, loss: 0.4437, grad_norm: 4.5939 2023-02-11 23:24:17,595 - mmseg - INFO - Iter [90650/160000] lr: 2.601e-05, eta: 3:58:26, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2461, decode.acc_seg: 90.2329, aux.loss_ce: 0.1789, aux.acc_seg: 82.4721, loss: 0.4250, grad_norm: 6.4653 2023-02-11 23:24:27,731 - mmseg - INFO - Iter [90700/160000] lr: 2.599e-05, eta: 3:58:15, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2623, decode.acc_seg: 89.9367, aux.loss_ce: 0.1871, aux.acc_seg: 82.1251, loss: 0.4495, grad_norm: 5.1032 2023-02-11 23:24:37,481 - mmseg - INFO - Iter [90750/160000] lr: 2.597e-05, eta: 3:58:04, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2557, decode.acc_seg: 90.0420, aux.loss_ce: 0.1793, aux.acc_seg: 82.7913, loss: 0.4350, grad_norm: 4.7015 2023-02-11 23:24:47,365 - mmseg - INFO - Iter [90800/160000] lr: 2.595e-05, eta: 3:57:54, time: 0.198, 
data_time: 0.004, memory: 7748, decode.loss_ce: 0.2576, decode.acc_seg: 90.0826, aux.loss_ce: 0.1935, aux.acc_seg: 81.5148, loss: 0.4510, grad_norm: 5.0613 2023-02-11 23:24:57,176 - mmseg - INFO - Iter [90850/160000] lr: 2.593e-05, eta: 3:57:43, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2556, decode.acc_seg: 89.6634, aux.loss_ce: 0.1783, aux.acc_seg: 82.8505, loss: 0.4339, grad_norm: 4.2933 2023-02-11 23:25:06,885 - mmseg - INFO - Iter [90900/160000] lr: 2.591e-05, eta: 3:57:32, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2546, decode.acc_seg: 89.7016, aux.loss_ce: 0.1808, aux.acc_seg: 82.1816, loss: 0.4354, grad_norm: 4.7610 2023-02-11 23:25:16,639 - mmseg - INFO - Iter [90950/160000] lr: 2.589e-05, eta: 3:57:21, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2390, decode.acc_seg: 90.7989, aux.loss_ce: 0.1769, aux.acc_seg: 82.7811, loss: 0.4159, grad_norm: 3.9868 2023-02-11 23:25:26,389 - mmseg - INFO - Saving checkpoint at 91000 iterations 2023-02-11 23:25:27,135 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:25:27,135 - mmseg - INFO - Iter [91000/160000] lr: 2.588e-05, eta: 3:57:11, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2452, decode.acc_seg: 90.5510, aux.loss_ce: 0.1756, aux.acc_seg: 83.4774, loss: 0.4208, grad_norm: 4.7519 2023-02-11 23:25:37,309 - mmseg - INFO - Iter [91050/160000] lr: 2.586e-05, eta: 3:57:01, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2445, decode.acc_seg: 90.5339, aux.loss_ce: 0.1836, aux.acc_seg: 82.6189, loss: 0.4281, grad_norm: 5.5501 2023-02-11 23:25:47,540 - mmseg - INFO - Iter [91100/160000] lr: 2.584e-05, eta: 3:56:50, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2412, decode.acc_seg: 90.4445, aux.loss_ce: 0.1774, aux.acc_seg: 82.4096, loss: 0.4186, grad_norm: 3.9488 2023-02-11 23:25:57,585 - mmseg - INFO - Iter [91150/160000] lr: 2.582e-05, eta: 3:56:40, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2829, decode.acc_seg: 89.1426, aux.loss_ce: 0.2028, aux.acc_seg: 80.8230, loss: 0.4856, grad_norm: 5.0155 2023-02-11 23:26:07,433 - mmseg - INFO - Iter [91200/160000] lr: 2.580e-05, eta: 3:56:29, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2469, decode.acc_seg: 90.0432, aux.loss_ce: 0.1770, aux.acc_seg: 82.5826, loss: 0.4239, grad_norm: 4.7052 2023-02-11 23:26:17,293 - mmseg - INFO - Iter [91250/160000] lr: 2.578e-05, eta: 3:56:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2472, decode.acc_seg: 89.9732, aux.loss_ce: 0.1809, aux.acc_seg: 82.4602, loss: 0.4280, grad_norm: 4.6094 2023-02-11 23:26:27,475 - mmseg - INFO - Iter [91300/160000] lr: 2.576e-05, eta: 3:56:08, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2643, decode.acc_seg: 89.4808, aux.loss_ce: 0.1895, aux.acc_seg: 81.8041, loss: 0.4539, grad_norm: 4.7734 2023-02-11 23:26:37,377 - mmseg - INFO - Iter [91350/160000] lr: 2.574e-05, eta: 3:55:57, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2555, decode.acc_seg: 89.7199, aux.loss_ce: 0.1822, aux.acc_seg: 82.2348, loss: 0.4377, grad_norm: 5.1128 2023-02-11 23:26:47,585 - mmseg - INFO - Iter [91400/160000] lr: 2.573e-05, eta: 3:55:47, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2559, decode.acc_seg: 90.0399, aux.loss_ce: 0.1836, aux.acc_seg: 82.4676, loss: 0.4395, grad_norm: 4.4884 2023-02-11 23:26:57,332 - mmseg - INFO - Iter [91450/160000] lr: 2.571e-05, eta: 3:55:36, time: 0.195, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.2664, decode.acc_seg: 89.5213, aux.loss_ce: 0.1885, aux.acc_seg: 81.7857, loss: 0.4550, grad_norm: 5.3865 2023-02-11 23:27:07,191 - mmseg - INFO - Iter [91500/160000] lr: 2.569e-05, eta: 3:55:25, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2449, decode.acc_seg: 90.2698, aux.loss_ce: 0.1782, aux.acc_seg: 82.9155, loss: 0.4231, grad_norm: 3.9799 2023-02-11 23:27:17,011 - mmseg - INFO - Iter [91550/160000] lr: 2.567e-05, eta: 3:55:14, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2626, decode.acc_seg: 89.8055, aux.loss_ce: 0.1975, aux.acc_seg: 81.4150, loss: 0.4601, grad_norm: 6.7789 2023-02-11 23:27:26,758 - mmseg - INFO - Iter [91600/160000] lr: 2.565e-05, eta: 3:55:04, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2725, decode.acc_seg: 89.7367, aux.loss_ce: 0.1944, aux.acc_seg: 82.1586, loss: 0.4669, grad_norm: 5.4386 2023-02-11 23:27:37,009 - mmseg - INFO - Iter [91650/160000] lr: 2.563e-05, eta: 3:54:53, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2464, decode.acc_seg: 90.3843, aux.loss_ce: 0.1809, aux.acc_seg: 82.2775, loss: 0.4273, grad_norm: 4.7338 2023-02-11 23:27:47,164 - mmseg - INFO - Iter [91700/160000] lr: 2.561e-05, eta: 3:54:43, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2530, decode.acc_seg: 90.4779, aux.loss_ce: 0.1838, aux.acc_seg: 82.8253, loss: 0.4368, grad_norm: 4.6101 2023-02-11 23:27:56,965 - mmseg - INFO - Iter [91750/160000] lr: 2.559e-05, eta: 3:54:32, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2596, decode.acc_seg: 89.7851, aux.loss_ce: 0.1926, aux.acc_seg: 80.9170, loss: 0.4522, grad_norm: 4.9415 2023-02-11 23:28:08,946 - mmseg - INFO - Iter [91800/160000] lr: 2.558e-05, eta: 3:54:23, time: 0.240, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2612, decode.acc_seg: 89.7137, aux.loss_ce: 0.1860, aux.acc_seg: 82.2916, loss: 0.4472, grad_norm: 5.5689 2023-02-11 23:28:18,799 - mmseg - INFO - Iter [91850/160000] lr: 2.556e-05, eta: 3:54:12, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2359, decode.acc_seg: 90.5760, aux.loss_ce: 0.1751, aux.acc_seg: 82.7224, loss: 0.4110, grad_norm: 4.4679 2023-02-11 23:28:28,769 - mmseg - INFO - Iter [91900/160000] lr: 2.554e-05, eta: 3:54:02, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2467, decode.acc_seg: 90.2527, aux.loss_ce: 0.1774, aux.acc_seg: 83.2590, loss: 0.4241, grad_norm: 4.1309 2023-02-11 23:28:38,781 - mmseg - INFO - Iter [91950/160000] lr: 2.552e-05, eta: 3:53:51, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2333, decode.acc_seg: 90.5349, aux.loss_ce: 0.1751, aux.acc_seg: 82.6600, loss: 0.4084, grad_norm: 3.9110 2023-02-11 23:28:48,591 - mmseg - INFO - Saving checkpoint at 92000 iterations 2023-02-11 23:28:49,268 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:28:49,268 - mmseg - INFO - Iter [92000/160000] lr: 2.550e-05, eta: 3:53:41, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2429, decode.acc_seg: 90.5666, aux.loss_ce: 0.1804, aux.acc_seg: 82.5859, loss: 0.4233, grad_norm: 4.4429 2023-02-11 23:28:59,188 - mmseg - INFO - Iter [92050/160000] lr: 2.548e-05, eta: 3:53:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2597, decode.acc_seg: 90.2059, aux.loss_ce: 0.1897, aux.acc_seg: 82.1016, loss: 0.4494, grad_norm: 4.5952 2023-02-11 23:29:09,323 - mmseg - INFO - Iter [92100/160000] lr: 2.546e-05, eta: 3:53:20, time: 0.203, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.2510, decode.acc_seg: 90.3249, aux.loss_ce: 0.1770, aux.acc_seg: 82.8836, loss: 0.4280, grad_norm: 5.2506 2023-02-11 23:29:19,630 - mmseg - INFO - Iter [92150/160000] lr: 2.544e-05, eta: 3:53:10, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2576, decode.acc_seg: 89.8924, aux.loss_ce: 0.1902, aux.acc_seg: 81.6041, loss: 0.4478, grad_norm: 4.6937 2023-02-11 23:29:30,136 - mmseg - INFO - Iter [92200/160000] lr: 2.543e-05, eta: 3:52:59, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2536, decode.acc_seg: 90.2138, aux.loss_ce: 0.1852, aux.acc_seg: 82.2608, loss: 0.4388, grad_norm: 3.9076 2023-02-11 23:29:40,058 - mmseg - INFO - Iter [92250/160000] lr: 2.541e-05, eta: 3:52:49, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2593, decode.acc_seg: 89.6576, aux.loss_ce: 0.1880, aux.acc_seg: 81.9501, loss: 0.4473, grad_norm: 4.9073 2023-02-11 23:29:50,013 - mmseg - INFO - Iter [92300/160000] lr: 2.539e-05, eta: 3:52:38, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2671, decode.acc_seg: 89.8368, aux.loss_ce: 0.1851, aux.acc_seg: 82.5777, loss: 0.4523, grad_norm: 4.8978 2023-02-11 23:30:00,178 - mmseg - INFO - Iter [92350/160000] lr: 2.537e-05, eta: 3:52:28, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2481, decode.acc_seg: 90.2031, aux.loss_ce: 0.1816, aux.acc_seg: 82.1241, loss: 0.4297, grad_norm: 4.5954 2023-02-11 23:30:09,962 - mmseg - INFO - Iter [92400/160000] lr: 2.535e-05, eta: 3:52:17, time: 0.196, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2562, decode.acc_seg: 90.0566, aux.loss_ce: 0.1909, aux.acc_seg: 81.8505, loss: 0.4471, grad_norm: 4.9919 2023-02-11 23:30:20,637 - mmseg - INFO - Iter [92450/160000] lr: 2.533e-05, eta: 3:52:07, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2395, decode.acc_seg: 90.6570, aux.loss_ce: 0.1750, aux.acc_seg: 83.3924, loss: 0.4145, grad_norm: 4.5595 2023-02-11 23:30:30,970 - mmseg - INFO - Iter [92500/160000] lr: 2.531e-05, eta: 3:51:57, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2434, decode.acc_seg: 90.4027, aux.loss_ce: 0.1770, aux.acc_seg: 82.5125, loss: 0.4204, grad_norm: 6.0651 2023-02-11 23:30:40,785 - mmseg - INFO - Iter [92550/160000] lr: 2.529e-05, eta: 3:51:46, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2877, decode.acc_seg: 89.2364, aux.loss_ce: 0.1972, aux.acc_seg: 81.4866, loss: 0.4849, grad_norm: 5.3855 2023-02-11 23:30:51,272 - mmseg - INFO - Iter [92600/160000] lr: 2.528e-05, eta: 3:51:36, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2465, decode.acc_seg: 90.3286, aux.loss_ce: 0.1846, aux.acc_seg: 82.7558, loss: 0.4311, grad_norm: 4.1898 2023-02-11 23:31:00,912 - mmseg - INFO - Iter [92650/160000] lr: 2.526e-05, eta: 3:51:25, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2537, decode.acc_seg: 90.2342, aux.loss_ce: 0.1834, aux.acc_seg: 82.3474, loss: 0.4371, grad_norm: 4.4416 2023-02-11 23:31:11,372 - mmseg - INFO - Iter [92700/160000] lr: 2.524e-05, eta: 3:51:15, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2515, decode.acc_seg: 90.1096, aux.loss_ce: 0.1838, aux.acc_seg: 82.4026, loss: 0.4353, grad_norm: 5.2123 2023-02-11 23:31:21,148 - mmseg - INFO - Iter [92750/160000] lr: 2.522e-05, eta: 3:51:04, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2596, decode.acc_seg: 89.8373, aux.loss_ce: 0.1867, aux.acc_seg: 81.7769, loss: 0.4463, grad_norm: 5.8744 2023-02-11 23:31:31,315 - mmseg - INFO - Iter 
[92800/160000] lr: 2.520e-05, eta: 3:50:54, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2358, decode.acc_seg: 90.5257, aux.loss_ce: 0.1690, aux.acc_seg: 83.5959, loss: 0.4047, grad_norm: 4.0890 2023-02-11 23:31:41,172 - mmseg - INFO - Iter [92850/160000] lr: 2.518e-05, eta: 3:50:43, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2530, decode.acc_seg: 90.1332, aux.loss_ce: 0.1919, aux.acc_seg: 82.0150, loss: 0.4449, grad_norm: 5.5361 2023-02-11 23:31:51,009 - mmseg - INFO - Iter [92900/160000] lr: 2.516e-05, eta: 3:50:32, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2515, decode.acc_seg: 90.1480, aux.loss_ce: 0.1881, aux.acc_seg: 81.9117, loss: 0.4395, grad_norm: 5.0872 2023-02-11 23:32:01,300 - mmseg - INFO - Iter [92950/160000] lr: 2.514e-05, eta: 3:50:22, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2625, decode.acc_seg: 89.8108, aux.loss_ce: 0.1961, aux.acc_seg: 81.4342, loss: 0.4587, grad_norm: 5.4059 2023-02-11 23:32:11,425 - mmseg - INFO - Saving checkpoint at 93000 iterations 2023-02-11 23:32:12,107 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:32:12,108 - mmseg - INFO - Iter [93000/160000] lr: 2.513e-05, eta: 3:50:12, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2507, decode.acc_seg: 89.9790, aux.loss_ce: 0.1840, aux.acc_seg: 82.0198, loss: 0.4347, grad_norm: 7.4594 2023-02-11 23:32:24,152 - mmseg - INFO - Iter [93050/160000] lr: 2.511e-05, eta: 3:50:03, time: 0.241, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2557, decode.acc_seg: 89.8677, aux.loss_ce: 0.1852, aux.acc_seg: 82.1690, loss: 0.4409, grad_norm: 4.9879 2023-02-11 23:32:34,185 - mmseg - INFO - Iter [93100/160000] lr: 2.509e-05, eta: 3:49:53, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2376, decode.acc_seg: 90.8368, aux.loss_ce: 0.1705, aux.acc_seg: 83.3056, loss: 0.4081, grad_norm: 4.1195 2023-02-11 23:32:44,569 - mmseg - INFO - Iter [93150/160000] lr: 2.507e-05, eta: 3:49:42, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2546, decode.acc_seg: 89.8679, aux.loss_ce: 0.1850, aux.acc_seg: 81.8917, loss: 0.4396, grad_norm: 7.3380 2023-02-11 23:32:54,480 - mmseg - INFO - Iter [93200/160000] lr: 2.505e-05, eta: 3:49:32, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2438, decode.acc_seg: 90.3550, aux.loss_ce: 0.1824, aux.acc_seg: 82.6034, loss: 0.4262, grad_norm: 4.9396 2023-02-11 23:33:04,616 - mmseg - INFO - Iter [93250/160000] lr: 2.503e-05, eta: 3:49:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2495, decode.acc_seg: 90.1173, aux.loss_ce: 0.1758, aux.acc_seg: 82.9126, loss: 0.4254, grad_norm: 4.6460 2023-02-11 23:33:14,246 - mmseg - INFO - Iter [93300/160000] lr: 2.501e-05, eta: 3:49:10, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2462, decode.acc_seg: 90.3982, aux.loss_ce: 0.1786, aux.acc_seg: 83.0569, loss: 0.4247, grad_norm: 4.2083 2023-02-11 23:33:24,249 - mmseg - INFO - Iter [93350/160000] lr: 2.499e-05, eta: 3:49:00, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2599, decode.acc_seg: 90.3187, aux.loss_ce: 0.1929, aux.acc_seg: 81.6947, loss: 0.4527, grad_norm: 4.9920 2023-02-11 23:33:33,973 - mmseg - INFO - Iter [93400/160000] lr: 2.498e-05, eta: 3:48:49, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2452, decode.acc_seg: 90.1835, aux.loss_ce: 0.1763, aux.acc_seg: 82.6629, loss: 0.4215, grad_norm: 4.7434 2023-02-11 23:33:43,985 - mmseg - INFO - Iter 
[93450/160000] lr: 2.496e-05, eta: 3:48:38, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2481, decode.acc_seg: 90.2847, aux.loss_ce: 0.1829, aux.acc_seg: 82.8165, loss: 0.4310, grad_norm: 4.9734 2023-02-11 23:33:53,828 - mmseg - INFO - Iter [93500/160000] lr: 2.494e-05, eta: 3:48:28, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2497, decode.acc_seg: 90.2998, aux.loss_ce: 0.1799, aux.acc_seg: 82.6133, loss: 0.4296, grad_norm: 5.1062 2023-02-11 23:34:04,132 - mmseg - INFO - Iter [93550/160000] lr: 2.492e-05, eta: 3:48:17, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2484, decode.acc_seg: 90.1703, aux.loss_ce: 0.1793, aux.acc_seg: 82.2671, loss: 0.4278, grad_norm: 4.4739 2023-02-11 23:34:14,680 - mmseg - INFO - Iter [93600/160000] lr: 2.490e-05, eta: 3:48:07, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2445, decode.acc_seg: 90.4171, aux.loss_ce: 0.1824, aux.acc_seg: 82.3447, loss: 0.4270, grad_norm: 5.1973 2023-02-11 23:34:24,567 - mmseg - INFO - Iter [93650/160000] lr: 2.488e-05, eta: 3:47:57, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2518, decode.acc_seg: 90.2515, aux.loss_ce: 0.1820, aux.acc_seg: 82.5228, loss: 0.4338, grad_norm: 4.4777 2023-02-11 23:34:34,590 - mmseg - INFO - Iter [93700/160000] lr: 2.486e-05, eta: 3:47:46, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2539, decode.acc_seg: 89.7185, aux.loss_ce: 0.1817, aux.acc_seg: 81.9236, loss: 0.4356, grad_norm: 4.0945 2023-02-11 23:34:44,663 - mmseg - INFO - Iter [93750/160000] lr: 2.484e-05, eta: 3:47:36, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2350, decode.acc_seg: 91.0957, aux.loss_ce: 0.1747, aux.acc_seg: 83.1953, loss: 0.4096, grad_norm: 5.5358 2023-02-11 23:34:54,420 - mmseg - INFO - Iter [93800/160000] lr: 2.483e-05, eta: 3:47:25, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2396, decode.acc_seg: 90.4332, aux.loss_ce: 0.1750, aux.acc_seg: 82.9522, loss: 0.4146, grad_norm: 4.9944 2023-02-11 23:35:04,240 - mmseg - INFO - Iter [93850/160000] lr: 2.481e-05, eta: 3:47:14, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2453, decode.acc_seg: 90.5726, aux.loss_ce: 0.1815, aux.acc_seg: 82.8228, loss: 0.4268, grad_norm: 4.4987 2023-02-11 23:35:14,202 - mmseg - INFO - Iter [93900/160000] lr: 2.479e-05, eta: 3:47:03, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2590, decode.acc_seg: 89.5175, aux.loss_ce: 0.1816, aux.acc_seg: 81.5958, loss: 0.4406, grad_norm: 5.5007 2023-02-11 23:35:25,166 - mmseg - INFO - Iter [93950/160000] lr: 2.477e-05, eta: 3:46:54, time: 0.219, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2584, decode.acc_seg: 89.9115, aux.loss_ce: 0.1834, aux.acc_seg: 82.0894, loss: 0.4418, grad_norm: 4.7244 2023-02-11 23:35:35,588 - mmseg - INFO - Saving checkpoint at 94000 iterations 2023-02-11 23:35:36,274 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:35:36,275 - mmseg - INFO - Iter [94000/160000] lr: 2.475e-05, eta: 3:46:44, time: 0.222, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2392, decode.acc_seg: 90.7586, aux.loss_ce: 0.1806, aux.acc_seg: 82.6638, loss: 0.4197, grad_norm: 4.8183 2023-02-11 23:35:46,313 - mmseg - INFO - Iter [94050/160000] lr: 2.473e-05, eta: 3:46:34, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2666, decode.acc_seg: 89.7699, aux.loss_ce: 0.1886, aux.acc_seg: 81.7741, loss: 0.4552, grad_norm: 4.5817 2023-02-11 23:35:56,266 - mmseg - INFO - Iter 
[94100/160000] lr: 2.471e-05, eta: 3:46:23, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2583, decode.acc_seg: 89.6434, aux.loss_ce: 0.1802, aux.acc_seg: 82.3508, loss: 0.4385, grad_norm: 4.6417 2023-02-11 23:36:06,433 - mmseg - INFO - Iter [94150/160000] lr: 2.469e-05, eta: 3:46:13, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2517, decode.acc_seg: 90.1457, aux.loss_ce: 0.1851, aux.acc_seg: 82.1757, loss: 0.4368, grad_norm: 4.8438 2023-02-11 23:36:16,629 - mmseg - INFO - Iter [94200/160000] lr: 2.468e-05, eta: 3:46:02, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2312, decode.acc_seg: 90.7277, aux.loss_ce: 0.1761, aux.acc_seg: 82.7488, loss: 0.4073, grad_norm: 4.3793 2023-02-11 23:36:26,850 - mmseg - INFO - Iter [94250/160000] lr: 2.466e-05, eta: 3:45:52, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2412, decode.acc_seg: 90.8164, aux.loss_ce: 0.1784, aux.acc_seg: 83.3379, loss: 0.4196, grad_norm: 4.5351 2023-02-11 23:36:36,792 - mmseg - INFO - Iter [94300/160000] lr: 2.464e-05, eta: 3:45:41, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2551, decode.acc_seg: 89.9501, aux.loss_ce: 0.1886, aux.acc_seg: 81.9429, loss: 0.4438, grad_norm: 5.4808 2023-02-11 23:36:49,142 - mmseg - INFO - Iter [94350/160000] lr: 2.462e-05, eta: 3:45:33, time: 0.247, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2418, decode.acc_seg: 90.6590, aux.loss_ce: 0.1761, aux.acc_seg: 82.9998, loss: 0.4179, grad_norm: 4.6792 2023-02-11 23:36:59,416 - mmseg - INFO - Iter [94400/160000] lr: 2.460e-05, eta: 3:45:22, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2443, decode.acc_seg: 90.4161, aux.loss_ce: 0.1685, aux.acc_seg: 83.6610, loss: 0.4128, grad_norm: 3.8929 2023-02-11 23:37:09,296 - mmseg - INFO - Iter [94450/160000] lr: 2.458e-05, eta: 3:45:12, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2496, decode.acc_seg: 90.2246, aux.loss_ce: 0.1854, aux.acc_seg: 82.2055, loss: 0.4349, grad_norm: 4.7162 2023-02-11 23:37:19,169 - mmseg - INFO - Iter [94500/160000] lr: 2.456e-05, eta: 3:45:01, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2369, decode.acc_seg: 90.7722, aux.loss_ce: 0.1728, aux.acc_seg: 83.4170, loss: 0.4097, grad_norm: 4.1396 2023-02-11 23:37:29,082 - mmseg - INFO - Iter [94550/160000] lr: 2.454e-05, eta: 3:44:50, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2469, decode.acc_seg: 90.6177, aux.loss_ce: 0.1831, aux.acc_seg: 82.5414, loss: 0.4300, grad_norm: 4.3909 2023-02-11 23:37:38,809 - mmseg - INFO - Iter [94600/160000] lr: 2.453e-05, eta: 3:44:39, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2303, decode.acc_seg: 90.9413, aux.loss_ce: 0.1735, aux.acc_seg: 83.3707, loss: 0.4039, grad_norm: 4.2059 2023-02-11 23:37:48,814 - mmseg - INFO - Iter [94650/160000] lr: 2.451e-05, eta: 3:44:29, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2285, decode.acc_seg: 90.7233, aux.loss_ce: 0.1704, aux.acc_seg: 82.9287, loss: 0.3989, grad_norm: 3.7145 2023-02-11 23:37:58,810 - mmseg - INFO - Iter [94700/160000] lr: 2.449e-05, eta: 3:44:18, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2428, decode.acc_seg: 90.4657, aux.loss_ce: 0.1803, aux.acc_seg: 82.7258, loss: 0.4231, grad_norm: 4.3097 2023-02-11 23:38:09,026 - mmseg - INFO - Iter [94750/160000] lr: 2.447e-05, eta: 3:44:08, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2361, decode.acc_seg: 90.5798, aux.loss_ce: 0.1804, aux.acc_seg: 82.6452, loss: 
0.4165, grad_norm: 5.0361 2023-02-11 23:38:18,965 - mmseg - INFO - Iter [94800/160000] lr: 2.445e-05, eta: 3:43:57, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2361, decode.acc_seg: 90.6613, aux.loss_ce: 0.1778, aux.acc_seg: 82.7855, loss: 0.4139, grad_norm: 4.3133 2023-02-11 23:38:29,370 - mmseg - INFO - Iter [94850/160000] lr: 2.443e-05, eta: 3:43:47, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2499, decode.acc_seg: 90.0324, aux.loss_ce: 0.1859, aux.acc_seg: 82.1615, loss: 0.4358, grad_norm: 5.3855 2023-02-11 23:38:39,340 - mmseg - INFO - Iter [94900/160000] lr: 2.441e-05, eta: 3:43:37, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2556, decode.acc_seg: 89.9076, aux.loss_ce: 0.1863, aux.acc_seg: 81.9251, loss: 0.4419, grad_norm: 6.1434 2023-02-11 23:38:49,331 - mmseg - INFO - Iter [94950/160000] lr: 2.439e-05, eta: 3:43:26, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2512, decode.acc_seg: 90.0052, aux.loss_ce: 0.1841, aux.acc_seg: 81.9668, loss: 0.4353, grad_norm: 5.2199 2023-02-11 23:38:59,306 - mmseg - INFO - Saving checkpoint at 95000 iterations 2023-02-11 23:38:59,978 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:38:59,978 - mmseg - INFO - Iter [95000/160000] lr: 2.438e-05, eta: 3:43:16, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2446, decode.acc_seg: 90.3048, aux.loss_ce: 0.1745, aux.acc_seg: 82.9128, loss: 0.4191, grad_norm: 4.2409 2023-02-11 23:39:09,908 - mmseg - INFO - Iter [95050/160000] lr: 2.436e-05, eta: 3:43:05, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2485, decode.acc_seg: 90.1740, aux.loss_ce: 0.1853, aux.acc_seg: 82.3237, loss: 0.4338, grad_norm: 5.2026 2023-02-11 23:39:19,758 - mmseg - INFO - Iter [95100/160000] lr: 2.434e-05, eta: 3:42:55, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2499, decode.acc_seg: 90.1066, aux.loss_ce: 0.1839, aux.acc_seg: 82.4402, loss: 0.4338, grad_norm: 4.1146 2023-02-11 23:39:30,016 - mmseg - INFO - Iter [95150/160000] lr: 2.432e-05, eta: 3:42:44, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2495, decode.acc_seg: 90.4045, aux.loss_ce: 0.1846, aux.acc_seg: 82.1685, loss: 0.4340, grad_norm: 4.8026 2023-02-11 23:39:39,922 - mmseg - INFO - Iter [95200/160000] lr: 2.430e-05, eta: 3:42:34, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2254, decode.acc_seg: 90.7215, aux.loss_ce: 0.1753, aux.acc_seg: 82.5530, loss: 0.4007, grad_norm: 4.4696 2023-02-11 23:39:50,051 - mmseg - INFO - Iter [95250/160000] lr: 2.428e-05, eta: 3:42:23, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2610, decode.acc_seg: 89.7009, aux.loss_ce: 0.1869, aux.acc_seg: 81.6753, loss: 0.4479, grad_norm: 6.6216 2023-02-11 23:40:00,352 - mmseg - INFO - Iter [95300/160000] lr: 2.426e-05, eta: 3:42:13, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2459, decode.acc_seg: 90.4594, aux.loss_ce: 0.1785, aux.acc_seg: 83.1020, loss: 0.4244, grad_norm: 5.1680 2023-02-11 23:40:10,365 - mmseg - INFO - Iter [95350/160000] lr: 2.424e-05, eta: 3:42:02, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2366, decode.acc_seg: 90.6602, aux.loss_ce: 0.1779, aux.acc_seg: 82.7343, loss: 0.4145, grad_norm: 4.1776 2023-02-11 23:40:20,288 - mmseg - INFO - Iter [95400/160000] lr: 2.423e-05, eta: 3:41:52, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2332, decode.acc_seg: 90.5237, aux.loss_ce: 0.1689, aux.acc_seg: 82.9592, loss: 0.4021, 
grad_norm: 4.3696 2023-02-11 23:40:30,245 - mmseg - INFO - Iter [95450/160000] lr: 2.421e-05, eta: 3:41:41, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2543, decode.acc_seg: 90.2842, aux.loss_ce: 0.1927, aux.acc_seg: 81.7228, loss: 0.4470, grad_norm: 4.8983 2023-02-11 23:40:40,399 - mmseg - INFO - Iter [95500/160000] lr: 2.419e-05, eta: 3:41:31, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2376, decode.acc_seg: 90.5888, aux.loss_ce: 0.1776, aux.acc_seg: 82.9616, loss: 0.4152, grad_norm: 4.3772 2023-02-11 23:40:50,796 - mmseg - INFO - Iter [95550/160000] lr: 2.417e-05, eta: 3:41:21, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2386, decode.acc_seg: 90.7807, aux.loss_ce: 0.1693, aux.acc_seg: 83.8891, loss: 0.4079, grad_norm: 4.4177 2023-02-11 23:41:02,729 - mmseg - INFO - Iter [95600/160000] lr: 2.415e-05, eta: 3:41:12, time: 0.239, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2477, decode.acc_seg: 90.3198, aux.loss_ce: 0.1721, aux.acc_seg: 83.0956, loss: 0.4198, grad_norm: 4.7501 2023-02-11 23:41:12,891 - mmseg - INFO - Iter [95650/160000] lr: 2.413e-05, eta: 3:41:01, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2271, decode.acc_seg: 90.7640, aux.loss_ce: 0.1762, aux.acc_seg: 82.3967, loss: 0.4033, grad_norm: 4.6541 2023-02-11 23:41:23,108 - mmseg - INFO - Iter [95700/160000] lr: 2.411e-05, eta: 3:40:51, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2369, decode.acc_seg: 90.3012, aux.loss_ce: 0.1746, aux.acc_seg: 82.5449, loss: 0.4116, grad_norm: 4.6935 2023-02-11 23:41:33,423 - mmseg - INFO - Iter [95750/160000] lr: 2.409e-05, eta: 3:40:40, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2558, decode.acc_seg: 89.9577, aux.loss_ce: 0.1878, aux.acc_seg: 81.9453, loss: 0.4437, grad_norm: 4.7497 2023-02-11 23:41:43,100 - mmseg - INFO - Iter [95800/160000] lr: 2.408e-05, eta: 3:40:30, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2208, decode.acc_seg: 91.4006, aux.loss_ce: 0.1710, aux.acc_seg: 83.3708, loss: 0.3919, grad_norm: 4.1824 2023-02-11 23:41:53,422 - mmseg - INFO - Iter [95850/160000] lr: 2.406e-05, eta: 3:40:19, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2424, decode.acc_seg: 90.6029, aux.loss_ce: 0.1850, aux.acc_seg: 82.1646, loss: 0.4274, grad_norm: 4.6472 2023-02-11 23:42:03,117 - mmseg - INFO - Iter [95900/160000] lr: 2.404e-05, eta: 3:40:09, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2475, decode.acc_seg: 89.9272, aux.loss_ce: 0.1748, aux.acc_seg: 82.8010, loss: 0.4223, grad_norm: 5.5544 2023-02-11 23:42:13,527 - mmseg - INFO - Iter [95950/160000] lr: 2.402e-05, eta: 3:39:58, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2306, decode.acc_seg: 91.3379, aux.loss_ce: 0.1778, aux.acc_seg: 83.1897, loss: 0.4084, grad_norm: 3.9890 2023-02-11 23:42:23,723 - mmseg - INFO - Saving checkpoint at 96000 iterations 2023-02-11 23:42:24,420 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:42:24,420 - mmseg - INFO - Iter [96000/160000] lr: 2.400e-05, eta: 3:39:49, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2498, decode.acc_seg: 90.1820, aux.loss_ce: 0.1834, aux.acc_seg: 82.4340, loss: 0.4332, grad_norm: 4.8004 2023-02-11 23:42:35,868 - mmseg - INFO - per class results: 2023-02-11 23:42:35,873 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 72.95 | 87.84 | | building | 81.81 | 
90.74 | | sky | 93.7 | 97.37 | | floor | 78.32 | 89.74 | | tree | 72.63 | 80.65 | | ceiling | 80.87 | 88.14 | | road | 80.5 | 87.65 | | bed | 86.27 | 92.54 | | windowpane | 60.22 | 75.63 | | grass | 65.2 | 85.24 | | cabinet | 58.24 | 71.0 | | sidewalk | 58.7 | 73.09 | | person | 77.93 | 91.09 | | earth | 29.29 | 35.61 | | door | 43.51 | 61.32 | | table | 53.21 | 64.76 | | mountain | 56.62 | 71.08 | | plant | 48.28 | 78.18 | | curtain | 72.69 | 82.53 | | chair | 51.58 | 63.22 | | car | 81.35 | 91.4 | | water | 54.34 | 73.05 | | painting | 69.09 | 83.41 | | sofa | 61.33 | 79.68 | | shelf | 40.07 | 57.73 | | house | 47.55 | 57.08 | | sea | 61.18 | 91.21 | | mirror | 53.04 | 58.4 | | rug | 58.98 | 65.76 | | field | 28.75 | 39.88 | | armchair | 34.62 | 50.98 | | seat | 60.2 | 76.46 | | fence | 30.41 | 43.84 | | desk | 38.55 | 67.26 | | rock | 34.45 | 58.54 | | wardrobe | 49.27 | 69.28 | | lamp | 56.08 | 74.17 | | bathtub | 73.95 | 78.88 | | railing | 31.94 | 51.45 | | cushion | 50.6 | 63.13 | | base | 26.09 | 36.98 | | box | 16.14 | 18.46 | | column | 38.09 | 57.97 | | signboard | 34.93 | 49.36 | | chest of drawers | 42.77 | 61.54 | | counter | 19.1 | 21.74 | | sand | 32.79 | 47.08 | | sink | 69.83 | 78.65 | | skyscraper | 60.64 | 81.81 | | fireplace | 68.82 | 78.23 | | refrigerator | 68.15 | 83.85 | | grandstand | 39.86 | 51.94 | | path | 16.69 | 41.68 | | stairs | 24.24 | 27.34 | | runway | 64.34 | 83.41 | | case | 40.22 | 54.24 | | pool table | 91.35 | 96.94 | | pillow | 54.05 | 79.48 | | screen door | 37.16 | 40.58 | | stairway | 30.24 | 45.77 | | river | 11.5 | 13.97 | | bridge | 40.61 | 48.89 | | bookcase | 31.52 | 65.74 | | blind | 39.93 | 48.89 | | coffee table | 45.67 | 84.48 | | toilet | 84.57 | 90.1 | | flower | 37.53 | 47.81 | | book | 43.89 | 63.49 | | hill | 4.66 | 6.66 | | bench | 43.49 | 52.06 | | countertop | 48.97 | 55.08 | | stove | 69.54 | 80.76 | | palm | 47.71 | 77.48 | | kitchen island | 29.29 | 52.47 | | computer | 66.58 | 78.26 | | swivel chair | 37.29 | 42.65 | | boat | 43.95 | 55.27 | | bar | 29.42 | 38.53 | | arcade machine | 33.46 | 35.02 | | hovel | 31.35 | 44.44 | | bus | 88.43 | 93.0 | | towel | 61.29 | 70.8 | | light | 52.49 | 61.92 | | truck | 35.23 | 43.4 | | tower | 29.19 | 36.87 | | chandelier | 60.17 | 75.51 | | awning | 28.54 | 46.82 | | streetlight | 22.46 | 28.55 | | booth | 36.26 | 36.66 | | television receiver | 64.87 | 78.89 | | airplane | 57.65 | 66.11 | | dirt track | 2.37 | 7.68 | | apparel | 35.35 | 56.37 | | pole | 18.72 | 28.03 | | land | 3.81 | 5.32 | | bannister | 10.78 | 15.51 | | escalator | 19.65 | 30.3 | | ottoman | 39.29 | 60.89 | | bottle | 29.94 | 47.11 | | buffet | 35.43 | 42.77 | | poster | 27.32 | 38.47 | | stage | 17.99 | 28.07 | | van | 46.64 | 61.6 | | ship | 54.65 | 79.08 | | fountain | 19.36 | 21.07 | | conveyer belt | 50.48 | 63.81 | | canopy | 16.75 | 25.93 | | washer | 62.98 | 69.75 | | plaything | 16.29 | 21.73 | | swimming pool | 59.27 | 74.51 | | stool | 35.09 | 56.41 | | barrel | 30.66 | 64.97 | | basket | 24.9 | 35.41 | | waterfall | 49.08 | 71.43 | | tent | 64.41 | 98.61 | | bag | 18.67 | 32.28 | | minibike | 61.98 | 71.34 | | cradle | 77.21 | 94.13 | | oven | 21.38 | 49.12 | | ball | 41.36 | 59.89 | | food | 48.38 | 54.56 | | step | 2.79 | 3.52 | | tank | 24.42 | 24.75 | | trade name | 27.94 | 34.03 | | microwave | 39.08 | 41.32 | | pot | 34.52 | 42.98 | | animal | 54.43 | 58.15 | | bicycle | 53.79 | 79.76 | | lake | 54.42 | 62.26 | | dishwasher | 58.63 | 74.58 | | screen | 54.12 | 71.97 | | blanket | 9.65 | 12.04 | | 
sculpture | 48.96 | 68.6 | | hood | 60.61 | 73.68 | | sconce | 31.0 | 35.01 | | vase | 31.17 | 49.94 | | traffic light | 30.25 | 53.57 | | tray | 3.45 | 5.99 | | ashcan | 27.87 | 55.04 | | fan | 51.32 | 60.01 | | pier | 62.12 | 80.04 | | crt screen | 1.51 | 4.59 | | plate | 49.35 | 69.12 | | monitor | 9.54 | 12.26 | | bulletin board | 41.61 | 78.36 | | shower | 0.0 | 0.0 | | radiator | 49.37 | 56.72 | | glass | 11.55 | 14.59 | | clock | 13.28 | 15.26 | | flag | 41.08 | 52.87 | +---------------------+-------+-------+ 2023-02-11 23:42:35,874 - mmseg - INFO - Summary: 2023-02-11 23:42:35,874 - mmseg - INFO - +-------+-------+-------+ | aAcc | mIoU | mAcc | +-------+-------+-------+ | 80.61 | 43.93 | 56.76 | +-------+-------+-------+ 2023-02-11 23:42:36,516 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_96000.pth. 2023-02-11 23:42:36,516 - mmseg - INFO - Best mIoU is 0.4393 at 96000 iter. 2023-02-11 23:42:36,516 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:42:36,517 - mmseg - INFO - Iter(val) [250] aAcc: 0.8061, mIoU: 0.4393, mAcc: 0.5676, IoU.wall: 0.7295, IoU.building: 0.8181, IoU.sky: 0.9370, IoU.floor: 0.7832, IoU.tree: 0.7263, IoU.ceiling: 0.8087, IoU.road: 0.8050, IoU.bed : 0.8627, IoU.windowpane: 0.6022, IoU.grass: 0.6520, IoU.cabinet: 0.5824, IoU.sidewalk: 0.5870, IoU.person: 0.7793, IoU.earth: 0.2929, IoU.door: 0.4351, IoU.table: 0.5321, IoU.mountain: 0.5662, IoU.plant: 0.4828, IoU.curtain: 0.7269, IoU.chair: 0.5158, IoU.car: 0.8135, IoU.water: 0.5434, IoU.painting: 0.6909, IoU.sofa: 0.6133, IoU.shelf: 0.4007, IoU.house: 0.4755, IoU.sea: 0.6118, IoU.mirror: 0.5304, IoU.rug: 0.5898, IoU.field: 0.2875, IoU.armchair: 0.3462, IoU.seat: 0.6020, IoU.fence: 0.3041, IoU.desk: 0.3855, IoU.rock: 0.3445, IoU.wardrobe: 0.4927, IoU.lamp: 0.5608, IoU.bathtub: 0.7395, IoU.railing: 0.3194, IoU.cushion: 0.5060, IoU.base: 0.2609, IoU.box: 0.1614, IoU.column: 0.3809, IoU.signboard: 0.3493, IoU.chest of drawers: 0.4277, IoU.counter: 0.1910, IoU.sand: 0.3279, IoU.sink: 0.6983, IoU.skyscraper: 0.6064, IoU.fireplace: 0.6882, IoU.refrigerator: 0.6815, IoU.grandstand: 0.3986, IoU.path: 0.1669, IoU.stairs: 0.2424, IoU.runway: 0.6434, IoU.case: 0.4022, IoU.pool table: 0.9135, IoU.pillow: 0.5405, IoU.screen door: 0.3716, IoU.stairway: 0.3024, IoU.river: 0.1150, IoU.bridge: 0.4061, IoU.bookcase: 0.3152, IoU.blind: 0.3993, IoU.coffee table: 0.4567, IoU.toilet: 0.8457, IoU.flower: 0.3753, IoU.book: 0.4389, IoU.hill: 0.0466, IoU.bench: 0.4349, IoU.countertop: 0.4897, IoU.stove: 0.6954, IoU.palm: 0.4771, IoU.kitchen island: 0.2929, IoU.computer: 0.6658, IoU.swivel chair: 0.3729, IoU.boat: 0.4395, IoU.bar: 0.2942, IoU.arcade machine: 0.3346, IoU.hovel: 0.3135, IoU.bus: 0.8843, IoU.towel: 0.6129, IoU.light: 0.5249, IoU.truck: 0.3523, IoU.tower: 0.2919, IoU.chandelier: 0.6017, IoU.awning: 0.2854, IoU.streetlight: 0.2246, IoU.booth: 0.3626, IoU.television receiver: 0.6487, IoU.airplane: 0.5765, IoU.dirt track: 0.0237, IoU.apparel: 0.3535, IoU.pole: 0.1872, IoU.land: 0.0381, IoU.bannister: 0.1078, IoU.escalator: 0.1965, IoU.ottoman: 0.3929, IoU.bottle: 0.2994, IoU.buffet: 0.3543, IoU.poster: 0.2732, IoU.stage: 0.1799, IoU.van: 0.4664, IoU.ship: 0.5465, IoU.fountain: 0.1936, IoU.conveyer belt: 0.5048, IoU.canopy: 0.1675, IoU.washer: 0.6298, IoU.plaything: 0.1629, IoU.swimming pool: 0.5927, IoU.stool: 0.3509, IoU.barrel: 0.3066, IoU.basket: 0.2490, IoU.waterfall: 0.4908, IoU.tent: 0.6441, IoU.bag: 0.1867, IoU.minibike: 0.6198, IoU.cradle: 0.7721, IoU.oven: 0.2138, 
IoU.ball: 0.4136, IoU.food: 0.4838, IoU.step: 0.0279, IoU.tank: 0.2442, IoU.trade name: 0.2794, IoU.microwave: 0.3908, IoU.pot: 0.3452, IoU.animal: 0.5443, IoU.bicycle: 0.5379, IoU.lake: 0.5442, IoU.dishwasher: 0.5863, IoU.screen: 0.5412, IoU.blanket: 0.0965, IoU.sculpture: 0.4896, IoU.hood: 0.6061, IoU.sconce: 0.3100, IoU.vase: 0.3117, IoU.traffic light: 0.3025, IoU.tray: 0.0345, IoU.ashcan: 0.2787, IoU.fan: 0.5132, IoU.pier: 0.6212, IoU.crt screen: 0.0151, IoU.plate: 0.4935, IoU.monitor: 0.0954, IoU.bulletin board: 0.4161, IoU.shower: 0.0000, IoU.radiator: 0.4937, IoU.glass: 0.1155, IoU.clock: 0.1328, IoU.flag: 0.4108, Acc.wall: 0.8784, Acc.building: 0.9074, Acc.sky: 0.9737, Acc.floor: 0.8974, Acc.tree: 0.8065, Acc.ceiling: 0.8814, Acc.road: 0.8765, Acc.bed : 0.9254, Acc.windowpane: 0.7563, Acc.grass: 0.8524, Acc.cabinet: 0.7100, Acc.sidewalk: 0.7309, Acc.person: 0.9109, Acc.earth: 0.3561, Acc.door: 0.6132, Acc.table: 0.6476, Acc.mountain: 0.7108, Acc.plant: 0.7818, Acc.curtain: 0.8253, Acc.chair: 0.6322, Acc.car: 0.9140, Acc.water: 0.7305, Acc.painting: 0.8341, Acc.sofa: 0.7968, Acc.shelf: 0.5773, Acc.house: 0.5708, Acc.sea: 0.9121, Acc.mirror: 0.5840, Acc.rug: 0.6576, Acc.field: 0.3988, Acc.armchair: 0.5098, Acc.seat: 0.7646, Acc.fence: 0.4384, Acc.desk: 0.6726, Acc.rock: 0.5854, Acc.wardrobe: 0.6928, Acc.lamp: 0.7417, Acc.bathtub: 0.7888, Acc.railing: 0.5145, Acc.cushion: 0.6313, Acc.base: 0.3698, Acc.box: 0.1846, Acc.column: 0.5797, Acc.signboard: 0.4936, Acc.chest of drawers: 0.6154, Acc.counter: 0.2174, Acc.sand: 0.4708, Acc.sink: 0.7865, Acc.skyscraper: 0.8181, Acc.fireplace: 0.7823, Acc.refrigerator: 0.8385, Acc.grandstand: 0.5194, Acc.path: 0.4168, Acc.stairs: 0.2734, Acc.runway: 0.8341, Acc.case: 0.5424, Acc.pool table: 0.9694, Acc.pillow: 0.7948, Acc.screen door: 0.4058, Acc.stairway: 0.4577, Acc.river: 0.1397, Acc.bridge: 0.4889, Acc.bookcase: 0.6574, Acc.blind: 0.4889, Acc.coffee table: 0.8448, Acc.toilet: 0.9010, Acc.flower: 0.4781, Acc.book: 0.6349, Acc.hill: 0.0666, Acc.bench: 0.5206, Acc.countertop: 0.5508, Acc.stove: 0.8076, Acc.palm: 0.7748, Acc.kitchen island: 0.5247, Acc.computer: 0.7826, Acc.swivel chair: 0.4265, Acc.boat: 0.5527, Acc.bar: 0.3853, Acc.arcade machine: 0.3502, Acc.hovel: 0.4444, Acc.bus: 0.9300, Acc.towel: 0.7080, Acc.light: 0.6192, Acc.truck: 0.4340, Acc.tower: 0.3687, Acc.chandelier: 0.7551, Acc.awning: 0.4682, Acc.streetlight: 0.2855, Acc.booth: 0.3666, Acc.television receiver: 0.7889, Acc.airplane: 0.6611, Acc.dirt track: 0.0768, Acc.apparel: 0.5637, Acc.pole: 0.2803, Acc.land: 0.0532, Acc.bannister: 0.1551, Acc.escalator: 0.3030, Acc.ottoman: 0.6089, Acc.bottle: 0.4711, Acc.buffet: 0.4277, Acc.poster: 0.3847, Acc.stage: 0.2807, Acc.van: 0.6160, Acc.ship: 0.7908, Acc.fountain: 0.2107, Acc.conveyer belt: 0.6381, Acc.canopy: 0.2593, Acc.washer: 0.6975, Acc.plaything: 0.2173, Acc.swimming pool: 0.7451, Acc.stool: 0.5641, Acc.barrel: 0.6497, Acc.basket: 0.3541, Acc.waterfall: 0.7143, Acc.tent: 0.9861, Acc.bag: 0.3228, Acc.minibike: 0.7134, Acc.cradle: 0.9413, Acc.oven: 0.4912, Acc.ball: 0.5989, Acc.food: 0.5456, Acc.step: 0.0352, Acc.tank: 0.2475, Acc.trade name: 0.3403, Acc.microwave: 0.4132, Acc.pot: 0.4298, Acc.animal: 0.5815, Acc.bicycle: 0.7976, Acc.lake: 0.6226, Acc.dishwasher: 0.7458, Acc.screen: 0.7197, Acc.blanket: 0.1204, Acc.sculpture: 0.6860, Acc.hood: 0.7368, Acc.sconce: 0.3501, Acc.vase: 0.4994, Acc.traffic light: 0.5357, Acc.tray: 0.0599, Acc.ashcan: 0.5504, Acc.fan: 0.6001, Acc.pier: 0.8004, Acc.crt screen: 0.0459, Acc.plate: 0.6912, 
Acc.monitor: 0.1226, Acc.bulletin board: 0.7836, Acc.shower: 0.0000, Acc.radiator: 0.5672, Acc.glass: 0.1459, Acc.clock: 0.1526, Acc.flag: 0.5287 2023-02-11 23:42:46,896 - mmseg - INFO - Iter [96050/160000] lr: 2.398e-05, eta: 3:39:48, time: 0.450, data_time: 0.246, memory: 7748, decode.loss_ce: 0.2570, decode.acc_seg: 90.0972, aux.loss_ce: 0.1879, aux.acc_seg: 82.2235, loss: 0.4449, grad_norm: 4.8089 2023-02-11 23:42:57,293 - mmseg - INFO - Iter [96100/160000] lr: 2.396e-05, eta: 3:39:38, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2337, decode.acc_seg: 90.8556, aux.loss_ce: 0.1696, aux.acc_seg: 83.7450, loss: 0.4033, grad_norm: 4.0425 2023-02-11 23:43:07,365 - mmseg - INFO - Iter [96150/160000] lr: 2.394e-05, eta: 3:39:27, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2534, decode.acc_seg: 90.0165, aux.loss_ce: 0.1829, aux.acc_seg: 82.3342, loss: 0.4363, grad_norm: 4.7989 2023-02-11 23:43:17,683 - mmseg - INFO - Iter [96200/160000] lr: 2.393e-05, eta: 3:39:17, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2298, decode.acc_seg: 90.9350, aux.loss_ce: 0.1714, aux.acc_seg: 83.1773, loss: 0.4012, grad_norm: 4.1378 2023-02-11 23:43:27,611 - mmseg - INFO - Iter [96250/160000] lr: 2.391e-05, eta: 3:39:06, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2465, decode.acc_seg: 90.3885, aux.loss_ce: 0.1786, aux.acc_seg: 83.0575, loss: 0.4251, grad_norm: 5.8997 2023-02-11 23:43:37,417 - mmseg - INFO - Iter [96300/160000] lr: 2.389e-05, eta: 3:38:56, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2447, decode.acc_seg: 90.5946, aux.loss_ce: 0.1811, aux.acc_seg: 82.6270, loss: 0.4258, grad_norm: 4.7125 2023-02-11 23:43:47,573 - mmseg - INFO - Iter [96350/160000] lr: 2.387e-05, eta: 3:38:45, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2476, decode.acc_seg: 90.1302, aux.loss_ce: 0.1803, aux.acc_seg: 82.5765, loss: 0.4279, grad_norm: 4.4635 2023-02-11 23:43:57,380 - mmseg - INFO - Iter [96400/160000] lr: 2.385e-05, eta: 3:38:34, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2362, decode.acc_seg: 90.6112, aux.loss_ce: 0.1833, aux.acc_seg: 82.1407, loss: 0.4195, grad_norm: 4.6924 2023-02-11 23:44:07,120 - mmseg - INFO - Iter [96450/160000] lr: 2.383e-05, eta: 3:38:24, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2385, decode.acc_seg: 90.5287, aux.loss_ce: 0.1786, aux.acc_seg: 82.8380, loss: 0.4172, grad_norm: 4.3001 2023-02-11 23:44:17,005 - mmseg - INFO - Iter [96500/160000] lr: 2.381e-05, eta: 3:38:13, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2361, decode.acc_seg: 90.6917, aux.loss_ce: 0.1802, aux.acc_seg: 82.4531, loss: 0.4163, grad_norm: 5.3293 2023-02-11 23:44:27,082 - mmseg - INFO - Iter [96550/160000] lr: 2.379e-05, eta: 3:38:03, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2408, decode.acc_seg: 90.6451, aux.loss_ce: 0.1778, aux.acc_seg: 83.1658, loss: 0.4186, grad_norm: 4.7469 2023-02-11 23:44:36,878 - mmseg - INFO - Iter [96600/160000] lr: 2.378e-05, eta: 3:37:52, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2408, decode.acc_seg: 90.1880, aux.loss_ce: 0.1795, aux.acc_seg: 82.1592, loss: 0.4203, grad_norm: 4.1028 2023-02-11 23:44:47,523 - mmseg - INFO - Iter [96650/160000] lr: 2.376e-05, eta: 3:37:42, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2380, decode.acc_seg: 90.9751, aux.loss_ce: 0.1728, aux.acc_seg: 83.4415, loss: 0.4108, grad_norm: 4.2349 2023-02-11 23:44:57,992 - mmseg - INFO - 
Iter [96700/160000] lr: 2.374e-05, eta: 3:37:32, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2395, decode.acc_seg: 90.5346, aux.loss_ce: 0.1756, aux.acc_seg: 82.8624, loss: 0.4151, grad_norm: 4.7533 2023-02-11 23:45:08,114 - mmseg - INFO - Iter [96750/160000] lr: 2.372e-05, eta: 3:37:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2330, decode.acc_seg: 90.9894, aux.loss_ce: 0.1743, aux.acc_seg: 83.4338, loss: 0.4073, grad_norm: 4.3067 2023-02-11 23:45:17,949 - mmseg - INFO - Iter [96800/160000] lr: 2.370e-05, eta: 3:37:10, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2513, decode.acc_seg: 90.3124, aux.loss_ce: 0.1842, aux.acc_seg: 82.4026, loss: 0.4355, grad_norm: 4.3528 2023-02-11 23:45:29,858 - mmseg - INFO - Iter [96850/160000] lr: 2.368e-05, eta: 3:37:01, time: 0.238, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2402, decode.acc_seg: 90.5369, aux.loss_ce: 0.1784, aux.acc_seg: 82.6962, loss: 0.4186, grad_norm: 4.8184 2023-02-11 23:45:40,051 - mmseg - INFO - Iter [96900/160000] lr: 2.366e-05, eta: 3:36:51, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2374, decode.acc_seg: 90.5807, aux.loss_ce: 0.1772, aux.acc_seg: 82.8976, loss: 0.4146, grad_norm: 3.8401 2023-02-11 23:45:50,111 - mmseg - INFO - Iter [96950/160000] lr: 2.364e-05, eta: 3:36:40, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2374, decode.acc_seg: 90.5857, aux.loss_ce: 0.1736, aux.acc_seg: 83.1324, loss: 0.4110, grad_norm: 4.0344 2023-02-11 23:46:00,121 - mmseg - INFO - Saving checkpoint at 97000 iterations 2023-02-11 23:46:00,846 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:46:00,846 - mmseg - INFO - Iter [97000/160000] lr: 2.363e-05, eta: 3:36:30, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2510, decode.acc_seg: 90.1323, aux.loss_ce: 0.1789, aux.acc_seg: 82.7427, loss: 0.4299, grad_norm: 4.4888 2023-02-11 23:46:10,783 - mmseg - INFO - Iter [97050/160000] lr: 2.361e-05, eta: 3:36:20, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2387, decode.acc_seg: 90.8472, aux.loss_ce: 0.1725, aux.acc_seg: 83.7441, loss: 0.4112, grad_norm: 3.7801 2023-02-11 23:46:21,245 - mmseg - INFO - Iter [97100/160000] lr: 2.359e-05, eta: 3:36:10, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2352, decode.acc_seg: 90.9090, aux.loss_ce: 0.1796, aux.acc_seg: 82.8380, loss: 0.4148, grad_norm: 4.5706 2023-02-11 23:46:31,546 - mmseg - INFO - Iter [97150/160000] lr: 2.357e-05, eta: 3:35:59, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2339, decode.acc_seg: 90.6625, aux.loss_ce: 0.1659, aux.acc_seg: 83.6462, loss: 0.3998, grad_norm: 4.1302 2023-02-11 23:46:41,460 - mmseg - INFO - Iter [97200/160000] lr: 2.355e-05, eta: 3:35:49, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2485, decode.acc_seg: 90.6150, aux.loss_ce: 0.1815, aux.acc_seg: 82.9780, loss: 0.4299, grad_norm: 4.8192 2023-02-11 23:46:51,314 - mmseg - INFO - Iter [97250/160000] lr: 2.353e-05, eta: 3:35:38, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2251, decode.acc_seg: 91.1055, aux.loss_ce: 0.1768, aux.acc_seg: 82.9755, loss: 0.4019, grad_norm: 4.4848 2023-02-11 23:47:01,232 - mmseg - INFO - Iter [97300/160000] lr: 2.351e-05, eta: 3:35:27, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2328, decode.acc_seg: 90.8632, aux.loss_ce: 0.1785, aux.acc_seg: 83.0454, loss: 0.4113, grad_norm: 4.3740 2023-02-11 23:47:10,946 - mmseg - INFO - Iter 
[97350/160000] lr: 2.349e-05, eta: 3:35:17, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2465, decode.acc_seg: 90.3077, aux.loss_ce: 0.1776, aux.acc_seg: 82.9885, loss: 0.4241, grad_norm: 4.1437 2023-02-11 23:47:21,283 - mmseg - INFO - Iter [97400/160000] lr: 2.348e-05, eta: 3:35:06, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2468, decode.acc_seg: 90.0365, aux.loss_ce: 0.1711, aux.acc_seg: 82.6653, loss: 0.4179, grad_norm: 5.0216 2023-02-11 23:47:31,543 - mmseg - INFO - Iter [97450/160000] lr: 2.346e-05, eta: 3:34:56, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2353, decode.acc_seg: 90.5168, aux.loss_ce: 0.1734, aux.acc_seg: 82.7135, loss: 0.4088, grad_norm: 4.3687 2023-02-11 23:47:41,335 - mmseg - INFO - Iter [97500/160000] lr: 2.344e-05, eta: 3:34:45, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2565, decode.acc_seg: 90.1889, aux.loss_ce: 0.1872, aux.acc_seg: 82.5941, loss: 0.4436, grad_norm: 4.6761 2023-02-11 23:47:51,114 - mmseg - INFO - Iter [97550/160000] lr: 2.342e-05, eta: 3:34:35, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2432, decode.acc_seg: 90.4392, aux.loss_ce: 0.1791, aux.acc_seg: 82.6121, loss: 0.4222, grad_norm: 5.0060 2023-02-11 23:48:00,930 - mmseg - INFO - Iter [97600/160000] lr: 2.340e-05, eta: 3:34:24, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2644, decode.acc_seg: 89.8377, aux.loss_ce: 0.1879, aux.acc_seg: 81.8953, loss: 0.4523, grad_norm: 5.4702 2023-02-11 23:48:11,365 - mmseg - INFO - Iter [97650/160000] lr: 2.338e-05, eta: 3:34:14, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2441, decode.acc_seg: 90.4925, aux.loss_ce: 0.1727, aux.acc_seg: 83.5259, loss: 0.4168, grad_norm: 4.5390 2023-02-11 23:48:21,359 - mmseg - INFO - Iter [97700/160000] lr: 2.336e-05, eta: 3:34:03, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2303, decode.acc_seg: 90.9731, aux.loss_ce: 0.1647, aux.acc_seg: 83.9226, loss: 0.3950, grad_norm: 4.1219 2023-02-11 23:48:31,391 - mmseg - INFO - Iter [97750/160000] lr: 2.334e-05, eta: 3:33:53, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2450, decode.acc_seg: 90.5438, aux.loss_ce: 0.1816, aux.acc_seg: 82.6273, loss: 0.4265, grad_norm: 5.0809 2023-02-11 23:48:41,695 - mmseg - INFO - Iter [97800/160000] lr: 2.333e-05, eta: 3:33:42, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2359, decode.acc_seg: 90.5123, aux.loss_ce: 0.1785, aux.acc_seg: 82.5441, loss: 0.4144, grad_norm: 4.3918 2023-02-11 23:48:51,684 - mmseg - INFO - Iter [97850/160000] lr: 2.331e-05, eta: 3:33:32, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2359, decode.acc_seg: 90.5804, aux.loss_ce: 0.1755, aux.acc_seg: 82.5560, loss: 0.4114, grad_norm: 4.4466 2023-02-11 23:49:01,491 - mmseg - INFO - Iter [97900/160000] lr: 2.329e-05, eta: 3:33:21, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2590, decode.acc_seg: 89.8808, aux.loss_ce: 0.1931, aux.acc_seg: 81.4045, loss: 0.4521, grad_norm: 5.1348 2023-02-11 23:49:11,450 - mmseg - INFO - Iter [97950/160000] lr: 2.327e-05, eta: 3:33:11, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2437, decode.acc_seg: 90.5098, aux.loss_ce: 0.1782, aux.acc_seg: 82.9561, loss: 0.4219, grad_norm: 4.8050 2023-02-11 23:49:21,790 - mmseg - INFO - Saving checkpoint at 98000 iterations 2023-02-11 23:49:22,458 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:49:22,459 - mmseg - INFO - Iter 
[98000/160000] lr: 2.325e-05, eta: 3:33:01, time: 0.220, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2444, decode.acc_seg: 90.4076, aux.loss_ce: 0.1722, aux.acc_seg: 83.7305, loss: 0.4166, grad_norm: 4.7486 2023-02-11 23:49:33,180 - mmseg - INFO - Iter [98050/160000] lr: 2.323e-05, eta: 3:32:51, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2499, decode.acc_seg: 90.3565, aux.loss_ce: 0.1815, aux.acc_seg: 82.6392, loss: 0.4315, grad_norm: 5.0071 2023-02-11 23:49:45,244 - mmseg - INFO - Iter [98100/160000] lr: 2.321e-05, eta: 3:32:42, time: 0.241, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2498, decode.acc_seg: 90.1114, aux.loss_ce: 0.1814, aux.acc_seg: 82.3412, loss: 0.4312, grad_norm: 4.6797 2023-02-11 23:49:55,211 - mmseg - INFO - Iter [98150/160000] lr: 2.319e-05, eta: 3:32:31, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2439, decode.acc_seg: 90.5124, aux.loss_ce: 0.1784, aux.acc_seg: 83.1027, loss: 0.4222, grad_norm: 4.6688 2023-02-11 23:50:05,360 - mmseg - INFO - Iter [98200/160000] lr: 2.318e-05, eta: 3:32:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2525, decode.acc_seg: 90.6546, aux.loss_ce: 0.1750, aux.acc_seg: 83.4381, loss: 0.4275, grad_norm: 4.7790 2023-02-11 23:50:15,574 - mmseg - INFO - Iter [98250/160000] lr: 2.316e-05, eta: 3:32:10, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2454, decode.acc_seg: 90.2871, aux.loss_ce: 0.1771, aux.acc_seg: 82.5415, loss: 0.4225, grad_norm: 5.3601 2023-02-11 23:50:25,627 - mmseg - INFO - Iter [98300/160000] lr: 2.314e-05, eta: 3:32:00, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2268, decode.acc_seg: 91.1568, aux.loss_ce: 0.1783, aux.acc_seg: 82.7931, loss: 0.4051, grad_norm: 4.5733 2023-02-11 23:50:35,794 - mmseg - INFO - Iter [98350/160000] lr: 2.312e-05, eta: 3:31:50, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2434, decode.acc_seg: 90.3974, aux.loss_ce: 0.1715, aux.acc_seg: 83.2453, loss: 0.4149, grad_norm: 4.6818 2023-02-11 23:50:45,706 - mmseg - INFO - Iter [98400/160000] lr: 2.310e-05, eta: 3:31:39, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2481, decode.acc_seg: 90.3443, aux.loss_ce: 0.1834, aux.acc_seg: 82.5816, loss: 0.4315, grad_norm: 4.3490 2023-02-11 23:50:55,591 - mmseg - INFO - Iter [98450/160000] lr: 2.308e-05, eta: 3:31:28, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2379, decode.acc_seg: 90.4779, aux.loss_ce: 0.1678, aux.acc_seg: 83.3823, loss: 0.4057, grad_norm: 4.3420 2023-02-11 23:51:05,371 - mmseg - INFO - Iter [98500/160000] lr: 2.306e-05, eta: 3:31:18, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2457, decode.acc_seg: 90.4060, aux.loss_ce: 0.1772, aux.acc_seg: 82.6305, loss: 0.4229, grad_norm: 4.0203 2023-02-11 23:51:15,249 - mmseg - INFO - Iter [98550/160000] lr: 2.304e-05, eta: 3:31:07, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2485, decode.acc_seg: 90.3113, aux.loss_ce: 0.1850, aux.acc_seg: 82.5318, loss: 0.4335, grad_norm: 5.5773 2023-02-11 23:51:25,227 - mmseg - INFO - Iter [98600/160000] lr: 2.303e-05, eta: 3:30:56, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2371, decode.acc_seg: 90.7488, aux.loss_ce: 0.1685, aux.acc_seg: 83.3845, loss: 0.4056, grad_norm: 4.3631 2023-02-11 23:51:35,650 - mmseg - INFO - Iter [98650/160000] lr: 2.301e-05, eta: 3:30:46, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2359, decode.acc_seg: 90.6832, aux.loss_ce: 0.1728, aux.acc_seg: 83.0966, loss: 
0.4088, grad_norm: 4.5659 2023-02-11 23:51:46,372 - mmseg - INFO - Iter [98700/160000] lr: 2.299e-05, eta: 3:30:36, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2288, decode.acc_seg: 90.7484, aux.loss_ce: 0.1720, aux.acc_seg: 82.8577, loss: 0.4008, grad_norm: 4.1556 2023-02-11 23:51:56,021 - mmseg - INFO - Iter [98750/160000] lr: 2.297e-05, eta: 3:30:25, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2513, decode.acc_seg: 90.2039, aux.loss_ce: 0.1859, aux.acc_seg: 82.2232, loss: 0.4372, grad_norm: 4.6449 2023-02-11 23:52:05,951 - mmseg - INFO - Iter [98800/160000] lr: 2.295e-05, eta: 3:30:15, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2310, decode.acc_seg: 90.8578, aux.loss_ce: 0.1733, aux.acc_seg: 83.2095, loss: 0.4043, grad_norm: 3.9812 2023-02-11 23:52:15,712 - mmseg - INFO - Iter [98850/160000] lr: 2.293e-05, eta: 3:30:04, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2725, decode.acc_seg: 89.4375, aux.loss_ce: 0.1986, aux.acc_seg: 81.0075, loss: 0.4711, grad_norm: 4.5621 2023-02-11 23:52:25,591 - mmseg - INFO - Iter [98900/160000] lr: 2.291e-05, eta: 3:29:53, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2385, decode.acc_seg: 90.4856, aux.loss_ce: 0.1758, aux.acc_seg: 82.7426, loss: 0.4143, grad_norm: 4.0223 2023-02-11 23:52:35,409 - mmseg - INFO - Iter [98950/160000] lr: 2.289e-05, eta: 3:29:43, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2510, decode.acc_seg: 90.1362, aux.loss_ce: 0.1841, aux.acc_seg: 81.9407, loss: 0.4350, grad_norm: 4.9376 2023-02-11 23:52:45,220 - mmseg - INFO - Saving checkpoint at 99000 iterations 2023-02-11 23:52:45,905 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:52:45,905 - mmseg - INFO - Iter [99000/160000] lr: 2.288e-05, eta: 3:29:33, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2282, decode.acc_seg: 91.2726, aux.loss_ce: 0.1720, aux.acc_seg: 83.3496, loss: 0.4003, grad_norm: 4.0580 2023-02-11 23:52:55,938 - mmseg - INFO - Iter [99050/160000] lr: 2.286e-05, eta: 3:29:22, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2357, decode.acc_seg: 90.7637, aux.loss_ce: 0.1760, aux.acc_seg: 82.8810, loss: 0.4117, grad_norm: 4.5701 2023-02-11 23:53:05,647 - mmseg - INFO - Iter [99100/160000] lr: 2.284e-05, eta: 3:29:11, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2545, decode.acc_seg: 90.2855, aux.loss_ce: 0.1816, aux.acc_seg: 82.6448, loss: 0.4361, grad_norm: 5.2410 2023-02-11 23:53:15,506 - mmseg - INFO - Iter [99150/160000] lr: 2.282e-05, eta: 3:29:01, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2475, decode.acc_seg: 90.3423, aux.loss_ce: 0.1777, aux.acc_seg: 82.6359, loss: 0.4252, grad_norm: 4.9755 2023-02-11 23:53:25,406 - mmseg - INFO - Iter [99200/160000] lr: 2.280e-05, eta: 3:28:50, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2294, decode.acc_seg: 90.8575, aux.loss_ce: 0.1692, aux.acc_seg: 83.4398, loss: 0.3985, grad_norm: 4.6839 2023-02-11 23:53:35,920 - mmseg - INFO - Iter [99250/160000] lr: 2.278e-05, eta: 3:28:40, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2420, decode.acc_seg: 90.4062, aux.loss_ce: 0.1748, aux.acc_seg: 83.0087, loss: 0.4168, grad_norm: 4.9274 2023-02-11 23:53:46,257 - mmseg - INFO - Iter [99300/160000] lr: 2.276e-05, eta: 3:28:30, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2519, decode.acc_seg: 90.4044, aux.loss_ce: 0.1863, aux.acc_seg: 82.2731, loss: 0.4382, 
grad_norm: 5.2178 2023-02-11 23:53:56,132 - mmseg - INFO - Iter [99350/160000] lr: 2.274e-05, eta: 3:28:19, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2371, decode.acc_seg: 90.6114, aux.loss_ce: 0.1776, aux.acc_seg: 83.0187, loss: 0.4148, grad_norm: 4.8805 2023-02-11 23:54:08,125 - mmseg - INFO - Iter [99400/160000] lr: 2.273e-05, eta: 3:28:10, time: 0.239, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2432, decode.acc_seg: 90.6314, aux.loss_ce: 0.1801, aux.acc_seg: 82.9836, loss: 0.4232, grad_norm: 4.8919 2023-02-11 23:54:18,125 - mmseg - INFO - Iter [99450/160000] lr: 2.271e-05, eta: 3:28:00, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2289, decode.acc_seg: 90.7718, aux.loss_ce: 0.1754, aux.acc_seg: 82.8444, loss: 0.4043, grad_norm: 5.2928 2023-02-11 23:54:28,393 - mmseg - INFO - Iter [99500/160000] lr: 2.269e-05, eta: 3:27:49, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2412, decode.acc_seg: 90.5499, aux.loss_ce: 0.1802, aux.acc_seg: 82.9375, loss: 0.4214, grad_norm: 4.7937 2023-02-11 23:54:38,900 - mmseg - INFO - Iter [99550/160000] lr: 2.267e-05, eta: 3:27:39, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2330, decode.acc_seg: 90.8290, aux.loss_ce: 0.1742, aux.acc_seg: 83.1612, loss: 0.4072, grad_norm: 4.4733 2023-02-11 23:54:48,712 - mmseg - INFO - Iter [99600/160000] lr: 2.265e-05, eta: 3:27:28, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2222, decode.acc_seg: 91.1330, aux.loss_ce: 0.1712, aux.acc_seg: 83.1878, loss: 0.3934, grad_norm: 4.6742 2023-02-11 23:54:58,559 - mmseg - INFO - Iter [99650/160000] lr: 2.263e-05, eta: 3:27:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2368, decode.acc_seg: 90.3534, aux.loss_ce: 0.1797, aux.acc_seg: 82.3882, loss: 0.4166, grad_norm: 4.8247 2023-02-11 23:55:08,261 - mmseg - INFO - Iter [99700/160000] lr: 2.261e-05, eta: 3:27:07, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2416, decode.acc_seg: 90.5765, aux.loss_ce: 0.1785, aux.acc_seg: 83.2102, loss: 0.4201, grad_norm: 4.4319 2023-02-11 23:55:18,434 - mmseg - INFO - Iter [99750/160000] lr: 2.259e-05, eta: 3:26:57, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2387, decode.acc_seg: 90.6326, aux.loss_ce: 0.1770, aux.acc_seg: 83.0351, loss: 0.4157, grad_norm: 4.4643 2023-02-11 23:55:28,468 - mmseg - INFO - Iter [99800/160000] lr: 2.258e-05, eta: 3:26:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2305, decode.acc_seg: 90.9687, aux.loss_ce: 0.1781, aux.acc_seg: 82.5552, loss: 0.4086, grad_norm: 4.2818 2023-02-11 23:55:38,382 - mmseg - INFO - Iter [99850/160000] lr: 2.256e-05, eta: 3:26:36, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2353, decode.acc_seg: 90.6487, aux.loss_ce: 0.1709, aux.acc_seg: 83.4325, loss: 0.4062, grad_norm: 4.6869 2023-02-11 23:55:48,499 - mmseg - INFO - Iter [99900/160000] lr: 2.254e-05, eta: 3:26:25, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2386, decode.acc_seg: 90.3639, aux.loss_ce: 0.1718, aux.acc_seg: 83.0127, loss: 0.4104, grad_norm: 3.8900 2023-02-11 23:55:58,467 - mmseg - INFO - Iter [99950/160000] lr: 2.252e-05, eta: 3:26:15, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2353, decode.acc_seg: 90.6418, aux.loss_ce: 0.1669, aux.acc_seg: 83.6378, loss: 0.4022, grad_norm: 4.7502 2023-02-11 23:56:08,282 - mmseg - INFO - Saving checkpoint at 100000 iterations 2023-02-11 23:56:09,045 - mmseg - INFO - Exp name: 
diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:56:09,045 - mmseg - INFO - Iter [100000/160000] lr: 2.250e-05, eta: 3:26:04, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2504, decode.acc_seg: 90.2022, aux.loss_ce: 0.1882, aux.acc_seg: 82.2412, loss: 0.4386, grad_norm: 5.3639 2023-02-11 23:56:19,333 - mmseg - INFO - Iter [100050/160000] lr: 2.248e-05, eta: 3:25:54, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2294, decode.acc_seg: 90.8979, aux.loss_ce: 0.1738, aux.acc_seg: 83.3074, loss: 0.4032, grad_norm: 4.8632 2023-02-11 23:56:29,712 - mmseg - INFO - Iter [100100/160000] lr: 2.246e-05, eta: 3:25:44, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2334, decode.acc_seg: 90.5372, aux.loss_ce: 0.1737, aux.acc_seg: 82.9959, loss: 0.4071, grad_norm: 4.9417 2023-02-11 23:56:39,664 - mmseg - INFO - Iter [100150/160000] lr: 2.244e-05, eta: 3:25:33, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2374, decode.acc_seg: 90.7444, aux.loss_ce: 0.1729, aux.acc_seg: 83.2552, loss: 0.4103, grad_norm: 4.5812 2023-02-11 23:56:49,688 - mmseg - INFO - Iter [100200/160000] lr: 2.243e-05, eta: 3:25:23, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2289, decode.acc_seg: 90.8425, aux.loss_ce: 0.1729, aux.acc_seg: 83.0770, loss: 0.4017, grad_norm: 4.2714 2023-02-11 23:56:59,873 - mmseg - INFO - Iter [100250/160000] lr: 2.241e-05, eta: 3:25:12, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2346, decode.acc_seg: 90.8255, aux.loss_ce: 0.1795, aux.acc_seg: 82.7166, loss: 0.4141, grad_norm: 5.6024 2023-02-11 23:57:09,795 - mmseg - INFO - Iter [100300/160000] lr: 2.239e-05, eta: 3:25:02, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2330, decode.acc_seg: 90.7653, aux.loss_ce: 0.1801, aux.acc_seg: 82.0632, loss: 0.4131, grad_norm: 4.9283 2023-02-11 23:57:19,870 - mmseg - INFO - Iter [100350/160000] lr: 2.237e-05, eta: 3:24:51, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2372, decode.acc_seg: 90.6832, aux.loss_ce: 0.1770, aux.acc_seg: 82.8342, loss: 0.4142, grad_norm: 4.5715 2023-02-11 23:57:30,036 - mmseg - INFO - Iter [100400/160000] lr: 2.235e-05, eta: 3:24:41, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2292, decode.acc_seg: 90.9313, aux.loss_ce: 0.1688, aux.acc_seg: 83.6735, loss: 0.3979, grad_norm: 4.2801 2023-02-11 23:57:40,294 - mmseg - INFO - Iter [100450/160000] lr: 2.233e-05, eta: 3:24:31, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2465, decode.acc_seg: 90.3797, aux.loss_ce: 0.1774, aux.acc_seg: 83.3964, loss: 0.4239, grad_norm: 5.4660 2023-02-11 23:57:50,116 - mmseg - INFO - Iter [100500/160000] lr: 2.231e-05, eta: 3:24:20, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2299, decode.acc_seg: 90.9303, aux.loss_ce: 0.1675, aux.acc_seg: 83.6918, loss: 0.3974, grad_norm: 4.3761 2023-02-11 23:58:00,699 - mmseg - INFO - Iter [100550/160000] lr: 2.229e-05, eta: 3:24:10, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2397, decode.acc_seg: 90.4837, aux.loss_ce: 0.1763, aux.acc_seg: 82.9620, loss: 0.4160, grad_norm: 5.0781 2023-02-11 23:58:10,442 - mmseg - INFO - Iter [100600/160000] lr: 2.228e-05, eta: 3:23:59, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2427, decode.acc_seg: 90.6544, aux.loss_ce: 0.1741, aux.acc_seg: 83.1590, loss: 0.4167, grad_norm: 4.0718 2023-02-11 23:58:22,502 - mmseg - INFO - Iter [100650/160000] lr: 2.226e-05, eta: 3:23:50, time: 0.241, data_time: 0.046, 
memory: 7748, decode.loss_ce: 0.2346, decode.acc_seg: 90.6840, aux.loss_ce: 0.1671, aux.acc_seg: 83.7215, loss: 0.4017, grad_norm: 4.7084 2023-02-11 23:58:32,616 - mmseg - INFO - Iter [100700/160000] lr: 2.224e-05, eta: 3:23:40, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2377, decode.acc_seg: 90.5209, aux.loss_ce: 0.1739, aux.acc_seg: 83.2330, loss: 0.4116, grad_norm: 4.7096 2023-02-11 23:58:42,864 - mmseg - INFO - Iter [100750/160000] lr: 2.222e-05, eta: 3:23:29, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2398, decode.acc_seg: 90.3179, aux.loss_ce: 0.1730, aux.acc_seg: 83.1047, loss: 0.4128, grad_norm: 4.3701 2023-02-11 23:58:53,296 - mmseg - INFO - Iter [100800/160000] lr: 2.220e-05, eta: 3:23:19, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2380, decode.acc_seg: 90.8640, aux.loss_ce: 0.1749, aux.acc_seg: 83.1302, loss: 0.4129, grad_norm: 4.1371 2023-02-11 23:59:03,505 - mmseg - INFO - Iter [100850/160000] lr: 2.218e-05, eta: 3:23:09, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2537, decode.acc_seg: 90.4299, aux.loss_ce: 0.1802, aux.acc_seg: 83.1785, loss: 0.4339, grad_norm: 5.6607 2023-02-11 23:59:13,579 - mmseg - INFO - Iter [100900/160000] lr: 2.216e-05, eta: 3:22:58, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2380, decode.acc_seg: 90.3477, aux.loss_ce: 0.1740, aux.acc_seg: 82.6687, loss: 0.4120, grad_norm: 4.1139 2023-02-11 23:59:23,537 - mmseg - INFO - Iter [100950/160000] lr: 2.214e-05, eta: 3:22:48, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2374, decode.acc_seg: 90.5177, aux.loss_ce: 0.1762, aux.acc_seg: 82.8840, loss: 0.4137, grad_norm: 4.5205 2023-02-11 23:59:33,662 - mmseg - INFO - Saving checkpoint at 101000 iterations 2023-02-11 23:59:34,333 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-11 23:59:34,334 - mmseg - INFO - Iter [101000/160000] lr: 2.213e-05, eta: 3:22:38, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2340, decode.acc_seg: 90.8876, aux.loss_ce: 0.1755, aux.acc_seg: 83.4123, loss: 0.4095, grad_norm: 4.1895 2023-02-11 23:59:44,430 - mmseg - INFO - Iter [101050/160000] lr: 2.211e-05, eta: 3:22:27, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2223, decode.acc_seg: 91.0450, aux.loss_ce: 0.1692, aux.acc_seg: 83.3227, loss: 0.3914, grad_norm: 4.0184 2023-02-11 23:59:54,333 - mmseg - INFO - Iter [101100/160000] lr: 2.209e-05, eta: 3:22:17, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2245, decode.acc_seg: 91.0513, aux.loss_ce: 0.1664, aux.acc_seg: 83.7660, loss: 0.3909, grad_norm: 4.7749 2023-02-12 00:00:04,509 - mmseg - INFO - Iter [101150/160000] lr: 2.207e-05, eta: 3:22:06, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2489, decode.acc_seg: 90.1550, aux.loss_ce: 0.1889, aux.acc_seg: 81.6018, loss: 0.4378, grad_norm: 5.2074 2023-02-12 00:00:14,389 - mmseg - INFO - Iter [101200/160000] lr: 2.205e-05, eta: 3:21:56, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2410, decode.acc_seg: 90.5600, aux.loss_ce: 0.1804, aux.acc_seg: 82.5909, loss: 0.4214, grad_norm: 4.4516 2023-02-12 00:00:24,172 - mmseg - INFO - Iter [101250/160000] lr: 2.203e-05, eta: 3:21:45, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2268, decode.acc_seg: 91.0782, aux.loss_ce: 0.1762, aux.acc_seg: 83.0634, loss: 0.4030, grad_norm: 4.0546 2023-02-12 00:00:33,903 - mmseg - INFO - Iter [101300/160000] lr: 2.201e-05, eta: 3:21:34, time: 0.195, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.2293, decode.acc_seg: 90.9816, aux.loss_ce: 0.1707, aux.acc_seg: 83.5036, loss: 0.4000, grad_norm: 4.4686 2023-02-12 00:00:43,839 - mmseg - INFO - Iter [101350/160000] lr: 2.199e-05, eta: 3:21:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2441, decode.acc_seg: 90.7030, aux.loss_ce: 0.1817, aux.acc_seg: 82.7300, loss: 0.4258, grad_norm: 4.6732 2023-02-12 00:00:53,964 - mmseg - INFO - Iter [101400/160000] lr: 2.198e-05, eta: 3:21:13, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2356, decode.acc_seg: 90.6457, aux.loss_ce: 0.1661, aux.acc_seg: 83.6733, loss: 0.4017, grad_norm: 4.3873 2023-02-12 00:01:04,155 - mmseg - INFO - Iter [101450/160000] lr: 2.196e-05, eta: 3:21:03, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2324, decode.acc_seg: 90.9617, aux.loss_ce: 0.1706, aux.acc_seg: 83.5544, loss: 0.4030, grad_norm: 4.5008 2023-02-12 00:01:14,700 - mmseg - INFO - Iter [101500/160000] lr: 2.194e-05, eta: 3:20:53, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2370, decode.acc_seg: 90.5087, aux.loss_ce: 0.1738, aux.acc_seg: 82.8810, loss: 0.4108, grad_norm: 4.0027 2023-02-12 00:01:25,134 - mmseg - INFO - Iter [101550/160000] lr: 2.192e-05, eta: 3:20:43, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2208, decode.acc_seg: 91.3314, aux.loss_ce: 0.1659, aux.acc_seg: 83.8137, loss: 0.3868, grad_norm: 4.0096 2023-02-12 00:01:35,549 - mmseg - INFO - Iter [101600/160000] lr: 2.190e-05, eta: 3:20:32, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2540, decode.acc_seg: 89.9525, aux.loss_ce: 0.1868, aux.acc_seg: 82.1225, loss: 0.4409, grad_norm: 5.1779 2023-02-12 00:01:45,592 - mmseg - INFO - Iter [101650/160000] lr: 2.188e-05, eta: 3:20:22, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2315, decode.acc_seg: 91.2252, aux.loss_ce: 0.1714, aux.acc_seg: 84.0413, loss: 0.4030, grad_norm: 4.3541 2023-02-12 00:01:55,580 - mmseg - INFO - Iter [101700/160000] lr: 2.186e-05, eta: 3:20:11, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2323, decode.acc_seg: 90.6655, aux.loss_ce: 0.1742, aux.acc_seg: 82.5649, loss: 0.4065, grad_norm: 4.7636 2023-02-12 00:02:05,493 - mmseg - INFO - Iter [101750/160000] lr: 2.184e-05, eta: 3:20:01, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2253, decode.acc_seg: 91.1330, aux.loss_ce: 0.1696, aux.acc_seg: 83.5055, loss: 0.3949, grad_norm: 4.0848 2023-02-12 00:02:15,908 - mmseg - INFO - Iter [101800/160000] lr: 2.183e-05, eta: 3:19:51, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2295, decode.acc_seg: 90.6869, aux.loss_ce: 0.1720, aux.acc_seg: 83.0045, loss: 0.4015, grad_norm: 4.8439 2023-02-12 00:02:25,798 - mmseg - INFO - Iter [101850/160000] lr: 2.181e-05, eta: 3:19:40, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2347, decode.acc_seg: 90.9888, aux.loss_ce: 0.1766, aux.acc_seg: 83.3643, loss: 0.4113, grad_norm: 4.5483 2023-02-12 00:02:38,106 - mmseg - INFO - Iter [101900/160000] lr: 2.179e-05, eta: 3:19:31, time: 0.246, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2277, decode.acc_seg: 90.9327, aux.loss_ce: 0.1697, aux.acc_seg: 83.5005, loss: 0.3974, grad_norm: 3.9961 2023-02-12 00:02:48,333 - mmseg - INFO - Iter [101950/160000] lr: 2.177e-05, eta: 3:19:21, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2249, decode.acc_seg: 90.9593, aux.loss_ce: 0.1727, aux.acc_seg: 83.3020, loss: 0.3976, grad_norm: 4.5857 2023-02-12 00:02:58,426 - 
mmseg - INFO - Saving checkpoint at 102000 iterations 2023-02-12 00:02:59,107 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:02:59,107 - mmseg - INFO - Iter [102000/160000] lr: 2.175e-05, eta: 3:19:11, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2386, decode.acc_seg: 90.9478, aux.loss_ce: 0.1780, aux.acc_seg: 83.1938, loss: 0.4166, grad_norm: 4.7752 2023-02-12 00:03:09,226 - mmseg - INFO - Iter [102050/160000] lr: 2.173e-05, eta: 3:19:00, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2214, decode.acc_seg: 91.3781, aux.loss_ce: 0.1643, aux.acc_seg: 83.8359, loss: 0.3857, grad_norm: 4.0261 2023-02-12 00:03:19,361 - mmseg - INFO - Iter [102100/160000] lr: 2.171e-05, eta: 3:18:50, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2333, decode.acc_seg: 91.0459, aux.loss_ce: 0.1658, aux.acc_seg: 84.0884, loss: 0.3991, grad_norm: 4.4971 2023-02-12 00:03:29,706 - mmseg - INFO - Iter [102150/160000] lr: 2.169e-05, eta: 3:18:40, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2598, decode.acc_seg: 89.9292, aux.loss_ce: 0.1863, aux.acc_seg: 82.2589, loss: 0.4461, grad_norm: 5.3333 2023-02-12 00:03:39,665 - mmseg - INFO - Iter [102200/160000] lr: 2.168e-05, eta: 3:18:29, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2314, decode.acc_seg: 90.7609, aux.loss_ce: 0.1666, aux.acc_seg: 83.4934, loss: 0.3980, grad_norm: 4.9132 2023-02-12 00:03:49,674 - mmseg - INFO - Iter [102250/160000] lr: 2.166e-05, eta: 3:18:19, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2378, decode.acc_seg: 90.7889, aux.loss_ce: 0.1750, aux.acc_seg: 83.2921, loss: 0.4128, grad_norm: 4.4718 2023-02-12 00:03:59,559 - mmseg - INFO - Iter [102300/160000] lr: 2.164e-05, eta: 3:18:08, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2317, decode.acc_seg: 90.9819, aux.loss_ce: 0.1722, aux.acc_seg: 83.5964, loss: 0.4039, grad_norm: 5.1616 2023-02-12 00:04:09,904 - mmseg - INFO - Iter [102350/160000] lr: 2.162e-05, eta: 3:17:58, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2148, decode.acc_seg: 91.1261, aux.loss_ce: 0.1679, aux.acc_seg: 83.2547, loss: 0.3827, grad_norm: 4.2361 2023-02-12 00:04:20,235 - mmseg - INFO - Iter [102400/160000] lr: 2.160e-05, eta: 3:17:48, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2403, decode.acc_seg: 90.7249, aux.loss_ce: 0.1767, aux.acc_seg: 83.0482, loss: 0.4170, grad_norm: 5.2712 2023-02-12 00:04:30,100 - mmseg - INFO - Iter [102450/160000] lr: 2.158e-05, eta: 3:17:37, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2326, decode.acc_seg: 90.7921, aux.loss_ce: 0.1757, aux.acc_seg: 82.9407, loss: 0.4083, grad_norm: 5.0933 2023-02-12 00:04:40,163 - mmseg - INFO - Iter [102500/160000] lr: 2.156e-05, eta: 3:17:26, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2251, decode.acc_seg: 91.2578, aux.loss_ce: 0.1709, aux.acc_seg: 83.5338, loss: 0.3960, grad_norm: 5.2171 2023-02-12 00:04:50,838 - mmseg - INFO - Iter [102550/160000] lr: 2.154e-05, eta: 3:17:16, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2305, decode.acc_seg: 90.9815, aux.loss_ce: 0.1712, aux.acc_seg: 83.5426, loss: 0.4017, grad_norm: 4.0777 2023-02-12 00:05:00,857 - mmseg - INFO - Iter [102600/160000] lr: 2.153e-05, eta: 3:17:06, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2314, decode.acc_seg: 90.9572, aux.loss_ce: 0.1734, aux.acc_seg: 83.1592, loss: 0.4048, grad_norm: 4.0005 2023-02-12 00:05:10,610 
- mmseg - INFO - Iter [102650/160000] lr: 2.151e-05, eta: 3:16:55, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2444, decode.acc_seg: 90.5737, aux.loss_ce: 0.1815, aux.acc_seg: 82.6672, loss: 0.4259, grad_norm: 4.7193 2023-02-12 00:05:20,741 - mmseg - INFO - Iter [102700/160000] lr: 2.149e-05, eta: 3:16:45, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2310, decode.acc_seg: 90.9194, aux.loss_ce: 0.1748, aux.acc_seg: 83.4516, loss: 0.4058, grad_norm: 4.4547 2023-02-12 00:05:30,652 - mmseg - INFO - Iter [102750/160000] lr: 2.147e-05, eta: 3:16:34, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2285, decode.acc_seg: 90.9371, aux.loss_ce: 0.1710, aux.acc_seg: 83.2420, loss: 0.3994, grad_norm: 4.4747 2023-02-12 00:05:40,752 - mmseg - INFO - Iter [102800/160000] lr: 2.145e-05, eta: 3:16:24, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2418, decode.acc_seg: 90.4040, aux.loss_ce: 0.1768, aux.acc_seg: 82.9276, loss: 0.4187, grad_norm: 4.4990 2023-02-12 00:05:51,351 - mmseg - INFO - Iter [102850/160000] lr: 2.143e-05, eta: 3:16:14, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2538, decode.acc_seg: 90.2266, aux.loss_ce: 0.1780, aux.acc_seg: 82.7405, loss: 0.4318, grad_norm: 4.7915 2023-02-12 00:06:01,127 - mmseg - INFO - Iter [102900/160000] lr: 2.141e-05, eta: 3:16:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2462, decode.acc_seg: 90.4846, aux.loss_ce: 0.1777, aux.acc_seg: 82.8573, loss: 0.4238, grad_norm: 5.6169 2023-02-12 00:06:11,703 - mmseg - INFO - Iter [102950/160000] lr: 2.139e-05, eta: 3:15:53, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2407, decode.acc_seg: 90.3984, aux.loss_ce: 0.1713, aux.acc_seg: 82.9078, loss: 0.4120, grad_norm: 4.6780 2023-02-12 00:06:21,666 - mmseg - INFO - Saving checkpoint at 103000 iterations 2023-02-12 00:06:22,339 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:06:22,339 - mmseg - INFO - Iter [103000/160000] lr: 2.138e-05, eta: 3:15:43, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2282, decode.acc_seg: 91.0086, aux.loss_ce: 0.1780, aux.acc_seg: 82.8213, loss: 0.4062, grad_norm: 4.2008 2023-02-12 00:06:32,275 - mmseg - INFO - Iter [103050/160000] lr: 2.136e-05, eta: 3:15:32, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2237, decode.acc_seg: 91.1807, aux.loss_ce: 0.1663, aux.acc_seg: 84.0236, loss: 0.3900, grad_norm: 3.8645 2023-02-12 00:06:42,477 - mmseg - INFO - Iter [103100/160000] lr: 2.134e-05, eta: 3:15:22, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2306, decode.acc_seg: 90.9517, aux.loss_ce: 0.1711, aux.acc_seg: 83.1911, loss: 0.4017, grad_norm: 4.5540 2023-02-12 00:06:54,374 - mmseg - INFO - Iter [103150/160000] lr: 2.132e-05, eta: 3:15:13, time: 0.238, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2403, decode.acc_seg: 90.7205, aux.loss_ce: 0.1755, aux.acc_seg: 83.5307, loss: 0.4158, grad_norm: 5.0157 2023-02-12 00:07:04,349 - mmseg - INFO - Iter [103200/160000] lr: 2.130e-05, eta: 3:15:02, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2416, decode.acc_seg: 90.4570, aux.loss_ce: 0.1773, aux.acc_seg: 83.0615, loss: 0.4189, grad_norm: 4.8477 2023-02-12 00:07:14,408 - mmseg - INFO - Iter [103250/160000] lr: 2.128e-05, eta: 3:14:52, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2390, decode.acc_seg: 90.4365, aux.loss_ce: 0.1749, aux.acc_seg: 82.9210, loss: 0.4139, grad_norm: 4.5333 2023-02-12 
00:07:24,200 - mmseg - INFO - Iter [103300/160000] lr: 2.126e-05, eta: 3:14:41, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2313, decode.acc_seg: 91.0300, aux.loss_ce: 0.1737, aux.acc_seg: 83.4137, loss: 0.4050, grad_norm: 4.5382 2023-02-12 00:07:33,884 - mmseg - INFO - Iter [103350/160000] lr: 2.124e-05, eta: 3:14:30, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2365, decode.acc_seg: 90.7636, aux.loss_ce: 0.1732, aux.acc_seg: 83.3800, loss: 0.4096, grad_norm: 4.8993 2023-02-12 00:07:43,762 - mmseg - INFO - Iter [103400/160000] lr: 2.123e-05, eta: 3:14:20, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2316, decode.acc_seg: 90.8243, aux.loss_ce: 0.1768, aux.acc_seg: 82.7124, loss: 0.4084, grad_norm: 5.2986 2023-02-12 00:07:53,932 - mmseg - INFO - Iter [103450/160000] lr: 2.121e-05, eta: 3:14:09, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2291, decode.acc_seg: 90.8642, aux.loss_ce: 0.1703, aux.acc_seg: 83.2707, loss: 0.3993, grad_norm: 3.8206 2023-02-12 00:08:03,651 - mmseg - INFO - Iter [103500/160000] lr: 2.119e-05, eta: 3:13:59, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2343, decode.acc_seg: 90.8604, aux.loss_ce: 0.1727, aux.acc_seg: 82.8162, loss: 0.4069, grad_norm: 4.7619 2023-02-12 00:08:14,232 - mmseg - INFO - Iter [103550/160000] lr: 2.117e-05, eta: 3:13:49, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2335, decode.acc_seg: 90.6102, aux.loss_ce: 0.1692, aux.acc_seg: 83.5789, loss: 0.4027, grad_norm: 4.4889 2023-02-12 00:08:24,577 - mmseg - INFO - Iter [103600/160000] lr: 2.115e-05, eta: 3:13:38, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2334, decode.acc_seg: 90.7835, aux.loss_ce: 0.1663, aux.acc_seg: 83.8783, loss: 0.3997, grad_norm: 4.2213 2023-02-12 00:08:34,615 - mmseg - INFO - Iter [103650/160000] lr: 2.113e-05, eta: 3:13:28, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2309, decode.acc_seg: 91.1578, aux.loss_ce: 0.1756, aux.acc_seg: 83.1421, loss: 0.4065, grad_norm: 4.5356 2023-02-12 00:08:44,789 - mmseg - INFO - Iter [103700/160000] lr: 2.111e-05, eta: 3:13:18, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2225, decode.acc_seg: 90.9912, aux.loss_ce: 0.1681, aux.acc_seg: 83.2031, loss: 0.3906, grad_norm: 4.2845 2023-02-12 00:08:54,982 - mmseg - INFO - Iter [103750/160000] lr: 2.109e-05, eta: 3:13:07, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2305, decode.acc_seg: 90.8677, aux.loss_ce: 0.1755, aux.acc_seg: 83.2879, loss: 0.4060, grad_norm: 4.6828 2023-02-12 00:09:04,946 - mmseg - INFO - Iter [103800/160000] lr: 2.108e-05, eta: 3:12:57, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2201, decode.acc_seg: 91.1502, aux.loss_ce: 0.1662, aux.acc_seg: 83.4146, loss: 0.3863, grad_norm: 3.8414 2023-02-12 00:09:15,268 - mmseg - INFO - Iter [103850/160000] lr: 2.106e-05, eta: 3:12:46, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2315, decode.acc_seg: 90.9480, aux.loss_ce: 0.1855, aux.acc_seg: 82.4676, loss: 0.4170, grad_norm: 4.9245 2023-02-12 00:09:25,585 - mmseg - INFO - Iter [103900/160000] lr: 2.104e-05, eta: 3:12:36, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2176, decode.acc_seg: 91.1219, aux.loss_ce: 0.1650, aux.acc_seg: 83.8652, loss: 0.3826, grad_norm: 3.8238 2023-02-12 00:09:35,985 - mmseg - INFO - Iter [103950/160000] lr: 2.102e-05, eta: 3:12:26, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2438, decode.acc_seg: 90.3037, 
aux.loss_ce: 0.1832, aux.acc_seg: 82.3181, loss: 0.4271, grad_norm: 5.1950 2023-02-12 00:09:46,029 - mmseg - INFO - Saving checkpoint at 104000 iterations 2023-02-12 00:09:46,760 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:09:46,760 - mmseg - INFO - Iter [104000/160000] lr: 2.100e-05, eta: 3:12:16, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2492, decode.acc_seg: 90.2485, aux.loss_ce: 0.1833, aux.acc_seg: 82.2429, loss: 0.4325, grad_norm: 5.1509 2023-02-12 00:09:57,136 - mmseg - INFO - Iter [104050/160000] lr: 2.098e-05, eta: 3:12:06, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2442, decode.acc_seg: 90.1712, aux.loss_ce: 0.1738, aux.acc_seg: 82.9769, loss: 0.4181, grad_norm: 4.4501 2023-02-12 00:10:07,228 - mmseg - INFO - Iter [104100/160000] lr: 2.096e-05, eta: 3:11:55, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2474, decode.acc_seg: 90.1748, aux.loss_ce: 0.1864, aux.acc_seg: 82.0560, loss: 0.4338, grad_norm: 5.5803 2023-02-12 00:10:17,371 - mmseg - INFO - Iter [104150/160000] lr: 2.094e-05, eta: 3:11:45, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2420, decode.acc_seg: 90.6602, aux.loss_ce: 0.1732, aux.acc_seg: 83.4210, loss: 0.4152, grad_norm: 5.1363 2023-02-12 00:10:28,288 - mmseg - INFO - Iter [104200/160000] lr: 2.093e-05, eta: 3:11:35, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2381, decode.acc_seg: 90.6908, aux.loss_ce: 0.1750, aux.acc_seg: 83.3532, loss: 0.4131, grad_norm: 4.6222 2023-02-12 00:10:38,305 - mmseg - INFO - Iter [104250/160000] lr: 2.091e-05, eta: 3:11:24, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2343, decode.acc_seg: 90.7137, aux.loss_ce: 0.1756, aux.acc_seg: 83.1264, loss: 0.4099, grad_norm: 5.4373 2023-02-12 00:10:48,516 - mmseg - INFO - Iter [104300/160000] lr: 2.089e-05, eta: 3:11:14, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2341, decode.acc_seg: 90.5021, aux.loss_ce: 0.1720, aux.acc_seg: 83.1263, loss: 0.4060, grad_norm: 4.9277 2023-02-12 00:10:58,261 - mmseg - INFO - Iter [104350/160000] lr: 2.087e-05, eta: 3:11:03, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2279, decode.acc_seg: 90.7671, aux.loss_ce: 0.1756, aux.acc_seg: 82.4322, loss: 0.4035, grad_norm: 4.6515 2023-02-12 00:11:08,339 - mmseg - INFO - Iter [104400/160000] lr: 2.085e-05, eta: 3:10:53, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2309, decode.acc_seg: 90.8899, aux.loss_ce: 0.1703, aux.acc_seg: 83.2551, loss: 0.4012, grad_norm: 4.8554 2023-02-12 00:11:20,585 - mmseg - INFO - Iter [104450/160000] lr: 2.083e-05, eta: 3:10:44, time: 0.245, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2232, decode.acc_seg: 91.3307, aux.loss_ce: 0.1643, aux.acc_seg: 84.2737, loss: 0.3875, grad_norm: 5.2320 2023-02-12 00:11:30,797 - mmseg - INFO - Iter [104500/160000] lr: 2.081e-05, eta: 3:10:34, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2173, decode.acc_seg: 91.4202, aux.loss_ce: 0.1637, aux.acc_seg: 83.8785, loss: 0.3810, grad_norm: 3.9727 2023-02-12 00:11:40,696 - mmseg - INFO - Iter [104550/160000] lr: 2.079e-05, eta: 3:10:23, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2379, decode.acc_seg: 90.6691, aux.loss_ce: 0.1762, aux.acc_seg: 82.8711, loss: 0.4141, grad_norm: 4.3952 2023-02-12 00:11:50,658 - mmseg - INFO - Iter [104600/160000] lr: 2.078e-05, eta: 3:10:12, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2390, decode.acc_seg: 
90.7720, aux.loss_ce: 0.1724, aux.acc_seg: 83.6097, loss: 0.4114, grad_norm: 4.4011 2023-02-12 00:12:00,666 - mmseg - INFO - Iter [104650/160000] lr: 2.076e-05, eta: 3:10:02, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2308, decode.acc_seg: 90.7789, aux.loss_ce: 0.1741, aux.acc_seg: 82.9881, loss: 0.4049, grad_norm: 4.1812 2023-02-12 00:12:11,392 - mmseg - INFO - Iter [104700/160000] lr: 2.074e-05, eta: 3:09:52, time: 0.215, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2239, decode.acc_seg: 91.1478, aux.loss_ce: 0.1684, aux.acc_seg: 83.6893, loss: 0.3922, grad_norm: 4.1023 2023-02-12 00:12:21,407 - mmseg - INFO - Iter [104750/160000] lr: 2.072e-05, eta: 3:09:41, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2415, decode.acc_seg: 90.5046, aux.loss_ce: 0.1800, aux.acc_seg: 82.8626, loss: 0.4215, grad_norm: 4.5380 2023-02-12 00:12:32,193 - mmseg - INFO - Iter [104800/160000] lr: 2.070e-05, eta: 3:09:31, time: 0.216, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2209, decode.acc_seg: 91.2235, aux.loss_ce: 0.1664, aux.acc_seg: 83.7032, loss: 0.3873, grad_norm: 4.6733 2023-02-12 00:12:42,408 - mmseg - INFO - Iter [104850/160000] lr: 2.068e-05, eta: 3:09:21, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2442, decode.acc_seg: 90.5245, aux.loss_ce: 0.1798, aux.acc_seg: 82.4143, loss: 0.4240, grad_norm: 5.1137 2023-02-12 00:12:52,363 - mmseg - INFO - Iter [104900/160000] lr: 2.066e-05, eta: 3:09:11, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2224, decode.acc_seg: 90.8757, aux.loss_ce: 0.1562, aux.acc_seg: 84.2773, loss: 0.3786, grad_norm: 3.8430 2023-02-12 00:13:02,401 - mmseg - INFO - Iter [104950/160000] lr: 2.064e-05, eta: 3:09:00, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2356, decode.acc_seg: 90.5809, aux.loss_ce: 0.1769, aux.acc_seg: 82.5821, loss: 0.4125, grad_norm: 4.4318 2023-02-12 00:13:12,204 - mmseg - INFO - Saving checkpoint at 105000 iterations 2023-02-12 00:13:12,880 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:13:12,880 - mmseg - INFO - Iter [105000/160000] lr: 2.063e-05, eta: 3:08:50, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2278, decode.acc_seg: 91.0610, aux.loss_ce: 0.1740, aux.acc_seg: 83.6004, loss: 0.4018, grad_norm: 4.5117 2023-02-12 00:13:22,884 - mmseg - INFO - Iter [105050/160000] lr: 2.061e-05, eta: 3:08:39, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2435, decode.acc_seg: 90.4037, aux.loss_ce: 0.1763, aux.acc_seg: 82.9825, loss: 0.4198, grad_norm: 4.6036 2023-02-12 00:13:32,789 - mmseg - INFO - Iter [105100/160000] lr: 2.059e-05, eta: 3:08:29, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2278, decode.acc_seg: 90.7984, aux.loss_ce: 0.1650, aux.acc_seg: 83.7710, loss: 0.3928, grad_norm: 4.3667 2023-02-12 00:13:42,551 - mmseg - INFO - Iter [105150/160000] lr: 2.057e-05, eta: 3:08:18, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2498, decode.acc_seg: 90.2134, aux.loss_ce: 0.1810, aux.acc_seg: 82.6766, loss: 0.4308, grad_norm: 4.2044 2023-02-12 00:13:52,805 - mmseg - INFO - Iter [105200/160000] lr: 2.055e-05, eta: 3:08:08, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2198, decode.acc_seg: 91.2059, aux.loss_ce: 0.1666, aux.acc_seg: 83.9702, loss: 0.3865, grad_norm: 3.9111 2023-02-12 00:14:03,185 - mmseg - INFO - Iter [105250/160000] lr: 2.053e-05, eta: 3:07:58, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2357, 
decode.acc_seg: 90.6086, aux.loss_ce: 0.1767, aux.acc_seg: 82.7791, loss: 0.4123, grad_norm: 4.1477 2023-02-12 00:14:13,148 - mmseg - INFO - Iter [105300/160000] lr: 2.051e-05, eta: 3:07:47, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2272, decode.acc_seg: 91.0661, aux.loss_ce: 0.1692, aux.acc_seg: 83.4339, loss: 0.3964, grad_norm: 3.9189 2023-02-12 00:14:23,106 - mmseg - INFO - Iter [105350/160000] lr: 2.049e-05, eta: 3:07:37, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2343, decode.acc_seg: 90.7185, aux.loss_ce: 0.1723, aux.acc_seg: 83.4753, loss: 0.4066, grad_norm: 4.2830 2023-02-12 00:14:33,184 - mmseg - INFO - Iter [105400/160000] lr: 2.048e-05, eta: 3:07:26, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2283, decode.acc_seg: 90.8029, aux.loss_ce: 0.1721, aux.acc_seg: 83.4689, loss: 0.4004, grad_norm: 4.5298 2023-02-12 00:14:43,430 - mmseg - INFO - Iter [105450/160000] lr: 2.046e-05, eta: 3:07:16, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2180, decode.acc_seg: 91.4451, aux.loss_ce: 0.1688, aux.acc_seg: 84.0124, loss: 0.3868, grad_norm: 4.4748 2023-02-12 00:14:53,339 - mmseg - INFO - Iter [105500/160000] lr: 2.044e-05, eta: 3:07:05, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2191, decode.acc_seg: 91.1702, aux.loss_ce: 0.1656, aux.acc_seg: 83.5251, loss: 0.3847, grad_norm: 4.2773 2023-02-12 00:15:03,156 - mmseg - INFO - Iter [105550/160000] lr: 2.042e-05, eta: 3:06:55, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2273, decode.acc_seg: 91.0436, aux.loss_ce: 0.1642, aux.acc_seg: 83.9637, loss: 0.3915, grad_norm: 4.1118 2023-02-12 00:15:13,074 - mmseg - INFO - Iter [105600/160000] lr: 2.040e-05, eta: 3:06:44, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2249, decode.acc_seg: 91.0437, aux.loss_ce: 0.1689, aux.acc_seg: 83.7342, loss: 0.3937, grad_norm: 4.3440 2023-02-12 00:15:23,235 - mmseg - INFO - Iter [105650/160000] lr: 2.038e-05, eta: 3:06:34, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2468, decode.acc_seg: 90.1188, aux.loss_ce: 0.1785, aux.acc_seg: 82.6314, loss: 0.4253, grad_norm: 6.2822 2023-02-12 00:15:35,540 - mmseg - INFO - Iter [105700/160000] lr: 2.036e-05, eta: 3:06:25, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2461, decode.acc_seg: 90.3779, aux.loss_ce: 0.1767, aux.acc_seg: 82.5001, loss: 0.4227, grad_norm: 5.5909 2023-02-12 00:15:45,734 - mmseg - INFO - Iter [105750/160000] lr: 2.034e-05, eta: 3:06:14, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2355, decode.acc_seg: 90.7951, aux.loss_ce: 0.1740, aux.acc_seg: 83.1080, loss: 0.4095, grad_norm: 6.3818 2023-02-12 00:15:56,200 - mmseg - INFO - Iter [105800/160000] lr: 2.033e-05, eta: 3:06:04, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2246, decode.acc_seg: 91.0178, aux.loss_ce: 0.1700, aux.acc_seg: 83.2518, loss: 0.3946, grad_norm: 3.9301 2023-02-12 00:16:06,666 - mmseg - INFO - Iter [105850/160000] lr: 2.031e-05, eta: 3:05:54, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2279, decode.acc_seg: 90.8211, aux.loss_ce: 0.1777, aux.acc_seg: 82.6591, loss: 0.4055, grad_norm: 5.2503 2023-02-12 00:16:17,476 - mmseg - INFO - Iter [105900/160000] lr: 2.029e-05, eta: 3:05:44, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2318, decode.acc_seg: 90.8022, aux.loss_ce: 0.1762, aux.acc_seg: 82.8520, loss: 0.4080, grad_norm: 4.5046 2023-02-12 00:16:27,605 - mmseg - INFO - Iter [105950/160000] lr: 
2.027e-05, eta: 3:05:34, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2237, decode.acc_seg: 91.1040, aux.loss_ce: 0.1684, aux.acc_seg: 83.3781, loss: 0.3921, grad_norm: 4.4959 2023-02-12 00:16:37,457 - mmseg - INFO - Saving checkpoint at 106000 iterations 2023-02-12 00:16:38,132 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:16:38,132 - mmseg - INFO - Iter [106000/160000] lr: 2.025e-05, eta: 3:05:23, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2415, decode.acc_seg: 90.3509, aux.loss_ce: 0.1808, aux.acc_seg: 82.6296, loss: 0.4223, grad_norm: 4.9240 2023-02-12 00:16:47,965 - mmseg - INFO - Iter [106050/160000] lr: 2.023e-05, eta: 3:05:13, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2313, decode.acc_seg: 90.9288, aux.loss_ce: 0.1712, aux.acc_seg: 83.8039, loss: 0.4025, grad_norm: 4.7123 2023-02-12 00:16:57,676 - mmseg - INFO - Iter [106100/160000] lr: 2.021e-05, eta: 3:05:02, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2222, decode.acc_seg: 91.1786, aux.loss_ce: 0.1685, aux.acc_seg: 83.9759, loss: 0.3907, grad_norm: 4.6500 2023-02-12 00:17:07,882 - mmseg - INFO - Iter [106150/160000] lr: 2.019e-05, eta: 3:04:52, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2310, decode.acc_seg: 90.8416, aux.loss_ce: 0.1711, aux.acc_seg: 83.5211, loss: 0.4021, grad_norm: 4.4472 2023-02-12 00:17:17,642 - mmseg - INFO - Iter [106200/160000] lr: 2.018e-05, eta: 3:04:41, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2273, decode.acc_seg: 91.1101, aux.loss_ce: 0.1749, aux.acc_seg: 83.0465, loss: 0.4022, grad_norm: 4.4938 2023-02-12 00:17:27,652 - mmseg - INFO - Iter [106250/160000] lr: 2.016e-05, eta: 3:04:31, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2258, decode.acc_seg: 91.3067, aux.loss_ce: 0.1660, aux.acc_seg: 84.1110, loss: 0.3918, grad_norm: 4.8142 2023-02-12 00:17:37,621 - mmseg - INFO - Iter [106300/160000] lr: 2.014e-05, eta: 3:04:20, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2270, decode.acc_seg: 90.8869, aux.loss_ce: 0.1692, aux.acc_seg: 83.2928, loss: 0.3962, grad_norm: 3.7884 2023-02-12 00:17:48,426 - mmseg - INFO - Iter [106350/160000] lr: 2.012e-05, eta: 3:04:10, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2041, decode.acc_seg: 91.7112, aux.loss_ce: 0.1539, aux.acc_seg: 84.7724, loss: 0.3580, grad_norm: 4.5904 2023-02-12 00:17:58,654 - mmseg - INFO - Iter [106400/160000] lr: 2.010e-05, eta: 3:04:00, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2287, decode.acc_seg: 90.8299, aux.loss_ce: 0.1760, aux.acc_seg: 83.1719, loss: 0.4047, grad_norm: 5.0825 2023-02-12 00:18:08,439 - mmseg - INFO - Iter [106450/160000] lr: 2.008e-05, eta: 3:03:49, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2313, decode.acc_seg: 90.7604, aux.loss_ce: 0.1729, aux.acc_seg: 83.1200, loss: 0.4043, grad_norm: 5.1191 2023-02-12 00:18:19,153 - mmseg - INFO - Iter [106500/160000] lr: 2.006e-05, eta: 3:03:39, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2439, decode.acc_seg: 90.2071, aux.loss_ce: 0.1789, aux.acc_seg: 82.3935, loss: 0.4227, grad_norm: 4.7587 2023-02-12 00:18:29,637 - mmseg - INFO - Iter [106550/160000] lr: 2.004e-05, eta: 3:03:29, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2350, decode.acc_seg: 90.4783, aux.loss_ce: 0.1679, aux.acc_seg: 83.3800, loss: 0.4028, grad_norm: 4.9169 2023-02-12 00:18:39,407 - mmseg - INFO - Iter [106600/160000] 
lr: 2.003e-05, eta: 3:03:18, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2454, decode.acc_seg: 90.2635, aux.loss_ce: 0.1794, aux.acc_seg: 82.6502, loss: 0.4248, grad_norm: 4.7345 2023-02-12 00:18:49,257 - mmseg - INFO - Iter [106650/160000] lr: 2.001e-05, eta: 3:03:08, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2254, decode.acc_seg: 91.1156, aux.loss_ce: 0.1678, aux.acc_seg: 83.5798, loss: 0.3932, grad_norm: 4.1015 2023-02-12 00:18:59,154 - mmseg - INFO - Iter [106700/160000] lr: 1.999e-05, eta: 3:02:57, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2229, decode.acc_seg: 91.1604, aux.loss_ce: 0.1674, aux.acc_seg: 83.8200, loss: 0.3903, grad_norm: 4.6302 2023-02-12 00:19:08,840 - mmseg - INFO - Iter [106750/160000] lr: 1.997e-05, eta: 3:02:47, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2109, decode.acc_seg: 91.6210, aux.loss_ce: 0.1609, aux.acc_seg: 84.2075, loss: 0.3718, grad_norm: 3.7095 2023-02-12 00:19:18,829 - mmseg - INFO - Iter [106800/160000] lr: 1.995e-05, eta: 3:02:36, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2245, decode.acc_seg: 90.8970, aux.loss_ce: 0.1733, aux.acc_seg: 82.9380, loss: 0.3978, grad_norm: 5.1539 2023-02-12 00:19:28,756 - mmseg - INFO - Iter [106850/160000] lr: 1.993e-05, eta: 3:02:26, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2268, decode.acc_seg: 91.0359, aux.loss_ce: 0.1700, aux.acc_seg: 83.5111, loss: 0.3968, grad_norm: 4.5614 2023-02-12 00:19:39,177 - mmseg - INFO - Iter [106900/160000] lr: 1.991e-05, eta: 3:02:16, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2247, decode.acc_seg: 91.0859, aux.loss_ce: 0.1641, aux.acc_seg: 84.0687, loss: 0.3888, grad_norm: 4.2850 2023-02-12 00:19:51,216 - mmseg - INFO - Iter [106950/160000] lr: 1.989e-05, eta: 3:02:06, time: 0.241, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2379, decode.acc_seg: 90.7414, aux.loss_ce: 0.1735, aux.acc_seg: 83.2566, loss: 0.4114, grad_norm: 3.9734 2023-02-12 00:20:00,987 - mmseg - INFO - Saving checkpoint at 107000 iterations 2023-02-12 00:20:01,671 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:20:01,671 - mmseg - INFO - Iter [107000/160000] lr: 1.988e-05, eta: 3:01:56, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2349, decode.acc_seg: 90.9982, aux.loss_ce: 0.1737, aux.acc_seg: 83.5941, loss: 0.4086, grad_norm: 4.7124 2023-02-12 00:20:11,374 - mmseg - INFO - Iter [107050/160000] lr: 1.986e-05, eta: 3:01:45, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2129, decode.acc_seg: 91.5634, aux.loss_ce: 0.1633, aux.acc_seg: 84.0071, loss: 0.3762, grad_norm: 4.1779 2023-02-12 00:20:21,000 - mmseg - INFO - Iter [107100/160000] lr: 1.984e-05, eta: 3:01:35, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2194, decode.acc_seg: 91.0670, aux.loss_ce: 0.1677, aux.acc_seg: 83.6213, loss: 0.3871, grad_norm: 4.4583 2023-02-12 00:20:30,948 - mmseg - INFO - Iter [107150/160000] lr: 1.982e-05, eta: 3:01:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2288, decode.acc_seg: 90.8913, aux.loss_ce: 0.1730, aux.acc_seg: 83.0458, loss: 0.4018, grad_norm: 4.9431 2023-02-12 00:20:41,632 - mmseg - INFO - Iter [107200/160000] lr: 1.980e-05, eta: 3:01:14, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2105, decode.acc_seg: 91.6476, aux.loss_ce: 0.1620, aux.acc_seg: 84.0083, loss: 0.3726, grad_norm: 5.4793 2023-02-12 00:20:51,372 - mmseg - INFO - Iter 
[107250/160000] lr: 1.978e-05, eta: 3:01:04, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2232, decode.acc_seg: 91.1593, aux.loss_ce: 0.1643, aux.acc_seg: 83.9717, loss: 0.3876, grad_norm: 5.0303 2023-02-12 00:21:01,517 - mmseg - INFO - Iter [107300/160000] lr: 1.976e-05, eta: 3:00:53, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2332, decode.acc_seg: 90.4380, aux.loss_ce: 0.1758, aux.acc_seg: 82.6135, loss: 0.4090, grad_norm: 4.2637 2023-02-12 00:21:11,562 - mmseg - INFO - Iter [107350/160000] lr: 1.974e-05, eta: 3:00:43, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2366, decode.acc_seg: 90.6354, aux.loss_ce: 0.1770, aux.acc_seg: 83.1148, loss: 0.4136, grad_norm: 4.7487 2023-02-12 00:21:21,544 - mmseg - INFO - Iter [107400/160000] lr: 1.973e-05, eta: 3:00:32, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2351, decode.acc_seg: 90.8956, aux.loss_ce: 0.1708, aux.acc_seg: 83.4756, loss: 0.4059, grad_norm: 4.5301 2023-02-12 00:21:31,513 - mmseg - INFO - Iter [107450/160000] lr: 1.971e-05, eta: 3:00:22, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2306, decode.acc_seg: 90.9425, aux.loss_ce: 0.1702, aux.acc_seg: 83.5662, loss: 0.4008, grad_norm: 4.5120 2023-02-12 00:21:41,183 - mmseg - INFO - Iter [107500/160000] lr: 1.969e-05, eta: 3:00:11, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2249, decode.acc_seg: 91.0694, aux.loss_ce: 0.1676, aux.acc_seg: 83.8887, loss: 0.3925, grad_norm: 4.0223 2023-02-12 00:21:50,996 - mmseg - INFO - Iter [107550/160000] lr: 1.967e-05, eta: 3:00:00, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2184, decode.acc_seg: 91.2328, aux.loss_ce: 0.1715, aux.acc_seg: 83.6365, loss: 0.3899, grad_norm: 4.7842 2023-02-12 00:22:01,120 - mmseg - INFO - Iter [107600/160000] lr: 1.965e-05, eta: 2:59:50, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2262, decode.acc_seg: 91.1248, aux.loss_ce: 0.1614, aux.acc_seg: 84.3259, loss: 0.3876, grad_norm: 4.6170 2023-02-12 00:22:11,107 - mmseg - INFO - Iter [107650/160000] lr: 1.963e-05, eta: 2:59:40, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2363, decode.acc_seg: 90.2990, aux.loss_ce: 0.1730, aux.acc_seg: 82.6720, loss: 0.4092, grad_norm: 4.7917 2023-02-12 00:22:20,828 - mmseg - INFO - Iter [107700/160000] lr: 1.961e-05, eta: 2:59:29, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2363, decode.acc_seg: 90.7705, aux.loss_ce: 0.1757, aux.acc_seg: 83.0220, loss: 0.4120, grad_norm: 4.7381 2023-02-12 00:22:30,797 - mmseg - INFO - Iter [107750/160000] lr: 1.959e-05, eta: 2:59:19, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2221, decode.acc_seg: 91.1220, aux.loss_ce: 0.1665, aux.acc_seg: 83.8218, loss: 0.3886, grad_norm: 4.4621 2023-02-12 00:22:40,843 - mmseg - INFO - Iter [107800/160000] lr: 1.958e-05, eta: 2:59:08, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2144, decode.acc_seg: 91.4845, aux.loss_ce: 0.1733, aux.acc_seg: 83.2902, loss: 0.3877, grad_norm: 4.0724 2023-02-12 00:22:50,594 - mmseg - INFO - Iter [107850/160000] lr: 1.956e-05, eta: 2:58:57, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2413, decode.acc_seg: 90.5844, aux.loss_ce: 0.1779, aux.acc_seg: 82.7619, loss: 0.4192, grad_norm: 5.0517 2023-02-12 00:23:00,610 - mmseg - INFO - Iter [107900/160000] lr: 1.954e-05, eta: 2:58:47, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2247, decode.acc_seg: 91.2874, aux.loss_ce: 0.1704, aux.acc_seg: 
83.4842, loss: 0.3951, grad_norm: 4.1360 2023-02-12 00:23:10,607 - mmseg - INFO - Iter [107950/160000] lr: 1.952e-05, eta: 2:58:37, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2347, decode.acc_seg: 90.7196, aux.loss_ce: 0.1715, aux.acc_seg: 83.1388, loss: 0.4062, grad_norm: 4.7158 2023-02-12 00:23:20,522 - mmseg - INFO - Saving checkpoint at 108000 iterations 2023-02-12 00:23:21,224 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:23:21,225 - mmseg - INFO - Iter [108000/160000] lr: 1.950e-05, eta: 2:58:26, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2355, decode.acc_seg: 90.6338, aux.loss_ce: 0.1740, aux.acc_seg: 83.0600, loss: 0.4095, grad_norm: 4.6998 2023-02-12 00:23:30,966 - mmseg - INFO - Iter [108050/160000] lr: 1.948e-05, eta: 2:58:16, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2233, decode.acc_seg: 91.0052, aux.loss_ce: 0.1658, aux.acc_seg: 83.6908, loss: 0.3891, grad_norm: 4.7972 2023-02-12 00:23:40,701 - mmseg - INFO - Iter [108100/160000] lr: 1.946e-05, eta: 2:58:05, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2218, decode.acc_seg: 91.3328, aux.loss_ce: 0.1723, aux.acc_seg: 83.1913, loss: 0.3941, grad_norm: 4.5736 2023-02-12 00:23:51,107 - mmseg - INFO - Iter [108150/160000] lr: 1.944e-05, eta: 2:57:55, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2239, decode.acc_seg: 91.1642, aux.loss_ce: 0.1711, aux.acc_seg: 83.3885, loss: 0.3950, grad_norm: 3.7959 2023-02-12 00:24:03,131 - mmseg - INFO - Iter [108200/160000] lr: 1.943e-05, eta: 2:57:46, time: 0.241, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2318, decode.acc_seg: 91.0765, aux.loss_ce: 0.1703, aux.acc_seg: 83.5722, loss: 0.4021, grad_norm: 4.5633 2023-02-12 00:24:12,972 - mmseg - INFO - Iter [108250/160000] lr: 1.941e-05, eta: 2:57:35, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2327, decode.acc_seg: 90.8033, aux.loss_ce: 0.1715, aux.acc_seg: 83.4368, loss: 0.4042, grad_norm: 5.1777 2023-02-12 00:24:22,947 - mmseg - INFO - Iter [108300/160000] lr: 1.939e-05, eta: 2:57:25, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2334, decode.acc_seg: 90.7920, aux.loss_ce: 0.1641, aux.acc_seg: 83.9669, loss: 0.3976, grad_norm: 4.4394 2023-02-12 00:24:32,725 - mmseg - INFO - Iter [108350/160000] lr: 1.937e-05, eta: 2:57:14, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2390, decode.acc_seg: 90.5743, aux.loss_ce: 0.1741, aux.acc_seg: 82.9085, loss: 0.4131, grad_norm: 3.9900 2023-02-12 00:24:42,471 - mmseg - INFO - Iter [108400/160000] lr: 1.935e-05, eta: 2:57:03, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2337, decode.acc_seg: 90.7262, aux.loss_ce: 0.1686, aux.acc_seg: 83.6615, loss: 0.4023, grad_norm: 4.0350 2023-02-12 00:24:52,932 - mmseg - INFO - Iter [108450/160000] lr: 1.933e-05, eta: 2:56:53, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2083, decode.acc_seg: 91.4888, aux.loss_ce: 0.1540, aux.acc_seg: 84.3676, loss: 0.3622, grad_norm: 3.8726 2023-02-12 00:25:02,713 - mmseg - INFO - Iter [108500/160000] lr: 1.931e-05, eta: 2:56:43, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2248, decode.acc_seg: 91.1253, aux.loss_ce: 0.1678, aux.acc_seg: 83.6600, loss: 0.3926, grad_norm: 4.0400 2023-02-12 00:25:13,327 - mmseg - INFO - Iter [108550/160000] lr: 1.929e-05, eta: 2:56:33, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2210, decode.acc_seg: 91.0686, aux.loss_ce: 0.1676, 
aux.acc_seg: 83.5092, loss: 0.3886, grad_norm: 3.9077 2023-02-12 00:25:23,159 - mmseg - INFO - Iter [108600/160000] lr: 1.928e-05, eta: 2:56:22, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2340, decode.acc_seg: 90.7100, aux.loss_ce: 0.1782, aux.acc_seg: 82.7821, loss: 0.4122, grad_norm: 4.2899 2023-02-12 00:25:33,042 - mmseg - INFO - Iter [108650/160000] lr: 1.926e-05, eta: 2:56:11, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2199, decode.acc_seg: 91.2931, aux.loss_ce: 0.1726, aux.acc_seg: 83.2901, loss: 0.3925, grad_norm: 4.7618 2023-02-12 00:25:42,752 - mmseg - INFO - Iter [108700/160000] lr: 1.924e-05, eta: 2:56:01, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2313, decode.acc_seg: 90.9401, aux.loss_ce: 0.1737, aux.acc_seg: 83.3399, loss: 0.4050, grad_norm: 5.3729 2023-02-12 00:25:53,084 - mmseg - INFO - Iter [108750/160000] lr: 1.922e-05, eta: 2:55:51, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2411, decode.acc_seg: 90.5589, aux.loss_ce: 0.1745, aux.acc_seg: 83.0054, loss: 0.4157, grad_norm: 4.8486 2023-02-12 00:26:03,248 - mmseg - INFO - Iter [108800/160000] lr: 1.920e-05, eta: 2:55:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2174, decode.acc_seg: 91.3285, aux.loss_ce: 0.1641, aux.acc_seg: 83.7995, loss: 0.3816, grad_norm: 4.4157 2023-02-12 00:26:13,239 - mmseg - INFO - Iter [108850/160000] lr: 1.918e-05, eta: 2:55:30, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2241, decode.acc_seg: 91.0723, aux.loss_ce: 0.1626, aux.acc_seg: 84.2928, loss: 0.3867, grad_norm: 3.9209 2023-02-12 00:26:23,449 - mmseg - INFO - Iter [108900/160000] lr: 1.916e-05, eta: 2:55:19, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2378, decode.acc_seg: 90.6494, aux.loss_ce: 0.1796, aux.acc_seg: 82.6487, loss: 0.4174, grad_norm: 5.2458 2023-02-12 00:26:33,126 - mmseg - INFO - Iter [108950/160000] lr: 1.914e-05, eta: 2:55:09, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2282, decode.acc_seg: 91.0624, aux.loss_ce: 0.1692, aux.acc_seg: 83.5113, loss: 0.3974, grad_norm: 4.4268 2023-02-12 00:26:43,229 - mmseg - INFO - Saving checkpoint at 109000 iterations 2023-02-12 00:26:43,907 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:26:43,907 - mmseg - INFO - Iter [109000/160000] lr: 1.913e-05, eta: 2:54:59, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2210, decode.acc_seg: 91.0705, aux.loss_ce: 0.1668, aux.acc_seg: 83.6574, loss: 0.3877, grad_norm: 3.8557 2023-02-12 00:26:54,094 - mmseg - INFO - Iter [109050/160000] lr: 1.911e-05, eta: 2:54:48, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2202, decode.acc_seg: 91.2530, aux.loss_ce: 0.1646, aux.acc_seg: 83.9055, loss: 0.3848, grad_norm: 4.8141 2023-02-12 00:27:03,948 - mmseg - INFO - Iter [109100/160000] lr: 1.909e-05, eta: 2:54:38, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2305, decode.acc_seg: 90.7756, aux.loss_ce: 0.1753, aux.acc_seg: 83.0459, loss: 0.4058, grad_norm: 4.9093 2023-02-12 00:27:14,133 - mmseg - INFO - Iter [109150/160000] lr: 1.907e-05, eta: 2:54:28, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2087, decode.acc_seg: 91.6182, aux.loss_ce: 0.1586, aux.acc_seg: 84.5167, loss: 0.3673, grad_norm: 3.4943 2023-02-12 00:27:23,922 - mmseg - INFO - Iter [109200/160000] lr: 1.905e-05, eta: 2:54:17, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2183, decode.acc_seg: 91.2677, aux.loss_ce: 
0.1644, aux.acc_seg: 84.1558, loss: 0.3826, grad_norm: 3.8135 2023-02-12 00:27:34,300 - mmseg - INFO - Iter [109250/160000] lr: 1.903e-05, eta: 2:54:07, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2200, decode.acc_seg: 91.2338, aux.loss_ce: 0.1604, aux.acc_seg: 84.4325, loss: 0.3805, grad_norm: 3.5819 2023-02-12 00:27:44,057 - mmseg - INFO - Iter [109300/160000] lr: 1.901e-05, eta: 2:53:56, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2227, decode.acc_seg: 91.3123, aux.loss_ce: 0.1689, aux.acc_seg: 83.8302, loss: 0.3916, grad_norm: 4.1843 2023-02-12 00:27:54,325 - mmseg - INFO - Iter [109350/160000] lr: 1.899e-05, eta: 2:53:46, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2337, decode.acc_seg: 90.5871, aux.loss_ce: 0.1716, aux.acc_seg: 83.2149, loss: 0.4053, grad_norm: 4.6710 2023-02-12 00:28:04,380 - mmseg - INFO - Iter [109400/160000] lr: 1.898e-05, eta: 2:53:35, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2277, decode.acc_seg: 90.9313, aux.loss_ce: 0.1706, aux.acc_seg: 83.2686, loss: 0.3983, grad_norm: 4.5123 2023-02-12 00:28:14,096 - mmseg - INFO - Iter [109450/160000] lr: 1.896e-05, eta: 2:53:25, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2259, decode.acc_seg: 91.2165, aux.loss_ce: 0.1703, aux.acc_seg: 83.2633, loss: 0.3962, grad_norm: 4.1741 2023-02-12 00:28:26,207 - mmseg - INFO - Iter [109500/160000] lr: 1.894e-05, eta: 2:53:15, time: 0.242, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2188, decode.acc_seg: 91.3887, aux.loss_ce: 0.1642, aux.acc_seg: 84.1235, loss: 0.3830, grad_norm: 3.7918 2023-02-12 00:28:36,180 - mmseg - INFO - Iter [109550/160000] lr: 1.892e-05, eta: 2:53:05, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2225, decode.acc_seg: 91.2467, aux.loss_ce: 0.1700, aux.acc_seg: 83.6258, loss: 0.3924, grad_norm: 3.6726 2023-02-12 00:28:46,060 - mmseg - INFO - Iter [109600/160000] lr: 1.890e-05, eta: 2:52:55, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2214, decode.acc_seg: 90.9955, aux.loss_ce: 0.1687, aux.acc_seg: 83.7567, loss: 0.3901, grad_norm: 4.4565 2023-02-12 00:28:55,754 - mmseg - INFO - Iter [109650/160000] lr: 1.888e-05, eta: 2:52:44, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2220, decode.acc_seg: 91.1831, aux.loss_ce: 0.1720, aux.acc_seg: 83.3665, loss: 0.3939, grad_norm: 4.4514 2023-02-12 00:29:06,265 - mmseg - INFO - Iter [109700/160000] lr: 1.886e-05, eta: 2:52:34, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2152, decode.acc_seg: 91.5701, aux.loss_ce: 0.1674, aux.acc_seg: 83.9675, loss: 0.3826, grad_norm: 5.6090 2023-02-12 00:29:16,597 - mmseg - INFO - Iter [109750/160000] lr: 1.884e-05, eta: 2:52:23, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2146, decode.acc_seg: 91.2847, aux.loss_ce: 0.1661, aux.acc_seg: 83.8357, loss: 0.3807, grad_norm: 4.6313 2023-02-12 00:29:26,923 - mmseg - INFO - Iter [109800/160000] lr: 1.883e-05, eta: 2:52:13, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2112, decode.acc_seg: 91.4138, aux.loss_ce: 0.1651, aux.acc_seg: 83.7475, loss: 0.3764, grad_norm: 4.5457 2023-02-12 00:29:37,035 - mmseg - INFO - Iter [109850/160000] lr: 1.881e-05, eta: 2:52:03, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2096, decode.acc_seg: 91.6770, aux.loss_ce: 0.1637, aux.acc_seg: 84.1701, loss: 0.3733, grad_norm: 4.9270 2023-02-12 00:29:47,245 - mmseg - INFO - Iter [109900/160000] lr: 1.879e-05, eta: 2:51:52, time: 0.205, 
data_time: 0.004, memory: 7748, decode.loss_ce: 0.2318, decode.acc_seg: 90.8517, aux.loss_ce: 0.1689, aux.acc_seg: 83.4052, loss: 0.4007, grad_norm: 6.4460 2023-02-12 00:29:57,099 - mmseg - INFO - Iter [109950/160000] lr: 1.877e-05, eta: 2:51:42, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2310, decode.acc_seg: 90.8584, aux.loss_ce: 0.1686, aux.acc_seg: 83.7909, loss: 0.3996, grad_norm: 4.4738 2023-02-12 00:30:06,768 - mmseg - INFO - Saving checkpoint at 110000 iterations 2023-02-12 00:30:07,450 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:30:07,450 - mmseg - INFO - Iter [110000/160000] lr: 1.875e-05, eta: 2:51:32, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2204, decode.acc_seg: 91.1553, aux.loss_ce: 0.1707, aux.acc_seg: 83.4303, loss: 0.3911, grad_norm: 4.5454 2023-02-12 00:30:17,187 - mmseg - INFO - Iter [110050/160000] lr: 1.873e-05, eta: 2:51:21, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2170, decode.acc_seg: 91.0953, aux.loss_ce: 0.1661, aux.acc_seg: 83.3412, loss: 0.3830, grad_norm: 4.2082 2023-02-12 00:30:27,576 - mmseg - INFO - Iter [110100/160000] lr: 1.871e-05, eta: 2:51:11, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2297, decode.acc_seg: 90.9344, aux.loss_ce: 0.1726, aux.acc_seg: 83.2949, loss: 0.4023, grad_norm: 3.9383 2023-02-12 00:30:37,751 - mmseg - INFO - Iter [110150/160000] lr: 1.869e-05, eta: 2:51:00, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2299, decode.acc_seg: 90.8752, aux.loss_ce: 0.1790, aux.acc_seg: 82.4761, loss: 0.4090, grad_norm: 4.8961 2023-02-12 00:30:47,983 - mmseg - INFO - Iter [110200/160000] lr: 1.868e-05, eta: 2:50:50, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2317, decode.acc_seg: 90.6932, aux.loss_ce: 0.1681, aux.acc_seg: 83.7353, loss: 0.3999, grad_norm: 4.1121 2023-02-12 00:30:58,400 - mmseg - INFO - Iter [110250/160000] lr: 1.866e-05, eta: 2:50:40, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2174, decode.acc_seg: 91.4844, aux.loss_ce: 0.1704, aux.acc_seg: 83.5663, loss: 0.3878, grad_norm: 3.9542 2023-02-12 00:31:08,233 - mmseg - INFO - Iter [110300/160000] lr: 1.864e-05, eta: 2:50:29, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2368, decode.acc_seg: 90.6592, aux.loss_ce: 0.1717, aux.acc_seg: 83.2894, loss: 0.4085, grad_norm: 4.6614 2023-02-12 00:31:17,984 - mmseg - INFO - Iter [110350/160000] lr: 1.862e-05, eta: 2:50:19, time: 0.195, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2428, decode.acc_seg: 90.4553, aux.loss_ce: 0.1778, aux.acc_seg: 82.9627, loss: 0.4206, grad_norm: 4.7622 2023-02-12 00:31:27,785 - mmseg - INFO - Iter [110400/160000] lr: 1.860e-05, eta: 2:50:08, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2276, decode.acc_seg: 91.0231, aux.loss_ce: 0.1752, aux.acc_seg: 83.2449, loss: 0.4028, grad_norm: 5.9048 2023-02-12 00:31:37,700 - mmseg - INFO - Iter [110450/160000] lr: 1.858e-05, eta: 2:49:58, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2124, decode.acc_seg: 91.4912, aux.loss_ce: 0.1664, aux.acc_seg: 84.0112, loss: 0.3788, grad_norm: 4.5880 2023-02-12 00:31:47,946 - mmseg - INFO - Iter [110500/160000] lr: 1.856e-05, eta: 2:49:47, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2197, decode.acc_seg: 91.2714, aux.loss_ce: 0.1660, aux.acc_seg: 83.6393, loss: 0.3857, grad_norm: 4.4906 2023-02-12 00:31:58,136 - mmseg - INFO - Iter [110550/160000] lr: 1.854e-05, eta: 2:49:37, time: 
0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2332, decode.acc_seg: 90.7795, aux.loss_ce: 0.1710, aux.acc_seg: 83.4567, loss: 0.4042, grad_norm: 4.8818 2023-02-12 00:32:07,864 - mmseg - INFO - Iter [110600/160000] lr: 1.853e-05, eta: 2:49:27, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2289, decode.acc_seg: 91.1823, aux.loss_ce: 0.1676, aux.acc_seg: 84.2087, loss: 0.3965, grad_norm: 4.5352 2023-02-12 00:32:18,314 - mmseg - INFO - Iter [110650/160000] lr: 1.851e-05, eta: 2:49:16, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2331, decode.acc_seg: 90.6646, aux.loss_ce: 0.1679, aux.acc_seg: 83.6710, loss: 0.4010, grad_norm: 4.2632 2023-02-12 00:32:28,524 - mmseg - INFO - Iter [110700/160000] lr: 1.849e-05, eta: 2:49:06, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2238, decode.acc_seg: 91.0860, aux.loss_ce: 0.1699, aux.acc_seg: 83.5880, loss: 0.3937, grad_norm: 4.8452 2023-02-12 00:32:41,085 - mmseg - INFO - Iter [110750/160000] lr: 1.847e-05, eta: 2:48:57, time: 0.251, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2055, decode.acc_seg: 91.9669, aux.loss_ce: 0.1618, aux.acc_seg: 84.2769, loss: 0.3673, grad_norm: 4.2279 2023-02-12 00:32:51,050 - mmseg - INFO - Iter [110800/160000] lr: 1.845e-05, eta: 2:48:46, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2350, decode.acc_seg: 90.8496, aux.loss_ce: 0.1743, aux.acc_seg: 83.1593, loss: 0.4093, grad_norm: 5.0863 2023-02-12 00:33:00,870 - mmseg - INFO - Iter [110850/160000] lr: 1.843e-05, eta: 2:48:36, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2219, decode.acc_seg: 90.8839, aux.loss_ce: 0.1686, aux.acc_seg: 83.4470, loss: 0.3906, grad_norm: 4.5858 2023-02-12 00:33:11,248 - mmseg - INFO - Iter [110900/160000] lr: 1.841e-05, eta: 2:48:26, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2162, decode.acc_seg: 91.3970, aux.loss_ce: 0.1637, aux.acc_seg: 84.0453, loss: 0.3799, grad_norm: 3.9998 2023-02-12 00:33:21,579 - mmseg - INFO - Iter [110950/160000] lr: 1.839e-05, eta: 2:48:15, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2157, decode.acc_seg: 91.6288, aux.loss_ce: 0.1683, aux.acc_seg: 83.6703, loss: 0.3840, grad_norm: 3.7705 2023-02-12 00:33:31,555 - mmseg - INFO - Saving checkpoint at 111000 iterations 2023-02-12 00:33:32,232 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:33:32,232 - mmseg - INFO - Iter [111000/160000] lr: 1.838e-05, eta: 2:48:05, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2222, decode.acc_seg: 90.9868, aux.loss_ce: 0.1723, aux.acc_seg: 83.2268, loss: 0.3945, grad_norm: 4.2125 2023-02-12 00:33:42,063 - mmseg - INFO - Iter [111050/160000] lr: 1.836e-05, eta: 2:47:55, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2269, decode.acc_seg: 91.0009, aux.loss_ce: 0.1712, aux.acc_seg: 83.3569, loss: 0.3982, grad_norm: 3.9937 2023-02-12 00:33:52,143 - mmseg - INFO - Iter [111100/160000] lr: 1.834e-05, eta: 2:47:44, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2213, decode.acc_seg: 91.3476, aux.loss_ce: 0.1668, aux.acc_seg: 84.2728, loss: 0.3881, grad_norm: 4.8604 2023-02-12 00:34:02,368 - mmseg - INFO - Iter [111150/160000] lr: 1.832e-05, eta: 2:47:34, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2102, decode.acc_seg: 91.3228, aux.loss_ce: 0.1593, aux.acc_seg: 83.9018, loss: 0.3696, grad_norm: 3.7776 2023-02-12 00:34:12,186 - mmseg - INFO - Iter [111200/160000] lr: 1.830e-05, eta: 2:47:23, 
time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2180, decode.acc_seg: 91.4601, aux.loss_ce: 0.1679, aux.acc_seg: 84.2008, loss: 0.3859, grad_norm: 4.1768 2023-02-12 00:34:22,192 - mmseg - INFO - Iter [111250/160000] lr: 1.828e-05, eta: 2:47:13, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2373, decode.acc_seg: 90.6954, aux.loss_ce: 0.1757, aux.acc_seg: 83.2911, loss: 0.4130, grad_norm: 5.2170 2023-02-12 00:34:32,190 - mmseg - INFO - Iter [111300/160000] lr: 1.826e-05, eta: 2:47:03, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2204, decode.acc_seg: 91.3250, aux.loss_ce: 0.1681, aux.acc_seg: 83.9361, loss: 0.3885, grad_norm: 4.8412 2023-02-12 00:34:41,913 - mmseg - INFO - Iter [111350/160000] lr: 1.824e-05, eta: 2:46:52, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2251, decode.acc_seg: 91.1261, aux.loss_ce: 0.1686, aux.acc_seg: 83.4345, loss: 0.3937, grad_norm: 4.0920 2023-02-12 00:34:51,768 - mmseg - INFO - Iter [111400/160000] lr: 1.823e-05, eta: 2:46:42, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2149, decode.acc_seg: 91.2048, aux.loss_ce: 0.1673, aux.acc_seg: 83.6309, loss: 0.3822, grad_norm: 4.1921 2023-02-12 00:35:01,456 - mmseg - INFO - Iter [111450/160000] lr: 1.821e-05, eta: 2:46:31, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2221, decode.acc_seg: 91.2855, aux.loss_ce: 0.1684, aux.acc_seg: 83.6156, loss: 0.3905, grad_norm: 4.0621 2023-02-12 00:35:11,655 - mmseg - INFO - Iter [111500/160000] lr: 1.819e-05, eta: 2:46:21, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2186, decode.acc_seg: 91.2700, aux.loss_ce: 0.1656, aux.acc_seg: 83.9100, loss: 0.3843, grad_norm: 3.7732 2023-02-12 00:35:21,903 - mmseg - INFO - Iter [111550/160000] lr: 1.817e-05, eta: 2:46:10, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.2514, aux.loss_ce: 0.1610, aux.acc_seg: 84.3473, loss: 0.3771, grad_norm: 5.1338 2023-02-12 00:35:32,115 - mmseg - INFO - Iter [111600/160000] lr: 1.815e-05, eta: 2:46:00, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2283, decode.acc_seg: 90.8990, aux.loss_ce: 0.1714, aux.acc_seg: 83.1978, loss: 0.3997, grad_norm: 4.3565 2023-02-12 00:35:42,399 - mmseg - INFO - Iter [111650/160000] lr: 1.813e-05, eta: 2:45:50, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2341, decode.acc_seg: 90.6539, aux.loss_ce: 0.1725, aux.acc_seg: 83.2111, loss: 0.4066, grad_norm: 4.3149 2023-02-12 00:35:52,545 - mmseg - INFO - Iter [111700/160000] lr: 1.811e-05, eta: 2:45:39, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2417, decode.acc_seg: 90.4370, aux.loss_ce: 0.1872, aux.acc_seg: 82.2188, loss: 0.4289, grad_norm: 5.0731 2023-02-12 00:36:02,689 - mmseg - INFO - Iter [111750/160000] lr: 1.809e-05, eta: 2:45:29, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2490, decode.acc_seg: 90.2226, aux.loss_ce: 0.1845, aux.acc_seg: 82.3803, loss: 0.4335, grad_norm: 4.8884 2023-02-12 00:36:12,496 - mmseg - INFO - Iter [111800/160000] lr: 1.808e-05, eta: 2:45:18, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2248, decode.acc_seg: 90.7300, aux.loss_ce: 0.1647, aux.acc_seg: 83.4324, loss: 0.3895, grad_norm: 5.4860 2023-02-12 00:36:22,294 - mmseg - INFO - Iter [111850/160000] lr: 1.806e-05, eta: 2:45:08, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2163, decode.acc_seg: 91.3481, aux.loss_ce: 0.1627, aux.acc_seg: 84.2125, loss: 0.3790, grad_norm: 4.3690 
2023-02-12 00:36:33,310 - mmseg - INFO - Iter [111900/160000] lr: 1.804e-05, eta: 2:44:58, time: 0.220, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2379, decode.acc_seg: 90.5744, aux.loss_ce: 0.1803, aux.acc_seg: 82.4530, loss: 0.4182, grad_norm: 5.0810 2023-02-12 00:36:43,049 - mmseg - INFO - Iter [111950/160000] lr: 1.802e-05, eta: 2:44:47, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2293, decode.acc_seg: 91.1179, aux.loss_ce: 0.1661, aux.acc_seg: 84.2042, loss: 0.3954, grad_norm: 4.1673 2023-02-12 00:36:54,997 - mmseg - INFO - Saving checkpoint at 112000 iterations 2023-02-12 00:36:55,694 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:36:55,694 - mmseg - INFO - Iter [112000/160000] lr: 1.800e-05, eta: 2:44:38, time: 0.253, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2221, decode.acc_seg: 91.0986, aux.loss_ce: 0.1572, aux.acc_seg: 84.3533, loss: 0.3793, grad_norm: 4.3773 2023-02-12 00:37:07,006 - mmseg - INFO - per class results: 2023-02-12 00:37:07,012 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 74.71 | 84.82 | | building | 80.67 | 91.79 | | sky | 94.02 | 96.76 | | floor | 79.32 | 90.1 | | tree | 73.37 | 88.14 | | ceiling | 82.43 | 90.09 | | road | 80.81 | 91.19 | | bed | 87.32 | 93.82 | | windowpane | 59.11 | 77.89 | | grass | 66.22 | 80.96 | | cabinet | 60.21 | 79.82 | | sidewalk | 60.94 | 72.02 | | person | 77.5 | 91.15 | | earth | 30.22 | 39.56 | | door | 42.92 | 64.34 | | table | 52.75 | 61.47 | | mountain | 57.16 | 75.62 | | plant | 50.55 | 68.2 | | curtain | 70.5 | 86.04 | | chair | 50.2 | 60.48 | | car | 81.78 | 92.14 | | water | 49.97 | 61.02 | | painting | 59.6 | 87.29 | | sofa | 59.61 | 79.3 | | shelf | 37.61 | 48.68 | | house | 41.74 | 50.05 | | sea | 60.57 | 91.92 | | mirror | 57.08 | 73.24 | | rug | 60.99 | 72.73 | | field | 25.98 | 47.92 | | armchair | 37.09 | 51.7 | | seat | 55.2 | 79.55 | | fence | 29.49 | 44.8 | | desk | 41.73 | 67.42 | | rock | 34.34 | 57.07 | | wardrobe | 47.34 | 52.34 | | lamp | 57.63 | 70.63 | | bathtub | 71.45 | 79.93 | | railing | 31.02 | 40.49 | | cushion | 50.26 | 64.13 | | base | 25.96 | 34.02 | | box | 16.95 | 19.54 | | column | 41.92 | 50.6 | | signboard | 31.56 | 48.68 | | chest of drawers | 40.84 | 50.4 | | counter | 31.79 | 49.84 | | sand | 40.49 | 53.14 | | sink | 69.34 | 81.64 | | skyscraper | 64.01 | 79.89 | | fireplace | 67.0 | 83.69 | | refrigerator | 73.16 | 79.55 | | grandstand | 32.0 | 56.3 | | path | 20.45 | 26.98 | | stairs | 26.89 | 30.74 | | runway | 66.86 | 86.81 | | case | 36.12 | 42.34 | | pool table | 91.5 | 96.52 | | pillow | 51.36 | 78.14 | | screen door | 46.0 | 47.62 | | stairway | 27.85 | 38.18 | | river | 10.43 | 19.35 | | bridge | 62.06 | 71.09 | | bookcase | 35.21 | 58.59 | | blind | 47.1 | 64.06 | | coffee table | 44.67 | 84.41 | | toilet | 84.49 | 91.37 | | flower | 37.48 | 50.61 | | book | 42.94 | 55.73 | | hill | 4.35 | 6.87 | | bench | 45.06 | 58.83 | | countertop | 54.07 | 66.33 | | stove | 69.07 | 77.57 | | palm | 47.1 | 76.71 | | kitchen island | 29.47 | 79.71 | | computer | 64.67 | 78.58 | | swivel chair | 41.01 | 66.48 | | boat | 40.93 | 53.66 | | bar | 14.7 | 16.51 | | arcade machine | 52.05 | 70.19 | | hovel | 9.67 | 11.67 | | bus | 85.52 | 89.95 | | towel | 59.86 | 69.84 | | light | 52.19 | 61.54 | | truck | 32.82 | 44.19 | | tower | 23.13 | 29.83 | | chandelier | 61.79 | 82.56 | | awning | 24.08 | 38.41 | | streetlight | 24.35 | 32.02 | | booth | 
42.97 | 44.28 | | television receiver | 56.55 | 78.45 | | airplane | 55.93 | 69.15 | | dirt track | 7.81 | 34.5 | | apparel | 29.95 | 51.45 | | pole | 13.86 | 18.23 | | land | 4.13 | 9.2 | | bannister | 12.68 | 20.0 | | escalator | 29.5 | 34.21 | | ottoman | 38.28 | 52.61 | | bottle | 28.57 | 44.52 | | buffet | 34.83 | 40.74 | | poster | 6.0 | 7.16 | | stage | 16.31 | 39.68 | | van | 43.61 | 64.28 | | ship | 49.26 | 72.22 | | fountain | 21.97 | 23.85 | | conveyer belt | 58.04 | 68.17 | | canopy | 8.2 | 10.9 | | washer | 64.38 | 72.75 | | plaything | 17.78 | 29.67 | | swimming pool | 55.85 | 73.69 | | stool | 34.81 | 58.18 | | barrel | 20.99 | 64.72 | | basket | 27.63 | 41.44 | | waterfall | 54.19 | 69.79 | | tent | 92.88 | 98.53 | | bag | 15.25 | 22.99 | | minibike | 65.13 | 83.26 | | cradle | 78.76 | 91.4 | | oven | 32.4 | 60.46 | | ball | 41.77 | 66.34 | | food | 53.78 | 66.17 | | step | 5.13 | 5.56 | | tank | 22.32 | 22.53 | | trade name | 23.25 | 26.71 | | microwave | 38.51 | 40.28 | | pot | 36.39 | 48.71 | | animal | 58.56 | 65.4 | | bicycle | 48.99 | 80.57 | | lake | 54.71 | 69.54 | | dishwasher | 60.72 | 79.42 | | screen | 60.65 | 86.36 | | blanket | 14.81 | 19.89 | | sculpture | 53.53 | 65.89 | | hood | 56.88 | 62.4 | | sconce | 38.85 | 53.29 | | vase | 27.29 | 48.7 | | traffic light | 26.61 | 56.63 | | tray | 6.48 | 13.17 | | ashcan | 32.19 | 53.07 | | fan | 52.55 | 72.11 | | pier | 62.63 | 69.19 | | crt screen | 0.03 | 0.08 | | plate | 48.31 | 69.7 | | monitor | 3.68 | 4.96 | | bulletin board | 21.19 | 24.76 | | shower | 0.11 | 1.34 | | radiator | 51.26 | 56.92 | | glass | 7.81 | 8.58 | | clock | 22.19 | 26.19 | | flag | 33.47 | 37.67 | +---------------------+-------+-------+ 2023-02-12 00:37:07,012 - mmseg - INFO - Summary: 2023-02-12 00:37:07,012 - mmseg - INFO - +-------+-------+-------+ | aAcc | mIoU | mAcc | +-------+-------+-------+ | 80.89 | 44.05 | 57.27 | +-------+-------+-------+ 2023-02-12 00:37:07,704 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_112000.pth. 2023-02-12 00:37:07,705 - mmseg - INFO - Best mIoU is 0.4405 at 112000 iter. 
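A note on the evaluation block above: in the Summary, mIoU and mAcc are the means of the per-class IoU and Acc columns (44.05 and 57.27 over the 150 ADE20K categories), while aAcc is the overall pixel accuracy and cannot be recovered from the table alone. The sketch below is illustrative only; summarize and VAL_SNIPPET are assumed names, and the snippet covers just three classes. Applied to the full Iter(val) dump that follows, the means come out to roughly 0.4405 and 0.5727, matching the Summary up to rounding.

import re

# Illustrative sketch: average the per-class IoU./Acc. entries of an "Iter(val)"
# line to recover mIoU and mAcc. Only a 3-class excerpt is used here.
VAL_SNIPPET = ("IoU.wall: 0.7471, IoU.building: 0.8067, IoU.sky: 0.9402, "
               "Acc.wall: 0.8482, Acc.building: 0.9179, Acc.sky: 0.9676")

def summarize(val_line: str) -> dict:
    pairs = re.findall(r"(IoU|Acc)\.([\w ]+?): ([\d.]+)", val_line)
    iou = [float(v) for metric, _cls, v in pairs if metric == "IoU"]
    acc = [float(v) for metric, _cls, v in pairs if metric == "Acc"]
    return {"mIoU": sum(iou) / len(iou), "mAcc": sum(acc) / len(acc)}

print(summarize(VAL_SNIPPET))  # means over the 3-class excerpt only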
2023-02-12 00:37:07,705 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:37:07,705 - mmseg - INFO - Iter(val) [250] aAcc: 0.8089, mIoU: 0.4405, mAcc: 0.5727, IoU.wall: 0.7471, IoU.building: 0.8067, IoU.sky: 0.9402, IoU.floor: 0.7932, IoU.tree: 0.7337, IoU.ceiling: 0.8243, IoU.road: 0.8081, IoU.bed : 0.8732, IoU.windowpane: 0.5911, IoU.grass: 0.6622, IoU.cabinet: 0.6021, IoU.sidewalk: 0.6094, IoU.person: 0.7750, IoU.earth: 0.3022, IoU.door: 0.4292, IoU.table: 0.5275, IoU.mountain: 0.5716, IoU.plant: 0.5055, IoU.curtain: 0.7050, IoU.chair: 0.5020, IoU.car: 0.8178, IoU.water: 0.4997, IoU.painting: 0.5960, IoU.sofa: 0.5961, IoU.shelf: 0.3761, IoU.house: 0.4174, IoU.sea: 0.6057, IoU.mirror: 0.5708, IoU.rug: 0.6099, IoU.field: 0.2598, IoU.armchair: 0.3709, IoU.seat: 0.5520, IoU.fence: 0.2949, IoU.desk: 0.4173, IoU.rock: 0.3434, IoU.wardrobe: 0.4734, IoU.lamp: 0.5763, IoU.bathtub: 0.7145, IoU.railing: 0.3102, IoU.cushion: 0.5026, IoU.base: 0.2596, IoU.box: 0.1695, IoU.column: 0.4192, IoU.signboard: 0.3156, IoU.chest of drawers: 0.4084, IoU.counter: 0.3179, IoU.sand: 0.4049, IoU.sink: 0.6934, IoU.skyscraper: 0.6401, IoU.fireplace: 0.6700, IoU.refrigerator: 0.7316, IoU.grandstand: 0.3200, IoU.path: 0.2045, IoU.stairs: 0.2689, IoU.runway: 0.6686, IoU.case: 0.3612, IoU.pool table: 0.9150, IoU.pillow: 0.5136, IoU.screen door: 0.4600, IoU.stairway: 0.2785, IoU.river: 0.1043, IoU.bridge: 0.6206, IoU.bookcase: 0.3521, IoU.blind: 0.4710, IoU.coffee table: 0.4467, IoU.toilet: 0.8449, IoU.flower: 0.3748, IoU.book: 0.4294, IoU.hill: 0.0435, IoU.bench: 0.4506, IoU.countertop: 0.5407, IoU.stove: 0.6907, IoU.palm: 0.4710, IoU.kitchen island: 0.2947, IoU.computer: 0.6467, IoU.swivel chair: 0.4101, IoU.boat: 0.4093, IoU.bar: 0.1470, IoU.arcade machine: 0.5205, IoU.hovel: 0.0967, IoU.bus: 0.8552, IoU.towel: 0.5986, IoU.light: 0.5219, IoU.truck: 0.3282, IoU.tower: 0.2313, IoU.chandelier: 0.6179, IoU.awning: 0.2408, IoU.streetlight: 0.2435, IoU.booth: 0.4297, IoU.television receiver: 0.5655, IoU.airplane: 0.5593, IoU.dirt track: 0.0781, IoU.apparel: 0.2995, IoU.pole: 0.1386, IoU.land: 0.0413, IoU.bannister: 0.1268, IoU.escalator: 0.2950, IoU.ottoman: 0.3828, IoU.bottle: 0.2857, IoU.buffet: 0.3483, IoU.poster: 0.0600, IoU.stage: 0.1631, IoU.van: 0.4361, IoU.ship: 0.4926, IoU.fountain: 0.2197, IoU.conveyer belt: 0.5804, IoU.canopy: 0.0820, IoU.washer: 0.6438, IoU.plaything: 0.1778, IoU.swimming pool: 0.5585, IoU.stool: 0.3481, IoU.barrel: 0.2099, IoU.basket: 0.2763, IoU.waterfall: 0.5419, IoU.tent: 0.9288, IoU.bag: 0.1525, IoU.minibike: 0.6513, IoU.cradle: 0.7876, IoU.oven: 0.3240, IoU.ball: 0.4177, IoU.food: 0.5378, IoU.step: 0.0513, IoU.tank: 0.2232, IoU.trade name: 0.2325, IoU.microwave: 0.3851, IoU.pot: 0.3639, IoU.animal: 0.5856, IoU.bicycle: 0.4899, IoU.lake: 0.5471, IoU.dishwasher: 0.6072, IoU.screen: 0.6065, IoU.blanket: 0.1481, IoU.sculpture: 0.5353, IoU.hood: 0.5688, IoU.sconce: 0.3885, IoU.vase: 0.2729, IoU.traffic light: 0.2661, IoU.tray: 0.0648, IoU.ashcan: 0.3219, IoU.fan: 0.5255, IoU.pier: 0.6263, IoU.crt screen: 0.0003, IoU.plate: 0.4831, IoU.monitor: 0.0368, IoU.bulletin board: 0.2119, IoU.shower: 0.0011, IoU.radiator: 0.5126, IoU.glass: 0.0781, IoU.clock: 0.2219, IoU.flag: 0.3347, Acc.wall: 0.8482, Acc.building: 0.9179, Acc.sky: 0.9676, Acc.floor: 0.9010, Acc.tree: 0.8814, Acc.ceiling: 0.9009, Acc.road: 0.9119, Acc.bed : 0.9382, Acc.windowpane: 0.7789, Acc.grass: 0.8096, Acc.cabinet: 0.7982, Acc.sidewalk: 0.7202, Acc.person: 0.9115, Acc.earth: 0.3956, Acc.door: 
0.6434, Acc.table: 0.6147, Acc.mountain: 0.7562, Acc.plant: 0.6820, Acc.curtain: 0.8604, Acc.chair: 0.6048, Acc.car: 0.9214, Acc.water: 0.6102, Acc.painting: 0.8729, Acc.sofa: 0.7930, Acc.shelf: 0.4868, Acc.house: 0.5005, Acc.sea: 0.9192, Acc.mirror: 0.7324, Acc.rug: 0.7273, Acc.field: 0.4792, Acc.armchair: 0.5170, Acc.seat: 0.7955, Acc.fence: 0.4480, Acc.desk: 0.6742, Acc.rock: 0.5707, Acc.wardrobe: 0.5234, Acc.lamp: 0.7063, Acc.bathtub: 0.7993, Acc.railing: 0.4049, Acc.cushion: 0.6413, Acc.base: 0.3402, Acc.box: 0.1954, Acc.column: 0.5060, Acc.signboard: 0.4868, Acc.chest of drawers: 0.5040, Acc.counter: 0.4984, Acc.sand: 0.5314, Acc.sink: 0.8164, Acc.skyscraper: 0.7989, Acc.fireplace: 0.8369, Acc.refrigerator: 0.7955, Acc.grandstand: 0.5630, Acc.path: 0.2698, Acc.stairs: 0.3074, Acc.runway: 0.8681, Acc.case: 0.4234, Acc.pool table: 0.9652, Acc.pillow: 0.7814, Acc.screen door: 0.4762, Acc.stairway: 0.3818, Acc.river: 0.1935, Acc.bridge: 0.7109, Acc.bookcase: 0.5859, Acc.blind: 0.6406, Acc.coffee table: 0.8441, Acc.toilet: 0.9137, Acc.flower: 0.5061, Acc.book: 0.5573, Acc.hill: 0.0687, Acc.bench: 0.5883, Acc.countertop: 0.6633, Acc.stove: 0.7757, Acc.palm: 0.7671, Acc.kitchen island: 0.7971, Acc.computer: 0.7858, Acc.swivel chair: 0.6648, Acc.boat: 0.5366, Acc.bar: 0.1651, Acc.arcade machine: 0.7019, Acc.hovel: 0.1167, Acc.bus: 0.8995, Acc.towel: 0.6984, Acc.light: 0.6154, Acc.truck: 0.4419, Acc.tower: 0.2983, Acc.chandelier: 0.8256, Acc.awning: 0.3841, Acc.streetlight: 0.3202, Acc.booth: 0.4428, Acc.television receiver: 0.7845, Acc.airplane: 0.6915, Acc.dirt track: 0.3450, Acc.apparel: 0.5145, Acc.pole: 0.1823, Acc.land: 0.0920, Acc.bannister: 0.2000, Acc.escalator: 0.3421, Acc.ottoman: 0.5261, Acc.bottle: 0.4452, Acc.buffet: 0.4074, Acc.poster: 0.0716, Acc.stage: 0.3968, Acc.van: 0.6428, Acc.ship: 0.7222, Acc.fountain: 0.2385, Acc.conveyer belt: 0.6817, Acc.canopy: 0.1090, Acc.washer: 0.7275, Acc.plaything: 0.2967, Acc.swimming pool: 0.7369, Acc.stool: 0.5818, Acc.barrel: 0.6472, Acc.basket: 0.4144, Acc.waterfall: 0.6979, Acc.tent: 0.9853, Acc.bag: 0.2299, Acc.minibike: 0.8326, Acc.cradle: 0.9140, Acc.oven: 0.6046, Acc.ball: 0.6634, Acc.food: 0.6617, Acc.step: 0.0556, Acc.tank: 0.2253, Acc.trade name: 0.2671, Acc.microwave: 0.4028, Acc.pot: 0.4871, Acc.animal: 0.6540, Acc.bicycle: 0.8057, Acc.lake: 0.6954, Acc.dishwasher: 0.7942, Acc.screen: 0.8636, Acc.blanket: 0.1989, Acc.sculpture: 0.6589, Acc.hood: 0.6240, Acc.sconce: 0.5329, Acc.vase: 0.4870, Acc.traffic light: 0.5663, Acc.tray: 0.1317, Acc.ashcan: 0.5307, Acc.fan: 0.7211, Acc.pier: 0.6919, Acc.crt screen: 0.0008, Acc.plate: 0.6970, Acc.monitor: 0.0496, Acc.bulletin board: 0.2476, Acc.shower: 0.0134, Acc.radiator: 0.5692, Acc.glass: 0.0858, Acc.clock: 0.2619, Acc.flag: 0.3767 2023-02-12 00:37:17,868 - mmseg - INFO - Iter [112050/160000] lr: 1.798e-05, eta: 2:44:34, time: 0.443, data_time: 0.245, memory: 7748, decode.loss_ce: 0.2324, decode.acc_seg: 90.8734, aux.loss_ce: 0.1713, aux.acc_seg: 83.8699, loss: 0.4037, grad_norm: 4.7958 2023-02-12 00:37:27,766 - mmseg - INFO - Iter [112100/160000] lr: 1.796e-05, eta: 2:44:23, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2220, decode.acc_seg: 91.0573, aux.loss_ce: 0.1677, aux.acc_seg: 83.6495, loss: 0.3897, grad_norm: 4.3969 2023-02-12 00:37:37,591 - mmseg - INFO - Iter [112150/160000] lr: 1.794e-05, eta: 2:44:13, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2181, decode.acc_seg: 91.3658, aux.loss_ce: 0.1723, aux.acc_seg: 83.5063, loss: 0.3904, 
grad_norm: 4.9975 2023-02-12 00:37:47,566 - mmseg - INFO - Iter [112200/160000] lr: 1.793e-05, eta: 2:44:02, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2173, decode.acc_seg: 91.4901, aux.loss_ce: 0.1663, aux.acc_seg: 84.0547, loss: 0.3836, grad_norm: 4.1190 2023-02-12 00:37:57,896 - mmseg - INFO - Iter [112250/160000] lr: 1.791e-05, eta: 2:43:52, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2261, decode.acc_seg: 90.9452, aux.loss_ce: 0.1666, aux.acc_seg: 83.7399, loss: 0.3928, grad_norm: 4.7047 2023-02-12 00:38:07,622 - mmseg - INFO - Iter [112300/160000] lr: 1.789e-05, eta: 2:43:42, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2120, decode.acc_seg: 91.4788, aux.loss_ce: 0.1620, aux.acc_seg: 83.9980, loss: 0.3740, grad_norm: 4.0357 2023-02-12 00:38:17,986 - mmseg - INFO - Iter [112350/160000] lr: 1.787e-05, eta: 2:43:31, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2184, decode.acc_seg: 91.3394, aux.loss_ce: 0.1644, aux.acc_seg: 84.0486, loss: 0.3828, grad_norm: 4.4703 2023-02-12 00:38:28,215 - mmseg - INFO - Iter [112400/160000] lr: 1.785e-05, eta: 2:43:21, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2179, decode.acc_seg: 91.4373, aux.loss_ce: 0.1658, aux.acc_seg: 83.7524, loss: 0.3837, grad_norm: 3.8369 2023-02-12 00:38:38,058 - mmseg - INFO - Iter [112450/160000] lr: 1.783e-05, eta: 2:43:10, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2275, decode.acc_seg: 90.9333, aux.loss_ce: 0.1680, aux.acc_seg: 83.3933, loss: 0.3955, grad_norm: 4.4700 2023-02-12 00:38:48,094 - mmseg - INFO - Iter [112500/160000] lr: 1.781e-05, eta: 2:43:00, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2142, decode.acc_seg: 91.4378, aux.loss_ce: 0.1632, aux.acc_seg: 83.7926, loss: 0.3774, grad_norm: 3.6660 2023-02-12 00:38:57,731 - mmseg - INFO - Iter [112550/160000] lr: 1.779e-05, eta: 2:42:49, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2156, decode.acc_seg: 91.3054, aux.loss_ce: 0.1695, aux.acc_seg: 83.2816, loss: 0.3852, grad_norm: 4.0461 2023-02-12 00:39:07,768 - mmseg - INFO - Iter [112600/160000] lr: 1.778e-05, eta: 2:42:39, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2194, decode.acc_seg: 91.2587, aux.loss_ce: 0.1647, aux.acc_seg: 84.1057, loss: 0.3841, grad_norm: 3.9923 2023-02-12 00:39:17,980 - mmseg - INFO - Iter [112650/160000] lr: 1.776e-05, eta: 2:42:29, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2334, decode.acc_seg: 90.6548, aux.loss_ce: 0.1727, aux.acc_seg: 83.0694, loss: 0.4060, grad_norm: 4.7426 2023-02-12 00:39:27,828 - mmseg - INFO - Iter [112700/160000] lr: 1.774e-05, eta: 2:42:18, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2369, decode.acc_seg: 91.0553, aux.loss_ce: 0.1758, aux.acc_seg: 83.5528, loss: 0.4127, grad_norm: 4.7293 2023-02-12 00:39:38,021 - mmseg - INFO - Iter [112750/160000] lr: 1.772e-05, eta: 2:42:08, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2198, decode.acc_seg: 91.4389, aux.loss_ce: 0.1628, aux.acc_seg: 84.1749, loss: 0.3825, grad_norm: 4.1922 2023-02-12 00:39:47,799 - mmseg - INFO - Iter [112800/160000] lr: 1.770e-05, eta: 2:41:57, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2307, decode.acc_seg: 90.7718, aux.loss_ce: 0.1667, aux.acc_seg: 83.3948, loss: 0.3974, grad_norm: 4.6766 2023-02-12 00:39:57,937 - mmseg - INFO - Iter [112850/160000] lr: 1.768e-05, eta: 2:41:47, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 
0.2136, decode.acc_seg: 91.3297, aux.loss_ce: 0.1629, aux.acc_seg: 84.0335, loss: 0.3765, grad_norm: 3.9554 2023-02-12 00:40:07,724 - mmseg - INFO - Iter [112900/160000] lr: 1.766e-05, eta: 2:41:36, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2260, decode.acc_seg: 90.9377, aux.loss_ce: 0.1645, aux.acc_seg: 84.0255, loss: 0.3905, grad_norm: 3.6849 2023-02-12 00:40:17,688 - mmseg - INFO - Iter [112950/160000] lr: 1.764e-05, eta: 2:41:26, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2104, decode.acc_seg: 91.8193, aux.loss_ce: 0.1623, aux.acc_seg: 84.0500, loss: 0.3727, grad_norm: 4.2600 2023-02-12 00:40:28,425 - mmseg - INFO - Saving checkpoint at 113000 iterations 2023-02-12 00:40:29,197 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:40:29,197 - mmseg - INFO - Iter [113000/160000] lr: 1.763e-05, eta: 2:41:16, time: 0.230, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2132, decode.acc_seg: 91.4504, aux.loss_ce: 0.1642, aux.acc_seg: 84.1770, loss: 0.3773, grad_norm: 3.7241 2023-02-12 00:40:39,529 - mmseg - INFO - Iter [113050/160000] lr: 1.761e-05, eta: 2:41:06, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2300, decode.acc_seg: 91.0493, aux.loss_ce: 0.1778, aux.acc_seg: 82.9673, loss: 0.4078, grad_norm: 4.3275 2023-02-12 00:40:49,704 - mmseg - INFO - Iter [113100/160000] lr: 1.759e-05, eta: 2:40:56, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2134, decode.acc_seg: 91.5562, aux.loss_ce: 0.1581, aux.acc_seg: 84.3750, loss: 0.3715, grad_norm: 4.6862 2023-02-12 00:40:59,974 - mmseg - INFO - Iter [113150/160000] lr: 1.757e-05, eta: 2:40:45, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2219, decode.acc_seg: 91.2605, aux.loss_ce: 0.1677, aux.acc_seg: 83.8925, loss: 0.3897, grad_norm: 5.4345 2023-02-12 00:41:10,560 - mmseg - INFO - Iter [113200/160000] lr: 1.755e-05, eta: 2:40:35, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2191, decode.acc_seg: 91.4913, aux.loss_ce: 0.1681, aux.acc_seg: 83.8812, loss: 0.3872, grad_norm: 4.4646 2023-02-12 00:41:21,101 - mmseg - INFO - Iter [113250/160000] lr: 1.753e-05, eta: 2:40:25, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2431, decode.acc_seg: 90.1454, aux.loss_ce: 0.1819, aux.acc_seg: 82.3273, loss: 0.4250, grad_norm: 5.2734 2023-02-12 00:41:33,132 - mmseg - INFO - Iter [113300/160000] lr: 1.751e-05, eta: 2:40:15, time: 0.241, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2000, decode.acc_seg: 92.0719, aux.loss_ce: 0.1588, aux.acc_seg: 84.6938, loss: 0.3588, grad_norm: 4.3829 2023-02-12 00:41:43,028 - mmseg - INFO - Iter [113350/160000] lr: 1.749e-05, eta: 2:40:05, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2044, decode.acc_seg: 91.9015, aux.loss_ce: 0.1504, aux.acc_seg: 85.3126, loss: 0.3548, grad_norm: 5.6004 2023-02-12 00:41:52,940 - mmseg - INFO - Iter [113400/160000] lr: 1.748e-05, eta: 2:39:54, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2320, decode.acc_seg: 91.0844, aux.loss_ce: 0.1677, aux.acc_seg: 83.5490, loss: 0.3998, grad_norm: 5.2524 2023-02-12 00:42:03,577 - mmseg - INFO - Iter [113450/160000] lr: 1.746e-05, eta: 2:39:44, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2122, decode.acc_seg: 91.3324, aux.loss_ce: 0.1628, aux.acc_seg: 83.8138, loss: 0.3751, grad_norm: 3.8907 2023-02-12 00:42:13,849 - mmseg - INFO - Iter [113500/160000] lr: 1.744e-05, eta: 2:39:34, time: 0.206, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.2173, decode.acc_seg: 91.3979, aux.loss_ce: 0.1649, aux.acc_seg: 84.1944, loss: 0.3822, grad_norm: 3.7334 2023-02-12 00:42:24,047 - mmseg - INFO - Iter [113550/160000] lr: 1.742e-05, eta: 2:39:24, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2185, decode.acc_seg: 91.1670, aux.loss_ce: 0.1609, aux.acc_seg: 84.2163, loss: 0.3794, grad_norm: 4.3982 2023-02-12 00:42:34,801 - mmseg - INFO - Iter [113600/160000] lr: 1.740e-05, eta: 2:39:14, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2150, decode.acc_seg: 91.6762, aux.loss_ce: 0.1712, aux.acc_seg: 83.6027, loss: 0.3862, grad_norm: 4.4660 2023-02-12 00:42:45,098 - mmseg - INFO - Iter [113650/160000] lr: 1.738e-05, eta: 2:39:03, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2168, decode.acc_seg: 91.3873, aux.loss_ce: 0.1683, aux.acc_seg: 83.6038, loss: 0.3851, grad_norm: 4.4028 2023-02-12 00:42:55,233 - mmseg - INFO - Iter [113700/160000] lr: 1.736e-05, eta: 2:38:53, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2418, decode.acc_seg: 90.5264, aux.loss_ce: 0.1780, aux.acc_seg: 83.0500, loss: 0.4198, grad_norm: 4.8081 2023-02-12 00:43:05,064 - mmseg - INFO - Iter [113750/160000] lr: 1.734e-05, eta: 2:38:42, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2184, decode.acc_seg: 91.2264, aux.loss_ce: 0.1767, aux.acc_seg: 83.0041, loss: 0.3951, grad_norm: 4.2957 2023-02-12 00:43:15,440 - mmseg - INFO - Iter [113800/160000] lr: 1.733e-05, eta: 2:38:32, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2289, decode.acc_seg: 90.9437, aux.loss_ce: 0.1714, aux.acc_seg: 83.4132, loss: 0.4003, grad_norm: 4.4452 2023-02-12 00:43:25,202 - mmseg - INFO - Iter [113850/160000] lr: 1.731e-05, eta: 2:38:22, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2088, decode.acc_seg: 91.5791, aux.loss_ce: 0.1641, aux.acc_seg: 83.6066, loss: 0.3728, grad_norm: 3.6449 2023-02-12 00:43:35,037 - mmseg - INFO - Iter [113900/160000] lr: 1.729e-05, eta: 2:38:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2264, decode.acc_seg: 90.9986, aux.loss_ce: 0.1671, aux.acc_seg: 83.7801, loss: 0.3935, grad_norm: 4.8672 2023-02-12 00:43:45,560 - mmseg - INFO - Iter [113950/160000] lr: 1.727e-05, eta: 2:38:01, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2264, decode.acc_seg: 91.0151, aux.loss_ce: 0.1709, aux.acc_seg: 83.5034, loss: 0.3973, grad_norm: 4.5490 2023-02-12 00:43:55,563 - mmseg - INFO - Saving checkpoint at 114000 iterations 2023-02-12 00:43:56,246 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:43:56,246 - mmseg - INFO - Iter [114000/160000] lr: 1.725e-05, eta: 2:37:51, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2110, decode.acc_seg: 91.3367, aux.loss_ce: 0.1630, aux.acc_seg: 83.7246, loss: 0.3740, grad_norm: 4.4135 2023-02-12 00:44:06,378 - mmseg - INFO - Iter [114050/160000] lr: 1.723e-05, eta: 2:37:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2287, decode.acc_seg: 90.7678, aux.loss_ce: 0.1735, aux.acc_seg: 83.1578, loss: 0.4022, grad_norm: 4.5767 2023-02-12 00:44:16,750 - mmseg - INFO - Iter [114100/160000] lr: 1.721e-05, eta: 2:37:30, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2303, decode.acc_seg: 90.5879, aux.loss_ce: 0.1770, aux.acc_seg: 82.8914, loss: 0.4074, grad_norm: 4.5916 2023-02-12 00:44:26,810 - mmseg - INFO - Iter [114150/160000] lr: 1.719e-05, eta: 2:37:20, time: 0.201, data_time: 0.004, memory: 
7748, decode.loss_ce: 0.2248, decode.acc_seg: 91.2477, aux.loss_ce: 0.1718, aux.acc_seg: 83.8594, loss: 0.3966, grad_norm: 4.4193 2023-02-12 00:44:36,649 - mmseg - INFO - Iter [114200/160000] lr: 1.718e-05, eta: 2:37:09, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2209, decode.acc_seg: 91.3280, aux.loss_ce: 0.1666, aux.acc_seg: 83.9277, loss: 0.3876, grad_norm: 4.7137 2023-02-12 00:44:47,188 - mmseg - INFO - Iter [114250/160000] lr: 1.716e-05, eta: 2:36:59, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2078, decode.acc_seg: 91.7521, aux.loss_ce: 0.1652, aux.acc_seg: 83.8679, loss: 0.3730, grad_norm: 3.8458 2023-02-12 00:44:56,856 - mmseg - INFO - Iter [114300/160000] lr: 1.714e-05, eta: 2:36:49, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2142, decode.acc_seg: 91.3340, aux.loss_ce: 0.1625, aux.acc_seg: 84.0802, loss: 0.3767, grad_norm: 4.1410 2023-02-12 00:45:06,754 - mmseg - INFO - Iter [114350/160000] lr: 1.712e-05, eta: 2:36:38, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2283, decode.acc_seg: 91.1235, aux.loss_ce: 0.1632, aux.acc_seg: 84.1817, loss: 0.3915, grad_norm: 4.7353 2023-02-12 00:45:16,883 - mmseg - INFO - Iter [114400/160000] lr: 1.710e-05, eta: 2:36:28, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2350, decode.acc_seg: 90.6254, aux.loss_ce: 0.1709, aux.acc_seg: 83.2905, loss: 0.4060, grad_norm: 4.5091 2023-02-12 00:45:26,989 - mmseg - INFO - Iter [114450/160000] lr: 1.708e-05, eta: 2:36:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2298, decode.acc_seg: 90.9678, aux.loss_ce: 0.1715, aux.acc_seg: 83.5309, loss: 0.4013, grad_norm: 4.6373 2023-02-12 00:45:36,826 - mmseg - INFO - Iter [114500/160000] lr: 1.706e-05, eta: 2:36:07, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2220, decode.acc_seg: 91.2008, aux.loss_ce: 0.1703, aux.acc_seg: 83.3818, loss: 0.3924, grad_norm: 4.8845 2023-02-12 00:45:49,094 - mmseg - INFO - Iter [114550/160000] lr: 1.704e-05, eta: 2:35:57, time: 0.245, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2104, decode.acc_seg: 91.6111, aux.loss_ce: 0.1672, aux.acc_seg: 83.7771, loss: 0.3776, grad_norm: 3.9593 2023-02-12 00:45:58,976 - mmseg - INFO - Iter [114600/160000] lr: 1.703e-05, eta: 2:35:47, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2026, decode.acc_seg: 91.9835, aux.loss_ce: 0.1575, aux.acc_seg: 84.8079, loss: 0.3601, grad_norm: 3.5441 2023-02-12 00:46:09,014 - mmseg - INFO - Iter [114650/160000] lr: 1.701e-05, eta: 2:35:36, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2033, decode.acc_seg: 91.5421, aux.loss_ce: 0.1580, aux.acc_seg: 84.1136, loss: 0.3613, grad_norm: 4.5049 2023-02-12 00:46:18,809 - mmseg - INFO - Iter [114700/160000] lr: 1.699e-05, eta: 2:35:26, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2202, decode.acc_seg: 91.1477, aux.loss_ce: 0.1640, aux.acc_seg: 83.8608, loss: 0.3842, grad_norm: 4.7233 2023-02-12 00:46:28,671 - mmseg - INFO - Iter [114750/160000] lr: 1.697e-05, eta: 2:35:15, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2200, decode.acc_seg: 91.3453, aux.loss_ce: 0.1656, aux.acc_seg: 83.7031, loss: 0.3856, grad_norm: 4.4508 2023-02-12 00:46:38,492 - mmseg - INFO - Iter [114800/160000] lr: 1.695e-05, eta: 2:35:05, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2220, decode.acc_seg: 91.1175, aux.loss_ce: 0.1719, aux.acc_seg: 83.1840, loss: 0.3939, grad_norm: 4.3181 2023-02-12 00:46:48,525 - mmseg - INFO - 
Iter [114850/160000] lr: 1.693e-05, eta: 2:34:55, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2182, decode.acc_seg: 91.3817, aux.loss_ce: 0.1604, aux.acc_seg: 84.3564, loss: 0.3785, grad_norm: 3.8264 2023-02-12 00:46:59,213 - mmseg - INFO - Iter [114900/160000] lr: 1.691e-05, eta: 2:34:44, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2026, decode.acc_seg: 91.8736, aux.loss_ce: 0.1618, aux.acc_seg: 84.3305, loss: 0.3645, grad_norm: 3.5911 2023-02-12 00:47:09,100 - mmseg - INFO - Iter [114950/160000] lr: 1.689e-05, eta: 2:34:34, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2345, decode.acc_seg: 90.7477, aux.loss_ce: 0.1772, aux.acc_seg: 82.8313, loss: 0.4117, grad_norm: 5.1875 2023-02-12 00:47:19,170 - mmseg - INFO - Saving checkpoint at 115000 iterations 2023-02-12 00:47:19,848 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:47:19,848 - mmseg - INFO - Iter [115000/160000] lr: 1.688e-05, eta: 2:34:24, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2216, decode.acc_seg: 91.0848, aux.loss_ce: 0.1618, aux.acc_seg: 84.2301, loss: 0.3834, grad_norm: 4.1007 2023-02-12 00:47:29,523 - mmseg - INFO - Iter [115050/160000] lr: 1.686e-05, eta: 2:34:13, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2142, decode.acc_seg: 91.5452, aux.loss_ce: 0.1651, aux.acc_seg: 84.0120, loss: 0.3793, grad_norm: 4.0759 2023-02-12 00:47:39,775 - mmseg - INFO - Iter [115100/160000] lr: 1.684e-05, eta: 2:34:03, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2194, decode.acc_seg: 91.4787, aux.loss_ce: 0.1596, aux.acc_seg: 84.6343, loss: 0.3791, grad_norm: 4.1000 2023-02-12 00:47:49,656 - mmseg - INFO - Iter [115150/160000] lr: 1.682e-05, eta: 2:33:53, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2265, decode.acc_seg: 90.9908, aux.loss_ce: 0.1708, aux.acc_seg: 83.2403, loss: 0.3973, grad_norm: 4.6965 2023-02-12 00:48:00,333 - mmseg - INFO - Iter [115200/160000] lr: 1.680e-05, eta: 2:33:42, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2226, decode.acc_seg: 91.2924, aux.loss_ce: 0.1689, aux.acc_seg: 83.7470, loss: 0.3915, grad_norm: 3.9270 2023-02-12 00:48:10,093 - mmseg - INFO - Iter [115250/160000] lr: 1.678e-05, eta: 2:33:32, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2207, decode.acc_seg: 91.0825, aux.loss_ce: 0.1655, aux.acc_seg: 83.9125, loss: 0.3862, grad_norm: 4.3957 2023-02-12 00:48:20,661 - mmseg - INFO - Iter [115300/160000] lr: 1.676e-05, eta: 2:33:22, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2089, decode.acc_seg: 91.6350, aux.loss_ce: 0.1641, aux.acc_seg: 84.2019, loss: 0.3731, grad_norm: 3.9962 2023-02-12 00:48:30,567 - mmseg - INFO - Iter [115350/160000] lr: 1.674e-05, eta: 2:33:11, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2075, decode.acc_seg: 91.7348, aux.loss_ce: 0.1664, aux.acc_seg: 83.8163, loss: 0.3740, grad_norm: 5.6509 2023-02-12 00:48:40,405 - mmseg - INFO - Iter [115400/160000] lr: 1.673e-05, eta: 2:33:01, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2303, decode.acc_seg: 91.0422, aux.loss_ce: 0.1720, aux.acc_seg: 83.4595, loss: 0.4023, grad_norm: 5.0698 2023-02-12 00:48:50,932 - mmseg - INFO - Iter [115450/160000] lr: 1.671e-05, eta: 2:32:51, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2177, decode.acc_seg: 91.3543, aux.loss_ce: 0.1659, aux.acc_seg: 83.4078, loss: 0.3836, grad_norm: 4.3303 2023-02-12 00:49:01,048 - mmseg - INFO 
- Iter [115500/160000] lr: 1.669e-05, eta: 2:32:40, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2067, decode.acc_seg: 91.5720, aux.loss_ce: 0.1607, aux.acc_seg: 83.9414, loss: 0.3674, grad_norm: 4.1672 2023-02-12 00:49:11,593 - mmseg - INFO - Iter [115550/160000] lr: 1.667e-05, eta: 2:32:30, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2210, decode.acc_seg: 91.3935, aux.loss_ce: 0.1655, aux.acc_seg: 84.1079, loss: 0.3866, grad_norm: 4.2701 2023-02-12 00:49:21,288 - mmseg - INFO - Iter [115600/160000] lr: 1.665e-05, eta: 2:32:19, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2076, decode.acc_seg: 91.7364, aux.loss_ce: 0.1597, aux.acc_seg: 84.2119, loss: 0.3672, grad_norm: 4.7693 2023-02-12 00:49:31,873 - mmseg - INFO - Iter [115650/160000] lr: 1.663e-05, eta: 2:32:09, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2213, decode.acc_seg: 91.5855, aux.loss_ce: 0.1642, aux.acc_seg: 84.0114, loss: 0.3855, grad_norm: 4.7549 2023-02-12 00:49:41,981 - mmseg - INFO - Iter [115700/160000] lr: 1.661e-05, eta: 2:31:59, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2195, decode.acc_seg: 90.9780, aux.loss_ce: 0.1725, aux.acc_seg: 82.7315, loss: 0.3920, grad_norm: 4.0849 2023-02-12 00:49:52,331 - mmseg - INFO - Iter [115750/160000] lr: 1.659e-05, eta: 2:31:49, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2260, decode.acc_seg: 91.2312, aux.loss_ce: 0.1687, aux.acc_seg: 83.7919, loss: 0.3947, grad_norm: 3.8374 2023-02-12 00:50:04,536 - mmseg - INFO - Iter [115800/160000] lr: 1.658e-05, eta: 2:31:39, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2326, decode.acc_seg: 90.8462, aux.loss_ce: 0.1742, aux.acc_seg: 83.3775, loss: 0.4068, grad_norm: 4.2146 2023-02-12 00:50:14,615 - mmseg - INFO - Iter [115850/160000] lr: 1.656e-05, eta: 2:31:29, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2175, decode.acc_seg: 91.3858, aux.loss_ce: 0.1635, aux.acc_seg: 84.1482, loss: 0.3810, grad_norm: 4.3536 2023-02-12 00:50:24,533 - mmseg - INFO - Iter [115900/160000] lr: 1.654e-05, eta: 2:31:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2115, decode.acc_seg: 91.6428, aux.loss_ce: 0.1626, aux.acc_seg: 84.2432, loss: 0.3741, grad_norm: 4.1421 2023-02-12 00:50:34,491 - mmseg - INFO - Iter [115950/160000] lr: 1.652e-05, eta: 2:31:08, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2286, decode.acc_seg: 90.7942, aux.loss_ce: 0.1703, aux.acc_seg: 83.4278, loss: 0.3989, grad_norm: 5.0289 2023-02-12 00:50:44,511 - mmseg - INFO - Saving checkpoint at 116000 iterations 2023-02-12 00:50:45,185 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:50:45,185 - mmseg - INFO - Iter [116000/160000] lr: 1.650e-05, eta: 2:30:58, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2103, decode.acc_seg: 91.6688, aux.loss_ce: 0.1595, aux.acc_seg: 84.5700, loss: 0.3698, grad_norm: 4.4558 2023-02-12 00:50:55,425 - mmseg - INFO - Iter [116050/160000] lr: 1.648e-05, eta: 2:30:47, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2169, decode.acc_seg: 91.4676, aux.loss_ce: 0.1697, aux.acc_seg: 83.4634, loss: 0.3866, grad_norm: 4.7682 2023-02-12 00:51:05,323 - mmseg - INFO - Iter [116100/160000] lr: 1.646e-05, eta: 2:30:37, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2118, decode.acc_seg: 91.6918, aux.loss_ce: 0.1608, aux.acc_seg: 84.3453, loss: 0.3726, grad_norm: 4.8192 2023-02-12 00:51:15,370 - mmseg - 
INFO - Iter [116150/160000] lr: 1.644e-05, eta: 2:30:27, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2125, decode.acc_seg: 91.4852, aux.loss_ce: 0.1640, aux.acc_seg: 84.0351, loss: 0.3765, grad_norm: 4.4290 2023-02-12 00:51:25,560 - mmseg - INFO - Iter [116200/160000] lr: 1.643e-05, eta: 2:30:16, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2350, decode.acc_seg: 91.0078, aux.loss_ce: 0.1719, aux.acc_seg: 83.3591, loss: 0.4068, grad_norm: 4.3419 2023-02-12 00:51:35,598 - mmseg - INFO - Iter [116250/160000] lr: 1.641e-05, eta: 2:30:06, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2117, decode.acc_seg: 91.7462, aux.loss_ce: 0.1608, aux.acc_seg: 84.4372, loss: 0.3725, grad_norm: 4.6800 2023-02-12 00:51:45,703 - mmseg - INFO - Iter [116300/160000] lr: 1.639e-05, eta: 2:29:55, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2073, decode.acc_seg: 91.4584, aux.loss_ce: 0.1551, aux.acc_seg: 84.6134, loss: 0.3624, grad_norm: 4.2223 2023-02-12 00:51:56,069 - mmseg - INFO - Iter [116350/160000] lr: 1.637e-05, eta: 2:29:45, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2122, decode.acc_seg: 91.6286, aux.loss_ce: 0.1668, aux.acc_seg: 83.8135, loss: 0.3790, grad_norm: 4.7386 2023-02-12 00:52:05,914 - mmseg - INFO - Iter [116400/160000] lr: 1.635e-05, eta: 2:29:35, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2185, decode.acc_seg: 91.4289, aux.loss_ce: 0.1668, aux.acc_seg: 83.6523, loss: 0.3854, grad_norm: 4.0676 2023-02-12 00:52:15,624 - mmseg - INFO - Iter [116450/160000] lr: 1.633e-05, eta: 2:29:24, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2153, decode.acc_seg: 91.4594, aux.loss_ce: 0.1671, aux.acc_seg: 83.7934, loss: 0.3825, grad_norm: 4.7519 2023-02-12 00:52:25,939 - mmseg - INFO - Iter [116500/160000] lr: 1.631e-05, eta: 2:29:14, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2142, decode.acc_seg: 91.4016, aux.loss_ce: 0.1607, aux.acc_seg: 84.0329, loss: 0.3749, grad_norm: 3.9085 2023-02-12 00:52:36,739 - mmseg - INFO - Iter [116550/160000] lr: 1.629e-05, eta: 2:29:04, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2272, decode.acc_seg: 91.1854, aux.loss_ce: 0.1716, aux.acc_seg: 83.7590, loss: 0.3988, grad_norm: 4.3050 2023-02-12 00:52:46,807 - mmseg - INFO - Iter [116600/160000] lr: 1.628e-05, eta: 2:28:53, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2320, decode.acc_seg: 90.8513, aux.loss_ce: 0.1811, aux.acc_seg: 82.5116, loss: 0.4131, grad_norm: 5.4633 2023-02-12 00:52:57,260 - mmseg - INFO - Iter [116650/160000] lr: 1.626e-05, eta: 2:28:43, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1972, decode.acc_seg: 92.0851, aux.loss_ce: 0.1531, aux.acc_seg: 85.0371, loss: 0.3503, grad_norm: 3.6751 2023-02-12 00:53:07,061 - mmseg - INFO - Iter [116700/160000] lr: 1.624e-05, eta: 2:28:33, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2224, decode.acc_seg: 91.4661, aux.loss_ce: 0.1672, aux.acc_seg: 83.9164, loss: 0.3896, grad_norm: 3.9323 2023-02-12 00:53:16,889 - mmseg - INFO - Iter [116750/160000] lr: 1.622e-05, eta: 2:28:22, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2059, decode.acc_seg: 91.7976, aux.loss_ce: 0.1582, aux.acc_seg: 84.6488, loss: 0.3641, grad_norm: 4.1311 2023-02-12 00:53:26,855 - mmseg - INFO - Iter [116800/160000] lr: 1.620e-05, eta: 2:28:12, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2118, decode.acc_seg: 91.4967, aux.loss_ce: 0.1638, 
aux.acc_seg: 83.9056, loss: 0.3757, grad_norm: 3.8098 2023-02-12 00:53:36,733 - mmseg - INFO - Iter [116850/160000] lr: 1.618e-05, eta: 2:28:01, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2248, decode.acc_seg: 91.1382, aux.loss_ce: 0.1701, aux.acc_seg: 83.5389, loss: 0.3950, grad_norm: 4.0444 2023-02-12 00:53:46,784 - mmseg - INFO - Iter [116900/160000] lr: 1.616e-05, eta: 2:27:51, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2191, decode.acc_seg: 91.2635, aux.loss_ce: 0.1645, aux.acc_seg: 83.9774, loss: 0.3836, grad_norm: 4.0056 2023-02-12 00:53:56,994 - mmseg - INFO - Iter [116950/160000] lr: 1.614e-05, eta: 2:27:41, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2143, decode.acc_seg: 91.4567, aux.loss_ce: 0.1635, aux.acc_seg: 83.9783, loss: 0.3778, grad_norm: 4.0095 2023-02-12 00:54:06,747 - mmseg - INFO - Saving checkpoint at 117000 iterations 2023-02-12 00:54:07,447 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:54:07,447 - mmseg - INFO - Iter [117000/160000] lr: 1.613e-05, eta: 2:27:30, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2190, decode.acc_seg: 91.2689, aux.loss_ce: 0.1680, aux.acc_seg: 83.5276, loss: 0.3870, grad_norm: 4.3467 2023-02-12 00:54:19,434 - mmseg - INFO - Iter [117050/160000] lr: 1.611e-05, eta: 2:27:21, time: 0.240, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2103, decode.acc_seg: 91.5471, aux.loss_ce: 0.1563, aux.acc_seg: 84.7227, loss: 0.3666, grad_norm: 3.5466 2023-02-12 00:54:29,813 - mmseg - INFO - Iter [117100/160000] lr: 1.609e-05, eta: 2:27:11, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2131, decode.acc_seg: 91.6469, aux.loss_ce: 0.1591, aux.acc_seg: 84.6286, loss: 0.3723, grad_norm: 4.2064 2023-02-12 00:54:39,579 - mmseg - INFO - Iter [117150/160000] lr: 1.607e-05, eta: 2:27:00, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2246, decode.acc_seg: 91.0522, aux.loss_ce: 0.1754, aux.acc_seg: 83.0863, loss: 0.4000, grad_norm: 4.7657 2023-02-12 00:54:49,362 - mmseg - INFO - Iter [117200/160000] lr: 1.605e-05, eta: 2:26:50, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2050, decode.acc_seg: 91.4923, aux.loss_ce: 0.1539, aux.acc_seg: 84.4602, loss: 0.3588, grad_norm: 4.8453 2023-02-12 00:54:59,515 - mmseg - INFO - Iter [117250/160000] lr: 1.603e-05, eta: 2:26:39, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2113, decode.acc_seg: 91.6163, aux.loss_ce: 0.1562, aux.acc_seg: 84.4313, loss: 0.3675, grad_norm: 4.0554 2023-02-12 00:55:09,726 - mmseg - INFO - Iter [117300/160000] lr: 1.601e-05, eta: 2:26:29, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2192, decode.acc_seg: 91.2980, aux.loss_ce: 0.1663, aux.acc_seg: 83.5335, loss: 0.3855, grad_norm: 4.0346 2023-02-12 00:55:19,683 - mmseg - INFO - Iter [117350/160000] lr: 1.599e-05, eta: 2:26:18, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.6491, aux.loss_ce: 0.1694, aux.acc_seg: 83.8064, loss: 0.3855, grad_norm: 4.5059 2023-02-12 00:55:30,153 - mmseg - INFO - Iter [117400/160000] lr: 1.598e-05, eta: 2:26:08, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2262, decode.acc_seg: 91.0575, aux.loss_ce: 0.1680, aux.acc_seg: 83.5575, loss: 0.3942, grad_norm: 6.3772 2023-02-12 00:55:40,479 - mmseg - INFO - Iter [117450/160000] lr: 1.596e-05, eta: 2:25:58, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2108, decode.acc_seg: 91.4296, aux.loss_ce: 
0.1577, aux.acc_seg: 84.2973, loss: 0.3685, grad_norm: 4.5987 2023-02-12 00:55:50,587 - mmseg - INFO - Iter [117500/160000] lr: 1.594e-05, eta: 2:25:48, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2239, decode.acc_seg: 90.9141, aux.loss_ce: 0.1716, aux.acc_seg: 82.7415, loss: 0.3955, grad_norm: 4.3392 2023-02-12 00:56:01,203 - mmseg - INFO - Iter [117550/160000] lr: 1.592e-05, eta: 2:25:37, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2062, decode.acc_seg: 91.8383, aux.loss_ce: 0.1602, aux.acc_seg: 84.4996, loss: 0.3664, grad_norm: 3.6439 2023-02-12 00:56:11,132 - mmseg - INFO - Iter [117600/160000] lr: 1.590e-05, eta: 2:25:27, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2211, decode.acc_seg: 91.0784, aux.loss_ce: 0.1680, aux.acc_seg: 83.5708, loss: 0.3891, grad_norm: 5.5667 2023-02-12 00:56:21,226 - mmseg - INFO - Iter [117650/160000] lr: 1.588e-05, eta: 2:25:17, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2073, decode.acc_seg: 91.7383, aux.loss_ce: 0.1608, aux.acc_seg: 84.4306, loss: 0.3681, grad_norm: 3.5894 2023-02-12 00:56:30,973 - mmseg - INFO - Iter [117700/160000] lr: 1.586e-05, eta: 2:25:06, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2232, decode.acc_seg: 91.1018, aux.loss_ce: 0.1666, aux.acc_seg: 83.7205, loss: 0.3897, grad_norm: 3.5789 2023-02-12 00:56:41,612 - mmseg - INFO - Iter [117750/160000] lr: 1.584e-05, eta: 2:24:56, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2138, decode.acc_seg: 91.5212, aux.loss_ce: 0.1604, aux.acc_seg: 84.4387, loss: 0.3742, grad_norm: 4.1300 2023-02-12 00:56:51,868 - mmseg - INFO - Iter [117800/160000] lr: 1.583e-05, eta: 2:24:46, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2278, decode.acc_seg: 91.1778, aux.loss_ce: 0.1748, aux.acc_seg: 83.4979, loss: 0.4026, grad_norm: 4.9583 2023-02-12 00:57:02,663 - mmseg - INFO - Iter [117850/160000] lr: 1.581e-05, eta: 2:24:35, time: 0.216, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2120, decode.acc_seg: 91.4731, aux.loss_ce: 0.1581, aux.acc_seg: 84.5296, loss: 0.3701, grad_norm: 4.1800 2023-02-12 00:57:12,576 - mmseg - INFO - Iter [117900/160000] lr: 1.579e-05, eta: 2:24:25, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2165, decode.acc_seg: 91.3749, aux.loss_ce: 0.1658, aux.acc_seg: 83.9595, loss: 0.3823, grad_norm: 4.1413 2023-02-12 00:57:22,773 - mmseg - INFO - Iter [117950/160000] lr: 1.577e-05, eta: 2:24:15, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2152, decode.acc_seg: 91.4442, aux.loss_ce: 0.1639, aux.acc_seg: 83.7975, loss: 0.3791, grad_norm: 3.9185 2023-02-12 00:57:33,215 - mmseg - INFO - Saving checkpoint at 118000 iterations 2023-02-12 00:57:33,899 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 00:57:33,899 - mmseg - INFO - Iter [118000/160000] lr: 1.575e-05, eta: 2:24:05, time: 0.223, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2147, decode.acc_seg: 91.3646, aux.loss_ce: 0.1647, aux.acc_seg: 83.5968, loss: 0.3795, grad_norm: 5.0852 2023-02-12 00:57:44,305 - mmseg - INFO - Iter [118050/160000] lr: 1.573e-05, eta: 2:23:55, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1997, decode.acc_seg: 91.9066, aux.loss_ce: 0.1591, aux.acc_seg: 84.3156, loss: 0.3587, grad_norm: 3.4644 2023-02-12 00:57:54,209 - mmseg - INFO - Iter [118100/160000] lr: 1.571e-05, eta: 2:23:44, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.5413, 
aux.loss_ce: 0.1635, aux.acc_seg: 84.1534, loss: 0.3797, grad_norm: 5.0334 2023-02-12 00:58:04,559 - mmseg - INFO - Iter [118150/160000] lr: 1.569e-05, eta: 2:23:34, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2248, decode.acc_seg: 91.1310, aux.loss_ce: 0.1697, aux.acc_seg: 83.6284, loss: 0.3945, grad_norm: 4.1882 2023-02-12 00:58:14,787 - mmseg - INFO - Iter [118200/160000] lr: 1.568e-05, eta: 2:23:23, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1983, decode.acc_seg: 91.9762, aux.loss_ce: 0.1581, aux.acc_seg: 84.2844, loss: 0.3565, grad_norm: 3.5832 2023-02-12 00:58:25,443 - mmseg - INFO - Iter [118250/160000] lr: 1.566e-05, eta: 2:23:13, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2288, decode.acc_seg: 90.8750, aux.loss_ce: 0.1718, aux.acc_seg: 83.2032, loss: 0.4005, grad_norm: 4.5688 2023-02-12 00:58:35,832 - mmseg - INFO - Iter [118300/160000] lr: 1.564e-05, eta: 2:23:03, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2035, decode.acc_seg: 92.1156, aux.loss_ce: 0.1613, aux.acc_seg: 84.4356, loss: 0.3648, grad_norm: 4.0308 2023-02-12 00:58:47,937 - mmseg - INFO - Iter [118350/160000] lr: 1.562e-05, eta: 2:22:54, time: 0.242, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2122, decode.acc_seg: 91.3682, aux.loss_ce: 0.1654, aux.acc_seg: 83.8649, loss: 0.3776, grad_norm: 4.0827 2023-02-12 00:58:57,704 - mmseg - INFO - Iter [118400/160000] lr: 1.560e-05, eta: 2:22:43, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.6457, aux.loss_ce: 0.1619, aux.acc_seg: 84.0958, loss: 0.3780, grad_norm: 4.0939 2023-02-12 00:59:07,884 - mmseg - INFO - Iter [118450/160000] lr: 1.558e-05, eta: 2:22:33, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2096, decode.acc_seg: 91.4445, aux.loss_ce: 0.1604, aux.acc_seg: 83.8579, loss: 0.3700, grad_norm: 4.0305 2023-02-12 00:59:17,917 - mmseg - INFO - Iter [118500/160000] lr: 1.556e-05, eta: 2:22:22, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2131, decode.acc_seg: 91.4688, aux.loss_ce: 0.1627, aux.acc_seg: 83.9874, loss: 0.3758, grad_norm: 4.1770 2023-02-12 00:59:27,755 - mmseg - INFO - Iter [118550/160000] lr: 1.554e-05, eta: 2:22:12, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2054, decode.acc_seg: 91.9933, aux.loss_ce: 0.1661, aux.acc_seg: 83.9586, loss: 0.3716, grad_norm: 3.7635 2023-02-12 00:59:37,516 - mmseg - INFO - Iter [118600/160000] lr: 1.553e-05, eta: 2:22:01, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2083, decode.acc_seg: 91.5895, aux.loss_ce: 0.1606, aux.acc_seg: 84.1317, loss: 0.3689, grad_norm: 4.6231 2023-02-12 00:59:47,413 - mmseg - INFO - Iter [118650/160000] lr: 1.551e-05, eta: 2:21:51, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2025, decode.acc_seg: 91.6871, aux.loss_ce: 0.1516, aux.acc_seg: 84.9632, loss: 0.3542, grad_norm: 3.6417 2023-02-12 00:59:57,515 - mmseg - INFO - Iter [118700/160000] lr: 1.549e-05, eta: 2:21:40, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2139, decode.acc_seg: 91.4256, aux.loss_ce: 0.1581, aux.acc_seg: 84.6449, loss: 0.3719, grad_norm: 4.4246 2023-02-12 01:00:07,423 - mmseg - INFO - Iter [118750/160000] lr: 1.547e-05, eta: 2:21:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2136, decode.acc_seg: 91.5355, aux.loss_ce: 0.1595, aux.acc_seg: 84.7146, loss: 0.3731, grad_norm: 5.1940 2023-02-12 01:00:17,272 - mmseg - INFO - Iter [118800/160000] lr: 1.545e-05, eta: 2:21:20, time: 
0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2080, decode.acc_seg: 91.6913, aux.loss_ce: 0.1633, aux.acc_seg: 83.9939, loss: 0.3713, grad_norm: 4.2451 2023-02-12 01:00:27,351 - mmseg - INFO - Iter [118850/160000] lr: 1.543e-05, eta: 2:21:09, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2060, decode.acc_seg: 91.5106, aux.loss_ce: 0.1581, aux.acc_seg: 84.3959, loss: 0.3641, grad_norm: 3.9549 2023-02-12 01:00:37,170 - mmseg - INFO - Iter [118900/160000] lr: 1.541e-05, eta: 2:20:59, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2154, decode.acc_seg: 91.5496, aux.loss_ce: 0.1658, aux.acc_seg: 84.0036, loss: 0.3813, grad_norm: 5.8559 2023-02-12 01:00:47,115 - mmseg - INFO - Iter [118950/160000] lr: 1.539e-05, eta: 2:20:48, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2178, decode.acc_seg: 91.2981, aux.loss_ce: 0.1628, aux.acc_seg: 84.1367, loss: 0.3806, grad_norm: 3.9256 2023-02-12 01:00:57,315 - mmseg - INFO - Saving checkpoint at 119000 iterations 2023-02-12 01:00:57,998 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:00:57,998 - mmseg - INFO - Iter [119000/160000] lr: 1.538e-05, eta: 2:20:38, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2145, decode.acc_seg: 91.4922, aux.loss_ce: 0.1573, aux.acc_seg: 84.6698, loss: 0.3718, grad_norm: 3.5875 2023-02-12 01:01:08,313 - mmseg - INFO - Iter [119050/160000] lr: 1.536e-05, eta: 2:20:28, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2088, decode.acc_seg: 91.7156, aux.loss_ce: 0.1609, aux.acc_seg: 84.3015, loss: 0.3697, grad_norm: 4.1880 2023-02-12 01:01:18,255 - mmseg - INFO - Iter [119100/160000] lr: 1.534e-05, eta: 2:20:18, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2152, decode.acc_seg: 91.5173, aux.loss_ce: 0.1646, aux.acc_seg: 83.8026, loss: 0.3797, grad_norm: 4.0076 2023-02-12 01:01:28,029 - mmseg - INFO - Iter [119150/160000] lr: 1.532e-05, eta: 2:20:07, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2201, decode.acc_seg: 91.5085, aux.loss_ce: 0.1611, aux.acc_seg: 84.3298, loss: 0.3812, grad_norm: 5.1876 2023-02-12 01:01:38,135 - mmseg - INFO - Iter [119200/160000] lr: 1.530e-05, eta: 2:19:57, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2128, decode.acc_seg: 91.3419, aux.loss_ce: 0.1613, aux.acc_seg: 84.2886, loss: 0.3741, grad_norm: 4.4001 2023-02-12 01:01:48,137 - mmseg - INFO - Iter [119250/160000] lr: 1.528e-05, eta: 2:19:46, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2068, decode.acc_seg: 91.5234, aux.loss_ce: 0.1618, aux.acc_seg: 84.0159, loss: 0.3686, grad_norm: 4.5090 2023-02-12 01:01:58,236 - mmseg - INFO - Iter [119300/160000] lr: 1.526e-05, eta: 2:19:36, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2140, decode.acc_seg: 91.6133, aux.loss_ce: 0.1714, aux.acc_seg: 83.3808, loss: 0.3854, grad_norm: 4.5007 2023-02-12 01:02:08,394 - mmseg - INFO - Iter [119350/160000] lr: 1.524e-05, eta: 2:19:26, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2216, decode.acc_seg: 91.4570, aux.loss_ce: 0.1707, aux.acc_seg: 83.4325, loss: 0.3923, grad_norm: 5.2253 2023-02-12 01:02:18,482 - mmseg - INFO - Iter [119400/160000] lr: 1.523e-05, eta: 2:19:15, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2242, decode.acc_seg: 91.2140, aux.loss_ce: 0.1655, aux.acc_seg: 83.9245, loss: 0.3897, grad_norm: 4.4034 2023-02-12 01:02:28,500 - mmseg - INFO - Iter [119450/160000] lr: 1.521e-05, eta: 2:19:05, 
time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2380, decode.acc_seg: 90.6973, aux.loss_ce: 0.1749, aux.acc_seg: 83.2125, loss: 0.4129, grad_norm: 5.5491 2023-02-12 01:02:38,767 - mmseg - INFO - Iter [119500/160000] lr: 1.519e-05, eta: 2:18:54, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2202, decode.acc_seg: 91.2576, aux.loss_ce: 0.1634, aux.acc_seg: 84.0929, loss: 0.3836, grad_norm: 4.5921 2023-02-12 01:02:48,712 - mmseg - INFO - Iter [119550/160000] lr: 1.517e-05, eta: 2:18:44, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2184, decode.acc_seg: 91.1478, aux.loss_ce: 0.1680, aux.acc_seg: 83.4617, loss: 0.3864, grad_norm: 4.4847 2023-02-12 01:03:01,009 - mmseg - INFO - Iter [119600/160000] lr: 1.515e-05, eta: 2:18:35, time: 0.246, data_time: 0.046, memory: 7748, decode.loss_ce: 0.2150, decode.acc_seg: 91.4568, aux.loss_ce: 0.1685, aux.acc_seg: 83.7198, loss: 0.3835, grad_norm: 4.8423 2023-02-12 01:03:11,185 - mmseg - INFO - Iter [119650/160000] lr: 1.513e-05, eta: 2:18:24, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2219, decode.acc_seg: 91.1837, aux.loss_ce: 0.1634, aux.acc_seg: 84.0962, loss: 0.3853, grad_norm: 4.6635 2023-02-12 01:03:21,045 - mmseg - INFO - Iter [119700/160000] lr: 1.511e-05, eta: 2:18:14, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2052, decode.acc_seg: 91.6580, aux.loss_ce: 0.1586, aux.acc_seg: 84.2660, loss: 0.3638, grad_norm: 3.8786 2023-02-12 01:03:30,780 - mmseg - INFO - Iter [119750/160000] lr: 1.509e-05, eta: 2:18:03, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2070, decode.acc_seg: 91.4509, aux.loss_ce: 0.1588, aux.acc_seg: 84.0935, loss: 0.3658, grad_norm: 4.1417 2023-02-12 01:03:40,737 - mmseg - INFO - Iter [119800/160000] lr: 1.508e-05, eta: 2:17:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2082, decode.acc_seg: 91.7082, aux.loss_ce: 0.1607, aux.acc_seg: 84.1710, loss: 0.3690, grad_norm: 4.9791 2023-02-12 01:03:51,206 - mmseg - INFO - Iter [119850/160000] lr: 1.506e-05, eta: 2:17:43, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2140, decode.acc_seg: 91.5694, aux.loss_ce: 0.1646, aux.acc_seg: 84.1581, loss: 0.3787, grad_norm: 4.6051 2023-02-12 01:04:02,002 - mmseg - INFO - Iter [119900/160000] lr: 1.504e-05, eta: 2:17:33, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.8038, aux.loss_ce: 0.1550, aux.acc_seg: 84.8394, loss: 0.3573, grad_norm: 4.0378 2023-02-12 01:04:11,926 - mmseg - INFO - Iter [119950/160000] lr: 1.502e-05, eta: 2:17:22, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2092, decode.acc_seg: 91.5846, aux.loss_ce: 0.1583, aux.acc_seg: 84.2825, loss: 0.3675, grad_norm: 4.4032 2023-02-12 01:04:21,822 - mmseg - INFO - Saving checkpoint at 120000 iterations 2023-02-12 01:04:22,494 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:04:22,495 - mmseg - INFO - Iter [120000/160000] lr: 1.500e-05, eta: 2:17:12, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2249, decode.acc_seg: 91.1024, aux.loss_ce: 0.1641, aux.acc_seg: 84.1623, loss: 0.3890, grad_norm: 5.2014 2023-02-12 01:04:32,225 - mmseg - INFO - Iter [120050/160000] lr: 1.498e-05, eta: 2:17:01, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2205, decode.acc_seg: 91.1012, aux.loss_ce: 0.1698, aux.acc_seg: 83.4663, loss: 0.3903, grad_norm: 4.0125 2023-02-12 01:04:42,132 - mmseg - INFO - Iter [120100/160000] lr: 1.496e-05, eta: 
2:16:51, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2069, decode.acc_seg: 91.8364, aux.loss_ce: 0.1626, aux.acc_seg: 84.2334, loss: 0.3696, grad_norm: 4.5541 2023-02-12 01:04:52,075 - mmseg - INFO - Iter [120150/160000] lr: 1.494e-05, eta: 2:16:41, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2250, decode.acc_seg: 91.2716, aux.loss_ce: 0.1681, aux.acc_seg: 83.9550, loss: 0.3931, grad_norm: 6.6108 2023-02-12 01:05:02,233 - mmseg - INFO - Iter [120200/160000] lr: 1.493e-05, eta: 2:16:30, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2182, decode.acc_seg: 91.3858, aux.loss_ce: 0.1584, aux.acc_seg: 84.7911, loss: 0.3766, grad_norm: 4.2251 2023-02-12 01:05:12,134 - mmseg - INFO - Iter [120250/160000] lr: 1.491e-05, eta: 2:16:20, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2176, decode.acc_seg: 91.3002, aux.loss_ce: 0.1657, aux.acc_seg: 83.7269, loss: 0.3832, grad_norm: 4.6174 2023-02-12 01:05:21,877 - mmseg - INFO - Iter [120300/160000] lr: 1.489e-05, eta: 2:16:09, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2093, decode.acc_seg: 91.6139, aux.loss_ce: 0.1654, aux.acc_seg: 83.7825, loss: 0.3746, grad_norm: 4.2532 2023-02-12 01:05:31,700 - mmseg - INFO - Iter [120350/160000] lr: 1.487e-05, eta: 2:15:59, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2159, decode.acc_seg: 91.4074, aux.loss_ce: 0.1620, aux.acc_seg: 84.3023, loss: 0.3780, grad_norm: 4.5328 2023-02-12 01:05:41,440 - mmseg - INFO - Iter [120400/160000] lr: 1.485e-05, eta: 2:15:48, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2228, decode.acc_seg: 91.3226, aux.loss_ce: 0.1738, aux.acc_seg: 83.3506, loss: 0.3966, grad_norm: 4.9414 2023-02-12 01:05:51,279 - mmseg - INFO - Iter [120450/160000] lr: 1.483e-05, eta: 2:15:38, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2204, decode.acc_seg: 91.4527, aux.loss_ce: 0.1788, aux.acc_seg: 83.0455, loss: 0.3991, grad_norm: 4.6481 2023-02-12 01:06:01,089 - mmseg - INFO - Iter [120500/160000] lr: 1.481e-05, eta: 2:15:27, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2128, decode.acc_seg: 91.4103, aux.loss_ce: 0.1633, aux.acc_seg: 84.0626, loss: 0.3761, grad_norm: 4.1038 2023-02-12 01:06:11,327 - mmseg - INFO - Iter [120550/160000] lr: 1.479e-05, eta: 2:15:17, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2266, decode.acc_seg: 91.1342, aux.loss_ce: 0.1765, aux.acc_seg: 83.0378, loss: 0.4031, grad_norm: 4.1212 2023-02-12 01:06:21,171 - mmseg - INFO - Iter [120600/160000] lr: 1.478e-05, eta: 2:15:07, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2135, decode.acc_seg: 91.4638, aux.loss_ce: 0.1606, aux.acc_seg: 84.4749, loss: 0.3741, grad_norm: 4.0363 2023-02-12 01:06:31,245 - mmseg - INFO - Iter [120650/160000] lr: 1.476e-05, eta: 2:14:56, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2208, decode.acc_seg: 91.0242, aux.loss_ce: 0.1653, aux.acc_seg: 83.7224, loss: 0.3861, grad_norm: 6.1943 2023-02-12 01:06:41,617 - mmseg - INFO - Iter [120700/160000] lr: 1.474e-05, eta: 2:14:46, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1995, decode.acc_seg: 91.8801, aux.loss_ce: 0.1546, aux.acc_seg: 84.4029, loss: 0.3542, grad_norm: 3.7821 2023-02-12 01:06:51,822 - mmseg - INFO - Iter [120750/160000] lr: 1.472e-05, eta: 2:14:36, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2253, decode.acc_seg: 91.0507, aux.loss_ce: 0.1654, aux.acc_seg: 83.8256, loss: 0.3907, grad_norm: 
4.5933 2023-02-12 01:07:01,637 - mmseg - INFO - Iter [120800/160000] lr: 1.470e-05, eta: 2:14:25, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2194, decode.acc_seg: 91.1636, aux.loss_ce: 0.1552, aux.acc_seg: 84.6866, loss: 0.3746, grad_norm: 4.5810 2023-02-12 01:07:13,585 - mmseg - INFO - Iter [120850/160000] lr: 1.468e-05, eta: 2:14:16, time: 0.239, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2086, decode.acc_seg: 91.7828, aux.loss_ce: 0.1588, aux.acc_seg: 84.6357, loss: 0.3673, grad_norm: 3.7268 2023-02-12 01:07:23,342 - mmseg - INFO - Iter [120900/160000] lr: 1.466e-05, eta: 2:14:05, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2115, decode.acc_seg: 91.7196, aux.loss_ce: 0.1646, aux.acc_seg: 84.0258, loss: 0.3761, grad_norm: 4.2105 2023-02-12 01:07:33,335 - mmseg - INFO - Iter [120950/160000] lr: 1.464e-05, eta: 2:13:55, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2209, decode.acc_seg: 91.2581, aux.loss_ce: 0.1590, aux.acc_seg: 84.5892, loss: 0.3800, grad_norm: 4.2838 2023-02-12 01:07:43,493 - mmseg - INFO - Saving checkpoint at 121000 iterations 2023-02-12 01:07:44,171 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:07:44,171 - mmseg - INFO - Iter [121000/160000] lr: 1.463e-05, eta: 2:13:45, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2191, decode.acc_seg: 91.0697, aux.loss_ce: 0.1648, aux.acc_seg: 83.7870, loss: 0.3839, grad_norm: 4.6178 2023-02-12 01:07:54,142 - mmseg - INFO - Iter [121050/160000] lr: 1.461e-05, eta: 2:13:34, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1975, decode.acc_seg: 91.9132, aux.loss_ce: 0.1545, aux.acc_seg: 84.4432, loss: 0.3520, grad_norm: 3.9518 2023-02-12 01:08:04,072 - mmseg - INFO - Iter [121100/160000] lr: 1.459e-05, eta: 2:13:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2058, decode.acc_seg: 91.6631, aux.loss_ce: 0.1620, aux.acc_seg: 84.2756, loss: 0.3678, grad_norm: 4.3889 2023-02-12 01:08:14,345 - mmseg - INFO - Iter [121150/160000] lr: 1.457e-05, eta: 2:13:13, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2231, decode.acc_seg: 91.0978, aux.loss_ce: 0.1727, aux.acc_seg: 83.7811, loss: 0.3958, grad_norm: 5.5038 2023-02-12 01:08:24,107 - mmseg - INFO - Iter [121200/160000] lr: 1.455e-05, eta: 2:13:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2098, decode.acc_seg: 91.5067, aux.loss_ce: 0.1608, aux.acc_seg: 84.1472, loss: 0.3705, grad_norm: 4.7290 2023-02-12 01:08:34,421 - mmseg - INFO - Iter [121250/160000] lr: 1.453e-05, eta: 2:12:53, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2020, decode.acc_seg: 91.9138, aux.loss_ce: 0.1576, aux.acc_seg: 84.3594, loss: 0.3596, grad_norm: 4.5485 2023-02-12 01:08:44,488 - mmseg - INFO - Iter [121300/160000] lr: 1.451e-05, eta: 2:12:42, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2196, decode.acc_seg: 91.2670, aux.loss_ce: 0.1605, aux.acc_seg: 84.3539, loss: 0.3801, grad_norm: 4.1978 2023-02-12 01:08:54,697 - mmseg - INFO - Iter [121350/160000] lr: 1.449e-05, eta: 2:12:32, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2154, decode.acc_seg: 91.6114, aux.loss_ce: 0.1649, aux.acc_seg: 84.1313, loss: 0.3803, grad_norm: 4.3665 2023-02-12 01:09:04,961 - mmseg - INFO - Iter [121400/160000] lr: 1.448e-05, eta: 2:12:22, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1961, decode.acc_seg: 92.4436, aux.loss_ce: 0.1547, aux.acc_seg: 85.2609, loss: 0.3508, 
grad_norm: 3.9291 2023-02-12 01:09:14,671 - mmseg - INFO - Iter [121450/160000] lr: 1.446e-05, eta: 2:12:11, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2125, decode.acc_seg: 91.5434, aux.loss_ce: 0.1647, aux.acc_seg: 83.9436, loss: 0.3772, grad_norm: 4.3982 2023-02-12 01:09:24,540 - mmseg - INFO - Iter [121500/160000] lr: 1.444e-05, eta: 2:12:01, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2090, decode.acc_seg: 91.6524, aux.loss_ce: 0.1603, aux.acc_seg: 84.4022, loss: 0.3694, grad_norm: 4.5598 2023-02-12 01:09:35,142 - mmseg - INFO - Iter [121550/160000] lr: 1.442e-05, eta: 2:11:51, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2178, decode.acc_seg: 91.4834, aux.loss_ce: 0.1648, aux.acc_seg: 83.9309, loss: 0.3826, grad_norm: 4.1039 2023-02-12 01:09:45,232 - mmseg - INFO - Iter [121600/160000] lr: 1.440e-05, eta: 2:11:40, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2081, decode.acc_seg: 91.4305, aux.loss_ce: 0.1560, aux.acc_seg: 84.6859, loss: 0.3641, grad_norm: 4.3122 2023-02-12 01:09:55,121 - mmseg - INFO - Iter [121650/160000] lr: 1.438e-05, eta: 2:11:30, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2096, decode.acc_seg: 91.5935, aux.loss_ce: 0.1620, aux.acc_seg: 84.3084, loss: 0.3716, grad_norm: 3.7281 2023-02-12 01:10:05,618 - mmseg - INFO - Iter [121700/160000] lr: 1.436e-05, eta: 2:11:20, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2244, decode.acc_seg: 91.0982, aux.loss_ce: 0.1701, aux.acc_seg: 83.5109, loss: 0.3945, grad_norm: 4.5528 2023-02-12 01:10:15,643 - mmseg - INFO - Iter [121750/160000] lr: 1.434e-05, eta: 2:11:09, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2167, decode.acc_seg: 91.4134, aux.loss_ce: 0.1618, aux.acc_seg: 84.3142, loss: 0.3785, grad_norm: 5.2957 2023-02-12 01:10:26,072 - mmseg - INFO - Iter [121800/160000] lr: 1.433e-05, eta: 2:10:59, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1940, decode.acc_seg: 92.1236, aux.loss_ce: 0.1493, aux.acc_seg: 85.1765, loss: 0.3432, grad_norm: 3.7545 2023-02-12 01:10:35,877 - mmseg - INFO - Iter [121850/160000] lr: 1.431e-05, eta: 2:10:48, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2131, decode.acc_seg: 91.4484, aux.loss_ce: 0.1614, aux.acc_seg: 84.0564, loss: 0.3744, grad_norm: 4.1991 2023-02-12 01:10:46,189 - mmseg - INFO - Iter [121900/160000] lr: 1.429e-05, eta: 2:10:38, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2046, decode.acc_seg: 91.8786, aux.loss_ce: 0.1589, aux.acc_seg: 84.4111, loss: 0.3634, grad_norm: 3.9665 2023-02-12 01:10:56,234 - mmseg - INFO - Iter [121950/160000] lr: 1.427e-05, eta: 2:10:28, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1976, decode.acc_seg: 92.0759, aux.loss_ce: 0.1549, aux.acc_seg: 84.7238, loss: 0.3525, grad_norm: 3.9972 2023-02-12 01:11:06,386 - mmseg - INFO - Saving checkpoint at 122000 iterations 2023-02-12 01:11:07,063 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:11:07,063 - mmseg - INFO - Iter [122000/160000] lr: 1.425e-05, eta: 2:10:18, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2084, decode.acc_seg: 91.7231, aux.loss_ce: 0.1601, aux.acc_seg: 84.5166, loss: 0.3685, grad_norm: 3.8500 2023-02-12 01:11:16,845 - mmseg - INFO - Iter [122050/160000] lr: 1.423e-05, eta: 2:10:07, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2144, decode.acc_seg: 91.3675, aux.loss_ce: 0.1626, aux.acc_seg: 84.1973, loss: 
0.3770, grad_norm: 7.1538 2023-02-12 01:11:29,171 - mmseg - INFO - Iter [122100/160000] lr: 1.421e-05, eta: 2:09:58, time: 0.247, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2148, decode.acc_seg: 91.3303, aux.loss_ce: 0.1637, aux.acc_seg: 83.9670, loss: 0.3785, grad_norm: 4.1988 2023-02-12 01:11:39,033 - mmseg - INFO - Iter [122150/160000] lr: 1.419e-05, eta: 2:09:47, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2179, decode.acc_seg: 91.2029, aux.loss_ce: 0.1656, aux.acc_seg: 83.7715, loss: 0.3835, grad_norm: 5.0006 2023-02-12 01:11:49,262 - mmseg - INFO - Iter [122200/160000] lr: 1.418e-05, eta: 2:09:37, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2212, decode.acc_seg: 91.1419, aux.loss_ce: 0.1615, aux.acc_seg: 83.9410, loss: 0.3828, grad_norm: 5.5538 2023-02-12 01:11:59,031 - mmseg - INFO - Iter [122250/160000] lr: 1.416e-05, eta: 2:09:27, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2063, decode.acc_seg: 91.7559, aux.loss_ce: 0.1613, aux.acc_seg: 84.1306, loss: 0.3677, grad_norm: 4.3078 2023-02-12 01:12:09,038 - mmseg - INFO - Iter [122300/160000] lr: 1.414e-05, eta: 2:09:16, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2006, decode.acc_seg: 91.9125, aux.loss_ce: 0.1549, aux.acc_seg: 84.6508, loss: 0.3555, grad_norm: 3.9925 2023-02-12 01:12:19,443 - mmseg - INFO - Iter [122350/160000] lr: 1.412e-05, eta: 2:09:06, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2072, decode.acc_seg: 91.7257, aux.loss_ce: 0.1594, aux.acc_seg: 84.5350, loss: 0.3665, grad_norm: 5.0199 2023-02-12 01:12:30,004 - mmseg - INFO - Iter [122400/160000] lr: 1.410e-05, eta: 2:08:56, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2105, decode.acc_seg: 91.6234, aux.loss_ce: 0.1603, aux.acc_seg: 84.2596, loss: 0.3708, grad_norm: 4.7448 2023-02-12 01:12:39,834 - mmseg - INFO - Iter [122450/160000] lr: 1.408e-05, eta: 2:08:45, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1909, decode.acc_seg: 92.1683, aux.loss_ce: 0.1440, aux.acc_seg: 85.5155, loss: 0.3350, grad_norm: 3.2989 2023-02-12 01:12:50,449 - mmseg - INFO - Iter [122500/160000] lr: 1.406e-05, eta: 2:08:35, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2010, decode.acc_seg: 91.7343, aux.loss_ce: 0.1573, aux.acc_seg: 84.0310, loss: 0.3583, grad_norm: 3.5174 2023-02-12 01:13:00,589 - mmseg - INFO - Iter [122550/160000] lr: 1.404e-05, eta: 2:08:25, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2225, decode.acc_seg: 91.0772, aux.loss_ce: 0.1725, aux.acc_seg: 83.2102, loss: 0.3950, grad_norm: 5.1272 2023-02-12 01:13:10,967 - mmseg - INFO - Iter [122600/160000] lr: 1.403e-05, eta: 2:08:14, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2071, decode.acc_seg: 91.8036, aux.loss_ce: 0.1629, aux.acc_seg: 84.4496, loss: 0.3701, grad_norm: 5.2366 2023-02-12 01:13:21,055 - mmseg - INFO - Iter [122650/160000] lr: 1.401e-05, eta: 2:08:04, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2051, decode.acc_seg: 91.6991, aux.loss_ce: 0.1586, aux.acc_seg: 84.4043, loss: 0.3638, grad_norm: 4.1740 2023-02-12 01:13:30,922 - mmseg - INFO - Iter [122700/160000] lr: 1.399e-05, eta: 2:07:54, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2163, decode.acc_seg: 91.2519, aux.loss_ce: 0.1625, aux.acc_seg: 84.0704, loss: 0.3788, grad_norm: 4.7042 2023-02-12 01:13:40,809 - mmseg - INFO - Iter [122750/160000] lr: 1.397e-05, eta: 2:07:43, time: 0.198, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.2164, decode.acc_seg: 91.5040, aux.loss_ce: 0.1715, aux.acc_seg: 83.2488, loss: 0.3878, grad_norm: 4.4870 2023-02-12 01:13:50,850 - mmseg - INFO - Iter [122800/160000] lr: 1.395e-05, eta: 2:07:33, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2010, decode.acc_seg: 91.8749, aux.loss_ce: 0.1618, aux.acc_seg: 84.0206, loss: 0.3629, grad_norm: 4.1611 2023-02-12 01:14:01,400 - mmseg - INFO - Iter [122850/160000] lr: 1.393e-05, eta: 2:07:23, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2006, decode.acc_seg: 91.9405, aux.loss_ce: 0.1604, aux.acc_seg: 84.3990, loss: 0.3610, grad_norm: 4.4304 2023-02-12 01:14:11,401 - mmseg - INFO - Iter [122900/160000] lr: 1.391e-05, eta: 2:07:12, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2127, decode.acc_seg: 91.4664, aux.loss_ce: 0.1642, aux.acc_seg: 84.2348, loss: 0.3770, grad_norm: 4.3877 2023-02-12 01:14:21,299 - mmseg - INFO - Iter [122950/160000] lr: 1.389e-05, eta: 2:07:02, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2102, decode.acc_seg: 91.6419, aux.loss_ce: 0.1613, aux.acc_seg: 84.2276, loss: 0.3715, grad_norm: 4.3765 2023-02-12 01:14:31,583 - mmseg - INFO - Saving checkpoint at 123000 iterations 2023-02-12 01:14:32,263 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:14:32,263 - mmseg - INFO - Iter [123000/160000] lr: 1.388e-05, eta: 2:06:52, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2042, decode.acc_seg: 91.7533, aux.loss_ce: 0.1637, aux.acc_seg: 84.0080, loss: 0.3680, grad_norm: 4.1944 2023-02-12 01:14:42,031 - mmseg - INFO - Iter [123050/160000] lr: 1.386e-05, eta: 2:06:41, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2100, decode.acc_seg: 91.6236, aux.loss_ce: 0.1589, aux.acc_seg: 84.3254, loss: 0.3688, grad_norm: 4.3053 2023-02-12 01:14:52,072 - mmseg - INFO - Iter [123100/160000] lr: 1.384e-05, eta: 2:06:31, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2139, decode.acc_seg: 91.4111, aux.loss_ce: 0.1635, aux.acc_seg: 83.9016, loss: 0.3774, grad_norm: 4.4204 2023-02-12 01:15:02,187 - mmseg - INFO - Iter [123150/160000] lr: 1.382e-05, eta: 2:06:21, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2130, decode.acc_seg: 91.5261, aux.loss_ce: 0.1690, aux.acc_seg: 84.0649, loss: 0.3820, grad_norm: 5.0889 2023-02-12 01:15:12,029 - mmseg - INFO - Iter [123200/160000] lr: 1.380e-05, eta: 2:06:10, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2038, decode.acc_seg: 91.7100, aux.loss_ce: 0.1566, aux.acc_seg: 84.5097, loss: 0.3604, grad_norm: 4.7480 2023-02-12 01:15:21,773 - mmseg - INFO - Iter [123250/160000] lr: 1.378e-05, eta: 2:06:00, time: 0.195, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2058, decode.acc_seg: 91.8182, aux.loss_ce: 0.1569, aux.acc_seg: 84.8942, loss: 0.3627, grad_norm: 4.1749 2023-02-12 01:15:31,874 - mmseg - INFO - Iter [123300/160000] lr: 1.376e-05, eta: 2:05:49, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2194, decode.acc_seg: 91.1872, aux.loss_ce: 0.1615, aux.acc_seg: 84.0793, loss: 0.3809, grad_norm: 4.3994 2023-02-12 01:15:41,957 - mmseg - INFO - Iter [123350/160000] lr: 1.374e-05, eta: 2:05:39, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2072, decode.acc_seg: 91.9373, aux.loss_ce: 0.1601, aux.acc_seg: 84.4803, loss: 0.3673, grad_norm: 3.9795 2023-02-12 01:15:54,252 - mmseg - INFO - Iter [123400/160000] lr: 1.373e-05, eta: 2:05:29, time: 0.246, data_time: 0.047, memory: 
7748, decode.loss_ce: 0.1983, decode.acc_seg: 92.0503, aux.loss_ce: 0.1550, aux.acc_seg: 84.6483, loss: 0.3533, grad_norm: 4.2939 2023-02-12 01:16:04,315 - mmseg - INFO - Iter [123450/160000] lr: 1.371e-05, eta: 2:05:19, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2068, decode.acc_seg: 91.5108, aux.loss_ce: 0.1542, aux.acc_seg: 84.6541, loss: 0.3609, grad_norm: 4.1033 2023-02-12 01:16:14,435 - mmseg - INFO - Iter [123500/160000] lr: 1.369e-05, eta: 2:05:09, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2013, decode.acc_seg: 91.8596, aux.loss_ce: 0.1598, aux.acc_seg: 84.3042, loss: 0.3610, grad_norm: 3.8184 2023-02-12 01:16:24,397 - mmseg - INFO - Iter [123550/160000] lr: 1.367e-05, eta: 2:04:58, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2095, decode.acc_seg: 91.5063, aux.loss_ce: 0.1640, aux.acc_seg: 83.7510, loss: 0.3736, grad_norm: 4.4458 2023-02-12 01:16:34,172 - mmseg - INFO - Iter [123600/160000] lr: 1.365e-05, eta: 2:04:48, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2157, decode.acc_seg: 91.3729, aux.loss_ce: 0.1695, aux.acc_seg: 83.5852, loss: 0.3852, grad_norm: 5.0101 2023-02-12 01:16:43,917 - mmseg - INFO - Iter [123650/160000] lr: 1.363e-05, eta: 2:04:37, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2129, decode.acc_seg: 91.6038, aux.loss_ce: 0.1664, aux.acc_seg: 83.8414, loss: 0.3794, grad_norm: 4.1320 2023-02-12 01:16:53,755 - mmseg - INFO - Iter [123700/160000] lr: 1.361e-05, eta: 2:04:27, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1978, decode.acc_seg: 91.9754, aux.loss_ce: 0.1543, aux.acc_seg: 84.8329, loss: 0.3520, grad_norm: 4.1796 2023-02-12 01:17:04,160 - mmseg - INFO - Iter [123750/160000] lr: 1.359e-05, eta: 2:04:17, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2039, decode.acc_seg: 91.6848, aux.loss_ce: 0.1565, aux.acc_seg: 84.5752, loss: 0.3603, grad_norm: 3.6018 2023-02-12 01:17:13,954 - mmseg - INFO - Iter [123800/160000] lr: 1.358e-05, eta: 2:04:06, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2087, decode.acc_seg: 91.6150, aux.loss_ce: 0.1594, aux.acc_seg: 84.3456, loss: 0.3680, grad_norm: 3.9097 2023-02-12 01:17:23,965 - mmseg - INFO - Iter [123850/160000] lr: 1.356e-05, eta: 2:03:56, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2206, decode.acc_seg: 91.3756, aux.loss_ce: 0.1767, aux.acc_seg: 83.2122, loss: 0.3973, grad_norm: 5.1235 2023-02-12 01:17:33,628 - mmseg - INFO - Iter [123900/160000] lr: 1.354e-05, eta: 2:03:45, time: 0.193, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2041, decode.acc_seg: 91.9335, aux.loss_ce: 0.1610, aux.acc_seg: 84.4778, loss: 0.3651, grad_norm: 4.2858 2023-02-12 01:17:43,560 - mmseg - INFO - Iter [123950/160000] lr: 1.352e-05, eta: 2:03:35, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2034, decode.acc_seg: 91.8615, aux.loss_ce: 0.1585, aux.acc_seg: 84.3083, loss: 0.3619, grad_norm: 4.0074 2023-02-12 01:17:54,096 - mmseg - INFO - Saving checkpoint at 124000 iterations 2023-02-12 01:17:54,771 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:17:54,771 - mmseg - INFO - Iter [124000/160000] lr: 1.350e-05, eta: 2:03:25, time: 0.224, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2180, decode.acc_seg: 91.2158, aux.loss_ce: 0.1639, aux.acc_seg: 83.9555, loss: 0.3819, grad_norm: 3.9619 2023-02-12 01:18:04,674 - mmseg - INFO - Iter [124050/160000] lr: 1.348e-05, eta: 2:03:15, time: 0.198, data_time: 0.004, 
memory: 7748, decode.loss_ce: 0.2052, decode.acc_seg: 91.9346, aux.loss_ce: 0.1584, aux.acc_seg: 84.5491, loss: 0.3635, grad_norm: 4.3443 2023-02-12 01:18:15,352 - mmseg - INFO - Iter [124100/160000] lr: 1.346e-05, eta: 2:03:04, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1972, decode.acc_seg: 92.0614, aux.loss_ce: 0.1588, aux.acc_seg: 84.3186, loss: 0.3560, grad_norm: 3.5390 2023-02-12 01:18:25,189 - mmseg - INFO - Iter [124150/160000] lr: 1.344e-05, eta: 2:02:54, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2084, decode.acc_seg: 91.5532, aux.loss_ce: 0.1548, aux.acc_seg: 84.9583, loss: 0.3632, grad_norm: 4.1635 2023-02-12 01:18:35,026 - mmseg - INFO - Iter [124200/160000] lr: 1.343e-05, eta: 2:02:44, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2259, decode.acc_seg: 91.1159, aux.loss_ce: 0.1682, aux.acc_seg: 84.0349, loss: 0.3941, grad_norm: 4.2752 2023-02-12 01:18:45,053 - mmseg - INFO - Iter [124250/160000] lr: 1.341e-05, eta: 2:02:33, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2216, decode.acc_seg: 91.2609, aux.loss_ce: 0.1714, aux.acc_seg: 83.6261, loss: 0.3930, grad_norm: 6.7835 2023-02-12 01:18:55,053 - mmseg - INFO - Iter [124300/160000] lr: 1.339e-05, eta: 2:02:23, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2032, decode.acc_seg: 91.7113, aux.loss_ce: 0.1514, aux.acc_seg: 84.5635, loss: 0.3545, grad_norm: 3.8557 2023-02-12 01:19:05,969 - mmseg - INFO - Iter [124350/160000] lr: 1.337e-05, eta: 2:02:13, time: 0.219, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.1115, aux.loss_ce: 0.1651, aux.acc_seg: 83.4616, loss: 0.3813, grad_norm: 3.8810 2023-02-12 01:19:15,792 - mmseg - INFO - Iter [124400/160000] lr: 1.335e-05, eta: 2:02:02, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2065, decode.acc_seg: 91.8095, aux.loss_ce: 0.1540, aux.acc_seg: 85.0901, loss: 0.3606, grad_norm: 4.7668 2023-02-12 01:19:25,836 - mmseg - INFO - Iter [124450/160000] lr: 1.333e-05, eta: 2:01:52, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2043, decode.acc_seg: 91.8490, aux.loss_ce: 0.1589, aux.acc_seg: 84.5087, loss: 0.3632, grad_norm: 5.4479 2023-02-12 01:19:35,830 - mmseg - INFO - Iter [124500/160000] lr: 1.331e-05, eta: 2:01:42, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2071, decode.acc_seg: 91.7670, aux.loss_ce: 0.1620, aux.acc_seg: 84.5507, loss: 0.3690, grad_norm: 4.1769 2023-02-12 01:19:46,109 - mmseg - INFO - Iter [124550/160000] lr: 1.329e-05, eta: 2:01:31, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2009, decode.acc_seg: 91.6441, aux.loss_ce: 0.1542, aux.acc_seg: 84.5553, loss: 0.3551, grad_norm: 3.9196 2023-02-12 01:19:56,429 - mmseg - INFO - Iter [124600/160000] lr: 1.328e-05, eta: 2:01:21, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2139, decode.acc_seg: 91.4437, aux.loss_ce: 0.1648, aux.acc_seg: 84.0217, loss: 0.3787, grad_norm: 4.2041 2023-02-12 01:20:09,096 - mmseg - INFO - Iter [124650/160000] lr: 1.326e-05, eta: 2:01:11, time: 0.254, data_time: 0.048, memory: 7748, decode.loss_ce: 0.2088, decode.acc_seg: 91.6228, aux.loss_ce: 0.1587, aux.acc_seg: 84.4308, loss: 0.3676, grad_norm: 4.0915 2023-02-12 01:20:19,088 - mmseg - INFO - Iter [124700/160000] lr: 1.324e-05, eta: 2:01:01, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2099, decode.acc_seg: 91.7123, aux.loss_ce: 0.1628, aux.acc_seg: 83.8670, loss: 0.3727, grad_norm: 4.4036 2023-02-12 01:20:28,843 - mmseg - 
INFO - Iter [124750/160000] lr: 1.322e-05, eta: 2:00:51, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2148, decode.acc_seg: 91.3683, aux.loss_ce: 0.1637, aux.acc_seg: 84.1228, loss: 0.3785, grad_norm: 3.9500 2023-02-12 01:20:38,676 - mmseg - INFO - Iter [124800/160000] lr: 1.320e-05, eta: 2:00:40, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1999, decode.acc_seg: 91.9651, aux.loss_ce: 0.1486, aux.acc_seg: 85.1050, loss: 0.3485, grad_norm: 3.8740 2023-02-12 01:20:48,895 - mmseg - INFO - Iter [124850/160000] lr: 1.318e-05, eta: 2:00:30, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2160, decode.acc_seg: 91.1343, aux.loss_ce: 0.1613, aux.acc_seg: 84.1988, loss: 0.3773, grad_norm: 4.4373 2023-02-12 01:20:58,886 - mmseg - INFO - Iter [124900/160000] lr: 1.316e-05, eta: 2:00:20, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2037, decode.acc_seg: 91.7558, aux.loss_ce: 0.1551, aux.acc_seg: 84.7257, loss: 0.3588, grad_norm: 3.9628 2023-02-12 01:21:08,656 - mmseg - INFO - Iter [124950/160000] lr: 1.314e-05, eta: 2:00:09, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2116, decode.acc_seg: 91.6439, aux.loss_ce: 0.1671, aux.acc_seg: 83.9167, loss: 0.3786, grad_norm: 4.3537 2023-02-12 01:21:18,557 - mmseg - INFO - Saving checkpoint at 125000 iterations 2023-02-12 01:21:19,246 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:21:19,246 - mmseg - INFO - Iter [125000/160000] lr: 1.313e-05, eta: 1:59:59, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2032, decode.acc_seg: 91.8171, aux.loss_ce: 0.1539, aux.acc_seg: 84.8911, loss: 0.3570, grad_norm: 3.9693 2023-02-12 01:21:28,915 - mmseg - INFO - Iter [125050/160000] lr: 1.311e-05, eta: 1:59:48, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2141, decode.acc_seg: 91.5215, aux.loss_ce: 0.1640, aux.acc_seg: 84.0438, loss: 0.3780, grad_norm: 4.7718 2023-02-12 01:21:39,039 - mmseg - INFO - Iter [125100/160000] lr: 1.309e-05, eta: 1:59:38, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2077, decode.acc_seg: 91.6036, aux.loss_ce: 0.1665, aux.acc_seg: 83.7516, loss: 0.3742, grad_norm: 4.7166 2023-02-12 01:21:49,764 - mmseg - INFO - Iter [125150/160000] lr: 1.307e-05, eta: 1:59:28, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2099, decode.acc_seg: 91.4075, aux.loss_ce: 0.1601, aux.acc_seg: 84.2703, loss: 0.3700, grad_norm: 4.2552 2023-02-12 01:22:00,135 - mmseg - INFO - Iter [125200/160000] lr: 1.305e-05, eta: 1:59:18, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.9877, aux.loss_ce: 0.1549, aux.acc_seg: 85.1618, loss: 0.3572, grad_norm: 4.2291 2023-02-12 01:22:10,042 - mmseg - INFO - Iter [125250/160000] lr: 1.303e-05, eta: 1:59:07, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2019, decode.acc_seg: 91.8202, aux.loss_ce: 0.1553, aux.acc_seg: 84.4001, loss: 0.3572, grad_norm: 3.8856 2023-02-12 01:22:20,400 - mmseg - INFO - Iter [125300/160000] lr: 1.301e-05, eta: 1:58:57, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2134, decode.acc_seg: 91.6233, aux.loss_ce: 0.1659, aux.acc_seg: 84.0778, loss: 0.3794, grad_norm: 3.9097 2023-02-12 01:22:30,461 - mmseg - INFO - Iter [125350/160000] lr: 1.299e-05, eta: 1:58:47, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2053, decode.acc_seg: 91.5515, aux.loss_ce: 0.1552, aux.acc_seg: 84.6194, loss: 0.3605, grad_norm: 3.9068 2023-02-12 01:22:40,402 - mmseg 
- INFO - Iter [125400/160000] lr: 1.298e-05, eta: 1:58:36, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2106, decode.acc_seg: 91.8024, aux.loss_ce: 0.1706, aux.acc_seg: 83.6819, loss: 0.3811, grad_norm: 4.5946 2023-02-12 01:22:50,372 - mmseg - INFO - Iter [125450/160000] lr: 1.296e-05, eta: 1:58:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2083, decode.acc_seg: 91.9174, aux.loss_ce: 0.1627, aux.acc_seg: 84.3047, loss: 0.3711, grad_norm: 4.5112 2023-02-12 01:23:00,464 - mmseg - INFO - Iter [125500/160000] lr: 1.294e-05, eta: 1:58:16, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2040, decode.acc_seg: 92.1129, aux.loss_ce: 0.1571, aux.acc_seg: 84.9780, loss: 0.3611, grad_norm: 4.3486 2023-02-12 01:23:10,547 - mmseg - INFO - Iter [125550/160000] lr: 1.292e-05, eta: 1:58:05, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2074, decode.acc_seg: 91.6251, aux.loss_ce: 0.1555, aux.acc_seg: 84.7657, loss: 0.3629, grad_norm: 4.5171 2023-02-12 01:23:20,944 - mmseg - INFO - Iter [125600/160000] lr: 1.290e-05, eta: 1:57:55, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2105, decode.acc_seg: 92.0548, aux.loss_ce: 0.1648, aux.acc_seg: 84.2180, loss: 0.3753, grad_norm: 4.3847 2023-02-12 01:23:30,956 - mmseg - INFO - Iter [125650/160000] lr: 1.288e-05, eta: 1:57:45, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2125, decode.acc_seg: 91.5518, aux.loss_ce: 0.1614, aux.acc_seg: 84.1548, loss: 0.3739, grad_norm: 3.9833 2023-02-12 01:23:41,054 - mmseg - INFO - Iter [125700/160000] lr: 1.286e-05, eta: 1:57:34, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2171, decode.acc_seg: 91.5113, aux.loss_ce: 0.1662, aux.acc_seg: 83.7527, loss: 0.3834, grad_norm: 9.7688 2023-02-12 01:23:50,978 - mmseg - INFO - Iter [125750/160000] lr: 1.284e-05, eta: 1:57:24, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2128, decode.acc_seg: 91.2931, aux.loss_ce: 0.1655, aux.acc_seg: 83.7095, loss: 0.3783, grad_norm: 5.0740 2023-02-12 01:24:01,021 - mmseg - INFO - Iter [125800/160000] lr: 1.283e-05, eta: 1:57:13, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2062, decode.acc_seg: 91.7286, aux.loss_ce: 0.1584, aux.acc_seg: 84.4606, loss: 0.3646, grad_norm: 3.8146 2023-02-12 01:24:11,059 - mmseg - INFO - Iter [125850/160000] lr: 1.281e-05, eta: 1:57:03, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2021, decode.acc_seg: 91.8538, aux.loss_ce: 0.1570, aux.acc_seg: 84.4112, loss: 0.3591, grad_norm: 4.0899 2023-02-12 01:24:22,885 - mmseg - INFO - Iter [125900/160000] lr: 1.279e-05, eta: 1:56:53, time: 0.237, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1946, decode.acc_seg: 92.0917, aux.loss_ce: 0.1482, aux.acc_seg: 85.1838, loss: 0.3428, grad_norm: 4.3869 2023-02-12 01:24:32,939 - mmseg - INFO - Iter [125950/160000] lr: 1.277e-05, eta: 1:56:43, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2082, decode.acc_seg: 91.9957, aux.loss_ce: 0.1582, aux.acc_seg: 85.1222, loss: 0.3663, grad_norm: 5.2548 2023-02-12 01:24:42,882 - mmseg - INFO - Saving checkpoint at 126000 iterations 2023-02-12 01:24:43,572 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:24:43,572 - mmseg - INFO - Iter [126000/160000] lr: 1.275e-05, eta: 1:56:33, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2045, decode.acc_seg: 91.9342, aux.loss_ce: 0.1617, aux.acc_seg: 84.1614, loss: 0.3661, grad_norm: 4.4737 2023-02-12 01:24:53,648 - 
mmseg - INFO - Iter [126050/160000] lr: 1.273e-05, eta: 1:56:22, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1994, decode.acc_seg: 91.8086, aux.loss_ce: 0.1591, aux.acc_seg: 84.3008, loss: 0.3585, grad_norm: 4.0624 2023-02-12 01:25:03,354 - mmseg - INFO - Iter [126100/160000] lr: 1.271e-05, eta: 1:56:12, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2062, decode.acc_seg: 91.6522, aux.loss_ce: 0.1495, aux.acc_seg: 84.9682, loss: 0.3557, grad_norm: 4.3001 2023-02-12 01:25:13,818 - mmseg - INFO - Iter [126150/160000] lr: 1.269e-05, eta: 1:56:02, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2082, decode.acc_seg: 91.8459, aux.loss_ce: 0.1532, aux.acc_seg: 85.5540, loss: 0.3614, grad_norm: 4.5140 2023-02-12 01:25:24,048 - mmseg - INFO - Iter [126200/160000] lr: 1.268e-05, eta: 1:55:51, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2032, decode.acc_seg: 91.9537, aux.loss_ce: 0.1559, aux.acc_seg: 84.9048, loss: 0.3591, grad_norm: 4.2592 2023-02-12 01:25:34,846 - mmseg - INFO - Iter [126250/160000] lr: 1.266e-05, eta: 1:55:41, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2097, decode.acc_seg: 91.6603, aux.loss_ce: 0.1545, aux.acc_seg: 85.0616, loss: 0.3642, grad_norm: 4.9346 2023-02-12 01:25:44,962 - mmseg - INFO - Iter [126300/160000] lr: 1.264e-05, eta: 1:55:31, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1950, decode.acc_seg: 92.2069, aux.loss_ce: 0.1511, aux.acc_seg: 85.3756, loss: 0.3460, grad_norm: 3.7356 2023-02-12 01:25:55,158 - mmseg - INFO - Iter [126350/160000] lr: 1.262e-05, eta: 1:55:21, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1999, decode.acc_seg: 91.9083, aux.loss_ce: 0.1607, aux.acc_seg: 84.2995, loss: 0.3606, grad_norm: 3.8315 2023-02-12 01:26:05,117 - mmseg - INFO - Iter [126400/160000] lr: 1.260e-05, eta: 1:55:10, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2052, decode.acc_seg: 91.6618, aux.loss_ce: 0.1579, aux.acc_seg: 84.4524, loss: 0.3631, grad_norm: 4.1332 2023-02-12 01:26:15,248 - mmseg - INFO - Iter [126450/160000] lr: 1.258e-05, eta: 1:55:00, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2037, decode.acc_seg: 91.9531, aux.loss_ce: 0.1622, aux.acc_seg: 83.9946, loss: 0.3659, grad_norm: 4.1135 2023-02-12 01:26:26,042 - mmseg - INFO - Iter [126500/160000] lr: 1.256e-05, eta: 1:54:50, time: 0.216, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2184, decode.acc_seg: 91.3733, aux.loss_ce: 0.1623, aux.acc_seg: 84.3965, loss: 0.3808, grad_norm: 4.1166 2023-02-12 01:26:36,542 - mmseg - INFO - Iter [126550/160000] lr: 1.254e-05, eta: 1:54:40, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2123, decode.acc_seg: 91.7889, aux.loss_ce: 0.1626, aux.acc_seg: 84.5213, loss: 0.3749, grad_norm: 4.3261 2023-02-12 01:26:46,549 - mmseg - INFO - Iter [126600/160000] lr: 1.253e-05, eta: 1:54:29, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2124, decode.acc_seg: 91.4507, aux.loss_ce: 0.1559, aux.acc_seg: 84.7095, loss: 0.3683, grad_norm: 3.8607 2023-02-12 01:26:56,620 - mmseg - INFO - Iter [126650/160000] lr: 1.251e-05, eta: 1:54:19, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1994, decode.acc_seg: 92.1326, aux.loss_ce: 0.1561, aux.acc_seg: 85.0304, loss: 0.3555, grad_norm: 3.9229 2023-02-12 01:27:06,854 - mmseg - INFO - Iter [126700/160000] lr: 1.249e-05, eta: 1:54:09, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2064, decode.acc_seg: 91.6167, aux.loss_ce: 
0.1614, aux.acc_seg: 84.2294, loss: 0.3678, grad_norm: 3.9160 2023-02-12 01:27:16,636 - mmseg - INFO - Iter [126750/160000] lr: 1.247e-05, eta: 1:53:58, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2151, decode.acc_seg: 91.0682, aux.loss_ce: 0.1680, aux.acc_seg: 83.3070, loss: 0.3830, grad_norm: 4.2293 2023-02-12 01:27:26,484 - mmseg - INFO - Iter [126800/160000] lr: 1.245e-05, eta: 1:53:48, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2069, decode.acc_seg: 91.8942, aux.loss_ce: 0.1645, aux.acc_seg: 84.2896, loss: 0.3714, grad_norm: 4.2658 2023-02-12 01:27:36,574 - mmseg - INFO - Iter [126850/160000] lr: 1.243e-05, eta: 1:53:37, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2095, decode.acc_seg: 91.7008, aux.loss_ce: 0.1611, aux.acc_seg: 84.5636, loss: 0.3706, grad_norm: 3.6225 2023-02-12 01:27:46,718 - mmseg - INFO - Iter [126900/160000] lr: 1.241e-05, eta: 1:53:27, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1968, decode.acc_seg: 91.9436, aux.loss_ce: 0.1606, aux.acc_seg: 84.0964, loss: 0.3574, grad_norm: 4.0434 2023-02-12 01:27:56,666 - mmseg - INFO - Iter [126950/160000] lr: 1.239e-05, eta: 1:53:17, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1979, decode.acc_seg: 91.7815, aux.loss_ce: 0.1552, aux.acc_seg: 84.8508, loss: 0.3532, grad_norm: 3.9080 2023-02-12 01:28:07,038 - mmseg - INFO - Saving checkpoint at 127000 iterations 2023-02-12 01:28:07,731 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:28:07,731 - mmseg - INFO - Iter [127000/160000] lr: 1.238e-05, eta: 1:53:07, time: 0.221, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.7194, aux.loss_ce: 0.1555, aux.acc_seg: 84.6205, loss: 0.3578, grad_norm: 4.0649 2023-02-12 01:28:17,459 - mmseg - INFO - Iter [127050/160000] lr: 1.236e-05, eta: 1:52:56, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.7870, aux.loss_ce: 0.1663, aux.acc_seg: 83.7482, loss: 0.3824, grad_norm: 4.0453 2023-02-12 01:28:27,315 - mmseg - INFO - Iter [127100/160000] lr: 1.234e-05, eta: 1:52:46, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2063, decode.acc_seg: 91.7458, aux.loss_ce: 0.1547, aux.acc_seg: 84.7765, loss: 0.3609, grad_norm: 3.6622 2023-02-12 01:28:39,604 - mmseg - INFO - Iter [127150/160000] lr: 1.232e-05, eta: 1:52:36, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2137, decode.acc_seg: 91.4862, aux.loss_ce: 0.1680, aux.acc_seg: 83.7436, loss: 0.3817, grad_norm: 4.0872 2023-02-12 01:28:49,703 - mmseg - INFO - Iter [127200/160000] lr: 1.230e-05, eta: 1:52:26, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.8016, aux.loss_ce: 0.1526, aux.acc_seg: 84.7382, loss: 0.3548, grad_norm: 3.6462 2023-02-12 01:28:59,548 - mmseg - INFO - Iter [127250/160000] lr: 1.228e-05, eta: 1:52:15, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1939, decode.acc_seg: 92.2864, aux.loss_ce: 0.1508, aux.acc_seg: 85.4070, loss: 0.3447, grad_norm: 3.2097 2023-02-12 01:29:09,728 - mmseg - INFO - Iter [127300/160000] lr: 1.226e-05, eta: 1:52:05, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2012, decode.acc_seg: 91.9821, aux.loss_ce: 0.1472, aux.acc_seg: 85.7096, loss: 0.3485, grad_norm: 3.5285 2023-02-12 01:29:19,384 - mmseg - INFO - Iter [127350/160000] lr: 1.224e-05, eta: 1:51:55, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2034, decode.acc_seg: 91.5923, 
aux.loss_ce: 0.1571, aux.acc_seg: 84.1298, loss: 0.3606, grad_norm: 3.9203 2023-02-12 01:29:29,121 - mmseg - INFO - Iter [127400/160000] lr: 1.223e-05, eta: 1:51:44, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2102, decode.acc_seg: 91.5365, aux.loss_ce: 0.1639, aux.acc_seg: 83.9334, loss: 0.3741, grad_norm: 4.2499 2023-02-12 01:29:38,898 - mmseg - INFO - Iter [127450/160000] lr: 1.221e-05, eta: 1:51:34, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2161, decode.acc_seg: 91.5033, aux.loss_ce: 0.1642, aux.acc_seg: 83.9760, loss: 0.3804, grad_norm: 4.5990 2023-02-12 01:29:49,608 - mmseg - INFO - Iter [127500/160000] lr: 1.219e-05, eta: 1:51:24, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1955, decode.acc_seg: 92.1959, aux.loss_ce: 0.1633, aux.acc_seg: 84.2790, loss: 0.3588, grad_norm: 4.6021 2023-02-12 01:29:59,358 - mmseg - INFO - Iter [127550/160000] lr: 1.217e-05, eta: 1:51:13, time: 0.196, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.3702, aux.loss_ce: 0.1477, aux.acc_seg: 85.6910, loss: 0.3429, grad_norm: 4.0109 2023-02-12 01:30:09,154 - mmseg - INFO - Iter [127600/160000] lr: 1.215e-05, eta: 1:51:03, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2033, decode.acc_seg: 92.0139, aux.loss_ce: 0.1514, aux.acc_seg: 85.1823, loss: 0.3547, grad_norm: 4.4715 2023-02-12 01:30:19,718 - mmseg - INFO - Iter [127650/160000] lr: 1.213e-05, eta: 1:50:52, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2112, decode.acc_seg: 91.6024, aux.loss_ce: 0.1637, aux.acc_seg: 84.1936, loss: 0.3748, grad_norm: 4.5778 2023-02-12 01:30:29,381 - mmseg - INFO - Iter [127700/160000] lr: 1.211e-05, eta: 1:50:42, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2041, decode.acc_seg: 91.8908, aux.loss_ce: 0.1597, aux.acc_seg: 84.4178, loss: 0.3638, grad_norm: 4.1740 2023-02-12 01:30:39,264 - mmseg - INFO - Iter [127750/160000] lr: 1.209e-05, eta: 1:50:32, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2119, decode.acc_seg: 91.6604, aux.loss_ce: 0.1648, aux.acc_seg: 84.1916, loss: 0.3767, grad_norm: 4.2088 2023-02-12 01:30:49,337 - mmseg - INFO - Iter [127800/160000] lr: 1.208e-05, eta: 1:50:21, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2015, decode.acc_seg: 92.0079, aux.loss_ce: 0.1548, aux.acc_seg: 84.9545, loss: 0.3563, grad_norm: 3.8119 2023-02-12 01:30:59,471 - mmseg - INFO - Iter [127850/160000] lr: 1.206e-05, eta: 1:50:11, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2050, decode.acc_seg: 91.6730, aux.loss_ce: 0.1519, aux.acc_seg: 85.1567, loss: 0.3569, grad_norm: 4.2856 2023-02-12 01:31:10,050 - mmseg - INFO - Iter [127900/160000] lr: 1.204e-05, eta: 1:50:01, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2014, decode.acc_seg: 91.9942, aux.loss_ce: 0.1549, aux.acc_seg: 84.5803, loss: 0.3563, grad_norm: 4.3641 2023-02-12 01:31:20,004 - mmseg - INFO - Iter [127950/160000] lr: 1.202e-05, eta: 1:49:50, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2050, decode.acc_seg: 91.6350, aux.loss_ce: 0.1618, aux.acc_seg: 84.0964, loss: 0.3668, grad_norm: 5.0546 2023-02-12 01:31:30,024 - mmseg - INFO - Saving checkpoint at 128000 iterations 2023-02-12 01:31:30,741 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:31:30,741 - mmseg - INFO - Iter [128000/160000] lr: 1.200e-05, eta: 1:49:40, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2260, decode.acc_seg: 
91.0142, aux.loss_ce: 0.1738, aux.acc_seg: 83.1848, loss: 0.3998, grad_norm: 4.9962 2023-02-12 01:31:42,224 - mmseg - INFO - per class results: 2023-02-12 01:31:42,230 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 75.26 | 85.66 | | building | 80.74 | 90.12 | | sky | 93.77 | 97.44 | | floor | 78.9 | 91.26 | | tree | 73.18 | 84.89 | | ceiling | 82.41 | 90.71 | | road | 82.3 | 89.17 | | bed | 87.4 | 94.48 | | windowpane | 60.0 | 77.18 | | grass | 66.91 | 81.27 | | cabinet | 57.74 | 66.99 | | sidewalk | 60.47 | 75.29 | | person | 77.36 | 91.7 | | earth | 36.1 | 48.71 | | door | 42.85 | 59.11 | | table | 53.56 | 69.63 | | mountain | 57.69 | 73.84 | | plant | 50.81 | 68.89 | | curtain | 69.58 | 85.14 | | chair | 52.01 | 63.76 | | car | 81.67 | 89.36 | | water | 51.28 | 71.15 | | painting | 65.49 | 86.52 | | sofa | 62.98 | 85.11 | | shelf | 42.0 | 61.44 | | house | 43.22 | 59.47 | | sea | 59.64 | 88.51 | | mirror | 58.1 | 65.81 | | rug | 55.49 | 61.61 | | field | 30.44 | 45.89 | | armchair | 37.17 | 50.65 | | seat | 51.96 | 70.66 | | fence | 32.62 | 47.12 | | desk | 40.19 | 68.05 | | rock | 31.46 | 48.71 | | wardrobe | 53.96 | 69.01 | | lamp | 58.13 | 76.67 | | bathtub | 73.46 | 78.3 | | railing | 31.07 | 44.77 | | cushion | 53.25 | 69.67 | | base | 28.41 | 36.68 | | box | 16.49 | 18.9 | | column | 40.09 | 52.38 | | signboard | 34.11 | 52.28 | | chest of drawers | 35.69 | 71.81 | | counter | 25.29 | 39.59 | | sand | 41.22 | 52.53 | | sink | 71.2 | 79.43 | | skyscraper | 60.54 | 83.9 | | fireplace | 69.89 | 88.64 | | refrigerator | 70.32 | 82.9 | | grandstand | 33.83 | 64.11 | | path | 18.87 | 39.54 | | stairs | 29.74 | 35.73 | | runway | 69.35 | 92.65 | | case | 41.56 | 61.76 | | pool table | 90.91 | 94.11 | | pillow | 51.07 | 60.16 | | screen door | 53.26 | 61.51 | | stairway | 30.8 | 39.71 | | river | 11.91 | 16.97 | | bridge | 61.46 | 81.39 | | bookcase | 30.32 | 50.08 | | blind | 45.98 | 60.9 | | coffee table | 46.82 | 84.29 | | toilet | 84.1 | 90.17 | | flower | 37.83 | 49.99 | | book | 43.26 | 66.19 | | hill | 4.59 | 5.86 | | bench | 46.4 | 58.11 | | countertop | 52.11 | 73.48 | | stove | 70.18 | 78.41 | | palm | 42.59 | 85.17 | | kitchen island | 34.6 | 78.18 | | computer | 62.39 | 82.42 | | swivel chair | 41.55 | 48.98 | | boat | 37.35 | 47.38 | | bar | 24.86 | 31.97 | | arcade machine | 39.75 | 42.8 | | hovel | 30.81 | 42.8 | | bus | 84.1 | 94.44 | | towel | 61.48 | 69.38 | | light | 53.04 | 64.83 | | truck | 34.17 | 46.93 | | tower | 27.21 | 41.85 | | chandelier | 63.93 | 81.91 | | awning | 25.09 | 37.86 | | streetlight | 24.85 | 34.49 | | booth | 46.5 | 48.5 | | television receiver | 64.64 | 79.87 | | airplane | 56.33 | 63.56 | | dirt track | 22.03 | 31.55 | | apparel | 32.98 | 53.61 | | pole | 16.99 | 24.65 | | land | 4.15 | 6.17 | | bannister | 12.26 | 16.13 | | escalator | 23.3 | 30.63 | | ottoman | 40.99 | 49.64 | | bottle | 32.48 | 42.78 | | buffet | 39.87 | 50.59 | | poster | 22.84 | 28.81 | | stage | 16.98 | 32.13 | | van | 45.73 | 62.35 | | ship | 57.99 | 85.33 | | fountain | 22.37 | 26.35 | | conveyer belt | 66.53 | 83.9 | | canopy | 22.2 | 34.47 | | washer | 63.62 | 70.93 | | plaything | 19.48 | 27.01 | | swimming pool | 63.54 | 67.9 | | stool | 38.94 | 57.47 | | barrel | 41.23 | 64.87 | | basket | 25.25 | 41.19 | | waterfall | 41.47 | 50.78 | | tent | 79.26 | 97.73 | | bag | 14.76 | 29.29 | | minibike | 59.98 | 70.44 | | cradle | 78.68 | 93.79 | | oven | 33.07 | 61.65 | | ball | 43.51 | 57.17 | 
| food | 38.68 | 44.18 | | step | 8.55 | 11.93 | | tank | 29.7 | 29.91 | | trade name | 30.99 | 43.15 | | microwave | 46.33 | 50.23 | | pot | 36.62 | 44.28 | | animal | 54.37 | 58.1 | | bicycle | 50.89 | 79.41 | | lake | 58.88 | 68.74 | | dishwasher | 51.17 | 86.04 | | screen | 47.56 | 73.71 | | blanket | 11.67 | 13.07 | | sculpture | 46.53 | 65.98 | | hood | 56.88 | 67.42 | | sconce | 40.27 | 55.63 | | vase | 31.85 | 54.39 | | traffic light | 31.09 | 53.59 | | tray | 3.7 | 8.16 | | ashcan | 32.6 | 53.94 | | fan | 55.99 | 72.63 | | pier | 47.1 | 81.64 | | crt screen | 0.12 | 0.37 | | plate | 49.86 | 72.54 | | monitor | 2.03 | 3.27 | | bulletin board | 48.23 | 59.16 | | shower | 0.0 | 0.0 | | radiator | 47.96 | 53.21 | | glass | 9.71 | 10.9 | | clock | 27.09 | 37.35 | | flag | 48.13 | 53.62 | +---------------------+-------+-------+ 2023-02-12 01:31:42,230 - mmseg - INFO - Summary: 2023-02-12 01:31:42,230 - mmseg - INFO - +-------+-------+-------+ | aAcc | mIoU | mAcc | +-------+-------+-------+ | 81.17 | 45.22 | 58.85 | +-------+-------+-------+ 2023-02-12 01:31:42,888 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_128000.pth. 2023-02-12 01:31:42,888 - mmseg - INFO - Best mIoU is 0.4522 at 128000 iter. 2023-02-12 01:31:42,888 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:31:42,889 - mmseg - INFO - Iter(val) [250] aAcc: 0.8117, mIoU: 0.4522, mAcc: 0.5885, IoU.wall: 0.7526, IoU.building: 0.8074, IoU.sky: 0.9377, IoU.floor: 0.7890, IoU.tree: 0.7318, IoU.ceiling: 0.8241, IoU.road: 0.8230, IoU.bed : 0.8740, IoU.windowpane: 0.6000, IoU.grass: 0.6691, IoU.cabinet: 0.5774, IoU.sidewalk: 0.6047, IoU.person: 0.7736, IoU.earth: 0.3610, IoU.door: 0.4285, IoU.table: 0.5356, IoU.mountain: 0.5769, IoU.plant: 0.5081, IoU.curtain: 0.6958, IoU.chair: 0.5201, IoU.car: 0.8167, IoU.water: 0.5128, IoU.painting: 0.6549, IoU.sofa: 0.6298, IoU.shelf: 0.4200, IoU.house: 0.4322, IoU.sea: 0.5964, IoU.mirror: 0.5810, IoU.rug: 0.5549, IoU.field: 0.3044, IoU.armchair: 0.3717, IoU.seat: 0.5196, IoU.fence: 0.3262, IoU.desk: 0.4019, IoU.rock: 0.3146, IoU.wardrobe: 0.5396, IoU.lamp: 0.5813, IoU.bathtub: 0.7346, IoU.railing: 0.3107, IoU.cushion: 0.5325, IoU.base: 0.2841, IoU.box: 0.1649, IoU.column: 0.4009, IoU.signboard: 0.3411, IoU.chest of drawers: 0.3569, IoU.counter: 0.2529, IoU.sand: 0.4122, IoU.sink: 0.7120, IoU.skyscraper: 0.6054, IoU.fireplace: 0.6989, IoU.refrigerator: 0.7032, IoU.grandstand: 0.3383, IoU.path: 0.1887, IoU.stairs: 0.2974, IoU.runway: 0.6935, IoU.case: 0.4156, IoU.pool table: 0.9091, IoU.pillow: 0.5107, IoU.screen door: 0.5326, IoU.stairway: 0.3080, IoU.river: 0.1191, IoU.bridge: 0.6146, IoU.bookcase: 0.3032, IoU.blind: 0.4598, IoU.coffee table: 0.4682, IoU.toilet: 0.8410, IoU.flower: 0.3783, IoU.book: 0.4326, IoU.hill: 0.0459, IoU.bench: 0.4640, IoU.countertop: 0.5211, IoU.stove: 0.7018, IoU.palm: 0.4259, IoU.kitchen island: 0.3460, IoU.computer: 0.6239, IoU.swivel chair: 0.4155, IoU.boat: 0.3735, IoU.bar: 0.2486, IoU.arcade machine: 0.3975, IoU.hovel: 0.3081, IoU.bus: 0.8410, IoU.towel: 0.6148, IoU.light: 0.5304, IoU.truck: 0.3417, IoU.tower: 0.2721, IoU.chandelier: 0.6393, IoU.awning: 0.2509, IoU.streetlight: 0.2485, IoU.booth: 0.4650, IoU.television receiver: 0.6464, IoU.airplane: 0.5633, IoU.dirt track: 0.2203, IoU.apparel: 0.3298, IoU.pole: 0.1699, IoU.land: 0.0415, IoU.bannister: 0.1226, IoU.escalator: 0.2330, IoU.ottoman: 0.4099, IoU.bottle: 0.3248, IoU.buffet: 0.3987, IoU.poster: 0.2284, IoU.stage: 0.1698, IoU.van: 0.4573, IoU.ship: 
0.5799, IoU.fountain: 0.2237, IoU.conveyer belt: 0.6653, IoU.canopy: 0.2220, IoU.washer: 0.6362, IoU.plaything: 0.1948, IoU.swimming pool: 0.6354, IoU.stool: 0.3894, IoU.barrel: 0.4123, IoU.basket: 0.2525, IoU.waterfall: 0.4147, IoU.tent: 0.7926, IoU.bag: 0.1476, IoU.minibike: 0.5998, IoU.cradle: 0.7868, IoU.oven: 0.3307, IoU.ball: 0.4351, IoU.food: 0.3868, IoU.step: 0.0855, IoU.tank: 0.2970, IoU.trade name: 0.3099, IoU.microwave: 0.4633, IoU.pot: 0.3662, IoU.animal: 0.5437, IoU.bicycle: 0.5089, IoU.lake: 0.5888, IoU.dishwasher: 0.5117, IoU.screen: 0.4756, IoU.blanket: 0.1167, IoU.sculpture: 0.4653, IoU.hood: 0.5688, IoU.sconce: 0.4027, IoU.vase: 0.3185, IoU.traffic light: 0.3109, IoU.tray: 0.0370, IoU.ashcan: 0.3260, IoU.fan: 0.5599, IoU.pier: 0.4710, IoU.crt screen: 0.0012, IoU.plate: 0.4986, IoU.monitor: 0.0203, IoU.bulletin board: 0.4823, IoU.shower: 0.0000, IoU.radiator: 0.4796, IoU.glass: 0.0971, IoU.clock: 0.2709, IoU.flag: 0.4813, Acc.wall: 0.8566, Acc.building: 0.9012, Acc.sky: 0.9744, Acc.floor: 0.9126, Acc.tree: 0.8489, Acc.ceiling: 0.9071, Acc.road: 0.8917, Acc.bed : 0.9448, Acc.windowpane: 0.7718, Acc.grass: 0.8127, Acc.cabinet: 0.6699, Acc.sidewalk: 0.7529, Acc.person: 0.9170, Acc.earth: 0.4871, Acc.door: 0.5911, Acc.table: 0.6963, Acc.mountain: 0.7384, Acc.plant: 0.6889, Acc.curtain: 0.8514, Acc.chair: 0.6376, Acc.car: 0.8936, Acc.water: 0.7115, Acc.painting: 0.8652, Acc.sofa: 0.8511, Acc.shelf: 0.6144, Acc.house: 0.5947, Acc.sea: 0.8851, Acc.mirror: 0.6581, Acc.rug: 0.6161, Acc.field: 0.4589, Acc.armchair: 0.5065, Acc.seat: 0.7066, Acc.fence: 0.4712, Acc.desk: 0.6805, Acc.rock: 0.4871, Acc.wardrobe: 0.6901, Acc.lamp: 0.7667, Acc.bathtub: 0.7830, Acc.railing: 0.4477, Acc.cushion: 0.6967, Acc.base: 0.3668, Acc.box: 0.1890, Acc.column: 0.5238, Acc.signboard: 0.5228, Acc.chest of drawers: 0.7181, Acc.counter: 0.3959, Acc.sand: 0.5253, Acc.sink: 0.7943, Acc.skyscraper: 0.8390, Acc.fireplace: 0.8864, Acc.refrigerator: 0.8290, Acc.grandstand: 0.6411, Acc.path: 0.3954, Acc.stairs: 0.3573, Acc.runway: 0.9265, Acc.case: 0.6176, Acc.pool table: 0.9411, Acc.pillow: 0.6016, Acc.screen door: 0.6151, Acc.stairway: 0.3971, Acc.river: 0.1697, Acc.bridge: 0.8139, Acc.bookcase: 0.5008, Acc.blind: 0.6090, Acc.coffee table: 0.8429, Acc.toilet: 0.9017, Acc.flower: 0.4999, Acc.book: 0.6619, Acc.hill: 0.0586, Acc.bench: 0.5811, Acc.countertop: 0.7348, Acc.stove: 0.7841, Acc.palm: 0.8517, Acc.kitchen island: 0.7818, Acc.computer: 0.8242, Acc.swivel chair: 0.4898, Acc.boat: 0.4738, Acc.bar: 0.3197, Acc.arcade machine: 0.4280, Acc.hovel: 0.4280, Acc.bus: 0.9444, Acc.towel: 0.6938, Acc.light: 0.6483, Acc.truck: 0.4693, Acc.tower: 0.4185, Acc.chandelier: 0.8191, Acc.awning: 0.3786, Acc.streetlight: 0.3449, Acc.booth: 0.4850, Acc.television receiver: 0.7987, Acc.airplane: 0.6356, Acc.dirt track: 0.3155, Acc.apparel: 0.5361, Acc.pole: 0.2465, Acc.land: 0.0617, Acc.bannister: 0.1613, Acc.escalator: 0.3063, Acc.ottoman: 0.4964, Acc.bottle: 0.4278, Acc.buffet: 0.5059, Acc.poster: 0.2881, Acc.stage: 0.3213, Acc.van: 0.6235, Acc.ship: 0.8533, Acc.fountain: 0.2635, Acc.conveyer belt: 0.8390, Acc.canopy: 0.3447, Acc.washer: 0.7093, Acc.plaything: 0.2701, Acc.swimming pool: 0.6790, Acc.stool: 0.5747, Acc.barrel: 0.6487, Acc.basket: 0.4119, Acc.waterfall: 0.5078, Acc.tent: 0.9773, Acc.bag: 0.2929, Acc.minibike: 0.7044, Acc.cradle: 0.9379, Acc.oven: 0.6165, Acc.ball: 0.5717, Acc.food: 0.4418, Acc.step: 0.1193, Acc.tank: 0.2991, Acc.trade name: 0.4315, Acc.microwave: 0.5023, Acc.pot: 0.4428, Acc.animal: 0.5810, 
Acc.bicycle: 0.7941, Acc.lake: 0.6874, Acc.dishwasher: 0.8604, Acc.screen: 0.7371, Acc.blanket: 0.1307, Acc.sculpture: 0.6598, Acc.hood: 0.6742, Acc.sconce: 0.5563, Acc.vase: 0.5439, Acc.traffic light: 0.5359, Acc.tray: 0.0816, Acc.ashcan: 0.5394, Acc.fan: 0.7263, Acc.pier: 0.8164, Acc.crt screen: 0.0037, Acc.plate: 0.7254, Acc.monitor: 0.0327, Acc.bulletin board: 0.5916, Acc.shower: 0.0000, Acc.radiator: 0.5321, Acc.glass: 0.1090, Acc.clock: 0.3735, Acc.flag: 0.5362 2023-02-12 01:31:52,735 - mmseg - INFO - Iter [128050/160000] lr: 1.198e-05, eta: 1:49:33, time: 0.439, data_time: 0.247, memory: 7748, decode.loss_ce: 0.1950, decode.acc_seg: 92.3183, aux.loss_ce: 0.1565, aux.acc_seg: 84.8512, loss: 0.3514, grad_norm: 3.6539 2023-02-12 01:32:02,875 - mmseg - INFO - Iter [128100/160000] lr: 1.196e-05, eta: 1:49:23, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2073, decode.acc_seg: 91.7720, aux.loss_ce: 0.1660, aux.acc_seg: 83.9996, loss: 0.3733, grad_norm: 4.5679 2023-02-12 01:32:13,742 - mmseg - INFO - Iter [128150/160000] lr: 1.194e-05, eta: 1:49:13, time: 0.218, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2085, decode.acc_seg: 91.4776, aux.loss_ce: 0.1608, aux.acc_seg: 84.4814, loss: 0.3693, grad_norm: 4.3791 2023-02-12 01:32:23,827 - mmseg - INFO - Iter [128200/160000] lr: 1.193e-05, eta: 1:49:02, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2167, decode.acc_seg: 91.3652, aux.loss_ce: 0.1613, aux.acc_seg: 84.2334, loss: 0.3780, grad_norm: 4.5055 2023-02-12 01:32:33,685 - mmseg - INFO - Iter [128250/160000] lr: 1.191e-05, eta: 1:48:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2004, decode.acc_seg: 91.8117, aux.loss_ce: 0.1567, aux.acc_seg: 84.4527, loss: 0.3570, grad_norm: 4.5628 2023-02-12 01:32:43,423 - mmseg - INFO - Iter [128300/160000] lr: 1.189e-05, eta: 1:48:42, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2232, decode.acc_seg: 91.2865, aux.loss_ce: 0.1644, aux.acc_seg: 83.8971, loss: 0.3876, grad_norm: 4.4706 2023-02-12 01:32:53,784 - mmseg - INFO - Iter [128350/160000] lr: 1.187e-05, eta: 1:48:31, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1978, decode.acc_seg: 92.1243, aux.loss_ce: 0.1546, aux.acc_seg: 84.7426, loss: 0.3523, grad_norm: 4.5701 2023-02-12 01:33:03,731 - mmseg - INFO - Iter [128400/160000] lr: 1.185e-05, eta: 1:48:21, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2098, decode.acc_seg: 91.7599, aux.loss_ce: 0.1695, aux.acc_seg: 83.7187, loss: 0.3792, grad_norm: 4.1929 2023-02-12 01:33:15,759 - mmseg - INFO - Iter [128450/160000] lr: 1.183e-05, eta: 1:48:11, time: 0.241, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2068, decode.acc_seg: 91.7433, aux.loss_ce: 0.1614, aux.acc_seg: 84.2530, loss: 0.3681, grad_norm: 4.7531 2023-02-12 01:33:25,868 - mmseg - INFO - Iter [128500/160000] lr: 1.181e-05, eta: 1:48:01, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2005, decode.acc_seg: 91.9522, aux.loss_ce: 0.1571, aux.acc_seg: 84.8329, loss: 0.3577, grad_norm: 4.3345 2023-02-12 01:33:36,385 - mmseg - INFO - Iter [128550/160000] lr: 1.179e-05, eta: 1:47:51, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2060, decode.acc_seg: 91.9426, aux.loss_ce: 0.1569, aux.acc_seg: 84.7295, loss: 0.3629, grad_norm: 4.2244 2023-02-12 01:33:46,163 - mmseg - INFO - Iter [128600/160000] lr: 1.178e-05, eta: 1:47:40, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2035, decode.acc_seg: 91.8041, aux.loss_ce: 0.1608, aux.acc_seg: 
84.4880, loss: 0.3643, grad_norm: 4.4280 2023-02-12 01:33:56,068 - mmseg - INFO - Iter [128650/160000] lr: 1.176e-05, eta: 1:47:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2067, decode.acc_seg: 91.9394, aux.loss_ce: 0.1565, aux.acc_seg: 85.1806, loss: 0.3632, grad_norm: 4.2246 2023-02-12 01:34:06,443 - mmseg - INFO - Iter [128700/160000] lr: 1.174e-05, eta: 1:47:20, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1936, decode.acc_seg: 92.2279, aux.loss_ce: 0.1521, aux.acc_seg: 85.0695, loss: 0.3457, grad_norm: 3.8058 2023-02-12 01:34:16,599 - mmseg - INFO - Iter [128750/160000] lr: 1.172e-05, eta: 1:47:09, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2070, decode.acc_seg: 91.5841, aux.loss_ce: 0.1633, aux.acc_seg: 83.6016, loss: 0.3703, grad_norm: 4.1795 2023-02-12 01:34:26,805 - mmseg - INFO - Iter [128800/160000] lr: 1.170e-05, eta: 1:46:59, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2182, decode.acc_seg: 91.2458, aux.loss_ce: 0.1624, aux.acc_seg: 84.1696, loss: 0.3806, grad_norm: 4.5791 2023-02-12 01:34:37,140 - mmseg - INFO - Iter [128850/160000] lr: 1.168e-05, eta: 1:46:49, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2130, decode.acc_seg: 91.4736, aux.loss_ce: 0.1627, aux.acc_seg: 84.0714, loss: 0.3757, grad_norm: 3.8871 2023-02-12 01:34:47,180 - mmseg - INFO - Iter [128900/160000] lr: 1.166e-05, eta: 1:46:38, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1861, decode.acc_seg: 92.6986, aux.loss_ce: 0.1475, aux.acc_seg: 85.5108, loss: 0.3336, grad_norm: 3.8099 2023-02-12 01:34:57,510 - mmseg - INFO - Iter [128950/160000] lr: 1.164e-05, eta: 1:46:28, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2027, decode.acc_seg: 91.9875, aux.loss_ce: 0.1532, aux.acc_seg: 84.9964, loss: 0.3560, grad_norm: 4.3426 2023-02-12 01:35:07,645 - mmseg - INFO - Saving checkpoint at 129000 iterations 2023-02-12 01:35:08,322 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:35:08,322 - mmseg - INFO - Iter [129000/160000] lr: 1.163e-05, eta: 1:46:18, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2024, decode.acc_seg: 91.4564, aux.loss_ce: 0.1558, aux.acc_seg: 84.5512, loss: 0.3582, grad_norm: 3.9854 2023-02-12 01:35:18,759 - mmseg - INFO - Iter [129050/160000] lr: 1.161e-05, eta: 1:46:08, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1965, decode.acc_seg: 91.7838, aux.loss_ce: 0.1485, aux.acc_seg: 85.2792, loss: 0.3450, grad_norm: 4.0509 2023-02-12 01:35:28,720 - mmseg - INFO - Iter [129100/160000] lr: 1.159e-05, eta: 1:45:57, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2072, decode.acc_seg: 91.4473, aux.loss_ce: 0.1619, aux.acc_seg: 83.6826, loss: 0.3691, grad_norm: 4.5074 2023-02-12 01:35:39,035 - mmseg - INFO - Iter [129150/160000] lr: 1.157e-05, eta: 1:45:47, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2053, decode.acc_seg: 91.6931, aux.loss_ce: 0.1563, aux.acc_seg: 84.4267, loss: 0.3616, grad_norm: 3.9717 2023-02-12 01:35:49,083 - mmseg - INFO - Iter [129200/160000] lr: 1.155e-05, eta: 1:45:37, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2128, decode.acc_seg: 91.5294, aux.loss_ce: 0.1588, aux.acc_seg: 84.4758, loss: 0.3716, grad_norm: 4.7399 2023-02-12 01:35:59,104 - mmseg - INFO - Iter [129250/160000] lr: 1.153e-05, eta: 1:45:26, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2077, decode.acc_seg: 91.7583, aux.loss_ce: 0.1591, 
aux.acc_seg: 84.4907, loss: 0.3668, grad_norm: 3.9070 2023-02-12 01:36:09,600 - mmseg - INFO - Iter [129300/160000] lr: 1.151e-05, eta: 1:45:16, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.8765, aux.loss_ce: 0.1493, aux.acc_seg: 85.3608, loss: 0.3516, grad_norm: 4.6786 2023-02-12 01:36:19,462 - mmseg - INFO - Iter [129350/160000] lr: 1.149e-05, eta: 1:45:06, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1966, decode.acc_seg: 92.1104, aux.loss_ce: 0.1563, aux.acc_seg: 84.3236, loss: 0.3529, grad_norm: 3.6210 2023-02-12 01:36:29,218 - mmseg - INFO - Iter [129400/160000] lr: 1.148e-05, eta: 1:44:55, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2081, decode.acc_seg: 91.8607, aux.loss_ce: 0.1557, aux.acc_seg: 84.8854, loss: 0.3639, grad_norm: 4.2867 2023-02-12 01:36:39,342 - mmseg - INFO - Iter [129450/160000] lr: 1.146e-05, eta: 1:44:45, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2024, decode.acc_seg: 92.0202, aux.loss_ce: 0.1550, aux.acc_seg: 84.8820, loss: 0.3574, grad_norm: 4.6277 2023-02-12 01:36:49,230 - mmseg - INFO - Iter [129500/160000] lr: 1.144e-05, eta: 1:44:34, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2080, decode.acc_seg: 91.4966, aux.loss_ce: 0.1602, aux.acc_seg: 84.1283, loss: 0.3682, grad_norm: 3.6436 2023-02-12 01:36:59,350 - mmseg - INFO - Iter [129550/160000] lr: 1.142e-05, eta: 1:44:24, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2050, decode.acc_seg: 91.7179, aux.loss_ce: 0.1605, aux.acc_seg: 84.2658, loss: 0.3655, grad_norm: 4.1364 2023-02-12 01:37:09,386 - mmseg - INFO - Iter [129600/160000] lr: 1.140e-05, eta: 1:44:14, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2034, decode.acc_seg: 91.6694, aux.loss_ce: 0.1500, aux.acc_seg: 84.9951, loss: 0.3535, grad_norm: 3.7203 2023-02-12 01:37:19,092 - mmseg - INFO - Iter [129650/160000] lr: 1.138e-05, eta: 1:44:03, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2159, decode.acc_seg: 91.3384, aux.loss_ce: 0.1696, aux.acc_seg: 83.7584, loss: 0.3855, grad_norm: 4.7419 2023-02-12 01:37:31,009 - mmseg - INFO - Iter [129700/160000] lr: 1.136e-05, eta: 1:43:53, time: 0.238, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1933, decode.acc_seg: 92.3741, aux.loss_ce: 0.1582, aux.acc_seg: 84.6375, loss: 0.3516, grad_norm: 3.4466 2023-02-12 01:37:40,898 - mmseg - INFO - Iter [129750/160000] lr: 1.134e-05, eta: 1:43:43, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2109, decode.acc_seg: 91.5093, aux.loss_ce: 0.1597, aux.acc_seg: 84.3465, loss: 0.3706, grad_norm: 4.9950 2023-02-12 01:37:51,183 - mmseg - INFO - Iter [129800/160000] lr: 1.133e-05, eta: 1:43:33, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1866, decode.acc_seg: 92.3533, aux.loss_ce: 0.1516, aux.acc_seg: 84.9512, loss: 0.3382, grad_norm: 4.1282 2023-02-12 01:38:01,744 - mmseg - INFO - Iter [129850/160000] lr: 1.131e-05, eta: 1:43:23, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2087, decode.acc_seg: 91.5843, aux.loss_ce: 0.1605, aux.acc_seg: 84.3591, loss: 0.3692, grad_norm: 3.9322 2023-02-12 01:38:11,594 - mmseg - INFO - Iter [129900/160000] lr: 1.129e-05, eta: 1:43:12, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2014, decode.acc_seg: 91.8185, aux.loss_ce: 0.1593, aux.acc_seg: 84.3914, loss: 0.3607, grad_norm: 3.8958 2023-02-12 01:38:21,487 - mmseg - INFO - Iter [129950/160000] lr: 1.127e-05, eta: 1:43:02, time: 0.198, data_time: 
0.004, memory: 7748, decode.loss_ce: 0.2162, decode.acc_seg: 91.4311, aux.loss_ce: 0.1654, aux.acc_seg: 84.1314, loss: 0.3816, grad_norm: 3.9440 2023-02-12 01:38:31,463 - mmseg - INFO - Saving checkpoint at 130000 iterations 2023-02-12 01:38:32,139 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:38:32,139 - mmseg - INFO - Iter [130000/160000] lr: 1.125e-05, eta: 1:42:52, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1998, decode.acc_seg: 91.8604, aux.loss_ce: 0.1565, aux.acc_seg: 84.7450, loss: 0.3563, grad_norm: 3.5859 2023-02-12 01:38:42,358 - mmseg - INFO - Iter [130050/160000] lr: 1.123e-05, eta: 1:42:41, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2007, decode.acc_seg: 91.8347, aux.loss_ce: 0.1538, aux.acc_seg: 84.7496, loss: 0.3545, grad_norm: 4.1562 2023-02-12 01:38:52,713 - mmseg - INFO - Iter [130100/160000] lr: 1.121e-05, eta: 1:42:31, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1972, decode.acc_seg: 91.7677, aux.loss_ce: 0.1476, aux.acc_seg: 85.2359, loss: 0.3448, grad_norm: 4.0140 2023-02-12 01:39:02,656 - mmseg - INFO - Iter [130150/160000] lr: 1.119e-05, eta: 1:42:21, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1924, decode.acc_seg: 92.2033, aux.loss_ce: 0.1484, aux.acc_seg: 85.1684, loss: 0.3408, grad_norm: 3.3966 2023-02-12 01:39:12,972 - mmseg - INFO - Iter [130200/160000] lr: 1.118e-05, eta: 1:42:10, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1957, decode.acc_seg: 91.9251, aux.loss_ce: 0.1510, aux.acc_seg: 84.8571, loss: 0.3467, grad_norm: 3.7218 2023-02-12 01:39:22,939 - mmseg - INFO - Iter [130250/160000] lr: 1.116e-05, eta: 1:42:00, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2074, decode.acc_seg: 91.3058, aux.loss_ce: 0.1605, aux.acc_seg: 83.9845, loss: 0.3679, grad_norm: 4.9159 2023-02-12 01:39:32,826 - mmseg - INFO - Iter [130300/160000] lr: 1.114e-05, eta: 1:41:50, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1977, decode.acc_seg: 92.1296, aux.loss_ce: 0.1528, aux.acc_seg: 85.0736, loss: 0.3506, grad_norm: 3.9600 2023-02-12 01:39:42,628 - mmseg - INFO - Iter [130350/160000] lr: 1.112e-05, eta: 1:41:39, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1922, decode.acc_seg: 92.1711, aux.loss_ce: 0.1603, aux.acc_seg: 84.3358, loss: 0.3525, grad_norm: 3.7931 2023-02-12 01:39:53,050 - mmseg - INFO - Iter [130400/160000] lr: 1.110e-05, eta: 1:41:29, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2042, decode.acc_seg: 91.6554, aux.loss_ce: 0.1552, aux.acc_seg: 84.5522, loss: 0.3594, grad_norm: 3.9958 2023-02-12 01:40:02,928 - mmseg - INFO - Iter [130450/160000] lr: 1.108e-05, eta: 1:41:19, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1989, decode.acc_seg: 92.0058, aux.loss_ce: 0.1589, aux.acc_seg: 84.4773, loss: 0.3578, grad_norm: 5.1720 2023-02-12 01:40:13,101 - mmseg - INFO - Iter [130500/160000] lr: 1.106e-05, eta: 1:41:08, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1936, decode.acc_seg: 92.3722, aux.loss_ce: 0.1494, aux.acc_seg: 85.4031, loss: 0.3430, grad_norm: 3.9173 2023-02-12 01:40:23,112 - mmseg - INFO - Iter [130550/160000] lr: 1.104e-05, eta: 1:40:58, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2089, decode.acc_seg: 91.6800, aux.loss_ce: 0.1589, aux.acc_seg: 84.5369, loss: 0.3677, grad_norm: 4.1285 2023-02-12 01:40:33,266 - mmseg - INFO - Iter [130600/160000] lr: 1.103e-05, eta: 1:40:48, time: 0.203, 
data_time: 0.004, memory: 7748, decode.loss_ce: 0.1959, decode.acc_seg: 92.0831, aux.loss_ce: 0.1467, aux.acc_seg: 85.4446, loss: 0.3426, grad_norm: 3.7619 2023-02-12 01:40:43,327 - mmseg - INFO - Iter [130650/160000] lr: 1.101e-05, eta: 1:40:37, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1947, decode.acc_seg: 91.9947, aux.loss_ce: 0.1522, aux.acc_seg: 84.9996, loss: 0.3469, grad_norm: 4.4207 2023-02-12 01:40:53,727 - mmseg - INFO - Iter [130700/160000] lr: 1.099e-05, eta: 1:40:27, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2112, decode.acc_seg: 91.4473, aux.loss_ce: 0.1603, aux.acc_seg: 84.1258, loss: 0.3715, grad_norm: 5.1048 2023-02-12 01:41:04,036 - mmseg - INFO - Iter [130750/160000] lr: 1.097e-05, eta: 1:40:17, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1959, decode.acc_seg: 92.0422, aux.loss_ce: 0.1569, aux.acc_seg: 84.4513, loss: 0.3528, grad_norm: 3.4438 2023-02-12 01:41:14,316 - mmseg - INFO - Iter [130800/160000] lr: 1.095e-05, eta: 1:40:06, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1939, decode.acc_seg: 92.3994, aux.loss_ce: 0.1544, aux.acc_seg: 85.0806, loss: 0.3483, grad_norm: 4.2583 2023-02-12 01:41:24,440 - mmseg - INFO - Iter [130850/160000] lr: 1.093e-05, eta: 1:39:56, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2038, decode.acc_seg: 91.9329, aux.loss_ce: 0.1589, aux.acc_seg: 84.6885, loss: 0.3627, grad_norm: 3.9470 2023-02-12 01:41:34,227 - mmseg - INFO - Iter [130900/160000] lr: 1.091e-05, eta: 1:39:46, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1987, decode.acc_seg: 91.9605, aux.loss_ce: 0.1584, aux.acc_seg: 84.0820, loss: 0.3570, grad_norm: 4.0268 2023-02-12 01:41:46,392 - mmseg - INFO - Iter [130950/160000] lr: 1.089e-05, eta: 1:39:36, time: 0.243, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2052, decode.acc_seg: 91.6768, aux.loss_ce: 0.1610, aux.acc_seg: 84.3673, loss: 0.3662, grad_norm: 4.4251 2023-02-12 01:41:56,519 - mmseg - INFO - Saving checkpoint at 131000 iterations 2023-02-12 01:41:57,279 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:41:57,280 - mmseg - INFO - Iter [131000/160000] lr: 1.088e-05, eta: 1:39:26, time: 0.218, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2007, decode.acc_seg: 91.8715, aux.loss_ce: 0.1543, aux.acc_seg: 84.7132, loss: 0.3550, grad_norm: 4.1613 2023-02-12 01:42:07,187 - mmseg - INFO - Iter [131050/160000] lr: 1.086e-05, eta: 1:39:15, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1934, decode.acc_seg: 92.3730, aux.loss_ce: 0.1481, aux.acc_seg: 85.4854, loss: 0.3416, grad_norm: 3.9531 2023-02-12 01:42:17,764 - mmseg - INFO - Iter [131100/160000] lr: 1.084e-05, eta: 1:39:05, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1944, decode.acc_seg: 92.1454, aux.loss_ce: 0.1527, aux.acc_seg: 84.8100, loss: 0.3471, grad_norm: 4.6995 2023-02-12 01:42:27,624 - mmseg - INFO - Iter [131150/160000] lr: 1.082e-05, eta: 1:38:55, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2031, decode.acc_seg: 91.8768, aux.loss_ce: 0.1548, aux.acc_seg: 84.7633, loss: 0.3579, grad_norm: 4.1225 2023-02-12 01:42:37,405 - mmseg - INFO - Iter [131200/160000] lr: 1.080e-05, eta: 1:38:44, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1974, decode.acc_seg: 92.3919, aux.loss_ce: 0.1583, aux.acc_seg: 84.9550, loss: 0.3557, grad_norm: 3.7504 2023-02-12 01:42:47,761 - mmseg - INFO - Iter [131250/160000] lr: 1.078e-05, eta: 1:38:34, time: 
0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2077, decode.acc_seg: 91.8052, aux.loss_ce: 0.1566, aux.acc_seg: 84.8753, loss: 0.3643, grad_norm: 4.1404 2023-02-12 01:42:57,467 - mmseg - INFO - Iter [131300/160000] lr: 1.076e-05, eta: 1:38:24, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1954, decode.acc_seg: 91.9160, aux.loss_ce: 0.1535, aux.acc_seg: 84.5675, loss: 0.3490, grad_norm: 4.3985 2023-02-12 01:43:07,474 - mmseg - INFO - Iter [131350/160000] lr: 1.074e-05, eta: 1:38:13, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2100, decode.acc_seg: 91.5784, aux.loss_ce: 0.1610, aux.acc_seg: 84.4871, loss: 0.3710, grad_norm: 4.4181 2023-02-12 01:43:17,677 - mmseg - INFO - Iter [131400/160000] lr: 1.073e-05, eta: 1:38:03, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2074, decode.acc_seg: 91.8113, aux.loss_ce: 0.1633, aux.acc_seg: 83.9941, loss: 0.3708, grad_norm: 4.6625 2023-02-12 01:43:27,414 - mmseg - INFO - Iter [131450/160000] lr: 1.071e-05, eta: 1:37:52, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2036, decode.acc_seg: 91.8588, aux.loss_ce: 0.1638, aux.acc_seg: 84.3095, loss: 0.3674, grad_norm: 4.5073 2023-02-12 01:43:37,148 - mmseg - INFO - Iter [131500/160000] lr: 1.069e-05, eta: 1:37:42, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2078, decode.acc_seg: 91.6702, aux.loss_ce: 0.1601, aux.acc_seg: 84.3371, loss: 0.3679, grad_norm: 4.6726 2023-02-12 01:43:47,320 - mmseg - INFO - Iter [131550/160000] lr: 1.067e-05, eta: 1:37:32, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1974, decode.acc_seg: 92.0486, aux.loss_ce: 0.1630, aux.acc_seg: 84.1977, loss: 0.3604, grad_norm: 3.8897 2023-02-12 01:43:57,213 - mmseg - INFO - Iter [131600/160000] lr: 1.065e-05, eta: 1:37:21, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2067, decode.acc_seg: 91.5189, aux.loss_ce: 0.1598, aux.acc_seg: 84.4399, loss: 0.3664, grad_norm: 4.2813 2023-02-12 01:44:07,053 - mmseg - INFO - Iter [131650/160000] lr: 1.063e-05, eta: 1:37:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2107, decode.acc_seg: 91.4519, aux.loss_ce: 0.1608, aux.acc_seg: 84.0893, loss: 0.3715, grad_norm: 4.1684 2023-02-12 01:44:17,394 - mmseg - INFO - Iter [131700/160000] lr: 1.061e-05, eta: 1:37:01, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1966, decode.acc_seg: 92.2998, aux.loss_ce: 0.1497, aux.acc_seg: 85.5537, loss: 0.3462, grad_norm: 4.0931 2023-02-12 01:44:27,608 - mmseg - INFO - Iter [131750/160000] lr: 1.059e-05, eta: 1:36:50, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2032, decode.acc_seg: 91.9574, aux.loss_ce: 0.1546, aux.acc_seg: 85.0721, loss: 0.3578, grad_norm: 4.9728 2023-02-12 01:44:38,483 - mmseg - INFO - Iter [131800/160000] lr: 1.058e-05, eta: 1:36:40, time: 0.218, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2099, decode.acc_seg: 91.5975, aux.loss_ce: 0.1608, aux.acc_seg: 84.2504, loss: 0.3707, grad_norm: 3.9428 2023-02-12 01:44:49,354 - mmseg - INFO - Iter [131850/160000] lr: 1.056e-05, eta: 1:36:30, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2054, decode.acc_seg: 91.6855, aux.loss_ce: 0.1576, aux.acc_seg: 84.4570, loss: 0.3630, grad_norm: 4.7742 2023-02-12 01:44:59,853 - mmseg - INFO - Iter [131900/160000] lr: 1.054e-05, eta: 1:36:20, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1978, decode.acc_seg: 92.1913, aux.loss_ce: 0.1506, aux.acc_seg: 85.3195, loss: 0.3484, grad_norm: 4.1237 2023-02-12 
01:45:10,156 - mmseg - INFO - Iter [131950/160000] lr: 1.052e-05, eta: 1:36:10, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1964, decode.acc_seg: 92.0487, aux.loss_ce: 0.1507, aux.acc_seg: 85.0732, loss: 0.3471, grad_norm: 3.4799 2023-02-12 01:45:20,717 - mmseg - INFO - Saving checkpoint at 132000 iterations 2023-02-12 01:45:21,395 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:45:21,395 - mmseg - INFO - Iter [132000/160000] lr: 1.050e-05, eta: 1:36:00, time: 0.225, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1934, decode.acc_seg: 92.2893, aux.loss_ce: 0.1486, aux.acc_seg: 85.5622, loss: 0.3420, grad_norm: 3.6245 2023-02-12 01:45:32,105 - mmseg - INFO - Iter [132050/160000] lr: 1.048e-05, eta: 1:35:49, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2054, decode.acc_seg: 91.6316, aux.loss_ce: 0.1599, aux.acc_seg: 84.0457, loss: 0.3653, grad_norm: 4.0059 2023-02-12 01:45:41,959 - mmseg - INFO - Iter [132100/160000] lr: 1.046e-05, eta: 1:35:39, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1953, decode.acc_seg: 91.8935, aux.loss_ce: 0.1491, aux.acc_seg: 85.0523, loss: 0.3444, grad_norm: 3.9649 2023-02-12 01:45:52,256 - mmseg - INFO - Iter [132150/160000] lr: 1.044e-05, eta: 1:35:29, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1939, decode.acc_seg: 92.1619, aux.loss_ce: 0.1550, aux.acc_seg: 84.7927, loss: 0.3489, grad_norm: 3.7321 2023-02-12 01:46:04,841 - mmseg - INFO - Iter [132200/160000] lr: 1.043e-05, eta: 1:35:19, time: 0.252, data_time: 0.048, memory: 7748, decode.loss_ce: 0.1993, decode.acc_seg: 92.0437, aux.loss_ce: 0.1466, aux.acc_seg: 85.4812, loss: 0.3459, grad_norm: 3.5830 2023-02-12 01:46:14,749 - mmseg - INFO - Iter [132250/160000] lr: 1.041e-05, eta: 1:35:09, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1974, decode.acc_seg: 92.0804, aux.loss_ce: 0.1504, aux.acc_seg: 85.0299, loss: 0.3477, grad_norm: 3.7731 2023-02-12 01:46:25,125 - mmseg - INFO - Iter [132300/160000] lr: 1.039e-05, eta: 1:34:58, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2093, decode.acc_seg: 91.7171, aux.loss_ce: 0.1614, aux.acc_seg: 84.1068, loss: 0.3707, grad_norm: 4.4729 2023-02-12 01:46:34,985 - mmseg - INFO - Iter [132350/160000] lr: 1.037e-05, eta: 1:34:48, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1935, decode.acc_seg: 92.1196, aux.loss_ce: 0.1508, aux.acc_seg: 85.1635, loss: 0.3443, grad_norm: 3.7134 2023-02-12 01:46:45,279 - mmseg - INFO - Iter [132400/160000] lr: 1.035e-05, eta: 1:34:38, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2025, decode.acc_seg: 91.8920, aux.loss_ce: 0.1561, aux.acc_seg: 84.8134, loss: 0.3585, grad_norm: 4.2328 2023-02-12 01:46:54,987 - mmseg - INFO - Iter [132450/160000] lr: 1.033e-05, eta: 1:34:27, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2057, decode.acc_seg: 91.6473, aux.loss_ce: 0.1580, aux.acc_seg: 84.4495, loss: 0.3637, grad_norm: 6.3700 2023-02-12 01:47:04,811 - mmseg - INFO - Iter [132500/160000] lr: 1.031e-05, eta: 1:34:17, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1979, decode.acc_seg: 92.1999, aux.loss_ce: 0.1583, aux.acc_seg: 84.7643, loss: 0.3562, grad_norm: 3.8995 2023-02-12 01:47:14,503 - mmseg - INFO - Iter [132550/160000] lr: 1.029e-05, eta: 1:34:06, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2014, decode.acc_seg: 91.9993, aux.loss_ce: 0.1586, aux.acc_seg: 84.4370, loss: 0.3600, grad_norm: 4.0878 
2023-02-12 01:47:25,029 - mmseg - INFO - Iter [132600/160000] lr: 1.028e-05, eta: 1:33:56, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2114, decode.acc_seg: 91.4860, aux.loss_ce: 0.1616, aux.acc_seg: 84.3685, loss: 0.3730, grad_norm: 4.2536 2023-02-12 01:47:35,089 - mmseg - INFO - Iter [132650/160000] lr: 1.026e-05, eta: 1:33:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2038, decode.acc_seg: 91.9242, aux.loss_ce: 0.1584, aux.acc_seg: 84.6526, loss: 0.3622, grad_norm: 4.3739 2023-02-12 01:47:44,925 - mmseg - INFO - Iter [132700/160000] lr: 1.024e-05, eta: 1:33:35, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2162, decode.acc_seg: 91.2597, aux.loss_ce: 0.1651, aux.acc_seg: 83.7531, loss: 0.3814, grad_norm: 4.5480 2023-02-12 01:47:54,718 - mmseg - INFO - Iter [132750/160000] lr: 1.022e-05, eta: 1:33:25, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1864, decode.acc_seg: 92.3363, aux.loss_ce: 0.1456, aux.acc_seg: 85.2804, loss: 0.3320, grad_norm: 3.9698 2023-02-12 01:48:04,987 - mmseg - INFO - Iter [132800/160000] lr: 1.020e-05, eta: 1:33:15, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1937, decode.acc_seg: 92.1620, aux.loss_ce: 0.1494, aux.acc_seg: 85.2837, loss: 0.3431, grad_norm: 4.1163 2023-02-12 01:48:14,676 - mmseg - INFO - Iter [132850/160000] lr: 1.018e-05, eta: 1:33:04, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2106, decode.acc_seg: 91.6572, aux.loss_ce: 0.1604, aux.acc_seg: 84.4870, loss: 0.3710, grad_norm: 4.3445 2023-02-12 01:48:24,581 - mmseg - INFO - Iter [132900/160000] lr: 1.016e-05, eta: 1:32:54, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2064, decode.acc_seg: 91.6890, aux.loss_ce: 0.1568, aux.acc_seg: 84.4704, loss: 0.3632, grad_norm: 4.3532 2023-02-12 01:48:34,617 - mmseg - INFO - Iter [132950/160000] lr: 1.014e-05, eta: 1:32:44, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1882, decode.acc_seg: 92.2561, aux.loss_ce: 0.1481, aux.acc_seg: 85.1509, loss: 0.3363, grad_norm: 3.6824 2023-02-12 01:48:44,628 - mmseg - INFO - Saving checkpoint at 133000 iterations 2023-02-12 01:48:45,304 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:48:45,304 - mmseg - INFO - Iter [133000/160000] lr: 1.013e-05, eta: 1:32:33, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2108, decode.acc_seg: 91.4576, aux.loss_ce: 0.1624, aux.acc_seg: 84.1190, loss: 0.3732, grad_norm: 4.8357 2023-02-12 01:48:55,367 - mmseg - INFO - Iter [133050/160000] lr: 1.011e-05, eta: 1:32:23, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2012, decode.acc_seg: 91.5865, aux.loss_ce: 0.1558, aux.acc_seg: 84.2248, loss: 0.3570, grad_norm: 4.4116 2023-02-12 01:49:05,521 - mmseg - INFO - Iter [133100/160000] lr: 1.009e-05, eta: 1:32:13, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2098, decode.acc_seg: 91.8188, aux.loss_ce: 0.1576, aux.acc_seg: 84.8318, loss: 0.3675, grad_norm: 4.7266 2023-02-12 01:49:15,696 - mmseg - INFO - Iter [133150/160000] lr: 1.007e-05, eta: 1:32:02, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1981, decode.acc_seg: 92.1153, aux.loss_ce: 0.1576, aux.acc_seg: 84.9307, loss: 0.3557, grad_norm: 3.9079 2023-02-12 01:49:25,878 - mmseg - INFO - Iter [133200/160000] lr: 1.005e-05, eta: 1:31:52, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2052, decode.acc_seg: 91.7462, aux.loss_ce: 0.1586, aux.acc_seg: 84.5725, loss: 0.3638, grad_norm: 
4.1439 2023-02-12 01:49:35,948 - mmseg - INFO - Iter [133250/160000] lr: 1.003e-05, eta: 1:31:42, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1954, decode.acc_seg: 92.0222, aux.loss_ce: 0.1537, aux.acc_seg: 84.8405, loss: 0.3492, grad_norm: 3.9043 2023-02-12 01:49:45,892 - mmseg - INFO - Iter [133300/160000] lr: 1.001e-05, eta: 1:31:31, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2092, decode.acc_seg: 91.3967, aux.loss_ce: 0.1572, aux.acc_seg: 84.5196, loss: 0.3664, grad_norm: 4.8903 2023-02-12 01:49:56,360 - mmseg - INFO - Iter [133350/160000] lr: 9.994e-06, eta: 1:31:21, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.3618, aux.loss_ce: 0.1490, aux.acc_seg: 85.5184, loss: 0.3402, grad_norm: 3.5154 2023-02-12 01:50:06,833 - mmseg - INFO - Iter [133400/160000] lr: 9.975e-06, eta: 1:31:11, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2034, decode.acc_seg: 91.9591, aux.loss_ce: 0.1602, aux.acc_seg: 84.3327, loss: 0.3636, grad_norm: 3.9079 2023-02-12 01:50:16,936 - mmseg - INFO - Iter [133450/160000] lr: 9.957e-06, eta: 1:31:01, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1993, decode.acc_seg: 91.8198, aux.loss_ce: 0.1520, aux.acc_seg: 84.9421, loss: 0.3513, grad_norm: 3.5585 2023-02-12 01:50:29,249 - mmseg - INFO - Iter [133500/160000] lr: 9.938e-06, eta: 1:30:51, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1873, decode.acc_seg: 92.1440, aux.loss_ce: 0.1432, aux.acc_seg: 85.5737, loss: 0.3306, grad_norm: 4.0581 2023-02-12 01:50:39,350 - mmseg - INFO - Iter [133550/160000] lr: 9.919e-06, eta: 1:30:40, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2011, decode.acc_seg: 91.7847, aux.loss_ce: 0.1528, aux.acc_seg: 84.8576, loss: 0.3538, grad_norm: 4.2978 2023-02-12 01:50:49,352 - mmseg - INFO - Iter [133600/160000] lr: 9.900e-06, eta: 1:30:30, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1907, decode.acc_seg: 92.1526, aux.loss_ce: 0.1534, aux.acc_seg: 84.8617, loss: 0.3442, grad_norm: 3.8503 2023-02-12 01:50:59,319 - mmseg - INFO - Iter [133650/160000] lr: 9.882e-06, eta: 1:30:20, time: 0.199, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1941, decode.acc_seg: 92.1113, aux.loss_ce: 0.1456, aux.acc_seg: 85.4235, loss: 0.3396, grad_norm: 3.6623 2023-02-12 01:51:09,123 - mmseg - INFO - Iter [133700/160000] lr: 9.863e-06, eta: 1:30:09, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1892, decode.acc_seg: 92.1641, aux.loss_ce: 0.1580, aux.acc_seg: 84.4859, loss: 0.3472, grad_norm: 4.9976 2023-02-12 01:51:19,847 - mmseg - INFO - Iter [133750/160000] lr: 9.844e-06, eta: 1:29:59, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1940, decode.acc_seg: 91.9440, aux.loss_ce: 0.1545, aux.acc_seg: 84.6127, loss: 0.3485, grad_norm: 3.8621 2023-02-12 01:51:29,602 - mmseg - INFO - Iter [133800/160000] lr: 9.825e-06, eta: 1:29:49, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2031, decode.acc_seg: 91.9142, aux.loss_ce: 0.1584, aux.acc_seg: 84.7616, loss: 0.3615, grad_norm: 4.2381 2023-02-12 01:51:39,482 - mmseg - INFO - Iter [133850/160000] lr: 9.807e-06, eta: 1:29:38, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2128, decode.acc_seg: 91.4823, aux.loss_ce: 0.1617, aux.acc_seg: 84.4054, loss: 0.3745, grad_norm: 4.4036 2023-02-12 01:51:49,445 - mmseg - INFO - Iter [133900/160000] lr: 9.788e-06, eta: 1:29:28, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2011, 
decode.acc_seg: 91.7851, aux.loss_ce: 0.1568, aux.acc_seg: 84.6000, loss: 0.3579, grad_norm: 4.4140 2023-02-12 01:51:59,166 - mmseg - INFO - Iter [133950/160000] lr: 9.769e-06, eta: 1:29:18, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2013, decode.acc_seg: 91.9590, aux.loss_ce: 0.1589, aux.acc_seg: 84.3828, loss: 0.3602, grad_norm: 4.7405 2023-02-12 01:52:08,955 - mmseg - INFO - Saving checkpoint at 134000 iterations 2023-02-12 01:52:09,649 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:52:09,649 - mmseg - INFO - Iter [134000/160000] lr: 9.750e-06, eta: 1:29:07, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1884, decode.acc_seg: 92.4497, aux.loss_ce: 0.1518, aux.acc_seg: 85.0244, loss: 0.3402, grad_norm: 3.9061 2023-02-12 01:52:19,648 - mmseg - INFO - Iter [134050/160000] lr: 9.732e-06, eta: 1:28:57, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2031, decode.acc_seg: 91.7110, aux.loss_ce: 0.1551, aux.acc_seg: 84.8574, loss: 0.3582, grad_norm: 3.8008 2023-02-12 01:52:29,555 - mmseg - INFO - Iter [134100/160000] lr: 9.713e-06, eta: 1:28:47, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1946, decode.acc_seg: 92.2908, aux.loss_ce: 0.1552, aux.acc_seg: 84.6676, loss: 0.3498, grad_norm: 3.8454 2023-02-12 01:52:39,950 - mmseg - INFO - Iter [134150/160000] lr: 9.694e-06, eta: 1:28:36, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1918, decode.acc_seg: 92.3236, aux.loss_ce: 0.1444, aux.acc_seg: 85.6166, loss: 0.3362, grad_norm: 5.5513 2023-02-12 01:52:49,823 - mmseg - INFO - Iter [134200/160000] lr: 9.675e-06, eta: 1:28:26, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1949, decode.acc_seg: 92.1411, aux.loss_ce: 0.1548, aux.acc_seg: 84.8845, loss: 0.3497, grad_norm: 3.6264 2023-02-12 01:52:59,989 - mmseg - INFO - Iter [134250/160000] lr: 9.657e-06, eta: 1:28:16, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2063, decode.acc_seg: 91.6608, aux.loss_ce: 0.1537, aux.acc_seg: 84.7845, loss: 0.3600, grad_norm: 4.3263 2023-02-12 01:53:10,242 - mmseg - INFO - Iter [134300/160000] lr: 9.638e-06, eta: 1:28:05, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2043, decode.acc_seg: 91.8744, aux.loss_ce: 0.1565, aux.acc_seg: 84.8200, loss: 0.3607, grad_norm: 3.6433 2023-02-12 01:53:20,933 - mmseg - INFO - Iter [134350/160000] lr: 9.619e-06, eta: 1:27:55, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1999, decode.acc_seg: 91.9920, aux.loss_ce: 0.1569, aux.acc_seg: 84.6736, loss: 0.3568, grad_norm: 4.3204 2023-02-12 01:53:30,632 - mmseg - INFO - Iter [134400/160000] lr: 9.600e-06, eta: 1:27:45, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1990, decode.acc_seg: 92.1394, aux.loss_ce: 0.1535, aux.acc_seg: 84.9100, loss: 0.3525, grad_norm: 3.9732 2023-02-12 01:53:40,631 - mmseg - INFO - Iter [134450/160000] lr: 9.582e-06, eta: 1:27:35, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1953, decode.acc_seg: 92.1056, aux.loss_ce: 0.1574, aux.acc_seg: 84.4616, loss: 0.3527, grad_norm: 4.1049 2023-02-12 01:53:51,068 - mmseg - INFO - Iter [134500/160000] lr: 9.563e-06, eta: 1:27:24, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2084, decode.acc_seg: 91.5772, aux.loss_ce: 0.1604, aux.acc_seg: 84.2256, loss: 0.3689, grad_norm: 4.0313 2023-02-12 01:54:00,927 - mmseg - INFO - Iter [134550/160000] lr: 9.544e-06, eta: 1:27:14, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 
0.2083, decode.acc_seg: 91.7309, aux.loss_ce: 0.1611, aux.acc_seg: 84.3088, loss: 0.3694, grad_norm: 4.0557 2023-02-12 01:54:10,689 - mmseg - INFO - Iter [134600/160000] lr: 9.525e-06, eta: 1:27:03, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2022, decode.acc_seg: 92.1247, aux.loss_ce: 0.1505, aux.acc_seg: 85.5503, loss: 0.3527, grad_norm: 4.8081 2023-02-12 01:54:20,752 - mmseg - INFO - Iter [134650/160000] lr: 9.507e-06, eta: 1:26:53, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2059, decode.acc_seg: 92.0157, aux.loss_ce: 0.1659, aux.acc_seg: 84.0494, loss: 0.3718, grad_norm: 4.5607 2023-02-12 01:54:30,746 - mmseg - INFO - Iter [134700/160000] lr: 9.488e-06, eta: 1:26:43, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2014, decode.acc_seg: 91.7030, aux.loss_ce: 0.1527, aux.acc_seg: 84.8456, loss: 0.3541, grad_norm: 4.8356 2023-02-12 01:54:42,865 - mmseg - INFO - Iter [134750/160000] lr: 9.469e-06, eta: 1:26:33, time: 0.242, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2011, decode.acc_seg: 92.0095, aux.loss_ce: 0.1585, aux.acc_seg: 84.2156, loss: 0.3596, grad_norm: 4.0721 2023-02-12 01:54:52,772 - mmseg - INFO - Iter [134800/160000] lr: 9.450e-06, eta: 1:26:23, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2052, decode.acc_seg: 91.6523, aux.loss_ce: 0.1582, aux.acc_seg: 84.2296, loss: 0.3634, grad_norm: 3.6118 2023-02-12 01:55:02,551 - mmseg - INFO - Iter [134850/160000] lr: 9.432e-06, eta: 1:26:12, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1966, decode.acc_seg: 91.9309, aux.loss_ce: 0.1531, aux.acc_seg: 84.5776, loss: 0.3497, grad_norm: 3.5972 2023-02-12 01:55:12,527 - mmseg - INFO - Iter [134900/160000] lr: 9.413e-06, eta: 1:26:02, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1892, decode.acc_seg: 92.4394, aux.loss_ce: 0.1545, aux.acc_seg: 84.7725, loss: 0.3438, grad_norm: 3.7267 2023-02-12 01:55:22,773 - mmseg - INFO - Iter [134950/160000] lr: 9.394e-06, eta: 1:25:52, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2000, decode.acc_seg: 92.0550, aux.loss_ce: 0.1584, aux.acc_seg: 84.6611, loss: 0.3584, grad_norm: 4.0237 2023-02-12 01:55:32,670 - mmseg - INFO - Saving checkpoint at 135000 iterations 2023-02-12 01:55:33,347 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:55:33,347 - mmseg - INFO - Iter [135000/160000] lr: 9.375e-06, eta: 1:25:41, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2007, decode.acc_seg: 91.8463, aux.loss_ce: 0.1574, aux.acc_seg: 84.7563, loss: 0.3581, grad_norm: 3.5374 2023-02-12 01:55:43,316 - mmseg - INFO - Iter [135050/160000] lr: 9.357e-06, eta: 1:25:31, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1867, decode.acc_seg: 92.3710, aux.loss_ce: 0.1484, aux.acc_seg: 85.2040, loss: 0.3350, grad_norm: 3.5712 2023-02-12 01:55:53,330 - mmseg - INFO - Iter [135100/160000] lr: 9.338e-06, eta: 1:25:21, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2057, decode.acc_seg: 91.9527, aux.loss_ce: 0.1584, aux.acc_seg: 84.7736, loss: 0.3641, grad_norm: 4.8252 2023-02-12 01:56:04,332 - mmseg - INFO - Iter [135150/160000] lr: 9.319e-06, eta: 1:25:10, time: 0.220, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1946, decode.acc_seg: 92.2334, aux.loss_ce: 0.1528, aux.acc_seg: 85.2020, loss: 0.3474, grad_norm: 4.7004 2023-02-12 01:56:14,270 - mmseg - INFO - Iter [135200/160000] lr: 9.300e-06, eta: 1:25:00, time: 0.199, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.2060, decode.acc_seg: 91.6061, aux.loss_ce: 0.1605, aux.acc_seg: 84.6277, loss: 0.3665, grad_norm: 5.0003 2023-02-12 01:56:24,844 - mmseg - INFO - Iter [135250/160000] lr: 9.282e-06, eta: 1:24:50, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2074, decode.acc_seg: 91.7995, aux.loss_ce: 0.1602, aux.acc_seg: 84.4129, loss: 0.3675, grad_norm: 4.6583 2023-02-12 01:56:34,701 - mmseg - INFO - Iter [135300/160000] lr: 9.263e-06, eta: 1:24:40, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1890, decode.acc_seg: 92.4378, aux.loss_ce: 0.1565, aux.acc_seg: 84.8566, loss: 0.3455, grad_norm: 4.2490 2023-02-12 01:56:44,852 - mmseg - INFO - Iter [135350/160000] lr: 9.244e-06, eta: 1:24:29, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2022, decode.acc_seg: 91.8961, aux.loss_ce: 0.1599, aux.acc_seg: 84.4973, loss: 0.3621, grad_norm: 3.8767 2023-02-12 01:56:54,819 - mmseg - INFO - Iter [135400/160000] lr: 9.225e-06, eta: 1:24:19, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1977, decode.acc_seg: 92.1347, aux.loss_ce: 0.1492, aux.acc_seg: 85.3932, loss: 0.3470, grad_norm: 3.4852 2023-02-12 01:57:04,899 - mmseg - INFO - Iter [135450/160000] lr: 9.207e-06, eta: 1:24:09, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1909, decode.acc_seg: 92.4454, aux.loss_ce: 0.1535, aux.acc_seg: 85.1018, loss: 0.3444, grad_norm: 4.1450 2023-02-12 01:57:15,052 - mmseg - INFO - Iter [135500/160000] lr: 9.188e-06, eta: 1:23:58, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1891, decode.acc_seg: 92.3705, aux.loss_ce: 0.1520, aux.acc_seg: 85.0260, loss: 0.3411, grad_norm: 3.8447 2023-02-12 01:57:24,852 - mmseg - INFO - Iter [135550/160000] lr: 9.169e-06, eta: 1:23:48, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1910, decode.acc_seg: 91.9485, aux.loss_ce: 0.1520, aux.acc_seg: 84.7028, loss: 0.3430, grad_norm: 3.8735 2023-02-12 01:57:35,715 - mmseg - INFO - Iter [135600/160000] lr: 9.150e-06, eta: 1:23:38, time: 0.218, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2030, decode.acc_seg: 91.8443, aux.loss_ce: 0.1541, aux.acc_seg: 84.9633, loss: 0.3571, grad_norm: 4.0291 2023-02-12 01:57:45,573 - mmseg - INFO - Iter [135650/160000] lr: 9.132e-06, eta: 1:23:27, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2086, decode.acc_seg: 91.7057, aux.loss_ce: 0.1622, aux.acc_seg: 84.4502, loss: 0.3708, grad_norm: 4.1734 2023-02-12 01:57:55,385 - mmseg - INFO - Iter [135700/160000] lr: 9.113e-06, eta: 1:23:17, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2055, decode.acc_seg: 91.8256, aux.loss_ce: 0.1556, aux.acc_seg: 84.9122, loss: 0.3611, grad_norm: 4.0686 2023-02-12 01:58:05,165 - mmseg - INFO - Iter [135750/160000] lr: 9.094e-06, eta: 1:23:07, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2070, decode.acc_seg: 91.5442, aux.loss_ce: 0.1544, aux.acc_seg: 84.8431, loss: 0.3614, grad_norm: 4.1794 2023-02-12 01:58:15,178 - mmseg - INFO - Iter [135800/160000] lr: 9.075e-06, eta: 1:22:56, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2089, decode.acc_seg: 91.6459, aux.loss_ce: 0.1587, aux.acc_seg: 84.4944, loss: 0.3676, grad_norm: 4.1383 2023-02-12 01:58:25,369 - mmseg - INFO - Iter [135850/160000] lr: 9.057e-06, eta: 1:22:46, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2013, decode.acc_seg: 91.7449, aux.loss_ce: 0.1558, aux.acc_seg: 84.6649, loss: 0.3571, grad_norm: 4.2805 2023-02-12 01:58:35,558 - mmseg - INFO - Iter 
[135900/160000] lr: 9.038e-06, eta: 1:22:36, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1913, decode.acc_seg: 92.2511, aux.loss_ce: 0.1497, aux.acc_seg: 85.2683, loss: 0.3411, grad_norm: 3.9482 2023-02-12 01:58:45,825 - mmseg - INFO - Iter [135950/160000] lr: 9.019e-06, eta: 1:22:25, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2063, decode.acc_seg: 91.4374, aux.loss_ce: 0.1626, aux.acc_seg: 84.0783, loss: 0.3689, grad_norm: 4.9506 2023-02-12 01:58:58,719 - mmseg - INFO - Saving checkpoint at 136000 iterations 2023-02-12 01:58:59,388 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 01:58:59,389 - mmseg - INFO - Iter [136000/160000] lr: 9.000e-06, eta: 1:22:16, time: 0.271, data_time: 0.047, memory: 7748, decode.loss_ce: 0.2083, decode.acc_seg: 91.5588, aux.loss_ce: 0.1624, aux.acc_seg: 84.1748, loss: 0.3708, grad_norm: 4.2986 2023-02-12 01:59:09,364 - mmseg - INFO - Iter [136050/160000] lr: 8.982e-06, eta: 1:22:05, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1909, decode.acc_seg: 92.2860, aux.loss_ce: 0.1453, aux.acc_seg: 85.5767, loss: 0.3363, grad_norm: 3.3390 2023-02-12 01:59:19,670 - mmseg - INFO - Iter [136100/160000] lr: 8.963e-06, eta: 1:21:55, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1889, decode.acc_seg: 92.4478, aux.loss_ce: 0.1565, aux.acc_seg: 84.9416, loss: 0.3453, grad_norm: 3.8817 2023-02-12 01:59:29,344 - mmseg - INFO - Iter [136150/160000] lr: 8.944e-06, eta: 1:21:45, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2006, decode.acc_seg: 92.1016, aux.loss_ce: 0.1524, aux.acc_seg: 85.3982, loss: 0.3530, grad_norm: 4.0391 2023-02-12 01:59:39,205 - mmseg - INFO - Iter [136200/160000] lr: 8.925e-06, eta: 1:21:34, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2073, decode.acc_seg: 91.5823, aux.loss_ce: 0.1634, aux.acc_seg: 84.2788, loss: 0.3707, grad_norm: 4.5535 2023-02-12 01:59:49,458 - mmseg - INFO - Iter [136250/160000] lr: 8.907e-06, eta: 1:21:24, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1998, decode.acc_seg: 92.2577, aux.loss_ce: 0.1505, aux.acc_seg: 85.2702, loss: 0.3502, grad_norm: 4.0230 2023-02-12 01:59:59,405 - mmseg - INFO - Iter [136300/160000] lr: 8.888e-06, eta: 1:21:14, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1897, decode.acc_seg: 92.3931, aux.loss_ce: 0.1454, aux.acc_seg: 85.9085, loss: 0.3351, grad_norm: 3.7204 2023-02-12 02:00:09,286 - mmseg - INFO - Iter [136350/160000] lr: 8.869e-06, eta: 1:21:03, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2011, decode.acc_seg: 91.9102, aux.loss_ce: 0.1605, aux.acc_seg: 84.2643, loss: 0.3616, grad_norm: 3.9764 2023-02-12 02:00:19,227 - mmseg - INFO - Iter [136400/160000] lr: 8.850e-06, eta: 1:20:53, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1989, decode.acc_seg: 91.9834, aux.loss_ce: 0.1553, aux.acc_seg: 84.6611, loss: 0.3542, grad_norm: 4.1860 2023-02-12 02:00:29,224 - mmseg - INFO - Iter [136450/160000] lr: 8.832e-06, eta: 1:20:43, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1979, decode.acc_seg: 92.0052, aux.loss_ce: 0.1559, aux.acc_seg: 84.3473, loss: 0.3538, grad_norm: 3.6613 2023-02-12 02:00:39,026 - mmseg - INFO - Iter [136500/160000] lr: 8.813e-06, eta: 1:20:32, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.7462, aux.loss_ce: 0.1564, aux.acc_seg: 84.6328, loss: 0.3587, grad_norm: 4.2622 2023-02-12 02:00:49,608 - mmseg - INFO - 
Iter [136550/160000] lr: 8.794e-06, eta: 1:20:22, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2044, decode.acc_seg: 91.8978, aux.loss_ce: 0.1637, aux.acc_seg: 83.9392, loss: 0.3681, grad_norm: 4.4598 2023-02-12 02:00:59,703 - mmseg - INFO - Iter [136600/160000] lr: 8.775e-06, eta: 1:20:12, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1957, decode.acc_seg: 92.0885, aux.loss_ce: 0.1546, aux.acc_seg: 85.0883, loss: 0.3502, grad_norm: 4.2378 2023-02-12 02:01:10,362 - mmseg - INFO - Iter [136650/160000] lr: 8.757e-06, eta: 1:20:01, time: 0.214, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2001, decode.acc_seg: 91.9776, aux.loss_ce: 0.1560, aux.acc_seg: 84.5256, loss: 0.3560, grad_norm: 3.6048 2023-02-12 02:01:21,166 - mmseg - INFO - Iter [136700/160000] lr: 8.738e-06, eta: 1:19:51, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1990, decode.acc_seg: 91.8751, aux.loss_ce: 0.1604, aux.acc_seg: 84.2748, loss: 0.3595, grad_norm: 4.1432 2023-02-12 02:01:31,104 - mmseg - INFO - Iter [136750/160000] lr: 8.719e-06, eta: 1:19:41, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1946, decode.acc_seg: 92.2776, aux.loss_ce: 0.1594, aux.acc_seg: 84.5190, loss: 0.3540, grad_norm: 3.8802 2023-02-12 02:01:42,161 - mmseg - INFO - Iter [136800/160000] lr: 8.700e-06, eta: 1:19:31, time: 0.221, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2060, decode.acc_seg: 91.8568, aux.loss_ce: 0.1626, aux.acc_seg: 84.2852, loss: 0.3687, grad_norm: 5.0013 2023-02-12 02:01:52,776 - mmseg - INFO - Iter [136850/160000] lr: 8.682e-06, eta: 1:19:21, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2025, decode.acc_seg: 91.8657, aux.loss_ce: 0.1609, aux.acc_seg: 84.4812, loss: 0.3634, grad_norm: 4.1136 2023-02-12 02:02:02,781 - mmseg - INFO - Iter [136900/160000] lr: 8.663e-06, eta: 1:19:10, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 92.2951, aux.loss_ce: 0.1513, aux.acc_seg: 85.3129, loss: 0.3439, grad_norm: 4.0058 2023-02-12 02:02:12,858 - mmseg - INFO - Iter [136950/160000] lr: 8.644e-06, eta: 1:19:00, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1803, decode.acc_seg: 92.7289, aux.loss_ce: 0.1449, aux.acc_seg: 85.5883, loss: 0.3252, grad_norm: 3.2620 2023-02-12 02:02:23,084 - mmseg - INFO - Saving checkpoint at 137000 iterations 2023-02-12 02:02:23,778 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:02:23,778 - mmseg - INFO - Iter [137000/160000] lr: 8.625e-06, eta: 1:18:50, time: 0.218, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1814, decode.acc_seg: 92.6779, aux.loss_ce: 0.1443, aux.acc_seg: 85.5571, loss: 0.3257, grad_norm: 3.7095 2023-02-12 02:02:33,662 - mmseg - INFO - Iter [137050/160000] lr: 8.607e-06, eta: 1:18:39, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1926, decode.acc_seg: 92.3380, aux.loss_ce: 0.1578, aux.acc_seg: 84.7538, loss: 0.3504, grad_norm: 3.7059 2023-02-12 02:02:44,039 - mmseg - INFO - Iter [137100/160000] lr: 8.588e-06, eta: 1:18:29, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1950, decode.acc_seg: 92.1616, aux.loss_ce: 0.1537, aux.acc_seg: 84.7134, loss: 0.3487, grad_norm: 3.8747 2023-02-12 02:02:54,165 - mmseg - INFO - Iter [137150/160000] lr: 8.569e-06, eta: 1:18:19, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2012, decode.acc_seg: 91.6466, aux.loss_ce: 0.1501, aux.acc_seg: 84.9641, loss: 0.3513, grad_norm: 4.7870 2023-02-12 02:03:03,966 - mmseg - INFO 
- Iter [137200/160000] lr: 8.550e-06, eta: 1:18:08, time: 0.196, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1914, decode.acc_seg: 92.2903, aux.loss_ce: 0.1435, aux.acc_seg: 85.9614, loss: 0.3348, grad_norm: 3.5453 2023-02-12 02:03:16,326 - mmseg - INFO - Iter [137250/160000] lr: 8.532e-06, eta: 1:17:59, time: 0.247, data_time: 0.048, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.2219, aux.loss_ce: 0.1527, aux.acc_seg: 85.1588, loss: 0.3479, grad_norm: 3.9451 2023-02-12 02:03:26,926 - mmseg - INFO - Iter [137300/160000] lr: 8.513e-06, eta: 1:17:48, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1888, decode.acc_seg: 92.1973, aux.loss_ce: 0.1453, aux.acc_seg: 85.3336, loss: 0.3340, grad_norm: 4.0057 2023-02-12 02:03:37,169 - mmseg - INFO - Iter [137350/160000] lr: 8.494e-06, eta: 1:17:38, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1963, decode.acc_seg: 92.2386, aux.loss_ce: 0.1618, aux.acc_seg: 84.3413, loss: 0.3580, grad_norm: 5.3308 2023-02-12 02:03:47,527 - mmseg - INFO - Iter [137400/160000] lr: 8.475e-06, eta: 1:17:28, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1918, decode.acc_seg: 92.5262, aux.loss_ce: 0.1529, aux.acc_seg: 85.3275, loss: 0.3447, grad_norm: 3.7067 2023-02-12 02:03:57,479 - mmseg - INFO - Iter [137450/160000] lr: 8.457e-06, eta: 1:17:17, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1929, decode.acc_seg: 92.2363, aux.loss_ce: 0.1568, aux.acc_seg: 84.9742, loss: 0.3497, grad_norm: 3.9704 2023-02-12 02:04:07,698 - mmseg - INFO - Iter [137500/160000] lr: 8.438e-06, eta: 1:17:07, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2073, decode.acc_seg: 91.4869, aux.loss_ce: 0.1557, aux.acc_seg: 84.8520, loss: 0.3630, grad_norm: 4.5067 2023-02-12 02:04:18,285 - mmseg - INFO - Iter [137550/160000] lr: 8.419e-06, eta: 1:16:57, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2010, decode.acc_seg: 92.0413, aux.loss_ce: 0.1541, aux.acc_seg: 85.2128, loss: 0.3552, grad_norm: 3.6394 2023-02-12 02:04:28,351 - mmseg - INFO - Iter [137600/160000] lr: 8.400e-06, eta: 1:16:47, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1942, decode.acc_seg: 91.9766, aux.loss_ce: 0.1549, aux.acc_seg: 84.4350, loss: 0.3490, grad_norm: 3.8015 2023-02-12 02:04:38,439 - mmseg - INFO - Iter [137650/160000] lr: 8.382e-06, eta: 1:16:36, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1924, decode.acc_seg: 92.2141, aux.loss_ce: 0.1487, aux.acc_seg: 85.3334, loss: 0.3411, grad_norm: 3.5049 2023-02-12 02:04:48,641 - mmseg - INFO - Iter [137700/160000] lr: 8.363e-06, eta: 1:16:26, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2019, decode.acc_seg: 91.9401, aux.loss_ce: 0.1625, aux.acc_seg: 84.1504, loss: 0.3644, grad_norm: 4.3006 2023-02-12 02:04:58,632 - mmseg - INFO - Iter [137750/160000] lr: 8.344e-06, eta: 1:16:16, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1901, decode.acc_seg: 92.3295, aux.loss_ce: 0.1546, aux.acc_seg: 84.9501, loss: 0.3447, grad_norm: 3.5515 2023-02-12 02:05:08,717 - mmseg - INFO - Iter [137800/160000] lr: 8.325e-06, eta: 1:16:05, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2069, decode.acc_seg: 91.6237, aux.loss_ce: 0.1543, aux.acc_seg: 84.7093, loss: 0.3612, grad_norm: 4.6453 2023-02-12 02:05:19,228 - mmseg - INFO - Iter [137850/160000] lr: 8.307e-06, eta: 1:15:55, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1851, decode.acc_seg: 92.5504, aux.loss_ce: 0.1469, 
aux.acc_seg: 85.3103, loss: 0.3320, grad_norm: 3.7808 2023-02-12 02:05:29,750 - mmseg - INFO - Iter [137900/160000] lr: 8.288e-06, eta: 1:15:45, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2036, decode.acc_seg: 91.8436, aux.loss_ce: 0.1531, aux.acc_seg: 85.3036, loss: 0.3567, grad_norm: 3.8492 2023-02-12 02:05:40,057 - mmseg - INFO - Iter [137950/160000] lr: 8.269e-06, eta: 1:15:35, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2042, decode.acc_seg: 91.8291, aux.loss_ce: 0.1549, aux.acc_seg: 84.7729, loss: 0.3591, grad_norm: 3.8275 2023-02-12 02:05:50,971 - mmseg - INFO - Saving checkpoint at 138000 iterations 2023-02-12 02:05:51,663 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:05:51,663 - mmseg - INFO - Iter [138000/160000] lr: 8.250e-06, eta: 1:15:25, time: 0.233, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1881, decode.acc_seg: 92.3704, aux.loss_ce: 0.1404, aux.acc_seg: 86.0182, loss: 0.3285, grad_norm: 4.2302 2023-02-12 02:06:01,846 - mmseg - INFO - Iter [138050/160000] lr: 8.232e-06, eta: 1:15:14, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1915, decode.acc_seg: 92.2520, aux.loss_ce: 0.1546, aux.acc_seg: 84.7087, loss: 0.3461, grad_norm: 4.3313 2023-02-12 02:06:11,662 - mmseg - INFO - Iter [138100/160000] lr: 8.213e-06, eta: 1:15:04, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.0865, aux.loss_ce: 0.1472, aux.acc_seg: 85.5736, loss: 0.3424, grad_norm: 3.6242 2023-02-12 02:06:21,314 - mmseg - INFO - Iter [138150/160000] lr: 8.194e-06, eta: 1:14:53, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1935, decode.acc_seg: 92.2536, aux.loss_ce: 0.1637, aux.acc_seg: 84.2427, loss: 0.3572, grad_norm: 3.8003 2023-02-12 02:06:31,727 - mmseg - INFO - Iter [138200/160000] lr: 8.175e-06, eta: 1:14:43, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1918, decode.acc_seg: 92.3816, aux.loss_ce: 0.1501, aux.acc_seg: 85.4920, loss: 0.3419, grad_norm: 4.0509 2023-02-12 02:06:41,541 - mmseg - INFO - Iter [138250/160000] lr: 8.157e-06, eta: 1:14:33, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1904, decode.acc_seg: 92.2694, aux.loss_ce: 0.1539, aux.acc_seg: 84.8239, loss: 0.3442, grad_norm: 4.7264 2023-02-12 02:06:51,487 - mmseg - INFO - Iter [138300/160000] lr: 8.138e-06, eta: 1:14:22, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1972, decode.acc_seg: 92.1156, aux.loss_ce: 0.1482, aux.acc_seg: 85.4591, loss: 0.3453, grad_norm: 4.5376 2023-02-12 02:07:01,509 - mmseg - INFO - Iter [138350/160000] lr: 8.119e-06, eta: 1:14:12, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2039, decode.acc_seg: 91.7948, aux.loss_ce: 0.1571, aux.acc_seg: 84.7107, loss: 0.3610, grad_norm: 3.8413 2023-02-12 02:07:11,637 - mmseg - INFO - Iter [138400/160000] lr: 8.100e-06, eta: 1:14:02, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1985, decode.acc_seg: 91.8446, aux.loss_ce: 0.1540, aux.acc_seg: 84.5827, loss: 0.3525, grad_norm: 3.8375 2023-02-12 02:07:22,283 - mmseg - INFO - Iter [138450/160000] lr: 8.082e-06, eta: 1:13:52, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1831, decode.acc_seg: 92.5281, aux.loss_ce: 0.1472, aux.acc_seg: 85.4174, loss: 0.3302, grad_norm: 3.2196 2023-02-12 02:07:32,482 - mmseg - INFO - Iter [138500/160000] lr: 8.063e-06, eta: 1:13:41, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2022, decode.acc_seg: 91.8042, aux.loss_ce: 
0.1550, aux.acc_seg: 85.1825, loss: 0.3572, grad_norm: 3.9590 2023-02-12 02:07:44,753 - mmseg - INFO - Iter [138550/160000] lr: 8.044e-06, eta: 1:13:31, time: 0.246, data_time: 0.046, memory: 7748, decode.loss_ce: 0.1946, decode.acc_seg: 91.8672, aux.loss_ce: 0.1518, aux.acc_seg: 84.7844, loss: 0.3464, grad_norm: 4.6782 2023-02-12 02:07:54,533 - mmseg - INFO - Iter [138600/160000] lr: 8.025e-06, eta: 1:13:21, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1975, decode.acc_seg: 92.1343, aux.loss_ce: 0.1575, aux.acc_seg: 84.3038, loss: 0.3550, grad_norm: 3.9856 2023-02-12 02:08:05,309 - mmseg - INFO - Iter [138650/160000] lr: 8.007e-06, eta: 1:13:11, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2105, decode.acc_seg: 91.7022, aux.loss_ce: 0.1655, aux.acc_seg: 84.1195, loss: 0.3761, grad_norm: 4.7246 2023-02-12 02:08:16,150 - mmseg - INFO - Iter [138700/160000] lr: 7.988e-06, eta: 1:13:01, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 92.1580, aux.loss_ce: 0.1533, aux.acc_seg: 84.8411, loss: 0.3458, grad_norm: 4.3123 2023-02-12 02:08:26,277 - mmseg - INFO - Iter [138750/160000] lr: 7.969e-06, eta: 1:12:50, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1959, decode.acc_seg: 92.0994, aux.loss_ce: 0.1512, aux.acc_seg: 85.2129, loss: 0.3470, grad_norm: 4.3910 2023-02-12 02:08:36,347 - mmseg - INFO - Iter [138800/160000] lr: 7.950e-06, eta: 1:12:40, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1916, decode.acc_seg: 92.3490, aux.loss_ce: 0.1483, aux.acc_seg: 85.4566, loss: 0.3399, grad_norm: 4.1915 2023-02-12 02:08:46,235 - mmseg - INFO - Iter [138850/160000] lr: 7.932e-06, eta: 1:12:30, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1915, decode.acc_seg: 92.1934, aux.loss_ce: 0.1534, aux.acc_seg: 84.6677, loss: 0.3449, grad_norm: 3.8640 2023-02-12 02:08:56,803 - mmseg - INFO - Iter [138900/160000] lr: 7.913e-06, eta: 1:12:19, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1962, decode.acc_seg: 91.8743, aux.loss_ce: 0.1517, aux.acc_seg: 84.6509, loss: 0.3479, grad_norm: 3.9129 2023-02-12 02:09:07,127 - mmseg - INFO - Iter [138950/160000] lr: 7.894e-06, eta: 1:12:09, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.5294, aux.loss_ce: 0.1516, aux.acc_seg: 85.5311, loss: 0.3428, grad_norm: 4.0535 2023-02-12 02:09:16,913 - mmseg - INFO - Saving checkpoint at 139000 iterations 2023-02-12 02:09:17,588 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:09:17,588 - mmseg - INFO - Iter [139000/160000] lr: 7.875e-06, eta: 1:11:59, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2024, decode.acc_seg: 91.4597, aux.loss_ce: 0.1557, aux.acc_seg: 84.4448, loss: 0.3581, grad_norm: 4.0984 2023-02-12 02:09:27,714 - mmseg - INFO - Iter [139050/160000] lr: 7.857e-06, eta: 1:11:49, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2049, decode.acc_seg: 91.9983, aux.loss_ce: 0.1562, aux.acc_seg: 84.7777, loss: 0.3610, grad_norm: 3.8905 2023-02-12 02:09:37,774 - mmseg - INFO - Iter [139100/160000] lr: 7.838e-06, eta: 1:11:38, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2084, decode.acc_seg: 91.6598, aux.loss_ce: 0.1597, aux.acc_seg: 84.6630, loss: 0.3681, grad_norm: 4.8339 2023-02-12 02:09:47,626 - mmseg - INFO - Iter [139150/160000] lr: 7.819e-06, eta: 1:11:28, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1880, decode.acc_seg: 92.3494, 
aux.loss_ce: 0.1463, aux.acc_seg: 85.7448, loss: 0.3343, grad_norm: 3.6503 2023-02-12 02:09:58,655 - mmseg - INFO - Iter [139200/160000] lr: 7.800e-06, eta: 1:11:18, time: 0.221, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1916, decode.acc_seg: 92.0692, aux.loss_ce: 0.1491, aux.acc_seg: 85.2986, loss: 0.3407, grad_norm: 3.8906 2023-02-12 02:10:09,169 - mmseg - INFO - Iter [139250/160000] lr: 7.782e-06, eta: 1:11:07, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2007, decode.acc_seg: 92.0527, aux.loss_ce: 0.1557, aux.acc_seg: 84.7134, loss: 0.3564, grad_norm: 3.9576 2023-02-12 02:10:19,544 - mmseg - INFO - Iter [139300/160000] lr: 7.763e-06, eta: 1:10:57, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1951, decode.acc_seg: 92.3192, aux.loss_ce: 0.1490, aux.acc_seg: 85.7445, loss: 0.3440, grad_norm: 3.8220 2023-02-12 02:10:29,656 - mmseg - INFO - Iter [139350/160000] lr: 7.744e-06, eta: 1:10:47, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1933, decode.acc_seg: 92.0652, aux.loss_ce: 0.1500, aux.acc_seg: 85.0859, loss: 0.3434, grad_norm: 3.6103 2023-02-12 02:10:39,440 - mmseg - INFO - Iter [139400/160000] lr: 7.725e-06, eta: 1:10:37, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1934, decode.acc_seg: 92.1995, aux.loss_ce: 0.1531, aux.acc_seg: 84.9823, loss: 0.3465, grad_norm: 3.9983 2023-02-12 02:10:49,923 - mmseg - INFO - Iter [139450/160000] lr: 7.707e-06, eta: 1:10:26, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2009, decode.acc_seg: 91.9478, aux.loss_ce: 0.1537, aux.acc_seg: 84.8906, loss: 0.3546, grad_norm: 5.2250 2023-02-12 02:10:59,789 - mmseg - INFO - Iter [139500/160000] lr: 7.688e-06, eta: 1:10:16, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2004, decode.acc_seg: 92.2073, aux.loss_ce: 0.1585, aux.acc_seg: 84.7828, loss: 0.3589, grad_norm: 4.3323 2023-02-12 02:11:10,408 - mmseg - INFO - Iter [139550/160000] lr: 7.669e-06, eta: 1:10:06, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.2112, aux.loss_ce: 0.1500, aux.acc_seg: 85.3446, loss: 0.3453, grad_norm: 3.7634 2023-02-12 02:11:20,221 - mmseg - INFO - Iter [139600/160000] lr: 7.650e-06, eta: 1:09:55, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2006, decode.acc_seg: 92.0800, aux.loss_ce: 0.1558, aux.acc_seg: 84.8249, loss: 0.3564, grad_norm: 4.0468 2023-02-12 02:11:30,481 - mmseg - INFO - Iter [139650/160000] lr: 7.632e-06, eta: 1:09:45, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.0681, aux.loss_ce: 0.1528, aux.acc_seg: 84.8096, loss: 0.3479, grad_norm: 4.1811 2023-02-12 02:11:41,207 - mmseg - INFO - Iter [139700/160000] lr: 7.613e-06, eta: 1:09:35, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2045, decode.acc_seg: 91.9450, aux.loss_ce: 0.1536, aux.acc_seg: 85.0489, loss: 0.3581, grad_norm: 4.4116 2023-02-12 02:11:51,445 - mmseg - INFO - Iter [139750/160000] lr: 7.594e-06, eta: 1:09:25, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1971, decode.acc_seg: 92.0003, aux.loss_ce: 0.1559, aux.acc_seg: 84.8261, loss: 0.3529, grad_norm: 3.9226 2023-02-12 02:12:03,660 - mmseg - INFO - Iter [139800/160000] lr: 7.575e-06, eta: 1:09:15, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1977, decode.acc_seg: 92.0376, aux.loss_ce: 0.1550, aux.acc_seg: 84.5768, loss: 0.3527, grad_norm: 4.1011 2023-02-12 02:12:13,569 - mmseg - INFO - Iter [139850/160000] lr: 7.557e-06, eta: 1:09:04, time: 
0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1867, decode.acc_seg: 92.4000, aux.loss_ce: 0.1488, aux.acc_seg: 85.4332, loss: 0.3355, grad_norm: 3.6329 2023-02-12 02:12:24,362 - mmseg - INFO - Iter [139900/160000] lr: 7.538e-06, eta: 1:08:54, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2020, decode.acc_seg: 91.9314, aux.loss_ce: 0.1546, aux.acc_seg: 84.8965, loss: 0.3566, grad_norm: 4.0922 2023-02-12 02:12:34,072 - mmseg - INFO - Iter [139950/160000] lr: 7.519e-06, eta: 1:08:44, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1943, decode.acc_seg: 92.0907, aux.loss_ce: 0.1510, aux.acc_seg: 85.0389, loss: 0.3453, grad_norm: 3.7259 2023-02-12 02:12:43,839 - mmseg - INFO - Saving checkpoint at 140000 iterations 2023-02-12 02:12:44,521 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:12:44,521 - mmseg - INFO - Iter [140000/160000] lr: 7.500e-06, eta: 1:08:33, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2049, decode.acc_seg: 91.9155, aux.loss_ce: 0.1606, aux.acc_seg: 84.5879, loss: 0.3654, grad_norm: 4.5215 2023-02-12 02:12:54,442 - mmseg - INFO - Iter [140050/160000] lr: 7.482e-06, eta: 1:08:23, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1881, decode.acc_seg: 92.2697, aux.loss_ce: 0.1506, aux.acc_seg: 84.8594, loss: 0.3387, grad_norm: 3.9767 2023-02-12 02:13:04,869 - mmseg - INFO - Iter [140100/160000] lr: 7.463e-06, eta: 1:08:13, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1970, decode.acc_seg: 92.0708, aux.loss_ce: 0.1514, aux.acc_seg: 84.9407, loss: 0.3484, grad_norm: 4.3090 2023-02-12 02:13:14,919 - mmseg - INFO - Iter [140150/160000] lr: 7.444e-06, eta: 1:08:02, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1858, decode.acc_seg: 92.7134, aux.loss_ce: 0.1478, aux.acc_seg: 85.4338, loss: 0.3336, grad_norm: 3.9095 2023-02-12 02:13:24,783 - mmseg - INFO - Iter [140200/160000] lr: 7.425e-06, eta: 1:07:52, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1809, decode.acc_seg: 92.7491, aux.loss_ce: 0.1476, aux.acc_seg: 85.5622, loss: 0.3284, grad_norm: 3.7584 2023-02-12 02:13:35,081 - mmseg - INFO - Iter [140250/160000] lr: 7.407e-06, eta: 1:07:42, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1807, decode.acc_seg: 92.4784, aux.loss_ce: 0.1516, aux.acc_seg: 84.9271, loss: 0.3322, grad_norm: 3.9947 2023-02-12 02:13:44,836 - mmseg - INFO - Iter [140300/160000] lr: 7.388e-06, eta: 1:07:31, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1935, decode.acc_seg: 92.1499, aux.loss_ce: 0.1523, aux.acc_seg: 84.8343, loss: 0.3459, grad_norm: 3.4778 2023-02-12 02:13:54,670 - mmseg - INFO - Iter [140350/160000] lr: 7.369e-06, eta: 1:07:21, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1977, decode.acc_seg: 91.8700, aux.loss_ce: 0.1547, aux.acc_seg: 84.4594, loss: 0.3524, grad_norm: 4.1882 2023-02-12 02:14:05,036 - mmseg - INFO - Iter [140400/160000] lr: 7.350e-06, eta: 1:07:11, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1993, decode.acc_seg: 91.9125, aux.loss_ce: 0.1568, aux.acc_seg: 84.5089, loss: 0.3561, grad_norm: 4.6747 2023-02-12 02:14:14,926 - mmseg - INFO - Iter [140450/160000] lr: 7.332e-06, eta: 1:07:00, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1913, decode.acc_seg: 92.5124, aux.loss_ce: 0.1553, aux.acc_seg: 85.1444, loss: 0.3466, grad_norm: 4.5398 2023-02-12 02:14:24,600 - mmseg - INFO - Iter [140500/160000] lr: 7.313e-06, eta: 1:06:50, 
time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1901, decode.acc_seg: 92.2628, aux.loss_ce: 0.1508, aux.acc_seg: 85.1226, loss: 0.3409, grad_norm: 3.4186 2023-02-12 02:14:34,598 - mmseg - INFO - Iter [140550/160000] lr: 7.294e-06, eta: 1:06:40, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1978, decode.acc_seg: 91.9843, aux.loss_ce: 0.1551, aux.acc_seg: 84.8149, loss: 0.3529, grad_norm: 5.0260 2023-02-12 02:14:45,007 - mmseg - INFO - Iter [140600/160000] lr: 7.275e-06, eta: 1:06:30, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2127, decode.acc_seg: 91.7743, aux.loss_ce: 0.1577, aux.acc_seg: 85.1386, loss: 0.3704, grad_norm: 4.5029 2023-02-12 02:14:54,986 - mmseg - INFO - Iter [140650/160000] lr: 7.257e-06, eta: 1:06:19, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2005, decode.acc_seg: 91.9332, aux.loss_ce: 0.1591, aux.acc_seg: 84.6884, loss: 0.3596, grad_norm: 4.4301 2023-02-12 02:15:04,992 - mmseg - INFO - Iter [140700/160000] lr: 7.238e-06, eta: 1:06:09, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1933, decode.acc_seg: 92.2061, aux.loss_ce: 0.1480, aux.acc_seg: 85.2648, loss: 0.3413, grad_norm: 3.7214 2023-02-12 02:15:15,064 - mmseg - INFO - Iter [140750/160000] lr: 7.219e-06, eta: 1:05:59, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.4603, aux.loss_ce: 0.1536, aux.acc_seg: 84.9704, loss: 0.3448, grad_norm: 4.1737 2023-02-12 02:15:24,926 - mmseg - INFO - Iter [140800/160000] lr: 7.200e-06, eta: 1:05:48, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1919, decode.acc_seg: 92.2925, aux.loss_ce: 0.1539, aux.acc_seg: 85.0124, loss: 0.3458, grad_norm: 4.1630 2023-02-12 02:15:34,904 - mmseg - INFO - Iter [140850/160000] lr: 7.182e-06, eta: 1:05:38, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1863, decode.acc_seg: 92.3585, aux.loss_ce: 0.1463, aux.acc_seg: 85.4737, loss: 0.3325, grad_norm: 3.8320 2023-02-12 02:15:45,064 - mmseg - INFO - Iter [140900/160000] lr: 7.163e-06, eta: 1:05:28, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1907, decode.acc_seg: 92.2064, aux.loss_ce: 0.1433, aux.acc_seg: 85.9454, loss: 0.3340, grad_norm: 3.8809 2023-02-12 02:15:54,966 - mmseg - INFO - Iter [140950/160000] lr: 7.144e-06, eta: 1:05:17, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1900, decode.acc_seg: 92.4392, aux.loss_ce: 0.1509, aux.acc_seg: 85.0663, loss: 0.3409, grad_norm: 3.8236 2023-02-12 02:16:04,823 - mmseg - INFO - Saving checkpoint at 141000 iterations 2023-02-12 02:16:05,498 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:16:05,499 - mmseg - INFO - Iter [141000/160000] lr: 7.125e-06, eta: 1:05:07, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1905, decode.acc_seg: 92.2260, aux.loss_ce: 0.1482, aux.acc_seg: 85.3062, loss: 0.3387, grad_norm: 3.7221 2023-02-12 02:16:17,645 - mmseg - INFO - Iter [141050/160000] lr: 7.107e-06, eta: 1:04:57, time: 0.243, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1936, decode.acc_seg: 92.2590, aux.loss_ce: 0.1567, aux.acc_seg: 84.5941, loss: 0.3503, grad_norm: 3.6009 2023-02-12 02:16:27,542 - mmseg - INFO - Iter [141100/160000] lr: 7.088e-06, eta: 1:04:47, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2021, decode.acc_seg: 91.9482, aux.loss_ce: 0.1557, aux.acc_seg: 84.5843, loss: 0.3579, grad_norm: 4.1287 2023-02-12 02:16:37,884 - mmseg - INFO - Iter [141150/160000] lr: 7.069e-06, eta: 
1:04:36, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1999, decode.acc_seg: 91.9329, aux.loss_ce: 0.1526, aux.acc_seg: 84.9386, loss: 0.3525, grad_norm: 4.5281 2023-02-12 02:16:47,963 - mmseg - INFO - Iter [141200/160000] lr: 7.050e-06, eta: 1:04:26, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1862, decode.acc_seg: 92.4098, aux.loss_ce: 0.1522, aux.acc_seg: 84.9221, loss: 0.3384, grad_norm: 3.5049 2023-02-12 02:16:58,068 - mmseg - INFO - Iter [141250/160000] lr: 7.032e-06, eta: 1:04:16, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1974, decode.acc_seg: 92.1656, aux.loss_ce: 0.1568, aux.acc_seg: 84.7455, loss: 0.3541, grad_norm: 4.2605 2023-02-12 02:17:08,066 - mmseg - INFO - Iter [141300/160000] lr: 7.013e-06, eta: 1:04:05, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1789, decode.acc_seg: 92.8203, aux.loss_ce: 0.1479, aux.acc_seg: 85.4392, loss: 0.3267, grad_norm: 3.8721 2023-02-12 02:17:18,716 - mmseg - INFO - Iter [141350/160000] lr: 6.994e-06, eta: 1:03:55, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1879, decode.acc_seg: 92.3211, aux.loss_ce: 0.1514, aux.acc_seg: 85.2518, loss: 0.3393, grad_norm: 4.1140 2023-02-12 02:17:28,784 - mmseg - INFO - Iter [141400/160000] lr: 6.975e-06, eta: 1:03:45, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1933, decode.acc_seg: 92.2564, aux.loss_ce: 0.1495, aux.acc_seg: 85.4114, loss: 0.3428, grad_norm: 4.0876 2023-02-12 02:17:38,845 - mmseg - INFO - Iter [141450/160000] lr: 6.957e-06, eta: 1:03:35, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1960, decode.acc_seg: 92.0384, aux.loss_ce: 0.1485, aux.acc_seg: 85.2434, loss: 0.3445, grad_norm: 4.6373 2023-02-12 02:17:48,564 - mmseg - INFO - Iter [141500/160000] lr: 6.938e-06, eta: 1:03:24, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2107, decode.acc_seg: 91.8834, aux.loss_ce: 0.1602, aux.acc_seg: 84.6297, loss: 0.3709, grad_norm: 4.3340 2023-02-12 02:17:58,885 - mmseg - INFO - Iter [141550/160000] lr: 6.919e-06, eta: 1:03:14, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1854, decode.acc_seg: 92.4079, aux.loss_ce: 0.1409, aux.acc_seg: 85.8483, loss: 0.3263, grad_norm: 4.2077 2023-02-12 02:18:09,093 - mmseg - INFO - Iter [141600/160000] lr: 6.900e-06, eta: 1:03:04, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2068, decode.acc_seg: 91.4273, aux.loss_ce: 0.1660, aux.acc_seg: 83.9198, loss: 0.3729, grad_norm: 4.6356 2023-02-12 02:18:19,542 - mmseg - INFO - Iter [141650/160000] lr: 6.882e-06, eta: 1:02:53, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1902, decode.acc_seg: 92.1135, aux.loss_ce: 0.1496, aux.acc_seg: 85.2354, loss: 0.3399, grad_norm: 3.9873 2023-02-12 02:18:29,966 - mmseg - INFO - Iter [141700/160000] lr: 6.863e-06, eta: 1:02:43, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1868, decode.acc_seg: 92.4802, aux.loss_ce: 0.1469, aux.acc_seg: 85.3319, loss: 0.3337, grad_norm: 3.9795 2023-02-12 02:18:39,741 - mmseg - INFO - Iter [141750/160000] lr: 6.844e-06, eta: 1:02:33, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1913, decode.acc_seg: 92.0608, aux.loss_ce: 0.1541, aux.acc_seg: 84.7286, loss: 0.3454, grad_norm: 3.4882 2023-02-12 02:18:49,499 - mmseg - INFO - Iter [141800/160000] lr: 6.825e-06, eta: 1:02:22, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1996, decode.acc_seg: 91.8552, aux.loss_ce: 0.1536, aux.acc_seg: 84.7260, loss: 0.3532, grad_norm: 
4.1112 2023-02-12 02:19:00,298 - mmseg - INFO - Iter [141850/160000] lr: 6.807e-06, eta: 1:02:12, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1949, decode.acc_seg: 92.0856, aux.loss_ce: 0.1513, aux.acc_seg: 84.9695, loss: 0.3462, grad_norm: 4.0397 2023-02-12 02:19:10,496 - mmseg - INFO - Iter [141900/160000] lr: 6.788e-06, eta: 1:02:02, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1915, decode.acc_seg: 92.3491, aux.loss_ce: 0.1507, aux.acc_seg: 85.2940, loss: 0.3423, grad_norm: 3.4830 2023-02-12 02:19:20,942 - mmseg - INFO - Iter [141950/160000] lr: 6.769e-06, eta: 1:01:52, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1961, decode.acc_seg: 92.2463, aux.loss_ce: 0.1481, aux.acc_seg: 85.9261, loss: 0.3442, grad_norm: 3.5338 2023-02-12 02:19:30,947 - mmseg - INFO - Saving checkpoint at 142000 iterations 2023-02-12 02:19:31,632 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:19:31,632 - mmseg - INFO - Iter [142000/160000] lr: 6.750e-06, eta: 1:01:41, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1904, decode.acc_seg: 92.4861, aux.loss_ce: 0.1503, aux.acc_seg: 85.6454, loss: 0.3407, grad_norm: 3.7393 2023-02-12 02:19:41,911 - mmseg - INFO - Iter [142050/160000] lr: 6.732e-06, eta: 1:01:31, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1947, decode.acc_seg: 92.1126, aux.loss_ce: 0.1589, aux.acc_seg: 84.2116, loss: 0.3536, grad_norm: 4.6529 2023-02-12 02:19:52,277 - mmseg - INFO - Iter [142100/160000] lr: 6.713e-06, eta: 1:01:21, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1857, decode.acc_seg: 92.3835, aux.loss_ce: 0.1519, aux.acc_seg: 85.2438, loss: 0.3376, grad_norm: 3.8741 2023-02-12 02:20:02,161 - mmseg - INFO - Iter [142150/160000] lr: 6.694e-06, eta: 1:01:11, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1871, decode.acc_seg: 92.2905, aux.loss_ce: 0.1458, aux.acc_seg: 85.2919, loss: 0.3330, grad_norm: 4.2518 2023-02-12 02:20:12,696 - mmseg - INFO - Iter [142200/160000] lr: 6.675e-06, eta: 1:01:00, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1836, decode.acc_seg: 92.5373, aux.loss_ce: 0.1478, aux.acc_seg: 85.7138, loss: 0.3314, grad_norm: 3.7688 2023-02-12 02:20:23,032 - mmseg - INFO - Iter [142250/160000] lr: 6.657e-06, eta: 1:00:50, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1888, decode.acc_seg: 92.3616, aux.loss_ce: 0.1496, aux.acc_seg: 85.2410, loss: 0.3383, grad_norm: 4.0256 2023-02-12 02:20:35,380 - mmseg - INFO - Iter [142300/160000] lr: 6.638e-06, eta: 1:00:40, time: 0.247, data_time: 0.046, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 92.2265, aux.loss_ce: 0.1548, aux.acc_seg: 84.9001, loss: 0.3473, grad_norm: 4.5387 2023-02-12 02:20:45,462 - mmseg - INFO - Iter [142350/160000] lr: 6.619e-06, eta: 1:00:30, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1905, decode.acc_seg: 92.2018, aux.loss_ce: 0.1516, aux.acc_seg: 84.9081, loss: 0.3421, grad_norm: 3.8216 2023-02-12 02:20:55,392 - mmseg - INFO - Iter [142400/160000] lr: 6.600e-06, eta: 1:00:19, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1868, decode.acc_seg: 92.4918, aux.loss_ce: 0.1415, aux.acc_seg: 86.1668, loss: 0.3283, grad_norm: 3.5765 2023-02-12 02:21:05,226 - mmseg - INFO - Iter [142450/160000] lr: 6.582e-06, eta: 1:00:09, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1968, decode.acc_seg: 92.2546, aux.loss_ce: 0.1575, aux.acc_seg: 84.6119, loss: 0.3543, 
grad_norm: 4.9136 2023-02-12 02:21:15,136 - mmseg - INFO - Iter [142500/160000] lr: 6.563e-06, eta: 0:59:59, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1986, decode.acc_seg: 92.1805, aux.loss_ce: 0.1527, aux.acc_seg: 85.4404, loss: 0.3513, grad_norm: 3.4764 2023-02-12 02:21:24,896 - mmseg - INFO - Iter [142550/160000] lr: 6.544e-06, eta: 0:59:48, time: 0.196, data_time: 0.005, memory: 7748, decode.loss_ce: 0.2023, decode.acc_seg: 91.8420, aux.loss_ce: 0.1569, aux.acc_seg: 84.3801, loss: 0.3592, grad_norm: 4.2628 2023-02-12 02:21:34,929 - mmseg - INFO - Iter [142600/160000] lr: 6.525e-06, eta: 0:59:38, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1855, decode.acc_seg: 92.4512, aux.loss_ce: 0.1452, aux.acc_seg: 85.5650, loss: 0.3307, grad_norm: 4.0566 2023-02-12 02:21:44,692 - mmseg - INFO - Iter [142650/160000] lr: 6.507e-06, eta: 0:59:28, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2201, decode.acc_seg: 91.5078, aux.loss_ce: 0.1605, aux.acc_seg: 84.4681, loss: 0.3806, grad_norm: 4.8991 2023-02-12 02:21:54,939 - mmseg - INFO - Iter [142700/160000] lr: 6.488e-06, eta: 0:59:17, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1953, decode.acc_seg: 92.0279, aux.loss_ce: 0.1507, aux.acc_seg: 85.0546, loss: 0.3461, grad_norm: 4.3251 2023-02-12 02:22:04,820 - mmseg - INFO - Iter [142750/160000] lr: 6.469e-06, eta: 0:59:07, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1776, decode.acc_seg: 92.6986, aux.loss_ce: 0.1412, aux.acc_seg: 85.7085, loss: 0.3189, grad_norm: 3.7995 2023-02-12 02:22:15,131 - mmseg - INFO - Iter [142800/160000] lr: 6.450e-06, eta: 0:58:57, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.3697, aux.loss_ce: 0.1455, aux.acc_seg: 85.7090, loss: 0.3368, grad_norm: 4.3556 2023-02-12 02:22:24,915 - mmseg - INFO - Iter [142850/160000] lr: 6.432e-06, eta: 0:58:46, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1959, decode.acc_seg: 91.9648, aux.loss_ce: 0.1516, aux.acc_seg: 84.6553, loss: 0.3475, grad_norm: 3.4985 2023-02-12 02:22:34,882 - mmseg - INFO - Iter [142900/160000] lr: 6.413e-06, eta: 0:58:36, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1944, decode.acc_seg: 92.1437, aux.loss_ce: 0.1537, aux.acc_seg: 84.9254, loss: 0.3482, grad_norm: 4.1225 2023-02-12 02:22:45,173 - mmseg - INFO - Iter [142950/160000] lr: 6.394e-06, eta: 0:58:26, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1971, decode.acc_seg: 91.9157, aux.loss_ce: 0.1571, aux.acc_seg: 84.9356, loss: 0.3542, grad_norm: 5.0890 2023-02-12 02:22:55,209 - mmseg - INFO - Saving checkpoint at 143000 iterations 2023-02-12 02:22:55,892 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:22:55,893 - mmseg - INFO - Iter [143000/160000] lr: 6.375e-06, eta: 0:58:16, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1986, decode.acc_seg: 92.0819, aux.loss_ce: 0.1552, aux.acc_seg: 84.7350, loss: 0.3537, grad_norm: 4.1252 2023-02-12 02:23:05,984 - mmseg - INFO - Iter [143050/160000] lr: 6.357e-06, eta: 0:58:05, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2005, decode.acc_seg: 91.9520, aux.loss_ce: 0.1586, aux.acc_seg: 84.0451, loss: 0.3590, grad_norm: 5.2690 2023-02-12 02:23:15,682 - mmseg - INFO - Iter [143100/160000] lr: 6.338e-06, eta: 0:57:55, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1976, decode.acc_seg: 91.9542, aux.loss_ce: 0.1560, aux.acc_seg: 84.8287, loss: 
0.3536, grad_norm: 4.7607 2023-02-12 02:23:25,347 - mmseg - INFO - Iter [143150/160000] lr: 6.319e-06, eta: 0:57:45, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1946, decode.acc_seg: 92.0689, aux.loss_ce: 0.1526, aux.acc_seg: 84.9077, loss: 0.3473, grad_norm: 4.0535 2023-02-12 02:23:35,148 - mmseg - INFO - Iter [143200/160000] lr: 6.300e-06, eta: 0:57:34, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1954, decode.acc_seg: 91.9525, aux.loss_ce: 0.1483, aux.acc_seg: 85.3144, loss: 0.3436, grad_norm: 3.3315 2023-02-12 02:23:45,154 - mmseg - INFO - Iter [143250/160000] lr: 6.282e-06, eta: 0:57:24, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1971, decode.acc_seg: 92.0705, aux.loss_ce: 0.1551, aux.acc_seg: 84.6568, loss: 0.3522, grad_norm: 4.4996 2023-02-12 02:23:54,838 - mmseg - INFO - Iter [143300/160000] lr: 6.263e-06, eta: 0:57:13, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1887, decode.acc_seg: 92.2847, aux.loss_ce: 0.1519, aux.acc_seg: 84.7161, loss: 0.3406, grad_norm: 3.8496 2023-02-12 02:24:04,915 - mmseg - INFO - Iter [143350/160000] lr: 6.244e-06, eta: 0:57:03, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2035, decode.acc_seg: 91.9973, aux.loss_ce: 0.1535, aux.acc_seg: 85.0438, loss: 0.3570, grad_norm: 4.3472 2023-02-12 02:24:15,414 - mmseg - INFO - Iter [143400/160000] lr: 6.225e-06, eta: 0:56:53, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1860, decode.acc_seg: 92.3769, aux.loss_ce: 0.1561, aux.acc_seg: 84.5881, loss: 0.3420, grad_norm: 4.2371 2023-02-12 02:24:25,267 - mmseg - INFO - Iter [143450/160000] lr: 6.207e-06, eta: 0:56:43, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1908, decode.acc_seg: 92.4726, aux.loss_ce: 0.1508, aux.acc_seg: 85.3851, loss: 0.3416, grad_norm: 4.1347 2023-02-12 02:24:35,570 - mmseg - INFO - Iter [143500/160000] lr: 6.188e-06, eta: 0:56:32, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1974, decode.acc_seg: 92.2203, aux.loss_ce: 0.1543, aux.acc_seg: 85.1153, loss: 0.3517, grad_norm: 4.3366 2023-02-12 02:24:45,700 - mmseg - INFO - Iter [143550/160000] lr: 6.169e-06, eta: 0:56:22, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1908, decode.acc_seg: 92.2935, aux.loss_ce: 0.1502, aux.acc_seg: 85.1165, loss: 0.3411, grad_norm: 4.0908 2023-02-12 02:24:58,016 - mmseg - INFO - Iter [143600/160000] lr: 6.150e-06, eta: 0:56:12, time: 0.246, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1831, decode.acc_seg: 92.6925, aux.loss_ce: 0.1479, aux.acc_seg: 85.3193, loss: 0.3310, grad_norm: 4.0085 2023-02-12 02:25:08,727 - mmseg - INFO - Iter [143650/160000] lr: 6.132e-06, eta: 0:56:02, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1932, decode.acc_seg: 92.1446, aux.loss_ce: 0.1540, aux.acc_seg: 85.0714, loss: 0.3471, grad_norm: 4.5699 2023-02-12 02:25:18,708 - mmseg - INFO - Iter [143700/160000] lr: 6.113e-06, eta: 0:55:51, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1923, decode.acc_seg: 92.1272, aux.loss_ce: 0.1529, aux.acc_seg: 84.8619, loss: 0.3453, grad_norm: 4.2316 2023-02-12 02:25:28,820 - mmseg - INFO - Iter [143750/160000] lr: 6.094e-06, eta: 0:55:41, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1879, decode.acc_seg: 92.4276, aux.loss_ce: 0.1475, aux.acc_seg: 85.6114, loss: 0.3354, grad_norm: 3.6845 2023-02-12 02:25:38,863 - mmseg - INFO - Iter [143800/160000] lr: 6.075e-06, eta: 0:55:31, time: 0.201, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.1927, decode.acc_seg: 92.1599, aux.loss_ce: 0.1461, aux.acc_seg: 85.5554, loss: 0.3388, grad_norm: 4.0239 2023-02-12 02:25:48,707 - mmseg - INFO - Iter [143850/160000] lr: 6.057e-06, eta: 0:55:20, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1939, decode.acc_seg: 92.0503, aux.loss_ce: 0.1493, aux.acc_seg: 85.0079, loss: 0.3432, grad_norm: 5.0904 2023-02-12 02:25:58,793 - mmseg - INFO - Iter [143900/160000] lr: 6.038e-06, eta: 0:55:10, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1980, decode.acc_seg: 91.8266, aux.loss_ce: 0.1551, aux.acc_seg: 84.4201, loss: 0.3532, grad_norm: 4.8184 2023-02-12 02:26:09,419 - mmseg - INFO - Iter [143950/160000] lr: 6.019e-06, eta: 0:55:00, time: 0.212, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1899, decode.acc_seg: 92.2160, aux.loss_ce: 0.1501, aux.acc_seg: 84.9374, loss: 0.3399, grad_norm: 4.7388 2023-02-12 02:26:19,733 - mmseg - INFO - Saving checkpoint at 144000 iterations 2023-02-12 02:26:20,428 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:26:20,429 - mmseg - INFO - Iter [144000/160000] lr: 6.000e-06, eta: 0:54:50, time: 0.221, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1956, decode.acc_seg: 92.1183, aux.loss_ce: 0.1554, aux.acc_seg: 84.7630, loss: 0.3510, grad_norm: 4.2726 2023-02-12 02:26:31,898 - mmseg - INFO - per class results: 2023-02-12 02:26:31,904 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 75.07 | 84.84 | | building | 81.32 | 90.62 | | sky | 93.9 | 97.45 | | floor | 79.24 | 89.72 | | tree | 73.07 | 86.65 | | ceiling | 82.59 | 92.07 | | road | 81.1 | 91.62 | | bed | 88.02 | 94.21 | | windowpane | 59.17 | 72.45 | | grass | 66.78 | 85.37 | | cabinet | 56.92 | 71.0 | | sidewalk | 60.97 | 72.5 | | person | 77.47 | 91.81 | | earth | 34.48 | 43.07 | | door | 43.24 | 64.52 | | table | 54.22 | 65.61 | | mountain | 55.44 | 75.54 | | plant | 51.24 | 64.04 | | curtain | 70.8 | 85.75 | | chair | 53.46 | 70.41 | | car | 82.52 | 90.38 | | water | 51.15 | 68.24 | | painting | 67.17 | 85.29 | | sofa | 63.65 | 82.39 | | shelf | 40.69 | 58.92 | | house | 42.75 | 53.76 | | sea | 61.11 | 90.51 | | mirror | 60.92 | 72.57 | | rug | 60.62 | 77.79 | | field | 29.59 | 40.57 | | armchair | 37.74 | 50.76 | | seat | 53.9 | 74.77 | | fence | 31.63 | 44.43 | | desk | 42.98 | 64.57 | | rock | 27.84 | 45.21 | | wardrobe | 49.44 | 75.02 | | lamp | 60.47 | 71.13 | | bathtub | 75.9 | 79.61 | | railing | 30.56 | 43.66 | | cushion | 54.33 | 67.5 | | base | 31.11 | 42.75 | | box | 18.91 | 22.09 | | column | 39.3 | 56.25 | | signboard | 33.96 | 53.76 | | chest of drawers | 38.98 | 68.79 | | counter | 22.97 | 38.19 | | sand | 39.35 | 59.52 | | sink | 69.1 | 81.18 | | skyscraper | 59.98 | 82.14 | | fireplace | 67.8 | 84.83 | | refrigerator | 66.62 | 86.85 | | grandstand | 34.3 | 64.83 | | path | 20.71 | 32.57 | | stairs | 32.22 | 38.78 | | runway | 65.33 | 86.29 | | case | 42.43 | 58.32 | | pool table | 90.91 | 94.49 | | pillow | 53.34 | 65.17 | | screen door | 63.82 | 71.23 | | stairway | 30.25 | 38.94 | | river | 10.73 | 18.87 | | bridge | 58.44 | 74.2 | | bookcase | 33.94 | 58.18 | | blind | 45.62 | 58.84 | | coffee table | 44.9 | 85.78 | | toilet | 85.63 | 90.77 | | flower | 40.75 | 58.39 | | book | 46.04 | 67.89 | | hill | 5.12 | 7.5 | | bench | 46.41 | 55.14 | | countertop | 56.31 | 76.29 | | stove | 73.09 | 81.75 | | palm | 46.91 | 75.35 | | kitchen island | 36.68 | 73.83 | | computer | 
70.42 | 86.46 | | swivel chair | 43.08 | 53.93 | | boat | 38.57 | 45.95 | | bar | 24.08 | 32.51 | | arcade machine | 55.6 | 58.66 | | hovel | 30.74 | 43.71 | | bus | 80.34 | 96.01 | | towel | 62.25 | 71.39 | | light | 51.95 | 58.48 | | truck | 36.42 | 49.97 | | tower | 27.53 | 42.97 | | chandelier | 65.41 | 85.95 | | awning | 24.38 | 33.56 | | streetlight | 24.62 | 32.79 | | booth | 40.62 | 43.92 | | television receiver | 71.28 | 81.73 | | airplane | 59.32 | 71.28 | | dirt track | 21.05 | 52.05 | | apparel | 28.43 | 54.17 | | pole | 13.42 | 16.53 | | land | 2.61 | 3.96 | | bannister | 13.27 | 18.55 | | escalator | 25.2 | 33.0 | | ottoman | 39.09 | 52.37 | | bottle | 32.7 | 49.03 | | buffet | 41.5 | 50.61 | | poster | 29.47 | 43.22 | | stage | 19.24 | 41.3 | | van | 48.73 | 67.84 | | ship | 48.21 | 71.03 | | fountain | 23.57 | 27.89 | | conveyer belt | 64.17 | 86.6 | | canopy | 14.21 | 20.12 | | washer | 65.1 | 73.49 | | plaything | 26.52 | 44.71 | | swimming pool | 58.64 | 64.08 | | stool | 32.61 | 43.15 | | barrel | 13.3 | 65.04 | | basket | 27.36 | 39.38 | | waterfall | 44.49 | 56.42 | | tent | 80.68 | 98.5 | | bag | 15.11 | 23.15 | | minibike | 63.14 | 74.34 | | cradle | 81.76 | 92.08 | | oven | 26.87 | 63.86 | | ball | 44.13 | 61.43 | | food | 40.3 | 48.11 | | step | 9.82 | 14.09 | | tank | 34.74 | 35.54 | | trade name | 26.72 | 33.57 | | microwave | 46.61 | 50.95 | | pot | 38.28 | 44.12 | | animal | 56.57 | 62.4 | | bicycle | 52.98 | 75.92 | | lake | 56.64 | 63.07 | | dishwasher | 58.31 | 70.78 | | screen | 41.78 | 70.9 | | blanket | 14.82 | 19.49 | | sculpture | 50.48 | 69.09 | | hood | 64.11 | 70.58 | | sconce | 41.85 | 52.65 | | vase | 32.53 | 56.03 | | traffic light | 31.22 | 53.45 | | tray | 4.87 | 10.05 | | ashcan | 36.06 | 55.12 | | fan | 54.88 | 71.12 | | pier | 53.52 | 79.29 | | crt screen | 2.93 | 9.87 | | plate | 50.44 | 72.05 | | monitor | 6.56 | 7.72 | | bulletin board | 51.55 | 65.06 | | shower | 0.0 | 0.0 | | radiator | 52.01 | 55.72 | | glass | 11.0 | 14.61 | | clock | 24.34 | 33.05 | | flag | 53.03 | 61.77 | +---------------------+-------+-------+ 2023-02-12 02:26:31,905 - mmseg - INFO - Summary: 2023-02-12 02:26:31,905 - mmseg - INFO - +------+-------+------+ | aAcc | mIoU | mAcc | +------+-------+------+ | 81.3 | 45.77 | 59.6 | +------+-------+------+ 2023-02-12 02:26:32,554 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_144000.pth. 2023-02-12 02:26:32,555 - mmseg - INFO - Best mIoU is 0.4577 at 144000 iter. 
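The summary just reported (aAcc 81.3, mIoU 45.77, mAcc 59.6) is the unweighted aggregation of the 150 per-class IoU/Acc values in the table above, and the mIoU value is what drives the selection of best_mIoU_iter_144000.pth. Below is a minimal sketch of how those three numbers follow from a per-class confusion matrix; it is illustrative only, not mmseg's own evaluation code, and the function name and the toy inputs are assumptions:

    import numpy as np

    def summarize(conf: np.ndarray):
        """conf[i, j] = number of pixels with ground-truth class i predicted as class j."""
        tp = np.diag(conf).astype(float)       # correctly labelled pixels per class
        gt = conf.sum(axis=1).astype(float)    # ground-truth pixels per class
        pred = conf.sum(axis=0).astype(float)  # predicted pixels per class
        a_acc = tp.sum() / conf.sum()          # aAcc: overall pixel accuracy
        with np.errstate(divide="ignore", invalid="ignore"):
            acc = tp / gt                      # per-class Acc (recall), as in the table
            iou = tp / (gt + pred - tp)        # per-class IoU, as in the table
        return a_acc, np.nanmean(acc), np.nanmean(iou)   # aAcc, mAcc, mIoU

    # Toy 3-class example (numbers are illustrative, not from this run):
    conf = np.array([[80, 10, 10],
                     [ 5, 90,  5],
                     [20,  0, 80]])
    print(summarize(conf))   # -> approximately (0.833, 0.833, 0.718)

In this scheme, classes with no ground-truth pixels yield NaN and are skipped by nanmean, while classes that occur but are never predicted correctly (e.g. shower above, with IoU 0.0) still count and pull the mean down.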
2023-02-12 02:26:32,555 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:26:32,555 - mmseg - INFO - Iter(val) [250] aAcc: 0.8130, mIoU: 0.4577, mAcc: 0.5960, IoU.wall: 0.7507, IoU.building: 0.8132, IoU.sky: 0.9390, IoU.floor: 0.7924, IoU.tree: 0.7307, IoU.ceiling: 0.8259, IoU.road: 0.8110, IoU.bed : 0.8802, IoU.windowpane: 0.5917, IoU.grass: 0.6678, IoU.cabinet: 0.5692, IoU.sidewalk: 0.6097, IoU.person: 0.7747, IoU.earth: 0.3448, IoU.door: 0.4324, IoU.table: 0.5422, IoU.mountain: 0.5544, IoU.plant: 0.5124, IoU.curtain: 0.7080, IoU.chair: 0.5346, IoU.car: 0.8252, IoU.water: 0.5115, IoU.painting: 0.6717, IoU.sofa: 0.6365, IoU.shelf: 0.4069, IoU.house: 0.4275, IoU.sea: 0.6111, IoU.mirror: 0.6092, IoU.rug: 0.6062, IoU.field: 0.2959, IoU.armchair: 0.3774, IoU.seat: 0.5390, IoU.fence: 0.3163, IoU.desk: 0.4298, IoU.rock: 0.2784, IoU.wardrobe: 0.4944, IoU.lamp: 0.6047, IoU.bathtub: 0.7590, IoU.railing: 0.3056, IoU.cushion: 0.5433, IoU.base: 0.3111, IoU.box: 0.1891, IoU.column: 0.3930, IoU.signboard: 0.3396, IoU.chest of drawers: 0.3898, IoU.counter: 0.2297, IoU.sand: 0.3935, IoU.sink: 0.6910, IoU.skyscraper: 0.5998, IoU.fireplace: 0.6780, IoU.refrigerator: 0.6662, IoU.grandstand: 0.3430, IoU.path: 0.2071, IoU.stairs: 0.3222, IoU.runway: 0.6533, IoU.case: 0.4243, IoU.pool table: 0.9091, IoU.pillow: 0.5334, IoU.screen door: 0.6382, IoU.stairway: 0.3025, IoU.river: 0.1073, IoU.bridge: 0.5844, IoU.bookcase: 0.3394, IoU.blind: 0.4562, IoU.coffee table: 0.4490, IoU.toilet: 0.8563, IoU.flower: 0.4075, IoU.book: 0.4604, IoU.hill: 0.0512, IoU.bench: 0.4641, IoU.countertop: 0.5631, IoU.stove: 0.7309, IoU.palm: 0.4691, IoU.kitchen island: 0.3668, IoU.computer: 0.7042, IoU.swivel chair: 0.4308, IoU.boat: 0.3857, IoU.bar: 0.2408, IoU.arcade machine: 0.5560, IoU.hovel: 0.3074, IoU.bus: 0.8034, IoU.towel: 0.6225, IoU.light: 0.5195, IoU.truck: 0.3642, IoU.tower: 0.2753, IoU.chandelier: 0.6541, IoU.awning: 0.2438, IoU.streetlight: 0.2462, IoU.booth: 0.4062, IoU.television receiver: 0.7128, IoU.airplane: 0.5932, IoU.dirt track: 0.2105, IoU.apparel: 0.2843, IoU.pole: 0.1342, IoU.land: 0.0261, IoU.bannister: 0.1327, IoU.escalator: 0.2520, IoU.ottoman: 0.3909, IoU.bottle: 0.3270, IoU.buffet: 0.4150, IoU.poster: 0.2947, IoU.stage: 0.1924, IoU.van: 0.4873, IoU.ship: 0.4821, IoU.fountain: 0.2357, IoU.conveyer belt: 0.6417, IoU.canopy: 0.1421, IoU.washer: 0.6510, IoU.plaything: 0.2652, IoU.swimming pool: 0.5864, IoU.stool: 0.3261, IoU.barrel: 0.1330, IoU.basket: 0.2736, IoU.waterfall: 0.4449, IoU.tent: 0.8068, IoU.bag: 0.1511, IoU.minibike: 0.6314, IoU.cradle: 0.8176, IoU.oven: 0.2687, IoU.ball: 0.4413, IoU.food: 0.4030, IoU.step: 0.0982, IoU.tank: 0.3474, IoU.trade name: 0.2672, IoU.microwave: 0.4661, IoU.pot: 0.3828, IoU.animal: 0.5657, IoU.bicycle: 0.5298, IoU.lake: 0.5664, IoU.dishwasher: 0.5831, IoU.screen: 0.4178, IoU.blanket: 0.1482, IoU.sculpture: 0.5048, IoU.hood: 0.6411, IoU.sconce: 0.4185, IoU.vase: 0.3253, IoU.traffic light: 0.3122, IoU.tray: 0.0487, IoU.ashcan: 0.3606, IoU.fan: 0.5488, IoU.pier: 0.5352, IoU.crt screen: 0.0293, IoU.plate: 0.5044, IoU.monitor: 0.0656, IoU.bulletin board: 0.5155, IoU.shower: 0.0000, IoU.radiator: 0.5201, IoU.glass: 0.1100, IoU.clock: 0.2434, IoU.flag: 0.5303, Acc.wall: 0.8484, Acc.building: 0.9062, Acc.sky: 0.9745, Acc.floor: 0.8972, Acc.tree: 0.8665, Acc.ceiling: 0.9207, Acc.road: 0.9162, Acc.bed : 0.9421, Acc.windowpane: 0.7245, Acc.grass: 0.8537, Acc.cabinet: 0.7100, Acc.sidewalk: 0.7250, Acc.person: 0.9181, Acc.earth: 0.4307, Acc.door: 
0.6452, Acc.table: 0.6561, Acc.mountain: 0.7554, Acc.plant: 0.6404, Acc.curtain: 0.8575, Acc.chair: 0.7041, Acc.car: 0.9038, Acc.water: 0.6824, Acc.painting: 0.8529, Acc.sofa: 0.8239, Acc.shelf: 0.5892, Acc.house: 0.5376, Acc.sea: 0.9051, Acc.mirror: 0.7257, Acc.rug: 0.7779, Acc.field: 0.4057, Acc.armchair: 0.5076, Acc.seat: 0.7477, Acc.fence: 0.4443, Acc.desk: 0.6457, Acc.rock: 0.4521, Acc.wardrobe: 0.7502, Acc.lamp: 0.7113, Acc.bathtub: 0.7961, Acc.railing: 0.4366, Acc.cushion: 0.6750, Acc.base: 0.4275, Acc.box: 0.2209, Acc.column: 0.5625, Acc.signboard: 0.5376, Acc.chest of drawers: 0.6879, Acc.counter: 0.3819, Acc.sand: 0.5952, Acc.sink: 0.8118, Acc.skyscraper: 0.8214, Acc.fireplace: 0.8483, Acc.refrigerator: 0.8685, Acc.grandstand: 0.6483, Acc.path: 0.3257, Acc.stairs: 0.3878, Acc.runway: 0.8629, Acc.case: 0.5832, Acc.pool table: 0.9449, Acc.pillow: 0.6517, Acc.screen door: 0.7123, Acc.stairway: 0.3894, Acc.river: 0.1887, Acc.bridge: 0.7420, Acc.bookcase: 0.5818, Acc.blind: 0.5884, Acc.coffee table: 0.8578, Acc.toilet: 0.9077, Acc.flower: 0.5839, Acc.book: 0.6789, Acc.hill: 0.0750, Acc.bench: 0.5514, Acc.countertop: 0.7629, Acc.stove: 0.8175, Acc.palm: 0.7535, Acc.kitchen island: 0.7383, Acc.computer: 0.8646, Acc.swivel chair: 0.5393, Acc.boat: 0.4595, Acc.bar: 0.3251, Acc.arcade machine: 0.5866, Acc.hovel: 0.4371, Acc.bus: 0.9601, Acc.towel: 0.7139, Acc.light: 0.5848, Acc.truck: 0.4997, Acc.tower: 0.4297, Acc.chandelier: 0.8595, Acc.awning: 0.3356, Acc.streetlight: 0.3279, Acc.booth: 0.4392, Acc.television receiver: 0.8173, Acc.airplane: 0.7128, Acc.dirt track: 0.5205, Acc.apparel: 0.5417, Acc.pole: 0.1653, Acc.land: 0.0396, Acc.bannister: 0.1855, Acc.escalator: 0.3300, Acc.ottoman: 0.5237, Acc.bottle: 0.4903, Acc.buffet: 0.5061, Acc.poster: 0.4322, Acc.stage: 0.4130, Acc.van: 0.6784, Acc.ship: 0.7103, Acc.fountain: 0.2789, Acc.conveyer belt: 0.8660, Acc.canopy: 0.2012, Acc.washer: 0.7349, Acc.plaything: 0.4471, Acc.swimming pool: 0.6408, Acc.stool: 0.4315, Acc.barrel: 0.6504, Acc.basket: 0.3938, Acc.waterfall: 0.5642, Acc.tent: 0.9850, Acc.bag: 0.2315, Acc.minibike: 0.7434, Acc.cradle: 0.9208, Acc.oven: 0.6386, Acc.ball: 0.6143, Acc.food: 0.4811, Acc.step: 0.1409, Acc.tank: 0.3554, Acc.trade name: 0.3357, Acc.microwave: 0.5095, Acc.pot: 0.4412, Acc.animal: 0.6240, Acc.bicycle: 0.7592, Acc.lake: 0.6307, Acc.dishwasher: 0.7078, Acc.screen: 0.7090, Acc.blanket: 0.1949, Acc.sculpture: 0.6909, Acc.hood: 0.7058, Acc.sconce: 0.5265, Acc.vase: 0.5603, Acc.traffic light: 0.5345, Acc.tray: 0.1005, Acc.ashcan: 0.5512, Acc.fan: 0.7112, Acc.pier: 0.7929, Acc.crt screen: 0.0987, Acc.plate: 0.7205, Acc.monitor: 0.0772, Acc.bulletin board: 0.6506, Acc.shower: 0.0000, Acc.radiator: 0.5572, Acc.glass: 0.1461, Acc.clock: 0.3305, Acc.flag: 0.6177 2023-02-12 02:26:42,349 - mmseg - INFO - Iter [144050/160000] lr: 5.982e-06, eta: 0:54:41, time: 0.438, data_time: 0.247, memory: 7748, decode.loss_ce: 0.1899, decode.acc_seg: 92.4930, aux.loss_ce: 0.1541, aux.acc_seg: 85.0498, loss: 0.3439, grad_norm: 3.9908 2023-02-12 02:26:52,593 - mmseg - INFO - Iter [144100/160000] lr: 5.963e-06, eta: 0:54:31, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1798, decode.acc_seg: 92.6976, aux.loss_ce: 0.1443, aux.acc_seg: 85.7916, loss: 0.3241, grad_norm: 4.3611 2023-02-12 02:27:03,082 - mmseg - INFO - Iter [144150/160000] lr: 5.944e-06, eta: 0:54:20, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1895, decode.acc_seg: 92.5257, aux.loss_ce: 0.1476, aux.acc_seg: 85.5417, loss: 0.3371, 
grad_norm: 3.5699 2023-02-12 02:27:12,968 - mmseg - INFO - Iter [144200/160000] lr: 5.925e-06, eta: 0:54:10, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1886, decode.acc_seg: 92.5262, aux.loss_ce: 0.1470, aux.acc_seg: 85.6031, loss: 0.3356, grad_norm: 3.8003 2023-02-12 02:27:22,861 - mmseg - INFO - Iter [144250/160000] lr: 5.907e-06, eta: 0:54:00, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1885, decode.acc_seg: 92.3526, aux.loss_ce: 0.1487, aux.acc_seg: 85.2825, loss: 0.3372, grad_norm: 3.4966 2023-02-12 02:27:32,824 - mmseg - INFO - Iter [144300/160000] lr: 5.888e-06, eta: 0:53:49, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1907, decode.acc_seg: 92.2175, aux.loss_ce: 0.1560, aux.acc_seg: 84.6924, loss: 0.3466, grad_norm: 3.9252 2023-02-12 02:27:43,719 - mmseg - INFO - Iter [144350/160000] lr: 5.869e-06, eta: 0:53:39, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1918, decode.acc_seg: 92.1666, aux.loss_ce: 0.1509, aux.acc_seg: 85.1428, loss: 0.3427, grad_norm: 4.5328 2023-02-12 02:27:53,534 - mmseg - INFO - Iter [144400/160000] lr: 5.850e-06, eta: 0:53:29, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1890, decode.acc_seg: 92.3916, aux.loss_ce: 0.1443, aux.acc_seg: 85.6480, loss: 0.3333, grad_norm: 3.5523 2023-02-12 02:28:03,264 - mmseg - INFO - Iter [144450/160000] lr: 5.832e-06, eta: 0:53:18, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.3693, aux.loss_ce: 0.1492, aux.acc_seg: 85.5075, loss: 0.3405, grad_norm: 4.0440 2023-02-12 02:28:12,947 - mmseg - INFO - Iter [144500/160000] lr: 5.813e-06, eta: 0:53:08, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1932, decode.acc_seg: 92.3686, aux.loss_ce: 0.1533, aux.acc_seg: 84.9866, loss: 0.3466, grad_norm: 4.7093 2023-02-12 02:28:22,816 - mmseg - INFO - Iter [144550/160000] lr: 5.794e-06, eta: 0:52:58, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1969, decode.acc_seg: 92.1081, aux.loss_ce: 0.1526, aux.acc_seg: 84.8780, loss: 0.3494, grad_norm: 3.7624 2023-02-12 02:28:33,278 - mmseg - INFO - Iter [144600/160000] lr: 5.775e-06, eta: 0:52:48, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1845, decode.acc_seg: 92.5575, aux.loss_ce: 0.1491, aux.acc_seg: 85.2143, loss: 0.3336, grad_norm: 3.9253 2023-02-12 02:28:42,970 - mmseg - INFO - Iter [144650/160000] lr: 5.757e-06, eta: 0:52:37, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1868, decode.acc_seg: 92.5147, aux.loss_ce: 0.1500, aux.acc_seg: 85.1448, loss: 0.3369, grad_norm: 4.0658 2023-02-12 02:28:52,893 - mmseg - INFO - Iter [144700/160000] lr: 5.738e-06, eta: 0:52:27, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1887, decode.acc_seg: 92.3113, aux.loss_ce: 0.1497, aux.acc_seg: 85.1950, loss: 0.3384, grad_norm: 3.6997 2023-02-12 02:29:03,098 - mmseg - INFO - Iter [144750/160000] lr: 5.719e-06, eta: 0:52:17, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1954, decode.acc_seg: 91.9623, aux.loss_ce: 0.1537, aux.acc_seg: 84.7883, loss: 0.3491, grad_norm: 4.2001 2023-02-12 02:29:13,079 - mmseg - INFO - Iter [144800/160000] lr: 5.700e-06, eta: 0:52:06, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2016, decode.acc_seg: 91.9093, aux.loss_ce: 0.1492, aux.acc_seg: 85.1924, loss: 0.3508, grad_norm: 4.1190 2023-02-12 02:29:24,937 - mmseg - INFO - Iter [144850/160000] lr: 5.682e-06, eta: 0:51:56, time: 0.238, data_time: 0.047, memory: 7748, decode.loss_ce: 
0.1908, decode.acc_seg: 92.3569, aux.loss_ce: 0.1487, aux.acc_seg: 85.5526, loss: 0.3395, grad_norm: 4.0196 2023-02-12 02:29:35,002 - mmseg - INFO - Iter [144900/160000] lr: 5.663e-06, eta: 0:51:46, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2077, decode.acc_seg: 91.7974, aux.loss_ce: 0.1548, aux.acc_seg: 85.0209, loss: 0.3625, grad_norm: 5.1516 2023-02-12 02:29:44,589 - mmseg - INFO - Iter [144950/160000] lr: 5.644e-06, eta: 0:51:35, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1981, decode.acc_seg: 91.9530, aux.loss_ce: 0.1471, aux.acc_seg: 85.2745, loss: 0.3452, grad_norm: 4.1558 2023-02-12 02:29:54,333 - mmseg - INFO - Saving checkpoint at 145000 iterations 2023-02-12 02:29:55,011 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:29:55,011 - mmseg - INFO - Iter [145000/160000] lr: 5.625e-06, eta: 0:51:25, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1888, decode.acc_seg: 92.2474, aux.loss_ce: 0.1494, aux.acc_seg: 85.1790, loss: 0.3383, grad_norm: 3.4195 2023-02-12 02:30:05,691 - mmseg - INFO - Iter [145050/160000] lr: 5.607e-06, eta: 0:51:15, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1799, decode.acc_seg: 92.5100, aux.loss_ce: 0.1457, aux.acc_seg: 85.5048, loss: 0.3255, grad_norm: 3.8925 2023-02-12 02:30:15,652 - mmseg - INFO - Iter [145100/160000] lr: 5.588e-06, eta: 0:51:05, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1848, decode.acc_seg: 92.5174, aux.loss_ce: 0.1444, aux.acc_seg: 85.6555, loss: 0.3293, grad_norm: 3.3333 2023-02-12 02:30:25,515 - mmseg - INFO - Iter [145150/160000] lr: 5.569e-06, eta: 0:50:54, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1894, decode.acc_seg: 92.3337, aux.loss_ce: 0.1483, aux.acc_seg: 85.3604, loss: 0.3377, grad_norm: 3.2537 2023-02-12 02:30:36,475 - mmseg - INFO - Iter [145200/160000] lr: 5.550e-06, eta: 0:50:44, time: 0.219, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1969, decode.acc_seg: 91.9553, aux.loss_ce: 0.1531, aux.acc_seg: 84.7649, loss: 0.3500, grad_norm: 4.7439 2023-02-12 02:30:46,200 - mmseg - INFO - Iter [145250/160000] lr: 5.532e-06, eta: 0:50:34, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1876, decode.acc_seg: 92.5220, aux.loss_ce: 0.1503, aux.acc_seg: 85.3677, loss: 0.3379, grad_norm: 3.9814 2023-02-12 02:30:56,586 - mmseg - INFO - Iter [145300/160000] lr: 5.513e-06, eta: 0:50:23, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.1969, aux.loss_ce: 0.1504, aux.acc_seg: 85.0498, loss: 0.3455, grad_norm: 3.6274 2023-02-12 02:31:06,377 - mmseg - INFO - Iter [145350/160000] lr: 5.494e-06, eta: 0:50:13, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1985, decode.acc_seg: 91.9367, aux.loss_ce: 0.1563, aux.acc_seg: 84.9127, loss: 0.3549, grad_norm: 4.1836 2023-02-12 02:31:16,574 - mmseg - INFO - Iter [145400/160000] lr: 5.475e-06, eta: 0:50:03, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1862, decode.acc_seg: 92.4094, aux.loss_ce: 0.1488, aux.acc_seg: 85.1549, loss: 0.3350, grad_norm: 4.6775 2023-02-12 02:31:26,928 - mmseg - INFO - Iter [145450/160000] lr: 5.457e-06, eta: 0:49:53, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2021, decode.acc_seg: 91.9759, aux.loss_ce: 0.1552, aux.acc_seg: 84.7011, loss: 0.3573, grad_norm: 4.3080 2023-02-12 02:31:37,472 - mmseg - INFO - Iter [145500/160000] lr: 5.438e-06, eta: 0:49:42, time: 0.210, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.2013, decode.acc_seg: 92.0282, aux.loss_ce: 0.1528, aux.acc_seg: 85.0286, loss: 0.3542, grad_norm: 5.2197 2023-02-12 02:31:47,329 - mmseg - INFO - Iter [145550/160000] lr: 5.419e-06, eta: 0:49:32, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1820, decode.acc_seg: 92.4922, aux.loss_ce: 0.1431, aux.acc_seg: 85.7527, loss: 0.3251, grad_norm: 3.9728 2023-02-12 02:31:57,050 - mmseg - INFO - Iter [145600/160000] lr: 5.400e-06, eta: 0:49:22, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1914, decode.acc_seg: 92.3561, aux.loss_ce: 0.1573, aux.acc_seg: 84.7971, loss: 0.3486, grad_norm: 4.1915 2023-02-12 02:32:07,206 - mmseg - INFO - Iter [145650/160000] lr: 5.382e-06, eta: 0:49:11, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1896, decode.acc_seg: 92.1941, aux.loss_ce: 0.1519, aux.acc_seg: 85.2811, loss: 0.3415, grad_norm: 4.1444 2023-02-12 02:32:17,151 - mmseg - INFO - Iter [145700/160000] lr: 5.363e-06, eta: 0:49:01, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1813, decode.acc_seg: 92.6077, aux.loss_ce: 0.1386, aux.acc_seg: 86.1999, loss: 0.3198, grad_norm: 3.2762 2023-02-12 02:32:26,940 - mmseg - INFO - Iter [145750/160000] lr: 5.344e-06, eta: 0:48:51, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1858, decode.acc_seg: 92.4839, aux.loss_ce: 0.1487, aux.acc_seg: 85.8394, loss: 0.3345, grad_norm: 4.3355 2023-02-12 02:32:37,201 - mmseg - INFO - Iter [145800/160000] lr: 5.325e-06, eta: 0:48:40, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1851, decode.acc_seg: 92.2832, aux.loss_ce: 0.1494, aux.acc_seg: 84.8583, loss: 0.3345, grad_norm: 3.8779 2023-02-12 02:32:47,341 - mmseg - INFO - Iter [145850/160000] lr: 5.307e-06, eta: 0:48:30, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1824, decode.acc_seg: 92.6800, aux.loss_ce: 0.1506, aux.acc_seg: 85.2273, loss: 0.3330, grad_norm: 3.6065 2023-02-12 02:32:57,260 - mmseg - INFO - Iter [145900/160000] lr: 5.288e-06, eta: 0:48:20, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1843, decode.acc_seg: 92.5059, aux.loss_ce: 0.1468, aux.acc_seg: 85.6116, loss: 0.3311, grad_norm: 3.4736 2023-02-12 02:33:07,135 - mmseg - INFO - Iter [145950/160000] lr: 5.269e-06, eta: 0:48:09, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1919, decode.acc_seg: 92.2801, aux.loss_ce: 0.1534, aux.acc_seg: 85.1037, loss: 0.3453, grad_norm: 4.2367 2023-02-12 02:33:17,327 - mmseg - INFO - Saving checkpoint at 146000 iterations 2023-02-12 02:33:18,005 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:33:18,005 - mmseg - INFO - Iter [146000/160000] lr: 5.250e-06, eta: 0:47:59, time: 0.217, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1939, decode.acc_seg: 92.2223, aux.loss_ce: 0.1480, aux.acc_seg: 85.4305, loss: 0.3419, grad_norm: 3.9494 2023-02-12 02:33:27,953 - mmseg - INFO - Iter [146050/160000] lr: 5.232e-06, eta: 0:47:49, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1877, decode.acc_seg: 92.1899, aux.loss_ce: 0.1524, aux.acc_seg: 84.9282, loss: 0.3401, grad_norm: 3.6587 2023-02-12 02:33:40,112 - mmseg - INFO - Iter [146100/160000] lr: 5.213e-06, eta: 0:47:39, time: 0.243, data_time: 0.048, memory: 7748, decode.loss_ce: 0.1828, decode.acc_seg: 92.7797, aux.loss_ce: 0.1467, aux.acc_seg: 85.8615, loss: 0.3295, grad_norm: 3.9341 2023-02-12 02:33:50,559 - mmseg - INFO - Iter [146150/160000] lr: 5.194e-06, eta: 0:47:28, time: 0.208, data_time: 0.004, memory: 
7748, decode.loss_ce: 0.1881, decode.acc_seg: 92.5091, aux.loss_ce: 0.1525, aux.acc_seg: 85.0070, loss: 0.3406, grad_norm: 4.2133 2023-02-12 02:34:00,393 - mmseg - INFO - Iter [146200/160000] lr: 5.175e-06, eta: 0:47:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1944, decode.acc_seg: 92.2255, aux.loss_ce: 0.1508, aux.acc_seg: 85.2876, loss: 0.3452, grad_norm: 4.7808 2023-02-12 02:34:10,300 - mmseg - INFO - Iter [146250/160000] lr: 5.157e-06, eta: 0:47:08, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1917, decode.acc_seg: 92.5046, aux.loss_ce: 0.1490, aux.acc_seg: 85.5441, loss: 0.3407, grad_norm: 3.4187 2023-02-12 02:34:20,557 - mmseg - INFO - Iter [146300/160000] lr: 5.138e-06, eta: 0:46:58, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1875, decode.acc_seg: 92.2829, aux.loss_ce: 0.1529, aux.acc_seg: 84.9437, loss: 0.3404, grad_norm: 4.0421 2023-02-12 02:34:30,410 - mmseg - INFO - Iter [146350/160000] lr: 5.119e-06, eta: 0:46:47, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1831, decode.acc_seg: 92.6270, aux.loss_ce: 0.1462, aux.acc_seg: 85.6218, loss: 0.3293, grad_norm: 3.8744 2023-02-12 02:34:40,172 - mmseg - INFO - Iter [146400/160000] lr: 5.100e-06, eta: 0:46:37, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2047, decode.acc_seg: 91.8143, aux.loss_ce: 0.1592, aux.acc_seg: 84.5436, loss: 0.3639, grad_norm: 5.1390 2023-02-12 02:34:50,586 - mmseg - INFO - Iter [146450/160000] lr: 5.082e-06, eta: 0:46:27, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 91.8787, aux.loss_ce: 0.1535, aux.acc_seg: 84.9606, loss: 0.3460, grad_norm: 4.9691 2023-02-12 02:35:00,581 - mmseg - INFO - Iter [146500/160000] lr: 5.063e-06, eta: 0:46:16, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1885, decode.acc_seg: 92.4103, aux.loss_ce: 0.1448, aux.acc_seg: 85.6393, loss: 0.3333, grad_norm: 3.9537 2023-02-12 02:35:10,520 - mmseg - INFO - Iter [146550/160000] lr: 5.044e-06, eta: 0:46:06, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1974, decode.acc_seg: 92.0300, aux.loss_ce: 0.1531, aux.acc_seg: 84.9244, loss: 0.3506, grad_norm: 4.1829 2023-02-12 02:35:20,527 - mmseg - INFO - Iter [146600/160000] lr: 5.025e-06, eta: 0:45:56, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2006, decode.acc_seg: 91.7943, aux.loss_ce: 0.1499, aux.acc_seg: 85.0629, loss: 0.3505, grad_norm: 5.1859 2023-02-12 02:35:30,532 - mmseg - INFO - Iter [146650/160000] lr: 5.007e-06, eta: 0:45:45, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1859, decode.acc_seg: 92.3810, aux.loss_ce: 0.1471, aux.acc_seg: 85.3753, loss: 0.3330, grad_norm: 3.6274 2023-02-12 02:35:40,342 - mmseg - INFO - Iter [146700/160000] lr: 4.988e-06, eta: 0:45:35, time: 0.197, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1804, decode.acc_seg: 92.6916, aux.loss_ce: 0.1483, aux.acc_seg: 85.1822, loss: 0.3287, grad_norm: 3.4258 2023-02-12 02:35:50,348 - mmseg - INFO - Iter [146750/160000] lr: 4.969e-06, eta: 0:45:25, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2028, decode.acc_seg: 92.0209, aux.loss_ce: 0.1571, aux.acc_seg: 84.9043, loss: 0.3598, grad_norm: 4.7061 2023-02-12 02:36:00,070 - mmseg - INFO - Iter [146800/160000] lr: 4.950e-06, eta: 0:45:14, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1938, decode.acc_seg: 92.4872, aux.loss_ce: 0.1541, aux.acc_seg: 85.3437, loss: 0.3479, grad_norm: 3.8503 2023-02-12 02:36:10,067 - mmseg - INFO - 
Iter [146850/160000] lr: 4.932e-06, eta: 0:45:04, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1940, decode.acc_seg: 92.3964, aux.loss_ce: 0.1500, aux.acc_seg: 85.6810, loss: 0.3440, grad_norm: 3.7242 2023-02-12 02:36:20,316 - mmseg - INFO - Iter [146900/160000] lr: 4.913e-06, eta: 0:44:54, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1837, decode.acc_seg: 92.6394, aux.loss_ce: 0.1446, aux.acc_seg: 85.7885, loss: 0.3282, grad_norm: 3.5453 2023-02-12 02:36:30,483 - mmseg - INFO - Iter [146950/160000] lr: 4.894e-06, eta: 0:44:43, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1979, decode.acc_seg: 92.0738, aux.loss_ce: 0.1527, aux.acc_seg: 85.0214, loss: 0.3506, grad_norm: 4.4798 2023-02-12 02:36:40,569 - mmseg - INFO - Saving checkpoint at 147000 iterations 2023-02-12 02:36:41,259 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:36:41,259 - mmseg - INFO - Iter [147000/160000] lr: 4.875e-06, eta: 0:44:33, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1849, decode.acc_seg: 92.5818, aux.loss_ce: 0.1445, aux.acc_seg: 85.9862, loss: 0.3294, grad_norm: 4.0033 2023-02-12 02:36:51,093 - mmseg - INFO - Iter [147050/160000] lr: 4.857e-06, eta: 0:44:23, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1900, decode.acc_seg: 92.3495, aux.loss_ce: 0.1475, aux.acc_seg: 85.7059, loss: 0.3375, grad_norm: 3.6980 2023-02-12 02:37:01,125 - mmseg - INFO - Iter [147100/160000] lr: 4.838e-06, eta: 0:44:13, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1933, decode.acc_seg: 92.0141, aux.loss_ce: 0.1523, aux.acc_seg: 84.8902, loss: 0.3456, grad_norm: 4.3249 2023-02-12 02:37:10,920 - mmseg - INFO - Iter [147150/160000] lr: 4.819e-06, eta: 0:44:02, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1916, decode.acc_seg: 92.2607, aux.loss_ce: 0.1563, aux.acc_seg: 84.6430, loss: 0.3480, grad_norm: 3.7782 2023-02-12 02:37:20,881 - mmseg - INFO - Iter [147200/160000] lr: 4.800e-06, eta: 0:43:52, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1783, decode.acc_seg: 92.8428, aux.loss_ce: 0.1412, aux.acc_seg: 86.0134, loss: 0.3195, grad_norm: 4.0726 2023-02-12 02:37:31,339 - mmseg - INFO - Iter [147250/160000] lr: 4.782e-06, eta: 0:43:42, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1810, decode.acc_seg: 92.5755, aux.loss_ce: 0.1445, aux.acc_seg: 85.5554, loss: 0.3255, grad_norm: 3.5372 2023-02-12 02:37:41,843 - mmseg - INFO - Iter [147300/160000] lr: 4.763e-06, eta: 0:43:31, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1935, decode.acc_seg: 92.2463, aux.loss_ce: 0.1530, aux.acc_seg: 85.2909, loss: 0.3465, grad_norm: 4.1147 2023-02-12 02:37:52,052 - mmseg - INFO - Iter [147350/160000] lr: 4.744e-06, eta: 0:43:21, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1783, decode.acc_seg: 92.5618, aux.loss_ce: 0.1414, aux.acc_seg: 85.7749, loss: 0.3197, grad_norm: 3.6828 2023-02-12 02:38:04,090 - mmseg - INFO - Iter [147400/160000] lr: 4.725e-06, eta: 0:43:11, time: 0.241, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1933, decode.acc_seg: 92.1322, aux.loss_ce: 0.1627, aux.acc_seg: 84.3548, loss: 0.3560, grad_norm: 5.4647 2023-02-12 02:38:14,319 - mmseg - INFO - Iter [147450/160000] lr: 4.707e-06, eta: 0:43:01, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2025, decode.acc_seg: 91.8576, aux.loss_ce: 0.1530, aux.acc_seg: 84.7495, loss: 0.3555, grad_norm: 4.6667 2023-02-12 02:38:24,375 - mmseg - INFO 
- Iter [147500/160000] lr: 4.688e-06, eta: 0:42:50, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1871, decode.acc_seg: 92.4855, aux.loss_ce: 0.1497, aux.acc_seg: 85.2430, loss: 0.3369, grad_norm: 3.7702 2023-02-12 02:38:34,777 - mmseg - INFO - Iter [147550/160000] lr: 4.669e-06, eta: 0:42:40, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1841, decode.acc_seg: 92.5280, aux.loss_ce: 0.1470, aux.acc_seg: 85.2715, loss: 0.3312, grad_norm: 3.8276 2023-02-12 02:38:44,723 - mmseg - INFO - Iter [147600/160000] lr: 4.650e-06, eta: 0:42:30, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1899, decode.acc_seg: 92.1094, aux.loss_ce: 0.1545, aux.acc_seg: 84.5562, loss: 0.3444, grad_norm: 4.7415 2023-02-12 02:38:54,441 - mmseg - INFO - Iter [147650/160000] lr: 4.632e-06, eta: 0:42:19, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.4015, aux.loss_ce: 0.1443, aux.acc_seg: 85.6757, loss: 0.3354, grad_norm: 3.7475 2023-02-12 02:39:04,631 - mmseg - INFO - Iter [147700/160000] lr: 4.613e-06, eta: 0:42:09, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1810, decode.acc_seg: 92.5263, aux.loss_ce: 0.1379, aux.acc_seg: 86.0922, loss: 0.3189, grad_norm: 3.1668 2023-02-12 02:39:14,864 - mmseg - INFO - Iter [147750/160000] lr: 4.594e-06, eta: 0:41:59, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2077, decode.acc_seg: 91.7301, aux.loss_ce: 0.1620, aux.acc_seg: 84.3240, loss: 0.3697, grad_norm: 4.0823 2023-02-12 02:39:24,877 - mmseg - INFO - Iter [147800/160000] lr: 4.575e-06, eta: 0:41:49, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1838, decode.acc_seg: 92.6612, aux.loss_ce: 0.1438, aux.acc_seg: 85.8287, loss: 0.3276, grad_norm: 4.0083 2023-02-12 02:39:34,915 - mmseg - INFO - Iter [147850/160000] lr: 4.557e-06, eta: 0:41:38, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1844, decode.acc_seg: 92.4713, aux.loss_ce: 0.1460, aux.acc_seg: 85.7756, loss: 0.3304, grad_norm: 3.2398 2023-02-12 02:39:44,671 - mmseg - INFO - Iter [147900/160000] lr: 4.538e-06, eta: 0:41:28, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1903, decode.acc_seg: 92.4069, aux.loss_ce: 0.1510, aux.acc_seg: 85.6491, loss: 0.3413, grad_norm: 4.2816 2023-02-12 02:39:54,950 - mmseg - INFO - Iter [147950/160000] lr: 4.519e-06, eta: 0:41:18, time: 0.206, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1829, decode.acc_seg: 92.6703, aux.loss_ce: 0.1429, aux.acc_seg: 86.0818, loss: 0.3258, grad_norm: 3.6037 2023-02-12 02:40:05,458 - mmseg - INFO - Saving checkpoint at 148000 iterations 2023-02-12 02:40:06,150 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:40:06,150 - mmseg - INFO - Iter [148000/160000] lr: 4.500e-06, eta: 0:41:07, time: 0.224, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1864, decode.acc_seg: 92.4290, aux.loss_ce: 0.1513, aux.acc_seg: 85.0038, loss: 0.3376, grad_norm: 3.3310 2023-02-12 02:40:16,003 - mmseg - INFO - Iter [148050/160000] lr: 4.482e-06, eta: 0:40:57, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1869, decode.acc_seg: 92.4115, aux.loss_ce: 0.1479, aux.acc_seg: 85.5085, loss: 0.3349, grad_norm: 4.4070 2023-02-12 02:40:25,999 - mmseg - INFO - Iter [148100/160000] lr: 4.463e-06, eta: 0:40:47, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1837, decode.acc_seg: 92.6043, aux.loss_ce: 0.1495, aux.acc_seg: 85.1807, loss: 0.3332, grad_norm: 4.1553 2023-02-12 02:40:36,312 - mmseg - 
INFO - Iter [148150/160000] lr: 4.444e-06, eta: 0:40:37, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1878, decode.acc_seg: 92.3302, aux.loss_ce: 0.1463, aux.acc_seg: 85.5372, loss: 0.3341, grad_norm: 3.9486 2023-02-12 02:40:46,748 - mmseg - INFO - Iter [148200/160000] lr: 4.425e-06, eta: 0:40:26, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1797, decode.acc_seg: 92.4303, aux.loss_ce: 0.1451, aux.acc_seg: 85.2457, loss: 0.3247, grad_norm: 3.5598 2023-02-12 02:40:56,542 - mmseg - INFO - Iter [148250/160000] lr: 4.407e-06, eta: 0:40:16, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1890, decode.acc_seg: 92.2709, aux.loss_ce: 0.1451, aux.acc_seg: 85.5468, loss: 0.3340, grad_norm: 3.8229 2023-02-12 02:41:06,604 - mmseg - INFO - Iter [148300/160000] lr: 4.388e-06, eta: 0:40:06, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1795, decode.acc_seg: 92.7076, aux.loss_ce: 0.1415, aux.acc_seg: 85.8602, loss: 0.3210, grad_norm: 3.1017 2023-02-12 02:41:16,414 - mmseg - INFO - Iter [148350/160000] lr: 4.369e-06, eta: 0:39:55, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2068, decode.acc_seg: 91.6085, aux.loss_ce: 0.1562, aux.acc_seg: 84.5911, loss: 0.3630, grad_norm: 4.6336 2023-02-12 02:41:26,983 - mmseg - INFO - Iter [148400/160000] lr: 4.350e-06, eta: 0:39:45, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1756, decode.acc_seg: 92.7611, aux.loss_ce: 0.1419, aux.acc_seg: 85.8768, loss: 0.3175, grad_norm: 3.8132 2023-02-12 02:41:37,231 - mmseg - INFO - Iter [148450/160000] lr: 4.332e-06, eta: 0:39:35, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1891, decode.acc_seg: 92.3189, aux.loss_ce: 0.1398, aux.acc_seg: 86.0174, loss: 0.3289, grad_norm: 3.7794 2023-02-12 02:41:47,304 - mmseg - INFO - Iter [148500/160000] lr: 4.313e-06, eta: 0:39:24, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1823, decode.acc_seg: 92.5947, aux.loss_ce: 0.1415, aux.acc_seg: 85.8430, loss: 0.3238, grad_norm: 3.2669 2023-02-12 02:41:57,519 - mmseg - INFO - Iter [148550/160000] lr: 4.294e-06, eta: 0:39:14, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1825, decode.acc_seg: 92.7376, aux.loss_ce: 0.1457, aux.acc_seg: 86.0805, loss: 0.3282, grad_norm: 4.2231 2023-02-12 02:42:07,520 - mmseg - INFO - Iter [148600/160000] lr: 4.275e-06, eta: 0:39:04, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1955, decode.acc_seg: 92.1389, aux.loss_ce: 0.1607, aux.acc_seg: 84.6782, loss: 0.3562, grad_norm: 4.2632 2023-02-12 02:42:19,551 - mmseg - INFO - Iter [148650/160000] lr: 4.257e-06, eta: 0:38:54, time: 0.241, data_time: 0.046, memory: 7748, decode.loss_ce: 0.1864, decode.acc_seg: 92.4085, aux.loss_ce: 0.1469, aux.acc_seg: 85.6125, loss: 0.3332, grad_norm: 3.6940 2023-02-12 02:42:29,441 - mmseg - INFO - Iter [148700/160000] lr: 4.238e-06, eta: 0:38:43, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1898, decode.acc_seg: 92.1925, aux.loss_ce: 0.1469, aux.acc_seg: 85.3130, loss: 0.3366, grad_norm: 3.4705 2023-02-12 02:42:39,544 - mmseg - INFO - Iter [148750/160000] lr: 4.219e-06, eta: 0:38:33, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1866, decode.acc_seg: 92.3723, aux.loss_ce: 0.1495, aux.acc_seg: 85.2838, loss: 0.3362, grad_norm: 3.7877 2023-02-12 02:42:49,600 - mmseg - INFO - Iter [148800/160000] lr: 4.200e-06, eta: 0:38:23, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1846, decode.acc_seg: 92.3772, aux.loss_ce: 0.1467, 
aux.acc_seg: 85.5208, loss: 0.3313, grad_norm: 4.1106 2023-02-12 02:42:59,603 - mmseg - INFO - Iter [148850/160000] lr: 4.182e-06, eta: 0:38:13, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1840, decode.acc_seg: 92.4104, aux.loss_ce: 0.1490, aux.acc_seg: 85.1166, loss: 0.3330, grad_norm: 4.0661 2023-02-12 02:43:10,017 - mmseg - INFO - Iter [148900/160000] lr: 4.163e-06, eta: 0:38:02, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2000, decode.acc_seg: 91.8658, aux.loss_ce: 0.1562, aux.acc_seg: 84.8055, loss: 0.3562, grad_norm: 4.2259 2023-02-12 02:43:20,547 - mmseg - INFO - Iter [148950/160000] lr: 4.144e-06, eta: 0:37:52, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1934, decode.acc_seg: 92.2607, aux.loss_ce: 0.1533, aux.acc_seg: 85.1817, loss: 0.3467, grad_norm: 5.8136 2023-02-12 02:43:30,355 - mmseg - INFO - Saving checkpoint at 149000 iterations 2023-02-12 02:43:31,071 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:43:31,072 - mmseg - INFO - Iter [149000/160000] lr: 4.125e-06, eta: 0:37:42, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1818, decode.acc_seg: 92.6933, aux.loss_ce: 0.1488, aux.acc_seg: 85.4848, loss: 0.3306, grad_norm: 4.1715 2023-02-12 02:43:41,378 - mmseg - INFO - Iter [149050/160000] lr: 4.107e-06, eta: 0:37:31, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2000, decode.acc_seg: 91.9428, aux.loss_ce: 0.1547, aux.acc_seg: 84.9332, loss: 0.3547, grad_norm: 3.9913 2023-02-12 02:43:51,506 - mmseg - INFO - Iter [149100/160000] lr: 4.088e-06, eta: 0:37:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1926, decode.acc_seg: 92.1218, aux.loss_ce: 0.1519, aux.acc_seg: 85.0905, loss: 0.3444, grad_norm: 4.0194 2023-02-12 02:44:01,521 - mmseg - INFO - Iter [149150/160000] lr: 4.069e-06, eta: 0:37:11, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1739, decode.acc_seg: 92.7411, aux.loss_ce: 0.1356, aux.acc_seg: 86.4696, loss: 0.3094, grad_norm: 3.2594 2023-02-12 02:44:11,940 - mmseg - INFO - Iter [149200/160000] lr: 4.050e-06, eta: 0:37:01, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1900, decode.acc_seg: 92.3464, aux.loss_ce: 0.1463, aux.acc_seg: 85.7292, loss: 0.3363, grad_norm: 4.7985 2023-02-12 02:44:22,460 - mmseg - INFO - Iter [149250/160000] lr: 4.032e-06, eta: 0:36:50, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1893, decode.acc_seg: 92.0600, aux.loss_ce: 0.1472, aux.acc_seg: 85.2574, loss: 0.3365, grad_norm: 3.7089 2023-02-12 02:44:32,544 - mmseg - INFO - Iter [149300/160000] lr: 4.013e-06, eta: 0:36:40, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1922, decode.acc_seg: 92.1097, aux.loss_ce: 0.1475, aux.acc_seg: 85.3247, loss: 0.3398, grad_norm: 3.8396 2023-02-12 02:44:42,282 - mmseg - INFO - Iter [149350/160000] lr: 3.994e-06, eta: 0:36:30, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2087, decode.acc_seg: 91.9136, aux.loss_ce: 0.1636, aux.acc_seg: 84.6766, loss: 0.3723, grad_norm: 5.2718 2023-02-12 02:44:52,228 - mmseg - INFO - Iter [149400/160000] lr: 3.975e-06, eta: 0:36:19, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1893, decode.acc_seg: 92.2994, aux.loss_ce: 0.1499, aux.acc_seg: 84.9957, loss: 0.3392, grad_norm: 3.8586 2023-02-12 02:45:02,200 - mmseg - INFO - Iter [149450/160000] lr: 3.957e-06, eta: 0:36:09, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1842, decode.acc_seg: 92.3818, aux.loss_ce: 
0.1402, aux.acc_seg: 85.8368, loss: 0.3244, grad_norm: 3.4974 2023-02-12 02:45:12,543 - mmseg - INFO - Iter [149500/160000] lr: 3.938e-06, eta: 0:35:59, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1851, decode.acc_seg: 92.6183, aux.loss_ce: 0.1483, aux.acc_seg: 85.5491, loss: 0.3334, grad_norm: 4.0726 2023-02-12 02:45:22,243 - mmseg - INFO - Iter [149550/160000] lr: 3.919e-06, eta: 0:35:48, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1816, decode.acc_seg: 92.5559, aux.loss_ce: 0.1479, aux.acc_seg: 85.4597, loss: 0.3295, grad_norm: 4.0575 2023-02-12 02:45:32,250 - mmseg - INFO - Iter [149600/160000] lr: 3.900e-06, eta: 0:35:38, time: 0.200, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1790, decode.acc_seg: 92.8497, aux.loss_ce: 0.1481, aux.acc_seg: 85.4205, loss: 0.3271, grad_norm: 3.8331 2023-02-12 02:45:42,230 - mmseg - INFO - Iter [149650/160000] lr: 3.882e-06, eta: 0:35:28, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1991, decode.acc_seg: 92.0432, aux.loss_ce: 0.1525, aux.acc_seg: 84.9762, loss: 0.3516, grad_norm: 3.6725 2023-02-12 02:45:52,227 - mmseg - INFO - Iter [149700/160000] lr: 3.863e-06, eta: 0:35:18, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1944, decode.acc_seg: 92.5084, aux.loss_ce: 0.1625, aux.acc_seg: 84.8639, loss: 0.3570, grad_norm: 4.3281 2023-02-12 02:46:02,291 - mmseg - INFO - Iter [149750/160000] lr: 3.844e-06, eta: 0:35:07, time: 0.201, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1836, decode.acc_seg: 92.4495, aux.loss_ce: 0.1437, aux.acc_seg: 85.8282, loss: 0.3274, grad_norm: 3.8675 2023-02-12 02:46:12,536 - mmseg - INFO - Iter [149800/160000] lr: 3.825e-06, eta: 0:34:57, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1809, decode.acc_seg: 92.6797, aux.loss_ce: 0.1421, aux.acc_seg: 86.1273, loss: 0.3230, grad_norm: 3.9254 2023-02-12 02:46:22,635 - mmseg - INFO - Iter [149850/160000] lr: 3.807e-06, eta: 0:34:47, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1837, decode.acc_seg: 92.3362, aux.loss_ce: 0.1481, aux.acc_seg: 85.0034, loss: 0.3318, grad_norm: 3.8447 2023-02-12 02:46:35,266 - mmseg - INFO - Iter [149900/160000] lr: 3.788e-06, eta: 0:34:37, time: 0.253, data_time: 0.048, memory: 7748, decode.loss_ce: 0.1908, decode.acc_seg: 92.2653, aux.loss_ce: 0.1539, aux.acc_seg: 85.1337, loss: 0.3447, grad_norm: 3.9398 2023-02-12 02:46:45,219 - mmseg - INFO - Iter [149950/160000] lr: 3.769e-06, eta: 0:34:26, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1941, decode.acc_seg: 92.0211, aux.loss_ce: 0.1507, aux.acc_seg: 85.0129, loss: 0.3448, grad_norm: 3.8838 2023-02-12 02:46:55,267 - mmseg - INFO - Saving checkpoint at 150000 iterations 2023-02-12 02:46:55,998 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:46:55,998 - mmseg - INFO - Iter [150000/160000] lr: 3.750e-06, eta: 0:34:16, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1987, decode.acc_seg: 91.9793, aux.loss_ce: 0.1494, aux.acc_seg: 85.1155, loss: 0.3482, grad_norm: 3.6253 2023-02-12 02:47:06,318 - mmseg - INFO - Iter [150050/160000] lr: 3.732e-06, eta: 0:34:06, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1827, decode.acc_seg: 92.6405, aux.loss_ce: 0.1438, aux.acc_seg: 86.1166, loss: 0.3265, grad_norm: 3.3526 2023-02-12 02:47:16,571 - mmseg - INFO - Iter [150100/160000] lr: 3.713e-06, eta: 0:33:55, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1849, decode.acc_seg: 92.3872, 
aux.loss_ce: 0.1450, aux.acc_seg: 85.4882, loss: 0.3299, grad_norm: 3.5469 2023-02-12 02:47:27,165 - mmseg - INFO - Iter [150150/160000] lr: 3.694e-06, eta: 0:33:45, time: 0.212, data_time: 0.006, memory: 7748, decode.loss_ce: 0.1744, decode.acc_seg: 92.8797, aux.loss_ce: 0.1443, aux.acc_seg: 85.7344, loss: 0.3188, grad_norm: 3.7481 2023-02-12 02:47:36,842 - mmseg - INFO - Iter [150200/160000] lr: 3.675e-06, eta: 0:33:35, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1842, decode.acc_seg: 92.3602, aux.loss_ce: 0.1451, aux.acc_seg: 85.3565, loss: 0.3293, grad_norm: 4.1622 2023-02-12 02:47:47,011 - mmseg - INFO - Iter [150250/160000] lr: 3.657e-06, eta: 0:33:25, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1966, decode.acc_seg: 92.1095, aux.loss_ce: 0.1509, aux.acc_seg: 84.8528, loss: 0.3475, grad_norm: 3.9638 2023-02-12 02:47:56,957 - mmseg - INFO - Iter [150300/160000] lr: 3.638e-06, eta: 0:33:14, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1924, decode.acc_seg: 92.3342, aux.loss_ce: 0.1479, aux.acc_seg: 85.5723, loss: 0.3402, grad_norm: 3.8365 2023-02-12 02:48:06,699 - mmseg - INFO - Iter [150350/160000] lr: 3.619e-06, eta: 0:33:04, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1761, decode.acc_seg: 92.7711, aux.loss_ce: 0.1395, aux.acc_seg: 86.2724, loss: 0.3156, grad_norm: 3.3433 2023-02-12 02:48:16,780 - mmseg - INFO - Iter [150400/160000] lr: 3.600e-06, eta: 0:32:54, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1847, decode.acc_seg: 92.6456, aux.loss_ce: 0.1491, aux.acc_seg: 85.5732, loss: 0.3338, grad_norm: 4.0264 2023-02-12 02:48:26,741 - mmseg - INFO - Iter [150450/160000] lr: 3.582e-06, eta: 0:32:43, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1812, decode.acc_seg: 92.6315, aux.loss_ce: 0.1412, aux.acc_seg: 86.0789, loss: 0.3224, grad_norm: 3.8805 2023-02-12 02:48:36,961 - mmseg - INFO - Iter [150500/160000] lr: 3.563e-06, eta: 0:32:33, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1947, decode.acc_seg: 92.2120, aux.loss_ce: 0.1574, aux.acc_seg: 84.7519, loss: 0.3520, grad_norm: 4.5767 2023-02-12 02:48:47,041 - mmseg - INFO - Iter [150550/160000] lr: 3.544e-06, eta: 0:32:23, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1745, decode.acc_seg: 92.7985, aux.loss_ce: 0.1362, aux.acc_seg: 85.8945, loss: 0.3107, grad_norm: 3.5559 2023-02-12 02:48:56,991 - mmseg - INFO - Iter [150600/160000] lr: 3.525e-06, eta: 0:32:12, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1826, decode.acc_seg: 92.7461, aux.loss_ce: 0.1444, aux.acc_seg: 85.9860, loss: 0.3269, grad_norm: 3.5818 2023-02-12 02:49:07,579 - mmseg - INFO - Iter [150650/160000] lr: 3.507e-06, eta: 0:32:02, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1862, decode.acc_seg: 92.6552, aux.loss_ce: 0.1524, aux.acc_seg: 85.0971, loss: 0.3386, grad_norm: 3.7895 2023-02-12 02:49:17,809 - mmseg - INFO - Iter [150700/160000] lr: 3.488e-06, eta: 0:31:52, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1911, decode.acc_seg: 92.0953, aux.loss_ce: 0.1509, aux.acc_seg: 84.9553, loss: 0.3420, grad_norm: 4.8839 2023-02-12 02:49:27,499 - mmseg - INFO - Iter [150750/160000] lr: 3.469e-06, eta: 0:31:42, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1831, decode.acc_seg: 92.5711, aux.loss_ce: 0.1455, aux.acc_seg: 85.5608, loss: 0.3286, grad_norm: 3.8362 2023-02-12 02:49:37,863 - mmseg - INFO - Iter [150800/160000] lr: 3.450e-06, eta: 0:31:31, time: 
0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2017, decode.acc_seg: 91.9981, aux.loss_ce: 0.1643, aux.acc_seg: 84.2294, loss: 0.3660, grad_norm: 3.9803 2023-02-12 02:49:47,681 - mmseg - INFO - Iter [150850/160000] lr: 3.432e-06, eta: 0:31:21, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1811, decode.acc_seg: 92.7268, aux.loss_ce: 0.1459, aux.acc_seg: 85.5525, loss: 0.3269, grad_norm: 3.6926 2023-02-12 02:49:57,853 - mmseg - INFO - Iter [150900/160000] lr: 3.413e-06, eta: 0:31:11, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1903, decode.acc_seg: 92.5134, aux.loss_ce: 0.1558, aux.acc_seg: 84.9403, loss: 0.3461, grad_norm: 4.2224 2023-02-12 02:50:08,363 - mmseg - INFO - Iter [150950/160000] lr: 3.394e-06, eta: 0:31:00, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 92.1288, aux.loss_ce: 0.1525, aux.acc_seg: 84.9927, loss: 0.3451, grad_norm: 4.1626 2023-02-12 02:50:18,418 - mmseg - INFO - Saving checkpoint at 151000 iterations 2023-02-12 02:50:19,144 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:50:19,144 - mmseg - INFO - Iter [151000/160000] lr: 3.375e-06, eta: 0:30:50, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1910, decode.acc_seg: 92.4022, aux.loss_ce: 0.1533, aux.acc_seg: 85.2009, loss: 0.3443, grad_norm: 4.3973 2023-02-12 02:50:28,898 - mmseg - INFO - Iter [151050/160000] lr: 3.357e-06, eta: 0:30:40, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1984, decode.acc_seg: 92.3649, aux.loss_ce: 0.1569, aux.acc_seg: 84.9937, loss: 0.3553, grad_norm: 3.8902 2023-02-12 02:50:39,374 - mmseg - INFO - Iter [151100/160000] lr: 3.338e-06, eta: 0:30:30, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1902, decode.acc_seg: 92.4147, aux.loss_ce: 0.1498, aux.acc_seg: 85.3305, loss: 0.3400, grad_norm: 4.7055 2023-02-12 02:50:51,524 - mmseg - INFO - Iter [151150/160000] lr: 3.319e-06, eta: 0:30:19, time: 0.243, data_time: 0.046, memory: 7748, decode.loss_ce: 0.1785, decode.acc_seg: 92.6344, aux.loss_ce: 0.1445, aux.acc_seg: 85.6020, loss: 0.3231, grad_norm: 3.5404 2023-02-12 02:51:01,348 - mmseg - INFO - Iter [151200/160000] lr: 3.300e-06, eta: 0:30:09, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1808, decode.acc_seg: 92.8495, aux.loss_ce: 0.1535, aux.acc_seg: 84.8901, loss: 0.3343, grad_norm: 3.5738 2023-02-12 02:51:11,417 - mmseg - INFO - Iter [151250/160000] lr: 3.282e-06, eta: 0:29:59, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1966, decode.acc_seg: 92.0930, aux.loss_ce: 0.1496, aux.acc_seg: 85.2847, loss: 0.3462, grad_norm: 3.9611 2023-02-12 02:51:21,517 - mmseg - INFO - Iter [151300/160000] lr: 3.263e-06, eta: 0:29:49, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1899, decode.acc_seg: 92.4290, aux.loss_ce: 0.1442, aux.acc_seg: 85.9327, loss: 0.3341, grad_norm: 3.8945 2023-02-12 02:51:31,442 - mmseg - INFO - Iter [151350/160000] lr: 3.244e-06, eta: 0:29:38, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1891, decode.acc_seg: 92.3508, aux.loss_ce: 0.1542, aux.acc_seg: 85.0831, loss: 0.3433, grad_norm: 3.9602 2023-02-12 02:51:41,345 - mmseg - INFO - Iter [151400/160000] lr: 3.225e-06, eta: 0:29:28, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1914, decode.acc_seg: 92.2335, aux.loss_ce: 0.1475, aux.acc_seg: 85.2867, loss: 0.3389, grad_norm: 3.8802 2023-02-12 02:51:51,325 - mmseg - INFO - Iter [151450/160000] lr: 3.207e-06, eta: 0:29:18, 
time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1854, decode.acc_seg: 92.4478, aux.loss_ce: 0.1449, aux.acc_seg: 85.6934, loss: 0.3304, grad_norm: 3.4772 2023-02-12 02:52:01,373 - mmseg - INFO - Iter [151500/160000] lr: 3.188e-06, eta: 0:29:07, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1873, decode.acc_seg: 92.3281, aux.loss_ce: 0.1435, aux.acc_seg: 85.4905, loss: 0.3308, grad_norm: 3.8504 2023-02-12 02:52:11,448 - mmseg - INFO - Iter [151550/160000] lr: 3.169e-06, eta: 0:28:57, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1913, decode.acc_seg: 92.1828, aux.loss_ce: 0.1550, aux.acc_seg: 84.8452, loss: 0.3464, grad_norm: 4.2917 2023-02-12 02:52:21,817 - mmseg - INFO - Iter [151600/160000] lr: 3.150e-06, eta: 0:28:47, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1917, decode.acc_seg: 92.3326, aux.loss_ce: 0.1518, aux.acc_seg: 84.9992, loss: 0.3435, grad_norm: 4.0317 2023-02-12 02:52:31,561 - mmseg - INFO - Iter [151650/160000] lr: 3.132e-06, eta: 0:28:36, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2000, decode.acc_seg: 92.2080, aux.loss_ce: 0.1581, aux.acc_seg: 84.7307, loss: 0.3581, grad_norm: 4.3490 2023-02-12 02:52:41,442 - mmseg - INFO - Iter [151700/160000] lr: 3.113e-06, eta: 0:28:26, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1854, decode.acc_seg: 92.5547, aux.loss_ce: 0.1462, aux.acc_seg: 85.8605, loss: 0.3316, grad_norm: 3.9181 2023-02-12 02:52:51,409 - mmseg - INFO - Iter [151750/160000] lr: 3.094e-06, eta: 0:28:16, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1923, decode.acc_seg: 92.3604, aux.loss_ce: 0.1529, aux.acc_seg: 85.0927, loss: 0.3452, grad_norm: 5.0883 2023-02-12 02:53:01,982 - mmseg - INFO - Iter [151800/160000] lr: 3.075e-06, eta: 0:28:06, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1779, decode.acc_seg: 92.8369, aux.loss_ce: 0.1406, aux.acc_seg: 86.0841, loss: 0.3185, grad_norm: 3.3629 2023-02-12 02:53:12,056 - mmseg - INFO - Iter [151850/160000] lr: 3.057e-06, eta: 0:27:55, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1932, decode.acc_seg: 92.2466, aux.loss_ce: 0.1507, aux.acc_seg: 85.2003, loss: 0.3438, grad_norm: 4.5342 2023-02-12 02:53:22,441 - mmseg - INFO - Iter [151900/160000] lr: 3.038e-06, eta: 0:27:45, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1768, decode.acc_seg: 92.7828, aux.loss_ce: 0.1405, aux.acc_seg: 85.8840, loss: 0.3173, grad_norm: 3.5150 2023-02-12 02:53:33,106 - mmseg - INFO - Iter [151950/160000] lr: 3.019e-06, eta: 0:27:35, time: 0.213, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1779, decode.acc_seg: 92.8606, aux.loss_ce: 0.1430, aux.acc_seg: 85.9413, loss: 0.3209, grad_norm: 3.5949 2023-02-12 02:53:42,953 - mmseg - INFO - Saving checkpoint at 152000 iterations 2023-02-12 02:53:43,627 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:53:43,628 - mmseg - INFO - Iter [152000/160000] lr: 3.000e-06, eta: 0:27:25, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1929, decode.acc_seg: 92.2634, aux.loss_ce: 0.1550, aux.acc_seg: 84.6371, loss: 0.3479, grad_norm: 5.1791 2023-02-12 02:53:53,609 - mmseg - INFO - Iter [152050/160000] lr: 2.982e-06, eta: 0:27:14, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1837, decode.acc_seg: 92.3771, aux.loss_ce: 0.1422, aux.acc_seg: 85.5198, loss: 0.3258, grad_norm: 3.4596 2023-02-12 02:54:03,769 - mmseg - INFO - Iter [152100/160000] lr: 2.963e-06, eta: 
0:27:04, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1782, decode.acc_seg: 92.6964, aux.loss_ce: 0.1433, aux.acc_seg: 85.6190, loss: 0.3215, grad_norm: 3.8069 2023-02-12 02:54:13,611 - mmseg - INFO - Iter [152150/160000] lr: 2.944e-06, eta: 0:26:54, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1962, decode.acc_seg: 92.1820, aux.loss_ce: 0.1569, aux.acc_seg: 84.3992, loss: 0.3531, grad_norm: 3.9850 2023-02-12 02:54:23,817 - mmseg - INFO - Iter [152200/160000] lr: 2.925e-06, eta: 0:26:43, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1800, decode.acc_seg: 92.7055, aux.loss_ce: 0.1428, aux.acc_seg: 85.7730, loss: 0.3228, grad_norm: 3.2481 2023-02-12 02:54:33,671 - mmseg - INFO - Iter [152250/160000] lr: 2.907e-06, eta: 0:26:33, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1843, decode.acc_seg: 92.6588, aux.loss_ce: 0.1489, aux.acc_seg: 85.7752, loss: 0.3332, grad_norm: 3.9670 2023-02-12 02:54:43,443 - mmseg - INFO - Iter [152300/160000] lr: 2.888e-06, eta: 0:26:23, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1987, decode.acc_seg: 92.0995, aux.loss_ce: 0.1535, aux.acc_seg: 85.0644, loss: 0.3521, grad_norm: 3.9891 2023-02-12 02:54:53,658 - mmseg - INFO - Iter [152350/160000] lr: 2.869e-06, eta: 0:26:12, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1988, decode.acc_seg: 92.0838, aux.loss_ce: 0.1561, aux.acc_seg: 84.7493, loss: 0.3549, grad_norm: 4.0349 2023-02-12 02:55:03,866 - mmseg - INFO - Iter [152400/160000] lr: 2.850e-06, eta: 0:26:02, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1888, decode.acc_seg: 92.2839, aux.loss_ce: 0.1498, aux.acc_seg: 84.9609, loss: 0.3386, grad_norm: 3.6959 2023-02-12 02:55:16,079 - mmseg - INFO - Iter [152450/160000] lr: 2.832e-06, eta: 0:25:52, time: 0.244, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1981, decode.acc_seg: 92.0914, aux.loss_ce: 0.1529, aux.acc_seg: 85.0802, loss: 0.3509, grad_norm: 4.6490 2023-02-12 02:55:26,483 - mmseg - INFO - Iter [152500/160000] lr: 2.813e-06, eta: 0:25:42, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1734, decode.acc_seg: 92.9905, aux.loss_ce: 0.1389, aux.acc_seg: 86.3275, loss: 0.3123, grad_norm: 3.2386 2023-02-12 02:55:36,669 - mmseg - INFO - Iter [152550/160000] lr: 2.794e-06, eta: 0:25:31, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1888, decode.acc_seg: 92.3693, aux.loss_ce: 0.1451, aux.acc_seg: 85.7091, loss: 0.3339, grad_norm: 3.4791 2023-02-12 02:55:46,673 - mmseg - INFO - Iter [152600/160000] lr: 2.775e-06, eta: 0:25:21, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1935, decode.acc_seg: 92.1076, aux.loss_ce: 0.1468, aux.acc_seg: 85.5859, loss: 0.3403, grad_norm: 3.8554 2023-02-12 02:55:57,009 - mmseg - INFO - Iter [152650/160000] lr: 2.757e-06, eta: 0:25:11, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1784, decode.acc_seg: 92.6209, aux.loss_ce: 0.1428, aux.acc_seg: 85.6995, loss: 0.3212, grad_norm: 3.7100 2023-02-12 02:56:07,290 - mmseg - INFO - Iter [152700/160000] lr: 2.738e-06, eta: 0:25:01, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1838, decode.acc_seg: 92.7171, aux.loss_ce: 0.1477, aux.acc_seg: 85.6423, loss: 0.3315, grad_norm: 3.4750 2023-02-12 02:56:17,404 - mmseg - INFO - Iter [152750/160000] lr: 2.719e-06, eta: 0:24:50, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2103, decode.acc_seg: 91.5995, aux.loss_ce: 0.1556, aux.acc_seg: 84.9583, loss: 0.3659, grad_norm: 
4.6517 2023-02-12 02:56:27,160 - mmseg - INFO - Iter [152800/160000] lr: 2.700e-06, eta: 0:24:40, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1910, decode.acc_seg: 92.0440, aux.loss_ce: 0.1481, aux.acc_seg: 85.1356, loss: 0.3390, grad_norm: 3.7381 2023-02-12 02:56:37,988 - mmseg - INFO - Iter [152850/160000] lr: 2.682e-06, eta: 0:24:30, time: 0.216, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1906, decode.acc_seg: 92.1723, aux.loss_ce: 0.1541, aux.acc_seg: 84.5429, loss: 0.3446, grad_norm: 4.7797 2023-02-12 02:56:48,067 - mmseg - INFO - Iter [152900/160000] lr: 2.663e-06, eta: 0:24:19, time: 0.202, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1835, decode.acc_seg: 92.6878, aux.loss_ce: 0.1441, aux.acc_seg: 86.0068, loss: 0.3276, grad_norm: 3.4623 2023-02-12 02:56:58,207 - mmseg - INFO - Iter [152950/160000] lr: 2.644e-06, eta: 0:24:09, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1919, decode.acc_seg: 92.3303, aux.loss_ce: 0.1502, aux.acc_seg: 85.5773, loss: 0.3421, grad_norm: 4.1287 2023-02-12 02:57:07,975 - mmseg - INFO - Saving checkpoint at 153000 iterations 2023-02-12 02:57:08,669 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 02:57:08,669 - mmseg - INFO - Iter [153000/160000] lr: 2.625e-06, eta: 0:23:59, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1910, decode.acc_seg: 92.2322, aux.loss_ce: 0.1540, aux.acc_seg: 84.7838, loss: 0.3450, grad_norm: 3.9359 2023-02-12 02:57:18,791 - mmseg - INFO - Iter [153050/160000] lr: 2.607e-06, eta: 0:23:49, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1877, decode.acc_seg: 92.3161, aux.loss_ce: 0.1455, aux.acc_seg: 85.5922, loss: 0.3332, grad_norm: 4.4563 2023-02-12 02:57:29,008 - mmseg - INFO - Iter [153100/160000] lr: 2.588e-06, eta: 0:23:38, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1994, decode.acc_seg: 91.7426, aux.loss_ce: 0.1599, aux.acc_seg: 84.5997, loss: 0.3593, grad_norm: 4.6707 2023-02-12 02:57:39,500 - mmseg - INFO - Iter [153150/160000] lr: 2.569e-06, eta: 0:23:28, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1839, decode.acc_seg: 92.5618, aux.loss_ce: 0.1439, aux.acc_seg: 85.8553, loss: 0.3278, grad_norm: 3.7141 2023-02-12 02:57:49,319 - mmseg - INFO - Iter [153200/160000] lr: 2.550e-06, eta: 0:23:18, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1896, decode.acc_seg: 92.2866, aux.loss_ce: 0.1508, aux.acc_seg: 85.1635, loss: 0.3404, grad_norm: 3.5269 2023-02-12 02:58:00,149 - mmseg - INFO - Iter [153250/160000] lr: 2.532e-06, eta: 0:23:07, time: 0.217, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1922, decode.acc_seg: 92.1258, aux.loss_ce: 0.1525, aux.acc_seg: 85.1467, loss: 0.3447, grad_norm: 4.1356 2023-02-12 02:58:10,394 - mmseg - INFO - Iter [153300/160000] lr: 2.513e-06, eta: 0:22:57, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1931, decode.acc_seg: 92.1060, aux.loss_ce: 0.1432, aux.acc_seg: 85.9331, loss: 0.3363, grad_norm: 4.8697 2023-02-12 02:58:20,449 - mmseg - INFO - Iter [153350/160000] lr: 2.494e-06, eta: 0:22:47, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1850, decode.acc_seg: 92.6649, aux.loss_ce: 0.1473, aux.acc_seg: 85.7575, loss: 0.3323, grad_norm: 4.0195 2023-02-12 02:58:30,197 - mmseg - INFO - Iter [153400/160000] lr: 2.475e-06, eta: 0:22:37, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1844, decode.acc_seg: 92.1718, aux.loss_ce: 0.1430, aux.acc_seg: 85.3532, loss: 0.3274, 
grad_norm: 3.8449 2023-02-12 02:58:40,340 - mmseg - INFO - Iter [153450/160000] lr: 2.457e-06, eta: 0:22:26, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1790, decode.acc_seg: 92.7141, aux.loss_ce: 0.1385, aux.acc_seg: 86.3374, loss: 0.3175, grad_norm: 3.3772 2023-02-12 02:58:50,419 - mmseg - INFO - Iter [153500/160000] lr: 2.438e-06, eta: 0:22:16, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1865, decode.acc_seg: 92.4145, aux.loss_ce: 0.1457, aux.acc_seg: 85.3612, loss: 0.3321, grad_norm: 3.8818 2023-02-12 02:59:00,502 - mmseg - INFO - Iter [153550/160000] lr: 2.419e-06, eta: 0:22:06, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1815, decode.acc_seg: 92.6732, aux.loss_ce: 0.1429, aux.acc_seg: 85.7557, loss: 0.3243, grad_norm: 3.9674 2023-02-12 02:59:10,807 - mmseg - INFO - Iter [153600/160000] lr: 2.400e-06, eta: 0:21:55, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1881, decode.acc_seg: 92.2585, aux.loss_ce: 0.1463, aux.acc_seg: 85.3841, loss: 0.3344, grad_norm: 3.7931 2023-02-12 02:59:21,242 - mmseg - INFO - Iter [153650/160000] lr: 2.382e-06, eta: 0:21:45, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1866, decode.acc_seg: 92.5171, aux.loss_ce: 0.1481, aux.acc_seg: 85.2903, loss: 0.3347, grad_norm: 4.1486 2023-02-12 02:59:33,300 - mmseg - INFO - Iter [153700/160000] lr: 2.363e-06, eta: 0:21:35, time: 0.241, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1806, decode.acc_seg: 92.5847, aux.loss_ce: 0.1373, aux.acc_seg: 86.3596, loss: 0.3179, grad_norm: 3.6282 2023-02-12 02:59:43,690 - mmseg - INFO - Iter [153750/160000] lr: 2.344e-06, eta: 0:21:25, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1929, decode.acc_seg: 91.9582, aux.loss_ce: 0.1494, aux.acc_seg: 85.0221, loss: 0.3423, grad_norm: 4.2741 2023-02-12 02:59:53,907 - mmseg - INFO - Iter [153800/160000] lr: 2.325e-06, eta: 0:21:14, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1808, decode.acc_seg: 92.6544, aux.loss_ce: 0.1472, aux.acc_seg: 85.5405, loss: 0.3280, grad_norm: 3.6964 2023-02-12 03:00:04,446 - mmseg - INFO - Iter [153850/160000] lr: 2.307e-06, eta: 0:21:04, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1840, decode.acc_seg: 92.3737, aux.loss_ce: 0.1444, aux.acc_seg: 85.5828, loss: 0.3284, grad_norm: 3.6103 2023-02-12 03:00:14,305 - mmseg - INFO - Iter [153900/160000] lr: 2.288e-06, eta: 0:20:54, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1758, decode.acc_seg: 92.7834, aux.loss_ce: 0.1453, aux.acc_seg: 85.6926, loss: 0.3211, grad_norm: 3.2318 2023-02-12 03:00:24,207 - mmseg - INFO - Iter [153950/160000] lr: 2.269e-06, eta: 0:20:44, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1945, decode.acc_seg: 92.0993, aux.loss_ce: 0.1506, aux.acc_seg: 85.2288, loss: 0.3452, grad_norm: 3.5242 2023-02-12 03:00:34,181 - mmseg - INFO - Saving checkpoint at 154000 iterations 2023-02-12 03:00:34,852 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:00:34,853 - mmseg - INFO - Iter [154000/160000] lr: 2.250e-06, eta: 0:20:33, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1719, decode.acc_seg: 92.9674, aux.loss_ce: 0.1375, aux.acc_seg: 86.4003, loss: 0.3094, grad_norm: 3.5629 2023-02-12 03:00:44,439 - mmseg - INFO - Iter [154050/160000] lr: 2.232e-06, eta: 0:20:23, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1853, decode.acc_seg: 92.3174, aux.loss_ce: 0.1456, aux.acc_seg: 85.6276, loss: 
0.3309, grad_norm: 4.5938 2023-02-12 03:00:54,620 - mmseg - INFO - Iter [154100/160000] lr: 2.213e-06, eta: 0:20:13, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1774, decode.acc_seg: 92.8124, aux.loss_ce: 0.1396, aux.acc_seg: 86.2777, loss: 0.3170, grad_norm: 3.3046 2023-02-12 03:01:04,578 - mmseg - INFO - Iter [154150/160000] lr: 2.194e-06, eta: 0:20:02, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1889, decode.acc_seg: 92.3461, aux.loss_ce: 0.1476, aux.acc_seg: 85.6933, loss: 0.3365, grad_norm: 3.7530 2023-02-12 03:01:14,745 - mmseg - INFO - Iter [154200/160000] lr: 2.175e-06, eta: 0:19:52, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1864, decode.acc_seg: 92.5944, aux.loss_ce: 0.1523, aux.acc_seg: 85.1716, loss: 0.3387, grad_norm: 3.8932 2023-02-12 03:01:25,120 - mmseg - INFO - Iter [154250/160000] lr: 2.157e-06, eta: 0:19:42, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1816, decode.acc_seg: 92.5378, aux.loss_ce: 0.1451, aux.acc_seg: 85.6690, loss: 0.3266, grad_norm: 4.0173 2023-02-12 03:01:35,600 - mmseg - INFO - Iter [154300/160000] lr: 2.138e-06, eta: 0:19:32, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1915, decode.acc_seg: 92.2953, aux.loss_ce: 0.1576, aux.acc_seg: 84.8494, loss: 0.3490, grad_norm: 4.3709 2023-02-12 03:01:45,332 - mmseg - INFO - Iter [154350/160000] lr: 2.119e-06, eta: 0:19:21, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1796, decode.acc_seg: 92.7036, aux.loss_ce: 0.1434, aux.acc_seg: 86.1838, loss: 0.3230, grad_norm: 3.6652 2023-02-12 03:01:55,438 - mmseg - INFO - Iter [154400/160000] lr: 2.100e-06, eta: 0:19:11, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1833, decode.acc_seg: 92.6444, aux.loss_ce: 0.1451, aux.acc_seg: 86.1166, loss: 0.3283, grad_norm: 3.6787 2023-02-12 03:02:05,206 - mmseg - INFO - Iter [154450/160000] lr: 2.082e-06, eta: 0:19:01, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2037, decode.acc_seg: 91.6420, aux.loss_ce: 0.1528, aux.acc_seg: 84.5479, loss: 0.3566, grad_norm: 4.7741 2023-02-12 03:02:15,162 - mmseg - INFO - Iter [154500/160000] lr: 2.063e-06, eta: 0:18:50, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1879, decode.acc_seg: 92.3704, aux.loss_ce: 0.1475, aux.acc_seg: 85.6247, loss: 0.3353, grad_norm: 3.6881 2023-02-12 03:02:25,302 - mmseg - INFO - Iter [154550/160000] lr: 2.044e-06, eta: 0:18:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1859, decode.acc_seg: 92.4497, aux.loss_ce: 0.1467, aux.acc_seg: 85.4825, loss: 0.3326, grad_norm: 3.6819 2023-02-12 03:02:35,082 - mmseg - INFO - Iter [154600/160000] lr: 2.025e-06, eta: 0:18:30, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1980, decode.acc_seg: 92.2797, aux.loss_ce: 0.1575, aux.acc_seg: 84.7347, loss: 0.3555, grad_norm: 4.1653 2023-02-12 03:02:45,137 - mmseg - INFO - Iter [154650/160000] lr: 2.007e-06, eta: 0:18:19, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1882, decode.acc_seg: 92.3918, aux.loss_ce: 0.1475, aux.acc_seg: 85.4826, loss: 0.3357, grad_norm: 3.9145 2023-02-12 03:02:55,276 - mmseg - INFO - Iter [154700/160000] lr: 1.988e-06, eta: 0:18:09, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1941, decode.acc_seg: 92.1681, aux.loss_ce: 0.1567, aux.acc_seg: 84.8264, loss: 0.3508, grad_norm: 3.9963 2023-02-12 03:03:05,810 - mmseg - INFO - Iter [154750/160000] lr: 1.969e-06, eta: 0:17:59, time: 0.211, data_time: 0.004, memory: 7748, 
decode.loss_ce: 0.1890, decode.acc_seg: 92.3330, aux.loss_ce: 0.1508, aux.acc_seg: 85.2491, loss: 0.3398, grad_norm: 4.6136 2023-02-12 03:03:15,992 - mmseg - INFO - Iter [154800/160000] lr: 1.950e-06, eta: 0:17:49, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1892, decode.acc_seg: 92.2895, aux.loss_ce: 0.1434, aux.acc_seg: 85.8573, loss: 0.3326, grad_norm: 3.1566 2023-02-12 03:03:26,312 - mmseg - INFO - Iter [154850/160000] lr: 1.932e-06, eta: 0:17:38, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 92.2068, aux.loss_ce: 0.1510, aux.acc_seg: 85.1247, loss: 0.3435, grad_norm: 3.4973 2023-02-12 03:03:36,369 - mmseg - INFO - Iter [154900/160000] lr: 1.913e-06, eta: 0:17:28, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1822, decode.acc_seg: 92.6364, aux.loss_ce: 0.1434, aux.acc_seg: 85.9447, loss: 0.3256, grad_norm: 3.5543 2023-02-12 03:03:48,279 - mmseg - INFO - Iter [154950/160000] lr: 1.894e-06, eta: 0:17:18, time: 0.238, data_time: 0.048, memory: 7748, decode.loss_ce: 0.1927, decode.acc_seg: 92.2057, aux.loss_ce: 0.1472, aux.acc_seg: 85.6801, loss: 0.3399, grad_norm: 4.8190 2023-02-12 03:03:58,552 - mmseg - INFO - Saving checkpoint at 155000 iterations 2023-02-12 03:03:59,272 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:03:59,273 - mmseg - INFO - Iter [155000/160000] lr: 1.875e-06, eta: 0:17:08, time: 0.220, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1883, decode.acc_seg: 92.5101, aux.loss_ce: 0.1500, aux.acc_seg: 85.3752, loss: 0.3384, grad_norm: 4.1472 2023-02-12 03:04:09,026 - mmseg - INFO - Iter [155050/160000] lr: 1.857e-06, eta: 0:16:57, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1766, decode.acc_seg: 92.6381, aux.loss_ce: 0.1471, aux.acc_seg: 85.3328, loss: 0.3238, grad_norm: 3.3705 2023-02-12 03:04:18,818 - mmseg - INFO - Iter [155100/160000] lr: 1.838e-06, eta: 0:16:47, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1840, decode.acc_seg: 92.5214, aux.loss_ce: 0.1426, aux.acc_seg: 85.7673, loss: 0.3266, grad_norm: 3.2308 2023-02-12 03:04:28,895 - mmseg - INFO - Iter [155150/160000] lr: 1.819e-06, eta: 0:16:37, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1772, decode.acc_seg: 92.8420, aux.loss_ce: 0.1387, aux.acc_seg: 86.0498, loss: 0.3159, grad_norm: 3.3394 2023-02-12 03:04:39,261 - mmseg - INFO - Iter [155200/160000] lr: 1.800e-06, eta: 0:16:26, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1839, decode.acc_seg: 92.5255, aux.loss_ce: 0.1478, aux.acc_seg: 85.3589, loss: 0.3317, grad_norm: 3.4971 2023-02-12 03:04:49,324 - mmseg - INFO - Iter [155250/160000] lr: 1.782e-06, eta: 0:16:16, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1839, decode.acc_seg: 92.6328, aux.loss_ce: 0.1445, aux.acc_seg: 85.8012, loss: 0.3285, grad_norm: 3.9390 2023-02-12 03:04:59,200 - mmseg - INFO - Iter [155300/160000] lr: 1.763e-06, eta: 0:16:06, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1830, decode.acc_seg: 92.4581, aux.loss_ce: 0.1484, aux.acc_seg: 85.3338, loss: 0.3315, grad_norm: 4.2517 2023-02-12 03:05:09,182 - mmseg - INFO - Iter [155350/160000] lr: 1.744e-06, eta: 0:15:56, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1959, decode.acc_seg: 92.2373, aux.loss_ce: 0.1550, aux.acc_seg: 85.1471, loss: 0.3509, grad_norm: 4.3615 2023-02-12 03:05:19,364 - mmseg - INFO - Iter [155400/160000] lr: 1.725e-06, eta: 0:15:45, time: 0.204, data_time: 0.004, memory: 
7748, decode.loss_ce: 0.1866, decode.acc_seg: 92.3689, aux.loss_ce: 0.1443, aux.acc_seg: 85.6854, loss: 0.3309, grad_norm: 3.5018 2023-02-12 03:05:29,285 - mmseg - INFO - Iter [155450/160000] lr: 1.707e-06, eta: 0:15:35, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1929, decode.acc_seg: 92.1856, aux.loss_ce: 0.1553, aux.acc_seg: 84.9099, loss: 0.3482, grad_norm: 4.4144 2023-02-12 03:05:39,694 - mmseg - INFO - Iter [155500/160000] lr: 1.688e-06, eta: 0:15:25, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1922, decode.acc_seg: 92.1232, aux.loss_ce: 0.1522, aux.acc_seg: 84.8704, loss: 0.3443, grad_norm: 4.1428 2023-02-12 03:05:49,630 - mmseg - INFO - Iter [155550/160000] lr: 1.669e-06, eta: 0:15:14, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1896, decode.acc_seg: 92.2378, aux.loss_ce: 0.1513, aux.acc_seg: 85.0627, loss: 0.3409, grad_norm: 4.4738 2023-02-12 03:06:00,192 - mmseg - INFO - Iter [155600/160000] lr: 1.650e-06, eta: 0:15:04, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1813, decode.acc_seg: 92.3985, aux.loss_ce: 0.1438, aux.acc_seg: 85.4400, loss: 0.3252, grad_norm: 3.7130 2023-02-12 03:06:10,634 - mmseg - INFO - Iter [155650/160000] lr: 1.632e-06, eta: 0:14:54, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1855, decode.acc_seg: 92.3969, aux.loss_ce: 0.1449, aux.acc_seg: 85.5359, loss: 0.3304, grad_norm: 3.2706 2023-02-12 03:06:20,547 - mmseg - INFO - Iter [155700/160000] lr: 1.613e-06, eta: 0:14:44, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1751, decode.acc_seg: 92.9805, aux.loss_ce: 0.1457, aux.acc_seg: 85.8501, loss: 0.3207, grad_norm: 3.9405 2023-02-12 03:06:30,493 - mmseg - INFO - Iter [155750/160000] lr: 1.594e-06, eta: 0:14:33, time: 0.199, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1838, decode.acc_seg: 92.5659, aux.loss_ce: 0.1482, aux.acc_seg: 85.2162, loss: 0.3321, grad_norm: 4.4579 2023-02-12 03:06:40,519 - mmseg - INFO - Iter [155800/160000] lr: 1.575e-06, eta: 0:14:23, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1845, decode.acc_seg: 92.1751, aux.loss_ce: 0.1442, aux.acc_seg: 85.3781, loss: 0.3288, grad_norm: 3.5324 2023-02-12 03:06:50,726 - mmseg - INFO - Iter [155850/160000] lr: 1.557e-06, eta: 0:14:13, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1902, decode.acc_seg: 92.2760, aux.loss_ce: 0.1535, aux.acc_seg: 85.0185, loss: 0.3437, grad_norm: 4.2628 2023-02-12 03:07:00,824 - mmseg - INFO - Iter [155900/160000] lr: 1.538e-06, eta: 0:14:02, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1770, decode.acc_seg: 92.6743, aux.loss_ce: 0.1423, aux.acc_seg: 85.9282, loss: 0.3193, grad_norm: 4.0171 2023-02-12 03:07:11,046 - mmseg - INFO - Iter [155950/160000] lr: 1.519e-06, eta: 0:13:52, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1817, decode.acc_seg: 92.6552, aux.loss_ce: 0.1426, aux.acc_seg: 85.9650, loss: 0.3243, grad_norm: 3.5326 2023-02-12 03:07:20,802 - mmseg - INFO - Saving checkpoint at 156000 iterations 2023-02-12 03:07:21,491 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:07:21,491 - mmseg - INFO - Iter [156000/160000] lr: 1.500e-06, eta: 0:13:42, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1843, decode.acc_seg: 92.4921, aux.loss_ce: 0.1504, aux.acc_seg: 85.0922, loss: 0.3347, grad_norm: 4.0413 2023-02-12 03:07:32,036 - mmseg - INFO - Iter [156050/160000] lr: 1.482e-06, eta: 0:13:32, time: 0.211, data_time: 0.005, 
memory: 7748, decode.loss_ce: 0.1863, decode.acc_seg: 92.4773, aux.loss_ce: 0.1501, aux.acc_seg: 85.3446, loss: 0.3364, grad_norm: 3.6026 2023-02-12 03:07:42,178 - mmseg - INFO - Iter [156100/160000] lr: 1.463e-06, eta: 0:13:21, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1853, decode.acc_seg: 92.4762, aux.loss_ce: 0.1464, aux.acc_seg: 85.8387, loss: 0.3317, grad_norm: 3.7084 2023-02-12 03:07:52,016 - mmseg - INFO - Iter [156150/160000] lr: 1.444e-06, eta: 0:13:11, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1953, decode.acc_seg: 92.1707, aux.loss_ce: 0.1530, aux.acc_seg: 85.3032, loss: 0.3483, grad_norm: 4.3672 2023-02-12 03:08:04,005 - mmseg - INFO - Iter [156200/160000] lr: 1.425e-06, eta: 0:13:01, time: 0.240, data_time: 0.047, memory: 7748, decode.loss_ce: 0.1925, decode.acc_seg: 92.1363, aux.loss_ce: 0.1497, aux.acc_seg: 85.0309, loss: 0.3422, grad_norm: 3.5527 2023-02-12 03:08:14,314 - mmseg - INFO - Iter [156250/160000] lr: 1.407e-06, eta: 0:12:51, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1927, decode.acc_seg: 92.2760, aux.loss_ce: 0.1538, aux.acc_seg: 85.1772, loss: 0.3466, grad_norm: 3.6482 2023-02-12 03:08:24,485 - mmseg - INFO - Iter [156300/160000] lr: 1.388e-06, eta: 0:12:40, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1854, decode.acc_seg: 92.5235, aux.loss_ce: 0.1503, aux.acc_seg: 85.2708, loss: 0.3357, grad_norm: 3.9312 2023-02-12 03:08:35,006 - mmseg - INFO - Iter [156350/160000] lr: 1.369e-06, eta: 0:12:30, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1836, decode.acc_seg: 92.4930, aux.loss_ce: 0.1438, aux.acc_seg: 85.6750, loss: 0.3273, grad_norm: 3.5694 2023-02-12 03:08:44,618 - mmseg - INFO - Iter [156400/160000] lr: 1.350e-06, eta: 0:12:20, time: 0.192, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1693, decode.acc_seg: 92.9332, aux.loss_ce: 0.1373, aux.acc_seg: 86.0805, loss: 0.3066, grad_norm: 3.3174 2023-02-12 03:08:54,882 - mmseg - INFO - Iter [156450/160000] lr: 1.332e-06, eta: 0:12:09, time: 0.205, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1877, decode.acc_seg: 92.6207, aux.loss_ce: 0.1508, aux.acc_seg: 85.3332, loss: 0.3386, grad_norm: 3.7933 2023-02-12 03:09:04,592 - mmseg - INFO - Iter [156500/160000] lr: 1.313e-06, eta: 0:11:59, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1791, decode.acc_seg: 92.5768, aux.loss_ce: 0.1400, aux.acc_seg: 86.0538, loss: 0.3190, grad_norm: 3.8164 2023-02-12 03:09:14,516 - mmseg - INFO - Iter [156550/160000] lr: 1.294e-06, eta: 0:11:49, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1769, decode.acc_seg: 92.5920, aux.loss_ce: 0.1356, aux.acc_seg: 86.1828, loss: 0.3125, grad_norm: 3.3299 2023-02-12 03:09:24,242 - mmseg - INFO - Iter [156600/160000] lr: 1.275e-06, eta: 0:11:39, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1829, decode.acc_seg: 92.4609, aux.loss_ce: 0.1454, aux.acc_seg: 85.3681, loss: 0.3282, grad_norm: 4.3973 2023-02-12 03:09:34,097 - mmseg - INFO - Iter [156650/160000] lr: 1.257e-06, eta: 0:11:28, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1893, decode.acc_seg: 92.3097, aux.loss_ce: 0.1452, aux.acc_seg: 85.4638, loss: 0.3345, grad_norm: 3.7239 2023-02-12 03:09:43,976 - mmseg - INFO - Iter [156700/160000] lr: 1.238e-06, eta: 0:11:18, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1953, decode.acc_seg: 92.0605, aux.loss_ce: 0.1582, aux.acc_seg: 84.2566, loss: 0.3534, grad_norm: 5.3428 2023-02-12 03:09:54,431 - mmseg - 
INFO - Iter [156750/160000] lr: 1.219e-06, eta: 0:11:08, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1940, decode.acc_seg: 92.1550, aux.loss_ce: 0.1538, aux.acc_seg: 84.7053, loss: 0.3478, grad_norm: 3.4665 2023-02-12 03:10:04,546 - mmseg - INFO - Iter [156800/160000] lr: 1.200e-06, eta: 0:10:57, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1847, decode.acc_seg: 92.4243, aux.loss_ce: 0.1456, aux.acc_seg: 85.3546, loss: 0.3303, grad_norm: 3.1983 2023-02-12 03:10:14,759 - mmseg - INFO - Iter [156850/160000] lr: 1.182e-06, eta: 0:10:47, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1758, decode.acc_seg: 92.9844, aux.loss_ce: 0.1360, aux.acc_seg: 86.8062, loss: 0.3118, grad_norm: 3.2888 2023-02-12 03:10:24,473 - mmseg - INFO - Iter [156900/160000] lr: 1.163e-06, eta: 0:10:37, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2012, decode.acc_seg: 91.7718, aux.loss_ce: 0.1559, aux.acc_seg: 84.6721, loss: 0.3571, grad_norm: 4.1116 2023-02-12 03:10:34,525 - mmseg - INFO - Iter [156950/160000] lr: 1.144e-06, eta: 0:10:27, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1878, decode.acc_seg: 92.3857, aux.loss_ce: 0.1493, aux.acc_seg: 85.2394, loss: 0.3371, grad_norm: 4.6566 2023-02-12 03:10:44,560 - mmseg - INFO - Saving checkpoint at 157000 iterations 2023-02-12 03:10:45,230 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:10:45,230 - mmseg - INFO - Iter [157000/160000] lr: 1.125e-06, eta: 0:10:16, time: 0.214, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1988, decode.acc_seg: 92.2514, aux.loss_ce: 0.1551, aux.acc_seg: 85.3897, loss: 0.3539, grad_norm: 4.6851 2023-02-12 03:10:55,789 - mmseg - INFO - Iter [157050/160000] lr: 1.107e-06, eta: 0:10:06, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1813, decode.acc_seg: 92.6584, aux.loss_ce: 0.1423, aux.acc_seg: 85.9095, loss: 0.3236, grad_norm: 3.3029 2023-02-12 03:11:06,162 - mmseg - INFO - Iter [157100/160000] lr: 1.088e-06, eta: 0:09:56, time: 0.208, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1938, decode.acc_seg: 92.3643, aux.loss_ce: 0.1509, aux.acc_seg: 85.4796, loss: 0.3446, grad_norm: 4.5346 2023-02-12 03:11:15,840 - mmseg - INFO - Iter [157150/160000] lr: 1.069e-06, eta: 0:09:45, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2124, decode.acc_seg: 91.7002, aux.loss_ce: 0.1595, aux.acc_seg: 84.5551, loss: 0.3718, grad_norm: 5.8473 2023-02-12 03:11:26,124 - mmseg - INFO - Iter [157200/160000] lr: 1.050e-06, eta: 0:09:35, time: 0.206, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1734, decode.acc_seg: 92.8497, aux.loss_ce: 0.1392, aux.acc_seg: 86.1412, loss: 0.3126, grad_norm: 3.9197 2023-02-12 03:11:35,833 - mmseg - INFO - Iter [157250/160000] lr: 1.032e-06, eta: 0:09:25, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1661, decode.acc_seg: 93.1713, aux.loss_ce: 0.1368, aux.acc_seg: 86.2422, loss: 0.3029, grad_norm: 3.5260 2023-02-12 03:11:45,783 - mmseg - INFO - Iter [157300/160000] lr: 1.013e-06, eta: 0:09:15, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1852, decode.acc_seg: 92.5082, aux.loss_ce: 0.1503, aux.acc_seg: 85.4233, loss: 0.3355, grad_norm: 3.8837 2023-02-12 03:11:55,514 - mmseg - INFO - Iter [157350/160000] lr: 9.941e-07, eta: 0:09:04, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1836, decode.acc_seg: 92.6328, aux.loss_ce: 0.1435, aux.acc_seg: 85.9233, loss: 0.3272, grad_norm: 3.7498 2023-02-12 03:12:05,310 - mmseg 
- INFO - Iter [157400/160000] lr: 9.754e-07, eta: 0:08:54, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1756, decode.acc_seg: 92.7937, aux.loss_ce: 0.1400, aux.acc_seg: 86.1608, loss: 0.3156, grad_norm: 3.6099 2023-02-12 03:12:15,459 - mmseg - INFO - Iter [157450/160000] lr: 9.566e-07, eta: 0:08:44, time: 0.203, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1922, decode.acc_seg: 92.2192, aux.loss_ce: 0.1497, aux.acc_seg: 85.2684, loss: 0.3419, grad_norm: 4.1050 2023-02-12 03:12:27,577 - mmseg - INFO - Iter [157500/160000] lr: 9.379e-07, eta: 0:08:33, time: 0.242, data_time: 0.046, memory: 7748, decode.loss_ce: 0.1732, decode.acc_seg: 92.7257, aux.loss_ce: 0.1395, aux.acc_seg: 85.7004, loss: 0.3127, grad_norm: 3.1524 2023-02-12 03:12:38,169 - mmseg - INFO - Iter [157550/160000] lr: 9.191e-07, eta: 0:08:23, time: 0.211, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1801, decode.acc_seg: 92.8514, aux.loss_ce: 0.1410, aux.acc_seg: 85.9410, loss: 0.3212, grad_norm: 3.7656 2023-02-12 03:12:48,174 - mmseg - INFO - Iter [157600/160000] lr: 9.004e-07, eta: 0:08:13, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1791, decode.acc_seg: 92.7060, aux.loss_ce: 0.1450, aux.acc_seg: 85.8005, loss: 0.3241, grad_norm: 3.8467 2023-02-12 03:12:57,849 - mmseg - INFO - Iter [157650/160000] lr: 8.816e-07, eta: 0:08:03, time: 0.193, data_time: 0.004, memory: 7748, decode.loss_ce: 0.2032, decode.acc_seg: 91.7828, aux.loss_ce: 0.1541, aux.acc_seg: 84.9571, loss: 0.3574, grad_norm: 3.8491 2023-02-12 03:13:07,605 - mmseg - INFO - Iter [157700/160000] lr: 8.629e-07, eta: 0:07:52, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1736, decode.acc_seg: 92.7597, aux.loss_ce: 0.1386, aux.acc_seg: 86.0362, loss: 0.3123, grad_norm: 3.1931 2023-02-12 03:13:18,007 - mmseg - INFO - Iter [157750/160000] lr: 8.441e-07, eta: 0:07:42, time: 0.208, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1894, decode.acc_seg: 92.3059, aux.loss_ce: 0.1442, aux.acc_seg: 85.7620, loss: 0.3336, grad_norm: 3.8998 2023-02-12 03:13:28,491 - mmseg - INFO - Iter [157800/160000] lr: 8.254e-07, eta: 0:07:32, time: 0.210, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1945, decode.acc_seg: 92.0654, aux.loss_ce: 0.1549, aux.acc_seg: 84.6585, loss: 0.3495, grad_norm: 4.3406 2023-02-12 03:13:38,571 - mmseg - INFO - Iter [157850/160000] lr: 8.066e-07, eta: 0:07:21, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1858, decode.acc_seg: 92.3672, aux.loss_ce: 0.1423, aux.acc_seg: 85.9389, loss: 0.3281, grad_norm: 4.1450 2023-02-12 03:13:48,700 - mmseg - INFO - Iter [157900/160000] lr: 7.879e-07, eta: 0:07:11, time: 0.203, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1950, decode.acc_seg: 92.0110, aux.loss_ce: 0.1511, aux.acc_seg: 85.2287, loss: 0.3461, grad_norm: 4.0716 2023-02-12 03:13:58,925 - mmseg - INFO - Iter [157950/160000] lr: 7.691e-07, eta: 0:07:01, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1937, decode.acc_seg: 92.3634, aux.loss_ce: 0.1522, aux.acc_seg: 85.1523, loss: 0.3459, grad_norm: 4.3973 2023-02-12 03:14:09,283 - mmseg - INFO - Saving checkpoint at 158000 iterations 2023-02-12 03:14:09,951 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:14:09,951 - mmseg - INFO - Iter [158000/160000] lr: 7.504e-07, eta: 0:06:51, time: 0.221, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1797, decode.acc_seg: 92.6649, aux.loss_ce: 0.1455, aux.acc_seg: 85.6783, loss: 0.3252, grad_norm: 3.9389 2023-02-12 03:14:20,141 - 
mmseg - INFO - Iter [158050/160000] lr: 7.316e-07, eta: 0:06:40, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1726, decode.acc_seg: 92.8895, aux.loss_ce: 0.1401, aux.acc_seg: 86.3238, loss: 0.3127, grad_norm: 3.5776 2023-02-12 03:14:29,937 - mmseg - INFO - Iter [158100/160000] lr: 7.129e-07, eta: 0:06:30, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1907, decode.acc_seg: 92.2747, aux.loss_ce: 0.1479, aux.acc_seg: 85.4539, loss: 0.3386, grad_norm: 3.3735 2023-02-12 03:14:40,135 - mmseg - INFO - Iter [158150/160000] lr: 6.941e-07, eta: 0:06:20, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1892, decode.acc_seg: 92.1743, aux.loss_ce: 0.1518, aux.acc_seg: 84.8315, loss: 0.3410, grad_norm: 5.1146 2023-02-12 03:14:50,159 - mmseg - INFO - Iter [158200/160000] lr: 6.754e-07, eta: 0:06:10, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1796, decode.acc_seg: 92.7754, aux.loss_ce: 0.1437, aux.acc_seg: 86.0574, loss: 0.3233, grad_norm: 3.6717 2023-02-12 03:15:00,674 - mmseg - INFO - Iter [158250/160000] lr: 6.566e-07, eta: 0:05:59, time: 0.210, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1894, decode.acc_seg: 92.5736, aux.loss_ce: 0.1519, aux.acc_seg: 85.3511, loss: 0.3413, grad_norm: 3.4918 2023-02-12 03:15:10,749 - mmseg - INFO - Iter [158300/160000] lr: 6.379e-07, eta: 0:05:49, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1765, decode.acc_seg: 92.7060, aux.loss_ce: 0.1406, aux.acc_seg: 85.5870, loss: 0.3171, grad_norm: 3.8267 2023-02-12 03:15:20,840 - mmseg - INFO - Iter [158350/160000] lr: 6.191e-07, eta: 0:05:39, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1827, decode.acc_seg: 92.5278, aux.loss_ce: 0.1447, aux.acc_seg: 85.7921, loss: 0.3274, grad_norm: 3.3964 2023-02-12 03:15:30,845 - mmseg - INFO - Iter [158400/160000] lr: 6.004e-07, eta: 0:05:28, time: 0.200, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1907, decode.acc_seg: 92.3590, aux.loss_ce: 0.1488, aux.acc_seg: 85.2607, loss: 0.3395, grad_norm: 5.5543 2023-02-12 03:15:41,178 - mmseg - INFO - Iter [158450/160000] lr: 5.816e-07, eta: 0:05:18, time: 0.207, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1980, decode.acc_seg: 92.0871, aux.loss_ce: 0.1547, aux.acc_seg: 84.8910, loss: 0.3528, grad_norm: 4.3068 2023-02-12 03:15:51,447 - mmseg - INFO - Iter [158500/160000] lr: 5.629e-07, eta: 0:05:08, time: 0.205, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1952, decode.acc_seg: 92.2380, aux.loss_ce: 0.1540, aux.acc_seg: 85.0762, loss: 0.3492, grad_norm: 3.7524 2023-02-12 03:16:01,495 - mmseg - INFO - Iter [158550/160000] lr: 5.441e-07, eta: 0:04:58, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1890, decode.acc_seg: 92.3899, aux.loss_ce: 0.1488, aux.acc_seg: 85.6876, loss: 0.3377, grad_norm: 3.1856 2023-02-12 03:16:12,233 - mmseg - INFO - Iter [158600/160000] lr: 5.254e-07, eta: 0:04:47, time: 0.215, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1860, decode.acc_seg: 92.6983, aux.loss_ce: 0.1491, aux.acc_seg: 85.6229, loss: 0.3351, grad_norm: 4.5372 2023-02-12 03:16:22,142 - mmseg - INFO - Iter [158650/160000] lr: 5.066e-07, eta: 0:04:37, time: 0.198, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1912, decode.acc_seg: 92.3170, aux.loss_ce: 0.1458, aux.acc_seg: 85.4488, loss: 0.3370, grad_norm: 3.7586 2023-02-12 03:16:32,612 - mmseg - INFO - Iter [158700/160000] lr: 4.879e-07, eta: 0:04:27, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1897, decode.acc_seg: 92.7496, aux.loss_ce: 
0.1459, aux.acc_seg: 86.0518, loss: 0.3355, grad_norm: 3.6155 2023-02-12 03:16:44,581 - mmseg - INFO - Iter [158750/160000] lr: 4.691e-07, eta: 0:04:16, time: 0.239, data_time: 0.046, memory: 7748, decode.loss_ce: 0.1816, decode.acc_seg: 92.6216, aux.loss_ce: 0.1452, aux.acc_seg: 85.6072, loss: 0.3269, grad_norm: 4.7544 2023-02-12 03:16:54,647 - mmseg - INFO - Iter [158800/160000] lr: 4.504e-07, eta: 0:04:06, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1776, decode.acc_seg: 92.9144, aux.loss_ce: 0.1451, aux.acc_seg: 85.7520, loss: 0.3227, grad_norm: 3.5521 2023-02-12 03:17:04,553 - mmseg - INFO - Iter [158850/160000] lr: 4.316e-07, eta: 0:03:56, time: 0.198, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1911, decode.acc_seg: 92.3787, aux.loss_ce: 0.1533, aux.acc_seg: 85.1411, loss: 0.3443, grad_norm: 3.6376 2023-02-12 03:17:14,708 - mmseg - INFO - Iter [158900/160000] lr: 4.129e-07, eta: 0:03:46, time: 0.204, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1954, decode.acc_seg: 91.8906, aux.loss_ce: 0.1553, aux.acc_seg: 84.5052, loss: 0.3508, grad_norm: 3.8188 2023-02-12 03:17:24,388 - mmseg - INFO - Iter [158950/160000] lr: 3.941e-07, eta: 0:03:35, time: 0.194, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1884, decode.acc_seg: 92.4359, aux.loss_ce: 0.1466, aux.acc_seg: 85.6592, loss: 0.3350, grad_norm: 3.7347 2023-02-12 03:17:34,371 - mmseg - INFO - Saving checkpoint at 159000 iterations 2023-02-12 03:17:35,052 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:17:35,052 - mmseg - INFO - Iter [159000/160000] lr: 3.754e-07, eta: 0:03:25, time: 0.213, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1849, decode.acc_seg: 92.5231, aux.loss_ce: 0.1464, aux.acc_seg: 85.8339, loss: 0.3314, grad_norm: 4.3549 2023-02-12 03:17:44,877 - mmseg - INFO - Iter [159050/160000] lr: 3.566e-07, eta: 0:03:15, time: 0.196, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1793, decode.acc_seg: 92.9111, aux.loss_ce: 0.1455, aux.acc_seg: 85.7389, loss: 0.3248, grad_norm: 3.1951 2023-02-12 03:17:55,355 - mmseg - INFO - Iter [159100/160000] lr: 3.379e-07, eta: 0:03:05, time: 0.209, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1932, decode.acc_seg: 92.1661, aux.loss_ce: 0.1537, aux.acc_seg: 84.8028, loss: 0.3469, grad_norm: 4.8724 2023-02-12 03:18:05,667 - mmseg - INFO - Iter [159150/160000] lr: 3.191e-07, eta: 0:02:54, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1715, decode.acc_seg: 92.9814, aux.loss_ce: 0.1372, aux.acc_seg: 86.2527, loss: 0.3088, grad_norm: 3.3148 2023-02-12 03:18:15,431 - mmseg - INFO - Iter [159200/160000] lr: 3.004e-07, eta: 0:02:44, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1813, decode.acc_seg: 92.6170, aux.loss_ce: 0.1451, aux.acc_seg: 85.7295, loss: 0.3264, grad_norm: 3.5360 2023-02-12 03:18:25,493 - mmseg - INFO - Iter [159250/160000] lr: 2.816e-07, eta: 0:02:34, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1826, decode.acc_seg: 92.7236, aux.loss_ce: 0.1448, aux.acc_seg: 85.9381, loss: 0.3274, grad_norm: 3.5331 2023-02-12 03:18:35,323 - mmseg - INFO - Iter [159300/160000] lr: 2.629e-07, eta: 0:02:23, time: 0.197, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1944, decode.acc_seg: 92.4496, aux.loss_ce: 0.1531, aux.acc_seg: 85.3597, loss: 0.3475, grad_norm: 4.1155 2023-02-12 03:18:45,563 - mmseg - INFO - Iter [159350/160000] lr: 2.441e-07, eta: 0:02:13, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1773, decode.acc_seg: 92.7499, 
aux.loss_ce: 0.1387, aux.acc_seg: 86.1704, loss: 0.3160, grad_norm: 3.0558 2023-02-12 03:18:55,592 - mmseg - INFO - Iter [159400/160000] lr: 2.254e-07, eta: 0:02:03, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1762, decode.acc_seg: 92.7756, aux.loss_ce: 0.1437, aux.acc_seg: 85.7305, loss: 0.3199, grad_norm: 3.7460 2023-02-12 03:19:06,022 - mmseg - INFO - Iter [159450/160000] lr: 2.066e-07, eta: 0:01:53, time: 0.209, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1882, decode.acc_seg: 92.2799, aux.loss_ce: 0.1497, aux.acc_seg: 85.2160, loss: 0.3379, grad_norm: 3.6356 2023-02-12 03:19:15,763 - mmseg - INFO - Iter [159500/160000] lr: 1.879e-07, eta: 0:01:42, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1841, decode.acc_seg: 92.4199, aux.loss_ce: 0.1431, aux.acc_seg: 85.4377, loss: 0.3272, grad_norm: 4.5716 2023-02-12 03:19:26,365 - mmseg - INFO - Iter [159550/160000] lr: 1.691e-07, eta: 0:01:32, time: 0.212, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1888, decode.acc_seg: 92.3400, aux.loss_ce: 0.1460, aux.acc_seg: 85.6636, loss: 0.3347, grad_norm: 3.5215 2023-02-12 03:19:36,697 - mmseg - INFO - Iter [159600/160000] lr: 1.504e-07, eta: 0:01:22, time: 0.207, data_time: 0.005, memory: 7748, decode.loss_ce: 0.1826, decode.acc_seg: 92.6620, aux.loss_ce: 0.1477, aux.acc_seg: 85.4926, loss: 0.3303, grad_norm: 3.4541 2023-02-12 03:19:46,464 - mmseg - INFO - Iter [159650/160000] lr: 1.316e-07, eta: 0:01:11, time: 0.195, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1828, decode.acc_seg: 92.4731, aux.loss_ce: 0.1460, aux.acc_seg: 85.2969, loss: 0.3288, grad_norm: 3.6074 2023-02-12 03:19:56,525 - mmseg - INFO - Iter [159700/160000] lr: 1.129e-07, eta: 0:01:01, time: 0.201, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1896, decode.acc_seg: 92.6044, aux.loss_ce: 0.1535, aux.acc_seg: 85.4919, loss: 0.3431, grad_norm: 3.5290 2023-02-12 03:20:07,076 - mmseg - INFO - Iter [159750/160000] lr: 9.413e-08, eta: 0:00:51, time: 0.211, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1857, decode.acc_seg: 92.2586, aux.loss_ce: 0.1472, aux.acc_seg: 85.2649, loss: 0.3329, grad_norm: 3.6963 2023-02-12 03:20:17,209 - mmseg - INFO - Iter [159800/160000] lr: 7.537e-08, eta: 0:00:41, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1797, decode.acc_seg: 92.4057, aux.loss_ce: 0.1417, aux.acc_seg: 85.6904, loss: 0.3213, grad_norm: 4.0801 2023-02-12 03:20:27,291 - mmseg - INFO - Iter [159850/160000] lr: 5.663e-08, eta: 0:00:30, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1799, decode.acc_seg: 92.6295, aux.loss_ce: 0.1429, aux.acc_seg: 85.5540, loss: 0.3228, grad_norm: 3.6005 2023-02-12 03:20:37,379 - mmseg - INFO - Iter [159900/160000] lr: 3.787e-08, eta: 0:00:20, time: 0.202, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1828, decode.acc_seg: 92.6553, aux.loss_ce: 0.1477, aux.acc_seg: 85.6794, loss: 0.3304, grad_norm: 3.3373 2023-02-12 03:20:47,557 - mmseg - INFO - Iter [159950/160000] lr: 1.913e-08, eta: 0:00:10, time: 0.204, data_time: 0.004, memory: 7748, decode.loss_ce: 0.1812, decode.acc_seg: 92.5034, aux.loss_ce: 0.1405, aux.acc_seg: 85.8714, loss: 0.3217, grad_norm: 3.5156 2023-02-12 03:20:59,701 - mmseg - INFO - Saving checkpoint at 160000 iterations 2023-02-12 03:21:00,403 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:21:00,403 - mmseg - INFO - Iter [160000/160000] lr: 3.750e-10, eta: 0:00:00, time: 0.257, data_time: 0.048, memory: 7748, decode.loss_ce: 0.1959, decode.acc_seg: 
92.1861, aux.loss_ce: 0.1501, aux.acc_seg: 85.2591, loss: 0.3461, grad_norm: 4.3479 2023-02-12 03:21:11,787 - mmseg - INFO - per class results: 2023-02-12 03:21:11,793 - mmseg - INFO - +---------------------+-------+-------+ | Class | IoU | Acc | +---------------------+-------+-------+ | wall | 75.31 | 85.4 | | building | 81.21 | 91.12 | | sky | 93.92 | 97.54 | | floor | 79.18 | 89.79 | | tree | 73.28 | 85.42 | | ceiling | 82.6 | 92.08 | | road | 82.57 | 89.73 | | bed | 87.66 | 94.28 | | windowpane | 60.44 | 77.6 | | grass | 68.35 | 82.76 | | cabinet | 59.79 | 73.27 | | sidewalk | 63.31 | 79.43 | | person | 77.84 | 92.21 | | earth | 36.67 | 48.22 | | door | 45.13 | 62.93 | | table | 55.12 | 68.17 | | mountain | 55.38 | 74.33 | | plant | 51.21 | 67.26 | | curtain | 71.01 | 86.01 | | chair | 53.89 | 68.38 | | car | 83.23 | 91.01 | | water | 51.68 | 67.6 | | painting | 66.46 | 85.54 | | sofa | 63.56 | 81.42 | | shelf | 40.96 | 59.42 | | house | 44.28 | 54.53 | | sea | 60.61 | 88.98 | | mirror | 60.67 | 69.61 | | rug | 58.79 | 69.97 | | field | 30.59 | 44.96 | | armchair | 39.02 | 55.75 | | seat | 54.23 | 75.12 | | fence | 30.84 | 44.4 | | desk | 43.87 | 63.37 | | rock | 28.63 | 47.98 | | wardrobe | 51.4 | 68.46 | | lamp | 60.29 | 74.37 | | bathtub | 74.69 | 78.3 | | railing | 30.64 | 44.84 | | cushion | 54.54 | 67.4 | | base | 33.59 | 46.37 | | box | 17.03 | 19.17 | | column | 40.02 | 58.07 | | signboard | 35.09 | 51.68 | | chest of drawers | 39.04 | 63.93 | | counter | 22.71 | 35.26 | | sand | 42.78 | 53.09 | | sink | 69.45 | 80.85 | | skyscraper | 57.42 | 77.06 | | fireplace | 69.91 | 83.59 | | refrigerator | 73.53 | 84.71 | | grandstand | 35.15 | 62.06 | | path | 20.19 | 31.75 | | stairs | 31.71 | 39.41 | | runway | 65.28 | 84.49 | | case | 39.89 | 55.66 | | pool table | 91.06 | 94.28 | | pillow | 54.1 | 68.4 | | screen door | 58.45 | 65.36 | | stairway | 29.16 | 39.5 | | river | 12.51 | 22.19 | | bridge | 59.84 | 78.54 | | bookcase | 34.37 | 54.97 | | blind | 47.49 | 60.32 | | coffee table | 46.4 | 84.18 | | toilet | 85.11 | 90.93 | | flower | 40.24 | 54.56 | | book | 45.64 | 66.82 | | hill | 5.16 | 8.8 | | bench | 46.28 | 54.26 | | countertop | 56.81 | 78.05 | | stove | 71.55 | 79.27 | | palm | 45.53 | 79.62 | | kitchen island | 35.55 | 79.29 | | computer | 65.68 | 80.19 | | swivel chair | 44.96 | 56.91 | | boat | 39.96 | 49.24 | | bar | 24.01 | 32.41 | | arcade machine | 62.06 | 68.01 | | hovel | 30.57 | 40.81 | | bus | 84.05 | 95.46 | | towel | 63.38 | 77.02 | | light | 53.64 | 60.99 | | truck | 36.97 | 49.58 | | tower | 25.28 | 38.41 | | chandelier | 65.04 | 85.54 | | awning | 23.0 | 36.06 | | streetlight | 25.51 | 35.35 | | booth | 44.08 | 44.96 | | television receiver | 67.46 | 82.97 | | airplane | 57.3 | 66.06 | | dirt track | 19.95 | 39.35 | | apparel | 33.9 | 59.01 | | pole | 18.31 | 24.69 | | land | 3.92 | 5.31 | | bannister | 12.98 | 18.38 | | escalator | 30.2 | 41.52 | | ottoman | 42.79 | 59.35 | | bottle | 34.32 | 52.8 | | buffet | 38.85 | 47.51 | | poster | 23.71 | 30.99 | | stage | 18.17 | 37.33 | | van | 48.83 | 66.84 | | ship | 40.95 | 60.35 | | fountain | 23.75 | 26.71 | | conveyer belt | 64.84 | 85.36 | | canopy | 14.6 | 19.77 | | washer | 64.68 | 71.66 | | plaything | 26.45 | 40.34 | | swimming pool | 62.86 | 70.33 | | stool | 34.74 | 49.84 | | barrel | 23.32 | 64.98 | | basket | 27.06 | 39.31 | | waterfall | 43.96 | 55.35 | | tent | 80.42 | 98.23 | | bag | 14.38 | 22.5 | | minibike | 65.91 | 77.88 | | cradle | 81.28 | 91.08 | | oven | 25.49 | 61.7 | | ball | 45.4 | 
64.55 | | food | 41.84 | 50.06 | | step | 13.51 | 19.91 | | tank | 29.92 | 30.35 | | trade name | 28.18 | 37.74 | | microwave | 42.2 | 45.4 | | pot | 38.94 | 48.83 | | animal | 54.81 | 59.15 | | bicycle | 54.36 | 77.51 | | lake | 57.65 | 63.92 | | dishwasher | 61.59 | 78.22 | | screen | 44.65 | 69.38 | | blanket | 12.32 | 14.89 | | sculpture | 48.9 | 69.95 | | hood | 67.69 | 75.54 | | sconce | 41.46 | 53.6 | | vase | 30.46 | 53.63 | | traffic light | 32.25 | 51.37 | | tray | 5.07 | 9.77 | | ashcan | 34.7 | 50.77 | | fan | 55.89 | 68.35 | | pier | 60.41 | 79.27 | | crt screen | 3.47 | 11.74 | | plate | 50.22 | 69.63 | | monitor | 7.96 | 10.96 | | bulletin board | 49.02 | 60.3 | | shower | 0.0 | 0.0 | | radiator | 47.37 | 53.17 | | glass | 9.99 | 12.59 | | clock | 24.67 | 32.26 | | flag | 55.52 | 64.39 | +---------------------+-------+-------+ 2023-02-12 03:21:11,793 - mmseg - INFO - Summary: 2023-02-12 03:21:11,794 - mmseg - INFO - +-------+-------+-------+ | aAcc | mIoU | mAcc | +-------+-------+-------+ | 81.57 | 46.17 | 59.54 | +-------+-------+-------+ 2023-02-12 03:21:12,463 - mmseg - INFO - Now best checkpoint is saved as best_mIoU_iter_160000.pth. 2023-02-12 03:21:12,463 - mmseg - INFO - Best mIoU is 0.4617 at 160000 iter. 2023-02-12 03:21:12,464 - mmseg - INFO - Exp name: diffseg_swin_t_2x8_512x512_160k_ade20k_v20.py 2023-02-12 03:21:12,464 - mmseg - INFO - Iter(val) [250] aAcc: 0.8157, mIoU: 0.4617, mAcc: 0.5954, IoU.wall: 0.7531, IoU.building: 0.8121, IoU.sky: 0.9392, IoU.floor: 0.7918, IoU.tree: 0.7328, IoU.ceiling: 0.8260, IoU.road: 0.8257, IoU.bed : 0.8766, IoU.windowpane: 0.6044, IoU.grass: 0.6835, IoU.cabinet: 0.5979, IoU.sidewalk: 0.6331, IoU.person: 0.7784, IoU.earth: 0.3667, IoU.door: 0.4513, IoU.table: 0.5512, IoU.mountain: 0.5538, IoU.plant: 0.5121, IoU.curtain: 0.7101, IoU.chair: 0.5389, IoU.car: 0.8323, IoU.water: 0.5168, IoU.painting: 0.6646, IoU.sofa: 0.6356, IoU.shelf: 0.4096, IoU.house: 0.4428, IoU.sea: 0.6061, IoU.mirror: 0.6067, IoU.rug: 0.5879, IoU.field: 0.3059, IoU.armchair: 0.3902, IoU.seat: 0.5423, IoU.fence: 0.3084, IoU.desk: 0.4387, IoU.rock: 0.2863, IoU.wardrobe: 0.5140, IoU.lamp: 0.6029, IoU.bathtub: 0.7469, IoU.railing: 0.3064, IoU.cushion: 0.5454, IoU.base: 0.3359, IoU.box: 0.1703, IoU.column: 0.4002, IoU.signboard: 0.3509, IoU.chest of drawers: 0.3904, IoU.counter: 0.2271, IoU.sand: 0.4278, IoU.sink: 0.6945, IoU.skyscraper: 0.5742, IoU.fireplace: 0.6991, IoU.refrigerator: 0.7353, IoU.grandstand: 0.3515, IoU.path: 0.2019, IoU.stairs: 0.3171, IoU.runway: 0.6528, IoU.case: 0.3989, IoU.pool table: 0.9106, IoU.pillow: 0.5410, IoU.screen door: 0.5845, IoU.stairway: 0.2916, IoU.river: 0.1251, IoU.bridge: 0.5984, IoU.bookcase: 0.3437, IoU.blind: 0.4749, IoU.coffee table: 0.4640, IoU.toilet: 0.8511, IoU.flower: 0.4024, IoU.book: 0.4564, IoU.hill: 0.0516, IoU.bench: 0.4628, IoU.countertop: 0.5681, IoU.stove: 0.7155, IoU.palm: 0.4553, IoU.kitchen island: 0.3555, IoU.computer: 0.6568, IoU.swivel chair: 0.4496, IoU.boat: 0.3996, IoU.bar: 0.2401, IoU.arcade machine: 0.6206, IoU.hovel: 0.3057, IoU.bus: 0.8405, IoU.towel: 0.6338, IoU.light: 0.5364, IoU.truck: 0.3697, IoU.tower: 0.2528, IoU.chandelier: 0.6504, IoU.awning: 0.2300, IoU.streetlight: 0.2551, IoU.booth: 0.4408, IoU.television receiver: 0.6746, IoU.airplane: 0.5730, IoU.dirt track: 0.1995, IoU.apparel: 0.3390, IoU.pole: 0.1831, IoU.land: 0.0392, IoU.bannister: 0.1298, IoU.escalator: 0.3020, IoU.ottoman: 0.4279, IoU.bottle: 0.3432, IoU.buffet: 0.3885, IoU.poster: 0.2371, IoU.stage: 0.1817, IoU.van: 0.4883, 
IoU.ship: 0.4095, IoU.fountain: 0.2375, IoU.conveyer belt: 0.6484, IoU.canopy: 0.1460, IoU.washer: 0.6468, IoU.plaything: 0.2645, IoU.swimming pool: 0.6286, IoU.stool: 0.3474, IoU.barrel: 0.2332, IoU.basket: 0.2706, IoU.waterfall: 0.4396, IoU.tent: 0.8042, IoU.bag: 0.1438, IoU.minibike: 0.6591, IoU.cradle: 0.8128, IoU.oven: 0.2549, IoU.ball: 0.4540, IoU.food: 0.4184, IoU.step: 0.1351, IoU.tank: 0.2992, IoU.trade name: 0.2818, IoU.microwave: 0.4220, IoU.pot: 0.3894, IoU.animal: 0.5481, IoU.bicycle: 0.5436, IoU.lake: 0.5765, IoU.dishwasher: 0.6159, IoU.screen: 0.4465, IoU.blanket: 0.1232, IoU.sculpture: 0.4890, IoU.hood: 0.6769, IoU.sconce: 0.4146, IoU.vase: 0.3046, IoU.traffic light: 0.3225, IoU.tray: 0.0507, IoU.ashcan: 0.3470, IoU.fan: 0.5589, IoU.pier: 0.6041, IoU.crt screen: 0.0347, IoU.plate: 0.5022, IoU.monitor: 0.0796, IoU.bulletin board: 0.4902, IoU.shower: 0.0000, IoU.radiator: 0.4737, IoU.glass: 0.0999, IoU.clock: 0.2467, IoU.flag: 0.5552, Acc.wall: 0.8540, Acc.building: 0.9112, Acc.sky: 0.9754, Acc.floor: 0.8979, Acc.tree: 0.8542, Acc.ceiling: 0.9208, Acc.road: 0.8973, Acc.bed : 0.9428, Acc.windowpane: 0.7760, Acc.grass: 0.8276, Acc.cabinet: 0.7327, Acc.sidewalk: 0.7943, Acc.person: 0.9221, Acc.earth: 0.4822, Acc.door: 0.6293, Acc.table: 0.6817, Acc.mountain: 0.7433, Acc.plant: 0.6726, Acc.curtain: 0.8601, Acc.chair: 0.6838, Acc.car: 0.9101, Acc.water: 0.6760, Acc.painting: 0.8554, Acc.sofa: 0.8142, Acc.shelf: 0.5942, Acc.house: 0.5453, Acc.sea: 0.8898, Acc.mirror: 0.6961, Acc.rug: 0.6997, Acc.field: 0.4496, Acc.armchair: 0.5575, Acc.seat: 0.7512, Acc.fence: 0.4440, Acc.desk: 0.6337, Acc.rock: 0.4798, Acc.wardrobe: 0.6846, Acc.lamp: 0.7437, Acc.bathtub: 0.7830, Acc.railing: 0.4484, Acc.cushion: 0.6740, Acc.base: 0.4637, Acc.box: 0.1917, Acc.column: 0.5807, Acc.signboard: 0.5168, Acc.chest of drawers: 0.6393, Acc.counter: 0.3526, Acc.sand: 0.5309, Acc.sink: 0.8085, Acc.skyscraper: 0.7706, Acc.fireplace: 0.8359, Acc.refrigerator: 0.8471, Acc.grandstand: 0.6206, Acc.path: 0.3175, Acc.stairs: 0.3941, Acc.runway: 0.8449, Acc.case: 0.5566, Acc.pool table: 0.9428, Acc.pillow: 0.6840, Acc.screen door: 0.6536, Acc.stairway: 0.3950, Acc.river: 0.2219, Acc.bridge: 0.7854, Acc.bookcase: 0.5497, Acc.blind: 0.6032, Acc.coffee table: 0.8418, Acc.toilet: 0.9093, Acc.flower: 0.5456, Acc.book: 0.6682, Acc.hill: 0.0880, Acc.bench: 0.5426, Acc.countertop: 0.7805, Acc.stove: 0.7927, Acc.palm: 0.7962, Acc.kitchen island: 0.7929, Acc.computer: 0.8019, Acc.swivel chair: 0.5691, Acc.boat: 0.4924, Acc.bar: 0.3241, Acc.arcade machine: 0.6801, Acc.hovel: 0.4081, Acc.bus: 0.9546, Acc.towel: 0.7702, Acc.light: 0.6099, Acc.truck: 0.4958, Acc.tower: 0.3841, Acc.chandelier: 0.8554, Acc.awning: 0.3606, Acc.streetlight: 0.3535, Acc.booth: 0.4496, Acc.television receiver: 0.8297, Acc.airplane: 0.6606, Acc.dirt track: 0.3935, Acc.apparel: 0.5901, Acc.pole: 0.2469, Acc.land: 0.0531, Acc.bannister: 0.1838, Acc.escalator: 0.4152, Acc.ottoman: 0.5935, Acc.bottle: 0.5280, Acc.buffet: 0.4751, Acc.poster: 0.3099, Acc.stage: 0.3733, Acc.van: 0.6684, Acc.ship: 0.6035, Acc.fountain: 0.2671, Acc.conveyer belt: 0.8536, Acc.canopy: 0.1977, Acc.washer: 0.7166, Acc.plaything: 0.4034, Acc.swimming pool: 0.7033, Acc.stool: 0.4984, Acc.barrel: 0.6498, Acc.basket: 0.3931, Acc.waterfall: 0.5535, Acc.tent: 0.9823, Acc.bag: 0.2250, Acc.minibike: 0.7788, Acc.cradle: 0.9108, Acc.oven: 0.6170, Acc.ball: 0.6455, Acc.food: 0.5006, Acc.step: 0.1991, Acc.tank: 0.3035, Acc.trade name: 0.3774, Acc.microwave: 0.4540, Acc.pot: 0.4883, Acc.animal: 
0.5915, Acc.bicycle: 0.7751, Acc.lake: 0.6392, Acc.dishwasher: 0.7822, Acc.screen: 0.6938, Acc.blanket: 0.1489, Acc.sculpture: 0.6995, Acc.hood: 0.7554, Acc.sconce: 0.5360, Acc.vase: 0.5363, Acc.traffic light: 0.5137, Acc.tray: 0.0977, Acc.ashcan: 0.5077, Acc.fan: 0.6835, Acc.pier: 0.7927, Acc.crt screen: 0.1174, Acc.plate: 0.6963, Acc.monitor: 0.1096, Acc.bulletin board: 0.6030, Acc.shower: 0.0000, Acc.radiator: 0.5317, Acc.glass: 0.1259, Acc.clock: 0.3226, Acc.flag: 0.6439
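
The run above finishes with a best mIoU of 0.4617 at iteration 160000 (saved as best_mIoU_iter_160000.pth). Below is a minimal, self-contained Python sketch for pulling the training curve and that final validation summary back out of a saved copy of this log; it is not part of the original run. The LOG_PATH value is an assumed location, and the regular expressions rely only on the TextLoggerHook fields visible above (Iter [i/160000], lr, loss, grad_norm, and the Iter(val) line with aAcc/mIoU/mAcc).

import re

# Assumed location of the saved TextLoggerHook output shown above; adjust as needed.
LOG_PATH = "work_dirs/diffseg_swin_t_2x8_512x512_160k_ade20k_v20/training.log"

# Matches training entries such as:
#   "Iter [150000/160000] lr: 3.750e-06, ..., loss: 0.3482, grad_norm: 3.6253"
ITER_RE = re.compile(
    r"Iter \[(\d+)/\d+\] lr: ([\d.e+-]+),.*?, loss: ([\d.]+), grad_norm: ([\d.]+)")
# Matches the evaluation summary line:
#   "Iter(val) [250] aAcc: 0.8157, mIoU: 0.4617, mAcc: 0.5954, ..."
VAL_RE = re.compile(r"Iter\(val\).*?aAcc: ([\d.]+), mIoU: ([\d.]+), mAcc: ([\d.]+)")

train_curve = []    # (iteration, lr, loss, grad_norm) per logged interval
val_summary = None  # (aAcc, mIoU, mAcc) from the last evaluation found

with open(LOG_PATH) as f:
    for line in f:
        for m in ITER_RE.finditer(line):
            it, lr, loss, gnorm = m.groups()
            train_curve.append((int(it), float(lr), float(loss), float(gnorm)))
        v = VAL_RE.search(line)
        if v:
            val_summary = tuple(float(x) for x in v.groups())

print(f"parsed {len(train_curve)} training entries")
if train_curve:
    it, lr, loss, _ = train_curve[-1]
    print(f"last entry: iter {it}, lr {lr:.3e}, loss {loss:.4f}")
if val_summary:
    print("final eval -> aAcc: {:.4f}, mIoU: {:.4f}, mAcc: {:.4f}".format(*val_summary))

Under these assumptions the parser should report the last training entry at iteration 160000 with loss 0.3461 and a final summary of aAcc 0.8157 / mIoU 0.4617 / mAcc 0.5954, matching the tables printed above.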