{
    "device": "$torch.device('cuda:' + os.environ['LOCAL_RANK'])",
    "network": {
        "_target_": "torch.nn.parallel.DistributedDataParallel",
        "module": "$@network_def.to(@device)",
        "device_ids": [
            "@device"
        ]
    },
    "validate#sampler": {
        "_target_": "DistributedSampler",
        "dataset": "@validate#dataset",
        "even_divisible": false,
        "shuffle": false
    },
    "validate#dataloader#sampler": "@validate#sampler",
    "validate#handlers#1#_disabled_": "$dist.get_rank() > 0",
    "initialize": [
        "$import torch.distributed as dist",
        "$dist.is_initialized() or dist.init_process_group(backend='nccl')",
        "$torch.cuda.set_device(@device)",
        "$setattr(torch.backends.cudnn, 'benchmark', True)",
        "$import logging",
        "$@validate#evaluator.logger.setLevel(logging.WARNING if dist.get_rank() > 0 else logging.INFO)"
    ],
    "run": [
        "$@validate#evaluator.run()"
    ],
    "finalize": [
        "$dist.is_initialized() and dist.destroy_process_group()"
    ]
}
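
This override turns the bundle's single-GPU evaluation into a multi-GPU run: each process (launched so that LOCAL_RANK is set, e.g. via torchrun) picks its own CUDA device, wraps the network in DistributedDataParallel, shards the validation data with a DistributedSampler, disables one of the validation handlers and lowers logging on ranks other than 0, and tears down the process group in "finalize". The sketch below is a minimal Python equivalent of that flow, not the bundle implementation itself; build_network, build_val_dataset, and build_evaluator are hypothetical stand-ins for the objects the base config defines as network_def, validate#dataset, and validate#evaluator.

```python
import logging
import os

import torch
import torch.distributed as dist
from monai.data import DataLoader, DistributedSampler
from torch.nn.parallel import DistributedDataParallel


def main():
    # "initialize": join the NCCL process group and pin this rank to its GPU.
    if not dist.is_initialized():
        dist.init_process_group(backend="nccl")
    device = torch.device("cuda:" + os.environ["LOCAL_RANK"])
    torch.cuda.set_device(device)
    torch.backends.cudnn.benchmark = True

    # "network": move the model to the local device, then wrap it in DDP.
    network = DistributedDataParallel(build_network().to(device), device_ids=[device])

    # "validate#sampler" / "validate#dataloader#sampler": shard the validation set
    # across ranks; even_divisible=False and shuffle=False match the config above.
    val_dataset = build_val_dataset()
    sampler = DistributedSampler(val_dataset, even_divisible=False, shuffle=False)
    dataloader = DataLoader(val_dataset, batch_size=1, sampler=sampler)

    # "validate#handlers#1#_disabled_" and the logger expression: keep full output
    # only on rank 0, quiet the other ranks.
    evaluator = build_evaluator(network, dataloader, device)
    evaluator.logger.setLevel(logging.WARNING if dist.get_rank() > 0 else logging.INFO)

    # "run" and "finalize".
    evaluator.run()
    if dist.is_initialized():
        dist.destroy_process_group()


if __name__ == "__main__":
    main()
```

In practice a config like this is not run on its own; it is merged over the base evaluation config on the monai.bundle command line and launched with torchrun (or an equivalent launcher) so that each worker process receives its LOCAL_RANK environment variable.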