enable deterministic training
- README.md +1 -1
- configs/metadata.json +3 -2
- configs/multi_gpu_evaluate.json +0 -1
- configs/multi_gpu_train.json +0 -1
- configs/train.json +1 -2
- docs/README.md +1 -1
README.md CHANGED
@@ -112,7 +112,7 @@ python -m monai.bundle run --config_file configs/inference.json
 python -m monai.bundle trt_export --net_id network_def --filepath models/model_trt.ts --ckpt_file models/model.pt --meta_file configs/metadata.json --config_file configs/inference.json --precision <fp32/fp16> --dynamic_batchsize "[1, 4, 8]"
 ```
 
-#### Execute inference with the TensorRT model
+#### Execute inference with the TensorRT model:
 
 ```
 python -m monai.bundle run --config_file "['configs/inference.json', 'configs/inference_trt.json']"
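The two commands in this hunk can also be driven from a script. A minimal sketch (not part of the bundle) that wraps the documented CLI calls with subprocess; fp16 is picked here as one of the documented precision options, and the file paths are the README's own examples:

```python
import subprocess

# Export the checkpoint to a TensorRT-based TorchScript model (command copied
# from the README; fp16 chosen as an example precision).
cmd_export = (
    "python -m monai.bundle trt_export --net_id network_def "
    "--filepath models/model_trt.ts --ckpt_file models/model.pt "
    "--meta_file configs/metadata.json --config_file configs/inference.json "
    "--precision fp16 --dynamic_batchsize '[1, 4, 8]'"
)
subprocess.run(cmd_export, shell=True, check=True)

# Run inference with the TensorRT-specific config layered over the base config.
cmd_infer = (
    "python -m monai.bundle run --config_file "
    "\"['configs/inference.json', 'configs/inference_trt.json']\""
)
subprocess.run(cmd_infer, shell=True, check=True)
```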
configs/metadata.json CHANGED
@@ -1,7 +1,8 @@
 {
     "schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
-    "version": "0.4.4",
+    "version": "0.4.5",
     "changelog": {
+        "0.4.5": "enable deterministic training",
         "0.4.4": "add the command of executing inference with TensorRT models",
         "0.4.3": "fix figure and weights inconsistent error",
         "0.4.2": "use torch 1.13.1",
@@ -22,7 +23,7 @@
         "0.1.0": "complete the model package",
         "0.0.1": "initialize the model package structure"
     },
-    "monai_version": "1.2.0rc3",
+    "monai_version": "1.2.0rc4",
     "pytorch_version": "1.13.1",
     "numpy_version": "1.22.2",
     "optional_packages_version": {
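For reference, a quick sketch to confirm the bumped metadata after this change (plain JSON reading; the expected values are the ones shown in the hunk above):

```python
import json

# Load the bundle metadata and print the fields this commit touches.
with open("configs/metadata.json") as f:
    meta = json.load(f)

print(meta["version"])             # "0.4.5"
print(meta["changelog"]["0.4.5"])  # "enable deterministic training"
print(meta["monai_version"])       # "1.2.0rc4"
```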
configs/multi_gpu_evaluate.json CHANGED
@@ -19,7 +19,6 @@
     "$import torch.distributed as dist",
     "$dist.is_initialized() or dist.init_process_group(backend='nccl')",
     "$torch.cuda.set_device(@device)",
-    "$setattr(torch.backends.cudnn, 'benchmark', True)",
     "$import logging",
     "$@validate#evaluator.logger.setLevel(logging.WARNING if dist.get_rank() > 0 else logging.INFO)"
 ],
configs/multi_gpu_train.json CHANGED
@@ -29,7 +29,6 @@
     "$dist.is_initialized() or dist.init_process_group(backend='nccl')",
     "$torch.cuda.set_device(@device)",
     "$monai.utils.set_determinism(seed=123)",
-    "$setattr(torch.backends.cudnn, 'benchmark', True)",
     "$import logging",
     "$@train#trainer.logger.setLevel(logging.WARNING if dist.get_rank() > 0 else logging.INFO)",
     "$@validate#evaluator.logger.setLevel(logging.WARNING if dist.get_rank() > 0 else logging.INFO)"
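A rough Python transcription of the updated multi-GPU `initialize` expressions (an illustrative sketch, not code emitted by the bundle; `@device` is assumed to resolve to the current rank's GPU):

```python
import torch
import torch.distributed as dist
import monai

# Initialize the default process group once per worker.
if not dist.is_initialized():
    dist.init_process_group(backend="nccl")

# Assumption: @device resolves to this rank's GPU in the bundle config.
device = torch.device(f"cuda:{dist.get_rank()}")
torch.cuda.set_device(device)

# Seed all RNGs for reproducible training; with the cuDNN benchmark line
# removed, autotuned (non-deterministic) kernel selection no longer applies.
monai.utils.set_determinism(seed=123)
```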
configs/train.json CHANGED
@@ -299,8 +299,7 @@
     }
 },
 "initialize": [
-    "$monai.utils.set_determinism(seed=123)",
-    "$setattr(torch.backends.cudnn, 'benchmark', True)"
+    "$monai.utils.set_determinism(seed=123)"
 ],
 "run": [
     "$@train#trainer.run()"
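The single-GPU config now keeps only `set_determinism` in `initialize`, which is enough because MONAI's helper both seeds the RNGs and configures cuDNN for deterministic behavior; a small sketch of the effect (behavior as of MONAI 1.2):

```python
import torch
from monai.utils import set_determinism

# Seeds Python, NumPy and torch RNGs, and switches cuDNN to deterministic mode.
set_determinism(seed=123)

print(torch.backends.cudnn.deterministic)  # True
print(torch.backends.cudnn.benchmark)      # False: benchmark autotuning stays off
```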
docs/README.md CHANGED
@@ -105,7 +105,7 @@ python -m monai.bundle run --config_file configs/inference.json
 python -m monai.bundle trt_export --net_id network_def --filepath models/model_trt.ts --ckpt_file models/model.pt --meta_file configs/metadata.json --config_file configs/inference.json --precision <fp32/fp16> --dynamic_batchsize "[1, 4, 8]"
 ```
 
-#### Execute inference with the TensorRT model
+#### Execute inference with the TensorRT model:
 
 ```
 python -m monai.bundle run --config_file "['configs/inference.json', 'configs/inference_trt.json']"