update to use rc1

Files changed:
- configs/metadata.json  +3 -2
- configs/train.yaml     +4 -10
configs/metadata.json CHANGED
@@ -1,10 +1,11 @@
 {
     "schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
-    "version": "0.0.1",
+    "version": "0.0.2",
     "changelog": {
+        "0.0.2": "update to use rc1",
         "0.0.1": "Initial version"
     },
-    "monai_version": "1.
+    "monai_version": "1.1.0rc1",
     "pytorch_version": "1.13.0",
     "numpy_version": "1.22.2",
     "optional_packages_version": {
configs/train.yaml CHANGED
@@ -60,7 +60,7 @@ cache_datasets:
     data: "@datalist"
     transform: $@preprocessing.set_random_state(123)
     hash_as_key: true
-    runtime_cache:
+    runtime_cache: threads
   epochs: "@max_epochs"
   seed: "$int(3) if @cross_subjects else int(2)"
 - _target_: ShuffleBuffer
@@ -69,7 +69,7 @@ cache_datasets:
     data: "@datalist"
     transform: $@preprocessing.set_random_state(234)
     hash_as_key: true
-    runtime_cache:
+    runtime_cache: threads
   epochs: "@max_epochs"
   seed: 2

@@ -78,9 +78,6 @@ zip_dataset:
   data: "$map(lambda t: dict(image=monai.transforms.concatenate(t), label=t[1]), zip(*@cache_datasets))"

 data_loader:
-  _requires_:
-  - $@cache_datasets[0].data.disable_share_memory_cache() # to cache on GPU
-  - $@cache_datasets[1].data.disable_share_memory_cache()
   _target_: ThreadDataLoader
   dataset: "@zip_dataset"
   batch_size: 64
@@ -122,7 +119,7 @@ val:
       data: "@val_datalist"
       transform: $@preprocessing.set_random_state(123)
       hash_as_key: true
-      runtime_cache:
+      runtime_cache: threads
     epochs: -1 # infinite
     seed: "$int(3) if @cross_subjects else int(2)"
   - _target_: ShuffleBuffer
@@ -131,7 +128,7 @@ val:
       data: "@val_datalist"
       transform: $@preprocessing.set_random_state(234)
       hash_as_key: true
-      runtime_cache:
+      runtime_cache: threads
     epochs: -1 # infinite
     seed: 2

@@ -140,9 +137,6 @@ val:
     data: "$map(lambda t: dict(image=monai.transforms.concatenate(t), label=t[1]), zip(*@val#cache_datasets))"

   data_loader:
-    _requires_:
-    - $@val#cache_datasets[0].data.disable_share_memory_cache()
-    - $@val#cache_datasets[1].data.disable_share_memory_cache()
     _target_: ThreadDataLoader
     dataset: "@val#zip_dataset"
     batch_size: 64
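Note (not part of the diff): the switch from the _requires_ / disable_share_memory_cache() workaround to runtime_cache: threads relies on the CacheDataset option available in MONAI 1.1.0rc1, where runtime_cache="threads" fills a plain in-process cache list lazily at iteration time (the mode suggested for caching CUDA tensors), which appears to be why the explicit shared-memory-cache calls could be dropped. Below is a minimal, self-contained Python sketch of the same setup; the in-memory datalist and transforms are hypothetical stand-ins for @datalist and @preprocessing in the bundle.

import numpy as np
from monai.data import CacheDataset, ThreadDataLoader
from monai.transforms import Compose, Lambdad, ScaleIntensityd

# Hypothetical stand-in for "@datalist": small in-memory items so the sketch runs without files.
datalist = [{"image": np.random.rand(32, 32).astype("float32"), "label": i % 2} for i in range(8)]

# Stand-in for "@preprocessing"; set_random_state(123) mirrors $@preprocessing.set_random_state(123).
preprocessing = Compose([
    Lambdad(keys="image", func=lambda x: x[None]),  # add a channel axis
    ScaleIntensityd(keys="image"),
]).set_random_state(123)

# runtime_cache="threads" caches each item in a regular list on first access,
# so no shared-memory cache is prepared (or needs disabling) at construction time.
dataset = CacheDataset(
    data=datalist,
    transform=preprocessing,
    hash_as_key=True,
    runtime_cache="threads",
)

loader = ThreadDataLoader(dataset, batch_size=4)
for batch in loader:
    print(batch["image"].shape)  # torch.Size([4, 1, 32, 32])
    break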