project-monai commited on
Commit
a0ae4d2
·
verified ·
1 Parent(s): e77416f

Upload pediatric_abdominal_ct_segmentation version 0.4.5

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ models/A100/dynunet_FT_trt_16.ts filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
configs/TS_test.json ADDED
@@ -0,0 +1,406 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "training": [
3
+ {
4
+ "image": "/processed/Public/CT_TotalSegmentator/s0013.nii.gz",
5
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0013.nii.gz"
6
+ },
7
+ {
8
+ "image": "/processed/Public/CT_TotalSegmentator/s0029.nii.gz",
9
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0029.nii.gz"
10
+ },
11
+ {
12
+ "image": "/processed/Public/CT_TotalSegmentator/s0038.nii.gz",
13
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0038.nii.gz"
14
+ },
15
+ {
16
+ "image": "/processed/Public/CT_TotalSegmentator/s0040.nii.gz",
17
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0040.nii.gz"
18
+ },
19
+ {
20
+ "image": "/processed/Public/CT_TotalSegmentator/s0119.nii.gz",
21
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0119.nii.gz"
22
+ },
23
+ {
24
+ "image": "/processed/Public/CT_TotalSegmentator/s0230.nii.gz",
25
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0230.nii.gz"
26
+ },
27
+ {
28
+ "image": "/processed/Public/CT_TotalSegmentator/s0235.nii.gz",
29
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0235.nii.gz"
30
+ },
31
+ {
32
+ "image": "/processed/Public/CT_TotalSegmentator/s0236.nii.gz",
33
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0236.nii.gz"
34
+ },
35
+ {
36
+ "image": "/processed/Public/CT_TotalSegmentator/s0244.nii.gz",
37
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0244.nii.gz"
38
+ },
39
+ {
40
+ "image": "/processed/Public/CT_TotalSegmentator/s0291.nii.gz",
41
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0291.nii.gz"
42
+ },
43
+ {
44
+ "image": "/processed/Public/CT_TotalSegmentator/s0308.nii.gz",
45
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0308.nii.gz"
46
+ },
47
+ {
48
+ "image": "/processed/Public/CT_TotalSegmentator/s0311.nii.gz",
49
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0311.nii.gz"
50
+ },
51
+ {
52
+ "image": "/processed/Public/CT_TotalSegmentator/s0423.nii.gz",
53
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0423.nii.gz"
54
+ },
55
+ {
56
+ "image": "/processed/Public/CT_TotalSegmentator/s0440.nii.gz",
57
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0440.nii.gz"
58
+ },
59
+ {
60
+ "image": "/processed/Public/CT_TotalSegmentator/s0441.nii.gz",
61
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0441.nii.gz"
62
+ },
63
+ {
64
+ "image": "/processed/Public/CT_TotalSegmentator/s0450.nii.gz",
65
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0450.nii.gz"
66
+ },
67
+ {
68
+ "image": "/processed/Public/CT_TotalSegmentator/s0459.nii.gz",
69
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0459.nii.gz"
70
+ },
71
+ {
72
+ "image": "/processed/Public/CT_TotalSegmentator/s0468.nii.gz",
73
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0468.nii.gz"
74
+ },
75
+ {
76
+ "image": "/processed/Public/CT_TotalSegmentator/s0470.nii.gz",
77
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0470.nii.gz"
78
+ },
79
+ {
80
+ "image": "/processed/Public/CT_TotalSegmentator/s0482.nii.gz",
81
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0482.nii.gz"
82
+ },
83
+ {
84
+ "image": "/processed/Public/CT_TotalSegmentator/s0499.nii.gz",
85
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0499.nii.gz"
86
+ },
87
+ {
88
+ "image": "/processed/Public/CT_TotalSegmentator/s0505.nii.gz",
89
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0505.nii.gz"
90
+ },
91
+ {
92
+ "image": "/processed/Public/CT_TotalSegmentator/s0543.nii.gz",
93
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0543.nii.gz"
94
+ },
95
+ {
96
+ "image": "/processed/Public/CT_TotalSegmentator/s0561.nii.gz",
97
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0561.nii.gz"
98
+ },
99
+ {
100
+ "image": "/processed/Public/CT_TotalSegmentator/s0667.nii.gz",
101
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0667.nii.gz"
102
+ },
103
+ {
104
+ "image": "/processed/Public/CT_TotalSegmentator/s0687.nii.gz",
105
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0687.nii.gz"
106
+ },
107
+ {
108
+ "image": "/processed/Public/CT_TotalSegmentator/s0735.nii.gz",
109
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0735.nii.gz"
110
+ },
111
+ {
112
+ "image": "/processed/Public/CT_TotalSegmentator/s0753.nii.gz",
113
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0753.nii.gz"
114
+ },
115
+ {
116
+ "image": "/processed/Public/CT_TotalSegmentator/s0802.nii.gz",
117
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0802.nii.gz"
118
+ },
119
+ {
120
+ "image": "/processed/Public/CT_TotalSegmentator/s0829.nii.gz",
121
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0829.nii.gz"
122
+ },
123
+ {
124
+ "image": "/processed/Public/CT_TotalSegmentator/s0923.nii.gz",
125
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0923.nii.gz"
126
+ },
127
+ {
128
+ "image": "/processed/Public/CT_TotalSegmentator/s0933.nii.gz",
129
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0933.nii.gz"
130
+ },
131
+ {
132
+ "image": "/processed/Public/CT_TotalSegmentator/s0994.nii.gz",
133
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0994.nii.gz"
134
+ },
135
+ {
136
+ "image": "/processed/Public/CT_TotalSegmentator/s1094.nii.gz",
137
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1094.nii.gz"
138
+ },
139
+ {
140
+ "image": "/processed/Public/CT_TotalSegmentator/s1096.nii.gz",
141
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1096.nii.gz"
142
+ },
143
+ {
144
+ "image": "/processed/Public/CT_TotalSegmentator/s1119.nii.gz",
145
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1119.nii.gz"
146
+ },
147
+ {
148
+ "image": "/processed/Public/CT_TotalSegmentator/s1121.nii.gz",
149
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1121.nii.gz"
150
+ },
151
+ {
152
+ "image": "/processed/Public/CT_TotalSegmentator/s1152.nii.gz",
153
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1152.nii.gz"
154
+ },
155
+ {
156
+ "image": "/processed/Public/CT_TotalSegmentator/s1174.nii.gz",
157
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1174.nii.gz"
158
+ },
159
+ {
160
+ "image": "/processed/Public/CT_TotalSegmentator/s1176.nii.gz",
161
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1176.nii.gz"
162
+ },
163
+ {
164
+ "image": "/processed/Public/CT_TotalSegmentator/s1212.nii.gz",
165
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1212.nii.gz"
166
+ },
167
+ {
168
+ "image": "/processed/Public/CT_TotalSegmentator/s1240.nii.gz",
169
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1240.nii.gz"
170
+ },
171
+ {
172
+ "image": "/processed/Public/CT_TotalSegmentator/s1248.nii.gz",
173
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1248.nii.gz"
174
+ },
175
+ {
176
+ "image": "/processed/Public/CT_TotalSegmentator/s1249.nii.gz",
177
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1249.nii.gz"
178
+ },
179
+ {
180
+ "image": "/processed/Public/CT_TotalSegmentator/s1276.nii.gz",
181
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1276.nii.gz"
182
+ },
183
+ {
184
+ "image": "/processed/Public/CT_TotalSegmentator/s1322.nii.gz",
185
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1322.nii.gz"
186
+ },
187
+ {
188
+ "image": "/processed/Public/CT_TotalSegmentator/s1323.nii.gz",
189
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1323.nii.gz"
190
+ },
191
+ {
192
+ "image": "/processed/Public/CT_TotalSegmentator/s1347.nii.gz",
193
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1347.nii.gz"
194
+ },
195
+ {
196
+ "image": "/processed/Public/CT_TotalSegmentator/s1377.nii.gz",
197
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1377.nii.gz"
198
+ },
199
+ {
200
+ "image": "/processed/Public/CT_TotalSegmentator/s1386.nii.gz",
201
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1386.nii.gz"
202
+ }
203
+ ],
204
+ "validation": [
205
+ {
206
+ "image": "/processed/Public/CT_TotalSegmentator/s0013.nii.gz",
207
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0013.nii.gz"
208
+ },
209
+ {
210
+ "image": "/processed/Public/CT_TotalSegmentator/s0029.nii.gz",
211
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0029.nii.gz"
212
+ },
213
+ {
214
+ "image": "/processed/Public/CT_TotalSegmentator/s0038.nii.gz",
215
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0038.nii.gz"
216
+ },
217
+ {
218
+ "image": "/processed/Public/CT_TotalSegmentator/s0040.nii.gz",
219
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0040.nii.gz"
220
+ },
221
+ {
222
+ "image": "/processed/Public/CT_TotalSegmentator/s0119.nii.gz",
223
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0119.nii.gz"
224
+ },
225
+ {
226
+ "image": "/processed/Public/CT_TotalSegmentator/s0230.nii.gz",
227
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0230.nii.gz"
228
+ },
229
+ {
230
+ "image": "/processed/Public/CT_TotalSegmentator/s0235.nii.gz",
231
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0235.nii.gz"
232
+ },
233
+ {
234
+ "image": "/processed/Public/CT_TotalSegmentator/s0236.nii.gz",
235
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0236.nii.gz"
236
+ },
237
+ {
238
+ "image": "/processed/Public/CT_TotalSegmentator/s0244.nii.gz",
239
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0244.nii.gz"
240
+ },
241
+ {
242
+ "image": "/processed/Public/CT_TotalSegmentator/s0291.nii.gz",
243
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0291.nii.gz"
244
+ },
245
+ {
246
+ "image": "/processed/Public/CT_TotalSegmentator/s0308.nii.gz",
247
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0308.nii.gz"
248
+ },
249
+ {
250
+ "image": "/processed/Public/CT_TotalSegmentator/s0311.nii.gz",
251
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0311.nii.gz"
252
+ },
253
+ {
254
+ "image": "/processed/Public/CT_TotalSegmentator/s0423.nii.gz",
255
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0423.nii.gz"
256
+ },
257
+ {
258
+ "image": "/processed/Public/CT_TotalSegmentator/s0440.nii.gz",
259
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0440.nii.gz"
260
+ },
261
+ {
262
+ "image": "/processed/Public/CT_TotalSegmentator/s0441.nii.gz",
263
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0441.nii.gz"
264
+ },
265
+ {
266
+ "image": "/processed/Public/CT_TotalSegmentator/s0450.nii.gz",
267
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0450.nii.gz"
268
+ },
269
+ {
270
+ "image": "/processed/Public/CT_TotalSegmentator/s0459.nii.gz",
271
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0459.nii.gz"
272
+ },
273
+ {
274
+ "image": "/processed/Public/CT_TotalSegmentator/s0468.nii.gz",
275
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0468.nii.gz"
276
+ },
277
+ {
278
+ "image": "/processed/Public/CT_TotalSegmentator/s0470.nii.gz",
279
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0470.nii.gz"
280
+ },
281
+ {
282
+ "image": "/processed/Public/CT_TotalSegmentator/s0482.nii.gz",
283
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0482.nii.gz"
284
+ },
285
+ {
286
+ "image": "/processed/Public/CT_TotalSegmentator/s0499.nii.gz",
287
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0499.nii.gz"
288
+ },
289
+ {
290
+ "image": "/processed/Public/CT_TotalSegmentator/s0505.nii.gz",
291
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0505.nii.gz"
292
+ },
293
+ {
294
+ "image": "/processed/Public/CT_TotalSegmentator/s0543.nii.gz",
295
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0543.nii.gz"
296
+ },
297
+ {
298
+ "image": "/processed/Public/CT_TotalSegmentator/s0561.nii.gz",
299
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0561.nii.gz"
300
+ },
301
+ {
302
+ "image": "/processed/Public/CT_TotalSegmentator/s0667.nii.gz",
303
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0667.nii.gz"
304
+ },
305
+ {
306
+ "image": "/processed/Public/CT_TotalSegmentator/s0687.nii.gz",
307
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0687.nii.gz"
308
+ },
309
+ {
310
+ "image": "/processed/Public/CT_TotalSegmentator/s0735.nii.gz",
311
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0735.nii.gz"
312
+ },
313
+ {
314
+ "image": "/processed/Public/CT_TotalSegmentator/s0753.nii.gz",
315
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0753.nii.gz"
316
+ },
317
+ {
318
+ "image": "/processed/Public/CT_TotalSegmentator/s0802.nii.gz",
319
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0802.nii.gz"
320
+ },
321
+ {
322
+ "image": "/processed/Public/CT_TotalSegmentator/s0829.nii.gz",
323
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0829.nii.gz"
324
+ },
325
+ {
326
+ "image": "/processed/Public/CT_TotalSegmentator/s0923.nii.gz",
327
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0923.nii.gz"
328
+ },
329
+ {
330
+ "image": "/processed/Public/CT_TotalSegmentator/s0933.nii.gz",
331
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0933.nii.gz"
332
+ },
333
+ {
334
+ "image": "/processed/Public/CT_TotalSegmentator/s0994.nii.gz",
335
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s0994.nii.gz"
336
+ },
337
+ {
338
+ "image": "/processed/Public/CT_TotalSegmentator/s1094.nii.gz",
339
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1094.nii.gz"
340
+ },
341
+ {
342
+ "image": "/processed/Public/CT_TotalSegmentator/s1096.nii.gz",
343
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1096.nii.gz"
344
+ },
345
+ {
346
+ "image": "/processed/Public/CT_TotalSegmentator/s1119.nii.gz",
347
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1119.nii.gz"
348
+ },
349
+ {
350
+ "image": "/processed/Public/CT_TotalSegmentator/s1121.nii.gz",
351
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1121.nii.gz"
352
+ },
353
+ {
354
+ "image": "/processed/Public/CT_TotalSegmentator/s1152.nii.gz",
355
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1152.nii.gz"
356
+ },
357
+ {
358
+ "image": "/processed/Public/CT_TotalSegmentator/s1174.nii.gz",
359
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1174.nii.gz"
360
+ },
361
+ {
362
+ "image": "/processed/Public/CT_TotalSegmentator/s1176.nii.gz",
363
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1176.nii.gz"
364
+ },
365
+ {
366
+ "image": "/processed/Public/CT_TotalSegmentator/s1212.nii.gz",
367
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1212.nii.gz"
368
+ },
369
+ {
370
+ "image": "/processed/Public/CT_TotalSegmentator/s1240.nii.gz",
371
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1240.nii.gz"
372
+ },
373
+ {
374
+ "image": "/processed/Public/CT_TotalSegmentator/s1248.nii.gz",
375
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1248.nii.gz"
376
+ },
377
+ {
378
+ "image": "/processed/Public/CT_TotalSegmentator/s1249.nii.gz",
379
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1249.nii.gz"
380
+ },
381
+ {
382
+ "image": "/processed/Public/CT_TotalSegmentator/s1276.nii.gz",
383
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1276.nii.gz"
384
+ },
385
+ {
386
+ "image": "/processed/Public/CT_TotalSegmentator/s1322.nii.gz",
387
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1322.nii.gz"
388
+ },
389
+ {
390
+ "image": "/processed/Public/CT_TotalSegmentator/s1323.nii.gz",
391
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1323.nii.gz"
392
+ },
393
+ {
394
+ "image": "/processed/Public/CT_TotalSegmentator/s1347.nii.gz",
395
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1347.nii.gz"
396
+ },
397
+ {
398
+ "image": "/processed/Public/CT_TotalSegmentator/s1377.nii.gz",
399
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1377.nii.gz"
400
+ },
401
+ {
402
+ "image": "/processed/Public/CT_TotalSegmentator/s1386.nii.gz",
403
+ "label": "/processed/Public/CT_TotalSegmentator/TS_split/test/s1386.nii.gz"
404
+ }
405
+ ]
406
+ }
configs/evaluate-standalone-parallel.yaml ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import json"
5
+ - "$import os"
6
+ - "$from scripts.compute_metric import compute_abdominal_ct_metrics"
7
+ - "$from scripts.compute_metric import compute"
8
+ workflow_type: evaluate
9
+ spatial_dims: "$len(@spatial_size)"
10
+ bundle_root: "."
11
+ output_dir: "$@bundle_root + '/eval/dynunet_FT_trt_32'"
12
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/"
13
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
14
+ datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
15
+ datalist_pred: "$[{**d, 'pred': os.path.join(@output_dir, d['label'].split('/')[-1].split('.')[0] + '_trans.nii.gz')} for d in @datalist]"
16
+ run:
17
+ #- "$compute_abdominal_ct_metrics(@datalist_pred, @output_dir)"
18
+ - "$compute(@datalist_pred, @output_dir)"
configs/evaluate-standalone.yaml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import json"
5
+ - "$import os"
6
+ - "$from scripts.compute_metric import compute_abdominal_ct_metrics"
7
+ - "$from scripts.compute_metric import compute"
8
+ workflow_type: evaluate
9
+ spatial_dims: "$len(@spatial_size)"
10
+ bundle_root: "."
11
+ output_dir: "$@bundle_root + '/eval/dynunet_FT_trt_32'"
12
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/"
13
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
14
+ datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
15
+ datalist_pred: "$[{**d, 'pred': os.path.join(@output_dir, d['label'].split('/')[-1].split('.')[0] + '_trans.nii.gz')} for d in @datalist]"
16
+ run:
17
+ - "$compute_single_node(@datalist_pred, @output_dir)"
configs/evaluate.yaml ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ validate#postprocessing:
3
+ _target_: Compose
4
+ transforms:
5
+ - _target_: Activationsd
6
+ keys: pred
7
+ softmax: true
8
+ - _target_: Invertd
9
+ keys:
10
+ - pred
11
+ - label
12
+ transform: "@validate#preprocessing"
13
+ orig_keys: image
14
+ meta_key_postfix: meta_dict
15
+ nearest_interp:
16
+ - false
17
+ - true
18
+ to_tensor: true
19
+ - _target_: AsDiscreted
20
+ keys:
21
+ - pred
22
+ - label
23
+ argmax:
24
+ - true
25
+ - false
26
+ to_onehot: 4
27
+ - _target_: CopyItemsd
28
+ keys: "pred"
29
+ times: 1
30
+ names: "pred_save"
31
+ - _target_: AsDiscreted
32
+ keys:
33
+ - pred_save
34
+ argmax:
35
+ - true
36
+ - _target_: SaveImaged
37
+ keys: pred_save
38
+ meta_keys: pred_meta_dict
39
+ output_dir: "@output_dir"
40
+ resample: false
41
+ squeeze_end_dims: true
42
+ validate#dataset:
43
+ _target_: Dataset
44
+ data: "@val_datalist"
45
+ transform: "@validate#preprocessing"
46
+ validate#handlers:
47
+ - _target_: CheckpointLoader
48
+ load_path: "$@ckpt_dir + '/dynunet_FT.pt'"
49
+ load_dict:
50
+ model: "@network"
51
+ - _target_: StatsHandler
52
+ iteration_log: false
53
+ - _target_: MetricsSaver
54
+ save_dir: "@output_dir"
55
+ metrics: val_dice
56
+ metric_details:
57
+ - val_dice
58
+ #batch_transform: "$monai.handlers.from_engine(['image_meta_dict'])"
59
+ summary_ops: "*"
60
+ initialize:
61
+ - "$setattr(torch.backends.cudnn, 'benchmark', True)"
62
+ run:
63
+ - "$@validate#evaluator.run()"
configs/inference.yaml ADDED
@@ -0,0 +1,161 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import os"
5
+ - "$import scripts.monai_utils"
6
+ workflow_type: inference
7
+ input_channels: 1
8
+ output_classes: 4
9
+ output_channels: 4
10
+ # arch_ckpt_path: "$@bundle_root + '/models/dynunet_FT.pt'"
11
+ # arch_ckpt: "$torch.load(@arch_ckpt_path, map_location=torch.device('cuda'))"
12
+ bundle_root: "."
13
+ output_dir: "$@bundle_root + '/eval/dynunet_FT'"
14
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/"
15
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
16
+ datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
17
+ device: "$torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')"
18
+ spatial_size:
19
+ - 96
20
+ - 96
21
+ - 96
22
+ spatial_dims: "$len(@spatial_size)"
23
+ labels:
24
+ background: 0
25
+ liver: 1
26
+ spleen: 2
27
+ pancreas: 3
28
+ network_def:
29
+ _target_: monai.networks.nets.DynUNet
30
+ spatial_dims: "@spatial_dims"
31
+ in_channels: "@input_channels"
32
+ out_channels: "@output_channels"
33
+ kernel_size:
34
+ - 3
35
+ - 3
36
+ - 3
37
+ - 3
38
+ - 3
39
+ - 3
40
+ strides:
41
+ - 1
42
+ - 2
43
+ - 2
44
+ - 2
45
+ - 2
46
+ -
47
+ - 2
48
+ - 2
49
+ - 1
50
+ upsample_kernel_size:
51
+ - 2
52
+ - 2
53
+ - 2
54
+ - 2
55
+ -
56
+ - 2
57
+ - 2
58
+ - 1
59
+ norm_name: "instance"
60
+ deep_supervision: false
61
+ res_block: true
62
+ network: "$@network_def.to(@device)"
63
+ image_key: image
64
+ preprocessing:
65
+ _target_: Compose
66
+ transforms:
67
+ - _target_: LoadImaged
68
+ keys: "@image_key"
69
+ reader: ITKReader
70
+ - _target_: EnsureChannelFirstd
71
+ keys: "@image_key"
72
+ - _target_: Orientationd
73
+ keys: image
74
+ axcodes: RAS
75
+ - _target_: Spacingd
76
+ keys:
77
+ - "@image_key"
78
+ pixdim:
79
+ - 1.5
80
+ - 1.5
81
+ - 3.0
82
+ mode:
83
+ - bilinear
84
+ - _target_: ScaleIntensityRanged
85
+ keys: "@image_key"
86
+ a_min: -250
87
+ a_max: 400
88
+ b_min: 0
89
+ b_max: 1
90
+ clip: true
91
+ - _target_: CropForegroundd
92
+ keys:
93
+ - "@image_key"
94
+ source_key: "@image_key"
95
+ mode:
96
+ - "minimum"
97
+ - _target_: EnsureTyped
98
+ keys: image
99
+ - _target_: CastToTyped
100
+ keys: "@image_key"
101
+ dtype: "$torch.float32"
102
+ dataset:
103
+ _target_: Dataset
104
+ data: "@datalist"
105
+ transform: "@preprocessing"
106
+ dataloader:
107
+ _target_: DataLoader
108
+ dataset: "@dataset"
109
+ batch_size: 1
110
+ shuffle: false
111
+ num_workers: 4
112
+ inferer:
113
+ _target_: SlidingWindowInferer
114
+ roi_size:
115
+ - 96
116
+ - 96
117
+ - 96
118
+ sw_batch_size: 4
119
+ overlap: 0.75
120
+ postprocessing:
121
+ _target_: Compose
122
+ transforms:
123
+ - _target_: Activationsd
124
+ keys: pred
125
+ softmax: true
126
+ - _target_: Invertd
127
+ keys: pred
128
+ transform: "@preprocessing"
129
+ orig_keys: image
130
+ meta_key_postfix: meta_dict
131
+ nearest_interp: false
132
+ to_tensor: true
133
+ - _target_: AsDiscreted
134
+ keys: pred
135
+ argmax: true
136
+ - _target_: SaveImaged
137
+ keys: pred
138
+ meta_keys: pred_meta_dict
139
+ output_dir: "@output_dir"
140
+ separate_folder: false
141
+ output_dtype: "$torch.int16"
142
+ handlers:
143
+ - _target_: CheckpointLoader
144
+ load_path: "$@bundle_root + '/models/dynunet_FT.pt'"
145
+ load_dict:
146
+ model: "@network"
147
+ - _target_: StatsHandler
148
+ iteration_log: false
149
+ evaluator:
150
+ _target_: SupervisedEvaluator
151
+ device: "@device"
152
+ val_data_loader: "@dataloader"
153
+ network: "@network"
154
+ inferer: "@inferer"
155
+ postprocessing: "@postprocessing"
156
+ val_handlers: "@handlers"
157
+ amp: true
158
+ initialize:
159
+ - "$setattr(torch.backends.cudnn, 'benchmark', True)"
160
+ run:
161
+ - "$@evaluator.run()"
configs/inference_segresnet.yaml ADDED
@@ -0,0 +1,140 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import os"
5
+ - "$import scripts.monai_utils"
6
+ workflow_type: inference
7
+ input_channels: 1
8
+ output_classes: 4
9
+ output_channels: 4
10
+ bundle_root: "."
11
+ output_dir: "$@bundle_root + '/eval/segresnet_FT'"
12
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/"
13
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
14
+ datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
15
+ device: "$torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')"
16
+ spatial_size:
17
+ - 96
18
+ - 96
19
+ - 96
20
+ spatial_dims: "$len(@spatial_size)"
21
+ labels:
22
+ background: 0
23
+ liver: 1
24
+ spleen: 2
25
+ pancreas: 3
26
+ network_def:
27
+ _target_: monai.networks.nets.SegResNet
28
+ blocks_down:
29
+ - 1
30
+ - 2
31
+ - 2
32
+ - 4
33
+ blocks_up:
34
+ - 1
35
+ - 1
36
+ - 1
37
+ init_filters: 16
38
+ in_channels: "@input_channels"
39
+ out_channels: "@output_channels"
40
+ dropout_prob: 0.0
41
+ network: "$@network_def.to(@device)"
42
+ image_key: image
43
+ preprocessing:
44
+ _target_: Compose
45
+ transforms:
46
+ - _target_: LoadImaged
47
+ keys: "@image_key"
48
+ reader: ITKReader
49
+ - _target_: EnsureChannelFirstd
50
+ keys: "@image_key"
51
+ - _target_: Orientationd
52
+ keys: image
53
+ axcodes: RAS
54
+ - _target_: Spacingd
55
+ keys:
56
+ - "@image_key"
57
+ pixdim:
58
+ - 1.5
59
+ - 1.5
60
+ - 3.0
61
+ mode:
62
+ - bilinear
63
+ - _target_: ScaleIntensityRanged
64
+ keys: "@image_key"
65
+ a_min: -250
66
+ a_max: 400
67
+ b_min: 0
68
+ b_max: 1
69
+ clip: true
70
+ - _target_: CropForegroundd
71
+ keys:
72
+ - "@image_key"
73
+ source_key: "@image_key"
74
+ mode:
75
+ - "minimum"
76
+ - _target_: EnsureTyped
77
+ keys: image
78
+ - _target_: CastToTyped
79
+ keys: "@image_key"
80
+ dtype: "$torch.float32"
81
+ dataset:
82
+ _target_: Dataset
83
+ data: "@datalist"
84
+ transform: "@preprocessing"
85
+ dataloader:
86
+ _target_: DataLoader
87
+ dataset: "@dataset"
88
+ batch_size: 1
89
+ shuffle: false
90
+ num_workers: 4
91
+ inferer:
92
+ _target_: SlidingWindowInferer
93
+ roi_size:
94
+ - 96
95
+ - 96
96
+ - 96
97
+ sw_batch_size: 4
98
+ overlap: 0.75
99
+ postprocessing:
100
+ _target_: Compose
101
+ transforms:
102
+ - _target_: Activationsd
103
+ keys: pred
104
+ softmax: true
105
+ - _target_: Invertd
106
+ keys: pred
107
+ transform: "@preprocessing"
108
+ orig_keys: image
109
+ meta_key_postfix: meta_dict
110
+ nearest_interp: false
111
+ to_tensor: true
112
+ - _target_: AsDiscreted
113
+ keys: pred
114
+ argmax: true
115
+ - _target_: SaveImaged
116
+ keys: pred
117
+ meta_keys: pred_meta_dict
118
+ output_dir: "@output_dir"
119
+ separate_folder: false
120
+ output_dtype: "$torch.int16"
121
+ handlers:
122
+ - _target_: CheckpointLoader
123
+ load_path: "$@bundle_root + '/models/segresnet_FT.pt'"
124
+ load_dict:
125
+ model: "@network"
126
+ - _target_: StatsHandler
127
+ iteration_log: false
128
+ evaluator:
129
+ _target_: SupervisedEvaluator
130
+ device: "@device"
131
+ val_data_loader: "@dataloader"
132
+ network: "@network"
133
+ inferer: "@inferer"
134
+ postprocessing: "@postprocessing"
135
+ val_handlers: "@handlers"
136
+ amp: true
137
+ initialize:
138
+ - "$setattr(torch.backends.cudnn, 'benchmark', True)"
139
+ run:
140
+ - "$@evaluator.run()"
configs/inference_swinunetr.yaml ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import os"
5
+ - "$import scripts.monai_utils"
6
+ workflow_type: inference
7
+ input_channels: 1
8
+ output_classes: 4
9
+ output_channels: 4
10
+ # arch_ckpt_path: "$@bundle_root + '/models/dynunet_FT.pt'"
11
+ # arch_ckpt: "$torch.load(@arch_ckpt_path, map_location=torch.device('cuda'))"
12
+ bundle_root: "."
13
+ output_dir: "$@bundle_root + '/eval/swinunetr_FT'"
14
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/"
15
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
16
+ datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
17
+ device: "$torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')"
18
+ spatial_size:
19
+ - 96
20
+ - 96
21
+ - 96
22
+ spatial_dims: "$len(@spatial_size)"
23
+ labels:
24
+ background: 0
25
+ liver: 1
26
+ spleen: 2
27
+ pancreas: 3
28
+ network_def:
29
+ _target_: monai.networks.nets.SwinUNETR
30
+ img_size: "@spatial_size"
31
+ in_channels: "@input_channels"
32
+ out_channels: "@output_channels"
33
+ feature_size: 48
34
+ drop_rate: 0.0
35
+ attn_drop_rate: 0.0
36
+ dropout_path_rate: 0.0
37
+ use_checkpoint: false
38
+ network: "$@network_def.to(@device)"
39
+ image_key: image
40
+ preprocessing:
41
+ _target_: Compose
42
+ transforms:
43
+ - _target_: LoadImaged
44
+ keys: "@image_key"
45
+ reader: ITKReader
46
+ - _target_: EnsureChannelFirstd
47
+ keys: "@image_key"
48
+ - _target_: Orientationd
49
+ keys: image
50
+ axcodes: RAS
51
+ - _target_: Spacingd
52
+ keys:
53
+ - "@image_key"
54
+ pixdim:
55
+ - 1.5
56
+ - 1.5
57
+ - 3.0
58
+ mode:
59
+ - bilinear
60
+ - _target_: ScaleIntensityRanged
61
+ keys: "@image_key"
62
+ a_min: -250
63
+ a_max: 400
64
+ b_min: 0
65
+ b_max: 1
66
+ clip: true
67
+ - _target_: CropForegroundd
68
+ keys:
69
+ - "@image_key"
70
+ source_key: "@image_key"
71
+ mode:
72
+ - "minimum"
73
+ - _target_: EnsureTyped
74
+ keys: image
75
+ - _target_: CastToTyped
76
+ keys: "@image_key"
77
+ dtype: "$torch.float32"
78
+ dataset:
79
+ _target_: Dataset
80
+ data: "@datalist"
81
+ transform: "@preprocessing"
82
+ dataloader:
83
+ _target_: DataLoader
84
+ dataset: "@dataset"
85
+ batch_size: 1
86
+ shuffle: false
87
+ num_workers: 4
88
+ inferer:
89
+ _target_: SlidingWindowInferer
90
+ roi_size:
91
+ - 96
92
+ - 96
93
+ - 96
94
+ sw_batch_size: 4
95
+ overlap: 0.75
96
+ postprocessing:
97
+ _target_: Compose
98
+ transforms:
99
+ - _target_: Activationsd
100
+ keys: pred
101
+ softmax: true
102
+ - _target_: Invertd
103
+ keys: pred
104
+ transform: "@preprocessing"
105
+ orig_keys: image
106
+ meta_key_postfix: meta_dict
107
+ nearest_interp: false
108
+ to_tensor: true
109
+ - _target_: AsDiscreted
110
+ keys: pred
111
+ argmax: true
112
+ - _target_: SaveImaged
113
+ keys: pred
114
+ meta_keys: pred_meta_dict
115
+ output_dir: "@output_dir"
116
+ separate_folder: false
117
+ output_dtype: "$torch.int16"
118
+ handlers:
119
+ - _target_: CheckpointLoader
120
+ load_path: "$@bundle_root + '/models/swinunetr_FT.pt'"
121
+ load_dict:
122
+ model: "@network"
123
+ - _target_: StatsHandler
124
+ iteration_log: false
125
+ evaluator:
126
+ _target_: SupervisedEvaluator
127
+ device: "@device"
128
+ val_data_loader: "@dataloader"
129
+ network: "@network"
130
+ inferer: "@inferer"
131
+ postprocessing: "@postprocessing"
132
+ val_handlers: "@handlers"
133
+ amp: true
134
+ initialize:
135
+ - "$setattr(torch.backends.cudnn, 'benchmark', True)"
136
+ run:
137
+ - "$@evaluator.run()"
configs/inference_trt.yaml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import os"
5
+ - "$import torch_tensorrt"
6
+ handlers#0#_disabled_: true
7
+ network_def: "$torch.jit.load(@bundle_root + '/models/A100/dynunet_FT_trt_16.ts')"
8
+ evaluator#amp: false
configs/logging.conf ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [loggers]
2
+ keys=root
3
+
4
+ [handlers]
5
+ keys=consoleHandler
6
+
7
+ [formatters]
8
+ keys=fullFormatter
9
+
10
+ [logger_root]
11
+ level=INFO
12
+ handlers=consoleHandler
13
+
14
+ [handler_consoleHandler]
15
+ class=StreamHandler
16
+ level=INFO
17
+ formatter=fullFormatter
18
+ args=(sys.stdout,)
19
+
20
+ [formatter_fullFormatter]
21
+ format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
configs/metadata.json ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
3
+ "version": "0.4.5",
4
+ "changelog": {
5
+ "0.4.5": "update to huggingface hosting",
6
+ "0.4.4": "initial bundle assemblage."
7
+ },
8
+ "monai_version": "1.3.0",
9
+ "pytorch_version": "2.1.0",
10
+ "numpy_version": "1.22.2",
11
+ "optional_packages_version": {
12
+ "fire": "0.4.0",
13
+ "nibabel": "4.0.1",
14
+ "pytorch-ignite": "0.4.11"
15
+ },
16
+ "name": "CT-Ped-Abdominal-Seg",
17
+ "task": "Training and Prediction of 3D Segmentation of Liver, Spleen and Pancreas from Abdominal CT images",
18
+ "description": "TotalSegmentator, TCIA and BTCV dataset pre-trained model for segmenting liver, spleen and pancreas, fine-tuned on Cincinnati Children's Healthy Pediatric Dataset with High Quality Masks. WandB hyperparameter search was used to find the best hyperparameters for training.",
19
+ "authors": "Cincinnati Children's (CCHMC) - CAIIR Center (https://www.cincinnatichildrens.org/research/divisions/r/radiology/labs/caiir)",
20
+ "copyright": "Copyright (c) MONAI Consortium",
21
+ "data_source": "TotalSegmentator, TCIA and BTCV dataset public data",
22
+ "data_type": "nifti",
23
+ "image_classes": "single channel 3D data HU thresholded and clipped to a range of 0 to 1",
24
+ "label_classes": "single channel data, 1 is liver, 2 is spleen, 3 is pancreas and 0 is everything else",
25
+ "pred_classes": "single channel data, 1 is liver, 2 is spleen, 3 is pancreas and 0 is everything else",
26
+ "eval_metrics": {
27
+ "TS_mean_dice": 0.9,
28
+ "TCIA_mean_dice": 0.87,
29
+ "CCHMC_mean_dice": 0.89
30
+ },
31
+ "intended_use": "Pediatric model - Validation on institutional data required before clinical use",
32
+ "references": [
33
+ "medRxiv paper: URL to be updated"
34
+ ],
35
+ "network_data_format": {
36
+ "inputs": {
37
+ "image": {
38
+ "type": "image",
39
+ "format": "hounsfield",
40
+ "modality": "CT",
41
+ "num_channels": 1,
42
+ "spatial_shape": [
43
+ 96,
44
+ 96,
45
+ 96
46
+ ],
47
+ "dtype": "float32",
48
+ "value_range": [
49
+ 0,
50
+ 1
51
+ ],
52
+ "is_patch_data": true,
53
+ "channel_def": {
54
+ "0": "image"
55
+ }
56
+ }
57
+ },
58
+ "outputs": {
59
+ "pred": {
60
+ "type": "image",
61
+ "format": "segmentation",
62
+ "num_channels": 4,
63
+ "spatial_shape": [
64
+ 96,
65
+ 96,
66
+ 96
67
+ ],
68
+ "dtype": "float32",
69
+ "value_range": [
70
+ 0,
71
+ 1,
72
+ 2,
73
+ 3
74
+ ],
75
+ "is_patch_data": true,
76
+ "label_def": {
77
+ "0": "background",
78
+ "1": "liver",
79
+ "2": "spleen",
80
+ "3": "pancreas"
81
+ }
82
+ }
83
+ }
84
+ }
85
+ }
configs/train-multigpu.yaml ADDED
@@ -0,0 +1,372 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import json"
5
+ - "$import os"
6
+ - "$import ignite"
7
+ - "$from scipy import ndimage"
8
+ - $import scripts
9
+ - $import scripts.monai_utils
10
+ - $import scripts.lr_scheduler
11
+ - $import scripts.utils
12
+ - $from monai.data.utils import list_data_collate
13
+ - "$import monai.apps.deepedit.transforms"
14
+ workflow_type: train
15
+ input_channels: 1
16
+ output_channels: 4
17
+ output_classes: 4
18
+ #arch_ckpt_path: "$@bundle_root + '/models/dynunet_FT.pt'"
19
+ #arch_ckpt: "$torch.load(@arch_ckpt_path, map_location=torch.device('cuda'))"
20
+ bundle_root: "."
21
+ ckpt_dir: "$@bundle_root + '/models'"
22
+ output_dir: "$@bundle_root + '/eval'"
23
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/" #"/workspace/data"
24
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
25
+ train_datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='training')"
26
+ val_datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
27
+ n_gpu:
28
+ - 0
29
+ - 1
30
+ device: "$torch.device('cuda:' + str(@n_gpu[0]) if torch.cuda.is_available() else 'cpu')"
31
+ device_list: "$scripts.monai_utils.get_device_list(@n_gpu)"
32
+ spatial_size:
33
+ - 96
34
+ - 96
35
+ - 96
36
+ spatial_dims: "$len(@spatial_size)"
37
+ labels:
38
+ background: 0
39
+ liver: 1
40
+ spleen: 2
41
+ pancreas: 3
42
+ network_def:
43
+ _target_: monai.networks.nets.DynUNet
44
+ spatial_dims: "@spatial_dims"
45
+ in_channels: "@input_channels"
46
+ out_channels: "@output_channels"
47
+ kernel_size:
48
+ - 3
49
+ - 3
50
+ - 3
51
+ - 3
52
+ - 3
53
+ - 3
54
+ strides:
55
+ - 1
56
+ - 2
57
+ - 2
58
+ - 2
59
+ - 2
60
+ -
61
+ - 2
62
+ - 2
63
+ - 1
64
+ upsample_kernel_size:
65
+ - 2
66
+ - 2
67
+ - 2
68
+ - 2
69
+ -
70
+ - 2
71
+ - 2
72
+ - 1
73
+ norm_name: "instance"
74
+ deep_supervision: false
75
+ res_block: true
76
+ network: "$@network_def.to(@device)"
77
+ loss:
78
+ _target_: DiceCELoss
79
+ include_background: false
80
+ to_onehot_y: true
81
+ softmax: true
82
+ squared_pred: true
83
+ batch: true
84
+ smooth_nr: 1.0e-06
85
+ smooth_dr: 1.0e-06
86
+ optimizer:
87
+ _target_: torch.optim.AdamW
88
+ params: "$@network.parameters()"
89
+ weight_decay: 1.0e-05
90
+ lr: 0.00005
91
+ max_epochs: 15
92
+ lr_scheduler:
93
+ _target_: scripts.lr_scheduler.LinearWarmupCosineAnnealingLR
94
+ optimizer: "@optimizer"
95
+ warmup_epochs: 10
96
+ warmup_start_lr: 0.0000005
97
+ eta_min: 1.0e-08
98
+ max_epochs: "@max_epochs"
99
+ image_key: image
100
+ label_key: label
101
+ val_interval: 2
102
+ train:
103
+ deterministic_transforms:
104
+ - _target_: LoadImaged
105
+ keys:
106
+ - "@image_key"
107
+ - "@label_key"
108
+ reader: ITKReader
109
+ - _target_: EnsureChannelFirstd
110
+ keys:
111
+ - "@image_key"
112
+ - "@label_key"
113
+ - _target_: Orientationd
114
+ keys:
115
+ - "@image_key"
116
+ - "@label_key"
117
+ axcodes: RAS
118
+ - _target_: Spacingd
119
+ keys:
120
+ - "@image_key"
121
+ - "@label_key"
122
+ pixdim:
123
+ - 1.5
124
+ - 1.5
125
+ - 3.0
126
+ mode:
127
+ - bilinear
128
+ - nearest
129
+ - _target_: scripts.monai_utils.AddLabelNamesd # monai.apps.deepedit.transforms
130
+ #_mode_: "debug"
131
+ keys: "@label_key"
132
+ label_names: "@labels"
133
+ - _target_: ScaleIntensityRanged
134
+ keys: "@image_key"
135
+ a_min: -250
136
+ a_max: 400
137
+ b_min: 0
138
+ b_max: 1
139
+ clip: true
140
+ - _target_: CropForegroundd
141
+ keys:
142
+ - "@image_key"
143
+ - "@label_key"
144
+ source_key: "@image_key"
145
+ mode:
146
+ - "minimum"
147
+ - "minimum"
148
+ - _target_: EnsureTyped
149
+ keys:
150
+ - "@image_key"
151
+ - "@label_key"
152
+ - _target_: CastToTyped
153
+ keys: "@image_key"
154
+ dtype: "$torch.float32"
155
+ random_transforms:
156
+ - _target_: RandCropByLabelClassesd
157
+ keys:
158
+ - "@image_key"
159
+ - "@label_key"
160
+ label_key: "@label_key" # label4crop
161
+ spatial_size: "@spatial_size"
162
+ num_classes: 4
163
+ ratios: null
164
+ allow_smaller: true
165
+ num_samples: 8
166
+ # - _target_: RandSpatialCropSamplesd
167
+ # keys:
168
+ # - "@image_key"
169
+ # - "@label_key"
170
+ # roi_size: "$[int(x * 0.75) for x in @spatial_size]"
171
+ # num_samples: 1
172
+ # max_roi_size: "@spatial_size"
173
+ # random_center: true
174
+ # random_size: true
175
+ # allow_missing_keys: false
176
+ - _target_: SpatialPadd
177
+ keys:
178
+ - "@image_key"
179
+ - "@label_key"
180
+ spatial_size: "@spatial_size"
181
+ method: "symmetric"
182
+ mode:
183
+ - "minimum"
184
+ - "minimum"
185
+ allow_missing_keys: false
186
+ - _target_: RandRotate90d
187
+ keys:
188
+ - "@image_key"
189
+ - "@label_key"
190
+ prob: 0.5
191
+ max_k: 3
192
+ allow_missing_keys: false
193
+ - _target_: SelectItemsd
194
+ keys:
195
+ - "@image_key"
196
+ - "@label_key"
197
+ - "label_names"
198
+ - _target_: CastToTyped
199
+ keys:
200
+ - "@image_key"
201
+ - "@label_key"
202
+ dtype:
203
+ - "$torch.float32"
204
+ - "$torch.uint8"
205
+ - _target_: ToTensord
206
+ keys:
207
+ - "@image_key"
208
+ - "@label_key"
209
+ preprocessing:
210
+ _target_: Compose
211
+ transforms: "$@train#deterministic_transforms + @train#random_transforms"
212
+ dataset:
213
+ _target_: PersistentDataset
214
+ data: "@train_datalist"
215
+ transform: "@train#preprocessing"
216
+ cache_dir: "$@bundle_root + '/cache'"
217
+ dataloader:
218
+ _target_: DataLoader
219
+ dataset: "@train#dataset"
220
+ batch_size: 2
221
+ shuffle: true
222
+ num_workers: 4
223
+ collate_fn: $list_data_collate
224
+ inferer:
225
+ _target_: SimpleInferer
226
+ postprocessing:
227
+ _target_: Compose
228
+ transforms:
229
+ - _target_: Activationsd
230
+ keys: pred
231
+ softmax: true
232
+ - _target_: AsDiscreted
233
+ keys:
234
+ - pred
235
+ - label
236
+ argmax:
237
+ - true
238
+ - false
239
+ to_onehot:
240
+ - "@output_classes"
241
+ - "@output_classes"
242
+ - _target_: scripts.monai_utils.SplitPredsLabeld # monai.apps.deepedit.transforms
243
+ keys: pred
244
+ # dice_function:
245
+ # _target_: "$engine.state.metrics['train_dice']"
246
+ handlers:
247
+ - _target_: LrScheduleHandler
248
+ lr_scheduler: "@lr_scheduler"
249
+ print_lr: true
250
+ # step_transform: "@dice_function"
251
+ - _target_: ValidationHandler
252
+ validator: "@validate#evaluator"
253
+ epoch_level: true
254
+ interval: "@val_interval"
255
+ - _target_: StatsHandler
256
+ tag_name: train_loss
257
+ output_transform: "$monai.handlers.from_engine(['loss'], first=True)"
258
+ - _target_: TensorBoardStatsHandler
259
+ log_dir: "@output_dir"
260
+ tag_name: train_loss
261
+ output_transform: "$monai.handlers.from_engine(['loss'], first=True)"
262
+ key_metric:
263
+ train_dice:
264
+ _target_: MeanDice
265
+ output_transform: "$monai.handlers.from_engine(['pred', 'label'])"
266
+ include_background: false
267
+ additional_metrics:
268
+ liver_dice:
269
+ _target_: monai.handlers.MeanDice
270
+ output_transform: "$monai.handlers.from_engine(['pred_liver', 'label_liver'])"
271
+ include_background: false
272
+ spleen_dice:
273
+ _target_: monai.handlers.MeanDice
274
+ output_transform: "$monai.handlers.from_engine(['pred_spleen', 'label_spleen'])"
275
+ include_background: false
276
+ pancreas_dice:
277
+ _target_: monai.handlers.MeanDice
278
+ output_transform: "$monai.handlers.from_engine(['pred_pancreas', 'label_pancreas'])"
279
+ include_background: false
280
+ trainer:
281
+ _target_: scripts.monai_utils.SupervisedTrainerMGPU # SupervisedTrainer
282
+ device: "@device_list" # "@device"
283
+ max_epochs: "@max_epochs"
284
+ train_data_loader: "@train#dataloader"
285
+ network: "@network"
286
+ loss_function: "@loss"
287
+ # train_interaction: null
288
+ optimizer: "@optimizer"
289
+ inferer: "@train#inferer"
290
+ postprocessing: "@train#postprocessing"
291
+ key_train_metric: "@train#key_metric"
292
+ additional_metrics: "@train#additional_metrics"
293
+ train_handlers: "@train#handlers"
294
+ amp: true
295
+ validate:
296
+ preprocessing:
297
+ _target_: Compose
298
+ transforms: "%train#deterministic_transforms"
299
+ dataset:
300
+ # _target_: CacheDataset
301
+ # data: "@val_datalist"
302
+ # transform: "@validate#preprocessing"
303
+ # cache_rate: 0.025
304
+ _target_: PersistentDataset
305
+ data: "@val_datalist"
306
+ transform: "@validate#preprocessing"
307
+ cache_dir: "$@bundle_root + '/cache'"
308
+ dataloader:
309
+ _target_: DataLoader
310
+ dataset: "@validate#dataset"
311
+ batch_size: 1
312
+ shuffle: false
313
+ num_workers: 4
314
+ collate_fn: $list_data_collate
315
+ inferer:
316
+ _target_: SlidingWindowInferer
317
+ roi_size: "@spatial_size"
318
+ sw_batch_size: 4
319
+ mode: "constant"
320
+ overlap: 0.5
321
+ postprocessing: "%train#postprocessing"
322
+ handlers:
323
+ - _target_: StatsHandler
324
+ iteration_log: false
325
+ - _target_: TensorBoardStatsHandler
326
+ log_dir: "@output_dir"
327
+ iteration_log: false
328
+ - _target_: CheckpointSaver
329
+ save_dir: "@ckpt_dir"
330
+ save_dict:
331
+ model: "@network"
332
+ save_key_metric: true
333
+ key_metric_filename: model_latest.pt
334
+ key_metric:
335
+ val_dice:
336
+ _target_: MeanDice
337
+ output_transform: "$monai.handlers.from_engine(['pred', 'label'])"
338
+ include_background: false
339
+ additional_metrics:
340
+ val_liver_dice:
341
+ _target_: monai.handlers.MeanDice
342
+ output_transform: "$monai.handlers.from_engine(['pred_liver', 'label_liver'])"
343
+ include_background: false
344
+ val_spleen_dice:
345
+ _target_: monai.handlers.MeanDice
346
+ output_transform: "$monai.handlers.from_engine(['pred_spleen', 'label_spleen'])"
347
+ include_background: false
348
+ val_pancreas_dice:
349
+ _target_: monai.handlers.MeanDice
350
+ output_transform: "$monai.handlers.from_engine(['pred_pancreas', 'label_pancreas'])"
351
+ include_background: false
352
+ evaluator:
353
+ _target_: SupervisedEvaluator
354
+ device: "@device"
355
+ val_data_loader: "@validate#dataloader"
356
+ network: "@network"
357
+ inferer: "@validate#inferer"
358
+ postprocessing: "@validate#postprocessing"
359
+ key_val_metric: "@validate#key_metric"
360
+ additional_metrics: "@validate#additional_metrics"
361
+ val_handlers: "@validate#handlers"
362
+ amp: true
363
+ initialize:
364
+ - "$monai.utils.set_determinism(seed=123)"
365
+ run:
366
+ - "$print('Training started... ')"
367
+ - "$print('output_channels: ', @output_channels )"
368
+ - "$print('spatial_dims: ', @spatial_dims)"
369
+ - "$print('Labels dict: ', @labels)"
370
+ - "$print('Get device list: ', scripts.monai_utils.get_device_list(@n_gpu))"
371
+ #- "$[print(i,': ', data['image'].shape) for i, data in enumerate(@train#dataloader)]"
372
+ - "$@train#trainer.run()"
configs/train.yaml ADDED
@@ -0,0 +1,361 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ imports:
3
+ - "$import glob"
4
+ - "$import json"
5
+ - "$import os"
6
+ - "$import ignite"
7
+ - "$from scipy import ndimage"
8
+ - "$import scripts.monai_utils"
9
+ - "$import scripts.lr_scheduler"
10
+ - "$import monai.apps.deepedit.transforms"
11
+ workflow_type: train
12
+ input_channels: 1
13
+ output_channels: 4
14
+ output_classes: 4
15
+ #arch_ckpt_path: "$@bundle_root + '/models/dynunet_FT.pt'"
16
+ #arch_ckpt: "$torch.load(@arch_ckpt_path, map_location=torch.device('cuda'))"
17
+ bundle_root: "."
18
+ ckpt_dir: "$@bundle_root + '/models'"
19
+ output_dir: "$@bundle_root + '/eval'"
20
+ dataset_dir: "/processed/Public/CT_TotalSegmentator/TS_split/test/" #"/workspace/data"
21
+ data_list_file_path: "$@bundle_root + '/configs/TS_test.json'"
22
+ train_datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='training')"
23
+ val_datalist: "$monai.data.load_decathlon_datalist(@data_list_file_path, data_list_key='validation')"
24
+ n_gpu: 0
25
+ device: "$torch.device('cuda:' + str(@n_gpu) if torch.cuda.is_available() else 'cpu')"
26
+ spatial_size:
27
+ - 96
28
+ - 96
29
+ - 96
30
+ spatial_dims: "$len(@spatial_size)"
31
+ labels:
32
+ background: 0
33
+ liver: 1
34
+ spleen: 2
35
+ pancreas: 3
36
+ network_def:
37
+ _target_: monai.networks.nets.DynUNet
38
+ spatial_dims: "@spatial_dims"
39
+ in_channels: "@input_channels"
40
+ out_channels: "@output_channels"
41
+ kernel_size:
42
+ - 3
43
+ - 3
44
+ - 3
45
+ - 3
46
+ - 3
47
+ - 3
48
+ strides:
49
+ - 1
50
+ - 2
51
+ - 2
52
+ - 2
53
+ - 2
54
+ -
55
+ - 2
56
+ - 2
57
+ - 1
58
+ upsample_kernel_size:
59
+ - 2
60
+ - 2
61
+ - 2
62
+ - 2
63
+ -
64
+ - 2
65
+ - 2
66
+ - 1
67
+ norm_name: "instance"
68
+ deep_supervision: false
69
+ res_block: true
70
+ network: "$@network_def.to(@device)"
71
+ loss:
72
+ _target_: DiceCELoss
73
+ include_background: false
74
+ to_onehot_y: true
75
+ softmax: true
76
+ squared_pred: true
77
+ batch: true
78
+ smooth_nr: 1.0e-06
79
+ smooth_dr: 1.0e-06
80
+ optimizer:
81
+ _target_: torch.optim.AdamW
82
+ params: "$@network.parameters()"
83
+ weight_decay: 1.0e-05
84
+ lr: 0.00005
85
+ max_epochs: 15
86
+ lr_scheduler:
87
+ _target_: scripts.lr_scheduler.LinearWarmupCosineAnnealingLR
88
+ optimizer: "@optimizer"
89
+ warmup_epochs: 10
90
+ warmup_start_lr: 0.0000005
91
+ eta_min: 1.0e-08
92
+ max_epochs: "@max_epochs"
93
+ image_key: image
94
+ label_key: label
95
+ val_interval: 2
96
+ train:
97
+ deterministic_transforms:
98
+ - _target_: LoadImaged
99
+ keys:
100
+ - "@image_key"
101
+ - "@label_key"
102
+ reader: ITKReader
103
+ - _target_: EnsureChannelFirstd
104
+ keys:
105
+ - "@image_key"
106
+ - "@label_key"
107
+ - _target_: Orientationd
108
+ keys:
109
+ - "@image_key"
110
+ - "@label_key"
111
+ axcodes: RAS
112
+ - _target_: Spacingd
113
+ keys:
114
+ - "@image_key"
115
+ - "@label_key"
116
+ pixdim:
117
+ - 1.5
118
+ - 1.5
119
+ - 3.0
120
+ mode:
121
+ - bilinear
122
+ - nearest
123
+ - _target_: scripts.monai_utils.AddLabelNamesd
124
+ keys: "@label_key"
125
+ label_names: "@labels"
126
+ - _target_: ScaleIntensityRanged
127
+ keys: "@image_key"
128
+ a_min: -250
129
+ a_max: 400
130
+ b_min: 0
131
+ b_max: 1
132
+ clip: true
133
+ - _target_: CropForegroundd
134
+ keys:
135
+ - "@image_key"
136
+ - "@label_key"
137
+ source_key: "@image_key"
138
+ mode:
139
+ - "minimum"
140
+ - "minimum"
141
+ - _target_: EnsureTyped
142
+ keys:
143
+ - "@image_key"
144
+ - "@label_key"
145
+ - _target_: CastToTyped
146
+ keys: "@image_key"
147
+ dtype: "$torch.float32"
148
+ random_transforms:
149
+ - _target_: RandCropByLabelClassesd
150
+ keys:
151
+ - "@image_key"
152
+ - "@label_key"
153
+ label_key: "@label_key" # label4crop
154
+ spatial_size: "@spatial_size"
155
+ num_classes: 4
156
+ ratios: null
157
+ allow_smaller: true
158
+ num_samples: 8
159
+ # - _target_: RandSpatialCropSamplesd
160
+ # keys:
161
+ # - "@image_key"
162
+ # - "@label_key"
163
+ # roi_size: "$[int(x * 0.75) for x in @spatial_size]"
164
+ # num_samples: 1
165
+ # max_roi_size: "@spatial_size"
166
+ # random_center: true
167
+ # random_size: true
168
+ # allow_missing_keys: false
169
+ - _target_: SpatialPadd
170
+ keys:
171
+ - "@image_key"
172
+ - "@label_key"
173
+ spatial_size: "@spatial_size"
174
+ method: "symmetric"
175
+ mode:
176
+ - "minimum"
177
+ - "minimum"
178
+ allow_missing_keys: false
179
+ - _target_: RandRotate90d
180
+ keys:
181
+ - "@image_key"
182
+ - "@label_key"
183
+ prob: 0.5
184
+ max_k: 3
185
+ allow_missing_keys: false
186
+ # - _target_: SelectItemsd
187
+ # keys:
188
+ # - "@image_key"
189
+ # - "@label_key"
190
+ # - "label_names"
191
+ - _target_: CastToTyped
192
+ keys:
193
+ - "@image_key"
194
+ - "@label_key"
195
+ dtype:
196
+ - "$torch.float32"
197
+ - "$torch.uint8"
198
+ - _target_: ToTensord
199
+ keys:
200
+ - "@image_key"
201
+ - "@label_key"
202
+ preprocessing:
203
+ _target_: Compose
204
+ transforms: "$@train#deterministic_transforms + @train#random_transforms"
205
+ dataset:
206
+ _target_: PersistentDataset
207
+ data: "@train_datalist"
208
+ transform: "@train#preprocessing"
209
+ cache_dir: "$@bundle_root + '/cache'"
210
+ dataloader:
211
+ _target_: DataLoader
212
+ dataset: "@train#dataset"
213
+ batch_size: 1
214
+ shuffle: true
215
+ num_workers: 4
216
+ inferer:
217
+ _target_: SimpleInferer
218
+ postprocessing:
219
+ _target_: Compose
220
+ transforms:
221
+ - _target_: Activationsd
222
+ keys: pred
223
+ softmax: true
224
+ - _target_: AsDiscreted
225
+ keys:
226
+ - pred
227
+ - label
228
+ argmax:
229
+ - true
230
+ - false
231
+ to_onehot:
232
+ - "@output_classes"
233
+ - "@output_classes"
234
+ - _target_: scripts.monai_utils.SplitPredsLabeld
235
+ keys: pred
236
+ # dice_function:
237
+ # _target_: "$engine.state.metrics['train_dice']"
238
+ handlers:
239
+ - _target_: LrScheduleHandler
240
+ lr_scheduler: "@lr_scheduler"
241
+ print_lr: true
242
+ # step_transform: "@dice_function"
243
+ - _target_: ValidationHandler
244
+ validator: "@validate#evaluator"
245
+ epoch_level: true
246
+ interval: "@val_interval"
247
+ - _target_: StatsHandler
248
+ tag_name: train_loss
249
+ output_transform: "$monai.handlers.from_engine(['loss'], first=True)"
250
+ - _target_: TensorBoardStatsHandler
251
+ log_dir: "@output_dir"
252
+ tag_name: train_loss
253
+ output_transform: "$monai.handlers.from_engine(['loss'], first=True)"
254
+ key_metric:
255
+ train_dice:
256
+ _target_: MeanDice
257
+ output_transform: "$monai.handlers.from_engine(['pred', 'label'])"
258
+ include_background: false
259
+ additional_metrics:
260
+ liver_dice:
261
+ _target_: monai.handlers.MeanDice
262
+ output_transform: "$monai.handlers.from_engine(['pred_liver', 'label_liver'])"
263
+ include_background: false
264
+ spleen_dice:
265
+ _target_: monai.handlers.MeanDice
266
+ output_transform: "$monai.handlers.from_engine(['pred_spleen', 'label_spleen'])"
267
+ include_background: false
268
+ pancreas_dice:
269
+ _target_: monai.handlers.MeanDice
270
+ output_transform: "$monai.handlers.from_engine(['pred_pancreas', 'label_pancreas'])"
271
+ include_background: false
272
+ trainer:
273
+ _target_: SupervisedTrainer
274
+ device: "@device"
275
+ max_epochs: "@max_epochs"
276
+ train_data_loader: "@train#dataloader"
277
+ network: "@network"
278
+ loss_function: "@loss"
279
+ # train_interaction: null
280
+ optimizer: "@optimizer"
281
+ inferer: "@train#inferer"
282
+ postprocessing: "@train#postprocessing"
283
+ key_train_metric: "@train#key_metric"
284
+ additional_metrics: "@train#additional_metrics"
285
+ train_handlers: "@train#handlers"
286
+ amp: true
287
+ validate:
288
+ preprocessing:
289
+ _target_: Compose
290
+ transforms: "%train#deterministic_transforms"
291
+ dataset:
292
+ # _target_: CacheDataset
293
+ # data: "@val_datalist"
294
+ # transform: "@validate#preprocessing"
295
+ # cache_rate: 0.025
296
+ _target_: PersistentDataset
297
+ data: "@val_datalist"
298
+ transform: "@validate#preprocessing"
299
+ cache_dir: "$@bundle_root + '/cache'"
300
+ dataloader:
301
+ _target_: DataLoader
302
+ dataset: "@validate#dataset"
303
+ batch_size: 1
304
+ shuffle: false
305
+ num_workers: 4
306
+ inferer:
307
+ _target_: SlidingWindowInferer
308
+ roi_size: "@spatial_size"
309
+ sw_batch_size: 4
310
+ mode: "constant"
311
+ overlap: 0.5
312
+ postprocessing: "%train#postprocessing"
313
+ handlers:
314
+ - _target_: StatsHandler
315
+ iteration_log: false
316
+ - _target_: TensorBoardStatsHandler
317
+ log_dir: "@output_dir"
318
+ iteration_log: false
319
+ - _target_: CheckpointSaver
320
+ save_dir: "@ckpt_dir"
321
+ save_dict:
322
+ model: "@network"
323
+ save_key_metric: true
324
+ key_metric_filename: model_latest.pt
325
+ key_metric:
326
+ val_dice:
327
+ _target_: MeanDice
328
+ output_transform: "$monai.handlers.from_engine(['pred', 'label'])"
329
+ include_background: false
330
+ additional_metrics:
331
+ val_liver_dice:
332
+ _target_: monai.handlers.MeanDice
333
+ output_transform: "$monai.handlers.from_engine(['pred_liver', 'label_liver'])"
334
+ include_background: false
335
+ val_spleen_dice:
336
+ _target_: monai.handlers.MeanDice
337
+ output_transform: "$monai.handlers.from_engine(['pred_spleen', 'label_spleen'])"
338
+ include_background: false
339
+ val_pancreas_dice:
340
+ _target_: monai.handlers.MeanDice
341
+ output_transform: "$monai.handlers.from_engine(['pred_pancreas', 'label_pancreas'])"
342
+ include_background: false
343
+ evaluator:
344
+ _target_: SupervisedEvaluator
345
+ device: "@device"
346
+ val_data_loader: "@validate#dataloader"
347
+ network: "@network"
348
+ inferer: "@validate#inferer"
349
+ postprocessing: "@validate#postprocessing"
350
+ key_val_metric: "@validate#key_metric"
351
+ #additional_metrics: "@validate#additional_metrics"
352
+ val_handlers: "@validate#handlers"
353
+ amp: true
354
+ initialize:
355
+ - "$monai.utils.set_determinism(seed=123)"
356
+ run:
357
+ - "$print('Training started... ')"
358
+ - "$print('output_channels: ', @output_channels )"
359
+ - "$print('spatial_dims: ', @spatial_dims)"
360
+ - "$print('Labels dict: ', @labels)"
361
+ - "$@train#trainer.run()"
docs/README.md ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Model Overview
2
+ A Pediatric 3D Abdominal Organ Segmentation model, pretrained on adult and pediatric public datasets, and fine tuned for institutional pediatric data.
3
+
4
+ Please cite this manuscript:
5
+ Somasundaram E, Taylor Z, Alves VV, et al. Deep-Learning Models for Abdominal CT Organ Segmentation in Children: Development and Validation in Internal and Heterogeneous Public Datasets. AJR 2024 May 1 [published online]. Accepted manuscript. doi:10.2214/AJR.24.30931
6
+
7
+ ## Data
8
+ Modality:
9
+ - CT
10
+
11
+ Organs Segmented:
12
+ - Liver
13
+ - Spleen
14
+ - Pancreas
15
+
16
+ Pre-training data:
17
+ - Total Segmentator (815)
18
+ - BTCV (30)
19
+ - TCIA Pediatric (282)
20
+
21
+ Fine-tuning data:
22
+ - Cincinnati Children's Liver Spleen CT dataset (275)
23
+ - Cincinnati Children's Pancreas CT dataset (146)
24
+
25
+ Testing data:
26
+ - Cincinnati Children's Liver-Spleen (57)
27
+ - Cincinnati Children's Pancreas (35)
28
+ - TCIA-Pediatric (74)
29
+ - Total Segmentator (50)
30
+
31
+ External dataset licenses can be found in accompanying text file. Internal datasets currently not publicly available.
32
+
33
+ To load data for training / inference / evaluate:
34
+
35
+ Ensure that the "image" and "label" parameters within the "training" and "validation" sections in configs/TS_test.json (or a new dataset json), as well as the "datalist" and "dataset_dir" in configs/train.yaml, configs/inference.yaml, and configs/evaluate-standalone.yaml files (or the according yaml if using multigpu / parallel or different model inferencing) are each changed to match the intended dataset's values.
36
+
37
+ One may make separate .json files detailing which exam images / masks are to be used in the same format as configs/TS_test.json with "training" and "validation" under root, as long as the "datalist_file_path" and "dataset_dir" values is changed accordingly in configs/train.yaml and configs/inference.yaml, and configs/evaluate-standalone.yaml (or the according yaml in different circumstances).
38
+
39
+ Ensure data folder structure is as follows, with scan files in the primary dataset folder, and mask files in the /labels/final subfolder:
40
+ dataset/
41
+ ├─ exam_001.nii.gz
42
+ ├─ exam_002.nii.gz
43
+ ├─ ...
44
+ ├─ labels/
45
+ │ ├─ final/
46
+ │ │ ├─ exam_001.nii.gz
47
+ │ │ ├─ exam_002.nii.gz
48
+ │ │ ├─ ...
49
+
50
+ Configuration defaults are currently set to the external TotalSegmentator CT dataset.
51
+
52
+ ### Model Architectures
53
+ - DynUNet
54
+ - SegResNet
55
+ - SwinUNETR
56
+
57
+ ### Hyper-Parameter Tuning
58
+ Weights and Biases was used to extensively tune each model for learning rate, scheduler and optimizer. For fine-tuning the fraction of trainable layers was also optimized. DynUNet performed overall better on all test datasets. The Total Segmentator model was also compared and the DynUNet model significantly outperformed Total Segmentator on institutional test data while maintaining relatively stable performance on adult and TCIA datasets.
59
+
60
+ ### Input
61
+ One channel CT image
62
+
63
+ ### Output
64
+ Four channel CT label
65
+ - Label 3: pancreas
66
+ - Label 2: spleen
67
+ - Label 1: liver
68
+ - Label 0: background
69
+ - 96x96x96
70
+
71
+ ## Performance
72
+ - MedArxiv to be linked
73
+
74
+
75
+ ## MONAI Bundle Commands
76
+ In addition to the Pythonic APIs, a few command line interfaces (CLI) are provided to interact with the bundle. The CLI supports flexible use cases, such as overriding configs at runtime and predefining arguments in a file.
77
+
78
+ For more details usage instructions, visit the [MONAI Bundle Configuration Page](https://docs.monai.io/en/latest/config_syntax.html).
79
+
80
+
81
+ #### Execute training:
82
+ Dataset used defaults to TotalSegmentator (https://zenodo.org/records/6802614#.ZFPll4TMKUk)
83
+ ```
84
+ python -m monai.bundle run --config_file configs/train.yaml
85
+ ```
86
+
87
+ Please note that if the default dataset path is not modified with the actual path in the bundle config files, you can also override it by using `--dataset_dir`:
88
+
89
+ ```
90
+ python -m monai.bundle run --config_file configs/train.yaml --dataset_dir <actual dataset path>
91
+ ```
92
+
93
+ #### `train` config to execute multi-GPU training:
94
+
95
+ ```
96
+ torchrun --nnodes=1 --nproc_per_node=8 -m monai.bundle run --config_file configs/train-multigpu.yaml
97
+ ```
98
+
99
+ #### Override the `train` config to execute evaluation with the trained model:
100
+
101
+ ```
102
+ python -m monai.bundle run --config_file "['configs/train.yaml','configs/evaluate.yaml']"
103
+ ```
104
+
105
+ #### Execute inference:
106
+
107
+ ```
108
+ python -m monai.bundle run --config_file configs/inference.yaml
109
+ ```
110
+
111
+ #### Execute standalone `evaluate`:
112
+ ```
113
+ python -m monai.bundle run --config_file configs/evaluate.yaml
114
+ ```
115
+
116
+
117
+ #### Execute standalone `evaluate` in parallel:
118
+ ```
119
+ torchrun --nnodes=1 --nproc_per_node=8 -m monai.bundle run --config_file configs/evaluate-standalone.yaml
120
+ ```
121
+
122
+
123
+ #### Export checkpoint for TorchScript:
124
+
125
+ ```
126
+ python -m monai.bundle ckpt_export network_def --filepath models/dynunet_FT.ts --ckpt_file models/dynunet_FT.pt --meta_file configs/metadata.json --config_file configs/inference.yaml
127
+ ```
128
+
129
+ #### Export checkpoint to TensorRT based models with fp32 or fp16 precision:
130
+
131
+ ```
132
+ python -m monai.bundle trt_export --net_id network_def --filepath models/A100/dynunet_FT_trt_16.ts --ckpt_file models/dynunet_FT.pt --meta_file configs/metadata.json --config_file configs/inference.yaml --precision <fp32/fp16> --use_trace "True" --dynamic_batchsize "[1, 4, 8]" --converter_kwargs "{'truncate_long_and_double':True, 'torch_executed_ops': ['aten::upsample_trilinear3d']}"
133
+ ```
134
+
135
+ #### Execute inference with the TensorRT model:
136
+
137
+ ```
138
+ python -m monai.bundle run --config_file "['configs/inference.yaml', 'configs/inference_trt.yaml']"
139
+ ```
140
+
141
+ # References
142
+
143
+ [1] Somasundaram E, Taylor Z, Alves VV, et al. Deep-Learning Models for Abdominal CT Organ Segmentation in Children: Development and Validation in Internal and Heterogeneous Public Datasets. AJR 2024 May 1 [published online]. Accepted manuscript. doi:10.2214/AJR.24.30931
144
+
145
+ [2] Wasserthal, J., Breit, H.-C., Meyer, M. T., Pradella, M., Hinck, D., Sauter, A. W., Heye, T., Boll, D., Cyriac, J., Yang, S., Bach, M., & Segeroth, M. (2023, June 16). TotalSegmentator: Robust segmentation of 104 anatomical structures in CT images. arXiv.org. https://arxiv.org/abs/2208.05868 . https://doi.org/10.1148/ryai.230024
146
+
147
+ [3] Jordan, P., Adamson, P. M., Bhattbhatt, V., Beriwal, S., Shen, S., Radermecker, O., Bose, S., Strain, L. S., Offe, M., Fraley, D., Principi, S., Ye, D. H., Wang, A. S., Van Heteren, J., Vo, N.-J., & Schmidt, T. G. (2021). Pediatric Chest/Abdomen/Pelvic CT Exams with Expert Organ Contours (Pediatric-CT-SEG) (Version 2) [Data set]. The Cancer Imaging Archive. https://doi.org/10.7937/TCIA.X0H0-1706
148
+
149
+ [4] https://www.synapse.org/#!Synapse:syn3193805/wiki/89480
150
+
151
+ # License
152
+ Copyright (c) MONAI Consortium
153
+
154
+ Licensed under the Apache License, Version 2.0 (the "License");
155
+ you may not use this file except in compliance with the License.
156
+ You may obtain a copy of the License at
157
+
158
+ http://www.apache.org/licenses/LICENSE-2.0
159
+
160
+ Unless required by applicable law or agreed to in writing, software
161
+ distributed under the License is distributed on an "AS IS" BASIS,
162
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
163
+ See the License for the specific language governing permissions and
164
+ limitations under the License.
docs/data_license.txt ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Third Party Licenses
2
+ -----------------------------------------------------------------------
3
+
4
+ /*********************************************************************/
5
+ i. The Cancer Imaging Archive Pediatric-CT-SEG
6
+ https://www.cancerimagingarchive.net/collection/pediatric-ct-seg/
7
+ /*********************************************************************/
8
+
9
+ Data Usage Agreement / Citations
10
+
11
+ Data Citation Required: Users must abide by the TCIA Data Usage Policy and Restrictions. Attribution must include the following citation, including the Digital Object Identifier:
12
+
13
+ Jordan, P., Adamson, P. M., Bhattbhatt, V., Beriwal, S., Shen, S., Radermecker, O., Bose, S., Strain, L. S., Offe, M., Fraley, D., Principi, S., Ye, D. H., Wang, A. S., Van Heteren, J., Vo, N.-J., & Schmidt, T. G. (2021). Pediatric Chest/Abdomen/Pelvic CT Exams with Expert Organ Contours (Pediatric-CT-SEG) (Version 2) [Data set]. The Cancer Imaging Archive. https://doi.org/10.7937/TCIA.X0H0-1706
14
+
15
+
16
+ /*********************************************************************/
17
+ ii. Multi-Atlas Labeling Beyond the Cranial Vault - Workshop and Challenge
18
+ https://www.synapse.org/#!Synapse:syn3193805/wiki/89480
19
+ /*********************************************************************/
20
+
21
+ Data Usage Agreement / Citations
22
+
23
+ Data license may be requested at URL:
24
+ https://www.synapse.org/#!Synapse:syn3193805/wiki/217753
25
+
26
+ /*********************************************************************/
27
+ iii. TotalSegmentator v1
28
+ https://zenodo.org/records/6802614#.ZFPll4TMKUk
29
+ /*********************************************************************/
30
+
31
+ Data Usage Agreement / Citations
32
+
33
+ More details about the dataset can be found in the corresponding paper: https://arxiv.org/abs/2208.05868 (the paper describes v1 of the dataset). Please cite this paper if you use the dataset.
34
+
35
+ Wasserthal, J., Breit, H.-C., Meyer, M. T., Pradella, M., Hinck, D., Sauter, A. W., Heye, T., Boll, D., Cyriac, J., Yang, S., Bach, M., & Segeroth, M. (2023, June 16). TotalSegmentator: Robust segmentation of 104 anatomical structures in CT images. arXiv.org. https://arxiv.org/abs/2208.05868 . https://doi.org/10.1148/ryai.230024
models/A100/dynunet_FT_trt_16.ts ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:079a5a65757055121a7badbfd770b7b39e108e379dbf3ce3c1533e094c9b609f
3
+ size 86090106
models/dynunet_FT.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5d5c616267cfb8b111cb9d2e3dc54419f8d7af952228b8c049aa6fa1e5224bfd
3
+ size 124032446
scripts/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ import scripts.utils
scripts/compute_metric.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) MONAI Consortium
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ # Unless required by applicable law or agreed to in writing, software
7
+ # distributed under the License is distributed on an "AS IS" BASIS,
8
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9
+ # See the License for the specific language governing permissions and
10
+ # limitations under the License.
11
+
12
+ """
13
+ This example shows how to efficiently compute Dice scores for pairs of segmentation prediction
14
+ and references in multi-processing based on MONAI's metrics API.
15
+ It can even run on multi-nodes.
16
+ Main steps to set up the distributed data parallel:
17
+
18
+ - Execute `torchrun` to create processes on every node for every process.
19
+ It receives parameters as below:
20
+ `--nproc_per_node=NUM_PROCESSES_PER_NODE`
21
+ `--nnodes=NUM_NODES`
22
+ For more details, refer to https://github.com/pytorch/pytorch/blob/master/torch/distributed/run.py.
23
+ Alternatively, we can also use `torch.multiprocessing.spawn` to start program, but it that case, need to handle
24
+ all the above parameters and compute `rank` manually, then set to `init_process_group`, etc.
25
+ `torchrun` is even more efficient than `torch.multiprocessing.spawn`.
26
+ - Use `init_process_group` to initialize every process.
27
+ - Partition the saved predictions and labels into ranks for parallel computation.
28
+ - Compute `Dice Metric` on every process, reduce the results after synchronization.
29
+
30
+ Note:
31
+ `torchrun` will launch `nnodes * nproc_per_node = world_size` processes in total.
32
+ Example script to execute this program on a single node with 2 processes:
33
+ `torchrun --nproc_per_node=2 compute_metric.py`
34
+
35
+ Referring to: https://pytorch.org/tutorials/intermediate/ddp_tutorial.html
36
+
37
+ """
38
+
39
+ import os
40
+
41
+ import torch
42
+ import torch.distributed as dist
43
+ from monai.data import partition_dataset
44
+ from monai.handlers import write_metrics_reports
45
+ from monai.metrics import DiceMetric
46
+ from monai.transforms import (
47
+ AddLabelNamesd,
48
+ AsDiscreted,
49
+ Compose,
50
+ EnsureChannelFirstd,
51
+ LoadImaged,
52
+ Orientationd,
53
+ ToDeviced,
54
+ )
55
+ from monai.utils import string_list_all_gather
56
+ from scripts.monai_utils import CopyFilenamesd
57
+
58
+
59
+ def compute(datalist, output_dir):
60
+ # generate synthetic data for the example
61
+ local_rank = int(os.environ["LOCAL_RANK"])
62
+ # initialize the distributed evaluation process, change to gloo backend if computing on CPU
63
+ dist.init_process_group(backend="nccl", init_method="env://")
64
+
65
+ # split data for every subprocess, for example, 16 processes compute in parallel
66
+ data_part = partition_dataset(
67
+ data=datalist, num_partitions=dist.get_world_size(), shuffle=False, even_divisible=False
68
+ )[dist.get_rank()]
69
+
70
+ device = torch.device(f"cuda:{local_rank}")
71
+ torch.cuda.set_device(device)
72
+ # define transforms for predictions and labels
73
+ # labels = {'background': 0, 'liver': 1, 'spleen': 2, 'pancreas': 3}
74
+ transforms = Compose(
75
+ [
76
+ CopyFilenamesd(keys="label"),
77
+ LoadImaged(keys=["pred", "label"]),
78
+ ToDeviced(keys=["pred", "label"], device=device),
79
+ EnsureChannelFirstd(keys=["pred", "label"]),
80
+ Orientationd(keys=("pred", "label"), axcodes="RAS"),
81
+ AsDiscreted(keys=("pred", "label"), argmax=(False, False), to_onehot=(4, 4)),
82
+ ]
83
+ )
84
+
85
+ data_part = [transforms(item) for item in data_part]
86
+
87
+ # compute metrics for current process
88
+ metric = DiceMetric(include_background=False, reduction="mean", get_not_nans=False)
89
+ metric(y_pred=[i["pred"] for i in data_part], y=[i["label"] for i in data_part])
90
+ filenames = [item["filename"] for item in data_part]
91
+ # all-gather results from all the processes and reduce for final result
92
+ result = metric.aggregate().item()
93
+ filenames = string_list_all_gather(strings=filenames)
94
+
95
+ if local_rank == 0:
96
+ print("mean dice: ", result)
97
+ # generate metrics reports at: output/mean_dice_raw.csv, output/mean_dice_summary.csv, output/metrics.csv
98
+ write_metrics_reports(
99
+ save_dir=output_dir,
100
+ images=filenames,
101
+ metrics={"mean_dice": result},
102
+ metric_details={"mean_dice": metric.get_buffer()},
103
+ summary_ops="*",
104
+ )
105
+
106
+ metric.reset()
107
+
108
+ dist.destroy_process_group()
109
+
110
+
111
+ def compute_single_node(datalist, output_dir):
112
+ local_rank = int(os.environ["LOCAL_RANK"])
113
+
114
+ filenames = [d["label"].split("/")[-1] for d in datalist]
115
+
116
+ data_part = datalist
117
+ device = torch.device(f"cuda:{local_rank}")
118
+ torch.cuda.set_device(device)
119
+
120
+ # define transforms for predictions and labels
121
+ labels = {"background": 0, "liver": 1, "spleen": 2, "pancreas": 3}
122
+ transforms = Compose(
123
+ [
124
+ LoadImaged(keys=["pred", "label"]),
125
+ ToDeviced(keys=["pred", "label"], device=device),
126
+ EnsureChannelFirstd(keys=["pred", "label"]),
127
+ Orientationd(keys=("pred", "label"), axcodes="RAS"),
128
+ AddLabelNamesd(keys=("pred", "label"), label_names=labels),
129
+ AsDiscreted(keys=("pred", "label"), argmax=(False, False), to_onehot=(4, 4)),
130
+ ]
131
+ )
132
+ data_part = [transforms(item) for item in data_part]
133
+ # compute metrics for current process
134
+ metric = DiceMetric(include_background=False, reduction="mean", get_not_nans=False)
135
+ for d in datalist:
136
+ d = transforms(d)
137
+ metric(y_pred=[d["pred"]], y=[d["label"]])
138
+
139
+ result = metric.aggregate().item()
140
+
141
+ print("mean dice: ", result)
142
+ write_metrics_reports(
143
+ save_dir=output_dir,
144
+ images=filenames,
145
+ metrics={"mean_dice": result},
146
+ metric_details={"mean_dice": metric.get_buffer()},
147
+ summary_ops="*",
148
+ )
149
+
150
+ metric.reset()
scripts/lr_scheduler.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 - 2021 MONAI Consortium
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ # Unless required by applicable law or agreed to in writing, software
7
+ # distributed under the License is distributed on an "AS IS" BASIS,
8
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9
+ # See the License for the specific language governing permissions and
10
+ # limitations under the License.
11
+
12
+ import math
13
+ import warnings
14
+ from typing import List
15
+
16
+ from torch.optim import Optimizer
17
+ from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
18
+
19
+ __all__ = ["LinearLR", "ExponentialLR"]
20
+
21
+
22
+ class _LRSchedulerMONAI(_LRScheduler):
23
+ """Base class for increasing the learning rate between two boundaries over a number
24
+ of iterations"""
25
+
26
+ def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
27
+ """
28
+ Args:
29
+ optimizer: wrapped optimizer.
30
+ end_lr: the final learning rate.
31
+ num_iter: the number of iterations over which the test occurs.
32
+ last_epoch: the index of last epoch.
33
+ Returns:
34
+ None
35
+ """
36
+ self.end_lr = end_lr
37
+ self.num_iter = num_iter
38
+ super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)
39
+
40
+
41
+ class LinearLR(_LRSchedulerMONAI):
42
+ """Linearly increases the learning rate between two boundaries over a number of
43
+ iterations.
44
+ """
45
+
46
+ def get_lr(self):
47
+ r = self.last_epoch / (self.num_iter - 1)
48
+ return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]
49
+
50
+
51
+ class ExponentialLR(_LRSchedulerMONAI):
52
+ """Exponentially increases the learning rate between two boundaries over a number of
53
+ iterations.
54
+ """
55
+
56
+ def get_lr(self):
57
+ r = self.last_epoch / (self.num_iter - 1)
58
+ return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]
59
+
60
+
61
+ class WarmupCosineSchedule(LambdaLR):
62
+ """Linear warmup and then cosine decay.
63
+ Based on https://huggingface.co/ implementation.
64
+ """
65
+
66
+ def __init__(
67
+ self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
68
+ ) -> None:
69
+ """
70
+ Args:
71
+ optimizer: wrapped optimizer.
72
+ warmup_steps: number of warmup iterations.
73
+ t_total: total number of training iterations.
74
+ cycles: cosine cycles parameter.
75
+ last_epoch: the index of last epoch.
76
+ Returns:
77
+ None
78
+ """
79
+ self.warmup_steps = warmup_steps
80
+ self.t_total = t_total
81
+ self.cycles = cycles
82
+ super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)
83
+
84
+ def lr_lambda(self, step):
85
+ if step < self.warmup_steps:
86
+ return float(step) / float(max(1.0, self.warmup_steps))
87
+ progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))
88
+ return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))
89
+
90
+
91
+ class LinearWarmupCosineAnnealingLR(_LRScheduler):
92
+ def __init__(
93
+ self,
94
+ optimizer: Optimizer,
95
+ warmup_epochs: int,
96
+ max_epochs: int,
97
+ warmup_start_lr: float = 0.0,
98
+ eta_min: float = 0.0,
99
+ last_epoch: int = -1,
100
+ ) -> None:
101
+ """
102
+ Args:
103
+ optimizer (Optimizer): Wrapped optimizer.
104
+ warmup_epochs (int): Maximum number of iterations for linear warmup
105
+ max_epochs (int): Maximum number of iterations
106
+ warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
107
+ eta_min (float): Minimum learning rate. Default: 0.
108
+ last_epoch (int): The index of last epoch. Default: -1.
109
+ """
110
+ self.warmup_epochs = warmup_epochs
111
+ self.max_epochs = max_epochs
112
+ self.warmup_start_lr = warmup_start_lr
113
+ self.eta_min = eta_min
114
+
115
+ super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)
116
+
117
+ def get_lr(self) -> List[float]:
118
+ """
119
+ Compute learning rate using chainable form of the scheduler
120
+ """
121
+ if not self._get_lr_called_within_step:
122
+ warnings.warn(
123
+ "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
124
+ )
125
+
126
+ if self.last_epoch == 0:
127
+ return [self.warmup_start_lr] * len(self.base_lrs)
128
+ elif self.last_epoch < self.warmup_epochs:
129
+ return [
130
+ group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
131
+ for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
132
+ ]
133
+ elif self.last_epoch == self.warmup_epochs:
134
+ return self.base_lrs
135
+ elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
136
+ return [
137
+ group["lr"]
138
+ + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
139
+ for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
140
+ ]
141
+
142
+ return [
143
+ (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
144
+ / (
145
+ 1
146
+ + math.cos(
147
+ math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
148
+ )
149
+ )
150
+ * (group["lr"] - self.eta_min)
151
+ + self.eta_min
152
+ for group in self.optimizer.param_groups
153
+ ]
154
+
155
+ def _get_closed_form_lr(self) -> List[float]:
156
+ """
157
+ Called when epoch is passed as a param to the `step` function of the scheduler.
158
+ """
159
+ if self.last_epoch < self.warmup_epochs:
160
+ return [
161
+ self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
162
+ for base_lr in self.base_lrs
163
+ ]
164
+
165
+ return [
166
+ self.eta_min
167
+ + 0.5
168
+ * (base_lr - self.eta_min)
169
+ * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
170
+ for base_lr in self.base_lrs
171
+ ]
scripts/monai_utils.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import os
5
+ from collections.abc import Hashable, Mapping
6
+ from typing import Any, Callable, Sequence
7
+
8
+ import numpy as np
9
+ import torch
10
+ import torch.nn
11
+ from ignite.engine import Engine
12
+ from ignite.metrics import Metric
13
+ from monai.config import KeysCollection
14
+ from monai.engines import SupervisedTrainer
15
+ from monai.engines.utils import get_devices_spec
16
+ from monai.inferers import Inferer
17
+ from monai.transforms.transform import MapTransform, Transform
18
+ from torch.nn.parallel import DataParallel, DistributedDataParallel
19
+ from torch.optim.optimizer import Optimizer
20
+
21
+ # measure, _ = optional_import("skimage.measure", "0.14.2", min_version)
22
+
23
+ logger = logging.getLogger(__name__)
24
+
25
+ # distance_transform_cdt, _ = optional_import("scipy.ndimage.morphology", name="distance_transform_cdt")
26
+
27
+
28
def get_device_list(n_gpu):
    """
    Resolve a requested GPU id (or list of GPU ids) into a list of device specs.

    Args:
        n_gpu: a single GPU id or a list of GPU ids requested for training.

    Returns:
        A list of device specs for the requested GPUs. If the highest requested
        id does not exist on this machine, falls back to the first
        ``len(n_gpu)`` available devices instead.
    """
    if not isinstance(n_gpu, list):
        n_gpu = [n_gpu]
    device_list = get_devices_spec(n_gpu)
    # GPU ids are 0-based, so id ``max(n_gpu)`` only exists when
    # device_count() > max(n_gpu). The previous ``>=`` comparison was an
    # off-by-one that let an out-of-range id through.
    if torch.cuda.device_count() > max(n_gpu):
        device_list = [d for d in device_list if d in n_gpu]
    else:
        # Use the module-level logger for consistency with the rest of the file.
        logger.info(
            """Highest GPU ID provided in 'n_gpu' is larger than number of GPUs available, assigning GPUs starting from 0
            to match n_gpu length of {}""".format(
                len(n_gpu)
            )
        )
        device_list = device_list[: len(n_gpu)]
    return device_list
43
+
44
+
45
def supervised_trainer_multi_gpu(
    max_epochs: int,
    train_data_loader,
    network: torch.nn.Module,
    optimizer: Optimizer,
    loss_function: Callable,
    device: Sequence[str | torch.device] | None = None,
    epoch_length: int | None = None,
    non_blocking: bool = False,
    iteration_update: Callable[[Engine, Any], Any] | None = None,
    inferer: Inferer | None = None,
    postprocessing: Transform | None = None,
    key_train_metric: dict[str, Metric] | None = None,
    additional_metrics: dict[str, Metric] | None = None,
    train_handlers: Sequence | None = None,
    amp: bool = False,
    distributed: bool = False,
):
    """
    Build a MONAI ``SupervisedTrainer`` whose network is wrapped for multi-GPU use.

    When ``distributed`` is True, the network is wrapped in
    ``DistributedDataParallel`` and exactly one device must be supplied.
    Otherwise, if more than one device is resolved, the network is wrapped in
    ``DataParallel``. The trainer itself is bound to the first resolved device.
    When ``device`` is falsy, all available devices are used.
    """
    resolved_devices = device if device else get_devices_spec(device)

    wrapped_net = network
    if distributed:
        if len(resolved_devices) > 1:
            raise ValueError(
                f"for distributed training, `devices` must contain only 1 GPU or CPU, but got {resolved_devices}."
            )
        wrapped_net = DistributedDataParallel(wrapped_net, device_ids=resolved_devices)
    elif len(resolved_devices) > 1:
        wrapped_net = DataParallel(wrapped_net, device_ids=resolved_devices)

    return SupervisedTrainer(
        device=resolved_devices[0],
        network=wrapped_net,
        optimizer=optimizer,
        loss_function=loss_function,
        max_epochs=max_epochs,
        train_data_loader=train_data_loader,
        epoch_length=epoch_length,
        non_blocking=non_blocking,
        iteration_update=iteration_update,
        inferer=inferer,
        postprocessing=postprocessing,
        key_train_metric=key_train_metric,
        additional_metrics=additional_metrics,
        train_handlers=train_handlers,
        amp=amp,
    )
99
+
100
+
101
class SupervisedTrainerMGPU(SupervisedTrainer):
    """
    ``SupervisedTrainer`` subclass that transparently wraps its network for
    multi-GPU execution.

    With ``distributed=True`` the network is wrapped in
    ``DistributedDataParallel`` (exactly one device required); otherwise a
    multi-device list triggers a ``DataParallel`` wrap. The trainer runs on
    the first resolved device. The resolved devices and wrapped network are
    kept on ``self.devices_`` and ``self.net`` respectively.
    """

    def __init__(
        self,
        max_epochs: int,
        train_data_loader,
        network: torch.nn.Module,
        optimizer: Optimizer,
        loss_function: Callable,
        device: Sequence[str | torch.device] | None = None,
        epoch_length: int | None = None,
        non_blocking: bool = False,
        iteration_update: Callable[[Engine, Any], Any] | None = None,
        inferer: Inferer | None = None,
        postprocessing: Transform | None = None,
        key_train_metric: dict[str, Metric] | None = None,
        additional_metrics: dict[str, Metric] | None = None,
        train_handlers: Sequence | None = None,
        amp: bool = False,
        distributed: bool = False,
    ):
        # When `device` is falsy, fall back to every available device.
        self.devices_ = device if device else get_devices_spec(device)

        self.net = network
        if distributed:
            if len(self.devices_) > 1:
                raise ValueError(
                    f"for distributed training, `devices` must contain only 1 GPU or CPU, but got {self.devices_}."
                )
            self.net = DistributedDataParallel(self.net, device_ids=self.devices_)
        elif len(self.devices_) > 1:
            self.net = DataParallel(self.net, device_ids=self.devices_)

        super().__init__(
            device=self.devices_[0],
            network=self.net,
            optimizer=optimizer,
            loss_function=loss_function,
            max_epochs=max_epochs,
            train_data_loader=train_data_loader,
            epoch_length=epoch_length,
            non_blocking=non_blocking,
            iteration_update=iteration_update,
            inferer=inferer,
            postprocessing=postprocessing,
            key_train_metric=key_train_metric,
            additional_metrics=additional_metrics,
            train_handlers=train_handlers,
            amp=amp,
        )
159
+
160
+
161
class AddLabelNamesd(MapTransform):
    """
    Attach the configured label-name dictionary to every data sample.

    The mapping is stored under the ``"label_names"`` key of the data dict;
    the entries selected by ``keys`` are left untouched.
    """

    def __init__(
        self, keys: KeysCollection, label_names: dict[str, int] | None = None, allow_missing_keys: bool = False
    ):
        """
        Args:
            keys: The ``keys`` parameter will be used to get and set the actual data item to transform
            label_names: mapping of label name to label value; falls back to an empty dict.
        """
        super().__init__(keys, allow_missing_keys)
        self.label_names = label_names or {}

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> dict[Hashable, np.ndarray]:
        out: dict = dict(data)
        out["label_names"] = self.label_names
        return out
180
+
181
+
182
class CopyFilenamesd(MapTransform):
    """
    Record the basename of each sample's label file for later use.

    Reads the hard-coded ``"label"`` entry of the data dict (not the
    transform ``keys``) and stores its basename under ``"filename"``.
    """

    def __init__(self, keys: KeysCollection, allow_missing_keys: bool = False):
        """
        Args:
            keys: The ``keys`` parameter will be used to get and set the actual data item to transform
        """
        super().__init__(keys, allow_missing_keys)

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> dict[Hashable, np.ndarray]:
        out: dict = dict(data)
        out["filename"] = os.path.basename(out["label"])
        return out
196
+
197
+
198
class SplitPredsLabeld(MapTransform):
    """
    Split the multi-channel ``pred`` and ``label`` arrays into one entry per
    non-background class so each class can be evaluated individually.

    For every class name in ``data["label_names"]`` (channel order assumed to
    follow the dict's insertion order — confirm against the model's output
    layout), adds ``pred_<name>`` and ``label_<name>`` entries, each keeping a
    leading singleton channel axis.
    """

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> dict[Hashable, np.ndarray]:
        out: dict = dict(data)
        for key in self.key_iterator(out):
            if key != "pred":
                logger.info("This is only for pred key")
                continue
            for channel, (class_name, _) in enumerate(out["label_names"].items()):
                if class_name == "background":
                    continue
                out[f"pred_{class_name}"] = out[key][channel, ...][None]
                out[f"label_{class_name}"] = out["label"][channel, ...][None]
        return out
scripts/prepare_datalist_monailabel.py ADDED
@@ -0,0 +1,124 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import glob
3
+ import json
4
+ import logging
5
+ import os
6
+ import sys
7
+
8
+ import monai
9
+ from sklearn.model_selection import train_test_split
10
+
11
+
12
def produce_datalist_splits(datalist, splits: list = None, train_split: float = 0.80, valid_test_split: float = 0.50):
    """
    Partition ``datalist`` into training/validation/testing subsets.

    Args:
        datalist: list of data entries to split.
        splits: which splits to produce, any combination of ``"train"`` and
            ``"valid"`` (everything left over becomes testing). Defaults to
            ``["test"]``, i.e. the whole list goes to testing.
        train_split: fraction of samples assigned to training.
        valid_test_split: fraction of the remaining samples assigned to validation.

    Returns:
        dict with a subset of the keys ``"training"``, ``"validation"``, ``"testing"``.
    """
    requested = ["test"] if splits is None else splits

    if "train" not in requested:
        if "valid" in requested:
            val_list, test_list = train_test_split(datalist, train_size=valid_test_split)
            return {"validation": val_list, "testing": test_list}
        return {"testing": datalist}

    train_list, remainder = train_test_split(datalist, train_size=train_split)
    if "valid" in requested:
        val_list, test_list = train_test_split(remainder, train_size=valid_test_split)
        return {"training": train_list, "validation": val_list, "testing": test_list}
    return {"training": train_list, "testing": remainder}
31
+
32
+
33
def keep_image_label_pairs_only(a_images, a_labels, i_folder, l_folder):
    """
    Keep only the images and labels whose basenames appear in both lists.

    Args:
        a_images: paths of candidate image files.
        a_labels: paths of candidate label files.
        i_folder: folder onto which matched image basenames are re-joined.
        l_folder: folder onto which matched label basenames are re-joined.

    Returns:
        Tuple ``(images, labels)`` of sorted, matched path lists of equal length.
    """
    # os.path.basename instead of split("/") so paths also work on Windows.
    image_names = [os.path.basename(p) for p in a_images]
    label_names = [os.path.basename(p) for p in a_labels]
    # Set membership turns the original O(n*m) list scans into O(n + m).
    label_set = set(label_names)
    matched_images = sorted(os.path.join(i_folder, name) for name in image_names if name in label_set)
    # Keep only labels that have a matching scan.
    image_set = {os.path.basename(p) for p in matched_images}
    matched_labels = sorted(os.path.join(l_folder, name) for name in label_names if name in image_set)
    return matched_images, matched_labels
51
+
52
+
53
def parse_files(images_folder, labels_folder, file_extension_pattern):
    """
    Glob image and label files matching ``file_extension_pattern`` inside the
    given folders and return them as two sorted lists of paths.
    """
    image_pattern = os.path.join(images_folder, file_extension_pattern)
    logging.info(f"parsing files at: {image_pattern}")
    images = sorted(glob.glob(image_pattern))
    labels = sorted(glob.glob(os.path.join(labels_folder, file_extension_pattern)))
    return images, labels
58
+
59
+
60
def get_datalist(args, images_folder, labels_folder):
    """
    Build a list of ``{"image": ..., "label": ...}`` path pairs.

    Args:
        args: parsed CLI args; only ``args.file_extension`` is used here.
        images_folder: a folder path, or a list of folder paths, containing images.
        labels_folder: the matching folder path(s) containing labels.

    Returns:
        list of dicts pairing each matched image path with its label path.
    """
    file_extension_pattern = "*" + args.file_extension + "*"
    # isinstance instead of `type(...) is list` so list subclasses also work.
    if isinstance(images_folder, list):
        all_images = []
        all_labels = []
        for ifolder, lfolder in zip(images_folder, labels_folder):
            a_images, a_labels = parse_files(ifolder, lfolder, file_extension_pattern)
            a_images, a_labels = keep_image_label_pairs_only(a_images, a_labels, ifolder, lfolder)
            all_images += a_images
            all_labels += a_labels
    else:
        all_images, all_labels = parse_files(images_folder, labels_folder, file_extension_pattern)
        all_images, all_labels = keep_image_label_pairs_only(all_images, all_labels, images_folder, labels_folder)

    # f-strings for consistency with the other log calls in this module.
    logging.info(f"Length of all_images: {len(all_images)}")
    logging.info(f"Length of all_labels: {len(all_labels)}")

    datalist = [{"image": image_name, "label": label_name} for image_name, label_name in zip(all_images, all_labels)]

    logging.info(f"datalist length is {len(datalist)}")
    return datalist
82
+
83
+
84
def main(args):
    """
    Build the datalist from ``args.path``, split it, and write the splits to
    the JSON file named by ``args.output``.
    """
    root_dir = args.path
    # Fixed seed so the random splits are reproducible across runs.
    monai.utils.set_determinism(seed=123)
    datalist = get_datalist(args, root_dir, os.path.join(root_dir, args.labels_folder))
    datalist = produce_datalist_splits(datalist, args.splits, args.train_split, args.valid_test_split)
    with open(args.output, "w") as f:
        json.dump(datalist, f, ensure_ascii=True, indent=4)
    logging.info(f"datalist json file saved to: {args.output}")
97
+
98
+
99
if __name__ == "__main__":
    logging.basicConfig(
        stream=sys.stdout,
        level=logging.DEBUG,
        format="[%(asctime)s.%(msecs)03d][%(levelname)5s](%(name)s) - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
    parser = argparse.ArgumentParser(description="")
    parser.add_argument(
        "--path",
        type=str,
        default="/workspace/data/msd/Task07_Pancreas",
        help="root path of MSD Task07_Pancreas dataset.",
    )
    parser.add_argument(
        "--output", type=str, default="dataset_0.json", help="relative path of output datalist json file."
    )
    # BUG FIX: these are fractions, so parse as float — the previous type=int
    # rejected any CLI value such as "0.8" with a ValueError.
    parser.add_argument("--train_split", type=float, default=0.80, help="fraction of Training samples.")
    parser.add_argument("--valid_test_split", type=float, default=0.50, help="fraction of valid/test samples.")
    # BUG FIX: type=list split a single argument into individual characters;
    # nargs="+" collects space-separated split names into a proper list.
    parser.add_argument(
        "--splits", type=str, nargs="+", default=["test"], help="splits to use for train, valid, and test."
    )
    parser.add_argument("--file_extension", type=str, default="nii", help="file extension of images and labels.")
    parser.add_argument("--labels_folder", type=str, default="labels/final", help="labels sub folder name")

    args = parser.parse_args()

    main(args)
scripts/utils.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
def test():
    """Smoke-test helper: prints a fixed marker string to stdout."""
    message = "Test Function PRINTS"
    print(message)