Valentin Boussot committed on
Commit
ac44a0c
·
1 Parent(s): 8efb82a

Integrate uncertainty handling and embed the uncertainty code directly into KonfAI as a built-in component (konfai==1.3.6)

Browse files
CBCT/Evaluation.yml ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Evaluator:
2
+ metrics:
3
+ sCT:
4
+ targets_criterions:
5
+ reference:
6
+ criterions_loader:
7
+ MAE:
8
+ reduction: mean
9
+ PSNR:
10
+ dynamic_range: None
11
+ SSIM:
12
+ dynamic_range: None
13
+ sCT_seg:
14
+ targets_criterions:
15
+ reference_seg:
16
+ criterions_loader:
17
+ Dice:
18
+ labels: None
19
+ Dataset:
20
+ groups_src:
21
+ sCT:
22
+ groups_dest:
23
+ sCT:
24
+ transforms: None
25
+ sCT_seg:
26
+ transforms:
27
+ KonfAIInference:
28
+ repo_id: VBoussot/MRSegmentator-KonfAI
29
+ model_name: MRSegmentator
30
+ number_of_ensemble: 1
31
+ number_of_tta: 0
32
+ number_of_mc_dropout: 0
33
+ per_channel: false
34
+ Save:
35
+ dataset: ./Dataset:nii.gz
36
+ group: None
37
+ Volume:
38
+ groups_dest:
39
+ reference:
40
+ transforms: None
41
+ reference_seg:
42
+ transforms:
43
+ KonfAIInference:
44
+ repo_id: VBoussot/MRSegmentator-KonfAI
45
+ model_name: MRSegmentator
46
+ number_of_ensemble: 1
47
+ number_of_tta: 0
48
+ number_of_mc_dropout: 0
49
+ per_channel: false
50
+ Save:
51
+ dataset: ./Dataset:nii.gz
52
+ group: None
53
+ subset: None
54
+ dataset_filenames:
55
+ - ./Dataset:a:nii.gz
56
+ - ./Predictions/ImpactSynth/Output:i:nii.gz
57
+ validation: None
58
+ train_name: ImpactSynth
CBCT/Model.py CHANGED
@@ -28,41 +28,4 @@ class UNetpp(network.Network):
28
  classes=1,
29
  activation=None
30
  ))
31
- self.add_module("Head", Head())
32
-
33
- class Concat(Reduction):
34
-
35
- def __init__(self):
36
- pass
37
-
38
- def __call__(self, tensor: torch.Tensor | list[torch.Tensor]) -> torch.Tensor:
39
- if isinstance(tensor, list):
40
- return torch.stack(tensor, dim=2).squeeze(1)
41
- else:
42
- return tensor.view(tensor.shape[0]*tensor.shape[1], -1, *tensor.shape[3:])
43
-
44
- class Uncertainty(Transform):
45
-
46
- def __init__(self):
47
- pass
48
-
49
- def __call__(self, name: str, tensors: torch.Tensor, cache_attribute: Attribute) -> torch.Tensor:
50
- dataset = Dataset("./Predictions/ImpactSynth/Dataset", "mha")
51
- for i, tensor in enumerate(tensors):
52
- dataset.write(f"sCT_{i}", name, data_to_image(tensor.unsqueeze(0).numpy(), cache_attribute))
53
- data_var = tensors.var(0) if tensors.shape[0] > 1 else torch.zeros_like(tensors[0])
54
- dataset.write(f"sCT_var", name, data_to_image(data_var.unsqueeze(0).numpy(), cache_attribute))
55
- return tensors.mean(0).unsqueeze(0)
56
-
57
- class UnNormalize(Transform):
58
-
59
- def __init__(self) -> None:
60
- super().__init__()
61
- self.v_min = -1024
62
- self.v_max = 3071
63
-
64
- def __call__(self, name: str, input : torch.Tensor, cache_attribute: Attribute) -> torch.Tensor:
65
- return (input + 1)/2*(self.v_max-self.v_min) + self.v_min
66
-
67
- def inverse(self, name: str, input : torch.Tensor, cache_attribute: Attribute) -> torch.Tensor:
68
- pass
 
28
  classes=1,
29
  activation=None
30
  ))
31
+ self.add_module("Head", Head())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
CBCT/Prediction.yml CHANGED
@@ -30,7 +30,7 @@ Predictor:
30
  max_value: 1
31
  inverse: true
32
  is_input: true
33
- augmentations:
34
  DataAugmentation_0:
35
  data_augmentations:
36
  Flip:
@@ -62,23 +62,28 @@ Predictor:
62
  before_reduction_transforms: None
63
  after_reduction_transforms: None
64
  final_transforms:
65
- Model:UnNormalize: {}
66
- Model:Uncertainty: {}
 
67
  TensorCast:
68
  dtype: int16
69
- inverse: true
70
- dataset_filename: Dataset:mha
 
 
 
 
71
  group: sCT
72
  same_as_group: Volume:Volume
73
  patch_combine: None
74
  inverse_transform: false
75
- reduction: Model:Concat
76
- Model:Concat: {}
77
  train_name: ImpactSynth
78
  manual_seed: 32
79
  gpu_checkpoints: None
80
  images_log: None
81
- combine: Model:Concat
82
  autocast: false
83
  data_log: None
84
- Model:Concat: {}
 
30
  max_value: 1
31
  inverse: true
32
  is_input: true
33
+ augmentations:
34
  DataAugmentation_0:
35
  data_augmentations:
36
  Flip:
 
62
  before_reduction_transforms: None
63
  after_reduction_transforms: None
64
  final_transforms:
65
+ UnNormalize:
66
+ min_value: -1024
67
+ max_value: 3071
68
  TensorCast:
69
  dtype: int16
70
+ inverse: false
71
+ InferenceStack:
72
+ dataset: Uncertainty:nii.gz
73
+ name: InferenceStack
74
+ mode: mean
75
+ dataset_filename: Output:nii.gz
76
  group: sCT
77
  same_as_group: Volume:Volume
78
  patch_combine: None
79
  inverse_transform: false
80
+ reduction: Concat
81
+ Concat: {}
82
  train_name: ImpactSynth
83
  manual_seed: 32
84
  gpu_checkpoints: None
85
  images_log: None
86
+ combine: Concat
87
  autocast: false
88
  data_log: None
89
+ Concat: {}
CBCT/Uncertainty.yml ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Evaluator:
2
+ metrics:
3
+ Uncertainty:
4
+ targets_criterions:
5
+ None:
6
+ criterions_loader:
7
+ Mean: {}
8
+ Comformity:
9
+ targets_criterions:
10
+ None:
11
+ criterions_loader:
12
+ Mean: {}
13
+ Dataset:
14
+ groups_src:
15
+ InferenceStack:
16
+ groups_dest:
17
+ Uncertainty:
18
+ transforms:
19
+ Variance: {}
20
+ Save:
21
+ dataset: ./Uncertainty:nii.gz
22
+ group: None
23
+ Comformity:
24
+ transforms:
25
+ KonfAIInference:
26
+ repo_id: VBoussot/MRSegmentator-KonfAI
27
+ model_name: MRSegmentator
28
+ number_of_ensemble: 1
29
+ number_of_tta: 0
30
+ number_of_mc_dropout: 0
31
+ per_channel: true
32
+ Save/1:
33
+ dataset: ./Uncertainty:nii.gz
34
+ group: None
35
+ Variance: {}
36
+ Save/2:
37
+ dataset: ./Uncertainty:nii.gz
38
+ group: Comformity_var
39
+ subset: None
40
+ dataset_filenames:
41
+ - ./Uncertainty:nii.gz
42
+ validation: None
43
+ train_name: ImpactSynth
CBCT/requirements.txt CHANGED
@@ -1 +1,2 @@
1
- segmentation_models_pytorch
 
 
1
+ segmentation_models_pytorch
2
+ konfai==1.3.6
MR/Evaluation.yml ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Evaluator:
2
+ metrics:
3
+ sCT:
4
+ targets_criterions:
5
+ reference:
6
+ criterions_loader:
7
+ MAE:
8
+ reduction: mean
9
+ PSNR:
10
+ dynamic_range: None
11
+ SSIM:
12
+ dynamic_range: None
13
+ sCT_seg:
14
+ targets_criterions:
15
+ reference_seg:
16
+ criterions_loader:
17
+ Dice:
18
+ labels: None
19
+ Dataset:
20
+ groups_src:
21
+ sCT:
22
+ groups_dest:
23
+ sCT:
24
+ transforms: None
25
+ sCT_seg:
26
+ transforms:
27
+ KonfAIInference:
28
+ repo_id: VBoussot/MRSegmentator-KonfAI
29
+ model_name: MRSegmentator
30
+ number_of_ensemble: 1
31
+ number_of_tta: 0
32
+ number_of_mc_dropout: 0
33
+ per_channel: false
34
+ Save:
35
+ dataset: ./Dataset:nii.gz
36
+ group: None
37
+ Volume:
38
+ groups_dest:
39
+ reference:
40
+ transforms: None
41
+ reference_seg:
42
+ transforms:
43
+ KonfAIInference:
44
+ repo_id: VBoussot/MRSegmentator-KonfAI
45
+ model_name: MRSegmentator
46
+ number_of_ensemble: 1
47
+ number_of_tta: 0
48
+ number_of_mc_dropout: 0
49
+ per_channel: false
50
+ Save:
51
+ dataset: ./Dataset:nii.gz
52
+ group: None
53
+ subset: None
54
+ dataset_filenames:
55
+ - ./Dataset:a:nii.gz
56
+ - ./Predictions/ImpactSynth/Output:i:nii.gz
57
+ validation: None
58
+ train_name: ImpactSynth
MR/Model.py CHANGED
@@ -28,43 +28,4 @@ class UNetpp(network.Network):
28
  classes=1,
29
  activation=None
30
  ))
31
- self.add_module("Head", Head())
32
-
33
- class Concat(Reduction):
34
-
35
- def __init__(self):
36
- pass
37
-
38
- def __call__(self, tensor: torch.Tensor | list[torch.Tensor]) -> torch.Tensor:
39
- if isinstance(tensor, list):
40
- return torch.stack(tensor, dim=2).squeeze(1)
41
- else:
42
- return tensor.view(tensor.shape[0]*tensor.shape[1], -1, *tensor.shape[3:])
43
-
44
-
45
- class Uncertainty(Transform):
46
-
47
- def __init__(self):
48
- pass
49
-
50
- def __call__(self, name: str, tensors: torch.Tensor, cache_attribute: Attribute) -> torch.Tensor:
51
- dataset = Dataset("./Predictions/ImpactSynth/Dataset", "mha")
52
- print("end", tensors.shape)
53
- for i, tensor in enumerate(tensors):
54
- dataset.write(f"sCT_{i}", name, data_to_image(tensor.unsqueeze(0).numpy(), cache_attribute))
55
- data_var = tensors.var(0) if tensors.shape[0] > 1 else torch.zeros_like(tensors[0])
56
- dataset.write(f"sCT_var", name, data_to_image(data_var.unsqueeze(0).numpy(), cache_attribute))
57
- return tensors.mean(0).unsqueeze(0)
58
-
59
- class UnNormalize(Transform):
60
-
61
- def __init__(self) -> None:
62
- super().__init__()
63
- self.v_min = -1024
64
- self.v_max = 3071
65
-
66
- def __call__(self, name: str, input : torch.Tensor, cache_attribute: Attribute) -> torch.Tensor:
67
- return (input + 1)/2*(self.v_max-self.v_min) + self.v_min
68
-
69
- def inverse(self, name: str, input : torch.Tensor, cache_attribute: Attribute) -> torch.Tensor:
70
- pass
 
28
  classes=1,
29
  activation=None
30
  ))
31
+ self.add_module("Head", Head())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
MR/Prediction.yml CHANGED
@@ -10,13 +10,25 @@ Predictor:
10
  groups_dest:
11
  Volume:
12
  transforms:
13
- Standardize:
14
- lazy: false
15
- mean: None
16
- std: None
 
17
  mask: None
 
 
 
 
 
 
 
 
 
 
 
 
18
  inverse: true
19
- patch_transforms: None
20
  is_input: true
21
  augmentations:
22
  DataAugmentation_0:
@@ -50,23 +62,28 @@ Predictor:
50
  before_reduction_transforms: None
51
  after_reduction_transforms: None
52
  final_transforms:
53
- Model:UnNormalize: {}
54
- Model:Uncertainty: {}
 
55
  TensorCast:
56
  dtype: int16
57
- inverse: true
58
- dataset_filename: Dataset:mha
 
 
 
 
59
  group: sCT
60
  same_as_group: Volume:Volume
61
  patch_combine: None
62
  inverse_transform: false
63
- reduction: Model:Concat
64
- Model:Concat: {}
65
  train_name: ImpactSynth
66
  manual_seed: 32
67
  gpu_checkpoints: None
68
  images_log: None
69
- combine: Model:Concat
70
  autocast: false
71
  data_log: None
72
- Model:Concat: {}
 
10
  groups_dest:
11
  Volume:
12
  transforms:
13
+ Clip:
14
+ min_value: min
15
+ max_value: percentile:99.5
16
+ save_clip_min: false
17
+ save_clip_max: false
18
  mask: None
19
+ Normalize:
20
+ lazy: true
21
+ channels: None
22
+ min_value: -1
23
+ max_value: 1
24
+ inverse: true
25
+ patch_transforms:
26
+ Normalize:
27
+ lazy: false
28
+ channels: None
29
+ min_value: -1
30
+ max_value: 1
31
  inverse: true
 
32
  is_input: true
33
  augmentations:
34
  DataAugmentation_0:
 
62
  before_reduction_transforms: None
63
  after_reduction_transforms: None
64
  final_transforms:
65
+ UnNormalize:
66
+ min_value: -1024
67
+ max_value: 3071
68
  TensorCast:
69
  dtype: int16
70
+ inverse: false
71
+ InferenceStack:
72
+ dataset: Uncertainty:nii.gz
73
+ name: InferenceStack
74
+ mode: mean
75
+ dataset_filename: Output:nii.gz
76
  group: sCT
77
  same_as_group: Volume:Volume
78
  patch_combine: None
79
  inverse_transform: false
80
+ reduction: Concat
81
+ Concat: {}
82
  train_name: ImpactSynth
83
  manual_seed: 32
84
  gpu_checkpoints: None
85
  images_log: None
86
+ combine: Concat
87
  autocast: false
88
  data_log: None
89
+ Concat: {}
MR/Uncertainty.yml ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Evaluator:
2
+ metrics:
3
+ Uncertainty:
4
+ targets_criterions:
5
+ None:
6
+ criterions_loader:
7
+ Mean: {}
8
+ Comformity:
9
+ targets_criterions:
10
+ None:
11
+ criterions_loader:
12
+ Mean: {}
13
+ Dataset:
14
+ groups_src:
15
+ InferenceStack:
16
+ groups_dest:
17
+ Uncertainty:
18
+ transforms:
19
+ Variance: {}
20
+ Save:
21
+ dataset: ./Uncertainty:nii.gz
22
+ group: None
23
+ Comformity:
24
+ transforms:
25
+ KonfAIInference:
26
+ repo_id: VBoussot/MRSegmentator-KonfAI
27
+ model_name: MRSegmentator
28
+ number_of_ensemble: 1
29
+ number_of_tta: 0
30
+ number_of_mc_dropout: 0
31
+ per_channel: true
32
+ Save/1:
33
+ dataset: ./Uncertainty:nii.gz
34
+ group: None
35
+ Variance: {}
36
+ Save/2:
37
+ dataset: ./Uncertainty:nii.gz
38
+ group: Comformity_var
39
+ subset: None
40
+ dataset_filenames:
41
+ - ./Uncertainty:nii.gz
42
+ validation: None
43
+ train_name: ImpactSynth
MR/requirements.txt CHANGED
@@ -1 +1,2 @@
1
- segmentation_models_pytorch
 
 
1
+ segmentation_models_pytorch
2
+ konfai==1.3.6