dfdad808d0979d6e45419720fa0d73b4cedcbb96,niftynet/application/segmentation_application.py,SegmentationApplication,initialise_dataset_loader,#SegmentationApplication#Any#Any#,59
Before Change
if self.is_training:
self.reader = ImageReader(SUPPORTED_INPUT)
else: # in the inference process use image input only
self.reader = ImageReader(["image"])
self.reader.initialise_reader(data_param, task_param)
if self.net_param.normalise_foreground_only:
foreground_masking_layer = BinaryMaskingLayer(
type_str=self.net_param.foreground_type,
multimod_fusion=self.net_param.multimod_foreground_type,
threshold=0.0)
else:
foreground_masking_layer = None
mean_var_normaliser = MeanVarNormalisationLayer(
image_name="image", binary_masking_func=foreground_masking_layer)
if self.net_param.histogram_ref_file:
histogram_normaliser = HistogramNormalisationLayer(
image_name="image",
modalities=vars(task_param).get("image"),
model_filename=self.net_param.histogram_ref_file,
binary_masking_func=foreground_masking_layer,
norm_type=self.net_param.norm_type,
cutoff=self.net_param.cutoff,
name="hist_norm_layer")
else:
histogram_normaliser = None
if self.net_param.histogram_ref_file:
label_normaliser = DiscreteLabelNormalisationLayer(
image_name="label",
modalities=vars(task_param).get("label"),
model_filename=self.net_param.histogram_ref_file)
else:
label_normaliser = None
normalisation_layers = []
if self.net_param.normalisation:
normalisation_layers.append(histogram_normaliser)
if self.net_param.whitening:
normalisation_layers.append(mean_var_normaliser)
if task_param.label_normalisation:
normalisation_layers.append(label_normaliser)
augmentation_layers = []
if self.is_training:
if self.action_param.random_flipping_axes != -1:
augmentation_layers.append(RandomFlipLayer(
flip_axes=self.action_param.random_flipping_axes))
if self.action_param.scaling_percentage:
augmentation_layers.append(RandomSpatialScalingLayer(
min_percentage=self.action_param.scaling_percentage[0],
max_percentage=self.action_param.scaling_percentage[1]))
if self.action_param.rotation_angle or \
self.action_param.rotation_angle_x or \
self.action_param.rotation_angle_y or \
self.action_param.rotation_angle_z:
rotation_layer = RandomRotationLayer()
if self.action_param.rotation_angle:
rotation_layer.init_uniform_angle(
self.action_param.rotation_angle)
else:
rotation_layer.init_non_uniform_angle(
self.action_param.rotation_angle_x,
self.action_param.rotation_angle_y,
self.action_param.rotation_angle_z)
augmentation_layers.append(rotation_layer)
volume_padding_layer = []
if self.net_param.volume_padding_size:
volume_padding_layer.append(PadLayer(
image_name=SUPPORTED_INPUT,
border=self.net_param.volume_padding_size))
self.reader.add_preprocessing_layers(
volume_padding_layer + normalisation_layers + augmentation_layers)
def initialise_uniform_sampler(self):
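The version above composes the preprocessing pipeline from three lists, concatenated in the order padding + normalisation + augmentation, with augmentation added only when training (a pattern the refactored version below keeps). A minimal sketch of that composition, using hypothetical stand-in layer classes rather than the NiftyNet implementations, could look like this:

# Minimal sketch of the composition order; the layer classes here are
# hypothetical stand-ins, not the NiftyNet ones.
class PadLayer:
    def __init__(self, image_name, border):
        self.image_name, self.border = image_name, border

class MeanVarNormalisationLayer:
    def __init__(self, image_name, binary_masking_func=None):
        self.image_name = image_name
        self.binary_masking_func = binary_masking_func

class RandomFlipLayer:
    def __init__(self, flip_axes):
        self.flip_axes = flip_axes

def build_preprocessing(padding_size, whitening, flipping_axes, mask_func=None):
    """Return layers in the order they are applied:
    padding, then normalisation, then (training-only) augmentation."""
    padding = ([PadLayer(image_name=('image', 'label'), border=padding_size)]
               if padding_size else [])
    normalisation = ([MeanVarNormalisationLayer('image', mask_func)]
                     if whitening else [])
    augmentation = ([RandomFlipLayer(flipping_axes)]
                    if flipping_axes != -1 else [])
    return padding + normalisation + augmentation

layers = build_preprocessing(padding_size=(8, 8, 8),
                             whitening=True,
                             flipping_axes=(0, 1))
print([type(layer).__name__ for layer in layers])
# ['PadLayer', 'MeanVarNormalisationLayer', 'RandomFlipLayer']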
After Change
# read each line of csv files into an instance of Subject
if self.is_training:
self.readers = [ImageReader(SUPPORTED_INPUT, phase="train"),
ImageReader(SUPPORTED_INPUT, phase="validation")]
else: # in the inference process use image input only
self.readers = [ImageReader(["image"], phase="test")]
for reader in self.readers:
reader.initialise_reader(data_param, task_param)
if self.net_param.normalise_foreground_only:
foreground_masking_layer = BinaryMaskingLayer(
type_str=self.net_param.foreground_type,
multimod_fusion=self.net_param.multimod_foreground_type,
threshold=0.0)
else:
foreground_masking_layer = None
mean_var_normaliser = MeanVarNormalisationLayer(
image_name="image", binary_masking_func=foreground_masking_layer)
if self.net_param.histogram_ref_file:
histogram_normaliser = HistogramNormalisationLayer(
image_name="image",
modalities=vars(task_param).get("image"),
model_filename=self.net_param.histogram_ref_file,
binary_masking_func=foreground_masking_layer,
norm_type=self.net_param.norm_type,
cutoff=self.net_param.cutoff,
name="hist_norm_layer")
else:
histogram_normaliser = None
if self.net_param.histogram_ref_file:
label_normaliser = DiscreteLabelNormalisationLayer(
image_name="label",
modalities=vars(task_param).get("label"),
model_filename=self.net_param.histogram_ref_file)
else:
label_normaliser = None
normalisation_layers = []
if self.net_param.normalisation:
normalisation_layers.append(histogram_normaliser)
if self.net_param.whitening:
normalisation_layers.append(mean_var_normaliser)
if task_param.label_normalisation:
normalisation_layers.append(label_normaliser)
augmentation_layers = []
if self.is_training:
if self.action_param.random_flipping_axes != -1:
augmentation_layers.append(RandomFlipLayer(
flip_axes=self.action_param.random_flipping_axes))
if self.action_param.scaling_percentage:
augmentation_layers.append(RandomSpatialScalingLayer(
min_percentage=self.action_param.scaling_percentage[0],
max_percentage=self.action_param.scaling_percentage[1]))
if self.action_param.rotation_angle or \
self.action_param.rotation_angle_x or \
self.action_param.rotation_angle_y or \
self.action_param.rotation_angle_z:
rotation_layer = RandomRotationLayer()
if self.action_param.rotation_angle:
rotation_layer.init_uniform_angle(
self.action_param.rotation_angle)
else:
rotation_layer.init_non_uniform_angle(
self.action_param.rotation_angle_x,
self.action_param.rotation_angle_y,
self.action_param.rotation_angle_z)
augmentation_layers.append(rotation_layer)
volume_padding_layer = []
if self.net_param.volume_padding_size:
volume_padding_layer.append(PadLayer(
image_name=SUPPORTED_INPUT,
border=self.net_param.volume_padding_size))
for reader in self.readers:
reader.add_preprocessing_layers(
volume_padding_layer + normalisation_layers + augmentation_layers)
def initialise_uniform_sampler(self):
self.sampler = [[UniformSampler(
reader=reader,
data_param=self.data_param,
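The main change is that the single self.reader is replaced by a list of per-phase readers (train and validation during training, test at inference), the same preprocessing list is applied to each reader in a loop, and the sampler methods then build one sampler per reader. A minimal sketch of that pattern, with hypothetical stand-in classes rather than the real NiftyNet API, might look like:

# Minimal sketch of the per-phase reader pattern (hypothetical stand-in
# classes, not the real NiftyNet API).
class ImageReader:
    def __init__(self, names, phase):
        self.names, self.phase, self.layers = names, phase, []

    def add_preprocessing_layers(self, layers):
        self.layers.extend(layers)

class UniformSampler:
    def __init__(self, reader):
        self.reader = reader

def initialise(is_training, preprocessing_layers):
    if is_training:
        readers = [ImageReader(('image', 'label'), phase='train'),
                   ImageReader(('image', 'label'), phase='validation')]
    else:  # inference uses the image input only
        readers = [ImageReader(('image',), phase='test')]
    for reader in readers:
        reader.add_preprocessing_layers(preprocessing_layers)
    # one sampler per reader, mirroring initialise_uniform_sampler
    samplers = [[UniformSampler(reader=reader) for reader in readers]]
    return readers, samplers

readers, samplers = initialise(is_training=True, preprocessing_layers=[])
print([reader.phase for reader in readers])  # ['train', 'validation']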
In pattern: SUPERPATTERN
Frequency: 3
Non-data size: 17
Instances
Project Name: NifTK/NiftyNet
Commit Name: dfdad808d0979d6e45419720fa0d73b4cedcbb96
Time: 2017-11-01
Author: eli.gibson@gmail.com
File Name: niftynet/application/segmentation_application.py
Class Name: SegmentationApplication
Method Name: initialise_dataset_loader
Project Name: NifTK/NiftyNet
Commit Name: 53633acd7c861fd73e3954088a48d0ac8dc42895
Time: 2017-11-01
Author: eli.gibson@gmail.com
File Name: niftynet/application/gan_application.py
Class Name: GANApplication
Method Name: initialise_dataset_loader
Project Name: NifTK/NiftyNet
Commit Name: 53633acd7c861fd73e3954088a48d0ac8dc42895
Time: 2017-11-01
Author: eli.gibson@gmail.com
File Name: niftynet/application/regression_application.py
Class Name: RegressionApplication
Method Name: initialise_dataset_loader