def initialise_dataset_loader(self, data_param=None, task_param=None,
                              data_partitioner=None):
    BaseApplication.initialise_dataset_loader(
        self, data_param, task_param, data_partitioner)
    self.data_param = data_param
    self.autoencoder_param = task_param

    # at inference time the requested inference type must be one of the
    # supported modes; during training no inference type is needed
    if not self.is_training:
        self._infer_type = look_up_operations(
            self.autoencoder_param.inference_type, SUPPORTED_INFERENCE)
    else:
        self._infer_type = None

    # read each line of the csv files into an instance of Subject
    if self.is_training:
        self.readers = [ImageReader(["image"], "train")]
        if self.has_validation_data and self.action_param.validate_every_n:
            self.readers.append(ImageReader(["image"], "validation"))

    # at inference time, choose the readers according to the inference type
    if self._infer_type in ("encode", "encode-decode"):
        self.readers = [ImageReader(["image"], phase="test")]
elif self._infer_type == "linear_interpolation":
self.readers = [ImageReader(["feature"])]
file_list = data_partitioner.get_file_list()
for reader in self.readers:
reader.initialise(data_param, task_param, file_list)
//if self.is_training or self._infer_type in ("encode", "encode-decode"):
// mean_var_normaliser = MeanVarNormalisationLayer(image_name="image")