From fa552d33b87d74b2b548aabb3fe77db575b4c285 Mon Sep 17 00:00:00 2001
From: Fabian Isensee
Date: Mon, 29 Jan 2024 13:21:52 +0100
Subject: [PATCH] update comments

---
 nnunetv2/evaluation/evaluate_predictions.py              | 2 +-
 .../experiment_planners/default_experiment_planner.py    | 2 +-
 nnunetv2/inference/predict_from_raw_data.py              | 1 -
 nnunetv2/preprocessing/resampling/default_resampling.py  | 2 +-
 .../variants/data_augmentation/nnUNetTrainerDA5.py       | 3 ---
 nnunetv2/utilities/json_export.py                        | 5 +++--
 6 files changed, 6 insertions(+), 9 deletions(-)

diff --git a/nnunetv2/evaluation/evaluate_predictions.py b/nnunetv2/evaluation/evaluate_predictions.py
index 80e4d242f..18f0df9f4 100644
--- a/nnunetv2/evaluation/evaluate_predictions.py
+++ b/nnunetv2/evaluation/evaluate_predictions.py
@@ -33,7 +33,7 @@ def key_to_label_or_region(key: str):
 
 def save_summary_json(results: dict, output_file: str):
     """
-    stupid json does not support tuples as keys (why does it have to be so shitty) so we need to convert that shit
+    json does not support tuples as keys (why does it have to be so shitty) so we need to convert that shit
     ourselves
     """
     results_converted = deepcopy(results)
diff --git a/nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py b/nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py
index ccb4a251e..b74f43304 100644
--- a/nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py
+++ b/nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py
@@ -464,7 +464,7 @@ def plan_experiment(self):
         shutil.copy(join(self.raw_dataset_folder, 'dataset.json'),
                     join(nnUNet_preprocessed, self.dataset_name, 'dataset.json'))
 
-        # json is stupid and I hate it... "Object of type int64 is not JSON serializable" -> my ass
+        # json is ###. I hate it... "Object of type int64 is not JSON serializable"
         plans = {
             'dataset_name': self.dataset_name,
             'plans_name': self.plans_identifier,
diff --git a/nnunetv2/inference/predict_from_raw_data.py b/nnunetv2/inference/predict_from_raw_data.py
index 790e98007..2c2e1f03e 100644
--- a/nnunetv2/inference/predict_from_raw_data.py
+++ b/nnunetv2/inference/predict_from_raw_data.py
@@ -57,7 +57,6 @@ def __init__(self,
         self.use_mirroring = use_mirroring
         if device.type == 'cuda':
             # device = torch.device(type='cuda', index=0)  # set the desired GPU with CUDA_VISIBLE_DEVICES!
-            # why would I ever want to do that. Stupid dobby. This kills DDP inference...
             pass
         if device.type != 'cuda':
             print(f'perform_everything_on_device=True is only supported for cuda devices! Setting this to False')
diff --git a/nnunetv2/preprocessing/resampling/default_resampling.py b/nnunetv2/preprocessing/resampling/default_resampling.py
index e83f61463..e23e14da8 100644
--- a/nnunetv2/preprocessing/resampling/default_resampling.py
+++ b/nnunetv2/preprocessing/resampling/default_resampling.py
@@ -83,7 +83,7 @@ def resample_data_or_seg_to_shape(data: Union[torch.Tensor, np.ndarray],
                                   force_separate_z: Union[bool, None] = False,
                                   separate_z_anisotropy_threshold: float = ANISO_THRESHOLD):
     """
-    needed for segmentation export. Stupid, I know. Maybe we can fix that with Leos new resampling functions
+    needed for segmentation export. Stupid, I know
     """
     if isinstance(data, torch.Tensor):
         data = data.cpu().numpy()
diff --git a/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py b/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py
index 7250fb845..a96cb2bda 100644
--- a/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py
+++ b/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py
@@ -34,9 +34,6 @@ class nnUNetTrainerDA5(nnUNetTrainer):
 
     def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
-        """
-        This function is stupid and certainly one of the weakest spots of this implementation. Not entirely sure how we can fix it.
-        """
         patch_size = self.configuration_manager.patch_size
         dim = len(patch_size)
         # todo rotation should be defined dynamically based on patch size (more isotropic patch sizes = more rotation)
diff --git a/nnunetv2/utilities/json_export.py b/nnunetv2/utilities/json_export.py
index 5ea463c27..d6bcd0644 100644
--- a/nnunetv2/utilities/json_export.py
+++ b/nnunetv2/utilities/json_export.py
@@ -5,7 +5,8 @@
 
 
 def recursive_fix_for_json_export(my_dict: dict):
-    # json is stupid. 'cannot serialize object of type bool_/int64/float64'. Come on bro.
+    # json is ... a very nice thing to have
+    # 'cannot serialize object of type bool_/int64/float64'. Apart from that of course...
     keys = list(my_dict.keys())  # cannot iterate over keys() if we change keys....
     for k in keys:
         if isinstance(k, (np.int64, np.int32, np.int8, np.uint8)):
@@ -37,7 +38,7 @@ def recursive_fix_for_json_export(my_dict: dict):
 
 
 def fix_types_iterable(iterable, output_type):
-    # this sh!t is hacky as hell and will break if you use it for anything outside nnunet. Keep you hands off of this.
+    # this sh!t is hacky as hell and will break if you use it for anything outside nnunet. Keep your hands off of this.
     out = []
     for i in iterable:
         if type(i) in (np.int64, np.int32, np.int8, np.uint8):
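
Aside: the json-related comments in this patch (evaluate_predictions.py, default_experiment_planner.py, json_export.py) all run into the same two limitations of Python's stdlib json module: dict keys must be str/int/float/bool/None, so tuple keys such as label regions need converting, and numpy scalar types (bool_, int64, float64, ...) are not serializable at all. Below is a minimal, self-contained sketch of both conversions. make_json_safe and the example dict are hypothetical, written for this note only; they are not the nnU-Net functions touched above.

import json

import numpy as np


def make_json_safe(obj):
    # Hypothetical helper, not nnU-Net code: returns a JSON-serializable copy.
    if isinstance(obj, dict):
        # json only allows primitive keys, so tuple keys (e.g. label regions)
        # are stringified; values are converted recursively.
        return {(str(k) if isinstance(k, tuple) else k): make_json_safe(v)
                for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [make_json_safe(i) for i in obj]
    if isinstance(obj, np.bool_):
        return bool(obj)
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        return float(obj)
    if isinstance(obj, np.ndarray):
        return make_json_safe(obj.tolist())
    return obj


# A results dict with a tuple key (a label region) and numpy scalars, the two
# cases json.dumps would otherwise reject with the errors quoted in the patch:
results = {('kidney', 'tumor'): {'Dice': np.float64(0.87), 'n_cases': np.int64(30)}}
print(json.dumps(make_json_safe(results)))
# {"('kidney', 'tumor')": {"Dice": 0.87, "n_cases": 30}}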