feat: Add legacy flag to switch between pre and post MED_2D
shernshiou committed Jul 1, 2024
1 parent e92463d commit e77c801
Showing 5 changed files with 115 additions and 59 deletions.
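The flag threads from the CLI entry points down into the nifti converters. Below is a minimal sketch of the resulting Python-level surface; the file paths and dataset slug are invented for illustration, and the argparse option that populates args.legacy is presumably defined in one of the changed files not shown here:

from pathlib import Path

import darwin.cli_functions as f

files = [Path("annotations/scan_001.json")]  # hypothetical Darwin JSON export
output_dir = Path("converted_annotations")   # hypothetical output folder

# Default: the new post-MED_2D calculation (coordinates divided by pixdims).
f.convert("nifti", files, output_dir)

# Opt back into the pre-MED_2D behaviour.
f.convert("nifti", files, output_dir, legacy=True)

# The same keyword is accepted by the dataset-level entry point.
f.dataset_convert("my-team/my-dataset", "nifti", output_dir, legacy=True)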
8 changes: 5 additions & 3 deletions darwin/cli.py
@@ -100,7 +100,7 @@ def _run(args: Namespace, parser: ArgumentParser) -> None:
print(__version__)

elif args.command == "convert":
- f.convert(args.format, args.files, args.output_dir)
+ f.convert(args.format, args.files, args.output_dir, legacy=args.legacy)
elif args.command == "dataset":
if args.action == "remote":
f.list_remote_datasets(args.all, args.team)
@@ -167,11 +167,13 @@ def _run(args: Namespace, parser: ArgumentParser) -> None:
args.import_annotators,
args.import_reviewers,
args.overwrite,
- isotropic=args.isotropic,
+ legacy=args.legacy,
cpu_limit=args.cpu_limit,
)
elif args.action == "convert":
- f.dataset_convert(args.dataset, args.format, args.output_dir)
+ f.dataset_convert(
+     args.dataset, args.format, args.output_dir, legacy=args.legacy
+ )
elif args.action == "set-file-status":
f.set_file_status(args.dataset, args.status, args.files)
elif args.action == "delete-files":
31 changes: 25 additions & 6 deletions darwin/cli_functions.py
@@ -853,7 +853,7 @@ def dataset_import(
import_annotators: bool = False,
import_reviewers: bool = False,
overwrite: bool = False,
- isotropic: bool = False,
+ legacy: bool = False,
use_multi_cpu: bool = False,
cpu_limit: Optional[int] = None,
) -> None:
@@ -887,7 +887,7 @@ def dataset_import(
overwrite : bool, default: False
If ``True`` it will bypass a warning that the import will overwrite the current annotations if any are present.
If ``False`` the warning will be shown and the user must confirm before the existing annotations are overwritten.
- isotropic : bool, default: False
+ legacy : bool, default: False
If ``True`` it will not resize the annotations to be isotropic.
If ``False`` it will resize the annotations to be isotropic.
use_multi_cpu : bool, default: False
@@ -901,8 +901,8 @@ def dataset_import(
try:
importer: ImportParser = get_importer(format)

- if format == "nifti" and isotropic:
-     importer = partial(importer, isotropic=True)
+ if format == "nifti" and legacy:
+     importer = partial(importer, legacy=True)

dataset: RemoteDataset = client.get_remote_dataset(
dataset_identifier=dataset_slug
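The partial application above is the whole switching mechanism: legacy=True is bound once, and every later call to the importer receives it without any call site changing. A self-contained sketch of the pattern, using a stand-in parser rather than the real nifti importer:

from functools import partial

def parse_files(files, legacy=False):
    # Stand-in for the real nifti importer.
    return f"parsed {len(files)} file(s), legacy={legacy}"

format_name, legacy = "nifti", True
importer = parse_files
if format_name == "nifti" and legacy:
    importer = partial(importer, legacy=True)

print(importer(["a.json", "b.json"]))  # parsed 2 file(s), legacy=True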
@@ -1172,7 +1172,10 @@ def validate_schemas(


def dataset_convert(
- dataset_identifier: str, format: str, output_dir: Optional[PathLike] = None
+ dataset_identifier: str,
+ format: str,
+ output_dir: Optional[PathLike] = None,
+ legacy: bool = False,
) -> None:
"""
Converts the annotations from the given dataset to the given format.
@@ -1188,12 +1191,20 @@
output_dir : Optional[PathLike], default: None
The folder where the exported annotation files will be. If None it will be inside the
annotations folder of the dataset under 'other_formats/{format}'.
+ legacy : bool, default: False
+     This flag is only for the nifti format.
+     If True, it will export the annotations using the legacy calculations.
+     If False, it will resize the annotations using the new calculation by dividing with pixdims.
"""
identifier: DatasetIdentifier = DatasetIdentifier.parse(dataset_identifier)
client: Client = _load_client(team_slug=identifier.team_slug)

try:
parser: ExportParser = get_exporter(format)

+ if format == "nifti" and legacy:
+     parser = partial(parser, legacy=True)

dataset: RemoteDataset = client.get_remote_dataset(
dataset_identifier=identifier
)
@@ -1224,7 +1235,9 @@
_error(f"No dataset with name '{e.name}'")


- def convert(format: str, files: List[PathLike], output_dir: Path) -> None:
+ def convert(
+     format: str, files: List[PathLike], output_dir: Path, legacy: bool = False
+ ) -> None:
"""
Converts the given files to the specified format.
@@ -1236,9 +1249,15 @@ def convert(format: str, files: List[PathLike], output_dir: Path) -> None:
List of files to be converted.
output_dir: Path
Folder where the exported annotations will be placed.
+ legacy: bool, default: False
+     This flag is only for the nifti format.
+     If True, it will export the annotations using the legacy calculations.
+     If False, it will resize the annotations using the new calculation by dividing with pixdims.
"""
try:
parser: ExportParser = get_exporter(format)
+ if format == "nifti" and legacy:
+     parser = partial(parser, legacy=True)
except ExporterNotFoundError:
_error(f"Unsupported export format, currently supported: {export_formats}")
except AttributeError:
60 changes: 46 additions & 14 deletions darwin/exporter/formats/nifti.py
@@ -44,7 +44,11 @@ class Volume:
from_raster_layer: bool


- def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> None:
+ def export(
+     annotation_files: Iterable[dt.AnnotationFile],
+     output_dir: Path,
+     legacy: bool = False,
+ ) -> None:
"""
Exports the given ``AnnotationFile``\\s into nifti format inside of the given
``output_dir``. Deletes everything within ``output_dir/masks`` before writing to it.
@@ -55,13 +59,23 @@ def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> None:
The ``AnnotationFile``\\s to be exported.
output_dir : Path
The folder where the new instance mask files will be.
+ legacy : bool, default=False
+     If ``True``, the exporter will use the legacy calculation.
+     If ``False``, the exporter will use the new calculation by dividing with pixdims.
Returns
-------
sends output volumes, image_id and output_dir to the write_output_volume_to_disk function
"""

+ if legacy:
+     console.print(
+         "Legacy flag is set to True. Annotations will be resized using legacy calculations.",
+         style="bold blue",
+     )

video_annotations = list(annotation_files)
for video_annotation in video_annotations:
image_id = check_for_error_and_return_imageid(video_annotation, output_dir)
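Because legacy is now a parameter of export itself, the exporter can also be driven directly from Python rather than through the CLI. A hedged sketch, assuming annotation files parsed with darwin.utils.parse_darwin_json and an invented input path:

from pathlib import Path

from darwin.exporter.formats import nifti
from darwin.utils import parse_darwin_json

# Hypothetical Darwin JSON export; parse_darwin_json returns None on non-annotation files.
annotation_file = parse_darwin_json(Path("annotations/scan_001.json"), count=None)
if annotation_file is not None:
    nifti.export([annotation_file], output_dir=Path("masks_out"), legacy=True)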
@@ -92,10 +106,10 @@ def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> None:
]
if polygon_annotations:
populate_output_volumes_from_polygons(
- polygon_annotations, slot_map, output_volumes
+ polygon_annotations, slot_map, output_volumes, legacy=legacy
)
write_output_volume_to_disk(
- output_volumes, image_id=image_id, output_dir=output_dir
+ output_volumes, image_id=image_id, output_dir=output_dir, legacy=legacy
)
# Need to map raster layers to SeriesInstanceUIDs
if mask_present:
@@ -124,7 +138,10 @@ def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> None:
output_volumes=raster_output_volumes,
)
write_output_volume_to_disk(
- raster_output_volumes, image_id=image_id, output_dir=output_dir
+ raster_output_volumes,
+ image_id=image_id,
+ output_dir=output_dir,
+ legacy=legacy,
)


@@ -302,6 +319,7 @@ def populate_output_volumes_from_polygons(
annotations: List[Union[dt.Annotation, dt.VideoAnnotation]],
slot_map: Dict,
output_volumes: Dict,
+ legacy: bool = False,
):
"""
Populates the output volumes with the given polygon annotations. The annotations are converted into masks
@@ -315,6 +333,9 @@
Dictionary of the different slots within the annotation file
output_volumes : Dict
Volumes created from the build_output_volumes file
+ legacy : bool, default=False
+     If ``True``, the exporter will use the legacy calculation.
+     If ``False``, the exporter will use the new calculation by dividing with pixdims.
"""
for annotation in annotations:
slot_name = annotation.slot_names[0]
@@ -341,7 +362,7 @@
if "paths" in frame_data:
# Dealing with a complex polygon
polygons = [
- shift_polygon_coords(polygon_path, pixdims)
+ shift_polygon_coords(polygon_path, pixdims, legacy=legacy)
for polygon_path in frame_data["paths"]
]
else:
@@ -412,7 +433,10 @@ def populate_output_volumes_from_raster_layer(


def write_output_volume_to_disk(
- output_volumes: Dict, image_id: str, output_dir: Union[str, Path]
+ output_volumes: Dict,
+ image_id: str,
+ output_dir: Union[str, Path],
+ legacy: bool = False,
) -> None:
"""Writes the given output volumes to disk.
@@ -424,6 +448,9 @@
The specific image id
output_dir : Union[str, Path]
The output directory to write the volumes to
+ legacy : bool, default=False
+     If ``True``, the exporter will use the legacy calculation.
+     If ``False``, the exporter will use the new calculation by dividing with pixdims.
Returns
-------
@@ -446,7 +473,7 @@ def unnest_dict_to_list(d: Dict) -> List:
dataobj=np.flip(volume.pixel_array, (0, 1, 2)).astype(np.int16),
affine=volume.affine,
)
- if volume.original_affine is not None:
+ if legacy and volume.original_affine is not None:
orig_ornt = io_orientation(
volume.original_affine
) # Get orientation of current affine
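With the added guard, only legacy exports are re-oriented back to the volume's original affine; the new code path leaves the working affine untouched. The rest of this hunk is collapsed, so the following is only a sketch of the usual nibabel re-orientation idiom, not the file's exact code:

import numpy as np
import nibabel as nib
from nibabel.orientations import apply_orientation, io_orientation, ornt_transform

def reorient_to_original(pixel_array, affine, original_affine):
    img_ornt = io_orientation(affine)            # orientation of the working affine
    orig_ornt = io_orientation(original_affine)  # orientation the source file was loaded with
    transform = ornt_transform(img_ornt, orig_ornt)  # axis permutations/flips between the two
    data = apply_orientation(pixel_array, transform)
    return nib.Nifti1Image(data.astype(np.int16), affine=original_affine)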
@@ -464,14 +491,19 @@ def unnest_dict_to_list(d: Dict) -> List:
nib.save(img=img, filename=output_path)


- def shift_polygon_coords(polygon: List[Dict], pixdim: List[Number]) -> List:
-     # Need to make it clear that we flip x/y because we need to take the transpose later.
-     if pixdim[1] > pixdim[0]:
-         return [{"x": p["y"], "y": p["x"] * pixdim[1] / pixdim[0]} for p in polygon]
-     elif pixdim[1] < pixdim[0]:
-         return [{"x": p["y"] * pixdim[0] / pixdim[1], "y": p["x"]} for p in polygon]
-     else:
-         return [{"x": p["y"], "y": p["x"]} for p in polygon]
+ def shift_polygon_coords(
+     polygon: List[Dict], pixdim: List[Number], legacy: bool = False
+ ) -> List:
+     if legacy:
+         # Need to make it clear that we flip x/y because we need to take the transpose later.
+         if pixdim[1] > pixdim[0]:
+             return [{"x": p["y"], "y": p["x"] * pixdim[1] / pixdim[0]} for p in polygon]
+         elif pixdim[1] < pixdim[0]:
+             return [{"x": p["y"] * pixdim[0] / pixdim[1], "y": p["x"]} for p in polygon]
+         else:
+             return [{"x": p["y"], "y": p["x"]} for p in polygon]
+     else:
+         return [{"x": p["y"] // pixdim[1], "y": p["x"] // pixdim[0]} for p in polygon]


def get_view_idx(frame_idx: int, groups: List) -> int:
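The behavioural difference is easiest to see on one concrete point. A worked example with invented anisotropic pixel dimensions (pixdim = [2.0, 1.0]), following the two branches of shift_polygon_coords above:

point = {"x": 10, "y": 20}
pixdim = [2.0, 1.0]  # hypothetical spacing; pixdim[1] < pixdim[0]

# Legacy branch: swap x/y (for the later transpose) and scale by the
# pixdim ratio so the polygon becomes isotropic.
legacy_point = {"x": point["y"] * pixdim[0] / pixdim[1], "y": point["x"]}
print(legacy_point)  # {'x': 40.0, 'y': 10}

# New branch: swap x/y and floor-divide each coordinate by its own pixdim.
new_point = {"x": point["y"] // pixdim[1], "y": point["x"] // pixdim[0]}
print(new_point)  # {'x': 20.0, 'y': 5.0}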
The remaining 2 changed files are not shown.
