diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 659dcb43..93944e15 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -60,6 +60,8 @@ jobs: S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }} S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }} S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }} + S3_URL_KITS19_REDUCED_DATASET: ${{ secrets.S3_URL_KITS19_REDUCED_DATASET }} + S3_URL_UNET_KITS_PYTORCH_FP32: ${{ secrets.S3_URL_UNET_KITS_PYTORCH_FP32 }} HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }} steps: - name: Install git @@ -123,6 +125,8 @@ jobs: S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }} S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }} S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }} + S3_URL_KITS19_REDUCED_DATASET: ${{ secrets.S3_URL_KITS19_REDUCED_DATASET }} + S3_URL_UNET_KITS_PYTORCH_FP32: ${{ secrets.S3_URL_UNET_KITS_PYTORCH_FP32 }} S3_URL_COVOST2_DATASET: ${{ secrets.S3_URL_COVOST2_DATASET }} HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }} steps: @@ -185,6 +189,8 @@ jobs: S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }} S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }} S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }} + S3_URL_KITS19_REDUCED_DATASET: ${{ secrets.S3_URL_KITS19_REDUCED_DATASET }} + S3_URL_UNET_KITS_PYTORCH_FP32: ${{ secrets.S3_URL_UNET_KITS_PYTORCH_FP32 }} S3_URL_COVOST2_DATASET: ${{ secrets.S3_URL_COVOST2_DATASET }} HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }} steps: @@ -229,6 +235,8 @@ jobs: S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }} S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }} S3_URL_COVOST2_DATASET: ${{ secrets.S3_URL_COVOST2_DATASET }} + S3_URL_KITS19_REDUCED_DATASET: ${{ secrets.S3_URL_KITS19_REDUCED_DATASET }} + S3_URL_UNET_KITS_PYTORCH_FP32: ${{ 
secrets.S3_URL_UNET_KITS_PYTORCH_FP32 }} HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }} steps: - name: Git checkout & pull submodules diff --git a/computer_vision/semantic_segmentation/unet_3d/kits_19/run.py b/computer_vision/semantic_segmentation/unet_3d/kits_19/run.py index f548c258..807a9379 100644 --- a/computer_vision/semantic_segmentation/unet_3d/kits_19/run.py +++ b/computer_vision/semantic_segmentation/unet_3d/kits_19/run.py @@ -1,5 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright (c) 2024, Ampere Computing LLC + + try: from utils import misc # noqa except ModuleNotFoundError: @@ -18,31 +20,6 @@ sys.exit(1) -def parse_args(): - import argparse - parser = argparse.ArgumentParser(description="Run 3D Unet KiTS 2019 model.") - parser.add_argument("-m", "--model_path", - type=str, - help="path to the model") - parser.add_argument("-p", "--precision", - type=str, choices=["fp32"], required=True, - help="precision of the model provided") - parser.add_argument("-f", "--framework", - type=str, default="tf", - choices=["tf"], - help="specify the framework in which a model should be run") - parser.add_argument("--timeout", - type=float, default=60.0, - help="timeout in seconds") - parser.add_argument("--num_runs", - type=int, - help="number of passes through network to execute") - parser.add_argument("--kits_path", - type=str, - help="path to directory with KiTS19 dataset") - return parser.parse_args() - - def run_tf_fp(model_path, num_runs, timeout, kits_path): import numpy as np import tensorflow as tf @@ -64,27 +41,47 @@ def run_single_pass(tf_runner, kits): return run_model(run_single_pass, runner, dataset, 1, num_runs, timeout) +def run_pytorch_fp(model_path, num_runs, timeout, kits_path): + import torch + import numpy as np + import tensorflow as tf + from utils.pytorch import PyTorchRunnerV2 + from utils.cv.kits import KiTS19 + from utils.benchmark import run_model + + def run_single_pass(pytorch_runner, kits): + output = pytorch_runner.run(1, 
torch.from_numpy(np.expand_dims(kits.get_input_array(), axis=0))) + kits.submit_predictions(tf.convert_to_tensor(output.numpy())) + + dataset = KiTS19(dataset_dir_path=kits_path) + model = torch.jit.load(model_path, map_location=torch.device('cpu')).eval() + model = torch.jit.freeze(model) + runner = PyTorchRunnerV2(model) + + return run_model(run_single_pass, runner, dataset, 1, num_runs, timeout) + + def run_tf_fp32(model_path, num_runs, timeout, kits_path, **kwargs): return run_tf_fp(model_path, num_runs, timeout, kits_path) +def run_pytorch_fp32(model_path, num_runs, timeout, kits_path, **kwargs): + return run_pytorch_fp(model_path, num_runs, timeout, kits_path) + + def main(): - from utils.misc import print_goodbye_message_and_die - args = parse_args() - if args.framework == "tf": - if args.model_path is None: - print_goodbye_message_and_die( - "a path to model is unspecified!") - - if args.precision == "fp32": - run_tf_fp32(**vars(args)) - else: - print_goodbye_message_and_die( - "this model seems to be unsupported in a specified precision: " + args.precision) + from utils.helpers import DefaultArgParser + parser = DefaultArgParser(["tf", "pytorch"]) + parser.require_model_path() + parser.add_argument("--kits_path", + type=str, + help="path to directory with KiTS19 dataset") + args = parser.parse() + if args.framework == 'tf': + run_tf_fp32(**vars(args)) else: - print_goodbye_message_and_die( - "this model seems to be unsupported in a specified framework: " + args.framework) + run_pytorch_fp32(**vars(args)) if __name__ == "__main__": diff --git a/tests/test_pytorch_models.py b/tests/test_pytorch_models.py index 00be4edf..4b102b05 100644 --- a/tests/test_pytorch_models.py +++ b/tests/test_pytorch_models.py @@ -227,6 +227,42 @@ def wrapper(**kwargs): self.assertTrue(acc["f1"] / f1_ref > 0.95) +class UNET_KITS(unittest.TestCase): + + def setUp(self): + self.dataset_path = pathlib.Path(get_downloads_path(), "kits19") + if not 
self.dataset_path.exists(): + url = os.environ.get("S3_URL_KITS19_REDUCED_DATASET") + assert url is not None + subprocess.run(f"wget -P /tmp {url}".split(), + check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + subprocess.run(f"tar -xf /tmp/kits19_reduced.tar.gz -C {get_downloads_path()}".split(), + check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + subprocess.run("rm /tmp/kits19_reduced.tar.gz".split(), + check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + + self.model_path = pathlib.Path(get_downloads_path(), "3d_unet_kits_pytorch_fp32.ptc") + if not self.model_path.exists(): + url = os.environ["S3_URL_UNET_KITS_PYTORCH_FP32"] + subprocess.run(f"wget -P {get_downloads_path()} {url}".split(), + check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + + @unittest.skipIf(psutil.virtual_memory().available / 1024 ** 3 < 100, "too little memory") + @unittest.skipUnless('_aio_profiler_print' in dir(torch._C), "Ampere optimized PyTorch required") + def test_unet_kits(self): + from computer_vision.semantic_segmentation.unet_3d.kits_19.run import run_pytorch_fp32 + + def wrapper(**kwargs): + kwargs["q"].put(run_pytorch_fp32(**kwargs)[0]) + + mean_kidney_acc, mean_tumor_acc = 0.927, 0.837 + acc = run_process(wrapper, {"model_path": self.model_path, "kits_path": self.dataset_path, + "batch_size": 1, "num_runs": 500, "timeout": 200, "debug": True}) + + self.assertTrue(acc["mean_kidney_acc"] / mean_kidney_acc > 0.90) + self.assertTrue(acc["mean_tumor_acc"] / mean_tumor_acc > 0.80) + + def download_imagenet_maybe(): dataset_path = pathlib.Path(get_downloads_path(), "ILSVRC2012_onspecta") if not dataset_path.exists():