From 357818db02c9315bd118a2f651f07287347d6913 Mon Sep 17 00:00:00 2001
From: guozixu2001 <11324293+guozixu2001@user.noreply.gitee.com>
Date: Sun, 29 Sep 2024 18:47:26 +0800
Subject: [PATCH 1/4] Add torchvision.transforms mappings to api_mapping.json
 and test cases in tests/vision

---
 paconvert/api_mapping.json                | 354 ++++++++++++++++++++
 tests/vision/__init__.py                  |  14 +
 tests/vision/image_apibase.py             |  63 ++++
 tests/vision/test_CenterCrop.py           |  93 +++++
 tests/vision/test_Compose.py              | 106 ++++++
 tests/vision/test_Grayscale.py            | 157 +++++++++
 tests/vision/test_Normalize.py            | 124 +++++++
 tests/vision/test_Pad.py                  | 153 +++++++++
 tests/vision/test_RandomCrop.py           | 131 +++++++
 tests/vision/test_RandomErasing.py        | 185 ++++++++++
 tests/vision/test_RandomHorizontalFlip.py | 179 ++++++++++
 tests/vision/test_RandomPerspective.py    | 206 +++++++++++
 tests/vision/test_RandomResizedCrop.py    | 158 +++++++++
 tests/vision/test_RandomRotation.py       | 130 +++++++
 tests/vision/test_RandomVerticalFlip.py   | 179 ++++++++++
 tests/vision/test_Resize.py               | 135 +++++++
 tests/vision/test_ToTensor.py             | 101 ++++++
 tests/vision/test_adjust_brightness.py    |  83 +++++
 tests/vision/test_adjust_contrast.py      |  87 +++++
 tests/vision/test_adjust_hue.py           |  87 +++++
 tests/vision/test_center_crop.py          |  92 +++++
 tests/vision/test_crop.py                 | 126 +++++++
 tests/vision/test_erase.py                | 152 +++++++
 tests/vision/test_hflip.py                | 103 ++++++
 tests/vision/test_normalize.py            | 146 +++++++
 tests/vision/test_pad.py                  | 142 +++++++
 tests/vision/test_perspective.py          | 140 +++++++
 tests/vision/test_resize.py               | 129 +++++++
 tests/vision/test_to_grayscale.py         |  89 +++++
 tests/vision/test_to_tensor.py            |  99 +++++
 tests/vision/test_vflip.py                | 165 ++++++++
 31 files changed, 4108 insertions(+)
 create mode 100644 tests/vision/__init__.py
 create mode 100644 tests/vision/image_apibase.py
 create mode 100644 tests/vision/test_CenterCrop.py
 create mode 100644 tests/vision/test_Compose.py
 create mode 100644 tests/vision/test_Grayscale.py
 create mode 100644 tests/vision/test_Normalize.py
 create mode 100644 tests/vision/test_Pad.py
 create mode 100644 tests/vision/test_RandomCrop.py
 create mode 100644 tests/vision/test_RandomErasing.py
 create mode 100644 tests/vision/test_RandomHorizontalFlip.py
 create mode 100644 tests/vision/test_RandomPerspective.py
 create mode 100644 tests/vision/test_RandomResizedCrop.py
 create mode 100644 tests/vision/test_RandomRotation.py
 create mode 100644 tests/vision/test_RandomVerticalFlip.py
 create mode 100644 tests/vision/test_Resize.py
 create mode 100644 tests/vision/test_ToTensor.py
 create mode 100644 tests/vision/test_adjust_brightness.py
 create mode 100644 tests/vision/test_adjust_contrast.py
 create mode 100644 tests/vision/test_adjust_hue.py
 create mode 100644 tests/vision/test_center_crop.py
 create mode 100644 tests/vision/test_crop.py
 create mode 100644 tests/vision/test_erase.py
 create mode 100644 tests/vision/test_hflip.py
 create mode 100644 tests/vision/test_normalize.py
 create mode 100644 tests/vision/test_pad.py
 create mode 100644 tests/vision/test_perspective.py
 create mode 100644 tests/vision/test_resize.py
 create mode 100644 tests/vision/test_to_grayscale.py
 create mode 100644 tests/vision/test_to_tensor.py
 create mode 100644 tests/vision/test_vflip.py

diff --git a/paconvert/api_mapping.json b/paconvert/api_mapping.json
index bfb91376a..0340ed6f9 100644
--- a/paconvert/api_mapping.json
+++ b/paconvert/api_mapping.json
@@ -16354,6 +16354,360 @@
             "dtype": "dtype"
         }
     },
+    "torchvision.transforms.CenterCrop": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.CenterCrop",
+        "args_list": [
+            "size"
+        ]
+    },
+    "torchvision.transforms.ColorJitter": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.ColorJitter",
+        "args_list": [
+            "brightness",
+            "contrast",
+            "saturation",
+            "hue"
+        ]
+    },
+    "torchvision.transforms.Compose": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.Compose",
+        "args_list": [
+            "transforms"
+        ]
+    },
+    "torchvision.transforms.Grayscale": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.Grayscale",
+        "args_list": [
+            "num_output_channels"
+        ]
+    },
+    "torchvision.transforms.InterpolationMode.BICUBIC": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'bicubic'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.InterpolationMode.BILINEAR": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'bilinear'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.InterpolationMode.BOX": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'box'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.InterpolationMode.HAMMING": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'hamming'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.InterpolationMode.LANCZOS": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'lanczos'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.InterpolationMode.NEAREST": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'nearest'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.InterpolationMode.NEAREST_EXACT": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'nearest_exact'",
+        "min_input_args": 0
+    },
+    "torchvision.transforms.Normalize": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.Normalize",
+        "args_list": [
+            "mean",
+            "std"
+        ],
+        "unsupport_args": [
+            "inplace"
+        ]
+    },
+    "torchvision.transforms.Pad": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.Pad",
+        "args_list": [
+            "padding",
+            "fill",
+            "padding_mode"
+        ]
+    },
+    "torchvision.transforms.RandomAffine": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "paddle.vision.transforms.RandomAffine",
+        "args_list": [
+            "degrees",
+            "translate",
+            "scale",
+            "shear",
+            "interpolation",
+            "fill",
+            "center"
+        ]
+    },
+    "torchvision.transforms.RandomCrop": {
+        "Matcher": "GenericMatcher",
"paddle_api": "paddle.vision.transforms.RandomCrop", + "args_list": [ + "size", + "padding", + "pad_if_needed", + "fill", + "padding_mode" + ] + }, + "torchvision.transforms.RandomErasing": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.RandomErasing", + "args_list": [ + "p", + "scale", + "ratio", + "value", + "inplace" + ], + "kwargs_change": { + "p": "prob" + } + }, + "torchvision.transforms.RandomHorizontalFlip": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.RandomHorizontalFlip", + "args_list": [ + "p" + ], + "kwargs_change": { + "p": "prob" + } + }, + "torchvision.transforms.RandomPerspective": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.RandomPerspective", + "args_list": [ + "distortion_scale", + "p", + "interpolation", + "fill" + ], + "kwargs_change": { + "p": "prob" + } + }, + "torchvision.transforms.RandomResizedCrop": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.RandomResizedCrop", + "args_list": [ + "size", + "scale", + "ratio", + "interpolation" + ], + "unsupport_args": [ + "antialias" + ] + }, + "torchvision.transforms.RandomRotation": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.RandomRotation", + "args_list": [ + "degrees", + "interpolation", + "expand", + "center", + "fill" + ] + }, + "torchvision.transforms.RandomVerticalFlip": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.RandomVerticalFlip", + "args_list": [ + "p" + ], + "kwargs_change": { + "p": "prob" + } + }, + "torchvision.transforms.Resize": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.Resize", + "args_list": [ + "size", + "interpolation" + ], + "unsupport_args": [ + "max_size", + "antialias" + ] + }, + "torchvision.transforms.ToTensor": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.ToTensor" + }, + "torchvision.transforms.functional.adjust_brightness": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.adjust_brightness", + "args_list": [ + "img", + "brightness_factor" + ] + }, + "torchvision.transforms.functional.adjust_contrast": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.adjust_contrast", + "args_list": [ + "img", + "contrast_factor" + ] + }, + "torchvision.transforms.functional.adjust_hue": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.adjust_hue", + "args_list": [ + "img", + "hue_factor" + ] + }, + "torchvision.transforms.functional.affine": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.affine", + "args_list": [ + "img", + "angle", + "translate", + "scale", + "shear", + "interpolation", + "fill", + "center" + ] + }, + "torchvision.transforms.functional.center_crop": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.center_crop", + "args_list": [ + "img", + "output_size" + ] + }, + "torchvision.transforms.functional.crop": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.crop", + "args_list": [ + "img", + "top", + "left", + "height", + "width" + ] + }, + "torchvision.transforms.functional.erase": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.erase", + "args_list": [ + "img", + "i", + "j", + "h", + "w", + "v", + "inplace" + ] + }, + "torchvision.transforms.functional.hflip": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.hflip", + "args_list": [ + "img" + ] + }, 
+ "torchvision.transforms.functional.normalize": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.normalize", + "args_list": [ + "img", + "mean", + "std" + ], + "unsupport_args": [ + "inplace" + ] + }, + "torchvision.transforms.functional.pad": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.pad", + "args_list": [ + "img", + "padding", + "fill", + "padding_mode" + ] + }, + "torchvision.transforms.functional.perspective": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.perspective", + "args_list": [ + "img", + "startpoints", + "endpoints", + "interpolation", + "fill" + ] + }, + "torchvision.transforms.functional.resize": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.resize", + "args_list": [ + "img", + "size", + "interpolation" + ], + "unsupport_args": [ + "max_size", + "antialias" + ] + }, + "torchvision.transforms.functional.rotate": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.rotate", + "args_list": [ + "img", + "i", + "j", + "h", + "w", + "v", + "inplace" + ] + }, + "torchvision.transforms.functional.to_grayscale": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.to_grayscale", + "args_list": [ + "img", + "num_output_channels" + ] + }, + "torchvision.transforms.functional.to_tensor": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.to_tensor", + "args_list": [ + "pic" + ] + }, + "torchvision.transforms.functional.vflip": { + "Matcher": "GenericMatcher", + "paddle_api": "paddle.vision.transforms.vflip", + "args_list": [ + "img" + ] + }, "transformers.AddedToken": { "Matcher": "GenericMatcher", "paddle_api": "paddlenlp.transformers.AddedToken", diff --git a/tests/vision/__init__.py b/tests/vision/__init__.py new file mode 100644 index 000000000..c8ae46174 --- /dev/null +++ b/tests/vision/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
diff --git a/tests/vision/image_apibase.py b/tests/vision/image_apibase.py
new file mode 100644
index 000000000..3ceffb6e7
--- /dev/null
+++ b/tests/vision/image_apibase.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+from apibase import APIBase
+from PIL import Image
+
+
+class ImageAPIBase(APIBase):
+    def compare(
+        self,
+        name,
+        pytorch_result,
+        paddle_result,
+        check_value=True,
+        check_dtype=True,
+        check_stop_gradient=True,
+        rtol=1.0e-6,
+        atol=0.0,
+    ):
+        """
+        Compare PIL Images for equality.
+ """ + if isinstance(pytorch_result, Image.Image) and isinstance( + paddle_result, Image.Image + ): + pytorch_array = np.array(pytorch_result) + paddle_array = np.array(paddle_result) + + assert ( + pytorch_array.shape == paddle_array.shape + ), "API ({}): shape mismatch, torch shape is {}, paddle shape is {}".format( + name, pytorch_array.shape, paddle_array.shape + ) + + if check_value: + assert np.array_equal( + pytorch_array, paddle_array + ), "API ({}): image data mismatch".format(name) + return + + super().compare( + name, + pytorch_result, + paddle_result, + check_value, + check_dtype, + check_stop_gradient, + rtol, + atol, + ) diff --git a/tests/vision/test_CenterCrop.py b/tests/vision/test_CenterCrop.py new file mode 100644 index 000000000..1877fd586 --- /dev/null +++ b/tests/vision/test_CenterCrop.py @@ -0,0 +1,93 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.CenterCrop") +img_obj = ImageAPIBase("torchvision.transforms.CenterCrop") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms as transforms + img = torch.tensor([[[0.5, 0.5], [0.5, 0.5]], + [[0.5, 0.5], [0.5, 0.5]], + [[0.5, 0.5], [0.5, 0.5]]]) + center_crop = transforms.CenterCrop((1, 1)) + result = center_crop(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms as transforms + img = torch.tensor([[[0.1, 0.4], [0.7, 1.0]], + [[0.2, 0.5], [0.8, 1.0]], + [[0.3, 0.6], [0.9, 1.0]]]) + center_crop = transforms.CenterCrop([1, 1]) + result = center_crop(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms as transforms + img = Image.new('RGB', (4, 4), color=(100, 100, 100)) + center_crop = transforms.CenterCrop(2) + result = center_crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms as transforms + img = torch.tensor([[[0.1, 0.2, 0.3, 0.4], + [0.5, 0.6, 0.7, 0.8]], + [[0.9, 1.0, 1.1, 1.2], + [1.3, 1.4, 1.5, 1.6]]]) + center_crop = transforms.CenterCrop((2, 2)) + result = center_crop(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms as transforms + img = Image.new('RGB', (3, 3), color=(50, 100, 150)) + center_crop = transforms.CenterCrop((2, 2)) + result = center_crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_Compose.py b/tests/vision/test_Compose.py new file mode 100644 index 000000000..c28a59786 --- /dev/null +++ b/tests/vision/test_Compose.py @@ -0,0 +1,106 @@ +# 
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import textwrap
+
+from apibase import APIBase
+from vision.image_apibase import ImageAPIBase
+
+obj = APIBase("torchvision.transforms.Compose")
+img_obj = ImageAPIBase("torchvision.transforms.Compose")
+
+
+def test_case_1():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms as transforms
+
+        composed = transforms.Compose([
+            transforms.CenterCrop(2),
+            transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
+        ])
+
+        img = torch.tensor([
+            [[0.6, 0.6, 0.6], [0.6, 0.6, 0.6], [0.6, 0.6, 0.6], [0.6, 0.6, 0.6]],
+            [[0.6, 0.6, 0.6], [0.6, 0.6, 0.6], [0.6, 0.6, 0.6], [0.6, 0.6, 0.6]],
+            [[0.6, 0.6, 0.6], [0.6, 0.6, 0.6], [0.6, 0.6, 0.6], [0.6, 0.6, 0.6]],
+        ])
+
+        result = composed(img)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_2():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms as transforms
+
+        composed = transforms.Compose([
+            transforms.Resize((4, 4)),
+        ])
+
+        img = torch.tensor([
+            [[0.2, 0.4], [0.6, 0.8]],
+            [[0.1, 0.3], [0.5, 0.7]],
+            [[0.0, 0.2], [0.4, 0.6]],
+        ])
+
+        result = composed(img)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_3():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        composed = transforms.Compose([
+            transforms.Grayscale(num_output_channels=1),
+        ])
+
+        img = Image.new('RGB', (4, 4), color=(128, 128, 128))
+        result = composed(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
+
+
+def test_case_4():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms as transforms
+
+        composed = transforms.Compose([
+            transforms.CenterCrop(1),
+            transforms.Normalize(mean=[0.0, 0.0, 0.0], std=[1.0, 1.0, 1.0]),
+        ])
+
+        img = torch.tensor([
+            [[0.3, 0.3], [0.3, 0.3], [0.3, 0.3]],
+            [[0.6, 0.6], [0.6, 0.6], [0.6, 0.6]],
+            [[0.9, 0.9], [0.9, 0.9], [0.9, 0.9]],
+        ])
+
+        result = composed(img)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
diff --git a/tests/vision/test_Grayscale.py b/tests/vision/test_Grayscale.py
new file mode 100644
index 000000000..d1237376c
--- /dev/null
+++ b/tests/vision/test_Grayscale.py
@@ -0,0 +1,157 @@
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
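+# Covers the torchvision.transforms.Grayscale -> paddle.vision.transforms.Grayscale mapping.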
+
+import textwrap
+
+from apibase import APIBase
+from vision.image_apibase import ImageAPIBase
+
+obj = APIBase("torchvision.transforms.Grayscale")
+img_obj = ImageAPIBase("torchvision.transforms.Grayscale")
+
+
+def test_case_1():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms as transforms
+
+        grayscale = transforms.Grayscale(num_output_channels=1)
+
+        img = torch.tensor([
+            [[0.2, 0.4],
+             [0.6, 0.8]],
+
+            [[0.1, 0.3],
+             [0.5, 0.7]],
+
+            [[0.0, 0.2],
+             [0.4, 0.6]]
+        ])
+
+        result = grayscale(img)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_2():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms as transforms
+
+        grayscale = transforms.Grayscale(3)
+
+        img = torch.tensor([
+            [[0.1, 0.2, 0.3],
+             [0.4, 0.5, 0.6],
+             [0.7, 0.8, 0.9]],
+
+            [[0.2, 0.3, 0.4],
+             [0.5, 0.6, 0.7],
+             [0.8, 0.9, 1.0]],
+
+            [[0.3, 0.4, 0.5],
+             [0.6, 0.7, 0.8],
+             [0.9, 1.0, 1.1]]
+        ])
+
+        result = grayscale(img)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_3():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        num = 1
+        grayscale = transforms.Grayscale(num_output_channels=num)
+
+        img = Image.new('RGB', (2, 2), color=(255, 0, 0))  # Red image
+
+        result = grayscale(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
+
+
+def test_case_4():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        grayscale = transforms.Grayscale(num_output_channels=3)
+
+        img = Image.new('RGB', (3, 3), color=(0, 255, 0))  # Green image
+
+        result = grayscale(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
+
+
+def test_case_5():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms as transforms
+
+        grayscale = transforms.Grayscale(1)
+
+        img = torch.tensor([
+            [
+                [[0.3, 0.3, 0.3, 0.3],
+                 [0.3, 0.3, 0.3, 0.3],
+                 [0.3, 0.3, 0.3, 0.3],
+                 [0.3, 0.3, 0.3, 0.3]],
+
+                [[0.6, 0.6, 0.6, 0.6],
+                 [0.6, 0.6, 0.6, 0.6],
+                 [0.6, 0.6, 0.6, 0.6],
+                 [0.6, 0.6, 0.6, 0.6]],
+
+                [[0.9, 0.9, 0.9, 0.9],
+                 [0.9, 0.9, 0.9, 0.9],
+                 [0.9, 0.9, 0.9, 0.9],
+                 [0.9, 0.9, 0.9, 0.9]]
+            ]
+        ])
+
+        result = grayscale(img)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_6():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        grayscale = transforms.Grayscale(num_output_channels=3)
+
+        img = Image.new('RGB', (5, 5), color=(0, 0, 255))  # Blue image
+
+        result = grayscale(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
diff --git a/tests/vision/test_Normalize.py b/tests/vision/test_Normalize.py
new file mode 100644
index 000000000..e78f5501c
--- /dev/null
+++ b/tests/vision/test_Normalize.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
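+# Covers the torchvision.transforms.Normalize -> paddle.vision.transforms.Normalize
+# mapping; the torch-only inplace argument is listed in unsupport_args.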
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.Normalize") +img_obj = ImageAPIBase("torchvision.transforms.Normalize") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms as transforms + + normalize = transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) + + img = torch.tensor([ + [[0.5, 0.5], + [0.5, 0.5]], + + [[0.5, 0.5], + [0.5, 0.5]], + + [[0.5, 0.5], + [0.5, 0.5]] + ]) + + result = normalize(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms as transforms + + normalize = transforms.Normalize(mean=[0.0, 0.0, 0.0], std=[1.0, 1.0, 1.0]) + + img = torch.tensor([ + [[1.0, 2.0, 3.0], + [4.0, 5.0, 6.0], + [7.0, 8.0, 9.0]], + + [[10.0, 11.0, 12.0], + [13.0, 14.0, 15.0], + [16.0, 17.0, 18.0]], + + [[19.0, 20.0, 21.0], + [22.0, 23.0, 24.0], + [25.0, 26.0, 27.0]] + ]) + + result = normalize(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms as transforms + + normalize = transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)) + + img = torch.tensor([ + [ + [[0.5, 0.5, 0.5, 0.5], + [0.5, 0.5, 0.5, 0.5], + [0.5, 0.5, 0.5, 0.5], + [0.5, 0.5, 0.5, 0.5]], + + [[0.4, 0.4, 0.4, 0.4], + [0.4, 0.4, 0.4, 0.4], + [0.4, 0.4, 0.4, 0.4], + [0.4, 0.4, 0.4, 0.4]], + + [[0.3, 0.3, 0.3, 0.3], + [0.3, 0.3, 0.3, 0.3], + [0.3, 0.3, 0.3, 0.3], + [0.3, 0.3, 0.3, 0.3]] + ], + [ + [[0.6, 0.6, 0.6, 0.6], + [0.6, 0.6, 0.6, 0.6], + [0.6, 0.6, 0.6, 0.6], + [0.6, 0.6, 0.6, 0.6]], + + [[0.7, 0.7, 0.7, 0.7], + [0.7, 0.7, 0.7, 0.7], + [0.7, 0.7, 0.7, 0.7], + [0.7, 0.7, 0.7, 0.7]], + + [[0.8, 0.8, 0.8, 0.8], + [0.8, 0.8, 0.8, 0.8], + [0.8, 0.8, 0.8, 0.8], + [0.8, 0.8, 0.8, 0.8]] + ] + ]) + + result = normalize(img) + """ + ) + obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_Pad.py b/tests/vision/test_Pad.py new file mode 100644 index 000000000..4f51a00ec --- /dev/null +++ b/tests/vision/test_Pad.py @@ -0,0 +1,152 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
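+
+# Scope note: Pad accepts an int, a 2-sequence (left/right, top/bottom), or a
+# 4-sequence (left, top, right, bottom); the cases below exercise the
+# 'constant', 'reflect', and 'symmetric' modes on tensor, 'L', 'RGB', and
+# 'RGBA' inputs.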
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.Pad") +img_obj = ImageAPIBase("torchvision.transforms.Pad") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import Pad + + padding = 2 + fill = 0 + padding_mode = 'constant' + + img = torch.tensor([ + [[1, 2], + [3, 4]], + + [[5, 6], + [7, 8]], + + [[9, 10], + [11, 12]] + ], dtype=torch.float) + + pad = Pad(padding=padding, fill=fill, padding_mode=padding_mode) + + result = pad(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import Pad + + padding = [1, 2, 3, 4] + fill = 1.0 + padding_mode = 'constant' + + img = torch.tensor([ + [[1, 2, 3], + [4, 5, 6], + [7, 8, 9]], + + [[10, 11, 12], + [13, 14, 15], + [16, 17, 18]], + + [[19, 20, 21], + [22, 23, 24], + [25, 26, 27]] + ], dtype=torch.float) + + pad = Pad(padding=padding, fill=fill, padding_mode=padding_mode) + + result = pad(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import Pad + + padding = [2, 3] + fill = (255, 0, 0) + padding_mode = 'constant' + + img = Image.new('RGB', (2, 2), color=(0, 255, 0)) + + pad = Pad(padding=padding, fill=fill, padding_mode=padding_mode) + + result = pad(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import Pad + + padding = 1 + padding_mode = 'reflect' + + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + pad = Pad(padding=padding, padding_mode=padding_mode) + + result = pad(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import Pad + + padding = [1, 1, 1, 1] + fill = (0, 0, 255, 128) + padding_mode = 'symmetric' + + img = Image.new('RGBA', (5, 5), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((4, 4), (0, 255, 0, 255)) + + pad = Pad(padding=padding, fill=fill, padding_mode=padding_mode) + + result = pad(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_RandomCrop.py b/tests/vision/test_RandomCrop.py new file mode 100644 index 000000000..60241d135 --- /dev/null +++ b/tests/vision/test_RandomCrop.py @@ -0,0 +1,130 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
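+
+# Scope note: the crop location is random, so each case seeds torch's RNG and
+# runs with check_value=False, comparing results structurally rather than by
+# exact pixel values.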
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.RandomCrop") +img_obj = ImageAPIBase("torchvision.transforms.RandomCrop") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomCrop + + torch.manual_seed(0) + + size = 2 + crop = RandomCrop(size=size) + + img = torch.tensor([ + [[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16]], + + [[17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32]], + + [[33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48]] + ], dtype=torch.float) + + result = crop(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomCrop + + torch.manual_seed(1) + + size = [3, 3] + padding = 1 + crop = RandomCrop(size=size, padding=padding, fill=0, padding_mode='constant') + + from PIL import Image + img = Image.new('RGB', (4, 4), color=(255, 255, 255)) + img.putpixel((0, 0), (255, 0, 0)) + img.putpixel((3, 3), (0, 255, 0)) + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomCrop + + torch.manual_seed(3) + + size = (3, 3) + padding = [1, 1, 1, 1] + pad_if_needed = True + fill = (0, 0, 0) + padding_mode = 'reflect' + crop = RandomCrop(size=size, padding=padding, pad_if_needed=pad_if_needed, fill=fill, padding_mode=padding_mode) + + from PIL import Image + img = Image.new('RGB', (3, 3), color=(100, 100, 100)) + img.putpixel((0, 0), (255, 0, 0)) + img.putpixel((2, 2), (0, 255, 0)) + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomCrop + + torch.manual_seed(5) + + size = (4, 4) + padding = 2 + pad_if_needed = True + padding_mode = 'edge' + fill = 0 + crop = RandomCrop(size=size, padding=padding, pad_if_needed=pad_if_needed, fill=fill, padding_mode=padding_mode) + + from PIL import Image + img = Image.new('RGB', (6, 6), color=(50, 100, 150)) + img.putpixel((0, 0), (255, 0, 0)) + img.putpixel((5, 5), (0, 255, 0)) + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) diff --git a/tests/vision/test_RandomErasing.py b/tests/vision/test_RandomErasing.py new file mode 100644 index 000000000..a3074b34f --- /dev/null +++ b/tests/vision/test_RandomErasing.py @@ -0,0 +1,185 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
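+
+# Scope note: RandomErasing selects the erased region at random, so every case
+# runs with check_value=False; the p=0.0 case is expected to return the input
+# unchanged.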
+ +import textwrap + +from apibase import APIBase + +obj = APIBase("torchvision.transforms.RandomErasing") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomErasing, InterpolationMode + + torch.manual_seed(0) + + transform = RandomErasing(p=0.3, scale=(0.2, 0.4), ratio=(0.3, 3.3), value=0, inplace=False) + + img = torch.tensor([ + [ + [0, 20, 30, 40], + [50, 60, 70, 80], + [90, 100, 110, 120], + [130, 140, 150, 160] + ], + [ + [15, 25, 35, 45], + [55, 65, 75, 85], + [95, 105, 115, 125], + [135, 145, 155, 165] + ], + [ + [20, 30, 40, 50], + [60, 70, 80, 90], + [100, 110, 120, 130], + [140, 150, 160, 170] + ] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomErasing, InterpolationMode + + torch.manual_seed(1) + + transform = RandomErasing(0.0, (0.1, 0.2), (0.5, 2.0), 1, True) + + img = torch.tensor([ + [ + [5, 10, 15, 20, 25], + [30, 35, 40, 45, 50], + [55, 60, 65, 70, 75], + [80, 85, 90, 95, 100], + [105, 110, 115, 120, 125] + ] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomErasing, InterpolationMode + + torch.manual_seed(2) + + scale = (0.1, 0.3) + ratio = (0.5, 2.0) + value = (255, 255, 255) + inplace = False + transform = RandomErasing(p=0.5, scale=scale, ratio=ratio, value=value, inplace=inplace) + + img = torch.tensor([ + [ + [100, 150, 200], + [150, 200, 250], + [200, 250, 300] + ], + [ + [110, 160, 210], + [160, 210, 260], + [210, 260, 310] + ], + [ + [120, 170, 220], + [170, 220, 270], + [220, 270, 320] + ] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomErasing, InterpolationMode + + torch.manual_seed(4) + + transform = RandomErasing(0.3, (0.05, 0.2), ratio=(0.3, 3.3), value=[0, 0, 0], inplace=True) + + img = torch.tensor([ + [ + [ + [10, 20, 30, 40, 50], + [60, 70, 80, 90, 100], + [110, 120, 130, 140, 150], + [160, 170, 180, 190, 200], + [210, 220, 230, 240, 250] + ], + [ + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255] + ], + [ + [128, 128, 128, 128, 128], + [64, 64, 64, 64, 64], + [32, 32, 32, 32, 32], + [16, 16, 16, 16, 16], + [8, 8, 8, 8, 8] + ] + ], + [ + [ + [5, 10, 15, 20, 25], + [30, 35, 40, 45, 50], + [55, 60, 65, 70, 75], + [80, 85, 90, 95, 100], + [105, 110, 115, 120, 125] + ], + [ + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0] + ], + [ + [8, 16, 24, 32, 40], + [16, 32, 48, 64, 80], + [24, 48, 72, 96, 120], + [32, 64, 96, 128, 160], + [40, 80, 120, 160, 200] + ] + ] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) diff --git a/tests/vision/test_RandomHorizontalFlip.py b/tests/vision/test_RandomHorizontalFlip.py new file mode 100644 index 000000000..890bed955 --- /dev/null +++ b/tests/vision/test_RandomHorizontalFlip.py @@ -0,0 +1,179 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.RandomHorizontalFlip") +img_obj = ImageAPIBase("torchvision.transforms.RandomHorizontalFlip") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomHorizontalFlip + + torch.manual_seed(0) + + flip = RandomHorizontalFlip(1.0) + + img = torch.tensor([ + [[1, 2], + [3, 4]], + + [[5, 6], + [7, 8]], + + [[9, 10], + [11, 12]] + ], dtype=torch.float) + + result = flip(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomHorizontalFlip + + torch.manual_seed(1) + + prob = 0.0 + flip = RandomHorizontalFlip(p=prob) + + img = torch.tensor([ + [[1, 2, 3], + [4, 5, 6], + [7, 8, 9]], + + [[10, 11, 12], + [13, 14, 15], + [16, 17, 18]], + + [[19, 20, 21], + [22, 23, 24], + [25, 26, 27]] + ], dtype=torch.float) + + result = flip(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomHorizontalFlip + + import random + random.seed(3) + + flip = RandomHorizontalFlip(p=0.7) + + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = flip(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomHorizontalFlip + + torch.manual_seed(4) + + flip = RandomHorizontalFlip(p=0.3) + + img = torch.tensor([ + [ + [[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16]], + + [[17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32]], + + [[33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48]] + ], + [ + [[49, 50, 51, 52], + [53, 54, 55, 56], + [57, 58, 59, 60], + [61, 62, 63, 64]], + + [[65, 66, 67, 68], + [69, 70, 71, 72], + [73, 74, 75, 76], + [77, 78, 79, 80]], + + [[81, 82, 83, 84], + [85, 86, 87, 88], + [89, 90, 91, 92], + [93, 94, 95, 96]] + ] + ], dtype=torch.float) + + result = flip(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomHorizontalFlip + + import random + random.seed(5) + + flip = RandomHorizontalFlip(p=0.9) + + img = Image.new('RGBA', (6, 6), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((5, 5), (0, 255, 0, 255)) + + result = flip(img) + """ + ) + img_obj.run(pytorch_code, 
["result"], check_value=False) diff --git a/tests/vision/test_RandomPerspective.py b/tests/vision/test_RandomPerspective.py new file mode 100644 index 000000000..b8b2f5472 --- /dev/null +++ b/tests/vision/test_RandomPerspective.py @@ -0,0 +1,206 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.RandomPerspective") +img_obj = ImageAPIBase("torchvision.transforms.RandomPerspective") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomPerspective, InterpolationMode + + torch.manual_seed(0) + + transform = RandomPerspective(distortion_scale=0.5, p=1.0, interpolation=InterpolationMode.BILINEAR, fill=0) + + img = torch.tensor([ + [[255, 0, 0], + [0, 255, 0], + [0, 0, 255]], + + [[255, 255, 0], + [0, 255, 255], + [255, 0, 255]], + + [[128, 128, 128], + [64, 64, 64], + [32, 32, 32]] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomPerspective, InterpolationMode + + torch.manual_seed(1) + + mode = InterpolationMode.NEAREST + transform = RandomPerspective(distortion_scale=0.7, p=0.0, interpolation=mode, fill=[1]) + + img = torch.tensor([ + [[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16]], + + [[17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32]], + + [[33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48]] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomPerspective, InterpolationMode + + import random + random.seed(2) + + transform = RandomPerspective(distortion_scale=0.3, p=0.5, interpolation=InterpolationMode.BICUBIC, fill=(255, 255, 255)) + + img = Image.new('RGB', (4, 4), color=(255, 255, 255)) + img.putpixel((0, 0), (255, 0, 0)) # Red + img.putpixel((3, 3), (0, 255, 0)) # Green + img.putpixel((1, 1), (0, 0, 255)) # Blue + + result = transform(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomPerspective, InterpolationMode + + import random + random.seed(3) + + transform = RandomPerspective(distortion_scale=0.6, p=0.8, interpolation=InterpolationMode.NEAREST, fill=0) + + img = Image.new('L', (5, 5), color=128) # Gray image + img.putpixel((0, 0), 50) + img.putpixel((4, 4), 200) + + result = transform(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_5(): + pytorch_code = textwrap.dedent( 
+ """ + import torch + from torchvision.transforms import RandomPerspective, InterpolationMode + + torch.manual_seed(4) + + transform = RandomPerspective(distortion_scale=0.4, p=0.3, interpolation=InterpolationMode.BILINEAR, fill=[0, 0, 0]) + + img = torch.tensor([ + [ + [[10, 20, 30, 40, 50], + [60, 70, 80, 90, 100], + [110, 120, 130, 140, 150], + [160, 170, 180, 190, 200], + [210, 220, 230, 240, 250]], + + [[255, 0, 255, 0, 255], + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255]], + + [[128, 128, 128, 128, 128], + [64, 64, 64, 64, 64], + [32, 32, 32, 32, 32], + [16, 16, 16, 16, 16], + [8, 8, 8, 8, 8]] + ], + [ + [[5, 10, 15, 20, 25], + [30, 35, 40, 45, 50], + [55, 60, 65, 70, 75], + [80, 85, 90, 95, 100], + [105, 110, 115, 120, 125]], + + [[0, 255, 0, 255, 0], + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0], + [255, 0, 255, 0, 255], + [0, 255, 0, 255, 0]], + + [[8, 16, 24, 32, 40], + [16, 32, 48, 64, 80], + [24, 48, 72, 96, 120], + [32, 64, 96, 128, 160], + [40, 80, 120, 160, 200]] + ] + ], dtype=torch.float) + + result = transform(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_6(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomPerspective, InterpolationMode + + import random + random.seed(5) + + transform = RandomPerspective(distortion_scale=0.9, p=0.9, interpolation=InterpolationMode.BILINEAR, fill=(255, 255, 255, 255)) + + img = Image.new('RGBA', (6, 6), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((5, 5), (0, 255, 0, 255)) + + result = transform(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) diff --git a/tests/vision/test_RandomResizedCrop.py b/tests/vision/test_RandomResizedCrop.py new file mode 100644 index 000000000..b2c7f9200 --- /dev/null +++ b/tests/vision/test_RandomResizedCrop.py @@ -0,0 +1,158 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.RandomResizedCrop") +img_obj = ImageAPIBase("torchvision.transforms.RandomResizedCrop") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomResizedCrop + + torch.manual_seed(0) + + size = 2 + crop = RandomResizedCrop(size=size) + + img = torch.tensor([ + [[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16]], + + [[17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32]], + + [[33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48]] + ], dtype=torch.float) + + result = crop(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomResizedCrop, InterpolationMode + + torch.manual_seed(1) + + size = (3, 3) + scale = (0.5, 0.9) + ratio = (0.8, 1.2) + crop = RandomResizedCrop(size=size, scale=scale, ratio=ratio, interpolation=InterpolationMode.BILINEAR) + + from PIL import Image + img = Image.new('RGB', (4, 4), color=(255, 255, 255)) # White image + img.putpixel((0, 0), (255, 0, 0)) # Red + img.putpixel((3, 3), (0, 255, 0)) # Green + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomResizedCrop, InterpolationMode + + torch.manual_seed(1) + + size = [3, 3] + scale = (0.5, 0.9) + ratio = (0.8, 1.2) + crop = RandomResizedCrop(size=size, scale=scale, ratio=ratio, interpolation=InterpolationMode.BILINEAR) + + from PIL import Image + img = Image.new('RGB', (4, 4), color=(255, 255, 255)) # White image + img.putpixel((0, 0), (255, 0, 0)) # Red + img.putpixel((3, 3), (0, 255, 0)) # Green + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomResizedCrop, InterpolationMode + + torch.manual_seed(3) + + size = (3, 3) + scale = (0.8, 0.8) # Fixed scale + ratio = (1.0, 1.0) # Fixed aspect ratio + crop = RandomResizedCrop(size=size, scale=scale, ratio=ratio, interpolation=InterpolationMode.BICUBIC) + + from PIL import Image + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomResizedCrop, InterpolationMode + + torch.manual_seed(5) + + size = (4, 4) + scale = (0.3, 0.8) + ratio = (0.75, 1.3333) + crop = RandomResizedCrop(size=size, scale=scale, ratio=ratio, interpolation=InterpolationMode.BICUBIC) + + from PIL import Image + img = Image.new('RGBA', (6, 6), color=(0, 0, 255, 128)) # Semi-transparent Blue + img.putpixel((0, 0), (255, 0, 0, 255)) # Red + img.putpixel((5, 5), (0, 255, 0, 255)) # Green + + result = crop(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) diff --git a/tests/vision/test_RandomRotation.py 
b/tests/vision/test_RandomRotation.py new file mode 100644 index 000000000..c228ee242 --- /dev/null +++ b/tests/vision/test_RandomRotation.py @@ -0,0 +1,130 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.RandomRotation") +img_obj = ImageAPIBase("torchvision.transforms.RandomRotation") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomRotation, InterpolationMode + + torch.manual_seed(0) + + degrees = 45 + rotation = RandomRotation(degrees=degrees) + + img = torch.tensor([ + [[1, 2], + [3, 4]], + + [[5, 6], + [7, 8]], + + [[9, 10], + [11, 12]] + ], dtype=torch.float) + + result = rotation(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomRotation, InterpolationMode + from PIL import Image + + import random + random.seed(1) + + degrees = [-30, 30] + rotation = RandomRotation(degrees=degrees, interpolation=InterpolationMode.BILINEAR, expand=True) + + img = Image.new('RGB', (3, 3), color=(255, 0, 0)) + img.putpixel((0, 0), (0, 255, 0)) + img.putpixel((2, 2), (0, 0, 255)) + + result = rotation(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomRotation, InterpolationMode + + torch.manual_seed(2) + + degrees = 90 + center = (1, 1) + fill = [255, 255, 255] + rotation = RandomRotation(degrees=degrees, center=center, fill=fill) + + img = torch.tensor([ + [[10, 20, 30], + [40, 50, 60], + [70, 80, 90]], + + [[15, 25, 35], + [45, 55, 65], + [75, 85, 95]], + + [[12, 22, 32], + [42, 52, 62], + [72, 82, 92]] + ], dtype=torch.float) + + result = rotation(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomRotation, InterpolationMode + from PIL import Image + + import random + random.seed(5) + + degrees = (-90, 90) + center = (2, 2) + fill = (0, 0, 255, 128) + rotation = RandomRotation(degrees=degrees, interpolation=InterpolationMode.BICUBIC, expand=True, center=center, fill=fill) + + img = Image.new('RGBA', (5, 5), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((4, 4), (0, 255, 0, 255)) + + result = rotation(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) diff --git a/tests/vision/test_RandomVerticalFlip.py b/tests/vision/test_RandomVerticalFlip.py new file mode 100644 index 000000000..f0e2db6f9 --- /dev/null +++ b/tests/vision/test_RandomVerticalFlip.py @@ -0,0 +1,179 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.RandomVerticalFlip") +img_obj = ImageAPIBase("torchvision.transforms.RandomVerticalFlip") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomVerticalFlip + + torch.manual_seed(0) + + flip = RandomVerticalFlip(p=1.0) + + img = torch.tensor([ + [[1, 2], + [3, 4]], + + [[5, 6], + [7, 8]], + + [[9, 10], + [11, 12]] + ], dtype=torch.float) + + result = flip(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomVerticalFlip + + torch.manual_seed(1) + + flip = RandomVerticalFlip(0.0) + + img = torch.tensor([ + [[1, 2, 3], + [4, 5, 6], + [7, 8, 9]], + + [[10, 11, 12], + [13, 14, 15], + [16, 17, 18]], + + [[19, 20, 21], + [22, 23, 24], + [25, 26, 27]] + ], dtype=torch.float) + + result = flip(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomVerticalFlip + + import random + random.seed(3) + + prob = 0.7 + flip = RandomVerticalFlip(p=prob) + + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = flip(img) + """ + ) + img_obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import RandomVerticalFlip + + torch.manual_seed(4) + + flip = RandomVerticalFlip(p=0.3) + + img = torch.tensor([ + [ + [[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16]], + + [[17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32]], + + [[33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48]] + ], + [ + [[49, 50, 51, 52], + [53, 54, 55, 56], + [57, 58, 59, 60], + [61, 62, 63, 64]], + + [[65, 66, 67, 68], + [69, 70, 71, 72], + [73, 74, 75, 76], + [77, 78, 79, 80]], + + [[81, 82, 83, 84], + [85, 86, 87, 88], + [89, 90, 91, 92], + [93, 94, 95, 96]] + ] + ], dtype=torch.float) + + result = flip(img) + """ + ) + obj.run(pytorch_code, ["result"], check_value=False) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + from torchvision.transforms import RandomVerticalFlip + + import random + random.seed(5) + + flip = RandomVerticalFlip(p=0.9) + + img = Image.new('RGBA', (5, 5), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((4, 4), (0, 255, 0, 255)) + + result = flip(img) + """ + ) + img_obj.run(pytorch_code, ["result"], 
check_value=False) diff --git a/tests/vision/test_Resize.py b/tests/vision/test_Resize.py new file mode 100644 index 000000000..b5dabd402 --- /dev/null +++ b/tests/vision/test_Resize.py @@ -0,0 +1,135 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.Resize") +img_obj = ImageAPIBase("torchvision.transforms.Resize") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import Resize, InterpolationMode + from PIL import Image + + torch.manual_seed(1) + + size = (3, 3) + resize = Resize(size=size, interpolation=InterpolationMode.BILINEAR) + + img = Image.new('RGB', (4, 4), color=(255, 255, 255)) + img.putpixel((0, 0), (255, 0, 0)) + img.putpixel((3, 3), (0, 255, 0)) + + result = resize(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import Resize, InterpolationMode + from PIL import Image + + torch.manual_seed(3) + + size = 3 + resize = Resize(size=size, interpolation=InterpolationMode.BICUBIC) + + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = resize(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import Resize, InterpolationMode + + torch.manual_seed(4) + + size = [4, 4] + resize = Resize(size=size, interpolation=InterpolationMode.NEAREST) + + img = torch.tensor([ + [ + [[1, 2, 3, 4, 5], + [6, 7, 8, 9, 10], + [11, 12, 13, 14, 15], + [16, 17, 18, 19, 20], + [21, 22, 23, 24, 25]], + + [[26, 27, 28, 29, 30], + [31, 32, 33, 34, 35], + [36, 37, 38, 39, 40], + [41, 42, 43, 44, 45], + [46, 47, 48, 49, 50]], + + [[51, 52, 53, 54, 55], + [56, 57, 58, 59, 60], + [61, 62, 63, 64, 65], + [66, 67, 68, 69, 70], + [71, 72, 73, 74, 75]] + ] + ], dtype=torch.float) + + img = img[0] + + result = resize(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import Resize, InterpolationMode + from PIL import Image + + torch.manual_seed(5) + + size = (4, 4) + resize = Resize(size=size, interpolation=InterpolationMode.BICUBIC) + + img = Image.new('RGBA', (6, 6), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((5, 5), (0, 255, 0, 255)) + + result = resize(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_ToTensor.py b/tests/vision/test_ToTensor.py new file mode 100644 index 000000000..65dab810a --- /dev/null +++ 
b/tests/vision/test_ToTensor.py
@@ -0,0 +1,101 @@
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import textwrap
+
+from apibase import APIBase
+from vision.image_apibase import ImageAPIBase
+
+obj = APIBase("torchvision.transforms.ToTensor")
+img_obj = ImageAPIBase("torchvision.transforms.ToTensor")
+
+
+def test_case_1():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        img = Image.new('RGB', (3, 3), color=(255, 0, 0))
+
+        result = transforms.ToTensor()(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
+
+
+def test_case_2():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        img = Image.new('L', (4, 4), color=128)
+
+        result = transforms.ToTensor()(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
+
+
+def test_case_3():
+    pytorch_code = textwrap.dedent(
+        """
+        import numpy as np
+        import torchvision.transforms as transforms
+
+        img_np = np.array([
+            [[255, 0, 0], [0, 255, 0], [0, 0, 255], [255, 255, 0], [0, 255, 255]],
+            [[255, 0, 255], [192, 192, 192], [128, 128, 128], [64, 64, 64], [0, 0, 0]],
+            [[255, 165, 0], [0, 128, 128], [128, 0, 128], [128, 128, 0], [0, 0, 128]],
+            [[75, 0, 130], [238, 130, 238], [245, 222, 179], [255, 105, 180], [0, 255, 127]],
+            [[255, 20, 147], [173, 216, 230], [144, 238, 144], [255, 182, 193], [64, 224, 208]]
+        ], dtype=np.uint8)
+
+        result = transforms.ToTensor()(img_np)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_4():
+    pytorch_code = textwrap.dedent(
+        """
+        from PIL import Image
+        import torchvision.transforms as transforms
+
+        img = Image.new('RGBA', (2, 4), color=(0, 0, 255, 128))
+
+        result = transforms.ToTensor()(img)
+        """
+    )
+    img_obj.run(pytorch_code, ["result"])
+
+
+def test_case_5():
+    pytorch_code = textwrap.dedent(
+        """
+        import numpy as np
+        import torchvision.transforms as transforms
+
+        img_np = np.array([
+            [0, 128, 255],
+            [64, 192, 32],
+            [16, 240, 80]
+        ], dtype=np.uint8)
+
+        result = transforms.ToTensor()(img_np)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
diff --git a/tests/vision/test_adjust_brightness.py b/tests/vision/test_adjust_brightness.py
new file mode 100644
index 000000000..20ca819e4
--- /dev/null
+++ b/tests/vision/test_adjust_brightness.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
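+
+# Scope note: adjust_brightness multiplies pixel values by brightness_factor
+# (0.0 yields a black image, 1.0 is the identity); the result is deterministic,
+# so tensor and PIL outputs are compared by value.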
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.adjust_brightness") +img_obj = ImageAPIBase("torchvision.transforms.functional.adjust_brightness") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.5, 0.5], [0.5, 0.5]], [[0.5, 0.5], [0.5, 0.5]], [[0.5, 0.5], [0.5, 0.5]]]) + factor = 2.0 + result = F.adjust_brightness(img, factor) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.1, 0.4], [0.7, 1.0]], + [[0.2, 0.5], [0.8, 1.0]], + [[0.3, 0.6], [0.9, 1.0]]]) + result = F.adjust_brightness(img, 0.0) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (2, 2), color=(100, 100, 100)) + result = F.adjust_brightness(img, 1.0) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + result = F.adjust_brightness(torch.tensor([[[0.1, 0.2], [0.3, 0.4]], [[0.5, 0.6], [0.7, 0.8]], [[0.9, 1.0], [1.0, 1.0]]]), 2.0) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (2, 2), color=(50, 100, 150)) + result = F.adjust_brightness(img, 2.5) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_adjust_contrast.py b/tests/vision/test_adjust_contrast.py new file mode 100644 index 000000000..08d3f82c4 --- /dev/null +++ b/tests/vision/test_adjust_contrast.py @@ -0,0 +1,87 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
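+
+# Scope note: adjust_contrast blends the image toward its mean gray level
+# (contrast_factor 1.0 is the identity); deterministic, so outputs are compared
+# by value.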
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.adjust_contrast") +img_obj = ImageAPIBase("torchvision.transforms.functional.adjust_contrast") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.5, 0.5], [0.5, 0.5]], + [[0.5, 0.5], [0.5, 0.5]], + [[0.5, 0.5], [0.5, 0.5]]]) + contrast_factor = 2.0 + result = F.adjust_contrast(img, contrast_factor) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.1, 0.4], [0.7, 1.0]], + [[0.2, 0.5], [0.8, 1.0]], + [[0.3, 0.6], [0.9, 1.0]]]) + result = F.adjust_contrast(img, 1.0) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (2, 2), color=(100, 100, 100)) + result = F.adjust_contrast(img, 1.0) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + result = F.adjust_contrast(torch.tensor([[[0.1, 0.2], [0.3, 0.4]], + [[0.5, 0.6], [0.7, 0.8]], + [[0.9, 1.0], [1.0, 1.0]]]), 2.0) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (2, 2), color=(50, 100, 150)) + result = F.adjust_contrast(img, 0.5) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_adjust_hue.py b/tests/vision/test_adjust_hue.py new file mode 100644 index 000000000..a67ecc6e6 --- /dev/null +++ b/tests/vision/test_adjust_hue.py @@ -0,0 +1,87 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
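+
+# Scope note: adjust_hue shifts the hue channel in HSV space; hue_factor must
+# lie in [-0.5, 0.5], and both boundary values are covered below.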
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.adjust_hue") +img_obj = ImageAPIBase("torchvision.transforms.functional.adjust_hue") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.5, 0.5], [0.5, 0.5]], + [[0.5, 0.5], [0.5, 0.5]], + [[0.5, 0.5], [0.5, 0.5]]]) + hue_factor = 0.5 + result = F.adjust_hue(img, hue_factor) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.1, 0.4], [0.7, 1.0]], + [[0.2, 0.5], [0.8, 1.0]], + [[0.3, 0.6], [0.9, 1.0]]]) + result = F.adjust_hue(img, 0.0) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (2, 2), color=(100, 100, 100)) + result = F.adjust_hue(img, 0.25) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + result = F.adjust_hue(torch.tensor([[[0.0, 0.2], [0.3, 0.4]], + [[0.5, 0.6], [0.7, 0.8]], + [[0.9, 1.0], [1.0, 1.0]]]), -0.5) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (2, 2), color=(50, 100, 150)) + result = F.adjust_hue(img, -0.25) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_center_crop.py b/tests/vision/test_center_crop.py new file mode 100644 index 000000000..cda90cb74 --- /dev/null +++ b/tests/vision/test_center_crop.py @@ -0,0 +1,92 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
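+
+# Scope note: center_crop returns the central output_size region and works for
+# any channel count (cases 2 and 4 use 2-channel tensors); keyword and
+# positional argument styles are varied deliberately to exercise the converter.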
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.center_crop") +img_obj = ImageAPIBase("torchvision.transforms.functional.center_crop") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.5, 0.5, 0.5], [0.5, 0.5, 0.5]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5]]]) + output_size = [1, 1] + result = F.center_crop(img, output_size) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + output_size = [1, 1] + result = F.center_crop(img = torch.tensor([[[0.1, 0.4, 0.7], + [0.2, 0.5, 0.8]], [[0.3, 0.6, 0.9], [0.4, 0.7, 1.0]]]), + output_size=output_size) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (4, 4), color=(100, 100, 100)) + output_size = (2, 2) + result = F.center_crop(img, output_size) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + img = torch.tensor([[[0.1, 0.2, 0.3, 0.4], + [0.5, 0.6, 0.7, 0.8]], + [[0.9, 1.0, 1.1, 1.2], + [1.3, 1.4, 1.5, 1.6]]]) + output_size = [2, 2] + result = F.center_crop(img, output_size) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + img = Image.new('RGB', (3, 3), color=(50, 100, 150)) + output_size = (2, 2) + result = F.center_crop(img, output_size) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_crop.py b/tests/vision/test_crop.py new file mode 100644 index 000000000..c6dcc143f --- /dev/null +++ b/tests/vision/test_crop.py @@ -0,0 +1,126 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
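+
+# Scope note: crop(img, top, left, height, width) takes the region whose upper
+# left corner is (top, left); in torchvision, a PIL region extending past the
+# image (case 3) is filled with zeros rather than rejected.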
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.crop") +img_obj = ImageAPIBase("torchvision.transforms.functional.crop") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + img = torch.tensor([ + [[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16]], + + [[17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32]], + + [[33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48]] + ]) + + top, left, height, width = 1, 1, 2, 2 + result = F.crop(img, top, left, height, width) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (4, 4), color=(255, 0, 0)) + + result = F.crop(img, 0, 0, 2, 2) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + top, left, height, width = 1, 1, 3, 3 + result = F.crop(Image.new('RGB', (3, 3), color=(0, 255, 0)), top, left, height, width) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + img = torch.tensor([ + [[10, 20, 30, 40, 50], + [60, 70, 80, 90, 100], + [110, 120, 130, 140, 150], + [160, 170, 180, 190, 200], + [210, 220, 230, 240, 250]], + + [[255, 245, 235, 225, 215], + [205, 195, 185, 175, 165], + [155, 145, 135, 125, 115], + [105, 95, 85, 75, 65], + [55, 45, 35, 25, 15]], + + [[5, 15, 25, 35, 45], + [55, 65, 75, 85, 95], + [105, 115, 125, 135, 145], + [155, 165, 175, 185, 195], + [205, 215, 225, 235, 245]] + ]) + + top, left, height, width = 2, 2, 2, 2 + result = F.crop(img, top, left, height, width) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (5, 5), color=(0, 0, 255)) + + top, left, height, width = 3, 3, 2, 2 + result = F.crop(img, top, left, height, width) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_erase.py b/tests/vision/test_erase.py new file mode 100644 index 000000000..5861ce033 --- /dev/null +++ b/tests/vision/test_erase.py @@ -0,0 +1,152 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
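+
+# Scope note: erase(img, i, j, h, w, v) overwrites img[..., i:i+h, j:j+w] with
+# v, where v is a number or a tensor broadcastable to the erased region.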
+
+import textwrap
+
+from apibase import APIBase
+
+obj = APIBase("torchvision.transforms.functional.erase")
+
+
+def test_case_1():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms.functional as F
+
+        i, j, h, w = 1, 1, 2, 2  # Upper-left corner and size of the region to erase
+        v = 0.0  # Scalar fill value
+        inplace = False
+
+        img = torch.tensor([
+            [
+                [10, 20, 30, 40],
+                [50, 60, 70, 80],
+                [90, 100, 110, 120],
+                [130, 140, 150, 160]
+            ],
+            [
+                [15, 25, 35, 45],
+                [55, 65, 75, 85],
+                [95, 105, 115, 125],
+                [135, 145, 155, 165]
+            ],
+            [
+                [20, 30, 40, 50],
+                [60, 70, 80, 90],
+                [100, 110, 120, 130],
+                [140, 150, 160, 170]
+            ]
+        ], dtype=torch.float)
+
+        result = F.erase(img, i, j, h, w, v, inplace)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_2():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms.functional as F
+
+        i, j, h, w = 0, 0, 3, 3  # Upper-left corner and size of the region to erase
+        v = 1.0  # Scalar fill value
+        inplace = True
+
+        img = torch.tensor([
+            [
+                [5, 10, 15, 20, 25],
+                [30, 35, 40, 45, 50],
+                [55, 60, 65, 70, 75],
+                [80, 85, 90, 95, 100],
+                [105, 110, 115, 120, 125]
+            ]
+        ], dtype=torch.float)
+
+        result = F.erase(img=img, i=i, j=j, h=h, w=w, v=v, inplace=inplace)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_3():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms.functional as F
+
+        i, j, h, w = 3, 3, 2, 2  # Upper-left corner and size of the region to erase (partially outside)
+        v = 0.5  # Scalar fill value
+        inplace = False
+
+        img = torch.tensor([
+            [
+                [10, 20, 30, 40],
+                [50, 60, 70, 80],
+                [90, 100, 110, 120],
+                [130, 140, 150, 160]
+            ],
+            [
+                [15, 25, 35, 45],
+                [55, 65, 75, 85],
+                [95, 105, 115, 125],
+                [135, 145, 155, 165]
+            ],
+            [
+                [20, 30, 40, 50],
+                [60, 70, 80, 90],
+                [100, 110, 120, 130],
+                [140, 150, 160, 170]
+            ]
+        ], dtype=torch.float)
+
+        result = F.erase(img, i, j, h, w, v)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
+
+
+def test_case_4():
+    pytorch_code = textwrap.dedent(
+        """
+        import torch
+        import torchvision.transforms.functional as F
+
+        img = torch.tensor([
+            [
+                [1, 2, 3, 4],
+                [5, 6, 7, 8]
+            ],
+            [
+                [9, 10, 11, 12],
+                [13, 14, 15, 16]
+            ],
+            [
+                [17, 18, 19, 20],
+                [21, 22, 23, 24]
+            ],
+            [
+                [25, 26, 27, 28],
+                [29, 30, 31, 32]
+            ]
+        ], dtype=torch.float)
+
+        v = torch.tensor([10.0, 20.0, 30.0, 40.0])
+
+        result = F.erase(img, 0, 0, 1, 4, v, False)
+        """
+    )
+    obj.run(pytorch_code, ["result"])
diff --git a/tests/vision/test_hflip.py b/tests/vision/test_hflip.py
new file mode 100644
index 000000000..ad80d79e2
--- /dev/null
+++ b/tests/vision/test_hflip.py
@@ -0,0 +1,103 @@
+# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
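+
+# Scope note: hflip mirrors the image along its width axis and is deterministic,
+# so tensor and PIL outputs are compared by value.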
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.hflip") +img_obj = ImageAPIBase("torchvision.transforms.functional.hflip") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + img = torch.tensor([ + [[1, 2], + [3, 4]], + + [[5, 6], + [7, 8]], + + [[9, 10], + [11, 12]] + ]) + + result = F.hflip(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + result = F.hflip(torch.tensor([ + [ + [1, 2, 3], + [4, 5, 6], + [7, 8, 9] + ] + ])) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (2, 2)) + img.putpixel((0, 0), (255, 0, 0)) # Red + img.putpixel((1, 0), (0, 255, 0)) # Green + img.putpixel((0, 1), (0, 0, 255)) # Blue + img.putpixel((1, 1), (255, 255, 0)) # Yellow + + result = F.hflip(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = F.hflip(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_normalize.py b/tests/vision/test_normalize.py new file mode 100644 index 000000000..cd8f398a9 --- /dev/null +++ b/tests/vision/test_normalize.py @@ -0,0 +1,146 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
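+ +# Normalize tests: F.normalize computes (img - mean) / std per channel. The +# cases below pass mean/std as tuples, lists, and scalars, and include +# single-channel and batched 4-D inputs.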
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.normalize") +img_obj = ImageAPIBase("torchvision.transforms.functional.normalize") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + mean = 0.5, 0.5, 0.5 + std = [0.5, 0.5, 0.5] + + img = torch.tensor([ + [[0.5, 0.5], + [0.5, 0.5]], + + [[0.5, 0.5], + [0.5, 0.5]], + + [[0.5, 0.5], + [0.5, 0.5]] + ]) + + result = F.normalize(img, mean=mean, std=std) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + mean = (0.0, 0.0, 0.0) + std = [1.0, 1.0, 1.0] + + img = torch.tensor([ + [[1.0, 2.0, 3.0], + [4.0, 5.0, 6.0], + [7.0, 8.0, 9.0]], + + [[10.0, 11.0, 12.0], + [13.0, 14.0, 15.0], + [16.0, 17.0, 18.0]], + + [[19.0, 20.0, 21.0], + [22.0, 23.0, 24.0], + [25.0, 26.0, 27.0]] + ]) + + result = F.normalize(img, mean=mean, std=std) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + mean = 0.5 + std = [0.5] + + img = torch.tensor([ + [[0.2, 0.4], + [0.6, 0.8]] + ]) + + result = F.normalize(img, mean=mean, std=std) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + mean = [0.485, 0.456, 0.406] + std = [0.229, 0.224, 0.225] + + img = torch.tensor([ + [ + [[0.5, 0.5, 0.5, 0.5], + [0.5, 0.5, 0.5, 0.5], + [0.5, 0.5, 0.5, 0.5], + [0.5, 0.5, 0.5, 0.5]], + + [[0.4, 0.4, 0.4, 0.4], + [0.4, 0.4, 0.4, 0.4], + [0.4, 0.4, 0.4, 0.4], + [0.4, 0.4, 0.4, 0.4]], + + [[0.3, 0.3, 0.3, 0.3], + [0.3, 0.3, 0.3, 0.3], + [0.3, 0.3, 0.3, 0.3], + [0.3, 0.3, 0.3, 0.3]] + ], + [ + [[0.6, 0.6, 0.6, 0.6], + [0.6, 0.6, 0.6, 0.6], + [0.6, 0.6, 0.6, 0.6], + [0.6, 0.6, 0.6, 0.6]], + + [[0.7, 0.7, 0.7, 0.7], + [0.7, 0.7, 0.7, 0.7], + [0.7, 0.7, 0.7, 0.7], + [0.7, 0.7, 0.7, 0.7]], + + [[0.8, 0.8, 0.8, 0.8], + [0.8, 0.8, 0.8, 0.8], + [0.8, 0.8, 0.8, 0.8], + [0.8, 0.8, 0.8, 0.8]] + ] + ]) + + result = F.normalize(img, mean=mean, std=std) + """ + ) + obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_pad.py b/tests/vision/test_pad.py new file mode 100644 index 000000000..65c7416a5 --- /dev/null +++ b/tests/vision/test_pad.py @@ -0,0 +1,142 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
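+ +# Pad tests: padding may be an int (all sides), a 2-sequence (left/right, +# top/bottom), or a 4-sequence (left, top, right, bottom); fill only applies +# when padding_mode='constant'. The cases below also exercise the 'reflect' +# and 'symmetric' modes on tensor and PIL inputs.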
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.pad") +img_obj = ImageAPIBase("torchvision.transforms.functional.pad") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + padding = 2 + fill = 0 + padding_mode = 'constant' + + img = torch.tensor([ + [[1, 2], + [3, 4]], + + [[5, 6], + [7, 8]], + + [[9, 10], + [11, 12]] + ], dtype=torch.float) + + result = F.pad(img, padding=padding, fill=fill, padding_mode=padding_mode) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + padding = [1, 2, 3, 4] + fill = 1.0 + padding_mode = 'constant' + + img = torch.tensor([ + [[1, 2, 3], + [4, 5, 6], + [7, 8, 9]], + + [[10, 11, 12], + [13, 14, 15], + [16, 17, 18]], + + [[19, 20, 21], + [22, 23, 24], + [25, 26, 27]] + ], dtype=torch.float) + + result = F.pad(img, padding=padding, fill=fill, padding_mode=padding_mode) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + padding = [2, 3] + fill = (255, 0, 0) + padding_mode = 'constant' + + img = Image.new('RGB', (2, 2), color=(0, 255, 0)) + + result = F.pad(img, padding=padding, fill=fill, padding_mode=padding_mode) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + padding = 1 + padding_mode = 'reflect' + + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = F.pad(img, padding=padding, padding_mode=padding_mode) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + padding = [1, 1, 1, 1] + fill = (0, 0, 255, 128) + padding_mode = 'symmetric' + + img = Image.new('RGBA', (5, 5), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((4, 4), (0, 255, 0, 255)) + + result = F.pad(img, padding=padding, fill=fill, padding_mode=padding_mode) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_perspective.py b/tests/vision/test_perspective.py new file mode 100644 index 000000000..6fc77b8e2 --- /dev/null +++ b/tests/vision/test_perspective.py @@ -0,0 +1,140 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
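+ +# Perspective tests: F.perspective warps the image so that the four +# startpoints (top-left, top-right, bottom-right, bottom-left corners) map +# onto the endpoints. The cases below cover PIL and tensor inputs, BILINEAR +# and NEAREST interpolation, and positional vs. keyword fill arguments.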
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.perspective") +img_obj = ImageAPIBase( + "torchvision.transforms.functional.perspective" +) # Supports both Tensor and PIL Image + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import perspective + + startpoints = [[0, 0], [4, 0], [4, 4], [0, 4]] # Original corners of a 5x5 image + endpoints = [[0, 0], [4, 0], [3, 4], [0, 4]] # Vertical skew + + from PIL import Image + img = Image.new('RGB', (5, 5), color=(255, 255, 255)) # White image + img.putpixel((0, 0), (255, 0, 0)) # Red + img.putpixel((4, 4), (0, 255, 0)) # Green + img.putpixel((2, 2), (0, 0, 255)) # Blue + + fill = 0, 0, 0 + result = perspective(img, startpoints, endpoints, InterpolationMode.BILINEAR, fill) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import perspective + + startpoints = [[0, 0], [4, 0], [4, 4], [0, 4]] # Original corners of a 5x5 image + endpoints = [[0, 0], [4, 0], [3, 4], [0, 4]] # Vertical skew + + from PIL import Image + img = Image.new('RGB', (5, 5), color=(255, 255, 255)) # White image + img.putpixel((0, 0), (255, 0, 0)) # Red + img.putpixel((4, 4), (0, 255, 0)) # Green + img.putpixel((2, 2), (0, 0, 255)) # Blue + + result = perspective(img, startpoints, endpoints, interpolation=InterpolationMode.BILINEAR, fill=(0, 0, 0)) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import perspective + + startpoints = [[0, 0], [3, 0], [3, 3], [0, 3]] + endpoints = [[0, 0], [3, 0], [2.5, 3], [0, 3]] + + img = torch.tensor([ + [ + [[255, 0, 0, 0], + [0, 255, 0, 0], + [0, 0, 255, 0], + [0, 0, 0, 255]], + + [[255, 255, 0, 0], + [0, 255, 255, 0], + [255, 0, 255, 0], + [0, 255, 0, 255]], + + [[128, 128, 128, 128], + [64, 64, 64, 64], + [32, 32, 32, 32], + [16, 16, 16, 16]] + ], + [ + [[0, 0, 255, 255], + [0, 255, 0, 255], + [255, 0, 0, 255], + [255, 255, 255, 255]], + + [[0, 255, 255, 255], + [255, 255, 0, 255], + [0, 255, 255, 255], + [255, 255, 0, 255]], + + [[16, 32, 64, 128], + [32, 64, 128, 256], + [64, 128, 256, 512], + [128, 256, 512, 1024]] + ] + ], dtype=torch.float) + + result = perspective(img, startpoints, endpoints, interpolation=InterpolationMode.NEAREST, fill=None) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import perspective + from PIL import Image + + startpoints = [[0, 0], [2, 0], [2, 2], [0, 2]] + endpoints = [[0, 0], [2, 0], [2, 1.8], [0, 2]] + + img = Image.new('L', (3, 3), color=128) # Gray image + img.putpixel((0, 0), 50) + img.putpixel((2, 2), 200) + + result = perspective(img, startpoints, endpoints, interpolation=InterpolationMode.NEAREST, fill=0) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_resize.py b/tests/vision/test_resize.py new file mode 100644 index 000000000..b4be5cd3e --- /dev/null +++ b/tests/vision/test_resize.py @@ -0,0 +1,129 @@ +# Copyright (c) 2024 
PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.resize") +img_obj = ImageAPIBase("torchvision.transforms.functional.resize") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import resize + from PIL import Image + + torch.manual_seed(1) + + size = (3, 3) + img = Image.new('RGB', (4, 4), color=(255, 255, 255)) + img.putpixel((0, 0), (255, 0, 0)) + img.putpixel((3, 3), (0, 255, 0)) + + result = resize(img, size=size, interpolation=InterpolationMode.BILINEAR) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import resize + from PIL import Image + + torch.manual_seed(3) + + size = 3 + img = Image.new('L', (3, 3)) + img.putpixel((0, 0), 50) + img.putpixel((1, 0), 100) + img.putpixel((2, 0), 150) + img.putpixel((0, 1), 200) + img.putpixel((1, 1), 250) + img.putpixel((2, 1), 100) + img.putpixel((0, 2), 150) + img.putpixel((1, 2), 200) + img.putpixel((2, 2), 250) + + result = resize(img, size=size, interpolation=InterpolationMode.BICUBIC) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import resize + + torch.manual_seed(4) + + size = [4, 4] + img = torch.tensor([ + [ + [[1, 2, 3, 4, 5], + [6, 7, 8, 9, 10], + [11, 12, 13, 14, 15], + [16, 17, 18, 19, 20], + [21, 22, 23, 24, 25]], + + [[26, 27, 28, 29, 30], + [31, 32, 33, 34, 35], + [36, 37, 38, 39, 40], + [41, 42, 43, 44, 45], + [46, 47, 48, 49, 50]], + + [[51, 52, 53, 54, 55], + [56, 57, 58, 59, 60], + [61, 62, 63, 64, 65], + [66, 67, 68, 69, 70], + [71, 72, 73, 74, 75]] + ] + ], dtype=torch.float) + img = img[0] + result = resize(img, size=size, interpolation=InterpolationMode.NEAREST) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + from torchvision.transforms import InterpolationMode + from torchvision.transforms.functional import resize + from PIL import Image + + torch.manual_seed(5) + + size = (4, 4) + img = Image.new('RGBA', (6, 6), color=(0, 0, 255, 128)) + img.putpixel((0, 0), (255, 0, 0, 255)) + img.putpixel((5, 5), (0, 255, 0, 255)) + + result = resize(img, size=size, interpolation=InterpolationMode.BICUBIC) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_to_grayscale.py b/tests/vision/test_to_grayscale.py new file mode 100644 index 000000000..102ca74d3 --- /dev/null +++ b/tests/vision/test_to_grayscale.py @@ -0,0 +1,89 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.to_grayscale") +img_obj = ImageAPIBase("torchvision.transforms.functional.to_grayscale") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (3, 3), color=(255, 0, 0)) + + result = F.to_grayscale(img, num_output_channels=1) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (4, 4), color=(0, 255, 0)) + + result = F.to_grayscale(img, 3) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + result = F.to_grayscale(Image.new('HSV', (5, 5), color=(120, 100, 100)), num_output_channels=1) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGBA', (2, 4), color=(0, 0, 255, 128)) + + result = F.to_grayscale(img, num_output_channels=1) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('CMYK', (3, 3), color=(0, 128, 128, 0)) + + result = F.to_grayscale(img, num_output_channels=3) + """ + ) + img_obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_to_tensor.py b/tests/vision/test_to_tensor.py new file mode 100644 index 000000000..d357b5e98 --- /dev/null +++ b/tests/vision/test_to_tensor.py @@ -0,0 +1,99 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
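+ +# to_tensor tests: F.to_tensor converts a PIL image or a uint8 numpy array of +# shape HxWxC (or HxW) with values in [0, 255] into a float tensor of shape +# CxHxW scaled to [0.0, 1.0]. The cases below cover RGB, L, and RGBA images +# plus 3-D and 2-D arrays.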
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.to_tensor") +img_obj = ImageAPIBase("torchvision.transforms.functional.to_tensor") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (3, 3), color=(255, 0, 0)) + + result = F.to_tensor(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + result = F.to_tensor(Image.new('L', (4, 4), color=128)) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import numpy as np + import torchvision.transforms.functional as F + + img_np = np.array([ + [[255, 0, 0], [0, 255, 0], [0, 0, 255], [255, 255, 0], [0, 255, 255]], + [[255, 0, 255], [192, 192, 192], [128, 128, 128], [64, 64, 64], [0, 0, 0]], + [[255, 165, 0], [0, 128, 128], [128, 0, 128], [128, 128, 0], [0, 0, 128]], + [[75, 0, 130], [238, 130, 238], [245, 222, 179], [255, 105, 180], [0, 255, 127]], + [[255, 20, 147], [173, 216, 230], [144, 238, 144], [255, 182, 193], [64, 224, 208]] + ], dtype=np.uint8) + + result = F.to_tensor(img_np) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGBA', (2, 4), color=(0, 0, 255, 128)) + + result = F.to_tensor(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + import numpy as np + import torchvision.transforms.functional as F + + img_np = np.array([ + [0, 128, 255], + [64, 192, 32], + [16, 240, 80] + ], dtype=np.uint8) + + result = F.to_tensor(img_np) + """ + ) + obj.run(pytorch_code, ["result"]) diff --git a/tests/vision/test_vflip.py b/tests/vision/test_vflip.py new file mode 100644 index 000000000..9bd439379 --- /dev/null +++ b/tests/vision/test_vflip.py @@ -0,0 +1,165 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
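+ +# Vertical-flip tests: F.vflip reverses the height axis (dim -2 of a tensor, +# top-to-bottom for a PIL image). The cases below cover RGB, L, and RGBA PIL +# images along with 3-D and batched 4-D tensors.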
+ +import textwrap + +from apibase import APIBase +from vision.image_apibase import ImageAPIBase + +obj = APIBase("torchvision.transforms.functional.vflip") +img_obj = ImageAPIBase("torchvision.transforms.functional.vflip") + + +def test_case_1(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGB', (2, 2), color=(255, 0, 0)) # Red image + + result = F.vflip(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_2(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + result = F.vflip(Image.new('L', (4, 4), color=128)) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_3(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + img = torch.tensor([ + [ + [1, 2, 3], + [4, 5, 6], + [7, 8, 9] + ], + [ + [10, 11, 12], + [13, 14, 15], + [16, 17, 18] + ], + [ + [19, 20, 21], + [22, 23, 24], + [25, 26, 27] + ] + ], dtype=torch.float) + + result = F.vflip(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_4(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + img = torch.tensor([ + [ + [ + [1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12], + [13, 14, 15, 16] + ], + [ + [17, 18, 19, 20], + [21, 22, 23, 24], + [25, 26, 27, 28], + [29, 30, 31, 32] + ], + [ + [33, 34, 35, 36], + [37, 38, 39, 40], + [41, 42, 43, 44], + [45, 46, 47, 48] + ] + ], + [ + [ + [49, 50, 51, 52], + [53, 54, 55, 56], + [57, 58, 59, 60], + [61, 62, 63, 64] + ], + [ + [65, 66, 67, 68], + [69, 70, 71, 72], + [73, 74, 75, 76], + [77, 78, 79, 80] + ], + [ + [81, 82, 83, 84], + [85, 86, 87, 88], + [89, 90, 91, 92], + [93, 94, 95, 96] + ] + ] + ], dtype=torch.float) + + result = F.vflip(img) + """ + ) + obj.run(pytorch_code, ["result"]) + + +def test_case_5(): + pytorch_code = textwrap.dedent( + """ + from PIL import Image + import torchvision.transforms.functional as F + + img = Image.new('RGBA', (5, 5), color=(0, 0, 255, 128)) + + result = F.vflip(img) + """ + ) + img_obj.run(pytorch_code, ["result"]) + + +def test_case_6(): + pytorch_code = textwrap.dedent( + """ + import torch + import torchvision.transforms.functional as F + + result = F.vflip(torch.tensor([ + [ + [ + [1, 2], + [3, 4] + ] + ] + ], dtype=torch.float)) + """ + ) + obj.run(pytorch_code, ["result"]) From c848bd34f995487eddc5d4875d0f3a43252ab97a Mon Sep 17 00:00:00 2001 From: guozixu2001 <11324293+guozixu2001@user.noreply.gitee.com> Date: Sun, 29 Sep 2024 19:01:43 +0800 Subject: [PATCH 2/4] Remove useless files --- .../vision/__pycache__/__init__.cpython-310.pyc | Bin 150 -> 0 bytes .../__pycache__/image_apibase.cpython-310.pyc | Bin 1092 -> 0 bytes .../test_CenterCrop.cpython-310-pytest-8.3.3.pyc | Bin 2621 -> 0 bytes ...test_ColorJitter.cpython-310-pytest-8.3.3.pyc | Bin 2076 -> 0 bytes .../test_Compose.cpython-310-pytest-8.3.3.pyc | Bin 2682 -> 0 bytes .../test_Grayscale.cpython-310-pytest-8.3.3.pyc | Bin 3691 -> 0 bytes .../test_Normalize.cpython-310-pytest-8.3.3.pyc | Bin 3047 -> 0 bytes .../test_Pad.cpython-310-pytest-8.3.3.pyc | Bin 3477 -> 0 bytes .../test_RandomCrop.cpython-310-pytest-8.3.3.pyc | Bin 3264 -> 0 bytes ...st_RandomErasing.cpython-310-pytest-8.3.3.pyc | Bin 5025 -> 0 bytes ...omHorizontalFlip.cpython-310-pytest-8.3.3.pyc | Bin 4187 -> 0 bytes ...andomPerspective.cpython-310-pytest-8.3.3.pyc | Bin 5741 -> 0 bytes 
...andomResizedCrop.cpython-310-pytest-8.3.3.pyc | Bin 4286 -> 0 bytes ...t_RandomRotation.cpython-310-pytest-8.3.3.pyc | Bin 3208 -> 0 bytes ...ndomVerticalFlip.cpython-310-pytest-8.3.3.pyc | Bin 4163 -> 0 bytes .../test_Resize.cpython-310-pytest-8.3.3.pyc | Bin 3455 -> 0 bytes .../test_ToTensor.cpython-310-pytest-8.3.3.pyc | Bin 2556 -> 0 bytes ...djust_brightness.cpython-310-pytest-8.3.3.pyc | Bin 2309 -> 0 bytes ..._adjust_contrast.cpython-310-pytest-8.3.3.pyc | Bin 2480 -> 0 bytes .../test_adjust_hue.cpython-310-pytest-8.3.3.pyc | Bin 2431 -> 0 bytes ...test_center_crop.cpython-310-pytest-8.3.3.pyc | Bin 2630 -> 0 bytes .../test_crop.cpython-310-pytest-8.3.3.pyc | Bin 3264 -> 0 bytes .../test_erase.cpython-310-pytest-8.3.3.pyc | Bin 3937 -> 0 bytes .../test_hflip.cpython-310-pytest-8.3.3.pyc | Bin 2353 -> 0 bytes .../test_normalize.cpython-310-pytest-8.3.3.pyc | Bin 3447 -> 0 bytes .../test_pad.cpython-310-pytest-8.3.3.pyc | Bin 3438 -> 0 bytes ...test_perspective.cpython-310-pytest-8.3.3.pyc | Bin 4299 -> 0 bytes .../test_resize.cpython-310-pytest-8.3.3.pyc | Bin 3565 -> 0 bytes ...est_to_grayscale.cpython-310-pytest-8.3.3.pyc | Bin 2121 -> 0 bytes .../test_to_tensor.cpython-310-pytest-8.3.3.pyc | Bin 2546 -> 0 bytes .../test_vflip.cpython-310-pytest-8.3.3.pyc | Bin 3904 -> 0 bytes 31 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests/vision/__pycache__/__init__.cpython-310.pyc delete mode 100644 tests/vision/__pycache__/image_apibase.cpython-310.pyc delete mode 100644 tests/vision/__pycache__/test_CenterCrop.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_ColorJitter.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_Compose.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_Grayscale.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_Normalize.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_Pad.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomCrop.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomErasing.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomHorizontalFlip.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomPerspective.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomResizedCrop.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomRotation.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_RandomVerticalFlip.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_Resize.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_ToTensor.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_adjust_brightness.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_adjust_contrast.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_adjust_hue.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_center_crop.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_crop.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_erase.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_hflip.cpython-310-pytest-8.3.3.pyc delete mode 100644 
tests/vision/__pycache__/test_normalize.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_pad.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_perspective.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_resize.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_to_grayscale.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_to_tensor.cpython-310-pytest-8.3.3.pyc delete mode 100644 tests/vision/__pycache__/test_vflip.cpython-310-pytest-8.3.3.pyc diff --git a/tests/vision/__pycache__/__init__.cpython-310.pyc b/tests/vision/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 05d0c35e9cfa38009d43ac0d2bcc7525a407eff1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 150 zcmd1j<>g`kf->f>X(0MBh(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o6vKKO;XkRlg`d zIXPcHJ+maEG)X@o(K$b_EVZaaHzl=9za+J|q*%W!vp6$9Pd`3BGcU6wK3=b&@)n0p TZhlH>PO2Tq$YLfS!NLFlBU&O} diff --git a/tests/vision/__pycache__/image_apibase.cpython-310.pyc b/tests/vision/__pycache__/image_apibase.cpython-310.pyc deleted file mode 100644 index 1e0333519297f6b477b8fc50d5a2e4d13d106351..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1092 zcmYjQ&1)1f6i;R{`!T!QgDp}jmbvMIc0r_wNd4$RDpCq92y+SPOxm4xW@k*Y3)@aF zR_ICphTUUtUi=FR{t>x)C>}f)yy#1Ix9)?yys!M;N0PkN@)6M0)y=_&3PL}PGEW4Q zSFq$$U@?kuib4##lRB9jy4cv38k*c1fQ4P*HrCXBHD=THS ztc`0hLh!c9$rC@A(q^6&Oz>c5>wRE*l)*tR0{*ej(nJl{ZUawVLifSL)3f&TKz3Qd zgDjC5Q&IO(pmGs)r`ALUr)Q6Xg2i#l=WQqV{{tlEc7m8Gc8jvzu4_M$Nlz-)i@0!L zZrbS;+G9enq4sQ2U4iN#t8Q2;wRM88UvQzT(WIGnLleo<15J7?<9eYO+5)NIvY#s5 zn$$K^8&Q`>$8^BbKG#zvR>Okp#fe9%yr74I#R=~zO@zu*O&IX7HAA+=gv}|i)h@BF zQyQgAN=h%$pN&p8&v-}VQIvNM6V>hScXrrj-WzbC9>;vpQCuq78NdwkUdK*|vLe}s z$GcVx#RAM@$vh~-mWhvje8=%|Gpa%qKWoj9cqC`#U_|Zaea+HjU#45|i&l{+OrzO%#453( zY;`(AbMF?>rb43!C!sd zcRFn7A6*N%aQF7Tz~enL6izSL<-!llHP#E6SQEVO;xk4Ufj?Qe*Wmts6-=mnAe4M0 zZ8$bIJ`>?B%Jv%}3&Nlgr$RdRm<(qBFe>2$+XXh+t;@&* z4NR7(B>QWjBr4#XN*RINIe%IJ#7%QKro|nA+iY%6MT>TdPIu1`w}f%GEoL)ocPDLz7ZVN+hQ$tUZ>_lqeo& zMIO;XVn=xppAD)KSnnr6yR-z8(t;?63(zwWV(0C-)=4P{$! z^UPWt7AX(Ei0si&2KP}wvw#KJ|qw5u2 P)rqERcvs-3>$ATB!Txiu diff --git a/tests/vision/__pycache__/test_ColorJitter.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_ColorJitter.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 87eae4ffa4505e23a8e50bc3a674229c2e867ec0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2076 zcmds2OK;Oa5Z<-Z#7UgKC4|HwYA=mcoF;8~2#Q*IDneByZmufJ$!-%@u{YY?v=qS! zen@-F@8Dnf%84@<#06&Tyeb4K95}Gj&SPiC9?ds1D@vsTfn|RDdipX?$R|`LmjTKy zY!v}vgwYOhX~K=rGzcYO#+{+5CF^FvVsvt$l0y}k8MN9u;!5poIQ8*m$yKkT;d1J=262v!aW*yg zGqI)lYkeX8Zk%PD@kqy}Xq?5S>_vU*+TF_}OI}kj-Y3$ARDav6wnfORQZ$>Q+6r{L z*Qg%(`yx8$Qr~C%ysEj3@rGmsRSMN&k9IHjZn(qy{pt7w`-G7n^WnwS39G%H{Cq_1J(K%xQ9c8`Nl`;(0K zGct#n#OjT6&|)6+j>Jhk4TjAsUP@qC0%*so{SJ! 
z2C1?TTa8}O(LtnQd$)V(jkLQSOhK4ej;|C2)oOJ=Ow`R&l{Rd3+ok# U9Ymq)%B*eJ)G{pGwq@J)5BFsUhyVZp diff --git a/tests/vision/__pycache__/test_Compose.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_Compose.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 7fdaae662f3585c5d9260cfb61cbb97272ec93d5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2682 zcmd5;L2uhO6c%mARvbA^)2+aOVGwp$Ex?m&C+$*T9@_d)1Q>#>yD2aP#bja)l4y{0 z(iH7!{R#a6?J@so&~>Mshhe}j+k2Glu!&ov7>ZT^A4T$seB}3ieBzBp-Gb-IUwrz@SS;R^!!3rrxszDV-N=YrX16%i%lhw2y)QQDx zTKSa(Ypkwohpy1-3oB^Q8nvNbr>nFM^)Uel=X^G@|rxi zROwau8H9dJUY9U%SiHO!?ZD>i?(~UaLZMu}-onl+LAa50FG8x%KN=RXQMYFd&mwLf z%>8iTi|_qD%M=rR!N<+6*Ls`R9&A5K;-{>cvMB2v-1hIZ+?g4)T&d{gPGhoxcM)cs z9%F=fzP@6a4~M*c9IMgf zu)QDkd3M5t`j)bjwqjDrcELH_2&a_y$7lH(j)p@JOxU@JY`4KwNQqSb-Mk^zq48E> z2vi7?e*|)w@8+YE|1p!u>E?FJ-S++uVY^7!ZG)>jg9Rk)px0L6rSO!zT^riMlL}zjsC9IiE(vVNocml*7MOnrY+3UO$X&tqnvD4}5&GzY zGqR0|!lj&aHQQ)S6b6K$vrWf`_RI|43F(q-vt`I~!TSOsUrkMh^xu#x?hNzKJzV2n z;kDr1SmT2TeETQl*flgazaU#A*2X_zj(oCQ>DlFcNNZt${5Egn(JsVbLjgE$uX#c&BgW^qJEQKl02V;^T;$_L1fL)D5>f&1?Y28(~*myTPd+2CT84j*jl` z@2?r%mcC}J4_g-$Iw&?!@pyPybHNS==%8KivKc7)sMwr7z#7_h3kFz6#l~=$PC0C* zpXmpqVdQdHXS!VtSa)U^R}zb}a;$Rt4L@PlIIa*%sL$h4aPW0pVv&CakQtwftupO^ z&@+kEA@YBG-aZOE+Gc@m2knu=kD`P2p7kv7k16Ai2t96d8uGB61XWbb6jjFfG+scx zX#*OC;D#C*ZrvSx9L9jP5 z*pE>>nGTs`4m2)8ZZZdIHwz|8=4k01V{Q6GJuwH0e!)=Uh$M~|XrG8VP;~w!hS?k- zfmKi}pg?FQViCUb80)s7A&;>KY#CanW9)t&V}~s8^u67$^m8&3WSXA1Jj2aQX24Z& zMmy;s>1n+VxxpY;ASrw^e0tJ&_+n@KVM}iSIJ(W2ZU=6_wi?|&!U2BGrmlaYzn}zG z1$RS6lhYVP?EYDqRdLtx_{LRW7!#<2;Vn5WB)u=eC?AqKy8|uLLsGrvklZK)S2^Ko z0N~K{vH-x|CoxC@u)`?zZxjNyGQ)1K55VYcj$~`>?o7M=E95jOI`a@OY@8YQQ#bi~ z7k28*B;CLpSddgsT;?Ki)!({V!K{Ve3ewDpf_`cGaeHP3vle;_Bn~%HCJOqYw>b-8 z`yZQSDGFOthV-0{P>_H=w|rnUD?$W$S#G^{X$EENCKFnYoqPUHEsDXvc1H0Ugn zP>YExEUsE(=KvB?hX3N?yXRCUE=6;qDxn|qC{|Hmo+=Uvadins6;KeaCYi{koJ9T9 g3rOVB&j1Qih_Ly1v8L2yRZ;PMpnjk(t4lTgZ-9wIX8-^I diff --git a/tests/vision/__pycache__/test_Normalize.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_Normalize.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 0cd1395087172f8d34abffab3e21e969b5445ff6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3047 zcmc&$OK;Oa5VoB(antkxqM`^67NoaPoN!3%J(Yk3zuj^tg(VcZN^^B@eog_c0^*Ndm zZI;ZC)MuqWPtrt(c7e>2477_xeWT>(z5|xBpI3wQ%Y(gFmQUd`yXROPdfh)axX0SZ zC$?{UZjm#~^^ZK}_{DvYR@XkKZFCVmDtsj2-G%qZ6A)avP>Fh}epI-2k@x^XchpM_ zLk9T7zV!uWWQ^;_c6uJ;A>!DjOQ3HSl0|8V4q;VjCT`)`mWdhWQ z4^ynp6i_BWjT(prngYrMs750`UwJcc<2IWChOiWhITT3QP%OZg%ssON12Xqk*bEEp^}&oon(O_R;O$>DY`WTlf(pA$wEw+vf1EI!m}E3e)Dn@ zhY9zXpq3U%84@<#07pcaUE~dwAHdjSn*8e&71e0=l%Sf=Wuzstl|1; zat zQBL(o4yA5zNIJw$Wkc8VAZQYjiW0THA5w;?tw9(>!U@FI)ROBQpa#QE^x>p0KH!5IKGI#p5j2iWm5jV}Wof9*7mLZ3BwI2eSD zLsuM)4;p*U(=a$@TzpE|aYL|3M2$)6)u6xU(E8{!UXnBY0npmF-iNKoKvU3jdg1T& z4Ze!OTx4QV-G>-(wl2f71wh*{Yg;qxY#U;jwYO)d5V#Gq_JjWy=J^8f!;7+6 zl8uD&qyfciS!CKR2(rlBK)Np?Yw6a?%Pr&qO<80`$!R*PF$<#BM zC%lBM+LYO`HTBV0jNF&ZtID)Bk;Ha$o&pF%Dp<|A8o+_lm~R74EZDH;+mN&*HRjuZ z7pZYY91?+2W4;Y|ks4RTkziWIk=^)t82)#M?1XUc+yJB7>;gtV;P-BX(O>@)!jb8l z{AOuKB1~o${vIT?_1HKVyPj}^D6Tvjo%YkwMju5S z%C5Q-MJUkZ#{g$1+!ZWdOhs|&Tvo#~uBoH{cV*D5fpC^LtyI)2s3YkY=se(bT&<#$LVf&a-$C0EgnoFVT`<74A) H<)eQ9riVR) diff --git a/tests/vision/__pycache__/test_RandomCrop.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_RandomCrop.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 63f543ef81c84c35e7bd5cd827a6687429d1f5d2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 
literal 3264 zcmdT`O>Y}T7~ZuV+i{$J&?=}GwB=%3+S=X^hah1T8mTA}L{Z`7C|YfH#?F@g((F2E zDmcNF8$SS!`6c{^y>jB*6I|e#@p^Yt(-vBY#LDxI=j)kgW}f$*2{$)e622cE{5Eah zkfdLDq5M@)*hPv17$OnblP0prD}J>i$&z22tjUX(`lODQir4U)lcuPvdh32`(vl_G zB(-nl$p&qSyiL|f{X1#0NgAYy{03PkE#x=J25BR|MdUA}-sZ2EQ#$C$Y2%Z_{fBl; z@wdJ2+cTP#pWRPF=A53nu^R^cgxNtn4VfSJAKL*5{XG^&4#!x2GXAU~?IAr^U=ry^ znaDrM-$+U2N%d>6pUI?3YTs8me{fhkup{h2OUXXhk3yCxV%ofN%0hopwZcTn1{8!^ z`7i;}_w8VAdsa*-=?;5EYwSLw%2?6!?haOiiUO(2yNtOh4Rie&IxV0R8pI*%9_0{x z5l2Uws_3d>sET>4UTIQ}Myj%{Dj%rI&PvmVs-g{%Fj~`BS~b9FCK3>|?PIiGLB^y2 z)pn48qYqcc>zou=>A=xfdFg=F!Au8=zRFDpr~z^YPz?PqW;Xz9fSv&yV|!(G1F#0j z8Ne}D$(evPLC)lRwo1+fs0nf=P|Tg<<9y}EszQ>Bh>jP>G4(<_DIFD~@!U&rTKr_Y zIJjOuGOaHfA6>#}oKxz2Wu4jHoc3yIGoeqDb7n_rolru9B&~)gPtq!z2k6|6F2rP( z6B2qUu{K2h`|jX0^yz?wjuQ@MZgM(5864VsVQ@xS@_^8@K|SwHmR_;eX!~60D{;%Fk9 z5I5kiqUD@k;b5wg4H##+s!Uza8$)whh2>+z#~mjO;=~S;PKkjC4~P4oEn|U*VEL>I zhZS}GfSz|dk3W6bQI#&#YW7sc3B8bwyZVTGKxTS9MJbQ(NAo0dpHi>e1)(8EE&7YX z%lq;j;eF9fAP)s@4*UvvG9E-9u{!K!jWv)lE{L>&#aK`yl92Qr zRFs8uhuuca>xK0B^#x?3bc~shx?p3y%6L(( zWj%0BQZl23ATu!!bNv6CLUDs{7Xf#a3v{%Ii^U6Y_J;8m5qxw#0ySq`#do;53)8!u zHc#fRm$*Tkws)foD=*!%@L<5*-nZizj{*LSX7rr72~F4Ys3fN49{Yk@) z+!H(t7?dqd?q=^_m%q`iWmA>&Sm)*zH~h6ABE4`LL9ld7i2FiZ70r)urOrM2F@Lqk V+`-$tQM(}LSj3A+DZg1R8(3aR;z_msw%j#+h%d%Xza8U z!3nN>2RQbN@F8;L#2FzWF7SKLiDM_@-9l08cE+B0zj^P?%=Zh;Q6Ec=jktR zE6T6bh(8%Lo}lEgGHzj1>=jkTDcafZ)!wF^ zvp0TFdL=t=7w|6IMSBzPie0kHcyHP2=SsEm8~7{Fs_Lxp(etMtTfXznSx^NXQB z^v3l-SY!Xx6C=O=!W!G&=o4Z2!|~aGhAkcy9~&qKD8Fl{0_7*wR)1E%Q-aLP?6*+v zOtrIi?#C>t46z%}tWUFwQqtzna5V8mpt0nr^Hg}Fuxm{jZ9X8OQ%!q14jeJ@Tq}U? zpLw=ZikLC$`p6njEw}GGj=iH-W7NeowXT-nxijhN^=3`;2bSw}cl5eh)AV|)TGIpx zp&n41^(J~QEO+X3^_n&uPh4x@bPp}pcVbe8qciXiH3^)t?}?qGNI_bOWQ%*i&O~XV zwDjXz0?vRoC>@ktlsze89~48U%Am@Wf((*iw5TGEMn^wR$XXFC(bQ-|Rg@-5tG$8` zRRyJkvWv1OMeKuO=u{Cm!<2#yl3=u`YE#u||65g;D_Yaq(&bpLXieLfA`&YKEt+yv zd_{rUU0v2_k&kPd9b8SEZfGB;u4e@?+YydGbptY=g{khqoT}0ASw2J@UaP(G;M^H} z)n|LW&drK}^L213tjR2IJGL_pW?Ap#aa9*=$&C;VW)<<~};&hW9|cfu7eJo^jvl$&48u zXCD~d!or$Mzvna*lCW1~aM8lE{}aii4PR7C`aDem!HFswmu}{i48bZ)hFmZ5Agi%R zfh>5L2Iq2O^Z74{FpRg9n@nA9h;tfERSLj3rcpLs4a3RAD$8!{eB?EzA?1#Y$uv~R zok@P2{V9Eni34a54-QH3QR|Mqv-x7h zJ0FQRZ{(d{7VbGMv|`qr?k{{egws-p4D&+TsqqN=LjTx+#p&^X$QjYTk_9ZDBep}b z0~9)*NWF#7N6AwVB_Nq;F?4aZ1So!zlvrX2C)RAw2IU!pl8oH186Nr$TQG4qNp14- zwFye(t$S0*l7b?$iQbaI3{9eVm(~Q|DbF?O63tVlZJ2vl?zKw7fM?Wc;h2ypX~-sS zM#++fG&;~AQPPl5b@?yZ?8G{(Fj=4fdJj(#+yB~w6$5W{xC&WgdZwI23y_rx8sZuvg$mGpa) za4y9#aAuoP*Q~JUD`7BSDJ>z8|CL_BnhZ~Hy%h9onb@R?Zk2q(=Ys?*5W5}lJ%O(X d#SccFJ$0QA>9;GNMm^fDWQz3nxcIR0-ap;f11bOj diff --git a/tests/vision/__pycache__/test_RandomHorizontalFlip.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_RandomHorizontalFlip.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 90e94c98024ca92598c5cf09f2b9dfae56ee5a5a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4187 zcmc&%&2QsG6t|PKNz=65E(--#LKc@zL`dRqN1|vyq{5=rZnfMJRgs%YBH*|whE28!&+zY55ww~J~68N76)O{I@BHP>Y z-0_g1%`Ooy3%DQQ{!Ky?2|tQN{7L*;hzf_ruMp3^NQ$KNeUU{3Zx)`~uP~}Aon5X! 
z4rnBC2NK~P4g6S?@ku%xkmp)80pM^g-yThDZxA{TsdpOZy?Y2l+LZDj>T)Zs>H7On zjL{-z6b7{3PmHh#`hB@6DLc&zLaDDdC2a@N3*vrXZ%Rf+*KA5wj&8Rp$(@X{40E!w zvy%YtG$j(9jGfKc$vrQyqg1HW2`65J7+B18gwseyP8Z|Oty3^-c;F1b7#!KpZZ zE0OaeI;Qq`x=0-2jG}2V*gc#U>0|`u+W3SU8w>;DoC;A`Q?SsH~+B6srgZ91~ z9ZYuH+xDYibmY+JL*g8@BPWc)cD$hcW01E1t?|ipnW+wjVCq2L0C-qIQxXee>Cd+| z=o&uel8~0r%ScwhUoQ#ub&*U?&HR`KyO{S*tCP5K74OEIY(Q%K4XM|Z)C@_qrZksn zO{tqhHn{^@#nie?Wf@$Sfhg-4JF)>W=qu;(R|bI# zoU-J!MHkW1GHYtAVT6sFRlKF?rpOSa>2{aig5-L-X=%Fg^tJu?$t+j#Tzk%ff1BLS z*i3nHPGIBD7Eg9+ibq$IuG2D8x-sXtyRDIPT)+3^UQ?=LrPRiG-hfhTJc-8c3&*S1 zvBI4O_=c=`p3>6_U=>r4J99O_2Cgw5gRv7B%6tsQU8XS~16G*E6@DYu<~10M$jyu#;5<7*&B$a{ zuG5cmBouf`%ymRz)S8=G_*Kw@uH_J2k~SAnNywj*3e z#f+#HZs2K|O$b-fU_yh#8k^hv$<<&$!}MxE(@-KVEYBtXFYc;hM>Sa25seO0dS;Ks zb#z$QS-FELJwFy5)^)_91I@^f#ee|=u^2!zvir8!8v`Z`L}LKY$e(@#CQLYN0?*8M z*n|la4x2zT^Bp!}z=Xpl(5%j!MYLeTg3A{0to-S>V8VjK7VxZmhb@@s!r?B^y7>-w zVW10#yFlxL;1?G$`TFPE&{PJzQHjp{v_ig&v@xPa7{cmK!Zb~;(qMv{A z@BypRjGCxC!@vvZCNIlK6YPH>D^{Z+Ny$>>*6%u31r_ka>lFilsKXAkFdH))keIkx z!J{}aUt8PsA1*ETI(-w8#_efkcj9`HI|`?@`{R>A^7YOD=Mx+tT6P%X?7@z-)H$YZ z31RWX>KH*Cu5Z?9~R-CdkLC_B9O?wp-KU3Oj`%({xqb9VHnw^_rEBz&XD w>n`Ub_mZCPH2E9fZdN|>12XZPkJ;flWZZp!wN|K!l|qHx?^fQc+^s2p0a!SvkpKVy diff --git a/tests/vision/__pycache__/test_RandomPerspective.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_RandomPerspective.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 15c751b70045cf85dfe3ae3bd4fd238cee592532..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5741 zcmd5=&u`l{6qal!b{r>Z`fJ#L720;F4S0?v%Z>vqLz|#20(3=?wujjW48>v+cd}(r za@q{rF57kg!*#6N|xnFo)fgiDww1t@_l?HzsL7Tdub^v z;rV^#ufyNolcbmQ5`GeRxr5|fHS>*m)FiBQH`B<7 zZ%J#Rn{8xeiDk^x_i|&AWks1Y=gj#Zq{fn&HZv%f%>^@ya>ZOUb11KvOXf1lRddC> zg7T^ zXY@?FyUV!KXKlB0!rBBidSv{WL)u1qr66;qpJY@1S^idX6VH<0U{FW0nKV;BCTUnu zW$uyjC8j2;_)n+Xx4El`-h+>a-0pgxA_^(KKt!*iveR=J@7tE)Vhmr{Cd&qZ0>0EW zdIQ61IgFY4av@YZ8m_XbL~!$F$ALIuww$(MvCVR+T2%U*YN=dQI^jMyBN~*pb{_6L zx_5W4s2p}IYct$yr+b9{Jk4Czb8Mb(1`5;cG@Du#J)xKKesQ8kY2u?-8CT;~?xSi# z%Ey~7Cu)#IFg+2`WelpS)r-m~i^$gWqT;1URYfZ*UfSOeG~F*MrhD3Fo8HVETDIYa z)`~M{V7WALqeaVOA`5}|aNcWi{|rXcK4$H&S|^4zU}vde#$_+u=iKNI=S^m^o;yt1 z2hWB{KIoxwu74_eY_-9hGl?&O&3`{wJGQ%Qjoa`@bj|T^9yT-QNJ7L`Y)MO`X zE^}OG&7-M!X+;=R>Yol5sa>lL%WJ9FEhHy{EF~x8)IT>r;465UYF9jkTC$s$?}FW^ zp|$8@rmzxG))V9JJ$@1_O0ZTJHL!IuuyWMEs7K$Fn(BT?l?xb?3jTs`MMaMg5mr_! 
zD(JKxsoDUwVNWWgRV`Ah0!{_NRUoLfINJcIO6CC^Eo#OZDFv-G;Am03G{9=mOaqD* z)lCCv10J^(w%r)FRqf09%1_72xPm<#fR6P)?^giz=rBREKgpQ1tqn zvtUjRJ9j zx(f+lI295=ThA30gunyNSZ|65*{d(&NqHh$a0O$pILn!YsCbs*v}N-JVqY5SwctDW zB7K^{&&pFY3iIIImhKSQHep~zsBU%wDj zo?x>KDgOUN)cWfX0`p6RfT7PK#JPx?B4PSnE*JL|xB+0qa(|DEOSQ}~@+wN_nq0<*M2c*>(n}k>hT4>rS#>Zzu z8^68gVLBB5TqqQ;(p^_L)T#2D7jj^KzSE&j^>sWO&Q|2vP+ZcpO$O7#ux171cPRn_ z%W6RcBw}sfC@C63I22GmHUt_6<$jM-1+Xtrq%{u8#{xkYZ#qJ%{Q^4i8*wZl~uAb9efut>Eqb7VdL!=Tb5p2X{4etI64O-f`J*A^0>*kG^FgEUst_F5>(~ z+UR!TdCA}41O8A=~!KFZw4T+h}21>({+YybcN diff --git a/tests/vision/__pycache__/test_RandomResizedCrop.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_RandomResizedCrop.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 1be6bf1fe6819944f9af7307082ab07110bbb0b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4286 zcmeHLL2uhO6qal!b{r>ZyDq^npy0McZ9r8l$xc&X9@+#;V4%x_6vF@qfuWd8qE4~| zN=}+$+hx7(KWxYTC0%#gxtDE+Z67H|R-ro01{BDE2_R+iJ$`&7-+TOIe_^4j;Q4*| zw{CGFAxe0PiYiofm7Xf!1+Q38RK+XxX4FJ7+na@?;Fi5guOj=3?wnWc zRaJ#mXz2&FH_xi_U86H}_Jz`0pk-RYdy&r3D&9+Up4RZbK^N#E-Z!cGL}@Jjin*1Y zh8mY2?RGcpkl|;o>)FR_+Jhhx;gQe1P~WqM)c5vS=sah1ll#Fy zKwUg4e$1e3qP!xgBIPHQsz0mWDN*5R@mnnBSfxc;`mrdM15;*p?C-H9RWg1YFYtLp zA}`^E~8!wRaBT2M=WE#Kic(3l#0yacHTuJ$_# zE{$}D5#xdH+7S}O7g+to1M~4_WR77RZ*}1!lM@IBwkyK0-qdltP5r?{#KjbeNI0}^ zPDF1HRgFmGj;u8zu{yHWh?8AHC$}Jdqx-nK^ZBE_jPWuN?RFnuWF46@Q#VSIf$Y7;pI610IVuFi2zQt!zzbtX#d3+94T*fN<2mwAX zx@w{lUY}G;LL=k0<~llFW!&~Qe-AzXUANsA`#MQE|GVuqc3XZ^_~lzX)cR5=-4@mR zSMtn1uR}sb=1k-MtqB!bv#GDke6fXWLX(u3j!*Nep(9zK%4phQzy?v0Hq!y`I_qwJ zwb9+oEbt#D-Nz#3%Jet6D3G)A$+Qg`IH8?c1rQLCZH?ws4bT&*F&#tfB{0nC7??$B zOvk`1LgNxUM0yt*(=jlM(7417!2^Ycz(_abYB$4`LacR!z{+x$5Bc(yLT%?dgjzeZ zDHow8FZ6V)GM50{PJ~)2N@J_3-^ZR*wXE4gm_g}4~x z?H~wv<5pZb964^}48yqgFgWk0tq=P66aZb|cnA!Wi-I0>>W^DsmJ diff --git a/tests/vision/__pycache__/test_RandomRotation.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_RandomRotation.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 1902ab1a840ebafb0c5e29ff74f4b951f4553600..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3208 zcmc&$OK;mo5GEyAqGdUcCUIL71r`O0PynsBYze4}*lyDbNDaeHFA@ZS)~+Nbd||ng z?Vvri*Zzn0=yU%>e*>>Q^_)x5OFFxhNXl|7BQR3Jv#Xt%ot^Q1Gds@aW>$i4^5&o6 zpPQ2O7hlA`1T3Bac{U7K!m=rKWHBeKWI~c9E7e((14+7*21&xqSV~6`yrj8qWjk3} zA_`9ZEO$0YR?IoPhSR@DolTs<3e0!#I?lp;7jNJk%=fVTy_Dbl8_tvt^Rl1WJ8JLi z9)X{^wxy5AZ2jtU=Fq`-YIufY7Z}xTZ|G3VD;(=KcC2HE=?o+TZfE|<@M8_=0O&P> zfl0r~SpHr9Nn(i?$sZwtk&Kf#^=p#H0Di0;>VLqlESi0WT0PuCXTbt@@gQ}k z-kQxZLxwd1_24i@Y|o);H#Eqj?RLu=s`Ru4ChAr-ROGeXxJq=-Y?vhsE`!%bLM~Fi!pIprrMl~QasWR9D7PA+s0&CWW-}$F#v#A^cSz9;GX*%T&X_*H22FNf>}?2Atfi| z)SIo3=zUnc6|OV|QVz%0?+1=^zzb|2wV%y_EK)GK{29Ed$hq);A1n(3yye)HMd0mj zS4$eImZIC9-{MLPYsiQvPirZa3j6J6?ZdD4jx|KiT=1;*oK8s0jR5*0l7dZ6)yK!* z>_66!3ec|RH8gNchqly89WV_}mhyRo!q8xE!JRPII3uR23Nh@2IZ(eww*op~UF;F+ zLN_$L0?w4zt*0~%<3) z#5@a-b$wCgSOEg;UoNdRVAp|mmPNb3ZU8k)SJcxo0JQ>CtuLDdHplVKvS=6B4WMTI zDw7XKc$au1o7wZ2jaI@SL-p@ zMG2v9Hw8)}{6~2K7Xw)oIuPVVRF3%T+lb`1T8|HY*l!;!lw&BfB4KZkPK$+lk5_7r z>Znpf#PBC_n}@KvTnDv!zU!I|HE4EbHG94X=65aJ^n)7!xHi57;74=|IQjd&axyVY zX4syed*Yt=!_~)q=vbh!DCnLC-3V`oDLJJEBmR2G@-uT@7B-7MM5p2%@iV$>oIp=P zdBg8Nob|8<@3s4LQIdPU!G{NY;H`>yIz(~_2ZI-fH%qa%NBP2X@Wdov^3L1i;eU1~ Qm&nOVLgDm@vXw*s0RVX>N&o-= diff --git a/tests/vision/__pycache__/test_RandomVerticalFlip.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_RandomVerticalFlip.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 
889a86614b0cc1672deb59628f200db518e29516..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4163 zcmc&%O>g5w7`BtNNz=6Z!NO`+AX!{CkwOxGI}%0vfeMvYRkbT|NK{2`#%;aWu{CzO zTeKh{xb8pT0LT8NT)DuR3vhvF#*ORNPFt?)eTtYL6*q!kHTP$R7nl@b+SV0xZfbFq=EZQvPRZ%zeU97LhHuw@GWe& z#98&x&emf)a`4#L^6dlX^!=Mw9MJLMksGYIRrA{>UVvK=}xryPl(w?*Bq;c~EtQsFWOhd7gXRu1-F%*u2+fpH@|=FUdrfH)@tUBjyX{blzs@SQFV#^a!S z;Kqm3z3z_vB$yn5IiC>cs2e*`9CefB7DB4MjyPiIsmo zxJz%~Wg!7+1+9!=zrA~c$(dPSOt9YD(5o$}IU;CnX(5~1Qon#uau3u==yQ>-GWaS3 zQPy*QWCPWps9eNR86;HztH`;2ih^puDq6k?fh&Mj%>RVPl*!K~T}DZ(tZ1-;kuoXP z@suT%BBLTps{8aNYOW^L=TP$W>)`^mv-NbYMqEqJxAKqVt>o)e;V75h6f}A5a5+;m zYzt|yhcCR>F;mmU1PnKrd8T(FaXkFa#Ch3#@bp1jYGUQo)|pnos1r`((0%22%_dg7 z*8=~blxHHIH2}+)f!teY0d{bU#TbmOz)%)rFfKET#Tf9yEG~(Ik(XI4#()=QaY-DE z>GL?!3AsdgGlV^kfsSsp=W=_;$bg2n1KjIu1gN8dxu)hvDujZC9_H@R2K z(Csp2_BvTwC&aug_ol;`voOiVpe{?l7etO{YbKd(iD%rRW}b7whC-F(0B1@GGb5+6 z+-8{QB@DPc76zg)S}l}F4x^+p2~(>W1rAhDSOuDjUBQJ7M-q+7zGaBJ3OqHp{opn# zW<<4c0Z+?qI=G1j7a9WA*xVLRt_BAhrdI=+hIDamc`n3%eixMls=>RC(dclc=k`?G zMu&Hur9QaQi^rnFyN{?F-JzqSJQGnTp3{OpUz zk63ob+UUqQ5(22aP32=-0Liq&dKQo2lqdArF?K?D5pcu5EFK!YFVVcuunA$4)R zL`QKlzA~rjKV4YrTl9U@v~JI8dsEko-AOcSJPeOV>D#>#<K2bnGZ%CsZ6=sq>P$ zu`^pqt7g@CT^*hI>B6Qw?+&VV=z2k+UN{z0C@C0O1=( wo^Lr9d6e{Q$H|}gwzKxBACRf%e8!H+5##O0>y1)Ftd(l)x?lUK_F+T$7jrh9mH+?% diff --git a/tests/vision/__pycache__/test_Resize.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_Resize.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 1faabff3ac5ce6af5464f1d7b9547c406c365dbc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3455 zcmd6q&uimG6vrjoi5GSe84I zlg-ke+COCXQpmlh-uqAJ+EdTHlwR8J8#%Iq<%D)ySb;{KnKz$#qxovyv%j`hmT;Zl z|Ks?_bxHb_D(NeS$}`--2!$g#@=)r^;+gaEIZ2khLU&1ydzQP)=*bO>Ua4CWZTaDf zSMHW&iItqf_i}fYmBq8-EIG?RNZmE3=#=nWcUGJ-o_Cy8r-J8QM}8sI)_%jB(q2uD zid*}gZ98E2S?PH837ghmeGvM5aC+tju0N`W+#UtTKKFw90Snw$Y(V3$E*U=xxOZ^> z{s=`Vy_OyMjr^Sy=3eK&h36;on;iL#x$}GW&lpiwrkCrDeI6=ex}@=#`(E5M=YcXE z5IdqOol(g6*dN*{YVbq(W8Pv>%&uUe&idyPdt~TL-B*T<-)Z;Z}Xb&a00PKHGk*DpgZe z%$lkU{Grb`tGZ>W%2iRTDN5oE#@EM_aO}Qh!)mpuDou=v?L}p=tymRoVJ65;!A-ze z=42Scw|Ko(Of-83(Q@2GtR#J50vBg?{PaeE#Cg)D*FPV>d!J3VdGeB{NZIxn2`3J=#>9 z=kQ8ZtV8vhy$DCWma4Q><%z0%Li))x(MW(KF>o}1v}}?Flm?atmpxb@O1EW@QiGp0iHpN1kV7^$o9?v%>c~+%>d2JrkS9bpqcc}Kr^#x zCTJ#TCTJ#TR&&83EbuJwEbuJwtZbeIo&}x-o&{bj+q)KMEznw^wLoiS(^{alKx={4 z0=>+g8z+u@uxq0nCQaJH3k)Lal$#nDhJ z@FE=v^xa2!L{UyD^vFOYMKPsAJtdKyK9p4 zCnAfN0?2K+lT!dhBJx;rWU(qiNs(kJC_5{1#;G_J;3(s2pgO9cmBy=q=4i4+RZ@N? zJ8M)Et4>x(<+b|-)zpeGMY@r z9CkmLpS9r!qh_HztzxvXgG{IBcGp4jyeb4y(m1TjDJh$X@}Egc@ibjS zwmXCny5^VYauq;XR^;-VO^ww7ximTq!-Ns|wK&h`iNHhjzuhwI!vCh#|3`jjD_}aS zAb=XnTq*p?YJe^}#kzs~bp#s#mO8_G0tH(5BhR^T62w!?t1y0_^$F?dhB4$+7`Hvf zylLaGP>E%5cvz(3pk*9FN@jSDWH=&%{0?tM84OyNs00bzYVKMsV+I7l>H{GMNZ5hs zWH(}17x2GCuepPzlglkC5znv@)^}er6V7=bnA^*0(d-IAtBb~UK^UG;k!1tlvhWPO zT|vO&HMbD-XS#(mOsjW^(H6ej?g_hk&x=B#>>$k7A#1l;5cbhKs>}lfQk^RV3(@09M2>%itT7-l2QaK z_z+m;KiI%0SnOD_W`V>8?u--H&_dD;Sn{2@cOITO=iHlwrIIee^YHz*?k4m;b0_^M z;M{_jj=>-a$pfh^i&pV7iX=;Zw!I|BBe`}CMwEf-Yi&)$GK0LYw{=;<8p^(w+XbwP zwuqKc?v2zgAr)!RmQfz*&@Q6_Dnh%0N~jF&2_!$2s>@&C+tTBz9I3arx9&I;L$|o) zJ3T!0U#zo`xc#w5y)ZBtaRS;6iBHY$C~z6XoPp_}XCun&4)J<@7GO$UzU%Cfp_7I8 zF1%mQfnm~H8OiVDR}xd+W?sU*dh$Dk-wIBYdFFnA&mV?_8R8H}2LE<4!=c9g z)T*9wBi_kmIG`b^?Ck7VW~*j^*{vA|eeN813y$nolTURW7fuXcYnoOPB$$o{41)I$ zBUbVJUd@P?9c&Zj;x&_Oq%r)Q?Ga}fsc$=X!(faFyM*w#&M;-P9wR6m`w*4pa6c;WdA1Acup4J! 
zXR3k8%8H!*v6?5#;QWzr!kOo!R{k-m4NmGtOzP#D(J-xDa1t(N@=CXy@n%X3R~BEk z&X;Y*U%NV8mXgDK<}K4Ypq(rMXe7r?fg8?dY|79})6u*DfixXDS%Ju0I$nHCkm!=o zH@3GP8PiM?$)Aj-)9<*MCU!CdyjQvSI}UORBXckW<+aK1$z9K-JT6)_&M_2 zta8H(xhNkZ0R)TUl&Sh7Dv>Bc+I2rfqXE9jf8Hs-(fM*wDax9n@wcdzv_et;2|`OW Ae*gdg diff --git a/tests/vision/__pycache__/test_adjust_contrast.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_adjust_contrast.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index ab661b644b708fbf5c94385a515b91524902175f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2480 zcmc&$OK;Oa5cWE0;v`P`pn#A#SV*Xe6r4v}C=x}b6-AX$Rh65o%5t-t)KzRpyPK9) zaDpGw9{WQO{01MmaN^tp5*L`=B(7Vap*&aG*_~O>dUocU*$wjftOD)Yn@`R6F#8~w zcvFE{fX;S-UDND37Ue?MYg=jeW3RyXl zm7|VFaO$;U<#8HoFcxqIXJH)0IjqAthV!@p<2Xjol+x%&__wlJLSg#R+RCEC2u$@A z&uNi9e^BEAZEWwl%nf{#Q^#k`fO^bqcKrqinbS5MywhdeZUjE8Gu{w;4jO_<68aMK zlbb-ea)>ZGLN67r9wuJEsaohr6=wo_606QSXk?9kbG=SLxgkyXuJ9te42K!ZF|C#{ z-`mOL#Ag95ZftB+&DpX6WV39X&Vh;429}%||Fqs_Df-{QiFVUzz_G#Px>=1@DPi3< z2jTL0rx#tXRW{CPpKcqbdaB3CP~+q|-=j_^OkskF&%;EpwG$?2*FQ#<-r1MlhG=4P ztWX_7_W5yTJMc(_28~8gX}Ns6yH#0pmI8m5P<{uK-3lj+vq~=-lCjT$!tCsaIq{X< z0F>Fa>##4=fFzNMl3ylrv;fQxp+g0R1U!oW7#=kNkC`4k?v;(2S>608c+_LCn!AMF zw9vcT```V(UJM??008!#2Az(8(`ZVNoFF2WJ(7o6JRJ4gAc%(}OGiO6G#m%-B4{>g z;2CQxPmO-g$|4f2#uNFnIV*QEeX>`aT7R-QRW^#?+_aFCxVhyx}njC}+{##GI@-`K350#KbHT2VSc_TP!DJNf?7O4@30qiumY?AXkBuCPHni>$bV;vru2??AsA{!-hKp zD!b`02KRw@Cs49ST~5MGBns05Sr(M#Mc`1GZC2XpxLa^3Q1K$NCi~^V6^{^mu#^yX zWCR%(MBEj!Ldh(XF_c!uN}Mcqvd(D_ya0FGla16N6{*~?&&*ZR%%Iss*xIQ?AZWmnSdP+Lh@wX}ua zPC6rg7|`;@#)hFcsv3~Zsx}yd5SyLeJn)~@v>kXc+!ihtPf!we+YFQ| z30Ie0uT|9sHiLblOwZsr9Vv{wVtdr?L>Wvl@mZ7#wsxWv?fS>aR6F~^y_OSTax765 zV)SjXwjFq+MgzwQYAu&-ceiTm_Hy9w63V7A*{v}Wvar^3zYwee0QAm&RNxCN2f$>_ z+=4Ab1xOmnDE)n+KuaK82ouWj!l7{QA46e=Lt(B5g@;vbMmILE1ciD6Li5+Ko8@*7 zdgndr+a;iINyH7^7=)URfWK&llL99^f<01%Q8E(s`=CfhB2Py_Gc*zhA7dCcY2az= ztIxH5YKk%u&nENrx~VC4(tWa5o_g_YX{xG~!LRj7Rda%NKo`n0h5O6byFTwbHe28Q|Q{ljE z;D{c~1mxz1!}?6Q<3e2W`gn!>F!cG~+^7$BsJW11G4QL}G!(p_;&mll&n-Y8lDN*% z$*Y9szrb}PhIPZZ8mz%hzu=mV!GniExUtF?-QeUVkjg})Y<1l>bNw(<7d!h_EZw%? zvj7!bx5E%hKL6}fvPWG;qFk(svZuN{7>j~$Q;}X~*6z4lQ2Z(X!7(TMwZP^76xQif zir*v0$v7wcn;^=ONGlOQamz$K6O~OAHRZ7v;BK2d;r|yQ5BA-XDyvA572ZoqQ7NeE E4?%G?g#Z8m diff --git a/tests/vision/__pycache__/test_center_crop.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_center_crop.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index c23fb3af318a38c47c237b1ac0d3f888ad80b6d9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2630 zcmcguTW`}y6!ti2;v`NA1+-d;2McLMA_eEtmW#ASTqIB>K-Kc*D6(A7G&PDn(Rk9* z3Z8cV!Ttew>>t@5;n#hEXC7AC7dU5}I4*)-go-7f@j2re&z$f3&Y0KhRSmAc|M_o6 z)->&3bn=%D&I7pPJupE`nVh#j-g_OaV4xeRiZJ2 z7XKtpja5}^(j{8@OLOY9Obuuov_h-UF4Gz{p}j=wv;pm9N?vNM<&W@f?L~_u<%e6F zkGz<{qq!M+BR2C7Z%Hov-Mt_Vcw|fAMez<7VQlYABVR(y8{0mMBonSL_{7Kg<`;p7 zBHWMR{&XFT)ZP(F-jlbQ)ZZ1}z>Y@by^i~VwG>`>BN$P&W>*kSxR93m{5OLqt;zCY z>q&0Z$l6F}vL%aRE}DbEplh$Ut+^Stt*-(Q$tIYu=~(+SW*D}zHSnpNOr;wKhYU6| z=(VlhFzXa7o{lBNpXAYK2H~h}Z<3TKqkv^n;M=c~f|y1} z#4#ra3If;XlpSfp1RVc=*xBVF>j>`qyfX^q?sU7e21CYq9-YP5`zhMeYpy zuX(F{r2aicax_@!?EV=()kD4XP;5Oc?LGT8Y9#yAamWV<`41G(v+HN=K46*Gk14L6 z1>M6B+0uWeejtad3;nET@>%a*tbD{JNXltSS$~c%T|sjVOlu`Ewx_{Z22q@t4<-k0 z=Kboz!39T*?Zq)1O8EC8*uDrPODb6?DbHh77_3etPbd{TWp5H}!|5dOKh3#1t4jg? z8oBeX0Qx}CRiO|?PvM%_=f}$#Zsk@>kJ>_m0J%vfd*SeZ)D^6DZYPYH? 
zuI7}!Q@d+!Rn|3U#VLQUbn8yV(NM2Dt4P~UOt&N}M5j{2q2*!T&$l@|>) zt~@!|e`<#=UTgclJ#x>#Uw#k;VmLnbLNAy$BVkX&qd@p!^JqRDMrgAq&7lZpLjt;Z zRJ@cR&mh0Phays5sgC+u{Z@$zuZrKmm67_oK#stO;tM+h8CCuK@%&jJB35meqX5>!8Ky3BWPSV%wF+=BuZW_APH?DN)y|$ru41LGYcXM5PhR(Jj zn9X>umjRAh5FoHlKihvr4J?2LwhI9$-_8#1ak3C5a)9!@ksLZW%;Z4fc|$osO;~CI z#pE}cVnT-r!%cviot!BqbeOQz1gMp_)PfERmRiJS-ck#o7A&=ZV(s?(DGmKb`kKB( z!OYMn?onjuW7ivvQJ;8DG{$lg=kOK3bhL28`6NQ$bGaM0Jbz?h*_T6HYKzOs#IsVY zMef(piLhsJ*>N3r8pXxn@T<5e=F>B^TbrHA{Wur~j(etv8aDMG4_o8FcUvMD4ujUn zi^lWA)`9&jm>#<#`p|KYTag<^VJq3M@?$`;(wv>fYt%IuBH{+@ZGeRuijrDTOMh;@ zE7tLGZA1wQEkw=BA5!;^MBwWO`=9COu_Gfm?OhD0e;hj!syTH}wjO-(>C*>>zJ={! zH4J?iOak#}i?^|=*k9X?hOS=*-qHo3VadW>E`eBqH)kbLK_!TUxPrwSfO|ptQ>;-z zbSDKesj^5Ox6rUiURB&h%gyBdlG5`=$h&UurQ4SREj1y_R-SYken~p9D0y$7?K)K% zj>QI%cmtj0lMJ1!V)Hh1+Mg8v#|%z%Kq$>wkaj*Zb-FhF7rEUXJ_^JHhS3} z6}wdIP|?YcW3*8i;W5HvgvXZPF~TD_LL;yxSOi5#(lFG3$=a3iMh+sslSx+IKEmuk zNF}M>%MX#J!R8C=r5{V65?2WAcnvmt%U6G^5+ zQNBf}TAyb^^}cxPR-uZk<>h7TjU+$#iaeK3Vj#SIC5g$1XkR@!u1jY?q$K!X(tMkY z*!D9tuZeqTX>7*Y;oO@<-ZYGBk7uWY^y{4gE(OT6y?1_MdvqxuTy!DZDq@w2J5b97G|=;~SUiv>;B{L)}ePe;mwGDZqs#jg&ks;}s+`?Wz$ z*Lc+_{h$vvoHeKXqc*5J6{m{lrnBzU@Z54XoI0Mj9DQHg-uxAFY0tLxr1Hh{r(fDJ z$7}tmZ%_Dn`{lH3VTt5nhWNPBv&VRFp*)e-kn}24o z7X~{v*a_=1)1ceJ&Dclmn<(Pqp~uIPjY1J{!R)|cvHOy*YnuS_NF`&a)ZdTM;xM0Y%k^uCixR|s?{|l58_ZfILN-DEm&@; z9fc0jwJUarI*M$8s5(q>w@LMF{%c8HNibEkE33(Pq$>Hh@)X4yu(rG@H0)+f!#ko zZ61d{Z;Eg<3Y!yG9#4;&&+W%yFylf#boi_(c`V~*3QN@(D!??N(`19jh9g9Xp?R0W zK^3f|7xdDfcRmnXX#8X35EMcvI(&uLn62f;q66F=cN_PAj}GSLqr=w%L1STcR}T*4 z{0gvXjL1g|;aWiwPr^*TzY~zi=M_Bh+|5N^4Zpv3D*7;-79T@m!|{R6qyZJGn&_f;Qqc0`okg(qFS8n^Umu zou?0~j;$4O(r{p6PO}4o&zn0}Rd2#3vj@UF`8B6SMLESV3T2(E>L(OhSh|IItX`h| z7G_rI5A(FG75!NYesO2ylXcfb$dSgheAN`&|yjZ11Y(j&xmT)uMx zM_eXUW`G5?WM>XplgW5JdE;KAld^?cT-6Us~@a z1hr4_hrnb09^U-gr#@GeP+!`a^~L9qTmllp($0EkXLe`iH#2rruh%qqde=UTM)3A0 zE{cy1iw7{05D3#4@im92Sr1COMzo;pERbxc;#6Qq_p5>77>ZZ&7lWEpBN{hY`89Et zcumbFTVRzpnp0<0X28767FiAE6}H4om@hH%t7g^zf;+XR7D=nW>}+qj35Qp6J8;K* zx_-4LBQf0D_YyA(n^L%8GKxfyG)I$gD1qnt&ApND#X~&l_#yC8hVc-_-`9ai?JZ&C z9r;6(`rFd);KP`_)6oO4qV&|g0U9+reY_x!grw@?GnwaEL*0Zvsuk46^yloTrsQD~ ziAFEq#QS=^j!nCL`vjBr)@{1c2kr@WueWK_JEvrSu<4Iy$lkSSr+rGe1J`xB{eFIM z-izDP@A5*`u9j1N`t z2E&N)BTblqz`sAW_M(8dL^K>mt+6NfCcCX2_hA(7b0KdtzTc8Okx45Hu38Kfvd#D~ zUBbP=5E5k2`3^xffRu?&%Ku%yB9>t>7k`0?02sgeCwJsa0(>fD%JVr`=N25NngZXZ z>mS2}iLqhRv*5-pMBJu#XCUHvvsK@kfhHEfeNjQOgaon3NFCmaf$iP~MlrC<;u3H! 
z46GX$i)SRFfbMKRq0=C%V$S_8M*g#_NFCJ-`9Wjt`QxoMn>M;oIt5=LTbuDj#@a2oU?*CMFpq~r2w*-j-?bH3(mtiDj?dyJMeg=m8op`6Fx@+ z%P+U8c+GFz_oIVPtwjavQCUQ(2nDwLi?Ck5k3y?Rt^&CjgZN*?5LRD@eWpVy&je?!@z@7bdCsAU+)AtM3QU{-Iqr z-6Vn5j^Ay;4}>Rqx|p-l>M^ed%Bs7&LS?g4bz^TAI=#R@6X)u*O?dcIFgV^Sp&g4z wt{}k=fNEAMo0WluMyFby>UPGxAYv1r-^U+>1l_*5Z0aU4bOXn?#;UpcKhnP@dH?_b diff --git a/tests/vision/__pycache__/test_normalize.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_normalize.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index ccf74d64bf7e4ddbe88a350c35b412d690f16f60..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3447 zcmc&%&u`O66!ti2;-*epU!)?|0Q4dfH-qm?PYmyl8oK_V7E&Hmi)$^_uhPO-h7^Uj;qy*2G@s;zYhOc z)wDnHC4cGgau4n#0zoxOdYVgQtA_<$BU)H=m&j04a!a7md*#q@4cS-dEr%7iLNsR3 z;%nlru!?L=xI&%ddv*jJelK{*x;WbSBJfaz`vKga zZh{cnJ3`5O@=6o>yTb1OERWhe6u!Xr@2Fl@o1B<7p@$`@nZSFWAsS(oB(C@668iKtL&$-`EOO#O-iL?+O z{FWB@AUY$i**}%(@wzc(XBszQssH|^aU6%N!Q*Z>ZX5;TcyQ2o>OY916UN0)l$|sL zOGMHbuA+SL6pifuX}W@XuM3RyS~p|NSjMGj;upMU58Ig&1V6G z+HBfQWYkb(sT)zMg^IDb7^S3_gZ^7~bBb4ORBWTznW4Rnid__UKs!ynmJGNx3=A-) zhHn8T1TIlp05fd>uwWp8T7WbQ1hDItBcU9Cni|8AvvVYr15ncja)OS8asaB+QA_#t z2sX+u-U%Q zvZx7+&%*4uz~P36*yNJQqiC#Lm>HnXj84uoC+0+#v%nCicsH8F|23Nz-4=r7#GL4I z7SiF8%`%?gG$#f>UsK3D0M{hNa$>>>&9KVISzzEhmuv>Z&0O@`2$mCbU~?AI;gZef zMYoG!IWY$|XCWOf*=%CbRUW_P5!Uu5c2k>Qfu7ox)HoOfJrP7nYToOgdg^V%gPIQN zFWXNNsMoOa=j=HT1WT7yS6UwTRlr%6Yd+^?7}+iR{onvfH;(@{?hm6%bS ziZ4rRs6{;}C}mYAPgl9y%OG1_HxqZxRH7=B{i8dg0u0KEh zd-%(`qWq2z=_>;dPv8zuprHy?U8SqaKI3IGimG_I?ut4c$#?TGl5q>3*43me>#lmm zZc$a3Msq)@-8EK}eTlBn{Lf0aObb+lex0t;BJ>+{jh3L_q-DAe{T5ZfQ>q((fNy2L zsz!yU2Yb(~kip;5o@X7g`1$)UMZoRj6DM>6zb3flhr@t-VQo0}Z2>aNt&J>dqn(RO zg+DpCpTqsvXV3`cjY`$G>W_-ZyvhCmD>+i%W^f%4Lw4W#6=oDkd^z4I;DSiE3x!w3 zLadOyC??Y&GV1t8q(jU^HgsGU1PwwGQLHxb0?J@$#}52ZSiabqS#mtIB7>?4=7#~V z^pg1)d9P>a#O&+u2uZK0lU5(3?}&T7woV?-=|0lQ;|03AIx!k^$_BVI%zi&H+}8;e zlM(AogB-enC8{&OoP}doz#3j)zACUDoy6j`>+z#d>B6K1lkhqHBXVh*7fZ%QpDlMHX#hXrc7eLNv0Jx?Ay9{Up zmSccLqdn)xcnD$uv9Son2Ef<^v!*dO&on^{X3f_85Cm?5S@Y5VigdmLsNs2R*04bU zPn$CQOXJZrK#<1c1HJ*0>+$$9&4EK6c;sO36^Vf<899Ma8m%8hhCSHu=8QRFNLI`? z>^7vRQT-5Zlrh$Pc3Ro__T{r3om4QbRh`&@8}LpAb1xlK(R{L)Ti_*`t@9i&V3v>? 
zq5{y}00@cWk$hbWoBN=YPT@M=2Fdj)Y^EvYmQ!dX8yJ&!a(HnzbeTPWcBS(33bO+* zT(deObJ(HUXe>s~8FMR`tOlg6-B_jo*KWZYIfqb*ar~a?-i^;A2R|VO7eiSX z;;YzfVS@#lJQXFcN(jp+Q^%@2VCC7WeGOIl*k#|~S4D`6`C`44DXCgU!~22uxwcih F{~uY8Jl+5R diff --git a/tests/vision/__pycache__/test_perspective.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_perspective.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index b14f3e96e2761649bbd501de76757f5a4ddab3c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4299 zcmeHK&2J+$6rV}bBu&%qwtOr|97b^1M4BdP+Eg3}+IF?7B9&6LXps^{(KO?<1CyDN z$LR-x6Z|2&$KK(enJXvGTo4!d*^@XjhT3ou%e49K3Sy5*~N71SvNrcRzEbtwVg+|lpbZVwi>zYPqYdUW6Io7wj zUBmdu_?$YY4y!;tWqAwpgopB?oU#g}IeBR3=l zu}UyvfpbaRQi)pvBw}PS5r{{`R=Fz(FxeJI0Ic9ui#hF4LcBRBb%2ghVyqZt*BueX z0*>oKNiG6@k`g81$Yt12&b0&;OhY+S(~!qt8n+y&vfJ8k9lY2%GK~S4lqKyb8&ki| zM0rLo*#)(NC~Fg&cr40+NzXs|DoWFlcdfUJ!IkjoULU@Bti7G8 z_0(Z!qm$~P_1yQ)am?#BIj=GjvalL^y_obAMJmBnw8SBLeZ+8Y;|XYnd2|^)rDy(F zdyB4MGB-FWk5V3hKfMXSIf4Bq0C|uP7Q+D&dxNC}B!_&<)0_g%gm}Ysm z#5*2Takq?rYHZZkv7pAr8hwP7`(xu6EM&z0uP!I}uDGNyZjLXJ&2CrfB2(VXjW$85 zS8y4qXc8~JOhhiLFye(iEp_B3zr<%}Vx<$O`Zzat%^7fU@AI2e!psAZFn0jNFkL|0 zV$zAFxR;Fph@lc0O$gipA|(}C&5>rK{;_GybcyV?abr$fBFH8JblMU@Hnzb_HpLCN z`EOt(9UwHCC}`b_cF+vGP=h ze?EWnn(4*y7jfCtJn%hoBXgHCG{-YGFIl2AiY3b5O!uYo&0PSgsk};8v2uSZfA7Cq z=7{(~mt4im<5VivnVb!G6Nw%pyhOZ0>WUX6Cwc{GqXIvbXp&u==#^K)K7TXt+eE$@ z;C`aq-=o&mL!JqZ+SCI;UU?R9*+I5m1B8^|-*!nwnuk{sTkMAWtx~-2weo6|KN&eL zbG$GrJ`1jTa`t`?wLhx#iWP?Z`GyxYB^MM=t7uU!MY+jZ0m!1Lx2On@cFqc%6BPIq z{;Rd$9zTJ_JF+*~N@I&Y=G`Lic-=1w91$8qK&3bmWs<0n@}CTSd*qT&`J+ALdVIK2 ROcnKfD$m=y`FD!-KLJWy9w7h# diff --git a/tests/vision/__pycache__/test_resize.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_resize.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index b713a021b4c2444dfa725c333ed41a19eb713bcd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3565 zcmds)&u`;I6vyo(ZPGODZkI}c0Lj_|n+TJ{zmi4TqHU`MMJiR*g-fC;a^q=herKD% zpKRWfq+h5q{mY>86jv0Wa3n|eq_!;X89$qmWXaF9m*k{pxxI{@jFZEPpSx*POglzK6?vH7 zK4|UO5yRh7%ePP2sQ&VUIOM&vb2oCspc->Kh)zQ8N7X|Xxi49d{Kr2Te{#5XasBo_ zidcFjJMwG!TPe=G%6@~$PvzGciW_5R_wAqHQB+1h*YAfsR>W{=;|UM_q-iDsW#rHb zV#fQSXUB-}i_l?j;#@r$1U+)Fy)lQ22fFTj(Apb!i%BV_-6G7YZwCY0>qd+@Wvw#l zrKwT2l(L~JMkV!e{ZljwP&Hr|thKvZ*R&MOAvC7xJyLZds}_E-Dp8nf9;t z2XWth!Mt+0t}1nSCHA7S*jB^=TbK!QU2sz{9(~}&h%K3eG6Gcs|Jbdiy0V2SS)5>6tcJ>4q6msv6z8T$l``L zFp97sMmnu`Usk(c?X-3$Gt8GDO}?WCjUdQG%+=!IkJ21r$`BlViSIIcPk zRcWfqV^#Ttv^Cd6BXN@az|jEG=94s_G_W+lG|=?c~+%>d1uPcuO?K{IL2Kr`plOwdfwOwdfwtonk7 zu)wpxv%s^!v*z6CP~@Sm!7=H>xtbq9ib#B92}#} zj*>K5xXAA`{qf&P?S$724j@_>XOiDd9MT}~DlD8t|MWC6JrWx}ekA@Dips{Ya6E9m z*bSm#>1qG6o7Ud#A{RvJR<)xDc^-XpaCX7nmKBdo`h}OZw S^-`uJ7cvF9HVf~SKKu)$t80+} diff --git a/tests/vision/__pycache__/test_to_grayscale.cpython-310-pytest-8.3.3.pyc b/tests/vision/__pycache__/test_to_grayscale.cpython-310-pytest-8.3.3.pyc deleted file mode 100644 index 7ffb0bb5304f2fb16db5d354f22d537d2a41b444..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2121 zcmd5-O>fgM7>?7fP1CgdU=l*&5FwNpJMO@b;KD!P5{{fWb3sT5UMKy6X<7vg;l!`~+OcCl&+ErtE+-3+zDZx&Z!&`L zkvr2#1ZNAf-v@&QjBG(eQ7JkJQ9y!|)aFp!lhRVqBid<4(xj*@VP_m!laW9qoP33} zERmy9!E-qETF`PhjU_1aID=&<=W!M*P%hvc&O^C~(V)NHeR|k z)$r9ld1_ck-HfLyyX2^_^7!7)N=Yr$OKQDXQcchH=w_kTXq41dc#Fl!P)hv3W-xm1 zr|lLjr&Chhz|p;c^#Z1wUBh*W?Qd3#!Z=w9nt zn4p1shBT!&h_UBELV(>ypAaz{CdoS(PjJ^fRD)=KKygddJu*L9BhTAxXE(E;f+lWYSSRqZu72rZWh2?4$ix)1?Qp&j7~-z&i4*)&Q{TYHW(oeo3^>j!?Y{xcF0HB+4J@ z@!L@!btRxHOn`nUB+mXHWg}_{*9k 
From 233546eb4c5448026d175565ec0a8024536889f4 Mon Sep 17 00:00:00 2001
From: guozixu2001 <11324293+guozixu2001@user.noreply.gitee.com>
Date: Mon, 30 Sep 2024 17:09:49 +0800
Subject: [PATCH 3/4] fix: Use PIL Image inputs in adjust_hue tests to resolve test case failures

---
 tests/vision/test_adjust_hue.py | 46 +++++----------------------------
 1 file changed, 7 insertions(+), 39 deletions(-)

diff --git a/tests/vision/test_adjust_hue.py b/tests/vision/test_adjust_hue.py
index a67ecc6e6..d78b2b1f2 100644
--- a/tests/vision/test_adjust_hue.py
+++ b/tests/vision/test_adjust_hue.py
@@ -24,64 +24,32 @@
 def test_case_1():
     pytorch_code = textwrap.dedent(
         """
-        import torch
+        from PIL import Image
         import torchvision.transforms.functional as F
-        img = torch.tensor([[[0.5, 0.5], [0.5, 0.5]],
-                            [[0.5, 0.5], [0.5, 0.5]],
-                            [[0.5, 0.5], [0.5, 0.5]]])
-        hue_factor = 0.5
-        result = F.adjust_hue(img, hue_factor)
+        result = F.adjust_hue(Image.new('RGB', (2, 2), color=(100, 100, 100)), 0.25)
         """
     )
-    obj.run(pytorch_code, ["result"])
+    img_obj.run(pytorch_code, ["result"])


 def test_case_2():
-    pytorch_code = textwrap.dedent(
-        """
-        import torch
-        import torchvision.transforms.functional as F
-        img = torch.tensor([[[0.1, 0.4], [0.7, 1.0]],
-                            [[0.2, 0.5], [0.8, 1.0]],
-                            [[0.3, 0.6], [0.9, 1.0]]])
-        result = F.adjust_hue(img, 0.0)
-        """
-    )
-    obj.run(pytorch_code, ["result"])
-
-
-def test_case_3():
     pytorch_code = textwrap.dedent(
         """
         from PIL import Image
         import torchvision.transforms.functional as F
-        img = Image.new('RGB', (2, 2), color=(100, 100, 100))
-        result = F.adjust_hue(img, 0.25)
+        img = Image.new('RGB', (2, 2), color=(50, 100, 150))
+        result = F.adjust_hue(img, -0.25)
         """
     )
     img_obj.run(pytorch_code, ["result"])


-def test_case_4():
-    pytorch_code = textwrap.dedent(
-        """
-        import torch
-        import torchvision.transforms.functional as F
-        result = F.adjust_hue(torch.tensor([[[0.0, 0.2], [0.3, 0.4]],
-                                            [[0.5, 0.6], [0.7, 0.8]],
-                                            [[0.9, 1.0], [1.0, 1.0]]]), -0.5)
-        """
-    )
-    obj.run(pytorch_code, ["result"])
-
-
-def test_case_5():
+def test_case_3():
     pytorch_code = textwrap.dedent(
         """
         from PIL import Image
         import torchvision.transforms.functional as F
-        img = Image.new('RGB', (2, 2), color=(50, 100, 150))
-        result = F.adjust_hue(img, -0.25)
+        result = F.adjust_hue(img=Image.new('RGB', (2, 2), color=(100, 100, 100)), hue_factor=0.25)
         """
     )
     img_obj.run(pytorch_code, ["result"])
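For reference, the before/after pair these reworked tests exercise looks roughly like the sketch below. It assumes paconvert maps torchvision.transforms.functional.adjust_hue to paddle.vision.transforms.adjust_hue with the same (img, hue_factor) signature; the tests keep only the PIL code path, presumably because the tensor path behaves differently across the two frameworks.

    from PIL import Image

    # torch side: the input the test feeds the converter
    import torchvision.transforms.functional as F
    result_torch = F.adjust_hue(Image.new('RGB', (2, 2), color=(100, 100, 100)), 0.25)

    # paddle side: the translation the mapping is expected to produce
    # (paddle.vision.transforms.adjust_hue is an assumption, not taken from this patch)
    import paddle.vision.transforms as T
    result_paddle = T.adjust_hue(Image.new('RGB', (2, 2), color=(100, 100, 100)), 0.25)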
From de2201761d29374ddda299869e7799644bb97d67 Mon Sep 17 00:00:00 2001
From: guozixu2001 <11324293+guozixu2001@user.noreply.gitee.com>
Date: Tue, 8 Oct 2024 22:24:49 +0800
Subject: [PATCH 4/4] feat: Add matchers for torchvision datasets

---
 paconvert/api_mapping.json | 108 +++++++++++++++++++++++++++++++
 paconvert/api_matcher.py   | 126 +++++++++++++++++++++++++++++++++++++
 2 files changed, 234 insertions(+)

diff --git a/paconvert/api_mapping.json b/paconvert/api_mapping.json
index 0340ed6f9..0ee6967db 100644
--- a/paconvert/api_mapping.json
+++ b/paconvert/api_mapping.json
@@ -16354,6 +16354,114 @@
             "dtype": "dtype"
         }
     },
+    "torchvision.datasets.CIFAR10": {
+        "Matcher": "Cifar10Matcher",
+        "paddle_api": "paddle.vision.datasets.Cifar10",
+        "args_list": [
+            "root",
+            "train",
+            "transform",
+            "target_transform",
+            "download"
+        ],
+        "kwargs_change": {
+            "root": "data_file",
+            "train": "mode"
+        },
+        "unsupport_args": [
+            "target_transform"
+        ]
+    },
+    "torchvision.datasets.CIFAR100": {
+        "Matcher": "Cifar100Matcher",
+        "paddle_api": "paddle.vision.datasets.Cifar100",
+        "args_list": [
+            "root",
+            "train",
+            "transform",
+            "target_transform",
+            "download"
+        ],
+        "unsupport_args": [
+            "target_transform"
+        ]
+    },
+    "torchvision.datasets.DatasetFolder": {
+        "Matcher": "DatasetFolderMatcher",
+        "paddle_api": "paddle.vision.datasets.DatasetFolder",
+        "args_list": [
+            "root",
+            "loader",
+            "extensions",
+            "transform",
+            "target_transform",
+            "is_valid_file",
+            "allow_empty"
+        ],
+        "unsupport_args": [
+            "target_transform",
+            "allow_empty"
+        ]
+    },
+    "torchvision.datasets.FashionMNIST": {
+        "Matcher": "FashionMNISTMatcher",
+        "paddle_api": "paddle.vision.datasets.FashionMNIST",
+        "args_list": [
+            "root",
+            "train",
+            "download",
+            "transform",
+            "target_transform"
+        ],
+        "unsupport_args": [
+            "target_transform"
+        ]
+    },
+    "torchvision.datasets.ImageFolder": {
+        "Matcher": "ImageFolderMatcher",
+        "paddle_api": "paddle.vision.datasets.ImageFolder",
+        "args_list": [
+            "root",
+            "transform",
+            "target_transform",
+            "loader",
+            "is_valid_file",
+            "allow_empty"
+        ],
+        "unsupport_args": [
+            "target_transform",
+            "allow_empty"
+        ]
+    },
+    "torchvision.datasets.MNIST": {
+        "Matcher": "MNISTMatcher",
+        "paddle_api": "paddle.vision.datasets.MNIST",
+        "args_list": [
+            "root",
+            "train",
+            "download",
+            "transform",
+            "target_transform"
+        ],
+        "unsupport_args": [
+            "target_transform"
+        ]
+    },
+    "torchvision.io.ImageReadMode.GRAY": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'gray'",
+        "min_input_args": 0
+    },
+    "torchvision.io.ImageReadMode.RGB": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'rgb'",
+        "min_input_args": 0
+    },
+    "torchvision.io.ImageReadMode.UNCHANGED": {
+        "Matcher": "GenericMatcher",
+        "paddle_api": "'unchanged'",
+        "min_input_args": 0
+    },
     "torchvision.transforms.CenterCrop": {
         "Matcher": "GenericMatcher",
         "paddle_api": "paddle.vision.transforms.CenterCrop",
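To illustrate what these mapping entries drive (a sketch of the emitted shape, not paconvert's verbatim output), a call such as torchvision.datasets.CIFAR10(root='./data', train=False, download=True) would be rewritten along these lines by the Cifar10Matcher defined below:

    import paddle
    from pathlib import Path

    # data_file points at the archive inside root, train folds into mode,
    # and download passes through unchanged.
    dataset = paddle.vision.datasets.Cifar10(
        data_file=str(Path('./data') / 'cifar-10-python.tar.gz'),
        mode='train' if False else 'test',
        download=True,
    )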
"min_input_args": 0 + }, "torchvision.transforms.CenterCrop": { "Matcher": "GenericMatcher", "paddle_api": "paddle.vision.transforms.CenterCrop", diff --git a/paconvert/api_matcher.py b/paconvert/api_matcher.py index fa0dfb2be..63123f162 100644 --- a/paconvert/api_matcher.py +++ b/paconvert/api_matcher.py @@ -4792,3 +4792,129 @@ def generate_code(self, kwargs): self.kwargs_to_str(kwargs_bin_edges), ) return code + + +class DatasetFolderMatcher(BaseMatcher): + def generate_code(self, kwargs): + if "root" in kwargs: + kwargs["root"] = "str(Path({}))".format(kwargs["root"]) + API_TEMPLATE = textwrap.dedent( + """ + from pathlib import Path + {}({}) + """ + ) + return API_TEMPLATE.format(self.get_paddle_api(), self.kwargs_to_str(kwargs)) + + +class ImageFolderMatcher(BaseMatcher): + def generate_code(self, kwargs): + if "root" in kwargs: + kwargs["root"] = "str(Path({}))".format(kwargs["root"]) + API_TEMPLATE = textwrap.dedent( + """ + from pathlib import Path + {}({}) + """ + ) + return API_TEMPLATE.format(self.get_paddle_api(), self.kwargs_to_str(kwargs)) + + +class Cifar10Matcher(BaseMatcher): + def generate_code(self, kwargs): + if "root" in kwargs: + root = kwargs.pop("root") + data_file = "cifar-10-python.tar.gz" + kwargs["data_file"] = "str(Path({}) / '{}')".format(root, data_file) + if "train" in kwargs: + kwargs["mode"] = "'train' if {} else 'test'".format(kwargs.pop("train")) + + API_TEMPLATE = textwrap.dedent( + """ + from pathlib import Path + {}({}) + """ + ) + return API_TEMPLATE.format(self.get_paddle_api(), self.kwargs_to_str(kwargs)) + + +class Cifar100Matcher(BaseMatcher): + def generate_code(self, kwargs): + if "root" in kwargs: + root = kwargs.pop("root") + data_file = "cifar-100-python.tar.gz" + kwargs["data_file"] = "str(Path({}) / '{}')".format(root, data_file) + if "train" in kwargs: + kwargs["mode"] = "'train' if {} else 'test'".format(kwargs.pop("train")) + + API_TEMPLATE = textwrap.dedent( + """ + from pathlib import Path + {}({}) + """ + ) + return API_TEMPLATE.format(self.get_paddle_api(), self.kwargs_to_str(kwargs)) + + +class MNISTMatcher(BaseMatcher): + def generate_code(self, kwargs): + train = True + if "train" in kwargs: + train = kwargs.pop("train") + kwargs["mode"] = "'train' if {} else 'test'".format(train) + if "root" in kwargs: + root = kwargs.pop("root") + file_paths = { + "train_image": "MNIST/raw/train-images-idx3-ubyte.gz", + "train_label": "MNIST/raw/train-labels-idx1-ubyte.gz", + "test_image": "MNIST/raw/t10k-images-idx3-ubyte.gz", + "test_label": "MNIST/raw/t10k-labels-idx1-ubyte.gz", + } + kwargs["image_path"] = ( + f"str(Path({root}) / '{file_paths['train_image']}') if {train} else " + f"str(Path({root}) / '{file_paths['test_image']}')" + ) + kwargs["label_path"] = ( + f"str(Path({root}) / '{file_paths['train_label']}') if {train} else " + f"str(Path({root}) / '{file_paths['test_label']}')" + ) + + API_TEMPLATE = textwrap.dedent( + """ + from pathlib import Path + {}({}) + """ + ) + return API_TEMPLATE.format(self.get_paddle_api(), self.kwargs_to_str(kwargs)) + + +class FashionMNISTMatcher(BaseMatcher): + def generate_code(self, kwargs): + train = True + if "train" in kwargs: + train = kwargs.pop("train") + kwargs["mode"] = "'train' if {} else 'test'".format(train) + if "root" in kwargs: + root = kwargs.pop("root") + file_paths = { + "train_image": "FashionMNIST/raw/train-images-idx3-ubyte.gz", + "train_label": "FashionMNIST/raw/train-labels-idx1-ubyte.gz", + "test_image": "FashionMNIST/raw/t10k-images-idx3-ubyte.gz", + "test_label": 
"FashionMNIST/raw/t10k-labels-idx1-ubyte.gz", + } + kwargs["image_path"] = ( + f"str(Path({root}) / '{file_paths['train_image']}') if {train} else " + f"str(Path({root}) / '{file_paths['test_image']}')" + ) + kwargs["label_path"] = ( + f"str(Path({root}) / '{file_paths['train_label']}') if {train} else " + f"str(Path({root}) / '{file_paths['test_label']}')" + ) + + API_TEMPLATE = textwrap.dedent( + """ + from pathlib import Path + {}({}) + """ + ) + return API_TEMPLATE.format(self.get_paddle_api(), self.kwargs_to_str(kwargs))