R2.3/fix pad3d infer shape (#42414)
* fix pad3d infer shape

* fix pad3d

* fix pad default value

* fix order

* add unit test

* fix unittest for ci coverage

* add ndhwc check
littletomatodonkey authored Apr 30, 2022
1 parent 3b2bc0a commit 2dce1e8
Showing 2 changed files with 71 additions and 13 deletions.
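
The fix targets Pad3dInferMeta's output-shape inference for the pad3d op when the paddings are supplied as a tensor (the FromTensor path) and adds a missing NDHWC check. A minimal sketch of the kind of usage the new tests exercise (illustrative only; it assumes an installed Paddle build, and the shapes and values below are not taken from the commit):

import numpy as np
import paddle
import paddle.nn as nn

# Paddings given as a tensor: [left, right, top, bottom, front, back].
pad = paddle.to_tensor([1, 2, 2, 1, 1, 0])
x = paddle.to_tensor(np.random.rand(3, 4, 5, 6, 7).astype("float32"))

# NDHWC layout: channel is the last axis, so only D/H/W grow by the pads.
pad3d_ndhwc = nn.Pad3D(padding=pad, mode="reflect", data_format="NDHWC")
y = pad3d_ndhwc(x)
print(y.shape)  # expected [3, 5, 8, 9, 7]: D, H, W padded; N and C unchanged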
30 changes: 18 additions & 12 deletions paddle/phi/infermeta/unary.cc
@@ -1277,32 +1277,40 @@ void Pad3dInferMeta(const MetaTensor& x,
                           "5, but received %d. ",
                           x_dim.size()));
 
-  std::vector<int64_t> out_dims(x_dim.size());
+  std::vector<int64_t> out_dims(x_dim.size(), -1);
   out_dims[0] = x_dim[0];
+  auto& paddings = paddings_int_array.GetData();
+  if (data_format == "NCDHW") {
+    out_dims[1] = x_dim[1];
+  } else {
+    out_dims[4] = x_dim[4];
+  }
   if (paddings_int_array.FromTensor()) {
     if (config.is_runtime) {
       PADDLE_ENFORCE_EQ(
-          paddings_int_array.GetData().size(),
+          paddings.size(),
           6,
           errors::InvalidArgument("Shape of Input(Paddings) should be equal to "
                                   "[6], but received [%d].",
-                                  paddings_int_array.GetData().size()));
+                                  paddings.size()));
+      if (data_format == "NCDHW") {
+        out_dims[2] = x_dim[2] + paddings[4] + paddings[5];
+        out_dims[3] = x_dim[3] + paddings[2] + paddings[3];
+        out_dims[4] = x_dim[4] + paddings[0] + paddings[1];
+      } else {
+        out_dims[1] = x_dim[1] + paddings[4] + paddings[5];
+        out_dims[2] = x_dim[2] + paddings[2] + paddings[3];
+        out_dims[3] = x_dim[3] + paddings[0] + paddings[1];
+      }
     }
-    out_dims[1] = x_dim[1];
-    out_dims[2] = x_dim[2];
-    out_dims[3] = x_dim[3];
-    out_dims[4] = x_dim[4];
   } else {
-    auto paddings = paddings_int_array.GetData();
-
     PADDLE_ENFORCE_EQ(
         paddings.size(),
         6,
         errors::InvalidArgument(
             "Size of paddings should be equal to 6, but received %d.",
             static_cast<int>(paddings.size())));
     if (data_format == "NCDHW") {
-      out_dims[1] = x_dim[1];  // channel
       out_dims[2] = ((!config.is_runtime) && (x_dim[2] < 0))
                         ? x_dim[2]
                         : (x_dim[2] + paddings[4] + paddings[5]);  // depth
@@ -1315,8 +1323,6 @@ void Pad3dInferMeta(const MetaTensor& x,
                         ? x_dim[4]
                         : (x_dim[4] + paddings[0] + paddings[1]);  // width
     } else {  // NDHWC
-      out_dims[4] = x_dim[4];  // channel
-
       out_dims[1] = ((!config.is_runtime) && (x_dim[1] < 0))
                         ? x_dim[1]
                         : (x_dim[1] + paddings[4] + paddings[5]);  // depth
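
For readers skimming the diff, the updated inference rule amounts to: unknown dims start at -1, the batch and channel dims are passed through for both layouts, and each spatial dim grows by its padding pair. An illustrative Python mirror of that rule (not part of the commit; it omits the compile-time handling of negative/unknown dims):

def infer_pad3d_shape(x_shape, paddings, data_format="NCDHW"):
    # Mirror of the updated Pad3dInferMeta logic: unknown dims start as -1,
    # batch and channel are passed through, spatial dims grow by the pads.
    assert len(x_shape) == 5 and len(paddings) == 6
    out = [-1] * 5
    out[0] = x_shape[0]
    if data_format == "NCDHW":
        out[1] = x_shape[1]                              # channel
        out[2] = x_shape[2] + paddings[4] + paddings[5]  # depth
        out[3] = x_shape[3] + paddings[2] + paddings[3]  # height
        out[4] = x_shape[4] + paddings[0] + paddings[1]  # width
    else:  # NDHWC
        out[4] = x_shape[4]                              # channel
        out[1] = x_shape[1] + paddings[4] + paddings[5]  # depth
        out[2] = x_shape[2] + paddings[2] + paddings[3]  # height
        out[3] = x_shape[3] + paddings[0] + paddings[1]  # width
    return out

print(infer_pad3d_shape([2, 3, 4, 5, 6], [0, 1, 2, 3, 4, 5]))  # [2, 3, 13, 10, 7]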
54 changes: 53 additions & 1 deletion python/paddle/fluid/tests/unittests/test_pad3d_op.py
@@ -27,7 +27,6 @@ class TestPad3dOp(OpTest):
     def setUp(self):
         paddle.enable_static()
         self.value = 0.0
-        self.variable_paddings = False
         self.initTestCase()
         self.op_type = "pad3d"
         self.python_api = paddle.nn.functional.pad
@@ -84,6 +83,7 @@ def initTestCase(self):
         self.mode = "constant"
         self.data_format = "NCDHW"
         self.pad_value = 0.0
+        self.variable_paddings = False
 
 
 class TestCase1(TestPad3dOp):
@@ -93,6 +93,7 @@ def initTestCase(self):
         self.mode = "constant"
         self.data_format = "NCDHW"
         self.value = 1.0
+        self.variable_paddings = False
 
 
 class TestCase2(TestPad3dOp):
@@ -102,6 +103,7 @@ def initTestCase(self):
         self.mode = "constant"
         self.data_format = "NDHWC"
         self.value = 1.0
+        self.variable_paddings = False
 
 
 class TestCase3(TestPad3dOp):
@@ -110,6 +112,7 @@ def initTestCase(self):
         self.paddings = [0, 1, 1, 0, 2, 3]
         self.mode = "reflect"
         self.data_format = "NCDHW"
+        self.variable_paddings = False
 
 
 class TestCase4(TestPad3dOp):
@@ -118,6 +121,7 @@ def initTestCase(self):
         self.paddings = [0, 1, 2, 1, 2, 3]
         self.mode = "reflect"
         self.data_format = "NDHWC"
+        self.variable_paddings = False
 
 
 class TestCase5(TestPad3dOp):
@@ -126,6 +130,7 @@ def initTestCase(self):
         self.paddings = [0, 1, 2, 3, 2, 1]
         self.mode = "replicate"
         self.data_format = "NCDHW"
+        self.variable_paddings = False
 
 
 class TestCase6(TestPad3dOp):
@@ -134,6 +139,7 @@ def initTestCase(self):
         self.paddings = [5, 4, 2, 1, 2, 3]
         self.mode = "replicate"
         self.data_format = "NDHWC"
+        self.variable_paddings = False
 
 
 class TestCase7(TestPad3dOp):
@@ -142,6 +148,7 @@ def initTestCase(self):
         self.paddings = [0, 1, 2, 3, 2, 1]
         self.mode = "circular"
         self.data_format = "NCDHW"
+        self.variable_paddings = False
 
 
 class TestCase8(TestPad3dOp):
@@ -150,6 +157,27 @@ def initTestCase(self):
         self.paddings = [0, 1, 2, 1, 2, 3]
         self.mode = "circular"
         self.data_format = "NDHWC"
+        self.variable_paddings = False
+
+
+class TestCase9(TestPad3dOp):
+    def initTestCase(self):
+        self.shape = (2, 3, 4, 5, 6)
+        self.paddings = [0, 1, 2, 3, 4, 5]
+        self.mode = "constant"
+        self.data_format = "NCDHW"
+        self.value = 1.0
+        self.variable_paddings = True
+
+
+class TestCase10(TestPad3dOp):
+    def initTestCase(self):
+        self.shape = (2, 3, 4, 5, 6)
+        self.paddings = [0, 1, 2, 3, 4, 5]
+        self.mode = "constant"
+        self.data_format = "NDHWC"
+        self.value = 1.0
+        self.variable_paddings = True
 
 
 class TestPadAPI(unittest.TestCase):
@@ -681,6 +709,30 @@ def test_class(self):
                 input_data, pad, "circular", data_format="NCDHW")
             self.assertTrue(np.allclose(output.numpy(), np_out))
 
+    def test_pad_tensor(self):
+        paddle.disable_static()
+        for place in self.places:
+            input_shape = (3, 4, 5, 6, 7)
+            pad = [1, 2, 2, 1, 1, 0]
+            pad_tensor = paddle.to_tensor(pad)
+            input_data = np.random.rand(*input_shape).astype(np.float32)
+
+            pad_reflection_ncdhw = nn.Pad3D(
+                padding=pad_tensor, mode="reflect", data_format="NCDHW")
+            pad_reflection_ndhwc = nn.Pad3D(
+                padding=pad_tensor, mode="reflect", data_format="NDHWC")
+            data = paddle.to_tensor(input_data)
+
+            output = pad_reflection_ncdhw(data)
+            np_out = self._get_numpy_out(
+                input_data, pad, "reflect", data_format="NCDHW")
+            self.assertTrue(np.allclose(output.numpy(), np_out))
+
+            output = pad_reflection_ndhwc(data)
+            np_out = self._get_numpy_out(
+                input_data, pad, "reflect", data_format="NDHWC")
+            self.assertTrue(np.allclose(output.numpy(), np_out))
+
 
 class TestPad3dOpError(unittest.TestCase):
     def setUp(self):
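
If it helps when reviewing locally, the new cases can be run on their own with the standard unittest loader (a hypothetical invocation; it assumes a Paddle build that includes this change and that test_pad3d_op.py is importable from the current directory):

import unittest
from test_pad3d_op import TestCase9, TestCase10, TestPadAPI

loader = unittest.defaultTestLoader
suite = unittest.TestSuite()
suite.addTests(loader.loadTestsFromTestCase(TestCase9))
suite.addTests(loader.loadTestsFromTestCase(TestCase10))
suite.addTest(TestPadAPI("test_pad_tensor"))
unittest.TextTestRunner(verbosity=2).run(suite)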
