Commit df37ef2: fix

zhupengyang committed Aug 11, 2020
1 parent 486c895, commit df37ef2
Showing 3 changed files with 27 additions and 26 deletions.
41 changes: 23 additions & 18 deletions python/paddle/fluid/tests/unittests/test_log_softmax.py
@@ -32,7 +32,7 @@ def ref_log_softmax_grad(x, axis):
        axis += len(x.shape)
    out = np.apply_along_axis(ref_log_softmax, axis, x)
    axis_dim = x.shape[axis]
-    dout = np.full(x.shape, fill_value=1 / x.size, dtype='float64')
+    dout = np.full_like(x, fill_value=1 / x.size)
    dx = dout - np.exp(out) * dout.copy().sum(axis=axis, keepdims=True).repeat(
        axis_dim, axis=axis)
    return dx
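Note: the change above replaces np.full with np.full_like so that the seed
gradient dout inherits x's dtype instead of always being float64. A minimal
NumPy sketch of the difference (values are arbitrary):

    import numpy as np

    x32 = np.ones((2, 3), dtype='float32')

    # Old behaviour: dtype pinned to float64 regardless of x's dtype.
    dout_old = np.full(x32.shape, fill_value=1 / x32.size, dtype='float64')
    # New behaviour: dtype follows x, so float32 inputs get float32 gradients.
    dout_new = np.full_like(x32, fill_value=1 / x32.size)

    print(dout_old.dtype, dout_new.dtype)  # float64 float32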
@@ -87,19 +87,21 @@ def setUp(self):
    def check_api(self, axis=-1):
        ref_out = np.apply_along_axis(ref_log_softmax, axis, self.x)

-        main_program = paddle.Program()
        logsoftmax = paddle.nn.LogSoftmax(axis)
-        with paddle.program_guard(main_program):
+        # test static api
+        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.data(name='x', shape=self.x_shape)
            y = logsoftmax(x)
-            exe = paddle.Executor(self.place)
-            out = exe.run(main_program, feed={'x': self.x}, fetch_list=[y])
+            exe = paddle.static.Executor(self.place)
+            out = exe.run(feed={'x': self.x}, fetch_list=[y])
        self.assertTrue(np.allclose(out[0], ref_out))

-        with paddle.imperative.guard(self.place):
-            x = paddle.imperative.to_variable(self.x)
-            y = logsoftmax(x)
+        # test dygraph api
+        paddle.disable_static()
+        x = paddle.to_variable(self.x)
+        y = logsoftmax(x)
        self.assertTrue(np.allclose(y.numpy(), ref_out))
+        paddle.enable_static()

    def test_check_api(self):
        for axis in [-1, 1]:
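Note: the rewrite above moves the test onto the 2.0-beta API split: graph-mode
code goes through paddle.static.* while eager code is toggled with
paddle.disable_static()/paddle.enable_static(). A self-contained sketch of the
same two paths, using only calls that appear in this diff (shapes and values
are illustrative):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    x_np = np.random.uniform(-1, 1, [2, 3, 4]).astype('float32')

    # Dygraph path: eager tensors, no Program or Executor involved.
    paddle.disable_static()
    y_dy = F.log_softmax(paddle.to_variable(x_np), axis=-1)
    print(y_dy.numpy().shape)  # (2, 3, 4)
    paddle.enable_static()

    # Static path: build the graph inside a Program, then run it.
    with paddle.static.program_guard(paddle.static.Program()):
        x = paddle.data(name='x', shape=[2, 3, 4])
        y = F.log_softmax(x, axis=-1)
        exe = paddle.static.Executor(paddle.CPUPlace())
        out, = exe.run(feed={'x': x_np}, fetch_list=[y])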
@@ -115,27 +117,30 @@ def setUp(self):
            else paddle.CPUPlace()

    def check_api(self, axis=-1, dtype=None):
-        ref_out = np.apply_along_axis(ref_log_softmax, axis, self.x)
-        main_program = paddle.Program()
-        with paddle.program_guard(main_program):
+        x = self.x.copy()
+        if dtype is not None:
+            x = x.astype(dtype)
+        ref_out = np.apply_along_axis(ref_log_softmax, axis, x)
+        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.data(name='x', shape=self.x_shape)
            y = F.log_softmax(x, axis, dtype)
-            exe = paddle.Executor(self.place)
-            out = exe.run(main_program, feed={'x': self.x}, fetch_list=[y])
+            exe = paddle.static.Executor(self.place)
+            out = exe.run(feed={'x': self.x}, fetch_list=[y])
        self.assertTrue(np.allclose(out[0], ref_out))

-        with paddle.imperative.guard(self.place):
-            x = paddle.imperative.to_variable(self.x)
-            y = F.log_softmax(x, axis, dtype)
-            self.assertTrue(np.allclose(y.numpy(), ref_out))
+        paddle.disable_static()
+        x = paddle.to_variable(self.x)
+        y = F.log_softmax(x, axis, dtype)
+        self.assertTrue(np.allclose(y.numpy(), ref_out))
+        paddle.enable_static()

    def test_check_api(self):
        for axis in [-1, 1]:
            self.check_api(axis)
        self.check_api(-1, 'float64')

    def test_errors(self):
-        with paddle.program_guard(paddle.Program(), paddle.Program()):
+        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.data(name='X1', shape=[100], dtype='int32')
            self.assertRaises(TypeError, F.log_softmax, x)
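Note: check_api now casts the NumPy reference input with astype(dtype) before
computing ref_out, mirroring the dtype argument of F.log_softmax (cast first,
then compute in the wider type). A hedged NumPy mirror of that reference path;
the max-shifted formula is an assumption about ref_log_softmax, whose body this
diff does not show:

    import numpy as np

    def ref_log_softmax_np(x, axis=-1, dtype=None):
        if dtype is not None:
            x = x.astype(dtype)  # cast first, as the updated test does
        # Subtract the max so exp() cannot overflow (standard stable form).
        shifted = x - x.max(axis=axis, keepdims=True)
        return shifted - np.log(np.exp(shifted).sum(axis=axis, keepdims=True))

    x = np.random.uniform(-1, 1, [2, 3, 4]).astype('float32')
    print(ref_log_softmax_np(x).dtype)                   # float32
    print(ref_log_softmax_np(x, dtype='float64').dtype)  # float64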

6 changes: 2 additions & 4 deletions python/paddle/nn/functional/activation.py
@@ -401,8 +401,6 @@ def softmax(x, axis=-1, name=None):
    import paddle.nn.functional as F
    import numpy as np

-    paddle.enable_imperative()
-
    x = np.array([[[2.0, 3.0, 4.0, 5.0],
                   [3.0, 4.0, 5.0, 6.0],
                   [7.0, 8.0, 8.0, 9.0]],
@@ -457,15 +455,15 @@ def log_softmax(x, axis=-1, dtype=None, name=None):
    import paddle.nn.functional as F
    import numpy as np

-    paddle.enable_imperative()
+    paddle.disable_static()

    x = np.array([[[-2.0, 3.0, -4.0, 5.0],
                   [3.0, -4.0, 5.0, -6.0],
                   [-7.0, -8.0, 8.0, 9.0]],
                  [[1.0, -2.0, -3.0, 4.0],
                   [-5.0, 6.0, 7.0, -8.0],
                   [6.0, 7.0, 8.0, 9.0]]]).astype('float32')
-    x = paddle.imperative.to_variable(x)
+    x = paddle.to_variable(x)
    out1 = F.log_softmax(x)
    out2 = F.log_softmax(x, dtype='float64')
    # out1's data type is float32; out2's data type is float64
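Note: as a sanity check on the docstring values, exponentiating a log_softmax
output recovers a probability distribution (each row of exp(out) sums to 1).
A quick dygraph sketch (input values are illustrative):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()
    x = paddle.to_variable(np.random.randn(3, 4).astype('float32'))
    out = F.log_softmax(x, axis=-1)
    print(np.exp(out.numpy()).sum(axis=-1))  # ~[1. 1. 1.]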
6 changes: 2 additions & 4 deletions python/paddle/nn/layer/activation.py
@@ -232,8 +232,6 @@ class LeakyReLU(layers.Layer):
    import paddle
    import numpy as np

-    paddle.enable_imperative()
-
    lrelu = paddle.nn.LeakyReLU()
    x = paddle.imperative.to_variable(np.array([-2, 0, 1], 'float32'))
    out = lrelu(x)  # [-0.02, 0, 1]
@@ -320,7 +318,7 @@ class LogSoftmax(layers.Layer):
    import paddle
    import numpy as np

-    paddle.enable_imperative()
+    paddle.disable_static()

    x = np.array([[[-2.0, 3.0, -4.0, 5.0],
                   [3.0, -4.0, 5.0, -6.0],
@@ -329,7 +327,7 @@ class LogSoftmax(layers.Layer):
                   [-5.0, 6.0, 7.0, -8.0],
                   [6.0, 7.0, 8.0, 9.0]]], 'float32')
    log_softmax = paddle.nn.LogSoftmax()
-    x = paddle.imperative.to_variable(x)
+    x = paddle.to_variable(x)
    out = log_softmax(x)
    # [[[ -7.1278396   -2.1278396   -9.127839    -0.12783948]
    #   [ -2.1270514   -9.127051    -0.12705144 -11.127051  ]
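Note: the LogSoftmax layer is the class-style counterpart of F.log_softmax,
so both forms should agree for the same axis; the equivalence check below is
an assumption for illustration, not part of this diff:

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()
    x = paddle.to_variable(np.random.randn(2, 3, 4).astype('float32'))
    layer_out = paddle.nn.LogSoftmax(-1)(x)  # layer API
    func_out = F.log_softmax(x, -1)          # functional API
    print(np.allclose(layer_out.numpy(), func_out.numpy()))  # True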