[Dy2St] pir dy2st unittest verification - Part 14 (#59546)

---------

Co-authored-by: SigureMo <sigure.qaq@gmail.com>
gouzil and SigureMo authored Dec 6, 2023
1 parent ac0fe2c commit 7e99dea
Showing 8 changed files with 80 additions and 101 deletions.
test/dygraph_to_static/test_cinn.py (1 addition, 1 deletion)
@@ -66,7 +66,7 @@ def train(self, use_cinn):

res.append(out.numpy())

-        if use_cinn and paddle.device.is_compiled_with_cinn():
+        if use_cinn and paddle.is_compiled_with_cinn():
self.assertTrue(
paddle.framework.core.is_run_with_cinn(),
msg="The test was not running with CINN! Please check.",
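Context for the change above: `paddle.is_compiled_with_cinn()` is the top-level build-capability check, replacing the `paddle.device`-scoped spelling. A minimal standalone sketch of the guard the updated test relies on (illustrative, not the test itself):

```python
import paddle

# Build-time check: True only if this Paddle build was compiled with CINN.
if paddle.is_compiled_with_cinn():
    # Runtime check used by the test: did execution actually go through CINN?
    print(paddle.framework.core.is_run_with_cinn())
else:
    print("CINN not compiled in; CINN-specific assertions are skipped")
```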
test/dygraph_to_static/test_dict.py (0 additions, 5 deletions)
@@ -119,11 +119,6 @@ def update_cache(cache):


class TestNetWithDict(Dy2StTestBase):
"""
TestCase for the transformation from control flow `if/else`
dependent on tensor in Dygraph into Static `base.layers.cond`.
"""

def setUp(self):
self.x = np.random.random([10, 16]).astype('float32')
self.batch_size = self.x.shape[0]
test/dygraph_to_static/test_duplicate_output.py (0 additions, 5 deletions)
@@ -42,11 +42,6 @@ def forward(self, x):


class TestDuplicateOutput(Dy2StTestBase):
"""
TestCase for the transformation from control flow `if/else`
dependent on tensor in Dygraph into Static `base.layers.cond`.
"""

def _run_static(self):
net = paddle.jit.to_static(SimpleNet())
x = paddle.to_tensor([1.0])
test/dygraph_to_static/test_ifelse.py (0 additions, 5 deletions)
@@ -77,11 +77,6 @@ def test_error(self):


class TestDygraphIfElse(Dy2StTestBase):
"""
TestCase for the transformation from control flow `if/else`
dependent on tensor in Dygraph into Static `base.layers.cond`.
"""

def setUp(self):
self.x = np.random.random([10, 16]).astype('float32')
self.dyfunc = dyfunc_with_if_else
test/dygraph_to_static/test_lambda.py (40 additions, 37 deletions)
@@ -15,15 +15,17 @@
import unittest

import numpy as np
-from dygraph_to_static_utils import Dy2StTestBase
+from dygraph_to_static_utils import (
+    Dy2StTestBase,
+    test_legacy_and_pt_and_pir,
+)

import paddle
import paddle.nn.functional as F
from paddle import base


def call_lambda_as_func(x):
-    x = base.dygraph.to_variable(x)
+    x = paddle.to_tensor(x)

add_func = lambda x, y: x + y
mean_func = lambda x: paddle.mean(x)
@@ -36,7 +38,7 @@ def call_lambda_as_func(x):


def call_lambda_directly(x):
-    x = base.dygraph.to_variable(x)
+    x = paddle.to_tensor(x)

y = (lambda x, y: x + y)(x, x)
out = (lambda x: paddle.mean(x))(y)
@@ -45,7 +47,7 @@ def call_lambda_directly(x):


def call_lambda_in_func(x):
-    x = base.dygraph.to_variable(x)
+    x = paddle.to_tensor(x)

add_func = lambda x: x + 1

@@ -55,8 +57,8 @@ def call_lambda_in_func(x):
return out


-def call_lambda_with_ifExpr(x):
-    x = base.dygraph.to_variable(x)
+def call_lambda_with_if_expr(x):
+    x = paddle.to_tensor(x)

add_func = lambda x: x + 1

@@ -66,8 +68,8 @@ def call_lambda_with_ifExpr(x):
return out


-def call_lambda_with_ifExpr2(x):
-    x = base.dygraph.to_variable(x)
+def call_lambda_with_if_expr2(x):
+    x = paddle.to_tensor(x)

add_func = lambda x: x + 1

@@ -84,39 +86,40 @@ class TestLambda(Dy2StTestBase):
def setUp(self):
self.x = np.random.random([10, 16]).astype('float32')
self.x = np.array([1, 3]).astype('float32')
-        self.place = (
-            base.CUDAPlace(0)
-            if base.is_compiled_with_cuda()
-            else base.CPUPlace()
-        )
-        self.init_func()
-
-    def init_func(self):
-        self.dyfuncs = [
-            call_lambda_as_func,
-            call_lambda_directly,
-            call_lambda_in_func,
-            call_lambda_with_ifExpr,
-            call_lambda_with_ifExpr2,
-        ]

def run_static(self, func):
return self.run_dygraph(func, to_static=True)

def run_dygraph(self, func, to_static=False):
-        with base.dygraph.guard(self.place):
-            x_v = base.dygraph.to_variable(self.x)
-            if to_static:
-                ret = paddle.jit.to_static(func)(x_v)
-            else:
-                ret = func(x_v)
-            return ret.numpy()
-
-    def test_ast_to_func(self):
-        for func in self.dyfuncs:
-            self.assertTrue(
-                (self.run_dygraph(func) == self.run_static(func)).all()
-            )
+        x_v = paddle.to_tensor(self.x)
+        if to_static:
+            ret = paddle.jit.to_static(func)(x_v)
+        else:
+            ret = func(x_v)
+        return ret.numpy()
+
+    @test_legacy_and_pt_and_pir
+    def test_call_lambda_as_func(self):
+        fn = call_lambda_as_func
+        self.assertTrue((self.run_dygraph(fn) == self.run_static(fn)).all())
+
+    @test_legacy_and_pt_and_pir
+    def test_call_lambda_directly(self):
+        fn = call_lambda_directly
+        self.assertTrue((self.run_dygraph(fn) == self.run_static(fn)).all())
+
+    def test_call_lambda_in_func(self):
+        fn = call_lambda_in_func
+        self.assertTrue((self.run_dygraph(fn) == self.run_static(fn)).all())
+
+    def test_call_lambda_with_if_expr(self):
+        fn = call_lambda_with_if_expr
+        self.assertTrue((self.run_dygraph(fn) == self.run_static(fn)).all())
+
+    @test_legacy_and_pt_and_pir
+    def test_call_lambda_with_if_expr2(self):
+        fn = call_lambda_with_if_expr2
+        self.assertTrue((self.run_dygraph(fn) == self.run_static(fn)).all())


if __name__ == '__main__':
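The test_lambda.py rewrite does three things: renames the `ifExpr` helpers to snake_case, swaps `base.dygraph.to_variable`/`base.dygraph.guard(place)` for `paddle.to_tensor` on the default place, and splits the single `test_ast_to_func` loop into one method per helper so `@test_legacy_and_pt_and_pir` can opt individual cases into the legacy, PT, and PIR runs. A minimal sketch of the dygraph-vs-static comparison each new method performs (simplified helper, not the file's exact code):

```python
import numpy as np
import paddle

def call_lambda(x):
    # paddle.to_tensor replaces the legacy base.dygraph.to_variable
    x = paddle.to_tensor(x)
    add_func = lambda a, b: a + b
    return paddle.mean(add_func(x, x))

x_np = np.array([1.0, 3.0], dtype='float32')
dygraph_out = call_lambda(x_np).numpy()                       # eager run
static_out = paddle.jit.to_static(call_lambda)(x_np).numpy()  # dy2st run
assert (dygraph_out == static_out).all()
```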
test/dygraph_to_static/test_params_no_grad.py (3 additions, 3 deletions)
@@ -14,7 +14,7 @@

import unittest

-from dygraph_to_static_utils import Dy2StTestBase
+from dygraph_to_static_utils import Dy2StTestBase, test_legacy_and_pt_and_pir

import paddle
import paddle.distributed as dist
@@ -40,8 +40,7 @@ def forward(self, ids):

def train():
paddle.distributed.init_parallel_env()
-    net = Net()
-    net = paddle.jit.to_static(net)
+    net = paddle.jit.to_static(Net())

sgd = paddle.optimizer.SGD(learning_rate=0.1, parameters=net.parameters())
dp_net = paddle.DataParallel(net)
@@ -55,6 +54,7 @@ def train():


class TestParamsNoGrad(Dy2StTestBase):
+    @test_legacy_and_pt_and_pir
def test_two_card(self):
if (
paddle.is_compiled_with_cuda()
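The two-step construction above is folded into one expression; `paddle.jit.to_static` wraps the layer instance either way, so behavior is unchanged. A sketch with a toy layer (illustrative, not the test's `Net`):

```python
import paddle

class Net(paddle.nn.Layer):
    def forward(self, x):
        return x * 2

# Before: net = Net(); net = paddle.jit.to_static(net)
# After this commit: a single call producing the same wrapped layer.
net = paddle.jit.to_static(Net())
out = net(paddle.ones([2]))
```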
test/dygraph_to_static/test_ptb_lm.py (29 additions, 36 deletions)
@@ -17,12 +17,10 @@
import unittest

import numpy as np
-from dygraph_to_static_utils import Dy2StTestBase
+from dygraph_to_static_utils import Dy2StTestBase, test_legacy_and_pt_and_pir

import paddle
-from paddle import base
-from paddle.base.dygraph.base import to_variable
-from paddle.jit.api import to_static
+from paddle.base.framework import unique_name
from paddle.optimizer import SGD

PRINT_STEP = 20
@@ -49,7 +47,7 @@ def __init__(

for i in range(self._num_layers):
weight_1 = self.create_parameter(
-                attr=base.ParamAttr(
+                attr=paddle.ParamAttr(
initializer=paddle.nn.initializer.Uniform(
low=-self._init_scale, high=self._init_scale
)
@@ -62,7 +60,7 @@
)
self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
bias_1 = self.create_parameter(
-                attr=base.ParamAttr(
+                attr=paddle.ParamAttr(
initializer=paddle.nn.initializer.Uniform(
low=-self._init_scale, high=self._init_scale
)
@@ -157,23 +155,23 @@ def __init__(
vocab_size,
hidden_size,
sparse=False,
-            weight_attr=base.ParamAttr(
+            weight_attr=paddle.ParamAttr(
name='embedding_para',
initializer=paddle.nn.initializer.Uniform(
low=-init_scale, high=init_scale
),
),
)
self.softmax_weight = self.create_parameter(
-            attr=base.ParamAttr(),
+            attr=paddle.ParamAttr(),
shape=[self.hidden_size, self.vocab_size],
dtype="float32",
default_initializer=paddle.nn.initializer.Uniform(
low=-self.init_scale, high=self.init_scale
),
)
self.softmax_bias = self.create_parameter(
-            attr=base.ParamAttr(),
+            attr=paddle.ParamAttr(),
shape=[self.vocab_size],
dtype="float32",
default_initializer=paddle.nn.initializer.Uniform(
@@ -184,7 +182,6 @@ def __init__(
def build_once(self, input, label, init_hidden, init_cell):
pass

-    @to_static
def forward(self, input, label, init_hidden, init_cell):
init_h = paddle.reshape(
init_hidden, shape=[self.num_layers, -1, self.hidden_size]
@@ -225,7 +222,7 @@ def debug_emb(self):
np.save("emb_grad", self.x_emb.gradient())


-def train(place):
+def train():
num_layers = 1
batch_size = 4
hidden_size = 10
@@ -236,16 +233,18 @@
vocab_size = 1000
batch_num = 200

-    with base.dygraph.guard(place):
+    with unique_name.guard():
paddle.seed(SEED)
paddle.framework.random._manual_program_seed(SEED)
-        ptb_model = PtbModel(
-            hidden_size=hidden_size,
-            vocab_size=vocab_size,
-            num_layers=num_layers,
-            num_steps=num_steps,
-            init_scale=init_scale,
-            dropout=dropout,
-        )
+        ptb_model = paddle.jit.to_static(
+            PtbModel(
+                hidden_size=hidden_size,
+                vocab_size=vocab_size,
+                num_layers=num_layers,
+                num_steps=num_steps,
+                init_scale=init_scale,
+                dropout=dropout,
+            )
+        )

sgd = SGD(learning_rate=1e-3, parameters=ptb_model.parameters())
@@ -262,8 +261,8 @@
(num_layers, batch_size, hidden_size), dtype='float32'
)

-        init_hidden = to_variable(init_hidden_data)
-        init_cell = to_variable(init_cell_data)
+        init_hidden = paddle.to_tensor(init_hidden_data)
+        init_cell = paddle.to_tensor(init_cell_data)
for step_id in range(batch_num):
x_data = np.arange(12).reshape(4, 3).astype('int64')
y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
Expand All @@ -272,8 +271,8 @@ def train(place):
x_data = x_data.reshape((-1, num_steps, 1))
y_data = y_data.reshape((-1, num_steps, 1))

-            x = to_variable(x_data)
-            y = to_variable(y_data)
+            x = paddle.to_tensor(x_data)
+            y = paddle.to_tensor(y_data)

dy_loss, last_hidden, last_cell = ptb_model(
x, y, init_hidden, init_cell
@@ -310,27 +309,21 @@ def train(place):
return out_loss, last_hidden.numpy(), last_cell.numpy()


-def train_dygraph(place):
+def train_dygraph():
    paddle.jit.enable_to_static(False)
-    return train(place)
+    return train()


-def train_static(place):
+def train_static():
    paddle.jit.enable_to_static(True)
-    return train(place)
+    return train()


class TestPtb(Dy2StTestBase):
-    def setUp(self):
-        self.place = (
-            base.CUDAPlace(0)
-            if base.is_compiled_with_cuda()
-            else base.CPUPlace()
-        )
-
+    @test_legacy_and_pt_and_pir
def test_check_result(self):
-        loss_1, hidden_1, cell_1 = train_static(self.place)
-        loss_2, hidden_2, cell_2 = train_dygraph(self.place)
+        loss_1, hidden_1, cell_1 = train_static()
+        loss_2, hidden_2, cell_2 = train_dygraph()

np.testing.assert_allclose(loss_1, loss_2, rtol=1e-05)
np.testing.assert_allclose(hidden_1, hidden_2, rtol=1e-05)
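The PTB test drops both the `@to_static` decorator on `forward` and the explicit `CUDAPlace`/`CPUPlace` plumbing: the whole model is wrapped with `paddle.jit.to_static` at construction, and `paddle.jit.enable_to_static` toggles conversion globally, so one `train()` body serves `train_dygraph()` and `train_static()`. A minimal sketch of that toggle pattern (toy model, not the PTB LM):

```python
import numpy as np
import paddle

class TinyModel(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.fc = paddle.nn.Linear(4, 1)

    def forward(self, x):
        return paddle.mean(self.fc(x))

def train():
    paddle.seed(102)  # same init in both modes, mirroring the test's SEED
    model = paddle.jit.to_static(TinyModel())
    x = paddle.to_tensor(np.ones([2, 4], dtype='float32'))
    return model(x).numpy()

paddle.jit.enable_to_static(False)  # wrapper present, but runs eagerly
dy_out = train()
paddle.jit.enable_to_static(True)   # runs through dynamic-to-static
st_out = train()
np.testing.assert_allclose(dy_out, st_out, rtol=1e-05)
```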