
fix ut, test=develop
sandyhouse committed Mar 25, 2021
1 parent 0350ab0 commit 7d5d371
Showing 1 changed file with 1 addition and 15 deletions.
16 changes: 1 addition & 15 deletions python/paddle/fluid/optimizer.py
@@ -4136,20 +4136,6 @@ def _add_op_device_attr_for_op(self, op, idx, block):
             # For LRSched ops, we should put them on all sub-programs to
             # make sure each sub-program update the lr correctly
             op._set_attr(self._op_device_key, "gpu:all")
-        elif op.type == "sum" and self._is_backward_op(op):
-            # For sum ops that compute the sum of @RENAMED@ vars
-            for name in op.desc.input_arg_names():
-                assert '@RENAME@' in name, \
-                    "The op must be sum used to accumulate renamed vars."
-            assert len(op.desc.output_arg_names()) == 1
-            out_name = op.desc.output_arg_names()[0]
-            post_op = self._find_post_op(block.ops, op, out_name)
-            assert post_op.has_attr(self._op_device_key), \
-                "{} has no op_device attr for var {}".format(
-                    post_op.type, out_name)
-            device = post_op.attr(self._op_device_key)
-            assert device, "The post op must have op_device set."
-            op._set_attr(self._op_device_key, device)
         elif (op.type == "cast" or
                 op.type == "scale") and self._is_backward_op(op):
             prev_op = self._find_real_prev_op(block.ops, op,
@@ -4386,7 +4372,7 @@ def _insert_sendrecv_ops_for_boundaries(self, block):
                     inputs={'X': [var]},
                     outputs={'Out': [var]},
                     attrs={
-                        self._op_device_key: cur_device,
+                        self._op_device_key: prev_device,
                         self._op_role_key: self._op_role.Backward,
                         'ring_id': ring_id,
                     })
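
For context on the first hunk: the deleted elif branch handled backward sum ops, the ops inserted to accumulate gradient variables that exist in several renamed (@RENAME@) copies, and it assigned such a sum op to the same device as the op that consumes its output. The snippet below restates that rule as a standalone helper. It is a minimal, hedged sketch rather than Paddle's API: find_post_op is a hypothetical stand-in for the optimizer's self._find_post_op, and the default attribute name is an illustrative assumption.

# Hedged sketch of the rule implemented by the deleted branch (not Paddle source).
# A backward `sum` op that accumulates @RENAME@'d gradient vars inherits the
# op_device attribute of the op that consumes its accumulated output.
def set_device_for_backward_sum(op, ops, op_device_key="op_device"):
    # Every input of such a sum op is a renamed copy of the same gradient var.
    for name in op.desc.input_arg_names():
        assert '@RENAME@' in name, \
            "The sum op must accumulate renamed (duplicated) gradient vars."
    # The op writes a single accumulated output ...
    assert len(op.desc.output_arg_names()) == 1
    out_name = op.desc.output_arg_names()[0]
    # ... and is pinned to the device of the op that consumes that output.
    post_op = find_post_op(ops, op, out_name)  # hypothetical stand-in for self._find_post_op
    device = post_op.attr(op_device_key)
    assert device, "The post op must already have op_device set."
    op._set_attr(op_device_key, device)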
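
The second hunk is a one-line change in _insert_sendrecv_ops_for_boundaries: the op inserted at a pipeline-stage boundary for the backward pass now carries the previous stage's device (prev_device) rather than the current one (cur_device) in its op_device attribute. The enclosing call is collapsed in the diff, so the snippet below only restates the changed attrs dict in a self-contained, hedged form; every concrete value (attribute names, role constant, device strings, ring id) is an illustrative placeholder, not taken from the Paddle source.

# Hedged, self-contained restatement of the one-line change shown above.
op_device_key = 'op_device'                  # placeholder for self._op_device_key
op_role_key = 'op_role'                      # placeholder for self._op_role_key
OP_ROLE_BACKWARD = 1                         # placeholder for self._op_role.Backward
prev_device, cur_device = 'gpu:0', 'gpu:1'   # devices of the adjacent pipeline stages
ring_id = 0                                  # communication ring used by the boundary op

# Before this commit the backward boundary op was tagged with the current
# stage's device; after the commit it carries the previous stage's device.
attrs_before = {op_device_key: cur_device, op_role_key: OP_ROLE_BACKWARD, 'ring_id': ring_id}
attrs_after = {op_device_key: prev_device, op_role_key: OP_ROLE_BACKWARD, 'ring_id': ring_id}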

1 comment on commit 7d5d371

@paddle-bot-old

Congratulations! Your pull request passed all required CI. You can ask the reviewer(s) to approve and merge. 🎉
