Commit

Merge branch 'release/3.0-beta2' of https://github.com/PaddlePaddle/PaddleNLP into origin_release_3.0-beta2
DesmonDay committed Nov 8, 2024
2 parents 32701d4 + d02c406 commit 5a03c3b
Showing 4 changed files with 13 additions and 11 deletions.
6 changes: 4 additions & 2 deletions paddlenlp/trainer/trainer.py
@@ -1988,7 +1988,7 @@ def get_expected_keys(inputs, keys):
     self.optimizer = mix_precision_utils.MixPrecisionOptimizer(self.optimizer)
 self.optimizer = fleet.distributed_optimizer(self.optimizer)

-if self.args.enable_sharding_comm_overlap:
+if hasattr(self.args, "enable_sharding_comm_overlap") and self.args.enable_sharding_comm_overlap:
     model.register_sharding_comm_overlap_hook(self.optimizer)

Codecov warning: added lines #L1991 - L1992 in paddlenlp/trainer/trainer.py were not covered by tests.

 # No pipeline mode, sharding only
@@ -2764,7 +2764,9 @@ def _load_optimizer_and_scheduler(self, checkpoint):
     else:
         opt_state_dict = None
 else:
-    model = self.model_wrapped if self.args.enable_sharding_comm_overlap else self.model
+    model = self.model
+    if hasattr(self.args, "enable_sharding_comm_overlap") and self.args.enable_sharding_comm_overlap:
+        model = self.model_wrapped

Codecov warning: added lines #L2767 - L2769 in paddlenlp/trainer/trainer.py were not covered by tests.
     opt_state_dict = self.unified_checkpoint_handler.load_unified_optimizer(
         model=model,
         optimizer=self.optimizer,
2 changes: 1 addition & 1 deletion paddlenlp/trainer/unified_checkpoint/sharding_split_param_utils.py
@@ -181,7 +181,7 @@ def load_unified_optimizer_split_param(args, model, optimizer, resume_from_check
 param_shape_info = {}

 comm_buffer_list = optimizer._inner_opt._comm_buffer_list
-if args.enable_sharding_comm_overlap:
+if hasattr(args, "enable_sharding_comm_overlap") and args.enable_sharding_comm_overlap:
     comm_buffer_list = list(chain(*model._chunk_2_comm_buffers.values()))
     model = unwrap_model(model)

Codecov warning: added lines #L183 - L186 in paddlenlp/trainer/unified_checkpoint/sharding_split_param_utils.py were not covered by tests.

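The trainer.py and sharding_split_param_utils.py hunks apply the same defensive pattern: the sharding-comm-overlap flag is only read when the argument object actually defines it, so configurations without the field fall through to the non-overlap path instead of raising AttributeError. A minimal standalone sketch of the pattern (the Args stand-in below is hypothetical, not PaddleNLP code):

class Args:
    pass  # older argument objects may not define enable_sharding_comm_overlap

args = Args()

# Pattern adopted in this commit: honor the flag only if the attribute exists.
if hasattr(args, "enable_sharding_comm_overlap") and args.enable_sharding_comm_overlap:
    print("sharding comm overlap enabled")
else:
    print("flag absent or disabled")

# getattr with a default expresses the same check more compactly.
if getattr(args, "enable_sharding_comm_overlap", False):
    print("sharding comm overlap enabled")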
8 changes: 4 additions & 4 deletions paddlenlp/trl/dpo_criterion.py
@@ -287,10 +287,10 @@ def forward(
 )
 loss = dpo_loss + sft_loss
 if self.use_infohub:
-    infohub.policy_chosen_logps.append(policy_chosen_logps)
-    infohub.policy_rejected_logps.append(policy_rejected_logps)
-    infohub.sft_loss.append(sft_loss)
-    infohub.dpo_loss.append(dpo_loss)
+    infohub.policy_chosen_logps.append(policy_chosen_logps.detach())
+    infohub.policy_rejected_logps.append(policy_rejected_logps.detach())
+    infohub.sft_loss.append(sft_loss.detach())
+    infohub.dpo_loss.append(dpo_loss.detach())
     return loss
 else:
     return policy_chosen_logps, policy_rejected_logps, sft_loss, dpo_loss, loss
8 changes: 4 additions & 4 deletions paddlenlp/trl/kto_criterion.py
@@ -247,10 +247,10 @@ def forward(
     reference_kl_logps,
 )
 if self.use_infohub:
-    infohub.policy_chosen_logps.append(policy_chosen_logps)
-    infohub.policy_rejected_logps.append(policy_rejected_logps)
-    infohub.policy_kl_logps.append(policy_kl_logps)
-    infohub.kl.append(kl)
+    infohub.policy_chosen_logps.append(policy_chosen_logps.detach())
+    infohub.policy_rejected_logps.append(policy_rejected_logps.detach())
+    infohub.policy_kl_logps.append(policy_kl_logps.detach())
+    infohub.kl.append(kl.detach())
     return loss
 else:
     return (
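The dpo_criterion.py and kto_criterion.py changes are the same fix: the values stashed in infohub for later logging are now detached copies, so the cached tensors no longer hold a reference to the current step's autograd graph. A minimal sketch of the effect with toy tensors (assumes a working PaddlePaddle install; not the real criterion code):

import paddle

x = paddle.to_tensor([1.0, 2.0], stop_gradient=False)
loss = (x * x).sum()

cache = []
# Detach before caching: the stored copy shares data with `loss` but carries
# no gradient history, so keeping it around does not keep the graph alive.
cache.append(loss.detach())

loss.backward()  # backward still works on the original, attached tensor
print(cache[0].stop_gradient)  # True: the cached copy is cut off from autograd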
