From 4f2badd3194169254115e2c313b1c0795b84761e Mon Sep 17 00:00:00 2001
From: BeingGod
Date: Tue, 19 Sep 2023 14:00:05 +0000
Subject: [PATCH] fix

---
 paddle/phi/core/distributed/nccl_comm_context.cc     |  2 ++
 paddle/phi/core/distributed/nccl_comm_context.h      | 10 ++++++----
 test/legacy_test/distributed_fused_lamb_test_base.py |  5 ++++-
 3 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/paddle/phi/core/distributed/nccl_comm_context.cc b/paddle/phi/core/distributed/nccl_comm_context.cc
index faf29add30d916..bd49f0cff17086 100644
--- a/paddle/phi/core/distributed/nccl_comm_context.cc
+++ b/paddle/phi/core/distributed/nccl_comm_context.cc
@@ -231,6 +231,7 @@ void NCCLCommContext::GroupStart() {
 }
 void NCCLCommContext::GroupEnd() { NCCL_CHECK(phi::dynload::ncclGroupEnd()); }
 
+#if NCCL_VERSION_CODE >= 21100
 void NCCLCommContext::RedOpCreatePreMulSum(ncclRedOp_t* op,
                                            void* scalar,
                                            ncclDataType_t dtype,
@@ -242,6 +243,7 @@ void NCCLCommContext::RedOpCreatePreMulSum(ncclRedOp_t* op,
 void NCCLCommContext::RedOpDestroy(ncclRedOp_t op) {
   PADDLE_ENFORCE_GPU_SUCCESS(phi::dynload::ncclRedOpDestroy(op, nccl_comm_));
 }
+#endif
 
 }  // namespace distributed
 }  // namespace phi
diff --git a/paddle/phi/core/distributed/nccl_comm_context.h b/paddle/phi/core/distributed/nccl_comm_context.h
index 61c3fb06c0e33b..615112481ce2bd 100644
--- a/paddle/phi/core/distributed/nccl_comm_context.h
+++ b/paddle/phi/core/distributed/nccl_comm_context.h
@@ -98,16 +98,18 @@ class NCCLCommContext final : public CommContext {
               int root,
               gpuStream_t stream);
 
+  void GroupStart();
+
+  void GroupEnd();
+
+#if NCCL_VERSION_CODE >= 21100
   void RedOpCreatePreMulSum(ncclRedOp_t* op,
                             void* scalar,
                             ncclDataType_t dtype,
                             ncclScalarResidence_t residence);
 
   void RedOpDestroy(ncclRedOp_t op);
-
-  void GroupStart();
-
-  void GroupEnd();
+#endif
 
  private:
  DISABLE_COPY_AND_ASSIGN(NCCLCommContext);
diff --git a/test/legacy_test/distributed_fused_lamb_test_base.py b/test/legacy_test/distributed_fused_lamb_test_base.py
index baffc7dd5e5460..ec45942bd0aa16 100644
--- a/test/legacy_test/distributed_fused_lamb_test_base.py
+++ b/test/legacy_test/distributed_fused_lamb_test_base.py
@@ -270,7 +270,10 @@ def setUpClass(cls):
         paddle.enable_static()
         paddle.set_flags({'FLAGS_cudnn_deterministic': True})
         _clip_by_global_norm_using_mp_type(True)
-        fleet.init(role_maker=get_role_maker())
+        if os.environ.get("FLAGS_dynamic_static_unified_comm") == "1":
+            fleet.init(role_maker=get_role_maker())
+        else:
+            paddle.distributed.collective._init_parallel_env("nccl")
 
     def config(self):
         clip_after_allreduce = bool(
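
Background for the guard: NCCL_VERSION_CODE 21100 corresponds to NCCL 2.11.0, the release that introduced ncclRedOpCreatePreMulSum and ncclRedOpDestroy, so the #if keeps the two wrapper methods out of builds against older NCCL. The sketch below is an illustrative use of the guarded API, not part of the patch; `comm_ctx` (a pointer to NCCLCommContext) and the 0.5f scaling factor are assumptions made for the example.

    #if NCCL_VERSION_CODE >= 21100
      ncclRedOp_t op;
      float scale = 0.5f;  // scalar multiplied into each element before the sum
      // Create a pre-mul-sum reduction op bound to this communicator.
      comm_ctx->RedOpCreatePreMulSum(
          &op, &scale, ncclFloat32, ncclScalarHostImmediate);
      // ... run the collective (e.g. AllReduce) with `op` in place of ncclSum ...
      comm_ctx->RedOpDestroy(op);  // user-created reduction ops must be released
    #else
      // Older NCCL: use ncclSum and apply the scaling in a separate step.
    #endif

On the Python side, the test now initializes through fleet only when FLAGS_dynamic_static_unified_comm is set to "1", and otherwise falls back to paddle.distributed.collective._init_parallel_env("nccl").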