Skip to content

Commit

Permalink
remove useless logs (PaddlePaddle#55729)
Browse files Browse the repository at this point in the history
  • Loading branch information
sneaxiy authored and wz1qqx committed Jul 31, 2023
1 parent 169f481 commit b8a932e
Show file tree
Hide file tree
Showing 3 changed files with 4 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,8 @@
from ...utils.log_util import logger
from ...utils.tensor_fusion_helper import fused_parameters

g_shard_use_reduce = int(os.environ.get("FLAGS_shard_use_reduce", 0))
logger.info(f"g_shard_use_reduce {g_shard_use_reduce}")
g_shard_norm_align_dp = int(os.environ.get("FLAGS_shard_norm_align_dp", 1))
logger.info(f"g_shard_norm_align_dp {g_shard_norm_align_dp}")
g_shard_use_reduce = int(os.environ.get("FLAGS_shard_use_reduce", 1))
g_shard_norm_align_dp = int(os.environ.get("FLAGS_shard_norm_align_dp", 0))

if g_shard_norm_align_dp:
assert (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,7 @@

__all__ = []

g_shard_norm_align_dp = int(os.environ.get("FLAGS_shard_norm_align_dp", 1))
logger.info(f"g_shard_norm_align_dp {g_shard_norm_align_dp}")
g_shard_norm_align_dp = int(os.environ.get("FLAGS_shard_norm_align_dp", 0))


class HybridParallelClipGrad:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,7 @@

__all__ = []

g_shard_use_reduce = int(os.environ.get("FLAGS_shard_use_reduce", 0))
logger.info(f"g_shard_use_reduce {g_shard_use_reduce}")
g_shard_use_reduce = int(os.environ.get("FLAGS_shard_use_reduce", 1))


# assume only the first stage and last stage need data, and data consumption is ordered
Expand Down

0 comments on commit b8a932e

Please sign in to comment.