[BugFix] Try except sequence parallel utils #8189

Merged: 3 commits, Apr 12, 2024
24 changes: 14 additions & 10 deletions paddlenlp/transformers/__init__.py
@@ -29,16 +29,20 @@
 from .feature_extraction_utils import BatchFeature, FeatureExtractionMixin
 from .image_processing_utils import ImageProcessingMixin
 from .attention_utils import create_bigbird_rand_mask_idx_list
-from paddle.distributed.fleet.utils.sequence_parallel_utils import (
-    GatherOp,
-    ScatterOp,
-    AllGatherOp,
-    ReduceScatterOp,
-    ColumnSequenceParallelLinear,
-    RowSequenceParallelLinear,
-    mark_as_sequence_parallel_parameter,
-    register_sequence_parallel_allreduce_hooks,
-)
+
+try:
+    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
+        GatherOp,
+        ScatterOp,
+        AllGatherOp,
+        ReduceScatterOp,
+        ColumnSequenceParallelLinear,
+        RowSequenceParallelLinear,
+        mark_as_sequence_parallel_parameter,
+        register_sequence_parallel_allreduce_hooks,
+    )
+except:
+    pass
 from .export import export_model

 # isort: split

Codecov (codecov/patch): added lines paddlenlp/transformers/__init__.py#L44-L45 were not covered by tests.
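The same guarded import is applied in every file this PR touches: a hard import of paddle.distributed.fleet.utils.sequence_parallel_utils becomes a try/except, so PaddleNLP still imports on Paddle builds that do not ship that module. A minimal sketch of the pattern with a narrower except ImportError and an explicit availability flag (both illustrative assumptions, not part of this PR, which uses a bare except: pass):

try:
    # Present only on Paddle builds that ship sequence-parallel support.
    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
        GatherOp,
        ScatterOp,
    )

    SEQUENCE_PARALLEL_AVAILABLE = True  # hypothetical flag, for illustration
except ImportError:
    # Older or minimal Paddle build: record the absence rather than
    # swallowing every exception type the way a bare `except:` does.
    SEQUENCE_PARALLEL_AVAILABLE = False

Catching only ImportError keeps unrelated failures raised inside the module visible; the bare except: used in the diff hides those as well.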
18 changes: 11 additions & 7 deletions paddlenlp/transformers/gpt/modeling.py
@@ -29,13 +29,17 @@
 from paddle.distributed import fleet
 from paddle.distributed.fleet.meta_parallel import get_rng_state_tracker
 from paddle.distributed.fleet.utils import recompute
-from paddle.distributed.fleet.utils.sequence_parallel_utils import (
-    ColumnSequenceParallelLinear,
-    GatherOp,
-    RowSequenceParallelLinear,
-    ScatterOp,
-    mark_as_sequence_parallel_parameter,
-)
+
+try:
+    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
+        ColumnSequenceParallelLinear,
+        GatherOp,
+        RowSequenceParallelLinear,
+        ScatterOp,
+        mark_as_sequence_parallel_parameter,
+    )
+except:
+    pass
 from paddle.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

 from ...utils.converter import StateDictNameMapping

Codecov (codecov/patch): added lines paddlenlp/transformers/gpt/modeling.py#L41-L42 were not covered by tests.
10 changes: 7 additions & 3 deletions paddlenlp/transformers/gpt/modeling_pp.py
@@ -20,9 +20,13 @@
     SharedLayerDesc,
 )
 from paddle.distributed.fleet.utils import recompute
-from paddle.distributed.fleet.utils.sequence_parallel_utils import (
-    mark_as_sequence_parallel_parameter,
-)
+
+try:
+    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
+        mark_as_sequence_parallel_parameter,
+    )
+except:
+    pass

 from paddlenlp.transformers.model_utils import PipelinePretrainedModel

Codecov (codecov/patch): added lines paddlenlp/transformers/gpt/modeling_pp.py#L28-L29 were not covered by tests.
17 changes: 10 additions & 7 deletions paddlenlp/transformers/llama/modeling.py
@@ -44,13 +44,16 @@
     return F.silu(x) * y


-from paddle.distributed.fleet.utils.sequence_parallel_utils import (
-    ColumnSequenceParallelLinear,
-    GatherOp,
-    RowSequenceParallelLinear,
-    ScatterOp,
-    mark_as_sequence_parallel_parameter,
-)
+try:
+    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
+        ColumnSequenceParallelLinear,
+        GatherOp,
+        RowSequenceParallelLinear,
+        ScatterOp,
+        mark_as_sequence_parallel_parameter,
+    )
+except:
+    pass
 from paddle.utils import try_import

 from paddlenlp.transformers.conversion_utils import (

Codecov (codecov/patch): added lines paddlenlp/transformers/llama/modeling.py#L55-L56 were not covered by tests.
17 changes: 10 additions & 7 deletions paddlenlp/transformers/mixtral/modeling.py
@@ -33,13 +33,16 @@
 except ImportError:
     fused_rotary_position_embedding = None

-from paddle.distributed.fleet.utils.sequence_parallel_utils import (
-    ColumnSequenceParallelLinear,
-    GatherOp,
-    RowSequenceParallelLinear,
-    ScatterOp,
-    mark_as_sequence_parallel_parameter,
-)
+try:
+    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
+        ColumnSequenceParallelLinear,
+        GatherOp,
+        RowSequenceParallelLinear,
+        ScatterOp,
+        mark_as_sequence_parallel_parameter,
+    )
+except:
+    pass

 from paddlenlp.transformers.conversion_utils import (
     StateDictNameMapping,

Codecov (codecov/patch): added lines paddlenlp/transformers/mixtral/modeling.py#L44-L45 were not covered by tests.
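Note that the except: pass branch binds nothing, so on a Paddle build without sequence_parallel_utils these names simply do not exist in the importing modules, and any code path that touches them fails at first use with a NameError instead of an ImportError at module load. A small sketch of how a caller could probe for this (the helper below is hypothetical, not part of the PR):

def sequence_parallel_available() -> bool:
    # NameError means the guarded module-level import did not succeed
    # on this Paddle build.
    try:
        ColumnSequenceParallelLinear  # existence check only
        return True
    except NameError:
        return False

Deferring the failure this way is the point of the fix: models that never enable sequence parallelism import and run cleanly, and only sequence-parallel code paths can encounter the missing names.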