diff --git a/paddlenlp/transformers/model_utils.py b/paddlenlp/transformers/model_utils.py
index 00ee3a0e6670..22fcb0f1faf5 100644
--- a/paddlenlp/transformers/model_utils.py
+++ b/paddlenlp/transformers/model_utils.py
@@ -286,8 +286,8 @@ def __getattr__(self, name):
         result = getattr(self.config, name)
         logger.warning(
-            f"do not access config from `model.{name}` which will be deprecated after v2.6.0, "
-            f"you should use: `model.config.{name}`"
+            f"Do not access config from `model.{name}`, which will be deprecated after v2.6.0. "
+            f"Instead, use `model.config.{name}`."
         )
         return result
diff --git a/tests/transformers/test_modeling_common.py b/tests/transformers/test_modeling_common.py
index d08e0845b728..378bb8985b3b 100644
--- a/tests/transformers/test_modeling_common.py
+++ b/tests/transformers/test_modeling_common.py
@@ -67,9 +67,7 @@ class ModelTesterMixin:
     test_resize_position_embeddings = False
     test_mismatched_shapes = True
     test_missing_keys = True
-    test_model_compatibility_keys = False
-
     use_test_inputs_embeds = False
     use_test_model_name_list = True
     is_encoder_decoder = False
@@ -551,7 +549,7 @@ def test_for_missed_attribute(self):
                 if type(new_value) != type(old_value):
                     continue
 
-                assert old_value == new_value
+                self.assertEqual(old_value, new_value)
 
 
 class ModelTesterPretrainedMixin:
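
For context (not part of the diff above): a minimal sketch of the access pattern the reworded warning targets, assuming a PaddleNLP `PretrainedModel` subclass such as `BertModel`. The attribute name is illustrative; no warning is logged when the model defines the attribute itself.

    from paddlenlp.transformers import BertModel

    model = BertModel.from_pretrained("bert-base-uncased")

    # Deprecated after v2.6.0: the attribute is not found on the model, so
    # __getattr__ falls back to model.config and logs the warning shown above.
    num_layers = model.num_hidden_layers

    # Recommended: read the attribute from the config directly.
    num_layers = model.config.num_hidden_layers

The test change is in the same spirit: replacing the bare `assert` with `self.assertEqual` makes unittest report both the old and new attribute values when the `attribute_map` round-trip disagrees.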