Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ArtistP0] add PretrainedConfig and unit test #4989

Closed
wants to merge 13 commits into from
2 changes: 2 additions & 0 deletions paddlenlp/transformers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@
from .mbart.configuration import *
from .megatronbert.modeling import *
from .megatronbert.tokenizer import *
from .megatronbert.configuration import *
from .prophetnet.modeling import *
from .prophetnet.tokenizer import *
from .mobilebert.modeling import *
Expand Down Expand Up @@ -172,6 +173,7 @@
from .codegen.configuration import *
from .artist.modeling import *
from .artist.tokenizer import *
from .artist.configuration import *
from .dallebart.modeling import *
from .dallebart.tokenizer import *
from .clip.modeling import *
Expand Down
117 changes: 117 additions & 0 deletions paddlenlp/transformers/artist/configuration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" MBart model configuration"""
from __future__ import annotations

# Public API of this module, exported via
# ``from paddlenlp.transformers.artist.configuration import *``.
__all__ = [
    "Artist_PRETRAINED_INIT_CONFIGURATION",
    "Artist_PRETRAINED_RESOURCE_FILES_MAP",
]

Artist_PRETRAINED_INIT_CONFIGURATION = {
"pai-painter-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-painting-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-scenery-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-commercial-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-large-zh": {
"vocab_size": 37512,
"hidden_size": 1024,
"num_hidden_layers": 24,
"num_attention_heads": 16,
"intermediate_size": 4096,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1,
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
}
# Every released checkpoint's weights live under the same BOS bucket
# prefix, at "<prefix>/<checkpoint-name>/model_state.pdparams".
_ARTIST_CHECKPOINT_NAMES = (
    "pai-painter-base-zh",
    "pai-painter-painting-base-zh",
    "pai-painter-scenery-base-zh",
    "pai-painter-commercial-base-zh",
    "pai-painter-large-zh",
)

Artist_PRETRAINED_RESOURCE_FILES_MAP = {
    "model_state": {
        name: "https://bj.bcebos.com/paddlenlp/models/transformers/artist/"
        + name
        + "/model_state.pdparams"
        for name in _ARTIST_CHECKPOINT_NAMES
    }
}
113 changes: 10 additions & 103 deletions paddlenlp/transformers/artist/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@
from ...utils.log import logger
from ..dallebart.modeling import VQGanDetokenizer
from ..gpt.modeling import GPTLMHead, GPTLMHeadModel, GPTModel
from .configuration import (
Artist_PRETRAINED_INIT_CONFIGURATION,
Artist_PRETRAINED_RESOURCE_FILES_MAP,
)

__all__ = [
"ArtistModel",
Expand All @@ -30,107 +34,10 @@
# set gelu_new
F.gelu_python = F.gelu

pretrained_init_configuration = {
"pai-painter-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-painting-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-scenery-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-commercial-base-zh": {
"vocab_size": 37512,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1, # no use
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
"pai-painter-large-zh": {
"vocab_size": 37512,
"hidden_size": 1024,
"num_hidden_layers": 24,
"num_attention_heads": 16,
"intermediate_size": 4096,
"hidden_act": "gelu_python",
"hidden_dropout_prob": 0.0,
"attention_probs_dropout_prob": 0.0,
"max_position_embeddings": 288,
"type_vocab_size": 1,
"initializer_range": 0.02,
"pad_token_id": 16384, # 0 + 16384
"eos_token_id": 16486, # 102 + 16384
"bos_token_id": 16485, # 101 + 16384
"eol_token_id": 16486, # 102 + 16384
},
}
pretrained_resource_files_map = {
"model_state": {
"pai-painter-base-zh": "https://bj.bcebos.com/paddlenlp/models/transformers/artist/pai-painter-base-zh/model_state.pdparams",
"pai-painter-painting-base-zh": "https://bj.bcebos.com/paddlenlp/models/transformers/artist/pai-painter-painting-base-zh/model_state.pdparams",
"pai-painter-scenery-base-zh": "https://bj.bcebos.com/paddlenlp/models/transformers/artist/pai-painter-scenery-base-zh/model_state.pdparams",
"pai-painter-commercial-base-zh": "https://bj.bcebos.com/paddlenlp/models/transformers/artist/pai-painter-commercial-base-zh/model_state.pdparams",
"pai-painter-large-zh": "https://bj.bcebos.com/paddlenlp/models/transformers/artist/pai-painter-large-zh/model_state.pdparams",
}
}


class ArtistModel(GPTModel):
pretrained_init_configuration = pretrained_init_configuration
pretrained_resource_files_map = pretrained_resource_files_map
pretrained_init_configuration = Artist_PRETRAINED_INIT_CONFIGURATION
pretrained_resource_files_map = Artist_PRETRAINED_RESOURCE_FILES_MAP


class ArtistForConditionalGeneration(GPTLMHeadModel):
Expand All @@ -143,8 +50,8 @@ class ArtistForConditionalGeneration(GPTLMHeadModel):

"""

pretrained_init_configuration = pretrained_init_configuration
pretrained_resource_files_map = pretrained_resource_files_map
pretrained_init_configuration = Artist_PRETRAINED_INIT_CONFIGURATION
pretrained_resource_files_map = Artist_PRETRAINED_RESOURCE_FILES_MAP

def __init__(self, gpt):
super().__init__(gpt)
Expand All @@ -168,8 +75,8 @@ class ArtistForImageGeneration(ArtistForConditionalGeneration):
The vocabulary size of image.
Defaults to `16384`.
"""
pretrained_init_configuration = pretrained_init_configuration
pretrained_resource_files_map = pretrained_resource_files_map
pretrained_init_configuration = Artist_PRETRAINED_INIT_CONFIGURATION
pretrained_resource_files_map = Artist_PRETRAINED_RESOURCE_FILES_MAP

def __init__(self, gpt, image_vocab_size=16384):
super().__init__(gpt)
Expand Down
Loading