From 0a2f4725788d1edb1a27b455a981a5c8a369e09c Mon Sep 17 00:00:00 2001
From: yoonlee666
Date: Tue, 8 Sep 2020 15:08:48 +0800
Subject: [PATCH] add hub config for bert base and nezha

---
 .../official/nlp/bert/mindspore_hub_conf.py | 77 +++++++++++++++++++
 1 file changed, 77 insertions(+)
 create mode 100644 model_zoo/official/nlp/bert/mindspore_hub_conf.py

diff --git a/model_zoo/official/nlp/bert/mindspore_hub_conf.py b/model_zoo/official/nlp/bert/mindspore_hub_conf.py
new file mode 100644
index 000000000..012ac9501
--- /dev/null
+++ b/model_zoo/official/nlp/bert/mindspore_hub_conf.py
@@ -0,0 +1,77 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+'''
+Bert hub interface for bert base and bert nezha
+'''
+from src.bert_model import BertModel
+from src.bert_model import BertConfig
+import mindspore.common.dtype as mstype
+bert_net_cfg_base = BertConfig(
+    batch_size=32,
+    seq_length=128,
+    vocab_size=21128,
+    hidden_size=768,
+    num_hidden_layers=12,
+    num_attention_heads=12,
+    intermediate_size=3072,
+    hidden_act="gelu",
+    hidden_dropout_prob=0.1,
+    attention_probs_dropout_prob=0.1,
+    max_position_embeddings=512,
+    type_vocab_size=2,
+    initializer_range=0.02,
+    use_relative_positions=False,
+    input_mask_from_dataset=True,
+    token_type_ids_from_dataset=True,
+    dtype=mstype.float32,
+    compute_type=mstype.float16
+)
+bert_net_cfg_nezha = BertConfig(
+    batch_size=32,
+    seq_length=128,
+    vocab_size=21128,
+    hidden_size=1024,
+    num_hidden_layers=24,
+    num_attention_heads=16,
+    intermediate_size=4096,
+    hidden_act="gelu",
+    hidden_dropout_prob=0.1,
+    attention_probs_dropout_prob=0.1,
+    max_position_embeddings=512,
+    type_vocab_size=2,
+    initializer_range=0.02,
+    use_relative_positions=True,
+    input_mask_from_dataset=True,
+    token_type_ids_from_dataset=True,
+    dtype=mstype.float32,
+    compute_type=mstype.float16
+)
+def create_network(name, *args, **kwargs):
+    '''
+    Create bert network for base and nezha.
+    '''
+    if name == 'bert_base':
+        if "batch_size" in kwargs:
+            bert_net_cfg_base.batch_size = kwargs["batch_size"]
+        if "seq_length" in kwargs:
+            bert_net_cfg_base.seq_length = kwargs["seq_length"]
+        return BertModel(bert_net_cfg_base, *args)
+    if name == 'bert_nezha':
+        if "batch_size" in kwargs:
+            bert_net_cfg_nezha.batch_size = kwargs["batch_size"]
+        if "seq_length" in kwargs:
+            bert_net_cfg_nezha.seq_length = kwargs["seq_length"]
+        return BertModel(bert_net_cfg_nezha, *args)
+    raise NotImplementedError(f"{name} is not implemented in the repo")
--
GitLab
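
For reference, a minimal usage sketch of the new create_network entry point (not part of the patch). It assumes the repo's src package and mindspore_hub_conf.py are importable, that BertModel accepts (config, is_training) so the extra positional argument forwarded through *args is the training flag, and that the network's construct takes (input_ids, token_type_ids, input_mask) of shape (batch_size, seq_length); src/bert_model.py remains the authoritative interface.

# Illustrative usage sketch under the assumptions stated above.
import numpy as np
from mindspore import Tensor
from mindspore_hub_conf import create_network

# Build BERT-base in inference mode; the batch_size and seq_length keyword
# arguments override the defaults in bert_net_cfg_base before construction.
net = create_network("bert_base", False, batch_size=1, seq_length=128)

# Dummy int32 inputs matching the configured batch_size and seq_length.
input_ids = Tensor(np.zeros((1, 128), np.int32))
token_type_ids = Tensor(np.zeros((1, 128), np.int32))
input_mask = Tensor(np.ones((1, 128), np.int32))
outputs = net(input_ids, token_type_ids, input_mask)

Overriding batch_size and seq_length through kwargs mutates the module-level config objects, which keeps the hub interface to a single create_network call per model variant ("bert_base" or "bert_nezha").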