Commit 837eff99 authored by guosheng

Rename Model.self as model in test_text.py

test=develop
Parent 503d40a7
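The rename below is mechanical but meaningful: `model_init` and `model_forward` are declared as `@staticmethod`s that the test harness installs on a generated model class, so their first positional argument is the model instance, not an implicitly bound `self`. A minimal, self-contained sketch of that binding pattern (the `FakeModel` and `forward` names are illustrative only, not from this commit):

# Sketch: plain functions attached to a class receive the instance as their
# first argument, exactly like `self`, even when they were never written as
# ordinary methods. Naming that parameter `model` avoids suggesting a bound
# method where the declaration says @staticmethod.
class FakeModel:
    pass

def model_init(model, hidden_size):
    # Attributes are set on the instance that is passed in explicitly.
    model.hidden_size = hidden_size

def model_forward(model, x):
    return x * model.hidden_size

FakeModel.__init__ = model_init
FakeModel.forward = model_forward

m = FakeModel(3)
assert m.forward(2) == 6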
@@ -56,13 +56,13 @@ class ModuleApiTest(unittest.TestCase):
         return __impl__
 
     @staticmethod
-    def model_init(self, *args, **kwargs):
+    def model_init(model, *args, **kwargs):
         raise NotImplementedError(
             "model_init acts as `Model.__init__`, thus must implement it")
 
     @staticmethod
-    def model_forward(self, *args, **kwargs):
-        return self.module(*args, **kwargs)
+    def model_forward(model, *args, **kwargs):
+        return model.module(*args, **kwargs)
 
     def make_inputs(self):
         # TODO(guosheng): add default from `self.inputs`
@@ -118,7 +118,7 @@ class ModuleApiTest(unittest.TestCase):
 
 class TestBasicLSTM(ModuleApiTest):
     def setUp(self):
-        # TODO(guosheng): Change to big size. Currentlys bigger hidden size for
+        # TODO(guosheng): Change to big size. Currently bigger hidden size for
         # LSTM would fail, the second static graph run might get diff output
         # with others.
         shape = (2, 4, 16)
@@ -128,8 +128,8 @@ class TestBasicLSTM(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self, input_size, hidden_size):
-        self.lstm = RNN(
+    def model_init(model, input_size, hidden_size):
+        model.lstm = RNN(
             BasicLSTMCell(
                 input_size,
                 hidden_size,
@@ -137,8 +137,8 @@ class TestBasicLSTM(ModuleApiTest):
                 bias_attr=fluid.ParamAttr(name="lstm_bias")))
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.lstm(inputs)[0]
+    def model_forward(model, inputs):
+        return model.lstm(inputs)[0]
 
     def make_inputs(self):
         inputs = [
@@ -162,12 +162,12 @@ class TestBasicGRU(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self, input_size, hidden_size):
-        self.gru = RNN(BasicGRUCell(input_size, hidden_size))
+    def model_init(model, input_size, hidden_size):
+        model.gru = RNN(BasicGRUCell(input_size, hidden_size))
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.gru(inputs)[0]
+    def model_forward(model, inputs):
+        return model.gru(inputs)[0]
 
     def make_inputs(self):
         inputs = [
@@ -220,8 +220,8 @@ class TestBeamSearch(ModuleApiTest):
             decoder, max_step_num=max_step_num, is_test=True)
 
     @staticmethod
-    def model_forward(self, init_hidden, init_cell):
-        return self.beam_search_decoder([init_hidden, init_cell])[0]
+    def model_forward(model, init_hidden, init_cell):
+        return model.beam_search_decoder([init_hidden, init_cell])[0]
 
     def make_inputs(self):
         inputs = [
@@ -258,7 +258,7 @@ class TestTransformerEncoder(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self,
+    def model_init(model,
                    n_layer,
                    n_head,
                    d_key,
@@ -271,14 +271,14 @@ class TestTransformerEncoder(ModuleApiTest):
                    preprocess_cmd="n",
                    postprocess_cmd="da",
                    ffn_fc1_act="relu"):
-        self.encoder = TransformerEncoder(
+        model.encoder = TransformerEncoder(
             n_layer, n_head, d_key, d_value, d_model, d_inner_hid,
             prepostprocess_dropout, attention_dropout, relu_dropout,
             preprocess_cmd, postprocess_cmd, ffn_fc1_act)
 
     @staticmethod
-    def model_forward(self, enc_input, attn_bias):
-        return self.encoder(enc_input, attn_bias)
+    def model_forward(model, enc_input, attn_bias):
+        return model.encoder(enc_input, attn_bias)
 
     def make_inputs(self):
         inputs = [
@@ -321,7 +321,7 @@ class TestTransformerDecoder(TestTransformerEncoder):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self,
+    def model_init(model,
                    n_layer,
                    n_head,
                    d_key,
@@ -333,20 +333,20 @@ class TestTransformerDecoder(TestTransformerEncoder):
                    relu_dropout=0.1,
                    preprocess_cmd="n",
                    postprocess_cmd="da"):
-        self.decoder = TransformerDecoder(
+        model.decoder = TransformerDecoder(
             n_layer, n_head, d_key, d_value, d_model, d_inner_hid,
             prepostprocess_dropout, attention_dropout, relu_dropout,
             preprocess_cmd, postprocess_cmd)
 
     @staticmethod
-    def model_forward(self,
+    def model_forward(model,
                       dec_input,
                       enc_output,
                       self_attn_bias,
                       cross_attn_bias,
                       caches=None):
-        return self.decoder(dec_input, enc_output, self_attn_bias,
-                            cross_attn_bias, caches)
+        return model.decoder(dec_input, enc_output, self_attn_bias,
+                             cross_attn_bias, caches)
 
     def make_inputs(self):
         inputs = [
@@ -394,7 +394,7 @@ class TestTransformerBeamSearchDecoder(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self,
+    def model_init(model,
                    vocab_size,
                    n_layer,
                    n_head,
@@ -411,7 +411,7 @@ class TestTransformerBeamSearchDecoder(ModuleApiTest):
                    eos_id=1,
                    beam_size=4,
                    max_step_num=20):
-        self.beam_size = beam_size
+        model.beam_size = beam_size
 
         def embeder_init(self, size):
             Layer.__init__(self)
@@ -423,13 +423,13 @@ class TestTransformerBeamSearchDecoder(ModuleApiTest):
         })
         embedder = Embedder(size=[vocab_size, d_model])
         output_layer = Linear(d_model, vocab_size)
-        self.decoder = TransformerDecoder(
+        model.decoder = TransformerDecoder(
             n_layer, n_head, d_key, d_value, d_model, d_inner_hid,
             prepostprocess_dropout, attention_dropout, relu_dropout,
             preprocess_cmd, postprocess_cmd)
-        transformer_cell = TransformerCell(self.decoder, embedder,
+        transformer_cell = TransformerCell(model.decoder, embedder,
                                            output_layer)
-        self.beam_search_decoder = DynamicDecode(
+        model.beam_search_decoder = DynamicDecode(
             TransformerBeamSearchDecoder(
                 transformer_cell,
                 bos_id,
@@ -440,14 +440,14 @@ class TestTransformerBeamSearchDecoder(ModuleApiTest):
             is_test=True)
 
     @staticmethod
-    def model_forward(self, enc_output, trg_src_attn_bias):
-        caches = self.decoder.prepare_incremental_cache(enc_output)
+    def model_forward(model, enc_output, trg_src_attn_bias):
+        caches = model.decoder.prepare_incremental_cache(enc_output)
         enc_output = TransformerBeamSearchDecoder.tile_beam_merge_with_batch(
-            enc_output, self.beam_size)
+            enc_output, model.beam_size)
         trg_src_attn_bias = TransformerBeamSearchDecoder.tile_beam_merge_with_batch(
-            trg_src_attn_bias, self.beam_size)
-        static_caches = self.decoder.prepare_static_cache(enc_output)
-        rs, _ = self.beam_search_decoder(
+            trg_src_attn_bias, model.beam_size)
+        static_caches = model.decoder.prepare_static_cache(enc_output)
+        rs, _ = model.beam_search_decoder(
             inits=caches,
             enc_output=enc_output,
             trg_src_attn_bias=trg_src_attn_bias,
@@ -483,7 +483,7 @@ class TestSequenceTagging(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self,
+    def model_init(model,
                    vocab_size,
                    num_labels,
                    word_emb_dim=128,
@@ -492,13 +492,13 @@ class TestSequenceTagging(ModuleApiTest):
                    crf_learning_rate=0.1,
                    bigru_num=2,
                    init_bound=0.1):
-        self.tagger = SequenceTagging(vocab_size, num_labels, word_emb_dim,
-                                      grnn_hidden_dim, emb_learning_rate,
-                                      crf_learning_rate, bigru_num, init_bound)
+        model.tagger = SequenceTagging(
+            vocab_size, num_labels, word_emb_dim, grnn_hidden_dim,
+            emb_learning_rate, crf_learning_rate, bigru_num, init_bound)
 
     @staticmethod
-    def model_forward(self, word, lengths, target=None):
-        return self.tagger(word, lengths, target)
+    def model_forward(model, word, lengths, target=None):
+        return model.tagger(word, lengths, target)
 
     def make_inputs(self):
         inputs = [
@@ -535,13 +535,13 @@ class TestStackedRNN(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self, input_size, hidden_size, num_layers):
+    def model_init(model, input_size, hidden_size, num_layers):
         cells = [
             BasicLSTMCell(input_size, hidden_size),
             BasicLSTMCell(hidden_size, hidden_size)
         ]
         stacked_cell = StackedRNNCell(cells)
-        self.lstm = RNN(stacked_cell)
+        model.lstm = RNN(stacked_cell)
 
     @staticmethod
     def model_forward(self, inputs):
@@ -569,12 +569,12 @@ class TestLSTM(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self, input_size, hidden_size, num_layers):
-        self.lstm = LSTM(input_size, hidden_size, num_layers=num_layers)
+    def model_init(model, input_size, hidden_size, num_layers):
+        model.lstm = LSTM(input_size, hidden_size, num_layers=num_layers)
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.lstm(inputs)[0]
+    def model_forward(model, inputs):
+        return model.lstm(inputs)[0]
 
     def make_inputs(self):
         inputs = [
@@ -598,13 +598,13 @@ class TestBiLSTM(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self,
+    def model_init(model,
                    input_size,
                    hidden_size,
                    num_layers,
                    merge_mode="concat",
                    merge_each_layer=False):
-        self.bilstm = BidirectionalLSTM(
+        model.bilstm = BidirectionalLSTM(
             input_size,
             hidden_size,
             num_layers=num_layers,
@@ -612,8 +612,8 @@ class TestBiLSTM(ModuleApiTest):
             merge_each_layer=merge_each_layer)
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.bilstm(inputs)[0]
+    def model_forward(model, inputs):
+        return model.bilstm(inputs)[0]
 
     def make_inputs(self):
         inputs = [
@@ -641,12 +641,12 @@ class TestGRU(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self, input_size, hidden_size, num_layers):
-        self.gru = GRU(input_size, hidden_size, num_layers=num_layers)
+    def model_init(model, input_size, hidden_size, num_layers):
+        model.gru = GRU(input_size, hidden_size, num_layers=num_layers)
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.gru(inputs)[0]
+    def model_forward(model, inputs):
+        return model.gru(inputs)[0]
 
     def make_inputs(self):
         inputs = [
@@ -670,13 +670,13 @@ class TestBiGRU(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self,
+    def model_init(model,
                    input_size,
                    hidden_size,
                    num_layers,
                    merge_mode="concat",
                    merge_each_layer=False):
-        self.bigru = BidirectionalGRU(
+        model.bigru = BidirectionalGRU(
             input_size,
             hidden_size,
             num_layers=num_layers,
@@ -684,8 +684,8 @@ class TestBiGRU(ModuleApiTest):
             merge_each_layer=merge_each_layer)
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.bigru(inputs)[0]
+    def model_forward(model, inputs):
+        return model.bigru(inputs)[0]
 
     def make_inputs(self):
         inputs = [
@@ -713,8 +713,8 @@ class TestCNNEncoder(ModuleApiTest):
         self.param_states = {}
 
     @staticmethod
-    def model_init(self, num_channels, num_filters, num_layers):
-        self.cnn_encoder = CNNEncoder(
+    def model_init(model, num_channels, num_filters, num_layers):
+        model.cnn_encoder = CNNEncoder(
             num_layers=2,
             num_channels=num_channels,
             num_filters=num_filters,
@@ -722,8 +722,8 @@ class TestCNNEncoder(ModuleApiTest):
             pool_size=[7, 6])
 
     @staticmethod
-    def model_forward(self, inputs):
-        return self.cnn_encoder(inputs)
+    def model_forward(model, inputs):
+        return model.cnn_encoder(inputs)
 
     def make_inputs(self):
         inputs = [
@@ -734,7 +734,7 @@ class TestCNNEncoder(ModuleApiTest):
         ]
         return inputs
 
-    def test_check_output_merge0(self):
+    def test_check_output(self):
         self.check_output()
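After this change every subclass follows one shape: a `model_init` staticmethod that attaches layers to the `model` argument, and a `model_forward` staticmethod that calls them. A sketch of a new test written in that style (assuming the same `ModuleApiTest` harness and the `GRU` layer used above; the `TestMyGRU` name and the `setUp` fields are modeled on the hunks shown here, not taken from the commit):

import numpy as np

class TestMyGRU(ModuleApiTest):
    def setUp(self):
        shape = (2, 4, 16)
        # Assumed harness fields, mirroring the setUp bodies in this diff.
        self.inputs = [np.random.random(shape).astype("float32")]
        self.outputs = None
        self.attrs = {"input_size": 16, "hidden_size": 16, "num_layers": 2}
        self.param_states = {}

    @staticmethod
    def model_init(model, input_size, hidden_size, num_layers):
        # Layers live on the passed-in model instance, never on a bound `self`.
        model.gru = GRU(input_size, hidden_size, num_layers=num_layers)

    @staticmethod
    def model_forward(model, inputs):
        return model.gru(inputs)[0]

    def test_check_output(self):
        self.check_output()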