From dbc6102e55d0dbea7468bc303b084b0745db1ac1 Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Wed, 23 May 2018 15:24:43 -0700
Subject: [PATCH] simplify label_sementic_example

---
 .../tests/book/test_label_semantic_roles.py | 67 ++++++++-----------
 1 file changed, 27 insertions(+), 40 deletions(-)

diff --git a/python/paddle/fluid/tests/book/test_label_semantic_roles.py b/python/paddle/fluid/tests/book/test_label_semantic_roles.py
index f1ee5dfd9..3eb0c1069 100644
--- a/python/paddle/fluid/tests/book/test_label_semantic_roles.py
+++ b/python/paddle/fluid/tests/book/test_label_semantic_roles.py
@@ -116,29 +116,6 @@ def db_lstm(word, predicate, ctx_n2, ctx_n1, ctx_0, ctx_p1, ctx_p2, mark,
     return feature_out
 
 
-def to_lodtensor(data, place):
-    seq_lens = [len(seq) for seq in data]
-    cur_len = 0
-    lod = [cur_len]
-    for l in seq_lens:
-        cur_len += l
-        lod.append(cur_len)
-    flattened_data = np.concatenate(data, axis=0).astype("int64")
-    flattened_data = flattened_data.reshape([len(flattened_data), 1])
-    res = fluid.LoDTensor()
-    res.set(flattened_data, place)
-    res.set_lod([lod])
-    return res
-
-
-def create_random_lodtensor(lod, place, low, high):
-    data = np.random.random_integers(low, high, [lod[-1], 1]).astype("int64")
-    res = fluid.LoDTensor()
-    res.set(data, place)
-    res.set_lod([lod])
-    return res
-
-
 def train(use_cuda, save_dirname=None, is_local=True):
     # define network topology
     word = fluid.layers.data(
@@ -271,23 +248,33 @@ def infer(use_cuda, save_dirname=None):
         [inference_program, feed_target_names,
          fetch_targets] = fluid.io.load_inference_model(save_dirname, exe)
 
-        lod = [0, 4, 10]
-        word = create_random_lodtensor(
-            lod, place, low=0, high=word_dict_len - 1)
-        pred = create_random_lodtensor(
-            lod, place, low=0, high=pred_dict_len - 1)
-        ctx_n2 = create_random_lodtensor(
-            lod, place, low=0, high=word_dict_len - 1)
-        ctx_n1 = create_random_lodtensor(
-            lod, place, low=0, high=word_dict_len - 1)
-        ctx_0 = create_random_lodtensor(
-            lod, place, low=0, high=word_dict_len - 1)
-        ctx_p1 = create_random_lodtensor(
-            lod, place, low=0, high=word_dict_len - 1)
-        ctx_p2 = create_random_lodtensor(
-            lod, place, low=0, high=word_dict_len - 1)
-        mark = create_random_lodtensor(
-            lod, place, low=0, high=mark_dict_len - 1)
+        # Setup inputs by creating LoDTensors to represent sequences of words.
+        # Here each word is the basic element of these LoDTensors and the shape of
+        # each word (base_shape) should be [1] since it is simply an index to
+        # look up for the corresponding word vector.
+        # Suppose the length_based level of detail (lod) info is set to [[3, 4, 2]],
+        # which has only one lod level. Then the created LoDTensors will have only
+        # one higher level structure (sequence of words, or sentence) than the basic
+        # element (word). Hence the LoDTensor will hold data for three sentences of
+        # length 3, 4 and 2, respectively.
+        lod = [[3, 4, 2]]
+        base_shape = [1]
+        word = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=word_dict_len - 1)
+        pred = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=pred_dict_len - 1)
+        ctx_n2 = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=word_dict_len - 1)
+        ctx_n1 = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=word_dict_len - 1)
+        ctx_0 = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=word_dict_len - 1)
+        ctx_p1 = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=word_dict_len - 1)
+        ctx_p2 = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=word_dict_len - 1)
+        mark = fluid.create_random_lodtensor(
+            lod, base_shape, place, low=0, high=mark_dict_len - 1)
 
         # Construct feed as a dictionary of {feed_target_name: feed_target_data}
         # and results will contain a list of data corresponding to fetch_targets.
--
GitLab
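
The deleted offset-based helpers and the new length-based lod relate as follows: a length-based lod such as [[3, 4, 2]] lists per-sentence lengths, while the offset-based form passed to LoDTensor.set_lod in the deleted create_random_lodtensor is the running sum [0, 3, 7, 9]. Below is a minimal sketch of a helper with the same behavior as the fluid.create_random_lodtensor calls in the new code, assuming only the offset-based LoDTensor API shown in the deleted helpers; the name make_random_lodtensor is hypothetical and not part of the patch.

    import numpy as np
    import paddle.fluid as fluid

    def make_random_lodtensor(recursive_seq_lens, base_shape, place, low, high):
        # Hypothetical helper, not part of the patch.
        # Length-based lod, e.g. [[3, 4, 2]], lists the length of each sequence;
        # the offset-based form expected by set_lod is the running sum [0, 3, 7, 9].
        lengths = recursive_seq_lens[0]
        offsets = [0]
        for length in lengths:
            offsets.append(offsets[-1] + length)
        # The total number of basic elements (words) equals the last offset; each
        # element has shape base_shape ([1] here), i.e. one vocabulary index per word.
        # high + 1 keeps the upper bound inclusive, matching random_integers in the
        # deleted helper.
        data = np.random.randint(
            low, high + 1, [offsets[-1]] + base_shape).astype("int64")
        tensor = fluid.LoDTensor()
        tensor.set(data, place)
        tensor.set_lod([offsets])
        return tensor

    # Example: make_random_lodtensor([[3, 4, 2]], [1], fluid.CPUPlace(), 0, 100)
    # holds 9 rows of shape [1], grouped into sentences of length 3, 4 and 2.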