From cfa304bad47ec86b6d44c8fa1807deb5664a2ffa Mon Sep 17 00:00:00 2001
From: zhangjcqq <664122220@qq.com>
Date: Tue, 15 Nov 2016 16:13:05 +0800
Subject: [PATCH] Refactor hidden0 projections into a loop

---
 demo/semantic_role_labeling/db_lstm.py | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

diff --git a/demo/semantic_role_labeling/db_lstm.py b/demo/semantic_role_labeling/db_lstm.py
index 32dcd07483a..943076d9148 100644
--- a/demo/semantic_role_labeling/db_lstm.py
+++ b/demo/semantic_role_labeling/db_lstm.py
@@ -130,21 +130,14 @@ ctx_p1_embedding = embedding_layer(size=word_dim, input=ctx_p1, param_attr=emb_p
 ctx_p2_embedding = embedding_layer(size=word_dim, input=ctx_p2, param_attr=emb_para)
 mark_embedding = embedding_layer(name='word_ctx-in_embedding', size=mark_dim, input=mark, param_attr=std_0)
 
+all_emb = [word_embedding, predicate_embedding, ctx_n2_embedding, ctx_n1_embedding, ctx_0_embedding,
+           ctx_p1_embedding, ctx_p2_embedding, mark_embedding]
 hidden_0 = mixed_layer(
     name='hidden0',
     size=hidden_dim,
     bias_attr=std_default,
-    input=[
-        full_matrix_projection(input=word_embedding, param_attr=std_default),
-        full_matrix_projection(input=predicate_embedding, param_attr=std_default),
-        full_matrix_projection(input=ctx_n2_embedding, param_attr=std_default),
-        full_matrix_projection(input=ctx_n1_embedding, param_attr=std_default),
-        full_matrix_projection(input=ctx_0_embedding, param_attr=std_default),
-        full_matrix_projection(input=ctx_p1_embedding, param_attr=std_default),
-        full_matrix_projection(input=ctx_p2_embedding, param_attr=std_default),
-        full_matrix_projection(input=mark_embedding, param_attr=std_default)
-    ])
+    input=[full_matrix_projection(input=emb, param_attr=std_default) for emb in all_emb])
 
 mix_hidden_lr = 1e-3
--
GitLab
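
Note: below is a minimal, self-contained sketch of the refactor above, written in
plain Python so it runs without PaddlePaddle. The full_matrix_projection stub is
hypothetical; in the real config it comes from PaddlePaddle's trainer-config
helpers, and the embedding names stand in for the layers defined earlier in
db_lstm.py. It only demonstrates that the list comprehension builds the same
projection list as the eight hand-written calls it replaces.

    # Hypothetical stand-in for PaddlePaddle's full_matrix_projection helper;
    # the real one returns a projection config object rather than a tuple.
    def full_matrix_projection(input, param_attr=None):
        return ('full_matrix_projection', input, param_attr)

    # Placeholder for the ParameterAttribute used in db_lstm.py.
    std_default = 'std_default'

    # Stand-ins for the eight embedding layers defined earlier in db_lstm.py.
    all_emb = ['word', 'predicate', 'ctx_n2', 'ctx_n1',
               'ctx_0', 'ctx_p1', 'ctx_p2', 'mark']

    # Before the patch: one call written out per embedding (first two shown).
    explicit = [full_matrix_projection(input='word', param_attr=std_default),
                full_matrix_projection(input='predicate', param_attr=std_default)]

    # After the patch: the same calls generated by a comprehension.
    looped = [full_matrix_projection(input=emb, param_attr=std_default)
              for emb in all_emb]

    assert looped[:2] == explicit  # identical projection configs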