diff --git a/demo/semantic_role_labeling/db_lstm.py b/demo/semantic_role_labeling/db_lstm.py
index 68feb9c4a77eb7d60180c2b3dbfc7ac2367cdb67..32dcd07483acd6dbdf6a9c379a41b93e8d102cbc 100644
--- a/demo/semantic_role_labeling/db_lstm.py
+++ b/demo/semantic_role_labeling/db_lstm.py
@@ -122,7 +122,7 @@ std_default = ParameterAttribute(initial_std=default_std)
 word_embedding = embedding_layer(size=word_dim, input=word, param_attr=emb_para)
 predicate_embedding = embedding_layer(size=word_dim, input=predicate, param_attr=ParameterAttribute(name='vemb',initial_std=default_std))
-ctx_n2_embedding = embedding_layer(size=word_dim, input=ctx_n2, param_attr=emb_para)
+ ctx_n2_embedding = embedding_layer(size=word_dim, input=ctx_n2, param_attr=emb_para)
 ctx_n1_embedding = embedding_layer(size=word_dim, input=ctx_n1, param_attr=emb_para)
 ctx_0_embedding = embedding_layer(size=word_dim, input=ctx_0, param_attr=emb_para)