From ab3686eaff25933048a36dae71584ee005117a84 Mon Sep 17 00:00:00 2001
From: Feiyu Chan
Date: Tue, 14 Jul 2020 17:46:02 +0800
Subject: [PATCH] fix nce docstring for python 3, test=develop,
 test=document_fix (#2225)

fix nce docstring for python 3
---
 doc/fluid/api_cn/layers_cn/nce_cn.rst | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/doc/fluid/api_cn/layers_cn/nce_cn.rst b/doc/fluid/api_cn/layers_cn/nce_cn.rst
index aea9b3598..ef4532b84 100644
--- a/doc/fluid/api_cn/layers_cn/nce_cn.rst
+++ b/doc/fluid/api_cn/layers_cn/nce_cn.rst
@@ -43,35 +43,35 @@ nce
 
     window_size = 5
     words = []
-    for i in xrange(window_size):
-        words.append(fluid.layers.data(
-            name='word_{0}'.format(i), shape=[1], dtype='int64'))
+    for i in range(window_size):
+        words.append(fluid.data(
+            name='word_{0}'.format(i), shape=[-1, 1], dtype='int64'))
 
     dict_size = 10000
     label_word = int(window_size / 2) + 1
 
     embs = []
-    for i in xrange(window_size):
+    for i in range(window_size):
         if i == label_word:
             continue
 
         emb = fluid.layers.embedding(input=words[i], size=[dict_size, 32],
-                                     param_attr='embed', is_sparse=True)
+                                       param_attr='embed', is_sparse=True)
         embs.append(emb)
 
     embs = fluid.layers.concat(input=embs, axis=1)
     loss = fluid.layers.nce(input=embs, label=words[label_word],
-              num_total_classes=dict_size, param_attr='nce.w_0',
-              bias_attr='nce.b_0')
+                num_total_classes=dict_size, param_attr='nce.w_0',
+                bias_attr='nce.b_0')
 
-    # 或使用自定义分布
+    #or use custom distribution
     dist = np.array([0.05,0.5,0.1,0.3,0.05])
     loss = fluid.layers.nce(input=embs, label=words[label_word],
-              num_total_classes=5, param_attr='nce.w_1',
-              bias_attr='nce.b_1',
-              num_neg_samples=3,
-              sampler="custom_dist",
-              custom_dist=dist)
+                num_total_classes=5, param_attr='nce.w_1',
+                bias_attr='nce.b_1',
+                num_neg_samples=3,
+                sampler="custom_dist",
+                custom_dist=dist)
--
GitLab
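
For reference, below is a sketch of the complete docstring example as it would read once the patch above is applied, with the changed lines merged back into their surrounding context. The two import lines and the inline comments are additions for readability and are not part of the hunk; the snippet assumes the Paddle 1.x fluid API that this docstring documents.

.. code-block:: python

    # Sketch of the post-patch example; imports below are assumed, not from the hunk.
    import numpy as np
    import paddle.fluid as fluid

    window_size = 5
    words = []
    for i in range(window_size):
        words.append(fluid.data(
            name='word_{0}'.format(i), shape=[-1, 1], dtype='int64'))

    dict_size = 10000
    label_word = int(window_size / 2) + 1

    embs = []
    for i in range(window_size):
        if i == label_word:
            continue

        # Look up a 32-dimensional embedding for each context word,
        # sharing the parameter 'embed' across all window positions.
        emb = fluid.layers.embedding(input=words[i], size=[dict_size, 32],
                                     param_attr='embed', is_sparse=True)
        embs.append(emb)

    # Concatenate the context embeddings and train with NCE over the full
    # vocabulary, sampling negatives from the default distribution.
    embs = fluid.layers.concat(input=embs, axis=1)
    loss = fluid.layers.nce(input=embs, label=words[label_word],
                            num_total_classes=dict_size, param_attr='nce.w_0',
                            bias_attr='nce.b_0')

    # Or sample negatives from a custom distribution over 5 classes.
    dist = np.array([0.05, 0.5, 0.1, 0.3, 0.05])
    loss = fluid.layers.nce(input=embs, label=words[label_word],
                            num_total_classes=5, param_attr='nce.w_1',
                            bias_attr='nce.b_1',
                            num_neg_samples=3,
                            sampler="custom_dist",
                            custom_dist=dist)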