From f0f22c89aa7cf24e7add3da3bfb93710cb4ac686 Mon Sep 17 00:00:00 2001
From: Travis CI
Date: Mon, 10 Apr 2017 09:06:23 +0000
Subject: [PATCH] Deploy to GitHub Pages: 4b1b599a6a9944cb37b45b00a3e5b43fe975428a

---
 develop/doc/api/v1/trainer_config_helpers/layers.html | 1 +
 develop/doc/api/v2/config/layer.html | 1 +
 develop/doc_cn/api/v1/trainer_config_helpers/layers.html | 1 +
 develop/doc_cn/api/v2/config/layer.html | 1 +
 4 files changed, 4 insertions(+)

diff --git a/develop/doc/api/v1/trainer_config_helpers/layers.html b/develop/doc/api/v1/trainer_config_helpers/layers.html
index 73a36aae31c..989dee056d6 100644
--- a/develop/doc/api/v1/trainer_config_helpers/layers.html
+++ b/develop/doc/api/v1/trainer_config_helpers/layers.html
@@ -3488,6 +3488,7 @@ A fast and simple algorithm for training neural probabilistic language models.
  • label (LayerOutput) – label layer
  • weight (LayerOutput) – weight layer, can be None (default)
  • num_classes (int) – number of classes.
  • +
  • act (BaseActivation) – Activation, default is Sigmoid.
  • num_neg_samples (int) – number of negative samples. Default is 10.
  • neg_distribution (list|tuple|collections.Sequence|None) – The distribution for generating the random negative labels. A uniform distribution will be used if not provided.

diff --git a/develop/doc/api/v2/config/layer.html b/develop/doc/api/v2/config/layer.html
index be00e43bbfb..65c26eb16ba 100644
--- a/develop/doc/api/v2/config/layer.html
+++ b/develop/doc/api/v2/config/layer.html
@@ -4140,6 +4140,7 @@ A fast and simple algorithm for training neural probabilistic language models.
  • label (paddle.v2.config_base.Layer) – label layer
  • weight (paddle.v2.config_base.Layer) – weight layer, can be None (default)
  • num_classes (int) – number of classes.
  • +
  • act (paddle.v2.Activation.Base) – Activation, default is Sigmoid.
  • num_neg_samples (int) – number of negative samples. Default is 10.
  • neg_distribution (list|tuple|collections.Sequence|None) – The distribution for generating the random negative labels. A uniform distribution will be used if not provided.

diff --git a/develop/doc_cn/api/v1/trainer_config_helpers/layers.html b/develop/doc_cn/api/v1/trainer_config_helpers/layers.html
index b549631786c..8108ce920c3 100644
--- a/develop/doc_cn/api/v1/trainer_config_helpers/layers.html
+++ b/develop/doc_cn/api/v1/trainer_config_helpers/layers.html
@@ -3495,6 +3495,7 @@ A fast and simple algorithm for training neural probabilistic language models.
  • label (LayerOutput) – label layer
  • weight (LayerOutput) – weight layer, can be None (default)
  • num_classes (int) – number of classes.
  • +
  • act (BaseActivation) – Activation, default is Sigmoid.
  • num_neg_samples (int) – number of negative samples. Default is 10.
  • neg_distribution (list|tuple|collections.Sequence|None) – The distribution for generating the random negative labels. A uniform distribution will be used if not provided.

diff --git a/develop/doc_cn/api/v2/config/layer.html b/develop/doc_cn/api/v2/config/layer.html
index a43dff00170..0ee55275501 100644
--- a/develop/doc_cn/api/v2/config/layer.html
+++ b/develop/doc_cn/api/v2/config/layer.html
@@ -4147,6 +4147,7 @@ A fast and simple algorithm for training neural probabilistic language models.
  • label (paddle.v2.config_base.Layer) – label layer
  • weight (paddle.v2.config_base.Layer) – weight layer, can be None (default)
  • num_classes (int) – number of classes.
  • +
  • act (paddle.v2.Activation.Base) – Activation, default is Sigmoid.
  • num_neg_samples (int) – number of negative samples. Default is 10.
  • neg_distribution (list|tuple|collections.Sequence|None) – The distribution for generating the random negative labels. A uniform distribution will be used if not provided.
--
GitLab