From 0fa1b2f3cab0eab642529370cea6d37530494260 Mon Sep 17 00:00:00 2001
From: littletomatodonkey <2120160898@bit.edu.cn>
Date: Wed, 8 Apr 2020 16:01:44 +0800
Subject: [PATCH] fix initalization and regularization docs (#23492)

---
 python/paddle/fluid/initializer.py | 11 +++++++----
 python/paddle/fluid/regularizer.py |  8 ++++----
 2 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/python/paddle/fluid/initializer.py b/python/paddle/fluid/initializer.py
index 21fdccc0239..32a2d7d704b 100644
--- a/python/paddle/fluid/initializer.py
+++ b/python/paddle/fluid/initializer.py
@@ -133,14 +133,16 @@ class ConstantInitializer(Initializer):
 
     Args:
         value (float32): constant value to initialize the variable
+        force_cpu (bool): place for initialization, if set true, initialization will
+            be forced on CPU even if executor is set on CUDA. default false.
 
     Examples:
         .. code-block:: python
 
             import paddle.fluid as fluid
-            x = fluid.data(name="data", shape=[8, 32, 32], dtype="float32")
+            x = fluid.data(name="data", shape=[32, 32], dtype="float32")
             fc = fluid.layers.fc(input=x, size=10,
-                param_attr=fluid.initializer.Constant(value=2.0))
+                param_attr=fluid.initializer.ConstantInitializer(value=2.0))
 
     """
 
@@ -744,15 +746,16 @@ class BilinearInitializer(Initializer):
         .. code-block:: python
 
             import paddle.fluid as fluid
+            import math
             factor = 2
             C = 2
             B = 8
             H = W = 32
             w_attr = fluid.param_attr.ParamAttr(
-                learning_rate=0., 
+                learning_rate=0.,
                 regularizer=fluid.regularizer.L2Decay(0.),
                 initializer=fluid.initializer.Bilinear())
-            x = fluid.data(name="data", shape=[B, 3, H, W], 
+            x = fluid.data(name="data", shape=[B, 3, H, W],
                            dtype="float32")
             conv_up = fluid.layers.conv2d_transpose(
                 input=x,
diff --git a/python/paddle/fluid/regularizer.py b/python/paddle/fluid/regularizer.py
index d6774faf686..44e803c1eb8 100644
--- a/python/paddle/fluid/regularizer.py
+++ b/python/paddle/fluid/regularizer.py
@@ -134,8 +134,8 @@ class L2DecayRegularizer(WeightDecayRegularizer):
             main_prog = fluid.Program()
             startup_prog = fluid.Program()
             with fluid.program_guard(main_prog, startup_prog):
-                data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
-                label = fluid.layers.data(name='label', shape=[1], dtype='int64')
+                data = fluid.data(name='image', shape=[256, 3, 28, 28], dtype='float32')
+                label = fluid.data(name='label', shape=[256, 1], dtype='int64')
                 hidden = fluid.layers.fc(input=data, size=128, act='relu')
                 prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
                 loss = fluid.layers.cross_entropy(input=prediction, label=label)
@@ -213,8 +213,8 @@ class L1DecayRegularizer(WeightDecayRegularizer):
             main_prog = fluid.Program()
             startup_prog = fluid.Program()
             with fluid.program_guard(main_prog, startup_prog):
-                data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
-                label = fluid.layers.data(name='label', shape=[1], dtype='int64')
+                data = fluid.data(name='image', shape=[256, 3, 28, 28], dtype='float32')
+                label = fluid.data(name='label', shape=[256, 1], dtype='int64')
                 hidden = fluid.layers.fc(input=data, size=128, act='relu')
                 prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
                 loss = fluid.layers.cross_entropy(input=prediction, label=label)
--
GitLab
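
For reference, below is a minimal, self-contained sketch of the usage pattern the updated regularizer docstrings describe, assuming the paddle.fluid 1.x API. The SGD optimizer hookup and the regularization_coeff value are illustrative assumptions and are not part of this patch.

.. code-block:: python

    # Sketch (assumption-based): build the documented program and attach L2
    # weight decay globally through the optimizer's `regularization` argument.
    import paddle.fluid as fluid

    main_prog = fluid.Program()
    startup_prog = fluid.Program()
    with fluid.program_guard(main_prog, startup_prog):
        # Explicit batch dimension, matching the updated fluid.data examples.
        data = fluid.data(name='image', shape=[256, 3, 28, 28], dtype='float32')
        label = fluid.data(name='label', shape=[256, 1], dtype='int64')
        hidden = fluid.layers.fc(input=data, size=128, act='relu')
        prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
        loss = fluid.layers.cross_entropy(input=prediction, label=label)
        avg_loss = fluid.layers.mean(loss)
        # Assumed optimizer hookup: SGD with L2 decay (coefficient is illustrative).
        optimizer = fluid.optimizer.SGD(
            learning_rate=0.01,
            regularization=fluid.regularizer.L2Decay(regularization_coeff=0.1))
        optimizer.minimize(avg_loss)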