Unverified commit b301adc9, authored by Yiqun Liu, committed by GitHub

Update all the examples which use paddle.static.nn.fc. (#27904)

Parent: 41aad9bf
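This commit migrates the docstring examples from the 1.x-style fc keywords to the paddle.static.nn.fc signature used since paddle 2.0: `input` becomes `x`, `act` becomes `activation`, and `param_attr` becomes `weight_attr`. A minimal before/after sketch (the variable names here are illustrative, not taken from the diff):

    import paddle

    paddle.enable_static()
    data = paddle.static.data(name='data', shape=[None, 32], dtype='float32')

    # 1.x-style keywords, replaced throughout this commit:
    #   hidden = paddle.static.nn.fc(input=data, size=10, act='relu')

    # 2.0-style keywords:
    hidden = paddle.static.nn.fc(x=data, size=10, activation='relu')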
@@ -1345,7 +1345,7 @@ def append_backward(loss,
     x = paddle.static.data(name='x', shape=[None, 13], dtype='int64')
     y = paddle.static.data(name='y', shape=[None, 1], dtype='float32')
     x_emb = paddle.static.nn.embedding(x, size=[100, 256])
-    y_predict = paddle.static.nn.fc(input=x_emb, size=1, act=None, name='my_fc')
+    y_predict = paddle.static.nn.fc(x=x_emb, size=1, activation=None, name='my_fc')
     loss = F.square_error_cost(input=y_predict, label=y)
     avg_loss = paddle.mean(loss)
...
@@ -120,7 +120,7 @@ class CompiledProgram(object):
     exe = static.Executor(place)
     data = static.data(name='X', shape=[None, 1], dtype='float32')
-    hidden = static.nn.fc(input=data, size=10)
+    hidden = static.nn.fc(x=data, size=10)
     loss = paddle.mean(hidden)
     paddle.optimizer.SGD(learning_rate=0.01).minimize(loss)
@@ -243,7 +243,7 @@ class CompiledProgram(object):
     exe = static.Executor(place)
     data = static.data(name='X', shape=[None, 1], dtype='float32')
-    hidden = static.nn.fc(input=data, size=10)
+    hidden = static.nn.fc(x=data, size=10)
     loss = paddle.mean(hidden)
     test_program = static.default_main_program().clone(for_test=True)
...
@@ -3996,7 +3996,7 @@ class Program(object):
     with static.program_guard(main_program=main_program, startup_program=startup_program):
         x = static.data(name="x", shape=[-1, 784], dtype='float32')
         y = static.data(name="y", shape=[-1, 1], dtype='int32')
-        z = static.nn.fc(name="fc", input=x, size=10, act="relu")
+        z = static.nn.fc(name="fc", x=x, size=10, activation="relu")
     print("main program is: {}".format(main_program))
     print("start up program is: {}".format(startup_program))
@@ -4344,7 +4344,7 @@ class Program(object):
     paddle.enable_static()
     img = static.data(name='image', shape=[None, 784])
-    pred = static.nn.fc(input=img, size=10, act='relu')
+    pred = static.nn.fc(x=img, size=10, activation='relu')
     loss = paddle.mean(pred)
     # Here we use clone before Momentum
     test_program = static.default_main_program().clone(for_test=True)
@@ -4415,10 +4415,10 @@ class Program(object):
     with static.program_guard(train_program, startup_program):
         with utils.unique_name.guard():
             img = static.data(name='image', shape=[None, 784])
-            hidden = static.nn.fc(input=img, size=200, act='relu')
+            hidden = static.nn.fc(x=img, size=200, activation='relu')
             hidden = F.dropout(hidden, p=0.5)
             loss = F.cross_entropy(
-                input=static.nn.fc(hidden, size=10, act='softmax'),
+                input=static.nn.fc(x=hidden, size=10, activation='softmax'),
                 label=static.data(name='label', shape=[1], dtype='int64'))
             avg_loss = paddle.mean(loss)
     test_program = train_program.clone(for_test=True)
@@ -4462,10 +4462,10 @@ class Program(object):
     def network():
         img = static.data(name='image', shape=[None, 784])
-        hidden = static.nn.fc(input=img, size=200, act='relu')
+        hidden = static.nn.fc(x=img, size=200, activation='relu')
         hidden = F.dropout(hidden, p=0.5)
         loss = F.cross_entropy(
-            input=static.nn.fc(hidden, size=10, act='softmax'),
+            input=static.nn.fc(x=hidden, size=10, activation='softmax'),
            label=static.data(name='label', shape=[1], dtype='int64'))
         avg_loss = paddle.mean(loss)
         return avg_loss
@@ -5079,7 +5079,7 @@ class Program(object):
     program = static.default_main_program()
     data = static.data(name='x', shape=[None, 13], dtype='float32')
-    hidden = static.nn.fc(input=data, size=10)
+    hidden = static.nn.fc(x=data, size=10)
     loss = paddle.mean(hidden)
     paddle.optimizer.SGD(learning_rate=0.01).minimize(loss)
@@ -5347,7 +5347,7 @@ def default_startup_program():
     with paddle.static.program_guard(main_program=main_program, startup_program=startup_program):
         x = paddle.data(name="x", shape=[-1, 784], dtype='float32')
         y = paddle.data(name="y", shape=[-1, 1], dtype='int32')
-        z = paddle.static.nn.fc(name="fc", input=x, size=10, act="relu")
+        z = paddle.static.nn.fc(name="fc", x=x, size=10, activation="relu")
     print("main program is: {}".format(paddle.static.default_main_program()))
     print("start up program is: {}".format(paddle.static.default_startup_program()))
@@ -5389,8 +5389,8 @@ def default_main_program():
     bn2 = paddle.static.nn.batch_norm(conv2, act='relu')
     pool2 = paddle.nn.functional.pool2d(bn2, 2, 'max', 2)
-    fc1 = paddle.static.nn.fc(pool2, size=50, act='relu')
-    fc2 = paddle.static.nn.fc(fc1, size=102, act='softmax')
+    fc1 = paddle.static.nn.fc(x=pool2, size=50, activation='relu')
+    fc2 = paddle.static.nn.fc(x=fc1, size=102, activation='softmax')
     loss = paddle.nn.functional.loss.cross_entropy(input=fc2, label=label)
     loss = paddle.mean(loss)
@@ -5467,7 +5467,7 @@ def program_guard(main_program, startup_program=None):
     startup_program = paddle.static.Program()
     with paddle.static.program_guard(main_program, startup_program):
         data = paddle.static.data(name='image', shape=[None, 784, 784], dtype='float32')
-        hidden = paddle.static.nn.fc(input=data, size=10, act='relu')
+        hidden = paddle.static.nn.fc(x=data, size=10, activation='relu')

 Notes: The temporary :code:`Program` can be used if the user does not need
 to construct either of startup program or main program.
...
@@ -13598,7 +13598,7 @@ def py_func(func, x, out, backward_func=None, skip_vars_in_backward_input=None):
     # User-defined debug functions that print out the input Tensor
     paddle.static.nn.py_func(func=debug_func, x=hidden, out=None)
-    prediction = paddle.static.nn.fc(hidden, size=10, act='softmax')
+    prediction = paddle.static.nn.fc(hidden, size=10, activation='softmax')
     loss = paddle.static.nn.cross_entropy(input=prediction, label=label)
     return paddle.mean(loss)
...
@@ -264,17 +264,17 @@ class WeightNormParamAttr(ParamAttr):
     data = paddle.static.data(name="data", shape=[3, 32, 32], dtype="float32")
-    fc = paddle.static.nn.fc(input=data,
+    fc = paddle.static.nn.fc(x=data,
                              size=1000,
-                             param_attr=paddle.static.WeightNormParamAttr(
+                             weight_attr=paddle.static.WeightNormParamAttr(
                                  dim=None,
                                  name='weight_norm_param',
                                  initializer=paddle.nn.initializer.Constant(1.0),
                                  learning_rate=1.0,
                                  regularizer=paddle.regularizer.L2Decay(0.1),
                                  trainable=True,
                                  do_model_average=False,
                                  need_clip=True))
 """
 # List to record the parameters reparameterized by weight normalization.
...
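The final hunk also carries the companion rename `param_attr` -> `weight_attr`. A condensed sketch of the 2.0-style call with weight normalization, trimmed from the example above (the omitted attributes keep their defaults):

    import paddle

    paddle.enable_static()
    data = paddle.static.data(name="data", shape=[3, 32, 32], dtype="float32")
    # `weight_attr` (formerly `param_attr`) configures the fc weight parameter;
    # WeightNormParamAttr reparameterizes it with weight normalization.
    fc = paddle.static.nn.fc(
        x=data,
        size=1000,
        weight_attr=paddle.static.WeightNormParamAttr(
            dim=None,
            initializer=paddle.nn.initializer.Constant(1.0)))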