Unverified commit 72f6e566 authored by MRXLT and committed by GitHub

fix sample code (#26962)
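The fix replaces the pre-2.0 fluid-style call, which took a single size list, with the 2.0 signature paddle.nn.Embedding(num_embeddings, embedding_dim). Below is a minimal sketch of the corrected usage under the 2.0 dygraph API shown in the hunks that follow; the lookup tensor is illustrative, and paddle.to_tensor is assumed available in this 2.0-era build.

    import paddle

    paddle.disable_static()

    # 2.0 signature takes two ints (num_embeddings, embedding_dim),
    # not a single size list as the old fluid-style Embedding did
    emb = paddle.nn.Embedding(10, 10)

    # look up rows 0..3; output shape is [2, 2, 10]
    x = paddle.to_tensor([[0, 1], [2, 3]], dtype='int64')
    out = emb(x)

    adam = paddle.optimizer.Adam(0.001, parameters=emb.parameters())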

Parent 46057dd2
@@ -169,7 +169,7 @@ class Optimizer(object):
         import paddle
         paddle.disable_static()
-        emb = paddle.nn.Embedding([10, 10])
+        emb = paddle.nn.Embedding(10, 10)
         adam = paddle.optimizer.Adam(0.001, parameters=emb.parameters())
         state_dict = adam.state_dict()
@@ -199,7 +199,7 @@ class Optimizer(object):
         import paddle
         paddle.disable_static()
-        emb = paddle.nn.Embedding([10, 10])
+        emb = paddle.nn.Embedding(10, 10)
         state_dict = emb.state_dict()
         paddle.framework.save(state_dict, "paddle_dy")
@@ -371,7 +371,7 @@ class Optimizer(object):
         import paddle
         # example1: _LRScheduler is not used, return value is all the same
         paddle.disable_static()
-        emb = paddle.nn.Embedding([10, 10])
+        emb = paddle.nn.Embedding(10, 10)
         adam = paddle.optimizer.Adam(0.001, parameters = emb.parameters())
         lr = adam.get_lr()
         print(lr) # 0.001
...
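The second hunk persists the optimizer state with paddle.framework.save. A hedged sketch of the corresponding restore follows, assuming paddle.framework.load returns the saved dict and that Optimizer.set_state_dict is available in this 2.0-era build; neither call appears in this diff, so both are assumptions rather than the API as confirmed here.

    import paddle

    paddle.disable_static()
    emb = paddle.nn.Embedding(10, 10)
    adam = paddle.optimizer.Adam(0.001, parameters=emb.parameters())

    # persist optimizer state, as in the second hunk above
    paddle.framework.save(adam.state_dict(), "paddle_dy")

    # assumed counterpart to save: load the dict back and hand it to the optimizer
    state_dict = paddle.framework.load("paddle_dy")  # assumption: returns the saved dict
    adam.set_state_dict(state_dict)                  # assumption: restore hook on Optimizer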