From 43f64a177e9d8b93cfdb9da6f57253f8d1d52ca2 Mon Sep 17 00:00:00 2001
From: Jiabin Yang
Date: Fri, 28 Jun 2019 13:58:14 +0800
Subject: [PATCH] Fix/program doc (#17908)

* test=develop, add some comments for Program.clone

* test=develop, add API.spec

* test=develop, refine comments

* refine Program doc and clone doc

* test=develop, refine doc
---
 paddle/fluid/API.spec            |  2 +-
 python/paddle/fluid/framework.py | 25 +++++++++++++++++++++++--
 2 files changed, 24 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index fc4aaf0b8a5..793a421cede 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -1,6 +1,6 @@
 paddle.fluid.Program.__init__ (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
 paddle.fluid.Program.block (ArgSpec(args=['self', 'index'], varargs=None, keywords=None, defaults=None), ('document', '86cd9499e226be661a3d686260ee1150'))
-paddle.fluid.Program.clone (ArgSpec(args=['self', 'for_test'], varargs=None, keywords=None, defaults=(False,)), ('document', '17d059efb24c81dde6166c6b0b93e9d0'))
+paddle.fluid.Program.clone (ArgSpec(args=['self', 'for_test'], varargs=None, keywords=None, defaults=(False,)), ('document', 'a65221387f84c74eee5130d7678ca900'))
 paddle.fluid.Program.current_block (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', 'd601c7719e425e3d9cf862ea4ad194ca'))
 paddle.fluid.Program.global_block (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', 'd64ea1dc96e9f674499ea3006d470aa4'))
 paddle.fluid.Program.list_vars (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '32c14b0f12baae4b352200fa09b5e789'))
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 01e1175050c..56680ae6d0d 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -376,7 +376,7 @@ class Variable(object):
     two variables in different blocks could have the same name.
 
     There are many kinds of variables. Each kind of them has its own attributes
-    and usages. Please reference the framework.proto for details.
+    and usages. Please refer to the framework.proto for details.
 
     Most of a Variable's member variables can be setted to be None. It mean
     it is not available or will be specified later.
@@ -2771,9 +2771,18 @@ class Program(object):
     create c++ Program. A program is a self-contained programing
     language like container. It has at least one Block, when the
     control flow op like conditional_block, while_op is included,
-    it will contains nested block.
+    it will contain nested blocks.
     Please reference the framework.proto for details.
 
+    A Program usually consists of a startup program and a main
+    program. The startup program performs the initialization work, while the
+    main program holds the network structure and the variables used for training.
+
+    A pair of Programs can serve training or testing. For training,
+    Paddle keeps everything needed to build the training network; for
+    testing, Paddle prunes whatever is irrelevant to inference, e.g.
+    backward ops and their variables.
+
     Notes: we have default_startup_program and default_main_program
     by default, a pair of them will shared the parameters.
     The default_startup_program only run once to initialize parameters,
@@ -3106,6 +3115,9 @@ class Program(object):
 
             train_program = fluid.Program()
             startup_program = fluid.Program()
+
+            # startup_program is used for parameter initialization,
+            # while the main program holds the network itself
             with fluid.program_guard(train_program, startup_program):
                 with fluid.unique_name.guard():
                     img = fluid.layers.data(name='image', shape=[784])
@@ -3117,6 +3129,15 @@ class Program(object):
                     avg_loss = fluid.layers.mean(loss)
                     test_program = train_program.clone(for_test=False)
             print_prog(test_program)
+
+            # Because the train and test programs share parameters, the train program's
+            # startup program must be used; the test program's startup program is empty.
+
+            # In Paddle Fluid, weights are shared through identical Variable names. The train
+            # and test programs give every parameter the same name, which is what lets them
+            # share parameters, so the train startup program is reused. The startup program
+            # of the test side is a brand-new Program and therefore contains nothing.
+
             with fluid.program_guard(train_program, startup_program):
                 with fluid.unique_name.guard():
                     sgd = fluid.optimizer.SGD(learning_rate=1e-3)
-- 
GitLab
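
For reference, below is a minimal sketch of the clone(for_test=True) workflow that the revised docstring describes, assuming the Paddle 1.x fluid API used throughout the patch; the Executor/CPUPlace lines are standard fluid usage and are not taken from the patch itself.

    import paddle.fluid as fluid

    train_program = fluid.Program()
    startup_program = fluid.Program()

    # Build the training network inside the train/startup program pair.
    with fluid.program_guard(train_program, startup_program):
        with fluid.unique_name.guard():
            img = fluid.layers.data(name='image', shape=[784])
            label = fluid.layers.data(name='label', shape=[1], dtype='int64')
            prediction = fluid.layers.fc(input=img, size=10, act='softmax')
            loss = fluid.layers.cross_entropy(input=prediction, label=label)
            avg_loss = fluid.layers.mean(loss)

            # Clone for inference before adding the optimizer, so the test
            # program carries no backward ops or optimizer variables.
            test_program = train_program.clone(for_test=True)

            sgd = fluid.optimizer.SGD(learning_rate=1e-3)
            sgd.minimize(avg_loss)

    # Run the shared startup program once; because parameters are shared by
    # Variable name, this initializes both the train and the test program.
    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(startup_program)

Cloning before minimize() mirrors the docstring's guidance: the test program is pruned of backward and optimizer ops while still sharing every parameter with the train program through the single startup program.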