From efcdeb512f73619fd8c6893602aefa1ac97c4cac Mon Sep 17 00:00:00 2001
From: Chen Weihang
Date: Tue, 14 Jan 2020 14:57:01 +0800
Subject: [PATCH] Add all_parameters API for Program (#22180)

* add all_parameters for Program, test=develop

* refine unittest, test=develop

* add example print result, test=develop
---
 python/paddle/fluid/framework.py              | 59 +++++++++++++++++++
 .../fluid/tests/unittests/test_program.py     | 13 ++++
 2 files changed, 72 insertions(+)

diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 35950f7430..8e4056b387 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -4524,6 +4524,65 @@ class Program(object):
             for each_var in list(each_block.vars.values()):
                 yield each_var
 
+    @dygraph_not_support
+    def all_parameters(self):
+        """
+        Get all :ref:`api_guide_parameter_en` from this Program. A list object is returned.
+
+        Returns:
+            list[ :ref:`api_guide_parameter_en` ]: The list contains all parameters in this program.
+
+        Examples:
+            .. code-block:: python
+
+                import paddle.fluid as fluid
+
+                program = fluid.default_main_program()
+                data = fluid.data(name='x', shape=[None, 13], dtype='float32')
+                hidden = fluid.layers.fc(input=data, size=10)
+                loss = fluid.layers.mean(hidden)
+                fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
+
+                for param in program.all_parameters():
+                    print(param)
+
+                # This prints all parameters in the current program; in this
+                # example, the result looks like:
+                #
+                # name: "fc_0.w_0"
+                # type {
+                #   type: LOD_TENSOR
+                #   lod_tensor {
+                #     tensor {
+                #       data_type: FP32
+                #       dims: 13
+                #       dims: 10
+                #     }
+                #   }
+                # }
+                # persistable: true
+                #
+                # name: "fc_0.b_0"
+                # type {
+                #   type: LOD_TENSOR
+                #   lod_tensor {
+                #     tensor {
+                #       data_type: FP32
+                #       dims: 10
+                #     }
+                #   }
+                # }
+                # persistable: true
+                #
+                # print(param) prints all the properties of a parameter, including
+                # name, type and persistable. You can also access a specific
+                # property, such as param.name or param.type.
+        """
+        parameters = []
+        for each_block in self.blocks:
+            parameters.extend(each_block.all_parameters())
+        return parameters
+
 
 @six.add_metaclass(ParameterMetaClass)
 class Parameter(Variable):
diff --git a/python/paddle/fluid/tests/unittests/test_program.py b/python/paddle/fluid/tests/unittests/test_program.py
index cb1d94809b..325345e606 100644
--- a/python/paddle/fluid/tests/unittests/test_program.py
+++ b/python/paddle/fluid/tests/unittests/test_program.py
@@ -132,6 +132,19 @@ class TestProgram(unittest.TestCase):
         for i in range(len(no_read_ops)):
             self.assertEqual(no_read_ops[i].type, keep_read_ops[i + 2].type)
 
+    def test_program_all_parameters(self):
+        program = fluid.default_main_program()
+        data = fluid.data(name='x', shape=[None, 13], dtype='float32')
+        hidden = fluid.layers.fc(input=data, size=10)
+        loss = fluid.layers.mean(hidden)
+        fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
+
+        # NOTE: the only parameters here are fc_0.w_0 and fc_0.b_0
+        param_list = program.all_parameters()
+        self.assertEqual(len(param_list), 2)
+        self.assertEqual(param_list[0].name, "fc_0.w_0")
+        self.assertEqual(param_list[1].name, "fc_0.b_0")
+
 
 if __name__ == '__main__':
     unittest.main()
-- 
GitLab
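
A quick way to sanity-check the API added by this patch is to count parameter
elements across the whole program. This is a minimal sketch, not part of the
diff above; it assumes a paddle.fluid 1.x install, reuses the layers from the
docstring example, and the names "params" and "total" are illustrative:

    import numpy as np
    import paddle.fluid as fluid

    # Build a small network so the default main program owns parameters.
    data = fluid.data(name='x', shape=[None, 13], dtype='float32')
    hidden = fluid.layers.fc(input=data, size=10)
    loss = fluid.layers.mean(hidden)
    fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)

    program = fluid.default_main_program()

    # all_parameters() flattens the parameters of every block into one list,
    # so this count covers the whole program, not just block 0.
    params = program.all_parameters()
    total = sum(int(np.prod(p.shape)) for p in params)
    print(len(params))  # 2 (fc_0.w_0 and fc_0.b_0)
    print(total)        # 13 * 10 + 10 = 140

Because the method eagerly builds a list (unlike list_vars, which yields
variables lazily), the result can be indexed and its length checked directly,
as the new unit test does.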