From d89ff5b6144461a967bd73fa739d251691f2a8bc Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Mon, 27 Nov 2017 17:09:07 +0800
Subject: [PATCH] Restore the param infos in Program.clone() (#5873)

* Restore the param infos in Program.clone()

Program.clone() only clones the variables and ops of the program into a
new program; the information attached to each Parameter (trainable,
optimize_attr, regularizer, and so on) is not cloned. So we need to
restore the Parameter information after cloning.

Fix #5871

* Follow comments

* Fix CI

* Fix CI

* Fix CI
---
 python/paddle/v2/fluid/framework.py          | 56 +++++++++++++++++++-
 python/paddle/v2/fluid/tests/test_program.py | 24 +++++++--
 2 files changed, 75 insertions(+), 5 deletions(-)

diff --git a/python/paddle/v2/fluid/framework.py b/python/paddle/v2/fluid/framework.py
index 9a62698b86b..6d6ea23f55e 100644
--- a/python/paddle/v2/fluid/framework.py
+++ b/python/paddle/v2/fluid/framework.py
@@ -395,7 +395,11 @@ class Block(object):
         return v
 
     def all_parameters(self):
-        return {v for k, v in self.vars.iteritems() if isinstance(v, Parameter)}
+        return list(self.iter_parameters())
+
+    def iter_parameters(self):
+        return (item[1] for item in self.vars.iteritems()
+                if isinstance(item[1], Parameter))
 
     def create_var(self, *args, **kwargs):
         var = Variable(self, *args, **kwargs)
@@ -469,6 +473,37 @@ class Block(object):
         for index in range(len(self.ops)):
             assert self.ops[index].desc == ops_in_cpp[index]
 
+    def copy_param_info_from(self, other):
+        """
+        Copy the information of parameters from the other block.
+        Args:
+            other(Block): the other block
+
+        Returns:
+            None
+        """
+        if not isinstance(other, Block):
+            raise TypeError("copy_param_info_from should be invoked with a Block")
+        for p in other.iter_parameters():
+            assert isinstance(p, Parameter)
+            v = self.vars.get(p.name, None)
+            if v is None:
+                raise ValueError("copy_param_info_from should be invoked "
+                                 "with blocks of the same topology")
+            assert isinstance(v, Variable)
+            new_p = Parameter(
+                block=self,
+                shape=v.shape,
+                dtype=v.dtype,
+                type=v.type,
+                lod_level=v.lod_level,
+                stop_gradient=p.stop_gradient,
+                trainable=p.trainable,
+                optimize_attr=p.optimize_attr,
+                regularizer=p.regularizer,
+                name=v.name)
+            self.vars[new_p.name] = new_p
+
 
 class Program(object):
     def __init__(self):
@@ -489,6 +524,7 @@ class Program(object):
         p.desc = core.ProgramDesc(self.desc)
         p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
         p.sync_with_cpp()
+        p.copy_param_info_from(self)
         return p
 
     def prune(self, targets):
@@ -572,6 +608,24 @@ class Program(object):
         for block in self.blocks:
             block.sync_with_cpp()
 
+    def copy_param_info_from(self, other):
+        """
+        Copy the information of parameters from the other program.
+        Args:
+            other(Program): the other program
+
+        Returns:
+            None
+        """
+        if not isinstance(other, Program):
+            raise TypeError("copy_param_info_from should be invoked with "
+                            "a Program")
+
+        if len(self.blocks) != len(other.blocks):
+            raise ValueError("copy_param_info_from should be invoked with two "
+                             "programs which represent the same topology")
+        self.global_block().copy_param_info_from(other.global_block())
+
     def list_vars(self):
         for each_block in self.blocks:
             for each_var in each_block.vars.itervalues():
diff --git a/python/paddle/v2/fluid/tests/test_program.py b/python/paddle/v2/fluid/tests/test_program.py
index e9bcefd2156..15653a1dbf5 100644
--- a/python/paddle/v2/fluid/tests/test_program.py
+++ b/python/paddle/v2/fluid/tests/test_program.py
@@ -1,7 +1,9 @@
+from __future__ import print_function
 import unittest
 
 from paddle.v2.fluid.framework import Program
 from paddle.v2.fluid.framework import g_main_program
+import paddle.v2.fluid.layers as layers
 
 
 class TestProgram(unittest.TestCase):
@@ -48,8 +50,8 @@ class TestProgram(unittest.TestCase):
 
         # FIXME(yuyang18): We manual compare the output string, since the order
         # of variable could be changed.
-        print prog
-        print prog.clone()
+        print(prog)
+        print(prog.clone())
 
     def test_parse_program_from_string(self):
         prog = Program()
@@ -67,8 +69,8 @@ class TestProgram(unittest.TestCase):
         binary_str = prog.desc.serialize_to_string()
         prog_restored = Program.parse_from_string(binary_str)
 
-        print prog
-        print prog_restored
+        print(prog)
+        print(prog_restored)
 
     def test_append_backward(self):
         prog = Program()
@@ -123,6 +125,20 @@ class TestProgram(unittest.TestCase):
             actual_ops.append(op.type)
         self.assertEqual(actual_ops, expect_ops)
 
+    def test_program_clone_with_parameter(self):
+        main_program = Program()
+        startup_program = Program()
+        kwargs = {
+            'main_program': main_program,
+            'startup_program': startup_program
+        }
+        d = layers.data(name='x', shape=[784], dtype='float32', **kwargs)
+        hidden = layers.fc(input=d, size=100, **kwargs)
+        layers.fc(input=hidden, size=100, **kwargs)
+
+        new_program = main_program.clone()
+        self.assertNotEqual(0, len(new_program.blocks[0].all_parameters()))
+
 
 if __name__ == '__main__':
     unittest.main()
-- 
GitLab
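
A minimal usage sketch of the behavior this patch enables (not part of the
patch itself; it mirrors the new test, and the variable names and layer
sizes are illustrative):

    from __future__ import print_function

    import paddle.v2.fluid.layers as layers
    from paddle.v2.fluid.framework import Program

    main_program = Program()
    startup_program = Program()
    kwargs = {'main_program': main_program, 'startup_program': startup_program}

    # fc layers create Parameters (weights and biases) in the global block.
    x = layers.data(name='x', shape=[784], dtype='float32', **kwargs)
    hidden = layers.fc(input=x, size=100, **kwargs)
    layers.fc(input=hidden, size=10, **kwargs)

    inference_program = main_program.clone()

    # Before this patch, the clone held only plain Variables, so this loop
    # printed nothing; with copy_param_info_from() called inside clone(),
    # Parameter metadata (trainable, optimize_attr, regularizer) survives.
    for p in inference_program.global_block().iter_parameters():
        print(p.name, p.trainable)

The design follows from how clone() works: it copies the ProgramDesc at the
C++ level and rebuilds the Python Blocks from that desc, which yields plain
Variable objects and loses the Python-side Parameter subclass. copy_param_info_from()
re-wraps each matching Variable in the clone as a Parameter, carrying over the
attributes that exist only in Python.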