From ea6716a55b7d6d5b5365bf033584e37b38ca572a Mon Sep 17 00:00:00 2001
From: wanghuancoder
Date: Wed, 19 Aug 2020 10:24:17 +0800
Subject: [PATCH] Add check if fluid.data() variable no feed data (#25858)

* add check if fluid.data() variable no feed data, test=develop

* Add testcase for feed check, test=develop
---
 python/paddle/fluid/executor.py                    | 20 +++++
 .../unittests/test_executor_check_feed.py          | 84 +++++++++++++++++++
 2 files changed, 104 insertions(+)
 create mode 100644 python/paddle/fluid/tests/unittests/test_executor_check_feed.py

diff --git a/python/paddle/fluid/executor.py b/python/paddle/fluid/executor.py
index f16da029e29..5759b942763 100644
--- a/python/paddle/fluid/executor.py
+++ b/python/paddle/fluid/executor.py
@@ -1156,6 +1156,26 @@ class Executor(object):
 
         compiled = isinstance(program, compiler.CompiledProgram)
 
+        # Check that every fluid.data() variable has been given feed data
+        if use_prune:
+            if compiled:
+                global_block = program._program.global_block()
+            else:
+                global_block = program.global_block()
+            for varname in global_block.vars:
+                vardesc = global_block.desc.find_var(cpt.to_bytes(varname))
+                varobj = global_block.vars[varname]
+
+                # Variables built by fluid.layers.data() cannot be checked here, because fluid.layers.data() does not set need_check_feed
+                if vardesc.persistable() == False and \
+                    vardesc.type() == core.VarDesc.VarType.LOD_TENSOR and \
+                    vardesc.need_check_feed() == True and \
+                    varobj._stop_gradient == True and \
+                    varobj.is_data == True and \
+                    varobj.belong_to_optimizer == False and \
+                    varname not in feed:
+                    raise ValueError('Need feed data for variable %s' % varname)
+
         acp._auto_checkpoint(self, program)
 
         # For backward compatibility, run directly.
diff --git a/python/paddle/fluid/tests/unittests/test_executor_check_feed.py b/python/paddle/fluid/tests/unittests/test_executor_check_feed.py
new file mode 100644
index 00000000000..6b1e3c5a28a
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/test_executor_check_feed.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import unittest
+
+import numpy
+import paddle.fluid.core as core
+import paddle.fluid as fluid
+
+
+class TestExecutor(unittest.TestCase):
+    def net(self):
+        lr = fluid.data(name="lr", shape=[1], dtype='float32')
+        x = fluid.data(name="x", shape=[None, 1], dtype='float32')
+        y = fluid.data(name="y", shape=[None, 1], dtype='float32')
+        y_predict = fluid.layers.fc(input=x, size=1, act=None)
+
+        cost = fluid.layers.square_error_cost(input=y_predict, label=y)
+        avg_cost = fluid.layers.mean(cost)
+
+        opt = fluid.optimizer.Adam(learning_rate=lr)
+        opt.minimize(avg_cost)
+
+        return lr, avg_cost
+
+    def test_program_check_feed(self):
+        main_program = fluid.Program()
+        startup_program = fluid.Program()
+        scope = fluid.Scope()
+        with fluid.program_guard(main_program, startup_program):
+            with fluid.scope_guard(scope):
+                cpu = fluid.CPUPlace()
+                exe = fluid.Executor(cpu)
+                lr, cost = self.net()
+                exe.run(startup_program)
+                train_data = [[1.0], [2.0], [3.0], [4.0]]
+                y_true = [[2.0], [4.0], [6.0], [8.0]]
+                a = 0
+                with self.assertRaises(ValueError):
+                    exe.run(feed={'x': train_data,
+                                  'lr': a},
+                            fetch_list=[lr, cost],
+                            return_numpy=False,
+                            use_prune=True)
+
+    def test_compiled_program_check_feed(self):
+        main_program = fluid.Program()
+        startup_program = fluid.Program()
+        scope = fluid.Scope()
+        with fluid.program_guard(main_program, startup_program):
+            with fluid.scope_guard(scope):
+                cpu = fluid.CPUPlace()
+                exe = fluid.Executor(cpu)
+                lr, cost = self.net()
+                exe.run(startup_program)
+                compiled_prog = fluid.CompiledProgram(
+                    main_program).with_data_parallel(loss_name=cost.name)
+                train_data = [[1.0], [2.0], [3.0], [4.0]]
+                y_true = [[2.0], [4.0], [6.0], [8.0]]
+                a = 0
+                with self.assertRaises(ValueError):
+                    exe.run(compiled_prog,
+                            feed={'x': train_data,
+                                  'lr': a},
+                            fetch_list=[lr, cost],
+                            return_numpy=False,
+                            use_prune=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
--
GitLab
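
For context, a minimal sketch (not part of the patch) of how the new check surfaces to user code. It mirrors the unit test above and assumes the Paddle 1.x fluid API; variable names are illustrative.

# Minimal sketch: with use_prune=True, leaving a fluid.data() variable
# ('y' here) out of the feed dict is expected to raise
# ValueError('Need feed data for variable y') instead of failing later.
import paddle.fluid as fluid

main_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(main_program, startup_program):
    x = fluid.data(name="x", shape=[None, 1], dtype='float32')
    y = fluid.data(name="y", shape=[None, 1], dtype='float32')
    y_predict = fluid.layers.fc(input=x, size=1, act=None)
    cost = fluid.layers.square_error_cost(input=y_predict, label=y)
    avg_cost = fluid.layers.mean(cost)

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(startup_program)
    try:
        # 'y' is deliberately missing from the feed dict.
        exe.run(main_program,
                feed={'x': [[1.0], [2.0], [3.0], [4.0]]},
                fetch_list=[avg_cost],
                use_prune=True)
    except ValueError as e:
        print(e)  # Need feed data for variable y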