Unverified commit f8d0a358, authored by YuanRisheng, committed by GitHub

[Fluid Clean]Move and Remove some io API in Fluid (#50842)

* move io

* fix ci bugs

* fix ci bugs

* fix py3 bugs

* fix example code

* fix example code

* fix text

* fix text

* deal with ci bugs

* perfect code according comment

* delete import batch
Parent 25a77450
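In short, this change relocates the remaining fluid io helpers and removes their old entry points. Below is a minimal orientation sketch of the new import paths, pieced together from the hunks that follow; treat it as a summary rather than an exhaustive mapping:

# Old fluid-era imports that this commit removes or stops using:
#   from paddle.fluid.io import is_parameter, is_persistable, is_belong_to_optimizer
#   from paddle.fluid.io import save, load, save_vars, load_vars, load_program_state, set_program_state
#   from paddle.fluid.io import load_persistables, save_persistables

# New locations used throughout the diff:
from paddle.framework.io_utils import (
    is_parameter,
    is_persistable,
    is_belong_to_optimizer,
)
from paddle.static.io import (
    save,
    load,
    save_vars,
    load_vars,
    load_program_state,
    set_program_state,
)
from paddle.distributed.io import load_persistables, save_persistables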
...@@ -22,8 +22,8 @@ from functools import reduce ...@@ -22,8 +22,8 @@ from functools import reduce
import numpy as np import numpy as np
import paddle import paddle
from paddle.fluid.io import is_belong_to_optimizer, is_parameter
from paddle.framework import core from paddle.framework import core
from paddle.framework.io_utils import is_belong_to_optimizer, is_parameter
from paddle.static import Variable from paddle.static import Variable
from .dist_attribute import OperatorDistAttr, TensorDistAttr from .dist_attribute import OperatorDistAttr, TensorDistAttr
......
...@@ -109,7 +109,6 @@ class ParameterServerRuntime(RuntimeBase): ...@@ -109,7 +109,6 @@ class ParameterServerRuntime(RuntimeBase):
assert isinstance(each_var, Variable) assert isinstance(each_var, Variable)
origin_varname, _, _ = _get_varname_parts(each_var.name) origin_varname, _, _ = _get_varname_parts(each_var.name)
new_var = paddle.static.io._clone_var_in_block(load_block, each_var) new_var = paddle.static.io._clone_var_in_block(load_block, each_var)
var_path = os.path.join(dirname, origin_varname) var_path = os.path.join(dirname, origin_varname)
if not os.path.exists(var_path): if not os.path.exists(var_path):
......
...@@ -16,10 +16,175 @@ import os ...@@ -16,10 +16,175 @@ import os
import paddle import paddle
from paddle.fluid.framework import Program, static_only from paddle.fluid.framework import Program, static_only
from paddle.fluid.io import load_persistables
from paddle.framework import core, dygraph_not_support from paddle.framework import core, dygraph_not_support
def _load_distributed_persistables(executor, dirname, main_program=None):
"""
Customized load_persistables for distributed training;
it should be used on the parameter server.
Args:
executor(Executor): The executor to run for loading parameters.
dirname(str): The load directory path.
main_program(Program): The program whose parameters will be
loaded. The main_program must be the pserver_program
obtained from the transpiler.
Returns:
None
Examples:
.. code-block:: python
import paddle
import paddle.fluid as fluid
paddle.enable_static()
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
t = paddle.distributed.transpiler.DistributeTranspiler()
t.transpile(...)
pserver_prog = t.get_pserver_program(...)
_load_distributed_persistables(executor=exe, dirname=param_path, main_program=pserver_prog)
"""
def __is_distributed_part_var(varname):
trainer_idx = varname.find(".trainer_")
block_idx = varname.find(".block")
return trainer_idx or block_idx
def __load_persistable_vars(executor, dirname, need_load_vars):
load_prog = Program()
load_block = load_prog.global_block()
need_delete_vars = []
for param in need_load_vars:
origin_var = param.origin
slice_var = param.slice
is_slice = param.is_slice
offset = param.offset
if is_slice:
slice = load_block.create_var(
name=slice_var.name,
type=slice_var.type,
shape=slice_var.shape,
dtype=slice_var.dtype,
persistable=True,
)
load_block.append_op(
type='load',
inputs={},
outputs={'Out': [slice]},
attrs={
'file_path': os.path.join(dirname, origin_var.name),
'seek': offset,
'shape': slice.shape,
},
)
else:
origin = load_block.create_var(
name="{}".format(origin_var.name),
type=origin_var.type,
shape=origin_var.shape,
dtype=origin_var.dtype,
persistable=True,
)
load_block.append_op(
type='load',
inputs={},
outputs={'Out': [origin]},
attrs={'file_path': os.path.join(dirname, origin_var.name)},
)
load_block.append_op(
type='delete_var',
inputs={'X': need_delete_vars},
)
executor.run(load_prog)
if not isinstance(main_program, Program):
raise TypeError("'main_program' should be an instance of Program.")
if not main_program._is_distributed:
raise ValueError(
"'_load_distributed_persistables' just be designed for distributed training."
)
if not main_program._ps_endpoint:
raise ValueError(
"'_load_distributed_persistables' need current_endpoint set in DistributeTranspiler.transpile"
)
need_load_vars = (
main_program._parameters_on_pservers.get_distributed_vars_by_ep(
main_program._ps_endpoint
)
)
__load_persistable_vars(executor, dirname, need_load_vars)
@dygraph_not_support
def load_persistables(executor, dirname, main_program=None, filename=None):
"""
:api_attr: Static Graph
This API filters out all variables with ``persistable==True`` from the
given ``main_program`` and then tries to load these variables from the
directory ``dirname`` or the file ``filename``.
Use the ``dirname`` to specify the directory where persistable variables
(refer to :ref:`api_guide_model_save_reader_en`) were saved. If variables
were saved in separate files, set ``filename`` as None; if all variables
were saved in a single file, use ``filename`` to specify the file name.
Args:
executor(Executor): The executor used for loading persistable variables.
See :ref:`api_guide_executor_en` for more details about it.
dirname(str): The directory path.
main_program(Program, optional): The program whose persistable variables will
be loaded. If it is None, the ``default_main_program``
will be used automatically. See :ref:`api_guide_Program_en`
for more about ``Program``.
Default: None.
filename(str, optional): The file that saved all persistable variables. If variables
were saved in separate files, set it to None.
Default: None.
Returns:
None
Examples:
.. code-block:: python
import paddle
import paddle.fluid as fluid
paddle.enable_static()
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
paddle.distributed.io.load_persistables(executor=exe, dirname=param_path,
main_program=None)
"""
if main_program and main_program._is_distributed:
_load_distributed_persistables(
executor, dirname=dirname, main_program=main_program
)
else:
paddle.static.io.load_vars(
executor,
dirname=dirname,
main_program=main_program,
predicate=is_persistable,
filename=filename,
)
def _save_distributed_persistables(executor, dirname, main_program): def _save_distributed_persistables(executor, dirname, main_program):
""" """
save_persistables for distributed training. save_persistables for distributed training.
......
...@@ -89,7 +89,6 @@ from .compiler import * ...@@ -89,7 +89,6 @@ from .compiler import *
from paddle.fluid.layers.math_op_patch import monkey_patch_variable from paddle.fluid.layers.math_op_patch import monkey_patch_variable
from .dygraph.layers import * from .dygraph.layers import *
from .dygraph.base import enable_dygraph, disable_dygraph from .dygraph.base import enable_dygraph, disable_dygraph
from .io import save, load, load_program_state, set_program_state
from .dygraph.varbase_patch_methods import monkey_patch_varbase from .dygraph.varbase_patch_methods import monkey_patch_varbase
from .core import _cuda_synchronize from .core import _cuda_synchronize
from .trainer_desc import ( from .trainer_desc import (
...@@ -146,8 +145,6 @@ __all__ = ( ...@@ -146,8 +145,6 @@ __all__ = (
'profiler', 'profiler',
'unique_name', 'unique_name',
'Scope', 'Scope',
'save',
'load',
'_cuda_synchronize', '_cuda_synchronize',
] ]
) )
......
...@@ -34,7 +34,7 @@ class PaddleModel(SerializableBase): ...@@ -34,7 +34,7 @@ class PaddleModel(SerializableBase):
self._file_name = "_paddle_fleet_param__" self._file_name = "_paddle_fleet_param__"
def serialize(self, path): def serialize(self, path):
from ...io import save_persistables from paddle.distributed.io import save_persistables
save_persistables( save_persistables(
executor=self._exe, executor=self._exe,
...@@ -44,7 +44,7 @@ class PaddleModel(SerializableBase): ...@@ -44,7 +44,7 @@ class PaddleModel(SerializableBase):
) )
def deserialize(self, path): def deserialize(self, path):
from ...io import load_persistables from paddle.distributed.io import load_persistables
load_persistables( load_persistables(
executor=self._exe, executor=self._exe,
......
This diff is collapsed.
...@@ -22,7 +22,7 @@ from test_dist_base import RUN_STEP, runtime_main ...@@ -22,7 +22,7 @@ from test_dist_base import RUN_STEP, runtime_main
import paddle import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
from paddle.fluid import core, io from paddle.fluid import core
class TestDistSaveLoad2x2(TestDistSimnetBow2x2): class TestDistSaveLoad2x2(TestDistSimnetBow2x2):
...@@ -56,7 +56,7 @@ class TestDistSaveLoad2x2(TestDistSimnetBow2x2): ...@@ -56,7 +56,7 @@ class TestDistSaveLoad2x2(TestDistSimnetBow2x2):
return var.persistable return var.persistable
io.load_vars( paddle.static.io.load_vars(
executor, executor,
dirname=dirname, dirname=dirname,
main_program=program, main_program=program,
...@@ -89,7 +89,9 @@ class TestDistSaveLoad2x2(TestDistSimnetBow2x2): ...@@ -89,7 +89,9 @@ class TestDistSaveLoad2x2(TestDistSimnetBow2x2):
exe.run(startup_prog) exe.run(startup_prog)
if need_load and model_dir: if need_load and model_dir:
fluid.io.load_persistables(exe, model_dir, pserver_prog) paddle.distributed.io.load_persistables(
exe, model_dir, pserver_prog
)
exe.run(pserver_prog) exe.run(pserver_prog)
......
...@@ -172,7 +172,7 @@ def train_static(args, batch_generator): ...@@ -172,7 +172,7 @@ def train_static(args, batch_generator):
model_path = os.path.join( model_path = os.path.join(
args.save_static_model_path, "transformer" args.save_static_model_path, "transformer"
) )
fluid.save(train_prog, model_path) paddle.static.save(train_prog, model_path)
break break
return np.array(avg_loss) return np.array(avg_loss)
......
...@@ -20,7 +20,6 @@ import numpy as np ...@@ -20,7 +20,6 @@ import numpy as np
import paddle import paddle
import paddle.dataset.wmt16 as wmt16 import paddle.dataset.wmt16 as wmt16
import paddle.fluid as fluid
def get_input_descs(args, mode="train"): def get_input_descs(args, mode="train"):
...@@ -310,7 +309,7 @@ def load(program, model_path, executor=None, var_list=None): ...@@ -310,7 +309,7 @@ def load(program, model_path, executor=None, var_list=None):
To load python2 saved models in python3. To load python2 saved models in python3.
""" """
try: try:
fluid.load(program, model_path, executor, var_list) paddle.static.load(program, model_path, executor, var_list)
except UnicodeDecodeError: except UnicodeDecodeError:
warnings.warn( warnings.warn(
"An UnicodeDecodeError is catched, which might be caused by loading " "An UnicodeDecodeError is catched, which might be caused by loading "
...@@ -319,7 +318,7 @@ def load(program, model_path, executor=None, var_list=None): ...@@ -319,7 +318,7 @@ def load(program, model_path, executor=None, var_list=None):
) )
load_bak = pickle.load load_bak = pickle.load
pickle.load = partial(load_bak, encoding="latin1") pickle.load = partial(load_bak, encoding="latin1")
fluid.load(program, model_path, executor, var_list) paddle.static.load(program, model_path, executor, var_list)
pickle.load = load_bak pickle.load = load_bak
......
...@@ -42,7 +42,7 @@ def main_test_func(place, dtype): ...@@ -42,7 +42,7 @@ def main_test_func(place, dtype):
adam_optimizer.minimize(avg_cost) adam_optimizer.minimize(avg_cost)
fetch_list = [avg_cost] fetch_list = [avg_cost]
train_reader = fluid.io.batch( train_reader = paddle.batch(
paddle.dataset.uci_housing.train(), batch_size=1 paddle.dataset.uci_housing.train(), batch_size=1
) )
feeder = fluid.DataFeeder(place=place, feed_list=[x, y]) feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
......
...@@ -40,7 +40,7 @@ class TestClass(unittest.TestCase): ...@@ -40,7 +40,7 @@ class TestClass(unittest.TestCase):
yield img, label yield img, label
reader = paddle.reader.cache(fake_reader) reader = paddle.reader.cache(fake_reader)
batch_reader = fluid.io.batch(reader, batch_size=batch_size) batch_reader = paddle.batch(reader, batch_size=batch_size)
places = [fluid.CPUPlace()] places = [fluid.CPUPlace()]
if fluid.core.is_compiled_with_cuda(): if fluid.core.is_compiled_with_cuda():
......
...@@ -264,7 +264,7 @@ class TestDistMnistAsync2x2WithGauss(TestFleetBase): ...@@ -264,7 +264,7 @@ class TestDistMnistAsync2x2WithGauss(TestFleetBase):
feeder = fluid.DataFeeder(place=fluid.CPUPlace(), feed_list=datas) feeder = fluid.DataFeeder(place=fluid.CPUPlace(), feed_list=datas)
exe.run(fluid.default_startup_program()) exe.run(fluid.default_startup_program())
fluid.io.load_persistables(exe, model_file) paddle.distributed.io.load_persistables(exe, model_file)
for batch_id, data in enumerate(reader()): for batch_id, data in enumerate(reader()):
score = exe.run( score = exe.run(
......
...@@ -150,12 +150,12 @@ class TestDygraphLoadStatic(unittest.TestCase): ...@@ -150,12 +150,12 @@ class TestDygraphLoadStatic(unittest.TestCase):
) )
out = exe.run(framework.default_startup_program()) out = exe.run(framework.default_startup_program())
fluid.save( paddle.static.save(
framework.default_main_program(), framework.default_main_program(),
os.path.join(temp_dir.name, "test_1"), os.path.join(temp_dir.name, "test_1"),
) )
para_dict = fluid.load_program_state( para_dict = paddle.static.load_program_state(
os.path.join(temp_dir.name, "test_1") os.path.join(temp_dir.name, "test_1")
) )
......
...@@ -25,14 +25,13 @@ import paddle.fluid as fluid ...@@ -25,14 +25,13 @@ import paddle.fluid as fluid
import paddle.fluid.core as core import paddle.fluid.core as core
import paddle.fluid.executor as executor import paddle.fluid.executor as executor
import paddle.fluid.optimizer as optimizer import paddle.fluid.optimizer as optimizer
from paddle.distributed.io import load_inference_model_distributed from paddle.distributed.io import (
from paddle.fluid.compiler import CompiledProgram load_inference_model_distributed,
from paddle.fluid.framework import Program, program_guard
from paddle.fluid.io import (
load_inference_model,
save_inference_model,
save_persistables, save_persistables,
) )
from paddle.fluid.compiler import CompiledProgram
from paddle.fluid.framework import Program, program_guard
from paddle.fluid.io import load_inference_model, save_inference_model
paddle.enable_static() paddle.enable_static()
......
...@@ -18,6 +18,7 @@ import unittest ...@@ -18,6 +18,7 @@ import unittest
import paddle import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
import paddle.static as static
from paddle.fluid import core from paddle.fluid import core
...@@ -34,24 +35,24 @@ class TestSaveLoadAPIError(unittest.TestCase): ...@@ -34,24 +35,24 @@ class TestSaveLoadAPIError(unittest.TestCase):
graph = core.Graph(core.ProgramDesc()) graph = core.Graph(core.ProgramDesc())
compiled_program = fluid.CompiledProgram(graph) compiled_program = fluid.CompiledProgram(graph)
with self.assertRaises(TypeError): with self.assertRaises(TypeError):
fluid.io._get_valid_program(compiled_program) paddle.static.io._get_valid_program(compiled_program)
# case 2: main_program type error # case 2: main_program type error
with self.assertRaises(TypeError): with self.assertRaises(TypeError):
fluid.io._get_valid_program("program") paddle.static.io._get_valid_program("program")
def test_load_vars_error(self): def test_load_vars_error(self):
place = fluid.CPUPlace() place = fluid.CPUPlace()
exe = fluid.Executor(place) exe = fluid.Executor(place)
# case 1: main_program type error when vars None # case 1: main_program type error when vars None
with self.assertRaises(TypeError): with self.assertRaises(TypeError):
fluid.io.load_vars( static.io.load_vars(
executor=exe, dirname=self.save_dir, main_program="program" executor=exe, dirname=self.save_dir, main_program="program"
) )
# case 2: main_program type error when vars not None # case 2: main_program type error when vars not None
with self.assertRaises(TypeError): with self.assertRaises(TypeError):
fluid.io.load_vars( static.io.load_vars(
executor=exe, executor=exe,
dirname=self.save_dir, dirname=self.save_dir,
main_program="program", main_program="program",
......
...@@ -75,7 +75,7 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase): ...@@ -75,7 +75,7 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
def tearDown(self): def tearDown(self):
self.temp_dir.cleanup() self.temp_dir.cleanup()
def train_and_save_model(self, only_params=False): def train_and_save_model(self):
with new_program_scope(): with new_program_scope():
startup_program = fluid.default_startup_program() startup_program = fluid.default_startup_program()
main_program = fluid.default_main_program() main_program = fluid.default_main_program()
...@@ -122,19 +122,14 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase): ...@@ -122,19 +122,14 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
param.name param.name
) )
if only_params: fluid.io.save_inference_model(
fluid.io.save_params( self.save_dirname,
exe, self.save_dirname, filename=self.params_filename ["img"],
) [prediction],
else: exe,
fluid.io.save_inference_model( model_filename=self.model_filename,
self.save_dirname, params_filename=self.params_filename,
["img"], )
[prediction],
exe,
model_filename=self.model_filename,
params_filename=self.params_filename,
)
return static_param_dict return static_param_dict
...@@ -195,16 +190,6 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase): ...@@ -195,16 +190,6 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
) )
self.check_load_state_dict(orig_param_dict, new_load_param_dict) self.check_load_state_dict(orig_param_dict, new_load_param_dict)
def test_load_state_dict_from_save_params(self):
self.save_dirname = os.path.join(
self.temp_dir.name, "static_mnist.load_state_dict.save_params"
)
self.params_filename = None
orig_param_dict = self.train_and_save_model(True)
new_load_param_dict = paddle.load(self.save_dirname)
self.check_load_state_dict(orig_param_dict, new_load_param_dict)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.executor import Executor
class TestLoadVarsShapeCheck(unittest.TestCase):
def setUp(self):
self.model_path = "./model_temp/"
def test_shape_check_save_load(self):
program_1 = fluid.Program()
startup_program_1 = fluid.Program()
with fluid.program_guard(program_1, startup_program_1):
input = paddle.static.data(
name="x", shape=[-1, 10], dtype='float32'
)
out = paddle.static.nn.fc(input, 20)
place = fluid.CPUPlace()
exe = Executor(place)
exe.run(startup_program_1)
fluid.io.save_params(exe, self.model_path, main_program=program_1)
fluid.io.load_params(exe, self.model_path, main_program=program_1)
def tearDown(self):
if os.path.exists(self.model_path):
shutil.rmtree(self.model_path)
if __name__ == "__main__":
unittest.main()
...@@ -235,7 +235,7 @@ class TestSaveLoadAny(unittest.TestCase): ...@@ -235,7 +235,7 @@ class TestSaveLoadAny(unittest.TestCase):
# paddle.save, legacy paddle.fluid.load # paddle.save, legacy paddle.fluid.load
self.replace_static_save(prog, path) self.replace_static_save(prog, path)
self.set_zero(prog, place) self.set_zero(prog, place)
paddle.fluid.io.load(prog, path) paddle.static.load(prog, path)
for var in prog.list_vars(): for var in prog.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
new_t = np.array( new_t = np.array(
...@@ -244,7 +244,7 @@ class TestSaveLoadAny(unittest.TestCase): ...@@ -244,7 +244,7 @@ class TestSaveLoadAny(unittest.TestCase):
base_t = base_map[var.name] base_t = base_map[var.name]
np.testing.assert_array_equal(new_t, np.array(base_t)) np.testing.assert_array_equal(new_t, np.array(base_t))
# legacy paddle.fluid.save, paddle.load # legacy paddle.fluid.save, paddle.load
paddle.fluid.io.save(prog, path) paddle.static.save(prog, path)
self.set_zero(prog, place) self.set_zero(prog, place)
self.replace_static_load(prog, path) self.replace_static_load(prog, path)
for var in prog.list_vars(): for var in prog.list_vars():
...@@ -904,7 +904,7 @@ class TestSaveLoadToMemory(unittest.TestCase): ...@@ -904,7 +904,7 @@ class TestSaveLoadToMemory(unittest.TestCase):
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
paddle.save(state_dict, '') paddle.save(state_dict, '')
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
paddle.fluid.io._open_file_buffer('temp', 'b') paddle.framework.io_utils._open_file_buffer('temp', 'b')
def test_static_save_to_memory(self): def test_static_save_to_memory(self):
paddle.enable_static() paddle.enable_static()
......
...@@ -107,7 +107,7 @@ class TestSaveLoadBinaryFormat(unittest.TestCase): ...@@ -107,7 +107,7 @@ class TestSaveLoadBinaryFormat(unittest.TestCase):
var_list = list( var_list = list(
filter(lambda var: var.persistable, prog.list_vars()) filter(lambda var: var.persistable, prog.list_vars())
) )
fluid.io.load_vars( paddle.static.io.load_vars(
exe, path_vars1, main_program=prog, vars=var_list exe, path_vars1, main_program=prog, vars=var_list
) )
...@@ -123,7 +123,7 @@ class TestSaveLoadBinaryFormat(unittest.TestCase): ...@@ -123,7 +123,7 @@ class TestSaveLoadBinaryFormat(unittest.TestCase):
path_vars2 = os.path.join( path_vars2 = os.path.join(
self.temp_dir.name, 'test_replace_save_load_vars_binary2/model/' self.temp_dir.name, 'test_replace_save_load_vars_binary2/model/'
) )
fluid.io.save_vars( paddle.static.io.save_vars(
exe, path_vars2, main_program=prog, vars=var_list exe, path_vars2, main_program=prog, vars=var_list
) )
self.set_zero(prog, place) self.set_zero(prog, place)
......
...@@ -19,7 +19,6 @@ import numpy as np ...@@ -19,7 +19,6 @@ import numpy as np
import paddle import paddle
import paddle.fluid.core as core import paddle.fluid.core as core
import paddle.fluid.io as io
from paddle.fluid.dygraph import guard from paddle.fluid.dygraph import guard
from paddle.fluid.executor import Executor from paddle.fluid.executor import Executor
from paddle.fluid.framework import ParamBase, Variable, default_main_program from paddle.fluid.framework import ParamBase, Variable, default_main_program
...@@ -47,8 +46,6 @@ class ParameterChecks(unittest.TestCase): ...@@ -47,8 +46,6 @@ class ParameterChecks(unittest.TestCase):
exe = Executor(paddle.CPUPlace()) exe = Executor(paddle.CPUPlace())
p = exe.run(main_program, fetch_list=[param])[0] p = exe.run(main_program, fetch_list=[param])[0]
np.testing.assert_array_equal(p, np.ones(shape) * val) np.testing.assert_array_equal(p, np.ones(shape) * val)
p = io.get_parameter_value_by_name('fc.w', exe, main_program)
np.testing.assert_array_equal(p, np.ones(shape) * val)
zero_dim_param = b.create_parameter(name='x', shape=[], dtype='float32') zero_dim_param = b.create_parameter(name='x', shape=[], dtype='float32')
self.assertEqual(zero_dim_param.shape, ()) self.assertEqual(zero_dim_param.shape, ())
......
...@@ -346,7 +346,9 @@ class TestSaveLoadBase(unittest.TestCase): ...@@ -346,7 +346,9 @@ class TestSaveLoadBase(unittest.TestCase):
self.assertTrue(np.sum(np.abs(t)) != 0) self.assertTrue(np.sum(np.abs(t)) != 0)
base_map[var.name] = t base_map[var.name] = t
fluid.save(main_program, os.path.join(temp_dir.name, "test_1")) paddle.static.save(
main_program, os.path.join(temp_dir.name, "test_1")
)
# set var to zero # set var to zero
for var in main_program.list_vars(): for var in main_program.list_vars():
...@@ -360,7 +362,7 @@ class TestSaveLoadBase(unittest.TestCase): ...@@ -360,7 +362,7 @@ class TestSaveLoadBase(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.load( paddle.static.load(
main_program, main_program,
os.path.join(temp_dir.name, "test_1.pdparams"), os.path.join(temp_dir.name, "test_1.pdparams"),
exe, exe,
...@@ -485,7 +487,9 @@ class TestSaveLoadPartial(unittest.TestCase): ...@@ -485,7 +487,9 @@ class TestSaveLoadPartial(unittest.TestCase):
self.assertTrue(np.sum(np.abs(t)) != 0) self.assertTrue(np.sum(np.abs(t)) != 0)
base_map[var.name] = t base_map[var.name] = t
fluid.save(main_program, os.path.join(temp_dir.name, "test_1")) paddle.static.save(
main_program, os.path.join(temp_dir.name, "test_1")
)
# set var to zero # set var to zero
for var in main_program.list_vars(): for var in main_program.list_vars():
...@@ -499,7 +503,7 @@ class TestSaveLoadPartial(unittest.TestCase): ...@@ -499,7 +503,7 @@ class TestSaveLoadPartial(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.load( paddle.static.load(
test_program, os.path.join(temp_dir.name, "test_1.pdopt"), None test_program, os.path.join(temp_dir.name, "test_1.pdopt"), None
) )
...@@ -510,7 +514,7 @@ class TestSaveLoadPartial(unittest.TestCase): ...@@ -510,7 +514,7 @@ class TestSaveLoadPartial(unittest.TestCase):
) )
base_t = base_map[var.name] base_t = base_map[var.name]
np.testing.assert_array_equal(new_t, base_t) np.testing.assert_array_equal(new_t, base_t)
fluid.load( paddle.static.load(
test_program, test_program,
os.path.join(temp_dir.name, "test_1.pdmodel"), os.path.join(temp_dir.name, "test_1.pdmodel"),
None, None,
...@@ -617,7 +621,9 @@ class TestSaveLoadSetStateDict(unittest.TestCase): ...@@ -617,7 +621,9 @@ class TestSaveLoadSetStateDict(unittest.TestCase):
self.assertTrue(np.sum(np.abs(t)) != 0) self.assertTrue(np.sum(np.abs(t)) != 0)
base_map[var.name] = t base_map[var.name] = t
fluid.save(main_program, os.path.join(temp_dir.name, "test_1")) paddle.static.save(
main_program, os.path.join(temp_dir.name, "test_1")
)
# set var to zero # set var to zero
for var in main_program.list_vars(): for var in main_program.list_vars():
...@@ -631,7 +637,9 @@ class TestSaveLoadSetStateDict(unittest.TestCase): ...@@ -631,7 +637,9 @@ class TestSaveLoadSetStateDict(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.load(main_program, os.path.join(temp_dir.name, "test_1"), exe) paddle.static.load(
main_program, os.path.join(temp_dir.name, "test_1"), exe
)
for var in main_program.list_vars(): for var in main_program.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -752,7 +760,9 @@ class TestProgramStatePartial(unittest.TestCase): ...@@ -752,7 +760,9 @@ class TestProgramStatePartial(unittest.TestCase):
self.assertTrue(np.sum(np.abs(t)) != 0) self.assertTrue(np.sum(np.abs(t)) != 0)
base_map[var.name] = t base_map[var.name] = t
fluid.save(main_program, os.path.join(temp_dir.name, 'test_1')) paddle.static.save(
main_program, os.path.join(temp_dir.name, 'test_1')
)
# set var to zero # set var to zero
for var in main_program.list_vars(): for var in main_program.list_vars():
...@@ -767,23 +777,23 @@ class TestProgramStatePartial(unittest.TestCase): ...@@ -767,23 +777,23 @@ class TestProgramStatePartial(unittest.TestCase):
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
# fluid.load(test_program, "./test_1", None ) # fluid.load(test_program, "./test_1", None )
program_state = fluid.load_program_state( program_state = paddle.static.load_program_state(
os.path.join(temp_dir.name, 'test_1') os.path.join(temp_dir.name, 'test_1')
) )
program_state_1 = fluid.load_program_state( program_state_1 = paddle.static.load_program_state(
os.path.join(temp_dir.name, 'test_1.pdparams') os.path.join(temp_dir.name, 'test_1.pdparams')
) )
program_state_2 = fluid.load_program_state( program_state_2 = paddle.static.load_program_state(
os.path.join(temp_dir.name, 'test_1.pdopt') os.path.join(temp_dir.name, 'test_1.pdopt')
) )
program_state_3 = fluid.load_program_state( program_state_3 = paddle.static.load_program_state(
os.path.join(temp_dir.name, 'test_1.pdmodel') os.path.join(temp_dir.name, 'test_1.pdmodel')
) )
fluid.set_program_state(test_program, program_state) paddle.static.set_program_state(test_program, program_state)
for var in test_program.list_vars(): for var in test_program.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -805,7 +815,7 @@ class TestProgramStatePartial(unittest.TestCase): ...@@ -805,7 +815,7 @@ class TestProgramStatePartial(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.set_program_state(test_program, program_state_1) paddle.static.set_program_state(test_program, program_state_1)
for var in test_program.list_vars(): for var in test_program.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -827,7 +837,7 @@ class TestProgramStatePartial(unittest.TestCase): ...@@ -827,7 +837,7 @@ class TestProgramStatePartial(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.set_program_state(test_program, program_state_2) paddle.static.set_program_state(test_program, program_state_2)
for var in test_program.list_vars(): for var in test_program.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -849,7 +859,7 @@ class TestProgramStatePartial(unittest.TestCase): ...@@ -849,7 +859,7 @@ class TestProgramStatePartial(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.set_program_state(test_program, program_state_3) paddle.static.set_program_state(test_program, program_state_3)
for var in test_program.list_vars(): for var in test_program.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -880,7 +890,7 @@ class TestVariableInit(unittest.TestCase): ...@@ -880,7 +890,7 @@ class TestVariableInit(unittest.TestCase):
exe.run(fluid.default_startup_program()) exe.run(fluid.default_startup_program())
temp_dir = tempfile.TemporaryDirectory() temp_dir = tempfile.TemporaryDirectory()
fluid.save( paddle.static.save(
fluid.default_main_program(), fluid.default_main_program(),
os.path.join(temp_dir.name, "test_path"), os.path.join(temp_dir.name, "test_path"),
) )
...@@ -905,7 +915,7 @@ class TestVariableInit(unittest.TestCase): ...@@ -905,7 +915,7 @@ class TestVariableInit(unittest.TestCase):
place = self.set_place() place = self.set_place()
exe = fluid.Executor(place) exe = fluid.Executor(place)
parameter_list = list( parameter_list = list(
filter(fluid.io.is_parameter, program.list_vars()) filter(paddle.framework.is_parameter, program.list_vars())
) )
fluid.core._create_loaded_parameter( fluid.core._create_loaded_parameter(
...@@ -925,7 +935,10 @@ class TestVariableInit(unittest.TestCase): ...@@ -925,7 +935,10 @@ class TestVariableInit(unittest.TestCase):
set_var(new_v, load_dict[v.name]) set_var(new_v, load_dict[v.name])
opt_list = list( opt_list = list(
filter(fluid.io.is_belong_to_optimizer, program.list_vars()) filter(
paddle.framework.io_utils.is_belong_to_optimizer,
program.list_vars(),
)
) )
fluid.core._create_loaded_parameter( fluid.core._create_loaded_parameter(
...@@ -1092,7 +1105,7 @@ class TestLoadFromOldInterface(unittest.TestCase): ...@@ -1092,7 +1105,7 @@ class TestLoadFromOldInterface(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.load( paddle.static.load(
main_program, os.path.join(self.temp_dir.name, "test_path"), exe main_program, os.path.join(self.temp_dir.name, "test_path"), exe
) )
...@@ -1112,7 +1125,7 @@ class TestLoadFromOldInterface(unittest.TestCase): ...@@ -1112,7 +1125,7 @@ class TestLoadFromOldInterface(unittest.TestCase):
var.desc.set_shape(new_shape) var.desc.set_shape(new_shape)
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
fluid.load( paddle.static.load(
main_program, main_program,
os.path.join(self.temp_dir.name, "test_path"), os.path.join(self.temp_dir.name, "test_path"),
exe, exe,
...@@ -1120,7 +1133,7 @@ class TestLoadFromOldInterface(unittest.TestCase): ...@@ -1120,7 +1133,7 @@ class TestLoadFromOldInterface(unittest.TestCase):
# check unused parameter # check unused parameter
fluid.load( paddle.static.load(
test_clone_program, test_clone_program,
os.path.join(self.temp_dir.name, "test_path"), os.path.join(self.temp_dir.name, "test_path"),
exe, exe,
...@@ -1239,7 +1252,7 @@ class TestLoadFromOldInterface(unittest.TestCase): ...@@ -1239,7 +1252,7 @@ class TestLoadFromOldInterface(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.load( paddle.static.load(
main_program, main_program,
os.path.join(self.temp_dir.name, "test_static_load_var_list"), os.path.join(self.temp_dir.name, "test_static_load_var_list"),
exe, exe,
...@@ -1377,11 +1390,11 @@ class TestLoadFromOldInterfaceSingleFile(unittest.TestCase): ...@@ -1377,11 +1390,11 @@ class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
file_model_path = os.path.join(save_dir, "model_single") file_model_path = os.path.join(save_dir, "model_single")
fluid.load( paddle.static.load(
main_program, main_program,
file_model_path, file_model_path,
exe, exe,
fluid.io.get_program_persistable_vars(main_program), paddle.static.io.get_program_persistable_vars(main_program),
) )
for var in main_program.list_vars(): for var in main_program.list_vars():
...@@ -1403,36 +1416,33 @@ class TestLoadFromOldInterfaceSingleFile(unittest.TestCase): ...@@ -1403,36 +1416,33 @@ class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
var.desc.set_shape(new_shape) var.desc.set_shape(new_shape)
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
fluid.load( paddle.static.load(
main_program, main_program,
file_model_path, file_model_path,
exe, exe,
fluid.io.get_program_persistable_vars(main_program), paddle.static.io.get_program_persistable_vars(main_program),
) )
fluid.io.save_params(
exe, "test_path", main_program, filename="model_single"
)
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
fluid.load( paddle.static.load(
main_program, main_program,
file_model_path, file_model_path,
exe, exe,
fluid.io.get_program_persistable_vars(main_program), paddle.static.io.get_program_persistable_vars(main_program),
) )
# check when executor is None # check when executor is None
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
fluid.load( paddle.static.load(
main_program, main_program,
file_model_path, file_model_path,
None, None,
fluid.io.get_program_persistable_vars(main_program), paddle.static.io.get_program_persistable_vars(main_program),
) )
# check when var list is None # check when var list is None
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
fluid.load(main_program, file_model_path, exe, None) paddle.static.load(main_program, file_model_path, exe, None)
# check save params, load var_list = get_program_persistable_vars # check save params, load var_list = get_program_persistable_vars
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
...@@ -1440,7 +1450,7 @@ class TestLoadFromOldInterfaceSingleFile(unittest.TestCase): ...@@ -1440,7 +1450,7 @@ class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
main_program.global_block(), shape=[1], name="test_temp_var" main_program.global_block(), shape=[1], name="test_temp_var"
) )
all_var_list = list(main_program.list_vars()) all_var_list = list(main_program.list_vars())
fluid.load( paddle.static.load(
main_program, main_program,
file_model_path, file_model_path,
exe, exe,
...@@ -1579,8 +1589,8 @@ class TestProgramStateOldSave(unittest.TestCase): ...@@ -1579,8 +1589,8 @@ class TestProgramStateOldSave(unittest.TestCase):
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
# case 1: load basic # case 1: load basic
program_state = fluid.load_program_state(save_dir) program_state = paddle.static.load_program_state(save_dir)
fluid.set_program_state(main_program, program_state) paddle.static.set_program_state(main_program, program_state)
self.check_in_static(main_program, base_map) self.check_in_static(main_program, base_map)
# case 2: load with no need file # case 2: load with no need file
...@@ -1594,21 +1604,21 @@ class TestProgramStateOldSave(unittest.TestCase): ...@@ -1594,21 +1604,21 @@ class TestProgramStateOldSave(unittest.TestCase):
else: else:
raise e raise e
program_state = fluid.load_program_state(save_dir) program_state = paddle.static.load_program_state(save_dir)
fluid.set_program_state(main_program, program_state) paddle.static.set_program_state(main_program, program_state)
self.check_in_static(main_program, base_map) self.check_in_static(main_program, base_map)
# case 3: load with var_list # case 3: load with var_list
program_state = fluid.load_program_state( program_state = paddle.static.load_program_state(
save_dir, main_program.all_parameters() save_dir, main_program.all_parameters()
) )
fluid.set_program_state(main_program, program_state) paddle.static.set_program_state(main_program, program_state)
self.check_in_static(main_program, base_map) self.check_in_static(main_program, base_map)
if self.test_dygraph: if self.test_dygraph:
# make sure `load_program_state` can be used in dynamic graph mode # make sure `load_program_state` can be used in dynamic graph mode
with fluid.dygraph.guard(place): with fluid.dygraph.guard(place):
load_state = fluid.load_program_state(save_dir) load_state = paddle.static.load_program_state(save_dir)
for k, v in load_state.items(): for k, v in load_state.items():
np.testing.assert_array_equal(base_map[k], v) np.testing.assert_array_equal(base_map[k], v)
...@@ -1758,11 +1768,13 @@ class TestProgramStateOldSaveSingleModel(unittest.TestCase): ...@@ -1758,11 +1768,13 @@ class TestProgramStateOldSaveSingleModel(unittest.TestCase):
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
# fluid.load(test_program, "./test_1", None ) # fluid.load(test_program, "./test_1", None )
program_state = fluid.load_program_state( program_state = paddle.static.load_program_state(
os.path.join(save_dir, "model_1"), os.path.join(save_dir, "model_1"),
var_list=fluid.io.get_program_persistable_vars(main_program), var_list=paddle.static.io.get_program_persistable_vars(
main_program
),
) )
fluid.set_program_state(main_program, program_state) paddle.static.set_program_state(main_program, program_state)
for var in main_program.list_vars(): for var in main_program.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -1773,15 +1785,17 @@ class TestProgramStateOldSaveSingleModel(unittest.TestCase): ...@@ -1773,15 +1785,17 @@ class TestProgramStateOldSaveSingleModel(unittest.TestCase):
np.testing.assert_array_equal(new_t, base_t) np.testing.assert_array_equal(new_t, base_t)
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
fluid.load_program_state(os.path.join(save_dir, "model_1")) paddle.static.load_program_state(
os.path.join(save_dir, "model_1")
)
with self.assertRaises(TypeError): with self.assertRaises(TypeError):
fluid.load_program_state( paddle.static.load_program_state(
os.path.join(save_dir, "model_1"), var_list=["str"] os.path.join(save_dir, "model_1"), var_list=["str"]
) )
with self.assertRaises(RuntimeError): with self.assertRaises(RuntimeError):
fluid.load_program_state( paddle.static.load_program_state(
os.path.join(save_dir, "model_1"), os.path.join(save_dir, "model_1"),
var_list=[ var_list=[
main_program.global_block().create_var( main_program.global_block().create_var(
...@@ -1827,17 +1841,17 @@ class TestStaticSaveLoadPickle(unittest.TestCase): ...@@ -1827,17 +1841,17 @@ class TestStaticSaveLoadPickle(unittest.TestCase):
) )
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
paddle.fluid.save(prog, path, 2.0) paddle.static.save(prog, path, 2.0)
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
paddle.fluid.save(prog, path, 1) paddle.static.save(prog, path, 1)
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
paddle.fluid.save(prog, path, 5) paddle.static.save(prog, path, 5)
protocols = [2, 3, 4] protocols = [2, 3, 4]
for protocol in protocols: for protocol in protocols:
paddle.fluid.save(prog, path, protocol) paddle.static.save(prog, path, protocol)
# set var to zero # set var to zero
for var in prog.list_vars(): for var in prog.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -1851,7 +1865,7 @@ class TestStaticSaveLoadPickle(unittest.TestCase): ...@@ -1851,7 +1865,7 @@ class TestStaticSaveLoadPickle(unittest.TestCase):
) )
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
paddle.fluid.load(prog, path) paddle.static.load(prog, path)
for var in prog.list_vars(): for var in prog.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
......
...@@ -134,7 +134,7 @@ class TestSaveLoadBF16(unittest.TestCase): ...@@ -134,7 +134,7 @@ class TestSaveLoadBF16(unittest.TestCase):
self.assertTrue(np.sum(np.abs(t)) != 0) self.assertTrue(np.sum(np.abs(t)) != 0)
base_map[var.name] = t base_map[var.name] = t
save_dir = os.path.join(self.temp_dir.name, "test_1") save_dir = os.path.join(self.temp_dir.name, "test_1")
fluid.save(main_program, save_dir) paddle.static.save(main_program, save_dir)
# set var to zero # set var to zero
for var in main_program.list_vars(): for var in main_program.list_vars():
...@@ -148,7 +148,7 @@ class TestSaveLoadBF16(unittest.TestCase): ...@@ -148,7 +148,7 @@ class TestSaveLoadBF16(unittest.TestCase):
# make sure all the paramerter or optimizer var have been set to zero # make sure all the paramerter or optimizer var have been set to zero
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
fluid.load( paddle.static.load(
main_program, main_program,
os.path.join(self.temp_dir.name, "test_1.pdparams"), os.path.join(self.temp_dir.name, "test_1.pdparams"),
exe, exe,
......
...@@ -58,7 +58,7 @@ class TestStaticSaveLoadLargeParameters(unittest.TestCase): ...@@ -58,7 +58,7 @@ class TestStaticSaveLoadLargeParameters(unittest.TestCase):
) )
path = os.path.join(path, "static_save") path = os.path.join(path, "static_save")
protocol = 4 protocol = 4
paddle.fluid.save(prog, path, pickle_protocol=protocol) paddle.static.save(prog, path, pickle_protocol=protocol)
# set var to zero # set var to zero
for var in prog.list_vars(): for var in prog.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -70,7 +70,7 @@ class TestStaticSaveLoadLargeParameters(unittest.TestCase): ...@@ -70,7 +70,7 @@ class TestStaticSaveLoadLargeParameters(unittest.TestCase):
) )
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
paddle.fluid.load(prog, path) paddle.static.load(prog, path)
for var in prog.list_vars(): for var in prog.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
...@@ -91,8 +91,8 @@ class TestStaticSaveLoadLargeParameters(unittest.TestCase): ...@@ -91,8 +91,8 @@ class TestStaticSaveLoadLargeParameters(unittest.TestCase):
) )
self.assertTrue(np.sum(np.abs(new_t)) == 0) self.assertTrue(np.sum(np.abs(new_t)) == 0)
program_state = fluid.load_program_state(path) program_state = paddle.static.load_program_state(path)
fluid.set_program_state(prog, program_state) paddle.static.set_program_state(prog, program_state)
for var in prog.list_vars(): for var in prog.list_vars():
if isinstance(var, framework.Parameter) or var.persistable: if isinstance(var, framework.Parameter) or var.persistable:
new_t = np.array( new_t = np.array(
......
...@@ -36,6 +36,16 @@ from ..fluid.dygraph.base import grad # noqa: F401 ...@@ -36,6 +36,16 @@ from ..fluid.dygraph.base import grad # noqa: F401
from .io import save # noqa: F401 from .io import save # noqa: F401
from .io import load # noqa: F401 from .io import load # noqa: F401
from .io_utils import _open_file_buffer # noqa: F401
from .io_utils import is_parameter # noqa: F401
from .io_utils import is_persistable # noqa: F401
from .io_utils import is_belong_to_optimizer # noqa: F401
from .io_utils import _clone_var_in_block_ # noqa: F401
from .io_utils import _pickle_loads_mac
from .io_utils import _pack_loaded_dict
from .io_utils import _unpack_saved_dict
from .io_utils import _load_program_scope
from ..fluid import monkey_patch_variable from ..fluid import monkey_patch_variable
from ..fluid.dygraph import monkey_patch_math_varbase from ..fluid.dygraph import monkey_patch_math_varbase
from ..fluid.framework import disable_signal_handler # noqa: F401 from ..fluid.framework import disable_signal_handler # noqa: F401
......
...@@ -37,14 +37,6 @@ from paddle.fluid.framework import ( ...@@ -37,14 +37,6 @@ from paddle.fluid.framework import (
_non_static_mode, _non_static_mode,
_varbase_creator, _varbase_creator,
) )
from paddle.fluid.io import _is_file_path, _is_memory_buffer
from paddle.fluid.io import _legacy_save as _legacy_static_save
from paddle.fluid.io import (
_open_file_buffer,
_pack_loaded_dict,
_pickle_loads_mac,
_unpack_saved_dict,
)
from paddle.jit.api import _SaveLoadConfig from paddle.jit.api import _SaveLoadConfig
from paddle.jit.translated_layer import ( from paddle.jit.translated_layer import (
INFER_MODEL_SUFFIX, INFER_MODEL_SUFFIX,
...@@ -53,6 +45,16 @@ from paddle.jit.translated_layer import ( ...@@ -53,6 +45,16 @@ from paddle.jit.translated_layer import (
_construct_program_holders, _construct_program_holders,
) )
from .io_utils import (
_is_file_path,
_is_memory_buffer,
_legacy_static_save,
_open_file_buffer,
_pack_loaded_dict,
_pickle_loads_mac,
_unpack_saved_dict,
)
__all__ = [] __all__ = []
......
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import math
import os
import pickle
import sys
from io import BytesIO
import numpy as np
import paddle
from paddle.fluid import core
from paddle.fluid.framework import Parameter, Variable, static_only
from paddle.fluid.log_helper import get_logger
from paddle.fluid.wrapped_decorator import signature_safe_contextmanager
_logger = get_logger(
__name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s'
)
# This file contains various utility functions that are used in static.io (io-related APIs used in the static graph)
# and framework.io (io-related APIs used in dygraph).
class _open_buffer:
def __init__(self, buffer):
self.buffer = buffer
def __enter__(self):
return self.buffer
class _buffer_reader(_open_buffer):
def __init__(self, buffer):
super().__init__(buffer)
self.initial_tell = self.buffer.tell()
def __exit__(self, *args):
# `args[0]` is the exception type. If the read fails, the file pointer is restored to its initial position.
if args[0] is not None:
self.buffer.seek(self.initial_tell)
class _buffer_writer(_open_buffer):
def __exit__(self, *args):
self.buffer.flush()
def _is_file_path(path):
return isinstance(path, str)
def _open_file_buffer(path_or_buffer, mode):
if _is_file_path(path_or_buffer):
return open(path_or_buffer, mode)
else:
if 'w' in mode:
return _buffer_writer(path_or_buffer)
elif 'r' in mode:
return _buffer_reader(path_or_buffer)
else:
raise ValueError(
"Expected 'r' or 'w' in mode but got {}".format(mode)
)
def _is_memory_buffer(buffer):
return isinstance(buffer, BytesIO)
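A brief usage sketch, editorial and not part of the file, showing how `_open_file_buffer` and `_is_memory_buffer` are meant to be used; the on-disk path is hypothetical:

import pickle
from io import BytesIO

# In-memory buffer: _open_file_buffer wraps the BytesIO object so the same
# with-statement works for file paths and memory buffers alike.
buf = BytesIO()
with _open_file_buffer(buf, 'wb') as f:      # 'w' in mode -> _buffer_writer
    pickle.dump({'step': 10}, f, protocol=2)
buf.seek(0)
with _open_file_buffer(buf, 'rb') as f:      # 'r' in mode -> _buffer_reader
    state = pickle.load(f)
assert _is_memory_buffer(buf) and state == {'step': 10}

# Plain string path: _open_file_buffer falls back to the built-in open().
with _open_file_buffer('/tmp/demo_state.pkl', 'wb') as f:   # hypothetical path
    pickle.dump({'step': 10}, f, protocol=2)

This wrapper is what lets the save/load helpers further down accept either a file name or a BytesIO object.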
def is_persistable(var):
"""
Check whether the given variable is persistable.
Args:
var(Variable): The variable to be checked.
Returns:
bool: True if the given `var` is persistable
False if not.
Examples:
.. code-block:: python
import paddle
import paddle.fluid as fluid
paddle.enable_static()
param = fluid.default_main_program().global_block().var('fc.b')
res = fluid.io.is_persistable(param)
"""
if (
var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH
or var.desc.type() == core.VarDesc.VarType.FETCH_LIST
or var.desc.type() == core.VarDesc.VarType.READER
):
return False
return var.persistable
def is_parameter(var):
"""
Check whether the given variable is an instance of Parameter.
Args:
var(Variable): The variable to be checked.
Returns:
bool: True if the given `var` is an instance of Parameter,
False if not.
Examples:
.. code-block:: python
import paddle
import paddle.fluid as fluid
paddle.enable_static()
param = fluid.default_main_program().global_block().var('fc.w')
res = fluid.io.is_parameter(param)
"""
return isinstance(var, Parameter)
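# Optimizer state is detected structurally: persistable variables that are
# neither Parameters nor feed variables are treated as belonging to the optimizer.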
def is_belong_to_optimizer(var):
if not (isinstance(var, Parameter) or var.desc.need_check_feed()):
return is_persistable(var)
return False
def _clone_var_in_block_(block, var):
assert isinstance(var, Variable)
if var.desc.type() == core.VarDesc.VarType.LOD_TENSOR:
return block.create_var(
name=var.name,
shape=var.shape,
dtype=var.dtype,
type=var.type,
lod_level=var.lod_level,
persistable=True,
)
else:
return block.create_var(
name=var.name,
shape=var.shape,
dtype=var.dtype,
type=var.type,
persistable=True,
)
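# Context manager that runs the enclosed code under fresh (or caller-supplied)
# Program, startup Program and Scope guards, with dygraph mode disabled.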
@signature_safe_contextmanager
def _load_program_scope(main=None, startup=None, scope=None):
prog = main if main else paddle.fluid.Program()
startup_prog = startup if startup else paddle.fluid.Program()
scope = scope if scope else paddle.fluid.core.Scope()
with paddle.fluid.scope_guard(scope):
with paddle.fluid.program_guard(prog, startup_prog):
with paddle.fluid.unique_name.guard():
with paddle.fluid.framework._dygraph_guard(None):
yield
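# Pickle a dict of parameters/tensors to a path or buffer, converting
# VarBase/LoDTensor values to numpy arrays first; on macOS Python 3 the bytes
# are written in 1GB chunks to work around a large-write bug.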
@static_only
def _legacy_static_save(param_dict, model_path, protocol=2):
def get_tensor(var):
if isinstance(var, (core.VarBase, core.eager.Tensor)):
return var.numpy()
elif isinstance(var, core.LoDTensor):
return np.array(var)
return var
param_dict = {name: get_tensor(param_dict[name]) for name in param_dict}
# When a dict value is larger than 4GB, there is a bug on macOS Python 3
if (
_is_file_path(model_path)
and sys.platform == 'darwin'
and sys.version_info.major == 3
):
pickle_bytes = pickle.dumps(param_dict, protocol=protocol)
with open(model_path, 'wb') as f:
max_bytes = 2**30
for i in range(0, len(pickle_bytes), max_bytes):
f.write(pickle_bytes[i : i + max_bytes])
else:
with _open_file_buffer(model_path, 'wb') as f:
pickle.dump(param_dict, f, protocol=protocol)
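# Counterpart of the chunked write above: read the pickled payload back in
# chunks of at most 1GB before deserializing (macOS Python 3 workaround).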
def _pickle_loads_mac(path, f):
pickle_bytes = bytearray(0)
file_size = os.path.getsize(path)
max_bytes = 2**30
for _ in range(0, file_size, max_bytes):
pickle_bytes += f.read(max_bytes)
load_result = pickle.loads(pickle_bytes, encoding='latin1')
return load_result
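# Reassemble values that _unpack_saved_dict split into slices: concatenate the
# recorded slices, restore the original shape, and drop the bookkeeping keys.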
def _pack_loaded_dict(load_obj):
if isinstance(load_obj, dict):
unpack_info = 'UnpackBigParamInfor@@'
if unpack_info in load_obj:
removes = []
for key, value in load_obj[unpack_info].items():
slices = [load_obj[part] for part in value["slices"]]
load_obj[key] = np.concatenate(slices).reshape(
value["OriginShape"]
)
removes += value["slices"]
for key in removes:
load_obj.pop(key)
load_obj.pop(unpack_info)
return load_obj
def _unpack_saved_dict(saved_obj, protocol):
temp_saved_obj = {}
unpack_infor = {}
# When pickle protocol=2 or protocol=3 the serialized object cannot be larger than 4G.
if 1 < protocol < 4:
if isinstance(saved_obj, dict):
for key, value in saved_obj.items():
if isinstance(value, np.ndarray):
MAX_NUMBER_OF_ELEMENT = int(
(2**30 - 1) / value.dtype.itemsize
)
num_element = np.prod(value.shape)
if num_element > MAX_NUMBER_OF_ELEMENT:
unpack_infor[key] = {}
unpack_infor[key]["OriginShape"] = value.shape
unpack_infor[key]["slices"] = []
value = value.flatten()
for i in range(
int(
math.ceil(
num_element * 1.0 / MAX_NUMBER_OF_ELEMENT
)
)
):
part_name = key + "@@." + str(i)
unpack_infor[key]["slices"].append(part_name)
temp_saved_obj[part_name] = value[
i
* MAX_NUMBER_OF_ELEMENT : MAX_NUMBER_OF_ELEMENT
* (i + 1)
]
if unpack_infor:
for key, value in unpack_infor.items():
if key in saved_obj:
saved_obj.pop(key)
for part in value['slices']:
saved_obj[part] = temp_saved_obj[part]
saved_obj['UnpackBigParamInfor@@'] = unpack_infor
return saved_obj
...@@ -34,9 +34,9 @@ from paddle.fluid.executor import global_scope ...@@ -34,9 +34,9 @@ from paddle.fluid.executor import global_scope
from paddle.fluid.framework import Variable from paddle.fluid.framework import Variable
from paddle.fluid.framework import _current_expected_place as _get_device from paddle.fluid.framework import _current_expected_place as _get_device
from paddle.fluid.framework import _get_paddle_place, _non_static_mode from paddle.fluid.framework import _get_paddle_place, _non_static_mode
from paddle.fluid.io import is_belong_to_optimizer
from paddle.fluid.layers import collective from paddle.fluid.layers import collective
from paddle.fluid.layers.utils import flatten from paddle.fluid.layers.utils import flatten
from paddle.framework.io_utils import is_belong_to_optimizer
from paddle.io import DataLoader, Dataset, DistributedBatchSampler from paddle.io import DataLoader, Dataset, DistributedBatchSampler
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.metric import Metric from paddle.metric import Metric
......
...@@ -23,6 +23,7 @@ import time ...@@ -23,6 +23,7 @@ import time
import numpy as np import numpy as np
import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
from paddle.distributed.fleet.utils.fs import HDFSClient from paddle.distributed.fleet.utils.fs import HDFSClient
from paddle.fluid.log_helper import get_logger from paddle.fluid.log_helper import get_logger
...@@ -1087,11 +1088,13 @@ class FleetUtil: ...@@ -1087,11 +1088,13 @@ class FleetUtil:
vars = [program.global_block().var(i) for i in var_names] vars = [program.global_block().var(i) for i in var_names]
with fluid.scope_guard(scope): with fluid.scope_guard(scope):
if save_combine: if save_combine:
fluid.io.save_vars( paddle.static.io.save_vars(
executor, "./", program, vars=vars, filename=model_name executor, "./", program, vars=vars, filename=model_name
) )
else: else:
fluid.io.save_vars(executor, model_name, program, vars=vars) paddle.static.io.save_vars(
executor, model_name, program, vars=vars
)
configs = { configs = {
"fs.default.name": hadoop_fs_name, "fs.default.name": hadoop_fs_name,
......
...@@ -22,6 +22,7 @@ from google.protobuf import text_format ...@@ -22,6 +22,7 @@ from google.protobuf import text_format
import paddle import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
import paddle.framework.io_utils as io_utils
from paddle.fluid import core, debugger from paddle.fluid import core, debugger
from paddle.fluid.framework import Program from paddle.fluid.framework import Program
from paddle.fluid.proto import framework_pb2 from paddle.fluid.proto import framework_pb2
...@@ -92,7 +93,7 @@ def check_pruned_program_vars(train_prog, pruned_prog): ...@@ -92,7 +93,7 @@ def check_pruned_program_vars(train_prog, pruned_prog):
pruned_vars = [ pruned_vars = [
(v.name, v) (v.name, v)
for v in pruned_prog.list_vars() for v in pruned_prog.list_vars()
if fluid.io.is_persistable(v) if io_utils.is_persistable(v)
] ]
pruned_vars = OrderedDict(pruned_vars) pruned_vars = OrderedDict(pruned_vars)
pruned_vars_name = [name for name in pruned_vars] pruned_vars_name = [name for name in pruned_vars]
...@@ -451,7 +452,7 @@ def check_saved_vars_try_dump( ...@@ -451,7 +452,7 @@ def check_saved_vars_try_dump(
os.path.join(dump_dir, dump_prog_fn), is_text_dump_program os.path.join(dump_dir, dump_prog_fn), is_text_dump_program
) )
saved_params = [ saved_params = [
v for v in dump_prog.list_vars() if fluid.io.is_persistable(v) v for v in dump_prog.list_vars() if io_utils.is_persistable(v)
] ]
logger.info( logger.info(
"persistable vars in dump program: {}".format( "persistable vars in dump program: {}".format(
...@@ -477,7 +478,7 @@ def parse_program(program, output_dir): ...@@ -477,7 +478,7 @@ def parse_program(program, output_dir):
# persistable vars # persistable vars
output = {} output = {}
persistable_vars = [ persistable_vars = [
v for v in program.list_vars() if fluid.io.is_persistable(v) v for v in program.list_vars() if io_utils.is_persistable(v)
] ]
output["persistable_vars"] = [ output["persistable_vars"] = [
{ {
......
...@@ -1209,7 +1209,11 @@ def save(layer, path, input_spec=None, **configs): ...@@ -1209,7 +1209,11 @@ def save(layer, path, input_spec=None, **configs):
paddle.static.save_vars( paddle.static.save_vars(
Executor(_current_expected_place()), Executor(_current_expected_place()),
dirname=model_path, dirname=model_path,
vars=list(filter(paddle.fluid.io.is_persistable, ordered_vars)), vars=list(
filter(
paddle.framework.io_utils.is_persistable, ordered_vars
)
),
filename=params_filename, filename=params_filename,
) )
# save property # save property
......
...@@ -27,6 +27,13 @@ from .io import serialize_program # noqa: F401 ...@@ -27,6 +27,13 @@ from .io import serialize_program # noqa: F401
from .io import load_from_file # noqa: F401 from .io import load_from_file # noqa: F401
from .io import save_to_file # noqa: F401 from .io import save_to_file # noqa: F401
from .io import normalize_program # noqa: F401 from .io import normalize_program # noqa: F401
from .io import is_persistable # noqa: F401
from .io import save_vars # noqa: F401
from .io import load_vars # noqa: F401
from .io import save # noqa: F401
from .io import load # noqa: F401
from .io import load_program_state # noqa: F401
from .io import set_program_state # noqa: F401
from ..fluid import Scope # noqa: F401 from ..fluid import Scope # noqa: F401
from .input import data # noqa: F401 from .input import data # noqa: F401
from .input import InputSpec # noqa: F401 from .input import InputSpec # noqa: F401
...@@ -66,13 +73,6 @@ from ..fluid.param_attr import WeightNormParamAttr # noqa: F401 ...@@ -66,13 +73,6 @@ from ..fluid.param_attr import WeightNormParamAttr # noqa: F401
from ..fluid.optimizer import Optimizer # noqa: F401 from ..fluid.optimizer import Optimizer # noqa: F401
from ..fluid.optimizer import Adam # noqa: F401 from ..fluid.optimizer import Adam # noqa: F401
from ..fluid.optimizer import ExponentialMovingAverage # noqa: F401 from ..fluid.optimizer import ExponentialMovingAverage # noqa: F401
from ..fluid.io import save # noqa: F401
from ..fluid.io import load # noqa: F401
from ..fluid.io import load_program_state # noqa: F401
from ..fluid.io import set_program_state # noqa: F401
from ..fluid.io import load_vars # noqa: F401
from ..fluid.io import save_vars # noqa: F401
from ..fluid.io import batch # noqa: F401
from ..fluid.contrib.layers import ctr_metric_bundle # noqa: F401 from ..fluid.contrib.layers import ctr_metric_bundle # noqa: F401
from ..fluid.layers import exponential_decay # noqa: F401 from ..fluid.layers import exponential_decay # noqa: F401
......
This diff is collapsed.
...@@ -15,8 +15,6 @@ ...@@ -15,8 +15,6 @@
import os import os
import unittest import unittest
import numpy as np
import paddle import paddle
from paddle.fluid.framework import IrGraph from paddle.fluid.framework import IrGraph
from paddle.framework import core from paddle.framework import core
...@@ -103,40 +101,6 @@ class TestGraph(unittest.TestCase): ...@@ -103,40 +101,6 @@ class TestGraph(unittest.TestCase):
_train(origin_binary) _train(origin_binary)
_train(backup_binary) _train(backup_binary)
checkponit_dir = "checkpoint_gpu" if use_cuda else "checkpoint_cpu"
def _set_zero(var_name, scope, place):
var = scope.find_var(var_name).get_tensor()
var_array = np.zeros(var._get_dims()).astype("float32")
var.set(var_array, place)
sum_before = np.sum(
np.array(
paddle.static.global_scope()
.find_var('conv2d_1.w_0')
.get_tensor()
)
)
paddle.fluid.io._save_persistable_nodes(exe, checkponit_dir, graph)
_set_zero('conv2d_1.w_0', paddle.static.global_scope(), place)
set_after = np.sum(
np.array(
paddle.static.global_scope()
.find_var('conv2d_1.w_0')
.get_tensor()
)
)
self.assertEqual(set_after, 0)
paddle.fluid.io._load_persistable_nodes(exe, checkponit_dir, graph)
sum_after = np.sum(
np.array(
paddle.static.global_scope()
.find_var('conv2d_1.w_0')
.get_tensor()
)
)
self.assertEqual(sum_before, sum_after)
marked_nodes = set() marked_nodes = set()
for op in graph.all_op_nodes(): for op in graph.all_op_nodes():
if op.name().find('conv2d') > -1: if op.name().find('conv2d') > -1:
......