Unverified · Commit c2ad3815 authored by Yu Yang, committed by GitHub

Merge pull request #11560 from JiayiFeng/doc_non_layer_api

Doc of non layer api
......@@ -36,6 +36,25 @@ def _is_number_or_matrix_(var):
class WeightedAverage(object):
"""
Calculate weighted average.
The average is computed entirely in Python. This class does not
change Paddle's Program, nor does it modify the NN model's
configuration. It is purely a wrapper over Python functions.
Examples:
.. code-block:: python
avg = fluid.average.WeightedAverage()
avg.add(value=2.0, weight=1)
avg.add(value=4.0, weight=2)
avg.eval()
# The result is 3.333333333.
# For (2.0 * 1 + 4.0 * 2) / (1 + 2) = 3.333333333
"""
def __init__(self):
warnings.warn(
"The %s is deprecated, please use fluid.metrics.Accuracy instead." %
......
......@@ -147,7 +147,7 @@ def _addup_repetitive_outputs_(op_descs):
else:
if len(renamed_vars[var_name]) == 1:
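# Each repeated output is given a unique "@RENAME@<count>" suffix;
# the pending sum ops will later add the renamed copies back together.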
new_name = var_name + "@RENAME@" + \
str(var_rename_count[var_name])
var_rename_count[var_name] += 1
# rename original var_name
renamed_vars[var_name][0] = new_name
......@@ -155,7 +155,7 @@ def _addup_repetitive_outputs_(op_descs):
_rename_arg_(pending_sum_ops, var_name, new_name)
new_name = var_name + "@RENAME@" + \
str(var_rename_count[var_name])
var_rename_count[var_name] += 1
op_desc.rename_output(var_name, new_name)
renamed_vars[var_name].append(new_name)
......@@ -435,18 +435,65 @@ def _get_stop_gradients_(program):
def append_backward(loss, parameter_list=None, no_grad_set=None,
callbacks=None):
"""
Append backward part to main_program.
A complete neural network training is made up of forward and backward
propagation. However, when we configure a network, we only need to
specify its forward part. The backward part is generated automatically
according to the forward part by this function.
In most cases, users do not need to invoke this function manually. It
will be automatically invoked by the optimizer's `minimize` function.
Args:
loss(Variable): The loss variable of the network.
parameter_list(list[string]|None): Names of parameters that need
to be updated by optimizers.
If it is None, all parameters
will be updated.
Default: None
no_grad_set(set|None): Variables in the Block 0 whose gradients
should be ignored. All variables with
`step_gradient=True` from all blocks will
be automatically added into this set.
Default: None
callbacks(list[callable object]|None): The callbacks are used for
doing some custom jobs during
backward part building. All
callable objects in it will
be invoked once each time a
new gradient operator is added
into the program. The callable
object must have two input
parameters: 'block' and 'context'.
The 'block' is the block which
the new gradient operator will
be added to. The 'context' is a
map, whose keys are gradient
variable names and values are
corresponding original variables.
In addition to this, the 'context'
has another special key-value pair:
the key is string '__current_op_desc__'
and the value is the op_desc of the
gradient operator that has just
triggered the callable object.
Returns:
list[(Variable,Variable)]: Pairs of parameters and their
corresponding gradients. In each pair, the first element is the
parameter and the second is its gradient variable.
Raises:
AssertionError: If `loss` is not an instance of Variable.
Examples:
.. code-block:: python
# network configuration code
# ...
avg_loss = fluid.layers.mean(loss)
param_grad_list = fluid.backward.append_backward(loss=avg_loss)
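# A minimal sketch of a custom callback; `log_grad_op` is a hypothetical
# name, and the print body is just for illustration.
def log_grad_op(block, context):
    op_desc = context['__current_op_desc__']
    print("added gradient op: %s" % op_desc.type())
param_grad_list = fluid.backward.append_backward(
    loss=avg_loss, callbacks=[log_grad_op])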
"""
assert isinstance(loss, framework.Variable)
......
......@@ -30,20 +30,42 @@ __all__ = [
def is_parameter(var):
"""Check whether the variable is a Parameter.
This function checks whether the input variable is a Parameter.
"""
Check whether the given variable is an instance of Parameter.
Args:
var(Variable): The variable to be checked.
Returns:
bool: True if the given `var` is an instance of Parameter,
False if not.
Examples:
.. code-block:: python
param = fluid.default_main_program().global_block().var('fc.w')
res = fluid.io.is_parameter(param)
"""
return isinstance(var, Parameter)
def is_persistable(var):
"""
Check whether the given variable is persistable.
Args:
var(Variable): The variable to be checked.
Returns:
bool: True if the given `var` is persistable,
False if not.
Examples:
.. code-block:: python
param = fluid.default_main_program().global_block().var('fc.w')
res = fluid.io.is_persistable(param)
"""
if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
var.desc.type() == core.VarDesc.VarType.FETCH_LIST:
return False
......@@ -68,20 +90,69 @@ def save_vars(executor,
predicate=None,
filename=None):
"""
Save variables to the given directory by executor.
There are two ways to specify the variables to be saved: the first way is to
list variables in a list and assign it to `vars`; the second way is to assign
an existing program to `main_program`, in which case all variables in the
program will be saved. The first way has a higher priority. In other words, if
`vars` is assigned, `main_program` and `predicate` will be ignored.
The `dirname` is used to specify the folder where to save variables.
If you prefer to save variables in separate files in the folder `dirname`,
set `filename` None; if you prefer to save all variables in a single file,
use `filename` to specify it.
Args:
executor(Executor): The executor to run for saving variables.
dirname(str): The directory path.
main_program(Program|None): The program whose variables will be saved.
If it is None, the default main program will
be used automatically.
Default: None
vars(list[Variable]|None): The list that contains all variables to save.
It has a higher priority than the `main_program`.
Default: None
predicate(function|None): If it is not None, only variables in the
`main_program` that make predicate(variable)==True
will be saved. It only works when we are using the
`main_program` to specify variables (in other words,
`vars` is None).
Default: None
filename(str|None): The file to save all variables into. If you prefer
to save variables separately, set it to None.
Default: None
Returns:
None
Raises:
TypeError: If `main_program` is not an instance of Program nor None.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
# The first usage: using `main_program` to specify variables
def name_has_fc(var):
res = "fc" in var.name
return res
prog = fluid.default_main_program()
fluid.io.save_vars(executor=exe, dirname=param_path, main_program=prog,
vars=None, predicate=name_has_fc)
# All variables in `main_program` whose name includes "fc" will be saved.
# And variables are going to be saved separately.
# The second usage: using `vars` to specify variables
var_list = [var_a, var_b, var_c]
fluid.io.save_vars(executor=exe, dirname=param_path, vars=var_list,
filename="vars_file")
# var_a, var_b and var_c will be saved. And they are going to be
# saved in the same file named 'vars_file' in the path "./my_paddle_model".
"""
if vars is None:
if main_program is None:
......@@ -129,7 +200,42 @@ def save_vars(executor,
def save_params(executor, dirname, main_program=None, filename=None):
"""
This function filters out all parameters from the given `main_program`
and then saves them to the folder `dirname` or the file `filename`.
Use the `dirname` to specify the saving folder. If you would like to
save parameters in separate files, set `filename` None; if you would
like to save all parameters in a single file, use `filename` to specify
the file name.
NOTICE: Some variables are not Parameters but are necessary for
training. So you can NOT save and resume your training just by
`save_params()` and `load_params()`. Please use `save_persistables()`
and `load_persistables()` instead.
Args:
executor(Executor): The executor to run for saving parameters.
dirname(str): The saving directory path.
main_program(Program|None): The program whose parameters will be
saved. If it is None, the default
main program will be used automatically.
Default: None
filename(str|None): The file to save all parameters. If you prefer
to save parameters in different files, set it
to None.
Default: None
Returns:
None
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
fluid.io.save_params(executor=exe, dirname=param_path,
main_program=None)
"""
save_vars(
executor,
......@@ -142,7 +248,37 @@ def save_params(executor, dirname, main_program=None, filename=None):
def save_persistables(executor, dirname, main_program=None, filename=None):
"""
This function filters out all variables with `persistable==True` from the
given `main_program` and then saves these variables to the folder `dirname`
or file `filename`.
The `dirname` is used to specify the folder where persistable variables
are going to be saved. If you would like to save variables in separate
files, set `filename` None; if you would like to save all variables in a
single file, use `filename` to specify the file name.
Args:
executor(Executor): The executor to run for saving persistable variables.
dirname(str): The directory path.
main_program(Program|None): The program whose persistable variables will
be saved. If it is None, the default main
program will be used automatically.
Default: None
filename(str|None): The file to save all variables into. If you prefer to
save variables in different files, set it to None.
Default: None
Returns:
None
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
fluid.io.save_persistables(executor=exe, dirname=param_path,
main_program=None)
"""
save_vars(
executor,
......@@ -160,20 +296,69 @@ def load_vars(executor,
predicate=None,
filename=None):
"""
Load variables from the given directory by executor.
There are two ways to specify the variables to be loaded: the first way is to
list variables in a list and assign it to `vars`; the second way is to assign
an existing program to `main_program`, in which case all variables in the
program will be loaded. The first way has a higher priority. In other words, if
`vars` is assigned, `main_program` and `predicate` will be ignored.
The `dirname` is used to specify the folder where to load variables.
If variables were saved in separate files in the folder `dirname`,
set `filename` None; if all variables were saved in a single file,
use `filename` to specify it.
Args:
executor(Executor): The executor to run for loading variables.
dirname(str): The directory path.
main_program(Program|None): The program whose variables will be loaded.
If it is None, the default main program will
be used automatically.
Default: None
vars(list[Variable]|None): The list that contains all variables to load.
It has a higher priority than the `main_program`.
Default: None
predicate(function|None): If it is not None, only variables in the
`main_program` that make predicate(variable)==True
will be loaded. It only works when we are using the
`main_program` to specify variables (in other words,
`vars` is None).
Default: None
filename(str|None): The file which saved all required variables. If variables
were saved in different files, set it to None.
Default: None
Returns:
None
Raises:
TypeError: If `main_program` is not an instance of Program nor None.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
# The first usage: using `main_program` to specify variables
def name_has_fc(var):
res = "fc" in var.name
return res
prog = fluid.default_main_program()
fluid.io.load_vars(executor=exe, dirname=param_path, main_program=prog,
vars=None, predicate=name_has_fc)
# All variables in `main_program` whose name includes "fc" will be loaded.
# And all the variables are supposed to have been saved in different files.
# The second usage: using `vars` to specify variables
var_list = [var_a, var_b, var_c]
fluid.io.load_vars(executor=exe, dirname=param_path, vars=var_list,
filename="vars_file")
# var_a, var_b and var_c will be loaded. And they are supposed to have
# been saved in the same file named 'vars_file' in the path "./my_paddle_model".
"""
if vars is None:
if main_program is None:
......@@ -221,7 +406,42 @@ def load_vars(executor,
def load_params(executor, dirname, main_program=None, filename=None):
"""
This function filters out all parameters from the given `main_program`
and then tries to load these parameters from the folder `dirname` or
the file `filename`.
Use the `dirname` to specify the folder where parameters were saved. If
parameters were saved in separate files in the folder `dirname`, set
`filename` None; if all parameters were saved in a single file, use
`filename` to specify the file name.
NOTICE: Some variables are not Parameters but are necessary for
training. So you can NOT save and resume your training just by
`save_params()` and `load_params()`. Please use `save_persistables()`
and `load_persistables()` instead.
Args:
executor(Executor): The executor to run for loading parameters.
dirname(str): The directory path.
main_program(Program|None): The program whose parameters will be
loaded. If it is None, the default
main program will be used automatically.
Default: None
filename(str|None): The file which saved all parameters. If parameters
were saved in different files, set it to None.
Default: None
Returns:
None
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
fluid.io.load_params(executor=exe, dirname=param_path,
main_program=None)
"""
load_vars(
executor,
......@@ -233,7 +453,37 @@ def load_params(executor, dirname, main_program=None, filename=None):
def load_persistables(executor, dirname, main_program=None, filename=None):
"""
This function filters out all variables with `persistable==True` from the
given `main_program` and then tries to load these variables from the folder
`dirname` or the file `filename`.
Use the `dirname` to specify the folder where persistable variables were
saved. If variables were saved in separate files, set `filename` None;
if all variables were saved in a single file, use `filename` to specify
the file name.
Args:
executor(Executor): The executor to run for loading persistable variables.
dirname(str): The directory path.
main_program(Program|None): The program whose persistable variables will
be loaded. If it is None, the default main
program will be used automatically.
Default: None
filename(str|None): The file which saved all variables. If variables were
saved in different files, set it to None.
Default: None
Returns:
None
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
fluid.io.load_persistables(executor=exe, dirname=param_path,
main_program=None)
"""
load_vars(
executor,
......@@ -306,22 +556,48 @@ def save_inference_model(dirname,
model_filename=None,
params_filename=None):
"""
Prune the given `main_program` to build a new program especially for
inference, and then save it and all related parameters to the given
`dirname` by the `executor`.
Args:
dirname(str): The directory path to save the inference model.
feeded_var_names(list[str]): Names of variables that need to be fed data
during inference.
target_vars(list[Variable]): Variables from which we can get inference
results.
executor(Executor): The executor that saves the inference model.
main_program(Program|None): The original program, which will be pruned to
build the inference model. If it is set to None,
the default main program will be used.
Default: None.
model_filename(str|None): The name of the file to save the inference program
itself. If it is set to None, a default filename
`__model__` will be used.
params_filename(str|None): The name of the file to save all related parameters.
If it is set to None, parameters will be saved
in separate files.
Returns:
None
Raises:
ValueError: If `feeded_var_names` is not a list of basestring.
ValueError: If `target_vars` is not a list of Variable.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
path = "./infer_model"
fluid.io.save_inference_model(dirname=path, feeded_var_names=['img'],
target_vars=[predict_var], executor=exe)
# In this example, the function will prune the default main program
# to make it suitable for inferring the `predict_var`. The pruned
# inference program is going to be saved in "./infer_model/__model__"
# and parameters are going to be saved in separate files under the
# folder "./infer_model".
"""
if isinstance(feeded_var_names, basestring):
feeded_var_names = [feeded_var_names]
......@@ -382,18 +658,49 @@ def load_inference_model(dirname,
"""
Load the inference model from a given directory.
Args:
dirname(str): The directory path.
executor(Executor): The executor to run for loading inference model.
model_filename(str|None): The name of file to load inference program.
If it is None, the default filename
'__model__' will be used.
Default: None
params_filename(str|None): The name of the file to load all parameters from.
It is only used for the case that all
parameters were saved in a single binary
file. If parameters were saved in separate
files, set it to None.
Returns:
tuple: The return of this function is a tuple with three elements:
(program, feed_target_names, fetch_targets). The `program` is a
Program, it's the program for inference. The `feed_target_names` is
a list of str, and it contains names of variables that need to feed
data in the inference program. The `fetch_targets` is a list of
Variable. It contains variables from which we can get inference
results.
Raises:
ValueError: If `dirname` is not an existing directory.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
path = "./infer_model"
[inference_program, feed_target_names, fetch_targets] = \
fluid.io.load_inference_model(dirname=path, executor=exe)
results = exe.run(inference_program,
feed={feed_target_names[0]: tensor_img},
fetch_list=fetch_targets)
# In this example, the inference program was saved in the
# "./infer_model/__model__" and parameters were saved in
# separate files in "./infer_model".
# After getting inference program, feed target names and
# fetch targets, we can use an Executor to run the inference
# program to get the inference result.
"""
if not os.path.isdir(dirname):
raise ValueError("There is no directory named '%s'", dirname)
......@@ -424,12 +731,25 @@ def load_inference_model(dirname,
def get_parameter_value(para, executor):
"""
Get the LoDTensor value of the given parameter.
Args:
para(Parameter): The parameter to get value from.
executor(Executor): The executor to run for retrieving the value.
Returns:
numpy.array: The given parameter's values.
Raises:
AssertionError: If the `para` is not an instance of Parameter.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param = fluid.default_main_program().global_block().var('fc.w')
p = fluid.io.get_parameter_value(param, exe)
"""
assert is_parameter(para)
......@@ -441,14 +761,30 @@ def get_parameter_value(para, executor):
def get_parameter_value_by_name(name, executor, program=None):
"""
Get the LoDTensor value of a certain parameter by its name.
Args:
name(str): The parameter's name.
executor(Executor): The executor to run for retrieving the value.
program(Program|None): The program in which to find the parameter.
If it is set to None, the function will
try to find the parameter in the default
main program.
Returns:
numpy.array: The parameter's values.
Raises:
TypeError: If given `name` is not an instance of basestring.
TypeError: If the parameter with the given name doesn't exist.
AssertionError: If there is a variable named `name` in the
given program but it is not a Parameter.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
p = fluid.io.get_parameter_value_by_name('fc.w', exe)
"""
if program is None:
program = default_main_program()
......@@ -470,16 +806,58 @@ def save_checkpoint(executor,
main_program=None,
max_num_checkpoints=3):
"""
This function filters out all checkpoint variables from the given
main_program and then saves these variables to the `checkpoint_dir`
directory.
In the training process, we generally save a checkpoint in each
iteration. So there might be a lot of checkpoints in the
`checkpoint_dir`. To avoid them taking too much disk space, the
`max_num_checkpoints` argument is introduced to limit the total number
of checkpoints. If the number of existing checkpoints is greater than
`max_num_checkpoints`, the oldest ones will be deleted.
A variable is a checkpoint variable and will be saved if it meets
all of the following conditions:
1. It is persistable.
2. Its type is neither FEED_MINIBATCH, FETCH_LIST, nor RAW.
3. Its name contains no "@GRAD", ".trainer_", nor ".block".
Args:
executor(Executor): The executor to run for save checkpoint.
checkpoint_dir(str): The folder where to save checkpoints.
trainer_id(int): The current trainer id. If the id is equal to 0,
the trainer is the chief.
trainer_args(dict|None): Current training arguments. Such as 'epoch_id'
and 'step_id'.
Default: None
main_program(Program|None): The program whose checkpoint variables will
be saved. If it is None, the default main program will be used.
max_num_checkpoints(int): The maximum number of existing checkpoints
to keep.
Default: 3
Returns:
None
Raises:
ValueError: If `checkpoint_dir` is None.
AssertionError: If `trainer_args` is not a dict.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
path = "./checkpoints"
prog = fluid.default_main_program()
trainer_args = {"epoch_id": 200,
"step_id": 20} # just an example
fluid.io.save_checkpoint(executor=exe,
checkpoint_dir=path,
trainer_id=0,
trainer_args=trainer_args,
main_program=prog,
max_num_checkpoints=3)
"""
if checkpoint_dir is None:
raise ValueError("'checkpoint_dir' should not be None")
......@@ -503,13 +881,50 @@ def save_checkpoint(executor,
def load_checkpoint(executor, checkpoint_dir, serial, main_program):
"""
This function filters out all checkpoint variables from the given
main_program and then tries to load these variables from the
`checkpoint_dir` directory.
In the training process, we generally save a checkpoint in each
iteration. So there may be more than one checkpoint in the
`checkpoint_dir` (each checkpoint has its own sub-folder); use
`serial` to specify which serial of checkpoint you would like to
load.
A variable is a checkpoint variable and will be loaded if it meets
all of the following conditions:
1. It is persistable.
2. Its type is neither FEED_MINIBATCH, FETCH_LIST, nor RAW.
3. Its name contains no "@GRAD", ".trainer_", nor ".block".
Args:
executor(Executor): The executor to run for loading checkpoint.
checkpoint_dir(str): The folder where all checkpoints are.
serial(int): The serial of checkpoint you would like to load.
main_program(Program): The program whose checkpoint variables will
be loaded.
Returns:
None
Raises:
ValueError: If `checkpoint_dir` is None.
ValueError: If `serial` is None or `serial` is less than 0.
ValueError: If `main_program` is None.
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
path = "./checkpoints"
prog = fluid.default_main_program()
fluid.io.load_checkpoint(executor=exe, checkpoint_dir=path,
serial=9, main_program=prog)
# In this example, the `load_checkpoint` function
# will first filter out all checkpoint variables in the default
# main program, and then try to load these variables from the
# folder "./checkpoints/checkpoint_9/__model__".
"""
if checkpoint_dir is None:
......@@ -528,10 +943,10 @@ def load_checkpoint(executor, checkpoint_dir, serial, main_program):
def clean_checkpoint(checkpoint_dir, delete_dir=False):
"""
Clean the checkpoint directory. When training exits normally, the trainer
calls clean_checkpoint to delete the checkpoint directory saved before.
delete_dir only works when the directory is empty; otherwise, an OSError
is raised.
Args:
checkpoint_dir(str): The folder where checkpoints were saved.
delete_dir(bool): Whether to delete the (empty) checkpoint directory
itself. Default: False
Returns:
None
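Examples:
.. code-block:: python
# A minimal usage sketch; it assumes training has finished and
# "./checkpoints" is the folder holding previously saved checkpoints.
path = "./checkpoints"
fluid.io.clean_checkpoint(checkpoint_dir=path, delete_dir=True)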
"""
if checkpoint_dir is None:
......@@ -547,13 +962,40 @@ def load_persist_vars_without_grad(executor,
program,
has_model_dir=False):
"""
This function filters out all checkpoint variables from the given
program and then tries to load these variables from the given directory.
A variable is a checkpoint variable if it meets all of the following
conditions:
1. It is persistable.
2. Its type is neither FEED_MINIBATCH, FETCH_LIST, nor RAW.
3. Its name contains no "@GRAD", ".trainer_", nor ".block".
Args:
executor(Executor): The executor to run for loading variables.
dirname(str): The directory path.
program(Program): The program whose checkpoint variables will
be loaded.
has_model_dir(bool): If True, the function loads variables
from a sub-directory named '__model__'.
Default: False
Returns:
None
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
fluid.io.load_persist_vars_without_grad(executor=exe,
dirname=param_path, program=prog, has_model_dir=True)
# In this example, the `load_persist_vars_without_grad` function
# will first filter out all checkpoint variables in the default
# main program, and then try to load these variables from the
# folder "./my_paddle_model/__model__".
"""
if has_model_dir:
......@@ -569,12 +1011,38 @@ def load_persist_vars_without_grad(executor,
def save_persist_vars_without_grad(executor, dirname, program):
"""
This function filters out all checkpoint variables from the given
program and then saves these variables to a sub-folder '__model__' of
the given directory.
A variable is a checkpoint variable if it meets all of the following
conditions:
1. It is persistable.
2. Its type is neither FEED_MINIBATCH, FETCH_LIST, nor RAW.
3. Its name contains no "@GRAD", ".trainer_", nor ".block".
Args:
executor(Executor): The executor to run for saving variables.
dirname(str): The directory path.
program(Program): The program whose checkpoint variables will
be saved.
Returns:
None
Examples:
.. code-block:: python
exe = fluid.Executor(fluid.CPUPlace())
param_path = "./my_paddle_model"
prog = fluid.default_main_program()
fluid.io.save_persist_vars_without_grad(executor=exe,
dirname=param_path, program=prog)
# In this example, the `save_persist_vars_without_grad` function
# will first filter out all checkpoint variables in the default
# main program, and then save these variables to the folder
# "./my_paddle_model/__model__".
"""
cur_dir = _get_model_dir(dirname)
save_vars(
......@@ -620,7 +1088,7 @@ def _is_checkpoint_var(var):
The checkpoint will not save or load all variables: variables whose
type is FEED_MINIBATCH, FETCH_LIST, or RAW, or whose name ends with
@GRAD, are discarded.
:param var: The variable to be checked.
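A plausible usage sketch (mirroring how save_persist_vars_without_grad
passes a predicate into save_vars; the surrounding names are illustrative):
.. code-block:: python
# Save only the checkpoint variables of `prog` to `cur_dir`.
save_vars(exe, dirname=cur_dir, main_program=prog,
vars=None, predicate=_is_checkpoint_var)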
"""
if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
......@@ -701,7 +1169,7 @@ def _write_success(dirname):
"""
Write an empty file named "_SUCCESS" in the checkpoint dir to indicate
that this checkpoint is complete.
:param dirname: The checkpoint directory.
"""
success_file = os.path.join(dirname, SUCCESS_MARK_FILENAME)
with open(success_file, 'a') as f:
......@@ -713,7 +1181,7 @@ def get_latest_checkpoint_serial(checkpoint_dir):
"""
Get the serial number of the latest valid checkpoint in the checkpoint
directory; the _SUCCESS file must exist in that checkpoint's directory.
:param checkpoint_dir: The checkpoint directory.
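A minimal usage sketch (assuming the function is exported via fluid.io and
"./checkpoints" holds checkpoints saved by save_checkpoint):
.. code-block:: python
serial = fluid.io.get_latest_checkpoint_serial("./checkpoints")
# serial is -1 when no valid checkpoint directory is found.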
"""
if not checkpoint_dir:
return -1
......