Commit 6695a204 authored by xuwei06

Helper functions fetch_var and get_var

fetch_var: get the value of a variable with a given name
get_var: get the Variable with a given name
Parent 292c1951
@@ -17,7 +17,9 @@ import contextlib
from framework import Program, default_main_program
from . import core

-__all__ = ['Executor', 'global_scope', 'scope_guard', 'switch_scope']
+__all__ = [
+    'Executor', 'global_scope', 'scope_guard', 'switch_scope', 'fetch_var'
+]

g_scope = core.Scope()
@@ -80,12 +82,12 @@ def has_feed_operators(block, feed_targets, feed_holder_name):
    Args:
        block: a block instance (typically global block of a program)
        feed_targets: a dictionary of {feed_target_name: feed_target_data}
        feed_holder_name: the name of the variable that holds the data of
            all feed targets. The type of this feed_holder variable is
            FEED_MINIBATCH, which is essentially vector<LoDTensor>.

    Returns:
        A boolean value that indicates whether a block has feed operators
        that match the info contained in feed_targets and feed_holder_name.
    """
@@ -108,7 +110,7 @@ def has_feed_operators(block, feed_targets, feed_holder_name):


def has_fetch_operators(block, fetch_targets, fetch_holder_name):
    """ Check whether the block already has fetch operators.

    Return false if the block does not have any fetch operators.
    If some fetch operators have been appended to the block, check that
    the info contained in these fetch operators matches the fetch_targets
@@ -118,13 +120,13 @@ def has_fetch_operators(block, fetch_targets, fetch_holder_name):
    Args:
        block: a block instance (typically global block of a program)
        fetch_targets: a dictionary of {fetch_target_name: fetch_target_data}
        fetch_holder_name: the name of the variable that holds the data of
            all fetch targets. The type of this fetch_holder variable is
            FETCH_LIST, which is essentially vector<LoDTensor>.

    Returns:
        A boolean value that indicates whether a block has fetch operators
        that match the info contained in fetch_targets and fetch_holder_name.
    """
    fetch_count = 0
@@ -146,6 +148,30 @@ def has_fetch_operators(block, fetch_targets, fetch_holder_name):
    return fetch_count > 0
+
+
+def fetch_var(name, scope=None, return_numpy=True):
+    """
+    Fetch the value of the variable with the given name from the given scope.
+
+    Args:
+        name(str): name of the variable
+        scope(core.Scope|None): scope object.
+            If None, global_scope() will be used.
+        return_numpy(bool): whether to convert the tensor to a numpy.ndarray
+
+    Returns:
+        LoDTensor|numpy.ndarray
+    """
+    assert isinstance(name, str)
+    if scope is None:
+        scope = global_scope()
+    assert isinstance(scope, core.Scope)
+
+    # Look the variable up in the resolved scope, not unconditionally in
+    # global_scope(), so a caller-supplied scope is honored.
+    var = scope.find_var(name)
+    assert var is not None, "Cannot find '%s' in scope." % name
+    tensor = var.get_tensor()
+    if return_numpy:
+        tensor = as_numpy(tensor)
+    return tensor


class Executor(object):
    def __init__(self, places):
        if not isinstance(places, list) and not isinstance(places, tuple):
......
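For context, a minimal sketch of how the new fetch_var helper can be used. It mirrors the unit test added at the bottom of this commit; the variable name "x" and the int32 payload are illustrative.

import numpy
import paddle.v2.fluid as fluid
import paddle.v2.fluid.layers as layers

# Build a tiny program: a persistable int32 tensor named "x",
# filled by an assign op.
val = numpy.array([2, 4, 6]).astype(numpy.int32)
x = layers.create_tensor(dtype="int32", persistable=True, name="x")
layers.assign(input=val, output=x)

# Run the program once so "x" is materialized in the global scope.
exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_main_program(), feed={}, fetch_list=[])

# fetch_var reads the value back by name, as a numpy.ndarray by default;
# return_numpy=False would return the underlying LoDTensor instead.
fetched = fluid.fetch_var("x")
print(fetched)  # [2 4 6]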
@@ -31,6 +31,7 @@ __all__ = [
    'program_guard',
    'switch_startup_program',
    'switch_main_program',
+    'get_var',
]
EMPTY_VAR_NAME = core.kEmptyVarName()
@@ -1124,3 +1125,22 @@ def program_guard(main_program, startup_program=None):
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)
+
+
+def get_var(name, program=None):
+    """
+    Get a variable by name from the global block of a program.
+
+    Args:
+        name(str): name of the variable
+        program(Program|None): program object.
+            If None, default_main_program() will be used.
+
+    Returns:
+        Variable
+    """
+    if program is None:
+        program = default_main_program()
+    assert isinstance(name, str)
+    assert isinstance(program, Program)
+
+    return program.global_block().var(name)
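Where fetch_var returns a runtime value from a scope, get_var returns the compile-time Variable from a program description. A minimal sketch, assuming the same Fluid setup as above; get_var is addressed here through the framework submodule, though it may also be re-exported at package level.

import paddle.v2.fluid as fluid
import paddle.v2.fluid.layers as layers

x = layers.create_tensor(dtype="int32", persistable=True, name="x")

# Look the name up in the global block of default_main_program().
var = fluid.framework.get_var("x")
assert var.name == "x"

# An explicit program can also be passed.
var = fluid.framework.get_var("x", program=fluid.default_main_program())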
@@ -35,13 +35,15 @@ __all__ = [
]

-def create_tensor(dtype, name=None):
+def create_tensor(dtype, name=None, persistable=False):
     helper = LayerHelper("create_tensor", **locals())
-    return helper.create_variable(name=helper.name, dtype=dtype)
+    return helper.create_variable(
+        name=helper.name, dtype=dtype, persistable=persistable)


 def create_parameter(shape,
                      dtype,
+                     name=None,
                      attr=None,
                      is_bias=False,
                      default_initializer=None):
@@ -62,7 +64,7 @@ def create_parameter(shape,
     """
     helper = LayerHelper("create_parameter", **locals())
     if attr is None:
-        attr = ParamAttr()
+        attr = ParamAttr(name=name)
     return helper.create_parameter(attr, shape, dtype, is_bias,
                                    default_initializer)
......
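These layer changes are what the new helpers rely on: persistable=True keeps a tensor alive in the scope after an executor run so fetch_var can find it, and the new name argument gives create_parameter a predictable name to look up. A short sketch, with an illustrative shape and names:

import paddle.v2.fluid.layers as layers

# A persistable tensor survives in the scope after exe.run(),
# so fetch_var("x") can retrieve its value later.
x = layers.create_tensor(dtype="float32", persistable=True, name="x")

# name is forwarded to ParamAttr, so the parameter is created under a
# known name instead of an auto-generated one.
w = layers.create_parameter(shape=[3, 2], dtype="float32", name="w")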
import paddle.v2.fluid as fluid
import paddle.v2.fluid.layers as layers
import op_test
import numpy
import unittest


class TestFetchVar(op_test.OpTest):
    def test_fetch_var(self):
        val = numpy.array([1, 3, 5]).astype(numpy.int32)
        x = layers.create_tensor(dtype="int32", persistable=True, name="x")
        layers.assign(input=val, output=x)
        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_main_program(), feed={}, fetch_list=[])
        fetched_x = fluid.fetch_var("x")
        self.assertTrue(
            numpy.array_equal(fetched_x, val),
            "fetched_x=%s val=%s" % (fetched_x, val))
        self.assertEqual(fetched_x.dtype, val.dtype)


if __name__ == '__main__':
    unittest.main()