Unverified · Commit 0aceeee1, authored by Yu Yang, committed by GitHub

Feature/remove g program (#5930)

* Unify fluid submodules into the fluid module

Change the book examples to just use `import fluid` instead of importing submodules

* Remove g_main_program/g_startup_program

Use default_main_program/default_startup_program instead

* Typo

* Fix CI
Parent 0a8a86e0
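The change is mechanical throughout the diff below: the module-level globals g_main_program and g_startup_program leave the public API, and every call site switches to the accessor functions default_main_program() and default_startup_program(). A minimal before/after sketch of the migration, assuming the paddle.v2.fluid package at this commit is installed (variable names such as exe are illustrative, not taken from the patch):

# Before this commit: the global Program objects were imported directly.
#   from paddle.v2.fluid.framework import g_main_program, g_startup_program
#   exe.run(g_startup_program)
#   block = g_main_program.global_block()

# After this commit: only the accessor functions are exported.
import paddle.v2.fluid.core as core
from paddle.v2.fluid.executor import Executor
from paddle.v2.fluid.framework import default_main_program, default_startup_program

exe = Executor(core.CPUPlace())
exe.run(default_startup_program())               # run parameter initialization
block = default_main_program().global_block()    # inspect the default main program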
@@ -26,9 +26,9 @@ class Evaluator(object):
         name(str): The name of evaluator. such as, "accuracy". Used for generate
             temporary variable name.
         main_program(Program, optional): The evaluator should be added to this
-            main_program. Default g_main_program
+            main_program. Default default_main_program()
         startup_program(Program, optional):The parameter should be added to this
-            startup_program. Default g_startup_program
+            startup_program. Default default_startup_program()
     Attributes:
         states(list): The list of state variables. states will be reset to zero
......
 import numpy as np
 from . import core
-from framework import Program, g_main_program
+from framework import Program, default_main_program
 __all__ = ['Executor', 'g_scope']
@@ -103,7 +103,7 @@ class Executor(object):
             fetch_list = []
         if program is None:
-            program = g_main_program
+            program = default_main_program()
         if not isinstance(program, Program):
             raise TypeError()
......
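As the hunk above shows, Executor.run now falls back to default_main_program() when no program argument is given, so passing the program explicitly becomes optional. A small sketch of the two now-equivalent call styles (a hedged illustration only: the empty feed/fetch_list merely exercise the fallback; a real caller would pass its own tensors and fetch variables):

import paddle.v2.fluid.core as core
from paddle.v2.fluid.executor import Executor
from paddle.v2.fluid.framework import default_main_program

exe = Executor(core.CPUPlace())
# Explicit program, spelled via the accessor:
exe.run(default_main_program(), feed={}, fetch_list=[])
# Implicit program; Executor.run substitutes default_main_program() itself:
exe.run(feed={}, fetch_list=[])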
@@ -6,7 +6,7 @@ import proto.framework_pb2 as framework_pb2
 __all__ = [
     'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
-    'default_main_program', 'g_startup_program', 'g_main_program'
+    'default_main_program'
 ]
@@ -654,13 +654,13 @@ class Parameter(Variable):
 # program is a global instance.
-g_main_program = Program()
-g_startup_program = Program()
+_main_program_ = Program()
+_startup_program_ = Program()
 def default_startup_program():
-    return g_startup_program
+    return _startup_program_
 def default_main_program():
-    return g_main_program
+    return _main_program_
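The framework change above keeps the two Program singletons but makes them module-private and routes all access through the accessor functions, which is also why g_startup_program/g_main_program disappear from __all__. A standalone sketch of the same pattern in plain Python (Program is stubbed out here purely to illustrate the design choice):

# Illustrative module, not PaddlePaddle code: `Program` is a stand-in class.
class Program(object):
    pass

__all__ = ['default_main_program', 'default_startup_program']

# Module-private singletons; the underscores signal "do not import directly".
_main_program_ = Program()
_startup_program_ = Program()


def default_main_program():
    return _main_program_


def default_startup_program():
    return _startup_program_

Exposing only the accessors means callers never hold a direct reference to the global object, so the default could later be swapped behind the function without touching call sites.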
 import os
 import cPickle as pickle
-from paddle.v2.fluid.framework import Program, Parameter, g_main_program, \
-    Variable
+from paddle.v2.fluid.framework import Program, Parameter, default_main_program, Variable
 __all__ = [
     'save_vars', 'save_params', 'save_persistables', 'load_vars', 'load_params',
@@ -46,7 +45,7 @@ def save_vars(executor, dirname, main_program=None, vars=None, predicate=None):
     """
     if vars is None:
         if main_program is None:
-            main_program = g_main_program
+            main_program = default_main_program()
         if not isinstance(main_program, Program):
             raise TypeError("program should be as Program type or None")
@@ -98,7 +97,7 @@ def load_vars(executor, dirname, main_program=None, vars=None, predicate=None):
     :param executor: executor that save variable
     :param dirname: directory path
     :param main_program: program. If vars is None, then filter all variables in this
-        program which fit `predicate`. Default g_program.
+        program which fit `predicate`. Default default_main_program().
     :param predicate: The Predicate describes a callable that returns a variable
         as a bool. If it returns true, the variables will be loaded.
     :param vars: variables need to be loaded. If specify vars, program &
@@ -107,7 +106,7 @@ def load_vars(executor, dirname, main_program=None, vars=None, predicate=None):
     """
     if vars is None:
         if main_program is None:
-            main_program = g_main_program
+            main_program = default_main_program()
         if not isinstance(main_program, Program):
             raise TypeError("program's type should be Program")
@@ -154,7 +153,7 @@ def load_persistables(executor, dirname, main_program=None):
 def get_inference_program(target_vars, main_program=None):
     if main_program is None:
-        main_program = g_main_program
+        main_program = default_main_program()
     if not isinstance(target_vars, list):
         target_vars = [target_vars]
@@ -177,12 +176,12 @@ def save_inference_model(dirname,
     :param target_vars: Variables from which we can get inference results.
     :param executor: executor that save inference model
     :param main_program: original program, which will be pruned to build the inference model.
-        Default g_main_program.
+        Default default_main_program().
     :return: None
     """
     if main_program is None:
-        main_program = g_main_program
+        main_program = default_main_program()
     if not isinstance(target_vars, list):
         target_vars = [target_vars]
@@ -272,10 +271,10 @@ def get_parameter_value_by_name(name, executor, program=None):
     :param executor: executor for retrieving the value
     :param name: the name of the parameter
     :param program: the program where the variable is found
-        Default g_main_program.
+        Default default_main_program().
     :return: the LoDTensor for the variable
     """
     if program is None:
-        program = g_main_program
+        program = default_main_program()
     var = program.global_block().var(name)
     return get_parameter_value(var, executor)
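Every io entry point above now treats main_program=None as "use default_main_program()", so checkpointing code no longer needs to import a global Program. A hedged usage sketch (the directory path is made up, and save_persistables is assumed to mirror the load_persistables signature shown in the hunk header above):

import paddle.v2.fluid.core as core
import paddle.v2.fluid.io as io
from paddle.v2.fluid.executor import Executor

exe = Executor(core.CPUPlace())
# main_program is omitted in both calls, so they fall back to
# default_main_program(), exactly as save_vars/load_vars do above.
io.save_persistables(exe, dirname="./model_dir")   # "./model_dir" is illustrative
io.load_persistables(exe, dirname="./model_dir")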
 import copy
 import itertools
-from framework import Variable, g_main_program, \
-    g_startup_program, unique_name, dtype_is_floating
+from framework import Variable, default_main_program, default_startup_program, unique_name, dtype_is_floating
 from paddle.v2.fluid.initializer import Constant, Xavier
@@ -22,7 +21,7 @@ class LayerHelper(object):
     def main_program(self):
         prog = self.kwargs.get('main_program', None)
         if prog is None:
-            return g_main_program
+            return default_main_program()
         else:
             return prog
@@ -30,7 +29,7 @@ class LayerHelper(object):
     def startup_program(self):
         prog = self.kwargs.get('startup_program', None)
         if prog is None:
-            return g_startup_program
+            return default_startup_program()
         else:
             return prog
......
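Because the two LayerHelper properties above now fall back to the accessors, any layer built without explicit main_program/startup_program kwargs is attached to the default programs. A standalone sketch of that fallback in plain Python (resolve_program is a made-up helper name used only to illustrate the pattern, not part of the patch):

def resolve_program(kwargs, key, default_factory):
    # An explicit keyword argument wins; otherwise the process-wide default
    # is used, mirroring LayerHelper.main_program / startup_program above.
    prog = kwargs.get(key, None)
    return prog if prog is not None else default_factory()

# resolve_program({}, 'main_program', default_main_program)
#     -> default_main_program()
# resolve_program({'main_program': my_prog}, 'main_program', default_main_program)
#     -> my_prog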
-from . import core
+import core
 import proto.framework_pb2 as framework_pb2
 from framework import OpProtoHolder, Variable, Program, Operator
 from initializer import Constant, Normal, Xavier
......
@@ -3,7 +3,7 @@ import paddle.v2.fluid.core as core
 import paddle.v2.fluid.layers as layers
 from paddle.v2.fluid.executor import Executor
 from paddle.v2.fluid.backward import append_backward_ops
-from paddle.v2.fluid.framework import g_main_program
+from paddle.v2.fluid.framework import default_main_program
 import numpy
@@ -66,7 +66,7 @@ class TestArrayReadWrite(unittest.TestCase):
         append_backward_ops(total_sum_scaled)
-        g_vars = map(g_main_program.global_block().var,
+        g_vars = map(default_main_program().global_block().var,
                      [each_x.name + "@GRAD" for each_x in x])
         g_out = [
             item.sum()
......
 import unittest
 import paddle.v2.fluid.layers as layers
 import paddle.v2.fluid.core as core
-from paddle.v2.fluid.framework import g_startup_program, g_main_program
+from paddle.v2.fluid.framework import default_startup_program, default_main_program
 from paddle.v2.fluid.executor import Executor
 from paddle.v2.fluid.backward import append_backward_ops
 import numpy
@@ -19,7 +19,7 @@ class ConditionalBlock(unittest.TestCase):
         cpu = core.CPUPlace()
         exe = Executor(cpu)
-        exe.run(g_startup_program)
+        exe.run(default_startup_program())
         x = numpy.random.random(size=(10, 1)).astype('float32')
@@ -29,7 +29,9 @@ class ConditionalBlock(unittest.TestCase):
         append_backward_ops(loss=loss)
         outs = exe.run(
             feed={'X': x},
-            fetch_list=[g_main_program.block(0).var(data.name + "@GRAD")])[0]
+            fetch_list=[
+                default_main_program().block(0).var(data.name + "@GRAD")
+            ])[0]
         print outs
......
 import unittest
-from paddle.v2.fluid.layers import mul, data, sequence_pool
+import numpy
 import paddle.v2.fluid.core as core
 from paddle.v2.fluid.executor import Executor
-from paddle.v2.fluid.framework import g_main_program
-import numpy
+from paddle.v2.fluid.layers import mul, data
 class TestExecutor(unittest.TestCase):
@@ -19,10 +20,7 @@ class TestExecutor(unittest.TestCase):
         a_np = numpy.random.random((100, 784)).astype('float32')
         b_np = numpy.random.random((784, 100)).astype('float32')
         exe = Executor(place)
-        outs = exe.run(g_main_program,
-                       feed={'a': a_np,
-                             'b': b_np},
-                       fetch_list=[out])
+        outs = exe.run(feed={'a': a_np, 'b': b_np}, fetch_list=[out])
         out = outs[0]
         self.assertEqual((100, 100), out.shape)
         self.assertTrue(numpy.allclose(out, numpy.dot(a_np, b_np)))
......
 from paddle.v2.fluid.layers import lod_rank_table, data
 from paddle.v2.fluid.executor import Executor
-from paddle.v2.fluid.framework import g_main_program
 import paddle.v2.fluid.core as core
 import numpy
 import unittest
@@ -18,7 +17,7 @@ class TestLoDRankTable(unittest.TestCase):
         tensor = core.LoDTensor()
         tensor.set(numpy.random.random(size=(17, 100)), cpu)
         tensor.set_lod([[0, 1, 3], [0, 5, 6, 7], [0, 3, 4, 9, 10, 13, 16, 17]])
-        exe.run(g_main_program, scope=scope, feed={'x': tensor})
+        exe.run(scope=scope, feed={'x': tensor})
         var = scope.find_var(rank_table.name)
         table = var.get_lod_rank_table()
         self.assertEqual([(0, 5), (1, 1), (2, 1)], table.items())
......
 import unittest
-from paddle.v2.fluid.framework import Variable, Program, g_main_program
 import paddle.v2.fluid.core as core
+from paddle.v2.fluid.framework import Program, default_startup_program
+main_program = default_startup_program()
 class TestOperator(unittest.TestCase):
     def test_error_type(self):
-        block = g_main_program.create_block()
+        block = main_program.create_block()
         try:
             block.append_op()
             self.assertFail()
......
 import unittest
-from paddle.v2.fluid.framework import g_main_program
+from paddle.v2.fluid.framework import default_main_program
 import paddle.v2.fluid.core as core
 from paddle.v2.fluid.executor import Executor
 import paddle.v2.fluid.io as io
 from paddle.v2.fluid.initializer import ConstantInitializer
 import numpy as np
+main_program = default_main_program()
 class TestParameter(unittest.TestCase):
     def test_param(self):
         shape = [784, 100]
         val = 1.0625
-        b = g_main_program.global_block()
+        b = main_program.global_block()
         param = b.create_parameter(
             name='fc.w',
             shape=shape,
@@ -23,9 +25,9 @@ class TestParameter(unittest.TestCase):
         self.assertEqual(core.DataType.FP32, param.dtype)
         self.assertEqual(0, param.block.idx)
         exe = Executor(core.CPUPlace())
-        p = exe.run(g_main_program, fetch_list=[param])[0]
+        p = exe.run(main_program, fetch_list=[param])[0]
         self.assertTrue(np.allclose(p, np.ones(shape) * val))
-        p = io.get_parameter_value_by_name('fc.w', exe, g_main_program)
+        p = io.get_parameter_value_by_name('fc.w', exe, main_program)
         self.assertTrue(np.allclose(np.array(p), np.ones(shape) * val))
......
 from __future__ import print_function
 import unittest
-from paddle.v2.fluid.framework import Program
-from paddle.v2.fluid.framework import g_main_program
+from paddle.v2.fluid.framework import Program, default_main_program
 import paddle.v2.fluid.layers as layers
+main_program = default_main_program()
 class TestProgram(unittest.TestCase):
     def test_program(self):
-        b = g_main_program.current_block()
+        b = main_program.current_block()
         self.assertEqual(-1, b.parent_idx)
         self.assertEqual(0, b.idx)
-        b = g_main_program.create_block()
+        b = main_program.create_block()
         self.assertEqual(1, b.idx)
         self.assertEqual(0, b.parent_idx)
-        b = g_main_program.create_block()
+        b = main_program.create_block()
         self.assertEqual(2, b.idx)
         self.assertEqual(1, b.parent_idx)
-        g_main_program.rollback()
-        b = g_main_program.current_block()
+        main_program.rollback()
+        b = main_program.current_block()
         self.assertEqual(1, b.idx)
         self.assertEqual(0, b.parent_idx)
-        b = g_main_program.create_block()
+        b = main_program.create_block()
         self.assertEqual(3, b.idx)
         self.assertEqual(1, b.parent_idx)
-        g_main_program.rollback()
-        b = g_main_program.current_block()
+        main_program.rollback()
+        b = main_program.current_block()
         self.assertEqual(1, b.idx)
         self.assertEqual(0, b.parent_idx)
......
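The updated test above also documents how the default program's block stack behaves: create_block() pushes a child of the current block and rollback() pops back to its parent. A short sketch of that interaction, with assertions following the ones in the test (run on a fresh default program):

from paddle.v2.fluid.framework import default_main_program

prog = default_main_program()
root = prog.current_block()     # block 0 on a fresh program, parent_idx == -1
child = prog.create_block()     # pushes a new block whose parent is `root`
assert child.parent_idx == root.idx

prog.rollback()                 # pops back to the parent block
assert prog.current_block().idx == root.idx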
@@ -3,9 +3,11 @@ import paddle.v2.fluid.core as core
 from paddle.v2.fluid.executor import Executor
 import paddle.v2.fluid.layers as layers
 from paddle.v2.fluid.backward import append_backward_ops
-from paddle.v2.fluid.framework import g_main_program
+from paddle.v2.fluid.framework import default_main_program
 import numpy
+main_program = default_main_program()
 class TestShrinkRNNMemory(unittest.TestCase):
     def test_shrink_rnn_memory(self):
@@ -36,7 +38,7 @@ class TestShrinkRNNMemory(unittest.TestCase):
         append_backward_ops(loss=mem3_mean)
         x_grad = exe.run(
             feed={'x': tensor},
-            fetch_list=[g_main_program.global_block().var('x@GRAD')])[0]
+            fetch_list=[main_program.global_block().var('x@GRAD')])[0]
         self.assertAlmostEqual(1.0, x_grad.sum(), delta=0.1)
......
 import unittest
-from paddle.v2.fluid.framework import g_main_program, Program, convert_np_dtype_to_dtype_
+from paddle.v2.fluid.framework import default_main_program, Program, convert_np_dtype_to_dtype_
 import paddle.v2.fluid.core as core
 import numpy as np
@@ -18,7 +18,7 @@ class TestVariable(unittest.TestCase):
         self.assertRaises(ValueError, lambda: convert("int8"))
     def test_var(self):
-        b = g_main_program.current_block()
+        b = default_main_program().current_block()
         w = b.create_var(
             dtype="float64", shape=[784, 100], lod_level=0, name="fc.w")
         self.assertNotEqual(str(w), "")
......