Commit 1faf5e03 authored by Yu Yang, committed by GitHub

Merge pull request #2864 from reyoung/feature/op_creation_methods

Get OpProtos in Python
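In short, this change exposes every registered operator's OpProto to Python: OpRegistry::protos() moves into the public section of the registry, the pybind "core" module gains a get_all_op_protos() function that serializes each proto to a string, and a new Python helper parses those strings back into OpProto messages. A rough sketch of the resulting flow, using the module paths added in this diff:

    import paddle.v2.framework.create_op_creation_methods as creation

    # Each element is an op_proto_pb2.OpProto describing one registered operator.
    for op_proto in creation.get_all_op_protos():
        assert op_proto.IsInitialized()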
@@ -216,17 +216,17 @@ class OpRegistry {
     return op;
   }
+  static std::unordered_map<std::string, OpProto>& protos() {
+    static std::unordered_map<std::string, OpProto> protos_;
+    return protos_;
+  };
  private:
   static std::unordered_map<std::string, OpCreator>& creators() {
     static std::unordered_map<std::string, OpCreator> creators_;
     return creators_;
   }
-  static std::unordered_map<std::string, OpProto>& protos() {
-    static std::unordered_map<std::string, OpProto> protos_;
-    return protos_;
-  };
   static std::unordered_map<std::string, OpAttrChecker>& op_checkers() {
     static std::unordered_map<std::string, OpAttrChecker> op_checkers_;
     return op_checkers_;
...
-cc_library(paddle_pybind SHARED SRCS pybind.cc DEPS pybind python)
+cc_library(paddle_pybind SHARED SRCS pybind.cc DEPS pybind python add_op)
@@ -13,12 +13,18 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 #include <Python.h>
+#include <paddle/framework/op_registry.h>
 #include <paddle/framework/scope.h>
 #include <pybind11/pybind11.h>
+#include <pybind11/stl.h>
+#include <fstream>
+#include <vector>
 namespace py = pybind11;
 namespace pd = paddle::framework;
+USE_OP(add_two);
 PYBIND11_PLUGIN(core) {
   py::module m("core", "C++ core of Paddle Paddle");
@@ -43,5 +49,20 @@ All parameter, weight, gradient are variables in Paddle.
            &pd::Scope::CreateVariable,
            py::return_value_policy::reference);
+  //! @note: Be careful! PyBind will return std::string as an unicode, not
+  //! Python str. If you want a str object, you should cast them in Python.
+  m.def("get_all_op_protos", []() -> std::vector<std::string> {
+    auto& protos = pd::OpRegistry::protos();
+    std::vector<std::string> ret_values;
+    for (auto it = protos.begin(); it != protos.end(); ++it) {
+      PADDLE_ENFORCE(it->second.IsInitialized(),
+                     "OpProto must all be initialized");
+      ret_values.emplace_back();
+      PADDLE_ENFORCE(it->second.SerializeToString(&ret_values.back()),
+                     "Serialize OpProto Error. This could be a bug of Paddle.");
+    }
+    return ret_values;
+  });
   return m.ptr();
 }
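As the note in the hunk above warns, pybind11 hands these serialized protos back to Python as unicode objects rather than plain str, so they have to be cast before protobuf can parse them; the Python helper added later in this diff does exactly that. A minimal sketch:

    import paddle.v2.framework.core as core
    import paddle.v2.framework.proto.op_proto_pb2 as op_proto_pb2

    for pbstr in core.get_all_op_protos():
        # pbstr arrives as unicode; cast to str before parsing the wire format.
        op_proto = op_proto_pb2.OpProto.FromString(str(pbstr))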
@@ -20,7 +20,6 @@ import trainer
 import event
 import data_type
 import topology
-import data_feeder
 import networks
 import evaluator
 from . import dataset
@@ -31,7 +30,6 @@ import op
 import pooling
 import inference
 import networks
-import py_paddle.swig_paddle as api
 import minibatch
 import plot
 import image
@@ -47,7 +45,6 @@ __all__ = [
     'data_type',
     'attr',
     'pooling',
-    'data_feeder',
     'dataset',
     'reader',
     'topology',
@@ -61,6 +58,7 @@ __all__ = [
 def init(**kwargs):
+    import py_paddle.swig_paddle as api
     args = []
     args_dict = {}
     # NOTE: append arguments if they are in ENV
...
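The same pattern repeats in the files below: module-level imports of py_paddle.swig_paddle (and of data_feeder) are pushed down into the functions and methods that actually use them, presumably so that importing paddle.v2 no longer requires the compiled swig_paddle extension at import time. A schematic of the pattern (some_function is a placeholder, not code from this commit):

    # Before: the swig extension was loaded as soon as the module was imported.
    # import py_paddle.swig_paddle as api

    def some_function():
        # After: the import is deferred until the function is actually called.
        import py_paddle.swig_paddle as api
        return api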
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from py_paddle import DataProviderConverter
 import collections
 import paddle.trainer.PyDataProvider2 as pydp2
...
@@ -9,8 +9,6 @@ There are:
 * BeginPass
 * EndPass
 """
-import py_paddle.swig_paddle as api
 __all__ = [
     'EndIteration', 'BeginIteration', 'BeginPass', 'EndPass', 'TestResult'
 ]
@@ -18,6 +16,7 @@ __all__ = [
 class WithMetric(object):
     def __init__(self, evaluator):
+        import py_paddle.swig_paddle as api
         if not isinstance(evaluator, api.Evaluator):
             raise TypeError("Evaluator should be api.Evaluator type")
         self.__evaluator__ = evaluator
...
import paddle.v2.framework.core as core
import paddle.v2.framework.proto.op_proto_pb2 as op_proto_pb2


def get_all_op_protos():
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = op_proto_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values
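A quick way to inspect the result is to look at the parsed messages themselves; assuming the OpProto message carries a type field naming the operator (as defined in the framework's proto file), something like:

    import paddle.v2.framework.create_op_creation_methods as creation

    for op_proto in creation.get_all_op_protos():
        # With USE_OP(add_two) in pybind.cc, this should include "add_two".
        print(op_proto.type)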
 add_python_test(test_framework test_protobuf.py test_scope.py
-    test_default_scope_funcs.py)
+    test_default_scope_funcs.py test_op_creation_methods.py)
import unittest
import paddle.v2.framework.create_op_creation_methods as creation


class TestOpCreationsMethods(unittest.TestCase):
    def test_all_protos(self):
        all_protos = creation.get_all_op_protos()
        self.assertNotEqual(0, len(all_protos))
        for each in all_protos:
            self.assertTrue(each.IsInitialized())


if __name__ == "__main__":
    unittest.main()
 import numpy
-import py_paddle.swig_paddle as api
 import collections
 import topology
 import minibatch
-from data_feeder import DataFeeder
 __all__ = ['infer', 'Inference']
@@ -28,6 +26,7 @@ class Inference(object):
     """
     def __init__(self, output_layer, parameters):
+        import py_paddle.swig_paddle as api
         topo = topology.Topology(output_layer)
         gm = api.GradientMachine.createFromConfigProto(
             topo.proto(), api.CREATE_MODE_TESTING, [api.PARAMETER_VALUE])
@@ -40,6 +39,7 @@ class Inference(object):
         self.__data_types__ = topo.data_type()
     def iter_infer(self, input, feeding=None):
+        from data_feeder import DataFeeder
         feeder = DataFeeder(self.__data_types__, feeding)
         batch_size = len(input)
...
-import py_paddle.swig_paddle as swig_api
 import paddle.trainer_config_helpers.config_parser_utils as config_parser_utils
 import paddle.trainer_config_helpers.optimizers as v1_optimizers
 """
@@ -18,6 +16,7 @@ __all__ = [
 class Optimizer(object):
     def __init__(self, **kwargs):
+        import py_paddle.swig_paddle as swig_api
         if 'batch_size' in kwargs:
             del kwargs['batch_size']  # not important for python library.
@@ -268,6 +267,7 @@ ModelAverage = v1_optimizers.ModelAverage
 L2Regularization = v1_optimizers.L2Regularization
 if __name__ == '__main__':
+    import py_paddle.swig_paddle as swig_api
     swig_api.initPaddle('--use_gpu=false')
     for opt in [
             Momentum(), Adam(), Adamax(), AdaGrad(), DecayedAdaGrad(),
...
 import numpy as np
-import py_paddle.swig_paddle as api
 from paddle.proto.ParameterConfig_pb2 import ParameterConfig
 import paddle.trainer.config_parser as cp
 import struct
@@ -124,6 +123,7 @@ class Parameters(object):
         :return: parameter value
         :rtype: np.ndarray
         """
+        import py_paddle.swig_paddle as api
         shape = self.get_shape(key)
         if len(self.__gradient_machines__) == 0:
@@ -223,7 +223,7 @@ class Parameters(object):
         :type gradient_machine: api.GradientMachine
         :return:
         """
+        import py_paddle.swig_paddle as api
         if not isinstance(gradient_machine, api.GradientMachine):
             raise ValueError("gradient_machine should be api.GradientMachine")
@@ -359,6 +359,7 @@ def __copy_parameter_to_gradient_machine__(gradient_machine, name, arr):
     :return:
     :rtype: api.Parameter
     """
+    import py_paddle.swig_paddle as api
     param = __get_parameter_in_gradient_machine__(gradient_machine, name)
     vec = param.getBuf(api.PARAMETER_VALUE)
     assert isinstance(vec, api.Vector)
...
@@ -2,12 +2,6 @@
 Module Trainer
 """
 import collections
-import gzip
-import os
-import py_paddle.swig_paddle as api
-from data_feeder import DataFeeder
 from topology import Topology
 from . import event as v2_event
 from . import optimizer as v2_optimizer
@@ -59,6 +53,7 @@ class SGD(object):
         if not isinstance(update_equation, v2_optimizer.Optimizer):
             raise TypeError("update equation parameter must be "
                             "paddle.v2.optimizer.Optimizer")
+        import py_paddle.swig_paddle as api
         topology = Topology(cost, extra_layers=extra_layers)
         self.__optimizer__ = update_equation
         self.__topology__ = topology
@@ -124,6 +119,8 @@ class SGD(object):
         :type feeding: dict|list
         :return:
         """
+        import py_paddle.swig_paddle as api
+        from data_feeder import DataFeeder
         if event_handler is None:
             event_handler = default_event_handler
         __check_train_args__(**locals())
@@ -187,6 +184,8 @@ class SGD(object):
         :type feeding: dict
         :return:
         """
+        import py_paddle.swig_paddle as api
+        from data_feeder import DataFeeder
         feeder = DataFeeder(self.__data_types__, feeding)
         evaluator = self.__gradient_machine__.makeEvaluator()
         out_args = api.Arguments.createArguments(0)
...
@@ -19,7 +19,8 @@ setup_requires=["requests",
                 "recordio",
                 "matplotlib",
                 "rarfile",
-                "scipy>=0.19.0"]
+                "scipy>=0.19.0",
+                "nltk"]
 if '${CMAKE_SYSTEM_PROCESSOR}' not in ['arm', 'armv7-a', 'aarch64']:
     setup_requires+=["opencv-python"]
...