diff --git a/paddle/fluid/jit/function_utils.cc b/paddle/fluid/jit/function_utils.cc
index 83da12d2652a3293edcaa819b48bc6004b1c1e03..c3811935a52cce006986fa24fbe88b19bc6093c3 100644
--- a/paddle/fluid/jit/function_utils.cc
+++ b/paddle/fluid/jit/function_utils.cc
@@ -73,12 +73,17 @@ void ShareIntoScope(const std::vector<std::string> &ordered_input_names,
 void ShareParamsIntoScope(const std::vector<std::string> &param_names,
                           const Name2VariableMap &params_dict,
                           framework::Scope *scope) {
-  VLOG(3) << "param_names size: " << param_names.size();
   for (size_t i = 0; i < param_names.size(); ++i) {
     std::string name = param_names[i];
+    PADDLE_ENFORCE_EQ(params_dict.count(name),
+                      1,
+                      phi::errors::InvalidArgument(
+                          "Parameter named %s is not exist in param_names. "
+                          "Please check that your model was saved correctly",
+                          name));
+
     auto &param = params_dict.find(name)->second;
     auto &dense_tensor = param->Get<DenseTensor>();
-    VLOG(3) << "share into scope: " << name;
     auto *var = scope->Var(name);
     auto *dst_tensor = var->GetMutable<DenseTensor>();
     *dst_tensor = dense_tensor;
diff --git a/paddle/fluid/jit/layer.cc b/paddle/fluid/jit/layer.cc
index 6cf8b98b1cbcb270ffad8223774ed7e1aaa25486..868b5a3ee250a580f1e134f433dd91d9ef6d6cc2 100644
--- a/paddle/fluid/jit/layer.cc
+++ b/paddle/fluid/jit/layer.cc
@@ -68,6 +68,14 @@ const std::shared_ptr<jit::FunctionInfo>& Layer::FunctionInfo(
   return info_map_.at(name);
 }
 
+std::vector<std::string> Layer::FunctionNames() const {
+  std::vector<std::string> names;
+  for (auto it = info_map_.begin(); it != info_map_.end(); ++it) {
+    names.emplace_back(it->first);
+  }
+  return names;
+}
+
 #define PD_SPECIALZE_ATTRIBUTE_TYPE(T)                    \
   template <>                                             \
   T Layer::Attribute<T>(const std::string& name) const {  \
diff --git a/paddle/fluid/jit/layer.h b/paddle/fluid/jit/layer.h
index 6f92ac44d63799d169c84092cbdcb88b743a10a4..8a4001cf89c16103a3307e7de20f56e38947e611 100644
--- a/paddle/fluid/jit/layer.h
+++ b/paddle/fluid/jit/layer.h
@@ -70,6 +70,8 @@ class Layer {
   const std::shared_ptr<jit::FunctionInfo>& FunctionInfo(
       const std::string& name) const;
 
+  std::vector<std::string> FunctionNames() const;
+
  private:
   Name2VariableMap params_map_;
   Name2VariableMap attrs_map_;
diff --git a/paddle/fluid/pybind/eager_functions.cc b/paddle/fluid/pybind/eager_functions.cc
index 62cfc330ae3ffefc4fa2552ded56cede23068b4e..9596551136c201e8c525576b29744f3df3b3d3cb 100644
--- a/paddle/fluid/pybind/eager_functions.cc
+++ b/paddle/fluid/pybind/eager_functions.cc
@@ -372,8 +372,9 @@ static PyObject* eager_api_jit_function_call(PyObject* self,
                                              PyObject* args,
                                              PyObject* kwargs) {
   EAGER_TRY
-  std::shared_ptr<jit::BaseEngine> function =
-      CastPyArg2BaseEngine(PyTuple_GET_ITEM(args, 0), 0);
+
+  std::shared_ptr<jit::Function> function =
+      CastPyArg2JitFunction(PyTuple_GET_ITEM(args, 0), 0);
   std::vector<paddle::experimental::Tensor> ins =
       CastPyArg2VectorOfTensor(PyTuple_GET_ITEM(args, 1), 1);
   std::vector<paddle::experimental::Tensor> outs = (*function)(ins);
diff --git a/paddle/fluid/pybind/eager_utils.cc b/paddle/fluid/pybind/eager_utils.cc
index 82e1fa873f8d10780254e5c36ab7ef0dc8519490..a92ddf388c2204596f235f4592dfa8993dfa8183 100644
--- a/paddle/fluid/pybind/eager_utils.cc
+++ b/paddle/fluid/pybind/eager_utils.cc
@@ -22,8 +22,7 @@ limitations under the License. */
*/ #include "paddle/fluid/framework/convert_utils.h" #include "paddle/fluid/framework/scope.h" #include "paddle/fluid/framework/scope_guard.h" -#include "paddle/fluid/jit/engine/executor_engine.h" -#include "paddle/fluid/jit/engine/pe_engine.h" +#include "paddle/fluid/jit/function.h" #include "paddle/fluid/memory/allocation/allocator.h" #include "paddle/fluid/operators/py_func_op.h" #include "paddle/fluid/operators/utils.h" @@ -54,8 +53,7 @@ extern PyTypeObject* g_customplace_pytype; extern PyTypeObject* g_framework_tensor_pytype; extern PyTypeObject* g_framework_lodtensorarray_pytype; extern PyTypeObject* g_custom_op_kernel_ctx_pytype; -extern PyTypeObject* g_executor_engine_pytype; -extern PyTypeObject* g_pe_engine_pytype; +extern PyTypeObject* g_jit_function_pytype; int TensorDtype2NumpyDtype(phi::DataType dtype) { switch (dtype) { @@ -232,14 +230,11 @@ std::shared_ptr CastPyArg2VarBase(PyObject* obj, return py::cast>(obj); } -std::shared_ptr CastPyArg2BaseEngine(PyObject* obj, - ssize_t arg_pos) { - if (PyObject_IsInstance( - obj, reinterpret_cast(g_executor_engine_pytype))) { - return ::pybind11::handle(obj).cast>(); - } else if (PyObject_IsInstance( - obj, reinterpret_cast(g_pe_engine_pytype))) { - return ::pybind11::handle(obj).cast>(); +std::shared_ptr CastPyArg2JitFunction(PyObject* obj, + ssize_t arg_pos) { + if (PyObject_IsInstance(obj, + reinterpret_cast(g_jit_function_pytype))) { + return ::pybind11::handle(obj).cast>(); } else { PADDLE_THROW(platform::errors::InvalidArgument( "argument (position %d) must be " diff --git a/paddle/fluid/pybind/eager_utils.h b/paddle/fluid/pybind/eager_utils.h index 94e8ce4e04aa413471826da7c52e52a8a3e3d4d9..df959b9abf4f15aaa676824debc5a71696adb3c2 100644 --- a/paddle/fluid/pybind/eager_utils.h +++ b/paddle/fluid/pybind/eager_utils.h @@ -20,7 +20,7 @@ typedef SSIZE_T ssize_t; #include "paddle/fluid/eager/hooks.h" #include "paddle/fluid/framework/lod_tensor.h" #include "paddle/fluid/framework/tensor.h" -#include "paddle/fluid/jit/engine/base_engine.h" +#include "paddle/fluid/jit/function.h" #include "paddle/fluid/platform/place.h" #include "paddle/phi/common/backend.h" #include "paddle/phi/common/data_type.h" @@ -75,8 +75,8 @@ framework::proto::VarType::Type CastPyArg2ProtoType(PyObject* obj, std::unordered_map CastPyArg2Vocab(PyObject* obj, ssize_t arg_pos); std::vector CastPyArg2Strings(PyObject* obj, ssize_t arg_pos); -std::shared_ptr CastPyArg2BaseEngine(PyObject* obj, - ssize_t arg_pos); +std::shared_ptr CastPyArg2JitFunction(PyObject* obj, + ssize_t arg_pos); PyObject* ToPyObject(int value); PyObject* ToPyObject(uint32_t value); diff --git a/paddle/fluid/pybind/jit.cc b/paddle/fluid/pybind/jit.cc index 752b5a3021af500b0b6f549245c557f84dfb6b60..a9c844093d1a5c46e278b4b49a4e8a90a7d1bdbf 100644 --- a/paddle/fluid/pybind/jit.cc +++ b/paddle/fluid/pybind/jit.cc @@ -18,8 +18,7 @@ limitations under the License. 
*/ #include "paddle/fluid/imperative/layer.h" #include "paddle/fluid/platform/place.h" -#include "paddle/fluid/jit/engine/executor_engine.h" -#include "paddle/fluid/jit/engine/pe_engine.h" +#include "paddle/fluid/jit/function.h" #include "paddle/fluid/jit/function_schema.h" #include "paddle/fluid/jit/layer.h" #include "paddle/fluid/jit/serializer.h" @@ -29,26 +28,18 @@ namespace py = pybind11; namespace paddle { namespace pybind { -PyTypeObject *g_executor_engine_pytype = nullptr; -PyTypeObject *g_pe_engine_pytype = nullptr; +PyTypeObject *g_jit_function_pytype = nullptr; using Variable = paddle::framework::Variable; void BindJit(pybind11::module *m) { py::class_(*m, "Layer", R"DOC(Layer Class.)DOC") - .def("function_dict", - &jit::Layer::EngineMap, - py::return_value_policy::reference); + .def("function_names", &jit::Layer::FunctionNames) + .def("function", &jit::Layer::Function) + .def("function_info", &jit::Layer::FunctionInfo); - py::class_> - executor_engine(*m, "ExecutorEngine", R"DOC(ExecutorEngine Class.)DOC"); - g_executor_engine_pytype = - reinterpret_cast(executor_engine.ptr()); - executor_engine.def("info", &jit::ExecutorEngine::Info); - - py::class_> pe_engine( - *m, "PEEngine", R"DOC(PEEngine Class.)DOC"); - g_pe_engine_pytype = reinterpret_cast(pe_engine.ptr()); - pe_engine.def("info", &jit::PEEngine::Info); + py::class_> function( + *m, "Function", R"DOC(Function Class.)DOC"); + g_jit_function_pytype = reinterpret_cast(function.ptr()); py::class_>( *m, "FunctionInfo", R"DOC(FunctionInfo Class.)DOC") diff --git a/python/paddle/jit/layer.py b/python/paddle/jit/layer.py index 4aee7a8f5c02a3e478c45bd988c10777018d11cd..97b598948500b1772656bfc4bd9154caa1e6078e 100644 --- a/python/paddle/jit/layer.py +++ b/python/paddle/jit/layer.py @@ -26,18 +26,19 @@ class Layer(object): def load(self, load_path, place): self.cpp_layer = Load(load_path, place) - function_dict = self.cpp_layer.function_dict() - for name, function in function_dict.items(): - self.functions[name] = Function(function) + for name in self.cpp_layer.function_names(): + function = self.cpp_layer.function(name) + info = self.cpp_layer.function_info(name) + self.functions[name] = Function(function, info) setattr(self, name, self.functions[name]) class Function(): - def __init__(self, function): + def __init__(self, function, info): self.function = function - self.info = FunctionInfo(function.info()) + self.info = FunctionInfo(info) def __call__(self, *args): return core.eager.jit_function_call(self.function, args)