Commit 70f398e2 authored by Yu Yang

Update

Parent 859dba59
@@ -24,6 +24,9 @@ static ProgramDesc* g_program_desc = nullptr;
 ProgramDesc& GetProgramDesc() {
   if (g_program_desc == nullptr) {
     g_program_desc = new ProgramDesc();
+    auto root_block = g_program_desc->mutable_blocks()->Add();
+    root_block->set_idx(0);
+    root_block->set_parent_idx(-1);
   }
   return *g_program_desc;
 }
...
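After this change the lazily created singleton already carries a root block with index 0 and parent index -1. A minimal Python sketch of what that implies, assuming the root_block(), idx(), and parent() bindings added in the next hunk behave as intended:

import paddle.v2.framework.core as core

# The singleton ProgramDesc is created on first access and now
# comes with a pre-populated root block.
program_desc = core.ProgramDesc.instance()
root = program_desc.root_block()
assert root.idx() == 0       # set_idx(0) in GetProgramDesc()
assert root.parent() == -1   # set_parent_idx(-1): the root block has no parent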
@@ -316,21 +316,75 @@ All parameter, weight, gradient are variables in Paddle.
   m.def("is_compile_gpu", IsCompileGPU);
   py::class_<ProgramDesc>(m, "ProgramDesc", "")
-      .def_static("instance", [] { return &GetProgramDesc(); })
-      .def("append_block", [](ProgramDesc &self) {
-        auto desc = self.mutable_blocks()->Add();
-        desc->set_idx(self.mutable_blocks()->size() - 1);
-        return desc;
-      });
+      .def_static("instance",
+                  [] { return &GetProgramDesc(); },
+                  py::return_value_policy::reference)
+      .def("append_block",
+           [](ProgramDesc &self, BlockDesc &parent) {
+             auto desc = self.mutable_blocks()->Add();
+             desc->set_idx(self.mutable_blocks()->size() - 1);
+             desc->set_parent_idx(parent.idx());
+             return desc;
+           })
+      .def("root_block",
+           [](ProgramDesc &self) { return self.mutable_blocks()[0]; });
   py::class_<BlockDesc>(m, "BlockDesc", "")
       .def("idx", [](BlockDesc &self) { return self.idx(); })
-      .def("set_parent",
-           [](BlockDesc &self, int32_t idx) { self.set_parent_idx(idx); })
-      .def("parent", [](BlockDesc &self) { return self.parent_idx(); });
+      .def("parent", [](BlockDesc &self) { return self.parent_idx(); })
+      .def("append_op",
+           [](BlockDesc &self) { return self.mutable_ops()->Add(); });
   py::class_<VarDesc>(m, "VarDesc", "");
-  py::class_<OpDesc>(m, "OpDesc", "");
+  auto op_desc_set_var = [](OpDesc::Var *var,
+                            const std::string &parameter,
+                            const std::vector<std::string> &arguments) {
+    var->set_parameter(parameter);
+    auto args = var->mutable_arguments();
+    args->Reserve(static_cast<int>(arguments.size()));
+    for (auto &arg : arguments) {
+      *args->Add() = arg;
+    }
+  };
+  auto op_desc_set_attr = [](OpDesc &desc, const std::string &name) {
+    auto attr = desc.mutable_attrs()->Add();
+    attr->set_name(name);
+    return attr;
+  };
+  py::class_<OpDesc>(m, "OpDesc", "")
+      .def("type", [](OpDesc &op) { return op.type(); })
+      .def("set_input",
+           [op_desc_set_var](OpDesc &self,
+                             const std::string &parameter,
+                             const std::vector<std::string> &arguments) {
+             auto ipt = self.mutable_inputs()->Add();
+             op_desc_set_var(ipt, parameter, arguments);
+           })
+      .def("input_names",
+           [](OpDesc &self) {
+             std::vector<std::string> ret_val;
+             ret_val.reserve(static_cast<size_t>(self.inputs().size()));
+             std::transform(
+                 self.inputs().begin(),
+                 self.inputs().end(),
+                 std::back_inserter(ret_val),
+                 [](const OpDesc::Var &var) { return var.parameter(); });
+             return ret_val;
+           })
+      .def("__str__", [](OpDesc &self) { return self.DebugString(); })
+      .def("set_output",
+           [op_desc_set_var](OpDesc &self,
+                             const std::string &parameter,
+                             const std::vector<std::string> &arguments) {
+             auto opt = self.mutable_outputs()->Add();
+             op_desc_set_var(opt, parameter, arguments);
+           })
+      .def("set_attr",
+           [op_desc_set_attr](OpDesc &self, const std::string &name, int i) {
+             op_desc_set_attr(self, name)->set_i(i);
+           });
   return m.ptr();
 }
...
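For illustration only, a hedged sketch of how these new bindings might be driven from Python; the parameter names, argument names, and the "scale" attribute are made-up placeholders, not part of this commit:

import paddle.v2.framework.core as core

prog = core.ProgramDesc.instance()
root = prog.root_block()

# append_op() returns a mutable OpDesc protobuf message owned by the block.
op = root.append_op()
op.set_input("X", ["x0", "x1"])    # one parameter mapped to two argument names
op.set_output("Out", ["out0"])
op.set_attr("scale", 10)           # only the int overload of set_attr is bound here
print(op.input_names())            # ['X']
print(str(op))                     # protobuf DebugString() of the OpDesc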
import unittest
import paddle.v2.framework.core as core


class TestProgramDesc(unittest.TestCase):
    def test_instance(self):
        program_desc = core.ProgramDesc.instance()
        self.assertIsNotNone(program_desc)
        del program_desc
        program_desc = core.ProgramDesc.instance()
        self.assertIsNotNone(program_desc)
        del program_desc


if __name__ == '__main__':
    unittest.main()
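A possible follow-up test in the same style, not part of this commit, sketching how append_block could be exercised; it assumes the singleton still holds only the root block when the test runs:

import unittest
import paddle.v2.framework.core as core


class TestAppendBlock(unittest.TestCase):
    def test_append_block(self):
        prog = core.ProgramDesc.instance()
        root = prog.root_block()
        block = prog.append_block(root)
        # The new block receives the next index and records its parent.
        self.assertEqual(block.idx(), 1)
        self.assertEqual(block.parent(), root.idx())


if __name__ == '__main__':
    unittest.main()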