Commit a6d23083 authored by Xin Pan

some tracing

test=develop
Parent dac92e56
paddle/fluid/imperative/CMakeLists.txt:
cc_library(layer SRCS layer.cc)
cc_library(tracer SRCS tracer.cc DEPS proto_desc)
cc_library(engine SRCS engine.cc)
paddle/fluid/imperative/engine.cc (new file):
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "paddle/fluid/imperative/engine.h"
#include <mutex> // NOLINT
#include <vector>
#include "glog/logging.h"
namespace paddle {
namespace imperative {
static std::once_flag init_engine;
static Engine* engine;
class DummyEngine : public Engine {
public:
void Enqueue(Runnable* runnable) override {
queued_runnables_.push_back(runnable);
}
size_t Size() const override { return queued_runnables_.size(); }
void Sync() override {
for (Runnable* l : queued_runnables_) {
LOG(INFO) << "running " << reinterpret_cast<void*>(l);
}
queued_runnables_.clear();
}
private:
std::vector<Runnable*> queued_runnables_;
};
Engine* GetEngine() {
std::call_once(init_engine, []() { engine = new DummyEngine(); });
return engine;
}
} // namespace imperative
} // namespace paddle
paddle/fluid/imperative/engine.h (new file):
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <cstddef>
#include <cstdint>

namespace paddle {
namespace imperative {

struct Runnable {};

class Engine {
 public:
  virtual ~Engine() {}

  virtual void Enqueue(Runnable* runnable) = 0;
  virtual size_t Size() const = 0;
  virtual void Sync() = 0;
};

Engine* GetEngine();

}  // namespace imperative
}  // namespace paddle
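A minimal driver sketch for the Engine interface above (the standalone main below is illustrative, not part of this commit; it uses only GetEngine(), Enqueue(), Size(), and Sync() as declared here, plus the DummyEngine from engine.cc, which merely logs each queued pointer and clears the queue):

#include "glog/logging.h"

#include "paddle/fluid/imperative/engine.h"

int main() {
  paddle::imperative::Engine* engine = paddle::imperative::GetEngine();

  // Runnable is an empty placeholder struct for now, so any instance will do.
  paddle::imperative::Runnable task;
  engine->Enqueue(&task);                     // queue work for a later Sync()
  LOG(INFO) << "queued: " << engine->Size();  // prints 1

  engine->Sync();                             // logs the runnable's address, then clears the queue
  LOG(INFO) << "queued: " << engine->Size();  // prints 0
  return 0;
}

Note that nothing wires the tracer into the engine yet: Tracer holds an empty runnables_ vector and DummyEngine is a stand-in, so this commit only establishes the interfaces.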
paddle/fluid/imperative/layer.h:
...
@@ -27,9 +27,9 @@ class Layer {
  public:
   virtual ~Layer() {}

-  virtual void Forward() { LOG(ERROR) << "at cpp."; }
+  virtual void Forward() { LOG(ERROR) << "forward at cpp."; }

-  virtual void Backward() {}
+  virtual void Backward() { LOG(ERROR) << "backward at cpp."; }
 };

 }  // namespace imperative
...
paddle/fluid/imperative/tracer.cc (new file):
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "paddle/fluid/imperative/tracer.h"
namespace paddle {
namespace imperative {} // namespace imperative
} // namespace paddle
paddle/fluid/imperative/tracer.h (new file):
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <vector>

#include "glog/logging.h"  // for LOG
#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/imperative/engine.h"

namespace paddle {
namespace imperative {

class Tracer {
 public:
  Tracer() {}

  void Trace(framework::OpDesc* op_desc) {
    LOG(ERROR) << "tracing " << op_desc->Type();
  }

 private:
  std::vector<Runnable*> runnables_;
};

}  // namespace imperative
}  // namespace paddle
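A minimal sketch of exercising the Tracer directly from C++ (illustrative, not part of this commit; it assumes framework::OpDesc's default constructor and SetType() from paddle/fluid/framework/op_desc.h):

#include "paddle/fluid/framework/op_desc.h"

#include "paddle/fluid/imperative/tracer.h"

void TraceOneOp() {
  paddle::framework::OpDesc op_desc;
  op_desc.SetType("relu");  // give the op a type so Trace() has something to report

  paddle::imperative::Tracer tracer;
  tracer.Trace(&op_desc);   // logs: tracing relu
}

This mirrors what Block.append_op does on the Python side further below: in imperative mode it builds a bare core.OpDesc and hands it to _imperative_tracer().trace() instead of appending the op to the block.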
paddle/fluid/pybind/imperative.cc:
...
@@ -13,7 +13,17 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 #include "paddle/fluid/pybind/imperative.h"
+#include "paddle/fluid/imperative/tracer.h"

 namespace paddle {
-namespace pybind {}  // namespace pybind11
+namespace pybind {
+
+// Bind Methods
+void BindTracer(pybind11::module *m) {
+  pybind11::class_<imperative::Tracer>(*m, "Tracer", "")
+      .def(pybind11::init<>())
+      .def("trace", &imperative::Tracer::Trace);
+}
+
+}  // namespace pybind
 }  // namespace paddle
paddle/fluid/pybind/imperative.h:
...
@@ -20,9 +20,9 @@ limitations under the License. */
 namespace paddle {
 namespace pybind {

-class PyLayer : public paddle::imperative::Layer {
+class PyLayer : public imperative::Layer {
  public:
-  using paddle::imperative::Layer::Layer;  // Inherit constructors
+  using imperative::Layer::Layer;  // Inherit constructors

   void Forward() override {
     PYBIND11_OVERLOAD(void, Layer, Forward, );  // NOLINT
...
@@ -33,5 +33,7 @@ class PyLayer : public paddle::imperative::Layer {
   }
 };

+void BindTracer(pybind11::module *m);
+
 }  // namespace pybind
 }  // namespace paddle
paddle/fluid/pybind/pybind.cc:
...
@@ -105,6 +105,7 @@ PYBIND11_MODULE(core, m) {
   layer.def(py::init<>())
       .def("forward", &imperative::Layer::Forward)
       .def("backward", &imperative::Layer::Backward);
+  BindTracer(&m);

   py::class_<Tensor>(m, "Tensor", py::buffer_protocol())
       .def_buffer(
...
python/paddle/fluid/framework.py:
...
@@ -49,6 +49,16 @@ GRAD_VAR_SUFFIX = core.kGradVarSuffix()
 ZERO_VAR_SUFFIX = core.kZeroVarSuffix()
 CONTROL_DEP_VAR_PREFIX = core.kControlDepVarName()
+_imperative_tracer_ = None
+
+
+def _in_imperative_mode():
+    return _imperative_tracer_ is not None
+
+
+def _imperative_tracer():
+    return _imperative_tracer_
+
 class NameScope(object):
     def __init__(self, name="", parent=None):
...
@@ -1203,6 +1213,12 @@ class Block(object):
         Returns:
             Operator: the append Operator.
         """
+        if _in_imperative_mode():
+            op_desc = core.OpDesc()
+            op = Operator(block=self, desc=op_desc, *args, **kwargs)
+            _imperative_tracer().trace(op.desc)
+            return
+
         op_desc = self.desc.append_op()
         op = Operator(block=self, desc=op_desc, *args, **kwargs)
         self.ops.append(op)
...
@@ -2208,3 +2224,12 @@ def _get_var(name, program=None):
     assert isinstance(program, Program)
     return program.global_block().var(name)
+
+
+@contextlib.contextmanager
+def _imperative_guard():
+    global _imperative_tracer_
+    tmp_trace = _imperative_tracer_
+    _imperative_tracer_ = core.Tracer()
+    yield
+    _imperative_tracer_ = tmp_trace
python/paddle/fluid/imperative/__init__.py:
...
@@ -14,8 +14,12 @@
 from __future__ import print_function

+from . import base
+from .base import *
 from . import layers
 from .layers import *

 __all__ = []
 __all__ += layers.__all__
+__all__ += base.__all__
python/paddle/fluid/imperative/base.py (new file):
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib

from paddle.fluid import core
from paddle.fluid import framework

__all__ = ['enabled', 'guard']


def enabled():
    return framework._in_imperative_mode()


@contextlib.contextmanager
def guard():
    train = framework.Program()
    startup = framework.Program()
    with framework.program_guard(train, startup):
        with framework.unique_name.guard():
            with framework._imperative_guard():
                yield
    # TODO: check train, startup not changed.
python/paddle/fluid/tests/unittests/test_imperative.py:
...
@@ -13,6 +13,7 @@
 # limitations under the License.
 import unittest
+import sys

 import paddle.fluid as fluid
 from paddle.fluid import core
...
@@ -24,6 +25,14 @@ class TestImperative(unittest.TestCase):
         l = fluid.imperative.PyLayer()
         l.forward()

+    def test_imperative_trace(self):
+        with fluid.imperative.guard():
+            self.assertTrue(fluid.imperative.enabled())
+            x = fluid.layers.data(name='x', shape=[3, 4], dtype='float32')
+            x = fluid.layers.relu(x)
+            x = fluid.layers.elementwise_mul(x, x)
+            self.assertIsNotNone(x)

 if __name__ == '__main__':
     unittest.main()