diff --git a/paddle/fluid/imperative/CMakeLists.txt b/paddle/fluid/imperative/CMakeLists.txt
index 12dfd6f1e7c268467296b6463757c3d7e45d1eb0..dff80dff0be7f886b225d184993dc79da534022e 100644
--- a/paddle/fluid/imperative/CMakeLists.txt
+++ b/paddle/fluid/imperative/CMakeLists.txt
@@ -1 +1,3 @@
 cc_library(layer SRCS layer.cc)
+cc_library(tracer SRCS tracer.cc DEPS proto_desc)
+cc_library(engine SRCS engine.cc)
diff --git a/paddle/fluid/imperative/engine.cc b/paddle/fluid/imperative/engine.cc
new file mode 100644
index 0000000000000000000000000000000000000000..de7ab0e5918281579728ef48d1517be2cd530af7
--- /dev/null
+++ b/paddle/fluid/imperative/engine.cc
@@ -0,0 +1,53 @@
+// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/imperative/engine.h"
+
+#include <mutex>  // NOLINT
+#include <vector>
+
+#include "glog/logging.h"
+
+namespace paddle {
+namespace imperative {
+
+static std::once_flag init_engine;
+static Engine* engine;
+
+class DummyEngine : public Engine {
+ public:
+  void Enqueue(Runnable* runnable) override {
+    queued_runnables_.push_back(runnable);
+  }
+
+  size_t Size() const override { return queued_runnables_.size(); }
+
+  void Sync() override {
+    for (Runnable* l : queued_runnables_) {
+      LOG(INFO) << "running " << reinterpret_cast<void*>(l);
+    }
+    queued_runnables_.clear();
+  }
+
+ private:
+  std::vector<Runnable*> queued_runnables_;
+};
+
+Engine* GetEngine() {
+  std::call_once(init_engine, []() { engine = new DummyEngine(); });
+  return engine;
+}
+
+}  // namespace imperative
+}  // namespace paddle
diff --git a/paddle/fluid/imperative/engine.h b/paddle/fluid/imperative/engine.h
new file mode 100644
index 0000000000000000000000000000000000000000..a1dfa5bda38d0c419aa4ccbea77b32eb7e0d5b23
--- /dev/null
+++ b/paddle/fluid/imperative/engine.h
@@ -0,0 +1,39 @@
+// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <cstddef>
+#include <cstdint>
+
+namespace paddle {
+namespace imperative {
+
+struct Runnable {};
+
+class Engine {
+ public:
+  virtual ~Engine() {}
+
+  virtual void Enqueue(Runnable* runnable) = 0;
+
+  virtual size_t Size() const = 0;
+
+  virtual void Sync() = 0;
+};
+
+Engine* GetEngine();
+
+}  // namespace imperative
+}  // namespace paddle
diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index cf6aec60d190dd82b18a940855f4225dab1a69f0..42cc65ddc938b88b5ec50cf72edc9798e77d20d0 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -27,9 +27,9 @@ class Layer {
  public:
   virtual ~Layer() {}
 
-  virtual void Forward() { LOG(ERROR) << "at cpp."; }
+  virtual void Forward() { LOG(ERROR) << "forward at cpp."; }
 
-  virtual void Backward() {}
+  virtual void Backward() { LOG(ERROR) << "backward at cpp."; }
 };
 
 }  // namespace imperative
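The layer.h change above is small but user-visible: both virtual hooks now log from C++, and they are already exposed to Python through the existing Layer bindings in pybind.cc. A minimal sketch of how that surfaces on the Python side, assuming the current PyLayer binding (the sketch itself is not part of this patch):

```python
import paddle.fluid as fluid

l = fluid.imperative.PyLayer()
l.forward()   # Layer::Forward logs "forward at cpp." unless overridden in Python
l.backward()  # Layer::Backward now logs "backward at cpp." instead of doing nothing
```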
diff --git a/paddle/fluid/imperative/tracer.cc b/paddle/fluid/imperative/tracer.cc
new file mode 100644
index 0000000000000000000000000000000000000000..f64f9e72c4a23528948183b909d65e90783a4463
--- /dev/null
+++ b/paddle/fluid/imperative/tracer.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/imperative/tracer.h"
+
+namespace paddle {
+namespace imperative {}  // namespace imperative
+}  // namespace paddle
diff --git a/paddle/fluid/imperative/tracer.h b/paddle/fluid/imperative/tracer.h
new file mode 100644
index 0000000000000000000000000000000000000000..91e34a4783abc5aef117b478830ed8e1e0941e6a
--- /dev/null
+++ b/paddle/fluid/imperative/tracer.h
@@ -0,0 +1,37 @@
+// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include <vector>
+#include "paddle/fluid/framework/op_desc.h"
+#include "paddle/fluid/imperative/engine.h"
+
+namespace paddle {
+namespace imperative {
+
+class Tracer {
+ public:
+  Tracer() {}
+
+  void Trace(framework::OpDesc* op_desc) {
+    LOG(ERROR) << "tracing " << op_desc->Type();
+  }
+
+ private:
+  std::vector<Runnable*> runnables_;
+};
+
+}  // namespace imperative
+}  // namespace paddle
diff --git a/paddle/fluid/pybind/imperative.cc b/paddle/fluid/pybind/imperative.cc
index af010f09dd465c5b8630099ac7546f5b20ec0c0d..cd97cd63529b5fa88d94beb4ad68328728a5edaa 100644
--- a/paddle/fluid/pybind/imperative.cc
+++ b/paddle/fluid/pybind/imperative.cc
@@ -13,7 +13,17 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 #include "paddle/fluid/pybind/imperative.h"
+#include "paddle/fluid/imperative/tracer.h"
 
 namespace paddle {
-namespace pybind {}  // namespace pybind11
+namespace pybind {
+
+// Bind Methods
+void BindTracer(pybind11::module *m) {
+  pybind11::class_<imperative::Tracer>(*m, "Tracer", "")
+      .def(pybind11::init<>())
+      .def("trace", &imperative::Tracer::Trace);
+}
+
+}  // namespace pybind
 }  // namespace paddle
diff --git a/paddle/fluid/pybind/imperative.h b/paddle/fluid/pybind/imperative.h
index d1b9024990835ac997a8ad6d86688aea18394993..bfab6bd9b90854aed782965d8a949be22f6ca95c 100644
--- a/paddle/fluid/pybind/imperative.h
+++ b/paddle/fluid/pybind/imperative.h
@@ -20,9 +20,9 @@ limitations under the License. */
 namespace paddle {
 namespace pybind {
 
-class PyLayer : public paddle::imperative::Layer {
+class PyLayer : public imperative::Layer {
  public:
-  using paddle::imperative::Layer::Layer;  // Inherit constructors
+  using imperative::Layer::Layer;  // Inherit constructors
 
   void Forward() override {
     PYBIND11_OVERLOAD(void, Layer, Forward, );  // NOLINT
@@ -33,5 +33,7 @@ class PyLayer : public paddle::imperative::Layer {
   }
 };
 
+void BindTracer(pybind11::module *m);
+
 }  // namespace pybind
 }  // namespace paddle
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index 4d8c0fdb1370329fa68bc0ee748683758c702017..fa3e3835361f1a3a5cc552ecfe643016f02194ed 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -105,6 +105,7 @@ PYBIND11_MODULE(core, m) {
   layer.def(py::init<>())
       .def("forward", &imperative::Layer::Forward)
       .def("backward", &imperative::Layer::Backward);
+  BindTracer(&m);
 
   py::class_<Tensor>(m, "Tensor", py::buffer_protocol())
       .def_buffer(
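With BindTracer hooked into pybind.cc, the tracer becomes reachable from Python as core.Tracer, which is exactly what framework.py relies on below. A rough usage sketch (the set_type call is only illustrative setup; in this patch the OpDesc is normally populated by Operator inside Block.append_op):

```python
from paddle.fluid import core

tracer = core.Tracer()    # constructed through the pybind11::init<>() binding
op_desc = core.OpDesc()   # a standalone OpDesc, not appended to any block
op_desc.set_type("relu")  # illustrative only, so Type() has something to report
tracer.trace(op_desc)     # Tracer::Trace logs "tracing relu" via LOG(ERROR)
```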
""" + if _in_imperative_mode(): + op_desc = core.OpDesc() + op = Operator(block=self, desc=op_desc, *args, **kwargs) + _imperative_tracer().trace(op.desc) + return + op_desc = self.desc.append_op() op = Operator(block=self, desc=op_desc, *args, **kwargs) self.ops.append(op) @@ -2208,3 +2224,12 @@ def _get_var(name, program=None): assert isinstance(program, Program) return program.global_block().var(name) + + +@contextlib.contextmanager +def _imperative_guard(): + global _imperative_tracer_ + tmp_trace = _imperative_tracer_ + _imperative_tracer_ = core.Tracer() + yield + _imperative_tracer_ = tmp_trace diff --git a/python/paddle/fluid/imperative/__init__.py b/python/paddle/fluid/imperative/__init__.py index 8ce1dd7aa32ecbdb7e8d9f6c65f33d772ae0949e..922308b6b18b335535d41f24d544cde04991b794 100644 --- a/python/paddle/fluid/imperative/__init__.py +++ b/python/paddle/fluid/imperative/__init__.py @@ -14,8 +14,12 @@ from __future__ import print_function +from . import base +from .base import * + from . import layers from .layers import * __all__ = [] __all__ += layers.__all__ +__all__ += base.__all__ diff --git a/python/paddle/fluid/imperative/base.py b/python/paddle/fluid/imperative/base.py new file mode 100644 index 0000000000000000000000000000000000000000..900a65a3aad4df65c2cc5bbfb2b6491e49016c72 --- /dev/null +++ b/python/paddle/fluid/imperative/base.py @@ -0,0 +1,33 @@ +# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import contextlib +from paddle.fluid import core +from paddle.fluid import framework + +__all__ = ['enabled', 'guard'] + + +def enabled(): + return framework._in_imperative_mode() + + +@contextlib.contextmanager +def guard(): + train = framework.Program() + startup = framework.Program() + with framework.program_guard(train, startup): + with framework.unique_name.guard(): + with framework._imperative_guard(): + yield + # TODO: check train, startup not changed. diff --git a/python/paddle/fluid/tests/unittests/test_imperative.py b/python/paddle/fluid/tests/unittests/test_imperative.py index 36432d83687e6dee546299714d972fa9c37c6770..cdd90accc1d6de5554430b2233977f5c920338e3 100644 --- a/python/paddle/fluid/tests/unittests/test_imperative.py +++ b/python/paddle/fluid/tests/unittests/test_imperative.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest +import sys import paddle.fluid as fluid from paddle.fluid import core @@ -24,6 +25,14 @@ class TestImperative(unittest.TestCase): l = fluid.imperative.PyLayer() l.forward() + def test_imperative_trace(self): + with fluid.imperative.guard(): + self.assertTrue(fluid.imperative.enabled()) + x = fluid.layers.data(name='x', shape=[3, 4], dtype='float32') + x = fluid.layers.relu(x) + x = fluid.layers.elementwise_mul(x, x) + self.assertIsNotNone(x) + if __name__ == '__main__': unittest.main()