diff --git a/paddle/fluid/framework/details/eager_deletion_op_handle.cc b/paddle/fluid/framework/details/eager_deletion_op_handle.cc
index c8e27c7275fe70598e41cbb2cc8482d610c2e113..f8723fe75f8f0304e149ab2195f29bc4c7223bc4 100644
--- a/paddle/fluid/framework/details/eager_deletion_op_handle.cc
+++ b/paddle/fluid/framework/details/eager_deletion_op_handle.cc
@@ -20,6 +20,7 @@
 #include "paddle/fluid/framework/lod_tensor_array.h"
 #include "paddle/fluid/framework/scope.h"
 #include "paddle/fluid/framework/selected_rows.h"
+#include "paddle/fluid/platform/profiler.h"
 #ifdef PADDLE_WITH_CUDA
 #include "paddle/fluid/platform/cuda_device_guard.h"
 #endif
@@ -65,6 +66,7 @@ EagerDeletionOpHandle::~EagerDeletionOpHandle() {
 std::string EagerDeletionOpHandle::Name() const { return "eager_deletion"; }
 
 void EagerDeletionOpHandle::RunImpl() {
+  platform::RecordEvent record_event(Name());
   Scope *exec_scope = nullptr;
   std::deque<std::shared_ptr<memory::Allocation>> garbages;
   for (auto &name : var_names_) {
diff --git a/paddle/fluid/framework/details/rpc_op_handle.cc b/paddle/fluid/framework/details/rpc_op_handle.cc
index 3e082f247adf7fe22db2b62802f0a87c9c93447a..a87b03451bb00643ecb9d9e2339141fe7f25d2e3 100644
--- a/paddle/fluid/framework/details/rpc_op_handle.cc
+++ b/paddle/fluid/framework/details/rpc_op_handle.cc
@@ -14,6 +14,7 @@
 
 #include "paddle/fluid/framework/details/rpc_op_handle.h"
 #include "paddle/fluid/framework/ir/graph.h"
+#include "paddle/fluid/platform/profiler.h"
 
 namespace paddle {
 namespace framework {
@@ -29,6 +30,8 @@ RPCOpHandle::RPCOpHandle(ir::Node *node, const framework::OpDesc &op_desc,
       place_(place) {}
 
 void RPCOpHandle::RunImpl() {
+  platform::RecordEvent record_event(Name());
+
   for (auto *in : inputs_) {
     auto &p = static_cast<VarHandle *>(in)->place();
     if (ir::IsControlDepVar(*in->Node())) {
diff --git a/paddle/fluid/framework/details/scale_loss_grad_op_handle.cc b/paddle/fluid/framework/details/scale_loss_grad_op_handle.cc
index 6924549f36d6365534ab288257899a78107675cc..67b4fed0d3083b105eae4838cf264bba7f7a44c3 100644
--- a/paddle/fluid/framework/details/scale_loss_grad_op_handle.cc
+++ b/paddle/fluid/framework/details/scale_loss_grad_op_handle.cc
@@ -13,8 +13,8 @@
 // limitations under the License.
 
 #include "paddle/fluid/framework/details/scale_loss_grad_op_handle.h"
-
 #include <string>
+#include "paddle/fluid/platform/profiler.h"
 
 namespace paddle {
 namespace framework {
@@ -67,6 +67,7 @@ struct ScaleLossGradFunctor {
 };
 
 void ScaleLossGradOpHandle::RunImpl() {
+  platform::RecordEvent record_event(Name());
   // Doesn't wait any event
   std::string var_name = static_cast<VarHandle *>(this->outputs_[0])->name();
   auto &local_scope = *scope_->FindVar(kLocalExecScopeName)->Get<Scope *>();
diff --git a/paddle/fluid/operators/CMakeLists.txt b/paddle/fluid/operators/CMakeLists.txt
index 6e8d6f459c51170c0f29542154aa3b1c0fd894f1..7a9c03ab5334a2c8c140743eafd23f3ac12fb7f9 100644
--- a/paddle/fluid/operators/CMakeLists.txt
+++ b/paddle/fluid/operators/CMakeLists.txt
@@ -8,7 +8,6 @@ file(WRITE ${pybind_file} "// Generated by the paddle/fluid/operator/CMakeLists.txt.  DO NOT EDIT!\n\n")
 
 add_subdirectory(math)
 add_subdirectory(controlflow)
-add_subdirectory(csp)
 add_subdirectory(detection)
 add_subdirectory(elementwise)
 add_subdirectory(fused)
diff --git a/paddle/fluid/operators/csp/CMakeLists.txt b/paddle/fluid/operators/csp/CMakeLists.txt
deleted file mode 100644
index 5d468316e8eacb73c4a4ce81c784880bb5e46c2d..0000000000000000000000000000000000000000
--- a/paddle/fluid/operators/csp/CMakeLists.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-include(operators)
-register_operators()
diff --git a/paddle/fluid/operators/csp/go_op.cc b/paddle/fluid/operators/csp/go_op.cc
deleted file mode 100644
index 48f9d967adc90838dc4c7a09bfaf5a5a1ac9c99b..0000000000000000000000000000000000000000
--- a/paddle/fluid/operators/csp/go_op.cc
+++ /dev/null
@@ -1,110 +0,0 @@
-/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. */
-
-#include <thread>  // NOLINT
-#include <vector>
-#include "paddle/fluid/framework/executor.h"
-#include "paddle/fluid/framework/lod_tensor.h"
-#include "paddle/fluid/framework/op_registry.h"
-
-namespace paddle {
-namespace operators {
-
-using StepScopeVar = std::vector<framework::Scope *>;
-
-static constexpr char kBlock[] = "sub_block";
-static constexpr char kX[] = "X";
-
-class GoOp : public framework::OperatorBase {
- public:
-  GoOp(const std::string &type, const framework::VariableNameMap &inputs,
-       const framework::VariableNameMap &outputs,
-       const framework::AttributeMap &attrs)
-      : framework::OperatorBase(type, inputs, outputs, attrs) {}
-
- private:
-  void ExecuteOnThread(framework::Executor *executor,
-                       framework::BlockDesc *block,
-                       framework::Scope *scope) const {
-    framework::ProgramDesc *program = block->Program();
-    executor->Run(*program, scope, block->ID(), false /*create_local_scope*/);
-  }
-
-  void RunImpl(const framework::Scope &scope,
-               const platform::Place &dev_place) const override {
-    /*
-     * Determine the global scope. Create a new child scope.
-     * Within the child scope, add all the local variables relevant
-     * to that scope.
-     *
-     * Now go through all the inputs to the op to ensure that
-     * all of them are in the newly created scope. This is important
-     * to ensure that they don't get destroyed when the parent scope
-     * is deleted.
-     * */
-
-    // TODO(varunarora): Consider moving this root scope lookup to scope.h.
-    const framework::Scope *root_scope = &scope;
-    const framework::Scope *parent_scope = root_scope->parent();
-
-    while (parent_scope != nullptr) {
-      root_scope = parent_scope;
-      parent_scope = parent_scope->parent();
-    }
-
-    framework::BlockDesc *block = Attr<framework::BlockDesc *>(kBlock);
-    framework::Executor executor(dev_place);
-    framework::Scope &new_scope = root_scope->NewScope();
-
-    for (auto &var : block->AllVars()) {
-      new_scope.Var(var->Name());
-    }
-
-    auto &inputs = Inputs(kX);
-    for (size_t i = 0; i < inputs.size(); i++) {
-      PADDLE_ENFORCE_NOT_NULL(new_scope.FindVar(inputs.at(i)),
-                              "All variables used in the go block "
-                              "should be created in the global scope");
-    }
-
-    // Now execute the go op with the newly created scope.
-    std::thread go_thread([dev_place, block, &new_scope, this]() {
-      framework::Executor executor(dev_place);
-      ExecuteOnThread(&executor, block, &new_scope);
-    });
-    go_thread.detach();
-  }
-};
-
-class GoOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddInput(kX,
-             "A set of variables, which are required by operators inside the "
-             "block of Go Op.")
-        .AsDuplicable();
-    AddAttr<framework::BlockDesc *>(kBlock, "The block inside GoOp");
-    AddComment(R"DOC(
-)DOC");
-  }
-};
-
-// TODO(thuan): Look into Gradient Operator for GO_OP
-
-}  // namespace operators
-}  // namespace paddle
-
-REGISTER_OPERATOR(go, paddle::operators::GoOp,
-                  paddle::framework::EmptyGradOpMaker,
-                  paddle::operators::GoOpMaker);
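
Note on the profiling hunks above: platform::RecordEvent (from paddle/fluid/platform/profiler.h) is an RAII guard, so constructing it at the top of each RunImpl opens a profiler range named after the op handle, and the range closes automatically when RunImpl returns on any path. Below is a minimal self-contained sketch of that scope-guard pattern; PushEvent/PopEvent are illustrative stand-ins, not Paddle's actual profiler internals.

#include <iostream>
#include <string>
#include <utility>

// Stand-ins for the profiler hooks; Paddle's real versions live in
// paddle/fluid/platform/profiler.h and record timestamps rather than print.
static void PushEvent(const std::string &name) {
  std::cout << "begin " << name << "\n";
}
static void PopEvent(const std::string &name) {
  std::cout << "end " << name << "\n";
}

// RAII guard: the profiled range spans the guard's lifetime, so every
// return path out of the enclosing function closes the event.
class RecordEvent {
 public:
  explicit RecordEvent(std::string name) : name_(std::move(name)) {
    PushEvent(name_);
  }
  ~RecordEvent() { PopEvent(name_); }
  RecordEvent(const RecordEvent &) = delete;
  RecordEvent &operator=(const RecordEvent &) = delete;

 private:
  std::string name_;
};

int main() {
  // Mirrors the added line: platform::RecordEvent record_event(Name());
  RecordEvent record_event("eager_deletion");
  // ... op handle body runs here, inside the "eager_deletion" range ...
  return 0;
}

With this shape, the single added line per RunImpl is enough to make each op handle appear as a named span in the profiler timeline, with no explicit end-of-event call to forget on early returns.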