/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include <algorithm>
#include "paddle/fluid/framework/data_layout.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/var_type.h"
#include "paddle/fluid/operators/assign_op.h"
#include "paddle/fluid/operators/tensor_formatter.h"

namespace paddle {
namespace operators {
using framework::GradVarName;

#define CLOG std::cout

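// Valid values of the "print_phase" attribute: print only in the forward
// pass, only in the backward pass, or in both.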
const char kForward[] = "FORWARD";
const char kBackward[] = "BACKWARD";
const char kBoth[] = "BOTH";

// TODO(ChunweiYan) there should be some other printers for TensorArray
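// PrintOp behaves as an identity: it copies the input LoDTensor to the
// output and, as a side effect, prints its contents according to the
// operator attributes.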
class PrintOp : public framework::OperatorBase {
 public:
  PrintOp(const std::string &type, const framework::VariableNameMap &inputs,
          const framework::VariableNameMap &outputs,
          const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &place) const override {
    const auto in_var = scope.FindVar(Input("In"));
    auto out_var = scope.FindVar(Output("Out"));

    PADDLE_ENFORCE_NOT_NULL(
        in_var, platform::errors::NotFound("The input:%s not found in scope",
                                           Input("In")));
    PADDLE_ENFORCE_NOT_NULL(
        out_var, platform::errors::NotFound("The output:%s not found in scope",
                                            Output("Out")));

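    // Print the input tensor, then copy its data and LoD to the output so
    // the op acts as a pass-through in the graph.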
    auto &in_tensor = in_var->Get<framework::LoDTensor>();
    framework::LoDTensor *out_tensor =
        out_var->GetMutable<framework::LoDTensor>();

    PrintValue(place, Inputs("In").front(), in_tensor);
    framework::TensorCopy(in_tensor, place, out_tensor);
    out_tensor->set_lod(in_tensor.lod());
  }

  void PrintValue(const platform::Place &place,
                  const std::string &printed_var_name,
                  const framework::LoDTensor &in_tensor) const {
    std::string print_phase = Attr<std::string>("print_phase");
    bool is_forward = Attr<bool>("is_forward");

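    // Skip printing when the current pass does not match the requested
    // print_phase (e.g. a forward run while print_phase is "BACKWARD").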
    if ((is_forward && print_phase == kBackward) ||
        (!is_forward && print_phase == kForward)) {
      return;
    }

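    // Honor "first_n": stop printing after the op has fired first_n times.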
    int first_n = Attr<int>("first_n");
    if (first_n > 0 && ++times_ > first_n) return;

    framework::LoDTensor printed_tensor;
    printed_tensor.set_lod(in_tensor.lod());
    printed_tensor.Resize(in_tensor.dims());

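    // CPU tensors can share their buffer directly; tensors on other devices
    // are copied to host memory before being formatted.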
    if (is_cpu_place(in_tensor.place())) {
      printed_tensor.ShareDataWith(in_tensor);
    } else {
      // copy data to cpu to print
      platform::CPUPlace place;
      TensorCopy(in_tensor, place, &printed_tensor);
    }

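    // Delegate the actual formatting and printing to TensorFormatter,
    // configured from the operator attributes.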
    TensorFormatter formatter;
    const std::string &name =
        Attr<bool>("print_tensor_name") ? printed_var_name : "";
    formatter.SetPrintTensorType(Attr<bool>("print_tensor_type"));
    formatter.SetPrintTensorShape(Attr<bool>("print_tensor_shape"));
    formatter.SetPrintTensorLod(Attr<bool>("print_tensor_lod"));
    formatter.SetPrintTensorLayout(Attr<bool>("print_tensor_layout"));
    formatter.SetSummarize(static_cast<int64_t>(Attr<int>("summarize")));
    formatter.Print(printed_tensor, name, Attr<std::string>("message"));
  }

 private:
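  // How many times this instance has printed; compared against "first_n".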
  mutable int times_{0};
};

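// Declares the op's inputs, outputs and attributes for the op registry.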
class PrintOpProtoAndCheckMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("In", "Input tensor to be displayed.");
    AddOutput("Out", "The output tensor.");
    AddAttr<int>("first_n", "Only log `first_n` number of times.");
    AddAttr<std::string>("message", "A string message to print as a prefix.");
    AddAttr<int>("summarize", "Number of elements printed.");
    AddAttr<bool>("print_tensor_name", "Whether to print the tensor name.")
        .SetDefault(true);
    AddAttr<bool>("print_tensor_type", "Whether to print the tensor's dtype.")
        .SetDefault(true);
    AddAttr<bool>("print_tensor_shape", "Whether to print the tensor's shape.")
        .SetDefault(true);
    AddAttr<bool>("print_tensor_layout",
                  "Whether to print the tensor's layout.")
        .SetDefault(true);
    AddAttr<bool>("print_tensor_lod", "Whether to print the tensor's lod.")
        .SetDefault(true);
    AddAttr<std::string>("print_phase",
                         "(string, default 'FORWARD') Which phase to display "
                         "including 'FORWARD' "
                         "'BACKWARD' and 'BOTH'.")
        .SetDefault(std::string(kBoth))
        .InEnum({std::string(kForward), std::string(kBackward),
                 std::string(kBoth)});
    AddAttr<bool>("is_forward", "Whether is forward or not").SetDefault(true);
    AddComment(R"DOC(
Creates a print op that will print when a tensor is accessed.

Wraps the tensor passed in so that whenever the tensor is accessed,
the message `message` is printed, along with the current value of the
tensor.)DOC");
  }
};

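// The output shares both its shape and its LoD with the input.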
class PrintOpInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *ctx) const override {
    VLOG(10) << "PrintOpInferShape";
    OP_INOUT_CHECK(ctx->HasInput("In"), "Input", "In", "Print");
    OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out", "Print");
    ctx->ShareDim("In", /*->*/ "Out");
    ctx->ShareLoD("In", /*->*/ "Out");
  }
};

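// The output variable keeps the same variable type as the input.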
class PrintOpVarTypeInference : public framework::VarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext *ctx) const override {
    ctx->SetOutputType("Out", ctx->GetInputType("In"));
  }
};

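// The "gradient" of print is another print op that runs in the backward
// pass: it prints the incoming gradient of Out and forwards it unchanged
// to the gradient of In.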
template <typename T>
class PrintOpGradientMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

  void Apply(GradOpPtr<T> op_desc_ptr) const override {
    op_desc_ptr->SetType("print");
    op_desc_ptr->SetInput("In", this->OutputGrad("Out"));
    op_desc_ptr->SetOutput("Out", this->InputGrad("In"));
    op_desc_ptr->SetAttrMap(this->Attrs());
    op_desc_ptr->SetAttr("is_forward", false);
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

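// No separate grad op class is registered: the gradient maker emits another
// print op with is_forward set to false.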
REGISTER_OPERATOR(print, ops::PrintOp, ops::PrintOpProtoAndCheckMaker,
                  ops::PrintOpGradientMaker<paddle::framework::OpDesc>,
                  ops::PrintOpGradientMaker<paddle::imperative::OpBase>,
                  ops::PrintOpInferShape, ops::PrintOpVarTypeInference);