/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include <algorithm>
#include <ctime>
#include <iostream>
#include <string>
#include <typeindex>
#include <vector>
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/var_type.h"

namespace paddle {
namespace operators {
using framework::GradVarName;

#define CLOG std::cout

const char kForward[] = "FORWARD";
const char kBackward[] = "BACKWARD";
const char kBoth[] = "BOTH";

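// Formats and prints a single tensor dump: an optional message, name, shape,
// dtype, LoD and the (possibly summarized) data, all streamed to CLOG when
// the struct is invoked with the element count.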
struct Formater {
  std::string message;
  std::string name;
  std::vector<int> dims;
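  // typeid(const char) serves as the "unset" sentinel; PrintDtype() prints
  // nothing for it.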
  std::type_index dtype{typeid(const char)};
  framework::LoD lod;
  int summarize;
  void *data{nullptr};

  void operator()(size_t size) {
    PrintMessage();
    PrintName();
    PrintDims();
    PrintDtype();
    PrintLod();
    PrintData(size);
  }

 private:
  void PrintMessage() { CLOG << std::time(nullptr) << "\t" << message << "\t"; }
  void PrintName() {
    if (!name.empty()) {
      CLOG << "Tensor[" << name << "]" << std::endl;
    }
  }
  void PrintDims() {
    if (!dims.empty()) {
      CLOG << "\tshape: [";
      for (auto i : dims) {
        CLOG << i << ",";
      }
      CLOG << "]" << std::endl;
    }
  }
  void PrintDtype() {
    if (!framework::IsType<const char>(dtype)) {
      CLOG << "\tdtype: " << dtype.name() << std::endl;
    }
  }
  void PrintLod() {
    if (!lod.empty()) {
      CLOG << "\tLoD: [";
      for (auto level : lod) {
        CLOG << "[ ";
        for (auto i : level) {
          CLOG << i << ",";
        }
        CLOG << " ]";
      }
      CLOG << "]" << std::endl;
    }
  }

  void PrintData(size_t size) {
    PADDLE_ENFORCE_NOT_NULL(data);
    // print float
    if (framework::IsType<const float>(dtype)) {
      Display<float>(size);
    } else if (framework::IsType<const double>(dtype)) {
      Display<double>(size);
    } else if (framework::IsType<const int>(dtype)) {
      Display<int>(size);
    } else if (framework::IsType<const int64_t>(dtype)) {
      Display<int64_t>(size);
    } else if (framework::IsType<const bool>(dtype)) {
      Display<bool>(size);
    } else {
      CLOG << "\tdata: unprintable type: " << dtype.name() << std::endl;
    }
  }

  template <typename T>
  void Display(size_t size) {
    auto *d = reinterpret_cast<T *>(data);
    CLOG << "\tdata: ";
    if (summarize != -1) {
      summarize = std::min(size, static_cast<size_t>(summarize));
      for (int i = 0; i < summarize; i++) {
        CLOG << d[i] << ",";
      }
    } else {
      for (size_t i = 0; i < size; i++) {
        CLOG << d[i] << ",";
      }
    }
    CLOG << std::endl;
  }
};

// TODO(ChunweiYan) there should be some other printers for TensorArray
class TensorPrintOp : public framework::OperatorBase {
 public:
  TensorPrintOp(const std::string &type,
                const framework::VariableNameMap &inputs,
                const framework::VariableNameMap &outputs,
                const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

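  // Copying a print op is not supported; the copy constructor always throws.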
  TensorPrintOp(const TensorPrintOp &o)
      : framework::OperatorBase(
            static_cast<const framework::OperatorBase &>(o)) {
    PADDLE_THROW("Not implemented.");
  }

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &place) const override {
    const framework::Variable *in_var_ptr = scope.FindVar(Input("In"));
    std::string printed_var_name = Inputs("In").front();

    PADDLE_ENFORCE_NOT_NULL(in_var_ptr);

    auto &in_tensor = in_var_ptr->Get<framework::LoDTensor>();

    std::string print_phase = Attr<std::string>("print_phase");
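    // Only print in the phase requested via `print_phase`: a forward
    // instance is skipped when only BACKWARD was asked for, and vice versa.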
    bool is_forward = Attr<bool>("is_forward");

    if ((is_forward && print_phase == kBackward) ||
        (!is_forward && print_phase == kForward)) {
      return;
    }

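    // Honor `first_n`: stop printing after the first `first_n` invocations.
    // `times_` is mutable because RunImpl() is const.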
    int first_n = Attr<int>("first_n");
    if (first_n > 0 && ++times_ > first_n) return;

    framework::LoDTensor printed_tensor;
    printed_tensor.set_lod(in_tensor.lod());
    printed_tensor.Resize(in_tensor.dims());

    if (platform::is_cpu_place(in_tensor.place())) {
      printed_tensor.ShareDataWith(in_tensor);
    } else {
      // copy data to cpu to print
      platform::CPUPlace place;
      framework::TensorCopy(in_tensor, place, &printed_tensor);
    }

    Formater formater;
    formater.message = Attr<std::string>("message");
    if (Attr<bool>("print_tensor_name")) {
      formater.name = printed_var_name;
    }
    if (Attr<bool>("print_tensor_type")) {
      formater.dtype = printed_tensor.type();
    }
    if (Attr<bool>("print_tensor_shape")) {
      auto &dims = printed_tensor.dims();
      formater.dims.resize(dims.size());
      for (int i = 0; i < dims.size(); ++i) formater.dims[i] = dims[i];
    }
    if (Attr<bool>("print_tensor_lod")) {
      formater.lod = printed_tensor.lod();
    }
    formater.summarize = Attr<int>("summarize");
    formater.data = reinterpret_cast<void *>(printed_tensor.data<void>());
    formater(printed_tensor.numel());
  }

 private:
  mutable int times_{0};
};

class PrintOpProtoAndCheckMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("In", "Input tensor to be displayed.");
    AddAttr<int>("first_n", "Only log `first_n` number of times.");
    AddAttr<std::string>("message", "A string message to print as a prefix.");
    AddAttr<int>("summarize", "Number of elements printed.");
    AddAttr<bool>("print_tensor_name", "Whether to print the tensor name.");
    AddAttr<bool>("print_tensor_type", "Whether to print the tensor's dtype.");
    AddAttr<bool>("print_tensor_shape", "Whether to print the tensor's shape.");
    AddAttr<bool>("print_tensor_lod", "Whether to print the tensor's lod.");
    AddAttr<std::string>("print_phase",
                         "(string, default 'FORWARD') Which phase to display "
                         "including 'FORWARD' "
                         "'BACKWARD' and 'BOTH'.")
210 211 212
        .SetDefault(std::string(kBoth))
        .InEnum({std::string(kForward), std::string(kBackward),
                 std::string(kBoth)});
    AddAttr<bool>("is_forward", "Whether is forward or not").SetDefault(true);
    AddComment(R"DOC(
Creates a print op that will print when a tensor is accessed.

Wraps the tensor passed in so that whenever the tensor is accessed,
the message `message` is printed, along with the current value of the
input tensor.)DOC");
  }
};
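
// A minimal sketch of how a `print` op description can be assembled at the
// framework level, using the same framework::OpDesc setters that
// PrintOpGradientMaker uses below. The variable name "x" and the attribute
// values are illustrative assumptions, not part of this file:
//
//   framework::OpDesc desc;
//   desc.SetType("print");
//   desc.SetInput("In", {"x"});
//   desc.SetAttr("first_n", 10);                  // stop after 10 prints
//   desc.SetAttr("message", std::string("x: "));  // prefix for each dump
//   desc.SetAttr("summarize", 5);                 // show at most 5 elements
//   desc.SetAttr("print_phase", std::string(kForward));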

class InferShapeForward : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *context) const override {
    PADDLE_ENFORCE(context->HasInput("In"), "Input(In) should not be null.");
  }
};

class PrintOpGradientMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

  std::unique_ptr<framework::OpDesc> Apply() const override {
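    // The "gradient" of print is just another print op whose input is
    // In@GRAD; `is_forward` is set to false so `print_phase` can filter it.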
    auto *op_desc_ptr = new framework::OpDesc();
    op_desc_ptr->SetType("print");
    op_desc_ptr->SetInput("In", InputGrad("In"));
    op_desc_ptr->SetAttrMap(Attrs());
    op_desc_ptr->SetAttr("is_forward", false);
    return std::unique_ptr<framework::OpDesc>(op_desc_ptr);
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

REGISTER_OPERATOR(print, ops::TensorPrintOp, ops::PrintOpProtoAndCheckMaker,
                  ops::PrintOpGradientMaker, ops::InferShapeForward);