// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/platform/device/ipu/ipu_compiler.h"

#include <popart/adam.hpp>
#include <popart/adaptive.hpp>
#include <popart/optimizer.hpp>
#include <popart/sgd.hpp>

#include "paddle/fluid/framework/ir/graph_helper.h"
#include "paddle/fluid/platform/device/ipu/ipu_utils.h"

namespace paddle {
namespace platform {
namespace ipu {

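// Helpers that translate optimizer-mode strings carried on Paddle op
// attributes into the corresponding popart enum values. An unrecognized
// string raises InvalidArgument rather than silently defaulting.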
popart::AdamMode AdamModeFromStr(const std::string& str,
                                 const bool& use_no_bias_optimizer) {
  if (str == "adam") {
    if (!use_no_bias_optimizer)
      return popart::AdamMode::Adam;
    else
      return popart::AdamMode::AdamNoBias;
  } else if (str == "adamax") {
    return popart::AdamMode::AdaMax;
  } else if (str == "lamb") {
    if (!use_no_bias_optimizer)
      return popart::AdamMode::Lamb;
    else
      return popart::AdamMode::LambNoBias;
  } else {
    PADDLE_THROW(platform::errors::InvalidArgument(
        "Uknown AdamMode: %s, AdamMode must be one of these values: adam, "
        "adamax or lamb",
        str));
  }
}

popart::AdaptiveMode AdaptiveModeFromStr(const std::string& str) {
  if (str == "adadelta") {
    return popart::AdaptiveMode::AdaDelta;
  } else if (str == "adagrad") {
    return popart::AdaptiveMode::AdaGrad;
  } else if (str == "rmsprop") {
    return popart::AdaptiveMode::RMSProp;
  } else if (str == "centered_rmsprop") {
    return popart::AdaptiveMode::CenteredRMSProp;
  } else {
    PADDLE_THROW(platform::errors::InvalidArgument(
        "Uknown AdaptiveMode: %s, AdaptiveMode must be one of these values: "
        "adadelta, adagrad, rmsprop or centered_rmsprop",
        str));
  }
}

popart::WeightDecayMode WeightDecayModeFromStr(const std::string& str) {
  if (str == "decay") {
    return popart::WeightDecayMode::Decay;
  } else if (str == "l2_regularization") {
    return popart::WeightDecayMode::L2Regularization;
  } else {
    PADDLE_THROW(platform::errors::InvalidArgument(
        "Uknown WeightDecayMode: %s, WeightDecayMode must be decay or "
        "l2_regularization",
        str));
  }
}

popart::DataType DataTypeFromStr(const std::string& str) {
  if (str == "FLOAT") {
    return popart::DataType::FLOAT;
  } else if (str == "FLOAT16") {
    return popart::DataType::FLOAT16;
  } else {
    PADDLE_THROW(
        platform::errors::Unimplemented("Unsupported DataType: %s", str));
  }
}

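// Attribute extraction helpers. Each returns a value-initialized result
// (or an empty nonstd::optional) when the attribute is absent, so op
// builders can read attributes without checking HasAttr first.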
template <typename T>
T GetAttrAllowNull(std::string attr, OpDesc* op_desc) {
  if (op_desc->HasAttr(attr)) {
    return BOOST_GET_CONST(T, op_desc->GetAttr(attr));
  } else {
    return {};
  }
}

template <typename T>
nonstd::optional<T> GetOptAttrAllowNull(std::string attr, OpDesc* op_desc) {
  if (op_desc->HasAttr(attr)) {
    return BOOST_GET_CONST(T, op_desc->GetAttr(attr));
  } else {
    return {};
  }
}

template <typename TI, typename TO>
TO GetCastSigAttrAllowNull(std::string attr, OpDesc* op_desc) {
  if (op_desc->HasAttr(attr)) {
    auto x = BOOST_GET_CONST(TI, op_desc->GetAttr(attr));
    return static_cast<TO>(x);
  } else {
    return {};
  }
}

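// GraphHelper caches what the compiler repeatedly needs from the Paddle
// graph: ops in topological order, node lookup by id, variable lookup by
// name, and variable ids in ascending order for deterministic traversal.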
GraphHelper::GraphHelper(const Graph* g) {
  graph = g;
  sorted_ops = framework::ir::TopologySortOperations(*g);
  for (auto* node : g->Nodes()) {
    nodes_id_map[node->id()] = node;
    if (node->IsVar()) {
      vars_name_map[node->Name()] = node;
      sorted_vars_id.push_back(node->id());
    }
  }
  std::sort(sorted_vars_id.begin(), sorted_vars_id.end());
}

Compiler::Compiler() { RegisterOpFunc(); }

Compiler::~Compiler() {
  builder_.reset();
  resources_.reset();
}

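// Create the popart builder, the shared compiler resources and the graph
// helper, then scan matmul ops to decide the AMP policy (see below).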
void Compiler::Prepare(const Graph* graph) {
  builder_ = popart::Builder::create();
  resources_ = std::make_unique<CompilerResources>();
  graph_helper_ = std::make_unique<GraphHelper>(graph);
  // Decide set_amp_for_all_: if any matmul op carries its own available
  // memory attribute, AMP is applied per op rather than globally.
  for (auto* node : graph_helper_->sorted_ops) {
    auto* op_desc = node->Op();
    auto op_type = op_desc->Type();
    if (op_type == "popart_matmul") {
      if (op_desc->HasAttr(sAvailMemAttribute)) {
        set_amp_for_all_ = false;
        return;
      }
    }
  }
}

void Compiler::RegisterOpFunc() {
  VLOG(10) << "enter Compiler::RegisterOpFunc";
#define INT_VEC std::vector<std::int64_t>
#define INT32_VEC std::vector<std::int32_t>
#define FLOAT_VEC std::vector<float>
#define FLOAT float
#define INT std::int64_t
#define INT32 std::int32_t
#define BOOL bool
#define STRING std::string
#define STRING_VEC std::vector<std::string*>
#define NONE

#define ARG(Type, Name) , GetAttrAllowNull<Type>(#Name, op_desc)
#define OPT_ARG(Type, Name) , GetOptAttrAllowNull<Type>(#Name, op_desc)
#define SIG_ARG(TI, TO, Name) , GetCastSigAttrAllowNull<TI, TO>(#Name, op_desc)
#define POPART_CONST_ARG(Name) , const PopartConstant& Name
#define HOST_SIDE_CONST_ARG(Name) , const HostSideConstant& Name
#define POPART_ATTRIB_VEC_ARG(Name)
#define BODY_ARG(Name) NONE

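  // Each OP_DECL below expands to one name_function_ entry that maps a
  // Paddle op type to a lambda emitting the matching popart op. As an
  // illustration only (a hypothetical entry; the real list is generated
  // into supported_ops_autogen.h):
  //
  //   OP_DECL(popart_relu, aiOnnxOpset.relu, NONE)
  //
  // would register a lambda that collects the op's inputs, pushes the
  // name scope, calls builder_->aiOnnxOpset11().relu(inputs, debug_context),
  // applies IPU index/stage, AMP and serialization attributes, and then
  // records the produced tensor ids via InsertTensors.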
  name_function_ = {
#define OP_DECL(FuncName, OnnxImpl, Args)                     \
  {#FuncName, [&](OpDesc* op_desc) {                          \
     auto op_type = op_desc->Type();                          \
     VLOG(10) << "build op:" << op_type << " args " << #Args; \
     auto inputs = GetOpInputs(op_desc);                      \
     auto output_names = GetOpOutputs(op_desc);               \
     auto debug_context = BuildDebugContext(op_desc);         \
     auto aiGraphcoreOpset = builder_->aiGraphcoreOpset1();   \
     auto aiOnnxOpset = builder_->aiOnnxOpset11();            \
     PushNameScope(op_desc);                                  \
     auto output_ids = OnnxImpl(inputs Args, debug_context);  \
     PopNameScope(op_desc);                                   \
     SetIpuIndexStage(output_ids, op_desc);                   \
     SetAMPAttributes(output_ids, op_desc);                   \
     SetSerializeAttributes(output_ids, op_desc);             \
     InsertTensors(output_names, output_ids);                 \
   }},  // NOLINT
#include "paddle/fluid/platform/device/ipu/supported_ops_autogen.h"
#include "paddle/fluid/platform/device/ipu/supported_ops_custom.h"
  };

#undef OP_DECL
#undef BODY_ARG
#undef POPART_ATTRIB_VEC_ARG
#undef HOST_SIDE_CONST_ARG
#undef POPART_CONST_ARG
#undef SIG_ARG
#undef OPT_ARG
#undef ARG
#undef NONE
#undef STRING_VEC
#undef STRING
#undef BOOL
#undef INT32
#undef INT
#undef FLOAT
#undef FLOAT_VEC
#undef INT32_VEC
#undef INT_VEC
}

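// Register every feed as a popart graph input and record the returned
// tensor id so later ops can reference the fed variable by name.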
void Compiler::InitInputs(const std::vector<std::string>& feed_list) {
  for (const auto& feed_name : feed_list) {
    auto* node = graph_helper_->vars_name_map[feed_name];
    auto* var_desc = node->Var();
    VLOG(10) << "feed_name= " << var_desc->Name();
    auto data_type = VarType2PopartType(var_desc->GetDataType());
    popart::TensorInfo input_info{data_type, var_desc->GetShape()};
    VLOG(10) << "popart input_info = " << input_info;
    popart::TensorId tensor_id =
        builder_->addInputTensor(input_info, feed_name);
    VLOG(10) << "popart input tensor id = " << tensor_id;
    resources_->inputs.push_back(tensor_id);
    resources_->tensors.emplace(var_desc->Name(), tensor_id);
  }
}

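// Mark each fetch as a popart graph output; the tensor must already have
// been produced by a lowered op.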
void Compiler::InitOutputs(const std::vector<std::string>& fetch_list) {
  for (const auto& fetch_name : fetch_list) {
    auto tensor = resources_->tensors.find(fetch_name);
    PADDLE_ENFORCE_NE(
        tensor, resources_->tensors.end(),
        platform::errors::NotFound(
            "Output tensor %s is not found, please check the model.",
            fetch_name));
    VLOG(10) << "fetch_name= " << fetch_name;
    VLOG(10) << "popart output tensor id = " << tensor->second;
    builder_->addOutputTensor(tensor->second);
    resources_->outputs.push_back(tensor->second);
  }
}

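// Lower each popart_constant op into a popart constant. The backing host
// tensor lives in a child scope so its buffer remains valid after this
// function returns.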
void Compiler::LowerConstants(const Scope* scope) {
  auto& kid_scope = scope->NewScope();
  VLOG(10) << "enter Compiler::LowerConstants";
  for (auto* node : graph_helper_->sorted_ops) {
    auto* op_desc = node->Op();
    auto op_type = op_desc->Type();
    if (op_type == "popart_constant") {
      auto shape =
          BOOST_GET_CONST(std::vector<int64_t>, op_desc->GetAttr("dims"));
      auto dtype_ = BOOST_GET_CONST(int, op_desc->GetAttr("dtype"));
      auto dtype = PopartType2VarType(OnnxDtype2PopartType(dtype_));
      auto tensor_name = op_desc->Output("__outputs__")[0];
      auto* var = kid_scope.Var(tensor_name);
      VLOG(10) << "lowering constant: " << tensor_name;
      auto* tensor = var->GetMutable<framework::LoDTensor>();
      ConstantOpAttrVisitor visitor(tensor, dtype);
      auto value = op_desc->GetAttr("value");
      boost::apply_visitor(visitor, value);
      auto ddim = phi::make_ddim(shape);
      tensor->Resize(ddim);

      std::unique_ptr<popart::ConstVoidData> const_data;
      popart::TensorInfo tensor_info(PdDataType2PopartType(tensor->dtype()),
                                     shape);
      const_data.reset(new popart::ConstVoidData(tensor->data(), tensor_info));
      PushNameScope(op_desc);
      popart::TensorId result = builder_->aiOnnxOpset11().constant(*const_data);
      PopNameScope(op_desc);
      SetIpuIndexStage(result, op_desc);
      resources_->tensors.emplace(tensor_name, result);
    }
  }
  VLOG(10) << "leave Compiler::LowerConstants";
}

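// Lower persistable variables with no producer (i.e. trained weights)
// into popart initialized inputs. Already-lowered tensors and
// learning-rate variables are skipped.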
void Compiler::LowerWeights(const Scope* scope) {
  VLOG(10) << "enter Compiler::LowerWeights";
  // At this step the graph does not yet contain optimizer-related states
  for (auto id : graph_helper_->sorted_vars_id) {
    auto* node = graph_helper_->nodes_id_map[id];
    if (node->IsVar() && !node->IsCtrlVar() && node->Var()) {
      if (node->Var()->Persistable() && node->inputs.empty()) {
        auto var_name = node->Var()->Name();
        if (resources_->tensors.count(var_name) != 0) {
          VLOG(10) << "found existed one, skip lowering Weight: " << var_name;
          continue;
        }
        if (var_name.rfind("learning_rate", 0) == 0) {
          VLOG(10) << "skip learning_rate_var: " << var_name;
          continue;
        }
        VLOG(10) << "lowering weight: " << var_name;

        auto var = scope->FindVar(var_name);
        if (var) {
          auto tensor = var->Get<framework::LoDTensor>();
          auto dtype = PdDataType2PopartType(tensor.dtype());
          auto shape = std::vector<int64_t>();
          for (int i = 0; i < tensor.dims().size(); ++i) {
            shape.push_back(tensor.dims().at(i));
          }
          popart::TensorInfo tensor_info(dtype, shape);
          popart::ConstVoidData const_data{tensor.data(), tensor_info};
          if (!node->outputs.empty()) {
            auto op_node = node->outputs[0];
            PushNameScope(op_node->Op());
            popart::TensorId result =
                builder_->addInitializedInputTensor(const_data, var_name);
            PopNameScope(op_node->Op());
            resources_->tensors.emplace(var_name, result);
            resources_->weights.push_back(var_name);
          }
        }
      }
    }
  }
  VLOG(10) << "leave Compiler::LowerWeights";
}

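// Lower the remaining ops. Constants and the optimizer are handled in
// dedicated passes; checkpoint outputs, custom ops and print ops need
// special handling, and everything else dispatches through name_function_.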
void Compiler::LowerBody() {
  VLOG(10) << "enter Compiler::LowerBody";
  for (auto* node : graph_helper_->sorted_ops) {
    auto* op_desc = node->Op();
    auto op_type = op_desc->Type();
    VLOG(10) << "lowering op: " << op_type;

    if (op_type == "popart_constant") {
      // pass
    } else if (op_type == "popart_optimizer") {
      // pass
    } else if (op_type == "popart_checkpointoutput") {
      auto inputs = GetOpInputs(op_desc);
      auto outputs = GetOpOutputs(op_desc);
      PushNameScope(op_desc);
      auto output_ids = builder_->checkpointOutput(inputs);
      PopNameScope(op_desc);
      SetIpuIndexStage(output_ids, op_desc);
      InsertTensors(outputs, output_ids);
    } else if (op_type == "popart_custom_op") {
      auto inputs = GetOpInputs(op_desc);
      auto outputs = GetOpOutputs(op_desc);
      auto debug_context = BuildDebugContext(op_desc);
      auto attributes = std::map<std::string, popart::any>{};
      for (auto& attr : op_desc->GetAttrMap()) {
        CustomOpAttrVisitor visitor(&attributes, attr.first);
        boost::apply_visitor(visitor, attr.second);
      }
      auto __op_type =
          BOOST_GET_CONST(std::string, op_desc->GetAttr("__op_type"));
      VLOG(10) << "Build graph from custom op: " << __op_type;
      auto it = custom_ops_.find(__op_type);
      PADDLE_ENFORCE_NE(
          it, custom_ops_.end(),
          platform::errors::NotFound("Custom op %s is not registered.",
                                     __op_type));
      PushNameScope(op_desc);
      auto output_ids =
          builder_->customOp(it->second.popart_op, it->second.popart_op.version,
                             inputs, outputs.size(), attributes, debug_context);
      PopNameScope(op_desc);
      SetIpuIndexStage(output_ids, op_desc);
      InsertTensors(outputs, output_ids);
    } else if (op_type == "popart_printtensor") {
      auto inputs = GetOpInputs(op_desc);
      auto outputs = GetOpOutputs(op_desc);
      auto debug_context = BuildDebugContext(op_desc);
      auto print_gradient =
          BOOST_GET_CONST(int64_t, op_desc->GetAttr("print_gradient"));
      auto title = BOOST_GET_CONST(std::string, op_desc->GetAttr("title"));
      PushNameScope(op_desc);
      auto output_ids = builder_->aiGraphcoreOpset1().printtensor(
          inputs, print_gradient, debug_context, title);
      PopNameScope(op_desc);
      SetIpuIndexStage(output_ids, op_desc);
      InsertTensors(outputs, output_ids);
    } else {
      auto itr = name_function_.find(op_type);
      if (itr != name_function_.end()) {
        itr->second(node->Op());
      } else {
        PADDLE_THROW(platform::errors::NotFound(
            "%s is not registered, please check for unsupported operators for "
            "running on IPU",
            op_type));
      }
    }
  }
  VLOG(10) << "leave Compiler::LowerBody";
}

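// Translate the popart_optimizer op into popart optimizer instances:
// optimizer_fn rebuilds the training optimizer for a given learning rate
// (enabling lr scheduling), while eval_optimizer is a zero-lr twin used
// when the graph runs in evaluation mode.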
void Compiler::LowerOptimizer(const Scope* scope) {
  for (auto* node : graph_helper_->sorted_ops) {
    auto* op_desc = node->Op();
    auto op_type = op_desc->Type();
    if (op_type == "popart_optimizer") {
      auto raw_type =
          BOOST_GET_CONST(std::string, op_desc->GetAttr("raw_type"));
      resources_->optimizer_type = raw_type;
      auto loss_var =
          BOOST_GET_CONST(std::string, op_desc->GetAttr("loss_var"));
      resources_->loss_var = resources_->tensors[loss_var];
      resources_->with_lr_sched =
          BOOST_GET_CONST(bool, op_desc->GetAttr("with_lr_sched"));
      if (op_desc->HasAttr("lr_var")) {
        auto lr_var = BOOST_GET_CONST(std::string, op_desc->GetAttr("lr_var"));
        resources_->lr_var = lr_var;
        resources_->lr = GetSingleVarFromScope<float>(scope, lr_var);
      } else {
        // adadelta has no lr
        resources_->lr = 0.01f;
        resources_->with_lr_sched = false;
      }
      VLOG(10) << "Set initial lr: " << resources_->lr;

      // Get the type of optimizer
      auto type = BOOST_GET_CONST(std::string, op_desc->GetAttr("type"));
      // Set weight decay by tensor names for Lamb
      auto weight_decay_vars = BOOST_GET_CONST(
          std::vector<std::string>, op_desc->GetAttr("weight_decay_vars"));
      auto weight_decay_values = BOOST_GET_CONST(
          std::vector<float>, op_desc->GetAttr("weight_decay_values"));
      // Get the maximum permissible value for gradient clipping
      std::vector<popart::ClipNormSettings> clip_norm_settings = {};
      if (op_desc->HasAttr("clip_norm")) {
        auto clip_norm = BOOST_GET_CONST(float, op_desc->GetAttr("clip_norm"));
        clip_norm_settings.push_back(
            popart::ClipNormSettings::clipAllWeights(clip_norm));
        VLOG(10) << "Set the global gradient clipping with the maximum "
                    "permissible value: "
                 << clip_norm;
      }

      // Values from ipu_strategy
      auto loss_scaling = ipu_strategy_->loss_scaling;
      auto accl1_type = DataTypeFromStr(ipu_strategy_->accl1_type);
      auto accl2_type = DataTypeFromStr(ipu_strategy_->accl2_type);
      auto accl3_type = DataTypeFromStr(ipu_strategy_->accl3_type);

      if (type == "sgd") {
        auto weight_decay =
            BOOST_GET_CONST(float, op_desc->GetAttr("weight_decay"));
        auto momentum = BOOST_GET_CONST(float, op_desc->GetAttr("momentum"));
        resources_->optimizer_fn = [=](float lr) {
          return std::make_unique<popart::SGD>(
              popart::OptimizerValue(lr, false),
              popart::OptimizerValue(weight_decay, false),
              popart::OptimizerValue(momentum, true),
              popart::SGD::getUnsetDampening(),
              popart::SGD::getUnsetVelocityScaling(),
              popart::OptimizerValue(loss_scaling, true), clip_norm_settings);
        };
        resources_->eval_optimizer = std::make_unique<popart::SGD>(
            popart::OptimizerValue(0.0, false),
            popart::OptimizerValue(0.0, false),
            popart::OptimizerValue(0.0, true), popart::SGD::getUnsetDampening(),
            popart::SGD::getUnsetVelocityScaling(),
            popart::OptimizerValue(loss_scaling, true), clip_norm_settings);
      } else if (type == "adam") {
        auto weight_decay =
            BOOST_GET_CONST(float, op_desc->GetAttr("weight_decay"));
        auto beta1 = BOOST_GET_CONST(float, op_desc->GetAttr("beta1"));
        auto beta2 = BOOST_GET_CONST(float, op_desc->GetAttr("beta2"));
        auto eps = BOOST_GET_CONST(float, op_desc->GetAttr("eps"));
        auto mwn = ipu_strategy_->max_weight_norm;
        VLOG(10) << "set max_weight_norm: " << mwn;
        auto adam_mode_ =
            BOOST_GET_CONST(std::string, op_desc->GetAttr("adam_mode"));
        auto adam_mode =
            AdamModeFromStr(adam_mode_, ipu_strategy_->use_no_bias_optimizer);
        auto weight_decay_mode_ = ipu_strategy_->weight_decay_mode;
        auto scaled_optimizer_state_ = ipu_strategy_->scaled_optimizer_state;
        if (weight_decay_mode_.empty()) {
          weight_decay_mode_ = BOOST_GET_CONST(
              std::string, op_desc->GetAttr("weight_decay_mode"));
        }
        auto weight_decay_mode = WeightDecayModeFromStr(weight_decay_mode_);
        resources_->optimizer_fn = [=](float lr) {
          if (adam_mode == popart::AdamMode::Lamb ||
              adam_mode == popart::AdamMode::LambNoBias) {
            const std::map<std::string, std::pair<float, bool>>
                optimizer_value = {{"defaultLearningRate", {lr, false}},
                                   {"defaultBeta1", {beta1, false}},
                                   {"defaultBeta2", {beta2, false}},
                                   {"defaultEps", {eps, true}},
                                   {"lossScaling", {loss_scaling, true}},
                                   {"defaultMaxWeightNorm", {mwn, true}}};
            auto optimizer_instance = std::make_unique<popart::Adam>(
                optimizer_value, adam_mode, weight_decay_mode,
                popart::DataType::UNDEFINED, accl1_type, accl2_type,
                clip_norm_settings, scaled_optimizer_state_);
            for (size_t i = 0; i < weight_decay_vars.size(); i++) {
              optimizer_instance->insertSpecific(
                  weight_decay_vars[i],
                  {{"weightDecay", {weight_decay_values[i], false}}});
              VLOG(10) << "Set Tensor " << weight_decay_vars[i]
                       << " weight decay as " << weight_decay_values[i];
            }
            return optimizer_instance;
          } else {
            return std::make_unique<popart::Adam>(
                popart::OptimizerValue(lr, false),
                popart::OptimizerValue(weight_decay, false),
                popart::OptimizerValue(beta1, false),
                popart::OptimizerValue(beta2, false),
                popart::OptimizerValue(eps, true),
                popart::OptimizerValue(loss_scaling, true),
                popart::OptimizerValue(mwn, true), adam_mode, weight_decay_mode,
                popart::DataType::UNDEFINED, accl1_type, accl2_type,
                clip_norm_settings, scaled_optimizer_state_);
          }
        };
        if (adam_mode == popart::AdamMode::Lamb) {
          const std::map<std::string, std::pair<float, bool>> optimizer_value =
              {{"defaultLearningRate", {0.0, false}},
               {"defaultBeta1", {beta1, false}},
               {"defaultBeta2", {beta2, false}},
               {"defaultEps", {eps, true}},
               {"lossScaling", {loss_scaling, true}},
               {"defaultMaxWeightNorm", {mwn, true}}};
          auto eval_optimizer = std::make_unique<popart::Adam>(
              optimizer_value, adam_mode, weight_decay_mode,
              popart::DataType::UNDEFINED, popart::DataType::FLOAT,
              popart::DataType::FLOAT, clip_norm_settings,
              scaled_optimizer_state_);
          for (size_t i = 0; i < weight_decay_vars.size(); i++) {
            eval_optimizer->insertSpecific(weight_decay_vars[i],
                                           {{"weightDecay", {0.0, false}}});
          }
          resources_->eval_optimizer = std::move(eval_optimizer);
        } else if (adam_mode == popart::AdamMode::LambNoBias) {
          const std::map<std::string, std::pair<float, bool>> optimizer_value =
              {{"defaultLearningRate", {0.0, false}},
               {"defaultBeta1", {1.0, false}},
               {"defaultBeta2", {1.0, false}},
               {"defaultEps", {eps, true}},
               {"lossScaling", {loss_scaling, true}},
               {"defaultMaxWeightNorm", {mwn, true}}};
          auto eval_optimizer = std::make_unique<popart::Adam>(
              optimizer_value, adam_mode, weight_decay_mode,
              popart::DataType::UNDEFINED, popart::DataType::FLOAT,
              popart::DataType::FLOAT, clip_norm_settings,
              scaled_optimizer_state_);
          for (size_t i = 0; i < weight_decay_vars.size(); i++) {
            eval_optimizer->insertSpecific(weight_decay_vars[i],
                                           {{"weightDecay", {0.0, false}}});
          }
          resources_->eval_optimizer = std::move(eval_optimizer);
        } else {
          resources_->eval_optimizer = std::make_unique<popart::Adam>(
              popart::OptimizerValue(0.0, false),
              popart::OptimizerValue(0.0, false),
              popart::OptimizerValue(beta1, false),
              popart::OptimizerValue(beta2, false),
              popart::OptimizerValue(eps, true),
              popart::OptimizerValue(loss_scaling, true),
              popart::OptimizerValue(mwn, true), adam_mode, weight_decay_mode,
              popart::DataType::UNDEFINED, popart::DataType::FLOAT,
              popart::DataType::FLOAT, clip_norm_settings,
              scaled_optimizer_state_);
        }
      } else if (type == "adaptive") {
        auto alpha = BOOST_GET_CONST(float, op_desc->GetAttr("alpha"));
        auto momentum = BOOST_GET_CONST(float, op_desc->GetAttr("momentum"));
        auto eps = BOOST_GET_CONST(float, op_desc->GetAttr("eps"));
        auto weight_decay =
            BOOST_GET_CONST(float, op_desc->GetAttr("weight_decay"));
        auto adaptive_mode_ =
            BOOST_GET_CONST(std::string, op_desc->GetAttr("adaptive_mode"));
        auto adaptive_mode = AdaptiveModeFromStr(adaptive_mode_);
        auto weight_decay_mode_ = ipu_strategy_->weight_decay_mode;
        if (weight_decay_mode_.empty()) {
          weight_decay_mode_ = BOOST_GET_CONST(
              std::string, op_desc->GetAttr("weight_decay_mode"));
        }
        auto weight_decay_mode = WeightDecayModeFromStr(weight_decay_mode_);
        resources_->optimizer_fn = [=](float lr) {
          return std::make_unique<popart::Adaptive>(
              popart::OptimizerValue(lr, false),
              popart::OptimizerValue(weight_decay, false),
              popart::OptimizerValue(alpha, true),
              popart::OptimizerValue(momentum, true),
              popart::OptimizerValue(eps, true),
              popart::OptimizerValue(loss_scaling, true), adaptive_mode,
              weight_decay_mode, popart::DataType::UNDEFINED, accl1_type,
              accl2_type, accl3_type);
        };
        resources_->eval_optimizer = std::make_unique<popart::Adaptive>(
            popart::OptimizerValue(0.0, false),
            popart::OptimizerValue(0.0, false),
            popart::OptimizerValue(alpha, true),
            popart::OptimizerValue(momentum, true),
            popart::OptimizerValue(eps, true),
            popart::OptimizerValue(loss_scaling, true), adaptive_mode,
            weight_decay_mode, popart::DataType::UNDEFINED,
            popart::DataType::FLOAT, popart::DataType::FLOAT,
            popart::DataType::UNDEFINED);
      } else {
        PADDLE_THROW(platform::errors::Unimplemented(
            "optimizer %s is not implemented", type));
      }
    }
  }
}

void Compiler::InsertTensors(const std::vector<std::string>& output_names,
                             const std::vector<std::string>& tensor_ids) {
  PADDLE_ENFORCE_EQ(output_names.size(), tensor_ids.size(),
                    platform::errors::Fatal("InsertTensors size mismatch"));
  for (size_t i = 0; i < tensor_ids.size(); i++) {
    resources_->tensors.emplace(output_names[i], tensor_ids[i]);
  }
}

void Compiler::InsertTensors(const std::vector<std::string>& output_names,
                             const std::string& tensor_id) {
  PADDLE_ENFORCE_EQ(output_names.size(), 1,
                    platform::errors::Fatal("InsertTensors size mismatch"));
  resources_->tensors.emplace(output_names[0], tensor_id);
}

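// Stamp an op's output tensors with the virtual graph (IPU index) and
// pipeline stage attributes when the Paddle op carries them.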
void Compiler::SetIpuIndexStage(const std::vector<std::string>& tensor_ids,
                                const OpDesc* op_desc) {
  VLOG(10) << "enter Compiler::SetIpuIndexStage";
  auto tensor_ids_set =
      std::set<std::string>(tensor_ids.begin(), tensor_ids.end());

  if (op_desc->HasAttr(sIpuIndexAttr)) {
    auto ipu_index = BOOST_GET_CONST(int, op_desc->GetAttr(sIpuIndexAttr));
    builder_->virtualGraph(tensor_ids_set, ipu_index);
    VLOG(10) << "set " << sIpuIndexAttr << " = " << ipu_index
             << " for op: " << op_desc->Type();
    if (op_desc->HasAttr(sIpuStageAttr)) {
      auto ipu_stage = BOOST_GET_CONST(int, op_desc->GetAttr(sIpuStageAttr));
      builder_->pipelineStage(tensor_ids_set, ipu_stage);
      VLOG(10) << "set " << sIpuStageAttr << "= " << ipu_stage
               << " for op: " << op_desc->Type();
    }
  }
  VLOG(10) << "leave Compiler::SetIpuIndexStage";
}

void Compiler::SetIpuIndexStage(const std::string& tensor_id,
                                const OpDesc* op_desc) {
  VLOG(10) << "enter Compiler::SetIpuIndexStage";

  if (op_desc->HasAttr(sIpuIndexAttr)) {
    auto ipu_index = BOOST_GET_CONST(int, op_desc->GetAttr(sIpuIndexAttr));
    builder_->virtualGraph(tensor_id, ipu_index);
    VLOG(10) << "set " << sIpuIndexAttr << " = " << ipu_index
             << " for op: " << op_desc->Type();
    if (op_desc->HasAttr(sIpuStageAttr)) {
      auto ipu_stage = BOOST_GET_CONST(int, op_desc->GetAttr(sIpuStageAttr));
      builder_->pipelineStage(tensor_id, ipu_stage);
      VLOG(10) << "set " << sIpuStageAttr << "= " << ipu_stage
               << " for op: " << op_desc->Type();
    }
  }
  VLOG(10) << "leave Compiler::SetIpuIndexStage";
}

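// Apply the available-memory-proportion (AMP) hint to matmul outputs.
// With set_amp_for_all_, the global value from ipu_strategy_ is used;
// otherwise only matmuls carrying their own attribute are touched.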
void Compiler::SetAMPAttributes(const std::vector<std::string>& tensor_ids,
                                const OpDesc* op_desc) {
  if (op_desc->Type() == "popart_matmul") {
    for (const auto& tensor_id : tensor_ids) {
      SetAMPAttributes(tensor_id, op_desc);
    }
  }
}

void Compiler::SetAMPAttributes(const std::string& tensor_id,
                                const OpDesc* op_desc) {
  VLOG(10) << "enter Compiler::SetAMPAttributes";
  if (op_desc->Type() == "popart_matmul") {
    if (set_amp_for_all_) {
      auto amp = ipu_strategy_->available_memory_proportion;
      if (amp < 0.0f || amp > 1.0f) {
        PADDLE_THROW(platform::errors::InvalidArgument(
            "AvailableMemoryProportion %f is invalid, it should satisfy "
            "0 <= amp <= 1",
            amp));
      }
      if (amp > 0.0f) {
        builder_->setAvailableMemoryProportion(tensor_id, amp);
      }
    } else {
      if (op_desc->HasAttr(sAvailMemAttribute)) {
        auto amp = BOOST_GET_CONST(float, op_desc->GetAttr(sAvailMemAttribute));
        if (amp < 0.0f || amp > 1.0f) {
          PADDLE_THROW(platform::errors::InvalidArgument(
              "AvailableMemoryProportion %f is invalid, it should satisfy "
              "0 <= amp <= 1",
              amp));
        }
        if (amp > 0.0f) {
          builder_->setAvailableMemoryProportion(tensor_id, amp);
          VLOG(10) << "set available_memory_proportion for tensor: "
                   << tensor_id << " as " << amp;
        }
      }
    }
  }
  VLOG(10) << "leave Compiler::SetAMPAttributes";
}

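// Request serialized (sliced) matmul execution when the op carries a
// serialization factor, trading some throughput for lower peak memory.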
void Compiler::SetSerializeAttributes(
    const std::vector<std::string>& tensor_ids, const OpDesc* op_desc) {
  VLOG(10) << "enter Compiler::SetSerializeAttributes";
  auto tensor_ids_set =
      std::set<std::string>(tensor_ids.begin(), tensor_ids.end());

  if (op_desc->Type() == "popart_matmul") {
    if (op_desc->HasAttr(sMatmulSerializeFactor)) {
      auto factor =
          BOOST_GET_CONST(int, op_desc->GetAttr(sMatmulSerializeFactor));
      std::string mode = "output_channels";
      if (op_desc->HasAttr(sMatmulSerializeMode)) {
        mode = BOOST_GET_CONST(std::string,
                               op_desc->GetAttr(sMatmulSerializeMode));
      }
      builder_->setSerializeMatMul(tensor_ids_set, mode,
                                   static_cast<int64_t>(factor), true);
    }
  }
  VLOG(10) << "leave Compiler::SetSerializeAttributes";
}

void Compiler::SetSerializeAttributes(const std::string& tensor_id,
                                      const OpDesc* op_desc) {
  std::vector<std::string> tensor_ids = {tensor_id};
  SetSerializeAttributes(tensor_ids, op_desc);
}

void Compiler::SetCustomOps(
    const std::vector<IpuCustomOpIdentifier>& custom_ops) {
  for (auto x : custom_ops) {
    custom_ops_.emplace(x.paddle_op, x);
  }
}

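// Export the ONNX proto with float32 converted to float16 via popart's
// GraphTransformer; used when ipu_strategy_->enable_fp16 is set.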
std::string Compiler::GetFP16ModelProto() {
  popart::GraphTransformer graph_transformer(builder_->getModelProto());
  graph_transformer.convertFloatsToHalfs();
  return graph_transformer.getModelProto();
}

std::string Compiler::GetModelProto() {
  if (ipu_strategy_->enable_fp16) {
    return GetFP16ModelProto();
  } else {
    return builder_->getModelProto();
  }
}

void Compiler::SaveModelProto(const std::string& path) {
  builder_->saveModelProto(path);
}

void Compiler::SaveModelProtoNoCheck(const std::string& path) {
  auto proto = GetModelProto();
  std::ofstream onnxfile(path, std::ios_base::binary);
  onnxfile.write(proto.data(), proto.size());
  onnxfile.close();
}

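// Map an op's Paddle input names to popart tensor ids where a mapping
// exists; names without one are passed through unchanged.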
std::vector<std::string> Compiler::GetOpInputs(const OpDesc* op) {
  auto ins = op->Input("__inputs__");
  std::vector<std::string> inputs;
  for (const auto& in : ins) {
    if (resources_->tensors.find(in) != resources_->tensors.end()) {
      inputs.push_back(resources_->tensors[in]);
    } else {
      inputs.push_back(in);
    }
  }
  return inputs;
}

const std::vector<std::string>& Compiler::GetOpOutputs(const OpDesc* op) {
  return op->Output("__outputs__");
}

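// Build a popart DebugContext from the op's identify id so errors and
// profiling data can be traced back to the originating Paddle op.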
popart::DebugContext Compiler::BuildDebugContext(const OpDesc* op) {
  auto op_identify_id =
      BOOST_GET_CONST(std::string, op->GetAttr(sOpIdentifyIdAttr));
  VLOG(10) << "op_identify_id of op: " << op->Type() << " is "
           << op_identify_id;
  return popart::DebugContext(op_identify_id);
}

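// Paddle records an op's name scope in the form "/scope/sub/"; strip the
// leading and trailing slashes before handing it to popart (e.g.
// "/block0/conv1/" becomes "block0/conv1") and skip the root scope.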
void Compiler::PushNameScope(const OpDesc* op) {
  auto op_namescope = BOOST_GET_CONST(std::string, op->GetAttr(sOpNamescope));
  if (op_namescope == "/") {
    return;
  }
  if (!op_namescope.empty()) {
    op_namescope.pop_back();
  }
  if (!op_namescope.empty()) {
    op_namescope.erase(op_namescope.begin());
  }
  VLOG(10) << "name_scope is: " << op_namescope;
  builder_->pushNameScope(op_namescope);
}

void Compiler::PopNameScope(const OpDesc* op) {
  auto op_namescope = BOOST_GET_CONST(std::string, op->GetAttr(sOpNamescope));
  if (op_namescope == "/") {
    return;
  }
  builder_->popNameScope();
}

}  // namespace ipu
}  // namespace platform
}  // namespace paddle