/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <algorithm>
#include <functional>
#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/framework/ngraph_bridge.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/platform/enforce.h"

#include "ngraph/ngraph.hpp"

namespace paddle {
namespace framework {

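// Returns the ngraph node bound to the single variable that `name` refers to
// in var_map, or nullptr if that variable has no node yet.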
static std::shared_ptr<ngraph::Node> GetNode(
    const std::shared_ptr<OperatorBase>& op, const std::string& name,
    const VariableNameMap& var_map,
    std::shared_ptr<
        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
        ngb_node_map) {
  auto& var_names = var_map.at(name);
  PADDLE_ENFORCE_EQ(var_names.size(), 1,
                    "op %s name %s expects one associated var", op->Type(),
                    name);
  if (ngb_node_map->find(var_names[0]) != ngb_node_map->end()) {
    return (*ngb_node_map)[var_names[0]];
  } else {
    return nullptr;
  }
}

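// Resolves the ngraph node that feeds the op input slot `name`.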
static std::shared_ptr<ngraph::Node> GetInputNode(
    const std::shared_ptr<OperatorBase>& op, const std::string& name,
    std::shared_ptr<
        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
        ngb_node_map) {
  return GetNode(op, name, op->Inputs(), ngb_node_map);
}

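// Resolves the ngraph node already bound to the op output slot `name`.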
static std::shared_ptr<ngraph::Node> GetOutputNode(
    const std::shared_ptr<OperatorBase>& op, const std::string& name,
    std::shared_ptr<
        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
        ngb_node_map) {
  return GetNode(op, name, op->Outputs(), ngb_node_map);
}

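// Binds `node` to the variable behind the op output slot `name`.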
static void SetOutputNode(
    const std::shared_ptr<OperatorBase>& op, const std::string& name,
    std::shared_ptr<ngraph::Node> node,
    std::shared_ptr<
        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
        ngb_node_map) {
  auto& var_names = op->Outputs().at(name);
  if (var_names.size() == 1) {
    (*ngb_node_map)[var_names[0]] = node;
  } else if (var_names.size() == 0) {
    (*ngb_node_map)[""] = node;
  } else {
    PADDLE_THROW("output %s has more than one associated var_name.", name);
  }
}

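// True if the op declares an output slot `name` with at least one variable.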
static bool HasOutput(const std::shared_ptr<OperatorBase>& op,
                      const std::string& name) {
  auto& outputs = op->Outputs();
  if (outputs.find(name) == outputs.end()) return false;
  return outputs.at(name).size() > 0;
}

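// Emits a binary ngraph op T from inputs "X" and "Y" and binds it to "Out".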
template <typename T>
static void BuildBinaryNode(
    const std::shared_ptr<OperatorBase>& op,
    std::shared_ptr<
        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
        ngb_node_map) {
  auto x = GetInputNode(op, "X", ngb_node_map);
  auto y = GetInputNode(op, "Y", ngb_node_map);
  auto out = std::make_shared<T>(x, y);
  SetOutputNode(op, "Out", out, ngb_node_map);
}

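// Emits a unary ngraph op T from input "X" and binds it to "Out".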
template <typename T>
static void BuildUnaryNode(
    const std::shared_ptr<OperatorBase>& op,
    std::shared_ptr<
        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
        ngb_node_map) {
  auto input = GetInputNode(op, "X", ngb_node_map);
  auto out = std::make_shared<T>(input);
  SetOutputNode(op, "Out", out, ngb_node_map);
}

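// Maps a Paddle op type to the builder that emits its ngraph equivalent.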
std::map<std::string,
         std::function<void(const std::shared_ptr<OperatorBase>&,
                            std::shared_ptr<std::unordered_map<
                                std::string, std::shared_ptr<ngraph::Node>>>)>>
    NgraphBridge::NG_NODE_MAP = {{"relu", BuildUnaryNode<ngraph::op::Relu>},
                                 {"tanh", BuildUnaryNode<ngraph::op::Tanh>}};

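// Dispatches to the builder registered for op->Type() and records the
// resulting ngraph node in ngb_node_map_.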
void NgraphBridge::BuildNgNode(const std::shared_ptr<OperatorBase>& op) {
  auto& op_type = op->Type();
  PADDLE_ENFORCE(NG_NODE_MAP.find(op_type) != NG_NODE_MAP.end(),
                 "ngraph_bridge has no builder registered for op %s", op_type);
  NG_NODE_MAP[op_type](op, ngb_node_map_);
}

}  // namespace framework
}  // namespace paddle