serializer.cc
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/jit/serializer.h"

#include <set>

#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/platform/device_context.h"

#include "paddle/fluid/jit/engine/executor_engine.h"
#include "paddle/fluid/jit/engine/interpreter_engine.h"
#include "paddle/fluid/jit/engine/pe_engine.h"
#include "paddle/fluid/jit/layer.h"
#include "paddle/fluid/jit/property.h"
#include "paddle/fluid/jit/serializer_utils.h"

DECLARE_string(jit_engine_type);

namespace paddle {
namespace jit {
using FunctionInfoMap =
    std::unordered_map<std::string, std::shared_ptr<FunctionInfo>>;

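// Builds a Layer from the model files saved under `path`: loads every
// *.pdmodel program, collects the persistable variable names of each
// function, reads the parameter tensors (and the optional property file),
// and binds each function to an execution engine chosen by
// FLAGS_jit_engine_type.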
Layer Deserializer::operator()(const std::string& path,
                               const phi::Place& place) {
  const auto& pdmodel_paths = utils::PdmodelFilePaths(path);
  // std::set keeps the collected parameter names ordered and de-duplicated.
  std::set<std::string> param_names_set;
  FunctionInfoMap info_map;
  for (auto& it : pdmodel_paths) {
    auto& func_name = it.first;
    auto program_desc = LoadProgram(it.second);

    std::vector<std::string> persist_var_names;
    auto all_var_desc = program_desc.Block(0).AllVars();
48
    for (auto* desc_ptr : all_var_desc) {
49
      if (utils::IsPersistable(desc_ptr)) {
50
        persist_var_names.emplace_back(desc_ptr->Name());
51 52 53
      }
    }

    param_names_set.insert(persist_var_names.begin(), persist_var_names.end());
    info_map[func_name] = std::make_shared<FunctionInfo>(
        func_name, persist_var_names, program_desc);
  }

  VariableMap params_dict;
  VariableMap attrs_dict;
  ReadTensorData(path + PDPARAMS_SUFFIX, param_names_set, place, &params_dict);

  if (utils::FileExists(path + PROPERTY_SUFFIX)) {
    ReadAttributeData(path + PROPERTY_SUFFIX, &attrs_dict);
    VLOG(3) << "Read Property Success!";
  }

  Layer layer = Layer(params_dict, attrs_dict, info_map, place);

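  // Bind every loaded function to an execution engine; the engine kind is
  // selected at runtime by the FLAGS_jit_engine_type flag.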
  for (auto it = info_map.begin(); it != info_map.end(); ++it) {
    const std::string& func_name = it->first;
    auto& info = it->second;
    if (FLAGS_jit_engine_type == "Executor") {
      VLOG(3) << "Add function type: ExecutorEngine. Function name: "
              << func_name;
      layer.SetEngine(
          func_name,
          utils::MakeEngine<ExecutorEngine>(info, params_dict, place));
    } else if (FLAGS_jit_engine_type == "PE") {
      VLOG(3) << "Add function type: PEEngine. Function name: " << func_name;
      layer.SetEngine(func_name,
                      utils::MakeEngine<PEEngine>(info, params_dict, place));
    } else if (FLAGS_jit_engine_type == "New") {
      VLOG(3) << "Add function type: InterpreterEngine. Function name: "
              << func_name;
      layer.SetEngine(
          func_name,
          utils::MakeEngine<InterpreterEngine>(info, params_dict, place));
    } else {
      PD_THROW("Invalid JitLayer engine type.");
    }
  }

  return layer;
}

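// Deserializes one DenseTensor per name in `var_name` (iterated in the set's
// sorted order) from `file_name` and stores it in `params_dict`.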
void Deserializer::ReadTensorData(const std::string& file_name,
                                  const std::set<std::string>& var_name,
                                  const phi::Place& place,
                                  VariableMap* params_dict) const {
  VLOG(3) << "ReadTensorData from: " << file_name;
  std::ifstream fin(file_name, std::ios::binary);
  platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();
  auto& dev_ctx = *pool.Get(place);
  for (auto it = var_name.begin(); it != var_name.end(); it++) {
    VLOG(3) << "load Tensor: " << *it;
    Variable v;
    // TODO(dev): Support framework::Vocab
    DenseTensor* dense_tensor = v.GetMutable<DenseTensor>();
    framework::DeserializeFromStream(fin, dense_tensor, dev_ctx);
    (*params_dict)[*it] = std::make_shared<Variable>(v);
  }
}

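// Loads the property file at `file_path` and exposes its values as the
// layer's attribute map.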
void Deserializer::ReadAttributeData(const std::string& file_path,
                                     VariableMap* attrs_dict) const {
  VLOG(3) << "ReadAttributeData from: " << file_path;
  Property p;
  p.Deserialization(file_path);
  *attrs_dict = static_cast<VariableMap>(p.Values());
  return;
}

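// Reads the whole *.pdmodel file into memory and parses it into a
// framework::ProgramDesc.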
framework::ProgramDesc Deserializer::LoadProgram(const std::string& file_name) {
  VLOG(3) << "LoadProgram from: " << file_name;
  std::ifstream fin(file_name, std::ios::in | std::ios::binary);
  fin.seekg(0, std::ios::end);
  std::string buffer(fin.tellg(), ' ');
  fin.seekg(0, std::ios::beg);
  fin.read(&buffer[0], buffer.size());
  fin.close();
  return framework::ProgramDesc(buffer);
}

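// Convenience entry point: deserializes the model saved at `file_path` and
// returns a Layer placed on `place`. Minimal usage sketch (the model path is
// hypothetical):
//
//   auto layer = paddle::jit::Load("/path/to/saved_model/model", phi::CPUPlace());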
Layer Load(const std::string& file_path, const phi::Place& place) {
  auto deserializer = Deserializer();
  return deserializer(file_path, place);
}

}  // namespace jit
}  // namespace paddle