未验证 提交 0dae79a9 编写于 作者: W WangZhen 提交者: GitHub

[JitLayer]Remove include fluid head files in JitLayer (#44597)

* Remove include fluid head files in JitLayer

* Format code

* Remove const to fix ci error

* Fix param error

* Polish jit layer include and cp some headers to python/include

* Fix comment
上级 28aa0c61
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include "base_function.h"
#include "layer.h"
#include "serializer.h"
#include "serializer_utils.h"
......@@ -15,7 +15,6 @@
#pragma once
#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/core/dense_tensor.h"
namespace paddle {
namespace jit {
......
......@@ -16,6 +16,8 @@
#include "paddle/phi/core/enforce.h"
#include "paddle/fluid/jit/base_function.h"
namespace paddle {
namespace jit {
......
......@@ -14,13 +14,14 @@
#pragma once
#include <memory>
#include <string>
#include <unordered_map>
#include "paddle/fluid/jit/base_function.h"
#include <vector>
namespace paddle {
namespace jit {
class BaseFunction;
using Name2FunctionMap =
std::unordered_map<std::string, std::shared_ptr<BaseFunction>>;
......
......@@ -14,6 +14,7 @@
#include "paddle/fluid/jit/function_schema.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/phi/core/enforce.h"
#include "paddle/fluid/jit/function_utils.h"
......@@ -52,14 +53,13 @@ void FunctionSchema::AddOutputArg(const std::string& name) {
FunctionInfo::FunctionInfo(const std::string& func_name,
const std::vector<std::string>& param_names,
const framework::ProgramDesc& program_desc)
: func_name_(func_name),
param_names_(param_names),
program_desc_(program_desc) {
: func_name_(func_name), param_names_(param_names) {
program_desc_.reset(new framework::ProgramDesc(program_desc));
// Parse FunctionSchema
for (auto& in_name : program_desc_.GetFeedTargetNames()) {
for (auto& in_name : program_desc_->GetFeedTargetNames()) {
schema_.AddInputArg(in_name);
}
for (auto& out_name : program_desc_.GetFetchTargetNames()) {
for (auto& out_name : program_desc_->GetFetchTargetNames()) {
schema_.AddOutputArg(out_name);
}
}
......@@ -67,7 +67,7 @@ FunctionInfo::FunctionInfo(const std::string& func_name,
// Accessor: returns the function name given to the constructor (read-only).
const std::string& FunctionInfo::FunctionName() const { return func_name_; }
const framework::ProgramDesc& FunctionInfo::ProgramDesc() const {
return program_desc_;
return *program_desc_.get();
}
const std::vector<std::string>& FunctionInfo::ParamNames() const {
......@@ -83,7 +83,7 @@ const std::vector<std::string> FunctionInfo::OutputArgNames() const {
}
void FunctionInfo::RemoveDescFeedFetch() {
utils::RemoveFeedFetch(&program_desc_);
utils::RemoveFeedFetch(program_desc_.get());
}
} // namespace jit
......
......@@ -14,15 +14,17 @@
#pragma once
#include <memory>
#include <string>
#include <vector>
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/variable.h"
namespace paddle {
namespace framework {
class ProgramDesc;
} // namespace framework
namespace jit {
using Variable = paddle::framework::Variable;
class Argument {
public:
......@@ -75,7 +77,7 @@ class FunctionInfo {
private:
std::string func_name_;
std::vector<std::string> param_names_;
framework::ProgramDesc program_desc_;
std::shared_ptr<framework::ProgramDesc> program_desc_;
FunctionSchema schema_;
};
......
......@@ -15,7 +15,9 @@
#include "paddle/fluid/jit/function_utils.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/phi/core/enforce.h"
namespace paddle {
......@@ -75,7 +77,7 @@ void ShareParamsIntoScope(const std::vector<std::string> &param_names,
for (size_t i = 0; i < param_names.size(); ++i) {
std::string name = param_names[i];
auto &param = params_dict.find(name)->second;
auto &dense_tensor = param.Get<DenseTensor>();
auto &dense_tensor = param->Get<DenseTensor>();
VLOG(3) << "share into scope: " << name;
auto *var = scope->Var(name);
auto *dst_tensor = var->GetMutable<DenseTensor>();
......
......@@ -18,18 +18,23 @@
#include <unordered_map>
#include <vector>
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/common/place.h"
#include "paddle/phi/core/dense_tensor.h"
#include "paddle/fluid/jit/function_schema.h"
namespace paddle {
namespace framework {
class Variable;
class ProgramDesc;
class Scope;
} // namespace framework
namespace jit {
using Variable = paddle::framework::Variable;
using Name2VariableMap = std::unordered_map<std::string, Variable>;
using Name2VariableMap =
std::unordered_map<std::string, std::shared_ptr<Variable>>;
using DenseTensor = phi::DenseTensor;
using Tensor = paddle::experimental::Tensor;
......
......@@ -14,17 +14,21 @@
#include "paddle/fluid/jit/layer.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/jit/base_function.h"
#include "paddle/fluid/jit/compilation_unit.h"
#include "paddle/fluid/jit/function_schema.h"
namespace paddle {
namespace jit {
Layer::Layer(const std::vector<std::shared_ptr<FunctionInfo>>& infos,
const Name2VariableMap& params_dict,
const phi::Place& place)
Layer::Layer(const Name2VariableMap& params_dict, const phi::Place& place)
: params_dict_(params_dict) {
VLOG(3) << "infos size: " << infos.size();
unit_.reset(new CompilationUnit());
}
std::shared_ptr<BaseFunction> Layer::Function(const std::string& name) const {
return unit_.Function(name);
return unit_->Function(name);
}
std::vector<Tensor> Layer::forward(const std::vector<Tensor>& inputs) {
......@@ -42,15 +46,15 @@ void Layer::to(const phi::Place& place) {}
void Layer::SetFunction(const std::string& name,
const std::shared_ptr<BaseFunction>& function) {
unit_.SetFunction(name, function);
unit_->SetFunction(name, function);
}
std::vector<std::string> Layer::FunctionNames() const {
return unit_.FunctionNames();
return unit_->FunctionNames();
}
const Name2FunctionMap& Layer::FunctionMap() const {
return unit_.FunctionMap();
return unit_->FunctionMap();
}
} // namespace jit
......
......@@ -18,23 +18,31 @@
#include <unordered_map>
#include <vector>
#include "paddle/fluid/framework/variable.h"
#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/common/place.h"
#include "paddle/fluid/jit/base_function.h"
#include "paddle/fluid/jit/compilation_unit.h"
#include "paddle/fluid/jit/function_schema.h"
#include "base_function.h"
namespace paddle {
namespace framework {
class Variable;
} // namespace framework
namespace jit {
class CompilationUnit;
using DenseTensor = phi::DenseTensor;
using Tensor = paddle::experimental::Tensor;
using Variable = paddle::framework::Variable;
using Name2VariableMap = std::unordered_map<std::string, Variable>;
using Name2VariableMap =
std::unordered_map<std::string, std::shared_ptr<Variable>>;
using Name2FunctionMap =
std::unordered_map<std::string, std::shared_ptr<BaseFunction>>;
class Layer {
public:
Layer(const std::vector<std::shared_ptr<FunctionInfo>>& infos,
const Name2VariableMap& params_dict,
const phi::Place& place);
Layer(const Name2VariableMap& params_dict, const phi::Place& place);
std::shared_ptr<BaseFunction> Function(const std::string& name) const;
......@@ -56,7 +64,7 @@ class Layer {
private:
Name2VariableMap params_dict_;
Name2VariableMap attrs_dict_;
CompilationUnit unit_;
std::shared_ptr<CompilationUnit> unit_;
};
} // namespace jit
......
......@@ -16,10 +16,14 @@
#include <set>
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/jit/executor_function.h"
#include "paddle/fluid/jit/layer.h"
#include "paddle/fluid/jit/pe_function.h"
#include "paddle/fluid/jit/property.h"
#include "paddle/fluid/jit/serializer_utils.h"
DECLARE_string(jit_engine_type);
......@@ -55,7 +59,7 @@ Layer Deserializer::operator()(const std::string& path,
ReadTensorData(path + PDPARAMS_SUFFIX, param_names_set, place, &params_dict);
// ReadAttributeData();
Layer layer = Layer(infos, params_dict, place);
Layer layer = Layer(params_dict, place);
for (auto& info : infos) {
if (FLAGS_jit_engine_type == "Executor") {
......@@ -90,7 +94,7 @@ void Deserializer::ReadTensorData(const std::string& file_name,
// TODO(dev): Support framework::Vocab
DenseTensor* dense_tesnor = v.GetMutable<DenseTensor>();
framework::DeserializeFromStream(fin, dense_tesnor, dev_ctx);
(*params_dict)[*it] = v;
(*params_dict)[*it] = std::make_shared<Variable>(v);
}
}
......
......@@ -14,16 +14,26 @@
#pragma once
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/jit/property.h"
#include "paddle/fluid/jit/layer.h"
#include "paddle/phi/common/place.h"
namespace paddle {
namespace framework {
class Variable;
class ProgramDesc;
} // namespace framework
namespace jit {
class Layer;
using Variable = paddle::framework::Variable;
using Name2VariableMap =
std::unordered_map<std::string, std::shared_ptr<Variable>>;
// Export Layer into local disk
class Serializer {
public:
......
......@@ -17,6 +17,7 @@
#include <dirent.h>
#include <fstream>
#include "paddle/fluid/framework/phi_utils.h"
#include "paddle/fluid/framework/var_desc.h"
namespace paddle {
......@@ -100,6 +101,10 @@ const std::vector<std::pair<std::string, std::string>> PdmodelFilePaths(
return pdmodel_paths;
}
// One-shot initialization helper: populates the framework's default
// op -> kernel signature map so later kernel lookups succeed.
// NOTE(review): presumably safe to call multiple times — confirm the
// framework-side implementation is idempotent.
void InitKernelSignatureMap() {
paddle::framework::InitDefaultKernelSignatureMap();
}
} // namespace utils
} // namespace jit
} // namespace paddle
......@@ -17,9 +17,12 @@
#include <string>
#include <vector>
#include "paddle/fluid/framework/var_desc.h"
namespace paddle {
namespace framework {
class VarDesc;
} // namespace framework
namespace jit {
static const char PDMODEL_SUFFIX[] = ".pdmodel";
static const char PDPARAMS_SUFFIX[] = ".pdiparams";
......@@ -40,6 +43,8 @@ bool FileExists(const std::string& file_path);
const std::vector<std::pair<std::string, std::string>> PdmodelFilePaths(
const std::string& path);
void InitKernelSignatureMap();
} // namespace utils
} // namespace jit
} // namespace paddle
......@@ -621,8 +621,12 @@ headers = (
list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/phi/kernels', recursive=True)) + # phi kernels headers
# capi headers
list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/phi/capi', recursive=True)) + # phi capi headers
# utila api headers
list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/utils', recursive=True))) # paddle utils headers
# utils api headers
list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/utils', recursive=True))) # paddle utils headers
# Public JitLayer headers to ship with the wheel; per the commit message they
# are copied into python/include so users can include them there.
jit_layer_headers = ['layer.h', 'serializer.h', 'serializer_utils.h', 'all.h', 'base_function.h']
# recursive=False: only match headers directly under paddle/fluid/jit.
for f in jit_layer_headers:
headers += list(find_files(f, '@PADDLE_SOURCE_DIR@/paddle/fluid/jit', recursive=False))
if '${WITH_MKLDNN}' == 'ON':
headers += list(find_files('*', '${MKLDNN_INSTALL_DIR}/include')) # mkldnn
......@@ -667,6 +671,10 @@ class InstallHeaders(Command):
elif 'third_party' not in header:
# paddle headers
install_dir = re.sub('@PADDLE_SOURCE_DIR@/', '', header)
print('install_dir: ', install_dir)
if 'fluid/jit' in install_dir:
install_dir = re.sub('fluid/jit', 'jit', install_dir)
print('fluid/jit install_dir: ', install_dir)
else:
# third_party
install_dir = re.sub('${THIRD_PARTY_PATH}', 'third_party', header)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册