Commit fa139e6b authored by Ray Liu, committed by GitHub

Merge pull request #1462 from codeWorm2015/develop

fix #1461  put tensor when kernel init in scope
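The diff below makes two coordinated changes. First, op params take a mutable `Scope *` instead of a `const Scope &` and forward it to the `OpParam` base class. Second, tensors that fused kernels used to heap-allocate with a bare `new Tensor()` during `Init` are now created through the param (`CreateNewScale` / `CreateNewBiase`), so they live in the scope and are released with it. A minimal sketch of that ownership pattern, assuming a `Scope::Var(name)` accessor and `Variable::GetMutable<T>()` in the style of Paddle's fluid framework (the member name `scope_` and the variable key are assumptions, not taken from this commit):

    // Sketch only: one way the param can hand out a scope-owned tensor.
    template <typename T>
    T *OpParam::CreateNewScale() {
      // The scope creates (or finds) the variable and keeps ownership of it.
      framework::Variable *var = scope_->Var("fusion_new_scale");
      // Lazily construct the Tensor payload inside the scope-owned variable;
      // the kernel gets a raw pointer it must not delete.
      return var->GetMutable<T>();
    }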
@@ -103,7 +103,7 @@ class OperatorWithKernel : public OperatorBase<Dtype> {
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      std::shared_ptr<Scope> scope)
       : OperatorBase<Dtype>(type, inputs, outputs, attrs, scope),
-        param_(inputs, outputs, attrs, *scope) {
+        param_(inputs, outputs, attrs, scope.get()) {
 #ifdef PADDLE_MOBILE_CL
     kernel_.InitCLHelper(scope->GetCLScpoe());
 #endif
......
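Note the call-site change that goes with the new signature: the operator holds the scope in a `std::shared_ptr`, so it now hands the param a borrowed raw pointer via `scope.get()` instead of dereferencing to a const reference. A small sketch of the lifetime relationship (`SomeParam` and `InputScope` are placeholder names, not from this commit):

    std::shared_ptr<Scope> scope = op->InputScope();      // owning handle
    SomeParam param(inputs, outputs, attrs, scope.get()); // non-owning Scope*
    // .get() does not extend the scope's lifetime: the param must not
    // outlive the shared_ptr that owns the scope.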
@@ -35,7 +35,7 @@ class FillConstantOp : public framework::OperatorBase<DeviceType> {
                  std::shared_ptr<framework::Scope> scope)
       : framework::OperatorBase<DeviceType>(type, inputs, outputs, attrs,
                                             scope),
-        param_(inputs, outputs, attrs, *scope) {}
+        param_(inputs, outputs, attrs, scope.get()) {}
   void RunImpl() {
     auto data_type =
         static_cast<_PaddleMobile__Framework__Proto__VarType__Type>(
......
@@ -41,8 +41,11 @@ bool ConvAddBNReluKernel<CPU, float>::Init(
     inv_std_ptr[i] =
         1 / static_cast<float>(pow((variance_ptr[i] + epsilon), 0.5));
   }
-  Tensor *new_scale = new Tensor();
-  Tensor *new_bias = new Tensor();
+  // Tensor *new_scale = new Tensor();
+  // Tensor *new_bias = new Tensor();
+  Tensor *new_scale = param->CreateNewScale<Tensor>();
+  Tensor *new_bias = param->CreateNewBiase<Tensor>();
   auto new_scale_ptr = new_scale->mutable_data<float>({C});
   auto new_bias_ptr = new_bias->mutable_data<float>({C});
   for (int i = 0; i < C; i++) {
......
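The commented-out lines preserve the old pattern for reference: `Init` allocated the fused scale/bias tensors with a bare `new` that had no matching `delete`. Moving the allocation into the scope ties the tensors' lifetime to the scope, which is what the commit title's "put tensor when kernel init in scope" refers to. The contrast in one place:

    Tensor *new_scale = new Tensor();                     // before: kernel-owned, never freed
    Tensor *new_scale = param->CreateNewScale<Tensor>();  // after: created in the scope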
@@ -42,8 +42,8 @@ bool ConvBNReluKernel<CPU, float>::Init(FusionConvBNReluParam<CPU> *param) {
     inv_std_ptr[i] =
         1 / static_cast<float>(pow((variance_ptr[i] + epsilon), 0.5));
   }
-  Tensor *new_scale = new Tensor();
-  Tensor *new_bias = new Tensor();
+  Tensor *new_scale = param->CreateNewScale<Tensor>();
+  Tensor *new_bias = param->CreateNewBiase<Tensor>();
   auto new_scale_ptr = new_scale->mutable_data<float>({C});
   auto new_bias_ptr = new_bias->mutable_data<float>({C});
   for (int i = 0; i < C; i++) {
......
@@ -27,12 +27,14 @@ class AnchorGeneratorParam : public OpParam {
  public:
   AnchorGeneratorParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
-                       const AttributeMap &attrs, const Scope &scope) {
-    input_ = OpParam::GetVarValue<framework::LoDTensor>("Input", inputs, scope);
+                       const AttributeMap &attrs, Scope *scope)
+      : OpParam(inputs, outputs, attrs, scope) {
+    input_ =
+        OpParam::GetVarValue<framework::LoDTensor>("Input", inputs, *scope);
     output_anchors_ =
-        OpParam::GetVarValue<framework::LoDTensor>("Anchors", outputs, scope);
-    output_variances_ =
-        OpParam::GetVarValue<framework::LoDTensor>("Variances", outputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("Anchors", outputs, *scope);
+    output_variances_ = OpParam::GetVarValue<framework::LoDTensor>(
+        "Variances", outputs, *scope);
     anchor_sizes_ = OpParam::GetAttr<std::vector<float>>("anchor_sizes", attrs);
     aspect_ratios_ =
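This and the following hunks all apply the same pattern: each param constructor now forwards to an `OpParam` base constructor taking the `Scope *`, while the existing `GetVarValue` helper keeps its `const Scope &` parameter, hence the `*scope` dereference at every call site. A sketch of what the base class presumably stores (only the name `OpParam` and the constructor arguments come from the diff; the rest is assumed):

    class OpParam {
     public:
      OpParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
          : scope_(scope) {}  // keep the borrowed scope for helpers
                              // like CreateNewScale / CreateNewBiase

     protected:
      Scope *scope_;  // owned by the operator, not by the param
    };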
@@ -64,22 +66,23 @@ template <typename Dtype>
 class ProposalParam : public OpParam {
  public:
   ProposalParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
-                const AttributeMap &attrs, const Scope &scope) {
+                const AttributeMap &attrs, Scope *scope)
+      : OpParam(inputs, outputs, attrs, scope) {
     scores_ =
-        OpParam::GetVarValue<framework::LoDTensor>("Scores", inputs, scope);
-    bbox_deltas_ =
-        OpParam::GetVarValue<framework::LoDTensor>("BboxDeltas", inputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("Scores", inputs, *scope);
+    bbox_deltas_ = OpParam::GetVarValue<framework::LoDTensor>("BboxDeltas",
+                                                              inputs, *scope);
     im_info_ =
-        OpParam::GetVarValue<framework::LoDTensor>("ImInfo", inputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("ImInfo", inputs, *scope);
     anchors_ =
-        OpParam::GetVarValue<framework::LoDTensor>("Anchors", inputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("Anchors", inputs, *scope);
     variances_ =
-        OpParam::GetVarValue<framework::LoDTensor>("Variances", inputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("Variances", inputs, *scope);
     rpn_rois_ =
-        OpParam::GetVarValue<framework::LoDTensor>("RpnRois", outputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("RpnRois", outputs, *scope);
     rpn_probs_ = OpParam::GetVarValue<framework::LoDTensor>("RpnRoiProbs",
-                                                            outputs, scope);
+                                                            outputs, *scope);
     pre_nms_topn_ = OpParam::GetAttr<int>("pre_nms_topN", attrs);
     post_nms_topn_ = OpParam::GetAttr<int>("post_nms_topN", attrs);
@@ -117,11 +120,13 @@ template <typename Dtype>
 class PSRoiPoolParam : public OpParam {
  public:
   PSRoiPoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
-                 const AttributeMap &attrs, const Scope &scope) {
-    input_x_ = OpParam::GetVarValue<framework::LoDTensor>("X", inputs, scope);
+                 const AttributeMap &attrs, Scope *scope)
+      : OpParam(inputs, outputs, attrs, scope) {
+    input_x_ = OpParam::GetVarValue<framework::LoDTensor>("X", inputs, *scope);
     input_rois_ =
-        OpParam::GetVarValue<framework::LoDTensor>("ROIs", inputs, scope);
-    output_ = OpParam::GetVarValue<framework::LoDTensor>("Out", outputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("ROIs", inputs, *scope);
+    output_ =
+        OpParam::GetVarValue<framework::LoDTensor>("Out", outputs, *scope);
     output_channels_ = OpParam::GetAttr<int>("output_channels", attrs);
     pooled_height_ = OpParam::GetAttr<int>("pooled_height", attrs);
@@ -152,11 +157,13 @@ class RoiPerspectiveParam : public OpParam {
  public:
   RoiPerspectiveParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs, const AttributeMap &attrs,
-                      const Scope &scope) {
-    input_x_ = OpParam::GetVarValue<framework::LoDTensor>("X", inputs, scope);
+                      Scope *scope)
+      : OpParam(inputs, outputs, attrs, scope) {
+    input_x_ = OpParam::GetVarValue<framework::LoDTensor>("X", inputs, *scope);
     input_rois_ =
-        OpParam::GetVarValue<framework::LoDTensor>("ROIs", inputs, scope);
-    output_ = OpParam::GetVarValue<framework::LoDTensor>("Out", outputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("ROIs", inputs, *scope);
+    output_ =
+        OpParam::GetVarValue<framework::LoDTensor>("Out", outputs, *scope);
     spatial_scale_ = OpParam::GetAttr<float>("spatial_scale", attrs);
     transformed_height_ = OpParam::GetAttr<int>("transformed_height", attrs);
......
@@ -25,10 +25,13 @@ template <typename Dtype>
 class WhileParam : public OpParam {
  public:
   WhileParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
-             const AttributeMap &attrs, const Scope &scope)
-      : inputs_(inputs), outputs_(outputs), scope_(scope) {
+             const AttributeMap &attrs, Scope *scope)
+      : inputs_(inputs),
+        outputs_(outputs),
+        scope_(*scope),
+        OpParam(inputs, outputs, attrs, scope) {
     cond_ =
-        OpParam::GetVarValue<framework::LoDTensor>("Condition", inputs, scope);
+        OpParam::GetVarValue<framework::LoDTensor>("Condition", inputs, *scope);
     sub_block_ = OpParam::GetAttr<int>("sub_block", attrs);
   }
......
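One nit worth flagging in the WhileParam hunk: the `OpParam(...)` base initializer is written after the member initializers. That is legal, because C++ initializes bases before members no matter how the mem-initializer list is ordered, but compilers flag the mismatch with -Wreorder. The conventional spelling lists the base first:

    WhileParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
        : OpParam(inputs, outputs, attrs, scope),
          inputs_(inputs), outputs_(outputs), scope_(*scope) {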
This diff is collapsed.