Commit ff6c6996 authored by: W wjj19950828

add custom layer for ONNX

Parent 2b2e5bce
@@ -18,3 +18,5 @@ from .pad_all_dim2 import PadAllDim2
 from .pad_all_dim4 import PadAllDim4
 from .pad_all_dim4_one_input import PadAllDim4WithOneInput
 from .nms import NMS
+from .roi_align import ROIAlign
+from .roi_pooling import ROIPooling
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
from paddle import _C_ops
from paddle import in_dynamic_mode
from paddle.common_ops_import import Variable, LayerHelper, check_variable_and_dtype, check_type, check_dtype


@paddle.jit.not_to_static
def roi_align(input,
              rois,
              output_size,
              spatial_scale=1.0,
              sampling_ratio=-1,
              rois_num=None,
              aligned=True,
              name=None):
    check_type(output_size, 'output_size', (int, tuple), 'roi_align')
    if isinstance(output_size, int):
        output_size = (output_size, output_size)
    pooled_height, pooled_width = output_size
    if in_dynamic_mode():
        assert rois_num is not None, "rois_num should not be None in dygraph mode."
        align_out = _C_ops.roi_align(
            input, rois, rois_num, "pooled_height", pooled_height,
            "pooled_width", pooled_width, "spatial_scale", spatial_scale,
            "sampling_ratio", sampling_ratio, "aligned", aligned)
        return align_out
    else:
        check_variable_and_dtype(input, 'input', ['float32', 'float64'],
                                 'roi_align')
        check_variable_and_dtype(rois, 'rois', ['float32', 'float64'],
                                 'roi_align')
        helper = LayerHelper('roi_align', **locals())
        dtype = helper.input_dtype()
        align_out = helper.create_variable_for_type_inference(dtype)
        inputs = {
            "X": input,
            "ROIs": rois,
        }
        if rois_num is not None:
            inputs['RoisNum'] = rois_num
        helper.append_op(
            type="roi_align",
            inputs=inputs,
            outputs={"Out": align_out},
            attrs={
                "pooled_height": pooled_height,
                "pooled_width": pooled_width,
                "spatial_scale": spatial_scale,
                "sampling_ratio": sampling_ratio,
                "aligned": aligned,
            })
        return align_out


class ROIAlign(object):
    def __init__(self, pooled_height, pooled_width, spatial_scale,
                 sampling_ratio, rois_num):
        # roi_align above takes a single output_size argument, so the two
        # pooled dimensions are combined here instead of being forwarded as
        # separate pooled_height/pooled_width keywords it does not accept.
        self.roialign_layer_attrs = {
            "output_size": (pooled_height, pooled_width),
            "spatial_scale": spatial_scale,
            "sampling_ratio": sampling_ratio,
            "rois_num": rois_num,
        }

    def __call__(self, x0, x1):
        out = roi_align(input=x0, rois=x1, **self.roialign_layer_attrs)
        return out
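
For reference, a minimal smoke test of the ROIAlign custom layer under dynamic graph mode might look like the sketch below. The tensor shapes, RoI coordinates, and attribute values are illustrative assumptions only, and it presumes a Paddle 2.x release in which paddle._C_ops.roi_align still accepts the attribute-name/value calling convention used by roi_align above.

# Hypothetical usage sketch; shapes and attribute values are made up.
import numpy as np
import paddle

feats = paddle.rand([1, 256, 64, 64])  # N, C, H, W feature map
rois = paddle.to_tensor(
    np.array([[4., 4., 32., 32.],
              [8., 8., 60., 48.]], dtype="float32"))  # (x1, y1, x2, y2) per RoI
rois_num = paddle.to_tensor(np.array([2], dtype="int32"))  # RoIs per image

align = ROIAlign(
    pooled_height=7,
    pooled_width=7,
    spatial_scale=1.0,
    sampling_ratio=-1,
    rois_num=rois_num)
out = align(feats, rois)
print(out.shape)  # expected: [2, 256, 7, 7]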
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
from paddle import _C_ops
from paddle import in_dynamic_mode
from paddle.common_ops_import import Variable, LayerHelper, check_variable_and_dtype, check_type, check_dtype


@paddle.jit.not_to_static
def roi_pool(input,
             rois,
             pooled_height,
             pooled_width,
             spatial_scale=1.0,
             rois_num=None,
             name=None):
    if in_dynamic_mode():
        assert rois_num is not None, "rois_num should not be None in dygraph mode."
        pool_out, argmaxes = _C_ops.roi_pool(
            input, rois, rois_num, "pooled_height", pooled_height,
            "pooled_width", pooled_width, "spatial_scale", spatial_scale)
        return pool_out, argmaxes
    else:
        check_variable_and_dtype(input, 'input', ['float32'], 'roi_pool')
        check_variable_and_dtype(rois, 'rois', ['float32'], 'roi_pool')
        helper = LayerHelper('roi_pool', **locals())
        dtype = helper.input_dtype()
        pool_out = helper.create_variable_for_type_inference(dtype)
        argmaxes = helper.create_variable_for_type_inference(dtype='int32')
        inputs = {
            "X": input,
            "ROIs": rois,
        }
        if rois_num is not None:
            inputs['RoisNum'] = rois_num
        helper.append_op(
            type="roi_pool",
            inputs=inputs,
            outputs={"Out": pool_out,
                     "Argmax": argmaxes},
            attrs={
                "pooled_height": pooled_height,
                "pooled_width": pooled_width,
                "spatial_scale": spatial_scale
            })
        return pool_out, argmaxes


class ROIPooling(object):
    def __init__(self, pooled_height, pooled_width, spatial_scale):
        self.roipooling_layer_attrs = {
            "pooled_height": pooled_height,
            "pooled_width": pooled_width,
            "spatial_scale": spatial_scale
        }

    def __call__(self, x0, x1):
        out = roi_pool(input=x0, rois=x1, **self.roipooling_layer_attrs)
        return out
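
Because ROIPooling does not forward a rois_num tensor, calling it under dynamic graph mode would trip the assert inside roi_pool; only the static-graph branch handles the rois_num-less case. The sketch below therefore calls roi_pool directly with an explicit rois_num. As with the ROIAlign example above, the shapes and values are illustrative assumptions and a legacy paddle._C_ops calling convention is presumed.

# Hypothetical usage sketch; shapes and attribute values are made up.
import numpy as np
import paddle

feats = paddle.rand([1, 256, 64, 64])
rois = paddle.to_tensor(
    np.array([[4., 4., 32., 32.]], dtype="float32"))
rois_num = paddle.to_tensor(np.array([1], dtype="int32"))

pool_out, argmaxes = roi_pool(
    input=feats,
    rois=rois,
    pooled_height=7,
    pooled_width=7,
    spatial_scale=1.0,
    rois_num=rois_num)
print(pool_out.shape)  # expected: [1, 256, 7, 7]; argmaxes holds the max-element indices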
@@ -541,7 +541,7 @@ class OpSet9():
             'rois_num': val_rois_num,
         }
         self.paddle_graph.add_layer(
-            'paddle.fluid.layers.roi_align',
+            'custom_layer:ROIAlign',
             inputs={'input': val_x.name,
                     'rois': val_rois.name},
             outputs=[node.name],
@@ -560,7 +560,7 @@ class OpSet9():
             'spatial_scale': spatial_scale,
         }
         self.paddle_graph.add_layer(
-            'paddle.fluid.layers.roi_pool',
+            'custom_layer:ROIPooling',
             inputs={'input': val_x.name,
                     'rois': val_rois.name},
             outputs=[node.name],