diff --git a/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/detectionoutput.py b/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/detectionoutput.py
index 2ecb176f363a3a8e5bcb808460fb562544963367..f365bf8b32152b4e87d3f0b45d3e939aa661b95f 100644
--- a/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/detectionoutput.py
+++ b/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/detectionoutput.py
@@ -34,7 +34,7 @@ class DetectionOutput(object):
         pbv = priorbox_list[1]
         pb = paddle.reshape(x=pb, shape=[-1, 4])
         pbv = paddle.reshape(x=pbv, shape=[-1, 4])
-        pb_dim = fluid.layers.shape(pb)[0]
+        pb_dim = paddle.shape(pb)[0]
         loc = paddle.reshape(x0, shape=[-1, pb_dim, 4])
         conf_flatten = paddle.reshape(x1, shape=[0, pb_dim, -1])
         out = fluid.layers.detection_output(
diff --git a/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/normalize.py b/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/normalize.py
index c54758244f5ce6bb3d8162ed8a28ddc9b5d0682d..db22d14b9c77bc6387adaf9c9decb5cf62d05b14 100644
--- a/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/normalize.py
+++ b/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/normalize.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import paddle
-import paddle.fluid as fluid
 
 
 class Normalize(object):
@@ -21,7 +20,7 @@ class Normalize(object):
         self.axis = axis
 
     def __call__(self, x, param):
-        l2_norm = fluid.layers.l2_normalize(x=x, axis=1)
+        l2_norm = paddle.norm(x=x, p=2, axis=1)
         param = paddle.reshape(param, [param.shape[-1]])
         perm = list(range(len(l2_norm.shape)))
         perm.pop(self.axis)
diff --git a/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/select.py b/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/select.py
index 9238f1097994d2ea464a58d0be4065da836f93ef..a7b7075c497328d4eaf4d8c7625a41ca530926e8 100644
--- a/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/select.py
+++ b/x2paddle/op_mapper/caffe2paddle/caffe_custom_layer/select.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import paddle
-import paddle.fluid as fluid
 
 
 class Select(object):
diff --git a/x2paddle/op_mapper/caffe2paddle/caffe_op_mapper.py b/x2paddle/op_mapper/caffe2paddle/caffe_op_mapper.py
index edd16da4f394664dff6a671fad8ed2d000cbfeeb..a9a9794b3380d2b9f88ce5542d91b36b081b28ab 100644
--- a/x2paddle/op_mapper/caffe2paddle/caffe_op_mapper.py
+++ b/x2paddle/op_mapper/caffe2paddle/caffe_op_mapper.py
@@ -435,7 +435,7 @@ class CaffeOpMapper():
             "beta": params.beta,
         }
         self.paddle_graph.add_layer(
-            "paddle.fluid.layers.lrn",
+            "paddle.nn.LocalResponseNorm",
             inputs={"input": input.name},
             outputs=[node.layer_name],
             **layer_attrs)
diff --git a/x2paddle/op_mapper/prim2code.py b/x2paddle/op_mapper/prim2code.py
index 4fc71f92f301c94d240eb2667690b026088fc7ff..8b4e7bc662fe1b6a312c571a5c4dd3d92a640d39 100644
--- a/x2paddle/op_mapper/prim2code.py
+++ b/x2paddle/op_mapper/prim2code.py
@@ -612,7 +612,7 @@ def prim_shape_dim(layer,
                    forward_func=[],
                    layer_id=None,
                    different_attrs=None):
-    line = "{} = fluid.layers.shape({})[{}]".format(
+    line = "{} = paddle.shape({})[{}]".format(
         layer.outputs[0],
         get_value(layer, "input", different_attrs),
         get_value(layer, "dim", different_attrs))
diff --git a/x2paddle/op_mapper/pytorch2paddle/aten.py b/x2paddle/op_mapper/pytorch2paddle/aten.py
index 71ee05e58c73a9da127824d0a1a9c4fdea06eb3a..676fee6e277951048971917210cfc59cdaa04335 100755
--- a/x2paddle/op_mapper/pytorch2paddle/aten.py
+++ b/x2paddle/op_mapper/pytorch2paddle/aten.py
@@ -6025,7 +6025,7 @@ def aten_upsample_bilinear2d(mapper, graph, node):
         inputs={"input": inputs_name[1]},
         outputs=[inputs_name[1] + "_isinstance"],
         scope_name=scope_name,
-        cls="paddle.fluid.Variable")
+        cls="paddle.static.Variable")  # TODO(syf): paddle.Variable
     graph.add_layer(
         "prim.if", {"input": inputs_name[1] + "_isinstance"},
         outputs=[inputs_name[1] + "_if1"],
@@ -6103,7 +6103,7 @@ def aten_upsample_nearest2d(mapper, graph, node):
         inputs={"input": inputs_name[1]},
         outputs=[inputs_name[1] + "_isinstance"],
         scope_name=scope_name,
-        cls="paddle.fluid.Variable")
+        cls="paddle.static.Variable")  # TODO(syf): paddle.Variable
     graph.add_layer(
         "prim.if", {"input": inputs_name[1] + "_isinstance"},
         outputs=[inputs_name[1] + "_if1"],
diff --git a/x2paddle/op_mapper/pytorch2paddle/pytorch_custom_layer/instance_norm.py b/x2paddle/op_mapper/pytorch2paddle/pytorch_custom_layer/instance_norm.py
index 6f446c206088ce4471a32293e2ca0c6539782bf4..de58c4f395407141988d363ed65fa469ad17a062 100644
--- a/x2paddle/op_mapper/pytorch2paddle/pytorch_custom_layer/instance_norm.py
+++ b/x2paddle/op_mapper/pytorch2paddle/pytorch_custom_layer/instance_norm.py
@@ -14,7 +14,7 @@
 
 import paddle
 from paddle.nn.functional import instance_norm
-from paddle.fluid.initializer import Constant
+from paddle.nn.initializer import Constant
 
 
 class InstanceNorm(paddle.nn.Layer):
diff --git a/x2paddle/optimizer/fusion/interpolate_bilinear_fuser.py b/x2paddle/optimizer/fusion/interpolate_bilinear_fuser.py
index d19fd9e957a3ef56ab4b5214c1d0a4f577c44f56..566ac62919a7540ee2f35a8a3a312ff122b5b861 100644
--- a/x2paddle/optimizer/fusion/interpolate_bilinear_fuser.py
+++ b/x2paddle/optimizer/fusion/interpolate_bilinear_fuser.py
@@ -46,7 +46,7 @@
             if x2271 :
                 x2274 = x2197[0]
                 x2275 = x2197[1]
-                x2233_isinstance = isinstance(x2233, paddle.fluid.Variable)
+                x2233_isinstance = isinstance(x2233, paddle.static.Variable)
                 if x2233_isinstance :
                     x2233 = x2233.numpy().tolist()
                 x2276 = paddle.nn.functional.interpolate(x=x2181, size=x2233, scale_factor=x2274, align_corners=False, align_mode=0, mode='bilinear')
@@ -146,7 +146,7 @@ class InterpolateBilinearFuser(FuseBase):
                 "prim.isinstance",
                 inputs={"input": "interpolate-input-3"},
                 outputs=["interpolate-input-0_isinstance"],
-                cls="paddle.fluid.Variable")
+                cls="paddle.static.Variable")
             pattern_block_block.add_layer(
                 "prim.if", {"input": "interpolate-input-0_isinstance"},
                 outputs=["interpolate-input-0_if1"])
diff --git a/x2paddle/utils.py b/x2paddle/utils.py
index c3a6d1449867288d8c3df45d9121e518e538e369..12be0b0de96c015af6f776897562ecdd5fd23cb4 100644
--- a/x2paddle/utils.py
+++ b/x2paddle/utils.py
@@ -103,15 +103,7 @@ class PaddleDtypes():
             self.t_int64 = paddle.int64
             self.t_bool = paddle.bool
         else:
-            self.t_float16 = "paddle.fluid.core.VarDesc.VarType.FP16"
-            self.t_float32 = "paddle.fluid.core.VarDesc.VarType.FP32"
-            self.t_float64 = "paddle.fluid.core.VarDesc.VarType.FP64"
-            self.t_uint8 = "paddle.fluid.core.VarDesc.VarType.UINT8"
-            self.t_int8 = "paddle.fluid.core.VarDesc.VarType.INT8"
-            self.t_int16 = "paddle.fluid.core.VarDesc.VarType.INT16"
-            self.t_int32 = "paddle.fluid.core.VarDesc.VarType.INT32"
-            self.t_int64 = "paddle.fluid.core.VarDesc.VarType.INT64"
-            self.t_bool = "paddle.fluid.core.VarDesc.VarType.BOOL"
+            raise Exception("Paddle>=2.0.0 is required, Please update version!")
 
 
 is_new_version = check_version()