Commit 07929a13 authored by: W wjj19950828

clean fluid

Parent 6bbecdc4
@@ -34,7 +34,7 @@ class DetectionOutput(object):
         pbv = priorbox_list[1]
         pb = paddle.reshape(x=pb, shape=[-1, 4])
         pbv = paddle.reshape(x=pbv, shape=[-1, 4])
-        pb_dim = fluid.layers.shape(pb)[0]
+        pb_dim = paddle.shape(pb)[0]
         loc = paddle.reshape(x0, shape=[-1, pb_dim, 4])
         conf_flatten = paddle.reshape(x1, shape=[0, pb_dim, -1])
         out = fluid.layers.detection_output(
......
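For context, a minimal sketch of what the replacement returns, with illustrative tensor values: paddle.shape gives the runtime shape as a 1-D integer tensor, so indexing it yields a tensor element rather than a Python int, mirroring the old fluid.layers.shape behaviour.

import paddle

# paddle.shape returns the runtime shape as a 1-D integer tensor;
# indexing it picks one dimension as a tensor value.
pb = paddle.rand([6, 4])       # illustrative prior-box tensor
pb_dim = paddle.shape(pb)[0]   # tensor holding the value 6
print(int(pb_dim))             # 6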
@@ -13,7 +13,6 @@
 # limitations under the License.
 import paddle
-import paddle.fluid as fluid
 class Normalize(object):
@@ -21,7 +20,7 @@ class Normalize(object):
         self.axis = axis
     def __call__(self, x, param):
-        l2_norm = fluid.layers.l2_normalize(x=x, axis=1)
+        l2_norm = paddle.norm(x=x, p=2, axis=1)
         param = paddle.reshape(param, [param.shape[-1]])
         perm = list(range(len(l2_norm.shape)))
         perm.pop(self.axis)
......
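A minimal sketch (with made-up input values) of what the new call computes: paddle.norm(x, p=2, axis=1) reduces axis 1 to a per-row L2 norm, and dividing by that norm (plus a small epsilon, illustrative here) reproduces the unit-length rows that fluid.layers.l2_normalize used to return.

import paddle

x = paddle.to_tensor([[3.0, 4.0], [6.0, 8.0]])

# L2 norm along axis 1, i.e. [5.0, 10.0]
l2_norm = paddle.norm(x, p=2, axis=1)

# Dividing by the broadcast norm yields rows of unit L2 length,
# matching the old l2_normalize output.
normalized = x / (paddle.unsqueeze(l2_norm, axis=1) + 1e-12)
print(normalized)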
@@ -13,7 +13,6 @@
 # limitations under the License.
 import paddle
-import paddle.fluid as fluid
 class Select(object):
......
@@ -435,7 +435,7 @@ class CaffeOpMapper():
             "beta": params.beta,
         }
         self.paddle_graph.add_layer(
-            "paddle.fluid.layers.lrn",
+            "paddle.nn.LocalResponseNorm",
             inputs={"input": input.name},
             outputs=[node.layer_name],
             **layer_attrs)
......
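A minimal sketch of the replacement layer, using default-style hyper-parameters and an illustrative input; in the real mapper the size/alpha/beta/k values come from the Caffe LRN params carried in layer_attrs.

import paddle

# paddle.nn.LocalResponseNorm is the 2.x counterpart of fluid.layers.lrn.
lrn = paddle.nn.LocalResponseNorm(size=5, alpha=1e-4, beta=0.75, k=1.0)
x = paddle.rand([1, 8, 16, 16])   # NCHW feature map, values illustrative
y = lrn(x)
print(y.shape)                    # [1, 8, 16, 16]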
@@ -612,7 +612,7 @@ def prim_shape_dim(layer,
                    forward_func=[],
                    layer_id=None,
                    different_attrs=None):
-    line = "{} = fluid.layers.shape({})[{}]".format(
+    line = "{} = paddle.shape({})[{}]".format(
         layer.outputs[0],
         get_value(layer, "input", different_attrs),
         get_value(layer, "dim", different_attrs))
......
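For illustration, a sketch of the kind of line this template now emits; the names "y" and "x" below are made up.

# prim_shape_dim renders a string such as "y = paddle.shape(x)[0]",
# which the generated model code later executes to pick one entry
# out of the runtime shape tensor.
line = "{} = paddle.shape({})[{}]".format("y", "x", 0)
print(line)   # y = paddle.shape(x)[0]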
@@ -6025,7 +6025,7 @@ def aten_upsample_bilinear2d(mapper, graph, node):
         inputs={"input": inputs_name[1]},
         outputs=[inputs_name[1] + "_isinstance"],
         scope_name=scope_name,
-        cls="paddle.fluid.Variable")
+        cls="paddle.static.Variable")
     # TODO(syf): paddle.Variable
     graph.add_layer(
         "prim.if", {"input": inputs_name[1] + "_isinstance"},
@@ -6103,7 +6103,7 @@ def aten_upsample_nearest2d(mapper, graph, node):
         inputs={"input": inputs_name[1]},
         outputs=[inputs_name[1] + "_isinstance"],
         scope_name=scope_name,
-        cls="paddle.fluid.Variable")
+        cls="paddle.static.Variable")
     # TODO(syf): paddle.Variable
     graph.add_layer(
         "prim.if", {"input": inputs_name[1] + "_isinstance"},
......
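A minimal sketch of what the new isinstance target distinguishes, assuming Paddle 2.x behaviour: static-graph program variables are instances of paddle.static.Variable, while eager tensors are expected not to be, which is what the generated check relies on.

import paddle

paddle.enable_static()  # static-graph mode, where Variables exist
x = paddle.static.data(name="x", shape=[-1, 3], dtype="float32")
print(isinstance(x, paddle.static.Variable))   # True

paddle.disable_static()  # back to dygraph
t = paddle.to_tensor([1.0, 2.0])
print(isinstance(t, paddle.static.Variable))   # expected False for eager tensors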
@@ -14,7 +14,7 @@
 import paddle
 from paddle.nn.functional import instance_norm
-from paddle.fluid.initializer import Constant
+from paddle.nn.initializer import Constant
 class InstanceNorm(paddle.nn.Layer):
......
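A minimal sketch of the updated import in use, with an illustrative layer and shape: paddle.nn.initializer.Constant is the 2.x home of the Constant initializer formerly under paddle.fluid.initializer.

import paddle
from paddle.nn.initializer import Constant

# Constant(1.0) fills the created parameter with ones, e.g. an
# instance-norm scale; the Linear layer and its shape are illustrative.
weight_attr = paddle.ParamAttr(initializer=Constant(value=1.0))
layer = paddle.nn.Linear(4, 4, weight_attr=weight_attr)
print(layer.weight.numpy()[0])   # all ones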
@@ -46,7 +46,7 @@ class InterpolateBilinearFuser(FuseBase):
             if x2271 :
                 x2274 = x2197[0]
                 x2275 = x2197[1]
-                x2233_isinstance = isinstance(x2233, paddle.fluid.Variable)
+                x2233_isinstance = isinstance(x2233, paddle.static.Variable)
                 if x2233_isinstance :
                     x2233 = x2233.numpy().tolist()
                 x2276 = paddle.nn.functional.interpolate(x=x2181, size=x2233, scale_factor=x2274, align_corners=False, align_mode=0, mode='bilinear')
@@ -146,7 +146,7 @@ class InterpolateBilinearFuser(FuseBase):
             "prim.isinstance",
             inputs={"input": "interpolate-input-3"},
             outputs=["interpolate-input-0_isinstance"],
-            cls="paddle.fluid.Variable")
+            cls="paddle.static.Variable")
         pattern_block_block.add_layer(
             "prim.if", {"input": "interpolate-input-0_isinstance"},
             outputs=["interpolate-input-0_if1"])
......
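A minimal sketch of the bilinear interpolate call that the fused pattern ultimately generates, with illustrative tensor sizes; only size is passed here, whereas the generated pattern chooses between size and scale_factor at runtime.

import paddle

x = paddle.rand([1, 3, 4, 4])   # NCHW input, values illustrative
y = paddle.nn.functional.interpolate(
    x, size=[8, 8], mode='bilinear', align_corners=False, align_mode=0)
print(y.shape)   # [1, 3, 8, 8]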
@@ -103,15 +103,7 @@ class PaddleDtypes():
             self.t_int64 = paddle.int64
             self.t_bool = paddle.bool
         else:
-            self.t_float16 = "paddle.fluid.core.VarDesc.VarType.FP16"
-            self.t_float32 = "paddle.fluid.core.VarDesc.VarType.FP32"
-            self.t_float64 = "paddle.fluid.core.VarDesc.VarType.FP64"
-            self.t_uint8 = "paddle.fluid.core.VarDesc.VarType.UINT8"
-            self.t_int8 = "paddle.fluid.core.VarDesc.VarType.INT8"
-            self.t_int16 = "paddle.fluid.core.VarDesc.VarType.INT16"
-            self.t_int32 = "paddle.fluid.core.VarDesc.VarType.INT32"
-            self.t_int64 = "paddle.fluid.core.VarDesc.VarType.INT64"
-            self.t_bool = "paddle.fluid.core.VarDesc.VarType.BOOL"
+            raise Exception("Paddle>=2.0.0 is required, Please update version!")
 is_new_version = check_version()
......
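For reference, a hypothetical sketch of the kind of guard check_version might implement; the actual helper in this repository may differ, and the paddle.__version__ comparison below is only an assumption.

import paddle

def check_version():
    # Hypothetical re-implementation for illustration: treat any release
    # at or above 2.0.0 as "new"; development builds report "0.0.0" and
    # are assumed to be new as well.
    version = paddle.__version__
    if version == "0.0.0":
        return True
    major = int(version.split(".")[0])
    return major >= 2

is_new_version = check_version()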