Unverified commit e9589e35, authored by Chen Weihang, committed by GitHub

[Eager] Polish append op using for model perf (#43102)

* polish append op using

* fix var error

* fix group norm impl
Parent f9e55dee
@@ -125,6 +125,11 @@ std::map<std::string, std::set<std::string>> op_ins_map = {
      {"X", "Scale", "Bias", "Mean", "Variance", "MomentumTensor"}},
     {"inplace_abn",
      {"X", "Scale", "Bias", "Mean", "Variance", "MomentumTensor"}},
+    {"linear_interp", {"X", "OutSize"}},
+    {"bilinear_interp", {"X", "OutSize"}},
+    {"trilinear_interp", {"X", "OutSize"}},
+    {"nearest_interp", {"X", "OutSize"}},
+    {"bicubic_interp", {"X", "OutSize"}},
 };

 // NOTE(zhiqiu): Like op_ins_map.
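For readers unfamiliar with this registry: op_ins_map tells the generated eager bindings which named inputs each op accepts, so registering "OutSize" here is what lets the interp calls later in this commit pass the target-size tensor positionally. A minimal sketch of the idea (illustrative names only, not Paddle source):

```python
# Illustrative sketch of how an op-inputs registry like op_ins_map can drive
# a positional call from named inputs. The entries mirror the ones added here.
OP_INS_MAP = {
    "bilinear_interp": ["X", "OutSize"],
    "nearest_interp": ["X", "OutSize"],
}

def build_call_args(op_type, named_inputs):
    """Order named inputs as the generated binding expects them."""
    return [named_inputs.get(name) for name in OP_INS_MAP[op_type]]

# "OutSize" is optional: a None placeholder is passed when it is absent.
args = build_call_args("bilinear_interp", {"X": "x_tensor"})
assert args == ["x_tensor", None]
```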
@@ -270,6 +275,7 @@ std::map<std::string, std::set<std::string>> op_passing_outs_map = {
     {"split", {"Out"}},
     {"concat", {"Out"}},
     {"fused_multi_transformer", {"CacheKVOut"}},
+    {"group_norm", {"Mean", "Variance"}},
 };

 // NOTE(pangyoki): Tensor View Strategy.
......
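The registration above pairs with op_passing_outs_map: outputs listed there are allocated by the Python caller and passed into the op rather than created inside the binding, which is the point of the group_norm entry. A toy sketch of that contract (hypothetical names, not Paddle source):

```python
# Minimal sketch of the "passing outs" idea: the caller owns the output
# buffers and hands them to the kernel, so no output variables need to be
# inferred or allocated inside the call itself.
def group_norm_kernel(x, weight, bias, mean_out, variance_out):
    # A real kernel would write per-group statistics into the two buffers;
    # here we only mark them as written to show the data flow.
    mean_out["written"] = True
    variance_out["written"] = True
    return x  # the normalized result would be returned here

mean_buf, var_buf = {}, {}
y = group_norm_kernel("x", "w", "b", mean_buf, var_buf)
assert mean_buf["written"] and var_buf["written"]
```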
@@ -3016,9 +3016,15 @@ class GroupNorm(layers.Layer):
             is_bias=True)

     def forward(self, input):
-        if in_dygraph_mode():
+        mean_out = self._helper.create_variable_for_type_inference(
+            dtype=self._dtype, stop_gradient=True)
+        variance_out = self._helper.create_variable_for_type_inference(
+            dtype=self._dtype, stop_gradient=True)
+        if _non_static_mode():
             attrs = ('epsilon', self._epsilon, 'groups', self._groups)
-            out, _, _ = _C_ops.group_norm(input, self.weight, self.bias, *attrs)
+            out, _, _ = _C_ops.group_norm(input, self.weight, self.bias,
+                                          mean_out, variance_out, *attrs)

             return dygraph_utils._append_activation_in_dygraph(out, self._act)
         else:
@@ -3029,10 +3035,6 @@ class GroupNorm(layers.Layer):
                 inputs['Scale'] = self.weight

             # create output
-            mean_out = self._helper.create_variable_for_type_inference(
-                dtype=self._dtype, stop_gradient=True)
-            variance_out = self._helper.create_variable_for_type_inference(
-                dtype=self._dtype, stop_gradient=True)
             group_norm_out = self._helper.create_variable_for_type_inference(
                 dtype=self._dtype)
......
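With the two registry entries in place, the fluid GroupNorm layer now creates mean_out and variance_out up front and hands them to _C_ops.group_norm in dynamic mode, instead of letting the op allocate them per call. A usage sketch, assuming a Paddle 2.x build that contains this commit:

```python
# Usage sketch: in dynamic mode, forward() takes the _C_ops fast path shown
# in the diff above.
import paddle
import paddle.fluid as fluid

paddle.disable_static()  # dynamic mode
x = paddle.rand([2, 8, 4, 4], dtype='float32')
gn = fluid.dygraph.GroupNorm(channels=8, groups=4)
y = gn(x)
print(y.shape)  # [2, 8, 4, 4]
```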
@@ -3600,6 +3600,10 @@ class Block(object):
             attrs = kwargs.get("attrs", {})
             inplace_map = kwargs.get("inplace_map", None)
             type = kwargs.get("type", None)
+            warnings.warn(
+                "Op `%s` is executed through `append_op` under the dynamic mode, "
+                "the corresponding API implementation needs to be upgraded to "
+                "using `_C_ops` method." % type, DeprecationWarning)
             op = Operator(
                 block=self,
                 desc=None,
......
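The new warning marks any op that still reaches Block.append_op in dynamic mode as a migration target. Since Python filters DeprecationWarning out by default, here is a short stdlib-only sketch of how to surface it while testing (the op name below is a stand-in):

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always", DeprecationWarning)
    # Stand-in for running dygraph code that still hits Block.append_op:
    warnings.warn(
        "Op `pad2d` is executed through `append_op` under the dynamic mode, "
        "the corresponding API implementation needs to be upgraded to "
        "using `_C_ops` method.", DeprecationWarning)

for w in caught:
    print(w.category.__name__, ":", w.message)
```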
@@ -7793,10 +7793,18 @@ def image_resize(input,
     }
     if out_shape is not None:
-        if isinstance(out_shape, Variable):
+        if isinstance(out_shape, Variable) and not _non_static_mode():
             out_shape.stop_gradient = True
             inputs['OutSize'] = out_shape
         else:
+            if _non_static_mode():
+                if isinstance(out_shape, Variable):
+                    out_shape = list(out_shape.numpy())
+                else:
+                    out_shape = list(out_shape)
+                for i, dim in enumerate(out_shape):
+                    if isinstance(dim, Variable):
+                        out_shape[i] = dim.numpy()[0]
             if not (_is_list_or_turple_(out_shape)):
                 raise TypeError(
                     "out_shape should be a list or tuple or Variable.")
@@ -7863,7 +7871,9 @@ def image_resize(input,
                 attrs['out_w'] = out_shape[2]
     else:
-        if isinstance(scale, Variable):
+        if _non_static_mode() and isinstance(scale, Variable):
+            scale = scale.numpy()
+        elif isinstance(scale, Variable):
             scale.stop_gradient = True
             inputs["Scale"] = scale
         elif isinstance(scale, float) or isinstance(scale, int):
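The scale handling follows the same pattern: dynamic mode reads the tensor's value eagerly so it can travel as a plain attribute, while static mode keeps it as a symbolic "Scale" graph input. An illustrative contrast (hypothetical helper, not Paddle source):

```python
import numpy as np

class FakeVar:  # stand-in for a dygraph Tensor holding the scale
    def __init__(self, v):
        self._v = np.asarray(v)
    def numpy(self):
        return self._v

def resolve_scale(scale, non_static_mode, inputs, attrs):
    if non_static_mode and isinstance(scale, FakeVar):
        attrs["scale"] = float(scale.numpy())  # eager: read the value now
    elif isinstance(scale, FakeVar):
        inputs["Scale"] = scale                # static: stays symbolic
    else:
        attrs["scale"] = float(scale)

inputs, attrs = {}, {}
resolve_scale(FakeVar(2.0), True, inputs, attrs)
assert attrs == {"scale": 2.0} and inputs == {}
```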
@@ -7883,6 +7893,26 @@ def image_resize(input,
         inputs["OutSize"] = actual_shape
     elif actual_shape is not None:
         raise TypeError("actual_shape should either be Variable or None.")
+
+    if _non_static_mode():
+        attr_list = []
+        for k, v in attrs.items():
+            attr_list.append(k)
+            attr_list.append(v)
+        dy_attr = tuple(attr_list)
+
+        if resample_type == "linear":
+            out = _C_ops.linear_interp(input, actual_shape, *dy_attr)
+        elif resample_type == "bilinear":
+            out = _C_ops.bilinear_interp(input, actual_shape, *dy_attr)
+        elif resample_type == "trilinear":
+            out = _C_ops.trilinear_interp(input, actual_shape, *dy_attr)
+        elif resample_type == "nearest":
+            out = _C_ops.nearest_interp(input, actual_shape, *dy_attr)
+        elif resample_type == "bicubic":
+            out = _C_ops.bicubic_interp(input, actual_shape, *dy_attr)
+        return out
+
     out = helper.create_variable_for_type_inference(dtype)
     helper.append_op(
         type='{}_interp'.format(resample_type),
......
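The attrs dict is flattened into an interleaved name/value tuple because the _C_ops entry points take attributes as positional pairs, the same convention visible in the group_norm and assign_value calls elsewhere in this diff. A runnable illustration of the packing (plain Python, no Paddle needed):

```python
attrs = {'out_h': 12, 'out_w': 12, 'interp_method': 'bilinear'}

attr_list = []
for k, v in attrs.items():
    attr_list.append(k)
    attr_list.append(v)
dy_attr = tuple(attr_list)

print(dy_attr)
# ('out_h', 12, 'out_w', 12, 'interp_method', 'bilinear')
# which is then splatted as: _C_ops.bilinear_interp(input, actual_shape, *dy_attr)
```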
@@ -681,14 +681,19 @@ def assign(input, output=None):
                 "saving it to file and 'load_op' to load it")
         if output is None:
             output = helper.create_variable_for_type_inference(dtype=dtype)
-        helper.append_op(
-            type='assign_value',
-            outputs={'Out': [output]},
-            attrs={
-                'dtype': dtype,
-                'shape': list(input.shape),
-                value_name: values
-            })
+        if _non_static_mode():
+            _C_ops.assign_value(output, 'shape',
+                                list(input.shape), 'dtype', dtype, value_name,
+                                values)
+        else:
+            helper.append_op(
+                type='assign_value',
+                outputs={'Out': [output]},
+                attrs={
+                    'dtype': dtype,
+                    'shape': list(input.shape),
+                    value_name: values
+                })

     if is_inplace and _non_static_mode():
         output._bump_inplace_version()
......
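In dynamic mode, assigning a numpy array now calls _C_ops.assign_value directly on the pre-created output instead of appending an assign_value op to a program. A usage sketch, assuming a Paddle build with this change:

```python
# Usage sketch: numpy input goes straight through _C_ops.assign_value in
# dynamic mode, with no program desc involved.
import numpy as np
import paddle
import paddle.fluid as fluid

paddle.disable_static()
data = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)
out = fluid.layers.assign(data)
print(out.numpy())
```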
@@ -32,7 +32,7 @@ import six
 from ...fluid.dygraph import BatchNorm  # noqa: F401
 from ...fluid.dygraph import SpectralNorm  # noqa: F401
-from ...framework import get_default_dtype, set_default_dtype
+from ...framework import get_default_dtype, set_default_dtype, _non_static_mode

 from ..initializer import Constant
 from ...framework import ParamAttr
@@ -404,6 +404,25 @@ class GroupNorm(Layer):
         self.bias.stop_gradient = self._bias_attr != None and self._bias_attr.learning_rate == 0.

     def forward(self, input):
+        mean_out = self._helper.create_variable_for_type_inference(
+            dtype=input.dtype, stop_gradient=True)
+        variance_out = self._helper.create_variable_for_type_inference(
+            dtype=input.dtype, stop_gradient=True)
+
+        if _non_static_mode():
+            pre_act, _, _ = _C_ops.group_norm(
+                input,
+                self.weight,
+                self.bias,
+                mean_out,
+                variance_out,
+                'epsilon',
+                self._epsilon,
+                'groups',
+                self._num_groups, )
+            return dygraph_utils._append_activation_in_dygraph(
+                pre_act, act=None)
+
         inputs = {'X': input}
         if self.bias is not None:
             inputs['Bias'] = self.bias
@@ -411,10 +430,6 @@ class GroupNorm(Layer):
             inputs['Scale'] = self.weight

         # create output
-        mean_out = self._helper.create_variable_for_type_inference(
-            dtype=input.dtype, stop_gradient=True)
-        variance_out = self._helper.create_variable_for_type_inference(
-            dtype=input.dtype, stop_gradient=True)
         group_norm_out = self._helper.create_variable_for_type_inference(
             dtype=input.dtype)
......
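Same pattern for the public layer: paddle.nn.GroupNorm pre-creates the mean/variance buffers and takes the _C_ops path in dynamic mode. A usage sketch, assuming a Paddle build that includes this commit:

```python
import paddle

x = paddle.rand([2, 8, 4, 4], dtype='float32')
gn = paddle.nn.GroupNorm(num_groups=4, num_channels=8)
y = gn(x)
print(y.shape)  # [2, 8, 4, 4]
```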
@@ -1568,14 +1568,19 @@ def assign(x, output=None):
         if output is None:
             output = helper.create_variable_for_type_inference(
                 dtype=input.dtype)
-        helper.append_op(
-            type='assign_value',
-            outputs={'Out': [output]},
-            attrs={
-                'dtype': dtype,
-                'shape': list(input.shape),
-                value_name: values
-            })
+        if _non_static_mode():
+            _C_ops.assign_value(output, 'shape',
+                                list(input.shape), 'dtype', dtype, value_name,
+                                values)
+        else:
+            helper.append_op(
+                type='assign_value',
+                outputs={'Out': [output]},
+                attrs={
+                    'dtype': dtype,
+                    'shape': list(input.shape),
+                    value_name: values
+                })

     if is_inplace and _in_legacy_dygraph():
         output._bump_inplace_version()
......
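The public paddle.assign gets the identical treatment, with the inplace version bump kept for autograd bookkeeping (note this file guards it with _in_legacy_dygraph rather than _non_static_mode). A usage sketch, assuming this Paddle build:

```python
import numpy as np
import paddle

x = np.array([2.5, 5.0], dtype=np.float32)
out = paddle.assign(x)        # eager path: _C_ops.assign_value, no append_op
print(out.numpy())            # [2.5 5. ]

buf = paddle.zeros([2])
paddle.assign(x, output=buf)  # writes into an existing tensor in place
print(buf.numpy())            # [2.5 5. ]
```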