Unverified commit 4d0efee4, authored by: S songyouwei, committed by: GitHub

Linear revert to matmul op (#23654)

test=develop
Parent commit: 517929f1
......@@ -936,9 +936,8 @@ class Linear(layers.Layer):
def forward(self, input):
if in_dygraph_mode():
pre_bias = core.ops.mul(input, self.weight, 'x_num_col_dims',
len(input.shape) - 1, 'y_num_col_dims', 1)
pre_bias = core.ops.matmul(input, self.weight, 'transpose_X', False,
'transpose_Y', False, "alpha", 1)
pre_act = dygraph_utils._append_bias_in_dygraph(
pre_bias, self.bias, axis=len(input.shape) - 1)
......@@ -949,14 +948,15 @@ class Linear(layers.Layer):
['float16', 'float32', 'float64'], "Linear")
attrs = {
"x_num_col_dims": len(input.shape) - 1,
"y_num_col_dims": 1,
"transpose_X": False,
"transpose_Y": False,
"alpha": 1,
}
inputs = {"X": [input], "Y": [self.weight]}
tmp = self._helper.create_variable_for_type_inference(self._dtype)
self._helper.append_op(
type="mul", inputs=inputs, outputs={"Out": tmp}, attrs=attrs)
type="matmul", inputs=inputs, outputs={"Out": tmp}, attrs=attrs)
if self.bias:
pre_activation = self._helper.create_variable_for_type_inference(
dtype=self._dtype)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Please finish editing this message first!
To comment, please register.