Unverified commit abb07f35, authored by zyfncg, committed by GitHub

Rename full infer_meta (#38332)

* rename full infer_meta

* fix merge problem
Parent 90e9a486
......@@ -30,7 +30,7 @@ DenseTensor FullLike(
DataType dtype = DataType::UNDEFINED,
Backend backend = Backend::UNDEFINED, // Is backend needed here?
DataLayout layout = DataLayout::UNDEFINED) {
auto out_meta = FullLikeInferMeta(x.meta(), dtype, layout);
auto out_meta = CreateLikeInferMeta(x.meta(), dtype, layout);
pten::DenseTensor dense_out(
pten::make_intrusive<paddle::experimental::SharedStorage>(
dev_ctx.GetPlace()),
......
......@@ -17,16 +17,16 @@ limitations under the License. */
namespace pten {
// Infers the output DenseTensorMeta (dtype, dims, layout) for creation-type
// kernels (e.g. `full`) from an explicitly given int64 shape vector.
// All three meta fields are taken verbatim from the arguments; only the
// dims are derived (from `shape` via make_ddim).
DenseTensorMeta CreateInferMeta(const std::vector<int64_t>& shape,
                                DataType dtype,
                                DataLayout layout) {
  const auto& out_dims = paddle::framework::make_ddim(shape);
  return {dtype, out_dims, layout};
}

// Overload for a shape carried in a ScalarArray (e.g. a shape coming from a
// Tensor or an attribute); extracts the concrete int64 data with GetData()
// before building the ddim. Otherwise identical to the vector overload.
DenseTensorMeta CreateInferMeta(const ScalarArray& shape,
                                DataType dtype,
                                DataLayout layout) {
  const auto& out_dims = paddle::framework::make_ddim(shape.GetData());
  return {dtype, out_dims, layout};
}
......
......@@ -27,12 +27,12 @@ namespace pten {
// Because functions in this file
// not only can infer shape, but also need to infer lod or other useful data.
// Infers the output meta for creation-type kernels (e.g. `full`) from an
// explicitly given int64 shape vector; dtype and layout are passed through.
DenseTensorMeta CreateInferMeta(const std::vector<int64_t>& shape,
                                DataType dtype,
                                DataLayout layout);

// Overload for a shape carried in a ScalarArray (shape may originate from a
// Tensor or an attribute rather than a plain vector).
DenseTensorMeta CreateInferMeta(const ScalarArray& shape,
                                DataType dtype,
                                DataLayout layout);
} // namespace pten
......@@ -81,9 +81,9 @@ DenseTensorMeta CastInferMeta(const DenseTensorMeta& x_meta,
return out_meta;
}
DenseTensorMeta FullLikeInferMeta(const DenseTensorMeta& x_meta,
DataType dtype,
DataLayout layout) {
DenseTensorMeta CreateLikeInferMeta(const DenseTensorMeta& x_meta,
DataType dtype,
DataLayout layout) {
return {dtype == DataType::UNDEFINED ? x_meta.dtype : dtype,
x_meta.dims,
layout == DataLayout::UNDEFINED ? x_meta.layout : layout};
......
......@@ -44,9 +44,9 @@ DenseTensorMeta FlattenInferMeta(const DenseTensorMeta& x_meta,
DenseTensorMeta CastInferMeta(const DenseTensorMeta& x_meta,
const DataType out_dtype);
// Infers the output meta for *_like creation kernels (e.g. `full_like`):
// an UNDEFINED dtype or layout falls back to the corresponding field of
// x_meta, and the dims are always inherited from x_meta.
DenseTensorMeta CreateLikeInferMeta(const DenseTensorMeta& x_meta,
                                    DataType dtype,
                                    DataLayout layout);
DenseTensorMeta InferMetaFromVecValue(const DenseTensorMeta& x_meta,
const std::vector<int64_t>& shape);
......
......@@ -48,7 +48,7 @@
args : (const ScalarArray& shape, const Scalar& value, DataType dtype=DataType::FLOAT32, Backend place=Backend::CPU, DataLayout layout=DataLayout::NCHW)
output: Tensor
infer_meta :
    func : CreateInferMeta
param : [shape, dtype, layout]
kernel :
func : full
......@@ -61,7 +61,7 @@
args : (const Tensor& x, const Scalar& value, DataType dtype = DataType::UNDEFINED, Backend place = Backend::UNDEFINED, DataLayout layout = DataLayout::UNDEFINED)
output: Tensor
infer_meta :
    func : CreateLikeInferMeta
param : [x, dtype, layout]
kernel :
func : full_like
......@@ -146,15 +146,6 @@
output : Tensor
invoke : full_like(x, 0, dtype, place, layout)
# - api : full_like
# args : (const Tensor& x, const Scalar& value, DataType dtype, Backend place)->Tensor
# output: {Tensor : dtype}
# kernel : fill_any_like
# T : [dtype, x]
# backend : [place, x]
# layout : []
# InferMeta : UnchangedInferMeta(x)
- api : conj
args : (const Tensor& x)
output : Tensor
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register