diff --git a/example/Bert_NEZHA_cnwiki/train.py b/example/Bert_NEZHA_cnwiki/train.py index 87f425e21c78018dffad8c5f1d9b8a37c084ed52..86e033fc9fdf90a5cc7a197e0cb4e27cf2b91667 100644 --- a/example/Bert_NEZHA_cnwiki/train.py +++ b/example/Bert_NEZHA_cnwiki/train.py @@ -36,7 +36,7 @@ import os import numpy as np from config import bert_train_cfg, bert_net_cfg import mindspore.dataset.engine.datasets as de -import mindspore._c_dataengine as deMap +import mindspore.dataset.transforms.c_transforms as C from mindspore import context from mindspore.common.tensor import Tensor from mindspore.train.model import Model @@ -52,7 +52,7 @@ def create_train_dataset(batch_size): ds = de.StorageDataset([bert_train_cfg.DATA_DIR], bert_train_cfg.SCHEMA_DIR, columns_list=["input_ids", "input_mask", "segment_ids", "next_sentence_labels", "masked_lm_positions", "masked_lm_ids", "masked_lm_weights"]) - type_cast_op = deMap.TypeCastOp("int32") + type_cast_op = C.TypeCast(mstype.int32) ds = ds.map(input_columns="masked_lm_ids", operations=type_cast_op) ds = ds.map(input_columns="masked_lm_positions", operations=type_cast_op) ds = ds.map(input_columns="next_sentence_labels", operations=type_cast_op) diff --git a/tests/st/mem_reuse/resnet_cifar_memreuse.py b/tests/st/mem_reuse/resnet_cifar_memreuse.py index 4699c00e736558d9ad88f2b7c63ce15d6ee76dd3..4edcdd8fb8a345b55e25497d56df4173aa12b185 100644 --- a/tests/st/mem_reuse/resnet_cifar_memreuse.py +++ b/tests/st/mem_reuse/resnet_cifar_memreuse.py @@ -24,8 +24,7 @@ import numpy as np import mindspore.ops.functional as F from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor from mindspore.train.serialization import load_checkpoint, load_param_into_net -import mindspore.dataengine as de -import mindspore._c_dataengine as deMap +import mindspore.dataset as de import mindspore.dataset.transforms.c_transforms as C import mindspore.dataset.transforms.vision.c_transforms as vision from mindspore.communication.management 
import init diff --git a/tests/st/mem_reuse/resnet_cifar_normal.py b/tests/st/mem_reuse/resnet_cifar_normal.py index bff0c2d6e66b606ef7bcf1057933dd5ce8161d28..39f6e7fe5958b7c351cddabac71e509bec63874c 100644 --- a/tests/st/mem_reuse/resnet_cifar_normal.py +++ b/tests/st/mem_reuse/resnet_cifar_normal.py @@ -24,8 +24,7 @@ import numpy as np import mindspore.ops.functional as F from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor from mindspore.train.serialization import load_checkpoint, load_param_into_net -import mindspore.dataengine as de -import mindspore._c_dataengine as deMap +import mindspore.dataset as de import mindspore.dataset.transforms.c_transforms as C import mindspore.dataset.transforms.vision.c_transforms as vision from mindspore.communication.management import init diff --git a/tests/st/networks/models/bert/bert_tdt_no_lossscale.py b/tests/st/networks/models/bert/bert_tdt_no_lossscale.py index c1ca6f64997d03438f7630a1455a36853cad6cfa..7c50707fbdbe5204dde091a91a178aa29f3cf8e9 100644 --- a/tests/st/networks/models/bert/bert_tdt_no_lossscale.py +++ b/tests/st/networks/models/bert/bert_tdt_no_lossscale.py @@ -21,7 +21,7 @@ import numpy as np from numpy import allclose import mindspore.common.dtype as mstype import mindspore.dataset.engine.datasets as de -import mindspore._c_dataengine as deMap +import mindspore.dataset.transforms.c_transforms as C from mindspore import context from mindspore.common.tensor import Tensor from mindspore.train.model import Model @@ -106,7 +106,7 @@ def me_de_train_dataset(): ds = de.StorageDataset(DATA_DIR, SCHEMA_DIR, columns_list=["input_ids", "input_mask", "segment_ids", "next_sentence_labels", "masked_lm_positions", "masked_lm_ids", "masked_lm_weights"]) - type_cast_op = deMap.TypeCastOp("int32") + type_cast_op = C.TypeCast(mstype.int32) ds = ds.map(input_columns="masked_lm_ids", operations=type_cast_op) ds = ds.map(input_columns="masked_lm_positions", operations=type_cast_op) ds = 
ds.map(input_columns="next_sentence_labels", operations=type_cast_op) diff --git a/tests/st/ops/davinci/test_tdt_data_ms.py b/tests/st/ops/davinci/test_tdt_data_ms.py index 6463401d82fa1b3f8ac9416c8714e3f5a9a6647f..89f6f212d0cacdf58fd2bfcc0887b141711a1fe0 100644 --- a/tests/st/ops/davinci/test_tdt_data_ms.py +++ b/tests/st/ops/davinci/test_tdt_data_ms.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ -import mindspore._c_dataengine as deMap import mindspore.dataset as ds +import mindspore.dataset.transforms.vision.c_transforms as vision +from mindspore.dataset.transforms.vision import Inter import numpy as np import sys -from mindspore._c_dataengine import InterpolationMode import mindspore.context as context import mindspore.nn as nn @@ -32,7 +32,7 @@ SCHEMA_DIR = "{0}/resnet_all_datasetSchema.json".format(data_path) def test_me_de_train_dataset(): data_list = ["{0}/train-00001-of-01024.data".format(data_path)] data_set = ds.StorageDataset(data_list, schema=SCHEMA_DIR, - columns_list=["image/encoded", "image/class/label"]) + columns_list=["image/encoded", "image/class/label"]) resize_height = 224 resize_width = 224 @@ -41,19 +41,17 @@ def test_me_de_train_dataset(): # define map operations - decode_op = deMap.DecodeOp() - resize_op = deMap.ResizeOp(resize_height, resize_width, - InterpolationMode.DE_INTER_LINEAR) # Bilinear as default - rescale_op = deMap.RescaleOp(rescale, shift) - changemode_op = deMap.ChangeModeOp() + decode_op = vision.Decode() + resize_op = vision.Resize((resize_height, resize_width), + Inter.LINEAR) # Bilinear as default + rescale_op = vision.Rescale(rescale, shift) # apply map operations on images - data_set = data_set.map(input_column_names="image/encoded", operation=decode_op) - data_set = data_set.map(input_column_names="image/encoded", operation=resize_op) - data_set = 
data_set.map(input_column_names="image/encoded", operation=rescale_op) - data_set = data_set.map(input_column_names="image/encoded", operation=changemode_op) - changeswap_op = deMap.ChannelSwapOp() - data_set = data_set.map(input_column_names="image/encoded", operation=changeswap_op) + data_set = data_set.map(input_columns="image/encoded", operations=decode_op) + data_set = data_set.map(input_columns="image/encoded", operations=resize_op) + data_set = data_set.map(input_columns="image/encoded", operations=rescale_op) + hwc2chw_op = vision.HWC2CHW() + data_set = data_set.map(input_columns="image/encoded", operations=hwc2chw_op) data_set = data_set.repeat(1) # apply batch operations batch_size = 32 diff --git a/tests/ut/python/dataset/test_minddataset.py b/tests/ut/python/dataset/test_minddataset.py index 8b8cbc807a9ed5ebe63a0796f1d2fe1cae3a35ae..da22f5c3b7c0b8aedfe1260dcc8a6ac62daf61ea 100644 --- a/tests/ut/python/dataset/test_minddataset.py +++ b/tests/ut/python/dataset/test_minddataset.py @@ -24,7 +24,6 @@ import string import mindspore.dataset.transforms.vision.c_transforms as vision import numpy as np import pytest -from mindspore._c_dataengine import InterpolationMode from mindspore.dataset.transforms.vision import Inter from mindspore import log as logger diff --git a/tests/ut/python/dataset/test_project.py b/tests/ut/python/dataset/test_project.py index de600e07dbd85dcf2eea8a52c2e5baae472574db..522788ac37b9b1d4a47215d65a49ab43e595a64b 100644 --- a/tests/ut/python/dataset/test_project.py +++ b/tests/ut/python/dataset/test_project.py @@ -13,7 +13,8 @@ # limitations under the License. 
# ============================================================================== import mindspore.dataset.transforms.vision.c_transforms as vision -import mindspore._c_dataengine as de_map +import mindspore.dataset.transforms.c_transforms as C +from mindspore.common import dtype as mstype from util import ordered_save_and_check import mindspore.dataset as ds @@ -63,9 +64,8 @@ def test_case_project_map(): data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False) data1 = data1.project(columns=columns) - no_op = de_map.NoOp() - - data1 = data1.map(input_columns=["col_3d"], operations=no_op) + type_cast_op = C.TypeCast(mstype.int64) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) filename = "project_map_after_result.npz" ordered_save_and_check(data1, parameters, filename, generate_golden=GENERATE_GOLDEN) @@ -77,8 +77,8 @@ def test_case_map_project(): data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False) - no_op = de_map.NoOp() - data1 = data1.map(input_columns=["col_sint64"], operations=no_op) + type_cast_op = C.TypeCast(mstype.int64) + data1 = data1.map(input_columns=["col_sint64"], operations=type_cast_op) data1 = data1.project(columns=columns) @@ -92,19 +92,19 @@ def test_case_project_between_maps(): data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False) - no_op = de_map.NoOp() - data1 = data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) + type_cast_op = C.TypeCast(mstype.int64) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) data1 = data1.project(columns=columns) - data1 = 
data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) - data1 = data1.map(input_columns=["col_3d"], operations=no_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) + data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op) filename = "project_between_maps_result.npz" ordered_save_and_check(data1, parameters, filename, generate_golden=GENERATE_GOLDEN) @@ -145,12 +145,12 @@ def test_case_map_project_map_project(): data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False) - no_op = de_map.NoOp() - data1 = data1.map(input_columns=["col_sint64"], operations=no_op) + type_cast_op = C.TypeCast(mstype.int64) + data1 = data1.map(input_columns=["col_sint64"], operations=type_cast_op) data1 = data1.project(columns=columns) - data1 = data1.map(input_columns=["col_2d"], operations=no_op) + data1 = data1.map(input_columns=["col_2d"], operations=type_cast_op) data1 = data1.project(columns=columns)