From 8d554d432282629801a4c4780475eaac9e95ae99 Mon Sep 17 00:00:00 2001
From: xiefangqi
Date: Mon, 30 Mar 2020 15:34:48 +0800
Subject: [PATCH] fix and remove useless import of example, st, ut

---
 example/Bert_NEZHA_cnwiki/train.py          |  4 +-
 tests/st/mem_reuse/resnet_cifar_memreuse.py |  3 +-
 tests/st/mem_reuse/resnet_cifar_normal.py   |  3 +-
 .../models/bert/bert_tdt_no_lossscale.py    |  4 +-
 tests/st/ops/davinci/test_tdt_data_ms.py    | 26 ++++++-------
 tests/ut/python/dataset/test_minddataset.py |  1 -
 tests/ut/python/dataset/test_project.py     | 38 +++++++++----------
 7 files changed, 37 insertions(+), 42 deletions(-)

diff --git a/example/Bert_NEZHA_cnwiki/train.py b/example/Bert_NEZHA_cnwiki/train.py
index 87f425e21..86e033fc9 100644
--- a/example/Bert_NEZHA_cnwiki/train.py
+++ b/example/Bert_NEZHA_cnwiki/train.py
@@ -36,7 +36,7 @@ import os
 import numpy as np
 from config import bert_train_cfg, bert_net_cfg
 import mindspore.dataset.engine.datasets as de
-import mindspore._c_dataengine as deMap
+import mindspore.dataset.transforms.c_transforms as C
 from mindspore import context
 from mindspore.common.tensor import Tensor
 from mindspore.train.model import Model
@@ -52,7 +52,7 @@ def create_train_dataset(batch_size):
     ds = de.StorageDataset([bert_train_cfg.DATA_DIR], bert_train_cfg.SCHEMA_DIR,
                            columns_list=["input_ids", "input_mask", "segment_ids", "next_sentence_labels",
                                          "masked_lm_positions", "masked_lm_ids", "masked_lm_weights"])
-    type_cast_op = deMap.TypeCastOp("int32")
+    type_cast_op = C.TypeCast(mstype.int32)
     ds = ds.map(input_columns="masked_lm_ids", operations=type_cast_op)
     ds = ds.map(input_columns="masked_lm_positions", operations=type_cast_op)
     ds = ds.map(input_columns="next_sentence_labels", operations=type_cast_op)
diff --git a/tests/st/mem_reuse/resnet_cifar_memreuse.py b/tests/st/mem_reuse/resnet_cifar_memreuse.py
index 4699c00e7..4edcdd8fb 100644
--- a/tests/st/mem_reuse/resnet_cifar_memreuse.py
+++ b/tests/st/mem_reuse/resnet_cifar_memreuse.py
@@ -24,8 +24,7 @@ import numpy as np
 import mindspore.ops.functional as F
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-import mindspore.dataengine as de
-import mindspore._c_dataengine as deMap
+import mindspore.dataset as de
 import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.transforms.vision.c_transforms as vision
 from mindspore.communication.management import init
diff --git a/tests/st/mem_reuse/resnet_cifar_normal.py b/tests/st/mem_reuse/resnet_cifar_normal.py
index bff0c2d6e..39f6e7fe5 100644
--- a/tests/st/mem_reuse/resnet_cifar_normal.py
+++ b/tests/st/mem_reuse/resnet_cifar_normal.py
@@ -24,8 +24,7 @@ import numpy as np
 import mindspore.ops.functional as F
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-import mindspore.dataengine as de
-import mindspore._c_dataengine as deMap
+import mindspore.dataset as de
 import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.transforms.vision.c_transforms as vision
 from mindspore.communication.management import init
diff --git a/tests/st/networks/models/bert/bert_tdt_no_lossscale.py b/tests/st/networks/models/bert/bert_tdt_no_lossscale.py
index c1ca6f649..7c50707fb 100644
--- a/tests/st/networks/models/bert/bert_tdt_no_lossscale.py
+++ b/tests/st/networks/models/bert/bert_tdt_no_lossscale.py
@@ -21,7 +21,7 @@ import numpy as np
 from numpy import allclose
 import mindspore.common.dtype as mstype
 import mindspore.dataset.engine.datasets as de
-import mindspore._c_dataengine as deMap
+import mindspore.dataset.transforms.c_transforms as C
 from mindspore import context
 from mindspore.common.tensor import Tensor
 from mindspore.train.model import Model
@@ -106,7 +106,7 @@ def me_de_train_dataset():
     ds = de.StorageDataset(DATA_DIR, SCHEMA_DIR, columns_list=["input_ids", "input_mask", "segment_ids",
                                                                "next_sentence_labels", "masked_lm_positions",
                                                                "masked_lm_ids", "masked_lm_weights"])
-    type_cast_op = deMap.TypeCastOp("int32")
+    type_cast_op = C.TypeCast(mstype.int32)
     ds = ds.map(input_columns="masked_lm_ids", operations=type_cast_op)
     ds = ds.map(input_columns="masked_lm_positions", operations=type_cast_op)
     ds = ds.map(input_columns="next_sentence_labels", operations=type_cast_op)
diff --git a/tests/st/ops/davinci/test_tdt_data_ms.py b/tests/st/ops/davinci/test_tdt_data_ms.py
index 6463401d8..89f6f212d 100644
--- a/tests/st/ops/davinci/test_tdt_data_ms.py
+++ b/tests/st/ops/davinci/test_tdt_data_ms.py
@@ -12,11 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
-import mindspore._c_dataengine as deMap
 import mindspore.dataset as ds
+import mindspore.dataset.transforms.vision.c_transforms as vision
+from mindspore.dataset.transforms.vision import Inter
 import numpy as np
 import sys
-from mindspore._c_dataengine import InterpolationMode
 
 import mindspore.context as context
 import mindspore.nn as nn
@@ -32,7 +32,7 @@ SCHEMA_DIR = "{0}/resnet_all_datasetSchema.json".format(data_path)
 def test_me_de_train_dataset():
     data_list = ["{0}/train-00001-of-01024.data".format(data_path)]
     data_set = ds.StorageDataset(data_list, schema=SCHEMA_DIR,
-                                 columns_list=["image/encoded", "image/class/label"])
+                                 columns_list=["image/encoded", "image/class/label"])
     resize_height = 224
     resize_width = 224
@@ -41,19 +41,17 @@
 
     # define map operations
-    decode_op = deMap.DecodeOp()
-    resize_op = deMap.ResizeOp(resize_height, resize_width,
-                               InterpolationMode.DE_INTER_LINEAR) # Bilinear as default
-    rescale_op = deMap.RescaleOp(rescale, shift)
-    changemode_op = deMap.ChangeModeOp()
+    decode_op = vision.Decode()
+    resize_op = vision.Resize(resize_height, resize_width,
+                              Inter.LINEAR) # Bilinear as default
+    rescale_op = vision.Rescale(rescale, shift)
 
     # apply map operations on images
-    data_set = data_set.map(input_column_names="image/encoded", operation=decode_op)
-    data_set = data_set.map(input_column_names="image/encoded", operation=resize_op)
-    data_set = data_set.map(input_column_names="image/encoded", operation=rescale_op)
-    data_set = data_set.map(input_column_names="image/encoded", operation=changemode_op)
-    changeswap_op = deMap.ChannelSwapOp()
-    data_set = data_set.map(input_column_names="image/encoded", operation=changeswap_op)
+    data_set = data_set.map(input_columns="image/encoded", operations=decode_op)
+    data_set = data_set.map(input_columns="image/encoded", operations=resize_op)
+    data_set = data_set.map(input_columns="image/encoded", operations=rescale_op)
+    hwc2chw_op = vision.HWC2CHW()
+    data_set = data_set.map(input_columns="image/encoded", operations=hwc2chw_op)
 
     data_set = data_set.repeat(1)
     # apply batch operations
     batch_size = 32
diff --git a/tests/ut/python/dataset/test_minddataset.py b/tests/ut/python/dataset/test_minddataset.py
index 8b8cbc807..da22f5c3b 100644
--- a/tests/ut/python/dataset/test_minddataset.py
+++ b/tests/ut/python/dataset/test_minddataset.py
@@ -24,7 +24,6 @@ import string
 import mindspore.dataset.transforms.vision.c_transforms as vision
 import numpy as np
 import pytest
-from mindspore._c_dataengine import InterpolationMode
 from mindspore.dataset.transforms.vision import Inter
 from mindspore import log as logger
 
diff --git a/tests/ut/python/dataset/test_project.py b/tests/ut/python/dataset/test_project.py
index de600e07d..522788ac3 100644
--- a/tests/ut/python/dataset/test_project.py
+++ b/tests/ut/python/dataset/test_project.py
@@ -13,7 +13,8 @@
 # limitations under the License.
 # ==============================================================================
 import mindspore.dataset.transforms.vision.c_transforms as vision
-import mindspore._c_dataengine as de_map
+import mindspore.dataset.transforms.c_transforms as C
+from mindspore.common import dtype as mstype
 from util import ordered_save_and_check
 
 import mindspore.dataset as ds
@@ -63,9 +64,8 @@ def test_case_project_map():
     data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
     data1 = data1.project(columns=columns)
 
-    no_op = de_map.NoOp()
-
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
+    type_cast_op = C.TypeCast(mstype.int64)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
 
     filename = "project_map_after_result.npz"
     ordered_save_and_check(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)
@@ -77,8 +77,8 @@ def test_case_map_project():
 
     data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
 
-    no_op = de_map.NoOp()
-    data1 = data1.map(input_columns=["col_sint64"], operations=no_op)
+    type_cast_op = C.TypeCast(mstype.int64)
+    data1 = data1.map(input_columns=["col_sint64"], operations=type_cast_op)
 
     data1 = data1.project(columns=columns)
 
@@ -92,19 +92,19 @@ def test_case_project_between_maps():
 
     data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
 
-    no_op = de_map.NoOp()
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
+    type_cast_op = C.TypeCast(mstype.int64)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
 
     data1 = data1.project(columns=columns)
 
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
-    data1 = data1.map(input_columns=["col_3d"], operations=no_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
+    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
 
     filename = "project_between_maps_result.npz"
     ordered_save_and_check(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)
@@ -145,12 +145,12 @@ def test_case_map_project_map_project():
 
     data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
 
-    no_op = de_map.NoOp()
-    data1 = data1.map(input_columns=["col_sint64"], operations=no_op)
+    type_cast_op = C.TypeCast(mstype.int64)
+    data1 = data1.map(input_columns=["col_sint64"], operations=type_cast_op)
 
     data1 = data1.project(columns=columns)
 
-    data1 = data1.map(input_columns=["col_2d"], operations=no_op)
+    data1 = data1.map(input_columns=["col_2d"], operations=type_cast_op)
 
     data1 = data1.project(columns=columns)
 
--
GitLab
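
Note: the snippet below is not part of the patch. It is a minimal, self-contained sketch of the public dataset API that these files are migrated to, assuming a MindSpore build of the same era; the file paths ("/path/to/data.tfrecord", "/path/to/schema.json") and column names ("image", "label") are placeholders, not taken from the repository. It mirrors the pattern used in the patched scripts: the vision c_transforms (Decode, Resize, Rescale, HWC2CHW) chained through Dataset.map on the image column, plus a c_transforms TypeCast on the label column, in place of the removed mindspore._c_dataengine operators.

import mindspore.common.dtype as mstype
import mindspore.dataset as ds
import mindspore.dataset.transforms.c_transforms as C
import mindspore.dataset.transforms.vision.c_transforms as vision
from mindspore.dataset.transforms.vision import Inter

# Placeholder input: any TFRecord file with an encoded-image column and an
# integer label column would do here.
data_set = ds.TFRecordDataset(["/path/to/data.tfrecord"], "/path/to/schema.json",
                              columns_list=["image", "label"], shuffle=False)

# Operators from the public transform APIs, matching the replacements above.
decode_op = vision.Decode()                      # decode raw bytes to an image
resize_op = vision.Resize((224, 224), Inter.LINEAR)  # bilinear resize
rescale_op = vision.Rescale(1.0 / 255.0, 0.0)    # scale pixel values to [0, 1]
hwc2chw_op = vision.HWC2CHW()                    # HWC -> CHW layout
type_cast_op = C.TypeCast(mstype.int32)          # cast labels to int32

# One operator per map call, in the same style as the patched test scripts.
data_set = data_set.map(input_columns="image", operations=decode_op)
data_set = data_set.map(input_columns="image", operations=resize_op)
data_set = data_set.map(input_columns="image", operations=rescale_op)
data_set = data_set.map(input_columns="image", operations=hwc2chw_op)
data_set = data_set.map(input_columns="label", operations=type_cast_op)
data_set = data_set.batch(32, drop_remainder=True)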