Commit deb11464 authored by: T tony_liu2

reorganize headers and tests

remove headers

fix iterator file

fix pr comments

fix new file
Parent 64b0feb7
@@ -13,37 +13,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#include <fstream>
-#include <iostream>
-#include <memory>
-#include <vector>
-#include <string>
-
-#include "utils/log_adapter.h"
-#include "utils/ms_utils.h"
 #include "common/common.h"
-#include "gtest/gtest.h"
-#include "securec.h"
 #include "minddata/dataset/include/datasets.h"
-#include "minddata/dataset/include/status.h"
-#include "minddata/dataset/include/transforms.h"
-#include "minddata/dataset/include/iterator.h"
-#include "minddata/dataset/core/constants.h"
-#include "minddata/dataset/core/tensor_shape.h"
-#include "minddata/dataset/core/tensor.h"
-#include "minddata/dataset/include/samplers.h"
 
 using namespace mindspore::dataset::api;
-using mindspore::MsLogLevel::ERROR;
-using mindspore::ExceptionType::NoExceptionType;
-using mindspore::LogStream;
 using mindspore::dataset::Tensor;
-using mindspore::dataset::TensorShape;
-using mindspore::dataset::TensorImpl;
-using mindspore::dataset::DataType;
-using mindspore::dataset::Status;
-using mindspore::dataset::BorderType;
-using mindspore::dataset::dsize_t;
 
 class MindDataTestPipeline : public UT::DatasetOpTesting {
  protected:
@@ -83,14 +57,6 @@ TEST_F(MindDataTestPipeline, TestCifar10Dataset) {
   iter->Stop();
 }
 
-TEST_F(MindDataTestPipeline, TestCifar10DatasetFail1) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail1.";
-
-  // Create a Cifar10 Dataset
-  std::shared_ptr<Dataset> ds = Cifar10("", RandomSampler(false, 10));
-  EXPECT_EQ(ds, nullptr);
-}
-
 TEST_F(MindDataTestPipeline, TestCifar100Dataset) {
   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar100Dataset.";
@@ -133,3 +99,11 @@ TEST_F(MindDataTestPipeline, TestCifar100DatasetFail1) {
   std::shared_ptr<Dataset> ds = Cifar100("", RandomSampler(false, 10));
   EXPECT_EQ(ds, nullptr);
 }
+
+TEST_F(MindDataTestPipeline, TestCifar10DatasetFail1) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCifar10DatasetFail1.";
+
+  // Create a Cifar10 Dataset
+  std::shared_ptr<Dataset> ds = Cifar10("", RandomSampler(false, 10));
+  EXPECT_EQ(ds, nullptr);
+}
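
For orientation, the pattern these Cifar tests exercise is: build a Dataset from a factory, create an iterator (which builds and launches the execution tree), drain it row by row, then stop the pipeline. A minimal sketch, assuming the MindData C++ API as it appears in this diff; CountCifar10Rows is a hypothetical helper, not code from the commit:

    // Hedged sketch: build a pipeline, iterate it, and stop it manually.
    #include <cstdint>
    #include <memory>
    #include <string>
    #include <unordered_map>
    #include "minddata/dataset/include/datasets.h"

    using namespace mindspore::dataset::api;
    using mindspore::dataset::Tensor;

    uint64_t CountCifar10Rows(const std::string &folder) {
      // An invalid folder (e.g. "") yields a nullptr Dataset, as the Fail1 test expects.
      std::shared_ptr<Dataset> ds = Cifar10(folder, RandomSampler(false, 10));
      if (ds == nullptr) return 0;

      // CreateIterator triggers the creation of the Execution Tree and launches it.
      std::shared_ptr<Iterator> iter = ds->CreateIterator();
      std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
      uint64_t n = 0;
      for (iter->GetNextRow(&row); !row.empty(); iter->GetNextRow(&row)) {
        n++;  // one map of column-name -> Tensor per sample
      }
      iter->Stop();  // manually terminate the pipeline
      return n;
    }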
@@ -14,9 +14,9 @@
  * limitations under the License.
  */
 #include "common/common.h"
-#include "minddata/dataset/include/datasets.h"
-#include "minddata/dataset/core/global_context.h"
 #include "minddata/dataset/core/config_manager.h"
+#include "minddata/dataset/core/global_context.h"
+#include "minddata/dataset/include/datasets.h"
 
 using namespace mindspore::dataset::api;
 using mindspore::dataset::ShuffleMode;
@@ -27,76 +27,6 @@ class MindDataTestPipeline : public UT::DatasetOpTesting {
  protected:
 };
 
-TEST_F(MindDataTestPipeline, TestCLUEDatasetBasic) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetBasic.";
-
-  // Create a CLUEFile Dataset, with single CLUE file
-  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
-  std::string task = "AFQMC";
-  std::string usage = "train";
-  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 2);
-  EXPECT_NE(ds, nullptr);
-
-  // Create an iterator over the result of the above dataset
-  // This will trigger the creation of the Execution Tree and launch it.
-  std::shared_ptr<Iterator> iter = ds->CreateIterator();
-  EXPECT_NE(iter, nullptr);
-
-  // Iterate the dataset and get each row
-  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
-  iter->GetNextRow(&row);
-
-  EXPECT_NE(row.find("sentence1"), row.end());
-  uint64_t i = 0;
-  while (row.size() != 0) {
-    auto text = row["sentence1"];
-    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
-    i++;
-    iter->GetNextRow(&row);
-  }
-
-  // Expect 2 samples
-  EXPECT_EQ(i, 2);
-
-  // Manually terminate the pipeline
-  iter->Stop();
-}
-
-TEST_F(MindDataTestPipeline, TestCLUEDatasetDistribution) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetDistribution.";
-
-  // Create a CLUEFile Dataset, with single CLUE file
-  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
-  std::string task = "AFQMC";
-  std::string usage = "train";
-  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 3, 0);
-  EXPECT_NE(ds, nullptr);
-
-  // Create an iterator over the result of the above dataset
-  // This will trigger the creation of the Execution Tree and launch it.
-  std::shared_ptr<Iterator> iter = ds->CreateIterator();
-  EXPECT_NE(iter, nullptr);
-
-  // Iterate the dataset and get each row
-  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
-  iter->GetNextRow(&row);
-
-  EXPECT_NE(row.find("sentence1"), row.end());
-  uint64_t i = 0;
-  while (row.size() != 0) {
-    auto text = row["sentence1"];
-    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
-    i++;
-    iter->GetNextRow(&row);
-  }
-
-  // Expect 1 samples
-  EXPECT_EQ(i, 1);
-
-  // Manually terminate the pipeline
-  iter->Stop();
-}
-
 TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) {
   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetAFQMC.";
@@ -194,6 +124,41 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetAFQMC) {
   iter->Stop();
 }
 
+TEST_F(MindDataTestPipeline, TestCLUEDatasetBasic) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetBasic.";
+
+  // Create a CLUEFile Dataset, with single CLUE file
+  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
+  std::string task = "AFQMC";
+  std::string usage = "train";
+  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 2);
+  EXPECT_NE(ds, nullptr);
+
+  // Create an iterator over the result of the above dataset
+  // This will trigger the creation of the Execution Tree and launch it.
+  std::shared_ptr<Iterator> iter = ds->CreateIterator();
+  EXPECT_NE(iter, nullptr);
+
+  // Iterate the dataset and get each row
+  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
+  iter->GetNextRow(&row);
+
+  EXPECT_NE(row.find("sentence1"), row.end());
+  uint64_t i = 0;
+  while (row.size() != 0) {
+    auto text = row["sentence1"];
+    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
+    i++;
+    iter->GetNextRow(&row);
+  }
+
+  // Expect 2 samples
+  EXPECT_EQ(i, 2);
+
+  // Manually terminate the pipeline
+  iter->Stop();
+}
+
 TEST_F(MindDataTestPipeline, TestCLUEDatasetCMNLI) {
   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetCMNLI.";
@@ -284,14 +249,14 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetCSL) {
   iter->Stop();
 }
 
-TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetIFLYTEK.";
+TEST_F(MindDataTestPipeline, TestCLUEDatasetDistribution) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetDistribution.";
 
   // Create a CLUEFile Dataset, with single CLUE file
-  std::string clue_file = datasets_root_path_ + "/testCLUE/iflytek/train.json";
-  std::string task = "IFLYTEK";
+  std::string clue_file = datasets_root_path_ + "/testCLUE/afqmc/train.json";
+  std::string task = "AFQMC";
   std::string usage = "train";
-  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kFalse);
+  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 3, 0);
   EXPECT_NE(ds, nullptr);
 
   // Create an iterator over the result of the above dataset
@@ -303,38 +268,61 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) {
   std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
   iter->GetNextRow(&row);
 
-  EXPECT_NE(row.find("sentence"), row.end());
-  std::vector<std::string> expected_result = {
-    "第一个文本",
-    "第二个文本",
-    "第三个文本"
-  };
+  EXPECT_NE(row.find("sentence1"), row.end());
   uint64_t i = 0;
   while (row.size() != 0) {
-    auto text = row["sentence"];
-    std::string_view sv;
-    text->GetItemAt(&sv, {0});
-    std::string ss(sv);
-    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
+    auto text = row["sentence1"];
     MS_LOG(INFO) << "Tensor text shape: " << text->shape();
-    iter->GetNextRow(&row);
     i++;
+    iter->GetNextRow(&row);
   }
 
-  // Expect 3 samples
-  EXPECT_EQ(i, 3);
+  // Expect 1 samples
+  EXPECT_EQ(i, 1);
 
   // Manually terminate the pipeline
   iter->Stop();
 }
-TEST_F(MindDataTestPipeline, TestCLUEDatasetTNEWS) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetTNEWS.";
+TEST_F(MindDataTestPipeline, TestCLUEDatasetException) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetException.";
+
+  // Create a CLUE Dataset
+  std::string clue_file = datasets_root_path_ + "/testCLUE/wsc/train.json";
+  std::string task = "WSC";
+  std::string usage = "train";
+  std::string invalid_clue_file = "./NotExistFile";
+
+  std::shared_ptr<Dataset> ds0 = CLUE({}, task, usage);
+  EXPECT_EQ(ds0, nullptr);
+
+  std::shared_ptr<Dataset> ds1 = CLUE({invalid_clue_file}, task, usage);
+  EXPECT_EQ(ds1, nullptr);
+
+  std::shared_ptr<Dataset> ds2 = CLUE({clue_file}, "invalid_task", usage);
+  EXPECT_EQ(ds2, nullptr);
+
+  std::shared_ptr<Dataset> ds3 = CLUE({clue_file}, task, "invalid_usage");
+  EXPECT_EQ(ds3, nullptr);
+
+  std::shared_ptr<Dataset> ds4 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 2, 2);
+  EXPECT_EQ(ds4, nullptr);
+
+  std::shared_ptr<Dataset> ds5 = CLUE({clue_file}, task, usage, -1, ShuffleMode::kGlobal);
+  EXPECT_EQ(ds5, nullptr);
+
+  std::shared_ptr<Dataset> ds6 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, -1);
+  EXPECT_EQ(ds6, nullptr);
+
+  std::shared_ptr<Dataset> ds7 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 0, -1);
+  EXPECT_EQ(ds7, nullptr);
+}
+
+TEST_F(MindDataTestPipeline, TestCLUEDatasetIFLYTEK) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetIFLYTEK.";
 
   // Create a CLUEFile Dataset, with single CLUE file
-  std::string clue_file = datasets_root_path_ + "/testCLUE/tnews/train.json";
-  std::string task = "TNEWS";
+  std::string clue_file = datasets_root_path_ + "/testCLUE/iflytek/train.json";
+  std::string task = "IFLYTEK";
   std::string usage = "train";
   std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kFalse);
   EXPECT_NE(ds, nullptr);
@@ -350,9 +338,9 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetTNEWS) {
   EXPECT_NE(row.find("sentence"), row.end());
   std::vector<std::string> expected_result = {
-    "新闻1",
-    "新闻2",
-    "新闻3"
+    "第一个文本",
+    "第二个文本",
+    "第三个文本"
   };
 
   uint64_t i = 0;
@@ -374,17 +362,31 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetTNEWS) {
   iter->Stop();
 }
 
-TEST_F(MindDataTestPipeline, TestCLUEDatasetWSC) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetWSC.";
+TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFiles) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetShuffleFiles.";
+  // Test CLUE Dataset with files shuffle, num_parallel_workers=1
 
-  // Create a CLUEFile Dataset, with single CLUE file
-  std::string clue_file = datasets_root_path_ + "/testCLUE/wsc/train.json";
-  std::string task = "WSC";
+  // Set configuration
+  uint32_t original_seed = GlobalContext::config_manager()->seed();
+  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
+  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
+  GlobalContext::config_manager()->set_seed(135);
+  GlobalContext::config_manager()->set_num_parallel_workers(1);
+
+  // Create a CLUE Dataset, with two text files
+  // Note: train.json has 3 rows
+  // Note: dev.json has 3 rows
+  // Use default of all samples
+  // They have the same keywords
+  // Set shuffle to files shuffle
+  std::string clue_file1 = datasets_root_path_ + "/testCLUE/afqmc/train.json";
+  std::string clue_file2 = datasets_root_path_ + "/testCLUE/afqmc/dev.json";
+  std::string task = "AFQMC";
   std::string usage = "train";
-  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kFalse);
+  std::shared_ptr<Dataset> ds = CLUE({clue_file1, clue_file2}, task, usage, 0, ShuffleMode::kFiles);
   EXPECT_NE(ds, nullptr);
 
-  // Create an iterator over the result of the above dataset
+  // Create an iterator over the result of the above dataset.
   // This will trigger the creation of the Execution Tree and launch it.
   std::shared_ptr<Iterator> iter = ds->CreateIterator();
   EXPECT_NE(iter, nullptr);
@@ -393,30 +395,37 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetWSC) {
   std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
   iter->GetNextRow(&row);
 
-  EXPECT_NE(row.find("text"), row.end());
+  EXPECT_NE(row.find("sentence1"), row.end());
   std::vector<std::string> expected_result = {
-    "小明呢,他在哪?",
-    "小红刚刚看到小明,他在操场",
-    "等小明回来,小张你叫他交作业"
+    "蚂蚁借呗等额还款能否换成先息后本",
+    "蚂蚁花呗说我违约了",
+    "帮我看看本月花呗账单结清了没",
+    "你有花呗吗",
+    "吃饭能用花呗吗",
+    "蚂蚁花呗支付金额有什么限制"
   };
 
   uint64_t i = 0;
   while (row.size() != 0) {
-    auto text = row["text"];
+    auto text = row["sentence1"];
     std::string_view sv;
     text->GetItemAt(&sv, {0});
     std::string ss(sv);
+    // Compare against expected result
     EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
-    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
-    iter->GetNextRow(&row);
     i++;
+    iter->GetNextRow(&row);
   }
 
-  // Expect 3 samples
-  EXPECT_EQ(i, 3);
+  // Expect 3 + 3 = 6 samples
+  EXPECT_EQ(i, 6);
 
   // Manually terminate the pipeline
   iter->Stop();
+
+  // Restore configuration
+  GlobalContext::config_manager()->set_seed(original_seed);
+  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
 }
 TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleGlobal) {
@@ -476,31 +485,17 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleGlobal) {
   GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
 }
 
-TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFiles) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetShuffleFiles.";
-  // Test CLUE Dataset with files shuffle, num_parallel_workers=1
+TEST_F(MindDataTestPipeline, TestCLUEDatasetTNEWS) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetTNEWS.";
 
-  // Set configuration
-  uint32_t original_seed = GlobalContext::config_manager()->seed();
-  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
-  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
-  GlobalContext::config_manager()->set_seed(135);
-  GlobalContext::config_manager()->set_num_parallel_workers(1);
-
-  // Create a CLUE Dataset, with two text files
-  // Note: train.json has 3 rows
-  // Note: dev.json has 3 rows
-  // Use default of all samples
-  // They have the same keywords
-  // Set shuffle to files shuffle
-  std::string clue_file1 = datasets_root_path_ + "/testCLUE/afqmc/train.json";
-  std::string clue_file2 = datasets_root_path_ + "/testCLUE/afqmc/dev.json";
-  std::string task = "AFQMC";
+  // Create a CLUEFile Dataset, with single CLUE file
+  std::string clue_file = datasets_root_path_ + "/testCLUE/tnews/train.json";
+  std::string task = "TNEWS";
   std::string usage = "train";
-  std::shared_ptr<Dataset> ds = CLUE({clue_file1, clue_file2}, task, usage, 0, ShuffleMode::kFiles);
+  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kFalse);
   EXPECT_NE(ds, nullptr);
 
-  // Create an iterator over the result of the above dataset.
+  // Create an iterator over the result of the above dataset
   // This will trigger the creation of the Execution Tree and launch it.
   std::shared_ptr<Iterator> iter = ds->CreateIterator();
   EXPECT_NE(iter, nullptr);
@@ -509,68 +504,73 @@ TEST_F(MindDataTestPipeline, TestCLUEDatasetShuffleFiles) {
   std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
   iter->GetNextRow(&row);
 
-  EXPECT_NE(row.find("sentence1"), row.end());
+  EXPECT_NE(row.find("sentence"), row.end());
   std::vector<std::string> expected_result = {
-    "蚂蚁借呗等额还款能否换成先息后本",
-    "蚂蚁花呗说我违约了",
-    "帮我看看本月花呗账单结清了没",
-    "你有花呗吗",
-    "吃饭能用花呗吗",
-    "蚂蚁花呗支付金额有什么限制"
+    "新闻1",
+    "新闻2",
+    "新闻3"
   };
 
   uint64_t i = 0;
   while (row.size() != 0) {
-    auto text = row["sentence1"];
+    auto text = row["sentence"];
     std::string_view sv;
     text->GetItemAt(&sv, {0});
     std::string ss(sv);
-    // Compare against expected result
     EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
-    i++;
+    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
     iter->GetNextRow(&row);
+    i++;
   }
 
-  // Expect 3 + 3 = 6 samples
-  EXPECT_EQ(i, 6);
+  // Expect 3 samples
+  EXPECT_EQ(i, 3);
 
   // Manually terminate the pipeline
   iter->Stop();
-
-  // Restore configuration
-  GlobalContext::config_manager()->set_seed(original_seed);
-  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
 }
 
-TEST_F(MindDataTestPipeline, TestCLUEDatasetException) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetException.";
+TEST_F(MindDataTestPipeline, TestCLUEDatasetWSC) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCLUEDatasetWSC.";
 
-  // Create a CLUE Dataset
+  // Create a CLUEFile Dataset, with single CLUE file
   std::string clue_file = datasets_root_path_ + "/testCLUE/wsc/train.json";
   std::string task = "WSC";
   std::string usage = "train";
-  std::string invalid_clue_file = "./NotExistFile";
-
-  std::shared_ptr<Dataset> ds0 = CLUE({}, task, usage);
-  EXPECT_EQ(ds0, nullptr);
-
-  std::shared_ptr<Dataset> ds1 = CLUE({invalid_clue_file}, task, usage);
-  EXPECT_EQ(ds1, nullptr);
-
-  std::shared_ptr<Dataset> ds2 = CLUE({clue_file}, "invalid_task", usage);
-  EXPECT_EQ(ds2, nullptr);
-
-  std::shared_ptr<Dataset> ds3 = CLUE({clue_file}, task, "invalid_usage");
-  EXPECT_EQ(ds3, nullptr);
-
-  std::shared_ptr<Dataset> ds4 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 2, 2);
-  EXPECT_EQ(ds4, nullptr);
-
-  std::shared_ptr<Dataset> ds5 = CLUE({clue_file}, task, usage, -1, ShuffleMode::kGlobal);
-  EXPECT_EQ(ds5, nullptr);
-
-  std::shared_ptr<Dataset> ds6 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, -1);
-  EXPECT_EQ(ds6, nullptr);
-
-  std::shared_ptr<Dataset> ds7 = CLUE({clue_file}, task, usage, 0, ShuffleMode::kGlobal, 0, -1);
-  EXPECT_EQ(ds7, nullptr);
+  std::shared_ptr<Dataset> ds = CLUE({clue_file}, task, usage, 0, ShuffleMode::kFalse);
+  EXPECT_NE(ds, nullptr);
+
+  // Create an iterator over the result of the above dataset
+  // This will trigger the creation of the Execution Tree and launch it.
+  std::shared_ptr<Iterator> iter = ds->CreateIterator();
+  EXPECT_NE(iter, nullptr);
+
+  // Iterate the dataset and get each row
+  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
+  iter->GetNextRow(&row);
+
+  EXPECT_NE(row.find("text"), row.end());
+  std::vector<std::string> expected_result = {
+    "小明呢,他在哪?",
+    "小红刚刚看到小明,他在操场",
+    "等小明回来,小张你叫他交作业"
+  };
+
+  uint64_t i = 0;
+  while (row.size() != 0) {
+    auto text = row["text"];
+    std::string_view sv;
+    text->GetItemAt(&sv, {0});
+    std::string ss(sv);
+    EXPECT_STREQ(ss.c_str(), expected_result[i].c_str());
+    MS_LOG(INFO) << "Tensor text shape: " << text->shape();
+    iter->GetNextRow(&row);
+    i++;
+  }
+
+  // Expect 3 samples
+  EXPECT_EQ(i, 3);
+
+  // Manually terminate the pipeline
+  iter->Stop();
 }
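
As the tests above exercise it, the CLUE factory appears to take (dataset_files, task, usage, num_samples, shuffle, num_shards, shard_id) and to return nullptr when validation fails. A minimal sketch under that assumption; ValidClueShardConfig is a hypothetical helper, not code from the commit:

    // Hedged sketch of the validation behaviour the Exception test checks:
    // an out-of-range shard_id (shard_id >= num_shards) should yield nullptr.
    #include <cstdint>
    #include <memory>
    #include <string>
    #include "minddata/dataset/include/datasets.h"

    using namespace mindspore::dataset::api;
    using mindspore::dataset::ShuffleMode;

    bool ValidClueShardConfig(const std::string &file, int32_t num_shards, int32_t shard_id) {
      std::shared_ptr<Dataset> ds =
          CLUE({file}, "AFQMC", "train", 0, ShuffleMode::kGlobal, num_shards, shard_id);
      return ds != nullptr;  // false for e.g. num_shards = 2, shard_id = 2
    }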
@@ -13,49 +13,25 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#include <fstream>
-#include <iostream>
-#include <memory>
-#include <vector>
-#include <string>
-
-#include "utils/log_adapter.h"
-#include "utils/ms_utils.h"
 #include "common/common.h"
-#include "gtest/gtest.h"
-#include "securec.h"
 #include "minddata/dataset/include/datasets.h"
-#include "minddata/dataset/include/status.h"
-#include "minddata/dataset/include/transforms.h"
-#include "minddata/dataset/include/iterator.h"
-#include "minddata/dataset/core/constants.h"
-#include "minddata/dataset/core/tensor_shape.h"
-#include "minddata/dataset/core/tensor.h"
-#include "minddata/dataset/include/samplers.h"
 
 using namespace mindspore::dataset::api;
-using mindspore::MsLogLevel::ERROR;
-using mindspore::ExceptionType::NoExceptionType;
-using mindspore::LogStream;
 using mindspore::dataset::Tensor;
 using mindspore::dataset::TensorShape;
-using mindspore::dataset::TensorImpl;
-using mindspore::dataset::DataType;
-using mindspore::dataset::Status;
-using mindspore::dataset::BorderType;
 using mindspore::dataset::dsize_t;
 
 class MindDataTestPipeline : public UT::DatasetOpTesting {
  protected:
 };
 
-TEST_F(MindDataTestPipeline, TestCocoDetection) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
+TEST_F(MindDataTestPipeline, TestCocoDefault) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDefault.";
 
   // Create a Coco Dataset
   std::string folder_path = datasets_root_path_ + "/testCOCO/train";
   std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
-  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Detection", false, SequentialSampler(0, 6));
+  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file);
   EXPECT_NE(ds, nullptr);
 
   // Create an iterator over the result of the above dataset
@@ -67,28 +43,14 @@ TEST_F(MindDataTestPipeline, TestCocoDetection) {
   std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
   iter->GetNextRow(&row);
 
-  std::string expect_file[] = {"000000391895", "000000318219", "000000554625", "000000574769", "000000060623",
-                               "000000309022"};
-  std::vector<std::vector<float>> expect_bbox_vector = {{10.0, 10.0, 10.0, 10.0, 70.0, 70.0, 70.0, 70.0},
-                                                        {20.0, 20.0, 20.0, 20.0, 80.0, 80.0, 80.0, 80.0},
-                                                        {30.0, 30.0, 30.0, 30.0}, {40.0, 40.0, 40.0, 40.0},
-                                                        {50.0, 50.0, 50.0, 50.0}, {60.0, 60.0, 60.0, 60.0}};
-  std::vector<std::vector<uint32_t>> expect_catagoryid_list = {{1, 7}, {2, 8}, {3}, {4}, {5}, {6}};
   uint64_t i = 0;
   while (row.size() != 0) {
     auto image = row["image"];
     auto bbox = row["bbox"];
     auto category_id = row["category_id"];
-    std::shared_ptr<Tensor> expect_image;
-    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
-    EXPECT_EQ(*image, *expect_image);
-    std::shared_ptr<Tensor> expect_bbox;
-    dsize_t bbox_num = static_cast<dsize_t>(expect_bbox_vector[i].size() / 4);
-    Tensor::CreateFromVector(expect_bbox_vector[i], TensorShape({bbox_num, 4}), &expect_bbox);
-    EXPECT_EQ(*bbox, *expect_bbox);
-    std::shared_ptr<Tensor> expect_categoryid;
-    Tensor::CreateFromVector(expect_catagoryid_list[i], TensorShape({bbox_num, 1}), &expect_categoryid);
-    EXPECT_EQ(*category_id, *expect_categoryid);
+    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
+    MS_LOG(INFO) << "Tensor bbox shape: " << bbox->shape();
+    MS_LOG(INFO) << "Tensor category_id shape: " << category_id->shape();
     iter->GetNextRow(&row);
     i++;
   }
@@ -99,13 +61,13 @@ TEST_F(MindDataTestPipeline, TestCocoDetection) {
   iter->Stop();
 }
 
-TEST_F(MindDataTestPipeline, TestCocoStuff) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoStuff.";
+TEST_F(MindDataTestPipeline, TestCocoDetection) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
 
   // Create a Coco Dataset
   std::string folder_path = datasets_root_path_ + "/testCOCO/train";
   std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
-  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Stuff", false, SequentialSampler(0, 6));
+  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Detection", false, SequentialSampler(0, 6));
   EXPECT_NE(ds, nullptr);
 
   // Create an iterator over the result of the above dataset
@@ -119,27 +81,26 @@ TEST_F(MindDataTestPipeline, TestCocoStuff) {
   std::string expect_file[] = {"000000391895", "000000318219", "000000554625", "000000574769", "000000060623",
                                "000000309022"};
-  std::vector<std::vector<float>> expect_segmentation_vector =
-    {{10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0,
-      70.0, 72.0, 73.0, 74.0, 75.0, -1.0, -1.0, -1.0, -1.0, -1.0},
-     {20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0,
-      10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0},
-     {40.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 40.0, 41.0, 42.0},
-     {50.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0},
-     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0},
-     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0}};
-  std::vector<std::vector<dsize_t>> expect_size = {{2, 10}, {2, 11}, {1, 12}, {1, 13}, {1, 14}, {2, 7}};
+  std::vector<std::vector<float>> expect_bbox_vector = {{10.0, 10.0, 10.0, 10.0, 70.0, 70.0, 70.0, 70.0},
+                                                        {20.0, 20.0, 20.0, 20.0, 80.0, 80.0, 80.0, 80.0},
+                                                        {30.0, 30.0, 30.0, 30.0}, {40.0, 40.0, 40.0, 40.0},
+                                                        {50.0, 50.0, 50.0, 50.0}, {60.0, 60.0, 60.0, 60.0}};
+  std::vector<std::vector<uint32_t>> expect_catagoryid_list = {{1, 7}, {2, 8}, {3}, {4}, {5}, {6}};
   uint64_t i = 0;
   while (row.size() != 0) {
     auto image = row["image"];
-    auto segmentation = row["segmentation"];
-    auto iscrowd = row["iscrowd"];
+    auto bbox = row["bbox"];
+    auto category_id = row["category_id"];
     std::shared_ptr<Tensor> expect_image;
     Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
     EXPECT_EQ(*image, *expect_image);
-    std::shared_ptr<Tensor> expect_segmentation;
-    Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &expect_segmentation);
-    EXPECT_EQ(*segmentation, *expect_segmentation);
+    std::shared_ptr<Tensor> expect_bbox;
+    dsize_t bbox_num = static_cast<dsize_t>(expect_bbox_vector[i].size() / 4);
+    Tensor::CreateFromVector(expect_bbox_vector[i], TensorShape({bbox_num, 4}), &expect_bbox);
+    EXPECT_EQ(*bbox, *expect_bbox);
+    std::shared_ptr<Tensor> expect_categoryid;
+    Tensor::CreateFromVector(expect_catagoryid_list[i], TensorShape({bbox_num, 1}), &expect_categoryid);
+    EXPECT_EQ(*category_id, *expect_categoryid);
     iter->GetNextRow(&row);
     i++;
   }
@@ -150,6 +111,24 @@ TEST_F(MindDataTestPipeline, TestCocoStuff) {
   iter->Stop();
 }
 
+TEST_F(MindDataTestPipeline, TestCocoException) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoException.";
+
+  // Create a Coco Dataset
+  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
+  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
+  std::string invalid_folder_path = "./NotExist";
+  std::string invalid_annotation_file = "./NotExistFile";
+
+  std::shared_ptr<Dataset> ds = Coco(invalid_folder_path, annotation_file);
+  EXPECT_EQ(ds, nullptr);
+
+  std::shared_ptr<Dataset> ds1 = Coco(folder_path, invalid_annotation_file);
+  EXPECT_EQ(ds1, nullptr);
+
+  std::shared_ptr<Dataset> ds2 = Coco(folder_path, annotation_file, "valid_mode");
+  EXPECT_EQ(ds2, nullptr);
+}
+
 TEST_F(MindDataTestPipeline, TestCocoKeypoint) {
   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoKeypoint.";
   // Create a Coco Dataset
@@ -261,13 +240,13 @@ TEST_F(MindDataTestPipeline, TestCocoPanoptic) {
   iter->Stop();
 }
 
-TEST_F(MindDataTestPipeline, TestCocoDefault) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
+TEST_F(MindDataTestPipeline, TestCocoStuff) {
+  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoStuff.";
 
   // Create a Coco Dataset
   std::string folder_path = datasets_root_path_ + "/testCOCO/train";
   std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
-  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file);
+  std::shared_ptr<Dataset> ds = Coco(folder_path, annotation_file, "Stuff", false, SequentialSampler(0, 6));
   EXPECT_NE(ds, nullptr);
 
   // Create an iterator over the result of the above dataset
@@ -279,14 +258,29 @@ TEST_F(MindDataTestPipeline, TestCocoDefault) {
   std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
   iter->GetNextRow(&row);
 
+  std::string expect_file[] = {"000000391895", "000000318219", "000000554625", "000000574769", "000000060623",
+                               "000000309022"};
+  std::vector<std::vector<float>> expect_segmentation_vector =
+    {{10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0,
+      70.0, 72.0, 73.0, 74.0, 75.0, -1.0, -1.0, -1.0, -1.0, -1.0},
+     {20.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0,
+      10.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, -1.0},
+     {40.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 40.0, 41.0, 42.0},
+     {50.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0},
+     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0},
+     {60.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0}};
+  std::vector<std::vector<dsize_t>> expect_size = {{2, 10}, {2, 11}, {1, 12}, {1, 13}, {1, 14}, {2, 7}};
   uint64_t i = 0;
   while (row.size() != 0) {
     auto image = row["image"];
-    auto bbox = row["bbox"];
-    auto category_id = row["category_id"];
-    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
-    MS_LOG(INFO) << "Tensor bbox shape: " << bbox->shape();
-    MS_LOG(INFO) << "Tensor category_id shape: " << category_id->shape();
+    auto segmentation = row["segmentation"];
+    auto iscrowd = row["iscrowd"];
+    std::shared_ptr<Tensor> expect_image;
+    Tensor::CreateFromFile(folder_path + "/" + expect_file[i] + ".jpg", &expect_image);
+    EXPECT_EQ(*image, *expect_image);
+    std::shared_ptr<Tensor> expect_segmentation;
+    Tensor::CreateFromVector(expect_segmentation_vector[i], TensorShape(expect_size[i]), &expect_segmentation);
+    EXPECT_EQ(*segmentation, *expect_segmentation);
     iter->GetNextRow(&row);
     i++;
   }
@@ -296,21 +290,3 @@ TEST_F(MindDataTestPipeline, TestCocoDefault) {
   // Manually terminate the pipeline
   iter->Stop();
 }
-
-TEST_F(MindDataTestPipeline, TestCocoException) {
-  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCocoDetection.";
-
-  // Create a Coco Dataset
-  std::string folder_path = datasets_root_path_ + "/testCOCO/train";
-  std::string annotation_file = datasets_root_path_ + "/testCOCO/annotations/train.json";
-  std::string invalid_folder_path = "./NotExist";
-  std::string invalid_annotation_file = "./NotExistFile";
-
-  std::shared_ptr<Dataset> ds = Coco(invalid_folder_path, annotation_file);
-  EXPECT_EQ(ds, nullptr);
-
-  std::shared_ptr<Dataset> ds1 = Coco(folder_path, invalid_annotation_file);
-  EXPECT_EQ(ds1, nullptr);
-
-  std::shared_ptr<Dataset> ds2 = Coco(folder_path, annotation_file, "valid_mode");
-  EXPECT_EQ(ds2, nullptr);
-}
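
The Coco factory, as exercised above, appears to default to the "Detection" task, whose rows carry "image", "bbox", and "category_id" columns, and to return nullptr for invalid paths or an unrecognized task. A minimal sketch under those assumptions; CountCocoRows is a hypothetical helper, not code from the commit:

    // Hedged sketch: count the rows a default (Detection) Coco pipeline yields.
    #include <cstdint>
    #include <memory>
    #include <string>
    #include <unordered_map>
    #include "minddata/dataset/include/datasets.h"

    using namespace mindspore::dataset::api;
    using mindspore::dataset::Tensor;

    uint64_t CountCocoRows(const std::string &folder, const std::string &annotations) {
      std::shared_ptr<Dataset> ds = Coco(folder, annotations);  // default task
      if (ds == nullptr) return 0;  // invalid path or annotation file

      std::shared_ptr<Iterator> iter = ds->CreateIterator();
      std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
      uint64_t n = 0;
      for (iter->GetNextRow(&row); !row.empty(); iter->GetNextRow(&row)) n++;
      iter->Stop();
      return n;
    }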
@@ -13,41 +13,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#include <fstream>
-#include <iostream>
-#include <memory>
-#include <vector>
-#include <string>
-
-#include "utils/log_adapter.h"
-#include "utils/ms_utils.h"
 #include "common/common.h"
-#include "gtest/gtest.h"
-#include "./securec.h"
-#include "minddata/dataset/core/client.h"
 #include "minddata/dataset/core/config_manager.h"
-#include "minddata/dataset/core/constants.h"
 #include "minddata/dataset/core/global_context.h"
-#include "minddata/dataset/core/tensor.h"
-#include "minddata/dataset/core/tensor_shape.h"
 #include "minddata/dataset/include/datasets.h"
-#include "minddata/dataset/include/iterator.h"
-#include "minddata/dataset/include/samplers.h"
-#include "minddata/dataset/include/status.h"
-#include "minddata/dataset/include/transforms.h"
 
 using namespace mindspore::dataset;
 using namespace mindspore::dataset::api;
-using mindspore::LogStream;
-using mindspore::dataset::DataType;
 using mindspore::dataset::ShuffleMode;
-using mindspore::dataset::Status;
 using mindspore::dataset::Tensor;
-using mindspore::dataset::TensorImpl;
-using mindspore::dataset::TensorShape;
-using mindspore::ExceptionType::NoExceptionType;
-using mindspore::MsLogLevel::ERROR;
 
 class MindDataTestPipeline : public UT::DatasetOpTesting {
  protected:
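
The hunks below add TestTextFileDatasetFail1 through Fail7, which probe parameter validation of TextFile(dataset_files, num_samples, shuffle, num_shards, shard_id). A minimal sketch of that call shape, under the assumption that a nullptr return signals a rejected argument; ValidTextFileConfig is a hypothetical helper, not code from the commit:

    // Hedged sketch: negative num_samples, empty or missing dataset_files,
    // num_shards <= 0, shard_id < 0, or shard_id >= num_shards are all rejected.
    #include <cstdint>
    #include <memory>
    #include <string>
    #include <vector>
    #include "minddata/dataset/include/datasets.h"

    using namespace mindspore::dataset::api;
    using mindspore::dataset::ShuffleMode;

    bool ValidTextFileConfig(const std::vector<std::string> &files, int32_t num_samples,
                             int32_t num_shards, int32_t shard_id) {
      std::shared_ptr<Dataset> ds =
          TextFile(files, num_samples, ShuffleMode::kFalse, num_shards, shard_id);
      return ds != nullptr;  // false whenever validation fails
    }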
...@@ -109,6 +83,87 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetBasic) { ...@@ -109,6 +83,87 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetBasic) {
GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers); GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
} }
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail1.";
// Attempt to create a TextFile Dataset
// with invalid samplers=-1
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1}, -1);
// Expect failure: Number of samples cannot be negative
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail2.";
// Attempt to create a TextFile Dataset
// with wrongful empty dataset_files input
std::shared_ptr<Dataset> ds = TextFile({});
// Expect failure: dataset_files is not specified
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail3) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail3.";
// Attempt to create a TextFile Dataset
// with non-existent dataset_files input
std::shared_ptr<Dataset> ds = TextFile({"notexist.txt"}, 0, ShuffleMode::kFalse);
// Expect failure: specified dataset_files does not exist
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail4) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail4.";
// Attempt to create a TextFile Dataset
// with empty string dataset_files input
std::shared_ptr<Dataset> ds = TextFile({""}, 0, ShuffleMode::kFiles);
// Expect failure: specified dataset_files does not exist
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail5) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail5.";
// Attempt to create a TextFile Dataset
// with invalid num_shards=0 value
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 1, ShuffleMode::kFalse, 0);
// Expect failure: Number of shards cannot be <=0
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail6) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail6.";
// Attempt to create a TextFile Dataset
// with invalid shard_id=-1 value
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kFiles, -1);
// Expect failure: shard_id cannot be negative
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetFail7) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail7.";
// Attempt to create a TextFile Dataset
// with invalid shard_id=2 and num_shards=2 combination
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 2);
// Expect failure: Cannot have shard_id >= num_shards
EXPECT_EQ(ds, nullptr);
}
TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1) { TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFalse1."; MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFalse1.";
// Test TextFile Dataset with two text files and no shuffle, num_parallel_workers=1 // Test TextFile Dataset with two text files and no shuffle, num_parallel_workers=1
...@@ -224,22 +279,25 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse4Shard) { ...@@ -224,22 +279,25 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFalse4Shard) {
GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers); GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
} }
TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) { TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal1A."; MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles1.";
// Test TextFile Dataset with 1 text file, global shuffle, num_parallel_workers=1 // Test TextFile Dataset with files shuffle, num_parallel_workers=1
// Set configuration // Set configuration
uint32_t original_seed = GlobalContext::config_manager()->seed(); uint32_t original_seed = GlobalContext::config_manager()->seed();
uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers(); uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers; MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
GlobalContext::config_manager()->set_seed(246); GlobalContext::config_manager()->set_seed(135);
GlobalContext::config_manager()->set_num_parallel_workers(1); GlobalContext::config_manager()->set_num_parallel_workers(1);
// Create a TextFile Dataset, with two text files // Create a TextFile Dataset, with two text files
// Note: 1.txt has 3 rows // Note: 1.txt has 3 rows
// Set shuffle to global shuffle // Note: 2.txt has 2 rows
// Use default of all samples
// Set shuffle to files shuffle
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal); std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
EXPECT_NE(ds, nullptr); EXPECT_NE(ds, nullptr);
// Create an iterator over the result of the above dataset. // Create an iterator over the result of the above dataset.
...@@ -252,7 +310,9 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) { ...@@ -252,7 +310,9 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) {
iter->GetNextRow(&row); iter->GetNextRow(&row);
EXPECT_NE(row.find("text"), row.end()); EXPECT_NE(row.find("text"), row.end());
std::vector<std::string> expected_result = {"Good luck to everyone.", "This is a text file.", "Be happy every day."}; std::vector<std::string> expected_result = {
"This is a text file.", "Be happy every day.", "Good luck to everyone.", "Another file.", "End of file.",
};
uint64_t i = 0; uint64_t i = 0;
while (row.size() != 0) { while (row.size() != 0) {
...@@ -268,8 +328,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) { ...@@ -268,8 +328,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) {
iter->GetNextRow(&row); iter->GetNextRow(&row);
} }
// Expect 3 samples // Expect 2 + 3 = 5 samples
EXPECT_EQ(i, 3); EXPECT_EQ(i, 5);
// Manually terminate the pipeline // Manually terminate the pipeline
iter->Stop(); iter->Stop();
...@@ -279,24 +339,25 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) { ...@@ -279,24 +339,25 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) {
GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers); GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
} }
TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) { TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal1B."; MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles4.";
// Test TextFile Dataset with 2 text files, global shuffle, num_parallel_workers=1 // Test TextFile Dataset with files shuffle, num_parallel_workers=4
// Set configuration // Set configuration
uint32_t original_seed = GlobalContext::config_manager()->seed(); uint32_t original_seed = GlobalContext::config_manager()->seed();
uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers(); uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers; MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
GlobalContext::config_manager()->set_seed(246); GlobalContext::config_manager()->set_seed(135);
GlobalContext::config_manager()->set_num_parallel_workers(1); GlobalContext::config_manager()->set_num_parallel_workers(4);
// Create a TextFile Dataset, with two text files // Create a TextFile Dataset, with two text files
// Note: 1.txt has 3 rows // Note: 1.txt has 3 rows
// Note: 2.txt has 2 rows // Note: 2.txt has 2 rows
// Set shuffle to global shuffle // Use default of all samples
// Set shuffle to files shuffle
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt"; std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kGlobal); std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
EXPECT_NE(ds, nullptr); EXPECT_NE(ds, nullptr);
// Create an iterator over the result of the above dataset. // Create an iterator over the result of the above dataset.
...@@ -309,8 +370,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) { ...@@ -309,8 +370,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) {
iter->GetNextRow(&row); iter->GetNextRow(&row);
EXPECT_NE(row.find("text"), row.end()); EXPECT_NE(row.find("text"), row.end());
std::vector<std::string> expected_result = {"Another file.", "Good luck to everyone.", "This is a text file.", std::vector<std::string> expected_result = {"This is a text file.", "Another file.", "Be happy every day.",
"End of file.", "Be happy every day."}; "End of file.", "Good luck to everyone."};
uint64_t i = 0; uint64_t i = 0;
while (row.size() != 0) { while (row.size() != 0) {
...@@ -337,24 +398,22 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) { ...@@ -337,24 +398,22 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) {
GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers); GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
} }
TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) { TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1A) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal4."; MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal1A.";
// Test TextFile Dataset with 2 text files, global shuffle, num_parallel_workers=4 // Test TextFile Dataset with 1 text file, global shuffle, num_parallel_workers=1
// Set configuration // Set configuration
uint32_t original_seed = GlobalContext::config_manager()->seed(); uint32_t original_seed = GlobalContext::config_manager()->seed();
uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers(); uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers; MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
GlobalContext::config_manager()->set_seed(246); GlobalContext::config_manager()->set_seed(246);
GlobalContext::config_manager()->set_num_parallel_workers(4); GlobalContext::config_manager()->set_num_parallel_workers(1);
// Create a TextFile Dataset, with two text files // Create a TextFile Dataset, with two text files
// Note: 1.txt has 3 rows // Note: 1.txt has 3 rows
// Note: 2.txt has 2 rows
// Set shuffle to global shuffle // Set shuffle to global shuffle
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt"; std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal);
std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kGlobal);
EXPECT_NE(ds, nullptr); EXPECT_NE(ds, nullptr);
// Create an iterator over the result of the above dataset. // Create an iterator over the result of the above dataset.
...@@ -367,8 +426,7 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) { ...@@ -367,8 +426,7 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) {
iter->GetNextRow(&row); iter->GetNextRow(&row);
EXPECT_NE(row.find("text"), row.end()); EXPECT_NE(row.find("text"), row.end());
std::vector<std::string> expected_result = {"Another file.", "Good luck to everyone.", "End of file.", std::vector<std::string> expected_result = {"Good luck to everyone.", "This is a text file.", "Be happy every day."};
"This is a text file.", "Be happy every day."};
uint64_t i = 0; uint64_t i = 0;
while (row.size() != 0) { while (row.size() != 0) {
...@@ -384,8 +442,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) { ...@@ -384,8 +442,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) {
iter->GetNextRow(&row); iter->GetNextRow(&row);
} }
// Expect 2 + 3 = 5 samples // Expect 3 samples
EXPECT_EQ(i, 5); EXPECT_EQ(i, 3);
// Manually terminate the pipeline // Manually terminate the pipeline
iter->Stop(); iter->Stop();
...@@ -395,25 +453,24 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) { ...@@ -395,25 +453,24 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) {
GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers); GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
} }
TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) { TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal1B) {
MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles1."; MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal1B.";
// Test TextFile Dataset with files shuffle, num_parallel_workers=1 // Test TextFile Dataset with 2 text files, global shuffle, num_parallel_workers=1
// Set configuration // Set configuration
uint32_t original_seed = GlobalContext::config_manager()->seed(); uint32_t original_seed = GlobalContext::config_manager()->seed();
uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers(); uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers; MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
GlobalContext::config_manager()->set_seed(135); GlobalContext::config_manager()->set_seed(246);
GlobalContext::config_manager()->set_num_parallel_workers(1); GlobalContext::config_manager()->set_num_parallel_workers(1);
// Create a TextFile Dataset, with two text files // Create a TextFile Dataset, with two text files
// Note: 1.txt has 3 rows // Note: 1.txt has 3 rows
// Note: 2.txt has 2 rows // Note: 2.txt has 2 rows
// Use default of all samples // Set shuffle to global shuffle
// Set shuffle to files shuffle
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt"; std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt"; std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles); std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kGlobal);
EXPECT_NE(ds, nullptr); EXPECT_NE(ds, nullptr);
// Create an iterator over the result of the above dataset. // Create an iterator over the result of the above dataset.
...@@ -426,9 +483,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) { ...@@ -426,9 +483,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) {
iter->GetNextRow(&row); iter->GetNextRow(&row);
EXPECT_NE(row.find("text"), row.end()); EXPECT_NE(row.find("text"), row.end());
std::vector<std::string> expected_result = { std::vector<std::string> expected_result = {"Another file.", "Good luck to everyone.", "This is a text file.",
"This is a text file.", "Be happy every day.", "Good luck to everyone.", "Another file.", "End of file.", "End of file.", "Be happy every day."};
};
uint64_t i = 0; uint64_t i = 0;
while (row.size() != 0) { while (row.size() != 0) {
...@@ -455,25 +511,24 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) { ...@@ -455,25 +511,24 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles1) {
GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers); GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
} }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleFiles4.";
-   // Test TextFile Dataset with files shuffle, num_parallel_workers=4
+ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleGlobal4) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetShuffleGlobal4.";
+   // Test TextFile Dataset with 2 text files, global shuffle, num_parallel_workers=4
  // Set configuration
  uint32_t original_seed = GlobalContext::config_manager()->seed();
  uint32_t original_num_parallel_workers = GlobalContext::config_manager()->num_parallel_workers();
  MS_LOG(DEBUG) << "ORIGINAL seed: " << original_seed << ", num_parallel_workers: " << original_num_parallel_workers;
- GlobalContext::config_manager()->set_seed(135);
+ GlobalContext::config_manager()->set_seed(246);
  GlobalContext::config_manager()->set_num_parallel_workers(4);
  // Create a TextFile Dataset, with two text files
  // Note: 1.txt has 3 rows
  // Note: 2.txt has 2 rows
- // Use default of all samples
- // Set shuffle to files shuffle
+ // Set shuffle to global shuffle
  std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
  std::string tf_file2 = datasets_root_path_ + "/testTextFileDataset/2.txt";
- std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kFiles);
+ std::shared_ptr<Dataset> ds = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kGlobal);
  EXPECT_NE(ds, nullptr);
  // Create an iterator over the result of the above dataset.
@@ -486,8 +541,8 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) {
  iter->GetNextRow(&row);
  EXPECT_NE(row.find("text"), row.end());
- std::vector<std::string> expected_result = {"This is a text file.", "Another file.", "Be happy every day.",
-                                             "End of file.", "Good luck to everyone."};
+ std::vector<std::string> expected_result = {"Another file.", "Good luck to everyone.", "End of file.",
+                                             "This is a text file.", "Be happy every day."};
  uint64_t i = 0;
  while (row.size() != 0) {
@@ -513,84 +568,3 @@ TEST_F(MindDataTestPipeline, TestTextFileDatasetShuffleFiles4) {
  GlobalContext::config_manager()->set_seed(original_seed);
  GlobalContext::config_manager()->set_num_parallel_workers(original_num_parallel_workers);
}
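What the renamed tests pin down: with the seed fixed, ShuffleMode::kGlobal yields one deterministic permutation of all five rows across both files, whereas kFiles only permutes the order of the files themselves. A seed-independent sanity check — a sketch using only calls already present in these tests, and assuming tf_file1/tf_file2 as defined above — would assert the row count rather than a particular ordering:

// Sketch: under global shuffle the pipeline must still yield every row exactly
// once, so 1.txt (3 rows) + 2.txt (2 rows) = 5 rows for any configured seed.
std::shared_ptr<Dataset> ds_check = TextFile({tf_file1, tf_file2}, 0, ShuffleMode::kGlobal);
EXPECT_NE(ds_check, nullptr);
std::shared_ptr<Iterator> check_iter = ds_check->CreateIterator();
EXPECT_NE(check_iter, nullptr);
std::unordered_map<std::string, std::shared_ptr<Tensor>> check_row;
check_iter->GetNextRow(&check_row);
uint64_t count = 0;
while (check_row.size() != 0) {
  count++;
  check_iter->GetNextRow(&check_row);
}
EXPECT_EQ(count, 5);
check_iter->Stop();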
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail1) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail1.";
-   // Attempt to create a TextFile Dataset
-   // with invalid num_samples=-1
-   std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
-   std::shared_ptr<Dataset> ds = TextFile({tf_file1}, -1);
-   // Expect failure: Number of samples cannot be negative
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail2) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail2.";
-   // Attempt to create a TextFile Dataset
-   // with an invalid empty dataset_files input
-   std::shared_ptr<Dataset> ds = TextFile({});
-   // Expect failure: dataset_files is not specified
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail3) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail3.";
-   // Attempt to create a TextFile Dataset
-   // with non-existent dataset_files input
-   std::shared_ptr<Dataset> ds = TextFile({"notexist.txt"}, 0, ShuffleMode::kFalse);
-   // Expect failure: specified dataset_files does not exist
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail4) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail4.";
-   // Attempt to create a TextFile Dataset
-   // with empty string dataset_files input
-   std::shared_ptr<Dataset> ds = TextFile({""}, 0, ShuffleMode::kFiles);
-   // Expect failure: specified dataset_files does not exist
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail5) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail5.";
-   // Attempt to create a TextFile Dataset
-   // with invalid num_shards=0 value
-   std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
-   std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 1, ShuffleMode::kFalse, 0);
-   // Expect failure: Number of shards cannot be <=0
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail6) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail6.";
-   // Attempt to create a TextFile Dataset
-   // with invalid shard_id=-1 value
-   std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
-   std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kFiles, -1);
-   // Expect failure: shard_id cannot be negative
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestTextFileDatasetFail7) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestTextFileDatasetFail7.";
-   // Attempt to create a TextFile Dataset
-   // with invalid shard_id=2 and num_shards=2 combination
-   std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
-   std::shared_ptr<Dataset> ds = TextFile({tf_file1}, 0, ShuffleMode::kGlobal, 2, 2);
-   // Expect failure: Cannot have shard_id >= num_shards
-   EXPECT_EQ(ds, nullptr);
- }
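Taken together, Fail1 through Fail7 each exercise one invalid argument of the same factory. A table-driven variant would keep the cases in one place; this is only a sketch, assuming the five-parameter TextFile overload shown in Fail7 and defaults of num_shards=1, shard_id=0 (the defaults are an assumption, and BadTextFileArgs is a hypothetical struct):

// Every entry below is expected to make TextFile() return nullptr.
struct BadTextFileArgs {
  std::vector<std::string> files;
  int32_t num_samples;
  ShuffleMode shuffle;
  int32_t num_shards;
  int32_t shard_id;
};
std::string tf_file1 = datasets_root_path_ + "/testTextFileDataset/1.txt";
std::vector<BadTextFileArgs> bad_cases = {
  {{tf_file1}, -1, ShuffleMode::kGlobal, 1, 0},      // negative num_samples
  {{}, 0, ShuffleMode::kGlobal, 1, 0},               // empty dataset_files list
  {{"notexist.txt"}, 0, ShuffleMode::kFalse, 1, 0},  // non-existent file
  {{""}, 0, ShuffleMode::kFiles, 1, 0},              // empty-string file name
  {{tf_file1}, 1, ShuffleMode::kFalse, 0, 0},        // num_shards <= 0
  {{tf_file1}, 0, ShuffleMode::kFiles, 2, -1},       // negative shard_id
  {{tf_file1}, 0, ShuffleMode::kGlobal, 2, 2},       // shard_id >= num_shards
};
for (const auto &c : bad_cases) {
  EXPECT_EQ(TextFile(c.files, c.num_samples, c.shuffle, c.num_shards, c.shard_id), nullptr);
}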
@@ -13,139 +13,107 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
- #include <fstream>
- #include <iostream>
- #include <memory>
- #include <vector>
- #include <string>
- #include "utils/log_adapter.h"
- #include "utils/ms_utils.h"
#include "common/common.h"
- #include "gtest/gtest.h"
- #include "securec.h"
#include "minddata/dataset/include/datasets.h"
- #include "minddata/dataset/include/status.h"
- #include "minddata/dataset/include/transforms.h"
- #include "minddata/dataset/include/iterator.h"
- #include "minddata/dataset/core/constants.h"
- #include "minddata/dataset/core/tensor_shape.h"
- #include "minddata/dataset/core/tensor.h"
- #include "minddata/dataset/include/samplers.h"
using namespace mindspore::dataset::api;
- using mindspore::MsLogLevel::ERROR;
- using mindspore::ExceptionType::NoExceptionType;
- using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
- using mindspore::dataset::TensorImpl;
- using mindspore::dataset::DataType;
- using mindspore::dataset::Status;
- using mindspore::dataset::BorderType;
- using mindspore::dataset::dsize_t;
class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};
- TEST_F(MindDataTestPipeline, TestIteratorOneColumn) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorOneColumn.";
-   // Create a Mnist Dataset
-   std::string folder_path = datasets_root_path_ + "/testMnistData/";
-   std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 4));
+ TEST_F(MindDataTestPipeline, TestIteratorEmptyColumn) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorEmptyColumn.";
+   // Create a Cifar10 Dataset
+   std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
+   std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 5));
  EXPECT_NE(ds, nullptr);
- // Create a Batch operation on ds
- int32_t batch_size = 2;
- ds = ds->Batch(batch_size);
+ // Create a Rename operation on ds
+ ds = ds->Rename({"image", "label"}, {"col1", "col2"});
  EXPECT_NE(ds, nullptr);
- // Create an iterator over the result of the above dataset
- // Only select "image" column and drop others
- std::vector<std::string> columns = {"image"};
- std::shared_ptr<Iterator> iter = ds->CreateIterator(columns);
+ // No columns are specified, use all columns
+ std::shared_ptr<Iterator> iter = ds->CreateIterator();
  EXPECT_NE(iter, nullptr);
  // Iterate the dataset and get each row
  std::vector<std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
- TensorShape expect({2, 28, 28, 1});
+ TensorShape expect0({32, 32, 3});
+ TensorShape expect1({});
  uint64_t i = 0;
  while (row.size() != 0) {
-   for (auto &v : row) {
-     MS_LOG(INFO) << "image shape:" << v->shape();
-     EXPECT_EQ(expect, v->shape());
-   }
+   MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
+   EXPECT_EQ(expect0, row[0]->shape());
+   EXPECT_EQ(expect1, row[1]->shape());
    iter->GetNextRow(&row);
    i++;
  }
- EXPECT_EQ(i, 2);
+ EXPECT_EQ(i, 5);
  // Manually terminate the pipeline
  iter->Stop();
}
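The iterator tests above and below pin down the CreateIterator contract as these tests exercise it: with no argument every column is produced, and with a column list the returned row vector is both filtered and ordered by that list. All of them share the same drain loop, which could be extracted; DrainRows is a hypothetical helper name, built only from calls used in these tests:

// Sketch: drain an iterator, return how many rows it produced, then stop it.
uint64_t DrainRows(std::shared_ptr<Iterator> iter) {
  std::vector<std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
  uint64_t count = 0;
  while (row.size() != 0) {
    count++;
    iter->GetNextRow(&row);
  }
  iter->Stop();
  return count;
}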
- TEST_F(MindDataTestPipeline, TestIteratorTwoColumns) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorTwoColumns.";
-   // Create a VOC Dataset
-   std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
-   std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", {}, false, SequentialSampler(0, 4));
+ TEST_F(MindDataTestPipeline, TestIteratorOneColumn) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorOneColumn.";
+   // Create a Mnist Dataset
+   std::string folder_path = datasets_root_path_ + "/testMnistData/";
+   std::shared_ptr<Dataset> ds = Mnist(folder_path, RandomSampler(false, 4));
  EXPECT_NE(ds, nullptr);
- // Create a Repeat operation on ds
- int32_t repeat_num = 2;
- ds = ds->Repeat(repeat_num);
+ // Create a Batch operation on ds
+ int32_t batch_size = 2;
+ ds = ds->Batch(batch_size);
  EXPECT_NE(ds, nullptr);
  // Create an iterator over the result of the above dataset
- // Only select "image" and "bbox" column
- std::vector<std::string> columns = {"image", "bbox"};
+ // Only select "image" column and drop others
+ std::vector<std::string> columns = {"image"};
  std::shared_ptr<Iterator> iter = ds->CreateIterator(columns);
  EXPECT_NE(iter, nullptr);
  // Iterate the dataset and get each row
  std::vector<std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
- std::vector<TensorShape> expect = {TensorShape({173673}), TensorShape({1, 4}),
-                                    TensorShape({173673}), TensorShape({1, 4}),
-                                    TensorShape({147025}), TensorShape({1, 4}),
-                                    TensorShape({211653}), TensorShape({1, 4})};
+ TensorShape expect({2, 28, 28, 1});
  uint64_t i = 0;
- uint64_t j = 0;
  while (row.size() != 0) {
-   MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
-   EXPECT_EQ(2, row.size());
-   EXPECT_EQ(expect[j++], row[0]->shape());
-   EXPECT_EQ(expect[j++], row[1]->shape());
+   for (auto &v : row) {
+     MS_LOG(INFO) << "image shape:" << v->shape();
+     EXPECT_EQ(expect, v->shape());
+   }
    iter->GetNextRow(&row);
    i++;
-   j = (j == expect.size()) ? 0 : j;
  }
- EXPECT_EQ(i, 8);
+ EXPECT_EQ(i, 2);
  // Manually terminate the pipeline
  iter->Stop();
}
- TEST_F(MindDataTestPipeline, TestIteratorEmptyColumn) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorEmptyColumn.";
+ TEST_F(MindDataTestPipeline, TestIteratorReOrder) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorReOrder.";
  // Create a Cifar10 Dataset
  std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
- std::shared_ptr<Dataset> ds = Cifar10(folder_path, RandomSampler(false, 5));
+ std::shared_ptr<Dataset> ds = Cifar10(folder_path, SequentialSampler(false, 4));
  EXPECT_NE(ds, nullptr);
- // Create a Rename operation on ds
- ds = ds->Rename({"image", "label"}, {"col1", "col2"});
+ // Create a Take operation on ds
+ ds = ds->Take(2);
  EXPECT_NE(ds, nullptr);
- // No columns are specified, use all columns
- std::shared_ptr<Iterator> iter = ds->CreateIterator();
+ // Create an iterator over the result of the above dataset
+ // Reorder "image" and "label" column
+ std::vector<std::string> columns = {"label", "image"};
+ std::shared_ptr<Iterator> iter = ds->CreateIterator(columns);
  EXPECT_NE(iter, nullptr);
  // Iterate the dataset and get each row
@@ -154,56 +122,62 @@ TEST_F(MindDataTestPipeline, TestIteratorEmptyColumn) {
  TensorShape expect0({32, 32, 3});
  TensorShape expect1({});
+ // Check if we will catch "label" before "image" in row
+ std::vector<std::string> expect = {"label", "image"};
  uint64_t i = 0;
  while (row.size() != 0) {
    MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
-   EXPECT_EQ(expect0, row[0]->shape());
-   EXPECT_EQ(expect1, row[1]->shape());
+   EXPECT_EQ(expect1, row[0]->shape());
+   EXPECT_EQ(expect0, row[1]->shape());
    iter->GetNextRow(&row);
    i++;
  }
- EXPECT_EQ(i, 5);
+ EXPECT_EQ(i, 2);
  // Manually terminate the pipeline
  iter->Stop();
}
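Worth noting about the new TestIteratorReOrder: because the row comes back as a vector, column order is observable only through position, so the test encodes the expected order by checking the scalar label shape at row[0] and the 32x32x3 image at row[1]. The per-row check inside the while-loop could equally be written positionally — a sketch under the same assumptions, with expected_shapes listed in the requested {"label", "image"} order:

// Sketch: verify shapes positionally against the requested column order.
std::vector<TensorShape> expected_shapes = {TensorShape({}), TensorShape({32, 32, 3})};
for (size_t k = 0; k < row.size(); k++) {
  EXPECT_EQ(expected_shapes[k], row[k]->shape());
}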
- TEST_F(MindDataTestPipeline, TestIteratorReOrder) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorReOrder.";
-   // Create a Cifar10 Dataset
-   std::string folder_path = datasets_root_path_ + "/testCifar10Data/";
-   std::shared_ptr<Dataset> ds = Cifar10(folder_path, SequentialSampler(false, 4));
+ TEST_F(MindDataTestPipeline, TestIteratorTwoColumns) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestIteratorTwoColumns.";
+   // Create a VOC Dataset
+   std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
+   std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", {}, false, SequentialSampler(0, 4));
  EXPECT_NE(ds, nullptr);
- // Create a Take operation on ds
- ds = ds->Take(2);
+ // Create a Repeat operation on ds
+ int32_t repeat_num = 2;
+ ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);
  // Create an iterator over the result of the above dataset
- // Reorder "image" and "label" column
- std::vector<std::string> columns = {"label", "image"};
+ // Only select "image" and "bbox" column
+ std::vector<std::string> columns = {"image", "bbox"};
  std::shared_ptr<Iterator> iter = ds->CreateIterator(columns);
  EXPECT_NE(iter, nullptr);
  // Iterate the dataset and get each row
  std::vector<std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
- TensorShape expect0({32, 32, 3});
- TensorShape expect1({});
- // Check if we will catch "label" before "image" in row
- std::vector<std::string> expect = {"label", "image"};
+ std::vector<TensorShape> expect = {TensorShape({173673}), TensorShape({1, 4}),
+                                    TensorShape({173673}), TensorShape({1, 4}),
+                                    TensorShape({147025}), TensorShape({1, 4}),
+                                    TensorShape({211653}), TensorShape({1, 4})};
  uint64_t i = 0;
+ uint64_t j = 0;
  while (row.size() != 0) {
    MS_LOG(INFO) << "row[0]:" << row[0]->shape() << ", row[1]:" << row[1]->shape();
-   EXPECT_EQ(expect1, row[0]->shape());
-   EXPECT_EQ(expect0, row[1]->shape());
+   EXPECT_EQ(2, row.size());
+   EXPECT_EQ(expect[j++], row[0]->shape());
+   EXPECT_EQ(expect[j++], row[1]->shape());
    iter->GetNextRow(&row);
    i++;
+   j = (j == expect.size()) ? 0 : j;
  }
- EXPECT_EQ(i, 2);
+ EXPECT_EQ(i, 8);
  // Manually terminate the pipeline
  iter->Stop();
@@ -13,54 +13,30 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
- #include <fstream>
- #include <iostream>
- #include <memory>
- #include <vector>
- #include <string>
- #include "utils/log_adapter.h"
- #include "utils/ms_utils.h"
#include "common/common.h"
- #include "gtest/gtest.h"
- #include "securec.h"
- #include "minddata/dataset/include/datasets.h"
- #include "minddata/dataset/include/status.h"
- #include "minddata/dataset/include/transforms.h"
- #include "minddata/dataset/include/iterator.h"
- #include "minddata/dataset/core/constants.h"
- #include "minddata/dataset/core/tensor_shape.h"
- #include "minddata/dataset/core/tensor.h"
- #include "minddata/dataset/include/samplers.h"
#include "minddata/dataset/engine/datasetops/source/voc_op.h"
+ #include "minddata/dataset/include/datasets.h"
using namespace mindspore::dataset::api;
- using mindspore::MsLogLevel::ERROR;
- using mindspore::ExceptionType::NoExceptionType;
- using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
- using mindspore::dataset::TensorImpl;
using mindspore::dataset::DataType;
- using mindspore::dataset::Status;
- using mindspore::dataset::BorderType;
- using mindspore::dataset::dsize_t;
class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};
- TEST_F(MindDataTestPipeline, TestVOCSegmentation) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentation.";
+ TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCClassIndex.";
  // Create a VOC Dataset
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
- std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", {}, false, SequentialSampler(0, 3));
- EXPECT_NE(ds, nullptr);
- // Create a Repeat operation on ds
- int32_t repeat_num = 2;
- ds = ds->Repeat(repeat_num);
+ std::map<std::string, int32_t> class_index;
+ class_index["car"] = 0;
+ class_index["cat"] = 1;
+ class_index["train"] = 9;
+ std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", class_index, false, SequentialSampler(0, 6));
  EXPECT_NE(ds, nullptr);
  // Create an iterator over the result of the above dataset
@@ -72,23 +48,20 @@ TEST_F(MindDataTestPipeline, TestVOCSegmentation) {
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
- // Check if VOCOp read correct images/targets
- using Tensor = mindspore::dataset::Tensor;
- std::string expect_file[] = {"32", "33", "39", "32", "33", "39"};
+ // Check if VOCOp read correct labels
+ // When we provide class_index, label of ["car","cat","train"] become [0,1,9]
+ std::shared_ptr<Tensor> expect_label;
+ Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);
+ uint32_t expect[] = {9, 9, 9, 1, 1, 0};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
-   auto target = row["target"];
+   auto label = row["label"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
-   MS_LOG(INFO) << "Tensor target shape: " << target->shape();
-   std::shared_ptr<Tensor> expect_image;
-   Tensor::CreateFromFile(folder_path + "/JPEGImages/" + expect_file[i] + ".jpg", &expect_image);
-   EXPECT_EQ(*image, *expect_image);
-   std::shared_ptr<Tensor> expect_target;
-   Tensor::CreateFromFile(folder_path + "/SegmentationClass/" + expect_file[i] + ".png", &expect_target);
-   EXPECT_EQ(*target, *expect_target);
+   MS_LOG(INFO) << "Tensor label shape: " << label->shape();
+   expect_label->SetItemAt({0, 0}, expect[i]);
+   EXPECT_EQ(*label, *expect_label);
    iter->GetNextRow(&row);
    i++;
@@ -100,33 +73,6 @@ TEST_F(MindDataTestPipeline, TestVOCSegmentation) {
  iter->Stop();
}
- TEST_F(MindDataTestPipeline, TestVOCSegmentationError1) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentationError1.";
-   // Create a VOC Dataset
-   std::map<std::string, int32_t> class_index;
-   class_index["car"] = 0;
-   std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
-   std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", class_index, false, RandomSampler(false, 6));
-   // Expect nullptr for segmentation task with class_index
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrMode) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrMode.";
-   // Create a VOC Dataset
-   std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
-   std::shared_ptr<Dataset> ds_1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3));
-   // Expect nullptr for invalid task
-   EXPECT_EQ(ds_1, nullptr);
-   std::shared_ptr<Dataset> ds_2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4));
-   // Expect nullptr for invalid mode
-   EXPECT_EQ(ds_2, nullptr);
- }
TEST_F(MindDataTestPipeline, TestVOCDetection) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCDetection.";
@@ -173,17 +119,31 @@ TEST_F(MindDataTestPipeline, TestVOCDetection) {
  iter->Stop();
}
- TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCClassIndex.";
+ TEST_F(MindDataTestPipeline, TestVOCInvalidTaskOrMode) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCInvalidTaskOrMode.";
  // Create a VOC Dataset
  std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
- std::map<std::string, int32_t> class_index;
- class_index["car"] = 0;
- class_index["cat"] = 1;
- class_index["train"] = 9;
- std::shared_ptr<Dataset> ds = VOC(folder_path, "Detection", "train", class_index, false, SequentialSampler(0, 6));
+ std::shared_ptr<Dataset> ds_1 = VOC(folder_path, "Classification", "train", {}, false, SequentialSampler(0, 3));
+ // Expect nullptr for invalid task
+ EXPECT_EQ(ds_1, nullptr);
+ std::shared_ptr<Dataset> ds_2 = VOC(folder_path, "Segmentation", "validation", {}, false, RandomSampler(false, 4));
+ // Expect nullptr for invalid mode
+ EXPECT_EQ(ds_2, nullptr);
+ }
+ TEST_F(MindDataTestPipeline, TestVOCSegmentation) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentation.";
+   // Create a VOC Dataset
+   std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
+   std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", {}, false, SequentialSampler(0, 3));
+   EXPECT_NE(ds, nullptr);
+   // Create a Repeat operation on ds
+   int32_t repeat_num = 2;
+   ds = ds->Repeat(repeat_num);
  EXPECT_NE(ds, nullptr);
  // Create an iterator over the result of the above dataset
@@ -195,20 +155,23 @@ TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
  iter->GetNextRow(&row);
- // Check if VOCOp read correct labels
- // When we provide class_index, label of ["car","cat","train"] become [0,1,9]
- std::shared_ptr<Tensor> expect_label;
- Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &expect_label);
- uint32_t expect[] = {9, 9, 9, 1, 1, 0};
+ // Check if VOCOp read correct images/targets
+ using Tensor = mindspore::dataset::Tensor;
+ std::string expect_file[] = {"32", "33", "39", "32", "33", "39"};
  uint64_t i = 0;
  while (row.size() != 0) {
    auto image = row["image"];
-   auto label = row["label"];
+   auto target = row["target"];
    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
-   MS_LOG(INFO) << "Tensor label shape: " << label->shape();
-   expect_label->SetItemAt({0, 0}, expect[i]);
-   EXPECT_EQ(*label, *expect_label);
+   MS_LOG(INFO) << "Tensor target shape: " << target->shape();
+   std::shared_ptr<Tensor> expect_image;
+   Tensor::CreateFromFile(folder_path + "/JPEGImages/" + expect_file[i] + ".jpg", &expect_image);
+   EXPECT_EQ(*image, *expect_image);
+   std::shared_ptr<Tensor> expect_target;
+   Tensor::CreateFromFile(folder_path + "/SegmentationClass/" + expect_file[i] + ".png", &expect_target);
+   EXPECT_EQ(*target, *expect_target);
    iter->GetNextRow(&row);
    i++;
@@ -219,3 +182,16 @@ TEST_F(MindDataTestPipeline, TestVOCClassIndex) {
  // Manually terminate the pipeline
  iter->Stop();
}
+ TEST_F(MindDataTestPipeline, TestVOCSegmentationError1) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestVOCSegmentationError1.";
+   // Create a VOC Dataset
+   std::map<std::string, int32_t> class_index;
+   class_index["car"] = 0;
+   std::string folder_path = datasets_root_path_ + "/testVOC2012_2";
+   std::shared_ptr<Dataset> ds = VOC(folder_path, "Segmentation", "train", class_index, false, RandomSampler(false, 6));
+   // Expect nullptr for segmentation task with class_index
+   EXPECT_EQ(ds, nullptr);
+ }
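The relocated TestVOCClassIndex encodes the mapping invariant: with class_index = {car: 0, cat: 1, train: 9}, every "label" tensor is a 1x1 DE_UINT32 holding the mapped id, so the six sequential rows yield {9, 9, 9, 1, 1, 0}. An equivalent per-row check — a sketch that builds a fresh expected tensor instead of mutating one, assuming `row` and the expected-id sequence from the test above — would look like:

// Sketch: construct the expected 1x1 uint32 label tensor for a single row.
// 'expected_id' stands in for the current entry of {9, 9, 9, 1, 1, 0}.
uint32_t expected_id = 9;
std::shared_ptr<Tensor> fresh_label;
Tensor::CreateFromMemory(TensorShape({1, 1}), DataType(DataType::DE_UINT32), nullptr, &fresh_label);
fresh_label->SetItemAt({0, 0}, expected_id);
EXPECT_EQ(*row["label"], *fresh_label);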
@@ -13,58 +13,17 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
- #include <fstream>
- #include <iostream>
- #include <memory>
- #include <vector>
- #include <string>
- #include "utils/log_adapter.h"
- #include "utils/ms_utils.h"
#include "common/common.h"
- #include "gtest/gtest.h"
- #include "securec.h"
#include "minddata/dataset/include/datasets.h"
- #include "minddata/dataset/include/status.h"
- #include "minddata/dataset/include/transforms.h"
- #include "minddata/dataset/include/iterator.h"
- #include "minddata/dataset/core/constants.h"
- #include "minddata/dataset/core/tensor_shape.h"
- #include "minddata/dataset/core/tensor.h"
- #include "minddata/dataset/include/samplers.h"
using namespace mindspore::dataset::api;
- using mindspore::MsLogLevel::ERROR;
- using mindspore::ExceptionType::NoExceptionType;
- using mindspore::LogStream;
using mindspore::dataset::Tensor;
using mindspore::dataset::TensorShape;
- using mindspore::dataset::TensorImpl;
- using mindspore::dataset::DataType;
- using mindspore::dataset::Status;
- using mindspore::dataset::BorderType;
- using mindspore::dataset::dsize_t;
class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected:
};
- TEST_F(MindDataTestPipeline, TestMnistFail1) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFail1.";
-   // Create a Mnist Dataset
-   std::shared_ptr<Dataset> ds = Mnist("", RandomSampler(false, 10));
-   EXPECT_EQ(ds, nullptr);
- }
- TEST_F(MindDataTestPipeline, TestImageFolderFail1) {
-   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFail1.";
-   // Create an ImageFolder Dataset
-   std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr);
-   EXPECT_EQ(ds, nullptr);
- }
TEST_F(MindDataTestPipeline, TestCelebADataset) {
  MS_LOG(INFO) << "Doing MindDataTestPipeline-TestCelebADataset.";
@@ -158,3 +117,19 @@ TEST_F(MindDataTestPipeline, TestCelebAException) {
  std::shared_ptr<Dataset> ds1 = CelebA(folder_path, invalid_dataset_type);
  EXPECT_EQ(ds1, nullptr);
}
+ TEST_F(MindDataTestPipeline, TestImageFolderFail1) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestImageFolderFail1.";
+   // Create an ImageFolder Dataset
+   std::shared_ptr<Dataset> ds = ImageFolder("", true, nullptr);
+   EXPECT_EQ(ds, nullptr);
+ }
+ TEST_F(MindDataTestPipeline, TestMnistFail1) {
+   MS_LOG(INFO) << "Doing MindDataTestPipeline-TestMnistFail1.";
+   // Create a Mnist Dataset
+   std::shared_ptr<Dataset> ds = Mnist("", RandomSampler(false, 10));
+   EXPECT_EQ(ds, nullptr);
+ }
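The relocated ImageFolderFail1/MnistFail1 cases follow the same shape as the CelebA exception test above: an invalid argument makes the factory return nullptr before any pipeline is built, so negative tests never need to create an iterator. The common pattern, condensed into one sketch using only factories exercised in this commit:

// Sketch: each factory validates its inputs eagerly; an empty dataset path
// (or, for CelebA, an invalid dataset type string) yields nullptr immediately.
EXPECT_EQ(Mnist("", RandomSampler(false, 10)), nullptr);
EXPECT_EQ(ImageFolder("", true, nullptr), nullptr);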
@@ -13,33 +13,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
- #include <fstream>
- #include <iostream>
- #include <memory>
- #include <vector>
- #include <string>
- #include "utils/log_adapter.h"
- #include "utils/ms_utils.h"
#include "common/common.h"
- #include "gtest/gtest.h"
- #include "securec.h"
#include "minddata/dataset/include/datasets.h"
- #include "minddata/dataset/include/status.h"
- #include "minddata/dataset/include/transforms.h"
- #include "minddata/dataset/include/iterator.h"
- #include "minddata/dataset/core/constants.h"
- #include "minddata/dataset/core/tensor_shape.h"
- #include "minddata/dataset/core/tensor.h"
- #include "minddata/dataset/include/samplers.h"
using namespace mindspore::dataset::api;
- using mindspore::MsLogLevel::ERROR;
- using mindspore::ExceptionType::NoExceptionType;
- using mindspore::LogStream;
using mindspore::dataset::Tensor;
- using mindspore::dataset::Status;
- using mindspore::dataset::BorderType;
class MindDataTestPipeline : public UT::DatasetOpTesting {
 protected: