test_dataset.py 48.7 KB
Newer Older
X
xjqbest 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
X
xjqbest 已提交
14
"""
X
xjqbest 已提交
15 16
TestCases for Dataset,
including create, config, run, etc.
X
xjqbest 已提交
17
"""
X
xjqbest 已提交
18 19

import os
20
import tempfile
X
xjqbest 已提交
21 22
import unittest

23 24 25 26
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core

X
xjqbest 已提交
27 28

class TestDataset(unittest.TestCase):
29
    """TestCases for Dataset."""
30

Z
Zeng Jinle 已提交
31 32 33 34 35
    def setUp(self):
        """Prepare the knobs shared by every test in this class."""
        # Default epoch count for the training loops below.
        self.epoch_num = 10
        # When True, tests feed through a DataLoader instead of calling
        # train_from_dataset directly (flipped by subclasses).
        self.use_data_loader = False
        self.drop_last = False

X
xjqbest 已提交
36
    def test_dataset_create(self):
        """Testcase for dataset create.

        Every known dataset type must be constructible, while an unknown
        type must raise instead of being silently accepted.
        """
        try:
            dataset = paddle.distributed.InMemoryDataset()
        except Exception as e:
            self.fail(f"creating InMemoryDataset raised: {e}")

        try:
            dataset = paddle.distributed.QueueDataset()
        except Exception as e:
            self.fail(f"creating QueueDataset raised: {e}")

        try:
            dataset = paddle.distributed.fleet.dataset.FileInstantDataset()
        except Exception as e:
            self.fail(f"creating FileInstantDataset raised: {e}")

        # A made-up dataset type must not exist.
        with self.assertRaises(Exception):
            dataset = paddle.distributed.fleet.dataset.MyOwnDataset()

59 60 61 62 63 64 65
    def test_config(self):
        """Testcase for python config: setters must round-trip to properties."""
        ds = fluid.InMemoryDataset()
        ds.set_parse_content(True)
        ds.set_parse_ins_id(True)
        ds._set_trainer_num(1)
        # Each configured value must be observable again.
        self.assertEqual(ds.trainer_num, 1)
        self.assertTrue(ds.parse_ins_id)
        self.assertTrue(ds.parse_content)
70

71 72 73 74 75 76 77 78
    def test_shuffle_by_uid(self):
        """Testcase for shuffle_by_uid."""
        # Only checks the setters do not raise; no training is run here.
        ds = paddle.distributed.InMemoryDataset()
        ds._set_uid_slot('6048')
        ds._set_shuffle_by_uid(True)

79 80 81 82
    def test_run_with_dump(self):
        """
        Testcase for InMemoryDataset from create to run.

        Writes two small sample files, loads them through an
        InMemoryDataset with distributed parse settings enabled, and
        runs two training passes.
        """
        temp_dir = tempfile.TemporaryDirectory()
        dump_a_path = os.path.join(temp_dir.name, 'test_run_with_dump_a.txt')
        dump_b_path = os.path.join(temp_dir.name, 'test_run_with_dump_b.txt')

        with open(dump_a_path, "w") as f:
            data = "1 a 1 a 1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 b 1 b 1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 c 1 c 1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(dump_b_path, "w") as f:
            data = "1 d 1 d 1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 e 1 e 1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 f 1 f 1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 g 1 g 1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=slots_vars
        )
        # update_settings must be able to override a value given to init().
        dataset.update_settings(pipe_command="cat1")
        dataset._init_distributed_settings(
            parse_ins_id=True,
            parse_content=True,
            fea_eval=True,
            candidate_size=10000,
        )
        dataset.set_filelist([dump_a_path, dump_b_path])
        dataset.load_into_memory()
        dataset.local_shuffle()

        paddle.enable_static()

        exe = paddle.static.Executor(paddle.CPUPlace())
        startup_program = paddle.static.Program()
        main_program = paddle.static.Program()
        exe.run(startup_program)
        for i in range(2):
            try:
                exe.train_from_dataset(main_program, dataset)
            except ImportError:
                # trainer_desc_pb2 may be unavailable (e.g. on Windows);
                # that is not a failure of the dataset pipeline itself.
                pass
            except Exception as e:
                self.fail(f"train_from_dataset raised: {e}")

        temp_dir.cleanup()
138

X
xjqbest 已提交
139
    def test_dataset_config(self):
        """Testcase for dataset configuration."""
        ds = fluid.core.Dataset("MultiSlotDataset")
        ds.set_thread_num(12)
        ds.set_filelist(["a.txt", "b.txt", "c.txt"])
        ds.set_trainer_num(4)
        ds.set_hdfs_config("my_fs_name", "my_fs_ugi")
        ds.set_download_cmd("./read_from_afs my_fs_name my_fs_ugi")
        ds.set_enable_pv_merge(False)

        # Every value configured above must be observable via its getter.
        self.assertEqual(ds.get_thread_num(), 12)

        flist = ds.get_filelist()
        self.assertEqual(len(flist), 3)
        for got, want in zip(flist, ["a.txt", "b.txt", "c.txt"]):
            self.assertEqual(got, want)

        self.assertEqual(ds.get_trainer_num(), 4)

        fs_name, fs_ugi = ds.get_hdfs_config()
        self.assertEqual(fs_name, "my_fs_name")
        self.assertEqual(fs_ugi, "my_fs_ugi")

        self.assertEqual(
            ds.get_download_cmd(), "./read_from_afs my_fs_name my_fs_ugi"
        )

    def test_set_download_cmd(self):
        """
        Testcase for InMemoryDataset from create to run.

        Uses a custom download_cmd ("cat") so the "afs:"-prefixed local
        files are piped through the download command before parsing.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "afs:test_in_memory_dataset_run_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "afs:test_in_memory_dataset_run_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32,
            thread_num=3,
            pipe_command="cat",
            download_cmd="cat",
            use_var=slots_vars,
        )
        dataset.set_filelist([filename1, filename2])
        dataset.load_into_memory()
        paddle.enable_static()

        # One CPU executor is enough; the old code built a second
        # fluid.Executor that immediately shadowed this one.
        exe = paddle.static.Executor(paddle.CPUPlace())
        startup_program = paddle.static.Program()
        main_program = paddle.static.Program()
        exe.run(startup_program)
        if self.use_data_loader:
            data_loader = fluid.io.DataLoader.from_dataset(
                dataset, fluid.cpu_places(), self.drop_last
            )
            for i in range(self.epoch_num):
                for data in data_loader():
                    exe.run(main_program, feed=data)
        else:
            for i in range(self.epoch_num):
                try:
                    exe.train_from_dataset(main_program, dataset)
                except Exception as e:
                    self.fail(f"train_from_dataset raised: {e}")

        temp_dir.cleanup()
232

X
xjqbest 已提交
233
    def test_in_memory_dataset_run(self):
        """
        Testcase for InMemoryDataset from create to run.

        Also exercises fea_eval slot shuffling and local unique-feasign
        table generation before training.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_run_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_run_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=slots_vars
        )
        dataset._init_distributed_settings(fea_eval=True, candidate_size=1)
        dataset.set_filelist([filename1, filename2])
        dataset.load_into_memory()
        # Shuffle a single slot (fea_eval path) before the full shuffle.
        dataset.slots_shuffle(["slot1"])
        dataset.local_shuffle()
        dataset._set_generate_unique_feasigns(True, 15)
        dataset._generate_local_tables_unlock(0, 11, 1, 25, 15)
        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        if self.use_data_loader:
            data_loader = fluid.io.DataLoader.from_dataset(
                dataset, fluid.cpu_places(), self.drop_last
            )
            for i in range(self.epoch_num):
                for data in data_loader():
                    exe.run(fluid.default_main_program(), feed=data)
        else:
            for i in range(self.epoch_num):
                try:
                    exe.train_from_dataset(
                        fluid.default_main_program(), dataset
                    )
                except Exception as e:
                    self.fail(f"train_from_dataset raised: {e}")

        temp_dir.cleanup()
X
xjqbest 已提交
295

L
lxsbupt 已提交
296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316
    def test_in_memory_dataset_gpugraph_mode(self):
        """Testcase for InMemoryDataset in gpugraph mode."""
        ds = fluid.DatasetFactory().create_dataset("InMemoryDataset")
        # Graph walking requires the slot-record feed type.
        ds.set_feed_type("SlotRecordInMemoryDataFeed")
        graph_config = {
            "walk_len": 24,
            "walk_degree": 10,
            "once_sample_startid_len": 80000,
            "sample_times_one_chunk": 5,
            "window": 3,
            "debug_mode": 0,
            "batch_size": 800,
            "meta_path": "cuid2clk-clk2cuid;cuid2conv-conv2cuid;clk2cuid-cuid2clk;clk2cuid-cuid2conv",
            "gpu_graph_training": 1,
        }
        ds.set_graph_config(graph_config)
        # Round-trip the pass id through the setter/getter pair.
        ds.set_pass_id(0)
        ds.get_pass_id()

317 318 319 320
    def test_in_memory_dataset_masterpatch(self):
        """
        Testcase for InMemoryDataset from create to run.

        Mixes int64 and float32 slots and merges instances by line id
        after training.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_masterpatch_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_masterpatch_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 id1 1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 id1 1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 id2 1 1 1 1 1 0 1 0\n"
            data += "1 id3 1 0 1 0 1 1 1 1\n"
            data += "1 id3 1 1 1 1 1 0 1 0\n"
            data += "1 id4 1 0 1 0 1 1 1 1\n"
            data += "1 id4 1 0 1 0 1 1 1 1\n"
            data += "1 id5 1 1 1 1 1 0 1 0\n"
            data += "1 id5 1 1 1 1 1 0 1 0\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 id6 1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 id6 1 1 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 id6 1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 id6 1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            for slot in slots[:2]:
                var = paddle.static.data(
                    name=slot, shape=[-1, 1], dtype="int64", lod_level=1
                )
                slots_vars.append(var)
            for slot in slots[2:]:
                var = paddle.static.data(
                    name=slot, shape=[-1, 1], dtype="float32", lod_level=1
                )
                slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32, thread_num=1, pipe_command="cat", use_var=slots_vars
        )
        dataset._init_distributed_settings(parse_ins_id=True)
        # Use the temp-dir paths the data was written to; the old code
        # passed bare CWD-relative filenames that do not exist.
        dataset.set_filelist([filename1, filename2])
        dataset.load_into_memory()
        dataset.local_shuffle()

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(startup_program)

        for i in range(2):
            try:
                exe.train_from_dataset(train_program, dataset)
            except ImportError:
                pass
            except Exception as e:
                self.fail(f"train_from_dataset raised: {e}")

        # dataset._set_merge_by_lineid(2)
        dataset.update_settings(merge_size=2)
        dataset.dataset.merge_by_lineid()

        temp_dir.cleanup()
393

394 395 396 397
    def test_in_memory_dataset_masterpatch1(self):
        """
        Testcase for InMemoryDataset from create to run.

        Variant of test_in_memory_dataset_masterpatch with lod_level=0
        slots declared one by one.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_masterpatch1_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_masterpatch1_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 id1 1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 id1 1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 id2 1 1 1 1 1 0 1 0\n"
            data += "1 id3 1 0 1 0 1 1 1 1\n"
            data += "1 id3 1 1 1 1 1 0 1 0\n"
            data += "1 id4 1 0 1 0 1 1 1 1\n"
            data += "1 id4 1 0 1 0 1 1 1 1\n"
            data += "1 id5 1 1 1 1 1 0 1 0\n"
            data += "1 id5 1 1 1 1 1 0 1 0\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 id6 1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 id6 1 1 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 id6 1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 id6 1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots_vars = []
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            var1 = paddle.static.data(
                name="slot1", shape=[-1, 1], dtype="int64", lod_level=0
            )
            var2 = paddle.static.data(
                name="slot2", shape=[-1, 1], dtype="int64", lod_level=0
            )
            var3 = paddle.static.data(
                name="slot3", shape=[-1, 1], dtype="float32", lod_level=0
            )
            var4 = paddle.static.data(
                name="slot4", shape=[-1, 1], dtype="float32", lod_level=0
            )
            slots_vars = [var1, var2, var3, var4]

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32, thread_num=1, pipe_command="cat", use_var=slots_vars
        )
        dataset._init_distributed_settings(parse_ins_id=True)
        # Use the temp-dir paths the data was written to; the old code
        # passed bare CWD-relative filenames that do not exist.
        dataset.set_filelist([filename1, filename2])
        dataset.load_into_memory()
        dataset.local_shuffle()

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(startup_program)

        for i in range(2):
            try:
                exe.train_from_dataset(train_program, dataset)
            except ImportError:
                pass
            except Exception as e:
                self.fail(f"train_from_dataset raised: {e}")

        dataset._set_merge_by_lineid(2)
        dataset.dataset.merge_by_lineid()

        temp_dir.cleanup()
471

472 473 474 475 476 477
    def test_in_memory_dataset_run_2(self):
        """
        Testcase for InMemoryDataset from create to run.
        Use CUDAPlace
        Use float type id

        Also exercises explicit thread counts, preload/wait_preload,
        merge-by-lineid and the full update_settings surface.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_run_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_run_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1_f", "slot2_f", "slot3_f", "slot4_f"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="float32", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=slots_vars
        )
        dataset.set_filelist([filename1, filename2])
        dataset.load_into_memory()
        dataset.local_shuffle()

        exe = fluid.Executor(
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )
        exe.run(fluid.default_startup_program())

        for i in range(2):
            try:
                # Default thread count first, then a range of explicit
                # thread counts (2 is tried twice on purpose).
                exe.train_from_dataset(fluid.default_main_program(), dataset)
                for thread in (1, 2, 2, 3, 4):
                    exe.train_from_dataset(
                        fluid.default_main_program(), dataset, thread=thread
                    )
            except ImportError:
                pass
            except Exception as e:
                self.fail(f"train_from_dataset raised: {e}")

        if self.use_data_loader:
            data_loader = fluid.io.DataLoader.from_dataset(
                dataset, fluid.cpu_places(), self.drop_last
            )
            for i in range(self.epoch_num):
                for data in data_loader():
                    exe.run(fluid.default_main_program(), feed=data)
        else:
            for i in range(self.epoch_num):
                try:
                    exe.train_from_dataset(
                        fluid.default_main_program(), dataset
                    )
                except Exception as e:
                    self.fail(f"train_from_dataset raised: {e}")

        # Preload + merge-by-lineid paths.
        dataset._set_merge_by_lineid(2)
        dataset._set_parse_ins_id(False)
        dataset._set_fleet_send_sleep_seconds(2)
        dataset.preload_into_memory()
        dataset.wait_preload_done()
        dataset.preload_into_memory(1)
        dataset.wait_preload_done()
        dataset.dataset.merge_by_lineid()
        dataset._set_merge_by_lineid(30)
        dataset._set_parse_ins_id(False)
        dataset.load_into_memory()
        dataset.dataset.merge_by_lineid()
        # update_settings must accept the full set of knobs at once.
        dataset.update_settings(
            batch_size=1,
            thread_num=2,
            input_type=1,
            pipe_command="cat",
            use_var=[],
            fs_name="",
            fs_ugi="",
            download_cmd="cat",
            merge_size=-1,
            parse_ins_id=False,
            parse_content=False,
            fleet_send_batch_size=2,
            fleet_send_sleep_seconds=2,
            fea_eval=True,
        )
        fleet_ptr = fluid.core.Fleet()
        fleet_ptr.set_client2client_config(1, 1, 1)
        fleet_ptr.get_cache_threshold(0)

        temp_dir.cleanup()
593

X
xjqbest 已提交
594
    def test_queue_dataset_run(self):
        """
        Testcase for QueueDataset from create to run.

        Also checks that training from a QueueDataset with an empty
        filelist is handled gracefully.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(temp_dir.name, "test_queue_dataset_run_a.txt")
        filename2 = os.path.join(temp_dir.name, "test_queue_dataset_run_b.txt")

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.QueueDataset()
        dataset.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=slots_vars
        )
        dataset.set_filelist([filename1, filename2])

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        if self.use_data_loader:
            data_loader = fluid.io.DataLoader.from_dataset(
                dataset, fluid.cpu_places(), self.drop_last
            )
            for i in range(self.epoch_num):
                for data in data_loader():
                    exe.run(fluid.default_main_program(), feed=data)
        else:
            for i in range(self.epoch_num):
                try:
                    exe.train_from_dataset(
                        fluid.default_main_program(), dataset
                    )
                except Exception as e:
                    self.fail(f"train_from_dataset raised: {e}")

        dataset2 = paddle.distributed.QueueDataset()
        dataset2.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=slots_vars
        )
        # The empty filelist belongs on dataset2, which is the dataset
        # trained below (the old code set it on the first dataset).
        dataset2.set_filelist([])
        try:
            exe.train_from_dataset(fluid.default_main_program(), dataset2)
        except ImportError:
            print("warning: we skip trainer_desc_pb2 import problem in windows")
        except Exception as e:
            self.fail(f"train_from_dataset raised: {e}")

        temp_dir.cleanup()
X
xjqbest 已提交
659

660 661 662 663 664 665
    def test_queue_dataset_run_2(self):
        """
        Testcase for QueueDataset from create to run.
        Use CUDAPlace
        Use float type id
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(temp_dir.name, "test_queue_dataset_run_a.txt")
        filename2 = os.path.join(temp_dir.name, "test_queue_dataset_run_b.txt")

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1_f", "slot2_f", "slot3_f", "slot4_f"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="float32", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.QueueDataset()
        dataset.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=slots_vars
        )
        dataset.set_filelist([filename1, filename2])

        # Prefer the GPU when this build has CUDA support.
        exe = fluid.Executor(
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )
        exe.run(fluid.default_startup_program())
        if self.use_data_loader:
            data_loader = fluid.io.DataLoader.from_dataset(
                dataset, fluid.cpu_places(), self.drop_last
            )
            for i in range(self.epoch_num):
                for data in data_loader():
                    exe.run(fluid.default_main_program(), feed=data)
        else:
            for i in range(self.epoch_num):
                try:
                    exe.train_from_dataset(
                        fluid.default_main_program(), dataset
                    )
                except Exception as e:
                    self.fail(f"train_from_dataset raised: {e}")

        temp_dir.cleanup()
719 720 721 722 723 724 725

    def test_queue_dataset_run_3(self):
        """
        Testcase for QueueDataset from create to run.
        Use CUDAPlace
        Use float type id

        Uses input_type=1 with an InMemoryDataset and batch_size=1.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(temp_dir.name, "test_queue_dataset_run_a.txt")
        filename2 = os.path.join(temp_dir.name, "test_queue_dataset_run_b.txt")

        with open(filename1, "w") as f:
            data = "2 1 2 2 5 4 2 2 7 2 1 3\n"
            data += "2 6 2 2 1 4 2 2 4 2 2 3\n"
            data += "2 5 2 2 9 9 2 2 7 2 1 3\n"
            data += "2 7 2 2 1 9 2 3 7 2 5 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "2 1 2 2 5 4 2 2 7 2 1 3\n"
            data += "2 6 2 2 1 4 2 2 4 2 2 3\n"
            data += "2 5 2 2 9 9 2 2 7 2 1 3\n"
            data += "2 7 2 2 1 9 2 3 7 2 5 3\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            # Use paddle.static.data like every sibling test (the old
            # code used the deprecated fluid.data alias here).
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=1,
            thread_num=2,
            input_type=1,
            pipe_command="cat",
            use_var=slots_vars,
        )
        dataset.set_filelist([filename1, filename2])
        dataset.load_into_memory()

        exe = fluid.Executor(
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )
        exe.run(fluid.default_startup_program())
        if self.use_data_loader:
            data_loader = fluid.io.DataLoader.from_dataset(
                dataset, fluid.cpu_places(), self.drop_last
            )
            for i in range(self.epoch_num):
                for data in data_loader():
                    exe.run(fluid.default_main_program(), feed=data)
        else:
            for i in range(self.epoch_num):
                try:
                    exe.train_from_dataset(
                        fluid.default_main_program(), dataset
                    )
                except Exception as e:
                    self.fail(f"train_from_dataset raised: {e}")

        temp_dir.cleanup()
785

D
danleifeng 已提交
786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809
    def test_run_with_inmemory_dataset_train_debug_mode(self):
        """
        Testcase for InMemoryDataset from create to run in debug mode.

        Uses the SlotRecord feed with ins-id/content parsing and feature
        evaluation enabled, then trains twice with ``debug=True``.
        """

        temp_dir = tempfile.TemporaryDirectory()
        dump_a_path = os.path.join(temp_dir.name, 'test_run_with_dump_a.txt')
        dump_b_path = os.path.join(temp_dir.name, 'test_run_with_dump_b.txt')

        # Sample lines carry string tokens (a, b, ...) so that
        # parse_ins_id / parse_content have something to parse.
        with open(dump_a_path, "w") as f:
            data = "1 a 1 a 1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 b 1 b 1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 c 1 c 1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(dump_b_path, "w") as f:
            data = "1 d 1 d 1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 e 1 e 1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 f 1 f 1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 g 1 g 1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = paddle.distributed.InMemoryDataset()
        dataset.init(
            batch_size=32,
            thread_num=1,
            pipe_command="cat",
            data_feed_type="SlotRecordInMemoryDataFeed",
            use_var=slots_vars,
        )
        dataset._init_distributed_settings(
            parse_ins_id=True,
            parse_content=True,
            fea_eval=True,
            candidate_size=10000,
        )
        dataset.set_filelist([dump_a_path, dump_b_path])
        dataset.load_into_memory()

        paddle.enable_static()

        exe = paddle.static.Executor(paddle.CPUPlace())
        startup_program = paddle.static.Program()
        main_program = paddle.static.Program()
        exe.run(startup_program)
        for i in range(2):
            try:
                exe.train_from_dataset(main_program, dataset, debug=True)
            except ImportError as e:
                # trainer_desc_pb2 may be unavailable on windows; skip.
                pass
            except Exception as e:
                self.assertTrue(False)

        temp_dir.cleanup()

    def test_cuda_in_memory_dataset_run(self):
        """
        Testcase for cuda inmemory dataset hogwild_worker train to run(barrier).

        Exercises the graph-walk configuration (set_graph_config) and the
        pass-id bookkeeping of the SlotRecord in-memory feed.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_run_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset_run_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        for slot in slots:
            var = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            slots_vars.append(var)

        dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
        dataset.set_feed_type("SlotRecordInMemoryDataFeed")
        dataset.set_batch_size(1)
        dataset.set_pipe_command("cat")
        dataset.set_use_var(slots_vars)
        dataset.set_filelist([filename1, filename2])

        # GPU-graph sampling configuration; debug_mode off, training mode on.
        graph_config = {
            "walk_len": 24,
            "walk_degree": 10,
            "once_sample_startid_len": 80000,
            "sample_times_one_chunk": 5,
            "window": 3,
            "debug_mode": 0,
            "batch_size": 800,
            "meta_path": "cuid2clk-clk2cuid;cuid2conv-conv2cuid;clk2cuid-cuid2clk;clk2cuid-cuid2conv",
            "gpu_graph_training": 1,
        }
        dataset.set_graph_config(graph_config)
        dataset.set_pass_id(2)
        # Round-trips the pass id; value itself is not asserted here.
        pass_id = dataset.get_pass_id()

        dataset.load_into_memory()

        dataset.get_memory_data_size()

        exe = fluid.Executor(
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )
        exe.run(fluid.default_startup_program())
        for i in range(self.epoch_num):
            try:
                exe.train_from_dataset(fluid.default_main_program(), dataset)
            except Exception as e:
                self.assertTrue(False)
        temp_dir.cleanup()

class TestDatasetWithDataLoader(TestDataset):
    """
    Re-run every TestDataset case through a DataLoader.

    Inherits all test methods from TestDataset and only flips the
    ``use_data_loader`` switch in setUp.
    """

    def setUp(self):
        """
        Enable the DataLoader path for the inherited test cases.
        """
        self.use_data_loader = True
        self.epoch_num = 10
        self.drop_last = False


class TestDatasetWithFetchHandler(unittest.TestCase):
    """
    Test Dataset with FetchHandler. TestCases.
    """

    def net(self):
        """
        Build a small static-graph network over four sparse int64 slots.

        Returns:
            tuple: (slots_vars, fc) — the slot input variables and the
            final fc output used as a fetch target.
        """
        slots = ["slot1", "slot2", "slot3", "slot4"]
        slots_vars = []
        poolings = []
        for slot in slots:
            data = paddle.static.data(
                name=slot, shape=[-1, 1], dtype="int64", lod_level=1
            )
            # Cast so sequence_pool / fc operate on float input.
            var = fluid.layers.cast(x=data, dtype='float32')
            pool = fluid.layers.sequence_pool(input=var, pool_type='AVERAGE')

            slots_vars.append(data)
            poolings.append(pool)

        concated = fluid.layers.concat(poolings, axis=1)
        fc = paddle.static.nn.fc(x=concated, activation='tanh', size=32)
        return slots_vars, fc

    def get_dataset(self, inputs, files):
        """
        Create a QueueDataset bound to the given inputs and file list.

        Args:
            inputs(list): input variables of the dataset
            files(list): data files of the dataset
        """
        dataset = paddle.distributed.QueueDataset()
        dataset.init(
            batch_size=32, thread_num=3, pipe_command="cat", use_var=inputs
        )
        dataset.set_filelist(files)
        return dataset

    def setUp(self):
        """
        Write two small sample data files into a temporary directory.
        """
        self.temp_dir = tempfile.TemporaryDirectory()
        self.filename1 = os.path.join(
            self.temp_dir.name, "test_queue_dataset_run_a.txt"
        )
        self.filename2 = os.path.join(
            self.temp_dir.name, "test_queue_dataset_run_b.txt"
        )

        with open(self.filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(self.filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

    def tearDown(self):
        """
        Remove the temporary data directory created in setUp.
        """
        self.temp_dir.cleanup()

    def test_dataset_none(self):
        """
        train_from_dataset with dataset=None must raise a RuntimeError.
        """
        slots_vars, out = self.net()
        files = [self.filename1, self.filename2]
        dataset = self.get_dataset(slots_vars, files)

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())

        # test dataset->None
        try:
            exe.train_from_dataset(fluid.default_main_program(), None)
        except ImportError as e:
            print("warning: we skip trainer_desc_pb2 import problem in windows")
        except RuntimeError as e:
            error_msg = "dataset is need and should be initialized"
            self.assertEqual(error_msg, str(e))
        except Exception as e:
            self.assertTrue(False)

    def test_infer_from_dataset(self):
        """
        infer_from_dataset should run cleanly on a valid QueueDataset.
        """
        slots_vars, out = self.net()
        files = [self.filename1, self.filename2]
        dataset = self.get_dataset(slots_vars, files)

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())

        try:
            exe.infer_from_dataset(fluid.default_main_program(), dataset)
        except ImportError as e:
            print("warning: we skip trainer_desc_pb2 import problem in windows")
        except Exception as e:
            self.assertTrue(False)

    def test_fetch_handler(self):
        """
        train_from_dataset should accept a FetchHandler for the fc output.
        """
        slots_vars, out = self.net()
        files = [self.filename1, self.filename2]
        dataset = self.get_dataset(slots_vars, files)

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())

        fh = fluid.executor.FetchHandler(out.name)
        fh.help()

        try:
            exe.train_from_dataset(
                program=fluid.default_main_program(),
                dataset=dataset,
                fetch_handler=fh,
            )
        except ImportError as e:
            print("warning: we skip trainer_desc_pb2 import problem in windows")
        except RuntimeError as e:
            error_msg = "dataset is need and should be initialized"
            self.assertEqual(error_msg, str(e))
        except Exception as e:
            self.assertTrue(False)

class TestDataset2(unittest.TestCase):
    """TestCases for Dataset used together with fleet optimizers."""

    def setUp(self):
        """Default knobs shared by the test cases."""
        self.use_data_loader = False
        self.epoch_num = 10
        self.drop_last = False

    def test_dataset_fleet(self):
        """
        Testcase for InMemoryDataset from create to run.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset2_run_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset2_run_b.txt"
        )

        # Parameter-server/pslib coverage is not available in this suite yet.
        self.skipTest("parameter server will add pslib UT later")

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        train_program = fluid.Program()
        startup_program = fluid.Program()
        scope = fluid.Scope()
        from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import (
            fleet,
        )

        with fluid.program_guard(train_program, startup_program):
            slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"]
            slots_vars = []
            for slot in slots:
                var = paddle.static.data(
                    name=slot, shape=[-1, 1], dtype="float32", lod_level=1
                )
                slots_vars.append(var)
            # A trivial differentiable cost built from the inputs only.
            fake_cost = paddle.subtract(slots_vars[0], slots_vars[-1])
            fake_cost = paddle.mean(fake_cost)
        with fluid.scope_guard(scope):
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            try:
                fleet.init()
            except ImportError as e:
                print("warning: no mpi4py")
            adam = fluid.optimizer.Adam(learning_rate=0.000005)
            try:
                adam = fleet.distributed_optimizer(adam)
                adam.minimize([fake_cost], [scope])
            except AttributeError as e:
                print("warning: no mpi")
            except ImportError as e:
                print("warning: no mpi4py")
            exe.run(startup_program)
            dataset = paddle.distributed.InMemoryDataset()

            dataset.init(
                batch_size=32,
                thread_num=3,
                pipe_command="cat",
                use_var=slots_vars,
            )
            dataset.set_filelist([filename1, filename2])
            dataset.load_into_memory()
            # Reset fleet globals so later cases start from a clean slate.
            fleet._opt_info = None
            fleet._fleet_ptr = None

        temp_dir.cleanup()

    def test_dataset_fleet2(self):
        """
        Testcase for InMemoryDataset from create to run.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset2_run2_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset2_run2_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        train_program = fluid.Program()
        startup_program = fluid.Program()
        scope = fluid.Scope()
        from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet

        with fluid.program_guard(train_program, startup_program):
            slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"]
            slots_vars = []
            for slot in slots:
                var = paddle.static.data(
                    name=slot, shape=[-1, 1], dtype="float32", lod_level=1
                )
                slots_vars.append(var)
            fake_cost = paddle.subtract(slots_vars[0], slots_vars[-1])
            fake_cost = paddle.mean(fake_cost)
        with fluid.scope_guard(scope):
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            try:
                fleet.init()
            except ImportError as e:
                print("warning: no mpi4py")
            adam = fluid.optimizer.Adam(learning_rate=0.000005)
            try:
                adam = fleet.distributed_optimizer(
                    adam,
                    strategy={
                        "fs_uri": "fs_uri_xxx",
                        "fs_user": "fs_user_xxx",
                        "fs_passwd": "fs_passwd_xxx",
                        "fs_hadoop_bin": "fs_hadoop_bin_xxx",
                    },
                )
                adam.minimize([fake_cost], [scope])
            except AttributeError as e:
                print("warning: no mpi")
            except ImportError as e:
                print("warning: no mpi4py")
            exe.run(startup_program)
            dataset = paddle.distributed.InMemoryDataset()
            dataset.init(
                batch_size=32,
                thread_num=3,
                pipe_command="cat",
                use_var=slots_vars,
            )
            dataset.set_filelist([filename1, filename2])
            dataset.load_into_memory()
            # global_shuffle without a live fleet server is expected to fail.
            try:
                dataset.global_shuffle(fleet)
            except:
                print("warning: catch expected error")
            fleet._opt_info = None
            fleet._fleet_ptr = None
            dataset = paddle.distributed.InMemoryDataset()
            dataset.init(fs_name="", fs_ugi="")
            d = paddle.distributed.fleet.DatasetBase()
            try:
                dataset._set_feed_type("MultiSlotInMemoryDataFeed")
            except:
                print("warning: catch expected error")
            # thread_num=0 makes _prepare_to_run raise on purpose.
            dataset.thread_num = 0
            try:
                dataset._prepare_to_run()
            except:
                print("warning: catch expected error")
            try:
                dataset.preprocess_instance()
            except:
                print("warning: catch expected error")
            try:
                dataset.set_current_phase(1)
            except:
                print("warning: catch expected error")
            try:
                dataset.postprocess_instance()
            except:
                print("warning: catch expected error")
            dataset._set_fleet_send_batch_size(1024)
            try:
                dataset.global_shuffle()
            except:
                print("warning: catch expected error")
            # dataset.get_pv_data_size()
            dataset.get_memory_data_size()
            dataset.get_shuffle_data_size()
            dataset = paddle.distributed.QueueDataset()
            try:
                dataset.local_shuffle()
            except:
                print("warning: catch expected error")
            try:
                dataset.global_shuffle()
            except:
                print("warning: catch expected error")
            dataset = paddle.distributed.fleet.FileInstantDataset()
            try:
                dataset.local_shuffle()
            except:
                print("warning: catch expected error")
            try:
                dataset.global_shuffle()
            except:
                print("warning: catch expected error")

        temp_dir.cleanup()

    def test_bosps_dataset_fleet2(self):
        """
        Testcase for BoxPSDataset from create to run.
        """
        temp_dir = tempfile.TemporaryDirectory()
        filename1 = os.path.join(
            temp_dir.name, "test_in_memory_dataset2_run2_a.txt"
        )
        filename2 = os.path.join(
            temp_dir.name, "test_in_memory_dataset2_run2_b.txt"
        )

        with open(filename1, "w") as f:
            data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
            data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
            data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
            f.write(data)
        with open(filename2, "w") as f:
            data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
            data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
            data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
            data += "1 7 2 3 6 4 8 8 8 8 1 7\n"
            f.write(data)

        train_program = fluid.Program()
        startup_program = fluid.Program()
        scope = fluid.Scope()
        from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet

        with fluid.program_guard(train_program, startup_program):
            slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"]
            slots_vars = []
            for slot in slots:
                var = paddle.static.data(
                    name=slot, shape=[-1, 1], dtype="float32", lod_level=1
                )
                slots_vars.append(var)
            fake_cost = paddle.subtract(slots_vars[0], slots_vars[-1])
            fake_cost = paddle.mean(fake_cost)
        with fluid.scope_guard(scope):
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            try:
                fleet.init()
            except ImportError as e:
                print("warning: no mpi4py")
            adam = fluid.optimizer.Adam(learning_rate=0.000005)
            try:
                adam = fleet.distributed_optimizer(
                    adam,
                    strategy={
                        "fs_uri": "fs_uri_xxx",
                        "fs_user": "fs_user_xxx",
                        "fs_passwd": "fs_passwd_xxx",
                        "fs_hadoop_bin": "fs_hadoop_bin_xxx",
                    },
                )
                adam.minimize([fake_cost], [scope])
            except AttributeError as e:
                print("warning: no mpi")
            except ImportError as e:
                print("warning: no mpi4py")
            exe.run(startup_program)
            dataset = paddle.distributed.fleet.BoxPSDataset()
            dataset.init(
                batch_size=32,
                thread_num=3,
                pipe_command="cat",
                use_var=slots_vars,
            )
            dataset.set_filelist([filename1, filename2])
            dataset.load_into_memory()
            try:
                dataset.global_shuffle(fleet)
            except:
                print("warning: catch expected error")
            fleet._opt_info = None
            fleet._fleet_ptr = None
            # Re-create with the PV-merge related knobs enabled.
            dataset = paddle.distributed.fleet.BoxPSDataset()
            dataset.init(
                rank_offset="",
                pv_batch_size=1,
                fs_name="",
                fs_ugi="",
                data_feed_type="MultiSlotInMemoryDataFeed",
                parse_logkey=True,
                merge_by_sid=True,
                enable_pv_merge=True,
            )
            d = paddle.distributed.fleet.DatasetBase()
            try:
                dataset._set_feed_type("MultiSlotInMemoryDataFeed")
            except:
                print("warning: catch expected error")
            dataset.thread_num = 0
            try:
                dataset._prepare_to_run()
            except:
                print("warning: catch expected error")
            dataset._set_parse_logkey(True)
            dataset._set_merge_by_sid(True)
            dataset._set_enable_pv_merge(True)
            try:
                dataset.preprocess_instance()
            except:
                print("warning: catch expected error")
            try:
                dataset.set_current_phase(1)
            except:
                print("warning: catch expected error")
            try:
                dataset.postprocess_instance()
            except:
                print("warning: catch expected error")
            dataset._set_fleet_send_batch_size(1024)
            try:
                dataset.global_shuffle()
            except:
                print("warning: catch expected error")
            # dataset.get_pv_data_size()
            dataset.get_memory_data_size()
            dataset.get_shuffle_data_size()
        temp_dir.cleanup()
if __name__ == '__main__':
    # Run all TestCases in this module.
    unittest.main()