# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import subprocess
import multiprocessing
from datetime import datetime

import re
import copy
import errno
import time
import logging
import six
import abc
import paddle.fluid as fluid
from paddle.fluid import core
import functools

import shutil

__all__ = []

class ExecuteError(Exception):
    """A shell/HDFS command failed; operations decorated with
    `_handle_errors` retry on this exception."""


class FSFileExistsError(Exception):
    """A target path unexpectedly already exists."""


class FSFileNotExistsError(Exception):
    """A required source path does not exist."""


class FSTimeOut(Exception):
    """A retried filesystem operation exceeded its timeout budget."""


class FSShellCmdAborted(ExecuteError):
    """The underlying hadoop shell command aborted (exit code 134);
    unlike a plain ExecuteError this is not worth retrying."""


57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98
class FS(object):
    """Abstract file-system interface implemented by LocalFS and HDFSClient.

    Every operation is abstract and raises NotImplementedError until a
    concrete subclass overrides it.
    """

    @abc.abstractmethod
    def ls_dir(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def is_file(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def is_dir(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def is_exist(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def upload(self, local_path, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def download(self, fs_path, local_path):
        raise NotImplementedError

    @abc.abstractmethod
    def mkdirs(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def delete(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def need_upload_download(self):
        raise NotImplementedError

    @abc.abstractmethod
    def rename(self, fs_src_path, fs_dst_path):
        raise NotImplementedError

    @abc.abstractmethod
    def mv(self, fs_src_path, fs_dst_path, overwrite=False, test_exists=False):
        raise NotImplementedError

    @abc.abstractmethod
    def upload_dir(self, local_dir, dest_dir):
        raise NotImplementedError

    @abc.abstractmethod
    def list_dirs(self, fs_path):
        raise NotImplementedError

    @abc.abstractmethod
    def touch(self, fs_path, exist_ok=True):
        raise NotImplementedError

    @abc.abstractmethod
    def cat(self, fs_path=None):
        raise NotImplementedError

118 119

class LocalFS(FS):
    """
    A tool of local file system.

    Examples:
        .. code-block:: python

            from paddle.distributed.fleet.utils import LocalFS

            client = LocalFS()
            subdirs, files = client.ls_dir("./")
    """

    def ls_dir(self, fs_path):
        """
        List directories and files under `fs_path` .

        Args:
            fs_path(str): The local file path.

        Returns:
            Tuple: Return a 2-tuple, the first is a list of all its subdirectories,
            and the second is a list of all its subfiles, e.g. ([subdirname1, subdirname2, ...], [filename1, filename2, ...]).

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                subdirs, files = client.ls_dir("./")
        """
        if not self.is_exist(fs_path):
            return [], []

        dirs = []
        files = []
        for f in os.listdir(fs_path):
            # os.path.join instead of manual "/" concatenation
            if os.path.isdir(os.path.join(fs_path, f)):
                dirs.append(f)
            else:
                files.append(f)

        return dirs, files

    def mkdirs(self, fs_path):
        """
        Create a local directory (with any missing parents).

        Args:
            fs_path(str): The local directory path.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.mkdirs("test_mkdirs")
                client.delete("test_mkdirs")
        """
        assert not os.path.isfile(fs_path), "{} is already a file".format(
            fs_path)
        # BUG FIX: `os.system("mkdir -p {}")` broke on paths with spaces or
        # shell metacharacters and silently ignored failures.
        try:
            os.makedirs(fs_path)
        except OSError as e:
            # an already-existing directory is fine (same as `mkdir -p`)
            if e.errno != errno.EEXIST:
                raise

    def rename(self, fs_src_path, fs_dst_path):
        """
        Rename the file.

        Args:
            fs_src_path(str): The actual name of the file or directory
            fs_dst_path(str): The new name of the file or directory.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.touch("test_rename_src")
                print(client.is_exist("test_rename_src")) # True
                client.rename("test_rename_src", "test_rename_dst")
                print(client.is_exist("test_rename_src")) # False
                print(client.is_exist("test_rename_dst")) # True
                client.delete("test_rename_dst")
        """
        os.rename(fs_src_path, fs_dst_path)

    def _rmr(self, fs_path):
        # recursive delete of a directory tree
        shutil.rmtree(fs_path)

    def _rm(self, fs_path):
        # delete a single file
        os.remove(fs_path)

    def delete(self, fs_path):
        """
        Delete the local file path, whether it's a file or directory.

        Args:
            fs_path(str): The local file path.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.mkdirs("test_localFS_mkdirs")
                client.delete("test_localFS_mkdirs")
        """
        if not self.is_exist(fs_path):
            return

        if os.path.isfile(fs_path):
            return self._rm(fs_path)

        return self._rmr(fs_path)

    def need_upload_download(self):
        # local paths need no staging between local and remote storage
        return False

    def is_file(self, fs_path):
        """
        Whether the local file path is a file.

        Args:
            fs_path(str): The local file path.

        Returns:
            Bool: Return true if the path exists and it's a file, otherwise return false.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.touch("test_is_file")
                print(client.is_file("test_is_file")) # True
                client.delete("test_is_file")
        """
        return os.path.isfile(fs_path)

    def is_dir(self, fs_path):
        """
        Whether the local file path is a directory.

        Args:
            fs_path(str): The local file path.

        Returns:
            Bool: Return true if the path exists and it's a directory, otherwise return false.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.mkdirs("test_is_dir")
                print(client.is_dir("test_is_dir")) # True
                client.delete("test_is_dir")
        """
        return os.path.isdir(fs_path)

    def is_exist(self, fs_path):
        """
        Whether the local file path exists.

        Args:
            fs_path(str): The local file path.

        Returns:
            Bool: Whether it's a file or directory, return true if the path exists,
            otherwise return false.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                ret = client.is_exist("test_is_exist")
        """
        return os.path.exists(fs_path)

    def touch(self, fs_path, exist_ok=True):
        """
        Create a local file.

        Args:
            fs_path(str): The local file path.
            exist_ok(bool): When `fs_path` exists, if `exist_ok` is set false,
            program will throw an Exception. Default is true.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.touch("test_touch")
                client.delete("test_touch")
        """
        if self.is_exist(fs_path):
            if exist_ok:
                return
            raise FSFileExistsError

        # BUG FIX: `os.system("touch {}")` was unsafe with special characters
        # in the path; opening in append mode creates the file portably.
        with open(fs_path, "a"):
            pass

    def mv(self, src_path, dst_path, overwrite=False, test_exists=False):
        """
        Move a local file or directory from `src_path` to `dst_path` .

        Args:
            src_path(str):  Name of the file or directory, that's needed to be moved.
            dst_path(str):  Name of the file or directory to which to move to.
            overwrite(bool): Whether to re-write `dst_path` if that exists. Default is False.
            test_exists(bool): Unused; kept for interface compatibility with FS.mv.

        Raises:
            FSFileNotExistsError: When `src_path` does not exist.
            FSFileExistsError: When `dst_path` exists and `overwrite` is False.

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                client.touch("test_mv_src")
                client.mv("test_mv_src", "test_mv_dst")
                client.delete("test_mv_dst")
        """
        if not self.is_exist(src_path):
            raise FSFileNotExistsError

        if overwrite and self.is_exist(dst_path):
            self.delete(dst_path)

        if self.is_exist(dst_path):
            raise FSFileExistsError

        return self.rename(src_path, dst_path)

    def list_dirs(self, fs_path):
        """
        Only list directories under `fs_path` .

        Args:
            fs_path(str): The local file path.

        Returns:
            List: A list of all its subdirectories, e.g. [subdirname1, subdirname2, ...].

        Examples:
            .. code-block:: python

                from paddle.distributed.fleet.utils import LocalFS

                client = LocalFS()
                subdirs = client.list_dirs("./")
        """
        if not self.is_exist(fs_path):
            return []

        dirs = [
            f for f in os.listdir(fs_path)
            if os.path.isdir(os.path.join(fs_path, f))
        ]

        return dirs

387

G
gongweibao 已提交
388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404
def _handle_errors(max_time_out=None):
    """Decorator factory retrying a filesystem operation on ExecuteError.

    Args:
        max_time_out(int|None): Timeout in milliseconds. When None, the
            wrapped object's `_time_out` attribute is used instead.

    The wrapped method is called repeatedly until it succeeds or the
    timeout elapses (then FSTimeOut is raised), sleeping the object's
    `_sleep_inter` milliseconds between attempts.
    """

    def decorator(f):
        @functools.wraps(f)
        def handler(*args, **kwargs):
            obj = args[0]
            # convert the millisecond settings to seconds
            if max_time_out is None:
                timeout_s = float(obj._time_out) / 1000.0
            else:
                timeout_s = max_time_out / 1000.0
            sleep_s = float(obj._sleep_inter) / 1000.0

            begin = time.time()
            last_report = begin
            while True:
                try:
                    return f(*args, **kwargs)
                # important: only ExecuteError need to retry
                except ExecuteError:
                    if time.time() - begin >= timeout_s:
                        raise FSTimeOut("args:{} timeout:{}".format(
                            args, time.time() - begin))

                    time.sleep(sleep_s)

                # periodic progress report while stuck retrying
                if time.time() - last_report > 30:
                    print("hadoop operator timeout:args:{} timeout:{}".format(
                        args, time.time() - begin))
                    last_report = time.time()

        return handler

    return decorator


423
class HDFSClient(FS):
    """
    A tool of HDFS.

    Args:
        hadoop_home(str): Hadoop home.
        configs(dict): Hadoop config. It is a dictionary and needs to contain the
            keys: "fs.default.name" and "hadoop.job.ugi".

    Examples:

        .. code-block:: text

            from paddle.distributed.fleet.utils import HDFSClient

            hadoop_home = "/home/client/hadoop-client/hadoop/"

            configs = {
                "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                "hadoop.job.ugi": "hello,hello123"
            }

            client = HDFSClient(hadoop_home, configs)
            client.ls_dir("hdfs:/test_hdfs_client")
    """

448 449 450 451
    def __init__(
            self,
            hadoop_home,
            configs,
            time_out=5 * 60 * 1000,  # ms
            sleep_inter=1000):  # ms
        """Build the hadoop shell command prefix and retry settings.

        Args:
            hadoop_home(str): Hadoop installation directory; the binary
                `<hadoop_home>/bin/hadoop` is invoked for every operation.
            configs(dict): Extra hadoop options, passed as `-Dkey=value`.
            time_out(int): Retry timeout in milliseconds (see _handle_errors).
            sleep_inter(int): Sleep between retries, in milliseconds.
        """
        self.pre_commands = []
        hadoop_bin = '%s/bin/hadoop' % hadoop_home
        self.pre_commands.append(hadoop_bin)
        dfs = 'fs'
        self.pre_commands.append(dfs)

        if configs:
            # `six.iteritems` is unnecessary on Python 3; dict.items() is
            # equivalent here and drops the py2-compat shim.
            for k, v in configs.items():
                config_command = '-D%s=%s' % (k, v)
                self.pre_commands.append(config_command)

        self._time_out = time_out
        self._sleep_inter = sleep_inter
        self._base_cmd = " ".join(self.pre_commands)
        # matches the big-data shell's structured error message lines
        self._bd_err_re = re.compile(
            r'\s?responseErrorMsg\s?\:.*, errorCode\:\s?[0-9]+, path\:')

    def _run_cmd(self, cmd, redirect_stderr=False):
        """Run `hadoop fs -<cmd>` and return (exit_code, output_lines)."""
        full_cmd = "{} -{}".format(self._base_cmd, cmd)
        exit_code, output = core.shell_execute_cmd(full_cmd, 0, 0,
                                                   redirect_stderr)
        exit_code = int(exit_code)
        # exit code 134 (SIGABRT) means the hadoop client itself crashed;
        # signal that separately so callers do not blindly retry it.
        if exit_code == 134:
            raise FSShellCmdAborted(cmd)
        return exit_code, output.splitlines()

    @_handle_errors()
    def list_dirs(self, fs_path):
        """
        Only list directories under `fs_path` .

        Args:
            fs_path(str): The HDFS file path.

        Returns:
            List: A list of all its subdirectories, e.g. [subdirname1, subdirname2, ...].

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                subdirs = client.list_dirs("hdfs:/test_hdfs_client")
        """
        if not self.is_exist(fs_path):
            return []

        subdirs, _ = self._ls_dir(fs_path)
        return subdirs

    @_handle_errors()
    def ls_dir(self, fs_path):
        """
        List directories and files under `fs_path` .

        Args:
            fs_path(str): The HDFS file path.

        Returns:
            Tuple: Return a 2-tuple, the first element is the list of all its subdirectories,
            and the second one is the list of all its subfiles, e.g. ([subdirname1, subdirname2, ...], [filename1, filename2, ...]).

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                subdirs, files = client.ls_dir("hdfs:/test_hdfs_client")
        """
        if self.is_exist(fs_path):
            return self._ls_dir(fs_path)

        return [], []

    def _ls_dir(self, fs_path):
        """Run `hadoop fs -ls` and split the listing into (dirs, files).

        A valid listing line has 8 whitespace-separated fields:
        permissions, replicas, owner, group, size, date, time, path.
        """
        cmd = "ls {}".format(fs_path)
        ret, lines = self._run_cmd(cmd)

        if ret != 0:
            raise ExecuteError(cmd)

        dirs = []
        files = []
        for line in lines:
            # BUG FIX: cap the split at 8 fields so file names that contain
            # spaces are not truncated at the first space.
            arr = line.split(None, 7)
            if len(arr) != 8:
                continue

            p = os.path.basename(arr[7])
            # a leading 'd' in the permission string marks a directory
            if arr[0][0] == 'd':
                dirs.append(p)
            else:
                files.append(p)

        return dirs, files

    def _test_match(self, lines):
        """Return the regex match if any output line is a known big-data
        shell error message, otherwise None."""
        for line in lines:
            match = self._bd_err_re.match(line)
            if match is not None:
                return match

        return None

G
gongweibao 已提交
573
    @_handle_errors()
    def is_dir(self, fs_path):
        """
        Whether the remote HDFS path is a directory.

        Args:
            fs_path(str): The HDFS file path.

        Returns:
            Bool: Return true if the path exists and it's a directory, otherwise return false.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                ret = client.is_dir("hdfs:/test_hdfs_client")
        """
        if not self.is_exist(fs_path):
            return False

        return self._is_dir(fs_path)

    def _is_dir(self, fs_path):
        """Check a path with `hadoop fs -test -d`; True iff it is a directory."""
        # BUG FIX: `redirect_stderr=True` was being passed to str.format
        # (where it was silently ignored) instead of to _run_cmd.
        cmd = "test -d {}".format(fs_path)
        ret, lines = self._run_cmd(cmd, redirect_stderr=True)
        if ret:
            # `test -d` exits non-zero both for "not a directory" and for
            # real errors; only raise when the output matches a known error.
            if self._test_match(lines):
                raise ExecuteError(cmd)

            return False

        return True

    def is_file(self, fs_path):
        """
        Whether the remote HDFS path is a file.

        Args:
            fs_path(str): The HDFS file path.

        Returns:
            Bool: Return true if the path exists and it's a file, otherwise return false.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                ret = client.is_file("hdfs:/test_hdfs_client")
        """
        if not self.is_exist(fs_path):
            return False

        # an existing path that is not a directory must be a file
        return not self._is_dir(fs_path)
645

G
gongweibao 已提交
646
    @_handle_errors()
    def is_exist(self, fs_path):
        """
        Whether the remote HDFS path exists.

        Args:
            fs_path(str): The hdfs file path.

        Returns:
            Bool: Whether it's is file or directory, return true if the path exists,
            otherwise return false.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                ret = client.is_exist("hdfs:/test_hdfs_client")
        """
        cmd = "ls {} ".format(fs_path)
        ret, out = self._run_cmd(cmd, redirect_stderr=True)
        if ret == 0:
            return True

        # non-zero exit: distinguish a genuinely missing path from an error
        for line in out:
            if "No such file or directory" in line:
                return False

        raise ExecuteError(cmd)

683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701
    def upload_dir(self, local_dir, dest_dir, overwrite=False):
        """
        upload dir to hdfs
        Args:
            local_dir(str): local dir
            dest_dir(str): hdfs dest dir
            overwrite(bool): is overwrite
        Returns:
            return code
        """
        local_dir = local_dir.rstrip("/")
        dest_dir = dest_dir.rstrip("/")
        # the directory lands at <dest_dir>/<basename of local_dir>
        target = dest_dir + "/" + os.path.basename(local_dir)
        if self.is_exist(target) and overwrite:
            self.delete(target)
        if not self.is_exist(dest_dir):
            self.mkdirs(dest_dir)
        self._try_upload(local_dir, dest_dir)

G
gongweibao 已提交
702
    # can't retry
    def upload(self, local_path, fs_path, multi_processes=1, overwrite=False):
        """
        Upload the local path to remote HDFS.

        Args:
            local_path(str): The local path.
            fs_path(str): The HDFS path.
            multi_processes(int|1): the number of upload processes used at the same time, default=1
            overwrite(bool|False): will overwrite file on HDFS or not

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.upload("test_hdfs_client", "hdfs:/test_hdfs_client")
        """

        def __subprocess_upload(hdfs_path_single, datas):
            # worker: push every assigned local file into the same HDFS dir
            for data in datas:
                self._try_upload(data, hdfs_path_single)

        def get_local_files(path):
            """Return [path] for a file, or its direct children for a dir."""
            if not os.path.exists(path):
                return []
            if os.path.isdir(path):
                return [os.path.join(path, name) for name in os.listdir(path)]
            return [path]

        local = LocalFS()
        if not local.is_exist(local_path):
            raise FSFileNotExistsError("{} not exists".format(local_path))
        # upload_dir
        if local.is_dir(local_path):
            self.upload_dir(local_path, fs_path, overwrite=overwrite)
            return
        # upload files
        all_files = get_local_files(local_path)
        if not all_files:
            print("there are nothing need to upload, function exit")
            return

        if self.is_exist(fs_path) and overwrite:
            self.delete(fs_path)
            self.mkdirs(fs_path)

        workers = []
        for rank in range(multi_processes):
            shard = self._split_files(all_files, rank, multi_processes)
            w = multiprocessing.Process(
                target=__subprocess_upload, args=(fs_path, shard))
            workers.append(w)
            w.start()

        # complete the processes
        for w in workers:
            w.join()
G
gongweibao 已提交
782 783 784 785 786 787

    @_handle_errors()
    def _try_upload(self, local_path, fs_path):
        """One `hadoop fs -put` attempt; on failure remove the partial target
        before the retry."""
        cmd = "put {} {}".format(local_path, fs_path)
        try:
            ret, _ = self._run_cmd(cmd)
            if ret != 0:
                raise ExecuteError(cmd)
        except Exception as e:
            # a failed put may leave a partially-written target behind
            self.delete(fs_path)
            raise e

    # can't retry
    def download(self, fs_path, local_path, multi_processes=1, overwrite=False):
        """
        Download remote HDFS path to the local.

        Args:
            fs_path(str):  The HDFS path.
            local_path(str): The local path.
            multi_processes(int|1): the number of download processes used at the same time, default=1
            overwrite(bool): is overwrite

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.download("hdfs:/test_hdfs_client", "./")
        """

        def __subprocess_download(local_path, datas):
            """
            download file from HDFS
            Args:
                local_path(str): the local file path
                datas(str): the hdfs file path list
            """
            for data in datas:
                self._try_download(data, local_path)

        if not self.is_exist(fs_path):
            # BUG FIX: error message said "not exits" instead of "not exists"
            raise FSFileNotExistsError("{} not exists".format(fs_path))
        # download file
        if self.is_file(fs_path):
            return self._try_download(fs_path, local_path)
        # download dir
        _, all_files = self.ls_dir(fs_path)

        procs = []
        for i in range(multi_processes):
            process_datas = self._split_files(all_files, i, multi_processes)
            p = multiprocessing.Process(
                target=__subprocess_download, args=(local_path, process_datas))
            procs.append(p)
            p.start()

        # complete the processes
        for proc in procs:
            proc.join()
G
gongweibao 已提交
851 852 853 854 855 856

    @_handle_errors()
    def _try_download(self, fs_path, local_path):
        """One `hadoop fs -get` attempt; on failure remove the partial local
        copy before the retry."""
        cmd = "get {} {}".format(fs_path, local_path)
        try:
            ret, _ = self._run_cmd(cmd)
            if ret != 0:
                raise ExecuteError(cmd)
        except Exception as e:
            # remove partially-downloaded local data before retrying
            local_fs = LocalFS()
            local_fs.delete(local_path)
            raise e

    @_handle_errors()
    def mkdirs(self, fs_path):
        """
        Create a remote HDFS directory.

        Args:
            fs_path(str): The HDFS directory path.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.mkdirs("hdfs:/test_hdfs_client")
        """
        if self.is_exist(fs_path):
            return

        # plain `mkdir` fails with "No such file or directory" when parent
        # directories are missing; fall back to `mkdir -p` in that case
        cmd = "mkdir {} ".format(fs_path)
        ret, out = self._run_cmd(cmd, redirect_stderr=True)
        if ret != 0:
            parents_missing = False
            for line in out:
                if "No such file or directory" in line:
                    parents_missing = True
                    break
            if not parents_missing:
                raise ExecuteError(cmd)

            if not self.is_exist(fs_path):
                cmd = "mkdir -p {}".format(fs_path)
                ret, _ = self._run_cmd(cmd)
                if ret != 0:
                    raise ExecuteError(cmd)

    def mv(self, fs_src_path, fs_dst_path, overwrite=False, test_exists=True):
        """
        Move a remote HDFS file or directory from `fs_src_path` to `fs_dst_path` .

        Args:
            fs_src_path(str):  Name of the file or directory, that's needed to be moved.
            fs_dst_path(str):  Name of the file or directory to which to move to.
            overwrite(bool): Whether to re-write `fs_dst_path` if that exists. Default is False.
            test_exists(bool): Check the existence of `fs_src_path` and `fs_dst_path` . When `test_exists` is set true, if `fs_src_path` doesn't exist or `fs_dst_path` exists, program will throw an Excetption.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.mv("hdfs:/test_hdfs_client", "hdfs:/test_hdfs_client2")
        """
        if overwrite and self.is_exist(fs_dst_path):
            self.delete(fs_dst_path)

        if test_exists:
            if not self.is_exist(fs_src_path):
                raise FSFileNotExistsError("{} is not exists".format(
                    fs_src_path))

            if self.is_exist(fs_dst_path):
                raise FSFileExistsError("{} exists already".format(fs_dst_path))

        return self._try_mv(fs_src_path, fs_dst_path)

    @_handle_errors()
    def _try_mv(self, fs_src_path, fs_dst_path):
        # Issue the HDFS "mv" shell command; retried by _handle_errors.
        cmd = "mv {} {}".format(fs_src_path, fs_dst_path)
        try:
            exit_code, _ = self._run_cmd(cmd)
            if exit_code != 0:
                raise ExecuteError(cmd)
        except Exception as e:
            # The move may in fact have completed even though the command
            # reported failure (e.g. a retried attempt): source gone and
            # destination present means success.
            already_moved = (not self.is_exist(fs_src_path)
                             and self.is_exist(fs_dst_path))
            if already_moved:
                return
            raise e

    def _rmr(self, fs_path):
        # "rmr" removes a directory tree recursively on HDFS.
        cmd = "rmr {}".format(fs_path)
        exit_code, _ = self._run_cmd(cmd)
        if exit_code != 0:
            raise ExecuteError(cmd)

    def _rm(self, fs_path):
        # "rm" removes a single (non-directory) HDFS file.
        cmd = "rm {}".format(fs_path)
        exit_code, _ = self._run_cmd(cmd)
        if exit_code != 0:
            raise ExecuteError(cmd)

G
gongweibao 已提交
973
    @_handle_errors()
    def delete(self, fs_path):
        """
        Delete a remote HDFS path, whether it's a file or directory.

        Args:
            fs_path(str): The HDFS file path.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.delete("hdfs:/test_hdfs_client")
        """
        # Deleting a missing path is a silent no-op, not an error.
        if not self.is_exist(fs_path):
            return

        # Directories require the recursive remove variant.
        if self._is_dir(fs_path):
            return self._rmr(fs_path)
        return self._rm(fs_path)

G
gongweibao 已提交
1005
    def touch(self, fs_path, exist_ok=True):
        """
        Create an empty remote HDFS file.

        Args:
            fs_path(str): The HDFS file path.
            exist_ok(bool): When `fs_path` exists, if `exist_ok` is set false,
            program will throw an Exception. Default is true.

        Raises:
            FSFileExistsError: If `fs_path` already exists and `exist_ok` is False.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.touch("hdfs:/test_hdfs_client")
        """
        if self.is_exist(fs_path):
            if exist_ok:
                return
            # Include the offending path in the error, consistent with the
            # message raised by mv() for an existing destination.
            raise FSFileExistsError("{} exists already".format(fs_path))

        return self._touchz(fs_path)

    @_handle_errors()
    def _touchz(self, fs_path):
        # "touchz" creates a zero-length file on HDFS.
        cmd = "touchz {}".format(fs_path)
        exit_code, _ = self._run_cmd(cmd)
        if exit_code != 0:
            raise ExecuteError(cmd)
G
gongweibao 已提交
1042

1043 1044
    def need_upload_download(self):
        # HDFS is remote storage: data must be staged with explicit
        # upload/download rather than accessed as local files.
        return True
1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108

    def cat(self, fs_path=None):
        """
        Cat a remote HDFS file.

        Args:
            fs_path(str): The HDFS file path.

        Returns:
            file content, or an empty string when `fs_path` is not a file.

        Examples:

            .. code-block:: text

                from paddle.distributed.fleet.utils import HDFSClient

                hadoop_home = "/home/client/hadoop-client/hadoop/"
                configs = {
                    "fs.default.name": "hdfs://xxx.hadoop.com:54310",
                    "hadoop.job.ugi": "hello,hello123"
                }

                client = HDFSClient(hadoop_home, configs)
                client.cat("hdfs:/test_hdfs_client")
        """
        # Guard clause: only regular files can be cat-ed.
        if not self.is_file(fs_path):
            return ""
        lines = self._try_cat(fs_path)
        return "\n".join(lines)

    @_handle_errors()
    def _try_cat(self, fs_path):
        # Run HDFS "cat" and return its output lines; retried by _handle_errors.
        cmd = "cat {}".format(fs_path)
        exit_code, lines = self._run_cmd(cmd)
        if exit_code != 0:
            raise ExecuteError(cmd)
        return lines

    def _split_files(self, files, trainer_id, trainers):
        """
        split file list
        Args:
            files(list): file list
            trainer_id(int): trainer mpi rank id
            trainers(int): all trainers num
        Returns:
            fileist(list): file list of current trainer
        """
        remainder = len(files) % trainers
        blocksize = len(files) // trainers

        blocks = [blocksize] * trainers
        for i in range(remainder):
            blocks[i] += 1

        trainer_files = [[]] * trainers
        begin = 0
        for i in range(trainers):
            trainer_files[i] = files[begin:begin + blocks[i]]
            begin += blocks[i]

        return trainer_files[trainer_id]
Y
yaoxuefeng 已提交
1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140

    def list_files_info(self, path_list):
        """
        list_files return file path and size
        Args:
            path_list(list): file list
        Returns:
            fileist(list): file list with file path and size
        """
        if len(path_list) <= 0:
            return []

        # Concatenate all paths into one 'hadoop ls' invocation, which is
        # much faster than listing each path separately; awk keeps only the
        # size ($5) and path ($8) columns.
        str_concat = " ".join(path_list) + " "
        cmd = "ls " + str_concat + " | awk '{if ($8 != \"\") {print $5\" \"$8 }}'"
        ret, lines = self._run_cmd(cmd)
        if (len(lines) == 0):
            # NOTE(review): `logger` is not defined anywhere visible in this
            # module -- confirm it exists before this branch can be reached.
            logger.warning("list_files empty, path[%s]" % path_list)
            return []

        file_list = []
        for line in lines:
            fields = line.split(' ')
            if len(fields) < 2:
                continue
            file_list.append({'path': fields[1], 'size': int(fields[0])})

        return file_list