/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include "paddle/legacy/utils/Util.h"

#include <stdio.h>

#include "hl_gpu.h"
#include "paddle/legacy/gserver/dataproviders/DataProvider.h"
#include "paddle/legacy/gserver/gradientmachines/GradientMachine.h"

#include <stdlib.h>
#include <fstream>
#include "ParameterUpdater.h"
#include "TrainerConfig.pb.h"
#include "TrainerConfigHelper.h"

namespace paddle {

/**
 * Configuration for parameter utils.
 */
struct ParameterUtilConfig {
  DISABLE_COPY(ParameterUtilConfig);

  ParameterUtilConfig(bool save_only_one,
                      int saving_period,
                      bool load_save_parameters_in_pserver,
                      std::string config)
      : save_only_one_(save_only_one),
        saving_period_(saving_period),
        load_save_param_pserver_(load_save_parameters_in_pserver),
        config_(config) {}

  bool save_only_one_;
  int saving_period_;
  bool load_save_param_pserver_;
  std::string config_;
};
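
// Illustrative sketch only: one plausible way a trainer could build this
// config. The FLAGS_* names below are assumed gflags from the legacy trainer
// binary, not anything declared in this header.
//
//   auto utilConfig = std::make_unique<ParameterUtilConfig>(
//       FLAGS_save_only_one,                   // keep only the newest saved pass
//       FLAGS_saving_period,                   // save every N passes
//       FLAGS_loadsave_parameters_in_pserver,  // load/save remote params via pserver
//       FLAGS_config);                         // path of the trainer config file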

/**
 * ParameterUtil
 * Utility class for loading and saving parameters
 */
class ParameterUtil {
 public:
  /**
   * Ctor.
   *
   * @param config trainer config helper shared with the trainer
   * @param intconfig parameter-util configuration; ownership is transferred
   * @param gradientMachine gradient machine whose parameters are loaded/saved
   * @param parameterUpdater parameter updater, used for remote load/save
   */
  ParameterUtil(const std::shared_ptr<TrainerConfigHelper> &config,
                std::unique_ptr<ParameterUtilConfig> &&intconfig,
                const GradientMachinePtr &gradientMachine,
                const std::shared_ptr<ParameterUpdater> &parameterUpdater);
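
  // Illustrative sketch only: "trainerConfig", "gradientMachine",
  // "parameterUpdater" and "utilConfig" are placeholder objects assumed to be
  // owned by the surrounding trainer, not defined in this header.
  //
  //   auto paramUtil = std::make_unique<ParameterUtil>(
  //       trainerConfig, std::move(utilConfig), gradientMachine, parameterUpdater);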

  /// Load parameters from the saved parameter file of pass passId.
  /// If loadsave_parameters_in_pserver is set, some parameters MUST be
  /// loaded in the pserver, which is "remote".
  /// loadParameters can choose to load local/remote parameters, or both.
  bool loadParameters(int passId, bool local = true, bool remote = false);
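
  // Illustrative sketch only (assumed calling pattern): resume training from
  // the parameters saved for pass 2, loading both the locally stored part and
  // the part hosted on the parameter servers.
  //
  //   CHECK(paramUtil->loadParameters(2, true /*local*/, true /*remote*/));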

  /// Load parameters from the given directory.
  void loadParametersWithPath(const std::string &dir,
                              bool local = true,
                              bool remote = false);

  /// Save parameters to disk for pass passId.
  /// passInnerId counts the intra-pass saves: some users want to save
  /// parameters after processing a certain number of batches within a pass.
  /// passInnerId = 0 means no intra-pass save is needed.
  void saveParameters(int passId, int passInnerId = 0);
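
  // Illustrative sketch only ("batchId" and the 1000-batch period are
  // assumptions): saving every 1000 batches inside pass 3, plus the normal
  // end-of-pass save with passInnerId = 0.
  //
  //   if (batchId > 0 && batchId % 1000 == 0) {
  //     paramUtil->saveParameters(3, batchId / 1000);
  //   }
  //   ...
  //   paramUtil->saveParameters(3);  // end of pass 3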

  /// Save parameters for one pass; passInnerId > 0 means this is the
  /// passInnerId-th save within that pass.
  void saveParametersOnePass(int passId, int passInnerId = 0);

  /// Delete the parameters saved on disk for pass passId.
  void deleteParameters(int passId, int passInnerId = 0);

  /// Save the config to the given path.
  void saveConfigWithPath(const std::string &path);

  /**
   * Try to load parameters as specified by the trainer config.
   * @return true if parameters can be loaded from the trainer config
   *         (init_model_path is set, or start_pass > 0).
   */
  inline bool tryLoadParametersFromConfig() {
    auto &c = config_->getConfig();
    if (!c.init_model_path().empty()) {
      loadParametersWithPath(c.init_model_path());
      return true;
    } else if (c.start_pass() > 0) {
      CHECK(loadParameters(c.start_pass() - 1));
      return true;
    } else {
      return false;
    }
  }

 private:
  std::shared_ptr<TrainerConfigHelper> config_;
  std::unique_ptr<ParameterUtilConfig> intConfig_;
  GradientMachinePtr gserver_;
  std::shared_ptr<ParameterUpdater> pUpdater_;
};

}  //  namespace paddle