/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "Trainer.h"

#include <stdio.h>

#include <iomanip>
#include <iostream>
#include <limits>
#include <sstream>

#include <google/protobuf/text_format.h>

#include "paddle/utils/Common.h"
#include "paddle/utils/GlobalConstants.h"
#include "paddle/utils/PythonUtil.h"
#include "paddle/utils/Stat.h"
#include "paddle/utils/Util.h"

#include "RemoteParameterUpdater.h"
#include "TesterConfig.h"
#include "ThreadParameterUpdater.h"
#include "TrainerConfigHelper.h"
#include "paddle/gserver/gradientmachines/GradientMachineMode.h"
#include "paddle/gserver/gradientmachines/NeuralNetwork.h"
#include "paddle/gserver/layers/ValidationLayer.h"

DEFINE_string(config, "", "Trainer config file");

DEFINE_int32(test_period,
             0,
             "if 0, run a test on all test data at the end of "
             "each pass; otherwise, run a test on all test "
             "data every test_period batches");
DEFINE_bool(test_all_data_in_one_period,
            false,
            "This option is deprecated, since testing is always "
            "done on the whole test set");

DEFINE_bool(local, true, "Train in local mode or not");

DEFINE_int32(average_test_period,
             0,
             "Run a test on the averaged parameters every so"
             " many batches. MUST be divisible by FLAGS_log_period."
             " The default 0 means the averaged parameters are not tested");

DEFINE_int32(saving_period, 1, "Save parameters every so many passes");
DEFINE_int64(saving_period_by_batches,
             0,
             "Save parameters every so many batches in one pass");
DEFINE_string(save_dir, "", "Directory for saving model parameter");
DEFINE_int32(start_pass,
             0,
             "Start training from this pass. "
             "Parameters are loaded from the previous pass");
DEFINE_int32(test_pass, -1, "Load parameters starting from this pass to test");
DEFINE_int32(test_wait, 0, "Wait for the pass parameters if they do not exist");
DEFINE_bool(with_cost, true, "enable cost layer or not");
DEFINE_bool(distribute_test, false, "test in distributed mode");

DEFINE_int32(num_passes, 100, "train for so many passes");

DEFINE_string(config_args,
              "",
              "arguments passed to the config file. "
              "Format: key1=value1,key2=value2");

DEFINE_bool(save_only_one,
            false,
            "Save only the last pass's parameters, remove previous ones");

DEFINE_string(feat_file, "", "File name of extracted feature.");
DEFINE_string(predict_output_dir,
              "",
              "Directory that saves the predicted results of output layers");
DEFINE_string(model_list, "", "File that saves the model list for evaluation");
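
// Example invocation (illustrative only; the binary name, config file, and
// paths are assumptions, not part of this file):
//
//   paddle train --config=trainer_config.py \
//                --save_dir=./output \
//                --num_passes=30 \
//                --use_gpu=false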

namespace paddle {

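// Initialize the trainer: apply command-line flag overrides to the config,
// pick a trainer mode (Testing, Normal, SgdSparseCpuTraining, or a custom
// mode), set up the internal trainer, parameter utilities, data providers,
// evaluators and (when test data is available) the tester, and finally load
// or randomly initialize the parameters.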
void Trainer::init(const std::shared_ptr<TrainerConfigHelper>& config,
                   bool testing,
                   const std::shared_ptr<GradientMachine>& gradientMachine,
                   const std::shared_ptr<DataProvider>& dataProvider,
                   const std::shared_ptr<DataProvider>& testDataProvider) {
  this->stats_ = std::make_shared<TrainerStats>();

  config_ = config;

  config_->updateConfigFromFlags();

  testing_ = testing;

  // In testing, mode_ may be GradientMachine::kTesting or
  // GradientMachine::kSgdSparseCpuTraining.

  if (FLAGS_local) {
    CHECK(!FLAGS_loadsave_parameters_in_pserver)
        << "local and loadsave_parameters_in_pserver cannot both be true";
    if (config_->getOptConfig().use_sparse_remote_updater()) {
      config_->disableRemoteSparseUpdaterForEachParams();
      LOG(INFO) << "ignoring sparse_remote_update=true due to --local=true";
    }
  }
  if (FLAGS_loadsave_parameters_in_pserver) {
    CHECK(config_->getOptConfig().use_sparse_remote_updater())
        << "no parameters to load from pserver, please check network config";
  }
  if (testing && !FLAGS_loadsave_parameters_in_pserver) {
    if (config_->getOptConfig().use_sparse_remote_updater()) {
      config_->disableRemoteSparseUpdater();
      LOG(INFO) << "because parameters are loaded locally, "
                << "the tester ignores the sparse_remote_update flag";
    }
  }

  CHECK(TrainAlgorithm::isValid(config_->getOptConfig().algorithm()))
      << "invalid algorithm configuration: "
      << config_->getOptConfig().algorithm();

  bool useSparseUpdater = false;
  for (auto& paraConfig : config_->getModelConfig().parameters()) {
    if (paraConfig.sparse_update() || paraConfig.sparse_remote_update()) {
      useSparseUpdater = true;
    }
  }

  if (testing) {
    LOG(INFO) << "trainer: in testing mode";
    if (config_->getOptConfig().use_sparse_remote_updater() ||
        FLAGS_trainer_count > 1) {
      mode_ = GradientMachine::kSgdSparseCpuTraining;
      LOG(INFO) << "trainer mode: SgdSparseCpuTraining";
    } else {
      mode_ = GradientMachine::kTesting;
      LOG(INFO) << "trainer mode: Testing";
    }
  } else if (IGradientMachineMode::tryGetMode(
                 (int*)&mode_,
                 config_->getOptConfig().algorithm(),
                 FLAGS_trainer_count,
                 FLAGS_local,
                 FLAGS_use_gpu)) {
    LOG(INFO) << "Custom trainer mode.";
  } else if ((config_->getOptConfig().algorithm() == TrainAlgorithm::SGD ||
              config_->getOptConfig().algorithm() ==
                  TrainAlgorithm::AsyncSGD) &&
             useSparseUpdater) {
    mode_ = GradientMachine::kSgdSparseCpuTraining;
    LOG(INFO) << "trainer mode: SgdSparseCpuTraining";
  } else {
    mode_ = GradientMachine::kNormal;
    LOG(INFO) << "trainer mode: Normal";
  }

  // initialize trainer internal
  trainerInternal_.init(config_,
                        gradientMachine,
                        TrainerInternalConfig::createFromMode(mode_),
                        stats_,
                        testing);
  std::unique_ptr<ParameterUtilConfig> paramConfig(
      new ParameterUtilConfig(FLAGS_save_only_one,
                              FLAGS_saving_period,
                              FLAGS_loadsave_parameters_in_pserver,
                              FLAGS_config));

  paramUtil_.reset(
      new paddle::ParameterUtil(config_,
                                std::move(paramConfig),
                                trainerInternal_.getGradientMachine(),
                                trainerInternal_.getParameterUpdater()));

  bool gpuData =
      FLAGS_use_gpu && (!FLAGS_parallel_nn) &&
      (!IGradientMachineMode::dataMustInCpu(mode_, FLAGS_trainer_count));

  dataProvider_ = dataProvider;
  if (!dataProvider_ && config_->hasDataConfig() && !testing_) {
    dataProvider_.reset(DataProvider::create(*config_, *config_, gpuData));
  }
  if (!testDataProvider_) {
    // No evaluator_ if there is a testDataProvider but no dataProvider.
    evaluator_.reset(trainerInternal_.getGradientMachine()->makeEvaluator());
    currentEvaluator_.reset(
        trainerInternal_.getGradientMachine()->makeEvaluator());
    if (FLAGS_average_test_period > 0 && FLAGS_trainer_id == 0 &&
        config_->getOptConfig().average_window() > 0) {
      CHECK_EQ(FLAGS_average_test_period % FLAGS_log_period, 0)
          << "FLAGS_average_test_period must be divisible by FLAGS_log_period";
      averageEvaluator_.reset(
          trainerInternal_.getGradientMachine()->makeEvaluator());
    }
  }

  testDataProvider_ = testDataProvider;
  if (!testDataProvider_ && config_->hasTestDataConfig()) {
    testDataProvider_.reset(
        DataProvider::create(config_->getTestDataConfig(), *config_, gpuData));
  }
  if (testDataProvider_) {
    createTester();
  }

  if (!testing &&
      (trainerInternal_.getGradientMachine()->hasStaticParameters())) {
    CHECK(!FLAGS_loadsave_parameters_in_pserver)
        << "is_static and loadsave_parameters_in_pserver cannot both be true";
  }
  if (testing) {
    // will load per pass for tester
  } else if (paramUtil_->tryLoadParametersFromConfig()) {
    // load from config already.
  } else {
    trainerInternal_.getGradientMachine()->randParameters();
  }

  // Only non-static parameters need to be updated
  std::vector<ParameterPtr>& parameters =
      trainerInternal_.getGradientMachine()->getNonStaticParameters();
  if (trainerInternal_.getParameterUpdater()) {
    trainerInternal_.getParameterUpdater()->init(parameters);

    if (FLAGS_loadsave_parameters_in_pserver && FLAGS_trainer_id == 0) {
      if (testing) {
        // will load per pass for tester
      } else if (!config_->getConfig().init_model_path().empty() &&
                 (FLAGS_local || FLAGS_trainer_id == 0)) {
        paramUtil_->loadParametersWithPath(
            config_->getConfig().init_model_path(),
            false /*local*/,
            true /*remote*/);
      } else if (config_->getConfig().start_pass() > 0 &&
                 (FLAGS_local || FLAGS_trainer_id == 0)) {
        CHECK(paramUtil_->loadParameters(config_->getConfig().start_pass() - 1,
                                         false /*local*/,
                                         true /*remote*/));
      } else {
        trainerInternal_.getParameterUpdater()->randParametersRemote();
      }
    }
  }

  // set the current evaluator and the pass evaluator
  trainerInternal_.setCurrentEvaluator(currentEvaluator_.get());
  trainerInternal_.setEvaluator(evaluator_.get());
}

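// A minimal driver sketch (illustrative; the actual entry point lives
// elsewhere and its exact calls are an assumption):
//
//   Trainer trainer;
//   trainer.init(TrainerConfigHelper::createFromFlagConfig());
//   trainer.train(FLAGS_num_passes);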
void Trainer::train(size_t numPasses) {
  startTrain();
  for (size_t i = 0; i < numPasses; ++i) {
    if (IGradientMachineMode::trainWholeDataInOneBatch(mode_)) {
      trainOnePassBatch(config_->getConfig().start_pass() + i);
    } else {
      trainOnePass();
    }
    if (i < numPasses - 1) {
      dataProvider_->reset();
    }
  }

  finishTrain();
}

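// Build a random perturbation direction d for the gradient check below: each
// component is drawn uniformly from [-1, 1]; if the gradient is nonzero, d is
// rescaled and shifted toward it, d <- 0.5 * sqrt(|grad|^2 / |d|^2) * d + grad.
// Returns the analytic directional derivative delta = grad . d.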
static double genPerturbation(real* d, real* grad, size_t dim) {
  auto& reng = ThreadLocalRandomEngine::get();
  std::uniform_real_distribution<double> dist(-1, 1);
  double gradNorm = 0, dNorm = 0;
  for (size_t i = 0; i < dim; ++i) {
    d[i] = dist(reng);
    dNorm += d[i] * d[i];
    gradNorm += grad[i] * grad[i];
  }
  if (gradNorm > 0) {
    real s = 0.5 * sqrt(gradNorm / dNorm);
    for (size_t i = 0; i < dim; ++i) {
      d[i] = s * d[i] + grad[i];
    }
  }
  double delta = 0;
  for (size_t i = 0; i < dim; ++i) {
    delta += grad[i] * d[i];
  }
  return delta;
}

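// Numerical gradient check on one data batch. For every non-static parameter
// a perturbation direction d is drawn (genPerturbation above), the step is
// scaled so that step * delta / cost == FLAGS_checkgrad_eps, and the central
// finite difference
//   true_delta = (cost(w + step * d) - cost(w - step * d)) / 2
// is compared against the analytic prediction delta = step * (grad . d).
// The relative deviation is logged per parameter (entries above 0.01 are
// marked with "***") and the maximum deviation is returned.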
real Trainer::checkGradient() {
  trainerInternal_.getGradientMachine()->start();
  std::vector<ParameterPtr>& parameters =
      trainerInternal_.getGradientMachine()->getNonStaticParameters();
  DataBatch dataBatch;
  int32_t batchSize = config_->getOptConfig().batch_size();

  dataProvider_->getNextBatch(batchSize, &dataBatch);

  CHECK(dataBatch.getSize()) << "No data from data provider";
  std::vector<Argument>& inArgs = dataBatch.getStreams();
  std::vector<Argument> outArgs;

  trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
  real cost = Argument::sum(outArgs);
  LOG(INFO) << "original cost=" << cost;
  trainerInternal_.getGradientMachine()->backward();

  real maxDiff = 0;
  char fill = ' ';
  for (auto& parameter : parameters) {
    CpuVector oldPara(parameter->getSize());
    CpuVector newPara(parameter->getSize());
    oldPara.copyFrom(*parameter->getBuf(PARAMETER_VALUE));
    real* newp = newPara.getData();
    real* oldp = oldPara.getData();
    CpuVector cpuGrad(*parameter->getBuf(PARAMETER_GRADIENT));
    real* grad = cpuGrad.getData();
    size_t dim = parameter->getSize();
    std::vector<real> d(dim);

    double delta = genPerturbation(d.data(), grad, dim);

    // use a step such that delta / cost is FLAGS_checkgrad_eps
    real step =
        (delta != 0) ? cost / delta * FLAGS_checkgrad_eps : FLAGS_checkgrad_eps;
    delta *= step;
    for (size_t i = 0; i < dim; ++i) {
      newp[i] = oldp[i] + step * d[i];
    }

    parameter->getBuf(PARAMETER_VALUE)->copyFrom(newPara);
    parameter->setValueUpdated();
    trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
    real newCost1 = Argument::sum(outArgs);

    for (size_t i = 0; i < dim; ++i) {
      newp[i] = oldp[i] - step * d[i];
    }

    parameter->getBuf(PARAMETER_VALUE)->copyFrom(newPara);
    parameter->setValueUpdated();
    trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
    real newCost2 = Argument::sum(outArgs);

    real trueDelta = 0.5 * (newCost1 - newCost2);
    real diff = (1e-20 + trueDelta) / (1e-20 + delta) - 1;
    LOG(INFO) << std::setiosflags(std::ios::left) << std::setfill(fill)
              << std::setw(20) << parameter->getName()
              << "step=" << std::setw(15) << step << "cost1=" << std::setw(10)
              << newCost1 << "cost2=" << std::setw(10) << newCost2
              << "true_delta=" << std::setw(15) << trueDelta
              << "analytic_delta=" << std::setw(15) << delta << "diff=" << diff
              << (std::abs(diff) > 0.01 ? " ***" : "");

    maxDiff = std::max(maxDiff, std::abs(diff));

    // restore parameter
    parameter->getBuf(PARAMETER_VALUE)->copyFrom(oldPara);
    parameter->setValueUpdated();

    fill = (fill == ' ') ? '.' : ' ';
  }
  return maxDiff;
}

void Trainer::startTrain() {
  trainPassContext_.passId = config_->getConfig().start_pass();
  srand(config_->getConfig().start_pass() + 1);
  if (dataProvider_) {
    dataProvider_->reset();
  }

  trainerInternal_.getGradientMachine()->start();
}

void Trainer::finishTrain() { trainerInternal_.getGradientMachine()->finish(); }

void Trainer::startTrainPass() {
  stats_->reset();
  trainPassContext_.batchId = 0;
  trainPassContext_.avgTestCost = 0;
  trainPassContext_.numAvgTests = 0;
  trainPassContext_.passInnerId = 1;

  trainerInternal_.getParameterUpdater()->startPass();
  evaluator_->start();
  if (FLAGS_prev_batch_state) {
    trainerInternal_.getGradientMachine()->resetState();
    trainerInternal_.getGradientMachine()->getState(testState_);
  }
}

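// Train on a single data batch. When --average_test_period is set, the last
// --log_period batches of each averaging window are additionally forwarded
// with the averaged parameters temporarily applied (updater apply/restore),
// so the averaged model is evaluated without disturbing training. The method
// also handles periodic timing logs, periodic testing, and intra-pass
// parameter saving (--saving_period_by_batches).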
void Trainer::trainOneDataBatch(DataBatch& dataBatch) {
  int num = dataBatch.getSize();
  if (averageEvaluator_) {
    int64_t mod = trainPassContext_.batchId % FLAGS_average_test_period;
    if (mod >= FLAGS_average_test_period - FLAGS_log_period) {
      if (mod == FLAGS_average_test_period - FLAGS_log_period) {
        averageEvaluator_->start();
      }
      trainerInternal_.getParameterUpdater()->apply();
      if (FLAGS_prev_batch_state) {
        trainerInternal_.getGradientMachine()->getState(trainState_);
      }
      trainPassContext_.avgTestCost += tester_->forwardOneBatch(
          dataBatch, averageEvaluator_.get(), &forwardOutput_);
      if (FLAGS_prev_batch_state) {
        trainerInternal_.getGradientMachine()->setState(trainState_);
      }
      trainPassContext_.numAvgTests += num;
      trainerInternal_.getParameterUpdater()->restore();
    }
  }
  {
    REGISTER_TIMER("TrainBatch");
    trainerInternal_.trainOneBatch(
        trainPassContext_.batchId, dataBatch, &forwardOutput_);
  }

  if (averageEvaluator_ &&
      trainPassContext_.batchId % FLAGS_average_test_period ==
          FLAGS_average_test_period - 1) {
    averageEvaluator_->finish();
    LOG(INFO) << " Averaged parameter:"
              << " cost="
              << trainPassContext_.avgTestCost / trainPassContext_.numAvgTests
              << " Eval: " << *averageEvaluator_;
    trainPassContext_.numAvgTests = 0;
    trainPassContext_.avgTestCost = 0;
  }

  ++trainPassContext_.batchId;

  if (trainPassContext_.batchId % FLAGS_log_period == 0) {
    FOR_TIMING(globalStat.setThreadInfo(true));
    FOR_TIMING(globalStat.printAllStatus());
    FOR_TIMING(globalStat.reset());
  }

  if (testDataProvider_ && FLAGS_test_period > 0 &&
      trainPassContext_.batchId % FLAGS_test_period == 0) {
    tester_->testOnePeriod();
  }

  if (FLAGS_saving_period_by_batches > 0 &&
      trainPassContext_.batchId >
          FLAGS_saving_period_by_batches * trainPassContext_.passInnerId &&
      0 == FLAGS_trainer_id) {
    trainerInternal_.getParameterUpdater()->catchUpWith();
    if (testDataProvider_) {
      tester_->testOnePeriod();
    }
    paramUtil_->saveParametersOnePass(trainPassContext_.passId,
                                      trainPassContext_.passInnerId);
    ++trainPassContext_.passInnerId;
  }
}

void Trainer::finishTrainPass() {
  if (trainPassContext_.batchId == 0) {
    // This means no more data from DataProvider
    return;
  }

  trainerInternal_.finishTrainPass(trainPassContext_.passId,
                                   trainPassContext_.batchId);

  FOR_TIMING(globalStat.setThreadInfo(true));
  FOR_TIMING(globalStat.printAllStatus());
  FOR_TIMING(globalStat.reset());

  if (testDataProvider_) {
    tester_->testOnePeriod();
  }

  if (trainPassContext_.passId % FLAGS_saving_period == 0 &&
      FLAGS_trainer_id == 0) {
    paramUtil_->saveParametersOnePass(trainPassContext_.passId);
  }
  ++trainPassContext_.passId;
}

void Trainer::trainOnePass() {
  startTrainPass();
  size_t batchSize = config_->getOptConfig().batch_size();
  while (true) {
    DataBatch dataBatch;

    int num = 0;
    {
      REGISTER_TIMER("getTrainBatch");
      num = dataProvider_->getNextBatch(batchSize, &dataBatch);
    }
    if (num == 0) break;
    CHECK_EQ(num, dataBatch.getSize());
    trainOneDataBatch(dataBatch);
  }

  finishTrainPass();
}

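// Train one pass as a single batch. This is used when the gradient machine
// mode reports trainWholeDataInOneBatch(); the machine consumes the whole
// pass internally, so forwardBackward() is invoked once with empty input
// arguments, and parameters are saved only if the updater accepts the pass.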
void Trainer::trainOnePassBatch(int passId) {
  this->stats_->reset();

  trainerInternal_.getParameterUpdater()->startPass();
  const std::vector<Argument> inArgs;
  {
    REGISTER_TIMER("onePass");
    trainerInternal_.getGradientMachine()->forwardBackward(
        inArgs, nullptr, PASS_TRAIN, nullptr);
  }

  real cost = .0;
  int64_t num = 0;
  trainerInternal_.getGradientMachine()->getStats(cost, num);
  *stats_ += {num, cost};

  trainerInternal_.getGradientMachine()->onPassEnd();

  bool accepted = trainerInternal_.getParameterUpdater()->finishPass();

  globalStat.setThreadInfo(true);
  globalStat.printAllStatus();
  globalStat.reset();

  LOG(INFO) << " Pass=" << passId
            << " AcceptedPass=" << (accepted ? acceptedPassId_ : -1)
            << stats_->getStats(false /*withCurrentCost*/);

  if (accepted) {
    if (acceptedPassId_ % FLAGS_saving_period == 0 && FLAGS_trainer_id == 0) {
      paramUtil_->saveParameters(acceptedPassId_);
    }
    acceptedPassId_++;
    if (FLAGS_save_only_one && acceptedPassId_ >= FLAGS_saving_period) {
      paramUtil_->deleteParameters(acceptedPassId_ - FLAGS_saving_period);
    }
  }
}

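// Load `value` into the model parameters, run one forward/backward pass over
// dataBatch, and write the resulting gradients into `gradient`; returns the
// cost. Both vectors must be laid out as the concatenation of all parameters
// in order (enforced below via the running offset checks).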
real Trainer::calcGradient(const DataBatch& dataBatch,
                           const Vector& value,
                           Vector& gradient) {
  CHECK_EQ(value.getSize(), gradient.getSize());
  std::vector<ParameterPtr>& parameters =
      trainerInternal_.getGradientMachine()->getParameters();

  clearGradient();

  size_t offset = 0;
  size_t valueSize = value.getSize();

  for (auto& para : parameters) {
    CHECK_LE(offset + para->getSize(), valueSize);
    VectorPtr val =
        Vector::create(para->getSize(), value.getMemoryHandle(), offset);
    para->getBuf(PARAMETER_VALUE)->copyFrom(*val);
    para->setValueUpdated();
    offset += para->getSize();
  }

  CHECK_EQ(offset, valueSize);

  std::vector<Argument> inArgs = dataBatch.getStreams();
  std::vector<Argument> outArgs;

  trainerInternal_.getGradientMachine()->forwardBackward(
      inArgs, &outArgs, PASS_TRAIN);
  real cost = Argument::sum(outArgs);

  offset = 0;
  for (auto& para : parameters) {
    VectorPtr grad =
        Vector::create(para->getSize(), gradient.getMemoryHandle(), offset);
    if (para->getBuf(PARAMETER_GRADIENT)) {
      grad->copyFrom(*para->getBuf(PARAMETER_GRADIENT));
    }
    offset += para->getSize();
  }

  return cost;
}

void Trainer::clearGradient() {
  std::vector<ParameterPtr>& parameters =
      trainerInternal_.getGradientMachine()->getNonStaticParameters();
  for (auto& parameter : parameters) {
    parameter->clearGradient();
  }
}

int Trainer::getBatchSize() { return config_->getOptConfig().batch_size(); }

void Trainer::createTester() {
  tester_.reset(new paddle::Tester(config_,
                                   createTesterConfig(),
                                   trainerInternal_.getGradientMachine(),
                                   trainerInternal_.getParameterUpdater(),
                                   testDataProvider_));
}

void Trainer::test() { tester_->test(); }

std::unique_ptr<TesterConfig> Trainer::createTesterConfig() {
  TesterConfig* conf = new TesterConfig;
  if (FLAGS_test_period) {
    LOG(WARNING) << "The meaning of --test_period has changed: "
                 << "if 0, run a test on all test data at the end of "
                 << "each pass; otherwise, run a test on all test "
                 << "data every test_period batches";
  }
  if (FLAGS_test_all_data_in_one_period) {
    LOG(WARNING) << "--test_all_data_in_one_period is deprecated, since "
                 << "testing is always done on the whole test set";
  }
  conf->testPeriod = FLAGS_test_period;
  conf->prevBatchState = FLAGS_prev_batch_state;
  conf->logPeriod = FLAGS_log_period;
  conf->loadsaveParametersInPserver = FLAGS_loadsave_parameters_in_pserver;
  conf->featFile = FLAGS_feat_file;
  conf->predictOutputDir = FLAGS_predict_output_dir;
  conf->trainerId = FLAGS_trainer_id;
  conf->distributeTest = FLAGS_distribute_test;
  conf->config = FLAGS_config;
  conf->modelList = FLAGS_model_list;
  conf->testPass = FLAGS_test_pass;
  conf->numPasses = FLAGS_num_passes;
  conf->savingPeriod = FLAGS_saving_period;
  conf->testWait = FLAGS_test_wait;
  conf->initModelPath = FLAGS_init_model_path;
  conf->saveOnlyOne = FLAGS_save_only_one;
  conf->testing = testing_;
  conf->mode = mode_;
  conf->trainState = &trainState_;
  conf->testState = &testState_;
  return std::unique_ptr<TesterConfig>(conf);
}

ParameterUtil* Trainer::getParameterUtilPtr() { return paramUtil_.get(); }
}  // namespace paddle