From 8bc604571fea9283434b5fb47f29d1bff844e6bc Mon Sep 17 00:00:00 2001
From: Sylwester Fraczek
Date: Thu, 21 Feb 2019 11:16:38 +0100
Subject: [PATCH] fix typo seriazlized->serialized

---
 paddle/fluid/inference/api/analysis_predictor.cc        | 2 +-
 paddle/fluid/inference/api/analysis_predictor.h         | 2 +-
 paddle/fluid/inference/api/analysis_predictor_tester.cc | 4 ++--
 paddle/fluid/inference/api/paddle_api.h                 | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index 712e010db..cd6e95877 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -726,7 +726,7 @@ bool AnalysisPredictor::need_collect_var_shapes_for_memory_optim() {
   return need;
 }
 
-std::string AnalysisPredictor::GetSeriazlizedProgram() const {
+std::string AnalysisPredictor::GetSerializedProgram() const {
   return inference_program_->Proto()->SerializeAsString();
 }
 
diff --git a/paddle/fluid/inference/api/analysis_predictor.h b/paddle/fluid/inference/api/analysis_predictor.h
index 014df4ee8..d5445c58e 100644
--- a/paddle/fluid/inference/api/analysis_predictor.h
+++ b/paddle/fluid/inference/api/analysis_predictor.h
@@ -74,7 +74,7 @@ class AnalysisPredictor : public PaddlePredictor {
 
   void SetMkldnnThreadID(int tid);
 
-  std::string GetSeriazlizedProgram() const override;
+  std::string GetSerializedProgram() const override;
 
  protected:
   // For memory optimization.
diff --git a/paddle/fluid/inference/api/analysis_predictor_tester.cc b/paddle/fluid/inference/api/analysis_predictor_tester.cc
index 002ba90e4..6696839b5 100644
--- a/paddle/fluid/inference/api/analysis_predictor_tester.cc
+++ b/paddle/fluid/inference/api/analysis_predictor_tester.cc
@@ -214,8 +214,8 @@ TEST(AnalysisPredictor, memory_optim) {
   {
     // The first predictor help to cache the memory optimize strategy.
     auto predictor = CreatePaddlePredictor(config);
-    LOG(INFO) << "serialized program: " << predictor->GetSeriazlizedProgram();
-    ASSERT_FALSE(predictor->GetSeriazlizedProgram().empty());
+    LOG(INFO) << "serialized program: " << predictor->GetSerializedProgram();
+    ASSERT_FALSE(predictor->GetSerializedProgram().empty());
 
     // Run several times to check the parameters are not reused by mistake.
     for (int i = 0; i < 5; i++) {
diff --git a/paddle/fluid/inference/api/paddle_api.h b/paddle/fluid/inference/api/paddle_api.h
index f90a74b91..c9a45b4aa 100644
--- a/paddle/fluid/inference/api/paddle_api.h
+++ b/paddle/fluid/inference/api/paddle_api.h
@@ -248,7 +248,7 @@ class PaddlePredictor {
   /** \brief Get the serialized model program that executes in inference phase.
    * Its data type is ProgramDesc, which is a protobuf message.
    */
-  virtual std::string GetSeriazlizedProgram() const {
+  virtual std::string GetSerializedProgram() const {
     assert(false);  // Force raise error.
     return "NotImplemented";
   }
--
GitLab
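
For context (not part of the patch itself), a minimal usage sketch of the renamed API follows. Only GetSerializedProgram and CreatePaddlePredictor appear in the diff above; paddle::AnalysisConfig, AnalysisConfig::SetModel, the umbrella header paddle_inference_api.h, and the model path are assumptions based on the same-era Paddle inference API, not something this patch establishes.

    // Hedged sketch: dump the size of the serialized inference ProgramDesc.
    // AnalysisConfig / SetModel / header path are assumptions; the model
    // directory below is hypothetical.
    #include <iostream>
    #include <string>

    #include "paddle/fluid/inference/api/paddle_inference_api.h"

    int main() {
      paddle::AnalysisConfig config;
      config.SetModel("/path/to/model_dir");  // hypothetical model location

      auto predictor = paddle::CreatePaddlePredictor(config);

      // GetSerializedProgram() (the corrected name) returns the inference
      // ProgramDesc serialized as a protobuf string; we only report its size.
      std::string program = predictor->GetSerializedProgram();
      std::cout << "serialized program size: " << program.size() << " bytes\n";
      return 0;
    }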