model_parser_test.cc
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lite/model_parser/model_parser.h"
#include <gflags/gflags.h>
#include <gtest/gtest.h>
#include "lite/core/scope.h"

DEFINE_string(model_dir, "", "Directory of the inference model used by these tests.");
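
// Example invocation (the test binary name depends on the build target):
//   ./model_parser_test --model_dir=/path/to/inference_model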

namespace paddle {
namespace lite {

TEST(ModelParser, LoadProgram) {
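  // Only checks that the serialized ProgramDesc (__model__) can be parsed.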
  CHECK(!FLAGS_model_dir.empty());
  auto program = LoadProgram(FLAGS_model_dir + "/__model__");
}

TEST(ModelParser, LoadParam) {
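  // Load a single persistable parameter (fc_0.b_0) into the scope and log the tensor.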
  Scope scope;
  auto* v = scope.Var("xxx");
  LoadParam(FLAGS_model_dir + "/fc_0.b_0", v);
  const auto& t = v->Get<Tensor>();
  LOG(INFO) << "loaded\n";
  LOG(INFO) << t;
}

TEST(ModelParser, LoadModelPb) {
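  // Load the complete protobuf-format model: the program plus its parameters.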
  CHECK(!FLAGS_model_dir.empty());
  cpp::ProgramDesc prog;
  Scope scope;
  LoadModelPb(FLAGS_model_dir, &scope, &prog);
}

TEST(ModelParser, SaveModelPb) {
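  // Load the protobuf model and re-save it to <model_dir>.saved.pb.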
  CHECK(!FLAGS_model_dir.empty());
  cpp::ProgramDesc prog;
  Scope scope;
  LoadModelPb(FLAGS_model_dir, &scope, &prog);
  const std::string save_pb_model_path = FLAGS_model_dir + ".saved.pb";
  SaveModelPb(save_pb_model_path, scope, prog);
}

TEST(ModelParser, SaveParamNaive) {
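  // Build a small LoD tensor with known values and write it to ./fc_0.w
  // in the naive format.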
  Scope scope;
  auto* tensor = scope.Var("xxx")->GetMutable<lite::Tensor>();
  auto& lod = *tensor->mutable_lod();
  lod.resize(2);
  lod[0] = {1, 2, 3};
  lod[1] = {4, 5};
  std::vector<int64_t> dim({1, 2, 5});
  tensor->Resize(lite::DDim(dim));
  auto* data = tensor->mutable_data<float>();
  size_t size = tensor->data_size();
  for (size_t i = 0; i < size; ++i) {
    data[i] = i / static_cast<float>(size);
  }
  SaveParamNaive("./fc_0.w", scope, "xxx");
}

TEST(ModelParser, LoadParamNaive) {
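  // Read ./fc_0.w back and verify that dims, LoD, and data match what
  // SaveParamNaive above wrote, so this test relies on it running first.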
  Scope scope;
  LoadParamNaive("./fc_0.w", &scope, "xxx");
  auto& tensor = scope.Var("xxx")->Get<lite::Tensor>();
  std::vector<int64_t> bg_dim({1, 2, 5});
  size_t size = 10;
  std::vector<std::vector<uint64_t>> bg_lod({{1, 2, 3}, {4, 5}});
  std::vector<float> bg_data(size);
  for (size_t i = 0; i < size; ++i) {
    bg_data[i] = i / static_cast<float>(size);
  }

  ASSERT_EQ(bg_dim, tensor.dims().Vectorize());
  ASSERT_EQ(bg_lod, tensor.lod());
  ASSERT_EQ(tensor.data_size(), size);
  auto* data = tensor.data<float>();
  for (size_t i = 0; i < size; ++i) {
    EXPECT_NEAR(bg_data[i], data[i], 1e-6);
  }
}

TEST(ModelParser, SaveModelNaive) {
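  // Convert the protobuf model to the naive format (<model_dir>.saved.naive).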
  CHECK(!FLAGS_model_dir.empty());
  cpp::ProgramDesc prog;
  Scope scope;
  LoadModelPb(FLAGS_model_dir, &scope, &prog);
  const std::string save_pb_model_path = FLAGS_model_dir + ".saved.naive";
  SaveModelNaive(save_pb_model_path, scope, prog);
}

TEST(ModelParser, LoadModelNaive) {
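  // Reload the naive-format model produced by SaveModelNaive above.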
  CHECK(!FLAGS_model_dir.empty());
  cpp::ProgramDesc prog;
  Scope scope;
  const std::string model_path = FLAGS_model_dir + ".saved.naive";
  LoadModelNaive(model_path, &scope, &prog);
}

}  // namespace lite
}  // namespace paddle