// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <algorithm>
#include <memory>
#include <mutex>
#include <numeric>
#include <unordered_map>
#include <vector>

#include "paddle/phi/common/data_type.h"
#include "paddle/phi/kernels/autotune/cache_base.h"

namespace phi {
namespace autotune {

struct ConvAutoTuneResult {
  ConvAutoTuneResult() {}
  ConvAutoTuneResult(int64_t a, size_t size, bool search)
      : algo(a), workspace_size(size), exhaustive_search(search) {}

  int64_t algo = 0;
  size_t workspace_size = 0;
  bool exhaustive_search = false;
};

size_t TransposeKey(const std::vector<int64_t>& x_dims,
                    const std::vector<int32_t>& perm,
                    phi::DataType dtype);

enum class AlgorithmType {
  kConvForward = 1,
  kConvBackwardData = 2,
  kConvBackwardFilter = 3,
  kTranspose = 4,
  kAlgorithmCount = 5
};

// AlgorithmsConfigKey -> AlgorithmsID
// TODO(hong): use cudnnConvolutionFwdAlgo_t
using AlgorithmsCacheMap = AlgorithmsCache<size_t, int64_t>;
// AlgorithmType -> AlgorithmsCache
using AlgorithmsTypeMap = std::unordered_map<int64_t, AlgorithmsCacheMap>;
using ConvAlgorithmsCacheMap = ConvAlgorithmsCache<ConvAutoTuneResult>;
using ConvAlgorithmsTypeMap =
    std::unordered_map<int64_t, ConvAlgorithmsCacheMap>;

class AutoTuneCache {
 public:
  static AutoTuneCache& Instance() {
    static AutoTuneCache autotune_cache;
    return autotune_cache;
  }

  AlgorithmsCacheMap& Get(const AlgorithmType& algo_type) {
    return auto_tune_map_[static_cast<int64_t>(algo_type)];
  }

  ConvAlgorithmsCacheMap& GetConv(const AlgorithmType& algo_type) {
    return conv_auto_tune_map_[static_cast<int64_t>(algo_type)];
  }

  AlgorithmsCacheMap& GetTranspose() { return Get(AlgorithmType::kTranspose); }

  void Clean() {
    for (auto& v : auto_tune_map_) {
      v.second.Clean();
    }

    for (auto& v : conv_auto_tune_map_) {
      v.second.Clean();
    }
  }

  void UpdateStatus();

  // The total number of cached configs.
  int64_t Size() const { return total_size_; }

  int64_t CacheHits() const { return total_cache_hits_; }

  int64_t CacheMisses() const { return total_cache_misses_; }

  float CacheHitRate() const {
    float total_cache_hit_rate = 0.;
    int64_t total_num_accesses = total_cache_hits_ + total_cache_misses_;
    if (total_num_accesses != 0) {
      total_cache_hit_rate = static_cast<float>(total_cache_hits_) /
                             static_cast<float>(total_num_accesses);
    }
    return total_cache_hit_rate;
  }

 private:
  AutoTuneCache() : autotune_cache_mutex_(new std::mutex()) {
    for (int i = 1; i < static_cast<int>(AlgorithmType::kAlgorithmCount); ++i) {
      Register(static_cast<AlgorithmType>(i));
    }
  }

  void Register(const AlgorithmType& algo_type) {
    std::lock_guard<std::mutex> lock(*autotune_cache_mutex_);
    if (algo_type == AlgorithmType::kConvForward ||
        algo_type == AlgorithmType::kConvBackwardData ||
        algo_type == AlgorithmType::kConvBackwardFilter) {
      int64_t key = static_cast<int64_t>(algo_type);
      if (conv_auto_tune_map_.find(key) == conv_auto_tune_map_.end()) {
        ConvAlgorithmsCacheMap cache;
        conv_auto_tune_map_[key] = cache;
      }
    } else {
      int64_t key = static_cast<int64_t>(algo_type);
      if (auto_tune_map_.find(key) == auto_tune_map_.end()) {
        AlgorithmsCacheMap cache;
        auto_tune_map_[key] = cache;
      }
    }
  }

  AlgorithmsTypeMap auto_tune_map_;
  ConvAlgorithmsTypeMap conv_auto_tune_map_;
  std::shared_ptr<std::mutex> autotune_cache_mutex_;
  int64_t total_cache_hits_{0};
  int64_t total_cache_misses_{0};
  int64_t total_size_{0};
};
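
// Usage sketch (illustrative only, not part of this header's API): a
// transpose kernel would typically hash its configuration with TransposeKey,
// probe the per-type cache, and only run a search on a miss. The Find/Get/Set
// calls below assume the interface of AlgorithmsCache from cache_base.h, and
// SearchBestAlgorithm is a hypothetical placeholder for the miss path.
//
//   auto& cache = AutoTuneCache::Instance().GetTranspose();
//   size_t key = TransposeKey(x_dims, perm, dtype);
//   int64_t algo;
//   if (cache.Find(key)) {
//     algo = cache.Get(key);            // cache hit: reuse the tuned result
//   } else {
//     algo = SearchBestAlgorithm(...);  // hypothetical exhaustive search
//     cache.Set(key, algo);             // store for subsequent runs
//   }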

}  // namespace autotune
}  // namespace phi