/*
 * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
#ifndef SHARE_VM_MEMORY_METASPACE_HPP
#define SHARE_VM_MEMORY_METASPACE_HPP

#include "memory/allocation.hpp"
#include "memory/memRegion.hpp"
#include "runtime/virtualspace.hpp"
#include "utilities/exceptions.hpp"

// Metaspace
//
// Metaspaces are Arenas for the VM's metadata.
// They are allocated one per class loader object, and one for the null
// bootstrap class loader.
// Eventually the bootstrap loader's metaspace will have a read-only section
// and a read-write section: written when DumpSharedSpaces is set and read
// when UseSharedSpaces is set.
//
//    block X ---+       +-------------------+
//               |       |  Virtualspace     |
//               |       |                   |
//               |       |                   |
//               |       |-------------------|
//               |       || Chunk            |
//               |       ||                  |
//               |       ||----------        |
//               +------>||| block 0 |       |
//                       ||----------        |
//                       ||| block 1 |       |
//                       ||----------        |
//                       ||                  |
//                       |-------------------|
//                       |                   |
//                       |                   |
//                       +-------------------+
//
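// Roughly, in the diagram above "Virtualspace" corresponds to an entry in the
// VirtualSpaceList, "Chunk" to a Metachunk handed out by a SpaceManager, and
// "block N" to the Metablocks returned to callers (see the forward
// declarations and comments below).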

class ClassLoaderData;
class Metablock;
class MetaWord;
class Mutex;
class outputStream;
class SpaceManager;

// Each Metaspace has a SpaceManager and all allocations are done by the
// SpaceManager.  Allocations are made out of the current Metachunk.  When
// the current Metachunk is exhausted, the SpaceManager gets a new one from
// the current VirtualSpace.  When the VirtualSpace is exhausted, the
// SpaceManager gets a new one.  The SpaceManager also manages freelists of
// available Chunks.
//
// Currently the space manager maintains the list of
// virtual spaces and the list of chunks in use.  Its
// allocate() method returns a block for use as a
// quantum of metadata.
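//
// An illustrative sketch of the allocation fallback chain described above
// (pseudocode in a comment, not code from this file; the helper names are
// approximate):
//
//   MetaWord* result = current_chunk->allocate(word_size);   // fast path
//   if (result == NULL) {
//     // current Metachunk exhausted: get another chunk, either from the
//     // freelists of available Chunks or from the current VirtualSpace
//     Metachunk* chunk = get_new_chunk(word_size);
//     // if the current VirtualSpace is also exhausted, the VirtualSpaceList
//     // is grown with a new VirtualSpace before a chunk can be handed out
//     result = chunk->allocate(word_size);
//   }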

class VirtualSpaceList;

class Metaspace : public CHeapObj<mtClass> {
  friend class VMStructs;
  friend class SpaceManager;
  friend class VM_CollectForMetadataAllocation;
  friend class MetaspaceGC;
  friend class MetaspaceAux;

 public:
  enum MetadataType {ClassType = 0,
                     NonClassType = ClassType + 1,
                     MetadataTypeCount = ClassType + 2
  };
  enum MetaspaceType {
    StandardMetaspaceType,
    BootMetaspaceType,
    ROMetaspaceType,
    ReadWriteMetaspaceType,
    AnonymousMetaspaceType,
    ReflectionMetaspaceType
  };

 private:
  void initialize(Mutex* lock, MetaspaceType type);

  // Align up the word size to the allocation word size
  static size_t align_word_size_up(size_t);

  // Aligned size of the class metaspace.
  static size_t _class_metaspace_size;

  static size_t class_metaspace_size() {
    return _class_metaspace_size;
  }
  static void set_class_metaspace_size(size_t metaspace_size) {
    _class_metaspace_size = metaspace_size;
  }

  static size_t _first_chunk_word_size;
  static size_t _first_class_chunk_word_size;

  SpaceManager* _vsm;
  SpaceManager* vsm() const { return _vsm; }

  SpaceManager* _class_vsm;
  SpaceManager* class_vsm() const { return _class_vsm; }

  // Allocate space for metadata of type mdtype. This is space
  // within a Metachunk and is used by
  //   allocate(ClassLoaderData*, size_t, bool, MetaspaceObj::Type, TRAPS)
  // which returns a Metablock.
  MetaWord* allocate(size_t word_size, MetadataType mdtype);

  // Virtual Space lists for both classes and other metadata
  static VirtualSpaceList* _space_list;
  static VirtualSpaceList* _class_space_list;

  static VirtualSpaceList* space_list()       { return _space_list; }
  static VirtualSpaceList* class_space_list() { return _class_space_list; }

  // This is used by DumpSharedSpaces only, where only _vsm is used. So we will
  // maintain a single list for now.
  void record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size);

#ifdef _LP64
  static void set_narrow_klass_base_and_shift(address metaspace_base, address cds_base);

  // Returns true if CDS can be used with the metaspace allocated at the specified address.
  static bool can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base);

  static void allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base);

  static void initialize_class_space(ReservedSpace rs);
#endif

  class AllocRecord : public CHeapObj<mtClass> {
  public:
    AllocRecord(address ptr, MetaspaceObj::Type type, int byte_size)
      : _next(NULL), _ptr(ptr), _type(type), _byte_size(byte_size) {}
    AllocRecord *_next;
    address _ptr;
    MetaspaceObj::Type _type;
    int _byte_size;
  };

  AllocRecord * _alloc_record_head;
  AllocRecord * _alloc_record_tail;

 public:

  Metaspace(Mutex* lock, MetaspaceType type);
  ~Metaspace();

  // Initialize globals for Metaspace
  static void global_initialize();

  static size_t first_chunk_word_size() { return _first_chunk_word_size; }
  static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }

  char*  bottom() const;
  size_t used_words_slow(MetadataType mdtype) const;
  size_t free_words(MetadataType mdtype) const;
  size_t capacity_words_slow(MetadataType mdtype) const;
  size_t waste_words(MetadataType mdtype) const;

  size_t used_bytes_slow(MetadataType mdtype) const;
  size_t capacity_bytes_slow(MetadataType mdtype) const;

  static Metablock* allocate(ClassLoaderData* loader_data, size_t word_size,
                             bool read_only, MetaspaceObj::Type type, TRAPS);
  void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
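
  // An illustrative (hypothetical) use of allocate() above; real call sites
  // live in the runtime and class-file parsing code, and the chosen
  // MetaspaceObj::Type depends on what is being allocated:
  //
  //   ClassLoaderData* loader_data = ...;
  //   Metablock* block = Metaspace::allocate(loader_data, word_size,
  //                                          /* read_only */ false,
  //                                          MetaspaceObj::ClassType, THREAD);
  //   // an allocation failure is reported through the TRAPS/THREAD mechanism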

  MetaWord* expand_and_allocate(size_t size,
                                MetadataType mdtype);

  static bool contains(const void *ptr);
  void dump(outputStream* const out) const;

  // Free empty virtualspaces
  static void purge();

  void print_on(outputStream* st) const;
  // Debugging support
  void verify();

  class AllocRecordClosure :  public StackObj {
  public:
    virtual void doit(address ptr, MetaspaceObj::Type type, int byte_size) = 0;
  };

  void iterate(AllocRecordClosure *closure);
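
  // An illustrative (hypothetical) closure, e.g. for dumping the allocation
  // records gathered while DumpSharedSpaces is set ('msp' stands for some
  // Metaspace*):
  //
  //   class PrintAllocClosure : public Metaspace::AllocRecordClosure {
  //    public:
  //     virtual void doit(address ptr, MetaspaceObj::Type type, int byte_size) {
  //       tty->print_cr(PTR_FORMAT " type=%d size=%d", ptr, (int)type, byte_size);
  //     }
  //   };
  //
  //   PrintAllocClosure cl;
  //   msp->iterate(&cl);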

  // Returns true only if UseCompressedKlassPointers is true and DumpSharedSpaces is false.
  static bool using_class_space() {
    return NOT_LP64(false) LP64_ONLY(UseCompressedKlassPointers && !DumpSharedSpaces);
  }

};

class MetaspaceAux : AllStatic {
  static size_t free_chunks_total(Metaspace::MetadataType mdtype);
  static size_t free_chunks_total_in_bytes(Metaspace::MetadataType mdtype);

 public:
  // Statistics for class space and data space in metaspace.

  // These methods iterate over the classloader data graph
  // for the given Metaspace type.  These are slow.
  static size_t used_bytes_slow(Metaspace::MetadataType mdtype);
  static size_t free_in_bytes(Metaspace::MetadataType mdtype);
  static size_t capacity_bytes_slow(Metaspace::MetadataType mdtype);

  // Iterates over the virtual space list.
  static size_t reserved_in_bytes(Metaspace::MetadataType mdtype);

  // Running sum of space in all Metachunks that have been
  // allocated to a Metaspace.  This is used instead of
  // iterating over all the classloaders.  One for each
  // type of Metadata.
  static size_t _allocated_capacity_words[Metaspace::MetadataTypeCount];
  // Running sum of space in all Metachunks that is being
  // used for metadata.  One for each
  // type of Metadata.
  static size_t _allocated_used_words[Metaspace::MetadataTypeCount];

 public:
  // Decrement and increment _allocated_capacity_words
  static void dec_capacity(Metaspace::MetadataType type, size_t words);
  static void inc_capacity(Metaspace::MetadataType type, size_t words);

  // Decrement and increment _allocated_used_words
  static void dec_used(Metaspace::MetadataType type, size_t words);
  static void inc_used(Metaspace::MetadataType type, size_t words);

  // Total of space allocated to metadata in all Metaspaces.
  // This sums the space used in each Metachunk by
  // iterating over the classloader data graph
  static size_t used_bytes_slow() {
    return used_bytes_slow(Metaspace::ClassType) +
           used_bytes_slow(Metaspace::NonClassType);
  }

  // Used by MetaspaceCounters
  static size_t free_chunks_total();
  static size_t free_chunks_total_in_bytes();

  static size_t allocated_capacity_words(Metaspace::MetadataType mdtype) {
    return _allocated_capacity_words[mdtype];
  }
  static size_t allocated_capacity_words() {
    return _allocated_capacity_words[Metaspace::NonClassType] +
           (Metaspace::using_class_space() ?
           _allocated_capacity_words[Metaspace::ClassType] : 0);
  }
  static size_t allocated_capacity_bytes(Metaspace::MetadataType mdtype) {
    return allocated_capacity_words(mdtype) * BytesPerWord;
  }
  static size_t allocated_capacity_bytes() {
    return allocated_capacity_words() * BytesPerWord;
  }

  static size_t allocated_used_words(Metaspace::MetadataType mdtype) {
    return _allocated_used_words[mdtype];
  }
  static size_t allocated_used_words() {
    return _allocated_used_words[Metaspace::NonClassType] +
           (Metaspace::using_class_space() ?
           _allocated_used_words[Metaspace::ClassType] : 0);
  }
  static size_t allocated_used_bytes(Metaspace::MetadataType mdtype) {
    return allocated_used_words(mdtype) * BytesPerWord;
  }
  static size_t allocated_used_bytes() {
    return allocated_used_words() * BytesPerWord;
  }

  static size_t free_bytes();

  // Total capacity in all Metaspaces
  static size_t capacity_bytes_slow() {
#ifdef PRODUCT
    // Use allocated_capacity_bytes() in PRODUCT instead of this function.
    guarantee(false, "Should not call capacity_bytes_slow() in the PRODUCT");
#endif
    size_t class_capacity = capacity_bytes_slow(Metaspace::ClassType);
    size_t non_class_capacity = capacity_bytes_slow(Metaspace::NonClassType);
    assert(allocated_capacity_bytes() == class_capacity + non_class_capacity,
           err_msg("bad accounting: allocated_capacity_bytes() " SIZE_FORMAT
             " class_capacity + non_class_capacity " SIZE_FORMAT
             " class_capacity " SIZE_FORMAT " non_class_capacity " SIZE_FORMAT,
             allocated_capacity_bytes(), class_capacity + non_class_capacity,
             class_capacity, non_class_capacity));

    return class_capacity + non_class_capacity;
  }

  // Total space reserved in all Metaspaces
  static size_t reserved_in_bytes() {
    return reserved_in_bytes(Metaspace::ClassType) +
           reserved_in_bytes(Metaspace::NonClassType);
  }

  static size_t min_chunk_size();

  // Print change in used metadata.
  static void print_metaspace_change(size_t prev_metadata_used);
  static void print_on(outputStream * out);
  static void print_on(outputStream * out, Metaspace::MetadataType mdtype);

  static void print_class_waste(outputStream* out);
  static void print_waste(outputStream* out);
  static void dump(outputStream* out);
  static void verify_free_chunks();
  // Checks that the values returned by allocated_capacity_bytes() and
  // capacity_bytes_slow() are the same.
  static void verify_capacity();
  static void verify_used();
  static void verify_metrics();
};

// Metaspaces are deallocated when their class loaders are GC'ed.
// This class implements a policy for inducing GC's to recover
// Metaspaces.

class MetaspaceGC : AllStatic {

  // The current high-water-mark for inducing a GC.  When
  // the capacity of all space in the virtual lists reaches this value,
  // a GC is induced and the value is increased.  This should be changed
  // to the space actually used for allocations, to avoid the effects of
  // fragmentation losses in partially used chunks.  Size is in words.
  static size_t _capacity_until_GC;

  // After a GC is done any allocation that fails should try to expand
  // the capacity of the Metaspaces.  This flag is set during attempts
  // to allocate in the VMGCOperation that does the GC.
  static bool _expand_after_GC;

  // For a CMS collection, signal that a concurrent collection should
  // be started.
  static bool _should_concurrent_collect;

  static uint _shrink_factor;

  static void set_capacity_until_GC(size_t v) { _capacity_until_GC = v; }

  static size_t shrink_factor() { return _shrink_factor; }
  static void set_shrink_factor(uint v) { _shrink_factor = v; }

 public:

  static size_t capacity_until_GC() { return _capacity_until_GC; }
  static void inc_capacity_until_GC(size_t v) { _capacity_until_GC += v; }
  static void dec_capacity_until_GC(size_t v) {
    _capacity_until_GC = _capacity_until_GC > v ? _capacity_until_GC - v : 0;
  }
  static bool expand_after_GC()           { return _expand_after_GC; }
  static void set_expand_after_GC(bool v) { _expand_after_GC = v; }

  static bool should_concurrent_collect() { return _should_concurrent_collect; }
  static void set_should_concurrent_collect(bool v) {
    _should_concurrent_collect = v;
  }

  // The amount to increase the high-water-mark (_capacity_until_GC)
  static size_t delta_capacity_until_GC(size_t word_size);

  // It is expected that this will be called when the current capacity
  // has been used and a GC should be considered.
  static bool should_expand(VirtualSpaceList* vsl, size_t word_size);
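
  // An illustrative (hypothetical) use of this interface by a failed
  // metaspace allocation; the real logic lives in metaspace.cpp and the
  // collectors:
  //
  //   if (MetaspaceGC::should_expand(vsl, word_size)) {
  //     size_t delta = MetaspaceGC::delta_capacity_until_GC(word_size);
  //     MetaspaceGC::inc_capacity_until_GC(delta);
  //     // ... expand the virtual space list and retry the allocation ...
  //   } else {
  //     // the high-water mark has been reached; induce a GC (for CMS,
  //     // set_should_concurrent_collect(true)) before growing further
  //   }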

  // Calculate the new high-water mark at which to induce
  // a GC.
  static void compute_new_size();
};

#endif // SHARE_VM_MEMORY_METASPACE_HPP