Commit 72f55a45 authored by iveresov

6783381: NUMA allocator: don't pretouch eden space with UseNUMA

Summary: Moved pretouching to MutableSpace. Also MutableSpace now turns on page interleaving for the region it covers.
Reviewed-by: jmasa, jcoomes
Parent 7a40ef32
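For context: "pretouching" walks a newly committed address range and touches one byte per page, so the operating system backs the range with physical memory immediately instead of taking a page fault on each first access later. Below is a minimal standalone sketch of the idea; the fixed 4 KB page size and the names are illustrative assumptions, not HotSpot code.

#include <cstddef>
#include <cstdlib>

static const size_t kPageSize = 4096;  // assumed page size

// Touch one byte per page so the OS maps physical memory up front
// instead of faulting lazily on first use.
static void pretouch(char* start, char* end) {
  for (volatile char* p = start; p < end; p += kPageSize) {
    char t = *p;  // read the byte...
    *p = t;       // ...and write it back, forcing the page in
  }
}

int main() {
  const size_t size = 16 * kPageSize;
  char* block = static_cast<char*>(std::malloc(size));
  if (block != NULL) {
    pretouch(block, block + size);
    std::free(block);
  }
  return 0;
}

One design note visible in the diff: the loop removed from PSVirtualSpace::expand_by() wrote zeros into each page, which is safe only for freshly committed memory, while the new MutableSpace::pretouch_pages() reads each byte and writes the same value back, presumably because it may now run over regions that already hold data.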
@@ -116,7 +116,7 @@ void PSOldGen::initialize_work(const char* perf_data_name, int level) {
   // ObjectSpace stuff
   //
-  _object_space = new MutableSpace();
+  _object_space = new MutableSpace(virtual_space()->alignment());
   if (_object_space == NULL)
     vm_exit_during_initialization("Could not allocate an old gen space");
@@ -385,10 +385,10 @@ void PSOldGen::post_resize() {
   start_array()->set_covered_region(new_memregion);
   Universe::heap()->barrier_set()->resize_covered_region(new_memregion);
-  HeapWord* const virtual_space_high = (HeapWord*) virtual_space()->high();
   // ALWAYS do this last!!
-  object_space()->set_end(virtual_space_high);
+  object_space()->initialize(new_memregion,
+                             SpaceDecorator::DontClear,
+                             SpaceDecorator::DontMangle);
   assert(new_word_size == heap_word_size(object_space()->capacity_in_bytes()),
          "Sanity");
......
@@ -78,7 +78,7 @@ void PSVirtualSpace::release() {
   _special = false;
 }
-bool PSVirtualSpace::expand_by(size_t bytes, bool pre_touch) {
+bool PSVirtualSpace::expand_by(size_t bytes) {
   assert(is_aligned(bytes), "arg not aligned");
   DEBUG_ONLY(PSVirtualSpaceVerifier this_verifier(this));
@@ -92,15 +92,6 @@ bool PSVirtualSpace::expand_by(size_t bytes, bool pre_touch) {
     _committed_high_addr += bytes;
   }
-  if (pre_touch || AlwaysPreTouch) {
-    for (char* curr = base_addr;
-         curr < _committed_high_addr;
-         curr += os::vm_page_size()) {
-      char tmp = *curr;
-      *curr = 0;
-    }
-  }
   return result;
 }
@@ -255,7 +246,7 @@ PSVirtualSpaceHighToLow::PSVirtualSpaceHighToLow(ReservedSpace rs) {
   DEBUG_ONLY(verify());
 }
-bool PSVirtualSpaceHighToLow::expand_by(size_t bytes, bool pre_touch) {
+bool PSVirtualSpaceHighToLow::expand_by(size_t bytes) {
   assert(is_aligned(bytes), "arg not aligned");
   DEBUG_ONLY(PSVirtualSpaceVerifier this_verifier(this));
@@ -269,15 +260,6 @@ bool PSVirtualSpaceHighToLow::expand_by(size_t bytes, bool pre_touch) {
     _committed_low_addr -= bytes;
   }
-  if (pre_touch || AlwaysPreTouch) {
-    for (char* curr = base_addr;
-         curr < _committed_high_addr;
-         curr += os::vm_page_size()) {
-      char tmp = *curr;
-      *curr = 0;
-    }
-  }
   return result;
 }
......
@@ -80,7 +80,7 @@ class PSVirtualSpace : public CHeapObj {
   inline void set_reserved(char* low_addr, char* high_addr, bool special);
   inline void set_reserved(ReservedSpace rs);
   inline void set_committed(char* low_addr, char* high_addr);
-  virtual bool expand_by(size_t bytes, bool pre_touch = false);
+  virtual bool expand_by(size_t bytes);
   virtual bool shrink_by(size_t bytes);
   virtual size_t expand_into(PSVirtualSpace* space, size_t bytes);
   void release();
@@ -127,7 +127,7 @@ class PSVirtualSpaceHighToLow : public PSVirtualSpace {
   PSVirtualSpaceHighToLow(ReservedSpace rs, size_t alignment);
   PSVirtualSpaceHighToLow(ReservedSpace rs);
-  virtual bool expand_by(size_t bytes, bool pre_touch = false);
+  virtual bool expand_by(size_t bytes);
   virtual bool shrink_by(size_t bytes);
   virtual size_t expand_into(PSVirtualSpace* space, size_t bytes);
......
@@ -64,12 +64,12 @@ void PSYoungGen::initialize_work() {
   }
   if (UseNUMA) {
-    _eden_space = new MutableNUMASpace();
+    _eden_space = new MutableNUMASpace(virtual_space()->alignment());
   } else {
-    _eden_space = new MutableSpace();
+    _eden_space = new MutableSpace(virtual_space()->alignment());
   }
-  _from_space = new MutableSpace();
-  _to_space = new MutableSpace();
+  _from_space = new MutableSpace(virtual_space()->alignment());
+  _to_space = new MutableSpace(virtual_space()->alignment());
   if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
     vm_exit_during_initialization("Could not allocate a young gen space");
......
@@ -27,7 +27,7 @@
 # include "incls/_mutableNUMASpace.cpp.incl"
-MutableNUMASpace::MutableNUMASpace() {
+MutableNUMASpace::MutableNUMASpace(size_t alignment) : MutableSpace(alignment) {
   _lgrp_spaces = new (ResourceObj::C_HEAP) GrowableArray<LGRPSpace*>(0, true);
   _page_size = os::vm_page_size();
   _adaptation_cycles = 0;
@@ -221,7 +221,7 @@ bool MutableNUMASpace::update_layout(bool force) {
       }
     }
     if (!found) {
-      lgrp_spaces()->append(new LGRPSpace(lgrp_ids[i]));
+      lgrp_spaces()->append(new LGRPSpace(lgrp_ids[i], alignment()));
     }
   }
@@ -443,10 +443,10 @@ void MutableNUMASpace::select_tails(MemRegion new_region, MemRegion intersection,
   // Is there bottom?
   if (new_region.start() < intersection.start()) { // Yes
     // Try to coalesce small pages into a large one.
-    if (UseLargePages && page_size() >= os::large_page_size()) {
-      HeapWord* p = (HeapWord*)round_to((intptr_t) intersection.start(), os::large_page_size());
+    if (UseLargePages && page_size() >= alignment()) {
+      HeapWord* p = (HeapWord*)round_to((intptr_t) intersection.start(), alignment());
       if (new_region.contains(p)
-          && pointer_delta(p, new_region.start(), sizeof(char)) >= os::large_page_size()) {
+          && pointer_delta(p, new_region.start(), sizeof(char)) >= alignment()) {
         if (intersection.contains(p)) {
           intersection = MemRegion(p, intersection.end());
         } else {
@@ -462,10 +462,10 @@ void MutableNUMASpace::select_tails(MemRegion new_region, MemRegion intersection,
   // Is there top?
   if (intersection.end() < new_region.end()) { // Yes
     // Try to coalesce small pages into a large one.
-    if (UseLargePages && page_size() >= os::large_page_size()) {
-      HeapWord* p = (HeapWord*)round_down((intptr_t) intersection.end(), os::large_page_size());
+    if (UseLargePages && page_size() >= alignment()) {
+      HeapWord* p = (HeapWord*)round_down((intptr_t) intersection.end(), alignment());
       if (new_region.contains(p)
-          && pointer_delta(new_region.end(), p, sizeof(char)) >= os::large_page_size()) {
+          && pointer_delta(new_region.end(), p, sizeof(char)) >= alignment()) {
         if (intersection.contains(p)) {
           intersection = MemRegion(intersection.start(), p);
         } else {
@@ -504,12 +504,12 @@ void MutableNUMASpace::merge_regions(MemRegion new_region, MemRegion* intersection,
   // That's the only case we have to make an additional bias_region() call.
   HeapWord* start = invalid_region->start();
   HeapWord* end = invalid_region->end();
-  if (UseLargePages && page_size() >= os::large_page_size()) {
-    HeapWord *p = (HeapWord*)round_down((intptr_t) start, os::large_page_size());
+  if (UseLargePages && page_size() >= alignment()) {
+    HeapWord *p = (HeapWord*)round_down((intptr_t) start, alignment());
     if (new_region.contains(p)) {
       start = p;
     }
-    p = (HeapWord*)round_to((intptr_t) end, os::large_page_size());
+    p = (HeapWord*)round_to((intptr_t) end, alignment());
     if (new_region.contains(end)) {
       end = p;
     }
@@ -526,7 +526,8 @@ void MutableNUMASpace::merge_regions(MemRegion new_region, MemRegion* intersection,
 void MutableNUMASpace::initialize(MemRegion mr,
                                   bool clear_space,
-                                  bool mangle_space) {
+                                  bool mangle_space,
+                                  bool setup_pages) {
   assert(clear_space, "Reallocation will destroy data!");
   assert(lgrp_spaces()->length() > 0, "There should be at least one space");
@@ -538,7 +539,7 @@ void MutableNUMASpace::initialize(MemRegion mr,
   // Compute chunk sizes
   size_t prev_page_size = page_size();
-  set_page_size(UseLargePages ? os::large_page_size() : os::vm_page_size());
+  set_page_size(UseLargePages ? alignment() : os::vm_page_size());
   HeapWord* rounded_bottom = (HeapWord*)round_to((intptr_t) bottom(), page_size());
   HeapWord* rounded_end = (HeapWord*)round_down((intptr_t) end(), page_size());
   size_t base_space_size_pages = pointer_delta(rounded_end, rounded_bottom, sizeof(char)) / page_size();
@@ -666,7 +667,7 @@ void MutableNUMASpace::initialize(MemRegion mr,
     }
     // Clear space (set top = bottom) but never mangle.
-    s->initialize(new_region, SpaceDecorator::Clear, SpaceDecorator::DontMangle);
+    s->initialize(new_region, SpaceDecorator::Clear, SpaceDecorator::DontMangle, MutableSpace::DontSetupPages);
   set_adaptation_cycles(samples_count());
 }
......
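Aside: the per-lgroup machinery above biases pages to specific NUMA nodes, while the parent MutableSpace (changed further below) makes its whole region interleaved via os::numa_make_global(). As a rough standalone analogy, assuming Linux with libnuma available (link with -lnuma), interleaved placement looks like the sketch below; the actual HotSpot os:: layer is platform-specific and not shown in this diff.

#include <numa.h>    // libnuma; link with -lnuma
#include <stddef.h>
#include <stdio.h>

int main() {
  if (numa_available() < 0) {
    fprintf(stderr, "NUMA is not available on this system\n");
    return 1;
  }
  // Allocate 16 MB whose pages are interleaved round-robin across all
  // NUMA nodes, so no single node's memory becomes a hotspot.
  size_t size = 16 * 1024 * 1024;
  void* p = numa_alloc_interleaved(size);
  if (p != NULL) {
    numa_free(p, size);
  }
  return 0;
}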
@@ -82,8 +82,8 @@ class MutableNUMASpace : public MutableSpace {
     char* last_page_scanned()           { return _last_page_scanned; }
     void set_last_page_scanned(char* p) { _last_page_scanned = p; }
    public:
-    LGRPSpace(int l) : _lgrp_id(l), _last_page_scanned(NULL), _allocation_failed(false) {
-      _space = new MutableSpace();
+    LGRPSpace(int l, size_t alignment) : _lgrp_id(l), _last_page_scanned(NULL), _allocation_failed(false) {
+      _space = new MutableSpace(alignment);
       _alloc_rate = new AdaptiveWeightedAverage(NUMAChunkResizeWeight);
     }
     ~LGRPSpace() {
@@ -183,10 +183,10 @@ class MutableNUMASpace : public MutableSpace {
  public:
   GrowableArray<LGRPSpace*>* lgrp_spaces() const { return _lgrp_spaces; }
-  MutableNUMASpace();
+  MutableNUMASpace(size_t alignment);
   virtual ~MutableNUMASpace();
   // Space initialization.
-  virtual void initialize(MemRegion mr, bool clear_space, bool mangle_space);
+  virtual void initialize(MemRegion mr, bool clear_space, bool mangle_space, bool setup_pages = SetupPages);
   // Update space layout if necessary. Do all adaptive resizing work.
   virtual void update();
   // Update allocation rate averages.
......
@@ -25,7 +25,10 @@
 # include "incls/_precompiled.incl"
 # include "incls/_mutableSpace.cpp.incl"
-MutableSpace::MutableSpace(): ImmutableSpace(), _top(NULL) {
+MutableSpace::MutableSpace(size_t alignment): ImmutableSpace(), _top(NULL), _alignment(alignment) {
+  assert(MutableSpace::alignment() >= 0 &&
+         MutableSpace::alignment() % os::vm_page_size() == 0,
+         "Space should be aligned");
   _mangler = new MutableSpaceMangler(this);
 }
@@ -33,16 +36,88 @@ MutableSpace::~MutableSpace() {
   delete _mangler;
 }
+void MutableSpace::numa_setup_pages(MemRegion mr, bool clear_space) {
+  if (!mr.is_empty()) {
+    size_t page_size = UseLargePages ? alignment() : os::vm_page_size();
+    HeapWord *start = (HeapWord*)round_to((intptr_t) mr.start(), page_size);
+    HeapWord *end = (HeapWord*)round_down((intptr_t) mr.end(), page_size);
+    if (end > start) {
+      size_t size = pointer_delta(end, start, sizeof(char));
+      if (clear_space) {
+        // Prefer page reallocation to migration.
+        os::free_memory((char*)start, size);
+      }
+      os::numa_make_global((char*)start, size);
+    }
+  }
+}
+void MutableSpace::pretouch_pages(MemRegion mr) {
+  for (volatile char *p = (char*)mr.start(); p < (char*)mr.end(); p += os::vm_page_size()) {
+    char t = *p; *p = t;
+  }
+}
 void MutableSpace::initialize(MemRegion mr,
                               bool clear_space,
-                              bool mangle_space) {
-  HeapWord* bottom = mr.start();
-  HeapWord* end = mr.end();
+                              bool mangle_space,
+                              bool setup_pages) {
-  assert(Universe::on_page_boundary(bottom) && Universe::on_page_boundary(end),
+  assert(Universe::on_page_boundary(mr.start()) && Universe::on_page_boundary(mr.end()),
          "invalid space boundaries");
-  set_bottom(bottom);
-  set_end(end);
+  if (setup_pages && (UseNUMA || AlwaysPreTouch)) {
+    // The space may move left and right or expand/shrink.
+    // We'd like to enforce the desired page placement.
+    MemRegion head, tail;
+    if (last_setup_region().is_empty()) {
+      // If it's the first initialization don't limit the amount of work.
+      head = mr;
+      tail = MemRegion(mr.end(), mr.end());
+    } else {
+      // Is there an intersection with the address space?
+      MemRegion intersection = last_setup_region().intersection(mr);
+      if (intersection.is_empty()) {
+        intersection = MemRegion(mr.end(), mr.end());
+      }
+      // All the sizes below are in words.
+      size_t head_size = 0, tail_size = 0;
+      if (mr.start() <= intersection.start()) {
+        head_size = pointer_delta(intersection.start(), mr.start());
+      }
+      if (intersection.end() <= mr.end()) {
+        tail_size = pointer_delta(mr.end(), intersection.end());
+      }
+      // Limit the amount of page manipulation if necessary.
+      if (NUMASpaceResizeRate > 0 && !AlwaysPreTouch) {
+        const size_t change_size = head_size + tail_size;
+        const float setup_rate_words = NUMASpaceResizeRate >> LogBytesPerWord;
+        head_size = MIN2((size_t)(setup_rate_words * head_size / change_size),
+                         head_size);
+        tail_size = MIN2((size_t)(setup_rate_words * tail_size / change_size),
+                         tail_size);
+      }
+      head = MemRegion(intersection.start() - head_size, intersection.start());
+      tail = MemRegion(intersection.end(), intersection.end() + tail_size);
+    }
+    assert(mr.contains(head) && mr.contains(tail), "Sanity");
+    if (UseNUMA) {
+      numa_setup_pages(head, clear_space);
+      numa_setup_pages(tail, clear_space);
+    }
+    if (AlwaysPreTouch) {
+      pretouch_pages(head);
+      pretouch_pages(tail);
+    }
+    // Remember where we stopped so that we can continue later.
+    set_last_setup_region(MemRegion(head.start(), tail.end()));
+  }
+  set_bottom(mr.start());
+  set_end(mr.end());
   if (clear_space) {
     clear(mangle_space);
......
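To make the NUMASpaceResizeRate cap in MutableSpace::initialize() concrete, here is the same arithmetic as a standalone worked example with assumed inputs: an 8 MB per-resize budget, a 64-bit VM (LogBytesPerWord = 3), a 3M-word head and a 1M-word tail.

#include <stddef.h>
#include <stdio.h>

int main() {
  // Assumed values: NUMASpaceResizeRate is in bytes per resize,
  // LogBytesPerWord is 3 on a 64-bit VM.
  const size_t NUMASpaceResizeRate = 8 * 1024 * 1024;
  const int LogBytesPerWord = 3;
  size_t head_size = 3 * 1024 * 1024;  // words needing page setup below
  size_t tail_size = 1 * 1024 * 1024;  // words needing page setup above

  // The per-resize budget (in words) is split between head and tail in
  // proportion to their sizes; neither may exceed its actual size.
  const size_t change_size = head_size + tail_size;
  const float setup_rate_words = (float)(NUMASpaceResizeRate >> LogBytesPerWord);
  size_t head_limited = (size_t)(setup_rate_words * head_size / change_size);
  size_t tail_limited = (size_t)(setup_rate_words * tail_size / change_size);
  if (head_limited < head_size) head_size = head_limited;
  if (tail_limited < tail_size) tail_size = tail_limited;

  // The 1M-word budget splits 3:1 between head and tail.
  printf("head = %zu words, tail = %zu words\n", head_size, tail_size);
  // Prints: head = 786432 words, tail = 262144 words
  return 0;
}

Note that the cap is skipped when AlwaysPreTouch is set, presumably because leaving part of the region untouched would defeat that flag's guarantee.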
@@ -25,7 +25,10 @@
 // A MutableSpace is a subtype of ImmutableSpace that supports the
 // concept of allocation. This includes the concepts that a space may
 // be only partially full, and the query methods that go with such
-// an assumption.
+// an assumption. MutableSpace is also responsible for minimizing the
+// page allocation time by having the memory pretouched (with
+// AlwaysPreTouch) and for optimizing page placement on NUMA systems
+// by making the underlying region interleaved (with UseNUMA).
 //
 // Invariant: (ImmutableSpace +) bottom() <= top() <= end()
 //            top() is inclusive and end() is exclusive.
@@ -37,15 +40,23 @@ class MutableSpace: public ImmutableSpace {
   // Helper for mangling unused space in debug builds
   MutableSpaceMangler* _mangler;
+  // The last region whose pages were set up to be interleaved.
+  MemRegion _last_setup_region;
+  size_t _alignment;
  protected:
   HeapWord* _top;
   MutableSpaceMangler* mangler() { return _mangler; }
+  void numa_setup_pages(MemRegion mr, bool clear_space);
+  void pretouch_pages(MemRegion mr);
+  void set_last_setup_region(MemRegion mr) { _last_setup_region = mr; }
+  MemRegion last_setup_region() const { return _last_setup_region; }
  public:
   virtual ~MutableSpace();
-  MutableSpace();
+  MutableSpace(size_t page_size);
   // Accessors
   HeapWord* top() const { return _top; }
@@ -57,13 +68,20 @@ class MutableSpace: public ImmutableSpace {
   virtual void set_bottom(HeapWord* value) { _bottom = value; }
   virtual void set_end(HeapWord* value)    { _end = value; }
+  size_t alignment()                       { return _alignment; }
   // Returns a subregion containing all objects in this space.
   MemRegion used_region() { return MemRegion(bottom(), top()); }
+  static const bool SetupPages = true;
+  static const bool DontSetupPages = false;
   // Initialization
   virtual void initialize(MemRegion mr,
                           bool clear_space,
-                          bool mangle_space);
+                          bool mangle_space,
+                          bool setup_pages = SetupPages);
   virtual void clear(bool mangle_space);
   // Does the usual initialization but optionally resets top to bottom.
 #if 0 // MANGLE_SPACE
......
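The _last_setup_region bookkeeping declared above is what lets initialize() restrict page setup to the parts of the space that actually changed. A toy model of the head/tail computation, with plain longs standing in for HeapWord* and made-up region bounds:

#include <stdio.h>

// Toy stand-in for MemRegion: [start, end) in word offsets.
struct Region {
  long start, end;
  bool is_empty() const { return start >= end; }
};

static long max2(long a, long b) { return a > b ? a : b; }
static long min2(long a, long b) { return a < b ? a : b; }

int main() {
  Region last = {100, 200};  // region whose pages were set up last time
  Region mr   = {80, 240};   // new region after a resize

  // The overlap with the previously set-up region needs no further work.
  Region inter = {max2(last.start, mr.start), min2(last.end, mr.end)};
  if (inter.is_empty()) {
    inter.start = mr.end;    // no overlap: the whole region is new
    inter.end = mr.end;
  }
  Region head = {mr.start, inter.start};  // new pages below the overlap
  Region tail = {inter.end, mr.end};      // new pages above the overlap

  printf("head = [%ld, %ld), tail = [%ld, %ld)\n",
         head.start, head.end, tail.start, tail.end);
  // Prints: head = [80, 100), tail = [200, 240)
  return 0;
}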