Commit b0e4358a authored by nloodin

8000617: It should be possible to allocate memory without the VM dying.

Reviewed-by: coleenp, kamg
Parent 8216ef94
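
The fix threads an AllocFailType argument (AllocFailStrategy::EXIT_OOM, the old behavior, or AllocFailStrategy::RETURN_NULL) through the C-heap, arena, and resource-area allocators below, so a caller can get NULL back on exhaustion instead of the VM aborting in vm_exit_out_of_memory(). A minimal caller-side sketch of the pattern this enables (the size and the mtThread flag are illustrative, not part of the patch):

    // Opt into a NULL return on allocation failure instead of a VM abort.
    char* buf = AllocateHeap(1024, mtThread, CURRENT_PC,
                             AllocFailStrategy::RETURN_NULL);
    if (buf == NULL) {
      // Native memory exhausted: recover here; the VM keeps running.
    }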
@@ -92,6 +92,26 @@ void* ResourceObj::operator new(size_t size, allocation_type type, MEMFLAGS flags)
   return res;
 }
 
+void* ResourceObj::operator new(size_t size, const std::nothrow_t& nothrow_constant,
+    allocation_type type, MEMFLAGS flags) {
+  // should only call this with std::nothrow, use other operator new() otherwise
+  address res;
+  switch (type) {
+   case C_HEAP:
+    res = (address)AllocateHeap(size, flags, CALLER_PC, AllocFailStrategy::RETURN_NULL);
+    DEBUG_ONLY(if (res != NULL) set_allocation_type(res, C_HEAP);)
+    break;
+   case RESOURCE_AREA:
+    // new(size) sets allocation type RESOURCE_AREA.
+    res = (address)operator new(size, std::nothrow);
+    break;
+   default:
+    ShouldNotReachHere();
+  }
+  return res;
+}
+
 void ResourceObj::operator delete(void* p) {
   assert(((ResourceObj *)p)->allocated_on_C_heap(),
          "delete only allowed for C_HEAP objects");
@@ -506,7 +526,7 @@ void Arena::signal_out_of_memory(size_t sz, const char* whence) const {
 }
 
 // Grow a new Chunk
-void* Arena::grow( size_t x ) {
+void* Arena::grow(size_t x, AllocFailType alloc_failmode) {
   // Get minimal required size.  Either real big, or even bigger for giant objs
   size_t len = MAX2(x, (size_t) Chunk::size);
 
@@ -514,7 +534,10 @@ void* Arena::grow( size_t x ) {
   _chunk = new (len) Chunk(len);
 
   if (_chunk == NULL) {
-    signal_out_of_memory(len * Chunk::aligned_overhead_size(), "Arena::grow");
+    if (alloc_failmode == AllocFailStrategy::EXIT_OOM) {
+      signal_out_of_memory(len * Chunk::aligned_overhead_size(), "Arena::grow");
+    }
+    return NULL;
   }
   if (k) k->set_next(_chunk);   // Append new chunk to end of linked list
   else _first = _chunk;
@@ -529,13 +552,16 @@ void* Arena::grow( size_t x ) {
 
 // Reallocate storage in Arena.
-void *Arena::Arealloc(void* old_ptr, size_t old_size, size_t new_size) {
+void *Arena::Arealloc(void* old_ptr, size_t old_size, size_t new_size, AllocFailType alloc_failmode) {
   assert(new_size >= 0, "bad size");
   if (new_size == 0) return NULL;
 #ifdef ASSERT
   if (UseMallocOnly) {
     // always allocate a new object (otherwise we'll free this one twice)
-    char* copy = (char*)Amalloc(new_size);
+    char* copy = (char*)Amalloc(new_size, alloc_failmode);
+    if (copy == NULL) {
+      return NULL;
+    }
     size_t n = MIN2(old_size, new_size);
     if (n > 0) memcpy(copy, old_ptr, n);
     Afree(old_ptr,old_size);    // Mostly done to keep stats accurate
@@ -561,7 +587,10 @@ void *Arena::Arealloc(void* old_ptr, size_t old_size, size_t new_size) {
   }
 
   // Oops, got to relocate guts
-  void *new_ptr = Amalloc(new_size);
+  void *new_ptr = Amalloc(new_size, alloc_failmode);
+  if (new_ptr == NULL) {
+    return NULL;
+  }
   memcpy( new_ptr, c_old, old_size );
   Afree(c_old,old_size);      // Mostly done to keep stats accurate
   return new_ptr;
......
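
With the Arena changes above, grow() and Arealloc() report failure upward instead of exiting. A hedged usage sketch, assuming an existing Arena* arena (the size 64 is illustrative):

    // Arena allocation that yields NULL when a new Chunk cannot be obtained.
    char* p = (char*) arena->Amalloc(64, AllocFailStrategy::RETURN_NULL);
    if (p == NULL) {
      // grow() could not allocate a backing Chunk; the caller handles it.
    }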
@@ -53,6 +53,12 @@
 #endif
 #endif
 
+class AllocFailStrategy {
+public:
+  enum AllocFailEnum { EXIT_OOM, RETURN_NULL };
+};
+typedef AllocFailStrategy::AllocFailEnum AllocFailType;
+
 // All classes in the virtual machine must be subclassed
 // by one of the following allocation classes:
 //
@@ -315,7 +321,8 @@ protected:
   Chunk *_first;                // First chunk
   Chunk *_chunk;                // current chunk
   char *_hwm, *_max;            // High water mark and max in current chunk
-  void* grow(size_t x);         // Get a new Chunk of at least size x
+  // Get a new Chunk of at least size x
+  void* grow(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
   size_t _size_in_bytes;        // Size of arena (used for native memory tracking)
 
   NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
@@ -350,14 +357,14 @@ protected:
   void operator delete(void* p);
 
   // Fast allocate in the arena.  Common case is: pointer test + increment.
-  void* Amalloc(size_t x) {
+  void* Amalloc(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
     assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
     x = ARENA_ALIGN(x);
     debug_only(if (UseMallocOnly) return malloc(x);)
     check_for_overflow(x, "Arena::Amalloc");
     NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
-      return grow(x);
+      return grow(x, alloc_failmode);
     } else {
       char *old = _hwm;
       _hwm += x;
@@ -365,13 +372,13 @@ protected:
     }
   }
   // Further assume size is padded out to words
-  void *Amalloc_4(size_t x) {
+  void *Amalloc_4(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
     debug_only(if (UseMallocOnly) return malloc(x);)
     check_for_overflow(x, "Arena::Amalloc_4");
     NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
-      return grow(x);
+      return grow(x, alloc_failmode);
     } else {
       char *old = _hwm;
       _hwm += x;
@@ -381,7 +388,7 @@ protected:
   // Allocate with 'double' alignment. It is 8 bytes on sparc.
   // In other cases Amalloc_D() should be the same as Amalloc_4().
-  void* Amalloc_D(size_t x) {
+  void* Amalloc_D(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
     debug_only(if (UseMallocOnly) return malloc(x);)
 #if defined(SPARC) && !defined(_LP64)
@@ -392,7 +399,7 @@ protected:
     check_for_overflow(x, "Arena::Amalloc_D");
     NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
-      return grow(x);    // grow() returns a result aligned >= 8 bytes.
+      return grow(x, alloc_failmode);    // grow() returns a result aligned >= 8 bytes.
     } else {
       char *old = _hwm;
       _hwm += x;
@@ -412,7 +419,8 @@ protected:
     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
   }
 
-  void *Arealloc( void *old_ptr, size_t old_size, size_t new_size );
+  void *Arealloc( void *old_ptr, size_t old_size, size_t new_size,
+      AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
 
   // Move contents of this arena into an empty arena
   Arena *move_contents(Arena *empty_arena);
@@ -458,9 +466,12 @@ private:
 
 //%note allocation_1
-extern char* resource_allocate_bytes(size_t size);
-extern char* resource_allocate_bytes(Thread* thread, size_t size);
-extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size);
+extern char* resource_allocate_bytes(size_t size,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
+extern char* resource_allocate_bytes(Thread* thread, size_t size,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
+extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
 extern void resource_free_bytes( char *old, size_t size );
 
 //----------------------------------------------------------------------
@@ -496,6 +507,8 @@ class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
  public:
   void* operator new(size_t size, allocation_type type, MEMFLAGS flags);
+  void* operator new(size_t size, const std::nothrow_t& nothrow_constant,
+      allocation_type type, MEMFLAGS flags);
   void* operator new(size_t size, Arena *arena) {
       address res = (address)arena->Amalloc(size);
       DEBUG_ONLY(set_allocation_type(res, ARENA);)
@@ -506,6 +519,13 @@ class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
       DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
       return res;
   }
+
+  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) {
+      address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
+      DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);)
+      return res;
+  }
+
   void operator delete(void* p);
 };
......
@@ -48,7 +48,8 @@ inline void inc_stat_counter(volatile julong* dest, julong add_value) {
 #endif
 
 // allocate using malloc; will fail if no memory available
-inline char* AllocateHeap(size_t size, MEMFLAGS flags, address pc = 0) {
+inline char* AllocateHeap(size_t size, MEMFLAGS flags, address pc = 0,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
   if (pc == 0) {
     pc = CURRENT_PC;
   }
@@ -56,16 +57,17 @@ inline char* AllocateHeap(size_t size, MEMFLAGS flags, address pc = 0) {
 #ifdef ASSERT
   if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
 #endif
-  if (p == NULL) vm_exit_out_of_memory(size, "AllocateHeap");
+  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) vm_exit_out_of_memory(size, "AllocateHeap");
   return p;
 }
 
-inline char* ReallocateHeap(char *old, size_t size, MEMFLAGS flags) {
+inline char* ReallocateHeap(char *old, size_t size, MEMFLAGS flags,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
   char* p = (char*) os::realloc(old, size, flags, CURRENT_PC);
 #ifdef ASSERT
   if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
 #endif
-  if (p == NULL) vm_exit_out_of_memory(size, "ReallocateHeap");
+  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) vm_exit_out_of_memory(size, "ReallocateHeap");
   return p;
 }
@@ -91,11 +93,13 @@ template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
 template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
   const std::nothrow_t& nothrow_constant, address caller_pc) {
 #ifdef ASSERT
-  void* p = os::malloc(size, F, (caller_pc != 0 ? caller_pc : CALLER_PC));
+  void* p = (void*)AllocateHeap(size, F, (caller_pc != 0 ? caller_pc : CALLER_PC),
+      AllocFailStrategy::RETURN_NULL);
   if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
   return p;
 #else
-  return os::malloc(size, F, (caller_pc != 0 ? caller_pc : CALLER_PC));
+  return (void *) AllocateHeap(size, F, (caller_pc != 0 ? caller_pc : CALLER_PC),
+      AllocFailStrategy::RETURN_NULL);
 #endif
 }
......
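
Since CHeapObj<F>::operator new with std::nothrow now routes through AllocateHeap() with RETURN_NULL, a nothrow new on a CHeapObj subclass yields NULL on exhaustion rather than a VM exit. A sketch, where MyHeapObj is a hypothetical CHeapObj<mtInternal> subclass:

    MyHeapObj* obj = new (std::nothrow) MyHeapObj();
    if (obj == NULL) {
      // C heap exhausted; fail this operation gracefully.
    }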
@@ -45,15 +45,15 @@ debug_only(int ResourceArea::_warned;)      // to suppress multiple warnings
 // The following routines are declared in allocation.hpp and used everywhere:
 
 // Allocation in thread-local resource area
-extern char* resource_allocate_bytes(size_t size) {
-  return Thread::current()->resource_area()->allocate_bytes(size);
+extern char* resource_allocate_bytes(size_t size, AllocFailType alloc_failmode) {
+  return Thread::current()->resource_area()->allocate_bytes(size, alloc_failmode);
 }
-extern char* resource_allocate_bytes(Thread* thread, size_t size) {
-  return thread->resource_area()->allocate_bytes(size);
+extern char* resource_allocate_bytes(Thread* thread, size_t size, AllocFailType alloc_failmode) {
+  return thread->resource_area()->allocate_bytes(size, alloc_failmode);
 }
 
-extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size){
-  return (char*)Thread::current()->resource_area()->Arealloc(old, old_size, new_size);
+extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size, AllocFailType alloc_failmode){
+  return (char*)Thread::current()->resource_area()->Arealloc(old, old_size, new_size, alloc_failmode);
 }
 
 extern void resource_free_bytes( char *old, size_t size ) {
......
@@ -68,7 +68,7 @@ public:
     debug_only(_nesting = 0;);
   }
 
-  char* allocate_bytes(size_t size) {
+  char* allocate_bytes(size_t size, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
 #ifdef ASSERT
     if (_nesting < 1 && !_warned++)
       fatal("memory leak: allocating without ResourceMark");
@@ -78,7 +78,7 @@ public:
       return (*save = (char*)os::malloc(size, mtThread));
     }
 #endif
-    return (char*)Amalloc(size);
+    return (char*)Amalloc(size, alloc_failmode);
   }
 
   debug_only(int nesting() const { return _nesting; });
......
@@ -177,7 +177,8 @@ void* Thread::allocate(size_t size, bool throw_excpt, MEMFLAGS flags) {
     const int alignment = markOopDesc::biased_lock_alignment;
     size_t aligned_size = size + (alignment - sizeof(intptr_t));
     void* real_malloc_addr = throw_excpt? AllocateHeap(aligned_size, flags, CURRENT_PC)
-                                          : os::malloc(aligned_size, flags, CURRENT_PC);
+                                          : AllocateHeap(aligned_size, flags, CURRENT_PC,
+                                              AllocFailStrategy::RETURN_NULL);
     void* aligned_addr     = (void*) align_size_up((intptr_t) real_malloc_addr, alignment);
     assert(((uintptr_t) aligned_addr + (uintptr_t) size) <=
            ((uintptr_t) real_malloc_addr + (uintptr_t) aligned_size),
@@ -191,7 +192,7 @@ void* Thread::allocate(size_t size, bool throw_excpt, MEMFLAGS flags) {
     return aligned_addr;
   } else {
     return throw_excpt? AllocateHeap(size, flags, CURRENT_PC)
-                        : os::malloc(size, flags, CURRENT_PC);
+                        : AllocateHeap(size, flags, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
   }
 }
......
@@ -110,7 +110,7 @@ class Thread: public ThreadShadow {
   void* _real_malloc_address;
  public:
   void* operator new(size_t size) { return allocate(size, true); }
-  void* operator new(size_t size, std::nothrow_t& nothrow_constant) { return allocate(size, false); }
+  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) { return allocate(size, false); }
   void operator delete(void* p);
 
  protected:
......