From 893ceddc6db4e37bfb0a58d15ee40467baa93563 Mon Sep 17 00:00:00 2001
From: johnc
Date: Tue, 29 May 2012 10:18:02 -0700
Subject: [PATCH] 7143858: G1: Back to back young GCs with the second GC
 having a minimally sized eden

Summary: Before the last thread to leave a JNI critical region was able to
schedule a GCLocker Initiated GC, another thread was attempting an allocation
and saw that the GCLocker region was no longer active and successfully
scheduled a GC. Stall allocating threads until the GCLocker Initiated GC is
performed and then retry the allocation.
Reviewed-by: brutisso, huntch
---
 .../gc_implementation/g1/g1CollectedHeap.cpp  | 36 +++++++++++++++----
 1 file changed, 30 insertions(+), 6 deletions(-)

diff --git a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
index 9d1f9d92d..212d578e0 100644
--- a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
+++ b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
@@ -952,9 +952,18 @@ HeapWord* G1CollectedHeap::attempt_allocation_slow(size_t word_size,
         }
         should_try_gc = false;
       } else {
-        // Read the GC count while still holding the Heap_lock.
-        gc_count_before = total_collections();
-        should_try_gc = true;
+        // The GCLocker may not be active but the GCLocker initiated
+        // GC may not yet have been performed (GCLocker::needs_gc()
+        // returns true). In this case we do not try this GC and
+        // wait until the GCLocker initiated GC is performed, and
+        // then retry the allocation.
+        if (GC_locker::needs_gc()) {
+          should_try_gc = false;
+        } else {
+          // Read the GC count while still holding the Heap_lock.
+          gc_count_before = total_collections();
+          should_try_gc = true;
+        }
       }
     }
 
@@ -975,6 +984,9 @@ HeapWord* G1CollectedHeap::attempt_allocation_slow(size_t word_size,
         return NULL;
       }
     } else {
+      // The GCLocker is either active or the GCLocker initiated
+      // GC has not yet been performed. Stall until it is and
+      // then retry the allocation.
       GC_locker::stall_until_clear();
     }
 
@@ -1054,9 +1066,18 @@ HeapWord* G1CollectedHeap::attempt_allocation_humongous(size_t word_size,
       if (GC_locker::is_active_and_needs_gc()) {
         should_try_gc = false;
       } else {
-        // Read the GC count while still holding the Heap_lock.
-        gc_count_before = total_collections();
-        should_try_gc = true;
+        // The GCLocker may not be active but the GCLocker initiated
+        // GC may not yet have been performed (GCLocker::needs_gc()
+        // returns true). In this case we do not try this GC and
+        // wait until the GCLocker initiated GC is performed, and
+        // then retry the allocation.
+        if (GC_locker::needs_gc()) {
+          should_try_gc = false;
+        } else {
+          // Read the GC count while still holding the Heap_lock.
+          gc_count_before = total_collections();
+          should_try_gc = true;
+        }
       }
     }
 
@@ -1081,6 +1102,9 @@ HeapWord* G1CollectedHeap::attempt_allocation_humongous(size_t word_size,
         return NULL;
      }
     } else {
+      // The GCLocker is either active or the GCLocker initiated
+      // GC has not yet been performed. Stall until it is and
+      // then retry the allocation.
       GC_locker::stall_until_clear();
     }
 
--
GitLab
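
The hunks above hinge on one decision in the allocation slow path: an
allocating thread may schedule its own collection pause only when the GC
locker is not active and no GCLocker-initiated GC is still pending;
otherwise it stalls in GC_locker::stall_until_clear() and retries the
allocation. Below is a minimal standalone sketch of that decision, not
HotSpot code: LockerState, SlowPathAction, and decide() are hypothetical
stand-ins for GC_locker::is_active_and_needs_gc(), GC_locker::needs_gc(),
and the should_try_gc flag, and the sketch omits the eden-expansion attempt
the real code makes while the locker is active.

  #include <cstdio>

  // Hypothetical stand-in for the GC locker state the patched code consults.
  // 'active' models GC_locker::is_active_and_needs_gc(); 'needs_gc' models
  // GC_locker::needs_gc(), i.e. a GCLocker-initiated GC is still pending.
  struct LockerState {
    bool active;
    bool needs_gc;
  };

  // What the allocating thread does after dropping the Heap_lock: schedule
  // its own collection pause, or stall until the GCLocker GC runs and retry.
  enum class SlowPathAction { ScheduleGC, StallAndRetry };

  // Mirrors the control flow the patch introduces: even when the locker is
  // no longer active, a still-pending GCLocker-initiated GC forces the
  // allocating thread to stall rather than schedule a second GC of its own,
  // which previously produced back-to-back young GCs with a minimal eden.
  SlowPathAction decide(const LockerState& s) {
    if (s.active) {
      return SlowPathAction::StallAndRetry;  // locker active: cannot GC now
    }
    if (s.needs_gc) {
      return SlowPathAction::StallAndRetry;  // GCLocker GC pending: wait for it
    }
    return SlowPathAction::ScheduleGC;       // safe to read GC count and try a GC
  }

  int main() {
    const LockerState cases[] = {
      {true,  true },   // inside a critical section, GC already requested
      {false, true },   // critical section exited, GCLocker GC not yet done
      {false, false},   // no locker activity: thread may schedule its own GC
    };
    for (const LockerState& s : cases) {
      std::printf("active=%d needs_gc=%d -> %s\n", s.active, s.needs_gc,
                  decide(s) == SlowPathAction::ScheduleGC ? "schedule GC"
                                                          : "stall and retry");
    }
    return 0;
  }

The middle case (active == false, needs_gc == true) is the window the bug
report describes: before this change the allocator treated it like the third
case and scheduled an immediate, nearly useless young GC.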