Commit bf0e7f79, authored by: ysr

6684579: SoftReference processing can be made more efficient

Summary: For current soft-ref clearing policies, we can decide at marking time if a soft-reference will definitely not be cleared, postponing the decision of whether it will definitely be cleared to the final reference processing phase. This can be especially beneficial in the case of concurrent collectors where the marking is usually concurrent but reference processing is usually not.
Reviewed-by: jmasa
Parent fb7c8a34
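Note: in outline, this changeset moves soft-reference policy selection out of the individual collectors and into the ReferenceProcessor itself. Each collector snapshots the policy once per cycle via snap_policy(); reference discovery then consults the snapshotted policy so that soft references which will certainly not be cleared can be marked through immediately; and process_discovered_references() loses its ReferencePolicy* parameter. The following is a minimal standalone sketch of that control flow; all types, names, and numbers here are simplified stand-ins, not the real HotSpot classes.

#include <cstdio>

// Hypothetical, simplified stand-ins for the HotSpot types touched here.
struct Policy {
  virtual void snap() = 0;                          // capture VM state once per cycle
  virtual bool should_clear_reference(long age_ms) = 0;
  virtual ~Policy() {}
};

struct LRUPolicy : Policy {
  long free_mb;                                     // stands in for free heap at last GC
  long max_interval;
  void snap() { max_interval = free_mb * 2; }       // illustrative grace period
  bool should_clear_reference(long age_ms) { return age_ms > max_interval; }
};

struct AlwaysClearPolicySketch : Policy {
  void snap() {}
  bool should_clear_reference(long) { return true; }
};

struct RefProcessorSketch {
  Policy* default_policy;
  Policy* always_clear_policy;
  Policy* current;                                  // mirrors _current_soft_ref_policy

  Policy* snap_policy(bool clear_all) {
    current = clear_all ? always_clear_policy : default_policy;
    current->snap();                                // snapshot the threshold for this cycle
    return current;
  }

  // Discovery-time fast path: a soft ref the policy would not clear is not
  // discovered at all, so marking can trace straight through it.
  bool discover_soft_ref(long age_ms) {
    return current->should_clear_reference(age_ms);
  }
};

int main() {
  LRUPolicy lru;  lru.free_mb = 512;                // 1024 ms grace period after snap()
  AlwaysClearPolicySketch always;
  RefProcessorSketch rp = { &lru, &always, 0 };

  rp.snap_policy(false);                            // start of a normal collection cycle
  std::printf("recently used soft ref discovered? %d\n", rp.discover_soft_ref(10));
  std::printf("long-unused soft ref discovered?   %d\n", rp.discover_soft_ref(5000));
  return 0;
}

As in the changeset, the policy objects are allocated once (ReferencePolicy becomes a CHeapObj rather than a ResourceObj); only the captured threshold is refreshed each cycle.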
@@ -1961,6 +1961,7 @@ void CMSCollector::do_compaction_work(bool clear_all_soft_refs) {
   ref_processor()->set_enqueuing_is_done(false);
   ref_processor()->enable_discovery();
+  ref_processor()->snap_policy(clear_all_soft_refs);
   // If an asynchronous collection finishes, the _modUnionTable is
   // all clear. If we are assuming the collection from an asynchronous
   // collection, clear the _modUnionTable.
@@ -2384,6 +2385,9 @@ void CMSCollector::collect_in_foreground(bool clear_all_soft_refs) {
     Universe::verify(true);
   }
+
+  // Snapshot the soft reference policy to be used in this collection cycle.
+  ref_processor()->snap_policy(clear_all_soft_refs);
   bool init_mark_was_synchronous = false; // until proven otherwise
   while (_collectorState != Idling) {
     if (TraceCMSState) {
@@ -4591,11 +4595,11 @@ size_t CMSCollector::preclean_mod_union_table(
     if (!dirtyRegion.is_empty()) {
       assert(numDirtyCards > 0, "consistency check");
       HeapWord* stop_point = NULL;
-      stopTimer();
-      CMSTokenSyncWithLocks ts(true, gen->freelistLock(),
-                               bitMapLock());
-      startTimer();
       {
+        stopTimer();
+        CMSTokenSyncWithLocks ts(true, gen->freelistLock(),
+                                 bitMapLock());
+        startTimer();
         verify_work_stacks_empty();
         verify_overflow_empty();
         sample_eden();
@@ -4612,10 +4616,6 @@ size_t CMSCollector::preclean_mod_union_table(
         assert((CMSPermGenPrecleaningEnabled && (gen == _permGen)) ||
                (_collectorState == AbortablePreclean && should_abort_preclean()),
                "Unparsable objects should only be in perm gen.");
-
-        stopTimer();
-        CMSTokenSyncWithLocks ts(true, bitMapLock());
-        startTimer();
         _modUnionTable.mark_range(MemRegion(stop_point, dirtyRegion.end()));
         if (should_abort_preclean()) {
           break; // out of preclean loop
@@ -5678,23 +5678,14 @@ void CMSCollector::refProcessingWork(bool asynch, bool clear_all_soft_refs) {
   ResourceMark rm;
   HandleMark   hm;
-  ReferencePolicy* soft_ref_policy;

-  assert(!ref_processor()->enqueuing_is_done(), "Enqueuing should not be complete");
+  ReferenceProcessor* rp = ref_processor();
+  assert(rp->span().equals(_span), "Spans should be equal");
+  assert(!rp->enqueuing_is_done(), "Enqueuing should not be complete");
   // Process weak references.
-  if (clear_all_soft_refs) {
-    soft_ref_policy = new AlwaysClearPolicy();
-  } else {
-#ifdef COMPILER2
-    soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-    soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
-  }
+  rp->snap_policy(clear_all_soft_refs);
   verify_work_stacks_empty();
-
-  ReferenceProcessor* rp = ref_processor();
-  assert(rp->span().equals(_span), "Spans should be equal");

   CMSKeepAliveClosure cmsKeepAliveClosure(this, _span, &_markBitMap,
                                           &_markStack, false /* !preclean */);
   CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this,
@@ -5704,14 +5695,12 @@ void CMSCollector::refProcessingWork(bool asynch, bool clear_all_soft_refs) {
     TraceTime t("weak refs processing", PrintGCDetails, false, gclog_or_tty);
     if (rp->processing_is_mt()) {
       CMSRefProcTaskExecutor task_executor(*this);
-      rp->process_discovered_references(soft_ref_policy,
-                                        &_is_alive_closure,
+      rp->process_discovered_references(&_is_alive_closure,
                                         &cmsKeepAliveClosure,
                                         &cmsDrainMarkingStackClosure,
                                         &task_executor);
     } else {
-      rp->process_discovered_references(soft_ref_policy,
-                                        &_is_alive_closure,
+      rp->process_discovered_references(&_is_alive_closure,
                                         &cmsKeepAliveClosure,
                                         &cmsDrainMarkingStackClosure,
                                         NULL);
@@ -6166,8 +6155,8 @@ void CMSCollector::verify_ok_to_terminate() const {
 #endif

 size_t CMSCollector::block_size_using_printezis_bits(HeapWord* addr) const {
-   assert(_markBitMap.isMarked(addr) && _markBitMap.isMarked(addr + 1),
-          "missing Printezis mark?");
+  assert(_markBitMap.isMarked(addr) && _markBitMap.isMarked(addr + 1),
+         "missing Printezis mark?");
   HeapWord* nextOneAddr = _markBitMap.getNextMarkedWordAddress(addr + 2);
   size_t size = pointer_delta(nextOneAddr + 1, addr);
   assert(size == CompactibleFreeListSpace::adjustObjectSize(size),
......
@@ -811,6 +811,7 @@ void ConcurrentMark::checkpointRootsInitialPost() {
   ReferenceProcessor* rp = g1h->ref_processor();
   rp->verify_no_references_recorded();
   rp->enable_discovery(); // enable ("weak") refs discovery
+  rp->snap_policy(false); // snapshot the soft ref policy to be used in this cycle

   SATBMarkQueueSet& satb_mq_set = JavaThread::satb_mark_queue_set();
   satb_mq_set.set_process_completed_threshold(G1SATBProcessCompletedThreshold);
@@ -1829,32 +1830,21 @@ class G1CMDrainMarkingStackClosure: public VoidClosure {
 void ConcurrentMark::weakRefsWork(bool clear_all_soft_refs) {
   ResourceMark rm;
   HandleMark   hm;
-  ReferencePolicy* soft_ref_policy;
+  G1CollectedHeap* g1h   = G1CollectedHeap::heap();
+  ReferenceProcessor* rp = g1h->ref_processor();

   // Process weak references.
-  if (clear_all_soft_refs) {
-    soft_ref_policy = new AlwaysClearPolicy();
-  } else {
-#ifdef COMPILER2
-    soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-    soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif
-  }
+  rp->snap_policy(clear_all_soft_refs);

   assert(_markStack.isEmpty(), "mark stack should be empty");

-  G1CollectedHeap* g1 = G1CollectedHeap::heap();
-  G1CMIsAliveClosure g1IsAliveClosure(g1);
-  G1CMKeepAliveClosure g1KeepAliveClosure(g1, this, nextMarkBitMap());
+  G1CMIsAliveClosure   g1IsAliveClosure  (g1h);
+  G1CMKeepAliveClosure g1KeepAliveClosure(g1h, this, nextMarkBitMap());
   G1CMDrainMarkingStackClosure
     g1DrainMarkingStackClosure(nextMarkBitMap(), &_markStack,
                                &g1KeepAliveClosure);

   // XXXYYY Also: copy the parallel ref processing code from CMS.
-  ReferenceProcessor* rp = g1->ref_processor();
-  rp->process_discovered_references(soft_ref_policy,
-                                    &g1IsAliveClosure,
+  rp->process_discovered_references(&g1IsAliveClosure,
                                     &g1KeepAliveClosure,
                                     &g1DrainMarkingStackClosure,
                                     NULL);
......
@@ -891,6 +891,7 @@ void G1CollectedHeap::do_collection(bool full, bool clear_all_soft_refs,
     ReferenceProcessorIsAliveMutator rp_is_alive_null(ref_processor(), NULL);

     ref_processor()->enable_discovery();
+    ref_processor()->snap_policy(clear_all_soft_refs);

     // Do collection work
     {
@@ -2463,7 +2464,7 @@ G1CollectedHeap::do_collection_pause_at_safepoint(HeapRegion* popular_region) {
     COMPILER2_PRESENT(DerivedPointerTable::clear());

-    // We want to turn off ref discovere, if necessary, and turn it back on
+    // We want to turn off ref discovery, if necessary, and turn it back on
     // on again later if we do.
     bool was_enabled = ref_processor()->discovery_enabled();
     if (was_enabled) ref_processor()->disable_discovery();
......
@@ -33,8 +33,9 @@ void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
   // hook up weak ref data so it can be used during Mark-Sweep
   assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
-  GenMarkSweep::_ref_processor = rp;
   assert(rp != NULL, "should be non-NULL");
+  GenMarkSweep::_ref_processor = rp;
+  rp->snap_policy(clear_all_softrefs);

   // When collecting the permanent generation methodOops may be moving,
   // so we either have to flush all bcp data or convert it into bci.
@@ -121,23 +122,12 @@ void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                 &GenMarkSweep::follow_root_closure);

   // Process reference objects found during marking
-  ReferencePolicy *soft_ref_policy;
-  if (clear_all_softrefs) {
-    soft_ref_policy = new AlwaysClearPolicy();
-  } else {
-#ifdef COMPILER2
-    soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-    soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif
-  }
-  assert(soft_ref_policy != NULL,"No soft reference policy");
-  GenMarkSweep::ref_processor()->process_discovered_references(
-    soft_ref_policy,
-    &GenMarkSweep::is_alive,
-    &GenMarkSweep::keep_alive,
-    &GenMarkSweep::follow_stack_closure,
-    NULL);
+  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
+  rp->snap_policy(clear_all_softrefs);
+  rp->process_discovered_references(&GenMarkSweep::is_alive,
+                                    &GenMarkSweep::keep_alive,
+                                    &GenMarkSweep::follow_stack_closure,
+                                    NULL);

   // Follow system dictionary roots and unload classes
   bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
......
@@ -759,17 +759,12 @@ void ParNewGeneration::collect(bool full,
                               thread_state_set.steals(),
                               thread_state_set.pops()+thread_state_set.steals());
   }
-  assert(thread_state_set.pushes() == thread_state_set.pops() + thread_state_set.steals(),
+  assert(thread_state_set.pushes() == thread_state_set.pops()
+                                    + thread_state_set.steals(),
          "Or else the queues are leaky.");

-  // For now, process discovered weak refs sequentially.
-#ifdef COMPILER2
-  ReferencePolicy *soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-  ReferencePolicy *soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
-
   // Process (weak) reference objects found during scavenge.
+  ReferenceProcessor* rp = ref_processor();
   IsAliveClosure is_alive(this);
   ScanWeakRefClosure scan_weak_ref(this);
   KeepAliveClosure keep_alive(&scan_weak_ref);
@@ -778,18 +773,17 @@ void ParNewGeneration::collect(bool full,
   set_promo_failure_scan_stack_closure(&scan_without_gc_barrier);
   EvacuateFollowersClosureGeneral evacuate_followers(gch, _level,
     &scan_without_gc_barrier, &scan_with_gc_barrier);
-  if (ref_processor()->processing_is_mt()) {
+  rp->snap_policy(clear_all_soft_refs);
+  if (rp->processing_is_mt()) {
     ParNewRefProcTaskExecutor task_executor(*this, thread_state_set);
-    ref_processor()->process_discovered_references(
-        soft_ref_policy, &is_alive, &keep_alive, &evacuate_followers,
-        &task_executor);
+    rp->process_discovered_references(&is_alive, &keep_alive,
+                                      &evacuate_followers, &task_executor);
   } else {
     thread_state_set.flush();
     gch->set_par_threads(0);  // 0 ==> non-parallel.
     gch->save_marks();
-    ref_processor()->process_discovered_references(
-        soft_ref_policy, &is_alive, &keep_alive, &evacuate_followers,
-        NULL);
+    rp->process_discovered_references(&is_alive, &keep_alive,
+                                      &evacuate_followers, NULL);
   }
   if (!promotion_failed()) {
     // Swap the survivor spaces.
@@ -851,14 +845,14 @@ void ParNewGeneration::collect(bool full,
   SpecializationStats::print();

-  ref_processor()->set_enqueuing_is_done(true);
-  if (ref_processor()->processing_is_mt()) {
+  rp->set_enqueuing_is_done(true);
+  if (rp->processing_is_mt()) {
     ParNewRefProcTaskExecutor task_executor(*this, thread_state_set);
-    ref_processor()->enqueue_discovered_references(&task_executor);
+    rp->enqueue_discovered_references(&task_executor);
   } else {
-    ref_processor()->enqueue_discovered_references(NULL);
+    rp->enqueue_discovered_references(NULL);
   }
-  ref_processor()->verify_no_references_recorded();
+  rp->verify_no_references_recorded();
 }

 static int sum;
......
@@ -172,6 +172,7 @@ void PSMarkSweep::invoke_no_policy(bool clear_all_softrefs) {
     COMPILER2_PRESENT(DerivedPointerTable::clear());

     ref_processor()->enable_discovery();
+    ref_processor()->snap_policy(clear_all_softrefs);

     mark_sweep_phase1(clear_all_softrefs);
@@ -517,20 +518,9 @@ void PSMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
   // Process reference objects found during marking
   {
-    ReferencePolicy *soft_ref_policy;
-    if (clear_all_softrefs) {
-      soft_ref_policy = new AlwaysClearPolicy();
-    } else {
-#ifdef COMPILER2
-      soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-      soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
-    }
-    assert(soft_ref_policy != NULL,"No soft reference policy");
+    ref_processor()->snap_policy(clear_all_softrefs);
     ref_processor()->process_discovered_references(
-      soft_ref_policy, is_alive_closure(), mark_and_push_closure(),
-      follow_stack_closure(), NULL);
+      is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL);
   }

   // Follow system dictionary roots and unload classes
......
@@ -1578,6 +1578,7 @@ void PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
     COMPILER2_PRESENT(DerivedPointerTable::clear());

     ref_processor()->enable_discovery();
+    ref_processor()->snap_policy(maximum_heap_compaction);

     bool marked_for_unloading = false;
@@ -1894,26 +1895,14 @@ void PSParallelCompact::marking_phase(ParCompactionManager* cm,
   // Process reference objects found during marking
   {
     TraceTime tm_r("reference processing", print_phases(), true, gclog_or_tty);
-    ReferencePolicy *soft_ref_policy;
-    if (maximum_heap_compaction) {
-      soft_ref_policy = new AlwaysClearPolicy();
-    } else {
-#ifdef COMPILER2
-      soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-      soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
-    }
-    assert(soft_ref_policy != NULL, "No soft reference policy");
     if (ref_processor()->processing_is_mt()) {
       RefProcTaskExecutor task_executor;
       ref_processor()->process_discovered_references(
-        soft_ref_policy, is_alive_closure(), &mark_and_push_closure,
-        &follow_stack_closure, &task_executor);
+        is_alive_closure(), &mark_and_push_closure, &follow_stack_closure,
+        &task_executor);
     } else {
       ref_processor()->process_discovered_references(
-        soft_ref_policy, is_alive_closure(), &mark_and_push_closure,
-        &follow_stack_closure, NULL);
+        is_alive_closure(), &mark_and_push_closure, &follow_stack_closure, NULL);
     }
   }
......
@@ -330,6 +330,7 @@ bool PSScavenge::invoke_no_policy() {
     COMPILER2_PRESENT(DerivedPointerTable::clear());

     reference_processor()->enable_discovery();
+    reference_processor()->snap_policy(false);

     // We track how much was promoted to the next generation for
     // the AdaptiveSizePolicy.
@@ -394,24 +395,16 @@ bool PSScavenge::invoke_no_policy() {
     // Process reference objects discovered during scavenge
     {
-#ifdef COMPILER2
-      ReferencePolicy *soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-      ReferencePolicy *soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
+      reference_processor()->snap_policy(false); // not always_clear
       PSKeepAliveClosure keep_alive(promotion_manager);
       PSEvacuateFollowersClosure evac_followers(promotion_manager);
-      assert(soft_ref_policy != NULL,"No soft reference policy");
       if (reference_processor()->processing_is_mt()) {
        PSRefProcTaskExecutor task_executor;
         reference_processor()->process_discovered_references(
-          soft_ref_policy, &_is_alive_closure, &keep_alive, &evac_followers,
-          &task_executor);
+          &_is_alive_closure, &keep_alive, &evac_followers, &task_executor);
       } else {
         reference_processor()->process_discovered_references(
-          soft_ref_policy, &_is_alive_closure, &keep_alive, &evac_followers,
-          NULL);
+          &_is_alive_closure, &keep_alive, &evac_followers, NULL);
       }
     }
......
@@ -3434,6 +3434,7 @@ referenceProcessor.cpp referenceProcessor.hpp
 referenceProcessor.cpp                  systemDictionary.hpp

 referenceProcessor.hpp                  instanceRefKlass.hpp
+referenceProcessor.hpp                  referencePolicy.hpp

 reflection.cpp                          arguments.hpp
 reflection.cpp                          handles.inline.hpp
......
@@ -540,14 +540,6 @@ void DefNewGeneration::collect(bool full,
   assert(gch->no_allocs_since_save_marks(0),
          "save marks have not been newly set.");

-  // Weak refs.
-  // FIXME: Are these storage leaks, or are they resource objects?
-#ifdef COMPILER2
-  ReferencePolicy *soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-  ReferencePolicy *soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
-
   // Not very pretty.
   CollectorPolicy* cp = gch->collector_policy();
@@ -574,8 +566,10 @@ void DefNewGeneration::collect(bool full,
   evacuate_followers.do_void();

   FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
-  ref_processor()->process_discovered_references(
-    soft_ref_policy, &is_alive, &keep_alive, &evacuate_followers, NULL);
+  ReferenceProcessor* rp = ref_processor();
+  rp->snap_policy(clear_all_soft_refs);
+  rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
+                                    NULL);
   if (!promotion_failed()) {
     // Swap the survivor spaces.
     eden()->clear(SpaceDecorator::Mangle);
......
@@ -525,8 +525,9 @@ void GenCollectedHeap::do_collection(bool full,
           if (rp->discovery_is_atomic()) {
             rp->verify_no_references_recorded();
             rp->enable_discovery();
+            rp->snap_policy(clear_all_soft_refs);
           } else {
-            // collect() will enable discovery as appropriate
+            // collect() below will enable discovery as appropriate
           }
           _gens[i]->collect(full, clear_all_soft_refs, size, is_tlab);
           if (!rp->enqueuing_is_done()) {
......
@@ -31,8 +31,9 @@ void GenMarkSweep::invoke_at_safepoint(int level, ReferenceProcessor* rp,
   // hook up weak ref data so it can be used during Mark-Sweep
   assert(ref_processor() == NULL, "no stomping");
-  _ref_processor = rp;
   assert(rp != NULL, "should be non-NULL");
+  _ref_processor = rp;
+  rp->snap_policy(clear_all_softrefs);

   TraceTime t1("Full GC", PrintGC && !PrintGCDetails, true, gclog_or_tty);
@@ -245,20 +246,9 @@ void GenMarkSweep::mark_sweep_phase1(int level,
   // Process reference objects found during marking
   {
-    ReferencePolicy *soft_ref_policy;
-    if (clear_all_softrefs) {
-      soft_ref_policy = new AlwaysClearPolicy();
-    } else {
-#ifdef COMPILER2
-      soft_ref_policy = new LRUMaxHeapPolicy();
-#else
-      soft_ref_policy = new LRUCurrentHeapPolicy();
-#endif // COMPILER2
-    }
-    assert(soft_ref_policy != NULL,"No soft reference policy");
+    ref_processor()->snap_policy(clear_all_softrefs);
     ref_processor()->process_discovered_references(
-      soft_ref_policy, &is_alive, &keep_alive,
-      &follow_stack_closure, NULL);
+      &is_alive, &keep_alive, &follow_stack_closure, NULL);
   }

   // Follow system dictionary roots and unload classes
......
@@ -26,6 +26,11 @@
 # include "incls/_referencePolicy.cpp.incl"

 LRUCurrentHeapPolicy::LRUCurrentHeapPolicy() {
+  snap();
+}
+
+// Capture state (of-the-VM) information needed to evaluate the policy
+void LRUCurrentHeapPolicy::snap() {
   _max_interval = (Universe::get_heap_free_at_last_gc() / M) * SoftRefLRUPolicyMSPerMB;
   assert(_max_interval >= 0,"Sanity check");
 }
@@ -47,6 +52,11 @@ bool LRUCurrentHeapPolicy::should_clear_reference(oop p) {
 /////////////////////// MaxHeap //////////////////////

 LRUMaxHeapPolicy::LRUMaxHeapPolicy() {
+  snap();
+}
+
+// Capture state (of-the-VM) information needed to evaluate the policy
+void LRUMaxHeapPolicy::snap() {
   size_t max_heap = MaxHeapSize;
   max_heap -= Universe::get_heap_used_at_last_gc();
   max_heap /= M;
......
@@ -26,9 +26,11 @@
 // should be cleared.

-class ReferencePolicy : public ResourceObj {
+class ReferencePolicy : public CHeapObj {
  public:
   virtual bool should_clear_reference(oop p) { ShouldNotReachHere(); return true; }
+  // Capture state (of-the-VM) information needed to evaluate the policy
+  virtual void snap() { /* do nothing */ }
 };

 class NeverClearPolicy : public ReferencePolicy {
@@ -48,6 +50,8 @@ class LRUCurrentHeapPolicy : public ReferencePolicy {
  public:
   LRUCurrentHeapPolicy();

+  // Capture state (of-the-VM) information needed to evaluate the policy
+  void snap();
   bool should_clear_reference(oop p);
 };
@@ -58,5 +62,7 @@ class LRUMaxHeapPolicy : public ReferencePolicy {
  public:
   LRUMaxHeapPolicy();

+  // Capture state (of-the-VM) information needed to evaluate the policy
+  void snap();
   bool should_clear_reference(oop p);
 };
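The snap() refactoring above separates allocating a policy from capturing the VM state it evaluates against: the constructor now just delegates to snap(), and snap_policy() re-invokes snap() at the start of each collection cycle. A hedged sketch of how an LRU-style policy uses the captured threshold; the constants and clock below are illustrative stand-ins for SoftRefLRUPolicyMSPerMB, the free-heap figure recorded at the last GC, and the soft-ref master clock, not the real HotSpot globals.

#include <cassert>
#include <cstdio>

// Illustrative stand-ins; not the real HotSpot globals.
static const long kSoftRefLRUPolicyMSPerMB = 1000;
static long free_mb_at_last_gc = 64;
static long soft_ref_clock_ms  = 100000;

class LRUCurrentHeapPolicyLike {
  long _max_interval;
 public:
  LRUCurrentHeapPolicyLike() { snap(); }

  // Capture state (of-the-VM) information needed to evaluate the policy.
  // snap_policy() re-invokes this once per cycle, so the threshold stays
  // fixed while both discovery and final reference processing consult it.
  void snap() {
    _max_interval = free_mb_at_last_gc * kSoftRefLRUPolicyMSPerMB;
    assert(_max_interval >= 0 && "Sanity check");
  }

  // Clear a soft reference only if it has gone unused for longer than the
  // grace period computed by the last snap().
  bool should_clear_reference(long ref_timestamp_ms) const {
    return (soft_ref_clock_ms - ref_timestamp_ms) > _max_interval;
  }
};

int main() {
  LRUCurrentHeapPolicyLike policy;
  std::printf("touched 1s ago: clear? %d\n", policy.should_clear_reference(99000));
  std::printf("never touched:  clear? %d\n", policy.should_clear_reference(0));
  return 0;
}

Because the threshold is fixed for the whole cycle, a should_clear_reference() answer computed at discovery time remains valid at final processing time, which is what makes the early filtering added to discover_reference() below safe.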
@@ -25,6 +25,11 @@
 # include "incls/_precompiled.incl"
 # include "incls/_referenceProcessor.cpp.incl"

+ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
+ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy      = NULL;
+oop              ReferenceProcessor::_sentinelRef                  = NULL;
+const int        subclasses_of_ref = REF_PHANTOM - REF_OTHER;
+
 // List of discovered references.
 class DiscoveredList {
  public:
@@ -58,10 +63,6 @@ private:
   size_t _len;
 };

-oop ReferenceProcessor::_sentinelRef = NULL;
-
-const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
-
 void referenceProcessor_init() {
   ReferenceProcessor::init_statics();
 }
@@ -82,6 +83,12 @@ void ReferenceProcessor::init_statics() {
   }
   assert(_sentinelRef != NULL && _sentinelRef->is_oop(),
          "Just constructed it!");
+  _always_clear_soft_ref_policy = new AlwaysClearPolicy();
+  _default_soft_ref_policy      = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
+                                      NOT_COMPILER2(LRUCurrentHeapPolicy());
+  if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
+    vm_exit_during_initialization("Could not allocate reference policy object");
+  }
   guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
             RefDiscoveryPolicy == ReferentBasedDiscovery,
             "Unrecongnized RefDiscoveryPolicy");
@@ -108,6 +115,7 @@ ReferenceProcessor::create_ref_processor(MemRegion span,
     vm_exit_during_initialization("Could not allocate ReferenceProcessor object");
   }
   rp->set_is_alive_non_header(is_alive_non_header);
+  rp->snap_policy(false /* default soft ref policy */);
   return rp;
 }
@@ -194,7 +202,6 @@ void ReferenceProcessor::update_soft_ref_master_clock() {
 }

 void ReferenceProcessor::process_discovered_references(
-  ReferencePolicy*   policy,
   BoolObjectClosure* is_alive,
   OopClosure*        keep_alive,
   VoidClosure*       complete_gc,
@@ -209,7 +216,7 @@ void ReferenceProcessor::process_discovered_references(
   // Soft references
   {
     TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
-    process_discovered_reflist(_discoveredSoftRefs, policy, true,
+    process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
                                is_alive, keep_alive, complete_gc, task_executor);
   }
@@ -1092,15 +1099,28 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
   // reachable.
   if (is_alive_non_header() != NULL) {
     oop referent = java_lang_ref_Reference::referent(obj);
-    // We'd like to assert the following:
-    // assert(referent != NULL, "Refs with null referents already filtered");
-    // However, since this code may be executed concurrently with
-    // mutators, which can clear() the referent, it is not
-    // guaranteed that the referent is non-NULL.
+    // In the case of non-concurrent discovery, the last
+    // disjunct below should hold. It may not hold in the
+    // case of concurrent discovery because mutators may
+    // concurrently clear() a Reference.
+    assert(UseConcMarkSweepGC || UseG1GC || referent != NULL,
+           "Refs with null referents already filtered");
     if (is_alive_non_header()->do_object_b(referent)) {
       return false;  // referent is reachable
     }
   }
+  if (rt == REF_SOFT) {
+    // For soft refs we can decide now if these are not
+    // current candidates for clearing, in which case we
+    // can mark through them now, rather than delaying that
+    // to the reference-processing phase. Since all current
+    // time-stamp policies advance the soft-ref clock only
+    // at a major collection cycle, this is always currently
+    // accurate.
+    if (!_current_soft_ref_policy->should_clear_reference(obj)) {
+      return false;
+    }
+  }

   HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
   const oop  discovered = java_lang_ref_Reference::discovered(obj);
......
@@ -23,7 +23,7 @@
  */

 // ReferenceProcessor class encapsulates the per-"collector" processing
-// of "weak" references for GC. The interface is useful for supporting
+// of java.lang.Reference objects for GC. The interface is useful for supporting
 // a generational abstraction, in particular when there are multiple
 // generations that are being independently collected -- possibly
 // concurrently and/or incrementally. Note, however, that the
@@ -75,6 +75,14 @@ class ReferenceProcessor : public CHeapObj {
   // all collectors but the CMS collector).
   BoolObjectClosure* _is_alive_non_header;

+  // Soft ref clearing policies
+  // . the default policy
+  static ReferencePolicy* _default_soft_ref_policy;
+  // . the "clear all" policy
+  static ReferencePolicy* _always_clear_soft_ref_policy;
+  // . the current policy below is either one of the above
+  ReferencePolicy* _current_soft_ref_policy;
+
   // The discovered ref lists themselves
   // The MT'ness degree of the queues below
@@ -90,6 +98,12 @@ class ReferenceProcessor : public CHeapObj {
   DiscoveredList* discovered_soft_refs() { return _discoveredSoftRefs; }
   static oop  sentinel_ref()             { return _sentinelRef; }
   static oop* adr_sentinel_ref()         { return &_sentinelRef; }
+  ReferencePolicy* snap_policy(bool always_clear) {
+    _current_soft_ref_policy = always_clear ?
+      _always_clear_soft_ref_policy : _default_soft_ref_policy;
+    _current_soft_ref_policy->snap();   // snapshot the policy threshold
+    return _current_soft_ref_policy;
+  }

  public:
   // Process references with a certain reachability level.
@@ -297,8 +311,7 @@ class ReferenceProcessor : public CHeapObj {
   bool discover_reference(oop obj, ReferenceType rt);

   // Process references found during GC (called by the garbage collector)
-  void process_discovered_references(ReferencePolicy*   policy,
-                                     BoolObjectClosure* is_alive,
+  void process_discovered_references(BoolObjectClosure* is_alive,
                                      OopClosure*        keep_alive,
                                      VoidClosure*       complete_gc,
                                      AbstractRefProcTaskExecutor* task_executor);
......
@@ -96,7 +96,7 @@ bool Universe::_bootstrapping = false;
 bool Universe::_fully_initialized = false;

 size_t Universe::_heap_capacity_at_last_gc;
-size_t Universe::_heap_used_at_last_gc;
+size_t Universe::_heap_used_at_last_gc = 0;

 CollectedHeap* Universe::_collectedHeap = NULL;
 address Universe::_heap_base = NULL;
......
@@ -65,8 +65,10 @@
 // COMPILER2 variant
 #ifdef COMPILER2
 #define COMPILER2_PRESENT(code) code
+#define NOT_COMPILER2(code)
 #else // COMPILER2
 #define COMPILER2_PRESENT(code)
+#define NOT_COMPILER2(code) code
 #endif // COMPILER2
......
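The NOT_COMPILER2 macro added above pairs with COMPILER2_PRESENT so that exactly one of two expressions survives preprocessing; this is what lets init_statics() pick the default soft-ref policy type in a single statement. A small self-contained illustration (the string literals are placeholders standing in for the two policy constructors):

#include <cstdio>

// Same macro pair as in the macros.hpp hunk above.
#ifdef COMPILER2
#define COMPILER2_PRESENT(code) code
#define NOT_COMPILER2(code)
#else // COMPILER2
#define COMPILER2_PRESENT(code)
#define NOT_COMPILER2(code) code
#endif // COMPILER2

int main() {
  // Exactly one literal survives preprocessing, mirroring how
  // _default_soft_ref_policy becomes either LRUMaxHeapPolicy
  // (COMPILER2, the server VM) or LRUCurrentHeapPolicy (client VM):
  const char* policy = COMPILER2_PRESENT("LRUMaxHeapPolicy")
                       NOT_COMPILER2("LRUCurrentHeapPolicy");
  std::printf("default soft ref policy: %s\n", policy);
  return 0;
}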