Commit 8320b4d3 authored by ysr

Merge

@@ -3655,9 +3655,7 @@ bool CMSCollector::markFromRootsWork(bool asynch) {
   verify_work_stacks_empty();
   verify_overflow_empty();
   assert(_revisitStack.isEmpty(), "tabula rasa");
-  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
+  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
   bool result = false;
   if (CMSConcurrentMTEnabled && ParallelCMSThreads > 0) {
     result = do_marking_mt(asynch);
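For readers unfamiliar with the idiom used throughout this patch: DEBUG_ONLY compiles its argument only in debug (ASSERT) builds, and RememberKlassesChecker is a StackObj, so the flag it sets lasts exactly as long as the enclosing scope. A minimal self-contained sketch of the pattern follows; the macro guard, the global flag, and the ScopedChecker class are stand-ins for illustration, not HotSpot code.

```cpp
#include <cassert>

// Model of HotSpot's DEBUG_ONLY: the wrapped code exists only in
// debug builds (here, when ASSERT is defined at compile time).
#ifdef ASSERT
#define DEBUG_ONLY(code) code
#else
#define DEBUG_ONLY(code)
#endif

// Hypothetical stand-in for the global flag kept by OopClosure.
static bool g_must_remember_klasses = false;

// Scoped checker modeled on RememberKlassesChecker: set the flag on
// construction, restore the previous value on destruction.
class ScopedChecker {
  bool _saved_state;
 public:
  explicit ScopedChecker(bool checking_on)
      : _saved_state(g_must_remember_klasses) {
    g_must_remember_klasses = checking_on;
  }
  ~ScopedChecker() { g_must_remember_klasses = _saved_state; }
};

int main() {
  {
    DEBUG_ONLY(ScopedChecker cmx(true);)  // checking on for this phase
    // ... marking work would run here with the flag set (debug builds) ...
    DEBUG_ONLY(assert(g_must_remember_klasses);)
  }
  // Once the phase (scope) ends, the flag is back to its prior value.
  assert(!g_must_remember_klasses);
  return 0;
}
```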
@@ -4124,7 +4122,6 @@ void CMSConcMarkingTask::do_work_steal(int i) {
 void CMSConcMarkingTask::coordinator_yield() {
   assert(ConcurrentMarkSweepThread::cms_thread_has_cms_token(),
          "CMS thread should hold CMS token");
   DEBUG_ONLY(RememberKlassesChecker mux(false);)
   // First give up the locks, then yield, then re-lock
   // We should probably use a constructor/destructor idiom to
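The mux(false) instance above relies on the checker nesting correctly: the inner checker saves the current flag value and restores it in its destructor, so checking resumes automatically once the yield ends. A rough model of that nesting, again using a hypothetical ScopedChecker stand-in rather than the real class:

```cpp
#include <cassert>

static bool g_must_remember_klasses = false;  // stand-in for OopClosure's flag

class ScopedChecker {  // same save/restore model as in the earlier sketch
  bool _saved_state;
 public:
  explicit ScopedChecker(bool on) : _saved_state(g_must_remember_klasses) {
    g_must_remember_klasses = on;
  }
  ~ScopedChecker() { g_must_remember_klasses = _saved_state; }
};

// Modeled on coordinator_yield(): checking is suppressed only for the
// duration of the yield, then the enclosing phase's setting returns.
static void coordinator_yield_model() {
  ScopedChecker mux(false);          // turn checking off while yielding
  assert(!g_must_remember_klasses);
  // ... give up locks, let a scavenge run, re-acquire locks ...
}

int main() {
  ScopedChecker cmx(true);           // marking phase: checking on
  coordinator_yield_model();
  assert(g_must_remember_klasses);   // restored after the yield
  return 0;
}
```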
@@ -4201,9 +4198,7 @@ bool CMSCollector::do_marking_mt(bool asynch) {
   // Mutate the Refs discovery so it is MT during the
   // multi-threaded marking phase.
   ReferenceProcessorMTMutator mt(ref_processor(), num_workers > 1);
-  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
+  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
   conc_workers()->start_task(&tsk);
   while (tsk.yielded()) {
     tsk.coordinator_yield();
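ReferenceProcessorMTMutator in the hunk above follows the same constructor/destructor idiom: it switches reference discovery to multi-threaded mode for the span of the marking task and restores the previous mode afterwards. A sketch of that scoped-mutator pattern, with hypothetical names standing in for the real reference processor:

```cpp
#include <cassert>

// Hypothetical model of a reference processor with an MT-discovery knob.
struct RefProcModel {
  bool discovery_is_mt = false;
};

// Modeled on ReferenceProcessorMTMutator: set the knob for a scope,
// restore the previous value when the scope ends.
class ScopedMTMutator {
  RefProcModel* _rp;
  bool _saved;
 public:
  ScopedMTMutator(RefProcModel* rp, bool mt)
      : _rp(rp), _saved(rp->discovery_is_mt) {
    _rp->discovery_is_mt = mt;
  }
  ~ScopedMTMutator() { _rp->discovery_is_mt = _saved; }
};

int main() {
  RefProcModel rp;
  int num_workers = 4;  // assumption: more than one worker means MT discovery
  {
    ScopedMTMutator mt(&rp, num_workers > 1);
    assert(rp.discovery_is_mt);
    // ... start and run the multi-threaded marking task here ...
  }
  assert(!rp.discovery_is_mt);  // single-threaded discovery restored
  return 0;
}
```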
@@ -4472,7 +4467,7 @@ size_t CMSCollector::preclean_work(bool clean_refs, bool clean_survivor) {
     // for cleaner interfaces.
     rp->preclean_discovered_references(
       rp->is_alive_non_header(), &keep_alive, &complete_trace,
-      &yield_cl);
+      &yield_cl, should_unload_classes());
   }
   if (clean_survivor) {  // preclean the active survivor space(s)
@@ -4494,7 +4489,7 @@ size_t CMSCollector::preclean_work(bool clean_refs, bool clean_survivor) {
     SurvivorSpacePrecleanClosure
       sss_cl(this, _span, &_markBitMap, &_markStack,
              &pam_cl, before_count, CMSYield);
-    DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+    DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
     dng->from()->object_iterate_careful(&sss_cl);
     dng->to()->object_iterate_careful(&sss_cl);
   }
@@ -4665,7 +4660,7 @@ size_t CMSCollector::preclean_mod_union_table(
       verify_work_stacks_empty();
       verify_overflow_empty();
       sample_eden();
-      DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+      DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
       stop_point =
         gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
     }
@@ -4753,7 +4748,7 @@ size_t CMSCollector::preclean_card_table(ConcurrentMarkSweepGeneration* gen,
     sample_eden();
     verify_work_stacks_empty();
     verify_overflow_empty();
-    DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+    DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
     HeapWord* stop_point =
       gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
     if (stop_point != NULL) {
@@ -4853,7 +4848,7 @@ void CMSCollector::checkpointRootsFinalWork(bool asynch,
   assert(haveFreelistLocks(), "must have free list locks");
   assert_lock_strong(bitMapLock());
-  DEBUG_ONLY(RememberKlassesChecker fmx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker fmx(should_unload_classes());)
   if (!init_mark_was_synchronous) {
     // We might assume that we need not fill TLAB's when
     // CMSScavengeBeforeRemark is set, because we may have just done
...
@@ -296,23 +296,32 @@ public:
 // RememberKlassesChecker can be passed "false" to turn off checking.
 // It is used by CMS when CMS yields to a different collector.
 class RememberKlassesChecker: StackObj {
-  bool _state;
-  bool _skip;
+  bool _saved_state;
+  bool _do_check;
  public:
-  RememberKlassesChecker(bool checking_on) : _state(false), _skip(false) {
-    _skip = !(ClassUnloading && !UseConcMarkSweepGC ||
-              CMSClassUnloadingEnabled && UseConcMarkSweepGC);
-    if (_skip) {
-      return;
-    }
-    _state = OopClosure::must_remember_klasses();
-    OopClosure::set_must_remember_klasses(checking_on);
+  RememberKlassesChecker(bool checking_on) : _saved_state(false),
+    _do_check(true) {
+    // The ClassUnloading unloading flag affects the collectors except
+    // for CMS.
+    // CMS unloads classes if CMSClassUnloadingEnabled is true or
+    // if ExplicitGCInvokesConcurrentAndUnloadsClasses is true and
+    // the current collection is an explicit collection.  Turning
+    // on the checking in general for
+    // ExplicitGCInvokesConcurrentAndUnloadsClasses and
+    // UseConcMarkSweepGC should not lead to false positives.
+    _do_check =
+      ClassUnloading && !UseConcMarkSweepGC ||
+      CMSClassUnloadingEnabled && UseConcMarkSweepGC ||
+      ExplicitGCInvokesConcurrentAndUnloadsClasses && UseConcMarkSweepGC;
+    if (_do_check) {
+      _saved_state = OopClosure::must_remember_klasses();
+      OopClosure::set_must_remember_klasses(checking_on);
+    }
   }
   ~RememberKlassesChecker() {
-    if (_skip) {
-      return;
-    }
-    OopClosure::set_must_remember_klasses(_state);
+    if (_do_check) {
+      OopClosure::set_must_remember_klasses(_saved_state);
+    }
   }
 };
 #endif  // ASSERT
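The behavioral change in this hunk: instead of an inverted _skip flag that covered only the ClassUnloading and CMSClassUnloadingEnabled cases, the checker now arms itself (_do_check) for the ExplicitGCInvokesConcurrentAndUnloadsClasses configuration as well, and the destructor restores the saved flag value rather than returning early. A self-contained model of the new semantics, including nested use as in coordinator_yield(); the flag variables below are stand-ins for the real VM globals:

```cpp
#include <cassert>

// Hypothetical flag values, standing in for the real VM globals.
static bool ClassUnloading = true;
static bool UseConcMarkSweepGC = true;
static bool CMSClassUnloadingEnabled = false;
static bool ExplicitGCInvokesConcurrentAndUnloadsClasses = true;

static bool g_must_remember_klasses = false;  // models OopClosure's flag

// Model of the patched checker: decide once whether checking applies;
// if so, save the current flag and overwrite it, and have the
// destructor restore the saved value.
class CheckerModel {
  bool _saved_state;
  bool _do_check;
 public:
  explicit CheckerModel(bool checking_on)
      : _saved_state(false), _do_check(true) {
    _do_check =
        (ClassUnloading && !UseConcMarkSweepGC) ||
        (CMSClassUnloadingEnabled && UseConcMarkSweepGC) ||
        (ExplicitGCInvokesConcurrentAndUnloadsClasses && UseConcMarkSweepGC);
    if (_do_check) {
      _saved_state = g_must_remember_klasses;
      g_must_remember_klasses = checking_on;
    }
  }
  ~CheckerModel() {
    if (_do_check) {
      g_must_remember_klasses = _saved_state;
    }
  }
};

int main() {
  // With CMS, ExplicitGCInvokesConcurrentAndUnloadsClasses now enables
  // checking even though CMSClassUnloadingEnabled is off.
  {
    CheckerModel outer(true);
    assert(g_must_remember_klasses);
    {
      CheckerModel inner(false);        // e.g. around a yield point
      assert(!g_must_remember_klasses);
    }
    assert(g_must_remember_klasses);    // inner restored outer's value
  }
  assert(!g_must_remember_klasses);     // outer restored the initial value
  return 0;
}
```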
@@ -1227,13 +1227,16 @@ void ReferenceProcessor::preclean_discovered_references(
     BoolObjectClosure* is_alive,
     OopClosure*        keep_alive,
     VoidClosure*       complete_gc,
-    YieldClosure*      yield) {
+    YieldClosure*      yield,
+    bool               should_unload_classes) {
   NOT_PRODUCT(verify_ok_to_handle_reflists());
 #ifdef ASSERT
   bool must_remember_klasses = ClassUnloading && !UseConcMarkSweepGC ||
-                               CMSClassUnloadingEnabled && UseConcMarkSweepGC;
+                               CMSClassUnloadingEnabled && UseConcMarkSweepGC ||
+                               ExplicitGCInvokesConcurrentAndUnloadsClasses &&
+                               UseConcMarkSweepGC && should_unload_classes;
   RememberKlassesChecker mx(must_remember_klasses);
 #endif
   // Soft references
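The extra conjunct narrows the assertion: under ExplicitGCInvokesConcurrentAndUnloadsClasses, klasses need to be remembered only when the current cycle will actually unload classes, which is what the new should_unload_classes parameter communicates from the CMS caller (see the preclean_work hunk above). A small sketch of the predicate; the flag variables are hypothetical stand-ins for the VM globals:

```cpp
#include <cassert>

// Hypothetical flag values, standing in for the real VM globals.
static bool ClassUnloading = true;
static bool UseConcMarkSweepGC = true;
static bool CMSClassUnloadingEnabled = false;
static bool ExplicitGCInvokesConcurrentAndUnloadsClasses = true;

// The patched assertion predicate: with CMS, klasses must be remembered
// during precleaning either when CMSClassUnloadingEnabled is set, or when
// an explicit (System.gc()) cycle will unload classes, which is what the
// new should_unload_classes parameter reports.
static bool must_remember_klasses(bool should_unload_classes) {
  return (ClassUnloading && !UseConcMarkSweepGC) ||
         (CMSClassUnloadingEnabled && UseConcMarkSweepGC) ||
         (ExplicitGCInvokesConcurrentAndUnloadsClasses &&
          UseConcMarkSweepGC && should_unload_classes);
}

int main() {
  // A regular concurrent cycle with class unloading off: no checking.
  assert(!must_remember_klasses(false));
  // An explicit-GC-triggered cycle that unloads classes: checking on.
  assert(must_remember_klasses(true));
  return 0;
}
```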
...
@@ -170,11 +170,13 @@ class ReferenceProcessor : public CHeapObj {
   // The caller is responsible for taking care of potential
   // interference with concurrent operations on these lists
   // (or predicates involved) by other threads. Currently
-  // only used by the CMS collector.
+  // only used by the CMS collector.  should_unload_classes is
+  // used to aid assertion checking when classes are collected.
   void preclean_discovered_references(BoolObjectClosure* is_alive,
                                       OopClosure*        keep_alive,
                                       VoidClosure*       complete_gc,
-                                      YieldClosure*      yield);
+                                      YieldClosure*      yield,
+                                      bool               should_unload_classes);
   // Delete entries in the discovered lists that have
   // either a null referent or are not active. Such
...