Commit afd71569 authored by jmasa

6895236: CMS: cmsOopClosures.inline.hpp:43 assert(..., "Should remember klasses in this context")

Summary: Adjust assertion checking to account for ExplicitGCInvokesConcurrentAndUnloadsClasses as a reason for class unloading
Reviewed-by: ysr
Parent 564656ec
@@ -3655,9 +3655,7 @@ bool CMSCollector::markFromRootsWork(bool asynch) {
   verify_work_stacks_empty();
   verify_overflow_empty();
   assert(_revisitStack.isEmpty(), "tabula rasa");
-  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
   bool result = false;
   if (CMSConcurrentMTEnabled && ParallelCMSThreads > 0) {
     result = do_marking_mt(asynch);
@@ -4124,7 +4122,6 @@ void CMSConcMarkingTask::do_work_steal(int i) {
 void CMSConcMarkingTask::coordinator_yield() {
   assert(ConcurrentMarkSweepThread::cms_thread_has_cms_token(),
          "CMS thread should hold CMS token");
-  DEBUG_ONLY(RememberKlassesChecker mux(false);)
   // First give up the locks, then yield, then re-lock
   // We should probably use a constructor/destructor idiom to
@@ -4201,9 +4198,7 @@ bool CMSCollector::do_marking_mt(bool asynch) {
   // Mutate the Refs discovery so it is MT during the
   // multi-threaded marking phase.
   ReferenceProcessorMTMutator mt(ref_processor(), num_workers > 1);
-  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
   conc_workers()->start_task(&tsk);
   while (tsk.yielded()) {
     tsk.coordinator_yield();
@@ -4472,7 +4467,7 @@ size_t CMSCollector::preclean_work(bool clean_refs, bool clean_survivor) {
     // for cleaner interfaces.
     rp->preclean_discovered_references(
           rp->is_alive_non_header(), &keep_alive, &complete_trace,
-          &yield_cl);
+          &yield_cl, should_unload_classes());
   }
   if (clean_survivor) {  // preclean the active survivor space(s)
......@@ -4494,7 +4489,7 @@ size_t CMSCollector::preclean_work(bool clean_refs, bool clean_survivor) {
SurvivorSpacePrecleanClosure
sss_cl(this, _span, &_markBitMap, &_markStack,
&pam_cl, before_count, CMSYield);
DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
dng->from()->object_iterate_careful(&sss_cl);
dng->to()->object_iterate_careful(&sss_cl);
}
@@ -4665,7 +4660,7 @@ size_t CMSCollector::preclean_mod_union_table(
       verify_work_stacks_empty();
       verify_overflow_empty();
       sample_eden();
-      DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+      DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
       stop_point =
         gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
     }
@@ -4753,7 +4748,7 @@ size_t CMSCollector::preclean_card_table(ConcurrentMarkSweepGeneration* gen,
     sample_eden();
     verify_work_stacks_empty();
     verify_overflow_empty();
-    DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+    DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
     HeapWord* stop_point =
       gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
     if (stop_point != NULL) {
@@ -4853,7 +4848,7 @@ void CMSCollector::checkpointRootsFinalWork(bool asynch,
   assert(haveFreelistLocks(), "must have free list locks");
   assert_lock_strong(bitMapLock());
-  DEBUG_ONLY(RememberKlassesChecker fmx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker fmx(should_unload_classes());)
   if (!init_mark_was_synchronous) {
     // We might assume that we need not fill TLAB's when
     // CMSScavengeBeforeRemark is set, because we may have just done
......
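All of the hunks above make the same substitution: each debug-only RememberKlassesChecker is now armed with the collector's per-cycle should_unload_classes() decision instead of the static CMSClassUnloadingEnabled flag, so the check also covers cycles that unload classes only because of ExplicitGCInvokesConcurrentAndUnloadsClasses. The following is a minimal, hypothetical C++ sketch of that per-cycle rule as stated in the commit summary and the iterator.hpp comment below; it is not the HotSpot implementation, and the function and parameter names are invented for illustration.

#include <cassert>

// Hypothetical helper, not HotSpot code: encodes the rule "CMS unloads
// classes if CMSClassUnloadingEnabled is true, or if
// ExplicitGCInvokesConcurrentAndUnloadsClasses is true and the current
// collection is an explicit collection."
static bool cycle_should_unload_classes(bool cms_class_unloading_enabled,
                                        bool explicit_gc_unloads_classes,
                                        bool current_cycle_is_explicit_gc) {
  return cms_class_unloading_enabled ||
         (explicit_gc_unloads_classes && current_cycle_is_explicit_gc);
}

int main() {
  // -XX:+CMSClassUnloadingEnabled: every CMS cycle unloads classes.
  assert(cycle_should_unload_classes(true, false, false));
  // Only -XX:+ExplicitGCInvokesConcurrentAndUnloadsClasses: unloading happens
  // just for explicit (System.gc()-triggered) cycles -- the case the old
  // CMSClassUnloadingEnabled-based check missed.
  assert(cycle_should_unload_classes(false, true, true));
  assert(!cycle_should_unload_classes(false, true, false));
  return 0;
}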
@@ -296,23 +296,32 @@ public:
 // RememberKlassesChecker can be passed "false" to turn off checking.
 // It is used by CMS when CMS yields to a different collector.
 class RememberKlassesChecker: StackObj {
-  bool _state;
-  bool _skip;
+  bool _saved_state;
+  bool _do_check;
  public:
-  RememberKlassesChecker(bool checking_on) : _state(false), _skip(false) {
-    _skip = !(ClassUnloading && !UseConcMarkSweepGC ||
-              CMSClassUnloadingEnabled && UseConcMarkSweepGC);
-    if (_skip) {
-      return;
+  RememberKlassesChecker(bool checking_on) : _saved_state(false),
+    _do_check(true) {
+    // The ClassUnloading unloading flag affects the collectors except
+    // for CMS.
+    // CMS unloads classes if CMSClassUnloadingEnabled is true or
+    // if ExplicitGCInvokesConcurrentAndUnloadsClasses is true and
+    // the current collection is an explicit collection.  Turning
+    // on the checking in general for
+    // ExplicitGCInvokesConcurrentAndUnloadsClasses and
+    // UseConcMarkSweepGC should not lead to false positives.
+    _do_check =
+      ClassUnloading && !UseConcMarkSweepGC ||
+      CMSClassUnloadingEnabled && UseConcMarkSweepGC ||
+      ExplicitGCInvokesConcurrentAndUnloadsClasses && UseConcMarkSweepGC;
+    if (_do_check) {
+      _saved_state = OopClosure::must_remember_klasses();
+      OopClosure::set_must_remember_klasses(checking_on);
     }
-    _state = OopClosure::must_remember_klasses();
-    OopClosure::set_must_remember_klasses(checking_on);
   }
   ~RememberKlassesChecker() {
-    if (_skip) {
-      return;
+    if (_do_check) {
+      OopClosure::set_must_remember_klasses(_saved_state);
     }
-    OopClosure::set_must_remember_klasses(_state);
   }
 };
 #endif // ASSERT
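For readers unfamiliar with the idiom, the checker is a stack-allocated (RAII) guard: the constructor saves the current must_remember_klasses state and overrides it for the enclosing scope, and the destructor restores it when that scope (a marking or precleaning phase) exits. Below is a minimal stand-alone sketch of that save/set/restore behaviour, using an invented global in place of OopClosure's static state; it illustrates the pattern only and is not the HotSpot class.

#include <cassert>

// Stand-in for OopClosure::must_remember_klasses()/set_must_remember_klasses();
// this global and the surrounding demo are invented for illustration.
static bool g_must_remember_klasses = false;

class ScopedRememberKlasses {            // same shape as RememberKlassesChecker
  bool _saved_state;
  bool _do_check;
 public:
  explicit ScopedRememberKlasses(bool checking_on)
      : _saved_state(false), _do_check(true) {
    // The real class derives _do_check from ClassUnloading,
    // CMSClassUnloadingEnabled and ExplicitGCInvokesConcurrentAndUnloadsClasses;
    // this sketch always checks.
    if (_do_check) {
      _saved_state = g_must_remember_klasses;  // save the previous state
      g_must_remember_klasses = checking_on;   // arm (or disarm) for this scope
    }
  }
  ~ScopedRememberKlasses() {
    if (_do_check) {
      g_must_remember_klasses = _saved_state;  // restore on scope exit
    }
  }
};

int main() {
  {
    ScopedRememberKlasses cmx(true);           // e.g. a phase that unloads classes
    assert(g_must_remember_klasses);
  }
  assert(!g_must_remember_klasses);            // state restored after the phase
  return 0;
}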
@@ -1227,13 +1227,16 @@ void ReferenceProcessor::preclean_discovered_references(
   BoolObjectClosure* is_alive,
   OopClosure* keep_alive,
   VoidClosure* complete_gc,
-  YieldClosure* yield) {
+  YieldClosure* yield,
+  bool should_unload_classes) {
   NOT_PRODUCT(verify_ok_to_handle_reflists());
 #ifdef ASSERT
   bool must_remember_klasses = ClassUnloading && !UseConcMarkSweepGC ||
-                               CMSClassUnloadingEnabled && UseConcMarkSweepGC;
+                               CMSClassUnloadingEnabled && UseConcMarkSweepGC ||
+                               ExplicitGCInvokesConcurrentAndUnloadsClasses &&
+                               UseConcMarkSweepGC && should_unload_classes;
   RememberKlassesChecker mx(must_remember_klasses);
 #endif
   // Soft references
......
@@ -170,11 +170,13 @@ class ReferenceProcessor : public CHeapObj {
   // The caller is responsible for taking care of potential
   // interference with concurrent operations on these lists
   // (or predicates involved) by other threads. Currently
-  // only used by the CMS collector.
+  // only used by the CMS collector.  should_unload_classes is
+  // used to aid assertion checking when classes are collected.
   void preclean_discovered_references(BoolObjectClosure* is_alive,
                                       OopClosure*        keep_alive,
                                       VoidClosure*       complete_gc,
-                                      YieldClosure*      yield);
+                                      YieldClosure*      yield,
+                                      bool               should_unload_classes);
   // Delete entries in the discovered lists that have
   // either a null referent or are not active. Such
......
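The extra bool on preclean_discovered_references() exists only so the debug-only check inside the reference processor can take the collector's per-cycle decision into account; the processor cannot ask CMS whether this cycle unloads classes, so the caller threads the answer through. A hypothetical mock of that caller/callee wiring (invented types and names, not the HotSpot API) is sketched below.

#include <cassert>

// Invented mock of the wiring shown in the diff; not HotSpot code.
struct MockCollector {
  bool unload_classes_this_cycle;
  bool should_unload_classes() const { return unload_classes_this_cycle; }
};

struct MockReferenceProcessor {
  // Mirrors the new last parameter: the caller's per-cycle class-unloading
  // decision, wanted only so debug-only checking can account for it.
  void preclean_discovered_references(bool should_unload_classes) {
    // In the patch this value is folded into the must_remember_klasses
    // predicate guarding RememberKlassesChecker; the mock just records it.
    last_should_unload_classes = should_unload_classes;
    // ... precleaning of the discovered reference lists elided ...
  }
  bool last_should_unload_classes = false;
};

int main() {
  MockCollector cms{true};                 // this cycle unloads classes
  MockReferenceProcessor rp;
  // Caller-side wiring, analogous to the CMSCollector::preclean_work() hunk:
  rp.preclean_discovered_references(cms.should_unload_classes());
  assert(rp.last_should_unload_classes);
  return 0;
}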