Commit d5318109, authored by ysr

4965777: GC changes to support use of discovered field for pending references

Summary: If and when the reference handler thread is able to use the discovered field to link reference objects in its pending list, so will GC. In that case, GC will scan through this field once a reference object has been placed on the pending list, but will not scan that field before that stage, as the field is used by the concurrent GC thread to link discovered objects. When the ReferenceHandler thread does not use the discovered field for the purpose of linking the elements in the pending list, as is the case in older JDKs, the JVM falls back to the old behaviour of using the next field for that purpose.
Reviewed-by: jcoomes, mchung, stefank
Parent 8967afd6
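Before the diff, here is a minimal, self-contained sketch of the two pending-list linking schemes the summary describes. This is not HotSpot or JDK source: the struct name PendingRef and the functions drain_old_style/drain_new_style are invented for illustration only. The old scheme chains the pending list through the next field; the new scheme self-loops next (marking the Reference inactive) and chains the pending list through discovered.

// Illustrative sketch only -- not HotSpot or JDK code; all names are hypothetical.
#include <cstdio>

struct PendingRef {
  PendingRef* next;        // old scheme: pending-list link (non-NULL marks the ref inactive);
                           // new scheme: self-looped by GC to mark the ref inactive
  PendingRef* discovered;  // new scheme: pending-list link (also used by GC while building
                           // its discovered lists)
};

// Old scheme (pre-4965777 JDKs): at enqueue time GC copies the discovered link
// into next, so the handler walks the pending list through next; a self-loop
// terminates the list.
static void drain_old_style(PendingRef* head) {
  for (PendingRef* r = head; r != NULL; ) {
    PendingRef* following = (r->next == r) ? NULL : r->next;
    r->next = r;                          // keep the reference marked inactive
    std::printf("handling %p\n", (void*)r);
    r = following;
  }
}

// New scheme: GC self-loops next and leaves the pending list chained through
// discovered; the handler clears discovered as it goes, and a NULL discovered
// link terminates the list.
static void drain_new_style(PendingRef* head) {
  for (PendingRef* r = head; r != NULL; ) {
    PendingRef* following = r->discovered;
    r->discovered = NULL;
    std::printf("handling %p\n", (void*)r);
    r = following;
  }
}

int main() {
  // Two refs linked the old way: a.next -> b, b.next -> b (self-loop ends the list).
  PendingRef b_old = { NULL, NULL };   b_old.next = &b_old;
  PendingRef a_old = { &b_old, NULL };
  drain_old_style(&a_old);

  // Two refs linked the new way: next self-looped, discovered chains the list.
  PendingRef b_new = { NULL, NULL };   b_new.next = &b_new;
  PendingRef a_new = { NULL, &b_new }; a_new.next = &a_new;
  drain_new_style(&a_new);
  return 0;
}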
@@ -36,6 +36,7 @@
 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
+bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
 
 // List of discovered references.
 class DiscoveredList {
@@ -87,6 +88,7 @@ void ReferenceProcessor::init_statics() {
   guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
             RefDiscoveryPolicy == ReferentBasedDiscovery,
             "Unrecongnized RefDiscoveryPolicy");
+  _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
 }
 
 ReferenceProcessor::ReferenceProcessor(MemRegion span,
@@ -122,7 +124,7 @@ ReferenceProcessor::ReferenceProcessor(MemRegion span,
     _discoveredSoftRefs[i].set_head(NULL);
     _discoveredSoftRefs[i].set_length(0);
   }
-  // If we do barreirs, cache a copy of the barrier set.
+  // If we do barriers, cache a copy of the barrier set.
   if (discovered_list_needs_barrier) {
     _bs = Universe::heap()->barrier_set();
   }
@@ -307,46 +309,77 @@ bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecut
 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
                                                     HeapWord* pending_list_addr) {
   // Given a list of refs linked through the "discovered" field
-  // (java.lang.ref.Reference.discovered) chain them through the
-  // "next" field (java.lang.ref.Reference.next) and prepend
-  // to the pending list.
+  // (java.lang.ref.Reference.discovered), self-loop their "next" field
+  // thus distinguishing them from active References, then
+  // prepend them to the pending list.
+  // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
+  // the "next" field is used to chain the pending list, not the discovered
+  // field.
   if (TraceReferenceGC && PrintGCDetails) {
     gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
                            INTPTR_FORMAT, (address)refs_list.head());
   }
+
   oop obj = NULL;
-  oop next = refs_list.head();
-  // Walk down the list, copying the discovered field into
-  // the next field and clearing it.
-  while (obj != next) {
-    obj = next;
-    assert(obj->is_instanceRef(), "should be reference object");
-    next = java_lang_ref_Reference::discovered(obj);
-    if (TraceReferenceGC && PrintGCDetails) {
-      gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
-                             obj, next);
-    }
-    assert(java_lang_ref_Reference::next(obj) == NULL,
-           "The reference should not be enqueued");
-    if (next == obj) {  // obj is last
-      // Swap refs_list into pendling_list_addr and
-      // set obj's next to what we read from pending_list_addr.
-      oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
-      // Need oop_check on pending_list_addr above;
-      // see special oop-check code at the end of
-      // enqueue_discovered_reflists() further below.
-      if (old == NULL) {
-        // obj should be made to point to itself, since
-        // pending list was empty.
-        java_lang_ref_Reference::set_next(obj, obj);
-      } else {
-        java_lang_ref_Reference::set_next(obj, old);
-      }
-    } else {
-      java_lang_ref_Reference::set_next(obj, next);
-    }
-    java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
-  }
+  oop next_d = refs_list.head();
+  if (pending_list_uses_discovered_field()) { // New behaviour
+    // Walk down the list, self-looping the next field
+    // so that the References are not considered active.
+    while (obj != next_d) {
+      obj = next_d;
+      assert(obj->is_instanceRef(), "should be reference object");
+      next_d = java_lang_ref_Reference::discovered(obj);
+      if (TraceReferenceGC && PrintGCDetails) {
+        gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
+                               obj, next_d);
+      }
+      assert(java_lang_ref_Reference::next(obj) == NULL,
+             "Reference not active; should not be discovered");
+      // Self-loop next, so as to make Ref not active.
+      java_lang_ref_Reference::set_next(obj, obj);
+      if (next_d == obj) {  // obj is last
+        // Swap refs_list into pendling_list_addr and
+        // set obj's discovered to what we read from pending_list_addr.
+        oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
+        // Need oop_check on pending_list_addr above;
+        // see special oop-check code at the end of
+        // enqueue_discovered_reflists() further below.
+        java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL
+      }
+    }
+  } else { // Old behaviour
+    // Walk down the list, copying the discovered field into
+    // the next field and clearing the discovered field.
+    while (obj != next_d) {
+      obj = next_d;
+      assert(obj->is_instanceRef(), "should be reference object");
+      next_d = java_lang_ref_Reference::discovered(obj);
+      if (TraceReferenceGC && PrintGCDetails) {
+        gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
+                               obj, next_d);
+      }
+      assert(java_lang_ref_Reference::next(obj) == NULL,
+             "The reference should not be enqueued");
+      if (next_d == obj) {  // obj is last
+        // Swap refs_list into pendling_list_addr and
+        // set obj's next to what we read from pending_list_addr.
+        oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
+        // Need oop_check on pending_list_addr above;
+        // see special oop-check code at the end of
+        // enqueue_discovered_reflists() further below.
+        if (old == NULL) {
+          // obj should be made to point to itself, since
+          // pending list was empty.
+          java_lang_ref_Reference::set_next(obj, obj);
+        } else {
+          java_lang_ref_Reference::set_next(obj, old);
+        }
+      } else {
+        java_lang_ref_Reference::set_next(obj, next_d);
+      }
+      java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
+    }
+  }
 }
@@ -615,7 +648,7 @@ ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
   NOT_PRODUCT(
     if (PrintGCDetails && TraceReferenceGC) {
       gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
-        "discovered Refs by policy list " INTPTR_FORMAT,
+        "discovered Refs by policy, from list " INTPTR_FORMAT,
         iter.removed(), iter.processed(), (address)refs_list.head());
     }
   )
@@ -1115,20 +1148,16 @@ ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
   // here is when *discovered_addr is NULL (see the CAS further below),
   // so this will expand to nothing. As a result, we have manually
   // elided this out for G1, but left in the test for some future
-  // collector that might have need for a pre-barrier here.
-  if (_discovered_list_needs_barrier && !UseG1GC) {
-    if (UseCompressedOops) {
-      _bs->write_ref_field_pre((narrowOop*)discovered_addr, next_discovered);
-    } else {
-      _bs->write_ref_field_pre((oop*)discovered_addr, next_discovered);
-    }
-    guarantee(false, "Need to check non-G1 collector");
-  }
+  // collector that might have need for a pre-barrier here, e.g.:-
+  // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
+  assert(!_discovered_list_needs_barrier || UseG1GC,
+         "Need to check non-G1 collector: "
+         "may need a pre-write-barrier for CAS from NULL below");
   oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
                                                     NULL);
   if (retest == NULL) {
     // This thread just won the right to enqueue the object.
-    // We have separate lists for enqueueing so no synchronization
+    // We have separate lists for enqueueing, so no synchronization
     // is necessary.
     refs_list.set_head(obj);
     refs_list.inc_length(1);
@@ -1137,14 +1166,14 @@ ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
     }
 
     if (TraceReferenceGC) {
-      gclog_or_tty->print_cr("Enqueued reference (mt) (" INTPTR_FORMAT ": %s)",
+      gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
                              obj, obj->blueprint()->internal_name());
     }
   } else {
     // If retest was non NULL, another thread beat us to it:
    // The reference has already been discovered...
    if (TraceReferenceGC) {
-      gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
+      gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
                              obj, obj->blueprint()->internal_name());
     }
   }
@@ -1169,7 +1198,7 @@ void ReferenceProcessor::verify_referent(oop obj) {
 // (or part of the heap being collected, indicated by our "span"
 // we don't treat it specially (i.e. we scan it as we would
 // a normal oop, treating its references as strong references).
-// This means that references can't be enqueued unless their
+// This means that references can't be discovered unless their
 // referent is also in the same span. This is the simplest,
 // most "local" and most conservative approach, albeit one
 // that may cause weak references to be enqueued least promptly.
@@ -1191,14 +1220,13 @@ void ReferenceProcessor::verify_referent(oop obj) {
 // and complexity in processing these references.
 // We call this choice the "RefeferentBasedDiscovery" policy.
 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
-  // We enqueue references only if we are discovering refs
-  // (rather than processing discovered refs).
+  // Make sure we are discovering refs (rather than processing discovered refs).
   if (!_discovering_refs || !RegisterReferences) {
     return false;
   }
 
-  // We only enqueue active references.
+  // We only discover active references.
   oop next = java_lang_ref_Reference::next(obj);
-  if (next != NULL) {
+  if (next != NULL) {   // Ref is no longer active
     return false;
   }
@@ -1211,8 +1239,8 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
     return false;
   }
 
-  // We only enqueue references whose referents are not (yet) strongly
-  // reachable.
+  // We only discover references whose referents are not (yet)
+  // known to be strongly reachable.
   if (is_alive_non_header() != NULL) {
     verify_referent(obj);
     if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
@@ -1238,7 +1266,7 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
   if (discovered != NULL) {
     // The reference has already been discovered...
     if (TraceReferenceGC) {
-      gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
+      gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
                              obj, obj->blueprint()->internal_name());
     }
     if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
@@ -1260,9 +1288,9 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
   if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
     verify_referent(obj);
-    // enqueue if and only if either:
-    // reference is in our span or
-    // we are an atomic collector and referent is in our span
+    // Discover if and only if EITHER:
+    // .. reference is in our span, OR
+    // .. we are an atomic collector and referent is in our span
     if (_span.contains(obj_addr) ||
         (discovery_is_atomic() &&
          _span.contains(java_lang_ref_Reference::referent(obj)))) {
@@ -1294,15 +1322,10 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
     // As in the case further above, since we are over-writing a NULL
     // pre-value, we can safely elide the pre-barrier here for the case of G1.
+    // e.g.:- _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
     assert(discovered == NULL, "control point invariant");
-    if (_discovered_list_needs_barrier && !UseG1GC) { // safe to elide for G1
-      if (UseCompressedOops) {
-        _bs->write_ref_field_pre((narrowOop*)discovered_addr, next_discovered);
-      } else {
-        _bs->write_ref_field_pre((oop*)discovered_addr, next_discovered);
-      }
-      guarantee(false, "Need to check non-G1 collector");
-    }
+    assert(!_discovered_list_needs_barrier || UseG1GC,
+           "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
     oop_store_raw(discovered_addr, next_discovered);
     if (_discovered_list_needs_barrier) {
       _bs->write_ref_field((void*)discovered_addr, next_discovered);
@@ -1311,11 +1334,11 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
     list->inc_length(1);
 
     if (TraceReferenceGC) {
-      gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)",
+      gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
                              obj, obj->blueprint()->internal_name());
     }
   }
-  assert(obj->is_oop(), "Enqueued a bad reference");
+  assert(obj->is_oop(), "Discovered a bad reference");
   verify_referent(obj);
   return true;
 }
......
@@ -52,6 +52,8 @@ class DiscoveredList;
 
 class ReferenceProcessor : public CHeapObj {
  protected:
+  // Compatibility with pre-4965777 JDK's
+  static bool _pending_list_uses_discovered_field;
   MemRegion   _span;               // (right-open) interval of heap
                                    // subject to wkref discovery
   bool        _discovering_refs;   // true when discovery enabled
@@ -111,7 +113,6 @@ class ReferenceProcessor : public CHeapObj {
     return _current_soft_ref_policy;
   }
 
- public:
   // Process references with a certain reachability level.
   void process_discovered_reflist(DiscoveredList  refs_lists[],
                                   ReferencePolicy* policy,
@@ -297,6 +298,13 @@ class ReferenceProcessor : public CHeapObj {
   bool discovery_is_atomic() const { return _discovery_is_atomic; }
   void set_atomic_discovery(bool atomic) { _discovery_is_atomic = atomic; }
 
+  // whether the JDK in which we are embedded is a pre-4965777 JDK,
+  // and thus whether or not it uses the discovered field to chain
+  // the entries in the pending list.
+  static bool pending_list_uses_discovered_field() {
+    return _pending_list_uses_discovered_field;
+  }
+
   // whether discovery is done by multiple threads same-old-timeously
   bool discovery_is_mt() const { return _discovery_is_mt; }
   void set_mt_discovery(bool mt) { _discovery_is_mt = mt; }
......
@@ -56,9 +56,8 @@ static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
   if (!oopDesc::is_null(heap_oop)) {
     oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
     if (!referent->is_gc_marked() &&
-        MarkSweep::ref_processor()->
-          discover_reference(obj, ref->reference_type())) {
-      // reference already enqueued, referent will be traversed later
+        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
+      // reference was discovered, referent will be traversed later
       ref->instanceKlass::oop_follow_contents(obj);
       debug_only(
         if(TraceReferenceGC && PrintGCDetails) {
@@ -76,8 +75,34 @@ static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
       MarkSweep::mark_and_push(referent_addr);
     }
   }
-  // treat next as normal oop.  next is a link in the pending list.
   T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
+  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
+    // Treat discovered as normal oop, if ref is not "active",
+    // i.e. if next is non-NULL.
+    T  next_oop = oopDesc::load_heap_oop(next_addr);
+    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
+      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
+      debug_only(
+        if(TraceReferenceGC && PrintGCDetails) {
+          gclog_or_tty->print_cr("   Process discovered as normal "
+                                 INTPTR_FORMAT, discovered_addr);
+        }
+      )
+      MarkSweep::mark_and_push(discovered_addr);
+    }
+  } else {
+#ifdef ASSERT
+    // In the case of older JDKs which do not use the discovered
+    // field for the pending list, an inactive ref (next != NULL)
+    // must always have a NULL discovered field.
+    oop next = oopDesc::load_decode_heap_oop(next_addr);
+    oop discovered = java_lang_ref_Reference::discovered(obj);
+    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
+           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
+                   obj));
+#endif
+  }
+  // treat next as normal oop.  next is a link in the reference queue.
   debug_only(
     if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
@@ -130,13 +155,33 @@ void specialized_oop_follow_contents(instanceRefKlass* ref,
       PSParallelCompact::mark_and_push(cm, referent_addr);
     }
   }
-  // treat next as normal oop.  next is a link in the pending list.
   T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
-  debug_only(
-    if(TraceReferenceGC && PrintGCDetails) {
-      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
-    }
-  )
+  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
+    // Treat discovered as normal oop, if ref is not "active",
+    // i.e. if next is non-NULL.
+    T  next_oop = oopDesc::load_heap_oop(next_addr);
+    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
+      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
+      debug_only(
+        if(TraceReferenceGC && PrintGCDetails) {
+          gclog_or_tty->print_cr("   Process discovered as normal "
+                                 INTPTR_FORMAT, discovered_addr);
+        }
+      )
+      PSParallelCompact::mark_and_push(cm, discovered_addr);
+    }
+  } else {
+#ifdef ASSERT
+    // In the case of older JDKs which do not use the discovered
+    // field for the pending list, an inactive ref (next != NULL)
+    // must always have a NULL discovered field.
+    T next = oopDesc::load_heap_oop(next_addr);
+    oop discovered = java_lang_ref_Reference::discovered(obj);
+    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
+           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
+                   obj));
+#endif
+  }
   PSParallelCompact::mark_and_push(cm, next_addr);
   ref->instanceKlass::oop_follow_contents(cm, obj);
 }
@@ -197,27 +242,53 @@ int instanceRefKlass::oop_adjust_pointers(oop obj) {
 }
 
 #define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains) \
+  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); \
   if (closure->apply_to_weak_ref_discovered_field()) { \
-    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); \
     closure->do_oop##nv_suffix(disc_addr); \
   } \
 \
   T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); \
   T heap_oop = oopDesc::load_heap_oop(referent_addr); \
-  if (!oopDesc::is_null(heap_oop) && contains(referent_addr)) { \
-    ReferenceProcessor* rp = closure->_ref_processor; \
+  ReferenceProcessor* rp = closure->_ref_processor; \
+  if (!oopDesc::is_null(heap_oop)) { \
     oop referent = oopDesc::decode_heap_oop_not_null(heap_oop); \
     if (!referent->is_gc_marked() && (rp != NULL) && \
        rp->discover_reference(obj, reference_type())) { \
      return size; \
-    } else { \
+    } else if (contains(referent_addr)) { \
      /* treat referent as normal oop */ \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr); \
    } \
  } \
-  /* treat next as normal oop */ \
   T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); \
+  if (ReferenceProcessor::pending_list_uses_discovered_field()) { \
+    T  next_oop = oopDesc::load_heap_oop(next_addr); \
+    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
+    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) { \
+        /* i.e. ref is not "active" */ \
+      debug_only( \
+        if(TraceReferenceGC && PrintGCDetails) { \
+          gclog_or_tty->print_cr("   Process discovered as normal " \
+                                 INTPTR_FORMAT, disc_addr); \
+        } \
+      ) \
+      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
+      closure->do_oop##nv_suffix(disc_addr); \
+    } \
+  } else { \
+    /* In the case of older JDKs which do not use the discovered field for */ \
+    /* the pending list, an inactive ref (next != NULL) must always have a */ \
+    /* NULL discovered field. */ \
+    debug_only( \
+      T next_oop = oopDesc::load_heap_oop(next_addr); \
+      T disc_oop = oopDesc::load_heap_oop(disc_addr); \
+      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop), \
+             err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL" \
+                     "discovered field", obj)); \
+    ) \
+  } \
+  /* treat next as normal oop */ \
   if (contains(next_addr)) { \
     SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
     closure->do_oop##nv_suffix(next_addr); \
@@ -306,8 +377,37 @@ void specialized_oop_push_contents(instanceRefKlass *ref,
       pm->claim_or_forward_depth(referent_addr);
     }
   }
-  // treat next as normal oop
+  // Treat discovered as normal oop, if ref is not "active",
+  // i.e. if next is non-NULL.
   T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
+  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
+    T  next_oop = oopDesc::load_heap_oop(next_addr);
+    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
+      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
+      debug_only(
+        if(TraceReferenceGC && PrintGCDetails) {
+          gclog_or_tty->print_cr("   Process discovered as normal "
+                                 INTPTR_FORMAT, discovered_addr);
+        }
+      )
+      if (PSScavenge::should_scavenge(discovered_addr)) {
+        pm->claim_or_forward_depth(discovered_addr);
+      }
+    }
+  } else {
+#ifdef ASSERT
+    // In the case of older JDKs which do not use the discovered
+    // field for the pending list, an inactive ref (next != NULL)
+    // must always have a NULL discovered field.
+    oop next = oopDesc::load_decode_heap_oop(next_addr);
+    oop discovered = java_lang_ref_Reference::discovered(obj);
+    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
+           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
+                   obj));
+#endif
+  }
+  // Treat next as normal oop; next is a link in the reference queue.
   if (PSScavenge::should_scavenge(next_addr)) {
     pm->claim_or_forward_depth(next_addr);
   }
......
@@ -1650,7 +1650,8 @@
      */
     unsigned int thread_park_blocker : 1;
     unsigned int post_vm_init_hook_enabled : 1;
-    unsigned int : 30;
+    unsigned int pending_list_uses_discovered_field : 1;
+    unsigned int : 29;
     unsigned int : 32;
     unsigned int : 32;
 } jdk_version_info;
......
@@ -672,7 +672,8 @@ void JDK_Version::initialize() {
 
     _current = JDK_Version(major, minor, micro, info.update_version,
                            info.special_update_version, build,
                            info.thread_park_blocker == 1,
-                           info.post_vm_init_hook_enabled == 1);
+                           info.post_vm_init_hook_enabled == 1,
+                           info.pending_list_uses_discovered_field == 1);
   }
 }
......
@@ -92,6 +92,7 @@ class JDK_Version VALUE_OBJ_CLASS_SPEC {
 
   bool _partially_initialized;
   bool _thread_park_blocker;
+  bool _pending_list_uses_discovered_field;
   bool _post_vm_init_hook_enabled;
 
   bool is_valid() const {
@@ -114,15 +115,18 @@ class JDK_Version VALUE_OBJ_CLASS_SPEC {
 
   JDK_Version() : _major(0), _minor(0), _micro(0), _update(0),
                   _special(0), _build(0), _partially_initialized(false),
-                  _thread_park_blocker(false), _post_vm_init_hook_enabled(false) {}
+                  _thread_park_blocker(false), _post_vm_init_hook_enabled(false),
+                  _pending_list_uses_discovered_field(false) {}
 
   JDK_Version(uint8_t major, uint8_t minor = 0, uint8_t micro = 0,
               uint8_t update = 0, uint8_t special = 0, uint8_t build = 0,
-              bool thread_park_blocker = false, bool post_vm_init_hook_enabled = false) :
+              bool thread_park_blocker = false, bool post_vm_init_hook_enabled = false,
+              bool pending_list_uses_discovered_field = false) :
       _major(major), _minor(minor), _micro(micro), _update(update),
       _special(special), _build(build), _partially_initialized(false),
       _thread_park_blocker(thread_park_blocker),
-      _post_vm_init_hook_enabled(post_vm_init_hook_enabled) {}
+      _post_vm_init_hook_enabled(post_vm_init_hook_enabled),
+      _pending_list_uses_discovered_field(pending_list_uses_discovered_field) {}
 
   // Returns the current running JDK version
   static JDK_Version current() { return _current; }
@@ -149,6 +153,10 @@ class JDK_Version VALUE_OBJ_CLASS_SPEC {
   bool post_vm_init_hook_enabled() const {
     return _post_vm_init_hook_enabled;
   }
+  // For compatibility wrt pre-4965777 JDK's
+  bool pending_list_uses_discovered_field() const {
+    return _pending_list_uses_discovered_field;
+  }
 
   // Performs a full ordering comparison using all fields (update, build, etc.)
   int compare(const JDK_Version& other) const;
......