Commit 975dcb15 authored by P pliden

8031703: Missing post-barrier in ReferenceProcessor

Reviewed-by: tonyp, tschatzl
Parent 07b78379
@@ -2266,7 +2266,7 @@ void G1CollectedHeap::ref_processing_init() {
// (for efficiency/performance)
false);
// Setting next fields of discovered
-// lists requires a barrier.
+// lists does not require a barrier.
}
size_t G1CollectedHeap::capacity() const {
......
@@ -95,11 +95,11 @@ ReferenceProcessor::ReferenceProcessor(MemRegion span,
uint mt_discovery_degree,
bool atomic_discovery,
BoolObjectClosure* is_alive_non_header,
-bool discovered_list_needs_barrier) :
+bool discovered_list_needs_post_barrier) :
_discovering_refs(false),
_enqueuing_is_done(false),
_is_alive_non_header(is_alive_non_header),
-_discovered_list_needs_barrier(discovered_list_needs_barrier),
+_discovered_list_needs_post_barrier(discovered_list_needs_post_barrier),
_processing_is_mt(mt_processing),
_next_id(0)
{
@@ -490,13 +490,13 @@ void DiscoveredListIterator::remove() {
} else {
new_next = _next;
}
-if (UseCompressedOops) {
-// Remove Reference object from list.
-oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
-} else {
-// Remove Reference object from list.
-oopDesc::store_heap_oop((oop*)_prev_next, new_next);
+// Remove Reference object from discovered list. Note that G1 does not need a
+// pre-barrier here because we know the Reference has already been found/marked,
+// that's how it ended up in the discovered list in the first place.
+oop_store_raw(_prev_next, new_next);
+if (_discovered_list_needs_post_barrier && _prev_next != _refs_list.adr_head()) {
+// Needs post-barrier and this is not the list head (which is not on the heap)
+oopDesc::bs()->write_ref_field(_prev_next, new_next);
}
NOT_PRODUCT(_removed++);
_refs_list.dec_length(1);
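
(Illustrative, not part of the change above.) To show why the unlink in DiscoveredListIterator::remove() now issues a write_ref_field call, here is a minimal, self-contained C++ sketch of a card-table style post-barrier guarding a discovered-list splice. All type and helper names below are assumptions for illustration, not HotSpot APIs; the point carried over from the diff is only that the store gets a post-barrier when the updated slot is a heap field, but not when it is the off-heap list head.

// Sketch only: simplified stand-ins, not HotSpot types.
#include <cstdint>
#include <cstddef>

struct RefSketch;                       // stand-in for a java.lang.ref.Reference
typedef RefSketch* oop_sketch;          // stand-in for HotSpot's oop

struct CardTableSketch {
  static const size_t card_shift = 9;   // 512-byte cards, as in HotSpot's card table
  uint8_t* byte_map_base;               // assumed to be set up by the collector
  // Post-barrier: dirty the card covering the updated field so the next
  // remembered-set scan revisits that location.
  void write_ref_field(void* field_addr) {
    byte_map_base[reinterpret_cast<uintptr_t>(field_addr) >> card_shift] = 0;
  }
};

// Unlink a Reference from a discovered list by storing 'new_next' into the
// slot that pointed at it ('prev_slot'). No pre-barrier is needed (the old
// value was already discovered/marked); a post-barrier is needed only when
// the slot is a heap field, not when it is the list head kept outside the heap.
void unlink_discovered_sketch(oop_sketch* prev_slot, oop_sketch new_next,
                              oop_sketch* list_head_addr,
                              bool needs_post_barrier, CardTableSketch& ct) {
  *prev_slot = new_next;                              // raw store
  if (needs_post_barrier && prev_slot != list_head_addr) {
    ct.write_ref_field(prev_slot);                    // record the new heap edge
  }
}
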
@@ -544,7 +544,7 @@ ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
OopClosure* keep_alive,
VoidClosure* complete_gc) {
assert(policy != NULL, "Must have a non-NULL policy");
-DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
// Decide which softly reachable refs should be kept alive.
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
@@ -584,7 +584,7 @@ ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
BoolObjectClosure* is_alive,
OopClosure* keep_alive) {
assert(discovery_is_atomic(), "Error");
-DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
@@ -621,7 +621,7 @@ ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
OopClosure* keep_alive,
VoidClosure* complete_gc) {
assert(!discovery_is_atomic(), "Error");
-DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
@@ -664,7 +664,7 @@ ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
OopClosure* keep_alive,
VoidClosure* complete_gc) {
ResourceMark rm;
-DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
while (iter.has_next()) {
iter.update_discovered();
iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
@@ -782,8 +782,8 @@ private:
void ReferenceProcessor::set_discovered(oop ref, oop value) {
java_lang_ref_Reference::set_discovered_raw(ref, value);
-if (_discovered_list_needs_barrier) {
-oopDesc::bs()->write_ref_field(ref, value);
+if (_discovered_list_needs_post_barrier) {
+oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(ref), value);
}
}
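
(Illustrative, not part of the commit.) The set_discovered() fix above changes what is handed to the post-barrier: the address of the updated discovered field rather than the Reference object itself, because a post-barrier is keyed by the location of the store. A small hedged sketch of that distinction, with made-up names:

// Sketch only: the post-barrier must receive the written field's address
// (&ref->discovered), not the object 'ref' that happens to contain it.
struct RefSketch2 { RefSketch2* discovered; };

void set_discovered_sketch(RefSketch2* ref, RefSketch2* value,
                           bool needs_post_barrier,
                           void (*post_barrier)(void* field_addr)) {
  ref->discovered = value;              // raw store of the list link
  if (needs_post_barrier) {
    post_barrier(&ref->discovered);     // barrier on the field, not on 'ref'
  }
}
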
@@ -980,7 +980,7 @@ void ReferenceProcessor::clean_up_discovered_references() {
void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
assert(!discovery_is_atomic(), "Else why call this method?");
-DiscoveredListIterator iter(refs_list, NULL, NULL);
+DiscoveredListIterator iter(refs_list, NULL, NULL, _discovered_list_needs_post_barrier);
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
oop next = java_lang_ref_Reference::next(iter.obj());
@@ -1076,7 +1076,7 @@ ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
// elided this out for G1, but left in the test for some future
// collector that might have need for a pre-barrier here, e.g.:-
// oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
-assert(!_discovered_list_needs_barrier || UseG1GC,
+assert(!_discovered_list_needs_post_barrier || UseG1GC,
"Need to check non-G1 collector: "
"may need a pre-write-barrier for CAS from NULL below");
oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
@@ -1087,7 +1087,7 @@ ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
// is necessary.
refs_list.set_head(obj);
refs_list.inc_length(1);
-if (_discovered_list_needs_barrier) {
+if (_discovered_list_needs_post_barrier) {
oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
}
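
(Illustrative, not part of the commit.) The MT discovery path above claims the Reference with a CAS on its discovered field (from NULL), which is why the pre-barrier can be elided, and applies the post-barrier only on the winning thread. A self-contained sketch of that pattern using std::atomic; the names and the barrier callback are assumptions:

// Sketch only: multi-threaded discovery. Several GC workers may race to
// discover the same Reference; the CAS from NULL decides the winner.
#include <atomic>

struct MTRefSketch { std::atomic<MTRefSketch*> discovered{nullptr}; };

// In HotSpot each worker pushes onto its own list, so updating the head needs
// no further synchronization; this sketch keeps that assumption.
bool discover_mt_sketch(MTRefSketch* obj, MTRefSketch*& my_list_head,
                        bool needs_post_barrier,
                        void (*post_barrier)(void* field_addr)) {
  MTRefSketch* current_head = my_list_head;
  // A Reference whose discovered field points to itself marks the end of a list.
  MTRefSketch* next_discovered = (current_head != nullptr) ? current_head : obj;
  MTRefSketch* expected = nullptr;
  if (!obj->discovered.compare_exchange_strong(expected, next_discovered)) {
    return false;                       // another thread already discovered 'obj'
  }
  my_list_head = obj;                   // this thread won: publish the new head
  if (needs_post_barrier) {
    post_barrier(&obj->discovered);     // old value was NULL, so only a post-barrier
  }
  return true;
}
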
@@ -1240,7 +1240,7 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
if (_discovery_is_mt) {
add_to_discovered_list_mt(*list, obj, discovered_addr);
} else {
-// If "_discovered_list_needs_barrier", we do write barriers when
+// If "_discovered_list_needs_post_barrier", we do write barriers when
// updating the discovered reference list. Otherwise, we do a raw store
// here: the field will be visited later when processing the discovered
// references.
@@ -1252,10 +1252,10 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
// pre-value, we can safely elide the pre-barrier here for the case of G1.
// e.g.:- oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
assert(discovered == NULL, "control point invariant");
-assert(!_discovered_list_needs_barrier || UseG1GC,
+assert(!_discovered_list_needs_post_barrier || UseG1GC,
"For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
oop_store_raw(discovered_addr, next_discovered);
-if (_discovered_list_needs_barrier) {
+if (_discovered_list_needs_post_barrier) {
oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
}
list->set_head(obj);
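
(Illustrative, not part of the commit.) The single-threaded discovery path above is the same idea without the CAS: a raw store of the old head into the discovered field, a conditional post-barrier, then the head update. A short sketch with made-up types:

// Sketch only: single-threaded discovery, so a plain store suffices.
struct STRefSketch { STRefSketch* discovered = nullptr; };

void discover_st_sketch(STRefSketch* obj, STRefSketch*& list_head,
                        bool needs_post_barrier,
                        void (*post_barrier)(void* field_addr)) {
  // Self-loop marks the end of the list when it is currently empty.
  STRefSketch* next_discovered = (list_head != nullptr) ? list_head : obj;
  obj->discovered = next_discovered;    // raw store; the old value was NULL,
  if (needs_post_barrier) {             // hence no pre-barrier is required
    post_barrier(&obj->discovered);     // only the post-barrier for the new edge
  }
  list_head = obj;
}
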
@@ -1351,7 +1351,7 @@ ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
OopClosure* keep_alive,
VoidClosure* complete_gc,
YieldClosure* yield) {
-DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
oop obj = iter.obj();
......
@@ -99,6 +99,7 @@ private:
oop _referent;
OopClosure* _keep_alive;
BoolObjectClosure* _is_alive;
+bool _discovered_list_needs_post_barrier;
DEBUG_ONLY(
oop _first_seen; // cyclic linked list check
@@ -112,7 +113,8 @@ private:
public:
inline DiscoveredListIterator(DiscoveredList& refs_list,
OopClosure* keep_alive,
-BoolObjectClosure* is_alive):
+BoolObjectClosure* is_alive,
+bool discovered_list_needs_post_barrier = false):
_refs_list(refs_list),
_prev_next(refs_list.adr_head()),
_prev(NULL),
@@ -126,7 +128,8 @@ public:
#endif
_next(NULL),
_keep_alive(keep_alive),
-_is_alive(is_alive)
+_is_alive(is_alive),
+_discovered_list_needs_post_barrier(discovered_list_needs_post_barrier)
{ }
// End Of List.
@@ -228,12 +231,12 @@ class ReferenceProcessor : public CHeapObj<mtGC> {
bool _discovery_is_mt; // true if reference discovery is MT.
// If true, setting "next" field of a discovered refs list requires
-// write barrier(s). (Must be true if used in a collector in which
+// write post barrier. (Must be true if used in a collector in which
// elements of a discovered list may be moved during discovery: for
// example, a collector like Garbage-First that moves objects during a
// long-term concurrent marking phase that does weak reference
// discovery.)
-bool _discovered_list_needs_barrier;
+bool _discovered_list_needs_post_barrier;
bool _enqueuing_is_done; // true if all weak references enqueued
bool _processing_is_mt; // true during phases when
@@ -380,8 +383,8 @@ class ReferenceProcessor : public CHeapObj<mtGC> {
protected:
// Set the 'discovered' field of the given reference to
-// the given value - emitting barriers depending upon
-// the value of _discovered_list_needs_barrier.
+// the given value - emitting post barriers depending upon
+// the value of _discovered_list_needs_post_barrier.
void set_discovered(oop ref, oop value);
// "Preclean" the given discovered reference list
@@ -425,7 +428,7 @@ class ReferenceProcessor : public CHeapObj<mtGC> {
bool mt_discovery = false, uint mt_discovery_degree = 1,
bool atomic_discovery = true,
BoolObjectClosure* is_alive_non_header = NULL,
-bool discovered_list_needs_barrier = false);
+bool discovered_list_needs_post_barrier = false);
// RefDiscoveryPolicy values
enum DiscoveryPolicy {
......
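
(Illustrative, not part of the commit.) The renamed constructor parameter above defaults to false; per the class comment in this diff, a collector that may move objects while reference discovery is in progress, such as G1 during its long concurrent marking phase, passes true. A hedged usage sketch of the call shape only; the leading argument values are placeholders and not copied from the G1 sources, and this fragment assumes the surrounding HotSpot declarations:

// Sketch only: constructing a ReferenceProcessor whose discovered lists need a
// post-barrier. Argument values below are placeholders for illustration.
ReferenceProcessor* rp =
  new ReferenceProcessor(heap_span,           // MemRegion covered by the heap (placeholder)
                         true, workers,       // MT reference processing and its degree (placeholder)
                         true, workers,       // MT discovery and its degree (placeholder)
                         false,               // discovery is not atomic (runs concurrently)
                         &is_alive_closure,   // non-header is-alive closure (placeholder)
                         true);               // discovered lists need a post barrier
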