Commit f676ecf8 authored by asaha

Merge

......@@ -758,3 +758,4 @@ ab64d7ea4f48ea4bdbcc43d4a653be157d9c29e3 jdk8u66-b12
 a6f2a7ba281291f5dab79fa494f7cfaa6232c88b jdk8u66-b17
 b8f426369187c32551f0a3d571d933908988c81c jdk8u72-b00
 c0205eddb31766ece562483595ec28a7506971e9 jdk8u72-b01
+15ef554f2f2e0a8d7c330191432fcd2126d19dab jdk8u72-b02
......@@ -521,15 +521,17 @@ void CodeCache::gc_prologue() {
 void CodeCache::gc_epilogue() {
   assert_locked_or_safepoint(CodeCache_lock);
-  FOR_ALL_ALIVE_BLOBS(cb) {
-    if (cb->is_nmethod()) {
-      nmethod *nm = (nmethod*)cb;
-      assert(!nm->is_unloaded(), "Tautology");
-      if (needs_cache_clean()) {
-        nm->cleanup_inline_caches();
+  NOT_DEBUG(if (needs_cache_clean())) {
+    FOR_ALL_ALIVE_BLOBS(cb) {
+      if (cb->is_nmethod()) {
+        nmethod *nm = (nmethod*)cb;
+        assert(!nm->is_unloaded(), "Tautology");
+        DEBUG_ONLY(if (needs_cache_clean())) {
+          nm->cleanup_inline_caches();
+        }
+        DEBUG_ONLY(nm->verify());
+        DEBUG_ONLY(nm->verify_oop_relocations());
       }
-      DEBUG_ONLY(nm->verify());
-      DEBUG_ONLY(nm->verify_oop_relocations());
     }
   }
   set_needs_cache_clean(false);
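
The hunk above changes which parts of gc_epilogue are compiled into product versus debug builds: in product builds the whole loop is now skipped unless cache cleaning is needed, while debug builds always iterate so that verification still covers every nmethod. A minimal, self-contained sketch of this macro pattern (keyed off NDEBUG here for illustration; HotSpot's actual DEBUG_ONLY/NOT_DEBUG macros live in utilities/debug.hpp and key off ASSERT):

```cpp
#include <cstdio>

// Simplified stand-ins for HotSpot's DEBUG_ONLY/NOT_DEBUG macros.
#ifdef NDEBUG
  #define DEBUG_ONLY(code)
  #define NOT_DEBUG(code) code
#else
  #define DEBUG_ONLY(code) code
  #define NOT_DEBUG(code)
#endif

static bool needs_cache_clean() { return false; }  // placeholder predicate

int main() {
  // Product build: the guard below compiles in, so the block is skipped
  // when no cleaning is needed. Debug build: the guard compiles away, the
  // block always runs, and the check moves inside it.
  NOT_DEBUG(if (needs_cache_clean())) {
    DEBUG_ONLY(if (needs_cache_clean())) {
      std::puts("cleanup_inline_caches()");
    }
    DEBUG_ONLY(std::puts("verify()"));
  }
  return 0;
}
```
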
......@@ -734,27 +736,6 @@ int CodeCache::mark_for_deoptimization(Method* dependee) {
   return number_of_marked_CodeBlobs;
 }
 
-void CodeCache::make_marked_nmethods_zombies() {
-  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
-  FOR_ALL_ALIVE_NMETHODS(nm) {
-    if (nm->is_marked_for_deoptimization()) {
-
-      // If the nmethod has already been made non-entrant and it can be converted
-      // then zombie it now. Otherwise make it non-entrant and it will eventually
-      // be zombied when it is no longer seen on the stack. Note that the nmethod
-      // might be "entrant" and not on the stack and so could be zombied immediately
-      // but we can't tell because we don't track it on stack until it becomes
-      // non-entrant.
-
-      if (nm->is_not_entrant() && nm->can_not_entrant_be_converted()) {
-        nm->make_zombie();
-      } else {
-        nm->make_not_entrant();
-      }
-    }
-  }
-}
-
 void CodeCache::make_marked_nmethods_not_entrant() {
   assert_locked_or_safepoint(CodeCache_lock);
   FOR_ALL_ALIVE_NMETHODS(nm) {
......
......@@ -179,7 +179,6 @@ class CodeCache : AllStatic {
   static void mark_all_nmethods_for_deoptimization();
   static int  mark_for_deoptimization(Method* dependee);
-  static void make_marked_nmethods_zombies();
   static void make_marked_nmethods_not_entrant();
 
   // tells how many nmethods have dependencies
......
......@@ -155,6 +155,14 @@ address CompiledIC::stub_address() const {
   return _ic_call->destination();
 }
 
+// Clears the IC stub if the compiled IC is in transition state
+void CompiledIC::clear_ic_stub() {
+  if (is_in_transition_state()) {
+    ICStub* stub = ICStub_from_destination_address(stub_address());
+    stub->clear();
+  }
+}
+
 
 //-----------------------------------------------------------------------------
 // High-level access to an inline cache. Guaranteed to be MT-safe.
......@@ -279,6 +287,7 @@ bool CompiledIC::is_call_to_compiled() const {
   assert( is_c1_method ||
          !is_monomorphic ||
          is_optimized() ||
+         !caller->is_alive() ||
          (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
 #endif // ASSERT
   return is_monomorphic;
......@@ -313,7 +322,7 @@ bool CompiledIC::is_call_to_interpreted() const {
 }
 
-void CompiledIC::set_to_clean() {
+void CompiledIC::set_to_clean(bool in_use) {
   assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked() , "MT-unsafe call");
   if (TraceInlineCacheClearing || TraceICs) {
     tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
......@@ -329,17 +338,14 @@ void CompiledIC::set_to_clean() {
   // A zombie transition will always be safe, since the metadata has already been set to NULL, so
   // we only need to patch the destination
-  bool safe_transition = is_optimized() || SafepointSynchronize::is_at_safepoint();
+  bool safe_transition = !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();
 
   if (safe_transition) {
     // Kill any leftover stub we might have too
-    if (is_in_transition_state()) {
-      ICStub* old_stub = ICStub_from_destination_address(stub_address());
-      old_stub->clear();
-    }
+    clear_ic_stub();
     if (is_optimized()) {
-    set_ic_destination(entry);
-  } else {
+      set_ic_destination(entry);
+    } else {
       set_ic_destination_and_value(entry, (void*)NULL);
     }
   } else {
......
......@@ -228,8 +228,9 @@ class CompiledIC: public ResourceObj {
   //
   // They all takes a TRAP argument, since they can cause a GC if the inline-cache buffer is full.
   //
-  void set_to_clean();  // Can only be called during a safepoint operation
+  void set_to_clean(bool in_use = true);
   void set_to_monomorphic(CompiledICInfo& info);
+  void clear_ic_stub();
 
   // Returns true if successful and false otherwise. The call can fail if memory
   // allocation in the code cache fails.
......
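
Taken together, the compiledIC changes let a caller tell set_to_clean whether the owning nmethod is still executable; when it is not, safe_transition holds and the IC can be patched in place without an ICStub handshake. A hedged, self-contained model of that decision (ICSite and its fields are illustrative names, not HotSpot's API; only the predicate mirrors the diff):

```cpp
#include <iostream>

// Illustrative model of the safe_transition predicate added above.
struct ICSite {
  bool in_use;        // owning nmethod still executable? (the new parameter)
  bool optimized;     // optimized/static call site?
  bool at_safepoint;  // all Java threads stopped?
};

static bool safe_transition(const ICSite& s) {
  return !s.in_use || s.optimized || s.at_safepoint;
}

int main() {
  // A dying nmethod's ICs can always be cleaned directly ...
  std::cout << safe_transition({/*in_use=*/false, false, false}) << '\n';  // 1
  // ... while a live, non-optimized IC outside a safepoint must go
  // through an ICStub transition instead.
  std::cout << safe_transition({/*in_use=*/true, false, false}) << '\n';   // 0
  return 0;
}
```
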
......@@ -1148,9 +1148,20 @@ void nmethod::clear_inline_caches() {
 }
 }
 
+// Clear ICStubs of all compiled ICs
+void nmethod::clear_ic_stubs() {
+  assert_locked_or_safepoint(CompiledIC_lock);
+  RelocIterator iter(this);
+  while(iter.next()) {
+    if (iter.type() == relocInfo::virtual_call_type) {
+      CompiledIC* ic = CompiledIC_at(&iter);
+      ic->clear_ic_stub();
+    }
+  }
+}
+
 void nmethod::cleanup_inline_caches() {
-
   assert_locked_or_safepoint(CompiledIC_lock);
 
   // If the method is not entrant or zombie then a JMP is plastered over the
......@@ -1166,7 +1177,8 @@ void nmethod::cleanup_inline_caches() {
     // In fact, why are we bothering to look at oops in a non-entrant method??
   }
 
-  // Find all calls in an nmethod, and clear the ones that points to zombie methods
+  // Find all calls in an nmethod and clear the ones that point to non-entrant,
+  // zombie and unloaded nmethods.
   ResourceMark rm;
   RelocIterator iter(this, low_boundary);
   while(iter.next()) {
......@@ -1178,8 +1190,8 @@ void nmethod::cleanup_inline_caches() {
         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
         if( cb != NULL && cb->is_nmethod() ) {
           nmethod* nm = (nmethod*)cb;
-          // Clean inline caches pointing to both zombie and not_entrant methods
-          if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();
+          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
+          if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
         }
         break;
       }
......@@ -1188,7 +1200,7 @@ void nmethod::cleanup_inline_caches() {
         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
         if( cb != NULL && cb->is_nmethod() ) {
           nmethod* nm = (nmethod*)cb;
-          // Clean inline caches pointing to both zombie and not_entrant methods
+          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
         }
         break;
......@@ -1279,7 +1291,7 @@ void nmethod::mark_as_seen_on_stack() {
 // Tell if a non-entrant method can be converted to a zombie (i.e.,
 // there are no activations on the stack, not in use by the VM,
 // and not in use by the ServiceThread)
-bool nmethod::can_not_entrant_be_converted() {
+bool nmethod::can_convert_to_zombie() {
   assert(is_not_entrant(), "must be a non-entrant method");
 
   // Since the nmethod sweeper only does partial sweep the sweeper's traversal
......@@ -2695,7 +2707,7 @@ void nmethod::verify() {
   // Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
   // seems odd.
 
-  if( is_zombie() || is_not_entrant() )
+  if (is_zombie() || is_not_entrant() || is_unloaded())
     return;
 
   // Make sure all the entry points are correctly aligned for patching.
......
......@@ -577,6 +577,7 @@ public:
   // Inline cache support
   void clear_inline_caches();
+  void clear_ic_stubs();
   void cleanup_inline_caches();
   bool  inlinecache_check_contains(address addr) const {
     return (addr >= code_begin() && addr < verified_entry_point());
......@@ -604,7 +605,7 @@ public:
   // See comment at definition of _last_seen_on_stack
   void mark_as_seen_on_stack();
-  bool can_not_entrant_be_converted();
+  bool can_convert_to_zombie();
 
   // Evolution support. We make old (discarded) compiled methods point to new Method*s.
   void set_method(Method* method) { _method = method; }
......
......@@ -3751,7 +3751,7 @@ void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
     // Deoptimize all activations depending on marked nmethods
     Deoptimization::deoptimize_dependents();
 
-    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
+    // Make the dependent methods not entrant
     CodeCache::make_marked_nmethods_not_entrant();
 
     // From now on we know that the dependency information is complete
......
......@@ -538,10 +538,14 @@ int NMethodSweeper::process_nmethod(nmethod *nm) {
   } else if (nm->is_not_entrant()) {
     // If there are no current activations of this method on the
     // stack we can safely convert it to a zombie method
-    if (nm->can_not_entrant_be_converted()) {
+    if (nm->can_convert_to_zombie()) {
       if (PrintMethodFlushing && Verbose) {
         tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (not entrant) being made zombie", nm->compile_id(), nm);
       }
+      // Clear ICStubs to prevent back patching stubs of zombie or unloaded
+      // nmethods during the next safepoint (see ICStub::finalize).
+      MutexLocker cl(CompiledIC_lock);
+      nm->clear_ic_stubs();
       // Code cache state change is tracked in make_zombie()
       nm->make_zombie();
       _zombified_count++;
......@@ -567,6 +571,12 @@ int NMethodSweeper::process_nmethod(nmethod *nm) {
       release_nmethod(nm);
       _flushed_count++;
     } else {
+      {
+        // Clean ICs of unloaded nmethods as well because they may reference other
+        // unloaded nmethods that may be flushed earlier in the sweeper cycle.
+        MutexLocker cl(CompiledIC_lock);
+        nm->cleanup_inline_caches();
+      }
       // Code cache state change is tracked in make_zombie()
       nm->make_zombie();
       _zombified_count++;
......
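
The sweeper hunks above establish an ordering: before an nmethod becomes a zombie, any ICStubs that could still back-patch it at the next safepoint are cleared under CompiledIC_lock, and only then is the state changed. A minimal sketch of that ordering (Nmethod and compiled_ic_lock are hypothetical stand-ins, not the real sweeper types):

```cpp
#include <iostream>
#include <mutex>

std::mutex compiled_ic_lock;  // stands in for HotSpot's CompiledIC_lock

// Hypothetical stand-in for an nmethod, for illustration only.
struct Nmethod {
  bool has_ic_stubs = true;
  bool zombie = false;
  void clear_ic_stubs() { has_ic_stubs = false; }
  void make_zombie() { zombie = true; }
};

// Mirrors the order the patch enforces: stubs first, state change second.
void zombify(Nmethod& nm) {
  {
    std::lock_guard<std::mutex> cl(compiled_ic_lock);
    nm.clear_ic_stubs();  // nothing can back-patch nm after this point
  }
  nm.make_zombie();
}

int main() {
  Nmethod nm;
  zombify(nm);
  std::cout << "stubs cleared: " << std::boolalpha << !nm.has_ic_stubs
            << ", zombie: " << nm.zombie << '\n';
  return 0;
}
```
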
......@@ -106,8 +106,8 @@ void VM_Deoptimize::doit() {
   // Deoptimize all activations depending on marked nmethods
   Deoptimization::deoptimize_dependents();
 
-  // Make the dependent methods zombies
-  CodeCache::make_marked_nmethods_zombies();
+  // Make the dependent methods not entrant
+  CodeCache::make_marked_nmethods_not_entrant();
 }
......
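
Net effect of the vm_operations and codeCache changes: VM_Deoptimize no longer zombifies anything directly. It only makes marked nmethods not entrant, and the sweeper later performs the not_entrant-to-zombie transition once can_convert_to_zombie() proves no activation remains. A small state-machine sketch of that split (illustrative types, not HotSpot code; can_convert_to_zombie mirrors the renamed method in the diff):

```cpp
#include <cassert>
#include <iostream>

enum class State { in_use, not_entrant, zombie };

// Illustrative nmethod lifecycle; 'activations' models frames still on
// some thread's stack.
struct Code {
  State state = State::in_use;
  int activations = 0;

  void make_not_entrant() { state = State::not_entrant; }  // VM_Deoptimize's job
  bool can_convert_to_zombie() const {                     // the sweeper's check
    return state == State::not_entrant && activations == 0;
  }
  void make_zombie() {                                     // the sweeper's job
    assert(can_convert_to_zombie());
    state = State::zombie;
  }
};

int main() {
  Code c;
  c.make_not_entrant();              // eager, at the deopt safepoint
  if (c.can_convert_to_zombie()) {
    c.make_zombie();                 // lazy, in a later sweeper cycle
  }
  std::cout << (c.state == State::zombie ? "zombie" : "still live") << '\n';
  return 0;
}
```
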