diff --git a/src/share/vm/code/codeCache.cpp b/src/share/vm/code/codeCache.cpp
index eef8b4db540bd0be76a6fca2bdccfc9b30678e4d..d6629967227cbfc10d5acd10838bf0f2122e3b8c 100644
--- a/src/share/vm/code/codeCache.cpp
+++ b/src/share/vm/code/codeCache.cpp
@@ -524,12 +524,15 @@ void CodeCache::gc_epilogue() {
   FOR_ALL_ALIVE_BLOBS(cb) {
     if (cb->is_nmethod()) {
       nmethod *nm = (nmethod*)cb;
-      assert(!nm->is_unloaded(), "Tautology");
-      if (needs_cache_clean()) {
-        nm->cleanup_inline_caches();
+      if (!nm->is_zombie()) {
+        if (needs_cache_clean()) {
+          // Clean ICs of unloaded nmethods as well because they may reference other
+          // unloaded nmethods that may be flushed earlier in the sweeper cycle.
+          nm->cleanup_inline_caches();
+        }
+        DEBUG_ONLY(nm->verify());
+        DEBUG_ONLY(nm->verify_oop_relocations());
       }
-      DEBUG_ONLY(nm->verify());
-      DEBUG_ONLY(nm->verify_oop_relocations());
     }
   }
   set_needs_cache_clean(false);
@@ -734,27 +737,6 @@ int CodeCache::mark_for_deoptimization(Method* dependee) {
   return number_of_marked_CodeBlobs;
 }
 
-void CodeCache::make_marked_nmethods_zombies() {
-  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
-  FOR_ALL_ALIVE_NMETHODS(nm) {
-    if (nm->is_marked_for_deoptimization()) {
-
-      // If the nmethod has already been made non-entrant and it can be converted
-      // then zombie it now. Otherwise make it non-entrant and it will eventually
-      // be zombied when it is no longer seen on the stack. Note that the nmethod
-      // might be "entrant" and not on the stack and so could be zombied immediately
-      // but we can't tell because we don't track it on stack until it becomes
-      // non-entrant.
-
-      if (nm->is_not_entrant() && nm->can_not_entrant_be_converted()) {
-        nm->make_zombie();
-      } else {
-        nm->make_not_entrant();
-      }
-    }
-  }
-}
-
 void CodeCache::make_marked_nmethods_not_entrant() {
   assert_locked_or_safepoint(CodeCache_lock);
   FOR_ALL_ALIVE_NMETHODS(nm) {
diff --git a/src/share/vm/code/codeCache.hpp b/src/share/vm/code/codeCache.hpp
index 966304fce096c2c6e7a285a4e8770cdd875dfe53..f098284a30a472cccc948eb5011159923f44e98b 100644
--- a/src/share/vm/code/codeCache.hpp
+++ b/src/share/vm/code/codeCache.hpp
@@ -179,7 +179,6 @@ class CodeCache : AllStatic {
 
   static void mark_all_nmethods_for_deoptimization();
   static int  mark_for_deoptimization(Method* dependee);
-  static void make_marked_nmethods_zombies();
   static void make_marked_nmethods_not_entrant();
 
   // tells how many nmethods have dependencies
diff --git a/src/share/vm/code/compiledIC.cpp b/src/share/vm/code/compiledIC.cpp
index 6f0df1f4ad190911162b787eb85e277a674cb4fd..45f25274eeb2a90d81fa8534a8591ae69373e364 100644
--- a/src/share/vm/code/compiledIC.cpp
+++ b/src/share/vm/code/compiledIC.cpp
@@ -343,8 +343,8 @@ void CompiledIC::set_to_clean() {
     // Kill any leftover stub we might have too
     clear_ic_stub();
     if (is_optimized()) {
-    set_ic_destination(entry);
-  } else {
+      set_ic_destination(entry);
+    } else {
       set_ic_destination_and_value(entry, (void*)NULL);
     }
   } else {
diff --git a/src/share/vm/code/compiledIC.hpp b/src/share/vm/code/compiledIC.hpp
index 8da359ee43c82cbbeb03642debfb57cd60fac1c6..a24bf315e435bbc53b768c2ccbe105797cded258 100644
--- a/src/share/vm/code/compiledIC.hpp
+++ b/src/share/vm/code/compiledIC.hpp
@@ -228,7 +228,7 @@ class CompiledIC: public ResourceObj {
   //
   // They all takes a TRAP argument, since they can cause a GC if the inline-cache buffer is full.
   //
-  void set_to_clean();  // Can only be called during a safepoint operation
+  void set_to_clean();
   void set_to_monomorphic(CompiledICInfo& info);
   void clear_ic_stub();
 
diff --git a/src/share/vm/code/nmethod.cpp b/src/share/vm/code/nmethod.cpp
index f008fe9f4a1905de15bb3f32c74d83b17cdf6e80..f8be257ecde65333a9c34510340ae82de008a909 100644
--- a/src/share/vm/code/nmethod.cpp
+++ b/src/share/vm/code/nmethod.cpp
@@ -1162,7 +1162,6 @@ void nmethod::clear_ic_stubs() {
 
 
 void nmethod::cleanup_inline_caches() {
-  assert_locked_or_safepoint(CompiledIC_lock);
 
   // If the method is not entrant or zombie then a JMP is plastered over the
@@ -1178,7 +1177,8 @@ void nmethod::cleanup_inline_caches() {
     // In fact, why are we bothering to look at oops in a non-entrant method??
   }
 
-  // Find all calls in an nmethod, and clear the ones that points to zombie methods
+  // Find all calls in an nmethod and clear the ones that point to non-entrant,
+  // zombie and unloaded nmethods.
   ResourceMark rm;
   RelocIterator iter(this, low_boundary);
   while(iter.next()) {
@@ -1190,7 +1190,7 @@ void nmethod::cleanup_inline_caches() {
         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
         if( cb != NULL && cb->is_nmethod() ) {
           nmethod* nm = (nmethod*)cb;
-          // Clean inline caches pointing to both zombie and not_entrant methods
+          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
           if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();
         }
         break;
@@ -1200,7 +1200,7 @@ void nmethod::cleanup_inline_caches() {
         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
         if( cb != NULL && cb->is_nmethod() ) {
           nmethod* nm = (nmethod*)cb;
-          // Clean inline caches pointing to both zombie and not_entrant methods
+          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
         }
         break;
@@ -2707,7 +2707,7 @@ void nmethod::verify() {
   // Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
   // seems odd.
 
-  if( is_zombie() || is_not_entrant() )
+  if (is_zombie() || is_not_entrant() || is_unloaded())
     return;
 
   // Make sure all the entry points are correctly aligned for patching.
diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp
index d19b4a1cf1e7accd635c081340fd37d36e808f7c..07a8a7980ec40d57fd9b97188ce7d2aa46d18b77 100644
--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp
+++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp
@@ -3751,7 +3751,7 @@ void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
     // Deoptimize all activations depending on marked nmethods
     Deoptimization::deoptimize_dependents();
 
-    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
+    // Make the dependent methods not entrant
    CodeCache::make_marked_nmethods_not_entrant();
 
     // From now on we know that the dependency information is complete
diff --git a/src/share/vm/runtime/vm_operations.cpp b/src/share/vm/runtime/vm_operations.cpp
index d46e1f775acdf29f3045989e67e157c4c68f4aed..a4219d2fd09d329a2b132dbdb3742496a0920da6 100644
--- a/src/share/vm/runtime/vm_operations.cpp
+++ b/src/share/vm/runtime/vm_operations.cpp
@@ -106,8 +106,8 @@ void VM_Deoptimize::doit() {
   // Deoptimize all activations depending on marked nmethods
   Deoptimization::deoptimize_dependents();
 
-  // Make the dependent methods zombies
-  CodeCache::make_marked_nmethods_zombies();
+  // Make the dependent methods not entrant
+  CodeCache::make_marked_nmethods_not_entrant();
 }